Skip to content

Commit ab1e794

Browse files
ggerganov and iThalay
authored and committed
examples : fix compile warnings [no ci] (#0)
1 parent 54de82b commit ab1e794

File tree

2 files changed

+2
-3
lines changed

2 files changed

+2
-3
lines changed

examples/common.cpp

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -147,7 +147,6 @@ std::string gpt_random_prompt(std::mt19937 & rng) {
147147
case 7: return "He";
148148
case 8: return "She";
149149
case 9: return "They";
150-
default: return "To";
151150
}
152151

153152
return "The";

examples/talk-llama/talk-llama.cpp

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -417,7 +417,7 @@ int main(int argc, char ** argv) {
417417

418418
session_tokens.resize(llama_n_ctx(ctx_llama));
419419
size_t n_token_count_out = 0;
420-
if (!llama_load_session_file(ctx_llama, path_session.c_str(), session_tokens.data(), session_tokens.capacity(), &n_token_count_out)) {
420+
if (!llama_state_load_file(ctx_llama, path_session.c_str(), session_tokens.data(), session_tokens.capacity(), &n_token_count_out)) {
421421
fprintf(stderr, "%s: error: failed to load session file '%s'\n", __func__, path_session.c_str());
422422
return 1;
423423
}
@@ -709,7 +709,7 @@ int main(int argc, char ** argv) {
709709

710710
if (!path_session.empty() && need_to_save_session) {
711711
need_to_save_session = false;
712-
llama_save_session_file(ctx_llama, path_session.c_str(), session_tokens.data(), session_tokens.size());
712+
llama_state_save_file(ctx_llama, path_session.c_str(), session_tokens.data(), session_tokens.size());
713713
}
714714

715715
llama_token id = 0;

0 commit comments

Comments (0)