
Commit 9d26580

remove unused variables
1 parent 8288b36 commit 9d26580

File tree

1 file changed: +0 -4 lines changed

llama.cpp

Lines changed: 0 additions & 4 deletions
@@ -2288,8 +2288,6 @@ size_t llama_copy_state_data(struct llama_context * ctx, uint8_t * dest) {
     char rng_buf[64*1024];
     memset(&rng_buf[0], 0, 64*1024);
     memcpy(&rng_buf[0], rng_ss.str().data(), rng_ss.str().size());
-    const int32_t has_evaluated_once = ctx->has_evaluated_once ? 1 : 0;
-    const int32_t logits_all = ctx->logits_all ? 1 : 0;
     const size_t logits_capacity = ctx->logits.capacity();
     const size_t logits_size = ctx->logits.size();
     const size_t embedding_size = ctx->embedding.size();
@@ -2333,8 +2331,6 @@ size_t llama_set_state_data(struct llama_context * ctx, const uint8_t * src) {
     rng_ss >> ctx->rng;
     LLAMA_ASSERT(rng_ss.fail() == false);

-    int32_t has_evaluated_once;
-    int32_t logits_all;
     size_t logits_capacity;
     size_t logits_size;
     size_t embedding_size;
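
For orientation, the two functions touched here are the context-state serialization pair: llama_copy_state_data() flattens the RNG state, logits, embedding and KV-cache data into a caller-provided buffer, and llama_set_state_data() restores a context from such a buffer. Below is a minimal sketch of how a caller might round-trip that state; it assumes the companion llama_get_state_size() accessor from llama.h and that llama_copy_state_data() returns the number of bytes written, so treat it as an illustration rather than canonical usage.

    // Minimal sketch (assumed API shape from llama.h at the time of this commit):
    // snapshot the full context state into a byte buffer and restore it later.
    #include "llama.h"

    #include <cstdint>
    #include <vector>

    static std::vector<uint8_t> save_state(llama_context * ctx) {
        // llama_get_state_size() gives an upper bound for the serialized state
        std::vector<uint8_t> buf(llama_get_state_size(ctx));
        // llama_copy_state_data() writes RNG, logits, embeddings and KV cache
        const size_t written = llama_copy_state_data(ctx, buf.data());
        buf.resize(written); // keep only the bytes that were actually written
        return buf;
    }

    static void load_state(llama_context * ctx, const std::vector<uint8_t> & buf) {
        // restore the context from a previously saved state buffer
        llama_set_state_data(ctx, buf.data());
    }

Sizing with the upper bound and then trimming to the written size avoids persisting unused trailing bytes. Since the commit only removes local variables that were apparently never serialized (they are flagged as unused), the state buffer format itself should be unaffected.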

0 commit comments
