Skip to content

Commit d7de15a

Browse files
committed
remove deprecated llama_sampler_init_softmax ggml-org/llama.cpp#9896 (comment)
1 parent c72739e commit d7de15a

File tree

2 files changed

+0
-13
lines changed

2 files changed

+0
-13
lines changed

llama_cpp/_internals.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -685,10 +685,6 @@ def add_dist(self, seed: int):
685685
sampler = llama_cpp.llama_sampler_init_dist(seed)
686686
llama_cpp.llama_sampler_chain_add(self.sampler, sampler)
687687

688-
def add_softmax(self):
689-
sampler = llama_cpp.llama_sampler_init_softmax()
690-
llama_cpp.llama_sampler_chain_add(self.sampler, sampler)
691-
692688
def add_top_k(self, k: int):
693689
sampler = llama_cpp.llama_sampler_init_top_k(k)
694690
llama_cpp.llama_sampler_chain_add(self.sampler, sampler)

llama_cpp/llama_cpp.py

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -3551,15 +3551,6 @@ def llama_sampler_init_dist(seed: int) -> llama_sampler_p:
35513551
...
35523552

35533553

3554-
# /// @details Sorts candidate tokens by their logits in descending order and calculate probabilities based on logits.
3555-
# /// NOTE: Avoid using on the full vocabulary as the sorting can become slow. For example, apply top-k or top-p sampling first.
3556-
# DEPRECATED(LLAMA_API struct llama_sampler * llama_sampler_init_softmax (void),
3557-
# "will be removed in the future (see https://github.com/ggml-org/llama.cpp/pull/9896#discussion_r1800920915)");
3558-
@ctypes_function("llama_sampler_init_softmax", [], llama_sampler_p_ctypes)
3559-
def llama_sampler_init_softmax() -> llama_sampler_p:
3560-
...
3561-
3562-
35633554
# /// @details Top-K sampling described in academic paper "The Curious Case of Neural Text Degeneration" https://arxiv.org/abs/1904.09751
35643555
# /// Setting k <= 0 makes this a noop
35653556
# LLAMA_API struct llama_sampler * llama_sampler_init_top_k (int32_t k);

0 commit comments

Comments (0)