From d696251fbe40015e8616ea7a7d7ad5257fd1b896 Mon Sep 17 00:00:00 2001
From: Andrei Betlen
Date: Sat, 30 Sep 2023 16:02:35 -0400
Subject: [PATCH] Fix logits_all bug

---
 llama_cpp/llama.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/llama_cpp/llama.py b/llama_cpp/llama.py
index 615662e..fdde7ea 100644
--- a/llama_cpp/llama.py
+++ b/llama_cpp/llama.py
@@ -439,7 +439,7 @@ class Llama:
     def eval_logits(self) -> Deque[List[float]]:
         return deque(
             self.scores[: self.n_tokens, :].tolist(),
-            maxlen=self._n_ctx if self.model_params.logits_all else 1,
+            maxlen=self._n_ctx if self.context_params.logits_all else 1,
         )
 
     def tokenize(self, text: bytes, add_bos: bool = True) -> List[int]:
@@ -964,7 +964,7 @@ class Llama:
         else:
             stop_sequences = []
 
-        if logprobs is not None and self.model_params.logits_all is False:
+        if logprobs is not None and self.context_params.logits_all is False:
             raise ValueError(
                 "logprobs is not supported for models created with logits_all=False"
            )
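
Below is a minimal usage sketch (not part of the patch) showing the behavior the fix restores: logits_all is a context parameter in llama-cpp-python, and requesting logprobs from create_completion only works when the Llama instance was created with logits_all=True. The model path is an assumed placeholder.

    # Usage sketch; assumes llama-cpp-python is installed and a local GGUF
    # model exists at the placeholder path below.
    from llama_cpp import Llama

    # logits_all=True is forwarded to the context parameters, which is what
    # the patched checks now read (context_params.logits_all).
    llm = Llama(model_path="./models/model.gguf", logits_all=True)

    # With logits_all=False this call would raise the ValueError added above.
    output = llm.create_completion("Hello, world", max_tokens=8, logprobs=5)
    print(output["choices"][0]["logprobs"])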