Add n_ctx, n_vocab, and n_embd properties

Andrei Betlen 2023-05-20 08:13:41 -04:00
parent 01a010be52
commit a7ba85834f


@@ -1291,6 +1291,24 @@ class Llama:
         if llama_cpp.llama_set_state_data(self.ctx, state.llama_state) != state_size:
             raise RuntimeError("Failed to set llama state data")
 
+    @property
+    def n_ctx(self) -> int:
+        """Return the context window size."""
+        assert self.ctx is not None
+        return llama_cpp.llama_n_ctx(self.ctx)
+
+    @property
+    def n_embd(self) -> int:
+        """Return the embedding size."""
+        assert self.ctx is not None
+        return llama_cpp.llama_n_embd(self.ctx)
+
+    @property
+    def n_vocab(self) -> int:
+        """Return the vocabulary size."""
+        assert self.ctx is not None
+        return llama_cpp.llama_n_vocab(self.ctx)
+
     @staticmethod
     def token_eos() -> int:
         """Return the end-of-sequence token."""