diff --git a/README.md b/README.md
index b2e879e..ad5d0f1 100644
--- a/README.md
+++ b/README.md
@@ -14,6 +14,7 @@ This package provides:
 - High-level Python API for text completion
   - OpenAI-like API
   - [LangChain compatibility](https://python.langchain.com/docs/integrations/llms/llamacpp)
+  - [LlamaIndex compatibility](https://docs.llamaindex.ai/en/stable/examples/llm/llama_2_llama_cpp.html)
 - OpenAI compatible web server
   - [Local Copilot replacement](https://llama-cpp-python.readthedocs.io/en/latest/server/#code-completion)
   - [Function Calling support](https://llama-cpp-python.readthedocs.io/en/latest/server/#function-calling)
diff --git a/llama_cpp/server/settings.py b/llama_cpp/server/settings.py
index 902a439..a10390c 100644
--- a/llama_cpp/server/settings.py
+++ b/llama_cpp/server/settings.py
@@ -60,7 +60,7 @@ class ModelSettings(BaseSettings):
     seed: int = Field(
         default=llama_cpp.LLAMA_DEFAULT_SEED, description="Random seed. -1 for random."
     )
-    n_ctx: int = Field(default=2048, ge=1, description="The context size.")
+    n_ctx: int = Field(default=2048, ge=0, description="The context size.")
     n_batch: int = Field(
         default=512, ge=1, description="The batch size to use per eval."
     )