llama.cpp/pyproject.toml
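
# Packaging configuration for llama-cpp-python: project metadata and
# dependencies are managed with Poetry, while the native extension is built
# by the setuptools/scikit-build backend declared in [build-system].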

[tool.poetry]
name = "llama_cpp_python"
version = "0.1.66"
description = "Python bindings for the llama.cpp library"
authors = ["Andrei Betlen <abetlen@gmail.com>"]
license = "MIT"
readme = "README.md"
homepage = "https://github.com/abetlen/llama-cpp-python"
repository = "https://github.com/abetlen/llama-cpp-python"
packages = [{include = "llama_cpp"}]
include = [
    "LICENSE.md",
]
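
# Runtime dependencies. uvicorn, fastapi, and sse-starlette are optional and
# only pulled in through the "server" extra declared below.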
[tool.poetry.dependencies]
python = "^3.8.1"
typing-extensions = "^4.6.3"
numpy = "^1.20.0"
diskcache = "^5.6.1"
uvicorn = { version = "^0.22.0", optional = true }
fastapi = { version = "^0.97.0", optional = true }
sse-starlette = { version = "^1.6.1", optional = true }
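
# Development-only tooling: formatting (black), publishing (twine),
# documentation (mkdocs + mkdocstrings), and testing (pytest, httpx).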
[tool.poetry.group.dev.dependencies]
black = "^23.3.0"
twine = "^4.0.2"
mkdocs = "^1.4.3"
mkdocstrings = {extras = ["python"], version = "^0.22.0"}
mkdocs-material = "^9.1.16"
pytest = "^7.3.2"
httpx = "^0.24.1"
scikit-build = "0.17.6"
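
# Optional feature sets. A sketch of how a user would enable one, assuming the
# published package name llama-cpp-python:
#   pip install llama-cpp-python[server]
# or, from a source checkout:
#   poetry install --extras server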
[tool.poetry.extras]
server = ["uvicorn", "fastapi", "sse-starlette"]
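
# The build backend compiles the bundled llama.cpp native library at install
# time; CMake and Ninja must be available in the build environment for that step.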
[build-system]
requires = [
    "setuptools>=42",
    "scikit-build>=0.13",
    "cmake>=3.18",
    "ninja",
]
build-backend = "setuptools.build_meta"
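
# PEP 621 metadata consumed by the setuptools backend; these extras appear to
# mirror the Poetry extras above so pip users see the same optional groups.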
[project.optional-dependencies]
server = ["uvicorn", "fastapi", "sse-starlette"]
test = ["pytest"]
docs = ["mkdocs", "mkdocstrings[python]", "mkdocs-material"]