llama.cpp/pyproject.toml
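
# Build configuration for the llama-cpp-python bindings. scikit-build-core is the
# PEP 517 build backend and drives the CMake build of the vendored llama.cpp sources.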

[build-system]
requires = ["scikit-build-core>=0.5.0"]
build-backend = "scikit_build_core.build"

[project]
name = "llama_cpp_python"
dynamic = ["version"]
description = "Python bindings for the llama.cpp library"
readme = "README.md"
license = { text = "MIT" }
authors = [
    { name = "Andrei Betlen", email = "abetlen@gmail.com" },
]
dependencies = [
"typing-extensions>=4.5.0",
"numpy>=1.20.0",
"diskcache>=5.6.1",
]
requires-python = ">=3.8"
classifiers = [
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
]
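
# Optional extras. For example, the HTTP server dependencies (fastapi, uvicorn, etc.)
# can be installed with `pip install "llama-cpp-python[server]"`, or every extra at
# once with `pip install "llama-cpp-python[all]"`.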
[project.optional-dependencies]
server = [
    "uvicorn>=0.22.0",
    "fastapi>=0.100.0",
    "pydantic-settings>=2.0.1",
    "sse-starlette>=1.6.1",
]
test = [
    "pytest>=7.4.0",
    "httpx>=0.24.1",
]
dev = [
    "black>=23.3.0",
    "twine>=4.0.2",
    "mkdocs>=1.4.3",
    "mkdocstrings[python]>=0.22.0",
    "mkdocs-material>=9.1.18",
    "pytest>=7.4.0",
    "httpx>=0.24.1",
]
all = [
    "llama_cpp_python[server,test,dev]",
]
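
# scikit-build-core settings: package the llama_cpp directory into the wheel, run the
# CMake build verbosely, and keep git metadata out of source distributions.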
[tool.scikit-build]
wheel.packages = ["llama_cpp"]
cmake.verbose = true
cmake.minimum-version = "3.12"
minimum-version = "0.5"
ninja.make-fallback = false
sdist.exclude = [".git", "vendor/llama.cpp/.git"]
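
# The package version (declared `dynamic` above) is read from llama_cpp/__init__.py via
# scikit-build-core's regex metadata provider; by default the regex matches a
# `__version__ = "..."` assignment.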
[tool.scikit-build.metadata.version]
provider = "scikit_build_core.metadata.regex"
input = "llama_cpp/__init__.py"

[project.urls]
Homepage = "https://github.com/abetlen/llama-cpp-python"
Issues = "https://github.com/abetlen/llama-cpp-python/issues"
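
# Pytest configuration: skip test collection under the vendored sources in vendor/.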
[tool.pytest.ini_options]
addopts = "--ignore=vendor"