# Poetry manifest for the text-generation-inference Python gRPC server.
# Exact pins (no caret) mark packages whose minor releases have broken
# compatibility before; caret ranges follow SemVer as usual.

[tool.poetry]
name = "text-generation-server"
version = "0.8.2"
description = "Text Generation Inference Python gRPC Server"
authors = ["Olivier Dehaene <olivier@huggingface.co>"]

[tool.poetry.scripts]
text-generation-server = 'text_generation_server.cli:app'

[tool.poetry.dependencies]
python = "^3.9"
protobuf = "^4.21.7"
grpcio = "^1.51.1"
grpcio-status = "^1.51.1"
grpcio-reflection = "^1.51.1"
grpc-interceptor = "^0.15.0"
typer = "^0.6.1"
# Optional heavy dependencies, exposed via the extras below.
accelerate = { version = "^0.19.0", optional = true }
bitsandbytes = { version = "^0.38.1", optional = true }
safetensors = "0.3.1"
loguru = "^0.6.0"
opentelemetry-api = "^1.15.0"
opentelemetry-exporter-otlp = "^1.15.0"
opentelemetry-instrumentation-grpc = "^0.36b0"
hf-transfer = "^0.1.2"
sentencepiece = "^0.1.97"
tokenizers = "0.13.3"
huggingface-hub = "0.14.0"

[tool.poetry.extras]
accelerate = ["accelerate"]
bnb = ["bitsandbytes"]

[tool.poetry.group.dev.dependencies]
grpcio-tools = "^1.51.1"
pytest = "^7.3.0"

[tool.pytest.ini_options]
markers = ["private: marks tests as requiring an admin hf token (deselect with '-m \"not private\"')"]

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"