# hf_text-generation-inference/server/pyproject.toml

[tool.poetry]
name = "text-generation-server"
version = "0.6.0"
description = "Text Generation Inference Python gRPC Server"
authors = ["Olivier Dehaene <olivier@huggingface.co>"]

[tool.poetry.scripts]
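# Console-script entry point: installing this package exposes a `text-generation-server`
# command backed by the `app` object in text_generation_server/cli.py (presumably a
# Typer application, given the `typer` dependency below).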
text-generation-server = 'text_generation_server.cli:app'

[tool.poetry.dependencies]
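# Runtime dependencies: the gRPC server stack (protobuf/grpcio), the CLI (typer),
# weight loading and download (safetensors, hf-transfer, huggingface-hub), and
# OpenTelemetry tracing. `accelerate` and `bitsandbytes` are marked optional and
# are only pulled in through the extras declared further down.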
python = "^3.9"
protobuf = "^4.21.7"
grpcio = "^1.51.1"
grpcio-status = "^1.51.1"
grpcio-reflection = "^1.51.1"
grpc-interceptor = "^0.15.0"
typer = "^0.6.1"
accelerate = { version = "^0.15.0", optional = true }
bitsandbytes = { version = "^0.38.1", optional = true }
safetensors = "^0.2.4"
loguru = "^0.6.0"
opentelemetry-api = "^1.15.0"
opentelemetry-exporter-otlp = "^1.15.0"
opentelemetry-instrumentation-grpc = "^0.36b0"
hf-transfer = "^0.1.2"
sentencepiece = "^0.1.97"
tokenizers = "0.13.3"
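# huggingface-hub is pinned to a specific upstream commit rather than a PyPI release.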
huggingface-hub = {git = "https://github.com/huggingface/huggingface_hub.git", rev = "4f27b44ee536cd654e171c7f37478eaf1996cc3f"}

[tool.poetry.extras]
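# Optional feature sets; enable with e.g. `poetry install --extras "accelerate bnb"`
# or, from this directory, `pip install ".[accelerate,bnb]"`.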
accelerate = ["accelerate"]
bnb = ["bitsandbytes"]

[tool.poetry.group.dev.dependencies]
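# Dev-only tooling: grpcio-tools provides `python -m grpc_tools.protoc`, presumably used
# to regenerate the Python gRPC stubs from the .proto definitions; pytest runs the tests.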
grpcio-tools = "^1.51.1"
pytest = "^7.3.0"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"