[tool.poetry]
name = "text-generation-server"
version = "2.0.5-dev0"
description = "Text Generation Inference Python gRPC Server"
authors = ["Olivier Dehaene"]

[tool.poetry.scripts]
text-generation-server = 'text_generation_server.cli:app'

[tool.poetry.dependencies]
python = ">=3.9,<3.13"
protobuf = "^4.25.3"
grpcio = "^1.51.1"
grpcio-status = "^1.51.1"
grpcio-reflection = "^1.51.1"
grpc-interceptor = "^0.15.0"
typer = "^0.6.1"
accelerate = { version = "^0.29.1", optional = true }
bitsandbytes = { version = "^0.43.0", optional = true }
safetensors = "^0.4"
loguru = "^0.6.0"
opentelemetry-api = "^1.25.0"
opentelemetry-exporter-otlp = "^1.25.0"
opentelemetry-instrumentation-grpc = "^0.46b0"
hf-transfer = "^0.1.2"
sentencepiece = "^0.1.97"
tokenizers = "^0.19.1"
huggingface-hub = "^0.23"
transformers = "^4.41"
einops = "^0.6.1"
texttable = { version = "^1.6.7", optional = true }
datasets = { version = "^2.14.0", optional = true }
peft = { version = "^0.10", optional = true }
torch = { version = "^2.3.0", optional = true }
scipy = "^1.11.1"
pillow = "^10.0.0"
outlines = { version = "^0.0.34", optional = true }
prometheus-client = "^0.20.0"
py-cpuinfo = "^9.0.0"

[tool.poetry.extras]
torch = ["torch"]
accelerate = ["accelerate"]
bnb = ["bitsandbytes"]
peft = ["peft"]
quantize = ["texttable", "datasets", "accelerate"]
outlines = ["outlines"]

[tool.poetry.group.dev.dependencies]
grpcio-tools = "^1.51.1"
pytest = "^7.3.0"

[[tool.poetry.source]]
name = "pytorch-gpu-src"
url = "https://download.pytorch.org/whl/cu121"
priority = "explicit"

[tool.pytest.ini_options]
markers = ["private: marks tests as requiring an admin hf token (deselect with '-m \"not private\"')"]

[build-system]
requires = [
    "poetry-core>=1.0.0",
]
build-backend = "poetry.core.masonry.api"
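
# Usage sketch: the optional dependency groups declared in [tool.poetry.extras]
# above are selected at install time via Poetry's --extras / -E flag, e.g.:
#
#   poetry install --extras "torch accelerate"
#   poetry install --extras "quantize"
#
# The exact set of extras to enable depends on your deployment (GPU, quantization,
# guided generation via outlines, etc.).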