hf_text-generation-inference/Cargo.toml

[workspace]
members = [
"benchmark",
"router",
"router/client",
"router/grpc-metadata",
"launcher"
]
# This workspace root is a virtual manifest (no [package] table), so Cargo
# would otherwise fall back to the version-1 feature resolver regardless of
# edition; "2" opts the whole workspace into the edition-2021 resolver.
resolver = "2"

[workspace.package]
version = "2.0.0"
edition = "2021"
authors = ["Olivier Dehaene"]
homepage = "https://github.com/huggingface/text-generation-inference"
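
# Member crates pick up these shared [workspace.package] keys by opting in
# with `workspace = true`. A minimal sketch of what a member manifest such as
# launcher/Cargo.toml might declare (the crate name below is assumed for
# illustration; only the inheritance syntax itself is standard Cargo):
#
#   [package]
#   name = "text-generation-launcher"  # assumed name, not defined in this file
#   version.workspace = true           # resolves to 2.0.0
#   edition.workspace = true           # resolves to 2021
#   authors.workspace = true
#   homepage.workspace = true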

[profile.release]
debug = 1          # limited debug info in optimized builds, enough for usable backtraces
incremental = true # incremental recompilation for faster local rebuilds
lto = "fat"        # full cross-crate link-time optimization
opt-level = 3
codegen-units = 1  # single codegen unit per crate: slower compile, more optimization headroom
panic = "abort"    # abort on panic instead of unwinding