Add router name to /info endpoint (#1854)
Add a `router` key to the `/info` endpoint and set it to `env!("CARGO_PKG_NAME")`, so in TGI it is always `"text-generation-router"`. Happy to change the naming if you can think of a better one (`framework`? `package_name`?).

The goal is to use this information in `InferenceClient` to know whether a model is served with TGI. At the moment we can call https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.2/info and infer it is TGI-served because it returns detailed information, but having an explicit key would be more robust. For context, a transformers-served model only returns `{"ok": "ok"}` (see [here](https://api-inference.huggingface.co/models/microsoft/DialoGPT-large/info)).
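For illustration, a minimal sketch of the client-side check this enables (this is not the actual `InferenceClient` implementation, which lives in `huggingface_hub`; the `reqwest`/`serde` dependencies, the `InfoResponse` type, and the `is_tgi_served` helper are assumptions made for the example):

```rust
use serde::Deserialize;

// Hypothetical response type; only the field needed for detection is deserialized.
#[derive(Deserialize)]
struct InfoResponse {
    // Absent on non-TGI backends (e.g. a transformers-served model returning {"ok": "ok"}).
    router: Option<String>,
}

// GET <base_url>/info and look for the `router` key added by this commit.
fn is_tgi_served(base_url: &str) -> Result<bool, reqwest::Error> {
    let info: InfoResponse = reqwest::blocking::get(format!("{base_url}/info"))?.json()?;
    Ok(info.router.as_deref() == Some("text-generation-router"))
}

fn main() -> Result<(), reqwest::Error> {
    // Example endpoint from the commit message; replace with your own deployment.
    let url = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.2";
    println!("served by TGI: {}", is_tgi_served(url)?);
    Ok(())
}
```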
This commit is contained in:
parent a25737139d
commit bb2b2959a2
```diff
@@ -159,6 +159,8 @@ pub struct Info {
     #[schema(example = "32")]
     pub max_client_batch_size: usize,
     /// Router Info
+    #[schema(example = "text-generation-router")]
+    pub router: &'static str,
     #[schema(example = "0.5.0")]
     pub version: &'static str,
     #[schema(nullable = true, example = "null")]
```
```diff
@@ -1564,6 +1564,7 @@ pub async fn run(
         max_batch_size,
         validation_workers,
         max_client_batch_size,
+        router: env!("CARGO_PKG_NAME"),
         version: env!("CARGO_PKG_VERSION"),
         sha: option_env!("VERGEN_GIT_SHA"),
         docker_label: option_env!("DOCKER_LABEL"),
```
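As a side note, a minimal standalone sketch of the compile-time lookups used in this hunk (the printed values depend on whichever crate is being compiled, so this is illustrative only): `env!` reads a Cargo-provided variable at build time and fails the build if it is missing, while `option_env!` yields an `Option` for variables such as `VERGEN_GIT_SHA` that may not be set.

```rust
fn main() {
    // env! is resolved at compile time; Cargo sets CARGO_PKG_NAME and
    // CARGO_PKG_VERSION for the crate being built (for TGI's router crate,
    // the package name is "text-generation-router").
    let router: &'static str = env!("CARGO_PKG_NAME");
    let version: &'static str = env!("CARGO_PKG_VERSION");
    // VERGEN_GIT_SHA is only present when injected by the build setup,
    // so option_env! is used to avoid a compile error when it is absent.
    let sha: Option<&'static str> = option_env!("VERGEN_GIT_SHA");
    println!("router={router} version={version} sha={sha:?}");
}
```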