fix(router): Handle tokenizer errors

OlivierDehaene 2022-11-14 17:15:19 +01:00
parent feb7806ca4
commit d6d5b12e03
1 changed file with 20 additions and 12 deletions

@@ -123,7 +123,8 @@ fn validation_worker(
     }
     // Get the number of tokens in the input
-    let inputs = tokenizer.encode(request.inputs.clone(), false).unwrap();
+    match tokenizer.encode(request.inputs.clone(), false) {
+        Ok(inputs) => {
     let input_length = inputs.len();
     if input_length > max_input_length {
@@ -138,6 +139,11 @@ fn validation_worker(
     response_tx.send(Ok((input_length, request))).unwrap_or(());
+        }
+        Err(err) => response_tx
+            .send(Err(ValidationError::Tokenizer(err.to_string())))
+            .unwrap_or(()),
+    };
 }
 }

 type ValidationRequest = (
@@ -157,6 +163,8 @@ pub enum ValidationError {
     MaxNewTokens,
     #[error("inputs must have less than {1} tokens. Given: {0}")]
     InputLength(usize, usize),
+    #[error("tokenizer error {0}")]
+    Tokenizer(String),
 }

 impl From<ValidationError> for (StatusCode, Json<ErrorResponse>) {
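
For reference, a minimal, self-contained sketch of the pattern this commit applies: match on the tokenizer's Result instead of calling unwrap(), and forward failures through the response channel as ValidationError::Tokenizer rather than panicking in the validation worker. The encode stub, the example inputs, and the std-only Display impl (standing in for the router's thiserror derive) are illustrative assumptions, not the router's actual code.

use std::fmt;
use std::sync::mpsc;

// Std-only stand-in for the router's thiserror-derived ValidationError;
// only the two variants touched by this commit are mirrored here.
#[derive(Debug)]
enum ValidationError {
    InputLength(usize, usize),
    Tokenizer(String),
}

impl fmt::Display for ValidationError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            ValidationError::InputLength(got, max) => {
                write!(f, "inputs must have less than {max} tokens. Given: {got}")
            }
            ValidationError::Tokenizer(err) => write!(f, "tokenizer error {err}"),
        }
    }
}

// Hypothetical tokenizer stub: whitespace tokens, fails on empty input.
fn encode(input: &str) -> Result<Vec<u32>, String> {
    if input.is_empty() {
        return Err("empty input".to_string());
    }
    Ok(input.split_whitespace().map(|_| 0).collect())
}

fn main() {
    let (response_tx, response_rx) = mpsc::channel();
    let max_input_length = 4;

    for request in ["a short request", "", "one two three four five six"] {
        // The commit's pattern: match on the Result instead of unwrap(),
        // so a tokenizer failure becomes an error sent back to the caller
        // rather than a panic in the validation worker.
        match encode(request) {
            Ok(tokens) => {
                let input_length = tokens.len();
                if input_length > max_input_length {
                    response_tx
                        .send(Err(ValidationError::InputLength(
                            input_length,
                            max_input_length,
                        )))
                        .unwrap_or(());
                    continue;
                }
                response_tx.send(Ok(input_length)).unwrap_or(());
            }
            Err(err) => response_tx
                .send(Err(ValidationError::Tokenizer(err)))
                .unwrap_or(()),
        }
    }
    drop(response_tx); // close the channel so the drain loop below ends

    for result in response_rx {
        match result {
            Ok(len) => println!("validated: {len} tokens"),
            Err(err) => println!("rejected: {err}"),
        }
    }
}

The unwrap_or(()) on each send mirrors the diff's choice: if the receiving side has gone away there is no one left to report to, so the worker silently drops the result instead of panicking.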