From f9ee2500cf271e8d01150abe943ff635db0b7597 Mon Sep 17 00:00:00 2001
From: Miquel Farre
Date: Thu, 14 Nov 2024 11:20:23 +0000
Subject: [PATCH] fix

---
 server/text_generation_server/models/vlm_causal_lm.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/server/text_generation_server/models/vlm_causal_lm.py b/server/text_generation_server/models/vlm_causal_lm.py
index 64e10a3a..f5417c29 100644
--- a/server/text_generation_server/models/vlm_causal_lm.py
+++ b/server/text_generation_server/models/vlm_causal_lm.py
@@ -189,6 +189,11 @@ class VlmCausalLMBatch(FlashCausalLMBatch):
                         images.append(image)
                     else:
                         images.append([image])
+                elif chunk_type == "video":
+                    if config.model_type == "qwen2_vl":
+                        # For now, treat video URLs as special tokens
+                        # This will be processed in the text replacement section below
+                        pass
                 else:
                     raise RuntimeError(f"Invalid chunk type {chunk_type}")
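
For reference, below is a minimal standalone sketch of the chunk dispatch this hunk extends. The `Chunk` and `Config` dataclasses and the `collect_chunks` helper are hypothetical stand-ins for the gRPC chunk objects and model config used in `vlm_causal_lm.py`; only the branch structure (text / image / video / error) mirrors the patched loop.

```python
from dataclasses import dataclass
from typing import List, Optional


# Hypothetical stand-ins for the request chunk and model config;
# they exist only to exercise the branch logic outside the server.
@dataclass
class Chunk:
    chunk_type: str              # "text", "image", or "video"
    text: str = ""
    image: Optional[object] = None
    video_url: str = ""


@dataclass
class Config:
    model_type: str              # e.g. "qwen2_vl"


def collect_chunks(chunks: List[Chunk], config: Config):
    """Dispatch input chunks the way the patched loop does."""
    texts: List[str] = []
    images: List[object] = []
    for chunk in chunks:
        if chunk.chunk_type == "text":
            texts.append(chunk.text)
        elif chunk.chunk_type == "image":
            # The real file chooses flat vs. nested appends based on
            # config.model_type; nesting is used here as a placeholder.
            images.append([chunk.image])
        elif chunk.chunk_type == "video":
            if config.model_type == "qwen2_vl":
                # As in the patch: the video URL is left in the prompt for
                # now and handled later, in the text replacement step.
                pass
        else:
            raise RuntimeError(f"Invalid chunk type {chunk.chunk_type}")
    return texts, images


# Example: a qwen2_vl request mixing a text chunk and a video chunk.
texts, images = collect_chunks(
    [
        Chunk("text", text="Describe this clip: "),
        Chunk("video", video_url="https://example.com/clip.mp4"),
    ],
    Config(model_type="qwen2_vl"),
)
print(texts, images)   # ['Describe this clip: '] []
```

Note that in this sketch, as in the patch, a video chunk sent to a model other than qwen2_vl is silently skipped rather than rejected; the new branch only reserves the hook for the later text replacement step.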