[Model] Remove image mm limit for LLaMa4 (#16365)

Signed-off-by: Ye (Charlotte) Qi <yeq@meta.com>
This commit is contained in:
Ye (Charlotte) Qi
2025-04-10 02:36:27 -07:00
committed by GitHub
parent ec1f9c8c91
commit 61de3ef74b
2 changed files with 26 additions and 7 deletions

View File

@@ -477,7 +477,9 @@ class Mllama4ProcessingInfo(BaseProcessingInfo):
**kwargs)
def get_supported_mm_limits(self) -> Mapping[str, Optional[int]]:
    """Return the per-modality limit on multimodal items.

    Returns a mapping from modality name to the maximum number of items
    allowed per prompt; ``None`` means no enforced limit.
    """
    # The stripped diff left the removed `return {"image": 10}` line in
    # place, making everything after it dead code; the commit's intent is
    # to lift the hard cap, so only the `None` return remains.
    # Although vLLM can support more images from an infra capability
    # perspective, we do not recommend using >10 images in practice.
    return {"image": None}
@staticmethod
def get_patch_per_chunk(vision_config: Llama4VisionConfig) -> int: