[Model] Remove image mm limit for LLaMa4 (#16365)
Signed-off-by: Ye (Charlotte) Qi <yeq@meta.com>
This commit is contained in: 61de3ef74b
Committed by: GitHub
Parent commit: ec1f9c8c91
@@ -477,7 +477,9 @@ class Mllama4ProcessingInfo(BaseProcessingInfo):
             **kwargs)

     def get_supported_mm_limits(self) -> Mapping[str, Optional[int]]:
-        return {"image": 10}
+        # Although vLLM can support more images from an infra capability
+        # perspective, we do not recommend using >10 images in practice.
+        return {"image": None}

     @staticmethod
     def get_patch_per_chunk(vision_config: Llama4VisionConfig) -> int:
Reference in New Issue
Block a user