[Model] Add support for the multi-modal Llama 3.2 model (#8811)
Co-authored-by: simon-mo <xmo@berkeley.edu> Co-authored-by: Chang Su <chang.s.su@oracle.com> Co-authored-by: Simon Mo <simon.mo@hey.com> Co-authored-by: Roger Wang <136131678+ywang96@users.noreply.github.com> Co-authored-by: Roger Wang <ywang@roblox.com>
This commit is contained in:
@@ -10,6 +10,7 @@ from vllm.transformers_utils.configs.granite import GraniteConfig
|
||||
from vllm.transformers_utils.configs.internvl import InternVLChatConfig
|
||||
from vllm.transformers_utils.configs.jais import JAISConfig
|
||||
from vllm.transformers_utils.configs.medusa import MedusaConfig
|
||||
from vllm.transformers_utils.configs.mllama import MllamaConfig
|
||||
from vllm.transformers_utils.configs.mlp_speculator import MLPSpeculatorConfig
|
||||
from vllm.transformers_utils.configs.mpt import MPTConfig
|
||||
from vllm.transformers_utils.configs.nemotron import NemotronConfig
|
||||
@@ -26,6 +27,7 @@ __all__ = [
|
||||
"MedusaConfig",
|
||||
"EAGLEConfig",
|
||||
"ExaoneConfig",
|
||||
"MllamaConfig",
|
||||
"MLPSpeculatorConfig",
|
||||
"NemotronConfig",
|
||||
"SolarConfig",
|
||||
|
||||
28
vllm/transformers_utils/configs/mllama.py
Normal file
28
vllm/transformers_utils/configs/mllama.py
Normal file
@@ -0,0 +1,28 @@
|
||||
from transformers.models.mllama import configuration_mllama as mllama_hf_config
|
||||
|
||||
|
||||
class MllamaTextConfig(mllama_hf_config.MllamaTextConfig):
    """Text config that forces ``is_encoder_decoder=True``.

    Use this class to override ``is_encoder_decoder``:

    - transformers regards mllama as ``is_encoder_decoder=False``
    - vLLM needs ``is_encoder_decoder=True`` to enable cross-attention
    """

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        # Override whatever the upstream base class set: vLLM requires the
        # encoder/decoder code path so cross-attention is enabled.
        self.is_encoder_decoder = True
|
||||
|
||||
class MllamaConfig(mllama_hf_config.MllamaConfig):
    """Mllama config that routes ``text_config`` through the vLLM override.

    Ensures a dict-valued ``text_config`` (e.g. parsed from ``config.json``)
    is materialized as the vLLM :class:`MllamaTextConfig`, which forces
    ``is_encoder_decoder=True``.
    """

    def __init__(self, text_config=None, **kwargs) -> None:
        # Promote a plain dict to the vLLM text-config subclass; any other
        # value (None or an already-built config) is passed through as-is.
        if isinstance(text_config, dict):
            text_config = MllamaTextConfig(**text_config)
        super().__init__(text_config=text_config, **kwargs)
|
||||
Reference in New Issue
Block a user