[BugFix] skip language model in Encoder (#30242)
Signed-off-by: dengyunyang <584797741@qq.com>
This commit is contained in:
@@ -34,7 +34,7 @@ import einops
|
||||
import torch
|
||||
import torch.nn as nn
|
||||
import torch.nn.functional as F
|
||||
from transformers import BatchFeature
|
||||
from transformers import BatchFeature, Qwen2ForCausalLM
|
||||
from transformers.models.qwen2_5_vl import Qwen2_5_VLProcessor
|
||||
from transformers.models.qwen2_5_vl.configuration_qwen2_5_vl import (
|
||||
Qwen2_5_VLConfig,
|
||||
@@ -1567,3 +1567,11 @@ class Qwen2_5_VLForConditionalGeneration(
|
||||
connector="visual.merger.",
|
||||
tower_model="visual.",
|
||||
)
|
||||
|
||||
@classmethod
def get_language_model_spec(cls) -> tuple[nn.Module | None, str | None]:
    """Describe where the text backbone lives in this multimodal model.

    Returns:
        A ``(model_class, attr_name)`` pair: the HF class implementing the
        language model and the attribute name under which it is stored on
        this wrapper (used e.g. to skip the LM when loading the encoder).
    """
    spec = (Qwen2ForCausalLM, "language_model")
    return spec
|
||||
|
||||
Reference in New Issue
Block a user