[BugFix] skip language model in Encoder (#30242)

Signed-off-by: dengyunyang <584797741@qq.com>
This commit is contained in:
dengyunyang
2025-12-22 21:25:59 +08:00
committed by GitHub
parent 2cf91c2ea4
commit 8f8f469b1b
8 changed files with 116 additions and 3 deletions

View File

@@ -34,7 +34,7 @@ import einops
import torch
import torch.nn as nn
import torch.nn.functional as F
from transformers import BatchFeature
from transformers import BatchFeature, Qwen2ForCausalLM
from transformers.models.qwen2_5_vl import Qwen2_5_VLProcessor
from transformers.models.qwen2_5_vl.configuration_qwen2_5_vl import (
Qwen2_5_VLConfig,
@@ -1567,3 +1567,11 @@ class Qwen2_5_VLForConditionalGeneration(
connector="visual.merger.",
tower_model="visual.",
)
@classmethod
def get_language_model_spec(cls) -> tuple[type[nn.Module] | None, str | None]:
    """
    Return the language-model spec for this multimodal model.

    Returns:
        A ``(language_model_class, language_model_attr)`` pair:
        the class implementing the text decoder (here
        ``Qwen2ForCausalLM``) and the attribute name under which it is
        stored on the model instance (``"language_model"``).

    Note:
        The annotation uses ``type[nn.Module]`` because the first
        element is the *class itself*, not an instance.
    """
    return Qwen2ForCausalLM, "language_model"