commit 5400014d55 (parent 3a92c6f3b5)
Author:    Isotr0py
Date:      2026-01-29 18:20:52 +08:00
Committed: via GitHub

[Chore] Remove use_data_parallel kwargs from ViT implementation (#33310)

Signed-off-by: Isotr0py <mozf@mail2.sysu.edu.cn>

9 changed files with 36 additions and 89 deletions
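The change is mechanical and repeated across all nine files: rather than threading a use_data_parallel: bool kwarg through every vision-module constructor, each module now asks a shared helper, is_vit_use_data_parallel(), whether the ViT should run data-parallel. A minimal sketch of the constructor-level before/after (signatures abbreviated; the full ones are in the diff below):

    # Before: callers had to plumb the flag down the module tree.
    class Idefics2VisionAttention(nn.Module):
        def __init__(self, config, quant_config=None, prefix="",
                     use_data_parallel=False) -> None:
            super().__init__()
            ...

    # After: the flag is derived once, inside __init__, from shared state.
    class Idefics2VisionAttention(nn.Module):
        def __init__(self, config, quant_config=None, prefix="") -> None:
            super().__init__()
            use_data_parallel = is_vit_use_data_parallel()
            ...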


@@ -39,7 +39,7 @@ from vllm.model_executor.layers.linear import (
 from vllm.model_executor.layers.quantization import QuantizationConfig
 from vllm.model_executor.model_loader.weight_utils import default_weight_loader
-from .vision import run_dp_sharded_vision_model
+from .vision import is_vit_use_data_parallel, run_dp_sharded_vision_model
 class Idefics2VisionEmbeddings(nn.Module):
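Only the import of is_vit_use_data_parallel is visible in this diff; its body lives in .vision. A plausible sketch of such a helper, assuming it derives the answer from vLLM's engine-wide config (the ParallelConfig field name below is a guess, not confirmed by this commit):

    from vllm.config import get_current_vllm_config

    def is_vit_use_data_parallel() -> bool:
        # Assumption: whether the vision encoder runs data-parallel is a
        # property of the engine-wide parallel config, so it can be read
        # here instead of being passed through every constructor.
        parallel_config = get_current_vllm_config().parallel_config
        return getattr(
            parallel_config, "enable_multimodal_encoder_data_parallel", False
        )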
@@ -126,9 +126,9 @@ class Idefics2VisionAttention(nn.Module):
         config: Idefics2VisionConfig,
         quant_config: QuantizationConfig | None = None,
         prefix: str = "",
-        use_data_parallel: bool = False,
     ) -> None:
         super().__init__()
+        use_data_parallel = is_vit_use_data_parallel()
         self.config = config
         self.embed_dim = config.hidden_size
         self.num_heads = config.num_attention_heads
@@ -187,11 +187,12 @@ class Idefics2VisionMLP(nn.Module):
         config: Idefics2VisionConfig,
         quant_config: QuantizationConfig | None = None,
         prefix: str = "",
-        use_data_parallel: bool = False,
     ) -> None:
         super().__init__()
         self.config = config
         self.activation_fn = get_act_fn(config.hidden_act)
+        use_data_parallel = is_vit_use_data_parallel()
         self.fc1 = ColumnParallelLinear(
             config.hidden_size,
             config.intermediate_size,
@@ -222,7 +223,6 @@ class Idefics2EncoderLayer(nn.Module):
         config: Idefics2Config,
         quant_config: QuantizationConfig | None = None,
         prefix: str = "",
-        use_data_parallel: bool = False,
     ) -> None:
         super().__init__()
         self.embed_dim = config.hidden_size
@@ -230,14 +230,12 @@
             config,
             quant_config=quant_config,
             prefix=f"{prefix}.self_attn",
-            use_data_parallel=use_data_parallel,
         )
         self.layer_norm1 = nn.LayerNorm(self.embed_dim, eps=config.layer_norm_eps)
         self.mlp = Idefics2VisionMLP(
             config,
             quant_config=quant_config,
             prefix=f"{prefix}.mlp",
-            use_data_parallel=use_data_parallel,
         )
         self.layer_norm2 = nn.LayerNorm(self.embed_dim, eps=config.layer_norm_eps)
@@ -279,7 +277,6 @@ class Idefics2Encoder(nn.Module):
         *,
         num_hidden_layers_override: int | None = None,
         prefix: str = "",
-        use_data_parallel: bool = False,
     ) -> None:
         super().__init__()
@@ -296,7 +293,6 @@
                 config,
                 quant_config=quant_config,
                 prefix=f"{prefix}.layers.{layer_idx}",
-                use_data_parallel=use_data_parallel,
             )
             for layer_idx in range(num_hidden_layers)
         ]
@@ -331,20 +327,18 @@ class Idefics2VisionTransformer(nn.Module):
         num_hidden_layers_override: int | None = None,
         require_post_norm: bool = True,
         prefix: str = "",
-        use_data_parallel: bool = False,
     ) -> None:
         super().__init__()
         embed_dim = config.hidden_size
         self.config = config
-        self.use_data_parallel = use_data_parallel
+        self.use_data_parallel = is_vit_use_data_parallel()
         self.embeddings = Idefics2VisionEmbeddings(config)
         self.encoder = Idefics2Encoder(
             config,
             quant_config=quant_config,
             num_hidden_layers_override=num_hidden_layers_override,
             prefix=f"{prefix}.encoder",
-            use_data_parallel=use_data_parallel,
         )
         num_hidden_layers = config.num_hidden_layers
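Note that the transformer still stores self.use_data_parallel, presumably so the forward path can keep dispatching to run_dp_sharded_vision_model (imported in the first hunk) when the flag is set. A hedged sketch of that dispatch, with illustrative names only:

    import torch
    from torch import nn
    from vllm.model_executor.models.vision import run_dp_sharded_vision_model

    def encode_images(vit: nn.Module, pixel_values: torch.Tensor,
                      use_data_parallel: bool) -> torch.Tensor:
        # Shard the image batch across data-parallel ranks and gather the
        # encoder outputs, or run the ViT directly on the full batch.
        if use_data_parallel:
            return run_dp_sharded_vision_model(pixel_values, vit)
        return vit(pixel_values)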