Clean up unused padding_idx variables across many model definitions (#13240)

Signed-off-by: Tyler Michael Smith <tyler@neuralmagic.com>
This commit is contained in:
Tyler Michael Smith
2025-03-04 16:27:00 -05:00
committed by GitHub
parent 288ca110f6
commit 4f5b059f14
30 changed files with 1 addition and 35 deletions

View File

@@ -49,10 +49,7 @@ class WhisperAudioInputs(TypedDict):
class WhisperPositionalEmbedding(nn.Embedding):
-    def __init__(self,
-                 num_positions: int,
-                 embedding_dim: int,
-                 padding_idx: Optional[int] = None):
+    def __init__(self, num_positions: int, embedding_dim: int):
super().__init__(num_positions, embedding_dim)
def forward(self, position_ids):
@@ -359,7 +356,6 @@ class WhisperEncoder(nn.Module):
config = vllm_config.model_config.hf_config
embed_dim = config.d_model
self.num_mel_bins = config.num_mel_bins
-        self.padding_idx = config.pad_token_id
self.max_source_positions = config.max_source_positions
self.embed_scale = (math.sqrt(embed_dim)
if config.scale_embedding else 1.0)