Clean up unused padding_idx variables across many model definitions (#13240)

Signed-off-by: Tyler Michael Smith <tyler@neuralmagic.com>
This commit is contained in:
Tyler Michael Smith
2025-03-04 16:27:00 -05:00
committed by GitHub
parent 288ca110f6
commit 4f5b059f14
30 changed files with 1 addition and 35 deletions

View File

@@ -90,7 +90,6 @@ class MambaModel(nn.Module):
is_lora_enabled = bool(lora_config)
self.config = config
self.padding_idx = config.pad_token_id
lora_vocab = ((lora_config.lora_extra_vocab_size *
(lora_config.max_loras or 1)) if lora_config else 0)
self.vocab_size = config.vocab_size + lora_vocab