Clean up unused padding_idx variables across many model definitions (#13240)

Signed-off-by: Tyler Michael Smith <tyler@neuralmagic.com>
This commit is contained in:
Tyler Michael Smith
2025-03-04 16:27:00 -05:00
committed by GitHub
parent 288ca110f6
commit 4f5b059f14
30 changed files with 1 addition and 35 deletions

View File

@@ -365,7 +365,6 @@ class MiniCPMModel(nn.Module):
self.config = config
self.cache_config = cache_config
self.quant_config = quant_config
self.padding_idx = config.pad_token_id
lora_vocab = (lora_config.lora_extra_vocab_size *
(lora_config.max_loras or 1)) if lora_config else 0
self.vocab_size = config.vocab_size + lora_vocab