[Misc] Delete LoRA-related redundancy code (#17841)

Signed-off-by: Jee Jee Li <pandaleefree@gmail.com>
Author: Jee Jee Li
Date: 2025-05-08 21:02:21 +08:00 (committed by GitHub)
parent 015815fe01
commit a944f8ede7
4 changed files with 3 additions and 17 deletions


@@ -955,11 +955,7 @@ class Phi4MMForCausalLM(nn.Module, SupportsLoRA, SupportsMultiModal):
             self.unpadded_vocab_size,
             config.hidden_size,
             org_num_embeddings=config.vocab_size,
-            padding_size=(
-                DEFAULT_VOCAB_PADDING_SIZE
-                # We need bigger padding if using lora for kernel
-                # compatibility
-                if not lora_config else lora_config.lora_vocab_padding_size),
+            padding_size=DEFAULT_VOCAB_PADDING_SIZE,
             quant_config=quant_config,
         )
         if config.tie_word_embeddings:
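
For context, the deleted branch picked a larger vocab padding when a LoRA config was present, because the LoRA kernels need coarser LM-head alignment than the default. Below is a minimal sketch of that rounding logic, assuming vLLM's usual defaults (DEFAULT_VOCAB_PADDING_SIZE = 64, lora_vocab_padding_size = 256); pad_vocab_size is a hypothetical stand-in for the rounding applied inside ParallelLMHead, not the library's actual API.

```python
# Illustrative sketch only: constants mirror the defaults assumed above,
# and pad_vocab_size stands in for vLLM's internal rounding helper.
DEFAULT_VOCAB_PADDING_SIZE = 64   # default LM-head alignment
LORA_VOCAB_PADDING_SIZE = 256     # coarser alignment the LoRA kernels expect


def pad_vocab_size(vocab_size: int,
                   pad_to: int = DEFAULT_VOCAB_PADDING_SIZE) -> int:
    """Round vocab_size up to the nearest multiple of pad_to."""
    return ((vocab_size + pad_to - 1) // pad_to) * pad_to


# Phi-4-multimodal's vocab size used as an example value:
print(pad_vocab_size(200064))                           # 200064 (already a multiple of 64)
print(pad_vocab_size(200064, LORA_VOCAB_PADDING_SIZE))  # 200192 (rounded up to a multiple of 256)
```

With the conditional removed, the LM head is always padded to DEFAULT_VOCAB_PADDING_SIZE; per the commit title, the LoRA-specific branch had become redundant here.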