[LoRA][1/N]Remove LoRA extra vocab (#28382)

Signed-off-by: Jee Jee Li <pandaleefree@gmail.com>
Jee Jee Li
2025-11-12 03:06:21 +08:00
committed by GitHub
parent 8c32c6e4b4
commit 9d1c474704
65 changed files with 197 additions and 754 deletions
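Before this change, model constructors carried a separate `unpadded_vocab_size` so that LoRA adapters could add extra vocabulary tokens on top of the base `config.vocab_size`. With extra-vocab support removed, the two values are always identical, so the indirection is deleted and `config.vocab_size` is passed straight to `ParallelLMHead` and `LogitsProcessor`. The Chameleon hunks below are representative of the mechanical change repeated across the 65 touched files.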


@@ -963,9 +963,9 @@ class ChameleonForConditionalGeneration(
         self.model = ChameleonModel(
             vllm_config=vllm_config, prefix=maybe_prefix(prefix, "model")
         )
-        self.unpadded_vocab_size = config.vocab_size
         self.lm_head = ParallelLMHead(
-            self.unpadded_vocab_size,
+            config.vocab_size,
             config.hidden_size,
             prefix=maybe_prefix(prefix, "lm_head"),
         )
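For context, a minimal sketch of the pattern this series retires, as it looked in models that actually consumed the extra vocab. The names `lora_config.lora_extra_vocab_size`, `org_num_embeddings`, and `DEFAULT_VOCAB_PADDING_SIZE` follow the pre-#28382 API; treat this as an illustration, not Chameleon's exact code — Chameleon never added the LoRA term, which is why its removal above is a pure rename.

# Illustrative pre-#28382 extra-vocab setup (not Chameleon's exact code):
# the LM head was sized for the base vocabulary plus slots reserved for
# LoRA-added tokens, while org_num_embeddings kept the checkpoint's true size.
self.unpadded_vocab_size = config.vocab_size
if lora_config is not None:
    # Reserve embedding rows for tokens added by LoRA adapters.
    self.unpadded_vocab_size += lora_config.lora_extra_vocab_size
self.lm_head = ParallelLMHead(
    self.unpadded_vocab_size,              # base vocab + LoRA extras
    config.hidden_size,
    org_num_embeddings=config.vocab_size,  # rows present in the checkpoint
    padding_size=DEFAULT_VOCAB_PADDING_SIZE,
    prefix=maybe_prefix(prefix, "lm_head"),
)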
@@ -973,9 +973,7 @@ class ChameleonForConditionalGeneration(
         self.lm_head.weight = self.model.embed_tokens.weight
         logit_scale = getattr(config, "logit_scale", 1.0)
-        self.logits_processor = LogitsProcessor(
-            self.unpadded_vocab_size, config.vocab_size, logit_scale
-        )
+        self.logits_processor = LogitsProcessor(config.vocab_size, scale=logit_scale)
         self.make_empty_intermediate_tensors = (
             self.model.make_empty_intermediate_tensors
         )
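The second hunk is the same simplification on the logits side. The old call passed both sizes positionally — (vocab_size, org_vocab_size, scale) — so the processor could tell the extended logit rows apart from the checkpoint vocabulary; with a single size left, only the scale survives, now as an explicit keyword. Downstream nothing changes; a sketch of the call site, assuming the usual vLLM compute_logits shape:

# Call-site sketch (untouched by this PR): the processor projects hidden
# states through the LM head and applies logit_scale before sampling.
def compute_logits(self, hidden_states: torch.Tensor) -> torch.Tensor | None:
    return self.logits_processor(self.lm_head, hidden_states)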