[LoRA][1/N] Remove LoRA extra vocab (#28382)

Signed-off-by: Jee Jee Li <pandaleefree@gmail.com>
Author: Jee Jee Li <pandaleefree@gmail.com>
Date: 2025-11-12 03:06:21 +08:00 (committed by GitHub)
Parent: 8c32c6e4b4
Commit: 9d1c474704
65 changed files with 197 additions and 754 deletions


@@ -554,7 +554,6 @@ class LongcatFlashForCausalLM(nn.Module, SupportsLoRA, SupportsPP):
         super().__init__()
         config = FlashConfig(**vllm_config.model_config.hf_config.__dict__)
         quant_config = vllm_config.quant_config
-        lora_config = vllm_config.lora_config
         self.config = config
         config.intermediate_size = (
@@ -562,7 +561,7 @@ class LongcatFlashForCausalLM(nn.Module, SupportsLoRA, SupportsPP):
             if hasattr(config, "ffn_hidden_size")
             else config.intermediate_size
         )
-        self.lora_config = lora_config
         self.quant_config = quant_config
         self.model = FlashModel(
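
For context, "LoRA extra vocab" refers to the extra embedding rows vLLM historically reserved (via LoRAConfig.lora_extra_vocab_size) so LoRA adapters could introduce new tokens; models pulled lora_config in their constructors mainly to apply this padding. Below is a minimal, self-contained sketch of the sizing logic this series removes. The helper name and the print usage are illustrative assumptions, not vLLM's actual code; only the lora_extra_vocab_size field is taken from vLLM's LoRAConfig.

# Sketch (assumed names; not vLLM source) of the embedding-sizing
# pattern removed by this PR series.
from dataclasses import dataclass


@dataclass
class LoRAConfig:
    # vLLM's LoRAConfig historically carried this field.
    lora_extra_vocab_size: int = 256


def embedding_rows(vocab_size: int, lora_config: LoRAConfig | None) -> int:
    # Old behavior: pad the embedding table so LoRA adapters can add tokens.
    # New behavior (this series): size embeddings from the base vocab only.
    if lora_config is not None:
        return vocab_size + lora_config.lora_extra_vocab_size
    return vocab_size


print(embedding_rows(32000, LoRAConfig()))  # old: 32256
print(embedding_rows(32000, None))          # new: 32000

With the padding gone, constructors such as LongcatFlashForCausalLM's no longer need lora_config at all, which is why the diff above deletes both the local variable and the self.lora_config attribute.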