[LoRA][2/2] Remove LoRA extra vocab (#28545)

Signed-off-by: Jee Jee Li <pandaleefree@gmail.com>
Author: Jee Jee Li
Committed by: GitHub
Date: 2025-11-21 09:46:43 +08:00
Parent: df44df0143
Commit: 9875be6431
28 changed files with 133 additions and 528 deletions

@@ -484,7 +484,6 @@ class EngineArgs:
     fully_sharded_loras: bool = LoRAConfig.fully_sharded_loras
     max_cpu_loras: int | None = LoRAConfig.max_cpu_loras
     lora_dtype: str | torch.dtype | None = LoRAConfig.lora_dtype
-    lora_extra_vocab_size: int = LoRAConfig.lora_extra_vocab_size
     ray_workers_use_nsight: bool = ParallelConfig.ray_workers_use_nsight
     num_gpu_blocks_override: int | None = CacheConfig.num_gpu_blocks_override
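The hunk above edits the pattern vLLM uses to keep the CLI-facing `EngineArgs` dataclass in sync with its config dataclasses: each `EngineArgs` field reads its default straight off the config class, so deleting `lora_extra_vocab_size` from `LoRAConfig` means deleting its mirror here in the same commit. A minimal sketch of that pattern follows; `LoRAConfigSketch` and `EngineArgsSketch` are illustrative stand-ins, not vLLM's real definitions.

```python
from dataclasses import dataclass


@dataclass
class LoRAConfigSketch:
    # Stand-in for vLLM's LoRAConfig; field names and defaults are illustrative.
    max_loras: int = 1
    max_lora_rank: int = 16
    # lora_extra_vocab_size: int = 256  # the kind of field this commit removes


@dataclass
class EngineArgsSketch:
    # Defaults are read off the config class at class-definition time,
    # so the CLI dataclass can never drift from the config's defaults.
    max_loras: int = LoRAConfigSketch.max_loras
    max_lora_rank: int = LoRAConfigSketch.max_lora_rank
```

Because those default expressions are evaluated when the class is defined, a field removed from the config class but still referenced here would fail fast with an `AttributeError`, which is why both sides have to be removed in the same commit.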
@@ -1011,9 +1010,6 @@ class EngineArgs:
         )
         lora_group.add_argument("--max-loras", **lora_kwargs["max_loras"])
         lora_group.add_argument("--max-lora-rank", **lora_kwargs["max_lora_rank"])
-        lora_group.add_argument(
-            "--lora-extra-vocab-size", **lora_kwargs["lora_extra_vocab_size"]
-        )
         lora_group.add_argument(
             "--lora-dtype",
             **lora_kwargs["lora_dtype"],
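This hunk deletes the `--lora-extra-vocab-size` flag from the LoRA argument group. Below is a self-contained sketch of the registration pattern, assuming `lora_kwargs` maps flag names to `add_argument` keyword dicts (vLLM derives the real mapping from `LoRAConfig`; the one here is hand-written).

```python
import argparse

parser = argparse.ArgumentParser()
lora_group = parser.add_argument_group("LoRA")

# Hand-written stand-in for the generated lora_kwargs mapping.
lora_kwargs = {
    "max_loras": {"type": int, "default": 1, "help": "Max concurrent LoRAs."},
    "max_lora_rank": {"type": int, "default": 16, "help": "Max LoRA rank."},
    # The "lora_extra_vocab_size" entry is gone, so there is nothing left
    # for the deleted add_argument call to register.
}

lora_group.add_argument("--max-loras", **lora_kwargs["max_loras"])
lora_group.add_argument("--max-lora-rank", **lora_kwargs["max_lora_rank"])

args = parser.parse_args(["--max-lora-rank", "32"])
print(args.max_lora_rank)  # 32
```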
@@ -1680,7 +1676,6 @@ class EngineArgs:
             max_loras=self.max_loras,
             default_mm_loras=self.default_mm_loras,
             fully_sharded_loras=self.fully_sharded_loras,
-            lora_extra_vocab_size=self.lora_extra_vocab_size,
             lora_dtype=self.lora_dtype,
             max_cpu_loras=self.max_cpu_loras
             if self.max_cpu_loras and self.max_cpu_loras > 0
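The last hunk stops forwarding the field where `create_engine_config` builds the `LoRAConfig`. The call-site effect is sketched below with a stand-in dataclass (`ConfigSketch` is not vLLM's `LoRAConfig`): once the field is gone, passing the old keyword raises a `TypeError` instead of being silently accepted.

```python
from dataclasses import dataclass


@dataclass
class ConfigSketch:
    # Stand-in for the post-commit LoRAConfig: no lora_extra_vocab_size field.
    max_loras: int = 1
    fully_sharded_loras: bool = False


cfg = ConfigSketch(max_loras=4, fully_sharded_loras=True)  # fine

try:
    ConfigSketch(max_loras=4, lora_extra_vocab_size=256)  # type: ignore[call-arg]
except TypeError as err:
    print(err)  # ...unexpected keyword argument 'lora_extra_vocab_size'
```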