Remove LoRA bias support (#25807)

Signed-off-by: Ashwin Phadke <ashwinphadke12@rediffmail.com>
Signed-off-by: Ashwin Phadke <23502062+ashwin-phadke@users.noreply.github.com>
Signed-off-by: Jee Jee Li <pandaleefree@gmail.com>
Co-authored-by: Jee Jee Li <pandaleefree@gmail.com>
This commit is contained in:
Ashwin Phadke
2025-10-10 15:20:33 +05:30
committed by GitHub
parent 3ee202ea1e
commit ab196edefb
20 changed files with 35 additions and 366 deletions

View File

@@ -439,7 +439,6 @@ class EngineArgs:
     video_pruning_rate: float = MultiModalConfig.video_pruning_rate
     # LoRA fields
     enable_lora: bool = False
-    enable_lora_bias: bool = LoRAConfig.bias_enabled
     max_loras: int = LoRAConfig.max_loras
     max_lora_rank: int = LoRAConfig.max_lora_rank
     default_mm_loras: Optional[dict[str, str]] = LoRAConfig.default_mm_loras
@@ -916,7 +915,6 @@ class EngineArgs:
             action=argparse.BooleanOptionalAction,
             help="If True, enable handling of LoRA adapters.",
         )
-        lora_group.add_argument("--enable-lora-bias", **lora_kwargs["bias_enabled"])
         lora_group.add_argument("--max-loras", **lora_kwargs["max_loras"])
         lora_group.add_argument("--max-lora-rank", **lora_kwargs["max_lora_rank"])
         lora_group.add_argument(
@@ -1515,7 +1513,6 @@ class EngineArgs:
         lora_config = (
             LoRAConfig(
-                bias_enabled=self.enable_lora_bias,
                 max_lora_rank=self.max_lora_rank,
                 max_loras=self.max_loras,
                 default_mm_loras=self.default_mm_loras,