[Bugfix] Fix LoRA extra vocab size (#15047)

Signed-off-by: Jee Jee Li <pandaleefree@gmail.com>
This commit is contained in:
Jee Jee Li
2025-03-19 00:40:29 +08:00
committed by GitHub
parent 179a619c21
commit 46c759c165
5 changed files with 1 addition and 5 deletions

View File

@@ -2324,7 +2324,7 @@ class LoRAConfig:
# Setting the maximum rank to 512 should be able to satisfy the vast
# majority of applications.
possible_max_ranks = (8, 16, 32, 64, 128, 256, 320, 512)
possible_lora_extra_vocab_size = (0, 256, 512)
possible_lora_extra_vocab_size = (256, 512)
if self.max_lora_rank not in possible_max_ranks:
raise ValueError(
f"max_lora_rank ({self.max_lora_rank}) must be one of "