[Core] Relax the LoRA max rank (#26461)

Signed-off-by: Jee Jee Li <pandaleefree@gmail.com>
This commit is contained in:
Jee Jee Li
2025-10-09 14:47:14 +08:00
committed by GitHub
parent 0f29dca988
commit 1b2c440cd6
2 changed files with 5 additions and 5 deletions

View File

@@ -103,7 +103,7 @@ class LoRAConfig:
# Setting the maximum rank to 512 should be able to satisfy the vast
# majority of applications.
-    possible_max_ranks = (8, 16, 32, 64, 128, 256, 320, 512)
+    possible_max_ranks = (1, 8, 16, 32, 64, 128, 256, 320, 512)
possible_lora_extra_vocab_size = (256, 512)
if self.max_lora_rank not in possible_max_ranks:
raise ValueError(