[LoRA] Relax LoRA condition (#7146)

Author: Jee Jee Li
Date: 2024-08-06 09:57:25 +08:00
Committed by: GitHub
Parent: e3c664bfcb
Commit: 9118217f58
4 changed files with 8 additions and 7 deletions


@@ -1311,8 +1311,9 @@ class LoRAConfig:
     long_lora_scaling_factors: Optional[Tuple[float]] = None
 
     def __post_init__(self):
-        # TODO: Increase the range of rank
-        possible_max_ranks = (8, 16, 32, 64)
+        # Setting the maximum rank to 256 should be able to satisfy the vast
+        # majority of applications.
+        possible_max_ranks = (8, 16, 32, 64, 128, 256)
         possible_lora_extra_vocab_size = (0, 256, 512)
         if self.max_lora_rank not in possible_max_ranks:
             raise ValueError(
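
For readers without the surrounding file, below is a minimal, self-contained sketch of how the relaxed validation behaves after this change. The class name LoRAConfigSketch, its default values, and the error-message wording are illustrative stand-ins, not vLLM's actual LoRAConfig; only the possible_max_ranks / possible_lora_extra_vocab_size tuples and the shape of the __post_init__ check come from the diff above.

from dataclasses import dataclass
from typing import Optional, Tuple


@dataclass
class LoRAConfigSketch:
    """Reduced stand-in for the LoRAConfig fields touched by this hunk."""
    max_lora_rank: int = 16
    lora_extra_vocab_size: int = 256
    long_lora_scaling_factors: Optional[Tuple[float]] = None

    def __post_init__(self):
        # Setting the maximum rank to 256 should be able to satisfy the vast
        # majority of applications.
        possible_max_ranks = (8, 16, 32, 64, 128, 256)
        possible_lora_extra_vocab_size = (0, 256, 512)
        if self.max_lora_rank not in possible_max_ranks:
            raise ValueError(
                f"max_lora_rank ({self.max_lora_rank}) must be one of "
                f"{possible_max_ranks}.")
        if self.lora_extra_vocab_size not in possible_lora_extra_vocab_size:
            raise ValueError(
                f"lora_extra_vocab_size ({self.lora_extra_vocab_size}) "
                f"must be one of {possible_lora_extra_vocab_size}.")


# Ranks 128 and 256 now pass validation; an unlisted rank such as 96
# would still raise ValueError.
LoRAConfigSketch(max_lora_rank=128)
LoRAConfigSketch(max_lora_rank=256)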