[Misc] Improve LoRA spelling (#13831)

Jee Jee Li
2025-02-26 15:43:01 +08:00
committed by GitHub
parent e206b54331
commit 5157338ed9
25 changed files with 80 additions and 80 deletions


@@ -8,7 +8,7 @@ import pytest
 import vllm
 from vllm import SamplingParams
-from vllm.lora.layers import LinearScalingRotaryEmbeddingWithLora
+from vllm.lora.layers import LinearScalingRotaryEmbeddingWithLoRA
 from vllm.lora.request import LoRARequest
 from vllm.model_executor.layers.rotary_embedding import (
     LinearScalingRotaryEmbedding)
@@ -151,7 +151,7 @@ def test_rotary_emb_replaced(dist_init):
         if "rotary_emb" in module_name:
             if "base_layer" not in module_name:
                 rotary_emb_count += 1
-                assert isinstance(module, LinearScalingRotaryEmbeddingWithLora)
+                assert isinstance(module, LinearScalingRotaryEmbeddingWithLoRA)
             else:
                 assert isinstance(module, LinearScalingRotaryEmbedding)
     # Llama 2 has 32 layers.
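
For downstream code that still imports the old class name, a minimal compatibility sketch is shown below. The two class names and the module path come from the diff above; the try/except fallback itself is an assumption for callers that need to run against both pre- and post-rename vLLM versions, not something this commit adds.

```python
# Sketch only: prefer the new spelling, fall back to the old one.
try:
    # Spelling introduced by this commit.
    from vllm.lora.layers import LinearScalingRotaryEmbeddingWithLoRA
except ImportError:
    # Spelling used by vLLM versions before this change (assumed fallback).
    from vllm.lora.layers import (
        LinearScalingRotaryEmbeddingWithLora as LinearScalingRotaryEmbeddingWithLoRA)
```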