[Misc] Reduce LoRA-related static variables (#13166)

This commit is contained in:
Jee Jee Li
2025-02-22 16:21:30 +08:00
committed by GitHub
parent 2cb8c1540e
commit 105b8ce4c0
41 changed files with 120 additions and 395 deletions

View File

@@ -261,15 +261,12 @@ class GPTBigCodeModel(nn.Module):
class GPTBigCodeForCausalLM(nn.Module, SupportsLoRA, SupportsPP):
packed_modules_mapping = {"c_attn": ["c_attn"]}
supported_lora_modules = ["c_fc", "c_proj", "wte", "c_attn"]
# LoRA specific attributes
embedding_modules = {
"wte": "input_embeddings",
"lm_head": "output_embeddings",
}
embedding_padding_modules = []
def __init__(self, *, vllm_config: VllmConfig, prefix: str = ""):
super().__init__()
config = vllm_config.model_config.hf_config