Bump transformers version for Llama 3.1 hotfix and patch Chameleon (#6690)
This commit is contained in:
@@ -16,8 +16,6 @@ _GENERATION_MODELS = {
"BaiChuanForCausalLM": ("baichuan", "BaiChuanForCausalLM"),  # baichuan-7b
"BaichuanForCausalLM": ("baichuan", "BaichuanForCausalLM"),  # baichuan-13b
"BloomForCausalLM": ("bloom", "BloomForCausalLM"),
# TODO(ywang96): remove this when huggingface fixes the model repo
"ChameleonForCausalLM": ("chameleon", "ChameleonForConditionalGeneration"),
"ChameleonForConditionalGeneration":
    ("chameleon", "ChameleonForConditionalGeneration"),
"ChatGLMModel": ("chatglm", "ChatGLMForCausalLM"),
Reference in New Issue
Block a user