ChatGLM Support (#1261)

This commit is contained in:
GoHomeToMacDonal
2023-11-07 08:09:33 +08:00
committed by GitHub
parent e7f579eb97
commit 1a2bbc9301
7 changed files with 490 additions and 4 deletions

View File

@@ -1,5 +1,6 @@
-from vllm.transformers_utils.configs.baichuan import BaiChuanConfig
 from vllm.transformers_utils.configs.aquila import AquilaConfig
+from vllm.transformers_utils.configs.baichuan import BaiChuanConfig
+from vllm.transformers_utils.configs.chatglm import ChatGLMConfig
 from vllm.transformers_utils.configs.qwen import QWenConfig
 # RWConfig is for the original tiiuae/falcon-40b(-instruct) and
 # tiiuae/falcon-7b(-instruct) models. Newer Falcon models will use the
@@ -8,8 +9,9 @@ from vllm.transformers_utils.configs.falcon import RWConfig
 from vllm.transformers_utils.configs.yi import YiConfig
 __all__ = [
-    "BaiChuanConfig",
     "AquilaConfig",
+    "BaiChuanConfig",
+    "ChatGLMConfig",
     "QWenConfig",
     "RWConfig",
     "YiConfig",