ChatGLM Support (#1261)
This commit is contained in:
@@ -1,5 +1,6 @@
-from vllm.transformers_utils.configs.baichuan import BaiChuanConfig
 from vllm.transformers_utils.configs.aquila import AquilaConfig
+from vllm.transformers_utils.configs.baichuan import BaiChuanConfig
+from vllm.transformers_utils.configs.chatglm import ChatGLMConfig
 from vllm.transformers_utils.configs.qwen import QWenConfig
 # RWConfig is for the original tiiuae/falcon-40b(-instruct) and
 # tiiuae/falcon-7b(-instruct) models. Newer Falcon models will use the
@@ -8,8 +9,9 @@ from vllm.transformers_utils.configs.falcon import RWConfig
 from vllm.transformers_utils.configs.yi import YiConfig
 
 __all__ = [
-    "BaiChuanConfig",
     "AquilaConfig",
+    "BaiChuanConfig",
+    "ChatGLMConfig",
     "QWenConfig",
     "RWConfig",
     "YiConfig",
Reference in New Issue
Block a user