Remove Yi model definition; please use LlamaForCausalLM instead (#2854)

Co-authored-by: Roy <jasonailu87@gmail.com>
This commit is contained in:
Philipp Moritz
2024-02-13 14:22:22 -08:00
committed by GitHub
parent a463c333dd
commit 317b29de0f
5 changed files with 2 additions and 402 deletions

View File

@@ -7,7 +7,6 @@ from vllm.transformers_utils.configs.qwen import QWenConfig
# tiiuae/falcon-7b(-instruct) models. Newer Falcon models will use the
# `FalconConfig` class from the official HuggingFace transformers library.
from vllm.transformers_utils.configs.falcon import RWConfig
from vllm.transformers_utils.configs.yi import YiConfig
__all__ = [
"AquilaConfig",
@@ -16,5 +15,4 @@ __all__ = [
"MPTConfig",
"QWenConfig",
"RWConfig",
"YiConfig",
]