[Misc] Bump up transformers to v4.39.0 & Remove StarCoder2Config (#3551)

Co-authored-by: Roy <jasonailu87@gmail.com>
Co-authored-by: Roger Meier <r.meier@siemens.com>
This commit is contained in:
Authored by Woosuk Kwon on 2024-03-21 07:58:12 -07:00; committed by GitHub.
parent 865732342b
commit c188ecb080
6 changed files with 3 additions and 76 deletions

View File

@@ -9,7 +9,6 @@ _CONFIG_REGISTRY = {
"mpt": MPTConfig,
"RefinedWeb": RWConfig, # For tiiuae/falcon-40b(-instruct)
"RefinedWebModel": RWConfig, # For tiiuae/falcon-7b(-instruct)
"starcoder2": Starcoder2Config,
"jais": JAISConfig,
}
@@ -18,15 +17,6 @@ def get_config(model: str,
trust_remote_code: bool,
revision: Optional[str] = None,
code_revision: Optional[str] = None) -> PretrainedConfig:
# FIXME(woosuk): This is a temporary fix for StarCoder2.
# Remove this when the model is supported by HuggingFace transformers.
if "bigcode" in model and "starcoder2" in model:
config_class = _CONFIG_REGISTRY["starcoder2"]
config = config_class.from_pretrained(model,
revision=revision,
code_revision=code_revision)
return config
try:
config = AutoConfig.from_pretrained(
model,