Use Transformers helper get_text_config() instead of checking for text_config (#17105)

Signed-off-by: Harry Mellor <19981378+hmellor@users.noreply.github.com>
This commit is contained in:
Harry Mellor
2025-04-25 16:47:35 +01:00
committed by GitHub
parent 0bd7f8fca5
commit 423e9f1cbe
7 changed files with 30 additions and 46 deletions

View File

@@ -24,10 +24,7 @@ def test_can_initialize(model_arch):
def hf_overrides(hf_config: PretrainedConfig) -> PretrainedConfig:
hf_config.update(model_info.hf_overrides)
-    if hasattr(hf_config, "text_config"):
-        text_config: PretrainedConfig = hf_config.text_config
-    else:
-        text_config = hf_config
+    text_config = hf_config.get_text_config()
text_config.update({
"num_layers": 1,