Fix Mistral yarn warning in Transformers v5 (#37292)
Signed-off-by: Harry Mellor <19981378+hmellor@users.noreply.github.com>
Co-authored-by: Julien Denize <40604584+juliendenize@users.noreply.github.com>
This commit is contained in:
@@ -2,7 +2,9 @@
|
||||
# SPDX-FileCopyrightText: Copyright contributors to the vLLM project
|
||||
from typing import Any
|
||||
|
||||
from packaging.version import Version
|
||||
from transformers import PretrainedConfig, WhisperConfig
|
||||
from transformers import __version__ as TRANSFORMERS_VERSION
|
||||
|
||||
from vllm.logger import init_logger
|
||||
|
||||
@@ -134,6 +136,10 @@ def _remap_mistral_yarn_args(config: dict) -> dict:
|
||||
# Cast to remove Transformers > v5 type warnings
|
||||
config["rope_parameters"][new_name] = cast(yarn_config.pop(old_name))
|
||||
|
||||
# Ignore apply_yarn_scaling in Transformers > v5 RoPE validation to remove warnings
|
||||
if Version(TRANSFORMERS_VERSION) >= Version("5.3.0.dev0"):
|
||||
config["ignore_keys_at_rope_validation"] = {"apply_yarn_scaling"}
|
||||
|
||||
assert len(yarn_config) == 0, f"Unparsed yarn config: {yarn_config}"
|
||||
|
||||
return config
|
||||
|
||||
Reference in New Issue
Block a user