diff --git a/vllm/transformers_utils/configs/mistral.py b/vllm/transformers_utils/configs/mistral.py
index bdeadec1b..2b0796691 100644
--- a/vllm/transformers_utils/configs/mistral.py
+++ b/vllm/transformers_utils/configs/mistral.py
@@ -2,7 +2,9 @@
 # SPDX-FileCopyrightText: Copyright contributors to the vLLM project
 
 from typing import Any
 
+from packaging.version import Version
 from transformers import PretrainedConfig, WhisperConfig
+from transformers import __version__ as TRANSFORMERS_VERSION
 
 from vllm.logger import init_logger
@@ -134,6 +136,10 @@ def _remap_mistral_yarn_args(config: dict) -> dict:
         # Cast to remove Transformers > v5 type warnings
         config["rope_parameters"][new_name] = cast(yarn_config.pop(old_name))
 
+    # Ignore apply_yarn_scaling in Transformers > v5 RoPE validation to remove warnings
+    if Version(TRANSFORMERS_VERSION) >= Version("5.3.0.dev0"):
+        config["ignore_keys_at_rope_validation"] = {"apply_yarn_scaling"}
+
     assert len(yarn_config) == 0, f"Unparsed yarn config: {yarn_config}"
 
     return config