[CI] Skip the pooling models that do not support transformers v4.55 (#22411)
Signed-off-by: wang.yuqi <noooop@126.com>
This commit is contained in:
@@ -10,6 +10,7 @@ from transformers import AutoModel

 from vllm.platforms import current_platform

 from ....conftest import HfRunner
+from ...utils import check_transformers_version


 @pytest.fixture(autouse=True)
@@ -86,6 +87,9 @@ def test_prm_models(
     dtype: str,
     monkeypatch,
 ) -> None:
+    check_transformers_version("Qwen/Qwen2.5-Math-PRM-7B",
+                               max_transformers_version="4.53.2")
+
     if current_platform.is_cpu() and os.environ.get("VLLM_USE_V1", "0") == "0":
         pytest.skip("CPU only supports V1")
Reference in New Issue
Block a user