[CI] Accelerate mteb test by setting SentenceTransformers mteb score to a constant (#24088)

Signed-off-by: wang.yuqi <noooop@126.com>
Author: wang.yuqi
Date: 2025-09-03 17:23:56 +08:00 (committed by GitHub)
Parent: 9c99e4871f
Commit: 51383bd472
17 changed files with 83 additions and 52 deletions
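
The pattern this commit applies: instead of re-running the SentenceTransformers baseline on every CI run, the tests compare vLLM's MTEB score against a pre-recorded constant. Below is a minimal sketch of that pattern; the names, the score value, and the tolerance are hypothetical illustrations, not taken from this commit.

import pytest

# Assumption: baseline MTEB scores were measured once offline with
# SentenceTransformers and checked in as constants.
ST_MTEB_SCORES = {
    "BAAI/bge-base-en-v1.5": 0.7656,  # illustrative value only
}
MTEB_SCORE_TOLERANCE = 1e-3  # hypothetical tolerance


def run_mteb_with_vllm(model: str) -> float:
    # Placeholder for the vLLM-side MTEB evaluation.
    raise NotImplementedError


@pytest.mark.parametrize("model", sorted(ST_MTEB_SCORES))
def test_mteb_matches_baseline(model: str) -> None:
    vllm_score = run_mteb_with_vllm(model)
    # The SentenceTransformers run is skipped entirely; its score is a
    # constant, so CI only pays for the vLLM evaluation.
    assert vllm_score == pytest.approx(ST_MTEB_SCORES[model],
                                       abs=MTEB_SCORE_TOLERANCE)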

@@ -7,7 +7,7 @@ import pytest
 from vllm.config import PoolerConfig
 from vllm.platforms import current_platform
-from ...utils import check_embeddings_close, check_transformers_version
+from ...utils import check_embeddings_close
 @pytest.mark.parametrize(
@@ -30,7 +30,6 @@ from ...utils import check_embeddings_close, check_transformers_version
         pytest.param("BAAI/bge-base-en-v1.5", marks=[pytest.mark.core_model]),
         pytest.param("sentence-transformers/all-MiniLM-L12-v2"),
         pytest.param("intfloat/multilingual-e5-small"),
-        pytest.param("Alibaba-NLP/gte-Qwen2-1.5B-instruct"),
         # [Cross-Encoder]
         pytest.param("sentence-transformers/stsb-roberta-base-v2"),
     ],
@@ -42,8 +41,6 @@ def test_models(
     model,
     monkeypatch,
 ) -> None:
-    if model == "Alibaba-NLP/gte-Qwen2-1.5B-instruct":
-        check_transformers_version(model, max_transformers_version="4.53.2")
     if model == "BAAI/bge-multilingual-gemma2" and current_platform.is_rocm():
         # ROCm Triton FA does not currently support sliding window attention
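
The guard removed in the last hunk used check_transformers_version to skip a model that breaks on newer transformers releases. A hedged sketch of what such a helper typically looks like; the real one lives in the vLLM test utils and may differ.

import pytest
import transformers
from packaging.version import Version


def check_transformers_version(model: str,
                               max_transformers_version: str) -> None:
    # Skip the current test when the installed transformers release is
    # newer than the last version known to work with this model.
    installed = Version(transformers.__version__)
    if installed > Version(max_transformers_version):
        pytest.skip(f"{model} needs transformers<={max_transformers_version}, "
                    f"found {installed}")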