# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: Copyright contributors to the vLLM project

import pytest

from vllm.attention.backends.registry import AttentionBackendEnum
from vllm.config.multimodal import MultiModalConfig

def test_mm_encoder_attn_backend_str_conversion():
    """A backend passed as a string is normalized to its enum member."""
    mm_config = MultiModalConfig(mm_encoder_attn_backend="FLASH_ATTN")
    resolved = mm_config.mm_encoder_attn_backend
    assert resolved == AttentionBackendEnum.FLASH_ATTN


def test_mm_encoder_attn_backend_invalid():
    """An unrecognized backend name is rejected at construction time."""
    bogus_backend = "not_a_backend"
    with pytest.raises(ValueError):
        MultiModalConfig(mm_encoder_attn_backend=bogus_backend)


def test_mm_encoder_attn_backend_hash_updates():
    """Overriding the encoder attention backend must change compute_hash().

    The hash participates in cache keying, so a backend override has to be
    reflected in it; otherwise two differently-configured runs could collide.
    """
    default_config = MultiModalConfig()
    flash_config = MultiModalConfig(
        mm_encoder_attn_backend=AttentionBackendEnum.FLASH_ATTN
    )
    assert default_config.compute_hash() != flash_config.compute_hash()