[Bugfix][llama4_eagle] Fix missing 'lm_head' attribute (#29926)

Signed-off-by: Divakar Verma <divakar.verma@amd.com>
This commit is contained in:
Divakar Verma
2025-12-05 13:57:26 -06:00
committed by GitHub
parent e23ca3a0e8
commit 962d703818
2 changed files with 16 additions and 3 deletions

View File

@@ -402,7 +402,11 @@ def test_eagle_correctness(
# Scout requires default backend selection
# because vision encoder has head_dim 88 being incompatible
# with FLASH_ATTN and needs to fall back to Flex Attn
pass
# Proceed with backend setup only when not running on ROCm
if current_platform.is_rocm():
# TODO: Enable Flex Attn for spec_decode on ROCm
pytest.skip("Flex Attn for spec_decode not supported on ROCm currently")
else:
m.setenv("VLLM_MLA_DISABLE", "1")
m.setenv("VLLM_ATTENTION_BACKEND", attn_backend)