[Model] Remove MotifForCausalLM (#25866)

Signed-off-by: Jee Jee Li <pandaleefree@gmail.com>
This commit is contained in:
Jee Jee Li
2025-09-30 00:36:30 +08:00
committed by GitHub
parent 0899ba5b42
commit e61eb5e09d
5 changed files with 1 addition and 355 deletions

View File

@@ -76,10 +76,6 @@ def can_initialize(model_arch: str, monkeypatch: pytest.MonkeyPatch,
if model_info.v0_only:
# NOTE(woosuk): skip the test for V0-only models
return
if model_arch in ("Phi4FlashForCausalLM", "MotifForCausalLM"):
pytest.skip(
"Differential Flash Attention backend has been removed.")
if model_arch == "GptOssForCausalLM":
# FIXME: A hack to bypass FA3 assertion because our CI's L4 GPU
# has cc==8.9 which hasn't supported FA3 yet. Remove this hack when