[ROCm] Add stablelm Head Size 80 To Supported Head Sizes For ROCM_ATTN (#35527)

Signed-off-by: Micah Williamson <micah.williamson@amd.com>
This commit is contained in:
Micah Williamson
2026-02-27 22:16:34 -06:00
committed by GitHub
parent d5b6f3ba36
commit 0edf101d2b
2 changed files with 2 additions and 2 deletions

View File

@@ -182,7 +182,7 @@ class RocmAttentionBackend(AttentionBackend):
@classmethod
def get_supported_head_sizes(cls) -> list[int]:
    """Return the attention head sizes supported by the ROCm attention backend.

    Returns:
        Sorted list of supported per-head dimensions. 80 is included to
        support stablelm-family models on ROCm.
    """
    # NOTE: the diff artifact left two consecutive `return` statements here;
    # the stale first one (without 80) shadowed the intended list. Only the
    # updated list is kept, matching the commit's intent.
    return [32, 64, 80, 96, 128, 160, 192, 224, 256]
@classmethod
def validate_head_size(cls, head_size: int) -> None: