[Misc][Attention] Clean up unused method in CPU_ATTN (#36673)

Signed-off-by: Matthew Bonanni <mbonanni@redhat.com>
This commit is contained in:
Matthew Bonanni
2026-03-11 00:27:22 -04:00
committed by GitHub
parent 7d6abdd022
commit 5f77ef15ae

View File

@@ -36,10 +36,6 @@ class CPUAttentionBackend(AttentionBackend):
torch.float32,
]
@classmethod
def get_supported_dtypes(cls) -> list[torch.dtype]:
    """Return the floating-point dtypes this CPU attention backend accepts."""
    supported = (torch.float16, torch.bfloat16, torch.float32)
    return list(supported)
@classmethod
def get_supported_head_sizes(cls) -> list[int]:
    """Return the attention head sizes this CPU attention backend supports."""
    # De-garbled from a side-by-side diff rendering that duplicated every line.
    return [32, 64, 80, 96, 112, 128, 160, 192, 224, 256]