[Attention] FlashAttention ViT support, make default backend (#28763)

Signed-off-by: Matthew Bonanni <mbonanni@redhat.com>
Author: Matthew Bonanni
Date: 2025-11-18 23:06:21 -05:00 (committed by GitHub)
Parent: 814843e021
Commit: 4c23690f43
5 changed files with 15 additions and 46 deletions


@@ -13,14 +13,14 @@ from vllm.vllm_flash_attn import (
 )
 NUM_HEADS = [(4, 4), (8, 2)]
-HEAD_SIZES = [128, 256]
+HEAD_SIZES = [40, 72, 80, 128, 256]
 BLOCK_SIZES = [16]
 DTYPES = [torch.bfloat16]
 QDTYPES = [None, torch.float8_e4m3fn]
 # one value large enough to test overflow in index calculation.
 # one value small enough to test the schema op check
 NUM_BLOCKS = [32768, 2048]
-SOFT_CAPS = [None, 50.0]
+SOFT_CAPS = [None]
 SLIDING_WINDOWS = [None, 256]
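
The constants in this hunk are pytest parametrization lists: adding head sizes 40, 72, and 80 extends the kernel test matrix to the smaller, non-power-of-two head dimensions that the ViT support in this commit targets, while dropping the 50.0 soft cap shrinks it. A minimal sketch of how such lists typically feed pytest; the test name and body here are hypothetical, not the actual test in this commit:

# A minimal sketch, assuming a pytest-style kernel test file.
# `test_flash_attn_shapes` and its body are hypothetical.
import pytest
import torch

NUM_HEADS = [(4, 4), (8, 2)]          # (query heads, KV heads)
HEAD_SIZES = [40, 72, 80, 128, 256]   # ViT-style head sizes added by this commit
DTYPES = [torch.bfloat16]
SOFT_CAPS = [None]                    # 50.0 removed by this commit

@pytest.mark.parametrize("num_heads", NUM_HEADS)
@pytest.mark.parametrize("head_size", HEAD_SIZES)
@pytest.mark.parametrize("dtype", DTYPES)
@pytest.mark.parametrize("soft_cap", SOFT_CAPS)
def test_flash_attn_shapes(num_heads, head_size, dtype, soft_cap):
    # The lists above multiply the test matrix: 2 x 5 x 1 x 1 = 10 cases.
    # A real test would invoke the FlashAttention kernel and compare its
    # output against a reference attention implementation.
    num_query_heads, num_kv_heads = num_heads
    q = torch.randn(1, num_query_heads, head_size, dtype=dtype)
    k = torch.randn(1, num_kv_heads, head_size, dtype=dtype)
    assert q.shape[-1] == k.shape[-1] == head_size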