[6/N][Attention] Move utils to more appropriate locations (#32215)

Signed-off-by: Matthew Bonanni <mbonanni@redhat.com>
This commit is contained in:
Matthew Bonanni
2026-01-13 08:38:52 -05:00
committed by GitHub
parent fefce49807
commit 98f60e5acb
14 changed files with 171 additions and 181 deletions

View File

@@ -13,10 +13,10 @@ from vllm.v1.attention.backend import (
     AttentionCGSupport,
     AttentionMetadataBuilder,
     CommonAttentionMetadata,
+    subclass_attention_backend,
 )
 from vllm.v1.attention.backends.utils import (
     make_local_attention_virtual_batches,
-    subclass_attention_backend,
 )
 from vllm.v1.attention.selector import get_attn_backend
 from vllm.v1.kv_cache_interface import (

View File

@@ -15,8 +15,6 @@ from vllm.v1.attention.backend import (
     AttentionMetadata,
     AttentionType,
     CommonAttentionMetadata,
-)
-from vllm.v1.attention.backends.utils import (
     subclass_attention_backend,
 )
 from vllm.v1.attention.selector import get_attn_backend

View File

@@ -13,8 +13,6 @@ from vllm.v1.attention.backend import (
     AttentionMetadata,
     AttentionType,
     CommonAttentionMetadata,
-)
-from vllm.v1.attention.backends.utils import (
     subclass_attention_backend,
 )
 from vllm.v1.attention.selector import get_attn_backend

View File

@@ -16,8 +16,6 @@ from vllm.v1.attention.backend import (
     AttentionMetadata,
     AttentionType,
     CommonAttentionMetadata,
-)
-from vllm.v1.attention.backends.utils import (
     subclass_attention_backend,
 )
 from vllm.v1.attention.ops.triton_reshape_and_cache_flash import (