[5/N][Attention] Finish eliminating vllm/attention folder (#32064)
Signed-off-by: Matthew Bonanni <mbonanni@redhat.com>
@@ -6,7 +6,6 @@ from itertools import islice
 import torch
 from torch import nn
 
-from vllm.attention.layer import Attention
 from vllm.config import (
     CacheConfig,
     VllmConfig,
@@ -22,6 +21,7 @@ from vllm.distributed import (
 )
 from vllm.logger import init_logger
 from vllm.model_executor.layers.activation import SiluAndMul
+from vllm.model_executor.layers.attention import Attention
 from vllm.model_executor.layers.fused_moe import FusedMoE
 from vllm.model_executor.layers.layernorm import RMSNorm
 from vllm.model_executor.layers.linear import (
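
Downstream code that still imports Attention from the removed vllm/attention folder will break after this change. A minimal compatibility sketch, assuming a downstream project wants to run against vLLM versions both before and after this series (the try/except fallback is illustrative and not part of this commit):

    # Illustrative compatibility shim, not part of this commit:
    # prefer the relocated import, fall back to the old path on
    # vLLM versions predating this series.
    try:
        from vllm.model_executor.layers.attention import Attention  # new location
    except ImportError:
        from vllm.attention.layer import Attention  # old location, removed here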