[Misc] Remove unused attention prefix prefill ops functions (#26971)

Signed-off-by: Lukas Geiger <lukas.geiger94@gmail.com>
This commit is contained in:
Lukas Geiger
2025-11-11 18:26:04 +00:00
committed by GitHub
parent d5edcb8678
commit 76e4dcf225
2 changed files with 0 additions and 213 deletions

View File

@@ -98,9 +98,6 @@ __all__ = [
class CompressedTensorsMoEMethod(FusedMoEMethodBase):
def __init__(self, moe: FusedMoEConfig):
    """Initialize the MoE quantization method.

    Fixes a typo in the original: the method was named ``__init_`` (one
    trailing underscore), which Python does not recognize as the
    initializer dunder — the class would silently fall back to the base
    class ``__init__`` and this override would never run.

    Args:
        moe: MoE layer configuration forwarded to ``FusedMoEMethodBase``.
    """
    super().__init__(moe)
@staticmethod
def get_moe_method(
quant_config: "CompressedTensorsConfig", # type: ignore # noqa E501