[TPU] Re-enable the Pallas MoE kernel (#18025)

Signed-off-by: Michael Goin <mgoin64@gmail.com>
This commit is contained in:
Michael Goin
2025-05-20 22:52:27 -04:00
committed by GitHub
parent 23baa2180b
commit 3b17ea26e4
3 changed files with 24 additions and 9 deletions

View File

@@ -50,8 +50,7 @@ if is_rocm_aiter_moe_enabled():
 else:
     from vllm.model_executor.layers.fused_moe.fused_moe import grouped_topk
 if current_platform.is_tpu():
-    # the iterative moe implementation is used until the moe_pallas is fixed
-    from .moe_torch_iterative import fused_moe as fused_moe_pallas
+    from .moe_pallas import fused_moe as fused_moe_pallas
 else:
     fused_moe_pallas = None  # type: ignore
 logger = init_logger(__name__)