Fix pre-commit (and XPU) on main (#28556)

Signed-off-by: Harry Mellor <19981378+hmellor@users.noreply.github.com>
This commit is contained in:
Harry Mellor
2025-11-12 14:13:41 +00:00
committed by GitHub
parent 10138c92a5
commit 54aecd9ed5
2 changed files with 1 addition and 3 deletions

View File

@@ -1145,7 +1145,7 @@ class IpexMxfp4MoEMethod(Mxfp4MoEMethod):
) -> torch.Tensor:
assert activation == "swigluoai", (
"Only swiglu_oai activation is supported for IPEX MXFP4 MoE"
) # noqa:
)
hidden_size_pad = round_up(self.original_hidden_size, 128)
x_pad = torch.nn.functional.pad(x, (0, hidden_size_pad - x.size(-1)))
hidden_states = layer.ipex_fusion(