[Kernel][CPU] Add QuickGELU to CPU (#5717)

This commit is contained in:
Roger Wang
2024-06-20 23:39:40 -07:00
committed by GitHub
parent d9a252bc8e
commit bd620b01fb
4 changed files with 29 additions and 0 deletions

View File

@@ -155,6 +155,9 @@ class QuickGELU(CustomOp):
ops.gelu_quick(out, x)
return out
# TODO implement forward_xpu for QuickGELU
# def forward_xpu(self, x: torch.Tensor) -> torch.Tensor:
class ScaledActivation(nn.Module):
"""An activation function with post-scale parameters.