Add fused top-K softmax kernel for MoE (#2769)

This commit is contained in:
Woosuk Kwon
2024-02-05 17:38:02 -08:00
committed by GitHub
parent 2ccee3def6
commit f0d4e14557
9 changed files with 591 additions and 50 deletions

7
csrc/moe/moe_ops.cpp Normal file
View File

@@ -0,0 +1,7 @@
#include "moe_ops.h"
#include <torch/extension.h>
// Entry point of the Python extension module: registers the MoE ops so they
// are callable from Python. TORCH_EXTENSION_NAME is a macro defined by the
// PyTorch extension build system and expands to the module's import name.
PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) {
// Expose topk_softmax (declared in moe_ops.h; presumably the fused top-K
// softmax CUDA kernel this commit adds — implementation not visible here).
m.def("topk_softmax", &topk_softmax, "Apply topk softmax to the gating outputs.");
}