Remove ScaledActivation for AWQ (#10057)

Signed-off-by: mgoin <michael@neuralmagic.com>
This commit is contained in:
Michael Goin
2024-11-06 09:27:06 -05:00
committed by GitHub
parent 406d4cc480
commit 399c798608
34 changed files with 19 additions and 124 deletions

View File

@@ -135,8 +135,7 @@ class GPTBigMLP(nn.Module):
bias=True,
quant_config=quant_config,
)
-        self.act = get_act_fn(config.activation_function, quant_config,
-                              intermediate_size)
+        self.act = get_act_fn(config.activation_function)
def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
hidden_states, _ = self.c_fc(hidden_states)