[MM] Pass prefix parameter to MMEncoderAttention (#33674)

Signed-off-by: shen-shanshan <467638484@qq.com>
This commit is contained in:
Shanshan Shen
2026-02-03 22:47:41 +08:00
committed by GitHub
parent f3d8a34671
commit 5c4f2dd6ef
15 changed files with 58 additions and 11 deletions

View File

@@ -231,7 +231,11 @@ class MultiHeadDotProductAttention(nn.Module):
         self.scale = self.head_dim**-0.5
         self.attn = MMEncoderAttention(
-            self.num_heads, self.head_dim, self.scale, num_kv_heads=self.num_kv_heads
+            self.num_heads,
+            self.head_dim,
+            self.scale,
+            num_kv_heads=self.num_kv_heads,
+            prefix=prefix,
         )
     def forward(