[Kernel] Make rotary_embedding ops more flexible with input shape (#12777)

Isotr0py authored on 2025-02-07 00:46:13 +08:00; committed by GitHub
parent 1e57b1ee63
commit 85ac82d228
4 changed files with 115 additions and 57 deletions
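
Context for the change: vLLM's rotary_embedding op previously expected query/key as 2-D [num_tokens, num_heads * head_dim] tensors, so callers holding headed 3-D tensors, like DeepseekV2Attention below, had to flatten before the call and restore the shape afterwards. This commit relaxes the op so it handles the leading dimensions itself. What follows is a minimal pure-PyTorch sketch of that shape handling, not vLLM's CUDA kernel; the function names and the NeoX-style half-rotation are illustrative assumptions.

import torch


def build_rope_cache(max_position: int, head_dim: int,
                     base: float = 10000.0) -> torch.Tensor:
    # Precompute a [max_position, head_dim] cache holding cos values in
    # the first half and sin values in the second half.
    inv_freq = 1.0 / (base ** (
        torch.arange(0, head_dim, 2, dtype=torch.float32) / head_dim))
    t = torch.arange(max_position, dtype=torch.float32)
    freqs = torch.outer(t, inv_freq)                       # [P, head_dim // 2]
    return torch.cat((freqs.cos(), freqs.sin()), dim=-1)   # [P, head_dim]


def apply_rotary(positions: torch.Tensor, x: torch.Tensor,
                 cache: torch.Tensor, head_dim: int) -> torch.Tensor:
    # Accept any layout whose trailing dims flatten to a multiple of
    # head_dim: the op derives the head count itself instead of
    # requiring the caller to pre-flatten to 2-D.
    orig_shape = x.shape
    num_tokens = positions.numel()
    x = x.reshape(num_tokens, -1, head_dim)                # [T, H, head_dim]
    cos, sin = cache[positions.flatten()].chunk(2, dim=-1)  # each [T, head_dim // 2]
    cos, sin = cos.unsqueeze(1), sin.unsqueeze(1)          # broadcast over heads
    x1, x2 = x.chunk(2, dim=-1)                            # NeoX-style half split
    out = torch.cat((x1 * cos - x2 * sin, x2 * cos + x1 * sin), dim=-1)
    return out.reshape(orig_shape)


# A flattened 2-D input (the old contract) and a headed 3-D input (newly
# accepted) produce identical results, with no caller-side reshapes:
cache = build_rope_cache(4096, 64)
positions = torch.arange(8)
q3d = torch.randn(8, 16, 64)      # [num_tokens, num_heads, head_dim]
q2d = q3d.reshape(8, -1)          # pre-flattened, as the old contract required
assert torch.allclose(apply_rotary(positions, q3d, cache, 64).reshape(8, -1),
                      apply_rotary(positions, q2d, cache, 64))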

vllm/model_executor/models/deepseek_v2.py

@@ -257,9 +257,7 @@ class DeepseekV2Attention(nn.Module):
                                         prefix=f"{prefix}.o_proj")
         if rope_scaling:
             rope_scaling["rope_type"] = 'deepseek_yarn'
-            self.use_normal_rope = False
-        else:
-            self.use_normal_rope = True
         self.rotary_emb = get_rope(qk_rope_head_dim,
                                    rotary_dim=qk_rope_head_dim,
                                    max_position=max_position_embeddings,
@@ -309,17 +307,8 @@ class DeepseekV2Attention(nn.Module):
         k_nope, v = kv.split([self.qk_nope_head_dim, self.v_head_dim], dim=-1)
         k_pe = latent_cache[:, :, self.kv_lora_rank:]
-        if self.use_normal_rope:
-            seq_len = positions.size(0)
-            ori_q_pe_shape, ori_k_pe_shape = q_pe.shape, k_pe.shape
-            q_pe = q_pe.reshape(seq_len, -1)
-            k_pe = k_pe.reshape(seq_len, -1)
         q_pe, k_pe = self.rotary_emb(positions, q_pe, k_pe)
-        if self.use_normal_rope:
-            q_pe, k_pe = q_pe.view(ori_q_pe_shape), k_pe.view(ori_k_pe_shape)
         q[..., self.qk_nope_head_dim:] = q_pe
         k = torch.empty_like(q)
         k[..., :self.qk_nope_head_dim] = k_nope
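
With the op now flexible about input shape, the forward pass above can hand the headed q_pe and k_pe tensors (roughly [num_tokens, num_heads, qk_rope_head_dim]) straight to self.rotary_emb. The use_normal_rope flag and its flatten/restore scaffolding are therefore deleted outright rather than replaced, removing the extra reshapes and views from the hot path.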