Update rope_scaling to rope_parameters in preparation for Transformers v5 (#28542)

Signed-off-by: Harry Mellor <19981378+hmellor@users.noreply.github.com>
Author: Harry Mellor
Date: 2025-11-19 18:06:36 +01:00
Committed by: GitHub
Parent: d44e9df7d4
Commit: a8b70304d6
104 changed files with 542 additions and 910 deletions


@@ -137,7 +137,7 @@ class TestRotaryEmbedding(torch.nn.Module):
             self.head_dim,
             rotary_dim=self.rotary_dim,
             max_position=max_position,
-            base=base,
+            rope_parameters={"rope_type": "default", "rope_theta": base},
         )

     def forward(self, positions, q, k):
@@ -172,7 +172,7 @@ class TestRotaryEmbeddingSliceScatter(torch.nn.Module):
             self.head_dim,
             rotary_dim=self.head_dim,
             max_position=max_position,
-            base=base,
+            rope_parameters={"rope_type": "default", "rope_theta": base},
         )

     def forward(self, positions, hidden_states):
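
For context: the change replaces the scalar base= keyword with a single rope_parameters dict carrying both the RoPE variant ("rope_type") and the base frequency ("rope_theta"), the unified convention Transformers v5 moves to. The sketch below is a minimal illustration, not code from this commit, of how legacy config fields (rope_theta plus an optional rope_scaling dict) could be folded into a dict of that shape; the helper name to_rope_parameters is hypothetical.

    # Minimal sketch (not part of this commit): build a rope_parameters dict,
    # of the shape used in the hunks above, from legacy-style config fields.
    def to_rope_parameters(rope_theta: float, rope_scaling: dict | None = None) -> dict:
        if rope_scaling is None:
            # No scaling configured: plain RoPE with the given base frequency.
            return {"rope_type": "default", "rope_theta": rope_theta}
        params = dict(rope_scaling)
        # Older configs spell the variant as "type", newer ones as "rope_type".
        if "rope_type" in params:
            rope_type = params.pop("rope_type")
        else:
            rope_type = params.pop("type", "default")
        params["rope_type"] = rope_type
        params["rope_theta"] = rope_theta
        return params

    # Matches the "default" case exercised by the tests in this diff.
    assert to_rope_parameters(10000.0) == {"rope_type": "default", "rope_theta": 10000.0}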