Don't assume position_embedding_type will be present for BERT and RoBERTa models (#30770)
Signed-off-by: Harry Mellor <19981378+hmellor@users.noreply.github.com>
@@ -55,7 +55,9 @@ class BertEmbedding(nn.Module):
             "position_ids",
             torch.arange(config.max_position_embeddings).unsqueeze(0),
         )
-        self.position_embedding_type = config.position_embedding_type
+        self.position_embedding_type = getattr(
+            config, "position_embedding_type", "absolute"
+        )
         if self.position_embedding_type != "absolute":
             raise ValueError(
                 "Only 'absolute' position_embedding_type" + " is supported"
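For context, a minimal sketch of the pattern this fix relies on: getattr() with a default value, so that configs which omit position_embedding_type (as some BERT/RoBERTa checkpoints do) fall back to "absolute" instead of raising AttributeError. The resolve_position_embedding_type helper and the SimpleNamespace configs below are illustrative stand-ins, not vLLM's actual config objects.

    # Defensive attribute access: default to "absolute" when the config
    # object has no position_embedding_type attribute at all.
    from types import SimpleNamespace

    def resolve_position_embedding_type(config) -> str:
        # Mirrors the patched line in BertEmbedding.__init__.
        position_embedding_type = getattr(
            config, "position_embedding_type", "absolute"
        )
        if position_embedding_type != "absolute":
            raise ValueError(
                "Only 'absolute' position_embedding_type is supported"
            )
        return position_embedding_type

    # A config that sets the attribute explicitly.
    explicit = SimpleNamespace(position_embedding_type="absolute")
    # A config that omits it; before the fix, accessing
    # config.position_embedding_type here raised AttributeError.
    missing = SimpleNamespace()

    assert resolve_position_embedding_type(explicit) == "absolute"
    assert resolve_position_embedding_type(missing) == "absolute"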