[Doc]: fixing typos in diverse files (#29492)
Signed-off-by: Didier Durand <durand.didier@gmail.com>
This commit is contained in:
@@ -428,7 +428,7 @@ def load_weights_using_from_2_way_softmax(
     )
     if text_config.tie_word_embeddings:
         # embed_tokens is the assumed name for input embeddings. If the model does not
-        # have this attribute, we fallback to get_input_embeddings(), which is used by
+        # have this attribute, we fall back to get_input_embeddings(), which is used by
         # the Transformers modeling backend.
         embed_tokens = (
             model.model.embed_tokens
@@ -486,7 +486,7 @@ def load_weights_no_post_processing(model, weights: Iterable[tuple[str, torch.Te
     )
     if text_config.tie_word_embeddings:
         # embed_tokens is the assumed name for input embeddings. If the model does not
-        # have this attribute, we fallback to get_input_embeddings(), which is used by
+        # have this attribute, we fall back to get_input_embeddings(), which is used by
         # the Transformers modeling backend.
         embed_tokens = (
             model.model.embed_tokens
Reference in New Issue
Block a user