Stop testing for slow tokenizers as they will not exist soon (#34235)

Signed-off-by: Harry Mellor <19981378+hmellor@users.noreply.github.com>
Author: Harry Mellor
Date: 2026-02-10 13:08:20 +01:00
Committed by: GitHub
Parent: 94de871546
Commit: 61413973e8


@@ -4,7 +4,6 @@ from typing import _get_protocol_attrs  # type: ignore
 import pytest
 from transformers import (
-    PreTrainedTokenizer,
     PreTrainedTokenizerBase,
     PreTrainedTokenizerFast,
 )
@@ -25,10 +24,6 @@ def _assert_tokenizer_like(tokenizer: object):
 def test_tokenizer_like_protocol():
-    tokenizer = get_tokenizer("gpt2", use_fast=False)
-    assert isinstance(tokenizer, PreTrainedTokenizer)
-    _assert_tokenizer_like(tokenizer)
     tokenizer = get_tokenizer("gpt2", use_fast=True)
     assert isinstance(tokenizer, PreTrainedTokenizerFast)
     _assert_tokenizer_like(tokenizer)
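
For context, here is a minimal sketch of how a protocol-conformance helper like _assert_tokenizer_like might work. Only the _get_protocol_attrs import is taken from the diff above; the TokenizerLike protocol and the helper body are assumptions for illustration, not the repository's actual definitions.

# A minimal sketch, assuming a hypothetical TokenizerLike protocol.
from typing import Protocol, _get_protocol_attrs  # type: ignore


class TokenizerLike(Protocol):
    """Hypothetical protocol for the tokenizer surface under test."""

    def encode(self, text: str) -> list[int]: ...

    def decode(self, token_ids: list[int]) -> str: ...


def _assert_tokenizer_like(tokenizer: object) -> None:
    # Walk every attribute the protocol declares and assert the concrete
    # tokenizer object exposes it, giving a precise failure message.
    for attr in _get_protocol_attrs(TokenizerLike):
        assert hasattr(tokenizer, attr), f"tokenizer missing attribute: {attr}"

With only fast tokenizers left, the surviving test reduces to the two remaining checks in the hunk above: an isinstance check against PreTrainedTokenizerFast plus the structural protocol assertion.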