[Refactor] TokenizerRegistry only uses lazy imports (#30609)
Signed-off-by: DarkLight1337 <tlleungac@connect.ust.hk>
This commit is contained in:
@@ -3,38 +3,39 @@
|
||||
from typing import _get_protocol_attrs # type: ignore
|
||||
|
||||
import pytest
|
||||
from transformers import PreTrainedTokenizerBase
|
||||
from transformers import (
|
||||
PreTrainedTokenizer,
|
||||
PreTrainedTokenizerBase,
|
||||
PreTrainedTokenizerFast,
|
||||
)
|
||||
|
||||
from vllm.tokenizers import TokenizerLike, get_tokenizer
|
||||
from vllm.tokenizers.mistral import MistralTokenizer
|
||||
|
||||
|
||||
def _get_missing_attrs(obj: object, target: type):
|
||||
return [k for k in _get_protocol_attrs(target) if not hasattr(obj, k)]
|
||||
|
||||
|
||||
def _assert_tokenizer_like(tokenizer: object) -> None:
    """Assert that *tokenizer* structurally satisfies ``TokenizerLike``.

    Raises an ``AssertionError`` listing the missing protocol attributes
    when the object does not conform.
    """
    missing = _get_missing_attrs(tokenizer, TokenizerLike)
    assert not missing, f"Missing attrs: {missing}"
|
||||
|
||||
|
||||
# NOTE(review): the span below is a diff rendering, not runnable source — it
# interleaves the PRE-refactor assertions (inline walrus `assert not (... := ...)`)
# with the POST-refactor ones (`tokenizer = get_tokenizer(...)` + isinstance +
# `_assert_tokenizer_like`), plus `|` / `||||` page artifacts. All original
# tokens are preserved verbatim; only review comments are added.
def test_tokenizer_like_protocol():
|
||||
# Slow (pure-Python) tokenizer — presumably the pre-refactor walrus form of
# the check; TODO confirm which hunk side this belongs to.
assert not (
|
||||
missing_attrs := _get_missing_attrs(
|
||||
get_tokenizer("gpt2", use_fast=False),
|
||||
TokenizerLike,
|
||||
)
|
||||
), f"Missing attrs: {missing_attrs}"
|
||||
# Post-refactor form: slow tokenizer must be a PreTrainedTokenizer and
# satisfy the TokenizerLike protocol.
tokenizer = get_tokenizer("gpt2", use_fast=False)
|
||||
assert isinstance(tokenizer, PreTrainedTokenizer)
|
||||
_assert_tokenizer_like(tokenizer)
|
||||
|
||||
# Fast (Rust-backed) tokenizer — pre-refactor walrus check.
assert not (
|
||||
missing_attrs := _get_missing_attrs(
|
||||
get_tokenizer("gpt2", use_fast=True),
|
||||
TokenizerLike,
|
||||
)
|
||||
), f"Missing attrs: {missing_attrs}"
|
||||
# Post-refactor form: fast tokenizer must be a PreTrainedTokenizerFast.
tokenizer = get_tokenizer("gpt2", use_fast=True)
|
||||
assert isinstance(tokenizer, PreTrainedTokenizerFast)
|
||||
_assert_tokenizer_like(tokenizer)
|
||||
|
||||
# Mistral tokenizer mode — pre-refactor walrus check.
assert not (
|
||||
missing_attrs := _get_missing_attrs(
|
||||
get_tokenizer(
|
||||
"mistralai/Mistral-7B-Instruct-v0.3", tokenizer_mode="mistral"
|
||||
),
|
||||
TokenizerLike,
|
||||
)
|
||||
), f"Missing attrs: {missing_attrs}"
|
||||
# Post-refactor form: mistral mode must yield a MistralTokenizer.
tokenizer = get_tokenizer(
|
||||
"mistralai/Mistral-7B-Instruct-v0.3", tokenizer_mode="mistral"
|
||||
)
|
||||
assert isinstance(tokenizer, MistralTokenizer)
|
||||
_assert_tokenizer_like(tokenizer)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("tokenizer_name", ["facebook/opt-125m", "gpt2"])
|
||||
|
||||
Reference in New Issue
Block a user