[Model] Bump transformers version for test registry (#33100)
Signed-off-by: DarkLight1337 <tlleungac@connect.ust.hk>
@@ -350,7 +350,7 @@ class Base(
         # vLLM does not support encoder-decoder models, so if any encoder layer is
         # found in a text only model, we assume the whole model is an encoder model
         if has_encoder(self.model) and not is_multimodal(self.config):
-            self.check_version("5.0.0.dev0", "encoder models support")
+            self.check_version("5.0.0", "encoder models support")
             attn_type = AttentionType.ENCODER_ONLY
         else:
             attn_type = AttentionType.DECODER
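Each hunk in this commit changes a `check_version` call, which gates a feature of the Transformers backend on the installed transformers version; the bump means a stable 5.0.0 release is now required where a 5.0.0.dev0 pre-release was previously accepted. A minimal sketch of such a gate, written as a free function for brevity and assuming it simply compares against `transformers.__version__` (the actual vLLM helper may differ), could look like:

    # Illustrative sketch only; not vLLM's actual check_version implementation.
    from packaging.version import Version

    import transformers


    def check_version(minimum: str, feature: str) -> None:
        """Raise if the installed transformers release is older than `minimum`."""
        installed = Version(transformers.__version__)
        if installed < Version(minimum):
            raise ImportError(
                f"{feature} requires transformers >= {minimum}, "
                f"but version {installed} is installed."
            )


    # After this change, a 5.0.0.dev0 pre-release no longer satisfies the gate.
    check_version("5.0.0", "encoder models support")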
@@ -502,7 +502,7 @@ class Base(
         )
 
     def set_aux_hidden_state_layers(self, layers: tuple[int, ...]) -> None:
-        self.check_version("5.0.0.dev0", "Eagle3 support")
+        self.check_version("5.0.0", "Eagle3 support")
         from transformers.utils.generic import OutputRecorder
 
         # The default value in PreTrainedModel is None
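For context, `set_aux_hidden_state_layers` tells the backend which intermediate layers' hidden states to expose for Eagle3 speculative decoding; the diff routes this through transformers' `OutputRecorder`. As a rough, hypothetical illustration of the underlying idea only (plain forward hooks instead of `OutputRecorder`; the class and attribute names below are made up, not vLLM's implementation):

    # Hypothetical sketch: capture hidden states from selected decoder layers.
    import torch
    from torch import nn


    class AuxHiddenStateRecorder:
        def __init__(self, model: nn.Module, layers: tuple[int, ...]) -> None:
            self.captured: dict[int, torch.Tensor] = {}
            # Assumes the model exposes its decoder blocks as `model.layers`.
            for idx in layers:
                model.layers[idx].register_forward_hook(self._make_hook(idx))

        def _make_hook(self, idx: int):
            def hook(module, inputs, output):
                # Decoder blocks often return a tuple; keep only the hidden states.
                hidden = output[0] if isinstance(output, tuple) else output
                self.captured[idx] = hidden
            return hook

Eagle3 typically draws on hidden states from only a few layers (for example an early, a middle, and a late block), which is why the selection arrives as a small tuple of indices.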
@@ -118,7 +118,7 @@ direct_register_custom_op(
 
 class MoEMixin(MixtureOfExperts):
     def __init__(self, *, vllm_config: "VllmConfig", prefix: str = ""):
-        self.check_version("5.0.0.dev0", "MoE models support")
+        self.check_version("5.0.0", "MoE models support")
         # Skip MixtureOfExperts.__init__ and call the next class in MRO
         super(MixtureOfExperts, self).__init__(vllm_config=vllm_config, prefix=prefix)
 
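One detail worth calling out: `super(MixtureOfExperts, self).__init__(...)` is the standard way to skip one class in the MRO, since passing an explicit class to `super()` starts the lookup after that class rather than after the current one. A self-contained toy demonstration of the pattern (the class names are illustrative, not vLLM's real hierarchy):

    # Toy example of skipping a class in the MRO with an explicit super().
    class Interface:
        def __init__(self):
            raise NotImplementedError("should be skipped")


    class Backend:
        def __init__(self, *, prefix: str = "") -> None:
            print(f"Backend.__init__ called with prefix={prefix!r}")


    class Mixin(Interface):
        def __init__(self, *, prefix: str = "") -> None:
            # super(Interface, self) starts the lookup *after* Interface, so
            # Interface.__init__ is never called and Backend.__init__ runs next.
            super(Interface, self).__init__(prefix=prefix)


    class Model(Mixin, Backend):
        pass


    Model(prefix="model.")  # prints: Backend.__init__ called with prefix='model.'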