[CI/Build][Bugfix] Ensure compatibility with transformers 4.52 (#18678)
Signed-off-by: DarkLight1337 <tlleungac@connect.ust.hk>
This commit is contained in:
@@ -22,8 +22,8 @@ from typing import Literal, Optional, TypedDict, Union
 import torch
 from torch import nn
-from transformers import (BatchFeature, Idefics3Config, Idefics3ImageProcessor,
-                          Idefics3Processor)
+from transformers import (AddedToken, BatchFeature, Idefics3Config,
+                          Idefics3ImageProcessor, Idefics3Processor)
 
 from vllm.config import VllmConfig
 from vllm.model_executor.layers.linear import ReplicatedLinear
@@ -199,13 +199,21 @@ class Idefics3ProcessingInfo(BaseProcessingInfo):
 
         return grid_w * grid_h + 1
 
# TODO: Remove after requiring transformers>=4.52
|
||||
def _get_content(self, token: Union[AddedToken, str]) -> str:
|
||||
if isinstance(token, str):
|
||||
return token
|
||||
|
||||
return token.content
|
||||
|
||||
def _get_image_token(
|
||||
self,
|
||||
processor: Optional[Idefics3Processor]) -> tuple[str, str, str]:
|
||||
if processor is None:
|
||||
processor = self.get_hf_processor()
|
||||
image_token = processor.image_token.content
|
||||
fake_image_token = processor.fake_image_token.content
|
||||
|
||||
image_token = self._get_content(processor.image_token)
|
||||
fake_image_token = self._get_content(processor.fake_image_token)
|
||||
global_image_token = processor.global_image_tag
|
||||
return image_token, fake_image_token, global_image_token
|
||||
|
||||
|
||||
Reference in New Issue
Block a user