[Bugfix]: allow extra fields in requests to openai compatible server (#10463)
Signed-off-by: Guillaume Calmettes <gcalmettes@scaleway.com>
This commit is contained in:
committed by
GitHub
parent
0cd3d9717e
commit
c68f7ede6a
@@ -9,12 +9,15 @@ from pydantic import BaseModel, ConfigDict, Field, model_validator
|
||||
from typing_extensions import Annotated
|
||||
|
||||
from vllm.entrypoints.chat_utils import ChatCompletionMessageParam
|
||||
from vllm.logger import init_logger
|
||||
from vllm.pooling_params import PoolingParams
|
||||
from vllm.sampling_params import (BeamSearchParams, GuidedDecodingParams,
|
||||
RequestOutputKind, SamplingParams)
|
||||
from vllm.sequence import Logprob
|
||||
from vllm.utils import random_uuid
|
||||
|
||||
logger = init_logger(__name__)
|
||||
|
||||
# torch is mocked during docs generation,
|
||||
# so we have to provide the values as literals
|
||||
_MOCK_LONG_INFO = Namespace(min=-9223372036854775808, max=9223372036854775807)
|
||||
@@ -35,8 +38,19 @@ assert _LONG_INFO.max == _MOCK_LONG_INFO.max
|
||||
|
||||
|
||||
class OpenAIBaseModel(BaseModel):
    """Base model for all OpenAI-compatible API request/response schemas.

    The OpenAI API tolerates unknown fields in requests, so extra fields
    are accepted rather than rejected; they are logged (and otherwise
    ignored) so clients can notice typos or unsupported parameters.
    """

    # OpenAI API does allow extra fields; accept them instead of raising
    # a validation error. (The diff residue duplicating an older
    # extra="forbid" config has been removed — two assignments to
    # model_config would silently let the second win.)
    model_config = ConfigDict(extra="allow")

    @model_validator(mode="before")
    @classmethod
    def __log_extra_fields__(cls, data):
        """Warn about request fields that are not part of the schema.

        Runs before validation; when the incoming payload is a dict,
        any keys not declared on the model are reported via a warning
        log. The data is returned unchanged so validation proceeds
        normally.
        """
        if isinstance(data, dict):
            # Keys present in the payload but not declared on the model.
            extra_fields = data.keys() - cls.model_fields.keys()
            if extra_fields:
                logger.warning(
                    "The following fields were present in the request "
                    "but ignored: %s", extra_fields)
        return data
|
||||
|
||||
|
||||
class ErrorResponse(OpenAIBaseModel):
|
||||
|
||||
Reference in New Issue
Block a user