Enable Pydantic mypy checks and convert configs to Pydantic dataclasses (#17599)
Signed-off-by: Harry Mellor <19981378+hmellor@users.noreply.github.com>
This commit is contained in:
@@ -175,11 +175,15 @@ class ChatCompletionNamedToolChoiceParam(OpenAIBaseModel):
|
||||
type: Literal["function"] = "function"
|
||||
|
||||
|
||||
# extra="forbid" is a workaround to have kwargs as a field,
# see https://github.com/pydantic/pydantic/issues/3125
class LogitsProcessorConstructor(BaseModel):
    """Specification for constructing a logits processor.

    Presumably ``qualname`` names the processor callable to import and
    ``args``/``kwargs`` are forwarded to it on construction — confirm
    against the call site that resolves these entries.
    """

    # Qualified name identifying the logits-processor callable.
    qualname: str
    # Optional positional arguments for the constructor; None means none.
    args: Optional[list[Any]] = None
    # Optional keyword arguments for the constructor; None means none.
    kwargs: Optional[dict[str, Any]] = None

    # Forbid unrecognized fields. This is also what allows "kwargs" to be
    # declared as a regular field (see the pydantic issue linked above).
    model_config = ConfigDict(extra="forbid")


# A logits processor entry is either a bare qualname string or a full
# constructor specification.
LogitsProcessors = list[Union[str, LogitsProcessorConstructor]]
|
||||
|
||||
@@ -234,7 +238,7 @@ class ChatCompletionRequest(OpenAIBaseModel):
|
||||
presence_penalty: Optional[float] = 0.0
|
||||
response_format: Optional[AnyResponseFormat] = None
|
||||
seed: Optional[int] = Field(None, ge=_LONG_INFO.min, le=_LONG_INFO.max)
|
||||
stop: Optional[Union[str, list[str]]] = Field(default_factory=list)
|
||||
stop: Optional[Union[str, list[str]]] = []
|
||||
stream: Optional[bool] = False
|
||||
stream_options: Optional[StreamOptions] = None
|
||||
temperature: Optional[float] = None
|
||||
@@ -258,7 +262,7 @@ class ChatCompletionRequest(OpenAIBaseModel):
|
||||
min_p: Optional[float] = None
|
||||
repetition_penalty: Optional[float] = None
|
||||
length_penalty: float = 1.0
|
||||
stop_token_ids: Optional[list[int]] = Field(default_factory=list)
|
||||
stop_token_ids: Optional[list[int]] = []
|
||||
include_stop_str_in_output: bool = False
|
||||
ignore_eos: bool = False
|
||||
min_tokens: int = 0
|
||||
@@ -756,7 +760,7 @@ class CompletionRequest(OpenAIBaseModel):
|
||||
n: int = 1
|
||||
presence_penalty: Optional[float] = 0.0
|
||||
seed: Optional[int] = Field(None, ge=_LONG_INFO.min, le=_LONG_INFO.max)
|
||||
stop: Optional[Union[str, list[str]]] = Field(default_factory=list)
|
||||
stop: Optional[Union[str, list[str]]] = []
|
||||
stream: Optional[bool] = False
|
||||
stream_options: Optional[StreamOptions] = None
|
||||
suffix: Optional[str] = None
|
||||
@@ -770,7 +774,7 @@ class CompletionRequest(OpenAIBaseModel):
|
||||
min_p: Optional[float] = None
|
||||
repetition_penalty: Optional[float] = None
|
||||
length_penalty: float = 1.0
|
||||
stop_token_ids: Optional[list[int]] = Field(default_factory=list)
|
||||
stop_token_ids: Optional[list[int]] = []
|
||||
include_stop_str_in_output: bool = False
|
||||
ignore_eos: bool = False
|
||||
min_tokens: int = 0
|
||||
|
||||
Reference in New Issue
Block a user