Fix model name included in responses (#24663)
Signed-off-by: Harry Mellor <19981378+hmellor@users.noreply.github.com>
This commit is contained in:
@@ -186,7 +186,7 @@ class OpenAIServingChat(OpenAIServing):
         lora_request = self._maybe_get_adapters(
             request, supports_default_mm_loras=True)

-        model_name = self._get_model_name(request.model, lora_request)
+        model_name = self.models.model_name(lora_request)

         tokenizer = await self.engine_client.get_tokenizer(lora_request)
Reference in New Issue
Block a user