From 836d4ce140a65a0c5067cca58709cfb86c811b81 Mon Sep 17 00:00:00 2001
From: Chauncey
Date: Mon, 16 Jun 2025 22:10:39 +0800
Subject: [PATCH] [Bugfix] fix missing 'finish_reason': null in streaming chat
 (#19662)

Signed-off-by: chaunceyjiang
---
 vllm/entrypoints/openai/serving_chat.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/vllm/entrypoints/openai/serving_chat.py b/vllm/entrypoints/openai/serving_chat.py
index 79eac184a..2a0d4cd74 100644
--- a/vllm/entrypoints/openai/serving_chat.py
+++ b/vllm/entrypoints/openai/serving_chat.py
@@ -873,7 +873,7 @@ class OpenAIServingChat(OpenAIServing):
                     total_tokens=num_prompt_tokens + completion_tokens,
                 )
 
-            data = chunk.model_dump_json(exclude_none=True)
+            data = chunk.model_dump_json(exclude_unset=True)
             yield f"data: {data}\n\n"
 
         # once the final token is handled, if stream_options.include_usage