[Bugfix] [Frontend] Cleanup gpt-oss non-streaming chat tool calls (#25514)

Signed-off-by: Ben Browning <bbrownin@redhat.com>
This commit is contained in:
Ben Browning
2025-09-23 23:20:38 -04:00
committed by GitHub
parent d747c2ef18
commit 5caaeb714c
4 changed files with 102 additions and 12 deletions

View File

@@ -70,7 +70,12 @@ def test_extract_tool_calls_no_tools(openai_tool_parser, harmony_encoding):
assert extracted_info.content == "This is a test"
def test_extract_tool_calls_single_tool(openai_tool_parser, harmony_encoding):
@pytest.mark.parametrize("tool_args", [
'{"location": "Tokyo"}',
'{\n"location": "Tokyo"\n}',
])
def test_extract_tool_calls_single_tool(openai_tool_parser, harmony_encoding,
tool_args):
convo = Conversation.from_messages([
Message.from_role_and_content(Role.USER,
"What is the weather in Tokyo?"),
@@ -80,7 +85,7 @@ def test_extract_tool_calls_single_tool(openai_tool_parser, harmony_encoding):
).with_channel("analysis"),
Message.from_role_and_content(
Role.ASSISTANT,
'{"location": "Tokyo"}').with_channel("commentary").with_recipient(
tool_args).with_channel("commentary").with_recipient(
"functions.get_current_weather").with_content_type("json"),
])
token_ids = harmony_encoding.render_conversation_for_completion(
@@ -121,6 +126,17 @@ def test_extract_tool_calls_multiple_tools(
Role.ASSISTANT,
'{"location": "Tokyo"}').with_channel("commentary").with_recipient(
"functions.get_user_location").with_content_type("json"),
Message.from_role_and_content(
Role.ASSISTANT, '{"location": "Tokyo"}').with_channel(
"commentary").with_recipient("functions.no_content_type"),
Message.from_role_and_content(Role.ASSISTANT, "foo").with_channel(
"commentary").with_recipient("functions.not_json_no_content_type"),
Message.from_role_and_content(
Role.ASSISTANT, '{}').with_channel("commentary").with_recipient(
"functions.empty_args").with_content_type("json"),
Message.from_role_and_content(
Role.ASSISTANT, '').with_channel("commentary").with_recipient(
"functions.no_args").with_content_type("json"),
])
token_ids = harmony_encoding.render_conversation_for_completion(
convo,
@@ -141,7 +157,63 @@ def test_extract_tool_calls_multiple_tools(
ToolCall(function=FunctionCall(
name="get_user_location",
arguments=json.dumps({"location": "Tokyo"}),
)),
ToolCall(function=FunctionCall(
name="no_content_type",
arguments=json.dumps({"location": "Tokyo"}),
)),
ToolCall(function=FunctionCall(
name="not_json_no_content_type",
arguments="foo",
)),
ToolCall(function=FunctionCall(
name="empty_args",
arguments=json.dumps({}),
)),
ToolCall(function=FunctionCall(
name="no_args",
arguments="",
))
]
assert_tool_calls(extracted_info.tool_calls, expected_tool_calls)
assert extracted_info.content is None
def test_extract_tool_calls_with_content(
    openai_tool_parser,
    harmony_encoding,
):
    """A commentary-channel tool call must coexist with final-channel content.

    Renders a Harmony conversation containing an analysis message, a function
    call on the commentary channel, and a final-channel message, then checks
    that the parser both reports the tool call and surfaces the final-channel
    text as ``content``.
    """
    expected_content = "This tool call will get the weather."
    conversation = Conversation.from_messages([
        Message.from_role_and_content(
            Role.USER, "What is the weather in Tokyo based on where I'm at?"),
        Message.from_role_and_content(
            Role.ASSISTANT,
            'User asks: "What is the weather in Tokyo?" based on their location. We need to use get_current_weather tool and get_user_location tool.',  # noqa: E501
        ).with_channel("analysis"),
        # The tool call proper: JSON arguments addressed to the weather tool.
        Message.from_role_and_content(
            Role.ASSISTANT,
            '{"location": "Tokyo"}').with_channel("commentary").with_recipient(
                "functions.get_current_weather").with_content_type("json"),
        # Final-channel text that should be returned as plain content.
        Message.from_role_and_content(
            Role.ASSISTANT, expected_content).with_channel("final"),
    ])
    token_ids = harmony_encoding.render_conversation_for_completion(
        conversation,
        Role.ASSISTANT,
    )
    extracted_info = openai_tool_parser.extract_tool_calls(
        "",
        request=None,
        token_ids=token_ids,
    )
    assert extracted_info.tools_called
    assert_tool_calls(
        extracted_info.tool_calls,
        [
            ToolCall(function=FunctionCall(
                name="get_current_weather",
                arguments=json.dumps({"location": "Tokyo"}),
            )),
        ],
    )
    assert extracted_info.content == expected_content