diff --git a/vllm/entrypoints/openai/tool_parsers/kimi_k2_tool_parser.py b/vllm/entrypoints/openai/tool_parsers/kimi_k2_tool_parser.py
index b0df442dd..834b33052 100644
--- a/vllm/entrypoints/openai/tool_parsers/kimi_k2_tool_parser.py
+++ b/vllm/entrypoints/openai/tool_parsers/kimi_k2_tool_parser.py
@@ -38,15 +38,15 @@ class KimiK2ToolParser(ToolParser):
         self.tool_call_end_token: str = "<|tool_call_end|>"
 
         self.tool_call_regex = re.compile(
-            r"<\|tool_call_begin\|>\s*(?P<tool_call_id>[\w\.]+:\d+)\s*<\|tool_call_argument_begin\|>\s*(?P<function_arguments>.*?)\s*<\|tool_call_end\|>"
+            r"<\|tool_call_begin\|>\s*(?P<tool_call_id>.+:\d+)\s*<\|tool_call_argument_begin\|>\s*(?P<function_arguments>.*?)\s*<\|tool_call_end\|>"
         )
 
         self.stream_tool_call_portion_regex = re.compile(
-            r"(?P<tool_call_id>[\w\.]+:\d+)\s*<\|tool_call_argument_begin\|>\s*(?P<function_arguments>.*)"
+            r"(?P<tool_call_id>.+:\d+)\s*<\|tool_call_argument_begin\|>\s*(?P<function_arguments>.*)"
         )
 
         self.stream_tool_call_name_regex = re.compile(
-            r"(?P<tool_call_id>[\w\.]+:\d+)\s*")
+            r"(?P<tool_call_id>.+:\d+)\s*")
 
         if not self.model_tokenizer:
             raise ValueError(
@@ -374,4 +374,4 @@ class KimiK2ToolParser(ToolParser):
 
         except Exception:
             logger.exception("Error trying to handle streaming tool call.")
-            return None  # do not stream a delta. skip this token ID.
\ No newline at end of file
+            return None  # do not stream a delta. skip this token ID.