[Misc] Print stack trace using logger.exception (#9461)

This commit is contained in:
Cyrus Leung
2024-10-17 21:55:48 +08:00
committed by GitHub
parent e312e52b44
commit 390be74649
8 changed files with 26 additions and 30 deletions

View File

@@ -324,7 +324,7 @@ class OpenAIServingChat(OpenAIServing):
else:
tool_parsers = [None] * num_choices
except RuntimeError as e:
-logger.error("Error in tool parser creation: %s", e)
+logger.exception("Error in tool parser creation.")
data = self.create_streaming_error_response(str(e))
yield f"data: {data}\n\n"
yield "data: [DONE]\n\n"
@@ -600,7 +600,7 @@ class OpenAIServingChat(OpenAIServing):
except ValueError as e:
# TODO: Use a vllm-specific Validation Error
-logger.error("error in chat completion stream generator: %s", e)
+logger.exception("Error in chat completion stream generator.")
data = self.create_streaming_error_response(str(e))
yield f"data: {data}\n\n"
# Send the final done message after all response.n are finished
@@ -687,7 +687,7 @@ class OpenAIServingChat(OpenAIServing):
try:
tool_parser = self.tool_parser(tokenizer)
except RuntimeError as e:
-logger.error("Error in tool parser creation: %s", e)
+logger.exception("Error in tool parser creation.")
return self.create_error_response(str(e))
tool_call_info = tool_parser.extract_tool_calls(

View File

@@ -103,9 +103,9 @@ class Hermes2ProToolParser(ToolParser):
tool_calls=tool_calls,
content=content if content else None)
-except Exception as e:
-    logger.error("Error in extracting tool call from response %s",
-                 e)
+except Exception:
+    logger.exception(
+        "Error in extracting tool call from response.")
return ExtractedToolCallInformation(tools_called=False,
tool_calls=[],
content=model_output)
@@ -333,6 +333,6 @@ class Hermes2ProToolParser(ToolParser):
return delta
-except Exception as e:
-    logger.error("Error trying to handle streaming tool call: %s", e)
+except Exception:
+    logger.exception("Error trying to handle streaming tool call.")
return None # do not stream a delta. skip this token ID.

View File

@@ -166,8 +166,8 @@ class Internlm2ToolParser(ToolParser):
tool_call_arr["arguments"] = self.get_argments(tool_call_arr)
self.prev_tool_call_arr = [tool_call_arr]
return delta
-except Exception as e:
-    logger.error("Error trying to handle streaming tool call: %s", e)
+except Exception:
+    logger.exception("Error trying to handle streaming tool call.")
logger.debug(
"Skipping chunk as a result of tool streaming extraction "
"error")

View File

@@ -112,9 +112,8 @@ class Llama3JsonToolParser(ToolParser):
content=None)
return ret
-except Exception as e:
-    logger.error("Error in extracting tool call from response: %s", e)
-    print("ERROR", e)
+except Exception:
+    logger.exception("Error in extracting tool call from response.")
# return information to just treat the tool call as regular JSON
return ExtractedToolCallInformation(tools_called=False,
tool_calls=[],
@@ -269,8 +268,8 @@ class Llama3JsonToolParser(ToolParser):
self.prev_tool_call_arr = tool_call_arr
return delta
-except Exception as e:
-    logger.error("Error trying to handle streaming tool call: %s", e)
+except Exception:
+    logger.exception("Error trying to handle streaming tool call.")
logger.debug(
"Skipping chunk as a result of tool streaming extraction "
"error")

View File

@@ -111,8 +111,8 @@ class MistralToolParser(ToolParser):
tool_calls=tool_calls,
content=content if len(content) > 0 else None)
-except Exception as e:
-    logger.error("Error in extracting tool call from response: %s", e)
+except Exception:
+    logger.exception("Error in extracting tool call from response.")
# return information to just treat the tool call as regular JSON
return ExtractedToolCallInformation(tools_called=False,
tool_calls=[],
@@ -298,8 +298,8 @@ class MistralToolParser(ToolParser):
self.prev_tool_call_arr = tool_call_arr
return delta
-except Exception as e:
-    logger.error("Error trying to handle streaming tool call: %s", e)
+except Exception:
+    logger.exception("Error trying to handle streaming tool call.")
logger.debug(
"Skipping chunk as a result of tool streaming extraction "
"error")