[Misc] Remove unnecessary detokenization in multimodal processing (#12868)

This commit is contained in:
Cyrus Leung
2025-02-07 22:21:17 +08:00
committed by GitHub
parent 1918aa1b80
commit ce26b16268
4 changed files with 7 additions and 10 deletions

View File

@@ -83,7 +83,7 @@ async def test_single_chat_session_audio(client: openai.AsyncOpenAI,
choice = chat_completion.choices[0]
assert choice.finish_reason == "length"
assert chat_completion.usage == openai.types.CompletionUsage(
-        completion_tokens=10, prompt_tokens=202, total_tokens=212)
+        completion_tokens=10, prompt_tokens=201, total_tokens=211)
message = choice.message
message = chat_completion.choices[0].message
@@ -140,7 +140,7 @@ async def test_single_chat_session_audio_base64encoded(
choice = chat_completion.choices[0]
assert choice.finish_reason == "length"
assert chat_completion.usage == openai.types.CompletionUsage(
-        completion_tokens=10, prompt_tokens=202, total_tokens=212)
+        completion_tokens=10, prompt_tokens=201, total_tokens=211)
message = choice.message
message = chat_completion.choices[0].message
@@ -196,7 +196,7 @@ async def test_single_chat_session_input_audio(
choice = chat_completion.choices[0]
assert choice.finish_reason == "length"
assert chat_completion.usage == openai.types.CompletionUsage(
-        completion_tokens=10, prompt_tokens=202, total_tokens=212)
+        completion_tokens=10, prompt_tokens=201, total_tokens=211)
message = choice.message
message = chat_completion.choices[0].message