(bugfix): Fixed encode in LLM entrypoint for IOProcessor plugin prompts (#34618)

Signed-off-by: Christian Pinto <christian.pinto@ibm.com>
Author: Christian Pinto
Date: 2026-02-16 15:33:55 +00:00
Committed by: GitHub
Parent: 03a8770a6d
Commit: 6930becd45

3 changed files with 17 additions and 5 deletions


@@ -1135,7 +1135,15 @@ class LLM:
             )
             # Validate the request data is valid for the loaded plugin
-            validated_prompt = self.io_processor.parse_data(prompts)
+            prompt_data = prompts.get("data")
+            if prompt_data is None:
+                raise ValueError(
+                    "The 'data' field of the prompt is expected to contain "
+                    "the prompt data and it cannot be None. "
+                    "Refer to the documentation of the IOProcessor "
+                    "in use for more details."
+                )
+            validated_prompt = self.io_processor.parse_data(prompt_data)
             # obtain the actual model prompts from the pre-processor
             prompts = self.io_processor.pre_process(prompt=validated_prompt)
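
For context, a minimal self-contained sketch of the guard this change introduces. `encode_prompt` and `StubIOProcessor` below are illustrative stand-ins, not vLLM internals; only the extraction of the "data" field and the ValueError mirror the actual patch.

```python
# Minimal sketch of the fixed behavior; names here are illustrative only.
class StubIOProcessor:
    """Stand-in for an IOProcessor plugin (hypothetical, for illustration)."""

    def parse_data(self, data):
        # A real plugin would validate/deserialize the payload here.
        return data

    def pre_process(self, prompt):
        # A real plugin would turn the validated payload into model prompts.
        return [str(prompt)]


def encode_prompt(prompts: dict, io_processor):
    # Mirrors the patched entrypoint: the payload must live under "data".
    prompt_data = prompts.get("data")
    if prompt_data is None:
        raise ValueError(
            "The 'data' field of the prompt is expected to contain "
            "the prompt data and it cannot be None."
        )
    validated_prompt = io_processor.parse_data(prompt_data)
    return io_processor.pre_process(prompt=validated_prompt)


if __name__ == "__main__":
    encode_prompt({"data": {"text": "hello"}}, StubIOProcessor())  # ok
    try:
        encode_prompt({"text": "hello"}, StubIOProcessor())  # missing "data"
    except ValueError as err:
        print(f"rejected: {err}")
```

Before this change, the whole prompt dict was handed to `parse_data`, so the plugin received the wrapper object rather than the payload stored under "data"; the new guard also fails fast with a clear error when that field is absent.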