(bugfix): Fixed encode in LLM entrypoint for IOProcessor plugin prompts (#34618)
Signed-off-by: Christian Pinto <christian.pinto@ibm.com>
This commit is contained in:
@@ -20,13 +20,15 @@ def main():
|
||||
torch.set_default_dtype(torch.float16)
|
||||
image_url = "https://huggingface.co/christian-pinto/Prithvi-EO-2.0-300M-TL-VLLM/resolve/main/valencia_example_2024-10-26.tiff" # noqa: E501
|
||||
|
||||
img_prompt = dict(
|
||||
img_data = dict(
|
||||
data=image_url,
|
||||
data_format="url",
|
||||
image_format="tiff",
|
||||
out_data_format="b64_json",
|
||||
)
|
||||
|
||||
prompt = dict(data=img_data)
|
||||
|
||||
llm = LLM(
|
||||
model="ibm-nasa-geospatial/Prithvi-EO-2.0-300M-TL-Sen1Floods11",
|
||||
skip_tokenizer_init=True,
|
||||
@@ -41,7 +43,7 @@ def main():
|
||||
enable_mm_embeds=True,
|
||||
)
|
||||
|
||||
pooler_output = llm.encode(img_prompt, pooling_task="plugin")
|
||||
pooler_output = llm.encode(prompt, pooling_task="plugin")
|
||||
output = pooler_output[0].outputs
|
||||
|
||||
print(output)
|
||||
|
||||
Reference in New Issue
Block a user