Merge similar examples in offline_inference into single basic example (#12737)

Harry Mellor
2025-02-20 12:53:51 +00:00
committed by GitHub
parent b69692a2d8
commit 992e5c3d34
29 changed files with 394 additions and 437 deletions


@@ -147,7 +147,7 @@ class Example:
             return content

         content += "## Example materials\n\n"
-        for file in self.other_files:
+        for file in sorted(self.other_files):
             include = "include" if file.suffix == ".md" else "literalinclude"
             content += f":::{{admonition}} {file.relative_to(self.path)}\n"
             content += ":class: dropdown\n\n"
@@ -194,7 +194,7 @@ def generate_examples():
             path=EXAMPLE_DOC_DIR / "examples_offline_inference_index.md",
             title="Offline Inference",
             description=
-            "Offline inference examples demonstrate how to use vLLM in an offline setting, where the model is queried for predictions in batches.",  # noqa: E501
+            "Offline inference examples demonstrate how to use vLLM in an offline setting, where the model is queried for predictions in batches. We recommend starting with <project:basic.md>.",  # noqa: E501
             caption="Examples",
         ),
     }