[Frontend] Disallow passing model as both argument and option (#7347)
This commit is contained in:
@@ -9,6 +9,7 @@ from typing import List, Optional
|
||||
from openai import OpenAI
|
||||
from openai.types.chat import ChatCompletionMessageParam
|
||||
|
||||
from vllm.engine.arg_utils import EngineArgs
|
||||
from vllm.entrypoints.openai.api_server import run_server
|
||||
from vllm.entrypoints.openai.cli_args import make_arg_parser
|
||||
from vllm.utils import FlexibleArgumentParser
|
||||
@@ -24,6 +25,12 @@ def register_signal_handlers():
|
||||
|
||||
|
||||
def serve(args: argparse.Namespace) -> None:
    """Prepare parsed `vllm serve` CLI args for the engine.

    With `vllm serve`, the model is supplied as the positional
    `model_tag` argument. Passing it via `--model` as well is rejected,
    then the positional value is copied into `args.model`, which is the
    field `EngineArgs` actually reads.

    Raises:
        ValueError: if the user supplied an explicit `--model` option
            (i.e. `args.model` differs from the `EngineArgs` default).
    """
    default_model = EngineArgs.model
    # Any deviation from the default means the user passed --model
    # explicitly, which conflicts with the positional model_tag.
    if args.model != default_model:
        raise ValueError(
            "With `vllm serve`, you should provide the model as a "
            "positional argument instead of via the `--model` option.")

    # EngineArgs expects the model name to be passed as --model.
    args.model = args.model_tag
|
||||
|
||||
|
||||
Reference in New Issue
Block a user