Move ModelConfig from config/__init__.py to config/model.py (#25252)

Signed-off-by: Harry Mellor <19981378+hmellor@users.noreply.github.com>
This commit is contained in:
Harry Mellor
2025-09-19 17:22:33 +01:00
committed by GitHub
parent cf278ff3b2
commit aed16879a9
13 changed files with 2160 additions and 2149 deletions

View File

@@ -19,7 +19,7 @@ from typing import Callable, Optional, TypeVar, Union
import torch.nn as nn
import transformers
-from vllm.config import (ModelConfig, ModelImpl, iter_architecture_defaults,
+from vllm.config import (ModelConfig, iter_architecture_defaults,
try_match_architecture_defaults)
from vllm.logger import init_logger
from vllm.transformers_utils.dynamic_module import (
@@ -587,7 +587,7 @@ class _ModelRegistry:
if model_module is not None:
break
else:
-if model_config.model_impl != ModelImpl.TRANSFORMERS:
+if model_config.model_impl != "transformers":
return None
raise ValueError(
@@ -598,7 +598,7 @@ class _ModelRegistry:
"'auto_map' (relevant if the model is custom).")
if not model_module.is_backend_compatible():
-if model_config.model_impl != ModelImpl.TRANSFORMERS:
+if model_config.model_impl != "transformers":
return None
raise ValueError(
@@ -644,20 +644,20 @@ class _ModelRegistry:
raise ValueError("No model architectures are specified")
# Require transformers impl
-if model_config.model_impl == ModelImpl.TRANSFORMERS:
+if model_config.model_impl == "transformers":
arch = self._try_resolve_transformers(architectures[0],
model_config)
if arch is not None:
model_info = self._try_inspect_model_cls(arch)
if model_info is not None:
return (model_info, arch)
-elif model_config.model_impl == ModelImpl.TERRATORCH:
+elif model_config.model_impl == "terratorch":
model_info = self._try_inspect_model_cls("Terratorch")
return (model_info, "Terratorch")
# Fallback to transformers impl (after resolving convert_type)
if (all(arch not in self.models for arch in architectures)
-and model_config.model_impl == ModelImpl.AUTO
+and model_config.model_impl == "auto"
and getattr(model_config, "convert_type", "none") == "none"):
arch = self._try_resolve_transformers(architectures[0],
model_config)
@@ -674,7 +674,7 @@ class _ModelRegistry:
# Fallback to transformers impl (before resolving runner_type)
if (all(arch not in self.models for arch in architectures)
-and model_config.model_impl == ModelImpl.AUTO):
+and model_config.model_impl == "auto"):
arch = self._try_resolve_transformers(architectures[0],
model_config)
if arch is not None:
@@ -695,14 +695,14 @@ class _ModelRegistry:
raise ValueError("No model architectures are specified")
# Require transformers impl
-if model_config.model_impl == ModelImpl.TRANSFORMERS:
+if model_config.model_impl == "transformers":
arch = self._try_resolve_transformers(architectures[0],
model_config)
if arch is not None:
model_cls = self._try_load_model_cls(arch)
if model_cls is not None:
return (model_cls, arch)
-elif model_config.model_impl == ModelImpl.TERRATORCH:
+elif model_config.model_impl == "terratorch":
arch = "Terratorch"
model_cls = self._try_load_model_cls(arch)
if model_cls is not None:
@@ -710,7 +710,7 @@ class _ModelRegistry:
# Fallback to transformers impl (after resolving convert_type)
if (all(arch not in self.models for arch in architectures)
-and model_config.model_impl == ModelImpl.AUTO
+and model_config.model_impl == "auto"
and getattr(model_config, "convert_type", "none") == "none"):
arch = self._try_resolve_transformers(architectures[0],
model_config)
@@ -727,7 +727,7 @@ class _ModelRegistry:
# Fallback to transformers impl (before resolving runner_type)
if (all(arch not in self.models for arch in architectures)
-and model_config.model_impl == ModelImpl.AUTO):
+and model_config.model_impl == "auto"):
arch = self._try_resolve_transformers(architectures[0],
model_config)
if arch is not None: