Convert formatting to use ruff instead of yapf + isort (#26247)

Signed-off-by: Harry Mellor <19981378+hmellor@users.noreply.github.com>
This commit is contained in:
Harry Mellor
2025-10-05 15:06:22 +01:00
committed by GitHub
parent 17edd8a807
commit d6953beb91
1508 changed files with 115244 additions and 94146 deletions

View File

@@ -21,9 +21,10 @@ def prepare_object_to_dump(obj) -> str:
if isinstance(obj, str):
return f"'{obj}'" # Double quotes
elif isinstance(obj, dict):
dict_str = ', '.join({f'{str(k)}: {prepare_object_to_dump(v)}' \
for k, v in obj.items()})
return f'{{{dict_str}}}'
dict_str = ", ".join(
{f"{str(k)}: {prepare_object_to_dump(v)}" for k, v in obj.items()}
)
return f"{{{dict_str}}}"
elif isinstance(obj, list):
return f"[{', '.join([prepare_object_to_dump(v) for v in obj])}]"
elif isinstance(obj, set):
@@ -36,15 +37,14 @@ def prepare_object_to_dump(obj) -> str:
elif isinstance(obj, torch.Tensor):
# We only print the 'draft' of the tensor to not expose sensitive data
# and to get some metadata in case of CUDA runtime crashed
return (f"Tensor(shape={obj.shape}, "
f"device={obj.device},"
f"dtype={obj.dtype})")
elif hasattr(obj, 'anon_repr'):
return f"Tensor(shape={obj.shape}, device={obj.device},dtype={obj.dtype})"
elif hasattr(obj, "anon_repr"):
return obj.anon_repr()
elif hasattr(obj, '__dict__'):
elif hasattr(obj, "__dict__"):
items = obj.__dict__.items()
dict_str = ', '.join([f'{str(k)}={prepare_object_to_dump(v)}' \
for k, v in items])
dict_str = ", ".join(
[f"{str(k)}={prepare_object_to_dump(v)}" for k, v in items]
)
return f"{type(obj).__name__}({dict_str})"
else:
# Hacky way to make sure we can serialize the object in JSON format
@@ -54,18 +54,22 @@ def prepare_object_to_dump(obj) -> str:
return repr(obj)
def dump_engine_exception(config: VllmConfig,
scheduler_output: SchedulerOutput,
scheduler_stats: Optional[SchedulerStats]):
def dump_engine_exception(
    config: VllmConfig,
    scheduler_output: SchedulerOutput,
    scheduler_stats: Optional[SchedulerStats],
):
    """Best-effort dump of engine state after an engine failure.

    Delegates to ``_dump_engine_exception``. Any exception raised while
    producing the dump is swallowed so the diagnostic logging can never
    mask the original engine error.

    Args:
        config: Engine configuration to include in the dump.
        scheduler_output: Scheduler output for the failing step.
        scheduler_stats: Optional scheduler statistics; may be ``None``.
    """
    # NOTE: suppress every exception so that logging this extra debug
    # info cannot itself raise and hide the original failure.
    with contextlib.suppress(Exception):
        _dump_engine_exception(config, scheduler_output, scheduler_stats)
def _dump_engine_exception(config: VllmConfig,
scheduler_output: SchedulerOutput,
scheduler_stats: Optional[SchedulerStats]):
def _dump_engine_exception(
config: VllmConfig,
scheduler_output: SchedulerOutput,
scheduler_stats: Optional[SchedulerStats],
):
logger.error(
"Dumping input data for V1 LLM engine (v%s) with config: %s, ",
VLLM_VERSION,
@@ -73,8 +77,7 @@ def _dump_engine_exception(config: VllmConfig,
)
try:
dump_obj = prepare_object_to_dump(scheduler_output)
logger.error("Dumping scheduler output for model execution: %s",
dump_obj)
logger.error("Dumping scheduler output for model execution: %s", dump_obj)
if scheduler_stats:
logger.error("Dumping scheduler stats: %s", scheduler_stats)
except Exception:

View File

@@ -18,7 +18,6 @@ class NewLineFormatter(logging.Formatter):
self.root_dir = Path(__file__).resolve().parent.parent.parent
def format(self, record):
def shrink_path(relpath: Path) -> str:
"""
Shortens a file path for logging display:
@@ -62,8 +61,7 @@ class NewLineFormatter(logging.Formatter):
abs_path = getattr(record, "pathname", None)
if abs_path:
try:
relpath = Path(abs_path).resolve().relative_to(
self.root_dir)
relpath = Path(abs_path).resolve().relative_to(self.root_dir)
except Exception:
relpath = Path(record.filename)
else:

View File

@@ -15,15 +15,17 @@ def logtime(logger, msg=None):
"""
def _inner(func):
@functools.wraps(func)
def _wrapper(*args, **kwargs):
start = time.perf_counter()
result = func(*args, **kwargs)
elapsed = time.perf_counter() - start
prefix = f"Function '{func.__module__}.{func.__qualname__}'" \
if msg is None else msg
prefix = (
f"Function '{func.__module__}.{func.__qualname__}'"
if msg is None
else msg
)
logger.debug("%s: Elapsed time %.7f secs", prefix, elapsed)
return result