[TPU] Lazy Import (#15656)

Signed-off-by: rshaw@neuralmagic.com <robertgshaw2@gmail.com>
This commit is contained in:
Robert Shaw
2025-03-27 21:57:01 -04:00
committed by GitHub
parent 15dac210f0
commit bd45912b99

View File

@@ -15,7 +15,6 @@ import torch
 from torch.distributed import ProcessGroup, TCPStore
 from torch.distributed.distributed_c10d import (Backend, PrefixStore,
                                                 _get_default_timeout,
-                                                _shutdown_backend,
                                                 _unregister_process_group,
                                                 is_nccl_available)
 from torch.distributed.rendezvous import rendezvous
@@ -343,5 +342,7 @@ def stateless_destroy_torch_distributed_process_group(
     Destroy ProcessGroup returned by
     stateless_init_torch_distributed_process_group().
     """
+    # Lazy import for non-CUDA backends.
+    from torch.distributed.distributed_c10d import _shutdown_backend
     _shutdown_backend(pg)
     _unregister_process_group(pg.group_name)