[Misc] Allow disabling pynccl (#25421)

Signed-off-by: Lu Fang <fanglu@fb.com>
Co-authored-by: Lucia (Lu) Fang <fanglu@meta.com>
Author: Lucia Fang
Date: 2025-09-30 23:04:13 -07:00 (committed by GitHub)
Parent: 2a69ab4899
Commit: f48b6a03ba
3 changed files with 12 additions and 1 deletion

vllm/distributed/device_communicators/cuda_communicator.py

@@ -147,6 +147,10 @@ class CudaCommunicator(DeviceCommunicatorBase):
             assert out is not None
             return out
         pynccl_comm = self.pynccl_comm
+        if pynccl_comm is None or pynccl_comm.disabled:
+            out = input_.clone()
+            torch.distributed.all_reduce(out, group=self.device_group)
+            return out
         assert pynccl_comm is not None
         out = pynccl_comm.all_reduce(input_)
         if out is None:
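
For readers skimming the hunk, here is a minimal sketch of the control flow it introduces, written as a hypothetical free function rather than vLLM's actual CudaCommunicator method. The clone matters because torch.distributed.all_reduce mutates its argument in place, while the pynccl path returns a fresh tensor, so the fallback keeps the same out-of-place contract for callers.

# Hedged sketch, not vLLM's implementation: a standalone version of the
# fallback path added above. `pynccl_comm` stands in for any object with
# a `disabled` flag and an out-of-place `all_reduce`, mirroring
# PyNcclCommunicator.
import torch
import torch.distributed as dist

def all_reduce_with_fallback(input_: torch.Tensor, pynccl_comm, device_group):
    if pynccl_comm is None or pynccl_comm.disabled:
        out = input_.clone()  # dist.all_reduce below works in place
        dist.all_reduce(out, group=device_group)
        return out
    return pynccl_comm.all_reduce(input_)  # pynccl returns a new tensor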

vllm/distributed/device_communicators/pynccl.py

@@ -8,6 +8,7 @@ import torch
 import torch.distributed as dist
 from torch.distributed import ProcessGroup, ReduceOp
 
+import vllm.envs as envs
 from vllm.distributed.device_communicators.pynccl_wrapper import (
     NCCLLibrary, buffer_type, cudaStream_t, ncclComm_t, ncclDataTypeEnum,
     ncclRedOpTypeEnum, ncclUniqueId)
@@ -83,7 +84,7 @@ class PyNcclCommunicator:
         self.group = group
         # if world_size == 1, no need to create communicator
-        if self.world_size == 1:
+        if self.world_size == 1 or envs.VLLM_DISABLE_PYNCCL:
             self.available = False
             self.disabled = True
             return
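
The third changed file is not shown here; presumably it registers VLLM_DISABLE_PYNCCL in vllm/envs.py. A hedged sketch of what that registration might look like, following the lambda-in-dict pattern vLLM uses for its other boolean flags (the commit's exact entry may differ):

# Hypothetical sketch of the env-var registration, not the commit's
# verbatim code: vllm/envs.py maps variable names to parser lambdas.
import os

environment_variables = {
    # If set to "1", PyNcclCommunicator marks itself disabled at
    # construction and CudaCommunicator falls back to the plain
    # torch.distributed.all_reduce path.
    "VLLM_DISABLE_PYNCCL":
    lambda: bool(int(os.getenv("VLLM_DISABLE_PYNCCL", "0"))),
}

At runtime the switch is just an environment variable, e.g. launching with VLLM_DISABLE_PYNCCL=1 in the environment, which should route every tensor-parallel all-reduce through the torch.distributed fallback instead of pynccl.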