1 parent ab2555e commit 5a24bd5
vllm/distributed/parallel_state.py
@@ -938,6 +938,13 @@ def init_distributed_environment(
     assert distributed_init_method is not None, (
         "distributed_init_method must be provided when initializing "
         "distributed environment")
+    if not torch.distributed.is_backend_available(backend):
+        logger.warning(
+            "Distributed backend %s is not available; "
+            "falling back to gloo.", backend)
+        assert torch.distributed.is_gloo_available(), (
+            "Fallback Gloo backend is not available.")
+        backend = "gloo"
     # this backend is used for WORLD
     torch.distributed.init_process_group(
         backend=backend,
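
For context, here is a minimal standalone sketch of the fallback pattern this commit introduces: probe whether the requested process-group backend is available before initialization, and substitute gloo if it is not. The helper name `pick_backend` and the demo at the bottom are illustrative, not part of the commit; the sketch assumes a PyTorch version that provides `torch.distributed.is_backend_available` and `torch.distributed.is_gloo_available`.

```python
# Hypothetical helper illustrating the backend-fallback logic above;
# not vLLM API. Assumes PyTorch with torch.distributed available.
import torch.distributed as dist


def pick_backend(requested: str = "nccl") -> str:
    """Return `requested` if that backend is available, else fall back
    to gloo (asserting that gloo itself is available)."""
    if dist.is_backend_available(requested):
        return requested
    assert dist.is_gloo_available(), "Fallback Gloo backend is not available."
    return "gloo"


if __name__ == "__main__":
    # On a CPU-only machine without NCCL, this prints "gloo".
    print("Using distributed backend:", pick_backend("nccl"))
```

Doing the check before `init_process_group` means the warning is logged once up front, instead of surfacing later as an opaque initialization failure on hosts without the requested backend.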