3 files changed: +5 −5 lines changed

distributed/device_communicators
@@ -72,7 +72,7 @@ def __init__(self,
             # currently be an MI300 series.
             self.qr_comm = QuickAllReduce(group=self.cpu_group,
                                           device=self.device)
-        if envs.VLLM_USE_SYMM_MEM and current_platform.is_cuda():
+        if envs.VLLM_ALLREDUCE_USE_SYMM_MEM and current_platform.is_cuda():
             self.symm_mem_comm = SymmMemCommunicator(
                 group=self.cpu_group,
                 device=self.device,
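For readers skimming the diff: the renamed flag only gates construction of the symmetric-memory communicator; when it is unset, the attribute stays None and callers fall back to the other all-reduce paths. Below is a minimal, hypothetical sketch of that guard pattern (FakeDeviceCommunicator and _flag are illustrative names, not vLLM's actual classes; only the shape of the condition mirrors the hunk above):

import os

def _flag(name: str) -> bool:
    # Mirrors the envs.py parsing shown later in this diff:
    # "1" -> True, "0" or unset -> False; a non-integer value raises.
    return bool(int(os.getenv(name, "0")))

class FakeDeviceCommunicator:
    # Hypothetical stand-in for the communicator class in this hunk.
    def __init__(self, is_cuda: bool):
        self.symm_mem_comm = None
        if _flag("VLLM_ALLREDUCE_USE_SYMM_MEM") and is_cuda:
            # In vLLM this would construct SymmMemCommunicator(group=..., device=...).
            self.symm_mem_comm = object()

comm = FakeDeviceCommunicator(is_cuda=True)
print(comm.symm_mem_comm)  # None unless VLLM_ALLREDUCE_USE_SYMM_MEM=1 is set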
@@ -117,7 +117,7 @@ def __init__(self,
         # now `device` is a `torch.device` object
         assert isinstance(device, torch.device)
         self.device = device
-        if current_platform.is_cuda() and envs.VLLM_USE_SYMM_MEM:
+        if current_platform.is_cuda() and envs.VLLM_ALLREDUCE_USE_SYMM_MEM:
             max_size = CustomAllreduce._MAX_SIZES[world_size]

         cuda_visible_devices = envs.CUDA_VISIBLE_DEVICES
@@ -139,7 +139,7 @@
 VLLM_ROCM_QUICK_REDUCE_CAST_BF16_TO_FP16: bool = True
 VLLM_ROCM_QUICK_REDUCE_MAX_SIZE_BYTES_MB: Optional[int] = None
 VLLM_NIXL_ABORT_REQUEST_TIMEOUT: int = 120
-VLLM_USE_SYMM_MEM: bool = False
+VLLM_ALLREDUCE_USE_SYMM_MEM: bool = False


 def get_default_cache_root():
@@ -965,8 +965,8 @@ def get_vllm_port() -> Optional[int]:
     lambda: int(os.getenv("VLLM_NIXL_ABORT_REQUEST_TIMEOUT", "120")),

     # Whether to use pytorch symmetric memory for allreduce
-    "VLLM_USE_SYMM_MEM":
-    lambda: bool(int(os.getenv("VLLM_USE_SYMM_MEM", "0"))),
+    "VLLM_ALLREDUCE_USE_SYMM_MEM":
+    lambda: bool(int(os.getenv("VLLM_ALLREDUCE_USE_SYMM_MEM", "0"))),
 }

 # --8<-- [end:env-vars-definition]
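Since the env-var table parses the value with bool(int(...)), the flag is enabled with the string "1". A usage sketch, assuming a standard vLLM installation (the envs module evaluates these lambdas lazily, so the variable only needs to be set before the flag is first read):

import os

# "1" enables the flag, "0" (the default) disables it;
# a non-integer value would raise ValueError when parsed.
os.environ["VLLM_ALLREDUCE_USE_SYMM_MEM"] = "1"

import vllm.envs as envs  # assumes vLLM is installed

print(envs.VLLM_ALLREDUCE_USE_SYMM_MEM)  # True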