Skip to content

Commit 1d5c616

Browse files
maxdebayser authored and Chen-zexi committed
Remove extra tensor on CPU (#20693)
Signed-off-by: Max de Bayser <mbayser@br.ibm.com>
1 parent 3b1c762 commit 1d5c616

File tree

1 file changed

+13
-5
lines changed

1 file changed

+13
-5
lines changed

vllm/v1/sample/logits_processor.py

Lines changed: 13 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -234,10 +234,16 @@ def __init__(self, max_num_reqs: int, pin_memory: bool,
234234
device="cpu",
235235
pin_memory=pin_memory)
236236
self.min_p_cpu = self.min_p_cpu_tensor.numpy()
237-
# Pre-allocated device tensor
238-
self.min_p_device: torch.Tensor = torch.empty((max_num_reqs, ),
239-
dtype=torch.float32,
240-
device=device)
237+
238+
self.use_double_tensor = torch.device("cpu") != torch.device(device)
239+
240+
if self.use_double_tensor:
241+
# Pre-allocated device tensor
242+
self.min_p_device: torch.Tensor = torch.empty((max_num_reqs, ),
243+
dtype=torch.float32,
244+
device=device)
245+
else:
246+
self.min_p_device = self.min_p_cpu_tensor
241247
# Current slice of the device tensor
242248
self.min_p: torch.Tensor = self.min_p_device[:0]
243249

@@ -284,7 +290,9 @@ def update_state(self, batch_update: Optional[BatchUpdate]):
284290
size = batch_update.batch_size
285291
if self.min_p_count and (needs_update or self.min_p.shape[0] != size):
286292
self.min_p = self.min_p_device[:size]
287-
self.min_p.copy_(self.min_p_cpu_tensor[:size], non_blocking=True)
293+
if self.use_double_tensor:
294+
self.min_p.copy_(self.min_p_cpu_tensor[:size],
295+
non_blocking=True)
288296
self.min_p.unsqueeze_(1)
289297

290298
def apply(self, logits: torch.Tensor) -> torch.Tensor:

0 commit comments

Comments (0)