Commit b20641e

Remove config warning log from GradAcc API (#9006)
1 parent 39aef3a commit b20641e

File tree

1 file changed: +0 −8 lines

torch_xla/experimental/gradient_accumulation.py

Lines changed: 0 additions & 8 deletions
@@ -390,14 +390,6 @@ def body_fn(iteri: torch.Tensor, _: torch.Tensor,
     grads = [param.grad for param in params]
     return (iteri, loss, *iterable_tensors, *carried_tensors, *params, *grads)
 
-  if not torch_xla._XLAC._xla_get_enable_alias_with_buffer_donor_config():
-    warnings.warn(
-        'Buffer donation is currently not enabled for gradient accumulation '
-        'The resulting computed gradients will be unaliased from the initial '
-        'gradient tensors. In order to donate and discard the former gradient '
-        'tensors, consider enabling `_xla_set_enable_alias_with_buffer_donor_config(True)`'
-    )
-
   for param in model_parameters:
     if not param.requires_grad:
       continue
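
For context on what the removed warning was advising, here is a minimal, hypothetical sketch of turning on the buffer-donation aliasing config before running the gradient accumulation helper. It relies only on the two _XLAC bindings named in the diff above; the placement in user code and the sanity check are illustrative assumptions, not part of this commit.

import torch_xla

# Hypothetical usage sketch: opt in to aliasing with buffer donors so that the
# gradient buffers produced by the accumulation loop can be donated, as the
# removed warning suggested.
torch_xla._XLAC._xla_set_enable_alias_with_buffer_donor_config(True)

# Sanity check via the getter that the removed code called; assumed to return
# True after the call above.
assert torch_xla._XLAC._xla_get_enable_alias_with_buffer_donor_config()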
