Skip to content

Commit 49740c6

Browse files
committed
use delete_offload_module
Signed-off-by: Kyle Sayers <kylesayrs@gmail.com>
1 parent 0a4fea5 commit 49740c6

File tree

1 file changed

+6
-2
lines changed
  • src/compressed_tensors/transform/factory/base.py

1 file changed

+6
-2
lines changed

src/compressed_tensors/transform/factory/base.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@
2626
)
2727
from compressed_tensors.utils import (
2828
align_module_device,
29+
delete_offload_module,
2930
has_offloaded_params,
3031
patch_attr,
3132
register_offload_module,
@@ -99,10 +100,10 @@ def _apply_to_module(self, module: Module, args: TransformArgs):
99100
# create transform as submodule
100101
transform_name = f"{self.name}_{args.location.value}"
101102
transform = self.create_transform(module, args)
103+
register_offload_module(module, transform_name, transform)
102104

103105
# register input transformation hook
104106
if args.location == TransformLocation.INPUT:
105-
register_offload_module(module, transform_name, transform)
106107

107108
def input_hook(_, args):
108109
input = args[0]
@@ -118,6 +119,7 @@ def input_hook(_, args):
118119
assert isinstance(module, torch.nn.Linear)
119120
assert module.bias is None
120121

122+
# fuse transform into weight
121123
with torch.no_grad(), align_module_device(module):
122124
update_offload_parameter(module, "weight", transform(module.weight))
123125

@@ -128,9 +130,11 @@ def input_hook(_, args):
128130
raise ValueError("Offloaded training is not supported")
129131
P.register_parametrization(module, "weight", transform)
130132

133+
# transform is no longer needed (unfusing is not supported)
134+
delete_offload_module(module, transform_name)
135+
131136
# register output transformation hook
132137
elif args.location == TransformLocation.OUTPUT:
133-
register_offload_module(module, transform_name, transform)
134138

135139
def output_hook(_, _input, output):
136140
return transform(output)

0 commit comments

Comments (0)