Dedup constants in emitter #15139
```diff
@@ -466,6 +466,15 @@ def _tensor_spec_to_evalue(
             and spec.extra_tensor_info.location == TensorDataLocation.EXTERNAL
         ):
             buffer_idx = self.program_state.external_constant_hash.get(hashed, -1)
+            if buffer_idx != -1:
+                # Save the constant tag for the external tensor
+                if constant_tag not in self.program_state.external_constant_map:
+                    # pyre-ignore Undefined attribute [16]: `Optional` has no attribute `fully_qualified_name`.
+                    self.program_state.external_constant_map[constant_tag] = {}
+                # pyre-ignore Undefined attribute [16]: `Optional` has no attribute `fully_qualified_name`.
+                self.program_state.external_constant_map[constant_tag][
+                    spec.extra_tensor_info.fully_qualified_name
+                ] = buffer_idx
         else:
             buffer_idx = self.program_state.cached_spec_hash_values.get(hashed, -1)
```

Review comment on lines +471 to +477: also please assert that `constant_tag` is not None.
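A minimal sketch of how the reviewer's requested assertion could slot into the new branch. The surrounding names come from the diff above; the exact placement and message are assumptions, not the committed code:

```python
# Sketch (assumption): assert before using constant_tag as a dict key,
# so a None tag fails loudly instead of silently creating a None entry.
if buffer_idx != -1:
    assert (
        constant_tag is not None
    ), "constant_tag must be set for external constant tensors"  # assumed message
    # Save the constant tag for the external tensor.
    if constant_tag not in self.program_state.external_constant_map:
        self.program_state.external_constant_map[constant_tag] = {}
    self.program_state.external_constant_map[constant_tag][
        spec.extra_tensor_info.fully_qualified_name
    ] = buffer_idx
```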
```diff
@@ -1719,6 +1719,45 @@ def forward(self, x):
         self.assertEqual(external_map["linear.weight"], 0)
         self.assertEqual(external_map["linear.bias"], 1)

+    def test_constant_tagged_tensor_dedup(self) -> None:
+        class ConstantModule(nn.Module):
+            def __init__(self):
+                super().__init__()
+                constant_value = torch.tensor([1.0, 2.0, 3.0])
+
+                # Register the same value with two different names as persistent buffers.
+                self.register_buffer(
+                    "constant_a", constant_value.clone(), persistent=True
+                )
+                self.register_buffer(
+                    "constant_b", constant_value.clone(), persistent=True
+                )
+
+            def forward(self, x):
+                return x + self.constant_a + self.constant_b
+
+        model = to_edge(
+            export(ConstantModule(), (torch.ones(1, 3),), strict=True)
+        ).to_executorch(
+            config=ExecutorchBackendConfig(
+                external_constants=True,
+            )
+        )
+        emitter_output = model._emitter_output
+        # Check that constant_buffer is empty besides the non-constant placeholder 0.
+        self.assertEqual(len(emitter_output.program.constant_buffer), 1)
+        # Check that constant weights are in the external constant buffer.
+        self.assertEqual(len(emitter_output.external_constant_buffer), 1)
+        # Setting external_constants=True saves all constants to the key
+        # '_default_external_constant'.
+        external_map = emitter_output.external_constant_map[
+            "_default_external_constant"
+        ]
+        self.assertEqual(len(external_map), 2)
+        # Confirm that the same tensor is used for both constants.
+        self.assertEqual(external_map["constant_a"], 0)
+        self.assertEqual(external_map["constant_b"], 0)
+
     def test_delegate_deduplicate(self) -> None:
         class SharedModule(torch.nn.Module):
             def __init__(self):
```

Review comment on `test_constant_tagged_tensor_dedup`: also add another test with 4 identical constants.
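A sketch of the extra test the reviewer asks for, following the same pattern as `test_constant_tagged_tensor_dedup` above. The test name and the exact assertions are assumptions:

```python
def test_constant_tagged_tensor_dedup_four_identical(self) -> None:
    class ConstantModule(nn.Module):
        def __init__(self):
            super().__init__()
            constant_value = torch.tensor([1.0, 2.0, 3.0])
            # Register the same value under four different names.
            for name in ("constant_a", "constant_b", "constant_c", "constant_d"):
                self.register_buffer(name, constant_value.clone(), persistent=True)

        def forward(self, x):
            return (
                x
                + self.constant_a
                + self.constant_b
                + self.constant_c
                + self.constant_d
            )

    model = to_edge(
        export(ConstantModule(), (torch.ones(1, 3),), strict=True)
    ).to_executorch(
        config=ExecutorchBackendConfig(external_constants=True)
    )
    emitter_output = model._emitter_output
    # All four identical constants should be stored exactly once.
    self.assertEqual(len(emitter_output.external_constant_buffer), 1)
    external_map = emitter_output.external_constant_map[
        "_default_external_constant"
    ]
    # Every name is mapped, and all resolve to the same buffer index.
    self.assertEqual(len(external_map), 4)
    for name in ("constant_a", "constant_b", "constant_c", "constant_d"):
        self.assertEqual(external_map[name], 0)
```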
Review comment: write a big inline comment on why this is necessary. This is quite subtle.