2 changes: 1 addition & 1 deletion .ci/docker/ci_commit_pins/pytorch.txt
@@ -1 +1 @@
e47e8794499a4a0130ff4efb8713ff93f4b40c36
c8a648d4dffb9f0133ff4a2ea0e660b42105d3ad
25 changes: 9 additions & 16 deletions examples/models/llama3_2_vision/preprocess/export_preprocess.py
@@ -24,29 +24,22 @@ def main():
strict=False,
)

# Executorch
# AOTInductor. Note: export AOTI before ExecuTorch, as
# ExecuTorch will modify the ExportedProgram.
torch._inductor.aot_compile(
ep.module(),
model.get_example_inputs(),
options={"aot_inductor.output_path": "preprocess_aoti.so"},
)

# Executorch.
edge_program = to_edge(
ep, compile_config=EdgeCompileConfig(_check_ir_validity=False)
)
et_program = edge_program.to_executorch()
with open("preprocess_et.pte", "wb") as file:
et_program.write_to_file(file)

# Export.
# ep = torch.export.export(
# model.get_eager_model(),
# model.get_example_inputs(),
# dynamic_shapes=model.get_dynamic_shapes(),
# strict=False,
# )
#
# # AOTInductor
# torch._inductor.aot_compile(
# ep.module(),
# model.get_example_inputs(),
# options={"aot_inductor.output_path": "preprocess_aoti.so"},
# )


if __name__ == "__main__":
main()
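Not part of the diff: a minimal sketch of how the preprocess_aoti.so artifact produced by this script might be loaded and run, mirroring the torch._export.aot_load call added in test_preprocess.py below. The input tensors are placeholder assumptions standing in for model.get_example_inputs(); only the aot_load call and the (image, inscribed_size, best_resolution) calling convention come from this PR.

import torch

# Load the AOTInductor shared library for CPU execution.
aoti_model = torch._export.aot_load("preprocess_aoti.so", "cpu")

# Placeholder inputs with assumed shapes; the real values come from
# model.get_example_inputs() in export_preprocess.py.
image_tensor = torch.rand(3, 800, 600)
inscribed_size = torch.tensor([448, 336], dtype=torch.int64)
best_resolution = torch.tensor([448, 448], dtype=torch.int64)

processed_image, aspect_ratio = aoti_model(image_tensor, inscribed_size, best_resolution)
print(processed_image.shape, aspect_ratio.tolist())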
30 changes: 20 additions & 10 deletions examples/models/llama3_2_vision/preprocess/test_preprocess.py
@@ -64,22 +64,30 @@ def initialize_models(resize_to_max_canvas: bool) -> Dict[str, Any]:
strict=False,
)

# aoti_path = torch._inductor.aot_compile(
# exported_model.module(),
# model.get_example_inputs(),
# )
aoti_path = torch._inductor.aot_compile(
exported_model.module(),
model.get_example_inputs(),
)

edge_program = to_edge(
exported_model, compile_config=EdgeCompileConfig(_check_ir_validity=False)
)
executorch_model = edge_program.to_executorch()

# Re-export as ExecuTorch edits the ExportedProgram.
exported_model = torch.export.export(
model.get_eager_model(),
model.get_example_inputs(),
dynamic_shapes=model.get_dynamic_shapes(),
strict=False,
)

return {
"config": config,
"reference_model": reference_model,
"model": model,
"exported_model": exported_model,
# "aoti_path": aoti_path,
"aoti_path": aoti_path,
"executorch_model": executorch_model,
}
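For context, a consolidated sketch (not part of the diff) of the ordering this PR establishes: AOTInductor consumes the ExportedProgram first, ExecuTorch lowering then mutates it in place, and a fresh export is taken when an untouched ExportedProgram is still needed afterwards. The helper name export_for_both and its return tuple are illustrative; the individual calls are the ones appearing in export_preprocess.py and test_preprocess.py above.

import torch
from executorch.exir import EdgeCompileConfig, to_edge

def export_for_both(model):
    ep = torch.export.export(
        model.get_eager_model(),
        model.get_example_inputs(),
        dynamic_shapes=model.get_dynamic_shapes(),
        strict=False,
    )

    # AOTInductor first, while ep is still unmodified.
    aoti_path = torch._inductor.aot_compile(ep.module(), model.get_example_inputs())

    # ExecuTorch lowering edits ep in place.
    et_program = to_edge(
        ep, compile_config=EdgeCompileConfig(_check_ir_validity=False)
    ).to_executorch()

    # Re-export to recover a clean ExportedProgram for any later use.
    ep = torch.export.export(
        model.get_eager_model(),
        model.get_example_inputs(),
        dynamic_shapes=model.get_dynamic_shapes(),
        strict=False,
    )
    return aoti_path, et_program, ep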

@@ -265,11 +273,13 @@ def run_preprocess(
), f"Executorch model: expected {reference_ar} but got {et_ar.tolist()}"

# Run aoti model and check it matches reference model.
# aoti_path = models["aoti_path"]
# aoti_model = torch._export.aot_load(aoti_path, "cpu")
# aoti_image, aoti_ar = aoti_model(image_tensor, inscribed_size, best_resolution)
# self.assertTrue(torch.allclose(reference_image, aoti_image))
# self.assertEqual(reference_ar, aoti_ar.tolist())
aoti_path = models["aoti_path"]
aoti_model = torch._export.aot_load(aoti_path, "cpu")
aoti_image, aoti_ar = aoti_model(image_tensor, inscribed_size, best_resolution)
assert_expected(aoti_image, reference_image, rtol=0, atol=1e-4)
assert (
reference_ar == aoti_ar.tolist()
), f"AOTI model: expected {reference_ar} but got {aoti_ar.tolist()}"

# This test setup mirrors the one in torchtune:
# https://github.com/pytorch/torchtune/blob/main/tests/torchtune/models/clip/test_clip_image_transform.py
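As a side note, a hedged sketch of the same output check written with stock torch.testing utilities instead of the test's assert_expected helper; the tolerances (rtol=0, atol=1e-4) are taken from the diff above, and the function name check_aoti_against_reference is illustrative.

import torch

def check_aoti_against_reference(reference_image, reference_ar, aoti_image, aoti_ar):
    # Element-wise closeness with the same tolerances used in the test.
    torch.testing.assert_close(aoti_image, reference_image, rtol=0, atol=1e-4)
    assert reference_ar == aoti_ar.tolist(), (
        f"AOTI model: expected {reference_ar} but got {aoti_ar.tolist()}"
    )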
2 changes: 1 addition & 1 deletion install_requirements.py
@@ -112,7 +112,7 @@ def python_is_compatible():
# NOTE: If a newly-fetched version of the executorch repo changes the value of
# NIGHTLY_VERSION, you should re-run this script to install the necessary
# package versions.
NIGHTLY_VERSION = "dev20241030"
NIGHTLY_VERSION = "dev20241101"

# The pip repository that hosts nightly torch packages.
TORCH_NIGHTLY_URL = "https://download.pytorch.org/whl/nightly/cpu"