From 4755a209072e58ec0dd73b048b6d716d1d0268a5 Mon Sep 17 00:00:00 2001
From: lucylq
Date: Tue, 29 Oct 2024 11:43:45 -0700
Subject: [PATCH 1/2] enable aoti for preprocess ci

[ghstack-poisoned]
---
 .../preprocess/test_preprocess.py             | 20 +++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)

diff --git a/examples/models/llama3_2_vision/preprocess/test_preprocess.py b/examples/models/llama3_2_vision/preprocess/test_preprocess.py
index ba97f37dbac..93c239549e7 100644
--- a/examples/models/llama3_2_vision/preprocess/test_preprocess.py
+++ b/examples/models/llama3_2_vision/preprocess/test_preprocess.py
@@ -76,10 +76,10 @@ def initialize_models(resize_to_max_canvas: bool) -> Dict[str, Any]:
             strict=False,
         )

-        # aoti_path = torch._inductor.aot_compile(
-        #     exported_model.module(),
-        #     model.get_example_inputs(),
-        # )
+        aoti_path = torch._inductor.aot_compile(
+            exported_model.module(),
+            model.get_example_inputs(),
+        )

         edge_program = to_edge(
             exported_model, compile_config=EdgeCompileConfig(_check_ir_validity=False)
         )
@@ -91,7 +91,7 @@ def initialize_models(resize_to_max_canvas: bool) -> Dict[str, Any]:
             "reference_model": reference_model,
             "model": model,
             "exported_model": exported_model,
-            # "aoti_path": aoti_path,
+            "aoti_path": aoti_path,
             "executorch_model": executorch_model,
         }

@@ -237,11 +237,11 @@ def run_preprocess(
         self.assertEqual(reference_ar, et_ar.tolist())

         # Run aoti model and check it matches reference model.
-        # aoti_path = models["aoti_path"]
-        # aoti_model = torch._export.aot_load(aoti_path, "cpu")
-        # aoti_image, aoti_ar = aoti_model(image_tensor, inscribed_size, best_resolution)
-        # self.assertTrue(torch.allclose(reference_image, aoti_image))
-        # self.assertEqual(reference_ar, aoti_ar.tolist())
+        aoti_path = models["aoti_path"]
+        aoti_model = torch._export.aot_load(aoti_path, "cpu")
+        aoti_image, aoti_ar = aoti_model(image_tensor, inscribed_size, best_resolution)
+        self.assertTrue(torch.allclose(reference_image, aoti_image))
+        self.assertEqual(reference_ar, aoti_ar.tolist())

         # This test setup mirrors the one in torchtune:
         # https://github.com/pytorch/torchtune/blob/main/tests/torchtune/models/clip/test_clip_image_transform.py

From 58fa495df0e5526d571a612a3f87c72d5611d41c Mon Sep 17 00:00:00 2001
From: lucylq
Date: Tue, 29 Oct 2024 12:59:16 -0700
Subject: [PATCH 2/2] Update on "Enable aoti for preprocess"

Land and update torch nightly pin after: https://github.com/pytorch/pytorch/pull/137063

Test Plan:
With https://github.com/pytorch/pytorch/pull/137063:
```
python -m unittest examples.models.llama3_2_vision.preprocess.test_preprocess
```

[ghstack-poisoned]
---
 .../preprocess/export_preprocess.py           | 25 +++++++------------
 .../preprocess/test_preprocess.py             |  8 ++++++
 2 files changed, 17 insertions(+), 16 deletions(-)

diff --git a/examples/models/llama3_2_vision/preprocess/export_preprocess.py b/examples/models/llama3_2_vision/preprocess/export_preprocess.py
index d82f79c2f35..550d1bfb48d 100644
--- a/examples/models/llama3_2_vision/preprocess/export_preprocess.py
+++ b/examples/models/llama3_2_vision/preprocess/export_preprocess.py
@@ -24,7 +24,15 @@ def main():
         strict=False,
     )

-    # Executorch
+    # AOTInductor. Note: export AOTI before ExecuTorch, as
+    # ExecuTorch will modify the ExportedProgram.
+    torch._inductor.aot_compile(
+        ep.module(),
+        model.get_example_inputs(),
+        options={"aot_inductor.output_path": "preprocess_aoti.so"},
+    )
+
+    # Executorch.
     edge_program = to_edge(
         ep, compile_config=EdgeCompileConfig(_check_ir_validity=False)
     )
@@ -32,21 +40,6 @@ def main():
     with open("preprocess_et.pte", "wb") as file:
         et_program.write_to_file(file)

-    # Export.
-    # ep = torch.export.export(
-    #     model.get_eager_model(),
-    #     model.get_example_inputs(),
-    #     dynamic_shapes=model.get_dynamic_shapes(),
-    #     strict=False,
-    # )
-    #
-    # # AOTInductor
-    # torch._inductor.aot_compile(
-    #     ep.module(),
-    #     model.get_example_inputs(),
-    #     options={"aot_inductor.output_path": "preprocess_aoti.so"},
-    # )
-

 if __name__ == "__main__":
     main()

diff --git a/examples/models/llama3_2_vision/preprocess/test_preprocess.py b/examples/models/llama3_2_vision/preprocess/test_preprocess.py
index 93c239549e7..0a41ca018d7 100644
--- a/examples/models/llama3_2_vision/preprocess/test_preprocess.py
+++ b/examples/models/llama3_2_vision/preprocess/test_preprocess.py
@@ -86,6 +86,14 @@ def initialize_models(resize_to_max_canvas: bool) -> Dict[str, Any]:
         )
         executorch_model = edge_program.to_executorch()

+        # Re-export, as lowering to executorch changes the graph.
+        exported_model = torch.export.export(
+            model.get_eager_model(),
+            model.get_example_inputs(),
+            dynamic_shapes=model.get_dynamic_shapes(),
+            strict=False,
+        )
+
         return {
            "config": config,
            "reference_model": reference_model,
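
For reference, the AOTI artifact written by export_preprocess.py can be loaded back with torch._export.aot_load, the same call the updated test uses. Below is a minimal sketch, assuming preprocess_aoti.so exists in the working directory; the input tensors are placeholders for illustration only, since the real inputs are the (image_tensor, inscribed_size, best_resolution) tuple returned by the model's get_example_inputs():

```
import torch

# Load the shared library produced by export_preprocess.py (path assumed).
aoti_model = torch._export.aot_load("preprocess_aoti.so", "cpu")

# Placeholder inputs for illustration only; real runs should pass the same
# (image_tensor, inscribed_size, best_resolution) inputs used at export time,
# i.e. model.get_example_inputs().
image_tensor = torch.randint(0, 256, (3, 800, 600), dtype=torch.uint8)
inscribed_size = torch.tensor([448, 336])
best_resolution = torch.tensor([448, 448])

preprocessed_image, aspect_ratio = aoti_model(image_tensor, inscribed_size, best_resolution)
print(preprocessed_image.shape, aspect_ratio)
```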