From e6aed8e5a4fe0bc7318147a28b7006f29b6ca89f Mon Sep 17 00:00:00 2001 From: Arjun Suresh Date: Wed, 19 Feb 2025 04:26:09 +0530 Subject: [PATCH 1/3] ../get-onnxruntime-prebuilt/meta.yaml --- script/get-onnxruntime-prebuilt/meta.yaml | 2 +- .../run-all-mlperf-models/run-cpp-implementation.sh | 11 +++++++++-- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/script/get-onnxruntime-prebuilt/meta.yaml b/script/get-onnxruntime-prebuilt/meta.yaml index 57078077a..3a3a185a9 100644 --- a/script/get-onnxruntime-prebuilt/meta.yaml +++ b/script/get-onnxruntime-prebuilt/meta.yaml @@ -4,7 +4,7 @@ automation_uid: 5b4e0237da074764 cache: true category: AI/ML frameworks clean_files: [] -default_version: 1.16.3 +default_version: 1.20.1 deps: - tags: detect,os new_env_keys: diff --git a/script/run-all-mlperf-models/run-cpp-implementation.sh b/script/run-all-mlperf-models/run-cpp-implementation.sh index 345a57a3d..fbe13dc54 100644 --- a/script/run-all-mlperf-models/run-cpp-implementation.sh +++ b/script/run-all-mlperf-models/run-cpp-implementation.sh @@ -27,9 +27,16 @@ division="closed" # run "$MLC_RUN_CMD" POWER=" --power=yes --adr.mlperf-power-client.power_server=192.168.0.15 --adr.mlperf-power-client.port=4950 " -POWER="" +POWER=" --env.MLC_GET_PLATFORM_DETAILS=no" + +#run "mlcr set,system,performance,mode" + +run "mlcr generate-run-cmds,inference,_find-performance \ +--model=resnet50 --implementation=cpp --device=cuda --backend=onnxruntime \ +--adr.compiler.tags=gcc \ +--test_query_count=20000 \ +--category=edge --division=open --scenario=Offline --quiet" -run "mlcr set,system,performance,mode" #cpp run "mlcr generate-run-cmds,inference,_find-performance \ From e92bb3eae148919943e8749af5a3ca61687583ad Mon Sep 17 00:00:00 2001 From: Arjun Suresh Date: Wed, 19 Feb 2025 04:27:57 +0530 Subject: [PATCH 2/3] ../get-onnxruntime-prebuilt/meta.yaml --- .../run-cpp-implementation.sh | 27 +++++++------------ 1 file changed, 10 insertions(+), 17 deletions(-) diff --git 
a/script/run-all-mlperf-models/run-cpp-implementation.sh b/script/run-all-mlperf-models/run-cpp-implementation.sh index fbe13dc54..3d4c7b93d 100644 --- a/script/run-all-mlperf-models/run-cpp-implementation.sh +++ b/script/run-all-mlperf-models/run-cpp-implementation.sh @@ -31,13 +31,6 @@ POWER=" --env.MLC_GET_PLATFORM_DETAILS=no" #run "mlcr set,system,performance,mode" -run "mlcr generate-run-cmds,inference,_find-performance \ ---model=resnet50 --implementation=cpp --device=cuda --backend=onnxruntime \ ---adr.compiler.tags=gcc \ ---test_query_count=20000 \ ---category=edge --division=open --scenario=Offline --quiet" - - #cpp run "mlcr generate-run-cmds,inference,_find-performance \ --model=resnet50 --implementation=cpp --device=cpu --backend=onnxruntime \ @@ -58,7 +51,7 @@ run "mlcr generate-run-cmds,inference,_submission \ --execution_mode=valid \ --skip_submission_generation=yes \ ${POWER} \ ---results_dir=$HOME/results_dir" +" run "mlcr generate-run-cmds,inference,_submission \ --model=retinanet --implementation=cpp --device=cpu --backend=onnxruntime \ @@ -68,7 +61,7 @@ run "mlcr generate-run-cmds,inference,_submission \ --execution_mode=valid \ --skip_submission_generation=yes \ ${POWER} \ ---results_dir=$HOME/results_dir" +" run "mlcr generate-run-cmds,inference,_submission \ --model=resnet50 --implementation=cpp --device=cpu --backend=onnxruntime \ @@ -78,7 +71,7 @@ run "mlcr generate-run-cmds,inference,_submission \ --execution_mode=valid \ --skip_submission_generation=yes \ ${POWER} \ ---results_dir=$HOME/results_dir" +" run "mlcr generate-run-cmds,inference,_submission \ --model=retinanet --implementation=cpp --device=cpu --backend=onnxruntime \ @@ -88,7 +81,7 @@ run "mlcr generate-run-cmds,inference,_submission \ --execution_mode=valid \ --skip_submission_generation=yes \ ${POWER} \ ---results_dir=$HOME/results_dir" +" # GPU @@ -113,7 +106,7 @@ run "mlcr generate-run-cmds,inference,_submission \ --execution_mode=valid \ --skip_submission_generation=yes \ 
${POWER} \ ---results_dir=$HOME/results_dir" +" run "mlcr generate-run-cmds,inference,_submission \ --model=retinanet --implementation=cpp --device=cuda --backend=onnxruntime \ @@ -123,7 +116,7 @@ run "mlcr generate-run-cmds,inference,_submission \ --execution_mode=valid \ --skip_submission_generation=yes \ ${POWER} \ ---results_dir=$HOME/results_dir" +" run "mlcr generate-run-cmds,inference,_submission \ @@ -135,7 +128,7 @@ run "mlcr generate-run-cmds,inference,_submission \ --execution_mode=valid \ --skip_submission_generation=yes \ ${POWER} \ ---results_dir=$HOME/results_dir" +" run "mlcr generate-run-cmds,inference,_submission \ --model=retinanet --implementation=cpp --device=cuda --backend=onnxruntime \ @@ -145,7 +138,7 @@ run "mlcr generate-run-cmds,inference,_submission \ --execution_mode=valid \ --skip_submission_generation=yes \ ${POWER} \ ---results_dir=$HOME/results_dir" +" #multistream run "mlcr generate-run-cmds,inference,_submission \ @@ -157,7 +150,7 @@ run "mlcr generate-run-cmds,inference,_submission \ --execution_mode=valid \ --skip_submission_generation=yes \ ${POWER} \ ---results_dir=$HOME/results_dir" +" run "mlcr generate-run-cmds,inference,_submission \ --model=retinanet --implementation=cpp --device=cuda --backend=onnxruntime \ @@ -167,4 +160,4 @@ run "mlcr generate-run-cmds,inference,_submission \ --execution_mode=valid \ --skip_submission_generation=yes \ ${POWER} \ ---results_dir=$HOME/results_dir" +" From 997219a6b08ca993a732f7f262f39039868f81a7 Mon Sep 17 00:00:00 2001 From: Arjun Suresh Date: Fri, 21 Feb 2025 00:37:10 +0530 Subject: [PATCH 3/3] Fix python version for bert deepsparse --- script/run-all-mlperf-models/run-pruned-bert.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/script/run-all-mlperf-models/run-pruned-bert.sh b/script/run-all-mlperf-models/run-pruned-bert.sh index f5ed64042..a0b7af75a 100644 --- a/script/run-all-mlperf-models/run-pruned-bert.sh +++ 
b/script/run-all-mlperf-models/run-pruned-bert.sh @@ -46,7 +46,7 @@ scenario="Offline" if [[ $scenario == "Offline" ]]; then for stub in ${zoo_stub_list[@]}; do cmd="mlcr run,mlperf,inference,generate-run-cmds,_find-performance \ - --adr.python.version_min=3.8 \ + --adr.python.version=3.9.12 \ --implementation=reference \ --model=bert-99 \ --precision=int8 \ @@ -65,7 +65,7 @@ fi for stub in ${zoo_stub_list[@]}; do cmd="mlcr run,mlperf,inference,generate-run-cmds \ - --adr.python.version_min=3.8 \ + --adr.python.version=3.9.12 \ --adr.compiler.tags=gcc \ --implementation=reference \ --model=bert-99 \