Skip to content

Commit 86f7e11

Browse files
authored
Fixes for mlperf submission (#249)
* Fix python version for bert deepsparse
1 parent af51c74 commit 86f7e11

File tree

3 files changed

+15
-15
lines changed

3 files changed

+15
-15
lines changed

script/get-onnxruntime-prebuilt/meta.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ automation_uid: 5b4e0237da074764
44
cache: true
55
category: AI/ML frameworks
66
clean_files: []
7-
default_version: 1.16.3
7+
default_version: 1.20.1
88
deps:
99
- tags: detect,os
1010
new_env_keys:

script/run-all-mlperf-models/run-cpp-implementation.sh

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -27,9 +27,9 @@ division="closed"
2727
# run "$MLC_RUN_CMD"
2828

2929
POWER=" --power=yes --adr.mlperf-power-client.power_server=192.168.0.15 --adr.mlperf-power-client.port=4950 "
30-
POWER=""
30+
POWER=" --env.MLC_GET_PLATFORM_DETAILS=no"
3131

32-
run "mlcr set,system,performance,mode"
32+
#run "mlcr set,system,performance,mode"
3333

3434
#cpp
3535
run "mlcr generate-run-cmds,inference,_find-performance \
@@ -51,7 +51,7 @@ run "mlcr generate-run-cmds,inference,_submission \
5151
--execution_mode=valid \
5252
--skip_submission_generation=yes \
5353
${POWER} \
54-
--results_dir=$HOME/results_dir"
54+
"
5555

5656
run "mlcr generate-run-cmds,inference,_submission \
5757
--model=retinanet --implementation=cpp --device=cpu --backend=onnxruntime \
@@ -61,7 +61,7 @@ run "mlcr generate-run-cmds,inference,_submission \
6161
--execution_mode=valid \
6262
--skip_submission_generation=yes \
6363
${POWER} \
64-
--results_dir=$HOME/results_dir"
64+
"
6565

6666
run "mlcr generate-run-cmds,inference,_submission \
6767
--model=resnet50 --implementation=cpp --device=cpu --backend=onnxruntime \
@@ -71,7 +71,7 @@ run "mlcr generate-run-cmds,inference,_submission \
7171
--execution_mode=valid \
7272
--skip_submission_generation=yes \
7373
${POWER} \
74-
--results_dir=$HOME/results_dir"
74+
"
7575

7676
run "mlcr generate-run-cmds,inference,_submission \
7777
--model=retinanet --implementation=cpp --device=cpu --backend=onnxruntime \
@@ -81,7 +81,7 @@ run "mlcr generate-run-cmds,inference,_submission \
8181
--execution_mode=valid \
8282
--skip_submission_generation=yes \
8383
${POWER} \
84-
--results_dir=$HOME/results_dir"
84+
"
8585

8686
# GPU
8787

@@ -106,7 +106,7 @@ run "mlcr generate-run-cmds,inference,_submission \
106106
--execution_mode=valid \
107107
--skip_submission_generation=yes \
108108
${POWER} \
109-
--results_dir=$HOME/results_dir"
109+
"
110110

111111
run "mlcr generate-run-cmds,inference,_submission \
112112
--model=retinanet --implementation=cpp --device=cuda --backend=onnxruntime \
@@ -116,7 +116,7 @@ run "mlcr generate-run-cmds,inference,_submission \
116116
--execution_mode=valid \
117117
--skip_submission_generation=yes \
118118
${POWER} \
119-
--results_dir=$HOME/results_dir"
119+
"
120120

121121

122122
run "mlcr generate-run-cmds,inference,_submission \
@@ -128,7 +128,7 @@ run "mlcr generate-run-cmds,inference,_submission \
128128
--execution_mode=valid \
129129
--skip_submission_generation=yes \
130130
${POWER} \
131-
--results_dir=$HOME/results_dir"
131+
"
132132

133133
run "mlcr generate-run-cmds,inference,_submission \
134134
--model=retinanet --implementation=cpp --device=cuda --backend=onnxruntime \
@@ -138,7 +138,7 @@ run "mlcr generate-run-cmds,inference,_submission \
138138
--execution_mode=valid \
139139
--skip_submission_generation=yes \
140140
${POWER} \
141-
--results_dir=$HOME/results_dir"
141+
"
142142

143143
#multistream
144144
run "mlcr generate-run-cmds,inference,_submission \
@@ -150,7 +150,7 @@ run "mlcr generate-run-cmds,inference,_submission \
150150
--execution_mode=valid \
151151
--skip_submission_generation=yes \
152152
${POWER} \
153-
--results_dir=$HOME/results_dir"
153+
"
154154

155155
run "mlcr generate-run-cmds,inference,_submission \
156156
--model=retinanet --implementation=cpp --device=cuda --backend=onnxruntime \
@@ -160,4 +160,4 @@ run "mlcr generate-run-cmds,inference,_submission \
160160
--execution_mode=valid \
161161
--skip_submission_generation=yes \
162162
${POWER} \
163-
--results_dir=$HOME/results_dir"
163+
"

script/run-all-mlperf-models/run-pruned-bert.sh

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ scenario="Offline"
4646
if [[ $scenario == "Offline" ]]; then
4747
for stub in ${zoo_stub_list[@]}; do
4848
cmd="mlcr run,mlperf,inference,generate-run-cmds,_find-performance \
49-
--adr.python.version_min=3.8 \
49+
--adr.python.version=3.9.12 \
5050
--implementation=reference \
5151
--model=bert-99 \
5252
--precision=int8 \
@@ -65,7 +65,7 @@ fi
6565

6666
for stub in ${zoo_stub_list[@]}; do
6767
cmd="mlcr run,mlperf,inference,generate-run-cmds \
68-
--adr.python.version_min=3.8 \
68+
--adr.python.version=3.9.12 \
6969
--adr.compiler.tags=gcc \
7070
--implementation=reference \
7171
--model=bert-99 \

0 commit comments

Comments
 (0)