@@ -35,40 +35,41 @@ jobs:
 
       - name : Test Python venv
         run : |
-          mlc run script --tags=install,python-venv --name=test --quiet
+          mlcr --tags=install,python-venv --name=test --quiet
           mlc search cache --tags=get,python,virtual,name-test --quiet
 
       - name : Test variations
         run : |
-          mlc run script --tags=get,dataset,preprocessed,imagenet,_NHWC --quiet
+          mlcr --tags=get,dataset,preprocessed,imagenet,_NHWC --quiet
           mlc search cache --tags=get,dataset,preprocessed,imagenet,-_NCHW
           mlc search cache --tags=get,dataset,preprocessed,imagenet,-_NHWC
 
       - name : Test versions
         continue-on-error : true
         if : runner.os == 'linux'
         run : |
-          mlc run script --tags=get,generic-python-lib,_package.scipy --version=1.9.3 --quiet
+          mlcr --tags=get,generic-python-lib,_package.scipy --version=1.9.3 --quiet
           test $? -eq 0 || exit $?
-          mlc run script --tags=get,generic-python-lib,_package.scipy --version=1.9.2 --quiet
+          mlcr --tags=get,generic-python-lib,_package.scipy --version=1.9.2 --quiet
           test $? -eq 0 || exit $?
-          mlc run script --tags=get,generic-python-lib,_package.scipy --version=1.9.3 --quiet --only_execute_from_cache=True
-          test $? -eq 0 || exit 0
+          # Need to add find cache here
+          # mlcr --tags=get,generic-python-lib,_package.scipy --version=1.9.3 --quiet --only_execute_from_cache=True
+          # test $? -eq 0 || exit 0
 
       - name : Test python install from src
         run : |
-          mlc run script --tags=python,src,install,_shared --version=3.9.10 --quiet
+          mlcr --tags=python,src,install,_shared --version=3.9.10 --quiet
           mlc search cache --tags=python,src,install,_shared,version-3.9.10
 
       - name : Run docker container from dockerhub on linux
         if : runner.os == 'linux'
         run : |
-          mlc run script --tags=run,docker,container --adr.compiler.tags=gcc --docker_mlc_repo=mlcommons@mlperf-automations --docker_mlc_repo_branch=dev --image_name=cm-script-app-image-classification-onnx-py --env.MLC_DOCKER_RUN_SCRIPT_TAGS=app,image-classification,onnx,python --env.MLC_DOCKER_IMAGE_BASE=ubuntu:22.04 --env.MLC_DOCKER_IMAGE_REPO=cknowledge --quiet
+          mlcr --tags=run,docker,container --adr.compiler.tags=gcc --docker_mlc_repo=mlcommons@mlperf-automations --docker_mlc_repo_branch=dev --image_name=cm-script-app-image-classification-onnx-py --env.MLC_DOCKER_RUN_SCRIPT_TAGS=app,image-classification,onnx,python --env.MLC_DOCKER_IMAGE_BASE=ubuntu:22.04 --env.MLC_DOCKER_IMAGE_REPO=cknowledge --quiet
 
       - name : Run docker container locally on linux
         if : runner.os == 'linux'
         run : |
-          mlc run script --tags=run,docker,container --adr.compiler.tags=gcc --docker_mlc_repo=mlcommons@mlperf-automations --docker_mlc_repo_branch=dev --image_name=mlc-script-app-image-classification-onnx-py --env.MLC_DOCKER_RUN_SCRIPT_TAGS=app,image-classification,onnx,python --env.MLC_DOCKER_IMAGE_BASE=ubuntu:22.04 --env.MLC_DOCKER_IMAGE_REPO=local --quiet
+          mlcr --tags=run,docker,container --adr.compiler.tags=gcc --docker_mlc_repo=mlcommons@mlperf-automations --docker_mlc_repo_branch=dev --image_name=mlc-script-app-image-classification-onnx-py --env.MLC_DOCKER_RUN_SCRIPT_TAGS=app,image-classification,onnx,python --env.MLC_DOCKER_IMAGE_BASE=ubuntu:22.04 --env.MLC_DOCKER_IMAGE_REPO=local --quiet
 
       - name : Run MLPerf Inference Retinanet with native and virtual Python
         if : runner.os == 'linux'