Skip to content

Commit a2f0887

Browse files
authored
Support mlperf inference llama3.1 model (#223)
1 parent 33da462 commit a2f0887

File tree

8 files changed

+66
-8
lines changed

8 files changed

+66
-8
lines changed

.gitignore

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,10 @@ wheels/
1515
.coverage
1616
htmlcov
1717
*tmp/
18-
*tmp-ck-*/
18+
tmp-*
1919
local/cache/
20-
20+
mlc-log.txt
21+
repos.json
22+
index_script.json
23+
index_cache.json
24+
index_experiment.json

script/get-ml-model-llama2/meta.yaml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -89,6 +89,9 @@ variations:
8989
default: true
9090
env:
9191
MLC_DOWNLOAD_SRC: mlcommons
92+
prehook_deps:
93+
- tags: get,rclone-config,_mlperf-llama2
94+
force_cache: yes
9295
hf:
9396
group: download-source
9497
env:

script/get-ml-model-llama2/run-rclone.sh

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,3 @@
1-
rclone config create mlc-llama2 drive config_is_local=false scope=drive.readonly root_folder_id=11tBZvvrh0FCm3XuR5E849K42TqftYdUF
2-
rclone config reconnect mlc-llama2:
31
cmd="rclone sync mlc-llama2:${MLC_GIT_CHECKOUT_FOLDER} ${LLAMA2_CHECKPOINT_PATH}/${MLC_GIT_CHECKOUT_FOLDER} -P"
42
echo $cmd
53
eval $cmd

script/get-ml-model-llama3/customize.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,12 @@ def preprocess(i):
2020
env['MLC_GIT_CHECKOUT_FOLDER'] = os.path.join(
2121
path, env['MLC_ML_MODEL_NAME'])
2222

23+
if env['MLC_DOWNLOAD_SRC'] == "mlcommons":
24+
i['run_script_input']['script_name'] = 'run-rclone'
25+
if env.get('MLC_OUTDIRNAME', '') != '':
26+
env['LLAMA3_CHECKPOINT_PATH'] = env['MLC_OUTDIRNAME']
27+
else:
28+
env['LLAMA3_CHECKPOINT_PATH'] = os.getcwd()
2329
env['MLC_TMP_REQUIRE_DOWNLOAD'] = 'yes'
2430

2531
return {'return': 0}

script/get-ml-model-llama3/meta.yaml

Lines changed: 32 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,8 @@ prehook_deps:
1212
- enable_if_env:
1313
MLC_TMP_REQUIRE_DOWNLOAD:
1414
- 'yes'
15+
MLC_DOWNLOAD_SRC:
16+
- huggingface
1517
env: {}
1618
extra_cache_tags: llama3,llama-3
1719
force_env_keys:
@@ -37,21 +39,48 @@ variations:
3739
MLC_ML_MODEL_PRECISION: fp16
3840
MLC_ML_MODEL_WEIGHT_DATA_TYPES: fp16
3941
group: precision
42+
405b:
43+
group: model-size
44+
default: true
45+
env:
46+
MLC_ML_MODEL_NAME: Llama-3.1-405B-Instruct
47+
8b:
48+
group: model-size
49+
env:
50+
MLC_ML_MODEL_NAME: Llama-3.1-8b-Instruct
51+
mlc:
52+
group: download-src
53+
default: true
54+
prehook_deps:
55+
- tags: get,rclone-config,_mlperf-llama3-1
56+
force_cache: true
57+
env:
58+
MLC_DOWNLOAD_SRC: mlcommons
59+
hf:
60+
group: download-src
61+
default_variations:
62+
huggingface-stub: meta-llama/Llama-3.1-405B-Instruct
63+
env:
64+
MLC_DOWNLOAD_SRC: huggingface
65+
4066
meta-llama/Llama-3.1-405B-Instruct:
67+
base:
68+
- 405b
4169
adr:
4270
hf-zoo:
4371
tags: _model-stub.meta-llama/Llama-3.1-405B-Instruct
44-
default: true
4572
env:
46-
MLC_ML_MODEL_NAME: Llama-3-405b-instruct
73+
MLC_ML_MODEL_NAME: Llama-3.1-405B-Instruct
4774
MLC_MODEL_ZOO_ENV_KEY: LLAMA3
4875
group: huggingface-stub
4976
meta-llama/Llama-3.1-8B-Instruct:
77+
base:
78+
- 8b
5079
adr:
5180
hf-zoo:
5281
tags: _model-stub.meta-llama/Llama-3.1-8B-Instruct
5382
env:
54-
MLC_ML_MODEL_NAME: Llama-3-8b-instruct
83+
MLC_ML_MODEL_NAME: Llama-3.1-8b-Instruct
5584
MLC_MODEL_ZOO_ENV_KEY: LLAMA3
5685
group: huggingface-stub
5786
vllm:
script/get-ml-model-llama3/run-rclone.sh

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
cmd="rclone sync mlc-llama3-1:inference/${MLC_ML_MODEL_NAME} ${LLAMA3_CHECKPOINT_PATH}/${MLC_ML_MODEL_NAME} -P"
2+
echo $cmd
3+
eval $cmd
4+
test $? -eq 0 || exit $?

script/get-rclone-config/customize.py

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,8 +14,14 @@ def preprocess(i):
1414

1515
quiet = (env.get('MLC_QUIET', False) == 'yes')
1616

17+
run_cmds = []
1718
if env.get('MLC_RCLONE_CONFIG_CMD', '') != '':
18-
env['MLC_RUN_CMD'] = env['MLC_RCLONE_CONFIG_CMD']
19+
run_cmds.append(env['MLC_RCLONE_CONFIG_CMD'])
20+
21+
if env.get('MLC_RCLONE_CONNECT_CMD', '') != '':
22+
run_cmds.append(env['MLC_RCLONE_CONNECT_CMD'])
23+
24+
env['MLC_RUN_CMD'] = ' && '.join(run_cmds)
1925

2026
return {'return': 0}
2127

script/get-rclone-config/meta.yaml

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,3 +11,11 @@ variations:
1111
mlc-inference:
1212
env:
1313
MLC_RCLONE_CONFIG_CMD: 'rclone config create mlc-inference s3 provider=Cloudflare access_key_id=f65ba5eef400db161ea49967de89f47b secret_access_key=fbea333914c292b854f14d3fe232bad6c5407bf0ab1bebf78833c2b359bdfd2b endpoint=https://c2686074cb2caf5cbaf6d134bdba8b47.r2.cloudflarestorage.com'
14+
mlperf-llama2:
15+
env:
16+
MLC_RCLONE_CONFIG_CMD: 'rclone config create mlc-llama2 drive config_is_local=false scope=drive.readonly root_folder_id=11tBZvvrh0FCm3XuR5E849K42TqftYdUF'
17+
MLC_RCLONE_CONNECT_CMD: 'rclone config reconnect mlc-llama2:'
18+
mlperf-llama3-1:
19+
env:
20+
MLC_RCLONE_CONFIG_CMD: 'rclone config create mlc-llama3-1 drive config_is_local=false scope=drive.readonly root_folder_id=12K-2yvmr1ZSZ7SLrhidCbWc0BriN98am'
21+
MLC_RCLONE_CONNECT_CMD: 'rclone config reconnect mlc-llama3-1:'

0 commit comments

Comments
 (0)