8 files changed: 66 additions and 8 deletions.
File 1 of 8 — ignore-pattern list:

@@ -15,6 +15,10 @@ wheels/
 .coverage
 htmlcov
 *tmp/
-*tmp-ck-*/
+tmp-*
 local/cache/
-
+mlc-log.txt
+repos.json
+index_script.json
+index_cache.json
+index_experiment.json
File 2 of 8 — LLaMA2 model script meta (YAML):

@@ -89,6 +89,9 @@ variations:
     default: true
     env:
       MLC_DOWNLOAD_SRC: mlcommons
+    prehook_deps:
+    - tags: get,rclone-config,_mlperf-llama2
+      force_cache: yes
   hf:
     group: download-source
     env:
File 3 of 8 — LLaMA2 run-rclone shell script:

@@ -1,5 +1,3 @@
-rclone config create mlc-llama2 drive config_is_local=false scope=drive.readonly root_folder_id=11tBZvvrh0FCm3XuR5E849K42TqftYdUF
-rclone config reconnect mlc-llama2:
 cmd="rclone sync mlc-llama2:${MLC_GIT_CHECKOUT_FOLDER} ${LLAMA2_CHECKPOINT_PATH}/${MLC_GIT_CHECKOUT_FOLDER} -P"
 echo $cmd
 eval $cmd
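The config-create and reconnect steps removed here are not dropped; they move into the shared rclone-config script (the last two files of this PR), which this script reaches via the new _mlperf-llama2 prehook added above. A rough Python sketch of the resulting command sequence follows; the folder and checkpoint path values are hypothetical placeholders, not values from this PR.

# Sketch only: approximate end-to-end rclone steps after this refactor.
checkout_folder = "Llama-2-70b-chat-hf"   # hypothetical MLC_GIT_CHECKOUT_FOLDER
checkpoint_path = "/data/llama2"          # hypothetical LLAMA2_CHECKPOINT_PATH

config_steps = [
    "rclone config create mlc-llama2 drive config_is_local=false "
    "scope=drive.readonly root_folder_id=11tBZvvrh0FCm3XuR5E849K42TqftYdUF",
    "rclone config reconnect mlc-llama2:",
]
sync_step = (f"rclone sync mlc-llama2:{checkout_folder} "
             f"{checkpoint_path}/{checkout_folder} -P")

print(" && ".join(config_steps))  # now emitted by the rclone-config prehook
print(sync_step)                  # what remains in this script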
File 4 of 8 — LLaMA3 model script Python preprocess hook:

@@ -20,6 +20,12 @@ def preprocess(i):
     env['MLC_GIT_CHECKOUT_FOLDER'] = os.path.join(
         path, env['MLC_ML_MODEL_NAME'])

+    if env['MLC_DOWNLOAD_SRC'] == "mlcommons":
+        i['run_script_input']['script_name'] = 'run-rclone'
+        if env.get('MLC_OUTDIRNAME', '') != '':
+            env['LLAMA3_CHECKPOINT_PATH'] = env['MLC_OUTDIRNAME']
+        else:
+            env['LLAMA3_CHECKPOINT_PATH'] = os.getcwd()
     env['MLC_TMP_REQUIRE_DOWNLOAD'] = 'yes'

     return {'return': 0}
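A minimal sketch of the checkpoint-path selection added here, assuming a plain dict stands in for the MLC env; it shows the fallback to the current working directory when MLC_OUTDIRNAME is unset.

import os

def pick_llama3_checkpoint_path(env):
    # Mirrors the new branch above: only relevant for the mlcommons download source.
    if env.get('MLC_DOWNLOAD_SRC') != 'mlcommons':
        return None
    if env.get('MLC_OUTDIRNAME', '') != '':
        return env['MLC_OUTDIRNAME']
    return os.getcwd()

print(pick_llama3_checkpoint_path({'MLC_DOWNLOAD_SRC': 'mlcommons',
                                   'MLC_OUTDIRNAME': '/models/llama3'}))  # /models/llama3
print(pick_llama3_checkpoint_path({'MLC_DOWNLOAD_SRC': 'mlcommons'}))     # current directory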
File 5 of 8 — LLaMA3 model script meta (YAML):

@@ -12,6 +12,8 @@ prehook_deps:
 - enable_if_env:
     MLC_TMP_REQUIRE_DOWNLOAD:
     - 'yes'
+    MLC_DOWNLOAD_SRC:
+    - huggingface
   env: {}
   extra_cache_tags: llama3,llama-3
   force_env_keys:
@@ -37,21 +39,48 @@ variations:
       MLC_ML_MODEL_PRECISION: fp16
       MLC_ML_MODEL_WEIGHT_DATA_TYPES: fp16
     group: precision
+  405b:
+    group: model-size
+    default: true
+    env:
+      MLC_ML_MODEL_NAME: Llama-3.1-405B-Instruct
+  8b:
+    group: model-size
+    env:
+      MLC_ML_MODEL_NAME: Llama-3.1-8b-Instruct
+  mlc:
+    group: download-src
+    default: true
+    prehook_deps:
+    - tags: get,rclone-config,_mlperf-llama3-1
+      force_cache: true
+    env:
+      MLC_DOWNLOAD_SRC: mlcommons
+  hf:
+    group: download-src
+    default_variations:
+      huggingface-stub: meta-llama/Llama-3.1-405B-Instruct
+    env:
+      MLC_DOWNLOAD_SRC: huggingface
+
   meta-llama/Llama-3.1-405B-Instruct:
+    base:
+    - 405b
     adr:
       hf-zoo:
         tags: _model-stub.meta-llama/Llama-3.1-405B-Instruct
-    default: true
     env:
-      MLC_ML_MODEL_NAME: Llama-3-405b-instruct
+      MLC_ML_MODEL_NAME: Llama-3.1-405B-Instruct
       MLC_MODEL_ZOO_ENV_KEY: LLAMA3
     group: huggingface-stub
   meta-llama/Llama-3.1-8B-Instruct:
+    base:
+    - 8b
     adr:
       hf-zoo:
         tags: _model-stub.meta-llama/Llama-3.1-8B-Instruct
     env:
-      MLC_ML_MODEL_NAME: Llama-3-8b-instruct
+      MLC_ML_MODEL_NAME: Llama-3.1-8b-Instruct
       MLC_MODEL_ZOO_ENV_KEY: LLAMA3
     group: huggingface-stub
   vllm:
File 6 of 8 — new LLaMA3 run-rclone shell script:

@@ -0,0 +1,4 @@
+cmd="rclone sync mlc-llama3-1:inference/${MLC_ML_MODEL_NAME} ${LLAMA3_CHECKPOINT_PATH}/${MLC_ML_MODEL_NAME} -P"
+echo $cmd
+eval $cmd
+test $? -eq 0 || exit $?
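For illustration, this is roughly what the new sync command expands to for the two model-size variations defined in the meta change above; the target directory below is a hypothetical LLAMA3_CHECKPOINT_PATH, not a value from this PR.

# Sketch: expanded rclone sync commands for the 405b (default) and 8b variations.
checkpoint_path = "/data/llama3"  # hypothetical LLAMA3_CHECKPOINT_PATH
for model_name in ("Llama-3.1-405B-Instruct", "Llama-3.1-8b-Instruct"):
    print(f"rclone sync mlc-llama3-1:inference/{model_name} "
          f"{checkpoint_path}/{model_name} -P")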
File 7 of 8 — rclone-config script Python preprocess hook:

@@ -14,8 +14,14 @@ def preprocess(i):

     quiet = (env.get('MLC_QUIET', False) == 'yes')

+    run_cmds = []
     if env.get('MLC_RCLONE_CONFIG_CMD', '') != '':
-        env['MLC_RUN_CMD'] = env['MLC_RCLONE_CONFIG_CMD']
+        run_cmds.append(env['MLC_RCLONE_CONFIG_CMD'])
+
+    if env.get('MLC_RCLONE_CONNECT_CMD', '') != '':
+        run_cmds.append(env['MLC_RCLONE_CONNECT_CMD'])
+
+    env['MLC_RUN_CMD'] = ' && '.join(run_cmds)

     return {'return': 0}

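A self-contained sketch of the new command assembly, assuming a plain dict in place of the MLC env and abbreviated command strings. With the existing mlc-inference variation (config command only) the joined result is unchanged; the two new variations in the meta change below add a reconnect step.

def build_run_cmd(env):
    # Mirrors the updated preprocess(): collect whichever rclone commands are set
    # and join them into a single shell command line.
    run_cmds = []
    for key in ('MLC_RCLONE_CONFIG_CMD', 'MLC_RCLONE_CONNECT_CMD'):
        if env.get(key, '') != '':
            run_cmds.append(env[key])
    return ' && '.join(run_cmds)

# Config-only variation: behaves as before.
print(build_run_cmd({'MLC_RCLONE_CONFIG_CMD': 'rclone config create mlc-inference s3 ...'}))
# Config plus reconnect: both steps run from one MLC_RUN_CMD.
print(build_run_cmd({
    'MLC_RCLONE_CONFIG_CMD': 'rclone config create mlc-llama2 drive ...',
    'MLC_RCLONE_CONNECT_CMD': 'rclone config reconnect mlc-llama2:',
}))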
File 8 of 8 — rclone-config script meta (YAML):

@@ -11,3 +11,11 @@ variations:
   mlc-inference:
     env:
       MLC_RCLONE_CONFIG_CMD: 'rclone config create mlc-inference s3 provider=Cloudflare access_key_id=f65ba5eef400db161ea49967de89f47b secret_access_key=fbea333914c292b854f14d3fe232bad6c5407bf0ab1bebf78833c2b359bdfd2b endpoint=https://c2686074cb2caf5cbaf6d134bdba8b47.r2.cloudflarestorage.com'
+  mlperf-llama2:
+    env:
+      MLC_RCLONE_CONFIG_CMD: 'rclone config create mlc-llama2 drive config_is_local=false scope=drive.readonly root_folder_id=11tBZvvrh0FCm3XuR5E849K42TqftYdUF'
+      MLC_RCLONE_CONNECT_CMD: 'rclone config reconnect mlc-llama2:'
+  mlperf-llama3-1:
+    env:
+      MLC_RCLONE_CONFIG_CMD: 'rclone config create mlc-llama3-1 drive config_is_local=false scope=drive.readonly root_folder_id=12K-2yvmr1ZSZ7SLrhidCbWc0BriN98am'
+      MLC_RCLONE_CONNECT_CMD: 'rclone config reconnect mlc-llama3-1:'
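Putting the pieces together, the MLC_RUN_CMD produced for the new mlperf-llama3-1 variation would look roughly like this, using the same join as in the preprocess sketch above.

# Sketch: resulting MLC_RUN_CMD for the mlperf-llama3-1 variation.
run_cmds = [
    "rclone config create mlc-llama3-1 drive config_is_local=false "
    "scope=drive.readonly root_folder_id=12K-2yvmr1ZSZ7SLrhidCbWc0BriN98am",
    "rclone config reconnect mlc-llama3-1:",
]
print(" && ".join(run_cmds))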