diff --git a/.github/scripts/process_individual_tests.py b/.github/scripts/process_individual_tests.py index a8df24239..708a80963 100644 --- a/.github/scripts/process_individual_tests.py +++ b/.github/scripts/process_individual_tests.py @@ -35,3 +35,5 @@ ret = mlc.access(ii) if ret['return'] > 0: raise Exception(r['error']) + ii = {'action': 'rm', 'target': 'cache', 'f': True} + ret = mlc.access(ii) diff --git a/.github/workflows/build_wheel.yml b/.github/workflows/build_wheel.yml index 8de31237d..1edf42589 100644 --- a/.github/workflows/build_wheel.yml +++ b/.github/workflows/build_wheel.yml @@ -10,7 +10,6 @@ on: paths: - VERSION - jobs: build_wheels: diff --git a/.github/workflows/check-broken-links.yml b/.github/workflows/check-broken-links.yml index 06034c32a..0a07b08ec 100644 --- a/.github/workflows/check-broken-links.yml +++ b/.github/workflows/check-broken-links.yml @@ -1,5 +1,4 @@ name: "Check .md README files for broken links" - on: pull_request: branches: @@ -13,7 +12,6 @@ jobs: steps: - uses: actions/checkout@v4 - # Checks the status of hyperlinks in .md files in verbose mode - name: Check links uses: gaurav-nelson/github-action-markdown-link-check@v1 diff --git a/.github/workflows/test-mlc-script-features.yml b/.github/workflows/test-mlc-script-features.yml index 05d62e2ae..88708f577 100644 --- a/.github/workflows/test-mlc-script-features.yml +++ b/.github/workflows/test-mlc-script-features.yml @@ -43,6 +43,11 @@ jobs: mlcr get,dataset,preprocessed,imagenet,_NHWC --quiet mlc search cache --tags=get,dataset,preprocessed,imagenet,-_NCHW mlc search cache --tags=get,dataset,preprocessed,imagenet,-_NHWC + # Tests run-scripts on windows,ubuntu,macos + + - name: Test native script name variants + run: | + mlcr get,wkhtmltopdf --quiet - name: Test versions continue-on-error: true @@ -52,6 +57,8 @@ jobs: test $? -eq 0 || exit $? mlcr get,generic-python-lib,_package.scipy --version=1.9.2 --quiet test $? -eq 0 || exit $? 
+ mlc find cache --tags=get,generic-python-lib,_package.scipy,version-1.9.3 + test $? -eq 0 || exit $? # Need to add find cache here # mlcr get,generic-python-lib,_package.scipy --version=1.9.3 --quiet --only_execute_from_cache=True # test $? -eq 0 || exit 0 diff --git a/.github/workflows/test-mlperf-inference-resnet50-closed-division.yml b/.github/workflows/test-mlperf-inference-resnet50-closed-division.yml new file mode 100644 index 000000000..586317439 --- /dev/null +++ b/.github/workflows/test-mlperf-inference-resnet50-closed-division.yml @@ -0,0 +1,104 @@ +name: MLPerf inference ResNet50 Closed Division for testing Compliance + +on: + schedule: + - cron: '0 0 * * 0' # Runs once a week on Sunday at 00:00 UTC + workflow_dispatch: {} # Allows manual triggering of the workflow +jobs: + build: + name: MLPerf inference MLCommons ResNet50 Closed Division + runs-on: ${{ matrix.os }} + env: + MLC_INDEX: "on" + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + python-version: [ "3.12" ] + backend: [ "onnxruntime", "tf" ] + implementation: [ "python", "cpp" ] + exclude: + - backend: tf + implementation: cpp + - os: macos-latest + backend: tf + - os: windows-latest + implementation: cpp + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python-version }} + - name: Configure git longpaths (Windows) + if: matrix.os == 'windows-latest' + run: | + git config --system core.longpaths true + + - name: Install mlcflow + run: | + pip install mlcflow + pip install tabulate + + - name: Pull MLOps repo + run: | + mlc pull repo mlcommons@mlperf-automations --branch=dev + + - name: Test MLPerf Inference ResNet50 (Windows) + if: matrix.os == 'windows-latest' + run: | + mlcr run-mlperf,inference,_submission,_short,_all-scenarios --division=closed --submitter="MLCommons" --pull_changes=yes 
--pull_inference_changes=yes --hw_name="gh_${{ matrix.os }} x86" --model=resnet50 --adr.loadgen.tags=_from-pip --pip_loadgen=yes --implementation=${{ matrix.implementation }} --backend=${{ matrix.backend }} --device=cpu --test_query_count=1000 --quiet --execution_mode=valid + + - name: Test MLPerf Inference ResNet50 Offline(Linux/macOS) + if: matrix.os != 'windows-latest' + run: | + mlcr run-mlperf,inference,_submission,_short,_all-scenarios --division=closed --submitter="MLCommons" --pull_changes=yes --pull_inference_changes=yes --hw_name="gh_${{ matrix.os }} x86" --model=resnet50 --implementation=${{ matrix.implementation }} --backend=${{ matrix.backend }} --device=cpu --test_query_count=1000 --quiet --execution_mode=valid + + # Step for Linux/MacOS + - name: Randomly Execute Step (Linux/MacOS) + if: runner.os != 'Windows' + run: | + RANDOM_NUMBER=$((RANDOM % 10)) + echo "Random number is $RANDOM_NUMBER" + if [ "$RANDOM_NUMBER" -eq 0 ]; then + echo "run_step=true" >> $GITHUB_ENV + else + echo "run_step=false" >> $GITHUB_ENV + fi + + # Step for Windows + - name: Randomly Execute Step (Windows) + if: runner.os == 'Windows' + run: | + $RANDOM_NUMBER = Get-Random -Maximum 10 + Write-Host "Random number is $RANDOM_NUMBER" + if ($RANDOM_NUMBER -eq 0) { + Write-Host "run_step=true" | Out-File -FilePath $Env:GITHUB_ENV -Append + } else { + Write-Host "run_step=false" | Out-File -FilePath $Env:GITHUB_ENV -Append + } + + - name: Retrieve secrets from Keeper + if: github.repository_owner == 'mlcommons' && env.run_step == 'true' + id: ksecrets + uses: Keeper-Security/ksm-action@master + with: + keeper-secret-config: ${{ secrets.KSM_CONFIG }} + secrets: |- + ubwkjh-Ii8UJDpG2EoU6GQ/field/Access Token > env:PAT + - name: Push Results + env: + GITHUB_TOKEN: ${{ env.PAT }} + if: github.repository_owner == 'mlcommons' && env.run_step == 'true' + run: | + git config --global user.name "mlcommons-bot" + git config --global user.email "mlcommons-bot@users.noreply.github.com" + git 
config --global credential.https://github.com.helper "" + git config --global credential.https://github.com.helper "!gh auth git-credential" + git config --global credential.https://gist.github.com.helper "" + git config --global credential.https://gist.github.com.helper "!gh auth git-credential" + mlcr push,github,mlperf,inference,submission --repo_url=https://github.com/mlcommons/mlperf_inference_test_submissions_v5.0 --repo_branch=auto-update --commit_message="Results from R50 GH action on ${{ matrix.os }}" --quiet + diff --git a/README.md b/README.md index fba9394e4..0146c5752 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ Welcome to the **MLPerf Automations and Scripts** repository! This repository is your go-to resource for tools, automations, and scripts designed to streamline the execution of **MLPerf benchmarks**—with a strong emphasis on **MLPerf Inference benchmarks**. -Starting **January 2025**, MLPerf automation scripts will be powered by the advanced [MLCFlow](https://github.com/mlcommons/mlcflow) automation interface. This modern framework replaces the previous [Collective Mind (CM)](https://github.com/mlcommons/ck/tree/master/cm), providing a more robust, efficient, and self-contained solution for benchmarking workflows, making MLPerf automations independent of any external projects. +Starting **January 2025**, MLPerf automation scripts are powered by the [MLCFlow](https://github.com/mlcommons/mlcflow) automation interface. This new and simplified framework replaces the previous [Collective Mind (CM)](https://github.com/mlcommons/ck/tree/master/cm), providing a more robust, efficient, and self-contained solution for benchmarking workflows, making MLPerf automations independent of any external projects. --- @@ -17,7 +17,6 @@ Starting **January 2025**, MLPerf automation scripts will be powered by the adva - **Automated Benchmarking** – Simplifies running MLPerf Inference benchmarks with minimal manual intervention. 
- **Modular and Extensible** – Easily extend the scripts to support additional benchmarks and configurations. - **Seamless Integration** – Compatible with Docker, cloud environments, and local machines. -- **MLCFlow (MLC) Integration** – Utilizes the MLC framework to enhance reproducibility and automation. --- @@ -61,6 +60,7 @@ This project is made possible through the generous support of: - [OctoML](https://octoml.ai) - [cKnowledge.org](https://cKnowledge.org) - [cTuning Foundation](https://cTuning.org) +- [GATEOverflow](https://gateoverflow.in) - [MLCommons](https://mlcommons.org) We appreciate their contributions and sponsorship! diff --git a/automation/script/module.py b/automation/script/module.py index acdacff8e..db32015b4 100644 --- a/automation/script/module.py +++ b/automation/script/module.py @@ -460,7 +460,11 @@ def _run(self, i): # Add permanent env from OS (such as MLC_WINDOWS:"yes" on Windows) env_from_os_info = os_info.get('env', {}) if len(env_from_os_info) > 0: - env.update(env_from_os_info) + # env.update(env_from_os_info) + utils.merge_dicts({'dict1': env, + 'dict2': env_from_os_info, + 'append_lists': True, + 'append_unique': True}) # take some env from the user environment keys = [ @@ -502,8 +506,7 @@ def _run(self, i): ii = {} ii['tags'] = tags_string ii['out'] = None - for key in ["automation", "parsed_automation", - "artifact", "parsed_artifact"]: + for key in ["automation", "artifact", "item", "details"]: if i.get(key): ii[key] = i[key] @@ -834,7 +837,10 @@ def _run(self, i): script_item_env = meta.get('env', {}) # print(f"script meta env= {script_item_env}") - env.update(script_item_env) + utils.merge_dicts({'dict1': env, + 'dict2': script_item_env, + 'append_lists': True, + 'append_unique': True}) # print(f"env = {env}") script_item_state = meta.get('state', {}) @@ -1831,6 +1837,10 @@ def _run(self, i): import json logger.debug(json.dumps(env, indent=2, sort_keys=True)) + r = update_env_with_values(env) + if r['return'] > 0: + return r + 
# Check chain of pre hook dependencies on other MLC scripts if len(prehook_deps) > 0: logger.debug( @@ -1912,8 +1922,7 @@ def _run(self, i): # Finalize script # Force consts in the final new env and state - utils.merge_dicts({'dict1': env, 'dict2': const, - 'append_lists': True, 'append_unique': True}) + env.update(const) utils.merge_dicts({'dict1': state, 'dict2': const_state, 'append_lists': True, @@ -2584,6 +2593,7 @@ def _update_variation_tags_from_variations( variation_name = self._get_name_for_dynamic_variation_tag( variation_name) + # TODO: Move this to a function and apply it for combination of variations too # base variations are automatically turned on. Only # variations outside of any variation group can be added as # a base_variation @@ -2792,13 +2802,19 @@ def search(self, i): # Find MLC script(s) based on thier tags to get their meta (can be more than 1) # Then check if variations exists inside meta - i['tags'] = ','.join(script_tags) + ii = {} + ii['tags'] = ','.join(script_tags) - i['out'] = None - i['common'] = True + ii['out'] = None + ii['common'] = True - i['target_name'] = "script" - r = super(ScriptAutomation, self).search(i) + ii['target'] = "script" + for key in ["automation", "item", + "artifact", "details"]: + if i.get(key): + ii[key] = i[key] + + r = super(ScriptAutomation, self).search(ii) if r['return'] > 0: return r @@ -2901,7 +2917,6 @@ def test(self, i): # Find script item(s) i['out'] = None r = self.search(i) - if r['return'] > 0: return r @@ -2910,7 +2925,6 @@ def test(self, i): for script_item in lst: path = script_item.path meta = script_item.meta - original_meta = script_item.original_meta alias = meta.get('alias', '') uid = meta.get('uid', '') @@ -2970,14 +2984,14 @@ def test(self, i): if given_variations: v_split = [] run_variations = [] - for i, v in enumerate(given_variations): + for v in given_variations: v_split = v.split(",") - for t in v_split: + for index, t in enumerate(v_split): if not t.startswith("_"): # variations 
must begin with _. We # support both with and without _ # in the meta - given_variations[i] = f"_{t}" + v_split[index] = f"_{t}" if v_split: run_variations.append( ",".join(v_split)) @@ -3018,7 +3032,8 @@ def test(self, i): r = self.action_object.access(ii) if r['return'] > 0: return r - + if is_true(i.get('docker_prune', '')): + docker_prune() return {'return': 0, 'list': lst} ############################################################ @@ -3097,250 +3112,8 @@ def native_run(self, i): return {'return': 0, 'return_code': rc} - ############################################################ - def add(self, i): - """ - Add MLC script - - Args: - (MLC input dict): - - (out) (str): if 'con', output to console - - parsed_artifact (list): prepared in MLC CLI or MLC access function - [ (artifact alias, artifact UID) ] or - [ (artifact alias, artifact UID), (artifact repo alias, artifact repo UID) ] - - (tags) (str): tags to find an MLC script (MLC artifact) - - (script_name) (str): name of script (it will be copied to the new entry and added to the meta) - - (tags) (string or list): tags to be added to meta - - (new_tags) (string or list): new tags to be added to meta (the same as tags) - - (json) (bool): if True, record JSON meta instead of YAML - - (meta) (dict): preloaded meta - - (template) (string): template to use (python) - (python) (bool): template=python - (pytorch) (bool): template=pytorch - ... 
- - Returns: - (MLC return dict): - - * return (int): return code == 0 if no error and >0 if error - * (error) (str): error string if return>0 - - """ - - import shutil - - console = i.get('out') == 'con' - logger = self.action_object.logger - - # Try to find script artifact by alias and/or tags - # ii = utils.sub_input(i, self.cmind.cfg['artifact_keys']) - ii = {} - ii['tags'] = tags_string - ii['out'] = None - - for key in ["automation", "parsed_automation", - "artifact", "parsed_artifact"]: - if i.get(key): - ii[key] = i[key] - - parsed_artifact = i.get('parsed_artifact', []) - - artifact_obj = parsed_artifact[0] if len(parsed_artifact) > 0 else None - artifact_repo = parsed_artifact[1] if len( - parsed_artifact) > 1 else None - - script_name = '' - if 'script_name' in i: - script_name = i.get('script_name', '').strip() - del (i['script_name']) - - if script_name != '' and not os.path.isfile(script_name): - return {'return': 1, - 'error': 'file {} not found'.format(script_name)} - - # Move tags from input to meta of the newly created script artifact - res = utils.convert_tags_to_list(i['tags']) - if res['return'] > 0: - return res - tags_list = res['tags'] - if 'tags' in i: - del (i['tags']) - - if len(tags_list) == 0: - if console: - x = input( - 'Please specify a combination of unique tags separated by comma for this script: ') - x = x.strip() - if x != '': - tags_list = x.split(',') - - if len(tags_list) == 0: - return { - 'return': 1, 'error': 'you must specify a combination of unique tags separate by comman using "--new_tags"'} - - # Add placeholder (use common action) - ii['out'] = 'con' - # Avoid recursion - use internal MLC add function to add the script - # artifact - ii['common'] = True - - # Check template path - template_dir = 'template' - - template = i.get('template', '') - - if template == '': - if i.get('python', False): - template = 'python' - elif i.get('pytorch', False): - template = 'pytorch' - - if template != '': - template_dir += '-' + 
template - - template_path = os.path.join(self.path, template_dir) - - if not os.path.isdir(template_path): - return {'return': 1, 'error': 'template path {} not found'.format( - template_path)} - - # Check if preloaded meta exists - meta = { - 'cache': False - # 20240127: Grigori commented that because newly created script meta looks ugly - # 'new_env_keys':[], - # 'new_state_keys':[], - # 'input_mapping':{}, - # 'docker_input_mapping':{}, - # 'deps':[], - # 'prehook_deps':[], - # 'posthook_deps':[], - # 'post_deps':[], - # 'versions':{}, - # 'variations':{}, - # 'input_description':{} - } - - fmeta = os.path.join( - template_path, - self.action_object.cfg['file_cmeta']) - - r = utils.load_yaml_and_json(fmeta) - if r['return'] == 0: - utils.merge_dicts({'dict1': meta, - 'dict2': r['meta'], - 'append_lists': True, - 'append_unique': True}) - - # Check meta from CMD - xmeta = i.get('meta', {}) - - if len(xmeta) > 0: - utils.merge_dicts({'dict1': meta, 'dict2': xmeta, - 'append_lists': True, 'append_unique': True}) - - meta['automation_alias'] = self.meta['alias'] - meta['automation_uid'] = self.meta['uid'] - meta['tags'] = tags_list - - script_name_base = script_name - script_name_ext = '' - if script_name != '': - # separate name and extension - j = script_name.rfind('.') - if j >= 0: - script_name_base = script_name[:j] - script_name_ext = script_name[j:] - - meta['script_name'] = script_name_base - - ii['meta'] = meta - ii['action'] = 'add' - - use_yaml = True if not i.get('json', False) else False - - if use_yaml: - ii['yaml'] = True - - ii['automation'] = 'script,5b4e0237da074764' - - for k in ['parsed_automation', 'parsed_artifact']: - if k in ii: - del ii[k] - - if artifact_repo is not None: - ii['artifact'] = utils.assemble_object2( - artifact_repo) + ':' + utils.assemble_object2(artifact_obj) - - r_obj = self.action_object.access(ii) - if r_obj['return'] > 0: - return r_obj - - new_script_path = r_obj['path'] - - if console: - logger.info('Created script in 
{}'.format(new_script_path)) - - # Copy files from template (only if exist) - files = [ - (template_path, 'README-extra.md', ''), - (template_path, 'customize.py', ''), - (template_path, 'main.py', ''), - (template_path, 'requirements.txt', ''), - (template_path, 'install_deps.bat', ''), - (template_path, 'install_deps.sh', ''), - (template_path, 'plot.bat', ''), - (template_path, 'plot.sh', ''), - (template_path, 'analyze.bat', ''), - (template_path, 'analyze.sh', ''), - (template_path, 'validate.bat', ''), - (template_path, 'validate.sh', '') - ] - - if script_name == '': - files += [(template_path, 'run.bat', ''), - (template_path, 'run.sh', '')] - else: - if script_name_ext == '.bat': - files += [(template_path, 'run.sh', script_name_base + '.sh')] - files += [('', script_name, script_name)] - - else: - files += [(template_path, 'run.bat', - script_name_base + '.bat')] - files += [('', script_name, script_name_base + '.sh')] - - for x in files: - path = x[0] - f1 = x[1] - f2 = x[2] - - if f2 == '': - f2 = f1 - - if path != '': - f1 = os.path.join(path, f1) - - if os.path.isfile(f1): - f2 = os.path.join(new_script_path, f2) - - if console: - logger.info(' * Copying {} to {}'.format(f1, f2)) - - shutil.copyfile(f1, f2) - - return r_obj - ########################################################################## + def _get_name_for_dynamic_variation_tag(script, variation_tag): ''' Returns the variation name in meta for the dynamic_variation_tag @@ -3707,7 +3480,7 @@ def _run_deps(self, deps, clean_env_keys_deps, env, state, const, const_state, a if r['return'] > 0: return r - # Update env/state with cost + # Update env/state with const env.update(const) utils.merge_dicts({'dict1': state, 'dict2': const_state, @@ -5023,11 +4796,6 @@ def enable_or_skip_script(meta, env): (AND function) """ - if not isinstance(meta, dict): - logger.warn( - "The meta entry is not a dictionary for skip/enable if_env: %s", - meta) - for key in meta: meta_key = [str(v).lower() for v in 
meta[key]] if key in env: @@ -5101,6 +4869,17 @@ def _update_env(env, key=None, value=None): return {'return': 0} +def docker_prune(): + try: + # Run the docker prune command with -a (removes all unused images, not + # just dangling ones) + result = subprocess.run(["docker", "system", "prune", "-a", "-f"], + capture_output=True, text=True, check=True) + print("Docker prune output:\n", result.stdout) + except subprocess.CalledProcessError as e: + print("Error while running Docker prune:\n", e.stderr) + + ########################################################################## def update_env_with_values(env, fail_on_not_found=False, extra_env=None): """ @@ -5280,10 +5059,7 @@ def prepare_and_run_script_with_postprocessing(i, postprocess="postprocess"): 'return': 16, 'error': 'script {} not found - please add one'.format(path_to_run_script)} # Update env and state with const - utils.merge_dicts({'dict1': env, - 'dict2': const, - 'append_lists': True, - 'append_unique': True}) + env.update(const) utils.merge_dicts({'dict1': state, 'dict2': const_state, 'append_lists': True, 'append_unique': True}) @@ -5509,8 +5285,7 @@ def run_detect_version(customize_code, customize_common_input, logger.debug(recursion_spaces + ' - Running detect_version ...') # Update env and state with const - utils.merge_dicts({'dict1': env, 'dict2': const, - 'append_lists': True, 'append_unique': True}) + env.update(const) utils.merge_dicts({'dict1': state, 'dict2': const_state, 'append_lists': True, @@ -5540,8 +5315,7 @@ def run_postprocess(customize_code, customize_common_input, recursion_spaces, logger.debug(recursion_spaces + ' - Running postprocess ...') # Update env and state with const - utils.merge_dicts({'dict1': env, 'dict2': const, - 'append_lists': True, 'append_unique': True}) + env.update(const) utils.merge_dicts({'dict1': state, 'dict2': const_state, 'append_lists': True, @@ -5565,29 +5339,54 @@ def run_postprocess(customize_code, customize_common_input, recursion_spaces, def 
get_script_name(env, path, script_name='run'): - """ - Internal: find the most appropriate run script name for the detected OS - """ + # Extract environment variables safely, defaulting to empty strings if + # missing + os_flavor = env.get('MLC_HOST_OS_FLAVOR', '') + os_flavor_like = env.get('MLC_HOST_OS_FLAVOR_LIKE', '') + os_type = env.get('MLC_HOST_OS_TYPE', '') + # Only use version if flavor exists + os_version = env.get('MLC_HOST_OS_VERSION', '') if os_flavor else '' + platform_flavor = env.get('MLC_HOST_PLATFORM_FLAVOR', '') + + # Get a list of all files in the directory + try: + available_files = set(os.listdir(path)) + except FileNotFoundError: + # Default if directory doesn't exist + return os.path.join(path, f"{script_name}.sh") + + # Check if any script with a "script_name-" prefix exists + has_prefixed_scripts = any(f.startswith( + f"{script_name}-") for f in available_files) + + # Helper function to construct script filenames dynamically + def script_filename(*parts): + # Remove empty values to avoid extra '-' + suffix = "-".join(filter(None, parts)) + return f"{script_name}-{suffix}.sh" if suffix else f"{script_name}.sh" + + # Define file search order based on priority + candidates = [ + script_filename(os_flavor, os_version, platform_flavor), + script_filename(os_flavor, os_version), + script_filename(os_flavor, platform_flavor), + script_filename(os_flavor), + script_filename(os_flavor_like, platform_flavor), + script_filename(os_flavor_like), + script_filename(os_type, platform_flavor), + script_filename(os_type), + script_filename(platform_flavor), + ] + + # If prefixed scripts exist, check for the first matching candidate + if has_prefixed_scripts: + for candidate in candidates: + if candidate in available_files: + return os.path.join(path, candidate) + + # Fallback to the default script + return os.path.join(path, f"{script_name}.sh") - from os.path import exists - - tmp_suff1 = env.get('MLC_HOST_OS_FLAVOR', '') - tmp_suff2 = 
env.get('MLC_HOST_OS_VERSION', '') - tmp_suff3 = env.get('MLC_HOST_PLATFORM_FLAVOR', '') - - if exists(os.path.join(path, script_name + '-' + tmp_suff1 + - '-' + tmp_suff2 + '-' + tmp_suff3 + '.sh')): - return script_name + '-' + tmp_suff1 + '-' + tmp_suff2 + '-' + tmp_suff3 + '.sh' - elif exists(os.path.join(path, script_name + '-' + tmp_suff1 + '-' + tmp_suff3 + '.sh')): - return script_name + '-' + tmp_suff1 + '-' + tmp_suff3 + '.sh' - elif exists(os.path.join(path, script_name + '-' + tmp_suff1 + '-' + tmp_suff2 + '.sh')): - return script_name + '-' + tmp_suff1 + '-' + tmp_suff2 + '.sh' - elif exists(os.path.join(path, script_name + '-' + tmp_suff1 + '.sh')): - return script_name + '-' + tmp_suff1 + '.sh' - elif exists(os.path.join(path, script_name + '-' + tmp_suff3 + '.sh')): - return script_name + '-' + tmp_suff3 + '.sh' - else: - return script_name + '.sh' ############################################################################## @@ -5948,7 +5747,8 @@ def update_state_from_meta(meta, env, state, const, const_state, deps, post_deps env.setdefault(key, default_env[key]) update_env = meta.get('env', {}) - env.update(update_env) + utils.merge_dicts({'dict1': env, 'dict2': update_env, + 'append_lists': True, 'append_unique': True}) update_meta_if_env = meta.get('update_meta_if_env', []) update_meta_if_env_from_state = run_state.get('update_meta_if_env', []) diff --git a/script/app-mlperf-inference-mlcommons-python/meta.yaml b/script/app-mlperf-inference-mlcommons-python/meta.yaml index 720d59514..fe7793e2d 100644 --- a/script/app-mlperf-inference-mlcommons-python/meta.yaml +++ b/script/app-mlperf-inference-mlcommons-python/meta.yaml @@ -1252,6 +1252,7 @@ variations: group: models deps: - tags: get,generic-python-lib,_opencv-python + - tags: get,generic-sys-util,_libgl - tags: get,generic-python-lib,_numpy names: - numpy @@ -1270,6 +1271,7 @@ variations: MLC_MLPERF_USE_MLCOMMONS_RUN_SCRIPT: "yes" deps: - tags: get,generic-python-lib,_opencv-python + - tags: 
get,generic-sys-util,_libgl - tags: get,generic-python-lib,_numpy names: - numpy diff --git a/script/download-file/customize.py b/script/download-file/customize.py index 64066122f..f525c440f 100644 --- a/script/download-file/customize.py +++ b/script/download-file/customize.py @@ -92,6 +92,8 @@ def preprocess(i): if env.get('MLC_DOWNLOAD_PATH', '') != '': download_path = env['MLC_DOWNLOAD_PATH'] + if os.path.isfile(download_path): + download_path = os.path.dirname(download_path) if not os.path.exists(download_path): os.makedirs(download_path, exist_ok=True) os.chdir(download_path) diff --git a/script/extract-file/customize.py b/script/extract-file/customize.py index fd6ab8235..d55f8787c 100644 --- a/script/extract-file/customize.py +++ b/script/extract-file/customize.py @@ -37,6 +37,8 @@ def preprocess(i): # later if cache is cleaned) extract_path = env.get('MLC_EXTRACT_PATH', '') if extract_path != '': + if os.path.isfile(extract_path): + extract_path = os.path.dirname(extract_path) if not os.path.exists(extract_path): os.makedirs(extract_path, exist_ok=True) diff --git a/script/generate-mlperf-inference-user-conf/customize.py b/script/generate-mlperf-inference-user-conf/customize.py index a9f8e3eaa..70d920ed4 100644 --- a/script/generate-mlperf-inference-user-conf/customize.py +++ b/script/generate-mlperf-inference-user-conf/customize.py @@ -480,6 +480,7 @@ def run_files_exist(mode, OUTPUT_DIR, run_files, env): if ( "result_validity" not in mlperf_log.get_keys() or mlperf_log["result_validity"] != "VALID" + or "error_invalid_config" in mlperf_log.get_keys() ): return False diff --git a/script/get-aocc/customize.py b/script/get-aocc/customize.py index 383cac39f..354a34d80 100644 --- a/script/get-aocc/customize.py +++ b/script/get-aocc/customize.py @@ -20,23 +20,12 @@ def preprocess(i): exe_c = 'clang.exe' if os_info['platform'] == 'windows' else 'clang' - if env.get('MLC_AOCC_DIR_PATH', '') != '' and env.get( - 'MLC_AOCC_BIN_WITH_PATH', '') == '': - for f in 
os.listdir(env['MLC_AOCC_DIR_PATH']): - if os.path.exists(os.path.join( - env['MLC_AOCC_DIR_PATH'], f, "bin", exe_c)): - env['MLC_AOCC_BIN_WITH_PATH'] = os.path.join( - env['MLC_AOCC_DIR_PATH'], f, "bin", exe_c) - - if env.get('MLC_HOST_OS_FLAVOR', '') == 'rhel': - if "12" in env.get('MLC_VERSION', '') or "12" in env.get( - 'MLC_VERSION_MIN', ''): - if env.get('MLC_TMP_PATH', '') == '': - env['MLC_TMP_PATH'] = '' - env['MLC_TMP_PATH'] += "/opt/rh/aocc/root/usr/bin" - env['MLC_TMP_PATH_IGNORE_NON_EXISTANT'] = 'yes' - if 'MLC_AOCC_BIN_WITH_PATH' not in env: + if env.get('MLC_AOCC_DIR_PATH', '') != '': + aocc_path = env['MLC_AOCC_DIR_PATH'] + if os.path.exists(os.path.join(aocc_path, 'bin', 'clang')): + env['MLC_TMP_PATH'] = os.path.join(aocc_path, 'bin') + r = i['automation'].find_artifact({'file_name': exe_c, 'env': env, 'os_info': os_info, @@ -83,7 +72,8 @@ def postprocess(i): found_path = os.path.dirname(found_file_path) - env['MLC_AOCC_INSTALLED_PATH'] = found_path + env['MLC_AOCC_BIN_PATH'] = found_path + env['MLC_AOCC_INSTALLED_PATH'] = os.path.dirname(found_path) file_name_c = os.path.basename(found_file_path) file_name_cpp = file_name_c.replace('clang', 'clang++') diff --git a/script/get-aocc/meta.yaml b/script/get-aocc/meta.yaml index 80a21d311..ff12c56ec 100644 --- a/script/get-aocc/meta.yaml +++ b/script/get-aocc/meta.yaml @@ -20,6 +20,7 @@ deps: input_mapping: tar_file_path: MLC_AOCC_TAR_FILE_PATH + aocc_dir: MLC_AOCC_DIR_PATH name: Detect or install AOCC compiler new_env_keys: @@ -34,7 +35,7 @@ new_env_keys: - + LDFLAGS - +MLC_HOST_OS_DEFAULT_INCLUDE_PATH - +PATH -post_depsq: +post_deps_off: - tags: get,compiler-flags sort: 500 tags: @@ -42,3 +43,7 @@ tags: - get - aocc uid: 1ceb0656e99a44ec +variations: + _path.#: + env: + MLC_AOCC_DIR_PATH: # diff --git a/script/get-dataset-coco2014/meta.yaml b/script/get-dataset-coco2014/meta.yaml index fa3724f83..afdff9cc4 100644 --- a/script/get-dataset-coco2014/meta.yaml +++ b/script/get-dataset-coco2014/meta.yaml 
@@ -9,6 +9,9 @@ cache: true category: AI/ML datasets category_sort: 8500 +input_mapping: + num_workers: MLC_DATASET_COCO2014_NUM_WORKERS + tags: - get - dataset @@ -18,7 +21,8 @@ tags: default_env: MLC_DATASET_CALIBRATION: 'no' - + MLC_DATASET_COCO2014_NUM_WORKERS: 1 + deps: - names: diff --git a/script/get-dataset-coco2014/run.sh b/script/get-dataset-coco2014/run.sh index a891b2330..21829568e 100644 --- a/script/get-dataset-coco2014/run.sh +++ b/script/get-dataset-coco2014/run.sh @@ -27,7 +27,7 @@ if [[ ${MLC_DATASET_CALIBRATION} == "no" ]]; then eval $cmd test $? -eq 0 || exit $? else - cmd="./download-coco-2014-calibration.sh -d ${INSTALL_DIR}" + cmd="./download-coco-2014-calibration.sh -d ${INSTALL_DIR} -n ${MLC_DATASET_COCO2014_NUM_WORKERS}" echo $cmd eval $cmd test $? -eq 0 || exit $? diff --git a/script/get-dataset-mlperf-inference-llama3/meta.yaml b/script/get-dataset-mlperf-inference-llama3/meta.yaml index 1028ab0cf..9e8762f35 100644 --- a/script/get-dataset-mlperf-inference-llama3/meta.yaml +++ b/script/get-dataset-mlperf-inference-llama3/meta.yaml @@ -33,13 +33,13 @@ variations: default: true group: dataset-type env: - MLC_RCLONE_URL: mlc-inference:mlcommons-inference-wg-public/llama3_405b/mlperf_llama3.1_405b_dataset_8313_processed_fp16_eval.pkl + MLC_RCLONE_URL: mlc-inference:mlcommons-inference-wg-public/llama3.1_405b/mlperf_llama3.1_405b_dataset_8313_processed_fp16_eval.pkl MLC_DATASET_TYPE: validation MLC_DATASET_FILE_NAME: mlperf_llama3.1_405b_dataset_8313_processed_fp16_eval.pkl calibration: group: dataset-type env: - MLC_RCLONE_URL: mlc-inference:mlcommons-inference-wg-public/llama3_405b/mlperf_llama3.1_405b_calibration_dataset_512_processed_fp16_eval.pkl + MLC_RCLONE_URL: mlc-inference:mlcommons-inference-wg-public/llama3.1_405b/mlperf_llama3.1_405b_calibration_dataset_512_processed_fp16_eval.pkl MLC_DATASET_TYPE: calibration MLC_DATASET_FILE_NAME: mlperf_llama3.1_405b_calibration_dataset_512_processed_fp16_eval.pkl rclone: diff --git 
a/script/get-dataset-mlperf-inference-mixtral/meta.yaml b/script/get-dataset-mlperf-inference-mixtral/meta.yaml index 99b8e0c2f..744b5690d 100644 --- a/script/get-dataset-mlperf-inference-mixtral/meta.yaml +++ b/script/get-dataset-mlperf-inference-mixtral/meta.yaml @@ -16,14 +16,48 @@ prehook_deps: update_tags_from_env_with_prefix: _url.: - MLC_PACKAGE_URL + names: + - download-file tags: - get - dataset-mixtral - openorca-mbxp-gsm8k-combined uid: 89e7c91444804775 variations: - mlcommons-storage: + rclone: + group: download-tool + default: true + adr: + download-file: + tags: _rclone + env: + MLC_DOWNLOAD_TOOL: rclone + MLC_RCLONE_COPY_USING: copyurl + wget: + group: download-tool + adr: + download-file: + tags: _wget + env: + MLC_DOWNLOAD_TOOL: wget + validation: default: true + group: dataset-type + adr: + download-file: + extra_cache_tags: mixtral,get-mixtral-dataset,validation + env: + ML_MODEL_STARTING_WEIGHTS_FILENAME: https://github.com/mlcommons/inference/tree/master/language/mixtral-8x7b#preprocessed + MLC_DOWNLOAD_URL: https://inference.mlcommons-storage.org/mixtral_8x7b/09292024_mixtral_15k_mintoken2_v1.pkl + calibration: + group: dataset-type + adr: + download-file: + extra_cache_tags: mixtral,get-mixtral-dataset,calibration + env: + ML_MODEL_STARTING_WEIGHTS_FILENAME: https://github.com/mlcommons/inference/tree/master/language/mixtral-8x7b#calibration-dataset + MLC_DOWNLOAD_URL: https://inference.mlcommons-storage.org/mixtral_8x7b%2F2024.06.06_mixtral_15k_calibration_v4.pkl + mlcommons-storage: env: MLC_DOWNLOAD_CHECKSUM: 78823c13e0e73e518872105c4b09628b MLC_DOWNLOAD_FILENAME: 2024.06.06_mixtral_15k_v4.pkl diff --git a/script/get-docker/customize.py b/script/get-docker/customize.py index c8aaf7376..3097e343f 100644 --- a/script/get-docker/customize.py +++ b/script/get-docker/customize.py @@ -12,11 +12,15 @@ def preprocess(i): recursion_spaces = i['recursion_spaces'] - file_name = 'docker.exe' if os_info['platform'] == 'windows' else 'docker' - 
env['FILE_NAME'] = file_name + file_name_docker = 'docker.exe' if os_info['platform'] == 'windows' else 'docker' + file_name_podman = 'podman.exe' if os_info['platform'] == 'windows' else 'podman' if 'MLC_DOCKER_BIN_WITH_PATH' not in env: - r = i['automation'].find_artifact({'file_name': file_name, + # check for docker + # if docker is not found, podman is checked + env['FILE_NAME'] = file_name_docker + env['CONTAINER_TOOL_NAME'] = "docker" + r = i['automation'].find_artifact({'file_name': file_name_docker, 'env': env, 'os_info': os_info, 'default_path_env_key': 'PATH', @@ -26,11 +30,26 @@ def preprocess(i): 'recursion_spaces': recursion_spaces}) if r['return'] > 0: if r['return'] == 16: - run_file_name = "install" - r = automation.run_native_script( - {'run_script_input': i['run_script_input'], 'env': env, 'script_name': run_file_name}) + # check for podman + # if podman is also absent, the script will try to + # automatically install docker in the system + env['FILE_NAME'] = file_name_podman + env['CONTAINER_TOOL_NAME'] = "podman" + r = i['automation'].find_artifact({'file_name': file_name_podman, + 'env': env, + 'os_info': os_info, + 'default_path_env_key': 'PATH', + 'detect_version': True, + 'env_path_key': 'MLC_DOCKER_BIN_WITH_PATH', + 'run_script_input': i['run_script_input'], + 'recursion_spaces': recursion_spaces}) if r['return'] > 0: - return r + if r['return'] == 16: + run_file_name = "install" + r = automation.run_native_script( + {'run_script_input': i['run_script_input'], 'env': env, 'script_name': run_file_name}) + if r['return'] > 0: + return r else: return r diff --git a/script/get-docker/run.sh b/script/get-docker/run.sh index f7f946a7f..f78518097 100644 --- a/script/get-docker/run.sh +++ b/script/get-docker/run.sh @@ -1,3 +1,3 @@ #!/bin/bash -docker --version > tmp-ver.out +${CONTAINER_TOOL_NAME} --version > tmp-ver.out test $? 
-eq 0 || exit 1 diff --git a/script/get-gcc/customize.py b/script/get-gcc/customize.py index a8c8b3099..01749aba5 100644 --- a/script/get-gcc/customize.py +++ b/script/get-gcc/customize.py @@ -11,6 +11,12 @@ def preprocess(i): recursion_spaces = i['recursion_spaces'] file_name_c = 'gcc.exe' if os_info['platform'] == 'windows' else 'gcc' + if 'MLC_GCC_BIN_WITH_PATH' not in env: + if env.get('MLC_GCC_DIR_PATH', '') != '': + gcc_path = env['MLC_GCC_DIR_PATH'] + if os.path.exists(os.path.join(gcc_path, 'bin', 'gcc')): + env['MLC_TMP_PATH'] = os.path.join(gcc_path, 'bin') + if env.get('MLC_HOST_OS_FLAVOR', '') == 'rhel': if "12" in env.get('MLC_VERSION', '') or "12" in env.get( 'MLC_VERSION_MIN', ''): diff --git a/script/get-gcc/meta.yaml b/script/get-gcc/meta.yaml index 27a3b6feb..4de1812f5 100644 --- a/script/get-gcc/meta.yaml +++ b/script/get-gcc/meta.yaml @@ -30,3 +30,7 @@ tags: - cpp-compiler - get-gcc uid: dbf4ab5cbed74372 +variations: + _path.#: + env: + MLC_GCC_DIR_PATH: # diff --git a/script/get-generic-sys-util/customize.py b/script/get-generic-sys-util/customize.py index 74b8c75b6..0efd666bf 100644 --- a/script/get-generic-sys-util/customize.py +++ b/script/get-generic-sys-util/customize.py @@ -11,7 +11,7 @@ def preprocess(i): state = i['state'] automation = i['automation'] - # Use VERSION_CMD and CHECK_CMD if no CHECK_CMD is set + # Use VERSION_CMD as CHECK_CMD if no CHECK_CMD is set if env.get('MLC_SYS_UTIL_VERSION_CMD', '') != '' and env.get( 'MLC_SYS_UTIL_CHECK_CMD', '') == '': env['MLC_SYS_UTIL_CHECK_CMD'] = env['MLC_SYS_UTIL_VERSION_CMD'] diff --git a/script/get-generic-sys-util/meta.yaml b/script/get-generic-sys-util/meta.yaml index 4516a506a..bd4006e72 100644 --- a/script/get-generic-sys-util/meta.yaml +++ b/script/get-generic-sys-util/meta.yaml @@ -42,12 +42,14 @@ tests: fail_safe: 'yes' ignore_missing: 'yes' test-all-variations: 'yes' + docker_prune: 'yes' - docker: 'yes' docker_os: ubuntu docker_os_version: '24.04' fail_safe: 'yes' ignore_missing: 
'yes' test-all-variations: 'yes' + docker_prune: 'yes' uid: bb0393afa8404a11 variations: cmake: @@ -108,6 +110,21 @@ variations: brew: '' dnf: '' yum: '' + + libgl: + env: + MLC_SYS_UTIL_NAME: libgl # tbd: regular expression for version as well as whether its installed? + MLC_SYS_UTIL_CHECK_CMD: 'ldconfig -p | grep -i libgl.so.*' + default_env: + MLC_GENERIC_SYS_UTIL_IGNORE_MISSING_PACKAGE: yes + new_env_keys: + - MLC_LIBGL_VERSION + state: + libgl: # tbd: complete for other flavours of linux + apt: libgl1 + brew: '' + dnf: mesa-libGL + yum: mesa-libGL libsm6: env: MLC_SYS_UTIL_NAME: libsm6 # tbd: regular expression for version as well as whether its installed? @@ -693,7 +710,7 @@ variations: unzip: env: MLC_SYS_UTIL_NAME: unzip - MLC_SYS_UTIL_VERSION_CMD: unzip --version + MLC_SYS_UTIL_VERSION_CMD: unzip -v new_env_keys: - MLC_UNZIP_VERSION state: @@ -743,6 +760,18 @@ variations: wkhtmltopdf: apt: wkhtmltopdf brew: wkhtmltopdf + choco: wkhtmltopdf + xfonts-base: + env: + MLC_SYS_UTIL_NAME: xfonts_base + new_env_keys: + - MLC_XFONTS_BASE_VERSION + state: + xfonts_base: + apt: xfonts-base + dnf: xorg-x11-fonts-misc + yum: xorg-x11-fonts-misc + zypper: xorg-x11-fonts xz: env: MLC_SYS_UTIL_NAME: xz diff --git a/script/get-lib-armnn/meta.yaml b/script/get-lib-armnn/meta.yaml index ff71bf95a..53b5e210d 100644 --- a/script/get-lib-armnn/meta.yaml +++ b/script/get-lib-armnn/meta.yaml @@ -37,3 +37,7 @@ versions: env: MLC_LIB_ARMNN_VERSION: v23.11 MLC_TMP_GIT_BRANCH_NAME: branches/armnn_23_11 + '25.02': + env: + MLC_LIB_ARMNN_VERSION: v23.11 + MLC_TMP_GIT_BRANCH_NAME: branches/armnn_23_11 diff --git a/script/template-script/COPYRIGHT.md b/script/get-one-api/COPYRIGHT.md similarity index 92% rename from script/template-script/COPYRIGHT.md rename to script/get-one-api/COPYRIGHT.md index 9e44ad290..d2ceead84 100644 --- a/script/template-script/COPYRIGHT.md +++ b/script/get-one-api/COPYRIGHT.md @@ -1,6 +1,6 @@ # Copyright Notice -© 2022-2025 MLCommons. All Rights Reserved. 
+© 2025-2026 MLCommons. All Rights Reserved. This file is licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License can be obtained at: diff --git a/script/get-one-api/customize.py b/script/get-one-api/customize.py new file mode 100644 index 000000000..0f6e78b10 --- /dev/null +++ b/script/get-one-api/customize.py @@ -0,0 +1,100 @@ +from mlc import utils +import os +from utils import * + + +def preprocess(i): + + os_info = i['os_info'] + + env = i['env'] + + recursion_spaces = i['recursion_spaces'] + file_name_c = 'icx.exe' if os_info['platform'] == 'windows' else 'icx' + + if 'MLC_ICX_BIN_WITH_PATH' not in env: + if env.get('MLC_ONEAPI_DIR_PATH', '') != '': + oneapi_path = env['MLC_ONEAPI_DIR_PATH'] + if os.path.exists(os.path.join(oneapi_path, 'bin', 'icx')): + env['MLC_TMP_PATH'] = os.path.join(oneapi_path, 'bin') + + if 'MLC_ONEAPI_BIN_WITH_PATH' not in env: + r = i['automation'].find_artifact({'file_name': file_name_c, + 'env': env, + 'os_info': os_info, + 'default_path_env_key': 'PATH', + 'detect_version': True, + 'env_path_key': 'MLC_ICX_BIN_WITH_PATH', + 'run_script_input': i['run_script_input'], + 'recursion_spaces': recursion_spaces}) + if r['return'] > 0: + # Uncomment when MLC script for installing oneapi compiler is integrated + # Initial finding suggests that oneapi could be installed without + # hastle in linux using apt, but is tricky in windows + + # if r['return'] == 16: + # if env.get('MLC_TMP_FAIL_IF_NOT_FOUND','').lower() == 'yes': + # return r + # + # print (recursion_spaces+' # {}'.format(r['error'])) + # + # # Attempt to run installer + # r = {'return':0, 'skip':True, 'script':{'tags':'install,gcc,src'}} + + return r + + return {'return': 0} + + +def detect_version(i): + r = i['automation'].parse_version({'match_text': r'oneAPI\s+.*\(([\d.]+)\)', + 'group_number': 1, + 'env_key': 'MLC_ONEAPI_VERSION', + 'which_env': i['env']}) + if r['return'] > 0: + 
return r + + version = r['version'] + + print(i['recursion_spaces'] + ' Detected version: {}'.format(version)) + + return {'return': 0, 'version': version} + + +def postprocess(i): + + env = i['env'] + r = detect_version(i) + if r['return'] > 0: + return r + + env['MLC_COMPILER_FAMILY'] = 'ONEAPI' + version = r['version'] + env['MLC_COMPILER_VERSION'] = env['MLC_ONEAPI_VERSION'] + env['MLC_ONEAPI_CACHE_TAGS'] = 'version-' + version + env['MLC_COMPILER_CACHE_TAGS'] = 'version-' + version + ',family-oneapi' + + found_file_path = env['MLC_ICX_BIN_WITH_PATH'] + + found_path = os.path.dirname(found_file_path) + + env['MLC_ONEAPI_INSTALLED_PATH'] = os.path.dirname(found_path) + + file_name_c = os.path.basename(found_file_path) + + env['MLC_ONEAPI_BIN'] = file_name_c + + # General compiler for general program compilation + env['MLC_ONEAPI_COMPILER_BIN'] = file_name_c + env['MLC_ONEAPI_COMPILER_FLAG_OUTPUT'] = '' + env['MLC_ONEAPI_COMPILER_WITH_PATH'] = found_file_path + env['MLC_ONEAPI_COMPILER_FLAG_VERSION'] = 'version' + + # env['MLC_COMPILER_FLAGS_FAST'] = "-O3" + # env['MLC_LINKER_FLAGS_FAST'] = "-O3" + # env['MLC_COMPILER_FLAGS_DEBUG'] = "-O0" + # env['MLC_LINKER_FLAGS_DEBUG'] = "-O0" + # env['MLC_COMPILER_FLAGS_DEFAULT'] = "-O2" + # env['MLC_LINKER_FLAGS_DEFAULT'] = "-O2" + + return {'return': 0, 'version': version} diff --git a/script/get-one-api/meta.yaml b/script/get-one-api/meta.yaml new file mode 100644 index 000000000..1033eb772 --- /dev/null +++ b/script/get-one-api/meta.yaml @@ -0,0 +1,31 @@ +alias: get-one-api +automation_alias: script +automation_uid: 5b4e0237da074764 +cache: true +category: Compiler automation +clean_files: [] +deps: +- tags: detect,os +name: Detect or install OneAPI compiler +new_env_keys: +- MLC_ONEAPI_* +- MLC_ONEAPI_COMPILER_* +- MLC_COMPILER_* +- MLC_LINKER_* + +input_mapping: + oneapi_dir: MLC_ONEAPI_DIR_PATH + +post_deps_off: +- tags: get,compiler-flags +sort: 500 +tags: +- get +- oneapi +- compiler +- get-oneapi +uid: 
1af872e81ef54742 +variations: + _path.#: + env: + MLC_ONEAPI_DIR_PATH: "#" diff --git a/script/get-one-api/run.bat b/script/get-one-api/run.bat new file mode 100644 index 000000000..5c02e5332 --- /dev/null +++ b/script/get-one-api/run.bat @@ -0,0 +1,3 @@ +%MLC_GCC_BIN_WITH_PATH% version > tmp-ver.out +IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% + diff --git a/script/get-one-api/run.sh b/script/get-one-api/run.sh new file mode 100644 index 000000000..756cc54e1 --- /dev/null +++ b/script/get-one-api/run.sh @@ -0,0 +1,6 @@ +#!/bin/bash +icx_bin=${MLC_ICX_BIN_WITH_PATH} +echo "${icx_bin} --version" + +${icx_bin} --version > tmp-ver.out +test $? -eq 0 || exit $? diff --git a/script/get-preprocessed-dataset-imagenet/meta.yaml b/script/get-preprocessed-dataset-imagenet/meta.yaml index eea817b2f..124f73572 100644 --- a/script/get-preprocessed-dataset-imagenet/meta.yaml +++ b/script/get-preprocessed-dataset-imagenet/meta.yaml @@ -51,6 +51,7 @@ deps: MLC_IMAGENET_PREPROCESSED_PATH: - 'on' tags: mlperf,mlcommons,inference,source,src +- tags: get,generic-sys-util,_libgl docker: run: false env: diff --git a/script/get-rclone-config/meta.yaml b/script/get-rclone-config/meta.yaml index f2f3332f0..a7bd8e5b3 100644 --- a/script/get-rclone-config/meta.yaml +++ b/script/get-rclone-config/meta.yaml @@ -8,18 +8,31 @@ tags: - rclone-config uid: 6c59ddbc6cd046e3 variations: + read-only: + group: permission + default: true + env: + MLC_RCLONE_DRIVE_SCOPE: drive.readonly + write: + group: permission + env: + MLC_RCLONE_DRIVE_SCOPE: drive mlc-inference: env: MLC_RCLONE_CONFIG_CMD: 'rclone config create mlc-inference s3 provider=Cloudflare access_key_id=f65ba5eef400db161ea49967de89f47b secret_access_key=fbea333914c292b854f14d3fe232bad6c5407bf0ab1bebf78833c2b359bdfd2b endpoint=https://c2686074cb2caf5cbaf6d134bdba8b47.r2.cloudflarestorage.com' mlperf-llama2: env: - MLC_RCLONE_CONFIG_CMD: 'rclone config create mlc-llama2 drive config_is_local=false scope=drive.readonly 
root_folder_id=11tBZvvrh0FCm3XuR5E849K42TqftYdUF' + MLC_RCLONE_CONFIG_CMD: 'rclone config create mlc-llama2 drive config_is_local=false scope=<<>> root_folder_id=11tBZvvrh0FCm3XuR5E849K42TqftYdUF' MLC_RCLONE_CONNECT_CMD: 'rclone config reconnect mlc-llama2:' mlperf-llama3-1: env: - MLC_RCLONE_CONFIG_CMD: 'rclone config create mlc-llama3-1 drive config_is_local=false scope=drive.readonly root_folder_id=12K-2yvmr1ZSZ7SLrhidCbWc0BriN98am' + MLC_RCLONE_CONFIG_CMD: 'rclone config create mlc-llama3-1 drive config_is_local=false scope=<<>> root_folder_id=12K-2yvmr1ZSZ7SLrhidCbWc0BriN98am' MLC_RCLONE_CONNECT_CMD: 'rclone config reconnect mlc-llama3-1:' waymo: env: - MLC_RCLONE_CONFIG_CMD: 'rclone config create mlc-waymo drive config_is_local=false scope=drive.readonly root_folder_id=1xbfnaUurFeXliFFl1i1gj48eRU2NDiH5' + MLC_RCLONE_CONFIG_CMD: 'rclone config create mlc-waymo drive config_is_local=false scope=<<>> root_folder_id=1xbfnaUurFeXliFFl1i1gj48eRU2NDiH5' MLC_RCLONE_CONNECT_CMD: 'rclone config reconnect mlc-waymo:' + config-name.#: + env: + MLC_RCLONE_CONFIG_CMD: 'rclone config create # drive config_is_local=false scope=<<>> root_folder_id=<<>>' + MLC_RCLONE_CONNECT_CMD: 'rclone config reconnect #:' diff --git a/script/get-sys-utils-cm/run-ubuntu.sh b/script/get-sys-utils-cm/run-ubuntu.sh index df0328c40..df10e4ef3 100644 --- a/script/get-sys-utils-cm/run-ubuntu.sh +++ b/script/get-sys-utils-cm/run-ubuntu.sh @@ -52,7 +52,6 @@ ${MLC_SUDO} ${MLC_APT_TOOL} update && \ libgl1 \ libjpeg9-dev \ unzip \ - libgl1 \ zlib1g-dev # Install Python deps though preference is to install them diff --git a/script/get-wkhtmltopdf/meta.yaml b/script/get-wkhtmltopdf/meta.yaml new file mode 100755 index 000000000..471c502eb --- /dev/null +++ b/script/get-wkhtmltopdf/meta.yaml @@ -0,0 +1,27 @@ +alias: get-wkhtmltopdf +automation_alias: script +automation_uid: 5b4e0237da074764 +cache: true +tags: +- get +- wkhtmltopdf +deps: + - tags: detect,os + - tags: detect,sudo + skip_if_env: + 
MLC_HOST_OS_TYPE: + - windows + - tags: get,generic-sys-util,_wkhtmltopdf + enable_if_any_env: + MLC_INSTALL_FROM_PACKAGE_MANAGER: + - on + MLC_HOST_OS_TYPE: + - windows +uid: 67ec874a3dfe4b87 +docker: + pre_run_cmds: + - mlc pull repo +variations: + with-qt: + group: qt + default: true diff --git a/script/get-wkhtmltopdf/run-fedora.sh b/script/get-wkhtmltopdf/run-fedora.sh new file mode 100644 index 000000000..d91b05b34 --- /dev/null +++ b/script/get-wkhtmltopdf/run-fedora.sh @@ -0,0 +1,11 @@ +#!/bin/bash +# This script installs wkhtmltopdf on Amazon Linux + +# Download the wkhtmltopdf package +wget https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox-0.12.6-1.centos6.x86_64.rpm + +# Install the package +${MLC_SUDO} yum localinstall -y wkhtmltox-0.12.6-1.centos6.x86_64.rpm --nobest --skip-broken + +wkhtmltopdf --version +test $? -eq 0 || exit $? diff --git a/script/get-wkhtmltopdf/run-macos.sh b/script/get-wkhtmltopdf/run-macos.sh new file mode 100644 index 000000000..b04695459 --- /dev/null +++ b/script/get-wkhtmltopdf/run-macos.sh @@ -0,0 +1,7 @@ +#!/bin/bash +# This script installs wkhtmltopdf on Amazon Linux +curl -L -o wkhtmltox-0.12.6-1.macos-cocoa.pkg https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox-0.12.6-1.macos-cocoa.pkg +${MLC_SUDO} installer -pkg wkhtmltox-0.12.6-1.macos-cocoa.pkg -target / + +wkhtmltopdf --version +test $? -eq 0 || exit $? diff --git a/script/get-wkhtmltopdf/run-ubuntu.sh b/script/get-wkhtmltopdf/run-ubuntu.sh new file mode 100644 index 000000000..ebafe84ef --- /dev/null +++ b/script/get-wkhtmltopdf/run-ubuntu.sh @@ -0,0 +1,7 @@ +#!/bin/bash +wget -nc https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6.1-2/wkhtmltox_0.12.6.1-2.jammy_amd64.deb +test $? -eq 0 || exit $? +${MLC_SUDO} dpkg -i wkhtmltox_0.12.6.1-2.jammy_amd64.deb +${MLC_SUDO} DEBIAN_FRONTEND=noninteractive apt-get install -f -y +test $? -eq 0 || exit $? 
+ diff --git a/script/install-llvm-src/customize.py b/script/install-llvm-src/customize.py index 843e2027d..ea6b02c54 100644 --- a/script/install-llvm-src/customize.py +++ b/script/install-llvm-src/customize.py @@ -11,6 +11,8 @@ def preprocess(i): env = i['env'] + q = '"' if os_info['platform'] == 'windows' else "'" + clang_file_name = "clang" extra_cmake_options = '' @@ -27,22 +29,21 @@ def preprocess(i): # env['USE_LLVM'] = install_prefix # env['LLVM_DIR'] = os.path.join(env['USE_LLVM'], "lib", "cmake", "llvm") else: - if env.get('MLC_LLVM_ENABLE_RUNTIMES', '') != '': - enable_runtimes = env['MLC_LLVM_ENABLE_RUNTIMES'].replace(":", ";") + if env.get('+MLC_LLVM_ENABLE_RUNTIMES', '') != '': + enable_runtimes = ";".join(env['+MLC_LLVM_ENABLE_RUNTIMES']) else: enable_runtimes = '' - if env.get('MLC_LLVM_ENABLE_PROJECTS', '') != '': - enable_projects = env['MLC_LLVM_ENABLE_PROJECTS'].replace(":", ";") + if env.get('+MLC_LLVM_ENABLE_PROJECTS', '') != '': + enable_projects = ";".join(env['+MLC_LLVM_ENABLE_PROJECTS']) else: enable_projects = '' llvm_build_type = env['MLC_LLVM_BUILD_TYPE'] - cmake_cmd = "cmake " + os.path.join(env["MLC_LLVM_SRC_REPO_PATH"], "llvm") + " -GNinja -DCMAKE_BUILD_TYPE=" + llvm_build_type + " -DLLVM_ENABLE_PROJECTS=" + enable_projects + " -DLLVM_ENABLE_RUNTIMES='" + \ - enable_runtimes + "' -DCMAKE_INSTALL_PREFIX=" + install_prefix + \ - " -DLLVM_ENABLE_RTTI=ON -DLLVM_INSTALL_UTILS=ON -DLLVM_TARGETS_TO_BUILD=X86 " + \ - extra_cmake_options + targets_to_build = env.get('MLC_LLVM_TARGETS_TO_BUILD', 'X86') + + cmake_cmd = f"""cmake {os.path.join(env["MLC_LLVM_SRC_REPO_PATH"], "llvm")} -GNinja -DCMAKE_BUILD_TYPE={llvm_build_type } -DLLVM_ENABLE_PROJECTS={q}{enable_projects}{q} -DLLVM_ENABLE_RUNTIMES={q}{enable_runtimes}{q} -DCMAKE_INSTALL_PREFIX={q}{install_prefix}{q} -DLLVM_ENABLE_RTTI=ON -DLLVM_INSTALL_UTILS=ON -DLLVM_TARGETS_TO_BUILD={targets_to_build} {extra_cmake_options}""" env['MLC_LLVM_CMAKE_CMD'] = cmake_cmd diff --git 
a/script/install-llvm-src/meta.yaml b/script/install-llvm-src/meta.yaml index f1dd3657d..0c8d91c43 100644 --- a/script/install-llvm-src/meta.yaml +++ b/script/install-llvm-src/meta.yaml @@ -62,8 +62,19 @@ variations: clang: default: true env: - MLC_LLVM_ENABLE_PROJECTS: clang + +MLC_LLVM_ENABLE_PROJECTS: + - clang group: clang + lld: + default: true + env: + +MLC_LLVM_ENABLE_PROJECTS: + - lld + group: lld + no-clang: + group: clang + no-lld: + group: lld debug: env: MLC_LLVM_BUILD_TYPE: debug @@ -198,7 +209,8 @@ variations: group: repo runtimes.#: env: - MLC_LLVM_ENABLE_RUNTIMES: '#' + +MLC_LLVM_ENABLE_RUNTIMES: + - '#' sha.#: env: MLC_GIT_CHECKOUT_SHA: '#' @@ -208,3 +220,6 @@ variations: env: MLC_GIT_CHECKOUT_TAG: '#' versions: {} +tests: + run_inputs: + - version: "20.1.0" diff --git a/script/install-tensorflow-from-src/meta.yaml b/script/install-tensorflow-from-src/meta.yaml index c219d1179..5697128d3 100644 --- a/script/install-tensorflow-from-src/meta.yaml +++ b/script/install-tensorflow-from-src/meta.yaml @@ -56,7 +56,7 @@ versions: version_max_usable: 17.0.6 version_min: 17.0.6 - tags: get,bazel - version: 6.5.0 + version: 7.4.1 env: MLC_GIT_CHECKOUT: master v1.15.0: diff --git a/script/push-mlperf-inference-results-to-github/run.sh b/script/push-mlperf-inference-results-to-github/run.sh index ffac61801..5994b1068 100644 --- a/script/push-mlperf-inference-results-to-github/run.sh +++ b/script/push-mlperf-inference-results-to-github/run.sh @@ -16,8 +16,13 @@ if [[ -n ${MLC_MLPERF_INFERENCE_SUBMISSION_DIR} ]]; then fi test $? -eq 0 || exit $? -git commit -a -m "${MLC_MLPERF_RESULTS_REPO_COMMIT_MESSAGE}" -test $? -eq 0 || exit $? +if ! git diff-index --quiet HEAD --; then + git commit -a -m "${MLC_MLPERF_RESULTS_REPO_COMMIT_MESSAGE}" + test $? -eq 0 || exit $? +else + echo "No changes to commit." 
+fi + echo ${MLC_GIT_PUSH_CMD} ${MLC_GIT_PUSH_CMD} diff --git a/script/run-mlperf-inference-mobilenet-models/meta.yaml b/script/run-mlperf-inference-mobilenet-models/meta.yaml index 813b1ef93..2eb6647b3 100644 --- a/script/run-mlperf-inference-mobilenet-models/meta.yaml +++ b/script/run-mlperf-inference-mobilenet-models/meta.yaml @@ -159,13 +159,15 @@ variations: group: optimization use-neon: alias: neon + tests: run_inputs: - env: MLC_TEST_ONE_RUN: 'yes' - variations_list: - - tflite - - accuracy_only + variations_list: + - tflite,find-performance adr: compiler: tags: gcc + imagenet-preprocessed: + tags: _500