Skip to content

Cleaned the boolean usage in MLCFlow #246

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 11 commits into from
Feb 20, 2025
2 changes: 1 addition & 1 deletion automation/script/docker.py
Original file line number Diff line number Diff line change
Expand Up @@ -246,7 +246,7 @@ def docker_run(self_module, i):
for t in i.get('tags', '').split(",") if t.startswith("_")]

docker_cache = i.get('docker_cache', "yes")
if docker_cache.lower() in ["no", "false"]:
if is_false(docker_cache):
env.setdefault('MLC_DOCKER_CACHE', docker_cache)

image_repo = i.get('docker_image_repo', '')
Expand Down
4 changes: 2 additions & 2 deletions automation/script/module.py
Original file line number Diff line number Diff line change
Expand Up @@ -5039,7 +5039,7 @@ def enable_or_skip_script(meta, env):
value = str(env[key]).lower().strip()
if set(meta_key) & set(["yes", "on", "true", "1"]):
# Any set value other than false is taken as set
if value not in ["no", "off", "false", "0", ""]:
if not is_false(value):
continue
elif set(meta_key) & set(["no", "off", "false", "0"]):
if value in ["no", "off", "false", "0", ""]:
Expand Down Expand Up @@ -5072,7 +5072,7 @@ def any_enable_or_skip_script(meta, env):
meta_key = [str(v).lower() for v in meta[key]]

if set(meta_key) & set(["yes", "on", "true", "1"]):
if value not in ["no", "off", "false", "0", ""]:
if not is_false(value):
found = True
elif set(meta_key) & set(["no", "off", "false", "0", ""]):
if value in ["no", "off", "false", "0", ""]:
Expand Down
5 changes: 3 additions & 2 deletions script/app-mlperf-inference-nvidia/customize.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from mlc import utils
import os
import shutil
from utils import *


def preprocess(i):
Expand Down Expand Up @@ -590,8 +591,8 @@ def preprocess(i):

run_infer_on_copy_streams = str(
env.get('MLC_MLPERF_NVIDIA_HARNESS_RUN_INFER_ON_COPY_STREAMS', ''))
if run_infer_on_copy_streams and run_infer_on_copy_streams.lower() not in [
"no", "false", "0", ""]:
if run_infer_on_copy_streams and not is_false(
run_infer_on_copy_streams):
run_config += " --run_infer_on_copy_streams"

start_from_device = str(
Expand Down
4 changes: 2 additions & 2 deletions script/app-mlperf-inference/customize.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
import mlperf_utils
import re
from datetime import datetime, timezone
from utils import *


def preprocess(i):
Expand Down Expand Up @@ -286,8 +287,7 @@ def postprocess(i):
state['app_mlperf_inference_log_summary'][y[0].strip().lower()
] = y[1].strip()

if env.get("MLC_MLPERF_PRINT_SUMMARY", "").lower() not in [
"no", "0", "false"]:
if not is_false(env.get("MLC_MLPERF_PRINT_SUMMARY", "")):
print("\n")
print(mlperf_log_summary)

Expand Down
7 changes: 4 additions & 3 deletions script/benchmark-program/customize.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from mlc import utils
import os
from utils import *


def preprocess(i):
Expand All @@ -20,7 +21,7 @@ def preprocess(i):
env['MLC_RUN_CMD'] += ' ' + env['MLC_RUN_SUFFIX']

else:
if env['MLC_ENABLE_NUMACTL'].lower() in ["on", "1", "true", "yes"]:
if is_true(env['MLC_ENABLE_NUMACTL']):
env['MLC_ENABLE_NUMACTL'] = "1"
MLC_RUN_PREFIX = "numactl " + env['MLC_NUMACTL_MEMBIND'] + ' '
else:
Expand Down Expand Up @@ -49,8 +50,8 @@ def preprocess(i):
if x != '':
env['MLC_RUN_CMD'] = x + ' ' + env.get('MLC_RUN_CMD', '')

if os_info['platform'] != 'windows' and str(
env.get('MLC_SAVE_CONSOLE_LOG', True)).lower() not in ["no", "false", "0"]:
if os_info['platform'] != 'windows' and not is_false(
env.get('MLC_SAVE_CONSOLE_LOG', True)):
logs_dir = env.get('MLC_LOGS_DIR', env['MLC_RUN_DIR'])
env['MLC_RUN_CMD'] += r" 2>&1 | tee " + q + os.path.join(
logs_dir, "console.out") + q + r"; echo \${PIPESTATUS[0]} > exitstatus"
Expand Down
3 changes: 1 addition & 2 deletions script/download-file/customize.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,8 +85,7 @@ def preprocess(i):
extra_download_options = env.get('MLC_DOWNLOAD_EXTRA_OPTIONS', '')

verify_ssl = env.get('MLC_VERIFY_SSL', "True")
if str(verify_ssl).lower() in [
"no", "false"] or os_info['platform'] == 'windows':
if is_false(verify_ssl) or os_info['platform'] == 'windows':
verify_ssl = False
else:
verify_ssl = True
Expand Down
5 changes: 3 additions & 2 deletions script/generate-mlperf-inference-user-conf/customize.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import shutil
import subprocess
import sys
from utils import *


def preprocess(i):
Expand Down Expand Up @@ -112,8 +113,8 @@ def preprocess(i):
env['MLC_MLPERF_USE_MAX_DURATION'] = 'no'
elif scenario == "MultiStream" and (1000 / float(value) * 660 < 662):
env['MLC_MLPERF_USE_MAX_DURATION'] = 'no'
if env.get('MLC_MLPERF_MODEL_EQUAL_ISSUE_MODE', 'no').lower() not in ["yes", "1", "true"] and env.get(
'MLC_MLPERF_USE_MAX_DURATION', "yes").lower() not in ["no", "false", "0"]:
if not is_true(env.get('MLC_MLPERF_MODEL_EQUAL_ISSUE_MODE', 'no')) and not is_false(env.get(
'MLC_MLPERF_USE_MAX_DURATION', "yes")):
tolerance = 0.4 # much lower because we have max_duration
else:
tolerance = 0.9
Expand Down
4 changes: 2 additions & 2 deletions script/run-docker-container/customize.py
Original file line number Diff line number Diff line change
Expand Up @@ -185,8 +185,8 @@ def postprocess(i):
if is_true(env.get('MLC_DOCKER_USE_GOOGLE_DNS', '')):
run_opts += ' --dns 8.8.8.8 --dns 8.8.4.4 '

if env.get('MLC_CONTAINER_TOOL', '') == 'podman' and env.get(
'MLC_PODMAN_MAP_USER_ID', '').lower() not in ["no", "0", "false"]:
if env.get('MLC_CONTAINER_TOOL', '') == 'podman' and not is_false(env.get(
'MLC_PODMAN_MAP_USER_ID', '')):
run_opts += " --userns=keep-id"

if env.get('MLC_DOCKER_PORT_MAPS', []):
Expand Down