Commit 6411f12 (1 parent: ddb8b3b)

Refactor linting and formatting tools in Makefile and scripts

4 files changed, +95 -35 lines

.github/workflows/test.yml

Lines changed: 84 additions & 17 deletions
@@ -2,9 +2,19 @@ name: Tests
 
 on:
   push:
-    branches: [main, develop, findhao/setup_ci, findhao/add_linter]
+    branches: [main, develop, findhao/add_linter]
+    paths-ignore:
+      - "website/**"
+      - "docs/**"
+      - "*.md"
+      - ".gitignore"
   pull_request:
     branches: [main]
+    paths-ignore:
+      - "website/**"
+      - "docs/**"
+      - "*.md"
+      - ".gitignore"
   workflow_dispatch:
     inputs:
       test-type:
@@ -34,14 +44,6 @@ jobs:
         with:
           python-version: "3.11"
 
-      - name: Cache pip dependencies
-        uses: actions/cache@v3
-        with:
-          path: ~/.cache/pip
-          key: ${{ runner.os }}-pip-format-${{ hashFiles('**/pyproject.toml') }}
-          restore-keys: |
-            ${{ runner.os }}-pip-format-
-
       - name: Install development dependencies
         run: |
           make install-dev
@@ -66,6 +68,78 @@ jobs:
         with:
           python-version: "3.11"
 
+      - name: Get daily cache timestamp
+        id: daily-cache
+        run: |
+          # Calculate date (e.g., 2024-01-15) for daily cache expiration
+          DATE_STAMP=$(date +"%Y-%m-%d")
+          echo "date=$DATE_STAMP" >> $GITHUB_OUTPUT
+          echo "Using daily cache stamp: $DATE_STAMP"
+
+      - name: Get weekly cache timestamp
+        id: weekly-cache
+        run: |
+          # Calculate year-week (e.g., 2024-03) for weekly cache expiration
+          WEEK_STAMP=$(date +"%Y-%U")
+          echo "week=$WEEK_STAMP" >> $GITHUB_OUTPUT
+          echo "Using weekly cache stamp: $WEEK_STAMP"
+
+      - name: Cache pip dependencies
+        uses: actions/cache@v3
+        with:
+          path: ~/.cache/pip
+          key: ${{ runner.os }}-pip-3.11-${{ steps.daily-cache.outputs.date }}
+          restore-keys: |
+            ${{ runner.os }}-pip-3.11-
+
+      - name: Cache APT packages
+        uses: actions/cache@v3
+        with:
+          path: |
+            /var/cache/apt/archives
+            /var/lib/apt/lists
+          key: ${{ runner.os }}-apt-${{ hashFiles('.ci/setup.sh') }}-${{ steps.weekly-cache.outputs.week }}
+          restore-keys: |
+            ${{ runner.os }}-apt-${{ hashFiles('.ci/setup.sh') }}-
+            ${{ runner.os }}-apt-
+
+      - name: Get Triton latest commit
+        id: triton-commit
+        run: |
+          # Check if jq is available
+          if ! command -v jq &> /dev/null; then
+            echo "jq not found, installing..."
+            sudo apt-get update && sudo apt-get install -y jq
+          fi
+
+          # Get commit with error handling
+          echo "Fetching latest Triton commit..."
+          COMMIT=$(curl -s --max-time 30 --retry 3 https://api.github.com/repos/triton-lang/triton/commits/main | jq -r .sha 2>/dev/null || echo "")
+
+          if [ -n "$COMMIT" ] && [ "$COMMIT" != "null" ]; then
+            echo "commit=$COMMIT" >> $GITHUB_OUTPUT
+            echo "cache-key=$COMMIT" >> $GITHUB_OUTPUT
+            echo "✅ Using Triton commit: $COMMIT"
+          else
+            echo "❌ Failed to get Triton commit, using 'main' as fallback"
+            # Force cache miss by using timestamp when API fails
+            TIMESTAMP=$(date +%Y%m%d%H%M%S)
+            echo "commit=main" >> $GITHUB_OUTPUT
+            echo "cache-key=main-fallback-$TIMESTAMP" >> $GITHUB_OUTPUT
+            echo "⚠️ Using fallback cache key: main-fallback-$TIMESTAMP"
+          fi
+
+      - name: Cache Triton source and build
+        uses: actions/cache@v3
+        with:
+          path: |
+            /tmp/triton
+            /tmp/triton-cache
+          key: ${{ runner.os }}-triton-source-${{ hashFiles('.ci/install-triton.sh') }}-${{ steps.triton-commit.outputs.cache-key }}
+          restore-keys: |
+            ${{ runner.os }}-triton-source-${{ hashFiles('.ci/install-triton.sh') }}-
+            ${{ runner.os }}-triton-source-
+
       - name: Setup environment
         env:
           CONDA_ENV: tritonparse
@@ -77,6 +151,7 @@ jobs:
       - name: Install Triton from source
         env:
           CONDA_ENV: tritonparse
+          TRITON_COMMIT: ${{ steps.triton-commit.outputs.commit }}
         run: |
           bash .ci/install-triton.sh
 
@@ -86,14 +161,6 @@
         run: |
           bash .ci/install-project.sh
 
-      - name: Cache pip dependencies
-        uses: actions/cache@v3
-        with:
-          path: ~/.cache/pip
-          key: ${{ runner.os }}-pip-3.11-${{ hashFiles('**/pyproject.toml') }}
-          restore-keys: |
-            ${{ runner.os }}-pip-3.11-
-
       - name: Run tests
         env:
           CONDA_ENV: tritonparse
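
Note on the caching strategy introduced above: the pip cache key embeds a daily date stamp, the APT cache key embeds a weekly year-week stamp, and the Triton source cache is keyed on the latest upstream commit SHA, falling back to a timestamped key (forcing a cache miss) when the GitHub API call fails. A minimal Python sketch of how the two time stamps compose into cache keys; runner_os and setup_sh_hash are illustrative placeholders for ${{ runner.os }} and hashFiles('.ci/setup.sh'):

from datetime import datetime, timezone

# Illustrative placeholders; the workflow fills these from ${{ runner.os }} and hashFiles().
runner_os = "Linux"
setup_sh_hash = "abc123"

now = datetime.now(timezone.utc)

# Daily stamp (e.g. "2024-01-15"): the pip cache key changes once per day.
date_stamp = now.strftime("%Y-%m-%d")
pip_key = f"{runner_os}-pip-3.11-{date_stamp}"

# Weekly stamp (e.g. "2024-03", year-week): the APT cache key changes once per week.
week_stamp = now.strftime("%Y-%U")
apt_key = f"{runner_os}-apt-{setup_sh_hash}-{week_stamp}"

print(pip_key)  # e.g. Linux-pip-3.11-2024-01-15
print(apt_key)  # e.g. Linux-apt-abc123-2024-03

The restore-keys entries drop the trailing stamp, so once a key expires the most recent older cache is still restored before the fresh key is saved.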

tritonparse/common.py

Lines changed: 2 additions & 2 deletions
@@ -173,9 +173,9 @@ def print_parsed_files_summary(parsed_log_dir: str) -> None:
             if size_bytes < 1024:
                 file_size = f"{size_bytes}B"
             elif size_bytes < 1024 * 1024:
-                file_size = f"{size_bytes/1024:.1f}KB"
+                file_size = f"{size_bytes / 1024:.1f}KB"
             else:
-                file_size = f"{size_bytes/(1024*1024):.1f}MB"
+                file_size = f"{size_bytes / (1024 * 1024):.1f}MB"
         except OSError:
             pass
 
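
This change is whitespace-only (the formatter adds spaces around the division operators); behavior is unchanged. For reference, a standalone sketch of the size-formatting logic touched here; the helper name format_size is illustrative, in common.py the logic is inline in print_parsed_files_summary:

def format_size(size_bytes: int) -> str:
    # Mirrors the inline formatting in print_parsed_files_summary.
    if size_bytes < 1024:
        return f"{size_bytes}B"
    elif size_bytes < 1024 * 1024:
        return f"{size_bytes / 1024:.1f}KB"
    else:
        return f"{size_bytes / (1024 * 1024):.1f}MB"


print(format_size(512))        # 512B
print(format_size(2048))       # 2.0KB
print(format_size(5_242_880))  # 5.0MB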

tritonparse/structured_logging.py

Lines changed: 6 additions & 13 deletions
@@ -142,8 +142,6 @@ def convert(obj):
     Returns:
         A serializable version of the input object where dataclasses are converted to dictionaries
     """
-    from triton.language.core import dtype
-
     # 1. primitives that JSON already supports -------------------------------
     if obj is None or isinstance(obj, (bool, int, str)):
         return obj
@@ -178,11 +176,6 @@ def convert(obj):
         return convert(
             asdict(obj)
         )  # Convert dataclass to dict and then process that dict
-
-    # 4. Common Triton constexpr objects
-    if isinstance(obj, dtype):
-        return f"triton.language.core.dtype('{str(obj)}')"
-
     log.warning(f"Unknown type: {type(obj)}")
     return str(obj)  # Return primitive types as-is
 
@@ -378,18 +371,18 @@ extract_file_content(trace_data: Dict[str, Any], metadata_group: Dict[str, s
             # Check file size before reading to avoid memory issues
             file_size = os.path.getsize(file_path)
             if file_size > MAX_FILE_SIZE:
-                trace_data["file_content"][ir_filename] = (
-                    f"<file too large: {file_size} bytes>"
-                )
+                trace_data["file_content"][
+                    ir_filename
+                ] = f"<file too large: {file_size} bytes>"
                 continue
 
             with open(file_path, "r") as f:
                 trace_data["file_content"][ir_filename] = f.read()
         except (UnicodeDecodeError, OSError) as e:
             # add more specific error type
-            trace_data["file_content"][ir_filename] = (
-                f"<error reading file: {str(e)}>"
-            )
+            trace_data["file_content"][
+                ir_filename
+            ] = f"<error reading file: {str(e)}>"
             log.debug(f"Error reading file {file_path}: {e}")
 
 
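
With the dtype import and the "# 4. Common Triton constexpr objects" branch removed, triton.language.core.dtype values (and any other unhandled type) now reach the generic fallback: log a warning and serialize via str(obj), so a float32 dtype becomes its plain string form rather than the previous triton.language.core.dtype('...') wrapper. A simplified sketch of that flow, assuming a convert-like function; the real convert in structured_logging.py covers additional cases:

import logging
from dataclasses import asdict, is_dataclass

log = logging.getLogger(__name__)


def convert_sketch(obj):
    # 1. Primitives JSON already supports.
    if obj is None or isinstance(obj, (bool, int, float, str)):
        return obj
    # 2. Containers are converted element by element (an assumption for this sketch).
    if isinstance(obj, (list, tuple)):
        return [convert_sketch(x) for x in obj]
    if isinstance(obj, dict):
        return {k: convert_sketch(v) for k, v in obj.items()}
    # 3. Dataclass instances become dicts, then are converted recursively.
    if is_dataclass(obj) and not isinstance(obj, type):
        return convert_sketch(asdict(obj))
    # 4. Everything else (including triton.language.core.dtype) hits the fallback:
    #    warn, then fall back to str(obj).
    log.warning(f"Unknown type: {type(obj)}")
    return str(obj)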

tritonparse/utils.py

Lines changed: 3 additions & 3 deletions
@@ -6,9 +6,6 @@
 from pathlib import Path
 from typing import Optional
 
-# argument parser for OSS
-parser = None
-
 from .common import (
     copy_local_to_tmpdir,
     is_fbcode,
@@ -19,6 +16,9 @@
 )
 from .source_type import Source, SourceType
 
+# argument parser for OSS
+parser = None
+
 
 def init_parser():
     global parser
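
One plausible reason for moving parser = None below the imports (an assumption; the commit message does not say): flake8/ruff rule E402, "module level import not at top of file", flags any import that follows non-import module-level code, so an early assignment would mark the later from .common import ... block. A tiny illustration of the rule with hypothetical names:

# e402_demo.py -- hypothetical file, for illustration only
import os

CONFIG = None  # module-level statement before the next import ...

import sys  # <- E402: module level import not at top of file


def main() -> None:
    print(os.name, sys.platform, CONFIG)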
