Skip to content

Commit 7543890

Browse files
xuzhao9 authored and facebook-github-bot committed
Remove hstu install (#93)
Summary: HSTU bug has been fixed upstream: meta-recsys/generative-recommenders#152, so we can remove the patch now. After upstream pytorch pin update, we now use `triton.__file__` to identify whether Triton is installed from main or torch-triton. Pull Request resolved: #93 Test Plan: Internal test: ``` buck2 test -c fbcode.nvcc_arch=h100a -c fbcode.platform010_cuda_version=12.4 'fbcode//mode/opt' fbcode//pytorch/tritonbench/test/test_gpu:test_gpu -- --exact 'pytorch/tritonbench/test/test_gpu:test_gpu - test_gpu_tritonbench_flash_attention (pytorch.tritonbench.test.test_gpu.main.TestTritonbenchGpu)' ``` Cache hits: 98%. Commands: 462286 (cached: 453091, remote: 7877, local: 1318) Tests finished: Pass 1. Fail 0. Fatal 0. Skip 0. Build failure 0 Reviewed By: plotfi Differential Revision: D66741231 Pulled By: xuzhao9 fbshipit-source-id: dece3777acc996015682e28d2ec31a21b1795260
1 parent 6b0b478 commit 7543890

File tree

6 files changed

+13
-64
lines changed

6 files changed

+13
-64
lines changed

.github/workflows/_linux-test-h100.yml

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -29,12 +29,9 @@ jobs:
2929
nvidia-smi
3030
- name: Install Tritonbench
3131
run: |
32-
# todo: remove this when the new docker rolls out
32+
# speedup install and skip compile by reusing the docker .so files
3333
mkdir -p /workspace/tritonbench/.data
34-
# speedup install and skip compile
3534
ln -s /workspace/tritonbench/.data .
36-
. "${SETUP_SCRIPT}"
37-
python install.py --colfax --tk --hstu
3835
- name: Test Tritonbench operators on H100 GPU
3936
run: |
4037
bash ./.ci/tritonbench/test-gpu.sh

install.py

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -82,7 +82,6 @@ def setup_hip(args: argparse.Namespace):
8282
# We have to disable all third-parties that donot support hip/rocm
8383
args.all = False
8484
args.liger = True
85-
args.hstu = True
8685

8786

8887
if __name__ == "__main__":
@@ -102,7 +101,6 @@ def setup_hip(args: argparse.Namespace):
102101
parser.add_argument(
103102
"--fa3", action="store_true", help="Install optional flash_attention 3 kernels"
104103
)
105-
parser.add_argument("--hstu", action="store_true", help="Install HSTU.")
106104
parser.add_argument("--jax", action="store_true", help="Install jax nightly")
107105
parser.add_argument("--tk", action="store_true", help="Install ThunderKittens")
108106
parser.add_argument("--liger", action="store_true", help="Install Liger-kernel")
@@ -151,11 +149,6 @@ def setup_hip(args: argparse.Namespace):
151149
from tools.xformers.install import install_xformers
152150

153151
install_xformers()
154-
if args.hstu or args.all:
155-
logger.info("[tritonbench] installing hstu...")
156-
from tools.hstu.install import install_hstu
157-
158-
install_hstu()
159152
logger.info("[tritonbench] installation complete!")
160153
# run tests to check installation
161154
if args.test:

test/test_gpu/main.py

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -18,14 +18,12 @@
1818
fbcode_skip_file_path = "fb/skip_tests_h100_fbcode.yaml"
1919
SKIP_FILE = importlib.resources.files(__package__).joinpath(fbcode_skip_file_path)
2020
else:
21-
SKIP_FILE_NAME = "skip_tests_h100_pytorch.yaml"
22-
try:
23-
# test if it is Triton main branch
24-
import triton.tools.experimental_descriptor # @manual # noqa: F401
21+
import triton # @manual
2522

23+
if "site-packages" in triton.__file__:
24+
SKIP_FILE_NAME = "skip_tests_h100_pytorch.yaml"
25+
else:
2626
SKIP_FILE_NAME = "skip_tests_h100_triton_main.yaml"
27-
except ModuleNotFoundError:
28-
pass
2927
import os
3028

3129
SKIP_FILE = os.path.abspath(os.path.join(os.path.dirname(__file__), SKIP_FILE_NAME))

test/test_gpu/skip_tests_h100_pytorch.yaml

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,8 @@ bf16xint16_gemm:
1010
flash_attention:
1111
# thunderkittens cannot handle the default input shapes
1212
- tk
13+
# triton_op_flash_v2 will segfault on triton-pytorch
14+
- triton_op_flash_v2
1315
# triton_tutorial_* kernels require triton-main
1416
- triton_tutorial_flash_v2
1517
- triton_tutorial_flash_v2_opt
@@ -42,5 +44,11 @@ jagged_mean:
4244
jagged_softmax:
4345
jagged_sum:
4446
ragged_attention:
47+
# ../../../lib/Tools/LinearLayout.cpp:565: LinearLayout
48+
# mlir::triton::LinearLayout::reshapeOuts(ArrayRef<std::pair<StringAttr, int32_t>>) const:
49+
# Assertion `getTotalOutDimSize() == std::accumulate( newOutDims.begin(), newOutDims.end(),
50+
# 1, [&](int32_t acc, auto &outDim) { return acc * outDim.second; })' failed.
51+
- hstu_triton_ragged_attention
52+
# presistent kernel is not ready for OSS
4553
- hstu_triton_ragged_attention_persistent
4654
test_op:

tools/hstu/hstu.patch

Lines changed: 0 additions & 13 deletions
This file was deleted.

tools/hstu/install.py

Lines changed: 0 additions & 34 deletions
This file was deleted.

0 commit comments

Comments (0)