Skip to content

Commit 2555855

Browse files
authored
Set baseline for ragged_attention (#284)
1 parent 29404a1 commit 2555855

File tree

1 file changed

+1
-1
lines changed

1 file changed

+1
-1
lines changed

tritonbench/operators/ragged_attention/operator.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -74,7 +74,7 @@ def __init__(
7474
self.alpha = 1.0 / self.attn_dim
7575
self.requires_grad = not (self.mode == Mode.FWD_NO_GRAD)
7676

77-
@register_benchmark()
77+
@register_benchmark(baseline=True)
7878
def hstu(self, q, k, v, seq_offsets, num_targets, max_seq_len):
7979
return lambda: triton_hstu_mha(
8080
max_seq_len,

0 commit comments

Comments (0)