Commit 74a052d

Weird behaviour, need to repeat wrap?
1 parent c117328 commit 74a052d


timm/layers/attention_pool2d.py

Lines changed: 5 additions & 1 deletion
@@ -12,12 +12,16 @@
 import torch
 import torch.nn as nn
 
-from .config import use_fused_attn
+from ._fx import register_notrace_function
+from .config import use_fused_attn
 from .helpers import to_2tuple
 from .pos_embed import resample_abs_pos_embed
 from .pos_embed_sincos import apply_rot_embed, RotaryEmbedding
 from .weight_init import trunc_normal_
 
+# have to register again for some reason
+register_notrace_function(resample_abs_pos_embed)
+
 
 class RotAttentionPool2d(nn.Module):
     """ Attention based 2D feature pooling w/ rotary (relative) pos embedding.
