1 parent c117328 commit 74a052d
timm/layers/attention_pool2d.py
@@ -12,12 +12,16 @@
 import torch
 import torch.nn as nn
 
-from. config import use_fused_attn
+from ._fx import register_notrace_function
+from .config import use_fused_attn
 from .helpers import to_2tuple
 from .pos_embed import resample_abs_pos_embed
 from .pos_embed_sincos import apply_rot_embed, RotaryEmbedding
 from .weight_init import trunc_normal_
 
+# have to register again for some reason
+register_notrace_function(resample_abs_pos_embed)
+
 
 class RotAttentionPool2d(nn.Module):
     """ Attention based 2D feature pooling w/ rotary (relative) pos embedding.