@@ -12,7 +12,7 @@ class Mlp(nn.Module):
     def __init__(
         self,
         in_channels: int,
-        mlp_ratio: int = 4,
+        mlp_ratio: int = 2,
         activation: str = "star_relu",
         dropout: float = 0.0,
         bias: bool = False,
@@ -31,7 +31,7 @@ def __init__(
         ----------
         in_channels : int
             Number of input features.
-        mlp_ratio : int, default=4
+        mlp_ratio : int, default=2
             Scaling factor to get the number of hidden features from the `in_features`.
         activation : str, default="star_relu"
             The name of the activation function.
@@ -69,7 +69,7 @@ class MlpBlock(nn.Module):
     def __init__(
         self,
         in_channels: int,
-        mlp_ratio: int = 4,
+        mlp_ratio: int = 2,
         activation: str = "star_relu",
         activation_kwargs: Dict[str, Any] = None,
         dropout: float = 0.0,
@@ -85,7 +85,7 @@ def __init__(
         ----------
         in_channels : int
             Number of input features.
-        mlp_ratio : int, default=4
+        mlp_ratio : int, default=2
             Scaling factor to get the number of hidden features from the `in_features`.
         activation : str, default="star_relu"
             The name of the activation function.
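For context, a minimal sketch of what this change does in practice, assuming the usual two-layer MLP structure (the class body below is illustrative, not the repository's exact code, and `nn.ReLU` stands in for the repo's "star_relu" activation). The commit halves the default hidden width from 4x to 2x the input channels:

import torch
import torch.nn as nn

class Mlp(nn.Module):
    """Two-layer MLP: in_channels -> in_channels * mlp_ratio -> in_channels."""

    def __init__(
        self,
        in_channels: int,
        mlp_ratio: int = 2,  # new default: hidden width is 2x the input width
        dropout: float = 0.0,
        bias: bool = False,
    ) -> None:
        super().__init__()
        # e.g. 64 input channels -> 128 hidden units (was 256 with mlp_ratio=4)
        hidden_channels = in_channels * mlp_ratio
        self.fc1 = nn.Linear(in_channels, hidden_channels, bias=bias)
        self.act = nn.ReLU()  # stand-in for the repo's "star_relu"
        self.drop = nn.Dropout(dropout)
        self.fc2 = nn.Linear(hidden_channels, in_channels, bias=bias)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        x = self.drop(self.act(self.fc1(x)))
        return self.drop(self.fc2(x))

# With the new default, a 64-channel input passes through a 128-unit hidden layer:
# Mlp(64)(torch.randn(2, 64)).shape == torch.Size([2, 64])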