Skip to content

Commit 195a622

Browse files
Add HGNet and HGNetV2 (#41)
* Add HGNet and HGNetV2 * Update README and requirements.txt * Update configs * Fix mixed precision for `LearnableAffine`
1 parent 8995b40 commit 195a622

14 files changed

+1224
-8
lines changed

README.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -154,6 +154,8 @@ Reference: [Grad-CAM class activation visualization (keras.io)](https://keras.io
154154
|EfficientNetV2|[ICML 2021](https://arxiv.org/abs/2104.00298)|`timm`|`kimm.models.EfficientNetV2*`|
155155
|GhostNet|[CVPR 2020](https://arxiv.org/abs/1911.11907)|`timm`|`kimm.models.GhostNet*`|
156156
|GhostNetV2|[NeurIPS 2022](https://arxiv.org/abs/2211.12905)|`timm`|`kimm.models.GhostNetV2*`|
157+
|HGNet||`timm`|`kimm.models.HGNet*`|
158+
|HGNetV2||`timm`|`kimm.models.HGNetV2*`|
157159
|InceptionNeXt|[arXiv 2023](https://arxiv.org/abs/2303.16900)|`timm`|`kimm.models.InceptionNeXt*`|
158160
|InceptionV3|[CVPR 2016](https://arxiv.org/abs/1512.00567)|`timm`|`kimm.models.InceptionV3`|
159161
|LCNet|[arXiv 2021](https://arxiv.org/abs/2109.15099)|`timm`|`kimm.models.LCNet*`|

kimm/blocks/base_block.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,8 @@ def apply_conv2d_block(
5555
if strides > 1:
5656
padding = "valid"
5757
x = layers.ZeroPadding2D(
58-
(kernel_size[0] // 2, kernel_size[1] // 2), name=f"{name}_pad"
58+
((kernel_size[0] - 1) // 2, (kernel_size[1] - 1) // 2),
59+
name=f"{name}_pad",
5960
)(x)
6061

6162
if not use_depthwise:

kimm/layers/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
from kimm.layers.attention import Attention
22
from kimm.layers.layer_scale import LayerScale
3+
from kimm.layers.learnable_affine import LearnableAffine
34
from kimm.layers.mobile_one_conv2d import MobileOneConv2D
45
from kimm.layers.position_embedding import PositionEmbedding
56
from kimm.layers.rep_conv2d import RepConv2D

kimm/layers/learnable_affine.py

Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,50 @@
1+
import keras
2+
from keras import layers
3+
from keras import ops
4+
5+
6+
@keras.saving.register_keras_serializable(package="kimm")
class LearnableAffine(layers.Layer):
    """Elementwise learnable affine transform: `inputs * scale + bias`.

    Both `scale` and `bias` are single-element trainable weights that
    broadcast over the input, initialized from `scale_value` and
    `bias_value` respectively.

    Args:
        scale_value: float. Initial value of the `scale` weight.
            Defaults to `1.0`.
        bias_value: float. Initial value of the `bias` weight.
            Defaults to `0.0`.

    Raises:
        ValueError: If `scale_value` or `bias_value` is an `int`.
    """

    def __init__(self, scale_value=1.0, bias_value=0.0, **kwargs):
        super().__init__(**kwargs)
        # Reject plain ints: these values seed float weights, and an int
        # here usually signals a caller mistake (e.g. passing a channel
        # count instead of an initial value).
        if isinstance(scale_value, int):
            raise ValueError(
                f"scale_value must be a float. Received: {scale_value}"
            )
        if isinstance(bias_value, int):
            raise ValueError(
                f"bias_value must be a float. Received: {bias_value}"
            )
        self.scale_value = scale_value
        self.bias_value = bias_value

    def build(self, input_shape):
        # Scalar weights stored with shape (1,) so they broadcast over an
        # input of any rank.
        self.scale = self.add_weight(
            shape=(1,),
            initializer=lambda shape, dtype: ops.cast(self.scale_value, dtype),
            trainable=True,
            name="scale",
        )
        self.bias = self.add_weight(
            shape=(1,),
            initializer=lambda shape, dtype: ops.cast(self.bias_value, dtype),
            trainable=True,
            name="bias",
        )
        self.built = True

    def call(self, inputs, training=None, mask=None):
        # Cast weights to compute_dtype so mixed precision works: variables
        # may be stored in float32 while compute runs in float16/bfloat16.
        scale = ops.cast(self.scale, self.compute_dtype)
        bias = ops.cast(self.bias, self.compute_dtype)
        return ops.add(ops.multiply(inputs, scale), bias)

    def get_config(self):
        config = super().get_config()
        config.update(
            {
                "scale_value": self.scale_value,
                "bias_value": self.bias_value,
                "name": self.name,
            }
        )
        return config

kimm/layers/learnable_affine_test.py

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
import pytest
2+
from absl.testing import parameterized
3+
from keras.src import testing
4+
5+
from kimm.layers.learnable_affine import LearnableAffine
6+
7+
8+
class LearnableAffineTest(testing.TestCase, parameterized.TestCase):
    """Unit tests for `kimm.layers.learnable_affine.LearnableAffine`."""

    @pytest.mark.requires_trainable_backend
    def test_learnable_affine_basic(self):
        # The layer holds exactly two trainable scalar weights (scale and
        # bias), preserves the input shape, and does not support masking.
        self.run_layer_test(
            LearnableAffine,
            init_kwargs={"scale_value": 1.0, "bias_value": 0.0},
            input_shape=(1, 10),
            expected_output_shape=(1, 10),
            expected_num_trainable_weights=2,
            expected_num_non_trainable_weights=0,
            expected_num_losses=0,
            supports_masking=False,
        )

kimm/models/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
from kimm.models.densenet import * # noqa:F403
55
from kimm.models.efficientnet import * # noqa:F403
66
from kimm.models.ghostnet import * # noqa:F403
7+
from kimm.models.hgnet import * # noqa:F403
78
from kimm.models.inception_next import * # noqa:F403
89
from kimm.models.inception_v3 import * # noqa:F403
910
from kimm.models.mobilenet_v2 import * # noqa:F403

0 commit comments

Comments
 (0)