This repository was archived by the owner on Nov 3, 2022. It is now read-only.

Commit 95214b6

refactored tests
1 parent 9007618 commit 95214b6

2 files changed (+97, -19)


keras_contrib/wrappers/cdropout.py (+9, -6)
@@ -6,6 +6,7 @@
 from keras.initializers import RandomUniform
 from keras.layers import InputSpec
 from keras.layers.wrappers import Wrapper
+from keras_contrib.utils.test_utils import to_tuple
 
 
 class ConcreteDropout(Wrapper):
@@ -34,8 +35,9 @@ class ConcreteDropout(Wrapper):
             Also known as inverse observation noise.
         prob_init: Tuple[float, float].
            Probability lower / upper bounds of dropout rate initialization.
-        temp: float. Temperature. Not used to be optimized.
-        seed: Seed for random probability sampling.
+        temp: float. Temperature.
+            Determines the speed of probability adjustments.
+        seed: Seed for random probability sampling.
 
     # References
         - [Concrete Dropout](https://arxiv.org/pdf/1705.07832.pdf)
@@ -44,10 +46,10 @@ class ConcreteDropout(Wrapper):
     def __init__(self,
                  layer,
                  n_data,
-                 length_scale=2e-2,
+                 length_scale=5e-2,
                  model_precision=1,
                  prob_init=(0.1, 0.5),
-                 temp=0.1,
+                 temp=0.4,
                  seed=None,
                  **kwargs):
         assert 'kernel_regularizer' not in kwargs
@@ -64,7 +66,7 @@ def __init__(self,
 
     def _concrete_dropout(self, inputs, layer_type):
         """Applies concrete dropout.
-        Used at training time (gradients can be propagated)
+        Used at training time (gradients can be propagated).
 
         # Arguments
             inputs: Input.
@@ -99,6 +101,7 @@ def _concrete_dropout(self, inputs, layer_type):
             return inputs
 
     def build(self, input_shape=None):
+        input_shape = to_tuple(input_shape)
         if len(input_shape) == 2:  # Dense_layer
             input_dim = np.prod(input_shape[-1])  # we drop only last dim
         elif len(input_shape) == 4:  # Conv_layer
@@ -126,7 +129,7 @@ def build(self, input_shape=None):
 
         super(ConcreteDropout, self).build(input_shape)
 
-        # initialise regularizer / prior KL term
+        # initialize regularizer / prior KL term
         weight = self.layer.kernel
         kernel_regularizer = (
             self.weight_regularizer

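For context on the new defaults above (length_scale 5e-2 and temp 0.4), here is a minimal usage sketch of the wrapper. It relies only on the constructor signature visible in this diff; the data shape, the wrapped Dense layer, and the prob_init and seed values are illustrative assumptions, not part of the commit.

import numpy as np
from keras.layers import Dense, Input
from keras.models import Model
from keras_contrib.wrappers import ConcreteDropout

n_data, in_dim = 1000, 20                  # illustrative sizes (assumed)
X = np.random.randn(n_data, in_dim)

inputs = Input(shape=(in_dim,))
# Wrap a Dense layer; n_data is the training-set size. length_scale and temp
# fall back to the new defaults (5e-2 and 0.4) introduced in this commit.
cd = ConcreteDropout(Dense(in_dim), n_data, prob_init=(0.1, 0.1), seed=42)
outputs = cd(inputs)

model = Model(inputs, outputs)
# The wrapper contributes its regularization term through layer losses, so the
# model compiles with loss=None, exactly as the refactored tests do.
model.compile(loss=None, optimizer='rmsprop')
eval_loss = model.evaluate(X, verbose=0)   # evaluates the regularizer term only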
tests/keras_contrib/wrappers/test_cdropout.py (+88, -13)
@@ -10,7 +10,10 @@
 from keras_contrib.wrappers import ConcreteDropout
 
 
-def test_cdropout():
+@pytest.fixture(scope='module')
+def dense_model():
+    """Initialize to be tested dense model. Executed once.
+    """
     # DATA
     in_dim = 20
     init_prop = .1
@@ -28,34 +31,72 @@ def test_cdropout():
     # Model, reference w/o Dropout
     x_ref = dense(inputs)
     model_ref = Model(inputs, x_ref)
-    model_ref.compile(loss='mse', optimizer='rmsprop')
+    model_ref.compile(loss=None, optimizer='rmsprop')
+
+    return {'model': model,
+            'model_ref': model_ref,
+            'concrete_dropout': cd,
+            'init_prop': init_prop,
+            'in_dim': in_dim,
+            'X': X}
+
+
+def test_cdropout_dense_3rdweight(dense_model):
+    """Check about correct 3rd weight (equal to initial value)
+    """
+    model = dense_model['model']
+    init_prop = dense_model['init_prop']
 
-    # CHECKS
-    # Check about correct 3rd weight (equal to initial value)
     W = model.get_weights()
     assert_array_almost_equal(W[2], [np.log(init_prop)])
 
-    # Check if ConcreteDropout in prediction phase is the same as no dropout
+
+def test_cdropout_dense_identity(dense_model):
+    """Check if ConcreteDropout in prediction phase is the same as no dropout
+    """
+    model = dense_model['model']
+    model_ref = dense_model['model_ref']
+    X = dense_model['X']
+
     out = model.predict(X)
     out_ref = model_ref.predict(X)
     assert_allclose(out, out_ref, atol=1e-5)
 
-    # Check if ConcreteDropout has the right amount of losses deposited
+
+def test_cdropout_dense_loss(dense_model):
+    """Check if ConcreteDropout has the right amount of losses deposited
+    """
+    model = dense_model['model']
+
     assert_equal(len(model.losses), 1)
 
-    # Check if the loss correspons the the desired value
+
+def test_cdropout_dense_loss_value(dense_model):
+    """Check if the loss corresponds the the desired value
+    """
+    model = dense_model['model']
+    X = dense_model['X']
+    cd = dense_model['concrete_dropout']
+    in_dim = dense_model['in_dim']
+
     def sigmoid(x):
         return 1. / (1. + np.exp(-x))
+
+    W = model.get_weights()
     p = np.squeeze(sigmoid(W[2]))
     kernel_regularizer = cd.weight_regularizer * np.sum(np.square(W[0])) / (1. - p)
     dropout_regularizer = (p * np.log(p) + (1. - p) * np.log(1. - p))
     dropout_regularizer *= cd.dropout_regularizer * in_dim
     loss = np.sum(kernel_regularizer + dropout_regularizer)
+
     eval_loss = model.evaluate(X)
     assert_approx_equal(eval_loss, loss)
 
 
-def test_cdropout_conv():
+@pytest.fixture(scope='module')
+def conv2d_model():
+    """Initialize to be tested conv model. Executed once.
+    """
     # DATA
     in_dim = 20
     init_prop = .1
@@ -75,27 +116,61 @@ def test_cdropout_conv():
     model_ref = Model(inputs, x_ref)
     model_ref.compile(loss=None, optimizer='rmsprop')
 
-    # CHECKS
-    # Check about correct 3rd weight (equal to initial value)
+    return {'model': model,
+            'model_ref': model_ref,
+            'concrete_dropout': cd,
+            'init_prop': init_prop,
+            'in_dim': in_dim,
+            'X': X}
+
+
+def test_cdropout_conv2d_3rdweight(conv2d_model):
+    """Check about correct 3rd weight (equal to initial value)
+    """
+    model = conv2d_model['model']
+    init_prop = conv2d_model['init_prop']
+
     W = model.get_weights()
     assert_array_almost_equal(W[2], [np.log(init_prop)])
 
-    # Check if ConcreteDropout in prediction phase is the same as no dropout
+
+def test_cdropout_conv2d_identity(conv2d_model):
+    """Check if ConcreteDropout in prediction phase is the same as no dropout
+    """
+    model = conv2d_model['model']
+    model_ref = conv2d_model['model_ref']
+    X = conv2d_model['X']
+
     out = model.predict(X)
     out_ref = model_ref.predict(X)
     assert_allclose(out, out_ref, atol=1e-5)
 
-    # Check if ConcreteDropout has the right amount of losses deposited
+
+def test_cdropout_conv2d_loss(conv2d_model):
+    """Check if ConcreteDropout has the right amount of losses deposited
+    """
+    model = conv2d_model['model']
+
     assert_equal(len(model.losses), 1)
 
-    # Check if the loss correspons the the desired value
+
+def test_cdropout_conv2d_loss_value(conv2d_model):
+    """Check if the loss corresponds the the desired value
+    """
+    model = conv2d_model['model']
+    X = conv2d_model['X']
+    cd = conv2d_model['concrete_dropout']
+
     def sigmoid(x):
         return 1. / (1. + np.exp(-x))
+
+    W = model.get_weights()
     p = np.squeeze(sigmoid(W[2]))
     kernel_regularizer = cd.weight_regularizer * np.sum(np.square(W[0])) / (1. - p)
     dropout_regularizer = (p * np.log(p) + (1. - p) * np.log(1. - p))
     dropout_regularizer *= cd.dropout_regularizer * 1  # only channels are dropped
     loss = np.sum(kernel_regularizer + dropout_regularizer)
+
     eval_loss = model.evaluate(X)
     assert_approx_equal(eval_loss, loss)

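As a reading aid for the two *_loss_value tests above: the quantity they recompute with NumPy is the Concrete Dropout regularizer from the paper cited in the wrapper's docstring. Written out as an interpretive sketch of the test code (not text from the commit), with p = sigmoid(W[2]) the learned dropout probability, w_r = cd.weight_regularizer, d_r = cd.dropout_regularizer, and K the number of dropped dimensions (in_dim for the dense case, 1 for the conv case, since only channels are dropped):

\[
\mathcal{L}_{\mathrm{reg}} \;=\; \frac{w_r}{1 - p} \sum_i W_i^2 \;+\; d_r \, K \, \bigl(p \log p + (1 - p)\log(1 - p)\bigr)
\]

Each test then checks that model.evaluate(X) matches this value, which is the only loss a model compiled with loss=None carries.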