
Commit 7464bce

Adapt to KerasTuner 1.1.0rc0. (#1640)
Co-authored-by: Haifeng Jin <haifeng-jin@users.noreply.github.com>
1 parent: 5037540

File tree: 6 files changed (+25, -22 lines)

autokeras/auto_model.py

Lines changed: 5 additions & 0 deletions
@@ -193,8 +193,12 @@ def _build_graph(self):
             graph = graph_module.Graph(inputs=self.inputs, outputs=self.outputs)
         # Using input/output API.
         elif all([isinstance(output, head_module.Head) for output in self.outputs]):
+            # Clear session to reset get_uid(). The names of the blocks will
+            # start to count from 1 for new blocks in a new AutoModel afterwards.
+            tf.keras.backend.clear_session()
             graph = self._assemble()
             self.outputs = graph.outputs
+            tf.keras.backend.clear_session()

         return graph

@@ -375,6 +379,7 @@ def _build_hyper_pipeline(self, dataset):
             inputs=[node.get_hyper_preprocessors() for node in self.inputs],
             outputs=[head.get_hyper_preprocessors() for head in self._heads],
         )
+        self.tuner.hypermodel.hyper_pipeline = self.tuner.hyper_pipeline

     def _convert_to_dataset(self, x, y, validation_data, batch_size):
         """Convert the data to tf.data.Dataset."""

autokeras/engine/tuner.py

Lines changed: 14 additions & 17 deletions
@@ -18,7 +18,6 @@

 import keras_tuner
 import tensorflow as tf
-from keras_tuner.engine import hypermodel as hm_module
 from tensorflow.keras import callbacks as tf_callbacks
 from tensorflow.keras.layers.experimental import preprocessing
 from tensorflow.python.util import nest

@@ -43,7 +42,7 @@ class AutoTuner(keras_tuner.engine.tuner.Tuner):

     # Arguments
         oracle: keras_tuner Oracle.
-        hypermodel: keras_tuner KerasHyperModel.
+        hypermodel: keras_tuner HyperModel.
         **kwargs: The args supported by KerasTuner.
     """

@@ -52,15 +51,15 @@ def __init__(self, oracle, hypermodel, **kwargs):
         self._finished = False
         super().__init__(oracle, hypermodel, **kwargs)
         # Save or load the HyperModel.
-        self.hypermodel.hypermodel.save(os.path.join(self.project_dir, "graph"))
+        self.hypermodel.save(os.path.join(self.project_dir, "graph"))
         self.hyper_pipeline = None

     def _populate_initial_space(self):
         # Override the function to prevent building the model during initialization.
         return

     def get_best_model(self):
-        with hm_module.maybe_distribute(self.distribution_strategy):
+        with keras_tuner.engine.tuner.maybe_distribute(self.distribution_strategy):
             model = tf.keras.models.load_model(self.best_model_path)
         return model

@@ -80,27 +79,27 @@ def _prepare_model_build(self, hp, **kwargs):
         pipeline = self.hyper_pipeline.build(hp, dataset)
         pipeline.fit(dataset)
         dataset = pipeline.transform(dataset)
-        self.hypermodel.hypermodel.set_io_shapes(data_utils.dataset_shape(dataset))
+        self.hypermodel.set_io_shapes(data_utils.dataset_shape(dataset))

         if "validation_data" in kwargs:
             validation_data = pipeline.transform(kwargs["validation_data"])
         else:
             validation_data = None
         return pipeline, dataset, validation_data

-    def _build_and_fit_model(self, trial, fit_args, fit_kwargs):
+    def _build_and_fit_model(self, trial, *args, **kwargs):
+        model = self.hypermodel.build(trial.hyperparameters)
         (
             pipeline,
-            fit_kwargs["x"],
-            fit_kwargs["validation_data"],
-        ) = self._prepare_model_build(trial.hyperparameters, **fit_kwargs)
+            kwargs["x"],
+            kwargs["validation_data"],
+        ) = self._prepare_model_build(trial.hyperparameters, **kwargs)
         pipeline.save(self._pipeline_path(trial.trial_id))

-        model = self.hypermodel.build(trial.hyperparameters)
-        self.adapt(model, fit_kwargs["x"])
+        self.adapt(model, kwargs["x"])

         _, history = utils.fit_with_adaptive_batch_size(
-            model, self.hypermodel.hypermodel.batch_size, **fit_kwargs
+            model, self.hypermodel.batch_size, **kwargs
         )
         return history

@@ -165,7 +164,7 @@ def search(
         if callbacks is None:
             callbacks = []

-        self.hypermodel.hypermodel.set_fit_args(validation_split, epochs=epochs)
+        self.hypermodel.set_fit_args(validation_split, epochs=epochs)

         # Insert early-stopping for adaptive number of epochs.
         epochs_provided = True

@@ -216,9 +215,7 @@ def search(
             )
             copied_fit_kwargs.pop("validation_data")

-            self.hypermodel.hypermodel.set_fit_args(
-                0, epochs=copied_fit_kwargs["epochs"]
-            )
+            self.hypermodel.set_fit_args(0, epochs=copied_fit_kwargs["epochs"])
             pipeline, model, history = self.final_fit(**copied_fit_kwargs)
         else:
             # TODO: Add return history functionality in Keras Tuner

@@ -270,7 +267,7 @@ def final_fit(self, **kwargs):
         model = self._build_best_model()
         self.adapt(model, kwargs["x"])
         model, history = utils.fit_with_adaptive_batch_size(
-            model, self.hypermodel.hypermodel.batch_size, **kwargs
+            model, self.hypermodel.batch_size, **kwargs
         )
         return pipeline, model, history
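Note on the renames above: in KerasTuner 1.1.0 the base Tuner stores the user's HyperModel directly as self.hypermodel (the KerasHyperModel wrapper is gone, hence .hypermodel.hypermodel collapsing to .hypermodel), maybe_distribute now lives in keras_tuner.engine.tuner, and _build_and_fit_model receives *args/**kwargs forwarded from search(). A minimal sketch of the new override pattern, with hypothetical MyHyperModel/MyTuner names rather than AutoKeras code:

import keras_tuner
import tensorflow as tf


class MyHyperModel(keras_tuner.HyperModel):
    def build(self, hp):
        model = tf.keras.Sequential(
            [
                tf.keras.layers.Dense(hp.Int("units", 4, 16), activation="relu"),
                tf.keras.layers.Dense(1),
            ]
        )
        model.compile(optimizer="adam", loss="mse")
        return model


class MyTuner(keras_tuner.engine.tuner.Tuner):
    # 1.1.0 signature: fit arguments arrive as *args/**kwargs instead of
    # the old explicit fit_args/fit_kwargs pair.
    def _build_and_fit_model(self, trial, *args, **kwargs):
        # self.hypermodel is the HyperModel itself; no second .hypermodel
        # hop is needed to reach build() or custom attributes.
        model = self.hypermodel.build(trial.hyperparameters)
        return model.fit(*args, **kwargs)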

autokeras/tuners/greedy.py

Lines changed: 2 additions & 1 deletion
@@ -11,6 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import copy
 from typing import Any
 from typing import Dict
 from typing import List

@@ -94,7 +95,7 @@ class GreedyOracle(keras_tuner.Oracle):

     def __init__(self, initial_hps=None, seed=None, **kwargs):
         super().__init__(seed=seed, **kwargs)
-        self.initial_hps = initial_hps or []
+        self.initial_hps = copy.deepcopy(initial_hps) or []
         self._tried_initial_hps = [False] * len(self.initial_hps)

     def get_state(self):
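Why the deepcopy above matters: a plain assignment would alias the caller's list of hyperparameter dicts (for instance the module-level task-specific configurations reused across AutoModel instances), so any in-place mutation downstream would leak back into those shared defaults. A toy illustration of the aliasing hazard, independent of AutoKeras:

import copy

shared_defaults = [{"optimizer": "adam"}]

# Aliasing: both names refer to the same dicts, so edits leak back.
aliased = shared_defaults
aliased[0]["optimizer"] = "sgd"
print(shared_defaults[0]["optimizer"])  # sgd

# Deep copy: the new owner gets an independent structure.
shared_defaults = [{"optimizer": "adam"}]
owned = copy.deepcopy(shared_defaults)
owned[0]["optimizer"] = "sgd"
print(shared_defaults[0]["optimizer"])  # adam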

setup.py

Lines changed: 1 addition & 1 deletion
@@ -18,7 +18,7 @@
     keywords=["AutoML", "Keras"],
     install_requires=[
         "packaging",
-        "keras-tuner>=1.0.2",
+        "keras-tuner==1.1.0rc0",
         "tf-nightly==2.8.0.dev20211016",
         "scikit-learn",
         "pandas",

tests/unit_tests/blocks/basic_test.py

Lines changed: 1 addition & 1 deletion
@@ -229,7 +229,7 @@ def test_conv_get_config_has_all_attributes():


 def test_rnn_build_return_tensor():
-    block = blocks.RNNBlock()
+    block = blocks.RNNBlock(bidirectional=False)

     outputs = block.build(
         keras_tuner.HyperParameters(),

tests/unit_tests/tuners/task_specific_test.py

Lines changed: 2 additions & 2 deletions
@@ -65,8 +65,8 @@ def test_txt_clf_init_hp2_equals_hp_of_a_model(tmp_path):
     clf.inputs[0].batch_size = 6
     clf.inputs[0].num_samples = 1000
     clf.outputs[0].in_blocks[0].shape = (10,)
-    clf.tuner.hypermodel.hypermodel.epochs = 1000
-    clf.tuner.hypermodel.hypermodel.num_samples = 20000
+    clf.tuner.hypermodel.epochs = 1000
+    clf.tuner.hypermodel.num_samples = 20000
     init_hp = task_specific.TEXT_CLASSIFIER[2]
     hp = keras_tuner.HyperParameters()
     hp.values = copy.copy(init_hp)
