Adapt to KerasTuner 1.1.0rc0. (#1640)
Co-authored-by: Haifeng Jin <[email protected]>
haifeng-jin authored Oct 20, 2021
1 parent 5037540 commit 7464bce
Showing 6 changed files with 25 additions and 22 deletions.
5 changes: 5 additions & 0 deletions autokeras/auto_model.py
@@ -193,8 +193,12 @@ def _build_graph(self):
             graph = graph_module.Graph(inputs=self.inputs, outputs=self.outputs)
         # Using input/output API.
         elif all([isinstance(output, head_module.Head) for output in self.outputs]):
+            # Clear session to reset get_uid(). The names of the blocks will
+            # start to count from 1 for new blocks in a new AutoModel afterwards.
+            tf.keras.backend.clear_session()
             graph = self._assemble()
             self.outputs = graph.outputs
+            tf.keras.backend.clear_session()

         return graph
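The two clear_session() calls added above work because Keras auto-generates layer and block names from per-prefix counters exposed by tf.keras.backend.get_uid(), and clear_session() resets those counters. A minimal standalone sketch of that behavior (not part of this commit):

import tensorflow as tf

# Keras derives auto-names ("dense", "dense_1", ...) from per-prefix counters.
print(tf.keras.backend.get_uid("block"))  # 1
print(tf.keras.backend.get_uid("block"))  # 2

# clear_session() resets the counters, so blocks assembled in a fresh
# AutoModel start counting from 1 instead of continuing the old sequence.
tf.keras.backend.clear_session()
print(tf.keras.backend.get_uid("block"))  # 1 again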

@@ -375,6 +379,7 @@ def _build_hyper_pipeline(self, dataset):
            inputs=[node.get_hyper_preprocessors() for node in self.inputs],
            outputs=[head.get_hyper_preprocessors() for head in self._heads],
        )
+        self.tuner.hypermodel.hyper_pipeline = self.tuner.hyper_pipeline

    def _convert_to_dataset(self, x, y, validation_data, batch_size):
        """Convert the data to tf.data.Dataset."""
31 changes: 14 additions & 17 deletions autokeras/engine/tuner.py
@@ -18,7 +18,6 @@

import keras_tuner
import tensorflow as tf
-from keras_tuner.engine import hypermodel as hm_module
from tensorflow.keras import callbacks as tf_callbacks
from tensorflow.keras.layers.experimental import preprocessing
from tensorflow.python.util import nest
@@ -43,7 +42,7 @@ class AutoTuner(keras_tuner.engine.tuner.Tuner):
    # Arguments
        oracle: keras_tuner Oracle.
-        hypermodel: keras_tuner KerasHyperModel.
+        hypermodel: keras_tuner HyperModel.
        **kwargs: The args supported by KerasTuner.
    """

@@ -52,15 +51,15 @@ def __init__(self, oracle, hypermodel, **kwargs):
        self._finished = False
        super().__init__(oracle, hypermodel, **kwargs)
        # Save or load the HyperModel.
-        self.hypermodel.hypermodel.save(os.path.join(self.project_dir, "graph"))
+        self.hypermodel.save(os.path.join(self.project_dir, "graph"))
        self.hyper_pipeline = None

    def _populate_initial_space(self):
        # Override the function to prevent building the model during initialization.
        return

    def get_best_model(self):
-        with hm_module.maybe_distribute(self.distribution_strategy):
+        with keras_tuner.engine.tuner.maybe_distribute(self.distribution_strategy):
            model = tf.keras.models.load_model(self.best_model_path)
        return model
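Only the import path changes here: maybe_distribute now lives in keras_tuner.engine.tuner rather than the removed hypermodel module. For context, it is a small context manager that enters the tf.distribute strategy's scope when one is configured; a simplified sketch from memory of KerasTuner, not verbatim source:

import contextlib

@contextlib.contextmanager
def maybe_distribute(distribution_strategy):
    # No strategy configured: run the block as-is.
    if distribution_strategy is None:
        yield
    # Otherwise, variables created inside follow the distribution strategy.
    else:
        with distribution_strategy.scope():
            yield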

@@ -80,27 +79,27 @@ def _prepare_model_build(self, hp, **kwargs):
        pipeline = self.hyper_pipeline.build(hp, dataset)
        pipeline.fit(dataset)
        dataset = pipeline.transform(dataset)
-        self.hypermodel.hypermodel.set_io_shapes(data_utils.dataset_shape(dataset))
+        self.hypermodel.set_io_shapes(data_utils.dataset_shape(dataset))

        if "validation_data" in kwargs:
            validation_data = pipeline.transform(kwargs["validation_data"])
        else:
            validation_data = None
        return pipeline, dataset, validation_data

-    def _build_and_fit_model(self, trial, fit_args, fit_kwargs):
+    def _build_and_fit_model(self, trial, *args, **kwargs):
+        model = self.hypermodel.build(trial.hyperparameters)
        (
            pipeline,
-            fit_kwargs["x"],
-            fit_kwargs["validation_data"],
-        ) = self._prepare_model_build(trial.hyperparameters, **fit_kwargs)
+            kwargs["x"],
+            kwargs["validation_data"],
+        ) = self._prepare_model_build(trial.hyperparameters, **kwargs)
        pipeline.save(self._pipeline_path(trial.trial_id))

-        model = self.hypermodel.build(trial.hyperparameters)
-        self.adapt(model, fit_kwargs["x"])
+        self.adapt(model, kwargs["x"])

        _, history = utils.fit_with_adaptive_batch_size(
-            model, self.hypermodel.hypermodel.batch_size, **fit_kwargs
+            model, self.hypermodel.batch_size, **kwargs
        )
        return history
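The new signature tracks KerasTuner 1.1.0, which forwards whatever was passed to search() through run_trial() to this hook unpacked, instead of bundling it into fit_args/fit_kwargs containers. A hedged sketch of the two conventions (preprocess() is a hypothetical stand-in for the pipeline work above):

# KerasTuner 1.0.x convention: fit arguments arrive as two containers.
def _build_and_fit_model_old(self, trial, fit_args, fit_kwargs):
    fit_kwargs["x"] = preprocess(fit_kwargs["x"])  # hypothetical helper

# KerasTuner 1.1.0 convention: search(x=..., epochs=...) reaches the hook
# unpacked, so the override rewrites kwargs in place before fitting.
def _build_and_fit_model_new(self, trial, *args, **kwargs):
    kwargs["x"] = preprocess(kwargs["x"])  # hypothetical helper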

@@ -165,7 +164,7 @@ def search(
        if callbacks is None:
            callbacks = []

-        self.hypermodel.hypermodel.set_fit_args(validation_split, epochs=epochs)
+        self.hypermodel.set_fit_args(validation_split, epochs=epochs)

        # Insert early-stopping for adaptive number of epochs.
        epochs_provided = True
@@ -216,9 +215,7 @@ def search(
                )
                copied_fit_kwargs.pop("validation_data")

-            self.hypermodel.hypermodel.set_fit_args(
-                0, epochs=copied_fit_kwargs["epochs"]
-            )
+            self.hypermodel.set_fit_args(0, epochs=copied_fit_kwargs["epochs"])
            pipeline, model, history = self.final_fit(**copied_fit_kwargs)
        else:
            # TODO: Add return history functionality in Keras Tuner
@@ -270,7 +267,7 @@ def final_fit(self, **kwargs):
        model = self._build_best_model()
        self.adapt(model, kwargs["x"])
        model, history = utils.fit_with_adaptive_batch_size(
-            model, self.hypermodel.hypermodel.batch_size, **kwargs
+            model, self.hypermodel.batch_size, **kwargs
        )
        return pipeline, model, history
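Both call sites above rely on utils.fit_with_adaptive_batch_size, which, as I understand the AutoKeras helper, retries training with a smaller batch size when the device runs out of memory. A simplified sketch of the idea, assuming an unbatched tf.data.Dataset (the real helper differs in details):

import tensorflow as tf

def fit_with_adaptive_batch_size(model, batch_size, x, **fit_kwargs):
    # Halve the batch size and retry whenever fitting exhausts device memory.
    while batch_size > 0:
        try:
            history = model.fit(x.batch(batch_size), **fit_kwargs)
            return model, history
        except tf.errors.ResourceExhaustedError:
            batch_size //= 2
    raise RuntimeError("Out of memory even with batch_size=1.")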

3 changes: 2 additions & 1 deletion autokeras/tuners/greedy.py
@@ -11,6 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+import copy
from typing import Any
from typing import Dict
from typing import List
@@ -94,7 +95,7 @@ class GreedyOracle(keras_tuner.Oracle):

    def __init__(self, initial_hps=None, seed=None, **kwargs):
        super().__init__(seed=seed, **kwargs)
-        self.initial_hps = initial_hps or []
+        self.initial_hps = copy.deepcopy(initial_hps) or []
        self._tried_initial_hps = [False] * len(self.initial_hps)

    def get_state(self):
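The deepcopy guards GreedyOracle against aliasing: initial_hps is a list of dicts supplied by the caller (e.g. task_specific.TEXT_CLASSIFIER), and without a deep copy any later mutation on either side would leak through the shared reference. A minimal standalone illustration:

import copy

initial_hps = [{"optimizer": "adam", "learning_rate": 1e-3}]

aliased = initial_hps or []               # old behavior: shares the dicts
owned = copy.deepcopy(initial_hps) or []  # new behavior: private copy

initial_hps[0]["optimizer"] = "sgd"       # caller-side mutation
print(aliased[0]["optimizer"])            # "sgd"  (leaked in)
print(owned[0]["optimizer"])              # "adam" (isolated)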
2 changes: 1 addition & 1 deletion setup.py
@@ -18,7 +18,7 @@
    keywords=["AutoML", "Keras"],
    install_requires=[
        "packaging",
-        "keras-tuner>=1.0.2",
+        "keras-tuner==1.1.0rc0",
        "tf-nightly==2.8.0.dev20211016",
        "scikit-learn",
        "pandas",
2 changes: 1 addition & 1 deletion tests/unit_tests/blocks/basic_test.py
@@ -229,7 +229,7 @@ def test_conv_get_config_has_all_attributes():


def test_rnn_build_return_tensor():
-    block = blocks.RNNBlock()
+    block = blocks.RNNBlock(bidirectional=False)

    outputs = block.build(
        keras_tuner.HyperParameters(),
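Pinning bidirectional=False narrows what the test builds: as with other AutoKeras blocks, an argument left unset is exposed to the tuner as a hyperparameter, while a fixed one always yields the same graph, here a unidirectional RNN. Usage contrast (a sketch; assumes autokeras is importable):

import autokeras as ak

# Left unset: whether the RNN is bidirectional is chosen by the tuner.
tunable_block = ak.RNNBlock()

# Pinned: the block always builds a plain (unidirectional) RNN.
fixed_block = ak.RNNBlock(bidirectional=False)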
4 changes: 2 additions & 2 deletions tests/unit_tests/tuners/task_specific_test.py
@@ -65,8 +65,8 @@ def test_txt_clf_init_hp2_equals_hp_of_a_model(tmp_path):
    clf.inputs[0].batch_size = 6
    clf.inputs[0].num_samples = 1000
    clf.outputs[0].in_blocks[0].shape = (10,)
-    clf.tuner.hypermodel.hypermodel.epochs = 1000
-    clf.tuner.hypermodel.hypermodel.num_samples = 20000
+    clf.tuner.hypermodel.epochs = 1000
+    clf.tuner.hypermodel.num_samples = 20000
    init_hp = task_specific.TEXT_CLASSIFIER[2]
    hp = keras_tuner.HyperParameters()
    hp.values = copy.copy(init_hp)
