def test_hyper_graph_cycle(tmp_dir):
    """GraphAutoModel must reject a graph that contains a cycle."""
    # Two independent inputs feed dense blocks that merge into one head.
    left_input = ak.Input()
    right_input = ak.Input()
    left_branch = ak.DenseBlock()(left_input)
    right_branch = ak.DenseBlock()(right_input)
    merged = ak.Merge()([left_branch, right_branch])
    head = ak.RegressionHead()
    output_node = head(merged)
    # Deliberately rewire the head back onto an upstream node -> cycle.
    head.outputs = left_branch
    with pytest.raises(ValueError) as info:
        ak.GraphAutoModel([left_input, right_input], output_node,
                          directory=tmp_dir)
    assert str(info.value) == 'The network has a cycle.'
def test_set_hp():
    """override_hps must pin dense_block_1/num_layers to the single value 6.

    Builds a minimal input -> dense -> regression graph, overrides one
    hyperparameter, builds the Keras graph, then verifies the override is
    present in the resulting search space.
    """
    input_node = ak.Input((32, ))
    output_node = input_node
    output_node = ak.DenseBlock()(output_node)
    head = ak.RegressionHead()
    head.output_shape = (1, )
    output_node = head(output_node)
    graph = graph_module.HyperGraph(
        input_node,
        output_node,
        override_hps=[
            hp_module.Choice('dense_block_1/num_layers', [6], default=6)
        ])
    hp = kerastuner.HyperParameters()
    plain_graph = graph.hyper_build(hp)
    plain_graph.build_keras_graph().build(hp)
    # Search the space explicitly. The old loop ended in a bare
    # `assert False`, which is stripped under `python -O` and gives no
    # failure message; an explicit lookup fails with a clear reason.
    overridden = next(
        (single_hp for single_hp in hp.space
         if single_hp.name == 'dense_block_1/num_layers'),
        None)
    assert overridden is not None, \
        'dense_block_1/num_layers missing from the search space'
    assert len(overridden.values) == 1
    assert overridden.values[0] == 6
def test_graph_save_load(tmp_path):
    """A Graph with override_hps survives a save/load round trip."""
    dense_in = ak.Input()
    conv_in = ak.Input()
    dense_branch = ak.DenseBlock()(dense_in)
    conv_branch = ak.ConvBlock()(conv_in)
    merged = ak.Merge()([dense_branch, conv_branch])
    reg_head = ak.RegressionHead()(merged)
    clf_head = ak.ClassificationHead()(merged)
    graph = graph_module.Graph(
        inputs=[dense_in, conv_in],
        outputs=[reg_head, clf_head],
        override_hps=[
            hp_module.Choice("dense_block_1/num_layers", [6], default=6)
        ],
    )
    save_path = os.path.join(tmp_path, "graph")
    graph.save(save_path)
    graph = graph_module.load_graph(save_path)
    # The reloaded graph must preserve topology and the overridden hp.
    assert len(graph.inputs) == 2
    assert len(graph.outputs) == 2
    assert isinstance(graph.inputs[0].out_blocks[0], ak.DenseBlock)
    assert isinstance(graph.inputs[1].out_blocks[0], ak.ConvBlock)
    assert isinstance(graph.override_hps[0], hp_module.Choice)
def applyAutoKeras(X_train, y_train, X_test, y_test, SavePath,
                   max_trials=100, epochs=300, useSavedModels=True):
    """Fit (or reload) an AutoKeras structured-data regressor and report MAE.

    Args:
        X_train, y_train: training features and targets; y_train is assumed
            2-D and only its first column is used — TODO confirm with callers.
        X_test, y_test: held-out features/targets for the final MAE report.
        SavePath: root directory for the AutoKeras project files.
        max_trials: number of architecture search trials.
        epochs: training epochs per trial.
        useSavedModels: when True and a saved best model exists, skip the
            search and load it from disk instead.

    Returns:
        The predictions for X_test.
    """
    # Build the paths once with os.path.join instead of ad-hoc '+' concat.
    model_dir = os.path.join(SavePath, "keras_auto_model")
    best_model_dir = os.path.join(model_dir, "best_model")
    if not useSavedModels or not os.path.isdir(best_model_dir):
        input_node = ak.StructuredDataInput()
        output_node = ak.DenseBlock()(input_node)
        #output_node = ak.ConvBlock()(output_node)
        output_node = ak.RegressionHead()(output_node)
        AKRegressor = ak.AutoModel(
            inputs=input_node,
            outputs=output_node,
            max_trials=max_trials,
            overwrite=True,
            tuner="bayesian",
            project_name=model_dir)
        print(" X_train shape: {0}\n y_train shape: {1}\n X_test shape: {2}\n y_test shape: {3}".format(X_train.shape, y_train.shape, X_test.shape, y_test.shape))
        # Bug fix: int(n / 10) is 0 when fewer than 10 samples are given,
        # which would crash fit(); clamp the batch size to at least 1.
        batch_size = max(1, X_train.shape[0] // 10)
        AKRegressor.fit(x=X_train, y=y_train[:, 0], epochs=epochs, verbose=1,
                        batch_size=batch_size,
                        shuffle=False, use_multiprocessing=True)
        # NOTE(review): export_model() returns the best Keras model but the
        # result is discarded here; loading below relies on the tuner having
        # written best_model to disk — confirm that is intended.
        AKRegressor.export_model()
    else:
        AKRegressor = tf.keras.models.load_model(best_model_dir)
    y_hat = AKRegressor.predict(X_test)
    print("AUTOKERAS - Score: ")
    print("MAE: %.4f" % mean_absolute_error(y_test[:, 0], y_hat))
    return y_hat
def test_graph_save_load(tmp_dir):
    """HyperGraph state survives save -> from_config -> reload."""
    first_input = ak.Input()
    second_input = ak.Input()
    dense_branch = ak.DenseBlock()(first_input)
    conv_branch = ak.ConvBlock()(second_input)
    merged = ak.Merge()([dense_branch, conv_branch])
    reg_out = ak.RegressionHead()(merged)
    clf_out = ak.ClassificationHead()(merged)
    graph = graph_module.HyperGraph(
        inputs=[first_input, second_input],
        outputs=[reg_out, clf_out],
        override_hps=[
            hp_module.Choice('dense_block_1/num_layers', [6], default=6)
        ])
    path = os.path.join(tmp_dir, 'graph')
    graph.save(path)
    # Rebuild the graph from its config, then reload saved state from disk.
    graph = graph_module.HyperGraph.from_config(graph.get_config())
    graph.reload(path)
    assert len(graph.inputs) == 2
    assert len(graph.outputs) == 2
    assert isinstance(graph.inputs[0].out_blocks[0], ak.DenseBlock)
    assert isinstance(graph.inputs[1].out_blocks[0], ak.ConvBlock)
    assert isinstance(graph.override_hps[0], hp_module.Choice)
def test_input_missing(tmp_dir):
    """Building with only one of two required inputs raises ValueError."""
    first_input = ak.Input()
    second_input = ak.Input()
    merged = ak.Merge()([ak.DenseBlock()(first_input),
                         ak.DenseBlock()(second_input)])
    output_node = ak.RegressionHead()(merged)
    first_input.shape = (32,)
    second_input.shape = (32,)
    output_node[0].shape = (1,)
    # Only the first input is passed to the model, so the build must fail.
    with pytest.raises(ValueError) as info:
        graph = ak.GraphAutoModel(first_input, output_node, directory=tmp_dir)
        graph.build(kerastuner.HyperParameters())
    assert str(info.value).startswith('A required input is missing for HyperModel')
def test_evaluate(tuner_fn, tmp_dir):
    """Smoke-test GraphAutoModel.evaluate with the tuner machinery mocked.

    `tuner_fn` is presumably a patched tuner factory injected by a
    mock.patch decorator on this test — TODO confirm against the decorators
    outside this view. No real search runs; we only assert the mocks were
    exercised.
    """
    pg = mock.Mock()
    # preprocess() must return a two-element tuple (consumed by fit/evaluate).
    pg.preprocess.return_value = (mock.Mock(), mock.Mock())
    tuner_class = tuner_fn.return_value
    tuner = tuner_class.return_value
    # get_best_model() returns a (preprocess_graph, keras_model)-style pair.
    tuner.get_best_model.return_value = (pg, mock.Mock())
    x_train = np.random.rand(100, 32)
    y_train = np.random.rand(100, 1)
    input_node = ak.Input()
    output_node = input_node
    output_node = ak.DenseBlock()(output_node)
    output_node = ak.RegressionHead()(output_node)
    auto_model = ak.GraphAutoModel(input_node, output_node,
                                   directory=tmp_dir,
                                   max_trials=1)
    auto_model.fit(x_train, y_train, epochs=1,
                   validation_data=(x_train, y_train))
    auto_model.evaluate(x_train, y_train)
    # The mocked tuner chain must have been driven by fit/evaluate.
    assert tuner_fn.called
    assert tuner_class.called
    assert tuner.get_best_model.called
def test_input_output_disconnect(tmp_dir):
    """An input that never reaches the output must trigger a ValueError."""
    orphan_input = ak.Input()
    # This branch dangles: its dense block is never wired to any head.
    _ = ak.DenseBlock()(orphan_input)
    connected_input = ak.Input()
    node = ak.DenseBlock()(connected_input)
    output_node = ak.RegressionHead()(node)
    connected_input.shape = (32, )
    output_node[0].shape = (1, )
    # The model is given the orphan input but an output from the other
    # branch, so inputs and outputs are disconnected.
    with pytest.raises(ValueError) as info:
        graph = ak.GraphAutoModel(orphan_input, output_node,
                                  directory=tmp_dir)
        graph.build(kerastuner.HyperParameters())
    assert str(info.value) == 'Inputs and outputs not connected.'
def test_text_and_structured_data(tmp_path):
    """End-to-end multi-modal fit: text + structured data into two heads."""
    # Prepare the data.
    num_instances = 80
    (x_text, y_train), (x_test, y_test) = utils.imdb_raw()
    x_structured_data = pd.read_csv(utils.TRAIN_CSV_PATH)
    x_text = x_text[:num_instances]
    x_structured_data = x_structured_data[:num_instances]
    y_classification = utils.generate_one_hot_labels(
        num_instances=num_instances, num_classes=3)
    y_regression = utils.generate_data(num_instances=num_instances,
                                       shape=(1, ))
    # Build model and train.
    # Structured branch: categorical encoding followed by dense layers.
    structured_data_input = ak.StructuredDataInput()
    structured_data_output = ak.CategoricalToNumerical()(structured_data_input)
    structured_data_output = ak.DenseBlock()(structured_data_output)
    # Text branch 1: integer sequences -> embedding -> separable conv.
    text_input = ak.TextInput()
    outputs1 = ak.TextToIntSequence()(text_input)
    outputs1 = ak.Embedding()(outputs1)
    outputs1 = ak.ConvBlock(separable=True)(outputs1)
    outputs1 = ak.SpatialReduction()(outputs1)
    # Text branch 2: n-gram vectors -> dense layers.
    outputs2 = ak.TextToNgramVector()(text_input)
    outputs2 = ak.DenseBlock()(outputs2)
    text_output = ak.Merge()((outputs1, outputs2))
    merged_outputs = ak.Merge()((structured_data_output, text_output))
    # One regression head and one classification head share the trunk.
    regression_outputs = ak.RegressionHead()(merged_outputs)
    classification_outputs = ak.ClassificationHead()(merged_outputs)
    automodel = ak.AutoModel(
        inputs=[text_input, structured_data_input],
        directory=tmp_path,
        outputs=[regression_outputs, classification_outputs],
        max_trials=2,
        tuner=ak.Hyperband,
        seed=utils.SEED,
    )
    automodel.fit(
        (x_text, x_structured_data),
        (y_regression, y_classification),
        validation_split=0.2,
        epochs=1,
    )
def test_merge(tmp_dir):
    """Two dense branches merged into one regression head train end to end."""
    features = np.random.rand(100, 32)
    targets = np.random.rand(100)
    left_input = ak.Input()
    right_input = ak.Input()
    merged = ak.Merge()([ak.DenseBlock()(left_input),
                         ak.DenseBlock()(right_input)])
    head = ak.RegressionHead()(merged)
    graph = ak.GraphAutoModel([left_input, right_input], head,
                              directory=tmp_dir,
                              max_trials=1)
    graph.fit([features, features], targets,
              epochs=1,
              batch_size=100,
              verbose=False)
    # A regression head produces one output column per sample.
    assert graph.predict([features, features]).shape == (100, 1)
def test_graph_basics():
    """A minimal input->dense->regression graph builds a well-shaped model."""
    source = ak.Input(shape=(30, ))
    hidden = ak.DenseBlock()(source)
    sink = ak.RegressionHead(output_shape=(1, ))(hidden)
    hypermodel = graph_module.HyperBuiltGraphHyperModel(source, sink)
    model = hypermodel.build(kerastuner.HyperParameters())
    # Batch dimension is None; feature dims follow the declared shapes.
    assert model.input_shape == (None, 30)
    assert model.output_shape == (None, 1)
def test_hyper_graph_cycle(tmp_dir):
    """Building a graph whose head feeds back upstream raises ValueError."""
    input_node1 = ak.Input()
    input_node2 = ak.Input()
    output_node1 = ak.DenseBlock()(input_node1)
    output_node2 = ak.DenseBlock()(input_node2)
    output_node = ak.Merge()([output_node1, output_node2])
    head = ak.RegressionHead()
    output_node = head(output_node)
    # Point the head's outputs at an upstream node to create a cycle.
    head.outputs = output_node1
    # Shapes must be set so the failure comes from cycle detection, not
    # from shape inference.
    input_node1.shape = (32,)
    input_node2.shape = (32,)
    output_node[0].shape = (1,)
    with pytest.raises(ValueError) as info:
        graph = ak.GraphAutoModel([input_node1, input_node2],
                                  output_node,
                                  directory=tmp_dir)
        graph.build(kerastuner.HyperParameters())
    assert str(info.value) == 'The network has a cycle.'
def create_image_regressor(self):
    """Assemble a conv -> dense image-regression AutoModel (10 trials)."""
    image_input = ak.ImageInput()
    features = ak.ConvBlock()(image_input)
    features = ak.DenseBlock()(features)
    prediction = ak.RegressionHead()(features)
    return ak.AutoModel(inputs=image_input, outputs=prediction, max_trials=10)
def functional_api():
    """Train an MNIST classifier built with the functional AutoKeras API.

    Returns the evaluation result on the MNIST test split.
    """
    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    image_input = ak.ImageInput()
    node = ak.Normalization()(image_input)
    node = ak.ConvBlock()(node)
    node = ak.SpatialReduction()(node)
    node = ak.DenseBlock()(node)
    label_output = ak.ClassificationHead()(node)
    clf = ak.GraphAutoModel(image_input, label_output, seed=5, max_trials=3)
    clf.fit(x_train, y_train, validation_split=0.2)
    return clf.evaluate(x_test, y_test)
def CreateSupergraph(output_dir, hp_tuner):
    """Build a fixed conv -> dense 4-class AutoModel search space.

    Args:
        output_dir: directory where the tuner stores its results.
        hp_tuner: tuner name/class forwarded to ak.AutoModel.
    """
    image_in = ak.Input()
    conv_stack = ak.ConvBlock(num_blocks=1,
                              num_layers=3,
                              max_pooling=True,
                              dropout=0)(image_in)
    dense_stack = ak.DenseBlock(dropout=0)(conv_stack)
    class_out = ak.ClassificationHead(num_classes=4,
                                      metrics=['accuracy'])(dense_stack)
    return ak.AutoModel(inputs=image_in,
                        outputs=class_out,
                        max_trials=3,
                        directory=output_dir,
                        project_name="autoML",
                        tuner=hp_tuner,
                        seed=123)
def functional_api():
    """Train a ResNeXt-based CIFAR-10 classifier via the functional API.

    Returns the evaluation result on the CIFAR-10 test split.
    """
    (x_train, y_train), (x_test, y_test) = cifar10.load_data()
    input_node = ak.ImageInput()
    output_node = input_node
    output_node = ak.Normalization()(output_node)
    output_node = ak.ImageAugmentation()(output_node)
    # version='next' selects the ResNeXt variant of the block.
    output_node = ak.ResNetBlock(version='next')(output_node)
    output_node = ak.SpatialReduction()(output_node)
    output_node = ak.DenseBlock()(output_node)
    output_node = ak.ClassificationHead()(output_node)
    clf = ak.AutoModel(input_node, output_node, seed=5, max_trials=3)
    clf.fit(x_train, y_train, validation_split=0.2)
    return clf.evaluate(x_test, y_test)
def test_merge(tmp_dir):
    """Merged two-input graph builds directly into a trainable Keras model."""
    x_train = np.random.rand(100, 32)
    y_train = np.random.rand(100)
    input_node1 = ak.Input()
    input_node2 = ak.Input()
    output_node1 = ak.DenseBlock()(input_node1)
    output_node2 = ak.DenseBlock()(input_node2)
    output_node = ak.Merge()([output_node1, output_node2])
    output_node = ak.RegressionHead()(output_node)
    # Shapes are set by hand because build() is called without a search.
    input_node1.shape = (32,)
    input_node2.shape = (32,)
    output_node[0].shape = (1,)
    graph = ak.GraphAutoModel([input_node1, input_node2],
                              output_node,
                              directory=tmp_dir)
    model = graph.build(kerastuner.HyperParameters())
    model.fit([x_train, x_train], y_train,
              epochs=1,
              batch_size=100,
              verbose=False)
    result = model.predict([x_train, x_train])
    # Regression head yields one output column per sample.
    assert result.shape == (100, 1)
def test_evaluate(tuner_fn, tmp_path):
    """evaluate() accepts a tf.data.Dataset after a one-epoch fit."""
    features = np.random.rand(100, 32)
    targets = np.random.rand(100, 1)
    source = ak.Input()
    hidden = ak.DenseBlock()(source)
    sink = ak.RegressionHead()(hidden)
    auto_model = ak.AutoModel(
        source, sink, directory=tmp_path, max_trials=1
    )
    auto_model.fit(features, targets, epochs=1,
                   validation_data=(features, targets))
    dataset = tf.data.Dataset.from_tensor_slices((features, targets))
    auto_model.evaluate(dataset)
    # The patched tuner factory must have been driven by fit/evaluate.
    assert tuner_fn.called
def test_auto_model_basic(tmp_dir):
    """End-to-end fit/predict smoke test for GraphAutoModel."""
    x_train = np.random.rand(100, 32)
    y_train = np.random.rand(100)
    input_node = ak.Input()
    output_node = input_node
    output_node = ak.DenseBlock()(output_node)
    output_node = ak.RegressionHead()(output_node)
    auto_model = ak.GraphAutoModel(input_node, output_node, directory=tmp_dir)
    # NOTE(review): 'NUM_TRAILS' looks like a typo for 'NUM_TRIALS' — confirm
    # against the attribute actually defined on ak.const.Constant; if it is
    # misspelled here, this line silently sets a new attribute and has no
    # effect on the number of trials.
    ak.const.Constant.NUM_TRAILS = 2
    auto_model.fit(x_train, y_train, epochs=2)
    result = auto_model.predict(x_train)
    # Regression head yields one output column per sample.
    assert result.shape == (100, 1)
def test_graph_auto_model_basic(tmp_dir):
    """fit/predict round trip on a one-branch regression graph."""
    features = np.random.rand(100, 32)
    targets = np.random.rand(100)
    source = ak.Input()
    hidden = ak.DenseBlock()(source)
    sink = ak.RegressionHead()(hidden)
    graph = ak.GraphAutoModel(source, sink,
                              directory=tmp_dir,
                              max_trials=1)
    graph.fit(features, targets,
              epochs=1,
              validation_data=(features, targets))
    # One regression output per input row.
    assert graph.predict(features).shape == (100, 1)
def test_hyper_graph_basic(tmp_dir):
    """build() yields a trainable Keras model once node shapes are set."""
    features = np.random.rand(100, 32)
    targets = np.random.rand(100)
    source = ak.Input()
    hidden = ak.DenseBlock()(source)
    sink = ak.RegressionHead()(hidden)
    # Shapes are assigned by hand because build() skips the usual search.
    source.shape = (32,)
    sink[0].shape = (1,)
    graph = ak.GraphAutoModel(source, sink, directory=tmp_dir)
    model = graph.build(kerastuner.HyperParameters())
    model.fit(features, targets, epochs=1, batch_size=100, verbose=False)
    assert model.predict(features).shape == (100, 1)
def test_evaluate(tmp_dir):
    """evaluate() runs cleanly on the same arrays used for training."""
    features = np.random.rand(100, 32)
    targets = np.random.rand(100, 1)
    source = ak.Input()
    hidden = ak.DenseBlock()(source)
    sink = ak.RegressionHead()(hidden)
    auto_model = ak.GraphAutoModel(source, sink,
                                   directory=tmp_dir,
                                   max_trials=1)
    auto_model.fit(features, targets, epochs=1,
                   validation_data=(features, targets))
    auto_model.evaluate(features, targets)
def test_minist(self):
    """Train an MNIST conv/dense classifier, export it, and save to HDF5."""
    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    print(x_train.shape)  # (60000, 28, 28)
    print(y_train.shape)  # (60000,)
    print(y_train[:3])  # first three training labels: array([5, 0, 4], dtype=uint8)
    input_node = ak.ImageInput()
    output_node = ak.ConvBlock()(input_node)
    output_node = ak.DenseBlock()(output_node)
    output_node = ak.ClassificationHead()(output_node)
    clf = ak.AutoModel(inputs=input_node, outputs=output_node, max_trials=10)
    clf.fit(x_train, y_train)
    model = clf.export_model()
    print(type(model))
    # Bug fix: '.hd5' is not an extension Keras maps to HDF5, so the
    # original call silently fell back to the SavedModel format; '.h5'
    # selects the intended single-file HDF5 format.
    model.save('./auto_model.h5')
def functional_api():
    """Train an IMDB sentiment classifier on padded integer sequences.

    Returns the evaluation result on the IMDB test split.
    """
    max_features = 20000  # vocabulary size kept by imdb.load_data
    max_words = 400       # sequences padded/truncated to this length
    (x_train, y_train), (x_test, y_test) = tf.keras.datasets.imdb.load_data(
        num_words=max_features, index_from=3)
    x_train = tf.keras.preprocessing.sequence.pad_sequences(x_train,
                                                            maxlen=max_words)
    x_test = tf.keras.preprocessing.sequence.pad_sequences(x_test,
                                                           maxlen=max_words)
    print(x_train.dtype)
    print(x_train[:10])
    input_node = ak.Input()
    output_node = input_node
    output_node = ak.EmbeddingBlock(max_features=max_features)(output_node)
    output_node = ak.ConvBlock()(output_node)
    output_node = ak.SpatialReduction()(output_node)
    output_node = ak.DenseBlock()(output_node)
    output_node = ak.ClassificationHead()(output_node)
    clf = ak.AutoModel(input_node, output_node, seed=5, max_trials=3)
    clf.fit(x_train, y_train, validation_split=0.2)
    return clf.evaluate(x_test, y_test)
def test_graph_save_load(tmp_path):
    """Graph topology survives a save/load round trip."""
    dense_in = ak.Input()
    conv_in = ak.Input()
    merged = ak.Merge()([ak.DenseBlock()(dense_in),
                         ak.ConvBlock()(conv_in)])
    reg_head = ak.RegressionHead()(merged)
    clf_head = ak.ClassificationHead()(merged)
    graph = graph_module.Graph(
        inputs=[dense_in, conv_in],
        outputs=[reg_head, clf_head],
    )
    target = os.path.join(tmp_path, "graph")
    graph.save(target)
    graph = graph_module.load_graph(target)
    # The reloaded graph must preserve input count, output count, and
    # the block type wired to each input.
    assert len(graph.inputs) == 2
    assert len(graph.outputs) == 2
    assert isinstance(graph.inputs[0].out_blocks[0], ak.DenseBlock)
    assert isinstance(graph.inputs[1].out_blocks[0], ak.ConvBlock)
def test_add_early_stopping(_2, get_trials, _1, _, run_trial, tmp_dir):
    """RandomSearch injects EarlyStopping during search but not final fit.

    The underscore parameters are presumably patched objects injected by
    stacked mock.patch decorators outside this view — TODO confirm their
    order against the decorators.
    """
    trial = kerastuner.engine.trial.Trial()
    trial.hyperparameters = kerastuner.HyperParameters()
    # The patched get_trials returns our single fabricated trial.
    get_trials.return_value = [trial]
    input_shape = (32,)
    num_instances = 100
    num_classes = 10
    x = common.generate_data(num_instances=num_instances, shape=input_shape,
                             dtype='dataset')
    y = common.generate_one_hot_labels(num_instances=num_instances,
                                       num_classes=num_classes,
                                       dtype='dataset')
    input_node = ak.Input(shape=input_shape)
    output_node = input_node
    output_node = ak.DenseBlock()(output_node)
    output_node = ak.ClassificationHead(
        num_classes=num_classes,
        output_shape=(num_classes,))(output_node)
    hypermodel = ak.hypermodel.graph.HyperBuiltGraphHyperModel(input_node,
                                                               output_node)
    tuner = ak.tuner.RandomSearch(
        hypermodel=hypermodel,
        objective='val_loss',
        max_trials=1,
        directory=tmp_dir,
        seed=common.SEED)
    # No user callbacks passed; any callbacks seen below were added by
    # the tuner itself.
    tuner.search(x=tf.data.Dataset.zip((x, y)),
                 validation_data=(x, y),
                 epochs=20,
                 callbacks=[])
    # First run_trial call (the search trial) gets EarlyStopping added.
    _, kwargs = run_trial.call_args_list[0]
    callbacks = kwargs['callbacks']
    assert len(callbacks) == 1
    assert isinstance(callbacks[0], tf.keras.callbacks.EarlyStopping)
    # Second call (presumably the final retrain) must not — confirm the
    # tuner's two-call contract against run_trial's patch site.
    _, kwargs = run_trial.call_args_list[1]
    callbacks = kwargs['callbacks']
    assert len(callbacks) == 0
def test_evaluate(graph, tuner, tmp_dir):
    """evaluate() drives the patched graph and tuner factories.

    `graph` and `tuner` are presumably mock.patch-injected factories —
    TODO confirm against the decorators outside this view.
    """
    mc = graph.return_value
    # preprocess() must return a two-element tuple (consumed downstream).
    mc.preprocess.return_value = (mock.Mock(), mock.Mock())
    x_train = np.random.rand(100, 32)
    y_train = np.random.rand(100, 1)
    input_node = ak.Input()
    output_node = input_node
    output_node = ak.DenseBlock()(output_node)
    output_node = ak.RegressionHead()(output_node)
    auto_model = ak.GraphAutoModel(input_node, output_node,
                                   directory=tmp_dir,
                                   max_trials=1)
    auto_model.fit(x_train, y_train, epochs=1,
                   validation_data=(x_train, y_train))
    auto_model.evaluate(x_train, y_train)
    # Both patched factories must have been instantiated by fit/evaluate.
    assert tuner.called
    assert graph.called
def test_set_hp():
    """set_hps must pin dense_block_1/num_layers to the single value 6.

    Builds a minimal input -> dense -> regression GraphHyperModel, applies
    the override, builds with fresh HyperParameters, and verifies the
    override landed in the search space.
    """
    # Removed unused x_train/y_train locals: nothing in this test fits data.
    input_node = ak.Input((32, ))
    output_node = input_node
    output_node = ak.DenseBlock()(output_node)
    head = ak.RegressionHead()
    head.output_shape = (1, )
    output_node = head(output_node)
    graph = ak.hypermodel.graph.GraphHyperModel(input_node, output_node)
    hp = kerastuner.HyperParameters()
    graph.set_hps(
        [hp_module.Choice('dense_block_1/num_layers', [6], default=6)])
    graph.build(hp)
    # Search the space explicitly. The old loop ended in a bare
    # `assert False`, which is stripped under `python -O` and gives no
    # failure message; an explicit lookup fails with a clear reason.
    overridden = next(
        (single_hp for single_hp in hp.space
         if single_hp.name == 'dense_block_1/num_layers'),
        None)
    assert overridden is not None, \
        'dense_block_1/num_layers missing from the search space'
    assert len(overridden.values) == 1
    assert overridden.values[0] == 6
id10 --> id12(Regression Head)
</div>
"""

import autokeras as ak

# Two-branch example: an image pipeline (conv + ResNet, merged) and a
# structured-data pipeline (categorical encoding + dense), merged again
# and fanned out into classification and regression heads.
input_node1 = ak.ImageInput()
output_node = ak.Normalization()(input_node1)
output_node = ak.ImageAugmentation()(output_node)
output_node1 = ak.ConvBlock()(output_node)
output_node2 = ak.ResNetBlock(version='v2')(output_node)
output_node1 = ak.Merge()([output_node1, output_node2])

input_node2 = ak.StructuredDataInput()
output_node = ak.CategoricalToNumerical()(input_node2)
output_node2 = ak.DenseBlock()(output_node)

output_node = ak.Merge()([output_node1, output_node2])
output_node1 = ak.ClassificationHead()(output_node)
output_node2 = ak.RegressionHead()(output_node)

auto_model = ak.AutoModel(inputs=[input_node1, input_node2],
                          outputs=[output_node1, output_node2],
                          overwrite=True,
                          max_trials=2)

# Synthetic multi-modal data for the example.
# NOTE(review): num_instances and np are defined elsewhere in this file —
# confirm against the surrounding script.
image_data = np.random.rand(num_instances, 32, 32, 3).astype(np.float32)
structured_data = np.random.rand(num_instances, 20).astype(np.float32)
regression_target = np.random.rand(num_instances, 1).astype(np.float32)
# Integer class labels in [0, 5) for the classification head.
classification_target = np.random.randint(5, size=num_instances)
def test_functional_api(tmp_dir):
    """End-to-end multi-modal fit: image + text + structured data."""
    # Prepare the data.
    num_instances = 20
    # NOTE(review): each load below reuses train_y/test_x/test_y, so only the
    # last assignment survives; the labels actually used come from the
    # generate_* helpers further down — confirm this is intentional.
    (image_x, train_y), (test_x, test_y) = mnist.load_data()
    (text_x, train_y), (test_x, test_y) = common.imdb_raw()
    (structured_data_x, train_y), (test_x, test_y) = common.dataframe_numpy()
    image_x = image_x[:num_instances]
    text_x = text_x[:num_instances]
    structured_data_x = structured_data_x[:num_instances]
    classification_y = common.generate_one_hot_labels(
        num_instances=num_instances, num_classes=3)
    regression_y = common.generate_data(num_instances=num_instances,
                                        shape=(1,))
    # Build model and train.
    # Image branch: normalize + augment, then two parallel backbones merged.
    image_input = ak.ImageInput()
    output = ak.Normalization()(image_input)
    output = ak.ImageAugmentation()(output)
    outputs1 = ak.ResNetBlock(version='next')(output)
    outputs2 = ak.XceptionBlock()(output)
    image_output = ak.Merge()((outputs1, outputs2))
    # Structured branch: feature engineering followed by dense layers.
    structured_data_input = ak.StructuredDataInput()
    structured_data_output = ak.FeatureEngineering()(structured_data_input)
    structured_data_output = ak.DenseBlock()(structured_data_output)
    # Text branch: int-sequence/conv path merged with an n-gram/dense path.
    text_input = ak.TextInput()
    outputs1 = ak.TextToIntSequence()(text_input)
    outputs1 = ak.EmbeddingBlock()(outputs1)
    outputs1 = ak.ConvBlock(separable=True)(outputs1)
    outputs1 = ak.SpatialReduction()(outputs1)
    outputs2 = ak.TextToNgramVector()(text_input)
    outputs2 = ak.DenseBlock()(outputs2)
    text_output = ak.Merge()((outputs1, outputs2))
    merged_outputs = ak.Merge()((structured_data_output,
                                 image_output,
                                 text_output))
    # One regression head and one classification head share the trunk.
    regression_outputs = ak.RegressionHead()(merged_outputs)
    classification_outputs = ak.ClassificationHead()(merged_outputs)
    automodel = ak.AutoModel(
        inputs=[image_input, text_input, structured_data_input],
        directory=tmp_dir,
        outputs=[regression_outputs, classification_outputs],
        max_trials=2,
        seed=common.SEED)
    automodel.fit(
        (image_x, text_x, structured_data_x),
        (regression_y, classification_y),
        validation_split=0.2,
        epochs=1)