def test_input_missing(tmp_dir):
    """GraphAutoModel must reject a graph that omits one required input."""
    # Two independent inputs feed separate dense branches.
    left_in = ak.Input()
    right_in = ak.Input()
    left_branch = ak.DenseBlock()(left_in)
    right_branch = ak.DenseBlock()(right_in)
    # Merge both branches, but pass only one input to the model below.
    merged = ak.Merge()([left_branch, right_branch])
    head_out = ak.RegressionHead()(merged)
    with pytest.raises(ValueError) as excinfo:
        ak.GraphAutoModel(left_in, head_out, directory=tmp_dir)
    assert str(excinfo.value).startswith(
        'A required input is missing for HyperModel')
def test_input_missing():
    """GraphHyperModel should raise when one required input is omitted."""
    in_a = ak.Input()
    in_b = ak.Input()
    dense_a = ak.DenseBlock()(in_a)
    dense_b = ak.DenseBlock()(in_b)
    merged = ak.Merge()([dense_a, dense_b])
    head_out = ak.RegressionHead()(merged)
    # Only in_a is supplied, so the merge cannot be satisfied.
    with pytest.raises(ValueError) as excinfo:
        graph_module.GraphHyperModel(in_a, head_out)
    assert 'A required input is missing for HyperModel' in str(excinfo.value)
def test_merge():
    """A merged two-input graph builds a Keras model with the right shapes."""
    narrow_in = ak.Input(shape=(30,))
    wide_in = ak.Input(shape=(40,))
    narrow_branch = ak.DenseBlock()(narrow_in)
    wide_branch = ak.DenseBlock()(wide_in)
    merged = ak.Merge()([narrow_branch, wide_branch])
    head_out = ak.RegressionHead(output_shape=(1,))(merged)
    plain_graph = graph_module.PlainGraph([narrow_in, wide_in], head_out)
    model = plain_graph.build_keras_graph().build(kerastuner.HyperParameters())
    # Both inputs and the single regression output must be preserved.
    assert model.input_shape == [(None, 30), (None, 40)]
    assert model.output_shape == (None, 1)
def test_hyper_graph_cycle():
    """Wiring a head's output back to an earlier node must be detected."""
    in_a = ak.Input()
    in_b = ak.Input()
    branch_a = ak.DenseBlock()(in_a)
    branch_b = ak.DenseBlock()(in_b)
    merged = ak.Merge()([branch_a, branch_b])
    head = ak.RegressionHead()
    head_out = head(merged)
    # Redirect the head at an upstream node, creating a cycle.
    head.outputs = branch_a
    with pytest.raises(ValueError) as excinfo:
        graph_module.Graph(inputs=[in_a, in_b], outputs=head_out)
    assert "The network has a cycle." in str(excinfo.value)
def test_hyper_graph_cycle(tmp_dir):
    """GraphAutoModel construction must fail on a cyclic network."""
    in_a = ak.Input()
    in_b = ak.Input()
    branch_a = ak.DenseBlock()(in_a)
    branch_b = ak.DenseBlock()(in_b)
    merged = ak.Merge()([branch_a, branch_b])
    head = ak.RegressionHead()
    head_out = head(merged)
    # Point the head back upstream so the graph becomes cyclic.
    head.outputs = branch_a
    with pytest.raises(ValueError) as excinfo:
        ak.GraphAutoModel([in_a, in_b], head_out, directory=tmp_dir)
    assert str(excinfo.value) == 'The network has a cycle.'
def test_input_missing(tmp_dir):
    """build() on a graph that omits one required input should fail."""
    in_a = ak.Input()
    in_b = ak.Input()
    branch_a = ak.DenseBlock()(in_a)
    branch_b = ak.DenseBlock()(in_b)
    merged = ak.Merge()([branch_a, branch_b])
    head_out = ak.RegressionHead()(merged)
    # Assign shapes manually so build() can reach the validation step.
    in_a.shape = (32,)
    in_b.shape = (32,)
    head_out[0].shape = (1,)
    with pytest.raises(ValueError) as excinfo:
        automodel = ak.GraphAutoModel(in_a, head_out, directory=tmp_dir)
        automodel.build(kerastuner.HyperParameters())
    assert str(excinfo.value).startswith(
        'A required input is missing for HyperModel')
def test_merge(tmp_dir):
    """End-to-end fit/predict through a two-branch merged graph."""
    features = np.random.rand(100, 32)
    targets = np.random.rand(100)
    in_a = ak.Input()
    in_b = ak.Input()
    branch_a = ak.DenseBlock()(in_a)
    branch_b = ak.DenseBlock()(in_b)
    merged = ak.Merge()([branch_a, branch_b])
    head_out = ak.RegressionHead()(merged)
    automodel = ak.GraphAutoModel(
        [in_a, in_b], head_out, directory=tmp_dir, max_trials=1)
    # The same feature matrix is fed to both inputs.
    automodel.fit(
        [features, features], targets, epochs=1, batch_size=100, verbose=False)
    predictions = automodel.predict([features, features])
    assert predictions.shape == (100, 1)
def test_hyper_graph_cycle(tmp_dir):
    """build() must report a cycle introduced by rewiring the head."""
    in_a = ak.Input()
    in_b = ak.Input()
    branch_a = ak.DenseBlock()(in_a)
    branch_b = ak.DenseBlock()(in_b)
    merged = ak.Merge()([branch_a, branch_b])
    head = ak.RegressionHead()
    head_out = head(merged)
    # Redirect the head output upstream, which makes the graph cyclic.
    head.outputs = branch_a
    # Manual shapes let construction proceed far enough to hit the check.
    in_a.shape = (32,)
    in_b.shape = (32,)
    head_out[0].shape = (1,)
    with pytest.raises(ValueError) as excinfo:
        automodel = ak.GraphAutoModel(
            [in_a, in_b], head_out, directory=tmp_dir)
        automodel.build(kerastuner.HyperParameters())
    assert str(excinfo.value) == 'The network has a cycle.'
def test_graph_save_load(tmp_path):
    """get_config()/from_config() round-trip keeps topology and overrides."""
    dense_in = ak.Input()
    conv_in = ak.Input()
    dense_branch = ak.DenseBlock()(dense_in)
    conv_branch = ak.ConvBlock()(conv_in)
    merged = ak.Merge()([dense_branch, conv_branch])
    reg_out = ak.RegressionHead()(merged)
    clf_out = ak.ClassificationHead()(merged)
    graph = graph_module.Graph(
        inputs=[dense_in, conv_in],
        outputs=[reg_out, clf_out],
        override_hps=[
            hp_module.Choice('dense_block_1/num_layers', [6], default=6)
        ])
    restored = graph_module.Graph.from_config(graph.get_config())
    assert len(restored.inputs) == 2
    assert len(restored.outputs) == 2
    # Each input must still feed its original block type.
    assert isinstance(restored.inputs[0].out_blocks[0], ak.DenseBlock)
    assert isinstance(restored.inputs[1].out_blocks[0], ak.ConvBlock)
    assert isinstance(restored.override_hps[0], hp_module.Choice)
def test_graph_save_load(tmp_path):
    """Saving a graph to disk and loading it back preserves its structure."""
    dense_in = ak.Input()
    conv_in = ak.Input()
    dense_branch = ak.DenseBlock()(dense_in)
    conv_branch = ak.ConvBlock()(conv_in)
    merged = ak.Merge()([dense_branch, conv_branch])
    reg_out = ak.RegressionHead()(merged)
    clf_out = ak.ClassificationHead()(merged)
    graph = graph_module.Graph(
        inputs=[dense_in, conv_in],
        outputs=[reg_out, clf_out],
    )
    save_path = os.path.join(tmp_path, "graph")
    graph.save(save_path)
    restored = graph_module.load_graph(save_path)
    assert len(restored.inputs) == 2
    assert len(restored.outputs) == 2
    # Each input must still feed its original block type after reload.
    assert isinstance(restored.inputs[0].out_blocks[0], ak.DenseBlock)
    assert isinstance(restored.inputs[1].out_blocks[0], ak.ConvBlock)
def test_image_blocks(tmp_path):
    """Smoke-test an image pipeline merging ResNet and Xception branches."""
    sample_count = 10
    images = utils.generate_data(num_instances=sample_count, shape=(28, 28))
    labels = np.random.randint(0, 10, sample_count)
    image_in = ak.ImageInput()
    pipeline = ak.Normalization()(image_in)
    pipeline = ak.ImageAugmentation()(pipeline)
    resnet_branch = ak.ResNetBlock(version="v2")(pipeline)
    xception_branch = ak.XceptionBlock()(pipeline)
    merged = ak.Merge()((resnet_branch, xception_branch))
    head_out = ak.ClassificationHead()(merged)
    automodel = ak.AutoModel(
        inputs=image_in,
        outputs=head_out,
        directory=tmp_path,
        max_trials=1,
        seed=utils.SEED,
    )
    # Train data doubles as validation data; one epoch keeps it fast.
    automodel.fit(images, labels, validation_data=(images, labels), epochs=1)
def test_graph_save_load(tmp_path):
    """save()/load_graph() round-trip keeps inputs, outputs and overrides."""
    dense_in = ak.Input()
    conv_in = ak.Input()
    dense_branch = ak.DenseBlock()(dense_in)
    conv_branch = ak.ConvBlock()(conv_in)
    merged = ak.Merge()([dense_branch, conv_branch])
    reg_out = ak.RegressionHead()(merged)
    clf_out = ak.ClassificationHead()(merged)
    graph = graph_module.Graph(
        inputs=[dense_in, conv_in],
        outputs=[reg_out, clf_out],
        override_hps=[
            hp_module.Choice("dense_block_1/num_layers", [6], default=6)
        ],
    )
    save_path = os.path.join(tmp_path, "graph")
    graph.save(save_path)
    restored = graph_module.load_graph(save_path)
    assert len(restored.inputs) == 2
    assert len(restored.outputs) == 2
    # Topology and hyperparameter overrides must survive the round-trip.
    assert isinstance(restored.inputs[0].out_blocks[0], ak.DenseBlock)
    assert isinstance(restored.inputs[1].out_blocks[0], ak.ConvBlock)
    assert isinstance(restored.override_hps[0], hp_module.Choice)
def test_merge(tmp_dir):
    """A manually-shaped merged graph builds, trains and predicts."""
    features = np.random.rand(100, 32)
    targets = np.random.rand(100)
    in_a = ak.Input()
    in_b = ak.Input()
    branch_a = ak.DenseBlock()(in_a)
    branch_b = ak.DenseBlock()(in_b)
    merged = ak.Merge()([branch_a, branch_b])
    head_out = ak.RegressionHead()(merged)
    # Shapes are set by hand because the inputs carry no data yet.
    in_a.shape = (32,)
    in_b.shape = (32,)
    head_out[0].shape = (1,)
    automodel = ak.GraphAutoModel([in_a, in_b], head_out, directory=tmp_dir)
    model = automodel.build(kerastuner.HyperParameters())
    model.fit([features, features], targets, epochs=1, batch_size=100,
              verbose=False)
    predictions = model.predict([features, features])
    assert predictions.shape == (100, 1)
def test_functional_api(tmp_dir):
    """End-to-end AutoModel test combining image, text and structured inputs
    with one regression head and one classification head."""
    # Prepare the data.
    num_instances = 20
    # NOTE(review): train_y/test_x/test_y are clobbered by each successive
    # load below and never used — only the *_x arrays are kept.
    (image_x, train_y), (test_x, test_y) = mnist.load_data()
    (text_x, train_y), (test_x, test_y) = common.imdb_raw()
    (structured_data_x, train_y), (test_x, test_y) = common.dataframe_numpy()
    # Truncate every modality to the same small instance count.
    image_x = image_x[:num_instances]
    text_x = text_x[:num_instances]
    structured_data_x = structured_data_x[:num_instances]
    # Synthetic targets: one-hot labels for classification, (1,)-shaped
    # values for regression.
    classification_y = common.generate_one_hot_labels(
        num_instances=num_instances, num_classes=3)
    regression_y = common.generate_data(
        num_instances=num_instances, shape=(1,))
    # Build model and train.
    # Image branch: normalize + augment, then merge ResNet/Xception outputs.
    image_input = ak.ImageInput()
    output = ak.Normalization()(image_input)
    output = ak.ImageAugmentation()(output)
    outputs1 = ak.ResNetBlock(version='next')(output)
    outputs2 = ak.XceptionBlock()(output)
    image_output = ak.Merge()((outputs1, outputs2))
    # Structured-data branch: feature engineering into a dense block.
    structured_data_input = ak.StructuredDataInput()
    structured_data_output = ak.FeatureEngineering()(structured_data_input)
    structured_data_output = ak.DenseBlock()(structured_data_output)
    # Text branch: sequence path (embedding + conv) merged with an
    # n-gram-vector path. outputs1/outputs2 are deliberately reused here.
    text_input = ak.TextInput()
    outputs1 = ak.TextToIntSequence()(text_input)
    outputs1 = ak.EmbeddingBlock()(outputs1)
    outputs1 = ak.ConvBlock(separable=True)(outputs1)
    outputs1 = ak.SpatialReduction()(outputs1)
    outputs2 = ak.TextToNgramVector()(text_input)
    outputs2 = ak.DenseBlock()(outputs2)
    text_output = ak.Merge()((outputs1, outputs2))
    # Fuse all three modalities and attach both heads.
    merged_outputs = ak.Merge()((
        structured_data_output, image_output, text_output))
    regression_outputs = ak.RegressionHead()(merged_outputs)
    classification_outputs = ak.ClassificationHead()(merged_outputs)
    automodel = ak.AutoModel(
        inputs=[image_input, text_input, structured_data_input],
        directory=tmp_dir,
        outputs=[regression_outputs, classification_outputs],
        max_trials=2,
        seed=common.SEED)
    # NOTE(review): input order here is (image, text, structured), matching
    # the inputs= list above — confirm common.* loaders return compatible
    # array types for this tuple form.
    automodel.fit(
        (image_x, text_x, structured_data_x),
        (regression_y, classification_y),
        validation_split=0.2,
        epochs=1)
# NOTE(review): this collapsed fragment is truncated mid-call (the final
# `automodel.fit(...)` never closes) and mixes several statements on one
# physical line; left byte-identical because completing it would require
# guessing the missing code. `trials=100` is presumably a tuner setting —
# verify against the GraphAutoModel.fit signature once the full source is
# recovered.
import tensorflow as tf from keras.datasets import mnist # Prepare the data. (x_train, y_classification), (x_test, y_test) = mnist.load_data() x_image = x_train.reshape(x_train.shape + (1,)) x_test = x_test.reshape(x_test.shape + (1,)) x_structured = np.random.rand(x_train.shape[0], 100) y_regression = np.random.rand(x_train.shape[0], 1) # Build model and train. inputs = ak.ImageInput(shape=(28, 28, 1)) outputs1 = ak.ResNetBlock(version='next')(inputs) outputs2 = ak.XceptionBlock()(inputs) image_outputs = ak.Merge()((outputs1, outputs2)) structured_inputs = ak.StructuredInput() structured_outputs = ak.DenseBlock()(structured_inputs) merged_outputs = ak.Merge()((image_outputs, structured_outputs)) classification_outputs = ak.ClassificationHead()(merged_outputs) regression_outputs = ak.RegressionHead()(merged_outputs) automodel = ak.GraphAutoModel(inputs=inputs, outputs=[regression_outputs, classification_outputs]) automodel.fit((x_image, x_structured), (y_regression, y_classification), trials=100, epochs=200,
# NOTE(review): notebook-export fragment (see the `# In[27]:` cell markers);
# it is truncated inside the final `ak.AutoModel(...)` call, so it is left
# byte-identical rather than reconstructed. The commented-out AutoModel call
# passed only `x_input`; the live one correctly passes `[id_input, x_input]`.
print(y_train.shape) print(y_test.shape) print(y_val.shape) print(X_train[1]) print(y_train[1]) # In[27]: id_input = ak.StructuredDataInput() id_den = ak.CategoricalToNumerical()(id_input) id_den = ak.Embedding()(id_den) x_input = ak.Input() layer = ak.DenseBlock()(x_input) mer = ak.Merge()([id_den, layer]) output_node = ak.RegressionHead(metrics=['mae'])(mer) # In[28]: # auto_model = ak.AutoModel( inputs= x_input, # #project_name="categorical_model", # outputs = output_node, # objective="loss", # tuner="bayesian", max_trials= 10 ) auto_model = ak.AutoModel( inputs=[id_input, x_input], #project_name="categorical_model", outputs=output_node, objective="loss", tuner="bayesian",
# NOTE(review): fragment that begins inside a docstring (tail of a mermaid
# graph diagram — the opening triple quote is outside this view) and is cut
# off after the first data line; left byte-identical because neither the
# missing docstring head nor the rest of the script can be reconstructed
# from here. The visible code builds a mixed image/structured-data
# AutoModel with classification and regression heads.
id8 --> id9(DenseBlock) id6 --> id10(Merge) id9 --> id10 id10 --> id11(Classification Head) id10 --> id12(Regression Head) </div> """ import autokeras as ak input_node1 = ak.ImageInput() output_node = ak.Normalization()(input_node1) output_node = ak.ImageAugmentation()(output_node) output_node1 = ak.ConvBlock()(output_node) output_node2 = ak.ResNetBlock(version='v2')(output_node) output_node1 = ak.Merge()([output_node1, output_node2]) input_node2 = ak.StructuredDataInput() output_node = ak.CategoricalToNumerical()(input_node2) output_node2 = ak.DenseBlock()(output_node) output_node = ak.Merge()([output_node1, output_node2]) output_node1 = ak.ClassificationHead()(output_node) output_node2 = ak.RegressionHead()(output_node) auto_model = ak.AutoModel(inputs=[input_node1, input_node2], outputs=[output_node1, output_node2], overwrite=True, max_trials=2) image_data = np.random.rand(num_instances, 32, 32, 3).astype(np.float32)
# Intermediate inputs = ak.ImageInput(shape=...) x = ak.ImageBlock(inputs) head = ak.ClassificationHead(num_classes, metrics=['accuracy']) outputs = head(x) automodel = ak.GraphAutoModel(inputs=inputs, outputs=outputs) # Loss, optimizer are picked automatically automodel.fit(x_train, y_train) # Advanced inputs = ak.ImageInput(shape=...) outputs1 = ak.ResNetBlock()(inputs) outputs2 = ak.XceptionBlock()(inputs) outputs = ak.Merge()((outputs1, outputs2)) outputs = ak.ClassificationHead(num_classes)(outputs) automodel = ak.GraphAutoModel(inputs=inputs, outputs=outputs) learning_rate = 1.0 automodel.compile(optimizer=tf.keras.optimizers.Adam(learning_rate), metrics=[tf.keras.metrics.CategoricalAccuracy()], loss=tf.keras.losses.CategoricalCrossentropy()) automodel.fit(ak.image_augment(x_train, y_train), time_limit=12 * 60 * 60, epochs=200, callbacks=[ tf.keras.callbacks.EarlyStopping(), tf.keras.callbacks.LearningRateScheduler(1) ])