Exemplo n.º 1
0
def test_preprocessing():
    """Run graph preprocessing on a three-branch graph with a shared node.

    Two dataset inputs feed a HyperBuiltGraphHyperModel whose first input
    is consumed by two branches sharing a single Normalization node.
    """
    input_shape = (33, )
    output_shape = (1, )

    # Two feature datasets zipped against one target dataset.
    features_a = common.generate_data(num_instances=100,
                                      shape=input_shape,
                                      dtype='dataset')
    features_b = common.generate_data(num_instances=100,
                                      shape=input_shape,
                                      dtype='dataset')
    targets = common.generate_data(num_instances=100,
                                   shape=output_shape,
                                   dtype='dataset')
    dataset = tf.data.Dataset.zip(((features_a, features_b), targets))

    # First input: one Normalization node shared by two dense branches.
    input_node1 = ak.Input(shape=input_shape)
    shared_norm = ak.Normalization()(input_node1)
    branch_one = ak.DenseBlock()(shared_norm)
    branch_three = ak.DenseBlock()(ak.Normalization()(shared_norm))

    # Second input: an independent normalize-then-dense branch.
    input_node2 = ak.Input(shape=input_shape)
    branch_two = ak.DenseBlock()(ak.Normalization()(input_node2))

    merged = ak.Merge()([branch_one, branch_two, branch_three])
    head = ak.RegressionHead()(merged)

    graph = graph_module.HyperBuiltGraphHyperModel([input_node1, input_node2],
                                                   head)
    graph.preprocess(hp=kerastuner.HyperParameters(),
                     dataset=dataset,
                     validation_data=dataset,
                     fit=True)
Exemplo n.º 2
0
def test_preprocessing(_, tmp_dir):
    """Fit a two-input GraphAutoModel on random numpy data for one epoch."""
    features = np.random.rand(100, 33)
    targets = np.random.rand(100, 1)

    # First input: one Normalization node shared by two dense branches.
    input_node1 = ak.Input()
    shared_norm = ak.Normalization()(input_node1)
    branch_one = ak.DenseBlock()(shared_norm)
    branch_three = ak.DenseBlock()(ak.Normalization()(shared_norm))

    # Second input: an independent normalize-then-dense branch.
    input_node2 = ak.Input()
    branch_two = ak.DenseBlock()(ak.Normalization()(input_node2))

    merged = ak.Merge()([branch_one, branch_two, branch_three])
    head = ak.RegressionHead()(merged)

    graph = ak.GraphAutoModel([input_node1, input_node2],
                              head,
                              directory=tmp_dir,
                              max_trials=1)
    # NOTE(review): both validation_data and validation_split are supplied,
    # mirroring the original example — presumably validation_data wins;
    # confirm against the AutoKeras fit() contract.
    graph.fit([features, features],
              targets,
              epochs=1,
              batch_size=100,
              validation_data=([features, features], targets),
              validation_split=0.5,
              verbose=False)
def test_image_blocks(tmp_path):
    """Fit merged ResNet/Xception branches on a tiny MNIST slice (1 epoch)."""
    num_instances = 10
    (x_train, _), _ = mnist.load_data()
    x_train = x_train[:num_instances]
    # NOTE(review): regression-shaped targets are fed to a ClassificationHead,
    # exactly as in the original example.
    y_regression = utils.generate_data(num_instances=num_instances,
                                       shape=(1, ))

    # Normalize + augment, then fan out into two merged backbone branches.
    image_in = ak.ImageInput()
    preprocessed = ak.ImageAugmentation()(ak.Normalization()(image_in))
    resnet_branch = ak.ResNetBlock(version="v2")(preprocessed)
    xception_branch = ak.XceptionBlock()(preprocessed)
    head = ak.ClassificationHead()(ak.Merge()((resnet_branch,
                                               xception_branch)))

    automodel = ak.AutoModel(
        inputs=image_in,
        outputs=head,
        directory=tmp_path,
        max_trials=1,
        seed=utils.SEED,
    )

    automodel.fit(x_train,
                  y_regression,
                  validation_data=(x_train, y_regression),
                  epochs=1)
Exemplo n.º 4
0
def test_image_blocks(tmp_path):
    """Fit merged ResNet/Xception branches on random 28x28 data (1 epoch)."""
    num_instances = 10
    x_train = test_utils.generate_data(num_instances=num_instances,
                                       shape=(28, 28))
    y_train = np.random.randint(0, 10, num_instances)

    # Normalize + augment, then fan out into two merged backbone branches.
    image_in = ak.ImageInput()
    preprocessed = ak.ImageAugmentation()(ak.Normalization()(image_in))
    resnet_branch = ak.ResNetBlock(version="v2")(preprocessed)
    xception_branch = ak.XceptionBlock()(preprocessed)
    head = ak.ClassificationHead()(ak.Merge()((resnet_branch,
                                               xception_branch)))

    automodel = ak.AutoModel(
        inputs=image_in,
        outputs=head,
        directory=tmp_path,
        max_trials=1,
        seed=test_utils.SEED,
    )

    automodel.fit(x_train,
                  y_train,
                  validation_data=(x_train, y_train),
                  epochs=1)
Exemplo n.º 5
0
def train_ak():
    """Run an AutoKeras image-classification search over on-disk JPEGs.

    Loads every ``*.jpg`` under ``config.database_path``, decodes them via
    ``utils.process_path``, splits the resulting numpy arrays at
    ``config.split``, searches with an AutoModel, evaluates on the held-out
    slice, and saves the best model as HDF5. Always returns 0.
    """
    image_count = len(list(config.database_path.glob('**/*.jpg')))
    print("# of images found:", image_count)

    list_ds = tf.data.Dataset.list_files(str(config.database_path / '*/*.jpg'),
                                         shuffle=False)
    # Shuffle once up front; reshuffle_each_iteration=False keeps the order
    # stable for the materialization below.
    list_ds = list_ds.shuffle(image_count, reshuffle_each_iteration=False)

    # Set `num_parallel_calls` so multiple images are loaded/processed in parallel.
    AUTOTUNE = tf.data.experimental.AUTOTUNE
    decoded_ds = list_ds.map(utils.process_path, num_parallel_calls=AUTOTUNE)

    # Materialize the (image, label) pairs once, then split into arrays.
    samples = list(decoded_ds)
    features = np.array([list(pair[0].numpy()) for pair in samples])
    labels = np.array([pair[1].numpy() for pair in samples])

    # All augmentation transforms disabled — only normalization is applied.
    input_node = ak.ImageInput()
    pipeline = ak.Normalization()(input_node)
    pipeline = ak.ImageAugmentation(horizontal_flip=False,
                                    vertical_flip=False,
                                    rotation_factor=False,
                                    zoom_factor=False)(pipeline)
    pipeline = ak.ClassificationHead()(pipeline)

    clf = ak.AutoModel(inputs=input_node,
                       outputs=pipeline,
                       overwrite=True,
                       max_trials=config.max_trials,
                       directory=config.outpath_mpii)

    # Split the arrays at the configured index: head = train, tail = val.
    split = config.split
    x_train, x_val = features[:split], features[split:]
    y_train, y_val = labels[:split], labels[split:]

    clf.fit(x_train,
            y_train,
            validation_data=(x_val, y_val),
            epochs=config.epochs)

    # Predict with the best model.
    #predicted_y = clf.predict(x_val)
    #print(predicted_y)

    # Evaluate the best model with testing data.
    print(clf.evaluate(x_val, y_val))

    # Export as a Keras Model.
    model = clf.export_model()

    print(
        type(model))  # <class 'tensorflow.python.keras.engine.training.Model'>
    model.save(config.output_path + "model_ak_imgClsf.h5")

    return 0
Exemplo n.º 6
0
def test_functional_api(tmp_dir):
    """Train a multi-modal GraphAutoModel (image + text + structured data).

    Fix: the ResNet and Xception branches previously consumed
    ``image_input`` directly, so the Normalization and ImageAugmentation
    nodes assigned to ``output`` were dead code. Both branches now consume
    the preprocessed ``output``, consistent with the AutoModel variant of
    this test elsewhere in the file.
    """
    # Prepare the data.
    num_instances = 20
    (image_x, train_y), (test_x, test_y) = mnist.load_data()
    (text_x, train_y), (test_x, test_y) = common.imdb_raw()
    (structured_data_x, train_y), (test_x, test_y) = common.dataframe_numpy()

    image_x = image_x[:num_instances]
    text_x = text_x[:num_instances]
    structured_data_x = structured_data_x[:num_instances]
    classification_y = common.generate_one_hot_labels(
        num_instances=num_instances, num_classes=3)
    regression_y = common.generate_data(num_instances=num_instances,
                                        shape=(1, ))

    # Build model and train.
    image_input = ak.ImageInput()
    output = ak.Normalization()(image_input)
    output = ak.ImageAugmentation()(output)
    # Feed the normalized/augmented tensor (not the raw input) to both
    # backbone branches, then merge them.
    outputs1 = ak.ResNetBlock(version='next')(output)
    outputs2 = ak.XceptionBlock()(output)
    image_output = ak.Merge()((outputs1, outputs2))

    # Structured-data branch: feature engineering then a dense stack.
    structured_data_input = ak.StructuredDataInput(
        column_names=common.COLUMN_NAMES_FROM_CSV,
        column_types=common.COLUMN_TYPES_FROM_CSV)
    structured_data_output = ak.FeatureEngineering()(structured_data_input)
    structured_data_output = ak.DenseBlock()(structured_data_output)

    # Text branch: conv-over-embeddings path merged with an n-gram path.
    text_input = ak.TextInput()
    outputs1 = ak.TextToIntSequence()(text_input)
    outputs1 = ak.EmbeddingBlock()(outputs1)
    outputs1 = ak.ConvBlock(separable=True)(outputs1)
    outputs1 = ak.SpatialReduction()(outputs1)
    outputs2 = ak.TextToNgramVector()(text_input)
    outputs2 = ak.DenseBlock()(outputs2)
    text_output = ak.Merge()((outputs1, outputs2))

    merged_outputs = ak.Merge()(
        (structured_data_output, image_output, text_output))

    # Two heads: regression and 3-class classification.
    regression_outputs = ak.RegressionHead()(merged_outputs)
    classification_outputs = ak.ClassificationHead()(merged_outputs)
    automodel = ak.GraphAutoModel(
        inputs=[image_input, text_input, structured_data_input],
        directory=tmp_dir,
        outputs=[regression_outputs, classification_outputs],
        max_trials=2,
        seed=common.SEED)

    automodel.fit((image_x, text_x, structured_data_x),
                  (regression_y, classification_y),
                  validation_split=0.2,
                  epochs=2)
Exemplo n.º 7
0
def test_functional_api(tmp_path):
    """Hyperband search over a three-modality AutoModel with two heads."""
    # Prepare the data.
    num_instances = 80
    (image_x, _), _ = mnist.load_data()
    (text_x, _), _ = utils.imdb_raw()
    (structured_data_x, _), _ = utils.dataframe_numpy()

    image_x = image_x[:num_instances]
    text_x = text_x[:num_instances]
    structured_data_x = structured_data_x[:num_instances]
    classification_y = utils.generate_one_hot_labels(
        num_instances=num_instances, num_classes=3)
    regression_y = utils.generate_data(num_instances=num_instances,
                                       shape=(1, ))

    # Image branch: normalize + augment, then merge ResNet and Xception.
    image_input = ak.ImageInput()
    augmented = ak.ImageAugmentation()(ak.Normalization()(image_input))
    image_output = ak.Merge()((ak.ResNetBlock(version='next')(augmented),
                               ak.XceptionBlock()(augmented)))

    # Structured-data branch: categorical encoding into a dense stack.
    structured_data_input = ak.StructuredDataInput()
    structured_data_output = ak.DenseBlock()(
        ak.CategoricalToNumerical()(structured_data_input))

    # Text branch: conv-over-embeddings path merged with an n-gram path.
    text_input = ak.TextInput()
    seq_path = ak.TextToIntSequence()(text_input)
    seq_path = ak.Embedding()(seq_path)
    seq_path = ak.ConvBlock(separable=True)(seq_path)
    seq_path = ak.SpatialReduction()(seq_path)
    ngram_path = ak.DenseBlock()(ak.TextToNgramVector()(text_input))
    text_output = ak.Merge()((seq_path, ngram_path))

    merged_outputs = ak.Merge()(
        (structured_data_output, image_output, text_output))

    # Two heads: regression and 3-class classification.
    regression_outputs = ak.RegressionHead()(merged_outputs)
    classification_outputs = ak.ClassificationHead()(merged_outputs)
    automodel = ak.AutoModel(
        inputs=[image_input, text_input, structured_data_input],
        directory=tmp_path,
        outputs=[regression_outputs, classification_outputs],
        max_trials=2,
        tuner=ak.Hyperband,
        seed=utils.SEED)

    automodel.fit((image_x, text_x, structured_data_x),
                  (regression_y, classification_y),
                  validation_split=0.2,
                  epochs=1)
Exemplo n.º 8
0
def functional_api():
    """Search a conv pipeline on MNIST and return the test evaluation."""
    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    entry = ak.ImageInput()
    # Chain the blocks sequentially from the image input to the head.
    node = entry
    for block in (ak.Normalization(),
                  ak.ConvBlock(),
                  ak.SpatialReduction(),
                  ak.DenseBlock(),
                  ak.ClassificationHead()):
        node = block(node)
    clf = ak.GraphAutoModel(entry, node, seed=5, max_trials=3)
    clf.fit(x_train, y_train, validation_split=0.2)
    return clf.evaluate(x_test, y_test)
Exemplo n.º 9
0
def functional_api():
    """Search an augmented ResNeXt pipeline on CIFAR-10; return test metrics."""
    (x_train, y_train), (x_test, y_test) = cifar10.load_data()
    entry = ak.ImageInput()
    # Chain the blocks sequentially from the image input to the head.
    node = entry
    for block in (ak.Normalization(),
                  ak.ImageAugmentation(),
                  ak.ResNetBlock(version='next'),
                  ak.SpatialReduction(),
                  ak.DenseBlock(),
                  ak.ClassificationHead()):
        node = block(node)
    clf = ak.AutoModel(entry, node, seed=5, max_trials=3)
    clf.fit(x_train, y_train, validation_split=0.2)
    return clf.evaluate(x_test, y_test)
Exemplo n.º 10
0
    id3 --> id5(ResNet V2)
    id4 --> id6(Merge)
    id5 --> id6
    id7(StructuredDataInput) --> id8(CategoricalToNumerical)
    id8 --> id9(DenseBlock)
    id6 --> id10(Merge)
    id9 --> id10
    id10 --> id11(Classification Head)
    id10 --> id12(Regression Head)
</div>
"""

import autokeras as ak

input_node1 = ak.ImageInput()
output_node = ak.Normalization()(input_node1)
output_node = ak.ImageAugmentation()(output_node)
output_node1 = ak.ConvBlock()(output_node)
output_node2 = ak.ResNetBlock(version='v2')(output_node)
output_node1 = ak.Merge()([output_node1, output_node2])

input_node2 = ak.StructuredDataInput()
output_node = ak.CategoricalToNumerical()(input_node2)
output_node2 = ak.DenseBlock()(output_node)

output_node = ak.Merge()([output_node1, output_node2])
output_node1 = ak.ClassificationHead()(output_node)
output_node2 = ak.RegressionHead()(output_node)

auto_model = ak.AutoModel(inputs=[input_node1, input_node2],
                          outputs=[output_node1, output_node2],
Exemplo n.º 11
0
# Structured-data binary classification with AutoKeras, tuned for AUROC.
import tensorflow as tf
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
import autokeras as ak
import datasets
from sklearn.metrics import precision_score, recall_score, confusion_matrix, classification_report, accuracy_score, f1_score

# Project-local loader; presumably returns pre-split train/test arrays —
# TODO confirm against the `datasets` module.
x_train, x_test, y_train, y_test = datasets.load_data()


# Fixed architecture (normalize -> two single-layer DenseBlocks -> 2-class
# head); the search only tunes the remaining hyperparameters.
input_node = ak.StructuredDataInput()
# x1 = ak.StructuredDataBlock(categorical_encoding=True, normalize=False)(input_node)
x1 = ak.Normalization()(input_node)
x1 = ak.DenseBlock(num_layers=1, num_units=32)(x1)
x1 = ak.DenseBlock(num_layers=1, num_units=32, dropout=0.1, use_batchnorm=False)(x1)
output_node = ak.ClassificationHead(loss = "categorical_crossentropy", num_classes=2)(x1)

import kerastuner
# Select trials by maximizing validation AUROC, reported via the custom
# AUC metric named "auroc" below, instead of the default validation loss.
auto_model = ak.AutoModel(inputs=input_node,
                          outputs=output_node,
#                           num_classes=2,
                          overwrite=True,
                          max_trials=3,
                          objective=kerastuner.Objective('val_auroc', direction='max'),
                          metrics = [tf.keras.metrics.AUC(name="auroc", curve="ROC")])

# Stop a trial early when validation loss stalls for 4 consecutive epochs.
es = tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=4)
auto_model.fit(x_train,
               y_train,
               epochs = 20,