Example #1
def test_invalid_tuner_name_error(tmp_path):
    with pytest.raises(ValueError) as info:
        ak.AutoModel(ak.ImageInput(),
                     ak.RegressionHead(),
                     directory=tmp_path,
                     tuner="unknown")

    assert "Expect the tuner argument to be one of" in str(info.value)
Example #2
def io_api():
    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    clf = ak.AutoModel(ak.ImageInput(),
                       ak.ClassificationHead(),
                       seed=5,
                       max_trials=3)
    clf.fit(x_train, y_train, validation_split=0.2)
    return clf.evaluate(x_test, y_test)
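Example #2 uses the AutoModel IO API directly. For comparison, the same MNIST task can be phrased against the task-level ImageClassifier API, which bundles ImageInput and ClassificationHead behind one class; a minimal sketch (trial count and seed chosen arbitrarily here) would be:

def task_api():
    # ImageClassifier wraps ImageInput + ClassificationHead + AutoModel.
    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    clf = ak.ImageClassifier(max_trials=3, seed=5)
    clf.fit(x_train, y_train, validation_split=0.2)
    return clf.evaluate(x_test, y_test)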
Example #3
def test_cat_to_num_with_img_input_error():
    input_node = ak.ImageInput()
    output_node = ak.CategoricalToNumerical()(input_node)

    with pytest.raises(TypeError) as info:
        graph_module.Graph(input_node, outputs=output_node).compile()

    assert "CategoricalToNumerical can only be used" in str(info.value)
Example #4
File: utils.py (project: ahxt/AutoIE.bak)
def build_graph():
    tf.keras.backend.clear_session()
    image_input = ak.ImageInput(shape=(32, 32, 3))
    merged_outputs = ak.ImageBlock()(image_input)
    head = ak.ClassificationHead(num_classes=10)
    head.output_shape = (10, )
    classification_outputs = head(merged_outputs)
    return ak.graph.Graph(inputs=image_input, outputs=classification_outputs)
Example #5
def test_image_classifier_tuner1():
    tf.keras.backend.clear_session()
    input_node = ak.ImageInput(shape=(32, 32, 3))
    output_node = ak.ImageBlock()(input_node)
    output_node = ak.ClassificationHead(loss='categorical_crossentropy',
                                        output_shape=(10, ))(output_node)
    graph = graph_module.Graph(input_node, output_node)
    check_initial_hp(task_specific.IMAGE_CLASSIFIER[1], graph)
Example #6
def test_graph_compile_with_adadelta():
    input_node = ak.ImageInput(shape=(32, 32, 3))
    output_node = ak.ConvBlock()(input_node)
    output_node = ak.RegressionHead(output_shape=(1, ))(output_node)

    graph = graph_module.Graph(input_node, output_node)
    hp = kerastuner.HyperParameters()
    hp.values = {"optimizer": "adadelta"}
    graph.build(hp)
Example #7
def build_graph():
    tf.keras.backend.clear_session()
    image_input = ak.ImageInput(shape=(32, 32, 3))
    image_input.batch_size = 32
    image_input.num_samples = 1000
    merged_outputs = ak.SpatialReduction()(image_input)
    head = ak.ClassificationHead(num_classes=10, shape=(10, ))
    classification_outputs = head(merged_outputs)
    return ak.graph.Graph(inputs=image_input, outputs=classification_outputs)
Example #8
def test_image_block():
    block = wrapper.ImageBlock(normalize=None, augment=None)
    hp = kerastuner.HyperParameters()

    block.build(hp, ak.ImageInput(shape=(32, 32, 3)).build())

    assert utils.name_in_hps('block_type', hp)
    assert utils.name_in_hps('normalize', hp)
    assert utils.name_in_hps('augment', hp)
Example #9
def test_auto_model_basic(_, tmp_dir):
    x_train = np.random.rand(100, 32, 32, 3)
    y_train = np.random.rand(100, 1)

    auto_model = ak.AutoModel(ak.ImageInput(),
                              ak.RegressionHead(),
                              directory=tmp_dir,
                              max_trials=2)
    auto_model.fit(x_train, y_train, epochs=2, validation_split=0.2)
Example #10
def test_predict_tuple_x_and_tuple_y_predict_doesnt_crash(tuner_fn, tmp_path):
    auto_model = ak.AutoModel(
        ak.ImageInput(), ak.RegressionHead(), directory=tmp_path
    )
    dataset = tf.data.Dataset.from_tensor_slices(
        ((np.random.rand(100, 32, 32, 3),), (np.random.rand(100, 1),))
    )
    auto_model.fit(dataset)
    auto_model.predict(dataset)
Example #11
def test_auto_model_project_name_field_as_specified(tmp_path):
    auto_model = ak.AutoModel(
        ak.ImageInput(),
        ak.RegressionHead(),
        directory=tmp_path,
        project_name="auto_model",
    )

    assert auto_model.project_name == "auto_model"
Example #12
File: train.py (project: ccaleanu/Gaze)
def train_ak():
    image_count = len(list(config.database_path.glob('**/*.jpg')))
    print("# of images found:", image_count)

    list_ds = tf.data.Dataset.list_files(str(config.database_path / '*/*.jpg'),
                                         shuffle=False)
    list_ds = list_ds.shuffle(image_count, reshuffle_each_iteration=False)

    # Set `num_parallel_calls` so multiple images are loaded/processed in parallel.
    AUTOTUNE = tf.data.experimental.AUTOTUNE
    train_ds = list_ds.map(utils.process_path, num_parallel_calls=AUTOTUNE)

    features = np.array([list(x[0].numpy()) for x in list(train_ds)])
    labels = np.array([x[1].numpy() for x in list(train_ds)])

    input_node = ak.ImageInput()
    output_node = ak.Normalization()(input_node)
    output_node = ak.ImageAugmentation(horizontal_flip=False,
                                       vertical_flip=False,
                                       rotation_factor=False,
                                       zoom_factor=False)(output_node)
    output_node = ak.ClassificationHead()(output_node)

    clf = ak.AutoModel(inputs=input_node,
                       outputs=output_node,
                       overwrite=True,
                       max_trials=config.max_trials,
                       directory=config.outpath_mpii)
    # Split the data and feed the training portion to the classifier (as NumPy arrays).

    split = config.split
    x_val = features[split:]
    y_val = labels[split:]
    x_train = features[:split]
    y_train = labels[:split]

    clf.fit(x_train,
            y_train,
            validation_data=(x_val, y_val),
            epochs=config.epochs)

    # Predict with the best model.
    #predicted_y = clf.predict(x_val)
    #print(predicted_y)

    # Evaluate the best model with testing data.
    print(clf.evaluate(x_val, y_val))

    # Export as a Keras Model.
    model = clf.export_model()

    print(type(model))  # <class 'tensorflow.python.keras.engine.training.Model'>
    model.save(config.output_path + "model_ak_imgClsf.h5")

    return 0
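Example #12 relies on a utils.process_path helper that the listing does not show. It follows the TensorFlow image-loading tutorial pattern; the reconstruction below is hypothetical (class_names, img_height and img_width are assumed module-level names, not taken from the ccaleanu/Gaze project):

import os

def process_path(file_path):
    # Hypothetical reconstruction: derive a one-hot label from the parent
    # directory name, then read, decode and resize the image.
    parts = tf.strings.split(file_path, os.path.sep)
    label = parts[-2] == class_names
    img = tf.io.read_file(file_path)
    img = tf.io.decode_jpeg(img, channels=3)
    img = tf.image.resize(img, [img_height, img_width])
    return img, label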
Example #13
def test_auto_model_predict(tuner_fn, tmp_path):
    x_train = np.random.rand(100, 32, 32, 3)
    y_train = np.random.rand(100, 1)

    auto_model = ak.AutoModel(ak.ImageInput(),
                              ak.RegressionHead(),
                              directory=tmp_path,
                              max_trials=2)
    auto_model.fit(x_train, y_train, epochs=2, validation_split=0.2)
    auto_model.predict(x_train)
    assert tuner_fn.called
Example #14
def test_functional_api(tmp_dir):
    # Prepare the data.
    num_instances = 20
    (image_x, train_y), (test_x, test_y) = mnist.load_data()
    (text_x, train_y), (test_x, test_y) = common.imdb_raw()
    (structured_data_x, train_y), (test_x, test_y) = common.dataframe_numpy()

    image_x = image_x[:num_instances]
    text_x = text_x[:num_instances]
    structured_data_x = structured_data_x[:num_instances]
    classification_y = common.generate_one_hot_labels(
        num_instances=num_instances, num_classes=3)
    regression_y = common.generate_data(num_instances=num_instances,
                                        shape=(1, ))

    # Build model and train.
    image_input = ak.ImageInput()
    output = ak.Normalization()(image_input)
    output = ak.ImageAugmentation()(output)
    # Note: `output` is not reused below; ResNetBlock and XceptionBlock connect
    # directly to image_input here (Example #17 connects them to output instead).
    outputs1 = ak.ResNetBlock(version='next')(image_input)
    outputs2 = ak.XceptionBlock()(image_input)
    image_output = ak.Merge()((outputs1, outputs2))

    structured_data_input = ak.StructuredDataInput(
        column_names=common.COLUMN_NAMES_FROM_CSV,
        column_types=common.COLUMN_TYPES_FROM_CSV)
    structured_data_output = ak.FeatureEngineering()(structured_data_input)
    structured_data_output = ak.DenseBlock()(structured_data_output)

    text_input = ak.TextInput()
    outputs1 = ak.TextToIntSequence()(text_input)
    outputs1 = ak.EmbeddingBlock()(outputs1)
    outputs1 = ak.ConvBlock(separable=True)(outputs1)
    outputs1 = ak.SpatialReduction()(outputs1)
    outputs2 = ak.TextToNgramVector()(text_input)
    outputs2 = ak.DenseBlock()(outputs2)
    text_output = ak.Merge()((outputs1, outputs2))

    merged_outputs = ak.Merge()(
        (structured_data_output, image_output, text_output))

    regression_outputs = ak.RegressionHead()(merged_outputs)
    classification_outputs = ak.ClassificationHead()(merged_outputs)
    automodel = ak.GraphAutoModel(
        inputs=[image_input, text_input, structured_data_input],
        directory=tmp_dir,
        outputs=[regression_outputs, classification_outputs],
        max_trials=2,
        seed=common.SEED)

    automodel.fit((image_x, text_x, structured_data_x),
                  (regression_y, classification_y),
                  validation_split=0.2,
                  epochs=2)
Example #15
File: dummy_agent.py (project: yasstake/mmf)
    def create_image_regressor(self):
        input_node = ak.ImageInput()
        output_node = ak.ConvBlock()(input_node)
        output_node = ak.DenseBlock()(output_node)
        output_node = ak.RegressionHead()(output_node)

        reg = ak.AutoModel(inputs=input_node,
                           outputs=output_node,
                           max_trials=10)

        return reg
Example #16
def test_no_validation_data_nor_split_error(tmp_path):
    auto_model = ak.AutoModel(
        ak.ImageInput(), ak.RegressionHead(), directory=tmp_path
    )
    with pytest.raises(ValueError) as info:
        auto_model.fit(
            x=np.random.rand(100, 32, 32, 3),
            y=np.random.rand(100, 1),
            validation_split=0,
        )

    assert "Either validation_data or a non-zero" in str(info.value)
Example #17
def test_functional_api(tmp_path):
    # Prepare the data.
    num_instances = 80
    (image_x, train_y), (test_x, test_y) = mnist.load_data()
    (text_x, train_y), (test_x, test_y) = utils.imdb_raw()
    (structured_data_x, train_y), (test_x, test_y) = utils.dataframe_numpy()

    image_x = image_x[:num_instances]
    text_x = text_x[:num_instances]
    structured_data_x = structured_data_x[:num_instances]
    classification_y = utils.generate_one_hot_labels(
        num_instances=num_instances, num_classes=3)
    regression_y = utils.generate_data(num_instances=num_instances,
                                       shape=(1, ))

    # Build model and train.
    image_input = ak.ImageInput()
    output = ak.Normalization()(image_input)
    output = ak.ImageAugmentation()(output)
    outputs1 = ak.ResNetBlock(version='next')(output)
    outputs2 = ak.XceptionBlock()(output)
    image_output = ak.Merge()((outputs1, outputs2))

    structured_data_input = ak.StructuredDataInput()
    structured_data_output = ak.CategoricalToNumerical()(structured_data_input)
    structured_data_output = ak.DenseBlock()(structured_data_output)

    text_input = ak.TextInput()
    outputs1 = ak.TextToIntSequence()(text_input)
    outputs1 = ak.Embedding()(outputs1)
    outputs1 = ak.ConvBlock(separable=True)(outputs1)
    outputs1 = ak.SpatialReduction()(outputs1)
    outputs2 = ak.TextToNgramVector()(text_input)
    outputs2 = ak.DenseBlock()(outputs2)
    text_output = ak.Merge()((outputs1, outputs2))

    merged_outputs = ak.Merge()(
        (structured_data_output, image_output, text_output))

    regression_outputs = ak.RegressionHead()(merged_outputs)
    classification_outputs = ak.ClassificationHead()(merged_outputs)
    automodel = ak.AutoModel(
        inputs=[image_input, text_input, structured_data_input],
        directory=tmp_path,
        outputs=[regression_outputs, classification_outputs],
        max_trials=2,
        tuner=ak.Hyperband,
        seed=utils.SEED)

    automodel.fit((image_x, text_x, structured_data_x),
                  (regression_y, classification_y),
                  validation_split=0.2,
                  epochs=1)
Example #18
def test_auto_model_basic(tmp_dir):
    x_train = np.random.rand(100, 32, 32, 3)
    y_train = np.random.rand(100)

    auto_model = ak.AutoModel(ak.ImageInput(),
                              ak.RegressionHead(),
                              directory=tmp_dir,
                              max_trials=2)
    auto_model.fit(x_train, y_train, epochs=2, validation_split=0.2)
    result = auto_model.predict(x_train)

    assert result.shape == (100, 1)
Example #19
def test_single_nested_dataset_doesnt_crash(tuner_fn, tmp_path):
    auto_model = ak.AutoModel(
        ak.ImageInput(),
        ak.RegressionHead(),
        directory=tmp_path,
        max_trials=2,
        overwrite=False,
    )
    x1 = utils.generate_data()
    y1 = utils.generate_data(shape=(1,))
    dataset = tf.data.Dataset.from_tensor_slices(((x1,), y1))
    auto_model.fit(dataset, epochs=2)
Example #20
def functional_api():
    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    input_node = ak.ImageInput()
    output_node = input_node
    output_node = ak.Normalization()(output_node)
    output_node = ak.ConvBlock()(output_node)
    output_node = ak.SpatialReduction()(output_node)
    output_node = ak.DenseBlock()(output_node)
    output_node = ak.ClassificationHead()(output_node)
    clf = ak.GraphAutoModel(input_node, output_node, seed=5, max_trials=3)
    clf.fit(x_train, y_train, validation_split=0.2)
    return clf.evaluate(x_test, y_test)
Example #21
def test_final_fit_concat(tuner_fn, tmp_dir):
    tuner_class = tuner_fn.return_value

    x_train = np.random.rand(100, 32, 32, 3)
    y_train = np.random.rand(100, 1)

    auto_model = ak.AutoModel(ak.ImageInput(),
                              ak.RegressionHead(),
                              directory=tmp_dir,
                              max_trials=2)
    auto_model.fit(x_train, y_train, epochs=2, validation_split=0.2)
    assert auto_model._split_dataset
    assert tuner_class.call_args_list[0][1]['fit_on_val_data']
Example #22
def functional_api():
    (x_train, y_train), (x_test, y_test) = cifar10.load_data()
    input_node = ak.ImageInput()
    output_node = input_node
    output_node = ak.Normalization()(output_node)
    output_node = ak.ImageAugmentation()(output_node)
    output_node = ak.ResNetBlock(version='next')(output_node)
    output_node = ak.SpatialReduction()(output_node)
    output_node = ak.DenseBlock()(output_node)
    output_node = ak.ClassificationHead()(output_node)
    clf = ak.AutoModel(input_node, output_node, seed=5, max_trials=3)
    clf.fit(x_train, y_train, validation_split=0.2)
    return clf.evaluate(x_test, y_test)
Example #23
def test_single_nested_dataset(tuner_fn, tmp_path):
    auto_model = ak.AutoModel(ak.ImageInput(),
                              ak.RegressionHead(),
                              directory=tmp_path,
                              max_trials=2,
                              overwrite=False)
    x1 = utils.generate_data()
    y1 = utils.generate_data(shape=(1, ))
    dataset = tf.data.Dataset.from_tensor_slices(((x1, ), y1))
    auto_model.fit(dataset, epochs=2)

    for adapter in auto_model._input_adapters + auto_model._output_adapters:
        assert adapter.shape is not None
Example #24
def test_auto_model_predict(graph, tuner, tmp_dir):
    mc = graph.return_value
    mc.preprocess.return_value = (mock.Mock(), mock.Mock())
    x_train = np.random.rand(100, 32, 32, 3)
    y_train = np.random.rand(100, 1)

    auto_model = ak.AutoModel(ak.ImageInput(),
                              ak.RegressionHead(),
                              directory=tmp_dir,
                              max_trials=2)
    auto_model.fit(x_train, y_train, epochs=2, validation_split=0.2)
    auto_model.predict(x_train)
    assert tuner.called
    assert graph.called
Example #25
def test_multi_model():

    context = an.AutoMLPipeline(
        an.MultiModel(
            inputs=[ak.ImageInput(), ak.StructuredDataInput()],
            outputs=[
                ak.RegressionHead(metrics=["mae"]),
                ak.ClassificationHead(loss="categorical_crossentropy",
                                      metrics=["accuracy"]),
            ],
            overwrite=True,
            max_trials=2,
        ))
    context.run_automl()
    assert context.return_automl["model"] is not None
Example #26
def test_overwrite(tuner_fn, tmp_dir):
    tuner_class = tuner_fn.return_value

    x_train = np.random.rand(100, 32, 32, 3)
    y_train = np.random.rand(100, 1)

    auto_model = ak.AutoModel(ak.ImageInput(),
                              ak.RegressionHead(),
                              directory=tmp_dir,
                              max_trials=2,
                              overwrite=False)
    auto_model.fit(x_train,
                   y_train,
                   epochs=2,
                   validation_data=(x_train, y_train))
    assert not tuner_class.call_args_list[0][1]['overwrite']
Example #27
def test_final_fit_concat(graph, tuner, tmp_dir):
    pg = mock.Mock()
    pg.preprocess.return_value = (mock.Mock(), mock.Mock())
    mc = graph.return_value
    mc.build_graphs.return_value = (pg, mock.Mock())
    mc = tuner.return_value
    mc.get_best_model.return_value = (pg, mock.Mock())
    x_train = np.random.rand(100, 32, 32, 3)
    y_train = np.random.rand(100, 1)

    auto_model = ak.AutoModel(ak.ImageInput(),
                              ak.RegressionHead(),
                              directory=tmp_dir,
                              max_trials=2)
    auto_model.fit(x_train, y_train, epochs=2, validation_split=0.2)
    assert auto_model._split_dataset
    assert tuner.call_args_list[0][1]['fit_on_val_data']
Example #28
def test_predict_tuple_x_and_tuple_y_call_model_predict_with_x(
        tuner_fn, tmp_path):
    model = mock.Mock()
    tuner = mock.Mock()
    tuner.get_best_model.return_value = model
    tuner_fn.return_value.return_value = tuner

    auto_model = ak.AutoModel(ak.ImageInput(),
                              ak.RegressionHead(),
                              directory=tmp_path)
    dataset = tf.data.Dataset.from_tensor_slices(
        ((np.random.rand(100, 32, 32, 3), ), (np.random.rand(100, 1), )))
    auto_model.fit(dataset)
    auto_model.predict(dataset)

    assert data_utils.dataset_shape(
        model.predict.call_args_list[0][0][0]).as_list() == [None, 32, 32, 3]
Example #29
def test_export_model(tuner_fn, tmp_path):
    tuner_class = tuner_fn.return_value
    tuner = tuner_class.return_value

    x_train = np.random.rand(100, 32, 32, 3)
    y_train = np.random.rand(100, 1)

    auto_model = ak.AutoModel(ak.ImageInput(),
                              ak.RegressionHead(),
                              directory=tmp_path,
                              max_trials=2,
                              overwrite=False)
    auto_model.fit(x_train,
                   y_train,
                   epochs=2,
                   validation_data=(x_train, y_train))
    auto_model.export_model()
    assert tuner.get_best_model.called
Example #30
def test_auto_model_predict(tuner_fn, tmp_dir):
    pg = mock.Mock()
    pg.preprocess.return_value = (mock.Mock(), mock.Mock())
    tuner_class = tuner_fn.return_value
    tuner = tuner_class.return_value
    tuner.get_best_model.return_value = (pg, mock.Mock())

    x_train = np.random.rand(100, 32, 32, 3)
    y_train = np.random.rand(100, 1)

    auto_model = ak.AutoModel(ak.ImageInput(),
                              ak.RegressionHead(),
                              directory=tmp_dir,
                              max_trials=2)
    auto_model.fit(x_train, y_train, epochs=2, validation_split=0.2)
    auto_model.predict(x_train)
    assert tuner_fn.called
    assert tuner_class.called
    assert tuner.get_best_model.called
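Most of the tests above that take extra tuner_fn, tuner_class, tuner or graph arguments (for example #13, #21 and #26 to #30) receive them from unittest.mock.patch decorators that the listing omits; the exact dotted path being patched is internal to AutoKeras and version-specific, so it is not reproduced here. The attribute chaining those assertions rely on is plain mock.Mock behaviour, sketched below:

from unittest import mock

# tuner_fn stands in for the patched tuner factory that a decorator such as
# @mock.patch("<internal AutoKeras tuner factory>") would inject as the first
# test argument. Each .return_value hop mirrors one call made by AutoModel.
tuner_fn = mock.Mock()
tuner_class = tuner_fn.return_value      # what calling the factory returns
tuner = tuner_class.return_value         # the tuner instance AutoModel builds
assert tuner_fn.return_value.return_value is tuner
assert not tuner.get_best_model.called   # becomes True once AutoModel uses it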