예제 #1
0
    def test_search_space(self):
        """search_space() rejects invalid arguments and accepts a space class."""
        from deephyper.problem import NaProblem

        problem = NaProblem()

        # A plain string is not a valid search-space definition.
        with pytest.raises(TypeError):
            problem.search_space(space_class="a")

        # A proper space class is accepted without raising.
        problem.search_space(OneLayerSpace)
예제 #2
0
from deephyper.benchmark.nas.linearReg.load_data import load_data
from deephyper.problem import NaProblem
from deepspace.tabular import OneLayerFactory


def create_search_space(input_shape, output_shape, **kwargs):
    """Build a one-layer search space for the given input/output shapes."""
    factory = OneLayerFactory()
    return factory(input_shape, output_shape, **kwargs)


# NAS problem definition for the linear-regression benchmark (seeded for
# reproducibility of the search).
Problem = NaProblem(seed=2019)

Problem.load_data(load_data)

Problem.search_space(create_search_space)

# Fixed (non-searched) training hyperparameters.
Problem.hyperparameters(batch_size=100,
                        learning_rate=0.1,
                        optimizer="adam",
                        num_epochs=1)

Problem.loss("mse")

Problem.metrics(["r2"])

# Search objective: validation R^2.
Problem.objective("val_r2")

# Just to print your problem, to test its definition and imports in the current python environment.
if __name__ == "__main__":
    print(Problem)

    model = Problem.get_keras_model([4 for _ in range(20)])
예제 #3
0
from deephyper.problem import NaProblem
from nas_big_data.combo.load_data import load_data_cache
from nas_big_data.combo.search_space_shared import create_search_space

Problem = NaProblem(seed=2019)

Problem.load_data(load_data_cache)

Problem.search_space(create_search_space, num_layers=5)

# schedules: https://www.tensorflow.org/api_docs/python/tf/keras/optimizers/schedules

Problem.hyperparameters(
    lsr_batch_size=True,
    lsr_learning_rate=True,
    batch_size=Problem.add_hyperparameter((16, 2048, "log-uniform"),
                                          "batch_size"),
    learning_rate=Problem.add_hyperparameter(
        (1e-4, 0.01, "log-uniform"),
        "learning_rate",
    ),
    optimizer=Problem.add_hyperparameter(
        ["sgd", "rmsprop", "adagrad", "adam", "adadelta", "adamax", "nadam"],
        "optimizer"),
    patience_ReduceLROnPlateau=Problem.add_hyperparameter(
        (3, 30), "patience_ReduceLROnPlateau"),
    patience_EarlyStopping=Problem.add_hyperparameter(
        (3, 30), "patience_EarlyStopping"),
    num_epochs=100,
    verbose=0,
    callbacks=dict(
예제 #4
0
from deephyper.benchmark.nas.ensembleReg.load_data import load_data
from deephyper.problem import NaProblem
from deepspace.tabular import OneLayerSpace


# NAS problem definition for the ensemble-regression benchmark.
Problem = NaProblem()

Problem.load_data(load_data)

Problem.search_space(OneLayerSpace)

# Fixed training hyperparameters; ModelCheckpoint keeps only the best model
# (by validation R^2) as a full HDF5 snapshot.
Problem.hyperparameters(
    batch_size=100,
    learning_rate=0.1,
    optimizer="adam",
    num_epochs=1,
    callbacks=dict(
        ModelCheckpoint=dict(
            monitor="val_r2",
            mode="max",
            save_best_only=True,
            verbose=0,
            filepath="model.h5",
            save_weights_only=False,
        )
    ),
)

Problem.loss("mse")

Problem.metrics(["r2"])
예제 #5
0
from deephyper.benchmark.nas.covertype.load_data import load_data
from deephyper.problem import NaProblem
from deepspace.tabular import DenseSkipCoSpace

# NAS problem definition for the covertype classification benchmark.
Problem = NaProblem()

Problem.load_data(load_data)

# Classification setup: no regression head, no batch-norm, up to 10 layers.
Problem.search_space(DenseSkipCoSpace, regression=False, bn=False, num_layers=10)

# batch_size/learning_rate are given as search choices (list) and a
# log-uniform interval rather than fixed values.
Problem.hyperparameters(
    batch_size=[32, 64, 128, 256, 512, 1024],
    learning_rate=(0.001, 0.1, "log-uniform"),
    optimizer="adam",
    num_epochs=20,
    verbose=0,
    callbacks=dict(CSVExtendedLogger=dict()),
)

Problem.loss("categorical_crossentropy")

Problem.metrics(["acc"])

Problem.objective("val_acc")


# Just to print your problem, to test its definition and imports in the current python environment.
if __name__ == "__main__":
    print(Problem)

    # model = Problem.get_keras_model([4 for _ in range(20)])
예제 #6
0
import traceback

from deephyper.benchmark.nas.covertype.load_data import load_data
from deephyper.problem import NaProblem
from deephyper.nas.space.dense_skipco import create_search_space

# from deephyper.nas.preprocessing import minmaxstdscaler

# NAS problem definition for covertype classification with a dense
# skip-connection search space (seeded for reproducibility).
Problem = NaProblem(seed=2019)

Problem.load_data(load_data)

# Problem.preprocessing(minmaxstdscaler)

Problem.search_space(create_search_space,
                     num_layers=10,
                     regression=False,
                     bn=False)

# Fixed training hyperparameters for every evaluated architecture.
Problem.hyperparameters(
    batch_size=256,
    learning_rate=0.01,
    optimizer="adam",
    num_epochs=20,
    verbose=0,
    callbacks=dict(CSVExtendedLogger=dict()),
)

Problem.loss("categorical_crossentropy")

Problem.metrics(["acc"])
예제 #7
0
from deephyper.benchmark.nas.linearReg.load_data import load_data
from deephyper.problem import NaProblem
from deepspace.tabular import OneLayerSpace

# Joint NAS + hyperparameter-search problem for linear regression.
Problem = NaProblem()

Problem.load_data(load_data)

Problem.search_space(OneLayerSpace)

# Three training hyperparameters are exposed to the search via
# add_hyperparameter: an integer range, a log-uniform float range, and a
# categorical choice.
Problem.hyperparameters(
    batch_size=Problem.add_hyperparameter((1, 100), "batch_size"),
    learning_rate=Problem.add_hyperparameter((1e-4, 1e-1, "log-uniform"),
                                             "learning_rate"),
    optimizer=Problem.add_hyperparameter(["adam", "nadam", "rmsprop"],
                                         "optimizer"),
    num_epochs=1,
)

Problem.loss("mse")

Problem.metrics(["r2"])

Problem.objective("val_r2")

# Just to print your problem, to test its definition and imports in the current python environment.
if __name__ == "__main__":
    print(Problem)

    model = Problem.get_keras_model([1])
from nas_problems.polynome2.load_data import load_data
from nas_problems.polynome2.search_space import create_search_space
from deephyper.problem import NaProblem

# NAS problem definition for the polynome2 benchmark.
Problem = NaProblem()

Problem.load_data(load_data, size=1000)

Problem.search_space(create_search_space)

Problem.hyperparameters(batch_size=128,
                        learning_rate=0.001,
                        optimizer="rmsprop",
                        num_epochs=5)

Problem.loss("mse")

Problem.metrics(["r2"])

# "__last" selects the metric value from the last training epoch.
Problem.objective("val_r2__last")

Problem.post_training(
    num_epochs=60,
    metrics=["r2"],
    model_checkpoint={
        "monitor": "val_r2",
        "mode": "max",
        "save_best_only": True,
        "verbose": 1,
    },
    early_stopping={
예제 #9
0
from deephyper.problem import NaProblem
from nas_1.polynome2.load_data import load_data
from nas_1.polynome2.search_space import create_search_space
from deephyper.nas.preprocessing import minmaxstdscaler, stdscaler
import numpy as np
import tensorflow as tf
tf.config.run_functions_eagerly(True)

# from deephyper.nas.train_utils import *
from sklearn import metrics

# NAS problem definition for polynome2 with standard-scaler preprocessing
# (seeded for reproducibility).
Problem = NaProblem(seed=2019)

Problem.load_data(load_data)

Problem.preprocessing(stdscaler)

Problem.search_space(create_search_space, num_layers=6)

# Early stopping on validation loss with best-weight restoration.
Problem.hyperparameters(
    batch_size=128,
    learning_rate=0.001,
    optimizer='adam',
    num_epochs=20,
    callbacks=dict(EarlyStopping=dict(
        monitor='val_loss',  # or 'val_r2' or 'val_acc' ?
        # mode='max',
        verbose=0,
        patience=5,
        restore_best_weights=True)))
예제 #10
0
from deephyper.problem import NaProblem
from nas_gcn.qm7.load_data import load_data
from nas_gcn.search_space_utils import create_search_space

# NAS problem definition for the QM7 graph benchmark.
Problem = NaProblem(seed=2020)
Problem.load_data(load_data)
Problem.search_space(create_search_space, data='qm7')
Problem.hyperparameters(batch_size=128,
                        learning_rate=1e-3,
                        optimizer='adam',
                        num_epochs=50)
Problem.loss("mae")
Problem.metrics(['mae', 'mse', 'r2', 'negmae'])
# "__max" selects the best (maximum) value of val_negmae seen during training.
Problem.objective('val_negmae__max')

if __name__ == '__main__':
    print(Problem)
예제 #11
0
from deephyper.problem import NaProblem
from deephyper.benchmark.nas.nasbench101.load_data import load_data
from deephyper.benchmark.nas.nasbench101.util import create_search_space
from deephyper.nas.preprocessing import minmaxstdscaler

# NAS problem definition for the NAS-Bench-101 benchmark.
Problem = NaProblem()

Problem.load_data(load_data)

# Problem.preprocessing(minmaxstdscaler)

Problem.search_space(create_search_space)

Problem.hyperparameters(batch_size=100, learning_rate=0.1, optimizer="adam", num_epochs=1)

Problem.loss("mse")

Problem.metrics(["r2"])

Problem.objective("val_r2")


# Just to print your problem, to test its definition and imports in the current python environment.
if __name__ == "__main__":
    print(Problem)
예제 #12
0
from deephyper.problem import NaProblem
from nas_big_data.cifar10.load_data import load_data
from nas_big_data.cifar10.search_space_darts import create_search_space

# NAS problem definition for CIFAR-10 with a DARTS-style search space.
Problem = NaProblem(seed=2019)

Problem.load_data(load_data)


def augment(inputs, outputs):
    """Apply random crop-pad-flip augmentation to the "input_0" image batch.

    Returns the (inputs, outputs) pair with inputs["input_0"] replaced by the
    augmented tensor; outputs pass through unchanged.
    """
    import tensorflow as tf

    image = inputs["input_0"]
    # Random 28x28 crop, then pad/resize back to the original 32x32 frame.
    image = tf.image.random_crop(image, [28, 28, 3])
    image = tf.image.resize_with_crop_or_pad(image, 32, 32)
    # Random horizontal flip.
    inputs["input_0"] = tf.image.random_flip_left_right(image)

    return inputs, outputs


# Register the on-the-fly data augmentation, then the search space.
Problem.augment(augment)

Problem.search_space(create_search_space)

Problem.hyperparameters(
    batch_size=[32, 64, 128, 256, 512, 1024],
    learning_rate=(0.001, 0.1, "log-uniform"),
    optimizer="adam",
    num_epochs=100,  # maximal bound
    verbose=0,
    callbacks=dict(
예제 #13
0
File: problem.py  Project: xclmj/deephyper
from deephyper.problem import NaProblem
from nas_problems.polynome2.load_data import load_data
from nas_problems.polynome2.search_space import create_search_space
from deephyper.search.nas.model.preprocessing import minmaxstdscaler

# NAS problem definition for polynome2 with min-max/std preprocessing
# (seeded for reproducibility).
Problem = NaProblem(seed=2019)

Problem.load_data(load_data)

Problem.preprocessing(minmaxstdscaler)

Problem.search_space(create_search_space, num_layers=3)

# Early stopping on validation R^2 (maximized).
Problem.hyperparameters(
    batch_size=32,
    learning_rate=0.01,
    optimizer="adam",
    num_epochs=20,
    callbacks=dict(EarlyStopping=dict(
        monitor="val_r2",  # or 'val_acc' ?
        mode="max",
        verbose=0,
        patience=5
    )),
)

Problem.loss("mse")  # or 'categorical_crossentropy' ?

Problem.metrics(["r2"])  # or 'acc' ?

Problem.objective("val_r2__last")  # or 'val_acc__last' ?
예제 #14
0
from deephyper.problem import NaProblem
# NOTE(review): this script previously imported `load_data` from both
# deephyper.benchmark.nas.linearReg and linearRegMultiInputsGen; the second
# import silently shadowed the first, so only the multi-inputs generator
# loader was ever used. The dead (shadowed) import has been removed.
from deephyper.benchmark.nas.linearRegMultiInputsGen.load_data import load_data
from deephyper.nas.preprocessing import minmaxstdscaler
from deepspace.tabular import OneLayerSpace

# NAS problem definition for the multi-input linear-regression benchmark.
Problem = NaProblem()

Problem.load_data(load_data)

Problem.preprocessing(minmaxstdscaler)

Problem.search_space(OneLayerSpace)

# Fixed (non-searched) training hyperparameters.
Problem.hyperparameters(batch_size=100,
                        learning_rate=0.1,
                        optimizer="adam",
                        num_epochs=10)

Problem.loss("mse")

Problem.metrics(["r2"])

Problem.objective("val_r2")

# Just to print your problem, to test its definition and imports in the current python environment.
if __name__ == "__main__":
    print(Problem)
예제 #15
0
from deephyper.benchmark.nas.linearReg.load_data import load_data
from deephyper.problem import NaProblem
from deepspace.tabular import OneLayerSpace


# NAS problem definition for the linear-regression benchmark.
Problem = NaProblem()

Problem.load_data(load_data)

Problem.search_space(OneLayerSpace)

Problem.hyperparameters(batch_size=100, learning_rate=0.1, optimizer="adam", num_epochs=1)

Problem.loss("mse")

Problem.metrics(["r2"])

Problem.objective("val_r2")


# Just to print your problem, to test its definition and imports in the current python environment.
if __name__ == "__main__":
    print(Problem)

    model = Problem.get_keras_model([1])
예제 #16
0
from deephyper.problem import NaProblem
from deephyper.benchmark.nas.linearRegMultiLoss.load_data import load_data
from deepspace.tabular import SupervisedRegAutoEncoderFactory


def create_search_space(input_shape, output_shape, **kwargs):
    """Build a supervised regression auto-encoder space for the given shapes."""
    factory = SupervisedRegAutoEncoderFactory()
    return factory(input_shape, output_shape, **kwargs)


# Multi-output NAS problem (seeded for reproducibility).
Problem = NaProblem(seed=2019)

Problem.load_data(load_data)

Problem.search_space(create_search_space, num_layers=10)

Problem.hyperparameters(
    batch_size=100, learning_rate=0.1, optimizer="adam", num_epochs=20
)

# output_0's loss weight is 0.0, so only output_1's MSE drives the gradient;
# output_0 is still monitored through metrics and the objective below.
Problem.loss(
    loss={"output_0": "mse", "output_1": "mse"},
    weights={"output_0": 0.0, "output_1": 1.0},
)

Problem.metrics({"output_0": ["r2", "mse"], "output_1": "mse"})

Problem.objective("val_output_0_r2")


# Just to print your problem, to test its definition and imports in the current python environment.
if __name__ == "__main__":
예제 #17
0
from deephyper.problem import NaProblem
from nas_big_data.combo.search_space import create_search_space
from nas_big_data.combo.load_data import load_data

# NAS problem definition for the Combo benchmark (seeded for reproducibility).
Problem = NaProblem(seed=2019)

Problem.load_data(load_data)

Problem.search_space(create_search_space, num_layers=5)

# LR reduction and early stopping both track validation R^2 (maximized).
Problem.hyperparameters(batch_size=32,
                        learning_rate=0.001,
                        optimizer="adam",
                        num_epochs=4,
                        verbose=0,
                        callbacks=dict(
                            ReduceLROnPlateau=dict(monitor="val_r2",
                                                   mode="max",
                                                   verbose=0,
                                                   patience=5),
                            EarlyStopping=dict(monitor="val_r2",
                                               min_delta=0,
                                               mode="max",
                                               verbose=0,
                                               patience=10),
                        ))

Problem.loss("mse")

Problem.metrics(["r2"])
예제 #18
0
    def test_create(self):
        """Constructing a NaProblem with default arguments must not raise."""
        from deephyper.problem import NaProblem

        problem = NaProblem()  # smoke test: construction succeeds
예제 #19
0
from deephyper.problem import NaProblem
from nas_big_data.covertype.dense_skipco import create_search_space
from nas_big_data.covertype.load_data import load_data

# NAS problem definition for covertype classification (seeded).
Problem = NaProblem(seed=2019)

Problem.load_data(load_data)

Problem.search_space(create_search_space, num_layers=10)

# batch_size/learning_rate/ranks_per_node are search choices; the checkpoint
# keeps only the best weights by validation accuracy.
Problem.hyperparameters(
    batch_size=[32, 64, 128, 256, 512, 1024],
    learning_rate=(0.001, 0.1, "log-uniform"),
    optimizer="adam",
    num_epochs=20,  # maximal bound
    verbose=0,
    callbacks=dict(
        CSVExtendedLogger=dict(),
        ModelCheckpoint=dict(
            monitor="val_acc",
            mode="max",
            save_best_only=True,
            verbose=0,
            filepath="model.h5",
            save_weights_only=True,
        ),
    ),
    ranks_per_node=[1, 2, 4, 8],
)

Problem.loss("categorical_crossentropy")
예제 #20
0
    def test_full_problem(self):
        """Exercise a complete NaProblem definition end to end."""
        from deephyper.nas.preprocessing import minmaxstdscaler
        from deephyper.problem import NaProblem

        problem = NaProblem()

        # Minimal in-memory loader: one training and one validation sample.
        def load_data(prop):
            return ([[10]], [1]), ([10], [1])

        problem.load_data(load_data, prop=1.0)

        problem.preprocessing(minmaxstdscaler)

        problem.search_space(OneLayerSpace)

        problem.hyperparameters(
            batch_size=64,
            learning_rate=0.001,
            optimizer="adam",
            num_epochs=10,
            loss_metric="mse",
        )

        # An objective referencing a metric must be rejected before
        # loss/metrics have been declared.
        with pytest.raises(NaProblemError):
            problem.objective("r2")

        problem.loss("mse")
        problem.metrics(["r2"])

        # Once declared, every derived objective name is accepted.
        for objective in ["loss", "val_loss", "r2", "val_r2"]:
            problem.objective(objective)
예제 #21
0
from deephyper.problem import NaProblem
from nas_big_data.daymet.conv_lstm_2d import create_conv_lstm_search_space
from nas_big_data.daymet.load_data import load_data

# NAS problem definition for the Daymet ConvLSTM benchmark (seeded).
Problem = NaProblem(seed=2019)

Problem.load_data(load_data)

Problem.search_space(create_conv_lstm_search_space, num_layers=10)

# Wall-clock limited training (TimeStopping) with per-epoch CSV logging.
Problem.hyperparameters(
    # batch_size=256,  # (128, 2048),
    batch_size=[32, 64, 128, 256, 512, 1024],
    learning_rate=(0.001, 0.1, "log-uniform"),
    optimizer="adam",
    num_epochs=20,
    verbose=0,
    callbacks=dict(CSVExtendedLogger=dict(), TimeStopping=dict(seconds=460)),
)

Problem.loss("mse")

Problem.metrics(["mae"])

Problem.objective("val_loss")

# Just to print your problem, to test its definition and imports in the current python environment.
if __name__ == "__main__":
    print(Problem)
예제 #22
0
from deephyper.problem import NaProblem
from nas_big_data.ptb.search_space import create_search_space
from nas_big_data.ptb.load_data import load_data

# NAS problem definition for the PTB language-modeling benchmark (seeded).
Problem = NaProblem(seed=2019)

Problem.load_data(load_data)

Problem.search_space(create_search_space, num_layers=4)

Problem.hyperparameters(
    batch_size=32,
    learning_rate=(0.001, 0.1, "log-uniform"),
    optimizer="adam",
    num_epochs=100,
    verbose=0,
    callbacks=dict(
        CSVExtendedLogger=dict(),
        TimeStopping=dict(seconds=1200),
        EarlyStopping=dict(
            monitor="val_sparse_perplexity",
            min_delta=0,
            mode="min",
            verbose=0,
            patience=5,
        ),
        ReduceLROnPlateau=dict(patience=4, verbose=0),
        ModelCheckpoint=dict(
            monitor="val_sparse_perplexity",
            mode="min",
            save_best_only=True,
예제 #23
0
from deephyper.problem import NaProblem
from deephyper.benchmark.nas.mnist1D.load_data import load_data
from deepspace.tabular import OneLayerFactory


def create_search_space(input_shape=(10,), output_shape=(1,), **kwargs):
    """Build a one-layer search space (defaults: 10 inputs, 1 output)."""
    factory = OneLayerFactory()
    return factory(input_shape, output_shape, **kwargs)


# NAS problem definition for the MNIST-1D benchmark.
Problem = NaProblem()

Problem.load_data(load_data)

Problem.search_space(create_search_space)

Problem.hyperparameters(
    batch_size=100, learning_rate=0.1, optimizer="adam", num_epochs=10
)

Problem.loss("categorical_crossentropy")

Problem.metrics(["acc"])

Problem.objective("val_acc")


# Just to print your problem, to test its definition and imports in the current python environment.
if __name__ == "__main__":
    print(Problem)
예제 #24
0
from deephyper.problem import NaProblem
from molnet.molnet.load_data import load_data
from molnet.molnet.search_space import create_search_space
from deephyper.search.nas.model.preprocessing import minmaxstdscaler

# NAS problem definition for the MoleculeNet benchmark (seeded).
Problem = NaProblem(seed=2019)

Problem.load_data(load_data)

Problem.preprocessing(minmaxstdscaler)

Problem.search_space(create_search_space, num_layers=3)

# Early stopping on validation R^2 (maximized).
Problem.hyperparameters(
    batch_size=32,
    learning_rate=0.01,
    optimizer='adam',
    num_epochs=20,
    callbacks=dict(EarlyStopping=dict(
        monitor='val_r2',  # or 'val_acc' ?
        mode='max',
        verbose=0,
        patience=5)))

Problem.loss('mse')  # or 'categorical_crossentropy' ?

Problem.metrics(['r2'])  # or 'acc' ?

Problem.objective('val_r2__last')  # or 'val_acc__last' ?

# Just to print your problem, to test its definition and imports in the current python environment.
예제 #25
0
from deephyper.problem import NaProblem
from nas_big_data.daymet.conv_lstm_2d import create_conv_lstm_search_space
from nas_big_data.daymet.load_data import load_data

# NAS problem definition for the Daymet ConvLSTM benchmark, fixed
# hyperparameters variant (seeded).
Problem = NaProblem(seed=2019)

Problem.load_data(load_data)

Problem.search_space(create_conv_lstm_search_space, num_layers=2)

# Wall-clock limited training (TimeStopping) with per-epoch CSV logging.
Problem.hyperparameters(
    batch_size=256,
    learning_rate=0.01,
    optimizer="adam",
    num_epochs=20,
    verbose=0,
    callbacks=dict(CSVExtendedLogger=dict(), TimeStopping=dict(seconds=460)),
)

Problem.loss("mse")

Problem.metrics(["mae"])

Problem.objective("val_loss")

# Just to print your problem, to test its definition and imports in the current python environment.
if __name__ == "__main__":
    print(Problem)