Example #1
0
def create_problem(load_data):
    """Build and return a neural-architecture-search problem definition.

    Parameters
    ----------
    load_data : callable
        Data-loading function registered via ``Problem.load_data``.

    Returns
    -------
    NaProblem
        The fully configured problem. (Bug fix: the original built the
        problem and then discarded it, always returning ``None``.)
    """
    Problem = NaProblem(seed=2019)  # fixed seed for reproducibility

    Problem.load_data(load_data)

    # Problem.preprocessing(minmaxstdscaler)

    Problem.search_space(create_search_space, num_layers=10)

    Problem.hyperparameters(
        verbose=0,
        batch_size=100,
        learning_rate=0.001,  # lr search: 0.01, lr post: 0.001
        optimizer='adam',
        num_epochs=50,
        callbacks=dict(EarlyStopping=dict(
            monitor='val_r2', mode='max', verbose=0, patience=5)))

    Problem.loss('mse')

    Problem.metrics(['r2'])

    Problem.objective('val_r2__last')

    Problem.post_training(
        num_epochs=1000,
        metrics=['r2'],
        callbacks=dict(
            ModelCheckpoint={
                'monitor': 'val_r2',
                'mode': 'max',
                'save_best_only': True,
                'verbose': 1,
            },
            EarlyStopping={
                'monitor': 'val_r2',
                'mode': 'max',
                'verbose': 1,
                'patience': 50,
            },
            # NOTE(review): assumes `time` is imported at module level
            # (not visible in this chunk) — confirm.
            TensorBoard=dict(log_dir='{}'.format(time.time())),
        ))

    # NOTE(review): this guard is nested inside the function, so it only
    # fires when create_problem() is called while the module runs as a
    # script — it was probably meant at module level. Kept in place to
    # preserve existing behavior.
    if __name__ == '__main__':
        print(Problem)
        from pprint import pprint
        pprint(Problem.space)

    return Problem  # bug fix: the configured problem was never returned
Example #2
0
from deephyper.benchmark import NaProblem
from candlepb.Uno.structs.uno_mlp_1 import create_structure
from candlepb.Uno.uno_baseline_keras2 import load_data_multi_array

# NAS problem definition for the Uno MLP benchmark (no seed: stochastic runs).
Problem = NaProblem()

Problem.load_data(load_data_multi_array)

# Problem.preprocessing(minmaxstdscaler)

Problem.search_space(create_structure, num_cells=3)

# Training settings used while evaluating candidate architectures.
Problem.hyperparameters(batch_size=64,
                        learning_rate=0.001,
                        optimizer="adam",
                        num_epochs=1)

Problem.loss("mse")

Problem.metrics(["r2"])

Problem.objective("val_r2__last")
Problem.post_training(num_epochs=1000,
                      metrics=['r2'],
                      model_checkpoint={
                          'monitor': 'val_r2',
                          'mode': 'max',
                          'save_best_only': True,
Example #3
0
from deephyper.benchmark import NaProblem
from deephyper.benchmark.nas.linearReg.load_data import load_data
from deephyper.benchmark.nas.linearRegMultiInputsGen.load_data import load_data
from deephyper.search.nas.model.baseline.simple import create_search_space
from deephyper.search.nas.model.preprocessing import minmaxstdscaler

# Linear-regression NAS problem: data loading, preprocessing, search space,
# training hyperparameters, loss/metrics, and the search objective.
Problem = NaProblem()

Problem.load_data(load_data)

Problem.preprocessing(minmaxstdscaler)

Problem.search_space(create_search_space)

Problem.hyperparameters(batch_size=100,
                        learning_rate=0.1,
                        optimizer="adam",
                        num_epochs=10)

Problem.loss("mse")

Problem.metrics(["r2"])

Problem.objective("val_r2")

# Print the problem to check its definition and imports in the current
# Python environment.
if __name__ == "__main__":
    print(Problem)
Example #4
0
from deephyper.benchmark import NaProblem
from deephyper.benchmark.nas.linearRegMultiLoss.load_data import load_data
from deephyper.search.nas.model.baseline.simple_bi_model import create_search_space

from deephyper.search.nas.model.preprocessing import minmaxstdscaler

# Multi-loss linear-regression NAS problem (seeded for reproducibility).
Problem = NaProblem(seed=2019)

Problem.load_data(load_data)

# Problem.preprocessing(minmaxstdscaler)

Problem.search_space(create_search_space, num_layers=10)

Problem.hyperparameters(
    batch_size=100,
    learning_rate=0.1,
    optimizer='adam',
    num_epochs=20,
)

# Both outputs use MSE; output_0 is weighted to zero, so only output_1
# contributes to the total loss.
Problem.loss(
    loss=dict(output_0='mse', output_1='mse'),
    weights=dict(output_0=0.0, output_1=1.0),
)

Problem.metrics(dict(output_0=['r2', 'mse'], output_1='mse'))
Example #5
0
from candlepb.NT3.models.candle_conv_mlp_baseline import create_structure
from candlepb.NT3.nt3_baseline_keras2 import load_data
from deephyper.benchmark import NaProblem

# NAS problem for the NT3 conv/MLP classification benchmark.
Problem = NaProblem()

Problem.load_data(load_data)

# Problem.preprocessing(minmaxstdscaler)

Problem.search_space(create_structure)

# Search-phase training settings (single epoch per candidate).
Problem.hyperparameters(batch_size=20,
                        learning_rate=0.01,
                        optimizer="adam",
                        num_epochs=1)

Problem.loss("categorical_crossentropy")

Problem.metrics(["acc"])

Problem.objective("val_acc__last")

Problem.post_training(num_epochs=1000,
                      metrics=['acc'],
                      model_checkpoint={
                          'monitor': 'val_acc',
                          'mode': 'max',
                          'save_best_only': True,