from deephyper.benchmark import Problem
from candlepb.Combo.models.candle_mlp_7 import create_structure

# We create our Problem object with the Problem class. You don't have to
# name your Problem object 'Problem'; it can be any name you want. You can
# also define different problems in the same module.
Problem = Problem()

# You define the create_structure function. This function will return an
# object following the Structure interface. You can also pass kwargs
# arguments such as 'num_cells' to this function.
Problem.add_dim('create_structure', {
    'func': create_structure
})

# You define the hyperparameters used to train your generated models during
# the search.
Problem.add_dim('hyperparameters', {
    'num_epochs': 1,
})

# Just to print your problem, to test its definition and imports in the
# current Python environment.
if __name__ == '__main__':
    print(Problem)

from deephyper.benchmark import Problem
from deephyper.benchmark.nas.dixonpriceReg.load_data import load_data
from deephyper.search.nas.model.baseline.anl_mlp_2 import create_structure
from deephyper.search.nas.model.preprocessing import minmaxstdscaler

# We create our Problem object with the Problem class. You don't have to
# name your Problem object 'Problem'; it can be any name you want. You can
# also define different problems in the same module.
Problem = Problem()

# You define whether your problem is a regression problem (the reward will
# be the negative mean squared error) or a classification problem (the
# reward will be the accuracy of the network on the validation set).
Problem.add_dim('regression', True)

# You define how to load your data by giving a 'load_data' function. This
# function will return your data set following this interface:
# (train_X, train_y), (valid_X, valid_y). You can also add a 'kwargs' key
# with arguments for the load_data function.
Problem.add_dim('load_data', {
    'func': load_data,
})

# OPTIONAL: You define a preprocessing function which will be applied to
# your data before training generated models. This preprocessing function
# uses the scikit-learn preprocessor API.
Problem.add_dim('preprocessing', {
    'func': minmaxstdscaler
})

# You define the create_structure function. This function will return an
# object following the Structure interface. You can also pass kwargs
# arguments such as 'num_cells' to this function.
Problem.add_dim('create_structure', {
    'func': create_structure,
    'kwargs': {
        'num_cells': 5
    }
})

# You define the hyperparameters used to train your generated models during
# the search.
Problem.add_dim('hyperparameters', {
    # Illustrative value; set the training hyperparameters (batch_size,
    # learning_rate, optimizer, ...) as in the problems below.
    'num_epochs': 1,
})

if __name__ == '__main__':
    print(Problem)

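# A minimal sketch of the 'load_data' contract described above: return
# (train_X, train_y), (valid_X, valid_y). The synthetic data is purely
# illustrative; this is NOT the dixonpriceReg loader.
import numpy as np

def load_data(num_samples=1000, num_features=10):
    X = np.random.uniform(-1.0, 1.0, (num_samples, num_features))
    y = X.sum(axis=1, keepdims=True)  # toy regression target
    split = int(0.8 * num_samples)    # 80/20 train/validation split
    return (X[:split], y[:split]), (X[split:], y[split:])

if __name__ == '__main__':
    (train_X, train_y), (valid_X, valid_y) = load_data()
    print(train_X.shape, train_y.shape, valid_X.shape, valid_y.shape)
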
from deephyper.benchmark import Problem
from candlepb.Combo.combo_baseline_keras2 import load_data_deephyper_gen
from candlepb.Combo.models.candle_conv_mlp_3 import create_structure
# from deephyper.search.nas.model.preprocessing import minmaxstdscaler

# We create our Problem object with the Problem class. You don't have to
# name your Problem object 'Problem'; it can be any name you want. You can
# also define different problems in the same module.
Problem = Problem()

# You define whether your problem is a regression problem (the reward will
# be the negative mean squared error) or a classification problem (the
# reward will be the accuracy of the network on the validation set).
Problem.add_dim('regression', True)

# You define how to load your data by giving a 'load_data' function. This
# function will return your data set following this interface:
# (train_X, train_y), (valid_X, valid_y). You can also add a 'kwargs' key
# with arguments for the load_data function.
Problem.add_dim('load_data', {
    'func': load_data_deephyper_gen,
    'kwargs': {
        'prop': 0.1
    }
})

# OPTIONAL: You define a preprocessing function which will be applied to
# your data before training generated models. This preprocessing function
# uses the scikit-learn preprocessor API.
# Problem.add_dim('preprocessing', {
#     'func': minmaxstdscaler
# })

# You define the create_structure function. This function will return an
# object following the Structure interface. You can also pass kwargs
# arguments such as 'num_cells' to this function.
Problem.add_dim('create_structure', {
    'func': create_structure,
    'kwargs': {
        'num_cells': 5
    }
})

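# 'minmaxstdscaler' above is consumed through the scikit-learn preprocessor
# API (fit/transform). A plausible stand-in, assuming it simply chains
# min-max scaling with standardization; the actual deephyper implementation
# may differ.
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import MinMaxScaler, StandardScaler

def minmaxstdscaler():
    # Scale features to [0, 1], then center/standardize them.
    return Pipeline([
        ('minmax', MinMaxScaler()),
        ('std', StandardScaler()),
    ])
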
from deephyper.benchmark import Problem
from candlepb.Combo.combo_baseline_keras2 import load_data_deephyper_gen
from candlepb.Combo.models.candle_mlp_9 import create_structure

# We create our Problem object with the Problem class. You don't have to
# name your Problem object 'Problem'; it can be any name you want. You can
# also define different problems in the same module.
Problem = Problem()

# You define the create_structure function. This function will return an
# object following the Structure interface. You can also pass kwargs
# arguments such as 'num_cells' to this function.
Problem.add_dim('create_structure', {'func': create_structure})

# You define the hyperparameters used to train your generated models during
# the search.
Problem.add_dim('hyperparameters', {
    'num_epochs': 1,
})

# The 'load_data' dimension needs a 'func' entry, with its arguments nested
# under 'kwargs'. We assume the same Combo generator loader as the sibling
# problems in this section.
Problem.add_dim('load_data', {
    'func': load_data_deephyper_gen,
    'kwargs': {
        'prop': 0.5
    }
})

# Just to print your problem, to test its definition and imports in the
# current Python environment.
if __name__ == '__main__':
    print(Problem)

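# How the 'kwargs' entry reaches the loader: the trainer presumably calls
# load_data(**kwargs). A toy loader honoring the 'prop' argument used above
# (hypothetical; not the Combo implementation):
import numpy as np

def load_data(prop=1.0):
    X = np.random.rand(10000, 8)
    y = np.random.rand(10000, 1)
    n = int(prop * len(X))      # keep only a proportion of the data
    split = int(0.8 * n)        # 80/20 train/validation split
    return (X[:split], y[:split]), (X[split:n], y[split:n])
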
from deephyper.search.nas.model.baseline.anl_mlp_2 import create_structure
from deephyper.benchmark.nas.mnist1D.load_data import load_data
from deephyper.benchmark import Problem

Problem = Problem()

Problem.add_dim('regression', False)

Problem.add_dim('load_data', {
    'func': load_data
})

Problem.add_dim('create_structure', {
    'func': create_structure,
    'kwargs': {
        'num_cells': 5
    }
})

Problem.add_dim('hyperparameters', {
    'batch_size': 64,
    'learning_rate': 0.0001,
    'optimizer': 'adam',
    'num_epochs': 10,
    'loss_metric': 'categorical_crossentropy',
    'metrics': ['acc']
})

if __name__ == '__main__':
    print(Problem)

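# The 'hyperparameters' above presumably map onto a standard Keras
# compile/fit cycle, roughly as sketched below (illustrative only; the
# actual deephyper trainer wiring may differ).
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.optimizers import Adam

def train_example(train_X, train_y, valid_X, valid_y):
    model = Sequential([
        Dense(64, activation='relu'),
        Dense(10, activation='softmax'),
    ])
    model.compile(
        optimizer=Adam(learning_rate=0.0001),  # 'optimizer', 'learning_rate'
        loss='categorical_crossentropy',       # 'loss_metric'
        metrics=['acc'],                       # 'metrics'
    )
    model.fit(train_X, train_y,
              batch_size=64,                   # 'batch_size'
              epochs=10,                       # 'num_epochs'
              validation_data=(valid_X, valid_y))
    return model
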
# TODO: not ready
from deephyper.search.nas.contrib.google_nas_net import create_search_space
from deephyper.benchmark.nas.mnist2D.load_data import load_data
from deephyper.benchmark import Problem

Problem = Problem()

Problem.add_dim('regression', False)

Problem.add_dim('load_data', {
    'func': load_data
})

Problem.add_dim('create_search_space', {
    'func': create_search_space,
    'kwargs': {}
})

Problem.add_dim('hyperparameters', {
    'batch_size': 100,
    'learning_rate': 0.001,
    'optimizer': 'adam',
    'num_epochs': 50,
    'loss_metric': 'mean_softmax_cross_entropy',
    'test_metric': 'accuracy'
})

if __name__ == '__main__':
    print(Problem)

from deephyper.benchmark import Problem
from candlepb.Combo.combo_baseline_keras2 import load_data_deephyper_gen, load_data_combo
from candlepb.Combo.models.candle_mlp_5 import create_structure
# from deephyper.search.nas.model.preprocessing import minmaxstdscaler

# We create our Problem object with the Problem class. You don't have to
# name your Problem object 'Problem'; it can be any name you want. You can
# also define different problems in the same module.
Problem = Problem()

# You define whether your problem is a regression problem (the reward will
# be the negative mean squared error) or a classification problem (the
# reward will be the accuracy of the network on the validation set).
Problem.add_dim('regression', True)

# You define how to load your data by giving a 'load_data' function. This
# function will return your data set following this interface:
# (train_X, train_y), (valid_X, valid_y). You can also add a 'kwargs' key
# with arguments for the load_data function.
Problem.add_dim('load_data', {
    'func': load_data_deephyper_gen,
})

# OPTIONAL: You define a preprocessing function which will be applied to
# your data before training generated models. This preprocessing function
# uses the scikit-learn preprocessor API.
# Problem.add_dim('preprocessing', {
#     'func': minmaxstdscaler
# })

# You define the create_structure function. This function will return an
# object following the Structure interface. You can also pass kwargs
# arguments such as 'num_cells' to this function.
Problem.add_dim('create_structure', {'func': create_structure})

# You define the hyperparameters used to train your generated models during
# the search.
Problem.add_dim(
    'hyperparameters', {
        'batch_size': 256,
        'learning_rate': 0.01,
        'optimizer': 'adam',
        'num_epochs': 1,
    })

if __name__ == '__main__':
    print(Problem)