"""NAS problem definition for the CANDLE Uno benchmark (multi-array inputs)."""
from deephyper.benchmark import NaProblem
from candlepb.Uno.structs.uno_mlp_1 import create_structure
from candlepb.Uno.uno_baseline_keras2 import load_data_multi_array

Problem = NaProblem()

Problem.load_data(load_data_multi_array)

# Problem.preprocessing(minmaxstdscaler)

Problem.search_space(create_structure, num_cells=3)

Problem.hyperparameters(
    batch_size=64,
    learning_rate=0.001,
    optimizer='adam',
    num_epochs=1,
)

Problem.loss('mse')

Problem.metrics(['r2'])

Problem.objective('val_r2__last')

# NOTE(review): the source was truncated in the middle of this call (the
# model_checkpoint dict and closing parens are missing). Kept as a comment so
# the module stays importable — TODO: recover the full call from the original.
# Problem.post_training(num_epochs=1000,
#                       metrics=['r2'],
#                       model_checkpoint={
#                           'monitor': 'val_r2',
#                           'mode': 'max',
#                           'save_best_only': True,
"""NAS problem definition: linear regression with two outputs and weighted
multi-loss (output_0 weight 0.0, output_1 weight 1.0)."""
from deephyper.benchmark import NaProblem
from deephyper.benchmark.nas.linearRegMultiLoss.load_data import load_data
from deephyper.search.nas.model.baseline.simple_bi_model import create_search_space
from deephyper.search.nas.model.preprocessing import minmaxstdscaler

Problem = NaProblem(seed=2019)

Problem.load_data(load_data)

# Problem.preprocessing(minmaxstdscaler)

Problem.search_space(create_search_space, num_layers=10)

Problem.hyperparameters(batch_size=100, learning_rate=0.1, optimizer="adam", num_epochs=20)

# Per-output losses; output_0's loss weight is 0.0, so only output_1's mse
# contributes to the total training loss.
Problem.loss(
    loss={
        "output_0": "mse",
        "output_1": "mse"
    },
    weights={
        "output_0": 0.0,
        "output_1": 1.0
    },
)

Problem.metrics({"output_0": ["r2", "mse"], "output_1": "mse"})
"""NAS problem definition for a linear-regression benchmark."""
from deephyper.benchmark import NaProblem
from deephyper.benchmark.nas.linearReg.load_data import load_data
# NOTE(review): this second import shadows the linearReg load_data above, so
# only linearRegMultiInputsGen's loader is actually used. Confirm which
# dataset is intended and remove the other import.
from deephyper.benchmark.nas.linearRegMultiInputsGen.load_data import load_data
from deephyper.search.nas.model.baseline.simple import create_search_space
from deephyper.search.nas.model.preprocessing import minmaxstdscaler

Problem = NaProblem()

Problem.load_data(load_data)

Problem.preprocessing(minmaxstdscaler)

Problem.search_space(create_search_space)

Problem.hyperparameters(
    batch_size=100,
    learning_rate=0.1,
    optimizer='adam',
    num_epochs=10,
)

Problem.loss('mse')

Problem.metrics(['r2'])

Problem.objective('val_r2')

# Just to print your problem, to test its definition and imports in the current python environment.
if __name__ == '__main__':
    print(Problem)
"""NAS problem definition for the nascd ImprovedFishes regression task."""
from deephyper.benchmark import NaProblem
from nascd.ImprovedFishes.load_data import load_data
from nascd.ImprovedFishes.search_space import create_search_space
from deephyper.search.nas.model.preprocessing import minmaxstdscaler

Problem = NaProblem(seed=2019)

Problem.load_data(load_data)

Problem.preprocessing(minmaxstdscaler)

Problem.search_space(create_search_space)

Problem.hyperparameters(
    batch_size=8,
    learning_rate=0.01,
    optimizer='adam',
    num_epochs=200,
    callbacks=dict(EarlyStopping=dict(
        monitor='r2',  # or 'val_acc' ?
        mode='max',
        verbose=0,
        patience=5)))

Problem.loss('mse')  # or 'categorical_crossentropy' ?

Problem.metrics(['r2'])  # or 'acc' ?

Problem.objective('r2__max')  # or 'val_acc__last' ?

# NOTE(review): the source was truncated here — the post_training(...)
# arguments are missing. Kept as a comment so the module stays importable;
# TODO: recover the full call from the original file.
# Problem.post_training(
"""NAS problem definition for the CANDLE Uno benchmark (multi-array inputs),
with post-training disabled (commented out)."""
from deephyper.benchmark import NaProblem
from candlepb.Uno.structs.uno_mlp_1 import create_structure
from candlepb.Uno.uno_baseline_keras2 import load_data_multi_array

Problem = NaProblem()

Problem.load_data(load_data_multi_array)

# Problem.preprocessing(minmaxstdscaler)

Problem.search_space(create_structure, num_cells=3)

Problem.hyperparameters(
    batch_size=64,
    learning_rate=0.001,
    optimizer='adam',
    num_epochs=1,
)

Problem.loss('mse')

Problem.metrics(['r2'])

Problem.objective('val_r2__last')

# Post-training stage left disabled; the commented fragment below is cut off
# in the source — presumably the rest of the model_checkpoint dict followed.
# Problem.post_training(
#     num_epochs=1000,
#     metrics=['r2'],
#     # model_checkpoint={
#     #     'monitor': 'val_r2',
#     #     'mode': 'max',
"""NAS problem definition for the mnist1D classification benchmark."""
from deephyper.benchmark import NaProblem
from deephyper.benchmark.nas.mnist1D.load_data import load_data
from deephyper.search.nas.model.baseline.simple import create_search_space
from deephyper.search.nas.model.preprocessing import minmaxstdscaler

Problem = NaProblem()

Problem.load_data(load_data)

Problem.search_space(create_search_space)

Problem.hyperparameters(
    batch_size=100,
    learning_rate=0.1,
    optimizer='adam',
    num_epochs=10,
)

Problem.loss('categorical_crossentropy')

Problem.metrics(['acc'])

Problem.objective('val_acc')

# Just to print your problem, to test its definition and imports in the current python environment.
if __name__ == '__main__':
    print(Problem)
"""NAS problem definition for the multi-variable linear-regression benchmark."""
from deephyper.benchmark import NaProblem
from deephyper.benchmark.nas.linearRegMultiVar.load_data import load_data
from deephyper.search.nas.model.baseline.simple_deep import create_search_space
from deephyper.search.nas.model.preprocessing import minmaxstdscaler

Problem = NaProblem(seed=2019)

Problem.load_data(load_data)

# Problem.preprocessing(minmaxstdscaler)

Problem.search_space(create_search_space)

Problem.hyperparameters(batch_size=100, learning_rate=0.1, optimizer="adam", num_epochs=1)

Problem.loss("mse")

Problem.metrics(["r2"])

Problem.objective("val_r2")

# Just to print your problem, to test its definition and imports in the current python environment.
if __name__ == "__main__":
    print(Problem)
## /Users/yzamora/nas_problems/nas_problems/polynome2
"""NAS problem definition for the polynome2 regression benchmark."""
import time

from deephyper.benchmark import NaProblem
from deephyper.search.nas.model.preprocessing import minmaxstdscaler
from nas_problems.polynome2.load_data import load_data
from nas_problems.polynome2.architecture import create_search_space

Problem = NaProblem(seed=2019)

Problem.load_data(load_data)

#Problem.preprocessing(minmaxstdscaler)

Problem.search_space(create_search_space, num_layers=10)

Problem.hyperparameters(
    verbose=0,
    batch_size=100,
    learning_rate=0.001,  # lr search: 0.01, lr post: 0.001
    optimizer='adam',
    num_epochs=50,
    callbacks=dict(EarlyStopping=dict(
        monitor='val_r2',
        mode='max',
        verbose=0,
        patience=5)))

Problem.loss('mse')

Problem.metrics(['r2'])

Problem.objective('val_r2__last')

# NOTE(review): the source was truncated in the middle of this call — the
# remaining post_training arguments are missing. Kept as a comment so the
# module stays importable; TODO: recover the full call from the original.
# Problem.post_training(num_epochs=1000,
"""NAS problem definition for the CANDLE NT3 classification benchmark."""
from candlepb.NT3.models.candle_conv_mlp_baseline import create_structure
from candlepb.NT3.nt3_baseline_keras2 import load_data
from deephyper.benchmark import NaProblem

Problem = NaProblem()

Problem.load_data(load_data)

# Problem.preprocessing(minmaxstdscaler)

Problem.search_space(create_structure)

Problem.hyperparameters(
    batch_size=20,
    learning_rate=0.01,
    optimizer='adam',
    num_epochs=1,
)

Problem.loss('categorical_crossentropy')

Problem.metrics(['acc'])

Problem.objective('val_acc__last')

# NOTE(review): the source was truncated in the middle of this call (the
# model_checkpoint dict is never closed). Kept as a comment so the module
# stays importable; TODO: recover the full call from the original file.
# Problem.post_training(num_epochs=1000,
#                       metrics=['acc'],
#                       model_checkpoint={
#                           'monitor': 'val_acc',
#                           'mode': 'max',
#                           'save_best_only': True,
"""NAS problem definition for the polynome2 benchmark (size-1000 dataset)."""
from nas_problems.polynome2.load_data import load_data
from nas_problems.polynome2.search_space import create_search_space
from deephyper.benchmark import NaProblem

Problem = NaProblem()

Problem.load_data(load_data, size=1000)

Problem.search_space(create_search_space)

Problem.hyperparameters(
    batch_size=128,
    learning_rate=0.001,
    optimizer='rmsprop',
    num_epochs=5,
)

Problem.loss('mse')

Problem.metrics(['r2'])

Problem.objective('val_r2__last')

# NOTE(review): the source was truncated before this call was closed — any
# further post_training arguments are missing. Kept as a comment so the
# module stays importable; TODO: recover the full call from the original.
# Problem.post_training(num_epochs=60,
#                       metrics=['r2'],
#                       model_checkpoint={
#                           'monitor': 'val_r2',
#                           'mode': 'max',
#                           'save_best_only': True,
#                           'verbose': 1
#                       },
"""NAS problem definition using sibling load_data/search_space modules,
located relative to this file via sys.path."""
from deephyper.benchmark import NaProblem
import os
import sys

HERE = os.path.dirname(os.path.abspath(
    __file__))  # useful to locate data files with respect to this file
sys.path.insert(0, HERE)

# These imports rely on the sys.path insertion above, so they must stay
# below it.
from load_data import load_data
from search_space import create_search_space
from deephyper.search.nas.model.preprocessing import minmaxstdscaler

Problem = NaProblem(seed=2019)

Problem.load_data(load_data)

# Problem.preprocessing(minmaxstdscaler)

Problem.search_space(create_search_space, num_layers=5)

Problem.hyperparameters(
    batch_size=32,
    learning_rate=0.001,
    optimizer='adam',
    num_epochs=20,
    callbacks=dict(EarlyStopping=dict(
        monitor='r2',  # or 'val_acc' ?
        mode='max',
        verbose=0,
        patience=10)))

Problem.loss('mse')  # or 'categorical_crossentropy' ?
# NOTE(review): the visible source ends here; the metrics/objective calls
# that usually follow appear to have been cut off — confirm against the
# original file.
"""NAS problem definition for the CANDLE NT3 benchmark (conv_mlp_1 structure)."""
from candlepb.NT3.models.candle_conv_mlp_1 import create_structure
from deephyper.benchmark import NaProblem
from candlepb.NT3.problems.load_data import load_data

# NOTE(review): the imports below are unused in the visible code — likely
# leftovers from a preprocessing experiment; confirm before removing.
import os
import numpy as np
from typing import Tuple
import pandas as pd
from sklearn.preprocessing import StandardScaler, MinMaxScaler, MaxAbsScaler
from keras.utils import np_utils

Problem = NaProblem()

Problem.load_data(load_data)

Problem.search_space(create_structure)

Problem.hyperparameters(
    batch_size=20,
    learning_rate=0.01,
    optimizer='adam',
    num_epochs=1,
    ranks_per_node=1)

Problem.loss('categorical_crossentropy')

Problem.metrics(['acc'])

Problem.objective('val_acc__last')

# Problem.post_training(
"""NAS problem definition for the nascd xorandor binary-classification task."""
from deephyper.benchmark import NaProblem
from nascd.xorandor.load_data import load_data
from nascd.xorandor.search_space import create_search_space
# from deephyper.search.nas.model.preprocessing import stdscaler

Problem = NaProblem(seed=4968214)

Problem.load_data(load_data)

# Problem.preprocessing(stdscaler)

Problem.search_space(create_search_space)

Problem.hyperparameters(
    batch_size=2,
    learning_rate=1.0,
    optimizer="rmsprop",
    num_epochs=2500,
    verbose=0,
    callbacks=dict(EarlyStopping=dict(
        monitor="loss", mode="min", verbose=0, patience=5  # or 'val_acc' ?
    )),
)

Problem.loss("binary_crossentropy")  # or 'categorical_crossentropy' ?

Problem.metrics(["binary_accuracy"])  # or 'acc' ?
"""NAS problem definition for the nascd fishes regression task."""
from deephyper.benchmark import NaProblem
from nascd.fishes.load_data import load_data
from nascd.fishes.search_space import create_search_space
from deephyper.search.nas.model.preprocessing import minmaxstdscaler

Problem = NaProblem(seed=2019)

Problem.load_data(load_data)

Problem.preprocessing(minmaxstdscaler)

Problem.search_space(create_search_space)

Problem.hyperparameters(
    batch_size=8,
    learning_rate=0.01,
    optimizer="adam",
    num_epochs=200,
    verbose=0,
    callbacks=dict(EarlyStopping=dict(
        monitor="r2", mode="max", verbose=0, patience=5  # or 'val_acc' ?
    )),
)

Problem.loss("mse")  # or 'categorical_crossentropy' ?

Problem.metrics(["r2"])  # or 'acc' ?