import time

from deephyper.benchmark import NaProblem


def create_problem(load_data):
    Problem = NaProblem(seed=2019)

    Problem.load_data(load_data)

    # Problem.preprocessing(minmaxstdscaler)

    Problem.search_space(create_search_space, num_layers=10)

    Problem.hyperparameters(
        verbose=0,
        batch_size=100,
        learning_rate=0.001,  # lr search: 0.01, lr post: 0.001
        optimizer='adam',
        num_epochs=50,
        callbacks=dict(
            EarlyStopping=dict(
                monitor='val_r2', mode='max', verbose=0, patience=5
            )
        ),
    )

    Problem.loss('mse')

    Problem.metrics(['r2'])

    Problem.objective('val_r2__last')

    Problem.post_training(
        num_epochs=1000,
        metrics=['r2'],
        callbacks=dict(
            ModelCheckpoint={
                'monitor': 'val_r2',
                'mode': 'max',
                'save_best_only': True,
                'verbose': 1
            },
            EarlyStopping={
                'monitor': 'val_r2',
                'mode': 'max',
                'verbose': 1,
                'patience': 50
            },
            TensorBoard=dict(log_dir='{}'.format(time.time())),
        ),
    )

    return Problem


if __name__ == '__main__':
    Problem = create_problem(load_data)  # a concrete `load_data` callable is required
    print(Problem)

    from pprint import pprint
    pprint(Problem.space)
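# For reference, Problem.load_data expects a callable that returns the train and
# validation splits. The loader below is only an illustrative sketch, assuming the
# usual DeepHyper NAS convention of returning (X_train, y_train), (X_valid, y_valid)
# NumPy arrays; the synthetic data, shapes, and the name `example_load_data` are
# placeholders, not part of the original problem.
import numpy as np


def example_load_data(verbose=0):
    rng = np.random.RandomState(2019)
    X = rng.uniform(-1.0, 1.0, size=(1000, 10))
    y = X.sum(axis=1, keepdims=True)  # simple linear regression target
    X_train, X_valid = X[:800], X[800:]
    y_train, y_valid = y[:800], y[800:]
    return (X_train, y_train), (X_valid, y_valid)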
from deephyper.benchmark import NaProblem
from candlepb.Uno.structs.uno_mlp_1 import create_structure
from candlepb.Uno.uno_baseline_keras2 import load_data_multi_array

Problem = NaProblem()

Problem.load_data(load_data_multi_array)

# Problem.preprocessing(minmaxstdscaler)

Problem.search_space(create_structure, num_cells=3)

Problem.hyperparameters(
    batch_size=64,
    learning_rate=0.001,
    optimizer='adam',
    num_epochs=1,
)

Problem.loss('mse')

Problem.metrics(['r2'])

Problem.objective('val_r2__last')

Problem.post_training(
    num_epochs=1000,
    metrics=['r2'],
    model_checkpoint={
        'monitor': 'val_r2',
        'mode': 'max',
        'save_best_only': True,
    },
)
from deephyper.benchmark import NaProblem
from deephyper.benchmark.nas.linearRegMultiLoss.load_data import load_data
from deephyper.search.nas.model.baseline.simple_bi_model import create_search_space
from deephyper.search.nas.model.preprocessing import minmaxstdscaler

Problem = NaProblem(seed=2019)

Problem.load_data(load_data)

# Problem.preprocessing(minmaxstdscaler)

Problem.search_space(create_search_space, num_layers=10)

Problem.hyperparameters(
    batch_size=100,
    learning_rate=0.1,
    optimizer="adam",
    num_epochs=20,
)

Problem.loss(
    loss={"output_0": "mse", "output_1": "mse"},
    weights={"output_0": 0.0, "output_1": 1.0},
)

Problem.metrics({"output_0": ["r2", "mse"], "output_1": "mse"})
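# The loss/weights dictionaries above follow the Keras multi-output convention:
# the quantity minimized is the weighted sum of the per-output losses, so with
# weights 0.0 and 1.0 only output_1's MSE drives training while output_0 is
# still computed and reported. The snippet below is a stand-alone numerical
# illustration of that weighted combination, not DeepHyper code; the sample
# values are arbitrary.
import numpy as np


def mse(y_true, y_pred):
    return float(np.mean((y_true - y_pred) ** 2))


y_true = {"output_0": np.array([1.0, 2.0]), "output_1": np.array([0.5, 1.5])}
y_pred = {"output_0": np.array([0.0, 0.0]), "output_1": np.array([0.5, 1.0])}
weights = {"output_0": 0.0, "output_1": 1.0}

total_loss = sum(weights[k] * mse(y_true[k], y_pred[k]) for k in weights)
# -> 0.125: output_0's large error is masked by its zero weight.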
from deephyper.benchmark import NaProblem
from nascd.xorandor.load_data import load_data
from nascd.xorandor.search_space import create_search_space

# from deephyper.search.nas.model.preprocessing import stdscaler

Problem = NaProblem(seed=4968214)

Problem.load_data(load_data)

# Problem.preprocessing(stdscaler)

Problem.search_space(create_search_space)

Problem.hyperparameters(
    batch_size=2,
    learning_rate=1.0,
    optimizer="rmsprop",
    num_epochs=2500,
    verbose=0,
    callbacks=dict(
        EarlyStopping=dict(
            monitor="loss",  # or 'val_acc' ?
            mode="min",
            verbose=0,
            patience=5,
        )
    ),
)

Problem.loss("binary_crossentropy")  # or 'categorical_crossentropy' ?

Problem.metrics(["binary_accuracy"])  # or 'acc' ?
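# The callbacks=dict(EarlyStopping=dict(...)) entries used in these problems
# mirror the keyword arguments of the Keras callback of the same name. For
# comparison only, the equivalent object built directly with tf.keras would
# look like the sketch below, assuming a TensorFlow/Keras backend; it is not
# part of the original problem definitions.
import tensorflow as tf

early_stopping = tf.keras.callbacks.EarlyStopping(
    monitor="loss",  # quantity watched during training
    mode="min",      # stop when it stops decreasing
    verbose=0,
    patience=5,      # epochs without improvement before stopping
)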