def test_create_structure():
    from random import random, seed
    from deephyper.search.nas.model.space.structure import KerasStructure
    from deephyper.search.nas.model.baseline.anl_mlp_2 import create_structure
    from deephyper.core.model_utils import number_parameters

    seed(10)
    structure = create_structure([(10, 1), (10, 1)], (1, ), 5)
    assert type(structure) is KerasStructure

    ops = [random() for i in range(structure.num_nodes)]
    structure.set_ops(ops)
    structure.draw_graphviz('graph_anl_mlp_2_test.dot')

    model = structure.create_model()

    import numpy as np
    x = np.zeros((1, 10, 1))
    y = model.predict([x, x])

    print(f'shape(x): {np.shape(x)}')
    print(f'shape(y): {np.shape(y)}')

    nparameters = number_parameters()
    print('number of parameters: ', nparameters)

    assert np.shape(y) == (1, 1), \
        f'Wrong output shape {np.shape(y)} should be {(1, 1)}'
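
# Minimal sketch (assumption, not in the original file): a direct entry point so
# the test above can also be run as a plain script for quick local debugging,
# outside of a pytest session.
if __name__ == '__main__':
    test_create_structure()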
def test_create_structure():
    from random import random, seed
    from deephyper.search.nas.model.space.structure import KerasStructure
    from deephyper.core.model_utils import number_parameters
    from tensorflow.keras.utils import plot_model
    import tensorflow as tf
    # NOTE: create_structure is used below but never imported in the original
    # source; it is expected to come from the baseline module under test
    # (presumably a candle_mlp_5 baseline, by analogy with the first test).

    # seed(10)
    shapes = [(942, ), (3820, ), (3820, )]
    structure = create_structure(shapes, (1, ))
    assert type(structure) is KerasStructure

    # Sample one operation choice per node of the search space.
    ops = [random() for i in range(structure.num_nodes)]
    # ops = [
    #     0.07692307692307693,
    #     0.5384615384615384,
    #     0.07692307692307693,
    #     0.07692307692307693,
    #     0.07692307692307693,
    #     0.5384615384615384,
    #     0.5384615384615384,
    #     0.07692307692307693,
    #     0.5384615384615384,
    #     0.0
    # ]
    print('num ops: ', len(ops))
    print('size: ', structure.size)
    structure.set_ops(ops)
    structure.draw_graphviz('graph_candle_mlp_5.dot')

    model = structure.create_model()
    print('depth: ', structure.depth)
    plot_model(model, to_file='graph_candle_mlp_5.png', show_shapes=True)

    import numpy as np
    x0 = np.zeros((1, *shapes[0]))
    x1 = np.zeros((1, *shapes[1]))
    x2 = np.zeros((1, *shapes[2]))
    inpts = [x0, x1, x2]
    y = model.predict(inpts)

    for x in inpts:
        print(f'shape(x): {np.shape(x)}')
    print(f'shape(y): {np.shape(y)}')

    total_parameters = number_parameters()
    print('total_parameters: ', total_parameters)

    model.summary()
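
# Reproducibility sketch (assumption, not part of the original test): restoring
# the commented-out seed(10) above only fixes the sampled `ops`; seeding numpy
# and TensorFlow as well also pins weight initialization, so repeated runs are
# directly comparable.
def seed_everything(s=10):
    import random
    import numpy as np
    import tensorflow as tf
    random.seed(s)          # fixes the randomly sampled ops
    np.random.seed(s)       # fixes numpy-based randomness
    tf.random.set_seed(s)   # TF 2.x API; on TF 1.x use tf.set_random_seed(s)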
def main(config):
    # NOTE: this function relies on module-level names defined elsewhere in the
    # original file (not shown here): np (numpy), signature (inspect), traceback,
    # stats (scipy.stats), keras and plot_model (tensorflow.keras), util
    # (deephyper), number_parameters, TrainerRegressorTrainValid,
    # TrainerClassifierTrainValid, and the constants NUM_EPOCHS, PROP, ARCH_SEQ.
    num_epochs = NUM_EPOCHS

    load_data = config['load_data']['func']

    print('[PARAM] Loading data')
    # Loading data: call load_data with whatever arguments its signature accepts.
    kwargs = config['load_data'].get('kwargs')
    sig_load_data = signature(load_data)
    if len(sig_load_data.parameters) == 0:
        data = load_data()
    else:
        if 'prop' in sig_load_data.parameters:
            if kwargs is None:
                data = load_data(prop=PROP)
            else:
                kwargs['prop'] = PROP
                data = load_data(**kwargs)
        else:
            if kwargs is None:
                data = load_data()
            else:
                data = load_data(**kwargs)
    print('[PARAM] Data loaded')

    # Set data shape
    if type(data) is tuple:
        if len(data) != 2:
            raise RuntimeError(
                f'Loaded data is a tuple; expected ((training_input, training_output), (validation_input, validation_output)) but length=={len(data)}'
            )
        (t_X, t_y), (v_X, v_y) = data
        if type(t_X) is np.ndarray and type(t_y) is np.ndarray and \
                type(v_X) is np.ndarray and type(v_y) is np.ndarray:
            input_shape = np.shape(t_X)[1:]
        elif type(t_X) is list and type(t_y) is np.ndarray and \
                type(v_X) is list and type(v_y) is np.ndarray:
            # interested in the shape of the data, not its length
            input_shape = [np.shape(itX)[1:] for itX in t_X]
        else:
            raise RuntimeError(
                f'Data returned by load_data function is of a wrong type: type(t_X)=={type(t_X)}, type(t_y)=={type(t_y)}, type(v_X)=={type(v_X)}, type(v_y)=={type(v_y)}'
            )
        output_shape = np.shape(t_y)[1:]
        config['data'] = {
            'train_X': t_X,
            'train_Y': t_y,
            'valid_X': v_X,
            'valid_Y': v_y
        }
    elif type(data) is dict:
        config['data'] = data
        input_shape = [
            data['shapes'][0][f'input_{i}']
            for i in range(len(data['shapes'][0]))
        ]
        output_shape = data['shapes'][1]
    else:
        raise RuntimeError(
            f'Data returned by load_data function is of an unsupported type: {type(data)}'
        )

    # Build the search space and apply the fixed architecture sequence.
    cs_kwargs = config['create_structure'].get('kwargs')
    if cs_kwargs is None:
        structure = config['create_structure']['func'](input_shape, output_shape)
    else:
        structure = config['create_structure']['func'](input_shape, output_shape,
                                                       **cs_kwargs)

    arch_seq = ARCH_SEQ
    structure.set_ops(arch_seq)
    try:
        structure.draw_graphviz('graph_full.dot')
    except Exception:
        pass

    print('Model operations set.')

    if config.get('preprocessing') is not None:
        preprocessing = util.load_attr_from(config['preprocessing']['func'])
        config['preprocessing']['func'] = preprocessing
        print(f"Preprocessing set with: {config['preprocessing']}")
    else:
        print('No preprocessing...')
        config['preprocessing'] = None

    # Create the Keras model and the matching trainer.
    model_created = False
    if config['regression']:
        try:
            model = structure.create_model()
            model_created = True
        except Exception:
            model_created = False
            print('Error: Model creation failed...')
            print('INFO STACKTRACE: ', traceback.format_exc())
        if model_created:
            try:
                plot_model(model, to_file='model.png', show_shapes=True)
                model.summary()
            except Exception:
                print("can't create model.png file...")
                print('INFO STACKTRACE: ', traceback.format_exc())
            try:
                model.load_weights("model_weights.h5")
                print('model weights loaded!')
            except Exception:
                print('failed to load model weights...')
                print('INFO STACKTRACE: ', traceback.format_exc())
            trainer = TrainerRegressorTrainValid(config=config, model=model)
    else:
        try:
            model = structure.create_model(activation='softmax')
            model_created = True
        except Exception:
            model_created = False
            print('Error: Model creation failed...')
            print('INFO STACKTRACE: ', traceback.format_exc())
        if model_created:
            try:
                plot_model(model, to_file='model.png', show_shapes=True)
            except Exception:
                print("can't create model.png file...")
                print('INFO STACKTRACE: ', traceback.format_exc())
            try:
                model.load_weights("model_weights.h5")
                print('model weights loaded!')
            except Exception:
                print('failed to load model weights...')
                print('INFO STACKTRACE: ', traceback.format_exc())
            trainer = TrainerClassifierTrainValid(config=config, model=model)

    tb_cb = keras.callbacks.TensorBoard(histogram_freq=0, batch_size=256,
                                        write_grads=True)
    trainer.add_callback(tb_cb)

    print('Trainer is ready.')
    print(f'Start training... num_epochs={num_epochs}')

    nparams = number_parameters()
    print('model number of parameters: ', nparams)

    if NUM_EPOCHS > 0:
        trainer.train(num_epochs=num_epochs)

    # serialize weights to HDF5
    model.save_weights("model_weights.h5")
    print("Saved model weights to disk: model_weights.h5")

    if config['regression']:
        # Pearson correlation per output dimension on the validation set.
        y_orig, y_pred = trainer.predict('valid')

        r_list = list()
        for dim in range(np.shape(y_orig)[1]):
            r, _ = stats.pearsonr(y_orig[:, dim], y_pred[:, dim])
            r_list.append(r)

        print('r_list: ', r_list)
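
# Illustrative config sketch (an assumption, not taken from the original file):
# only the key layout mirrors what main() reads above. `my_load_data` and
# `my_create_structure` are hypothetical placeholders for a problem's own
# callables, and the 'preprocessing' entry, when used, holds a dotted-path
# string that util.load_attr_from can resolve.
config = {
    'regression': True,               # True -> TrainerRegressorTrainValid branch
    'load_data': {
        'func': my_load_data,         # hypothetical loader; PROP is injected if it accepts a 'prop' argument
        'kwargs': {},                 # optional keyword arguments
    },
    'create_structure': {
        'func': my_create_structure,  # hypothetical factory: (input_shape, output_shape, **kwargs) -> structure
        'kwargs': {},                 # optional keyword arguments
    },
    'preprocessing': None,            # or {'func': 'my_package.my_module.my_preprocessing'}
}
main(config)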