def initialize_parameters():
    """Build a CANDLE Benchmark and return its hyperparameter dictionary.

    The default parameter file is named by the DEFAULT_PARAMS_FILE
    environment variable; the CANDLE checkout location comes from the
    CANDLE environment variable.

    Returns:
        dict: the model hyperparameters parsed by CANDLE.
    """
    print('Initializing parameters...')

    import os
    import sys

    # Obtain the path of the directory of this script
    file_path = os.path.dirname(os.path.realpath(__file__))

    # BUG FIX: `candle_lib` was used in sys.path.append() without ever
    # being defined, which raised a NameError.  Derive it from the CANDLE
    # environment variable, consistent with the other examples in this file.
    candle_lib = os.getenv("CANDLE") + '/Candle/common'
    sys.path.append(candle_lib)

    # Import the CANDLE library
    import candle_keras as candle

    # Instantiate the candle.Benchmark class (prog/desc only affect help text)
    mymodel_common = candle.Benchmark(file_path,
                                      os.getenv("DEFAULT_PARAMS_FILE"),
                                      'keras',
                                      prog='myprog',
                                      desc='My model')

    # Get a dictionary of the model hyperparameters
    hyperparams = candle.initialize_parameters(mymodel_common)

    # Return the dictionary of the hyperparameters
    return hyperparams
def initialize_parameters():
    """Create the UNET example benchmark and return its parsed parameters."""
    # Describe the benchmark to the CANDLE parameter machinery.
    unet_common = unet.UNET(
        unet.file_path,
        'unet_params.txt',
        'keras',
        prog='unet_example',
        desc='UNET example',
    )
    # CANDLE merges defaults, the parameter file, and command-line flags.
    return candle.initialize_parameters(unet_common)
def initialize_parameters():
    """Construct the Uno benchmark and return its parsed hyperparameters."""
    # Describe the benchmark to CANDLE's argument/config machinery.
    uno_benchmark = benchmark.BenchmarkUno(
        benchmark.file_path,
        'uno_default_model.txt',
        'keras',
        prog='uno_baseline',
        desc='Build neural network based models to predict tumor response to single and paired drugs.',
    )
    # Resolve defaults, the model file, and command-line overrides.
    params = candle.initialize_parameters(uno_benchmark)
    return params
def initialize_parameters():
    """Construct the NT3 benchmark and return the parsed hyperparameters."""
    nt3_benchmark = bmk.BenchmarkNT3(
        bmk.file_path,
        'nt3_default_model.txt',
        'keras',
        prog='nt3_baseline',
        desc='Multi-task (DNN) for data extraction from clinical reports - Pilot 3 Benchmark 1',
    )
    # CANDLE resolves defaults, the model file, and command-line flags.
    return candle.initialize_parameters(nt3_benchmark)
def initialize_parameters():
    """Construct the GAE benchmark object and return its parameters."""
    # The gae callable builds the benchmark description consumed by CANDLE.
    benchmark_obj = gae(
        file_path,
        'gae_params.txt',
        'keras',
        prog='gae_baseline_keras2',
        desc='GAE Network',
    )
    return candle.initialize_parameters(benchmark_obj)
def initialize_parameters():
    """Create the MNIST CNN benchmark and return its parsed parameters.

    Returns:
        dict: hyperparameters resolved by CANDLE.
    """
    mnist_common = mnist.MNIST(mnist.file_path,
                               'mnist_params.txt',
                               'keras',
                               prog='mnist_cnn',
                               desc='MNIST CNN example')

    # Initialize parameters
    gParameters = candle.initialize_parameters(mnist_common)

    # BUG FIX: the original built an unused CSVLogger whose filename
    # interpolated the *entire* parameter dictionary
    # ('{}/params.log'.format(gParameters)), producing a bogus path.
    # The logger was never returned or attached to any callback list, so
    # it is removed.  If CSV logging is wanted, create the logger where
    # callbacks are assembled, using the run's output directory — TODO
    # confirm the correct key (e.g. gParameters['save']).
    return gParameters
def initialize_parameters():
    """Build the MNIST MLP benchmark (UQ variant) and parse its parameters."""
    # Benchmark description consumed by the CANDLE parameter machinery.
    common = mnist_uq.MNIST(
        mnist_uq.file_path,
        'mnist_complex.txt',
        'keras',
        prog='mnist_mlp',
        desc='MNIST example',
    )
    params = candle.initialize_parameters(common)
    return params
def initialize_parameters():
    """Build the P1B2 classifier benchmark and return its parameters."""
    bmk_obj = p1b2.BenchmarkP1B2(
        p1b2.file_path,
        'p1b2_default_model.txt',
        'keras',
        prog='p1b2_baseline',
        desc='Train Classifier - Pilot 1 Benchmark 2',
    )
    # CANDLE merges defaults, the model file, and command-line overrides.
    return candle.initialize_parameters(bmk_obj)
def initialize_parameters():
    """Build the MNIST MLP benchmark and return its parsed parameters.

    The default parameter file is named by the DEFAULT_PARAMS_FILE
    environment variable.

    Returns:
        dict: hyperparameters resolved by CANDLE.
    """
    # FIX: the import previously sat in the middle of the function body;
    # hoisted to the top so every name is in scope from the start.
    import candle_keras as candle

    mnist_common = mnist.MNIST(mnist.file_path,
                               os.getenv("DEFAULT_PARAMS_FILE"),
                               'keras',
                               prog='mnist_mlp',
                               desc='MNIST example')

    # Initialize parameters
    gParameters = candle.initialize_parameters(mnist_common)

    # BUG FIX: removed an unused CSVLogger whose filename interpolated the
    # whole parameter dict ('{}/params.log'.format(gParameters)), which is
    # not a valid path.  If CSV logging is needed, build the logger where
    # callbacks are assembled, using the run's output directory — TODO
    # confirm the correct key (e.g. gParameters['save']).
    return gParameters
def initialize_parameters():
    """Build the Combo benchmark and return its parsed hyperparameters."""
    combo_benchmark = combo.BenchmarkCombo(
        combo.file_path,
        'combo_default_model.txt',
        'keras',
        prog='combo_baseline',
        desc='Build neural network based models to predict tumor response to drug pairs.',
    )
    # Resolve defaults, the model file, and command-line overrides.
    hyperparams = candle.initialize_parameters(combo_benchmark)
    return hyperparams
def initialize_parameters():
    """Build the P1B1 autoencoder benchmark and return its parameters.

    Returns:
        dict: hyperparameters resolved by CANDLE.
    """
    # BUG FIX: the description was a copy-paste of the NT3/Pilot-3 text
    # ('Multi-task (DNN) for data extraction from clinical reports -
    # Pilot 3 Benchmark 1'); P1B1 is the Pilot 1 Benchmark 1 autoencoder.
    p1b1Bmk = p1b1.BenchmarkP1B1(
        p1b1.file_path,
        'p1b1_default_model.txt',
        'keras',
        prog='p1b1_baseline',
        desc='Train Autoencoder - Pilot 1 Benchmark 1')

    # Initialize parameters
    gParameters = candle.initialize_parameters(p1b1Bmk)
    #p1b1.logger.info('Params: {}'.format(gParameters))

    return gParameters
def initialize_parameters():
    """Build the T29 resnet benchmark, register extra flags, and parse params."""
    t29_common = candle_keras.Benchmark(
        file_path, 't29_default_model.txt', 'keras',
        prog='t29res.py', desc='resnet')

    # Need a pointer to the docs showing what is provided by default.
    # Extra command-line options beyond the CANDLE common set, written as
    # (name, default, type, help) tuples and expanded into the dictionary
    # form CANDLE expects.
    extra = [
        ('connections', 1, int, 'The number of residual connections.'),
        ('distance', 1, int, 'Residual connection distance between dense layers.'),
        ('model', 'model.json', str, 'Name of json model description file.'),
        ('weights', 'model.h5', str, 'Name of h5 weights file.'),
        ('n_pred', 1, int, 'Number of predictions to do on each sample.'),
    ]
    t29_common.additional_definitions = [
        {'name': n, 'default': d, 'type': t, 'help': h}
        for n, d, t, h in extra
    ]
    return candle_keras.initialize_parameters(t29_common)
def initialize_parameters():
    """Build the Uno benchmark from DEFAULT_PARAMS_FILE and parse parameters."""
    # The parameter file is taken from the environment rather than hard-coded.
    uno_bmk = benchmark.BenchmarkUno(
        benchmark.file_path,
        os.getenv("DEFAULT_PARAMS_FILE"),
        'keras',
        prog='uno_baseline',
        desc='Build neural network based models to predict tumor response to single and paired drugs.',
    )
    # Resolve defaults, the parameter file, and command-line overrides.
    return candle.initialize_parameters(uno_bmk)
def initialize_parameters():
    """Build the T29 resnet benchmark with two extra flags and parse params."""
    t29 = candle_keras.Benchmark(
        file_path, 't29_default_model.txt', 'keras',
        prog='t29res.py', desc='resnet')

    # Need a pointer to the docs showing what is provided by default.
    # Register the residual-network specific command-line options.
    t29.additional_definitions = [
        dict(name='connections', default=1, type=int,
             help='The number of residual connections.'),
        dict(name='distance', default=1, type=int,
             help='Residual connection distance between dense layers.'),
    ]
    return candle_keras.initialize_parameters(t29)
def initialize_parameters():
    """Build the T29 resnet benchmark, add residual-net flags, and parse params.

    Returns:
        dict: hyperparameters resolved by CANDLE.
    """
    # BUG FIX: prog was '/t29res.py'; the stray leading slash made the
    # program name inconsistent with the sibling T29 examples and would
    # appear verbatim in --help output.
    t29_common = candle_keras.Benchmark(file_path,
                                        't29_default_model.txt', 'keras',
                                        prog='t29res.py', desc='resnet')

    # A list of common parsed parameters is available here:
    # https://ecp-candle.github.io/Candle/html/_modules/default_utils.html#get_common_parser
    additional_definitions = [
        {'name': 'connections', 'default': 1, 'type': int,
         'help': 'The number of residual connections.'},
        {'name': 'distance', 'default': 1, 'type': int,
         'help': 'Residual connection distance between dense layers.'},
    ]
    t29_common.additional_definitions = additional_definitions

    gParameters = candle_keras.initialize_parameters(t29_common)
    return gParameters
def initialize_parameters():
    """Put the CANDLE library on sys.path, then parse the model parameters.

    The parameter file is named by the DEFAULT_PARAMS_FILE environment
    variable; the CANDLE checkout location comes from the CANDLE variable.
    """
    import os
    import sys

    # Make candle_keras importable from the user's CANDLE checkout.
    sys.path.append(os.getenv("CANDLE") + '/Candle/common')
    import candle_keras as candle

    # prog/desc only affect help text, so their exact values don't matter.
    script_dir = os.path.dirname(os.path.realpath(__file__))
    mymodel_common = candle.Benchmark(
        script_dir,
        os.getenv("DEFAULT_PARAMS_FILE"),
        'keras',
        prog='myprogram',
        desc='My CANDLE example',
    )

    # Read the parameters (a dictionary) described by DEFAULT_PARAMS_FILE.
    return candle.initialize_parameters(mymodel_common)
def initialize_parameters():
    """Parse P2B1 parameters, validate the Keras backend, and set image format.

    Returns the parameter dictionary (GP) produced by CANDLE.  Side
    effects: prints the parameter table, sets the KERAS_BACKEND
    environment variable, reloads the Keras backend module, and forces
    the 'channels_last' image data format.
    """
    # Build benchmark object
    p2b1Bmk = p2b1.BenchmarkP2B1(
        p2b1.file_path, 'p2b1_default_model.txt', 'keras',
        prog='p2b1_baseline',
        desc='Train Molecular Frame Autoencoder - Pilot 2 Benchmark 1')
    # Initialize parameters
    GP = candle.initialize_parameters(p2b1Bmk)
    #p2b1.logger.info('Params: {}'.format(gParameters))

    # Echo every resolved parameter so the run is self-documenting.
    print('\nTraining parameters:')
    for key in sorted(GP):
        print("\t%s: %s" % (key, GP[key]))
    # print json.dumps(GP, indent=4, skipkeys=True, sort_keys=True)

    # Only the theano and tensorflow backends are supported; abort otherwise.
    if GP['backend'] != 'theano' and GP['backend'] != 'tensorflow':
        sys.exit('Invalid backend selected: %s' % GP['backend'])

    # KERAS_BACKEND must be set *before* K is reloaded so the chosen
    # backend takes effect.  NOTE(review): `reload` is presumably supplied
    # at module level (Python 2 builtin or importlib.reload) — confirm.
    os.environ['KERAS_BACKEND'] = GP['backend']
    reload(K)
    '''
    if GP['backend'] == 'theano':
        K.set_image_dim_ordering('th')
    elif GP['backend'] == 'tensorflow':
        K.set_image_dim_ordering('tf')
    '''
    K.set_image_data_format('channels_last')
    #"th" format means that the convolutional kernels will have the shape (depth, input_depth, rows, cols)
    #"tf" format means that the convolutional kernels will have the shape (rows, cols, input_depth, depth)
    print("Image data format: ", K.image_data_format())
    # print "Image ordering: ", K.image_dim_ordering()
    return GP