def __init__(self, model_config):
    """Build a (Dropout)CNN model from *model_config* and load pretrained weights.

    Parses the convolution/MLP network spec, instantiates either a
    ``DropoutCNN`` (when dropout is enabled in either spec) or a plain
    ``CNN``, then loads the pretrained layers named in the config.

    Fixes vs. original: the non-dropout branch referenced an undefined
    local ``batch_size`` (now ``self.batch_size``), and ``l2_reg`` was
    fed ``mlp_config['l1_reg']`` (copy-paste bug; now ``'l2_reg'``).
    """
    super(CNNPredictor, self).__init__(model_config, 'CNN')
    conv_config, conv_layer_config, mlp_config = load_conv_spec(
        self.model_config['nnet_spec'], self.batch_size,
        self.model_config['input_shape'])
    activationFn = parse_activation(mlp_config['activation'])
    if mlp_config['do_dropout'] or conv_config['do_dropout']:
        self.model = DropoutCNN(
            self.numpy_rng, self.theano_rng,
            conv_layer_configs=conv_layer_config,
            batch_size=self.batch_size,
            n_outs=self.model_config['n_outs'],
            hidden_layer_configs=mlp_config,
            hidden_activation=activationFn,
            use_fast=conv_config['use_fast'],
            l1_reg=mlp_config['l1_reg'],
            l2_reg=mlp_config['l2_reg'],  # was mlp_config['l1_reg'] (copy-paste)
            max_col_norm=mlp_config['max_col_norm'],
            input_dropout_factor=conv_config['input_dropout_factor'])
    else:
        self.model = CNN(
            self.numpy_rng, self.theano_rng,
            conv_layer_configs=conv_layer_config,
            batch_size=self.batch_size,  # was bare `batch_size` (undefined here)
            n_outs=self.model_config['n_outs'],
            hidden_layer_configs=mlp_config,
            hidden_activation=activationFn,
            use_fast=conv_config['use_fast'],
            l1_reg=mlp_config['l1_reg'],
            l2_reg=mlp_config['l2_reg'],  # was mlp_config['l1_reg'] (copy-paste)
            max_col_norm=mlp_config['max_col_norm'])
    self.__load_model__(self.model_config['input_file'],
                        mlp_config['pretrained_layers'])
def runCNN3D(arg):
    """Build a CNN3D model from a config dict (or a config file path) and
    load its pretrained weights if an ``input_file`` is configured.

    :param arg: either a ready model-config dict, or an argument passed to
        ``load_model(arg, 'CNN')`` to obtain one.

    Fix vs. original: ``l2_reg`` was fed ``mlp_config['l1_reg']``
    (copy-paste bug; now ``'l2_reg'``).
    """
    if type(arg) is dict:
        model_config = arg
    else:
        model_config = load_model(arg, 'CNN')
    conv_config, conv_layer_config, mlp_config = load_conv_spec(
        model_config['nnet_spec'], model_config['batch_size'],
        model_config['input_shape'])
    data_spec = load_data_spec(model_config['data_spec'],
                               model_config['batch_size'])
    numpy_rng = numpy.random.RandomState(model_config['random_seed'])
    theano_rng = RandomStreams(numpy_rng.randint(2**30))
    logger.info('> ... building the model')
    hidden_activation = parse_activation(mlp_config['activation'])
    createDir(model_config['wdir'])  # create working dir
    batch_size = model_config['batch_size']
    cnn = CNN3D(numpy_rng, theano_rng,
                conv_layer_configs=conv_layer_config,
                batch_size=batch_size,
                n_outs=model_config['n_outs'],
                hidden_layer_configs=mlp_config,
                hidden_activation=hidden_activation,
                l1_reg=mlp_config['l1_reg'],
                l2_reg=mlp_config['l2_reg'],  # was mlp_config['l1_reg'] (copy-paste)
                max_col_norm=mlp_config['max_col_norm'])
    ########################
    #  Loading THE MODEL   #
    ########################
    try:
        # pretraining: both keys must exist for loading to be attempted
        ptr_file = model_config['input_file']
        pretrained_layers = mlp_config['pretrained_layers']
        logger.info("Loading the pretrained network..")
        cnn.load(filename=ptr_file, max_layer_num=pretrained_layers,
                 withfinal=True)
    except KeyError:  # `except KeyError, e` is Py2-only syntax; `e` was unused
        logger.warning(
            "Pretrained network missing in working directory, skipping model loading"
        )
def runCNN(arg):
    """Build a (Dropout)CNN model from a config dict (or a config file path)
    and load its pretrained weights if an ``input_file`` is configured.

    :param arg: either a ready model-config dict, or an argument passed to
        ``load_model(arg, 'CNN')`` to obtain one.

    Fix vs. original: ``l2_reg`` was fed ``mlp_config['l1_reg']``
    (copy-paste bug; now ``'l2_reg'``).
    """
    if type(arg) is dict:
        model_config = arg
    else:
        model_config = load_model(arg, 'CNN')
    conv_config, conv_layer_config, mlp_config = load_conv_spec(
        model_config['nnet_spec'], model_config['batch_size'],
        model_config['input_shape'])
    data_spec = load_data_spec(model_config['data_spec'],
                               model_config['batch_size'])
    # NOTE(review): seed is hard-coded here while runCNN3D reads
    # model_config['random_seed'] — possibly intentional, left unchanged.
    numpy_rng = numpy.random.RandomState(89677)
    theano_rng = RandomStreams(numpy_rng.randint(2**30))
    logger.info('> ... building the model')
    activationFn = parse_activation(mlp_config['activation'])
    createDir(model_config['wdir'])  # create working dir
    batch_size = model_config['batch_size']
    if mlp_config['do_dropout'] or conv_config['do_dropout']:
        logger.info('>Initializing dropout cnn model')
        cnn = DropoutCNN(
            numpy_rng, theano_rng,
            conv_layer_configs=conv_layer_config,
            batch_size=batch_size,
            n_outs=model_config['n_outs'],
            hidden_layer_configs=mlp_config,
            hidden_activation=activationFn,
            use_fast=conv_config['use_fast'],
            l1_reg=mlp_config['l1_reg'],
            l2_reg=mlp_config['l2_reg'],  # was mlp_config['l1_reg'] (copy-paste)
            max_col_norm=mlp_config['max_col_norm'],
            input_dropout_factor=conv_config['input_dropout_factor'])
    else:
        cnn = CNN(numpy_rng, theano_rng,
                  conv_layer_configs=conv_layer_config,
                  batch_size=batch_size,
                  n_outs=model_config['n_outs'],
                  hidden_layer_configs=mlp_config,
                  hidden_activation=activationFn,
                  use_fast=conv_config['use_fast'],
                  l1_reg=mlp_config['l1_reg'],
                  l2_reg=mlp_config['l2_reg'],  # was mlp_config['l1_reg'] (copy-paste)
                  max_col_norm=mlp_config['max_col_norm'])
    ########################
    #  Loading THE MODEL   #
    ########################
    try:
        # pretraining: both keys must exist for loading to be attempted
        ptr_file = model_config['input_file']
        pretrained_layers = mlp_config['pretrained_layers']
        logger.info("Loading the pretrained network..")
        cnn.load(filename=ptr_file, max_layer_num=pretrained_layers,
                 withfinal=True)
    except KeyError:  # `except KeyError, e` is Py2-only syntax; `e` was unused
        logger.warning("Pretrained network missing in working directory, skipping model loading")
def __init__(self, model_config):
    """Build a CNN3D model from *model_config* and load its pretrained weights.

    Parses the convolution/MLP network spec, instantiates a ``CNN3D``,
    then loads the pretrained layers named in the config.

    Fixes vs. original: ``hidden_activation`` was passed an undefined name
    (the parsed activation is bound to ``activationFn``), and ``l2_reg``
    was fed ``mlp_config['l1_reg']`` (copy-paste bug; now ``'l2_reg'``).
    """
    super(CNN3dPredictor, self).__init__(model_config, 'CNN3d')
    conv_config, conv_layer_config, mlp_config = load_conv_spec(
        self.model_config['nnet_spec'], self.batch_size,
        self.model_config['input_shape'])
    activationFn = parse_activation(mlp_config['activation'])
    self.model = CNN3D(
        self.numpy_rng, self.theano_rng,
        conv_layer_configs=conv_layer_config,
        batch_size=self.batch_size,
        n_outs=self.model_config['n_outs'],
        hidden_layer_configs=mlp_config,
        hidden_activation=activationFn,  # was undefined `hidden_activation`
        l1_reg=mlp_config['l1_reg'],
        l2_reg=mlp_config['l2_reg'],  # was mlp_config['l1_reg'] (copy-paste)
        max_col_norm=mlp_config['max_col_norm'])
    self.__load_model__(self.model_config['input_file'],
                        mlp_config['pretrained_layers'])
def runCNN3D(arg):
    """Build a CNN3D model from a config dict (or a config file path) and
    load its pretrained weights if an ``input_file`` is configured.

    :param arg: either a ready model-config dict, or an argument passed to
        ``load_model(arg, 'CNN')`` to obtain one.

    Fix vs. original: ``l2_reg`` was fed ``mlp_config['l1_reg']``
    (copy-paste bug; now ``'l2_reg'``).
    """
    if type(arg) is dict:
        model_config = arg
    else:
        model_config = load_model(arg, 'CNN')
    conv_config, conv_layer_config, mlp_config = load_conv_spec(
        model_config['nnet_spec'], model_config['batch_size'],
        model_config['input_shape'])
    data_spec = load_data_spec(model_config['data_spec'],
                               model_config['batch_size'])
    numpy_rng = numpy.random.RandomState(model_config['random_seed'])
    theano_rng = RandomStreams(numpy_rng.randint(2**30))
    logger.info('> ... building the model')
    hidden_activation = parse_activation(mlp_config['activation'])
    createDir(model_config['wdir'])  # create working dir
    batch_size = model_config['batch_size']
    cnn = CNN3D(numpy_rng, theano_rng,
                conv_layer_configs=conv_layer_config,
                batch_size=batch_size,
                n_outs=model_config['n_outs'],
                hidden_layer_configs=mlp_config,
                hidden_activation=hidden_activation,
                l1_reg=mlp_config['l1_reg'],
                l2_reg=mlp_config['l2_reg'],  # was mlp_config['l1_reg'] (copy-paste)
                max_col_norm=mlp_config['max_col_norm'])
    ########################
    #  Loading THE MODEL   #
    ########################
    try:
        # pretraining: both keys must exist for loading to be attempted
        ptr_file = model_config['input_file']
        pretrained_layers = mlp_config['pretrained_layers']
        logger.info("Loading the pretrained network..")
        cnn.load(filename=ptr_file, max_layer_num=pretrained_layers,
                 withfinal=True)
    except KeyError:  # `except KeyError, e` is Py2-only syntax; `e` was unused
        logger.warning("Pretrained network missing in working directory, skipping model loading")
def __init__(self, model_config):
    """Build a (Dropout)CNN model from *model_config* and load pretrained weights.

    Parses the convolution/MLP network spec, instantiates either a
    ``DropoutCNN`` (when dropout is enabled in either spec) or a plain
    ``CNN``, then loads the pretrained layers named in the config.

    Fixes vs. original: the non-dropout branch referenced an undefined
    local ``batch_size`` (now ``self.batch_size``), and ``l2_reg`` was
    fed ``mlp_config['l1_reg']`` (copy-paste bug; now ``'l2_reg'``).
    """
    super(CNNPredictor, self).__init__(model_config, 'CNN')
    conv_config, conv_layer_config, mlp_config = load_conv_spec(
        self.model_config['nnet_spec'], self.batch_size,
        self.model_config['input_shape'])
    activationFn = parse_activation(mlp_config['activation'])
    if mlp_config['do_dropout'] or conv_config['do_dropout']:
        self.model = DropoutCNN(
            self.numpy_rng, self.theano_rng,
            conv_layer_configs=conv_layer_config,
            batch_size=self.batch_size,
            n_outs=self.model_config['n_outs'],
            hidden_layer_configs=mlp_config,
            hidden_activation=activationFn,
            use_fast=conv_config['use_fast'],
            l1_reg=mlp_config['l1_reg'],
            l2_reg=mlp_config['l2_reg'],  # was mlp_config['l1_reg'] (copy-paste)
            max_col_norm=mlp_config['max_col_norm'],
            input_dropout_factor=conv_config['input_dropout_factor'])
    else:
        self.model = CNN(
            self.numpy_rng, self.theano_rng,
            conv_layer_configs=conv_layer_config,
            batch_size=self.batch_size,  # was bare `batch_size` (undefined here)
            n_outs=self.model_config['n_outs'],
            hidden_layer_configs=mlp_config,
            hidden_activation=activationFn,
            use_fast=conv_config['use_fast'],
            l1_reg=mlp_config['l1_reg'],
            l2_reg=mlp_config['l2_reg'],  # was mlp_config['l1_reg'] (copy-paste)
            max_col_norm=mlp_config['max_col_norm'])
    self.__load_model__(self.model_config['input_file'],
                        mlp_config['pretrained_layers'])
def runCNN(arg):
    """Build a (Dropout)CNN model from a config dict (or a config file path)
    and load its pretrained weights if an ``input_file`` is configured.

    :param arg: either a ready model-config dict, or an argument passed to
        ``load_model(arg, 'CNN')`` to obtain one.

    Fix vs. original: ``l2_reg`` was fed ``mlp_config['l1_reg']``
    (copy-paste bug; now ``'l2_reg'``).
    """
    if type(arg) is dict:
        model_config = arg
    else:
        model_config = load_model(arg, 'CNN')
    conv_config, conv_layer_config, mlp_config = load_conv_spec(
        model_config['nnet_spec'], model_config['batch_size'],
        model_config['input_shape'])
    data_spec = load_data_spec(model_config['data_spec'],
                               model_config['batch_size'])
    # NOTE(review): seed is hard-coded here while runCNN3D reads
    # model_config['random_seed'] — possibly intentional, left unchanged.
    numpy_rng = numpy.random.RandomState(89677)
    theano_rng = RandomStreams(numpy_rng.randint(2**30))
    logger.info('> ... building the model')
    activationFn = parse_activation(mlp_config['activation'])
    createDir(model_config['wdir'])  # create working dir
    batch_size = model_config['batch_size']
    if mlp_config['do_dropout'] or conv_config['do_dropout']:
        logger.info('>Initializing dropout cnn model')
        cnn = DropoutCNN(
            numpy_rng, theano_rng,
            conv_layer_configs=conv_layer_config,
            batch_size=batch_size,
            n_outs=model_config['n_outs'],
            hidden_layer_configs=mlp_config,
            hidden_activation=activationFn,
            use_fast=conv_config['use_fast'],
            l1_reg=mlp_config['l1_reg'],
            l2_reg=mlp_config['l2_reg'],  # was mlp_config['l1_reg'] (copy-paste)
            max_col_norm=mlp_config['max_col_norm'],
            input_dropout_factor=conv_config['input_dropout_factor'])
    else:
        cnn = CNN(numpy_rng, theano_rng,
                  conv_layer_configs=conv_layer_config,
                  batch_size=batch_size,
                  n_outs=model_config['n_outs'],
                  hidden_layer_configs=mlp_config,
                  hidden_activation=activationFn,
                  use_fast=conv_config['use_fast'],
                  l1_reg=mlp_config['l1_reg'],
                  l2_reg=mlp_config['l2_reg'],  # was mlp_config['l1_reg'] (copy-paste)
                  max_col_norm=mlp_config['max_col_norm'])
    ########################
    #  Loading THE MODEL   #
    ########################
    try:
        # pretraining: both keys must exist for loading to be attempted
        ptr_file = model_config['input_file']
        pretrained_layers = mlp_config['pretrained_layers']
        logger.info("Loading the pretrained network..")
        cnn.load(filename=ptr_file, max_layer_num=pretrained_layers,
                 withfinal=True)
    except KeyError:  # `except KeyError, e` is Py2-only syntax; `e` was unused
        logger.warning(
            "Pretrained network missing in working directory, skipping model loading"
        )