Example 1
from tfomics import init  # assumed import path: the HeUniform initializer used below comes from tfomics

def classifier_model(input_shape, output_shape, num_filters):

	# model specification: each layer is a dict of options for the tfomics network builder
	layer1 = {'layer': 'input',
			'input_shape': input_shape
			}
	# conv1d with global max-pooling collapses the length dimension to one value per filter
	layer2 = {'layer': 'conv1d',
			'num_filters': num_filters,
			'filter_size': 11,
			'activation': 'sigmoid',
			'W': init.HeUniform(),
			'padding': 'SAME',
			'global_pool': 'max',
			}
	# flatten to (batch, num_filters), average across filters, then restore a (batch, 1) output
	layer3 = {'layer': 'reshape',
			  'reshape': [-1, num_filters],
			}
	layer4 = {'layer': 'reduce_mean',
			}
	layer5 = {'layer': 'reshape',
			  'reshape': [-1, 1],
			}
	#from tfomics import build_network
	model_layers = [layer1, layer2, layer3, layer4, layer5]

	# optimization parameters
	optimization = {"objective": "binary",
				  "optimizer": "adam",
				  "learning_rate": 0.001,
				  #"momentum": 0.9,
				  #"l2": 1e-6,
				  }
	return model_layers, optimization
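
A minimal sketch of how the returned specifications might be inspected; the shapes and filter count below are illustrative assumptions, not values from the original (normally a tfomics builder such as the commented-out build_network would consume them):

# hypothetical usage: build the specs and inspect each layer dict
model_layers, optimization = classifier_model(
	input_shape=[None, 200, 4],   # assumed (batch, length, alphabet) one-hot input
	output_shape=[None, 1],       # unused by the function body; kept for the signature
	num_filters=32)
for spec in model_layers:
	print(spec['layer'], sorted(k for k in spec if k != 'layer'))
print(optimization['objective'], optimization['optimizer'], optimization['learning_rate'])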
Example 2
	def conv2d_residual_block(self, model_layer, name):

		last_layer = self.last_layer
		filter_size = model_layer['filter_size']
		# activation defaults to relu unless the layer spec overrides it
		activation = model_layer.get('function', 'relu')

		# original residual unit: conv -> batchnorm -> activation -> conv -> batchnorm -> skip-sum -> activation
		shape = self.network[last_layer].get_output_shape()
		num_filters = shape[-1].value  # match the incoming channel count so the skip-sum shapes agree

		# allow a scalar filter_size as shorthand for a square filter
		if not isinstance(filter_size, (list, tuple)):
			filter_size = (filter_size, filter_size)

		# default to He-uniform initialization unless the layer spec provides weights
		if 'W' not in model_layer:
			W = init.HeUniform(**self.seed)
		else:
			W = model_layer['W']

		# first conv of the residual branch
		self.network[name+'_1resid'] = layers.Conv2DLayer(self.network[last_layer], num_filters=num_filters,
											  filter_size=filter_size,
											  W=W,
											  padding='SAME')
		self.network[name+'_1resid_norm'] = layers.BatchNormLayer(self.network[name+'_1resid'], self.placeholders['is_training'])
		self.network[name+'_1resid_active'] = layers.ActivationLayer(self.network[name+'_1resid_norm'], function=activation)


		if 'dropout_block' in model_layer:
			# optional dropout between the two convs; the spec gives a drop rate,
			# stored as keep_prob = 1 - rate in a dedicated placeholder
			placeholder_name = 'keep_prob_'+str(self.num_dropout)
			self.placeholders[placeholder_name] = tf.placeholder(tf.float32, name=placeholder_name)
			self.feed_dict[placeholder_name] = 1-model_layer['dropout_block']
			self.num_dropout += 1
			self.network[name+'_dropout1'] = layers.DropoutLayer(self.network[name+'_1resid_active'], keep_prob=self.placeholders[placeholder_name])
			lastname = name+'_dropout1'
		else:
			lastname = name+'_1resid_active'

		# second conv of the residual branch, then merge with the skip connection and apply the final activation
		self.network[name+'_2resid'] = layers.Conv2DLayer(self.network[lastname], num_filters=num_filters,
												  filter_size=filter_size,
												  W=W,
												  padding='SAME')
		self.network[name+'_2resid_norm'] = layers.BatchNormLayer(self.network[name+'_2resid'], self.placeholders['is_training'])
		self.network[name+'_resid_sum'] = layers.ElementwiseSumLayer([self.network[last_layer], self.network[name+'_2resid_norm']])
		self.network[name+'_resid'] = layers.ActivationLayer(self.network[name+'_resid_sum'], function=activation)
		self.last_layer = name+'_resid'
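
For reference, a sketch of the layer spec this method reads, based only on the keys accessed above; the concrete values are illustrative assumptions, and builder stands for a hypothetical instance of the enclosing network-builder class:

# hypothetical spec for conv2d_residual_block
model_layer = {
	'filter_size': 3,        # scalar is expanded to (3, 3)
	'function': 'relu',      # optional; omitted -> 'relu'
	'dropout_block': 0.1,    # optional; keep_prob placeholder gets 1 - 0.1 = 0.9
	# 'W': init.HeUniform(), # optional; omitted -> He-uniform with the builder's seed
}
# builder.conv2d_residual_block(model_layer, name='resid1')  # leaves 'resid1_resid' as last_layer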
Example 3
	def dense_residual_block(self, model_layer, name):

		last_layer = self.last_layer

		# activation defaults to relu unless the layer spec overrides it
		activation = model_layer.get('function', 'relu')

		# original residual unit: dense -> batchnorm -> activation -> dense -> batchnorm -> skip-sum -> activation
		shape = self.network[last_layer].get_output_shape()
		num_units = shape[-1].value  # match the incoming width so the skip-sum shapes agree

		# default to He-uniform initialization unless the layer spec provides weights
		if 'W' not in model_layer:
			W = init.HeUniform(**self.seed)
		else:
			W = model_layer['W']

		# first dense layer of the residual branch
		self.network[name+'_1resid'] = layers.DenseLayer(self.network[last_layer],
														num_units=num_units,
														W=W,
														b=None,
														**self.seed)
		self.network[name+'_1resid_norm'] = layers.BatchNormLayer(self.network[name+'_1resid'], self.placeholders['is_training'])
		self.network[name+'_1resid_active'] = layers.ActivationLayer(self.network[name+'_1resid_norm'], function=activation)

		if 'dropout_block' in model_layer:
			# optional dropout between the two dense layers; the spec gives a drop rate,
			# stored as keep_prob = 1 - rate in a dedicated placeholder
			placeholder_name = 'keep_prob_'+str(self.num_dropout)
			self.placeholders[placeholder_name] = tf.placeholder(tf.float32, name=placeholder_name)
			self.feed_dict[placeholder_name] = 1-model_layer['dropout_block']
			self.num_dropout += 1
			self.network[name+'_dropout1'] = layers.DropoutLayer(self.network[name+'_1resid_active'], keep_prob=self.placeholders[placeholder_name])
			lastname = name+'_dropout1'
		else:
			lastname = name+'_1resid_active'


		# second dense layer of the residual branch, then merge with the skip connection and apply the final activation
		self.network[name+'_2resid'] = layers.DenseLayer(self.network[lastname],
														num_units=num_units,
														W=W,
														b=None,
														**self.seed)
		self.network[name+'_2resid_norm'] = layers.BatchNormLayer(self.network[name+'_2resid'], self.placeholders['is_training'])
		self.network[name+'_resid_sum'] = layers.ElementwiseSumLayer([self.network[last_layer], self.network[name+'_2resid_norm']])
		self.network[name+'_resid'] = layers.ActivationLayer(self.network[name+'_resid_sum'], function=activation)
		self.last_layer = name+'_resid'
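
The dense variant reads the same optional keys, minus filter_size; a sketch with illustrative values (builder again stands for a hypothetical instance of the enclosing class):

# hypothetical spec for dense_residual_block
model_layer = {
	'function': 'relu',      # optional; omitted -> 'relu'
	'dropout_block': 0.2,    # optional; keep_prob placeholder gets 0.8
}
# builder.dense_residual_block(model_layer, name='dense_resid1')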