Example #1
	def dense_residual_block(self, model_layer, name):
		with tf.name_scope('dense_residual_block') as scope:
			last_layer = self.last_layer

			if 'function' in model_layer:
				activation = model_layer['function']
			else:
				activation = 'relu'

			# residual unit: match num_units to the input width so the skip connection can be summed
			shape = self.network[last_layer].get_output_shape()
			num_units = shape[-1].value

			# first dense sub-layer: dense -> batch norm -> activation
			self.network[name+'_1resid'] = layers.DenseLayer(self.network[last_layer], num_units=num_units, b=None, **self.seed)
			self.network[name+'_1resid_norm'] = layers.BatchNormLayer(self.network[name+'_1resid'], self.placeholders['is_training'])
			self.network[name+'_1resid_active'] = layers.ActivationLayer(self.network[name+'_1resid_norm'], function=activation)

			# optional dropout between the two dense sub-layers; the config stores a
			# dropout rate, converted here to a keep_prob placeholder for feed_dict
			if 'dropout_block' in model_layer:
				placeholder_name = 'keep_prob_'+str(self.num_dropout)
				self.placeholders[placeholder_name] = tf.placeholder(tf.float32, name=placeholder_name)
				self.feed_dict[placeholder_name] = 1-model_layer['dropout_block']
				self.num_dropout += 1
				self.network[name+'_dropout1'] = layers.DropoutLayer(self.network[name+'_1resid_active'], keep_prob=self.placeholders[placeholder_name])
				lastname = name+'_dropout1'
			else:
				lastname = name+'_1resid_active'

			# second dense sub-layer (dense -> batch norm), then sum with the block input
			# (skip connection) and apply the activation
			self.network[name+'_2resid'] = layers.DenseLayer(self.network[lastname], num_units=num_units, b=None, **self.seed)
			self.network[name+'_2resid_norm'] = layers.BatchNormLayer(self.network[name+'_2resid'], self.placeholders['is_training'])
			self.network[name+'_resid_sum'] = layers.ElementwiseSumLayer([self.network[last_layer], self.network[name+'_2resid_norm']])
			self.network[name+'_resid'] = layers.ActivationLayer(self.network[name+'_resid_sum'], function=activation)
			self.last_layer = name+'_resid'
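
A minimal usage sketch, not taken from the source: builder stands in for whatever model-builder object exposes this method, and the model_layer values are illustrative; only the keys 'function' and 'dropout_block' are read by the code above.

model_layer = {'function': 'relu', 'dropout_block': 0.1}  # hypothetical config values
builder.dense_residual_block(model_layer, name='resid1')
# afterwards builder.last_layer == 'resid1_resid', and the sub-layers are registered in
# builder.network under keys such as 'resid1_1resid', 'resid1_1resid_norm', ..., 'resid1_resid'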
Example #2
    def conv2d_residual_block(self, model_layer, name):

        last_layer = self.last_layer
        filter_size = model_layer['filter_size']
        if 'function' in model_layer:
            activation = model_layer['function']
        else:
            activation = 'relu'

        # residual unit: match num_filters to the block input so the skip connection can be summed
        shape = self.network[last_layer].get_output_shape()
        num_filters = shape[-1].value

        # normalize filter_size to a (height, width) tuple
        if not isinstance(filter_size, (list, tuple)):
            filter_size = (filter_size, filter_size)

        # default to Glorot-uniform initialization unless weights are provided in the config
        if 'W' not in model_layer.keys():
            W = init.GlorotUniform(**self.seed)
        else:
            W = model_layer['W']

        # first conv sub-layer: conv -> batch norm -> activation; 'SAME' padding
        # preserves spatial dimensions for the elementwise sum
        self.network[name + '_1resid'] = layers.Conv2DLayer(
            self.network[last_layer],
            num_filters=num_filters,
            filter_size=filter_size,
            W=W,
            padding='SAME')
        self.network[name + '_1resid_norm'] = layers.BatchNormLayer(
            self.network[name + '_1resid'], self.placeholders['is_training'])
        self.network[name + '_1resid_active'] = layers.ActivationLayer(
            self.network[name + '_1resid_norm'], function=activation)

        # optional dropout between the two conv sub-layers; the config stores a
        # dropout rate, converted here to a keep_prob placeholder for feed_dict
        if 'dropout_block' in model_layer:
            placeholder_name = 'keep_prob_' + str(self.num_dropout)
            self.placeholders[placeholder_name] = tf.placeholder(
                tf.float32, name=placeholder_name)
            self.feed_dict[placeholder_name] = 1 - model_layer['dropout_block']
            self.num_dropout += 1
            self.network[name + '_dropout1'] = layers.DropoutLayer(
                self.network[name + '_1resid_active'],
                keep_prob=self.placeholders[placeholder_name])
            lastname = name + '_dropout1'
        else:
            lastname = name + '_1resid_active'

        # second conv sub-layer (conv -> batch norm), then sum with the block input
        # (skip connection) and apply the activation
        self.network[name + '_2resid'] = layers.Conv2DLayer(
            self.network[lastname],
            num_filters=num_filters,
            filter_size=filter_size,
            W=W,
            padding='SAME')
        self.network[name + '_2resid_norm'] = layers.BatchNormLayer(
            self.network[name + '_2resid'], self.placeholders['is_training'])
        self.network[name + '_resid_sum'] = layers.ElementwiseSumLayer(
            [self.network[last_layer], self.network[name + '_2resid_norm']])
        self.network[name + '_resid'] = layers.ActivationLayer(
            self.network[name + '_resid_sum'], function=activation)
        self.last_layer = name + '_resid'
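
A similar hedged sketch for the convolutional variant; builder and the config values are assumptions, while the keys match those read by the method above ('W' and 'dropout_block' are optional).

model_layer = {'filter_size': 3, 'function': 'relu'}  # hypothetical config; filter_size may be an int or a (h, w) tuple
builder.conv2d_residual_block(model_layer, name='conv_resid1')
# afterwards builder.last_layer == 'conv_resid1_resid'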