def conv2d_residual_block(network, lastlayer, model_layer):

    name = model_layer['name']
    filter_size = model_layer['filter_size']
    is_training = model_layer['batch_norm']
    if 'function' in model_layer:
        activation = model_layer['function']
    else:
        activation = 'relu'

    # residual unit: conv -> batch norm -> activation -> conv -> batch norm,
    # followed by an identity skip connection summed in before the final activation
    shape = network[lastlayer].get_output_shape()
    num_filters = shape[-1].value  # match the incoming depth so the skip sum is shape-compatible

    # a scalar filter size is interpreted as a 1D filter of shape (filter_size, 1)
    if not isinstance(filter_size, (list, tuple)):
        filter_size = (filter_size, 1)

    network[name + '_1resid'] = layers.Conv2DLayer(network[lastlayer],
                                                   num_filters=num_filters,
                                                   filter_size=filter_size,
                                                   padding='SAME')
    network[name + '_1resid_norm'] = layers.BatchNormLayer(
        network[name + '_1resid'], is_training)
    network[name + '_1resid_active'] = layers.ActivationLayer(
        network[name + '_1resid_norm'], function=activation)

    if 'dropout_block' in model_layer:
        network[name + '_dropout1'] = layers.DropoutLayer(
            network[name + '_1resid_active'],
            keep_prob=model_layer['dropout_block'])
        lastname = name + '_dropout1'
    else:
        lastname = name + '_1resid_active'

    network[name + '_2resid'] = layers.Conv2DLayer(network[lastname],
                                                   num_filters=num_filters,
                                                   filter_size=filter_size,
                                                   padding='SAME')
    network[name + '_2resid_norm'] = layers.BatchNormLayer(
        network[name + '_2resid'], is_training)
    network[name + '_resid_sum'] = layers.ElementwiseSumLayer(
        [network[lastlayer], network[name + '_2resid_norm']])
    network[name + '_resid'] = layers.ActivationLayer(network[name +
                                                              '_resid_sum'],
                                                      function=activation)
    return network
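
# Example usage (a minimal sketch; the `network` dict, the 'conv1_active' key, and the
# `is_training` placeholder are hypothetical and depend on how the rest of the model
# builder constructs and names its layers):
#
#   model_layer = {'name': 'resid1',
#                  'filter_size': 5,            # scalar -> treated as (5, 1)
#                  'batch_norm': is_training,   # passed through to BatchNormLayer
#                  'dropout_block': 0.9}        # optional keep probability
#   network = conv2d_residual_block(network, 'conv1_active', model_layer)
#   # the block's output layer is network['resid1_resid']
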
	def conv1d_residual_block(self, model_layer):

		lastlayer = self.lastlayer

		name = model_layer['name']
		filter_size = model_layer['filter_size']
		is_training = model_layer['batch_norm']
		if 'function' in model_layer:
			activation = model_layer['function']
		else:
			activation = 'relu'

		# residual unit built from 2D convolutions with (filter_size, 1) filters,
		# which is how 1D convolutions are expressed in this builder
		shape = self.network[lastlayer].get_output_shape()
		num_filters = shape[-1].value  # match the incoming depth so the skip sum is shape-compatible

		if not isinstance(filter_size, (list, tuple)):
			filter_size = (filter_size, 1)

		self.network[name+'_1resid'] = layers.Conv2DLayer(self.network[lastlayer], num_filters=num_filters, filter_size=filter_size, padding='SAME')
		self.network[name+'_1resid_norm'] = layers.BatchNormLayer(self.network[name+'_1resid'], is_training)
		self.network[name+'_1resid_active'] = layers.ActivationLayer(self.network[name+'_1resid_norm'], function=activation)

		if 'dropout_block' in model_layer:
			# create a unique keep_prob placeholder for this dropout layer and record
			# the keep probability to feed it, so dropout can be disabled at test time
			dropout = model_layer['dropout_block']
			placeholder_name = 'keep_prob' + str(self.num_dropout)
			keep_prob = tf.placeholder(tf.float32, name=placeholder_name)
			self.network[name+'_dropout1'] = layers.DropoutLayer(self.network[name+'_1resid_active'], keep_prob=keep_prob)
			self.hidden_feed_dict[keep_prob] = dropout
			self.num_dropout += 1
			lastname = name+'_dropout1'
		else:
			lastname = name+'_1resid_active'

		self.network[name+'_2resid'] = layers.Conv2DLayer(self.network[lastname], num_filters=num_filters, filter_size=filter_size, padding='SAME')
		self.network[name+'_2resid_norm'] = layers.BatchNormLayer(self.network[name+'_2resid'], is_training)
		self.network[name+'_resid_sum'] = layers.ElementwiseSumLayer([self.network[lastlayer], self.network[name+'_2resid_norm']])
		self.network[name+'_resid'] = layers.ActivationLayer(self.network[name+'_resid_sum'], function=activation)

		self.lastlayer = name+'_resid'

		return self.network
def dense_residual_block(network, lastlayer, model_layer):

    name = model_layer['name']
    is_training = model_layer['batch_norm']
    if 'function' in model_layer:
        activation = model_layer['function']
    else:
        activation = 'relu'

    # original residual unit
    shape = network[lastlayer].get_output_shape()
    num_units = shape[-1].value

    network[name + '_1resid'] = layers.DenseLayer(network[lastlayer],
                                                  num_units=num_units,
                                                  b=None)
    network[name + '_1resid_norm'] = layers.BatchNormLayer(
        network[name + '_1resid'], is_training)
    network[name + '_1resid_active'] = layers.ActivationLayer(
        network[name + '_1resid_norm'], function=activation)

    if 'dropout_block' in model_layer:
        network[name + '_dropout1'] = layers.DropoutLayer(
            network[name + '_1resid_active'],
            keep_prob=model_layer['dropout_block'])
        lastname = name + '_dropout1'
    else:
        lastname = name + '_1resid_active'

    network[name + '_2resid'] = layers.DenseLayer(network[lastname],
                                                  num_units=num_units,
                                                  b=None)
    network[name + '_2resid_norm'] = layers.BatchNormLayer(
        network[name + '_2resid'], is_training)
    network[name + '_resid_sum'] = layers.ElementwiseSumLayer(
        [network[lastlayer], network[name + '_2resid_norm']])
    network[name + '_resid'] = layers.ActivationLayer(network[name +
                                                              '_resid_sum'],
                                                      function=activation)
    return network
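
# Example usage (a sketch; 'dense1_active' and the surrounding `network` dict are
# hypothetical names):
#
#   model_layer = {'name': 'resid2',
#                  'batch_norm': is_training,
#                  'function': 'relu'}
#   network = dense_residual_block(network, 'dense1_active', model_layer)
#   # output layer: network['resid2_resid'], with the same width as network['dense1_active']
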
	def dense_residual_block(self, model_layer):

		lastlayer = self.lastlayer

		name = model_layer['name']
		is_training = model_layer['batch_norm']
		if 'function' in model_layer:
			activation = model_layer['function']
		else:
			activation = 'relu'

		# original residual unit
		shape = self.network[lastlayer].get_output_shape()
		num_units = shape[-1].value

		self.network[name+'_1resid'] = layers.DenseLayer(self.network[lastlayer], num_units=num_units, b=None)
		self.network[name+'_1resid_norm'] = layers.BatchNormLayer(self.network[name+'_1resid'], is_training)
		self.network[name+'_1resid_active'] = layers.ActivationLayer(self.network[name+'_1resid_norm'], function=activation)

		

		if 'dropout_block' in model_layer:
			# create a unique keep_prob placeholder for this dropout layer and record
			# the keep probability to feed it, so dropout can be disabled at test time
			dropout = model_layer['dropout_block']
			placeholder_name = 'keep_prob' + str(self.num_dropout)
			keep_prob = tf.placeholder(tf.float32, name=placeholder_name)
			self.network[name+'_dropout1'] = layers.DropoutLayer(self.network[name+'_1resid_active'], keep_prob=keep_prob)
			self.hidden_feed_dict[keep_prob] = dropout
			lastname = name+'_dropout1'
			self.num_dropout += 1
		else:
			lastname = name+'_1resid_active'

		self.network[name+'_2resid'] = layers.DenseLayer(self.network[lastname], num_units=num_units, b=None)
		self.network[name+'_2resid_norm'] = layers.BatchNormLayer(self.network[name+'_2resid'], is_training)
		self.network[name+'_resid_sum'] = layers.ElementwiseSumLayer([self.network[lastlayer], self.network[name+'_2resid_norm']])
		self.network[name+'_resid'] = layers.ActivationLayer(self.network[name+'_resid_sum'], function=activation)

		self.lastlayer = name+'_resid'

		return self.network
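
	# Example usage inside the class-based builder (a sketch; the enclosing object is
	# assumed to maintain self.network, self.lastlayer, self.num_dropout and
	# self.hidden_feed_dict, as the methods above imply):
	#
	#   model_layer = {'name': 'resid3', 'batch_norm': is_training, 'dropout_block': 0.9}
	#   self.dense_residual_block(model_layer)
	#   # self.lastlayer now refers to 'resid3_resid'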