# Example 1
# 0
def transpose_deconvolution_layer(input_tensor, used_weights, new_shape,
                                  stride, scope_name):
    """Upsample `input_tensor` with a transposed convolution, then ReLU.

    Args:
        input_tensor: 4-D input tensor (NHWC assumed — strides are
            [1, stride, stride, 1], the NHWC layout; TODO confirm caller).
        used_weights: filter tensor for `tf.nn.conv2d_transpose`.
        new_shape: explicit output shape of the transposed convolution.
        stride: spatial stride applied to both height and width.
        scope_name: name of the variable scope wrapping the ops.

    Returns:
        The ReLU-activated transposed-convolution output, shape `new_shape`.
    """
    # Fix: original called tf.varaible_scope (typo) -> AttributeError.
    with tf.variable_scope(scope_name):
        output = tf.nn.conv2d_transpose(input_tensor,
                                        used_weights,
                                        output_shape=new_shape,
                                        strides=[1, stride, stride, 1],
                                        padding='SAME')
        output = tf.nn.relu(output)
        return output
# Example 2
# 0
def SwitchNorm(input, data_format="NHWC", eps=1e-5):
    """Switchable Normalization: blend instance-, layer- and batch-norm
    statistics with learned softmax weights, then apply a per-channel affine.

    Fixes vs. the original:
      * typos that raise AttributeError: tf.varaible_scope, tf.rehape,
        tf.ones_initialzier;
      * removed copy.deepcopy on the input — TF ops never mutate their
        inputs, and deepcopy of a graph tensor raises in many TF versions;
      * the final reshape went straight from [n, c, h*w] to [n, h, w, c],
        which scrambles the data; we now reshape to [n, c, h, w], apply the
        [1, c, 1, 1] affine in NCHW, and transpose back to the caller's
        layout;
      * layer-norm statistics are taken over axes [1, 2] (channel and
        spatial), matching the PyTorch reference in the comment below.

    Args:
        input: 4-D tensor, laid out per `data_format`. Static shape must be
            fully defined (get_shape().as_list() is used for the reshape).
        data_format: "NHWC" or "NCHW".
        eps: small constant added to the variance for numerical stability.

    Returns:
        Normalized tensor in the same layout as `input`.
    """
    # Work internally in NCHW flattened to [n, c, h*w].
    x = input
    if data_format == "NHWC":
        x = tf.transpose(x, [0, 3, 1, 2])
    n, c, h, w = x.get_shape().as_list()
    x = tf.reshape(x, [n, c, -1])

    # Per-channel affine parameters (broadcast over batch and spatial dims).
    weight = tf.get_variable("weight", [1, c, 1, 1], dtype=tf.float32,
                             initializer=tf.ones_initializer())
    bias = tf.get_variable("bias", [1, c, 1, 1], dtype=tf.float32,
                           initializer=tf.zeros_initializer())
    # Mixing logits for the three statistics (in / ln / bn).
    mean_weight = tf.get_variable("mean_weight", [3], dtype=tf.float32,
                                  initializer=tf.ones_initializer())
    var_weight = tf.get_variable("var_weight", [3], dtype=tf.float32,
                                 initializer=tf.ones_initializer())

    with tf.variable_scope("in"):
        # Instance norm: statistics over the spatial axis (h*w set to 1).
        mean_in, var_in = tf.nn.moments(x, axes=[2], keep_dims=True)

    # Different from the PyTorch version, which uses an approximation:
    # mean_ln = mean_in.mean(1, keepdim=True)
    # temp = var_in + mean_in ** 2
    # var_ln = temp.mean(1, keepdim=True) - mean_ln ** 2
    with tf.variable_scope("ln"):
        # Layer norm: statistics over channel AND spatial axes (c, h*w).
        mean_ln, var_ln = tf.nn.moments(x, axes=[1, 2], keep_dims=True)

    with tf.variable_scope("bn"):
        # Batch norm: statistics over batch and spatial axes (n, h*w).
        mean_bn, var_bn = tf.nn.moments(x, axes=[0, 2], keep_dims=True)

    mean_weight = tf.nn.softmax(mean_weight)
    var_weight = tf.nn.softmax(var_weight)

    mean = mean_weight[0] * mean_in + mean_weight[1] * mean_ln + mean_weight[2] * mean_bn
    var = var_weight[0] * var_in + var_weight[1] * var_ln + var_weight[2] * var_bn

    x = (x - mean) / tf.sqrt(var + eps)
    # Restore NCHW, apply the affine (shaped [1, c, 1, 1] -> NCHW broadcast),
    # then convert back to the caller's layout.
    x = tf.reshape(x, [n, c, h, w])
    x = x * weight + bias
    if data_format == "NHWC":
        x = tf.transpose(x, [0, 2, 3, 1])
    return x
# Example 3
# 0
 def share_generator(self, x, is_training=True, reuse=False, scope='share_generator'):
     """Run `x` through the shared stack of residual blocks.

     Args:
         x: input feature tensor fed through `self.n_gen_share` res_blocks.
         is_training: training-mode flag forwarded to each res_block.
         reuse: whether to reuse variables in `scope`.
         scope: variable-scope name (default 'share_generator').

     Returns:
         The tensor produced by the final residual block; channel count is
         self.ch * 2 ** (self.n_encoder - 1).
     """
     channel = self.ch * pow(2, self.n_encoder - 1)
     # Fix: original called tensorflow.varaible_scope (typo) -> AttributeError.
     with tensorflow.variable_scope(scope, reuse=reuse):
         for i in range(0, self.n_gen_share):
             x = res_block(x, channel, kernel=3, stride=1, pad=1, dropout_ratio=self.res_dropout, normal_weight_init=self.normal_weight_init, is_training=is_training, norm_fn=self.norm, scope='res_block' + str(i))
         return x