def linear(input_, output_size, name="Linear", stddev=0.01, scale=1.0,
           with_learnable_sn_scale=False, with_sn=False, bias_start=0.0,
           with_w=False, update_collection=None, with_singular_values=False):
    """Fully connected layer: ``input_ @ W + b``.

    Args:
        input_: 2-D tensor of shape ``[batch, in_dim]``.
        output_size: number of output units.
        name: variable-scope name; re-entering the same scope reuses it.
        stddev: stddev of the random-normal weight initializer.
        scale, with_learnable_sn_scale, with_sn, update_collection: accepted
            for signature compatibility with a spectral-norm variant; unused
            here.
        bias_start: constant used to initialize the bias.
        with_w: if True, also return the weight matrix and bias variables.
        with_singular_values: forwarded to ``variable_summaries`` for the
            weight summary.

    Returns:
        The pre-activation output, or ``(output, W, b)`` when ``with_w``.
    """
    in_dim = input_.get_shape().as_list()[1]
    with tf.variable_scope(name):
        # Summaries are only attached the first time this scope creates its
        # variables; on reuse the 'Matrix' variable is already trainable.
        existing = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,
                                     tf.get_variable_scope().name)
        already_summarized = any('Matrix' in v.op.name for v in existing)
        weight = tf.get_variable(
            "Matrix", [in_dim, output_size], tf.float32,
            tf.random_normal_initializer(stddev=stddev))
        product = tf.matmul(input_, weight)
        bias = tf.get_variable(
            "bias", [output_size],
            initializer=tf.constant_initializer(bias_start))
        if not already_summarized:
            variable_summaries({'b': bias})
            variable_summaries({'W': weight},
                               with_singular_values=with_singular_values)
        if with_w:
            return product + bias, weight, bias
        else:
            return product + bias
def linear_one_hot(input_, output_size, num_classes, name="Linear_one_hot",
                   stddev=0.01, scale=1.0, with_learnable_sn_scale=False,
                   with_sn=False, bias_start=0.0, with_w=False,
                   update_collection=None, with_singular_values=False):
    """Embedding lookup: rows of a ``[num_classes, output_size]`` matrix.

    Args:
        input_: integer tensor of class indices.
        output_size: embedding dimensionality.
        num_classes: number of rows in the embedding matrix.
        name: variable-scope name; re-entering the same scope reuses it.
        stddev: stddev of the random-normal weight initializer.
        scale, with_learnable_sn_scale, with_sn, bias_start,
        update_collection: accepted for signature compatibility with a
            spectral-norm variant; unused here.
        with_w: if True, also return the embedding matrix variable.
        with_singular_values: forwarded to ``variable_summaries``.

    Returns:
        The looked-up embeddings, or ``(embeddings, W)`` when ``with_w``.
    """
    with tf.variable_scope(name):
        # Attach summaries only on first creation of the 'Matrix' variable.
        existing = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,
                                     tf.get_variable_scope().name)
        already_summarized = any('Matrix' in v.op.name for v in existing)
        table = tf.get_variable(
            "Matrix", [num_classes, output_size], tf.float32,
            tf.random_normal_initializer(stddev=stddev))
        embedded = tf.nn.embedding_lookup(table, input_)
        if not already_summarized:
            variable_summaries({'W': table},
                               with_singular_values=with_singular_values)
        if with_w:
            return embedded, table
        else:
            return embedded
def __init__(self, epsilon=1e-5, momentum=0.9, name="batch_norm",
             format='NCHW'):
    """Configure a batch-normalization layer.

    Args:
        epsilon: small constant, stored for later use (presumably added to
            the variance in ``__call__`` — not visible here; TODO confirm).
        momentum: decay rate, stored for later use (presumably for moving
            averages — TODO confirm against ``__call__``).
        name: variable-scope / layer name.
        format: data layout; 'NCHW' puts channels at axis 1, 'NHWC' at
            axis 3. (``format`` shadows the builtin, but renaming would
            break keyword callers.)

    Raises:
        ValueError: if ``format`` is neither 'NCHW' nor 'NHWC'. Previously
            an unknown format silently left ``self.axis`` unset, deferring
            the failure (AttributeError) to first use of the layer.
    """
    with tf.variable_scope(name):
        self.epsilon = epsilon
        self.momentum = momentum
        self.name = name
        if format == 'NCHW':
            self.axis = 1
        elif format == 'NHWC':
            self.axis = 3
        else:
            raise ValueError(
                "format must be 'NCHW' or 'NHWC', got %r" % (format,))
def deconv2d(input_, output_shape, k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.02,
             scale=1.0, with_learnable_sn_scale=False, with_sn=False,
             name="deconv2d", with_w=False, update_collection=None,
             data_format='NCHW', with_singular_values=False):
    """Transposed 2-D convolution with bias.

    Args:
        input_: 4-D input tensor in ``data_format`` layout.
        output_shape: full shape of the output tensor.
        k_h, k_w: kernel height and width.
        d_h, d_w: vertical and horizontal strides.
        stddev: stddev of the random-normal kernel initializer.
        scale, with_learnable_sn_scale, with_sn, update_collection: accepted
            for signature compatibility with a spectral-norm variant; unused
            here.
        name: variable-scope name; re-entering the same scope reuses it.
        with_w: if True, also return the kernel and bias variables.
        data_format: 'NCHW' or 'NHWC'.
        with_singular_values: forwarded to ``variable_summaries``.

    Returns:
        The deconvolution output, or ``(output, w, biases)`` when ``with_w``.
    """
    with tf.variable_scope(name):
        # Attach summaries only on the first creation of 'w' in this scope.
        existing = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,
                                     tf.get_variable_scope().name)
        already_summarized = any('w' in v.op.name for v in existing)
        out_channel, in_channel = get_in_out_shape(
            output_shape, input_.get_shape().as_list(), data_format)
        strides = get_strides(d_h, d_w, data_format)
        # conv2d_transpose filters are [height, width, out_channels, in_channels]
        kernel = tf.get_variable(
            'w', [k_h, k_w, out_channel, in_channel],
            initializer=tf.random_normal_initializer(stddev=stddev))
        deconv = tf.nn.conv2d_transpose(
            input_, kernel, output_shape=output_shape, strides=strides,
            data_format=data_format)
        biases = tf.get_variable(
            'biases', [out_channel],
            initializer=tf.constant_initializer(0.0))
        # reshape restores the static shape that bias_add may have dropped
        deconv = tf.reshape(
            tf.nn.bias_add(deconv, biases, data_format=data_format),
            deconv.get_shape())
        if not already_summarized:
            variable_summaries({'b': biases})
            variable_summaries({'W': kernel},
                               with_singular_values=with_singular_values)
        if with_w:
            return deconv, kernel, biases
        else:
            return deconv
def conv2d(input_, output_dim, k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.02,
           scale=1.0, with_learnable_sn_scale=False, with_sn=False,
           name="snconv2d", update_collection=None, data_format='NCHW',
           with_singular_values=False, with_w=False):
    """2-D convolution with bias (SAME padding).

    Args:
        input_: 4-D input tensor in ``data_format`` layout.
        output_dim: number of output channels.
        k_h, k_w: kernel height and width.
        d_h, d_w: vertical and horizontal strides.
        stddev: stddev of the truncated-normal kernel initializer.
        scale, with_learnable_sn_scale, with_sn, update_collection: accepted
            for signature compatibility with a spectral-norm variant; unused
            here.
        name: variable-scope name; re-entering the same scope reuses it.
        data_format: 'NCHW' or 'NHWC'.
        with_singular_values: forwarded to ``variable_summaries``.
        with_w: if True, also return the kernel and bias variables
            (added for consistency with ``linear`` and ``deconv2d``;
            defaults to False, so existing callers are unaffected).

    Returns:
        The convolution output, or ``(output, w, biases)`` when ``with_w``.
    """
    with tf.variable_scope(name):
        # Attach summaries only on the first creation of 'w' in this scope.
        existing = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,
                                     tf.get_variable_scope().name)
        already_summarized = any('w' in v.op.name for v in existing)
        out_channel, in_channel = get_in_out_shape(
            [output_dim], input_.get_shape().as_list(), data_format)
        strides = get_strides(d_h, d_w, data_format)
        # conv2d filters are [height, width, in_channels, out_channels]
        w = tf.get_variable(
            'w', [k_h, k_w, in_channel, out_channel],
            initializer=tf.truncated_normal_initializer(stddev=stddev))
        conv = tf.nn.conv2d(input_, w, strides=strides, padding='SAME',
                            data_format=data_format)
        biases = tf.get_variable(
            'biases', [output_dim],
            initializer=tf.constant_initializer(0.0))
        # reshape restores the static shape that bias_add may have dropped
        conv = tf.reshape(
            tf.nn.bias_add(conv, biases, data_format=data_format),
            conv.get_shape())
        if not already_summarized:
            variable_summaries({'b': biases})
            variable_summaries({'W': w},
                               with_singular_values=with_singular_values)
        if with_w:
            return conv, w, biases
        return conv