Example #1
0
def create_conv(in_channels, out_channels, kernel_size, weight_init, bias_init,
                transpose=False):
    """Build a Conv2D (or Conv2DTranspose) layer with constant initializers.

    Generalized (backward-compatibly) with `transpose`, matching the other
    `create_conv` variant in this file.

    Args:
        in_channels: number of input channels; only used to form the shape
            passed to `get_conv_init_value`.
        out_channels: number of output filters.
        kernel_size: spatial size of the (square) kernel.
        weight_init: spec understood by `get_conv_init_value`; resolved to a
            concrete value for shape (k, k, in_channels, out_channels).
        bias_init: scalar value every bias element is initialized to.
        transpose: if True, create a Conv2DTranspose instead of a Conv2D.
            Defaults to False, preserving the original behavior.

    Returns:
        An un-built tf.keras convolution layer.
    """
    weight_init = get_conv_init_value(
        (kernel_size, kernel_size, in_channels, out_channels), weight_init)
    conv_cls = (tf.keras.layers.Conv2DTranspose
                if transpose else tf.keras.layers.Conv2D)
    return conv_cls(out_channels,
                    kernel_size,
                    kernel_initializer=Constant(weight_init),
                    bias_initializer=Constant(bias_init))
Example #2
0
def fully_connected(inp,
                    n_out,
                    activation=None,
                    scope="fully_connected",
                    weight_initializer=UniformUnitScaling(),
                    init_bias=0.0,
                    use_bias=True,
                    reuse=False):
    """Affine layer ``inp @ W (+ b)`` with an optional activation.

    Variables "W" (and "b" when `use_bias`) are created under variable
    scope `scope`; pass `reuse=True` to share them across calls.
    `activation` may be 'relu', 'softmax' or 'tanh'; any other value
    (including None) returns the raw pre-activation output.
    """
    with tf.variable_scope(scope, reuse=reuse):
        in_dim = int(inp.get_shape()[1])
        weight = tf.get_variable("W", [in_dim, n_out],
                                 initializer=weight_initializer)
        out = tf.matmul(inp, weight)

        if use_bias:
            out += tf.get_variable("b", [n_out],
                                   initializer=Constant(init_bias))

    # Dispatch table replaces the original if-chain; unknown/None keys
    # fall through to the identity behavior.
    activations = {
        'relu': tf.nn.relu,
        'softmax': tf.nn.softmax,
        'tanh': tf.tanh,
    }
    fn = activations.get(activation)
    return fn(out) if fn is not None else out
    def build(self, input_shape):
        """Create per-level variables: a matrix, a factor and a bias.

        For each level i in [0, K] this allocates and appends
          matrix_i : (channels, filters[i+1], filters[i]),
                     constant-initialized to log(expm1(1/scale/filters[i+1]))
                     (the softplus pre-image of that value),
          factor_i : (channels, filters[i+1], 1), zero-initialized,
          bias_i   : (channels, filters[i+1], 1), uniform in (-0.5, 0.5),
        onto self.matrices / self.factors / self.bias respectively.

        NOTE(review): indexing assumes len(self.filters) >= self.K + 2 --
        confirm with the caller that builds `filters`.
        """
        n_ch = self.channels
        fan = self.filters
        for level in range(self.K + 1):
            # Inverse-softplus init: softplus(matrix) starts near
            # 1 / (scale * fan[level + 1]).
            matrix_init = np.log(np.expm1(1.0 / self.scale / fan[level + 1]))
            self.matrices.append(
                self.add_variable(name="matrix" + str(level),
                                  shape=[n_ch, fan[level + 1], fan[level]],
                                  initializer=Constant(matrix_init)))

            self.factors.append(
                self.add_variable(name="factor" + str(level),
                                  shape=[n_ch, fan[level + 1], 1],
                                  initializer=Zeros()))

            self.bias.append(
                self.add_variable(name="bias" + str(level),
                                  shape=[n_ch, fan[level + 1], 1],
                                  initializer=RandomUniform(minval=-0.5,
                                                            maxval=0.5)))
Example #4
0
def create_conv(in_channels,
                out_channels,
                kernel_size,
                weight_init,
                bias_init,
                transpose=False):
    """Construct a Conv2D, or a Conv2DTranspose when `transpose` is set.

    The kernel is constant-initialized to the value that
    `get_conv_init_value` resolves for shape
    (kernel_size, kernel_size, in_channels, out_channels); every bias
    element is constant-initialized to `bias_init`.
    """
    kernel_value = get_conv_init_value(
        (kernel_size, kernel_size, in_channels, out_channels), weight_init)
    conv_cls = (tf.keras.layers.Conv2DTranspose
                if transpose else tf.keras.layers.Conv2D)
    return conv_cls(filters=out_channels,
                    kernel_size=kernel_size,
                    kernel_initializer=Constant(kernel_value),
                    bias_initializer=Constant(bias_init))
Example #5
0
 def add_fc(self, name, size, weight_initializer, init_bias):
     """Append a fully-connected layer ``prev @ W + b`` to self.layers.

     Under variable scope `name`, creates 'W' of shape
     [prev_width, size] (initialized with `weight_initializer`) and 'b'
     of shape [size] (constant `init_bias`), registers weight summaries
     via define_weight_summaries, and pushes the resulting tensor onto
     self.layers.
     """
     with tf.variable_scope(name):
         prev = self.layers[-1]
         in_width = prev.get_shape().as_list()[1]
         weights = tf.get_variable('W', [in_width, size],
                                   initializer=weight_initializer)
         self.define_weight_summaries(weights)
         bias = tf.get_variable('b', [size],
                                initializer=Constant(init_bias))
         self.layers.append(tf.matmul(prev, weights) + bias)
Example #6
0
 def build(self, input_shape):
     """Create the layer's variables: a "quiet" part plus learned noise.

     The effective kernel (and bias, when `self.use_bias`) is
         quiet + noise_scale * noise
     where `noise` comes from make_kernel_noise / make_bias_noise and
     `noise_scale` is a trainable per-element scale initialized to
     0.5 / sqrt(fan_in).  NOTE(review): this resembles a NoisyNet-style
     dense layer -- confirm against the full class definition.

     Raises:
         ValueError: if the last dimension of `input_shape` is unknown.
     """
     input_shape = tensor_shape.TensorShape(input_shape)
     # The kernel shape depends on the input width, so it must be static.
     if input_shape[-1].value is None:
         raise ValueError('The last dimension of the inputs to `Dense` '
                          'should be defined. Found `None`.')
     self.input_spec = base.InputSpec(min_ndim=2,
                                      axes={-1: input_shape[-1].value})
     kernel_shape = [input_shape[-1].value, self.units]
     # Deterministic ("quiet") component of the kernel.
     kernel_quiet = self.add_variable('kernel_quiet',
                                      shape=kernel_shape,
                                      initializer=self.kernel_initializer,
                                      regularizer=self.kernel_regularizer,
                                      constraint=self.kernel_constraint,
                                      dtype=self.dtype,
                                      trainable=True)
     # Per-element noise magnitude, initialized to 0.5 / sqrt(fan_in);
     # the same initializer is reused for the bias noise scale below.
     scale_init = Constant(value=(0.5 / np.sqrt(kernel_shape[0])))
     kernel_noise_scale = self.add_variable('kernel_noise_scale',
                                            shape=kernel_shape,
                                            initializer=scale_init,
                                            dtype=self.dtype,
                                            trainable=True)
     kernel_noise = self.make_kernel_noise(shape=kernel_shape)
     # Effective kernel = quiet part + learned scale * sampled noise.
     self.kernel = kernel_quiet + kernel_noise_scale * kernel_noise
     if self.use_bias:
         bias_shape = [
             self.units,
         ]
         # Mirror of the kernel construction for the bias term.
         bias_quiet = self.add_variable('bias_quiet',
                                        shape=bias_shape,
                                        initializer=self.bias_initializer,
                                        regularizer=self.bias_regularizer,
                                        constraint=self.bias_constraint,
                                        dtype=self.dtype,
                                        trainable=True)
         bias_noise_scale = self.add_variable(name='bias_noise_scale',
                                              shape=bias_shape,
                                              initializer=scale_init,
                                              dtype=self.dtype,
                                              trainable=True)
         bias_noise = self.make_bias_noise(shape=bias_shape)
         self.bias = bias_quiet + bias_noise_scale * bias_noise
     else:
         # Keep the attribute defined even when no bias is used.
         self.bias = None
     self.built = True