def _noisy(self, x, units, kernel_initializer=tf_utils.xavier_initializer(), name=None, reuse=None, sigma=.4):
    """Noisy dense layer: a plain dense layer plus a learned noisy branch.

    Builds `y = dense(x)` and a parallel branch `x @ (noisy_w * eps_w) + noisy_b * eps_b`
    where eps_w / eps_b are freshly sampled truncated-normal noise (stddev=sigma)
    on every graph evaluation, and noisy_w / noisy_b are trainable variables.
    Returns the sum of both branches when `self.trainable` is truthy, otherwise
    only the deterministic dense output `y` (i.e. noise is disabled at inference).

    Args:
        x: input tensor; the last dimension is taken as the feature size.
        units: output width of the layer.
        kernel_initializer: initializer for both the dense kernel and noisy_w.
        name: variable-scope name; defaults to 'noisy'.
        reuse: passed through to the variable scopes for variable sharing.
        sigma: noise stddev; also scales the noisy_b initializer (sigma/sqrt(units)).

    Returns:
        Tensor of shape [..., units].
    """
    name = name if name is not None else 'noisy'
    with tf.variable_scope(name, reuse=reuse):
        # Deterministic branch.
        y = self._dense(x, units, kernel_initializer=kernel_initializer, reuse=reuse)
        with tf.variable_scope('noisy', reuse=reuse):
            # params for the noisy layer
            features = x.shape.as_list()[-1]
            w_shape = [features, units]
            b_shape = [units]
            # Noise is resampled each run; it is NOT a factorized-noise scheme —
            # independent truncated-normal samples per weight entry.
            epsilon_w = tf.truncated_normal(w_shape, stddev=sigma, name='epsilon_w')
            epsilon_b = tf.truncated_normal(b_shape, stddev=sigma, name='epsilon_b')
            noisy_w = tf.get_variable('noisy_w', shape=w_shape, initializer=kernel_initializer, regularizer=self.l2_regularizer, trainable=self.trainable)
            noisy_b = tf.get_variable('noisy_b', shape=b_shape, initializer=tf.constant_initializer(sigma / np.sqrt(units)), trainable=self.trainable)
            # output of the noisy layer
            x = tf.matmul(x, noisy_w * epsilon_w) + noisy_b * epsilon_b
        x = x + y
    # When not trainable (evaluation/deployment) the noisy branch is dropped
    # entirely and only the deterministic dense output is returned.
    if self.trainable:
        return x
    else:
        return y
def _convtrans(self, x, filters, kernel_size, strides=1, padding='same', kernel_initializer=tf_utils.xavier_initializer(), name=None, reuse=None):
    """Transposed 2-D convolution wired to the model's regularizer and trainable flag.

    Thin wrapper around `tf.layers.conv2d_transpose` that always applies
    `self.l2_regularizer` to the kernel and `self.trainable` to the variables.
    """
    layer_kwargs = dict(
        strides=strides,
        padding=padding,
        kernel_initializer=kernel_initializer,
        kernel_regularizer=self.l2_regularizer,
        trainable=self.trainable,
        name=name,
        reuse=reuse,
    )
    return tf.layers.conv2d_transpose(x, filters, kernel_size, **layer_kwargs)
def _dense_norm_activation(self, x, units, kernel_initializer=tf_utils.xavier_initializer(), normalization=None, activation=None, name=None, reuse=None):
    """Dense layer followed by optional normalization and activation.

    Delegates the linear part to `self._dense` and the post-processing to
    `tf_utils.norm_activation`, forwarding the model's training/trainable flags.
    """
    out = self._dense(x, units, kernel_initializer=kernel_initializer, name=name, reuse=reuse)
    out = tf_utils.norm_activation(
        out,
        normalization=normalization,
        activation=activation,
        training=self.training,
        trainable=self.trainable,
    )
    return out
def _convtrans_norm_activation(self, x, filters, kernel_size, strides=1, padding='same', kernel_initializer=tf_utils.xavier_initializer(), normalization=None, activation=None, trainable=True, name=None, reuse=None):
    """Transposed conv layer followed by optional normalization and activation.

    Mirrors `_conv_norm_activation` / `_dense_norm_activation`: the layer is
    built by `self._convtrans` and post-processed by `tf_utils.norm_activation`
    with the model's training/trainable flags.

    Args:
        trainable: kept for backward compatibility with existing callers but
            ignored — `self._convtrans` has no `trainable` parameter (it always
            uses `self.trainable`), so forwarding it raised a TypeError before.

    Returns:
        The normalized/activated output tensor.
    """
    # Bug fix: previously forwarded `trainable=trainable` to self._convtrans,
    # which does not accept that keyword -> TypeError at call time.
    x = self._convtrans(x, filters, kernel_size, strides=strides, padding=padding, kernel_initializer=kernel_initializer, name=name, reuse=reuse)
    # Consistency fix: siblings pass training=self.training and
    # trainable=self.trainable; this one used getattr(self, 'is_training', False).
    x = tf_utils.norm_activation(x, normalization=normalization, activation=activation, training=self.training, trainable=self.trainable)
    return x
def _conv_norm_activation(self, x, filters, kernel_size, strides=1, padding='same', kernel_initializer=tf_utils.xavier_initializer(), normalization=None, activation=None, name=None, reuse=None):
    """Convolution followed by optional normalization and activation.

    The conv itself comes from `self._conv`; normalization/activation are
    applied by `tf_utils.norm_activation` using the model's flags.
    """
    out = self._conv(
        x,
        filters,
        kernel_size,
        strides=strides,
        padding=padding,
        kernel_initializer=kernel_initializer,
        name=name,
        reuse=reuse,
    )
    return tf_utils.norm_activation(out, normalization=normalization, activation=activation, training=self.training, trainable=self.trainable)
def _noisy_norm_activation(self, x, units, kernel_initializer=tf_utils.xavier_initializer(), normalization=None, activation=None, name=None, reuse=None, sigma=.4):
    """Noisy dense layer followed by optional normalization and activation.

    Builds the noisy layer via `self._noisy` (noise stddev `sigma`) and then
    applies `tf_utils.norm_activation` with the model's training/trainable flags.
    """
    out = self._noisy(x, units, kernel_initializer=kernel_initializer, name=name, reuse=reuse, sigma=sigma)
    out = tf_utils.norm_activation(
        out,
        normalization=normalization,
        activation=activation,
        training=self.training,
        trainable=self.trainable,
    )
    return out
def _dense(self, x, units, kernel_initializer=tf_utils.xavier_initializer(), name=None, reuse=None):
    """Fully connected layer wired to the model's regularizer and trainable flag.

    Thin wrapper around `tf.layers.dense` that always applies
    `self.l2_regularizer` to the kernel and `self.trainable` to the variables.
    """
    layer_kwargs = dict(
        kernel_initializer=kernel_initializer,
        kernel_regularizer=self.l2_regularizer,
        trainable=self.trainable,
        name=name,
        reuse=reuse,
    )
    return tf.layers.dense(x, units, **layer_kwargs)