Example #1
    def _convtrans_norm_activation(
            self,
            x,
            filters,
            kernel_size,
            strides=1,
            padding='same',
            kernel_initializer=tf_utils.xavier_initializer(),
            normalization=None,
            activation=None,
            trainable=True,
            name=None,
            reuse=None):
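        """Apply a transposed convolution, then normalization and activation via tf_utils."""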
        x = self._convtrans(x,
                            filters,
                            kernel_size,
                            strides=strides,
                            padding=padding,
                            kernel_initializer=kernel_initializer,
                            trainable=trainable,
                            name=name,
                            reuse=reuse)
        x = tf_utils.norm_activation(x,
                                     normalization=normalization,
                                     activation=activation,
                                     training=getattr(self, 'is_training',
                                                      False))

        return x
Example #2
    def _conv_norm_activation(self,
                              x,
                              filters,
                              kernel_size,
                              strides=1,
                              padding='same',
                              kernel_initializer=tf_utils.xavier_initializer(),
                              normalization=None,
                              activation=None,
                              name=None,
                              reuse=None):
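        """Apply a convolution, then normalization and activation via tf_utils."""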
        x = self._conv(x,
                       filters,
                       kernel_size,
                       strides=strides,
                       padding=padding,
                       kernel_initializer=kernel_initializer,
                       name=name,
                       reuse=reuse)
        x = tf_utils.norm_activation(x,
                                     normalization=normalization,
                                     activation=activation,
                                     training=self.training,
                                     trainable=self.trainable)

        return x
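
A minimal usage sketch, assuming these helpers live on a model class and that tf_utils.norm_activation accepts a normalization name and a callable activation; the method name, layer names, filter counts, and the 'batch_norm' string below are illustrative assumptions, not taken from the source project.

    def _build_encoder_decoder(self, x):
        # Hypothetical builder chaining the two helpers above:
        # downsample with a strided conv, then upsample with a transposed conv.
        x = self._conv_norm_activation(x, filters=64, kernel_size=3, strides=2,
                                       normalization='batch_norm',
                                       activation=tf.nn.relu, name='enc1')
        x = self._convtrans_norm_activation(x, filters=32, kernel_size=3, strides=2,
                                            normalization='batch_norm',
                                            activation=tf.nn.relu, name='dec1')
        return x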