Пример #1
0
def residual_block(x, hparams):
    """Apply a separable-conv block to x with a residual connection.

    The conv output is added back onto x, layer-normalized, and dropout
    is applied to the result.
    """
    kernel = (hparams.kernel_height, hparams.kernel_width)
    # Three identical undilated (dilation, kernel) pairs.
    dilations_and_kernels = 3 * [((1, 1), kernel)]
    conv_out = common_layers.subseparable_conv_block(
        x,
        hparams.hidden_size,
        dilations_and_kernels,
        padding="SAME",
        separability=0,
        name="residual_block")
    normed = common_layers.layer_norm(x + conv_out, hparams.hidden_size,
                                      name="lnorm")
    return tf.nn.dropout(normed, 1.0 - hparams.dropout)
Пример #2
0
def residual_conv(x, repeat, k, hparams, name, reuse=None):
    """Apply `repeat` residual convolution blocks to x.

    Each step layer-norms x, runs a conv block, applies dropout, and adds
    the result back onto x.
    """
    with tf.variable_scope(name, reuse=reuse):
        # Three identical undilated (dilation, kernel) pairs.
        dilations_and_kernels = 3 * [((1, 1), k)]
        for step in range(repeat):
            with tf.variable_scope("repeat_%d" % step):
                normed = common_layers.layer_norm(
                    x, hparams.hidden_size, name="lnorm")
                conv_out = common_layers.conv_block(
                    normed,
                    hparams.hidden_size,
                    dilations_and_kernels,
                    padding="SAME",
                    name="residual_conv")
                x += tf.nn.dropout(conv_out, 1.0 - hparams.dropout)
        return x
Пример #3
0
def residual_dilated_conv(x, repeat, padding, name, hparams):
    """Apply `repeat` dilated-convolution blocks to x with residual adds.

    Each step layer-norms x, runs a conv block whose dilation doubles per
    hidden layer (1, 2, 4, ...), applies dropout, and adds the result back.
    """
    with tf.variable_scope(name):
        kernel = (hparams.kernel_height, hparams.kernel_width)
        dilations_and_kernels = [((2**layer, 1), kernel)
                                 for layer in range(hparams.num_hidden_layers)]
        for step in range(repeat):
            with tf.variable_scope("repeat_%d" % step):
                normed = common_layers.layer_norm(
                    x, hparams.hidden_size, name="lnorm")
                conv_out = common_layers.conv_block(
                    normed,
                    hparams.hidden_size,
                    dilations_and_kernels,
                    padding=padding,
                    name="residual_conv")
                x += tf.nn.dropout(conv_out, 1.0 - hparams.dropout)
        return x
Пример #4
0
 def residual_fn(x, y):
     """Add y (after dropout) onto x and layer-normalize the sum.

     NOTE(review): `hparams` is a free variable captured from the enclosing
     scope, which is not visible in this chunk.
     """
     dropped = tf.nn.dropout(y, 1.0 - hparams.residual_dropout)
     return common_layers.layer_norm(x + dropped)
Пример #5
0
def layernorm_module(x, hparams):
    """Layer-normalize x to hparams.hidden_size under scope "layer_norm"."""
    normed = common_layers.layer_norm(x, hparams.hidden_size,
                                      name="layer_norm")
    return normed
Пример #6
0
def residual_fn3(x, y, z, hparams):
    """Combine x with dropped-out y and z, then layer-normalize the sum."""
    keep_prob = 1.0 - hparams.dropout
    dropped_y = tf.nn.dropout(y, keep_prob)
    dropped_z = tf.nn.dropout(z, keep_prob)
    return common_layers.layer_norm(x + dropped_y + dropped_z)