# Example 1
def residual_block(x, hparams):
    """Apply a stack of separable-conv blocks to x with a residual connection.

    Runs three undilated subseparable convolutions over x, adds the result
    back onto x, layer-normalizes the sum, and applies dropout.
    """
    kernel = (hparams.kernel_height, hparams.kernel_width)
    # Three identical (dilation, kernel) specs: no dilation, same kernel.
    dilations_and_kernels = [((1, 1), kernel)] * 3
    y = common_layers.subseparable_conv_block(
        x,
        hparams.hidden_size,
        dilations_and_kernels,
        padding="SAME",
        separability=0,
        name="residual_block")
    x = common_layers.layer_norm(x + y, hparams.hidden_size, name="lnorm")
    # TF1 tf.nn.dropout takes keep_prob, hence 1.0 - dropout rate.
    keep_prob = 1.0 - hparams.dropout
    return tf.nn.dropout(x, keep_prob)
# Example 2
def get_norm(hparams):
    """Get the normalizer function selected by hparams.normalizer_fn.

    Args:
      hparams: hyperparameters object; hparams.normalizer_fn must be one of
        "layer", "batch", "noam" or "none"; "layer" additionally reads
        hparams.hidden_size.

    Returns:
      A callable with signature (x, name) -> normalized tensor.

    Raises:
      ValueError: if hparams.normalizer_fn is not one of the known names.
    """
    if hparams.normalizer_fn == "layer":
        return lambda x, name: common_layers.layer_norm(  # pylint: disable=g-long-lambda
            x,
            hparams.hidden_size,
            name=name)
    if hparams.normalizer_fn == "batch":
        return tf.layers.batch_normalization
    if hparams.normalizer_fn == "noam":
        return common_layers.noam_norm
    if hparams.normalizer_fn == "none":
        return lambda x, name: x
    # Bug fix: the original message lacked a space between "'batch'," and
    # "'noam'" because of implicit string-literal concatenation.
    raise ValueError(
        "Parameter normalizer_fn must be one of: 'layer', 'batch', "
        "'noam', 'none'.")
# Example 3
def residual_dilated_conv(x, repeat, padding, name, hparams):
    """A stack of dilated convolution blocks with residual connections.

    Args:
      x: input tensor.
      repeat: number of residual conv/norm/dropout blocks to stack.
      padding: padding mode passed through to the conv block.
      name: variable-scope name for the stack.
      hparams: hyperparameters; reads kernel_height, kernel_width,
        num_hidden_layers, hidden_size and dropout.

    Returns:
      The transformed tensor.
    """
    with tf.variable_scope(name):
        k = (hparams.kernel_height, hparams.kernel_width)
        # Exponentially increasing dilation per hidden layer: 1, 2, 4, ...
        dilations_and_kernels = [((2**i, 1), k)
                                 for i in xrange(hparams.num_hidden_layers)]
        for i in xrange(repeat):
            with tf.variable_scope("repeat_%d" % i):
                y = common_layers.conv_block(x,
                                             hparams.hidden_size,
                                             dilations_and_kernels,
                                             padding=padding,
                                             name="residual_conv")
                x = common_layers.layer_norm(x + y,
                                             hparams.hidden_size,
                                             name="lnorm")
                # Bug fix: TF1 tf.nn.dropout's second argument is keep_prob,
                # not the drop rate. The original passed hparams.dropout
                # directly, inverting the semantics relative to every sibling
                # block in this file (which all use 1.0 - dropout).
                x = tf.nn.dropout(x, 1.0 - hparams.dropout)
        return x
# Example 4
def residual_fn3(x, y, z, hparams):
  """Residual combination of three branches followed by layer norm.

  Applies dropout to y and z (keeping x intact), then layer-normalizes
  the sum x + y + z.
  """
  keep_prob = 1.0 - hparams.dropout  # TF1 dropout takes keep_prob.
  y = tf.nn.dropout(y, keep_prob)
  z = tf.nn.dropout(z, keep_prob)
  return common_layers.layer_norm(x + y + z)
# Example 5
 def residual_fn(x, y):
     """Add y (after dropout) onto x and layer-normalize the sum.

     NOTE(review): `hparams` is a free variable here, and the one-space
     indentation suggests this was excerpted from inside an enclosing
     function that defines it — confirm `hparams.residual_dropout` is in
     scope at the definition site.
     """
     return common_layers.layer_norm(
         x + tf.nn.dropout(y, 1.0 - hparams.residual_dropout))