import tensorflow as tf
from tensorflow.contrib import rnn


def cell_wrapper():
    # Builds one RNN cell, used for both the forward and the backward
    # direction; reads its hyperparameters (rnn_cell, cell_sz, inputs,
    # seq_len, dropout settings, ...) from the enclosing scope.
    if rnn_cell == "gru":
        # GRUCell takes a kernel initializer and has no peephole or
        # clipping options.
        cell = rnn.GRUCell(cell_sz,
                           kernel_initializer=tf.orthogonal_initializer())
    elif rnn_cell == "sru":
        # SRUCell accepts neither an initializer nor peephole/clipping
        # options.
        cell = rnn.SRUCell(cell_sz)
    else:
        cell = rnn.LSTMCell(cell_sz,
                            initializer=tf.orthogonal_initializer(),
                            use_peepholes=False,
                            cell_clip=None,
                            proj_clip=None)

    # dropout, highway, and attention wrappers
    if rnn_dropout:
        print("dropout")
        cell = rnn.DropoutWrapper(cell,
                                  input_keep_prob=input_keep_prob,
                                  output_keep_prob=output_keep_prob,
                                  state_keep_prob=state_keep_prob,
                                  variational_recurrent=vi_dropout,
                                  input_size=inputs.shape[-1],
                                  dtype=tf.float32)
    if highway:
        print("highway")
        cell = rnn.HighwayWrapper(cell,
                                  couple_carry_transform_gates=True,
                                  carry_bias_init=1.0)
    if self_attention:
        print("attention")
        # Project-specific wrapper, not tf.contrib.seq2seq.AttentionWrapper.
        cell = AttentionWrapper(cell,
                                memory=inputs,
                                memory_len=seq_len,
                                mechanism=attention_mechanism,
                                attention_sz=attention_sz,
                                dot_norm=dot_norm)

    # a single cell, ready to serve as the forward or backward cell
    return cell
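
# A hedged usage sketch (not from the original project): the bindings below
# stand in for the enclosing scope that cell_wrapper closes over, and
# self-attention stays off because AttentionWrapper is project-specific.
# All names, sizes, and keep probabilities are illustrative.
rnn_cell = "lstm"
cell_sz = 128
rnn_dropout, highway, self_attention = True, False, False
vi_dropout = False
input_keep_prob = output_keep_prob = state_keep_prob = 0.8

inputs = tf.placeholder(tf.float32, [None, None, 300])  # [batch, time, dim]
seq_len = tf.placeholder(tf.int32, [None])              # [batch]

# One independently parameterized cell per direction.
outputs, states = tf.nn.bidirectional_dynamic_rnn(
    cell_wrapper(), cell_wrapper(), inputs,
    sequence_length=seq_len, dtype=tf.float32)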
Example #2
def get_rnn_cell(hparams=None, mode=None):
    """Creates an RNN cell.

    See :func:`~texar.core.default_rnn_cell_hparams` for all
    hyperparameters and default values.

    Args:
        hparams (dict or HParams, optional): Cell hyperparameters. Missing
            hyperparameters are set to default values.
        mode (optional): A Tensor taking value in
            :tf_main:`tf.estimator.ModeKeys <estimator/ModeKeys>`, including
            `TRAIN`, `EVAL`, and `PREDICT`. If `None`, dropout will be
            controlled by :func:`texar.global_mode`.

    Returns:
        A cell instance.

    Raises:
        ValueError: If hparams["num_layers"]>1 and hparams["type"] is an
            RNN cell instance rather than a cell class or class name.
        ValueError: The cell is not an
            :tf_main:`RNNCell <contrib/rnn/RNNCell>` instance.
    """
    if hparams is None or isinstance(hparams, dict):
        hparams = HParams(hparams, default_rnn_cell_hparams())

    d_hp = hparams["dropout"]
    if d_hp["variational_recurrent"] and \
            len(d_hp["input_size"]) != hparams["num_layers"]:
        raise ValueError(
            "If variational_recurrent=True, input_size must be a list of "
            "num_layers(%d) integers. Got len(input_size)=%d." %
            (hparams["num_layers"], len(d_hp["input_size"])))

    cells = []
    cell_kwargs = hparams["kwargs"].todict()
    num_layers = hparams["num_layers"]
    for layer_i in range(num_layers):
        # Create the basic cell
        cell_type = hparams["type"]
        if not is_str(cell_type) and not isinstance(cell_type, type):
            if num_layers > 1:
                raise ValueError(
                    "If 'num_layers'>1, then 'type' must be a cell class or "
                    "its name/module path, rather than a cell instance.")
        cell_modules = ['tensorflow.contrib.rnn', 'texar.custom']
        cell = utils.check_or_get_instance(cell_type, cell_kwargs,
                                           cell_modules, rnn.RNNCell)

        # Optionally add dropout
        if d_hp["input_keep_prob"] < 1.0 or \
                d_hp["output_keep_prob"] < 1.0 or \
                d_hp["state_keep_prob"] < 1.0:
            vr_kwargs = {}
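            # Variational recurrent dropout reuses one dropout mask across
            # all time steps, so DropoutWrapper needs the layer's input
            # size up front to build that mask.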
            if d_hp["variational_recurrent"]:
                vr_kwargs = {
                    "variational_recurrent": True,
                    "input_size": d_hp["input_size"][layer_i],
                    "dtype": tf.float32
                }
            input_keep_prob = switch_dropout(d_hp["input_keep_prob"], mode)
            output_keep_prob = switch_dropout(d_hp["output_keep_prob"], mode)
            state_keep_prob = switch_dropout(d_hp["state_keep_prob"], mode)
            cell = rnn.DropoutWrapper(cell=cell,
                                      input_keep_prob=input_keep_prob,
                                      output_keep_prob=output_keep_prob,
                                      state_keep_prob=state_keep_prob,
                                      **vr_kwargs)

        # Optionally add residual and highway connections
        if layer_i > 0:
            if hparams["residual"]:
                cell = rnn.ResidualWrapper(cell)
            if hparams["highway"]:
                cell = rnn.HighwayWrapper(cell)

        cells.append(cell)

    if hparams["num_layers"] > 1:
        cell = rnn.MultiRNNCell(cells)
    else:
        cell = cells[0]

    return cell
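
# A hedged usage sketch: the hparams keys mirror the structure read above
# ("type", "kwargs", "num_layers", "dropout", "residual", "highway");
# the concrete values are illustrative, and omitted keys would be filled
# in from default_rnn_cell_hparams().
hparams = {
    "type": "LSTMCell",                # resolved in tensorflow.contrib.rnn
    "kwargs": {"num_units": 256},
    "num_layers": 2,
    "dropout": {
        "input_keep_prob": 1.0,
        "output_keep_prob": 0.5,       # dropout on each layer's outputs
        "state_keep_prob": 1.0,
        "variational_recurrent": False,
        "input_size": [],              # only consulted when variational
    },
    "residual": True,                  # residual connection on layer 2
    "highway": False,
}
# Dropout is live because mode is TRAIN; under EVAL/PREDICT, switch_dropout
# resets the keep probabilities to 1.0.
cell = get_rnn_cell(hparams, mode=tf.estimator.ModeKeys.TRAIN)
# The returned MultiRNNCell plugs into the standard TF 1.x RNN APIs, e.g.
# outputs, state = tf.nn.dynamic_rnn(cell, some_inputs, dtype=tf.float32)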