Example #1
    def _decoder_training(self, init_state):
        # Projection from RNN outputs to vocabulary logits, scoped under
        # 'decoder/dense' via the TF1 Layer `_scope` kwarg.
        lin_proj = Dense(args.vocab_size, _scope='decoder/dense')

        # Teacher forcing: feed the embedded ground-truth tokens at each step;
        # the +1 on the length presumably covers the extra start/end token.
        helper = tf.contrib.seq2seq.TrainingHelper(
            inputs=tf.nn.embedding_lookup(self.tied_embedding,
                                          self._decoder_input()),
            sequence_length=self.seq_length + 1)
        # NOTE: BasicDecoder is assumed to be a project-local variant that also
        # takes the latent code `z`; the stock tf.contrib.seq2seq.BasicDecoder
        # has no such argument.
        decoder = BasicDecoder(cell=tf.nn.rnn_cell.MultiRNNCell(
            [self._rnn_cell() for _ in range(args.decoder_layers)]),
                               helper=helper,
                               initial_state=init_state,
                               z=self.z,
                               output_layer=None)
        decoder_output, _, _ = tf.contrib.seq2seq.dynamic_decode(
            decoder=decoder,
            impute_finished=True,
            maximum_iterations=tf.reduce_max(self.seq_length + 1))

        # Return both the raw RNN outputs and the projected vocabulary logits.
        return decoder_output.rnn_output, lin_proj.apply(
            decoder_output.rnn_output)
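
# --- Hypothetical usage sketch (not from the original source): one common
# TF 1.x way to turn the logits returned by _decoder_training into a masked
# reconstruction loss. `targets` and `seq_length` are assumed to be the
# ground-truth token ids and true sequence lengths.
import tensorflow as tf  # TF 1.x

def reconstruction_loss(logits, targets, seq_length):
    # Mask out time steps beyond each sequence's true length (+1 as above).
    max_len = tf.shape(logits)[1]
    weights = tf.sequence_mask(seq_length + 1, max_len, dtype=tf.float32)
    # Cross-entropy averaged over the unmasked positions.
    return tf.contrib.seq2seq.sequence_loss(logits=logits,
                                            targets=targets,
                                            weights=weights)
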
# Convolutional front end; the "Convolution" variant additionally returns a
# coherence penalty computed inside conv2d_co (assumed defined elsewhere).
if COHERENCE_REGULARIZATION == "Convolution":
    z, coherence_loss = conv2d_co(z, 5, 5, 16)
    z = max_pool(z, 2, 2)
    z = tf.reshape(z, [-1, z.shape[1] * z.shape[2] * z.shape[3]])
    z = tf.nn.dropout(z, keep_prob=dropout)
else:
    z = conv2d(z, 5, 5, 16)
    z = max_pool(z, 2, 2)
    z = tf.reshape(z, [-1, z.shape[1] * z.shape[2] * z.shape[3]])

# Fully connected layer whose kernel columns are monitored for coherence.
layer = Dense(800,
              activation=relu,
              kernel_initializer=xavier_initializer(uniform=False),
              name="Dense")
z = layer.apply(z)
# Column-normalize the kernel for the coherence terms below. This only rebinds
# the Python attribute: the matmul already built by layer.apply still uses the
# original (unnormalized) variable.
layer.kernel = tf.nn.l2_normalize(layer.kernel, axis=0)
eye = tf.eye(800)

if COHERENCE_REGULARIZATION == "FC":
    # Penalize deviation of the kernel's Gram matrix from a target correlation
    # matrix (corrMat, assumed defined elsewhere).
    coherence_loss = coherence_coef * tf.reduce_max(
        tf.abs(
            tf.abs(tf.matmul(layer.kernel, layer.kernel, transpose_a=True)) -
            corrMat))
elif COHERENCE_REGULARIZATION != "Convolution":
    # Only zero the loss when no penalty was set above; a bare `else` would
    # overwrite the coherence_loss computed in the "Convolution" branch.
    coherence_loss = 0
# Mutual coherence of the (column-normalized) kernel, tracked as a metric.
coherence = tf.reduce_max(
    tf.abs(
        tf.abs(tf.matmul(layer.kernel, layer.kernel, transpose_a=True)) - eye))
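
# --- Hypothetical helper (not in the original source): the same quantity as
# the `coherence` expression above, factored out for reuse. For a
# column-normalized kernel, abs(K^T K) has a unit diagonal, so subtracting the
# identity isolates the pairwise |cosine| similarities; their maximum is the
# mutual coherence.
def mutual_coherence(kernel, num_units):
    # kernel: [input_dim, num_units]; columns assumed L2-normalized.
    gram = tf.abs(tf.matmul(kernel, kernel, transpose_a=True))
    return tf.reduce_max(tf.abs(gram - tf.eye(num_units)))
# e.g. coherence = mutual_coherence(layer.kernel, 800)
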
z = tf.nn.dropout(z, keep_prob=dropout)
pred_logits = tf.contrib.layers.fully_connected(