Example #1
    def apply(self, is_train, x, c, mask=None, context_mask=None):
        x = dropout(x, self.keep_probs, is_train)
        c = dropout(c, self.context_keep_probs, is_train)
        init = get_keras_initialization(self.init)
        x_w = tf.get_variable("merge_x_weights", (x.shape.as_list()[-1], self.output_size), initializer=init)
        c_w = tf.get_variable("merge_context_weights", (c.shape.as_list()[-1], self.output_size), initializer=init)
        # project x per-timestep and broadcast-add the projected context vector c
        output = tf.tensordot(x, x_w, axes=[[2], [0]]) + tf.expand_dims(tf.matmul(c, c_w), 1)
        if self.use_dots:
            # element-wise product of each timestep of x with c (requires matching last dims)
            dots = tf.einsum("aij,aj->aij", x, c)
            dot_w = tf.get_variable("dot_weights", (c.shape.as_list()[-1], self.output_size), initializer=init)
            output += tf.tensordot(dots, dot_w, axes=[[2], [0]])

        bias = tf.get_variable("merge_bias", (1, 1, self.output_size))
        output += bias
        return get_keras_activation(self.activation)(output)
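The merge layer above projects the per-timestep tensor x and the per-example context vector c into the same output space and adds them with broadcasting (plus an optional x*c interaction term). A minimal shape sketch, with made-up dimensions that are not taken from the source:

import tensorflow as tf

batch, seq_len, d_x, d_c, d_out = 2, 5, 7, 3, 4
x = tf.zeros([batch, seq_len, d_x])    # per-timestep features
c = tf.zeros([batch, d_c])             # one context vector per example
x_w = tf.zeros([d_x, d_out])
c_w = tf.zeros([d_c, d_out])

# tensordot projects the last axis of x; expand_dims broadcasts the projected c over time
out = tf.tensordot(x, x_w, axes=[[2], [0]]) + tf.expand_dims(tf.matmul(c, c_w), 1)
print(out.shape)  # (2, 5, 4)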
Example #2
    def apply(self, is_train, x, mask=None):
        num_channels = x.get_shape()[3]
        # a 1 x filter_size kernel makes conv2d act as a 1-D convolution along axis 2
        filter_ = tf.get_variable("conv1d/filters", shape=[1, self.filter_size, num_channels, self.num_filters], dtype='float')
        bias = tf.get_variable("conv1d/bias", shape=[self.num_filters], dtype='float')
        strides = [1, 1, 1, 1]
        if self.keep_probs < 1.0:
            x = dropout(x, self.keep_probs, is_train)
        fn = get_keras_activation(self.activation)
        return fn(tf.nn.conv2d(x, filter_, strides, "VALID") + bias)
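Example #2 implements a 1-D convolution over the sequence axis by handing tf.nn.conv2d a kernel of height 1, so the filter only slides along axis 2. A minimal sketch with made-up shapes, not taken from the source:

import tensorflow as tf

x = tf.zeros([2, 1, 10, 8])        # [batch, rows, seq_len, channels]
filt = tf.zeros([1, 3, 8, 16])     # [1, filter_size, in_channels, out_channels]
y = tf.nn.conv2d(x, filt, strides=[1, 1, 1, 1], padding="VALID")
print(y.shape)  # (2, 1, 8, 16): "VALID" padding shrinks seq_len by filter_size - 1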
Example #3
    def apply(self, is_train, x, mask=None):
        shape = tf.shape(x)
        # build a noise_shape with 1 on `self.axis` so the dropout mask is shared along that axis
        noise_shape = []
        for i in range(x.shape.ndims):
            if i == self.axis:
                noise_shape.append(1)
            else:
                noise_shape.append(shape[i])
        return dropout(x, self.keep_probs, is_train, noise_shape)
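Every snippet on this page calls a dropout(x, keep_prob, is_train, noise_shape) helper that is not shown here. A minimal sketch of what it is assumed to do, judging from how it is used (apply tf.nn.dropout only at training time); the project's actual helper may differ:

import tensorflow as tf

def dropout(x, keep_prob, is_train, noise_shape=None, seed=None):
    # assumed behaviour: no-op when keep_prob is 1.0 or when not training (TF 1.x API)
    if keep_prob >= 1.0:
        return x
    # a noise_shape entry of 1 reuses the same dropout mask along that axis
    return tf.cond(is_train,
                   lambda: tf.nn.dropout(x, keep_prob, noise_shape=noise_shape, seed=seed),
                   lambda: x)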
Example #4
    def __call__(self, inputs, state, scope=None):
        with tf.variable_scope(self.scope):
            c, h = state
            # recurrent (state) dropout on the previous hidden state
            h = dropout(h, self.keep_recurrent_probs, self.is_train)

            mat = _compute_gates(inputs, h, self.num_units, self.forget_bias,
                                 self.kernel_initializer, self.recurrent_initializer, True)

            # i: input gate, j: candidate values, f: forget gate, o: output gate
            i, j, f, o = tf.split(value=mat, num_or_size_splits=4, axis=1)

            new_c = (c * self.recurrent_activation(f) + self.recurrent_activation(i) *
                     self.activation(j))
            new_h = self.activation(new_c) * self.recurrent_activation(o)

            new_state = LSTMStateTuple(new_c, new_h)

        return new_h, new_state
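The __call__ above belongs to a custom LSTM cell whose class definition is not shown; its distinguishing feature is dropout applied to the previous hidden state h before the gates are computed. For comparison, stock TF 1.x exposes a similar knob through DropoutWrapper's state_keep_prob; a minimal sketch of that built-in analogue (not the author's cell):

import tensorflow as tf

inputs = tf.placeholder(tf.float32, [None, None, 50])           # [batch, time, features]
base = tf.nn.rnn_cell.LSTMCell(num_units=100)
cell = tf.nn.rnn_cell.DropoutWrapper(base, state_keep_prob=0.8)  # drop units of the recurrent state
outputs, final_state = tf.nn.dynamic_rnn(cell, inputs, dtype=tf.float32)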
Example #5
    def apply(self, is_train, x, mask=None):
        shape = tf.shape(x)
        # share the dropout mask across axis 1 (e.g. the time dimension)
        return dropout(x, self.keep_prob, is_train, [shape[0], 1, shape[2]])
Example #6
    def apply(self, is_train, x, mask=None):
        return dropout(x, self.keep_prob, is_train)
Example #7
    def apply(self, is_train, x, mask=None):
        shape = tf.shape(x)
        # if x.shape[1] == 4:
        #     pdb.set_trace()
        #     return dropout(x, self.keep_prob, is_train, [1, 1, shape[2]])
        return dropout(x, self.keep_prob, is_train, [shape[0], 1, shape[2]])