Example #1
0
 def compute_mask(self, inputs, mask=None):
   """Computes the output mask for a concatenation of `inputs`.

   Args:
     inputs: List of input tensors being concatenated along `self.axis`.
     mask: List of mask tensors (or `None`s), one per input.

   Returns:
     A boolean mask tensor covering the concatenated output, or `None`
     when no input carries a mask.

   Raises:
     ValueError: If `inputs` or `mask` is not a list, or their lengths
       differ.
   """
   if mask is None:
     return None
   if not isinstance(mask, list):
     raise ValueError('`mask` should be a list.')
   if not isinstance(inputs, list):
     raise ValueError('`inputs` should be a list.')
   if len(mask) != len(inputs):
     raise ValueError('The lists `inputs` and `mask` '
                      'should have the same length.')
   # Generator expression: no need to materialize a list just for all().
   if all(m is None for m in mask):
     return None
   # Make a list of masks while making sure
   # the dimensionality of each mask
   # is the same as the corresponding input.
   masks = []
   for input_i, mask_i in zip(inputs, mask):
     if mask_i is None:
       # Input is unmasked. Append all 1s to masks,
       masks.append(array_ops.ones_like(input_i, dtype='bool'))
     elif K.ndim(mask_i) < K.ndim(input_i):
       # Mask is smaller than the input, expand it
       # NOTE(review): assumes the rank difference is exactly 1 — confirm.
       masks.append(array_ops.expand_dims(mask_i, axis=-1))
     else:
       masks.append(mask_i)
   concatenated = K.concatenate(masks, axis=self.axis)
   return K.all(concatenated, axis=-1, keepdims=False)
Example #2
0
 def compute_mask(self, inputs, mask=None):
     """Merge the per-input masks into one mask for the concatenated output.

     Returns ``None`` when there is nothing to mask; otherwise broadcasts
     every per-input mask to its input's rank, concatenates them along
     ``self.axis``, and reduces the last axis with a logical AND.

     Raises:
         ValueError: if ``inputs``/``mask`` are not sequences of equal length.
     """
     if mask is None:
         return None
     if not isinstance(mask, (tuple, list)):
         raise ValueError('`mask` should be a list.')
     if not isinstance(inputs, (tuple, list)):
         raise ValueError('`inputs` should be a list.')
     if len(mask) != len(inputs):
         raise ValueError('The lists `inputs` and `mask` '
                          'should have the same length.')
     if all(m is None for m in mask):
         return None

     def _align(tensor, tensor_mask):
         # Unmasked input: every position is valid.
         if tensor_mask is None:
             return array_ops.ones_like(tensor, dtype='bool')
         # Lower-rank mask: pad a trailing axis so ranks line up.
         if backend.ndim(tensor_mask) < backend.ndim(tensor):
             return array_ops.expand_dims(tensor_mask, axis=-1)
         return tensor_mask

     aligned = [_align(t, m) for t, m in zip(inputs, mask)]
     merged = backend.concatenate(aligned, axis=self.axis)
     return backend.all(merged, axis=-1, keepdims=False)
Example #3
0
 def compute_mask(self, inputs, mask=None):
     """Propagate the mask only when the memory tensor has rank > 3.

     When ``inputs`` is a list, the first entry is treated as the memory
     tensor; otherwise ``inputs`` itself is. The mask is reduced over its
     last axis with a logical AND, or dropped entirely.
     """
     memory = inputs[0] if isinstance(inputs, list) else inputs
     # Guard clause: no mask to propagate, or memory rank too low.
     if mask is None or len(K.int_shape(memory)) <= 3:
         return None
     return K.all(mask, axis=-1)
Example #4
0
 def compute_mask(self, inputs, mask=None):
   """Combine element-wise the masks of all inputs with a logical AND.

   Returns `None` when no mask is set; raises ValueError on malformed
   arguments. The surviving masks are stacked on a new leading axis and
   reduced along it.
   """
   if mask is None:
     return None
   # Validate both sequences with identical messages, `mask` first.
   for value, label in ((mask, '`mask`'), (inputs, '`inputs`')):
     if not isinstance(value, list):
       raise ValueError(label + ' should be a list.')
   if len(mask) != len(inputs):
     raise ValueError('The lists `inputs` and `mask` '
                      'should have the same length.')
   if all(m is None for m in mask):
     return None
   present = [m for m in mask if m is not None]
   stacked = [array_ops.expand_dims(m, axis=0) for m in present]
   return K.all(K.concatenate(stacked, axis=0), axis=0, keepdims=False)
Example #5
0
def tp_score(y_true, y_pred, threshold=0.1):
    """Count true positives: positions where `y_true` is truthy AND
    `y_pred` exceeds `threshold`.

    Both tensors are flattened; the three boolean columns (truth,
    thresholded prediction, all-ones filler) are stacked side by side and
    AND-reduced per row, then summed as int32.
    """
    truth = K.cast(K.expand_dims(K.flatten(y_true)), 'bool')
    predicted = K.cast(
        K.expand_dims(K.flatten(K.greater(y_pred, K.constant(threshold)))),
        'bool')
    # Third column is constant True; it never changes the AND result.
    filler = K.cast(K.ones_like(K.expand_dims(K.flatten(y_pred))), 'bool')

    tp_3d = K.concatenate([truth, predicted, filler], axis=1)
    return K.sum(K.cast(K.all(tp_3d, axis=1), 'int32'))
Example #6
0
 def compute_mask(self, inputs, mask=None):
   """Combine element-wise the masks of all inputs with a logical AND.

   Args:
     inputs: List of input tensors.
     mask: List of mask tensors (or `None`s), one per input.

   Returns:
     A boolean mask tensor, or `None` when no input carries a mask.

   Raises:
     ValueError: If `inputs` or `mask` is not a list, or their lengths
       differ.
   """
   if mask is None:
     return None
   if not isinstance(mask, list):
     raise ValueError('`mask` should be a list.')
   if not isinstance(inputs, list):
     raise ValueError('`inputs` should be a list.')
   if len(mask) != len(inputs):
     raise ValueError('The lists `inputs` and `mask` '
                      'should have the same length.')
   # Generator expression: no throwaway list (matches the sibling
   # compute_mask implementations in this file).
   if all(m is None for m in mask):
     return None
   masks = [array_ops.expand_dims(m, axis=0) for m in mask if m is not None]
   return K.all(K.concatenate(masks, axis=0), axis=0, keepdims=False)
Example #7
0
def fn_score(y_true, y_pred, threshold=0.1):
    """Count false negatives: positions where `y_true` is truthy but
    `y_pred` does NOT exceed `threshold`.

    Both tensors are flattened; the prediction column is inverted via
    `|greater - 1|`, stacked beside the truth column and an all-ones
    filler, AND-reduced per row, then summed as int32.
    """
    truth = K.cast(K.expand_dims(K.flatten(y_true)), 'bool')
    # 1.0 where the prediction is BELOW threshold: |greater(...) - 1|.
    missed = K.cast(
        K.expand_dims(
            K.flatten(
                K.abs(
                    K.cast(K.greater(y_pred, K.constant(threshold)), 'float')
                    - K.ones_like(y_pred)))), 'bool')
    # Third column is constant True; it never changes the AND result.
    filler = K.cast(K.ones_like(K.expand_dims(K.flatten(y_pred))), 'bool')

    fn_3d = K.concatenate([truth, missed, filler], axis=1)
    return K.sum(K.cast(K.all(fn_3d, axis=1), 'int32'))
    def update_state(self, y_true, y_pred, sample_weight=None):
        """Accumulate per-sequence exact-match correctness.

        :param y_true: Tensor shape: (B, T)
        :param y_pred: Tensor shape (B, T)
        :param sample_weight: None or Tensor shape (B, T); accepted for
            interface compatibility but not forwarded to the parent.
        :return:
        """
        matches = K.equal(y_pred, y_true)  # elementwise, shape: (B, T)
        # A sequence counts as correct only if every timestep matches.
        per_sequence = K.all(matches, axis=1, keepdims=False)  # shape: (B, )
        super(SequenceCorrectness, self).update_state(per_sequence)