Example #1
  def testCrfLogLikelihood(self):
    inputs = np.array(
        [[4, 5, -3], [3, -1, 3], [-1, 2, 1], [0, 0, 0]], dtype=np.float32)
    transition_params = np.array(
        [[-3, 5, -2], [3, 4, 1], [1, 2, 1]], dtype=np.float32)
    sequence_lengths = np.array(3, dtype=np.int32)
    num_words = inputs.shape[0]
    num_tags = inputs.shape[1]
    with self.test_session() as sess:
      all_sequence_log_likelihoods = []

      # Make sure all probabilities sum to 1.
      for tag_indices in itertools.product(
          range(num_tags), repeat=sequence_lengths):
        tag_indices = list(tag_indices)
        tag_indices.extend([0] * (num_words - sequence_lengths))
        sequence_log_likelihood, _ = crf.crf_log_likelihood(
            inputs=array_ops.expand_dims(inputs, 0),
            tag_indices=array_ops.expand_dims(tag_indices, 0),
            sequence_lengths=array_ops.expand_dims(sequence_lengths, 0),
            transition_params=constant_op.constant(transition_params))
        all_sequence_log_likelihoods.append(sequence_log_likelihood)
      total_log_likelihood = math_ops.reduce_logsumexp(
          all_sequence_log_likelihoods)
      tf_total_log_likelihood = sess.run(total_log_likelihood)
      self.assertAllClose(tf_total_log_likelihood, 0.0)
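
For reference, the property this test asserts can be checked directly outside TensorFlow: exponentiating the score of every possible tag sequence and normalizing by the partition function must give probabilities that sum to 1, so the logsumexp of all per-sequence log-likelihoods is 0. A minimal NumPy sketch of that brute-force check (the helper sequence_score is illustrative, not part of the test):

import itertools

import numpy as np

# Same inputs as the test, restricted to the 3 valid timesteps (sequence_lengths = 3).
inputs = np.array([[4, 5, -3], [3, -1, 3], [-1, 2, 1]], dtype=np.float32)
transitions = np.array([[-3, 5, -2], [3, 4, 1], [1, 2, 1]], dtype=np.float32)
num_tags = inputs.shape[1]

def sequence_score(tags):
    # Unary (emission) scores plus pairwise transition scores along the path.
    unary = sum(inputs[t, tag] for t, tag in enumerate(tags))
    pairwise = sum(transitions[tags[t], tags[t + 1]] for t in range(len(tags) - 1))
    return unary + pairwise

scores = [sequence_score(tags)
          for tags in itertools.product(range(num_tags), repeat=inputs.shape[0])]
log_z = np.log(np.sum(np.exp(scores)))   # log partition function
log_probs = np.array(scores) - log_z     # per-sequence log-likelihoods
print(np.exp(log_probs).sum())           # ~1.0, i.e. logsumexp(log_probs) ~ 0.0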
Example #2
    def testCrfLogLikelihood(self):
        inputs = np.array([[4, 5, -3], [3, -1, 3], [-1, 2, 1], [0, 0, 0]],
                          dtype=np.float32)
        transition_params = np.array([[-3, 5, -2], [3, 4, 1], [1, 2, 1]],
                                     dtype=np.float32)
        sequence_lengths = np.array(3, dtype=np.int32)
        num_words = inputs.shape[0]
        num_tags = inputs.shape[1]
        with self.test_session() as sess:
            all_sequence_log_likelihoods = []

            # Make sure all probabilities sum to 1.
            for tag_indices in itertools.product(range(num_tags),
                                                 repeat=sequence_lengths):
                tag_indices = list(tag_indices)
                tag_indices.extend([0] * (num_words - sequence_lengths))
                sequence_log_likelihood, _ = crf.crf_log_likelihood(
                    inputs=array_ops.expand_dims(inputs, 0),
                    tag_indices=array_ops.expand_dims(tag_indices, 0),
                    sequence_lengths=array_ops.expand_dims(
                        sequence_lengths, 0),
                    transition_params=constant_op.constant(transition_params))
                all_sequence_log_likelihoods.append(sequence_log_likelihood)
            total_log_likelihood = math_ops.reduce_logsumexp(
                all_sequence_log_likelihoods)
            tf_total_log_likelihood = sess.run(total_log_likelihood)
            self.assertAllClose(tf_total_log_likelihood, 0.0)
Example #3
  def __call__(self, seqs, lengths, states):
    '''
      states: bs x L x d
    '''
    # Dynamic dimensions: batch size, sequence length, hidden size.
    batch_size, L, d = tf.shape(states)[0], tf.shape(states)[1], tf.shape(states)[2]
    # Per-token unary scores (logits) over the vocabulary.
    unary = tf.matmul(tf.reshape(states, [-1, d]), self.softmax_w, transpose_b=True) + self.softmax_b

    unary = tf.reshape(unary, [batch_size, L, self.vocab_size])
    unary.set_shape([None, self.max_sequence_length, self.vocab_size])  # TF awkwardness: restore the static shape lost by the reshape

    # Log-likelihood of the gold tag sequences under the CRF.
    ll, _ = crf_log_likelihood(unary, seqs, lengths, self.pairwise)

    return ll
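
The method returns only the per-sequence log-likelihood ll (shape [batch_size]). A typical way to turn it into a training objective, sketched here under the assumption of TF1-style graph code like the snippet above (the optimizer and learning rate are illustrative choices, not from the original):

loss = -tf.reduce_mean(ll)                               # maximize log-likelihood
train_op = tf.train.AdamOptimizer(1e-3).minimize(loss)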
Example #4

    test_sequence_lengths_t = tf.constant(test_word_lengths, dtype=tf.int32, name='test_sequence_lengths')

    w_t = tf.get_variable('W', shape=(num_features, num_tags), dtype=tf.float32,
                          regularizer=None, initializer=tf.initializers.zeros())

    transition_weights_t = tf.get_variable('T', shape=(num_tags, num_tags), dtype=tf.float32,
                                           regularizer=None, initializer=tf.initializers.zeros())

    x_t_features = tf.reshape(x_t, [-1, num_features], name='X_flattened')

    scores = tf.matmul(x_t_features, w_t, name='energies')
    scores = tf.reshape(scores, [num_train_examples, num_train_words, num_tags])

    # Compute the log-likelihood of the gold sequences and keep the transition
    # params for inference at test time.
    log_likelihood, transition_weights_t = crf_log_likelihood(scores, y_t, train_sequence_lengths_t, transition_weights_t)

    x_train_t_features = tf.reshape(x_t, [-1, num_features], name='X_train_flattened')
    x_test_t_features = tf.reshape(x_test_t, [-1, num_features], name='X_test_flattened')

    test_scores = tf.matmul(x_test_t_features, w_t, name='test_energies')
    test_scores = tf.reshape(test_scores, [num_test_examples, num_test_words, num_tags])

    # Compute the viterbi sequence and score.
    viterbi_sequence_train, viterbi_train_scores = crf_decode(scores, transition_weights_t, train_sequence_lengths_t)
    viterbi_sequence, viterbi_score = crf_decode(test_scores, transition_weights_t, test_sequence_lengths_t)

    # Add a training op to tune the parameters.
    loss = -C * tf.reduce_mean(log_likelihood)
    loss += 1e-2 * tf.nn.l2_loss(w_t)  # L2 regularization on the emission weights
    #loss += 1e-2 * 0.5 * tf.reduce_sum(tf.square(transition_weights_t))
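
The loss above still needs an optimizer to become the training op mentioned in the comment. A minimal sketch, assuming a plain gradient-descent optimizer with an arbitrary learning rate and step count (none of which come from the original):

    train_op = tf.train.GradientDescentOptimizer(learning_rate=0.1).minimize(loss)

    with tf.Session() as session:
        session.run(tf.global_variables_initializer())
        for _ in range(1000):
            session.run(train_op)
        # Decode the test sequences with the learned weights.
        decoded_tags = session.run(viterbi_sequence)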