Example #1
def fast_clauses_as_sequence_jagged(clauses, name=None):
  """Serialize FastClause protos into a jagged tensor of ids.

  Args:
    clauses: 1-D tensor or 2-D jagged tensor of FastClause protos.
    name: Optional name for this operation.

  Returns:
    Jagged tensor of id sequences.
  """
  sizes, flat = jagged.unjagged(clauses)
  sizes_flat, flat = gen_clause_ops.fast_clauses_as_sequence_jagged(
      flat, name=name)
  return jagged.jagged(jagged.jagged(sizes, sizes_flat), flat)
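
The function above treats a jagged tensor as a (sizes, flat) pair: per-row lengths plus one flattened value tensor, with deeper nesting expressed by making sizes itself jagged. The jagged module is internal to this project, so the following is only a minimal NumPy sketch of that representation (Jagged, to_jagged, and to_lists are illustrative names, not the project's API):

import collections

import numpy as np

# Sketch of the assumed (sizes, flat) representation; not the real module.
Jagged = collections.namedtuple('Jagged', ['sizes', 'flat'])

def to_jagged(sequences):
  # Pack variable-length rows into one flat array plus per-row sizes.
  sizes = np.array([len(s) for s in sequences], dtype=np.int32)
  flat = np.concatenate(sequences) if sequences else np.zeros([0])
  return Jagged(sizes, flat)

def to_lists(j):
  # Invert to_jagged: split flat back into rows at the cumulative sizes.
  return np.split(j.flat, np.cumsum(j.sizes)[:-1])

j = to_jagged([np.array([1, 2, 3]), np.array([4])])
print(j.sizes)      # [3 1]
print(j.flat)       # [1 2 3 4]
print(to_lists(j))  # [array([1, 2, 3]), array([4])]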
Example #2
def testConvEmpty(self):
  with self.test_session() as sess:
    sizes = tf.constant([], dtype=tf.int32)
    seqs = jagged.jagged(sizes, tf.zeros([0, 7]))
    filters = [tf.zeros([3, 7, 13])]
    activations = [tf.nn.relu]
    out = jagged.conv1d_stack(seqs, filters, activations)
    out_sizes, out_flat = sess.run([out.sizes, out.flat])
    self.assertAllEqual(out_sizes, [])
    self.assertAllEqual(out_flat, np.zeros([0, 13]))
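
For contrast with the empty-input case above, a hypothetical companion test (not from the source) could pin down shapes on non-empty input. It assumes conv1d_stack uses 'SAME'-style padding so each sequence keeps its length; the empty-input test alone does not establish that:

def testConvNonEmpty(self):
  # Hypothetical sketch; assumes 'SAME'-style padding, so per-sequence
  # lengths survive conv1d_stack unchanged.
  with self.test_session() as sess:
    sizes = tf.constant([2, 3], dtype=tf.int32)
    seqs = jagged.jagged(sizes, tf.random_normal([5, 7]))
    filters = [tf.zeros([3, 7, 13])]
    activations = [tf.nn.relu]
    out = jagged.conv1d_stack(seqs, filters, activations)
    out_sizes, out_flat = sess.run([out.sizes, out.flat])
    self.assertAllEqual(out_sizes, [2, 3])
    self.assertAllEqual(out_flat.shape, (5, 13))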
Example #3
def fast_model(conjectures, clauses, vocab, hparams, mode):
    """Classify conjectures and clauses.

  Args:
    conjectures: Negated conjectures as a Jagged of serialized FastClauses.
    clauses: Clauses as serialized FastClauses.
    vocab: Path to vocabulary file.
    hparams: Hyperparameters.
    mode: Either 'train' or 'eval'.  Unused.

  Returns:
    Logits.
  """
    _ = mode  # Mode is unused
    hidden_size = hparams.hidden_size
    conv_layers = hparams.conv_layers

    # Convert all FastClauses to sequences of ids
    conjectures = inputs.fast_clauses_as_sequence_jagged(conjectures)
    clauses = inputs.fast_clauses_as_sequence_jagged(clauses)

    # Embed ids
    vocab_size, _ = inputs.read_vocab(vocab)
    params = model_utils.embedding_weights(dim=hparams.embedding_size,
                                           size=vocab_size)
    conjectures = jagged.jagged(
        conjectures.sizes, tf.nn.embedding_lookup(params, conjectures.flat))
    clauses = jagged.jagged(clauses.sizes,
                            tf.nn.embedding_lookup(params, clauses.flat))

    def bias_relu(x, bias):
        return tf.nn.relu(x + bias)

    def embed_clauses(clauses, name):
        with tf.variable_scope(name):
            filters, activations = [], []
            dim = hparams.embedding_size
            for i in range(conv_layers):
                filters.append(
                    tf.get_variable('filter%d' % i,
                                    shape=(hparams.filter_width, dim,
                                           hidden_size),
                                    initializer=layers.xavier_initializer()))
                bias = tf.get_variable('bias%d' % i,
                                       shape=(hidden_size, ),
                                       initializer=tf.constant_initializer(0))
                activations.append(functools.partial(bias_relu, bias=bias))
                dim = hidden_size
            clauses = jagged.conv1d_stack(clauses, filters, activations)
            return jagged.reduce_max(clauses)

    # Embed conjectures
    conjectures = embed_clauses(conjectures, 'conjectures')
    for _ in range(hparams.mid_layers):
        conjectures = jagged.jagged(conjectures.sizes,
                                    layers.relu(conjectures.flat, hidden_size))
    conjectures = jagged.reduce_max(conjectures, name='conjecture_embeddings')

    # Embed clauses
    clauses = embed_clauses(clauses, 'clauses')

    # Repeat each conjecture enough times to match clauses
    expansion = tf.size(clauses) // tf.maximum(1, tf.size(conjectures))
    conjectures = tf.reshape(tf.tile(conjectures[:, None], [1, expansion, 1]),
                             [-1, hidden_size])

    # Classify
    net = tf.concat((conjectures, clauses), 1)
    net = layers.relu(net, hidden_size)
    logits = tf.squeeze(layers.linear(net, 1), [-1])
    return logits
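
A note on the tiling step near the end of fast_model: tf.size counts all elements, but both tensors end in hidden_size columns, so that factor cancels and expansion is the row-count ratio (clauses per conjecture). A standalone NumPy sketch of the same arithmetic (all names here are illustrative):

import numpy as np

hidden_size = 4
conjectures = np.arange(2 * hidden_size, dtype=np.float32).reshape(2, hidden_size)
num_clauses = 6  # 3 clauses per conjecture
expansion = num_clauses // max(1, conjectures.shape[0])  # == 3

# Repeat each conjecture `expansion` times, preserving order: rows 0..2 come
# from conjecture 0, rows 3..5 from conjecture 1, matching the clause layout.
tiled = np.tile(conjectures[:, None, :], [1, expansion, 1]).reshape(-1, hidden_size)
assert tiled.shape == (num_clauses, hidden_size)
assert (tiled[:expansion] == conjectures[0]).all()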