Code Example #1
File: nql_test.py Project: dogydev/bert-nq-python3
 def setUp(self):
     super(TestOnDenseGrid, self).setUp()
     sparse_grid_context = nql_test_lib.make_grid()
     context = nql.NeuralQueryContext()
     # copy the grid but densify some of it
     context.declare_relation('n', 'place_t', 'place_t')
     context.declare_relation('s', 'place_t', 'place_t')
     context.declare_relation('e', 'place_t', 'place_t')
     context.declare_relation('w', 'place_t', 'place_t')
     context.declare_relation('color', 'place_t', 'color_t', dense=True)
     context.declare_relation('distance_to', 'place_t', 'corner_t')
     # copy the type definitions
     for type_name in sparse_grid_context.get_type_names():
         entity_list = [
             sparse_grid_context.get_entity_name(i, type_name)
             for i in range(sparse_grid_context.get_max_id(type_name))
         ]
         context.extend_type(type_name, entity_list)
     # copy the data over
     for r in sparse_grid_context.get_relation_names():
         m = sparse_grid_context.get_initial_value(r)
         if context.is_dense(r):
             context.set_initial_value(r, m.todense())
         else:
             context.set_initial_value(r, m)
     self.context = context
     self.session = tf.Session()
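
Here the only relation declared with dense=True is color, and that flag is what the copy loop keys on: is_dense(r) decides whether the matrix from the sparse grid is handed over unchanged or converted with todense() first. Once copied, the densified grid can be queried like any other context. The sketch below is not from the original test file; it assumes the grid cells are named along the lines of 'cell_0_0' and that NQL expressions expose one(), follow(), and eval() as in the library's published examples.

def test_follow_east_sketch(self):
    # Hypothetical test method; 'cell_0_0' is a guess at the naming scheme
    # used by nql_test_lib.make_grid(), and one()/follow()/eval() are
    # assumed to behave as in the public NQL examples.
    start = self.context.one('cell_0_0', 'place_t')   # one-hot over place_t
    east_color = start.follow('e').follow('color')    # color of the eastern neighbor
    # eval() is expected to return an {entity_name: weight} dict here.
    print(east_color.eval(session=self.session))
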
Code Example #2
def make_grid():
    """Create a grid, with relations for going n, s, e, w."""
    result = nql.NeuralQueryContext()
    result.declare_relation('n', 'place_t', 'place_t')
    result.declare_relation('s', 'place_t', 'place_t')
    result.declare_relation('e', 'place_t', 'place_t')
    result.declare_relation('w', 'place_t', 'place_t')
    result.declare_relation('color', 'place_t', 'color_t')
    result.declare_relation('distance_to', 'place_t', 'corner_t')

    kg_lines = []
    dij = {'n': (-1, 0), 's': (+1, 0), 'e': (0, +1), 'w': (0, -1)}
    for i in range(0, 4):
        for j in range(0, 4):
            cell_color = 'black' if (i % 2) == (j % 2) else 'white'
            kg_lines.append(
                '\t'.join(['color', cell(i, j), cell_color]) + '\n')
            kg_lines.append('\t'.join(
                ['distance_to', cell(i, j), 'ul',
                 str(i + j)]) + '\n')
            for direction, (di, dj) in dij.items():
                if (0 <= i + di < 4) and (0 <= j + dj < 4):
                    kg_lines.append('\t'.join([
                        direction, cell(i, j),
                        cell(i + di, j + dj)
                    ]) + '\n')
    result.load_kg(lines=kg_lines, freeze=True)
    return result
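
make_grid encodes the knowledge graph as tab-separated lines of relation, subject, and object, with an optional fourth column (used here, it seems, to store the distance as the edge weight), and it relies on a cell helper from the same test module to name grid squares. That helper is not shown above; a plausible stand-in, under the assumption that cell names simply encode row and column, is:

def cell(i, j):
    # Stand-in for the helper defined in nql_test_lib; the exact name format
    # is an assumption, not the original definition.
    return 'cell_%d_%d' % (i, j)
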
Code Example #3
  def build_context(self, params=None):
    """Create a new NeuralQueryContext and configure it.

    Args:
      params: optional parameters to be passed to config_context

    Returns:
      The newly configured context.
    """
    context = nql.NeuralQueryContext()
    self.config_context(context, params)
    return context
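
build_context pairs a fresh NeuralQueryContext with a config_context hook that subclasses or mixins are expected to supply. A minimal sketch of such a hook, invented here for illustration and using only calls that appear elsewhere on this page, might look like:

def config_context(self, context, params=None):
    # Hypothetical configuration hook; the type and relation names are
    # placeholders, not ones taken from the original project.
    del params  # unused in this sketch
    context.declare_relation('link', 'node_t', 'node_t')
    context.extend_type('node_t', ['n0', 'n1', 'n2'])
    context.freeze('node_t')
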
Code Example #4
File: dataset_test.py Project: yuval6957/language
 def setUp(self):
     super(TestTFDataset, self).setUp()
     self.clean_examples = ['a|A', 'b|B', 'c|C,D']
     self.noisy_examples = ['a|A', 'b|Beta', 'c|C,noise', 'd|D']
     self.empty_examples = ['a|A,', 'b|Beta,', 'c|']
     self.noisy_examples_good_count = 4
     self.empty_examples_good_count = 3
     self.context = nql.NeuralQueryContext()
     self.context.extend_type('uc_t', ['A', 'B', 'C', 'D'])
     self.context.freeze('uc_t')
     with tf.Session() as session:
         s_const = tf.constant('hello world', dtype=tf.string)
         s_eval = session.run(s_const)
         # Under Python 3 an evaluated tf.string tensor comes back as bytes.
         self.tf_string_type = type(s_eval)
Code Example #5
File: dataset_test.py Project: yuval6957/language
 def setUp(self):
     self.context = nql.NeuralQueryContext()
     self.context.declare_entity_type('uc_t',
                                      fixed_vocab=['A', 'B', 'C', 'D'],
                                      unknown_marker=None)
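
Code Examples #4 and #5 show two routes to a fixed entity vocabulary: extend_type followed by freeze builds it incrementally, while declare_entity_type takes fixed_vocab up front, here with unknown_marker=None, which presumably disables the unknown-entity bucket. The sketch below simply lays the two patterns side by side on a throwaway type name, assuming the two forms end up equivalent for a fixed vocabulary.

# Throwaway comparison of the two set-ups used above; 'letter_t' is a
# placeholder type name, not one from the original tests.
ctx_a = nql.NeuralQueryContext()
ctx_a.extend_type('letter_t', ['A', 'B', 'C', 'D'])
ctx_a.freeze('letter_t')

ctx_b = nql.NeuralQueryContext()
ctx_b.declare_entity_type('letter_t',
                          fixed_vocab=['A', 'B', 'C', 'D'],
                          unknown_marker=None)
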
Code Example #6
File: nql_test.py Project: dogydev/bert-nq-python3
 def setUp(self):
     super(TestLoad, self).setUp()
     self.context = nql.NeuralQueryContext()
     self.context.declare_relation('foo', 'foo_d', 'foo_r')
     self.context.declare_relation('bat', 'bat_d', 'bat_r')
Code Example #7
File: nql_test.py Project: dogydev/bert-nq-python3
 def setUp(self):
     super(TestDeclaredTypes, self).setUp()
     self.context = nql.NeuralQueryContext()
Code Example #8
def load_kg(base_dir, stem):
    context = nql.NeuralQueryContext()
    names = {}

    tf.logging.info('loading closures so they can be skipped in the KB')
    # TODO(wcohen): this is a little sloppy since we ignore the relation.
    closures = {}

    def load_closures(filename):
        for line in tf.gfile.Open(base_dir + filename):
            parts = line.strip().split('\t')
            closures['i/' + parts[0]] = ['i/' + p for p in parts[1:]]

    load_closures('closed-locations.txt')
    load_closures('closed-categories.txt')

    tf.logging.info('loading categories so they can be skipped in the KB')
    all_category_ids = set()
    for line in tf.gfile.Open(base_dir + stem + '_cats.tsv'):
        cat_id, _, _ = line.strip().split('\t')
        all_category_ids.add('i/' + cat_id)

    rels = set()
    props = set()
    ents = set()
    kg_lines = []
    num_cats_skipped = 0

    tf.logging.info('reading kg')
    for line in tf.gfile.Open(base_dir + stem + '_kb.tsv'):
        rel, head, tail = line.strip().split('\t')
        if tail in all_category_ids:
            num_cats_skipped += 1
        else:
            if rel not in rels:
                context.declare_relation(rel, 'ent_t', 'prop_t')
                rels.add(rel)
            ents.add(head)
            # if tail is something like a location of a concept
            # with superconcepts, add all the containing locations
            # or superconcepts
            tails = closures.get(tail, [tail])
            for t in tails:
                props.add(t)
                kg_lines.append('\t'.join([rel, head, t]) + '\n')
    tf.logging.info('loaded %d kb lines skipped %d categories-related props' %
                    (len(kg_lines), num_cats_skipped))

    tf.logging.info('reading names')
    context.declare_relation('prop_name', 'prop_t', 'name_t')
    context.declare_relation('ent_name', 'ent_t', 'name_t')
    context.declare_relation('rel_name', 'rel_t', 'name_t')
    for line in tf.gfile.Open(base_dir + stem + '_names.tsv'):
        _, head, tail = line.strip().split('\t')
        names[head] = tail
        if head in props:
            kg_lines.append('\t'.join(['prop_name', head, tail]))
        if head in ents:
            kg_lines.append('\t'.join(['ent_name', head, tail]))
        if head in rels:
            kg_lines.append('\t'.join(['rel_name', head, tail]))
    tf.logging.info('loading %d kg lines', len(kg_lines))
    context.load_kg(lines=kg_lines)
    tf.logging.info('loaded')
    context.construct_relation_group('rel_g', 'ent_t', 'prop_t')
    return context, names
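
load_kg ties the pieces together: it reads the closure and category files, declares one relation per KB predicate plus the three *_name relations, loads everything from tab-separated lines, and finally collects the KB relations into the relation group rel_g. A hypothetical invocation, with placeholder paths rather than the project's real directory layout, would be:

# Hypothetical call; base_dir and stem are placeholders, not paths from the
# original project.
context, names = load_kg(base_dir='/data/kb/', stem='train')
print('%d names loaded' % len(names))
print(sorted(context.get_relation_names())[:5])  # relations declared above
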