import lucene
from org.apache.lucene import analysis, store


def get_matches(diagram1, diagram2, threshold_score=3.0, consider_context=True):
    # start the JVM for PyLucene
    lucene.initVM()

    # preprocess each diagram's nodes and convert them into indexable documents
    p1 = get_preprocessed_nodes(diagram1, consider_context)
    p2 = get_preprocessed_nodes(diagram2, consider_context)

    docs_from_p1 = transform_to_index(p1)
    docs_from_p2 = transform_to_index(p2)

    # set up in memory store for indexes
    directory1 = store.RAMDirectory()
    directory2 = store.RAMDirectory()
    analyzer = analysis.standard.StandardAnalyzer()

    # create separate indexes for each diagram
    create_index(directory=directory1, analyzer=analyzer, documents_to_index=docs_from_p1)
    create_index(directory=directory2, analyzer=analyzer, documents_to_index=docs_from_p2)

    # SEARCH STEP: use indexes from one store to search on indexes from other store
    res_1 = search_index(directory_to_search=directory2, entities_to_check=p1, analyzer=analyzer)
    res_2 = search_index(directory_to_search=directory1, entities_to_check=p2, analyzer=analyzer)

    # apply match search pruning rules
    set_m = prune_results(res_1, res_2, threshold_score)

    directory1.close()
    directory2.close()
    return set_m
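The helpers above (create_index, search_index, prune_results) are project-specific and not shown. For orientation, here is a minimal sketch of what the index/search steps boil down to in plain PyLucene, assuming a Lucene version that still provides RAMDirectory; the field name 'content' and the sample text are placeholders:

import lucene
from org.apache.lucene import document, index, search, store
from org.apache.lucene.analysis.standard import StandardAnalyzer
from org.apache.lucene.queryparser.classic import QueryParser

lucene.initVM()

directory = store.RAMDirectory()   # in-memory index store
analyzer = StandardAnalyzer()

# write one document with a single analyzed, stored text field
config = index.IndexWriterConfig(analyzer)
writer = index.IndexWriter(directory, config)
doc = document.Document()
doc.add(document.TextField('content', 'customer places order', document.Field.Store.YES))
writer.addDocument(doc)
writer.close()

# search the in-memory index and print matches with their scores
reader = index.DirectoryReader.open(directory)
searcher = search.IndexSearcher(reader)
query = QueryParser('content', analyzer).parse('order')
for score_doc in searcher.search(query, 10).scoreDocs:
    print(searcher.doc(score_doc.doc).get('content'), score_doc.score)
reader.close()
directory.close()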
Example #2
@classmethod
def load(cls, directory, analyzer=None):
    """Open `IndexSearcher`_ with a lucene RAMDirectory, loading the index into memory."""
    with closing.store(directory) as directory:
        directory = store.RAMDirectory(directory, store.IOContext.DEFAULT)
    self = cls(directory, analyzer)
    self.shared.add(self.directory)
    return self
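A usage sketch, assuming this classmethod is lupyne's engine.IndexSearcher.load; the index path 'indexdir', the field name 'content', and the query string are placeholders:

import lucene
from lupyne import engine

lucene.initVM()
# copy an on-disk index into a RAMDirectory so all searches run from memory
searcher = engine.IndexSearcher.load('indexdir')
hits = searcher.search('content:order')
for hit in hits:
    print(hit.score, hit['content'])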
Example #3
def directory(self, directory):
    """Coerce `directory` to a lucene Directory: None yields a new in-memory RAMDirectory,
    a path string is opened as an FSDirectory, and anything else is returned as-is;
    newly created directories are registered via `self.add`.
    """
    if directory is None:
        directory = store.RAMDirectory()
        self.add(directory)
    elif isinstance(directory, string_types):
        directory = store.FSDirectory.open(File(directory).toPath())
        self.add(directory)
    return directory
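A brief sketch of how this coercion behaves, assuming the enclosing instance (called `resources` here, a hypothetical name) exposes `add` as shown, and that `File`, `string_types`, and `store` come from `java.io`, `six`, and `org.apache.lucene` respectively; the path is a placeholder:

ram_dir = resources.directory(None)            # None: fresh in-memory RAMDirectory
fs_dir = resources.directory('/tmp/myindex')   # path string: FSDirectory opened on disk
same = resources.directory(ram_dir)            # already a Directory: returned unchanged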
Example #4
# requires `import math, os`, `from org.apache.lucene import search, store`, and
# `from lupyne import engine` with `Q = engine.Query`; `tempdir`, `indexer`, and
# `zipcodes` are pytest fixtures (a temporary directory path, a lupyne indexer,
# and zipcode records)
def test_grouping(tempdir, indexer, zipcodes):
    field = indexer.fields['location'] = engine.NestedField('state.county.city', docValuesType='sorted')
    for doc in zipcodes:
        if doc['state'] in ('CA', 'AK', 'WY', 'PR'):
            lat, lng = ('{0:08.3f}'.format(doc.pop(l)) for l in ['latitude', 'longitude'])
            location = '.'.join(doc[name] for name in ['state', 'county', 'city'])
            indexer.add(doc, latitude=lat, longitude=lng, location=location)
    indexer.commit()
    states = list(indexer.terms('state'))
    assert states[0] == 'AK' and states[-1] == 'WY'
    counties = [term.split('.')[-1] for term in indexer.terms('state.county', 'CA')]
    hits = indexer.search(field.prefix('CA'))
    assert sorted({hit['county'] for hit in hits}) == counties
    assert counties[0] == 'Alameda' and counties[-1] == 'Yuba'
    cities = [term.split('.')[-1] for term in indexer.terms('state.county.city', 'CA.Los Angeles')]
    hits = indexer.search(field.prefix('CA.Los Angeles'))
    assert sorted({hit['city'] for hit in hits}) == cities
    assert cities[0] == 'Acton' and cities[-1] == 'Woodland Hills'
    (hit,) = indexer.search('zipcode:90210')
    assert hit['state'] == 'CA' and hit['county'] == 'Los Angeles'
    assert hit['city'] == 'Beverly Hills' and hit['longitude'] == '-118.406'
    query = Q.prefix('zipcode', '90')
    ((field, facets), ) = indexer.facets(query, 'state.county').items()
    assert field == 'state.county'
    la, orange = sorted(filter(facets.get, facets))
    assert la == 'CA.Los Angeles' and facets[la] > 100
    assert orange == 'CA.Orange' and facets[orange] > 10
    queries = {term: Q.term(field, term) for term in indexer.terms(field, 'CA.')}
    ((field, facets),) = indexer.facets(query, **{field: queries}).items()
    assert all(value.startswith('CA.') for value in facets) and set(facets) == set(queries)
    assert facets['CA.Los Angeles'] == 264
    groups = indexer.groupby(field, Q.term('state', 'CA'), count=1)
    assert len(groups) == 1 < groups.count
    (hits, ) = groups
    assert hits.value == 'CA.Los Angeles' and len(hits) == 1 and hits.count > 100
    grouping = engine.documents.GroupingSearch(
        field, sort=search.Sort(indexer.sortfield(field)), cache=False, allGroups=True)
    assert all(grouping.search(indexer.indexSearcher, Q.alldocs()).facets.values())
    assert len(grouping) == len(list(grouping)) > 100
    assert set(grouping) > set(facets)
    hits = indexer.search(query, timeout=-1)
    assert not hits and not hits.count and math.isnan(hits.maxscore)
    hits = indexer.search(query, timeout=10)
    assert len(hits) == hits.count == indexer.count(query) and hits.maxscore == 1.0
    directory = store.RAMDirectory()
    query = Q.term('state', 'CA')
    size = indexer.copy(directory, query)
    searcher = engine.IndexSearcher(directory)
    assert len(searcher) == size and list(searcher.terms('state')) == ['CA']
    path = os.path.join(tempdir, 'temp')
    size = indexer.copy(path, exclude=query, merge=1)
    assert len(searcher) + size == len(indexer)
    searcher = engine.IndexSearcher(path)
    assert len(searcher.segments) == 1 and 'CA' not in searcher.terms('state')
    directory.close()
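Distilled from the RAMDirectory portion of the test above, a minimal sketch of copying a filtered subset of an existing index into memory with lupyne; the `indexer` instance and the 'state' field are assumed to exist as in the test:

from org.apache.lucene import store
from lupyne import engine

Q = engine.Query
memory = store.RAMDirectory()
# copy only the documents matching the query into the in-memory directory
size = indexer.copy(memory, Q.term('state', 'CA'))
searcher = engine.IndexSearcher(memory)
assert len(searcher) == size
memory.close()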