import logging

from ontobio.ontol import Ontology, Synonym
from ontobio.lexmap import LexicalMapEngine


def test_awe_1_to_1():
    """
    Test axiom weight estimation
    """
    ont = Ontology()
    assert ont.nodes() == []
    lexmap = LexicalMapEngine(
        config={
            'cardinality_weights': [{
                'prefix1': 'X',
                'prefix2': 'Y',
                'cardinality': '11',
                'weights': [-1.0, -1.0, 2.0, 0.0]
            }]
        })
    ont.add_node('X:1', 'foo 1')
    ont.add_node('Y:1', 'foo 1')
    ont.add_node('Z:1a', 'foo 1')
    ont.add_node('Z:1b', 'foo 1')
    lexmap.index_ontology(ont)
    xg = lexmap.get_xref_graph()
    df = lexmap.as_dataframe(xg)
    print(df.to_csv(sep="\t"))
    P_XY = lexmap.weighted_axioms('X:1', 'Y:1', xg)
    P_XZ = lexmap.weighted_axioms('X:1', 'Z:1a', xg)
    logging.info('P_XY={} P_XZ={}'.format(P_XY, P_XZ))
    # X:1/Y:1 is a one-to-one match, so the configured '11' weights boost
    # equivalence; X:1/Z:1a is one-to-many, since Z:1a and Z:1b both match.
    assert P_XY[2] > P_XZ[2]
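
# Note on the four-element vectors used throughout these tests: the
# configured 'weights' and the tuples returned by weighted_axioms() are
# assumed to follow lexmap's axiom ordering
# (subClassOf, superClassOf, equivalentTo, other), so P_XY[2] above is
# the estimated probability that X:1 is equivalent to Y:1.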


def test_awe_scope_map():
    """
    Test axiom weight estimation, synonym scopes
    """
    ont = Ontology()
    assert ont.nodes() == []
    lexmap = LexicalMapEngine()
    ont.add_node('X:1', 'x1')
    ont.add_node('Y:1', 'y1')
    ont.add_node('Z:1', 'z1')
    ont.add_synonym(Synonym('X:1', val='related', pred='hasRelatedSynonym'))
    ont.add_synonym(Synonym('Y:1', val='related', pred='hasRelatedSynonym'))
    ont.add_synonym(Synonym('Y:1', val='exact', pred='hasExactSynonym'))
    ont.add_synonym(Synonym('Z:1', val='exact', pred='hasExactSynonym'))
    lexmap.index_ontology(ont)
    xg = lexmap.get_xref_graph()
    df = lexmap.as_dataframe(xg)
    print(df.to_csv(sep="\t"))
    P_XY = lexmap.weighted_axioms('X:1', 'Y:1', xg)
    P_YZ = lexmap.weighted_axioms('Y:1', 'Z:1', xg)
    logging.info('P_XY={} P_YZ={}'.format(P_XY, P_YZ))
    # Equivalence should be the most likely interpretation of X:1/Y:1 ...
    assert P_XY[2] > P_XY[0]
    assert P_XY[2] > P_XY[1]
    assert P_XY[2] > P_XY[3]
    # ... but with less confidence than for the exact-exact match Y:1/Z:1.
    assert P_XY[2] < P_YZ[2]
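
# The final assertion above reflects lexmap's default synonym-scope
# weighting: a match between two hasExactSynonym values (Y:1/Z:1) appears
# to be treated as stronger evidence of equivalence than a match between
# two hasRelatedSynonym values (X:1/Y:1), even though both pairs share an
# identical string.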


def test_awe_match_pairs():
    """
    Test axiom weight estimation
    """
    ont = Ontology()
    assert ont.nodes() == []
    lexmap = LexicalMapEngine(
        config={
            'match_weights': [{
                'prefix1': 'X',
                'prefix2': 'Y',
                'weights': [1.0, -1.0, 2.0, 0.0]
            }]
        })
    ont.add_node('X:1', 'foo 1')
    ont.add_node('Y:1', 'foo 1')
    lexmap.index_ontology(ont)
    xg = lexmap.get_xref_graph()
    df = lexmap.as_dataframe(xg)
    print(df.to_csv(sep="\t"))
    P_XY = lexmap.weighted_axioms('X:1', 'Y:1', xg)
    P_YX = lexmap.weighted_axioms('Y:1', 'X:1', xg)
    logging.info('P_XY={} P_YX={}'.format(P_XY, P_YX))
    assert P_XY[0] > P_XY[1]
    assert P_XY[0] == P_YX[1]
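
# Reversing the query direction is expected to exchange the subClassOf
# and superClassOf estimates while leaving the rest unchanged, i.e.
# weighted_axioms(y, x, xg) should look like
# (P_XY[1], P_XY[0], P_XY[2], P_XY[3]); the final assertion above checks
# the sub/sup half of that swap.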


def test_awe_1_to_many_default():
    """
    As test_awe_1_to_many_flat below, but with default weights
    that are not restricted to a prefix pair
    """
    ont = Ontology()
    lexmap = LexicalMapEngine(
        config={
            'cardinality_weights': [{
                'cardinality': 'm1',
                'weights': [1.0, -1.0, -2.0, 0.0]
            }]
        })
    ont.add_node('X:1', 'foo 1')
    ont.add_node('Y:1a', 'foo 1a')
    ont.add_synonym(Synonym('Y:1a', val='foo 1', pred='hasRelatedSynonym'))
    ont.add_node('Y:1b', 'foo 1b')
    ont.add_synonym(Synonym('Y:1b', val='foo 1', pred='hasExactSynonym'))
    lexmap.index_ontology(ont)
    xg = lexmap.get_xref_graph()
    df = lexmap.as_dataframe(xg)
    print(df.to_csv(sep="\t"))
    P = lexmap.weighted_axioms('X:1', 'Y:1a', xg)
    logging.info('P={}'.format(P))
    assert P[0] < P[1]
    assert P[1] > P[2]
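
# The 'm1' weights are declared for the many-to-one direction, while the
# query weighted_axioms('X:1', 'Y:1a', xg) runs one-to-many; the sub/sup
# components presumably get flipped for the reverse direction, which is
# why this test expects P[0] < P[1] just like the '1m' variant below.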


def test_awe_1_to_many_flat():
    """
    Test axiom weight estimation, for a 1-to-many situation where
    the many are not inter-related
    """
    ont = Ontology()
    lexmap = LexicalMapEngine(
        config={
            'cardinality_weights': [{
                'prefix1': 'X',
                'prefix2': 'Y',
                'cardinality': '1m',
                'weights': [-1.0, 1.0, -2.0, 0.0]
            }]
        })
    ont.add_node('X:1', 'foo 1')
    ont.add_node('Y:1a', 'foo 1a')
    ont.add_synonym(Synonym('Y:1a', val='foo 1', pred='hasRelatedSynonym'))
    ont.add_node('Y:1b', 'foo 1b')
    ont.add_synonym(Synonym('Y:1b', val='foo 1', pred='hasExactSynonym'))
    lexmap.index_ontology(ont)
    xg = lexmap.get_xref_graph()
    df = lexmap.as_dataframe(xg)
    print(df.to_csv(sep="\t"))
    P = lexmap.weighted_axioms('X:1', 'Y:1a', xg)
    logging.info('P={}'.format(P))
    assert P[0] < P[1]
    assert P[1] > P[2]
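
# Here the '1m' weights [-1.0, 1.0, -2.0, 0.0] directly favour the
# superClassOf reading: when one X term matches several unrelated Y
# terms, the X term is more plausibly a broader class than an exact
# equivalent of any one of them.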


def test_awe_xref_weights():
    """
    Test axiom weight estimation, when explicit weights are provided
    for individual xref pairs
    """
    ont = Ontology()
    assert ont.nodes() == []
    lexmap = LexicalMapEngine(
        config={
            'xref_weights': [
                {
                    'left': 'X:1',
                    'right': 'Y:1',
                    'weights': [100.0, 0.0, 0.0, 0.0]
                },
                {
                    'left': 'Z:1',
                    'right': 'Y:1',
                    'weights': [0.0, 100.0, 0.0, 0.0]
                },
            ]
        })
    ont.add_node('X:1', 'foo')
    ont.add_node('Y:1', 'foo')
    ont.add_node('Z:1', 'foo')
    lexmap.index_ontology(ont)
    xg = lexmap.get_xref_graph()
    df = lexmap.as_dataframe(xg)
    print(df.to_csv(sep="\t"))
    P_XY = lexmap.weighted_axioms('X:1', 'Y:1', xg)
    P_YZ = lexmap.weighted_axioms('Y:1', 'Z:1', xg)
    logging.info('P_XY={} P_YZ={}'.format(P_XY, P_YZ))
    # The large subClassOf weight on X:1/Y:1 dominates.
    assert P_XY[0] > P_XY[1]
    assert P_XY[0] > P_XY[2]
    assert P_XY[0] > P_XY[3]
    # Z:1/Y:1 was weighted superClassOf; queried as (Y:1, Z:1) the
    # direction reverses, so subClassOf dominates here as well.
    assert P_YZ[0] > P_YZ[1]
    assert P_YZ[0] > P_YZ[2]
    assert P_YZ[0] > P_YZ[3]
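
# xref_weights supply per-pair priors keyed on individual identifiers
# ('left'/'right') rather than on prefixes; a weight of 100.0 swamps the
# lexical evidence from the identical 'foo' label shared by all three
# nodes.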


def test_awe_1_to_many_hier():
    """
    Test axiom weight estimation, for a 1-to-many situation where
    the many form a hierarchy
    """
    ont = Ontology()
    assert ont.nodes() == []
    lexmap = LexicalMapEngine()
    ont.add_node('X:1', 'foo 1')
    ont.add_node('Z:1a', 'foo 1')
    ont.add_node('Z:1b', 'foo 1')
    ont.add_parent('Z:1b', 'Z:1a')
    lexmap.index_ontology(ont)
    xg = lexmap.get_xref_graph()
    df = lexmap.as_dataframe(xg)
    print(df.to_csv(sep="\t"))
    P_a = lexmap.weighted_axioms('X:1', 'Z:1a', xg)
    P_b = lexmap.weighted_axioms('X:1', 'Z:1b', xg)
    logging.info('P_a={} P_b={}'.format(P_a, P_b))
    # X:1 is estimated to sit below the parent Z:1a ...
    assert P_a[0] > P_a[1]
    # ... and above the child Z:1b ...
    assert P_b[0] < P_b[1]
    # ... with the parent mapping the stronger subClassOf candidate.
    assert P_a[0] > P_b[0]
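
# When a single X term matches both ends of a parent-child pair, the
# hierarchy is apparently used to orient the mapping: the term is placed
# below the parent and above the child, rather than being read as
# equivalent to either.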