def test_awe_scope_map():
    """
    Test axiom weight estimation, syn scopes.

    X~Y match only via related-scope synonyms; Y~Z match via exact-scope
    synonyms, so the Y-Z equivalence confidence should exceed X-Y's.
    """
    ont = Ontology()
    assert ont.nodes() == []
    lexmap = LexicalMapEngine()
    ont.add_node('X:1', 'x1')
    ont.add_node('Y:1', 'y1')
    ont.add_node('Z:1', 'z1')
    # X and Y share a related-scope synonym; Y and Z share an exact-scope one
    ont.add_synonym(Synonym('X:1', val='related', pred='hasRelatedSynonym'))
    ont.add_synonym(Synonym('Y:1', val='related', pred='hasRelatedSynonym'))
    ont.add_synonym(Synonym('Y:1', val='exact', pred='hasExactSynonym'))
    ont.add_synonym(Synonym('Z:1', val='exact', pred='hasExactSynonym'))
    lexmap.index_ontology(ont)
    xg = lexmap.get_xref_graph()
    df = lexmap.as_dataframe(xg)
    print(df.to_csv(sep="\t"))
    P_XY = lexmap.weighted_axioms('X:1', 'Y:1', xg)
    P_YZ = lexmap.weighted_axioms('Y:1', 'Z:1', xg)
    # Fixed mislabeled log placeholder: second value is P_YZ, not P_XZ
    logging.info('P_XY={} P_YZ={}'.format(P_XY, P_YZ))
    # equivalence (index 2) dominates the other axiom types for X-Y...
    assert P_XY[2] > P_XY[0]
    assert P_XY[2] > P_XY[1]
    assert P_XY[2] > P_XY[3]
    # ...but exact-exact matching yields higher equivalence confidence than related-related
    assert P_XY[2] < P_YZ[2]
def test_awe_match_pairs():
    """
    Test axiom weight estimation with explicit per-prefix-pair match weights.
    """
    ontology = Ontology()
    assert ontology.nodes() == []
    # weight subClassOf up and superClassOf down for the X/Y prefix pair
    config = {
        'match_weights': [
            {
                'prefix1': 'X',
                'prefix2': 'Y',
                'weights': [1.0, -1.0, 2.0, 0.0],
            },
        ],
    }
    engine = LexicalMapEngine(config=config)
    ontology.add_node('X:1', 'foo 1')
    ontology.add_node('Y:1', 'foo 1')
    engine.index_ontology(ontology)
    xref_graph = engine.get_xref_graph()
    frame = engine.as_dataframe(xref_graph)
    print(frame.to_csv(sep="\t"))
    P_XY = engine.weighted_axioms('X:1', 'Y:1', xref_graph)
    P_YX = engine.weighted_axioms('Y:1', 'X:1', xref_graph)
    logging.info('P_XY={} P_YX={}'.format(P_XY, P_YX))
    # configured weighting favors subClassOf over superClassOf...
    assert P_XY[0] > P_XY[1]
    # ...and reversing the pair mirrors the weights
    assert P_XY[0] == P_YX[1]
def test_awe_1_to_1():
    """
    Test axiom weight estimation with 1-to-1 cardinality weighting.
    """
    ontology = Ontology()
    assert ontology.nodes() == []
    # boost equivalence only for 1-to-1 mappings between X and Y prefixes
    config = {
        'cardinality_weights': [
            {
                'prefix1': 'X',
                'prefix2': 'Y',
                'cardinality': '11',
                'weights': [-1.0, -1.0, 2.0, 0.0],
            },
        ],
    }
    engine = LexicalMapEngine(config=config)
    # X:1/Y:1 form a 1-to-1 match; the two Z nodes make X:1/Z ambiguous
    for node_id in ('X:1', 'Y:1', 'Z:1a', 'Z:1b'):
        ontology.add_node(node_id, 'foo 1')
    engine.index_ontology(ontology)
    xref_graph = engine.get_xref_graph()
    frame = engine.as_dataframe(xref_graph)
    print(frame.to_csv(sep="\t"))
    P_XY = engine.weighted_axioms('X:1', 'Y:1', xref_graph)
    P_XZ = engine.weighted_axioms('X:1', 'Z:1a', xref_graph)
    logging.info('P_XY={} P_XZ={}'.format(P_XY, P_XZ))
    # 1-to-1 weighting raises equivalence confidence for X/Y above X/Z
    assert P_XY[2] > P_XZ[2]
def test_mutable():
    """
    Test mutability of ontology class: nodes and synonyms added after
    construction are visible through the query API.
    """
    ontology = Ontology()
    for node_id, label in [('TEST:1', 'foo bar'),
                           ('TEST:2', 'bar foo'),
                           ('TEST:3', 'foo bar'),
                           ('TEST:4', 'wiz')]:
        ontology.add_node(node_id, label)
    ontology.add_synonym(Synonym('TEST:4', val='bar foo', pred='hasExactSynonym'))
    # render to obo format as a smoke test of serialization
    renderer = GraphRenderer.create('obo')
    renderer.write(ontology)
    for node_id in ontology.nodes():
        print('{} -> {}'.format(node_id, ontology._meta(node_id)))
    assert ontology.label('TEST:1') == 'foo bar'
    # only TEST:4 received a synonym
    assert ontology.synonyms('TEST:1') == []
    assert ontology.synonyms('TEST:4')[0].val == 'bar foo'
def create_gene_terms():
    """
    Load the HGNC fixture ontology and bulk-create a GeneTerm row for each
    approved (non-withdrawn) gene symbol.
    """
    print('Creating gene terms')
    path = os.path.join(FIXTURE_DIR, 'hgnc.json')
    with open(path, 'r', encoding='utf-8') as f:
        g = obograph_util.convert_json_object(json.load(f))
    ont = Ontology(handle=path, payload=g)
    gene_terms = []
    for n_id in ont.nodes():
        n_dict = ont.node(n_id)
        # skip untyped nodes and anything that is not a class
        if 'type' not in n_dict:
            continue
        if ont.node_type(n_id) != 'CLASS':
            continue
        for prop in n_dict['meta']['basicPropertyValues']:
            if prop['pred'] != 'http://ncicb.nci.nih.gov/xml/owl/EVS/Hugo.owl#Approved_Symbol':
                continue
            symbol = prop['val']
            # withdrawn symbols are marked with a '~withdrawn' suffix
            if not symbol.endswith('~withdrawn'):
                gene_terms.append(GeneTerm(term_id=n_id, label=symbol))
            # only the first Approved_Symbol property is considered
            break
    GeneTerm.objects.bulk_create(gene_terms)
def test_awe_xref_weights():
    """
    Test axiom weight estimation, when provided with defaults.

    Explicit per-xref weight overrides should dominate the estimated
    probabilities for the configured pairs.
    """
    ont = Ontology()
    assert ont.nodes() == []
    # heavily weight subClassOf for X:1->Y:1 and superClassOf for Z:1->Y:1
    lexmap = LexicalMapEngine(
        config={
            'xref_weights': [
                {
                    'left': 'X:1',
                    'right': 'Y:1',
                    'weights': [100.0, 0.0, 0.0, 0.0]
                },
                {
                    'left': 'Z:1',
                    'right': 'Y:1',
                    'weights': [0.0, 100.0, 0.0, 0.0]
                },
            ]
        })
    ont.add_node('X:1', 'foo')
    ont.add_node('Y:1', 'foo')
    ont.add_node('Z:1', 'foo')
    lexmap.index_ontology(ont)
    xg = lexmap.get_xref_graph()
    df = lexmap.as_dataframe(xg)
    print(df.to_csv(sep="\t"))
    P_XY = lexmap.weighted_axioms('X:1', 'Y:1', xg)
    P_YZ = lexmap.weighted_axioms('Y:1', 'Z:1', xg)
    # Fixed mislabeled log placeholder: second value is P_YZ, not P_XZ
    logging.info('P_XY={} P_YZ={}'.format(P_XY, P_YZ))
    # subClassOf dominates for X->Y per the first override
    assert P_XY[0] > P_XY[1]
    assert P_XY[0] > P_XY[2]
    assert P_XY[0] > P_XY[3]
    # the Z->Y superClassOf override flips to subClassOf for the Y->Z direction
    assert P_YZ[0] > P_YZ[1]
    assert P_YZ[0] > P_YZ[2]
    assert P_YZ[0] > P_YZ[3]
def test_awe_1_to_many_hier():
    """
    Test axiom weight estimation when one term matches both a parent and
    its child: the parent should look like the superclass candidate.
    """
    ont = Ontology()
    assert ont.nodes() == []
    lexmap = LexicalMapEngine()
    # X:1 lexically matches both Z:1a and its child Z:1b
    for node_id in ('X:1', 'Z:1a', 'Z:1b'):
        ont.add_node(node_id, 'foo 1')
    ont.add_parent('Z:1b', 'Z:1a')
    lexmap.index_ontology(ont)
    xg = lexmap.get_xref_graph()
    df = lexmap.as_dataframe(xg)
    print(df.to_csv(sep="\t"))
    P_a = lexmap.weighted_axioms('X:1', 'Z:1a', xg)
    P_b = lexmap.weighted_axioms('X:1', 'Z:1b', xg)
    logging.info('P_a={} P_b={}'.format(P_a, P_b))
    # subClassOf is favored against the parent, superClassOf against the child
    assert P_a[0] > P_a[1]
    assert P_b[0] < P_b[1]
    assert P_a[0] > P_b[0]