def gen_pdm_from_indfile(ont_url, outfile):
    """Read an OWL file from ont_url; write a JSON file (outfile) of
    types and annotations on individuals in the file.

    JSON structure:
    id:
       label: string
       def: string
       types:
         - isAnonymous:boolean;
         - relId:URI_string;
         - objectId:URI_string.
    """
    ont = Brain()
    ont.learn(ont_url)
    axioms = {}
    # Root classes whose instances are dumped, queried only if present
    # in the loaded ontology.  Replaces three copy-pasted if-blocks.
    roots = [
        "CARO_0030002",   # expression_patterns
        "FBbt_00005106",  # neurons
        "FBbt_00007683",  # clones
    ]
    for root in roots:
        if ont.knowsClass(root):
            # Second arg 0 presumably selects direct+inferred instances
            # (Brain API convention) - TODO confirm.
            axioms.update(gen_pdm(ont, ont.getInstances(root, 0), root))
    # BUGFIX: the original opened outfile without ever closing it;
    # 'with' guarantees the handle is flushed and closed.
    with open(outfile, "w") as jfile:
        jfile.write(json.dumps(axioms, sort_keys=True, indent=4))
    ont.sleep()
def __main__():
    """Load the ontology named by the first command-line argument and
    run a qtest on every row of ./queries.tsv."""
    brain = Brain()
    brain.learn(sys.argv[1])
    query_table = tab("./", "queries.tsv")
    for row in query_table.tab:
        Query(row, brain).qtest()
    brain.sleep()
Beispiel #3
0
import sys


"""Runs verification tests on abstract patterns and generates markdown docs.
1st ARG specifies path to input/output files.  
Second ARG specifies ontology file to use in validation."""

def load_json_from_file(path):
    """Parse the JSON file at *path* and return the deserialized object.

    BUGFIX: the original read the whole file then called json.loads after
    an explicit close; if reading raised, the handle leaked.  A context
    manager closes the file on every path, and json.load streams directly.
    """
    with open(path, "r") as json_file:
        return json.load(json_file)

# Testing abstract pattern validation and documentation.
# Script body: load the validation ontology, then for each pattern JSON
# file under the input dir, build an abstract_pattern and write a
# markdown doc next to the JSON file.
o = Brain()
o.learn(sys.argv[2]) # Running with local file for now.
# Switch this to specify path as argv
json_files = glob.glob(sys.argv[1] + "*.json")  # Note - glob returns full file path

for f in json_files:
    p = load_json_from_file(f)
    # Strip the ".json" suffix to get the (path-qualified) pattern name.
    m = re.search("(.+).json", f)
    pattern_name = m.group(1)
    print "Processing %s" % pattern_name
    ap = pattern.abstract_pattern(p, o)
    md = open(pattern_name + ".md", "w")
    #print ap.gen_markdown_doc()
    md.write(ap.gen_markdown_doc())
    md.close()
    # NOTE(review): o.sleep() sits INSIDE the loop - if sleep() shuts the
    # Brain down (as in the other scripts here) only the first file is
    # processed successfully.  Confirm intent; may belong after the loop.
    o.sleep()
Beispiel #4
0
		# --- Fragment: inside a loop (header above this view) over existing
		# --- tree-content terms; 'term', 'oboid', 'oboid_domId', 'fbbt',
		# --- 'known_term_list' and 'new_tc' are bound by the enclosing code.
		if "domainId" in term["domainData"]:
			term["domainData"]["domainId"] = oboid_domId[oboid] # Terms that didn't formerly have a domainId will not at this point.
		else:
			# Default colour is semi-transparent teal (RGBA) - will need to fix.
			term["domainData"] = {"domainColour": [0,128,128,127],"domainId": oboid_domId[oboid],"domainSelected": "false"} # Setting default colour - will need to fix.
	else:
		if "domainId" in term["domainData"]:
			term["domainData"]["domainId"] = '' # Remove old domain_id, if present.
		# Look up a human-readable label for the warning, if fbbt knows the class.
		owl_id = re.sub(':','_',oboid)
		name = '';
		if fbbt.knowsClass(owl_id):
			name = fbbt.getLabel(owl_id)
		print "Ignoring %s %s as , according to the mapping file, it is not a painted domain." % (oboid, name)
	new_tc.append(term)

    
	# Adding records for any new terms
for oboid, domid in oboid_domId.items():
	if oboid not in known_term_list:
		# Anything generated here will need a new nodeId.
		new_tc.append({"domainData":{"domainColour": [0,128,128,127],"domainId": domid,"domainSelected": "false"}, "extId": [oboid],"nodeState": {"open": "false", "selected": "false"} }) # Anything generated here will need a new nodeId.

update_names(new_tc, fbbt)

fbbt.sleep()

# NOTE(review): extension ".jso" below looks like a typo for ".json" - confirm.
write_json(new_tc, "../json/treeContent_JFRC_BN_final.jso")    


    
    
    
#!/usr/bin/env jython -J-Xmx3000m

import sys
from uk.ac.ebi.brain.core import Brain
sys.path.append('../mod')
from lmb_fc_tools import get_con, owlDbOnt
# Open the DB connection using user/password from the command line.
con = get_con(sys.argv[1], sys.argv[2])

# Base IRIs for the OBO library and the VFB extension ontologies.
obo = "http://purl.obolibrary.org/obo/"
vfb = obo + "fbbt/vfb/"

# Ontologies whose entity labels get pushed into the DB.
paths = [
    obo + "fbbt/fbbt-simple.owl",
    obo + "so.owl",
    vfb + "fb_features.owl",
    obo + "ro.owl",
    vfb + "vfb_ext.owl",
]

# Loading one ontology at a time into a fresh Brain keeps peak memory low;
# a single combined Brain would need lots of RAM to run.
for ont_path in paths:
    brain = Brain()
    brain.learn(ont_path)
    owlDbOnt(con, brain).update_labels()
    brain.sleep()

con.close()
Beispiel #6
0
# --- Fragment: 'nc', 'dataset', 'FBBT', load_ont and the add_* helpers
# --- are bound earlier in the original file, above this view.
# baseURI = ''
# for d in dc:
# 	baseURI = d['baseURI']
# cursor.close()
# Fresh ontology for the generated individuals; IRI supplied explicitly.
vfb_ind = Brain('http://www.virtualflybrain.org/owl/', 'http://www.virtualflybrain.org/owl/' + 'flycircuit_plus.owl') # Adding IRI manually for now.
# Setup ontologies
addOboAnnotationProperties(vfb_ind)
addVFBAnnotationProperties(vfb_ind)
# Keyed registry of the ontologies the generation functions draw on.
ont_dict = {}
ont_dict['vfb_ind']=vfb_ind
ont_dict['fbbt'] = load_ont(FBBT)
#ont_dict['fbbt'] = load_ont("http://purl.obolibrary.org/obo/fbbt/%s/fbbt-simple.owl" % fbbt_release_version)
ont_dict['fb_feature'] = load_ont("../../owl/fb_features.owl")
#ont_dict['fb_feature'] = load_ont("http://purl.obolibrary.org/obo/fbbt/vfb/fb_features.owl")
# Now run all the functions

gen_ind_by_source(nc, ont_dict, dataset)
gen_ind_by_source(nc, ont_dict, "CostaJefferis_v3") # Add v3 clusters
#add_manual_ann(conn.cursor(), vfb_ind)
add_BN_dom_overlap(nc, vfb_ind, ont_dict['fbbt'])
map_to_clusters(nc, vfb_ind)


# Save output file and clean up

vfb_ind.save("../../owl/flycircuit_plus.owl")
#conn.close()
vfb_ind.sleep()
ont_dict['fbbt'].sleep()
ont_dict['fb_feature'].sleep()
class test_suite():
    """Integration tests for owlDbOnt run against a live DB + ontology.

    Tests still to add: add_owl_entity_2_db
    """

    def __init__(self, usr, pwd, ont_uri_list):
        """Open a DB connection and load each ontology URI into one Brain."""
        self.conn = get_con(usr, pwd)
        self.ont = Brain()
        for uri in ont_uri_list:
            self.ont.learn(uri)
        self.od = owlDbOnt(self.conn, self.ont)
        # SQL statements queued for cleanup() to execute and commit.
        self.cleanup_list = []

    def run_tests(self):
        """Run all tests, then delete test rows and release resources."""
        self.add_ind_type_test()
        self.add_akv_type_test()
        self.cleanup()
        self.ont.sleep()
        self.conn.close()

    def add_ind_type_test(self):
        """Combined test of add_ind and add_ind_type.

        Returns True if both test type statements were created, else False.
        """
        # A better test would use silly examples that could never be real,
        # so all entities could safely be deleted.
        # add ind_test where name has quotes to be escaped.
        self.od.add_ind("add_ind_test", 'CostaJefferis')
        cursor = self.conn.cursor()
        cursor.execute("SELECT * from owl_individual WHERE label = 'add_ind_test'")
        dc = dict_cursor(cursor)
        iid = False
        for d in dc:
            if d['label'] == "add_ind_test":
                iid = d['id']
            else:
                warnings.warn("Failed to add test ind")
        cursor.close()
        # BUGFIX: 'stat' was previously bound only inside the 'if iid' branch,
        # so the return below raised NameError when the test ind was missing.
        stat = False
        # add ind_type_test
        if iid:
            self.od.add_ind_type(ind=iid, OWLclass='FBbt_00003624', objectProperty='BFO_0000050')
            typ = self.od.type_exists('FBbt_00003624', 'BFO_0000050')
            self.od.add_ind_type(ind=iid, OWLclass='FBgn0000490', objectProperty='RO_0002292')
            typ2 = self.od.type_exists('FBgn0000490', 'RO_0002292')
            if not typ:
                warnings.warn("Failed to create test type statement 'BFO_0000050' some 'FBbt_00003624'")
            elif not typ2:
                warnings.warn("Failed to create test type statement 'expresses some dpp'.")
            else:
                stat = True
            # No longer needed as DELETE cascade set
            # self.cleanup_list.append("DELETE FROM individual_type WHERE id = %s" % typ)  # Type assertions must be deleted first.
            # self.cleanup_list.append("DELETE FROM individual_type WHERE id = %s" % typ2)  # Type assertions must be deleted first.

        self.cleanup_list.append("DELETE from owl_individual WHERE label = 'add_ind_test'")
        return stat

    def add_akv_type_test(self):
        """Test add_akv_type; return the new annotation_type id ('' on failure)."""
        self.od.add_akv_type('process', 'note', 'FBbt_00003624', 'BFO_0000050')
        cursor = self.conn.cursor()
        cursor.execute("SELECT at.id FROM annotation_type at "
                       "JOIN annotation_key_value akv ON (akv.id = at.annotation_key_value_id) "
                       "JOIN owl_type ot ON (ot.id=at.owl_type_id)")
        dc = dict_cursor(cursor)
        ID = ''
        for d in dc:
            ID = d['id']
        cursor.close()  # BUGFIX: cursor was previously leaked.
        if not ID:
            warnings.warn("Failed to add akv type.")
        else:
            # BUGFIX: previously appended unconditionally, so an empty ID
            # queued a malformed "... WHERE id = " for cleanup() to execute.
            self.cleanup_list.append("DELETE FROM annotation_type WHERE id = %s" % ID)
        return ID

    def cleanup(self):
        """Execute every queued cleanup statement and commit."""
        cursor = self.conn.cursor()
        for command in self.cleanup_list:
            cursor.execute(command)
        cursor.close()  # BUGFIX: cursor was previously leaked.
        self.conn.commit()
		# --- Fragment: inside a loop (header above this view) over FlyBase
		# --- feature records 'fb'; 'fb_feature', 'fb_cursor' and
		# --- 'fb_pg_conn' are bound by the enclosing code.
		if fb['obstat']:
			warnings.warn(fb['fbid'] +" " + fb['uc_name'] + " is obsolete !  Not adding to fb_feature.owl.") 
		else:
			if not fb_feature.knowsClass(fb['fbid']): # Only add class if not obsolete.
				fb_feature.addClass(fb['fbid'])
				# Convert FlyBase <up>...</up> superscript markup to [...] for labels.
				tmp = re.sub("<up\>", "[",fb['uc_name'])
				uc_name = re.sub("<\/up>", "]", tmp)
				fb_feature.label(fb['fbid'], uc_name)
				# Map FB id prefixes to SO classes: FBtp/FBti/FBgn/FBal.
				if re.match('FBtp\d+', fb['fbid']):
					fb_feature.subClassOf(fb['fbid'], 'SO_0000796')
				elif re.match('FBti\d+', fb['fbid']):
					fb_feature.subClassOf(fb['fbid'], 'SO_0001218')
				elif re.match('FBgn\d+', fb['fbid']):
					fb_feature.subClassOf(fb['fbid'], 'SO_0000704')
				elif re.match('FBal\d+', fb['fbid']):
					fb_feature.subClassOf(fb['fbid'], 'SO_0001023')
				else:
					warnings.warn("Ignoring this, as doesn't look like an FB feature: %s."  % fb['fbid'])
					continue


	# Presumably throttles requests to an external service - TODO confirm.
	time.sleep(0.1)

# vfb_ms_conn.close()

fb_cursor.close()
fb_pg_conn.close()

fb_feature.save("../../owl/fb_features.owl") # yuk
fb_feature.sleep()
Beispiel #9
0
#!/usr/bin/env jython

from uk.ac.ebi.brain.core import Brain
import json

# Load the GO relations extension ontology.
gorel = Brain()
gorel.learn("http://purl.obolibrary.org/obo/go/extensions/gorel.owl")

# Annotation-extension relations, declared as a hard-coded list for now.
# Pulling the list automatically from the file would be better, but with
# the current structure that requires pulling subproperties, which Brain
# can't do.
relations = ["GOREL_0001006"]

# For each relation, dump its "usage" annotation to a markdown file
# named after the relation's label.
for rel_id in relations:
    rel_label = gorel.getLabel(rel_id)
    rel_usage = gorel.getAnnotation(rel_id, "usage")
    usage_md = open("../.gitdown/" + rel_label + "_usage.md", "w")
    usage_md.write(rel_usage)
    usage_md.close()

gorel.sleep()


            # --- Fragment: inside a loop (header above this view) over
            # --- individuals 'i' and their types 't'; 'statements', 'vom',
            # --- 'vfb' and 'nc' are bound by the enclosing code.
            # Using related link. Can denormalise with generic edge naming script.
            # NOTE(review): no space before "AND" - the concatenated Cypher
            # reads "...short_form = '%s'AND C.short_form..." - looks like a
            # bug; confirm against the live query log.
            s = "MATCH (I:Individual), (C:Class) WHERE I.short_form = '%s'" \
                "AND C.short_form = '%s' MERGE (I)-[r:Related {label: '%s', short_form: '%s' }]->(C)" \
                % (i, t['objectId'], rel, t['relId']) # 
            statements.append(s)
    # Merge an undirected Related edge for every asserted triple on this ind.
    facts = vom.get_triples(sfid = i)
    for f in facts:
        # Edge label is the relation's human label with spaces -> underscores.
        rel = re.sub(' ', '_', vfb.getLabel(f[1]))
        s = "MATCH (I1:Individual), (I2:Individual) " \
            "WHERE I1.short_form = '%s' and I2.short_form = '%s' " \
            "MERGE (I1)-[r:Related { label: '%s', short_form: '%s' }]-(I2)" \
            % (f[0], f[2], rel, f[1])
        statements.append(s)

# Submit all Cypher statements in batches of 1000.
nc.commit_list_in_chunks(statements, verbose = True, chunk_length = 1000)
vfb.sleep()

# Inds from graph (probably don't need this)
# payload = {'statements': [{'statement': 'MATCH (i:Individual) RETURN i.short_form'}]}
# ind_q_res = requests.post(url = "%s/db/data/transaction/commit" % base_uri, auth = (usr, pwd) , data = json.dumps(payload))
# rj= rels.json()
# inds = rj['results'][0]['data']