def extract_terms(output_dir): if has_internet_connectivity(): terms = dict() atlases = get_json("http://neurovault.org/api/atlases/?format=json") for atlas in atlases: label_description_file = "http://neurovault.org/media/images/291/Talairach-labels-2mm.xml" print "Parsing %s" %(label_description_file) xml_dict = read_xml_url(label_description_file) atlas_name = xml_dict["atlas"]["header"]["name"] atlas_name_label = atlas_name.replace(" ","_").lower() atlas_labels = xml_dict["atlas"]["data"]["label"] for l in range(len(atlas_labels)): label = atlas_labels[l] # We will use coordinate for unique ID unique_id = "%s_%s" %(atlas_name_label,l) terms[unique_id] = {"name":label["#text"], "x":label["@x"], "y":label["@y"], "z":label["@z"]} save_terms(terms,output_dir=output_dir) else: print "Cannot define fsl atlas terms, no internet connectivity."
def get_atlas_xml():
    """Return the list of atlas records from the NeuroVault API.

    Only the first page of results is fetched; if the service ever
    lists more than 100 atlases, pagination handling would be needed.
    """
    response = get_json("http://neurovault.org/api/atlases/?format=json")
    return response["results"]
def get_cattell():
    """Fetch the Cattell personality (282 terms) JSON hosted on GitHub."""
    cattell_url = "https://raw.githubusercontent.com/vsoch/traits/master/data/json/cattell_personality_282.json"
    return get_json(cattell_url)
def get_nif_json():
    """Fetch JSON from the NIF SciGraph neighbors endpoint.

    Queries the graph neighbors of NIFGA:birnlex_796 over the
    has_proper_part relationship (outgoing, depth 9).
    """
    scigraph_url = "http://matrix.neuinfo.org:9000/scigraph/graph/neighbors/NIFGA:birnlex_796?relationshipType=http://www.obofoundry.org/ro/ro.owl%23has_proper_part&direction=OUTGOING&depth=9"
    return get_json(scigraph_url)