Example #1
def extract_relations(output_dir):

    # Link each Cattell term to its opposite with an "opposite_of" relation
    cattell = get_cattell()
    terms = get_terms(cattell)
    tuples = []
    for term in cattell:
        unique_id = term["id"]
        opposite = (unique_id, term["opposite_id"], "opposite_of")
        tuples.append(opposite)

    save_relations(terms, output_dir=output_dir, relationships=tuples)
Example #2
def extract_relations(uids, output_dir):

    # This function will be called by a job, and must call save_relations
    # ** ALL USER FUNCTIONS MUST HAVE output_dir as an input

    # You should provide a list of tuples, with the
    # first and second items of each tuple corresponding
    # to term ids you specified in extract_terms
    tuples = [(source1, target1, value)]

    # Value can be a string or an int
    # The terms variable is equivalent to the one needed for extract_terms
    save_relations(relations=tuples, output_dir=output_dir)
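
For reference, a filled-in version of this template might look like the minimal sketch below; the term ids and relationship labels are hypothetical placeholders, not values from any real extraction.

def extract_relations(uids, output_dir):

    # Hypothetical term ids and relationship labels, for illustration only
    tuples = [("trm_anxiety", "trm_fear", "related_to"),
              ("trm_anxiety", "trm_calm", "opposite_of")]

    save_relations(relations=tuples, output_dir=output_dir)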
Example #3
def extract_relations(output_dir):

    if has_internet_connectivity():
        tuples = []
        terms = get_terms()
        relations = get_nif_json()
        # Each NIF edge becomes an (object, subject, predicate) relation tuple
        for relation in relations["edges"]:
            tup = (relation["obj"], relation["sub"], relation["pred"])
            tuples.append(tup)

        save_relations(terms, output_dir=output_dir, relationships=tuples)
    else:
        print("Cannot define fma-nif relationships, no internet connectivity.")
Example #4
def extract_relations(output_dir):

    tuples = []
    terms = get_terms()
    concepts = get_concepts()

    for concept in concepts:
        if "relationships" in concept:
            for relation in concept["relationships"]:
                # Encode the direction and relationship type as a single value string
                relationship = "%s,%s" % (relation["direction"], relation["relationship"])
                tup = (concept["id"], relation["id"], relationship)
                tuples.append(tup)

    save_relations(terms, output_dir=output_dir, relationships=tuples)
Example #5
# Assumed imports, not shown in the original snippet; helper functions such as
# download_data and save_relations come from the surrounding plugin code
import os
import pickle
import pandas
from scipy.stats import pearsonr
from neurosynth.base.dataset import Dataset
from neurosynth.analysis import meta


def extract_relations(terms, maps_dir, output_dir):

    if isinstance(terms, str):
        terms = [terms]

    f, d = download_data()
    features = pandas.read_csv(f, sep="\t")
    database = pandas.read_csv(d, sep="\t")
    allterms = features.columns.tolist()
    allterms.pop(0)  # pmid

    dataset = Dataset(d)
    dataset.add_features(f)
    image_matrix = pandas.DataFrame(columns=range(228453))
    for t in range(len(allterms)):
        term = allterms[t]
        term_name = term.replace(" ", "_")
        pickled_map = "%s/%s_pFgA_z.pkl" % (maps_dir, term_name)
        if not os.path.exists(pickled_map):
            print("Generating P(term|activation) for term %s" % (term))
            ids = dataset.get_ids_by_features(term)
            maps = meta.MetaAnalysis(dataset, ids)
            pickle.dump(maps.images["pFgA_z"], open(pickled_map, "wb"))
        map_data = pickle.load(open(pickled_map, "rb"))
        image_matrix.loc[term] = map_data

    sims = pandas.DataFrame(columns=image_matrix.index)
    tuples = []
    for t1 in range(len(terms)):
        term1 = terms[t1]
        print("Extracting NeuroSynth relationships for term %s..." % (term1))
        for t2 in range(len(terms)):
            term2 = terms[t2]
            if t1 < t2:
                # Relation value is the Pearson correlation between the two term maps
                score = pearsonr(image_matrix.loc[term1], image_matrix.loc[term2])[0]
                tuples.append((term1, term2, score))

    save_relations(output_dir=output_dir, relations=tuples)