def go_fish():

    # Download the NeuroSynth features and database files (tab-separated)
    f,d = download_data()
    features = pandas.read_csv(f,sep="\t")
    database = pandas.read_csv(d,sep="\t")
    pmids = database.id.unique().tolist()
    print("NeuroSynth database has %s unique PMIDs" %(len(pmids)))

    # Generate brain maps to extract relationships with
    terms = features.columns.tolist()
    terms.pop(0)  # first column is the pmid, not a term
    
    maps_dir = "%s/terms/neurosynth/maps" %(home)
    if not os.path.exists(maps_dir):
        os.mkdir(maps_dir)

    # Queue jobs: generate term brain maps, download abstract texts,
    # extract terms, and extract relationships between terms
    generate_job(func="generate_maps",inputs={"terms":terms},category="terms",batch_num=100)
    generate_job(func="extract_text",category="corpus",inputs={"pmids":pmids},batch_num=100)
    generate_job(func="extract_terms",category="terms")
    generate_job(func="extract_relations",inputs={"terms":terms,"maps_dir":maps_dir},category="relations",batch_num=100)
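
# All of the calls above go through generate_job with a batch_num, which
# suggests the large input lists are split into fixed-size batches, one job
# per batch. A minimal sketch of that idea follows; the helper name,
# signature, and returned job format are hypothetical illustrations, not
# the actual generate_job implementation.

def _batch_jobs_sketch(func, inputs, category, batch_key, batch_num=100):
    """Yield one (func, category, inputs) job spec per batch of inputs[batch_key]."""
    values = inputs[batch_key]
    for start in range(0, len(values), batch_num):
        # Copy the inputs, replacing only the batched list with its slice
        batch_inputs = dict(inputs, **{batch_key: values[start:start + batch_num]})
        yield (func, category, batch_inputs)

# Example: 250 pmids with batch_num=100 would yield three job specs of
# sizes 100, 100, and 50:
#   list(_batch_jobs_sketch("extract_text", {"pmids": pmids}, "corpus",
#                           batch_key="pmids", batch_num=100))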


def go_fish():
    generate_job(func="extract_terms",category="terms")


def go_fish():
    generate_job(func="extract_text", category="corpus")


def go_fish():

    # jobs to download abstract texts
    generate_job(func="extract_text",category="corpus",inputs={"uids",uids},batch_num=100)
    generate_job(func="extract_terms",category="terms")
    generate_job(func="extract_relations",category="relations")


def go_fish():
    generate_job(func="extract_terms",category="terms")
    generate_job(func="extract_relationships",category="terms")