        # 'single' uploads read the per-year MMR summaries; 'multi' uploads read
        # the ARC (percent-change) summaries.
        if u_type == 'single':
            in_dir = mmr_out_dir + r'/summaries'
        else:
            in_dir = arc_out_dir + r'/summaries'
        call = ('qsub -cwd -P proj_custom_models -N "part3_{}" '
                '-l mem_free=10G -pe multi_slot 5 '
                '-o FILEPATH '
                '-e FILEPATH cluster_shell.sh '
                'db_process_upload.py "{}" "{}" "{}" "{}" "{}"'.format(
                    u_type, u_type, process_vers, gbd_round_id, conn_def, in_dir))
        subprocess.call(call, shell=True)
        time.sleep(5)


if __name__ == '__main__':
    gbd_round_id, conn_def = sys.argv[1:3]
    years = range(1990, 2017)
    cause_df = get_cause_metadata(8)
    # keep only the most-detailed causes and the all-cause root
    causes = cause_df.loc[(cause_df.most_detailed == 1) |
                          (cause_df.level == 0)].cause_id.unique().tolist()
    upload_types = ['single', 'multi']
    process_vers = Uploader(int(gbd_round_id), conn_def).prep_upload(conn_def)
    mmr_out_dir, arc_out_dir = set_out_dirs(process_vers)
    launch_mmr_jobs(years, causes, process_vers, mmr_out_dir)
    maternal_fns.wait('part1', 300)
    launch_arc_jobs(causes, arc_out_dir)
    maternal_fns.wait('part2', 300)
    launch_upload_jobs(upload_types, process_vers, gbd_round_id, conn_def)
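# The driver above sequences its qsub submissions with
# maternal_fns.wait(job_name_pattern, sleep_secs). The helper below is only an
# illustrative sketch of that launch-then-wait idea, not the real maternal_fns
# code: it assumes an SGE-style cluster where `qstat -r` prints the full names
# of the current user's jobs.
import subprocess
import time


def wait_for_jobs(pattern, sleep_secs=60):
    """Block until no queued/running job whose full name contains `pattern` remains."""
    while True:
        qstat_output = subprocess.check_output(['qstat', '-r']).decode()
        if pattern not in qstat_output:
            break  # nothing matching the pattern is still on the cluster
        time.sleep(sleep_secs)  # re-check after the requested pause

# e.g. wait_for_jobs('part1', 300) plays the same role as maternal_fns.wait('part1', 300)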
# run interpolation
jobname = 'interp_dismod_%s' % dismod_me_id
call = ('qsub -cwd -P proj_injuries '
        '-o FILEPATH '
        '-e FILEPATH '
        '-pe multi_slot 45 -N '
        '%s FILEPATH.sh FILEPATH.py '
        ' "%s" "%s" ' % (jobname, dismod_me_id, interp_out_dir))
subprocess.call(call, shell=True)

# II. Cause-fraction correction ------------------------------------------------- #

# wait for interpolation to finish
maternal_fns.wait('interp_dismod', 300)

# run cause fraction scaling
logger.info("Generating corrected cause fractions")
for year in yearvals:
    jobname = 'dismod_cf_correct_%d' % year
    call = ('qsub -cwd -P proj_injuries '
            '-o FILEPATH '
            '-e FILEPATH '
            '-pe multi_slot 8 -N %s '
            'FILEPATH.sh FILEPATH.py '
            '"%s" "%s" "%s" "%s"' % (jobname, jobname, cluster_dir, year,
                                     dep_map_type))
    subprocess.call(call, shell=True)

# wait for cause fraction scaling jobs to finish
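# The actual cause-fraction correction happens inside the FILEPATH.py worker
# submitted above, whose contents are not shown here. Purely as an illustrative
# assumption of what "cause fraction scaling" usually amounts to (rescaling
# fractions so they sum to 1 within each demographic group), a pandas sketch
# with hypothetical column names might look like this:
import pandas as pd


def rescale_cause_fractions(df, value_col='cause_fraction',
                            group_cols=('location_id', 'year_id',
                                        'sex_id', 'age_group_id')):
    """Divide each cause fraction by its group total so each group sums to 1."""
    group_totals = df.groupby(list(group_cols))[value_col].transform('sum')
    corrected = df.copy()
    corrected[value_col] = corrected[value_col] / group_totals
    return corrected

# usage sketch: corrected = rescale_cause_fractions(pd.read_hdf('FILEPATH'))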
print("Getting cause metadata")
cause_df = get_cause_metadata(8, gbd_round_id=gbd_round_id)

print("Getting causes")
# only most-detailed causes and the all-cause root
causes = cause_df.loc[(cause_df.most_detailed == 1) |
                      (cause_df.level == 0)].cause_id.unique().tolist()

codcorrect_vers = get_best_codcorrect_vers(decomp_step, gbd_round_id)

print("Setting process version")
process_vers = Uploader(conn_def, codcorrect_vers, decomp_step,
                        int(gbd_round_id)).prep_upload()
# process_vers = 14774  # hard-coded debugging override; keep disabled so the
#                       # process version created above is actually used
mmr_out_dir, arc_out_dir = set_out_dirs(process_vers)

print("Launching save_birth_estimates")
launch_save_birth_estimate_job(gbd_round_id, decomp_step, process_vers)
maternal_fns.wait('save_birth_estimates', 60)

print("Launching calculate_mmr jobs")
launch_mmr_jobs(mmr_constants.OUTPUT_YEARS, causes, process_vers, mmr_out_dir,
                codcorrect_vers, decomp_step, gbd_round_id)
maternal_fns.wait('mmr_calculate_mmr', 300)

print("Launching calculate_arc jobs")
launch_pct_change_jobs(causes, arc_out_dir)
maternal_fns.wait('mmr_calculate_pct_change', 300)

print("Uploading!")
launch_upload_jobs(mmr_constants.UPLOAD_TYPES, mmr_out_dir, arc_out_dir,
                   process_vers, gbd_round_id, conn_def)

print("Done with gbd outputs upload, now running SDG upload")
launch_sdg(process_vers)
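# set_out_dirs(process_vers) is defined elsewhere in these scripts and its real
# root path is redacted (FILEPATH), so the helper below is only a sketch, under
# the assumption that it builds version-stamped MMR and ARC output directories
# containing the `summaries` subfolders the upload step reads from.
import os


def set_out_dirs_sketch(process_vers, root='FILEPATH'):
    """Create <root>/<process_vers>/{mmr,arc}/summaries and return the two parents."""
    mmr_out_dir = os.path.join(root, str(process_vers), 'mmr')
    arc_out_dir = os.path.join(root, str(process_vers), 'arc')
    for out_dir in (mmr_out_dir, arc_out_dir):
        os.makedirs(os.path.join(out_dir, 'summaries'), exist_ok=True)
    return mmr_out_dir, arc_out_dir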