import time

# Non-stdlib imports. Module paths below are assumptions inferred from the
# surrounding repo layout, not confirmed by the source; ComoWorkFlow in
# particular may be defined in this same module rather than imported.
from hierarchies import dbtrees            # assumed path
from hierarchies.dbtrees import loctree    # assumed path
from como.version import ComoVersion       # assumed path
from como.upload import run_upload         # assumed path

# Legacy jobmon machinery used by run_pipeline_como below; paths assumed.
from jobmon.central_job_monitor import CentralJobMonitor  # assumed path
from jobmon.job_queue import JobQueue                     # assumed path
from jobmon.schedulers import RetryScheduler              # assumed path
from jobmon.executors.sge_executor import SGEExecutor     # assumed path
from jobmon.sge import true_path                          # assumed path


def run_como(
        como_dir=None,
        root_dir="FILEPATH",
        gbd_round_id=5,
        location_set_id=35,
        year_id=list(range(1990, 2018)),
        measure_id=[3, 5, 6],
        n_draws=1000,
        n_simulants=20000,
        components=["cause", "sequela", "injuries", "impairment"],
        change_years=[(1990, 2007), (2007, 2017), (1990, 2017)],
        agg_loc_sets=[35, 83],
        project="proj_como"):
    # Aggregation sets beyond the primary location set, and the full set of
    # location sets the workflow will touch.
    special_sets = set(agg_loc_sets) - set([location_set_id])
    all_sets = set(agg_loc_sets) | set([location_set_id])

    # Resume an existing COMO version if a directory is given; otherwise
    # create a new version under root_dir.
    if como_dir is not None:
        cv = ComoVersion(como_dir)
        cv.load_cache()
    else:
        cv = ComoVersion.new(
            root_dir, gbd_round_id, location_set_id, year_id, measure_id,
            n_draws, components, change_years, special_sets)

    cwf = ComoWorkFlow(cv)
    cwf.add_tasks_to_dag(n_simulants=n_simulants, agg_loc_sets=all_sets)
    if cwf.run_workflow(project=project):
        # Collect the deduplicated union of location ids across all
        # aggregation sets, then upload results for those locations.
        all_locs = []
        for location_set_id in all_sets:
            loc_tree = loctree(
                location_set_id=location_set_id,
                gbd_round_id=cv.gbd_round_id)
            all_locs.extend(loc_tree.node_ids)
        all_locs = list(set(all_locs))
        run_upload(cv, all_locs)
    else:
        raise RuntimeError("como unsuccessful")
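# Usage sketch for run_como (a sketch, not from the original source: the
# reduced n_draws/n_simulants values are illustrative test settings, and
# "FILEPATH" is the source's redacted path, left unfilled):
#
#     run_como(como_dir="FILEPATH")            # resume a cached version
#     run_como(n_draws=100, n_simulants=1000)  # or build a small test run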
# Older pipeline driver (gbd_round_id=4 default) built on the legacy jobmon
# job-queue API; note ComoVersion.new here takes the older per-dimension
# id-list signature.
def run_pipeline_como(
        root_dir,
        gbd_round_id=4,
        location_id=[],
        year_id=[],
        sex_id=[],
        age_group_id=[],
        measure_id=[],
        n_draws=1000,
        n_simulants=20000,
        components=["sequela", "cause", "impairment", "injuries"]):
    cv = ComoVersion.new(
        root_dir, gbd_round_id, location_id, year_id, sex_id, age_group_id,
        measure_id, n_draws, components)

    # Start the job monitor before the try/finally so the cleanup in
    # `finally` always has a bound monitor to shut down.
    cjm = CentralJobMonitor(cv.como_dir, persistent=False)
    time.sleep(5)
    try:
        executor_params = {"request_timeout": 10000}
        jobq = JobQueue(
            cv.como_dir,
            scheduler=RetryScheduler,
            executor=SGEExecutor,
            executor_params=executor_params)

        # run nonfatal pipeline by location/year/sex
        parallelism = ["location_id", "sex_id"]
        for slices in cv.dimensions.index_slices(parallelism):
            jobname = "como_e_sim_{location_id}_{sex_id}".format(
                location_id=slices[0], sex_id=slices[1])
            job = jobq.create_job(
                jobname=jobname,
                runfile=true_path(executable="compute_nonfatal"),
                parameters=[
                    "--como_dir", cv.como_dir,
                    "--location_id", str(slices[0]),
                    "--sex_id", str(slices[1]),
                    "--n_processes", "23",
                    "--n_simulants", str(n_simulants)
                ])
            jobq.queue_job(
                job,
                slots=50,
                memory=400,
                project="proj_como",
                process_timeout=(60 * 180))
        jobq.block_till_done(stop_scheduler_when_done=False)

        # run aggregation by year/sex/measure
        parallelism = ["year_id", "sex_id", "measure_id"]
        for slices in cv.dimensions.index_slices(parallelism):
            for component in cv.components:
                # sequelae aggregate only over the standard hierarchy (35);
                # all other components also use the SDI hierarchy (40)
                if component != "sequela":
                    loc_sets = [35, 40]
                else:
                    loc_sets = [35]

                for location_set_id in loc_sets:
                    jobname = (
                        "como_e_agg_{component}_{year_id}_{sex_id}"
                        "_{measure_id}_{location_set_id}").format(
                            component=component,
                            year_id=slices[0],
                            sex_id=slices[1],
                            measure_id=slices[2],
                            location_set_id=location_set_id)
                    job = jobq.create_job(
                        jobname=jobname,
                        runfile=true_path(executable="aggregate_nonfatal"),
                        parameters=[
                            "--como_dir", cv.como_dir,
                            "--component", component,
                            "--year_id", str(slices[0]),
                            "--sex_id", str(slices[1]),
                            "--measure_id", str(slices[2]),
                            "--location_set_id", str(location_set_id)
                        ])
                    jobq.queue_job(
                        job,
                        slots=25,
                        memory=200,
                        project="proj_como",
                        process_timeout=(60 * 600))
        jobq.block_till_done(stop_scheduler_when_done=False)

        # run summaries by component/location
        lt = dbtrees.loctree(None, 35)
        sdi_lts = dbtrees.loctree(None, 40, return_many=True)
        locs = [l.id for l in lt.nodes]
        sdi_locs = [l.root.id for l in sdi_lts]
        for component in cv.components:
            if component != "sequela":
                summ_locs = locs + sdi_locs
            else:
                summ_locs = locs[:]
            for location_id in summ_locs:
                jobname = "como_e_summ_{component}_{location_id}".format(
                    component=component, location_id=location_id)
                job = jobq.create_job(
                    jobname=jobname,
                    runfile=true_path(executable="summarize_nonfatal"),
                    parameters=[
                        "--como_dir", cv.como_dir,
                        "--component", component,
                        "--location_id", str(location_id)
                    ])
                jobq.queue_job(
                    job,
                    slots=48,
                    memory=96,
                    project="proj_como",
                    process_timeout=(60 * 240))
        jobq.block_till_done(stop_scheduler_when_done=False)

        # upload each component across all summarized locations
        for component in cv.components:
            jobname = "como_e_upload_{component}".format(component=component)
            job = jobq.create_job(
                jobname=jobname,
                runfile=true_path(executable="upload_nonfatal"),
                parameters=[
                    "--como_dir", cv.como_dir,
                    "--component", component,
                    "--location_id", " ".join(
                        [str(l) for l in locs + sdi_locs])
                ])
            jobq.queue_job(
                job,
                slots=20,
                memory=40,
                project="proj_como",
                process_timeout=(60 * 720))
        jobq.block_till_done()
    finally:
        cjm.generate_report()
        cjm.stop_responder()
        cjm.stop_publisher()
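# Hypothetical module entry point: a sketch assuming run_como is the current
# (GBD 2017) driver and run_pipeline_como the legacy one. The argument
# values are illustrative defaults, not values from the original source.
if __name__ == "__main__":
    run_como(
        root_dir="FILEPATH",  # redacted in the source; left unfilled
        gbd_round_id=5,
        n_draws=1000,
        n_simulants=20000)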