def deploy_states_api(disable_validation, input_dir, output):
    """The entry function for invocation.

    Runs state-level projections for every intervention and deploys the
    generated API results to *output*.

    Args:
        disable_validation: when truthy, skip validation during the projection run.
        input_dir: directory containing the model output to project from.
        output: directory the generated API artifacts are deployed to.
    """
    for intervention in list(Intervention):
        states_result = api_pipeline.run_projections(
            input_dir,
            AggregationLevel.STATE,
            intervention,
            run_validation=not disable_validation,
        )
        states_results_api = api_pipeline.generate_api(states_result)
        api_pipeline.deploy_results(states_results_api, output)
    # Bug fix: this is the states job, but the message previously said
    # "finished top counties job" (copy-paste from deploy_counties_api).
    logger.info("finished states job")
def deploy_states_api(disable_validation, input_dir, output, summary_output):
    """The entry function for invocation"""
    # Fail fast: verify every target directory exists before doing any work.
    for directory in (input_dir, output, summary_output):
        if not os.path.isdir(directory):
            raise NotADirectoryError(directory)

    run_validation = not disable_validation
    for intervention in list(Intervention):
        logger.info(f"Running intervention {intervention.name}")
        projection = api_pipeline.run_projections(
            input_dir,
            AggregationLevel.STATE,
            intervention,
            run_validation=run_validation,
        )
        # Per-state artifacts go to the main output directory.
        summaries, timeseries = api_pipeline.generate_api(projection, input_dir)
        api_pipeline.deploy_results([*summaries, *timeseries], output)

        # Aggregated (all-states) artifacts go to the summary directory.
        all_states_summary = api_pipeline.build_states_summary(summaries, intervention)
        all_states_timeseries = api_pipeline.build_states_timeseries(timeseries, intervention)
        header_timeseries = api_pipeline.build_prediction_header_timeseries_data(
            all_states_timeseries
        )
        api_pipeline.deploy_prediction_timeseries_csvs(header_timeseries, summary_output)
        api_pipeline.deploy_results([all_states_summary], summary_output, write_csv=True)
        api_pipeline.deploy_results([all_states_timeseries], summary_output)
def deploy_counties_api(disable_validation, input_dir, output, summary_output):
    """The entry function for invocation"""
    # Fail fast: verify every target directory exists before doing any work.
    for directory in (input_dir, output, summary_output):
        if not os.path.isdir(directory):
            raise NotADirectoryError(directory)

    run_validation = not disable_validation
    for intervention in list(Intervention):
        projection = api_pipeline.run_projections(
            input_dir,
            AggregationLevel.COUNTY,
            intervention,
            run_validation=run_validation,
        )
        # Per-county artifacts go to the main output directory.
        summaries, timeseries = api_pipeline.generate_api(projection, input_dir)
        api_pipeline.deploy_results([*summaries, *timeseries], output)

        # Aggregated (all-counties) artifacts go to the summary directory.
        all_counties_summary = api_pipeline.build_counties_summary(summaries, intervention)
        all_counties_timeseries = api_pipeline.build_counties_timeseries(
            timeseries, intervention
        )
        header_timeseries = api_pipeline.build_prediction_header_timeseries_data(
            all_counties_timeseries
        )
        api_pipeline.deploy_prediction_timeseries_csvs(header_timeseries, summary_output)
        api_pipeline.deploy_results([all_counties_summary], summary_output, write_csv=True)
        api_pipeline.deploy_results([all_counties_timeseries], summary_output)
    logger.info("finished top counties job")
def deploy_counties_api(disable_validation, input_dir, output, summary_output):
    """The entry function for invocation"""
    run_validation = not disable_validation
    for intervention in list(Intervention):
        # TODO(issues/#258): remove check once counties support inference
        if intervention not in Intervention.county_supported_interventions():
            continue
        projection = api_pipeline.run_projections(
            input_dir,
            AggregationLevel.COUNTY,
            intervention,
            run_validation=run_validation,
        )
        # Per-county artifacts go to the main output directory.
        summaries, timeseries = api_pipeline.generate_api(projection, input_dir)
        api_pipeline.deploy_results([*summaries, *timeseries], output)

        # Aggregated (all-counties) artifacts go to the summary directory.
        all_counties_summary = api_pipeline.build_counties_summary(summaries, intervention)
        all_counties_timeseries = api_pipeline.build_counties_timeseries(
            timeseries, intervention
        )
        header_timeseries = api_pipeline.build_prediction_header_timeseries_data(
            all_counties_timeseries
        )
        api_pipeline.deploy_prediction_timeseries_csvs(header_timeseries, summary_output)
        api_pipeline.deploy_results([all_counties_summary], summary_output, write_csv=True)
        api_pipeline.deploy_results([all_counties_timeseries], summary_output)
    logger.info("finished top counties job")
def deploy_states_api(disable_validation, input_dir, output, summary_output):
    """The entry function for invocation"""
    run_validation = not disable_validation
    for intervention in list(Intervention):
        logger.info(f"Running intervention {intervention.name}")
        projection = api_pipeline.run_projections(
            input_dir,
            AggregationLevel.STATE,
            intervention,
            run_validation=run_validation,
        )
        # Per-state artifacts go to the main output directory.
        summaries, timeseries = api_pipeline.generate_api(projection, input_dir)
        api_pipeline.deploy_results([*summaries, *timeseries], output)

        # Aggregated (all-states) artifacts go to the summary directory.
        all_states_summary = api_pipeline.build_states_summary(summaries, intervention)
        all_states_timeseries = api_pipeline.build_states_timeseries(timeseries, intervention)
        header_timeseries = api_pipeline.build_prediction_header_timeseries_data(
            all_states_timeseries
        )
        api_pipeline.deploy_prediction_timeseries_csvs(header_timeseries, summary_output)
        api_pipeline.deploy_results([all_states_summary], summary_output, write_csv=True)
        api_pipeline.deploy_results([all_states_timeseries], summary_output)
def deploy_states_api(disable_validation, input_dir, output, summary_output):
    """The entry function for invocation"""
    run_validation = not disable_validation
    for intervention in list(Intervention):
        projection = api_pipeline.run_projections(
            input_dir,
            AggregationLevel.STATE,
            intervention,
            run_validation=run_validation,
        )
        # Deploy the full API result set, then the aggregated summary/timeseries
        # views built from it.
        api_results = api_pipeline.generate_api(projection, input_dir)
        api_pipeline.deploy_results(api_results, output)
        all_states_summary = api_pipeline.build_states_summary(api_results, intervention)
        all_states_timeseries = api_pipeline.build_states_timeseries(api_results, intervention)
        api_pipeline.deploy_results([all_states_summary], summary_output, write_csv=True)
        api_pipeline.deploy_results([all_states_timeseries], summary_output)
    logger.info("finished states job")
def county_fips_summaries(input_dir, output):
    """Generates summary files by state and globally of counties with model output data."""
    summaries = api_pipeline.build_county_summary_from_model_output(input_dir)
    api_pipeline.deploy_results(summaries, output)