        else:  # len(validcases) <= 7
            print(
                "Not enough historical data (less than a week) "
                f"for Continent={continent}, Country={country} and Province={province}"
            )
            continue
    else:  # file for that tuple (country, province) doesn't exist in processed files
        continue
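# (Context: the indented block above is the tail of a loop over preprocessed
# area files; areas whose file is missing, or that have 7 or fewer days of
# valid case data, are skipped before predicting.)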

# Concatenating the scenario predictions (since today & since 100 cases)
# across all areas
today_date_str = "".join(str(datetime.now().date()).split("-"))
df_global_predictions_since_today_scenarios = pd.concat(
    list_df_global_predictions_since_today_scenarios
).reset_index(drop=True)
df_global_predictions_since_100_cases_scenarios = pd.concat(
    list_df_global_predictions_since_100_cases_scenarios
).reset_index(drop=True)
delphi_data_saver = DELPHIDataSaver(
    path_to_folder_danger_map=PATH_TO_FOLDER_DANGER_MAP,
    path_to_website_predicted=PATH_TO_WEBSITE_PREDICTED,
    df_global_parameters=None,
    df_global_predictions_since_today=df_global_predictions_since_today_scenarios,
    df_global_predictions_since_100_cases=df_global_predictions_since_100_cases_scenarios,
)
delphi_data_saver.save_policy_predictions_to_dict_pickle(website=False)
print(
    "Exported all policy-dependent predictions for US states to website & danger_map repositories"
)
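# Side note on the date stamp used above: "".join(str(datetime.now().date()).split("-"))
# turns e.g. "2020-06-15" into the compact "20200615". An equivalent (and arguably
# clearer) spelling uses strftime; a minimal, self-contained check of the equivalence:
from datetime import datetime

now = datetime.now()
# Both spellings yield the same compact YYYYMMDD stamp, e.g. "20200615".
assert "".join(str(now.date()).split("-")) == now.strftime("%Y%m%d")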
Example #2
            print(f"Finished predicting for Continent={continent}, Country={country} and Province={province}")
        else:  # len(validcases) <= 7
            print(f"Not enough historical data (less than a week)" +
                  f"for Continent={continent}, Country={country} and Province={province}")
            continue
    else:  # file for that tuple (country, province) doesn't exist in processed files
        continue

# Appending parameters, and aggregations per country, per continent, and for the world,
# for predictions since today & since 100 cases
today_date_str = "".join(str(datetime.now().date()).split("-"))
df_global_parameters = pd.concat(list_df_global_parameters)
df_global_predictions_since_today = pd.concat(list_df_global_predictions_since_today)
df_global_predictions_since_today = DELPHIAggregations.append_all_aggregations(
    df_global_predictions_since_today
)
# TODO: Discuss with website team how to save this file to visualize it and compare with historical data
df_global_predictions_since_100_cases = pd.concat(list_df_global_predictions_since_100_cases)
df_global_predictions_since_100_cases = DELPHIAggregations.append_all_aggregations(
    df_global_predictions_since_100_cases
)
delphi_data_saver = DELPHIDataSaver(
    path_to_folder_danger_map=os.path.join(OUTPUT_ROOT, 'dangermap'),
    path_to_website_predicted=os.path.join(OUTPUT_ROOT, 'webpred'),
    df_global_parameters=df_global_parameters,
    df_global_predictions_since_today=df_global_predictions_since_today,
    df_global_predictions_since_100_cases=df_global_predictions_since_100_cases,
)
delphi_data_saver.save_all_datasets(save_since_100_cases=False, website=False)
print("Exported all 3 datasets to website & danger_map repositories")
Example #3
            print(f"Finished predicting for Continent={continent}, Country={country} and Province={province}")
        else:  # len(validcases) <= 7
            print(f"Not enough historical data (less than a week)" +
                  f"for Continent={continent}, Country={country} and Province={province}")
            continue
    else:  # file for that tuple (country, province) doesn't exist in processed files
        continue

# Appending parameters, and aggregations per country, per continent, and for the world,
# for predictions since today & since 100 cases
today_date_str = "".join(str(datetime.now().date()).split("-"))
df_global_parameters = pd.concat(list_df_global_parameters)
df_global_predictions_since_today = pd.concat(list_df_global_predictions_since_today)
df_global_predictions_since_today = DELPHIAggregations.append_all_aggregations(
    df_global_predictions_since_today
)
df_global_predictions_since_100_cases = pd.concat(list_df_global_predictions_since_100_cases)
df_global_predictions_since_100_cases = DELPHIAggregations.append_all_aggregations(
    df_global_predictions_since_100_cases
)
delphi_data_saver = DELPHIDataSaver(
    path_to_folder_danger_map=PATH_TO_FOLDER_DANGER_MAP,
    path_to_website_predicted=PATH_TO_WEBSITE_PREDICTED,
    df_global_parameters=df_global_parameters,
    df_global_predictions_since_today=df_global_predictions_since_today,
    df_global_predictions_since_100_cases=df_global_predictions_since_100_cases,
    today_date_str=today_date_str,
)
delphi_data_saver.save_all_datasets(save_since_100_cases=False, website=True)
print("Exported all 3 datasets to website & danger_map repositories")
Example #4
            print(f"Finished predicting for Continent={continent}, Country={country} and Province={province}")
        else:  # len(validcases) <= 7
            print(f"Not enough historical data (less than a week)" +
                  f"for Continent={continent}, Country={country} and Province={province}")
            continue
    else:  # file for that tuple (country, province) doesn't exist in processed files
        continue

# Appending parameters, and aggregations per country, per continent, and for the world,
# for predictions since today & since 100 cases
today_date_str = "".join(str(datetime.now().date()).split("-"))
df_global_parameters = pd.concat(list_df_global_parameters)
df_global_predictions_since_today = pd.concat(list_df_global_predictions_since_today)
df_global_predictions_since_today = DELPHIAggregations.append_all_aggregations(
    df_global_predictions_since_today
)
# TODO: Discuss with website team how to save this file to visualize it and compare with historical data
df_global_predictions_since_100_cases = pd.concat(list_df_global_predictions_since_100_cases)
df_global_predictions_since_100_cases = DELPHIAggregations.append_all_aggregations(
    df_global_predictions_since_100_cases
)
delphi_data_saver = DELPHIDataSaver(
    path_to_folder_danger_map=PATH_TO_FOLDER_DANGER_MAP,
    path_to_website_predicted=PATH_TO_WEBSITE_PREDICTED,
    df_global_parameters=df_global_parameters,
    df_global_predictions_since_today=df_global_predictions_since_today,
    df_global_predictions_since_100_cases=df_global_predictions_since_100_cases,
)
# delphi_data_saver.save_all_datasets(save_since_100_cases=False, website=False)
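# NOTE: with save_all_datasets commented out above, this variant builds the
# datasets but does not actually export them, despite the message below.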
print("Exported all 3 datasets to website & danger_map repositories")