print(f"Finished predicting for Continent={continent}, Country={country} and Province={province}") else: # len(validcases) <= 7 print(f"Not enough historical data (less than a week)" + f"for Continent={continent}, Country={country} and Province={province}") continue else: # file for that tuple (country, province) doesn't exist in processed files continue # Appending parameters, aggregations per country, per continent, and for the world # for predictions today & since 100 today_date_str = "".join(str(datetime.now().date()).split("-")) df_global_parameters = pd.concat(list_df_global_parameters) df_global_predictions_since_today = pd.concat(list_df_global_predictions_since_today) df_global_predictions_since_today = DELPHIAggregations.append_all_aggregations( df_global_predictions_since_today ) df_global_predictions_since_100_cases = pd.concat(list_df_global_predictions_since_100_cases) df_global_predictions_since_100_cases = DELPHIAggregations.append_all_aggregations( df_global_predictions_since_100_cases ) delphi_data_saver = DELPHIDataSaver( path_to_folder_danger_map=PATH_TO_FOLDER_DANGER_MAP, path_to_website_predicted=PATH_TO_WEBSITE_PREDICTED, df_global_parameters=df_global_parameters, df_global_predictions_since_today=df_global_predictions_since_today, df_global_predictions_since_100_cases=df_global_predictions_since_100_cases, today_date_str=today_date_str, ) delphi_data_saver.save_all_datasets(save_since_100_cases=False, website=True) print("Exported all 3 datasets to website & danger_map repositories")
print( f"Not enough historical data (less than a week)" + f"for Continent={continent}, Country={country} and Province={province}" ) continue else: # file for that tuple (country, province) doesn't exist in processed files continue # Appending parameters, aggregations per country, per continent, and for the world # for predictions today & since 100 today_date_str = "".join(str(datetime.now().date()).split("-")) df_global_parameters = pd.concat(list_df_global_parameters) df_global_predictions_since_today = pd.concat( list_df_global_predictions_since_today) df_global_predictions_since_today = DELPHIAggregations.append_all_aggregations( df_global_predictions_since_today) # TODO: Discuss with website team how to save this file to visualize it and compare with historical data df_global_predictions_since_100_cases = pd.concat( list_df_global_predictions_since_100_cases) df_global_predictions_since_100_cases = DELPHIAggregations.append_all_aggregations( df_global_predictions_since_100_cases) delphi_data_saver = DELPHIDataSaver( path_to_folder_danger_map=PATH_TO_FOLDER_DANGER_MAP, path_to_website_predicted=PATH_TO_WEBSITE_PREDICTED, df_global_parameters=df_global_parameters, df_global_predictions_since_today=df_global_predictions_since_today, df_global_predictions_since_100_cases=df_global_predictions_since_100_cases, ) delphi_data_saver.save_all_datasets(save_since_100_cases=False) print("Exported all 3 datasets to website & danger_map repositories")