def run_severity(adate):
    """Compute flood severity for one forecast datetime and write GIS output.

    Parameters
    ----------
    adate : str
        Forecast datetime stamp ``YYYYMMDDHH``; the trailing two characters
        (the hour) are stripped to obtain the day stamp used in the
        severity/GIS output file names.
    """
    day_stamp = adate[:-2]
    # Inputs: duration-fixed GFMS CSV and GloFAS threshold-points CSV
    # (both selected by the full YYYYMMDDHH stamp).
    gfms_csv = gfmsdata_fix + "Flood_byStor_" + adate + ".csv"
    glofas_csv = glofasdata + "threspoints_" + adate + ".csv"
    flood_severity(gfms_csv, glofas_csv, day_stamp, flooddata)
    logging.info("Flood: " + day_stamp)
    # Convert the cleaned attribute table produced above into the GIS file.
    attributes_csv = flooddata + 'Attributes_Clean_' + day_stamp + '.csv'
    generate_gisfile(attributes_csv, day_stamp, gisdata)
def run_cron_fix(adate):
    """Run the full cron-job pipeline for one forecast datetime.

    Cron setup (runs every three hours):
        ``cd ~/ModelofModels/data && python datatool.py --cron``
        crontab entry: ``5 0,3,6,9,12,15,18,21 * * * command``

    For the given date this (1) extracts every 3-hourly GFMS bin file,
    (2) runs the flood-duration fix across the day's CSVs (seeded with the
    previous day's 21Z file), and (3) delegates the severity calculation and
    GIS-file generation to :func:`run_severity` (previously duplicated here).

    Parameters
    ----------
    adate : str
        Forecast datetime stamp ``YYYYMMDDHH``, e.g. ``"2020051600"``.
    """
    # Kept as a list so multiple dates could be processed in one run;
    # in practice the cron job passes a single date.
    processing_dates = [adate]
    binhours = ["00", "03", "06", "09", "12", "15", "18", "21"]
    for data_date in processing_dates:
        # Day stamp without the trailing hour.
        real_date = data_date[:-2]
        # Extract every 3-hourly bin file for the day.
        for binhour in binhours:
            bin_file = "Flood_byStor_" + real_date + binhour + ".bin"
            print(bin_file)
            data_extractor(aqid_csv=None, bin_file=bin_file)
        # Duration calculation needs the previous day's last (21Z) CSV as
        # the baseline, followed by all eight of today's CSVs in order.
        previous_date = datetime.strptime(real_date, "%Y%m%d") - timedelta(days=1)
        base0 = "Flood_byStor_" + previous_date.strftime("%Y%m%d") + "21.csv"
        fix_list = ["Flood_byStor_" + real_date + x + ".csv" for x in binhours]
        fix_list.insert(0, base0)
        fix_duration(fix_list, folder=gfmsdata, fixfolder=gfmsdata_fix)
        # Severity calculation + GIS output: same logic as the standalone
        # entry point, so call it instead of repeating the code.
        run_severity(data_date)