def create_raincell(configs, **kwargs):
    """Build and run the raincell-input preparation for the scheduled date.

    Expects the Airflow-style execution date string under kwargs['ds'].
    The event window spans 2 days before to 3 days after the base date.
    """
    cfg = Config(configs)
    algo = RaincellAlgo(RaincellNcfIO(cfg), cfg)

    run_date = datetime.strptime(kwargs['ds'], DATE_FORMAT)
    base_dt = run_date
    start_dt = base_dt - timedelta(days=2)
    end_dt = base_dt + timedelta(days=3)

    # netCDF names for the scheduled day and the two preceding days (0, -1, -2).
    nc_names = [
        nc_f_format.format((run_date - timedelta(days=back)).strftime(DATE_FORMAT))
        for back in (0, 1, 2)
    ]
    for name in nc_names:
        print(name)

    algo.execute(
        ncfs={
            'nc_f': path.join(wrf_results_nfs, nc_names[0]),
            'nc_f_prev_days': [path.join(wrf_results_nfs, n) for n in nc_names[1:]],
        },
        start_dt=start_dt,
        base_dt=base_dt,
        end_dt=end_dt,
    )
def create_outflow(configs, **kwargs):
    """Build and run the outflow-input preparation for the scheduled date.

    Expects the Airflow-style execution date string under kwargs['ds'].
    The event window spans 2 days before to 3 days after the base date.
    """
    cfg = Config(configs)
    algo = OutflowAlgo(OutflowIO(cfg), cfg)

    base_dt = datetime.strptime(kwargs['ds'], DATE_FORMAT)
    start_dt = base_dt - timedelta(days=2)
    end_dt = base_dt + timedelta(days=3)

    algo.execute(start_dt=start_dt, end_dt=end_dt)
def create_dailyraincsv(configs, **kwargs):
    """Build and run the daily rain-CSV preparation for the scheduled date.

    Expects the Airflow-style execution date string under kwargs['ds'].
    The event window spans 2 days before to 3 days after the base date.
    """
    cfg = Config(configs)
    algo = RainCsvAlgo(RainCsvIO(cfg), cfg)

    base_dt = datetime.strptime(kwargs['ds'], DATE_FORMAT)
    algo.execute(
        start_dt=base_dt - timedelta(days=2),
        base_dt=base_dt,
        end_dt=base_dt + timedelta(days=3),
    )
        # NOTE(review): this fragment is the tail of a method whose `def` lies
        # outside this chunk; it builds the text lines of a tidal outflow file.
        hourly_forecast = OutflowAlgo.process_tidal_forecast(algo_input)
        lines = []
        # Seed the output with the static initial tidal configuration lines.
        with open(self.algo_config['init_tidal_config']) as init_tidal_conf_f:
            init_tidal_levels = init_tidal_conf_f.readlines()
        for init_tidal_level in init_tidal_levels:
            if len(init_tidal_level.split()):  # Check if not empty line
                lines.append(init_tidal_level)
                # Immediately after an 'N' line, emit a zeroed 'S' line padded
                # to DAT_WIDTH columns.
                # NOTE(review): presumably FLO2D OUTFLOW.DAT formatting — confirm.
                if init_tidal_level[0] == 'N':
                    lines.append('{0} {1:{w}} {2:{w}}\n'.format('S', 0, 0, w=self.algo_config['DAT_WIDTH']))
        # Hours are counted from start_dt truncated to the whole hour.
        base_dt = dynamic_args['start_dt'].replace(minute=0, second=0, microsecond=0)
        # One 'S <hours> <value>' line per hourly forecast row, value to 2 decimals.
        for dt_index, rows in hourly_forecast.iterrows():
            hours_so_far = int((dt_index - base_dt).total_seconds()/3600)
            tidal_value = float(rows['value'])
            tidal_line = '{0} {1:{w}} {2:{w}{b}}\n'\
                .format('S', hours_so_far, tidal_value, b='.2f', w=self.algo_config['DAT_WIDTH'])
            lines.append(tidal_line)
        return lines

    @staticmethod
    def process_tidal_forecast(tidal_forecast):
        """Resample a tidal timeseries to hourly maxima.

        :param tidal_forecast: pandas DataFrame with a time index and a value
            column; anything else raises TypeError.
        :return: DataFrame resampled to hourly buckets, keeping the max of
            each bucket and dropping empty (NaN) buckets.
        """
        if not isinstance(tidal_forecast, pd.DataFrame):
            raise TypeError('Given timeseries is not a pandas data-frame of time, value columns')
        return tidal_forecast.resample('H').max().dropna()


if __name__ == '__main__':
    # Ad-hoc manual run with developer-local paths.
    outflow_config = Config('/home/nira/PycharmProjects/DI_Framework/flo2d_input_preparation/outflow/config.json')
    outflow_io = OutflowIO(outflow_config)
    outflow_algo = OutflowAlgo(outflow_io, outflow_config)
    outflow_algo.execute(start_dt=datetime(2018,1,1,0,0,0), end_dt=datetime(2018,1,5,0,0,0))
    @staticmethod
    def prepare_line(res_min, batch_size, start_ts, end_ts, rainfall_df):
        """Format a rainfall DataFrame as raincell text lines.

        :param res_min: resolution in minutes written into the header.
            NOTE(review): name suggests minutes — confirm against the writer.
        :param batch_size: second header field.
        :param start_ts: start timestamp string for the header.
        :param end_ts: end timestamp string for the header.
        :param rainfall_df: DataFrame whose columns are numeric cell ids and
            whose rows are per-timestep rainfall values.
        :return: list of lines — one header, then one '<cell> <value>' line per
            (timestep, cell) pair, cells in ascending order.
        """
        lines = []
        header_line = "%d %d %s %s\n" % (res_min, batch_size, start_ts, end_ts)
        lines.append(header_line)
        # Emit cells in a deterministic (sorted) order for every timestep.
        cell_nos = np.sort(rainfall_df.columns)
        for index, row in rainfall_df.iterrows():
            for cell in cell_nos:
                line = "%d %.1f\n" % (cell, row[cell])
                lines.append(line)
        return lines


if __name__ == '__main__':
    # Ad-hoc manual run with developer-local paths.
    raincell_config = Config(
        '/home/nira/PycharmProjects/DI_Framework/flo2d_input_preparation/raincell/config.json'
    )
    raincell_io = RaincellNcfIO(raincell_config)
    outflow_algo = RaincellAlgo(raincell_io, raincell_config)
    outflow_algo.execute(
        ncfs={
            'nc_f': "/home/nira/PycharmProjects/DI_Framework/resources/wrf_output/now/wrfout_d03_2018-01-03_18_00_00_rf",
            'nc_f_prev_days': [
                "/home/nira/PycharmProjects/DI_Framework/resources/wrf_output/prev_1/wrfout_d03_2018-01-02_18_00_00_rf",
                "/home/nira/PycharmProjects/DI_Framework/resources/wrf_output/prev_2/wrfout_d03_2018-01-01_18_00_00_rf"
            ]
        },
        start_dt=datetime(2018, 1, 1, 0, 0, 0),
        base_dt=datetime(2018, 1, 1, 0, 0, 0),
        end_dt=datetime(2018, 1, 1, 0, 0, 0),
        # NOTE(review): this call continues past the visible chunk; the closing
        # parenthesis is not in view.
        # NOTE(review): tail of an exception handler whose `try`/`def` lies
        # outside this chunk — report the validation failure and return None.
        print('Input Integrity Error!', ex)
        return None

    def check_input_integrity(self, algo_input):
        """Validate that algo_input matches the configured locations.

        :param algo_input: expected to be a pandas DataFrame with a
            DatetimeIndex and one column per configured location id.
        :return: True when the input is valid.
        :raises AttributeError: when the input is not a DataFrame with a
            DatetimeIndex, or its columns differ from the configured ids.
        """
        location_ids = self.algo_config['location-ids']
        # Should be a pandas Dataframe with DatetimeIndex.
        if not isinstance(algo_input, pd.DataFrame) or not isinstance(
                algo_input.index, pd.DatetimeIndex):
            raise AttributeError(
                'algo_input should be a pandas DataFrame with DatetimeIndex.')
        # Column names should be same as location_ids (order-insensitive).
        if set(location_ids) != set(algo_input.columns):
            raise AttributeError(
                'Columns values of input DataFrame should be same as location Ids.'
            )
        return True


if __name__ == '__main__':
    # Ad-hoc manual run with developer-local paths.
    raincsv_config = Config(
        '/home/nira/PycharmProjects/DI_Framework/hec_hms_input_preparation/rain_csv/config.json'
    )
    raincsv_io = RainCsvIO(raincsv_config)
    raincsv_algo = RainCsvAlgo(raincsv_io, raincsv_config)
    raincsv_algo.execute(
        start_dt=datetime(2018, 1, 1, 0, 0, 0),
        base_dt=datetime(2018, 1, 3, 0, 0, 0),
        end_dt=datetime(2018, 1, 6, 0, 0, 0),
    )