Exemplo n.º 1
0
def test_generate_joules_inflow():
    """
    Checks generating inflow file from Joules LSM.

    Generates the m3_riv inflow netCDF file from the Joules test data
    (no RAPID simulation) and compares it against the stored solution.
    """
    print("TEST 7: TEST GENERATE INFLOW FILE FROM Joules DATA")
    rapid_output_path = os.path.join(OUTPUT_DATA_PATH, "output", "u-k")

    # Create testing environment; directories may exist from earlier runs.
    os.makedirs(RAPID_DATA_PATH, exist_ok=True)
    os.makedirs(os.path.join(OUTPUT_DATA_PATH, "output"), exist_ok=True)

    try:
        copytree(os.path.join(COMPARE_DATA_PATH, "gis", "u-k"),
                 os.path.join(RAPID_DATA_PATH, "u-k"))
    except OSError:
        # GIS input already present from a previous run
        pass

    # run main process (inflow generation only, no simulation)
    run_lsm_rapid_process(
        rapid_executable_location=RAPID_EXE_PATH,
        cygwin_bin_location=CYGWIN_BIN_PATH,
        rapid_io_files_location=OUTPUT_DATA_PATH,
        lsm_data_location=os.path.join(LSM_INPUT_DATA_PATH, 'joules'),
        simulation_start_datetime=datetime(1980, 1, 1),
        simulation_end_datetime=datetime(2014, 1, 31),
        file_datetime_re_pattern=r'\d{8}_\d{2}',
        file_datetime_pattern="%Y%m%d_%H",
        generate_rapid_namelist_file=False,
        run_rapid_simulation=False,
        use_all_processors=True,
    )

    #CHECK OUTPUT
    m3_file_name = "m3_riv_bas_met_office_joules_3hr_20080803to20080803.nc"
    generated_m3_file = os.path.join(rapid_output_path, m3_file_name)
    generated_m3_file_solution = os.path.join(INFLOW_COMPARE_DATA_PATH,
                                              m3_file_name)
    # compare generated netCDF with the solution file; close handles even
    # if an assertion fails so cleanup is not blocked by open files
    d1 = Dataset(generated_m3_file)
    d2 = Dataset(generated_m3_file_solution)
    try:
        assert_almost_equal(d1.variables['m3_riv'][:],
                            d2.variables['m3_riv'][:],
                            decimal=5)
        if 'rivid' in d2.variables.keys():
            ok_((d1.variables['rivid'][:] == d2.variables['rivid'][:]).all())
        if 'lat' in d2.variables.keys():
            ok_((d1.variables['lat'][:] == d2.variables['lat'][:]).all())
        if 'lon' in d2.variables.keys():
            ok_((d1.variables['lon'][:] == d2.variables['lon'][:]).all())
    finally:
        d1.close()
        d2.close()

    #cleanup
    rmtree(os.path.join(OUTPUT_DATA_PATH, "input"))
    rmtree(os.path.join(OUTPUT_DATA_PATH, "output"))
Exemplo n.º 2
0
    def test_generate_era20cm_inflow(self):
        """
        Checks generating inflow file from ERA 20CM LSM.
        """
        print("TEST 3: TEST GENERATE INFLOW FILE FROM ERA 20CM DATA")

        rapid_input_path, rapid_output_path = self._setup_automated("x-x")

        era20cm_data_path = os.path.join(self.LSM_INPUT_DATA_PATH, 'era20cm')
        run_lsm_rapid_process(
            rapid_executable_location=self.RAPID_EXE_PATH,
            cygwin_bin_location=self.CYGWIN_BIN_PATH,
            rapid_io_files_location=self.OUTPUT_DATA_PATH,
            lsm_data_location=era20cm_data_path,
            simulation_start_datetime=datetime(1980, 1, 1),
            simulation_end_datetime=datetime(2014, 1, 31),
            ensemble_list=range(10),
            generate_rapid_namelist_file=False,
            run_rapid_simulation=False,
            use_all_processors=True,
        )

        # Each of the ten ensemble members produces its own m3_riv file;
        # compare every one against the stored solution.
        name_template = "m3_riv_bas_era_20cm_t159_3hr_20000129to20000130_{0}.nc"
        for ensemble_id in range(10):
            m3_file_name = name_template.format(ensemble_id)
            self._compare_m3(
                os.path.join(rapid_output_path, m3_file_name),
                os.path.join(self.INFLOW_COMPARE_DATA_PATH, m3_file_name))
Exemplo n.º 3
0
    def start_run(self, start_utc_dt, end_utc_dt, init_first_run=False):
        """
        Run RAPID once per model run between the two UTC datetimes.

        Parameters
        ----------
        start_utc_dt: :obj:`datetime.datetime`
            UTC datetime of the first model run.
        end_utc_dt: :obj:`datetime.datetime`
            UTC end bound for model runs.
        init_first_run: bool, optional
            If True, the first run is also initialized from the previous
            run's qinit.csv; otherwise only the later runs are.
        """
        model_run_dt_list = self.prepare_model_run(start_utc_dt, end_utc_dt)

        # enumerate instead of indexing: the position is only needed to
        # decide whether this is the very first run
        for run_index, model_run_dt in enumerate(model_run_dt_list):
            print("****************************************Run RAPID {}".format(model_run_dt))
            model_run_dt_str = model_run_dt.strftime("%Y%m%d%H")
            model_run_root = os.path.join(self.target_path, model_run_dt_str)
            previous_model_run_dt = model_run_dt - self.model_run_interval_tdelta

            previous_model_run_dt_str = previous_model_run_dt.strftime("%Y%m%d%H")
            previous_model_run_root = os.path.join(self.target_path, previous_model_run_dt_str)

            # initialize from the previous run's output except on the
            # first run (unless explicitly requested)
            if run_index > 0 or init_first_run:
                initial_flows_file = os.path.join(previous_model_run_root, "qinit.csv")
            else:
                initial_flows_file = None

            # Run Cosmo
            run_lsm_rapid_process(
                rapid_executable_location=self.rapid_executable_path,
                rapid_io_files_location=model_run_root,
                lsm_data_location=os.path.join(model_run_root, "data", model_run_dt_str),
                use_all_processors=False,  # defaults to use all processors available
                num_processors=1,  # you can change this number if use_all_processors=False
                generate_initialization_file=True,
                timedelta_between_simulations=datetime.timedelta(hours=12),
                initial_flows_file=initial_flows_file,
            )
            print("*************************Done {}".format(model_run_dt))

        print("****************************All Done")
Exemplo n.º 4
0
    def test_generate_era20cm_inflow(self):
        """
        Checks generating inflow file from ERA 20CM LSM.
        """
        print("TEST 3: TEST GENERATE INFLOW FILE FROM ERA 20CM DATA")

        rapid_input_path, rapid_output_path = self._setup_automated("x-x")

        run_lsm_rapid_process(
            rapid_executable_location=self.RAPID_EXE_PATH,
            cygwin_bin_location=self.CYGWIN_BIN_PATH,
            rapid_io_files_location=self.OUTPUT_DATA_PATH,
            lsm_data_location=os.path.join(self.LSM_INPUT_DATA_PATH,
                                           'era20cm'),
            simulation_start_datetime=datetime(1980, 1, 1),
            simulation_end_datetime=datetime(2014, 1, 31),
            ensemble_list=range(10),
            generate_rapid_namelist_file=False,
            run_rapid_simulation=False,
            use_all_processors=True,
        )

        # one m3_riv output file per ensemble member
        for member in range(10):
            m3_file_name = (
                "m3_riv_bas_era_20cm_t159_3hr_20000129to20000130_{0}.nc"
                .format(member))
            generated = os.path.join(rapid_output_path, m3_file_name)
            solution = os.path.join(self.INFLOW_COMPARE_DATA_PATH,
                                    m3_file_name)
            self._compare_m3(generated, solution)
Exemplo n.º 5
0
    def test_generate_joules_inflow(self):
        """
        Checks generating inflow file from Joules LSM.
        """
        print("TEST 7: TEST GENERATE INFLOW FILE FROM Joules DATA")
        rapid_input_path, rapid_output_path = self._setup_automated("u-k")

        # generate the inflow file only; the RAPID simulation is skipped
        run_lsm_rapid_process(
            rapid_executable_location=self.RAPID_EXE_PATH,
            cygwin_bin_location=self.CYGWIN_BIN_PATH,
            rapid_io_files_location=self.OUTPUT_DATA_PATH,
            lsm_data_location=os.path.join(self.LSM_INPUT_DATA_PATH, 'joules'),
            simulation_start_datetime=datetime(1980, 1, 1),
            simulation_end_datetime=datetime(2014, 1, 31),
            file_datetime_re_pattern=r'\d{8}_\d{2}',
            file_datetime_pattern="%Y%m%d_%H",
            generate_rapid_namelist_file=False,
            run_rapid_simulation=False,
            use_all_processors=True,
        )

        # compare the generated m3_riv netCDF against the stored solution
        m3_file_name = "m3_riv_bas_met_office_joules_3hr_20080803to20080803.nc"
        generated = os.path.join(rapid_output_path, m3_file_name)
        solution = os.path.join(self.INFLOW_COMPARE_DATA_PATH, m3_file_name)
        self._compare_m3(generated, solution)
Exemplo n.º 6
0
 def test_generate_joules_inflow(self):
     """
     Checks generating inflow file from Joules LSM.
     """
     print("TEST 7: TEST GENERATE INFLOW FILE FROM Joules DATA")
     rapid_input_path, rapid_output_path = self._setup_automated("u-k")

     # inflow generation only -- the RAPID simulation itself is skipped
     joules_data_path = os.path.join(self.LSM_INPUT_DATA_PATH, 'joules')
     run_lsm_rapid_process(
         rapid_executable_location=self.RAPID_EXE_PATH,
         cygwin_bin_location=self.CYGWIN_BIN_PATH,
         rapid_io_files_location=self.OUTPUT_DATA_PATH,
         lsm_data_location=joules_data_path,
         simulation_start_datetime=datetime(1980, 1, 1),
         simulation_end_datetime=datetime(2014, 1, 31),
         file_datetime_re_pattern=r'\d{8}_\d{2}',
         file_datetime_pattern="%Y%m%d_%H",
         generate_rapid_namelist_file=False,
         run_rapid_simulation=False,
         use_all_processors=True,
     )

     # compare generated m3_riv file with the stored solution
     m3_file_name = "m3_riv_bas_met_office_joules_3hr_20080803to20080803.nc"
     generated = os.path.join(rapid_output_path, m3_file_name)
     solution = os.path.join(self.INFLOW_COMPARE_DATA_PATH, m3_file_name)
     self._compare_m3(generated, solution)
Exemplo n.º 7
0
    def test_run_era_interim_inflow(self):
        """
        Checks generating inflow file from ERA Interim LSM.

        Runs the full process (inflow generation plus RAPID simulation)
        and compares the m3_riv, Qout and qinit outputs against stored
        solution files.
        """
        print("TEST 1: TEST GENERATE INFLOW FILE FROM ERA INTERIM DATA")

        rapid_input_path, rapid_output_path = self._setup_automated("x-x")

        #run main process
        run_lsm_rapid_process(
            rapid_executable_location=self.RAPID_EXE_PATH,
            cygwin_bin_location=self.CYGWIN_BIN_PATH,
            rapid_io_files_location=self.OUTPUT_DATA_PATH,
            lsm_data_location=os.path.join(self.LSM_INPUT_DATA_PATH, 'erai3'),
            simulation_start_datetime=datetime(1980, 1, 1),
            simulation_end_datetime=datetime(2014, 1, 31),
            generate_rapid_namelist_file=False,
            run_rapid_simulation=True,
            generate_return_periods_file=False,
            generate_seasonal_initialization_file=False,
            generate_initialization_file=True,
            use_all_processors=True,
        )

        #CHECK OUTPUT
        #m3_riv
        m3_file_name = "m3_riv_bas_erai_t511_3hr_20030121to20030122.nc"
        generated_m3_file = os.path.join(rapid_output_path, m3_file_name)
        generated_m3_file_solution = os.path.join(self.INFLOW_COMPARE_DATA_PATH, m3_file_name)
        self._compare_m3(generated_m3_file, generated_m3_file_solution)

        #qout file
        qout_file_name = "Qout_erai_t511_3hr_20030121to20030122.nc"
        generated_qout_file = os.path.join(rapid_output_path, qout_file_name)
        generated_qout_file_solution = os.path.join(self.INFLOW_COMPARE_DATA_PATH, qout_file_name)
        d1 = Dataset(generated_qout_file)
        d2 = Dataset(generated_qout_file_solution)
        try:
            # close the netCDF handles even if a comparison fails so the
            # files are not left open for later cleanup
            assert_almost_equal(d1.variables['Qout'][:], d2.variables['Qout'][:], decimal=5)
            ok_((d1.variables['rivid'][:] == d2.variables['rivid'][:]).all())
            ok_((d1.variables['time'][:] == d2.variables['time'][:]).all())
            if 'lat' in d2.variables.keys():
                ok_((d1.variables['lat'][:] == d2.variables['lat'][:]).all())
            if 'lon' in d2.variables.keys():
                ok_((d1.variables['lon'][:] == d2.variables['lon'][:]).all())
        finally:
            d1.close()
            d2.close()

        #initialization file
        qinit_file_name = "qinit_erai_t511_3hr_20030121to20030122.csv"
        generated_qinit_file = os.path.join(rapid_input_path, qinit_file_name)
        generated_qinit_file_solution = os.path.join(self.INFLOW_COMPARE_DATA_PATH, qinit_file_name)

        ok_(compare_csv_decimal_files(generated_qinit_file, generated_qinit_file_solution))

        #additional cleanup
        remove_files(generated_qinit_file)
Exemplo n.º 8
0
def run_era5_rapid_simulation(region, rapid_executable_location,
                              lsm_data_location, master_input_location,
                              master_output_location, prev_sim_end_date_str,
                              simulation_start_datetime,
                              simulation_end_datetime):
    """
    Run an ERA-5 driven RAPID simulation for a single region.

    Parameters
    ----------
    region: str
        Name of the region subfolder in the master input/output folders.
    rapid_executable_location: str
        Path to the RAPID executable.
    lsm_data_location: str
        Folder containing the ERA-5 runoff data.
    master_input_location: str
        Folder containing the per-region RAPID input folders.
    master_output_location: str
        Folder receiving the per-region RAPID output folders (created if
        missing).
    prev_sim_end_date_str: str
        End date string of the previous simulation; used to locate the
        matching qinit_era5*<date>.csv initial flows file.
    simulation_start_datetime: :obj:`datetime.datetime`
        Start of the simulation.
    simulation_end_datetime: :obj:`datetime.datetime`
        End of the simulation (stops at last available runoff grid).

    Raises
    ------
    FileNotFoundError
        If no initial flows file matching the previous simulation end
        date exists in the region's input folder.
    """
    print('Region: ' + region)

    # Define rapid input and output folder for specific region
    rapid_input_location = os.path.join(master_input_location, region)
    print('Rapid Input Folder: ' + rapid_input_location)
    rapid_output_location = os.path.join(master_output_location, region)
    if not os.path.exists(rapid_output_location):
        print('Creating Output Folder...')
        os.makedirs(rapid_output_location)
    print('Rapid Output Folder: ' + rapid_output_location)

    # Check for initial flows file; when several match, the last one
    # listed wins (same behavior as before)
    initial_flows_file = ''
    for file_name in os.listdir(rapid_input_location):
        if (file_name.startswith('qinit_era5')
                and file_name.endswith(prev_sim_end_date_str + '.csv')):
            initial_flows_file = os.path.join(rapid_input_location, file_name)
            print('Initial Flows File: ' + initial_flows_file)
    if not initial_flows_file:
        print('ERROR: Initial Flows File not found for the region ' + region)
        # FileNotFoundError is still caught by callers handling Exception
        raise FileNotFoundError('Initial Flows File not found for the '
                                'region ' + region)

    # Run RAPID
    print('Starting RAPID process for ' + region + '...')
    run_lsm_rapid_process(
        rapid_executable_location=rapid_executable_location,
        # folder containing ERA-5 runoff data
        lsm_data_location=lsm_data_location,
        rapid_input_location=rapid_input_location,
        rapid_output_location=rapid_output_location,
        initial_flows_file=initial_flows_file,
        simulation_start_datetime=simulation_start_datetime,
        # will stop at last date of available runoff grid
        simulation_end_datetime=simulation_end_datetime,
        # run RAPID after generating the inflow file
        run_rapid_simulation=True,
        # no namelist for a later manual run
        generate_rapid_namelist_file=False,
        # generate qinit file from end of RAPID simulation
        generate_initialization_file=True,
        use_all_processors=True,  # defaults to use all processors available
        # num_processors=1  # you can change this number if use_all_processors=False
    )
    print('------------------------------\n')
Exemplo n.º 9
0
 def _run_automatic(self, lsm_folder_name):
     """
     Run the standard automatic inflow-generation process for the given
     LSM data folder; the RAPID simulation itself is not run.
     """
     lsm_path = os.path.join(self.LSM_INPUT_DATA_PATH, lsm_folder_name)
     run_lsm_rapid_process(
         rapid_executable_location=self.RAPID_EXE_PATH,
         cygwin_bin_location=self.CYGWIN_BIN_PATH,
         rapid_io_files_location=self.OUTPUT_DATA_PATH,
         lsm_data_location=lsm_path,
         simulation_start_datetime=datetime(1980, 1, 1),
         simulation_end_datetime=datetime(2014, 12, 31),
         generate_rapid_namelist_file=False,
         run_rapid_simulation=False,
         use_all_processors=True,
     )
Exemplo n.º 10
0
def main():
    """
    Run the Cosmo-driven RAPID process for each model run between the
    hard-coded start and end datetimes.

    Each run is initialized with the qinit.csv produced by the run 12
    hours earlier.
    """
    start_utc_dt = datetime.datetime(year=2018, month=8, day=8, hour=0)
    end_utc_dt = datetime.datetime(year=2018, month=8, day=9, hour=0)

    model_run_dt_list = prepare_model_run(start_utc_dt, end_utc_dt)

    from RAPIDpy.inflow import run_lsm_rapid_process

    for model_run_dt in model_run_dt_list:
        model_run_dt_str = model_run_dt.strftime("%Y%m%d%H")
        model_run_root = os.path.join(target_path, model_run_dt_str)

        # the run 12 hours earlier supplies the initial flows file
        previous_model_run_dt = model_run_dt - datetime.timedelta(hours=12)
        previous_model_run_dt_str = previous_model_run_dt.strftime("%Y%m%d%H")
        previous_model_run_root = os.path.join(target_path,
                                               previous_model_run_dt_str)

        # Run Cosmo
        run_lsm_rapid_process(
            rapid_executable_location='/home/sherry/rapid/run/rapid',
            rapid_io_files_location=model_run_root,
            lsm_data_location=os.path.join(model_run_root, "data",
                                           model_run_dt_str),
            use_all_processors=False,  # defaults to use all processors available
            num_processors=1,  # you can change this number if use_all_processors=False
            generate_initialization_file=True,
            timedelta_between_simulations=datetime.timedelta(hours=12),
            initial_flows_file=os.path.join(previous_model_run_root,
                                            "qinit.csv"),
        )
Exemplo n.º 11
0
 def _run_automatic(self, lsm_folder_name):
     """
     Run the automatic inflow-generation method for a single LSM data
     folder (inflow file only, no RAPID simulation).
     """
     lsm_data_path = os.path.join(self.LSM_INPUT_DATA_PATH,
                                  lsm_folder_name)
     run_lsm_rapid_process(
         rapid_executable_location=self.RAPID_EXE_PATH,
         cygwin_bin_location=self.CYGWIN_BIN_PATH,
         rapid_io_files_location=self.OUTPUT_DATA_PATH,
         lsm_data_location=lsm_data_path,
         simulation_start_datetime=datetime(1980, 1, 1),
         simulation_end_datetime=datetime(2014, 12, 31),
         generate_rapid_namelist_file=False,
         run_rapid_simulation=False,
         use_all_processors=True,
     )
Exemplo n.º 12
0
    def test_run_era_interim_inflow_init(self):
        """
        Checks generating inflow file from ERA Interim LSM with an
        initial flows (qinit) file supplied to the run.
        """
        rapid_input_path, rapid_output_path = self._setup_automated("x-x")

        # initialization file
        qinit_file_name = "qinit_erai_t511_3hr_20030121to20030122.csv"
        qinit_file = os.path.join(rapid_input_path, qinit_file_name)
        copy(os.path.join(self.INFLOW_COMPARE_DATA_PATH, qinit_file_name),
             qinit_file)

        # run main process
        output_file_info = run_lsm_rapid_process(
            rapid_executable_location=RAPID_EXE_PATH,
            cygwin_bin_location=self.CYGWIN_BIN_PATH,
            rapid_io_files_location=self.OUTPUT_DATA_PATH,
            lsm_data_location=os.path.join(self.LSM_INPUT_DATA_PATH, 'erai3'),
            generate_rapid_namelist_file=False,
            run_rapid_simulation=True,
            generate_initialization_file=True,
            initial_flows_file=qinit_file,
            use_all_processors=True,
        )
        # qout file
        generated_qout_file = os.path.join(
            rapid_output_path, "Qout_erai_t511_3hr_20030121to20030122.nc")
        generated_qout_file_solution = os.path.join(
            self.INFLOW_COMPARE_DATA_PATH,
            "Qout_erai_t511_3hr_20030121to20030122_init.nc")

        # check output file info
        assert output_file_info[0]['x-x']['m3_riv'] == os.path.join(
            rapid_output_path,
            "m3_riv_bas_erai_t511_3hr_20030121to20030122.nc")
        assert output_file_info[0]['x-x']['qout'] == generated_qout_file

        d1 = Dataset(generated_qout_file)
        d2 = Dataset(generated_qout_file_solution)
        try:
            # close the netCDF handles even if a comparison fails
            assert_almost_equal(d1.variables['Qout'][:],
                                d2.variables['Qout'][:],
                                decimal=0)
            assert (d1.variables['rivid'][:] == d2.variables['rivid'][:]).all()
            assert (d1.variables['time'][:] == d2.variables['time'][:]).all()
            if 'lat' in d2.variables.keys():
                assert (d1.variables['lat'][:] == d2.variables['lat'][:]).all()
            if 'lon' in d2.variables.keys():
                assert (d1.variables['lon'][:] == d2.variables['lon'][:]).all()
        finally:
            d1.close()
            d2.close()
Exemplo n.º 13
0
    def _run_automatic(self,
                       lsm_folder_name,
                       watershed_folder,
                       file_datetime_pattern=None,
                       file_datetime_re_pattern=None,
                       convert_one_hour_to_three=False,
                       expected_time_step=None,
                       single_run=False,
                       filter_dates=True):
        """
        Run the automatic inflow-generation method.

        Returns the rapid input path, the rapid output path, and the
        output file info returned by ``run_lsm_rapid_process``.
        """
        rapid_input_path, rapid_output_path = self._setup_automated(
            watershed_folder)

        # single_run points directly at the watershed's input/output
        # folders instead of the top-level rapid-io folder
        if single_run:
            run_input_folder = rapid_input_path
            run_output_folder = rapid_output_path
            rapid_io_folder = None
        else:
            run_input_folder = None
            run_output_folder = None
            rapid_io_folder = self.OUTPUT_DATA_PATH

        # optionally restrict the simulation to the test date window
        start_datetime = datetime(1980, 1, 1) if filter_dates else None
        end_datetime = datetime(2014, 12, 31) if filter_dates else None

        # run main process
        output_file_info = run_lsm_rapid_process(
            rapid_executable_location=RAPID_EXE_PATH,
            cygwin_bin_location=self.CYGWIN_BIN_PATH,
            rapid_io_files_location=rapid_io_folder,
            rapid_input_location=run_input_folder,
            rapid_output_location=run_output_folder,
            lsm_data_location=os.path.join(self.LSM_INPUT_DATA_PATH,
                                           lsm_folder_name),
            simulation_start_datetime=start_datetime,
            simulation_end_datetime=end_datetime,
            generate_rapid_namelist_file=False,
            run_rapid_simulation=False,
            use_all_processors=True,
            file_datetime_pattern=file_datetime_pattern,
            file_datetime_re_pattern=file_datetime_re_pattern,
            convert_one_hour_to_three=convert_one_hour_to_three,
            expected_time_step=expected_time_step,
        )
        return rapid_input_path, rapid_output_path, output_file_info
Exemplo n.º 14
0
from datetime import datetime, timedelta
from RAPIDpy.inflow import run_lsm_rapid_process
#------------------------------------------------------------------------------
#main process
#------------------------------------------------------------------------------
if __name__ == "__main__":
    # Generate the HIWAT inflow file (and run RAPID) on one processor.
    run_lsm_rapid_process(
        rapid_executable_location='/home/sherry/rapid/run/rapid',
        rapid_io_files_location='/home/sherry/Downloads/hiwat/',
        # path to folder with LSM data
        lsm_data_location='/home/sherry/Downloads/hiwat/data/merged',
        use_all_processors=False,
        num_processors=1,
    )
Exemplo n.º 15
0
def test_generate_era20cm_inflow():
    """
    Checks generating inflow file from ERA 20CM LSM.

    Generates one m3_riv inflow file per ensemble member (no RAPID
    simulation) and compares each against its stored solution file.
    """
    print("TEST 3: TEST GENERATE INFLOW FILE FROM ERA 20CM DATA")

    rapid_output_path = os.path.join(OUTPUT_DATA_PATH, "output", "x-x")
    # Create testing environment; directories may exist from earlier runs.
    os.makedirs(RAPID_DATA_PATH, exist_ok=True)
    os.makedirs(os.path.join(OUTPUT_DATA_PATH, "output"), exist_ok=True)

    try:
        copytree(os.path.join(COMPARE_DATA_PATH, "gis", "x-x"),
                 os.path.join(RAPID_DATA_PATH, "x-x"))
    except OSError:
        # GIS input already present from a previous run
        pass

    run_lsm_rapid_process(
        rapid_executable_location=RAPID_EXE_PATH,
        cygwin_bin_location=CYGWIN_BIN_PATH,
        rapid_io_files_location=OUTPUT_DATA_PATH,
        lsm_data_location=os.path.join(LSM_INPUT_DATA_PATH, 'era20cm'),
        simulation_start_datetime=datetime(1980, 1, 1),
        simulation_end_datetime=datetime(2014, 1, 31),
        ensemble_list=range(10),
        generate_rapid_namelist_file=False,
        run_rapid_simulation=False,
        use_all_processors=True,
    )

    for i in range(10):
        #CHECK OUTPUT: one m3_riv file per ensemble member
        m3_file_name = "m3_riv_bas_era_20cm_t159_3hr_20000129to20000130_{0}.nc".format(
            i)
        generated_m3_file = os.path.join(rapid_output_path, m3_file_name)
        generated_m3_file_solution = os.path.join(INFLOW_COMPARE_DATA_PATH,
                                                  m3_file_name)

        # compare generated netCDF with the solution; close handles even
        # if an assertion fails
        d1 = Dataset(generated_m3_file)
        d2 = Dataset(generated_m3_file_solution)
        try:
            assert_almost_equal(d1.variables['m3_riv'][:],
                                d2.variables['m3_riv'][:],
                                decimal=5)
            if 'rivid' in d2.variables.keys():
                ok_((d1.variables['rivid'][:] ==
                     d2.variables['rivid'][:]).all())
            if 'lat' in d2.variables.keys():
                ok_((d1.variables['lat'][:] == d2.variables['lat'][:]).all())
            if 'lon' in d2.variables.keys():
                ok_((d1.variables['lon'][:] == d2.variables['lon'][:]).all())
        finally:
            d1.close()
            d2.close()

    #cleanup
    rmtree(os.path.join(OUTPUT_DATA_PATH, "input"))
    rmtree(os.path.join(OUTPUT_DATA_PATH, "output"))
from datetime import datetime
from RAPIDpy.inflow import run_lsm_rapid_process
# ------------------------------------------------------------------------------
# main process
# ------------------------------------------------------------------------------
if __name__ == "__main__":
    # Full ERA-5 processing: generate the inflow file, write a namelist
    # for manual re-runs, run RAPID, and derive the return-period and
    # seasonal products.
    run_lsm_rapid_process(
        rapid_executable_location='/root/rapid/run/rapid',
        rapid_io_files_location='/home/rapid-io',
        # path to folder with LSM data
        lsm_data_location='/home/ERA-5_hourly_data',
        simulation_start_datetime=datetime(1979, 1, 1),
        simulation_end_datetime=datetime(2018, 12, 31),
        # if you want to run RAPID manually later
        generate_rapid_namelist_file=True,
        # if you want to run RAPID after generating inflow file
        run_rapid_simulation=True,
        # if you want to get return period file from RAPID simulation
        generate_return_periods_file=True,
        return_period_method='weibull',
        generate_seasonal_averages_file=True,
        # if you want to get seasonal init file from RAPID simulation
        generate_seasonal_initialization_file=True,
        # if you want to generate qinit file from end of RAPID simulation
        generate_initialization_file=False,
        use_all_processors=True)
Exemplo n.º 17
0
from datetime import datetime
from RAPIDpy.inflow import run_lsm_rapid_process
#------------------------------------------------------------------------------
#main process
#------------------------------------------------------------------------------
if __name__ == "__main__":
    # Generate the ECMWF inflow file, keep a namelist for manual re-runs,
    # and run the RAPID simulation; skip all derived product files.
    run_lsm_rapid_process(
        rapid_executable_location='/home/sherry/rapid/run/rapid',
        rapid_io_files_location='/home/sherry/Downloads/ECMWF/rapid-io',
        #path to folder with LSM data
        lsm_data_location='/home/sherry/Downloads/ECMWF/ecmwf/data',
        generate_rapid_namelist_file=True,
        run_rapid_simulation=True,
        generate_return_periods_file=False,
        generate_seasonal_averages_file=False,
        generate_seasonal_initialization_file=False,
        generate_initialization_file=False,
        #defaults to use all processors available
        use_all_processors=True,
        # num_processors=1, #you can change this number if use_all_processors=False
        # cygwin_bin_location="" #if you are using Windows with Cygwin
    )
Exemplo n.º 18
0
def test_run_era_interim_inflow():
    """
    Checks generating inflow file from ERA Interim LSM.

    Runs the full process (inflow generation plus RAPID simulation) and
    compares the m3_riv, Qout and qinit outputs against stored solution
    files.
    """
    print("TEST 1: TEST GENERATE INFLOW FILE FROM ERA INTERIM DATA")

    rapid_input_path = os.path.join(RAPID_DATA_PATH, "x-x")
    rapid_output_path = os.path.join(OUTPUT_DATA_PATH, "output", "x-x")
    # Create testing environment; directories may exist from earlier runs.
    os.makedirs(RAPID_DATA_PATH, exist_ok=True)
    os.makedirs(os.path.join(OUTPUT_DATA_PATH, "output"), exist_ok=True)

    try:
        copytree(os.path.join(COMPARE_DATA_PATH, "gis", "x-x"),
                 rapid_input_path)
    except OSError:
        # GIS input already present from a previous run
        pass

    #run main process
    run_lsm_rapid_process(
        rapid_executable_location=RAPID_EXE_PATH,
        cygwin_bin_location=CYGWIN_BIN_PATH,
        rapid_io_files_location=OUTPUT_DATA_PATH,
        lsm_data_location=os.path.join(LSM_INPUT_DATA_PATH, 'erai3'),
        simulation_start_datetime=datetime(1980, 1, 1),
        simulation_end_datetime=datetime(2014, 1, 31),
        generate_rapid_namelist_file=False,
        run_rapid_simulation=True,
        generate_return_periods_file=False,
        generate_seasonal_initialization_file=False,
        generate_initialization_file=True,
        use_all_processors=True,
    )

    #CHECK OUTPUT
    #m3_riv
    m3_file_name = "m3_riv_bas_erai_t511_3hr_20030121to20030122.nc"
    generated_m3_file = os.path.join(rapid_output_path, m3_file_name)
    generated_m3_file_solution = os.path.join(INFLOW_COMPARE_DATA_PATH,
                                              m3_file_name)
    # compare generated m3_riv netCDF with the solution; close the
    # handles even if an assertion fails so cleanup can proceed
    d1 = Dataset(generated_m3_file)
    d2 = Dataset(generated_m3_file_solution)
    try:
        assert_almost_equal(d1.variables['m3_riv'][:],
                            d2.variables['m3_riv'][:],
                            decimal=5)
        if 'rivid' in d2.variables.keys():
            ok_((d1.variables['rivid'][:] == d2.variables['rivid'][:]).all())
        if 'lat' in d2.variables.keys():
            ok_((d1.variables['lat'][:] == d2.variables['lat'][:]).all())
        if 'lon' in d2.variables.keys():
            ok_((d1.variables['lon'][:] == d2.variables['lon'][:]).all())
    finally:
        d1.close()
        d2.close()

    #qout file
    qout_file_name = "Qout_erai_t511_3hr_20030121to20030122.nc"
    generated_qout_file = os.path.join(rapid_output_path, qout_file_name)
    generated_qout_file_solution = os.path.join(INFLOW_COMPARE_DATA_PATH,
                                                qout_file_name)
    d1 = Dataset(generated_qout_file)
    d2 = Dataset(generated_qout_file_solution)
    try:
        assert_almost_equal(d1.variables['Qout'][:],
                            d2.variables['Qout'][:],
                            decimal=5)
        ok_((d1.variables['rivid'][:] == d2.variables['rivid'][:]).all())
        ok_((d1.variables['time'][:] == d2.variables['time'][:]).all())
        if 'lat' in d2.variables.keys():
            ok_((d1.variables['lat'][:] == d2.variables['lat'][:]).all())
        if 'lon' in d2.variables.keys():
            ok_((d1.variables['lon'][:] == d2.variables['lon'][:]).all())
    finally:
        d1.close()
        d2.close()

    #initialization file
    qinit_file_name = "qinit_erai_t511_3hr_20030121to20030122.csv"
    generated_qinit_file = os.path.join(rapid_input_path, qinit_file_name)
    generated_qinit_file_solution = os.path.join(INFLOW_COMPARE_DATA_PATH,
                                                 qinit_file_name)

    ok_(
        compare_csv_decimal_files(generated_qinit_file,
                                  generated_qinit_file_solution))

    #cleanup
    remove_files(generated_qinit_file)
    rmtree(os.path.join(OUTPUT_DATA_PATH, "input"))
    rmtree(os.path.join(OUTPUT_DATA_PATH, "output"))
Exemplo n.º 19
0
    def test_run_era_interim_inflow(self):
        """
        Checks generating inflow file from ERA Interim LSM.

        Runs the full process (inflow generation plus RAPID simulation)
        and compares the m3_riv, Qout and qinit outputs against stored
        solution files.
        """
        rapid_input_path, rapid_output_path = self._setup_automated("x-x")

        # run main process
        output_file_info = run_lsm_rapid_process(
            rapid_executable_location=RAPID_EXE_PATH,
            cygwin_bin_location=self.CYGWIN_BIN_PATH,
            rapid_io_files_location=self.OUTPUT_DATA_PATH,
            lsm_data_location=os.path.join(self.LSM_INPUT_DATA_PATH, 'erai3'),
            simulation_start_datetime=datetime(1980, 1, 1),
            simulation_end_datetime=datetime(2014, 1, 31),
            generate_rapid_namelist_file=False,
            run_rapid_simulation=True,
            generate_return_periods_file=False,
            generate_seasonal_initialization_file=False,
            generate_initialization_file=True,
            use_all_processors=True,
        )

        # CHECK OUTPUT
        # m3_riv
        m3_file_name = "m3_riv_bas_erai_t511_3hr_20030121to20030122.nc"
        generated_m3_file = os.path.join(rapid_output_path, m3_file_name)
        generated_m3_file_solution = os.path.join(
            self.INFLOW_COMPARE_DATA_PATH, m3_file_name)
        self._compare_m3(generated_m3_file, generated_m3_file_solution)

        # qout file
        qout_file_name = "Qout_erai_t511_3hr_20030121to20030122.nc"
        generated_qout_file = os.path.join(rapid_output_path, qout_file_name)
        generated_qout_file_solution = os.path.join(
            self.INFLOW_COMPARE_DATA_PATH, qout_file_name)
        d1 = Dataset(generated_qout_file)
        d2 = Dataset(generated_qout_file_solution)
        try:
            # close the netCDF handles even if a comparison fails
            assert_almost_equal(d1.variables['Qout'][:],
                                d2.variables['Qout'][:],
                                decimal=5)
            assert (d1.variables['rivid'][:] == d2.variables['rivid'][:]).all()
            assert (d1.variables['time'][:] == d2.variables['time'][:]).all()
            if 'lat' in d2.variables.keys():
                assert (d1.variables['lat'][:] == d2.variables['lat'][:]).all()
            if 'lon' in d2.variables.keys():
                assert (d1.variables['lon'][:] == d2.variables['lon'][:]).all()
        finally:
            d1.close()
            d2.close()

        # check output file info
        assert output_file_info[0]['x-x']['m3_riv'] == generated_m3_file
        assert output_file_info[0]['x-x']['qout'] == generated_qout_file

        # initialization file
        qinit_file_name = "qinit_erai_t511_3hr_20030121to20030122.csv"
        generated_qinit_file = os.path.join(rapid_input_path, qinit_file_name)
        generated_qinit_file_solution = os.path.join(
            self.INFLOW_COMPARE_DATA_PATH, qinit_file_name)

        assert compare_csv_decimal_files(generated_qinit_file,
                                         generated_qinit_file_solution)

        # additional cleanup
        remove_files(generated_qinit_file)
Exemplo n.º 20
0
import os
import sys
from datetime import datetime
from re import split

from RAPIDpy.inflow import run_lsm_rapid_process

# Usage:
#   python <script> RAPID_EXE RAPID_IO_DIR LSM_DATA_DIR START_DATE END_DATE
# where START_DATE / END_DATE are year-month-day strings using either
# '-' or '/' as the separator (e.g. 2010-1-1 or 2010/1/1).

# sys.argv entries are already str; split the date on '-' or '/' and
# convert each component (year, month, day, ...) to int for datetime(*...).
start_date = [int(part) for part in split(r"/|-", sys.argv[4])]
end_date = [int(part) for part in split(r"/|-", sys.argv[5])]

run_lsm_rapid_process(
    rapid_executable_location=sys.argv[1],
    rapid_io_files_location=sys.argv[2],
    lsm_data_location=sys.argv[3],
    simulation_start_datetime=datetime(*start_date),
    simulation_end_datetime=datetime(*end_date),
    generate_rapid_namelist_file=True,
    run_rapid_simulation=True,
    generate_return_periods_file=True,
    return_period_method='weibull',
    generate_seasonal_averages_file=True,
    generate_seasonal_initialization_file=True,
    generate_initialization_file=False,
    use_all_processors=False,
    num_processors=1,
)
def run_lsm_forecast_process(rapid_executable_location,
                             rapid_io_files_location,
                             lsm_forecast_location,
                             main_log_directory,
                             timedelta_between_forecasts=timedelta(seconds=12 *
                                                                   3600),
                             historical_data_location="",
                             warning_flow_threshold=None):
    """
    Run the LSM-driven forecast workflow for every valid watershed.

    Workflow (all console output is captured into a timestamped log file
    under *main_log_directory*):

    1. On the first run only (no Qinit file from the previous forecast),
       seed initial flows from ``seasonal_average*.nc`` files found in
       *historical_data_location*.
    2. For each watershed, run RAPID via ``run_lsm_rapid_process`` on the
       forecast data, then delete the intermediate m3_riv inflow file.
    3. If ``return_period*.nc`` files exist for the watershed, generate
       warning points for the forecast (best-effort).
    4. Compute initial flows to seed the next forecast cycle (best-effort).

    Parameters
    ----------
    rapid_executable_location: str
        Path to RAPID executable.
    rapid_io_files_location: str
        Path to RAPID input/output directory.
    lsm_forecast_location: str
        Path to WRF forecast directory.
    main_log_directory: str
        Path to directory to store main logs.
    timedelta_between_forecasts: :obj:`datetime.timedelta`
        Time difference between forecasts. Default is 12 hours.
    historical_data_location: str, optional
        Path to return period and seasonal data.
    warning_flow_threshold: float, optional
        Minimum value for return period in m3/s to generate warning.
        Default is None.
    """
    # NOTE(review): datetime.utcnow() returns a naive datetime; here it is
    # only used for log naming and elapsed-time reporting.
    time_begin_all = datetime.utcnow()

    # One timestamped log file per invocation,
    # e.g. spt_compute_lsm_200101120000.log
    log_file_path = os.path.join(
        main_log_directory, "spt_compute_lsm_{0}.log".format(
            time_begin_all.strftime("%y%m%d%H%M%S")))

    with CaptureStdOutToLog(log_file_path):
        # Remove stale logs from earlier runs before starting this one.
        clean_main_logs(main_log_directory, prepend="spt_compute_lsm_")
        # get list of correctly formatted rapid input directories in
        # the rapid directory
        rapid_input_directories = get_valid_watershed_list(
            os.path.join(rapid_io_files_location, "input"))

        # Forecast start time comes from the first timestep of the
        # (lexically sorted) forecast netCDF files.
        current_forecast_start_datetime = \
            determine_start_end_timestep(sorted(glob(os.path.join(lsm_forecast_location, "*.nc"))))[0]

        forecast_date_string = current_forecast_start_datetime.strftime(
            "%Y%m%dt%H")
        # look for past forecast qinit (named after the PREVIOUS cycle's
        # start time: current start minus timedelta_between_forecasts)
        past_forecast_date_string = (
            current_forecast_start_datetime -
            timedelta_between_forecasts).strftime("%Y%m%dt%H")
        init_file_name = 'Qinit_{0}.csv'.format(past_forecast_date_string)

        # PHASE 1: SEASONAL INITIALIZATION ON FIRST RUN
        # A watershed is seeded only when it has a seasonal_average*.nc file
        # and no Qinit file left over from a previous forecast cycle.
        if historical_data_location and os.path.exists(
                historical_data_location):
            seasonal_init_job_list = []
            # iterate over models
            for rapid_input_directory in rapid_input_directories:
                seasonal_master_watershed_input_directory = os.path.join(
                    rapid_io_files_location, "input", rapid_input_directory)
                init_file_path = os.path.join(
                    seasonal_master_watershed_input_directory, init_file_name)
                historical_watershed_directory = os.path.join(
                    historical_data_location, rapid_input_directory)
                if os.path.exists(historical_watershed_directory):
                    seasonal_streamflow_file = glob(
                        os.path.join(historical_watershed_directory,
                                     "seasonal_average*.nc"))
                    if seasonal_streamflow_file and not os.path.exists(
                            init_file_path):
                        # (seasonal file, watershed input dir, Qinit path)
                        seasonal_init_job_list.append((
                            seasonal_streamflow_file[0],
                            seasonal_master_watershed_input_directory,
                            init_file_path,
                        ))

            if seasonal_init_job_list:
                if len(seasonal_init_job_list) > 1:
                    # Parallelize seeding across watersheds.
                    # NOTE(review): the iterator returned by imap() is never
                    # consumed, so worker exceptions are silently discarded;
                    # consider pool.map() instead — confirm intended.
                    seasonal_pool = mp_Pool()
                    seasonal_pool.imap(
                        compute_seasonal_average_initial_flows_multiprocess_worker,
                        seasonal_init_job_list,
                        chunksize=1)
                    seasonal_pool.close()
                    seasonal_pool.join()
                else:
                    # Single job: no need to spin up a process pool.
                    compute_seasonal_average_initial_flows_multiprocess_worker(
                        seasonal_init_job_list[0])

        # PHASE 2: MAIN RUN
        for rapid_input_directory in rapid_input_directories:
            master_watershed_input_directory = os.path.join(
                rapid_io_files_location, "input", rapid_input_directory)
            master_watershed_output_directory = os.path.join(
                rapid_io_files_location, 'output', rapid_input_directory,
                forecast_date_string)
            watershed, subbasin = get_watershed_subbasin_from_folder(
                rapid_input_directory)

            # PHASE 2.1 RUN RAPID
            output_file_information = run_lsm_rapid_process(
                rapid_executable_location=rapid_executable_location,
                lsm_data_location=lsm_forecast_location,
                rapid_input_location=master_watershed_input_directory,
                rapid_output_location=master_watershed_output_directory,
                initial_flows_file=os.path.join(
                    master_watershed_input_directory, init_file_name),
            )

            # Paths to the generated streamflow (qout) and inflow (m3_riv)
            # files for this watershed.
            forecast_file = output_file_information[0][rapid_input_directory][
                'qout']
            m3_riv_file = output_file_information[0][rapid_input_directory][
                'm3_riv']

            # The m3_riv file is an intermediate product; remove it to save
            # disk space (best-effort).
            try:
                os.remove(m3_riv_file)
            except OSError:
                pass

            # PHASE 2.2: GENERATE WARNINGS
            forecast_directory = os.path.join(rapid_io_files_location,
                                              'output', rapid_input_directory,
                                              forecast_date_string)

            historical_watershed_directory = os.path.join(
                historical_data_location, rapid_input_directory)
            print('Historical watershed dir: ', historical_watershed_directory)
            if os.path.exists(historical_watershed_directory):
                return_period_files = glob(
                    os.path.join(historical_watershed_directory,
                                 "return_period*.nc"))
                print('Return periiod files: ', return_period_files)
                if return_period_files:
                    print("Generating warning points for {0}-{1} from {2}".
                          format(watershed, subbasin, forecast_date_string))
                    # Best-effort: a warning-generation failure is logged but
                    # must not abort the remaining watersheds.
                    try:
                        generate_lsm_warning_points(forecast_file,
                                                    return_period_files[0],
                                                    forecast_directory,
                                                    warning_flow_threshold)
                    except Exception as ex:
                        print(ex)
                        pass

            # PHASE 2.3: GENERATE INITIALIZATION FOR NEXT RUN
            print("Initializing flows for {0}-{1} from {2}".format(
                watershed, subbasin, forecast_date_string))
            # Best-effort: initialization failure should not stop the loop.
            try:
                compute_initial_flows_lsm(
                    forecast_file, master_watershed_input_directory,
                    current_forecast_start_datetime +
                    timedelta_between_forecasts)
            except Exception as ex:
                print(ex)
                pass

        # print info to user
        time_end = datetime.utcnow()
        print("Time Begin: {0}".format(time_begin_all))
        print("Time Finish: {0}".format(time_end))
        print("TOTAL TIME: {0}".format(time_end - time_begin_all))
Exemplo n.º 22
0
from datetime import datetime
from RAPIDpy.inflow import run_lsm_rapid_process
# -----------------------------------------------------------------------------
# main process
# -----------------------------------------------------------------------------
if __name__ == "__main__":
    # Simulation start/end datetimes and file-name patterns are omitted on
    # purpose so that run_lsm_rapid_process autodetects them from the files
    # in lsm_data_location.
    run_lsm_rapid_process(
        rapid_executable_location='/home/sherry/rapid/run/rapid',
        rapid_io_files_location='/home/sherry/Downloads/lis',
        # path to folder with LSM data
        lsm_data_location='/home/sherry/Downloads/lis/data/201808',
        use_all_processors=False,  # defaults to use all processors available
        num_processors=1,  # change this number if use_all_processors=False
    )
Exemplo n.º 23
0
from datetime import datetime
from RAPIDpy.inflow import run_lsm_rapid_process
import os
#------------------------------------------------------------------------------
#main process
#------------------------------------------------------------------------------
if __name__ == "__main__":
    # Resolve sibling directories relative to this script's location.
    script_dir = os.path.dirname(__file__)
    rapid_input_dir = os.path.abspath(
        os.path.join(script_dir, '..', 'rapid_io_files'))
    # path to folder with LSM data
    lsm_data_dir = os.path.abspath(os.path.join(script_dir, '..', 'lsm_data'))
    streamflow_output_dir = os.path.abspath(
        os.path.join(script_dir, '..', 'streamflow'))

    run_lsm_rapid_process(
        rapid_executable_location='/root/rapid/run/rapid',
        rapid_input_location=rapid_input_dir,
        lsm_data_location=lsm_data_dir,
        rapid_output_location=streamflow_output_dir,
        # single-process run; num_processors only applies when
        # use_all_processors is False
        use_all_processors=False,
        num_processors=1,
    )