Example #1
def test_generate_rapid_input_file():
    """
    Checks RAPID input file generation with valid input
    """
    print("TEST 1: GENERATE NAMELIST FILE")
    rapid_manager = RAPID(
        rapid_executable_location=RAPID_EXE_PATH,
        cygwin_bin_location=CYGWIN_BIN_PATH,
        use_all_processors=True,
        ZS_TauR=24 * 3600,  #duration of routing procedure (time step of runoff data)
        ZS_dtR=15 * 60,  #internal routing time step
        ZS_TauM=12 * 24 * 3600,  #total simulation time
        ZS_dtM=24 * 3600  #input time step
    )
    rapid_manager.update_parameters(rapid_connect_file='rapid_connect.csv',
                                    Vlat_file='m3_riv.nc',
                                    riv_bas_id_file='riv_bas_id.csv',
                                    k_file='k.csv',
                                    x_file='x.csv',
                                    Qout_file='Qout.nc')
    generated_input_file = os.path.join(OUTPUT_DATA_PATH,
                                        "rapid_namelist-GENERATE")
    rapid_manager.generate_namelist_file(generated_input_file)
    generated_input_file_solution = os.path.join(COMPARE_DATA_PATH,
                                                 "rapid_namelist-GENERATE")

    assert (fcmp(generated_input_file, generated_input_file_solution))

    remove_files(generated_input_file)
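The examples above and below reference names such as RAPID_EXE_PATH, OUTPUT_DATA_PATH, fcmp, ok_, and remove_files without showing where they come from. A minimal test-module header that would make Example #1 runnable might look like the sketch below; the directory layout, the Cygwin path, and the fcmp alias are assumptions for illustration, not definitions taken from the original test suite.

import os
from filecmp import cmp as fcmp  # assumed alias: byte-wise comparison of namelist files
from shutil import copy

from nose.tools import ok_  # some examples use ok_() instead of a bare assert
from RAPIDpy import RAPID

# Assumed test data layout (placeholder paths).
MAIN_TESTS_FOLDER = os.path.dirname(os.path.abspath(__file__))
INPUT_DATA_PATH = os.path.join(MAIN_TESTS_FOLDER, 'data', 'input')
COMPARE_DATA_PATH = os.path.join(MAIN_TESTS_FOLDER, 'data', 'compare')
OUTPUT_DATA_PATH = os.path.join(MAIN_TESTS_FOLDER, 'data', 'output')

# Assumed locations of the RAPID executable and, on Windows, the Cygwin bin directory.
RAPID_EXE_PATH = os.path.join(MAIN_TESTS_FOLDER, '..', 'rapid', 'run', 'rapid')
CYGWIN_BIN_PATH = 'C:\\cygwin64\\bin'


def remove_files(*file_paths):
    """Delete generated test files, ignoring any that do not exist."""
    for file_path in file_paths:
        try:
            os.remove(file_path)
        except OSError:
            pass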
Example #2
def test_update_rapid_input_file():
    """
    Checks RAPID input file update with valid input
    """
    print("TEST 2: UPDATE NAMELIST FILE")
    rapid_manager = RAPID(
        rapid_executable_location=RAPID_EXE_PATH,
        cygwin_bin_location=CYGWIN_BIN_PATH,
        use_all_processors=True,
    )
    rapid_manager.update_parameters(rapid_connect_file='rapid_connect.csv',
                                    Vlat_file='m3_riv.nc',
                                    riv_bas_id_file='riv_bas_id.csv',
                                    k_file='k.csv',
                                    x_file='x.csv',
                                    Qout_file='Qout.nc')

    original_input_file = os.path.join(INPUT_DATA_PATH, "rapid_namelist_valid")

    updated_input_file = os.path.join(OUTPUT_DATA_PATH,
                                      "rapid_namelist-UPDATE")

    copy(original_input_file, updated_input_file)
    rapid_manager.update_namelist_file(updated_input_file)
    updated_input_file_solution = os.path.join(COMPARE_DATA_PATH,
                                               "rapid_namelist-UPDATE")

    assert (fcmp(updated_input_file, updated_input_file_solution))

    remove_files(updated_input_file)
Example #3
def test_update_rapid_input_file():
    """
    Checks RAPID input file update with valid input
    """
    print("TEST 2: UPDATE NAMELIST FILE")
    rapid_manager = RAPID(rapid_executable_location=RAPID_EXE_PATH,
                          cygwin_bin_location=CYGWIN_BIN_PATH,
                          use_all_processors=True,                          
                         )
    rapid_manager.update_parameters(rapid_connect_file='rapid_connect.csv',
                                    Vlat_file='m3_riv.nc',
                                    riv_bas_id_file='riv_bas_id.csv',
                                    k_file='k.csv',
                                    x_file='x.csv',
                                    Qout_file='Qout.nc'
                                    )

    original_input_file = os.path.join(INPUT_DATA_PATH,
                                       "rapid_namelist_valid")
    
    updated_input_file = os.path.join(OUTPUT_DATA_PATH, 
                                      "rapid_namelist-UPDATE")

    copy(original_input_file, updated_input_file)
    rapid_manager.update_namelist_file(updated_input_file)
    updated_input_file_solution = os.path.join(COMPARE_DATA_PATH, 
                                               "rapid_namelist-UPDATE")


    ok_(fcmp(updated_input_file, updated_input_file_solution))
    
    remove_files(updated_input_file)
Example #4
def test_update_rapid_numbers_input_file():
    """
    Checks RAPID input file update with number validation
    """
    print("TEST 4: GENERATE NUMBERS FOR NAMELIST FILE")
    rapid_manager = RAPID(
        rapid_executable_location=RAPID_EXE_PATH,
        cygwin_bin_location=CYGWIN_BIN_PATH,
        use_all_processors=True,
        rapid_connect_file=os.path.join(INPUT_DATA_PATH, 'rapid_connect.csv'),
        riv_bas_id_file=os.path.join(INPUT_DATA_PATH, 'riv_bas_id.csv'),
    )
    rapid_manager.update_reach_number_data()

    rapid_manager.update_parameters(rapid_connect_file='rapid_connect.csv',
                                    Vlat_file='m3_nasa_lis_3hr_20020830.nc',
                                    riv_bas_id_file='riv_bas_id.csv',
                                    k_file='k.csv',
                                    x_file='x.csv',
                                    Qout_file='Qout.nc')

    generated_input_file = os.path.join(OUTPUT_DATA_PATH,
                                        "rapid_namelist-GENERATE-NUMBERS")

    rapid_manager.generate_namelist_file(generated_input_file)

    generated_input_file_solution = os.path.join(
        COMPARE_DATA_PATH, "rapid_namelist-GENERATE-NUMBERS")

    assert (fcmp(generated_input_file, generated_input_file_solution))

    remove_files(generated_input_file)
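In Examples #4 and #6, update_reach_number_data() fills in the reach counts (the RAPID namelist entries IS_riv_tot, IS_riv_bas, and IS_max_up) from rapid_connect.csv and riv_bas_id.csv before the namelist is generated. As a rough illustration of where those numbers come from, and not of RAPIDpy's internal implementation, counting them by hand could look like this:

import csv

def count_reach_numbers(rapid_connect_file, riv_bas_id_file):
    """Derive reach counts from the connectivity and basin ID files (illustrative sketch)."""
    with open(rapid_connect_file) as connect_csv:
        connect_rows = [row for row in csv.reader(connect_csv) if row]
    with open(riv_bas_id_file) as riv_bas_csv:
        riv_bas_rows = [row for row in csv.reader(riv_bas_csv) if row]
    is_riv_tot = len(connect_rows)  # reaches in the whole connectivity file
    is_riv_bas = len(riv_bas_rows)  # reaches in the basin subset
    # The third column of rapid_connect.csv holds the upstream reach count per river ID.
    is_max_up = max(int(row[2]) for row in connect_rows)
    return is_riv_tot, is_riv_bas, is_max_up

The resulting values could then be passed through update_parameters(IS_riv_tot=..., IS_riv_bas=..., IS_max_up=...) if the helper were not used.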
Example #5
def test_generate_rapid_input_file():
    """
    Checks RAPID input file generation with valid input
    """
    print("TEST 1: GENERATE NAMELIST FILE")
    rapid_manager = RAPID(rapid_executable_location=RAPID_EXE_PATH,
                          cygwin_bin_location=CYGWIN_BIN_PATH,
                          use_all_processors=True,                          
                          ZS_TauR = 24*3600, #duration of routing procedure (time step of runoff data)
                          ZS_dtR = 15*60, #internal routing time step
                          ZS_TauM = 12*24*3600, #total simulation time 
                          ZS_dtM = 24*3600 #input time step 
                         )
    rapid_manager.update_parameters(rapid_connect_file='rapid_connect.csv',
                                    Vlat_file='m3_riv.nc',
                                    riv_bas_id_file='riv_bas_id.csv',
                                    k_file='k.csv',
                                    x_file='x.csv',
                                    Qout_file='Qout.nc'
                                    )
    generated_input_file = os.path.join(OUTPUT_DATA_PATH, 
                                        "rapid_namelist-GENERATE")
    rapid_manager.generate_namelist_file(generated_input_file)
    generated_input_file_solution = os.path.join(COMPARE_DATA_PATH, 
                                                 "rapid_namelist-GENERATE")


    ok_(fcmp(generated_input_file, generated_input_file_solution))
    
    remove_files(generated_input_file)
Example #6
def test_update_rapid_numbers_input_file():
    """
    Checks RAPID input file update with number validation
    """
    print("TEST 4: GENERATE NUMBERS FOR NAMELIST FILE")
    rapid_manager = RAPID(rapid_executable_location=RAPID_EXE_PATH,
                          cygwin_bin_location=CYGWIN_BIN_PATH,
                          use_all_processors=True,
                          rapid_connect_file=os.path.join(INPUT_DATA_PATH, 'rapid_connect.csv'),
                          riv_bas_id_file=os.path.join(INPUT_DATA_PATH, 'riv_bas_id.csv'),
                         )
    rapid_manager.update_reach_number_data()
                          
    rapid_manager.update_parameters(rapid_connect_file='rapid_connect.csv',
                                    Vlat_file='m3_nasa_lis_3hr_20020830.nc',
                                    riv_bas_id_file='riv_bas_id.csv',
                                    k_file='k.csv',
                                    x_file='x.csv',
                                    Qout_file='Qout.nc'
                                    )

    generated_input_file = os.path.join(OUTPUT_DATA_PATH,
                                        "rapid_namelist-GENERATE-NUMBERS")

    rapid_manager.generate_namelist_file(generated_input_file)
                          
    generated_input_file_solution = os.path.join(COMPARE_DATA_PATH, 
                                                 "rapid_namelist-GENERATE-NUMBERS")


    ok_(fcmp(generated_input_file, generated_input_file_solution))
    
    remove_files(generated_input_file)
Example #7
def test_generate_qinit_file():
    """
    This tests the qinit file function to create an input qinit file for RAPID
    """
    print("TEST 11: TEST GENERATE QINIT FILE")
    rapid_manager = RAPID(rapid_executable_location=RAPID_EXE_PATH,
                          cygwin_bin_location=CYGWIN_BIN_PATH,
                          rapid_connect_file=os.path.join(
                              INPUT_DATA_PATH, 'rapid_connect.csv'))

    #test with original rapid output
    input_qout_file = os.path.join(COMPARE_DATA_PATH,
                                   'Qout_nasa_lis_3hr_20020830.nc')
    original_qout_file = os.path.join(OUTPUT_DATA_PATH,
                                      'Qout_nasa_lis_3hr_20020830.nc')
    copy(input_qout_file, original_qout_file)

    qinit_original_rapid_qout = os.path.join(OUTPUT_DATA_PATH,
                                             'qinit_original_rapid_qout.csv')
    rapid_manager.update_parameters(Qout_file=original_qout_file)
    rapid_manager.generate_qinit_from_past_qout(
        qinit_file=qinit_original_rapid_qout)

    qinit_original_rapid_qout_solution = os.path.join(
        COMPARE_DATA_PATH, 'qinit_original_rapid_qout.csv')
    ok_(
        compare_csv_decimal_files(qinit_original_rapid_qout,
                                  qinit_original_rapid_qout_solution,
                                  header=False))

    #test with CF rapid output and alternate time index
    cf_input_qout_file = os.path.join(COMPARE_DATA_PATH,
                                      'Qout_nasa_lis_3hr_20020830_CF.nc')
    cf_qout_file = os.path.join(OUTPUT_DATA_PATH,
                                'Qout_nasa_lis_3hr_20020830_CF.nc')
    copy(cf_input_qout_file, cf_qout_file)

    qinit_cf_rapid_qout = os.path.join(OUTPUT_DATA_PATH,
                                       'qinit_cf_rapid_qout.csv')
    rapid_manager.update_parameters(Qout_file=cf_qout_file)
    rapid_manager.generate_qinit_from_past_qout(qinit_file=qinit_cf_rapid_qout,
                                                time_index=5)

    qinit_cf_rapid_qout_solution = os.path.join(COMPARE_DATA_PATH,
                                                'qinit_cf_rapid_qout.csv')
    ok_(
        compare_csv_decimal_files(qinit_cf_rapid_qout,
                                  qinit_cf_rapid_qout_solution,
                                  header=False))

    remove_files(original_qout_file, qinit_original_rapid_qout, cf_qout_file,
                 qinit_cf_rapid_qout)
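Example #7 exercises generate_qinit_from_past_qout, which takes one time slice of discharge from a Qout netCDF file (the last time step unless time_index is given) and writes it to a CSV that RAPID can use as Qinit_file. A rough stand-in using netCDF4 directly is sketched below; the 'Qout' variable name matches the files used here, while the (time, rivid) dimension order and the plain one-column CSV layout are assumptions.

import csv
import netCDF4

def write_qinit_from_qout(qout_nc_path, qinit_csv_path, time_index=-1):
    """Dump the discharge at one time step of a Qout file into a qinit CSV (sketch)."""
    with netCDF4.Dataset(qout_nc_path) as qout_nc:
        streamflow = qout_nc.variables['Qout'][time_index, :]  # default: last time step
    with open(qinit_csv_path, 'w', newline='') as qinit_csv:
        writer = csv.writer(qinit_csv)
        for flow_value in streamflow:
            writer.writerow([float(flow_value)])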
Example #8
def test_update_rapid_numbers_forcing_input_file():
    """
    Checks RAPID input file update with forcing data and number validation
    """
    rapid_manager = RAPID(rapid_executable_location=RAPID_EXE_PATH,
                          cygwin_bin_location=CYGWIN_BIN_PATH,
                          use_all_processors=True,
                          rapid_connect_file=os.path.join(INPUT_DATA_PATH,
                                                          'rapid_connect.csv'),
                          riv_bas_id_file=os.path.join(INPUT_DATA_PATH,
                                                       'riv_bas_id.csv'),
                          for_tot_id_file=os.path.join(INPUT_DATA_PATH,
                                                       'for_tot_id.csv'),
                          for_use_id_file=os.path.join(INPUT_DATA_PATH,
                                                       'for_use_id.csv'),
                          ZS_dtF=3 * 60 * 60,
                          BS_opt_for=True
                          )

    rapid_manager.update_reach_number_data()

    rapid_manager.update_parameters(rapid_connect_file='rapid_connect.csv',
                                    Vlat_file='m3_nasa_lis_3hr_20020830.nc',
                                    riv_bas_id_file='riv_bas_id.csv',
                                    k_file='k.csv',
                                    x_file='x.csv',
                                    Qout_file='Qout.nc',
                                    Qfor_file='qfor.csv',
                                    for_tot_id_file='for_tot_id.csv',
                                    for_use_id_file='for_use_id.csv',
                                    )

    generated_input_file = os.path.join(OUTPUT_DATA_PATH,
                                        "rapid_namelist-GENERATE-NUMBERS-FORCING")

    rapid_manager.generate_namelist_file(generated_input_file)

    generated_input_file_solution = os.path.join(COMPARE_DATA_PATH,
                                                 "rapid_namelist-GENERATE-NUMBERS-FORCING")


    assert (fcmp(generated_input_file, generated_input_file_solution))

    remove_files(generated_input_file)
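Example #8 switches on RAPID's forcing option: BS_opt_for=True with ZS_dtF=3*60*60 (a three-hour forcing interval) plus the qfor.csv, for_tot_id.csv, and for_use_id.csv files. The worker functions in Examples #10 and #11 only enable forcing when those files are actually present; a simplified version of that guard, written against os.path.exists instead of the project's case_insensitive_file_search helper, might look like:

import os

def add_forcing_if_available(rapid_manager, input_directory):
    """Enable RAPID forcing only when all three forcing files exist (sketch)."""
    forcing_files = {
        'Qfor_file': os.path.join(input_directory, 'qfor.csv'),
        'for_tot_id_file': os.path.join(input_directory, 'for_tot_id.csv'),
        'for_use_id_file': os.path.join(input_directory, 'for_use_id.csv'),
    }
    if all(os.path.exists(path) for path in forcing_files.values()):
        rapid_manager.update_parameters(ZS_dtF=3 * 60 * 60,  # forcing time interval
                                        BS_opt_for=True,
                                        **forcing_files)
    else:
        print('WARNING: Forcing files not found. Skipping forcing ...')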
Example #9
def test_generate_qinit_file():
    """
    This tests the qinit file function to create an input qinit file for RAPID
    """
    print("TEST 11: TEST GENERATE QINIT FILE")
    rapid_manager = RAPID(rapid_executable_location=RAPID_EXE_PATH,
                          cygwin_bin_location=CYGWIN_BIN_PATH,
                          rapid_connect_file=os.path.join(INPUT_DATA_PATH, 'rapid_connect.csv')
                         )

    #test with original rapid output
    input_qout_file = os.path.join(COMPARE_DATA_PATH, 'Qout_nasa_lis_3hr_20020830.nc')
    original_qout_file = os.path.join(OUTPUT_DATA_PATH, 'Qout_nasa_lis_3hr_20020830.nc')
    copy(input_qout_file, original_qout_file)

    qinit_original_rapid_qout = os.path.join(OUTPUT_DATA_PATH, 'qinit_original_rapid_qout.csv')
    rapid_manager.update_parameters(Qout_file=original_qout_file)
    rapid_manager.generate_qinit_from_past_qout(qinit_file=qinit_original_rapid_qout)
    
    qinit_original_rapid_qout_solution = os.path.join(COMPARE_DATA_PATH, 'qinit_original_rapid_qout.csv')
    ok_(compare_csv_decimal_files(qinit_original_rapid_qout, qinit_original_rapid_qout_solution, header=False))

    #test with CF rapid output and alternate time index
    cf_input_qout_file = os.path.join(COMPARE_DATA_PATH, 'Qout_nasa_lis_3hr_20020830_CF.nc')
    cf_qout_file = os.path.join(OUTPUT_DATA_PATH, 'Qout_nasa_lis_3hr_20020830_CF.nc')
    copy(cf_input_qout_file, cf_qout_file)

    qinit_cf_rapid_qout = os.path.join(OUTPUT_DATA_PATH, 'qinit_cf_rapid_qout.csv')
    rapid_manager.update_parameters(Qout_file=cf_qout_file)
    rapid_manager.generate_qinit_from_past_qout(qinit_file=qinit_cf_rapid_qout,
                                                time_index=5)
                                                
    qinit_cf_rapid_qout_solution = os.path.join(COMPARE_DATA_PATH, 'qinit_cf_rapid_qout.csv')
    ok_(compare_csv_decimal_files(qinit_cf_rapid_qout, qinit_cf_rapid_qout_solution, header=False))

    remove_files(original_qout_file, 
                 qinit_original_rapid_qout,
                 cf_qout_file,
                 qinit_cf_rapid_qout
                 )
Example #10
def ecmwf_rapid_multiprocess_worker(
        node_path, rapid_input_directory, ecmwf_forecast,
        forecast_date_timestep, watershed, subbasin, rapid_executable_location,
        init_flow, initialization_time_step, BS_opt_dam, IS_dam_tot,
        IS_dam_use, dam_tot_id_file, dam_use_id_file, dam_file):
    """
    Multiprocess worker function
    """
    time_start_all = datetime.datetime.utcnow()

    os.chdir(node_path)

    ensemble_number = get_ensemble_number_from_forecast(ecmwf_forecast)

    def remove_file(file_name):
        """
        remove file
        """
        try:
            os.remove(file_name)
        except OSError:
            pass

    #prepare ECMWF file for RAPID
    print("INFO: Running all ECMWF downscaling for watershed: {0}-{1} {2} {3}".
          format(watershed, subbasin, forecast_date_timestep, ensemble_number))

    #set up RAPID manager
    rapid_connect_file = case_insensitive_file_search(rapid_input_directory,
                                                      r'rapid_connect\.csv')

    rapid_manager = RAPID(
        rapid_executable_location=rapid_executable_location,
        rapid_connect_file=rapid_connect_file,
        riv_bas_id_file=case_insensitive_file_search(rapid_input_directory,
                                                     r'riv_bas_id.*?\.csv'),
        k_file=case_insensitive_file_search(rapid_input_directory, r'k\.csv'),
        x_file=case_insensitive_file_search(rapid_input_directory, r'x\.csv'),
        ZS_dtM=3 * 60 * 60,  #RAPID internal loop time interval
    )

    # check for forcing flows
    try:
        rapid_manager.update_parameters(
            Qfor_file=case_insensitive_file_search(rapid_input_directory,
                                                   r'qfor\.csv'),
            for_tot_id_file=case_insensitive_file_search(
                rapid_input_directory, r'for_tot_id\.csv'),
            for_use_id_file=case_insensitive_file_search(
                rapid_input_directory, r'for_use_id\.csv'),
            ZS_dtF=3 * 60 * 60,  # forcing time interval
            BS_opt_for=True)
    except Exception:
        print('WARNING: Forcing files not found. Skipping forcing ...')
        pass

    rapid_manager.update_reach_number_data()

    outflow_file_name = os.path.join(
        node_path, 'Qout_%s_%s_%s.nc' %
        (watershed.lower(), subbasin.lower(), ensemble_number))

    qinit_file = ""
    BS_opt_Qinit = False
    if (init_flow):
        #check for qinit file
        past_date = (datetime.datetime.strptime(forecast_date_timestep[:11],"%Y%m%d.%H") - \
                     datetime.timedelta(hours=initialization_time_step)).strftime("%Y%m%dt%H")
        qinit_file = os.path.join(rapid_input_directory,
                                  'Qinit_%s.csv' % past_date)
        BS_opt_Qinit = qinit_file and os.path.exists(qinit_file)
        if not BS_opt_Qinit:
            print("Error: {0} not found. Not initializing ...".format(
                qinit_file))
            qinit_file = ""

    try:
        comid_lat_lon_z_file = case_insensitive_file_search(
            rapid_input_directory, r'comid_lat_lon_z.*?\.csv')
    except Exception:
        comid_lat_lon_z_file = ""
        print(
            "WARNING: comid_lat_lon_z_file not found. Not adding lat/lon/z to output file ..."
        )

    RAPIDinflowECMWF_tool = CreateInflowFileFromECMWFRunoff()
    forecast_resolution = RAPIDinflowECMWF_tool.dataIdentify(ecmwf_forecast)
    #determine weight table from resolution
    if forecast_resolution == "HighRes":
        #HIGH RES
        grid_name = RAPIDinflowECMWF_tool.getGridName(ecmwf_forecast,
                                                      high_res=True)
        #generate inflows for each timestep
        weight_table_file = case_insensitive_file_search(
            rapid_input_directory, r'weight_{0}\.csv'.format(grid_name))

        inflow_file_name_1hr = os.path.join(
            node_path, 'm3_riv_bas_1hr_%s.nc' % ensemble_number)
        inflow_file_name_3hr = os.path.join(
            node_path, 'm3_riv_bas_3hr_%s.nc' % ensemble_number)
        inflow_file_name_6hr = os.path.join(
            node_path, 'm3_riv_bas_6hr_%s.nc' % ensemble_number)
        qinit_3hr_file = os.path.join(node_path, 'Qinit_3hr.csv')
        qinit_6hr_file = os.path.join(node_path, 'Qinit_6hr.csv')

        try:

            RAPIDinflowECMWF_tool.execute(ecmwf_forecast, weight_table_file,
                                          inflow_file_name_1hr, grid_name,
                                          "1hr")

            #from Hour 0 to 90 (the first 91 time points) are of 1 hr time interval
            interval_1hr = 1 * 60 * 60  #1hr
            duration_1hr = 90 * 60 * 60  #90hrs
            rapid_manager.update_parameters(
                ZS_TauR=interval_1hr,  #duration of routing procedure (time step of runoff data)
                ZS_dtR=15 * 60,  #internal routing time step
                ZS_TauM=duration_1hr,  #total simulation time
                ZS_dtM=interval_1hr,  #RAPID internal loop time interval
                ZS_dtF=interval_1hr,  # forcing time interval
                Vlat_file=inflow_file_name_1hr,
                Qout_file=outflow_file_name,
                Qinit_file=qinit_file,
                BS_opt_Qinit=BS_opt_Qinit,
                BS_opt_dam=BS_opt_dam,
                IS_dam_tot=IS_dam_tot,
                IS_dam_use=IS_dam_use,
                dam_tot_id_file=dam_tot_id_file,
                dam_use_id_file=dam_use_id_file,
                dam_file=dam_file)
            rapid_manager.run()

            #generate Qinit from 1hr
            rapid_manager.generate_qinit_from_past_qout(qinit_3hr_file)

            #then from Hour 90 to 144 (19 time points) are of 3 hour time interval
            RAPIDinflowECMWF_tool.execute(ecmwf_forecast, weight_table_file,
                                          inflow_file_name_3hr, grid_name,
                                          "3hr_subset")
            interval_3hr = 3 * 60 * 60  #3hr
            duration_3hr = 54 * 60 * 60  #54hrs
            qout_3hr = os.path.join(node_path, 'Qout_3hr.nc')
            rapid_manager.update_parameters(
                ZS_TauR=interval_3hr,  #duration of routing procedure (time step of runoff data)
                ZS_dtR=15 * 60,  #internal routing time step
                ZS_TauM=duration_3hr,  #total simulation time 
                ZS_dtM=interval_3hr,  #RAPID internal loop time interval
                ZS_dtF=interval_3hr,  # forcing time interval
                Vlat_file=inflow_file_name_3hr,
                Qout_file=qout_3hr,
                BS_opt_dam=BS_opt_dam,  #True,
                IS_dam_tot=IS_dam_tot,  #1,
                IS_dam_use=IS_dam_use,  #1,
                dam_tot_id_file=dam_tot_id_file,
                dam_use_id_file=dam_use_id_file,
                dam_file=dam_file)
            rapid_manager.run()

            #generate Qinit from 3hr
            rapid_manager.generate_qinit_from_past_qout(qinit_6hr_file)
            #from Hour 144 to 240 (15 time points) are of 6 hour time interval
            RAPIDinflowECMWF_tool.execute(ecmwf_forecast, weight_table_file,
                                          inflow_file_name_6hr, grid_name,
                                          "6hr_subset")
            interval_6hr = 6 * 60 * 60  #6hr
            duration_6hr = 96 * 60 * 60  #96hrs
            qout_6hr = os.path.join(node_path, 'Qout_6hr.nc')
            rapid_manager.update_parameters(
                ZS_TauR=interval_6hr,  #duration of routing procedure (time step of runoff data)
                ZS_dtR=15 * 60,  #internal routing time step
                ZS_TauM=duration_6hr,  #total simulation time 
                ZS_dtM=interval_6hr,  #RAPID internal loop time interval
                ZS_dtF=interval_6hr,  # forcing time interval
                Vlat_file=inflow_file_name_6hr,
                Qout_file=qout_6hr,
                BS_opt_dam=BS_opt_dam,
                IS_dam_tot=IS_dam_tot,
                IS_dam_use=IS_dam_use,
                dam_tot_id_file=dam_tot_id_file,
                dam_use_id_file=dam_use_id_file,
                dam_file=dam_file)
            rapid_manager.run()

            #Merge all files together at the end
            cv = ConvertRAPIDOutputToCF(
                rapid_output_file=[outflow_file_name, qout_3hr, qout_6hr],
                start_datetime=datetime.datetime.strptime(
                    forecast_date_timestep[:11], "%Y%m%d.%H"),
                time_step=[interval_1hr, interval_3hr, interval_6hr],
                qinit_file=qinit_file,
                comid_lat_lon_z_file=comid_lat_lon_z_file,
                rapid_connect_file=rapid_connect_file,
                project_name="ECMWF-RAPID Predicted flows by US Army ERDC",
                output_id_dim_name='rivid',
                output_flow_var_name='Qout',
                print_debug=False)
            cv.convert()

        except Exception:
            remove_file(qinit_3hr_file)
            remove_file(qinit_6hr_file)
            remove_file(inflow_file_name_1hr)
            remove_file(inflow_file_name_3hr)
            remove_file(inflow_file_name_6hr)
            traceback.print_exc()
            raise

        remove_file(qinit_3hr_file)
        remove_file(qinit_6hr_file)
        remove_file(inflow_file_name_1hr)
        remove_file(inflow_file_name_3hr)
        remove_file(inflow_file_name_6hr)

    elif forecast_resolution == "LowResFull":
        #LOW RES - 3hr and 6hr timesteps
        grid_name = RAPIDinflowECMWF_tool.getGridName(ecmwf_forecast,
                                                      high_res=False)
        #generate inflows for each timestep
        weight_table_file = case_insensitive_file_search(
            rapid_input_directory, r'weight_{0}\.csv'.format(grid_name))

        inflow_file_name_3hr = os.path.join(
            node_path, 'm3_riv_bas_3hr_%s.nc' % ensemble_number)
        inflow_file_name_6hr = os.path.join(
            node_path, 'm3_riv_bas_6hr_%s.nc' % ensemble_number)
        qinit_6hr_file = os.path.join(node_path, 'Qinit_6hr.csv')

        try:

            RAPIDinflowECMWF_tool.execute(ecmwf_forecast, weight_table_file,
                                          inflow_file_name_3hr, grid_name,
                                          "3hr_subset")

            #from Hour 0 to 144 (the first 49 time points) are of 3 hr time interval
            interval_3hr = 3 * 60 * 60  #3hr
            duration_3hr = 144 * 60 * 60  #144hrs
            rapid_manager.update_parameters(
                ZS_TauR=interval_3hr,  #duration of routing procedure (time step of runoff data)
                ZS_dtR=15 * 60,  #internal routing time step
                ZS_TauM=duration_3hr,  #total simulation time 
                ZS_dtM=interval_3hr,  #RAPID internal loop time interval
                ZS_dtF=interval_3hr,  # forcing time interval
                Vlat_file=inflow_file_name_3hr,
                Qout_file=outflow_file_name,
                Qinit_file=qinit_file,
                BS_opt_Qinit=BS_opt_Qinit,
                BS_opt_dam=BS_opt_dam,
                IS_dam_tot=IS_dam_tot,
                IS_dam_use=IS_dam_use,
                dam_tot_id_file=dam_tot_id_file,
                dam_use_id_file=dam_use_id_file,
                dam_file=dam_file)
            rapid_manager.run()

            #generate Qinit from 3hr
            rapid_manager.generate_qinit_from_past_qout(qinit_6hr_file)
            #from Hour 144 to 360 (36 time points) are of 6 hour time interval
            RAPIDinflowECMWF_tool.execute(ecmwf_forecast, weight_table_file,
                                          inflow_file_name_6hr, grid_name,
                                          "6hr_subset")
            interval_6hr = 6 * 60 * 60  #6hr
            duration_6hr = 216 * 60 * 60  #216hrs
            qout_6hr = os.path.join(node_path, 'Qout_6hr.nc')
            rapid_manager.update_parameters(
                ZS_TauR=interval_6hr,  #duration of routing procedure (time step of runoff data)
                ZS_dtR=15 * 60,  #internal routing time step
                ZS_TauM=duration_6hr,  #total simulation time 
                ZS_dtM=interval_6hr,  #RAPID internal loop time interval
                ZS_dtF=interval_6hr,  # forcing time interval
                Vlat_file=inflow_file_name_6hr,
                Qout_file=qout_6hr,
                BS_opt_dam=BS_opt_dam,
                IS_dam_tot=IS_dam_tot,
                IS_dam_use=IS_dam_use,
                dam_tot_id_file=dam_tot_id_file,
                dam_use_id_file=dam_use_id_file,
                dam_file=dam_file)
            rapid_manager.run()

            #Merge all files together at the end
            cv = ConvertRAPIDOutputToCF(
                rapid_output_file=[outflow_file_name, qout_6hr],
                start_datetime=datetime.datetime.strptime(
                    forecast_date_timestep[:11], "%Y%m%d.%H"),
                time_step=[interval_3hr, interval_6hr],
                qinit_file=qinit_file,
                comid_lat_lon_z_file=comid_lat_lon_z_file,
                rapid_connect_file=rapid_connect_file,
                project_name="ECMWF-RAPID Predicted flows by US Army ERDC",
                output_id_dim_name='rivid',
                output_flow_var_name='Qout',
                print_debug=False)
            cv.convert()

        except Exception:
            remove_file(qinit_6hr_file)
            remove_file(inflow_file_name_3hr)
            remove_file(inflow_file_name_6hr)
            traceback.print_exc()
            raise

        remove_file(qinit_6hr_file)
        remove_file(inflow_file_name_3hr)
        remove_file(inflow_file_name_6hr)

    elif forecast_resolution == "LowRes":
        #LOW RES - 6hr only
        inflow_file_name = os.path.join(node_path,
                                        'm3_riv_bas_%s.nc' % ensemble_number)

        grid_name = RAPIDinflowECMWF_tool.getGridName(ecmwf_forecast,
                                                      high_res=False)
        #generate inflows for each timestep
        weight_table_file = case_insensitive_file_search(
            rapid_input_directory, r'weight_{0}\.csv'.format(grid_name))

        try:

            print("INFO: Converting ECMWF inflow ...")
            RAPIDinflowECMWF_tool.execute(ecmwf_forecast, weight_table_file,
                                          inflow_file_name, grid_name)

            interval = 6 * 60 * 60  #6hr
            duration = 15 * 24 * 60 * 60  #15 days
            rapid_manager.update_parameters(
                ZS_TauR=interval,  #duration of routing procedure (time step of runoff data)
                ZS_dtR=15 * 60,  #internal routing time step
                ZS_TauM=duration,  #total simulation time 
                Vlat_file=inflow_file_name,
                Qout_file=outflow_file_name,
                Qinit_file=qinit_file,
                BS_opt_Qinit=BS_opt_Qinit,
                BS_opt_dam=BS_opt_dam,
                IS_dam_tot=IS_dam_tot,
                IS_dam_use=IS_dam_use,
                dam_tot_id_file=dam_tot_id_file,
                dam_use_id_file=dam_use_id_file,
                dam_file=dam_file)

            rapid_manager.run()
            rapid_manager.make_output_CF_compliant(
                simulation_start_datetime=datetime.datetime.strptime(
                    forecast_date_timestep[:11], "%Y%m%d.%H"),
                comid_lat_lon_z_file=comid_lat_lon_z_file,
                project_name="ECMWF-RAPID Predicted flows by US Army ERDC")

        except Exception:
            remove_file(inflow_file_name)
            traceback.print_exc()
            raise

        #clean up
        remove_file(inflow_file_name)

    else:
        raise Exception("ERROR: invalid forecast resolution ...")

    time_stop_all = datetime.datetime.utcnow()
    print("INFO: Total time to compute: {0}".format(time_stop_all -
                                                    time_start_all))
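The worker in Example #10 handles one ensemble member end to end: inflow generation, one RAPID run per time-resolution block, and the final CF conversion. How it gets dispatched is not shown here; a minimal, assumed driver using the standard multiprocessing module is sketched below, with placeholder argument values and dam routing disabled. The real project may instead hand these jobs to a cluster scheduler.

import multiprocessing

def run_forecast_ensemble(ecmwf_forecast_files, job_args):
    """Run the worker for each ensemble forecast file in a process pool (sketch)."""
    jobs = [(job_args['node_path'],
             job_args['rapid_input_directory'],
             ecmwf_forecast,
             job_args['forecast_date_timestep'],
             job_args['watershed'],
             job_args['subbasin'],
             job_args['rapid_executable_location'],
             job_args['init_flow'],
             job_args['initialization_time_step'],
             False,  # BS_opt_dam: dam routing disabled in this sketch
             0,      # IS_dam_tot
             0,      # IS_dam_use
             '',     # dam_tot_id_file
             '',     # dam_use_id_file
             '')     # dam_file
            for ecmwf_forecast in ecmwf_forecast_files]
    with multiprocessing.Pool() as pool:
        pool.starmap(ecmwf_rapid_multiprocess_worker, jobs)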
Example #11
def ecmwf_rapid_multiprocess_worker(node_path, rapid_input_directory,
                                    ecmwf_forecast, forecast_date_timestep, 
                                    watershed, subbasin, rapid_executable_location, 
                                    init_flow, initialization_time_step, conversion_flag, # modified this line CJB 20190218
                                                                                           # MJS I might consider a netCDF4.Dataset.variables['RO'].units check in SPT,
                                                                                           # and a correction of units attributes in the upstream cdo processing.
                                                                                           #
# dam arguments added, MJS 8/23/2020
                                    BS_opt_dam,IS_dam_tot,IS_dam_use,
                                    dam_tot_id_file,dam_use_id_file,dam_file):
    """
    Multiprocess worker function
    """
    print("In ecmwf_rapid_multiprocess_worker")

    time_start_all = datetime.datetime.utcnow()

    os.chdir(node_path)

    ensemble_number = get_ensemble_number_from_forecast(ecmwf_forecast)

    def remove_file(file_name):
        """
        remove file
        """
        try:
            os.remove(file_name)
        except OSError:
            pass

    #prepare ECMWF file for RAPID
    print("INFO: Running all ECMWF downscaling for watershed: {0}-{1} {2} {3}"
          .format(watershed,
                  subbasin,
                  forecast_date_timestep,
                  ensemble_number))

    #set up RAPID manager
    rapid_connect_file=case_insensitive_file_search(rapid_input_directory,
                                                    r'rapid_connect\.csv')

    rapid_manager = RAPID(
        rapid_executable_location=rapid_executable_location,
        rapid_connect_file=rapid_connect_file,
        riv_bas_id_file=case_insensitive_file_search(rapid_input_directory,
                                                     r'riv_bas_id.*?\.csv'),
        k_file=case_insensitive_file_search(rapid_input_directory,
                                            r'k\.csv'),
        x_file=case_insensitive_file_search(rapid_input_directory,
                                            r'x\.csv'),
        ZS_dtM=3*60*60, #RAPID internal loop time interval
    )

    # check for forcing flows
    try:
        rapid_manager.update_parameters(
            Qfor_file=case_insensitive_file_search(rapid_input_directory,
                                                   r'qfor\.csv'),
            for_tot_id_file=case_insensitive_file_search(rapid_input_directory,
                                                         r'for_tot_id\.csv'),
            for_use_id_file=case_insensitive_file_search(rapid_input_directory,
                                                         r'for_use_id\.csv'),
            ZS_dtF=3*60*60, # forcing time interval
            BS_opt_for=True
        )
    except Exception:
        print('WARNING: Forcing files not found. Skipping forcing ...')
        pass


    rapid_manager.update_reach_number_data()

    outflow_file_name = os.path.join(node_path,
                                     'Qout_%s_%s_%s.nc' % (watershed.lower(), 
                                                           subbasin.lower(), 
                                                           ensemble_number))

    qinit_file = ""
    BS_opt_Qinit = False
    print(init_flow)
    if(init_flow):
        #check for qinit file
        past_date = (datetime.datetime.strptime(forecast_date_timestep[:11],"%Y%m%d.%H") - \
#                     datetime.timedelta(hours=initialization_time_step)).strftime("%Y%m%dt%H")
                     datetime.timedelta(hours=12)).strftime("%Y%m%dt%H")
        print("Past date:" ,past_date)
        qinit_file = os.path.join(rapid_input_directory, 'Qinit_%s.csv' % past_date)
        BS_opt_Qinit = qinit_file and os.path.exists(qinit_file)
        if not BS_opt_Qinit:
            print("Error: {0} not found. Not initializing ...".format(qinit_file))
            qinit_file = ""

            
    try:
        comid_lat_lon_z_file = case_insensitive_file_search(rapid_input_directory,
                                                            r'comid_lat_lon_z.*?\.csv')
    except Exception:
        comid_lat_lon_z_file = ""
        print("WARNING: comid_lat_lon_z_file not found. Not adding lat/lon/z to output file ...")

    RAPIDinflowECMWF_tool = CreateInflowFileFromECMWFRunoff()
    forecast_resolution = RAPIDinflowECMWF_tool.dataIdentify(ecmwf_forecast)
    time_step_count = RAPIDinflowECMWF_tool.getTimeSize(ecmwf_forecast) #new line 20190108 CJB
    print(forecast_resolution)
    """ 
    NOTE: time_step_count is the total count of forecast steps, not periods 
    """ # 20190108 CJB
    #determine weight table from resolution
    if forecast_resolution == "HRES136": #new line 20190108 CJB 150 <= T <= 240
        #if forecast_resolution == "HighRes":
        #HIGH RES
        grid_name = RAPIDinflowECMWF_tool.getGridName(ecmwf_forecast, high_res=True)
        #generate inflows for each timestep
        weight_table_file = case_insensitive_file_search(rapid_input_directory,
                                                         r'weight_{0}\.csv'.format(grid_name))

        inflow_file_name_1hr = os.path.join(node_path, 'm3_riv_bas_1hr_%s.nc' % ensemble_number)
        inflow_file_name_3hr = os.path.join(node_path, 'm3_riv_bas_3hr_%s.nc' % ensemble_number)
        inflow_file_name_6hr = os.path.join(node_path, 'm3_riv_bas_6hr_%s.nc' % ensemble_number)
        qinit_3hr_file = os.path.join(node_path, 'Qinit_3hr.csv')
        qinit_6hr_file = os.path.join(node_path, 'Qinit_6hr.csv')
        try:

            RAPIDinflowECMWF_tool.execute(ecmwf_forecast, 
                                          weight_table_file, 
                                          inflow_file_name_1hr,
                                          grid_name,
                                          conversion_flag, # added this line CJB 20190218
                                          "1hr")

            #from Hour 0 to 90 (the first 91 time points) are of 1 hr time interval
            periods_1hr = 90 #new line 20190108 CJB 
            interval_1hr = 1*60*60 #1hr
            duration_1hr = periods_1hr*interval_1hr #90hrs # modified line 20190108 CJB, MJS
            rapid_manager.update_parameters(ZS_TauR=interval_1hr, #duration of routing procedure (time step of runoff data)
                                            ZS_dtR=15*60, #internal routing time step
                                            ZS_TauM=duration_1hr, #total simulation time
                                            ZS_dtM=interval_1hr, #RAPID internal loop time interval
                                            ZS_dtF=interval_1hr, # forcing time interval
                                            Vlat_file=inflow_file_name_1hr,
                                            Qout_file=outflow_file_name,
                                            Qinit_file=qinit_file,
                                            BS_opt_Qinit=BS_opt_Qinit,
                                            BS_opt_dam = BS_opt_dam,
                                            IS_dam_tot = IS_dam_tot,
                                            IS_dam_use = IS_dam_use,
                                            dam_tot_id_file = dam_tot_id_file,
                                            dam_use_id_file = dam_use_id_file,
                                            dam_file = dam_file)
            rapid_manager.run()
            #generate Qinit from 1hr
            rapid_manager.generate_qinit_from_past_qout(qinit_3hr_file)

            #then from Hour 90 to 144 (19 time points) are of 3 hour time interval
            RAPIDinflowECMWF_tool.execute(ecmwf_forecast, 
                                          weight_table_file, 
                                          inflow_file_name_3hr,
                                          grid_name,
                                          conversion_flag, # added this line CJB 20190218
                                          "3hr_subset")

            periods_3hr = 18 #new line 20190108 CJB
            interval_3hr = 3*60*60 #3hr
            duration_3hr = periods_3hr*interval_3hr #54hrs # modified line 20190108 CJB, MJS
            qout_3hr = os.path.join(node_path,'Qout_3hr.nc')
            rapid_manager.update_parameters(ZS_TauR=interval_3hr, #duration of routing procedure (time step of runoff data)
                                            ZS_dtR=15*60, #internal routing time step
                                            ZS_TauM=duration_3hr, #total simulation time 
                                            ZS_dtM=interval_3hr, #RAPID internal loop time interval
                                            ZS_dtF=interval_3hr,  # forcing time interval
                                            Vlat_file=inflow_file_name_3hr,
                                            Qout_file=qout_3hr,
                                            BS_opt_dam = BS_opt_dam, 
                                            IS_dam_tot = IS_dam_tot,
                                            IS_dam_use = IS_dam_use,
                                            dam_tot_id_file = dam_tot_id_file,
                                            dam_use_id_file = dam_use_id_file,
                                            dam_file = dam_file)
            rapid_manager.run()

            #generate Qinit from 3hr
            rapid_manager.generate_qinit_from_past_qout(qinit_6hr_file)

            #from Hour 144 to 240 (15 time points) are of 6 hour time interval
            RAPIDinflowECMWF_tool.execute(ecmwf_forecast, 
                                          weight_table_file, 
                                          inflow_file_name_6hr,
                                          grid_name,
                                          conversion_flag, # added this line CJB 20190218
                                          "6hr_subset")

            periods_6hr = (time_step_count -1) - periods_3hr - periods_1hr #new line 20190108 CJB, MJS
            interval_6hr = 6*60*60 #6hr
            duration_6hr = periods_6hr*interval_6hr #new line 20190108 CJB, MJS 
            qout_6hr = os.path.join(node_path,'Qout_6hr.nc')
            rapid_manager.update_parameters(ZS_TauR=interval_6hr, #duration of routing procedure (time step of runoff data)
                                            ZS_dtR=15*60, #internal routing time step
                                            ZS_TauM=duration_6hr, #total simulation time 
                                            ZS_dtM=interval_6hr, #RAPID internal loop time interval
                                            ZS_dtF=interval_6hr,  # forcing time interval
                                            Vlat_file=inflow_file_name_6hr,
                                            Qout_file=qout_6hr,
                                            BS_opt_dam = BS_opt_dam, 
                                            IS_dam_tot = IS_dam_tot,
                                            IS_dam_use = IS_dam_use,
                                            dam_tot_id_file = dam_tot_id_file,
                                            dam_use_id_file = dam_use_id_file,
                                            dam_file = dam_file)
            rapid_manager.run()

            #Merge all files together at the end
            cv = ConvertRAPIDOutputToCF(rapid_output_file=[outflow_file_name, qout_3hr, qout_6hr],
                                        start_datetime=datetime.datetime.strptime(forecast_date_timestep[:11], "%Y%m%d.%H"),
                                        time_step=[interval_1hr, interval_3hr, interval_6hr],
                                        qinit_file=qinit_file,
                                        comid_lat_lon_z_file=comid_lat_lon_z_file,
                                        rapid_connect_file=rapid_connect_file,
                                        project_name="ECMWF-RAPID Predicted flows by US Army ERDC",
                                        output_id_dim_name='rivid',
                                        output_flow_var_name='Qout',
                                        print_debug=False)

            cv.convert()

        except Exception:
            remove_file(qinit_3hr_file)
            remove_file(qinit_6hr_file)
            remove_file(inflow_file_name_1hr)
            remove_file(inflow_file_name_3hr)
            remove_file(inflow_file_name_6hr)
            traceback.print_exc()
            raise

        remove_file(qinit_3hr_file)
        remove_file(qinit_6hr_file)
        remove_file(inflow_file_name_1hr)
        remove_file(inflow_file_name_3hr)
        remove_file(inflow_file_name_6hr)

    elif forecast_resolution == "HRES13": #new line 20190108 CJB 93 <= T <= 144
        #if forecast_resolution == "HighRes":
        #HIGH RES
        grid_name = RAPIDinflowECMWF_tool.getGridName(ecmwf_forecast, high_res=True)
        #generate inflows for each timestep
        weight_table_file = case_insensitive_file_search(rapid_input_directory,
                                                         r'weight_{0}\.csv'.format(grid_name))

        inflow_file_name_1hr = os.path.join(node_path, 'm3_riv_bas_1hr_%s.nc' % ensemble_number)
        inflow_file_name_3hr = os.path.join(node_path, 'm3_riv_bas_3hr_%s.nc' % ensemble_number)
        #inflow_file_name_6hr = os.path.join(node_path, 'm3_riv_bas_6hr_%s.nc' % ensemble_number)
        qinit_3hr_file = os.path.join(node_path, 'Qinit_3hr.csv')
        #qinit_6hr_file = os.path.join(node_path, 'Qinit_6hr.csv')
        
        
        try:
        
            RAPIDinflowECMWF_tool.execute(ecmwf_forecast, 
                                          weight_table_file, 
                                          inflow_file_name_1hr,
                                          grid_name,
                                          conversion_flag, # added this line CJB 20190218
                                          "1hr")

            #from Hour 0 to 90 (the first 91 time points) are of 1 hr time interval
            periods_1hr = 90 #new line 20190108 CJB
            interval_1hr = 1*60*60 #1hr
            duration_1hr = periods_1hr*60*60 #90hrs # modified line 20190108 CJB
            rapid_manager.update_parameters(ZS_TauR=interval_1hr, #duration of routing procedure (time step of runoff data)
                                            ZS_dtR=15*60, #internal routing time step
                                            ZS_TauM=duration_1hr, #total simulation time
                                            ZS_dtM=interval_1hr, #RAPID internal loop time interval
                                            ZS_dtF=interval_1hr, # forcing time interval
                                            Vlat_file=inflow_file_name_1hr,
                                            Qout_file=outflow_file_name,
                                            Qinit_file=qinit_file,
                                            BS_opt_Qinit=BS_opt_Qinit,
                                            BS_opt_dam = BS_opt_dam, 
                                            IS_dam_tot = IS_dam_tot,
                                            IS_dam_use = IS_dam_use,
                                            dam_tot_id_file = dam_tot_id_file,
                                            dam_use_id_file = dam_use_id_file,
                                            dam_file = dam_file)
            rapid_manager.run()

            #generate Qinit from 1hr
            rapid_manager.generate_qinit_from_past_qout(qinit_3hr_file)

            #then from Hour 90 to 144 (19 time points) are of 3 hour time interval
            RAPIDinflowECMWF_tool.execute(ecmwf_forecast, 
                                          weight_table_file, 
                                          inflow_file_name_3hr,
                                          grid_name,
                                          conversion_flag, # added this line CJB 20190218
                                          "3hr_subset")

            periods_3hr = (time_step_count - 1) - periods_1hr #new line 20190108 CJB, MJS
            interval_3hr = 3*60*60 #3hr
            duration_3hr = periods_3hr*interval_3hr #new line 20190108 CJB, MJS
            qout_3hr = os.path.join(node_path,'Qout_3hr.nc')
            rapid_manager.update_parameters(ZS_TauR=interval_3hr, #duration of routing procedure (time step of runoff data)
                                            ZS_dtR=15*60, #internal routing time step
                                            ZS_TauM=duration_3hr, #total simulation time 
                                            ZS_dtM=interval_3hr, #RAPID internal loop time interval
                                            ZS_dtF=interval_3hr,  # forcing time interval
                                            Vlat_file=inflow_file_name_3hr,
                                            Qout_file=qout_3hr,
                                            BS_opt_dam = BS_opt_dam, 
                                            IS_dam_tot = IS_dam_tot,
                                            IS_dam_use = IS_dam_use,
                                            dam_tot_id_file = dam_tot_id_file,
                                            dam_use_id_file = dam_use_id_file,
                                            dam_file = dam_file)
            rapid_manager.run()

            #Merge all files together at the end
            cv = ConvertRAPIDOutputToCF(rapid_output_file=[outflow_file_name, qout_3hr], 
                                        start_datetime=datetime.datetime.strptime(forecast_date_timestep[:11], "%Y%m%d.%H"), 
                                        time_step=[interval_1hr, interval_3hr], 
                                        qinit_file=qinit_file, 
                                        comid_lat_lon_z_file=comid_lat_lon_z_file,
                                        rapid_connect_file=rapid_connect_file, 
                                        project_name="ECMWF-RAPID Predicted flows by US Army ERDC", 
                                        output_id_dim_name='rivid',
                                        output_flow_var_name='Qout',
                                        print_debug=False)
            cv.convert()

        except Exception:
            remove_file(qinit_3hr_file)
            remove_file(inflow_file_name_1hr)
            remove_file(inflow_file_name_3hr)
            traceback.print_exc()
            raise

        remove_file(qinit_3hr_file)
        remove_file(inflow_file_name_1hr)
        remove_file(inflow_file_name_3hr)

    elif forecast_resolution == "HRES1": #new line 20190108 CJB 0 <= T <= 90
        #if forecast_resolution == "HighRes":
        #HIGH RES
        grid_name = RAPIDinflowECMWF_tool.getGridName(ecmwf_forecast, high_res=True)
        #generate inflows for each timestep
        weight_table_file = case_insensitive_file_search(rapid_input_directory,
                                                         r'weight_{0}\.csv'.format(grid_name))

        inflow_file_name_1hr = os.path.join(node_path, 'm3_riv_bas_1hr_%s.nc' % ensemble_number)
        #inflow_file_name_3hr = os.path.join(node_path, 'm3_riv_bas_3hr_%s.nc' % ensemble_number)
        #inflow_file_name_6hr = os.path.join(node_path, 'm3_riv_bas_6hr_%s.nc' % ensemble_number)
        #qinit_3hr_file = os.path.join(node_path, 'Qinit_3hr.csv')
        #qinit_6hr_file = os.path.join(node_path, 'Qinit_6hr.csv')

        try:

            RAPIDinflowECMWF_tool.execute(ecmwf_forecast, 
                                          weight_table_file, 
                                          inflow_file_name_1hr,
                                          grid_name,
                                          conversion_flag, # added this line CJB 20190218
                                          "1hr")

            #from Hour 0 to 90 (the first 91 time points) are of 1 hr time interval
            interval_1hr = 1*60*60 #1hr
            duration_1hr = (time_step_count - 1)*interval_1hr #new line 20190108 CJB, MJS
            rapid_manager.update_parameters(ZS_TauR=interval_1hr, #duration of routing procedure (time step of runoff data)
                                            ZS_dtR=15*60, #internal routing time step
                                            ZS_TauM=duration_1hr, #total simulation time
                                            ZS_dtM=interval_1hr, #RAPID internal loop time interval
                                            ZS_dtF=interval_1hr, # forcing time interval
                                            Vlat_file=inflow_file_name_1hr,
                                            Qout_file=outflow_file_name,
                                            Qinit_file=qinit_file,
                                            BS_opt_Qinit=BS_opt_Qinit,
                                            BS_opt_dam = BS_opt_dam, 
                                            IS_dam_tot = IS_dam_tot,
                                            IS_dam_use = IS_dam_use,
                                            dam_tot_id_file = dam_tot_id_file,
                                            dam_use_id_file = dam_use_id_file,
                                            dam_file = dam_file)

            rapid_manager.run()

            #Merge all files together at the end
            cv = ConvertRAPIDOutputToCF(rapid_output_file=[outflow_file_name], 
                                        start_datetime=datetime.datetime.strptime(forecast_date_timestep[:11], "%Y%m%d.%H"), 
                                        time_step=[interval_1hr], 
                                        qinit_file=qinit_file, 
                                        comid_lat_lon_z_file=comid_lat_lon_z_file,
                                        rapid_connect_file=rapid_connect_file, 
                                        project_name="ECMWF-RAPID Predicted flows by US Army ERDC", 
                                        output_id_dim_name='rivid',
                                        output_flow_var_name='Qout',
                                        print_debug=False)

            cv.convert()

        except Exception:
            remove_file(inflow_file_name_1hr)
            traceback.print_exc()
            raise

        remove_file(inflow_file_name_1hr)

    elif forecast_resolution == "ENS36": #new line 20190108 CJB 150 <= T <= 360
        #elif forecast_resolution == "LowResFull":
        #LOW RES - 3hr and 6hr timesteps
        grid_name = RAPIDinflowECMWF_tool.getGridName(ecmwf_forecast, high_res=False)
        #generate inflows for each timestep
        weight_table_file = case_insensitive_file_search(rapid_input_directory,
                                                         r'weight_{0}\.csv'.format(grid_name))

        inflow_file_name_3hr = os.path.join(node_path, 'm3_riv_bas_3hr_%s.nc' % ensemble_number)
        inflow_file_name_6hr = os.path.join(node_path, 'm3_riv_bas_6hr_%s.nc' % ensemble_number)
        qinit_6hr_file = os.path.join(node_path, 'Qinit_6hr.csv')

        try:

            RAPIDinflowECMWF_tool.execute(ecmwf_forecast, 
                                          weight_table_file, 
                                          inflow_file_name_3hr,
                                          grid_name,
                                          conversion_flag, # added this line CJB 20190218
                                          "3hr_subset")

            #from Hour 0 to 144 (the first 49 time points) are of 3 hr time interval
            periods_3hr = 48
            interval_3hr = 3*60*60 #3hr
            duration_3hr = periods_3hr*interval_3hr #new line 20190108 CJB, MJS
            rapid_manager.update_parameters(ZS_TauR=interval_3hr, #duration of routing procedure (time step of runoff data)
                                            ZS_dtR=15*60, #internal routing time step
                                            ZS_TauM=duration_3hr, #total simulation time 
                                            ZS_dtM=interval_3hr, #RAPID internal loop time interval
                                            ZS_dtF=interval_3hr,  # forcing time interval
                                            Vlat_file=inflow_file_name_3hr,
                                            Qout_file=outflow_file_name,
                                            Qinit_file=qinit_file,
                                            BS_opt_Qinit=BS_opt_Qinit,
                                            BS_opt_dam = BS_opt_dam, 
                                            IS_dam_tot = IS_dam_tot,
                                            IS_dam_use = IS_dam_use,
                                            dam_tot_id_file = dam_tot_id_file,
                                            dam_use_id_file = dam_use_id_file,
                                            dam_file = dam_file)
            rapid_manager.run()

            #generate Qinit from 3hr
            rapid_manager.generate_qinit_from_past_qout(qinit_6hr_file)

            #from Hour 144 to 360 (36 time points) are of 6 hour time interval
            RAPIDinflowECMWF_tool.execute(ecmwf_forecast, 
                                          weight_table_file, 
                                          inflow_file_name_6hr,
                                          grid_name,
                                          conversion_flag, # added this line CJB 20190218
                                          "6hr_subset")

            periods_6hr = (time_step_count - 1) - periods_3hr #new line 20190108 CJB, MJS
            interval_6hr = 6*60*60 #6hr
            duration_6hr = periods_6hr*interval_6hr #new line 20190108 CJB, MJS
            qout_6hr = os.path.join(node_path,'Qout_6hr.nc')
            rapid_manager.update_parameters(ZS_TauR=interval_6hr, #duration of routing procedure (time step of runoff data)
                                            ZS_dtR=15*60, #internal routing time step
                                            ZS_TauM=duration_6hr, #total simulation time 
                                            ZS_dtM=interval_6hr, #RAPID internal loop time interval
                                            ZS_dtF=interval_6hr,  # forcing time interval
                                            Vlat_file=inflow_file_name_6hr,
                                            Qout_file=qout_6hr,
                                            BS_opt_dam = BS_opt_dam, 
                                            IS_dam_tot = IS_dam_tot,
                                            IS_dam_use = IS_dam_use,
                                            dam_tot_id_file = dam_tot_id_file,
                                            dam_use_id_file = dam_use_id_file,
                                            dam_file = dam_file)
            rapid_manager.run()

            #Merge all files together at the end
            cv = ConvertRAPIDOutputToCF(rapid_output_file=[outflow_file_name, qout_6hr], 
                                        start_datetime=datetime.datetime.strptime(forecast_date_timestep[:11], "%Y%m%d.%H"), 
                                        time_step=[interval_3hr, interval_6hr], 
                                        qinit_file=qinit_file, 
                                        comid_lat_lon_z_file=comid_lat_lon_z_file,
                                        rapid_connect_file=rapid_connect_file, 
                                        project_name="ECMWF-RAPID Predicted flows by US Army ERDC", 
                                        output_id_dim_name='rivid',
                                        output_flow_var_name='Qout',
                                        print_debug=False)
            cv.convert()

        except Exception:
            remove_file(qinit_6hr_file)
            remove_file(inflow_file_name_3hr)
            remove_file(inflow_file_name_6hr)
            traceback.print_exc()
            raise

        remove_file(qinit_6hr_file)
        remove_file(inflow_file_name_3hr)
        remove_file(inflow_file_name_6hr)

    elif forecast_resolution == "ENS3": #new line 20190108 CJB 0 <= T <= 144
        #elif forecast_resolution == "LowResFull":
        #LOW RES - 3hr and 6hr timesteps
        grid_name = RAPIDinflowECMWF_tool.getGridName(ecmwf_forecast, high_res=False)
        #generate inflows for each timestep
        weight_table_file = case_insensitive_file_search(rapid_input_directory,
                                                         r'weight_{0}\.csv'.format(grid_name))

        inflow_file_name_3hr = os.path.join(node_path, 'm3_riv_bas_3hr_%s.nc' % ensemble_number)
        #inflow_file_name_6hr = os.path.join(node_path, 'm3_riv_bas_6hr_%s.nc' % ensemble_number)
        #qinit_6hr_file = os.path.join(node_path, 'Qinit_6hr.csv')

        try:

            RAPIDinflowECMWF_tool.execute(ecmwf_forecast, 
                                          weight_table_file, 
                                          inflow_file_name_3hr,
                                          grid_name,
                                          conversion_flag, # added this line CJB 20190218
                                          "3hr_subset")

            #for ENS3 (0 <= T <= 144) every time point is at a 3 hr time interval
            periods_3hr = time_step_count - 1 #new line 20190108 CJB, MJS
            interval_3hr = 3*60*60 #3hr
            duration_3hr = periods_3hr*interval_3hr #new line 20190108 CJB, MJS
            rapid_manager.update_parameters(ZS_TauR=interval_3hr, #duration of routing procedure (time step of runoff data)
                                            ZS_dtR=15*60, #internal routing time step
                                            ZS_TauM=duration_3hr, #total simulation time 
                                            ZS_dtM=interval_3hr, #RAPID internal loop time interval
                                            ZS_dtF=interval_3hr,  # forcing time interval
                                            Vlat_file=inflow_file_name_3hr,
                                            Qout_file=outflow_file_name,
                                            Qinit_file=qinit_file,
                                            BS_opt_Qinit=BS_opt_Qinit,
                                            BS_opt_dam = BS_opt_dam, 
                                            IS_dam_tot = IS_dam_tot,
                                            IS_dam_use = IS_dam_use,
                                            dam_tot_id_file = dam_tot_id_file,
                                            dam_use_id_file = dam_use_id_file,
                                            dam_file = dam_file)
            rapid_manager.run()

            #Merge all files together at the end
            cv = ConvertRAPIDOutputToCF(rapid_output_file=[outflow_file_name], 
                                        start_datetime=datetime.datetime.strptime(forecast_date_timestep[:11], "%Y%m%d.%H"), 
                                        time_step=[interval_3hr], 
                                        qinit_file=qinit_file, 
                                        comid_lat_lon_z_file=comid_lat_lon_z_file,
                                        rapid_connect_file=rapid_connect_file, 
                                        project_name="ECMWF-RAPID Predicted flows by US Army ERDC", 
                                        output_id_dim_name='rivid',
                                        output_flow_var_name='Qout',
                                        print_debug=False)

            cv.convert()
            #rapid_manager.make_output_CF_compliant(simulation_start_datetime=datetime.datetime.strptime(forecast_date_timestep[:11], "%Y%m%d.%H"),
            #                                       comid_lat_lon_z_file=comid_lat_lon_z_file,
            #                                       project_name="ECMWF-RAPID Predicted flows by US Army ERDC")
        except Exception:
            remove_file(inflow_file_name_3hr)
            traceback.print_exc()
            raise

        #remove_file(inflow_file_name_3hr)

    elif forecast_resolution == "ENS6":
        #LOW RES - 6hr only
        inflow_file_name = os.path.join(node_path, 'm3_riv_bas_%s.nc' % ensemble_number)

        grid_name = RAPIDinflowECMWF_tool.getGridName(ecmwf_forecast, high_res=False)
        #generate inflows for each timestep
        weight_table_file = case_insensitive_file_search(rapid_input_directory,
                                                         r'weight_{0}\.csv'.format(grid_name))

        try:

            print("INFO: Converting ECMWF inflow ...")
            RAPIDinflowECMWF_tool.execute(ecmwf_forecast,
                                          weight_table_file,
                                          inflow_file_name,
                                          grid_name,
                                          conversion_flag) # added this line CJB 20190218; argument order matched to the other execute() calls

            periods_6hr = time_step_count - 1 #new line 20190108 CJB, MJS
            interval = 6*60*60 #6hr
            duration = periods_6hr*interval #new line 20190108 CJB 
            rapid_manager.update_parameters(ZS_TauR=interval, #duration of routing procedure (time step of runoff data)
                                            ZS_dtR=15*60, #internal routing time step
                                            ZS_TauM=duration, #total simulation time 
                                            Vlat_file=inflow_file_name,
                                            Qout_file=outflow_file_name,
                                            Qinit_file=qinit_file,
                                            BS_opt_Qinit=BS_opt_Qinit,
                                            BS_opt_dam = BS_opt_dam, 
                                            IS_dam_tot = IS_dam_tot,
                                            IS_dam_use = IS_dam_use,
                                            dam_tot_id_file = dam_tot_id_file,
                                            dam_use_id_file = dam_use_id_file,
                                            dam_file = dam_file)
    
            rapid_manager.run()

            rapid_manager.make_output_CF_compliant(simulation_start_datetime=datetime.datetime.strptime(forecast_date_timestep[:11], "%Y%m%d.%H"),
                                                   comid_lat_lon_z_file=comid_lat_lon_z_file,
                                                   project_name="ECMWF-RAPID Predicted flows by US Army ERDC")

        except Exception:
            remove_file(inflow_file_name)
            traceback.print_exc()
            raise
            
        #clean up
        remove_file(inflow_file_name)

    #else:
    #    raise Exception("ERROR: invalid forecast resolution ...")
        
    time_stop_all = datetime.datetime.utcnow()
    print("INFO: Total time to compute: {0}".format(time_stop_all-time_start_all))
def ecmwf_rapid_multiprocess_worker(node_path, rapid_input_directory,
                                    ecmwf_forecast, forecast_date_timestep, 
                                    watershed, subbasin, rapid_executable_location, 
                                    init_flow):
    """
    Multiprocess worker function
    """
    time_start_all = datetime.datetime.utcnow()

    os.chdir(node_path)

    ensemble_number = get_ensemble_number_from_forecast(ecmwf_forecast)

    def remove_file(file_name):
        """
        remove file
        """
        try:
            os.remove(file_name)
        except OSError:
            pass

    #prepare ECMWF file for RAPID
    print("Running all ECMWF downscaling for watershed: {0}-{1} {2} {3}".format(watershed, 
                                                                                subbasin,
                                                                                forecast_date_timestep,
                                                                                ensemble_number))

    #set up RAPID manager
    rapid_connect_file=case_insensitive_file_search(rapid_input_directory,
                                                    r'rapid_connect\.csv')

    rapid_manager = RAPID(rapid_executable_location=rapid_executable_location,
                          rapid_connect_file=rapid_connect_file,
                          riv_bas_id_file=case_insensitive_file_search(rapid_input_directory,
                                                                       r'riv_bas_id.*?\.csv'),
                          k_file=case_insensitive_file_search(rapid_input_directory,
                                                              r'k\.csv'),
                          x_file=case_insensitive_file_search(rapid_input_directory,
                                                              r'x\.csv'),
                          ZS_dtM=3*60*60, #RAPID internal loop time interval
                         )
    
    rapid_manager.update_reach_number_data()
    
    outflow_file_name = os.path.join(node_path, 
                                     'Qout_%s_%s_%s.nc' % (watershed.lower(), 
                                                           subbasin.lower(), 
                                                           ensemble_number))

    qinit_file = ""
    BS_opt_Qinit = False
    if(init_flow):
        #check for qinit file
        past_date = (datetime.datetime.strptime(forecast_date_timestep[:11],"%Y%m%d.%H") - \
                     datetime.timedelta(hours=12)).strftime("%Y%m%dt%H")
        qinit_file = os.path.join(rapid_input_directory, 'Qinit_%s.csv' % past_date)
        BS_opt_Qinit = qinit_file and os.path.exists(qinit_file)
        if not BS_opt_Qinit:
            print("Error: {0} not found. Not initializing ...".format(qinit_file))
            qinit_file = ""
            
            
    try:
        comid_lat_lon_z_file = case_insensitive_file_search(rapid_input_directory,
                                                            r'comid_lat_lon_z.*?\.csv')
    except Exception:
        comid_lat_lon_z_file = ""
        print("comid_lat_lon_z_file not found. Not adding lat/lon/z to output file ...")

    RAPIDinflowECMWF_tool = CreateInflowFileFromECMWFRunoff()
    forecast_resolution = RAPIDinflowECMWF_tool.dataIdentify(ecmwf_forecast)

    #determine weight table from resolution
    if forecast_resolution == "HighRes":
        #HIGH RES
        grid_name = RAPIDinflowECMWF_tool.getGridName(ecmwf_forecast, high_res=True)
        #generate inflows for each timestep
        weight_table_file = case_insensitive_file_search(rapid_input_directory,
                                                         r'weight_{0}\.csv'.format(grid_name))
                                                         
        inflow_file_name_1hr = os.path.join(node_path, 'm3_riv_bas_1hr_%s.nc' % ensemble_number)
        inflow_file_name_3hr = os.path.join(node_path, 'm3_riv_bas_3hr_%s.nc' % ensemble_number)
        inflow_file_name_6hr = os.path.join(node_path, 'm3_riv_bas_6hr_%s.nc' % ensemble_number)
        qinit_3hr_file = os.path.join(node_path, 'Qinit_3hr.csv')
        qinit_6hr_file = os.path.join(node_path, 'Qinit_6hr.csv')
        
        
        try:
        
            RAPIDinflowECMWF_tool.execute(ecmwf_forecast, 
                                          weight_table_file, 
                                          inflow_file_name_1hr,
                                          grid_name,
                                          "1hr")

            #from Hour 0 to 90 (the first 91 time points) are of 1 hr time interval
            interval_1hr = 1*60*60 #1hr
            duration_1hr = 90*60*60 #90hrs
            rapid_manager.update_parameters(ZS_TauR=interval_1hr, #duration of routing procedure (time step of runoff data)
                                            ZS_dtR=15*60, #internal routing time step
                                            ZS_TauM=duration_1hr, #total simulation time
                                            ZS_dtM=interval_1hr, #RAPID internal loop time interval
                                            Vlat_file=inflow_file_name_1hr,
                                            Qout_file=outflow_file_name,
                                            Qinit_file=qinit_file,
                                            BS_opt_Qinit=BS_opt_Qinit)
            rapid_manager.run()
    
            #generate Qinit from 1hr
            rapid_manager.generate_qinit_from_past_qout(qinit_3hr_file)

            #then from Hour 90 to 144 (19 time points) are of 3 hour time interval
            RAPIDinflowECMWF_tool.execute(ecmwf_forecast, 
                                          weight_table_file, 
                                          inflow_file_name_3hr,
                                          grid_name,
                                          "3hr_subset")
            interval_3hr = 3*60*60 #3hr
            duration_3hr = 54*60*60 #54hrs
            qout_3hr = os.path.join(node_path,'Qout_3hr.nc')
            rapid_manager.update_parameters(ZS_TauR=interval_3hr, #duration of routing procedure (time step of runoff data)
                                            ZS_dtR=15*60, #internal routing time step
                                            ZS_TauM=duration_3hr, #total simulation time 
                                            ZS_dtM=interval_3hr, #RAPID internal loop time interval
                                            Vlat_file=inflow_file_name_3hr,
                                            Qout_file=qout_3hr)
            rapid_manager.run()

            #generate Qinit from 3hr
            rapid_manager.generate_qinit_from_past_qout(qinit_6hr_file)
            #from Hour 144 to 240 (16 time points) are of 6 hour time interval
            RAPIDinflowECMWF_tool.execute(ecmwf_forecast, 
                                          weight_table_file, 
                                          inflow_file_name_6hr,
                                          grid_name,
                                          "6hr_subset")
            interval_6hr = 6*60*60 #6hr
            duration_6hr = 96*60*60 #96hrs
            qout_6hr = os.path.join(node_path,'Qout_6hr.nc')
            rapid_manager.update_parameters(ZS_TauR=interval_6hr, #duration of routing procedure (time step of runoff data)
                                            ZS_dtR=15*60, #internal routing time step
                                            ZS_TauM=duration_6hr, #total simulation time 
                                            ZS_dtM=interval_6hr, #RAPID internal loop time interval
                                            Vlat_file=inflow_file_name_6hr,
                                            Qout_file=qout_6hr)
            rapid_manager.run()

            #Merge all files together at the end
            cv = ConvertRAPIDOutputToCF(rapid_output_file=[outflow_file_name, qout_3hr, qout_6hr], 
                                        start_datetime=datetime.datetime.strptime(forecast_date_timestep[:11], "%Y%m%d.%H"), 
                                        time_step=[interval_1hr, interval_3hr, interval_6hr], 
                                        qinit_file=qinit_file, 
                                        comid_lat_lon_z_file=comid_lat_lon_z_file,
                                        rapid_connect_file=rapid_connect_file, 
                                        project_name="ECMWF-RAPID Predicted flows by US Army ERDC", 
                                        output_id_dim_name='rivid',
                                        output_flow_var_name='Qout',
                                        print_debug=False)
            cv.convert()
    
        except Exception:
            remove_file(qinit_3hr_file)
            remove_file(qinit_6hr_file)
            remove_file(inflow_file_name_1hr)
            remove_file(inflow_file_name_3hr)
            remove_file(inflow_file_name_6hr)
            raise
            
        remove_file(qinit_3hr_file)
        remove_file(qinit_6hr_file)
        remove_file(inflow_file_name_1hr)
        remove_file(inflow_file_name_3hr)
        remove_file(inflow_file_name_6hr)

    elif forecast_resolution == "LowResFull":
        #LOW RES - 3hr and 6hr timesteps
        grid_name = RAPIDinflowECMWF_tool.getGridName(ecmwf_forecast, high_res=False)
        #generate inflows for each timestep
        weight_table_file = case_insensitive_file_search(rapid_input_directory,
                                                         r'weight_{0}\.csv'.format(grid_name))
                                                         
        inflow_file_name_3hr = os.path.join(node_path, 'm3_riv_bas_3hr_%s.nc' % ensemble_number)
        inflow_file_name_6hr = os.path.join(node_path, 'm3_riv_bas_6hr_%s.nc' % ensemble_number)
        qinit_6hr_file = os.path.join(node_path, 'Qinit_6hr.csv')
        
        try:
        
            RAPIDinflowECMWF_tool.execute(ecmwf_forecast, 
                                          weight_table_file, 
                                          inflow_file_name_3hr,
                                          grid_name,
                                          "3hr_subset")

            #from Hour 0 to 144 (the first 49 time points) are of 3 hr time interval
            interval_3hr = 3*60*60 #3hr
            duration_3hr = 144*60*60 #144hrs
            rapid_manager.update_parameters(ZS_TauR=interval_3hr, #duration of routing procedure (time step of runoff data)
                                            ZS_dtR=15*60, #internal routing time step
                                            ZS_TauM=duration_3hr, #total simulation time 
                                            ZS_dtM=interval_3hr, #RAPID internal loop time interval
                                            Vlat_file=inflow_file_name_3hr,
                                            Qout_file=outflow_file_name,
                                            Qinit_file=qinit_file,
                                            BS_opt_Qinit=BS_opt_Qinit)
            rapid_manager.run()
    
            #generate Qinit from 3hr
            rapid_manager.generate_qinit_from_past_qout(qinit_6hr_file)
            #from Hour 144 to 360 (36 time points) are of 6 hour time interval
            RAPIDinflowECMWF_tool.execute(ecmwf_forecast, 
                                          weight_table_file, 
                                          inflow_file_name_6hr,
                                          grid_name,
                                          "6hr_subset")
            interval_6hr = 6*60*60 #6hr
            duration_6hr = 216*60*60 #216hrs
            qout_6hr = os.path.join(node_path,'Qout_6hr.nc')
            rapid_manager.update_parameters(ZS_TauR=interval_6hr, #duration of routing procedure (time step of runoff data)
                                            ZS_dtR=15*60, #internal routing time step
                                            ZS_TauM=duration_6hr, #total simulation time 
                                            ZS_dtM=interval_6hr, #RAPID internal loop time interval
                                            Vlat_file=inflow_file_name_6hr,
                                            Qout_file=qout_6hr)
            rapid_manager.run()

            #Merge all files together at the end
            cv = ConvertRAPIDOutputToCF(rapid_output_file=[outflow_file_name, qout_6hr], 
                                        start_datetime=datetime.datetime.strptime(forecast_date_timestep[:11], "%Y%m%d.%H"), 
                                        time_step=[interval_3hr, interval_6hr], 
                                        qinit_file=qinit_file, 
                                        comid_lat_lon_z_file=comid_lat_lon_z_file,
                                        rapid_connect_file=rapid_connect_file, 
                                        project_name="ECMWF-RAPID Predicted flows by US Army ERDC", 
                                        output_id_dim_name='rivid',
                                        output_flow_var_name='Qout',
                                        print_debug=False)
            cv.convert()
    
        except Exception:
            remove_file(qinit_6hr_file)
            remove_file(inflow_file_name_3hr)
            remove_file(inflow_file_name_6hr)
            raise
            
        remove_file(qinit_6hr_file)
        remove_file(inflow_file_name_3hr)
        remove_file(inflow_file_name_6hr)
        
    elif forecast_resolution == "LowRes":
        #LOW RES - 6hr only
        inflow_file_name = os.path.join(node_path, 'm3_riv_bas_%s.nc' % ensemble_number)

        grid_name = RAPIDinflowECMWF_tool.getGridName(ecmwf_forecast, high_res=False)
        #generate inflows for each timestep
        weight_table_file = case_insensitive_file_search(rapid_input_directory,
                                                         r'weight_{0}\.csv'.format(grid_name))

        try:

            print("Converting ECMWF inflow ...")
            RAPIDinflowECMWF_tool.execute(ecmwf_forecast, 
                                          weight_table_file, 
                                          inflow_file_name,
                                          grid_name)
    
            interval = 6*60*60 #6hr
            duration = 15*24*60*60 #15 days
            rapid_manager.update_parameters(ZS_TauR=interval, #duration of routing procedure (time step of runoff data)
                                            ZS_dtR=15*60, #internal routing time step
                                            ZS_TauM=duration, #total simulation time 
                                            Vlat_file=inflow_file_name,
                                            Qout_file=outflow_file_name,
                                            Qinit_file=qinit_file,
                                            BS_opt_Qinit=BS_opt_Qinit)
    
            rapid_manager.run()
            rapid_manager.make_output_CF_compliant(simulation_start_datetime=datetime.datetime.strptime(forecast_date_timestep[:11], "%Y%m%d.%H"),
                                                   comid_lat_lon_z_file=comid_lat_lon_z_file,
                                                   project_name="ECMWF-RAPID Predicted flows by US Army ERDC")

        except Exception:
            remove_file(inflow_file_name)
            raise
            
        #clean up
        remove_file(inflow_file_name)

    else:
        raise Exception("ERROR: invalid forecast resolution ...")
        
    time_stop_all = datetime.datetime.utcnow()
    print("Total time to compute: {0}".format(time_stop_all-time_start_all))