Example #1
0
    def test_screen_some_gauge_and_save(self):
        """Save two gauge-id lists as CSV files under a "dapeng" directory.

        "v2.csv" holds the ids of reference ("Ref") basins; "v1.csv" holds the
        training-split site ids of basins screened to zero major dams.
        """
        config_dir = definitions.CONFIG_DIR
        config_file = os.path.join(config_dir, "transdata/config_exp12.ini")
        subdir = r"transdata/exp12"
        # Config bound to the transdata/exp12 output sub-directory.
        config_data = GagesConfig.set_subdir(config_file, subdir)

        # NOTE(review): the screening below uses self.config_data while the
        # GagesModels call further down uses the local config_data — confirm
        # that mixing the two configurations is intentional.
        ref_source_data = GagesSource.choose_some_basins(
            self.config_data,
            self.config_data.model_dict["data"]["tRangeTrain"],
            screen_basin_area_huc4=False,
            ref="Ref")
        ref_sites_id = ref_source_data.all_configs['flow_screen_gage_id']
        ref_sites_id_df = pd.DataFrame({"STAID": ref_sites_id})
        dapeng_dir = os.path.join(self.config_data.data_path["DB"], "dapeng")
        if not os.path.isdir(dapeng_dir):
            os.makedirs(dapeng_dir)
        dapeng_v2_gageid_file = os.path.join(dapeng_dir, "v2.csv")
        ref_sites_id_df.to_csv(dapeng_v2_gageid_file, index=False)

        # Basins with no major dams provide the "v1" site list.
        gages_model = GagesModels(config_data,
                                  screen_basin_area_huc4=False,
                                  major_dam_num=0)
        sites_id_df = pd.DataFrame(
            {"STAID": gages_model.data_model_train.t_s_dict["sites_id"]})
        dapeng_v1_gageid_file = os.path.join(dapeng_dir, "v1.csv")
        sites_id_df.to_csv(dapeng_v1_gageid_file, index=False)

        print("read and save data screen")
Example #2
0
 def test_gages_data_model(self):
     """Build the train/test GAGES data models and optionally cache them.

     Uses the pre-generated quick-data directory when CACHE.QUICK_DATA is
     set; otherwise screens the basins from scratch via GagesModels.
     """
     cache_conf = self.config_data.config_file.CACHE
     if cache_conf.QUICK_DATA:
         # Fast path: rebuild both splits from the binary quick-data cache.
         gages_model_train, gages_model_test = generate_gages_models(
             self.config_data,
             cache_conf.DATA_DIR,
             screen_basin_area_huc4=False)
     else:
         models = GagesModels(self.config_data)
         gages_model_train = models.data_model_train
         gages_model_test = models.data_model_test
     if cache_conf.STATE:
         # Persist both splits ("test_"-prefixed files for the test split).
         for model, prefix in ((gages_model_train, ''),
                               (gages_model_test, 'test_')):
             save_datamodel(model,
                            data_source_file_name=prefix + 'data_source.txt',
                            stat_file_name=prefix + 'Statistics.json',
                            flow_file_name=prefix + 'flow',
                            forcing_file_name=prefix + 'forcing',
                            attr_file_name=prefix + 'attr',
                            f_dict_file_name=prefix + 'dictFactorize.json',
                            var_dict_file_name=prefix + 'dictAttribute.json',
                            t_s_dict_file_name=prefix + 'dictTimeSpace.json')
         print("read and save data model")
Example #3
0
 def test_some_reservoirs(self):
     """Screen basins by degree of regulation (self.dor) and, when caching
     is enabled, save the resulting train/test data models."""
     models = GagesModels(self.config_data,
                          screen_basin_area_huc4=False,
                          DOR=self.dor)
     gages_model_train = models.data_model_train
     gages_model_test = models.data_model_test
     if self.cache:
         # Persist both splits ("test_"-prefixed files for the test split).
         for model, prefix in ((gages_model_train, ''),
                               (gages_model_test, 'test_')):
             save_datamodel(model,
                            data_source_file_name=prefix + 'data_source.txt',
                            stat_file_name=prefix + 'Statistics.json',
                            flow_file_name=prefix + 'flow',
                            forcing_file_name=prefix + 'forcing',
                            attr_file_name=prefix + 'attr',
                            f_dict_file_name=prefix + 'dictFactorize.json',
                            var_dict_file_name=prefix + 'dictAttribute.json',
                            t_s_dict_file_name=prefix + 'dictTimeSpace.json')
Example #4
0
 def test_gages_data_model(self):
     """Build train/test data models restricted to the CAMELS-531 basins and
     save both splits to disk."""
     gageid_file = os.path.join(self.config_data.data_path["DB"],
                                "camels531", "CAMELS531.txt")
     gauge_ids = pd.read_csv(gageid_file, dtype={"GaugeID": str})["GaugeID"].values
     # Zero-pad every id to the 8-character USGS station format and sort.
     camels_531_sites = np.sort([str(gid).zfill(8) for gid in gauge_ids])
     models = GagesModels(self.config_data,
                          screen_basin_area_huc4=False,
                          sites_id=camels_531_sites.tolist())
     # Persist both splits ("test_"-prefixed files for the test split).
     for model, prefix in ((models.data_model_train, ''),
                           (models.data_model_test, 'test_')):
         save_datamodel(model,
                        data_source_file_name=prefix + 'data_source.txt',
                        stat_file_name=prefix + 'Statistics.json',
                        flow_file_name=prefix + 'flow',
                        forcing_file_name=prefix + 'forcing',
                        attr_file_name=prefix + 'attr',
                        f_dict_file_name=prefix + 'dictFactorize.json',
                        var_dict_file_name=prefix + 'dictAttribute.json',
                        t_s_dict_file_name=prefix + 'dictTimeSpace.json')
     print("read and save data model")
Example #5
0
def camels_lstm(args):
    """Train and test an LSTM on the 531 CAMELS basins within the GAGES data.

    Reads run settings from the global ``cfg`` (after applying *args*),
    builds train/test data models restricted to the CAMELS-531 gauge list,
    optionally caches them, then trains (when TRAIN_MODE is set) and tests on
    the configured GPU, saving denormalized predictions and observations.
    """
    update_cfg(cfg, args)
    random_seed = cfg.RANDOM_SEED
    test_epoch = cfg.TEST_EPOCH
    gpu_num = cfg.CTX
    train_mode = cfg.TRAIN_MODE
    cache = cfg.CACHE.STATE
    print("train and test in CAMELS: \n")
    config_data = GagesConfig(cfg)

    # Restrict the site set to the 531 CAMELS basins listed in camels531.txt;
    # ids are zero-padded to the 8-character USGS station format and sorted.
    camels531_gageid_file = os.path.join(config_data.data_path["DB"],
                                         "camels531", "camels531.txt")
    gauge_df = pd.read_csv(camels531_gageid_file, dtype={"GaugeID": str})
    gauge_list = gauge_df["GaugeID"].values
    all_sites_camels_531 = np.sort(
        [str(gauge).zfill(8) for gauge in gauge_list])
    gages_model = GagesModels(config_data,
                              screen_basin_area_huc4=False,
                              sites_id=all_sites_camels_531.tolist())
    gages_model_train = gages_model.data_model_train
    gages_model_test = gages_model.data_model_test
    if cache:
        # Persist both splits ("test_"-prefixed files for the test split).
        save_datamodel(gages_model_train,
                       data_source_file_name='data_source.txt',
                       stat_file_name='Statistics.json',
                       flow_file_name='flow',
                       forcing_file_name='forcing',
                       attr_file_name='attr',
                       f_dict_file_name='dictFactorize.json',
                       var_dict_file_name='dictAttribute.json',
                       t_s_dict_file_name='dictTimeSpace.json')
        save_datamodel(gages_model_test,
                       data_source_file_name='test_data_source.txt',
                       stat_file_name='test_Statistics.json',
                       flow_file_name='test_flow',
                       forcing_file_name='test_forcing',
                       attr_file_name='test_attr',
                       f_dict_file_name='test_dictFactorize.json',
                       var_dict_file_name='test_dictAttribute.json',
                       t_s_dict_file_name='test_dictTimeSpace.json')
    with torch.cuda.device(gpu_num):
        if train_mode:
            master_train(gages_model_train, random_seed=random_seed)
        pred, obs = master_test(gages_model_test, epoch=test_epoch)
        basin_area = gages_model_test.data_source.read_attr(
            gages_model_test.t_s_dict["sites_id"], ['DRAIN_SQKM'],
            is_return_dict=False)
        mean_prep = gages_model_test.data_source.read_attr(
            gages_model_test.t_s_dict["sites_id"], ['PPTAVG_BASIN'],
            is_return_dict=False)
        # /365 * 10: presumably converts annual precipitation to a daily
        # value in different units — TODO confirm PPTAVG_BASIN's units.
        mean_prep = mean_prep / 365 * 10
        # to_norm=False: presumably reverses the basin normalization so the
        # saved series are in flow units — confirm against _basin_norm.
        pred = _basin_norm(pred, basin_area, mean_prep, to_norm=False)
        obs = _basin_norm(obs, basin_area, mean_prep, to_norm=False)
        save_result(gages_model_test.data_source.data_config.data_path['Temp'],
                    test_epoch, pred, obs)
Example #6
0
 def test_gages_data_model(self):
     """Build train/test data models for the current configuration and save
     both splits to disk."""
     models = GagesModels(self.config_data)
     # Persist both splits ("test_"-prefixed files for the test split).
     for model, prefix in ((models.data_model_train, ''),
                           (models.data_model_test, 'test_')):
         save_datamodel(model,
                        data_source_file_name=prefix + 'data_source.txt',
                        stat_file_name=prefix + 'Statistics.json',
                        flow_file_name=prefix + 'flow',
                        forcing_file_name=prefix + 'forcing',
                        attr_file_name=prefix + 'attr',
                        f_dict_file_name=prefix + 'dictFactorize.json',
                        var_dict_file_name=prefix + 'dictAttribute.json',
                        t_s_dict_file_name=prefix + 'dictTimeSpace.json')
     print("read and save data model")
Example #7
0
 def test_train_gages4susquehanna(self):
     """Build the susquehanna/exp4 data model and train it on GPU 2."""
     config_file = os.path.join(definitions.CONFIG_DIR,
                                "susquehanna/config_exp4.ini")
     config_data = GagesConfig.set_subdir(config_file, r"susquehanna/exp4")
     # NOTE(review): a negative DOR value presumably selects low-regulation
     # basins — confirm against the screening logic.
     gages_model = GagesModels(config_data,
                               screen_basin_area_huc4=False,
                               DOR=-0.02)
     with torch.cuda.device(2):
         master_train(gages_model.data_model_train)
     print("read and train data model")
Example #8
0
 def test_test_gages4susquehanna(self):
     """Test the trained susquehanna/exp4 model: save the test data model,
     run inference on GPU 2, denormalize, save and plot the results."""
     config_dir = definitions.CONFIG_DIR
     config_file = os.path.join(config_dir, "susquehanna/config_exp4.ini")
     subdir = r"susquehanna/exp4"
     config_data = GagesConfig.set_subdir(config_file, subdir)
     # NOTE(review): a negative DOR value presumably selects low-regulation
     # basins — confirm against the screening logic.
     dor = -0.02
     gages_model = GagesModels(config_data,
                               screen_basin_area_huc4=False,
                               DOR=dor)
     # Only the test split is persisted for this experiment.
     save_datamodel(gages_model.data_model_test,
                    data_source_file_name='test_data_source.txt',
                    stat_file_name='test_Statistics.json',
                    flow_file_name='test_flow',
                    forcing_file_name='test_forcing',
                    attr_file_name='test_attr',
                    f_dict_file_name='test_dictFactorize.json',
                    var_dict_file_name='test_dictAttribute.json',
                    t_s_dict_file_name='test_dictTimeSpace.json')
     data_model = gages_model.data_model_test
     with torch.cuda.device(2):
         pred, obs = master_test(data_model, epoch=self.test_epoch)
         basin_area = data_model.data_source.read_attr(
             data_model.t_s_dict["sites_id"], ['DRAIN_SQKM'],
             is_return_dict=False)
         mean_prep = data_model.data_source.read_attr(
             data_model.t_s_dict["sites_id"], ['PPTAVG_BASIN'],
             is_return_dict=False)
         # /365 * 10: presumably converts annual precipitation to a daily
         # value — TODO confirm PPTAVG_BASIN's units.
         mean_prep = mean_prep / 365 * 10
         # to_norm=False: presumably reverses basin normalization — confirm.
         pred = _basin_norm(pred, basin_area, mean_prep, to_norm=False)
         obs = _basin_norm(obs, basin_area, mean_prep, to_norm=False)
         save_result(data_model.data_source.data_config.data_path['Temp'],
                     self.test_epoch, pred, obs)
         plot_we_need(data_model,
                      obs,
                      pred,
                      id_col="STAID",
                      lon_col="LNG_GAGE",
                      lat_col="LAT_GAGE")
Example #9
0
def dor_lstm(args):
    """Train and test an LSTM on basins screened by degree of regulation.

    Settings come from the global cfg after applying *args*; denormalized
    predictions and observations are written to the temp data path.
    """
    update_cfg(cfg, args)
    random_seed = cfg.RANDOM_SEED
    test_epoch = cfg.TEST_EPOCH
    gpu_num = cfg.CTX
    train_mode = cfg.TRAIN_MODE
    dor = cfg.GAGES.attrScreenParams.DOR
    cache = cfg.CACHE.STATE
    print("train and test in some dor basins: \n")
    config_data = GagesConfig(cfg)

    models = GagesModels(config_data, screen_basin_area_huc4=False, DOR=dor)
    model_train = models.data_model_train
    model_test = models.data_model_test
    if cache:
        # Persist both splits ("test_"-prefixed files for the test split).
        for model, prefix in ((model_train, ''), (model_test, 'test_')):
            save_datamodel(model,
                           data_source_file_name=prefix + 'data_source.txt',
                           stat_file_name=prefix + 'Statistics.json',
                           flow_file_name=prefix + 'flow',
                           forcing_file_name=prefix + 'forcing',
                           attr_file_name=prefix + 'attr',
                           f_dict_file_name=prefix + 'dictFactorize.json',
                           var_dict_file_name=prefix + 'dictAttribute.json',
                           t_s_dict_file_name=prefix + 'dictTimeSpace.json')
    with torch.cuda.device(gpu_num):
        if train_mode:
            master_train(model_train, random_seed=random_seed)
        pred, obs = master_test(model_test, epoch=test_epoch)
        site_ids = model_test.t_s_dict["sites_id"]
        basin_area = model_test.data_source.read_attr(site_ids,
                                                      ['DRAIN_SQKM'],
                                                      is_return_dict=False)
        mean_prep = model_test.data_source.read_attr(site_ids,
                                                     ['PPTAVG_BASIN'],
                                                     is_return_dict=False)
        # /365 * 10: presumably converts annual precipitation to a daily
        # value — TODO confirm the source units.
        mean_prep = mean_prep / 365 * 10
        pred = _basin_norm(pred, basin_area, mean_prep, to_norm=False)
        obs = _basin_norm(obs, basin_area, mean_prep, to_norm=False)
        save_result(model_test.data_source.data_config.data_path['Temp'],
                    test_epoch, pred, obs)
Example #10
0
 def test_read_data(self):
     """Load train/test data models (from the quick-data cache when it is
     available) and save them when CACHE.STATE is set."""
     cache_conf = self.config_file.CACHE
     if cache_conf.QUICK_DATA:
         # Rebuild from the binary cache, limited to the configured train
         # and test time ranges.
         gages_model_train, gages_model_test = generate_gages_models(
             self.config_data,
             cache_conf.DATA_DIR,
             t_range=[
                 self.config_data.model_dict["data"]["tRangeTrain"],
                 self.config_data.model_dict["data"]["tRangeTest"]
             ],
             screen_basin_area_huc4=False)
     else:
         models = GagesModels(self.config_data,
                              screen_basin_area_huc4=False)
         gages_model_train = models.data_model_train
         gages_model_test = models.data_model_test
     if cache_conf.STATE:
         # Persist both splits ("test_"-prefixed files for the test split).
         for model, prefix in ((gages_model_train, ''),
                               (gages_model_test, 'test_')):
             save_datamodel(model,
                            data_source_file_name=prefix + 'data_source.txt',
                            stat_file_name=prefix + 'Statistics.json',
                            flow_file_name=prefix + 'flow',
                            forcing_file_name=prefix + 'forcing',
                            attr_file_name=prefix + 'attr',
                            f_dict_file_name=prefix + 'dictFactorize.json',
                            var_dict_file_name=prefix + 'dictAttribute.json',
                            t_s_dict_file_name=prefix + 'dictTimeSpace.json')
Example #11
0
 def test_gages_data_model(self):
     """Build train/test data models for basins with 1-200 major dams.

     When cfg.CACHE.QUICK_DATA is set, reload the cached CONUS data models
     and subset them to the screened site ids; otherwise screen from scratch
     via GagesModels. Both splits are persisted when cfg.CACHE.STATE is set.
     """
     config_data = self.config_data
     major_dam_num = [1, 200]  # max major dam num is 155
     if cfg.CACHE.QUICK_DATA:
         # Screening only determines the site id list here; the actual data
         # come from the quick-data cache loaded below.
         source_data = GagesSource.choose_some_basins(
             config_data,
             config_data.model_dict["data"]["tRangeTrain"],
             screen_basin_area_huc4=False,
             major_dam_num=major_dam_num)
         sites_id = source_data.all_configs['flow_screen_gage_id']
         # Fixed typo in the log message ("exsited" -> "existed").
         print("The binary data has existed")
         quick_data_dir = os.path.join(self.config_data.data_path["DB"],
                                       "quickdata")
         # data_dir = os.path.join(quick_data_dir, "conus-all_85-05_nan-0.1_00-1.0")
         data_dir = os.path.join(quick_data_dir,
                                 "conus-all_90-10_nan-0.0_00-1.0")
         data_model_train = GagesModel.load_datamodel(
             data_dir,
             data_source_file_name='data_source.txt',
             stat_file_name='Statistics.json',
             flow_file_name='flow.npy',
             forcing_file_name='forcing.npy',
             attr_file_name='attr.npy',
             f_dict_file_name='dictFactorize.json',
             var_dict_file_name='dictAttribute.json',
             t_s_dict_file_name='dictTimeSpace.json')
         data_model_test = GagesModel.load_datamodel(
             data_dir,
             data_source_file_name='test_data_source.txt',
             stat_file_name='test_Statistics.json',
             flow_file_name='test_flow.npy',
             forcing_file_name='test_forcing.npy',
             attr_file_name='test_attr.npy',
             f_dict_file_name='test_dictFactorize.json',
             var_dict_file_name='test_dictAttribute.json',
             t_s_dict_file_name='test_dictTimeSpace.json')
         # Subset the cached CONUS models to the screened sites; the test
         # split reuses the training statistics.
         gages_model_train = GagesModel.update_data_model(
             self.config_data,
             data_model_train,
             sites_id_update=sites_id,
             screen_basin_area_huc4=False)
         gages_model_test = GagesModel.update_data_model(
             self.config_data,
             data_model_test,
             sites_id_update=sites_id,
             train_stat_dict=gages_model_train.stat_dict,
             screen_basin_area_huc4=False)
     else:
         gages_model = GagesModels(config_data,
                                   screen_basin_area_huc4=False,
                                   major_dam_num=major_dam_num)
         gages_model_train = gages_model.data_model_train
         gages_model_test = gages_model.data_model_test
     if cfg.CACHE.STATE:
         # Persist both splits ("test_"-prefixed files for the test split).
         save_datamodel(gages_model_train,
                        data_source_file_name='data_source.txt',
                        stat_file_name='Statistics.json',
                        flow_file_name='flow',
                        forcing_file_name='forcing',
                        attr_file_name='attr',
                        f_dict_file_name='dictFactorize.json',
                        var_dict_file_name='dictAttribute.json',
                        t_s_dict_file_name='dictTimeSpace.json')
         save_datamodel(gages_model_test,
                        data_source_file_name='test_data_source.txt',
                        stat_file_name='test_Statistics.json',
                        flow_file_name='test_flow',
                        forcing_file_name='test_forcing',
                        attr_file_name='test_attr',
                        f_dict_file_name='test_dictFactorize.json',
                        var_dict_file_name='test_dictAttribute.json',
                        t_s_dict_file_name='test_dictTimeSpace.json')
         print("read and save data model")
Example #12
0
                   flow_file_name='test_flow',
                   forcing_file_name='test_forcing',
                   attr_file_name='test_attr',
                   f_dict_file_name='test_dictFactorize.json',
                   var_dict_file_name='test_dictAttribute.json',
                   t_s_dict_file_name='test_dictTimeSpace.json')
    hydro_logger.info("read and save gages conus data model")

    camels531_gageid_file = os.path.join(config_data.data_path["DB"],
                                         "camels531", "camels531.txt")
    gauge_df = pd.read_csv(camels531_gageid_file, dtype={"GaugeID": str})
    gauge_list = gauge_df["GaugeID"].values
    all_sites_camels_531 = np.sort(
        [str(gauge).zfill(8) for gauge in gauge_list])
    gages_model = GagesModels(config_data,
                              screen_basin_area_huc4=False,
                              sites_id=all_sites_camels_531.tolist())
    save_datamodel(gages_model.data_model_test,
                   data_source_file_name='test_data_source.txt',
                   stat_file_name='test_Statistics.json',
                   flow_file_name='test_flow',
                   forcing_file_name='test_forcing',
                   attr_file_name='test_attr',
                   f_dict_file_name='test_dictFactorize.json',
                   var_dict_file_name='test_dictAttribute.json',
                   t_s_dict_file_name='test_dictTimeSpace.json')
    hydro_logger.info("read and save camels 531 data model")
# plot
data_model = GagesModel.load_datamodel(
    config_data.data_path["Temp"],
    data_source_file_name='test_data_source.txt',
Example #13
0
    def test_some_reservoirs(self):
        """Train/test on basin sets chosen by the dam plan.

        dam_plan == 2: union of basins with no dams and basins passing the
        DOR screen. dam_plan == 3: basins with at least one dam. Data models
        are cached when self.cache is set; training runs only when
        self.train_mode is set, and test results are denormalized and saved.
        """
        print("train and test in basins with different combination: \n")
        dam_plan = self.dam_plan
        config_data = self.config_data
        test_epoch = self.test_epoch
        if dam_plan == 2:
            dam_num = 0
            dor = self.config_file.GAGES.attrScreenParams.DOR
            source_data_dor1 = GagesSource.choose_some_basins(
                config_data,
                config_data.model_dict["data"]["tRangeTrain"],
                screen_basin_area_huc4=False,
                DOR=dor)
            # basins without any dams (dam_num == 0)
            source_data_withoutdams = GagesSource.choose_some_basins(
                config_data,
                config_data.model_dict["data"]["tRangeTrain"],
                screen_basin_area_huc4=False,
                dam_num=dam_num)

            sites_id_dor1 = source_data_dor1.all_configs['flow_screen_gage_id']
            sites_id_withoutdams = source_data_withoutdams.all_configs[
                'flow_screen_gage_id']
            # Union: keep a basin if it passes either screen.
            sites_id_chosen = np.sort(
                np.union1d(np.array(sites_id_dor1),
                           np.array(sites_id_withoutdams))).tolist()
        elif dam_plan == 3:
            dam_num = [1, 100000]
            # basins with dams
            source_data_withdams = GagesSource.choose_some_basins(
                config_data,
                config_data.model_dict["data"]["tRangeTrain"],
                screen_basin_area_huc4=False,
                dam_num=dam_num)
            sites_id_chosen = source_data_withdams.all_configs[
                'flow_screen_gage_id']
        else:
            # NOTE(review): an invalid dam_plan only prints and leaves
            # sites_id_chosen = None, which is still passed to GagesModels
            # below — confirm that is the intended fallback.
            print("wrong choice")
            sites_id_chosen = None
        gages_model = GagesModels(config_data,
                                  screen_basin_area_huc4=False,
                                  sites_id=sites_id_chosen)
        gages_model_train = gages_model.data_model_train
        gages_model_test = gages_model.data_model_test
        if self.cache:
            # Persist both splits ("test_"-prefixed files for the test split).
            save_datamodel(gages_model_train,
                           data_source_file_name='data_source.txt',
                           stat_file_name='Statistics.json',
                           flow_file_name='flow',
                           forcing_file_name='forcing',
                           attr_file_name='attr',
                           f_dict_file_name='dictFactorize.json',
                           var_dict_file_name='dictAttribute.json',
                           t_s_dict_file_name='dictTimeSpace.json')
            save_datamodel(gages_model_test,
                           data_source_file_name='test_data_source.txt',
                           stat_file_name='test_Statistics.json',
                           flow_file_name='test_flow',
                           forcing_file_name='test_forcing',
                           attr_file_name='test_attr',
                           f_dict_file_name='test_dictFactorize.json',
                           var_dict_file_name='test_dictAttribute.json',
                           t_s_dict_file_name='test_dictTimeSpace.json')
        with torch.cuda.device(self.gpu_num):
            if self.train_mode:
                master_train(gages_model_train, random_seed=self.random_seed)
            pred, obs = master_test(gages_model_test, epoch=test_epoch)
            basin_area = gages_model_test.data_source.read_attr(
                gages_model_test.t_s_dict["sites_id"], ['DRAIN_SQKM'],
                is_return_dict=False)
            mean_prep = gages_model_test.data_source.read_attr(
                gages_model_test.t_s_dict["sites_id"], ['PPTAVG_BASIN'],
                is_return_dict=False)
            # /365 * 10: presumably converts annual precipitation to a daily
            # value — TODO confirm PPTAVG_BASIN's units.
            mean_prep = mean_prep / 365 * 10
            pred = _basin_norm(pred, basin_area, mean_prep, to_norm=False)
            obs = _basin_norm(obs, basin_area, mean_prep, to_norm=False)
            save_result(
                gages_model_test.data_source.data_config.data_path['Temp'],
                test_epoch, pred, obs)
Example #14
0
def dam_lstm(args):
    """Train and test an LSTM on dammed basins that also pass the DOR screen.

    The site set is the intersection of basins passing the DOR screen and
    basins with at least one dam. Settings come from the global ``cfg`` after
    applying *args*; denormalized results are saved to the temp data path.
    """
    update_cfg(cfg, args)
    random_seed = cfg.RANDOM_SEED
    test_epoch = cfg.TEST_EPOCH
    gpu_num = cfg.CTX
    train_mode = cfg.TRAIN_MODE
    dor = cfg.GAGES.attrScreenParams.DOR
    cache = cfg.CACHE.STATE
    print("train and test in basins with dams: \n")
    config_data = GagesConfig(cfg)

    # Basins passing the DOR screen.
    source_data_dor1 = GagesSource.choose_some_basins(
        config_data,
        config_data.model_dict["data"]["tRangeTrain"],
        screen_basin_area_huc4=False,
        DOR=dor)
    # basins with dams
    source_data_withdams = GagesSource.choose_some_basins(
        config_data,
        config_data.model_dict["data"]["tRangeTrain"],
        screen_basin_area_huc4=False,
        dam_num=[1, 100000])

    sites_id_dor1 = source_data_dor1.all_configs['flow_screen_gage_id']
    sites_id_withdams = source_data_withdams.all_configs['flow_screen_gage_id']
    # Keep only basins that satisfy both screens.
    sites_id_chosen = np.intersect1d(np.array(sites_id_dor1),
                                     np.array(sites_id_withdams)).tolist()

    gages_model = GagesModels(config_data,
                              screen_basin_area_huc4=False,
                              sites_id=sites_id_chosen)
    gages_model_train = gages_model.data_model_train
    gages_model_test = gages_model.data_model_test
    if cache:
        # Persist both splits ("test_"-prefixed files for the test split).
        save_datamodel(gages_model_train,
                       data_source_file_name='data_source.txt',
                       stat_file_name='Statistics.json',
                       flow_file_name='flow',
                       forcing_file_name='forcing',
                       attr_file_name='attr',
                       f_dict_file_name='dictFactorize.json',
                       var_dict_file_name='dictAttribute.json',
                       t_s_dict_file_name='dictTimeSpace.json')
        save_datamodel(gages_model_test,
                       data_source_file_name='test_data_source.txt',
                       stat_file_name='test_Statistics.json',
                       flow_file_name='test_flow',
                       forcing_file_name='test_forcing',
                       attr_file_name='test_attr',
                       f_dict_file_name='test_dictFactorize.json',
                       var_dict_file_name='test_dictAttribute.json',
                       t_s_dict_file_name='test_dictTimeSpace.json')
    with torch.cuda.device(gpu_num):
        if train_mode:
            master_train(gages_model_train, random_seed=random_seed)
        pred, obs = master_test(gages_model_test, epoch=test_epoch)
        basin_area = gages_model_test.data_source.read_attr(
            gages_model_test.t_s_dict["sites_id"], ['DRAIN_SQKM'],
            is_return_dict=False)
        mean_prep = gages_model_test.data_source.read_attr(
            gages_model_test.t_s_dict["sites_id"], ['PPTAVG_BASIN'],
            is_return_dict=False)
        # /365 * 10: presumably converts annual precipitation to a daily
        # value — TODO confirm PPTAVG_BASIN's units.
        mean_prep = mean_prep / 365 * 10
        pred = _basin_norm(pred, basin_area, mean_prep, to_norm=False)
        obs = _basin_norm(obs, basin_area, mean_prep, to_norm=False)
        save_result(gages_model_test.data_source.data_config.data_path['Temp'],
                    test_epoch, pred, obs)
Example #15
0
def conus_other_lstm(args):
    """Train and test the "easier" LSTM pipeline on all CONUS basins.

    Optionally generates the quick-data binary cache (CACHE.GEN_QUICK_DATA)
    and/or saves the train/test data models (CACHE.STATE), then trains (when
    TRAIN_MODE is set) and tests through StreamflowInputDataset, saving
    denormalized predictions and observations.
    """
    update_cfg(cfg, args)
    random_seed = cfg.RANDOM_SEED
    test_epoch = cfg.TEST_EPOCH
    gpu_num = cfg.CTX
    train_mode = cfg.TRAIN_MODE
    print("train and test in CONUS: \n")
    print(cfg)
    config_data = GagesConfig(cfg)

    gages_model = GagesModels(config_data, screen_basin_area_huc4=False)
    gages_model_train = gages_model.data_model_train
    gages_model_test = gages_model.data_model_test
    if cfg.CACHE.GEN_QUICK_DATA:
        # Write the reusable quick-data cache for later runs.
        if not os.path.isdir(cfg.CACHE.DATA_DIR):
            os.makedirs(cfg.CACHE.DATA_DIR)
        save_quick_data(gages_model_train,
                        cfg.CACHE.DATA_DIR,
                        data_source_file_name='data_source.txt',
                        stat_file_name='Statistics.json',
                        flow_file_name='flow',
                        forcing_file_name='forcing',
                        attr_file_name='attr',
                        f_dict_file_name='dictFactorize.json',
                        var_dict_file_name='dictAttribute.json',
                        t_s_dict_file_name='dictTimeSpace.json')
        save_quick_data(gages_model_test,
                        cfg.CACHE.DATA_DIR,
                        data_source_file_name='test_data_source.txt',
                        stat_file_name='test_Statistics.json',
                        flow_file_name='test_flow',
                        forcing_file_name='test_forcing',
                        attr_file_name='test_attr',
                        f_dict_file_name='test_dictFactorize.json',
                        var_dict_file_name='test_dictAttribute.json',
                        t_s_dict_file_name='test_dictTimeSpace.json')
    if cfg.CACHE.STATE:
        # Persist both splits ("test_"-prefixed files for the test split).
        save_datamodel(gages_model_train,
                       data_source_file_name='data_source.txt',
                       stat_file_name='Statistics.json',
                       flow_file_name='flow',
                       forcing_file_name='forcing',
                       attr_file_name='attr',
                       f_dict_file_name='dictFactorize.json',
                       var_dict_file_name='dictAttribute.json',
                       t_s_dict_file_name='dictTimeSpace.json')
        save_datamodel(gages_model_test,
                       data_source_file_name='test_data_source.txt',
                       stat_file_name='test_Statistics.json',
                       flow_file_name='test_flow',
                       forcing_file_name='test_forcing',
                       attr_file_name='test_attr',
                       f_dict_file_name='test_dictFactorize.json',
                       var_dict_file_name='test_dictAttribute.json',
                       t_s_dict_file_name='test_dictTimeSpace.json')

    with torch.cuda.device(gpu_num):
        if train_mode:
            data_set = StreamflowInputDataset(gages_model_train)
            master_train_easier_lstm(data_set)
        test_data_set = StreamflowInputDataset(gages_model_test,
                                               train_mode=False)
        pred, obs = master_test_easier_lstm(test_data_set,
                                            load_epoch=test_epoch)
        basin_area = gages_model_test.data_source.read_attr(
            gages_model_test.t_s_dict["sites_id"], ['DRAIN_SQKM'],
            is_return_dict=False)
        mean_prep = gages_model_test.data_source.read_attr(
            gages_model_test.t_s_dict["sites_id"], ['PPTAVG_BASIN'],
            is_return_dict=False)
        # /365 * 10: presumably converts annual precipitation to a daily
        # value — TODO confirm PPTAVG_BASIN's units.
        mean_prep = mean_prep / 365 * 10
        pred = _basin_norm(pred, basin_area, mean_prep, to_norm=False)
        obs = _basin_norm(obs, basin_area, mean_prep, to_norm=False)
        save_result(gages_model_test.data_source.data_config.data_path['Temp'],
                    test_epoch, pred, obs)