예제 #1
0
def main():
    """Import field-scale spatial parameters stored in CSV files into GridFS."""
    from preprocess.config import parse_ini_configuration

    cfg = parse_ini_configuration()
    mongo_client = ConnectMongoDB(cfg.hostname, cfg.port)
    database = mongo_client.get_conn()[cfg.spatial_db]

    gfs = GridFS(database, DBTableNames.gridfs_spatial)

    csv_path = r'C:\z_data\zhongTianShe\model_data_seims\field_scale_params'
    csv_files = FileClass.get_full_filename_by_suffixes(csv_path, ['.csv'])
    field_count = 7419
    prefix = 9999
    # Build and import the mask: a single row of ones, one entry per field.
    mask_name = '%d_MASK' % prefix
    import_array_to_mongodb(gfs, [[1] * field_count], mask_name)

    # Import each spatial parameter array parsed from the CSV files.
    for csv_file in csv_files:
        print('Import %s...' % csv_file)
        for key, value in read_field_arrays_from_csv(csv_file).items():
            import_array_to_mongodb(gfs, value, '%d_%s' % (prefix, key))
예제 #2
0
def main():
    """TEST CODE"""
    from preprocess.config import parse_ini_configuration
    from preprocess.db_mongodb import ConnectMongoDB

    cfg = parse_ini_configuration()
    mongo_client = ConnectMongoDB(cfg.hostname, cfg.port)
    spatial_db = mongo_client.get_conn()[cfg.spatial_db]

    # Extract terrain-related model parameters into the spatial database.
    TerrainUtilClass.parameters_extraction(cfg, spatial_db)
예제 #3
0
파일: utility.py 프로젝트: crazyzlj/SEIMS
def delete_scenarios_by_ids(hostname, port, dbname, sids):
    """Delete scenario data by ID in MongoDB.

    Args:
        hostname: MongoDB server address.
        port: MongoDB server port.
        dbname: name of the BMP scenario database.
        sids: iterable of integer scenario IDs to delete.
    """
    client = ConnectMongoDB(hostname, port)
    conn = client.get_conn()
    db = conn[dbname]
    collection = db['BMP_SCENARIOS']
    for _id in sids:
        # Collection.remove() is deprecated since PyMongo 3.0 and removed in
        # PyMongo 4; delete_many() removes every document matching the filter.
        collection.delete_many({'ID': _id})
        print('Delete scenario: %d in MongoDB completed!' % _id)
    client.close()
예제 #4
0
파일: userdef.py 프로젝트: crazyzlj/SEIMS
def write_param_values_to_mongodb(hostname, port, spatial_db, param_defs, param_values):
    """Refresh CALI_VALUES of each calibrated parameter in the PARAMETERS collection."""
    client = ConnectMongoDB(hostname, port)
    coll = client.get_conn()[spatial_db]['PARAMETERS']
    # Remove stale calibration values from every document before rewriting.
    coll.update_many({}, {'$unset': {'CALI_VALUES': ''}})
    for col, name in enumerate(param_defs['names']):
        joined = ','.join(str(v) for v in param_values[:, col])
        coll.find_one_and_update({'NAME': name}, {'$set': {'CALI_VALUES': joined}})
    client.close()
def main():
    """TEST CODE"""
    from preprocess.config import parse_ini_configuration
    from preprocess.db_mongodb import ConnectMongoDB

    cfg = parse_ini_configuration()
    mongo_client = ConnectMongoDB(cfg.hostname, cfg.port)
    connection = mongo_client.get_conn()

    # Generate weight data for the whole basin (subbasin ID 0).
    ImportWeightData.workflow(cfg, connection, 0)

    mongo_client.close()
예제 #6
0
def main():
    """TEST CODE"""
    from preprocess.config import parse_ini_configuration
    from preprocess.db_mongodb import ConnectMongoDB

    cfg = parse_ini_configuration()
    mongo_client = ConnectMongoDB(cfg.hostname, cfg.port)
    spatial_db = mongo_client.get_conn()[cfg.spatial_db]

    # Import model parameters into the spatial database.
    ImportParam2Mongo.workflow(cfg, spatial_db)

    mongo_client.close()
예제 #7
0
 def reset_simulation_timerange(self):
     """Write the model's start/end time back to the FILE_IN collection in MongoDB."""
     client = ConnectMongoDB(self.model.host, self.model.port)
     filein_coll = client.get_conn()[self.model.db_name][DBTableNames.main_filein]
     time_fmt = '%Y-%m-%d %H:%M:%S'
     # Update STARTTIME first, then ENDTIME (insertion order is preserved).
     updates = {'STARTTIME': self.model.time_start.strftime(time_fmt),
                'ENDTIME': self.model.time_end.strftime(time_fmt)}
     for tag, value in updates.items():
         filein_coll.find_one_and_update({'TAG': tag},
                                         {'$set': {'VALUE': value}})
     client.close()
예제 #8
0
def main():
    """TEST CODE"""
    from preprocess.config import parse_ini_configuration
    from preprocess.db_mongodb import ConnectMongoDB

    cfg = parse_ini_configuration()
    mongo_client = ConnectMongoDB(cfg.hostname, cfg.port)
    spatial_db = mongo_client.get_conn()[cfg.spatial_db]

    # Build the reach (stream) table with initialized parameters.
    ImportReaches2Mongo.generate_reach_table(cfg, spatial_db)

    mongo_client.close()
예제 #9
0
def main():
    """TEST CODE"""
    from preprocess.config import parse_ini_configuration
    from preprocess.db_mongodb import ConnectMongoDB

    cfg = parse_ini_configuration()
    mongo_client = ConnectMongoDB(cfg.hostname, cfg.port)
    connection = mongo_client.get_conn()
    spatial_db = connection[cfg.spatial_db]
    scenario_db = connection[cfg.bmp_scenario_db]

    # Import BMP scenarios defined in text files.
    ImportScenario2Mongo.scenario_from_texts(cfg, spatial_db, scenario_db)

    mongo_client.close()
예제 #10
0
def main():
    """TEST CODE"""
    from preprocess.config import parse_ini_configuration
    from preprocess.db_mongodb import ConnectMongoDB

    cfg = parse_ini_configuration()
    mongo_client = ConnectMongoDB(cfg.hostname, cfg.port)
    climate_db = mongo_client.get_conn()[cfg.climate_db]

    # Query meteorological ('M') and precipitation ('P') climate sites.
    site_m = HydroClimateUtilClass.query_climate_sites(climate_db, 'M')
    site_p = HydroClimateUtilClass.query_climate_sites(climate_db, 'P')

    mongo_client.close()
예제 #11
0
def main():
    """TEST CODE"""
    from preprocess.config import parse_ini_configuration
    from preprocess.db_mongodb import ConnectMongoDB
    import time

    cfg = parse_ini_configuration()
    mongo_client = ConnectMongoDB(cfg.hostname, cfg.port)
    climate_db = mongo_client.get_conn()[cfg.climate_db]
    # Time the meteorological-data import and report the elapsed seconds.
    begin = time.time()
    ImportMeteoData.workflow(cfg, climate_db)
    print(time.time() - begin)
    mongo_client.close()
예제 #12
0
def main():
    """Clone the whole-basin SITELIST item (SUBBASINID=0) as a field-scale item (9999)."""
    from preprocess.config import parse_ini_configuration

    cfg = parse_ini_configuration()
    mongo_client = ConnectMongoDB(cfg.hostname, cfg.port)
    sitelist_coll = mongo_client.get_conn()[cfg.spatial_db][DBTableNames.main_sitelist]

    # Add an item in SITELIST collection, in which the SUBBASINID is 9999
    whole_basin_item = sitelist_coll.find_one({FieldNames.subbasin_id: 0})
    new_item = deepcopy(whole_basin_item)
    del new_item['_id']  # let MongoDB assign a fresh ObjectId
    new_item[FieldNames.subbasin_id] = 9999
    sitelist_coll.insert_one(new_item)
예제 #13
0
파일: calibrate.py 프로젝트: crazyzlj/SEIMS
    def ParamDefs(self):
        """Read cali_param_rng.def file

           name,lower_bound,upper_bound

            e.g.,
             Param1,0,1
             Param2,0.5,1.2
             Param3,-1.0,1.0

        Returns:
            a dictionary containing:
            - names - the names of the parameters
            - bounds - a list of lists of lower and upper bounds
            - num_vars - a scalar indicating the number of variables
                         (the length of names)

        Raises:
            ValueError: if the parameter range definition file does not exist.
        """
        # Return cached definitions if already parsed.
        if self.param_defs:
            return self.param_defs
        # Validate the definition file BEFORE opening a MongoDB connection,
        # so a missing file cannot leak an unclosed client.
        if not FileClass.is_file_exists(self.cfg.param_range_def):
            raise ValueError('Parameters definition file: %s is not'
                             ' existed!' % self.cfg.param_range_def)
        # read param_range_def file and output to json file
        client = ConnectMongoDB(self.cfg.model.host, self.cfg.model.port)
        conn = client.get_conn()
        db = conn[self.cfg.model.db_name]
        collection = db['PARAMETERS']

        names = list()
        bounds = list()
        num_vars = 0
        items = read_data_items_from_txt(self.cfg.param_range_def)
        for item in items:
            if len(item) < 3:
                continue
            # find parameter name, print warning message if not existed.
            # count_documents() replaces the deprecated Cursor.count() (removed
            # in PyMongo 4) and avoids leaving a no_cursor_timeout cursor open.
            if collection.count_documents({'NAME': item[0]}) == 0:
                print('WARNING: parameter %s is not existed!' % item[0])
                continue
            num_vars += 1
            names.append(item[0])
            bounds.append([float(item[1]), float(item[2])])
        client.close()  # BUG FIX: the connection was never closed before
        self.param_defs = {'names': names, 'bounds': bounds, 'num_vars': num_vars}
        return self.param_defs
예제 #14
0
 def write_param_values_to_mongodb(self):
     """Update Parameters collection in MongoDB.

     Notes:
         The field value of 'CALI_VALUES' of all parameters will be deleted first.
     """
     # Lazily load parameter definitions and samples if not yet available.
     if not self.param_defs:
         self.read_param_ranges()
     if self.param_values is None or len(self.param_values) == 0:
         self.generate_samples()
     client = ConnectMongoDB(self.model.host, self.model.port)
     coll = client.get_conn()[self.model.db_name]['PARAMETERS']
     # Clear previous calibration values before writing the new samples.
     coll.update_many({}, {'$unset': {'CALI_VALUES': ''}})
     for col, name in enumerate(self.param_defs['names']):
         values_str = ','.join(str(v) for v in self.param_values[:, col])
         coll.find_one_and_update({'NAME': name},
                                  {'$set': {'CALI_VALUES': values_str}})
     client.close()
예제 #15
0
파일: scenario.py 프로젝트: crazyzlj/SEIMS
 def read_simulation_timerange(self):
     """Read simulation time range from MongoDB.

     Computes the simulation length in years (fractional) from the STARTTIME
     and ENDTIME tags of the FILE_IN collection and stores it in self.timerange.
     Falls back to 1 year on any failure.
     """
     client = ConnectMongoDB(self.hostname, self.port)
     conn = client.get_conn()
     db = conn[self.main_db]
     collection = db['FILE_IN']
     try:
         stime_str = collection.find_one({'TAG': 'STARTTIME'}, no_cursor_timeout=True)['VALUE']
         etime_str = collection.find_one({'TAG': 'ENDTIME'}, no_cursor_timeout=True)['VALUE']
         stime = StringClass.get_datetime(stime_str)
         etime = StringClass.get_datetime(etime_str)
         dlt = etime - stime + timedelta(seconds=1)
         self.timerange = (dlt.days * 86400. + dlt.seconds) / 86400. / 365.
     except Exception:
         # BUG FIX: `except NetworkTimeout or Exception` evaluates the class
         # expression `NetworkTimeout or Exception` to NetworkTimeout only,
         # so other failures (missing tags, parse errors) were NOT caught.
         self.timerange = 1.  # set default
     client.close()
예제 #16
0
파일: scenario.py 프로젝트: crazyzlj/SEIMS
 def export_to_mongodb(self):
     """Export current scenario to MongoDB.
     Delete the same ScenarioID if existed.
     """
     client = ConnectMongoDB(self.hostname, self.port)
     conn = client.get_conn()
     db = conn[self.scenario_db]
     collection = db['BMP_SCENARIOS']
     try:
         # delete_many() removes any previous documents with this scenario ID
         # in one call. It replaces the deprecated Cursor.count() existence
         # check plus Collection.remove(), both removed in PyMongo 4.
         collection.delete_many({'ID': self.ID})
     except Exception:
         # BUG FIX: `except NetworkTimeout or Exception` evaluates to
         # `except NetworkTimeout` only; other network errors were not caught.
         # Best-effort cleanup: proceed to insert regardless.
         pass
     for objid, bmp_item in self.bmp_items.items():
         bmp_item['_id'] = ObjectId()  # assign a fresh id for each BMP item
         collection.insert_one(bmp_item)
     client.close()
예제 #17
0
파일: config.py 프로젝트: crazyzlj/SEIMS
    def read_bmp_parameters(self):
        """Read BMP configuration from MongoDB.

        Fills self.bmps_params as {SUBSCENARIO id: {param name: value}} for the
        subscenario IDs listed in self.bmps_subids.
        """
        client = ConnectMongoDB(self.hostname, self.port)
        conn = client.get_conn()
        scenariodb = conn[self.bmp_scenario_db]

        bmpcoll = scenariodb[self.bmps_coll]
        findbmps = bmpcoll.find({}, no_cursor_timeout=True)
        for fb in findbmps:
            fb = UtilClass.decode_strs_in_dict(fb)
            if 'SUBSCENARIO' not in fb:
                continue
            curid = fb['SUBSCENARIO']
            if curid not in self.bmps_subids:
                continue
            if curid not in self.bmps_params:
                self.bmps_params[curid] = dict()
            for k, v in fb.items():
                if k == 'SUBSCENARIO':
                    continue
                elif k == 'LANDUSE':
                    if isinstance(v, int):
                        v = [v]
                    elif v == 'ALL' or v == '':
                        v = None  # None means the BMP applies to all landuses
                    else:
                        v = StringClass.extract_numeric_values_from_string(v)
                        v = [int(abs(nv)) for nv in v]
                    # BUG FIX: v may be None here ('ALL'/'' landuse), and
                    # None[:] raises TypeError. Copy only when a list exists.
                    self.bmps_params[curid][k] = v[:] if v is not None else None
                elif k == 'SLPPOS':
                    if isinstance(v, int):
                        v = [v]
                    elif v == 'ALL' or v == '':
                        v = list(self.slppos_tags.keys())
                    else:
                        v = StringClass.extract_numeric_values_from_string(v)
                        v = [int(abs(nv)) for nv in v]
                    self.bmps_params[curid][k] = v[:]
                else:
                    self.bmps_params[curid][k] = v

        # Explicitly close the no_cursor_timeout cursor to release server
        # resources (such cursors are never reaped automatically).
        findbmps.close()
        client.close()
예제 #18
0
"""Create a connection to MongoDB as a global module used in SCOOP-based parallel applications

    Note that, this is a temporary and not elegant solution.
      Before running such applications such as scenarios_analysis/spatialunits/main_nsga2.py,
      users MUST update the host and port manually.

    References:
        Explicit access to module level variables by accessing them explicitly on the module.
          https://stackoverflow.com/a/35904211/4837280

    @author   : Liangjun Zhu

    @changelog:
    - 20-07-21  lj - separated from preprocess.db_mongodb.py to make it more likely a global module
"""
from __future__ import absolute_import, unicode_literals

import sys

from preprocess.db_mongodb import ConnectMongoDB

# this is a pointer to the module object instance itself
this = sys.modules[__name__]

# user specific parameters for their MongoDB server
this.host = '127.0.0.1'
this.port = 27017

# this client will be created once for each process in the entire application
this.client = ConnectMongoDB(ip=this.host, port=this.port).get_conn()
예제 #19
0
    def read_param_ranges(self):
        """Read param_rng.def file

           name,lower_bound,upper_bound,group,dist
           (group and dist are optional)

            e.g.,
             Param1,0,1[,Group1][,dist1]
             Param2,0,1[,Group2][,dist2]
             Param3,0,1[,Group3][,dist3]

        Returns:
            a dictionary containing:
            - names - the names of the parameters
            - bounds - a list of lists of lower and upper bounds
            - num_vars - a scalar indicating the number of variables
                         (the length of names)
            - groups - a list of group names (strings) for each variable
            - dists - a list of distributions for the problem,
                        None if not specified or all uniform
        """
        # Use the cached JSON copy if it has already been generated.
        if not self.param_defs:
            if FileClass.is_file_exists(self.cfg.outfiles.param_defs_json):
                with open(self.cfg.outfiles.param_defs_json, 'r') as f:
                    self.param_defs = UtilClass.decode_strs_in_dict(json.load(f))
                return
        # read param_range_def file and output to json file
        client = ConnectMongoDB(self.model.host, self.model.port)
        conn = client.get_conn()
        db = conn[self.model.db_name]
        collection = db['PARAMETERS']

        names = list()
        bounds = list()
        groups = list()
        dists = list()
        num_vars = 0
        items = read_data_items_from_txt(self.cfg.param_range_def)
        for item in items:
            if len(item) < 3:
                continue
            # find parameter name, print warning message if not existed.
            # count_documents() replaces the deprecated Cursor.count() (removed
            # in PyMongo 4) and avoids leaving a no_cursor_timeout cursor open.
            if collection.count_documents({'NAME': item[0]}) == 0:
                print('WARNING: parameter %s is not existed!' % item[0])
                continue
            num_vars += 1
            names.append(item[0])
            bounds.append([float(item[1]), float(item[2])])
            # If the fourth column does not contain a group name, use
            # the parameter name
            if len(item) >= 4:
                groups.append(item[3])
            else:
                groups.append(item[0])
            if len(item) >= 5:
                dists.append(item[4])
            else:
                dists.append('unif')
        client.close()  # BUG FIX: the MongoDB connection was never closed
        if groups == names:
            groups = None
        elif len(set(groups)) == 1:
            # BUG FIX: error message typo 'bemeaningful' -> 'be meaningful'
            raise ValueError('Only one group defined, results will not be meaningful')

        # setting dists to none if all are uniform
        # because non-uniform scaling is not needed
        if all(d == 'unif' for d in dists):
            dists = None

        self.param_defs = {'names': names, 'bounds': bounds,
                           'num_vars': num_vars, 'groups': groups, 'dists': dists}

        # Save as json, which can be loaded by json.load()
        json_data = json.dumps(self.param_defs, indent=4, cls=SpecialJsonEncoder)
        with open(self.cfg.outfiles.param_defs_json, 'w') as f:
            f.write(json_data)
예제 #20
0
    def export_scenario_to_gtiff(self, outpath=None):
        """Export scenario to GTiff.

        Args:
            outpath: output GTiff path; defaults to
                     `<scenario_dir>/Scenario_<ID>.tif` when None.

        TODO: Read Raster from MongoDB should be extracted to pygeoc.
        """
        if not self.export_sce_tif:
            return
        dist = self.bmps_info['DISTRIBUTION']
        dist_list = StringClass.split_string(dist, '|')
        if len(dist_list) >= 2 and dist_list[0] == 'RASTER':
            dist_name = '0_' + dist_list[1]  # prefix 0_ means the whole basin
            # read dist_name from MongoDB
            client = ConnectMongoDB(self.hostname, self.port)
            conn = client.get_conn()
            maindb = conn[self.main_db]
            spatial_gfs = GridFS(maindb, DBTableNames.gridfs_spatial)
            # read file from mongodb
            if not spatial_gfs.exists(filename=dist_name):
                print('WARNING: %s is not existed, export scenario failed!' %
                      dist_name)
                client.close()  # BUG FIX: connection leaked on this early return
                return
            try:
                slpposf = maindb[DBTableNames.gridfs_spatial].files.find(
                    {'filename': dist_name}, no_cursor_timeout=True)[0]
            except Exception:
                # BUG FIX: `except NetworkTimeout or Exception` evaluates to
                # `except NetworkTimeout` only; other errors escaped uncaught.
                client.close()
                return

            # Reconstruct raster geo-metadata stored alongside the GridFS file.
            ysize = int(slpposf['metadata'][RasterMetadata.nrows])
            xsize = int(slpposf['metadata'][RasterMetadata.ncols])
            xll = slpposf['metadata'][RasterMetadata.xll]
            yll = slpposf['metadata'][RasterMetadata.yll]
            cellsize = slpposf['metadata'][RasterMetadata.cellsize]
            nodata_value = slpposf['metadata'][RasterMetadata.nodata]
            srs = slpposf['metadata'][RasterMetadata.srs]
            if isinstance(srs, text_type):
                srs = str(srs)
            srs = osr.GetUserInputAsWKT(srs)
            geotransform = [0] * 6
            geotransform[0] = xll - 0.5 * cellsize
            geotransform[1] = cellsize
            geotransform[3] = yll + (ysize - 0.5) * cellsize  # yMax
            geotransform[5] = -cellsize

            # Unpack the float32 grid stored in GridFS into a 2D array.
            slppos_data = spatial_gfs.get(slpposf['_id'])
            total_len = xsize * ysize
            fmt = '%df' % (total_len, )
            slppos_data = unpack(fmt, slppos_data.read())
            slppos_data = numpy.reshape(slppos_data, (ysize, xsize))

            # Map each spatial unit ID to its gene (BMP) value.
            v_dict = dict()
            for idx, gene_v in enumerate(self.gene_values):
                v_dict[self.gene_to_unit[idx]] = gene_v

            for k, v in v_dict.items():
                slppos_data[slppos_data == k] = v
            if outpath is None:
                outpath = self.scenario_dir + os.path.sep + 'Scenario_%d.tif' % self.ID
            RasterUtilClass.write_gtiff_file(outpath, ysize, xsize,
                                             slppos_data, geotransform, srs,
                                             nodata_value)
            client.close()
예제 #21
0
    def workflow(cfg):
        """Building MongoDB workflow.

        Runs the full preprocessing pipeline against the databases named in
        `cfg`: imports model parameters, spatial parameters, reaches, rasters,
        IUH, grid layering, climate data, weight data, observed data and the
        BMP scenario, logging progress percentages to `cfg.logs.build_mongo`.
        """
        f = cfg.logs.build_mongo
        # build a connection to mongodb database
        client = ConnectMongoDB(cfg.hostname, cfg.port)
        conn = client.get_conn()
        maindb = conn[cfg.spatial_db]
        climatedb = conn[cfg.climate_db]
        scenariodb = None
        # NOTE(review): 'use_scernario' looks like a typo of 'use_scenario',
        # but it must match the attribute defined on the config object.
        if cfg.use_scernario:
            scenariodb = conn[cfg.bmp_scenario_db]

        # import model parameters information to MongoDB
        status_output('Import model parameters', 10, f)
        ImportParam2Mongo.workflow(cfg, maindb)
        n_subbasins = MongoQuery.get_init_parameter_value(
            maindb, SubbsnStatsName.subbsn_num)
        print('Number of subbasins: %d' % n_subbasins)

        # Extract spatial parameters for reaches, landuse, soil, etc.
        status_output(
            'Extract spatial parameters for reaches, landuse, soil, etc...',
            20, f)
        extract_spatial_parameters(cfg, maindb)

        # import stream parameters
        status_output('Generating reach table with initialized parameters...',
                      40, f)
        ImportReaches2Mongo.generate_reach_table(cfg, maindb)

        # import raster data to MongoDB
        # (subbasin 0 denotes the whole basin; the per-subbasin call follows)
        status_output('Importing raster to MongoDB....', 50, f)
        ImportMongodbClass.spatial_rasters(cfg, 0)
        ImportMongodbClass.spatial_rasters(cfg, n_subbasins)

        # Import IUH
        status_output(
            'Generating and importing IUH (Instantaneous Unit Hydrograph)....',
            60, f)
        ImportMongodbClass.iuh(cfg, 0)
        ImportMongodbClass.iuh(cfg, n_subbasins)

        # Import grid layering data
        status_output('Generating and importing grid layering....', 70, f)
        ImportMongodbClass.grid_layering(cfg, 0)
        ImportMongodbClass.grid_layering(cfg, n_subbasins)

        # Import hydro-climate data
        status_output('Import climate data....', 80, f)
        ImportMongodbClass.climate_data(cfg, maindb, climatedb)

        # Import weight and related data, this should after ImportMongodbClass.climate_data()
        status_output(
            'Generating weight data for interpolation of meteorology data '
            'and weight dependent parameters....', 85, f)
        ImportWeightData.workflow(cfg, conn, 0)
        ImportWeightData.workflow(cfg, conn, n_subbasins)

        # Measurement Data, such as discharge, sediment yield.
        status_output(
            'Import observed data, such as discharge, sediment yield....', 90,
            f)
        ImportObservedData.workflow(cfg, maindb, climatedb)

        # Import BMP scenario database to MongoDB
        # (scenariodb may be None when cfg.use_scernario is falsy)
        status_output('Importing bmp scenario....', 95, f)
        ImportScenario2Mongo.scenario_from_texts(cfg, maindb, scenariodb)

        status_output('Build DB: %s finished!' % cfg.spatial_db, 100, f)

        # close connection to MongoDB
        client.close()
예제 #22
0
    def __init__(self, cfg):
        # type: (PostConfig) -> None
        """Constructor.

        Reads model configuration plus simulated/observed data, then clips the
        user-specified calibration and validation periods against the actual
        simulation period stored in MongoDB (FILE_IN) before matching
        simulation with observation and computing statistics.
        """
        self.model = MainSEIMS(args_dict=cfg.model_cfg.ConfigDict)
        self.ws = self.model.OutputDirectory
        if not FileClass.is_dir_exists(self.ws):
            raise ValueError('The output directory %s is not existed!' %
                             self.ws)
        self.plot_vars = cfg.plot_vars
        self.plot_cfg = cfg.plot_cfg  # type: PlotConfig
        # UTCTIME, calibration period
        self.stime = cfg.cali_stime
        self.etime = cfg.cali_etime
        self.subbsnID = cfg.plt_subbsnid
        # validation period
        self.vali_stime = cfg.vali_stime
        self.vali_etime = cfg.vali_etime

        # Read model data from MongoDB, the time period of simulation is read from FILE_IN.
        # NOTE(review): this connection is never explicitly closed here —
        # presumably released when the process exits; confirm.
        mongoclient = ConnectMongoDB(self.model.host,
                                     self.model.port).get_conn()
        self.readData = ReadModelData(mongoclient, self.model.db_name)
        self.mode = self.readData.Mode
        self.interval = self.readData.Interval
        # check start and end time of calibration
        st, et = self.readData.SimulationPeriod
        self.plot_validation = True
        # Clamp the requested calibration window to the simulated period.
        if st > self.stime:
            self.stime = st
        if et < self.etime:
            self.etime = et
        # If the (clamped) calibration window still ends before the simulation
        # begins, fall back to the full simulation period and skip validation.
        if st > self.etime > self.stime:
            self.stime = st
            self.etime = et
            # in this circumstance, no validation should be calculated.
            self.vali_stime = None
            self.vali_etime = None
            self.plot_validation = False
        # check validation time period: drop it when empty/inverted or fully
        # outside the simulated period, otherwise clamp it to [st, et].
        if self.vali_stime and self.vali_etime:
            if self.vali_stime >= self.vali_etime or st > self.vali_etime > self.vali_stime \
                or self.vali_stime >= et:
                self.vali_stime = None
                self.vali_etime = None
                self.plot_validation = False
            elif st > self.vali_stime:
                self.vali_stime = st
            elif et < self.vali_etime:
                self.vali_etime = et
        else:
            self.plot_validation = False
        # Set start time and end time of both calibration and validation periods
        start = self.stime
        end = self.etime
        if self.plot_validation:
            start = self.stime if self.stime < self.vali_stime else self.vali_stime
            end = self.etime if self.etime > self.vali_etime else self.vali_etime
        self.outletid = self.readData.OutletID
        # read precipitation
        self.pcp_date_value = self.readData.Precipitation(
            self.subbsnID, start, end)
        # read simulated data and update the available variables
        self.plot_vars, self.sim_data_dict = read_simulation_from_txt(
            self.ws, self.plot_vars, self.outletid, start, end)
        # Flatten {date: [v1, v2, ...]} into [[date, v1, v2, ...], ...] rows.
        self.sim_data_value = list(
        )  # type: List[List[Union[datetime, float]]]
        for d, vs in self.sim_data_dict.items():
            self.sim_data_value.append([d] + vs[:])
        # reset start time and end time
        if len(self.sim_data_value) == 0:
            raise RuntimeError(
                'No available simulate data, please check the start and end time!'
            )
        # read observation data from MongoDB
        self.obs_vars, self.obs_data_dict = self.readData.Observation(
            self.subbsnID, self.plot_vars, start, end)

        # Calibration period
        self.sim_obs_dict = match_simulation_observation(self.plot_vars,
                                                         self.sim_data_dict,
                                                         self.obs_vars,
                                                         self.obs_data_dict,
                                                         start_time=self.stime,
                                                         end_time=self.etime)
        calculate_statistics(self.sim_obs_dict)
        # Validation period if existed
        self.vali_sim_obs_dict = dict()
        if self.plot_validation:
            self.vali_sim_obs_dict = match_simulation_observation(
                self.plot_vars,
                self.sim_data_dict,
                self.obs_vars,
                self.obs_data_dict,
                start_time=self.vali_stime,
                end_time=self.vali_etime)
            calculate_statistics(self.vali_sim_obs_dict)
예제 #23
0
    def read_param_ranges(self):
        """Read param_rng.def file

           name,lower_bound,upper_bound,group,dist
           (group and dist are optional)

            e.g.,
             Param1,0,1[,Group1][,dist1]
             Param2,0,1[,Group2][,dist2]
             Param3,0,1[,Group3][,dist3]

        Returns:
            a dictionary containing:
            - names - the names of the parameters
            - bounds - a list of lists of lower and upper bounds
            - num_vars - a scalar indicating the number of variables
                         (the length of names)
            - groups - a list of group names (strings) for each variable
            - dists - a list of distributions for the problem,
                        None if not specified or all uniform
        """
        # Use the cached JSON copy if it has already been generated.
        if not self.param_defs:
            if FileClass.is_file_exists(self.cfg.outfiles.param_defs_json):
                with open(self.cfg.outfiles.param_defs_json,
                          'r',
                          encoding='utf-8') as f:
                    self.param_defs = UtilClass.decode_strs_in_dict(
                        json.load(f))
                return
        # read param_range_def file and output to json file
        client = ConnectMongoDB(self.model.host, self.model.port)
        conn = client.get_conn()
        db = conn[self.model.db_name]
        collection = db['PARAMETERS']

        names = list()
        bounds = list()
        groups = list()
        dists = list()
        num_vars = 0
        items = read_data_items_from_txt(self.cfg.param_range_def)
        for item in items:
            if len(item) < 3:
                continue
            # find parameter name, print warning message if not existed.
            # count_documents() replaces the deprecated Cursor.count() (removed
            # in PyMongo 4) and avoids leaving a no_cursor_timeout cursor open.
            if collection.count_documents({'NAME': item[0]}) == 0:
                print('WARNING: parameter %s is not existed!' % item[0])
                continue
            num_vars += 1
            names.append(item[0])
            bounds.append([float(item[1]), float(item[2])])
            # If the fourth column does not contain a group name, use
            # the parameter name
            if len(item) >= 4:
                groups.append(item[3])
            else:
                groups.append(item[0])
            if len(item) >= 5:
                dists.append(item[4])
            else:
                dists.append('unif')
        client.close()  # BUG FIX: the MongoDB connection was never closed
        if groups == names:
            groups = None
        elif len(set(groups)) == 1:
            # BUG FIX: error message typo 'bemeaningful' -> 'be meaningful'
            raise ValueError(
                'Only one group defined, results will not be meaningful')

        # setting dists to none if all are uniform
        # because non-uniform scaling is not needed
        if all(d == 'unif' for d in dists):
            dists = None

        self.param_defs = {
            'names': names,
            'bounds': bounds,
            'num_vars': num_vars,
            'groups': groups,
            'dists': dists
        }

        # Save as json, which can be loaded by json.load()
        json_data = json.dumps(self.param_defs,
                               indent=4,
                               cls=SpecialJsonEncoder)
        with open(self.cfg.outfiles.param_defs_json, 'w',
                  encoding='utf-8') as f:
            f.write('%s' % json_data)
예제 #24
0
파일: scenario.py 프로젝트: crazyzlj/SEIMS
    def export_scenario_to_gtiff(self, outpath=None):
        """Export scenario to GTiff.

        Args:
            outpath: output GTiff path; defaults to
                     `<scenario_dir>/Scenario_<ID>.tif` when None.

        TODO: Read Raster from MongoDB should be extracted to pygeoc.
        """
        if not self.export_sce_tif:
            return
        dist = self.bmps_info['DISTRIBUTION']
        dist_list = StringClass.split_string(dist, '|')
        if len(dist_list) >= 2 and dist_list[0] == 'RASTER':
            dist_name = '0_' + dist_list[1]  # prefix 0_ means the whole basin
            # read dist_name from MongoDB
            client = ConnectMongoDB(self.hostname, self.port)
            conn = client.get_conn()
            maindb = conn[self.main_db]
            spatial_gfs = GridFS(maindb, DBTableNames.gridfs_spatial)
            # read file from mongodb
            if not spatial_gfs.exists(filename=dist_name):
                print('WARNING: %s is not existed, export scenario failed!' % dist_name)
                client.close()  # BUG FIX: connection leaked on this early return
                return
            try:
                slpposf = maindb[DBTableNames.gridfs_spatial].files.find({'filename': dist_name},
                                                                         no_cursor_timeout=True)[0]
            except Exception:
                # BUG FIX: `except NetworkTimeout or Exception` evaluates to
                # `except NetworkTimeout` only; other errors escaped uncaught.
                client.close()
                return

            # Reconstruct raster geo-metadata stored alongside the GridFS file.
            ysize = int(slpposf['metadata'][RasterMetadata.nrows])
            xsize = int(slpposf['metadata'][RasterMetadata.ncols])
            xll = slpposf['metadata'][RasterMetadata.xll]
            yll = slpposf['metadata'][RasterMetadata.yll]
            cellsize = slpposf['metadata'][RasterMetadata.cellsize]
            nodata_value = slpposf['metadata'][RasterMetadata.nodata]
            srs = slpposf['metadata'][RasterMetadata.srs]
            if isinstance(srs, text_type):
                srs = str(srs)
            srs = osr.GetUserInputAsWKT(srs)
            geotransform = [0] * 6
            geotransform[0] = xll - 0.5 * cellsize
            geotransform[1] = cellsize
            geotransform[3] = yll + (ysize - 0.5) * cellsize  # yMax
            geotransform[5] = -cellsize

            # Unpack the float32 grid stored in GridFS into a 2D array.
            slppos_data = spatial_gfs.get(slpposf['_id'])
            total_len = xsize * ysize
            fmt = '%df' % (total_len,)
            slppos_data = unpack(fmt, slppos_data.read())
            slppos_data = numpy.reshape(slppos_data, (ysize, xsize))

            # Map each spatial unit ID to its gene (BMP) value.
            v_dict = dict()
            for idx, gene_v in enumerate(self.gene_values):
                v_dict[self.gene_to_unit[idx]] = gene_v

            for k, v in v_dict.items():
                slppos_data[slppos_data == k] = v
            if outpath is None:
                outpath = self.scenario_dir + os.path.sep + 'Scenario_%d.tif' % self.ID
            RasterUtilClass.write_gtiff_file(outpath, ysize, xsize, slppos_data, geotransform,
                                             srs, nodata_value)
            client.close()
예제 #25
0
def DelinateSlopePositionByThreshold(
        modelcfg,  # type: ParseSEIMSConfig
        thresholds,  # type: Dict[int, List]
        fuzzyslppos_fnames,  # type: List[Tuple[int, AnyStr, AnyStr]]
        outfname,  # type: AnyStr
        subbsn_id=0  # type: int
):
    # type: (...) -> Dict
    """Delineate crisp slope position classes from fuzzy similarity rasters.

    For each valid cell the slope position with the maximum fuzzy similarity
    is selected by default; the second-ranked position is selected instead
    when the similarity gap falls within the hillslope-specific thresholds,
    which adapts the boundaries between adjacent slope positions. The crisp
    classification raster is written back to MongoDB GridFS, and per-position
    area statistics (total and per-landuse, in km2) are returned.

    Args:
        modelcfg: Configuration of SEIMS-based model.
        thresholds: {HillslopeID: [rdgID, bksID, vlyID, T_bks2rdg, T_bks2vly], ...},
            i.e., slope position IDs followed by the adaptation thresholds.
        fuzzyslppos_fnames: [(1, 'summit', 'rdgInf'), ...], ordered from the
            top (ridge) to the bottom (valley) of the hillslope.
        outfname: Output GridFS filename (without the subbasin prefix).
        subbsn_id: Subbasin ID. By default (0) use the whole watershed data.

    Returns:
        outdict: {tagname: {slppos_id: {'area': area_km2,
                                        'landuse': {landuse_id: area_km2}}}}
    """
    # 1. Read raster data from MongoDB
    hillslpr = ReadRasterFromMongoDB(modelcfg.host, modelcfg.port,
                                     modelcfg.db_name,
                                     DBTableNames.gridfs_spatial,
                                     '%d_HILLSLOPE_MERGED' % subbsn_id)
    landuser = ReadRasterFromMongoDB(modelcfg.host, modelcfg.port,
                                     modelcfg.db_name,
                                     DBTableNames.gridfs_spatial,
                                     '%d_LANDUSE' % subbsn_id)
    fuzslppos_rs = list()
    for tag, tagname, gfsname in fuzzyslppos_fnames:
        fuzslppos_rs.append(
            ReadRasterFromMongoDB(modelcfg.host, modelcfg.port,
                                  modelcfg.db_name,
                                  DBTableNames.gridfs_spatial,
                                  '%d_%s' % (subbsn_id, gfsname.upper())))

    # 2. Initialize output
    outgfsname = '%d_%s' % (subbsn_id, outfname.upper())
    outdict = dict(
    )  # type: Dict[AnyStr, Dict[int, Dict[AnyStr, Union[float, Dict[int, float]]]]]
    # Classification raster, initialized entirely to nodata.
    slppos_cls = numpy.ones(
        (hillslpr.nRows, hillslpr.nCols)) * hillslpr.noDataValue
    valid_cells = 0

    # Get the fuzzy slope position values from up to bottom.
    # Returns None if ANY layer is nodata or negative at this cell.
    def GetFuzzySlopePositionValues(i_row, i_col):
        seqvalues = [-9999] * len(fuzslppos_rs)
        for iseq, fuzdata in enumerate(fuzslppos_rs):
            curv = fuzdata.data[i_row][i_col]
            if MathClass.floatequal(curv, fuzdata.noDataValue):
                return None
            if curv < 0:
                return None
            seqvalues[iseq] = curv
        return seqvalues

    # ACTUAL ALGORITHM
    for row in range(hillslpr.nRows):
        for col in range(hillslpr.nCols):
            # Exclude invalid situation
            hillslp_id = hillslpr.data[row][col]
            if MathClass.floatequal(hillslp_id, hillslpr.noDataValue):
                continue
            if hillslp_id not in thresholds:
                continue
            landuse_id = landuser.data[row][col]
            if MathClass.floatequal(landuse_id, landuser.noDataValue):
                continue
            fuzzyvalues = GetFuzzySlopePositionValues(row, col)
            if fuzzyvalues is None:
                continue

            # THIS PART SHOULD BE REVIEWED CAREFULLY LATER! --START
            # Step 1. Get the index of slope position with maximum similarity
            max_fuz = max(fuzzyvalues)
            max_idx = fuzzyvalues.index(max_fuz)
            tmpfuzzyvalues = fuzzyvalues[:]
            tmpfuzzyvalues.remove(max_fuz)
            sec_fuz = max(tmpfuzzyvalues)
            sec_idx = fuzzyvalues.index(sec_fuz)

            sel_idx = max_idx  # Select the maximum by default

            # The trailing len(fuzzyvalues)-1 entries of the thresholds list
            # are the adaptation thresholds between adjacent positions.
            cur_threshs = thresholds[hillslp_id][1 - len(fuzzyvalues):]

            if max_idx == len(fuzzyvalues) - 1:  # the bottom position
                if sec_idx == len(
                        fuzzyvalues
                ) - 2 and 0 < max_fuz - sec_fuz < cur_threshs[-1]:
                    sel_idx = sec_idx  # change valley to backslope
            elif max_idx == 0:  # the upper position
                if sec_idx == 1 and 0 < max_fuz - sec_fuz < cur_threshs[0]:
                    sel_idx = sec_idx  # change ridge to backslope
            else:  # the middle positions
                # Two thresholds could be applied,
                #     i.e., cur_threshs[max_idx-1] and cur_threshs[max_idx]
                if sec_idx == max_idx - 1 and 0. > sec_fuz - max_fuz > cur_threshs[
                        max_idx - 1]:
                    sel_idx = sec_idx
                elif sec_idx == max_idx + 1 and 0. > sec_fuz - max_fuz > cur_threshs[
                        max_idx]:
                    sel_idx = sec_idx

            # Exception: do not adapt the boundary when the runner-up
            #            similarity is negligible.
            if sec_fuz < 0.1 and sel_idx == sec_idx:
                sel_idx = max_idx

            slppos_id = thresholds[hillslp_id][sel_idx]
            # THIS PART SHOULD BE REVIEWED CAREFULLY LATER! --END

            slppos_cls[row][col] = slppos_id
            sel_tagname = fuzzyslppos_fnames[sel_idx][1]
            if sel_tagname not in outdict:
                outdict[sel_tagname] = dict()
            if slppos_id not in outdict[sel_tagname]:
                outdict[sel_tagname][slppos_id] = {
                    'area': 0,
                    'landuse': dict()
                }
            outdict[sel_tagname][slppos_id]['area'] += 1
            if landuse_id not in outdict[sel_tagname][slppos_id]['landuse']:
                outdict[sel_tagname][slppos_id]['landuse'][landuse_id] = 0.
            outdict[sel_tagname][slppos_id]['landuse'][landuse_id] += 1.

            valid_cells += 1
    # Change cell counts to area (km2), assuming cell size dx is in meters.
    area_km2 = hillslpr.dx * hillslpr.dx * 1.e-6
    for tagname, slpposdict in viewitems(outdict):
        for sid, datadict in viewitems(slpposdict):
            outdict[tagname][sid]['area'] *= area_km2
            for luid in outdict[tagname][sid]['landuse']:
                outdict[tagname][sid]['landuse'][luid] *= area_km2

    # 3. Write the classified slope positions data back to mongodb
    metadata = dict()
    metadata[RasterMetadata.subbasin] = subbsn_id
    metadata['ID'] = outgfsname
    metadata['TYPE'] = outfname.upper()
    metadata[RasterMetadata.cellsize] = hillslpr.dx
    metadata[RasterMetadata.nodata] = hillslpr.noDataValue
    metadata[RasterMetadata.ncols] = hillslpr.nCols
    metadata[RasterMetadata.nrows] = hillslpr.nRows
    # xll/yll metadata refer to the CENTER of the lower-left cell.
    metadata[RasterMetadata.xll] = hillslpr.xMin + 0.5 * hillslpr.dx
    metadata[RasterMetadata.yll] = hillslpr.yMin + 0.5 * hillslpr.dx
    metadata['LAYERS'] = 1.
    metadata[RasterMetadata.cellnum] = valid_cells
    metadata[RasterMetadata.srs] = hillslpr.srs

    client = ConnectMongoDB(modelcfg.host, modelcfg.port)
    conn = client.get_conn()
    maindb = conn[modelcfg.db_name]
    spatial_gfs = GridFS(maindb, DBTableNames.gridfs_spatial)
    # delete if the tablename gridfs file existed
    if spatial_gfs.exists(filename=outgfsname):
        x = spatial_gfs.get_version(filename=outgfsname)
        spatial_gfs.delete(x._id)
    # create and write new GridFS file
    new_gridfs = spatial_gfs.new_file(filename=outgfsname, metadata=metadata)
    new_gridfs_array = slppos_cls.reshape(
        (1, hillslpr.nCols * hillslpr.nRows)).tolist()[0]

    # Pack all cells as 4-byte floats. The parentheses are required:
    # '%df' % nCols * nRows would first format '<nCols>f' and then repeat
    # that STRING nRows times, which only packed the right byte count by
    # accident of the struct format grammar.
    fmt = '%df' % (hillslpr.nCols * hillslpr.nRows)
    s = pack(fmt, *new_gridfs_array)
    new_gridfs.write(s)
    new_gridfs.close()

    client.close()

    return outdict
예제 #26
0
    def workflow(cfg):
        """Run the complete MongoDB database building workflow.

        Opens a MongoDB connection, then sequentially imports model
        parameters, spatial parameters, reach tables, rasters, IUH data,
        grid layering, climate data, interpolation weights, observed data,
        and the BMP scenario database, reporting progress along the way.

        Args:
            cfg: SEIMS preprocess configuration object.
        """
        log_file = cfg.logs.build_mongo
        # Establish the MongoDB connection and resolve the databases used below.
        mongo_client = ConnectMongoDB(cfg.hostname, cfg.port)
        mongo_conn = mongo_client.get_conn()
        main_db = mongo_conn[cfg.spatial_db]
        climate_db = mongo_conn[cfg.climate_db]
        # NOTE: 'use_scernario' is the attribute name defined on the config object.
        scenario_db = mongo_conn[cfg.bmp_scenario_db] if cfg.use_scernario else None

        # Import model parameters information to MongoDB, then query the
        # subbasin count needed by the per-subbasin import steps below.
        status_output('Import model parameters', 10, log_file)
        ImportParam2Mongo.workflow(cfg, main_db)
        subbasin_num = MongoQuery.get_init_parameter_value(main_db,
                                                           SubbsnStatsName.subbsn_num)
        print('Number of subbasins: %d' % subbasin_num)

        # Extract spatial parameters for reaches, landuse, soil, etc.
        status_output('Extract spatial parameters for reaches, landuse, soil, etc...',
                      20, log_file)
        extract_spatial_parameters(cfg, main_db)

        # Import stream parameters.
        status_output('Generating reach table with initialized parameters...',
                      40, log_file)
        ImportReaches2Mongo.generate_reach_table(cfg, main_db)

        # Import raster data to MongoDB, for the whole watershed (0) and
        # for each individual subbasin.
        status_output('Importing raster to MongoDB....', 50, log_file)
        ImportMongodbClass.spatial_rasters(cfg, 0)
        ImportMongodbClass.spatial_rasters(cfg, subbasin_num)

        # Import IUH.
        status_output('Generating and importing IUH (Instantaneous Unit Hydrograph)....',
                      60, log_file)
        ImportMongodbClass.iuh(cfg, 0)
        ImportMongodbClass.iuh(cfg, subbasin_num)

        # Import grid layering data.
        status_output('Generating and importing grid layering....', 70, log_file)
        ImportMongodbClass.grid_layering(cfg, 0)
        ImportMongodbClass.grid_layering(cfg, subbasin_num)

        # Import hydro-climate data.
        status_output('Import climate data....', 80, log_file)
        ImportMongodbClass.climate_data(cfg, main_db, climate_db)

        # Import weight and related data; this must run AFTER
        # ImportMongodbClass.climate_data().
        status_output('Generating weight data for interpolation of meteorology data '
                      'and weight dependent parameters....', 85, log_file)
        ImportWeightData.workflow(cfg, mongo_conn, 0)
        ImportWeightData.workflow(cfg, mongo_conn, subbasin_num)

        # Measurement data, such as discharge, sediment yield.
        status_output('Import observed data, such as discharge, sediment yield....',
                      90, log_file)
        ImportObservedData.workflow(cfg, main_db, climate_db)

        # Import BMP scenario database to MongoDB.
        status_output('Importing bmp scenario....', 95, log_file)
        ImportScenario2Mongo.scenario_from_texts(cfg, main_db, scenario_db)

        status_output('Build DB: %s finished!' % cfg.spatial_db, 100, log_file)

        # Close connection to MongoDB.
        mongo_client.close()