Example #1
def delete_scenarios_by_ids(hostname, port, dbname, sids):
    """Delete scenario data by ID in MongoDB."""
    client = ConnectMongoDB(hostname, port)
    conn = client.get_conn()
    db = conn[dbname]
    collection = db['BMP_SCENARIOS']
    for _id in sids:
        collection.remove({'ID': _id})
        print('Deleted scenario %d from MongoDB.' % _id)
    client.close()
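A minimal usage sketch of the function above; the host, port, database name, and scenario IDs are hypothetical and only for illustration:

if __name__ == '__main__':
    # ConnectMongoDB is assumed to be imported, e.g.:
    # from preprocess.db_mongodb import ConnectMongoDB
    delete_scenarios_by_ids('127.0.0.1', 27017, 'demo_Scenario_DB', [1, 2, 3])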
Example #2
def main():
    """TEST CODE"""
    from preprocess.config import parse_ini_configuration
    from preprocess.db_mongodb import ConnectMongoDB
    seims_cfg = parse_ini_configuration()
    client = ConnectMongoDB(seims_cfg.hostname, seims_cfg.port)
    conn = client.get_conn()

    ImportWeightData.workflow(seims_cfg, conn, 0)

    client.close()
Example #3
def write_param_values_to_mongodb(hostname, port, spatial_db, param_defs, param_values):
    """Update the Parameters collection in MongoDB."""
    client = ConnectMongoDB(hostname, port)
    conn = client.get_conn()
    db = conn[spatial_db]
    collection = db['PARAMETERS']
    collection.update_many({}, {'$unset': {'CALI_VALUES': ''}})
    for idx, pname in enumerate(param_defs['names']):
        v2str = ','.join(str(v) for v in param_values[:, idx])
        collection.find_one_and_update({'NAME': pname}, {'$set': {'CALI_VALUES': v2str}})
    client.close()
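For orientation, a minimal sketch of the expected inputs: param_values is indexed as param_values[:, idx], so each column holds the sampled values of one parameter and each row is one calibration sample. The names and numbers below are hypothetical:

import numpy

param_defs = {'names': ['K_pet', 'Runoff_co']}  # hypothetical parameter names
param_values = numpy.array([[1.0, 0.55],
                            [1.2, 0.60]])       # 2 samples x 2 parameters
write_param_values_to_mongodb('127.0.0.1', 27017, 'demo_model', param_defs, param_values)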
Example #4
def main():
    """TEST CODE"""
    from preprocess.config import parse_ini_configuration
    from preprocess.db_mongodb import ConnectMongoDB
    seims_cfg = parse_ini_configuration()
    client = ConnectMongoDB(seims_cfg.hostname, seims_cfg.port)
    conn = client.get_conn()

    ImportWeightData.workflow(seims_cfg, conn, 0)

    client.close()
Example #5
def main():
    """TEST CODE"""
    from preprocess.config import parse_ini_configuration
    from preprocess.db_mongodb import ConnectMongoDB
    seims_cfg = parse_ini_configuration()
    client = ConnectMongoDB(seims_cfg.hostname, seims_cfg.port)
    conn = client.get_conn()
    maindb = conn[seims_cfg.spatial_db]

    ImportReaches2Mongo.generate_reach_table(seims_cfg, maindb)

    client.close()
Example #6
def main():
    """TEST CODE"""
    from preprocess.config import parse_ini_configuration
    from preprocess.db_mongodb import ConnectMongoDB
    seims_cfg = parse_ini_configuration()
    client = ConnectMongoDB(seims_cfg.hostname, seims_cfg.port)
    conn = client.get_conn()
    main_db = conn[seims_cfg.spatial_db]

    LanduseUtilClass.parameters_extraction(seims_cfg, main_db)

    client.close()
Example #7
def main():
    """TEST CODE"""
    from preprocess.config import parse_ini_configuration
    from preprocess.db_mongodb import ConnectMongoDB
    seims_cfg = parse_ini_configuration()
    client = ConnectMongoDB(seims_cfg.hostname, seims_cfg.port)
    conn = client.get_conn()
    main_db = conn[seims_cfg.spatial_db]

    LanduseUtilClass.parameters_extraction(seims_cfg, main_db)

    client.close()
Example #8
 def reset_simulation_timerange(self):
     """Update simulation time range in MongoDB [FILE_IN]."""
     client = ConnectMongoDB(self.model.host, self.model.port)
     conn = client.get_conn()
     db = conn[self.model.db_name]
     stime_str = self.model.time_start.strftime('%Y-%m-%d %H:%M:%S')
     etime_str = self.model.time_end.strftime('%Y-%m-%d %H:%M:%S')
     db[DBTableNames.main_filein].find_one_and_update({'TAG': 'STARTTIME'},
                                                      {'$set': {'VALUE': stime_str}})
     db[DBTableNames.main_filein].find_one_and_update({'TAG': 'ENDTIME'},
                                                      {'$set': {'VALUE': etime_str}})
     client.close()
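For reference, the documents updated above are simple TAG/VALUE pairs in the FILE_IN collection; the snippet below only demonstrates the time format used (dates are hypothetical):

from datetime import datetime

stime_str = datetime(2012, 1, 1).strftime('%Y-%m-%d %H:%M:%S')
print(stime_str)  # -> '2012-01-01 00:00:00'
# The corresponding document then looks like {'TAG': 'STARTTIME', 'VALUE': stime_str}.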
Example #9
def main():
    """TEST CODE"""
    from preprocess.config import parse_ini_configuration
    from preprocess.db_mongodb import ConnectMongoDB
    seims_cfg = parse_ini_configuration()
    client = ConnectMongoDB(seims_cfg.hostname, seims_cfg.port)
    conn = client.get_conn()
    maindb = conn[seims_cfg.spatial_db]

    ImportReaches2Mongo.generate_reach_table(seims_cfg, maindb)

    client.close()
Example #10
def main():
    """TEST CODE"""
    from preprocess.config import parse_ini_configuration
    from preprocess.db_mongodb import ConnectMongoDB
    seims_cfg = parse_ini_configuration()
    client = ConnectMongoDB(seims_cfg.hostname, seims_cfg.port)
    conn = client.get_conn()
    main_db = conn[seims_cfg.spatial_db]

    ImportParam2Mongo.workflow(seims_cfg, main_db)

    client.close()
Example #11
def main():
    """TEST CODE"""
    from preprocess.config import parse_ini_configuration
    from preprocess.db_mongodb import ConnectMongoDB
    seims_cfg = parse_ini_configuration()
    client = ConnectMongoDB(seims_cfg.hostname, seims_cfg.port)
    conn = client.get_conn()
    main_db = conn[seims_cfg.spatial_db]

    ImportParam2Mongo.workflow(seims_cfg, main_db)

    client.close()
Example #12
 def reset_simulation_timerange(self):
     """Update simulation time range in MongoDB [FILE_IN]."""
     client = ConnectMongoDB(self.model.host, self.model.port)
     conn = client.get_conn()
     db = conn[self.model.db_name]
     stime_str = self.model.simu_stime.strftime('%Y-%m-%d %H:%M:%S')
     etime_str = self.model.simu_etime.strftime('%Y-%m-%d %H:%M:%S')
     db[DBTableNames.main_filein].find_one_and_update({'TAG': 'STARTTIME'},
                                                      {'$set': {'VALUE': stime_str}})
     db[DBTableNames.main_filein].find_one_and_update({'TAG': 'ENDTIME'},
                                                      {'$set': {'VALUE': etime_str}})
     client.close()
Example #13
def main():
    """TEST CODE"""
    from preprocess.config import parse_ini_configuration
    from preprocess.db_mongodb import ConnectMongoDB
    seims_cfg = parse_ini_configuration()
    client = ConnectMongoDB(seims_cfg.hostname, seims_cfg.port)
    conn = client.get_conn()
    hydroclim_db = conn[seims_cfg.climate_db]

    site_m = HydroClimateUtilClass.query_climate_sites(hydroclim_db, 'M')
    site_p = HydroClimateUtilClass.query_climate_sites(hydroclim_db, 'P')

    client.close()
Example #14
def main():
    """TEST CODE"""
    from preprocess.config import parse_ini_configuration
    from preprocess.db_mongodb import ConnectMongoDB
    seims_cfg = parse_ini_configuration()
    client = ConnectMongoDB(seims_cfg.hostname, seims_cfg.port)
    conn = client.get_conn()
    maindb = conn[seims_cfg.spatial_db]
    scenariodb = conn[seims_cfg.bmp_scenario_db]

    ImportScenario2Mongo.scenario_from_texts(seims_cfg, maindb, scenariodb)

    client.close()
Example #15
def main():
    """TEST CODE"""
    from preprocess.config import parse_ini_configuration
    from preprocess.db_mongodb import ConnectMongoDB
    seims_cfg = parse_ini_configuration()
    client = ConnectMongoDB(seims_cfg.hostname, seims_cfg.port)
    conn = client.get_conn()
    maindb = conn[seims_cfg.spatial_db]
    scenariodb = conn[seims_cfg.bmp_scenario_db]

    ImportScenario2Mongo.scenario_from_texts(seims_cfg, maindb, scenariodb)

    client.close()
Example #16
def main():
    """TEST CODE"""
    from preprocess.config import parse_ini_configuration
    from preprocess.db_mongodb import ConnectMongoDB
    seims_cfg = parse_ini_configuration()
    client = ConnectMongoDB(seims_cfg.hostname, seims_cfg.port)
    conn = client.get_conn()
    db = conn[seims_cfg.climate_db]
    import time
    st = time.time()
    ImportMeteoData.workflow(seims_cfg, db)
    et = time.time()
    print(et - st)
    client.close()
Example #17
def main():
    """TEST CODE"""
    from preprocess.config import parse_ini_configuration
    from preprocess.db_mongodb import ConnectMongoDB
    seims_cfg = parse_ini_configuration()
    client = ConnectMongoDB(seims_cfg.hostname, seims_cfg.port)
    conn = client.get_conn()
    db = conn[seims_cfg.climate_db]
    import time
    st = time.time()
    ImportMeteoData.workflow(seims_cfg, db)
    et = time.time()
    print(et - st)
    client.close()
Example #18
def write_param_values_to_mongodb(hostname, port, spatial_db, param_defs,
                                  param_values):
    """Update the Parameters collection in MongoDB."""
    client = ConnectMongoDB(hostname, port)
    conn = client.get_conn()
    db = conn[spatial_db]
    collection = db['PARAMETERS']
    collection.update_many({}, {'$unset': {'CALI_VALUES': ''}})
    for idx, pname in enumerate(param_defs['names']):
        v2str = ','.join(str(v) for v in param_values[:, idx])
        collection.find_one_and_update({'NAME': pname},
                                       {'$set': {
                                           'CALI_VALUES': v2str
                                       }})
    client.close()
Example #19
 def read_simulation_timerange(self):
     """Read simulation time range from MongoDB."""
     client = ConnectMongoDB(self.hostname, self.port)
     conn = client.get_conn()
     db = conn[self.main_db]
     collection = db['FILE_IN']
     try:
         stime_str = collection.find_one({'TAG': 'STARTTIME'}, no_cursor_timeout=True)['VALUE']
         etime_str = collection.find_one({'TAG': 'ENDTIME'}, no_cursor_timeout=True)['VALUE']
         stime = StringClass.get_datetime(stime_str)
         etime = StringClass.get_datetime(etime_str)
         dlt = etime - stime + timedelta(seconds=1)
         self.timerange = (dlt.days * 86400. + dlt.seconds) / 86400. / 365.
     except (NetworkTimeout, Exception):
         # In case of an unexpected exception
         self.timerange = 1.  # set default
     client.close()
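A worked check of the formula above (dates are hypothetical): self.timerange is the simulation length expressed in years.

from datetime import datetime, timedelta

stime = datetime(2012, 1, 1)
etime = datetime(2012, 12, 31, 23, 59, 59)
dlt = etime - stime + timedelta(seconds=1)                 # 366 days (2012 is a leap year)
print((dlt.days * 86400. + dlt.seconds) / 86400. / 365.)   # ~1.0027 years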
Example #20
 def write_param_values_to_mongodb(self):
     """Update Parameters collection in MongoDB.
     Notes:
         The field value of 'CALI_VALUES' of all parameters will be deleted first.
     """
     if not self.param_defs:
         self.read_param_ranges()
     if self.param_values is None or len(self.param_values) == 0:
         self.generate_samples()
     client = ConnectMongoDB(self.model.host, self.model.port)
     conn = client.get_conn()
     db = conn[self.model.db_name]
     collection = db['PARAMETERS']
     collection.update_many({}, {'$unset': {'CALI_VALUES': ''}})
     for idx, pname in enumerate(self.param_defs['names']):
         v2str = ','.join(str(v) for v in self.param_values[:, idx])
         collection.find_one_and_update({'NAME': pname}, {'$set': {'CALI_VALUES': v2str}})
     client.close()
Example #21
 def write_param_values_to_mongodb(self):
     """Update Parameters collection in MongoDB.
     Notes:
         The field value of 'CALI_VALUES' of all parameters will be deleted first.
     """
     if not self.param_defs:
         self.read_param_ranges()
     if self.param_values is None or len(self.param_values) == 0:
         self.generate_samples()
     client = ConnectMongoDB(self.model.host, self.model.port)
     conn = client.get_conn()
     db = conn[self.model.db_name]
     collection = db['PARAMETERS']
     collection.update_many({}, {'$unset': {'CALI_VALUES': ''}})
     for idx, pname in enumerate(self.param_defs['names']):
         v2str = ','.join(str(v) for v in self.param_values[:, idx])
         collection.find_one_and_update({'NAME': pname}, {'$set': {'CALI_VALUES': v2str}})
     client.close()
Example #22
    def read_bmp_parameters(self):
        """Read BMP configuration from MongoDB.
        Each BMP is stored in the collection as one document identified by the 'SUBSCENARIO' field,
        so `self.bmps_params` is a dict keyed by BMP ID ('SUBSCENARIO').
        """
        client = ConnectMongoDB(self.modelcfg.host, self.modelcfg.port)
        conn = client.get_conn()
        scenariodb = conn[self.scenario_db]

        bmpcoll = scenariodb[self.cfg.bmps_coll]
        findbmps = bmpcoll.find({}, no_cursor_timeout=True)
        for fb in findbmps:
            fb = UtilClass.decode_strs_in_dict(fb)
            if 'SUBSCENARIO' not in fb:
                continue
            curid = fb['SUBSCENARIO']
            if curid not in self.cfg.bmps_subids:
                continue
            if curid not in self.bmps_params:
                self.bmps_params[curid] = dict()
            for k, v in fb.items():
                if k == 'SUBSCENARIO':
                    continue
                elif k == 'LANDUSE':
                    if isinstance(v, int):
                        v = [v]
                    elif v == 'ALL' or v == '':
                        v = None
                    else:
                        v = StringClass.extract_numeric_values_from_string(v)
                        v = [int(abs(nv)) for nv in v]
                    self.bmps_params[curid][k] = v[:]
                elif k == 'SLPPOS':
                    if isinstance(v, int):
                        v = [v]
                    elif v == 'ALL' or v == '':
                        v = list(self.cfg.slppos_tags.keys())
                    else:
                        v = StringClass.extract_numeric_values_from_string(v)
                        v = [int(abs(nv)) for nv in v]
                    self.bmps_params[curid][k] = v[:]
                else:
                    self.bmps_params[curid][k] = v
        client.close()
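As an illustration of the parsing rules above, a hypothetical BMP document and the entry it would produce (the EFFECTIVENESS field is made up; SUBSCENARIO 1 is assumed to be listed in self.cfg.bmps_subids):

# Document in the BMP collection:
#   {'SUBSCENARIO': 1, 'LANDUSE': '6,8', 'SLPPOS': 'ALL', 'EFFECTIVENESS': 0.3}
# Resulting entry:
#   self.bmps_params[1] = {'LANDUSE': [6, 8],
#                          'SLPPOS': list(self.cfg.slppos_tags.keys()),
#                          'EFFECTIVENESS': 0.3}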
Example #23
 def export_to_mongodb(self):
     """Export current scenario to MongoDB.
     Delete the scenario with the same ScenarioID if it already exists.
     """
     client = ConnectMongoDB(self.hostname, self.port)
     conn = client.get_conn()
     db = conn[self.scenario_db]
     collection = db['BMP_SCENARIOS']
     try:
         # find ScenarioID, remove it if it already exists.
         if collection.find({'ID': self.ID}, no_cursor_timeout=True).count():
             collection.remove({'ID': self.ID})
     except (NetworkTimeout, Exception):
         # In case of an unexpected exception
         pass
     for objid, bmp_item in self.bmp_items.items():
         bmp_item['_id'] = ObjectId()
         collection.insert_one(bmp_item)
     client.close()
Example #24
    def read_bmp_parameters(self):
        """Read BMP configuration from MongoDB."""
        client = ConnectMongoDB(self.hostname, self.port)
        conn = client.get_conn()
        scenariodb = conn[self.bmp_scenario_db]

        bmpcoll = scenariodb[self.bmps_coll]
        findbmps = bmpcoll.find({}, no_cursor_timeout=True)
        for fb in findbmps:
            fb = UtilClass.decode_strs_in_dict(fb)
            if 'SUBSCENARIO' not in fb:
                continue
            curid = fb['SUBSCENARIO']
            if curid not in self.bmps_subids:
                continue
            if curid not in self.bmps_params:
                self.bmps_params[curid] = dict()
            for k, v in fb.items():
                if k == 'SUBSCENARIO':
                    continue
                elif k == 'LANDUSE':
                    if isinstance(v, int):
                        v = [v]
                    elif v == 'ALL' or v == '':
                        v = None
                    else:
                        v = StringClass.extract_numeric_values_from_string(v)
                        v = [int(abs(nv)) for nv in v]
                    self.bmps_params[curid][k] = v[:]
                elif k == 'SLPPOS':
                    if isinstance(v, int):
                        v = [v]
                    elif v == 'ALL' or v == '':
                        v = list(self.slppos_tags.keys())
                    else:
                        v = StringClass.extract_numeric_values_from_string(v)
                        v = [int(abs(nv)) for nv in v]
                    self.bmps_params[curid][k] = v[:]
                else:
                    self.bmps_params[curid][k] = v

        client.close()
Example #25
 def read_simulation_timerange(self):
     """Read simulation time range from MongoDB."""
     client = ConnectMongoDB(self.hostname, self.port)
     conn = client.get_conn()
     db = conn[self.main_db]
     collection = db['FILE_IN']
     try:
         stime_str = collection.find_one({'TAG': 'STARTTIME'},
                                         no_cursor_timeout=True)['VALUE']
         etime_str = collection.find_one({'TAG': 'ENDTIME'},
                                         no_cursor_timeout=True)['VALUE']
         stime = StringClass.get_datetime(stime_str)
         etime = StringClass.get_datetime(etime_str)
         dlt = etime - stime + timedelta(seconds=1)
         self.timerange = (dlt.days * 86400. + dlt.seconds) / 86400. / 365.
     except (NetworkTimeout, Exception):
         # In case of an unexpected exception
         self.timerange = 1.  # set default
     client.close()
Example #26
 def export_to_mongodb(self):
     """Export current scenario to MongoDB.
     Delete the scenario with the same ScenarioID if it already exists.
     """
     client = ConnectMongoDB(self.hostname, self.port)
     conn = client.get_conn()
     db = conn[self.scenario_db]
     collection = db['BMP_SCENARIOS']
     try:
         # find ScenarioID, remove it if it already exists.
         if collection.find({
                 'ID': self.ID
         }, no_cursor_timeout=True).count():
             collection.remove({'ID': self.ID})
     except (NetworkTimeout, Exception):
         # In case of an unexpected exception
         pass
     for objid, bmp_item in self.bmp_items.items():
         bmp_item['_id'] = ObjectId()
         collection.insert_one(bmp_item)
     client.close()
Example #27
def ReadRasterFromMongoDB(ip, port, db_name, gfsname, gfilename):
    client = ConnectMongoDB(ip, port)
    conn = client.get_conn()
    maindb = conn[db_name]
    spatial_gfs = GridFS(maindb, gfsname)
    if not spatial_gfs.exists(filename=gfilename):
        raise ValueError('WARNING: %s does not exist in %s:%s!' %
                         (gfilename, db_name, gfsname))
    try:
        gfsdata = maindb[DBTableNames.gridfs_spatial].files.find(
            {'filename': gfilename}, no_cursor_timeout=True)[0]
    except (NetworkTimeout, Exception):
        # In case of an unexpected exception
        client.close()
        return None

    ysize = int(gfsdata['metadata'][RasterMetadata.nrows])
    xsize = int(gfsdata['metadata'][RasterMetadata.ncols])
    xll = gfsdata['metadata'][RasterMetadata.xll]
    yll = gfsdata['metadata'][RasterMetadata.yll]
    cellsize = gfsdata['metadata'][RasterMetadata.cellsize]
    nodata = gfsdata['metadata'][RasterMetadata.nodata]
    srs = gfsdata['metadata'][RasterMetadata.srs]
    if is_string(srs):
        srs = str(srs)
    srs = osr.GetUserInputAsWKT(srs)
    geotransform = [0] * 6
    geotransform[0] = xll - 0.5 * cellsize
    geotransform[1] = cellsize
    geotransform[3] = yll + (ysize - 0.5) * cellsize  # yMax
    geotransform[5] = -cellsize

    array_data = spatial_gfs.get(gfsdata['_id'])
    total_len = xsize * ysize
    fmt = '%df' % (total_len, )
    array_data = unpack(fmt, array_data.read())
    array_data = numpy.reshape(array_data, (ysize, xsize))
    return Raster(ysize, xsize, array_data, nodata, geotransform, srs)
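A minimal usage sketch of ReadRasterFromMongoDB, assuming a model database already populated by the preprocessing workflow (host, port, and database name are hypothetical):

rst = ReadRasterFromMongoDB('127.0.0.1', 27017, 'demo_model',
                            DBTableNames.gridfs_spatial, '0_LANDUSE')
if rst is not None:  # None is returned when the GridFS metadata query fails
    print(rst.nRows, rst.nCols, rst.noDataValue)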
Example #28
    def workflow(cfg):
        """Building MongoDB workflow"""
        f = cfg.logs.build_mongo
        # build a connection to mongodb database
        client = ConnectMongoDB(cfg.hostname, cfg.port)
        conn = client.get_conn()
        maindb = conn[cfg.spatial_db]
        climatedb = conn[cfg.climate_db]
        scenariodb = None
        if cfg.use_scernario:
            scenariodb = conn[cfg.bmp_scenario_db]

        # import model parameters information to MongoDB
        status_output('Import model parameters', 10, f)
        ImportParam2Mongo.workflow(cfg, maindb)
        n_subbasins = MongoQuery.get_init_parameter_value(
            maindb, SubbsnStatsName.subbsn_num)
        print('Number of subbasins: %d' % n_subbasins)

        # Extract spatial parameters for reaches, landuse, soil, etc.
        status_output(
            'Extract spatial parameters for reaches, landuse, soil, etc...',
            20, f)
        extract_spatial_parameters(cfg, maindb)

        # import stream parameters
        status_output('Generating reach table with initialized parameters...',
                      40, f)
        ImportReaches2Mongo.generate_reach_table(cfg, maindb)

        # import raster data to MongoDB
        status_output('Importing raster to MongoDB....', 50, f)
        ImportMongodbClass.spatial_rasters(cfg, 0)
        ImportMongodbClass.spatial_rasters(cfg, n_subbasins)

        # Import IUH
        status_output(
            'Generating and importing IUH (Instantaneous Unit Hydrograph)....',
            60, f)
        ImportMongodbClass.iuh(cfg, 0)
        ImportMongodbClass.iuh(cfg, n_subbasins)

        # Import grid layering data
        status_output('Generating and importing grid layering....', 70, f)
        ImportMongodbClass.grid_layering(cfg, 0)
        ImportMongodbClass.grid_layering(cfg, n_subbasins)

        # Import hydro-climate data
        status_output('Import climate data....', 80, f)
        ImportMongodbClass.climate_data(cfg, maindb, climatedb)

        # Import weight and related data; this must run after ImportMongodbClass.climate_data()
        status_output(
            'Generating weight data for interpolation of meteorology data '
            'and weight dependent parameters....', 85, f)
        ImportWeightData.workflow(cfg, conn, 0)
        ImportWeightData.workflow(cfg, conn, n_subbasins)

        # Measurement Data, such as discharge, sediment yield.
        status_output(
            'Import observed data, such as discharge, sediment yield....', 90,
            f)
        ImportObservedData.workflow(cfg, maindb, climatedb)

        # Import BMP scenario database to MongoDB
        status_output('Importing bmp scenario....', 95, f)
        ImportScenario2Mongo.scenario_from_texts(cfg, maindb, scenariodb)

        status_output('Build DB: %s finished!' % cfg.spatial_db, 100, f)

        # close connection to MongoDB
        client.close()
Example #29
def DelinateSlopePositionByThreshold(
        modelcfg,  # type: ParseSEIMSConfig
        thresholds,  # type: Dict[int, List]
        fuzzyslppos_fnames,  # type: List[Tuple[int, AnyStr, AnyStr]]
        outfname,  # type: AnyStr
        subbsn_id=0  # type: int
):
    # type: (...) -> Dict
    """

    Args:
        model_cfg: Configuration of SEIMS-based model
        thresholds: {HillslopeID: {rdgID, bksID, vlyID, T_bks2rdg, T_bks2vly}, ...}
        fuzzyslppos_fnames: [(1, 'summit', 'rdgInf'), ...]
        outfname: output GridFS name
        subbsn_id: By default use the whole watershed data
    Returns:
        hillslp_data(dict): {}
    """
    # 1. Read raster data from MongoDB
    hillslpr = ReadRasterFromMongoDB(modelcfg.host, modelcfg.port,
                                     modelcfg.db_name,
                                     DBTableNames.gridfs_spatial,
                                     '%d_HILLSLOPE_MERGED' % subbsn_id)
    landuser = ReadRasterFromMongoDB(modelcfg.host, modelcfg.port,
                                     modelcfg.db_name,
                                     DBTableNames.gridfs_spatial,
                                     '%d_LANDUSE' % subbsn_id)
    fuzslppos_rs = list()
    for tag, tagname, gfsname in fuzzyslppos_fnames:
        fuzslppos_rs.append(
            ReadRasterFromMongoDB(modelcfg.host, modelcfg.port,
                                  modelcfg.db_name,
                                  DBTableNames.gridfs_spatial,
                                  '%d_%s' % (subbsn_id, gfsname.upper())))

    # Output for test
    # out_dir = r'D:\data_m\youwuzhen\seims_models_phd\data_prepare\spatial\spatial_units\tmp'
    # out_hillslp = out_dir + os.sep + 'hillslope.tif'
    # RasterUtilClass.write_gtiff_file(out_hillslp, hillslpr.nRows, hillslpr.nCols,
    #                                  hillslpr.data, hillslpr.geotrans, hillslpr.srs,
    #                                  hillslpr.noDataValue)
    # out_landuse = out_dir + os.sep + 'landuse.tif'
    # RasterUtilClass.write_gtiff_file(out_landuse, landuser.nRows, landuser.nCols,
    #                                  landuser.data, landuser.geotrans, landuser.srs,
    #                                  landuser.noDataValue)
    # for i, (tag, tagname, gfsname) in enumerate(fuzzyslppos_fnames):
    #     curname = out_dir + os.sep + '%s.tif' % gfsname
    #     RasterUtilClass.write_gtiff_file(curname, fuzslppos_rs[i].nRows, fuzslppos_rs[i].nCols,
    #                                      fuzslppos_rs[i].data, fuzslppos_rs[i].geotrans,
    #                                      fuzslppos_rs[i].srs,
    #                                      fuzslppos_rs[i].noDataValue)

    # 2. Initialize output
    outgfsname = '%d_%s' % (subbsn_id, outfname.upper())
    outdict = dict(
    )  # type: Dict[AnyStr, Dict[int, Dict[AnyStr, Union[float, Dict[int, float]]]]]
    slppos_cls = numpy.ones(
        (hillslpr.nRows, hillslpr.nCols)) * hillslpr.noDataValue
    valid_cells = 0

    # Get the fuzzy slope position values from top to bottom
    def GetFuzzySlopePositionValues(i_row, i_col):
        seqvalues = [-9999] * len(fuzslppos_rs)
        for iseq, fuzdata in enumerate(fuzslppos_rs):
            curv = fuzdata.data[i_row][i_col]
            if MathClass.floatequal(curv, fuzdata.noDataValue):
                return None
            if curv < 0:
                return None
            seqvalues[iseq] = curv
        return seqvalues

    # ACTUAL ALGORITHM
    for row in range(hillslpr.nRows):
        for col in range(hillslpr.nCols):
            # Exclude invalid situation
            hillslp_id = hillslpr.data[row][col]
            if MathClass.floatequal(hillslp_id, hillslpr.noDataValue):
                continue
            if hillslp_id not in thresholds:
                continue
            landuse_id = landuser.data[row][col]
            if MathClass.floatequal(landuse_id, landuser.noDataValue):
                continue
            fuzzyvalues = GetFuzzySlopePositionValues(row, col)
            if fuzzyvalues is None:
                continue

            # THIS PART SHOULD BE REVIEWED CAREFULLY LATER! --START
            # Step 1. Get the index of slope position with maximum similarity
            max_fuz = max(fuzzyvalues)
            max_idx = fuzzyvalues.index(max_fuz)
            tmpfuzzyvalues = fuzzyvalues[:]
            tmpfuzzyvalues.remove(max_fuz)
            sec_fuz = max(tmpfuzzyvalues)
            sec_idx = fuzzyvalues.index(sec_fuz)

            sel_idx = max_idx  # Select the maximum by default

            cur_threshs = thresholds[hillslp_id][1 - len(fuzzyvalues):]

            if max_idx == len(fuzzyvalues) - 1:  # the bottom position
                if sec_idx == len(
                        fuzzyvalues
                ) - 2 and 0 < max_fuz - sec_fuz < cur_threshs[-1]:
                    sel_idx = sec_idx  # change valley to backslope
            elif max_idx == 0:  # the upper position
                if sec_idx == 1 and 0 < max_fuz - sec_fuz < cur_threshs[0]:
                    sel_idx = sec_idx  # change ridge to backslope
            else:  # the middle positions
                # Two thresholds could be applied,
                #     i.e., cur_threshs[max_idx-1] and cur_threshs[max_idx]
                if sec_idx == max_idx - 1 and 0. > sec_fuz - max_fuz > cur_threshs[
                        max_idx - 1]:
                    sel_idx = sec_idx
                elif sec_idx == max_idx + 1 and 0. > sec_fuz - max_fuz > cur_threshs[
                        max_idx]:
                    sel_idx = sec_idx

            # Exception:
            if sec_fuz < 0.1 and sel_idx == sec_idx:
                sel_idx = max_idx

            # if sel_idx != max_idx:  # boundary has been adapted
            #     print('fuzzy values: %s, thresholds: %s, '
            #           'sel_idx: %d' % (fuzzyvalues.__str__(), cur_threshs.__str__(), sel_idx))

            slppos_id = thresholds[hillslp_id][sel_idx]
            # THIS PART SHOULD BE REVIEWED CAREFULLY LATER! --END

            slppos_cls[row][col] = slppos_id
            sel_tagname = fuzzyslppos_fnames[sel_idx][1]
            if sel_tagname not in outdict:
                outdict[sel_tagname] = dict()
            if slppos_id not in outdict[sel_tagname]:
                outdict[sel_tagname][slppos_id] = {
                    'area': 0,
                    'landuse': dict()
                }
            outdict[sel_tagname][slppos_id]['area'] += 1
            if landuse_id not in outdict[sel_tagname][slppos_id]['landuse']:
                outdict[sel_tagname][slppos_id]['landuse'][landuse_id] = 0.
            outdict[sel_tagname][slppos_id]['landuse'][landuse_id] += 1.

            valid_cells += 1
    # Change cell counts to area
    area_km2 = hillslpr.dx * hillslpr.dx * 1.e-6
    for tagname, slpposdict in viewitems(outdict):
        for sid, datadict in viewitems(slpposdict):
            outdict[tagname][sid]['area'] *= area_km2
            for luid in outdict[tagname][sid]['landuse']:
                outdict[tagname][sid]['landuse'][luid] *= area_km2

    # 3. Write the classified slope positions data back to mongodb
    metadata = dict()
    metadata[RasterMetadata.subbasin] = subbsn_id
    metadata['ID'] = outgfsname
    metadata['TYPE'] = outfname.upper()
    metadata[RasterMetadata.cellsize] = hillslpr.dx
    metadata[RasterMetadata.nodata] = hillslpr.noDataValue
    metadata[RasterMetadata.ncols] = hillslpr.nCols
    metadata[RasterMetadata.nrows] = hillslpr.nRows
    metadata[RasterMetadata.xll] = hillslpr.xMin + 0.5 * hillslpr.dx
    metadata[RasterMetadata.yll] = hillslpr.yMin + 0.5 * hillslpr.dx
    metadata['LAYERS'] = 1.
    metadata[RasterMetadata.cellnum] = valid_cells
    metadata[RasterMetadata.srs] = hillslpr.srs

    client = ConnectMongoDB(modelcfg.host, modelcfg.port)
    conn = client.get_conn()
    maindb = conn[modelcfg.db_name]
    spatial_gfs = GridFS(maindb, DBTableNames.gridfs_spatial)
    # delete the GridFS file with this filename if it already exists
    if spatial_gfs.exists(filename=outgfsname):
        x = spatial_gfs.get_version(filename=outgfsname)
        spatial_gfs.delete(x._id)
    # create and write new GridFS file
    new_gridfs = spatial_gfs.new_file(filename=outgfsname, metadata=metadata)
    new_gridfs_array = slppos_cls.reshape(
        (1, hillslpr.nCols * hillslpr.nRows)).tolist()[0]

    fmt = '%df' % (hillslpr.nCols * hillslpr.nRows)
    s = pack(fmt, *new_gridfs_array)
    new_gridfs.write(s)
    new_gridfs.close()

    # Read and output for test
    # slpposcls_r = ReadRasterFromMongoDB(modelcfg.host, modelcfg.port,
    #                                     modelcfg.db_name, DBTableNames.gridfs_spatial, outgfsname)
    # out_slpposcls = out_dir + os.sep + '%s.tif' % outgfsname
    # RasterUtilClass.write_gtiff_file(out_slpposcls, slpposcls_r.nRows, slpposcls_r.nCols,
    #                                  slpposcls_r.data, slpposcls_r.geotrans, slpposcls_r.srs,
    #                                  slpposcls_r.noDataValue)
    client.close()

    return outdict
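A sketch of the expected inputs, following the formats documented above (all IDs and thresholds are hypothetical; modelcfg is a ParseSEIMSConfig instance prepared elsewhere):

thresholds = {1: [101, 102, 103, 0.2, 0.2]}  # [rdgID, bksID, vlyID, T_bks2rdg, T_bks2vly] per hillslope
fuzzyslppos_fnames = [(1, 'summit', 'rdgInf'), (4, 'backslope', 'bksInf'), (16, 'valley', 'vlyInf')]
# stats = DelinateSlopePositionByThreshold(modelcfg, thresholds, fuzzyslppos_fnames, 'SLPPOS_UNITS')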
Example #30
    def export_scenario_to_gtiff(self, outpath=None):
        """Export scenario to GTiff.

        TODO: Read Raster from MongoDB should be extracted to pygeoc.
        """
        if not self.export_sce_tif:
            return
        dist = self.bmps_info['DISTRIBUTION']
        dist_list = StringClass.split_string(dist, '|')
        if len(dist_list) >= 2 and dist_list[0] == 'RASTER':
            dist_name = '0_' + dist_list[1]  # prefix 0_ means the whole basin
            # read dist_name from MongoDB
            client = ConnectMongoDB(self.hostname, self.port)
            conn = client.get_conn()
            maindb = conn[self.main_db]
            spatial_gfs = GridFS(maindb, DBTableNames.gridfs_spatial)
            # read file from mongodb
            if not spatial_gfs.exists(filename=dist_name):
                print('WARNING: %s does not exist, export scenario failed!' %
                      dist_name)
                return
            try:
                slpposf = maindb[DBTableNames.gridfs_spatial].files.find(
                    {'filename': dist_name}, no_cursor_timeout=True)[0]
            except (NetworkTimeout, Exception):
                # In case of an unexpected exception
                client.close()
                return

            ysize = int(slpposf['metadata'][RasterMetadata.nrows])
            xsize = int(slpposf['metadata'][RasterMetadata.ncols])
            xll = slpposf['metadata'][RasterMetadata.xll]
            yll = slpposf['metadata'][RasterMetadata.yll]
            cellsize = slpposf['metadata'][RasterMetadata.cellsize]
            nodata_value = slpposf['metadata'][RasterMetadata.nodata]
            srs = slpposf['metadata'][RasterMetadata.srs]
            if isinstance(srs, text_type):
                srs = str(srs)
            srs = osr.GetUserInputAsWKT(srs)
            geotransform = [0] * 6
            geotransform[0] = xll - 0.5 * cellsize
            geotransform[1] = cellsize
            geotransform[3] = yll + (ysize - 0.5) * cellsize  # yMax
            geotransform[5] = -cellsize

            slppos_data = spatial_gfs.get(slpposf['_id'])
            total_len = xsize * ysize
            fmt = '%df' % (total_len, )
            slppos_data = unpack(fmt, slppos_data.read())
            slppos_data = numpy.reshape(slppos_data, (ysize, xsize))

            v_dict = dict()
            for idx, gene_v in enumerate(self.gene_values):
                v_dict[self.gene_to_unit[idx]] = gene_v

            for k, v in v_dict.items():
                slppos_data[slppos_data == k] = v
            if outpath is None:
                outpath = self.scenario_dir + os.path.sep + 'Scenario_%d.tif' % self.ID
            RasterUtilClass.write_gtiff_file(outpath, ysize, xsize,
                                             slppos_data, geotransform, srs,
                                             nodata_value)
            client.close()
Example #31
    def workflow(cfg):
        """Building MongoDB workflow"""
        f = cfg.logs.build_mongo
        # build a connection to mongodb database
        client = ConnectMongoDB(cfg.hostname, cfg.port)
        conn = client.get_conn()
        maindb = conn[cfg.spatial_db]
        climatedb = conn[cfg.climate_db]
        scenariodb = None
        if cfg.use_scernario:
            scenariodb = conn[cfg.bmp_scenario_db]

        # import model parameters information to MongoDB
        status_output('Import model parameters', 10, f)
        ImportParam2Mongo.workflow(cfg, maindb)
        n_subbasins = MongoQuery.get_init_parameter_value(maindb, SubbsnStatsName.subbsn_num)
        print('Number of subbasins: %d' % n_subbasins)

        # Extract spatial parameters for reaches, landuse, soil, etc.
        status_output('Extract spatial parameters for reaches, landuse, soil, etc...', 20, f)
        extract_spatial_parameters(cfg, maindb)

        # import stream parameters
        status_output('Generating reach table with initialized parameters...', 40, f)
        ImportReaches2Mongo.generate_reach_table(cfg, maindb)

        # import raster data to MongoDB
        status_output('Importing raster to MongoDB....', 50, f)
        ImportMongodbClass.spatial_rasters(cfg, 0)
        ImportMongodbClass.spatial_rasters(cfg, n_subbasins)

        # Import IUH
        status_output('Generating and importing IUH (Instantaneous Unit Hydrograph)....', 60, f)
        ImportMongodbClass.iuh(cfg, 0)
        ImportMongodbClass.iuh(cfg, n_subbasins)

        # Import grid layering data
        status_output('Generating and importing grid layering....', 70, f)
        ImportMongodbClass.grid_layering(cfg, 0)
        ImportMongodbClass.grid_layering(cfg, n_subbasins)

        # Import hydro-climate data
        status_output('Import climate data....', 80, f)
        ImportMongodbClass.climate_data(cfg, maindb, climatedb)

        # Import weight and related data; this must run after ImportMongodbClass.climate_data()
        status_output('Generating weight data for interpolation of meteorology data '
                      'and weight dependent parameters....', 85, f)
        ImportWeightData.workflow(cfg, conn, 0)
        ImportWeightData.workflow(cfg, conn, n_subbasins)

        # Measurement Data, such as discharge, sediment yield.
        status_output('Import observed data, such as discharge, sediment yield....', 90, f)
        ImportObservedData.workflow(cfg, maindb, climatedb)

        # Import BMP scenario database to MongoDB
        status_output('Importing bmp scenario....', 95, f)
        ImportScenario2Mongo.scenario_from_texts(cfg, maindb, scenariodb)

        status_output('Build DB: %s finished!' % cfg.spatial_db, 100, f)

        # close connection to MongoDB
        client.close()
Example #32
    def export_scenario_to_gtiff(self, outpath=None):
        """Export scenario to GTiff.

        TODO: Read Raster from MongoDB should be extracted to pygeoc.
        """
        if not self.export_sce_tif:
            return
        dist = self.bmps_info['DISTRIBUTION']
        dist_list = StringClass.split_string(dist, '|')
        if len(dist_list) >= 2 and dist_list[0] == 'RASTER':
            dist_name = '0_' + dist_list[1]  # prefix 0_ means the whole basin
            # read dist_name from MongoDB
            client = ConnectMongoDB(self.hostname, self.port)
            conn = client.get_conn()
            maindb = conn[self.main_db]
            spatial_gfs = GridFS(maindb, DBTableNames.gridfs_spatial)
            # read file from mongodb
            if not spatial_gfs.exists(filename=dist_name):
                print('WARNING: %s does not exist, export scenario failed!' % dist_name)
                return
            try:
                slpposf = maindb[DBTableNames.gridfs_spatial].files.find({'filename': dist_name},
                                                                         no_cursor_timeout=True)[0]
            except (NetworkTimeout, Exception):
                # In case of an unexpected exception
                client.close()
                return

            ysize = int(slpposf['metadata'][RasterMetadata.nrows])
            xsize = int(slpposf['metadata'][RasterMetadata.ncols])
            xll = slpposf['metadata'][RasterMetadata.xll]
            yll = slpposf['metadata'][RasterMetadata.yll]
            cellsize = slpposf['metadata'][RasterMetadata.cellsize]
            nodata_value = slpposf['metadata'][RasterMetadata.nodata]
            srs = slpposf['metadata'][RasterMetadata.srs]
            if isinstance(srs, text_type):
                srs = str(srs)
            srs = osr.GetUserInputAsWKT(srs)
            geotransform = [0] * 6
            geotransform[0] = xll - 0.5 * cellsize
            geotransform[1] = cellsize
            geotransform[3] = yll + (ysize - 0.5) * cellsize  # yMax
            geotransform[5] = -cellsize

            slppos_data = spatial_gfs.get(slpposf['_id'])
            total_len = xsize * ysize
            fmt = '%df' % (total_len,)
            slppos_data = unpack(fmt, slppos_data.read())
            slppos_data = numpy.reshape(slppos_data, (ysize, xsize))

            v_dict = dict()
            for idx, gene_v in enumerate(self.gene_values):
                v_dict[self.gene_to_unit[idx]] = gene_v

            for k, v in v_dict.items():
                slppos_data[slppos_data == k] = v
            if outpath is None:
                outpath = self.scenario_dir + os.path.sep + 'Scenario_%d.tif' % self.ID
            RasterUtilClass.write_gtiff_file(outpath, ysize, xsize, slppos_data, geotransform,
                                             srs, nodata_value)
            client.close()