Example #1
    def workflow(cfg, maindb, climdb):
        """
        This function mainly to import measurement data to MongoDB
        data type may include Q (discharge, m3/s), SED (mg/L), TN (mg/L), TP (mg/L), etc.
        the required parameters that defined in configuration file (*.ini)
        """
        if not cfg.use_observed:
            return False
        c_list = climdb.collection_names()
        # Ensure the observed-data collection starts empty (create it if missing,
        # drop stale records otherwise); create the site and variable-description
        # collections only if they do not exist yet.
        if not StringClass.string_in_list(DBTableNames.observes, c_list):
            climdb.create_collection(DBTableNames.observes)
        else:
            climdb.drop_collection(DBTableNames.observes)
        if not StringClass.string_in_list(DBTableNames.sites, c_list):
            climdb.create_collection(DBTableNames.sites)
        if not StringClass.string_in_list(DBTableNames.var_desc, c_list):
            climdb.create_collection(DBTableNames.var_desc)

        file_list = FileClass.get_full_filename_by_suffixes(
            cfg.observe_dir, ['.txt', '.csv'])
        meas_file_list = list()
        site_loc = list()
        # Files named 'observed_*' hold measurement series; the others describe site locations.
        for fl in file_list:
            if StringClass.is_substring('observed_', fl):
                meas_file_list.append(fl)
            else:
                site_loc.append(fl)
        ImportObservedData.data_from_txt(maindb, climdb, meas_file_list,
                                         site_loc, cfg.spatials.subbsn)
        return True
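# Hypothetical usage sketch (an assumption, not part of the original example): the
# workflow() above is assumed to be a method of an importer class in the SEIMS
# preprocess package, driven roughly as follows; the climate database attribute
# name (climate_db) is also an assumption.
#
#     seims_cfg = parse_ini_configuration()
#     conn = ConnectMongoDB(seims_cfg.hostname, seims_cfg.port).get_conn()
#     maindb = conn[seims_cfg.spatial_db]
#     climdb = conn[seims_cfg.climate_db]
#     ImportObservedData.workflow(seims_cfg, maindb, climdb)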
def main():
    from preprocess.config import parse_ini_configuration

    seims_cfg = parse_ini_configuration()
    client = ConnectMongoDB(seims_cfg.hostname, seims_cfg.port)
    conn = client.get_conn()
    db_model = conn[seims_cfg.spatial_db]

    spatial_gfs = GridFS(db_model, DBTableNames.gridfs_spatial)

    csv_path = r'C:\z_data\zhongTianShe\model_data_seims\field_scale_params'
    csv_files = FileClass.get_full_filename_by_suffixes(csv_path, ['.csv'])
    field_count = 7419  # number of field units in the study area
    prefix = 9999  # prefix used in GridFS file names (takes the place of a subbasin ID)
    # Create the mask file: a single row of ones, one value per field
    mask_name = '%d_MASK' % prefix
    mask_array = [[1] * field_count]
    import_array_to_mongodb(spatial_gfs, mask_array, mask_name)

    # Create spatial parameters
    for csv_file in csv_files:
        print('Import %s...' % csv_file)
        param_arrays = read_field_arrays_from_csv(csv_file)
        for key, value in list(param_arrays.items()):
            import_array_to_mongodb(spatial_gfs, value, '%d_%s' % (prefix, key))
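The helper import_array_to_mongodb is not shown in this example. Below is a minimal
sketch of what it might look like, assuming it packs a 2-D array into GridFS as
little-endian doubles and stores the shape in metadata; the metadata keys (NROWS,
NCOLS) and the packing format are assumptions, not the exact SEIMS layout.

import struct


def import_array_to_mongodb(spatial_gfs, arr, name):
    """Write a 2-D list of numbers to GridFS as little-endian doubles."""
    nrows = len(arr)
    ncols = len(arr[0]) if nrows else 0
    # Replace any existing file with the same name so reruns stay idempotent.
    if spatial_gfs.exists(filename=name):
        spatial_gfs.delete(spatial_gfs.find_one({'filename': name})._id)
    flat = [float(v) for row in arr for v in row]
    data = struct.pack('<%dd' % len(flat), *flat)
    spatial_gfs.put(data, filename=name, metadata={'NROWS': nrows, 'NCOLS': ncols})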
Example #3
    def workflow(cfg, maindb, climdb):
        """
        This function mainly to import measurement data to MongoDB
        data type may include Q (discharge, m3/s), SED (mg/L), tn (mg/L), tp (mg/L), etc.
        the required parameters that defined in configuration file (*.ini)
        """
        if not cfg.use_observed:
            return False
        c_list = climdb.collection_names()
        if not StringClass.string_in_list(DBTableNames.observes, c_list):
            climdb.create_collection(DBTableNames.observes)
        else:
            climdb.drop_collection(DBTableNames.observes)
        if not StringClass.string_in_list(DBTableNames.sites, c_list):
            climdb.create_collection(DBTableNames.sites)
        if not StringClass.string_in_list(DBTableNames.var_desc, c_list):
            climdb.create_collection(DBTableNames.var_desc)

        file_list = FileClass.get_full_filename_by_suffixes(cfg.observe_dir, ['.txt'])
        meas_file_list = []
        site_loc = []
        for fl in file_list:
            if StringClass.is_substring('observed_', fl):
                meas_file_list.append(fl)
            else:
                site_loc.append(fl)
        ImportObservedData.data_from_txt(maindb, climdb, meas_file_list, site_loc,
                                         cfg.spatials.subbsn)
        return True
def main():
    from preprocess.config import parse_ini_configuration

    seims_cfg = parse_ini_configuration()
    client = ConnectMongoDB(seims_cfg.hostname, seims_cfg.port)
    conn = client.get_conn()
    db_model = conn[seims_cfg.spatial_db]

    spatial_gfs = GridFS(db_model, DBTableNames.gridfs_spatial)

    csv_path = r'C:\z_data\zhongTianShe\model_data_seims\field_scale_params'
    csv_files = FileClass.get_full_filename_by_suffixes(csv_path, ['.csv'])
    field_count = 7419
    prefix = 9999
    # Create mask file
    mask_name = '%d_MASK' % prefix
    mask_array = [[1] * field_count]
    import_array_to_mongodb(spatial_gfs, mask_array, mask_name)

    # Create spatial parameters
    for csv_file in csv_files:
        print('Import %s...' % csv_file)
        param_arrays = read_field_arrays_from_csv(csv_file)
        for key, value in list(param_arrays.items()):
            import_array_to_mongodb(spatial_gfs, value,
                                    '%d_%s' % (prefix, key))
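The helper read_field_arrays_from_csv is not shown either. Below is a hypothetical
sketch, assuming the CSV holds one row per field with the field ID in the first column
and one model parameter per remaining column; the resulting 1 x N row layout mirrors
the mask created above, but the real file format may differ.

import csv


def read_field_arrays_from_csv(csv_file):
    """Return {parameter_name: [[v1, v2, ..., vN]]} with one value per field."""
    with open(csv_file, 'r') as f:
        reader = csv.reader(f)
        header = next(reader)  # e.g. ['FID', 'param1', 'param2', ...]
        rows = [row for row in reader if row]
    param_arrays = dict()
    for col, name in enumerate(header[1:], start=1):
        param_arrays[name.upper()] = [[float(row[col]) for row in rows]]
    return param_arrays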