Example #1
    def initialize_landcover_parameters(landcover_file, landcover_initial_fields_file, dst_dir):
        """Generate initial landcover_init_param parameters."""
        lc_data_items = read_data_items_from_txt(landcover_initial_fields_file)
        # print(lc_data_items)
        field_names = lc_data_items[0]
        lu_id = -1
        for i, v in enumerate(field_names):
            if StringClass.string_match(v, 'LANDUSE_ID'):
                lu_id = i
                break
        data_items = lc_data_items[1:]
        replace_dicts = dict()
        for item in data_items:
            for i, v in enumerate(item):
                if i != lu_id:
                    if field_names[i].upper() not in list(replace_dicts.keys()):
                        replace_dicts[field_names[i].upper()] = {float(item[lu_id]): float(v)}
                    else:
                        replace_dicts[field_names[i].upper()][float(item[lu_id])] = float(v)
        # print(replace_dicts)

        # Generate GTIFF
        for item, v in list(replace_dicts.items()):
            filename = dst_dir + os.path.sep + item + '.tif'
            print(filename)
            RasterUtilClass.raster_reclassify(landcover_file, v, filename)
        return list(replace_dicts['LANDCOVER'].values())
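For orientation, a minimal usage sketch follows (not part of the source). It assumes the function above is exposed as a static method of LanduseUtilClass, as the reclassification example below suggests; all file paths are placeholders and the import is omitted.

    # Hypothetical usage sketch: paths are placeholders; LanduseUtilClass is assumed to be
    # importable from its defining module (import omitted).
    land_cover_codes = LanduseUtilClass.initialize_landcover_parameters(
        '/data/spatial/landcover.tif',                    # raster whose values are reclassified
        '/data/lookup/landcover_initial_parameters.txt',  # table with a LANDUSE_ID column plus one column per parameter
        '/data/output')                                   # one <FIELD>.tif is written per parameter column
    # The return value holds the codes from the LANDCOVER column, which the
    # reclassify_landcover_parameters example below consumes as land_cover_codes.
    print(land_cover_codes)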
Example #2
 def convert_code(in_file, out_file, in_alg='taudem', out_alg='arcgis', datatype=None):
     """
     Convert D8 flow direction code from one algorithm to another.
     Args:
         in_file: input raster file path
         out_file: output raster file path
         in_alg: available algorithms are in FlowModelConst.d8_dirs. "taudem" is the default
         out_alg: same as in_alg. "arcgis" is the default
         datatype: default is None, which keeps the datatype of in_file
     """
     FileClass.check_file_exists(in_file)
     in_alg = in_alg.lower()
     out_alg = out_alg.lower()
     if in_alg not in FlowModelConst.d8_dirs or out_alg not in FlowModelConst.d8_dirs:
         raise RuntimeError('The input algorithm name should be one of %s' %
                            ', '.join(list(FlowModelConst.d8_dirs.keys())))
     convert_dict = dict()
     in_code = FlowModelConst.d8_dirs.get(in_alg)
     out_code = FlowModelConst.d8_dirs.get(out_alg)
     assert len(in_code) == len(out_code)
     for i, tmp_in_code in enumerate(in_code):
         convert_dict[tmp_in_code] = out_code[i]
     if datatype is not None and datatype in GDALDataType:
         RasterUtilClass.raster_reclassify(in_file, convert_dict, out_file, datatype)
     else:
         RasterUtilClass.raster_reclassify(in_file, convert_dict, out_file)
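As a quick illustration, the call below converts a TauDEM-coded D8 raster to the ArcGIS coding using the defaults documented above; the file paths are hypothetical and the import path is an assumption.

    # Hypothetical usage sketch; paths are placeholders and the import path is assumed.
    from pygeoc.hydro import D8Util  # assumed module for the class used in the examples

    # in_alg defaults to 'taudem' and out_alg to 'arcgis', so only the rasters are needed.
    D8Util.convert_code('/data/taudem/flow_dir_d8.tif',
                        '/data/output/flow_dir_arcgis.tif')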
Example #3
 def reclassify_landcover_parameters(landuse_file, landcover_file, landcover_initial_fields_file,
                                     landcover_lookup_file, attr_names, dst_dir):
     """Reclassify landcover_init_param parameters."""
     land_cover_codes = LanduseUtilClass.initialize_landcover_parameters(
             landuse_file, landcover_initial_fields_file, dst_dir)
     attr_map = LanduseUtilClass.read_crop_lookup_table(landcover_lookup_file)
     n = len(attr_names)
     replace_dicts = list()
     dst_crop_tifs = list()
     for i in range(n):
         cur_attr = attr_names[i]
         cur_dict = dict()
         dic = attr_map[cur_attr]
         for code in land_cover_codes:
             if MathClass.floatequal(code, DEFAULT_NODATA):
                 continue
             if code not in list(cur_dict.keys()):
                 cur_dict[code] = dic.get(code)
         replace_dicts.append(cur_dict)
         dst_crop_tifs.append(dst_dir + os.path.sep + cur_attr + '.tif')
     # print(replace_dicts)
     # print(len(replace_dicts))
     # print(dst_crop_tifs)
     # print(len(dst_crop_tifs))
     # Generate GTIFF
     for i, v in enumerate(dst_crop_tifs):
         # print(dst_crop_tifs[i])
         RasterUtilClass.raster_reclassify(landcover_file, replace_dicts[i], v)
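A hedged usage sketch for this routine (not from the source; the attribute names, file paths, and class location are illustrative assumptions):

    # Hypothetical usage sketch; attribute names and paths are placeholders, and
    # LanduseUtilClass is assumed to be importable from its defining module.
    attr_names = ['CANMX', 'BLAI']  # illustrative parameter columns expected in the crop lookup table
    LanduseUtilClass.reclassify_landcover_parameters(
        '/data/spatial/landuse.tif',                      # landuse raster
        '/data/spatial/landcover.tif',                    # landcover raster to reclassify
        '/data/lookup/landcover_initial_parameters.txt',  # same initial-fields table as above
        '/data/lookup/CropLookup.txt',                    # table read by read_crop_lookup_table
        attr_names,
        '/data/output')                                   # one <ATTR>.tif is written per attribute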
Example #4
    def post_process_of_delineated_data(cfg):
        """Do some necessary conversions for the subbasin, stream, and flow direction rasters."""
        # inputs
        stream_net_file = cfg.taudems.streamnet_shp
        subbasin_file = cfg.taudems.subbsn_m
        flow_dir_file_tau = cfg.taudems.d8flow_m
        stream_raster_file = cfg.taudems.stream_m
        # outputs
        # -- shapefile
        shp_dir = cfg.dirs.geoshp
        UtilClass.mkdir(shp_dir)
        # ---- outlet, copy from DirNameUtils.TauDEM
        FileClass.copy_files(cfg.taudems.outlet_m, cfg.vecs.outlet)
        # ---- reaches
        output_reach_file = cfg.vecs.reach
        # ---- subbasins
        subbasin_vector_file = cfg.vecs.subbsn
        # -- raster file
        output_subbasin_file = cfg.spatials.subbsn
        output_flow_dir_file = cfg.spatials.d8flow
        output_stream_link_file = cfg.spatials.stream_link
        output_hillslope_file = cfg.spatials.hillslope

        id_map = StreamnetUtil.serialize_streamnet(stream_net_file,
                                                   output_reach_file)
        RasterUtilClass.raster_reclassify(subbasin_file, id_map,
                                          output_subbasin_file, GDT_Int32)
        StreamnetUtil.assign_stream_id_raster(stream_raster_file,
                                              output_subbasin_file,
                                              output_stream_link_file)

        # Convert D8 encoding rule to ArcGIS
        D8Util.convert_code(flow_dir_file_tau, output_flow_dir_file)

        # convert raster to shapefile (for subbasin and basin)
        print('Generating subbasin vector...')
        VectorUtilClass.raster2shp(output_subbasin_file, subbasin_vector_file,
                                   'subbasin', FieldNames.subbasin_id)
        mask_file = cfg.spatials.mask
        basin_vector = cfg.vecs.bsn
        print('Generating basin vector...')
        VectorUtilClass.raster2shp(mask_file, basin_vector, 'basin',
                                   FieldNames.basin)
        # delineate hillslope
        DelineateHillslope.downstream_method_whitebox(output_stream_link_file,
                                                      flow_dir_file_tau,
                                                      output_hillslope_file)
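The subbasin renumbering at the heart of this routine can also be sketched without the configuration object; the snippet below repeats the same three calls with hypothetical paths (the import locations are assumptions, and StreamnetUtil is presumed importable from its defining module).

    # Hypothetical standalone sketch of the renumbering step shown above.
    from pygeoc.raster import RasterUtilClass  # assumed import path
    from osgeo.gdal import GDT_Int32           # GDAL 32-bit integer raster type

    id_map = StreamnetUtil.serialize_streamnet('/data/taudem/streamnet.shp',  # TauDEM stream network
                                               '/data/output/reach.shp')      # reaches with serialized IDs
    RasterUtilClass.raster_reclassify('/data/taudem/subbasin.tif', id_map,
                                      '/data/output/subbasin.tif', GDT_Int32)
    StreamnetUtil.assign_stream_id_raster('/data/taudem/stream.tif',
                                          '/data/output/subbasin.tif',
                                          '/data/output/stream_link.tif')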
Example #5
    def watershed_delineation(np, dem, outlet_file=None, thresh=0, singlebasin=False,
                              workingdir=None, mpi_bin=None, bin_dir=None,
                              logfile=None, runtime_file=None, hostfile=None,
                              avoid_redo=False):
        """Watershed Delineation based on D8 flow direction.

        Args:
            np: process number for MPI
            dem: DEM path
            outlet_file: predefined outlet shapefile path
            thresh: predefined threshold for extracting streams from the flow accumulation raster
            singlebasin: when set True, only extract subbasins that drain into predefined outlets
            workingdir: directory that stores outputs
            mpi_bin: directory of MPI executable binary, e.g., mpiexec, mpirun
            bin_dir: directory of TauDEM and other executable binaries
            logfile: log file path
            runtime_file: runtime file path
            hostfile: host list file path for MPI
            avoid_redo: avoid executing some functions that do not depend on input arguments
                        when this function is invoked repeatedly
        """
        # 1. Check directories
        if not os.path.exists(dem):
            TauDEM.error('DEM: %s does not exist!' % dem)
        dem = os.path.abspath(dem)
        if workingdir is None or workingdir == '':
            workingdir = os.path.dirname(dem)
        nc = TauDEMFilesUtils(workingdir)  # predefined names
        workingdir = nc.workspace
        UtilClass.mkdir(workingdir)
        # 2. Check log file
        if logfile is not None and FileClass.is_file_exists(logfile):
            os.remove(logfile)
        # 3. perform calculation
        # Filling DEM
        if not (avoid_redo and FileClass.is_file_exists(nc.filldem)):
            UtilClass.writelog(logfile, '[Output] %s' % 'remove pit...', 'a')
            TauDEM.pitremove(np, dem, nc.filldem, workingdir, mpi_bin, bin_dir,
                             log_file=logfile, runtime_file=runtime_file, hostfile=hostfile)
        # Flow direction based on D8 algorithm
        if not (avoid_redo and FileClass.is_file_exists(nc.d8flow)):
            UtilClass.writelog(logfile, '[Output] %s' % 'D8 flow direction...', 'a')
            TauDEM.d8flowdir(np, nc.filldem, nc.d8flow, nc.slp, workingdir,
                             mpi_bin, bin_dir, log_file=logfile,
                             runtime_file=runtime_file, hostfile=hostfile)
        # Flow accumulation without stream skeleton as weight
        if not (avoid_redo and FileClass.is_file_exists(nc.d8acc)):
            UtilClass.writelog(logfile, '[Output] %s' % 'D8 flow accumulation...', 'a')
            TauDEM.aread8(np, nc.d8flow, nc.d8acc, None, None, False, workingdir, mpi_bin, bin_dir,
                          log_file=logfile, runtime_file=runtime_file, hostfile=hostfile)
        # Initial stream network using mean accumulation as threshold
        UtilClass.writelog(logfile, '[Output] %s' % 'Generating stream raster initially...', 'a')
        min_accum, max_accum, mean_accum, std_accum = RasterUtilClass.raster_statistics(nc.d8acc)
        TauDEM.threshold(np, nc.d8acc, nc.stream_raster, mean_accum, workingdir,
                         mpi_bin, bin_dir, log_file=logfile,
                         runtime_file=runtime_file, hostfile=hostfile)
        # Outlets position initialization and adjustment
        UtilClass.writelog(logfile, '[Output] %s' % 'Moving outlet to stream...', 'a')
        if outlet_file is None:  # if not given, take cell with maximum accumulation as outlet
            outlet_file = nc.outlet_pre
            TauDEM.connectdown(np, nc.d8flow, nc.d8acc, outlet_file, nc.outlet_m, wtsd=None,
                               workingdir=workingdir, mpiexedir=mpi_bin, exedir=bin_dir,
                               log_file=logfile, runtime_file=runtime_file, hostfile=hostfile)
        TauDEM.moveoutletstostrm(np, nc.d8flow, nc.stream_raster, outlet_file,
                                 nc.outlet_m, workingdir=workingdir,
                                 mpiexedir=mpi_bin, exedir=bin_dir,
                                 log_file=logfile, runtime_file=runtime_file, hostfile=hostfile)
        # Stream skeleton by the Peuker-Douglas algorithm
        UtilClass.writelog(logfile, '[Output] %s' % 'Generating stream skeleton ...', 'a')
        TauDEM.peukerdouglas(np, nc.filldem, nc.stream_pd, workingdir,
                             mpi_bin, bin_dir, log_file=logfile,
                             runtime_file=runtime_file, hostfile=hostfile)
        # Weighted flow accumulation with outlet
        UtilClass.writelog(logfile, '[Output] %s' % 'Flow accumulation with outlet...', 'a')
        tmp_outlet = None
        if singlebasin:
            tmp_outlet = nc.outlet_m
        TauDEM.aread8(np, nc.d8flow, nc.d8acc_weight, tmp_outlet, nc.stream_pd, False,
                      workingdir, mpi_bin, bin_dir, log_file=logfile,
                      runtime_file=runtime_file, hostfile=hostfile)
        # Determine threshold by input argument or dropanalysis function
        if thresh <= 0:  # find the optimal threshold using dropanalysis function
            UtilClass.writelog(logfile, '[Output] %s' %
                               'Drop analysis to select optimal threshold...', 'a')
            min_accum, max_accum, mean_accum, std_accum = \
                RasterUtilClass.raster_statistics(nc.d8acc_weight)
            if mean_accum - std_accum < 0:
                minthresh = mean_accum
            else:
                minthresh = mean_accum - std_accum
            maxthresh = mean_accum + std_accum
            TauDEM.dropanalysis(np, nc.filldem, nc.d8flow, nc.d8acc_weight,
                                nc.d8acc_weight, nc.outlet_m, minthresh, maxthresh,
                                20, 'true', nc.drptxt, workingdir, mpi_bin, bin_dir,
                                log_file=logfile, runtime_file=runtime_file, hostfile=hostfile)
            if not FileClass.is_file_exists(nc.drptxt):
                # raise RuntimeError('Dropanalysis failed and drp.txt was not created!')
                UtilClass.writelog(logfile, '[Output] %s' %
                                   'dropanalysis failed!', 'a')
                thresh = 0.5 * (maxthresh - minthresh) + minthresh
            else:
                with open(nc.drptxt, 'r', encoding='utf-8') as drpf:
                    temp_contents = drpf.read()
                    (beg, thresh) = temp_contents.rsplit(' ', 1)
            thresh = float(thresh)
            UtilClass.writelog(logfile, '[Output] %s: %f' %
                               ('Selected optimal threshold', thresh), 'a')
        # Final stream network
        UtilClass.writelog(logfile, '[Output] %s' % 'Generating stream raster...', 'a')
        TauDEM.threshold(np, nc.d8acc_weight, nc.stream_raster, thresh,
                         workingdir, mpi_bin, bin_dir, log_file=logfile,
                         runtime_file=runtime_file, hostfile=hostfile)
        UtilClass.writelog(logfile, '[Output] %s' % 'Generating stream net...', 'a')
        TauDEM.streamnet(np, nc.filldem, nc.d8flow, nc.d8acc_weight, nc.stream_raster,
                         nc.outlet_m, nc.stream_order, nc.channel_net,
                         nc.channel_coord, nc.streamnet_shp, nc.subbsn,
                         workingdir, mpi_bin, bin_dir,
                         log_file=logfile, runtime_file=runtime_file, hostfile=hostfile)
        # Serialize IDs of subbasins and the corresponding streams
        UtilClass.writelog(logfile, '[Output] %s' % 'Serialize subbasin&stream IDs...', 'a')
        id_map = StreamnetUtil.serialize_streamnet(nc.streamnet_shp, nc.streamnet_m)
        RasterUtilClass.raster_reclassify(nc.subbsn, id_map, nc.subbsn_m, GDT_Int32)
        StreamnetUtil.assign_stream_id_raster(nc.stream_raster, nc.subbsn_m, nc.stream_m)
        # convert raster to shapefile (for subbasin and basin)
        UtilClass.writelog(logfile, '[Output] %s' % 'Generating subbasin vector...', 'a')
        VectorUtilClass.raster2shp(nc.subbsn_m, nc.subbsn_shp, 'subbasin', 'SUBBASINID')
        # Finish the workflow
        UtilClass.writelog(logfile, '[Output] %s' %
                           'Original subbasin delineation is finished!', 'a')
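For reference, a hedged invocation sketch (all paths and binary directories are placeholders, and the enclosing class is called TauDEMWorkflow here purely for illustration):

    # Hypothetical usage sketch; every path, the binary directories, and the class name
    # are assumptions made only to show how the keyword arguments fit together.
    TauDEMWorkflow.watershed_delineation(np=4,
                                         dem='/data/dem/dem_30m.tif',
                                         outlet_file='/data/outlet/outlet.shp',
                                         thresh=0,             # <= 0 triggers the drop-analysis branch
                                         singlebasin=True,     # keep only subbasins draining to the outlet
                                         workingdir='/data/taudem_workspace',
                                         mpi_bin='/usr/bin',   # directory containing mpiexec
                                         bin_dir='/opt/taudem/bin',
                                         logfile='/data/taudem_workspace/delineation.log')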
Example #6
    def lookup_soil_parameters(dstdir, soiltype_file, soil_lookup_file):
        """Reclassify soil parameters by lookup table."""
        #  Read soil properties from txt file
        soil_lookup_data = read_data_items_from_txt(soil_lookup_file)
        soil_instances = list()
        soil_prop_flds = soil_lookup_data[0][:]
        for i in range(1, len(soil_lookup_data)):
            cur_soil_data_item = soil_lookup_data[i][:]
            cur_seqn = cur_soil_data_item[0]
            cur_sname = cur_soil_data_item[1]
            cur_soil_ins = SoilProperty(cur_seqn, cur_sname)
            for j in range(2, len(soil_prop_flds)):
                cur_flds = StringClass.split_string(cur_soil_data_item[j], '-')  # Get field values
                for k, tmpfld in enumerate(cur_flds):
                    cur_flds[k] = float(tmpfld)  # Convert to float
                if StringClass.string_match(soil_prop_flds[j], SoilUtilClass._NLYRS):
                    cur_soil_ins.SOILLAYERS = int(cur_flds[0])
                elif StringClass.string_match(soil_prop_flds[j], SoilUtilClass._Z):
                    cur_soil_ins.SOILDEPTH = cur_flds
                elif StringClass.string_match(soil_prop_flds[j], SoilUtilClass._OM):
                    cur_soil_ins.OM = cur_flds
                elif StringClass.string_match(soil_prop_flds[j], SoilUtilClass._CLAY):
                    cur_soil_ins.CLAY = cur_flds
                elif StringClass.string_match(soil_prop_flds[j], SoilUtilClass._SILT):
                    cur_soil_ins.SILT = cur_flds
                elif StringClass.string_match(soil_prop_flds[j], SoilUtilClass._SAND):
                    cur_soil_ins.SAND = cur_flds
                elif StringClass.string_match(soil_prop_flds[j], SoilUtilClass._ROCK):
                    cur_soil_ins.ROCK = cur_flds
                elif StringClass.string_match(soil_prop_flds[j], SoilUtilClass._ZMX):
                    cur_soil_ins.SOL_ZMX = cur_flds[0]
                elif StringClass.string_match(soil_prop_flds[j], SoilUtilClass._ANIONEXCL):
                    cur_soil_ins.ANION_EXCL = cur_flds[0]
                elif StringClass.string_match(soil_prop_flds[j], SoilUtilClass._CRK):
                    cur_soil_ins.SOL_CRK = cur_flds[0]
                elif StringClass.string_match(soil_prop_flds[j], SoilUtilClass._BD):
                    cur_soil_ins.DENSITY = cur_flds
                elif StringClass.string_match(soil_prop_flds[j], SoilUtilClass._K):
                    cur_soil_ins.CONDUCTIVITY = cur_flds
                elif StringClass.string_match(soil_prop_flds[j], SoilUtilClass._WP):
                    cur_soil_ins.WILTINGPOINT = cur_flds
                elif StringClass.string_match(soil_prop_flds[j], SoilUtilClass._FC):
                    cur_soil_ins.FIELDCAP = cur_flds
                elif StringClass.string_match(soil_prop_flds[j], SoilUtilClass._AWC):
                    cur_soil_ins.AWC = cur_flds
                elif StringClass.string_match(soil_prop_flds[j], SoilUtilClass._POROSITY):
                    cur_soil_ins.POROSITY = cur_flds
                elif StringClass.string_match(soil_prop_flds[j], SoilUtilClass._USLE_K):
                    cur_soil_ins.USLE_K = cur_flds
                elif StringClass.string_match(soil_prop_flds[j], SoilUtilClass._ALB):
                    cur_soil_ins.SOL_ALB = cur_flds
                elif StringClass.string_match(soil_prop_flds[j], SoilUtilClass._ESCO):
                    cur_soil_ins.ESCO = cur_flds[0]
                elif StringClass.string_match(soil_prop_flds[j], SoilUtilClass._NO3):
                    cur_soil_ins.SOL_NO3 = cur_flds
                elif StringClass.string_match(soil_prop_flds[j], SoilUtilClass._NH4):
                    cur_soil_ins.SOL_NH4 = cur_flds
                elif StringClass.string_match(soil_prop_flds[j], SoilUtilClass._ORGN):
                    cur_soil_ins.SOL_ORGN = cur_flds
                elif StringClass.string_match(soil_prop_flds[j], SoilUtilClass._SOLP):
                    cur_soil_ins.SOL_SOLP = cur_flds
                elif StringClass.string_match(soil_prop_flds[j], SoilUtilClass._ORGP):
                    cur_soil_ins.SOL_ORGP = cur_flds
            cur_soil_ins.check_data_validation()
            soil_instances.append(cur_soil_ins)
        soil_prop_dict = dict()
        for sol in soil_instances:
            cur_sol_dict = sol.soil_dict()
            for fld in cur_sol_dict:
                if fld in soil_prop_dict:
                    soil_prop_dict[fld].append(cur_sol_dict[fld])
                else:
                    soil_prop_dict[fld] = [cur_sol_dict[fld]]
        # print(list(soilPropDict.keys()))
        # print(list(soilPropDict.values()))

        replace_dicts = list()
        dst_soil_tifs = list()
        seqns = soil_prop_dict[SoilUtilClass._SEQN]
        max_lyr_num = int(numpy.max(soil_prop_dict[SoilUtilClass._NLYRS]))
        for key in soil_prop_dict:
            if key != SoilUtilClass._SEQN and key != SoilUtilClass._NAME:
                key_l = 1
                for key_v in soil_prop_dict[key]:
                    if isinstance(key_v, list):
                        if len(key_v) > key_l:
                            key_l = len(key_v)
                if key_l == 1:
                    cur_dict = {}
                    for i, tmpseq in enumerate(seqns):
                        cur_dict[float(tmpseq)] = soil_prop_dict[key][i]
                    replace_dicts.append(cur_dict)
                    dst_soil_tifs.append(dstdir + os.path.sep + key + '.tif')
                else:
                    for i in range(max_lyr_num):
                        cur_dict = dict()
                        for j, tmpseq in enumerate(seqns):
                            if i < soil_prop_dict[SoilUtilClass._NLYRS][j]:
                                cur_dict[float(tmpseq)] = soil_prop_dict[key][j][i]
                            else:
                                cur_dict[float(seqns[j])] = DEFAULT_NODATA
                        replace_dicts.append(cur_dict)
                        dst_soil_tifs.append(dstdir + os.path.sep + key + '_' + str(i + 1) + '.tif')
        # print(replaceDicts)
        # print(len(replaceDicts))
        # print(dstSoilTifs)
        # print(len(dstSoilTifs))

        # Generate GTIFF
        for i, soil_tif in enumerate(dst_soil_tifs):
            print(soil_tif)
            RasterUtilClass.raster_reclassify(soiltype_file, replace_dicts[i], soil_tif)
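Finally, a minimal usage sketch for the soil lookup (paths are placeholders; SoilUtilClass, to which the constants above belong, is assumed to expose this function as a static method):

    # Hypothetical usage sketch; paths are placeholders and the class location is assumed.
    SoilUtilClass.lookup_soil_parameters('/data/output',                  # dstdir for the per-layer GTiffs
                                         '/data/spatial/soiltype.tif',    # soil type raster keyed by SEQN
                                         '/data/lookup/soil_properties_lookup.txt')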