Example #1
def hydroresample(in_vardict,
                  out_vardict,
                  in_hydrotemplate,
                  resampling_type='NEAREST'):
    templatedesc = arcpy.Describe(in_hydrotemplate)

    # Check that all in_vardict keys are in out_vardict (that each input path has a matching output path)
    keymatch = {l: l in out_vardict for l in in_vardict}
    if not all(keymatch.values()):
        raise ValueError(
            'The following keys in in_vardict are missing from out_vardict: {}'.format(
                [l for l in keymatch if not keymatch[l]]))

    # Iterate through input rasters
    for var in in_vardict:
        outresample = out_vardict[var]

        if not arcpy.Exists(outresample):
            print('Processing {}...'.format(outresample))
            arcpy.env.extent = arcpy.env.snapRaster = in_hydrotemplate
            arcpy.env.XYResolution = "0.0000000000000001 degrees"
            arcpy.env.cellSize = templatedesc.meanCellWidth
            print('%.17f' % float(arcpy.env.cellSize))

            try:
                arcpy.Resample_management(in_raster=in_vardict[var],
                                          out_raster=outresample,
                                          cell_size=templatedesc.meanCellWidth,
                                          resampling_type=resampling_type)
            except Exception:
                print("Exception in user code:")
                traceback.print_exc(file=sys.stdout)
                arcpy.ResetEnvironments()

        else:
            print('{} already exists...'.format(outresample))

        # Check whether everything is the same
        maskdesc = arcpy.Describe(outresample)

        extentcomp = maskdesc.extent.JSON == templatedesc.extent.JSON
        print('Equal extents? {}'.format(extentcomp))
        if not extentcomp:
            print("{0} != {1}".format(maskdesc.extent, templatedesc.extent))

        cscomp = maskdesc.meanCellWidth == templatedesc.meanCellWidth
        print('Equal cell size? {}'.format(cscomp))
        if not cscomp:
            print("{0} != {1}".format(maskdesc.meanCellWidth,
                                      templatedesc.meanCellWidth))

        srcomp = compsr(outresample, in_hydrotemplate)
        print('Same Spatial Reference? {}'.format(srcomp))
        if not srcomp:
            print("{0} != {1}".format(maskdesc.SpatialReference.name,
                                      templatedesc.SpatialReference.name))

    arcpy.ResetEnvironments()
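A minimal usage sketch (hypothetical paths and variable keys, not taken from the original project): in_vardict and out_vardict map each variable name to an input and an output raster path, and in_hydrotemplate supplies the snap raster, extent and cell size.

# Hypothetical call; assumes arcpy is imported and the input rasters exist.
in_vardict = {'tmin': r'C:\data\worldclim\tmin.tif'}
out_vardict = {'tmin': r'C:\data\resampled\tmin_rs.tif'}
hydroresample(in_vardict, out_vardict,
              in_hydrotemplate=r'C:\data\hydrosheds\landmask.tif',
              resampling_type='BILINEAR')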
Example #2
def process_pallets(pallets, is_post_copy=False):
    '''
    pallets: Pallet[]
    is_post_copy: Boolean

    Loop over all pallets, check if data has changed and determine whether to process.
    Call `process` if this is not the post copy. Otherwise call `post_copy_process`.
    Finally, call ship.
    '''

    if not is_post_copy:
        verb = 'processing'
    else:
        verb = 'post copy processing'

    log.info('%s pallets...', verb)

    for pallet in pallets:
        try:
            if pallet.is_ready_to_ship(
            ):  #: checks for schema changes or errors
                if pallet.requires_processing(
                ) and pallet.success[0]:  #: checks for data that was updated
                    log.info('%s pallet: %r', verb, pallet)
                    start_seconds = clock()

                    arcpy.ResetEnvironments()
                    arcpy.ClearWorkspaceCache_management()
                    if not is_post_copy:
                        with seat.timed_pallet_process(pallet, 'process'):
                            pallet.process()
                    else:
                        with seat.timed_pallet_process(pallet,
                                                       'post_copy_process'):
                            pallet.post_copy_process()

                    log.debug('%s pallet %s', verb.replace('ing', 'ed'),
                              seat.format_time(clock() - start_seconds))

                if not is_post_copy:
                    start_seconds = clock()

                    log.info('shipping pallet: %r', pallet)
                    arcpy.ResetEnvironments()
                    arcpy.ClearWorkspaceCache_management()
                    with seat.timed_pallet_process(pallet, 'ship'):
                        pallet.ship()
                    log.debug('shipped pallet %s',
                              seat.format_time(clock() - start_seconds))
        except Exception as e:
            pallet.success = (False, e)
            log.error('error %s pallet: %s for pallet: %r',
                      verb,
                      e,
                      pallet,
                      exc_info=True)
Example #3
def main():

    # change directory to the parent folder path
    os.chdir(pf_path)

    # list all folders in parent folder path - note this is not recursive
    dir_list = filter(lambda x: os.path.isdir(x), os.listdir('.'))

    # remove folders in the list that start with '00_' since these aren't our huc8 folders
    for dir in dir_list[:]:
        if dir.startswith('00_'):
            dir_list.remove(dir)

    # set arcpy environment settings
    arcpy.env.overwriteOutput = 'TRUE'  # overwrite output
    arcpy.env.resamplingMethod = 'CUBIC'  # set resampling method to cubic in case arc does any behind the scenes dem re-sampling

    # run prism_clip function for each huc8 folder
    for dir in dir_list:
        #if not os.path.exists(os.path.join(pf_path, dir, 'PRISM')):
            try:
                prism_clip(dir)
            except Exception as err:
                print "Clipping PRISM failed for " + dir + ". The exception thrown was:"
                print err

    # clear environment settings
    arcpy.ResetEnvironments()
    arcpy.ClearEnvironment("workspace")
Example #4
def main():
    nhd = arcpy.GetParameterAsText(0)
    watersheds = arcpy.GetParameterAsText(1)
    topoutfolder = arcpy.GetParameterAsText(2)
    filterlakes = arcpy.GetParameterAsText(3)
    cumulative_watersheds(nhd, watersheds, topoutfolder, filterlakes)
    arcpy.ResetEnvironments()
Example #5
 def __exit__(self, exc_type, exc_value, traceback):
     #clean up
     self.Workspace = ""
     self.Regions = None
     shutil.rmtree(self._TempLocation, True)
     arcpy.ResetEnvironments()
     arcpy.ClearEnvironment("workspace")
Example #6
def reclass(Layer):
    print "\t" * 2 + "Reclassifying values to High/NotHigh..."
    if Layer == "crowni":
        ReclassExpression = "Value > 5000"
        ReclassRaster = Con(tmpRaster, 2, 1, ReclassExpression)
    if Layer == "emissions":
        ReclassExpression = "Value > 100"
        ReclassRaster = Con(tmpRaster, 2, 1, ReclassExpression)
    if Layer == "flame":
        ReclassExpression = "Value > 2"
        ReclassRaster = Con(tmpRaster, 2, 1, ReclassExpression)
    if Layer == "intensity":
        ReclassExpression = "Value > 400"
        ReclassRaster = Con(tmpRaster, 2, 1, ReclassExpression)
    if Layer == "fuelcon":
        ReclassExpression = "Value > 50"
        ReclassRaster = Con(tmpRaster, 2, 1, ReclassExpression)
    if Layer == "scorchht":
        ReclassExpression = "Value > 2"
        ReclassRaster = Con(tmpRaster, 2, 1, ReclassExpression)
    if Layer == "soilh":
        ReclassExpression = "Value > 60"
        ReclassRaster = Con(tmpRaster, 2, 1, ReclassExpression)
    if Layer == "spread":
        ReclassExpression = "Value > 83.3"
        ReclassRaster = Con(tmpRaster, 2, 1, ReclassExpression)
    if Layer == "treem":
        ReclassExpression = "Value > 70"
        ReclassRaster = Con(tmpRaster, 2, 1, ReclassExpression)
    ReclassRaster.save(outPath + "\\" + Layer)
    # Clean up
    arcpy.ResetEnvironments()
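The chain of single-branch if statements can be expressed as a threshold lookup; a sketch of the same logic under the same assumptions (tmpRaster, outPath and the Spatial Analyst Con function defined elsewhere, cutoffs copied from the original):

# Sketch only: same layers and cutoffs as above, driven by a lookup table.
THRESHOLDS = {"crowni": 5000, "emissions": 100, "flame": 2, "intensity": 400,
              "fuelcon": 50, "scorchht": 2, "soilh": 60, "spread": 83.3,
              "treem": 70}

def reclass(Layer):
    print "\t" * 2 + "Reclassifying values to High/NotHigh..."
    ReclassRaster = Con(tmpRaster, 2, 1, "Value > {0}".format(THRESHOLDS[Layer]))
    ReclassRaster.save(outPath + "\\" + Layer)
    # Clean up
    arcpy.ResetEnvironments()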
Example #7
def process_pallets(pallets):
    '''pallets: Pallet[]

    Loop over all pallets, check if data has changed, and determine whether to process.
    '''

    verb = 'processing'

    log.info('%s pallets...', verb)

    for pallet in pallets:
        try:
            if pallet.is_ready_to_ship():  #: checks for schema changes or errors
                if pallet.requires_processing():  #: checks for data that was updated
                    log.info('%s pallet: %r', verb, pallet)
                    start_seconds = perf_counter()

                    arcpy.ResetEnvironments()
                    arcpy.ClearWorkspaceCache_management()

                    with seat.timed_pallet_process(pallet, 'process'):
                        pallet.process()

                    log.debug('%s pallet %s', verb.replace('ing', 'ed'), seat.format_time(perf_counter() - start_seconds))
        except Exception as e:
            pallet.success = (False, str(e))
            log.error('error %s pallet: %s for pallet: %r', verb, e, pallet, exc_info=True)
Example #8
def clip_to_hu8(raster,
                nhd_gdb,
                out_dir,
                projection=arcpy.SpatialReference(102039)):
    """Outputs a series of rasters, each one clipped to a different HU8. """
    env.workspace = 'in_memory'
    env.outputCoordinateSystem = projection
    env.compression = "NONE"  # only final tifs are generated
    env.snapRaster = raster
    env.cellSize = '10'
    env.pyramids = "PYRAMIDS -1 SKIP_FIRST"
    arcpy.CheckOutExtension("Spatial")

    # HUC8 polygons each saved as separate fc inheriting albers from environ
    huc8_fc = os.path.join(nhd_gdb, "WBD_HU8")
    arcpy.MakeFeatureLayer_management(huc8_fc, "huc8_layer")
    huc4_code = re.search('\d{4}', os.path.basename(nhd_gdb)).group()

    clips_dir = os.path.join(out_dir, 'huc8clips{0}'.format(huc4_code))
    if not os.path.exists(clips_dir):
        os.mkdir(clips_dir)


##    # add walls
##    arcpy.PolygonToLine_management(huc8_fc, 'wall_lines')
##    arcpy.AddField_management('wall_lines', "height", "DOUBLE")
##    arcpy.CalculateField_management('wall_lines', "height", '500', "PYTHON")
##    arcpy.FeatureToRaster_conversion('wall_lines', "height", 'wall_raster')
##    wallsObject = Raster('wall_raster')
##    elevObject = Raster(raster)
##    walled_ned = Con(IsNull(wallsObject), elevObject,
##                    (wallsObject + elevObject))

    # for each HU8 feature in the fc, make a clip
    with arcpy.da.SearchCursor(huc8_fc, ["HUC_8"]) as cursor:
        for row in cursor:
            if row[0].startswith(huc4_code):
                whereClause = """"{0}" = '{1}'""".format("HUC_8", row[0])
                arcpy.SelectLayerByAttribute_management(
                    "huc8_layer", 'NEW_SELECTION', whereClause)
                arcpy.CopyFeatures_management("huc8_layer", "this_hu8")

                # clip the raster
                out_raster = os.path.join(clips_dir,
                                          'NED{0}.tif'.format(row[0]))
                cu.multi_msg('Creating output {0}'.format(out_raster))

                # use a small buffer here because otherwise the walls get
                # cut off in slivers
                arcpy.Buffer_analysis('this_hu8', 'this_hu8_buffer', 5000)
                arcpy.Clip_management(raster, '', out_raster,
                                      'this_hu8_buffer', '#',
                                      'ClippingGeometry')
                arcpy.Delete_management('this_hu8')
                arcpy.Delete_management('this_hu8_buffer')

    arcpy.Delete_management('huc8_layer')
    arcpy.ResetEnvironments()
    arcpy.CheckInExtension("Spatial")
Example #9
def test():
    wsraster = r'C:\GISData\Scratch\new_watersheds.gdb\huc08020203_watersheds_precursors'
    subregion = r'C:\GISData\Scratch\Scratch.gdb\huc08020203'
    seedline = r'C:\GISData\Scratch\new_pourpoints\pourpoints0802\pourpoints.gdb\eligible_flowlines'
    seedpoly = r'C:\GISData\Scratch\new_pourpoints\pourpoints0802\pourpoints.gdb\eligible_lakes'
    outfolder = r'C:\GISData\Scratch\scott 08020203'
    clean_watersheds(wsraster, subregion, seedline, seedpoly, outfolder)
    arcpy.ResetEnvironments()
Example #10
    def setpRAE(self,snapgds, directory,extentgds = None, maskgds = None):
        """Set Raster Analysis Environment.
            snapgds: snap IGeodataset 
            directory: workspace and scratch workspace directory
            extentgds: extent IGeodataset
            maskgds: mask IGeodataset
        """
        try:
            raise Exception("This is a work in progress. It's not quite right yet")
            #https://pro.arcgis.com/en/pro-app/arcpy/classes/env.htm
        
            arcpy.ResetEnvironments()
            pExists = os.path.exists(directory)

            #set spatial reference
            if maskgds is not None:
                #arcpy.env.outputCoordinateSystem = arcpy.Describe(maskgds).spatialReference
                arcpy.env.extent = arcpy.Describe(maskgds).extent
                #arcpy.env.mask = maskgds
            else:
                arcpy.env.outputCoordinateSystem = arcpy.Describe(snapgds).spatialReference

            #endif

            #set output workspace - check it exists first and create it if not
            if not pExists:
                #create one
                os.makedirs(directory)
            #endif
        
            arcpy.env.workspace = directory
            arcpy.env.scratchWorkspace = directory
        
            #Cell Size
            desc = arcpy.Describe(snapgds)
            arcpy.env.cellSize = snapgds
            #extent
            #if extentgds is not None:
                #arcpy.env.extent = extentgds
            arcpy.env.snapRaster = snapgds
        except:
            arcpy.ResetEnvironments()
            tb = traceback.format_exc()
            return
Example #11
def main():
    wsraster = arcpy.GetParameterAsText(0)  # Watershed raster
    subregion = arcpy.GetParameterAsText(
        1)  # Single polygon CSI subregion feature class for boundary.
    seedline = arcpy.GetParameterAsText(
        2)  # Stream lines used as seeds for watersheds
    seedpoly = arcpy.GetParameterAsText(
        3)  # Lake polys used as seeds for watersheds
    outfolder = arcpy.GetParameterAsText(4)  # Folder for output.
    clean_watersheds(wsraster, subregion, seedline, seedpoly, outfolder)
    arcpy.ResetEnvironments()
Example #12
def hydronibble(in_vardict, out_vardict, in_hydrotemplate, nodatavalue=-9999):
    arcpy.env.extent = arcpy.env.snapRaster = in_hydrotemplate
    arcpy.env.XYResolution = "0.0000000000000001 degrees"
    arcpy.env.cellSize = arcpy.Describe(in_hydrotemplate).meanCellWidth

    # Perform euclidean allocation to HydroSHEDS land mask pixels with no WorldClim data
    for var in in_vardict:
        if arcpy.Exists(in_vardict[var]):
            outnib = out_vardict[var]
            if not arcpy.Exists(outnib):
                print('Processing {}...'.format(outnib))
                try:
                    mismask = Con((IsNull(in_vardict[var])) &
                                  (~IsNull(in_hydrotemplate)),
                                  in_hydrotemplate)

                    #Perform euclidean allocation to those pixels
                    Nibble(
                        in_raster=Con(
                            ~IsNull(mismask), nodatavalue, in_vardict[var]
                        ),  #where mismask is not NoData (pixels for which var is NoData but hydrotemplate has data), assign nodatavalue (provided by user, not NoData), otherwise, keep var data (see Nibble tool)
                        in_mask_raster=in_vardict[var],
                        nibble_values='DATA_ONLY',
                        nibble_nodata='PRESERVE_NODATA').save(outnib)

                    del mismask

                except Exception:
                    print("Exception in user code:")
                    traceback.print_exc(file=sys.stdout)
                    del mismask
                    arcpy.ResetEnvironments()

            else:
                print('{} already exists...'.format(outnib))
        else:
            print('Input - {} does not exist...'.format(in_vardict[var]))

    arcpy.ResetEnvironments()
Example #13
def test():
    """Tests the tool. Call from another module to test."""
    nhd_gdb = 'E:/RawNHD_byHUC/NHDH0415.gdb'
    ned_dir = 'E:/Downloaded_NED'
    ned_footprints_fc = 'C:/GISData/NED_FootPrint_Subregions.gdb/nedfootprints'
    out_dir = 'C:/GISData/Scratch'
    is_zipped = True
    available_ram = '12'

    mosaic_workspace = stage_files(nhd_gdb, ned_dir, ned_footprints_fc,
                                   out_dir, is_zipped)
    mosaic(mosaic_workspace, mosaic_workspace, available_ram)
    #delete_neds(mosaic_workspace)
    arcpy.ResetEnvironments()
Example #14
def wall(nhd_gdb,
         rasters_list,
         outfolder,
         height='500',
         projection=arcpy.SpatialReference(102039)):
    """For one or more HU8s within the same subregion (nhd_gdb variable),
    adds walls at the boundaries to force flow direction that do not cross
    the boundary."""
    env.workspace = 'in_memory'
    env.outputCoordinateSystem = projection
    env.compression = "NONE"
    env.snapRaster = rasters_list[0]  # they all have the same snap
    env.cellSize = '10'
    env.pyramids = "PYRAMIDS -1 SKIP_FIRST"
    arcpy.CheckOutExtension("Spatial")

    # HU8 layer
    huc8_fc = os.path.join(nhd_gdb, "WBD_HU8")
    arcpy.MakeFeatureLayer_management(huc8_fc, "huc8_layer")

    # create output folder with HUC4 in the name
    huc4_code = re.search('\d{4}', os.path.basename(nhd_gdb)).group()
    walled_dir = os.path.join(outfolder, 'walled' + huc4_code)
    if not os.path.exists(walled_dir):
        os.mkdir(walled_dir)

    # make the walls raster
    arcpy.PolygonToLine_management(huc8_fc, 'wall_lines')
    arcpy.AddField_management('wall_lines', "height", "DOUBLE")
    arcpy.CalculateField_management('wall_lines', "height", '500', "PYTHON")
    arcpy.FeatureToRaster_conversion('wall_lines', "height", 'wall_raster')
    wallsObject = Raster('wall_raster')

    for raster in rasters_list:
        out_name = os.path.join(
            walled_dir,
            os.path.basename(raster).replace('fel.tif', '_wfel.tif'))
        cu.multi_msg('Creating output {0}'.format(out_name))
        env.extent = raster
        elevObject = Raster(raster)
        walled_ned = Con(IsNull(wallsObject), elevObject,
                         (wallsObject + elevObject))

        walled_ned.save(out_name)

    for item in ['huc8_layer', 'wall_lines', 'wall_raster']:
        arcpy.Delete_management(item)
    arcpy.ResetEnvironments()
    arcpy.CheckInExtension("Spatial")
Example #15
def streamBurning(DEM_orig, scratch_gdb, seg_network_a, home, home_name):
    print("stream burning process")
    arcpy.CalculateStatistics_management(DEM_orig)

    dem_orig_b = Con(IsNull(DEM_orig), 0, DEM_orig)
    dem_orig_b.save(scratch_gdb + "/DEM_square")

    # set up river network raster
    network_raster = scratch_gdb + "/network_raster"

    arcpy.env.extent = dem_orig_b
    arcpy.env.cellSize = dem_orig_b
    arcpy.env.snapRaster = dem_orig_b
    arcpy.env.outputCoordinateSystem = dem_orig_b

    print('convert network to raster')
    net_fields = [f.name for f in arcpy.ListFields(seg_network_a)]
    if "burn_val" in net_fields:
        arcpy.DeleteField_management(seg_network_a, "burn_val")
    del net_fields
    arcpy.AddField_management(seg_network_a, "burn_val", "SHORT")
    arcpy.CalculateField_management(seg_network_a, "burn_val", "10")  #

    arcpy.FeatureToRaster_conversion(
        seg_network_a, "burn_val", network_raster,
        dem_orig_b)  # THINK IT IS WORTH CHANGING THE ATTRIBUTE VALUE

    network_raster_a = Con(
        IsNull(network_raster), 0,
        30)  # changed to 30 to see if it improves stream ordering
    network_raster_a.save(scratch_gdb + "/net_ras_fin")
    arcpy.ResetEnvironments()

    # This is just a map algebra step to replace the stuff above that uses numpy (the numpy
    # approach works but is limited in terms of how much it can process).
    print("stream burning DEM")
    rivers_ting = Raster(scratch_gdb + "/net_ras_fin")
    dem_ting = Raster(scratch_gdb + "/DEM_square")

    stream_burn_dem_a = dem_ting - rivers_ting
    stream_burn_dem_a.save(scratch_gdb + "/raster_burn")

    sb_DEM = os.path.join(home, "{0}strBurndDEm.tif".format(home_name))

    arcpy.CopyRaster_management(scratch_gdb + "/raster_burn", sb_DEM)

    print("stream burning complete")
    return stream_burn_dem_a, network_raster, sb_DEM
Example #16
def roadtoheat(site, inshp, res, kernel_dir, keyw, inFID, heatfield, sitedic,
               snapras, outdir):
    expr = """{0} = {1}""".format(arcpy.AddFieldDelimiters(inshp, inFID),
                                  str(site))
    print(expr)
    arcpy.MakeFeatureLayer_management(in_features=inshp, out_layer='lyr')
    arcpy.SelectLayerByAttribute_management('lyr',
                                            selection_type='NEW_SELECTION',
                                            where_clause=expr)
    #print(site)
    nshp = len(
        [row[0] for row in arcpy.da.SearchCursor('lyr', [inFID])]
    )  #int(arcpy.GetCount_management('lyr').getOutput(0)) would be faster but often has a bug
    #print(nshp)
    if nshp > 0:
        #print('{} features'.format(nshp))
        arcpy.ResetEnvironments()
        arcpy.env.extent = sitedic[site]
        arcpy.env.snapRaster = snapras
        outras = os.path.join(outdir,
                              'hpmstiger_{0}{1}.tif'.format(heatfield, site))
        if not arcpy.Exists(outras):
            print('{} does not exist, generate heatmap'.format(outras))
            tmpdir = os.path.join(os.path.dirname(outdir),
                                  'tmp_{}'.format(str(site)))
            os.mkdir(tmpdir)
            arcpy.env.scratchWorkspace = tmpdir
            arcpy.PolylineToRaster_conversion('lyr',
                                              value_field=heatfield,
                                              out_rasterdataset=outras,
                                              priority_field=heatfield,
                                              cellsize=res)
            customheatmap(kernel_dir=kernel_dir,
                          in_raster=outras,
                          scratch_dir=tmpdir,
                          out_gdb=outdir,
                          out_var=heatfield + str(site),
                          divnum=100,
                          keyw=keyw,
                          ext='.tif',
                          verbose=True)
            arcpy.Delete_management(tmpdir)
    arcpy.SelectLayerByAttribute_management(
        'lyr', selection_type='CLEAR_SELECTION')  #might not be necessary
    arcpy.Delete_management('lyr')  #might not be necessary
Example #17
    def __init__(self, workspacePath, regions):
        self.Workspace = ""
        self.Regions = None
        self.isInit = False
        self.Mask = None

        arcpy.ResetEnvironments()
        self._WorkspaceDirectory = workspacePath
        self._TempLocation = tempfile.mkdtemp(dir=self._WorkspaceDirectory)

        arcpy.env.workspace = self._TempLocation
        arcpy.env.overwriteOutput = True

        self._initialize(regions)

        arcpy.env.workspace = self._TempLocation
        arcpy.env.overwriteOutput = True
        self._sm("initialized Percent Overlay Agent")
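Examples #5 and #17 look like the __exit__ and __init__ halves of a context-manager class; a hedged usage sketch, assuming the class also defines __enter__ returning self and calling it PercentOverlayAgent (a hypothetical name inferred from the log message):

# Hypothetical usage: __init__ resets environments and creates a temp workspace,
# __exit__ removes the temp folder and clears the workspace environment.
with PercentOverlayAgent(workspace_dir, regions) as agent:
    pass  # run the overlay analysis against agent.Regions here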
Example #18
def wall(nhd_gdb,
         rasters_list,
         outfolder,
         height='500',
         projection=arcpy.SpatialReference(102039)):
    """For one or more HU8s within the same subregion (nhd_gdb variable),
    adds walls at the boundaries to force flow direction that do not cross
    the boundary."""
    env.workspace = 'in_memory'
    env.outputCoordinateSystem = projection
    env.compression = "NONE"
    env.snapRaster = rasters_list[0]  # they all have the same snap
    env.cellSize = '10'
    env.pyramids = "PYRAMIDS -1 SKIP_FIRST"
    arcpy.CheckOutExtension("Spatial")

    # HU8 layer
    huc12_fc = os.path.join(nhd_gdb, "WBDHU12")
    arcpy.MakeFeatureLayer_management(huc12_fc, "huc12_layer")

    # make the walls raster
    arcpy.PolygonToLine_management(huc12_fc, 'wall_lines')
    arcpy.AddField_management('wall_lines', "height", "DOUBLE")
    arcpy.CalculateField_management('wall_lines', "height", '500000', "PYTHON")
    arcpy.FeatureToRaster_conversion('wall_lines', "height", 'wall_raster')
    wallsObject = Raster('wall_raster')

    for raster in rasters_list:
        out_name = os.path.join(raster.replace('.tif', '_walled.tif'))
        arcpy.AddMessage('Creating output {0}'.format(out_name))
        env.extent = raster
        elevObject = Raster(raster)
        walled_ned = Con(
            IsNull(wallsObject), elevObject,
            Con(LessThan(elevObject, -58000), elevObject, wallsObject))

        walled_ned.save(out_name)

    for item in ['huc12_layer', 'wall_lines', 'wall_raster']:
        arcpy.Delete_management(item)
    arcpy.ResetEnvironments()
    arcpy.CheckInExtension("Spatial")

    return out_name
Example #19
def burn_streams(subregion_ned,
                 nhd_gdb,
                 burnt_out,
                 projection=arcpy.SpatialReference(102039)):
    env.snapRaster = subregion_ned
    env.outputCoordinateSystem = projection
    env.compression = "LZ77"  # compress temp tifs for speed
    env.extent = subregion_ned
    env.workspace = 'in_memory'

    flowline = os.path.join(nhd_gdb, 'NHDFlowline')
    # Copy flowlines to shapefile that will inherit environ output coord system
    # just easier to have a copy in the correct projection later
    arcpy.CopyFeatures_management(flowline, 'flowline_proj')
    ##    arcpy.FeatureClassToFeatureClass_conversion("NHDFlowline", "in_memory", flow_line)
    cu.multi_msg("Prepared NHDFlowline for rasterizing.")

    # Feature to Raster- rasterize the NHDFlowline
    # will inherit grid from env.snapRaster
    arcpy.FeatureToRaster_conversion('flowline_proj', "OBJECTID",
                                     'flowline_raster', "10")
    cu.multi_msg("Converted flowlines to raster.")

    # Raster Calculator- burns in streams, beveling in from 500m
    cu.multi_msg(
        "Burning streams into raster, 10m deep and beveling in from 500m out. This may take a while...."
    )
    arcpy.CheckOutExtension("Spatial")
    distance = EucDistance('flowline_proj', cell_size="10")
    streams = Reclassify(
        Raster('flowline_raster') > 0, "Value", "1 1; NoData 0")
    burnt = Raster(subregion_ned) - (10 * streams) - (0.02 * (500 - distance) *
                                                      (distance < 500))

    cu.multi_msg("Saving output raster...")
    burnt.save(burnt_out)

    # Delete intermediate rasters and shapefiles
    for item in ['flowline_proj', 'flowline_raster']:
        arcpy.Delete_management(item)
    arcpy.CheckInExtension("Spatial")
    cu.multi_msg("Burn process completed")
    arcpy.ResetEnvironments()
Example #20
 def __init__(self, workspacePath, workspaceID):
     super(StreamStatsNationalOps, self).__init__(workspacePath)
     self.WorkspaceID = workspaceID
     self.mask = os.path.join(
         os.path.join(self._WorkspaceDirectory, self.WorkspaceID + '.gdb',
                      "Layers"),
         Config()["catchment"]["downstream"])
     self.mask2 = os.path.join(
         os.path.join(self._WorkspaceDirectory, self.WorkspaceID + '.gdb',
                      "Layers"),
         Config()["catchment"]["global"])
     self.mask3 = os.path.join(
         os.path.join(self._WorkspaceDirectory, self.WorkspaceID + '.gdb',
                      "Layers"),
         Config()["catchment"]["upstream"])
     if not arcpy.Exists(self.mask):
         raise Exception("Mask does not exist: " + self.mask)
     self._sm("initialized StreamStatsNationalOps")
     arcpy.ResetEnvironments()
Example #21
def clear_data_dir(gdb_dir, shapefile_dir, save_dir):
    gdb_name = r'Bikeshare_GDB'
    gdb_path = gdb_dir + r'/' + gdb_name + r'.gdb'

    # set arcpy workspace
    arcpy.env.workspace = gdb_path
    arcpy.ResetEnvironments()
    arcpy.env.overwriteOutput = True

    # check if data directory exists, and delete it, if yes
    if os.path.exists(gdb_dir):
        arcpy.Delete_management(gdb_dir)

    # create empty directories for geodatabase and shapefiles
    os.mkdir(gdb_dir)
    os.mkdir(shapefile_dir)

    # check if save directory exists, and create, if not
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)

    print('Directory Clear and Refresh Complete!')
Example #22
def main():
    # change directory to the parent folder path
    os.chdir(pf_path)
    # list all folders in parent folder path - note this is not recursive
    dir_list = filter(lambda x: os.path.isdir(x), os.listdir('.'))
    # remove folders in the list that start with '00_' since these aren't our huc8 folders
    for dir in dir_list[:]:
        if dir.startswith('00_'):
            dir_list.remove(dir)

    # set arcpy environment settings
    arcpy.env.overwriteOutput = 'TRUE'  # overwrite output
    arcpy.env.resamplingMethod = 'NEAREST'  # set resampling method to nearest in case arc does any behind the scenes dem re-sampling

    # run ras_clip function for each huc8 folder
    for dir in dir_list:
        ras_clip(dir, 'EVT')
        ras_clip(dir, 'BPS')

    # reset arcpy environment
    arcpy.ResetEnvironments()
    arcpy.ClearEnvironment("workspace")
Example #23
def main():

    # get list of all '*.img' rasters in dem_path folder
    os.chdir(dem_path)
    dems = glob.glob('*.img')
    dem_list = ";".join(dems)

    # environment settings
    arcpy.env.workspace = dem_path
    arcpy.env.resamplingMethod = 'CUBIC'
    outCS = arcpy.SpatialReference(coord_sys)

    # mosaic individual tiles to single raster
    tmp_dem = arcpy.MosaicToNewRaster_management(dem_list, out_path,
                                                 'tmp_' + out_name + '.tif',
                                                 outCS, "32_BIT_FLOAT", "",
                                                 "1")

    # clip raster to aoi shapefile
    out_dem = arcpy.Clip_management(tmp_dem, '',
                                    os.path.join(out_path, out_name + '.tif'),
                                    aoi_path, '', 'ClippingGeometry',
                                    'NO_MAINTAIN_EXTENT')

    # create and save hillshade
    arcpy.ResetEnvironments()
    out_hs = arcpy.sa.Hillshade(out_dem, '', '', "NO_SHADOWS")
    out_hs.save(os.path.join(out_path, out_name + '_HS.tif'))

    # check raster cell size
    xResult = arcpy.GetRasterProperties_management(tmp_dem, 'CELLSIZEX')
    print 'Mosaic Raster Cell Size: ' + str(xResult.getOutput(0))
    xResult = arcpy.GetRasterProperties_management(out_dem, 'CELLSIZEX')
    print 'Clipped Raster Cell Size: ' + str(xResult.getOutput(0))

    # clear environment settings
    arcpy.ClearEnvironment("workspace")
Example #24
def update(crate, validate_crate, change_detection):
    '''
    crate: models.Crate
    validate_crate: models.Pallet.validate_crate
    change_detection: ChangeDetection

    returns: (string, string)
        One of the result string constants from models.Crate and an optional message

    Checks to see if a crate can be updated by using validate_crate (if implemented
    within the pallet) or check_schema otherwise. If the crate is valid it then updates the data.
    '''
    arcpy.env.geographicTransformations = crate.geographic_transformation
    change_status = (Crate.NO_CHANGES, None)

    try:
        if not arcpy.Exists(crate.destination):
            log.debug('%s does not exist. creating', crate.destination)
            _create_destination_data(
                crate,
                skip_hash_field=change_detection.has_table(crate.source_name))

            change_status = (Crate.CREATED, None)

        #: check for custom validation logic, otherwise do a default schema check
        try:
            has_custom = validate_crate(crate)
            if has_custom == NotImplemented:
                check_schema(crate)
        except Exception as e:
            log.warning('validation error: %s for crate %r',
                        e,
                        crate,
                        exc_info=True)
            return (Crate.INVALID_DATA, str(e))

        #: use change detection data if it exists for this table
        if change_detection.has_table(crate.source_name):
            if change_detection.has_changed(
                    crate.source_name) or change_status[0] == Crate.CREATED:
                return change_detection.update(crate)
            else:
                return change_status
        else:
            #: create source hash and store
            changes = _hash(crate)

        if changes.has_changes():
            log.debug('starting edit session...')
            with arcpy.da.Editor(crate.destination_workspace):
                #: delete un-accessed hashes
                if changes.has_deletes():
                    log.debug('number of rows to be deleted: %d',
                              len(changes._deletes))
                    status, _ = change_status
                    if status != Crate.CREATED:
                        change_status = (Crate.UPDATED, None)

                    log.debug('deleting from destination table')
                    with arcpy.da.UpdateCursor(crate.destination,
                                               hash_field) as cursor:
                        for row in cursor:
                            if row[0] in changes._deletes:
                                cursor.deleteRow()

                #: add new/updated rows
                if changes.has_adds():
                    log.debug('number of rows to be added: %d',
                              len(changes.adds))
                    status, message = change_status
                    if status != Crate.CREATED:
                        change_status = (Crate.UPDATED, None)

                    #: reproject data if source is different than destination
                    if crate.needs_reproject():
                        changes.table = arcpy.Project_management(
                            changes.table,
                            changes.table + reproject_temp_suffix,
                            crate.destination_coordinate_system,
                            crate.geographic_transformation)[0]

                    if not crate.is_table():
                        changes.fields[shape_field_index] = changes.fields[
                            shape_field_index].rstrip('WKT')

                    #: cache this so we don't have to call it for every record
                    is_table = crate.is_table()
                    with arcpy.da.SearchCursor(changes.table, changes.fields) as add_cursor,\
                            arcpy.da.InsertCursor(crate.destination, changes.fields) as cursor:
                        for row in add_cursor:
                            #: skip null geometries
                            if not is_table and row[shape_field_index] is None:
                                continue

                            cursor.insertRow(row)

            if changes.has_dups:
                change_status = (Crate.UPDATED_OR_CREATED_WITH_WARNINGS,
                                 'duplicate features detected!')
        else:
            log.debug('no changes found.')

            if changes.has_dups:
                change_status = (Crate.WARNING, 'duplicate features detected!')

        #: sanity check the row counts between source and destination
        count_status = _check_counts(crate, changes)

        return count_status or change_status
    except Exception as e:
        log.error('unhandled exception: %s for crate %r',
                  str(e),
                  crate,
                  exc_info=True)

        return (Crate.UNHANDLED_EXCEPTION, str(e))
    finally:
        arcpy.ResetEnvironments()
        arcpy.ClearWorkspaceCache_management()
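A hedged call sketch for update(), following its docstring: the crate comes from models.Crate, validate_crate from the owning pallet, and the return value is one of the Crate result constants plus an optional message (the variable names below are placeholders):

# Hypothetical usage; crate, pallet and change_detection are assumed to exist.
status, message = update(crate, pallet.validate_crate, change_detection)
if status in (Crate.INVALID_DATA, Crate.UNHANDLED_EXCEPTION):
    log.warning('crate %r not updated: %s', crate, message)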
Example #25
#Author: Manuel Gonzalez-Rivero
#Date: June 2016
#Purpose: to reshape and clean up data from viewshed analysis for the purpose of this study

import sys, os.path
sys.path.insert(0, 'PATH_TO_ARCHGIS')
import arcpy
import os
import arcgisscripting
from arcpy import env
arcpy.ResetEnvironments()
wks = "PATH_TO WORKING_DIR"
env.workspace = wks
tableList = arcpy.ListTables()

for table in tableList:
    inTable = table
    outTable = "%s\\vwshd\\%s" % (wks, table)
    tempTableView = "%s_TableView" % (inTable)
    expression = arcpy.AddFieldDelimiters(tempTableView, "Value") + " = 0"
    # Execute CopyRows to make a new copy of the table
    arcpy.CopyRows_management(inTable, outTable)

    # Execute MakeTableView
    arcpy.MakeTableView_management(outTable, tempTableView)

    # Execute SelectLayerByAttribute to determine which rows to delete
    arcpy.SelectLayerByAttribute_management(tempTableView, "NEW_SELECTION",
                                            expression)

    # Execute GetCount and if some features have been selected, then execute
Example #26
def stats_area_table(zone_fc,
                     zone_field,
                     in_value_raster,
                     out_table,
                     is_thematic,
                     warn_at_end=False):
    orig_env = arcpy.env.workspace
    arcpy.env.workspace = 'in_memory'
    arcpy.CheckOutExtension("Spatial")
    arcpy.AddMessage("Calculating zonal statistics...")

    # Set up environments for alignment between zone raster and theme raster
    env.snapRaster = in_value_raster
    env.cellSize = in_value_raster
    env.extent = zone_fc

    # TODO: If we experience errors again, add a try/except where the except writes the
    # conversion raster to a scratch workspace instead; that eliminated the errors we
    # were getting several years ago with 10.1, not sure if they will still happen.
    arcpy.PolygonToRaster_conversion(zone_fc, zone_field, 'convert_raster',
                                     'MAXIMUM_AREA')
    env.extent = "MINOF"
    arcpy.sa.ZonalStatisticsAsTable('convert_raster', zone_field,
                                    in_value_raster, 'temp_zonal_table',
                                    'DATA', 'ALL')

    if is_thematic:
        #for some reason env.cellSize doesn't work
        desc = arcpy.Describe(in_value_raster)
        cell_size = desc.meanCellHeight

        # calculate/doit
        arcpy.AddMessage("Tabulating areas...")
        arcpy.sa.TabulateArea('convert_raster', zone_field, in_value_raster,
                              'Value', 'temp_area_table', cell_size)

        # making the output table
        arcpy.CopyRows_management('temp_area_table', 'temp_entire_table')
        zonal_stats_fields = ['COUNT', 'AREA']
        arcpy.JoinField_management('temp_entire_table', zone_field,
                                   'temp_zonal_table', zone_field,
                                   zonal_stats_fields)

        # cleanup
        arcpy.Delete_management('temp_area_table')

    if not is_thematic:
        # making the output table
        arcpy.CopyRows_management('temp_zonal_table', 'temp_entire_table')

    arcpy.AddMessage("Refining output table...")

    # Join to the input zones raster
    arcpy.AddField_management('convert_raster', 'Pct_NoData', 'DOUBLE')
    arcpy.CopyRows_management('convert_raster', 'zones_VAT')
    arcpy.JoinField_management('zones_VAT', zone_field, 'temp_entire_table',
                               zone_field)
    calculate_expr = '100*(1-(float(!COUNT_1!)/!Count!))'
    arcpy.CalculateField_management('zones_VAT', 'Pct_NoData', calculate_expr,
                                    "PYTHON")
    refine_zonal_output('zones_VAT', zone_field, is_thematic)

    # final table gets a record even for no-data zones
    keep_fields = [f.name for f in arcpy.ListFields('zones_VAT')]
    if zone_field.upper() in keep_fields:
        keep_fields.remove(zone_field.upper())
    if zone_field in keep_fields:
        keep_fields.remove(zone_field)
    cu.one_in_one_out('zones_VAT', keep_fields, zone_fc, zone_field, out_table)

    # Convert missing "Pct_NoData" values to 100
    codeblock = """def convert_pct(arg1):
        if arg1 is None:
            return float(100)
        else:
            return arg1"""
    arcpy.CalculateField_management(out_table, 'Pct_NoData',
                                    'convert_pct(!Pct_NoData!)', 'PYTHON_9.3',
                                    codeblock)

    # count whether all zones got an output record or not
    out_count = int(
        arcpy.GetCount_management('temp_entire_table').getOutput(0))
    in_count = int(arcpy.GetCount_management(zone_fc).getOutput(0))
    count_diff = in_count - out_count

    # cleanup
    for item in [
            'temp_zonal_table', 'temp_entire_table', 'convert_raster',
            'zones_VAT'
    ]:
        arcpy.Delete_management(item)
    arcpy.ResetEnvironments()
    arcpy.env.workspace = orig_env  # hope this prevents problems using list of FCs from workspace as batch
    arcpy.CheckInExtension("Spatial")

    return [out_table, count_diff]
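A hedged usage sketch for stats_area_table(): it returns the output table name and the number of input zones that did not receive an output record (all argument values below are placeholders):

# Hypothetical call; zone feature class, zone field and value raster are placeholders.
out_table, count_diff = stats_area_table('lakes_fc', 'Lake_ID', 'landcover.tif',
                                         'lake_landcover_table', is_thematic=True)
if count_diff > 0:
    arcpy.AddWarning('{0} zones have no output record'.format(count_diff))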
Example #27
def raster_project(prj_current, inraster, in_gdb, prj_folder, out_folder,
                   c_region):
    start_raster = datetime.datetime.now()
    # Resets arcpy environment variables to default
    arcpy.ResetEnvironments()
    print "\n"
    print inraster

    # in_raster = Raster(in_gdb + os.sep + str(inraster))
    in_raster = in_gdb + os.sep + str(inraster)

    prj_name = prj_current.replace('.prj', '')
    out_gdb_name = prj_current.replace('.prj', '.gdb')
    out_gdb_name = c_region + "_" + out_gdb_name.replace(" ", "_")
    out_gdb = out_folder + os.sep + out_gdb_name
    create_gdb(out_folder, out_gdb_name, out_gdb)

    snap_raster = Raster(RegionalProjection_Dict[c_region])
    arcpy.Delete_management("snap")
    print RegionalProjection_Dict[c_region]
    arcpy.MakeRasterLayer_management(snap_raster, "snap", "#", "#", "#")
    arcpy.env.snapRaster = "snap"
    print str(snap_raster)
    # Set the processing extent to be equal to the use layer; only species within the extent will be
    # included in the output species file; applied when the we make the raster layers
    myExtent = snap_raster.extent

    # location prj files
    prj_file_path = prj_folder + os.sep + prj_current

    # extract spatial information from prj files

    dsc_prj = arcpy.Describe(prj_file_path)
    prj_sr = dsc_prj.spatialReference
    prj_datum = prj_sr.GCS.datumName
    print in_raster
    prj_raster_name = str(inraster) + "_" + prj_name  # regional species raster
    prj_raster = out_gdb + os.sep + prj_raster_name  # complete output path for regional species raster
    print prj_raster
    try:
        if prj_datum == "D_WGS_1984":  # indicates the file needs a geographic transformation from NAD 83 to WGS 84
            if not arcpy.Exists(prj_raster):

                arcpy.MakeRasterLayer_management(in_raster, "inital_r_lyr",
                                                 "#", myExtent, "#")
                print 'Projecting {0} into {1}'.format(inraster, prj_name)
                # "NAD_1983_To_WGS_1984_1" is a geographic transformation used to go from NAD_1983 to WGS 84 for the US
                # TODO check to see if different transformation would make more sense WGS_1984_(ITRF00)_To_NAD_1983
                arcpy.ProjectRaster_management("inital_r_lyr", prj_raster,
                                               prj_sr, "NEAREST", "30",
                                               "NAD_1983_To_WGS_1984_1")
                arcpy.Delete_management("inital_r_lyr")

            else:
                print str(prj_raster) + " already exists"

        else:

            if not arcpy.Exists(prj_raster):
                print in_gdb + os.sep + str(inraster)
                arcpy.MakeRasterLayer_management(in_raster, "inital_r_lyr",
                                                 "#", myExtent, "#")
                print 'Projecting {0} into {1}'.format(inraster, prj_name)
                arcpy.ProjectRaster_management("inital_r_lyr", prj_raster,
                                               prj_sr, 'NEAREST', "30")
                arcpy.Delete_management("inital_r_lyr")

            else:
                print str(prj_raster) + " already exists"
        print 'Completed loop of {0} in: {1}\n'.format(
            prj_name, (datetime.datetime.now() - start_raster))

    except Exception as error:
        print 'Error in loop'
        print(error.args[0])
Example #28
    def execute(self, inRiv, inCat, inRasterHAND, inRasterMinLocal, inRasterStr, inStep, inDeltaH, inRWKS, inFWKS, bConnectedOnly = True, pScratchWorkspace = None, nProcessors = 0): 
        ''' for a given inRiver3DRaster, construct the floodplain
        1. inRasterRiv3D - raster of river water level Z
        2. inRasterElev - DEM of the terrain 
        3. inStep - index of the waterlevel in a sequence of waterlevel, used to construct the output raster name (R+inStep)
        4. inCat - catchment used to limit the floodplain
        '''
        sOK = apwrutils.C_OK 
        dCatID2RivID = dict()
        dRivID2DH = dict() 
       
        inDeltaH = float(inDeltaH)
        if(inDeltaH<0):
            #arcpy.AddMessage("{} {}  {}".format(inRiv, flooddsconfig.FN_HYDROID, flooddsconfig.FN_DH ))
            try:
                with arcpy.da.SearchCursor(inRiv, [flooddsconfig.FN_HYDROID, flooddsconfig.FN_DH]) as rows:
                    for row in rows:
                        try:
                            dRivID2DH.setdefault(row[0], row[1])
                            if((self.DebugLevel & 1)==1):  arcpy.AddMessage("HID->DH={}->{}".format(row[0], row[1]))
                        except:
                            pass
            
            except arcpy.ExecuteError:
                sMsg = "{} {}".format(str(arcpy.GetMessages(2)), trace())
                arcpy.AddMessage(sMsg) 

            except:   
                arcpy.AddMessage(trace())             
                pass     

        pHandMasked = ""
        flZoneTempRiver = ""
        flZoneDslv = ""
        flRiv = ""
        arcpy.AddMessage("FloodplainFromHAND.execute ScratchWorkspace={} nProcessors={}".format(pScratchWorkspace,nProcessors) ) 
        if((flooddsconfig.debugLevel & 2)==2): arcpy.AddMessage("in floodplainfromhand: os.environ['TMP']={}, os.environ['TEMP']={}".format(os.environ['TMP'], os.environ['TEMP']))
        try:
            lRiverFlds = [flooddsconfig.FN_HYDROID, flooddsconfig.FN_DRAINID]
            if(len(arcpy.ListFields(inRiv,flooddsconfig.FN_DRAINID))==0):
                arcpy.AddMessage("Required field {} does not exist in {}".format(apwrutils.FN_DRAINID, inRiv))
            
            with arcpy.da.SearchCursor(inRiv, lRiverFlds) as rows:
                for row in rows:
                    try:
                        dCatID2RivID.setdefault(row[1],row[0])    #Catchment.HYDROID->River.HYDROID
                    except:
                        pass

            if((flooddsconfig.debugLevel & 2)==2):
                for catid, rivid in iter(dCatID2RivID.items()):
                    arcpy.AddMessage("catID={} rivID={}".format(catid,rivid))

            arcpy.CheckOutExtension("Spatial")
            if((flooddsconfig.debugLevel & 1)==1):
                sMsg = "inFWKS={} \ninRiv={} \ninCat={} \ninRasterHAND={} \ninStep={} \ninDeltaH={} \ninRWKS={}".format(inFWKS, inRiv, inCat, inRasterHAND, inStep, inDeltaH, inRWKS)
                apwrutils.Utils.ShowMsg(sMsg)
            
            if(pScratchWorkspace==None):  
                scratch_wks = flooddsconfig.pScratchWorkspace      #   arcpy.env.scratchWorkspace  
                scratchFolder = flooddsconfig.pScratchFolder         #  arcpy.env.scratchFolder
                arcpy.env.scratchWorkspace = scratch_wks
            else:
                scratch_wks = pScratchWorkspace
                arcpy.env.scratchWorkspace = scratch_wks
                scratchFolder = arcpy.env.scratchFolder 
            arcpy.AddMessage("arcpy.env.scratchWorkspace={}".format(arcpy.env.scratchWorkspace))
            #..arcpy.AddMessage("arcpy.env.scratchFolder={} scratch_wks={}".format(scratchFolder, pScratchWorkspace))
            if((flooddsconfig.debugLevel & 2)==2): arcpy.AddMessage("arcpy.env.scratchFolder={} scratch_wks={}".format(scratchFolder, pScratchWorkspace))
            if((flooddsconfig.debugLevel & 1)==1):
                sMsg = "arcpy.env.scratchFolder={}, scratch_wks={}".format(scratchFolder, scratch_wks)
                arcpy.AddMessage(sMsg)
            #if(scratch_wks==None):
            #    scratch_wks = os.path.join(scratchFolder, "scratch.gdb") 
            #    if(arcpy.Exists(scratch_wks)==False):
            #        arcpy.CreateFileGDB_management(scratchFolder, "scratch.gdb") 
            #    arcpy.env.scratchWorkspace = scratch_wks

            rasterDescribe = arcpy.Describe(inRasterHAND)
            arcpy.env.snapRaster = rasterDescribe.catalogPath #SnapRaster
            arcpy.env.overwriteOutput = True

            bExists = apwrutils.Utils.makeSureDirExists(inRWKS)
            #filGrdInt = os.path.join(scratchFolder, arcpy.CreateUniqueName('filGrdInt', scratchFolder)) 
            cellSize = arcpy.GetRasterProperties_management(inRasterHAND, "CELLSIZEX") 
            sr = arcpy.Describe(inRasterHAND).spatialReference
        
            #Holds final raster results (depth grid)
            sDepthRWKS = os.path.join(inRWKS,flooddsconfig.FDN_Depth)   #Depth folder
            bExists = apwrutils.Utils.makeSureDirExists(sDepthRWKS)     #Depth folder
            sWseRWKS = os.path.join(inRWKS, flooddsconfig.FDN_WSE)      #WSE folder
            bExists = apwrutils.Utils.makeSureDirExists(sWseRWKS)       #WSE folder
            sGDepth = os.path.join(inRWKS, flooddsconfig.FND_G_Depth)
            bExists = apwrutils.Utils.makeSureDirExists(sGDepth)
            sGPFZone = os.path.join(inRWKS, flooddsconfig.FND_G_PFZone)
            bExists = apwrutils.Utils.makeSureDirExists(sGPFZone)

            if(inCat!=None):
                #..Create floodzone featureclass to hold fp polygons for each river
                fcZoneRiver = os.path.join(inFWKS, flooddsconfig.LN_FPZoneRiver)
                if((flooddsconfig.debugLevel & 1) == 1):  arcpy.AddMessage("fcZoneRiver: {}".format(fcZoneRiver))
                if(arcpy.Exists(fcZoneRiver)==False):
                    arcpy.CreateFeatureclass_management(inFWKS, flooddsconfig.LN_FPZoneRiver, "POLYGON", None, None, None, sr)

                fieldsRiver = {flooddsconfig.FN_StreamID:'LONG', flooddsconfig.FN_STEP:'TEXT', 
                      flooddsconfig.FN_GridCode:'LONG', flooddsconfig.FN_DateCreated :'TEXT', 
                      flooddsconfig.FN_FLDESC:'DOUBLE', apwrutils.FN_HYDROCODE:'TEXT'}

                try:
                    ii = apwrutils.Utils.addFields(fcZoneRiver, fieldsRiver)
                    if((flooddsconfig.debugLevel & 1)==1): arcpy.AddMessage("Processing inStep={}. (Fields added={}).".format(inStep,ii))
                   
                except arcpy.ExecuteError:
                    arcpy.AddError(str(arcpy.GetMessages(2)))
                lFieldsRiver = [apwrutils.FN_ShapeAt,flooddsconfig.FN_StreamID, flooddsconfig.FN_STEP, 
                    flooddsconfig.FN_GridCode, flooddsconfig.FN_DateCreated, flooddsconfig.FN_FLDESC, apwrutils.FN_HYDROCODE]
                
                if((flooddsconfig.debugLevel & 1)==1): 
                    sMsg = "Processing raster by each catchment contained in {}".format(inCat)     
                    arcpy.AddMessage(sMsg)
                #  make sure the temp Raster dir exists
                sCatRWKS = os.path.join(inRWKS, "RCat")
                bExists = apwrutils.Utils.makeSureDirExists(sCatRWKS)
                #    maskGrd = arcpy.sa.Polygon
                #apwrutils.Utils.ShowMsg("TobeImplemented....")
                oDesc = arcpy.Describe(inCat) 
                sOIDFld = oDesc.OIDFieldName
                lCatFlds = [apwrutils.FN_ShapeAt, sOIDFld, apwrutils.FN_HYDROID]
                rivID = 0
                sRasters = ""
                sp = " " * 2
                fl = ""
                deltaH = 0.0
                #for k in dCatID2RivID:
                #    arcpy.AddMessage("{} -> {}".format(k, dCatID2RivID[k]))
                nCats = arcpy.GetCount_management(inCat)[0] 
                with arcpy.da.SearchCursor(inCat, lCatFlds) as rows:
                    for iRow, row in enumerate(rows):
                        ddt = time.clock()
                        rivID = 0
                        catID = 0
                        try:        #try in row
                            iOID = row[lCatFlds.index(sOIDFld)]
                            catID = row[lCatFlds.index(apwrutils.FN_HYDROID)]
                            if(catID in dCatID2RivID):
                                rivID = dCatID2RivID[catID]
                            else:
                                arcpy.AddMessage("catID {} is not found in dCatID2RiverID".format(catID))
                                rivID = -1

                            oPoly = row[lCatFlds.index(apwrutils.FN_ShapeAt)]
                            oExt = oPoly.extent
                            #sWhere = "{}={}".format(sOIDFld, iOID) 
                            sWhere = "{}={}".format(apwrutils.FN_HYDROID, catID) 
                            pHandMasked = os.path.join(sCatRWKS, "cat{}".format(catID))
                            arcpy.env.extent = oExt
                            if(os.path.exists(pHandMasked)==False):
                                fl = "DH{}_{}".format(inStep, catID)
                                if(arcpy.Exists(fl)):
                                   arcpy.Delete_management(fl) 
                                arcpy.MakeFeatureLayer_management(inCat, fl, sWhere)
                                #if((flooddsconfig.debugLevel & 1)==1):  arcpy.AddMessage("PolygonToRaster_conversion -> {},  {} where {}".format(pHandMasked, inCat, sWhere))
                                pHandMask = arcpy.sa.ExtractByMask(inRasterHAND, fl)
                                pHandMask.save(pHandMasked) 
                            else:
                                #flOutFile = arcpy.management.MakeRasterLayer(pHandMasked, "flCat{}".format(rivID))
                                if((flooddsconfig.debugLevel & 8) == 8):  arcpy.AddMessage("{} already existed for catchment {}".format(pHandMasked, sWhere))
                            
                            if(inDeltaH<0):
                                try:
                                    deltaH = dRivID2DH[rivID]
                                    #arcpy.AddMessage("deltaH = {}".format(deltaH)) 
                                except:
                                    deltaH = float(inDeltaH)
                            else:
                                deltaH = float(inDeltaH)
              
                            #(zFactor,zUnit) = apwrutils.Utils.getZFactorUnit(inRasterHAND)
                            #deltaH = deltaH * zFactor 
                            expression = "value <= {}".format(deltaH) 
                            #..save the rivNibble to wse location.
                            #wseRaster = arcpy.sa.Con(inRasterHAND, inRasterHAND, "", expression) 
                            if(arcpy.Exists(inRasterStr)):
                                wseRaster = arcpy.sa.Con(pHandMasked, pHandMasked, inRasterStr, expression)
                            else:  
                                wseRaster = arcpy.sa.Con(pHandMasked, pHandMasked, "", expression)                                                     
                            #..Get the river depth and save the depth grid  '..ye, @1/28/2016 12:12:40 PM on ZYE1
                            sName = "{}_{}_{}_{}{}".format(flooddsconfig.HD_Depth,inStep,flooddsconfig.HD_River, rivID, flooddsconfig.Ext_R) 
                            fpDepth = arcpy.sa.Minus(float(deltaH), wseRaster)
                            sDepthFile = os.path.join(sDepthRWKS,sName)
                            #..arcpy.AddMessage("fpDept={}".format(fpDepth))
                            fpDepth.save(sDepthFile)    # Depth grid.
                            if(arcpy.Exists(inRasterMinLocal)):
                                wseRaster = arcpy.sa.Plus(wseRaster, inRasterMinLocal)
                            sWseName = "{}_{}_{}_{}{}".format(flooddsconfig.HD_WSE, inStep, flooddsconfig.HD_River, rivID, flooddsconfig.Ext_R)
                            wseRaster.save(os.path.join(sWseRWKS, sWseName))                            
                            #..Save the fpDepth
                            fpZone4PolyRiver = arcpy.sa.Con(fpDepth, 1, 0, 'value >= 0'  ) 
                            #..arcpy.AddMessage("inStep_{}, rz_{}".format(inStep, inStep))
                            fpZoneTempRiver = os.path.join(scratch_wks, "rz{}_{}".format(inStep ,rivID)) 
                            #..arcpy.AddMessage("fpZoneTempRiver={}".format(fpZoneTempRiver))
                            arcpy.RasterToPolygon_conversion(fpZone4PolyRiver, fpZoneTempRiver, "NO_SIMPLIFY" )
                            sDslvName = sName.split(".")[0]
                            flZoneDslv = "{}DSLV".format(sDslvName)                           
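                             #..When bConnectedOnly is set, keep only flood polygons that intersect the river reach (rivID)
                             #..and re-mask the depth grid to that dissolved, connected zone.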
                            if(bConnectedOnly==True):
                                sRivWhere = "{}={}".format(apwrutils.FN_HYDROID, rivID)
                                flZoneTempRiver = "flrz{}_{}".format(inStep,rivID)
                                flRiv = "flrv{}".format(rivID) 
                                arcpy.MakeFeatureLayer_management(inRiv, flRiv, sRivWhere) 
                                arcpy.MakeFeatureLayer_management(fpZoneTempRiver, flZoneTempRiver) 
                                arcpy.SelectLayerByLocation_management(flZoneTempRiver, 'INTERSECT', flRiv)
                                fpZoneTempDslv = os.path.join(scratch_wks, "fpr{}_{}".format(inStep, rivID))
                                arcpy.Dissolve_management(flZoneTempRiver, fpZoneTempDslv, [flooddsconfig.FN_GridCode])
                                sWhereGridCode = "{}>0".format(flooddsconfig.FN_GridCode)
                                arcpy.MakeFeatureLayer_management(fpZoneTempDslv, flZoneDslv, sWhereGridCode)
                                try:
                                    fpDepth = arcpy.sa.ExtractByMask(fpDepth, flZoneDslv)    #pMaskFC)   #flZoneDslv)  # pMaskFC)   #flZoneDslv)
                                    #..if the ExtractByMask result is saved directly to .tif, NoData cells are written as '-3.4028234663853E+38', which other functions would not treat as NODATA; adding 0.0 below forces a clean raster before saving.
                                    pRaster = arcpy.sa.Plus(fpDepth, 0.0)    
                                    pRaster.save(sDepthFile) 
                                except:
                                    arcpy.AddMessage(trace())
                                    pass
                                    #arcpy.CopyRaster_management(sDepthFile, ssOutFileNew) 
                            else:
                                fpZoneTempDslv = os.path.join(scratch_wks, "fpr{}_{}".format(inStep, rivID))
                                arcpy.Dissolve_management(fpZoneTempRiver, fpZoneTempDslv, [flooddsconfig.FN_GridCode])
                                arcpy.MakeFeatureLayer_management(fpZoneTempDslv, flZoneDslv)
                           
                            if((flooddsconfig.debugLevel & 2)==2): arcpy.AddMessage("sName{}, fpZoneTempDslv={}".format(sName, fpZoneTempDslv) )
                            if(sRasters ==""):
                                sRasters = sName
                            else:
                                sRasters = sRasters + ";" + sName
                            sDateCreated = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") 
                            with arcpy.da.InsertCursor(fcZoneRiver, lFieldsRiver) as inRows:
                                #with arcpy.da.SearchCursor(fpZoneTempDslv, [apwrutils.FN_ShapeAt,flooddsconfig.FN_GridCode]) as prows:
                                with arcpy.da.SearchCursor(flZoneDslv, [apwrutils.FN_ShapeAt,flooddsconfig.FN_GridCode]) as prows:
                                    for prow in prows:
                                        try:
                                            #fieldsRiver = {Shape@, flooddsconfig.FN_StreamID:'LONG', flooddsconfig.FN_STEP:'TEXT', 
                                            #      flooddsconfig.FN_GridCode:'LONG', flooddsconfig.FN_DateCreated :'TEXT', 
                                            #      flooddsconfig.FN_FLDESC:'TEXT', apwrutils.FN_HYDROCODE:'TEXT'}
                                            inRow = []
                                            oShp = prow[0]
                                            inRow.append(oShp)
                                            inRow.append(rivID)    #StreamID
                                            inRow.append(inStep)   #FPStep
                                            inRow.append(prow[1])  #GRidCode
                                            inRow.append(sDateCreated)  #DateCreated
                                            inRow.append(deltaH)   #FPDESC
                                            inRow.append(rivID)    #HYDROCODE
                                            inRows.insertRow(inRow)       
                                        except:
                                            arcpy.AddMessage(trace()) 

                        except arcpy.ExecuteError:   #try in row for Cat
                            sMsg = str(arcpy.GetMessages(2))
                            arcpy.AddError(sMsg)
                        except:
                            arcpy.AddWarning(arcpy.GetMessages(2))
                            sMsg = trace()
                            arcpy.AddMessage(sMsg)
                        finally:   ##try in row - per catchment
                            if(fl!=""):
                                arcpy.Delete_management(fl)

                            if(flZoneTempRiver!=""):
                                arcpy.Delete_management(flZoneTempRiver)
                            if(flZoneDslv!=""):
                                arcpy.Delete_management(flZoneDslv)

                            if(flRiv!=""):
                                arcpy.Delete_management(flRiv)
                            sMsg = "{} (inStep,dh)=({},{}) {} of {} catchments, {} (rivid={} catid={} dt={})".format(sp, inStep, ("%.2f" % deltaH), (iRow+1), nCats, sWhere, rivID, catID, apwrutils.Utils.GetDSMsg(ddt, "")) 
                            arcpy.AddMessage(sMsg)
                            #if((flooddsconfig.debugLevel & 1)==1): 
                            #    sMsg = "Done, processing raster on catchment {} (rivid={} catid={} dt={})".format(sWhere, rivID, catID, apwrutils.Utils.GetDSMsg(ddt)) 
                            #    arcpy.AddMessage(sMsg)

                if (nProcessors<=1) :            
                    try:
                        arcpy.env.extent = inRasterHAND                      
                        arcpy.env.workspace = sDepthRWKS
                        arcpy.env.mask = inRasterHAND
                        # sDepthName = "{}{}{}".format(flooddsconfig.HD_Depth,inStep,flooddsconfig.Ext_R)  # did not work when .tif is used, it would produce a mosaic ds with Nodata being filled with -128 or 0.
                        sDepthName = "{}_{}.tif".format(flooddsconfig.HD_Depth,inStep)
                        sCellSize = "" 
                        if(apwrutils.Utils.isNumeric(cellSize)==True):
                             sCellSize = cellSize 
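                        #..Mosaic the per-river depth grids listed in sRasters into one 16-bit signed depth grid for this
                        #..step, taking the maximum value where the catchment rasters overlap.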

                        arcpy.MosaicToNewRaster_management(sRasters, sGDepth, sDepthName, sr, pixel_type="16_BIT_SIGNED", cellsize=sCellSize, number_of_bands="1", mosaic_method="MAXIMUM", mosaic_colormap_mode="FIRST")
                        #arcpy.MosaicToNewRaster_management(sRasters, sGDepth, sDepthName, sr, pixel_type="32_BIT_FLOAT", cellsize=sCellSize, number_of_bands="1", mosaic_method="MAXIMUM", mosaic_colormap_mode="FIRST")
                        #arcpy.MosaicToNewRaster_management(sRasters, sGDepth, sDepthName, sr, "32_BIT_FLOAT", cellSize, "1", "LAST","FIRST")
                        flDepthName = os.path.join(sGDepth, sDepthName)
                           
                        if(flooddsconfig.Ext_R!=""):
                             fpDepthF = arcpy.sa.SetNull(flDepthName, flDepthName, '"value" <= 0')
                             fpDepthF.save(flDepthName)
                        else:
                             fpDepthF = flDepthName
                
                        if((flooddsconfig.debugLevel & 1)==1): 
                            sMsg = "mosaic raster depth grid: fpDepthF={}".format(fpDepthF)
                            arcpy.AddMessage(sMsg)

                    except arcpy.ExecuteError:   #try in row for Cat
                        sMsg = str(arcpy.GetMessages(2))
                        arcpy.AddWarning(sMsg)
                    except:
                        arcpy.AddWarning(arcpy.GetMessages(2))
                        sMsg = trace()
                        arcpy.AddMessage(sMsg)
            else:
                 isNullGrd = arcpy.sa.IsNull(river3DInt)
                 nibSrc = arcpy.sa.Con(isNullGrd, river3DInt, "-99999", "Value = 0")
                 nibLevel = arcpy.sa.Nibble(nibSrc, river3DInt)  #, "ALL_VALUES") 
                 fpDepth = arcpy.sa.Minus(nibLevel, filGrdInt)
                 fpDepthF = arcpy.sa.Con(fpDepth, fpDepth, "#", '"value" >= 0')     #fpDepth>0, return fpDepth, else null.
            
            fpZoneName = flooddsconfig.LN_FPZone
            fcZoneRslt = os.path.join(inFWKS, fpZoneName)
            fpDepthRName = "{}_{}".format(flooddsconfig.LN_FPZone,inStep) 
            fpRaster = os.path.join(sGPFZone, fpDepthRName)
            if(nProcessors<=1):
                if((flooddsconfig.debugLevel & 1)==1): arcpy.AddMessage(fpDepthF)
                fpZone4Poly = arcpy.sa.Con(fpDepthF, 1, 0, '"value" >= 0')            
                fpZoneTemp = os.path.join(scratch_wks, "r{}".format(inStep)) 
                arcpy.RasterToPolygon_conversion(fpZone4Poly, fpZoneTemp, "NO_SIMPLIFY")
                
                if(inRiv!=None):
                    #try to remove the floodplain polygons not connected with the inRiv
                    flZoneOnRiv = ""
                    try:
                        flZoneOnRiv = "flzr{}".format(inStep)    #Zone that overlay with river lines.
                        if(arcpy.Exists(flZoneOnRiv)==True): arcpy.Delete_management(flZoneOnRiv)
                        if(arcpy.Exists(fpZoneTemp)):
                            arcpy.MakeFeatureLayer_management(fpZoneTemp, flZoneOnRiv)    
                            arcpy.SelectLayerByLocation_management(flZoneOnRiv, "INTERSECT", inRiv)
                            #Connected Raster Area:
                            sRasterConn = os.path.join(scratchFolder, "C{}".format(inStep))
                            arcpy.PolygonToRaster_conversion(flZoneOnRiv, flooddsconfig.FN_GridCode, sRasterConn,"","",cellSize)
                            fpZone4Poly = arcpy.sa.Con(sRasterConn, fpZone4Poly)
                            fpZone4Poly = arcpy.sa.SetNull(fpZone4Poly, fpZone4Poly, '"value" = 0')
                            arcpy.RasterToPolygon_conversion(fpZone4Poly, fpZoneTemp, "NO_SIMPLIFY")
                            try:
                                del fpZone4Poly
                            except:
                                pass
                            try:
                                del flZoneOnRiv
                            except:
                                pass 

                    except arcpy.ExecuteError:
                        sMsg = "{}, {}".format(arcpy.GetMessages(2), trace())
                        arcpy.AddMessage(sMsg)
                    except:
                        arcpy.AddMessage("try to remove floodplain not intersecting with a river. {}".format(trace()))
                    finally:
                        pass

                fpZoneTempDslv = os.path.join(scratch_wks, "FPD{}".format(inStep))
                arcpy.Dissolve_management(fpZoneTemp, fpZoneTempDslv, [flooddsconfig.FN_GridCode])
                if(arcpy.Exists(fcZoneRslt)==False):
                    arcpy.CreateFeatureclass_management(inFWKS, fpZoneName, "POLYGON", fpZoneTempDslv, None, None, sr)   
        
                oDesc = arcpy.Describe(fcZoneRslt)
                fields = {flooddsconfig.FN_FLDESC:'DOUBLE', flooddsconfig.FN_STEP:'LONG', 
                          flooddsconfig.FN_GridCode:'LONG', flooddsconfig.FN_DateCreated :'TEXT'}

                apwrutils.Utils.addFields(fcZoneRslt, fields)
                if(not inDeltaH): inDeltaH = 0.0   #..FN_FLDESC is a DOUBLE field, so fall back to a numeric value when no delta is supplied
                sDateCreated = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") 
                shpFldName = apwrutils.Utils.GetShapeFieldName(fcZoneRslt)            
                lFieldsZR = [apwrutils.FN_ShapeAt,flooddsconfig.FN_STEP,flooddsconfig.FN_FLDESC,flooddsconfig.FN_GridCode, flooddsconfig.FN_DateCreated]
                lFieldsDslv = [apwrutils.FN_ShapeAt, flooddsconfig.FN_GridCode] 
                with arcpy.da.InsertCursor(fcZoneRslt, lFieldsZR) as inRows:
                    with arcpy.da.SearchCursor(fpZoneTempDslv,lFieldsDslv) as rows:
                        for row in rows:
                            try:
                                inRow = []
                                oShp = row[0]
                                inRow.append(oShp)
                                inRow.append(inStep)
                                inRow.append(inDeltaH)
                                inRow.append(row[lFieldsDslv.index(flooddsconfig.FN_GridCode)])
                                inRow.append(sDateCreated)
                                inRows.insertRow(inRow)       
                            except:
                                arcpy.AddMessage(trace())  
        
                # ExtractByMask - extract the fpZoneF (floodplain (depth) zone with in float)   
                fpDepthFExt = arcpy.sa.ExtractByMask(fpDepthF, fcZoneRslt) 
                fpDepthFExt.save(fpRaster)
                try:
                    del fpDepthFExt
                    del fpZone4Poly
                except:
                    pass 
        except:
            sOK = trace()
            arcpy.AddMessage(sOK)
            
        finally:
            if((flooddsconfig.debugLevel & 1)==1):  arcpy.AddMessage("floodplainfromhand Cleaning up...")
            arcpy.ResetEnvironments()

        if(sOK == apwrutils.C_OK):
            tReturn = (sOK, fcZoneRslt, fpRaster)
        else:
            tReturn = (sOK,)   #..single-element tuple so the return type is consistent with the success case

        return tReturn
Example #29
0
    def execute(self, inRasterRiv3D, inRasterElev, inStep, inRWKS, inFWKS, inRiv, inCat, inMultiplier = 100, inDeltaH = None, bConnectedOnly = True, pScratchWorkspace = None, nProcessors = 0): 
    #def ConstructFloodplain(self, inRasterRiv3D, inRasterElev, inStep, inRWKS, inFWKS, inRiv, inCat, inMultiplier = 100, inDeltaH = None): 
        ''' For a given river water-level raster (inRasterRiv3D), construct the floodplain.
        1. inRasterRiv3D - raster of the river water level (Z)
        2. inRasterElev - DEM of the terrain
        3. inStep - index of the water level in the sequence of water levels; used to construct the output raster name (R + inStep)
        4. inCat - catchment polygons used to limit the floodplain (one flood zone is built per catchment/river)
        5. inRWKS / inFWKS - output raster workspace (folder) and output feature workspace (geodatabase)
        6. inRiv - river features; with bConnectedOnly=True only flood polygons intersecting the river are kept
        7. inMultiplier - factor used to scale the elevation grids to integers (default 100)
        8. inDeltaH - water-level rise written to the FLDESC field of the output zones
        '''
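        #..A minimal usage sketch, assuming the owning class is FloodplainFrom3DRiverRaster (per the cleanup
        #..message in the finally block); the paths and layer names here are hypothetical:
        #..  tool = FloodplainFrom3DRiverRaster()
        #..  (sOK, fcZone, fpRaster) = tool.execute(r"C:\data\wse3d.tif", r"C:\data\dem.tif", 1,
        #..                                         r"C:\data\rasters", r"C:\data\results.gdb",
        #..                                         "RiverLayer", "CatchmentLayer", 100, 2.5)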
        sOK = apwrutils.C_OK 
        dCatID2RivID = dict()
        pMask = ""
        flZoneTempRiver = ""
        flZoneDslv = ""
        flRiv = ""
        arcpy.AddMessage("{} {}".format(pScratchWorkspace,nProcessors) ) 
        if((flooddsconfig.debugLevel & 2)==2): arcpy.AddMessage("in floodplainfrom3driverraster: os.environ['TMP']={}, os.environ['TEMP']={}".format(os.environ['TMP'], os.environ['TEMP']))
        try:
            lRiverFlds = [apwrutils.FN_HYDROID, apwrutils.FN_DRAINID]
            with arcpy.da.SearchCursor(inRiv, lRiverFlds) as rows:
                for row in rows:
                    try:
                        dCatID2RivID.setdefault(row[1],row[0])    #Catchment.HYDROID->River.HYDROID
                    except:
                        pass

            if((flooddsconfig.debugLevel & 2)==2):
                for catid, rivid in iter(dCatID2RivID.items()):
                    arcpy.AddMessage("catID={} rivID={}".format(catid,rivid))

            if (not inStep): inStep=0
            arcpy.CheckOutExtension("Spatial")
            if((flooddsconfig.debugLevel & 1)==1):
                sMsg = "inRasterRiv3D={} \ninRasterElev={} \ninStep={} \ninRWKS={} \ninFWKS={} \ninRiv={} \ninCat={} \ninMultiplier={} \ninDeltaH={}".format(inRasterRiv3D,inRasterElev, inStep, inRWKS, inFWKS,inRiv,inCat,inMultiplier,inDeltaH)
                apwrutils.Utils.ShowMsg(sMsg)
            
            if(pScratchWorkspace==None):  
                scratch_wks = flooddsconfig.pScratchWorkspace      #   arcpy.env.scratchWorkspace  
                scratchFolder = flooddsconfig.pScratchFolder         #  arcpy.env.scratchFolder
                arcpy.env.scratchWorkspace = scratch_wks
            else:
                scratch_wks = pScratchWorkspace
                arcpy.env.scratchWorkspace = scratch_wks
                scratchFolder = arcpy.env.scratchFolder 

            #..arcpy.AddMessage("arcpy.env.scratchFolder={} scratch_wks={}".format(scratchFolder, pScratchWorkspace))
            if((flooddsconfig.debugLevel & 2)==2): arcpy.AddMessage("arcpy.env.scratchFolder={} scratch_wks={}".format(scratchFolder, pScratchWorkspace))
            if((flooddsconfig.debugLevel & 1)==1):
                sMsg = "arcpy.env.scratchFolder={}, scratch_wks={}".format(scratchFolder, scratch_wks)
                arcpy.AddMessage(sMsg)
            #if(scratch_wks==None):
            #    scratch_wks = os.path.join(scratchFolder, "scratch.gdb") 
            #    if(arcpy.Exists(scratch_wks)==False):
            #        arcpy.CreateFileGDB_management(scratchFolder, "scratch.gdb") 
            #    arcpy.env.scratchWorkspace = scratch_wks

            rasterDescribe = arcpy.Describe(inRasterElev)
            arcpy.env.snapRaster = rasterDescribe.catalogPath #SnapRaster
            arcpy.env.overwriteOutput = True

            bExists = apwrutils.Utils.makeSureDirExists(inRWKS)
            #filGrdInt = os.path.join(scratchFolder, arcpy.CreateUniqueName('filGrdInt', scratchFolder)) 
            cellSize = arcpy.GetRasterProperties_management(inRasterElev, "CELLSIZEX") 
            sr = arcpy.Describe(inRasterElev).spatialReference
            inMultiplier = int(inMultiplier)    
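            #..Scale the elevation and water-level grids by inMultiplier and cast them to Int so the later
            #..Con/Nibble steps operate on integer grids; dividing by inMultiplier afterwards restores the decimals.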
            filGrdX100 = arcpy.sa.Times(inRasterElev, inMultiplier) 
            river3DX100 = arcpy.sa.Times(inRasterRiv3D, inMultiplier) 
            filGrdInt = arcpy.sa.Int(filGrdX100)
            river3DInt = arcpy.sa.Int(river3DX100)
        
            #Holds the final raster results (depth grid)
            sDepthRWKS = os.path.join(inRWKS,flooddsconfig.FDN_Depth)   #Depth folder
            bExists = apwrutils.Utils.makeSureDirExists(sDepthRWKS)     #Depth folder
            sWseRWKS = os.path.join(inRWKS, flooddsconfig.FDN_WSE)      #WSE folder
            bExists = apwrutils.Utils.makeSureDirExists(sWseRWKS)       #WSE folder
            sUwseRWKS = os.path.join(inRWKS, flooddsconfig.FDN_UWSE)    #UWSE folder
            bExists = apwrutils.Utils.makeSureDirExists(sUwseRWKS)      #UWSE folder
            sGDepth = os.path.join(inRWKS, flooddsconfig.FND_G_Depth)
            bExists = apwrutils.Utils.makeSureDirExists(sGDepth)
            sGPFZone = os.path.join(inRWKS, flooddsconfig.FND_G_PFZone)
            bExists = apwrutils.Utils.makeSureDirExists(sGPFZone)
            sp = " "*2
            if(inCat!=None):
                #..Create floodzone featureclass to hold fp polygons for each river
                fcZoneRiver = os.path.join(inFWKS, flooddsconfig.LN_FPZoneRiver)
                if((flooddsconfig.debugLevel & 1) == 1):  arcpy.AddMessage("fcZoneRiver: {}".format(fcZoneRiver))
                if(arcpy.Exists(fcZoneRiver)==False):
                    arcpy.CreateFeatureclass_management(inFWKS, flooddsconfig.LN_FPZoneRiver, "POLYGON", None, None, None, sr)

                fieldsRiver = {flooddsconfig.FN_StreamID:'LONG', flooddsconfig.FN_STEP:'TEXT', 
                      flooddsconfig.FN_GridCode:'LONG', flooddsconfig.FN_DateCreated :'TEXT', 
                      flooddsconfig.FN_FLDESC:'DOUBLE', apwrutils.FN_HYDROCODE:'TEXT'}

                try:
                    ii = apwrutils.Utils.addFields(fcZoneRiver, fieldsRiver)
                    if((flooddsconfig.debugLevel & 1)==1): arcpy.AddMessage("Processing inStep={}. (Fields added={}).".format(inStep,ii))
                   
                except arcpy.ExecuteError:
                    arcpy.AddError(str(arcpy.GetMessages(2)))
                lFieldsRiver = [apwrutils.FN_ShapeAt,flooddsconfig.FN_StreamID, flooddsconfig.FN_STEP, 
                    flooddsconfig.FN_GridCode, flooddsconfig.FN_DateCreated, flooddsconfig.FN_FLDESC, apwrutils.FN_HYDROCODE]
                
                if((flooddsconfig.debugLevel & 1)==1): 
                    sMsg = "Processing raster by each catchment contained in {}".format(inCat)     
                    arcpy.AddMessage(sMsg)
                #  makesure the temp Raster dir exist
                #inRWKSTemp = inRWKS
                #if(os.path.exists(arcpy.env.scratchFolder)): inRWKSTemp = arcpy.env.scratchFolder 
                sCatRWKS = os.path.join(inRWKS, "RCat")
                #sCatRWKS = os.path.join(inRWKSTemp, "RCat")
                bExists = apwrutils.Utils.makeSureDirExists(sCatRWKS)
                #    maskGrd = arcpy.sa.Polygon
                #apwrutils.Utils.ShowMsg("TobeImplemented....")
                oDesc = arcpy.Describe(inCat) 
                sOIDFld = oDesc.OIDFieldName
                lCatFlds = [apwrutils.FN_ShapeAt, sOIDFld, apwrutils.FN_HYDROID]
                rivID = 0
                sRasters = ""
                with arcpy.da.SearchCursor(inCat, lCatFlds) as rows:
                    for row in rows:
                        ddt = time.clock()
                        rivID = 0
                        catID = 0
                        try:        #try in row
                            iOID = row[lCatFlds.index(sOIDFld)]
                            catID = row[lCatFlds.index(apwrutils.FN_HYDROID)]
                            rivID = dCatID2RivID[catID]
                            oPoly = row[lCatFlds.index(apwrutils.FN_ShapeAt)]
                            oExt = oPoly.extent
                            #sWhere = "{}={}".format(sOIDFld, iOID) 
                            sWhere = "{}={}".format(apwrutils.FN_HYDROID, catID) 
                            #fl = arcpy.management.MakeFeatureLayer(inCat, "FL{}_{}".format(inStep,iOID), sWhere)
                            fl = "DH{}_{}".format(inStep, catID)
                            if(arcpy.Exists(fl)):
                                arcpy.Delete_management(fl) 
                            arcpy.MakeFeatureLayer_management(inCat, fl, sWhere)

                            #pMask = os.path.join(sCatRWKS, "d{}c{}".format(inStep,rivID))    #inStep, rivid, rivid is used instead of catid so that it is easier to id the cat/riv relation when debugging.
                            #pMask = os.path.join(sCatRWKS, "cat{}".format(iOID))
                            pMask = os.path.join(sCatRWKS, "cat{}".format(catID))
                            #arcpy.AddMessage("pMask={}  .exist={} {}".format(pMask, os.path.exists(pMask), arcpy.Exists(pMask)))
                            arcpy.env.extent = oExt
                            if(os.path.exists(pMask)==False):
                                if((flooddsconfig.debugLevel & 1)==1):  arcpy.AddMessage("PolygonToRaster_conversion -> {},  {} where {}".format(pMask, inCat, sWhere))
                                arcpy.PolygonToRaster_conversion(fl, sOIDFld, pMask, None, None, cellSize)
                                #flOutFile = arcpy.management.MakeRasterLayer(pMask, "flCat{}".format(rivID))
                            else:
                                #flOutFile = arcpy.management.MakeRasterLayer(pMask, "flCat{}".format(rivID))
                                 if((flooddsconfig.debugLevel & 8) == 8):  arcpy.AddMessage("{} already existed for catchment {}".format(pMask, sWhere))
                            
                            #arcpy.AddMessage("after: pMask={}  .exist={}  {}".format(pMask, os.path.exists(pMask), arcpy.Exists(pMask)))  
                            riv3D = arcpy.sa.ExtractByMask(river3DInt, fl)      #inRasterRiv3DRaster
                            ssOutFile = os.path.join(sCatRWKS, "d{}r{}".format(inStep,rivID))   #timestep, rivid
                            if((flooddsconfig.debugLevel & 2)==2): arcpy.AddMessage("  saving extracted riv3D to {} dt={}".format(ssOutFile, apwrutils.Utils.GetDSMsg(ddt)))
                            riv3D.save(ssOutFile)
                            riv3DInt = arcpy.sa.Int(riv3D)
                            riv3DIsNull = arcpy.sa.IsNull(riv3DInt)
                            riv3DSrc = arcpy.sa.Con(riv3DIsNull, riv3DInt, pMask, '"value" = 0')
                            rivNibble = arcpy.sa.Nibble(riv3DSrc, riv3DInt)
                            #..save the rivNibbleResults to the UWSE location
                            uwseNibble = arcpy.sa.Divide(rivNibble, float(inMultiplier))
                            #uwseNibble = arcpy.sa.ExtractByMask(uwseNibble, pMask) 
                            sUwseName = "{}_{}_{}_{}{}".format(flooddsconfig.HD_UWSE,inStep,flooddsconfig.HD_River,rivID, flooddsconfig.Ext_R)
                            uwseNibble.save(os.path.join(sUwseRWKS, sUwseName))
                            #..save the rivNibble to wse location.
                            wseNibble = arcpy.sa.Con(arcpy.sa.GreaterThan(uwseNibble, inRasterElev), uwseNibble, "#")
                            sWseName = "{}_{}_{}_{}{}".format(flooddsconfig.HD_WSE, int(inStep), flooddsconfig.HD_River, rivID, flooddsconfig.Ext_R)
                            wseNibble.save(os.path.join(sWseRWKS, sWseName))
                            #..Get the river depth and save the depth grid  '..ye, @1/28/2016 12:12:40 PM on ZYE1
                            sName = "{}_{}_{}_{}{}".format(flooddsconfig.HD_Depth,int(inStep),flooddsconfig.HD_River, rivID, flooddsconfig.Ext_R) 
                            fpDepth = arcpy.sa.Minus(rivNibble, filGrdInt)
                            fpDepth = arcpy.sa.Con(fpDepth, fpDepth, "#", '"value" >= 0')
                            fpDepthF = arcpy.sa.Divide(fpDepth, float(inMultiplier)) 
                            sDepthFile = os.path.join(sDepthRWKS,sName)
                            fpDepthF.save(sDepthFile)    # Depth grid.

                            #..Save the fpDepth
                            fpZone4PolyRiver = arcpy.sa.Con(fpDepth, 1, 0, '"value" >= 0'  ) 
                            fpZoneTempRiver = os.path.join(scratch_wks, "rz{}_{}".format(int(inStep),rivID)) 
                            #arcpy.RasterToPolygon_conversion(regiongroupg, tmpPolyFCInit, "NO_SIMPLIFY", "Value")  
                            try:
                                arcpy.RasterToPolygon_conversion(fpZone4PolyRiver, fpZoneTempRiver, "NO_SIMPLIFY" )
                                if(bConnectedOnly==True):
                                    sRivWhere = "{}={}".format(apwrutils.FN_HYDROID, rivID)
                                    flZoneTempRiver = "flrz{}_{}".format(int(inStep),rivID)
                                    flRiv = "flrv{}".format(rivID) 
                                    arcpy.MakeFeatureLayer_management(inRiv, flRiv, sRivWhere) 
                                    arcpy.MakeFeatureLayer_management(fpZoneTempRiver, flZoneTempRiver) 
                                    arcpy.SelectLayerByLocation_management(flZoneTempRiver, 'INTERSECT', flRiv)
                                    fpZoneTempDslv = os.path.join(scratch_wks, "fpr{}_{}".format(inStep, rivID))
                                    arcpy.Dissolve_management(flZoneTempRiver, fpZoneTempDslv, [flooddsconfig.FN_GridCode])
                                    sWhereGridCode = "{}>0".format(flooddsconfig.FN_GridCode)
                                    sDslvName = sName.split(".")[0]
                                    flZoneDslv = "{}DSLV".format(sDslvName) 
                                    arcpy.MakeFeatureLayer_management(fpZoneTempDslv, flZoneDslv, sWhereGridCode)
                                    try:
                                        fpDepthMasked = arcpy.sa.ExtractByMask(fpDepthF, flZoneDslv)  #   pMaskFC)   #flZoneDslv)
                                        pRaster = arcpy.sa.Plus(fpDepthMasked, 0.0)    
                                        pRaster.save(sDepthFile) 
                                    except:
                                        arcpy.AddMessage(trace())
                                        pass
                                        #arcpy.CopyRaster_management(ssOutFile, ssOutFileNew) 
                               
                                else:
                                    fpZoneTempDslv = os.path.join(scratch_wks, "fpr{}_{}".format(int(inStep), rivID))
                                    arcpy.Dissolve_management(fpZoneTempRiver, fpZoneTempDslv, [flooddsconfig.FN_GridCode])
                                if((flooddsconfig.debugLevel & 2)==2): arcpy.AddMessage("sName{}, fpZoneTempDslv={}".format(sName, fpZoneTempDslv) )
                                if(sRasters ==""):
                                    sRasters = sName
                                else:
                                    sRasters = sRasters + ";" + sName
                                #arcpy.Dissolve_management(fpZoneTempRiver, fpZoneTempDslv, [flooddsconfig.FN_GridCode])
                                sDateCreated = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") 
                                with arcpy.da.InsertCursor(fcZoneRiver, lFieldsRiver) as inRows:
                                    #with arcpy.da.SearchCursor(fpZoneTempDslv, [apwrutils.FN_ShapeAt,flooddsconfig.FN_GridCode]) as prows:
                                    with arcpy.da.SearchCursor(flZoneDslv, [apwrutils.FN_ShapeAt,flooddsconfig.FN_GridCode]) as prows:
                                        for prow in prows:
                                            try:
                                                #fieldsRiver = {Shape@, flooddsconfig.FN_StreamID:'LONG', flooddsconfig.FN_STEP:'TEXT', 
                                                #      flooddsconfig.FN_GridCode:'LONG', flooddsconfig.FN_DateCreated :'TEXT', 
                                                #      flooddsconfig.FN_FLDESC:'TEXT', apwrutils.FN_HYDROCODE:'TEXT'}
                                                inRow = []
                                                oShp = prow[0]
                                                inRow.append(oShp)
                                                inRow.append(rivID)    #StreamID
                                                inRow.append(inStep)   #FPStep
                                                inRow.append(prow[1])  #GRidCode
                                                inRow.append(sDateCreated)  #DateCreated
                                                inRow.append(inDeltaH)   #FPDESC
                                                inRow.append(rivID)    #HYDROCODE
                                                inRows.insertRow(inRow)       
                                            except:
                                                arcpy.AddMessage(trace()) 
                                
                            except:
                                pass

                        except arcpy.ExecuteError:   #try in row for Cat
                            sMsg = str(arcpy.GetMessages(2))
                            arcpy.AddError("{} {}".format(sMsg, trace()))
                        except:
                            arcpy.AddWarning(arcpy.GetMessages(2))
                            sMsg = trace()
                            arcpy.AddMessage(sMsg)
                        finally:   ##try in row - per catchment
                            if(fl!=""):
                                arcpy.Delete_management(fl)

                            if(flZoneTempRiver!=""):
                                arcpy.Delete_management(flZoneTempRiver)
                            if(flZoneDslv!=""):
                                arcpy.Delete_management(flZoneDslv)

                            if(flRiv!=""):
                                arcpy.Delete_management(flRiv)
                            #sMsg = "Done, processing catchment {} (rivid={} catid={} dt={})".format(sWhere, rivID, catID, apwrutils.Utils.GetDSMsg(ddt))
                            sMsg = "{} (iStep,dh)=({},{}) Done, processing catchment {} (rivid={} catid={} dt={})".format(sp, int(inStep), inDeltaH, sWhere, rivID, catID, apwrutils.Utils.GetDSMsg(ddt)) 
 
                            arcpy.AddMessage(sMsg)
                if (nProcessors<=1) :            
                    try:
                        arcpy.env.extent = inRasterElev                      
                        arcpy.env.workspace = sDepthRWKS
                        arcpy.env.mask = inRasterElev
                    
                        # sDepthName = "{}{}{}".format(flooddsconfig.HD_Depth,inStep,flooddsconfig.Ext_R)  # did not work when .tif is used, it would produce a mosaic ds with Nodata being filled with -128 or 0.
                        sDepthName = "{}_{}".format(flooddsconfig.HD_Depth,int(inStep))
                        #arcpy.MosaicToNewRaster_management(sRasters, inRWKS, sDepthName, sr, "8_BIT_UNSIGNED", cellSize, "1", "LAST","FIRST")
                        #flDepthName = os.path.join(inRWKS, sDepthName)
                        arcpy.MosaicToNewRaster_management(sRasters, sGDepth, sDepthName, sr, "8_BIT_UNSIGNED", cellSize, "1", "LAST","FIRST")
                        flDepthName = os.path.join(sGDepth, sDepthName)

                        if(flooddsconfig.Ext_R!=""):
                             fpDepthF = arcpy.sa.SetNull(flDepthName, flDepthName, '"value" <= 0')
                             fpDepthF.save(flDepthName)
                        else:
                             fpDepthF = flDepthName
                
                        if((flooddsconfig.debugLevel & 1)==1): 
                            sMsg = "mosaic raster depth grid: fpDepthF={}".format(fpDepthF)
                            arcpy.AddMessage(sMsg)

                    except arcpy.ExecuteError:   #try in row for Cat
                        sMsg = str(arcpy.GetMessages(2))
                        arcpy.AddWarning(sMsg)
                    except:
                        arcpy.AddWarning(arcpy.GetMessages(2))
                        sMsg = trace()
                        arcpy.AddMessage(sMsg)

                
                #fpDepth = arcpy.sa.Minus(flDepthName, filGrdInt)
                #if((flooddsconfig.debugLevel & 1)==1): 
                #    sMsg = "Construct fpDepth  {}".format(flDepthName) 
                #    arcpy.AddMessage(sMsg)

            else:
                 isNullGrd = arcpy.sa.IsNull(river3DInt)
                 nibSrc = arcpy.sa.Con(isNullGrd, river3DInt, "-99999", "Value = 0")
                 nibLevel = arcpy.sa.Nibble(nibSrc, river3DInt)  #, "ALL_VALUES") 
                 fpDepth = arcpy.sa.Minus(nibLevel, filGrdInt)
                 fpDepthF = arcpy.sa.Con(fpDepth, fpDepth, "#", '"value" >= 0')     #fpDepth>0, return fpDepth, else null.
            
            fpZoneName = flooddsconfig.LN_FPZone
            fcZoneRslt = os.path.join(inFWKS, fpZoneName)
            fpDepthRName = "{}_{}".format(flooddsconfig.LN_FPZone,int(inStep)) 
            fpRaster = os.path.join(sGPFZone, fpDepthRName)
            if(nProcessors<=1):
                if((flooddsconfig.debugLevel & 1)==1): arcpy.AddMessage(fpDepthF)
                fpZone4Poly = arcpy.sa.Con(fpDepthF, 1, 0, '"value" >= 0'  )            
                fpZoneTemp = os.path.join(scratch_wks, "r" + str(int(inStep))) 
                arcpy.RasterToPolygon_conversion(fpZone4Poly, fpZoneTemp, "NO_SIMPLIFY")
        
                if(inRiv!=None):
                    #try to remove the floodplain polygons not connected with the inRiv
                    flZoneOnRiv = ""
                    try:
                        flZoneOnRiv = "flZoneOnRiv"    #Zone that overlay with river lines.
                        arcpy.MakeFeatureLayer_management(fpZoneTemp, flZoneOnRiv)    
                        arcpy.SelectLayerByLocation_management(flZoneOnRiv, "INTERSECT", inRiv)
                        #Connected Raster Area:
                        sRasterConn = os.path.join(scratchFolder, "C" + str(int(inStep)))
                        arcpy.PolygonToRaster_conversion(flZoneOnRiv, flooddsconfig.FN_GridCode, sRasterConn,"","",cellSize)
                        fpZone4Poly = arcpy.sa.Con(sRasterConn, fpZone4Poly)
                        fpZone4Poly = arcpy.sa.SetNull(fpZone4Poly, fpZone4Poly, '"value" = 0')
                        arcpy.RasterToPolygon_conversion(fpZone4Poly, fpZoneTemp, "NO_SIMPLIFY")
                    except:
                        arcpy.AddMessage("try to remove floodplain not intersecting with a river. {}".format(trace()))
                    finally:
                        if(flZoneOnRiv!=""):
                            arcpy.Delete_management(flZoneOnRiv)                     
                fpZoneTempDslv = os.path.join(scratch_wks, "FPD" + str(int(inStep)))
                arcpy.Dissolve_management(fpZoneTemp, fpZoneTempDslv, [flooddsconfig.FN_GridCode])
            
                if(arcpy.Exists(fcZoneRslt)==False):
                    arcpy.CreateFeatureclass_management(inFWKS, fpZoneName, "POLYGON", fpZoneTempDslv, None, None, sr)   
        
                oDesc = arcpy.Describe(fcZoneRslt)
                fields = {flooddsconfig.FN_FLDESC:'DOUBLE', flooddsconfig.FN_STEP:'LONG', 
                          flooddsconfig.FN_GridCode:'LONG', flooddsconfig.FN_DateCreated :'TEXT'}

                apwrutils.Utils.addFields(fcZoneRslt, fields)
                
                sDateCreated = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") 
        
                shpFldName = apwrutils.Utils.GetShapeFieldName(fcZoneRslt)            
                lFieldsZR = [apwrutils.FN_ShapeAt,flooddsconfig.FN_STEP,flooddsconfig.FN_FLDESC,flooddsconfig.FN_GridCode, flooddsconfig.FN_DateCreated]
                lFieldsDslv = [apwrutils.FN_ShapeAt, flooddsconfig.FN_GridCode] 
                with arcpy.da.InsertCursor(fcZoneRslt, lFieldsZR) as inRows:
                    with arcpy.da.SearchCursor(fpZoneTempDslv,lFieldsDslv) as rows:
                        for row in rows:
                            try:
                                inRow = []
                                oShp = row[0]
                                inRow.append(oShp)
                                inRow.append(int(inStep))
                                inRow.append(inDeltaH)
                                inRow.append(row[lFieldsDslv.index(flooddsconfig.FN_GridCode)])
                                inRow.append(sDateCreated)
                                inRows.insertRow(inRow)       
                            except:
                                arcpy.AddMessage(trace())  
        
                # ExtractByMask - extract the fpZoneF (floodplain (depth) zone with in float)   
                fpDepthFExt = arcpy.sa.ExtractByMask(fpDepthF, fcZoneRslt)
                #sDepthRName = "{}{}{}{}".format(flooddsconfig.HD_Depth,inStep,flooddsconfig.HD_River,flooddsconfig.Ext_R)
                fpDepthFExt.save(fpRaster)
                #..Construct the Raster/FeatureLayer to send back to the calling program
                #flZone = arcpy.management.MakeFeatureLayer(fcZoneRslt, fpZoneName)
                #rlZone = arcpy.management.MakeRasterLayer(fpRaster, fpDepthRName) 

        except:
            sOK = trace()
            arcpy.AddMessage(sOK)
            
        finally:
            if((flooddsconfig.debugLevel & 1)==1):  arcpy.AddMessage("FloodplainFrom3DRiverRaster Cleaning up...")
            arcpy.ResetEnvironments()

        if(sOK == apwrutils.C_OK):
            tReturn = (sOK, fcZoneRslt, fpRaster)
        else:
            tReturn = (sOK,)   #..single-element tuple so the return type is consistent with the success case

        return tReturn
Example #30
0
def clip():
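    # Note: relies on module-level globals defined elsewhere in the script
    # (nhd, burnt_ned, outfolder, nhdsubregion).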

    arcpy.env.workspace = nhd
    arcpy.RefreshCatalog(nhd)
    arcpy.ResetEnvironments()

    # Burnt and walled mosaicked elevation
    raster = burnt_ned

    # Create a feature dataset in NHD file geodatabase named "HUC8_Albers" in Albers projection
    workspace = arcpy.ListWorkspaces("*", "FileGDB")
    sr = arcpy.SpatialReference()
    sr.factoryCode = 102039
    sr.create()
    arcpy.env.outputCoordinateSystem = sr
    arcpy.env.compression = "None"
    arcpy.env.pyramid = "NONE"
    arcpy.CreateFeatureDataset_management(arcpy.env.workspace, "HUC8_Albers",
                                          sr)

    # HUC8 polygon selected automatically from the input workspace
    inhuc8 = "WBD_HU8"
    inhuc8albers = "WBD_HU8_Albers"

    # Project WBD_HU8 to Albers
    srin = arcpy.SpatialReference()
    srin.factoryCode = 4269
    srin.create()

    arcpy.Project_management(inhuc8, r"HUC8_Albers\WBD_HU8_Albers", sr, '',
                             srin)

    # Output goes to feature dataset HUC8_Albers
    outfd = "HUC8_Albers"

    # Splits HUC8 into individual feature classes for each polygon
    arcpy.AddField_management("WBD_HU8_Albers", "Label", "TEXT")
    arcpy.RefreshCatalog(nhd)
    calcexp = '"HUC" + !HUC_8!'
    arcpy.CalculateField_management("WBD_HU8_Albers", "Label", calcexp,
                                    "PYTHON")
    if not os.path.exists(os.path.join(outfolder, "cliptemp")):
        os.mkdir(os.path.join(outfolder, "cliptemp"))
    cliptemp = os.path.join(outfolder, "cliptemp")
    arcpy.FeatureClassToShapefile_conversion("WBD_HU8_Albers", cliptemp)
    wbdshp = os.path.join(cliptemp, "WBD_HU8_Albers.shp")
    arcpy.Split_analysis(wbdshp, wbdshp, "Label", outfd, '')
    shutil.rmtree(cliptemp)

    # Buffer HUC8 feature classes by 5000m
    fcs = arcpy.ListFeatureClasses("", "Polygon", "HUC8_Albers")
    for fc in fcs:
        arcpy.Buffer_analysis(fc, outfd + "\\" + fc + "_buffer", "5000 meters")

    arcpy.RefreshCatalog(nhd)
    arcpy.ResetEnvironments()

    # Clips rasters
    fcs = arcpy.ListFeatureClasses("*_buffer", "Polygon", "HUC8_Albers")
    for fc in fcs:
        arcpy.env.compression = "None"
        arcpy.env.pyramid = "NONE"
        fcshort = fc[3:11]
        arcpy.Clip_management(
            raster, '', outfolder + "\\" + "huc8clips" + nhdsubregion + "\\" +
            "NED" + fcshort + ".tif", fc, "0", "ClippingGeometry")

    return