Example No. 1
def workLines(lineNo):
    # Temporary files
    outWorkspace = flmc.GetWorkspace(workspaceName)

    # read params from text file
    f = open(outWorkspace + "\\params.txt")
    Forest_Line_Feature_Class = f.readline().strip()
    Cost_Raster = f.readline().strip()
    Line_Processing_Radius = f.readline().strip()
    f.close()

    fileSeg = outWorkspace + "\\FLM_CL_Segment_" + str(lineNo) + ".shp"
    fileOrigin = outWorkspace + "\\FLM_CL_Origin_" + str(lineNo) + ".shp"
    fileDestination = outWorkspace + "\\FLM_CL_Destination_" + str(
        lineNo) + ".shp"
    fileBuffer = outWorkspace + "\\FLM_CL_Buffer_" + str(lineNo) + ".shp"
    fileClip = outWorkspace + "\\FLM_CL_Clip_" + str(lineNo) + ".tif"
    fileCostDist = outWorkspace + "\\FLM_CL_CostDist_" + str(lineNo) + ".tif"
    fileCostBack = outWorkspace + "\\FLM_CL_CostBack_" + str(lineNo) + ".tif"
    fileCenterLine = outWorkspace + "\\FLM_CL_CenterLine_" + str(
        lineNo) + ".shp"

    # Find origin and destination coordinates
    x1 = segment_list[0].X
    y1 = segment_list[0].Y
    x2 = segment_list[-1].X
    y2 = segment_list[-1].Y

    # Create origin feature class
    try:
        arcpy.CreateFeatureclass_management(outWorkspace, PathFile(fileOrigin),
                                            "POINT", Forest_Line_Feature_Class,
                                            "DISABLED", "DISABLED",
                                            Forest_Line_Feature_Class)
        cursor = arcpy.da.InsertCursor(fileOrigin, ["SHAPE@XY"])
        xy = (float(x1), float(y1))
        cursor.insertRow([xy])
        del cursor
    except Exception as e:
        print("Creating origin feature class failed: at X, Y" + str(xy) + ".")
        print(e)

    # Create destination feature class
    try:
        arcpy.CreateFeatureclass_management(outWorkspace,
                                            PathFile(fileDestination), "POINT",
                                            Forest_Line_Feature_Class,
                                            "DISABLED", "DISABLED",
                                            Forest_Line_Feature_Class)
        cursor = arcpy.da.InsertCursor(fileDestination, ["SHAPE@XY"])
        xy = (float(x2), float(y2))
        cursor.insertRow([xy])
        del cursor
    except Exception as e:
        print("Creating destination feature class failed: at X, Y" + str(xy) +
              ".")
        print(e)

    try:
        # Buffer around line
        arcpy.Buffer_analysis(fileSeg, fileBuffer, Line_Processing_Radius,
                              "FULL", "ROUND", "NONE", "", "PLANAR")

        # Clip cost raster using buffer
        DescBuffer = arcpy.Describe(fileBuffer)
        SearchBox = str(DescBuffer.extent.XMin) + " " + str(
            DescBuffer.extent.YMin) + " " + str(
                DescBuffer.extent.XMax) + " " + str(DescBuffer.extent.YMax)
        arcpy.Clip_management(Cost_Raster, SearchBox, fileClip, fileBuffer, "",
                              "ClippingGeometry", "NO_MAINTAIN_EXTENT")

        # Least cost path
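        # CostDistance builds the accumulated-cost surface and a back-link raster
        # from the origin point; CostPathAsPolyline then traces the least-cost
        # centerline back from the destination along that back-link raster.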
        arcpy.gp.CostDistance_sa(fileOrigin, fileClip, fileCostDist, "",
                                 fileCostBack, "", "", "", "", "TO_SOURCE")
        arcpy.gp.CostPathAsPolyline_sa(fileDestination, fileCostDist,
                                       fileCostBack, fileCenterLine,
                                       "BEST_SINGLE", "")

    except Exception as e:
        print("Problem with line starting at X " + str(x1) + ", Y " + str(y1) +
              "; and ending at X " + str(x1) + ", Y " + str(y1) + ".")
        print(e)

    # Clean temporary files
    arcpy.Delete_management(fileSeg)
    arcpy.Delete_management(fileOrigin)
    arcpy.Delete_management(fileDestination)
    arcpy.Delete_management(fileBuffer)
    arcpy.Delete_management(fileClip)
    arcpy.Delete_management(fileCostDist)
    arcpy.Delete_management(fileCostBack)
Example No. 2
def clipInputs(outputFolder, studyAreaMaskBuff, inputDEM, inputLC, inputSoil, inputStreamNetwork, outputDEM, outputLC, outputSoil, outputStream):

    try:
        log.info("Clipping input data")

        # Set temporary variables
        prefix = os.path.join(arcpy.env.scratchGDB, "clip_")

        DEMCopy = prefix + "DEMCopy"
        lcResample = prefix + "lcResample"
        soilResample = prefix + "soilResample"

        # Clip DEM
        # Check DEM not compressed. If it is, uncompress before clipping.
        compression = arcpy.Describe(inputDEM).compressionType
        if compression.lower() != 'none':
            arcpy.env.compression = "None"
            arcpy.CopyRaster_management(inputDEM, DEMCopy)
            arcpy.Clip_management(DEMCopy, "#", outputDEM, studyAreaMaskBuff, clipping_geometry="ClippingGeometry")

            # Delete copy of DEM
            arcpy.Delete_management(DEMCopy)

        else:
            arcpy.Clip_management(inputDEM, "#", outputDEM, studyAreaMaskBuff, clipping_geometry="ClippingGeometry")

        DEMSpatRef = arcpy.Describe(outputDEM).SpatialReference

        # Set environment variables
        arcpy.env.snapRaster = outputDEM
        arcpy.env.extent = outputDEM
        arcpy.env.cellSize = outputDEM

        # Resample and clip land cover
        lcFormat = arcpy.Describe(inputLC).dataType

        if lcFormat in ['RasterDataset', 'RasterLayer']:
            lcResampleInt = arcpy.sa.ApplyEnvironment(inputLC)
            lcResampleInt.save(lcResample)
            del lcResampleInt

            arcpy.Clip_management(lcResample, "#", outputLC, studyAreaMaskBuff, clipping_geometry="ClippingGeometry")

            # Delete resampled LC
            arcpy.Delete_management(lcResample)

        elif lcFormat in ['ShapeFile', 'FeatureClass']:
            arcpy.Clip_analysis(inputLC, studyAreaMaskBuff, outputLC, configuration.clippingTolerance)

        # Resample and clip soil
        soilFormat = arcpy.Describe(inputSoil).dataType

        if soilFormat in ['RasterDataset', 'RasterLayer']:
            soilResampleInt = arcpy.sa.ApplyEnvironment(inputSoil)
            soilResampleInt.save(soilResample)
            del soilResampleInt

            arcpy.Clip_management(soilResample, "#", outputSoil, studyAreaMaskBuff, clipping_geometry="ClippingGeometry")

            # Delete resampled soil
            arcpy.Delete_management(soilResample)

        elif soilFormat in ['ShapeFile', 'FeatureClass']:
            arcpy.Clip_analysis(inputSoil, studyAreaMaskBuff, outputSoil, configuration.clippingTolerance)

        # Clip stream network
        if inputStreamNetwork is None:
            outputStream = None
        else:
            arcpy.Clip_analysis(inputStreamNetwork, studyAreaMaskBuff, outputStream, configuration.clippingTolerance)


        log.info("Input data clipped successfully")

    except Exception:
        log.error("Input data clipping did not complete successfully")
        raise
Example No. 3
    else:
        out_csv = outpath_final + os.sep + geoid + "_" + table_suffix + '.csv'
        if not os.path.exists(out_csv):
            start_loop = datetime.datetime.now()
            print('\nWorking on county {0}; {1} of {2}'.format(
                geoid, counter, row_count))

            arcpy.env.snapRaster = snap_raster
            whereclause = "STUSPS = '%s'" % geoid
            arcpy.Delete_management("lyr")
            arcpy.Delete_management("crops")
            arcpy.Delete_management("test")
            arcpy.MakeFeatureLayer_management("fc", "lyr", whereclause)
            try:
                arcpy.Clip_management("in_raster", "#",
                                      r"in_memory/raster_" + str(geoid), "lyr",
                                      "NoData", "ClippingGeometry",
                                      "MAINTAIN_EXTENT")
                # if we want to save the clipped inraster file uncomment this  line
                # arcpy.CopyRaster_management(r"in_memory/raster_" + str(geoid),out_layers +os.sep+ "raster_"+str(geoid))
                arcpy.BuildRasterAttributeTable_management(
                    r"in_memory\\raster_" + str(geoid), "Overwrite")
                arcpy.MakeRasterLayer_management(in_crops, "crops")
                arcpy.ApplySymbologyFromLayer_management(
                    "crops", symbologyLayer)
                arcpy.gp.ZonalHistogram_sa("in_memory\\raster_" + str(geoid),
                                           "VALUE", "crops",
                                           r"in_memory/outtable_" + str(geoid))

                list_fields = [
                    f.name for f in arcpy.ListFields(r"in_memory/outtable_" +
                                                     str(geoid))
Example No. 4
# [OPTIONAL code block: we should use the Clip_analysis tool instead.] Use a selection to clip the Landsat imagery [could possibly use this code, acquired from here, and skip the step above to clip the rasters to the Wilmington shapefile]. Once the raster and shapefile are added to the file:
# Extent tool syntax: Extent({XMin}, {YMin}, {XMax}, {YMax}, {ZMin}, {ZMax}, {MMin}, {MMax})
Extents = arcpy.sa.Raster(MyRaster).extent
# [OPTIONAL]  Use these lines to make a polygon out of these extents:
pnt_array = arcpy.Array()
pnt_array.add(Extents.lowerLeft)
pnt_array.add(Extents.lowerRight)
pnt_array.add(Extents.upperRight)
pnt_array.add(Extents.upperLeft)
poly = arcpy.Polygon(pnt_array)

# Clip my shapefile using this code. Outputs are rasters which fit Wilmington's boundary -- also repeat for the FEMA map layer; the flood map needs to be limited to the area of interest.
arcpy.Clip_analysis(shp, poly, Shp_clip)

arcpy.Clip_management(in_raster=name,
                      rectangle=cordList,
                      out_raster="C:/Data/" + name + ".tif",
                      in_template_dataset=cordList_polygon,
                      clipping_geometry="ClippingGeometry")

#PART 3: Iterate & Classify
#Begin iteration for tiff files
raster_bands = ["dummy",
                "2018-09-18, Sentinel-2A L1C, B01.tiff",
                "2018-09-18, Sentinel-2A L1C, B02.tiff",
                "2018-09-18, Sentinel-2A L1C, B03.tiff",
                "2018-09-18, Sentinel-2A L1C, B04.tiff",
                "2018-09-18, Sentinel-2A L1C, B05.tiff",
                "2018-09-18, Sentinel-2A L1C, B06.tiff",
                "2018-09-18, Sentinel-2A L1C, B07.tiff",
                "2018-09-18, Sentinel-2A L1C, B08.tiff",
                "2018-09-18, Sentinel-2A L1C, B09.tiff",
                "2018-09-18, Sentinel-2A L1C, B10.tiff",
                "2018-09-18, Sentinel-2A L1C, B11.tiff",
                "2018-09-18, Sentinel-2A L1C, B12.tiff"]

#Supervised classification
#Calculate new raster values using NDWI (Normalized Difference Water Index). Output is a raster file with NDWI values for each cell.
#     (McFeeters, 1996) This formula is for detecting water bodies.
# Stack Exchange link- https://gis.stackexchange.com/questions/150067/access-individual-bands-and-use-them-in-map-algebra
#ArcHelp link: http://desktop.arcgis.com/en/arcmap/10.3/tools/3d-analyst-toolbox/an-overview-of-the-raster-math-toolset.htm

green = arcpy.sa.Raster(inrastergreen)
nir = arcpy.sa.Raster(inrasterNIR)
outrasNDWI = arcpy.sa.Float(green - nir) / arcpy.sa.Float(green + nir)
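
# A more self-contained sketch of the same NDWI step (hedged: a Spatial Analyst
# license is assumed, and the band paths and output name below are hypothetical,
# not taken from the original workflow):
import arcpy
from arcpy.sa import Raster, Float

arcpy.CheckOutExtension("Spatial")

inrastergreen = r"C:/Data/2018-09-18_B03_clip.tif"   # hypothetical green band
inrasterNIR = r"C:/Data/2018-09-18_B08_clip.tif"     # hypothetical NIR band

# NDWI (McFeeters, 1996): (Green - NIR) / (Green + NIR)
green = Float(Raster(inrastergreen))
nir = Float(Raster(inrasterNIR))
outrasNDWI = (green - nir) / (green + nir)
outrasNDWI.save(r"C:/Data/NDWI.tif")                 # hypothetical output path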
Example No. 5
def bianLi(rootDir, wildcard, recursion):
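    # bianLi ("traverse"): recursively walk rootDir and clip every raster whose
    # extension matches the first wildcard entry.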
    exts = wildcard.split(" ")
    dirs = os.listdir(rootDir)
    count = len(dirs)
    #print count
    for dir in dirs:

        #get fullname
        fullname = rootDir + '/' + dir
        #print "rootDir:",rootDir

        #if fullname is a directory (and recursion is enabled), traverse it recursively
        if os.path.isdir(fullname) and recursion:
            bianLi(fullname, wildcard, recursion)

        # otherwise, if the file has the target extension (e.g. '.tif'), process it
        else:

            #get file path
            rootDirPath = rootDir.split('/')

            #get the count of rootDirPath
            nPath_Count = len(rootDirPath)

            #get the last path of rootDirPath
            fileLastPath = rootDirPath[nPath_Count - 1]
            #print('fileLastPath:----'+fileLastPath)

            InputImage = fileLastPath + ".tif"

            #Create the output file name without path: OutPutImageAfterClip
            OutPutImageAfterClip = fileLastPath + "_clip.tif"
            #print('OutPutImageAfterClip:----'+OutPutImageAfterClip)

            fullOutPutImageAfterClip = rootDir + "/" + OutPutImageAfterClip
            #print('fullOutPutImageAfterClip=='+fullOutPutImageAfterClip)
            ExtentionName = os.path.splitext(dir)[1]
            #print('ExtentionName:----'+ExtentionName)
            if (ExtentionName == exts[0]):
                #if (InputImage == dir):
                #if the folder's files are not layerstacked
                if not (os.path.isfile(fullOutPutImageAfterClip)):
                    print " "
                    print "---------------------------------------------"

                    print('InputImage:----' + InputImage)
                    #print('dir:'+dir)
                    #create env.workspace name
                    EWSName = rootDir
                    print('env.workspace:----' + EWSName)
                    env.workspace = EWSName

                    try:
                        #do Clip_management use arcpy
                        arcpy.Clip_management(
                            InputImage, "#", OutPutImageAfterClip,
                            "D:/share/croppattern/Data_20170712/RasterRangeNoZY3.shp",
                            "0", "ClippingGeometry")
                        print " "
                    except:
                        print "Clip example failed."
                        print arcpy.GetMessages()
                    print(OutPutImageAfterClip + '-----is ok')
Example No. 6
                arcpy.MosaicToNewRaster_management(
                    input_rasters=mosaic,
                    output_location=pathdab,
                    raster_dataset_name_with_extension="mosaic",
                    coordinate_system_for_the_raster=spatialReference_new,
                    pixel_type="16_BIT_SIGNED",
                    cellsize="7.4031617",
                    number_of_bands="1",
                    mosaic_method="LAST",
                    mosaic_colormap_mode="FIRST")

                # The following inputs are layers or table views: "dtm", "square_test"
                arcpy.Clip_management(
                    in_raster="mosaic",
                    rectangle=ExtStr,
                    out_raster=pathdab + "dtm_clip",
                    in_template_dataset=pathdab + "miniarea_square",
                    nodata_value="-3.402823e+038",
                    clipping_geometry="NONE",
                    maintain_clipping_extent="NO_MAINTAIN_EXTENT")

                # Get input Raster properties
                inRas = arcpy.Raster('dtm_clip')

                # or I could convert it to ascii
                arcpy.RasterToASCII_conversion(
                    inRas, outASCII + crater_id[ix] + "_visible.asc")

                ix = ix + 1

                arcpy.Delete_management("dtm_clip")
                arcpy.Delete_management("miniarea_square")
Example No. 7
def TemperatureSeparation(dir_LAI, dir_RGBNIR, dir_Tr, dir_DSM,
                          NoDataValue, Veg_threshold, Soil_threshold,
                          band_R, band_NIR, 
                          cellsize_resample, 
                          Azimuth, Altitude, 
                          MiddleProducts="No"):
    '''
    This function tries to account for the effect of shadow pixels. However, the algorithm used here cannot identify shadow pixels,
    because the upscaled DSM data (from 0.15 m pixels to 0.6 m pixels) no longer resolves them; in other words, the shadow
    information is smoothed out by the DSM upscaling.

    Parameters used in this function:
    dir_LAI: the file path of the LAI image; resolution is 3.6 meter by 3.6 meter.
    dir_RGBNIR: the file path of the optical image containing R, G, B, and NIR bands; resolution is 0.15 meter by 0.15 meter.
    dir_Tr: the file path of the temperature image in units of degrees C; resolution is 0.6 meter by 0.6 meter.
    dir_DSM: the file path of the DSM image in meters; resolution is 0.15 meter by 0.15 meter.
    NoDataValue: the value assigned to represent NaN.
    Veg_threshold: any NDVI pixel value above this threshold represents a vegetation pixel.
    Soil_threshold: any NDVI pixel value below this threshold represents a soil pixel.
    band_R: the layer index of the Red band in the (multiband) optical image.
    band_NIR: the layer index of the Near-infrared band in the (multiband) optical image.
    cellsize_resample: 0.6 meter by 0.6 meter resolution, used to calculate the vine shadow and the temperature separation.
    Azimuth: a parameter used for the vine shadow calculation.
    Altitude: a parameter used for the vine shadow calculation.
    MiddleProducts: default is "No", which means the intermediate products are deleted. Any other value, such as "Yes", keeps them.
    '''
    # import libraries
    import arcpy
    import gdal
    import os
    import numpy as np
    import pandas as pd
    from scipy.stats import linregress
    import matplotlib.pyplot as plt
    
    # Optical image processing
    # resample the optical image
    [res_x,res_y] = TellResolution(dir_RGBNIR)
    name_resample = "resample_RGBNIR.tif"
    arcpy.Resample_management(in_raster=dir_RGBNIR, 
                              out_raster=dir_output+"\\"+name_resample,
                              cell_size=str(cellsize_resample)+" "+str(cellsize_resample), 
                              resampling_type="BILINEAR")
    # clip the optical and thermal images to the LAI extent
    extent = TellExtent(dir_LAI)
    name_clip_opt = "clip_RGBNIR.tif"
    arcpy.Clip_management(in_raster=dir_output+"\\"+name_resample, 
                          rectangle=extent, 
                          out_raster=dir_output+"\\"+name_clip_opt, 
                          in_template_dataset=dir_LAI, 
                          nodata_value=NoDataValue, 
                          clipping_geometry="NONE", 
                          maintain_clipping_extent="MAINTAIN_EXTENT")

    # Temperature data clip
    # Resolution of the temperature is 0.6 meter by 0.6 meter
    # The unit is in degree C
    name_clip_tr = "clip_tr.tif"
    arcpy.Clip_management(in_raster=dir_Tr, 
                          rectangle=extent, 
                          out_raster=dir_output+"\\"+name_clip_tr, 
                          in_template_dataset=dir_LAI, 
                          nodata_value=NoDataValue, 
                          clipping_geometry="NONE", 
                          maintain_clipping_extent="MAINTAIN_EXTENT")

    # DSM data clip
    # resample the DSM image
    [res_x,res_y] = TellResolution(dir_DSM)
    name_resample_dsm = "resample_DSM.tif"
    arcpy.Resample_management(in_raster=dir_DSM, 
                              out_raster=dir_output+"\\"+name_resample_dsm,
                              cell_size=str(cellsize_resample)+" "+str(cellsize_resample), 
                              resampling_type="BILINEAR")
    # Clip the DSM data
    name_clip_dsm = "clip_DSM.tif"
    arcpy.Clip_management(in_raster=dir_output+"\\"+name_resample_dsm, 
                          rectangle=extent, 
                          out_raster=dir_output+"\\"+name_clip_dsm, 
                          in_template_dataset=dir_LAI, 
                          nodata_value=NoDataValue, 
                          clipping_geometry="NONE", 
                          maintain_clipping_extent="MAINTAIN_EXTENT")

    # Temperature separation
    Array_LAI = arcpy.RasterToNumPyArray(dir_LAI, nodata_to_value=NoDataValue)
    Array_RGBNIR = arcpy.RasterToNumPyArray(dir_output+"\\"+name_clip_opt, nodata_to_value=NoDataValue)
    Array_R = Array_RGBNIR[band_R,:,:]
    Array_NIR = Array_RGBNIR[band_NIR,:,:]

    Array_NDVI = (Array_NIR-Array_R)/(Array_NIR+Array_R)
    Array_NDVI[Array_NDVI<0] = np.nan
    Array_NDVI[Array_NDVI>1] = np.nan
    Array_Tr = arcpy.RasterToNumPyArray(dir_output+"\\"+name_clip_tr, nodata_to_value=NoDataValue)
    Array_Tr[Array_Tr<0] = np.nan
#     print(Array_LAI.shape, Array_RGBNIR.shape, Array_R.shape, Array_NIR.shape, Array_Tr.shape)
#     print(np.nanmax(Array_NDVI),np.nanmin(Array_NDVI))

    # Stefan-Boltzmann Law
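    # Temperatures are averaged in T**4 space below and converted back with the
    # fourth root (and +273.15 to Kelvin) when tt_canopy / tt_soil are written out.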
    Array_Tr = Array_Tr ** 4

    dims_LAI = Array_LAI.shape
    print("Column of the LAI:",dims_LAI[0],"Row of the LAI:",dims_LAI[1])
    dims_NDVI = Array_NDVI.shape
    print("Column of the spectral data:",dims_NDVI[0],"Row of the spectral data:",dims_NDVI[1])
    hor_pixel = int(dims_NDVI[0]/dims_LAI[0])
    ver_pixel = int(dims_NDVI[1]/dims_LAI[1])
    print("Each LAI pixel contains",hor_pixel,"(column/column) by",ver_pixel,"(row/row) pixels.")

    # Hillshade calculation
    # Hillshade calculation based on the clipped DSM data
    name_hillshade = "Hillshade.tif"
    arcpy.gp.HillShade_sa(dir_output+"\\"+name_clip_dsm, 
                          dir_output+"\\"+name_hillshade, 
                          str(Azimuth), 
                          str(Altitude), 
                          "SHADOWS", "1")
    # Read the hillshade as array to ignore the shadow pixels
    Array_Hillshade = arcpy.RasterToNumPyArray(dir_output+"\\"+name_hillshade, nodata_to_value=NoDataValue)

    # Get the information from LAI map for data output
    fid=gdal.Open(dir_LAI)
    input_lai=fid.GetRasterBand(1).ReadAsArray()
    dims_lai=input_lai.shape
    # Read the GDAL GeoTransform to get the pixel size
    lai_geo=fid.GetGeoTransform()
    lai_prj=fid.GetProjection()
    fid=None
    # Compute the dimensions of the output file
    geo_out=list(lai_geo)
    geo_out=tuple(geo_out)

    t_canopy = np.empty((dims_LAI[0],dims_LAI[1]))
    t_canopy[:] = np.nan
    t_soil = np.empty((dims_LAI[0],dims_LAI[1]))
    t_soil[:] = np.nan
    t_coeff = np.empty((dims_LAI[0],dims_LAI[1]))
    t_coeff[:] = np.nan
    print("Dimension of the canopy temperature is:",t_canopy.shape[0],t_canopy.shape[1])
    print("Dimension of the soil temperature is:",t_soil.shape[0],t_soil.shape[1])

    # initial values for these four variables
    renew_slope = NoDataValue
    renew_intercept = NoDataValue
    renew_coeff = NoDataValue
    slope = NoDataValue
    intercept = NoDataValue
    correlation = NoDataValue
    pvalue = NoDataValue
    stderr = NoDataValue

    for irow in range(dims_LAI[0]):
        start_row = irow * hor_pixel
        end_row = start_row + (hor_pixel)
        for icol in range(dims_LAI[1]):
            start_col = icol * ver_pixel
            end_col = start_col + (ver_pixel)

            # Using the hillshade to eliminate the shadow pixel
            local_NDVI = Array_NDVI[start_row:end_row,start_col:end_col]
            local_NDVI[local_NDVI < 0] = NoDataValue
            local_Tr = Array_Tr[start_row:end_row,start_col:end_col]
            local_Hillshade = Array_Hillshade[start_row:end_row,start_col:end_col]
            local_Hillshade[local_Hillshade<=0] = 0
            local_Hillshade[local_Hillshade>0] = 1

            local_NDVI = local_NDVI*local_Hillshade

            tmp_NDVI = local_NDVI.reshape(-1)
            tmp_NDVI[tmp_NDVI<=0] = np.nan
    #         print("Invalid pixel number is:",np.count_nonzero(np.isnan(tmp_NDVI)))
            tmp_Tr = local_Tr.reshape(-1)
            tmp_Tr = np.sqrt(np.sqrt(tmp_Tr)) # unit in degree C

            df = pd.DataFrame()
            df = pd.DataFrame({'NDVI': tmp_NDVI,'Tr': tmp_Tr})
            df = df.dropna()
            df = df.apply(pd.to_numeric, errors='coerce')

            # do regression if valid data existed in the data frame
            if len(df) != 0:
                slope,intercept,correlation,pvalue,stderr = linregress(df['NDVI'],df['Tr'])        
                # slope: slope of the regression
                # intercept: intercept of the regression line
                # correlation: correlation coefficient
                # pvalue: two-sided p-value for a hypothesis test whose null hypothesis is that the slope is zero
                # stderr: standard error of the estimate
            else: pass

            # renew the slope and the intercept if the slope is negative
            if np.nanmean(slope) < 0:
                renew_slope = slope
                renew_intercept = intercept
                renew_coeff = correlation
            else: pass

            # gain index for soil and canopy pixel for each local domain
            index_soil = np.where(local_NDVI <= Soil_threshold)
            index_veg = np.where(local_NDVI >= Veg_threshold)
            # when the domain contains both vegetation and soil
            if len(index_soil[0]) > 0 and len(index_veg[0]) > 0:
                t_canopy[irow,icol] = np.nanmean(local_Tr[index_veg[0],index_veg[1]])
                t_soil[irow,icol] = np.nanmean(local_Tr[index_soil[0],index_soil[1]])
            # when the domain contains vegetation but no soil: estimate the soil temperature
            elif len(index_soil[0]) == 0 and len(index_veg[0]) > 0:
                t_canopy[irow,icol] = np.nanmean(local_Tr[index_veg[0],index_veg[1]])
                t_soil[irow,icol] = ((renew_intercept + renew_slope * Soil_threshold)**2)**2
            # when the domain contains soil but no vegetation: vegetation temperature is "NAN"
            elif len(index_soil[0]) > 0 and len(index_veg[0]) <= 0:
                t_canopy[irow,icol] = np.nan
                t_soil[irow,icol] = np.nanmean(local_Tr[index_soil[0],index_soil[1]])
            # when the domain contains either pure soil or vegetation
            # estimate the soil and vegetation temperature
            elif len(index_soil[0]) == 0 and len(index_veg[0]) == 0:
                t_canopy[irow,icol] = np.nan
                t_soil[irow,icol] = np.nan

            t_coeff[irow,icol] = renew_coeff

    tt_canopy = np.sqrt(np.sqrt(t_canopy.copy())) + 273.15
    tt_soil = np.sqrt(np.sqrt(t_soil.copy())) + 273.15

    # Write the output file
    driver = gdal.GetDriverByName('GTiff')
    ds = driver.Create(dir_output+"\\"+output_name, dims_LAI[1], dims_LAI[0], 3, gdal.GDT_Float32)
    ds.SetGeoTransform(geo_out)
    ds.SetProjection(lai_prj)
    band=ds.GetRasterBand(1)
    band.WriteArray(tt_canopy)
    band.SetNoDataValue(NoDataValue)
    band.FlushCache()
    band=ds.GetRasterBand(2)
    band.WriteArray(tt_soil)
    band.SetNoDataValue(NoDataValue)
    band.FlushCache()
    band=ds.GetRasterBand(3)
    band.WriteArray(t_coeff)
    band.SetNoDataValue(NoDataValue)
    band.FlushCache()
    ds = None
    print("Done!!! Temperature separation is finished.")
    
    # delete the middle products
    if MiddleProducts == "No":
        os.remove(dir_output+"\\"+name_resample)
        os.remove(dir_output+"\\"+name_clip_opt)
        os.remove(dir_output+"\\"+name_clip_tr)
        os.remove(dir_output+"\\"+name_resample_dsm)
        os.remove(dir_output+"\\"+name_clip_dsm)
        os.remove(dir_output+"\\"+name_hillshade)
    else:
        pass
    
    return("Temperature separation is finished!!!")
Example No. 8
def main():
    '''Main RLOS'''
    try:
        # get/set initial environment
        env.overwriteOutput = True
        installInfo = arcpy.GetInstallInfo("desktop")

        # get observer's visibility modifier maximums
        obsMaximums = maxVizModifiers(observers)
        removeSPOT = obsMaximums['REMOVE_SPOT']
        if removeSPOT is True:
            arcpy.AddMessage("Observer SPOT is <NULL>, deleteing field ...")
            arcpy.DeleteField_management(observers, "SPOT")

        # Do a Minimum Bounding Geometry (MBG) on the input observers
        observers_mbg = os.path.join(env.scratchWorkspace, "observers_mbg")
        delete_me.append(observers_mbg)
        arcpy.AddMessage("Finding observer's minimum bounding envelope ...")
        # ENVELOPE would be better but would make it ArcInfo-only.
        arcpy.MinimumBoundingGeometry_management(observers, observers_mbg,
                                                 "RECTANGLE_BY_AREA")

        # Now find the center of the (MBG)
        arcpy.AddMessage("Finding center of observers ...")
        mbgCenterPoint = os.path.join(env.scratchWorkspace, "mbgCenterPoint")
        mbgExtent = arcpy.Describe(observers_mbg).extent
        mbgSR = arcpy.Describe(observers_mbg).spatialReference
        mbgCenterX = mbgExtent.XMin + ((mbgExtent.XMax - mbgExtent.XMin) / 2.0)
        mbgCenterY = mbgExtent.YMin + ((mbgExtent.YMax - mbgExtent.YMin) / 2.0)
        arcpy.CreateFeatureclass_management(os.path.dirname(mbgCenterPoint),
                                            os.path.basename(mbgCenterPoint),
                                            "POINT", "#", "DISABLED",
                                            "DISABLED", mbgSR)
        mbgShapeFieldName = arcpy.Describe(mbgCenterPoint).ShapeFieldName
        rows = arcpy.InsertCursor(mbgCenterPoint)
        feat = rows.newRow()
        feat.setValue(mbgShapeFieldName, arcpy.Point(mbgCenterX, mbgCenterY))
        rows.insertRow(feat)
        del rows
        delete_me.append(mbgCenterPoint)

        # Get the maximum radius of the observers
        maxRad = obsMaximums['RADIUS2']
        maxOffset = obsMaximums['OFFSETA']
        horizonDistance = 0.0
        z_factor = float(zfactor(observers))
        if RADIUS2_to_infinity is True:
            ''' if going to infinity what we really need is the distance to the horizon
            based on height/elevation'''
            arcpy.AddMessage("Finding horizon distance ...")
            result = arcpy.GetCellValue_management(
                input_surface,
                str(mbgCenterX) + " " + str(mbgCenterY))
            centroid_elev = result.getOutput(0)
            R2 = float(centroid_elev) + float(maxOffset)
            # length, in meters, of semimajor axis of WGS_1984 spheroid.
            R = 6378137.0
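            # Straight-line distance to the horizon for an eye height h above a
            # sphere of radius R is sqrt((R + h)**2 - R**2); here h = R2
            # (centroid elevation plus the maximum observer offset).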
            horizonDistance = math.sqrt(math.pow((R + R2), 2) - math.pow(R, 2))
            arcpy.AddMessage(str(horizonDistance) + " meters.")
            horizonExtent = (str(mbgCenterX - horizonDistance) + " " +
                             str(mbgCenterY - horizonDistance) + " " +
                             str(mbgCenterX + horizonDistance) + " " +
                             str(mbgCenterY + horizonDistance))
            # since we are doing infinity we can drop the RADIUS2 field
            arcpy.AddMessage(
                "Analysis to edge of surface, dropping RADIUS2 field ...")
            arcpy.DeleteField_management(observers, "RADIUS2")
        else:
            pass

        # reset center of AZED using Lat/Lon of MBG center point
        # Project point to WGS 84
        arcpy.AddMessage("Recentering Azimuthal Equidistant to centroid ...")
        mbgCenterWGS84 = os.path.join(env.scratchWorkspace, "mbgCenterWGS84")
        arcpy.Project_management(mbgCenterPoint, mbgCenterWGS84, GCS_WGS_1984)
        arcpy.AddXY_management(mbgCenterWGS84)
        pointx = 0.0
        pointy = 0.0
        shapeField = arcpy.Describe(mbgCenterWGS84).ShapeFieldName
        rows = arcpy.SearchCursor(mbgCenterWGS84)
        for row in rows:
            feat = row.getValue(shapeField)
            pnt = feat.getPart()
            pointx = pnt.X
            pointy = pnt.Y
        del row
        del rows
        # write new central meridian and latitude of origin...
        strAZED = '''PROJCS["World_Azimuthal_Equidistant",
        GEOGCS["GCS_WGS_1984",
        DATUM["D_WGS_1984",
        SPHEROID["WGS_1984",6378137.0,298.257223563]],
        PRIMEM["Greenwich",0.0],
        UNIT["Degree",0.0174532925199433]],
        PROJECTION["Azimuthal_Equidistant"],
        PARAMETER["False_Easting",0.0],
        PARAMETER["False_Northing",0.0],
        PARAMETER["Central_Meridian",{0}],
        PARAMETER["Latitude_Of_Origin",{1}],
        UNIT["Meter",1.0],
        AUTHORITY["ESRI",54032]]'''.format(pointx, pointy)
        delete_me.append(mbgCenterWGS84)

        # Clip the input surface to the maximum visibility range and extract
        # it to a 1000 x 1000 raster
        # if going to infinity then clip to horizon extent
        surf_extract = os.path.join(env.scratchWorkspace, "surf_extract")
        if RADIUS2_to_infinity is True:
            mbgBuffer = os.path.join(env.scratchWorkspace, "mbgBuffer")
            arcpy.Buffer_analysis(observers_mbg, mbgBuffer, horizonDistance)
            delete_me.append(mbgBuffer)
            surfaceSR = arcpy.Describe(input_surface).spatialReference
            mbgBufferPrj = os.path.join(env.scratchWorkspace, "mbgBufferPrj")
            arcpy.Project_management(mbgBuffer, mbgBufferPrj, surfaceSR)
            delete_me.append(mbgBufferPrj)
            mbgBufferPrjExtent = arcpy.Describe(mbgBufferPrj).extent
            cellSize = max(
                float(mbgBufferPrjExtent.width) / 1000.0,
                float(mbgBufferPrjExtent.height) / 1000.0)
            env.cellSize = cellSize
            arcpy.AddMessage(
                "Clipping and resampling surface to analysis area with " +
                str(cellSize) + " meter cell size ...")
            arcpy.Clip_management(input_surface, "#", surf_extract,
                                  mbgBufferPrj)
        else:
            # buffer MBG by max RADIUS 2 + 10%
            mbgBuffer = os.path.join(env.scratchWorkspace, "mbgBuffer")
            arcpy.Buffer_analysis(observers_mbg, mbgBuffer,
                                  obsMaximums['RADIUS2'])
            delete_me.append(mbgBuffer)
            # project buffer to surface SR
            surfaceSR = arcpy.Describe(input_surface).spatialReference
            mbgBufferPrj = os.path.join(env.scratchWorkspace, "mbgBufferPrj")
            arcpy.Project_management(mbgBuffer, mbgBufferPrj, surfaceSR)
            delete_me.append(mbgBufferPrj)
            # clip surface to projected buffer
            arcpy.Clip_management(input_surface, "#", surf_extract,
                                  mbgBufferPrj)
        delete_me.append(surf_extract)

        # Project surface to the new AZED
        extract_prj = os.path.join(env.scratchWorkspace, "extract_prj")
        arcpy.AddMessage("Projecting surface ...")
        arcpy.ProjectRaster_management(surf_extract, extract_prj, strAZED)
        delete_me.append(extract_prj)

        # Project observers to the new AZED
        obs_prj = os.path.join(env.scratchWorkspace, "obs_prj")
        arcpy.AddMessage("Projecting observers ...")
        arcpy.Project_management(observers, obs_prj, strAZED)
        delete_me.append(obs_prj)

        # Project the MBG buffer to AZED
        obs_buf = os.path.join(env.scratchWorkspace, "obs_buf")
        # if RADIUS2_to_infinity == True:
        #    arcpy.Buffer_analysis(obs_prj,obs_buf,horizonDistance)
        # else:
        #    arcpy.Project_management(mbgBufferPrj,obs_buf,strAZED)
        arcpy.Project_management(mbgBufferPrj, obs_buf, strAZED)
        delete_me.append(obs_buf)

        # Finally ... run Viewshed
        arcpy.AddMessage("Calculating Viewshed ...")
        vshed = os.path.join(env.scratchWorkspace, "vshed")
        delete_me.append(vshed)
        outVshed = sa.Viewshed(extract_prj, obs_prj, 1.0, "CURVED_EARTH",
                               terrestrial_refractivity_coefficient)
        outVshed.save(vshed)

        # Raster To Polygon
        arcpy.AddMessage("Converting to polygons ...")
        ras_poly = os.path.join(env.scratchWorkspace, "ras_poly")
        arcpy.RasterToPolygon_conversion(vshed, ras_poly, polygon_simplify)
        delete_me.append(ras_poly)

        # clip output polys to buffer
        if RADIUS2_to_infinity is not True:
            out_buf = os.path.join(env.scratchWorkspace, "out_buf")
            arcpy.Buffer_analysis(obs_prj, out_buf, "RADIUS2")
            delete_me.append(out_buf)
            arcpy.Clip_analysis(ras_poly, out_buf, output_rlos)
        else:
            arcpy.CopyFeatures_management(ras_poly, output_rlos)

        # set output
        arcpy.SetParameter(2, output_rlos)

        # cleanup
        arcpy.AddMessage("Removing scratch datasets:")
        for ds in delete_me:
            arcpy.AddMessage(str(ds))
            arcpy.Delete_management(ds)

    except arcpy.ExecuteError:
        # Get the tool error messages
        msgs = arcpy.GetMessages()
        arcpy.AddError(msgs)
        # print msgs #UPDATE2to3
        print(msgs)

    except:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]

        # Concatenate information together concerning the error into a
        # message string
        pymsg = ("PYTHON ERRORS:\nTraceback info:\n" + tbinfo +
                 "\nError Info:\n" + str(sys.exc_info()[1]))
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"

        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)

        # Print Python error messages for use in Python / Python Window
        # print pymsg + "\n" #UPDATE2to3
        print(pymsg + "\n")
        # print msgs #UPDATE2to3
        print(msgs)
Example No. 9
    #change i to output resolution for output name
    res = i * 2
    #concatenate output folder with resolution values
    outname = r"D:\Projects_new\NE_wetlands\Outputs\SpatialAggTests\cti_raw" + str(
        res)
    #run aggregation
    outAggreg = Aggregate(cti, i, "MEAN", "TRUNCATE", "DATA")
    #save output
    outAggreg.save(outname)

#load and clip landcover data
extent = str(lidarDTM.extent)
LandcoverClip = arcpy.Clip_management(
    in_raster=
    "D:/Projects_new/NE_wetlands/Landcover/lcm-2007-25m_1201493/lcm2007_25m_gb.tif",
    rectangle="330000 380000 390000 430000",
    out_raster="D:/Temps/LandcoverClip",
    clipping_geometry="NONE",
    maintain_clipping_extent="NO_MAINTAIN_EXTENT")
LandcoverClip = Raster("D:/Temps/LandcoverClip")

#reclassify landcover into VH,M,L,VL drainage categories
landcoverClass = Reclassify(
    Landcover, "VALUE",
    RemapValue([["0", "NoData"], ["1", 3], ["2", 3], ["3", 2], ["4", 2],
                ["5", 2], ["6", 2], ["8", 2], ["9", 4], ["10", 3], ["11", 3],
                ["12", 4], ["14", 1], ["15", 3], ["16", 3], ["18", 3],
                ["19", 3], ["20", 3], ["21", 4], ["22", 1], ["23", 1]]))

#resamples to match the 2m of the lidarDTM
landcover2m = arcpy.Resample_management(landcoverClass, "landcover2m.tif", "2")
Example No. 10
import arcpy
from arcpy.sa import *
import pickle
import numpy as np

arcpy.env.workspace = r"X:/DBA/Databases/IRIS/#2017/machinelearning/TIF"

ML_Y_Raster = Raster('2014_163000_563000_RGB_hrl.tif')

myExtent = ML_Y_Raster.extent

Xmin = myExtent.XMin
Ymin = myExtent.YMin
Xmax = myExtent.XMax
Ymax = myExtent.YMax

xwaarde = Xmin + 800
ywaarde = Ymin + 370
rasterGrootte = 100
Bestandsnaam = str(xwaarde) + '_' + str(ywaarde) + '.tif'
print(Bestandsnaam)
ClipExtent = str(xwaarde) + ' ' + str(ywaarde) + ' ' + str(
    xwaarde + rasterGrootte) + ' ' + str(ywaarde + rasterGrootte)
print(ClipExtent)
arcpy.Clip_management(
    ML_Y_Raster, ClipExtent,
    'X:/DBA/Databases/IRIS/#2017/machinelearning/tegels 100x100/' +
    Bestandsnaam, "#", "#", "NONE")
Example No. 11
import os
import arcpy
import numpy as np

def Footprint_Digital_Results(footprint,
                              tseb_r_1,
                              tseb_r_2,
                              temp_image,
                              dir_out,
                              lai_image,
                              fc_image,
                              n_rn,
                              n_h,
                              n_le,
                              n_g,
                              n_t_et,
                              pixel_size,
                              upper_boundary,
                              lower_boundary,
                              delete_tmp_files="Yes",
                              single_layer_temp="Yes"):
    '''
    parameters:
    footprint: directory of the footprint image.
    tseb_r_1: directory of the TSEB result: multiple-layer image.
    tseb_r_2: directory of the TSEB ancillary result: multiple-layer image.
    temp_image: directory of the temperature image. It can be a single-layer or multiple-layer image, but "single_layer_temp" must be set correspondingly.
    dir_out: directory for the outputs of this script; they are intermediate transform results and can be deleted.
    lai_image: directory of the LAI image.
    fc_image: directory of the fractional cover image.
    n_rn, n_h, n_le, n_g: the layer number of the net radiation, sensible heat flux, latent heat flux, and soil surface heat flux.
    n_t_et: the layer number of the ratio between canopy latent heat flux and total latent heat flux.
    pixel_size: the pixel size (e.g., 3.6 meter by 3.6 meter).
    upper_boundary: the upper threshold above which a flux value at a pixel is considered implausible, e.g., 10,000 W/m2 for LE.
    lower_boundary: the lower threshold below which a flux value at a pixel is considered implausible, e.g., -1,500 W/m2 for LE.
    delete_tmp_files: default is "Yes", which means the temporary (intermediate) files are deleted at the end. Any other (string) input keeps the temporary files.
    single_layer_temp: default is "Yes", which means a single-layer temperature image. Any other (string) input means a multiple-layer temperature image,
                        and the 1st and 2nd layers must be the canopy and soil temperature, respectively.

    return:
    Net radiation, sensible heat flux, latent heat flux, soil surface heat flux, canopy latent heat flux, 
            LAI, single-layer mean temperature, canopy temperature, and soil temperature within the footprint area.
    '''

    cellsize = str(pixel_size) + " " + str(pixel_size)
    arcpy.Resample_management(in_raster=footprint,
                              out_raster=dir_out + "\\footprint_resample.tif",
                              cell_size=cellsize,
                              resampling_type="CUBIC")

    Grid_Describe = arcpy.Describe(tseb_r_1)
    Grid_Extent = Grid_Describe.extent
    extent = "{} {} {} {}".format(Grid_Extent.XMin, Grid_Extent.YMin,
                                  Grid_Extent.XMax, Grid_Extent.YMax)

    arcpy.Clip_management(in_raster=dir_out + "\\footprint_resample.tif",
                          rectangle=extent,
                          out_raster=dir_out + "\\footprint_clip.tif",
                          in_template_dataset=tseb_r_1,
                          nodata_value="0.000000e+00",
                          clipping_geometry="NONE",
                          maintain_clipping_extent="MAINTAIN_EXTENT")

    raster_footprint = arcpy.RasterToNumPyArray(dir_out +
                                                "\\footprint_clip.tif",
                                                nodata_to_value=-9999)
    raster_footprint[raster_footprint > 1] = np.nan
    raster_footprint[raster_footprint < 0] = np.nan
    raster_tseb = arcpy.RasterToNumPyArray(tseb_r_1, nodata_to_value=-9999)
    raster_tseb_ancillary = arcpy.RasterToNumPyArray(tseb_r_2,
                                                     nodata_to_value=np.nan)
    raster_lai = arcpy.RasterToNumPyArray(lai_image, nodata_to_value=np.nan)
    raster_fc = arcpy.RasterToNumPyArray(fc_image, nodata_to_value=np.nan)

    raster_rn = raster_tseb[n_rn, :, :]
    raster_rn[raster_rn > upper_boundary] = np.nan
    raster_rn[raster_rn < lower_boundary] = np.nan
    out_rn = raster_rn * raster_footprint

    raster_h = raster_tseb[n_h, :, :]
    raster_h[raster_h > upper_boundary] = np.nan
    raster_h[raster_h < lower_boundary] = np.nan
    out_h = raster_h * raster_footprint

    raster_le = raster_tseb[n_le, :, :]
    raster_le[raster_le > upper_boundary] = np.nan
    raster_le[raster_le < lower_boundary] = np.nan
    out_le = raster_le * raster_footprint

    raster_g = raster_tseb[n_g, :, :]
    raster_g[raster_g > upper_boundary] = np.nan
    raster_g[raster_g < lower_boundary] = np.nan
    out_g = raster_g * raster_footprint

    raster_t = raster_tseb_ancillary[n_t_et, :, :]
    raster_t[raster_t > 1] = np.nan
    raster_t[raster_t < 0] = np.nan
    out_t = out_le * raster_t

    raster_tet = raster_footprint * 0 + 1
    out_tet = raster_tet * raster_tseb_ancillary[n_t_et, :, :]
    out_tet = np.nanmean(out_tet)

    raster_lai[raster_lai > 5] = np.nan
    raster_lai[raster_lai < 0] = np.nan
    out_lai = raster_lai * (raster_footprint * 0 + 1)

    raster_fc[raster_fc > 1] = np.nan
    raster_fc[raster_fc < 0] = np.nan
    out_fc = raster_fc * (raster_footprint * 0 + 1)

    if single_layer_temp == "Yes":
        raster_temp = arcpy.RasterToNumPyArray(temp_image,
                                               nodata_to_value=np.nan)
        raster_temp[raster_temp > 350] = np.nan  # 76.85 C
        raster_temp[raster_temp < 270] = np.nan  # -3.15 C
        out_temp = raster_temp * (raster_footprint * 0 + 1)

        out_rn = np.nansum(out_rn)
        out_h = np.nansum(out_h)
        out_le = np.nansum(out_le)
        out_g = np.nansum(out_g)
        out_t = np.nansum(out_t)
        out_lai = np.nanmean(out_lai)
        out_fc = np.nanmean(out_fc)
        out_temp = np.nanmean(out_temp)
        out_temp_canopy = np.nan
        out_temp_soil = np.nan
        print("Rn - Net radiation:", round(out_rn, 3))
        print("H - Sensible heat flux:", round(out_h, 3))
        print("LE - Latent heat flux:", round(out_le, 3))
        print("G - Soil surface heat flux:", round(out_g, 3))
        print("T - Canopy latent heat flux:", round(out_t, 3))
        print("ET partitioning:", round(out_tet, 3))
        print("\nLAI:", round(out_lai, 3))
        print("Fractional cover:", round(out_fc, 3))
        print("Temperautre:", round(out_temp, 3), "K")
    else:
        # canopy, soil temperature are required at the 1st and 2nd layer of the image
        raster_temp = arcpy.RasterToNumPyArray(temp_image,
                                               nodata_to_value=np.nan)
        raster_temp_canopy = raster_temp[0, :, :]
        raster_temp_soil = raster_temp[1, :, :]

        raster_temp_canopy[raster_temp_canopy > 350] = np.nan  # 76.85 C
        raster_temp_canopy[raster_temp_canopy < 270] = np.nan  # -3.15 C
        out_temp_canopy = raster_temp_canopy * (raster_footprint * 0 + 1)

        raster_temp_soil[raster_temp_soil > 350] = np.nan  # 76.85 C
        raster_temp_soil[raster_temp_soil < 270] = np.nan  # -3.15 C
        out_temp_soil = raster_temp_soil * (raster_footprint * 0 + 1)

        out_rn = np.nansum(out_rn)
        out_h = np.nansum(out_h)
        out_le = np.nansum(out_le)
        out_g = np.nansum(out_g)
        out_t = np.nansum(out_t)
        out_lai = np.nanmean(out_lai)
        out_fc = np.nanmean(out_fc)
        out_temp_canopy = np.nanmean(out_temp_canopy)
        out_temp_soil = np.nanmean(out_temp_soil)
        out_temp = np.nan
        print("Rn - Net radiation:", round(out_rn, 3))
        print("H - Sensible heat flux:", round(out_h, 3))
        print("LE - Latent heat flux:", round(out_le, 3))
        print("G - Soil surface heat flux:", round(out_g, 3))
        print("T - Canopy latent heat flux:", round(out_t, 3))
        print("ET partitioning:", round(out_tet, 3))
        print("\nLAI:", round(out_lai, 3))
        print("Fractional cover:", round(out_fc, 3))
        print("Canopy temperautre:", round(out_temp_canopy, 3), "K")
        print("Soil temperautre:", round(out_temp_soil, 3), "K")

    if delete_tmp_files == "Yes":
        os.remove(dir_out + "\\footprint_resample.tif")
        os.remove(dir_out + "\\footprint_clip.tif")
    else:
        print("Temporary files are saved in the output folder.")

    return (out_rn, out_h, out_le, out_g, out_t, out_tet, out_lai, out_fc,
            out_temp, out_temp_canopy, out_temp_soil)
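
# A minimal call sketch (hedged: the paths and layer indices below are hypothetical);
# the unpacking follows the return order documented in the docstring:
(rn, h, le, g, t_le, tet, lai, fc,
 temp, temp_canopy, temp_soil) = Footprint_Digital_Results(
    footprint=r"D:\work\footprint.tif",
    tseb_r_1=r"D:\work\tseb_fluxes.tif",
    tseb_r_2=r"D:\work\tseb_ancillary.tif",
    temp_image=r"D:\work\trad.tif",
    dir_out=r"D:\work\tmp",
    lai_image=r"D:\work\lai.tif",
    fc_image=r"D:\work\fc.tif",
    n_rn=0, n_h=1, n_le=2, n_g=3, n_t_et=0,
    pixel_size=3.6,
    upper_boundary=10000, lower_boundary=-1500,
    delete_tmp_files="Yes",
    single_layer_temp="Yes")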
Example No. 12
print "Importing Arcpy...." + str(time.ctime())
import arcpy
print "         Arcpy imported! " + str(time.ctime())

print "Setting local variables" + str(time.ctime())
arcpy.env.workspace = "D:/GD/IHPAN/Gaul/_Mapy/_metaarkusze/data.gdb"
# mxd = arcpy.mapping.MapDocument("D:/GD/WGiSR/_Konferencje/Plener 2018/heatMap/HeatMap.mxd")
# df = arcpy.mapping.ListDataFrames(mxd)[0]
print "         Local variables set!" + str(time.ctime())

print "Clipping..."  + str(time.ctime())
arcpy.Clip_management(
    r"GAUL_RASTER\Babimost_A2_B2_meta.tif",
    "265690.022579334 444111.323305845 333117.820225502 527358.613670745",
    "D:\GD\IHPAN\Gaul\_Mapy\_metaarkusze\data.gdb\Babimost_clip",
    r"GAUL_MASKS\POWIAT_Babimost",
    256,
    "ClippingGeometry",
    "MAINTAIN_EXTENT")
arcpy.Clip_management(
    r"GAUL_RASTER\Poznan_A1-B2_meta.tif",
    "299400.899102051 470779.676501803 382321.502278291 540453.896805332",
    "D:\GD\IHPAN\Gaul\_Mapy\_metaarkusze\data.gdb\Poznan_clip",
    r"GAUL_MASKS\POWIAT_Poznań",
    256,
    "ClippingGeometry",
    "MAINTAIN_EXTENT")
arcpy.Clip_management(
    r"GAUL_RASTER\Srem_A2-B2_meta.tif",
    "335720.040082338 441921.717819948 400351.860474886 515204.67834739",
    "D:\GD\IHPAN\Gaul\_Mapy\_metaarkusze\data.gdb\Srem_clip",
Example No. 13
# ---------------------------------------------------------------------------
# domain_buffer_clip.py
# Created on: 2017-02-22 10:03:38.00000
#   (generated by ArcGIS/ModelBuilder)
# Description:
# ---------------------------------------------------------------------------

# Import arcpy module
import arcpy

# Check out any necessary licenses
arcpy.CheckOutExtension("3D")

# Local variables:
v1008_1_tif = "C:\\Users\\adamclark\\Desktop\\deleteme\\1008_1.tif"
v1008_1_domain_py_shp = "C:\\Users\\adamclark\\Desktop\\deleteme\\1008_1_domain_py.shp"
v1008_1_domain_300buf_py_shp = "C:\\Users\\adamclark\\Desktop\\deleteme\\1008_1_domain_300buf_py.shp"
v1008_1_clip_tif = "C:\\Users\\adamclark\\Desktop\\deleteme\\1008_1_clip.tif"

### Process: Raster Domain
##arcpy.RasterDomain_3d(v1008_1_tif, v1008_1_domain_py_shp, "POLYGON")
##
### Process: Buffer
##arcpy.Buffer_analysis(v1008_1_domain_py_shp, v1008_1_domain_300buf_py_shp, "-300 Meters", "FULL", "ROUND", "NONE", "")
desc = arcpy.Describe(v1008_1_domain_300buf_py_shp)
ExtObj = desc.extent
clip = "%d %d %d %d" % (ExtObj.XMin, ExtObj.YMin, ExtObj.XMax, ExtObj.YMax)
# Process: Clip
arcpy.Clip_management(v1008_1_tif, clip, v1008_1_clip_tif,
                      v1008_1_domain_300buf_py_shp, "256", "ClippingGeometry")
Example No. 14
PStransit = os.path.join(rootdir, 'results/transit.gdb/PStransit')
PStransitbus = PStransit + '_busroutes'
PStransitbus_proj = PStransit + '_busroutes_proj'
PStransitbus_splitdiss = PStransitbus_proj + '_splitv_diss'
PStransitduplitab = os.path.join(rootdir, 'results/transit.gdb/explFindID')
PStransitras = os.path.join(rootdir, 'results/transit.gdb/PStransit_ras')

trees_aea = os.path.join(gdb, 'trees_aea')

########################################################################################################################
# PREPARE LAND USE DATA FOR PUGET SOUND
########################################################################################################################
#Export NLCD data to Puget Sound scale
arcpy.env.snapRaster = NLCD_imp
arcpy.Clip_management(in_raster=NLCD_reclass, rectangle=PSwatershed, out_raster=NLCD_reclass_PS)
arcpy.Clip_management(in_raster=NLCD, rectangle=PSwatershed, out_raster=NLCD_PS)
#Export NLCD impervious data
arcpy.Clip_management(in_raster=NLCD_imp, rectangle=PSwatershed, out_raster=NLCD_imp_PS)
#Compute focal stats
imp_mean = arcpy.sa.FocalStatistics(NLCD_imp_PS, neighborhood = NbrCircle(3, "CELL"), statistics_type= 'MEAN')
imp_mean.save(NLCD_imp_PS + '_mean.tif')

########################################################################################################################
# PREPARE VARIABLES TO CREATE HEATMAPS (Puget Sound): FUNCTIONAL-CLASS BASED AADT AND SPEED LIMIT, SLOPE, AND TRANSIT ROUTES
# These outputs were used for initial moss sampling
########################################################################################################################
#-----------------------------------------------------------------------------------------------------------------------
# Prepare OSM data to create heatmap based on roads functional class for all Puget Sound OSM roads
#-----------------------------------------------------------------------------------------------------------------------
arcpy.env.workspace = gdb
Example No. 15
def soil_depth_calc(soil_parent_material_1, advanced_superficial_deposit,
                    DTM_clip_np, DTM_cell_size, buffer_catchment,
                    buffer_extent, river_catchment_BNG, catch_extent,
                    bottom_left_corner):

    adaquate_soil_data_provided = False
    if soil_parent_material_1 and soil_parent_material_1 != '#':
        if advanced_superficial_deposit and advanced_superficial_deposit != '#':
            arcpy.AddMessage("Adequate soil information provided")
            adaquate_soil_data_provided = True

    if soil_parent_material_1 and soil_parent_material_1 != '#':
        # Check the soil parent type
        desc_soil_depth = arcpy.Describe(soil_parent_material_1)
        soil_depth_raster_feature = desc_soil_depth.datasetType
        arcpy.AddMessage("The soil depth dataset is a " +
                         soil_depth_raster_feature)

        # process the soil parent material for entry into the model.

        if soil_depth_raster_feature == 'FeatureClass':

            # Clip_analysis requires an output feature class; the name below is a
            # placeholder added here, not from the original script.
            soil_parent_material_1_clip = arcpy.Clip_analysis(
                soil_parent_material_1, river_catchment_BNG,
                "soil_parent_material_1_clip")

            arcpy.AddField_management(soil_parent_material_1_clip, "R_DEPTH",
                                      "FLOAT")

            arcpy.AddMessage("Added new fields to the table")

            # Create update cursor for feature class
            rows = arcpy.UpdateCursor(soil_parent_material_1_clip)

            for row in rows:
                if row.SOIL_DEPTH == "DEEP":
                    row.R_DEPTH = 2.0

                elif row.SOIL_DEPTH == "DEEP-INTERMEDIATE":
                    row.R_DEPTH = 1.5

                elif row.SOIL_DEPTH == "INTERMEDIATE":
                    row.R_DEPTH = 1.0

                elif row.SOIL_DEPTH == "INTERMEDIATE-SHALLOW":
                    row.R_DEPTH = 0.5

                elif row.SOIL_DEPTH == "SHALLOW":
                    row.R_DEPTH = 0.25

                elif row.SOIL_DEPTH == "NA":
                    row.R_DEPTH = 0.0

                else:
                    row.R_DEPTH = 0.0

                rows.updateRow(row)

            # Delete cursor and row objects to remove locks on the data
            del row
            del rows

            soil_depth_raster = arcpy.FeatureToRaster_conversion(
                soil_parent_material_1_clip, "R_DEPTH", '#', DTM_cell_size)

            soil_depth_raster_clip = arcpy.Clip_management(
                soil_depth_raster, catch_extent, "MODEL_BGS_SOIL_DEPTH",
                river_catchment_BNG, "#", "ClippingGeometry")
            arcpy.AddMessage(
                "Soil depth field converted to raster and clipped")

        else:
            Soil_clip = arcpy.Clip_management(soil_parent_material_1,
                                              catch_extent,
                                              "MODEL_BGS_SOIL_DEPTH",
                                              river_catchment_BNG, "#",
                                              "ClippingGeometry")

    # Process and clip the advanced superficial deposit data ready to go into the model.
    if advanced_superficial_deposit and advanced_superficial_deposit != '#':
        # Check superficial type
        desc_advanced_superficial_deposit = arcpy.Describe(
            advanced_superficial_deposit)
        advanced_superficial_deposit_raster_feature = desc_advanced_superficial_deposit.datasetType
        arcpy.AddMessage("The advanced superficial deposit is a " +
                         advanced_superficial_deposit_raster_feature)

        # Check land cover cell size
        advanced_superficial_deposit_cell_size = desc_advanced_superficial_deposit.meanCellHeight
        arcpy.AddMessage("The advanced superficial deposit cell size is " +
                         str(advanced_superficial_deposit_cell_size))

        if advanced_superficial_deposit_cell_size != DTM_cell_size:

            arcpy.AddMessage(
                "The cell size of the advanced superficial deposit you have provided is different to the DTM"
            )
            advanced_superficial_deposit_clip = arcpy.Clip_management(
                advanced_superficial_deposit, buffer_extent, "Temp11",
                buffer_catchment, "#", "ClippingGeometry")
            arcpy.AddMessage(
                "Advanced superficial deposit clipped to enlarged catchment")
            advanced_superficial_deposit_correct_cell = arcpy.Resample_management(
                advanced_superficial_deposit_clip, "Temp12", DTM_cell_size,
                "NEAREST")
            arcpy.AddMessage(
                "Cell size of advanced superficial deposit converted to same as DTM"
            )
            #advanced_superficial_deposit_final_clip = arcpy.Clip_management(advanced_superficial_deposit_correct_cell, catch_extent, "MODEL_SUP_DEPTH", river_catchment_BNG, "#", "ClippingGeometry")
            advanced_superficial_deposit_final_clip = arcpy.gp.ExtractByMask_sa(
                advanced_superficial_deposit_correct_cell, river_catchment_BNG,
                "MODEL_SUP_DEPTH")
            arcpy.AddMessage(
                "Advanced superficial deposit correct cell clipped to catchment"
            )

        else:
            advanced_superficial_deposit_final_clip = arcpy.gp.ExtractByMask_sa(
                advanced_superficial_deposit, river_catchment_BNG,
                "MODEL_SUP_DEPTH")

        neighborhood = NbrRectangle(200, 200, "Map")

        # Execute FocalStatistics
        focal_advanced_superficial_deposit = FocalStatistics(
            "MODEL_SUP_DEPTH", neighborhood, "MEAN", "")
        #focal_advanced_superficial_deposit_final_clip = arcpy.Clip_management(focal_advanced_superficial_deposit, catch_extent, "MODEL_FOCAL_SUP_DEPTH", river_catchment_BNG, "#", "ClippingGeometry")
        focal_advanced_superficial_deposit_final_clip = arcpy.gp.ExtractByMask_sa(
            focal_advanced_superficial_deposit, river_catchment_BNG,
            "MODEL_FOCAL_SUP_DEPTH")
        arcpy.AddMessage("Focal statistics calculated")

        # Convert the superficial deposit rasters to NumPy arrays
        advanced_superficial_deposit_np = arcpy.RasterToNumPyArray(
            "MODEL_SUP_DEPTH", '#', '#', '#', 0)
        focal_advanced_superficial_deposit_np = arcpy.RasterToNumPyArray(
            "MODEL_FOCAL_SUP_DEPTH", '#', '#', '#', 0)
        final_depth = np.zeros_like(DTM_clip_np, dtype=float)

        np.putmask(
            final_depth,
            np.logical_and(focal_advanced_superficial_deposit_np > 0,
                           final_depth >= 0),
            focal_advanced_superficial_deposit_np)
        np.putmask(
            final_depth,
            np.logical_and(advanced_superficial_deposit_np > 0,
                           advanced_superficial_deposit_np > final_depth),
            advanced_superficial_deposit_np)

        final_depth[DTM_clip_np == -9999] = -9999

        soil_depth_raster = arcpy.NumPyArrayToRaster(final_depth,
                                                     bottom_left_corner,
                                                     DTM_cell_size,
                                                     DTM_cell_size, -9999)
        soil_depth_raster.save("MODEL_ASD_soil_depth")

    # Soil depth data
    if adaquate_soil_data_provided == False:
        arcpy.AddMessage(
            "No soil depth or particle data has been provided, therefore a default depth of 1 m will be used for missing areas"
        )
        soil_depth = np.empty_like(DTM_clip_np, dtype=float)
        soil_depth[:] = 1.0
        soil_depth[DTM_clip_np == -9999] = -9999
        soil_depth_raster = arcpy.NumPyArrayToRaster(soil_depth,
                                                     bottom_left_corner,
                                                     DTM_cell_size,
                                                     DTM_cell_size, -9999)
        soil_depth_raster = arcpy.Clip_management(soil_depth_raster,
                                                  catch_extent,
                                                  "MODEL_general_soil_depth",
                                                  river_catchment_BNG, "#",
                                                  "ClippingGeometry")

    arcpy.AddMessage("Soil depth calculated")
    arcpy.AddMessage("-------------------------")
#Author: Nasrin Nahar
#This code clips the cloud-free NDVI composite to the refugee camp boundary using arcpy's Clip_management tool.
import arcpy
from arcpy import env

env.workspace = r"D:\Rohingya Project-20191106T153319Z-001\Landsat data"
env.overwriteOutput = True
in_raster = r"D:\Rohingya Project-20191106T153319Z-001\Landsat data\Output\cloud_free_composite.tif"
clipfc = r"D:\Rohingya Project-20191106T153319Z-001\Landsat data\Camp_R1\CampR1.shp"
arcpy.Clip_management(in_raster, "402075.686 2311450.636 429645.273 2350370.237",
                      "Output\\Camp_NDVI.tif", clipfc, "0", "ClippingGeometry",
                      "MAINTAIN_EXTENT")
Exemplo n.º 17
0
reader = csv.reader(f)
for row in reader:
    print row[0]
    row1 = str(row[0])
    tifFile = arcpy.sa.Raster(row1)
    print "working on " + row1
    count = count + 1
    print count
    extent = tifFile.extent
    XMin = extent.XMin
    if XMin > 0:
        domain = row1[:-4] + '_domain.shp'
        bufferDomain = row1[:-4] + '_buffer.shp'
        clip = row1[:-4] + '_clip.tif'
        print 'working on domain'
        arcpy.RasterDomain_3d(tifFile, domain, "POLYGON")
        print 'working on buffer'
        arcpy.Buffer_analysis(domain, bufferDomain, "-300 Meters", "FULL", "ROUND", "NONE", "")
        print 'working on clip'
        desc = arcpy.Describe(bufferDomain)
        ExtObj = desc.extent
        clipExtent = "%d %d %d %d" % (ExtObj.XMin, ExtObj.YMin, ExtObj.XMax, ExtObj.YMax)
        arcpy.Clip_management(tifFile, clipExtent, clip, bufferDomain, "256", "ClippingGeometry")
        arcpy.Delete_management(domain)
        arcpy.Delete_management(bufferDomain)
        print 'Finished working on ' + row1
    else:
        print "skipped " + row1
print "Finished clipping all .tif files"

     delete_me.append(mbgBuffer)
     surfaceSR = arcpy.Describe(input_surface).spatialReference
     mbgBufferPrj = os.path.join(envscratchworkspace, "mbgBufferPrj")
     arcpy.Project_management(mbgBuffer, mbgBufferPrj, surfaceSR)
     delete_me.append(mbgBufferPrj)
     mbgBufferPrjExtent = arcpy.Describe(mbgBufferPrj).extent
     cellSize = max(
         float(mbgBufferPrjExtent.width) / 1000.0,
         float(mbgBufferPrjExtent.height) / 1000.0)
     env.cellSize = cellSize
     arcpy.AddMessage(
         "Clipping and resampling surface to analysis area with " +
         str(cellSize) + " meter cell size ...")
     arcpy.Clip_management(input_surface, "#", surf_extract, mbgBufferPrj)
 else:
     # buffer MBG by max RADIUS 2 + 10%
     mbgBuffer = os.path.join(envscratchworkspace, "mbgBuffer")
     arcpy.Buffer_analysis(observers_mbg, mbgBuffer, obsMaximums['RADIUS2'])
     delete_me.append(mbgBuffer)
     # project buffer to surface SR
     surfaceSR = arcpy.Describe(input_surface).spatialReference
     mbgBufferPrj = os.path.join(envscratchworkspace, "mbgBufferPrj")
     arcpy.Project_management(mbgBuffer, mbgBufferPrj, surfaceSR)
     delete_me.append(mbgBufferPrj)
     # clip surface to projected buffer
     arcpy.Clip_management(input_surface, "#", surf_extract, mbgBufferPrj)
 delete_me.append(surf_extract)
Exemplo n.º 19
0
def workLines(lineNo):
    #Temporary files
    fileSeg = outWorkspace + "\\FLM_CO_Segment_" + str(lineNo) + ".shp"
    fileOrigin = outWorkspace + "\\FLM_CO_Origin_" + str(lineNo) + ".shp"
    fileDestination = outWorkspace + "\\FLM_CO_Destination_" + str(
        lineNo) + ".shp"
    fileBuffer = outWorkspace + "\\FLM_CO_Buffer_" + str(lineNo) + ".shp"
    fileClip = outWorkspace + "\\FLM_CO_Clip_" + str(lineNo) + ".tif"
    fileCostDa = outWorkspace + "\\FLM_CO_CostDa_" + str(lineNo) + ".tif"
    fileCostDb = outWorkspace + "\\FLM_CO_CostDb_" + str(lineNo) + ".tif"
    fileCorridor = outWorkspace + "\\FLM_CO_Corridor_" + str(lineNo) + ".tif"
    fileCorridorMin = outWorkspace + "\\FLM_CO_CorridorMin_" + str(
        lineNo) + ".tif"

    # Load segment list
    segment_list = []
    rows = arcpy.SearchCursor(fileSeg)
    shapeField = arcpy.Describe(fileSeg).ShapeFieldName
    for row in rows:
        feat = row.getValue(shapeField)  # creates a geometry object
        segmentnum = 0
        for segment in feat:  # loop through every segment in the line
            # loop through every vertex of every segment;
            # getPart() returns an array of points for a particular part of the geometry
            for pnt in feat.getPart(segmentnum):
                if pnt:  # add all vertices to segment_list
                    segment_list.append(arcpy.Point(float(pnt.X), float(pnt.Y)))

            segmentnum += 1
    del rows

    # Find origin and destination coordinates
    x1 = segment_list[0].X
    y1 = segment_list[0].Y
    x2 = segment_list[-1].X
    y2 = segment_list[-1].Y

    # Create origin feature class
    arcpy.CreateFeatureclass_management(outWorkspace, PathFile(fileOrigin),
                                        "POINT", Centerline_Feature_Class,
                                        "DISABLED", "DISABLED",
                                        Centerline_Feature_Class)
    cursor = arcpy.da.InsertCursor(fileOrigin, ["SHAPE@XY"])
    xy = (float(x1), float(y1))
    cursor.insertRow([xy])
    del cursor

    # Create destination feature class
    arcpy.CreateFeatureclass_management(outWorkspace,
                                        PathFile(fileDestination), "POINT",
                                        Centerline_Feature_Class, "DISABLED",
                                        "DISABLED", Centerline_Feature_Class)
    cursor = arcpy.da.InsertCursor(fileDestination, ["SHAPE@XY"])
    xy = (float(x2), float(y2))
    cursor.insertRow([xy])
    del cursor

    # Buffer around line
    arcpy.Buffer_analysis(fileSeg, fileBuffer,
                          Maximum_distance_from_centerline, "FULL", "ROUND",
                          "NONE", "", "PLANAR")

    # Clip cost raster using buffer
    DescBuffer = arcpy.Describe(fileBuffer)
    SearchBox = str(DescBuffer.extent.XMin) + " " + str(
        DescBuffer.extent.YMin) + " " + str(
            DescBuffer.extent.XMax) + " " + str(DescBuffer.extent.YMax)
    arcpy.Clip_management(Cost_Raster, SearchBox, fileClip, fileBuffer, "",
                          "ClippingGeometry", "NO_MAINTAIN_EXTENT")

    # Process: Cost Distance
    arcpy.gp.CostDistance_sa(fileOrigin, fileClip, fileCostDa, "", "", "", "",
                             "", "", "TO_SOURCE")
    arcpy.gp.CostDistance_sa(fileDestination, fileClip, fileCostDb, "", "", "",
                             "", "", "", "TO_SOURCE")

    # Process: Corridor
    arcpy.gp.Corridor_sa(fileCostDa, fileCostDb, fileCorridor)

    # Calculate minimum value of corridor raster
    RasterCorridor = arcpy.Raster(fileCorridor)
    CorrMin = float(RasterCorridor.minimum)

    # Set minimum as zero and save minimum file
    RasterCorridor = (RasterCorridor - CorrMin)
    RasterCorridor.save(fileCorridorMin)
    del RasterCorridor

    #Clean temporary files
    arcpy.Delete_management(fileSeg)
    arcpy.Delete_management(fileBuffer)
    arcpy.Delete_management(fileOrigin)
    arcpy.Delete_management(fileDestination)
    arcpy.Delete_management(fileClip)
    arcpy.Delete_management(fileCostDa)
    arcpy.Delete_management(fileCostDb)
    arcpy.Delete_management(fileCorridor)
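
# The corridor raster saved above is shifted so that its minimum is zero, which lets
# downstream steps threshold it by cost relative to the best path rather than by an
# absolute accumulated cost. A hedged sketch of that kind of thresholding; the
# corridor_footprint() helper and its default threshold value are illustrative
# assumptions, not part of this script:
from arcpy.sa import Raster, Con

def corridor_footprint(corridor_min_path, out_path, threshold=5.0):
    # Cells whose corridor cost is within 'threshold' of the best path become 1; others become NoData
    footprint = Con(Raster(corridor_min_path) <= threshold, 1)
    footprint.save(out_path)
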
Exemplo n.º 20
0
ascList = glob.glob(filepath + "/*.tif")
print ascList

# Optionally, open a .txt file to record the extent of each .tif file
#with open(extFile, 'w') as f:
for ascFile in ascList:
    print ascFile
    print arcgisscripting.Raster(ascFile).extent
    myExtent = arcgisscripting.Raster(ascFile).extent
    # Shrink the extent inward by 300 map units on each side
    newXMin = myExtent.XMin + 300
    newYMin = myExtent.YMin + 300
    newXMax = myExtent.XMax - 300
    newYMax = myExtent.YMax - 300
    newExtent = "%s %s %s %s" % (newXMin, newYMin, newXMax, newYMax)
    print newExtent
    out_feature = ascFile[:-4] + "_clip.tif"
    arcpy.Clip_management(ascFile, newExtent, out_feature)
    ##extent1 = str(arcgisscripting.Raster(ascFile).extent)
    ##line = ascFile + " " + extent1 + '\n'
    ##f.write(line)

# These reports only apply when the extent file above is written:
#print('File written to\n {}'.format(extFile))
#print('Closed: {}'.format(f.closed))
Exemplo n.º 21
0
def mosaic(dnight, sets, filter):
    '''
    This module creates the mosaic of median filtered images for each data set.
    '''
    #set arcpy environment variables part 2/2
    arcpy.CheckOutExtension("Spatial")
    arcpy.env.workspace = filepath.rasters + 'scratch_median/'
    arcpy.env.scratchWorkspace = filepath.rasters + 'scratch_median'

    #filter paths
    F = {'V': '', 'B': 'B/'}
    f = {'V': '', 'B': 'b'}

    for s in sets:
        #file paths
        calsetp = filepath.calibdata + dnight + '/S_0%s/%s' % (s[0], F[filter])
        gridsetp = filepath.griddata + dnight + '/S_0%s/%smedian/' % (
            s[0], F[filter])
        if os.path.exists(gridsetp):
            shutil.rmtree(gridsetp)
        os.makedirs(gridsetp)

        #read in the registered images coordinates
        file = filepath.calibdata + dnight + '/pointerr_%s.txt' % s[0]
        Obs_AZ, Obs_ALT = n.loadtxt(file, usecols=(3, 4)).T
        Obs_AZ[n.where(Obs_AZ > 180)] -= 360
        Obs_AZ[35] %= 360

        #read in the best-fit zeropoint and plate scale
        file = filepath.calibdata + dnight + '/extinction_fit_%s.txt' % filter
        zeropoint, platescale, exptime = n.loadtxt(file,
                                                   usecols=(2, 8, 9),
                                                   unpack=True,
                                                   ndmin=2)

        #loop through each file in the set
        for w in range(len(Obs_AZ) + 1):

            v = w + 1
            if w == 45:
                w = 35
                Obs_AZ[w] -= 360

            if v in range(0, 50, 5): print 'Generating median image %i/45' % v

            arcpy.CopyRaster_management(
                calsetp + '/tiff/median_ib%03d.tif' % (w + 1),
                'ib%03d.tif' % v, "DEFAULTS", "", "", "", "",
                "16_BIT_UNSIGNED")

            #re-define projection to topocentric coordinates
            arcpy.DefineProjection_management("ib%03d.tif" % v,
                                              tc(Obs_AZ[w], Obs_ALT[w]))

            #warp the image to remove barrel distortion
            arcpy.Warp_management('ib%03d.tif' % v, source_pnt, target_pnt,
                                  'ibw%03d.tif' % v, "POLYORDER3", "BILINEAR")

            #reproject into GCS
            arcpy.ProjectRaster_management('ibw%03d.tif' % v,
                                           'wib%03d.tif' % v, geogcs,
                                           "BILINEAR", "0.0266")

            #clip to image boundary
            rectangle = clip_envelope(Obs_AZ, Obs_ALT, w)
            arcpy.Clip_management("wib%03d.tif" % v, rectangle, "cib%03d" % v)

        #mosaic raster list must start with an image with max pixel value > 256
        v = 1
        mstart = 1
        while v < (len(Obs_AZ) + 1):
            im = imread(filepath.rasters + 'scratch_median/ib%03d.tif' % v)
            if n.max(im) > 255:
                mstart = v
                break
            v += 1

        #mosaic raster list
        R1 = ';'.join(['cib%03d' % i for i in range(mstart, 47)])
        R2 = ';'.join(['cib%03d' % i for i in range(1, mstart)])
        R = R1 + ';' + R2

        #mosaic to topocentric coordinate image; save in Griddata\
        print "Mosaicking into all sky median image"
        arcpy.MosaicToNewRaster_management(R, gridsetp, 'skytopom', geogcs,
                                           "32_BIT_FLOAT", "0.0266", "1",
                                           "BLEND", "FIRST")

        #re-sampling to 0.05 degree resolution
        gridname = gridsetp + "skybrightmags"
        arcpy.Resample_management(gridsetp + 'skytopom',
                                  gridsetp + 'skybright', '0.05', 'BILINEAR')

        #convert to magnitudes per square arc second
        print "Converting the mosaic to mag per squard arcsec"
        psa = 2.5 * n.log10(
            (platescale[int(s[0]) - 1] * 60)**2)  # platescale adjustment
        skytopomags = zeropoint[int(s[0]) - 1] + psa - 2.5 * arcpy.sa.Log10(
            arcpy.sa.Raster(gridsetp + 'skybright') / exptime[0])

        #save mags mosaic to disk
        skytopomags.save(gridsetp + 'skybrightmags')

        print "Creating layer files for median mosaic"
        layerfile = filepath.griddata + dnight + '/skybrightmags%s%s.lyr' % (
            f[filter], s[0])
        arcpy.MakeRasterLayer_management(
            gridsetp + 'skybrightmags',
            dnight + '_%s_median%s' % (s[0], f[filter]))
        arcpy.SaveToLayerFile_management(
            dnight + '_%s_median%s' % (s[0], f[filter]), layerfile, "ABSOLUTE")

        #Set layer symbology to magnitudes layer
        symbologyLayer = filepath.rasters + 'magnitudes.lyr'
        arcpy.ApplySymbologyFromLayer_management(layerfile, symbologyLayer)
        lyrFile = arcpy.mapping.Layer(layerfile)
        lyrFile.replaceDataSource(gridsetp, 'RASTER_WORKSPACE',
                                  'skybrightmags', 'FALSE')
        lyrFile.save()

        #Downscale the raster and save it as a fits file
        file = filepath.griddata + dnight + '/S_0%s/%smedian/skybrightmags' % (
            s[0], F[filter])
        arcpy_raster = arcpy.sa.Raster(file)
        A = arcpy.RasterToNumPyArray(arcpy_raster, "#", "#", "#", -9999)
        A_small = downscale_local_mean(A[:1800, :7200], (25, 25))  #72x288
        fname = filepath.griddata + dnight + '/skybrightmags%s%s.fits' % (
            f[filter], s[0])
        fits.writeto(fname, A_small, overwrite=True)

    #create mask.tif for horizon masking in the later process
    mask = filepath.griddata + dnight + '/mask.tif'
    if not os.path.isfile(mask):
        arcpy.CopyRaster_management(gridsetp + 'skybright', mask, "DEFAULTS",
                                    "0", "0", "", "", "16_BIT_UNSIGNED")
Exemplo n.º 22
0
suffix = 'tif'      # raster file extension
bvalue = -9999      # raster NoData (background) value
clpgeo = 'ClippingGeometry'

shp_file = r"G:\黄土高原胖边界\LPfat.shp"     # clip template shapefile
ras_file_cut = r"D:\tmp"                      # output folder for the clipped rasters
txtname = r"D:\CRU4.02\GeoTiff_LP\cru4.02_rh_yr_LP.txt"     # output path for the statistics text file

# processing section
import arcpy
import os

if not os.path.exists(ras_file_cut):
    os.mkdir(ras_file_cut)

arcpy.env.workspace = ras_file   # ras_file should point to the folder containing the input rasters
ras = arcpy.ListRasters('*', suffix)
print "Found %d raster datasets in total" % len(ras)

print "Processing......"
result = []
for rs in ras:
    outname = ras_file_cut + "\\" + rs[:-4] + ".tif"
    #arcpy.Clip_management(rs, "#", outname, shp_file, str(bvalue), "ClippingGeometry")   # or "NONE"
    arcpy.Clip_management(rs, "#", outname, shp_file, str(bvalue), clpgeo)
    stats = arcpy.GetRasterProperties_management(outname, "MEAN")
    result.append(str(stats) + "\n")
    #arcpy.Delete_management(outname, "")
    print str(rs) + "   OK!"

with open(txtname, 'w') as fout:
    fout.writelines(result)
print "Finish!"
Exemplo n.º 23
0
#Script by Andrew Paladino
#Second script using Arcpy YEET

import arcpy

def print_message(msg):
    print(msg)
    arcpy.AddMessage(msg)

arcpy.env.overwriteOutput = True
#Specify the workspace
arcpy.env.workspace = r"C:\Users\AndrewPaladino\Documents\ArcGIS\Projects\Holstein\Holstein.gdb"

print_message("Starting clip...")

#input raster dataset
InRaster = r"W:\Operations\Holstein\ArcGIS\Orthomosaics\12_18_19_20\19_20\Georect\19_20_RGB_WebM_Georect.tif"

#Output raster dataset, with "\<name>.tif" appended to the specified path
OutRaster = r"W:\Operations\Holstein\ArcGIS\Orthomosaics\12_18_19_20\19_20\Clipped\19_20_RGB_WebM_Georect_Clip.tif"

#Shapefile to base the clip extent on
ExtentPoly = r"W:\Operations\Holstein\ArcGIS\AGO\6_22_2020\19_20.shp"

clipraster = arcpy.Clip_management(InRaster, None, OutRaster, ExtentPoly, None, "ClippingGeometry", None)

print_message("Clipped Raster saved to {0}".format(OutRaster))
print_message("Script_Completed")

                                   
Exemplo n.º 24
0
envelope_america = "-130 22 -60 55"

for file in files:
    if not os.path.isdir(file):
        if re_tif.search(file):
            rasters.append(file)

if not rasters:
    raise SystemExit  # nothing to clip

for raster in rasters:
    in_raster = files_path + '\\' + raster

    europe_out_raster = raster_path + '\\' + raster[:-4] + '_europe.tif'
    print("Clipping " + europe_out_raster)
    arcpy.Clip_management(in_raster, envelope_europe, europe_out_raster)

    india_out_raster = raster_path + '\\' + raster[:-4] + '_india.tif'
    print("Clipping " + india_out_raster)
    arcpy.Clip_management(in_raster, envelope_india, india_out_raster)

    east_china_out_raster = raster_path + '\\' + raster[:-4] + '_east_china.tif'
    print("Clipping " + east_china_out_raster)
    arcpy.Clip_management(in_raster, envelope_east_china,
                          east_china_out_raster)

    northen_east_asia_out_raster = raster_path + '\\' + raster[:-4] + '_northen_east_asia.tif'
    print("Clipping " + northen_east_asia_out_raster)
    arcpy.Clip_management(in_raster, envelope_northen_east_asia,
                          northen_east_asia_out_raster)
    print("*Deleting old " + date_output + " folder first...")
    arcpy.Delete_management(out_folder + date_output)
arcpy.CreateFolder_management(out_folder, date_output)
# Run through raster and each shapefile ...
print("Creating clip rasters for " + raster_file + "...")
rasterStartTime = time.time()
# Create a new folder for each band
# Loop and clip the input raster band with each polygon mask file defined earlier
out_folder_path = out_folder + date_output + "\\"
shpFiles = [f for f in listdir(mask_scratch + "\\GEOG490_maskscratch\\")
            if isfile(join(mask_scratch + "\\GEOG490_maskscratch\\", f)) and f.endswith('.shp')]
for shpFile in shpFiles:
	if (matchcount > 99): # change naming scheme if there are more than 99 points, up to 9999
		arcpy.Clip_management(
			raster_location + raster_file,
			"",
			out_folder_path + raster_file[:-4] + "_" + shpFile[-8:-4] + ".tif",
			mask_scratch + "\\GEOG490_maskscratch\\" + shpFile,
			"",
			"ClippingGeometry",
			"MAINTAIN_EXTENT") # can remove MAINTAIN_EXTENT parameter, if you want true raster location
	else:
		arcpy.Clip_management(
			raster_location + raster_file,
			"",
			out_folder_path + raster_file[:-4] + "_" + shpFile[-6:-4] + ".tif",
			mask_scratch + "\\GEOG490_maskscratch\\" + shpFile,
			"",
			"ClippingGeometry",
			"MAINTAIN_EXTENT") # can remove MAINTAIN_EXTENT parameter, if you want true raster location
if (time.time() - rasterStartTime == 0):
	print("Error 2 found. The external script did not run successfully. (Maybe the parameters are wrong?)")
	sys.exit("Error 2 found. The external script did not run successfully. (Maybe the parameters are wrong?)")
FAR = os.path.join(INPUT_DIR, '20150820_1m baseecosystems_FAR.tif')
REGION_GROUP = os.path.join(INPUT_DIR,
                            '20151108_1m_base_ecosystem_region_group.tif')

borough_boundaries = os.path.join(INPUT_DIR, 'nybbwi.shp')

cursor = arcpy.SearchCursor(borough_boundaries)

for feature in cursor:
    print feature.BoroName

    base_ecosystems = os.path.join(
        INPUT_DIR, feature.BoroName,
        '%s_base_ecosystem_1m.tif' % feature.BoroCode)
    arcpy.Clip_management(in_raster=BASE_ECOSYSTEMS,
                          out_raster=base_ecosystems,
                          in_template_dataset=feature.Shape,
                          clipping_geometry='ClippingGeometry')

    far = os.path.join(INPUT_DIR, feature.BoroName,
                       '%s_far_1m.tif' % feature.BoroCode)
    arcpy.Clip_management(in_raster=FAR,
                          out_raster=far,
                          in_template_dataset=feature.Shape,
                          clipping_geometry='ClippingGeometry')

    region_group = os.path.join(INPUT_DIR, feature.BoroName,
                                '%s_region_group_1m.tif' % feature.BoroCode)
    arcpy.Clip_management(in_raster=REGION_GROUP,
                          out_raster=region_group,
                          in_template_dataset=feature.Shape,
                          clipping_geometry='ClippingGeometry')
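
# The loop above uses the classic arcpy.SearchCursor; a hedged sketch of the same
# per-borough clip with the newer arcpy.da.SearchCursor, which exposes the fields and
# geometry explicitly. Field names and the BASE_ECOSYSTEMS input are taken from the loop
# above; passing the feature geometry as in_template_dataset mirrors that call pattern.
with arcpy.da.SearchCursor(borough_boundaries, ['BoroName', 'BoroCode', 'SHAPE@']) as boro_cursor:
    for boro_name, boro_code, boro_shape in boro_cursor:
        print boro_name
        out_raster = os.path.join(INPUT_DIR, boro_name,
                                  '%s_base_ecosystem_1m.tif' % boro_code)
        arcpy.Clip_management(in_raster=BASE_ECOSYSTEMS,
                              out_raster=out_raster,
                              in_template_dataset=boro_shape,
                              clipping_geometry='ClippingGeometry')
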
def createTopoVars(sitePoly, largeDEM, bufferPoly, path, site, aggFac):

    import arcpy
    from arcpy import env
    #environment corresponds to the specific site
    env.workspace = "%s\\ChangeModeling\\%s" % (path, site)
    #allow overwriting
    arcpy.env.overwriteOutput = True
    #don't care much about this specific location, mostly here to help avoid arcGIS errors.
    env.scratchWorkspace = "%s\\ChangeModeling\\Scratch.gdb" % path

    # clip to approximate extent of poly for calculations of topo vars
    # uses the 2*scale of analysis buffer (e.g. 2*10 m or 2*40 m; buffer already created outside function)
    mediumDEM_UTM = arcpy.Clip_management(largeDEM, "#", "mediumDEM.tif",
                                          bufferPoly, "0", "ClippingGeometry")

    # calculate slope (maximum slope)
    mediumSLOPE = arcpy.Slope_3d(mediumDEM_UTM, "med_slope.tif", "DEGREE")
    print "Slope calculated"

    # calculate curvature (provides maximum curvature, plan curvature, and profile curvature)
    mediumcurvature = arcpy.Curvature_3d(mediumDEM_UTM, "med_curvature.tif",
                                         "", "med_curvature_prof.tif",
                                         "med_curvature_plan.tif")
    print "Curvature calculated"

    # calculate aspect, northness, and eastness
    mediumAspect = arcpy.Aspect_3d(mediumDEM_UTM, "med_aspect.tif")
    mediumNorthn = arcpy.sa.Cos(arcpy.Raster(mediumAspect) * 3.14159 / 180)
    mediumEastn = arcpy.sa.Sin(arcpy.Raster(mediumAspect) * 3.14159 / 180)
    mediumNorthn.save("med_Northn.tif")
    mediumEastn.save("med_Eastn.tif")
    print "aspect (Northness/Eastness) calculated"

    # calculate Heat Load Index (Stoddard and Hayes 2005 uses it)
    mediumHeatLoadIndex = 1 - (arcpy.sa.Cos(
        (arcpy.Raster(mediumAspect) - 45) * 3.14159 / 180)) / 2
    mediumHeatLoadIndex.save("med_HeatLoadIndex.tif")
    print "Heat Load Index calculated"

    # calculating solar radiation (insolation)
    mediumInsol = arcpy.sa.AreaSolarRadiation(
        mediumDEM_UTM, time_configuration=arcpy.sa.TimeWholeYear(
            "2009"))  #,out_global_radiation_raster="med_Insol.tif")
    mediumInsol.save("med_Insol.tif")
    print "Insolation calculated"

    # calculate Topographic Moisture/Wetness Index
    # how I learned to do this:
    # http://www.faculty.umb.edu/david.tenenbaum/eeos383/
    # http://www.faculty.umb.edu/david.tenenbaum/eeos383/eeos383-exercise03.pdf
    # http://www.faculty.umb.edu/david.tenenbaum/eeos383/eeos383-exercise05.pdf
    DEMfilled = arcpy.sa.Fill(mediumDEM_UTM)
    flowDir = arcpy.sa.FlowDirection(DEMfilled)
    flowAcc = arcpy.sa.FlowAccumulation(flowDir)
    contrArea = flowAcc * 10 * aggFac  # multiply by cell area and divide by cell length
    medTMI = arcpy.sa.Ln(
        (contrArea + 1) /
        (arcpy.sa.Tan(arcpy.Raster(mediumSLOPE) * 3.14159 / 180)))
    medTMI.save("med_TMI.tif")
    print "TMI calculated"

    # calculating a distance to the nearest ridgeline; something like 'slope position'
    DistToRidge = arcpy.sa.FlowLength(flowDir, "UPSTREAM", "")
    DistToRidge.save("med_DistToRidge.tif")
    print "Distance to Ridgeline calculated"
                    print("IDW", PointsLayer, '...')
                    try:
                        outIDW = Idw(PointsLayer, zField)
                    except Exception as e:
                        print(e)

                    print('Clipping to ', MaskLayer, ' and saving',
                          outputIDWGrid, '...')
                    rectangle = "{} {} {} {}".format(outIDW.extent.XMin,
                                                     outIDW.extent.YMin,
                                                     outIDW.extent.XMax,
                                                     outIDW.extent.YMax)
                    try:
                        arcpy.Clip_management(outIDW, rectangle, outputIDWGrid,
                                              MaskLayer, 0, 'ClippingGeometry',
                                              'MAINTAIN_EXTENT')
                    except Exception as e:
                        print(e)

                except Exception as e:
                    print(e)
                del rectangle
                arcpy.management.Delete(outIDW)
                arcpy.management.Delete(PointsLayer)
                arcpy.management.Delete(MaskLayer)

            else:
                pass
            print("")
Exemplo n.º 29
0
           country_names.append(filename)

print(vector_names, raster_names, country_names)


# Set workspace
arcpy.env.workspace = args["output"][0]
   
for i in range(len(fc_vectors)):
    for j in range(len(fc_countries)):
        print(vector_names[i],country_names[j])
        outfc = "{}_{}_Clipped".format(country_names[j],vector_names[i])
        arcpy.Clip_analysis(fc_vectors[i], fc_countries[j], outfc)
        print(arcpy.GetMessages())


for i in range(len(fc_rasters)):
    for j in range(len(fc_countries)):
        print(raster_names[i],country_names[j])
        outfc = "{}_{}_Clipped".format(country_names[j],raster_names[i])
        arcpy.Clip_management(in_raster=fc_rasters[i], rectangle="",
                            clipping_geometry="ClippingGeometry",
                            in_template_dataset=fc_countries[j],
                            out_raster=outfc)
        print(arcpy.GetMessages())


        


Exemplo n.º 30
0
def workLinesMem(segment_info):
    """
    New version of worklines. It uses memory workspace instead of shapefiles.
    The refactoring is to accelerate the processing speed.
    """

    # input verification
    if segment_info is None or len(segment_info) <= 1:
        print("Input segment is corrupted, ignore")
        return None

    # Temporary files
    outWorkspace = flmc.GetWorkspace(workspaceName)

    # read params from text file
    f = open(outWorkspace + "\\params.txt")
    Forest_Line_Feature_Class = f.readline().strip()
    Cost_Raster = f.readline().strip()
    Line_Processing_Radius = float(f.readline().strip())
    f.close()

    lineNo = segment_info[1]  # second element is the line No.
    outWorkspaceMem = r"memory"
    arcpy.env.workspace = r"memory"

    fileSeg = os.path.join(outWorkspaceMem, "FLM_CL_Segment_" + str(lineNo))
    fileOrigin = os.path.join(outWorkspaceMem, "FLM_CL_Origin_" + str(lineNo))
    fileDestination = os.path.join(outWorkspaceMem,
                                   "FLM_CL_Destination_" + str(lineNo))
    fileBuffer = os.path.join(outWorkspaceMem, "FLM_CL_Buffer_" + str(lineNo))
    fileClip = os.path.join(outWorkspaceMem,
                            "FLM_CL_Clip_" + str(lineNo) + ".tif")
    fileCostDist = os.path.join(outWorkspaceMem,
                                "FLM_CL_CostDist_" + str(lineNo) + ".tif")
    fileCostBack = os.path.join(outWorkspaceMem,
                                "FLM_CL_CostBack_" + str(lineNo) + ".tif")
    fileCenterline = os.path.join(outWorkspaceMem,
                                  "FLM_CL_Centerline_" + str(lineNo))

    # Load segment list
    segment_list = []

    for line in segment_info[0]:
        for point in line:  # loops through every point in a line
            # loops through every vertex of every segment
            if point:
                # adds all the vertices to segment_list, which creates an array
                segment_list.append(point)

    # Find origin and destination coordinates
    x1 = segment_list[0].X
    y1 = segment_list[0].Y
    x2 = segment_list[-1].X
    y2 = segment_list[-1].Y

    # Create segment feature class
    try:
        arcpy.CreateFeatureclass_management(outWorkspaceMem,
                                            os.path.basename(fileSeg),
                                            "POLYLINE",
                                            Forest_Line_Feature_Class,
                                            "DISABLED", "DISABLED",
                                            Forest_Line_Feature_Class)
        cursor = arcpy.da.InsertCursor(fileSeg, ["SHAPE@"])
        cursor.insertRow([segment_info[0]])
        del cursor
    except Exception as e:
        print("Create feature class {} failed.".format(fileSeg))
        print(e)
        return

    # Create origin feature class
    # TODO: not in use, delete later
    try:
        arcpy.CreateFeatureclass_management(outWorkspaceMem,
                                            os.path.basename(fileOrigin),
                                            "POINT", Forest_Line_Feature_Class,
                                            "DISABLED", "DISABLED",
                                            Forest_Line_Feature_Class)
        cursor = arcpy.da.InsertCursor(fileOrigin, ["SHAPE@XY"])
        xy = (float(x1), float(y1))
        cursor.insertRow([xy])
        del cursor
    except Exception as e:
        print("Creating origin feature class failed: at X, Y" + str(xy) + ".")
        print(e)
        return

    # Create destination feature class
    # TODO: not in use, delete later
    try:
        arcpy.CreateFeatureclass_management(outWorkspaceMem,
                                            os.path.basename(fileDestination),
                                            "POINT", Forest_Line_Feature_Class,
                                            "DISABLED", "DISABLED",
                                            Forest_Line_Feature_Class)
        cursor = arcpy.da.InsertCursor(fileDestination, ["SHAPE@XY"])
        xy = (float(x2), float(y2))
        cursor.insertRow([xy])
        del cursor
    except Exception as e:
        print("Creating destination feature class failed: at X, Y" + str(xy) +
              ".")
        print(e)
        return

    try:
        # Buffer around line
        arcpy.Buffer_analysis(fileSeg, fileBuffer, Line_Processing_Radius,
                              "FULL", "ROUND", "NONE", "", "PLANAR")

        # Clip cost raster using buffer
        DescBuffer = arcpy.Describe(fileBuffer)
        SearchBox = str(DescBuffer.extent.XMin) + " " + str(DescBuffer.extent.YMin) + " " + \
                    str(DescBuffer.extent.XMax) + " " + str(DescBuffer.extent.YMax)
        arcpy.Clip_management(Cost_Raster, SearchBox, fileClip, fileBuffer, "",
                              "ClippingGeometry", "NO_MAINTAIN_EXTENT")

        # Least cost path
        # arcpy.gp.CostDistance_sa(fileOrigin, fileClip, fileCostDist, "", fileCostBack, "", "", "", "", "TO_SOURCE")
        fileCostDist = CostDistance(arcpy.PointGeometry(arcpy.Point(x1, y1)),
                                    fileClip, "", fileCostBack)
        # print("Cost distance file path: {}".format(fileCostDist))

        # arcpy.gp.CostPathAsPolyline_sa(fileDestination, fileCostDist,
        #                               fileCostBack, fileCenterline, "BEST_SINGLE", "")
        CostPathAsPolyline(arcpy.PointGeometry(arcpy.Point(x2, y2)),
                           fileCostDist, fileCostBack, fileCenterline,
                           "BEST_SINGLE", "")

        # get centerline polyline out of feature class file
        centerline = []
        with arcpy.da.SearchCursor(fileCenterline, ["SHAPE@"]) as cursor:
            for row in cursor:
                centerline.append(row[0])

    except Exception as e:
        print("Problem with line starting at X " + str(x1) + ", Y " + str(y1) +
              "; and ending at X " + str(x2) + ", Y " + str(y2) + ".")
        print(e)
        centerline = []
        return centerline

    # Clean temporary files
    arcpy.Delete_management(fileSeg)
    arcpy.Delete_management(fileOrigin)
    arcpy.Delete_management(fileDestination)
    arcpy.Delete_management(fileBuffer)
    arcpy.Delete_management(fileClip)
    arcpy.Delete_management(fileCostDist)
    arcpy.Delete_management(fileCostBack)

    # Return centerline
    print("Processing line {} done".format(fileSeg))
    return centerline, segment_info[2]
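
# workLinesMem is written as a self-contained worker, so a natural way to drive it is
# from a process pool. A hedged sketch of such a caller; the segment_infos argument is
# assumed to be a list of tuples prepared elsewhere, in the shape workLinesMem expects
# (geometry, line number, line attributes):
import multiprocessing

def process_all_lines(segment_infos, processes=4):
    # Run workLinesMem over every prepared segment tuple and collect the returned centerlines
    pool = multiprocessing.Pool(processes=processes)
    try:
        results = pool.map(workLinesMem, segment_infos)
    finally:
        pool.close()
        pool.join()
    return results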