Example #1
    def calculate_suitability(self):
        """
        Calculate the set of suitability points that constrain the potential locations of new_ponds.
        new_ponds can only be placed on cells that meet the following conditions:
            1) outside the bounds of existing beaver territory
            2) on mapped streams with gradients <= 8 degrees
            3) above the highest tidal influence
        :return:
        """
        # TODO what other conditions need to be met, make sure the correct stream types are used
        # TODO recalculate suitable streams with new landcover

        if isinstance(self.SUITABLE_STREAMS, str):
            self.SUITABLE_STREAMS = arcpy.Raster(self.SUITABLE_STREAMS)

        # calculate current territories
        exclude_territory = self.calculate_territory()

        # intersect un-colonized parts of the landscape with suitable streams
        suitability_surface = exclude_territory * self.SUITABLE_STREAMS

        suitability_surface_set_null = arcpy.sa.SetNull(
            suitability_surface == 0, suitability_surface)

        # convert suitable cells to points for random selection and watershed pour point
        arcpy.RasterToPoint_conversion(
            in_raster=suitability_surface_set_null,
            out_point_features=self.suitability_points)
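A minimal standalone sketch of the same mask-and-convert pattern (hypothetical raster paths; assumes the Spatial Analyst extension is available):

import arcpy
from arcpy.sa import SetNull

arcpy.CheckOutExtension('Spatial')
# hypothetical 1/0 masks of un-colonized cells and of suitable streams
suitability = arcpy.Raster("uncolonized_mask.tif") * arcpy.Raster("suitable_streams.tif")
suitability_set_null = SetNull(suitability == 0, suitability)  # drop unsuitable cells
arcpy.RasterToPoint_conversion(suitability_set_null, "suitability_points.shp")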
Example #2
 def get_patch_wses(self):
     """Gets list of WSE values in each patch at each discharge"""
     self.logger.info("Getting patch WSE values...")
     for q in self.discharges:
         # convert each WSE ras to points
         self.logger.info("Discharge: %s " % q)
         wse_pts = os.path.join(self.cache, 'wse_pts.shp')
         arcpy.RasterToPoint_conversion(self.q_wse_dict[q], wse_pts)
         # extract patch num. to each point
         wse_patch_pts = os.path.join(self.cache, 'wse_patch_pts.shp')
         ExtractValuesToPoints(wse_pts, self.patch_ras, wse_patch_pts)
         # export attribute table to csv
         wse_patch_table = 'patch_table%i.csv' % int(q)
         # 'RASTERVALU' = patch number, 'grid_code' = WSE
         array = arcpy.da.FeatureClassToNumPyArray(wse_patch_pts, ['RASTERVALU', 'grid_code'], skip_nulls=True,
                                                   where_clause='RASTERVALU <> -9999')
         df = pd.DataFrame(array.tolist(), columns=['patch', 'WSE'])
         df = df.groupby('patch')['WSE'].apply(list)
         for patch in df.index:
             if patch in self.patch_wses.keys():
                 self.patch_wses[patch].extend(df[patch])
                 self.patch_qs[patch].extend([q] * len(df[patch]))
             else:
                 self.patch_wses[patch] = df[patch]
                 self.patch_qs[patch] = [q] * len(df[patch])
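The groupby/apply(list) step above collects every WSE value belonging to a patch; a self-contained pandas sketch of that pattern with made-up numbers:

import pandas as pd

df = pd.DataFrame({'patch': [1, 1, 2], 'WSE': [10.2, 10.4, 9.8]})  # made-up values
wse_by_patch = df.groupby('patch')['WSE'].apply(list)
# wse_by_patch[1] -> [10.2, 10.4]; wse_by_patch[2] -> [9.8]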
Example #3
        def run(IFile, Ofile, Snumx, Snumy, Stype):
            from arcpy import env
            env.overwriteOutput = True
            print "--------------------------------------------------------------------"
            print "Program GetCentroid Starts: ", time.asctime(
                time.localtime(time.time()))
            print "--------------------------------------------------------------------"
            try:
                import arcpy
                from arcpy import env
                arcpy.env.workspace = Ofile
                cell = Snumx + " " + Snumy
                # Resample TIFF image
                arcpy.Resample_management(IFile, "resamplenew.tif", cell,
                                          Stype)

                inRaster = "resamplenew.tif"
                outPoint = Ofile + "/outpoint.shp"
                field = "VALUE"
                arcpy.RasterToPoint_conversion(inRaster, outPoint, field)

                print "Input corret, output file has been generated..!"
                print "--------------------------------------------------------------------"
                print "Program GetCentroid Ends: ", time.asctime(
                    time.localtime(time.time()))
                print "--------------------------------------------------------------------"

            except:
                print "Resample example failed."
                print arcpy.GetMessages()
Example #4
def ExtractVals(nc, aoi, out):
    i = 0
    for j in aoi:
        #Mask out raster with Marine Reserve limits and save it
        mask = arcpy.sa.ExtractByMask(nc, j)
        mask.save("SST_" + out[i] + ".TIF")
        #Create point features from masked out raster
        Pts = arcpy.RasterToPoint_conversion(mask, "SST_Pts" + out[i] + ".shp",
                                             "Value")
        #Extract values from multiple layers to point features
        SSTVal = arcpy.gp.ExtractMultiValuesToPoints_sa(
            Pts, "SST_" + out[i] + ".tif SST_" + out[i], "NONE")
        #Save attribute table of point features in excel format
        arcpy.TableToExcel_conversion(SSTVal, "SSTValues" + out[i] + ".xls",
                                      "NAME", "CODE")
        #Read excel table
        SSTPtVal = pd.ExcelFile("SSTValues" + out[i] + ".xls")
        #Load first excel sheet
        SST_sheet = SSTPtVal.parse(SSTPtVal.sheet_names[0])
        #Create dictionary to change column names to months
        NewColNames = dict(zip(SST_sheet.columns[3:14], TimeLabs))
        #Change column names using dictionary
        SST_sheet.rename(columns=NewColNames, inplace=True)
        #Write file with corrected column names into a csv file for further analysis
        SST_sheet.to_csv("SST_Values" + out[i] + ".csv")
        i += 1
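The second argument to ExtractMultiValuesToPoints_sa above is a space-separated "raster output_field" pair; an equivalent call through the arcpy.sa wrapper (hypothetical point shapefile, raster, and field names) could look like:

import arcpy
from arcpy.sa import ExtractMultiValuesToPoints

ExtractMultiValuesToPoints("SST_PtsRes.shp", [["SST_Res.tif", "SST_Res"]], "NONE")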
Example #5
def execute_FromPoints(flowacc, area_threshold, str_frompoints, messages, language = "FR"):

    # Compute the threshold for the start points on the flow accumulation raster
    flowacc_threshold = area_threshold * 1000 * 1000 / (flowacc.meanCellWidth * flowacc.meanCellHeight)
    # Create the stream raster
    binrivers = arcpy.sa.SetNull(flowacc, 1, "VALUE < " + str(flowacc_threshold))

    # Count the neighbouring pixels that are stream cells
    nbvoisins = arcpy.sa.FocalStatistics(binrivers, arcpy.sa.NbrRectangle(3,3,"CELL"), "SUM", "")

    # Keep only the extremities (one stream neighbour, so the focal sum = 2)
    extremities = arcpy.sa.SetNull(nbvoisins, binrivers, "VALUE <> 2")

    # Remove the points located on the border (outlets of the modelled area)
    noborders = arcpy.sa.FocalStatistics(flowacc, arcpy.sa.NbrRectangle(3, 3, "CELL"), "SUM", "NODATA")
    borders = arcpy.sa.IsNull(noborders)
    frompoints = arcpy.sa.SetNull(borders, extremities, "VALUE = 1")

    # Convert the raster to a shapefile
    arcpy.RasterToPoint_conversion(frompoints, str_frompoints)

    return
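The flowacc_threshold line above turns a drainage-area threshold given in km2 into a cell count on the flow accumulation raster; for instance, with a hypothetical 10 m cell size and a 1 km2 threshold:

area_threshold = 1.0                 # km2, hypothetical
cell_width = cell_height = 10.0      # metres, hypothetical
flowacc_threshold = area_threshold * 1000 * 1000 / (cell_width * cell_height)
print(flowacc_threshold)             # 10000.0 cells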
Example #6
def finddist(clatoshad_14, dir, path):
    path2 = path
    # Local variables:
    points_shp = "{}\\points.shp".format(path2)
    dist1 = "{}\\dist1".format(path2)
    # Process: Raster to Point
    arcpy.RasterToPoint_conversion(clatoshad_14, points_shp, "VALUE")
    # Process: Add XY Coordinates
    arcpy.AddXY_management(points_shp)
    # Process: Add Field
    arcpy.AddField_management(points_shp, "distfield", "FLOAT", "", "", "", "",
                              "NULLABLE", "NON_REQUIRED", "")
    xlist = []
    ylist = []
    finames = ['POINT_X', 'POINT_Y', 'distfield']
    rows = arcpy.da.UpdateCursor(points_shp, finames)
    for row in rows:
        xlist.append(row[0])
        ylist.append(row[1])
    rows.reset()
    for row in rows:
        if dir == 'e':
            changex = row[0] - min(xlist)
            changey = row[1] - min(ylist)
            row[2] = math.sqrt(changex * changex + changey * changey)
        if dir == 'w':
            changex = row[0] - max(xlist)
            changey = row[1] - min(ylist)
            row[2] = math.sqrt(changex * changex + changey * changey)
        rows.updateRow(row)
    del row
    del rows
    arcpy.PointToRaster_conversion(points_shp, "distfield", dist1,
                                   "MOST_FREQUENT", "NONE", clatoshad_14)
    return dist1
Example #7
def createComplainRasterFeature(SelectSQL,InputComplainFeatures,POIFeatures,FinalResultFeature):
    logging.info("Process: 创建"+FinalResultFeature)
    if(arcpy.Exists(FinalResultFeature)):
        arcpy.Delete_management(FinalResultFeature, "FeatureClass")
    rmNo = random.randint(100000000,999999999)
    print rmNo
    # Process: Select
    print "Process: Select"
    logging.info("Process: Select")
    FeatureSelect=arcpy.Select_analysis(InputComplainFeatures, "in_memory/FeatureSelect"+repr(rmNo), SelectSQL)
    # Process: Point to Raster
    print FeatureSelect
    rowSear =  arcpy.SearchCursor(FeatureSelect)
    row = rowSear.next()
    if(row):
        print "Process: 点转栅格"
        logging.info("Process: 点转栅格")
        tempEnvironment0 = arcpy.env.extent
        arcpy.env.extent = "115 23 122 29"
        ResultRaster=arcpy.PointToRaster_conversion(FeatureSelect, "OBJECTID", "in_memory/ResultRaster"+repr(rmNo), "COUNT", "NONE", ".0018")
        arcpy.env.extent = tempEnvironment0
        # Process: Raster to Point
        print "Process: Raster to Point"
        logging.info("Process: Raster to Point")
        COMPLAIN_RASTER_POINTS=arcpy.RasterToPoint_conversion(ResultRaster, "in_memory/COMPLAIN_RASTER_POINTS"+repr(rmNo), "VALUE")
        print "Process: 空间连接"
        # Process: 空间连接
        COMPLAIN_POI_UNION=arcpy.SpatialJoin_analysis(COMPLAIN_RASTER_POINTS, POI, "in_memory/COMPLAIN_POI_UNION"+repr(rmNo), "JOIN_ONE_TO_ONE", "KEEP_ALL", "","CLOSEST", ".1 DecimalDegrees", "DISTANCE")
        print "Process: 点转栅格 (2)"
        logging.info("Process: 点转栅格 (2)")
        # Process: 点转栅格 (2)
        tempEnvironment0 = arcpy.env.extent
        arcpy.env.extent = "115 23 122 29"
        ResultRaster2=arcpy.PointToRaster_conversion(COMPLAIN_POI_UNION, "OBJECTID", "in_memory/ResultRaster2"+repr(rmNo), "MOST_FREQUENT", "NONE", ".0018")
        arcpy.env.extent = tempEnvironment0
        print "Process: 栅格转面"
        logging.info("Process: 栅格转面")
        # Process: 栅格转面
        ResultFeature=arcpy.RasterToPolygon_conversion(ResultRaster2, "in_memory/ResultFeature"+repr(rmNo), "NO_SIMPLIFY", "VALUE")
        print "Process: 空间连接 (2)"
        logging.info("Process: 空间连接 (2)")
        # Process: 空间连接 (2)
        ResultFeatureZj=arcpy.SpatialJoin_analysis(ResultFeature, COMPLAIN_POI_UNION, "in_memory/ResultFeatureZj"+repr(rmNo), "JOIN_ONE_TO_ONE", "KEEP_ALL", "", "INTERSECT", "", "")
        # Process: Spatial Join (3)
        arcpy.SpatialJoin_analysis(FeatureSelect, ResultFeatureZj, FinalResultFeature, "JOIN_ONE_TO_ONE", "KEEP_ALL", "", "INTERSECT", "", "")
        #arcpy.SpatialJoin_analysis(FeatureSelect, ResultFeatureZj, FinalResultFeature, "JOIN_ONE_TO_ONE", "KEEP_ALL", "TIME \"TIME\" true true false 8 Date 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\GIS_OBJECT_COMPLAIN_Select1,TIME,-1,-1;WORK_ORDER_ID \"WORK_ORDER_ID\" true true false 100 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\GIS_OBJECT_COMPLAIN_Select1,WORK_ORDER_ID,-1,-1;DISTANCE \"DISTANCE\" true true false 8 Double 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,DISTANCE,-1,-1;POINTID \"POINTID\" true true false 4 Long 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,POINTID,-1,-1;GRID_CODE \"聚合数\" true true false 4 Long 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,GRID_CODE,-1,-1;Name \"聚合地址\" true true false 160 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,Name,-1,-1;Ctype \"聚合地址类型(原始)\" true true false 64 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,Ctype,-1,-1;CnType \"聚合地址类型\" true true false 50 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,CnType,-1,-1;CITY \"地市\" true true false 32 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,CITY,-1,-1;COUNTY \"区县\" true true false 32 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,COUNTY,-1,-1;GRID \"GRID\" true true false 32 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,GRID,-1,-1;SGLON \"栅格POI经度\" true true false 8 Double 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,SGLON,-1,-1;SGLAT \"栅格POI纬度\" true true false 8 Double 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,SGLAT,-1,-1;CQ_REGION \"城区网格所属区域\" true true false 60 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,CQ_REGION,-1,-1;CQ_REGION_TYPE \"城区网格区域属性\" true true false 60 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,CQ_REGION_TYPE,-1,-1;TEST_ID \"测试网格ID\" true true false 10 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,TEST_ID,-1,-1;TEST_GRIDID \"测试网格编号\" true true false 20 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,TEST_GRIDID,-1,-1;TEST_CLASS \"测试网格类型\" true true false 10 Text 0 0 ,First,#,D:\\HasmbyGis\\Cache.gdb\\Complain20140509_SpatialJoin,TEST_CLASS,-1,-1", "INTERSECT", "", "")

        
        arcpy.Delete_management(COMPLAIN_POI_UNION)
        arcpy.Delete_management(COMPLAIN_RASTER_POINTS)
        arcpy.Delete_management(ResultRaster)
        arcpy.Delete_management(ResultRaster2)
        arcpy.Delete_management(ResultFeature)
        arcpy.Delete_management(ResultFeatureZj)
        del COMPLAIN_POI_UNION,COMPLAIN_RASTER_POINTS,ResultRaster,ResultRaster2,ResultFeature,ResultFeatureZj
    arcpy.Delete_management(FeatureSelect)
    del FeatureSelect,rowSear
    logging.info("Cleaning up memory~~")
    gc.collect()
Example #8
    def save_mu(self, *args):
        # args[0] can be an optional output directory
        try:
            self.out_dir = args[0]
        except:
            pass
        self.logger.info("")
        self.logger.info(" * SAVING ... ")
        arcpy.CheckOutExtension('Spatial')  # check out license
        arcpy.gp.overwriteOutput = True
        arcpy.env.workspace = self.cache
        arcpy.env.extent = "MAXOF"
        arcpy.CheckInExtension('Spatial')
        try:
            self.logger.info(" * Converting MU IDs to strings:")

            self.logger.info("   >> Converting raster to points ...")
            pts = arcpy.RasterToPoint_conversion(self.ras_mu, self.cache + "pts_del.shp")

            self.logger.info("   >> Converting numbers to strings ...")
            arcpy.AddField_management(pts, "MU", "TEXT")
            expression = "inverse_dict = " + fGl.dict2str(self.mu_dict, inverse_dict=True)
            arcpy.CalculateField_management(pts, "MU", "inverse_dict[!grid_code!]", "PYTHON", expression)

            self.logger.info("   >> OK")
            self.logger.info(" * Saving MU string raster as:")
            self.logger.info(str(self.out_dir) + "\\mu_str.tif")
            arcpy.PointToRaster_conversion(in_features=pts, value_field="MU",
                                           out_rasterdataset=self.out_dir + "\\mu_str.tif",
                                           cell_assignment="MOST_FREQUENT", cellsize=5)
            self.logger.info(" * OK")
        except arcpy.ExecuteError:
            self.logger.info("ExecuteERROR: (arcpy).")
            self.logger.info(arcpy.GetMessages(2))
        except Exception as e:
            self.logger.info("ExceptionERROR: (arcpy).")
            self.logger.info(e.args[0])
        except:
            self.logger.info("ERROR: Field assignment failed.")
            return True

        try:
            self.logger.info(" * Saving mu numeric raster as:")
            self.logger.info(str(self.out_dir) + "\\mu.tif")
            self.ras_mu.save(self.out_dir + "\\mu.tif")
            self.logger.info(" * OK")
        except arcpy.ExecuteError:
            self.logger.info(arcpy.AddError(arcpy.GetMessages(2)))
        except Exception as e:
            self.logger.info(arcpy.GetMessages(2))
        except:
            self.logger.info("ERROR: Saving failed.")
            return True

        try:
            self.clean_up()
        except:
            pass
        return False
Example #9
def calcVS(unsnfltpts, bldg_veg_mask, ct, datapath):
    """
    Calculates the cumulative viewshed from remaining unseen flight points. Generates Euclidean distance
    to mean center table.
    :param unsnfltpts: List of remaining unseen flight points
    :param bldg_veg_mask: Binary mask to remove invalid observer surfaces from cumulative VS raster
    :param ct: Pass number
    :param datapath:  Path to input/output directory
    """

    print('Calculating cumulative viewshed for {} unseen flight points...'.
          format(len(unsnfltpts)))
    # Make chunks of flight points
    usfp_chunks = makechunks(unsnfltpts, 500)
    print('Flight point chunks generated')
    print([len(chunk) for chunk in usfp_chunks])
    chunk_sums = []
    chunkpass = 1
    # Sum each chunk of single viewshed rasters
    for chunk in usfp_chunks:
        print('Chunksum operation {} on {} flight points...'.format(
            chunkpass, len(chunk)))
        # Set null values equal to 0 to avoid NoData holes
        chunkgen = (Con(IsNull(arcpy.Raster(datapath + "vs_" + str(usfp))), 0,
                        1) for usfp in chunk)
        chunkstats = arcpy.sa.CellStatistics(chunkgen, 'SUM', 'NODATA')
        chunk_sums.append((chunkstats))
        print('...Done.')
        chunkpass += 1
    # Sum chunks
    sumrast = arcpy.sa.CellStatistics(chunk_sums, 'SUM', 'NODATA')
    sumrast.save(datapath + "vs_pass_" + str(ct) + "_unmasked")
    print('Unmasked cumulative viewshed saved.')

    # mask out buildings and vegetation
    # set Bldg_Veg_Mask cells to 0
    unmasked = arcpy.Raster(datapath + "vs_pass_" + str(ct) + "_unmasked")
    cumulative_masked = unmasked * bldg_veg_mask
    print('Invalid observer surfaces masked.')
    # set 0 value cells to Null
    cumulative_masked = SetNull(cumulative_masked == 0, cumulative_masked)
    print('Setting null values.')
    # save to .GDB as cumulative raster
    cumulative_masked.save(datapath + "vs_pass_" + str(ct))
    print('Masked cumulative viewshed saved.')

    # Convert raster to points with number views for VS pass and X Y location
    vs_total_pts_ = datapath + "vs_pass_" + str(ct) + "_pts"
    arcpy.RasterToPoint_conversion(cumulative_masked, vs_total_pts_)
    arcpy.AddGeometryAttributes_management(vs_total_pts_, ['POINT_X_Y_Z_M'])
    print('Viewshed points for pass {} generated'.format(ct))
    # Find mean center of cumulative viewshed for pass, save as feature class
    vs_center_ = datapath + "vs_pass_" + str(ct) + "_cntr"
    arcpy.MeanCenter_stats(vs_total_pts_, vs_center_)
    print('Mean center calculated.')
    # Calculate distance of each observation from centroid of observer masspoints
    vs_dist_ = datapath + "vs_pass_" + str(ct) + "_dist"
    arcpy.PointDistance_analysis(vs_total_pts_, vs_center_, vs_dist_)
    print('Observer distances table calculated.')
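makechunks() is not shown in this example; a minimal helper consistent with how it is called here (chunks of up to 500 flight points) might look like:

def makechunks(items, size):
    # hypothetical helper: split a list into consecutive chunks of at most `size` items
    return [items[i:i + size] for i in range(0, len(items), size)]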
Example #10
    def cutout_area(self, bbox, epsg, cutout_shape=None):
        """
        return the centroids of the zensus cells as points inside the
        selected communities
        """
        zensus_points = []
        zensus_raster = self.folders.ZENSUS_RASTER_FILE
        # temporary paths
        out_raster = os.path.join(self.tmp_folder, 'zensus_cutout')
        raster_points = os.path.join(self.tmp_folder, 'raster_points')
        raster_points_projected = os.path.join(self.tmp_folder,
                                               'raster_points_projected')
        raster_points_clipped = os.path.join(self.tmp_folder,
                                             'raster_points_clipped')

        def del_tmp():
            for fn in [
                    raster_points, raster_points_projected, out_raster,
                    raster_points_clipped
            ]:
                arcpy.Delete_management(fn)

        del_tmp()
        out_raster = os.path.join(self.tmp_folder, 'zensus_cutout')
        # clip minimum to rectangle shape that still contains all communities
        srid = clip_raster(zensus_raster, out_raster, bbox)
        # get raster points
        arcpy.RasterToPoint_conversion(out_raster, raster_points)
        start = time.time()
        with arcpy.da.UpdateCursor(raster_points, "GRID_CODE") as cursor:
            for row in cursor:
                if row[0] <= 0:
                    cursor.deleteRow()
        print(time.time() - start)
        # project raster points to gauss krueger
        out_cs = arcpy.SpatialReference(epsg)
        arcpy.Project_management(raster_points, raster_points_projected,
                                 out_cs)

        if cutout_shape:
            # clip raster points to selected communities
            arcpy.Clip_analysis(raster_points_projected, cutout_shape,
                                raster_points_clipped)
        else:
            raster_points_clipped = raster_points
        # create list with zensus points for return
        rows = arcpy.da.SearchCursor(raster_points_clipped,
                                     ['SHAPE@XY', 'GRID_CODE'])
        zensus_points = []
        index = 0
        for ((x, y), value) in rows:
            if value <= 0:
                continue
            p = ZensusCell(x, y, id=index, epsg=srid, ew=value)
            zensus_points.append(p)
            index += 1
        # delete temporary files
        del_tmp()
        return zensus_points, len(zensus_points)
Example #11
def raster_to_point():
    inRaster = r'{}/Buurten/{}/bk_ahn.tif'.format(path,buurt)

    outPoint = r'{}/RasterT_tif12'.format(outPoint_path)
    field = "VALUE"

    arcpy.env.overwriteOutput = True
    arcpy.RasterToPoint_conversion(inRaster, outPoint, field)
Example #12
def convert_raster_to_point(input_file, output_file):
    """Convert raster to point vector
    
    Args:
        input_file: input raster file
        output_file: output point vector file
    """
    print "Converting raster to points..."
    arcpy.RasterToPoint_conversion(input_file, output_file)
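A short usage sketch for the wrapper above (hypothetical paths):

convert_raster_to_point("dem.tif", "dem_points.shp")  # writes one point per raster cell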
Example #13
def conversion(outRaster, raster):
    name = os.path.split(raster)[1][:-4]
    # Extract by mask
    outExtractByMask = None
    # Vector file of the Changshu trace-source extent
    inMaskData = u"E:\\常熟溯源\\矢量\\常熟边界\\常熟市溯源范围.shp"
    # Intermediate file
    outCliptif = outpath + '\\' + fileslist[2] + "\\Clip_" + name + '.tif'
    # Clip
    try:
        # Execute ExtractByMask
        arcpy.Clip_management(outRaster, "#", outCliptif, inMaskData, "#",
                              "ClippingGeometry", "NO_MAINTAIN_EXTENT")
    except Exception as err:
        arcpy.AddMessage("------Clip_management")
        arcpy.AddMessage(err)
    # Resample
    # resampletif=outpath+"\\"+os.path.split(raster)[1][:-4]+'_resampletif_'+getTime()+'.tif'
    # arcpy.Resample_management(outExtractByMask, resampletif, "0.0005", "BILINEAR")
    inPointFeatures = outpath + '\\' + fileslist[
        3] + "\\RasterToPoint_" + name + '.shp'
    arcpy.AddMessage("------栅格转点开始。。。")
    try:
        arcpy.RasterToPoint_conversion(outCliptif, inPointFeatures, 'VALUE')
    except:
        arcpy.AddMessage('-------' + inPointFeatures + " already exists...")
    arcpy.AddMessage("------Raster to point finished")
    # Kriging interpolation
    Krigingfile = os.path.join(outpath + '\\' + fileslist[4],
                               'Kriging_' + name + ".tif")
    field = "GRID_CODE"
    cellSize = 0.00055
    outVarRaster = ""
    kModel = "Spherical"
    kRadius = 20000
    # Execute Kriging
    try:
        arcpy.Kriging_3d(inPointFeatures, field, Krigingfile, kModel, cellSize,
                         kRadius, outVarRaster)
    except:
        arcpy.AddMessage('------Kriging_3d Failed')
    # Changshu extent mask
    changshumask = u'E:\\常熟溯源\\矢量\\常熟边界\\常熟边界缓冲区-去水域.shp'
    outtif = os.path.join(
        outpath + '\\' + fileslist[5],
        'Changshumask_' + os.path.split(raster)[1][:-4] + ".tif")
    # Extract by mask
    try:
        # Execute ExtractByMask
        outMask = arcpy.sa.ExtractByMask(Krigingfile, changshumask)
        outCon = arcpy.sa.Con(arcpy.sa.IsNull(outMask), 0.0001, outMask)
        outCon.save(outtif)
    except Exception as err:
        arcpy.AddMessage("------ExtractByMask Failed")
        arcpy.AddMessage(err)
    createTraceFiles(outtif, name)
Example #14
def conversion(outRaster,raster):
    # Extract by mask
    outExtractByMask=None
    # Vector file of the Changshu trace-source extent
    inMaskData="F:\\20190905\\data\\changshurangle\\changshutrace.shp"
    # Intermediate file
    inPointFeatures=outpath+"\\"+os.path.split(raster)[1][:-4]+'_'+getTime()+'.shp'
    try:
        # Execute ExtractByMask
        outExtractByMask = arcpy.sa.ExtractByMask(outRaster, inMaskData)
    except Exception as err:
        arcpy.AddMessage("------ExtractByMask Failed") 
        arcpy.AddMessage(err)
        return
    # Resample
    # resampletif=outpath+"\\"+os.path.split(raster)[1][:-4]+'_resampletif_'+getTime()+'.tif'
    # arcpy.Resample_management(outExtractByMask, resampletif, "0.0005", "BILINEAR")
    arcpy.AddMessage("------Raster to point starting...")
    try:
        arcpy.RasterToPoint_conversion(outExtractByMask, inPointFeatures, 'VALUE')
    except :
        arcpy.AddMessage('-------'+inPointFeatures+" already exists...")
    arcpy.AddMessage("------Raster to point finished")
    # Kriging interpolation
    titfile=os.path.join(outpath,os.path.split(raster)[1][:-4]+".tif")
    field = "GRID_CODE"
    cellSize =0.0005
    outVarRaster = ""
    kModel = "Spherical"
    kRadius = 20000
    # Execute Kriging
    try:
        arcpy.Kriging_3d(inPointFeatures, field, titfile, kModel, cellSize, kRadius, outVarRaster)
    except :
        arcpy.AddMessage('------Kriging_3d Failed')
    arcpy.AddMessage("------Raster to point starting...")
    outPointFeatures=outpath+"\\"+os.path.split(raster)[1][:-4]+'.shp'
    try:
        arcpy.RasterToPoint_conversion(titfile, outPointFeatures, 'VALUE')
    except :
        arcpy.AddMessage('-------'+outPointFeatures+" already exists...")
    arcpy.AddMessage("------Raster to point finished")
    selectByLocatin(outPointFeatures)
Example #15
def rst_to_pnt(inRst, pntShp, rstField=None):
    """Raster to Points Feature Class"""

    rstField = 'Value' if not rstField else rstField

    arcpy.RasterToPoint_conversion(in_raster=inRst,
                                   out_point_features=pntShp,
                                   raster_field=rstField)

    return pntShp
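A short usage sketch (hypothetical paths; raster_field falls back to 'Value'):

pnt_shp = rst_to_pnt(r"C:\data\dem.tif", r"C:\data\dem_points.shp")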
Example #16
    def spatial_join_analysis(self, raster, curve_data):
        # uses curve radius data to mark all points within this radius of the input raster

        self.logger.info("   -> Converting raster to points ...")
        try:
            cov_points = self.cache + "cov_points.shp"
            arcpy.RasterToPoint_conversion(raster, cov_points)
            zero_raster = Con((IsNull(raster) == 1), (IsNull(raster) * 1), 1)
            all_points = self.cache + "all_points.shp"
            arcpy.RasterToPoint_conversion(zero_raster, all_points)
        except:
            self.error = True
            self.logger.info("ERROR: Could not perform spatial radius operations (RasterToPoint_conversion).")
        self.logger.info("   -> Delineating " + self.cover_type + " effect radius (spatial join radius: " + str(curve_data[0][0]) + ") ...")
        try:
            out_points = self.cache + "spatjoin.shp"
            rad = float(curve_data[0][0])
            arcpy.SpatialJoin_analysis(target_features=all_points, join_features=cov_points,
                                       out_feature_class=out_points, join_operation="JOIN_ONE_TO_MANY",
                                       join_type="KEEP_COMMON", field_mapping="", match_option="CLOSEST",
                                       search_radius=rad, distance_field_name="")
        except:
            self.error = True
            self.logger.info("ERROR: Could not perform spatial radius operations (SpatialJoin_analysis).")
        self.logger.info("   -> Converting points back to raster ...")
        try:
            arcpy.PointToRaster_conversion(in_features=out_points, value_field="grid_code",
                                           out_rasterdataset=self.cache + "cov_points",
                                           cell_assignment="MEAN", cellsize=self.cell_size)
            __temp_ras__ = arcpy.Raster(self.cache + "cov_points.tif")
            self.logger.info("   -> Assigning spatial HSI value (" + str(curve_data[1][0]) + ") where applies (raster calculator) ...")
            __ras__ = Con(__temp_ras__ > 0, curve_data[1][0])  # assign HSI value
        except:
            self.error = True
            self.logger.info("ERROR: Could not perform spatial radius operations (back conversion).")
        if not self.error:
            return Float(CellStatistics([__ras__], "SUM", "DATA"))
        else:
            return -1
Example #17
def listfile(file):
    pathDir = os.listdir(file)
    for contents in pathDir:
        # contents=100206
        try:
            hourtime = contents[-2:]
            datetime = contents[-4:-2]
            filename = "PM25_" + hourtime + '.tif'
            filepath = os.path.join(file, contents)
            if os.path.isdir(filepath):
                tifpath = filepath + '\\PM25\\tif\\' + filename
                print(tifpath)
                pm25shpdir = "F:\\changshu\\siteetis\\pm251002\\pm25_1002" + hourtime
                try:
                    os.makedirs(pm25shpdir)
                except:
                    print('')
                timeformat = '2019' + "-10-02" + " " + hourtime + ":" + "00" + ":" + "00"
                timeArray = time.strptime(timeformat, "%Y-%m-%d %H:%M:%S")
                timestamp = time.mktime(timeArray)
                '''try:
                    # execute the SQL statement
                    sql='INSERT INTO newchangshu.ams_wms (wmsname, url, data_time, datatype, city,create_time,create_by,workspace) VALUES ({},{},{},{},{},{},{},{})'.format('"pm25_1008'+hourtime+'"','"http://119.3.37.164:8090/geoserver"',timestamp,u'"pm25"',u'"常熟"'.decode('ISO-8859-1').encode('utf-8'),int(time.time()),'"admin"','"0"')
                    #sql='insert into ams_sixindex values({},{},{},{},{},{})'.format(inspection_num,car_id,238,create_time,update_time)
                    cursor.execute(sql)
                    # commit to the database for execution
                    db.commit()
                    pass
                except:
                    # roll back if an error occurs
                    traceback.print_exc()
                    db.rollback()
                continue'''
                inPointFeatures = os.path.join(
                    pm25shpdir, "pm25_1002" + hourtime + hourtime + '.shp')
                # Raster to point
                arcpy.RasterToPoint_conversion(tifpath, inPointFeatures,
                                               "VALUE")
                pm25zippath = 'F:\\zip\\changshu\\PM2502'
                try:
                    os.makedirs(pm25zippath)
                except:
                    print('')
                make_zip(
                    pm25shpdir, pm25zippath + "\\" + "PM25_1002" + hourtime +
                    hourtime + ".zip")
                pass
        except:
            pass
Example #18
def createTraceFiles(in_layer, name):
    # Select the AOT points within Changshu enterprise zone 1 and export them
    arcpy.AddMessage("-------Creating trace-source files...")
    # Location of the enterprise zone files
    zoneLocation = u'E:\\常熟溯源\\矢量\\常熟市溯源范围'
    # Create the folder
    # AOT files
    aot_Content = 'F:\\changshutraceresult\\aot'
    try:
        os.makedirs(aot_Content)
    except:
        arcpy.AddMessage('-------' + aot_Content + ' already exists')
    path2Dir = os.listdir(zoneLocation)
    for zonefile in path2Dir:
        if zonefile[-4:].lower() == '.shp':
            zone_Layer = os.path.join(zoneLocation, zonefile)
            arcpy.AddMessage('-------' + name + ' zone ' + zonefile[-6:-4] +
                             ' -------------------')
            try:
                # Clip
                # Execute ExtractByMask
                outClipRaster = os.path.join(
                    u'F:\\常熟溯源\\AOT\\裁剪',
                    'AOT裁剪分区' + zonefile[-6:-4] + '_' + name + '.tif')
                arcpy.Clip_management(in_layer, "#", outClipRaster, zone_Layer,
                                      "#", "ClippingGeometry",
                                      "NO_MAINTAIN_EXTENT")
                aotFeatures = os.path.join(
                    u'F:\\常熟溯源\\AOT\\shp',
                    'aot' + zonefile[-6:-4] + '_' + name + '.shp')
                # Raster to point
                arcpy.RasterToPoint_conversion(outClipRaster, aotFeatures,
                                               'VALUE')
                arcpy.AddMessage(
                    '-------' + name + ' zone ' + zonefile[-6:-4] +
                    ' RasterToPoint_conversion Success-------------------')
                # Convert AOT to raster with the resolution set to 0.0005; parameters as follows
                out_rasterdataset = os.path.join(
                    aot_Content,
                    'AOT分区' + zonefile[-6:-4] + '_' + name + '.tif')
                arcpy.PointToRaster_conversion(aotFeatures, 'GRID_CODE',
                                               out_rasterdataset, 'MAXIMUM',
                                               'NONE', '0.00055')
            except:
                arcpy.AddMessage('-------' + name + ' zone ' + zonefile[-6:-4] +
                                 ' Clip_management error')
                traceback.print_exc()
    arcpy.AddMessage("------制作溯源文件完成")
Example #19
def buildPointGrids(inFeature, tempRaster, cell_size, outShapefile):
    # Convert from polygon to raster and raster to point to create point grid
    arcpy.PolygonToRaster_conversion(inFeature, "OBJECTID", tempRaster, "CELL_CENTER", "NONE", cell_size)
    # Determine if raster contains only NoData values
    noData = int(arcpy.GetRasterProperties_management(tempRaster, 'ALLNODATA')[0])
    if noData == 0:
        # Convert raster to point grid
        arcpy.RasterToPoint_conversion(tempRaster, outShapefile, "VALUE")
        # Add XY Coordinates to feature class in the NAD_1983_Alaska_Albers projection
        arcpy.AddXY_management(outShapefile)
        # Delete intermediate files
        arcpy.Delete_management(tempRaster)
    elif noData == 1:
        arcpy.AddMessage("All values for this watershed are nodata...")
        # Delete intermediate files
        arcpy.Delete_management(tempRaster)
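A hedged usage sketch for the helper above, assuming hypothetical file-geodatabase inputs (so the OBJECTID field exists) and a 30-unit cell size:

buildPointGrids(r"C:\data\work.gdb\watershed", r"C:\data\work.gdb\temp_grid", 30,
                r"C:\data\watershed_points.shp")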
Example #20
def raster_to_point(ZSfile):

    print "Converting zonal statistics raster to points..."

    ZSpoints = "zs_points.shp"
    if arcpy.Exists(ZSpoints):
        arcpy.Delete_management(ZSpoints)

    arcpy.RasterToPoint_conversion(ZSfile, ZSpoints, "Value")

    print "Finished raster to points!\n"

    bar['value'] = 60
    bar.update()

    spatial_join(ZSpoints)
Example #21
def extract_selection(streamlines, section_counter, slope):
    ##        arcpy.AddMessage("Buffering...")
    streambuffers = os.path.join(env.workspace,
                                 naming + "_buffers" + str(section_counter))
    arcpy.Buffer_analysis(streamlines, streambuffers, "25 Meters")

    ##        arcpy.AddMessage("Masking...")
    masked = ExtractByMask(slope, streambuffers)
    masked.save(
        os.path.join(env.workspace,
                     naming + "_maskedraster" + str(section_counter)))

    ##        arcpy.AddMessage("Converting to points...")
    slope_points = os.path.join(env.workspace,
                                naming + "_slopepoints" + str(section_counter))
    arcpy.RasterToPoint_conversion(masked, slope_points, "Value")

    return slope_points
Example #22
def TwoRas2OnePnt(dirRas,
                  magRas,
                  outFeature,
                  optArgs,
                  arrName1="DIR",
                  arrName2="MAG",
                  csizeMultiplier=1):
    #  uses either gdal or arcpy
    #  Create and save an ESRI point shapefile at pixel center coordinates
    #  with values from array of the same dimensions
    #
    espg = getModel_SR(optArgs['model'])
    # Using GDAL
    if optArgs['noArc']:
        dirRas = dirRas + '.tif'
        magRas = magRas + '.tif'
        outFeature = outFeature + '.shp'
        array1 = rasFile2array(dirRas)
        array2 = rasFile2array(magRas)
        Two_array2shp(array1, array2, outFeature, dirRas, "DIR", "MAG",
                      csizeMultiplier, espg)
    # Using ArcPy
    else:
        if 'clp' in dirRas:
            fgdb = optArgs['clpgdb']
        else:
            fgdb = optArgs['geodb']
        arcpy.env.workspace = fgdb
        arcpy.RasterToPoint_conversion(in_raster=dirRas,
                                       out_point_features=outFeature,
                                       raster_field=arrName1)
        rasFldMap = magRas + ' ' + arrName2
        arcpy.gp.ExtractMultiValuesToPoints_sa(outFeature, rasFldMap, "NONE")
        if csizeMultiplier != 1:
            express = "!Magnitude! * "+ str(csizeMultiplier)\
                                      + " * "+str(csizeMultiplier)
            arcpy.CalculateField_management(in_table=outFeature,
                                            field=arrName2,
                                            expression=express,
                                            expression_type="PYTHON_9.3",
                                            code_block="#")
        arcpy.env.workspace = optArgs['geodb']
Example #23
def eachFile(shppath, filepath):
    pathDir = os.listdir(filepath)
    path_file_number = glob.glob(pathname=filepath + '\\*.tif')
    path_file_number = bytes(len(path_file_number))
    print "alter " + path_file_number + " files"
    count = 0
    for inRaster in pathDir:
        outExtractByMask = ExtractByMask(outRaster, inMaskData)
        outPoint = shppath + os.path.splitext(inRaster)[0] + ".shp"
        field = "VALUE"
        count += 1
        if (os.path.exists(outPoint)):
            print outPoint + " already exists!"
            continue
        print "exporting " + inRaster
        arcpy.RasterToPoint_conversion(inRaster, outPoint, field)
        print inRaster + " exported"
        file_number = bytes(count)
        print "exported " + file_number + " files"
    print "success!"
Example #24
def RasterToSurface(dem, out_fc, field):
    if not arcpy.Exists(dem):
        arcpy.AddIDMessage("ERROR", 110, dem)
        raise SystemExit()

    desc = arcpy.Describe(dem)
    if desc.spatialReference.name == "Unknown":
        arcpy.AddIDMessage("ERROR", 1024)
        raise SystemExit()

    InitParams(desc.spatialReference)
    rowCount, colCount = desc.height, desc.width

    arcpy.env.outputCoordinateSystem = desc.spatialReference.GCS
    result = arcpy.RasterToPoint_conversion(dem, "DEM2", "Value")
    demArray = arcpy.da.FeatureClassToNumPyArray(result, ("SHAPE@X", "SHAPE@Y", "grid_code")).reshape(
        (rowCount, colCount))
    arcpy.Delete_management(result)

    dtype = np.dtype([('X', '<f4'), ('Y', '<f4'), ('{0}'.format(field), '<f4')])
    surfaceArray = np.zeros(((rowCount - 1) * 2, (colCount - 1)), dtype)

    for row in xrange(0, rowCount - 1):
        for col in xrange(0, colCount - 1):
            pointA = demArray[row, col]
            pointB = demArray[row, col + 1]
            pointC = demArray[row + 1, col + 1]
            pointD = demArray[row + 1, col]

            xABC = (pointA[0] + pointB[0] + pointC[0]) / 3.0
            yABC = (pointA[1] + pointB[1] + pointC[1]) / 3.0
            sABC = CalcTriangleArea(pointA, pointB, pointC)
            surfaceArray[row * 2, col] = (xABC, yABC, sABC)

            xADC = (pointA[0] + pointD[0] + pointC[0]) / 3.0
            yADC = (pointA[1] + pointD[1] + pointC[1]) / 3.0
            sADC = CalcTriangleArea(pointA, pointD, pointC)  # unit: km2
            surfaceArray[row * 2 + 1, col] = (xADC, yADC, sADC)

    arcpy.da.NumPyArrayToFeatureClass(surfaceArray.reshape((rowCount - 1) * (colCount - 1) * 2, ),
                                      out_fc, ["X", "Y"], desc.spatialReference.GCS)
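CalcTriangleArea() and InitParams() are not shown in this example; a rough, hypothetical stand-in for the area helper, assuming lon/lat input and an equirectangular approximation (consistent with the 'unit: km2' comment), might be:

import math

def CalcTriangleArea(p1, p2, p3):
    # hypothetical stand-in: scale degrees to kilometres around the mean latitude,
    # then apply the planar shoelace formula
    mean_lat = math.radians((p1[1] + p2[1] + p3[1]) / 3.0)
    km_per_deg_lat = 111.32
    km_per_deg_lon = 111.32 * math.cos(mean_lat)
    x1, y1 = p1[0] * km_per_deg_lon, p1[1] * km_per_deg_lat
    x2, y2 = p2[0] * km_per_deg_lon, p2[1] * km_per_deg_lat
    x3, y3 = p3[0] * km_per_deg_lon, p3[1] * km_per_deg_lat
    return abs((x1 * (y2 - y3) + x2 * (y3 - y1) + x3 * (y1 - y2)) / 2.0)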
Example #25
 def create_lfp(self):
     arcpy.gp.FocalStatistics_sa(self.i.cat, self.i.l_rng,
                                 "Rectangle 1 1 CELL", "RANGE",
                                 "DATA")  # distance from boundaries
     arcpy.gp.Con_sa(self.i.l_rng, self.i.cat, self.i.l_ild, "#",
                     "Value = 0")
     arcpy.gp.IsNull_sa(self.i.l_ild, self.i.l_ild_n)
     arcpy.gp.ExtractByMask_sa(self.i.l_ild_n, self.i.cat, self.i.l_ild_n_c)
     arcpy.gp.Con_sa(self.i.l_ild_n_c, self.e.d, self.i.l_fnd, "#",
                     "Value = 0")
     arcpy.gp.Fill_sa(self.i.l_fnd, self.i.l_fill, "")
     arcpy.gp.FlowDirection_sa(self.i.l_fill, self.i.l_fdr, "NORMAL", "")
     arcpy.gp.FlowLength_sa(self.i.l_fdr, self.i.l_fln, "DOWNSTREAM", "")
     self.process_l_lmx()
     arcpy.gp.EqualTo_sa(self.i.l_lmx, self.i.l_fln, self.i.l_lmx_fln)
     arcpy.gp.Con_sa(self.i.l_lmx_fln, self.i.l_ild, self.i.l_mxp_r, "#",
                     "Value = 1")
     arcpy.RasterToPoint_conversion(in_raster=self.i.l_mxp_r,
                                    out_point_features=self.i.l_mxp_v,
                                    raster_field="Value")
     arcpy.CopyFeatures_management(in_features=self.i.l_mxp_v,
                                   out_feature_class=self.i.generate_temps(
                                       "v", "mxp"))
     arcpy.DeleteIdentical_management(in_dataset=self.i.l_mxp_v,
                                      fields="GRID_CODE",
                                      xy_tolerance="",
                                      z_tolerance="0")
     arcpy.gp.CostPath_sa(self.i.l_mxp_v, self.i.fdr, self.i.fdr,
                          self.i.l_pth, "EACH_CELL",
                          "GRID_CODE")  # main function
     print "Please wait...",
     if sleep(90) == None:
         arcpy.gp.StreamToFeature_sa(self.i.l_pth, self.i.fdr, self.i.l_lfp,
                                     "NO_SIMPLIFY")
         arcpy.SpatialJoin_analysis(target_features=self.i.l_lfp,
                                    join_features=self.i.catchment,
                                    out_feature_class=self.i.l_spa,
                                    join_operation="JOIN_ONE_TO_ONE",
                                    join_type="KEEP_ALL",
                                    match_option="HAVE_THEIR_CENTER_IN")
Example #26
def processUrbanExtents(extents, popRaster, tempFolder, fieldPref="c"):
    allCompactness = []
    filesToDelete = []
    #add two output fields
    tryAddField(extents, "%sCom" % fieldPref, "FLOAT")
    tryAddField(extents, "%sNCom" % fieldPref, "FLOAT")
    with arcpy.da.UpdateCursor(
            extents,
        ["OID@", "%sCom" % fieldPref,
         "%sNCom" % fieldPref]) as cursor:
        for featRow in cursor:
            tRaster = os.path.join(tempFolder, "pop_%s" % featRow[0])
            tRasterPoints = os.path.join(tempFolder,
                                         "pop_%s_pts.shp" % featRow[0])
            tLayer = "pop_%s" % featRow[0]
            filesToDelete.append(tRaster)
            filesToDelete.append(tRasterPoints)

            arcpy.MakeFeatureLayer_management(extents, tLayer,
                                              '"FID" = %s' % featRow[0])
            #Use the current feature to extract the current raster
            arcpy.Clip_management(popRaster, '#', tRaster, tLayer, '0',
                                  'ClippingGeometry', 'MAINTAIN_EXTENT')
            try:
                arcpy.RasterToPoint_conversion(tRaster, tRasterPoints)
                compactness = main(tRasterPoints, "GRID_CODE", tempFolder)
                featRow[1] = compactness[0]
                featRow[2] = compactness[1]
            except:
                tPrint("Something went wrong with feature %s" % featRow[0])
                featRow[1] = 0
                featRow[2] = 0
            cursor.updateRow(featRow)
    for f in filesToDelete:
        arcpy.Delete_management(f)
    return (allCompactness)
Example #27
def attributeESApoints(cntry, outFeatures, outDB, ntlFile, lcRaster, ghslValu, globalOutlines):
    tempLC = "%s/%s" % (outDB,"ESA2009")
    tempPts = "%s/%s" % (outDB,"tPts")
    tempAdmin = "%s/%s" % (outDB, "Admin1")
    
    arcpy.Select_analysis(in_features=globalOutlines, out_feature_class=tempAdmin, where_clause=""""ISO3" = '%s'""" % cntry)
    tPrint("***Created Admin File")
    
    arcpy.Clip_management(in_raster=lcRaster, rectangle="", out_raster=tempLC, in_template_dataset=tempAdmin, nodata_value="0", clipping_geometry="ClippingGeometry")
    tPrint("***Clipped ESA Globcover")
    
    arcpy.RasterToPoint_conversion(in_raster=tempLC, out_point_features=tempPts, raster_field="Value")
    renameField(tempPts, "grid_code", "Globcover")
    tPrint("***Converted to points")
    
    ExtractValuesToPoints(tempPts, ntlFile, outFeatures, "NONE", "VALUE_ONLY")
    renameField(outFeatures, "RASTERVALU", "NTL")
    tPrint("***Extracted NTL Values")
    fOut = outFeatures.replace("Globcover", "Globcover_GHSL")
    
    arcpy.sa.ExtractValuesToPoints(outFeatures, ghslValu, fOut)
    renameField(fOut, "RASTERVALU", "GHSL")
    arcpy.Delete_management(outFeatures)
    arcpy.Rename_management(fOut, outFeatures)
Example #28
def main(argv=None):
    """Iterate over LM, BM, and restoration tasks."""
    if argv is None:
        argv = sys.argv  # Get parameters from ArcGIS tool dialog

    start_time = time.clock()

    # USER SETTINGS ######################################################

    # Restoration Settings
    # ALL input data must be in the same projection

    # Set to True to restore highest ROI. Set to False to restore strongest
    # barrier
    restore_max_roi = argv[1]

    # Resistance value of restored habitat.  Must be 1 or greater.
    restored_resistance_val = argv[2]

    # No spaces or special chars in paths or gdb names
    restoration_data_gdb = argv[3]

    # No spaces in path, avoid using dropbox or network drive
    # Project directories will be created in this (iter1, iter2...) as will an
    # output geodatabase
    output_dir = argv[4]

    # Resistance raster. Should be in input GDB
    resistance_ras = argv[5]
    # Core area feature class. Should be in input GDB 'URWA_HCAs_Doug_Grant'
    core_fc = argv[6]

    core_fn = argv[7]  # Core area field name

    radius = argv[8]  # Restoration radius in meters
    iterations = argv[9]  # Number of restorations to perform

    # If less than this proportion of ag in circle, don't consider restoring
    # circle
    min_ag_threshold = argv[10]

    # Don't consider barriers below this improvement score (average improvement
    # per meter diameter restored)
    min_improvement_val = argv[11]

    # Average per-m2 parcel cost per pixel. Snapped to resistance raster.
    parcel_cost_ras = argv[12]

    # Right now this is just a raster with all pixels set to 0.113174
    restoration_cost_ras = argv[13]

    ag_ras = argv[14]  # 1=Ag, 0=Not Ag

    # Some restorations benefit multiple corridors.
    # 'Maximum' takes the greatest improvement across core area pairs
    # 'Sum' adds improvement scores across all pairs.
    barrier_combine_method = argv[15]

    # Use cwd_thresh = None for no threshold. Use cwd_thresh = X to not
    # consider restorations more than X map units away from each core area.
    cwd_thresh = argv[16]

    # END USER SETTINGS ######################################################

    try:
        # Setup path and create directories
        gprint('Hey! Make sure everything is in the same projection!\n')
        gprint('Setting up paths and creating directories')
        sys.path.append('..\\toolbox\\scripts')
        res_ras = os.path.join(restoration_data_gdb, resistance_ras)
        core_fc_path = os.path.join(restoration_data_gdb, core_fc)

        # Set up a NEW output gdb (leave previous ones on drive)
        i = None
        for i in range(1, 200):
            output_gdb = 'restorationOutput' + str(i) + '.gdb'
            if not arcpy.Exists(os.path.join(output_dir, output_gdb)):
                break
            gprint('Previous output GDB ' + output_gdb + ' exists.  '
                   'Delete to save disk space.')
        arcpy.CreateFileGDB_management(output_dir, output_gdb)
        output_gdb = os.path.join(output_dir, output_gdb)
        log_file = os.path.join(output_gdb,
                                'Iterate Barriers' + str(i) + '.py')

        # Write a copy of this file to output dir as a record of settings
        shutil.copyfile(__file__, log_file)

        arcpy.env.cellSize = res_ras
        arcpy.env.extent = res_ras
        arcpy.env.snapRaster = res_ras
        arcpy.env.overwriteOutput = True
        arcpy.env.scratchWorkspace = output_gdb
        arcpy.env.workspace = output_gdb

        spatialref = arcpy.Describe(res_ras).spatialReference
        mapunits = spatialref.linearUnitName
        gprint('Cell size = ' + str(arcpy.env.cellSize) + ' ' + mapunits + 's')

        # Calculate fraction of ag within radius of each pixel
        gprint('Calculating purchase cost, fraction of ag, etc within radius '
               'of each pixel.')
        ag_ras = os.path.join(restoration_data_gdb, ag_ras)
        in_neighborhood = arcpy.sa.NbrCircle(radius, "MAP")
        arcpy.env.extent = ag_ras
        out_focal_stats = arcpy.sa.FocalStatistics(ag_ras, in_neighborhood,
                                                   "MEAN", "NODATA")
        proportion_ag_ras = os.path.join(output_gdb, 'proportionAgRas')
        out_focal_stats.save(proportion_ag_ras)
        arcpy.env.extent = res_ras

        # Calculate purchase cost of circles
        parcel_cost_ras = os.path.join(restoration_data_gdb, parcel_cost_ras)
        arcpy.env.extent = parcel_cost_ras
        out_focal_stats = arcpy.sa.FocalStatistics(parcel_cost_ras,
                                                   in_neighborhood, "MEAN",
                                                   "DATA")
        cost_focal_stats_ras = os.path.join(output_gdb, 'cost_focal_stats_ras')
        out_focal_stats.save(cost_focal_stats_ras)
        arcpy.env.extent = res_ras

        circle_area = float(npy.pi * radius * radius)
        outras = arcpy.sa.Raster(cost_focal_stats_ras) * circle_area
        purch_cost_ras = os.path.join(output_gdb, 'purchaseCostRaster')
        outras.save(purch_cost_ras)
        lu.delete_data(cost_focal_stats_ras)

        restoration_cost_ras = os.path.join(restoration_data_gdb,
                                            restoration_cost_ras)
        outras = (
            arcpy.sa.Raster(purch_cost_ras) +
            (arcpy.sa.Raster(restoration_cost_ras) * radius * radius * npy.pi))
        total_cost_ras = os.path.join(output_gdb, 'totalCostRaster')
        outras.save(total_cost_ras)

        # Create mask to remove areas without cost data
        arcpy.env.extent = total_cost_ras
        cost_mask_ras = os.path.join(output_gdb, 'costMaskRaster')
        cost_thresh = 0
        out_con = arcpy.sa.Con(
            (arcpy.sa.Raster(total_cost_ras) > float(cost_thresh)), 1)
        out_con.save(cost_mask_ras)
        arcpy.env.extent = res_ras

        # Create mask to remove areas below ag threshold
        out_con = arcpy.sa.Con(
            (arcpy.sa.Raster(proportion_ag_ras) > float(min_ag_threshold)), 1)
        ag_mask_ras = os.path.join(output_gdb, 'agMaskRaster')
        out_con.save(ag_mask_ras)

        do_step_1 = 'true'
        do_step_2 = 'true'
        do_step_5 = 'false'
        all_restored_areas_ras = ''

        for cur_iter in range(1, iterations + 1):
            start_time1 = time.clock()

            # Some env settings get changed by linkage mapper and must be
            # reset here
            arcpy.env.cellSize = res_ras
            arcpy.env.extent = res_ras
            arcpy.env.snapRaster = res_ras
            arcpy.env.scratchWorkspace = output_gdb
            arcpy.env.workspace = output_gdb

            lu.dashline(1)
            gprint('Running iteration number ' + str(cur_iter))
            proj_dir = os.path.join(output_dir,
                                    'iter' + str(cur_iter) + 'Proj')
            lu.create_dir(output_dir)
            lu.delete_dir(proj_dir)
            lu.create_dir(proj_dir)
            if cur_iter > 1:  # Copy previous s2 linktable to new project dir
                datapass_dir = os.path.join(proj_dir, 'datapass')
                lu.create_dir(datapass_dir)
                proj_dir1 = os.path.join(output_dir, 'iter1Proj')
                datapass_dir_iter1 = os.path.join(proj_dir1, 'datapass')
                s2_link_tbl_iter1 = os.path.join(datapass_dir_iter1,
                                                 'linkTable_s2.csv')
                s2_link_tbl = os.path.join(datapass_dir, 'linkTable_s2.csv')
                shutil.copyfile(s2_link_tbl_iter1, s2_link_tbl)

            # Run Linkage Mapper

            # Copy distances text file from earlier LM run to the output
            # directory- speeds things up!
            dist_file = os.path.join(output_dir, core_fc + '_dists.txt')

            if not os.path.exists(dist_file):
                if cur_iter == 1:
                    gprint('Will calculate distance file.')
                    dist_file = '#'
                else:
                    proj_dir1 = os.path.join(output_dir, 'iter1Proj')
                    dist_file1 = os.path.join(proj_dir1,
                                              core_fc + '_dists.txt')
                    # Put a copy here for future runs
                    shutil.copyfile(dist_file1, dist_file)

            arcpy.env.scratchWorkspace = output_gdb
            arcpy.env.workspace = output_gdb

            argv = ('lm_master.py', proj_dir, core_fc_path, core_fn, res_ras,
                    do_step_1, do_step_2, 'Cost-Weighted & Euclidean',
                    dist_file, 'true', 'true', 'false', '4', 'Cost-Weighted',
                    'true', do_step_5, 'true', '200000', '10000', '#', '#',
                    '#', '#')
            gprint('Running ' + str(argv))
            lm_master.lm_master(argv)
            do_step_1 = 'false'  # Can skip for future iterations
            do_step_2 = 'false'  # Can skip for future iterations
            do_step_5 = 'false'  # Skipping for future iterations

            start_radius = str(radius)
            end_radius = str(radius)
            radius_step = '0'
            save_radius_ras = 'false'
            write_pct_ras = 'false'

            argv = ('barrier_master.py', proj_dir, res_ras, start_radius,
                    end_radius, radius_step, barrier_combine_method,
                    save_radius_ras, write_pct_ras, cwd_thresh)
            gprint('Running ' + str(argv))
            barrier_master.bar_master(argv)

            # Some env settings get changed by linkage mapper and must be
            # reset here
            arcpy.env.cellSize = res_ras
            arcpy.env.extent = res_ras
            arcpy.env.snapRaster = res_ras
            arcpy.env.scratchWorkspace = output_gdb
            arcpy.env.workspace = output_gdb

            gprint('Finding restoration circles with max barrier score / ROI')
            # Find points with max ROI
            prefix = os.path.basename(proj_dir)
            if barrier_combine_method == 'Sum':
                sum_suffix = 'Sum'
            else:
                sum_suffix = ''
            barrier_fn = (prefix + "_BarrierCenters" + sum_suffix + "_Rad" +
                          str(radius))
            barrier_ras = os.path.join(proj_dir, 'output', 'barriers.gdb',
                                       barrier_fn)
            if not arcpy.Exists(barrier_ras):
                msg = ('Error: cannot find barrier output: ' + barrier_ras)
                lu.raise_error(msg)

            if cur_iter > 1:
                gprint('Creating mask for previously restored areas')
                in_neighborhood = arcpy.sa.NbrCircle(radius, "MAP")
                arcpy.env.extent = all_restored_areas_ras
                out_focal_stats = arcpy.sa.FocalStatistics(
                    all_restored_areas_ras, in_neighborhood, "MEAN", "DATA")
                all_restored_focal_ras = os.path.join(
                    output_gdb, 'allRestFocRas_iter' + str(cur_iter))

                # Anything > 0 would include a restored area
                out_focal_stats.save(all_restored_focal_ras)
                arcpy.env.extent = res_ras
                rest_mask_ras = os.path.join(
                    output_gdb, 'restMaskRaster_iter' + str(cur_iter))
                minval = 0
                out_con = arcpy.sa.Con(
                    (arcpy.sa.Raster(all_restored_focal_ras) == float(minval)),
                    1)
                out_con.save(rest_mask_ras)

            # Candidate areas have not been restored, have cost data, meet
            # minimum improvement score criteria, and have enough ag in them
            candidate_barrier_ras = os.path.join(
                output_gdb, 'candidateBarrierRaster' + '_iter' + str(cur_iter))
            if cur_iter > 1:
                gprint('Creating candidate restoration raster using barrier '
                       'results, previous restorations, and selection '
                       'criteria')

                # ROI scores will be in terms of total improvement
                # (= score * diameter)
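                # The cost, ag, and restoration masks are presumably 1 where
                # their criterion is met and NoData elsewhere, so the product
                # keeps only cells that pass every screen, scaled by the
                # restored diameter to give total improvement.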
                out_calc = (arcpy.sa.Raster(cost_mask_ras) *
                            arcpy.sa.Raster(ag_mask_ras) *
                            arcpy.sa.Raster(barrier_ras) *
                            arcpy.sa.Raster(rest_mask_ras) * (radius * 2))
            else:
                out_calc = (arcpy.sa.Raster(cost_mask_ras) *
                            arcpy.sa.Raster(ag_mask_ras) *
                            arcpy.sa.Raster(barrier_ras) * radius * 2)

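            # min_improvement_val is a per-meter threshold, so scale it by the
            # circle diameter before comparing against the total-improvement
            # surface. If restoration only lowers resistance to a value > 1,
            # first subtract the resistance that would remain across the
            # restored diameter.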
            min_barrier_score = min_improvement_val * radius * 2
            if restored_resistance_val != 1:
                out_calc_2 = (out_calc - (2 * radius *
                                          (restored_resistance_val - 1)))
                out_con = arcpy.sa.Con(
                    (out_calc_2 >= float(min_barrier_score)), out_calc_2)
            else:
                out_con = arcpy.sa.Con((out_calc >= float(min_barrier_score)),
                                       out_calc)
            out_con.save(candidate_barrier_ras)
            lu.build_stats(candidate_barrier_ras)

            purchase_roi_ras = os.path.join(
                output_gdb, 'purchaseRoiRaster' + '_iter' + str(cur_iter))
            out_calc = (arcpy.sa.Raster(candidate_barrier_ras) /
                        arcpy.sa.Raster(purch_cost_ras))
            out_calc.save(purchase_roi_ras)
            lu.build_stats(purchase_roi_ras)

            total_roi_ras = os.path.join(
                output_gdb, 'purchaseRestRoiRaster' + '_iter' + str(cur_iter))
            out_calc = (arcpy.sa.Raster(candidate_barrier_ras) /
                        arcpy.sa.Raster(total_cost_ras))
            out_calc.save(total_roi_ras)
            lu.build_stats(total_roi_ras)

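            # GetRasterProperties_management returns a Result object whose
            # getOutput(0) is a string, hence the float() cast.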
            max_barrier = float(
                arcpy.GetRasterProperties_management(candidate_barrier_ras,
                                                     "MAXIMUM").getOutput(0))
            gprint('Maximum barrier improvement score: ' + str(max_barrier))
            if max_barrier < 0:
                arcpy.AddWarning("\nNo barriers found that meet CWD or Ag "
                                 "threshold criteria.")

            max_purch_roi = arcpy.GetRasterProperties_management(
                purchase_roi_ras, "MAXIMUM")
            gprint('Maximum purchase ROI score: ' +
                   str(max_purch_roi.getOutput(0)))

            max_roi = arcpy.GetRasterProperties_management(
                total_roi_ras, "MAXIMUM")
            gprint('Maximum total ROI score: ' + str(max_roi.getOutput(0)))

            if restore_max_roi:
                out_point = os.path.join(
                    output_gdb, 'maxRoiPoint' + '_iter' + str(cur_iter))
                gprint('Choosing circle with maximum ROI to restore')
                out_con = arcpy.sa.Con(
                    (arcpy.sa.Raster(total_roi_ras) >= float(
                        max_roi.getOutput(0))), total_roi_ras)
                max_roi_ras = os.path.join(output_gdb, 'max_roi_ras')
                out_con.save(max_roi_ras)
                # Save max ROI to point
                try:
                    arcpy.RasterToPoint_conversion(max_roi_ras, out_point)
                except Exception:
                    msg = ('Error: it looks like there are no viable '
                           'restoration candidates.')
                    lu.raise_error(msg)

            else:  # Restoring strongest barrier instead
                out_point = os.path.join(
                    output_gdb, 'maxBarrierPoint' + '_iter' + str(cur_iter))
                gprint('Choosing circle with maximum BARRIER IMPROVEMENT SCORE'
                       ' to restore')
                out_con = arcpy.sa.Con(
                    (arcpy.sa.Raster(candidate_barrier_ras) >= max_barrier),
                    candidate_barrier_ras)
                max_barrier_ras = os.path.join(output_gdb, 'maxBarrierRaster')
                out_con.save(max_barrier_ras)
                # Save max barrier to point
                try:
                    arcpy.RasterToPoint_conversion(max_barrier_ras, out_point)
                except Exception:
                    msg = ('Error: it looks like there are no viable '
                           'restoration candidates.')
                    lu.raise_error(msg)

            gprint('Done evaluating candidate restorations')
            result = int(arcpy.GetCount_management(out_point).getOutput(0))
            if result > 1:
                # Would be better to retain point with max barrier score when
                # we have multiple points with same ROI
                arcpy.AddWarning('Deleting points with identical '
                                 'ROI/improvement score values')

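                # grid_code holds the raster value transferred by
                # RasterToPoint_conversion (the ROI or improvement score), so
                # records with identical scores collapse to a single point.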
                arcpy.DeleteIdentical_management(out_point, "grid_code", 0.1,
                                                 0.1)

            arcpy.sa.ExtractMultiValuesToPoints(
                out_point,
                [[candidate_barrier_ras, "barrierScore"],
                 [purch_cost_ras, "purchCost"], [total_cost_ras, "totalCost"],
                 [purchase_roi_ras, "purchaseROI"],
                 [total_roi_ras, "totalROI"]], "NONE")

            arcpy.AddField_management(out_point, "restorationNumber", "SHORT")
            arcpy.CalculateField_management(out_point, "restorationNumber",
                                            cur_iter, "PYTHON_9.3")
            arcpy.AddField_management(out_point, "radius", "DOUBLE")
            arcpy.CalculateField_management(out_point, "radius", radius,
                                            "PYTHON_9.3")
            arcpy.AddField_management(out_point, "barrierScore_per_m",
                                      "DOUBLE")
            arcpy.CalculateField_management(
                out_point, "barrierScore_per_m",
                "(float(!barrierScore!) / (!radius! * 2))", "PYTHON_9.3")

            gprint('\nCreating restoration circles')
            if restore_max_roi:
                circle_fc = os.path.join(
                    output_gdb, 'maxRoiCircle' + '_iter' + str(cur_iter))
            else:
                circle_fc = os.path.join(
                    output_gdb, 'maxBarrierCircle' + '_iter' + str(cur_iter))
            arcpy.Buffer_analysis(out_point, circle_fc, radius)
            gprint('Rasterizing restoration circles')
            if restore_max_roi:
                circle_ras = os.path.join(
                    output_gdb, 'maxRoicircle_ras' + '_iter' + str(cur_iter))
            else:
                circle_ras = os.path.join(
                    output_gdb,
                    'maxBarrierCircleRas' + '_iter' + str(cur_iter))
            arcpy.FeatureToRaster_conversion(circle_fc, 'totalROI', circle_ras,
                                             arcpy.env.cellSize)

            # restore raster
            gprint('Digitally restoring resistance raster')
            res_ras_restored = os.path.join(
                output_gdb, 'resRastRestored' + '_iter' + str(cur_iter))
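            # Cells inside the restoration circle take restored_resistance_val;
            # cells outside (where circle_ras is NoData) keep their original
            # resistance values.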
            out_con = arcpy.sa.Con(arcpy.sa.IsNull(circle_ras), res_ras,
                                   restored_resistance_val)
            out_con.save(res_ras_restored)

            all_restored_areas_ras = os.path.join(
                output_gdb, 'allRestoredAreas_iter' + str(cur_iter))
            prev_restored_areas_ras = os.path.join(
                output_gdb, 'allRestoredAreas_iter' + str(cur_iter - 1))
            if cur_iter == 1:
                out_con = arcpy.sa.Con(arcpy.sa.IsNull(circle_ras), 0, 1)
            else:
                # Add this restoration to areas restored
                out_con = arcpy.sa.Con(arcpy.sa.IsNull(circle_ras),
                                       prev_restored_areas_ras, 1)
            out_con.save(all_restored_areas_ras)

            lu.delete_data(circle_ras)

            # Use the restored raster as the resistance raster for the next iteration
            res_ras = res_ras_restored

            # Add circle into feature class with all circles
            if restore_max_roi:
                all_circles_fc = os.path.join(output_gdb, "allCirclesMaxROI")
            else:
                all_circles_fc = os.path.join(output_gdb,
                                              "allCirclesMaxBarriers")
            if cur_iter == 1:
                arcpy.CopyFeatures_management(circle_fc, all_circles_fc)
            else:
                arcpy.Append_management(circle_fc, all_circles_fc, "TEST")
            gprint('Finished iteration #' + str(cur_iter))
            start_time1 = lu.elapsed_time(start_time1)

        gprint('\nDone with iterations.')
        start_time = lu.elapsed_time(start_time)
        gprint('Outputs saved in: ' + output_gdb)
        gprint('Back up your project directories if you want to save '
               'corridor/barrier results.')

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint('****Iteration script failed. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except Exception:
        lu.dashline(1)
        gprint('****Iteration script failed. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)
     ## Process: Clip raster - https://desktop.arcgis.com/en/arcmap/latest/tools/data-management-toolbox/clip.htm
     print 'Clipping raster - rectangular extent...'
     #arcpy.gp.ExtractByMask_sa(RASTER, Basin_Boundary, Basin_Raster)
     arcpy.Clip_management(RASTER, "#", Basin_Raster, Basin_Boundary, "", "NONE", "NO_MAINTAIN_EXTENT")
     
     ## Process: Resample apriori raster - https://desktop.arcgis.com/en/arcmap/10.3/tools/data-management-toolbox/resample.htm
     print 'Resampling basin raster...'
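     # The 0.01 x 0.01 cell size is in the raster's map units (assumed here to
     # be decimal degrees); NEAREST resampling keeps the original cell values
     # rather than interpolating them.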
     arcpy.Resample_management(Basin_Raster, Basin_Raster_resamp, "0.01 0.01", "NEAREST")
     
     ## Process: Clip raster to basin boundary
     print 'Extracting by mask - basin boundary...'
     arcpy.Clip_management(Basin_Raster_resamp, "#", Basin_Raster_resamp_clip, Basin_Boundary, "", "ClippingGeometry", "NO_MAINTAIN_EXTENT")
     
     ## Process: Raster to Point
     print 'Raster to point...'
     arcpy.RasterToPoint_conversion(Basin_Raster_resamp_clip, Basin_Points, "VALUE")
     #
     ## Process: Export Feature Attribute to ASCII
     print 'Export attributes to text...'
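     # ExportXYv_stats writes one comma-delimited record per point (X, Y,
     # GRID_CODE) with a header row of field names.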
     arcpy.ExportXYv_stats(Basin_Points + '.shp', "GRID_CODE", "COMMA", Out_text, "ADD_FIELD_NAMES")
     
 if snow == 'on':
     # list all Snow17 grids (data directories only); ignore extra files in the directory
     all_snow = [name for name in os.listdir(SNOW17_Grids_Folder) if os.path.isdir(os.path.join(SNOW17_Grids_Folder, name))]
     all_snow.remove('info')  # remove the 'info' directory from the list of variables
     for variable in all_snow:
         print ch5id + ' --> ' + variable
         Out_text = output_dir + '\\' + ch5id + '\\' + ch5id + '_' + variable + '.txt' 
 
         ## Local variables:
         RASTER = SNOW17_Grids_Folder + '\\' + variable
Example #30
0
    # Run z-value function along channel axis
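    # Axis 2 is the band axis of the (rows, cols, bands) RGB array, so
    # return_z_value receives one 3-element RGB vector per cell along with the
    # ranges lookup and the no_data value. A hypothetical sketch of that
    # helper (the real definition is earlier in the full script and not shown
    # here), assuming ranges is a sequence of (low_rgb, high_rgb, z) entries:
    #
    #     def return_z_value(rgb, ranges, no_data):
    #         for low, high, z in ranges:
    #             if all(low[i] <= rgb[i] <= high[i] for i in range(3)):
    #                 return z
    #         return no_data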
    z_raster_array = np.apply_along_axis(return_z_value, 2, rgb_raster_array,
                                         ranges, no_data)
    z_raster = arcpy.NumPyArrayToRaster(z_raster_array, lower_left, cell_width,
                                        cell_height, no_data)
    arcpy.AddMessage(
        "NumPy array re-stacked and z-value function applied across channel axis")

    # Create paths
    out_raster = os.path.join(workspace, "RGBtoZ_Ras")
    out_points = os.path.join(workspace, "RGBtoZ_Pnt")

    # Define raster, convert to points and save both outputs
    arcpy.CopyRaster_management(z_raster, out_raster)
    arcpy.DefineProjection_management(out_raster, sr)
    arcpy.RasterToPoint_conversion(out_raster, out_points)
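    # RasterToPoint_conversion creates one point per non-NoData cell, storing
    # the cell's z value in the grid_code field of the output points.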

except CompositeImage:
    error = "An RGB (3-band) composite image must be used."
    arcpy.AddError(error)
    print error

except GeorectifiedImage:
    error = "Raster must be projected"
    arcpy.AddError(error)
    print error

except FileFormat:
    error = "Incorrect text file format. See instructions"
    arcpy.AddError(error)
    print error