def main(argv = None):
    """Load data by unique field value into the target workspace.

    For each unique value of the configured field(s), deletes matching rows in
    the target dataset and appends the corresponding source rows. When the
    target is a remote (SDE) database and a version name is configured, the
    work is done in a new version which is then reconciled/posted.

    Side effects: reads/writes module globals (targetWorkspace, success flag is
    local), writes progressor state, logs via gzSupport, and sets the SUCCESS
    script parameter.

    BUG FIXED: the original code used ``success == False`` (a no-op comparison)
    in two failure branches instead of ``success = False``, so append/delete
    failures were silently ignored.
    """
    global targetWorkspace
    hasVersion = False
    desc = arcpy.Describe(gzSupport.workspace)
    # Local targets don't use versioning; write straight to the default workspace.
    if desc.workspaceType != "RemoteDatabase" and versionName == None:
        targetWorkspace = defaultWorkspace
    success = True
    arcpy.ResetProgressor()
    arcpy.env.Workspace = gzSupport.workspace
    uniqueValues = gzSupport.getFieldValues("Unique",fieldNames,datasets)[0]
    sources = gzSupport.listDatasets(gzSupport.workspace)
    sNames = sources[0]
    sFullNames = sources[1]
    arcpy.SetProgressor("Step","Load by " + str(fieldNames) + "...",0,len(uniqueValues)*len(datasets),1)
    for value in uniqueValues:
        try:
            hasVersion = False
            gzSupport.addMessage(value)
            if desc.workspaceType == "RemoteDatabase" and versionName != None:
                arcpy.SetProgressorLabel("Creating Version " + versionName)
                hasVersion = gzSupport.createVersion(defaultWorkspace,defaultVersionName,versionName)
            if hasVersion == True or versionName == None or desc.workspaceType == "LocalDatabase":
                arcpy.env.Workspace = targetWorkspace
                targets = gzSupport.listDatasets(targetWorkspace)
                tNames = targets[0]
                tFullNames = targets[1]
                for dataset in datasets:
                    name = dataset.getAttributeNode("name").nodeValue
                    arcpy.SetProgressorLabel("Loading Dataset " + name)
                    targetTable = gzSupport.getFullName(name,tNames,tFullNames)
                    sourceTable = gzSupport.getFullName(name,sNames,sFullNames)
                    attrs = [f.name for f in arcpy.ListFields(targetTable)]
                    expr = getExpression(attrs,fieldNames,value)
                    arcpy.SetProgressorLabel("Loading Dataset " + name + " Where " + expr)
                    # Split the full table path into container and table name.
                    tName = targetTable[targetTable.rfind("\\")+1:]
                    tLocation = targetTable[0:targetTable.rfind("\\")]
                    if gzSupport.deleteRows(tLocation,tName,expr) == True:
                        retVal = gzSupport.appendRows(sourceTable,targetTable,expr)
                        if retVal == False:
                            success = False  # was 'success == False' (no-op comparison)
                    else:
                        success = False  # was 'success == False' (no-op comparison)
                    arcpy.SetProgressorPosition()
                if success == True:
                    if desc.workspaceType == "RemoteDatabase":
                        arcpy.SetProgressorLabel("Reconcile and Post")
                        retVal = gzSupport.reconcilePost(defaultWorkspace,versionName,defaultVersionName)
                        if retVal == False:
                            success = False
                        # NOTE(review): reconstructed nesting — the version is deleted
                        # after every reconcile attempt; confirm against the original.
                        gzSupport.deleteVersion(defaultWorkspace,versionName)
                    elif desc.workspaceType == "LocalDatabase":
                        arcpy.SetProgressorLabel("Completed Update for " + str(value))
                    gzSupport.logDatasetProcess(targetTable,sys.argv[0],retVal)
                else:
                    # NOTE(review): retVal/targetTable may be unbound here if the
                    # dataset loop failed before assigning them.
                    gzSupport.logDatasetProcess(targetTable,sys.argv[0],retVal)
        except:
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("Serious error",sys.argv[0],False)
        finally:
            arcpy.SetProgressorPosition()
            arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")
    if gzSupport.ignoreErrors == True:
        success = True
    if desc.workspaceType == "RemoteDatabase" and success == True:
        analyze(defaultWorkspace,datasets,tNames,tFullNames)
    arcpy.SetParameter(SUCCESS, success)
    arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    gzSupport.compressGDB(gzSupport.workspace)
    gzSupport.compressGDB(defaultWorkspace)
    gzSupport.closeLog()
    return
def execute(self, parameters, messages):
    """Analyze each line of sight and write visibility results back to it.

    Reads the LoS feature class (param 0), locates the target vertex by
    matching distance against the configured target X/Y (params 3-4), builds
    a [x, y, dist, elevation, view-angle] list per vertex, and stores the
    outputs of self.analyzeLoS into the five new result fields.
    """
    visibility_lines = parameters[0].valueAsText
    observer_offset_field = parameters[1].valueAsText
    target_offset_field = parameters[2].valueAsText
    target_x_field = parameters[3].valueAsText
    target_y_field = parameters[4].valueAsText
    useCurvatures = parameters[5].value
    refCoeff = parameters[6].value
    # Result columns appended to the LoS layer: (name, alias, type).
    fieldsNew = [("Visible", "Visibility of target", "SHORT"),
                 ("AngleDiff_GH", "Angle difference of target to global horizon", "DOUBLE"),
                 ("ElevDiff_GH", "Elevation difference of target to global horizon", "DOUBLE"),
                 ("HorDist", "Distance of horizon from target point", "DOUBLE"),
                 ("Horizon_C", "Horizon count behind target point", "SHORT")]
    fieldsNames = [row[0] for row in fieldsNew]
    columns = ["OBJECTID", "SHAPE@"] + [observer_offset_field, target_offset_field, target_x_field, target_y_field]
    farcmap.prepareDataColumns(visibility_lines, fieldsNew)
    number_of_LoS = int(arcpy.GetCount_management(visibility_lines).getOutput(0))
    arcpy.SetProgressor("step", "Analyzing " + str(number_of_LoS) + " lines of sight...", 0, number_of_LoS, 1)
    with arcpy.da.UpdateCursor(visibility_lines, columns + fieldsNames) as cursor:
        for row in cursor:
            points = []
            # WKT vertices as "x y z" strings.
            poi = visibility.WKTtoPoints(row[1].WKT)
            # get coordinates of first point for distance calculation
            observer_offset = row[2]
            target_offset = row[3]
            target_x = row[4]
            target_y = row[5]
            start_point_x = float(poi[0].split(" ")[0])
            start_point_y = float(poi[0].split(" ")[1])
            observer_elev = float(poi[0].split(" ")[2]) + observer_offset
            target_distance = visibility.distance(target_x, target_y, start_point_x, start_point_y)
            # Distance between the first two vertices; used as the sampling step.
            sampling_distance = visibility.distance(float(poi[1].split(" ")[0]), float(poi[1].split(" ")[1]), start_point_x, start_point_y)
            target_index = -1
            # for every point do this
            for i in range(0, len(poi)):
                parts = poi[i].split(" ")
                x = float(parts[0])
                y = float(parts[1])
                z = float(parts[2])
                dist = visibility.distance(x, y, start_point_x, start_point_y)
                if useCurvatures:
                    z = visibility.curvatureCorrections(z, dist, refCoeff)
                    # NOTE(review): target_offset is re-corrected on EVERY vertex,
                    # so the correction accumulates across the loop — confirm this
                    # is intended (the sibling horizons tool corrects only z).
                    target_offset = visibility.curvatureCorrections(target_offset, dist, refCoeff)
                if i == 0:
                    # Observer vertex: distance 0, straight-down angle sentinel.
                    points.append([x, y, 0, observer_elev, -90])
                elif math.fabs(target_distance - dist) < sampling_distance/2:
                    # Vertex closest to the configured target position.
                    points.append([x, y, dist, z + target_offset, visibility.angle(dist, z + target_offset - observer_elev)])
                    target_index = i
                else:
                    points.append([x, y, dist, z, visibility.angle(dist, z - observer_elev)])
            results = self.analyzeLoS(points, target_index)
            # Result fields start right after the 6 input columns.
            for i in range(0, len(results)):
                row[i + 6] = results[i]
            cursor.updateRow(row)
            arcpy.SetProgressorPosition()
    arcpy.ResetProgressor()
    return
# Parameters supplied by the script tool dialog.
raster_path = arcpy.GetParameterAsText(0)      # folder containing the input .tif rasters
out_coor_system = arcpy.GetParameterAsText(1)  # output coordinate system
rs_type = arcpy.GetParameterAsText(2)          # resampling type
c_size = arcpy.GetParameterAsText(3)           # output cell size
d_dir_name = arcpy.GetParameterAsText(4)       # name of the output sub-folder

new_path_name = d_dir_name
# Build paths with os.path.join instead of manual "\\" concatenation.
os.makedirs(os.path.join(raster_path, new_path_name))
arcpy.AddMessage("Step1:Creating new folder named " + str(d_dir_name))
arcpy.AddMessage("Step1:Completed")


def project_batch():
    """Project every .tif raster in raster_path into the new output folder."""
    env.workspace = raster_path
    rasters = arcpy.ListRasters("*", "tif")
    for raster in rasters:
        # FIX: the original wrote to "\\<dir>\\Pr_<name>" (drive-root relative);
        # the output belongs inside the folder created above.
        out = os.path.join(raster_path, new_path_name, "Pr_" + raster)
        arcpy.ProjectRaster_management(raster, out, out_coor_system, rs_type,
                                       c_size, "#", "#", "#")
        arcpy.AddMessage("Step2:Pr_" + raster + "has done.")
        arcpy.SetProgressorPosition()
    arcpy.AddMessage("Step2:Completed")
    arcpy.ResetProgressor()


if arcpy.CheckExtension("Spatial") == "Available":
    project_batch()
else:
    # FIX: report as an error so the tool run is flagged as failed,
    # not buried in informational messages.
    arcpy.AddError("Error!!! Spatial Analyst is unavailable")
def end(self): '''Ends the counting and resets the progressor.''' print self.text + ' Done.' arcpy.ResetProgressor()
def execute(self):
    """Run the backhaul routing process.

    For each remote asset (a row of self.near_array) solve a closest-facility
    route to either nearby un-visited remote assets (daisy-chaining) or to
    fixed assets, and insert the solved routes into self.result.
    Returns whatever self.post_process() returns.
    """
    self.pre_process()
    # Temporary layers used for per-record definition-query selections.
    incidents_layer = arcpy.MakeFeatureLayer_management(
        self.remote, common.unique_name("incidents"))[0]
    remote_layer = arcpy.MakeFeatureLayer_management(
        self.remote, common.unique_name("remote"))[0]
    fixed_layer = arcpy.MakeFeatureLayer_management(
        self.fixed, common.unique_name("fixed"))[0]
    # Get a list of all fields in the CF\Routes and results, ensuring that shape is first
    self.route_fields = ["SHAPE@"] + [
        f.name for f in arcpy.Describe(self.CF.routes).fields
        if f.type.upper() not in ("OID", "GEOMETRY")
    ]
    result_fields = ["SHAPE@"] + [
        f.name for f in arcpy.Describe(self.result).fields
        if f.type.upper() not in ("OID", "GEOMETRY")
    ]
    arcpy.AddMessage("\t{} Beginning backhaul process".format(common.timestamp()))
    with arcpy.da.InsertCursor(self.result, result_fields) as iCursor:
        arcpy.SetProgressor("step", "Solving routes...", 0, self.near_array.size)
        for i, record in enumerate(self.near_array, 1):
            # Select the current asset and mark it visited.
            incidents_layer.definitionQuery = common.create_sql(
                incidents_layer, "OID@", record["ID"])
            record["visited"] = True
            self.add_locations(sublayer=self.CF.incidents.name,
                               features=incidents_layer,
                               append="CLEAR")
            # If the current remote asset is above the daisy threshold, only route to fixed assets
            if record["daisy"] >= self.daisy:
                remote_siblings = np.array([])
            else:
                # Only add those remote assets that:
                # have NOT been routed from (i.e., have not been the current asset) -OR-
                # are under the daisy threshold
                remote_siblings = self.near_array[
                    ~self.near_array["visited"]
                    & (self.near_array["daisy"] < self.daisy)]["ID"][:self.search]
            if remote_siblings.size:
                # If there are remote siblings to add, then the fixed siblings will be appended
                append = "APPEND"
                remote_layer.definitionQuery = common.create_sql(
                    remote_layer, "OID@", remote_siblings.tolist())
                self.add_locations(sublayer=self.CF.facilities.name,
                                   features=remote_layer,
                                   append="CLEAR")
            else:
                append = "CLEAR"
            # Fixed assets are always candidate facilities for this record.
            fixed_layer.definitionQuery = common.create_sql(
                fixed_layer, "OID@", record["fixed"].tolist())
            self.add_locations(sublayer=self.CF.facilities.name,
                               features=fixed_layer,
                               append=append)
            solve = arcpy.na.Solve(self.CF.na_layer, "SKIP", "CONTINUE")
            # The solve result tuple is (closest facility layer, solve succeeded)
            if solve.getOutput(1) == 'true':
                for row in self.transform_route():
                    iCursor.insertRow(row)
            else:
                arcpy.AddMessage("\tNo route found for {}".format(record["ID"]))
                arcpy.AddWarning(solve.getMessages())
            arcpy.SetProgressorPosition(i)
    # Clean up the temporary layers regardless of individual solve outcomes.
    for tempData in [incidents_layer, remote_layer, fixed_layer]:
        common.delete(tempData)
    arcpy.ResetProgressor()
    return self.post_process()
def CalculateRandomWalkingValue():
    ''' Calculate the random walking values of a network.

    Repeatedly performs random walks over the module-level network ``Net``
    (starting nodes chosen via random.choice), incrementing an R-value on
    every point and edge visited, until the relative change of the total
    R-value drops below the threshold ``PPermit``.

    Returns (Walk, WalkPath): Walk is True when the loop completed without
    raising; WalkPath records every individual walk. On error the except
    branch logs diagnostics and implicitly returns None.

    NOTE(review): Python 2 code — uses the ``<>`` operator throughout.
    '''
    try:
        ## parameters
        ThisTotalRvalue=1.0
        PreTotalRvalue=0.0
        loopvalue=1.0
        i=0 # count of walking
        countnullloop=0 # count of null walking
        WalkPath = Graphy.WalkPath() # record walking paths
        Walk = False # record whether the random walking is a success
        ''' Random walking loop '''
        arcpy.SetProgressor("step", "Random Walking...", 0,int(1/float(PPermit)), 2)
        while loopvalue >= float(PPermit): # p threshold, default is 0.000001
            # if the random walking values of the entire nodes are stable,
            # the random walking will terminate
            preloopvalue = loopvalue
            # python requires float number in division
            loopvalue = (float(ThisTotalRvalue) - float(PreTotalRvalue))/float(ThisTotalRvalue)
            arcpy.SetProgressorLabel("Current loop value is " + str(loopvalue) + "...")
            PreTotalRvalue = ThisTotalRvalue
            ThisPointNumber = random.choice(Net.NodeIDList) # index of point ID
            # 1st point ID is randomly selected
            i=i+1
            points=[] # contain points in current walking
            pointsID=[]
            links=[] # contain edges in current walking
            linksID=[]
            TotalLength=0.0
            # generate length permit for current walking
            LengthPermit = GetLengthPermit(SimulateMethod,MeanLength,StdLength,alpha)
            DeadEnd=False # record whether the current point is dead end
            DeadStart=False # record whether the first point is dead end
            DeadEndSelect=False # record whether the end selection is dead
            OtherWay=False # record whether start from the other side of the 1st point
            while TotalLength <= LengthPermit:
                # select the start point based on the ThisPointNumber
                RandomPoints=GetPoint(Net,ThisPointNumber)
                ''' Check previous points '''
                # While the candidate point has already been visited, try to
                # step from another anchor (last point, or the walk's 1st point).
                while Contain(points,RandomPoints):
                    if DeadEnd:
                        if DeadEndSelect:
                            arcpy.AddWarning("\t Fail to find the next point because of DeadEnd point and Dead End selection, the current point is "+str(RandomPoints.ID)+"!")
                            # arcpy.AddWarning(pointsID)
                            # arcpy.AddWarning(linksID)
                            points, pointsID,links,linksID,TotalLength,countnullloop=EndWalking(points,pointsID,links,linksID,TotalLength,LengthPermit,countnullloop)
                            break
                        else:
                            # start the walk from the other side of walk path
                            PreviousPoints = points[0] ## set the previous point as the 1st point
                    elif DeadEndSelect:
                        if OtherWay:
                            arcpy.AddWarning("\t Fail to find an Other Side walking, the current point is "+str(RandomPoints.ID)+"!")
                            # arcpy.AddWarning(pointsID)
                            # arcpy.AddWarning(linksID)
                            points, pointsID,links,linksID,TotalLength,countnullloop=EndWalking(points,pointsID,links,linksID,TotalLength,LengthPermit,countnullloop)
                            break
                        else:
                            OtherWay=True
                            PreviousPoints = points[0] ## set the previous point as the 1st point
                    elif (not DeadEndSelect) and (not OtherWay):
                        PreviousPoints = points[len(points)-1] ## set the previous point as the 2nd last point
                    else:
                        arcpy.AddError("Unknown error")
                        arcpy.AddError(pointsID)
                        arcpy.AddError(linksID)
                        arcpy.AddError(ThisPointNumber)
                        arcpy.AddError(RandomPoints.ID)
                        break
                    ConnectedPointsID,ConnectedLinks = GetConnectedElements(PreviousPoints)
                    NextPointID = random.choice(ConnectedPointsID)
                    # select the next points from the connected points of previous point
                    while (Contain(pointsID, NextPointID)):
                        ConnectedPointsID.remove(NextPointID)
                        if len(ConnectedPointsID)>0:
                            NextPointID= random.choice(ConnectedPointsID)
                        elif not DeadEndSelect: # 1st time of dead end
                            DeadEndSelect = True
                            arcpy.AddWarning("\t Fail to select a next point in this side!")
                            if (not DeadStart):
                                # dead start is false, there are other chances to start the
                                # walking from other side of the 1st point
                                # start the walk from another side of the first point
                                NextPointID = None
                                break
                            else:
                                # dead start is true: end current walking
                                arcpy.AddWarning("\t Fail to find the next point because of DeadStart point and Dead End selection, the current point is "+str(RandomPoints.ID)+"!")
                                # arcpy.AddWarning(pointsID)
                                # arcpy.AddWarning(linksID)
                                points, pointsID,links,linksID,TotalLength,countnullloop=EndWalking(points,pointsID,links,linksID,TotalLength,LengthPermit,countnullloop)
                                break
                        elif OtherWay:
                            break
                        else:
                            arcpy.AddError("Unknown error")
                            arcpy.AddError(pointsID)
                            arcpy.AddError(linksID)
                            arcpy.AddError(ThisPointNumber)
                            arcpy.AddError(RandomPoints.ID)
                            break
                    if len(ConnectedPointsID)>0 and NextPointID<>None:
                        RandomPoints=GetPoint(Net,NextPointID)
                        ThisPointNumber=NextPointID
                        # replace the last edge in walking path
                        for cl in ConnectedLinks:
                            if (cl.FirstPoint.ID == NextPointID and cl.LastPoint.ID==PreviousPoints.ID)or (cl.LastPoint.ID == NextPointID and cl.FirstPoint.ID==PreviousPoints.ID):
                                if DeadEnd:
                                    # add new edge directly in the walking path
                                    links.append(cl)
                                    linksID.append(cl.ID)
                                else:
                                    # replace the last edge with the new edge in the walking path
                                    links[len(links)-1]=cl
                                    linksID[len(linksID)-1]=cl.ID
                        # re-calculate the total length
                        TotalLength= GetTotalLength(links)
                    elif DeadEndSelect:
                        if DeadStart:
                            break
                        else:
                            RandomPoints = points[0]
                            arcpy.AddWarning("\t Start walking from the other side of the 1st point.")
                    else:
                        arcpy.AddError("Unknown error")
                        arcpy.AddError(pointsID)
                        arcpy.AddError(linksID)
                        arcpy.AddError(ThisPointNumber)
                # select the connected link based on weight
                if points<>None and links <> None:
                    RandomLinks=GetNextRandomLink(RandomPoints,SelectionType,links)
                ''' Check whether new link has already been walked through '''
                if len(RandomPoints.EdgeList)>1 and points <> None:
                    while (Contain(linksID, RandomLinks.ID)):
                        if DeadEnd or DeadStart:
                            LoopFirstPointID=RandomLinks.FirstPoint.ID
                            LoopLastPointID=RandomLinks.LastPoint.ID
                            if LoopFirstPointID == pointsID[0] or LoopLastPointID==pointsID[0]:
                                arcpy.AddWarning("\t Fail to find the next point because of loop with dead point "+str(RandomPoints.ID)+"!")
                                points, pointsID,links,linksID,TotalLength,countnullloop=EndWalking(points,pointsID,links,linksID,TotalLength,LengthPermit,countnullloop)
                                break
                            else:
                                RandomLinks=GetNextRandomLink(RandomPoints,SelectionType,links)
                                # if current link has already been walked, select another link
                        else:
                            RandomLinks=GetNextRandomLink(RandomPoints,SelectionType,links)
                    # Advance to the far endpoint of the chosen link.
                    if RandomLinks.FirstPoint.ID == ThisPointNumber:
                        ThisPointNumber = RandomLinks.LastPoint.ID
                    else:
                        ThisPointNumber = RandomLinks.FirstPoint.ID
                else:
                    if points <> None and len(RandomPoints.EdgeList)==1:
                        if len(points)==0:
                            ## the isolated point is selected as 1st point
                            DeadStart = True
                            arcpy.AddWarning("\t Dead start point of "+str(RandomPoints.ID)+" has been selected!")
                            RandomLinks = RandomPoints.EdgeList[0]
                            if RandomLinks.FirstPoint.ID == ThisPointNumber:
                                ThisPointNumber = RandomLinks.LastPoint.ID
                            else:
                                ThisPointNumber = RandomLinks.FirstPoint.ID
                        else:
                            ## the isolated point is selected as the internal point
                            DeadEnd = True
                            arcpy.AddWarning("\t Dead end point of "+str(RandomPoints.ID)+" has been selected!")
                            if DeadStart:
                                arcpy.AddWarning("\t Dead walk from "+str(points[0].ID)+"to "+str(RandomPoints.ID)+"!")
                                points, pointsID,links,linksID,TotalLength,countnullloop=EndWalking(points,pointsID,links,linksID,TotalLength,LengthPermit,countnullloop)
                            else:
                                # start the next walk from the 1st point
                                ThisPointNumber=points[0].ID
                ## end of check new edge
                if (points<>None and links <>None):
                    if (not Contain(pointsID,RandomPoints.ID)): # RandomPoints.ID not in pointsID
                        pointsID.append(RandomPoints.ID)
                        points.append(RandomPoints)
                    if (not Contain(linksID,RandomLinks.ID)): # RandomLinks.ID not in linksID
                        linksID.append(RandomLinks.ID)
                        links.append(RandomLinks)
                    TotalLength= GetTotalLength(links)
            ''' Add the terminal point to walk '''
            if points<> None and links <> None:
                ## get terminal point
                if points[len(points)-1].ID == links[len(links)-1].FirstPoint.ID:
                    TerminalPoint = links[len(links)-1].LastPoint
                else:
                    TerminalPoint = links[len(links)-1].FirstPoint
                ## check whether the terminal point has already been walked
                if(not Contain(points, TerminalPoint)) and (not Contain(pointsID, TerminalPoint.ID)):
                    # not been walked: add to points
                    points.append(TerminalPoint)
                    pointsID.append(TerminalPoint.ID)
                else:
                    # has already been walked: remove the corresponding link
                    links.remove(links[len(links)-1])
                    linksID.remove(linksID[len(linksID)-1])
            ''' Increase the random walking value of points and edges in the current walk journal '''
            IncreaseRvalue(points)
            IncreaseRvalue(links)
            ''' Record the points and links of the current walk journal '''
            SingleWalk=Graphy.SingleWalk(i,TotalLength,LengthPermit,pointsID,linksID)
            WalkPath.walks.append(SingleWalk)
            ThisTotalRvalue = GetTotalRvalue()
            ## check loop value in case that points and links are null
            if ThisTotalRvalue ==0:
                ThisTotalRvalue =1
                PreTotalRvalue = 0
            if loopvalue ==0:
                loopvalue = preloopvalue
            ## end of check loop value
            arcpy.SetProgressorPosition()
        arcpy.ResetProgressor()
        Walk = True # this random walking is successful
        if Walk:
            arcpy.AddMessage("Random walking calculation is finished!")
            arcpy.AddMessage("Total number of walking is "+str(i)+"!")
            arcpy.AddWarning("Total number of invalid walking is "+str(countnullloop)+"!")
            arcpy.AddMessage("Final loop value is "+str(loopvalue)+"!")
            arcpy.AddMessage("Total Random Walking value is "+str(ThisTotalRvalue)+"!")
            WalkPath.i = i
            WalkPath.loopvalue = loopvalue
            WalkPath.countnullloop = countnullloop
            WalkPath.totalrvalue = ThisTotalRvalue
        return Walk,WalkPath
    except:
        # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit,
        # and this branch returns None rather than (False, WalkPath).
        # Get the trace back object #
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
        # Concatenate information together concerning the error into a message string #
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(sys.exc_info()[1])
        # Return python error messages for use in script tool or Python Window #
        # Print Python error messages for use in Python / Python Window #
        Walk = False
        arcpy.AddError(pymsg)
        arcpy.AddError(pointsID)
        arcpy.AddError(linksID)
        arcpy.AddError("Random point: "+ str(RandomPoints.ID))
def publish(xmlFileNames):
    """Perform the data update process for one or more configuration files.

    xmlFileNames is a ';'-separated list of Data Assistant configuration
    files. For each file: resolve source/target layers, verify service
    capabilities and sign-in, extract the source to a local GDB, run the
    field calculator, then publish the result to the target layer.

    Returns False on validation failures, None on some early exits (original
    behavior preserved), otherwise falls through after processing all files.
    Called from main or from another script.

    Idiom fix: ``== None`` comparisons replaced with ``is None``.
    """
    global sourceLayer, targetLayer, _success
    dla._errorCount = 0
    arcpy.SetProgressor("default", "Data Assistant")
    arcpy.SetProgressorLabel("Data Assistant")
    xmlFiles = xmlFileNames.split(";")
    for xmlFile in xmlFiles:  # multi value parameter, loop for each file
        dla.addMessage("Configuration file: " + xmlFile)
        xmlDoc = dla.getXmlDoc(xmlFile)  # parse the xml document
        if xmlDoc is None:
            return
        svceS = False
        svceT = False
        if sourceLayer == "" or sourceLayer is None:
            sourceLayer = dla.getNodeValue(xmlDoc, "Source")
            svceS = dla.checkLayerIsService(sourceLayer)
        if targetLayer == "" or targetLayer is None:
            targetLayer = dla.getNodeValue(xmlDoc, "Target")
            svceT = dla.checkLayerIsService(targetLayer)
        dla.addMessage(targetLayer)
        ## Added May2016. warn user if capabilities are not correct, exit if not a valid layer
        if not dla.checkServiceCapabilities(sourceLayer, True):
            return False
        if not dla.checkServiceCapabilities(targetLayer, True):
            return False
        if svceS == True or svceT == True:
            # when signed in get the token and use this.
            # Will be requested many times during the publish
            token = dla.getSigninToken()
            if token is None:
                dla.addError(
                    "User must be signed in for this tool to work with services"
                )
                return
        expr = getWhereClause(xmlDoc)
        if useReplaceSettings == True and (expr == '' or expr is None):
            dla.addError(
                "There must be an expression for replacing by field value, current value = "
                + str(expr))
            return False
        dla.setWorkspace()
        targetName = dla.getTargetName(xmlDoc)
        res = dlaExtractLayerToGDB.extract(xmlFile, None, dla.workspace,
                                           sourceLayer, targetName)
        if res != True:
            table = dla.getTempTable(targetName)
            msg = "Unable to export data, there is a lock on existing datasets or another unknown error"
            if arcpy.TestSchemaLock(table) != True:
                msg = "Unable to export data, there is a lock on the intermediate feature class: " + table
            dla.addError(msg)
            print(msg)
            return
        else:
            res = dlaFieldCalculator.calculate(xmlFile, dla.workspace,
                                               targetName, False)
            if res == True:
                dlaTable = dla.getTempTable(targetName)
                res = doPublish(xmlDoc, dlaTable, targetLayer)
        arcpy.ResetProgressor()
        # set source and target back to None for multiple file processing
        sourceLayer = None
        targetLayer = None
        if res == False:
            err = "Data Assistant Update Failed, see messages for details"
            dla.addError(err)
            print(err)
def data_driven_raster_reclassify(In_Reference_Suit, In_Suit_Var, Out_Suit_Prox, Invert_Boolean):
    """Reclassify proximity to In_Suit_Var into a 1-9 suitability raster.

    Builds a Euclidean-distance raster from the variable layer, derives the
    mean and standard deviation of that distance over the reference area via
    Zonal Statistics, then reclassifies distance into 9 suitability classes
    whose break points are the mean plus quarter-standard-deviation steps.
    Invert_Boolean flips the 1-9 scoring (via invert_suitability_value).
    The result is saved to Out_Suit_Prox.
    """
    try:
        # Path setup-temp workspace: You can edit the script to use this if you want, but I defer to defaults.
        # tempFC=os.path.join(arcpy.env.scratchGDB,"tempFC")# for a temporary data
        # Progressor setup:
        arcpy.SetProgressor("step", "Creating Euclidean Distance raster...", 0, 7, 1)
        # Process-Euclidean Distance
        arcpy.AddMessage("Creating Euclidean Distance from Variable Layer")
        EuDist_Ra = EucDistance(In_Suit_Var)
        # Process: Add Field — a constant field so the whole reference area is one zone.
        arcpy.SetProgressorLabel(
            "Appending and calculating a common field for Zonal Statistics by table..."
        )
        arcpy.SetProgressorPosition()
        arcpy.AddMessage(
            "Adding and Calculating Common Field for Zonal Statistics by Table"
        )
        Added_Field_st1 = arcpy.AddField_management(In_Reference_Suit, "All_Same", "LONG")
        # Process: Calculate Field
        Calced_Field_st2 = arcpy.CalculateField_management(
            Added_Field_st1, "All_Same", 1, "PYTHON")
        # Process: Make Feature Layer (2)
        # NOTE(review): no output layer name is passed here — confirm this call
        # succeeds with only the input argument in the target arcpy version.
        arcpy.SetProgressorLabel(
            "Making Reference Feature Layer with new Field...")
        arcpy.SetProgressorPosition()
        arcpy.AddMessage("Making Variable Feature Layer with new Field")
        Zonal_Input = arcpy.MakeFeatureLayer_management(Calced_Field_st2)
        # Process: Zonal Statistics as Table
        arcpy.SetProgressorLabel(
            "Calculating Zonal Statistics for remap table...")
        arcpy.SetProgressorPosition()
        arcpy.AddMessage("Calculating Zonal Statistics")
        Zonal_Stat_Prox = ZonalStatisticsAsTable(Zonal_Input, "All_Same",
                                                 EuDist_Ra, "outTable")
        # Process: Get Field Values (2-mean and standard deviation of distances)
        # NOTE(review): these cursors are never closed/deleted; .next() is the
        # Python-2 style cursor advance.
        arcpy.SetProgressorLabel(
            "Declaring Cursors to read Zonal Statistics table...")
        arcpy.SetProgressorPosition()
        arcpy.AddMessage("Declaring cursors to read Zonal Statistics table")
        Std_Dev = (
            arcpy.da.SearchCursor(Zonal_Stat_Prox, ["STD"]).next()[0]
        )  # Since it was all one field, the first element should be the only element
        Mean = (
            arcpy.da.SearchCursor(Zonal_Stat_Prox, ["MEAN"]).next()[0]
        )  # Since it was all one field, the first element should be the only element
        Qrt_StD = Std_Dev / 4  # one quarter standard deviation
        arcpy.AddMessage("Retrieved Mean of {0} and Std Dev of {1}".format(
            Mean, Std_Dev))
        arcpy.SetProgressorLabel(
            "Calculating Statistics for Distance Raster...")
        arcpy.SetProgressorPosition()
        arcpy.AddMessage("Calculating Statistics of Distance Raster")
        EuDist_Ra_wStats = arcpy.CalculateStatistics_management(EuDist_Ra)
        # Process: Get Max Raster Value for remap
        arcpy.SetProgressorLabel(
            "Retrieving maximum value from value raster...")
        arcpy.SetProgressorPosition()
        Max_Value_Result = arcpy.GetRasterProperties_management(
            EuDist_Ra_wStats, "MAXIMUM")
        Max_Ra_Value = float(Max_Value_Result.getOutput(0))
        arcpy.AddMessage(
            "Maximum Raster Value of {0} is used as the final value in the remap table."
            .format(Max_Ra_Value))
        # Remap List creation: [lower bound, upper bound, class value] rows,
        # nearest distances scoring highest (9) unless inverted.
        myremap = RemapRange(
            [[0, Mean, invert_suitability_value(9, Invert_Boolean)], [
                Mean, Mean + (Qrt_StD),
                invert_suitability_value(8, Invert_Boolean)
            ], [
                Mean + (Qrt_StD), Mean + (Qrt_StD * 2),
                invert_suitability_value(7, Invert_Boolean)
            ], [
                Mean + (Qrt_StD * 2), Mean + (Qrt_StD * 3),
                invert_suitability_value(6, Invert_Boolean)
            ], [
                Mean + (Qrt_StD * 3), Mean + (Qrt_StD * 4),
                invert_suitability_value(5, Invert_Boolean)
            ], [
                Mean + (Qrt_StD * 4), Mean + (Qrt_StD * 5),
                invert_suitability_value(4, Invert_Boolean)
            ], [
                Mean + (Qrt_StD * 5), Mean + (Qrt_StD * 6),
                invert_suitability_value(3, Invert_Boolean)
            ], [
                Mean + (Qrt_StD * 6), Mean + (Qrt_StD * 7),
                invert_suitability_value(2, Invert_Boolean)
            ], [
                Mean + (Qrt_StD * 7), (Max_Ra_Value + 1),
                invert_suitability_value(1, Invert_Boolean)
            ]])  # float("inf") does not work so this is the short term solution
        # Process: Reclassify
        arcpy.SetProgressorLabel("Starting Data Driven Reclassification...")
        arcpy.SetProgressorPosition()
        arcpy.AddMessage("Starting Data Driven Reclassification")
        Data_Driven_Reclass = Reclassify(EuDist_Ra_wStats, "Value", myremap)
        Data_Driven_Reclass.save(Out_Suit_Prox)
        # Finishing Messages and clean up.
        output_Name = (os.path.split(Out_Suit_Prox)[1])
        arcpy.AddMessage(
            "Finished Data Driven Reclassification of {0}".format(output_Name))
        arcpy.AddMessage("Final Reclassification: {0}".format(myremap))
        arcpy.ResetProgressor()
        arcpy.Delete_management(
            Zonal_Stat_Prox
        )  # delete temporary table- edit script if you want to save it.
    except arcpy.ExecuteError:
        # Geoprocessing tool failure: report the tool's own error messages.
        print(arcpy.GetMessages(2))
    except Exception as e:
        print(e.args[0])
def execute(self, parameters, messages):
    """Move each point to the highest raster value within `distance` of it.

    Copies the input points to `optimized_points`, then for every point
    scans a (2*distanceCells)^2 neighborhood of the raster and relocates
    the point to the cell with the maximum value (optionally restricted to
    cells where the mask raster is > 0).

    BUG FIXED: the cell-index computation was missing parentheses —
    ``int(x - XMin / cellSize)`` divided only XMin by the cell size instead
    of the offset ``(x - XMin)``, producing wrong row/column numbers and a
    mis-centered sampling window.
    """
    points_to_optimize = parameters[0].valueAsText
    raster = parameters[1].valueAsText
    distance = parameters[2].value
    optimized_points = parameters[3].valueAsText
    useMask = parameters[4].value
    if useMask:
        mask = parameters[5].valueAsText
    inRas = arcpy.Raster(raster)
    if useMask:
        maskRas = arcpy.Raster(mask)
        maskNoData = maskRas.noDataValue
    cellSize = inRas.meanCellWidth
    distanceCells = distance / cellSize
    noDataValue = inRas.noDataValue
    spatial_ref = arcpy.Describe(points_to_optimize).spatialReference
    # Work on a copy so the input layer is left untouched.
    arcpy.CopyFeatures_management(points_to_optimize, optimized_points)
    newPoint = arcpy.Point()
    number_of_points = int(
        arcpy.GetCount_management(points_to_optimize).getOutput(0))
    arcpy.SetProgressor("step", "Updating location of points", 0,
                        number_of_points, 1)
    extent = inRas.extent
    with arcpy.da.UpdateCursor(optimized_points,
                               ["SHAPE@XY", "SHAPE@"]) as cursor:
        for row in cursor:
            # FIX: parenthesize the offset before dividing by the cell size.
            columnNumber = int((row[0][0] - extent.XMin) / cellSize)
            rowNumber = int((row[0][1] - extent.YMin) / cellSize)
            # Center of the cell containing the point.
            centerX = extent.XMin + (columnNumber * cellSize) + 0.5 * cellSize
            centerY = extent.YMin + (rowNumber * cellSize) + 0.5 * cellSize
            # Extract the square neighborhood around the point's cell.
            array = arcpy.RasterToNumPyArray(
                inRas, arcpy.Point(centerX - distance, centerY - distance),
                distanceCells * 2, distanceCells * 2, noDataValue)
            maxValue = noDataValue
            maxX = row[0][0]
            maxY = row[0][1]
            for i in range(0, len(array)):
                for j in range(0, len(array[0])):
                    # World coordinates of cell (i, j) relative to the point.
                    x = row[0][0] + (j - distanceCells) * cellSize
                    y = row[0][1] - (i - distanceCells + 1) * cellSize
                    if useMask:
                        maskValue = arcpy.RasterToNumPyArray(
                            maskRas, arcpy.Point(x, y), 1, 1, maskNoData)
                    # Only accept cells with data, inside the search radius.
                    if array[i][j] > maxValue and array[i][j] != noDataValue and \
                            visibility.distance(row[0][0], row[0][1], x, y) < distance:
                        if useMask:
                            if maskValue[0][0] > 0:
                                maxValue = array[i][j]
                                maxX = x
                                maxY = y
                        else:
                            maxValue = array[i][j]
                            maxX = x
                            maxY = y
            newPoint.X = maxX
            newPoint.Y = maxY
            row[1] = arcpy.PointGeometry(newPoint)
            cursor.updateRow(row)
            arcpy.SetProgressorPosition()
    arcpy.ResetProgressor()
    functions_arcmap.addLayer(optimized_points)
    return
def reset(self):
    """Reset the ArcPy progressor, but only if this helper was initialized."""
    if not self.initialized:
        return
    arcpy.ResetProgressor()
def execute(self, parameters, messages):
    """Find global horizon points along each line of sight.

    Creates the `horizons` POINT feature class (Z-enabled, same spatial
    reference as the LoS layer), analyzes every line of sight with
    visibility.findGlobalHorizons, and inserts one point per horizon with
    its attributes (elevation, angles, distance to observer, etc.).
    """
    visibility_lines = parameters[0].valueAsText
    id_observer_field = parameters[1].valueAsText
    observer_offset_field = parameters[2].valueAsText
    id_target_field = parameters[3].valueAsText
    target_offset_field = parameters[4].valueAsText
    target_x_field = parameters[5].valueAsText
    target_y_field = parameters[6].valueAsText
    horizons = parameters[7].valueAsText
    useCurvatures = parameters[8].value
    refCoeff = parameters[9].value
    workspace = fv.getPath(horizons)
    # Feature class name = full path minus its workspace prefix.
    file_name = horizons.replace(workspace + "\\", "")
    arcpy.CreateFeatureclass_management(
        workspace,
        file_name,
        "POINT",
        has_z="ENABLED",
        spatial_reference=arcpy.Describe(
            visibility_lines).spatialReference)
    # Attribute columns for the output horizon points: (name, alias, type).
    fieldsNew = [(id_observer_field, id_observer_field, "SHORT"),
                 (id_target_field, id_target_field, "SHORT"),
                 ("Elevation", "Elevation", "DOUBLE"),
                 ("Hide_Tar", "Hides target point", "SHORT"),
                 ("ViewAngle", "Viewing angle", "DOUBLE"),
                 ("AngleDiff_Tar", "Viewing angle difference to target",
                  "DOUBLE"),
                 ("Dist_Observ", "Distance to observer", "DOUBLE"),
                 ("Behind_Tar", "Behind target", "SHORT"),
                 ("OID_LoS", "OID_LoS", "SHORT")]
    fieldsNames = [row[0] for row in fieldsNew]
    functions_arcmap.prepareDataColumns(horizons, fieldsNew)
    arcpy.AddMessage("\t Determination of horizons started...")
    # NOTE(review): this InsertCursor is never closed/deleted; consider a
    # `with` block or `del` to release the schema lock promptly.
    insert_cursor = arcpy.da.InsertCursor(horizons,
                                          ["SHAPE@"] + fieldsNames)
    number_of_LoS = int(
        arcpy.GetCount_management(visibility_lines).getOutput(0))
    arcpy.SetProgressor(
        "step", "Analyzing " + str(number_of_LoS) + " lines of sight...",
        0, number_of_LoS, 1)
    with arcpy.da.SearchCursor(visibility_lines, [
            "OBJECTID", "SHAPE@", id_observer_field, id_target_field,
            observer_offset_field, target_offset_field, target_x_field,
            target_y_field
    ]) as cursor:
        for row in cursor:
            target_x = row[6]
            target_y = row[7]
            target_offset = row[5]
            points = []
            poi = visibility.WKTtoPoints(row[1].WKT)
            # get coordinates of first point for distance calculation
            start_point_x = float(poi[0].split(" ")[0])
            start_point_y = float(poi[0].split(" ")[1])
            # Observer elevation = first vertex Z + observer offset field.
            observer_elev = float(poi[0].split(" ")[2]) + float(row[4])
            target_distance = visibility.distance(target_x, target_y,
                                                  start_point_x,
                                                  start_point_y)
            # Distance between the first two vertices; used as sampling step.
            sampling_distance = visibility.distance(
                float(poi[1].split(" ")[0]), float(poi[1].split(" ")[1]),
                start_point_x, start_point_y)
            target_index = -1
            # for every point do this
            for i in range(0, len(poi)):
                parts = poi[i].split(" ")
                x = float(parts[0])
                y = float(parts[1])
                z = float(parts[2])
                dist = visibility.distance(x, y, start_point_x,
                                           start_point_y)
                if useCurvatures:
                    z = visibility.curvatureCorrections(z, dist, refCoeff)
                if i == 0:
                    # Observer vertex: distance 0, straight-down sentinel angle.
                    points.append([x, y, 0, observer_elev, -90])
                elif math.fabs(target_distance -
                               dist) < sampling_distance / 2:
                    # Vertex closest to the configured target position.
                    points.append([
                        x, y, dist, z + target_offset,
                        visibility.angle(dist,
                                         z + target_offset - observer_elev)
                    ])
                    target_index = i
                else:
                    points.append([
                        x, y, dist, z,
                        visibility.angle(dist, z - observer_elev)
                    ])
            results = visibility.findGlobalHorizons(points, target_index)
            point = arcpy.Point()
            for i in range(0, len(results)):
                hor_type = 0  # NOTE(review): assigned but never used
                point.X = results[i][0]
                point.Y = results[i][1]
                point.Z = results[i][3]
                ptGeometry = arcpy.PointGeometry(point)
                # Row layout mirrors fieldsNew order after the geometry.
                insert_cursor.insertRow([
                    ptGeometry, row[2], row[3], results[i][3],
                    results[i][5], results[i][4], results[i][6],
                    results[i][2], results[i][7], row[0]
                ])
            arcpy.SetProgressorPosition()
    arcpy.ResetProgressor()
    arcpy.AddMessage("\t Determination of horizons sucessfuly ended.")
    functions_arcmap.addLayer(horizons)
    return
def postFIXtoConfinement(fcOutput, strStreamSide, lyrNearPointsConfinement, fcSplitPoints): ''' Description: This code attempts to fix the issue described in https://github.com/Riverscapes/ConfinementTool/issues/30. The code steps through each segment, selects it's near points, gets their ID's, use these to select the split points and then checks if ORIG_FID are constant. If they are, the segment is correctly attributed with confinement side. If they are not then this must be an error and the segment field will be reset. Inputs: fcOutput = This is the segmented network for the RIGHT or LEFT side strStreamSide = LEFT or RIGHT lyrNearPointsConfinement = A FeatureLayer, these are the points that were used to split the network, we will hook into the IN_FID field. fcSplitPoints = These are the end points (nodes) of the confinement margin polyline and were used to create the points in lyrNearPointsConfinement. This is a featureclass. Outputs: Returns True if code executed without error else False. Limitations: Code assumes the temporary workspace is a file geodatabase. Author: Duncan Hornby ([email protected]) Created: 4/9/18 ''' # This is used to overcome issues of tolerance in the select by location tool, currently set to 5cm # you may need to change this. 
search_distance = "5 Centimeters" # Create a layer object so selections can be done arcpy.MakeFeatureLayer_management(fcSplitPoints, "lyrSplitPoints") try: arcpy.AddMessage( "Applying post fix confinement code to resolve geometry issues...") aField = "Con_" + strStreamSide # Get a count on number of features to process and initialise progress bar resObj = arcpy.GetCount_management(fcOutput) n = int(resObj.getOutput(0)) arcpy.SetProgressor("step", "Checking and fixing segments...", 0, n, 1) # Main loop to step through each segment and check with arcpy.da.UpdateCursor(fcOutput, ["SHAPE@", aField]) as cursor: for row in cursor: arcpy.SetProgressorPosition() geom = row[0] # Use polyline to select near points arcpy.SelectLayerByLocation_management( lyrNearPointsConfinement, "INTERSECT", geom, search_distance, "NEW_SELECTION", "NOT_INVERT") # Now read the IN_FID values from the selected rows into a list idList = [] with arcpy.da.SearchCursor(lyrNearPointsConfinement, ["IN_FID"]) as cursor2: for row2 in cursor2: idList.append(row2[0]) # Check u/s and d/s limit selections where only 1 point will be selected if len(idList) == 1: arcpy.AddMessage("Skipping an end segment") else: # Now build a SQL query that can be used to select rows using the ID's in idList in the split points layer myTup = str(tuple(idList)) sql = "OBJECTID IN " + myTup #arcpy.AddMessage(sql) arcpy.SelectLayerByAttribute_management( "lyrSplitPoints", "NEW_SELECTION", sql) # Now read the ORIG_FID into a set, if 2 values are found then the line was incorrectly tagged and the aField needs resetting to null s = set() with arcpy.da.SearchCursor("lyrSplitPoints", ["ORIG_FID"]) as cursor3: for row3 in cursor3: s.add(row3[0]) if len(s) == 2: # Segmented was incorrectly identified the wrong confinement side, reset to null row[1] = None else: row[1] = 1 # Be aware this may incorrectly set a segment that has 3 node intersections at one end. 
cursor.updateRow(row) # Got here code ran without error arcpy.ResetProgressor() return True except Exception as e: arcpy.AddError("Error in postFIXtoConfinement function: " + str(e)) return False finally: arcpy.Delete_management("lyrSplitPoints")