import os

import arcpy


def createVtpkIndexAndPackage(in_map, service_type, tileScheme, vertex_count,
                              indexPolygon, outVtpk):
    try:
        arcpy.management.CreateVectorTileIndex(in_map=in_map,
                                               out_featureclass=indexPolygon,
                                               service_type=service_type,
                                               tiling_scheme=tileScheme,
                                               vertex_count=vertex_count)
        arcpy.AddMessage("tile index - ready.")

        arcpy.management.CreateVectorTilePackage(in_map=in_map,
                                                 output_file=outVtpk,
                                                 service_type=service_type,
                                                 tiling_scheme=tileScheme,
                                                 tile_structure="INDEXED",
                                                 min_cached_scale="",
                                                 max_cached_scale="",
                                                 index_polygons=indexPolygon,
                                                 summary=None,
                                                 tags=None)
        if os.path.exists(outVtpk):
            arcpy.AddMessage("Pro standard tile package - ready!")
            return outVtpk
    except:
        arcpy.AddError("Failed to create vector tile package. "
                       "Please see the following error for details.")
        for i in range(0, arcpy.GetMessageCount()):
            mesSev = arcpy.GetSeverity(i)
            if mesSev == 1:
                arcpy.AddWarning(arcpy.GetMessage(i))
            elif mesSev == 2:
                arcpy.AddError(arcpy.GetMessage(i))
import os
import shutil
import sys

import arcpy


def delete_cwd_dir(argv=None):
    """Deletes cost-weighted distance directory and CWD rasters"""
    if argv is None:
        argv = sys.argv

    # Get parameters from ArcGIS tool dialog
    projectDir = argv[1]
    cwdBaseDir = os.path.join(projectDir, "datapass\\cwd")
    try:
        if os.path.exists(cwdBaseDir):
            arcpy.Delete_management(cwdBaseDir)
    except Exception:
        try:
            if os.path.exists(cwdBaseDir):
                shutil.rmtree(cwdBaseDir)
        except Exception:
            arcpy.AddError("Unable to delete cwd directory. One of the rasters "
                           "might have been open in ArcMap.\n You may "
                           "need to re-start ArcMap to release the file lock.")
            for msg in range(0, arcpy.GetMessageCount() - 1):
                if arcpy.GetSeverity(msg) == 2:
                    arcpy.AddReturnMessage(msg)
                    print(arcpy.AddReturnMessage(msg))
            exit(0)
    return
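# A minimal sketch of calling delete_cwd_dir() directly with an argv-style
# list, the same shape the ArcGIS tool dialog supplies via sys.argv. The
# project path below is purely illustrative, not part of the original example.
if __name__ == "__main__":
    delete_cwd_dir(["delete_cwd_dir.py", r"C:\gis\my_project"])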
import sys
import traceback

import arcpy


def exit_with_geoproc_error(filename):
    """Handle geoprocessor errors and provide details to user"""
    tb = sys.exc_info()[2]  # get the traceback object
    # tbinfo contains the error's line number and the code
    tbinfo = traceback.format_tb(tb)[0]
    line = tbinfo.split(", ")[1]

    arcpy.AddError("Geoprocessing error on **" + line + "** of " + filename +
                   " :")
    for msg in range(0, arcpy.GetMessageCount() - 1):
        if arcpy.GetSeverity(msg) == 2:
            arcpy.AddReturnMessage(msg)
            print(arcpy.AddReturnMessage(msg))
    exit(0)
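# A minimal sketch of the calling pattern exit_with_geoproc_error() is written
# for: wrap a geoprocessing call in try/except and pass the current script's
# file name so the reported line number is meaningful. The Clip inputs below
# are illustrative only.
import os

import arcpy

try:
    arcpy.Clip_analysis("roads", "study_area", "roads_clip")
except arcpy.ExecuteError:
    # Must be called from inside the except block so sys.exc_info() is set
    exit_with_geoproc_error(os.path.basename(__file__))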
import arcpy


def GenerateVtpkTilingScheme(in_map, tileScheme):
    scales = "295829355.454565;147914677.727283;73957338.8636413;36978669.4318207;18489334.7159103;9244667.35795516;4622333.67897758;2311166.83948879;1155583.4197444;577791.709872198;288895.854936099;144447.927468049;72223.9637340247;36111.9818670124;18055.9909335062;9027.99546675309;4513.99773337654;2256.99886668827;1128.49943334414;564.249716672068"
    tile_origin = "-180 180"
    try:
        arcpy.server.GenerateMapServerCacheTilingScheme(in_map=in_map,
                                                        tile_origin=tile_origin,
                                                        output_tiling_scheme=tileScheme,
                                                        num_of_scales=20,
                                                        scales=scales,
                                                        dots_per_inch=96,
                                                        tile_size="512 x 512")
        arcpy.AddMessage("tile scheme - ready.")
        return tileScheme
    except:
        arcpy.AddError("Failed to generate the vector tile tiling scheme. "
                       "Please see the following error for details.")
        for i in range(0, arcpy.GetMessageCount()):
            mesSev = arcpy.GetSeverity(i)
            if mesSev == 1:
                arcpy.AddWarning(arcpy.GetMessage(i))
            elif mesSev == 2:
                arcpy.AddError(arcpy.GetMessage(i))
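# A minimal driver sketch showing how the two helpers above might be chained
# for a map in an ArcGIS Pro project. The project reference, map name, paths,
# and vertex count are assumptions for illustration, not part of the original
# examples. "EXISTING" is used so the custom tiling scheme file is honored.
import arcpy

if __name__ == "__main__":
    aprx = arcpy.mp.ArcGISProject("CURRENT")
    target_map = aprx.listMaps("Map")[0]
    scheme_xml = r"C:\temp\vtpk_tiling_scheme.xml"
    index_fc = r"C:\temp\scratch.gdb\vtpk_index"
    out_vtpk = r"C:\temp\my_tiles.vtpk"

    # Build the tiling scheme first; only package if that step succeeded
    if GenerateVtpkTilingScheme(target_map, scheme_xml):
        createVtpkIndexAndPackage(target_map, "EXISTING", scheme_xml,
                                  10000, index_fc, out_vtpk)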
import arcpy

# fc is assumed to be the path to an existing feature class (illustrative)
fc = "C:/data/test.gdb/test_fc"

try:
    arcpy.AddField_management(fc, "TEST", "TEXT")
    arcpy.AddField_management(fc, "TEST", "TEXT")
except arcpy.ExecuteError:
    pass

print("Test GetMessageCount:")
messageCount = arcpy.GetMessageCount()
print(messageCount)

print("Test GetMessages:")
print(arcpy.GetMessages())

print("Test GetMessage:")
print("GetMessage(0):", arcpy.GetMessage(0))
print("GetMessage(1):", arcpy.GetMessage(1))
print("GetMessage(2):", arcpy.GetMessage(2))

print("Test GetIDMessage:")
print("GetIDMessage(84001):", arcpy.GetIDMessage(84001))
print("GetIDMessage(999999):", arcpy.GetIDMessage(999999))

print("Test GetReturnCode:")
print("Message[1]'s ReturnCode:", arcpy.GetReturnCode(1))
print("Message[2]'s ReturnCode:", arcpy.GetReturnCode(2))

print("Test GetSeverity:")
print("Message[1]'s Severity:", arcpy.GetSeverity(1))
print("Message[2]'s Severity:", arcpy.GetSeverity(2))

print("Test GetSeverityLevel:")
print(arcpy.GetSeverityLevel())
arcpy.SetSeverityLevel(1)
print(arcpy.GetSeverityLevel())

print("Test GetMaxSeverity:")
print(arcpy.GetMaxSeverity())

arcpy.Delete_management(fc)
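# A short, hedged sketch of how SetSeverityLevel() interacts with the
# try/except pattern above: at level 2 (the default) only errors raise
# arcpy.ExecuteError; at level 1, warnings also raise arcpy.ExecuteWarning.
# The duplicate AddField call mirrors the example above; the path is
# illustrative.
import arcpy

fc = "C:/data/test.gdb/test_fc"  # assumed existing feature class
arcpy.SetSeverityLevel(1)        # warnings now raise arcpy.ExecuteWarning
try:
    arcpy.AddField_management(fc, "TEST", "TEXT")
    arcpy.AddField_management(fc, "TEST", "TEXT")   # duplicate field -> error
except arcpy.ExecuteWarning:
    print("Max severity:", arcpy.GetMaxSeverity())
    print(arcpy.GetMessages(1))   # warnings only
except arcpy.ExecuteError:
    print("Max severity:", arcpy.GetMaxSeverity())
    print(arcpy.GetMessages(2))   # errors only
finally:
    arcpy.SetSeverityLevel(2)     # restore the default behaviour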
import arcpy
from arcpy.sa import FocalStatistics, NbrRectangle

try:
    # ... earlier processing steps of the original script are omitted here ...

    neighborhood = NbrRectangle(3, 3, "CELL")
    outFocalStatistics = FocalStatistics(outNFRaster, neighborhood,
                                         "MEDIAN", "DATA")
    outFocalStatistics.save(fsMedRaster)

    # Process: Copy Raster to create an 8bit tif.
    # The input image has a Min-max range of 0-255 but NoData is -32868 and
    # therefore defaulted to 16bit in a GRID. Using Copy Raster does not
    # change the data range but converts to TIF and assigns the NoData value
    # to 256, therefore it is 8bit.
    arcpy.AddMessage("Creating Filtered 8bit TIF image ...")
    arcpy.CopyRaster_management(fsMedRaster, outFRaster, "", "", "", "NONE",
                                "NONE", "8_BIT_UNSIGNED")

except arcpy.ExecuteError:
    # Return Geoprocessing tool specific errors
    line, filename, err = trace()
    arcpy.AddError("Geoprocessing error on " + line + " of " + filename + " :")
    for msg in range(0, arcpy.GetMessageCount()):
        if arcpy.GetSeverity(msg) == 2:
            arcpy.AddReturnMessage(msg)
except:
    # Returns Python and non-tool errors
    line, filename, err = trace()
    arcpy.AddError("Python error on " + line + " of " + filename)
    arcpy.AddError(err)
finally:
    # Process: Delete intermediate files
    if arcpy.Exists(inRastC):
        arcpy.Delete_management(inRastC, "")
    if arcpy.Exists(scImage1):
        arcpy.Delete_management(scImage1, "")
    if arcpy.Exists(fsMedRaster):
        arcpy.Delete_management(fsMedRaster, "")
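# The excerpt above relies on a trace() helper that is not shown. A common
# shape for it in arcpy scripts is sketched below (an assumption, not the
# original implementation): it unpacks sys.exc_info() and returns the failing
# line, the script name, and the error text used by the handlers above.
import sys
import traceback


def trace():
    """Return (line, filename, error message) for the active exception."""
    tb = sys.exc_info()[2]
    tbinfo = traceback.format_tb(tb)[0]
    line = tbinfo.split(", ")[1]
    filename = sys.argv[0]
    synerror = traceback.format_exc().splitlines()[-1]
    return line, filename, synerror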
def main(config_file, *args):
    """
    Import the incidents to a feature class, filtering out duplicates if
    necessary, assign geometry using addresses or XY values, and publish
    the results using AGOL or ArcGIS for Server.

    Output is an updated feature class, processing reports, and optionally
    a service.
    """
    # Message strings (m*, w*, e*, l*), field constants, and helpers such as
    # field_test(), field_vals(), messages(), remove_dups(), sort_records(),
    # convert_to_utc()/convert_from_utc() and serviceutils are defined
    # elsewhere in the original script.

    # Current date and time for file names
    fileNow = dt.strftime(dt.now(), prefix)

    if isfile(config_file):
        cfg = ConfigParser.ConfigParser()
        cfg.read(config_file)
    else:
        raise Exception(e1.format("Configuration file", config_file, ""))

    # Get general configuration values
    incidents = cfg.get('GENERAL', 'spreadsheet')
    inc_features = cfg.get('GENERAL', 'incident_features')
    id_field = cfg.get('GENERAL', 'incident_id')
    report_date_field = cfg.get('GENERAL', 'report_date_field')
    reports = cfg.get('GENERAL', 'reports')
    loc_type = cfg.get('GENERAL', 'loc_type')
    summary_field = cfg.get('GENERAL', 'summary_field')
    transform_method = cfg.get('GENERAL', 'transform_method')
    pub_status = cfg.get('GENERAL', 'pub_status')
    delete_duplicates = cfg.get('GENERAL', 'delete_duplicates')

    if delete_duplicates in ('true', 'True', True):
        delete_duplicates = True
        if report_date_field == "":
            raise Exception(e16)
    if delete_duplicates in ('false', 'False'):
        delete_duplicates = False

    # Log file
    if exists(reports):
        rptLog = join(reports, "{0}_{1}.log".format(fileNow, log_name))
    else:
        raise Exception(e1.format("Report location", reports, w5))

    # Scratch workspace
    tempgdb = arcpy.env.scratchGDB

    with open(rptLog, "w") as log:
        try:
            # Log file header
            log.write(l1.format(fileNow))
            log.write(l2.format(getpass.getuser()))
            log.write(l3.format(incidents))
            log.write(l4.format(inc_features))
            if loc_type == "ADDRESSES":
                log.write(l5.format(cfg.get('ADDRESSES', 'locator')))

            # Validate output feature class geometry type
            desc = arcpy.Describe(inc_features)
            if not desc.shapeType == "Point":
                raise Exception(e6.format(inc_features))

            # Identify field names in both fc and csv
            if arcpy.Exists(incidents):
                csvfieldnames = [f.name for f in arcpy.ListFields(incidents)]
            else:
                raise Exception(e1.format("Spreadsheet", incidents, ""))
            if arcpy.Exists(inc_features):
                incfieldnames = [f.name for f in arcpy.ListFields(inc_features)]
            else:
                raise Exception(e1.format("Feature Class", inc_features, ""))

            matchfieldnames = []
            for name in csvfieldnames:
                if name in incfieldnames:
                    matchfieldnames.append(name)

            # If data is to be geocoded
            if loc_type == "ADDRESSES":
                # Get geocoding parameters
                address_field = cfg.get('ADDRESSES', 'address_field')
                city_field = cfg.get('ADDRESSES', 'city_field')
                state_field = cfg.get('ADDRESSES', 'state_field')
                zip_field = cfg.get('ADDRESSES', 'zip_field')
                locator = cfg.get('ADDRESSES', 'locator')

                # Geocoding field names
                reqFields = [address_field, id_field]  # , report_date_field]
                opFields = [city_field, state_field, zip_field,
                            summary_field, report_date_field]

                if locator == "":
                    raise Exception(e13)

                # Test geolocator fields
                loc_address_fields = [loc_address_field, loc_city_field,
                                      loc_zip_field, loc_state_field]
                for a in loc_address_fields:
                    if not a == "":
                        if not a in all_locator_fields:
                            raise Exception(e14)

            # If data has coordinate values
            else:
                # Get coordinate parameters
                lg_field = cfg.get('COORDINATES', 'long_field')
                lt_field = cfg.get('COORDINATES', 'lat_field')
                coord_system = cfg.get('COORDINATES', 'coord_system')
                remove_zeros = cfg.get('COORDINATES', 'ignore_zeros')
                if remove_zeros in ('true', 'True'):
                    remove_zeros = True
                if remove_zeros in ('false', 'False'):
                    remove_zeros = False

                # Coordinate field names
                reqFields = [id_field, lg_field, lt_field]  # , report_date_field]
                opFields = [summary_field, report_date_field]

            # Validate required field names
            field_test(incidents, reqFields, csvfieldnames, True)
            field_test(inc_features, reqFields, incfieldnames, True)

            # Validate optional field names
            field_test(incidents, opFields, csvfieldnames)
            field_test(inc_features, opFields, incfieldnames)

            # Validate basic publishing parameters
            if not pub_status == "":
                # Get general publishing parameters
                mxd = cfg.get('PUBLISHING', 'mxd')
                username = cfg.get('PUBLISHING', 'user_name')
                password = cfg.get('PUBLISHING', 'password')

                # Test for required inputs
                if not arcpy.Exists(mxd):
                    raise Exception(e1.format("Map document", mxd, ""))
                if splitext(mxd)[1] != ".mxd":
                    raise Exception(e3)

                # Test for required inputs
                if username == "" or password == "":
                    if pub_status == "ARCGIS_ONLINE":
                        raise Exception(e8)

            # Get address fields for geocoding
            if loc_type == "ADDRESSES":
                addresses = ""
                loc_fields = []
                adr_string = "{0} {1} VISIBLE NONE;"
                for loc_field in all_locator_fields:
                    if loc_field == loc_address_field:
                        addresses += adr_string.format(loc_field, address_field)
                        loc_fields.append(address_field)
                    elif loc_field == loc_city_field and city_field != "":
                        addresses += adr_string.format(loc_field, city_field)
                        loc_fields.append(city_field)
                    elif loc_field == loc_state_field and state_field != "":
                        addresses += adr_string.format(loc_field, state_field)
                        loc_fields.append(state_field)
                    elif loc_field == loc_zip_field and zip_field != "":
                        addresses += adr_string.format(loc_field, zip_field)
                        loc_fields.append(zip_field)
                    else:
                        addresses += adr_string.format(loc_field, "<None>")

            # Get coordinate fields
            else:
                loc_fields = [lg_field, lt_field]

            total_records = len(field_vals(incidents, id_field))
            messages(m17.format(total_records, incidents), log)

            if not summary_field == "":
                SumVals = field_vals(incidents, summary_field)
                listSumVals = [val for val in SumVals if val != None]

                if not len(SumVals) == len(listSumVals):
                    print(m19.format(len(SumVals) - len(listSumVals)))
                    log.write(m19.format(len(SumVals) - len(listSumVals)))
                listSumVals.sort()

                log.write(l10.format(summary_field))
                dateCount = 1
                i = 0
                n = len(listSumVals)

                while i < n:
                    try:
                        if listSumVals[i] == listSumVals[i + 1]:
                            dateCount += 1
                        else:
                            log.write(l11.format(listSumVals[i], dateCount))
                            dateCount = 1
                    except:
                        log.write(l11.format(listSumVals[i], dateCount))
                    i += 1

                log.write("\n")

            # Remove duplicate incidents
            if delete_duplicates:
                timeNow = dt.strftime(dt.now(), time_format)
                messages(m13.format(timeNow), log)

                incidents, req_nulls, countUpdate, countDelete = remove_dups(
                    tempgdb, incidents, inc_features, matchfieldnames,
                    id_field, report_date_field, loc_fields)

                if not req_nulls == "":
                    req_nulls = "{}\n".format(req_nulls)
                    messages(w3.format(req_nulls), log, 1)
                if not countUpdate == 0:
                    messages(m14.format(countUpdate, inc_features), log)
                if countDelete > 0:
                    messages(m15.format(countDelete, inc_features), log)

            # Create features
            tempFC = join(tempgdb, "tempDataLE")

            # Create point features from spreadsheet
            timeNow = dt.strftime(dt.now(), time_format)
            messages(m1.format(timeNow), log)

            if loc_type == "ADDRESSES":
                timeNow = dt.strftime(dt.now(), time_format)
                messages(m3.format(timeNow), log)

                # Geocode the incidents
                arcpy.GeocodeAddresses_geocoding(incidents, locator,
                                                 addresses, tempFC, "STATIC")

                # Initiate geocoding report counts
                countMatch = 0
                countTrueMatch = 0
                countUnmatch = 0

                # Create geocoding reports
                rptUnmatch = join(reports, "{0}_{1}.csv".format(fileNow,
                                                                unmatch_name))

                fieldnames = [f.name for f in arcpy.ListFields(tempFC)]

                # Sort incidents based on match status
                statusIndex = fieldnames.index(status)
                locIndex = fieldnames.index(addr_type)

                # Write incidents that were not well geocoded to file and
                # delete from temp directory
                with open(rptUnmatch, "wb") as umatchFile:
                    unmatchwriter = csv.writer(umatchFile)
                    unmatchwriter.writerow(fieldnames)

                    # Delete incidents that were not Matched
                    countUnmatch = sort_records(tempFC, unmatchwriter,
                                                statusIndex, match_value,
                                                False, True)
                    if not countUnmatch == 0:
                        messages(w6.format(countUnmatch, rptUnmatch), log, 1)

                    # Incidents that were not matched to an acceptable accuracy
                    countMatch = sort_records(tempFC, unmatchwriter,
                                              locIndex, addrOK, False, True)
                    if not countMatch == 0:
                        messages(w7.format(countMatch, addrOK, rptUnmatch),
                                 log, 1)

                    countTrueMatch = len(field_vals(tempFC, "OBJECTID"))
                    messages(m16.format(countTrueMatch, inc_features), log)

            else:
                # Create temporary output storage
                tempFL = arcpy.MakeXYEventLayer_management(incidents,
                                                           lg_field,
                                                           lt_field,
                                                           "tempLayerLE",
                                                           coord_system)

                # Convert the feature layer to a feature class to prevent
                # field name changes
                arcpy.CopyFeatures_management(tempFL, tempFC)
                arcpy.Delete_management(tempFL)

                timeNow = dt.strftime(dt.now(), time_format)
                messages(m4.format(timeNow, inc_features), log)

            # Fields that will be copied from geocode results to final fc
            copyfieldnames = []
            copyfieldnames.extend(matchfieldnames)
            copyfieldnames.append("SHAPE@XY")

            # Fields for error reporting
            errorfieldnames = []
            errorfieldnames.extend(matchfieldnames)
            errorfieldnames.insert(0, errorfield)
            errorfieldnames += [long_field, lat_field]

            # Reproject the features
            sr_input = arcpy.Describe(tempFC).spatialReference
            sr_output = arcpy.Describe(inc_features).spatialReference

            if sr_input != sr_output:
                proj_out = "{}_proj".format(tempFC)
                arcpy.Project_management(tempFC, proj_out,
                                         sr_output, transform_method)
                tempFC = proj_out

            # Append geocode results to fc
            rptNoAppend = join(reports, "{0}_{1}.csv".format(fileNow,
                                                             noappend_name))

            with arcpy.da.SearchCursor(tempFC, copyfieldnames) as csvrows:
                with arcpy.da.InsertCursor(inc_features,
                                           copyfieldnames) as incrows:
                    # Open csv for un-appended records
                    with open(rptNoAppend, "wb") as appendFile:
                        appendwriter = csv.writer(appendFile)
                        appendwriter.writerow(errorfieldnames)

                        # Index of field with incident ID
                        record = errorfieldnames.index(id_field)

                        # Initiate count of successfully appended records
                        countAppend = 0

                        # List of ids of records not successfully appended
                        errorRecords = []

                        for csvrow in csvrows:
                            try:
                                if loc_type == "COORDINATES":
                                    if remove_zeros:
                                        lt_index = copyfieldnames.index(lt_field)
                                        lg_index = copyfieldnames.index(lg_field)

                                        ltVal = csvrow[lt_index]
                                        lgVal = csvrow[lg_index]

                                        if ltVal == 0 and lgVal == 0:
                                            raise Exception("invalid_coordinates")

                                # If the row can be appended
                                incrows.insertRow(csvrow)
                                countAppend += 1

                            except Exception as reason:
                                # e.g. 'The value type is incompatible with the
                                #       field type. [INCIDENTDAT]'
                                # Alternatively, the exception
                                # 'invalid_coordinates' raised by the
                                # remove_zeros test above

                                # Get the name of the problem field
                                badfield = reason[0].split(" ")[-1]
                                badfield = badfield.strip(" []")

                                # Append field name to start of record
                                csvrow = list(csvrow)
                                csvrow.insert(0, badfield)

                                # Split the coordinate tuple into X and Y
                                lng, lat = list(csvrow[-1])
                                csvrow[-1] = lng
                                csvrow.append(lat)
                                csvrow = tuple(csvrow)

                                # Write the record out to csv
                                appendwriter.writerow(csvrow)

                                # Add id and field to issue list
                                errorRecords.append(
                                    w4.format(csvrow[record], badfield))

                        # If issues were reported, print them
                        if len(errorRecords) != 0:
                            messages(w1.format(len(errorRecords), inc_features,
                                               rptNoAppend), log, 1)

                        messages(m18.format(countAppend, inc_features), log)

                        del incrows, csvrows

            # Convert times to UTC if publishing to AGOL
            if pub_status == "ARCGIS_ONLINE":
                # Get date fields
                date_fields = [f.name for f in arcpy.ListFields(inc_features)
                               if f.type == "Date"
                               and f.name in matchfieldnames]

                # Convert from system timezone to UTC
                convert_to_utc(inc_features, date_fields)

            # Publish incidents
            if not pub_status == "":
                timeNow = dt.strftime(dt.now(), time_format)
                messages(m5.format(timeNow), log)

                errors = serviceutils.publish_service(cfg, pub_status, mxd,
                                                      username, password)

                # Print analysis errors
                if errors:
                    raise Exception(e4.format(errors))

            # Convert times from UTC to system timezone
            if pub_status == "ARCGIS_ONLINE":
                convert_from_utc(inc_features, date_fields)

            timeNow = dt.strftime(dt.now(), time_format)
            messages(m8.format(timeNow), log)

        except arcpy.ExecuteError:
            print("{}\n{}\n".format(gp_error, arcpy.GetMessages(2)))
            timeNow = dt.strftime(dt.now(), "{} {}".format(date_format,
                                                           time_format))
            arcpy.AddError("{} {}:\n".format(timeNow, gp_error))
            arcpy.AddError("{}\n".format(arcpy.GetMessages(2)))

            log.write("{} ({}):\n".format(gp_error, timeNow))
            log.write("{}\n".format(arcpy.GetMessages(2)))

            for msg in range(0, arcpy.GetMessageCount()):
                if arcpy.GetSeverity(msg) == 2:
                    code = arcpy.GetReturnCode(msg)
                    print("Code: {}".format(code))
                    print("Message: {}".format(arcpy.GetMessage(msg)))

        except Exception as ex:
            print("{}: {}\n".format(py_error, ex))
            timeNow = dt.strftime(dt.now(), "{}".format(time_format))

            arcpy.AddError("{} {}:\n".format(timeNow, py_error))
            arcpy.AddError("{}\n".format(ex))

            log.write("{} {}:\n".format(timeNow, py_error))
            log.write("{}\n".format(ex))

        finally:
            # Clean up
            try:
                arcpy.Delete_management(tempgdb)
            except:
                pass
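# A minimal sketch of how main() might be launched, assuming the script is
# run with the path to its .ini configuration file as the first argument.
# This entry point is an assumption for illustration; it is not part of the
# excerpt above.
if __name__ == '__main__':
    import sys
    main(*sys.argv[1:])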