def createBoundingBoxPolygon(mxd_path, bkmk_name, out_fc):
    """Create a polygon feature class from the bounding-box coordinates of
    each bookmark in a map document that matches the supplied name.

    mxd_path (str): path to the .mxd containing the bookmark(s)
    bkmk_name (str): wildcard passed to arcpy.mapping.ListBookmarks
    out_fc (str): path of the polygon feature class to create
    """
    geom_type = 'POLYGON'
    # 2913 = NAD83(HARN) / Oregon North (ft) -- used throughout this file
    oregon_spn = arcpy.SpatialReference(2913)
    management.CreateFeatureclass(os.path.dirname(out_fc),
                                  os.path.basename(out_fc), geom_type,
                                  spatial_reference=oregon_spn)
    name_field, f_type = 'name', 'TEXT'
    management.AddField(out_fc, name_field, f_type)

    # drop default field created with the feature class
    drop_field = 'Id'
    management.DeleteField(out_fc, drop_field)

    i_fields = ['Shape@', name_field]
    mxd = mapping.MapDocument(mxd_path)
    # context manager releases the cursor (and its locks) even if an
    # insert fails -- consistent with the SearchCursor usage elsewhere
    with da.InsertCursor(out_fc, i_fields) as i_cursor:
        for bkmk in arcpy.mapping.ListBookmarks(mxd, bkmk_name):
            extent = bkmk.extent
            pt_array = arcpy.Array()
            pt_array.add(arcpy.Point(extent.XMin, extent.YMin))
            pt_array.add(arcpy.Point(extent.XMin, extent.YMax))
            pt_array.add(arcpy.Point(extent.XMax, extent.YMax))
            pt_array.add(arcpy.Point(extent.XMax, extent.YMin))
            # add first point again to close polygon
            pt_array.add(arcpy.Point(extent.XMin, extent.YMin))
            i_cursor.insertRow((arcpy.Polygon(pt_array), bkmk.name))
def create_points_feature_class(fc, sr=None):
    """Build an empty GPX-waypoint point feature class at *fc*.

    Creates the feature class in the scratch workspace first, then copies
    it to *fc*. Returns *fc*, or None when no spatial reference is
    available (an ArcGIS error is reported in that case).
    """
    arcpy.env.addOutputsToMap = False
    sr = sr or arcpy.env.outputCoordinateSystem
    if sr is None:
        arcpy.AddError('No spatial reference system.')
        return None

    scratch_fc = os.path.join(arcpy.env.scratchWorkspace, os.path.basename(fc))
    mgmt.CreateFeatureclass(*os.path.split(scratch_fc), 'POINT',
                            spatial_reference=sr)

    # waypoint attribute schema: one DOUBLE, five 64-char TEXT, one LONG
    mgmt.AddField(scratch_fc, 'ELEVATION', 'DOUBLE')
    for text_field in ('TIME', 'NAME', 'DESCRIPTION', 'SYMBOL', 'TYPE'):
        mgmt.AddField(scratch_fc, text_field, 'TEXT', field_length=64)
    mgmt.AddField(scratch_fc, 'SAMPLES', 'LONG')

    if fc != scratch_fc:
        mgmt.Copy(scratch_fc, fc)
        mgmt.Delete(scratch_fc)
    return fc
def generateCcCombinedRoutesFc():
    """The city center routes are split into a few feature classes for the
    various modes of transportation, combine them into a unified one"""
    combined_routes_cc = os.path.join(cc_shapefiles, 'combined_routes_cc.shp')
    management.CreateFeatureclass(
        os.path.dirname(combined_routes_cc),
        os.path.basename(combined_routes_cc),
        'POLYLINE',
        os.path.join(env.workspace, 'frequent_bus_carto'),
        spatial_reference=arcpy.SpatialReference(2913))

    rail_name_field = 'LINE'
    route_fields = ['Shape@', 'routes', 'serv_level', 'route_type']
    combined_cursor = da.InsertCursor(combined_routes_cc, route_fields)

    for fc in arcpy.ListFeatureClasses(feature_type='Polyline'):
        # rail feature classes carry a 'LINE' field and need their route
        # numbers populated before being copied over
        existing_fields = [f.name for f in arcpy.ListFields(fc)]
        if rail_name_field in existing_fields:
            assignRouteNumbersToRail(fc, rail_name_field, route_fields[1])

        with da.SearchCursor(fc, route_fields) as cursor:
            for row in cursor:
                combined_cursor.insertRow(row)

    del combined_cursor
def createCcBusLabelsFc():
    """The offset routes for the city center have only one set of geometries
    for each service level, but there needs to be labels for each line so
    generate a unique geometry for each of the routes the line segments
    represent"""
    bus_labels_cc = os.path.join(cc_shapefiles, 'bus_labels_cc.shp')
    management.CreateFeatureclass(
        os.path.dirname(bus_labels_cc),
        os.path.basename(bus_labels_cc),
        'POLYLINE',
        os.path.join(sm_shapefiles, 'distinct_routes.shp'),
        spatial_reference=arcpy.SpatialReference(2913))

    insert_fields = ['Shape@', 'route_id', 'serv_level', 'route_type']
    # the source fcs store a comma-separated list in 'routes' instead of a
    # single 'route_id'
    search_fields = ['Shape@', 'routes', 'serv_level', 'route_type']
    label_cursor = da.InsertCursor(bus_labels_cc, insert_fields)

    for fc in arcpy.ListFeatureClasses():
        if 'bus' not in fc:
            continue
        with da.SearchCursor(fc, search_fields) as cursor:
            routes_ix = cursor.fields.index('routes')
            for row in cursor:
                # one output row per route sharing this geometry
                for route in row[routes_ix].split(','):
                    label_row = list(row)
                    label_row[routes_ix] = route
                    label_cursor.insertRow(label_row)

    del label_cursor
def createUnifiedFc():
    """Create feature class to hold all of the routes that have been offset
    as individual fc's"""
    template = os.path.join(project_dir, 'shp', 'system_map',
                            'distinct_routes.shp')
    out_dir, out_name = os.path.split(offset_routes)
    management.CreateFeatureclass(out_dir, out_name, 'POLYLINE', template,
                                  spatial_reference=arcpy.SpatialReference(2913))
def createExtentFeatureClass():
    """Create a feature class to hold the map extent geometries"""
    out_dir, out_name = path.split(pylon_extents)
    management.CreateFeatureclass(out_dir, out_name, 'POLYGON',
                                  spatial_reference=arcpy.SpatialReference(2913))
    # `name_field` is a module-level global shared with the callers
    management.AddField(pylon_extents, name_field, 'TEXT')
    # remove the default 'Id' field created with the feature class
    management.DeleteField(pylon_extents, 'Id')
def createInsetBox():
    """The bus mall inset covers a portion of the city center map so that
    needs to be reflected in the inset box, using the inflection point and
    the city center bound box create an fc that contains the inset box"""
    # hard-coded inflection point where the inset notches into the bbox
    inflect_pt = {'x': 7649075, 'y': 686384}
    bkmk_dict = getBookmarkBbox(city_center_mxd, city_center_bkmk)

    geom_type = 'POLYGON'
    oregon_spn = arcpy.SpatialReference(2913)
    management.CreateFeatureclass(os.path.dirname(inset_box),
                                  os.path.basename(inset_box), geom_type,
                                  spatial_reference=oregon_spn)
    f_name, f_type = 'name', 'TEXT'
    management.AddField(inset_box, f_name, f_type)

    # drop default field; use the module-level `management` alias for
    # consistency with the rest of the file (was arcpy.management here)
    drop_field = 'Id'
    management.DeleteField(inset_box, drop_field)

    i_fields = ['Shape@', f_name]
    # context manager guarantees cursor release even if insertRow raises
    with da.InsertCursor(inset_box, i_fields) as i_cursor:
        # trace the bbox corners, detouring through the inflection point
        ap_array = arcpy.Array()
        ap_array.add(arcpy.Point(bkmk_dict['x-min'], bkmk_dict['y-min']))
        ap_array.add(arcpy.Point(bkmk_dict['x-min'], bkmk_dict['y-max']))
        ap_array.add(arcpy.Point(bkmk_dict['x-max'], bkmk_dict['y-max']))
        ap_array.add(arcpy.Point(bkmk_dict['x-max'], inflect_pt['y']))
        ap_array.add(arcpy.Point(inflect_pt['x'], inflect_pt['y']))
        ap_array.add(arcpy.Point(inflect_pt['x'], bkmk_dict['y-min']))
        # add first point again to close polygon
        ap_array.add(arcpy.Point(bkmk_dict['x-min'], bkmk_dict['y-min']))
        i_cursor.insertRow((arcpy.Polygon(ap_array), 'Portland City Center'))
def createOutput(self, outputFC):
    """Creates an Output Feature Class with the Mean Centers.

    INPUTS:
    outputFC (str): path to the output feature class
    """
    #### Validate Output Workspace ####
    ERROR.checkOutputPath(outputFC)

    #### Shorthand Attributes ####
    ssdo = self.ssdo
    caseField = self.caseField
    dimField = self.dimField

    #### Create Output Feature Class ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
    outPath, outName = OS.path.split(outputFC)
    try:
        DM.CreateFeatureclass(outPath, outName, "POINT", "", ssdo.mFlag,
                              ssdo.zFlag, ssdo.spatialRefString)
    except:
        # creation failure is fatal for the tool run
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Add Field Names ####
    # mcFieldNames is a module-level tuple of (x, y, z) output field names
    fn = UTILS.getFieldNames(mcFieldNames, outPath)
    xFieldName, yFieldName, zFieldName = fn
    shapeFieldNames = ["SHAPE@"]
    dataFieldNames = [xFieldName, yFieldName]
    if ssdo.zBool:
        dataFieldNames.append(zFieldName)
    for fieldName in dataFieldNames:
        UTILS.addEmptyField(outputFC, fieldName, "DOUBLE")
    caseIsDate = False
    if caseField:
        fcCaseField = ssdo.allFields[caseField]
        validCaseName = UTILS.validQFieldName(fcCaseField, outPath)
        caseType = UTILS.convertType[fcCaseField.type]
        UTILS.addEmptyField(outputFC, validCaseName, caseType)
        dataFieldNames.append(validCaseName)
        if caseType.upper() == "DATE":
            caseIsDate = True
    if dimField:
        fcDimField = ssdo.allFields[dimField]
        validDimName = UTILS.validQFieldName(fcDimField, outPath)
        if caseField:
            # avoid a name collision between the case and dimension fields
            if validCaseName == validDimName:
                validDimName = ARCPY.GetIDMessage(84199)
        UTILS.addEmptyField(outputFC, validDimName, "DOUBLE")
        dataFieldNames.append(validDimName)

    #### Write Output ####
    # row layout must match allFieldNames ordering exactly
    allFieldNames = shapeFieldNames + dataFieldNames
    rows = DA.InsertCursor(outputFC, allFieldNames)
    for case in self.caseKeys:
        #### Mean Centers ####
        meanX, meanY, meanZ = self.meanCenter[case]
        pnt = (meanX, meanY, meanZ)
        if ssdo.zBool:
            rowResult = [pnt, meanX, meanY, meanZ]
        else:
            rowResult = [pnt, meanX, meanY]

        #### Set Attribute Fields ####
        if caseField:
            # case keys appear to be numpy scalars; .item() unboxes them
            caseValue = case.item()
            if caseIsDate:
                caseValue = TUTILS.iso2DateTime(caseValue)
            rowResult.append(caseValue)

        if dimField:
            meanDim = self.dimCenter[case]
            rowResult.append(meanDim)

        rows.insertRow(rowResult)

    #### Clean Up ####
    del rows

    #### Set Attribute ####
    self.outputFC = outputFC
def createOutput(self, outputFC):
    """Creates an Output Feature Class with the Standard Distances.

    INPUTS:
    outputFC (str): path to the output feature class
    """
    #### Validate Output Workspace ####
    ERROR.checkOutputPath(outputFC)

    #### Shorthand Attributes ####
    ssdo = self.ssdo
    caseField = self.caseField

    #### Increase Extent if not Projected ####
    # circle polygons may extend past the data extent in geographic
    # coordinates, so widen the XY domain by the largest radius first
    if ssdo.spatialRefType != "Projected":
        sdValues = self.sd.values()
        if len(sdValues):
            maxRadius = max(sdValues)
            largerExtent = UTILS.increaseExtentByConstant(
                ssdo.extent, constant=maxRadius)
            largerExtent = [LOCALE.str(i) for i in largerExtent]
            ARCPY.env.XYDomain = " ".join(largerExtent)

    #### Create Output Feature Class ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
    outPath, outName = OS.path.split(outputFC)
    try:
        DM.CreateFeatureclass(outPath, outName, "POLYGON", "", ssdo.mFlag,
                              ssdo.zFlag, ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Add Fields to Output FC ####
    dataFieldNames = UTILS.getFieldNames(sdFieldNames, outPath)
    shapeFieldNames = ["SHAPE@"]
    for fieldName in dataFieldNames:
        UTILS.addEmptyField(outputFC, fieldName, "DOUBLE")
    caseIsDate = False
    if caseField:
        fcCaseField = ssdo.allFields[caseField]
        validCaseName = UTILS.validQFieldName(fcCaseField, outPath)
        caseType = UTILS.convertType[fcCaseField.type]
        UTILS.addEmptyField(outputFC, validCaseName, caseType)
        dataFieldNames.append(validCaseName)
        if caseType.upper() == "DATE":
            caseIsDate = True

    #### Write Output ####
    badCaseRadians = []
    allFieldNames = shapeFieldNames + dataFieldNames
    rows = DA.InsertCursor(outputFC, allFieldNames)
    for case in self.caseKeys:
        #### Get Results ####
        xVal, yVal = self.meanCenter[case]
        radius = self.sd[case]

        #### Create Empty Polygon Geomretry ####
        poly = ARCPY.Array()

        #### Check for Valid Radius ####
        # a zero or NaN radius cannot produce a circle polygon; badRadian
        # counts failures -- 6 is the sentinel for "skip this case"
        radiusZero = UTILS.compareFloat(0.0, radius, rTol=.0000001)
        radiusNan = NUM.isnan(radius)
        radiusBool = radiusZero + radiusNan
        if radiusBool:
            badRadian = 6
            badCase = UTILS.caseValue2Print(case, self.caseIsString)
            badCaseRadians.append(badCase)
        else:
            badRadian = 0

            #### Calculate a Point For Each ####
            #### Degree in Circle Polygon ####
            for degree in NUM.arange(0, 360):
                try:
                    radians = NUM.pi / 180.0 * degree
                    pntX = xVal + (radius * NUM.cos(radians))
                    pntY = yVal + (radius * NUM.sin(radians))
                    pnt = ARCPY.Point(pntX, pntY, ssdo.defaultZ)
                    poly.add(pnt)
                except:
                    badRadian += 1
                    if badRadian == 6:
                        badCase = UTILS.caseValue2Print(
                            case, self.caseIsString)
                        badCaseRadians.append(badCase)
                        break

        if badRadian < 6:
            #### Create and Populate New Feature ####
            poly = ARCPY.Polygon(poly, None, True)
            rowResult = [poly, xVal, yVal, radius]
            if caseField:
                caseValue = case.item()
                if caseIsDate:
                    caseValue = TUTILS.iso2DateTime(caseValue)
                rowResult.append(caseValue)
            rows.insertRow(rowResult)

    #### Report Bad Cases Due to Geometry (coincident pts) ####
    nBadRadians = len(badCaseRadians)
    if nBadRadians:
        if caseField:
            badCaseRadians = " ".join(badCaseRadians)
            ARCPY.AddIDMessage("WARNING", 1011, caseField, badCaseRadians)
        else:
            # without a case field a single bad radius means no output at all
            ARCPY.AddIDMessage("ERROR", 978)
            raise SystemExit()

    #### Return Extent to Normal if not Projected ####
    if ssdo.spatialRefType != "Projected":
        ARCPY.env.XYDomain = None

    #### Clean Up ####
    del rows

    #### Set Attribute ####
    self.outputFC = outputFC
def createOutput(self, outputFC): """Creates an Output Feature Class with the Directional Mean Results. INPUTS: outputFC (str): path to the output feature class """ #### Validate Output Workspace #### ERROR.checkOutputPath(outputFC) #### Shorthand Attributes #### ssdo = self.ssdo caseField = self.caseField #### Create Output Feature Class #### ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003)) outPath, outName = OS.path.split(outputFC) try: DM.CreateFeatureclass(outPath, outName, "POLYLINE", "", ssdo.mFlag, ssdo.zFlag, ssdo.spatialRefString) except: ARCPY.AddIDMessage("ERROR", 210, outputFC) raise SystemExit() #### Add Fields to Output FC #### dataFieldNames = UTILS.getFieldNames(lmFieldNames, outPath) shapeFieldNames = ["SHAPE@"] for fieldName in dataFieldNames: UTILS.addEmptyField(outputFC, fieldName, "DOUBLE") caseIsDate = False if caseField: fcCaseField = ssdo.allFields[caseField] validCaseName = UTILS.validQFieldName(fcCaseField, outPath) caseType = UTILS.convertType[fcCaseField.type] UTILS.addEmptyField(outputFC, validCaseName, caseType) dataFieldNames.append(validCaseName) if caseType.upper() == "DATE": caseIsDate = True #### Populate Output Feature Class #### allFieldNames = shapeFieldNames + dataFieldNames rows = DA.InsertCursor(outputFC, allFieldNames) for case in self.caseKeys: #### Get Results #### start, end, length, rAngle, dAngle, circVar = self.dm[case] meanX, meanY = self.meanCenter[case] dirMean = 360. - dAngle + 90. if not dirMean < 360: dirMean = dirMean - 360. 
#### Create Start and End Points #### x0, y0 = start startPoint = ARCPY.Point(x0, y0, ssdo.defaultZ) x1, y1 = end endPoint = ARCPY.Point(x1, y1, ssdo.defaultZ) #### Create And Populate Line Array #### line = ARCPY.Array() line.add(startPoint) line.add(endPoint) line = ARCPY.Polyline(line, None, True) #### Create and Populate New Line Feature #### rowResult = [line, dAngle, dirMean, circVar, meanX, meanY, length] if caseField: caseValue = case if caseIsDate: caseValue = TUTILS.iso2DateTime(caseValue) rowResult.append(caseValue) rows.insertRow(rowResult) #### Clean Up #### del rows #### Set Attribute #### self.outputFC = outputFC #### Set the Default Symbology #### params = ARCPY.gp.GetParameterInfo() if self.orientationOnly: renderLayerFile = "LinearMeanTwoWay.lyr" else: renderLayerFile = "LinearMeanOneWay.lyr" templateDir = OS.path.dirname(OS.path.dirname(SYS.argv[0])) fullRLF = OS.path.join(templateDir, "Templates", "Layers", renderLayerFile) params[1].Symbology = fullRLF
def createOutput(self, outputFC):
    """Creates an Output Feature Class with the Median Centers.

    INPUTS:
    outputFC (str): path to the output feature class
    """
    #### Validate Output Workspace ####
    ERROR.checkOutputPath(outputFC)

    #### Shorthand Attributes ####
    ssdo = self.ssdo
    caseField = self.caseField
    attFields = self.attFields

    #### Create Output Feature Class ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
    outPath, outName = OS.path.split(outputFC)
    try:
        DM.CreateFeatureclass(outPath, outName, "POINT", "", ssdo.mFlag,
                              ssdo.zFlag, ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Add Field Names ####
    dataFieldNames = UTILS.getFieldNames(mdcFieldNames, outPath)
    shapeFieldNames = ["SHAPE@"]
    for fieldName in dataFieldNames:
        UTILS.addEmptyField(outputFC, fieldName, "DOUBLE")
    caseIsDate = False
    if caseField:
        fcCaseField = ssdo.allFields[caseField]
        validCaseName = UTILS.validQFieldName(fcCaseField, outPath)
        caseType = UTILS.convertType[fcCaseField.type]
        UTILS.addEmptyField(outputFC, validCaseName, caseType)
        dataFieldNames.append(validCaseName)
        if caseType.upper() == "DATE":
            caseIsDate = True
    if attFields:
        for attField in attFields:
            fcAttField = ssdo.allFields[attField]
            validAttName = UTILS.validQFieldName(fcAttField, outPath)
            if caseField:
                # avoid a name collision with the case field
                if validCaseName == validAttName:
                    validAttName = ARCPY.GetIDMessage(84195)
            UTILS.addEmptyField(outputFC, validAttName, "DOUBLE")
            dataFieldNames.append(validAttName)
    # NOTE(review): computed but never used in this method
    outShapeFileBool = UTILS.isShapeFile(outputFC)

    #### Add Median X, Y, Dim ####
    allFieldNames = shapeFieldNames + dataFieldNames
    rows = DA.InsertCursor(outputFC, allFieldNames)
    for case in self.caseKeys:
        #### Median Centers ####
        medX, medY = self.medianCenter[case]
        pnt = (medX, medY, ssdo.defaultZ)
        rowResult = [pnt, medX, medY]

        #### Set Attribute Fields ####
        if caseField:
            # case keys appear to be numpy scalars; .item() unboxes them
            caseValue = case.item()
            if caseIsDate:
                caseValue = TUTILS.iso2DateTime(caseValue)
            rowResult.append(caseValue)

        #### Set Attribute Fields ####
        if attFields:
            for attInd, attField in enumerate(self.attFields):
                medAtt = self.attCenter[case][attInd]
                rowResult.append(medAtt)

        rows.insertRow(rowResult)

    #### Clean Up ####
    del rows

    #### Set Attribute ####
    self.outputFC = outputFC
def mergeDualCarriageways():
    """Collapse dual carriageways and turning circles into single,
    straight-line roadways, the tools that achieve these effects are run on
    each route separately then the routes are added back to a single
    feature class as this yields better results"""
    generateMatchCode()

    # create a feature class to store all of the outputs
    geom_type = 'POLYLINE'
    template = distinct_routes_src
    oregon_spn = arcpy.SpatialReference(2913)
    management.CreateFeatureclass(os.path.dirname(collapsed_routes),
                                  os.path.basename(collapsed_routes),
                                  geom_type, template,
                                  spatial_reference=oregon_spn)

    # make a feature layer of the source routes so that selections can be
    # made on it
    distinct_rte_lyr = 'distinct_transit_routes'
    management.MakeFeatureLayer(distinct_routes, distinct_rte_lyr)

    route_service_list = getRouteServicePairs()
    temp_merge = os.path.join(temp_shp_dir, 'temp_merge.shp')
    temp_collapse = os.path.join(temp_shp_dir, 'temp_collapse.shp')

    route_fields = ['Shape@', 'route_id', 'serv_level', 'route_type']
    i_cursor = da.InsertCursor(collapsed_routes, route_fields)

    for route, service in route_service_list:
        # isolate the current route/service-level pair in the layer
        select_type = 'NEW_SELECTION'
        where_clause = """"route_id" = {0} AND "serv_level" = '{1}'""".format(
            route, service)
        management.SelectLayerByAttribute(distinct_rte_lyr, select_type,
                                          where_clause)

        # merge dual carriageways
        merge_field = 'merge_id'  # '0' in this field means won't be merged
        merge_distance = 100  # feet
        cartography.MergeDividedRoads(distinct_rte_lyr, merge_field,
                                      merge_distance, temp_merge)

        # collapse turing circles
        collapse_distance = 550
        cartography.CollapseRoadDetail(temp_merge, collapse_distance,
                                       temp_collapse)

        with da.SearchCursor(temp_collapse, route_fields) as s_cursor:
            for row in s_cursor:
                i_cursor.insertRow(row)

    del i_cursor

    # now merge contiguous line segments with common attributes, now that
    # dual carriage-ways have been collapsed the data can be reduced to
    # fewer segments
    dissolve_fields = ['route_id', 'serv_level', 'route_type']
    geom_class = 'SINGLE_PART'
    line_handling = 'UNSPLIT_LINES'
    management.Dissolve(collapsed_routes, dissolved_routes, dissolve_fields,
                        multi_part=geom_class, unsplit_lines=line_handling)
def separateRoutes():
    """Create a distinct feature class for each of the routes in the
    source fc"""
    # route_type code -> mode of transportation (kept for reference)
    type_dict = {
        1: 'bus',
        2: 'aerial tram',
        3: 'wes',
        4: 'streetcar',
        5: 'max'
    }
    line_dict = {
        193: 'ns',
        194: 'cl',
        208: 'aerial_tram',
        # 999 is a place holder being used since the route_id field is type: int
        # and doesn't accept characters other than numbers
        999: 'new_sellwood_099'
    }

    management.CreateFileGDB(os.path.dirname(route_gdb),
                             os.path.basename(route_gdb))

    # collect unique (route, service level) pairs and service levels
    service_list = []
    service_levels = []
    with da.SearchCursor(all_routes, route_fields[1:]) as s_cursor:
        for rte, serv, r_type in s_cursor:
            rs = (int(rte), serv)
            # wes (3) and max (5) routes are excluded from the pair list
            if r_type not in (3, 5) and rs not in service_list:
                service_list.append(rs)
            if serv not in service_levels:
                service_levels.append(serv)

    oregon_spn = arcpy.SpatialReference(2913)
    for level in service_levels:
        management.CreateFeatureDataset(route_gdb, level.replace('-', '_'),
                                        oregon_spn)

    for route_id, service in service_list:
        # translate number to name for streetcar and aerial tram lines;
        # catch only KeyError -- the previous bare `except:` could mask
        # unrelated failures
        try:
            route_text = line_dict[route_id]
        except KeyError:
            # adding leading zeros to lines with less than 3 digits for
            # readability
            route_text = 'line_{0:03d}'.format(route_id)

        service_text = service.replace('-', '_')
        route_name = '{0}_{1}_carto'.format(route_text, service_text)
        current_route = os.path.join(route_gdb, service_text, route_name)

        geom_type = 'POLYLINE'
        template = all_routes
        management.CreateFeatureclass(os.path.dirname(current_route),
                                      os.path.basename(current_route),
                                      geom_type, template,
                                      spatial_reference=oregon_spn)

        i_cursor = da.InsertCursor(current_route, route_fields)
        with da.SearchCursor(all_routes, route_fields) as s_cursor:
            for geom, rte, serv, r_type in s_cursor:
                if rte == route_id and serv == service:
                    i_cursor.insertRow((geom, rte, serv, r_type))
        del i_cursor
def import_gpx(gpx_file, wpt_fc, trk_fc):
    """Import a GPX file into waypoint and/or track feature classes.

    gpx_file: path to the source .gpx file
    wpt_fc: output point feature class for waypoints (falsy to skip)
    trk_fc: output polyline feature class for tracks (falsy to skip)
    """
    GCS_WGS_84 = arcpy.SpatialReference(4326)
    GCS_TRANSFORMS = 'WGS_1984_(ITRF08)_To_NAD_1983_2011; NAD_1927_To_NAD_1983_NADCON'

    # only set a default transformation if the environment has none
    arcpy.env.geographicTransformations = arcpy.env.geographicTransformations or GCS_TRANSFORMS
    arcpy.AddMessage('Geographic Transformations: %s' % arcpy.env.geographicTransformations)

    scratch = arcpy.env.scratchWorkspace
    arcpy.env.addOutputsToMap = False

    # (output field name, GPX element path) pairs, in output column order
    WPT_FIELDS = [
        ('ELEVATION', 'gpx:ele'),
        ('TIME', 'gpx:time'),
        ('NAME', 'gpx:name'),
        ('DESCRIPTION', 'gpx:desc'),
        ('SYMBOL', 'gpx:sym'),
        ('TYPE', 'gpx:type'),
        ('SAMPLES', 'gpx:extensions/wptx1:WaypointExtension/wptx1:Samples')
    ]

    ns = {
        'gpx': 'http://www.topografix.com/GPX/1/1',
        'gpxx': 'http://www.garmin.com/xmlschemas/GpxExtensions/v3',
        'wptx1': 'http://www.garmin.com/xmlschemas/WaypointExtension/v1',
        'ctx': 'http://www.garmin.com/xmlschemas/CreationTimeExtension/v1',
    }

    etree.register_namespace('', 'http://www.topografix.com/GPX/1/1')
    etree.register_namespace('gpxx', 'http://www.garmin.com/xmlschemas/GpxExtensions/v3')
    etree.register_namespace('wptx1', 'http://www.garmin.com/xmlschemas/WaypointExtension/v1')
    etree.register_namespace('ctx', 'http://www.garmin.com/xmlschemas/CreationTimeExtension/v1')

    gpx = etree.parse(gpx_file).getroot()
    sr = arcpy.env.outputCoordinateSystem

    if wpt_fc:
        create_points_feature_class(wpt_fc, sr)
        waypoints = []
        for wpt in gpx.findall('gpx:wpt', ns):
            x, y = wpt.get('lon'), wpt.get('lat')
            # project each WGS84 waypoint into the output coordinate system
            row = [arcpy.PointGeometry(arcpy.Point(x, y), GCS_WGS_84).projectAs(sr)]
            for field, tag in WPT_FIELDS:
                elem = wpt.find(tag, ns)
                if elem is None:
                    row.append(None)
                elif field == 'ELEVATION':
                    # convert meters to the output SR's linear unit
                    row.append('%0.4f' % (float(elem.text) / sr.metersPerUnit))
                elif field == 'NAME' and elem.text.isdigit():
                    # normalize purely-numeric names (strip leading zeros)
                    row.append('%d' % int(elem.text))
                else:
                    row.append(elem.text)
            waypoints.append(row)

        if waypoints:
            fields = ['SHAPE@'] + [f[0] for f in WPT_FIELDS]
            cur = arcpy.da.InsertCursor(wpt_fc, fields)
            for row in waypoints:
                cur.insertRow(row)
            del cur

    if trk_fc:
        # idle time between trkpts to start a new track segment
        TRKSEG_IDLE_SECS = 600

        tracks = []
        track_num = 0
        for trk in gpx.findall('gpx:trk', ns):
            track_num += 1
            elem = trk.find('gpx:name', ns)
            if elem is None:
                track_name = 'track-%04d' % track_num
            else:
                track_name = elem.text
            track_pts = []
            dt_last = None
            segment_num = 0
            for trkpt in trk.findall('./gpx:trkseg/gpx:trkpt', ns):
                x, y = trkpt.get('lon'), trkpt.get('lat')
                pt = arcpy.PointGeometry(arcpy.Point(x, y), GCS_WGS_84).projectAs(sr).firstPoint

                # See if there's a track point time
                elem = trkpt.find('gpx:time', ns)
                if elem is None:
                    dt_last = None
                else:
                    dt = utils.default_tzinfo(parser.parse(elem.text), tz.UTC)
                    if dt_last and (dt - dt_last).seconds > TRKSEG_IDLE_SECS:
                        # start a new segment: flush accumulated points
                        if len(track_pts) > 1:
                            segment_num += 1
                            if segment_num > 1:
                                segment_name = '%s SEG-%04d' % (track_name, segment_num)
                            else:
                                segment_name = track_name
                            geom = arcpy.Polyline(arcpy.Array(track_pts), sr)
                            tracks.append([geom, segment_name, len(track_pts)])
                        else:
                            # a 1-point segment cannot form a polyline
                            arcpy.AddMessage('Skipping track "%s": track_pts=%d' % (track_name, len(track_pts)))
                        track_pts = []
                    dt_last = dt
                track_pts.append(pt)

            # flush the final segment of this track
            if len(track_pts) > 1:
                segment_num += 1
                if segment_num > 1:
                    segment_name = '%s SEG-%04d' % (track_name, segment_num)
                else:
                    segment_name = track_name
                geom = arcpy.Polyline(arcpy.Array(track_pts), sr)
                tracks.append([geom, segment_name, len(track_pts)])
            else:
                arcpy.AddMessage('Skipping track "%s": track_pts=%d' % (track_name, len(track_pts)))

        if tracks:
            temp_fc = os.path.join(scratch, os.path.basename(trk_fc) + '_Temp')
            if sr is None:
                arcpy.AddError('Geoprocessing environment not set: outputCoordinateSystem')
                return None
            fc = mgmt.CreateFeatureclass(*os.path.split(temp_fc),
                                         geometry_type='POLYLINE',
                                         spatial_reference=sr)
            mgmt.AddField(fc, 'NAME', 'TEXT', field_length=64)
            mgmt.AddField(fc, 'POINTS', 'LONG')
            cur = arcpy.da.InsertCursor(fc, ('SHAPE@', 'NAME', 'POINTS'))
            for row in tracks:
                cur.insertRow(row)
            del cur
            mgmt.CopyFeatures(temp_fc, trk_fc)
            del fc
def createOutputShapes(self, outputFC):
    """Write base and candidate features (with similarity results) to a
    temp feature class, then sort it into *outputFC*.

    INPUTS:
    outputFC (str): path to the output feature class
    """
    #### Shorthand Attributes ####
    ssdoBase = self.ssdoBase
    ssdoCand = self.ssdoCand

    #### Validate Output Workspace ####
    ARCPY.overwriteOutput = True
    ERROR.checkOutputPath(outputFC)

    #### Create Output Feature Class ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
    outPath, outName = OS.path.split(outputFC)
    tempFC = UTILS.returnScratchName("TempSS_FC", fileType = "FEATURECLASS",
                                     scratchWS = outPath)
    outTempPath, outTempName = OS.path.split(tempFC)
    try:
        DM.CreateFeatureclass(outTempPath, outTempName, ssdoBase.shapeType,
                              "", ssdoBase.mFlag, ssdoBase.zFlag,
                              ssdoBase.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Add Null Value Flag ####
    # shapefiles cannot store true NULLs; fields are nullable otherwise
    outIsShapeFile = UTILS.isShapeFile(outputFC)
    setNullable = outIsShapeFile == False

    #### Make Feature Layer and Select Result OIDs/Shapes ####
    featureCount = ssdoBase.numObs + ssdoCand.numObs
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84003), 0, featureCount,
                        1)

    #### Add Shape/ID Field Names ####
    matchID, candID = outputIDFieldNames
    outFieldNames = ["SHAPE@"] + outputIDFieldNames
    inFieldNames = ["OID@", "SHAPE@"]
    UTILS.addEmptyField(tempFC, matchID, "LONG", nullable = True)
    UTILS.addEmptyField(tempFC, candID, "LONG", nullable = True)

    #### Add Append Fields ####
    lenAppend = len(self.appendFields)
    appendIsDate = []
    in2OutFieldNames = {}
    if lenAppend:
        for fieldName in self.appendFields:
            fcField = ssdoCand.allFields[fieldName]
            fieldType = UTILS.convertType[fcField.type]
            fieldOutName = UTILS.validQFieldName(fcField, outPath)
            in2OutFieldNames[fieldName] = fieldOutName
            if fieldType == "DATE":
                appendIsDate.append(fieldName)
            UTILS.addEmptyField(tempFC, fieldOutName, fieldType,
                                alias = fcField.alias)
            outFieldNames.append(fieldOutName)

    #### Add Analysis Fields ####
    for fieldName in self.fieldNames:
        fcField = ssdoBase.allFields[fieldName]
        fieldType = UTILS.convertType[fcField.type]
        fieldOutName = UTILS.validQFieldName(fcField, outPath)
        in2OutFieldNames[fieldName] = fieldOutName
        UTILS.addEmptyField(tempFC, fieldOutName, fieldType,
                            alias = fcField.alias)
        outFieldNames.append(fieldOutName)

    # result fields (rank/index) depend on similarity type and method;
    # baseValues are the placeholder values written for base features
    dataFieldNames = matchFieldInfo[self.similarType]
    dataFieldInfo = outputFieldInfo[self.matchMethod]
    baseValues = []
    for fieldName in dataFieldNames:
        outAlias, outType, baseValue = dataFieldInfo[fieldName]
        UTILS.addEmptyField(tempFC, fieldName, outType, alias = outAlias,
                            nullable = setNullable)
        outFieldNames.append(fieldName)
        baseValues.append(baseValue)

    #### Get Insert Cursor ####
    baseRows = DA.SearchCursor(ssdoBase.inputFC, inFieldNames)
    candRows = DA.SearchCursor(ssdoCand.inputFC, inFieldNames)
    rows = DA.InsertCursor(tempFC, outFieldNames)

    #### Set Base Data ####
    useShapeNull = outIsShapeFile
    if useShapeNull:
        nullIntValue = UTILS.shpFileNull['LONG']
    else:
        nullIntValue = None

    #### Set Base Null For Append ####
    # append fields missing from the base dataset get null placeholders
    appendNull = {}
    for fieldName in self.appendFields:
        if fieldName not in ssdoBase.fields:
            if useShapeNull:
                outType = ssdoCand.fields[fieldName].type
                outNullValue = UTILS.shpFileNull[outType]
            else:
                outNullValue = None
            appendNull[fieldName] = outNullValue

    #### Add Base Data ####
    for masterID, shp in baseRows:
        orderID = ssdoBase.master2Order[masterID]

        #### Insert Shape, Match_ID and NULL (Cand_ID) ####
        rowRes = [shp, masterID, nullIntValue]

        #### Add Append Fields ####
        for fieldName in self.appendFields:
            if fieldName in appendNull:
                rowRes.append(appendNull[fieldName])
            else:
                value = ssdoBase.fields[fieldName].data[orderID]
                if fieldName in appendIsDate:
                    value = TUTILS.iso2DateTime(value)
                rowRes.append(value)

        #### Add Analysis Fields ####
        for fieldName in self.fieldNames:
            rowRes.append(ssdoBase.fields[fieldName].data[orderID])

        #### Add Null Base Values ####
        rowRes += baseValues

        rows.insertRow(rowRes)
        ARCPY.SetProgressorPosition()
    del baseRows

    #### First Add Similar Results ####
    for masterID, shp in candRows:
        orderID = ssdoCand.master2Order[masterID]
        # positions of this candidate in the top/bottom ranked arrays
        indTop = NUM.where(self.topIDs == orderID)[0]
        indBot = NUM.where(self.botIDs == orderID)[0]

        if self.similarType in ['MOST_SIMILAR', 'BOTH'] and len(indTop):
            ind = indTop[0]

            #### Insert Shape, NULL (Match_ID) and Cand_ID ####
            rowRes = [shp, nullIntValue, masterID]

            #### Add Append Fields ####
            for fieldName in self.appendFields:
                rowRes.append(ssdoCand.fields[fieldName].data[orderID])

            #### Add Analysis Fields ####
            for fieldName in self.fieldNames:
                rowRes.append(ssdoCand.fields[fieldName].data[orderID])

            #### Add Results ####
            rank = ind + 1
            ss = self.totalDist[orderID]
            if self.similarType == 'BOTH':
                rowRes += [rank, nullIntValue, ss, rank]
            else:
                rowRes += [rank, ss, rank]
            rows.insertRow(rowRes)

        if self.similarType in ['LEAST_SIMILAR', 'BOTH'] and len(indBot):
            ind = indBot[0]

            #### Insert Shape, NULL (Match_ID) and Cand_ID ####
            rowRes = [shp, nullIntValue, masterID]

            #### Add Append Fields ####
            for fieldName in self.appendFields:
                rowRes.append(ssdoCand.fields[fieldName].data[orderID])

            #### Add Analysis Fields ####
            for fieldName in self.fieldNames:
                rowRes.append(ssdoCand.fields[fieldName].data[orderID])

            #### Add Results ####
            rank = ind + 1
            # least-similar ranks are labeled negative
            labRank = rank * -1
            ss = self.totalDist[orderID]
            if self.similarType == 'BOTH':
                rowRes += [nullIntValue, rank, ss, labRank]
            else:
                rowRes += [rank, ss, labRank]
            rows.insertRow(rowRes)
        ARCPY.SetProgressorPosition()

    del candRows
    del rows

    #### Do Final Sort ####
    if self.matchMethod == 'ATTRIBUTE_PROFILES':
        if self.similarType == 'MOST_SIMILAR':
            sortString = "SIMINDEX DESCENDING;SIMRANK DESCENDING"
        else:
            sortString = "SIMINDEX DESCENDING"
    else:
        if self.similarType == 'MOST_SIMILAR':
            sortString = "SIMINDEX ASCENDING;SIMRANK ASCENDING"
        else:
            sortString = "SIMINDEX ASCENDING"
    DM.Sort(tempFC, outputFC, sortString, "UR")

    #### Clean Up ####
    DM.Delete(tempFC)

    #### Symbology ####
    params = ARCPY.gp.GetParameterInfo()
    try:
        renderType = UTILS.renderType[self.ssdoBase.shapeType.upper()]
        renderKey = (self.similarType, renderType)
        renderLayerFile = outputRenderInfo[renderKey]
        templateDir = OS.path.dirname(OS.path.dirname(SYS.argv[0]))
        fullRLF = OS.path.join(templateDir, "Templates", "Layers",
                               renderLayerFile)
        params[2].Symbology = fullRLF
    except:
        # symbology is optional; warn rather than fail the tool
        ARCPY.AddIDMessage("WARNING", 973)
def stCollectByKNN(ssdo, timeField, outputFC, inSpan, inDistance):
    """Applies Jacquez Space-Time K-NN to convert event data into weighted
    point data by dissolving all coincident points in space and time into
    unique points with a new count field that contains the number of
    original features at that location and time span.

    INPUTS:
    ssdo (obj): SSDataObject from input
    timeField (str): Date/Time field name in input feature
    outputFC (str): path to the output feature class
    inSpan (int): value of temporal units within the same time bin
    inDistance (int): value of spatial units considered as spatial neighbors

    OUTPUTS:
    Creates the collected point feature class; returns the validated name
    of the count field added to it.
    """
    #### Read Raw Time Data ####
    timeData = ssdo.fields[timeField].data

    #### Convert Temporal Unit (second -> day resolution) ####
    time = NUM.array(timeData, dtype='datetime64[s]').astype('datetime64[D]')

    #### Find Start Time ####
    startTime = time.min()

    #### Create Bin for Space and Time ####
    timeBin = (time - startTime) / inSpan

    numObs = ssdo.numObs

    #### Create Pseudo-FID to Find K-NN in Space and Time ####
    fid = [i for i in xrange(numObs)]

    #### Validate Output Workspace ####
    ERROR.checkOutputPath(outputFC)

    #### True Centroid Warning For Non-Point FCs ####
    if ssdo.shapeType.upper() != "POINT":
        #### BUG FIX: was `ARCPY / AddIDMessage("WARNING", 1021)`, which
        #### divides the module object and raises a TypeError instead of
        #### issuing the warning ####
        ARCPY.AddIDMessage("WARNING", 1021)

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.inputFC,
                                 spatRef=ssdo.spatialRefString)

    #### Assure Enough Observations ####
    cnt = UTILS.getCount(ssdo.inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs=4)
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs=4)

    #### Process Any Bad Records Encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        if not ssdo.silentWarnings:
            ERROR.reportBadRecords(cnt, numBadRecs, badRecs,
                                   label=ssdo.oidName)

    #### Create Output Feature Class ####
    outPath, outName = OS.path.split(outputFC)
    try:
        DM.CreateFeatureclass(outPath, outName, "POINT", "", ssdo.mFlag,
                              ssdo.zFlag, ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Create k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(inDistance, 0, "euclidean")

    #### Add Count and Time Fields ####
    countFieldNameOut = ARCPY.ValidateFieldName(countFieldName, outPath)
    timeFieldNameOut = ARCPY.ValidateFieldName(timeFieldName, outPath)
    UTILS.addEmptyField(outputFC, countFieldNameOut, "LONG")
    UTILS.addEmptyField(outputFC, timeFieldNameOut, "DATE")
    fieldList = ["SHAPE@", countFieldNameOut, timeFieldNameOut]

    #### Set Insert Cursor ####
    rowsOut = DA.InsertCursor(outputFC, fieldList)

    #### Detect S-T K-NN by Space and Time Bin ####
    duplicateList = []
    for record in fid:
        kNNList = [record]
        if record not in duplicateList:
            #### NOTE(review): this inner loop re-runs the identical
            #### spatial search of `record` once per other feature, and
            #### the time-bin test compares `pair` while appending the
            #### neighbor `nh.idx` — preserved as-is because the intended
            #### semantics cannot be confirmed from here; confirm before
            #### restructuring ####
            for pair in fid:
                if pair != record:
                    gaSearch.search_by_idx(record)
                    for nh in gaSearch:
                        if timeBin[record] == timeBin[pair]:
                            kNNList.append(nh.idx)
                            duplicateList.append(nh.idx)

            #### Create and Populate New Feature ####
            kNNList = list(set(kNNList))
            count = len(kNNList)
            dt = time[record]
            #### Collected point sits at the mean center of the group ####
            x0 = ssdo.xyCoords[kNNList, 0].mean()
            y0 = ssdo.xyCoords[kNNList, 1].mean()
            pnt = (x0, y0, ssdo.defaultZ)
            rowResult = [pnt, count, dt]
            rowsOut.insertRow(rowResult)
            ARCPY.SetProgressorPosition()

    #### Clean Up ####
    del rowsOut, timeBin, kNNList, duplicateList

    return countFieldNameOut
def createOutput(self, outputFC):
    """Creates an Output Feature Class with the Standard Distances.

    INPUTS:
    outputFC (str): path to the output feature class
    """
    #### Validate Output Workspace ####
    ERROR.checkOutputPath(outputFC)

    #### Shorthand Attributes ####
    ssdo = self.ssdo
    caseField = self.caseField

    #### Increase Extent if not Projected ####
    # In a geographic coordinate system the ellipse may spill past the
    # data extent, so widen the XY domain by the largest semi-axis.
    # self.se maps case -> (seX, seY, rotation, radianR1, radianR2) —
    # inferred from the 5-way unpack below.
    if ssdo.spatialRefType != "Projected":
        seValues = self.se.values()
        if len(seValues):
            maxSE = NUM.array([i[0:2] for i in seValues]).max()
            largerExtent = UTILS.increaseExtentByConstant(ssdo.extent,
                                                          constant=maxSE)
            largerExtent = [LOCALE.str(i) for i in largerExtent]
            ARCPY.env.XYDomain = " ".join(largerExtent)

    #### Create Output Feature Class ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
    outPath, outName = OS.path.split(outputFC)
    try:
        DM.CreateFeatureclass(outPath, outName, "POLYGON", "", ssdo.mFlag,
                              ssdo.zFlag, ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Add Fields to Output FC ####
    dataFieldNames = UTILS.getFieldNames(seFieldNames, outPath)
    shapeFieldNames = ["SHAPE@"]
    for fieldName in dataFieldNames:
        UTILS.addEmptyField(outputFC, fieldName, "DOUBLE")

    # Optionally carry the case field through to the output, converting
    # its type; DATE values need special handling at insert time.
    caseIsDate = False
    if caseField:
        fcCaseField = ssdo.allFields[caseField]
        validCaseName = UTILS.validQFieldName(fcCaseField, outPath)
        caseType = UTILS.convertType[fcCaseField.type]
        UTILS.addEmptyField(outputFC, validCaseName, caseType)
        dataFieldNames.append(validCaseName)
        if caseType.upper() == "DATE":
            caseIsDate = True

    #### Write Output ####
    badCaseRadians = []
    allFieldNames = shapeFieldNames + dataFieldNames
    rows = DA.InsertCursor(outputFC, allFieldNames)
    for case in self.caseKeys:
        #### Get Results ####
        xVal, yVal = self.meanCenter[case]
        seX, seY, degreeRotation, radianR1, radianR2 = self.se[case]
        seX2 = seX**2.0
        seY2 = seY**2.0

        #### Create Empty Polygon Geometry ####
        poly = ARCPY.Array()

        #### Check for Valid Radius ####
        # A zero-ish or NaN semi-axis means the ellipse is degenerate
        # (e.g. coincident points); flag the case instead of drawing it.
        seXZero = UTILS.compareFloat(0.0, seX, rTol=.0000001)
        seXNan = NUM.isnan(seX)
        seXBool = seXZero + seXNan
        seYZero = UTILS.compareFloat(0.0, seY, rTol=.0000001)
        seYNan = NUM.isnan(seY)
        seYBool = seYZero + seYNan
        if seXBool or seYBool:
            # badRadian == 6 is the sentinel for "skip this feature"
            badRadian = 6
            badCase = UTILS.caseValue2Print(case, self.caseIsString)
            badCaseRadians.append(badCase)
        else:
            badRadian = 0
            cosRadian = NUM.cos(radianR1)
            sinRadian = NUM.sin(radianR1)

            #### Calculate a Point For Each ####
            #### Degree in Ellipse Polygon ####
            for degree in NUM.arange(0, 360):
                try:
                    # Parametric point on the axis-aligned ellipse:
                    # solve for dX, dY at this bearing.
                    radians = UTILS.convert2Radians(degree)
                    tanVal2 = NUM.tan(radians)**2.0
                    dX = MATH.sqrt((seX2 * seY2) /
                                   (seY2 + (seX2 * tanVal2)))
                    dY = MATH.sqrt((seY2 * (seX2 - dX**2.0)) / seX2)

                    #### Adjust for Quadrant ####
                    # sqrt always yields the positive root; restore signs
                    # according to the quadrant of `degree`.
                    if 90 <= degree < 180:
                        dX = -dX
                    elif 180 <= degree < 270:
                        dX = -dX
                        dY = -dY
                    elif degree >= 270:
                        dY = -dY

                    #### Rotate X and Y ####
                    dXr = dX * cosRadian - dY * sinRadian
                    dYr = dX * sinRadian + dY * cosRadian

                    #### Create Point Shifted to ####
                    #### Ellipse Centroid ####
                    pntX = dXr + xVal
                    pntY = dYr + yVal
                    pnt = ARCPY.Point(pntX, pntY, ssdo.defaultZ)
                    poly.add(pnt)
                except:
                    # Tolerate a handful of math failures; give up on the
                    # whole case after six bad vertices.
                    badRadian += 1
                    if badRadian == 6:
                        badCase = UTILS.caseValue2Print(case,
                                                        self.caseIsString)
                        badCaseRadians.append(badCase)
                        break

        if badRadian < 6:
            #### Create and Populate New Feature ####
            poly = ARCPY.Polygon(poly, None, True)
            rowResult = [poly, xVal, yVal, seX, seY, radianR2]
            if caseField:
                # case is a numpy scalar; .item() converts to native type
                caseValue = case.item()
                if caseIsDate:
                    caseValue = TUTILS.iso2DateTime(caseValue)
                rowResult.append(caseValue)
            rows.insertRow(rowResult)

    #### Report Bad Cases Due to Geometry (coincident pts) ####
    nBadRadians = len(badCaseRadians)
    if nBadRadians:
        if caseField:
            # With a case field, degenerate cases are only a warning
            badCaseRadians = " ".join(badCaseRadians)
            ARCPY.AddIDMessage("WARNING", 1011, caseField, badCaseRadians)
        else:
            # Without a case field nothing was written: hard failure
            ARCPY.AddIDMessage("ERROR", 978)
            raise SystemExit()

    #### Return Extent to Normal if not Projected ####
    if ssdo.spatialRefType != "Projected":
        ARCPY.env.XYDomain = ""

    #### Clean Up ####
    del rows

    #### Set Attribute ####
    self.outputFC = outputFC
def collectEvents(ssdo, outputFC):
    """Converts event data into weighted point data by dissolving all
    coincident points into unique points with a new count field that
    contains the number of original features at that location.

    INPUTS:
    ssdo (obj): SSDataObject for the input feature class
    outputFC (str): path to the output feature class

    OUTPUTS:
    (countFieldNameOut, maxCount, N, numUnique): validated count field
    name, largest coincident-point count, number of good input records,
    and number of unique locations written.
    """
    #### Set Default Progressor for Neighborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Validate Output Workspace ####
    ERROR.checkOutputPath(outputFC)

    #### True Centroid Warning For Non-Point FCs ####
    if ssdo.shapeType.upper() != "POINT":
        ARCPY.AddIDMessage("WARNING", 1021)

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.inputFC,
                                 spatRef=ssdo.spatialRefString)

    #### Assure Enough Observations ####
    cnt = UTILS.getCount(ssdo.inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs=4)
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs=4)

    #### Process Any Bad Records Encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        if not ssdo.silentWarnings:
            ERROR.reportBadRecords(cnt, numBadRecs, badRecs,
                                   label=ssdo.oidName)

    #### Create Exact-Match Neighbor Search (distance 0.0) ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(0.0, 0, "euclidean")

    #### Create Output Feature Class ####
    outPath, outName = OS.path.split(outputFC)
    try:
        DM.CreateFeatureclass(outPath, outName, "POINT", "", ssdo.mFlag,
                              ssdo.zFlag, ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Add Count Field ####
    countFieldNameOut = ARCPY.ValidateFieldName(countFieldName, outPath)
    UTILS.addEmptyField(outputFC, countFieldNameOut, "LONG")
    fieldList = ["SHAPE@", countFieldNameOut]

    #### Set Insert Cursor ####
    rowsOut = DA.InsertCursor(outputFC, fieldList)

    #### Set Progressor for Calculation ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84007), 0, N, 1)

    #### BUG FIX: the original iterated a list (`rowsIN = range(N)`)
    #### while calling .remove() on it, which can silently skip records
    #### when a lower index is removed mid-iteration (and breaks on
    #### Python 3, where range objects have no .remove).  A `seen` set
    #### gives the same dissolve semantics safely. ####
    seen = set()
    maxCount = 0
    numUnique = 0
    for row in xrange(N):
        if row in seen:
            #### Already dissolved into an earlier location ####
            ARCPY.SetProgressorPosition()
            continue

        #### Get Row Coords ####
        rowInfo = gaTable[row]
        x0, y0 = rowInfo[1]
        count = 1

        #### Search For Exact Coord Match ####
        # Assumes the search yields coincident *other* records, not the
        # query point itself (original counted each neighbor the same
        # way) — TODO confirm against GAPY semantics.
        gaSearch.search_by_idx(row)
        for nh in gaSearch:
            count += 1
            seen.add(nh.idx)
            ARCPY.SetProgressorPosition()

        #### Keep Track of Max Count ####
        maxCount = max(count, maxCount)

        #### Create and Populate New Feature ####
        pnt = (x0, y0, ssdo.defaultZ)
        rowsOut.insertRow([pnt, count])
        numUnique += 1
        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    del rowsOut, gaTable

    return countFieldNameOut, maxCount, N, numUnique
def createOutput(self, outputFC):
    """Creates the similarity-search output feature class: all base
    features first, then the most/least similar candidates, each row
    carrying append fields, analysis fields and the similarity results.

    INPUTS:
    outputFC (str): path to the output feature class
    """
    #### Shorthand Attributes ####
    ssdoBase = self.ssdoBase
    ssdoCand = self.ssdoCand

    #### Validate Output Workspace ####
    #### BUG FIX: was `ARCPY.overwriteOutput = True`, which only sets an
    #### unused attribute on the arcpy module; the geoprocessing setting
    #### lives on arcpy.env ####
    ARCPY.env.overwriteOutput = True
    ERROR.checkOutputPath(outputFC)

    #### Create Output Feature Class ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
    outPath, outName = OS.path.split(outputFC)
    try:
        DM.CreateFeatureclass(outPath, outName, "POINT", "",
                              ssdoBase.mFlag, ssdoBase.zFlag,
                              ssdoBase.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Add Null Value Flag ####
    outIsShapeFile = UTILS.isShapeFile(outputFC)
    setNullable = outIsShapeFile == False

    #### Add Shape/ID Field Names ####
    matchID, candID = outputIDFieldNames
    outFieldNames = ["SHAPE@"] + outputIDFieldNames
    # NOTE(review): these two pass nullable=True rather than the
    # setNullable computed above — looks intentional for ID fields but
    # confirm against the shapefile branch.
    UTILS.addEmptyField(outputFC, matchID, "LONG", nullable = True)
    UTILS.addEmptyField(outputFC, candID, "LONG", nullable = True)

    #### Add Append Fields (copied through from the candidate FC) ####
    lenAppend = len(self.appendFields)
    appendIsDate = []
    in2OutFieldNames = {}
    if lenAppend:
        for fieldName in self.appendFields:
            fcField = ssdoCand.allFields[fieldName]
            fieldType = UTILS.convertType[fcField.type]
            fieldOutName = UTILS.validQFieldName(fcField, outPath)
            in2OutFieldNames[fieldName] = fieldOutName
            if fieldType == "DATE":
                appendIsDate.append(fieldName)
            UTILS.addEmptyField(outputFC, fieldOutName, fieldType,
                                alias = fcField.alias)
            outFieldNames.append(fieldOutName)

    #### Add Analysis Fields ####
    for fieldName in self.fieldNames:
        fcField = ssdoBase.allFields[fieldName]
        fieldType = UTILS.convertType[fcField.type]
        fieldOutName = UTILS.validQFieldName(fcField, outPath)
        in2OutFieldNames[fieldName] = fieldOutName
        UTILS.addEmptyField(outputFC, fieldOutName, fieldType,
                            alias = fcField.alias)
        outFieldNames.append(fieldOutName)

    #### Add Result Fields (rank/index columns per match method) ####
    dataFieldNames = matchFieldInfo[self.similarType]
    dataFieldInfo = outputFieldInfo[self.matchMethod]
    baseValues = []
    for fieldName in dataFieldNames:
        outAlias, outType, baseValue = dataFieldInfo[fieldName]
        UTILS.addEmptyField(outputFC, fieldName, outType,
                            alias = outAlias, nullable = setNullable)
        outFieldNames.append(fieldName)
        baseValues.append(baseValue)

    #### Step Progress ####
    featureCount = ssdoBase.numObs + self.numResults
    if self.similarType == "BOTH":
        featureCount += self.numResults
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84003), 0,
                        featureCount, 1)

    #### Get Insert Cursor ####
    rows = DA.InsertCursor(outputFC, outFieldNames)

    #### Set Base Data ####
    # Shapefiles cannot store NULL; use the sentinel value instead
    useShapeNull = outIsShapeFile
    if useShapeNull:
        nullIntValue = UTILS.shpFileNull['LONG']
    else:
        nullIntValue = None

    #### Set Base Null For Append ####
    # Append fields missing from the base FC get a null placeholder
    appendNull = {}
    for fieldName in self.appendFields:
        if fieldName not in ssdoBase.fields:
            if useShapeNull:
                outType = ssdoCand.fields[fieldName].type
                outNullValue = UTILS.shpFileNull[outType]
            else:
                outNullValue = None
            appendNull[fieldName] = outNullValue

    #### Add Base Data ####
    for orderID in xrange(ssdoBase.numObs):
        x, y = ssdoBase.xyCoords[orderID]
        pnt = (x, y, ssdoBase.defaultZ)

        #### Insert Shape, Match_ID and NULL (Cand_ID) ####
        rowRes = [pnt, ssdoBase.order2Master[orderID], nullIntValue]

        #### Add Append Fields ####
        for fieldName in self.appendFields:
            if fieldName in appendNull:
                rowRes.append(appendNull[fieldName])
            else:
                value = ssdoBase.fields[fieldName].data[orderID]
                if fieldName in appendIsDate:
                    value = TUTILS.iso2DateTime(value)
                rowRes.append(value)

        #### Add Analysis Fields ####
        for fieldName in self.fieldNames:
            rowRes.append(ssdoBase.fields[fieldName].data[orderID])

        #### Add Null Base Values ####
        rowRes += baseValues
        rows.insertRow(rowRes)
        ARCPY.SetProgressorPosition()

    if self.similarType in ['MOST_SIMILAR', 'BOTH']:
        #### First Add Similar Results ####
        for ind, orderID in enumerate(self.topIDs):
            x, y = ssdoCand.xyCoords[orderID]
            pnt = (x, y, ssdoBase.defaultZ)

            #### Insert Shape, NULL (Match_ID) and Cand_ID ####
            rowRes = [pnt, nullIntValue, ssdoCand.order2Master[orderID]]

            #### Add Append Fields ####
            for fieldName in self.appendFields:
                rowRes.append(ssdoCand.fields[fieldName].data[orderID])

            #### Add Analysis Fields ####
            for fieldName in self.fieldNames:
                rowRes.append(ssdoCand.fields[fieldName].data[orderID])

            #### Add Results ####
            rank = ind + 1
            ss = self.totalDist[orderID]
            if self.similarType == 'BOTH':
                rowRes += [rank, nullIntValue, ss, rank]
            else:
                rowRes += [rank, ss, rank]
            rows.insertRow(rowRes)
            ARCPY.SetProgressorPosition()

    if self.similarType in ['LEAST_SIMILAR', 'BOTH']:
        #### Add Least Similar ####
        for ind, orderID in enumerate(self.botIDs):
            x, y = ssdoCand.xyCoords[orderID]
            pnt = (x, y, ssdoBase.defaultZ)

            #### Insert Shape, NULL (Match_ID) and Cand_ID ####
            rowRes = [pnt, nullIntValue, ssdoCand.order2Master[orderID]]

            #### Add Append Fields ####
            for fieldName in self.appendFields:
                rowRes.append(ssdoCand.fields[fieldName].data[orderID])

            #### Add Analysis Fields ####
            for fieldName in self.fieldNames:
                rowRes.append(ssdoCand.fields[fieldName].data[orderID])

            #### Add Results (negative label rank marks least similar) ####
            rank = ind + 1
            labRank = rank * -1
            ss = self.totalDist[orderID]
            if self.similarType == 'BOTH':
                rowRes += [nullIntValue, rank, ss, labRank]
            else:
                rowRes += [rank, ss, labRank]
            rows.insertRow(rowRes)
            ARCPY.SetProgressorPosition()

    #### Clean Up ####
    del rows

    #### Symbology ####
    # Best effort: attach a renderer layer file to the output parameter;
    # warn (ID 973) if the template cannot be located.
    params = ARCPY.gp.GetParameterInfo()
    try:
        renderKey = (self.similarType, 0)
        renderLayerFile = outputRenderInfo[renderKey]
        templateDir = OS.path.dirname(OS.path.dirname(SYS.argv[0]))
        fullRLF = OS.path.join(templateDir, "Templates", "Layers",
                               renderLayerFile)
        params[2].Symbology = fullRLF
    except:
        ARCPY.AddIDMessage("WARNING", 973)