def createBoundingBoxPolygon(mxd_path, bkmk_name, out_fc):
	"""Create a polygon that from the coordinates of a bookmark bounding box"""

	geom_type = 'POLYGON'
	oregon_spn = arcpy.SpatialReference(2913)
	management.CreateFeatureclass(os.path.dirname(out_fc),
		os.path.basename(out_fc), geom_type, spatial_reference=oregon_spn)

	name_field, f_type = 'name', 'TEXT'
	management.AddField(out_fc, name_field, f_type)

	# drop the default 'Id' field
	drop_field = 'Id'
	management.DeleteField(out_fc, drop_field)

	i_fields = ['Shape@', name_field]
	i_cursor = da.InsertCursor(out_fc, i_fields)

	mxd = mapping.MapDocument(mxd_path)
	for bkmk in arcpy.mapping.ListBookmarks(mxd, bkmk_name):
		extent = bkmk.extent
		pt_array = arcpy.Array()

		pt_array.add(arcpy.Point(extent.XMin, extent.YMin))
		pt_array.add(arcpy.Point(extent.XMin, extent.YMax))
		pt_array.add(arcpy.Point(extent.XMax, extent.YMax))
		pt_array.add(arcpy.Point(extent.XMax, extent.YMin))
		# add first point again to close polygon
		pt_array.add(arcpy.Point(extent.XMin, extent.YMin))

		i_cursor.insertRow((arcpy.Polygon(pt_array), bkmk.name))

	del i_cursor
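A minimal usage sketch; the .mxd path, bookmark name, and output shapefile below are hypothetical, and the imports mirror what the function body assumes (ArcMap-era arcpy with arcpy.mapping):

import os
import arcpy
from arcpy import da, management, mapping

createBoundingBoxPolygon(
    mxd_path=r'C:\maps\transit.mxd',           # assumed example map document
    bkmk_name='Downtown',                      # assumed bookmark name
    out_fc=r'C:\data\shapefiles\bkmk_box.shp')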
def createCcBusLabelsFc():
    """The offset routes for the city center have only one set of geometries for
	each service level, but there needs to be labels for each line so generate a 
	unique geometry for each of the routes the line segments represent"""

    geom_type = 'POLYLINE'
    template = os.path.join(sm_shapefiles, 'distinct_routes.shp')
    oregon_spn = arcpy.SpatialReference(2913)
    bus_labels_cc = os.path.join(cc_shapefiles, 'bus_labels_cc.shp')
    management.CreateFeatureclass(os.path.dirname(bus_labels_cc),
                                  os.path.basename(bus_labels_cc),
                                  geom_type,
                                  template,
                                  spatial_reference=oregon_spn)

    i_fields = ['Shape@', 'route_id', 'serv_level', 'route_type']
    i_cursor = da.InsertCursor(bus_labels_cc, i_fields)

    s_fields = i_fields[:]
    s_fields[1] = 'routes'
    for fc in arcpy.ListFeatureClasses():
        if 'bus' in fc:
            with da.SearchCursor(fc, s_fields) as cursor:
                routes_ix = cursor.fields.index('routes')
                for row in cursor:
                    for route in row[routes_ix].split(','):
                        new_row = list(row)
                        new_row[routes_ix] = route

                        i_cursor.insertRow(new_row)

    del i_cursor
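Since the function lists feature classes from the current workspace and joins against module-level paths, a caller would first set those globals; a sketch with assumed locations:

import arcpy
from arcpy import env

# Assumed module-level paths and workspace; adjust for your data.
sm_shapefiles = r'C:\data\shapefiles\system_map'
cc_shapefiles = r'C:\data\shapefiles\city_center'
env.workspace = r'C:\data\transit.gdb'  # must hold the 'bus' feature classes
createCcBusLabelsFc()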
def getDataPagesMapExtents():
    """Get the exent of each of the maps in the data driven pages collection
	and write them as polygons to a feature class"""

    createExtentFeatureClass()

    i_fields = ['Shape@', name_field]
    i_cursor = da.InsertCursor(pylon_extents, i_fields)

    for page_num in range(1, data_pages.pageCount + 1):
        data_pages.currentPageID = page_num
        extent = data_frame.extent
        pt_array = arcpy.Array()

        pt_array.add(arcpy.Point(extent.XMin, extent.YMin))
        pt_array.add(arcpy.Point(extent.XMin, extent.YMax))
        pt_array.add(arcpy.Point(extent.XMax, extent.YMax))
        pt_array.add(arcpy.Point(extent.XMax, extent.YMin))
        # add first point again to close polygon
        pt_array.add(arcpy.Point(extent.XMin, extent.YMin))

        # get the page name of the map and add it as an attribute
        pg_name_field = data_pages.pageNameField.name
        page_name = data_pages.pageRow.getValue(pg_name_field)

        i_cursor.insertRow((arcpy.Polygon(pt_array), page_name))

    del i_cursor
def generateCcCombinedRoutesFc():
    """The city center routes are split into a few feature classes for the various
	modes of transportation, combine them into a unified one"""

    geom_type = 'POLYLINE'
    template = os.path.join(env.workspace, 'frequent_bus_carto')
    oregon_spn = arcpy.SpatialReference(2913)
    combined_routes_cc = os.path.join(cc_shapefiles, 'combined_routes_cc.shp')
    management.CreateFeatureclass(os.path.dirname(combined_routes_cc),
                                  os.path.basename(combined_routes_cc),
                                  geom_type,
                                  template,
                                  spatial_reference=oregon_spn)

    name_field = 'LINE'
    route_fields = ['Shape@', 'routes', 'serv_level', 'route_type']
    i_cursor = da.InsertCursor(combined_routes_cc, route_fields)

    for fc in arcpy.ListFeatureClasses(feature_type='Polyline'):
        if name_field in [f.name for f in arcpy.ListFields(fc)]:
            assignRouteNumbersToRail(fc, name_field, route_fields[1])

        with da.SearchCursor(fc, route_fields) as cursor:
            for row in cursor:
                i_cursor.insertRow(row)

    del i_cursor
    def createOutput(self, outputTable):
        """Creates Moran's I Step Output Table.

        INPUTS
        outputTable (str): path to the output table
        """

        #### Allow Overwrite Output ####
        ARCPY.env.overwriteOutput = 1

        #### Get Output Table Name With Extension if Appropriate ####
        outputTable, dbf = UTILS.returnTableName(outputTable)

        #### Set Progressor ####
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84008))

        #### Delete Table If Exists ####
        UTILS.passiveDelete(outputTable)

        #### Create Table ####
        outPath, outName = OS.path.split(outputTable)
        try:
            DM.CreateTable(outPath, outName)
        except:
            ARCPY.AddIDMessage("ERROR", 541)
            raise SystemExit()

        #### Add Result Fields ####
        self.outputFields = []
        for field in iaFieldNames:
            fieldOut = ARCPY.ValidateFieldName(field, outPath)
            UTILS.addEmptyField(outputTable, fieldOut, "DOUBLE")
            self.outputFields.append(fieldOut)

        #### Create Insert Cursor ####
        try:
            insert = DA.InsertCursor(outputTable, self.outputFields)
        except:
            ARCPY.AddIDMessage("ERROR", 204)
            raise SystemExit()

        #### Add Rows to Output Table ####
        for testIter in range(self.nIncrements):
            insert.insertRow(self.giResults[testIter])

        #### Clean Up ####
        del insert

        return outputTable, dbf
def copy_rows(source, destination):
    """
    copies a dataset from source to a destination
    """
    try:
        if arcpy.Exists(destination):
            arcpy.Delete_management(destination)
        desc = arcpy.Describe(source)
        fields = [
            field.name for field in arcpy.ListFields(source)
            # 'OID' is the Field.type value; 'OID@' is a cursor token, not a type
            if field.type not in ['Geometry', 'OID']
        ]
        if desc.datasetType.lower() == 'table':
            out_table = arcpy.CreateTable_management(
                out_path=os.path.dirname(destination),
                out_name=os.path.basename(destination),
                template=source)[0]
        elif desc.datasetType.lower() == 'featureclass':
            out_table = arcpy.CreateFeatureclass_management(
                out_path=os.path.dirname(destination),
                out_name=os.path.basename(destination),
                geometry_type=desc.shapeType.upper(),
                template=source,
                spatial_reference=desc.spatialReference)[0]
            fields.append("SHAPE@")
        else:
            raise Exception("Invalid datatype")
        with arcpy.da.SearchCursor(source, fields) as rows:
            with da.InsertCursor(out_table, fields) as irows:
                for row in rows:
                    irows.insertRow(row)
                    del row
                del irows
            del rows
        return out_table
    except:
        line, filename, synerror = trace()
        raise FunctionError({
            "function": "copy_rows",
            "line": line,
            "filename": __file__,
            "synerror": synerror,
            "arc": str(arcpy.GetMessages(2))
        })
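A hedged usage sketch (both paths are placeholders); copy_rows overwrites the destination if it already exists and returns the path to the copy:

import arcpy

copied_tbl = copy_rows(r'C:\data\in.gdb\parcel_owners', r'C:\data\out.gdb\parcel_owners')
copied_fc = copy_rows(r'C:\data\in.gdb\roads', r'C:\data\out.gdb\roads')
print(arcpy.GetCount_management(copied_fc)[0])  # row count of the copied feature class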
def populateUnifiedFc():
    """Iterate through all of the individual route feature classes and add them
	to common fc"""

    route_fields = ['Shape@', 'route_id', 'serv_level', 'route_type']
    i_cursor = da.InsertCursor(offset_routes, route_fields)

    feat_datasets = ['frequent', 'standard', 'rush_hour', 'rail_tram']
    for fd in feat_datasets:
        for fc in arcpy.ListFeatureClasses(feature_dataset=fd):
            # exclude these as a different, more generalized fc is being used
            # to represent the streetcar
            print(fc)
            fc_path = os.path.join(env.workspace, fd, fc)
            with da.SearchCursor(fc_path, route_fields) as s_cursor:
                for row in s_cursor:
                    i_cursor.insertRow(row)

    del i_cursor
def createInsetBox():
    """The bus mall inset covers a portion of the city center map so that
	needs to be reflected in the inset box, using the inflection point and the
	city center bound box create an fc that contains the inset box"""

    inflect_pt = {'x': 7649075, 'y': 686384}
    bkmk_dict = getBookmarkBbox(city_center_mxd, city_center_bkmk)

    geom_type = 'POLYGON'
    oregon_spn = arcpy.SpatialReference(2913)
    management.CreateFeatureclass(os.path.dirname(inset_box),
                                  os.path.basename(inset_box),
                                  geom_type,
                                  spatial_reference=oregon_spn)

    f_name, f_type = 'name', 'TEXT'
    management.AddField(inset_box, f_name, f_type)

    drop_field = 'Id'
    arcpy.management.DeleteField(inset_box, drop_field)

    i_fields = ['Shape@', f_name]
    i_cursor = da.InsertCursor(inset_box, i_fields)

    ap_array = arcpy.Array()
    ap_array.add(arcpy.Point(bkmk_dict['x-min'], bkmk_dict['y-min']))
    ap_array.add(arcpy.Point(bkmk_dict['x-min'], bkmk_dict['y-max']))
    ap_array.add(arcpy.Point(bkmk_dict['x-max'], bkmk_dict['y-max']))
    ap_array.add(arcpy.Point(bkmk_dict['x-max'], inflect_pt['y']))
    ap_array.add(arcpy.Point(inflect_pt['x'], inflect_pt['y']))
    ap_array.add(arcpy.Point(inflect_pt['x'], bkmk_dict['y-min']))
    # add first point again to close polygon
    ap_array.add(arcpy.Point(bkmk_dict['x-min'], bkmk_dict['y-min']))

    i_cursor.insertRow((arcpy.Polygon(ap_array), 'Portland City Center'))

    del i_cursor
def generate_squares(in_polygon, in_raster):
    from arcpy import Describe, Array, Point, Polygon, da
    desc = Describe(in_raster)
    eXMin = desc.extent.XMin
    eYMin = desc.extent.YMin
    eXMax = desc.extent.XMax
    eYMax = desc.extent.YMax

    offset = 1
    sqLen = 1
    # Store extent values as a list of coordinates
    blX = eXMin - offset
    blY = eYMin - offset
    bottom_left_square = Array([
        Point(blX - sqLen, blY - sqLen),
        Point(blX - sqLen, blY),
        Point(blX, blY),
        Point(blX, blY - sqLen)
    ])
    trX = eXMax + offset
    trY = eYMax + offset
    top_right_square = Array([
        Point(trX, trY),
        Point(trX, trY + sqLen),
        Point(trX + sqLen, trY + sqLen),
        Point(trX + sqLen, trY)
    ])
    # Open an InsertCursor and insert the new geometries
    cursor = da.InsertCursor(in_polygon, ['SHAPE@'])
    for sq in [bottom_left_square, top_right_square]:
        # Create a polygon geometry
        polygon = Polygon(sq)
        cursor.insertRow([polygon])
    # Delete cursor object
    del cursor
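The function pads the raster extent by one unit and writes two unit squares into an existing polygon feature class, one off the bottom-left corner and one off the top-right; a sketch with assumed paths:

# in_polygon must already exist and have a polygon geometry type.
generate_squares(r'C:\data\out.gdb\anchor_squares',  # assumed existing polygon fc
                 r'C:\data\elevation.tif')           # raster whose extent is padded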
def merge_fcs(fcs, merged_fc, gdb):
    """combines like geometries into a feature class"""

    desc = arcpy.Describe(os.path.join(gdb, fcs[0]))
    if arcpy.Exists(merged_fc):
        arcpy.Delete_management(merged_fc)
    ifc = arcpy.CreateFeatureclass_management(
        out_path=os.path.dirname(merged_fc),
        out_name=os.path.basename(merged_fc),
        geometry_type=desc.shapeType.upper(),
        spatial_reference=desc.spatialReference)[0]
    icur = da.InsertCursor(ifc, ['SHAPE@'])
    count = 0
    for fc in fcs:
        fc = os.path.join(gdb, fc)
        with da.SearchCursor(fc, ["SHAPE@"]) as rows:
            for row in rows:
                icur.insertRow(row)
                count += 1
                del row
        del rows
        del fc
    del icur, desc
    return ifc, count
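Note that merge_fcs copies geometries only ('SHAPE@'); attributes are dropped. A usage sketch with hypothetical names:

merged, n = merge_fcs(['streets_a', 'streets_b', 'streets_c'],
                      r'C:\data\out.gdb\streets_merged',
                      r'C:\data\in.gdb')
print('%s features written to %s' % (n, merged))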
    def createOutput(self, outputFC):
        """Creates an Output Feature Class with the Directional Mean
        Results.

        INPUTS:
        outputFC (str): path to the output feature class
        """

        #### Validate Output Workspace ####
        ERROR.checkOutputPath(outputFC)

        #### Shorthand Attributes ####
        ssdo = self.ssdo
        caseField = self.caseField

        #### Create Output Feature Class ####
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
        outPath, outName = OS.path.split(outputFC)

        try:
            DM.CreateFeatureclass(outPath, outName, "POLYLINE", "", ssdo.mFlag,
                                  ssdo.zFlag, ssdo.spatialRefString)
        except:
            ARCPY.AddIDMessage("ERROR", 210, outputFC)
            raise SystemExit()

        #### Add Fields to Output FC ####
        dataFieldNames = UTILS.getFieldNames(lmFieldNames, outPath)
        shapeFieldNames = ["SHAPE@"]
        for fieldName in dataFieldNames:
            UTILS.addEmptyField(outputFC, fieldName, "DOUBLE")

        caseIsDate = False
        if caseField:
            fcCaseField = ssdo.allFields[caseField]
            validCaseName = UTILS.validQFieldName(fcCaseField, outPath)
            caseType = UTILS.convertType[fcCaseField.type]
            UTILS.addEmptyField(outputFC, validCaseName, caseType)
            dataFieldNames.append(validCaseName)
            if caseType.upper() == "DATE":
                caseIsDate = True

        #### Populate Output Feature Class ####
        allFieldNames = shapeFieldNames + dataFieldNames
        rows = DA.InsertCursor(outputFC, allFieldNames)
        for case in self.caseKeys:
            #### Get Results ####
            start, end, length, rAngle, dAngle, circVar = self.dm[case]
            meanX, meanY = self.meanCenter[case]
            dirMean = 360. - dAngle + 90.
            if not dirMean < 360:
                dirMean = dirMean - 360.

            #### Create Start and End Points ####
            x0, y0 = start
            startPoint = ARCPY.Point(x0, y0, ssdo.defaultZ)
            x1, y1 = end
            endPoint = ARCPY.Point(x1, y1, ssdo.defaultZ)

            #### Create And Populate Line Array ####
            line = ARCPY.Array()
            line.add(startPoint)
            line.add(endPoint)
            line = ARCPY.Polyline(line, None, True)

            #### Create and Populate New Line Feature ####
            rowResult = [line, dAngle, dirMean, circVar, meanX, meanY, length]

            if caseField:
                caseValue = case
                if caseIsDate:
                    caseValue = TUTILS.iso2DateTime(caseValue)
                rowResult.append(caseValue)
            rows.insertRow(rowResult)

        #### Clean Up ####
        del rows

        #### Set Attribute ####
        self.outputFC = outputFC

        #### Set the Default Symbology ####
        params = ARCPY.gp.GetParameterInfo()
        if self.orientationOnly:
            renderLayerFile = "LinearMeanTwoWay.lyr"
        else:
            renderLayerFile = "LinearMeanOneWay.lyr"
        templateDir = OS.path.dirname(OS.path.dirname(SYS.argv[0]))
        fullRLF = OS.path.join(templateDir, "Templates", "Layers",
                               renderLayerFile)
        params[1].Symbology = fullRLF
def to_featureclass(geo,
                    location,
                    overwrite=True,
                    validate=False,
                    sanitize_columns=True,
                    has_m=True,
                    has_z=False):
    """
    Exports the DataFrame to a Feature class.

    ===============     ====================================================
    **Argument**        **Description**
    ---------------     ----------------------------------------------------
    location            Required string. This is the output location for the
                        feature class. This should be the path and feature
                        class name.
    ---------------     ----------------------------------------------------
    overwrite           Optional Boolean. If overwrite is true, existing
                        data will be deleted and replaced with the spatial
                        dataframe.
    ---------------     ----------------------------------------------------
    validate            Optional Boolean. If True, the export will check if
                        all the geometry objects are correct upon export.
    ---------------     ----------------------------------------------------
    sanitize_columns    Optional Boolean. If True, column names will be
                        converted to string, invalid characters removed and
                        other checks will be performed. The default is True.
    ---------------     ----------------------------------------------------
    has_m               Optional Boolean to indicate if data has linear
                        referencing (m) values. Default is True.
    ---------------     ----------------------------------------------------
    has_z               Optional Boolean to indicate if data has elevation
                        (z) values. Default is False.
    ===============     ====================================================


    :returns: string

    """
    out_location = os.path.dirname(location)
    fc_name = os.path.basename(location)
    df = geo._data
    old_idx = df.index
    df.reset_index(drop=True, inplace=True)
    if geo.name is None:
        raise ValueError("DataFrame must have geometry set.")
    if validate and \
       geo.validate(strict=True) == False:
        raise ValueError(("Mixed geometry types detected, "
                          "cannot export to feature class."))

    # sanitize
    if sanitize_columns:
        # logic
        _sanitize_column_names(geo, inplace=True)

    columns = df.columns.tolist()
    for col in columns[:]:
        if not isinstance(col, str):
            df.rename(columns={col: str(col)}, inplace=True)
            col = str(col)

    if HASARCPY:
        # 1. Create the Save Feature Class
        #
        columns = df.columns.tolist()
        join_dummy = "AEIOUYAJC81Z"
        columns.pop(columns.index(df.spatial.name))
        dtypes = [(join_dummy, np.int64)]
        if overwrite and arcpy.Exists(location):
            arcpy.Delete_management(location)
        elif overwrite == False and arcpy.Exists(location):
            raise ValueError(('overwrite set to False, Cannot '
                              'overwrite the table. '))

        notnull = geo._data[geo._name].notnull()
        idx = geo._data[geo._name][notnull].first_valid_index()
        sr = geo._data[geo._name][idx]['spatialReference']
        gt = geo._data[geo._name][idx].geometry_type.upper()
        null_geom = {
            'point':
            pd.io.json.dumps({
                'x': None,
                'y': None,
                'spatialReference': sr
            }),
            'polyline':
            pd.io.json.dumps({
                'paths': [],
                'spatialReference': sr
            }),
            'polygon':
            pd.io.json.dumps({
                'rings': [],
                'spatialReference': sr
            }),
            'multipoint':
            pd.io.json.dumps({
                'points': [],
                'spatialReference': sr
            })
        }
        sr = geo._data[geo._name][idx].spatial_reference.as_arcpy
        null_geom = null_geom[gt.lower()]

        if has_m == True:
            has_m = "ENABLED"
        else:
            has_m = None

        if has_z == True:
            has_z = "ENABLED"
        else:
            has_z = None

        fc = arcpy.CreateFeatureclass_management(out_location,
                                                 spatial_reference=sr,
                                                 geometry_type=gt,
                                                 out_name=fc_name,
                                                 has_m=has_m,
                                                 has_z=has_z)[0]

        # 2. Add the Fields and Data Types
        oidfld = da.Describe(fc)['OIDFieldName']
        for col in columns[:]:
            if col.lower() in ['fid', 'oid', 'objectid']:
                dtypes.append((col, np.int32))
            elif df[col].dtype.name.startswith('datetime64[ns'):
                dtypes.append((col, '<M8[us]'))
            elif df[col].dtype.name == 'object':
                try:
                    u = type(df[col][df[col].first_valid_index()])
                except:
                    u = pd.unique(df[col].apply(type)).tolist()[0]
                if issubclass(u, str):
                    mlen = df[col].str.len().max()
                    dtypes.append((col, '<U%s' % int(mlen)))
                else:
                    try:
                        if df[col][idx] is None:
                            dtypes.append((col, '<U254'))
                        else:
                            dtypes.append((col, type(df[col][idx])))
                    except:
                        dtypes.append((col, '<U254'))
            elif df[col].dtype.name == 'int64':
                dtypes.append((col, np.int64))
            elif df[col].dtype.name == 'bool':
                dtypes.append((col, np.int32))
            else:
                dtypes.append((col, df[col].dtype.type))

        array = np.array([], np.dtype(dtypes))
        arcpy.da.ExtendTable(fc, oidfld, array, join_dummy, append_only=False)

        # 3. Insert the Data
        fields = arcpy.ListFields(fc)
        icols = [fld.name for fld in fields \
                 if fld.type not in ['OID', 'Geometry'] and \
                 fld.name in df.columns] + ['SHAPE@JSON']
        dfcols = [fld.name for fld in fields \
                  if fld.type not in ['OID', 'Geometry'] and\
                  fld.name in df.columns] + [df.spatial.name]

        with da.InsertCursor(fc, icols) as irows:
            dt_fld_idx = [irows.fields.index(col) for col in df.columns \
                          if df[col].dtype.name.startswith('datetime64[ns')]

            def _insert_row(row):
                row[-1] = pd.io.json.dumps(row[-1])
                for idx in dt_fld_idx:
                    if isinstance(row[idx], type(pd.NaT)):
                        row[idx] = None
                irows.insertRow(row)

            q = df[geo._name].isna()
            df.loc[q, 'SHAPE'] = null_geom  # set null values to proper JSON
            np.apply_along_axis(_insert_row, 1, df[dfcols].values)
            df.loc[q, 'SHAPE'] = None  # reset null values
        df.set_index(old_idx, inplace=True)  # restore the original index
        return fc
    elif HASPYSHP:
        if fc_name.endswith('.shp') == False:
            fc_name = "%s.shp" % fc_name
        if SHPVERSION < [2]:
            res = _pyshp_to_shapefile(df=df,
                                      out_path=out_location,
                                      out_name=fc_name)
            df.set_index(old_idx, inplace=True)
            return res
        else:
            res = _pyshp2(df=df, out_path=out_location, out_name=fc_name)
            df.set_index(old_idx, inplace=True)
            return res
    elif HASARCPY == False and HASPYSHP == False:
        raise Exception(
            ("Cannot Export the data without ArcPy or PyShp modules."
             " Please install them and try again."))
    else:
        df.set_index(old_idx, inplace=True)
        return None
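A hedged sketch of the typical call path through the spatially enabled DataFrame accessor that this function backs; the source feature class is an assumption:

import pandas as pd
from arcgis.features import GeoAccessor  # registers the DataFrame .spatial accessor

sdf = pd.DataFrame.spatial.from_featureclass(r'C:\data\in.gdb\roads')  # assumed source
out_fc = sdf.spatial.to_featureclass(location=r'C:\data\out.gdb\roads_copy')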
def to_featureclass(df, out_name, out_location=None,
                    overwrite=True, out_sr=None,
                    skip_invalid=True):
    """
    converts a SpatialDataFrame to a feature class

    Parameters:
     :out_name: name of the output feature class table
     :out_location: path to the workspace
     :overwrite: if True, the data will be erased then replaced, else the
      table will be appended to an existing table.
     :out_sr: if set, the data will try to reproject itself
     :skip_invalid: if True, the cursor object will not raise an error on
      insertion of invalid data; if False, the first occurrence of invalid
      data will raise an exception.
    Returns:
     path to the feature class
    """
    fc = None
    if HASARCPY:
        import arcgis
        cols = []
        dt_idx = []
        invalid_rows = []
        idx = 0
        max_length = None
        if out_location:
            if os.path.isdir(out_location) == False and \
               out_location.lower().endswith('.gdb'):
                out_location = arcpy.CreateFileGDB_management(out_folder_path=os.path.dirname(out_location),
                                                             out_name=os.path.basename(out_location))[0]
            elif os.path.isdir(out_location) == False and \
                 out_name.lower().endswith('.shp'):
                os.makedirs(out_location)
            elif os.path.isfile(out_location) == False and \
                 out_location.lower().endswith('.sde'):
                raise ValueError("The sde connection file does not exist")
        else:
            if out_name.lower().endswith('.shp'):
                out_location = tempfile.gettempdir()
            elif HASARCPY:
                out_location = arcpy.env.scratchGDB
            else:
                out_location = tempfile.gettempdir()
                out_name = out_name + ".shp"
        fc = os.path.join(out_location, out_name)
        df = df.copy() # create a copy so we don't modify the source data.
        if out_name.lower().endswith('.shp'):
            max_length = 10
        for col in df.columns:
            if col.lower() != 'shape':
                if df[col].dtype.type in NUMERIC_TYPES:
                    df[col] = df[col].fillna(0)
                elif df[col].dtype.type in DATETIME_TYPES:
                    dt_idx.append(col)
                else:
                    df.loc[df[col].isnull(), col] = ""
                idx += 1
                col = sanitize_field_name(s=col,
                                          length=max_length)
            cols.append(col)
            del col
        df.columns = cols

        if arcpy.Exists(fc) and \
           overwrite:
            arcpy.Delete_management(fc)
        if not arcpy.Exists(fc):
            sr = df.sr
            if df.sr is None:
                sr = df['SHAPE'].loc[df['SHAPE'].first_valid_index()].spatial_reference
                if isinstance(sr, dict) and \
                   'wkid' in sr:
                    sr = arcpy.SpatialReference(sr['wkid'])
                elif isinstance(sr, arcpy.SpatialReference):
                    sr = sr
                else:
                    sr = None
            elif df.sr:
                sr = _types.SpatialReference(df.sr).as_arcpy
            elif sr is None:
                sr = df['SHAPE'].loc[df['SHAPE'].first_valid_index()].spatial_reference
                if isinstance(sr, dict) and \
                               'wkid' in sr:
                    sr = arcpy.SpatialReference(sr['wkid'])
                elif isinstance(sr, arcpy.SpatialReference):
                    sr = sr
                else:
                    sr = None
            elif isinstance(sr, dict):
                sr = _types.SpatialReference(sr).as_arcpy
            elif isinstance(sr, _types.SpatialReference):
                sr = df.sr.as_arcpy

            fc = arcpy.CreateFeatureclass_management(out_path=out_location,
                                                     out_name=out_name,
                                                     geometry_type=df.geometry_type.upper(),
                                                     spatial_reference=sr)[0]
        desc = arcpy.Describe(fc)
        oidField = desc.oidFieldName
        col_insert = copy.copy(df.columns).tolist()
        if hasattr(desc, 'areaFieldName'):
            af = desc.areaFieldName.lower()
        else:
            af = None
        if hasattr(desc, 'lengthFieldName'):
            lf = desc.lengthFieldName.lower()
        else:
            lf = None
        col_insert = [f for f in col_insert if f.lower() not in ['oid', 'objectid', 'fid', desc.oidFieldName.lower(), af, lf]]
        df_cols = col_insert.copy()
        lower_col_names = [f.lower() for f in col_insert if f.lower() not in ['oid', 'objectid', 'fid']]
        idx_shp = None

        if oidField.lower() in lower_col_names:
            val = col_insert.pop(lower_col_names.index(oidField.lower()))
            del df[val]
            col_insert = copy.copy(df.columns).tolist()
            lower_col_names = [f.lower() for f in col_insert]
        if hasattr(desc, "areaFieldName") and \
           desc.areaFieldName.lower() in lower_col_names:
            val = col_insert.pop(lower_col_names.index(desc.areaFieldName.lower()))
            del df[val]
            col_insert = copy.copy(df.columns).tolist()
            lower_col_names = [f.lower() for f in col_insert]
        elif 'shape_area' in lower_col_names:
            val = col_insert.pop(lower_col_names.index('shape_area'))
            del df[val]
            col_insert = copy.copy(df.columns).tolist()
            lower_col_names = [f.lower() for f in col_insert]
        if hasattr(desc, "lengthFieldName") and \
           desc.lengthFieldName.lower() in lower_col_names:
            val = col_insert.pop(lower_col_names.index(desc.lengthFieldName.lower()))
            del df[val]
            col_insert = copy.copy(df.columns).tolist()
            lower_col_names = [f.lower() for f in col_insert]
        elif 'shape_length' in lower_col_names:
            val = col_insert.pop(lower_col_names.index('shape_length'))
            del df[val]
            col_insert = copy.copy(df.columns).tolist()
            lower_col_names = [f.lower() for f in col_insert]
        if "SHAPE" in df.columns:
            idx_shp = col_insert.index("SHAPE")
            col_insert[idx_shp] = "SHAPE@"
        existing_fields = [field.name.lower() for field in arcpy.ListFields(fc)]
        for col in col_insert:
            if col.lower() != 'shape@' and \
               col.lower() != 'shape' and \
               col.lower() not in existing_fields:
                try:
                    t = _infer_type(df, col)
                    if t == "TEXT" and out_name.lower().endswith('.shp') == False:
                        l = int(df[col].str.len().max()) or 0
                        if l < 255:
                            l = 255
                        arcpy.AddField_management(in_table=fc, field_name=col,
                                                  field_length=l,
                                                  field_type=_infer_type(df, col))
                    else:
                        arcpy.AddField_management(in_table=fc, field_name=col,
                                              field_type=t)
                except:
                    print('col %s' % col)
        dt_idx = [col_insert.index(col) for col in dt_idx if col in col_insert]
        icur = da.InsertCursor(fc, col_insert)
        for index, row in df[df_cols].iterrows():
            if len(dt_idx) > 0:
                row = row.tolist()
                for i in dt_idx:
                    row[i] = row[i].to_pydatetime()
                    del i
                try:
                    if idx_shp:
                        row[idx_shp] = row[idx_shp].as_arcpy
                    icur.insertRow(row)
                except:
                    invalid_rows.append(index)
                    if skip_invalid == False:
                        raise Exception("Invalid row detected at index: %s" % index)
            else:
                try:
                    row = row.tolist()
                    if isinstance(idx_shp, int):
                        row[idx_shp] = row[idx_shp].as_arcpy
                    icur.insertRow(row)
                except:
                    invalid_rows.append(index)
                    if skip_invalid == False:
                        raise Exception("Invalid row detected at index: %s" % index)

            del row
        del icur
        if len(invalid_rows) > 0:
            t = ",".join([str(r) for r in invalid_rows])
            _log.warning('The following rows could not be written to the table: %s' % t)
    elif HASARCPY == False and \
         HASPYSHP:
        return _pyshp_to_shapefile(df=df,
                                   out_path=out_location,
                                   out_name=out_name)
    else:
        raise Exception("Cannot Export the data without ArcPy or PyShp modules. "+ \
                        "Please install them and try again.")
    return fc
def ScriptStatusLogging(taskName='Unavailable',
                        taskTarget='Unknown',
                        completionStatus=scriptFailure,
                        taskStartDateTime=datetime.datetime.now(),
                        taskEndDateTime=datetime.datetime.now(),
                        completionMessage='Unexpected Error.',
                        targetTable=None):
    # Changed to run on SQL Server instead of trying to share a couple logging tables with gatetest.
    # Also add case for incorrect completion statuses.
    try:
        print('Script status logging started.')

        # Calculate task duration and format it for insertion.
        # Duration should only be 00:00:00 when the information is
        # not correct.
        taskDuration = FindDuration(taskEndDateTime, taskStartDateTime)

        # Change the datetimes to ISO 8601 Format (YYYY-MM-DD HH:MM:SS).
        dtStartTimeStamp = CreateTimeStamp(taskStartDateTime)

        dtEndTimeStamp = CreateTimeStamp(taskEndDateTime)
        pythonLoggingFieldList = [
            'Process_Name',
            'Table_Name',
            'Status',  # Status 0x0 = Success, 0x1 = Failure.
            'Start_Date',
            'Completion_Date',
            'Execution_Time',
            'Process_Message'
        ]

        # Choose the logging table to write to based on the completion status.
        if completionStatus == scriptSuccess or completionStatus == scriptFailure:  # Received the correct status format.

            # Create the row to be inserted and fill it with the proper values.
            newRow = [
                taskName, taskTarget, completionStatus, dtStartTimeStamp,
                dtEndTimeStamp, taskDuration, completionMessage
            ]

            cursor = da.InsertCursor(
                pythonLoggingTable,
                pythonLoggingFieldList)  # @UndefinedVariable
            newRowNumber = cursor.insertRow(newRow)

            if 'cursor' in locals():
                del cursor

            print("Inserted a new row with ObjectID of: " + str(newRowNumber))

        else:  # Status format is incorrect.
            print "Received incorrect status information. Will not write to the logging table."

    except:
        print('Script status logging failed.')
        print(sys.exc_info()[0], " ", sys.exc_info()[1], " ", sys.exc_info()[2])
        print(GetMessages(2))
    finally:
        if 'cursor' in locals():
            del cursor

    print('Script status logging completed.')
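A call sketch; scriptSuccess, scriptFailure, pythonLoggingTable, FindDuration, and CreateTimeStamp are assumed to be defined at module level alongside the function:

import datetime

start = datetime.datetime.now()
# ... run the task being logged ...
ScriptStatusLogging(taskName='NightlyParcelLoad',        # hypothetical task
                    taskTarget='Parcels',
                    completionStatus=scriptSuccess,      # assumed module-level flag
                    taskStartDateTime=start,
                    taskEndDateTime=datetime.datetime.now(),
                    completionMessage='Completed without errors.')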
def to_table(geo, location, overwrite=True):
    """
    Exports a geo enabled dataframe to a table.

    ===========================     ====================================================================
    **Argument**                    **Description**
    ---------------------------     --------------------------------------------------------------------
    location                        Required string. The output of the table.
    ---------------------------     --------------------------------------------------------------------
    overwrite                       Optional Boolean.  If True and the table exists, it will be
                                    deleted and overwritten; this is the default.  If False and
                                    the table exists, an exception will be raised.
    ===========================     ====================================================================

    :returns: String
    """
    out_location = os.path.dirname(location)
    fc_name = os.path.basename(location)
    df = geo._data
    if location.lower().find('.csv') > -1:
        df.to_csv(location)
        return location
    elif HASARCPY:
        columns = df.columns.tolist()
        join_dummy = "AEIOUYAJC81Z"
        try:
            columns.pop(columns.index(df.spatial.name))
        except:
            pass
        dtypes = [(join_dummy, np.int64)]
        if overwrite and arcpy.Exists(location):
            arcpy.Delete_management(location)
        elif overwrite == False and arcpy.Exists(location):
            raise ValueError(('overwrite set to False, Cannot '
                              'overwrite the table. '))
        fc = arcpy.CreateTable_management(out_path=out_location,
                                          out_name=fc_name)[0]
        # 2. Add the Fields and Data Types
        #
        oidfld = da.Describe(fc)['OIDFieldName']
        for col in columns[:]:
            if col.lower() in ['fid', 'oid', 'objectid']:
                dtypes.append((col, np.int32))
            elif df[col].dtype.name == 'datetime64[ns]':
                dtypes.append((col, '<M8[us]'))
            elif df[col].dtype.name == 'object':
                try:
                    u = type(df[col][df[col].first_valid_index()])
                except:
                    u = pd.unique(df[col].apply(type)).tolist()[0]
                if issubclass(u, str):
                    mlen = df[col].str.len().max()
                    dtypes.append((col, '<U%s' % int(mlen)))
                else:
                    try:
                        dtypes.append((col, type(df[col][0])))
                    except:
                        dtypes.append((col, '<U254'))
            elif df[col].dtype.name == 'int64':
                dtypes.append((col, np.int64))
            elif df[col].dtype.name == 'bool':
                dtypes.append((col, np.int32))
            else:
                dtypes.append((col, df[col].dtype.type))

        array = np.array([], np.dtype(dtypes))
        arcpy.da.ExtendTable(fc, oidfld, array, join_dummy, append_only=False)
        # 3. Insert the Data
        #
        fields = arcpy.ListFields(fc)
        icols = [fld.name for fld in fields \
                 if fld.type not in ['OID', 'Geometry'] and \
                 fld.name in df.columns]
        dfcols = [fld.name for fld in fields \
                  if fld.type not in ['OID', 'Geometry'] and\
                  fld.name in df.columns]
        with da.InsertCursor(fc, icols) as irows:
            for idx, row in df[dfcols].iterrows():
                try:
                    irows.insertRow(row.tolist())
                except:
                    print("row %s could not be inserted." % idx)
        return fc

    return
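A hedged usage sketch; to_table short-circuits to CSV when the location ends in '.csv', otherwise it builds a geodatabase table (all paths are assumptions):

import pandas as pd
from arcgis.features import GeoAccessor  # registers the DataFrame .spatial accessor

sdf = pd.DataFrame.spatial.from_featureclass(r'C:\data\in.gdb\roads')  # assumed source
gdb_tbl = to_table(sdf.spatial, r'C:\data\out.gdb\roads_attrs')
csv_out = to_table(sdf.spatial, r'C:\temp\roads_attrs.csv')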
def GeoPhotoToPoint(folder,
                    fc,
                    badphotostable="",
                    addnongps="",
                    attachphotos=""):
    try:
        # Convert text from boolean parameters to Python True | False
        addnongps = addnongps.lower() in ["true", "all_photos", ""]
        attachphotos = attachphotos.lower() in ["true", "add_attachments", ""]

        # Get all photo files from Input Folder
        photolist = ExifUtils.ListPhotos(folder)
        # Create outputs
        CreateOutputs(fc, badphotostable, photolist)
        foundone = 0
        incur = incurbad = None
        # Set progress bar
        arcpy.SetProgressor("step", "", 0, len(photolist), 1)

        try:
            with arcpy.da.Editor(os.path.dirname(fc)) as edit_session:
                # Open an InsertCursor to write point locations to a new feature class
                incur = da.InsertCursor(fc, [
                    "Path", "Name", "DateTime", "SHAPE@X", "SHAPE@Y",
                    "SHAPE@Z", "Direction"
                ])
                # Open an InsertCursor to write a list of photos with no GPS coordinates
                incurbad = da.InsertCursor(
                    badphotostable, ["Photo"]) if badphotostable else None
                # Get GPS information from each photo
                for file in photolist:
                    photo = ExifUtils.GetExifMetadata(file)
                    # If the photo has a valid Exif header with coordinate information
                    if (photo.x and photo.y) or addnongps:
                        # Create the point with geometry and attributes
                        incur.insertRow([
                            photo.file,
                            os.path.basename(photo.file), photo.m, photo.x,
                            photo.y, photo.z, photo.d
                        ])
                        foundone = 1
                    if (not photo.x or not photo.y) and badphotostable:
                        # Write the photo path to the Invalid Photos Table output
                        incurbad.insertRow([photo.file])
                    arcpy.SetProgressorPosition()
        except:
            raise
        finally:
            if incur:
                del incur
            if incurbad:
                del incurbad

        # If none of the photos were geotagged, give the standard empty output warning
        if not foundone:
            arcpy.AddIDMessage("WARNING", 117)

        # Attach photos if option specified
        if attachphotos:
            if foundone or addnongps:
                oidfield = arcpy.Describe(fc).OIDFieldName
                arcpy.EnableAttachments_management(fc)
                arcpy.AddAttachments_management(fc, oidfield, fc, oidfield,
                                                "Path", "")
    except:
        # Delete outputs if failure occurs
        if arcpy.Exists(fc):
            arcpy.Delete_management(fc)
        if arcpy.Exists(badphotostable):
            arcpy.Delete_management(badphotostable)
        arcpy.AddIDMessage("ERROR", 999999)
        sys.exit()
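A usage sketch; note the boolean-like parameters arrive as strings and are parsed inside the function (all paths below are assumptions):

GeoPhotoToPoint(folder=r'C:\photos\site_visit',
                fc=r'C:\data\out.gdb\photo_points',
                badphotostable=r'C:\data\out.gdb\photos_no_gps',
                addnongps='false',        # only keep photos with GPS tags
                attachphotos='true')      # attach the image files to the points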
def _arcpy_to_featureclass(df, out_name, out_location=None,
                           overwrite=True, out_sr=None,
                           skip_invalid=True):
    """
    """
    import arcgis
    import numpy as np
    import datetime
    from arcpy import da
    from arcgis.features import SpatialDataFrame

    gtype = df.geometry_type.upper()
    gname = df.geometry.name
    df = df.copy()

    if overwrite and \
       arcpy.Exists(os.path.join(out_location, out_name)):
        arcpy.Delete_management(os.path.join(out_location, out_name))
    elif overwrite == False and \
        arcpy.Exists(os.path.join(out_location, out_name)):
        raise Exception("Dataset exists, please provide a new out_name or location.")

    if out_sr is None:
        try:
            if isinstance(df.sr, dict):
                sr = arcgis.geometry.SpatialReference(df.sr).as_arcpy
            elif isinstance(df.sr, arcgis.geometry.SpatialReference):
                sr = df.sr.as_arcpy
        except:
            sr = arcpy.SpatialReference(4326)
    else:
        if isinstance(df.sr, dict):
            sr = arcgis.geometry.SpatialReference(df.sr).as_arcpy
        elif isinstance(df.sr, arcgis.geometry.SpatialReference):
            sr = df.sr.as_arcpy
        elif isinstance(out_sr, arcpy.SpatialReference):
            sr = out_sr
    fc = arcpy.CreateFeatureclass_management(out_path=out_location,
                                             out_name=out_name,
                                             geometry_type=gtype.upper(),
                                             spatial_reference=sr)[0]
    df['JOIN_ID_FIELD_DROP'] = df.index.tolist()
    flds = df.columns.tolist()

    flds.pop(flds.index(gname))
    flds_lower = [f.lower() for f in flds]
    for f in ['objectid', 'oid', 'fid']:
        if f in flds_lower:
            idx = flds_lower.index(f)
            flds.pop(idx)
            flds_lower.pop(idx)
            del idx
        del f
    array = [tuple(row) for row in df[flds].values]  # .values replaces the removed as_matrix()
    geoms = df.geometry.as_arcpy.tolist()
    dtypes = []

    for idx, a in enumerate(array[0]):
        if isinstance(a,
                      STRING_TYPES):
            dtypes.append((flds[idx], '<U%s' %  df[flds[idx]].map(len).max()))
        elif flds[idx].lower() in ['fid', 'oid', 'objectid']:
            dtypes.append((flds[idx], np.int32))
        elif isinstance(a,
                        (int, np.int32)):
            dtypes.append((flds[idx], np.int64))
        elif isinstance(a,
                        (float, np.float64)):
            dtypes.append((flds[idx], np.float64))
        elif isinstance(a,
                        (datetime.datetime, pd.Timestamp)):
            dtypes.append((flds[idx], '<M8[us]'))
        else:
            dtypes.append((flds[idx], type(a)))
        del idx, a

    array = np.array(array, dtype=dtypes)
    del dtypes, flds, flds_lower

    with da.InsertCursor(fc, ['SHAPE@']) as icur:
        for g in geoms:
            if skip_invalid:
                try:
                    icur.insertRow([g])
                except: pass
            else:
                icur.insertRow([g])
    desc = arcpy.Describe(fc)
    oidField = desc.oidFieldName
    del desc
    da.ExtendTable(in_table=fc, table_match_field=oidField,
                   in_array=array, array_match_field='JOIN_ID_FIELD_DROP',
                   append_only=False)
    del df['JOIN_ID_FIELD_DROP']
    return fc
def collectEvents(ssdo, outputFC):
    """This utility converts event data into weighted point data by
    dissolving all coincident points into unique points with a new count
    field that contains the number of original features at that
    location.

    INPUTS:
    ssdo (obj): data object associated with the input feature class
    outputFC (str): path to the output feature class
    """

    #### Set Default Progressor for Neighborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Validate Output Workspace ####
    ERROR.checkOutputPath(outputFC)

    #### True Centroid Warning For Non-Point FCs ####
    if ssdo.shapeType.upper() != "POINT":
        ARCPY.AddIDMessage("WARNING", 1021)

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.inputFC, spatRef=ssdo.spatialRefString)

    #### Assure Enough Observations ####
    cnt = UTILS.getCount(ssdo.inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs=4)
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs=4)

    #### Process Any Bad Records Encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        if not ssdo.silentWarnings:
            ERROR.reportBadRecords(cnt,
                                   numBadRecs,
                                   badRecs,
                                   label=ssdo.oidName)

    #### Create k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(0.0, 0, "euclidean")

    #### Create Output Feature Class ####
    outPath, outName = OS.path.split(outputFC)
    try:
        DM.CreateFeatureclass(outPath, outName, "POINT", "", ssdo.mFlag,
                              ssdo.zFlag, ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Add Count Field ####
    countFieldNameOut = ARCPY.ValidateFieldName(countFieldName, outPath)
    UTILS.addEmptyField(outputFC, countFieldNameOut, "LONG")
    fieldList = ["SHAPE@", countFieldNameOut]

    #### Set Insert Cursor ####
    rowsOut = DA.InsertCursor(outputFC, fieldList)

    #### Set Progressor for Calculation ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84007), 0, N, 1)

    #### ID List to Search ####
    rowsIN = list(range(N))  # a list, so matched ids can be removed below
    maxCount = 0
    numUnique = 0

    for row in rowsIN:
        #### Get Row Coords ####
        rowInfo = gaTable[row]
        x0, y0 = rowInfo[1]
        count = 1

        #### Search For Exact Coord Match ####
        gaSearch.search_by_idx(row)
        for nh in gaSearch:
            count += 1
            rowsIN.remove(nh.idx)
            ARCPY.SetProgressorPosition()

        #### Keep Track of Max Count ####
        maxCount = max([count, maxCount])

        #### Create Output Point ####
        pnt = (x0, y0, ssdo.defaultZ)

        #### Create and Populate New Feature ####
        rowResult = [pnt, count]
        rowsOut.insertRow(rowResult)
        numUnique += 1
        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    del rowsOut, gaTable

    return countFieldNameOut, maxCount, N, numUnique
def MatchPhotos2Rows(folder,
                     fc,
                     timefield,
                     outtable,
                     badphotostable="",
                     attachphotos="",
                     timetol=0,
                     offset=0):
    try:
        # Convert text from boolean parameters to Python True | False
        attachphotos = attachphotos.lower() in ["true", "add_attachments", ""]

        oidfield = arcpy.Describe(fc).OIDFieldName
        time_dict = {}  # renamed from 'dict' to avoid shadowing the builtin
        # Create dictionary of timestamps in Input Table
        for row in da.SearchCursor(fc, [oidfield, timefield]):
            time_dict[row[0]] = row[1]

        # Get all photo files from Input Folder
        photolist = ExifUtils.ListPhotos(folder)

        # Create outputs
        CreateOutputs(outtable, badphotostable, photolist)
        foundone = 0
        icur = incurbad = None
        # Set progress bar
        arcpy.SetProgressor("step", "", 0, len(photolist), 1)

        try:
            with arcpy.da.Editor(os.path.dirname(outtable)) as edit_session:
                # Open an InsertCursor to write matches to the Output Table
                icur = da.InsertCursor(
                    outtable,
                    ["IN_FID", "Photo_Path", "Photo_Name", "Match_Diff"])
                # Open an InsertCursor to write a list of non-matching photos
                incurbad = da.InsertCursor(
                    badphotostable, ["Photo"]) if badphotostable else None
                # Get DateTime information from each photo
                for file in photolist:
                    photo = ExifUtils.GetExifMetadata(file)
                    # If the photo has a valid Exif header with DateTime information
                    if photo.m:
                        # Turn timestamp string into a Python datetime class
                        photo.m = datetime.datetime.fromtimestamp(
                            time.mktime(
                                time.strptime(photo.m, '%Y:%m:%d %H:%M:%S')))
                        # If a time offset was specified, change the photo timestamp
                        if offset:
                            photo.m += datetime.timedelta(seconds=offset)
                        # Find a match for this DateTime
                        closestID = ClosestTime(photo.m, time_dict.items())
                        # Determine if the time difference between this photo and row is within the tolerance
                        closestDif = abs(time_dict[closestID] -
                                         photo.m).total_seconds()
                        # If the difference is within the tolerance, make a match by writing an output row
                        if closestDif <= timetol or timetol == 0:
                            icur.insertRow([
                                closestID, photo.file,
                                os.path.basename(photo.file), closestDif
                            ])
                            foundone = 1
                        else:
                            # Write the photo path to the Unmatched Photos Table
                            if badphotostable:
                                incurbad.insertRow([photo.file])
                    else:
                        # Write the photo path to the Unmatched Photos Table
                        if badphotostable:
                            incurbad.insertRow([photo.file])
                    arcpy.SetProgressorPosition()
        except:
            raise
        finally:
            if icur:
                del icur
            if incurbad:
                del incurbad

        # Attach photos if option specified
        if attachphotos:
            arcpy.EnableAttachments_management(fc)
            arcpy.AddAttachments_management(fc, oidfield, outtable, "IN_FID",
                                            "Photo_Path", "")

        # If none of the photos were matched give the standard empty output warning
        if not foundone:
            arcpy.AddIDMessage("WARNING", 117)
    except:
        if arcpy.Exists(outtable):
            arcpy.Delete_management(outtable)
        if arcpy.Exists(badphotostable):
            arcpy.Delete_management(badphotostable)
        arcpy.AddIDMessage("ERROR", 999999)
        sys.exit()
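A usage sketch matching photos to rows captured within 30 seconds, with photo clocks shifted back two hours; the paths and time field name are assumptions:

MatchPhotos2Rows(folder=r'C:\photos\survey',
                 fc=r'C:\data\out.gdb\gps_track_points',
                 timefield='FixTime',     # assumed datetime field on fc
                 outtable=r'C:\data\out.gdb\photo_matches',
                 badphotostable=r'C:\data\out.gdb\photos_unmatched',
                 attachphotos='false',
                 timetol=30,
                 offset=-7200)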
def main(*argv):
    """ main driver of program """
    try:
        #   User Inputs
        #
        inputFC = argv[0]
        #inputFD = os.path.split(inputFC)[0]
        desc = arcpy.Describe(inputFC)
        sr = desc.spatialReference
        try:
            inputFD = desc.featureClass.path
            fc = desc.featureClass.name
        except:
            inputFD = desc.path
            fc = desc.name

        # spreadsheet
        filename = argv[1]
        # check type
        tabname = argv[2]
        # output GDB
        output_fcs = argv[3]


        alias_table = get_field_alias(inputFC)
        arcpy.AddMessage(alias_table)
        arcpy.AddMessage(os.path.dirname(inputFD))
        try:
            fc_domain_dict = get_fc_domains(os.path.dirname(inputFD))
        except:
            #fc_domain_dict = get_fc_domains(r'C:\PROJECTS\STATE_OF_THE_DATA\DATA\TDS\TDS_6_1_MNG_FGCM_sub1.gdb')
            arcpy.AddMessage("Attribution Assessment not configured to pull "
                + "domain dictionary from service. Please add a path to a TDS"
                + " feature class here.")
            exit(0)
        arcpy.AddMessage(os.path.dirname(inputFD))
        arcpy.AddMessage(fc_domain_dict)

        outputGDB = os.path.dirname(output_fcs) #argv[3]#
        #  Local Variables
        #
        error_fcs = {}
        empty = (-999999, '', None, 'noInformation',
                 'None', 'Null', 'NULL', -999999.0)
        #  Logic
        #
        now = datetime.datetime.now()
        if outputGDB is None or \
           outputGDB == "" or \
           outputGDB == "#":
            outputGDB = env.scratchGDB
        if not arcpy.Exists(outputGDB):
            arcpy.CreateFileGDB_management(out_folder_path=os.path.dirname(outputGDB),
                                           out_name=os.path.basename(outputGDB))
        arcpy.AddMessage("Beginning null attribute check.")
        env.workspace = inputFD
        specificAttributeDict, attrCheck = create_attr_dict(filename, tabname)
        #desc = arcpy.Describe(inputFD)
##        if desc.dataType.lower() == "FeatureDataset".lower():
##            sr = arcpy.Describe(inputFD).spatialReference
##        else:
##            sr = None
        error_fc = create_error_fc(output_fcs, 'POLYLINE', sr=sr)
        del sr
        edit = da.Editor(outputGDB)
        edit.startEditing(False, True)
        edit.startOperation()
        crvInsert = da.InsertCursor(error_fc,
                                    ["SHAPE@", "DEFICIENCY", "FEATURE_CLASS",
                                     "SUBTYPE", "ORIG_OID", "DEFICIENCY_CNT"])

##-----------------
        arcpy.AddMessage("Looking at: %s" % output_fcs)
        stList = unique_values(inputFC,"F_CODE")
        errorCount = 0
        if len(stList) > 0:
            field_names_lookup = {field.name : field.type \
                                  for field in arcpy.ListFields(inputFC) \
                                  if field.type not in ['Blob', 'Geometry', 'OID', 'Raster']}
            field_names_lookup['SHAPE@'] = 'Geometry'
            field_names_lookup['OID@'] = 'OID'
            for s in stList:
                if s in specificAttributeDict:
                    sub_sql = " or ".join([assemble_sql(field_name=f,
                                            field_type=field_names_lookup[f]) \
                                           for f in specificAttributeDict[s] ])
                    sql = "F_CODE = '{fcode}' and ({subsql})".format(fcode=s, subsql=sub_sql)
                    with da.SearchCursor(inputFC,
                                         field_names_lookup.keys(),
                                         where_clause=sql) as rows:
                        index_lookup = None
                        for row in rows:
                            if index_lookup is None:
                                index_lookup = {key:rows.fields.index(key) \
                                                for key in rows.fields}
                            vals = [alias_table[i] for i in specificAttributeDict[s] \
                                    if row[index_lookup[i]] in empty]
                            if len(vals) > 0:
                                fs = ",".join(vals)
                                oid = row[index_lookup["OID@"]]
                                #arcpy.AddMessage(fc_domain_dict[s])
                                ERROR = str(fc) + r" | " + str(fc_domain_dict[s]) + r" | OID: " + str(oid) + r" | " + fs
                                irow = [row[index_lookup['SHAPE@']],
                                        ERROR,
                                        fc,
                                        fc_domain_dict[s],
                                        oid,
                                        len(vals)
                                        ]

                                crvInsert.insertRow(irow)

                                errorCount += 1
                                del irow
                                del oid
                                del ERROR
                                del fs
                            del vals
                            del row

                    not_sub_sql = " and ".join([assemble_sql(field_name=f,
                                            field_type=field_names_lookup[f],
                                            not_in=True) \
                                           for f in specificAttributeDict[s] ])

                    not_sql = "F_CODE = '{fcode}' and ({subsql})".format(fcode=s, subsql=not_sub_sql)
                    with da.SearchCursor(inputFC,
                                         field_names_lookup.keys(),
                                         where_clause=not_sql) as rows:
                        index_lookup = None
                        for row in rows:
                            if index_lookup is None:
                                index_lookup = {key:rows.fields.index(key) \
                                                for key in rows.fields}
                            vals = [i for i in specificAttributeDict[s] \
                                    if row[index_lookup[i]] in empty]
                            fs = "N/A"
                            oid = row[index_lookup["OID@"]]
                            ERROR = str(fc) + r" | " + str(fc_domain_dict[s]) + r" | OID: " + str(oid) + r" | " + fs
                            irow = [row[index_lookup['SHAPE@']],
                                    ERROR,
                                    fc,
                                    fc_domain_dict[s],
                                    oid,
                                    0
                                    ]

                            crvInsert.insertRow(irow)

                            errorCount += 1
                            del irow
                            del oid
                            del ERROR
                            del fs
                            del vals
                            del row
                        del index_lookup
                del s
            del field_names_lookup
        if errorCount > 0:
            arcpy.AddMessage("       Errors in " + fc + ": " + str(errorCount))
        del stList

##------------------------------------------------------------------------------
        edit.stopOperation()
        edit.stopEditing(True)
        del crvInsert
        del edit
        arcpy.AddMessage("Total Processing time: %s" % str(datetime.datetime.now() - now))
        #arcpy.SetParameterAsText(4, ";".join(error_fcs.values()))
    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
        arcpy.AddError("ArcPy Error Message: %s" % arcpy.GetMessages(2))
    except FunctionError as f_e:
        messages = f_e.args[0]
        arcpy.AddError("error in function: %s" % messages["function"])
        arcpy.AddError("error on line: %s" % messages["line"])
        arcpy.AddError("error in file name: %s" % messages["filename"])
        arcpy.AddError("with error message: %s" % messages["synerror"])
        arcpy.AddError("ArcPy Error Message: %s" % messages["arc"])
    except:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
def gpxToPoints(gpxfile, outFC):
    ''' This is called by the __main__ if run from a tool or at the command line
    '''

    # Set the tree to the input GPX file
    #
    tree = ElementTree.parse(gpxfile)

    global TOPOGRAFIX_NS
    TOPOGRAFIX_NS = ''
    TOPOGRAFIX_NS10 = './/{http://www.topografix.com/GPX/1/0}'
    TOPOGRAFIX_NS11 = './/{http://www.topografix.com/GPX/1/1}'

    badPt = 0

    # Inspecting the GPX file determines and sets the appropriate namespace. If neither
    # 1.0 nor 1.1 is found, empty output will be generated
    #
    for ptType in gpxTypes:   
        if tree.findall(TOPOGRAFIX_NS10 + ptType):
            TOPOGRAFIX_NS = TOPOGRAFIX_NS10
        elif tree.findall(TOPOGRAFIX_NS11 + ptType):
            TOPOGRAFIX_NS = TOPOGRAFIX_NS11


    if TOPOGRAFIX_NS == '':
        arcpy.AddIDMessage("Warning", 1202)
            
    
    # Inspect the GPX file to get field lengths and how many points    
    arcpy.SetProgressorLabel("Scanning contents of GPX file")    
        
    howManyElements = 0
    
    # Initialize scanner with max values of 255
    scanner = gpxDetails(255, 255, 255, 255)
    for gType in gpxTypes:      
      for node in tree.findall(TOPOGRAFIX_NS + gType):                     
      
        if gType == 'trk': 
          #howManyElements +=1                 
          scanner.scan(node)
          for seg in node.findall(TOPOGRAFIX_NS + 'trkseg'):
            for subnode in seg.findall(TOPOGRAFIX_NS + 'trkpt'):
              howManyElements += 1
              scanner.scan(subnode)
            
        elif gType == 'rte':
          scanner.scan(node)
          for subnode in node.findall(TOPOGRAFIX_NS + 'rtept'):             
            scanner.scan(subnode)
            howManyElements +=1
            
        else: #wpt             
          scanner.scan(node)
          howManyElements +=1    
    

    # Create the output feature class in WGS84    
    arcpy.CreateFeatureclass_management(os.path.dirname(outFC), os.path.basename(outFC), 'POINT', '', 'DISABLED', 'ENABLED', 4326)


    # Join fields to the feature class, using ExtendTable    
    inarray = numpy.array([],
                      numpy.dtype([('intfield', numpy.int32),
                                   ('Name', '|S' + str(scanner.nameLen)),
                                   ('Descript', '|S' + str(scanner.descLen)),
                                   ('Type', '|S255'),
                                   ('Comment', '|S' + str(scanner.cmtLen)), 
                                   ('Symbol', '|S' + str(scanner.symLen)),
                                   # a bare '|S' creates a zero-length text
                                   # field, so give it an explicit width
                                   ('DateTimeS', '|S64'),
                                   ('Elevation', numpy.float),
                                   ]))
    
    arcpy.da.ExtendTable(outFC, "OID@", inarray, "intfield")


    rowsDA = da.InsertCursor(outFC, ['Name', 'Descript', 'Type', 'Comment', 'Symbol', 'DateTimeS', 'Elevation', 'SHAPE@X', 'SHAPE@Y', 'SHAPE@Z'])

    arcpy.SetProgressor('step', 'Converting GPX points...', 0, howManyElements, 1)
    # Loop over each point in the tree and put the information inside a new row
    #
    for index, trkPoint in enumerate(GeneratePointFromXML(tree)):
        if trkPoint.asPoint() is not None:
            try:              
              rowsDA.insertRow([trkPoint.name, trkPoint.desc, trkPoint.gpxtype, trkPoint.cmt,
                                trkPoint.sym, trkPoint.t, trkPoint.z, trkPoint.x, trkPoint.y, trkPoint.z])              
            except RuntimeError as e:
              arcpy.AddError(str(e))
       
        else:
            badPt +=1
            
        arcpy.SetProgressorPosition(index)

    if badPt > 0:
        arcpy.AddIDMessage("WARNING", 1201, badPt, index + 1)
        
    if tree:
        del tree
    if rowsDA:
        del rowsDA    
        
        
    # Try to create a DateTime field of Date-type for non-shapefile output
    #
    if not outFC.lower().endswith(".shp"):
      try:
        arcpy.ResetProgressor()
        arcpy.SetProgressorLabel("Calculating Datetime field")
        arcpy.ConvertTimeField_management(outFC, 'DateTimeS', 'yyyy-MM-ddTHH:mm:ssZ', "Date_Time")

      except:
        arcpy.AddIDMessage("WARNING", 1227)

        try:
          arcpy.DeleteField_management(outFC, "Date_Time")
        except:
          pass
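# gpxDetails is defined elsewhere in this tool; a minimal sketch of the
# scanner, assuming it only tracks the longest Name, Descript, Comment and
# Symbol strings seen so the output text fields can be sized (the element
# tags below are inferred from the insert fields above, not confirmed):
class gpxDetails(object):
    def __init__(self, nameLen, descLen, cmtLen, symLen):
        # start from the supplied minimum widths (255 in the caller)
        self.nameLen = nameLen
        self.descLen = descLen
        self.cmtLen = cmtLen
        self.symLen = symLen

    def _grow(self, attr, node, tag):
        elem = node.find(TOPOGRAFIX_NS + tag)
        if elem is not None and elem.text:
            setattr(self, attr, max(getattr(self, attr), len(elem.text)))

    def scan(self, node):
        # inspect the text elements of a wpt/trkpt/rtept node
        self._grow('nameLen', node, 'name')
        self._grow('descLen', node, 'desc')
        self._grow('cmtLen', node, 'cmt')
        self._grow('symLen', node, 'sym')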
def separateRoutes():
    """Create a distinct feature class for each of the routes in the source fc"""

    type_dict = {
        1: 'bus',
        2: 'aerial tram',
        3: 'wes',
        4: 'streetcar',
        5: 'max'
    }

    line_dict = {
        193: 'ns',
        194: 'cl',
        208: 'aerial_tram',
        # 999 is a placeholder used because the route_id field is an integer
        # type and can't hold non-numeric characters
        999: 'new_sellwood_099'
    }

    management.CreateFileGDB(os.path.dirname(route_gdb),
                             os.path.basename(route_gdb))

    service_list = []
    service_levels = []

    with da.SearchCursor(all_routes, route_fields[1:]) as s_cursor:
        for rte, serv, r_type in s_cursor:
            rs = (int(rte), serv)
            if r_type not in (3, 5) and rs not in service_list:
                service_list.append(rs)

            if serv not in service_levels:
                service_levels.append(serv)

    oregon_spn = arcpy.SpatialReference(2913)
    for level in service_levels:
        management.CreateFeatureDataset(route_gdb, level.replace('-', '_'),
                                        oregon_spn)

    for route_id, service in service_list:
        # translate number to name for streetcar and aerial tram lines
        try:
            route_text = line_dict[route_id]
        except KeyError:
            # add leading zeros to routes with fewer than 3 digits for readability
            route_text = 'line_{0:03d}'.format(route_id)

        service_text = service.replace('-', '_')
        route_name = '{0}_{1}_carto'.format(route_text, service_text)

        current_route = os.path.join(route_gdb, service_text, route_name)
        geom_type = 'POLYLINE'
        template = all_routes
        management.CreateFeatureclass(os.path.dirname(current_route),
                                      os.path.basename(current_route),
                                      geom_type,
                                      template,
                                      spatial_reference=oregon_spn)

        i_cursor = da.InsertCursor(current_route, route_fields)

        with da.SearchCursor(all_routes, route_fields) as s_cursor:
            for geom, rte, serv, r_type in s_cursor:
                if rte == route_id and serv == service:
                    i_cursor.insertRow((geom, rte, serv, r_type))

        del i_cursor
Example #23
def to_featureclass(df, out_name, out_location=None,
                    overwrite=True, out_sr=None,
                    skip_invalid=True):
    """
    converts a SpatialDataFrame to a feature class

    Parameters:
     :out_location: path to the workspace
     :out_name: name of the output feature class table
     :overwrite: True, the data will be erased then replaced, else the
      table will be appended to an existing table.
     :out_sr: if set, the data will try to reproject itself
     :skip_invalid: if True, the cursor object will not raise an error on
      insertion of invalid data, if False, the first occurence of invalid
      data will raise an exception.
    Returns:
     path to the feature class
    """
    cols = []
    dt_idx = []
    invalid_rows = []
    idx = 0
    max_length = None
    if out_location:
        if not os.path.isdir(out_location) and \
           out_location.lower().endswith('.gdb'):
            out_location = arcpy.CreateFileGDB_management(out_folder_path=os.path.dirname(out_location),
                                                          out_name=os.path.basename(out_location))[0]
        elif not os.path.isdir(out_location) and \
             out_name.lower().endswith('.shp'):
            os.makedirs(out_location)
        elif not os.path.isfile(out_location) and \
             out_location.lower().endswith('.sde'):
            raise ValueError("The sde connection file does not exist")
    else:
        if out_name.lower().endswith('.shp'):
            out_location = tempfile.gettempdir()
        elif HASARCPY:
            out_location = arcpy.env.scratchGDB
        else:
            out_location = tempfile.gettempdir()
            out_name = out_name + ".shp"
    fc = os.path.join(out_location, out_name)
    df = df.copy() # create a copy so we don't modify the source data.
    if out_name.lower().endswith('.shp'):
        max_length = 10
    for col in df.columns:
        if df[col].dtype.type in NUMERIC_TYPES:
            df[col] = df[col].fillna(0)
        elif df[col].dtype.type in DATETIME_TYPES:
            dt_idx.append(idx)
        else:
            df.loc[df[col].isnull(), col] = ""
        idx += 1
        col = sanitize_field_name(s=col,
                                  length=max_length)
        cols.append(col)
        del col
    df.columns = cols

    if arcpy.Exists(fc) and \
       overwrite:
        arcpy.Delete_management(fc)
    if not arcpy.Exists(fc):
        sr = df.sr
        if sr is None:
            sr = df['SHAPE'].loc[df['SHAPE'].first_valid_index()]
        fc = arcpy.CreateFeatureclass_management(out_path=out_location,
                                                 out_name=out_name,
                                                 geometry_type=df.geometry_type.upper(),
                                                 spatial_reference=sr)[0]
    oidField = arcpy.Describe(fc).oidFieldName
    col_insert = copy.copy(df.columns).tolist()
    lower_col_names = [f.lower() for f in col_insert]
    if "SHAPE" in df.columns:
        idx = col_insert.index("SHAPE")
        col_insert[idx] = "SHAPE@"
    if oidField.lower() in lower_col_names:
        val = col_insert.pop(lower_col_names.index(oidField.lower()))
        del df[val]
    existing_fields = [field.name.lower() for field in arcpy.ListFields(fc)]
    for col in col_insert:
        if col.lower().find('shape') == -1 and \
           col.lower() not in existing_fields:
            arcpy.AddField_management(in_table=fc, field_name=col,
                                      field_type=_infer_type(df, col))
    icur = da.InsertCursor(fc, col_insert)
    for index, row in df.iterrows():
        if len(dt_idx) > 0:
            row = row.tolist()
            for i in dt_idx:
                row[i] = row[i].to_pydatetime()
                del i
            try:
                icur.insertRow(row)
            except:
                invalid_rows.append(index)
                if skip_invalid == False:
                    raise Exception("Invalid row detected at index: %s" % index)
        else:
            try:
                icur.insertRow(row.tolist())
            except:
                invalid_rows.append(index)
                if skip_invalid == False:
                    raise Exception("Invalid row detected at index: %s" % index)

        del row
    del icur
    if len(invalid_rows) > 0:
        t = ",".join([str(r) for r in invalid_rows])
        #import warnings
        print('The following rows could not be written to the table: %s' % t)
    return fc
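# A quick usage sketch; the input feature class and output paths below are
# illustrative assumptions, and SpatialDataFrame.from_featureclass is assumed
# to be available from the same arcgis package this function belongs to:
from arcgis.features import SpatialDataFrame

sdf = SpatialDataFrame.from_featureclass(r'C:\data\inputs.gdb\parcels')
fc = to_featureclass(sdf,
                     out_name='parcels_copy',
                     out_location=r'C:\temp\exports.gdb',
                     overwrite=True,
                     skip_invalid=True)
print(fc)  # full path to the new feature class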
Example #24
    def createOutput(self, outputFC):
        """Creates an Output Feature Class with the Mean Centers.

        INPUTS:
        outputFC (str): path to the output feature class
        """

        #### Validate Output Workspace ####
        ERROR.checkOutputPath(outputFC)

        #### Shorthand Attributes ####
        ssdo = self.ssdo
        caseField = self.caseField
        dimField = self.dimField

        #### Create Output Feature Class ####
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
        outPath, outName = OS.path.split(outputFC)

        try:
            DM.CreateFeatureclass(outPath, outName, "POINT", "", ssdo.mFlag,
                                  ssdo.zFlag, ssdo.spatialRefString)
        except:
            ARCPY.AddIDMessage("ERROR", 210, outputFC)
            raise SystemExit()

        #### Add Field Names ####
        fn = UTILS.getFieldNames(mcFieldNames, outPath)
        xFieldName, yFieldName, zFieldName = fn
        shapeFieldNames = ["SHAPE@"]
        dataFieldNames = [xFieldName, yFieldName]
        if ssdo.zBool:
            dataFieldNames.append(zFieldName)

        for fieldName in dataFieldNames:
            UTILS.addEmptyField(outputFC, fieldName, "DOUBLE")

        caseIsDate = False
        if caseField:
            fcCaseField = ssdo.allFields[caseField]
            validCaseName = UTILS.validQFieldName(fcCaseField, outPath)
            caseType = UTILS.convertType[fcCaseField.type]
            UTILS.addEmptyField(outputFC, validCaseName, caseType)
            dataFieldNames.append(validCaseName)
            if caseType.upper() == "DATE":
                caseIsDate = True

        if dimField:
            fcDimField = ssdo.allFields[dimField]
            validDimName = UTILS.validQFieldName(fcDimField, outPath)
            if caseField:
                if validCaseName == validDimName:
                    validDimName = ARCPY.GetIDMessage(84199)
            UTILS.addEmptyField(outputFC, validDimName, "DOUBLE")
            dataFieldNames.append(validDimName)

        #### Write Output ####
        allFieldNames = shapeFieldNames + dataFieldNames
        rows = DA.InsertCursor(outputFC, allFieldNames)
        for case in self.caseKeys:

            #### Mean Centers ####
            meanX, meanY, meanZ = self.meanCenter[case]
            pnt = (meanX, meanY, meanZ)
            if ssdo.zBool:
                rowResult = [pnt, meanX, meanY, meanZ]
            else:
                rowResult = [pnt, meanX, meanY]

            #### Set Attribute Fields ####
            if caseField:
                caseValue = case.item()
                if caseIsDate:
                    caseValue = TUTILS.iso2DateTime(caseValue)
                rowResult.append(caseValue)

            if dimField:
                meanDim = self.dimCenter[case]
                rowResult.append(meanDim)

            rows.insertRow(rowResult)

        #### Clean Up ####
        del rows

        #### Set Attribute ####
        self.outputFC = outputFC
Example #25
    def createOutput(self, outputFC):
        """Creates an Output Feature Class with the Standard Distances.

        INPUTS:
        outputFC (str): path to the output feature class
        """

        #### Validate Output Workspace ####
        ERROR.checkOutputPath(outputFC)

        #### Shorthand Attributes ####
        ssdo = self.ssdo
        caseField = self.caseField

        #### Increase Extent if not Projected ####
        if ssdo.spatialRefType != "Projected":
            sdValues = self.sd.values()
            if len(sdValues):
                maxRadius = max(sdValues)
                largerExtent = UTILS.increaseExtentByConstant(
                    ssdo.extent, constant=maxRadius)
                largerExtent = [LOCALE.str(i) for i in largerExtent]
                ARCPY.env.XYDomain = " ".join(largerExtent)

        #### Create Output Feature Class ####
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
        outPath, outName = OS.path.split(outputFC)

        try:
            DM.CreateFeatureclass(outPath, outName, "POLYGON", "", ssdo.mFlag,
                                  ssdo.zFlag, ssdo.spatialRefString)
        except:
            ARCPY.AddIDMessage("ERROR", 210, outputFC)
            raise SystemExit()

        #### Add Fields to Output FC ####
        dataFieldNames = UTILS.getFieldNames(sdFieldNames, outPath)
        shapeFieldNames = ["SHAPE@"]
        for fieldName in dataFieldNames:
            UTILS.addEmptyField(outputFC, fieldName, "DOUBLE")

        caseIsDate = False
        if caseField:
            fcCaseField = ssdo.allFields[caseField]
            validCaseName = UTILS.validQFieldName(fcCaseField, outPath)
            caseType = UTILS.convertType[fcCaseField.type]
            UTILS.addEmptyField(outputFC, validCaseName, caseType)
            dataFieldNames.append(validCaseName)
            if caseType.upper() == "DATE":
                caseIsDate = True

        #### Write Output ####
        badCaseRadians = []
        allFieldNames = shapeFieldNames + dataFieldNames
        rows = DA.InsertCursor(outputFC, allFieldNames)
        for case in self.caseKeys:

            #### Get Results ####
            xVal, yVal = self.meanCenter[case]
            radius = self.sd[case]

            #### Create Empty Polygon Geometry ####
            poly = ARCPY.Array()

            #### Check for Valid Radius ####
            radiusZero = UTILS.compareFloat(0.0, radius, rTol=.0000001)
            radiusNan = NUM.isnan(radius)
            radiusBool = radiusZero + radiusNan
            if radiusBool:
                badRadian = 6
                badCase = UTILS.caseValue2Print(case, self.caseIsString)
                badCaseRadians.append(badCase)
            else:
                badRadian = 0

                #### Calculate a Point For Each ####
                #### Degree in Circle Polygon ####
                for degree in NUM.arange(0, 360):
                    try:
                        radians = NUM.pi / 180.0 * degree
                        pntX = xVal + (radius * NUM.cos(radians))
                        pntY = yVal + (radius * NUM.sin(radians))
                        pnt = ARCPY.Point(pntX, pntY, ssdo.defaultZ)
                        poly.add(pnt)
                    except:
                        badRadian += 1
                        if badRadian == 6:
                            badCase = UTILS.caseValue2Print(
                                case, self.caseIsString)
                            badCaseRadians.append(badCase)
                            break

            if badRadian < 6:
                #### Create and Populate New Feature ####
                poly = ARCPY.Polygon(poly, None, True)
                rowResult = [poly, xVal, yVal, radius]

                if caseField:
                    caseValue = case.item()
                    if caseIsDate:
                        caseValue = TUTILS.iso2DateTime(caseValue)
                    rowResult.append(caseValue)
                rows.insertRow(rowResult)

        #### Report Bad Cases Due to Geometry (coincident pts) ####
        nBadRadians = len(badCaseRadians)
        if nBadRadians:
            if caseField:
                badCaseRadians = " ".join(badCaseRadians)
                ARCPY.AddIDMessage("WARNING", 1011, caseField, badCaseRadians)
            else:
                ARCPY.AddIDMessage("ERROR", 978)
                raise SystemExit()

        #### Return Extent to Normal if not Projected ####
        if ssdo.spatialRefType != "Projected":
            ARCPY.env.XYDomain = None

        #### Clean Up ####
        del rows

        #### Set Attribute ####
        self.outputFC = outputFC
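# The circle built above is a plain parametric sweep; the same idea as a
# small standalone arcpy helper (the names and defaults are illustrative):
import numpy
import arcpy

def circle_polygon(x, y, radius, z=0.0, sr=None):
    """Approximate a circle with a 360-vertex arcpy Polygon."""
    arr = arcpy.Array()
    for degree in numpy.arange(0, 360):
        radians = numpy.pi / 180.0 * degree
        arr.add(arcpy.Point(x + radius * numpy.cos(radians),
                            y + radius * numpy.sin(radians), z))
    return arcpy.Polygon(arr, sr)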
Example #26
    def createOutput(self, outputFC):
        """Creates an Output Feature Class with the Median Centers.

        INPUTS:
        outputFC (str): path to the output feature class
        """

        #### Validate Output Workspace ####
        ERROR.checkOutputPath(outputFC)

        #### Shorthand Attributes ####
        ssdo = self.ssdo
        caseField = self.caseField
        attFields = self.attFields

        #### Create Output Feature Class ####
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
        outPath, outName = OS.path.split(outputFC)

        try:
            DM.CreateFeatureclass(outPath, outName, "POINT", "", ssdo.mFlag, 
                                  ssdo.zFlag, ssdo.spatialRefString)
        except:
            ARCPY.AddIDMessage("ERROR", 210, outputFC)
            raise SystemExit()

        #### Add Field Names ####
        dataFieldNames = UTILS.getFieldNames(mdcFieldNames, outPath)
        shapeFieldNames = ["SHAPE@"]
        for fieldName in dataFieldNames:
            UTILS.addEmptyField(outputFC, fieldName, "DOUBLE")

        caseIsDate = False
        if caseField:
            fcCaseField = ssdo.allFields[caseField]
            validCaseName = UTILS.validQFieldName(fcCaseField, outPath)
            caseType = UTILS.convertType[fcCaseField.type]
            UTILS.addEmptyField(outputFC, validCaseName, caseType)
            dataFieldNames.append(validCaseName)
            if caseType.upper() == "DATE":
                caseIsDate = True

        if attFields:
            for attField in attFields:
                fcAttField = ssdo.allFields[attField]
                validAttName = UTILS.validQFieldName(fcAttField, outPath)
                if caseField:
                    if validCaseName == validAttName:
                        validAttName = ARCPY.GetIDMessage(84195)
                UTILS.addEmptyField(outputFC, validAttName, "DOUBLE") 
                dataFieldNames.append(validAttName)

        outShapeFileBool = UTILS.isShapeFile(outputFC)
            
        #### Add Median X, Y, Dim ####
        allFieldNames = shapeFieldNames + dataFieldNames
        rows = DA.InsertCursor(outputFC, allFieldNames)
        for case in self.caseKeys:

            #### Median Centers ####
            medX, medY = self.medianCenter[case]
            pnt = (medX, medY, ssdo.defaultZ)
            rowResult = [pnt, medX, medY]

            #### Set Attribute Fields ####
            if caseField:
                caseValue = case.item()
                if caseIsDate:
                    caseValue = TUTILS.iso2DateTime(caseValue)
                rowResult.append(caseValue)

            #### Set Attribute Fields ####
            if attFields:
                for attInd, attField in enumerate(self.attFields):
                    medAtt = self.attCenter[case][attInd]
                    rowResult.append(medAtt)

            rows.insertRow(rowResult)
        
        #### Clean Up ####
        del rows

        #### Set Attribute ####
        self.outputFC = outputFC
Example #27
def mergeDualCarriageways():
    """Collapse dual carriageways and turning circles in single, striagt-line roadways, the 
	tools that achieve these effects are run on each route separately then the routes are 
	added back to a single feature class as this yields better results"""

    generateMatchCode()

    # create a feature class to store all of the outputs
    geom_type = 'POLYLINE'
    template = distinct_routes_src
    oregon_spn = arcpy.SpatialReference(2913)
    management.CreateFeatureclass(os.path.dirname(collapsed_routes),
                                  os.path.basename(collapsed_routes),
                                  geom_type,
                                  template,
                                  spatial_reference=oregon_spn)

    # make a feature layer of the source routes so that selections can be made on it
    distinct_rte_lyr = 'distinct_transit_routes'
    management.MakeFeatureLayer(distinct_routes, distinct_rte_lyr)

    route_service_list = getRouteServicePairs()
    temp_merge = os.path.join(temp_shp_dir, 'temp_merge.shp')
    temp_collapse = os.path.join(temp_shp_dir, 'temp_collapse.shp')

    route_fields = ['Shape@', 'route_id', 'serv_level', 'route_type']
    i_cursor = da.InsertCursor(collapsed_routes, route_fields)

    for route, service in route_service_list:
        select_type = 'NEW_SELECTION'
        where_clause = """"route_id" = {0} AND "serv_level" = '{1}'""".format(
            route, service)

        management.SelectLayerByAttribute(distinct_rte_lyr, select_type,
                                          where_clause)

        # merge dual carriageways
        merge_field = 'merge_id'  # '0' in this field means won't be merged
        merge_distance = 100  # feet
        cartography.MergeDividedRoads(distinct_rte_lyr, merge_field,
                                      merge_distance, temp_merge)

        # collapse turning circles
        collapse_distance = 550
        cartography.CollapseRoadDetail(temp_merge, collapse_distance,
                                       temp_collapse)

        with da.SearchCursor(temp_collapse, route_fields) as s_cursor:
            for row in s_cursor:
                i_cursor.insertRow(row)

    del i_cursor

    # merge contiguous line segments with common attributes; now that dual
    # carriageways have been collapsed the data can be reduced to fewer segments
    dissolve_fields = ['route_id', 'serv_level', 'route_type']
    geom_class = 'SINGLE_PART'
    line_handling = 'UNSPLIT_LINES'
    management.Dissolve(collapsed_routes,
                        dissolved_routes,
                        dissolve_fields,
                        multi_part=geom_class,
                        unsplit_lines=line_handling)
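# generateMatchCode() is defined elsewhere in this project; a plausible
# minimal version (an assumption, not the actual implementation) only has to
# supply the merge_id field that MergeDividedRoads requires, where a nonzero
# code marks a feature as eligible to merge and 0 excludes it:
def generateMatchCode():
    existing = [f.name for f in arcpy.ListFields(distinct_routes)]
    if 'merge_id' not in existing:
        management.AddField(distinct_routes, 'merge_id', 'LONG')
    # give every feature the same nonzero code so parallel carriageways of
    # the same route qualify for merging
    management.CalculateField(distinct_routes, 'merge_id', '1', 'PYTHON_9.3')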
Example #28
def gpxToPoints(gpxfile, outFC):
    ''' This is called by the __main__ if run from a tool or at the command line
    '''

    # Set the tree to the input GPX file
    #
    tree = ElementTree.parse(gpxfile)

    global TOPOGRAFIX_NS
    TOPOGRAFIX_NS = ''
    TOPOGRAFIX_NS10 = './/{http://www.topografix.com/GPX/1/0}'
    TOPOGRAFIX_NS11 = './/{http://www.topografix.com/GPX/1/1}'

    badPt = 0

    # Inspecting the GPX file determines and sets the appropriate namespace. If neither
    # 1.0 nor 1.1 is found, empty output will be generated
    #
    for TRKorWPT in ['wpt', 'trk', 'rte']:
        if tree.findall(TOPOGRAFIX_NS10 + TRKorWPT):
            TOPOGRAFIX_NS = TOPOGRAFIX_NS10
        elif tree.findall(TOPOGRAFIX_NS11 + TRKorWPT):
            TOPOGRAFIX_NS = TOPOGRAFIX_NS11

    if TOPOGRAFIX_NS == '':
        arcpy.AddIDMessage("Warning", 1202)

    # Create the output feature class in WGS84
    #
    arcpy.CreateFeatureclass_management(os.path.dirname(outFC),
                                        os.path.basename(outFC), 'POINT', '',
                                        'DISABLED', 'ENABLED', 4326)

    # Join fields to the feature class, using ExtendTable
    inarray = numpy.array([],
                          numpy.dtype([
                              ('intfield', numpy.int32),
                              # give the strings an explicit width; a bare
                              # '|S' creates zero-length text fields
                              ('Name', '|S255'),
                              ('Descript', '|S255'),
                              ('Type', '|S255'),
                              ('Comment', '|S255'),
                              ('Symbol', '|S255'),
                              ('DateTimeS', '|S64'),
                              ('Elevation', numpy.float),
                              ('X', numpy.float),
                              ('Y', numpy.float),
                          ]))

    arcpy.da.ExtendTable(outFC, "OID@", inarray, "intfield")

    rowsDA = da.InsertCursor(outFC, [
        'Name', 'Descript', 'Type', 'Comment', 'Symbol', 'DateTimeS',
        'Elevation', 'X', 'Y', 'SHAPE@X', 'SHAPE@Y', 'SHAPE@Z'
    ])

    # Loop over each point in the tree and put the information inside a new row
    #
    for index, trkPoint in enumerate(GeneratePointFromXML(tree)):
        if trkPoint.asPoint() is not None:
            rowsDA.insertRow([
                trkPoint.name, trkPoint.desc, trkPoint.gpxtype, trkPoint.cmt,
                trkPoint.sym, trkPoint.t, trkPoint.z, trkPoint.x, trkPoint.y,
                trkPoint.x, trkPoint.y, trkPoint.z
            ])
        else:
            badPt += 1

    if badPt > 0:
        arcpy.AddIDMessage("WARNING", 1201, badPt, index + 1)

    if tree:
        del tree
    if rowsDA:
        del rowsDA

    # Try to create a DateTime field of Date-type for non-shapefile output
    #
    if not outFC.lower().endswith(".shp"):
        try:
            arcpy.ConvertTimeField_management(outFC, 'DateTimeS',
                                              'yyyy-MM-ddTHH:mm:ssZ',
                                              "DateTime")

        except:
            arcpy.AddIDMessage("WARNING", 1227)

            try:
                arcpy.DeleteField_management(outFC, "DateTime")
            except:
                pass
Example #29
import arcpy as arc
from arcpy import env
from arcpy import da
# set the environment variable
env.workspace = "C:/EsriTraining/PythonGP10_0/Data/SanJuan.gdb"
# create an insert cursor; da cursors take an explicit field list
rows = da.InsertCursor("Plants", ["PLANT_NAME"])
# insert a new row with the plant name set (da cursors have no newRow();
# rows are plain sequences matching the field list)
rows.insertRow(["Canada Thistle"])

del rows

# getting and setting values is done through an update cursor: read from
# and assign to the row sequence, then call updateRow()
with da.UpdateCursor("Plants", ["DISTANCE"]) as u_rows:
    for u_row in u_rows:
        # can get the value of a row
        fieldVal = u_row[0]
        # can set value for a row
        u_row[0] = 1000
        # set row value to null
        u_row[0] = None
        u_rows.updateRow(u_row)

# initiate the cursor for inserting geometries
campsites = da.InsertCursor("campsites", ["SHAPE@XY"])
# create data to insert into the table
pnt = arc.Point(242340, 4165468)
# insert the point as the new feature's geometry
campsites.insertRow([(pnt.X, pnt.Y)])

del campsites
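# Reading the inserted rows back with a search cursor (same workspace
# assumption as above); da.SearchCursor also takes an explicit field list:
with da.SearchCursor("Plants", ["PLANT_NAME"]) as s_rows:
    for s_row in s_rows:
        print(s_row[0])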
Example #30
def main(*argv):
    """ main driver of program """
    try:
        #   User Inputs
        #
        #inputFC = argv[0]
        inputFD = argv[0]  #os.path.split(inputFC)[0]
        #inputFD = argv[0]#
        #Spreadsheet
        filename = argv[1]  #
        #Check Type
        tabname = argv[2]  #
        #Output GDB
        output_fcs = argv[3]

        #alias_table = get_field_alias(inputFC)
        #arcpy.AddMessage(alias_table)
        fc_domain_dict = get_fc_domains(os.path.dirname(inputFD))
        arcpy.AddMessage(os.path.dirname(inputFD))
        arcpy.AddMessage(fc_domain_dict)

        fcs = os.path.basename(output_fcs)
        outputGDB = os.path.dirname(output_fcs)  #argv[3]#
        #  Local Variables
        #
        error_fcs = {}
        empty = (-999999, '', None, 'noInformation', 'None', 'Null', 'NULL',
                 -999999.0)
        #  Logic
        #
        now = datetime.datetime.now()
        if outputGDB is None or \
           outputGDB == "" or \
           outputGDB == "#":
            outputGDB = env.scratchGDB
        if not arcpy.Exists(outputGDB):
            arcpy.CreateFileGDB_management(
                out_folder_path=os.path.dirname(outputGDB),
                out_name=os.path.basename(outputGDB))
        arcpy.AddMessage("Beginning null attribute check.")
        env.workspace = inputFD
        specificAttributeDict, attrCheck = create_attr_dict(filename, tabname)
        if "Crv" in fcs:
            pnt_fc = fcs.replace("Crv", "Pnt")
            srf_fc = fcs.replace("Crv", "Srf")
        else:
            pnt_fc = fcs + "Pnt"
            srf_fc = fcs + "Srf"
        errorFCs = [
            [os.path.join(outputGDB, pnt_fc), "POINT"
             ],  # "FindTdsErrorPnt_"+attrCheck.replace('-','_')), "POINT"],
            [os.path.join(outputGDB, fcs), "POLYLINE"
             ],  #"FindTdsErrorCrv_"+attrCheck.replace('-','_')), "POLYLINE"],
            [os.path.join(outputGDB, srf_fc), "POLYGON"]
        ]  #"FindTdsErrorSrf_"+attrCheck.replace('-','_')), "POLYGON"]]
        desc = arcpy.Describe(inputFD)
        if desc.dataType.lower() == "featuredataset":
            sr = arcpy.Describe(inputFD).spatialReference
        else:
            sr = None
        for fc in errorFCs:
            error_fcs[fc[1]] = create_error_fc(outFC=fc[0],
                                               geometryType=fc[1],
                                               sr=sr)
            del fc
        del errorFCs
        del sr
        edit = da.Editor(outputGDB)
        edit.startEditing(False, True)
        edit.startOperation()
        pntInsert = da.InsertCursor(error_fcs['POINT'], [
            "SHAPE@", "DEFICIENCY", "FEATURE_CLASS", "SUBTYPE", "ORIG_OID",
            "DEFICIENCY_CNT"
        ])
        crvInsert = da.InsertCursor(error_fcs['POLYLINE'], [
            "SHAPE@", "DEFICIENCY", "FEATURE_CLASS", "SUBTYPE", "ORIG_OID",
            "DEFICIENCY_CNT"
        ])
        srfInsert = da.InsertCursor(error_fcs['POLYGON'], [
            "SHAPE@", "DEFICIENCY", "FEATURE_CLASS", "SUBTYPE", "ORIG_OID",
            "DEFICIENCY_CNT"
        ])
        for fc in arcpy.ListFeatureClasses():  #[os.path.split(inputFC)[1]]: #
            arcpy.AddMessage("Looking at: %s" % fc)
            alias_table = get_field_alias(fc)
            arcpy.AddMessage(alias_table)
            stList = unique_values(os.path.join(inputFD, fc), "F_CODE")
            errorCount = 0
            if len(stList) > 0:
                field_names_lookup = {field.name : field.type \
                                      for field in arcpy.ListFields(os.path.join(inputFD, fc)) \
                                      if field.type not in ['Blob', 'Geometry', 'OID', 'Raster']}
                field_names_lookup['SHAPE@'] = 'Geometry'
                field_names_lookup['OID@'] = 'OID'
                for s in stList:
                    if s in specificAttributeDict:
                        sub_sql = " or ".join([assemble_sql(field_name=f,
                                                field_type=field_names_lookup[f]) \
                                               for f in specificAttributeDict[s] ])
                        sql = "F_CODE = '{fcode}' and ({subsql})".format(
                            fcode=s, subsql=sub_sql)
                        with da.SearchCursor(os.path.join(inputFD, fc),
                                             field_names_lookup.keys(),
                                             where_clause=sql) as rows:
                            index_lookup = None
                            for row in rows:
                                if index_lookup is None:
                                    index_lookup = {key:rows.fields.index(key) \
                                                    for key in rows.fields}
                                vals = [alias_table[i] for i in specificAttributeDict[s] \
                                        if row[index_lookup[i]] in empty]
                                if len(vals) > 0:
                                    fs = ",".join(vals)
                                    oid = row[index_lookup["OID@"]]
                                    #arcpy.AddMessage(fc_domain_dict[s])
                                    ERROR = str(fc) + r" | " + str(
                                        fc_domain_dict[s]) + r" | OID: " + str(
                                            oid) + r" | " + fs
                                    irow = [
                                        row[index_lookup['SHAPE@']], ERROR, fc,
                                        fc_domain_dict[s], oid,
                                        len(vals)
                                    ]
                                    if fc[-3:].lower() == "pnt":
                                        pntInsert.insertRow(irow)
                                    elif fc[-3:].lower() == "crv":
                                        crvInsert.insertRow(irow)
                                    elif fc[-3:].lower() == "srf":
                                        srfInsert.insertRow(irow)
                                    errorCount += 1
                                    del irow
                                    del oid
                                    del ERROR
                                    del fs
                                del vals
                                del row

                        not_sub_sql = " and ".join([assemble_sql(field_name=f,
                                                field_type=field_names_lookup[f],
                                                not_in=True) \
                                               for f in specificAttributeDict[s] ])

                        not_sql = "F_CODE = '{fcode}' and ({subsql})".format(
                            fcode=s, subsql=not_sub_sql)
                        with da.SearchCursor(os.path.join(inputFD, fc),
                                             field_names_lookup.keys(),
                                             where_clause=not_sql) as rows:
                            index_lookup = None
                            for row in rows:
                                if index_lookup is None:
                                    index_lookup = {key:rows.fields.index(key) \
                                                    for key in rows.fields}
                                vals = [i for i in specificAttributeDict[s] \
                                        if row[index_lookup[i]] in empty]
                                fs = "N/A"
                                oid = row[index_lookup["OID@"]]
                                ERROR = str(fc) + r" | " + str(
                                    fc_domain_dict[s]) + r" | OID: " + str(
                                        oid) + r" | " + fs
                                irow = [
                                    row[index_lookup['SHAPE@']], ERROR, fc,
                                    fc_domain_dict[s], oid, 0
                                ]
                                if fc[-3:].lower() == "pnt":
                                    pntInsert.insertRow(irow)
                                elif fc[-3:].lower() == "crv":
                                    crvInsert.insertRow(irow)
                                elif fc[-3:].lower() == "srf":
                                    srfInsert.insertRow(irow)
                                errorCount += 1
                                del irow
                                del oid
                                del ERROR
                                del fs
                                del vals
                                del row
                            del index_lookup
                    del s
                del field_names_lookup
            if errorCount > 0:
                arcpy.AddMessage("       Errors in " + fc + ": " +
                                 str(errorCount))
            del stList
        edit.stopOperation()
        edit.stopEditing(True)
        del pntInsert, crvInsert, srfInsert
        del edit
        arcpy.AddMessage("Total Processing time: %s" %
                         str(datetime.datetime.now() - now))
        arcpy.SetParameterAsText(4, ";".join(error_fcs.values()))
    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
        arcpy.AddError("ArcPy Error Message: %s" % arcpy.GetMessages(2))
    except FunctionError as f_e:
        messages = f_e.args[0]
        arcpy.AddError("error in function: %s" % messages["function"])
        arcpy.AddError("error on line: %s" % messages["line"])
        arcpy.AddError("error in file name: %s" % messages["filename"])
        arcpy.AddError("with error message: %s" % messages["synerror"])
        arcpy.AddError("ArcPy Error Message: %s" % messages["arc"])
    except:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)