Example #1
def RotateFeatureClass(inputFC, outputFC, angle=0, pivot_point=None):
    """Rotate Feature Class

    inputFC     Input features
    outputFC    Output feature class
    angle       Angle to rotate, in degrees
    pivot_point X,Y coordinates (as space-separated string)
                Default is lower-left of inputFC

    Because the output feature class no longer has "real" xy locations
    after rotation, it has no coordinate system defined.
    """
    def RotateXY(x, y, xc=0, yc=0, angle=0, units="DEGREES"):
        """Rotate an xy cooordinate about a specified origin
        x,y      xy coordinates
        xc,yc   center of rotation
        angle   angle
        units    "DEGREES" (default) or "RADIANS"
        """
        x = x - xc
        y = y - yc
        # make angle clockwise (like Rotate_management)
        angle = angle * -1
        if units == "DEGREES":
            angle = math.radians(angle)
        xr = (x * math.cos(angle)) - (y * math.sin(angle)) + xc
        yr = (x * math.sin(angle)) + (y * math.cos(angle)) + yc
        return xr, yr
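
    # For example, rotating the point (1, 0) by 90 degrees clockwise about the
    # origin gives approximately (0.0, -1.0):
    #   RotateXY(1, 0, 0, 0, 90)  ->  (~0.0, -1.0)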

    # temp names for cleanup
    env_file = None
    lyrFC, lyrTmp = [None] * 2  # layers
    tmpFC = None  # temp dataset

    try:
        # process parameters
        try:
            xcen, ycen = [float(xy) for xy in pivot_point.split()]
            pivot_point = xcen, ycen
        except:
            # if pivot point was not specified, get it from
            # the lower-left corner of the feature class
            ext = arcpy.Describe(inputFC).extent
            xcen, ycen = ext.XMin, ext.YMin
            pivot_point = xcen, ycen

        angle = float(angle)

        # set up environment
        env_file = arcpy.CreateScratchName("xxenv", ".xml", "file",
                                           os.environ["TEMP"])
        arcpy.gp.SaveSettings(env_file)

        WKS = env.workspace
        if not WKS:
            if os.path.dirname(outputFC):
                WKS = os.path.dirname(outputFC)
            else:
                WKS = os.path.dirname(arcpy.Describe(inputFC).catalogPath)
        env.workspace = env.scratchWorkspace = WKS

        # Disable any GP environment clips
        arcpy.ClearEnvironment("extent")

        # get feature class properties
        lyrFC = 'lyrFC'
        arcpy.MakeFeatureLayer_management(inputFC, lyrFC)
        dFC = arcpy.Describe(lyrFC)
        shpField = dFC.shapeFieldName
        shpType = dFC.shapeType

        # create temp feature class
        tmpFC = arcpy.CreateScratchName("xxfc", "", "featureclass")

        # Create Feature Class using inputFC as template (so will have "Grid" field)
        arcpy.CreateFeatureclass_management(os.path.dirname(tmpFC),
                                            os.path.basename(tmpFC), shpType,
                                            inputFC)
        lyrTmp = 'lyrTmp'
        arcpy.MakeFeatureLayer_management(tmpFC, lyrTmp)

        ## WORKAROUND: removed below because it was creating a schema lock until Pro/arcpy exited
        ## set up grid field
        #gridField = "Grid"
        #arcpy.AddField_management(lyrTmp, gridField, "TEXT")
        #arcpy.DeleteField_management(lyrTmp, 'ID')

        # rotate the feature class coordinates for each feature, and each feature part

        # open read and write cursors
        updateFields = ['SHAPE@', 'Grid']
        arcpy.AddMessage('Rotating temporary dataset')

        parts = arcpy.Array()
        rings = arcpy.Array()
        ring = arcpy.Array()

        with arcpy.da.SearchCursor(lyrFC, updateFields) as inRows,\
          arcpy.da.InsertCursor(lyrTmp, updateFields) as outRows:
            for inRow in inRows:
                shp = inRow[0]  # SHAPE
                p = 0
                for part in shp:
                    for pnt in part:
                        if pnt:
                            x, y = RotateXY(pnt.X, pnt.Y, xcen, ycen, angle)
                            ring.add(arcpy.Point(x, y, pnt.ID))
                        else:
                            # if we have a ring, save it
                            if len(ring) > 0:
                                rings.add(ring)
                                ring.removeAll()
                    # we have our last ring, add it
                    rings.add(ring)
                    ring.removeAll()
                    # if only one, remove nesting
                    if len(rings) == 1: rings = rings.getObject(0)
                    parts.add(rings)
                    rings.removeAll()
                    p += 1

                # if only one, remove nesting
                if len(parts) == 1: parts = parts.getObject(0)
                if dFC.shapeType == "Polyline":
                    shp = arcpy.Polyline(parts)
                else:
                    shp = arcpy.Polygon(parts)
                parts.removeAll()

                gridValue = inRow[1]  # GRID string
                outRows.insertRow([shp, gridValue])  # write row to output

        arcpy.AddMessage('Merging temporary, rotated dataset with output')
        env.qualifiedFieldNames = False
        arcpy.Merge_management(lyrTmp, outputFC)

    except MsgError as xmsg:
        arcpy.AddError(str(xmsg))
    except arcpy.ExecuteError:
        tbinfo = traceback.format_tb(sys.exc_info()[2])[0]
        arcpy.AddError(tbinfo.strip())
        arcpy.AddError(arcpy.GetMessages())
        numMsg = arcpy.GetMessageCount()
        for i in range(0, numMsg):
            arcpy.AddReturnMessage(i)
    except Exception as xmsg:
        tbinfo = traceback.format_tb(sys.exc_info()[2])[0]
        arcpy.AddError(tbinfo + str(xmsg))
    finally:
        # reset environment
        if env_file: arcpy.gp.LoadSettings(env_file)
        # Clean up temp files
        for f in [lyrFC, lyrTmp, tmpFC, env_file]:
            try:
                if f and arcpy.Exists(f):
                    arcpy.Delete_management(f)
            except:
                pass

        # return pivot point
        try:
            pivot_point = "{0} {1}".format(*pivot_point)
        except:
            pivot_point = None

        return pivot_point
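
A minimal usage sketch (hypothetical paths; the input is assumed to be a polygon grid carrying the "Grid" text field that the cursors above copy across):

import arcpy
# rotate the grid 30 degrees clockwise about its lower-left corner
pivot = RotateFeatureClass(r"C:\data\grid.shp", r"C:\data\grid_rot30.shp", angle=30)
arcpy.AddMessage("Rotation pivot used: {}".format(pivot))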
    def create_info_table(self,
                          raster_join_field,
                          attribute_file,
                          attribute_join_field,
                          drop_fields=None):
        """
        Create ArcInfo table from attribute csv file

        Parameters
        ----------
        raster_join_field : str
            field in raster to use for joining to attribute data
        attribute_file : str
            name and path of file containing attribute information
        attribute_join_field : str
            field in attribute file to use to join to raster
        drop_fields : list of str
            fields in the attribute file to drop before join to raster

        Returns
        -------
        name of temp ArcInfo table, semicolon-delimited string of fields to join from the info table

        """
        print('Building info table from attribute file')

        # Crosswalk of numpy types to ESRI types for numeric data
        numpy_to_esri_type = {
            ('b', 1): 'SHORT',
            ('i', 1): 'SHORT',
            ('i', 2): 'SHORT',
            ('i', 4): 'LONG',
            ('f', 4): 'FLOAT',
            ('f', 8): 'DOUBLE',
        }
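        # e.g. a numpy int32 column has kind 'i' and itemsize 4, so it maps to
        # LONG; a float64 column ('f', 8) maps to DOUBLE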

        # Read the CSV file into a recarray
        ra = mlab.csv2rec(attribute_file)
        col_names = [str(x).upper() for x in ra.dtype.names]
        ra.dtype.names = col_names

        # If there are fields to drop, do that now and get a new recarray
        if drop_fields is not None:

            # Ensure that the drop fields are actually fields in the current
            # recarray
            drop_fields = [x for x in drop_fields if x in ra.dtype.names]

            # Create a new recarray with these fields omitted
            ra = mlab.rec_drop_fields(ra, drop_fields)
            col_names = list(ra.dtype.names)

        # Get the column types and formats
        col_types = [(ra.dtype[i].kind, ra.dtype[i].itemsize)
                     for i in range(len(ra.dtype))]
        formats = [ra.dtype[i].str for i in range(len(ra.dtype))]

        # Sanitize column names
        #   No field name may be longer than 16 chars
        #   No field name can start with a number
        for i in range(len(col_names)):
            if len(col_names[i]) > 16:
                col_names[i] = col_names[i][0:16]
            if col_names[i][0].isdigit():
                col_names[i] = col_names[i].lstrip('0123456789')

        # Reset the names for the recarray
        ra.dtype.names = col_names

        # Sanitize the data
        # Change True/False to 1/0 to be read into short type
        bit_fields = [(i, n)
                      for (i, (n, t)) in enumerate(zip(col_names, col_types))
                      if t[0] == 'b']
        if bit_fields:
            for rec in ra:
                for (col_num, field) in bit_fields:
                    value = getattr(rec, field)
                    if value:
                        setattr(rec, field, 1)
                    else:
                        setattr(rec, field, 0)

            # Change the bit fields to be short integer
            for (col_num, field) in bit_fields:
                formats[col_num] = '<i2'

        # Create a sanitized recarray and output back to CSV
        temp_csv = os.path.join(env.workspace, 'xxtmp.csv')
        ra2 = np.rec.fromrecords(ra, names=col_names, formats=formats)
        mlab.rec2csv(ra2, temp_csv)

        # Create a scratch name for the temporary ArcInfo table
        temp_table = arcpy.CreateScratchName('', '', 'ArcInfoTable')

        # Create the ArcInfo table and add the fields
        table_name = os.path.basename(temp_table)
        arcpy.CreateTable_management(env.workspace, table_name)
        for (n, t) in zip(col_names, col_types):
            try:
                esri_type = numpy_to_esri_type[t]
                arcpy.AddField_management(temp_table, n, esri_type)
            except KeyError:
                if t[0] == 'S':
                    arcpy.AddField_management(temp_table, n, 'TEXT', '#', '#',
                                              t[1])
                else:
                    err_msg = 'Type not found for ' + str(t)
                    print(err_msg)
                    continue

        # Append the records from the CSV file to the temporary INFO table
        arcpy.Append_management(temp_csv, temp_table, 'NO_TEST')

        # Strip out the join field from the names if they are the same
        raster_join_field = raster_join_field.upper()
        attribute_join_field = attribute_join_field.upper()
        if raster_join_field == attribute_join_field:
            col_names.remove(attribute_join_field)

        # Create a semi-colon delimited string of the fields we want to join
        field_list = ';'.join(col_names)

        # Clean up
        os.remove(temp_csv)

        return temp_table, field_list
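
A minimal calling sketch (hypothetical instance, raster name, and paths; it also assumes the matplotlib.mlab csv2rec/rec2csv helpers used above are still available in the installed matplotlib):

import arcpy
# obj is assumed to be an instance of the class this method belongs to
info_table, join_fields = obj.create_info_table(
    raster_join_field='VALUE',
    attribute_file=r'C:\data\attributes.csv',
    attribute_join_field='VALUE',
    drop_fields=['NOTES'])
# join the temporary INFO table back onto the raster attribute table
arcpy.JoinField_management('my_raster', 'VALUE', info_table, 'VALUE', join_fields)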
Example #3
def create_input_nc(start_date,
                    years,
                    cellsize,
                    basin_shp,
                    p_path,
                    et_path,
                    eto_path,
                    lai_path,
                    swi_path,
                    swio_path,
                    swix_path,
                    qratio_path,
                    rainydays_path,
                    thetasat_ras,
                    rootdepth_ras,
                    input_nc,
                    epsg=4326,
                    bbox=None):
    """
    Creates the input netcdf file required to run waterpix
    """
    # Script parameters
    print "Variable\tRaster"
    arcpy.CheckOutExtension('spatial')
    if bbox:
        latlim = [bbox[1], bbox[3]]
        lonlim = [bbox[0], bbox[2]]
    else:
        shp_extent = arcpy.Describe(basin_shp).extent
        latlim = [shp_extent.YMin, shp_extent.YMax]
        lonlim = [shp_extent.XMin, shp_extent.XMax]
    arcpy.env.extent = arcpy.Extent(lonlim[0], latlim[0], lonlim[1], latlim[1])
    arcpy.env.cellSize = cellsize

    time_range = pd.date_range(start_date, periods=12 * years, freq='MS')
    time_ls = [d.strftime('%Y%m') for d in time_range]
    time_dt = [pd.to_datetime(i, format='%Y%m') for i in time_ls]

    time_n = len(time_ls)

    years_ls = set()
    years_ls = [
        i.year for i in time_dt
        if i.year not in years_ls and not years_ls.add(i.year)
    ]
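    # ordered de-duplication: set.add() returns None, so the condition keeps only
    # the first occurrence of each year, e.g. 200501..200612 -> [2005, 2006]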

    time_indeces = {}

    for j, item in enumerate(years_ls):
        temp_ls = [
            int(i.strftime('%Y%m')) for i in pd.date_range(
                str(item) + '0101', str(item) + '1231', freq='MS')
        ]
        time_indeces[item] = [time_ls.index(str(i)) for i in temp_ls]

    for key in time_indeces.keys():
        if time_indeces[key] != range(time_indeces[key][0],
                                      time_indeces[key][-1] + 1):
            raise Exception('The year {0} in the netcdf file is incomplete'
                            ' or the dates are non-consecutive'.format(key))

    all_paths = {
        'p': p_path,
        'et': et_path,
        'eto': eto_path,
        'lai': lai_path,
        'swi': swi_path,
        'swio': swio_path,
        'swix': swix_path,
        'qratio': qratio_path,
        'rainydays': rainydays_path
    }

    # Latitude and longitude
    lat_ls = pd.np.arange(latlim[0] + 0.5 * cellsize,
                          latlim[1] + 0.5 * cellsize, cellsize)
    lat_ls = lat_ls[::-1]  # flip so the first row is the northernmost latitude (matches raster/NumPy row order)
    lon_ls = pd.np.arange(lonlim[0] + 0.5 * cellsize,
                          lonlim[1] + 0.5 * cellsize, cellsize)
    lat_n = len(lat_ls)
    lon_n = len(lon_ls)
    spa_ref = arcpy.SpatialReference(epsg)
    projection = spa_ref.exportToString()
    ll_corner = arcpy.Point(lonlim[0], latlim[0])

    # Snap raster
    temp_ras = arcpy.NumPyArrayToRaster(pd.np.zeros((lat_n, lon_n)), ll_corner,
                                        cellsize, cellsize)
    scratch_ras = arcpy.CreateScratchName('ras_', '.tif', '',
                                          arcpy.env.scratchFolder)
    temp_ras.save(scratch_ras)
    arcpy.management.DefineProjection(scratch_ras, spa_ref)
    arcpy.env.snapRaster = scratch_ras

    # Basin mask
    basin_ras = arcpy.CreateScratchName('bas_', '.tif', '',
                                        arcpy.env.scratchFolder)
    buff_shp = arcpy.CreateScratchName('bas_', '.shp', '',
                                       arcpy.env.scratchFolder)
    arcpy.analysis.Buffer(basin_shp, buff_shp, 2 * cellsize, 'FULL', 'ROUND',
                          'NONE', '#', 'PLANAR')
    arcpy.conversion.FeatureToRaster(buff_shp, "FID", basin_ras, cellsize)

    # Create NetCDF file
    nc_file = netCDF4.Dataset(input_nc, 'w', format="NETCDF4")
    nc_file.set_fill_on()

    # Create dimensions
    lat_dim = nc_file.createDimension('latitude', lat_n)
    lon_dim = nc_file.createDimension('longitude', lon_n)
    month_dim = nc_file.createDimension('time_yyyymm', time_n)
    year_dim = nc_file.createDimension('time_yyyy', len(years_ls))

    # Create NetCDF variables
    crs_var = nc_file.createVariable('crs', 'i', (), fill_value=-9999)
    crs_var.standard_name = 'crs'
    crs_var.grid_mapping_name = 'latitude_longitude'
    crs_var.crs_wkt = projection

    lat_var = nc_file.createVariable('latitude',
                                     'f8', ('latitude'),
                                     fill_value=-9999)
    lat_var.units = 'degrees_north'
    lat_var.standard_name = 'latitude'

    lon_var = nc_file.createVariable('longitude',
                                     'f8', ('longitude'),
                                     fill_value=-9999)
    lon_var.units = 'degrees_east'
    lon_var.standard_name = 'longitude'

    month_var = nc_file.createVariable('time_yyyymm',
                                       'l', ('time_yyyymm'),
                                       fill_value=-9999)
    month_var.standard_name = 'time'
    month_var.format = 'YYYYMM'

    year_var = nc_file.createVariable('time_yyyy',
                                      'l', ('time_yyyy'),
                                      fill_value=-9999)
    year_var.standard_name = 'time'
    year_var.format = 'YYYY'

    # Variables
    p_var = nc_file.createVariable('Precipitation_M',
                                   'f8',
                                   ('time_yyyymm', 'latitude', 'longitude'),
                                   fill_value=-9999)
    p_var.long_name = 'Precipitation'
    p_var.units = 'mm/month'

    py_var = nc_file.createVariable('Precipitation_Y',
                                    'f8',
                                    ('time_yyyy', 'latitude', 'longitude'),
                                    fill_value=-9999)
    py_var.long_name = 'Precipitation'
    py_var.units = 'mm/year'

    et_var = nc_file.createVariable('Evapotranspiration_M',
                                    'f8',
                                    ('time_yyyymm', 'latitude', 'longitude'),
                                    fill_value=-9999)
    et_var.long_name = 'Evapotranspiration'
    et_var.units = 'mm/month'

    ety_var = nc_file.createVariable('Evapotranspiration_Y',
                                     'f8',
                                     ('time_yyyy', 'latitude', 'longitude'),
                                     fill_value=-9999)
    ety_var.long_name = 'Evapotranspiration'
    ety_var.units = 'mm/year'

    eto_var = nc_file.createVariable('ReferenceET_M',
                                     'f8',
                                     ('time_yyyymm', 'latitude', 'longitude'),
                                     fill_value=-9999)
    eto_var.long_name = 'Reference Evapotranspiration'
    eto_var.units = 'mm/month'

    lai_var = nc_file.createVariable('LeafAreaIndex_M',
                                     'f8',
                                     ('time_yyyymm', 'latitude', 'longitude'),
                                     fill_value=-9999)
    lai_var.long_name = 'Leaf Area Index'
    lai_var.units = 'm2/m2'

    swi_var = nc_file.createVariable('SWI_M',
                                     'f8',
                                     ('time_yyyymm', 'latitude', 'longitude'),
                                     fill_value=-9999)
    swi_var.long_name = 'Soil Water Index - Monthly mean'
    swi_var.units = '%'

    swio_var = nc_file.createVariable('SWIo_M',
                                      'f8',
                                      ('time_yyyymm', 'latitude', 'longitude'),
                                      fill_value=-9999)
    swio_var.long_name = 'Soil water index - First day of the month'
    swio_var.units = '%'

    swix_var = nc_file.createVariable('SWIx_M',
                                      'f8',
                                      ('time_yyyymm', 'latitude', 'longitude'),
                                      fill_value=-9999)
    swix_var.long_name = 'Soil water index - Last day of the month'
    swix_var.units = '%'

    qratio_var = nc_file.createVariable('RunoffRatio_Y',
                                        'f8',
                                        ('time_yyyy', 'latitude', 'longitude'),
                                        fill_value=-9999)
    qratio_var.long_name = 'Runoff ratio'
    qratio_var.units = '-'

    rainydays_var = nc_file.createVariable(
        'RainyDays_M',
        'f8', ('time_yyyymm', 'latitude', 'longitude'),
        fill_value=-9999)
    rainydays_var.long_name = 'Number of rainy days per month'
    rainydays_var.units = 'No. rainy days/month'

    thetasat_var = nc_file.createVariable('SaturatedWaterContent',
                                          'f8', ('latitude', 'longitude'),
                                          fill_value=-9999)
    thetasat_var.long_name = 'Saturated water content (top soil)'
    thetasat_var.units = 'cm3/cm3'

    rootdepth_var = nc_file.createVariable('RootDepth',
                                           'f8', ('latitude', 'longitude'),
                                           fill_value=-9999)
    rootdepth_var.long_name = 'Root depth'
    rootdepth_var.units = 'mm'

    basinmask_var = nc_file.createVariable('BasinBuffer',
                                           'l', ('latitude', 'longitude'),
                                           fill_value=0)
    basinmask_var.long_name = 'Basin buffer'

    # Load data
    lat_var[:] = lat_ls
    lon_var[:] = lon_ls
    month_var[:] = time_ls
    year_var[:] = years_ls

    # Static variables
    temp_dir = tempfile.mkdtemp()

    # Theta sat
    print "{0}\t{1}".format('thetasat', thetasat_ras)
    thetasat_temp = os.path.join(temp_dir, 'thetasat.tif')
    arcpy.management.Resample(thetasat_ras, thetasat_temp, cellsize)
    inp_ras = arcpy.Raster(thetasat_temp)

    array = arcpy.RasterToNumPyArray(inp_ras, ll_corner, lon_n, lat_n, -9999)
    thetasat_var[:, :] = array[:, :]

    # Root depth
    print "{0}\t{1}".format('rootdepth', rootdepth_ras)
    rootdepth_temp = os.path.join(temp_dir, 'rootdepth.tif')
    arcpy.management.Resample(rootdepth_ras, rootdepth_temp, cellsize)
    inp_ras = arcpy.Raster(rootdepth_temp)

    array = arcpy.RasterToNumPyArray(inp_ras, ll_corner, lon_n, lat_n, -9999)
    rootdepth_var[:, :] = array[:, :]

    # Basin mask
    inp_ras = arcpy.sa.Con(arcpy.sa.IsNull(arcpy.Raster(basin_ras)), 0, 1)

    array = arcpy.RasterToNumPyArray(inp_ras, ll_corner, lon_n, lat_n, -9999)
    basinmask_var[:, :] = array[:, :]

    # Dynamic variables
    for var in ['p', 'et', 'eto', 'lai', 'swi', 'swio', 'swix', 'rainydays']:
        # Make temp directory
        temp_dir2 = tempfile.mkdtemp()
        for yyyymm in time_ls:
            yyyy = yyyymm[:4]
            mm = yyyymm[-2:]
            ras = all_paths[var].format(yyyy=yyyy, mm=mm)
            print "{0}\t{1}".format(var, ras)
            arcpy.management.Resample(
                ras, os.path.join(temp_dir2, os.path.basename(ras)), cellsize,
                'NEAREST')
            inp_ras = arcpy.Raster(
                os.path.join(temp_dir2, os.path.basename(ras)))
            array = arcpy.RasterToNumPyArray(inp_ras, ll_corner, lon_n, lat_n,
                                             pd.np.nan)
            t_index = time_ls.index(yyyymm)
            exec('{0}_var[t_index, :, :] = array[:, :]'.format(var))
    # Runoff ratio
    temp_dir2 = tempfile.mkdtemp()
    for yyyy in years_ls:
        ras = all_paths['qratio'].format(yyyy=yyyy)
        print "{0}\t{1}".format('qratio', ras)
        arcpy.management.Resample(
            ras, os.path.join(temp_dir2, os.path.basename(ras)), cellsize,
            'NEAREST')
        inp_ras = arcpy.Raster(os.path.join(temp_dir2, os.path.basename(ras)))
        array = arcpy.RasterToNumPyArray(inp_ras, ll_corner, lon_n, lat_n,
                                         pd.np.nan)
        y_index = years_ls.index(yyyy)
        qratio_var[y_index, :, :] = array[:, :]
    # Calculate yearly rasters
    for yyyy in years_ls:
        yyyyi = years_ls.index(yyyy)
        ti1 = time_indeces[yyyy][0]
        ti2 = time_indeces[yyyy][-1] + 1

        py_var[yyyyi, :, :] = pd.np.sum(p_var[ti1:ti2, :, :], axis=0)
        ety_var[yyyyi, :, :] = pd.np.sum(et_var[ti1:ti2, :, :], axis=0)

    # Close file
    arcpy.env.extent = None
    arcpy.env.snapRaster = None
    arcpy.env.cellSize = 'MAXOF'
    nc_file.close()

    # Return
    return input_nc
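
A minimal calling sketch with hypothetical path templates; the monthly rasters are formatted with {yyyy} and {mm} and the runoff-ratio rasters with {yyyy} only, matching the .format() calls in the loops above:

input_nc = create_input_nc(
    start_date='2005-01-01',
    years=2,
    cellsize=0.05,
    basin_shp=r'C:\data\basin.shp',
    p_path=r'C:\data\p\P_{yyyy}{mm}.tif',
    et_path=r'C:\data\et\ET_{yyyy}{mm}.tif',
    eto_path=r'C:\data\eto\ETo_{yyyy}{mm}.tif',
    lai_path=r'C:\data\lai\LAI_{yyyy}{mm}.tif',
    swi_path=r'C:\data\swi\SWI_{yyyy}{mm}.tif',
    swio_path=r'C:\data\swio\SWIo_{yyyy}{mm}.tif',
    swix_path=r'C:\data\swix\SWIx_{yyyy}{mm}.tif',
    qratio_path=r'C:\data\qratio\Qratio_{yyyy}.tif',
    rainydays_path=r'C:\data\rainydays\RainyDays_{yyyy}{mm}.tif',
    thetasat_ras=r'C:\data\thetasat.tif',
    rootdepth_ras=r'C:\data\rootdepth.tif',
    input_nc=r'C:\data\basin_input.nc')
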
def get_path_residence_times (in_file, cost_rast, out_raster, t_diff_fld_name, workspace):
    
    if len (out_raster) == 0:
        arcpy.AddError ("Missing argument: out_rast")
        raise Exception
    if len (t_diff_fld_name) == 0:
        t_diff_fld_name = "T_DIFF_HRS"

    arcpy.env.overwriteOutput = True  #  This is underhanded.  It should be an argument.

    if arcpy.env.outputCoordinateSystem is None:
        arcpy.env.outputCoordinateSystem = cost_rast
    arcpy.AddMessage ("coordinate system is %s" % arcpy.env.outputCoordinateSystem.name)

    if len(workspace):
        arcpy.env.workspace = workspace
    if arcpy.env.workspace is None or len(arcpy.env.workspace) == 0:
        arcpy.env.workspace = os.getcwd()

    if '.gdb' in arcpy.env.workspace:
        arcpy.AddError (
            "Worskpace is a geodatabase.  " +
            "This brings too much pain for this script to work.\n" +
            "%s" % arcpy.env.workspace
        )
        raise WorkspaceIsGeodatabase


    r = Raster(cost_rast)
    
    if r.maximum == 0 and r.minimum == 0:
        arcpy.AddMessage ('Cost raster has only zero value.  Cannot calculate cost distances.')
        raise CostRasterIsZero

    size = r.height * r.width * 4
    if size > 2 * 1024 ** 3:
        import struct
        struct_size = struct.calcsize("P") * 8
        if struct_size == 32:
            size_in_gb = float (size) / (1024 ** 3)
            arcpy.AddMessage (
                'Cost raster exceeds 2 GiB in size (%s GiB).  This is too large for a 32 bit NumPy.' % size_in_gb
            )
            raise NumPyArrayExceedsSizeLimits

    if not check_points_are_in_cost_raster(in_file, cost_rast):
        arcpy.AddError ('One or more input points do not intersect the cost raster')
        raise PointNotOnRaster

    arcpy.env.snapRaster = cost_rast
    suffix = None
    wk = arcpy.env.workspace
    if not '.gdb' in wk:
        suffix = '.shp'


    ext = arcpy.env.extent
    if ext is None:
        arcpy.env.extent = r.extent

    arcpy.AddMessage ("Extent is %s" % arcpy.env.extent)

    arcpy.env.cellSize = r.meanCellWidth
    arcpy.AddMessage ("Cell size is %s" % arcpy.env.cellSize)
    cellsize_used = float (arcpy.env.cellSize)
    extent = arcpy.env.extent
    lower_left_coord = extent.lowerLeft
    
    arcpy.AddMessage ('Currently in directory: %s\n' % os.getcwd())
    arcpy.AddMessage ('Workspace is: %s' % arcpy.env.workspace)
    arcpy.AddMessage ("lower left is %s" % lower_left_coord)

    if arcpy.env.mask is None:
        arcpy.AddMessage ("Setting mask to %s" % cost_rast)
        arcpy.env.mask = cost_rast

    #  accumulated transits
    transit_array_accum = arcpy.RasterToNumPyArray (Raster(cost_rast) * 0)

    feat_layer = "feat_layer"
    arcmgt.MakeFeatureLayer(in_file, feat_layer)
    desc = arcpy.Describe (feat_layer)
    oid_fd_name = desc.OIDFieldName
    arcpy.AddMessage("oid_fd_name = %s" % oid_fd_name)

    #  variable name is redundant now??? - should all calls be to oid_fd_name?
    target_fld = oid_fd_name

    proc_layer = "process_layer"
    arcmgt.MakeFeatureLayer(in_file, proc_layer)
    rows = arcpy.SearchCursor(proc_layer)
    last_target = None

    for row_cur in rows:
        transit_time = row_cur.getValue (t_diff_fld_name)

        if last_target is None or transit_time == 0:
            message = 'Skipping %s = %s' % (oid_fd_name, row_cur.getValue(oid_fd_name))
            if transit_time == 0:
                message = message + "  Transit time is zero"
            arcpy.AddMessage(message)
            last_target = row_cur.getValue(target_fld)
            last_oid    = row_cur.getValue(oid_fd_name)
            continue

        arcpy.AddMessage ("Processing %s %i" % (oid_fd_name, row_cur.getValue(oid_fd_name)))

        arcmgt.SelectLayerByAttribute(
            feat_layer,
            "NEW_SELECTION",
            '%s = %s' % (target_fld, last_target)
        )
        backlink_rast  = arcpy.CreateScratchName("backlink")
        path_dist_rast = PathDistance(feat_layer, cost_rast, out_backlink_raster = backlink_rast)

        #  extract the distance from the last point
        shp = row_cur.shape
        centroid = shp.centroid
        (x, y) = (centroid.X, centroid.Y)
        result = arcmgt.GetCellValue(path_dist_rast, "%s %s" % (x, y), "1")
        res_val = result.getOutput(0)
        if res_val == "NoData":
            this_oid = row_cur.getValue(oid_fd_name)
            arcpy.AddMessage ("Got nodata for coordinate (%s, %s)" % (x, y))
            arcpy.AddMessage ("Is the path between features %s and %s wholly contained by the cost raster?" % (last_oid, this_oid))
            pras_name = "pth_%s_%s.tif" % (last_oid, this_oid)
            arcpy.AddMessage ("Attempting to save path raster as %s" % pras_name)
            try:
                path_dist_rast.save(pras_name)
            except Exception as e:
                arcpy.AddMessage (e)
            raise PathDistanceIsNoData
        try:
            path_distance = float (res_val)
        except:
            #  kludge around locale/radix issues 
            if "," in res_val:
                res_val = res_val.replace(",", ".")
                path_distance = float (res_val)
            else:
                raise
        arcpy.AddMessage("Path distance is %s\nTransit time is %s" % (path_distance, transit_time))

        #  get a raster of the path from origin to destination
        condition = '%s in (%i, %i)' % (oid_fd_name, last_oid, row_cur.getValue(oid_fd_name))
        dest_layer = "dest_layer" + str (last_oid)
        arcmgt.MakeFeatureLayer(in_file, dest_layer, where_clause = condition)

        count = arcmgt.GetCount(dest_layer)
        count = int (count.getOutput(0))
        if count == 0:
            raise NoFeatures("No features selected.  Possible coordinate system issues.\n" + condition)

        try:
            path_cost_rast = CostPath(dest_layer, path_dist_rast, backlink_rast)
            #path_dist_rast.save("xx_pr" + str (last_oid))
        except Exception as e:
            raise

        try:
            pcr_mask       = 1 - IsNull (path_cost_rast)
            #pcr_mask.save ("xx_pcr_mask" + str (last_oid))
            dist_masked    = path_dist_rast * pcr_mask
            path_array     = arcpy.RasterToNumPyArray(dist_masked, nodata_to_value = -9999)
            path_array_idx = numpy.where(path_array > 0)
            transit_array  = numpy.zeros_like(path_array)  #  past experience suggests we might need to use a different approach to guarantee we get zeroes
        except:
            raise

        path_sum = None
        arcpy.AddMessage ("processing %i cells of path raster" % (len(path_array_idx[0])))

        if path_distance == 0 or not len(path_array_idx[0]):
            path_sum = 1 #  stayed in the same cell
            mask_array = arcpy.RasterToNumPyArray(pcr_mask, nodata_to_value = -9999)
            mask_array_idx = numpy.where(mask_array == 1)
            i = mask_array_idx[0][0]
            j = mask_array_idx[1][0]
            transit_array[i][j] = path_sum
        else:
            row_count = len (path_array) 
            col_count = len (path_array[0])

            for idx in range (len(path_array_idx[0])):
                i = path_array_idx[0][idx]
                j = path_array_idx[1][idx]
                val = path_array[i][j]
                nbrs = []
                for k in (i-1, i, i+1):
                    if k < 0 or k >= row_count:
                        continue
                    checkrow = path_array[k]
                    for l in (j-1, j, j+1):
                        if l < 0 or l >= col_count:
                            continue
                        if k == i and j == l:
                            continue  #  don't check self
                        checkval = checkrow[l]
                        #  negs are nodata, and this way we
                        #  don't need to care what that value is
                        if checkval >= 0:
                            diff = val - checkval
                            if diff > 0:
                                nbrs.append(diff)
                                #arcpy.AddMessage ("Check and diff vals are %s %s" % (checkval, diff))
                diff = min (nbrs)
                #arcpy.AddMessage ("Diff  val is %s" % diff)
                transit_array[i][j] = diff

            path_sum = path_array.max()  #  could use path_distance?
            #arcpy.AddMessage ("path_array.max is %s" % path_sum)

        #  sometimes we get a zero path_sum even when the path_distance is non-zero
        if path_sum == 0:
            path_sum = 1

        #  Increment the cumulative transit array by the fraction of the
        #  transit time spent in each cell.
        #  Use path_sum because it corrects for cases where we stayed in the same cell.
        transit_array_accum = transit_array_accum + ((transit_array / path_sum) * transit_time)

        #xx = arcpy.NumPyArrayToRaster (transit_array, lower_left_coord, cellsize_used, cellsize_used, 0)
        #tmpname = "xx_t_arr_" + str (last_oid)
        #print "Saving transit array to %s" % tmpname
        #xx.save (tmpname)


        try:
            arcmgt.Delete(backlink_rast)
            arcmgt.Delete(dest_layer)
        except Exception as e:
            arcpy.AddMessage (e)

        #  getting off-by-one errors when using the environment, so use this directly
        ext = path_cost_rast.extent
        lower_left_coord = ext.lowerLeft

        last_target = row_cur.getValue(target_fld)
        last_oid    = row_cur.getValue(oid_fd_name)

    #  need to use env settings to get it to be the correct size
    try:
        arcpy.AddMessage ("lower left is %s" % lower_left_coord)
        xx = arcpy.NumPyArrayToRaster (transit_array_accum, lower_left_coord, cellsize_used, cellsize_used, 0)
        print "Saving to %s" % out_raster
        xx.save (out_raster)
    except:
        raise


    print "Completed"

    return ()
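
A minimal invocation sketch (hypothetical paths); the input point file is expected to carry the hour-difference field named in the fourth argument:

get_path_residence_times(
    in_file=r'C:\data\track_points.shp',
    cost_rast=r'C:\data\cost_surface.tif',
    out_raster=r'C:\data\residence_hours.tif',
    t_diff_fld_name='T_DIFF_HRS',
    workspace=r'C:\data')
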
Example #5
def DefinePopulation(study_area, out_points,
                     newsite_meth="NUMBER", newsite_val=10000, site_points=None):
    """Define population

    study_area    input study area polygons
    out_points    output points
    newsite_meth  new site generation parameter: "NUMBER","DISTANCE"
    newsite_val   value to use for above method
    site_points   existing site points
    """

    try:

        # initialize temp file variables
        lyrStudy, lyrSites, tmpFC, tmpRas, tmpPoints, numSites = [None] * 6
        rasWK = None

        fmtI = "  {0:<35s}{1:>8}"        # format to report integer/string values
        fmtF = "  {0:<35s}{1:>8.1f} {2}" # format to report float values w/ units

        lyrStudy = "lyr1"
        arcpy.MakeFeatureLayer_management(study_area, lyrStudy)

        # set processing environment
        arcpy.env.workspace = os.path.dirname(out_points)
        D = arcpy.Describe(study_area)
        env.extent = ext = D.extent
        env.outputCoordinateSystem = D.spatialReference
        xyUnits = D.spatialReference.linearUnitName
        arcpy.ClearEnvironment("snapRaster")
        rasWK = ScratchFolder()

        procLabel = "Defining population characteristics"
        GPMsg(procLabel)

        if not site_points:

            GPMsg("  Creating points...")

            # Prepare a population of points inside the study area

            if newsite_meth == "NUMBER":
                newsite_val = int(newsite_val)
                GPMsg(fmtI.format('Approximate number of sites:', newsite_val))
                samp_dist = ((ext.width * ext.height) / newsite_val) ** 0.5
            elif newsite_meth == "DISTANCE":
                samp_dist = float(newsite_val)
            else:
                raise Exception("Invalid new site method " + newsite_meth)

            GPMsg(fmtF.format(
                'Sample distance:', samp_dist, xyUnits.lower()[0]))

            # randomize the lattice origin
            xmin = ext.XMin - samp_dist * random.random()
            ymin = ext.YMin - samp_dist * random.random()
            env.extent = arcpy.Extent(xmin, ymin, ext.XMax, ext.YMax)

            # Report number of sites
            n = int((env.extent.width * env.extent.height) /
                    (samp_dist ** 2))
            GPMsg(fmtI.format(
                "Building a population with", n) + " sites")

            # Create a raster covering the study area
            tmpRas = arcpy.CreateScratchName("saras", "", "raster", rasWK)
            arcpy.FeatureToRaster_conversion(lyrStudy, D.OIDFieldName,
                                             tmpRas, samp_dist)

            # check raster - are there data cells?
            try:
                arcpy.GetRasterProperties_management(tmpRas, "MINIMUM")
            except:
                GPMsg()
                raise MsgError("No points created")

            # Generate a point lattice from raster cell centroids
            tmpPoints = arcpy.CreateScratchName("pt", "",
                                                "featureclass", rasWK)
            arcpy.RasterToPoint_conversion(tmpRas, tmpPoints, "VALUE")
            lyrSites = "lyrSites"
            arcpy.MakeFeatureLayer_management(tmpPoints, lyrSites)
            arcpy.DeleteField_management(lyrSites, "GRID_CODE;GRIDCODE")

            # count points
            numSites = int(arcpy.GetCount_management(lyrSites).getOutput(0))
            GPMsg(fmtI.format("Points inside study area:", numSites))

        else:

            # Select points from an existing point feature class

            lyrSites = "lyrSites"
            arcpy.MakeFeatureLayer_management(site_points, lyrSites)
            numSites = int(arcpy.GetCount_management(lyrSites).getOutput(0))
            # select points within study area
            arcpy.SelectLayerByLocation_management(lyrSites, "WITHIN", lyrStudy)

            # check number of sites selected
            numSelected = int(arcpy.GetCount_management(lyrSites).getOutput(0))
            if not numSelected:
                raise MsgError("No points selected")
            nsel = "{0}/{1}".format(numSelected, numSites)
            GPMsg(fmtI.format("Points inside study area:", nsel))
            numSites = numSelected

        # copy points to output
        arcpy.CopyFeatures_management(lyrSites, out_points)

    except MsgError as xmsg:
        GPMsg("e", str(xmsg))
Example #6
                    2)
                exit()
        else:
            AddMsgAndPrint(
                "\t" + demName +
                " is NOT in a Projected Coordinate System. Exiting...", 2)
            exit()

        # zUnits are feet because we are using WASCOB project DEM
        # This Zfactor is used for expressing elevations from input data as feet, regardless of input z-units. But z-units are feet in this toolbox. Redundant.
        Zfactor = 1

        # ------------------------------------------------------------- Delete previous TOC layers if present
        # Copy the input line before deleting the TOC layer reference in case input line IS the previous line selected from the TOC
        lineTemp = arcpy.CreateScratchName("lineTemp",
                                           data_type="FeatureClass",
                                           workspace="in_memory")
        arcpy.CopyFeatures_management(inputLine, lineTemp)

        if arcpy.Exists(outLineLyr):
            AddMsgAndPrint("\nRemoving previous layers from ArcMap", 0)
            arcpy.Delete_management(outLineLyr)

        if arcpy.Exists(outPointsLyr):
            arcpy.Delete_management(outPointsLyr)

        # ------------------------------------------------------------- Copy input and create routes / points
        # Check for fields: if user input is previous line they will already exist
        if len(arcpy.ListFields(lineTemp, "ID")) < 1:
            arcpy.AddField_management(lineTemp, "ID", "LONG", "", "", "", "",
                                      "NULLABLE", "NON_REQUIRED")
                                       str(demCellSize) + " Meter(s)")

                    elif linearUnits in ('Foot', 'Foot_US', 'Feet'):
                        bufferSize = str(demCellSize) + " Feet"
                        AddMsgAndPrint("\nBuffer size applied on Culverts: " +
                                       bufferSize)

                    else:
                        bufferSize = str(demCellSize) + " Unknown"
                        AddMsgAndPrint(
                            "\nBuffer size applied on Culverts: Equivalent of 1 pixel since linear units are unknown",
                            0)

                    # Buffer the culverts to 1 pixel
                    culvertBuffered = arcpy.CreateScratchName(
                        "culvertBuffered",
                        data_type="FeatureClass",
                        workspace="in_memory")
                    arcpy.Buffer_analysis(culverts, culvertBuffered,
                                          bufferSize, "FULL", "ROUND", "NONE",
                                          "")

                    # Dummy field just to execute Zonal stats on each feature
                    expression = "!" + arcpy.da.Describe(
                        culvertBuffered)['OIDFieldName'] + "!"
                    arcpy.AddField_management(culvertBuffered, "ZONE", "TEXT",
                                              "", "", "", "", "NULLABLE",
                                              "NON_REQUIRED")
                    arcpy.CalculateField_management(culvertBuffered, "ZONE",
                                                    expression, "PYTHON3")

                    # Get the minimum elevation value for each culvert
        # ----------------------------------- Set Environment Settings
        arcpy.env.extent = "MAXOF"
        arcpy.env.cellSize = demCellSize
        arcpy.env.snapRaster = demPath
        arcpy.env.outputCoordinateSystem = demSR
        arcpy.env.workspace = watershedGDB_path

        # --------------------------------------------------------------------- Convert outlet Line Feature to Raster Pour Point.

        # Add dummy field for buffer dissolve and raster conversion using OBJECTID (which becomes subbasin ID)
        objectIDfld = "!" + arcpy.da.Describe(outletFC)['OIDFieldName'] + "!"
        arcpy.AddField_management(outletFC, "IDENT", "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED")
        arcpy.CalculateField_management(outletFC, "IDENT", objectIDfld, "PYTHON3")

        # Buffer outlet features by  raster cell size
        outletBuffer = "in_memory" + os.sep + os.path.basename(arcpy.CreateScratchName("outletBuffer",data_type="FeatureClass",workspace=watershedGDB_path))
        bufferDist = "" + str(demCellSize) + " " + str(linearUnits) + ""
        arcpy.Buffer_analysis(outletFC, outletBuffer, bufferDist, "FULL", "ROUND", "LIST", "IDENT")

        # Convert buffered outlet to raster
        #arcpy.MakeFeatureLayer(outletBuffer,"outletBufferLyr")
        pourPointGrid = "in_memory" + os.sep + os.path.basename(arcpy.CreateScratchName("PourPoint",data_type="RasterDataset",workspace=watershedGDB_path))
        arcpy.PolygonToRaster_conversion(outletBuffer,"IDENT",pourPointGrid,"MAXIMUM_AREA","NONE",demCellSize)

        # Delete intermediate data
        arcpy.Delete_management(outletBuffer)
        arcpy.DeleteField_management(outletFC, "IDENT")

        # Create Watershed Raster using the raster pour point
        AddMsgAndPrint("\nDelineating Watershed(s)...")
        #watershedGrid = "in_memory" + os.sep + os.path.basename(arcpy.CreateScratchName("watershedGrid",data_type="RasterDataset",workspace=watershedGDB_path))
                                                watershedDesc['OIDFieldName'],
                                                "PYTHON3")

            if not len(arcpy.ListFields(watershed, "Acres")) > 0:
                arcpy.AddField_management(watershed, "Acres", "DOUBLE", "", "",
                                          "", "", "NULLABLE", "NON_REQUIRED")
                arcpy.CalculateField_management(watershed, "Acres",
                                                "!shape.area@ACRES!",
                                                "PYTHON3")

        # ------------------------------------------------------------------------------------------------ Create Landuse Layer
        if bSplitLU:

            # Dissolve in case the watershed has multiple polygons
            watershedDissolve = arcpy.CreateScratchName(
                "watershedDissolve",
                data_type="FeatureClass",
                workspace="in_memory")
            arcpy.Dissolve_management(inWatershedPath, watershedDissolve, "",
                                      "", "MULTI_PART", "DISSOLVE_LINES")

            # Clip the CLU layer to the dissolved watershed layer
            cluClip = arcpy.CreateScratchName("cluClip",
                                              data_type="FeatureClass",
                                              workspace="in_memory")
            arcpy.Clip_analysis(inCLU, watershedDissolve, cluClip)
            AddMsgAndPrint(
                "\nSuccessfully clipped the CLU to your Watershed Layer")

            # Union the CLU and dissolve watershed layer simply to fill in gaps
            arcpy.Union_analysis(cluClip + ";" + watershedDissolve, landuse,
                                 "ONLY_FID", "", "GAPS")
Example #10
        if (pn != pn1):
            temp_point.X = pn[0]
            temp_point.Y = pn[1]
            array = arcpy.Array()
            array.add(temp_point)
            temp_point.X = pn1[0]
            temp_point.Y = pn1[1]
            array.add(temp_point)
            polyLine = arcpy.Polyline(array)
            featureList.append(polyLine)
            field_list.append(
                [Id_list[xyList.index(pn)], Id_list[xyList.index(pn1)]])

############################################################################

temp_feature = arcpy.CreateScratchName('temp', data_type='FeatureClass')
arcpy.CopyFeatures_management(featureList, temp_feature)
arcpy.MakeFeatureLayer_management(temp_feature, 'lyr')
arcpy.AddField_management('lyr', field_name='p1', field_type='SHORT')
arcpy.AddField_management('lyr', field_name='p2', field_type='SHORT')
arcpy.AddField_management('lyr', field_name='flux', field_type='DOUBLE')
if arcpy.Exists(outFeature):
    arcpy.Delete_management(outFeature)
arcpy.CopyFeatures_management('lyr', outFeature)
arcpy.Delete_management(temp_feature)

##########################################################################

####
with arcpy.da.UpdateCursor(outFeature, ['p1', 'p2']) as curse:
    i = 0
        sys.exit()

    # define and set the scratch workspace
    scratchWS = os.path.dirname(sys.argv[0]) + os.sep + r'scratch.gdb'
    if not arcpy.Exists(scratchWS):
        scratchWS = setScratchWorkspace()

    if not scratchWS:
        AddMsgAndPrint("\Could Not set scratchWorkspace!")
        sys.exit()

    arcpy.env.scratchWorkspace = scratchWS
    arcpy.env.workspace = scratchWS

    temp_dem = arcpy.CreateScratchName("temp_dem",
                                       data_type="RasterDataset",
                                       workspace=scratchWS)
    merged_dem = arcpy.CreateScratchName("merged_dem",
                                         data_type="RasterDataset",
                                         workspace=scratchWS)
    clu_selected = "in_memory" + os.sep + os.path.basename(
        arcpy.CreateScratchName(
            "clu_selected", data_type="FeatureClass", workspace=scratchWS))
    clu_buffer = "in_memory" + os.sep + os.path.basename(
        arcpy.CreateScratchName(
            "clu_buffer", data_type="FeatureClass", workspace=scratchWS))

    # Make sure CLU fields are selected
    cluDesc = arcpy.Describe(source_clu)
    if cluDesc.FIDset == '':
        AddMsgAndPrint("\nPlease select fields from the CLU Layer. Exiting!",
Example #12
def _constrain_from_raster(constrain_area, rasters):
    """
    _constrain_from_raster
        Constrains an area to zones where all bands of the raster contain information 
        
    :param constrain_area: Original area  
    :param rasters: Multiband rasters to be used as information source
    
    :return: Constrained area 
    """

    global MESSAGES
    MESSAGES.AddMessage("Constraining Area from rasters...")
    _verbose_print("Constrain Area: {}".format(constrain_area))
    _verbose_print("rasters: {}".format(rasters))

    scratch_files = []

    # Obtain the name of the bands
    oldws = arcpy.env.workspace  # Save previous workspace
    raster_path = arcpy.Describe(rasters.strip("'")).catalogPath
    arcpy.env.workspace = raster_path
    rasters = [os.path.join(raster_path, b) for b in arcpy.ListRasters()]
    arcpy.env.workspace = oldws  # Restore previous workspace
    _verbose_print("Rasters list: {}".format(str(rasters)))

    # Start a progression bar to feedback for the user
    arcpy.SetProgressor("step",
                        "Restricting area from missings",
                        min_range=0,
                        max_range=len(rasters),
                        step_value=1)

    try:
        # TODO: This might be faster if converted to numpy arrays, computed there, and converted back to a raster
        # Initialize raster with all the Null points
        final_raster = arcpy.sa.IsNull(arcpy.sa.Raster(rasters[0]))
        arcpy.SetProgressorPosition()
        # loop through the remaining rasters, adding the cells where other bands have missing values
        if len(rasters) > 1:
            for raster in rasters[1:]:
                final_raster = arcpy.sa.BooleanOr(
                    final_raster, arcpy.sa.IsNull(arcpy.sa.Raster(raster)))
                arcpy.SetProgressorPosition()
                _verbose_print(
                    "Area reduced with nulls from {}".format(raster))
        # Set to null the positions where at least one null was found
        final_raster = arcpy.sa.SetNull(final_raster, final_raster)
        # reset the Progressor to previous state
        arcpy.SetProgressorLabel("Executing Select Random Points")
        arcpy.ResetProgressor()

        # Transform the raster to polygon
        domain_scratch = arcpy.CreateScratchName(
            "temp", workspace=arcpy.env.scratchWorkspace)
        arcpy.RasterToPolygon_conversion(final_raster, domain_scratch,
                                         "SIMPLIFY")
        scratch_files.append(domain_scratch)
        _verbose_print(
            "Scratch file created (domain): {}".format(domain_scratch))

        # Intersect the polygon created with the original area
        intersect_scratch = arcpy.CreateScratchName(
            "temp", workspace=arcpy.env.scratchWorkspace)
        arcpy.Intersect_analysis([domain_scratch, constrain_area],
                                 intersect_scratch)
        _verbose_print(
            "Scratch file created (intersect): {}".format(intersect_scratch))

    except:
        raise

    finally:
        # Clean up intermediate files
        for s_file in scratch_files:
            arcpy.Delete_management(s_file)
            _verbose_print("Scratch file deleted: {}".format(s_file))

    _verbose_print("Constrain from rasters finished")
    return intersect_scratch
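
A minimal usage sketch (hypothetical inputs; MESSAGES and arcpy.env.scratchWorkspace must already be set up as in the surrounding tool code):

constrained_fc = _constrain_from_raster(r'C:\data\study_area.shp',
                                        r'C:\data\evidence_bands.tif')
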
Example #13
def execute(self, parameters, messages):
    """
        Create random points tool 
            Create a set of random points constrained to a specific area, maintaining a minimum/maximum distance to a
                set of points and remaining in areas with full information
        :param parameters: parameters object with all the parameters from the python-tool. It contains:
            output: Name of the file where the points will be stored 
            n_points: number of random points to be created 
            constrain_area: Original constraining area used as baseline for further constraints
            rasters: Information rasters that will be used to restrict to areas with full information 
            buffer_points: The random points will maintain a minimal/maximal distance to these points
            buffer_distance: Distance from the buffer points
            min_distance: Minimum distance between created points
            select_inside: Boolean to create the points inside the area (True) or outside the area (False)
        :param messages: messages object to print in the console, must implement AddMessage
         
        :return: None
    """

    global MESSAGES
    MESSAGES = messages

    # Print parameters for debugging purposes
    print_parameters(parameters)
    parameter_dic = {par.name: par for par in parameters}

    output = parameter_dic["output_points"].valueAsText.strip("'")
    n_points = parameter_dic["number_points"].value
    constrain_area = parameter_dic["constraining_area"].valueAsText.strip("'")
    rasters = parameter_dic["constraining_rasters"].valueAsText
    buffer_points = parameter_dic["buffer_points"].valueAsText
    buffer_distance = parameter_dic["buffer_distance"].valueAsText
    min_distance = parameter_dic["minimum_distance"].valueAsText
    select_inside = parameter_dic["select_inside"].value

    # Split the output path into workspace and file name, as required by CreateRandomPoints_management
    out_ws, out_f = os.path.split(output)

    scratch_files = []
    try:
        # copy the constraining area to avoid modifying the original
        constrain_scratch = arcpy.CreateScratchName(
            "temp", workspace=arcpy.env.scratchWorkspace)
        arcpy.CopyFeatures_management(
            arcpy.Describe(constrain_area).catalogPath, constrain_scratch)
        scratch_files.append(constrain_scratch)
        _verbose_print(
            "Scratch file created (constrain): {}".format(constrain_scratch))
        # Constrain using the buffer points only if they were given; otherwise leave unconstrained
        if buffer_points is None or buffer_distance is None:
            _verbose_print("Exclude from points omitted")
            points_scratch = constrain_scratch
        else:
            points_scratch = _constrain_from_points(constrain_scratch,
                                                    buffer_points,
                                                    buffer_distance,
                                                    select_inside)
            scratch_files.append(points_scratch)
        # Constrain using the information rasters only if specified; otherwise do not constrain
        if rasters is None:
            _verbose_print("Exclude from rasters omitted")
            rasters_scratch = points_scratch
        else:
            rasters_scratch = _constrain_from_raster(points_scratch, rasters)
            scratch_files.append(rasters_scratch)
        # Dissolve the polygon into a single object to make the selection
        dissolve_scratch = arcpy.CreateScratchName(
            "temp", workspace=arcpy.env.scratchWorkspace)
        arcpy.Dissolve_management(in_features=rasters_scratch,
                                  out_feature_class=dissolve_scratch,
                                  multi_part="MULTI_PART")
        scratch_files.append(dissolve_scratch)

        # Select the random points
        # TODO: Sometimes random points fall right on the border of the rasters and therefore show null information; an erosion step needs to be added to avoid this
        result = arcpy.CreateRandomPoints_management(
            out_ws,
            out_f,
            dissolve_scratch,
            number_of_points_or_field=n_points,
            minimum_allowed_distance=min_distance)
        arcpy.DefineProjection_management(
            result,
            arcpy.Describe(constrain_area).spatialReference)
        MESSAGES.AddMessage("Random points saved in {}".format(result))
    except:
        raise
    finally:
        # Delete intermediate files
        for s_file in scratch_files:
            arcpy.Delete_management(s_file)
            _verbose_print("Scratch file deleted: {}".format(s_file))

    return
Example #14
    def zonalStatisticsAsTable(self, nullunit=None):
        """
        1. Use the Zonal Statistics as Table (ZonalStatisticsAsTable) tool to
           assign raster statistics to the vector features;
        2. then join the resulting table to the features;
        3. export the features that received values and the features left null.
        :return:
        """

        # if nullunit is None:
        #     nullunit = self.unit
        # print nullunit

        #___ZonalStatisticsAsTable___
        # unique identifier used internally for the join
        bsm = "ORIG_FID"
        out_table = "table_" + self.field  # e.g. table_EffectiveSoilThickness
        # arcpy.MakeFeatureLayer_management(nullunit1, nullunit)
        ZonalTable(nullunit, bsm, self.kriging, out_table, "DATA", "MEAN")
        print "Output Table:", out_table

        #___Join Table Into Lyr___
        # f_lyr = "AOIFeatureLayer"
        # arcpy.MakeFeatureLayer_management(nullunit, f_lyr)
        # connect
        # may raise an error if the table does not contain this field
        arcpy.JoinField_management(nullunit, bsm, out_table, bsm, "MEAN")
        # add the field
        ezarcpy.add_field(nullunit, [self.field], "DOUBLE", delete=1)
        # assign the joined field values to the newly added field
        expression0 = "!{}!".format("MEAN")  # !MEAN!
        arcpy.CalculateField_management(nullunit, self.field, expression0,
                                        "PYTHON_9.3")
        print "Join Table"

        #___Output AOI Layer___
        aoi_lyr = "aoi"
        arcpy.MakeFeatureLayer_management(nullunit, aoi_lyr)
        # select the features with null values
        expression = "MEAN is NULL"
        arcpy.SelectLayerByAttribute_management(aoi_lyr, "NEW_SELECTION",
                                                expression)
        # create unique names for the null-value AOI and the valued AOI
        aoi_false = arcpy.CreateScratchName("aoi_false", "", "featureclass",
                                            arcpy.env.workspace)
        aoi_true = arcpy.CreateScratchName("aoi_true", "", "featureclass",
                                           arcpy.env.workspace)
        # export the AOI features with null values
        arcpy.CopyFeatures_management(aoi_lyr, aoi_false)
        print "Output FeatureClass:", aoi_false
        # switch the selection and export the AOI features with values
        arcpy.SelectLayerByAttribute_management(aoi_lyr, "SWITCH_SELECTION")
        arcpy.CopyFeatures_management(aoi_lyr, aoi_true)
        print "Output FeatureClass:", aoi_true
        print "\n"

        #___handle layer fields___
        arcpy.DeleteField_management(aoi_false, ["MEAN", self.field])
        arcpy.DeleteField_management(nullunit, ["MEAN", self.field])

        arcpy.Delete_management(out_table)
        # arcpy.Delete_management(kriging)
        return aoi_true, aoi_false
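
# A hedged, standalone restatement of the zonal-statistics join pattern above,
# using the standard Spatial Analyst call (the original relies on a ZonalTable
# wrapper not shown in this snippet). Parameter names here are illustrative.
def attach_zonal_mean(polygons, value_raster, out_field, zone_field="ORIG_FID"):
    arcpy.CheckOutExtension("Spatial")
    stats_table = "in_memory/zonal_stats"
    # 1. Summarize the raster per polygon zone
    arcpy.sa.ZonalStatisticsAsTable(polygons, zone_field, value_raster,
                                    stats_table, "DATA", "MEAN")
    # 2. Join the MEAN column back onto the polygons
    arcpy.JoinField_management(polygons, zone_field, stats_table, zone_field,
                               "MEAN")
    # 3. Copy the joined value into a permanently named field
    arcpy.AddField_management(polygons, out_field, "DOUBLE")
    arcpy.CalculateField_management(polygons, out_field, "!MEAN!", "PYTHON_9.3")
    arcpy.Delete_management(stats_table)
    return polygons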
Exemplo n.º 15
0
def getLargePatchViewGrid(classValuesList, excludedValuesList, inLandCoverGrid,
                          landCoverValues, viewRadius, conValues,
                          minimumPatchSize, timer, saveIntermediates,
                          metricConst):
    # create class (value = 1) / other (value = 0) / excluded grid (value = 0) raster
    # define the reclass values
    classValue = 1
    excludedValue = 0
    otherValue = 0
    newValuesList = [classValue, excludedValue, otherValue]

    # generate a reclass list where each item in the list is a two item list: the original grid value, and the reclass value
    reclassPairs = getInOutOtherReclassPairs(landCoverValues, classValuesList,
                                             excludedValuesList, newValuesList)

    AddMsg((
        "{0} Reclassifying selected land cover class to 1. All other values = 0..."
    ).format(timer.split()))
    reclassGrid = Reclassify(inLandCoverGrid, "VALUE",
                             RemapValue(reclassPairs))

    ##calculate the big patches for LandCover

    AddMsg(("{0} Calculating size of land cover class patches...").format(
        timer.split()))
    regionGrid = RegionGroup(reclassGrid, "EIGHT", "WITHIN", "ADD_LINK")

    AddMsg(
        ("{0} Assigning {1} to patches >= minimum size threshold...").format(
            timer.split(), "1"))
    delimitedCOUNT = arcpy.AddFieldDelimiters(regionGrid, "COUNT")
    whereClause = delimitedCOUNT + " >= " + minimumPatchSize + " AND LINK = 1"
    burnInGrid = Con(regionGrid, classValue, 0, whereClause)

    # save the intermediate raster if save intermediates option has been chosen
    if saveIntermediates:
        namePrefix = metricConst.burnInGridName
        scratchName = arcpy.CreateScratchName(namePrefix, "", "RasterDataset")
        burnInGrid.save(scratchName)
        AddMsg(timer.split() + " Save intermediate grid complete: " +
               os.path.basename(scratchName))

    ##end of calculating the big patches for LandCover

    AddMsg((
        "{0} Performing focal SUM on reclassified raster with big patches using {1} cell radius neighborhood..."
    ).format(timer.split(), viewRadius))
    neighborhood = arcpy.sa.NbrCircle(int(viewRadius), "CELL")
    #focalGrid = arcpy.sa.FocalStatistics(reclassGrid == classValue, neighborhood, "SUM")
    focalGrid = arcpy.sa.FocalStatistics(burnInGrid == classValue,
                                         neighborhood, "SUM")

    AddMsg((
        "{0} Reclassifying focal SUM results into view = 1 and no-view = 0 binary raster..."
    ).format(timer.split()))
    #    delimitedVALUE = arcpy.AddFieldDelimiters(focalGrid,"VALUE")
    #    whereClause = delimitedVALUE+" = 0"
    #    viewGrid = Con(focalGrid, 1, 0, whereClause)
    whereValue = conValues[0]
    trueValue = conValues[1]
    viewGrid = Con(Raster(focalGrid) > whereValue, trueValue)
    return viewGrid
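
# getInOutOtherReclassPairs() is defined elsewhere in this toolbox; a plausible
# minimal version, matching how its result feeds RemapValue() above (the same
# pairing logic appears inline in a later example), might look like this:
def getInOutOtherReclassPairs(allValues, classValues, excludedValues, newValues):
    classValue, excludedValue, otherValue = newValues
    reclassPairs = []
    for val in allValues:
        if val in classValues:
            reclassPairs.append([val, classValue])      # class of interest
        elif val in excludedValues:
            reclassPairs.append([val, excludedValue])   # excluded values
        else:
            reclassPairs.append([val, otherValue])      # everything else
    return reclassPairs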
Exemplo n.º 16
0
        # ---------------------------------------------------------------- Temporary Datasets
        #linesNear = watershedGDB_path + os.sep + "linesNear"
        #pointsNear = watershedGDB_path + os.sep + "pointsNear"
        pointsLyr = "pointsLyr"
        stationTemp = watershedFD_path + os.sep + "stations"
        stationTable = watershedGDB_path + os.sep + "stationTable"
        routes = watershedFD_path + os.sep + "routes"
        stationEvents = watershedGDB_path + os.sep + "stationEvents"
        station_lyr = "stations"
        stationLyr = "stationLyr"
        stationBuffer = watershedFD_path + os.sep + "stationsBuffer"
        stationElev = watershedGDB_path + os.sep + "stationElev"
        outlets = watershedFD_path + os.sep + "tileOutlets"

        # -------------------------------------------------------------- Create Temp Point(s)
        pointsTemp = arcpy.CreateScratchName("pointsTemp",
                                             data_type="FeatureClass",
                                             workspace="in_memory")
        arcpy.CopyFeatures_management(inPoints, pointsTemp)

        AddMsgAndPrint("\nChecking inputs...")

        # Exit if no TileLines
        if not arcpy.Exists(tileLines):
            if arcpy.Exists("TileLines"):
                tileLines = "TileLines"
            else:
                AddMsgAndPrint("\tTile Lines Feature Class not found in same directory as Station Points ",2)
                AddMsgAndPrint("\tor in Current ArcMap Document. Unable to compute Stationing.",2)
                AddMsgAndPrint("\tCheck the source of your inputs and try again. Exiting...",2)
                exit()

        # Exit if no Project DEM
Exemplo n.º 17
0
    cost_rast = arcpy.GetParameterAsText(1)
    target_fld = arcpy.GetParameterAsText(2)
    workspace = arcpy.GetParameterAsText(3)

    if len(target_fld) == 0 or target_fld == "#":
        target_fld = "New_WP"

    arcpy.env.overwriteOutput = True

    arcpy.env.workspace = workspace
    if (arcpy.env.workspace is None):
        arcpy.env.workspace = os.getcwd()

    #arcpy.env.extent = "MINOF"
    arcpy.env.snapRaster = cost_rast
    scratch = arcpy.CreateScratchName('xx', '.shp')
    arcpy.Buffer_analysis(in_file, scratch, "2000 meters")
    desc = arcpy.Describe(scratch)
    arcpy.env.extent = desc.extent
    arcmgt.Delete(scratch)
    print "Extent is %s" % arcpy.env.extent
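
    # Side note (sketch): the buffer-then-set-extent trick above can be wrapped
    # in a small helper; the 2000 m distance simply mirrors the value used above,
    # and arcpy is assumed to be imported by this script.
    def set_extent_from_buffer(features, distance="2000 meters"):
        tmp = arcpy.CreateScratchName('xx', '.shp')
        arcpy.Buffer_analysis(features, tmp, distance)
        # Use the buffer's extent as the processing extent, then discard it
        arcpy.env.extent = arcpy.Describe(tmp).extent
        arcpy.Delete_management(tmp)
        return arcpy.env.extent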

    add_msg_and_print('Currently in directory: %s\n' % os.getcwd())
    add_msg_and_print('Workspace is: %s' % arcpy.env.workspace)
    #add_msg_and_print ('Scratch table is: %s' % out_table)

    table_view = "table_view"
    arcmgt.MakeTableView(in_file, table_view)

    fields = arcpy.ListFields(in_file)
Exemplo n.º 18
0
if i > 1:
    addMsgAndPrint('OOPS! More than one arc in ' + xsLine)
    sys.exit()
elif i == 0:
    addMsgAndPrint('OOPS! No arcs in ' + xsLine)
    sys.exit()

## make output fds if it doesn't exist
#  set output fds spatial reference to input fds spatial reference
if not arcpy.Exists(outFds):
    addMsgAndPrint('  Making feature data set ' + shortName(outFds))
    arcpy.CreateFeatureDataset_management(gdb, shortName(outFds), inFds)

addMsgAndPrint('  Prepping section line')
## make copy of section line
tempXsLine = arcpy.CreateScratchName('xx', outFdsTag + "xsLine",
                                     'FeatureClass', scratch)
addMsgAndPrint('    copying ' + shortName(xsLine) + ' to xxxXsLine')
#addMsgAndPrint(xsLine+' '+scratch)
arcpy.FeatureClassToFeatureClass_conversion(xsLine, scratch,
                                            shortName(tempXsLine))

desc = arcpy.Describe(tempXsLine)
xslfields = fieldNameList(tempXsLine)
idField = ''
for fld in xslfields:
    if fld.find('_ID') > 0:
        idField = fld
if idField == '':
    idField = 'ORIG_ID'
    arcpy.AddField_management(tempXsLine, idField, 'TEXT')
    arcpy.CalculateField_management(tempXsLine, idField, '01', 'PYTHON')
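
# fieldNameList() and shortName() are helper functions defined elsewhere in this
# script; plausible minimal versions, consistent with how they are used above
# (arcpy and os are assumed to be imported), would be:
def fieldNameList(table):
    # all field names in a table or feature class
    return [f.name for f in arcpy.ListFields(table)]

def shortName(path):
    # bare dataset name without its workspace path
    return os.path.basename(path)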
Exemplo n.º 19
0
            objectIDfld = "!" + arcpy.da.Describe(
                ReferenceLine)['OIDFieldName'] + "!"
            arcpy.CalculateField_management(ReferenceLine, "Subbasin",
                                            objectIDfld, "PYTHON3")

            arcpy.SetProgressorLabel("Updating LengthFt Field")
            arcpy.CalculateField_management(ReferenceLine, "LengthFt",
                                            "!shape.length@feet!", "PYTHON3")

            # Buffer outlet features by twice the raster cell size
            bufferDist = str(demCellSize * 2) + " " + str(linearUnits)
            arcpy.SetProgressorLabel("Buffering ReferenceLine by " +
                                     str(bufferDist) + " " + linearUnits)
            outletBuffer = arcpy.CreateScratchName("outletBuffer",
                                                   data_type="FeatureClass",
                                                   workspace="in_memory")
            arcpy.Buffer_analysis(ReferenceLine, outletBuffer, bufferDist,
                                  "FULL", "ROUND", "LIST", "Subbasin")

            # Get Reference Line Elevation Properties (Uses ProjectDEM, which is vertical feet by 1/10ths)
            arcpy.SetProgressorLabel("Calculating Reference Line Attributes")
            AddMsgAndPrint("\nCalculating Reference Line Attributes", 0)

            outletStats = arcpy.CreateTable_management("in_memory",
                                                       "outletStats")
            ZonalStatisticsAsTable(outletBuffer, "Subbasin", ProjectDEM,
                                   outletStats, "DATA")

            arcpy.CopyRows_management(storageTemplate, storageTable)
Exemplo n.º 20
0
    def create_OD_line(self, dis_feild):
        #dis_feild="DIS"
        env.workspace = self.env
        pointFeature = self.point_object
        outFeature = "temp_feature"
        field_id = dis_feild
        with arcpy.da.SearchCursor(pointFeature,
                                   [field_id, "SHAPE@XY"]) as curse:
            xyList = []
            Id_list = []
            pn = arcpy.Point()
            for row in curse:
                Id_list.append(row[0])
                pn = row[1]
                xyList.append([pn[0], pn[1]])
            del row, curse
        featureList = []
        field_list = []
        temp_point = arcpy.Point()
        for pn in xyList:
            for pn1 in xyList:
                if (pn != pn1):
                    temp_point.X = pn[0]
                    temp_point.Y = pn[1]
                    array = arcpy.Array()
                    array.add(temp_point)
                    temp_point.X = pn1[0]
                    temp_point.Y = pn1[1]
                    array.add(temp_point)
                    polyLine = arcpy.Polyline(array)
                    featureList.append(polyLine)
                    field_list.append([
                        Id_list[xyList.index(pn)], Id_list[xyList.index(pn1)]
                    ])

        ############################################################################

        temp_feature = arcpy.CreateScratchName('temp',
                                               data_type='FeatureClass')
        arcpy.CopyFeatures_management(featureList, temp_feature)
        arcpy.MakeFeatureLayer_management(temp_feature, 'lyr')
        arcpy.AddField_management('lyr', field_name='p1', field_type='SHORT')
        arcpy.AddField_management('lyr', field_name='p2', field_type='SHORT')
        arcpy.AddField_management('lyr',
                                  field_name='flux',
                                  field_type='DOUBLE')
        if arcpy.Exists(outFeature):
            arcpy.Delete_management(outFeature)
        arcpy.CopyFeatures_management('lyr', outFeature)
        arcpy.Delete_management(temp_feature)

        ##########################################################################

        ####
        with arcpy.da.UpdateCursor(outFeature, ['p1', 'p2']) as curse:
            i = 0
            for row in curse:
                row[0] = field_list[i][0]
                row[1] = field_list[i][1]
                curse.updateRow(row)
                i += 1
            del curse, row
        ####
        print(0)
Exemplo n.º 21
0
def execute(self, parameters, messages):
   
   #########################################################################
   # Step 10
   # Abend if edits are pending
   #########################################################################
   if util.sniff_editing_state():
      raise arcpy.ExecuteError("Error.  Pending edits must be saved or cleared before proceeding.");
      
   #########################################################################
   # Step 20 
   # Read the parameters
   #########################################################################
   scenarioid   = util.clean_id(parameters[2].valueAsText);
   waste_type   = parameters[3].valueAsText;
   waste_medium = parameters[4].valueAsText;
   waste_unit   = parameters[5].valueAsText;
   waste_amount = parameters[6].value;
   
   #########################################################################
   # Step 30 
   # Initialize the haz toc object
   #########################################################################
   haz = obj_AllHazardsWasteLogisticsTool.AllHazardsWasteLogisticsTool();
   
   #########################################################################
   # Step 40 
   # Check if the Incident Area has content
   #########################################################################
   if haz.incident_area.recordCount() == 0:
      raise arcpy.ExecuteError("Error.  Incident Area Feature Class is empty.");
      
   #########################################################################
   # Step 50 
   # Load the incidents as requested
   #########################################################################
   incident_temp = arcpy.CreateScratchName(
       "Incident_Centroid"
      ,""
      ,"FeatureClass"
      ,arcpy.env.scratchGDB
   );
   
   util.polygons_to_points(
       in_features       = haz.incident_area.dataSource
      ,out_feature_class = incident_temp
   );
   
   str_fm = "Name Name #;"              \
          + "CurbApproach # 0;"         \
          + "Attr_Minutes # 0;"         \
          + "Attr_TravelTime # 0;"      \
          + "Attr_Miles # 0;"           \
          + "Attr_Kilometers # 0;"      \
          + "Attr_TimeAt1KPH # 0;"      \
          + "Attr_WalkTime # 0;"        \
          + "Attr_TruckMinutes # 0;"    \
          + "Attr_TruckTravelTime # 0;" \
          + "Cutoff_Minutes # #;"       \
          + "Cutoff_TravelTime # #;"    \
          + "Cutoff_Miles # #;"         \
          + "Cutoff_Kilometers # #;"    \
          + "Cutoff_TimeAt1KPH # #;"    \
          + "Cutoff_WalkTime # #;"      \
          + "Cutoff_TruckMinutes # #;"  \
          + "Cutoff_TruckTravelTime # #";               
   
   arcpy.na.AddLocations(
       in_network_analysis_layer      = haz.network.lyr()
      ,sub_layer                      = haz.network.incidents.name
      ,in_table                       = incident_temp
      ,field_mappings                 = str_fm
      ,search_tolerance               = None
      ,sort_field                     = None
      ,search_criteria                = None
      ,match_type                     = None
      ,append                         = False
      ,snap_to_position_along_network = None
      ,snap_offset                    = None
      ,exclude_restricted_elements    = None
      ,search_query                   = None
   );
   
   arcpy.AddMessage("Network Incidents Layer loaded.");
      
   #########################################################################
   # Step 60 
   # Persist the waste amounts
   #########################################################################
   haz.scenario.upsertScenarioID(
       scenarioid   = scenarioid
      ,waste_type   = waste_type
      ,waste_medium = waste_medium
      ,waste_amount = waste_amount
      ,waste_unit   = waste_unit
   );
   haz.system_cache.set_current_scenarioid(scenarioid);
  
   del haz;
   
   return;
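
# util.polygons_to_points() is a project-specific utility not shown in this
# snippet; a plausible stand-in, consistent with its use above (incident area
# polygons converted to centroid points), is the standard Feature To Point tool:
def polygons_to_points(in_features, out_feature_class):
    arcpy.management.FeatureToPoint(in_features, out_feature_class, "CENTROID")
    return out_feature_class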
      
Exemplo n.º 22
0
def RotatePoint(inPoint, centrePoint, angle=0):
    # signature reconstructed from its use in RotatePolygon() below;
    # the angle here is assumed to already be in radians
    x = float(inPoint.X) - float(centrePoint.X)
    y = float(inPoint.Y) - float(centrePoint.Y)
    xr = (x * math.cos(angle)) - (y * math.sin(angle)) + float(centrePoint.X)
    yr = (x * math.sin(angle)) + (y * math.cos(angle)) + float(centrePoint.Y)
    return arcpy.Point(xr, yr)


# Rotate a (single part) polygon about a point
def RotatePolygon(inPoly, centrePoint, angle=0):
    arr = arcpy.Array()
    for pt in inPoly.getPart(0):
        arr.add(RotatePoint(pt, centrePoint, angle))
    return arcpy.Polygon(arr)
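
# Hedged usage sketch: rotate every single-part polygon in a hypothetical
# feature class by 45 degrees about its own centroid. RotatePoint() above works
# directly with math.cos/math.sin, so the angle is assumed to be in radians.
import math

def rotate_all(fc, degrees=45):
    rotated = []
    with arcpy.da.SearchCursor(fc, ["SHAPE@"]) as rows:
        for (shape,) in rows:
            rotated.append(RotatePolygon(shape, shape.centroid,
                                         math.radians(degrees)))
    return rotated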


# Process the data
scratchName = arcpy.CreateScratchName("MBG", "ByWidth", "FeatureClass")
arcpy.MinimumBoundingGeometry_management(in_features, scratchName,
                                         "RECTANGLE_BY_WIDTH", group_option,
                                         group_fields, "MBG_FIELDS")
arcpy.DeleteField_management(scratchName,
                             ["MBG_Width", "MBG_Length", "ORIG_FID"])
arcpy.CreateFeatureclass_management(os.path.dirname(out_features),\
                                    os.path.basename(out_features),"POLYGON",\
                                    template=scratchName,\
                                    spatial_reference=sR)
cur = arcpy.da.InsertCursor(out_features, ["SHAPE@", "MBG_Orientation"])
if labels == "LABELS":
    arcpy.CreateFeatureclass_management(os.path.dirname(out_features),\
                                        os.path.basename(out_features)+"_label","POINT",\
                                        spatial_reference=sR)
for row in arcpy.da.SearchCursor(scratchName,
Exemplo n.º 23
0
        addMsgAndPrint('  deleted feature class {}'.format(xx))

## MAP OUTLINE
# make XY file for map outline
addMsgAndPrint('  writing map outline file')
genf = open(os.path.join(scratch, 'xxxbox.csv'), 'w')
genf.write('LONGITUDE,LATITUDE\n')
genf.write('{},{}\n'.format(str(minLong), str(maxLat)))
genf.write('{},{}\n'.format(str(maxLong), str(maxLat)))
genf.write('{},{}\n'.format(str(maxLong), str(minLat)))
genf.write('{},{}\n'.format(str(minLong), str(minLat)))
genf.write('{},{}\n'.format(str(minLong), str(maxLat)))
genf.close()

# convert XY file to .dbf table
boxdbf = arcpy.CreateScratchName('xxx', '.dbf', '', scratch)
boxdbf = os.path.basename(boxdbf)
arcpy.TableToTable_conversion(os.path.join(scratch, 'xxxbox.csv'), scratch,
                              boxdbf)

# make XY event layer from .dbf table
arcpy.MakeXYEventLayer_management(os.path.join(scratch, boxdbf), 'LONGITUDE',
                                  'LATITUDE', 'boxlayer', xycs)

# convert event layer to preliminary line feature class with PointsToLine_management
arcpy.PointsToLine_management('boxlayer', 'xxMapOutline')

# densify MapOutline
arcpy.Densify_edit('xxMapOutline', 'DISTANCE', 0.0001)

# project to correct spatial reference
Exemplo n.º 24
0
class ReportWriter():
    "Writes the report files"

    def __init__(self, reportFormat, reportName=''):
        supportedFormats = ["CSV", "SHP", "KMZ"]
        if reportFormat not in supportedFormats:
            raise Exception("Report format not supported: " + reportFormat)
        self.reportFormat = reportFormat

        if ('' != reportName):
            self.overWriteOutput = True
            self.reportName = reportName
        else:
            self.overWriteOutput = False
            self.reportName = 'report'

    def write(self, fObject, doZip=False):

        folder = arcpy.env.scratchFolder
        name = self.reportName

        if "CSV" == self.reportFormat:
            fname = self._writeCSV(fObject, name, folder, doZip)
        elif "SHP" == self.reportFormat:
            fname = self._writeSHP(fObject, name, folder)
        elif "KMZ" == self.reportFormat:
            fname = self._writeKMZ(fObject, name, folder)
        else:
            raise Exception("Report format not implemented")

        return fname

    def _writeCSV(self, fObject, name, folder, doZip=False):

        if self.overWriteOutput:
            filePath = os.path.join(folder, name + '.csv')
            if arcpy.Exists(filePath): arcpy.Delete_management(filePath)
        else:
            filePath = arcpy.CreateScratchName(name, ".csv", "", folder)
        print filePath
        try:
            if isinstance(fObject, FieldObject):
                with open(filePath, 'wb') as csvfile:
                    fieldwriter = csv.writer(csvfile, delimiter=',', \
                                             quoting=csv.QUOTE_MINIMAL)

                    fieldwriter.writerow(fObject.getLabelsList())
                    for i in range(fObject.getNumberOfFeatures()):
                        featureList = fObject.getFeatureList(i, doFormat=True)
                        fieldwriter.writerow(featureList)
            else:
                fs = fObject.getFeatureSet()
                rows = arcpy.SearchCursor(fs)
                fieldnames = [f.name for f in arcpy.ListFields(fs)]

                allRows = []
                for row in rows:
                    rowlist = []
                    for field in fieldnames:
                        rowlist.append(row.getValue(field))
                    allRows.append(rowlist)

                with open(filePath, 'wb') as csvfile:
                    fieldwriter = csv.writer(csvfile, delimiter=',', \
                                             quoting=csv.QUOTE_MINIMAL)
                    fieldwriter.writerow(fieldnames)
                    for row in allRows:
                        fieldwriter.writerow(row)
        except Exception, e:
            raise e
            #raise Exception("Could not write to CSV file")

        if doZip:
            try:
                if self.overWriteOutput:
                    zipPath = os.path.join(folder, name + '.zip')
                    if arcpy.Exists(zipPath): arcpy.Delete_management(zipPath)
                else:
                    zipPath = arcpy.CreateScratchName(name, ".zip", "", folder)

                z = Zipper()
                z.zipFiles([filePath], zipPath)
                filePath = zipPath
            except:
                raise Exception("Could not zip CSV file")

        return filePath
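
# Hedged usage sketch: fObject is whatever field/feature-set wrapper the
# surrounding toolbox passes to ReportWriter; the report name is hypothetical.
def write_report(fObject, report_format="CSV", name="myReport", zipped=True):
    writer = ReportWriter(report_format, reportName=name)
    # returns the path of the file (or zip archive) that was written
    return writer.write(fObject, doZip=zipped)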
Exemplo n.º 25
0
        # Populate Subbasin Field and Calculate embankment length
        arcpy.SetProgressorLabel("Updating Subbasin Field")
        objectIDfld = "!" + arcpy.da.Describe(outletFC)['OIDFieldName'] + "!"
        arcpy.CalculateField_management(outletFC, "Subbasin", objectIDfld,
                                        "PYTHON3")

        arcpy.SetProgressorLabel("Updating LengthFt Field")
        arcpy.CalculateField_management(outletFC, "LengthFt",
                                        "!shape.length@feet!", "PYTHON3")

        # Buffer outlet features by twice the raster cell size
        bufferDist = str(demCellSize * 2) + " " + str(linearUnits)
        arcpy.SetProgressorLabel("Buffering ReferenceLine by " +
                                 str(bufferDist) + " " + linearUnits)
        outletBuffer = arcpy.CreateScratchName("outletBuffer",
                                               data_type="FeatureClass",
                                               workspace="in_memory")
        arcpy.Buffer_analysis(outletFC, outletBuffer, bufferDist, "FULL",
                              "ROUND", "LIST", "Subbasin")

        # Get Reference Line Elevation Properties (Uses ProjectDEM, which is vertical feet by 1/10ths)
        arcpy.SetProgressorLabel("Calculating Reference Line Attributes")
        AddMsgAndPrint("\nCalculating Reference Line Attributes", 0)
        ZonalStatisticsAsTable(outletBuffer, "Subbasin", ProjectDEM,
                               outletStats, "DATA")

        # Update the outlet FC with the zonal stats
        with arcpy.da.UpdateCursor(
                outletFC,
            ['Subbasin', 'MinElev', 'MaxElev', 'MeanElev']) as cursor:
            for row in cursor:
Exemplo n.º 26
0
def getEdgeCoreGrid(m, lccObj, lccClassesDict, inLandCoverGrid,
                    PatchEdgeWidth_str, processingCellSize_str, timer,
                    shortName, scratchNameReference):
    # Get the lccObj values dictionary to determine if a grid code is to be included in the effective reporting unit area calculation
    lccValuesDict = lccObj.values
    #landCoverValues = raster.getRasterValues(inLandCoverGrid)
    landCoverValues = getRasterValues(inLandCoverGrid)

    # get the grid codes for this specified metric
    ClassValuesList = lccClassesDict[m].uniqueValueIds.intersection(
        landCoverValues)

    # get the frozenset of excluded values (i.e., values not to use when calculating the reporting unit effective area)
    ExcludedValueList = lccValuesDict.getExcludedValueIds().intersection(
        landCoverValues)

    # create grid where cover type of interest (e.g., forest) is coded 3, excluded values are coded 1, everything else is coded 2
    reclassPairs = []
    for val in landCoverValues:
        oldValNewVal = []
        oldValNewVal.append(val)
        if val in ClassValuesList:
            oldValNewVal.append(3)
            reclassPairs.append(oldValNewVal)
        elif val in ExcludedValueList:
            oldValNewVal.append(1)
            reclassPairs.append(oldValNewVal)
        else:
            oldValNewVal.append(2)
            reclassPairs.append(oldValNewVal)

    AddMsg(
        timer.split() +
        " Step 1 of 4: Reclassing land cover grid to Class = 3, Other = 2, and Excluded = 1..."
    )
    reclassGrid = Reclassify(inLandCoverGrid, "VALUE",
                             RemapValue(reclassPairs))

    AddMsg(timer.split() + " Step 2 of 4: Setting Class areas to Null...")
    delimitedVALUE = arcpy.AddFieldDelimiters(reclassGrid, "VALUE")
    otherGrid = SetNull(reclassGrid, 1, delimitedVALUE + " = 3")

    AddMsg(timer.split() + " Step 3 of 4: Finding distance from Other...")
    distGrid = EucDistance(otherGrid)

    AddMsg(timer.split() +
           " Step 4 of 4: Delimiting Class areas to Edge = 3 and Core = 4...")
    edgeDist = round(float(PatchEdgeWidth_str) * float(processingCellSize_str))

    zonesGrid = Con((distGrid >= edgeDist) & reclassGrid, 4, reclassGrid)

    # it appears that ArcGIS cannot process the BuildRasterAttributeTable request without first saving the raster.
    # This step wasn't the case earlier. Either ESRI changed things, or I altered something in ATtILA that unwittingly caused this. -DE
    namePrefix = shortName + "_" + "Raster" + m + PatchEdgeWidth_str
    scratchName = arcpy.CreateScratchName(namePrefix, "", "RasterDataset")
    scratchNameReference[0] = scratchName
    zonesGrid.save(scratchName)

    arcpy.BuildRasterAttributeTable_management(zonesGrid, "Overwrite")

    arcpy.AddField_management(zonesGrid, "CATEGORY", "TEXT", "#", "#", "10")
    updateCoreEdgeCategoryLabels(zonesGrid)

    return zonesGrid
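
# updateCoreEdgeCategoryLabels() is defined elsewhere in this toolbox; given the
# Excluded = 1 / Other = 2 / Edge = 3 / Core = 4 coding established above, a
# plausible minimal version might label the raster attribute table like this:
def updateCoreEdgeCategoryLabels(zonesGrid):
    labels = {1: "Excluded", 2: "Other", 3: "Edge", 4: "Core"}
    ratPath = arcpy.Describe(zonesGrid).catalogPath
    with arcpy.da.UpdateCursor(ratPath, ["VALUE", "CATEGORY"]) as rows:
        for value, _ in rows:
            rows.updateRow((value, labels.get(value, "")))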
Exemplo n.º 27
0
def RotateFeatureClass(inputFC, outputFC,
                       angle=0, pivot_point=None):
    """Rotate Feature Class

    inputFC     Input features
    outputFC    Output feature class
    angle       Angle to rotate, in degrees
    pivot_point X,Y coordinates (as space-separated string)
                Default is lower-left of inputFC

    Because the output feature class no longer has "real" xy locations
    after rotation, it has no coordinate system defined.
    """

    def RotateXY(x, y, xc=0, yc=0, angle=0, units="DEGREES"):
        """Rotate an xy coordinate about a specified origin

        x,y      xy coordinates
        xc,yc   center of rotation
        angle   angle
        units    "DEGREES" (default) or "RADIANS"
        """
        import math
        x = x - xc
        y = y - yc
        # make angle clockwise (like Rotate_management)
        angle = angle * -1
        if units == "DEGREES":
            angle = math.radians(angle)
        xr = (x * math.cos(angle)) - (y * math.sin(angle)) + xc
        yr = (x * math.sin(angle)) + (y * math.cos(angle)) + yc
        return xr, yr

    # temp names for cleanup
    env_file = None
    lyrFC, lyrTmp, lyrOut   = [None] * 3  # layers
    tmpFC  = None # temp dataset
    Row, Rows, oRow, oRows = [None] * 4 # cursors

    try:
        # process parameters
        try:
            xcen, ycen = [float(xy) for xy in pivot_point.split()]
            pivot_point = xcen, ycen
        except:
            # if pivot point was not specified, get it from
            # the lower-left corner of the feature class
            ext = arcpy.Describe(inputFC).extent
            xcen, ycen  = ext.XMin, ext.YMin
            pivot_point = xcen, ycen

        angle = float(angle)

        # set up environment
        env_file = arcpy.CreateScratchName("xxenv",".xml","file",
                                           os.environ["TEMP"])
        arcpy.SaveSettings(env_file)

        # Disable any GP environment clips or project on the fly
        arcpy.ClearEnvironment("extent")
        arcpy.ClearEnvironment("outputCoordinateSystem")
        WKS = env.workspace
        if not WKS:
            if os.path.dirname(outputFC):
                WKS = os.path.dirname(outputFC)
            else:
                WKS = os.path.dirname(
                    arcpy.Describe(inputFC).catalogPath)
        env.workspace = env.scratchWorkspace = WKS

        # Disable GP environment clips or project on the fly
        arcpy.ClearEnvironment("extent")
        arcpy.ClearEnvironment("outputCoordinateSystem")

        # get feature class properties
        lyrFC = "lyrFC"
        arcpy.MakeFeatureLayer_management(inputFC, lyrFC)
        dFC = arcpy.Describe(lyrFC)
        shpField = dFC.shapeFieldName
        shpType = dFC.shapeType
        FID = dFC.OIDFieldName

        # create temp feature class
        tmpFC = arcpy.CreateScratchName("xxfc","","featureclass")
        arcpy.CreateFeatureclass_management(os.path.dirname(tmpFC),
                                            os.path.basename(tmpFC),
                                            shpType)
        lyrTmp = "lyrTmp"
        arcpy.MakeFeatureLayer_management(tmpFC, lyrTmp)

        # set up id field (used to join later)
        TFID = "XXXX_FID"
        arcpy.AddField_management(lyrTmp, TFID, "LONG")
        arcpy.DeleteField_management(lyrTmp, "ID")

        # rotate the feature class coordinates
        # only points, polylines, and polygons are supported

        # open read and write cursors
        Rows = arcpy.SearchCursor(lyrFC, "", "",
                                  "%s;%s" % (shpField,FID))
        oRows = arcpy.InsertCursor(lyrTmp)
        arcpy.AddMessage("Opened search cursor")
        if shpType  == "Point":
            for Row in Rows:
                shp = Row.getValue(shpField)
                pnt = shp.getPart()
                pnt.X, pnt.Y = RotateXY(pnt.X,pnt.Y,xcen,ycen,angle)
                oRow = oRows.newRow()
                oRow.setValue(shpField, pnt)
                oRow.setValue(TFID,Row.getValue(FID))
                oRows.insertRow(oRow)
        elif shpType in ["Polyline","Polygon"]:
            parts = arcpy.Array()
            rings = arcpy.Array()
            ring = arcpy.Array()
            for Row in Rows:
                shp = Row.getValue(shpField)
                p = 0
                for part in shp:
                    for pnt in part:
                        if pnt:
                            x, y = RotateXY(pnt.X, pnt.Y, xcen, ycen, angle)
                            ring.add(arcpy.Point(x, y, pnt.ID))
                        else:
                            # if we have a ring, save it
                            if len(ring) > 0:
                                rings.add(ring)
                                ring.removeAll()
                    # we have our last ring, add it
                    rings.add(ring)
                    ring.removeAll()
                    # if only one, remove nesting
                    if len(rings) == 1: rings = rings.getObject(0)
                    parts.add(rings)
                    rings.removeAll()
                    p += 1

                # if only one, remove nesting
                if len(parts) == 1: parts = parts.getObject(0)
                if dFC.shapeType == "Polyline":
                    shp = arcpy.Polyline(parts)
                else:
                    shp = arcpy.Polygon(parts)
                parts.removeAll()
                oRow = oRows.newRow()
                oRow.setValue(shpField, shp)
                oRow.setValue(TFID,Row.getValue(FID))
                oRows.insertRow(oRow)
        else:
            #raise Exception, "Shape type {0} is not supported".format(shpType) #UPDATE
            raise Exception("Shape type {0} is not supported".format(shpType))

        del oRow, oRows # close write cursor (ensure buffer written)
        oRow, oRows = None, None # restore variables for cleanup

        # join attributes, and copy to output
        arcpy.AddJoin_management(lyrTmp, TFID, lyrFC, FID)
        env.qualifiedFieldNames = False
        arcpy.Merge_management(lyrTmp, outputFC)
        lyrOut = "lyrOut"
        arcpy.MakeFeatureLayer_management(outputFC, lyrOut)
        # drop temp fields 2,3 (TFID, FID)
        fnames = [f.name for f in arcpy.ListFields(lyrOut)]
        dropList = ";".join(fnames[2:4])
        arcpy.DeleteField_management(lyrOut, dropList)

    #except MsgError, xmsg: #UPDATE
    except MsgError as xmsg:
        arcpy.AddError(str(xmsg))
    except arcpy.ExecuteError:
        tbinfo = traceback.format_tb(sys.exc_info()[2])[0]
        arcpy.AddError(tbinfo.strip())
        arcpy.AddError(arcpy.GetMessages())
        numMsg = arcpy.GetMessageCount()
        for i in range(0, numMsg):
            arcpy.AddReturnMessage(i)
    #except Exception, xmsg: #UPDATE
    except Exception as xmsg:
        tbinfo = traceback.format_tb(sys.exc_info()[2])[0]
        arcpy.AddError(tbinfo + str(xmsg))
    finally:
        # reset environment
        if env_file: arcpy.LoadSettings(env_file)
        # Clean up temp files
        for f in [lyrFC, lyrTmp, lyrOut, tmpFC, env_file]:
            try:
                if f: arcpy.Delete_management(f)
            except:
                pass
        # delete cursors
        try:
            for c in [Row, Rows, oRow, oRows]: del c
        except:
            pass

        # return pivot point
        try:
            pivot_point = "{0} {1}".format(*pivot_point)
        except:
            pivot_point = None

        return pivot_point
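
# Hedged usage sketch (the geodatabase paths are placeholders): rotate a feature
# class 30 degrees clockwise about its lower-left corner and report the pivot.
if __name__ == "__main__":
    pivot = RotateFeatureClass(r"C:\data\work.gdb\parcels",
                               r"C:\data\work.gdb\parcels_rot30", angle=30)
    arcpy.AddMessage("Rotated about pivot point: {0}".format(pivot))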
Exemplo n.º 28
0
def createPatchRaster(m, lccObj, lccClassesDict, inLandCoverGrid, metricConst,
                      maxSeparation, minPatchSize, processingCellSize_str,
                      timer, scratchNameReference):
    # create a list of all the grid values in the selected landcover grid
    #landCoverValues = raster.getRasterValues(inLandCoverGrid)
    landCoverValues = getRasterValues(inLandCoverGrid)

    # for the selected land cover class, get the class codes found in the input landcover grid
    lccValuesDict = lccObj.values
    classValuesList = lccClassesDict[m].uniqueValueIds.intersection(
        landCoverValues)

    # get the frozenset of excluded values (i.e., values not to use when calculating the reporting unit effective area)
    excludedValuesList = lccValuesDict.getExcludedValueIds().intersection(
        landCoverValues)

    # create class (value = 3) / other (value = 0) / excluded grid (value = -9999) raster
    # define the reclass values
    classValue = metricConst.classValue
    excludedValue = metricConst.excludedValue
    otherValue = metricConst.otherValue
    newValuesList = [classValue, excludedValue, otherValue]

    # generate a reclass list where each item in the list is a two item list: the original grid value, and the reclass value
    reclassPairs = getInOutOtherReclassPairs(landCoverValues, classValuesList,
                                             excludedValuesList, newValuesList)

    AddMsg(timer.split() + " Reclassing land cover to Class:" + m + " = " +
           str(classValue) + ", Other = " + str(otherValue) +
           ", and Excluded = " + str(excludedValue) + "...")
    reclassGrid = Reclassify(inLandCoverGrid, "VALUE",
                             RemapValue(reclassPairs))

    # create patch raster where:
    #    clusters of cells within the input threshold distance are considered a single patch
    #    and patches below the input minimum size have been discarded

    # Ensure all parameter inputs are the appropriate number type
    intMaxSeparation = int(maxSeparation)
    intMinPatchSize = int(minPatchSize)

    # Check if Maximum Separation > 0 if it is then skip to regions group analysis otherwise run Euclidean distance
    if intMaxSeparation == 0:
        AddMsg(
            timer.split() +
            " Assigning unique numbers to each unconnected cluster of Class:" +
            m + "...")
        regionOther = RegionGroup(reclassGrid == classValue, "EIGHT", "CROSS",
                                  "ADD_LINK", "0")
    else:
        AddMsg(timer.split() + " Connecting clusters of Class:" + m +
               " within maximum separation distance...")
        fltProcessingCellSize = float(processingCellSize_str)
        maxSep = intMaxSeparation * float(processingCellSize_str)
        delimitedVALUE = arcpy.AddFieldDelimiters(reclassGrid, "VALUE")
        whereClause = delimitedVALUE + " < " + str(classValue)
        classRaster = SetNull(reclassGrid, 1, whereClause)
        eucDistanceRaster = EucDistance(classRaster, maxSep,
                                        fltProcessingCellSize)

        # Run Region Group analysis on UserEuclidPlus, ignores 0/NoData values
        AddMsg(
            timer.split() +
            " Assigning unique numbers to each unconnected cluster of Class:" +
            m + "...")
        UserEuclidRegionGroup = RegionGroup(eucDistanceRaster >= 0, "EIGHT",
                                            "CROSS", "ADD_LINK", "0")

        # Maintain the original boundaries of each patch
        regionOther = Con(reclassGrid == classValue, UserEuclidRegionGroup,
                          reclassGrid)

    if intMinPatchSize > 1:
        AddMsg(timer.split() +
               " Eliminating clusters below minimum patch size...")
        delimitedCOUNT = arcpy.AddFieldDelimiters(regionOther, "COUNT")
        whereClause = delimitedCOUNT + " < " + str(intMinPatchSize)
        regionOtherFinal = Con(regionOther, otherValue, regionOther,
                               whereClause)
    else:
        regionOtherFinal = regionOther

    # add the excluded class areas back to the raster if present
    if excludedValuesList:
        regionOtherExcluded = Con(reclassGrid == excludedValue, reclassGrid,
                                  regionOtherFinal)
    else:
        regionOtherExcluded = regionOtherFinal

    # The Patch Metrics tool appears to have trouble calculating its metrics when the raster area is large and the
    # regionOtherExcluded grid is treated as a raster object in memory and not saved as a raster on disk
    namePrefix = metricConst.shortName + "_" + m + "_PatchRast"
    scratchName = arcpy.CreateScratchName(namePrefix, "", "RasterDataset")
    regionOtherExcluded.save(scratchName)
    desc = arcpy.Describe(regionOtherExcluded)
    scratchNameReference[0] = desc.catalogPath

    return regionOtherExcluded
Exemplo n.º 29
0
import arcpy
from arcpy import env  

env.workspace = r"C:/users/wfcla/Desktop/Classification_Automation/"  
env.overwriteOutput = True  
  
multipatch = arcpy.GetParameterAsText(0)

buildings = arcpy.CreateScratchName("building_footprints",
                                    data_type="Shapefile",
                                    workspace=arcpy.env.scratchFolder)

slr1 = arcpy.GetParameterAsText(1)
slr2 = arcpy.GetParameterAsText(2)
slr3 = arcpy.GetParameterAsText(3)
slr4 = arcpy.GetParameterAsText(4)
slr5 = arcpy.GetParameterAsText(5)

cat1 = arcpy.GetParameterAsText(6)
cat2 = arcpy.GetParameterAsText(7)
cat3 = arcpy.GetParameterAsText(8)
cat4 = arcpy.GetParameterAsText(9)
cat5 = arcpy.GetParameterAsText(10)

slr_temp1 = arcpy.CreateScratchName("temp1",
                                    data_type="Shapefile",
                                    workspace=arcpy.env.scratchFolder)
slr_temp2 = arcpy.CreateScratchName("temp2",
                                    data_type="Shapefile",
                                    workspace=arcpy.env.scratchFolder)
Exemplo n.º 30
0
def tableToPoint(inputTable, inputCoordinateFormat, inputXField, inputYField,
                 outputPointFeatures, inputSpatialReference):
    '''
    Converts table of coordinate formats to point features.
    
    inputTable - input table, each row will be a separate line feature in output
    inputCoordinateFormat - coordinate notation format of input vertices
    inputXField - field in inputTable for vertex x-coordinate, or full coordinate
    inputYField - field in inputTable for vertex y-coordinate, or None
    outputPointFeatures - output point features to create
    inputSpatialReference - spatial reference of input coordinates
    
    returns point feature class
    
    inputCoordinateFormat must be one of the following:
    * DD_1: Both longitude and latitude values are in a single field. Two values are separated by a space, a comma, or a slash.
    * DD_2: Longitude and latitude values are in two separate fields.
    * DDM_1: Both longitude and latitude values are in a single field. Two values are separated by a space, a comma, or a slash.
    * DDM_2: Longitude and latitude values are in two separate fields.
    * DMS_1: Both longitude and latitude values are in a single field. Two values are separated by a space, a comma, or a slash.
    * DMS_2: Longitude and latitude values are in two separate fields.
    * GARS: Global Area Reference System. Based on latitude and longitude, it divides and subdivides the world into cells.
    * GEOREF: World Geographic Reference System. A grid-based system that divides the world into 15-degree quadrangles and then subdivides into smaller quadrangles.
    * UTM_ZONES: The letter N or S after the UTM zone number designates only North or South hemisphere.
    * UTM_BANDS: The letter after the UTM zone number designates one of the 20 latitude bands. N or S does not designate a hemisphere.
    * USNG: United States National Grid. Almost exactly the same as MGRS but uses North American Datum 1983 (NAD83) as its datum.
    * MGRS: Military Grid Reference System. Follows the UTM coordinates and divides the world into 6-degree longitude and 20 latitude bands, but MGRS then further subdivides the grid zones into smaller 100,000-meter grids. These 100,000-meter grids are then divided into 10,000-meter, 1,000-meter, 100-meter, 10-meter, and 1-meter grids.

    '''
    try:
        env.overwriteOutput = True

        deleteme = []
        scratch = '%scratchGDB%'
        if env.scratchWorkspace:
            scratch = env.scratchWorkspace

        inputSpatialReference = _checkSpatialRef(inputSpatialReference)
        if (inputCoordinateFormat == 'DD_2') and (inputSpatialReference is not None) and \
            (inputSpatialReference != arcpy.SpatialReference(4326)):
            # default is GCS_WGS_1984 - if the SR is different, create feature class first using XYTableToPoint/MakeXYEventLayer

            # make scratch name for temp FC
            scratch_name = arcpy.CreateScratchName("temp",
                                                   data_type="Featureclass",
                                                   workspace=scratch)

            layername = os.path.basename(scratch_name) + "-layer"
            tempLayerOut = arcpy.management.MakeXYEventLayer(
                inputTable, inputXField, inputYField, layername,
                inputSpatialReference)

            scratch_out = arcpy.management.CopyFeatures(
                tempLayerOut, scratch_name)

            # Use the geometry of the scratch feature class, with SHAPE keyword, ingnores X and Y values
            arcpy.ConvertCoordinateNotation_management(
                scratch_out, outputPointFeatures, inputXField, inputYField,
                "SHAPE", "#", "#", inputSpatialReference)
            # Delete scratch dataset
            arcpy.Delete_management(scratch_out)

        else:
            #Using Geographic coordinates
            arcpy.ConvertCoordinateNotation_management(
                inputTable, outputPointFeatures, inputXField, inputYField,
                inputCoordinateFormat, "DD_NUMERIC", "#",
                inputSpatialReference)

        return outputPointFeatures

    except arcpy.ExecuteError:
        # Get the tool error messages
        msgs = arcpy.GetMessages()
        arcpy.AddError(msgs)
        print(msgs)

    except:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]

        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
            sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"

        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)

        # Print Python error messages for use in Python / Python Window
        print(pymsg + "\n")
        print(msgs)

    finally:
        if len(deleteme) > 0:
            # cleanup intermediate datasets
            if debug == True:
                arcpy.AddMessage("Removing intermediate datasets...")
            for i in deleteme:
                if debug == True: arcpy.AddMessage("Removing: " + str(i))
                arcpy.Delete_management(i)
            if debug == True: arcpy.AddMessage("Done")
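
# Hedged usage sketch for tableToPoint(): the CSV path, field names, and output
# feature class below are placeholders, not inputs from this toolbox.
if __name__ == '__main__':
    tableToPoint(r"C:\data\coords.csv", "DD_2", "Lon", "Lat",
                 r"C:\data\out.gdb\coord_points",
                 arcpy.SpatialReference(4326))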