Example #1
# Name: David Espinola
# Date: March 8, 2019
# Description: Challenge 1 - Write a script that creates a new polygon feature class containing a single (square) polygon with the following coordinates:
# (0,0), (0,1000), (1000,0) and (1000,1000)
import arcpy
arcpy.env.workspace = "C:/GEO6533/Python/Data/Exercise08"  # set the workspace
arcpy.env.overwriteOutput = True
poly_coords = [(0, 0, 0), (1, 0, 1000), (2, 1000, 1000), (3, 1000, 0)]  # coordinates of the square, with an id as the first value
outpath = "C:/GEO6533/Python/Data/Exercise08"
newfc = "Results/challenge_1_poly.shp"
arcpy.CreateFeatureclass_management(outpath, newfc, "Polygon")  # create the challenge_1_poly.shp feature class
cursor = arcpy.da.InsertCursor(newfc, ["SHAPE@"])  # create an insert cursor to load the geometry into the feature class
array = arcpy.Array()  # array to hold the polygon's vertices
point = arcpy.Point()  # point object used to build each vertex
for coords in poly_coords:  # iterate over the tuples in the poly_coords list
    ID, X, Y = coords  # unpack the individual values
    point.ID = ID  # assign the id to the ID attribute of the point
    point.X = X  # assign X to the X attribute of the point
    point.Y = Y  # assign Y to the Y attribute of the point
    array.add(point)  # add the point to the array
cursor.insertRow([arcpy.Polygon(array)])  # build a polygon from the point array and insert it into the geometry field
del cursor  # remove the lock on the feature class
    



# Name: David Espinola
# Date: March 8, 2019
# Description: Challenge 2 - Write a script that determines the perimeter (in meters) and area (in square meters) of each of the individual islands in the Hawaii.shp
# feature class. Recall that this is a multipart feature class.
import arcpy
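# The Challenge 2 code itself is not included in this snippet. Below is a minimal
# sketch of one possible approach (not the original author's solution), assuming
# Hawaii.shp sits in the same Exercise08 workspace and that its coordinate system
# reports lengths in meters and areas in square meters.
arcpy.env.workspace = "C:/GEO6533/Python/Data/Exercise08"
with arcpy.da.SearchCursor("Hawaii.shp", ["OID@", "SHAPE@"]) as cursor:
    for oid, shape in cursor:
        # iterate over the individual parts (islands) of the multipart polygon
        for part_num in range(shape.partCount):
            # rebuild each part as a single-part polygon so perimeter and area are reported per island
            island = arcpy.Polygon(shape.getPart(part_num), shape.spatialReference)
            print("Feature {0}, island {1}: perimeter = {2:.1f} m, area = {3:.1f} sq m".format(
                oid, part_num, island.length, island.area))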
Example #2
def execute_task(args):
    in_extentDict, data, traj_list = args

    fc_count = in_extentDict[0]

    procExt = in_extentDict[1]
    # print procExt
    XMin = procExt[0]
    YMin = procExt[1]
    XMax = procExt[2]
    YMax = procExt[3]

    #set environments
    arcpy.env.snapRaster = data['pre']['traj']['path']
    arcpy.env.cellSize = data['pre']['traj']['path']
    arcpy.env.outputCoordinateSystem = data['pre']['traj']['path']
    arcpy.env.extent = arcpy.Extent(XMin, YMin, XMax, YMax)

    # outData = numpy.zeros((rows,cols), numpy.int16)
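    # rws and cls (the tile's row and column counts) are assumed to be defined at
    # module level in the original script, along with imports of arcpy, os and
    # numpy as np; none of them appear in this snippet.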
    outData = np.zeros((rws, cls), dtype=np.uint8)

    ### create numpy arrays for input datasets nlcds and traj
    nlcds = {
        1992:
        arcpy.RasterToNumPyArray(
            in_raster=
            'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\nlcd.gdb\\nlcd30_1992',
            lower_left_corner=arcpy.Point(XMin, YMin),
            nrows=rws,
            ncols=cls),
        2001:
        arcpy.RasterToNumPyArray(
            in_raster=
            'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\nlcd.gdb\\nlcd30_2001',
            lower_left_corner=arcpy.Point(XMin, YMin),
            nrows=rws,
            ncols=cls),
        2006:
        arcpy.RasterToNumPyArray(
            in_raster=
            'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\nlcd.gdb\\nlcd30_2006',
            lower_left_corner=arcpy.Point(XMin, YMin),
            nrows=rws,
            ncols=cls),
        2011:
        arcpy.RasterToNumPyArray(
            in_raster=
            'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\nlcd.gdb\\nlcd30_2011',
            lower_left_corner=arcpy.Point(XMin, YMin),
            nrows=rws,
            ncols=cls),
    }

    arr_traj = arcpy.RasterToNumPyArray(in_raster=data['pre']['traj']['path'],
                                        lower_left_corner=arcpy.Point(
                                            XMin, YMin),
                                        nrows=rws,
                                        ncols=cls)

    #### find the location of each pixel labeled with a specific arbitrary value in the rows list
    #### note the traj_list is derived from the sql query above
    nlcd_list = []
    i = 0
    for row in traj_list:

        print 'i', i
        traj = row[0]
        ytc = row[1]
        print 'ytc', ytc
        yfc = row[2]
        print 'yfc', yfc

        #Return the indices of the pixels that have the arbitrary ytc values of the traj.
        indices = (arr_traj == row[0]).nonzero()

        #stack the indices variable above so easier to work with
        stacked_indices = np.column_stack((indices[0], indices[1]))

        #get the x and y location of each pixel that has been selected from above
        for pixel_location in stacked_indices:
            row = pixel_location[0]
            col = pixel_location[1]

            ####depending on year of conversion, append the value of the two previous nlcds
            if ytc is not None:
                if ytc < 2012:
                    nlcd_list.append(nlcds[2001][row][col])
                    nlcd_list.append(nlcds[2006][row][col])
                else:
                    nlcd_list.append(nlcds[2006][row][col])
                    nlcd_list.append(nlcds[2011][row][col])

            elif yfc is not None:
                if yfc < 2012:
                    nlcd_list.append(nlcds[2001][row][col])
                    nlcd_list.append(nlcds[2006][row][col])
                else:
                    nlcd_list.append(nlcds[2006][row][col])
                    nlcd_list.append(nlcds[2011][row][col])

            # #get the length of nlcd list containing only the value 81 and 82
            # ##81 = pasture/hay
            # ##82 = cultivated crop
            count_81 = nlcd_list.count(81)
            count_82 = nlcd_list.count(82)
            count_81_82 = count_81 + count_82

            ####create masks for both ytc and yfc ######################

            ##label the pixel ############################################################
            # outData[row,col] = labelPixel(data, ytc, yfc, count_82, count_81_82)

        i = i + 1

    arcpy.ClearEnvironment("extent")

    outname = "tile_" + str(fc_count) + '.tif'

    # #create
    outpath = os.path.join("C:/Users/Bougie/Desktop/Gibbs/data/", r"tiles",
                           outname)

    # NumPyArrayToRaster (in_array, {lower_left_corner}, {x_cell_size}, {y_cell_size}, {value_to_nodata})
    myRaster = arcpy.NumPyArrayToRaster(outData,
                                        lower_left_corner=arcpy.Point(
                                            XMin, YMin),
                                        x_cell_size=30,
                                        y_cell_size=30,
                                        value_to_nodata=0)

    ##free memory from outdata array!!
    outData = None

    myRaster.save(outpath)

    myRaster = None
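# execute_task() receives one packed "args" tuple, the usual calling convention for
# a multiprocessing worker. The dispatcher below is a hypothetical sketch, not part
# of the original source: it assumes extentDict maps a tile id to an
# (XMin, YMin, XMax, YMax) tuple and that data and traj_list are prepared elsewhere.
import multiprocessing

def run_tiles(extentDict, data, traj_list):
    jobs = [((tile_id, extent), data, traj_list)
            for tile_id, extent in extentDict.items()]
    pool = multiprocessing.Pool(processes=4)
    pool.map(execute_task, jobs)  # each worker writes its own tile_<id>.tif
    pool.close()
    pool.join()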
Example #3
def create_other_point(featuredata):
    cur = None
    try:
        cur = arcpy.InsertCursor(featuredata)
        fldname = [fldn.name for fldn in arcpy.ListFields(featuredata)]
        for i in range(1, nrows):

            L1 = table_coor.cell(
                getRowIndex(
                    table_coor, table.cell(
                        i, getColumnIndex(
                            table, "POINTNUMBER")).value, "POINTNUMBER"), getColumnIndex(
                    table_coor, "X")).value
            B1 = table_coor.cell(
                getRowIndex(
                    table_coor, table.cell(
                        i, getColumnIndex(
                            table, "POINTNUMBER")).value, "POINTNUMBER"), getColumnIndex(
                    table_coor, "Y")).value
            H1 = table_coor.cell(
                getRowIndex(
                    table_coor, table.cell(
                        i, getColumnIndex(
                            table, "POINTNUMBER")).value, "POINTNUMBER"), getColumnIndex(
                    table_coor, "Z")).value
            row = cur.newRow()
            if L1:
                point = arcpy.Point(
                    round(
                        float(L1), 8), round(
                        float(B1), 8), float(H1))
                row.shape = point
            for fldn in fldname:
                for j in range(0, ncols):
                    #print 112
                    if fldn == (str(table.cell(0, j).value).strip()).upper():
                        # print table.cell(i, j).ctype
                        if table.cell(i, j).ctype == 3:
                            # print table.cell(i, j).ctype
                            date = xlrd.xldate_as_tuple(
                                table.cell(i, j).value, 0)
                            # print(date)
                            tt = datetime.datetime.strftime(
                                datetime.datetime(*date), "%Y-%m-%d")
                            try:
                                row.setValue(fldn, tt)
                            except Exception as e:
                                print e

                        else:
                            try:
                                row.setValue(fldn, table.cell(i, j).value)
                            except Exception as e:
                                print e

            cur.insertRow(row)

    except Exception as e:
        # print e
        # original (Chinese) hint: check whether the header of table %s still contains Chinese field names or comment rows that were not removed
        arcpy.AddMessage(e)
        arcpy.AddMessage("\t check table: %s - make sure Chinese field names and comment rows were removed from the header" % filename)
    else:
        # original (Chinese) print: data table {0} loaded, {1} records
        arcpy.AddMessage("\t table {0} finished loading {1} items".format(featuredata, nrows))

    finally:
        if cur:
            del cur
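# getRowIndex() and getColumnIndex() are not shown in this snippet, and table /
# table_coor are assumed to be xlrd worksheets. The helpers below are hypothetical
# reconstructions that match how they are called above, not the original code.
def getColumnIndex(sheet, column_name):
    # index of the column whose header (row 0) matches column_name
    for j in range(sheet.ncols):
        if str(sheet.cell(0, j).value).strip().upper() == column_name:
            return j
    return -1

def getRowIndex(sheet, value, column_name):
    # index of the first data row whose value in column_name equals value
    col = getColumnIndex(sheet, column_name)
    for i in range(1, sheet.nrows):
        if sheet.cell(i, col).value == value:
            return i
    return -1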
Example #4
    def execute(self, parameters, messages):
        """The source code of the tool."""

        distance = float(parameters[2].valueAsText)
        azi = float(parameters[3].valueAsText)
        report = parameters[4].valueAsText
        latit_sp = float(parameters[1].valueAsText)
        longit_sp = float(parameters[0].valueAsText)

        #Calculates Angular Distance
        ang_distance = distance / 6371

        #Calculate the ending point latitude
        p1 = math.sin(math.radians(latit_sp)) * math.cos(ang_distance)
        p2 = math.cos(
            math.radians(latit_sp)) * math.sin(ang_distance) * math.cos(
                math.radians(azi))
        lat_ep = math.degrees(math.asin(p1 + p2))

        #Calculate the ending point longitude
        p3 = math.sin(math.radians(azi)) * math.sin(ang_distance) * math.cos(
            math.radians(latit_sp))
        p4 = math.cos(ang_distance) - (math.sin(math.radians(latit_sp)) *
                                       math.sin(math.radians(lat_ep)))
        long_ep = longit_sp + math.degrees(math.atan2(p3, p4))

        outshp = os.path.splitext(report)[0] + '.shp'

        # arcpy.Point takes (X, Y), i.e. (longitude, latitude)
        point = arcpy.Point(longit_sp, latit_sp)
        end_point = arcpy.Point(long_ep, lat_ep)

        ptGeometry = arcpy.PointGeometry(point)
        ptGeometry2 = arcpy.PointGeometry(end_point)

        degree = int(latit_sp)
        r1 = 0
        minutes = 0
        seconds = 0

        report_h = open(report, "w")
        report_h.write("SP_Lat_DD" + "\n" + str(latit_sp))
        # report_h.write("\nStarting Point Latitude: ")
        #report_h.write(str(getDegree(latit_sp)) + "° " + str(getMinute(latit_sp)) + " minutes " + str(getSecond(latit_sp)) + " seconds")
        report_h.write("SP_Lon_DD" + "\n" + str(longit_sp))
        # report_h.write("\nStarting Point Longitude: ")
        report_h.write(
            str(getDegree(longit_sp)) + "° " + str(getMinute(longit_sp)) +
            " minutes " + str(getSecond(longit_sp)) + " seconds")
        report_h.write("\nYour Range in Kilometers: " + str(distance))
        report_h.write("\nYour Range in Nautical Miles: " +
                       str(distance * 0.539957))
        report_h.write("\nYour Range in Miles: " + str(distance * 0.621371))
        report_h.write("\nYour Range in Yards: " + str(distance * 1093.61))
        report_h.write("\nYour Range in Feet: " + str(distance * 3280.84))
        report_h.write("\nYour Range in Meters: " + str(distance * 1000))
        report_h.write("\nYour Azimuth in Degrees: " + str(azi))
        report_h.write("\nEnding Point Latitude: " + str(lat_ep))
        report_h.write("\nEnding Point Latitude: ")
        report_h.write(
            str(getDegree(lat_ep)) + "? " + str(getMinute(lat_ep)) +
            " minutes " + str(getSecond(lat_ep)) + " seconds")
        report_h.write("\nEnding Point Latitude: " + str(long_ep))
        report_h.write("\nEnding Point Longitude: ")
        report_h.write(
            str(getDegree(long_ep)) + "? " + str(getMinute(long_ep)) +
            " minutes " + str(getSecond(long_ep)) + " seconds")
        report_h.close()

        return
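# getDegree(), getMinute() and getSecond() are used above but never defined in this
# snippet. The helpers below are hypothetical decimal-degree to degrees/minutes/
# seconds converters consistent with how they are called; they are not the
# original implementations.
def getDegree(dd):
    return int(abs(dd))

def getMinute(dd):
    return int((abs(dd) - getDegree(dd)) * 60)

def getSecond(dd):
    minutes = (abs(dd) - getDegree(dd)) * 60
    return round((minutes - int(minutes)) * 60, 2)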
Example #5
def createArea(inFeatures, limitingEdges, endNodes, range, unit):
    #creates limit points on the limiting edges (based on remaining distances)
    #computes centroid of limit points
    #connects points in order of azimuth between centroid and Nth point
    #smoothens polygon with Bezier interpolation algorithm
    #adds graph ending points which were reached without limit
    def azimuth(center_x, center_y, x, y):
        angle = degrees(atan2(y - center_y, x - center_x))
        az = (angle + 360) % 360
        return az

    #select all limiting edges
    condition = ''
    for fid in limitingEdges:
        condition = condition + str(fid) + ','

    condition = '"FID" IN (' + condition.rstrip(',') + ')'
    arcpy.SelectLayerByAttribute_management(inFeatures, 'NEW_SELECTION',
                                            condition)

    pointList = []
    with arcpy.da.SearchCursor(inFeatures, ['FID', 'SHAPE@', 'LENGTH']) as cur:
        xSum = 0
        ySum = 0
        for row in cur:
            fid = row[0]
            polyline = row[1]
            length = row[2]
            dist = limitingEdges[fid]['dist']
            endsSwitched = limitingEdges[fid]['switch']
            #move point along the edge by the remaining distance value
            rangePoint = polyline.positionAlongLine(dist)
            xSum = xSum + rangePoint[0].X
            ySum = ySum + rangePoint[0].Y

            #if ends were switched the remaining distance is reversed for "rDistance" point attribute value
            if endsSwitched:
                dist = length - dist
            pointList.append([0, dist, (rangePoint)])
    centroid = (xSum / len(pointList), ySum / len(pointList))

    i = 0
    while i < len(pointList):
        p = pointList[i]
        az = azimuth(centroid[0], centroid[1], p[2][0].X, p[2][0].Y)
        pointList[i][0] = az
        i += 1

    arcpy.SelectLayerByAttribute_management(inFeatures, 'CLEAR_SELECTION')

    #create rangePoints layer
    suffix = '__' + str(range) + '_' + unit
    print('Creating limit points')
    filename = 'rangePoints' + suffix + '.shp'
    path = arcpy.env.workspace
    filepath = os.path.join(path, filename)
    arcpy.CreateFeatureclass_management(path, filename, 'POINT')
    arcpy.AddField_management(filepath, 'rDistance', 'DOUBLE')
    arcpy.AddField_management(filepath, 'azimuth', 'DOUBLE')
    cursor = arcpy.da.InsertCursor(filepath,
                                   ['azimuth', 'rDistance', 'SHAPE@XY'])
    for row in pointList:
        cursor.insertRow(row)
    del cursor

    #connect points ordered by azimuth
    inFeatures = filepath
    filename = 'rangeLines' + suffix
    filepath = os.path.join(path, filename)
    arcpy.PointsToLine_management(inFeatures,
                                  filepath,
                                  Sort_Field='azimuth',
                                  Close_Line='CLOSE')
    arcpy.Delete_management(inFeatures)

    #closed polyline to polygon
    inFeatures = filepath + '.shp'
    filename = 'rangeArea' + suffix + '.shp'
    filepath = os.path.join(path, filename)
    arcpy.FeatureToPolygon_management(inFeatures, filepath, "",
                                      "NO_ATTRIBUTES", "")
    polygon = filepath
    arcpy.Delete_management(inFeatures)

    #graph ending points visited
    filename = 'pointsReachedWithoutLimit' + suffix + '.shp'
    print('Creating reached graph ending points: ' + filename)
    path = arcpy.env.workspace
    filepath = os.path.join(path, filename)
    arcpy.CreateFeatureclass_management(path, filename, 'POINT')
    cursor = arcpy.da.InsertCursor(filepath, ['SHAPE@XY'])
    for id in endNodes:
        x = endNodes[id].x
        y = endNodes[id].y
        newPoint = arcpy.Point(x, y)
        az = azimuth(centroid[0], centroid[1], x, y)
        cursor.insertRow([arcpy.PointGeometry(newPoint)])
    del cursor

    #smoothen polygon
    inFeatures = polygon
    filename = 'smoothArea' + suffix + '.shp'
    print('Creating smooth polygon: ' + filename)
    filepath = os.path.join(path, filename)
    arcpy.cartography.SmoothPolygon(inFeatures, filepath,
                                    'BEZIER_INTERPOLATION', 0)
    smoothPolygon = filepath

    #check if polygon smoothened correctly, display normal polygon if it did not
    with arcpy.da.SearchCursor(filepath, ['SHAPE@LENGTH']) as cursor:
        for row in cursor:
            if row[0] is None:
                arcpy.Delete_management(smoothPolygon)
            else:
                arcpy.Delete_management(polygon)
            break
    arcpy.RefreshActiveView()
    arcpy.RefreshTOC()
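# A hypothetical call sketch (not from the original source) showing the input
# structures createArea() appears to expect: inFeatures is an edge feature layer
# carrying a LENGTH field, limitingEdges maps an edge FID to the remaining
# distance along that edge plus a flag saying whether its ends were switched, and
# endNodes maps node ids to objects exposing .x and .y. Note that the "range"
# parameter shadows the Python built-in and is only used to build file names.
class Node(object):
    def __init__(self, x, y):
        self.x = x
        self.y = y

limitingEdges = {12: {'dist': 153.4, 'switch': False},
                 27: {'dist': 88.0, 'switch': True}}
endNodes = {3: Node(571200.0, 5632400.0)}
createArea('edges_lyr', limitingEdges, endNodes, 1000, 'm')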
Example #6
    def onClick(self):
        import arcpy
        import os
        import pythonaddins

        # Restore Page Layout (from PageLayoutElements table) before running this script.
        mxd = arcpy.mapping.MapDocument('CURRENT')
        ddp = mxd.dataDrivenPages
        pageName = str(ddp.pageRow.getValue(ddp.pageNameField.name))
        df_lst = arcpy.mapping.ListDataFrames(mxd)
        onMapDFs = []
        # List of data frames on the current page.
        for df in df_lst:
            if (df.elementPositionX > 0
                    and df.elementPositionX < mxd.pageSize[0]
                    and df.elementPositionY > 0
                    and df.elementPositionY < mxd.pageSize[1]):
                onMapDFs.append(df)

        feature_info = []
        for df in onMapDFs:
            # Only creates geometry for data frames on the page. Also creates FGDB.
            XMin = df.extent.XMin
            YMin = df.extent.YMin
            XMax = df.extent.XMax
            YMax = df.extent.YMax
            # A list of features and coordinate pairs
            df_info = [[XMin, YMin], [XMax, YMin], [XMax, YMax], [XMin, YMax]]
            feature_info.append(df_info)

        # A list that will hold each of the Polygon objects
        features = []
        for feature in feature_info:
            # Create a Polygon object based on the array of points
            # Append to the list of Polygon objects
            features.append(
                arcpy.Polygon(
                    arcpy.Array([arcpy.Point(*coords) for coords in feature])))

        # Persist a copy of the Polygon objects using CopyFeatures
        poly_filename = "DF_Polygons_{}".format(pageName)
        parentDir = os.path.abspath(
            os.path.join(os.path.dirname(mxd.filePath), os.pardir))
        edDir = os.path.join(parentDir, pageName)
        if not os.path.exists(edDir):
            os.makedirs(edDir)
        outDir = os.path.join(edDir, "anno_fgdb")
        if not os.path.exists(outDir):
            os.makedirs(outDir)
        workspace = arcpy.env.workspace = outDir

        arcpy.CopyFeatures_management(features, poly_filename)

        # Create FGDB(s).
        for df in onMapDFs:
            arcpy.CreateFileGDB_management(
                workspace,
                "{}_{}_{}_extentBoxes".format(pageName, df.name,
                                              str(int(round(df.scale)))),
                "CURRENT")

        del coords, feature_info, features, feature, poly_filename, outDir, mxd, df_lst, df_info, df, XMax, XMin, YMax, YMin, ddp, pageName
Example #7
        # translate cardinal directions to quadrants
        try:

            if lat[-1] == 'N':
                obs_lat = float(lat[:-1])
            else:
                obs_lat = float(lat[:-1]) * -1
            if lon[-1] == 'W':
                obs_lon = float(lon[:-1]) * -1
            else:
                obs_lon = float(lon[:-1])

            print(tagID, obs_lat, obs_lon)

            # create point object
            obsPoint = arcpy.Point()
            obsPoint.X = obs_lon
            obsPoint.Y = obs_lat

            # convert point to pointGeom
            inputSR = arcpy.SpatialReference(
                4326)  # specify SR of argos pts (global lat/lon = WGS84)
            obsPointGeom = arcpy.PointGeometry(obsPoint, inputSR)

            # use insert cursor to add data to fc
            feature = cursor_insert.insertRow(
                (obsPointGeom, tagID, LC, date.replace('.', '/') + ' ' + time))
            # the double parentheses pass the whole row to insertRow as a single tuple argument

        except Exception as e:  # grabs all error lines (records with lat = '???')
            print(f"Added error record {tagID} to error bucket")
Example #8
        cur.close()
        con.close()

    logger.info("order " + OrderIDText + " starting at: " +
                time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()))
    arcpy.env.overwriteOutput = True

    deliverfolder = OrderNumText
    pdfreport_name = OrderNumText + "_US_FIM.pdf"

    if coverInfotext["COUNTRY"] == 'MEX':
        pdfreport_name = OrderNumText + "_MEX_FIM.pdf"
    pdfreport = os.path.join(scratch, pdfreport_name)

    point = arcpy.Point()
    array = arcpy.Array()
    sr = arcpy.SpatialReference()
    sr.factoryCode = 4269  # requires input geometry is in 4269
    sr.XYTolerance = .00000001
    sr.scaleFactor = 2000
    sr.create()
    featureList = []

    for feature in OrderCoord:
        # For each coordinate pair, set the x,y properties and add to the Array object.
        for coordPair in feature:
            point.X = coordPair[0]
            point.Y = coordPair[1]
            sr.setDomain(point.X, point.X, point.Y, point.Y)
            array.add(point)
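        # (hypothetical continuation, not part of the original snippet) close out the
        # ring for this feature as a Polygon in the custom spatial reference, then
        # reset the array for the next feature
        featureList.append(arcpy.Polygon(array, sr))
        array.removeAll()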
Example #9
        # perim is assumed to be computed earlier in the truncated part of this
        # snippet, presumably with row[1].getLength()
        area = row[1].getArea()
        ptoa = perim / area

        sqside = math.sqrt(area)
        shapeindex = ((sqside * 4) / area) / ptoa
        cperim = math.sqrt(area / math.pi) * 2 * math.pi
        scircle = cperim / perim

        pointList = []
        #calculate ccircle
        buf = row[1].buffer(0.01)
        for part in buf:
            for coord in part:
                try:
                    pointList.append((coord.X, coord.Y))
                except AttributeError:
                    # a None entry marks an interior-ring separator
                    print "bad point"

        c = SEC.make_circle(pointList)
        circlearea = math.pi * c[2] * c[2]
        ccircle = area / circlearea
        print ptoa, shapeindex, scircle, ccircle

        cp = arcpy.PointGeometry(arcpy.Point(c[0], c[1]))
        print c
        polygonbuf = cp.buffer(c[2])
        row[1] = polygonbuf
        row[2] = ptoa
        # presumably repeated three more times for shapeindex, scircle and ccircle (row[3], row[4], row[5])
        cursor.updateRow(row)
Example #10
def __mesh_to_point(row, col, nw_corner):
    # MIN_DIAMETER is assumed to be a module-level constant (the mesh cell size)
    x = nw_corner.X + col * MIN_DIAMETER + .5 * MIN_DIAMETER
    y = nw_corner.Y - row * MIN_DIAMETER - .5 * MIN_DIAMETER
    return arcpy.Point(x, y)
arcpy.env.workspace = directory

table = arcpy.CreateTable_management("in_memory", "table1")
arcpy.AddField_management(table, "Field1", "TEXT", field_length=20)

cursor = arcpy.da.InsertCursor(table, ["Field1"])
cursor.insertRow(["Hello World"])

arcpy.TableToTable_conversion(table, directory, "Step_3_Output.csv")

# Example with a Shapefile

import os

points_list = [[20.000, 43.000], [25.500, 45.085], [26.574, 46.025], [28.131, 48.124]]
pt = arcpy.Point()
ptGeoms = []
for p in points_list:
    pt.X = p[0]
    pt.Y = p[1]
    ptGeoms.append(arcpy.PointGeometry(pt))

arcpy.CopyFeatures_management(ptGeoms, "in_memory/points")

# Let's do something with the shapefile, just print the OID
field = "OID"
cursor = arcpy.SearchCursor("in_memory/points")
row = cursor.next()
while row:
    print(row.getValue(field))
    row = cursor.next()
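# The same OID printout with the newer arcpy.da.SearchCursor, a sketch assuming the
# in_memory feature class created above:
with arcpy.da.SearchCursor("in_memory/points", ["OID@"]) as da_cursor:
    for da_row in da_cursor:
        print(da_row[0])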
Example #12
def __get_mesh_dim(polygon, center, tiers):
    # MIN_DIAMETER is assumed to be a module-level constant (the mesh cell size)
    max_dim = tiers * 2 + 1
    nw_corner = arcpy.Point(center.X - tiers * MIN_DIAMETER, center.Y + tiers * MIN_DIAMETER)
    rows_outside_extent = 2 * math.floor((nw_corner.Y - polygon.extent.YMax) / MIN_DIAMETER)
    cols_outside_extent = 2 * math.floor((polygon.extent.XMin - nw_corner.X) / MIN_DIAMETER)
    return max_dim - rows_outside_extent, max_dim - cols_outside_extent  # this does not change the center point
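# __get_mesh_dim() and __mesh_to_point() (Example #10) appear to belong to the same
# module and both rely on the module-level MIN_DIAMETER constant. The generator
# below is a hypothetical sketch, not original code, showing how the two helpers
# could be combined to enumerate candidate mesh points for a polygon.
def __mesh_points(polygon, center, tiers):
    rows, cols = __get_mesh_dim(polygon, center, tiers)
    nw_corner = arcpy.Point(center.X - tiers * MIN_DIAMETER,
                            center.Y + tiers * MIN_DIAMETER)
    for r in range(int(rows)):
        for c in range(int(cols)):
            yield __mesh_to_point(r, c, nw_corner)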
Example #13
def create_arcpy_polygon(polygon):
    # polygon is expected to be an iterable of (x, y) coordinate pairs
    arcpy_polygon = arcpy.Polygon(
        arcpy.Array([arcpy.Point(ppoint[0], ppoint[1]) for ppoint in polygon]))
    return arcpy_polygon
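# Example usage (coordinates are arbitrary illustrative values): the function takes
# an iterable of (x, y) pairs and returns an arcpy Polygon.
square = create_arcpy_polygon([(0, 0), (0, 100), (100, 100), (100, 0)])
print(square.area)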
Example #14
def csv2line():
    arcpy.env.overwriteOutput = True
    inPt = arcpy.GetParameterAsText(0)
    outFeature = arcpy.GetParameterAsText(1)
    X = arcpy.GetParameterAsText(2)
    Y = arcpy.GetParameterAsText(3)
    Z = arcpy.GetParameterAsText(4)
    idField = arcpy.GetParameterAsText(5)
    reserveField = arcpy.GetParameterAsText(6)
    maxvField = "MAX_V"

    try:

        outPath, outFC = os.path.split(outFeature)

        #change C:\Users\leizengxiang\Desktop\drawCsvInArcgis to your directory, and change the wgs84.prj to your projection file
        arcpy.CreateFeatureclass_management(
            outPath, outFC, "POLYLINE", "", "DISABLED", "ENABLED",
            r"C:\Users\leizengxiang\Desktop\drawCsvInArcgis\wgs84.prj")

        field1 = arcpy.ListFields(inPt, idField)[0]
        arcpy.AddField_management(outFeature, field1.name, field1.type)
        if reserveField:
            field2 = arcpy.ListFields(inPt, reserveField)[0]
            arcpy.AddField_management(outFeature, field2.name, field2.type)
        # Add v
        arcpy.AddField_management(outFeature, maxvField, "double")

        oCur, iCur, sRow, feat = None, None, None, None

        shapeName = "Shape"
        idName = "id"

        oCur = arcpy.SearchCursor(inPt)
        iCur = arcpy.InsertCursor(outFeature)
        array = arcpy.Array()
        ID = -1
        PID = 0
        LID = 0
        if reserveField:
            RESERVE = 0
        MAXV = 0
        TEMPV = 0
        X1 = 0
        X2 = 0
        Y1 = 0
        Y2 = 0
        Z1 = 0
        Z2 = 0

        for sRow in oCur:
            X2 = sRow.getValue(X)
            Y2 = sRow.getValue(Y)
            Z2 = sRow.getValue(Z)
            pt = arcpy.Point(X2, Y2, Z2, None, PID)
            PID += 1
            currentValue = sRow.getValue(idField)
            if ID == -1:
                ID = currentValue
                if reserveField:
                    RESERVE = sRow.getValue(reserveField)
                X1 = X2
                Y1 = Y2
                Z1 = Z2
            if ID != currentValue:
                if array.count >= 2:
                    feat = iCur.newRow()
                    feat.setValue(idField, ID)
                    feat.setValue(shapeName, array)
                    feat.setValue(idName, LID)
                    LID += 1
                    if reserveField:
                        feat.setValue(reserveField, RESERVE)
                    feat.setValue(maxvField, MAXV)
                    iCur.insertRow(feat)

                else:
                    arcpy.AddIDMessage("WARNING", 1059, str(ID))

                X1 = X2
                Y1 = Y2
                Z1 = Z2
                MAXV = 0
                array.removeAll()
                if reserveField:
                    RESERVE = sRow.getValue(reserveField)

            if (Z1 < Z2) and (X1 != X2 or Y1 != Y2):
                TEMPV = 0.36 * getdis(X1, Y1, X2, Y2) / (Z2 - Z1)  #KM/H
            else:
                TEMPV = 0
            MAXV = getmax(MAXV, TEMPV)
            array.add(pt)
            X1 = X2
            Y1 = Y2
            Z1 = Z2
            ID = currentValue

        if array.count > 1:
            feat = iCur.newRow()
            feat.setValue(idField, currentValue)
            feat.setValue(shapeName, array)
            feat.setValue(idName, LID)
            if reserveField:
                feat.setValue(reserveField, RESERVE)
            feat.setValue(maxvField, MAXV)
            iCur.insertRow(feat)
        else:
            arcpy.AddIDMessage("WARNING", 1059, str(ID))
        array.removeAll()

    except Exception as err:
        arcpy.AddError(str(err))

    finally:
        if oCur:
            del oCur
        if iCur:
            del iCur
        if sRow:
            del sRow
        if feat:
            del feat
        try:
            # Update the spatial index(es)
            #
            r = arcpy.CalculateDefaultGridIndex_management(outFeature)
            arcpy.AddSpatialIndex_management(outFeature, r.getOutput(0),
                                             r.getOutput(1), r.getOutput(2))
        except:
            pass
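# getdis() and getmax() are called in csv2line() but never defined in this snippet.
# The stand-ins below are hypothetical reconstructions consistent with how they are
# used (getdis presumably returns a distance for two coordinate pairs; the 0.36
# factor and the Z2 - Z1 time difference are taken as given from the original):
def getmax(a, b):
    return a if a > b else b

def getdis(x1, y1, x2, y2):
    # straight-line distance in the units of the input coordinates
    return ((x2 - x1) ** 2 + (y2 - y1) ** 2) ** 0.5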
Example #15
    def to_feature_class(self, gdb_path, gdb_name):
        import arcpy
        abspath = gdb_path

        if (os.path.isabs(abspath) == False):
            print "[INFO] The path %s for ESRI Geodatabase is not absolute." % (
                gdb_path)
            print "[INFO] Automatically changed to absolute by using the file path of this script."
            abspath = os.path.join(os.getcwd(), abspath)
            print "[INFO] New path: %s" % (abspath)

        if not os.path.isdir(os.path.join(abspath, gdb_name)):
            arcpy.CreateFileGDB_management(abspath, gdb_name)
        self._workspace = os.path.join(abspath, gdb_name)
        arcpy.env.workspace = self._workspace

        print "[INFO] Now handle the file type of %s" % (self._name)

        points = []
        for (i, rec) in enumerate(iter(self)):
            #print str(rec)
            point = arcpy.Point(X=float(rec[self._fieldName["Easting"]]),
                                Y=float(rec[self._fieldName["Northing"]]),
                                ID=i + 1)
            points.append(
                arcpy.PointGeometry(point, arcpy.SpatialReference(2326)))
        arcpy.CopyFeatures_management(
            points, os.path.join(self._workspace, self._category))

        field_to_update = [k for k in self._fieldName.iterkeys()]

        arcpy.AddField_management(in_table=self._category,
                                  field_name="Name",
                                  field_type="TEXT")
        arcpy.AddField_management(in_table=self._category,
                                  field_name="Address",
                                  field_type="TEXT")
        arcpy.AddField_management(in_table=self._category,
                                  field_name="Longitude",
                                  field_type="DOUBLE")
        arcpy.AddField_management(in_table=self._category,
                                  field_name="Latitude",
                                  field_type="DOUBLE")
        arcpy.AddField_management(in_table=self._category,
                                  field_name="Easting",
                                  field_type="DOUBLE")
        arcpy.AddField_management(in_table=self._category,
                                  field_name="Northing",
                                  field_type="DOUBLE")
        if self._fieldName.has_key("District"):
            arcpy.AddField_management(in_table=self._category,
                                      field_name="District",
                                      field_type="TEXT")
        if self._fieldName.has_key("Type"):
            arcpy.AddField_management(in_table=self._category,
                                      field_name="Type",
                                      field_type="TEXT")

        with arcpy.da.UpdateCursor(in_table=self._category,
                                   field_names=field_to_update) as cursor:
            for (i, row) in enumerate(cursor):
                for (j, field) in enumerate(field_to_update):
                    row[j] = self._facilities[i][self._fieldName[field]]
                cursor.updateRow(row)
Example #16
def tail():
    tupDateNow = datetime.now()
    while (1):
        # open the csv file to find out which scenes have already finished processing
        #arcpy.env.workspace = config.gdbPath

        log = pd.read_csv("logComplete.csv")
        liScene = log["scene"].tolist()
        liDate = log["dateComplete"].tolist()

        msg = str(datetime.now()) + '\t' + "Importing Library ... \n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)

        arcpy.CheckOutExtension("spatial")
        # pass the list of completed scenes to the ftp download
        filenameNow, scene, boolScene, year, month = ft.downloadFile(liScene)

        del log
        del liScene
        del liDate

        if (boolScene == False):
            print "Today's data has finished processing"
            tupDateLoop = datetime.now()
            while (tupDateNow.day == tupDateLoop.day):
                print "waiting for the day to change :)"
                time.sleep(10)
                tupDateLoop = datetime.now()

            tupDateNow = tupDateLoop
            print "the day has changed"

        # define the name of the file to be processed
        filename = filenameNow
        # define the name of the raw classification output file
        filenameOut = filenameNow + "_classified.TIF"
        # define the location of the downloaded ers file on the workstation
        dataPath = config.dataPath + scene + "/" + filename
        # define the location of the .pkl model trained on the sample data
        modelPath = config.modelPath
        # define the indonesia shp file for clipping to administrative boundaries
        shpPath = config.shpPath

        # define the output folder for the processing results
        outFolder = config.outputPath + filename.split(".")[0]
        # if the folder already exists, delete it
        if (os.path.exists(outFolder)):
            shutil.rmtree(outFolder)
        # create the folder defined above
        os.makedirs(outFolder)
        # define the output file path
        outputPath = outFolder + "/" + filenameOut

        ##################### CONVERT THE ERS DATA TO INDIVIDUAL BANDS ######################################
        print("converting b3")
        if (os.path.exists(dataPath + "TOA_B3" + ".TIF")):
            os.remove(dataPath + "TOA_B3" + ".TIF")
        # take only band 3 and turn it into a raster
        try:
            b_green = arcpy.Raster(dataPath + "/B3") * 1.0
        except:
            b_green = arcpy.Raster(dataPath + "/Band_3") * 1.0

        print("saving b3")
        msg = str(datetime.now()) + '\t' + "saving b3 \n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)
        # save the band 3 raster to the input data folder
        b_green.save(dataPath + "TOA_B3" + ".TIF")
        del b_green

        print("converting b5")
        if (os.path.exists(dataPath + "TOA_B5" + ".TIF")):
            os.remove(dataPath + "TOA_B5" + ".TIF")
        # take only band 5 and turn it into a raster
        try:
            b_nir = arcpy.Raster(dataPath + "/B5") * 1.0
        except:
            b_nir = arcpy.Raster(dataPath + "/Band_5") * 1.0

        print("saving b5")
        msg = str(datetime.now()) + '\t' + "saving b5 \n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)
        # save the band 5 raster to the input data folder
        b_nir.save(dataPath + "TOA_B5" + ".TIF")
        del b_nir

        print("converting b6")
        if (os.path.exists(dataPath + "TOA_B6" + ".TIF")):
            os.remove(dataPath + "TOA_B6" + ".TIF")
        # take only band 6 and turn it into a raster
        try:
            b_swir1 = arcpy.Raster(dataPath + "/B6") * 1.0
        except:
            b_swir1 = arcpy.Raster(dataPath + "/Band_6") * 1.0

        msg = str(datetime.now()) + '\t' + "saving b6 \n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)
        print("saving b6")
        # save the band 6 raster to the input data folder
        b_swir1.save(dataPath + "TOA_B6" + ".TIF")
        del b_swir1

        ####################### DATA CONVERSION FINISHED #######################################

        #################### CONVERT THE RASTERS TO DATAFRAME FORMAT ###############################
        msg = str(datetime.now()) + '\t' + "Processing file " + filename + "\n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)

        # load all the rasters converted at the start
        rasterarrayband6 = arcpy.RasterToNumPyArray(dataPath + "TOA_B3.TIF")
        rasterarrayband6 = np.array(rasterarrayband6, dtype=np.uint32)
        rasterarrayband5 = arcpy.RasterToNumPyArray(dataPath + "TOA_B5.TIF")
        rasterarrayband5 = np.array(rasterarrayband5, dtype=np.uint32)
        rasterarrayband3 = arcpy.RasterToNumPyArray(dataPath + "TOA_B6.TIF")
        rasterarrayband3 = np.array(rasterarrayband3, dtype=np.uint32)

        print rasterarrayband6.dtype
        print("Change raster format to numpy array")
        # stack the 3 data arrays horizontally
        data = np.array([
            rasterarrayband6.ravel(),
            rasterarrayband5.ravel(),
            rasterarrayband3.ravel()
        ],
                        dtype=np.int16)
        # transpose to vertical as required for the .pkl prediction
        data = data.transpose()

        # immediately delete variables that are no longer used
        del rasterarrayband6
        del rasterarrayband5
        del rasterarrayband3

        print("Change to dataframe format")
        msg = str(datetime.now()) + '\t' + "Change to dataframe format \n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)
        #time.sleep(1)

        # define the dataframe column names
        columns = ['band3', 'band5', 'band6']
        # convert the vertical array into a dataframe
        df = pd.DataFrame(data, columns=columns)
        # delete the vertical array
        del data
        ###################### DONE ####################################################
        print("Split data to 20 chunks ")
        msg = str(datetime.now()) + '\t' + "Split data to 20 chunks \n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)
        #time.sleep(1)

        # split the data into 20 chunks because the program cannot predict everything at once
        df_arr = np.array_split(df, 20)
        # delete the dataframe
        del df
        # load the trained classifier (.pkl model)
        clf = joblib.load(modelPath)

        # define a list to hold the integer prediction results
        kelasAll = []
        # repeat for each data chunk
        for i in range(len(df_arr)):

            print("predicting data chunk-%s\n" % i)
            msg = str(datetime.now()) + '\t' + "predicting data chunk-%s\n" % i
            redis.rpush(config.MESSAGES_KEY, msg)
            redis.publish(config.CHANNEL_NAME, msg)

            msg2 = i
            redis.rpush(config.MESSAGES_KEY_2, msg2)
            redis.publish(config.CHANNEL_NAME_2, msg2)
            #time.sleep(1)
            # predict the new data using chunk i
            kelas = clf.predict(df_arr[i])

            # create an empty dataframe
            dat = pd.DataFrame()
            # put the prediction results for chunk i into the class column
            dat['kel'] = kelas
            print("mapping to integer class")
            msg = str(datetime.now()) + '\t' + "mapping to integer class \n"
            redis.rpush(config.MESSAGES_KEY, msg)
            redis.publish(config.CHANNEL_NAME, msg)
            #time.sleep(1)
            # define a dictionary to map the class strings to integer class codes
            mymap = {'awan': 1, 'air': 2, 'tanah': 3, 'vegetasi': 4}
            # map the classes using the dictionary
            dat['kel'] = dat['kel'].map(mymap)

            # convert the dataframe column to an array
            band1Array = dat['kel'].values
            # convert to a numpy array of unsigned 8-bit integers to avoid memory errors
            band1Array = np.array(band1Array, dtype=np.uint8)
            print("extend to list")
            msg = str(datetime.now()) + '\t' + "extend to list \n"
            redis.rpush(config.MESSAGES_KEY, msg)
            redis.publish(config.CHANNEL_NAME, msg)
            #time.sleep(1)
            #kelasAllZeros[] = band1Array
            # append the numpy array to the prediction list
            kelasAll.extend(band1Array.tolist())
            # sanity-check the prediction array
            print(kelasAll[1:10])

        # delete all variables that are no longer used
        del df_arr
        del clf
        del kelas
        del dat
        del band1Array

        print("change list to np array")
        msg = str(datetime.now()) + '\t' + "change list to np array \n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)

        # convert the prediction list to a numpy array
        kelasAllArray = np.array(kelasAll, dtype=np.uint8)
        # delete the prediction list
        del kelasAll
        print("reshaping np array")
        msg = str(datetime.now()) + '\t' + "reshaping np array \n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)

        rasterarrayband6 = arcpy.RasterToNumPyArray(dataPath + "TOA_B3.TIF")
        # reshape the 1-d numpy array into two dimensions matching the raster layout
        band1 = np.reshape(kelasAllArray, (-1, rasterarrayband6[0].size))
        # cast the data type to unsigned integer
        band1 = band1.astype(np.uint8)
        del rasterarrayband6

        # load the band 6 raster for its projection and raster extent
        raster = arcpy.Raster(dataPath + "TOA_B6.TIF")
        inputRaster = dataPath + "TOA_B6.TIF"

        # get the spatial reference
        spatialref = arcpy.Describe(inputRaster).spatialReference
        # get the raster cell height and width
        cellsize1 = raster.meanCellHeight
        cellsize2 = raster.meanCellWidth
        # define the raster extent and the point at its lower-left corner
        extent = arcpy.Describe(inputRaster).Extent
        pnt = arcpy.Point(extent.XMin, extent.YMin)

        # delete what is no longer used
        del raster
        del kelasAllArray

        # save the raster
        print("numpy array to raster ..")
        msg = str(datetime.now()) + '\t' + "numpy array to raster .. \n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)

        # convert the numpy array to a raster with the attributes defined above
        out_ras = arcpy.NumPyArrayToRaster(band1, pnt, cellsize1, cellsize2)

        arcpy.CheckOutExtension("Spatial")
        print("define projection ..")
        msg = str(datetime.now()) + '\t' + "define projection ..\n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)

        # save the converted raster to the path defined above
        arcpy.CopyRaster_management(out_ras, outputPath)
        # define the projection using the spatial reference
        arcpy.DefineProjection_management(outputPath, spatialref)

        print("Majority Filter..")
        msg = str(datetime.now()) + '\t' + "majority filter..\n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)

        # allow outputs in outputPath to be overwritten
        arcpy.env.workspace = config.outputPath
        arcpy.env.overwriteOutput = True

        #majority filter
        arcpy.CheckOutExtension("Spatial")
        outMajFilt = MajorityFilter(outputPath, "FOUR", "MAJORITY")

        #Save the output
        outMajFilt.save(outputPath)

        # delete what is no longer used
        del out_ras
        del band1
        del spatialref
        del cellsize1
        del cellsize2
        del extent
        del pnt

        ########################### MASKING CLOUD AND BORDER #########################
        print("Masking Cloud")
        # load the provided cm (cloud mask) file that was downloaded
        mask = Raster(
            os.path.dirname(dataPath) + "/" + filename.split(".")[0] +
            "_cm.ers")
        # load the raw classification raster
        inRas = Raster(outputPath)
        # in the cm file, 1 = cloud, 2 = shadow, 11 = border
        # convert those values to 1 and everything else to 0
        #inRas_mask = Con((mask == 1), 1, Con((mask == 2), 1, Con((mask == 11), 1, 0)))
        inRas_mask = Con((mask == 1), 1,
                         Con((mask == 2), 1,
                             Con((mask == 11), 1,
                                 Con((mask == 3), 1,
                                     Con((mask == 4), 1,
                                         Con((mask == 5), 1,
                                             Con((mask == 6), 1,
                                                 Con((mask == 7), 1, 0))))))))

        # build a raster of the NoData cells from the condition above, so that NoData becomes 1
        # I also do not understand why the non-cloud cells end up as NoData
        mask2 = IsNull(inRas_mask)
        # if the raster value is 1 change it to 0, otherwise keep the value from the condition raster
        inRas2 = Con((mask2 == 1), 0, inRas_mask)
        # save the clean raster in which every value of 1 will be removed from the classification
        inRas2.save(
            os.path.dirname(outputPath) + "/" + filenameOut.split(".")[0] +
            "_mask.TIF")
        # if the raster value is 1 set it to NoData, otherwise keep the original classification raster
        inRas_mask2 = SetNull(inRas2 == 1, inRas)
        # save the raster cleaned of clouds and bad borders
        inRas_mask2.save(
            os.path.dirname(outputPath) + "/" + filenameOut.split(".")[0] +
            "_maskCloud.TIF")

        # delete the conditional variables that are no longer used
        del mask
        del mask2
        del inRas
        del inRas2
        del inRas_mask
        del inRas_mask2
        ############################## DONE ###########################################

        ####################### MASKING WITH THE INDONESIA SHP ##############################
        print("Masking with shp indonesia")
        arcpy.CheckOutExtension("Spatial")
        # open the indonesia shp file
        inMaskData = os.path.join(shpPath, "INDONESIA_PROP.shp")
        # open the cloud- and border-masked raster
        inRasData = Raster(
            os.path.dirname(outputPath) + "/" + filenameOut.split(".")[0] +
            "_maskCloud.TIF")
        # apply the masking with the shapefile
        try:
            outExtractByMask = ExtractByMask(inRasData, inMaskData)
            print(
                "Saving in: " + str(
                    os.path.dirname(outputPath) + "/" +
                    filenameOut.split(".")[0] + "_maskShp.TIF"))
            # save the masked result

            outExtractByMask.save(
                os.path.dirname(outputPath) + "/" + filenameOut.split(".")[0] +
                "_maskShp.TIF")
            finalPath = config.finalOutputPath + year + "/" + month + "/" + filenameNow.split(
                ".")[0]
            print finalPath
            if (os.path.exists(finalPath)):
                shutil.rmtree(finalPath)
            os.makedirs(finalPath)
            arcpy.CopyRaster_management(outExtractByMask,
                                        finalPath + "/" + filenameOut)
            # again delete the variables that are no longer used
            del inMaskData
            del inRasData
            del outExtractByMask
        except:
            print "diluar indonesia shp"
            finalPath = config.finalOutputPath + year + "/" + month + "/" + filenameNow.split(
                ".")[0]
            print finalPath
            if (os.path.exists(finalPath)):
                shutil.rmtree(finalPath)
            os.makedirs(finalPath)
            arcpy.CopyRaster_management(inRasData,
                                        finalPath + "/" + filenameOut)
            pass

        ########################## DONE ################################################

        ####################### SAVE A LOG OF THE DATA THAT HAS FINISHED PROCESSING ########################################
        log = pd.read_csv("logComplete.csv")
        liScene = log["scene"].tolist()
        liDate = log["dateComplete"].tolist()

        liScene.append(scene)
        liDate.append(str(datetime.now()))

        print(liScene)
        print(liDate)

        serScene = pd.Series(liScene)
        serDate = pd.Series(liDate)

        print(serScene)
        print(serDate)
        log2 = pd.DataFrame()
        log2["scene"] = serScene
        log2["dateComplete"] = serDate

        print(log2.head(5))
        log2.to_csv("logComplete.csv", index=False)

        del liScene
        del liDate
        del serScene
        del serDate
        del log
        del log2

        ##########################################################################################################
        # delete downloaded data in workstation
        dataFolder = os.listdir(config.dataPath)
        print dataFolder
        if (len(dataFolder) > 1):
            print config.dataPath + dataFolder[0]
            shutil.rmtree(config.dataPath + dataFolder[0])

        hasilFolder = os.listdir(config.outputPath)
        print hasilFolder
        if (len(hasilFolder) > 1):
            print config.outputPath + hasilFolder[0]
            shutil.rmtree(config.outputPath + hasilFolder[0])
        print("Finished ..")
        msg = str(datetime.now()) + '\t' + "Finished ... \n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)

        redis.delete(config.MESSAGES_KEY)
        redis.delete(config.MESSAGES_KEY_2)

        # snapshot of the local variables
        dictLocal = locals()
        # delete all local variables, hoping to free some memory
        # (note: this only unbinds the loop variable; it does not remove the locals themselves)
        for key in dictLocal.keys():
            del key
        clear_all()
        # other cleanup
        gc.collect()
        #shutil.rmtree(config.gdbPath)
        arcpy.Delete_management(config.gdbPathDefault)
        arcpy.Delete_management("in_memory")
        arcpy.env.overwriteOutput = True
Example #17
def Generate_Shapes_Straight(Created_Street_Output):
    '''Generate route shapes as straight lines between stops.'''

    arcpy.AddMessage(
        "Generating straight-line route shapes for routes of the following types, if they exist in your data:"
    )
    for rtype in route_type_Straight_textlist:
        arcpy.AddMessage(rtype)
    arcpy.AddMessage("(This step may take a while for large GTFS datasets.)")

    # If we didn't already create the output feature class with the Street-based routes, create it now.
    if not Created_Street_Output or not arcpy.Exists(outRoutesfc):
        arcpy.management.CreateFeatureclass(outGDB, outRoutesfcName,
                                            "POLYLINE", '', '', '', WGSCoords)
        arcpy.management.AddField(outRoutesfc, "Name", "TEXT")
        spatial_ref = WGSCoords
    else:
        spatial_ref = arcpy.Describe(outRoutesfc).spatialReference

# ----- Create polylines using stops as vertices -----

    arcpy.AddMessage("- Generating polylines using stops as vertices")

    # Set up insertCursors for output shapes polylines and stop sequences
    # Have to open an edit session to have two simultaneous InsertCursors.

    edit = arcpy.da.Editor(outGDB)
    ucursor = arcpy.da.InsertCursor(outRoutesfc, ["SHAPE@", "Name"])
    cur = arcpy.da.InsertCursor(outSequencePoints, [
        "SHAPE@X", "SHAPE@Y", "shape_id", "sequence", "CurbApproach", "stop_id"
    ])
    edit.startEditing()

    badStops = []

    for sequence in sequence_shape_dict:
        shape_id = sequence_shape_dict[sequence]
        route_id = sequence[0]
        route_type = RouteDict[route_id][4]
        if route_type in route_types_Straight or shape_id in NoRouteGenerated:
            sequence_num = 1
            # Add stop sequence to an Array of Points
            array = arcpy.Array()
            pt = arcpy.Point()
            for stop in sequence[1]:
                try:
                    stop_lat = stoplatlon_dict[stop][0]
                    stop_lon = stoplatlon_dict[stop][1]
                except KeyError:
                    if shape_id not in NoRouteGenerated:
                        # Don't repeat a warning if they already got it once.
                        badStops.append(stop)
                    sequence_num += 1
                    continue
                pt.X = float(stop_lon)
                pt.Y = float(stop_lat)
                # Add stop sequences to points fc for user to look at.
                cur.insertRow((float(stop_lon), float(stop_lat), shape_id,
                               sequence_num, CurbApproach, stop))
                sequence_num = sequence_num + 1
                array.add(pt)
            # Generate a Polyline from the Array of stops
            polyline = arcpy.Polyline(array, WGSCoords)
            # Project the polyline to the correct output coordinate system.
            if spatial_ref != WGSCoords:
                polyline = polyline.projectAs(spatial_ref)
            # Add the polyline to the Shapes feature class
            ucursor.insertRow((polyline, shape_id))
    del ucursor
    del cur

    edit.stopEditing(True)

    if badStops:
        badStops = list(set(badStops))
        messageText = "Your stop_times.txt lists times for the following stops which are not included in your stops.txt file. These stops will be ignored. "
        if ProductName == "ArcGISPro":
            messageText += str(badStops)
        else:
            messageText += unicode(badStops)
        arcpy.AddWarning(messageText)
Example #18
# (the definition of the rauhList list is truncated in this snippet)
]

# set the processing extent
arcpy.env.extent = arcpy.Extent(xmin, ymin, xmax, ymax,
                                "ETRS_1989_UTM_Zone_32N")
# merge within the current extent
arcpy.Merge_management(rauhList, "mergetemp", "")

# create the feature class and the feature used for clipping
arcpy.CreateFeatureclass_management(
    r"X:\05_Basisdaten\Rauhigkeiten\Corine2006_V17.gdb", "cliptemp", "POLYGON",
    "", "", "", sr)

# points to an array
array = arcpy.Array([
    arcpy.Point(xmin, ymin),
    arcpy.Point(xmin, ymax),
    arcpy.Point(xmax, ymax),
    arcpy.Point(xmax, ymin)
])

# create the polygon
clipPoly = arcpy.Polygon(array)

# create the InsertCursor
cur = arcpy.da.InsertCursor("cliptemp", ["Shape@"])

# insert the row
cur.insertRow([clipPoly])

# delete the cursor
del cur
Example #19
#create list of bearings
angles = range(0, 360, angle)


for ang in angles:
    # calculate offsets with  trig
    angle = float(int(ang))
    (disp_x, disp_y) = (distance * sin(radians(angle)), distance * cos(radians(angle)))
    (end_x, end_y) = (origin_x + disp_x, origin_y + disp_y)
    (end2_x, end2_y) = (origin_x + disp_x, origin_y + disp_y)

    cur = arcpy.InsertCursor(OutputFeature)
    lineArray = arcpy.Array()

    # start point
    start = arcpy.Point()
    (start.ID, start.X, start.Y) = (1, origin_x, origin_y)
    lineArray.add(start)

    # end point
    end = arcpy.Point()
    (end.ID, end.X, end.Y) = (2, end_x, end_y)
    lineArray.add(end)

    # write our fancy feature to the shapefile
    feat = cur.newRow()
    feat.shape = lineArray
    cur.insertRow(feat)

	# yes, this shouldn't really be necessary...
    lineArray.removeAll()
Example #20
    def update_position(self, lon, lat):
        self.ship_position = arcpy.Point(lon, lat)
Example #21
            # Step through each vertex in the feature
            #
            for pnt in part:
                if pnt:
                    # Print x,y coordinates of current point
                    #
                    points.append((pnt.X, pnt.Y))
                    #print("{0}, {1}".format(pnt.X, pnt.Y))
                else:
                    # If pnt is None, this represents an interior ring
                    #
                    pass
                    #print("Interior Ring:")
            partnum += 1
        try:

            circledef = SEC.make_circle(points)
            cp = arcpy.PointGeometry(arcpy.Point(circledef[0], circledef[1]))
            scc = cp.buffer(circledef[2])
            #HOW TO CALCULATE???
            # geo is assumed to be the feature's geometry, obtained earlier in the truncated part of this snippet
            CIRCLE = geo.area / scc.area
            print row[0], CIRCLE
            row[1] = scc
            cursor.updateRow(row)

        except:
            print str(row[0]) + " Did not work"

del cursor, row
Example #22
def create_thiessen_points(study_area, side_length, output_fc):
    """Creates points spaced such that Thiessen polygons will be hexagons.

    Arguments:
        study_area -- feature class defining area of interest
        side_length -- length of regular hexagon side
        output_fc -- name and location of output feature class

    Remarks:
        Hexagons can be created for Thiessen polygons built from points spaced
        in a pattern like the one below.

        *   *   *   *
          *   *   *
        *   *   *   *
          *   *   *
        *   *   *   *

    """
    
    # Validate inputs
    count = int(str(arcpy.GetCount_management(study_area)))
    if count == 0:
        arcpy.AddError('Error: No features found in ' + str(study_area))
        return
    side_length = float(side_length)
    if side_length <= 0:
        arcpy.AddError('Error: Hexagon side length must be greater than zero.')
        return

    # Determine point spacing
    dx = 3.0 * side_length
    dy = side_length / 2.0 * math.sqrt(3.0)
    indent = dx / 2

    # Get the extent of the study area.
    # If in ArcMap, make sure we use feature coordinates, not map coordinates.
    desc = arcpy.Describe(study_area)
    if desc.dataType == "FeatureLayer":
        desc = arcpy.Describe(desc.featureClass.catalogPath)
    ext = desc.extent

    # Determine number of rows and columns.  Add extra just to be sure.
    xmin = ext.XMin - dx
    ymin = ext.YMin - dy * 3.0
    xmax = ext.XMax + dx
    ymax = ext.YMax + dy * 3.0
    num_rows = int((ymax - ymin) / dy) + 1
    num_cols = int((xmax - xmin) / dx) + 2

    # Create the output feature class
    spatial_ref = desc.spatialReference
    workspace = os.path.dirname(output_fc)
    fc_name = os.path.basename(output_fc)
    fc = arcpy.CreateFeatureclass_management(
        workspace, fc_name, "POINT", "", "", "", spatial_ref)

    # Populate output features
    arcpy.AddMessage('Creating ' + str(num_rows * num_cols) + ' points...')
    cursor = arcpy.InsertCursor(output_fc)
    feature = None

    try:
        y = ymin
        for r in range(num_rows):
            x = xmin - indent / 2
            if r % 2 != 0:
                x += indent

            for c in range(num_cols):
                feature = cursor.newRow()
                p = arcpy.Point()
                p.X = x
                p.Y = y
                feature.shape = p
                cursor.insertRow(feature)
                x += dx

            y += dy
                
    finally:
        if feature:
            del feature
        if cursor:
            del cursor
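
# A hypothetical usage sketch for create_thiessen_points (the paths and side length
# below are placeholders, not values from the original script). The generated points
# can then be handed to the Create Thiessen Polygons tool (Advanced license) to
# obtain a hexagonal mesh over the study area.
import arcpy

study_area = r"C:\data\work.gdb\study_area"   # hypothetical input polygon feature class
hex_points = r"C:\data\work.gdb\hex_points"   # hypothetical output point feature class
create_thiessen_points(study_area, 500.0, hex_points)
arcpy.CreateThiessenPolygons_analysis(hex_points, r"C:\data\work.gdb\hexagons")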
Example #23
0
def switchPointsToNodes(routes, county_indexing, nodes_data):
    """
    change the x-y coordinates for non-bus stops to actual nodes
    return a list of node entries to append to the node file
    """
    #return: stop_nodes = ['N','X','Y','COUNTY','MODE','TYPE','ID']
    point_list = {}
    point_mode = {}
    id = 100  # to ensure we aren't accidentally matching ids
    for route in routes:
        for point in routes[route][2]:
            if not point in point_list:
                point_list[point] = id
                point_mode[id] = {}
                id += 1
            point_mode[point_list[point]][routes[route][
                3]] = None  # will be used to check if nodes are shared by different modes

    arcpy.env.workspace = WORKING_GDB
    arcpy.env.overwriteOutput = True
    pt = arcpy.Point()
    out_coordinate_system = os.path.join(arcpy.GetInstallInfo()['InstallDir'],
                                         NAD_83_DIRECTORY)
    spatial_ref = arcpy.SpatialReference(out_coordinate_system)
    pt_geoms = []
    point_order = []
    for point in point_list:
        pt.X = point[0]
        pt.Y = point[1]
        point_order.append(point_list[point])
        pt_geoms.append(arcpy.PointGeometry(pt, spatial_ref))
    nodes_layer = 'non_bus_nodes'
    if arcpy.Exists(nodes_layer):
        arcpy.Delete_management(nodes_layer)
    arcpy.CopyFeatures_management(pt_geoms, nodes_layer)

    node_map = {}
    stop_nodes = []
    node_layer = 'node_layer'
    arcpy.MakeFeatureLayer_management(nodes_layer, node_layer)
    county_layer = 'county_layer'
    nc_layer = 'node_county_layer'
    neg_counter = -100  #placeholder for old ids
    for i in range(len(COUNTY_MAP)):
        arcpy.MakeFeatureLayer_management(
            SOURCE_COUNTY, county_layer,
            COUNTY_ID_FIELD + " = '" + COUNTY_MAP[i] + "'")
        arcpy.SelectLayerByLocation_management(node_layer, 'WITHIN',
                                               county_layer)
        arcpy.MakeFeatureLayer_management(node_layer, nc_layer)
        arcpy.AddXY_management(nc_layer)
        rows = arcpy.SearchCursor(nc_layer, '', '', 'OBJECTID;POINT_X;POINT_Y')
        for row in rows:
            id = point_order[row.OBJECTID - 1]
            x = row.POINT_X
            y = row.POINT_Y
            n = county_indexing[i]
            county_indexing[i] += 1
            node_map[id] = n
            modes = point_mode[id]
            if len(modes) > 1:
                print 'more than one mode for ' + str(
                    (x, y)) + ' : ' + str(modes)
            stop_nodes.append([n, x, y, i + 1, modes.keys()[0], 0, 0])
            nodes_data[neg_counter] = [x, y, n, None]
            neg_counter -= 1

    for route in routes:
        for point in range(len(routes[route][2])):
            routes[route][2][point] = node_map[point_list[routes[route][2]
                                                          [point]]]

    return stop_nodes
def import_gpx(gpx_file, wpt_fc, trk_fc):

    GCS_WGS_84 = arcpy.SpatialReference(4326)
    GCS_TRANSFORMS = 'WGS_1984_(ITRF08)_To_NAD_1983_2011; NAD_1927_To_NAD_1983_NADCON'

    arcpy.env.geographicTransformations = arcpy.env.geographicTransformations or GCS_TRANSFORMS
    arcpy.AddMessage('Geographic Transformations: %s' % arcpy.env.geographicTransformations)

    scratch = arcpy.env.scratchWorkspace
    arcpy.env.addOutputsToMap = False

    WPT_FIELDS = [
        ('ELEVATION', 'gpx:ele'),
        ('TIME', 'gpx:time'),
        ('NAME', 'gpx:name'),
        ('DESCRIPTION', 'gpx:desc'),
        ('SYMBOL', 'gpx:sym'),
        ('TYPE', 'gpx:type'),
        ('SAMPLES', 'gpx:extensions/wptx1:WaypointExtension/wptx1:Samples')
    ]

    ns = {
        'gpx': 'http://www.topografix.com/GPX/1/1',
        'gpxx': 'http://www.garmin.com/xmlschemas/GpxExtensions/v3',
        'wptx1': 'http://www.garmin.com/xmlschemas/WaypointExtension/v1',
        'ctx': 'http://www.garmin.com/xmlschemas/CreationTimeExtension/v1',
    }

    etree.register_namespace('', 'http://www.topografix.com/GPX/1/1')
    etree.register_namespace('gpxx', 'http://www.garmin.com/xmlschemas/GpxExtensions/v3')
    etree.register_namespace('wptx1', 'http://www.garmin.com/xmlschemas/WaypointExtension/v1')
    etree.register_namespace('ctx', 'http://www.garmin.com/xmlschemas/CreationTimeExtension/v1')

    gpx = etree.parse(gpx_file).getroot()

    sr = arcpy.env.outputCoordinateSystem

    if wpt_fc:
        create_points_feature_class(wpt_fc, sr)

        waypoints = []
        for wpt in gpx.findall('gpx:wpt', ns):
            x, y = wpt.get('lon'), wpt.get('lat')
            row = [arcpy.PointGeometry(arcpy.Point(x, y), GCS_WGS_84).projectAs(sr)]
            for field, tag in WPT_FIELDS:
                elem = wpt.find(tag, ns)

                if elem is None:
                    row.append(None)
                elif field == 'ELEVATION':
                    row.append('%0.4f' % (float(elem.text) / sr.metersPerUnit))
                elif field == 'NAME' and elem.text.isdigit():
                    row.append('%d' % int(elem.text))
                else:
                    row.append(elem.text)
            waypoints.append(row)

        if waypoints:
            fields = ['SHAPE@'] + [f[0] for f in WPT_FIELDS]
            cur = arcpy.da.InsertCursor(wpt_fc, fields)
            for row in waypoints:
                cur.insertRow(row)
            del cur

    if trk_fc:

        # idle time between trkpts to start a new track segment
        TRKSEG_IDLE_SECS = 600

        tracks = []
        track_num = 0
        for trk in gpx.findall('gpx:trk', ns):
            track_num += 1
            elem = trk.find('gpx:name', ns)
            if elem is None:
                track_name = 'track-%04d' % track_num
            else:
                track_name = elem.text

            track_pts = []
            dt_last = None
            segment_num = 0
            for trkpt in trk.findall('./gpx:trkseg/gpx:trkpt', ns):
                x, y = trkpt.get('lon'), trkpt.get('lat')
                pt = arcpy.PointGeometry(arcpy.Point(x, y), GCS_WGS_84).projectAs(sr).firstPoint

                # See if there's a track point time
                elem = trkpt.find('gpx:time', ns)
                if elem is None:
                    dt_last = None
                else:
                    dt = utils.default_tzinfo(parser.parse(elem.text), tz.UTC)
                    if dt_last and (dt - dt_last).seconds > TRKSEG_IDLE_SECS:
                        # start a new segment
                        if len(track_pts) > 1:
                            segment_num += 1
                            if segment_num > 1:
                                segment_name = '%s SEG-%04d' % (track_name, segment_num)
                            else:
                                segment_name = track_name
                            geom = arcpy.Polyline(arcpy.Array(track_pts), sr)
                            tracks.append([geom , segment_name, len(track_pts)])
                        else:
                            arcpy.AddMessage('Skipping track "%s": track_pts=%d' % (track_name, len(track_pts)))
                        track_pts = []
                    dt_last = dt

                track_pts.append(pt)

            if len(track_pts) > 1:
                segment_num += 1
                if segment_num > 1:
                    segment_name = '%s SEG-%04d' % (track_name, segment_num)
                else:
                    segment_name = track_name
                geom = arcpy.Polyline(arcpy.Array(track_pts), sr)
                tracks.append([geom, segment_name, len(track_pts)])
            else:
                arcpy.AddMessage('Skipping track "%s": track_pts=%d' % (track_name, len(track_pts)))

        if tracks:
            temp_fc = os.path.join(scratch, os.path.basename(trk_fc) + '_Temp')
            if sr is None:
                arcpy.AddError('Geoprocessing environment not set: outputCoordinateSystem')
                return None

            fc = mgmt.CreateFeatureclass(*os.path.split(temp_fc), geometry_type='POLYLINE', spatial_reference=sr)
            mgmt.AddField(fc, 'NAME', 'TEXT', field_length=64)
            mgmt.AddField(fc, 'POINTS', 'LONG')

            cur = arcpy.da.InsertCursor(fc, ('SHAPE@', 'NAME', 'POINTS'))
            for row in tracks:
                cur.insertRow(row)
            del cur

            mgmt.CopyFeatures(temp_fc, trk_fc)
            del fc
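
# A hypothetical usage sketch for import_gpx (paths and the spatial reference are
# placeholders, not from the original module; the function also relies on the
# module's own helpers such as create_points_feature_class and its etree/parser
# imports). outputCoordinateSystem and scratchWorkspace must be set beforehand,
# since the function reads both from the environment.
import arcpy

arcpy.env.outputCoordinateSystem = arcpy.SpatialReference(26911)  # NAD83 / UTM zone 11N
arcpy.env.scratchWorkspace = r"C:\data\scratch.gdb"               # hypothetical scratch workspace
import_gpx(r"C:\data\hike.gpx",
           wpt_fc=r"C:\data\work.gdb\waypoints",
           trk_fc=r"C:\data\work.gdb\tracks")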
Example #25
0
def createExtentBoxes(mxdPath):
    try:
        # Restore Page Layout (from PageLayoutElements table) before running this script.
        mxd = arcpy.mapping.MapDocument(mxdPath)
        ddp = mxd.dataDrivenPages
        pageName = str(ddp.pageRow.getValue(ddp.pageNameField.name))
        df_lst = arcpy.mapping.ListDataFrames(mxd)

        # Set the main dataframe variable.
        try:
            MDF = arcpy.mapping.ListDataFrames(mxd, "MDF")[0]
        except IndexError:
            MDF = arcpy.mapping.ListDataFrames(mxd)[0]

        log.info("MXD path: {}".format(mxd.filePath))
        log.info("Page Name: {}".format(pageName))

        onMapDFs = []
        # List of data frames on the current page.
        for df in df_lst:
            if (df.elementPositionX > 0 and df.elementPositionX < mxd.pageSize[0] and df.elementPositionY > 0 and df.elementPositionY < mxd.pageSize[1]):
                onMapDFs.append(df)

        feature_info = []

        XMin = MDF.extent.XMin
        YMin = MDF.extent.YMin
        XMax = MDF.extent.XMax
        YMax = MDF.extent.YMax
        # A list of features and coordinate pairs
        df_info = [[XMin, YMin], [XMax, YMin], [XMax, YMax], [XMin, YMax]]
        feature_info.append(df_info)

        # A list that will hold each of the Polygon objects
        features = []
        for feature in feature_info:
            # Create a Polygon object based on the array of points
            # Append to the list of Polygon objects
            features.append(arcpy.Polygon(arcpy.Array([arcpy.Point(*coords) for coords in feature])))

        # Persist a copy of the Polygon objects using CopyFeatures
        poly_filename = "DF_Polygons_{}".format(pageName)
        parentDir = os.path.abspath(os.path.join(os.path.dirname(mxd.filePath), os.pardir))
        edDir = os.path.join(parentDir, pageName)
        outDir = os.path.join(edDir, "anno_fgdb")
        if not os.path.exists(outDir):
            os.makedirs(outDir)
        workspace = arcpy.env.workspace = outDir
        log.info("Output directory set to {}".format(outDir))

        poly_shp = os.path.join(workspace, poly_filename)
        for filename in glob.glob(poly_shp + "*"):
            os.remove(filename)

        arcpy.CopyFeatures_management(features, poly_filename)

        removeFGDBs(workspace)
        createFGDBs(onMapDFs, workspace)

        del coords, feature_info, features, feature, poly_filename, outDir, mxd, df_info, XMax, XMin, YMax, YMin, ddp, pageName
    except Exception as e:
        log.info("An error occured: {}".format(e))
Example #26
0
curSfc = arcpy.da.SearchCursor(fc,["SHAPE@XY","FID"])


def retFID(X):
    sql = (""""FID" = {0}""").format(X)
    arcpy.AddMessage("Creating line for " + sql)
    # use the cursor as a context manager so it is released even when returning early
    with arcpy.da.SearchCursor(fc, ["FID", "SHAPE@XY"], sql) as cur:
        for i in cur:
            return i[1]
#=====================Create polyline=============================#
for m,n in pair:
    coordS = retFID(m)
    coordE = retFID(n)
    array = arcpy.Array([arcpy.Point(coordS[0], coordS[1]),arcpy.Point(coordE[0], coordE[1])])
    
    polyline = arcpy.Polyline(array)
    
    curI.insertRow([polyline])
    array.removeAll()            
del curI,curSfc
arcpy.Delete_management(distance)
try:
    shutil.rmtree(r"C:\temp")
except:
    pass

print "Completed Line Generation"

Example #27
0
def f7():
    for y in range(2015, 2018):
        path = 'F:/Test/Data/LST/'
        path2 = path + 'LTN8D/' + str(y) + '/'  # LTD8D
        files = os.listdir(path2)
        Input_file = []
        for i in range(0, len(files)):
            if os.path.splitext(files[i])[1] == '.tif':  # collect the .tif files
                Input_file.append(path2 + files[i])

        mFiles = [[] for i in range(0, 12)]
        for i in range(0, len(Input_file)):
            if (int(Input_file[i][41:44]) > 0
                    and int(Input_file[i][41:44]) < 31):
                mFiles[0].append(Input_file[i])
            elif (int(Input_file[i][41:44]) >= 31
                  and int(Input_file[i][41:44]) < 53):
                mFiles[1].append(Input_file[i])
            elif (int(Input_file[i][41:44]) >= 53
                  and int(Input_file[i][41:44]) < 60):
                mFiles[1].append(Input_file[i])
                mFiles[2].append(Input_file[i])
            elif (int(Input_file[i][41:44]) >= 60
                  and int(Input_file[i][41:44]) < 83):
                mFiles[2].append(Input_file[i])
            elif (int(Input_file[i][41:44]) >= 83
                  and int(Input_file[i][41:44]) < 91):
                mFiles[2].append(Input_file[i])
                mFiles[3].append(Input_file[i])
            elif (int(Input_file[i][41:44]) >= 91
                  and int(Input_file[i][41:44]) < 121):
                mFiles[3].append(Input_file[i])
            elif (int(Input_file[i][41:44]) >= 121
                  and int(Input_file[i][41:44]) < 152):
                mFiles[4].append(Input_file[i])
            elif (int(Input_file[i][41:44]) >= 152
                  and int(Input_file[i][41:44]) < 174):
                mFiles[5].append(Input_file[i])
            elif (int(Input_file[i][41:44]) >= 174
                  and int(Input_file[i][41:44]) < 182):
                mFiles[5].append(Input_file[i])
                mFiles[6].append(Input_file[i])
            elif (int(Input_file[i][41:44]) >= 182
                  and int(Input_file[i][41:44]) < 206):
                mFiles[6].append(Input_file[i])
            elif (int(Input_file[i][41:44]) >= 206
                  and int(Input_file[i][41:44]) < 213):
                mFiles[6].append(Input_file[i])
                mFiles[7].append(Input_file[i])
            elif (int(Input_file[i][41:44]) >= 213
                  and int(Input_file[i][41:44]) < 236):
                mFiles[7].append(Input_file[i])
            elif (int(Input_file[i][41:44]) >= 236
                  and int(Input_file[i][41:44]) < 244):
                mFiles[7].append(Input_file[i])
                mFiles[8].append(Input_file[i])
            elif (int(Input_file[i][41:44]) >= 244
                  and int(Input_file[i][41:44]) < 268):
                mFiles[8].append(Input_file[i])
            elif (int(Input_file[i][41:44]) >= 268
                  and int(Input_file[i][41:44]) < 274):
                mFiles[9].append(Input_file[i])
            elif (int(Input_file[i][41:44]) >= 274
                  and int(Input_file[i][41:44]) < 297):
                mFiles[9].append(Input_file[i])
            elif (int(Input_file[i][41:44]) >= 297
                  and int(Input_file[i][41:44]) < 305):
                mFiles[9].append(Input_file[i])
            elif (int(Input_file[i][41:44]) >= 305
                  and int(Input_file[i][41:44]) < 327):
                mFiles[10].append(Input_file[i])
            elif (int(Input_file[i][41:44]) >= 327
                  and int(Input_file[i][41:44]) < 335):
                mFiles[10].append(Input_file[i])
                mFiles[11].append(Input_file[i])
            elif (int(Input_file[i][41:44]) >= 335
                  and int(Input_file[i][41:44]) < 366):
                mFiles[11].append(Input_file[i])
        # for i in range(0, 12):
        #     print len(mFiles[i])
        for m in range(0, 12):
            ras = Raster(mFiles[m][0])
            arcpy.env.overwriteOutput = True
            arcpy.env.outputCoordinateSystem = ras

            outWidth = ras.meanCellWidth
            outHeight = ras.meanCellHeight
            lowerLeft = arcpy.Point(ras.extent.XMin, ras.extent.YMin)

            arrTemp = arcpy.RasterToNumPyArray(ras)

            rows = arrTemp.shape[0]
            cols = arrTemp.shape[1]
            # print rows,'\n',cols
            arr = np.zeros((len(mFiles[m]), rows, cols), np.float)
            for j in range(0, len(mFiles[m])):
                temp = Raster(mFiles[m][j])
                arr[j] = arcpy.RasterToNumPyArray(temp)

            arrSum = np.zeros((rows, cols), np.float)

            for r in range(0, rows):
                for c in range(0, cols):
                    counts = 0
                    for j in range(0, len(mFiles[m])):
                        if (arr[j, r, c] >= 7500):
                            arrSum[r, c] = arrSum[r, c] + arr[j, r, c]
                            counts = counts + 1
                    if (counts != 0):
                        arrSum[r, c] = arrSum[r, c] / counts

            for i in range(0, rows):
                for j in range(0, cols):
                    while (arrSum[i, j] == 0):
                        direction = random.randint(1, 4)
                        if direction == 1:
                            if i - 1 > 0:
                                arrSum[i, j] = arrSum[i - 1, j]
                            else:
                                arrSum[i, j] = arrSum[i, j]
                        elif direction == 2:
                            if j + 1 < cols:
                                arrSum[i, j] = arrSum[i, j + 1]
                            else:
                                arrSum[i, j] = arrSum[i, j]
                        elif direction == 3:
                            if i + 1 < rows:
                                arrSum[i, j] = arrSum[i + 1, j]
                            else:
                                arrSum[i, j] = arrSum[i, j]
                        else:
                            if j - 1 > 0:
                                arrSum[i, j] = arrSum[i, j - 1]
                            else:
                                arrSum[i, j] = arrSum[i, j]

            tempRaster = arcpy.NumPyArrayToRaster(arrSum * 0.02 - 273.15,
                                                  lowerLeft, outWidth,
                                                  outHeight, -273.15)
            tempRaster.save('F:/Test/Paper180829/Data/LTN/monthlyLTN/' +
                            'LTN' + str(y * 100 + m + 1) +
                            '.tif')  # monthlyLTD

    print 'la la la'
Example #28
0
    #Add a field to hold the scale
    fldLst = [f.name for f in arcpy.ListFields(ddp.indexLayer.dataSource)]
    if "DDP_Scale" not in fldLst:
        arcpy.AddField_management(ddp.indexLayer, "DDP_Scale", "LONG")

    #For each shape in the viewport feature class...
    counter = 1
    while counter <= ddp.pageCount:
        ddp.currentPageID = counter
        arcpy.AddMessage("...Updating page " + str(ddp.currentPageID))
        ddp.refresh()

        #Grab the extent of the data frame
        curExtent = ddp.dataFrame.extent
        pnt1 = arcpy.Point(curExtent.XMin, curExtent.YMin)
        pnt2 = arcpy.Point(curExtent.XMin, curExtent.YMax)
        pnt3 = arcpy.Point(curExtent.XMax, curExtent.YMax)
        pnt4 = arcpy.Point(curExtent.XMax, curExtent.YMin)
        array = arcpy.Array([pnt1, pnt2, pnt3, pnt4])
        polygon = arcpy.Polygon(array)

        #And the then use it to update the shape
        rows = arcpy.da.UpdateCursor(ddp.indexLayer,["SHAPE@","DDP_Scale"], '"FID" = ' + str(ddp.pageRow.FID))
        # ---> to make this generally useful, you'll have to write something to find the object id field and verify proper syntax to call it based on file type
        for row in rows:
            rows.updateRow([polygon, ddp.dataFrame.scale])
        counter = counter + 1

    arcpy.AddMessage("...Your layer has been updated.")
    arcpy.AddMessage("...I backed up the old one here: " + os.path.join(outputPath,outputName))
Example #29
0
def makerowcoord(rown, maxval):
    return float(maxval - (rown * psize))


def makecolcoord(coln, minval):
    return float(minval + (coln * psize))


Vmakerowcoord = numpy.vectorize(makerowcoord)
Vmakecolcoord = numpy.vectorize(makecolcoord)
nrows, ncols = numpy.arange(rows), numpy.arange(cols)
rowcoord = Vmakerowcoord(nrows, DataExtent[3])
colcoord = Vmakecolcoord(ncols, DataExtent[0])
nrows, ncols = 0, 0

# setting the bottom-left corner point for later georeferencing
pnt = arcpy.Point(colcoord[0] - (.5 * psize), rowcoord[-1] - (.5 * psize))

#Setting minimum pixels required by each interpolation method
sampledict = {"linear": 4, "cubic": 9}
if SampleMethodInput == "nearest":
    SampleMethod = "linear"
else:
    SampleMethod = SampleMethodInput
NeedForInterp = sampledict[SampleMethod]

#For each band an output array is created.
#The output array is written in blocks of size "blockSize" by "blockSize";
#the extents of these blocks correspond to output grid coordinates.
#Data points whose lat/longs fall within the extent of a block (plus some slop)
#are retrieved from the input HDF.
#These collected data points are then used to interpolate and fill the block.
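
# A minimal, self-contained sketch of the block-wise idea described above (an
# illustration under assumed names, not the original implementation): for one output
# block, keep only the scattered input points that fall inside the block plus a
# margin of "slop" cells, then resample them onto the block's regular grid with
# scipy.interpolate.griddata.
import numpy
from scipy.interpolate import griddata

def interpolate_block(xs, ys, vals, ulx, uly, psize, blockSize, method="linear", slop=2):
    """Interpolate scattered points (xs, ys, vals) onto one blockSize x blockSize
    block whose upper-left corner is (ulx, uly) on a grid with cell size psize."""
    margin = slop * psize
    inblock = ((xs >= ulx - margin) & (xs <= ulx + blockSize * psize + margin) &
               (ys <= uly + margin) & (ys >= uly - blockSize * psize - margin))
    if inblock.sum() < 4:                       # too few samples to interpolate
        return numpy.full((blockSize, blockSize), numpy.nan)
    # cell-center coordinates of the block's output grid
    gx = ulx + (numpy.arange(blockSize) + 0.5) * psize
    gy = uly - (numpy.arange(blockSize) + 0.5) * psize
    grid_x, grid_y = numpy.meshgrid(gx, gy)
    return griddata((xs[inblock], ys[inblock]), vals[inblock],
                    (grid_x, grid_y), method=method)
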
def createpolyfeatureclass(mainpolylist, pfcadd, postbottomboxlist, minsfirst,
                           minslast, maxfirst, maxlast, prior):
    #postbottomboxlist=[ [point1coord,point2coord,point 1 borehole or mid (0 or 1) ,point 2 borehole or mid (0 or 1), polyline] ,... ]
    allpolies_temp = list()
    homeadd = joinadd(expanduser("~"), "arcgistemp")
    #################
    'making  priorpolylist [ priority number, [polyline1,polyline2,...] ]'
    for i in range(1, len(mainpolylist) - 1):
        for j in mainpolylist[i][2]:

            allpolies_temp.append([j[0], j[3], j[4]])
            #allpolies=[prio_num,p1 coord,p2 coord]
    allpolies_temp.append(["firstbhline", maxfirst, minsfirst[1]])

    #pointlist=[maxfirst,minsfirst[1]]

    #pointlist=[maxlast,minslast[1]]
    allpolies_temp.append(["lastbhline", maxlast, minslast[1]])

    #######
    priorpolylist_temp = list()

    for n in allpolies_temp:
        if [n[0], []] not in priorpolylist_temp:
            priorpolylist_temp.append([n[0], []])

    for m in priorpolylist_temp:
        for b in allpolies_temp:
            if b[0] == m[0]:

                m[1].append([b[1], b[2]])
    #################################
    #################################
    #priorpolylist=[ [prio_num,[[p1,p2],[p1,p2],...]],... ]
    #this is for solving  arcgis 3 dimension dissolve Dissolve_management malfunction:
    priorpolylist = list()
    '''zhigh=None
    zdown=None

    for m in priorpolylist_temp:

        for pnts in m[1]:
            if zhigh==None or pnts[0][2]>zhigh:
                zhigh=pnts[0][2]
            elif zdown==None or pnts[0][2]<zdown:
                zdown=pnts[0][2]
            if zhigh==None or pnts[1][2]>zhigh:
                zhigh=pnts[1][2]
            elif zdown==None or pnts[1][2]<zdown:
                zdown=pnts[1][2]
    print "zdown,zhigh", zdown,zhigh
    print 'priorpolylist_temp
    for mm in priorpolylist_temp:
        print mm[0]
        for mmm in mm[1]:
            print mmm
            mmm[0]=copy.deepcopy(mmm[0])
            mmm[1]=copy.deepcopy(mmm[1])'''
    #priorpolylist=[ [prio_num,[[p1,p2],[p1,p2],...]],... ]
    for m in priorpolylist_temp:
        polss = list()
        for pnts in m[1]:
            '''if zdown!=zhigh  :

                pnts[0][0]=pnts[0][0]+(float(pnts[0][2]-zhigh)/(zdown-zhigh))*0.01
                pnts[0][1]=pnts[0][1]+(float(pnts[0][2]-zhigh)/(zdown-zhigh))*0.01

                pnts[1][0]=pnts[1][0]+(float(pnts[1][2]-zhigh)/(zdown-zhigh))*0.01
                pnts[1][1]=pnts[1][1]+(float(pnts[1][2]-zhigh)/(zdown-zhigh))*0.01'''
            polylinee = arcpy.Polyline(
                arcpy.Array(
                    [arcpy.Point(*coords) for coords in [pnts[0], pnts[1]]]),
                "Unknown", True, False)
            polss.append(polylinee)

        if type(m[0]) == float:
            m[0] = int(m[0])
        priorpolylist.append([m[0], polss])

    #########################################
    #########################################
    #########################################

    #print 'priorpolylist' , priorpolylist

    #######
    tempost = list()
    for kk in postbottomboxlist:
        tempost.append(kk[4])
    priorpolylist.append(["post_bottombox", tempost])
    ######
    con = 0
    polylineadlist = [pfcadd]
    polylineadlistmerge = []
    for ii in range(0, len(priorpolylist)):
        con = con + 1
        temppolyname = "temppoly" + str(con)
        #mypolyname=joinadd(homeadd,"mypoly")+str(con)
        #plnslayertempname=joinadd("in_memory","plnslayertemp"+str(con))+"_"+str(priorpolylist[ii][0])
        #plnslayername=joinadd(homeadd,"plnslayer")+str(con)+".shp"
        #mainplnsadd=joinadd(homeadd,"mainplns")+str(con)+".shp"
        ############
        arcpy.CreateFeatureclass_management("in_memory", temppolyname,
                                            "POLYLINE", "", "DISABLED",
                                            "ENABLED", "")
        cursor = arcpy.da.InsertCursor(joinadd("in_memory", temppolyname),
                                       ["SHAPE@"])
        for t in priorpolylist[ii][1]:
            cursor.insertRow([t])
        del cursor

        #arcpy.Dissolve_management (joinadd("in_memory",temppolyname), plnslayertempname, "", "", "", "UNSPLIT_LINES")
        #polylineadlist.append(plnslayertempname)
        ########test 2019##########
        polylineadlist.append(joinadd("in_memory", temppolyname))
        ######################
        #arcpy.Delete_management(temppolyname)

    arcgistempdb = joinadd(homeadd, "arcgistempdb.gdb")

    arcpy.CreateFileGDB_management(homeadd, "arcgistempdb.gdb")
    arcpy.FeatureClassToGeodatabase_conversion(polylineadlist, arcgistempdb)
    #for iii in polylineadlist:
    #    arcpy.Delete_management(iii)

    return arcgistempdb