Example #1
def start(targetpath):

    arcpy.AddMessage("8_重新生成union字段")  # "8: regenerate the union field"
    arcpyDeal.deleteFields(targetpath, ["unionfzh"])
    arcpyDeal.ensureFields(targetpath, ["unionfzh"])

    arcpy.AddMessage("8_收集原有分组号")  # "8: collect the original group numbers"
    fzhlist, targetValueDict = searchALL(targetpath)

    arcpy.AddMessage("8_收集融合分组号")  # "8: collect the merged group numbers"
    unionfzhlist = []
    result = arcpy.GetCount_management(targetpath)
    count = int(result.getOutput(0))
    arcpy.SetProgressor('step', '8_收集分组号', 0, count, 1)

    mergeFzhList = collecteUnionFzh(targetpath, fzhlist, unionfzhlist,
                                    targetValueDict)

    arcpy.AddMessage("8_数据更新")  # "8: update the data"
    result = arcpy.GetCount_management(targetpath)
    count = int(result.getOutput(0))
    arcpy.SetProgressor('step', '8_数据更新', 0, count, 1)

    updateTarget(targetpath, unionfzhlist, targetValueDict, mergeFzhList)
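Most snippets on this page share the same skeleton: count the rows, start a step progressor, advance it once per row, and reset it at the end. A minimal stand-alone sketch of that pattern (the feature class path and field name below are placeholders):

import arcpy

fc = r"C:\data\example.gdb\parcels"   # placeholder path
count = int(arcpy.GetCount_management(fc).getOutput(0))
arcpy.SetProgressor("step", "Updating rows...", 0, count, 1)

with arcpy.da.UpdateCursor(fc, ["STATUS"]) as cursor:  # placeholder field
    for row in cursor:
        row[0] = "DONE"
        cursor.updateRow(row)
        arcpy.SetProgressorPosition()

arcpy.ResetProgressor()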
Example #2
def executeRScript(rScript, arguments):
    arcpy.SetProgressor("default", "Executing R Script...")
    args = ["R", "--slave", "--vanilla", "--args"]
    for thisArgument in arguments:
        args.append(thisArgument)
    scriptSource = open(rScript, 'r')
    rCommand = subprocess.Popen(args,
                                stdin=scriptSource,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                shell=True)
    outString, errString = rCommand.communicate()
    scriptSource.close()
    if errString and "...completed execution of R-script" not in outString:
        arcpy.AddMessage(errString)
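The function pipes the R script into a child R process on stdin and passes the tool's parameters after --args. A hypothetical call (the script path and arguments are made up for illustration):

executeRScript(r"C:\scripts\summarize.R", [r"C:\data\input.csv", r"C:\data\output"])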
Example #3
def ConvertEncoding(input_fc, input_fd, sE, dE, caseI):
    # Define progressor to update progress info
    arcpy.SetProgressor("default")
    res = arcpy.GetCount_management(input_fc)
    count = int(res.getOutput(0))
    fcount = 0

    #open UpdateCursor on input FC
    rows = arcpy.UpdateCursor(input_fc)
    row = rows.next()

    # Loop through each record
    while row:
        #update progressor
        fcount += 1
        progressMessage = "Processing source feature: " + str(
            fcount) + " of " + str(count)
        arcpy.SetProgressorLabel(progressMessage)
        #newValue = ""
        # read row's value
        string1 = row.getValue(input_fd)
        # convert text & update to row
        if string1.strip() != "":
            # Convert
            if sE == _VNIWin:
                # Convert VNI-Win to Unicode
                newValue = ConvertVNIWindows(string1)
                # if targerEncode is not Unicode -> Convert to other option
                if dE != _Unicode:
                    newValue = Convert(newValue, _Unicode, dE)
            else:
                newValue = Convert(string1, sE, dE)
            print(newValue)
            # Character Case-setting
            if caseI != "none":
                newValue = ChangeCase(newValue, caseI)
                print(newValue)
            # update new value
            row.setValue(input_fd, newValue)
            rows.updateRow(row)
        else:
            row.setValue(input_fd, "")
            rows.updateRow(row)
        row = rows.next()  #go to next row in input FC

    arcpy.ResetProgressor()
    del rows
    del row
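This snippet uses the legacy arcpy.UpdateCursor API (hence rows.next() and the Python 2 print statements). With the newer arcpy.da.UpdateCursor the same progress loop could be sketched roughly as follows; convert stands in for the VNI/Unicode conversion helpers above and is an assumption, not part of the original:

def convert_encoding_da(input_fc, input_fd, convert):
    count = int(arcpy.GetCount_management(input_fc).getOutput(0))
    arcpy.SetProgressor("default")
    with arcpy.da.UpdateCursor(input_fc, [input_fd]) as cursor:
        for fcount, row in enumerate(cursor, 1):
            arcpy.SetProgressorLabel(
                "Processing source feature: {0} of {1}".format(fcount, count))
            value = row[0] or ""
            row[0] = convert(value) if value.strip() else ""
            cursor.updateRow(row)
    arcpy.ResetProgressor()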
Example #4
def ChangeWeightFromGrapy(net, netfield):
    '''
    Change each edge's weight to its connection count or its length, depending on netfield
    '''
    arcpy.SetProgressor("step", "Start reading rows", 0, len(net.EdgeList), 1)
    for l in net.EdgeList:
        arcpy.SetProgressorLabel("Changing the Edge with FID of " + str(l.ID) + " ...")
        if netfield=="connect":
            l.weight=l.connect
        elif netfield=="length":
            l.weight=l.length
        else:
            arcpy.AddError("Invalid command.")
                
        arcpy.SetProgressorPosition()
    return net
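A hypothetical call, assuming net is a graph object whose EdgeList entries expose the ID, connect, length and weight attributes used above:

net = ChangeWeightFromGrapy(net, "connect")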
Example #5
def updateTSTYBM(tp, reverseRelationDict):

    # '13_更新TP值' = "13: update TP values"
    arcpy.SetProgressor('step', '13_更新TP值', 0,
                        int(arcpy.GetCount_management(tp).getOutput(0)), 1)

    with arcpy.da.UpdateCursor(tp, "TSTYBM") as cur:

        for row in cur:

            if row[0] in reverseRelationDict:

                row[0] = reverseRelationDict[row[0]]

                cur.updateRow(row)

            arcpy.SetProgressorPosition()
Example #6
    def calculate(self):
        """Performs GM Lag (spatial lag) regression and related diagnostics."""

        ARCPY.SetProgressor("default", "Executing Spatial Lag regression...")

        #### Perform GM_Lag regression ####
        self.lag = PYSAL.spreg.GM_Lag(self.y,
                                      self.x,
                                      w=self.w,
                                      robust='white',
                                      spat_diag=True,
                                      name_y=self.depVarName,
                                      name_x=self.indVarNames,
                                      name_w=self.wName,
                                      name_ds=self.ssdo.inputFC)
        ARCPY.AddMessage(self.lag.summary)
Example #7
def extract(xmlFileName, rowLimit, workspace, sourceLayer, targetFC):

    xmlDoc = dla.getXmlDoc(xmlFileName)
    if workspace == "" or workspace == "#" or workspace == None:  
        dla.workspace = arcpy.env.scratchGDB
    else:
        dla.workspace = workspace
    fields = dla.getFields(xmlFileName)
    success = True
    name = ''
    try:
        if not arcpy.Exists(dla.workspace):
            dla.addMessage(dla.workspace + " does not exist, attempting to create")
            dla.createGeodatabase()
        if len(fields) > 0:
            arcpy.SetProgressor("step", "Importing Layer...",0,1,1)

            if sourceLayer == '' or sourceLayer == '#':                
                source = dla.getNodeValue(xmlDoc,"Datasets/Source")
            else:
                source = sourceLayer
            if targetFC == '' or targetFC == '#':
                targetName = dla.getTargetName(xmlDoc)
            else:
                targetName = targetFC[targetFC.rfind(os.sep)+1:]

            sourceName = dla.getSourceName(xmlDoc)
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " + targetName +"...")
            #if not arcpy.Exists(sourceLayer):
            #    dla.addError("Layer " + sourceLayer + " does not exist, exiting")
            #    return
            
            retVal = exportDataset(xmlDoc,sourceLayer,dla.workspace,targetName,rowLimit)
            if retVal == False:
                success = False

        arcpy.SetProgressorPosition()
    except:
        dla.addError("A Fatal Error occurred")
        dla.showTraceback()
        success = False
    finally:
        arcpy.ResetProgressor()
        #arcpy.RefreshCatalog(dla.workspace)
        arcpy.ClearWorkspaceCache_management(dla.workspace)

    return success
Example #8
def setLINKTBS(xzkpath):
    """Set the LINKTBS field of patches that LINKTBS points to, to their own BSM; patches are not copied in the order pointed to by LINKTBS."""

    arcpy.SetProgressor('step', '6_设置LINKTBS值', 0,
                        int(arcpy.GetCount_management(xzkpath).getOutput(0)),
                        1)

    where_clause1 = " LINKTBS <> ''"

    linktbsList = []
    tstybmList = []
    relationList = []

    with arcpy.da.SearchCursor(xzkpath, ["LINKTBS", "TSTYBM"],
                               where_clause1) as cur:

        for row in cur:

            linktbsList.append(row[0])
            tstybmList.append(row[1])

    num = 0

    with arcpy.da.UpdateCursor(xzkpath, ["BSM", "LINKTBS", "TSTYBM"]) as cur:

        for row in cur:

            BSM = row[0]
            LINKTBS = row[1]

            if BSM in linktbsList:

                row[1] = BSM
                index = linktbsList.index(BSM)
                relationList.append([row[2], tstybmList[index]])

            elif len(LINKTBS) != 18:

                row[1] = num

                num += 1

            cur.updateRow(row)

            arcpy.SetProgressorPosition()

    return tstybmList, relationList
Example #9
    def execute(self, parameters, messages):

        inFeatures = parameters[0].valueAsText
        outFeatures = parameters[1].valueAsText
        pointType = parameters[2].valueAsText

        inDesc = arcpy.Describe(inFeatures)
        if (inDesc.dataType == "FeatureLayer"):
            inDesc = inDesc.featureClass

        dirName, fcName = os.path.split(outFeatures)

        #create output
        arcpy.CreateFeatureclass_management(dirName, fcName, "POINT",
                                            inFeatures, "SAME_AS_TEMPLATE",
                                            "SAME_AS_TEMPLATE",
                                            inDesc.spatialReference)

        fields = arcpy.ListFields(outFeatures)
        fieldList = []

        for field in fields:
            #messages.addMessage("{0} is a type of {1} with a length of {2}"
            #   .format(field.name, field.type, field.length))
            if field.type not in ("OID", "Geometry"):
                fieldList.append(field.name)
        fieldList.append("SHAPE@")

        # for Progress step count
        result = arcpy.GetCount_management(inFeatures)
        count = int(result.getOutput(0))
        arcpy.SetProgressor("step", "Inserting ...", 0, count, 1)

        with arcpy.da.InsertCursor(outFeatures, fieldList) as inCursor:
            with arcpy.da.SearchCursor(inFeatures, fieldList) as cursor:
                for row in cursor:
                    if (isinstance(row[-1], arcpy.Polygon)):
                        if (pointType == "LabelPoint"):
                            insertRow = row[:-1] + (row[-1].labelPoint, )
                        elif (pointType == "Centroid"):
                            insertRow = row[:-1] + (row[-1].centroid, )
                        else:
                            insertRow = row[:-1] + (row[-1].trueCentroid, )

                        inCursor.insertRow(insertRow)
                    #step count
                    arcpy.SetProgressorPosition()
Example #10
def _get_best_parameters(num_estimators, learning_rate, regressors, response,  messages):
    if (num_estimators is not None) and (learning_rate is not None):
        return [num_estimators, learning_rate]

    messages.AddMessage("Calculating best parameters...")
    if num_estimators is None:
        num_estimators_space = np.array([1, 2, 3, 5, 7, 10, 15, 20, 30, 50, 70, 100])
    else:
        num_estimators_space = [num_estimators]

    if learning_rate is None:
        learning_rate_space = np.array([0.1, 0.5, 0.75, 1, 1.25, 1.5, 2])
    else:
        learning_rate_space = [learning_rate]

    arcpy.SetProgressor("step", "Calculating best parameter combination", min_range=0,
                        max_range=len(num_estimators_space)*len(learning_rate_space), step_value=1)
    scores = np.empty([len(num_estimators_space), len(learning_rate_space), 3])
    for it_n_est, n_est in enumerate(num_estimators_space):
        for it_l_rate, l_rate in enumerate(learning_rate_space):
            scores[it_n_est, it_l_rate] = cross_val_score(AdaBoostClassifier(n_estimators=n_est, learning_rate=l_rate),
                                                          regressors, response)
            arcpy.SetProgressorPosition()
    arcpy.SetProgressorLabel("Executing Adaboost")
    arcpy.ResetProgressor()

    scores_mean = np.mean(scores, 2)
    # scores_std = np.std(scores,2)
    i, j = np.unravel_index(scores_mean.argmax(), scores_mean.shape)
    max_num_estimators = num_estimators_space[i]
    max_learning_rate = learning_rate_space[j]

    if num_estimators is None:
        messages.AddMessage("Number of estimators selected: " + str(max_num_estimators))
    if learning_rate is None:
        messages.AddMessage("Learning rate selected: " + str(max_learning_rate))

    if num_estimators is None or learning_rate is None:
        messages.AddMessage("Score: " + str(scores_mean[i, j]))
        messages.AddMessage("Other Options:")
        for unused in xrange(3):
            scores_mean[i, j] = 0
            i, j = np.unravel_index(scores_mean.argmax(), scores_mean.shape)
            messages.AddMessage("  Num Estimators: " + str(num_estimators_space[i]) + " Learning Rate: " +
                                str(learning_rate_space[j]) + " Score(K-folds): " + str(scores_mean[i, j]))

    return [max_num_estimators, max_learning_rate]
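A hypothetical follow-up (regressors, response and messages as in the function above) would fit the final classifier with the selected parameters:

from sklearn.ensemble import AdaBoostClassifier

num_estimators, learning_rate = _get_best_parameters(None, None, regressors, response, messages)
classifier = AdaBoostClassifier(n_estimators=num_estimators, learning_rate=learning_rate)
classifier.fit(regressors, response)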
Example #11
    def doRaster(self, outputRaster, varName=None):
        """Creates the Output Raster."""

        renderType = UTILS.renderType[self.ssdo.shapeType.upper()]
        if renderType:
            #### No Output When Not Points ####
            printOHSSubject(84480)
        else:
            if varName:
                msg = ARCPY.GetIDMessage(84479)
                rasterLayerFile = "PointDensityHSGray.lyr"
            else:
                msg = ARCPY.GetIDMessage(84478)
                rasterLayerFile = "PointDensityHSGrayPoints.lyr"
            ARCPY.SetProgressor("default", msg)

            #### Subject w/ Value - Use AddMessage Explicitly ####
            outMSG = ARCPY.GetIDMessage(84497).format(outputRaster)
            ARCPY.AddMessage(outMSG)

            #### Distance Band Answer ####
            msg = ARCPY.GetIDMessage(84481).format(self.distanceStr)
            printOHSAnswer(msg, addNewLine=False)

            #### Clip Message ####
            if self.maskExists:
                msg = ARCPY.GetIDMessage(84483)
            else:
                msg = ARCPY.GetIDMessage(84482)
            printOHSAnswer(msg)

            #### Do Raster ####
            try:
                UTILS.fc2DensityRaster(self.ssdo.inputFC,
                                       outputRaster,
                                       varName,
                                       boundaryFC=self.boundaryFC,
                                       searchRadius=self.distanceBand)
            except:
                msg = ARCPY.GetIDMessage(84498)
                printOHSAnswer(msg)

            #### Set Symbology ####
            fullRLF = OS.path.join(self.templateDir, "Templates", "Layers",
                                   rasterLayerFile)
            self.params[6].Symbology = fullRLF
Example #12
def main(argv = None):
    # main function - list the source and target datasets, then append where there is a match on non-prefixed name
    success = True
    name = ''
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        if len(datasetNames) == 0:
            sources = gzSupport.listDatasets(sourceGDB)
            sNames = sources[0]
            sFullNames = sources[1]
            targets = gzSupport.listDatasets(targetGDB)
            tNames = targets[0]
            tFullNames = targets[1]
        else:
            sNames = datasetNames
        
        s = 0
        arcpy.SetProgressor("Step","Appending rows...",0,len(sNames),1)
        for name in sNames:
            arcpy.SetProgressorPosition(s)
            arcpy.SetProgressorLabel(" Appending rows in " + name + "...")
            # for each source name
            if debug:
                gzSupport.addMessage(name)
            target = os.path.join(targetGDB,name)
            if arcpy.Exists(target):
                # append if there is a match
                if len(datasetNames) == 0 or gzSupport.nameTrimmer(name) in datasetNames:
                    retVal = doAppend(os.path.join(sourceGDB,name),target)
                    gzSupport.logDatasetProcess("appendAlltoGDB",name,retVal)
                    if retVal == False:
                        success = False
                else:
                    gzSupport.addMessage("Skipping "  + gzSupport.nameTrimmer(name))

            s = s + 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Unable to append datasets")
        success = False
        gzSupport.logDatasetProcess("appendAlltoGDB",name,success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(targetGDB)
Example #13
def main(argv=None):
    # main function - list the source and target datasets, then append where there is a match on non-prefixed name
    success = True
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        sources = gzSupport.listDatasets(sourceGDB)
        sNames = sources[0]
        sFullNames = sources[1]
        targets = gzSupport.listDatasets(targetGDB)
        tNames = targets[0]
        tFullNames = targets[1]
        s = 0
        arcpy.SetProgressor("Step", "Appending rows...", 0, len(sFullNames), 1)
        for name in sNames:
            arcpy.SetProgressorPosition(s)
            arcpy.SetProgressorLabel(" Appending rows in " + name + "...")
            # for each source name
            if debug:
                gzSupport.addMessage(name)
            try:
                # look for the matching name in target names
                t = tNames.index(name)
            except:
                # will get here if no match
                t = -1
            if t > -1:
                # append if there is a match
                if len(datasetNames) == 0 or name.upper() in datasetNames:
                    retVal = doAppend(sFullNames[s], tFullNames[t])
                    gzSupport.logDatasetProcess(name, "appendAlltoGDB", retVal)
                    if retVal == False:
                        success = False
                else:
                    gzSupport.addMessage("Skipping " + name)

            s = s + 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Unable to append datasets")
        success = False
        gzSupport.logDatasetProcess(name, "appendAlltoGDB", success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(targetGDB)
Example #14
def join(left_side_fc, left_side_key, right_side_fc, right_side_key,
         right_side_include_fields):
    log('Join left side: ' + left_side_fc)
    log('Join right side: ' + right_side_fc)
    log('Join keys: ' + str(left_side_key) + ':' + str(right_side_key))

    common_functions.create_index(left_side_fc, [left_side_key], 'LeftIdx')
    common_functions.create_index(right_side_fc, [right_side_key], 'RightIdx')

    # Update the left side feature class with the fields from the right side (they will be populated in the next step)
    left_side_add_fields(left_side_fc, [
        f for f in arcpy.ListFields(right_side_fc)
        if f.name in right_side_include_fields.split(';')
    ])

    # Prepare to write values to left side
    right_side_cursor = right_side_row_gen(
        right_side_fc, [right_side_key] + right_side_include_fields.split(';'),
        right_side_key)
    right_side_row = right_side_cursor.next()

    # Since both cursors return rows sorted by key, we advance them in tandem. When we
    # find a matching key, we copy the specified right-side fields into the left side
    # feature class for that row.
    count = int(arcpy.GetCount_management(left_side_fc).getOutput(0))
    arcpy.SetProgressor("step", "Joining files...", 0, count, 1)
    i = 0
    with arcpy.da.UpdateCursor(
            left_side_fc,
        [left_side_key] + right_side_include_fields.split(';'),
            sql_clause=(None,
                        'ORDER BY ' + left_side_key)) as left_side_cursor:
        for left_side_row in left_side_cursor:
            i += 1
            common_functions.log_progress("Joining record ", count, i)
            try:
                while left_side_row[0] > right_side_row[0]:
                    right_side_row = right_side_cursor.next()
                if left_side_row[0] == right_side_row[0]:
                    left_side_cursor.updateRow(right_side_row)
            except StopIteration:
                arcpy.AddWarning('End of join table.')
                break
    del left_side_cursor

    log('Done.')
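A hypothetical call (feature classes and field names are placeholders) that joins owner attributes onto a parcels layer by a shared key; both inputs must be sortable on their key fields, since the function walks the two sorted cursors in tandem:

join(r"C:\data\city.gdb\parcels", "PIN",
     r"C:\data\city.gdb\owners", "PIN",
     "OwnerName;OwnerAddress")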
Example #15
def makeGlobalLoS(data, maximal_possible_distance, spatial_ref):
    number_of_LoS = int(arcpy.GetCount_management(data).getOutput(0))

    arcpy.SetProgressor("step", "Updating " + str(number_of_LoS) + " lines of sight...", 0, number_of_LoS, 1)

    start_point = arcpy.Point()
    end_point = arcpy.Point()
    polyline_array = arcpy.Array()

    mid_point = arcpy.Point()

    with arcpy.da.UpdateCursor(data, ["OID", "SHAPE@"]) as cursor:
        for row in cursor:
            start_point.X = row[1].firstPoint.X
            start_point.Y = row[1].firstPoint.Y
            start_point.Z = row[1].firstPoint.Z
            end_point.X = row[1].lastPoint.X
            end_point.Y = row[1].lastPoint.Y
            end_point.Z = row[1].lastPoint.Z

            mid_point.X = row[1].lastPoint.X
            mid_point.Y = row[1].lastPoint.Y
            mid_point.Z = row[1].lastPoint.Z

            start_pointGeometry = arcpy.PointGeometry(start_point, spatial_ref, True)
            end_pointGeometry = arcpy.PointGeometry(end_point, spatial_ref, True)

            new_end_pointGeometry = start_pointGeometry.pointFromAngleAndDistance(
                start_pointGeometry.angleAndDistanceTo(end_pointGeometry)[0], maximal_possible_distance)

            end_point.X = new_end_pointGeometry.centroid.X
            end_point.Y = new_end_pointGeometry.centroid.Y

            polyline_array.add(start_point)
            polyline_array.add(mid_point)
            polyline_array.add(end_point)

            polyline_new = arcpy.Polyline(polyline_array, spatial_ref, True)

            polyline_array.removeAll()

            row[1] = polyline_new
            cursor.updateRow(row)
            arcpy.SetProgressorPosition()

    arcpy.ResetProgressor()
Example #16
def calc_nrst_dist_cpu(gids, xs, ys, densities, cpu_core):
    n=xs.shape[0]
    
    def calc_nrst_dist_np(gidxys,result_q,gids,xs,ys,densities):
        while True:
            try:
                i=gidxys.get_nowait()
                distpow2=(xs-xs[i])**2+(ys-ys[i])**2
                distpow2[densities<=densities[i]]=1e100
                pg=distpow2.argsort()[0]
                if distpow2[pg]>1e99:
                    result_q.put((i,1e10,-1))
                else:
                    result_q.put((i,math.sqrt(distpow2[pg]),gids[pg]))
            except queue.Empty:
                break;
                
    n=xs.shape[0]
    gidxys=queue.Queue()
    result_q=queue.Queue()
    for i in range(n):
        gidxys.put(i)
    
    arcpy.SetProgressor("step", "Find Point with Higher Density on CPU...",0, n, 1)
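    # Note: this pattern only behaves as intended if Process is a thread-based
    # worker (e.g. multiprocessing.dummy.Process); plain queue.Queue objects are
    # not shared across real OS processes.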
    
    ts=[]
    for i in range(cpu_core):
        t=Process(target=calc_nrst_dist_np,args=(gidxys,result_q,gids,xs,ys,densities))
        t.start()
        ts.append(t)
    for t in ts:
        while t.is_alive():
            arcpy.SetProgressorPosition(n-gidxys.qsize())
            time.sleep(0.05)
        
    result_a=[]
    while result_q.empty()==False:
        result_a.append(result_q.get())
    result_a.sort()
    result_nd=[]
    result_pg=[]
    for v in result_a:
        result_nd.append(v[1])
        result_pg.append(v[2])
    return (np.array(result_nd),np.array(result_pg))
Example #17
def judge(outputxzkpath):
    """Determine the change type."""

    arcpy.SetProgressor(
        'step', "8_判断变化类型", 0,
        int(arcpy.GetCount_management(outputxzkpath).getOutput(0)), 1)

    arcpyDeal.ensureFields(outputxzkpath, ['shuvary', 'shpvary'])

    xzkcur = arcpy.da.UpdateCursor(outputxzkpath,
                                   ['cskmianji', 'SHAPE@AREA', 'shpvary'])

    for row in xzkcur:

        if abs(row[0] - row[1]) < 0.1:

            row[2] = 'N'

        else:

            row[2] = 'Y'

        xzkcur.updateRow(row)

    searchFields = [
        'shuvary', 'dlbm', 'cskdlbm', "WJZLX", 'GDLX', 'TBXHDM', 'GDZZSXDM'
    ]

    xzkcur = arcpy.da.UpdateCursor(outputxzkpath, searchFields)

    for row in xzkcur:

        data = dict(zip(searchFields, row))

        if data["dlbm"] == data["cskdlbm"] and data['WJZLX'] == '' and data[
                'GDLX'] == '' and data['TBXHDM'] == '' and data[
                    'GDZZSXDM'] == '':

            row[0] = 'N'

        else:

            row[0] = 'Y'

        xzkcur.updateRow(row)
Example #18
def AddArea(locationLayer,
            mcpFeatureClass,
            percentUsed,
            sr=None,
            shapeName=None):
    # using arcpy Point objects here is 8% (143 points) to 44% (18,407 points) faster
    #points = GetPoints(locationLayer, sr, shapeName)
    points = utils.GetArcpyPoints(locationLayer, sr, shapeName)
    finalLength = int(0.5 + (percentUsed / 100.0) * len(points))
    utils.info("Removing " + str(len(points) - finalLength) + " of " +
               str(len(points)) + " points.")
    arcpy.SetProgressor("step", "Finding points to ignore...", 0,
                        len(points) - finalLength, 1)
    while finalLength < len(points):
        points = RemovePointWithMostArea(points)
        arcpy.SetProgressorPosition()
    mcp = Mcp(points)
    arcpy.CopyFeatures_management(mcp, mcpFeatureClass)
Example #19
def write_features(out_feature_class, out_schema, json_struct):
    arcpy.AddMessage("Writing features")
    # Create a list of (sane_field_name, field_name) tuples
    reverse_field_name_mapping = list(
        sorted((v, k) for k, v in out_schema['field_names'].iteritems()))
    fields = ["SHAPE@WKT"] + [f[0] for f in reverse_field_name_mapping]
    record_count = len(json_struct['features'])
    arcpy.SetProgressor("step", "Writing rows", 0, record_count)
    with arcpy.da.InsertCursor(out_feature_class, fields) as out_cur:
        for row_index, row_struct in enumerate(json_struct['features']):
            if (row_index % 100 == 1):
                arcpy.SetProgressorPosition(row_index)
            row_data = row_struct['properties']
            row_list = [
                row_data.get(k[1], None) for k in reverse_field_name_mapping
            ]
            wkt = geojson_to_geometry(row_struct['geometry'])
            out_cur.insertRow([wkt] + row_list)
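As the cursor loop reads its inputs, json_struct is a GeoJSON-like dict and out_schema['field_names'] maps each original property name to a sanitized field name. A minimal illustration (all values are made up):

out_schema = {'field_names': {'Street Name': 'Street_Name'}}
json_struct = {
    'features': [
        {'geometry': {'type': 'Point', 'coordinates': [0.0, 0.0]},
         'properties': {'Street Name': 'Main St'}},
    ]
}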
Example #20
def takeBsmBack(xzkdissolvepath, originLackBSMList):
    """Recover the missing BSM values."""

    arcpy.SetProgressor('step', '12_把缺失的BSM找回来', 0, len(originLackBSMList), 1)

    table = "xzkdissolvepath_12"

    arcpy.MakeFeatureLayer_management(xzkdissolvepath, table)

    for i in range(len(originLackBSMList)):

        bsm = originLackBSMList[i]

        arcpy.AddMessage("处理第%s个缺失得BSM" % (i + 1))  # "processing missing BSM no. %s"

        traverseLayer(bsm, xzkdissolvepath, table, [])

        arcpy.SetProgressorPosition()
Example #21
def extract(xmlFileName, rowLimit, workspace, source, target, datasetType):

    xmlDoc = dla.getXmlDoc(xmlFileName)
    if workspace == "" or workspace == "#" or workspace == None:
        dla.workspace = dla.setWorkspace()
    else:
        dla.workspace = workspace
    fields = dla.getFields(xmlFileName)
    success = True
    name = ''
    try:
        if not arcpy.Exists(dla.workspace):
            dla.addMessage(dla.workspace +
                           " does not exist, attempting to create")
            dla.createGeodatabase()
        if len(fields) > 0:
            arcpy.SetProgressor("step", "Importing Layer...", 0, 1, 1)

            targetName = dla.getDatasetName(target)
            sourceName = dla.getDatasetName(source)
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " +
                                     targetName + "...")

            if not arcpy.Exists(source):
                dla.addError("Layer " + source + " does not exist, exiting")
                return

            retVal = exportDataset(xmlDoc, source, dla.workspace, targetName,
                                   rowLimit, datasetType)
            if retVal == False:
                success = False

        arcpy.SetProgressorPosition()
    except:
        dla.addError("A Fatal Error occurred")
        dla.showTraceback()
        success = False
    finally:
        arcpy.ResetProgressor()
        #arcpy.RefreshCatalog(dla.workspace)
        arcpy.ClearWorkspaceCache_management(dla.workspace)

    return success
Example #22
    def execute(self, parameters, messages):
        inFeatures = parameters[0].valueAsText
        outFeatures = parameters[1].valueAsText

        dirName, fcName = os.path.split(outFeatures)
        inDesc = arcpy.Describe(inFeatures)

        #messages.addMessage(inDesc.dataType)
        if (inDesc.dataType == "FeatureLayer"):
            inDesc = inDesc.featureClass

        arcpy.CreateFeatureclass_management(dirName, fcName, "POLYGON",
                                            inFeatures, "SAME_AS_TEMPLATE",
                                            "SAME_AS_TEMPLATE",
                                            inDesc.spatialReference)

        fields = arcpy.ListFields(outFeatures)
        #fieldList = ["SHAPE@"]
        fieldList = []

        for field in fields:
            #messages.addMessage("{0} is a type of {1} with a length of {2}"
            #   .format(field.name, field.type, field.length))
            if field.type not in ("OID", "Geometry"):
                fieldList.append(field.name)
        fieldList.append("SHAPE@")

        # for Progress step count
        result = arcpy.GetCount_management(inFeatures)
        count = int(result.getOutput(0))
        arcpy.SetProgressor("step", "Inserting ...", 0, count, 1)

        with arcpy.da.InsertCursor(outFeatures, fieldList) as inCursor:
            with arcpy.da.SearchCursor(inFeatures, fieldList) as cursor:
                for row in cursor:
                    lst = []
                    # inner polygon is not part..
                    for part in row[-1]:
                        # trick getPart is not hole split
                        lst.append(arcpy.Polygon(part).getPart(0))

                    insertRow = row[:-1] + (arcpy.Polygon(arcpy.Array(lst)), )
                    inCursor.insertRow(insertRow)
Example #23
def start(targetpath, outname):

    arcpyDeal.createExistsTempLayer(targetpath, outname)

    cursor = arcpy.da.UpdateCursor(outname, ["bhlx"])

    result = arcpy.GetCount_management(outname)
    count = int(result.getOutput(0))
    arcpy.SetProgressor('step', '遍历进度', 0, count, 1)  # '遍历进度' = "traversal progress"

    for row in cursor:

        bhlx = dealNone.dealNoneAndBlank(row[0])

        if bhlx == "":

            cursor.deleteRow()

        arcpy.SetProgressorPosition()
Example #24
    def createDiagnosticTable(self, tableName):
        """Creates Output Diagnostic Database Table for OLS.

        INPUTS:
        tableName (str): catalog path to the output table
        """

        #### Set Progressor ####
        ARCPY.AddMessage(ARCPY.GetIDMessage(84098))
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84098))
        outPath, outName = OS.path.split(tableName)

        #### Set Up Field Names and Types ####
        inputFields = UTILS.getFieldNames(olsDiagFieldNames, outPath)
        inputTypes = ["TEXT", "DOUBLE", "TEXT"]

        #### Set Up Input Data ####
        inputData = []
        diags = [
            84114, 84249, 84018, 84021, 84024, 84027, 84030, 84033, 84036,
            84039, 84042, 84045, 84062
        ]

        desc = [
            84116, 84250, 84020, 84023, 84026, 84029, 84032, 84035, 84038,
            84041, 84044, 84047, 84063
        ]

        diags = [ARCPY.GetIDMessage(i) for i in diags]
        desc = [ARCPY.GetIDMessage(i) for i in desc]

        stats = [
            self.aic, self.aicc, self.r2, self.r2Adj, self.fStat, self.fProb,
            self.waldStat, self.waldProb, self.BP, self.BPProb, self.JB,
            self.JBProb, self.s2
        ]

        for rowInd, rowVal in enumerate(stats):
            inputData.append((diags[rowInd], rowVal, desc[rowInd]))

        #### Write Diagnostic Table ####
        UTILS.createOutputTable(tableName, inputFields, inputTypes, inputData)
Example #25
def download_attachments(folder_attachments: str, base_url: str,
                         token: Optional[str], oids: List[Union[int, str]],
                         is_layer_table: bool):
    """download attachments"""
    if not is_blank(folder_attachments):
        has_attachments: bool = get_has_attachments(base_url, token)
        if has_attachments:
            arcpy.AddMessage(
                'Please wait a moment ... download attachments ...')
            params: Dict[str, Any] = init_params(token)
            total: int = len(oids)
            describe_recs: str = records_desc(is_layer_table)
            arcpy.ResetProgressor()
            arcpy.SetProgressor(
                'step',
                f'''{total} {describe_recs.lower()} to be downloaded''', 0,
                total)
            total_progress: int = 0

            for oid in oids:
                url_attachment: str = add_url_path(base_url, str(oid),
                                                   'attachments')
                attachment_infos: Dict[str, Any] = cast(
                    Dict[str, Any], request(url_attachment,
                                            params))['attachmentInfos']
                if attachment_infos:
                    for attachment_info in attachment_infos:
                        id_attachment: int = cast(Dict[str, int],
                                                  attachment_info)['id']
                        id_attachment_name: str = cast(Dict[str, str],
                                                       attachment_info)['name']
                        name_file: str = f'{oid}-{id_attachment}-{id_attachment_name}'
                        download_file(
                            f'''{add_url_path(url_attachment, str(id_attachment))}?{urlencode(params)}''',
                            os.path.join(folder_attachments, name_file))
                total_progress += 1
                arcpy.SetProgressorLabel(
                    f'''{total_progress} {describe_recs.lower()} attachments'''
                )
                arcpy.SetProgressorPosition()
        else:
            arcpy.AddWarning('Service has no attachments')
Example #26
def calc_density_cpu(xs, ys, weights, kernel_type, cpu_core, cutoffd=0, sigma=0):
    xs=xs-xs.min()
    ys=ys-ys.min()
        
    def calc_density_np(gidxys,result_q,xs,ys,weights,kernel_type,cutoffd=0,sigma=0):
        while True:
            try:
                i=gidxys.get_nowait()
                distpow2=(xs-xs[i])**2+(ys-ys[i])**2
                if kernel_type=='GAUSS':
                    result_q.put( (i,((distpow2<((3*sigma)**2))*np.exp(-distpow2/(sigma**2))*weights).sum()))
                else:
                    result_q.put( (i,((distpow2<(cutoffd**2))*weights).sum()))                    
            except queue.Empty:
                break;
        
    n=xs.shape[0]
    gidxys=queue.Queue()
    result_q=queue.Queue()
    for i in range(n):
        gidxys.put(i)
    
    arcpy.SetProgressor("step", "Calculate Densities on CPU...",0, n, 1)
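    # As noted for calc_nrst_dist_cpu above, this assumes Process is thread-based
    # (e.g. multiprocessing.dummy); plain queue.Queue objects are not shared
    # across real OS processes.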
    
    ts=[]
    for i in range(cpu_core):
        t=Process(target=calc_density_np,args=(gidxys,result_q,xs,ys,weights,kernel_type,cutoffd,sigma))
        t.start()
        ts.append(t)
    for t in ts:
        while t.is_alive():
            arcpy.SetProgressorPosition(n-gidxys.qsize())
            time.sleep(0.05)
        
    result_a=[]
    while result_q.empty()==False:
        result_a.append(result_q.get())
    result_a.sort()
    result_d=[]
    for v in result_a:
        result_d.append(v[1])
    return np.array(result_d)    
Example #27
def add_dll_defined_fields_to_table(dll,calculation,table,overwrite):
    global outlength, names
    
    arcpy.AddMessage('  Overwrite output = %s'%overwrite)
    
    # get output names    
    dll.calc_get_all_output_names.restype = ctypes.POINTER(ctypes.c_char_p)
    dll.calc_get_short_output_names.restype = ctypes.POINTER(ctypes.c_char_p)
    dll.calc_get_output_length.restype = ctypes.c_int

    outlength = dll.calc_get_output_length(calculation)
    alii = list(dll.calc_get_all_output_names(calculation)[0:outlength])
    names = list(dll.calc_get_short_output_names(calculation)[0:outlength])

    # ensure names are valid for table type
    names = [arcpy.ValidateFieldName(x,os.path.dirname(table)) for x in names]

    # check fields won't be overwritten unless specified
    existing_field_names = [x.name for x in arcpy.ListFields(table)]
    if not overwrite:
        error_happened = False
        for name,alias in zip(names,alii):
            if name in existing_field_names:
                    arcpy.AddError('Field %s (%s) exists already'%(name,alias))
                    error_happened = True
        if error_happened:
            arcpy.AddError("Either enable 'Overwrite output fields' in the tool dialog box\n\
                            Or delete/rename the existing fields")
            raise StandardError("Can't overwrite output data")

    arcpy.SetProgressor("step", "Checking output columns", 0, outlength, 1)
            
    # create fields if needed
    for i,(name,alias) in enumerate(zip(names,alii)):
        arcpy.SetProgressorPosition(i)
        if name not in [x.name for x in arcpy.ListFields(table)]:
            arcpy.AddMessage('    Field %s (%s) not present, adding'%(name,alias))
            arcpy.AddField_management(table,name,'FLOAT',field_alias=alias)
        else:
            arcpy.AddMessage('    Field %s (%s) exists already, overwriting'%(name,alias))

    arcpy.SetProgressorPosition(outlength)
Example #28
    def createOutput(self, outputFC):
        """Creates an Output Feature Class with the Directional Mean
        Results.

        INPUTS:
        outputFC (str): path to the output feature class
        """

        #### Validate Output Workspace ####
        ERROR.checkOutputPath(outputFC)

        #### Shorthand Attributes ####
        ssdo = self.ssdo
        caseField = self.caseField

        #### Create Output Feature Class ####
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
        tempCFLayer = "tmpCFLayer"

        try:
            DM.MakeFeatureLayer(ssdo.inputFC, tempCFLayer)
            first = True
            for key, value in self.cf.iteritems():
                oids = value[0]
                for oid in oids:
                    sqlString = ssdo.oidName + '=' + str(oid)
                    if first:
                        DM.SelectLayerByAttribute(tempCFLayer, "NEW_SELECTION",
                                                  sqlString)
                        first = False
                    else:
                        DM.SelectLayerByAttribute(tempCFLayer,
                                                  "ADD_TO_SELECTION",
                                                  sqlString)

            UTILS.clearExtent(DM.CopyFeatures(tempCFLayer, outputFC))
        except:
            ARCPY.AddIDMessage("ERROR", 210, outputFC)
            raise SystemExit()

        #### Set Attribute ####
        self.outputFC = outputFC
Example #29
def make_shadows(in_fc, out_fc, angle, length, is_meters=False):
    in_sr = arcpy.Describe(in_fc).spatialReference
    arcpy.management.CreateFeatureclass(os.path.dirname(out_fc),
                                        os.path.basename(out_fc),
                                        'POLYGON',
                                        spatial_reference=in_sr)
    if is_meters:
        length /= in_sr.metersPerUnit
    radian_angle = math.radians(angle)
    xmul, ymul = math.sin(radian_angle), math.cos(radian_angle)
    xadd, yadd = length * xmul, length * ymul
    row_count = int(arcpy.management.GetCount(in_fc)[0])
    arcpy.AddMessage("Shadowing {} features".format(row_count))
    arcpy.SetProgressor("step", "Shadowing", 0, row_count)
    with arcpy.da.SearchCursor(in_fc, ['SHAPE@']) as in_cur, \
         arcpy.da.InsertCursor(out_fc, ['SHAPE@']) as out_cur:
        for row_idx, row in enumerate(in_cur):
            out_cur.insertRow([shadow_geometry(row[0], xadd, yadd, in_sr)])
            if row_idx % 20 == 1:
                arcpy.SetProgressorPosition(row_idx)
Example #30
def variable_summary(feature_class_path, variable_name):
    # Use a context manager to make sure we have a shapefile to work with
    with TempShapefile(feature_class_path) as shapefile_path:
        # Assemble command line
        r_exe = find_r_executable()
        arcpy.AddMessage("Found R.exe at {}".format(r_exe))

        commandlineargs = [
            r_exe, '--slave', '--vanilla', '--args', shapefile_path,
            variable_name
        ]

        # Locate and read R input script
        rscriptname = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                   "variable_summary.r")
        scriptsource = open(rscriptname, 'rb')

        # Open R and feed it the script
        arcpy.SetProgressor("default")
        rprocess = subprocess.Popen(commandlineargs,
                                    stdin=scriptsource,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE,
                                    shell=True)

        # Grab the output written to stdout/stderr
        stdoutstring, stderrstring = rprocess.communicate()

        # Push output to messages window
        if stderrstring and "Calculations Complete..." not in stdoutstring:
            arcpy.AddError(stderrstring)
        else:
            # Just grab the tables
            table_string = (r'\[1\] "Begin Calculations[.]{4}"\n(.*)\n'
                            r'\[1\] "Calculations Complete[.]{3}"')
            tables = re.findall(table_string, stdoutstring.replace('\r', ''),
                                re.DOTALL)
            # Push to output window
            arcpy.AddMessage(" ")
            arcpy.AddMessage("\n".join(tables))
            arcpy.AddMessage(" ")