Example #1
    def createOutput(self, outputFC):
        """Creates an Output Feature Class with the Directional Mean
        Results.

        INPUTS:
        outputFC (str): path to the output feature class
        """

        #### Validate Output Workspace ####
        ERROR.checkOutputPath(outputFC)

        #### Shorthand Attributes ####
        ssdo = self.ssdo
        caseField = self.caseField

        #### Create Output Feature Class ####
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
        tempCFLayer = "tmpCFLayer"

        try:
            DM.MakeFeatureLayer(ssdo.inputFC, tempCFLayer)
            first = True
            for key, value in self.cf.iteritems():
                oids = value[0]
                for oid in oids:
                    sqlString = ssdo.oidName + '=' + str(oid)
                    if first:
                        DM.SelectLayerByAttribute(tempCFLayer, 
                                                  "NEW_SELECTION",
                                                  sqlString)
                        first = False
                    else:
                        DM.SelectLayerByAttribute(tempCFLayer,
                                                  "ADD_TO_SELECTION", 
                                                  sqlString)

            UTILS.clearExtent(DM.CopyFeatures(tempCFLayer, outputFC))
        except:
            ARCPY.AddIDMessage("ERROR", 210, outputFC)
            raise SystemExit()

        #### Set Attribute ####
        self.outputFC = outputFC
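
These snippets are taken from ArcGIS Spatial Statistics scripts and rely on module-level aliases (ARCPY, DM, DA, UTILS, ERROR, SSDO, WU, NUM, OS, MATH, LOCALE, COLL, and so on) that are defined outside the excerpts. Below is a minimal sketch of the imports they appear to assume; the alias-to-module mapping is an inference from the names used in the code, not something confirmed by the excerpts themselves:

# Assumed module-level imports for the snippets in this collection (not shown in the originals).
import os as OS
import math as MATH
import locale as LOCALE
import collections as COLL
import numpy as NUM
import arcpy as ARCPY
import arcpy.management as DM      # geoprocessing Data Management tools
import arcpy.da as DA              # data access cursors
import SSDataObject as SSDO        # Esri Spatial Statistics helper modules (assumed names)
import SSUtilities as UTILS
import ErrorUtils as ERROR
import WeightsUtilities as WU
# ARC (arcgisscripting), GAPY (the ga_* search helpers), CONV (feature-class conversion tools),
# STATS, and TUTILS are further helpers referenced by some snippets and are likewise assumed
# to be imported at module level.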
Example #2
    def createOutput(self, outputFC):
        """Creates an Output Feature Class with the Directional Mean
        Results.

        INPUTS:
        outputFC (str): path to the output feature class
        """

        #### Validate Output Workspace ####
        ERROR.checkOutputPath(outputFC)

        #### Shorthand Attributes ####
        ssdo = self.ssdo
        caseField = self.caseField

        #### Create Output Feature Class ####
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
        tempCFLayer = "tmpCFLayer"

        try:
            DM.MakeFeatureLayer(ssdo.inputFC, tempCFLayer)
            first = True
            for key, value in self.cf.iteritems():
                oids = value[0]
                for oid in oids:
                    sqlString = ssdo.oidName + '=' + str(oid)
                    if first:
                        DM.SelectLayerByAttribute(tempCFLayer, "NEW_SELECTION",
                                                  sqlString)
                        first = False
                    else:
                        DM.SelectLayerByAttribute(tempCFLayer,
                                                  "ADD_TO_SELECTION",
                                                  sqlString)

            UTILS.clearExtent(DM.CopyFeatures(tempCFLayer, outputFC))
        except:
            ARCPY.AddIDMessage("ERROR", 210, outputFC)
            raise SystemExit()

        #### Set Attribute ####
        self.outputFC = outputFC
Example #3
def calculateAreas(inputFC, outputFC):
    """Creates a new feature class from the input polygon feature class 
    and adds a field that includes the area of the polygons.

    INPUTS:
    inputFC (str): path to the input feature class
    outputFC (str): path to the output feature class
    """

    #### Validate Output Workspace ####
    ERROR.checkOutputPath(outputFC)
    outPath, outName = OS.path.split(outputFC)

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = outputFC,
                             useChordal = False)

    #### Assure Polygon FC ####
    if ssdo.shapeType.lower() != "polygon":
        ARCPY.AddIDMessage("ERROR", 931)
        raise SystemExit()

    #### Check Number of Observations ####
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 1)

    #### Copy Features ####
    try:
        clearCopy = UTILS.clearExtent(DM.CopyFeatures)
        clearCopy(inputFC, outputFC)
    except:
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Add Area Field ####
    areaFieldNameOut = ARCPY.ValidateFieldName(areaFieldName, outPath)
    if not ssdo.allFields.has_key(areaFieldNameOut): 
        UTILS.addEmptyField(outputFC, areaFieldNameOut, "DOUBLE")

    #### Calculate Field ####
    clearCalc = UTILS.clearExtent(DM.CalculateField)
    clearCalc(outputFC, areaFieldNameOut, "!shape.area!", "PYTHON_9.3")
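
A minimal usage sketch for calculateAreas, assuming the imports noted after Example #1, hypothetical feature-class paths, and that the module-level constant areaFieldName (e.g. "F_AREA") is defined in the enclosing script:

# Hypothetical paths; calculateAreas is the function defined above.
inputPolygons = r"C:\Data\Parcels.gdb\Parcels"         # assumed polygon feature class
outputPolygons = r"C:\Data\Parcels.gdb\Parcels_Area"   # copy that receives the area field
calculateAreas(inputPolygons, outputPolygons)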
Example #4
File: Weights.py  Project: leochin/GSWMtest
def kNearest2SWM(inputFC, swmFile, masterField, concept = "EUCLIDEAN", 
                 kNeighs = 1, rowStandard = True):
    """Creates a sparse spatial weights matrix (SWM) based on k-nearest
    neighbors.

    INPUTS: 
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    concept {str, EUCLIDEAN}: EUCLIDEAN or MANHATTAN
    kNeighs {int, 1}: number of neighbors to return
    rowStandard {bool, True}: row standardize weights?
    """

    #### Assure that kNeighs is Non-Zero ####
    if kNeighs <= 0:
        ARCPY.AddIDMessage("ERROR", 976)
        raise SystemExit()

    #### Set Default Progressor for Neighborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 2)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField, types = [0,1])

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.catPath, [masterField],
                                 spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs = 2)

    #### Process any bad records encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        err = ERROR.reportBadRecords(cnt, numBadRecs, badRecs,
                                     label = ssdo.oidName)

    #### Assure k-Nearest is Less Than Number of Features ####
    if kNeighs >= N:
        ARCPY.AddIDMessage("ERROR", 975)
        raise SystemExit()

    #### Create k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    concept, gaConcept = WU.validateDistanceMethod(concept, ssdo.spatialRef)
    gaSearch.init_nearest(0.0, kNeighs, gaConcept)
    neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch, 
                                           weight_type = 1,
                                           row_standard = False)

    #### Set Progressor for Weights Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, N, 1)

    #### Initialize Spatial Weights Matrix File ####
    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName, 
                             N, rowStandard, inputFC = inputFC,
                             wType = 2, distanceMethod = concept,
                             numNeighs = kNeighs)

    #### Unique Master ID Dictionary ####
    masterSet = set([])

    for row in xrange(N):
        masterID = int(gaTable[row][2])
        if masterID in masterSet:
            ARCPY.AddIDMessage("Error", 644, masterField)
            ARCPY.AddIDMessage("Error", 643)
            raise SystemExit()
        else:
            masterSet.add(masterID)

        neighs, weights = neighWeights[row]
        neighs = [ gaTable[nh][2] for nh in neighs ]

        #### Add Spatial Weights Matrix Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights) 

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    swmWriter.close()
    del gaTable

    #### Report Warning/Max Neighbors ####
    swmWriter.reportNeighInfo()

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()
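
A usage sketch for kNearest2SWM under the same assumptions; the paths, ID field, and neighbor count are hypothetical:

# Hypothetical inputs: build an 8-nearest-neighbor spatial weights matrix.
inputFC = r"C:\Data\Counties.gdb\Counties"    # assumed point or polygon feature class
swmFile = r"C:\Data\Counties_knn8.swm"
kNearest2SWM(inputFC, swmFile, masterField="MYID",
             concept="EUCLIDEAN", kNeighs=8, rowStandard=True)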
Example #5
File: Weights.py  Project: leochin/GSWMtest
def polygon2SWM(inputFC, swmFile, masterField, 
                concept = "EUCLIDEAN", kNeighs = 0,
                rowStandard = True, contiguityType = "ROOK"):
    """Creates a sparse spatial weights matrix (SWM) based on polygon
    contiguity. 

    INPUTS: 
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    concept {str, EUCLIDEAN}: EUCLIDEAN or MANHATTAN
    kNeighs {int, 0}: number of neighbors to return (1)
    rowStandard {bool, True}: row standardize weights?
    contiguityType {str, Rook}: {Rook = Edges Only, Queen = Edges/Vertices}

    NOTES:
    (1) kNeighs is used if polygon is not contiguous. E.g. Islands
    """

    #### Set Default Progressor for Neighborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 2)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField, 
                                    types = [0,1])

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.catPath, [masterField],
                                 spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs = 2)

    #### Assure k-Nearest is Less Than Number of Features ####
    if kNeighs >= N:
        ARCPY.AddIDMessage("ERROR", 975)
        raise SystemExit()

    #### Create Nearest Neighbor Search Type For Islands ####
    if kNeighs > 0:
        gaSearch = GAPY.ga_nsearch(gaTable)
        concept, gaConcept = WU.validateDistanceMethod(concept, ssdo.spatialRef)
        gaSearch.init_nearest(0.0, kNeighs, gaConcept)
        forceNeighbor = True
        neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch, 
                                               weight_type = 1,
                                               row_standard = False)
    else:
        forceNeighbor = False
        neighSearch = None

    #### Create Polygon Neighbors ####
    polyNeighborDict = WU.polygonNeighborDict(inputFC, masterField, 
                                   contiguityType = contiguityType)

    #### Write Poly Neighbor List (Dict) ####
    #### Set Progressor for SWM Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, N, 1)

    #### Initialize Spatial Weights Matrix File ####
    if contiguityType == "ROOK":
        wType = 4
    else:
        wType = 5

    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName, 
                             N, rowStandard, inputFC = inputFC,
                             wType = wType, distanceMethod = concept,
                             numNeighs = kNeighs)

    #### Keep Track of Polygons w/o Neighbors ####
    islandPolys = []
    
    #### Write Polygon Contiguity to SWM File ####
    for row in xrange(N):
        rowInfo = gaTable[row]
        oid = rowInfo[0]
        masterID = rowInfo[2]
        neighs = polyNeighborDict[masterID]
        nn = len(neighs)
        if forceNeighbor:
            if nn < kNeighs:
                #### Only Force KNN If Specified & Contiguity is Less ####
                islandPolys.append(oid)
                flag = True
                knnNeighs, knnWeights = neighWeights[row]
                c = 0
                while flag:
                    try:
                        neighID = gaTable[knnNeighs[c]][2]
                        if neighID not in neighs:
                            neighs.append(neighID)
                            nn += 1
                            if nn == kNeighs:
                                flag = False
                        c += 1
                    except:
                        flag = False

        weights = NUM.ones(nn)

        #### Add Weights Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights)

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    #### Report on Features with No Neighbors ####
    countIslands = len(islandPolys)
    if countIslands:
        islandPolys.sort()
        if countIslands > 30:
            islandPolys = islandPolys[0:30]
        
        ERROR.warningNoNeighbors(N, countIslands, islandPolys, ssdo.oidName, 
                                 forceNeighbor = forceNeighbor, 
                                 contiguity = True)

    #### Clean Up ####
    swmWriter.close()
    del gaTable

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()

    del polyNeighborDict
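
A usage sketch for polygon2SWM; the inputs are hypothetical, using queen contiguity with one forced nearest neighbor so island polygons still receive a neighbor:

# Hypothetical inputs: queen-contiguity SWM with a k-nearest fallback for islands.
inputFC = r"C:\Data\Counties.gdb\Counties"    # assumed polygon feature class
swmFile = r"C:\Data\Counties_queen.swm"
polygon2SWM(inputFC, swmFile, masterField="MYID", kNeighs=1,
            rowStandard=True, contiguityType="QUEEN")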
Example #6
    def createOutput(self, outputFC):
        """Creates an Output Feature Class with the Standard Distances.

        INPUTS:
        outputFC (str): path to the output feature class
        """

        #### Validate Output Workspace ####
        ERROR.checkOutputPath(outputFC)

        #### Shorthand Attributes ####
        ssdo = self.ssdo
        caseField = self.caseField

        #### Increase Extent if not Projected ####
        if ssdo.spatialRefType != "Projected":
            seValues = self.se.values()
            if len(seValues):
                maxSE = NUM.array([ i[0:2] for i in seValues ]).max()
                largerExtent = UTILS.increaseExtentByConstant(ssdo.extent,
                                                        constant = maxSE)
                largerExtent = [ LOCALE.str(i) for i in largerExtent ]
                ARCPY.env.XYDomain = " ".join(largerExtent)

        #### Create Output Feature Class ####
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
        outPath, outName = OS.path.split(outputFC)

        try:
            DM.CreateFeatureclass(outPath, outName, "POLYGON", 
                                  "", ssdo.mFlag, ssdo.zFlag, 
                                  ssdo.spatialRefString)
        except:
            ARCPY.AddIDMessage("ERROR", 210, outputFC)
            raise SystemExit()

        #### Add Fields to Output FC ####
        dataFieldNames = UTILS.getFieldNames(seFieldNames, outPath)
        shapeFieldNames = ["SHAPE@"]
        for fieldName in dataFieldNames:
            UTILS.addEmptyField(outputFC, fieldName, "DOUBLE")

        caseIsDate = False
        if caseField:
            fcCaseField = ssdo.allFields[caseField]
            validCaseName = UTILS.validQFieldName(fcCaseField, outPath)
            caseType = UTILS.convertType[fcCaseField.type]
            UTILS.addEmptyField(outputFC, validCaseName, caseType)
            dataFieldNames.append(validCaseName)
            if caseType.upper() == "DATE":
                caseIsDate = True

        #### Write Output ####
        badCaseRadians = []
        allFieldNames = shapeFieldNames + dataFieldNames
        rows = DA.InsertCursor(outputFC, allFieldNames)
        for case in self.caseKeys:

            #### Get Results ####
            xVal, yVal = self.meanCenter[case]
            seX, seY, degreeRotation, radianR1, radianR2 = self.se[case]
            seX2 = seX**2.0
            seY2 = seY**2.0

            #### Create Empty Polygon Geometry ####
            poly = ARCPY.Array()

            #### Check for Valid Radius ####
            seXZero = UTILS.compareFloat(0.0, seX, rTol = .0000001)
            seXNan = NUM.isnan(seX)
            seXBool = seXZero + seXNan
            seYZero = UTILS.compareFloat(0.0, seY, rTol = .0000001)
            seYNan = NUM.isnan(seY)
            seYBool = seYZero + seYNan
            if seXBool or seYBool:
                badRadian = 6
                badCase = UTILS.caseValue2Print(case, self.caseIsString)
                badCaseRadians.append(badCase)
            else:
                badRadian = 0
                cosRadian = NUM.cos(radianR1)
                sinRadian = NUM.sin(radianR1)

                #### Calculate a Point For Each ####
                #### Degree in Ellipse Polygon ####                
                for degree in NUM.arange(0, 360): 
                    try:
                        radians = UTILS.convert2Radians(degree)
                        tanVal2 = NUM.tan(radians)**2.0
                        dX = MATH.sqrt((seX2 * seY2) /
                                      (seY2 + (seX2 * tanVal2)))
                        dY = MATH.sqrt((seY2 * (seX2 - dX**2.0)) /
                                       seX2)

                        #### Adjust for Quadrant ####
                        if 90 <= degree < 180:
                            dX = -dX
                        elif 180 <= degree < 270:
                            dX = -dX
                            dY = -dY
                        elif degree >= 270:
                            dY = -dY

                        #### Rotate X and Y ####
                        dXr = dX * cosRadian - dY * sinRadian
                        dYr = dX * sinRadian + dY * cosRadian

                        #### Create Point Shifted to ####
                        #### Ellipse Centroid ####
                        pntX = dXr + xVal
                        pntY = dYr + yVal
                        pnt = ARCPY.Point(pntX, pntY, ssdo.defaultZ)
                        poly.add(pnt)
                    except:
                        badRadian += 1
                        if badRadian == 6:
                            badCase = UTILS.caseValue2Print(case, 
                                               self.caseIsString)
                            badCaseRadians.append(badCase)
                            break

            if badRadian < 6:
                #### Create and Populate New Feature ####
                poly = ARCPY.Polygon(poly, None, True)
                rowResult = [poly, xVal, yVal, seX, seY, radianR2]

                if caseField:
                    caseValue = case.item()
                    if caseIsDate:
                        caseValue = TUTILS.iso2DateTime(caseValue)
                    rowResult.append(caseValue)
                rows.insertRow(rowResult)

        #### Report Bad Cases Due to Geometry (coincident pts) ####
        nBadRadians = len(badCaseRadians)
        if nBadRadians:
            if caseField:
                badCaseRadians = " ".join(badCaseRadians)
                ARCPY.AddIDMessage("WARNING", 1011, caseField, badCaseRadians)
            else:
                ARCPY.AddIDMessage("ERROR", 978)
                raise SystemExit()

        #### Return Extent to Normal if not Projected ####
        if ssdo.spatialRefType != "Projected":
            ARCPY.env.XYDomain = ""

        #### Clean Up ####
        del rows

        #### Set Attribute ####
        self.outputFC = outputFC
Example #7
    def output2NewFC(self,
                     outputFC,
                     candidateFields,
                     appendFields=[],
                     fieldOrder=[]):
        """Creates a new feature class with the same shape charcteristics as
        the source input feature class and appends data to it.

        INPUTS:
        outputFC (str): catalogue path to output feature class
        candidateFields (dict): fieldName = instance of CandidateField
        appendFields {list, []}: field names in the order you want appended
        fieldOrder {list, []}: the order with which to write fields
        """

        #### Initial Progressor Bar ####
        ARCPY.overwriteOutput = True
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84006))

        #### Validate Output Workspace ####
        ERROR.checkOutputPath(outputFC)

        #### Create Path for Output FC ####
        outPath, outName = OS.path.split(outputFC)

        #### Get Output Name for SDE if Necessary ####
        baseType = UTILS.getBaseWorkspaceType(outPath)
        if baseType.upper() == 'REMOTEDATABASE':
            outName = outName.split(".")[-1]
        self.outputFC = OS.path.join(outPath, outName)

        #### Assess Whether to Honor Original Field Nullable Flag ####
        setNullable = UTILS.setToNullable(self.catPath, self.outputFC)

        #### Add Null Value Flag ####
        outIsShapeFile = UTILS.isShapeFile(self.outputFC)

        #### Create Output Field Names to be Appended From Input ####
        inputFieldNames = ["SHAPE@", self.masterField]
        appendFieldNames = []
        masterIsOID = self.masterField == self.oidName
        if masterIsOID:
            appendFieldNames.append("SOURCE_ID")
        else:
            master = self.allFields[self.masterField.upper()]
            returnName = UTILS.returnOutputFieldName(master)
            appendFieldNames.append(returnName)

        for fieldName in appendFields:
            field = self.allFields[fieldName.upper()]
            returnName = UTILS.returnOutputFieldName(field)
            inputFieldNames.append(fieldName)
            appendFieldNames.append(returnName)
        appendFieldNames = UTILS.createAppendFieldNames(
            appendFieldNames, outPath)
        masterOutName = appendFieldNames[0]

        #### Create Field Mappings for Visible Fields ####
        outputFieldMaps = ARCPY.FieldMappings()

        #### Add Input Fields to Output ####
        for ind, fieldName in enumerate(appendFieldNames):
            if ind == 0:
                #### Master Field ####
                sourceFieldName = self.masterField
                if masterIsOID:
                    fieldType = "LONG"
                    alias = fieldName
                    setOutNullable = False
                    fieldLength = None
                    fieldPrecision = None
                else:
                    masterOutField = self.allFields[self.masterField.upper()]
                    fieldType = masterOutField.type
                    alias = masterOutField.baseName
                    setOutNullable = setNullable
                    fieldLength = masterOutField.length
                    fieldPrecision = masterOutField.precision
            else:
                #### Append Fields ####
                sourceFieldName = appendFields[ind - 1]
                outField = self.allFields[sourceFieldName]
                fieldType = outField.type
                alias = outField.baseName
                setOutNullable = setNullable
                fieldLength = outField.length
                fieldPrecision = outField.precision

            #### Create Candidate Field ####
            outCandidate = CandidateField(fieldName,
                                          fieldType,
                                          None,
                                          alias=alias,
                                          precision=fieldPrecision,
                                          length=fieldLength)

            #### Create Output Field Map ####
            outFieldMap = UTILS.createOutputFieldMap(
                self.inputFC,
                sourceFieldName,
                outFieldCandidate=outCandidate,
                setNullable=setOutNullable)

            #### Add Output Field Map to New Field Mapping ####
            outputFieldMaps.addFieldMap(outFieldMap)

        #### Do FC2FC Without Extent Env Var ####
        FC2FC = UTILS.clearExtent(CONV.FeatureClassToFeatureClass)
        try:
            FC2FC(self.inputFC, outPath, outName, "", outputFieldMaps)
        except:
            ARCPY.AddIDMessage("ERROR", 210, self.outputFC)
            raise SystemExit()

        #### Create/Verify Result Field Order ####
        fieldKeys = candidateFields.keys()
        fieldKeys.sort()
        if len(fieldOrder) == len(fieldKeys):
            fKeySet = set(fieldKeys)
            fieldOrderSet = set(fieldOrder)
            if fieldOrderSet == fKeySet:
                fieldKeys = fieldOrder

            del fKeySet, fieldOrderSet

        #### Add Empty Output Analysis Fields ####
        outputFieldNames = [masterOutName]
        for fieldInd, fieldName in enumerate(fieldKeys):
            field = candidateFields[fieldName]
            field.copy2FC(outputFC)
            outputFieldNames.append(fieldName)

            #### Replace NaNs for Shapefiles ####
            if outIsShapeFile:
                if field.type != "TEXT":
                    isNaN = NUM.isnan(field.data)
                    if NUM.any(isNaN):
                        field.data[isNaN] = UTILS.shpFileNull[field.type]

        #### Populate Output Feature Class with Values ####
        ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84003), 0, self.numObs,
                            1)
        outRows = DA.UpdateCursor(self.outputFC, outputFieldNames)

        for row in outRows:
            masterID = row[0]
            if self.master2Order.has_key(masterID):
                order = self.master2Order[masterID]

                #### Create Output Row from Input ####
                resultValues = [masterID]

                #### Add Result Values ####
                for fieldName in fieldKeys:
                    field = candidateFields[fieldName]
                    fieldValue = field.data.item(order)
                    resultValues.append(fieldValue)

                #### Insert Values into Output ####
                outRows.updateRow(resultValues)

            else:
                #### Bad Record ####
                outRows.deleteRow()

            ARCPY.SetProgressorPosition()

        #### Clean Up ####
        del outRows
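
A usage sketch for output2NewFC, assuming the method belongs to an SSDataObject-like instance named ssdo and that CandidateField accepts a field name, field type, and data array as its first three positional arguments (an inference from the call in the method body, where the data argument is None):

# Hypothetical example: write one DOUBLE result field next to the master ID field.
resultValues = NUM.random.rand(ssdo.numObs)            # stand-in analysis results
candidate = CandidateField("RESULT", "DOUBLE", resultValues)
ssdo.output2NewFC(r"C:\Data\Output.gdb\Results",       # hypothetical output path
                  {"RESULT": candidate},
                  appendFields=["NAME"],                # hypothetical input field to carry over
                  fieldOrder=["RESULT"])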
Example #8
    def obtainDataGA(self,
                     masterField,
                     fields=[],
                     types=[0, 1, 2, 3, 5, 6],
                     minNumObs=0,
                     warnNumObs=0):
        """Takes a list of field names and returns it in a dictionary
        structure.

        INPUTS:
        masterField (str): name of field being used as the master
        fields {list, []}: name(s) of the field to be returned
        types (list): types of data allowed to be returned (1)
        minNumObs {int, 0}: minimum number of observations for error
        warnNumObs {int, 0}: minimum number of observations for warning

        ATTRIBUTES:
        gaTable (structure): instance of the GA Table
        fields (dict): fieldName = instance of FCField
        master2Order (dict): masterID = order in lists
        order2Master (dict): order in lists = masterID
        masterField (str): field that serves as the master
        badRecords (list): master IDs that could not be read
        xyCoords (array, numObs x 2): xy-coordinates for feature centroids

        NOTES:
        (1) No Text Fields; short [0], long [1], float [2], double[3]
        """

        #### Validation of Master Field ####
        verifyMaster = ERROR.checkField(self.allFields,
                                        masterField,
                                        types=[0, 1, 5])

        #### Set MasterIsOID Boolean ####
        self.masterIsOID = masterField == self.oidName

        #### Set Master and Data Indices ####
        if self.masterIsOID:
            self.masterColumnIndex = 0
            self.dataColumnIndex = 2
            fieldList = []
        else:
            self.masterColumnIndex = 2
            self.dataColumnIndex = 3
            fieldList = [masterField]

        #### Validation and Initialization of Data Fields ####
        numFields = len(fields)
        for field in fields:
            fType = ERROR.checkField(self.allFields, field, types=types)
            fieldList.append(field)
            self.fields[field] = self.allFields[field]

        #### ZCoords Are Last ####
        getZBool = self.hasZ and (not self.renderType)
        if getZBool:
            fieldList.append("SHAPE&Z")

        #### Create GA Data Structure ####
        cnt = UTILS.getCount(self.inputFC)
        fieldList = tuple(fieldList)
        gaTable, gaInfo = WU.gaTable(self.inputFC,
                                     fieldNames=fieldList,
                                     spatRef=self.spatialRefString)

        #### Check Whether the Number of Features is Appropriate ####
        numObs = gaInfo[0]
        ERROR.checkNumberOfObs(numObs,
                               minNumObs=minNumObs,
                               warnNumObs=warnNumObs,
                               silentWarnings=self.silentWarnings)

        #### Process any bad records encountered ####
        numBadIDs = cnt - numObs
        if numBadIDs:
            badIDs = WU.parseGAWarnings(gaTable.warnings)
            if not self.silentWarnings:
                ERROR.reportBadRecords(cnt,
                                       numBadIDs,
                                       badIDs,
                                       label=self.oidName)
        else:
            badIDs = []

        #### Initialization of Centroids  ####
        xyCoords = NUM.empty((numObs, 2), float)

        #### Z Coords ####
        if self.hasZ:
            zCoords = NUM.empty((numObs, ), float)

        #### Create Empty Data Arrays ####
        for fieldName, fieldObj in self.fields.iteritems():
            fieldObj.createDataArray(numObs)

        #### Populate SSDataObject ####
        ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84001), 0, numObs, 1)
        for row in xrange(numObs):
            rowInfo = gaTable[row]
            x, y = rowInfo[1]
            masterID = int(rowInfo[self.masterColumnIndex])
            if self.master2Order.has_key(masterID):
                ARCPY.AddIDMessage("ERROR", 644, masterField)
                ARCPY.AddIDMessage("ERROR", 643)
                raise SystemExit()
            else:
                self.master2Order[masterID] = row
                self.order2Master[row] = masterID
                xyCoords[row] = (x, y)
            if numFields:
                restFields = rowInfo[self.dataColumnIndex:]
                for fieldInd, fieldName in enumerate(fields):
                    self.fields[fieldName].data[row] = restFields[fieldInd]
            if self.hasZ:
                if getZBool:
                    zCoords[row] = rowInfo[-1]
                else:
                    zCoords[row] = NUM.nan

            ARCPY.SetProgressorPosition()

        #### Set the Hidden Fields (E.g. Not in Use) ####
        self.setHiddenFields()

        #### Reset Extent to Honor Env and Subsets ####
        try:
            self.extent = UTILS.resetExtent(xyCoords)
        except:
            pass

        #### Reset Coordinates for Chordal ####
        if self.useChordal:
            #### Project to XY on Spheroid ####
            self.spheroidCoords = ARC._ss.lonlat_to_xy(xyCoords,
                                                       self.spatialRef)
            self.sliceInfo = UTILS.SpheroidSlice(self.extent, self.spatialRef)
        else:
            self.spheroidCoords = None
            self.sliceInfo = None

        #### Set Further Attributes ####
        self.badRecords = badIDs
        self.xyCoords = xyCoords
        self.masterField = masterField
        self.gaTable = gaTable
        self.numObs = numObs
        if self.hasZ:
            self.zCoords = zCoords
        else:
            self.zCoords = None
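
A usage sketch for obtainDataGA, assuming the SSDataObject constructor shown in the other snippets can be called with just the input feature class, and using a hypothetical numeric field:

# Hypothetical example: load feature centroids and one numeric field via the GA table.
ssdo = SSDO.SSDataObject(r"C:\Data\Counties.gdb\Counties")   # hypothetical input path
ssdo.obtainDataGA(ssdo.oidName, fields=["POP2010"],          # hypothetical field name
                  minNumObs=3, warnNumObs=30)
coords = ssdo.xyCoords                   # numObs x 2 array of centroid coordinates
population = ssdo.fields["POP2010"].data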
Example #9
    def __init__(self,
                 ssdo,
                 weightField=None,
                 caseField=None,
                 stdDeviations=1.0):

        #### Set Initial Attributes ####
        UTILS.assignClassAttr(self, locals())

        #### Set Data ####
        self.xyCoords = self.ssdo.xyCoords

        #### Verify Weights ####
        if weightField:
            self.weights = self.ssdo.fields[weightField].returnDouble()

            #### Report Negative Weights ####
            lessThanZero = NUM.where(self.weights < 0.0)
            if len(lessThanZero[0]):
                self.weights[lessThanZero] = 0.0
                ARCPY.AddIDMessage("Warning", 941)

            #### Verify Weight Sum ####
            self.weightSum = self.weights.sum()
            if not self.weightSum > 0.0:
                ARCPY.AddIDMessage("ERROR", 898)
                raise SystemExit()
        else:
            self.weights = NUM.ones((self.ssdo.numObs, ))

        #### Set Case Field ####
        if caseField:
            caseType = ssdo.allFields[caseField].type.upper()
            self.caseIsString = caseType == "STRING"
            self.caseVals = self.ssdo.fields[caseField].data
            cases = NUM.unique(self.caseVals)
            if self.caseIsString:
                self.uniqueCases = cases[NUM.where(cases != "")]
            else:
                self.uniqueCases = cases
        else:
            self.caseIsString = False
            self.caseVals = NUM.ones((self.ssdo.numObs, ), int)
            self.uniqueCases = [1]

        #### Set Result Dict ####
        meanCenter = COLL.defaultdict(NUM.array)
        sd = COLL.defaultdict(float)

        #### Keep Track of Bad Cases ####
        badCases = []

        #### Calculate Mean Center and Standard Distance ####
        for case in self.uniqueCases:
            indices = NUM.where(self.caseVals == case)
            numFeatures = len(indices[0])
            xy = self.xyCoords[indices]
            w = self.weights[indices]
            w.shape = numFeatures, 1
            weightSum = w.sum()
            if (weightSum != 0.0) and (numFeatures > 2):
                xyWeighted = w * xy

                #### Mean Center ####
                centers = xyWeighted.sum(0) / weightSum
                meanCenter[case] = centers

                #### Standard Distance ####
                devXY = xy - centers
                sigXY = (w * devXY**2.0).sum(0) / weightSum
                sdVal = (MATH.sqrt(sigXY.sum())) * stdDeviations
                sd[case] = sdVal
            else:
                badCases.append(case)

        #### Report Bad Cases ####
        nCases = len(self.uniqueCases)
        nBadCases = len(badCases)
        badCases.sort()
        if nBadCases:
            cBool = self.caseIsString
            if not self.caseIsString:
                badCases = [UTILS.caseValue2Print(i, cBool) for i in badCases]
            ERROR.reportBadCases(nCases, nBadCases, badCases, label=caseField)

        #### Sorted Case List ####
        caseKeys = sd.keys()
        caseKeys.sort()
        self.caseKeys = caseKeys

        #### Set Attributes ####
        self.meanCenter = meanCenter
        self.sd = sd
        self.badCases = badCases
        self.caseField = caseField
        self.stdDeviations = stdDeviations
        self.weightField = weightField
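
A usage sketch for the constructor above; the class name StandardDistance is hypothetical (the attributes it sets read like a weighted standard-distance helper), and ssdo is assumed to be an SSDataObject populated as in Example #8:

# Hypothetical class name and weight field; prints the per-case results.
sdCalc = StandardDistance(ssdo, weightField="POP2010",
                          caseField=None, stdDeviations=1.0)
for case in sdCalc.caseKeys:
    print(case, sdCalc.meanCenter[case], sdCalc.sd[case])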
Example #10
    def __init__ (self, ssdo, weightField = None, caseField = None, 
                  attFields = None):

        #### Set Initial Attributes ####
        UTILS.assignClassAttr(self, locals())

        #### Set Data ####
        self.xyCoords = self.ssdo.xyCoords

        #### Verify Weights ####
        if weightField:
            self.weights = self.ssdo.fields[weightField].returnDouble()

            #### Report Negative Weights ####
            lessThanZero = NUM.where(self.weights < 0.0)
            if len(lessThanZero[0]):
                self.weights[lessThanZero] = 0.0
                ARCPY.AddIDMessage("Warning", 941)

            #### Verify Weight Sum ####
            self.weightSum = self.weights.sum()
            if not self.weightSum > 0.0: 
                ARCPY.AddIDMessage("ERROR", 898)
                raise SystemExit()
        else:
            self.weights = NUM.ones((self.ssdo.numObs,))

        #### Set Case Field ####
        if caseField:
            caseType = ssdo.allFields[caseField].type.upper()
            self.caseIsString = caseType == "STRING"
            self.caseVals = self.ssdo.fields[caseField].data
            cases = NUM.unique(self.caseVals)
            if self.caseIsString:
                self.uniqueCases = cases[NUM.where(cases != "")]
            else:
                self.uniqueCases = cases
        else:
            self.caseIsString = False
            self.caseVals = NUM.ones((self.ssdo.numObs, ), int)
            self.uniqueCases = [1]

        #### Set Result Dict ####
        medianCenter = COLL.defaultdict(NUM.array)

        if attFields:
            attCenter = COLL.defaultdict(NUM.array)

        #### Keep Track of Bad Cases ####
        badCases = []

        #### Calculate Results ####
        for case in self.uniqueCases:
            indices = NUM.where(self.caseVals == case)
            numFeatures = len(indices[0])
            xy = self.xyCoords[indices]
            w = self.weights[indices]
            weightSum = w.sum()
            if (weightSum != 0.0) and (numFeatures > 0):

                #### Calculate Median Center ####
                medX, medY, iters = calcMedianCenter(xy, w)
                medianCenter[case] = (medX, medY)
                if attFields:
                    attMeds = []
                    for attField in attFields:
                        attCaseVals = ssdo.fields[attField].returnDouble()
                        attCaseVals = attCaseVals[indices] 
                        attMed = STATS.median(attCaseVals, weights = w)
                        attMeds.append(attMed)
                    attMeds = NUM.array(attMeds)
                    attCenter[case] = attMeds
            else:
                badCases.append(case)

        #### Report Bad Cases ####
        nCases = len(self.uniqueCases)
        nBadCases = len(badCases) 
        badCases.sort()
        if nBadCases:
            cBool = self.caseIsString
            if not self.caseIsString:
                badCases = [UTILS.caseValue2Print(i, cBool) for i in badCases]
            ERROR.reportBadCases(nCases, nBadCases, badCases, 
                                 label = caseField)   

        #### Sorted Case List ####
        caseKeys = medianCenter.keys()
        caseKeys.sort()
        self.caseKeys = caseKeys

        #### Set Attributes ####
        self.medianCenter = medianCenter
        self.badCases = badCases
        self.caseField = caseField
        self.attFields = attFields
        self.weightField = weightField
        if attFields:
            self.attCenter = attCenter
Example #11
def delaunay2SWM(inputFC, swmFile, masterField, rowStandard = True):
    """Creates a sparse spatial weights matrix (SWM) based on Delaunay
    Triangulation.  

    INPUTS: 
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    rowStandard {bool, True}: row standardize weights?
    """

    #### Set Default Progressor for Neighborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 2)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField, types = [0,1])

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.catPath, [masterField],
                                 spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs = 2)

    #### Process any bad records encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        err = ERROR.reportBadRecords(cnt, numBadRecs, badRecs,
                                     label = ssdo.oidName)

    #### Create Delaunay Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_delaunay()
    neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch, 
                                           weight_type = 1,
                                           row_standard = False)

    #### Set Progressor for Weights Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, N, 1)

    #### Initialize Spatial Weights Matrix File ####
    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName, 
                             N, rowStandard, inputFC = inputFC,
                             wType = 3)

    #### Unique Master ID Dictionary ####
    masterSet = set([])

    for row in xrange(N):
        masterID = int(gaTable[row][2])
        if masterID in masterSet:
            ARCPY.AddIDMessage("Error", 644, masterField)
            ARCPY.AddIDMessage("Error", 643)
            raise SystemExit()
        else:
            masterSet.add(masterID)

        neighs, weights = neighWeights[row]
        neighs = [ gaTable[nh][2] for nh in neighs ]

        #### Add Spatial Weights Matrix Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights) 

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    swmWriter.close()
    del gaTable

    #### Report if Any Features Have No Neighbors ####
    swmWriter.reportNoNeighbors()

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()
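
A usage sketch for delaunay2SWM with hypothetical inputs:

# Hypothetical inputs: Delaunay-triangulation neighbors written to an SWM file.
inputFC = r"C:\Data\Cities.gdb\Cities"    # assumed point feature class
swmFile = r"C:\Data\Cities_delaunay.swm"
delaunay2SWM(inputFC, swmFile, masterField="MYID", rowStandard=True)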
Example #12
def polygon2SWM(inputFC, swmFile, masterField, 
                concept = "EUCLIDEAN", kNeighs = 0,
                rowStandard = True, contiguityType = "ROOK"):
    """Creates a sparse spatial weights matrix (SWM) based on polygon
    contiguity. 

    INPUTS: 
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    concept {str, EUCLIDEAN}: EUCLIDEAN or MANHATTAN
    kNeighs {int, 0}: number of neighbors to return (1)
    rowStandard {bool, True}: row standardize weights?
    contiguityType {str, Rook}: {Rook = Edges Only, Queen = Edges/Vertices}

    NOTES:
    (1) kNeighs is used if polygon is not contiguous. E.g. Islands
    """

    #### Set Default Progressor for Neighborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 2)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField, 
                                    types = [0,1])

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.catPath, [masterField],
                                 spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs = 2)

    #### Assure k-Nearest is Less Than Number of Features ####
    if kNeighs >= N:
        ARCPY.AddIDMessage("ERROR", 975)
        raise SystemExit()

    #### Create Nearest Neighbor Search Type For Islands ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    concept, gaConcept = WU.validateDistanceMethod(concept, ssdo.spatialRef)
    gaSearch.init_nearest(0.0, kNeighs, gaConcept)
    if kNeighs > 0:
        forceNeighbor = True
        neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch, 
                                              weight_type = 1,
                                              row_standard = False)
    else:
        forceNeighbor = False
        neighSearch = None

    #### Create Polygon Neighbors ####
    polyNeighborDict = WU.polygonNeighborDict(inputFC, masterField, 
                                   contiguityType = contiguityType)

    #### Write Poly Neighbor List (Dict) ####
    #### Set Progressor for SWM Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, N, 1)

    #### Initialize Spatial Weights Matrix File ####
    if contiguityType == "ROOK":
        wType = 4
    else:
        wType = 5

    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName, 
                             N, rowStandard, inputFC = inputFC,
                             wType = wType, distanceMethod = concept,
                             numNeighs = kNeighs)

    #### Keep Track of Polygons w/o Neighbors ####
    islandPolys = []
    
    #### Write Polygon Contiguity to SWM File ####
    for row in xrange(N):
        rowInfo = gaTable[row]
        oid = rowInfo[0]
        masterID = rowInfo[2]
        neighs = polyNeighborDict[masterID]
        if neighs:
            weights = [ 1. for nh in neighs ]
            isIsland = False
        else:
            isIsland = True
            islandPolys.append(oid)
            weights = []

        #### Get Nearest Neighbor Based On Centroid Distance ####
        if isIsland and forceNeighbor:
            neighs, weights = neighWeights[row]
            neighs = [ gaTable[nh][2] for nh in neighs ]

        #### Add Weights Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights)

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    #### Report on Features with No Neighbors ####
    countIslands = len(islandPolys)
    if countIslands:
        islandPolys.sort()
        if countIslands > 30:
            islandPolys = islandPolys[0:30]
        
        ERROR.warningNoNeighbors(N, countIslands, islandPolys, ssdo.oidName, 
                                 forceNeighbor = forceNeighbor, 
                                 contiguity = True)

    #### Clean Up ####
    swmWriter.close()
    del gaTable

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()

    del polyNeighborDict
Example #13
    def createOutput(self, outputFC):
        """Creates an Output Feature Class with the Standard Distances.

        INPUTS:
        outputFC (str): path to the output feature class
        """

        #### Validate Output Workspace ####
        ERROR.checkOutputPath(outputFC)

        #### Shorthand Attributes ####
        ssdo = self.ssdo
        caseField = self.caseField

        #### Increase Extent if not Projected ####
        if ssdo.spatialRefType != "Projected":
            seValues = self.se.values()
            if len(seValues):
                maxSE = NUM.array([i[0:2] for i in seValues]).max()
                largerExtent = UTILS.increaseExtentByConstant(ssdo.extent,
                                                              constant=maxSE)
                largerExtent = [LOCALE.str(i) for i in largerExtent]
                ARCPY.env.XYDomain = " ".join(largerExtent)

        #### Create Output Feature Class ####
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
        outPath, outName = OS.path.split(outputFC)

        try:
            DM.CreateFeatureclass(outPath, outName, "POLYGON", "", ssdo.mFlag,
                                  ssdo.zFlag, ssdo.spatialRefString)
        except:
            ARCPY.AddIDMessage("ERROR", 210, outputFC)
            raise SystemExit()

        #### Add Fields to Output FC ####
        dataFieldNames = UTILS.getFieldNames(seFieldNames, outPath)
        shapeFieldNames = ["SHAPE@"]
        for fieldName in dataFieldNames:
            UTILS.addEmptyField(outputFC, fieldName, "DOUBLE")

        caseIsDate = False
        if caseField:
            fcCaseField = ssdo.allFields[caseField]
            validCaseName = UTILS.validQFieldName(fcCaseField, outPath)
            caseType = UTILS.convertType[fcCaseField.type]
            UTILS.addEmptyField(outputFC, validCaseName, caseType)
            dataFieldNames.append(validCaseName)
            if caseType.upper() == "DATE":
                caseIsDate = True

        #### Write Output ####
        badCaseRadians = []
        allFieldNames = shapeFieldNames + dataFieldNames
        rows = DA.InsertCursor(outputFC, allFieldNames)
        for case in self.caseKeys:

            #### Get Results ####
            xVal, yVal = self.meanCenter[case]
            seX, seY, degreeRotation, radianR1, radianR2 = self.se[case]
            seX2 = seX**2.0
            seY2 = seY**2.0

            #### Create Empty Polygon Geometry ####
            poly = ARCPY.Array()

            #### Check for Valid Radius ####
            seXZero = UTILS.compareFloat(0.0, seX, rTol=.0000001)
            seXNan = NUM.isnan(seX)
            seXBool = seXZero + seXNan
            seYZero = UTILS.compareFloat(0.0, seY, rTol=.0000001)
            seYNan = NUM.isnan(seY)
            seYBool = seYZero + seYNan
            if seXBool or seYBool:
                badRadian = 6
                badCase = UTILS.caseValue2Print(case, self.caseIsString)
                badCaseRadians.append(badCase)
            else:
                badRadian = 0
                cosRadian = NUM.cos(radianR1)
                sinRadian = NUM.sin(radianR1)

                #### Calculate a Point For Each ####
                #### Degree in Ellipse Polygon ####
                for degree in NUM.arange(0, 360):
                    try:
                        radians = UTILS.convert2Radians(degree)
                        tanVal2 = NUM.tan(radians)**2.0
                        dX = MATH.sqrt(
                            (seX2 * seY2) / (seY2 + (seX2 * tanVal2)))
                        dY = MATH.sqrt((seY2 * (seX2 - dX**2.0)) / seX2)

                        #### Adjust for Quadrant ####
                        if 90 <= degree < 180:
                            dX = -dX
                        elif 180 <= degree < 270:
                            dX = -dX
                            dY = -dY
                        elif degree >= 270:
                            dY = -dY

                        #### Rotate X and Y ####
                        dXr = dX * cosRadian - dY * sinRadian
                        dYr = dX * sinRadian + dY * cosRadian

                        #### Create Point Shifted to ####
                        #### Ellipse Centroid ####
                        pntX = dXr + xVal
                        pntY = dYr + yVal
                        pnt = ARCPY.Point(pntX, pntY, ssdo.defaultZ)
                        poly.add(pnt)
                    except:
                        badRadian += 1
                        if badRadian == 6:
                            badCase = UTILS.caseValue2Print(
                                case, self.caseIsString)
                            badCaseRadians.append(badCase)
                            break

            if badRadian < 6:
                #### Create and Populate New Feature ####
                poly = ARCPY.Polygon(poly, None, True)
                rowResult = [poly, xVal, yVal, seX, seY, radianR2]

                if caseField:
                    caseValue = case.item()
                    if caseIsDate:
                        caseValue = TUTILS.iso2DateTime(caseValue)
                    rowResult.append(caseValue)
                rows.insertRow(rowResult)

        #### Report Bad Cases Due to Geometry (coincident pts) ####
        nBadRadians = len(badCaseRadians)
        if nBadRadians:
            if caseField:
                badCaseRadians = " ".join(badCaseRadians)
                ARCPY.AddIDMessage("WARNING", 1011, caseField, badCaseRadians)
            else:
                ARCPY.AddIDMessage("ERROR", 978)
                raise SystemExit()

        #### Return Extent to Normal if not Projected ####
        if ssdo.spatialRefType != "Projected":
            ARCPY.env.XYDomain = ""

        #### Clean Up ####
        del rows

        #### Set Attribute ####
        self.outputFC = outputFC
Example #14
    def __init__(self,
                 ssdo,
                 weightField=None,
                 caseField=None,
                 stdDeviations=1.0):

        #### Set Initial Attributes ####
        UTILS.assignClassAttr(self, locals())

        #### Set Data ####
        self.xyCoords = self.ssdo.xyCoords

        #### Verify Weights ####
        if weightField:
            self.weights = self.ssdo.fields[weightField].returnDouble()

            #### Report Negative Weights ####
            lessThanZero = NUM.where(self.weights < 0.0)
            if len(lessThanZero[0]):
                self.weights[lessThanZero] = 0.0
                ARCPY.AddIDMessage("Warning", 941)

            #### Verify Weight Sum ####
            self.weightSum = self.weights.sum()
            if not self.weightSum > 0.0:
                ARCPY.AddIDMessage("ERROR", 898)
                raise SystemExit()
        else:
            self.weights = NUM.ones((self.ssdo.numObs, ))

        #### Set Case Field ####
        if caseField:
            caseType = ssdo.allFields[caseField].type.upper()
            self.caseIsString = caseType == "STRING"
            self.caseVals = self.ssdo.fields[caseField].data
            cases = NUM.unique(self.caseVals)
            if self.caseIsString:
                self.uniqueCases = cases[NUM.where(cases != "")]
            else:
                self.uniqueCases = cases
        else:
            self.caseIsString = False
            self.caseVals = NUM.ones((self.ssdo.numObs, ), int)
            self.uniqueCases = [1]

        #### Set Result Dict ####
        meanCenter = COLL.defaultdict(NUM.array)
        se = COLL.defaultdict(float)

        #### Keep Track of Bad Cases ####
        badCases = []

        #### Calculate Mean Center and Standard Distance ####
        for case in self.uniqueCases:
            indices = NUM.where(self.caseVals == case)
            numFeatures = len(indices[0])
            xy = self.xyCoords[indices]
            w = self.weights[indices]
            w.shape = numFeatures, 1
            weightSum = w.sum()
            if (weightSum != 0.0) and (numFeatures > 2):
                xyWeighted = w * xy

                #### Mean Center ####
                centers = xyWeighted.sum(0) / weightSum
                meanX, meanY = centers
                meanCenter[case] = centers

                #### Standard Ellipse ####
                devXY = xy - centers
                flatW = w.flatten()
                sigX = (flatW * devXY[:, 0]**2.0).sum()
                sigY = (flatW * devXY[:, 1]**2.0).sum()
                sigXY = (flatW * devXY[:, 0] * devXY[:, 1]).sum()
                denom = 2.0 * sigXY
                diffXY = sigX - sigY
                sum1 = diffXY**2.0 + 4.0 * sigXY**2.0

                if not abs(denom) > 0:
                    arctanVal = 0.0
                else:
                    tempVal = (diffXY + NUM.sqrt(sum1)) / denom
                    arctanVal = NUM.arctan(tempVal)

                if arctanVal < 0.0:
                    arctanVal += (NUM.pi / 2.0)

                sinVal = NUM.sin(arctanVal)
                cosVal = NUM.cos(arctanVal)
                sqrt2 = NUM.sqrt(2.0)
                sigXYSinCos = 2.0 * sigXY * sinVal * cosVal
                seX = (sqrt2 * NUM.sqrt(
                    ((sigX * cosVal**2.0) - sigXYSinCos +
                     (sigY * sinVal**2.0)) / weightSum) * stdDeviations)

                seY = (sqrt2 * NUM.sqrt(
                    ((sigX * sinVal**2.0) + sigXYSinCos +
                     (sigY * cosVal**2.0)) / weightSum) * stdDeviations)

                #### Counter Clockwise from Noon ####
                degreeRotation = 360.0 - (arctanVal * 57.2957795)

                #### Convert to Radians ####
                radianRotation1 = UTILS.convert2Radians(degreeRotation)

                #### Add Rotation ####
                radianRotation2 = 360.0 - degreeRotation
                if seX > seY:
                    radianRotation2 += 90.0
                    if radianRotation2 > 360.0:
                        radianRotation2 = radianRotation2 - 180.0

                se[case] = (seX, seY, degreeRotation, radianRotation1,
                            radianRotation2)
            else:
                badCases.append(case)

        #### Report Bad Cases ####
        nCases = len(self.uniqueCases)
        nBadCases = len(badCases)
        badCases.sort()
        if nBadCases:
            cBool = self.caseIsString
            if not self.caseIsString:
                badCases = [UTILS.caseValue2Print(i, cBool) for i in badCases]
            ERROR.reportBadCases(nCases, nBadCases, badCases, label=caseField)

        #### Sorted Case List ####
        caseKeys = se.keys()
        caseKeys.sort()
        self.caseKeys = caseKeys

        #### Set Attributes ####
        self.meanCenter = meanCenter
        self.se = se
        self.badCases = badCases
        self.caseField = caseField
        self.stdDeviations = stdDeviations
        self.weightField = weightField
Example #15
def writeErrorForEveryTrial(pid, datas, pathFordata):

    androidfile = pathFordata + 'PID_' + str(
        pid) + '_TwoDFittsData_External.csv'

    leapTimeStamp_list = loadLeapTimeStampData(pathFordata, pid)

    with open(androidfile) as f:

        f_csv = csv.reader(f)
        for i in range(0, 9):  # skip the beginning
            next(f_csv)
        oldheaders = next(f_csv)  # get the old headers

        # firstTRE and TRE come from Mackenzie, so we move them to the end
        headers = oldheaders  # store the original header
        headers = headers[0:len(headers) -
                          2]  # remove the 'firstTRE' and 'TRE'
        headers.extend([
            'leapStartTimestamp', 'leapFirstLiftUpTimeStamp',
            'leapFinalLiftUpTimeStamp'
        ])
        errorheaders = [
            'Error', 'SlipError', 'NarrowSlipError', 'ModerateSlipError',
            'LargeSlipError', 'VeryLargeSlipError', 'MissError',
            'NearMissError', 'NotSoNearMissError', 'OtherError',
            'AccidentalTap', 'AccidentalHit'
        ]  # the headers for the error data
        headers.extend(errorheaders)  # append the error header to the headers
        i = 0
        for row in f_csv:
            # save the firstTRE and average TRE before trimming the row
            firstTRE.append(row[len(row) - 2])
            TRE.append(row[len(row) - 1])

            row = row[0:len(row) - 2]  # omit the firstTRE and average TRE

            row.extend([
                leapTimeStamp_list[i].leapStartTimeStamp,
                leapTimeStamp_list[i].leapFirstLiftUpTimeStamp,
                leapTimeStamp_list[i].leapFinalLiftUpTime
            ])

            targetX = float(row[colNumAndroidTargetX])
            targetY = float(row[colNumAndroidTargetY])
            firstTouchDownX = float(row[colNumAndroidFirstTouchDownX])
            firstTouchDownY = float(row[colNumAndroidFirstTouchDownY])
            firstLiftUpX = float(row[colNumAndroidFirstLiftUpX])
            firstLiftUpY = float(row[colNumAndroidFirstLiftUpY])
            targetWidthInPixel = float(row[colNumAndroidWidthInPixel])
            errorUtils = ErrorUtils(firstLiftUpX, firstLiftUpY,
                                    firstTouchDownX, firstTouchDownY,
                                    targetWidthInPixel, targetX, targetY)
            # calculateErrors computes every error measure; the results become
            # attributes of the ErrorUtils instance
            errorUtils.calculateErrors()
            # collect the results in a list
            errordata = [
                errorUtils.error, errorUtils.SlipError,
                errorUtils.NarrowSlipError, errorUtils.ModerateSlipError,
                errorUtils.LargeSlipError, errorUtils.VeryLargeSlipError,
                errorUtils.MissError, errorUtils.NearMissError,
                errorUtils.NotSoNearMissError, errorUtils.OtherError,
                errorUtils.AccidentalTap, errorUtils.AccidentalHit
            ]
            row.extend(errordata)  # append the error data after the android data
            datas.append(row)  # store the row in the two-dimensional result list
            i += 1  # advance to the next leap timestamp record

    return datas, headers
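
A toy illustration of the column reshuffle performed above: the two trailing
columns are dropped from both the header and each row, and new columns are
appended at the end. All names and values below are placeholders.

# Hypothetical header and row, mirroring the reshuffle in writeErrorForEveryTrial
oldheaders = ['trial', 'targetX', 'targetY', 'firstTRE', 'TRE']
row = ['1', '512.0', '384.0', '0', '2']

headers = oldheaders[:-2]                        # drop 'firstTRE' and 'TRE'
headers.extend(['leapStartTimestamp', 'Error'])  # append the new columns

trimmedRow = row[:-2]                            # drop the same two values
trimmedRow.extend(['1587000000', '0'])           # placeholder timestamp / error flag
print(headers)
print(trimmedRow)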
Example #16
0
def stCollectByKNN(ssdo, timeField, outputFC, inSpan, inDistance):
    """
    This method applies the Jacquez Space-Time K-NN approach to convert event data into weighted
    point data by dissolving all coincident points in space and time into unique
    points with a new count field that contains the number of original features
    at that location and time span.

    INPUTS:
        ssdo (obj): SSDataObject from input
        timeField (str): Date/Time field name in input feature
        outputFC (str): path to the output feature class
        inSpan (int): value of temporal units within the same time bin
        inDistance (int): value of spatial units considered as spatial neighbors
    OUTPUTS:
        Create new collected point feature

    """
    #### Read raw time data ####
    timeData = ssdo.fields[timeField].data
    #### Convert temporal unit ####
    time = NUM.array(timeData, dtype='datetime64[s]').astype('datetime64[D]')
    #### Find Start Time ####
    startTime = time.min()
    #### Create Bin for Space and Time ####
    timeBin = (time - startTime) / inSpan

    numObs = ssdo.numObs
    #### Create Pseudo-FID to Find K-NN in Space and Time ####
    fid = [i for i in xrange(numObs)]

    #### Validate Output Workspace ####
    ERROR.checkOutputPath(outputFC)

    #### True Centroid Warning For Non-Point FCs ####
    if ssdo.shapeType.upper() != "POINT":
        ARCPY.AddIDMessage("WARNING", 1021)

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.inputFC, spatRef=ssdo.spatialRefString)

    #### Assure Enough Observations ####
    cnt = UTILS.getCount(ssdo.inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs=4)
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs=4)

    #### Process Any Bad Records Encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        if not ssdo.silentWarnings:
            ERROR.reportBadRecords(cnt,
                                   numBadRecs,
                                   badRecs,
                                   label=ssdo.oidName)

    #### Create Output Feature Class ####
    outPath, outName = OS.path.split(outputFC)
    try:
        DM.CreateFeatureclass(outPath, outName, "POINT", "", ssdo.mFlag,
                              ssdo.zFlag, ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Create k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(inDistance, 0, "euclidean")

    #### Add Count Field ####
    countFieldNameOut = ARCPY.ValidateFieldName(countFieldName, outPath)
    timeFieldNameOut = ARCPY.ValidateFieldName(timeFieldName, outPath)
    UTILS.addEmptyField(outputFC, countFieldNameOut, "LONG")
    UTILS.addEmptyField(outputFC, timeFieldNameOut, "DATE")
    fieldList = ["SHAPE@", countFieldNameOut, timeFieldNameOut]

    #### Set Insert Cursor ####
    rowsOut = DA.InsertCursor(outputFC, fieldList)

    #### Detect S-T K-NN by Space and Time Bin ####
    duplicateList = []
    for record in fid:
        kNNList = [record]
        if record not in duplicateList:
            for pair in fid:
                if pair != record:
                    gaSearch.search_by_idx(record)
                    for nh in gaSearch:
                        if timeBin[record] == timeBin[pair]:
                            kNNList.append(nh.idx)
                            duplicateList.append(nh.idx)
            #### Create and Populate New Feature ####
            kNNList = list(set(kNNList))
            count = len(kNNList)
            dt = time[record]
            x0 = ssdo.xyCoords[kNNList, 0].mean()
            y0 = ssdo.xyCoords[kNNList, 1].mean()
            pnt = (x0, y0, ssdo.defaultZ)
            rowResult = [pnt, count, dt]
            rowsOut.insertRow(rowResult)
            ARCPY.SetProgressorPosition()

    #### Clean Up ####
    del rowsOut, timeBin, kNNList, duplicateList

    return countFieldNameOut
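
The binning step above needs no ArcGIS dependencies; the sketch below uses
plain NumPy and made-up timestamps to show how each record gets an integer
time bin (records whose bins match are treated as temporal neighbors).

import numpy as NUM

# Hypothetical event timestamps; in the tool these come from ssdo.fields[timeField].data
rawTimes = ["2020-01-01T08:30:00", "2020-01-02T14:00:00", "2020-01-09T09:15:00"]
inSpan = 7  # days per time bin, mirroring the inSpan argument above

# Truncate to whole days, as done with astype('datetime64[D]') above
time = NUM.array(rawTimes, dtype='datetime64[s]').astype('datetime64[D]')
startTime = time.min()

# Integer bin index for each record
timeBin = ((time - startTime) / NUM.timedelta64(inSpan, 'D')).astype(int)
print(timeBin)  # [0 0 1] -> the first two events share a time bin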
Example #17
0
    def __init__(self,  ssdo, weightField = None, caseField = None, 
                 stdDeviations = 1.0):

        #### Set Initial Attributes ####
        UTILS.assignClassAttr(self, locals())

        #### Set Data ####
        self.xyCoords = self.ssdo.xyCoords

        #### Verify Weights ####
        if weightField:
            self.weights = self.ssdo.fields[weightField].returnDouble()

            #### Report Negative Weights ####
            lessThanZero = NUM.where(self.weights < 0.0)
            if len(lessThanZero[0]):
                self.weights[lessThanZero] = 0.0
                ARCPY.AddIDMessage("Warning", 941)

            #### Verify Weight Sum ####
            self.weightSum = self.weights.sum()
            if not self.weightSum > 0.0: 
                ARCPY.AddIDMessage("ERROR", 898)
                raise SystemExit()
        else:
            self.weights = NUM.ones((self.ssdo.numObs,))

        #### Set Case Field ####
        if caseField:
            caseType = ssdo.allFields[caseField].type.upper()
            self.caseIsString = caseType == "STRING"
            self.caseVals = self.ssdo.fields[caseField].data
            cases = NUM.unique(self.caseVals)
            if self.caseIsString:
                self.uniqueCases = cases[NUM.where(cases != "")]
            else:
                self.uniqueCases = cases
        else:
            self.caseIsString = False
            self.caseVals = NUM.ones((self.ssdo.numObs, ), int)
            self.uniqueCases = [1]

        #### Set Result Dict ####
        meanCenter = COLL.defaultdict(NUM.array)
        se = COLL.defaultdict(float)

        #### Keep Track of Bad Cases ####
        badCases = []

        #### Calculate Mean Center and Standard Distance ####
        for case in self.uniqueCases:
            indices = NUM.where(self.caseVals == case)
            numFeatures = len(indices[0])
            xy = self.xyCoords[indices]
            w = self.weights[indices]
            w.shape = numFeatures, 1
            weightSum = w.sum()
            if (weightSum != 0.0) and (numFeatures > 2):
                xyWeighted = w * xy

                #### Mean Center ####
                centers = xyWeighted.sum(0) / weightSum
                meanX, meanY = centers
                meanCenter[case] = centers

                #### Standard Ellipse ####
                devXY = xy - centers
                flatW = w.flatten()
                sigX = (flatW * devXY[:,0]**2.0).sum()  
                sigY = (flatW * devXY[:,1]**2.0).sum()
                sigXY = (flatW * devXY[:,0] * devXY[:,1]).sum()
                denom = 2.0 * sigXY
                diffXY = sigX - sigY
                sum1 = diffXY**2.0 + 4.0 * sigXY**2.0

                if not abs(denom) > 0:
                    arctanVal = 0.0
                else:
                    tempVal = (diffXY + NUM.sqrt(sum1)) / denom
                    arctanVal = NUM.arctan(tempVal)

                if arctanVal < 0.0: 
                    arctanVal += (NUM.pi / 2.0)

                sinVal = NUM.sin(arctanVal)
                cosVal = NUM.cos(arctanVal)
                sqrt2 = NUM.sqrt(2.0)
                sigXYSinCos = 2.0 * sigXY * sinVal * cosVal
                seX = (sqrt2 *
                       NUM.sqrt(((sigX * cosVal**2.0) - sigXYSinCos +
                                 (sigY * sinVal**2.0)) / 
                                  weightSum) * stdDeviations)

                seY = (sqrt2 *
                       NUM.sqrt(((sigX * sinVal**2.0) + sigXYSinCos +
                                 (sigY * cosVal**2.0)) / 
                                  weightSum) * stdDeviations)
                
                #### Counter Clockwise from Noon ####
                degreeRotation = 360.0 - (arctanVal * 57.2957795)  
                
                #### Convert to Radians ####
                radianRotation1 = UTILS.convert2Radians(degreeRotation)

                #### Add Rotation ####
                radianRotation2 = 360.0 - degreeRotation
                if seX > seY:
                    radianRotation2 += 90.0
                    if radianRotation2 > 360.0: 
                        radianRotation2 = radianRotation2 - 180.0

                se[case] = (seX, seY, degreeRotation, 
                            radianRotation1, radianRotation2)
            else:
                badCases.append(case)

        #### Report Bad Cases ####
        nCases = len(self.uniqueCases)
        nBadCases = len(badCases)
        badCases.sort()
        if nBadCases:
            cBool = self.caseIsString
            if not self.caseIsString:
                badCases = [UTILS.caseValue2Print(i, cBool) for i in badCases]
            ERROR.reportBadCases(nCases, nBadCases, badCases, 
                                 label = caseField)   
        
        #### Sorted Case List ####
        caseKeys = se.keys()
        caseKeys.sort()
        self.caseKeys = caseKeys

        #### Set Attributes ####
        self.meanCenter = meanCenter
        self.se = se
        self.badCases = badCases
        self.caseField = caseField
        self.stdDeviations = stdDeviations
        self.weightField = weightField
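
For reference only, the standard deviational ellipse algebra in the
constructor above can be restated with plain NumPy. The function name and the
sample coordinates below are hypothetical, and the sketch reproduces only the
axis-length and rotation math, not the arcpy bookkeeping.

import numpy as NUM

def ellipseAxes(xy, w, stdDeviations=1.0):
    """Weighted mean center plus standard-ellipse axes and rotation."""
    w = w.reshape(-1, 1)
    weightSum = w.sum()
    centers = (w * xy).sum(0) / weightSum              # weighted mean center
    devXY = xy - centers
    flatW = w.flatten()
    sigX = (flatW * devXY[:, 0]**2).sum()
    sigY = (flatW * devXY[:, 1]**2).sum()
    sigXY = (flatW * devXY[:, 0] * devXY[:, 1]).sum()
    denom = 2.0 * sigXY
    diffXY = sigX - sigY
    if abs(denom) > 0:
        arctanVal = NUM.arctan((diffXY + NUM.sqrt(diffXY**2 + 4.0 * sigXY**2)) / denom)
    else:
        arctanVal = 0.0
    if arctanVal < 0.0:
        arctanVal += NUM.pi / 2.0
    sinVal, cosVal = NUM.sin(arctanVal), NUM.cos(arctanVal)
    sigXYSinCos = 2.0 * sigXY * sinVal * cosVal
    seX = (NUM.sqrt(2.0) * NUM.sqrt((sigX * cosVal**2 - sigXYSinCos +
                                     sigY * sinVal**2) / weightSum) * stdDeviations)
    seY = (NUM.sqrt(2.0) * NUM.sqrt((sigX * sinVal**2 + sigXYSinCos +
                                     sigY * cosVal**2) / weightSum) * stdDeviations)
    degreeRotation = 360.0 - NUM.degrees(arctanVal)    # same constant as 57.2957795
    return centers, seX, seY, degreeRotation

# Tiny made-up example: points stretched along the x axis, unit weights
xy = NUM.array([[0., 0.], [1., 0.2], [2., -0.1], [3., 0.1], [4., 0.]])
print(ellipseAxes(xy, NUM.ones(len(xy))))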
Example #18
0
File: Weights.py  Project: leochin/GSWMtest
def spaceTime2SWM(inputFC, swmFile, masterField, concept = "EUCLIDEAN",
                  threshold = None, rowStandard = True,
                  timeField = None, timeType = None,
                  timeValue = None):
    """
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    concept: {str, EUCLIDEAN}: EUCLIDEAN or MANHATTAN 
    threshold {float, None}: distance threshold
    rowStandard {bool, True}: row standardize weights?
    timeField {str, None}: name of the date-time field
    timeType {str, None}: ESRI enumeration of date-time intervals
    timeValue {float, None}: value forward and backward in time
    """

    #### Assure Temporal Parameters are Set ####
    if not timeField:
        ARCPY.AddIDMessage("ERROR", 1320)
        raise SystemExit()
    if not timeType:
        ARCPY.AddIDMessage("ERROR", 1321)
        raise SystemExit()
    if not timeValue or timeValue <= 0:
        ARCPY.AddIDMessage("ERROR", 1322)
        raise SystemExit()

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 2)
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84001), 0, cnt, 1)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField, types = [0,1])
    badIDs = []

    #### Create Temporal Hash ####
    timeInfo = {}
    xyCoords = NUM.empty((cnt, 2), float)

    #### Process Field Values ####
    fieldList = [masterField, "SHAPE@XY", timeField]
    try:
        rows = DA.SearchCursor(ssdo.catPath, fieldList, "", 
                               ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 204)
        raise SystemExit()

    #### Add Data to GATable and Time Dictionary ####
    c = 0
    for row in rows:
        badRow = False

        #### Assure Masterfield is Valid ####
        masterID = row[0]
        if masterID == None or masterID == "":
            badRow = True

        #### Assure Date/Time is Valid ####
        timeStamp = row[-1]
        if timeStamp == None or timeStamp == "":
            badRow = True

        #### Assure Centroid is Valid ####
        badXY = row[1].count(None)
        if not badXY:
            x,y = row[1]
            xyCoords[c] = (x,y)
        else:
            badRow = True

        #### Process Data ####
        if not badRow:
            if timeInfo.has_key(masterID):
                #### Assure Uniqueness ####
                ARCPY.AddIDMessage("Error", 644, masterField)
                ARCPY.AddIDMessage("Error", 643)
                raise SystemExit()
            else:
                #### Fill Date/Time Dict ####
                startDT, endDT = TUTILS.calculateTimeWindow(timeStamp, 
                                                            timeValue, 
                                                            timeType)
                timeInfo[masterID] = (timeStamp, startDT, endDT)

        else:
            badIDs.append(masterID)

        #### Set Progress ####
        c += 1
        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    del rows

    #### Get Set of Bad IDs ####
    numBadObs = len(badIDs)
    badIDs = list(set(badIDs))
    badIDs.sort()
    badIDs = [ str(i) for i in badIDs ]
    
    #### Process any bad records encountered ####
    if numBadObs:
        ERROR.reportBadRecords(cnt, numBadObs, badIDs, label = masterField)

    #### Load Neighbor Table ####
    gaTable, gaInfo = WU.gaTable(ssdo.inputFC, 
                                 fieldNames = [masterField, timeField],
                                 spatRef = ssdo.spatialRefString)
    numObs = len(gaTable)
    xyCoords = xyCoords[0:numObs]

    #### Set the Distance Threshold ####
    concept, gaConcept = WU.validateDistanceMethod(concept, ssdo.spatialRef)
    if threshold == None:
        #### Set Progressor for Search ####
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84144))

        #### Create k-Nearest Neighbor Search Type ####
        gaSearch = GAPY.ga_nsearch(gaTable)
        gaSearch.init_nearest(0.0, 1, gaConcept)
        neighDist = ARC._ss.NeighborDistances(gaTable, gaSearch)
        N = len(neighDist)
        threshold = 0.0
        sumDist = 0.0 

        #### Find Maximum Nearest Neighbor Distance ####
        for row in xrange(N):
            dij = neighDist[row][-1][0]
            if dij > threshold:
                threshold = dij
            sumDist += dij

            ARCPY.SetProgressorPosition()

        #### Increase For Rounding Error ####
        threshold = threshold * 1.0001
        avgDist = sumDist / (N * 1.0)

        #### Add Linear/Angular Units ####
        thresholdStr = ssdo.distanceInfo.printDistance(threshold)
        ARCPY.AddIDMessage("Warning", 853, thresholdStr)

        #### Chordal Default Check ####
        if ssdo.useChordal:
            hardMaxExtent = ARC._ss.get_max_gcs_distance(ssdo.spatialRef)
            if threshold > hardMaxExtent:
                ARCPY.AddIDMessage("ERROR", 1609)
                raise SystemExit()

        #### Clean Up ####
        del gaSearch

    #### Create Missing SSDO Info ####
    extent = UTILS.resetExtent(xyCoords)

    #### Reset Coordinates for Chordal ####
    if ssdo.useChordal:
        sliceInfo = UTILS.SpheroidSlice(extent, ssdo.spatialRef)
        maxExtent = sliceInfo.maxExtent
    else:
        env = UTILS.Envelope(extent)
        maxExtent = env.maxExtent

    threshold = checkDistanceThresholdSWM(ssdo, threshold, maxExtent)
    
    #### Set Default Progressor for Neigborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create Distance Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(threshold, 0, gaConcept)
    neighSearch = ARC._ss.NeighborSearch(gaTable, gaSearch)

    #### Set Progressor for Weights Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, numObs, 1)

    #### Initialize Spatial Weights Matrix File ####
    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName, 
                             numObs, rowStandard, inputFC = inputFC,
                             wType = 9, distanceMethod = concept,
                             threshold = threshold, timeField = timeField,
                             timeType = timeType, timeValue = timeValue)

    for row in xrange(numObs):
        masterID = gaTable[row][2]

        #### Get Date/Time Info ####
        dt0, startDT0, endDT0 = timeInfo[masterID]

        nhs = neighSearch[row]
        neighs = []
        weights = []
        for nh in nhs:
            #### Search Through Spatial Neighbors ####
            neighID = gaTable[nh][2]

            #### Get Date/Time Info ####
            dt1, startDT1, endDT1 = timeInfo[neighID]

            #### Filter Based on Date/Time ####
            insideTimeWindow = TUTILS.isTimeNeighbor(startDT0, endDT0, dt1)
            if insideTimeWindow:
                neighs.append(neighID)
                weights.append(1.0)

        #### Add Spatial Weights Matrix Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights) 

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    swmWriter.close()
    del gaTable

    #### Report Warning/Max Neighbors ####
    swmWriter.reportNeighInfo()

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()
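
TUTILS.calculateTimeWindow and TUTILS.isTimeNeighbor are not shown in this
snippet; conceptually they bracket each feature's timestamp by timeValue units
in both directions and test whether a neighbor's timestamp falls inside that
bracket. A rough stand-alone sketch with the standard library, assuming days
as the interval unit:

import datetime as DT

def calcTimeWindow(timeStamp, timeValue, unit=DT.timedelta(days=1)):
    # Window reaching timeValue units backward and forward in time
    delta = timeValue * unit
    return timeStamp - delta, timeStamp + delta

def isTimeNeighbor(startDT, endDT, otherDT):
    # True when the neighbor's timestamp falls inside the window
    return startDT <= otherDT <= endDT

startDT0, endDT0 = calcTimeWindow(DT.datetime(2021, 6, 10), 3)     # +/- 3 days
print(isTimeNeighbor(startDT0, endDT0, DT.datetime(2021, 6, 12)))  # True
print(isTimeNeighbor(startDT0, endDT0, DT.datetime(2021, 6, 20)))  # False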
Example #19
0
    def createOutput(self, outputFC):
        """Creates an Output Feature Class with the Standard Distances.

        INPUTS:
        outputFC (str): path to the output feature class
        """

        #### Validate Output Workspace ####
        ERROR.checkOutputPath(outputFC)

        #### Shorthand Attributes ####
        ssdo = self.ssdo
        caseField = self.caseField

        #### Increase Extent if not Projected ####
        if ssdo.spatialRefType != "Projected":
            sdValues = self.sd.values()
            if len(sdValues):
                maxRadius = max(sdValues)
                largerExtent = UTILS.increaseExtentByConstant(ssdo.extent, 
                                                    constant = maxRadius)
                largerExtent = [ LOCALE.str(i) for i in largerExtent ]
                ARCPY.env.XYDomain = " ".join(largerExtent)

        #### Create Output Feature Class ####
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
        outPath, outName = OS.path.split(outputFC)

        try:
            DM.CreateFeatureclass(outPath, outName, "POLYGON", 
                                  "", ssdo.mFlag, ssdo.zFlag, 
                                  ssdo.spatialRefString)
        except:
            ARCPY.AddIDMessage("ERROR", 210, outputFC)
            raise SystemExit()

        #### Add Fields to Output FC ####
        dataFieldNames = UTILS.getFieldNames(sdFieldNames, outPath)
        shapeFieldNames = ["SHAPE@"]
        for fieldName in dataFieldNames:
            UTILS.addEmptyField(outputFC, fieldName, "DOUBLE")

        caseIsDate = False
        if caseField:
            fcCaseField = ssdo.allFields[caseField]
            validCaseName = UTILS.validQFieldName(fcCaseField, outPath)
            caseType = UTILS.convertType[fcCaseField.type]
            UTILS.addEmptyField(outputFC, validCaseName, caseType)
            dataFieldNames.append(validCaseName)
            if caseType.upper() == "DATE":
                caseIsDate = True

        #### Write Output ####
        badCaseRadians = []
        allFieldNames = shapeFieldNames + dataFieldNames
        rows = DA.InsertCursor(outputFC, allFieldNames)
        for case in self.caseKeys:

            #### Get Results ####
            xVal, yVal = self.meanCenter[case]
            radius = self.sd[case]

            #### Create Empty Polygon Geometry ####
            poly = ARCPY.Array()

            #### Check for Valid Radius ####
            radiusZero = UTILS.compareFloat(0.0, radius, rTol = .0000001)
            radiusNan = NUM.isnan(radius)
            radiusBool = radiusZero + radiusNan
            if radiusBool:
                badRadian = 6
                badCase = UTILS.caseValue2Print(case, self.caseIsString)
                badCaseRadians.append(badCase)
            else:
                badRadian = 0

                #### Calculate a Point For Each ####
                #### Degree in Circle Polygon ####
                for degree in NUM.arange(0, 360):  
                    try:
                        radians = NUM.pi / 180.0 * degree
                        pntX = xVal + (radius * NUM.cos(radians))
                        pntY = yVal + (radius * NUM.sin(radians))
                        pnt = ARCPY.Point(pntX, pntY, ssdo.defaultZ)
                        poly.add(pnt)
                    except:
                        badRadian += 1
                        if badRadian == 6:
                            badCase = UTILS.caseValue2Print(case, 
                                               self.caseIsString)
                            badCaseRadians.append(badCase)
                            break

            if badRadian < 6:
                #### Create and Populate New Feature ####
                poly = ARCPY.Polygon(poly, None, True)
                rowResult = [poly, xVal, yVal, radius]

                if caseField:
                    caseValue = case.item()
                    if caseIsDate:
                        caseValue = TUTILS.iso2DateTime(caseValue)
                    rowResult.append(caseValue)
                rows.insertRow(rowResult)

        #### Report Bad Cases Due to Geometry (coincident pts) ####
        nBadRadians = len(badCaseRadians)
        if nBadRadians:
            if caseField:
                badCaseRadians = " ".join(badCaseRadians)
                ARCPY.AddIDMessage("WARNING", 1011, caseField,
                                badCaseRadians)
            else:
                ARCPY.AddIDMessage("ERROR", 978)
                raise SystemExit()

        #### Return Extent to Normal if not Projected ####
        if ssdo.spatialRefType != "Projected":
            ARCPY.env.XYDomain = None

        #### Clean Up ####
        del rows

        #### Set Attribute ####
        self.outputFC = outputFC
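
The circle written above is just 360 vertices on a ring of radius
self.sd[case] around the mean center; a minimal NumPy-only restatement
(hypothetical names, no arcpy Point/Polygon objects):

import numpy as NUM

def circleVertices(xVal, yVal, radius, numPoints=360):
    # One vertex per degree, matching the loop over NUM.arange(0, 360) above
    radians = NUM.pi / 180.0 * NUM.arange(0, numPoints)
    pntX = xVal + radius * NUM.cos(radians)
    pntY = yVal + radius * NUM.sin(radians)
    return NUM.column_stack((pntX, pntY))

print(circleVertices(100.0, 200.0, 5.0).shape)  # (360, 2)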
Example #20
0
def kNearest2SWM(inputFC, swmFile, masterField, concept = "EUCLIDEAN", 
                 kNeighs = 1, rowStandard = True):
    """Creates a sparse spatial weights matrix (SWM) based on k-nearest
    neighbors.

    INPUTS: 
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    concept: {str, EUCLIDEAN}: EUCLIDEAN or MANHATTAN 
    kNeighs {int, 1}: number of neighbors to return
    rowStandard {bool, True}: row standardize weights?
    """

    #### Assure that kNeighs is Non-Zero ####
    if kNeighs <= 0:
        ARCPY.AddIDMessage("ERROR", 976)
        raise SystemExit()

    #### Set Default Progressor for Neigborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 2)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField, types = [0,1])

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.catPath, [masterField],
                                 spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs = 2)

    #### Process any bad records encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        err = ERROR.reportBadRecords(cnt, numBadRecs, badRecs,
                                     label = ssdo.oidName)

    #### Assure k-Nearest is Less Than Number of Features ####
    if kNeighs >= N:
        ARCPY.AddIDMessage("ERROR", 975)
        raise SystemExit()

    #### Create k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    concept, gaConcept = WU.validateDistanceMethod(concept, ssdo.spatialRef)
    gaSearch.init_nearest(0.0, kNeighs, gaConcept)
    neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch, 
                                           weight_type = 1,
                                           row_standard = False)

    #### Set Progressor for Weights Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, N, 1)

    #### Initialize Spatial Weights Matrix File ####
    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName, 
                             N, rowStandard, inputFC = inputFC,
                             wType = 2, distanceMethod = concept,
                             numNeighs = kNeighs)

    #### Unique Master ID Dictionary ####
    masterSet = set([])

    for row in xrange(N):
        masterID = int(gaTable[row][2])
        if masterID in masterSet:
            ARCPY.AddIDMessage("Error", 644, masterField)
            ARCPY.AddIDMessage("Error", 643)
            raise SystemExit()
        else:
            masterSet.add(masterID)

        neighs, weights = neighWeights[row]
        neighs = [ gaTable[nh][2] for nh in neighs ]

        #### Add Spatial Weights Matrix Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights) 

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    swmWriter.close()
    del gaTable

    #### Report Warning/Max Neighbors ####
    swmWriter.reportNeighInfo()

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()
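
A hypothetical call, assuming an ArcGIS session in which kNearest2SWM is
importable; the paths and the master field name below are placeholders only.

# Illustrative parameters only -- point these at an existing feature class and ID field
inputFC = r"C:\Data\example.gdb\points"
swmFile = r"C:\Data\points_knn8.swm"
kNearest2SWM(inputFC, swmFile, masterField="MYID",
             concept="EUCLIDEAN", kNeighs=8, rowStandard=True)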
Example #21
0
    def createOutput(self, outputFC):
        """Creates an Output Feature Class with the Directional Mean
        Results.

        INPUTS:
        outputFC (str): path to the output feature class
        """

        #### Validate Output Workspace ####
        ERROR.checkOutputPath(outputFC)

        #### Shorthand Attributes ####
        ssdo = self.ssdo
        caseField = self.caseField

        #### Create Output Feature Class ####
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
        outPath, outName = OS.path.split(outputFC)

        try:
            DM.CreateFeatureclass(outPath, outName, "POLYLINE", "", ssdo.mFlag,
                                  ssdo.zFlag, ssdo.spatialRefString)
        except:
            ARCPY.AddIDMessage("ERROR", 210, outputFC)
            raise SystemExit()

        #### Add Fields to Output FC ####
        dataFieldNames = UTILS.getFieldNames(lmFieldNames, outPath)
        shapeFieldNames = ["SHAPE@"]
        for fieldName in dataFieldNames:
            UTILS.addEmptyField(outputFC, fieldName, "DOUBLE")

        caseIsDate = False
        if caseField:
            fcCaseField = ssdo.allFields[caseField]
            validCaseName = UTILS.validQFieldName(fcCaseField, outPath)
            caseType = UTILS.convertType[fcCaseField.type]
            UTILS.addEmptyField(outputFC, validCaseName, caseType)
            dataFieldNames.append(validCaseName)
            if caseType.upper() == "DATE":
                caseIsDate = True

        #### Populate Output Feature Class ####
        allFieldNames = shapeFieldNames + dataFieldNames
        rows = DA.InsertCursor(outputFC, allFieldNames)
        for case in self.caseKeys:
            #### Get Results ####
            start, end, length, rAngle, dAngle, circVar = self.dm[case]
            meanX, meanY = self.meanCenter[case]
            dirMean = 360. - dAngle + 90.
            if not dirMean < 360:
                dirMean = dirMean - 360.

            #### Create Start and End Points ####
            x0, y0 = start
            startPoint = ARCPY.Point(x0, y0, ssdo.defaultZ)
            x1, y1 = end
            endPoint = ARCPY.Point(x1, y1, ssdo.defaultZ)

            #### Create And Populate Line Array ####
            line = ARCPY.Array()
            line.add(startPoint)
            line.add(endPoint)
            line = ARCPY.Polyline(line, None, True)

            #### Create and Populate New Line Feature ####
            rowResult = [line, dAngle, dirMean, circVar, meanX, meanY, length]

            if caseField:
                caseValue = case
                if caseIsDate:
                    caseValue = TUTILS.iso2DateTime(caseValue)
                rowResult.append(caseValue)
            rows.insertRow(rowResult)

        #### Clean Up ####
        del rows

        #### Set Attribute ####
        self.outputFC = outputFC

        #### Set the Default Symbology ####
        params = ARCPY.gp.GetParameterInfo()
        if self.orientationOnly:
            renderLayerFile = "LinearMeanTwoWay.lyr"
        else:
            renderLayerFile = "LinearMeanOneWay.lyr"
        templateDir = OS.path.dirname(OS.path.dirname(SYS.argv[0]))
        fullRLF = OS.path.join(templateDir, "Templates", "Layers",
                               renderLayerFile)
        params[1].Symbology = fullRLF
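
The dirMean lines above flip the sense of the angle, shift it by 90 degrees,
and wrap the result back under 360; restated as a tiny stand-alone helper
(hypothetical name):

def wrapDirMean(dAngle):
    # Same arithmetic as the dirMean calculation above
    dirMean = 360.0 - dAngle + 90.0
    if not dirMean < 360.0:
        dirMean -= 360.0
    return dirMean

print(wrapDirMean(0.0))    # 90.0
print(wrapDirMean(90.0))   # 0.0
print(wrapDirMean(350.0))  # 100.0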
Example #22
0
def distance2SWM(inputFC, swmFile, masterField, fixed = 0, 
                 concept = "EUCLIDEAN", exponent = 1.0, threshold = None, 
                 kNeighs = 1, rowStandard = True):
    """Creates a sparse spatial weights matrix (SWM) based on k-nearest
    neighbors.

    INPUTS: 
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    fixed (boolean): fixed (1) or inverse (0) distance? 
    concept: {str, EUCLIDEAN}: EUCLIDEAN or MANHATTAN 
    exponent {float, 1.0}: distance decay
    threshold {float, None}: distance threshold
    kNeighs (int): number of neighbors to return
    rowStandard {bool, True}: row standardize weights?
    """

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField, types = [0,1])

    #### Read Data ####
    ssdo.obtainDataGA(masterField, minNumObs = 2)
    N = ssdo.numObs
    gaTable = ssdo.gaTable
    if fixed:
        wType = 1
    else:
        wType = 0

    #### Set Default Progressor for Neigborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Set the Distance Threshold ####
    concept, gaConcept = WU.validateDistanceMethod(concept, ssdo.spatialRef)
    if threshold == None:
        threshold, avgDist = WU.createThresholdDist(ssdo, 
                                        concept = concept)

    #### Assures that the Threshold is Appropriate ####
    gaExtent = UTILS.get92Extent(ssdo.extent)
    threshold, maxSet = WU.checkDistanceThreshold(ssdo, threshold,
                                                  weightType = wType)

    #### If the Threshold is Set to the Max ####
    #### Set to Zero for Script Logic ####
    if maxSet:
        #### All Locations are Related ####
        threshold = SYS.maxint
        if N > 500:
            ARCPY.AddIDMessage("Warning", 717)

    #### Assure k-Nearest is Less Than Number of Features ####
    if kNeighs >= N and fixed:
        ARCPY.AddIDMessage("ERROR", 975)
        raise SystemExit()

    #### Create Distance/k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(threshold, kNeighs, gaConcept)
    neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch, 
                                           weight_type = wType,
                                           exponent = exponent,
                                           row_standard = False)

    #### Set Progressor for Weights Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, N, 1)

    #### Initialize Spatial Weights Matrix File ####
    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName, 
                             N, rowStandard, inputFC = inputFC,
                             wType = wType, distanceMethod = concept,
                             exponent = exponent, threshold = threshold)

    #### Unique Master ID Dictionary ####
    masterSet = set([])

    for row in xrange(N):
        masterID = int(gaTable[row][2])
        if masterID in masterSet:
            ARCPY.AddIDMessage("Error", 644, masterField)
            ARCPY.AddIDMessage("Error", 643)
            raise SystemExit()
        else:
            masterSet.add(masterID)

        neighs, weights = neighWeights[row]
        neighs = [ gaTable[nh][2] for nh in neighs ]

        #### Add Spatial Weights Matrix Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights) 

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    swmWriter.close()
    del gaTable

    #### Report Warning/Max Neighbors ####
    swmWriter.reportNeighInfo()

    #### Add Linear/Angular Unit (Distance Based Only) ####
    distanceOut = ssdo.distanceInfo.outputString
    distanceOut = [ARCPY.GetIDMessage(84344).format(distanceOut)]

    #### Report Spatial Weights Summary ####
    swmWriter.report(additionalInfo = distanceOut)

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()
Example #23
0
    def createOutputShapes(self, outputFC):
        #### Shorthand Attributes ####
        ssdoBase = self.ssdoBase
        ssdoCand = self.ssdoCand

        #### Validate Output Workspace ####
        ARCPY.env.overwriteOutput = True
        ERROR.checkOutputPath(outputFC)

        #### Create Output Feature Class ####
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
        outPath, outName = OS.path.split(outputFC)
        tempFC = UTILS.returnScratchName("TempSS_FC", fileType = "FEATURECLASS",
                                         scratchWS = outPath)
        outTempPath, outTempName = OS.path.split(tempFC)

        try:
            DM.CreateFeatureclass(outTempPath, outTempName, ssdoBase.shapeType, 
                                  "", ssdoBase.mFlag, 
                                  ssdoBase.zFlag, ssdoBase.spatialRefString)
        except:
            ARCPY.AddIDMessage("ERROR", 210, outputFC)
            raise SystemExit()

        #### Add Null Value Flag ####
        outIsShapeFile = UTILS.isShapeFile(outputFC)
        setNullable = outIsShapeFile == False

        #### Make Feature Layer and Select Result OIDs/Shapes ####
        featureCount = ssdoBase.numObs + ssdoCand.numObs
        ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84003), 0,
                                                 featureCount, 1)

        #### Add Shape/ID Field Names ####
        matchID, candID = outputIDFieldNames
        outFieldNames = ["SHAPE@"] + outputIDFieldNames
        inFieldNames = ["OID@", "SHAPE@"]
        UTILS.addEmptyField(tempFC, matchID, "LONG", nullable = True)
        UTILS.addEmptyField(tempFC, candID, "LONG", nullable = True)

        #### Add Append Fields ####
        lenAppend = len(self.appendFields) 
        appendIsDate = []
        in2OutFieldNames = {}
        if lenAppend:
            for fieldName in self.appendFields:
                fcField = ssdoCand.allFields[fieldName]
                fieldType = UTILS.convertType[fcField.type]
                fieldOutName = UTILS.validQFieldName(fcField, outPath)
                in2OutFieldNames[fieldName] = fieldOutName
                if fieldType == "DATE":
                    appendIsDate.append(fieldName)
                UTILS.addEmptyField(tempFC, fieldOutName, fieldType,
                                    alias = fcField.alias)
                outFieldNames.append(fieldOutName)

        #### Add Analysis Fields ####
        for fieldName in self.fieldNames:
            fcField = ssdoBase.allFields[fieldName]
            fieldType = UTILS.convertType[fcField.type]
            fieldOutName = UTILS.validQFieldName(fcField, outPath)
            in2OutFieldNames[fieldName] = fieldOutName
            UTILS.addEmptyField(tempFC, fieldOutName, fieldType,
                                alias = fcField.alias)
            outFieldNames.append(fieldOutName)

        dataFieldNames = matchFieldInfo[self.similarType]
        dataFieldInfo = outputFieldInfo[self.matchMethod]
        baseValues = []
        for fieldName in dataFieldNames:
            outAlias, outType, baseValue = dataFieldInfo[fieldName]
            UTILS.addEmptyField(tempFC, fieldName, outType, 
                                alias = outAlias, 
                                nullable = setNullable) 
            outFieldNames.append(fieldName)
            baseValues.append(baseValue)

        #### Get Insert Cursor ####
        baseRows = DA.SearchCursor(ssdoBase.inputFC, inFieldNames)
        candRows = DA.SearchCursor(ssdoCand.inputFC, inFieldNames)
        rows = DA.InsertCursor(tempFC, outFieldNames)

        #### Set Base Data ####
        useShapeNull = outIsShapeFile
        if useShapeNull:
            nullIntValue = UTILS.shpFileNull['LONG']
        else:
            nullIntValue = None

        #### Set Base Null For Append ####
        appendNull = {}
        for fieldName in self.appendFields:
            if fieldName not in ssdoBase.fields:
                if useShapeNull:
                    outType = ssdoCand.fields[fieldName].type
                    outNullValue = UTILS.shpFileNull[outType]
                else:
                    outNullValue = None
                appendNull[fieldName] = outNullValue

        #### Add Base Data ####
        for masterID, shp in baseRows:
            orderID = ssdoBase.master2Order[masterID]

            #### Insert Shape, Match_ID and NULL (Cand_ID) ####
            rowRes = [shp, masterID, nullIntValue]

            #### Add Append Fields ####
            for fieldName in self.appendFields:
                if fieldName in appendNull:
                    rowRes.append(appendNull[fieldName])
                else:
                    value = ssdoBase.fields[fieldName].data[orderID]
                    if fieldName in appendIsDate:
                        value = TUTILS.iso2DateTime(value)
                    rowRes.append(value)

            #### Add Analysis Fields ####
            for fieldName in self.fieldNames:
                rowRes.append(ssdoBase.fields[fieldName].data[orderID])

            #### Add Null Base Values ####
            rowRes += baseValues

            rows.insertRow(rowRes)
            ARCPY.SetProgressorPosition()
        del baseRows
        
        #### First Add Similar Results ####
        for masterID, shp in candRows:
            orderID = ssdoCand.master2Order[masterID]
            indTop = NUM.where(self.topIDs == orderID)[0]
            indBot = NUM.where(self.botIDs == orderID)[0]
            if self.similarType in ['MOST_SIMILAR', 'BOTH'] and len(indTop):
                ind = indTop[0]
                #### Insert Shape, NULL (Match_ID) and Cand_ID ####
                rowRes = [shp, nullIntValue, masterID]
                
                #### Add Append Fields ####
                for fieldName in self.appendFields:
                    rowRes.append(ssdoCand.fields[fieldName].data[orderID])

                #### Add Analysis Fields ####
                for fieldName in self.fieldNames:
                    rowRes.append(ssdoCand.fields[fieldName].data[orderID])

                #### Add Results ####
                rank = ind + 1
                ss = self.totalDist[orderID]

                if self.similarType == 'BOTH':
                    rowRes += [rank, nullIntValue, ss, rank]
                else:
                    rowRes += [rank, ss, rank]

                rows.insertRow(rowRes)
            if self.similarType in ['LEAST_SIMILAR', 'BOTH'] and len(indBot):
                ind = indBot[0]
                #### Insert Shape, NULL (Match_ID) and Cand_ID ####
                rowRes = [shp, nullIntValue, masterID]

                #### Add Append Fields ####
                for fieldName in self.appendFields:
                    rowRes.append(ssdoCand.fields[fieldName].data[orderID])

                #### Add Analysis Fields ####
                for fieldName in self.fieldNames:
                    rowRes.append(ssdoCand.fields[fieldName].data[orderID])

                #### Add Results ####
                rank = ind + 1
                labRank = rank * -1
                ss = self.totalDist[orderID]

                if self.similarType == 'BOTH':
                    rowRes += [nullIntValue, rank, ss, labRank]
                else:
                    rowRes += [rank, ss, labRank]

                rows.insertRow(rowRes)

            ARCPY.SetProgressorPosition()
        del candRows
        del rows

        #### Do Final Sort ####
        if self.matchMethod == 'ATTRIBUTE_PROFILES':
            if self.similarType == 'MOST_SIMILAR':
                sortString = "SIMINDEX DESCENDING;SIMRANK DESCENDING"
            else:
                sortString = "SIMINDEX DESCENDING"
        else:
            if self.similarType == 'MOST_SIMILAR':
                sortString = "SIMINDEX ASCENDING;SIMRANK ASCENDING"
            else:
                sortString = "SIMINDEX ASCENDING"
        DM.Sort(tempFC, outputFC, sortString, "UR")

        #### Clean Up ####
        DM.Delete(tempFC)

        #### Symbology ####
        params = ARCPY.gp.GetParameterInfo()
        try:
            renderType = UTILS.renderType[self.ssdoBase.shapeType.upper()]
            renderKey = (self.similarType, renderType)
            renderLayerFile = outputRenderInfo[renderKey]
            templateDir = OS.path.dirname(OS.path.dirname(SYS.argv[0]))
            fullRLF = OS.path.join(templateDir, "Templates",
                                   "Layers", renderLayerFile)
            params[2].Symbology = fullRLF
        except:
            ARCPY.AddIDMessage("WARNING", 973)
Example #25
0
    def obtainData(self,
                   masterField,
                   fields=[],
                   types=[0, 1, 2, 3, 4, 5, 6],
                   minNumObs=0,
                   warnNumObs=0,
                   dateStr=False,
                   explicitBadRecordID=None):
        """Takes a list of field names and returns it in a dictionary
        structure.

        INPUTS:
        masterField (str): name of field being used as the master
        fields {list, []}: name(s) of the field to be returned
        types (list): types of data allowed to be returned (1)
        minNumObs {int, 0}: minimum number of observations for error
        warnNumObs {int, 0}: minimum number of observations for warning
        dateStr {bool, False}: return date fields as strings?

        ATTRIBUTES:
        gaTable (structure): instance of the GA Table
        fields (dict): fieldName = instance of FCField
        master2Order (dict): masterID = order in lists
        order2Master (dict): order in lists = masterID
        masterField (str): field that serves as the master
        badRecords (list): master IDs that could not be read
        xyCoords (array, numObs x 2): xy-coordinates for feature centroids
        """

        #### Get Base Count, May Include Bad Records ####
        cnt = UTILS.getCount(self.inputFC)

        #### Validation of Master Field ####
        verifyMaster = ERROR.checkField(self.allFields,
                                        masterField,
                                        types=[0, 1, 5])

        #### Set MasterIsOID Boolean ####
        self.masterIsOID = masterField == self.oidName

        #### Set Master and Data Indices ####
        if self.masterIsOID:
            self.masterColumnIndex = 0
            self.dataColumnIndex = 2
            fieldList = [self.oidName, "shape@XY"]
        else:
            self.masterColumnIndex = 2
            self.dataColumnIndex = 3
            fieldList = [self.oidName, "shape@XY", masterField]

        #### Initialization of Centroids  ####
        xyCoords = NUM.empty((cnt, 2), float)

        #### Validation and Initialization of Data Fields ####
        numFields = len(fields)
        fieldTypes = {}
        hasDate = False
        for field in fields:
            fieldType = ERROR.checkField(self.allFields, field, types=types)
            fieldTypes[field] = fieldType
            fieldList.append(field)
            self.fields[field] = self.allFields[field]
            if fieldType.upper() == "DATE":
                hasDate = True
                nowTime = DT.datetime.now()

        #### Create Empty Data Arrays ####
        for fieldName, fieldObj in self.fields.iteritems():
            fieldObj.createDataArray(cnt, dateStr=dateStr)

        #### Z Coords ####
        if self.hasZ:
            zCoords = NUM.empty((cnt, ), float)
            fieldList.append("shape@Z")

        #### Keep track of Invalid Fields ####
        badIDs = []
        badRecord = 0

        #### Create Progressor Bar ####
        ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84001), 0, cnt, 1)

        #### Process Field Values ####
        try:
            rows = DA.SearchCursor(self.inputFC, fieldList, "",
                                   self.spatialRefString)
        except:
            ARCPY.AddIDMessage("ERROR", 204)
            raise SystemExit()

        c = 0
        for row in rows:
            oid = row[0]
            badXY = row[1].count(None)
            if self.hasZ:
                badValues = row[0:-1].count(None)
            else:
                badValues = row.count(None)

            #### Check Bad Record ####
            if badXY or badValues:
                badRow = 1
                badRecord = 1
                badIDs.append(oid)
            else:
                #### Get Centroid and Master ID ####
                xyCoords[c] = row[1]
                masterID = row[self.masterColumnIndex]

                #### Add Field Values ####
                if numFields:
                    restFields = row[self.dataColumnIndex:]
                    for fieldInd, fieldName in enumerate(fields):
                        fieldValue = restFields[fieldInd]
                        fieldType = fieldTypes[fieldName]
                        if fieldType.upper() == "DATE":
                            if dateStr:
                                fieldValue = str(fieldValue)
                            else:
                                fieldValue = (nowTime -
                                              fieldValue).total_seconds()
                        self.fields[fieldName].data[c] = fieldValue
                if self.hasZ:
                    zCoords[c] = row[-1]

                #### Check uniqueness of masterID field ####
                if self.master2Order.has_key(masterID):
                    del rows
                    ARCPY.AddIDMessage("ERROR", 644, masterField)
                    ARCPY.AddIDMessage("ERROR", 643)
                    raise SystemExit()
                else:
                    self.master2Order[masterID] = c
                    self.order2Master[c] = masterID
                    c += 1

            ARCPY.SetProgressorPosition()

        del rows

        #### Check Whether the Number of Features is Appropriate ####
        numObs = len(self.master2Order)
        ERROR.checkNumberOfObs(numObs,
                               minNumObs=minNumObs,
                               warnNumObs=warnNumObs,
                               silentWarnings=self.silentWarnings)

        #### Get Set of Bad IDs ####
        badIDs = list(set(badIDs))
        badIDs.sort()
        badIDs = [str(i) for i in badIDs]

        #### Process any bad records encountered ####
        if badRecord != 0:
            bn = len(badIDs)
            if not self.silentWarnings:
                ERROR.reportBadRecords(cnt,
                                       bn,
                                       badIDs,
                                       label=self.oidName,
                                       explicitBadRecordID=explicitBadRecordID)

            #### Prune Data Arrays ####
            xyCoords = xyCoords[0:numObs]
            self.resizeDataArrays(numObs)
            if self.hasZ:
                zCoords = zCoords[0:numObs]

        #### Set the Hidden Fields (E.g. Not in Use) ####
        self.setHiddenFields()

        #### Reset Extent to Honor Env and Subsets ####
        try:
            self.extent = UTILS.resetExtent(xyCoords)
        except:
            pass

        #### Reset Coordinates for Chordal ####
        if self.useChordal:
            #### Project to XY on Spheroid ####
            self.spheroidCoords = ARC._ss.lonlat_to_xy(xyCoords,
                                                       self.spatialRef)
            self.sliceInfo = UTILS.SpheroidSlice(self.extent, self.spatialRef)
        else:
            self.spheroidCoords = None
            self.sliceInfo = None

        #### Set Further Attributes ####
        self.badRecords = badIDs
        self.xyCoords = xyCoords
        self.masterField = masterField
        self.gaTable = None
        self.numObs = numObs
        if self.hasZ:
            self.zCoords = zCoords
        else:
            self.zCoords = None
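
A minimal usage sketch for obtainData (not part of the original example; the module import, path, and field names are assumptions):

import SSDataObject as SSDO     # module name assumed from the snippets above

# Hypothetical usage of SSDataObject.obtainData (path and fields are assumptions)
ssdo = SSDO.SSDataObject(r"C:\Data\points.shp")
ssdo.obtainData(ssdo.oidName, fields=["VALUE", "WEIGHT"],
                minNumObs=3, warnNumObs=30)
print(ssdo.numObs)              # number of features successfully read
print(ssdo.xyCoords.shape)      # (numObs, 2) array of centroid coordinates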
Example #26
    def __init__(self, inputFC, templateFC = None, explicitSpatialRef = None,
                 silentWarnings = False, useChordal = True):
        #### Validate Input Feature Class ####
        ERROR.checkFC(inputFC)
        try:
            self.inPath, self.inName = OS.path.split(inputFC)
        except:
            self.inPath = None
            self.inName = inputFC

        #### Validate Template FC ####
        if templateFC != None:
            if ARCPY.Exists(templateFC) == False:
                templateFC = None

        #### ShapeFile Boolean ####
        self.shapeFileBool = False
        if self.inPath:
            self.shapeFileBool = UTILS.isShapeFile(inputFC)

            #### Create Feature Layer if LYR File ####
            path, ext = OS.path.splitext(inputFC)
            if ext.upper() == ".LYR":
                tempFC = "SSDO_FeatureLayer"
                DM.MakeFeatureLayer(inputFC, tempFC)
                inputFC = tempFC

        #### Describe Input ####
        self.info = ARCPY.Describe(inputFC)

        #### Assure Input are Features with OIDs ####
        if not self.info.hasOID:
            ARCPY.AddIDMessage("ERROR", 339, self.inName)
            raise SystemExit()

        #### Assign Describe Objects to Class Attributes ####
        self.inputFC = inputFC
        self.catPath = self.info.CatalogPath
        self.shapeType = self.info.ShapeType
        self.oidName = self.info.oidFieldName
        self.dataType = self.info.DataType
        self.shapeField = self.info.ShapeFieldName
        self.templateFC = templateFC
        self.hasM = self.info.HasM
        self.hasZ = self.info.HasZ
        self.silentWarnings = silentWarnings

        #### Set Initial Extent Depending on DataType ####
        if self.dataType in ["FeatureLayer", "Layer"]:
            try:
                tempInfo = ARCPY.Describe(self.catPath)
                extent = tempInfo.extent
            except:
                #### in_memory, SDE, NetCDF etc... ####
                extent = self.info.extent
            self.fidSet = self.info.FIDSet
            if self.fidSet == "":
                self.selectionSet = False
            else:
                self.selectionSet = True
        else:
            extent = self.info.extent
            self.fidSet = ""
            self.selectionSet = False
        self.extent = extent

        #### Set Spatial Reference ####
        inputSpatRef = self.info.SpatialReference
        inputSpatRefName = inputSpatRef.name
        if explicitSpatialRef:
            #### Explicitely Override Spatial Reference ####
            self.templateFC = None
            self.spatialRef = explicitSpatialRef
        else:
            #### 1. Feature Dataset, 2. Env Setting, 3. Input Hierarchy ####
            self.spatialRef = UTILS.returnOutputSpatialRef(inputSpatRef,
                                                  outputFC = templateFC)
        self.spatialRefString = UTILS.returnOutputSpatialString(self.spatialRef)
        self.spatialRefName = self.spatialRef.name
        self.spatialRefType = self.spatialRef.type

        #### Warn if Spatial Reference Changed ####
        if not silentWarnings:
            UTILS.compareSpatialRefNames(inputSpatRefName, self.spatialRefName)

        #### Check for Projection ####
        if self.spatialRefType.upper() != "PROJECTED":
            if self.spatialRefType.upper() == "GEOGRAPHIC":
                self.useChordal = useChordal
                if not explicitSpatialRef:
                    if self.useChordal:
                        ARCPY.AddIDMessage("WARNING", 1605)
                    else:
                        ARCPY.AddIDMessage("WARNING", 916)
            else:
                self.useChordal = False
                if not explicitSpatialRef:
                    ARCPY.AddIDMessage("WARNING", 916)
        else:
            self.useChordal = False

        #### Angular/Linear Unit Info ####
        self.distanceInfo = UTILS.DistanceInfo(self.spatialRef, 
                                         useChordalDistances = self.useChordal)

        #### Create Composition and Accounting Structure ####
        self.fields = {}
        self.master2Order = {}
        self.order2Master = {}

        #### Obtain a Full List of Field Names/Type ####
        self.allFields = {}
        listFields = self.info.fields
        for field in listFields:
            name = field.name.upper()
            self.allFields[name] = FCField(field)

        #### Set Z and M Flags and Defaults ####
        zmInfo = UTILS.setZMFlagInfo(self.hasM, self.hasZ, self.spatialRef)
        self.zFlag, self.mFlag, self.defaultZ = zmInfo
        self.zBool = self.zFlag == "ENABLED"

        #### Render Type ####
        self.renderType = UTILS.renderType[self.shapeType.upper()]
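
A hedged construction sketch (the input path is an assumption). The constructor only describes the input and resolves spatial-reference and Z/M metadata; the records themselves are read later with obtainData or obtainDataGA:

# Hypothetical construction sketch (input path is an assumption; SSDO as imported above)
ssdo = SSDO.SSDataObject(r"C:\Data\counties.shp",
                         explicitSpatialRef=None,   # fall back to feature dataset/env/input hierarchy
                         silentWarnings=False,
                         useChordal=True)           # chordal distances when the CS is geographic
print(ssdo.shapeType, ssdo.spatialRefName, ssdo.useChordal)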
Example #27
    def obtainData(self, masterField, fields = [], types = [0,1,2,3,4,5,6],
                   minNumObs = 0, warnNumObs = 0, dateStr = False,
                   explicitBadRecordID = None):
        """Takes a list of field names and returns it in a dictionary
        structure.

        INPUTS:
        masterField (str): name of field being used as the master
        fields {list, []}: name(s) of the field to be returned
        types (list): types of data allowed to be returned (1)
        minNumObs {int, 0}: minimum number of observations for error
        warnNumObs {int, 0}: minimum number of observations for warning
        dateStr {bool, False}: keep date values as strings rather than
                               converting them to seconds before now?
        explicitBadRecordID {int, None}: passed through to
                                         ERROR.reportBadRecords

        ATTRIBUTES:
        gaTable (structure): instance of the GA Table
        fields (dict): fieldName = instance of FCField
        master2Order (dict): masterID = order in lists
        order2Master (dict): order in lists = masterID
        masterField (str): field that serves as the master
        badRecords (list): master IDs that could not be read
        xyCoords (array, numObs x 2): xy-coordinates for feature centroids
        """

        #### Get Base Count, May Include Bad Records ####
        cnt = UTILS.getCount(self.inputFC)

        #### Validation of Master Field ####
        verifyMaster = ERROR.checkField(self.allFields, masterField,
                                        types = [0,1,5])

        #### Set MasterIsOID Boolean ####
        self.masterIsOID = masterField == self.oidName

        #### Set Master and Data Indices ####
        if self.masterIsOID:
            self.masterColumnIndex = 0
            self.dataColumnIndex = 2
            fieldList = [self.oidName, "shape@XY"]
        else:
            self.masterColumnIndex = 2
            self.dataColumnIndex = 3
            fieldList = [self.oidName, "shape@XY", masterField]

        #### Initialization of Centroids  ####
        xyCoords = NUM.empty((cnt, 2), float)

        #### Validation and Initialization of Data Fields ####
        numFields = len(fields)
        fieldTypes = {}
        hasDate = False
        for field in fields:
            fieldType = ERROR.checkField(self.allFields, field, types = types)
            fieldTypes[field] = fieldType
            fieldList.append(field)
            self.fields[field] = self.allFields[field]
            if fieldType.upper() == "DATE":
                hasDate = True
                nowTime = DT.datetime.now()

        #### Create Empty Data Arrays ####
        for fieldName, fieldObj in self.fields.iteritems():
            fieldObj.createDataArray(cnt, dateStr = dateStr)

        #### Z Coords ####
        if self.hasZ:
            zCoords = NUM.empty((cnt, ), float)
            fieldList.append("shape@Z")

        #### Keep track of Invalid Fields ####
        badIDs = []
        badRecord = 0

        #### Create Progressor Bar ####
        ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84001), 0, cnt, 1)

        #### Process Field Values ####
        try:
            rows = DA.SearchCursor(self.inputFC, fieldList, "",
                                   self.spatialRefString)
        except:
            ARCPY.AddIDMessage("ERROR", 204)
            raise SystemExit()

        c = 0
        for row in rows:
            oid = row[0]
            badXY = row[1].count(None)
            if self.hasZ:
                badValues = row[0:-1].count(None)
            else:
                badValues = row.count(None)

            #### Check Bad Record ####
            if badXY or badValues:
                badRow = 1
                badRecord = 1
                badIDs.append(oid)
            else:
                #### Get Centroid and Master ID ####
                xyCoords[c] = row[1]
                masterID = row[self.masterColumnIndex]

                #### Add Field Values ####
                if numFields:
                    restFields = row[self.dataColumnIndex:]
                    for fieldInd, fieldName in enumerate(fields):
                        fieldValue = restFields[fieldInd]
                        fieldType = fieldTypes[fieldName]
                        if fieldType.upper() == "DATE":
                            if dateStr:
                                fieldValue = str(fieldValue)
                            else:
                                fieldValue = (nowTime - fieldValue).total_seconds()
                        self.fields[fieldName].data[c] = fieldValue
                if self.hasZ:
                    zCoords[c] = row[-1]

                #### Check uniqueness of masterID field ####
                if self.master2Order.has_key(masterID):
                    del rows
                    ARCPY.AddIDMessage("ERROR", 644, masterField)
                    ARCPY.AddIDMessage("ERROR", 643)
                    raise SystemExit()
                else:
                    self.master2Order[masterID] = c
                    self.order2Master[c] = masterID
                    c += 1

            ARCPY.SetProgressorPosition()

        del rows

        #### Check Whether the Number of Features is Appropriate ####
        numObs = len(self.master2Order)
        ERROR.checkNumberOfObs(numObs, minNumObs = minNumObs,
                               warnNumObs = warnNumObs,
                               silentWarnings = self.silentWarnings)

        #### Get Set of Bad IDs ####
        badIDs = list(set(badIDs))
        badIDs.sort()
        badIDs = [ str(i) for i in badIDs ]

        #### Process any bad records encountered ####
        if badRecord != 0:
            bn = len(badIDs)
            if not self.silentWarnings:
                ERROR.reportBadRecords(cnt, bn, badIDs, label = self.oidName,
                                       explicitBadRecordID = explicitBadRecordID)

            #### Prune Data Arrays ####
            xyCoords = xyCoords[0:numObs]
            self.resizeDataArrays(numObs)
            if self.hasZ:
                zCoords = zCoords[0:numObs]

        #### Set the Hidden Fields (E.g. Not in Use) ####
        self.setHiddenFields()

        #### Reset Extent to Honor Env and Subsets ####
        try:
            self.extent = UTILS.resetExtent(xyCoords)
        except:
            pass

        #### Reset Coordinates for Chordal ####
        if self.useChordal:
            #### Project to XY on Spheroid ####
            self.spheroidCoords = ARC._ss.lonlat_to_xy(xyCoords, 
                                                self.spatialRef) 
            self.sliceInfo = UTILS.SpheroidSlice(self.extent,
                                                self.spatialRef)
        else:
            self.spheroidCoords = None
            self.sliceInfo = None

        #### Set Further Attributes ####
        self.badRecords = badIDs
        self.xyCoords = xyCoords
        self.masterField = masterField
        self.gaTable = None
        self.numObs = numObs
        if self.hasZ:
            self.zCoords = zCoords
        else:
            self.zCoords = None
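
This copy of obtainData shows the DATE handling: with dateStr=True the value is kept as a string, otherwise it is converted to the number of seconds between the field value and datetime.now(). A hedged sketch on a fresh SSDataObject (the field name is an assumption):

# Hypothetical sketch of the two DATE behaviors (field name is an assumption)
ssdo = SSDO.SSDataObject(r"C:\Data\incidents.shp")
ssdo.obtainData(ssdo.oidName, fields=["INCIDENT_DATE"], dateStr=True)
print(ssdo.fields["INCIDENT_DATE"].data[0])   # date kept as a string
# with dateStr=False (the default), the same entry would instead hold
# (datetime.now() - value).total_seconds()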
Example #28
def stCollectByKNN(ssdo, timeField, outputFC, inSpan, inDistance):
    """
    This method applies the Jacquez Space-Time k-NN approach to convert event data
    into weighted point data by dissolving all coincident points in space and time
    into unique points, with a new count field that contains the number of original
    features at that location and time span.

    INPUTS:
        ssdo (obj): SSDataObject from input
        timeField (str): Date/Time field name in input feature
        outputFC (str): path to the output feature class
        inSpan (int): value of temporal units within the same time bin
        inDistance (int): value of spatial units considered as spatial neighbors
    OUTPUTS:
        Create new collected point feature

    """
    #### Read raw time data ####
    timeData = ssdo.fields[timeField].data
    #### Convert temporal unit ####
    time = NUM.array(timeData, dtype = 'datetime64[s]').astype('datetime64[D]')
    #### Find Start Time ####
    startTime = time.min()
    #### Create Bin for Space and Time ####
    timeBin = (time - startTime) / inSpan

    numObs = ssdo.numObs
    #### Create Pseudo-FID to Find k-NN in Space and Time ####
    fid = [i for i in xrange(numObs)]

    #### Validate Output Workspace ####
    ERROR.checkOutputPath(outputFC)

    #### True Centroid Warning For Non-Point FCs ####
    if ssdo.shapeType.upper() != "POINT":
        ARCPY.AddIDMessage("WARNING", 1021)

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.inputFC, spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    cnt = UTILS.getCount(ssdo.inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 4)
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs = 4)

    #### Process Any Bad Records Encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        if not ssdo.silentWarnings:
            ERROR.reportBadRecords(cnt, numBadRecs, badRecs, label = ssdo.oidName)

    #### Create Output Feature Class ####
    outPath, outName = OS.path.split(outputFC)
    try:
        DM.CreateFeatureclass(outPath, outName, "POINT", "", ssdo.mFlag,
                              ssdo.zFlag, ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Create k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(inDistance, 0, "euclidean")

    #### Add Count and Time Fields ####
    #### (countFieldName/timeFieldName are assumed module-level constants in the source) ####
    countFieldNameOut = ARCPY.ValidateFieldName(countFieldName, outPath)
    timeFieldNameOut = ARCPY.ValidateFieldName(timeFieldName, outPath)
    UTILS.addEmptyField(outputFC, countFieldNameOut, "LONG")
    UTILS.addEmptyField(outputFC, timeFieldNameOut, "DATE")
    fieldList = ["SHAPE@", countFieldNameOut, timeFieldNameOut]

    #### Set Insert Cursor ####
    rowsOut = DA.InsertCursor(outputFC, fieldList)

    #### Detect S-T K-NN by Space and Time Bin ####
    duplicateList = []
    for record in fid:
        kNNList = [record]
        if record not in duplicateList:
            for pair in fid:
                if pair != record:
                    gaSearch.search_by_idx(record)
                    for nh in gaSearch:
                        if timeBin[record] == timeBin[pair]:
                            kNNList.append(nh.idx)
                            duplicateList.append(nh.idx)
            #### Create and Populate New Feature ####
            kNNList = list(set(kNNList))
            count = len(kNNList)
            dt = time[record]
            x0 = ssdo.xyCoords[kNNList, 0].mean()
            y0 = ssdo.xyCoords[kNNList, 1].mean()
            pnt = (x0, y0, ssdo.defaultZ)
            rowResult = [pnt, count, dt]
            rowsOut.insertRow(rowResult)
            ARCPY.SetProgressorPosition()

    #### Clean Up ####
    del rowsOut, timeBin, kNNList, duplicateList

    return countFieldNameOut
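
A hedged call sketch for stCollectByKNN (the paths, time field, and bin sizes are assumptions); note that the time field must already have been read into the SSDataObject:

# Hypothetical call sketch (paths, field name, and bin sizes are assumptions)
ssdo = SSDO.SSDataObject(r"C:\Data\events.shp")
ssdo.obtainData(ssdo.oidName, ["EVENT_DATE"], dateStr=True)   # read the time field first
countField = stCollectByKNN(ssdo, "EVENT_DATE",
                            r"C:\Data\events_collected.shp",
                            inSpan=7,          # 7-day time bins
                            inDistance=500.0)  # neighbors within 500 spatial units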
Example #29
    def obtainDataGA(self, masterField, fields = [], types = [0,1,2,3,5,6],
                     minNumObs = 0, warnNumObs = 0):
        """Takes a list of field names and returns it in a dictionary
        structure.

        INPUTS:
        masterField (str): name of field being used as the master
        fields {list, []}: name(s) of the field to be returned
        types (list): types of data allowed to be returned (1)
        minNumObs {int, 0}: minimum number of observations for error
        warnNumObs {int, 0}: minimum number of observations for warning

        ATTRIBUTES:
        gaTable (structure): instance of the GA Table
        fields (dict): fieldName = instance of FCField
        master2Order (dict): masterID = order in lists
        order2Master (dict): order in lists = masterID
        masterField (str): field that serves as the master
        badRecords (list): master IDs that could not be read
        xyCoords (array, numObs x 2): xy-coordinates for feature centroids

        NOTES:
        (1) No Text Fields; short [0], long [1], float [2], double [3]
        """

        #### Validation of Master Field ####
        verifyMaster = ERROR.checkField(self.allFields, masterField,
                                        types = [0,1,5])

        #### Set MasterIsOID Boolean ####
        self.masterIsOID = masterField == self.oidName

        #### Set Master and Data Indices ####
        if self.masterIsOID:
            self.masterColumnIndex = 0
            self.dataColumnIndex = 2
            fieldList = []
        else:
            self.masterColumnIndex = 2
            self.dataColumnIndex = 3
            fieldList = [masterField]

        #### Validation and Initialization of Data Fields ####
        numFields = len(fields)
        for field in fields:
            fType = ERROR.checkField(self.allFields, field, types = types)
            fieldList.append(field)
            self.fields[field] = self.allFields[field]

        #### ZCoords Are Last ####
        getZBool = self.hasZ and (not self.renderType)
        if getZBool:
            fieldList.append("SHAPE&Z")

        #### Create GA Data Structure ####
        cnt = UTILS.getCount(self.inputFC)
        fieldList = tuple(fieldList)
        gaTable, gaInfo = WU.gaTable(self.inputFC, fieldNames = fieldList,
                                     spatRef = self.spatialRefString)

        #### Check Whether the Number of Features is Appropriate ####
        numObs = gaInfo[0]
        ERROR.checkNumberOfObs(numObs, minNumObs = minNumObs,
                               warnNumObs = warnNumObs,
                               silentWarnings = self.silentWarnings)

        #### Process any bad records encountered ####
        numBadIDs = cnt - numObs
        if numBadIDs:
            badIDs = WU.parseGAWarnings(gaTable.warnings)
            if not self.silentWarnings:
                ERROR.reportBadRecords(cnt, numBadIDs, badIDs,
                                       label = self.oidName)
        else:
            badIDs = []

        #### Initialization of Centroids  ####
        xyCoords = NUM.empty((numObs, 2), float)

        #### Z Coords ####
        if self.hasZ:
            zCoords = NUM.empty((numObs, ), float)

        #### Create Empty Data Arrays ####
        for fieldName, fieldObj in self.fields.iteritems():
            fieldObj.createDataArray(numObs)

        #### Populate SSDataObject ####
        ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84001), 0, numObs, 1)
        for row in xrange(numObs):
            rowInfo = gaTable[row]
            x,y = rowInfo[1]
            masterID = int(rowInfo[self.masterColumnIndex])
            if self.master2Order.has_key(masterID):
                ARCPY.AddIDMessage("ERROR", 644, masterField)
                ARCPY.AddIDMessage("ERROR", 643)
                raise SystemExit()
            else:
                self.master2Order[masterID] = row
                self.order2Master[row] = masterID
                xyCoords[row] = (x, y)
            if numFields:
                restFields = rowInfo[self.dataColumnIndex:]
                for fieldInd, fieldName in enumerate(fields):
                    self.fields[fieldName].data[row] = restFields[fieldInd]
            if self.hasZ:
                if getZBool:
                    zCoords[row] = rowInfo[-1]
                else:
                    zCoords[row] = NUM.nan

            ARCPY.SetProgressorPosition()

        #### Set the Hidden Fields (E.g. Not in Use) ####
        self.setHiddenFields()

        #### Reset Extent to Honor Env and Subsets ####
        try:
            self.extent = UTILS.resetExtent(xyCoords)
        except:
            pass

        #### Reset Coordinates for Chordal ####
        if self.useChordal:
            #### Project to XY on Spheroid ####
            self.spheroidCoords = ARC._ss.lonlat_to_xy(xyCoords,
                                                self.spatialRef) 
            self.sliceInfo = UTILS.SpheroidSlice(self.extent,
                                                self.spatialRef)
        else:
            self.spheroidCoords = None
            self.sliceInfo = None

        #### Set Further Attributes ####
        self.badRecords = badIDs
        self.xyCoords = xyCoords
        self.masterField = masterField
        self.gaTable = gaTable
        self.numObs = numObs
        if self.hasZ:
            self.zCoords = zCoords
        else:
            self.zCoords = None
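
A hedged sketch for the GA-table variant (path and field are assumptions); unlike obtainData, it keeps a gaTable attribute that later neighbor searches can reuse:

# Hypothetical usage of obtainDataGA (path and field are assumptions)
ssdo = SSDO.SSDataObject(r"C:\Data\points.shp", useChordal=True)
ssdo.obtainDataGA(ssdo.oidName, fields=["VALUE"], minNumObs=3, warnNumObs=30)
print(ssdo.numObs, ssdo.gaTable is not None)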
Example #30
def calculateDistanceBand(inputFC, kNeighs, concept="EUCLIDEAN"):
    """Provides the minimum, maximum and average distance from a
    set of features based on a given neighbor count.

    INPUTS: 
    inputFC (str): path to the input feature class
    kNeighs (int): number of neighbors to return
    concept {str, EUCLIDEAN}: EUCLIDEAN or MANHATTAN distance
    """

    #### Assure that kNeighs is Non-Zero ####
    if kNeighs <= 0:
        ARCPY.AddIDMessage("ERROR", 976)
        raise SystemExit()

    #### Set Default Progressor for Neighborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, useChordal=True)
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs=2)

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(inputFC, spatRef=ssdo.spatialRefString)

    #### Assure Enough Observations ####
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs=2)

    #### Process Any Bad Records Encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        err = ERROR.reportBadRecords(cnt,
                                     numBadRecs,
                                     badRecs,
                                     label=ssdo.oidName)

    #### Assure k-Nearest is Less Than Number of Features ####
    if kNeighs >= N:
        ARCPY.AddIDMessage("ERROR", 975)
        raise SystemExit()

    #### Create k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaConcept = concept.lower()
    gaSearch.init_nearest(0.0, kNeighs, gaConcept)
    neighDist = ARC._ss.NeighborDistances(gaTable, gaSearch)

    #### Set Progressor for Weights Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84007), 0, N, 1)
    distances = NUM.empty((N, ), float)

    for row in xrange(N):
        distances[row] = neighDist[row][-1].max()
        ARCPY.SetProgressorPosition()

    #### Calculate and Report ####
    minDist = distances.min()
    avgDist = distances.mean()
    maxDist = distances.max()
    if ssdo.useChordal:
        hardMaxExtent = ARC._ss.get_max_gcs_distance(ssdo.spatialRef)
        if maxDist > hardMaxExtent:
            ARCPY.AddIDMessage("ERROR", 1609)
            raise SystemExit()

    minDistOut = LOCALE.format("%0.6f", minDist)
    avgDistOut = LOCALE.format("%0.6f", avgDist)
    maxDistOut = LOCALE.format("%0.6f", maxDist)

    #### Create Output Text Table ####
    header = ARCPY.GetIDMessage(84171)
    row1 = [ARCPY.GetIDMessage(84165).format(kNeighs), minDistOut]
    row2 = [ARCPY.GetIDMessage(84166).format(kNeighs), avgDistOut]
    row3 = [ARCPY.GetIDMessage(84167).format(kNeighs), maxDistOut]
    total = [row1, row2, row3]
    tableOut = UTILS.outputTextTable(total, header=header, pad=1)

    #### Add Linear/Angular Unit ####
    distanceOut = ssdo.distanceInfo.outputString
    distanceMeasuredStr = ARCPY.GetIDMessage(84344).format(distanceOut)
    tableOut += "\n%s\n" % distanceMeasuredStr

    #### Report Text Output ####
    ARCPY.AddMessage(tableOut)

    #### Set Derived Output ####
    ARCPY.SetParameterAsText(3, minDist)
    ARCPY.SetParameterAsText(4, avgDist)
    ARCPY.SetParameterAsText(5, maxDist)

    #### Clean Up ####
    del gaTable
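
A hedged call sketch (the path and neighbor count are assumptions); the function prints a min/average/max distance table to the messages and, when run as a script tool, writes the three values to derived parameters 3 through 5:

# Hypothetical call sketch (path and neighbor count are assumptions)
calculateDistanceBand(r"C:\Data\points.shp", 8, concept="EUCLIDEAN")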
Example #31
    def output2NewFC(self, outputFC, candidateFields, appendFields = [],
                     fieldOrder = []):
        """Creates a new feature class with the same shape charcteristics as
        the source input feature class and appends data to it.

        INPUTS:
        outputFC (str): catalogue path to output feature class
        candidateFields (dict): fieldName = instance of CandidateField
        appendFields {list, []}: field names in the order you want appended
        fieldOrder {list, []}: the order with which to write fields
        """

        #### Initial Progressor Bar ####
        ARCPY.overwriteOutput = True
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84006))

        #### Validate Output Workspace ####
        ERROR.checkOutputPath(outputFC)

        #### Create Path for Output FC ####
        outPath, outName = OS.path.split(outputFC)

        #### Get Output Name for SDE if Necessary ####
        baseType = UTILS.getBaseWorkspaceType(outPath)
        if baseType.upper() == 'REMOTEDATABASE':
            outName = outName.split(".")[-1]
        self.outputFC = OS.path.join(outPath, outName)

        #### Assess Whether to Honor Original Field Nullable Flag ####
        setNullable = UTILS.setToNullable(self.catPath, self.outputFC)

        #### Add Null Value Flag ####
        outIsShapeFile = UTILS.isShapeFile(self.outputFC)

        #### Create Output Field Names to be Appended From Input ####
        inputFieldNames = ["SHAPE@", self.masterField]
        appendFieldNames = []
        masterIsOID = self.masterField == self.oidName
        if masterIsOID:
            appendFieldNames.append("SOURCE_ID")
        else:
            master = self.allFields[self.masterField.upper()]
            returnName = UTILS.returnOutputFieldName(master)
            appendFieldNames.append(returnName)

        for fieldName in appendFields:
            field = self.allFields[fieldName.upper()]
            returnName = UTILS.returnOutputFieldName(field)
            inputFieldNames.append(fieldName)
            appendFieldNames.append(returnName)
        appendFieldNames = UTILS.createAppendFieldNames(appendFieldNames,
                                                        outPath)
        masterOutName = appendFieldNames[0]

        #### Create Field Mappings for Visible Fields ####
        outputFieldMaps = ARCPY.FieldMappings()

        #### Add Input Fields to Output ####
        for ind, fieldName in enumerate(appendFieldNames):
            if ind == 0:
                #### Master Field ####
                sourceFieldName = self.masterField
                if masterIsOID:
                    fieldType = "LONG"
                    alias = fieldName
                    setOutNullable = False
                    fieldLength = None
                    fieldPrecision = None
                else:
                    masterOutField = self.allFields[self.masterField.upper()]
                    fieldType = masterOutField.type
                    alias = masterOutField.baseName
                    setOutNullable = setNullable
                    fieldLength = masterOutField.length
                    fieldPrecision = masterOutField.precision
            else:
                #### Append Fields ####
                sourceFieldName = appendFields[ind-1]
                outField = self.allFields[sourceFieldName]
                fieldType = outField.type
                alias = outField.baseName
                setOutNullable = setNullable
                fieldLength = outField.length
                fieldPrecision = outField.precision

            #### Create Candidate Field ####
            outCandidate = CandidateField(fieldName, fieldType, None,
                                          alias = alias,
                                          precision = fieldPrecision,
                                          length = fieldLength)

            #### Create Output Field Map ####
            outFieldMap = UTILS.createOutputFieldMap(self.inputFC,
                                                     sourceFieldName,
                                                     outFieldCandidate = outCandidate,
                                                     setNullable = setOutNullable)

            #### Add Output Field Map to New Field Mapping ####
            outputFieldMaps.addFieldMap(outFieldMap)

        #### Do FC2FC Without Extent Env Var ####
        FC2FC = UTILS.clearExtent(CONV.FeatureClassToFeatureClass)
        try:
            FC2FC(self.inputFC, outPath, outName, "", outputFieldMaps)
        except:
            ARCPY.AddIDMessage("ERROR", 210, self.outputFC)
            raise SystemExit()

        #### Create/Verify Result Field Order ####
        fieldKeys = candidateFields.keys()
        fieldKeys.sort()
        if len(fieldOrder) == len(fieldKeys):
            fKeySet = set(fieldKeys)
            fieldOrderSet = set(fieldOrder)
            if fieldOrderSet == fKeySet:
                fieldKeys = fieldOrder

            del fKeySet, fieldOrderSet

        #### Add Empty Output Analysis Fields ####
        outputFieldNames = [masterOutName]
        for fieldInd, fieldName in enumerate(fieldKeys):
            field = candidateFields[fieldName]
            field.copy2FC(outputFC)
            outputFieldNames.append(fieldName)

            #### Replace NaNs for Shapefiles ####
            if outIsShapeFile:
                if field.type != "TEXT":
                    isNaN = NUM.isnan(field.data)
                    if NUM.any(isNaN):
                        field.data[isNaN] = UTILS.shpFileNull[field.type]

        #### Populate Output Feature Class with Values ####
        ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84003),
                            0, self.numObs, 1)
        outRows = DA.UpdateCursor(self.outputFC, outputFieldNames)

        for row in outRows:
            masterID = row[0]
            if self.master2Order.has_key(masterID):
                order = self.master2Order[masterID]

                #### Create Output Row from Input ####
                resultValues = [masterID]

                #### Add Result Values ####
                for fieldName in fieldKeys:
                    field = candidateFields[fieldName]
                    fieldValue = field.data.item(order)
                    resultValues.append(fieldValue)

                #### Insert Values into Output ####
                outRows.updateRow(resultValues)

            else:
                #### Bad Record ####
                outRows.deleteRow()

            ARCPY.SetProgressorPosition()

        #### Clean Up ####
        del outRows
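
A hedged sketch of writing analysis results back out with output2NewFC, assuming CandidateField takes (name, type, data) as in the snippet above; the output path, field names, and values are assumptions:

# Hypothetical sketch (output path, field names, and values are assumptions)
import numpy as NUM
zScores = NUM.zeros(ssdo.numObs)                        # placeholder result values, in ssdo order
zField = CandidateField("Z_SCORE", "DOUBLE", zScores)   # (name, type, data) per the code above
ssdo.output2NewFC(r"C:\Data\results.shp",               # assumes ssdo.obtainData(...) was run first
                  {"Z_SCORE": zField},
                  appendFields=["VALUE"],                # carry an input field through
                  fieldOrder=["Z_SCORE"])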
Example #32
    def createOutput(self, outputFC):
        """Creates an Output Feature Class with the Mean Centers.

        INPUTS:
        outputFC (str): path to the output feature class
        """

        #### Validate Output Workspace ####
        ERROR.checkOutputPath(outputFC)

        #### Shorthand Attributes ####
        ssdo = self.ssdo
        caseField = self.caseField
        dimField = self.dimField

        #### Create Output Feature Class ####
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
        outPath, outName = OS.path.split(outputFC)

        try:
            DM.CreateFeatureclass(outPath, outName, "POINT", "", ssdo.mFlag,
                                  ssdo.zFlag, ssdo.spatialRefString)
        except:
            ARCPY.AddIDMessage("ERROR", 210, outputFC)
            raise SystemExit()

        #### Add Field Names ####
        fn = UTILS.getFieldNames(mcFieldNames, outPath)
        xFieldName, yFieldName, zFieldName = fn
        shapeFieldNames = ["SHAPE@"]
        dataFieldNames = [xFieldName, yFieldName]
        if ssdo.zBool:
            dataFieldNames.append(zFieldName)

        for fieldName in dataFieldNames:
            UTILS.addEmptyField(outputFC, fieldName, "DOUBLE")

        caseIsDate = False
        if caseField:
            fcCaseField = ssdo.allFields[caseField]
            validCaseName = UTILS.validQFieldName(fcCaseField, outPath)
            caseType = UTILS.convertType[fcCaseField.type]
            UTILS.addEmptyField(outputFC, validCaseName, caseType)
            dataFieldNames.append(validCaseName)
            if caseType.upper() == "DATE":
                caseIsDate = True

        if dimField:
            fcDimField = ssdo.allFields[dimField]
            validDimName = UTILS.validQFieldName(fcDimField, outPath)
            if caseField:
                if validCaseName == validDimName:
                    validDimName = ARCPY.GetIDMessage(84199)
            UTILS.addEmptyField(outputFC, validDimName, "DOUBLE")
            dataFieldNames.append(validDimName)

        #### Write Output ####
        allFieldNames = shapeFieldNames + dataFieldNames
        rows = DA.InsertCursor(outputFC, allFieldNames)
        for case in self.caseKeys:

            #### Mean Centers ####
            meanX, meanY, meanZ = self.meanCenter[case]
            pnt = (meanX, meanY, meanZ)
            if ssdo.zBool:
                rowResult = [pnt, meanX, meanY, meanZ]
            else:
                rowResult = [pnt, meanX, meanY]

            #### Set Attribute Fields ####
            if caseField:
                caseValue = case.item()
                if caseIsDate:
                    caseValue = TUTILS.iso2DateTime(caseValue)
                rowResult.append(caseValue)

            if dimField:
                meanDim = self.dimCenter[case]
                rowResult.append(meanDim)

            rows.insertRow(rowResult)

        #### Clean Up ####
        del rows

        #### Set Attribute ####
        self.outputFC = outputFC
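
A hedged end-to-end sketch; the enclosing class name (MeanCenter here) and the field names are assumptions, and the constructor shown in Example #34 below sets up the meanCenter, caseKeys, and dimCenter attributes this createOutput method relies on:

# Hypothetical sketch (class name "MeanCenter", path, and fields are assumptions)
ssdo = SSDO.SSDataObject(r"C:\Data\points.shp")
ssdo.obtainData(ssdo.oidName, ["POP", "REGION"])
mc = MeanCenter(ssdo, weightField="POP", caseField="REGION")
mc.createOutput(r"C:\Data\mean_centers.shp")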
Example #33
File: Weights.py  Project: leochin/GSWMtest
def delaunay2SWM(inputFC, swmFile, masterField, rowStandard = True):
    """Creates a sparse spatial weights matrix (SWM) based on Delaunay
    Triangulation.  

    INPUTS: 
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    rowStandard {bool, True}: row standardize weights?
    """

    #### Set Default Progressor for Neighborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 2)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField, types = [0,1])

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.catPath, [masterField],
                                 spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs = 2)

    #### Process any bad records encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        err = ERROR.reportBadRecords(cnt, numBadRecs, badRecs,
                                     label = ssdo.oidName)

    #### Create Delaunay Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_delaunay()
    neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch, 
                                           weight_type = 1,
                                           row_standard = False)

    #### Set Progressor for Weights Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, N, 1)

    #### Initialize Spatial Weights Matrix File ####
    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName, 
                             N, rowStandard, inputFC = inputFC,
                             wType = 3)

    #### Unique Master ID Dictionary ####
    masterSet = set([])

    for row in xrange(N):
        masterID = int(gaTable[row][2])
        if masterID in masterSet:
            ARCPY.AddIDMessage("Error", 644, masterField)
            ARCPY.AddIDMessage("Error", 643)
            raise SystemExit()
        else:
            masterSet.add(masterID)

        neighs, weights = neighWeights[row]
        neighs = [ gaTable[nh][2] for nh in neighs ]

        #### Add Spatial Weights Matrix Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights) 

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    swmWriter.close()
    del gaTable

    #### Report if Any Features Have No Neighbors ####
    swmWriter.reportNoNeighbors()

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()
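
A hedged call sketch (the paths and master field are assumptions):

# Hypothetical call sketch (paths and master field are assumptions)
delaunay2SWM(r"C:\Data\counties.shp",
             r"C:\Data\counties_delaunay.swm",
             "MYID", rowStandard=True)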
Example #34
    def __init__(self, ssdo, weightField=None, caseField=None, dimField=None):

        #### Set Initial Attributes ####
        UTILS.assignClassAttr(self, locals())

        #### Set Data ####
        self.xyCoords = self.ssdo.xyCoords
        self.zCoords = self.ssdo.zCoords

        #### Verify Weights ####
        if weightField:
            self.weights = self.ssdo.fields[weightField].returnDouble()

            #### Report Negative Weights ####
            lessThanZero = NUM.where(self.weights < 0.0)
            if len(lessThanZero[0]):
                self.weights[lessThanZero] = 0.0
                ARCPY.AddIDMessage("Warning", 941)

            #### Verify Weight Sum ####
            self.weightSum = self.weights.sum()
            if not self.weightSum > 0.0:
                ARCPY.AddIDMessage("ERROR", 898)
                raise SystemExit()
        else:
            self.weights = NUM.ones((self.ssdo.numObs, 1))

        #### Set Case Field ####
        if caseField:
            caseType = ssdo.allFields[caseField].type.upper()
            self.caseIsString = caseType == "STRING"
            self.caseVals = self.ssdo.fields[caseField].data
            cases = NUM.unique(self.caseVals)
            if self.caseIsString:
                self.uniqueCases = cases[NUM.where(cases != "")]
            else:
                self.uniqueCases = cases
        else:
            self.caseIsString = False
            self.caseVals = NUM.ones((self.ssdo.numObs, ), int)
            self.uniqueCases = [1]

        #### Set Result Dict ####
        meanCenter = COLL.defaultdict(NUM.array)
        if dimField:
            dimCenter = COLL.defaultdict(float)
            self.dimVals = self.ssdo.fields[dimField].returnDouble()
        else:
            dimCenter = None

        #### Keep Track of Bad Cases ####
        badCases = []

        #### Calculate Results ####
        for case in self.uniqueCases:
            indices = NUM.where(self.caseVals == case)
            numFeatures = len(indices[0])
            xy = self.xyCoords[indices]
            w = self.weights[indices]
            w.shape = numFeatures, 1
            weightSum = w.sum()
            if (weightSum != 0.0) and (numFeatures > 0):
                xyWeighted = w * xy

                #### Mean Center ####
                centers = xyWeighted.sum(0) / weightSum
                meanX, meanY = centers
                meanZ = None
                if ssdo.hasZ:
                    z = self.ssdo.zCoords[indices]
                    try:
                        zWeighted = w * z
                        meanZ = zWeighted.sum() / weightSum
                    except:
                        meanZ = 0.0
                else:
                    meanZ = self.ssdo.defaultZ
                meanCenter[case] = NUM.array([meanX, meanY, meanZ])

                #### Attribute Field ####
                if dimField:
                    dimWeighted = w.flatten() * self.dimVals[indices]
                    meanDim = dimWeighted.sum() / weightSum
                    dimCenter[case] = meanDim

            else:
                badCases.append(case)

        #### Report Bad Cases ####
        nCases = len(self.uniqueCases)
        nBadCases = len(badCases)
        badCases.sort()
        if nBadCases:
            cBool = self.caseIsString
            if not self.caseIsString:
                badCases = [UTILS.caseValue2Print(i, cBool) for i in badCases]
            ERROR.reportBadCases(nCases, nBadCases, badCases, label=caseField)

        #### Sorted Case List ####
        caseKeys = meanCenter.keys()
        caseKeys.sort()
        self.caseKeys = caseKeys

        #### Set Attributes ####
        self.meanCenter = meanCenter
        self.dimCenter = dimCenter
        self.badCases = badCases
        self.caseField = caseField
        self.dimField = dimField
        self.weightField = weightField
Example #35
File: Weights.py  Project: leochin/GSWMtest
def distance2SWM(inputFC, swmFile, masterField, fixed = 0, 
                 concept = "EUCLIDEAN", exponent = 1.0, threshold = None, 
                 kNeighs = 1, rowStandard = True):
    """Creates a sparse spatial weights matrix (SWM) based on k-nearest
    neighbors.

    INPUTS: 
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    fixed (boolean): fixed (1) or inverse (0) distance? 
    concept {str, EUCLIDEAN}: EUCLIDEAN or MANHATTAN
    exponent {float, 1.0}: distance decay
    threshold {float, None}: distance threshold
    kNeighs (int): number of neighbors to return
    rowStandard {bool, True}: row standardize weights?
    """

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField, types = [0,1])

    #### Read Data ####
    ssdo.obtainDataGA(masterField, minNumObs = 2)
    N = ssdo.numObs
    gaTable = ssdo.gaTable
    if fixed:
        wType = 1
    else:
        wType = 0

    #### Set Default Progressor for Neighborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Set the Distance Threshold ####
    concept, gaConcept = WU.validateDistanceMethod(concept, ssdo.spatialRef)
    if threshold == None:
        threshold, avgDist = WU.createThresholdDist(ssdo, 
                                        concept = concept)

    #### Assures that the Threshold is Appropriate ####
    gaExtent = UTILS.get92Extent(ssdo.extent)
    threshold, maxSet = WU.checkDistanceThreshold(ssdo, threshold,
                                                  weightType = wType)

    #### If the Threshold is Set to the Max ####
    #### Set to Zero for Script Logic ####
    if maxSet:
        #### All Locations are Related ####
        threshold = SYS.maxint
        if N > 500:
            ARCPY.AddIDMessage("Warning", 717)

    #### Assure k-Nearest is Less Than Number of Features ####
    if kNeighs >= N and fixed:
        ARCPY.AddIDMessage("ERROR", 975)
        raise SystemExit()

    #### Create Distance/k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(threshold, kNeighs, gaConcept)
    neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch, 
                                           weight_type = wType,
                                           exponent = exponent,
                                           row_standard = False)

    #### Set Progressor for Weights Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, N, 1)

    #### Initialize Spatial Weights Matrix File ####
    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName, 
                             N, rowStandard, inputFC = inputFC,
                             wType = wType, distanceMethod = concept,
                             exponent = exponent, threshold = threshold)

    #### Unique Master ID Dictionary ####
    masterDict = {}

    #### Unique Master ID Dictionary ####
    masterSet = set([])

    for row in xrange(N):
        masterID = int(gaTable[row][2])
        if masterID in masterSet:
            ARCPY.AddIDMessage("Error", 644, masterField)
            ARCPY.AddIDMessage("Error", 643)
            raise SystemExit()
        else:
            masterSet.add(masterID)

        neighs, weights = neighWeights[row]
        neighs = [ gaTable[nh][2] for nh in neighs ]

        #### Add Spatial Weights Matrix Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights) 

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    swmWriter.close()
    del gaTable

    #### Report Warning/Max Neighbors ####
    swmWriter.reportNeighInfo()

    #### Add Linear/Angular Unit (Distance Based Only) ####
    distanceOut = ssdo.distanceInfo.outputString
    distanceOut = [ARCPY.GetIDMessage(84344).format(distanceOut)]

    #### Report Spatial Weights Summary ####
    swmWriter.report(additionalInfo = distanceOut)

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()
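
A hedged call sketch for a fixed-distance SWM with an explicit threshold (paths, master field, and threshold are assumptions); passing threshold=None would let the function derive a default distance instead:

# Hypothetical call sketch (paths, master field, and threshold are assumptions)
distance2SWM(r"C:\Data\points.shp",
             r"C:\Data\points_fixed1000.swm",
             "MYID",
             fixed=1,                 # fixed-distance weights
             concept="EUCLIDEAN",
             threshold=1000.0,
             rowStandard=True)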
Example #36
def exportXYV(inputFC, fieldList, delimiter, outFile, outFieldNames=False):
    """Exports the X,Y Coords and Set of Field Values for a Given
    Feature Class.

    INPUTS:
    inputFC (str): path to the input feature class
    fieldList (list): list of field names to export
    delimiter (str): token to delimit output file with
    outFile (str): path to the output text file
    outFieldNames (bool): return field names in first row of text file?

    OUTPUT:
    outFile (file): output text file
    """

    #### Get Feature Class Properties ####
    ssdo = SSDO.SSDataObject(inputFC, useChordal=False)
    inputFields = [ssdo.oidName, "SHAPE@XY"] + fieldList

    #### Create Progressor Bar ####
    cnt = UTILS.getCount(inputFC)
    ARCPY.AddMessage(ARCPY.GetIDMessage(84012))
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84012), 0, cnt, 1)

    #### Keep track of Invalid Fields ####
    badIDs = []
    badRecord = 0

    #### Process Field Values ####
    try:
        rows = DA.SearchCursor(ssdo.inputFC, inputFields, "", ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 204)
        raise SystemExit()

    #### Get Field Types and Set LOCALE Dictionary ####
    floatTypes = ["Single", "Double"]
    localeDict = {}
    for field in fieldList:
        fieldType = ssdo.allFields[field].type
        if fieldType in floatTypes:
            formatToken = "%f"
        else:
            formatToken = "%s"
        localeDict[field] = formatToken

    #### Create Output File ####
    fo = UTILS.openFile(outFile, "w")

    #### Write Field Names to File ####
    if outFieldNames:
        outPath, outName = OS.path.split(outFile)
        allFieldNames = UTILS.getFieldNames(exyvFieldNames, outPath)
        allFieldNames += fieldList
        outRow = delimiter.join(allFieldNames)
        fo.write("%s\n" % outRow.encode("utf-8"))

    for row in rows:
        OID = row[0]
        badValues = row.count(None)
        badXY = row[1].count(None)
        badRow = badValues or badXY
        if not badXY:
            xCoord, yCoord = row[1]
            x = LOCALE.format("%0.8f", xCoord)
            y = LOCALE.format("%0.8f", yCoord)
        else:
            x = "NULL"
            y = "NULL"

        #### Check to see whether field values are OK ####
        rowValues = [x, y]
        for ind, field in enumerate(fieldList):
            value = row[ind + 2]
            if value == "" or value == None:
                rowValues.append("NULL")
            else:
                formatValue = LOCALE.format(localeDict[field], value)
                rowValues.append(formatValue)

        #### Keep Track of Bad Records ####
        if badRow:
            badIDs.append(OID)

        #### Continue Based on Whether a Bad Row ####
        outRow = delimiter.join(rowValues)
        fo.write("%s\n" % outRow.encode("utf-8"))

        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    del rows
    fo.close()
    ARCPY.AddMessage(outFile)

    #### Get Set of Bad IDs ####
    badIDs = list(set(badIDs))
    badIDs.sort()
    badIDs = [str(i) for i in badIDs]

    #### Process any bad records encountered ####
    bn = len(badIDs)
    if bn:
        err = ERROR.reportBadRecords(cnt, bn, badIDs, label=ssdo.oidName, allowNULLs=True)
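
A hedged call sketch (the paths and field names are assumptions); with outFieldNames=True the first row of the text file carries the column names:

# Hypothetical call sketch (paths and field names are assumptions)
exportXYV(r"C:\Data\points.shp", ["VALUE", "NAME"], ",",
          r"C:\Data\points_xyv.csv", outFieldNames=True)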
Example #37
    def __init__(self, ssdo, weightField = None, caseField = None, 
                 stdDeviations = 1.0):

        #### Set Initial Attributes ####
        UTILS.assignClassAttr(self, locals())

        #### Set Data ####
        self.xyCoords = self.ssdo.xyCoords

        #### Verify Weights ####
        if weightField:
            self.weights = self.ssdo.fields[weightField].returnDouble()

            #### Report Negative Weights ####
            lessThanZero = NUM.where(self.weights < 0.0)
            if len(lessThanZero[0]):
                self.weights[lessThanZero] = 0.0
                ARCPY.AddIDMessage("Warning", 941)

            #### Verify Weight Sum ####
            self.weightSum = self.weights.sum()
            if not self.weightSum > 0.0: 
                ARCPY.AddIDMessage("ERROR", 898)
                raise SystemExit()
        else:
            self.weights = NUM.ones((self.ssdo.numObs,))

        #### Set Case Field ####
        if caseField:
            caseType = ssdo.allFields[caseField].type.upper()
            self.caseIsString = caseType == "STRING"
            self.caseVals = self.ssdo.fields[caseField].data
            cases = NUM.unique(self.caseVals)
            if self.caseIsString:
                self.uniqueCases = cases[NUM.where(cases != "")]
            else:
                self.uniqueCases = cases
        else:
            self.caseIsString = False
            self.caseVals = NUM.ones((self.ssdo.numObs, ), int)
            self.uniqueCases = [1]

        #### Set Result Dict ####
        meanCenter = COLL.defaultdict(NUM.array)
        sd = COLL.defaultdict(float)

        #### Keep Track of Bad Cases ####
        badCases = []

        #### Calculate Mean Center and Standard Distance ####
        for case in self.uniqueCases:
            indices = NUM.where(self.caseVals == case)
            numFeatures = len(indices[0])
            xy = self.xyCoords[indices]
            w = self.weights[indices]
            w.shape = numFeatures, 1
            weightSum = w.sum()
            if (weightSum != 0.0) and (numFeatures > 2):
                xyWeighted = w * xy

                #### Mean Center ####
                centers = xyWeighted.sum(0) / weightSum
                meanCenter[case] = centers

                #### Standard Distance ####
                devXY = xy - centers
                sigXY = (w * devXY**2.0).sum(0)/weightSum 
                sdVal = (MATH.sqrt(sigXY.sum())) * stdDeviations
                sd[case] = sdVal
            else:
                badCases.append(case)
                
        #### Report Bad Cases ####
        nCases = len(self.uniqueCases)
        nBadCases = len(badCases) 
        badCases.sort()
        if nBadCases:
            cBool = self.caseIsString
            if not self.caseIsString:
                badCases = [UTILS.caseValue2Print(i, cBool) for i in badCases]
            ERROR.reportBadCases(nCases, nBadCases, badCases, 
                                 label = caseField)   
        
        #### Sorted Case List ####
        caseKeys = sd.keys()
        caseKeys.sort()
        self.caseKeys = caseKeys

        #### Set Attributes ####
        self.meanCenter = meanCenter
        self.sd = sd
        self.badCases = badCases
        self.caseField = caseField
        self.stdDeviations = stdDeviations
        self.weightField = weightField
Example #38
def exportXYV(inputFC, fieldList, delimiter, outFile, outFieldNames=False):
    """Exports the X,Y Coords and Set of Field Values for a Given
    Feature Class.

    INPUTS:
    inputFC (str): path to the input feature class
    fieldList (list): list of field names to export
    delimiter (str): token to delimit output file with
    outFile (str): path to the output text file
    outFieldNames (bool): return field names in first row of text file?

    OUTPUT:
    outFile (file): output text file
    """

    #### Get Feature Class Properties ####
    ssdo = SSDO.SSDataObject(inputFC, useChordal=False)
    inputFields = [ssdo.oidName, "SHAPE@XY"] + fieldList

    #### Create Progressor Bar ####
    cnt = UTILS.getCount(inputFC)
    ARCPY.AddMessage(ARCPY.GetIDMessage(84012))
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84012), 0, cnt, 1)

    #### Keep track of Invalid Fields ####
    badIDs = []
    badRecord = 0

    #### Process Field Values ####
    try:
        rows = DA.SearchCursor(ssdo.inputFC, inputFields, "",
                               ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 204)
        raise SystemExit()

    #### Get Field Types and Set LOCALE Dictionary ####
    floatTypes = ["Single", "Double"]
    localeDict = {}
    for field in fieldList:
        fieldType = ssdo.allFields[field].type
        if fieldType in floatTypes:
            formatToken = "%f"
        else:
            formatToken = "%s"
        localeDict[field] = formatToken

    #### Create Output File ####
    fo = UTILS.openFile(outFile, 'w')

    #### Write Field Names to File ####
    if outFieldNames:
        outPath, outName = OS.path.split(outFile)
        allFieldNames = UTILS.getFieldNames(exyvFieldNames, outPath)
        allFieldNames += fieldList
        outRow = delimiter.join(allFieldNames)
        fo.write("%s\n" % outRow.encode('utf-8'))

    for row in rows:
        OID = row[0]
        badValues = row.count(None)
        badXY = row[1].count(None)
        badRow = badValues or badXY
        if not badXY:
            xCoord, yCoord = row[1]
            x = LOCALE.format("%0.8f", xCoord)
            y = LOCALE.format("%0.8f", yCoord)
        else:
            x = "NULL"
            y = "NULL"

        #### Check to see whether field values are OK ####
        rowValues = [x, y]
        for ind, field in enumerate(fieldList):
            value = row[ind + 2]
            if value == "" or value == None:
                rowValues.append("NULL")
            else:
                formatValue = LOCALE.format(localeDict[field], value)
                rowValues.append(formatValue)

        #### Keep Track of Bad Records ####
        if badRow:
            badIDs.append(OID)

        #### Write Row (NULL Values Allowed) ####
        outRow = delimiter.join(rowValues)
        fo.write("%s\n" % outRow.encode('utf-8'))

        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    del rows
    fo.close()
    ARCPY.AddMessage(outFile)

    #### Get Set of Bad IDs ####
    badIDs = list(set(badIDs))
    badIDs.sort()
    badIDs = [str(i) for i in badIDs]

    #### Process any bad records encountered ####
    bn = len(badIDs)
    if bn:
        err = ERROR.reportBadRecords(cnt,
                                     bn,
                                     badIDs,
                                     label=ssdo.oidName,
                                     allowNULLs=True)
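
A hedged usage sketch for exportXYV; the paths and field names below are hypothetical, and the call assumes it runs in the same module so the function and its imports are in scope:

inputFC = r"C:\Data\analysis.gdb\Crimes"     # hypothetical input feature class
outFile = r"C:\Data\crimes_xyv.txt"          # hypothetical output text file
fieldList = ["POP", "RATE"]                  # hypothetical numeric attribute fields
exportXYV(inputFC, fieldList, ",", outFile, outFieldNames = True)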
Example #39
def setupLogit():
    #### Get User Provided Inputs ####
    inputFC = ARCPY.GetParameterAsText(0)
    outputFC = ARCPY.GetParameterAsText(1)
    depVarName = str(ARCPY.GetParameterAsText(2))
    indVarNames = ARCPY.GetParameterAsText(3)
    indVarNames = [str(i) for i in indVarNames.split(";")]
    indVarNames = ";".join(indVarNames)
    usePenalty = ARCPY.GetParameterAsText(4)
    if usePenalty == 'true':
        usePenalty = "1"
    else:
        usePenalty = "0"

    coefTableIn = ARCPY.GetParameterAsText(5)
    coefTable, dbf = UTILS.returnTableName(coefTableIn)

    diagTableIn = ARCPY.GetParameterAsText(6)
    diagTable, dbf = UTILS.returnTableName(diagTableIn)

    #### Create R Command ####
    pyScript = SYS.argv[0]
    toolDir = OS.path.dirname(pyScript)
    rScript = OS.path.join(toolDir, "logitWithR.r")
    ARCPY.SetProgressor("default", "Executing R Script...")
    args = [
        "R", "--slave", "--vanilla", "--args", inputFC, outputFC, depVarName,
        indVarNames, usePenalty, coefTable, diagTable
    ]

    #### Uncomment Next Two Lines to Print/Create Command Line Args ####
    #cmd = RARC.createRCommand(args, rScript)
    #ARCPY.AddWarning(cmd)

    #### Execute Command ####
    scriptSource = open(rScript, 'rb')
    rCommand = SUB.Popen(args,
                         stdin=scriptSource,
                         stdout=SUB.PIPE,
                         stderr=SUB.PIPE,
                         shell=True)

    #### Print Result ####
    resString, errString = rCommand.communicate()

    #### Push Output to Message Window ####
    if errString and "Calculations Complete..." not in resString:
        ARCPY.AddError(errString)
    else:
        resOutString = RARC.printRMessages(resString)
        ARCPY.AddMessage(resOutString)

        #### Project the Data ####
        DM.DefineProjection(outputFC, inputFC)

        #### Create SSDO ####
        ssdo = SSDO.SSDataObject(outputFC)

        #### Display Symbology ####
        params = ARCPY.gp.GetParameterInfo()
        try:
            renderType = UTILS.renderType[ssdo.shapeType.upper()]
            if renderType == 0:
                renderLayerFile = "StdResidPoints.lyr"
            elif renderType == 1:
                renderLayerFile = "StdResidPolylines.lyr"
            else:
                renderLayerFile = "StdResidPolygons.lyr"
            fullRLF = OS.path.join(ARCPY.GetInstallInfo()['InstallDir'],
                                   "ArcToolbox", "Templates", "Layers",
                                   renderLayerFile)
            params[1].Symbology = fullRLF
        except:
            ARCPY.AddIDMessage("WARNING", 973)

        #### Print Coef Output Table ####
        try:
            rows = ARCPY.SearchCursor(coefTable)
        except:
            ARCPY.AddIDMessage("ERROR", 204)
            raise ERROR.ScriptError()

        labels = ["Variable", "Coef", "StdError", "Wald", "Prob"]
        header = "Logistic Regression Coefficient Table"
        res = [labels]
        for row in rows:
            rowRes = []
            for i, val in enumerate(labels):
                if i == 0:
                    rowRes.append(row.getValue(val))
                else:
                    rowRes.append(LOCALE.format("%0.6f", row.getValue(val)))
            res.append(rowRes)
        del rows

        coefTextTab = UTILS.outputTextTable(res, header=header)
        ARCPY.AddMessage("\n")
        ARCPY.AddMessage(coefTextTab)

        #### Add to TOC ####
        ARCPY.SetParameterAsText(5, coefTable)

        #### Print Diag Table (In Two Parts) ####
        try:
            rows = ARCPY.SearchCursor(diagTable)
        except:
            ARCPY.AddIDMessage("ERROR", 204)
            raise ERROR.ScriptError()

        labels = ["Diag_Name", "Diag_Value"]
        header = "Logistic Regression Diagnostic Table"
        resLab1 = []
        resVal1 = []
        resLab2 = []
        resVal2 = []
        c = 0
        for row in rows:
            for i, val in enumerate(labels):
                if i == 0:
                    cellVal = row.getValue(val)
                    if c <= 6:
                        resLab1.append(cellVal)
                    else:
                        resLab2.append(cellVal)
                else:
                    cellVal = LOCALE.format("%0.6f", row.getValue(val))
                    if c <= 6:
                        resVal1.append(cellVal)
                    else:
                        resVal2.append(cellVal)
            c += 1
        del rows

        diagTextTab1 = UTILS.outputTextTable([resLab1, resVal1], header=header)
        ARCPY.AddMessage("\n")
        ARCPY.AddMessage(diagTextTab1)
        ARCPY.AddMessage("\n")
        diagTextTab2 = UTILS.outputTextTable([resLab2, resVal2])
        ARCPY.AddMessage(diagTextTab2)
        ARCPY.AddMessage("\n")

        #### Add to TOC ####
        ARCPY.SetParameterAsText(6, diagTable)
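
The R call above follows a generic pattern: the script is fed to R on stdin while the tool inputs follow --args. A minimal, arcpy-free sketch of that pattern with hypothetical paths and arguments (not the tool itself):

import subprocess as SUB

rScript = r"C:\Tools\logitWithR.r"                       # hypothetical script path
args = ["R", "--slave", "--vanilla", "--args",
        r"C:\Data\in.shp", r"C:\Data\out.shp"]           # hypothetical arguments
scriptSource = open(rScript, 'rb')
proc = SUB.Popen(args, stdin = scriptSource,
                 stdout = SUB.PIPE, stderr = SUB.PIPE, shell = True)
resString, errString = proc.communicate()                # R output and errors
scriptSource.close()
print(resString)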
Example #40
def collectEvents(ssdo, outputFC):
    """This utility converts event data into weighted point data by
    dissolving all coincident points into unique points with a new count
    field that contains the number of original features at that
    location.

    INPUTS:
    ssdo (obj): instance of SSDataObject for the input feature class
    outputFC (str): path to the output feature class
    """

    #### Set Default Progressor for Neighborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Validate Output Workspace ####
    ERROR.checkOutputPath(outputFC)

    #### True Centroid Warning For Non-Point FCs ####
    if ssdo.shapeType.upper() != "POINT":
        ARCPY.AddIDMessage("WARNING", 1021)

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.inputFC, spatRef=ssdo.spatialRefString)

    #### Assure Enough Observations ####
    cnt = UTILS.getCount(ssdo.inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs=4)
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs=4)

    #### Process Any Bad Records Encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        if not ssdo.silentWarnings:
            ERROR.reportBadRecords(cnt,
                                   numBadRecs,
                                   badRecs,
                                   label=ssdo.oidName)

    #### Create k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(0.0, 0, "euclidean")

    #### Create Output Feature Class ####
    outPath, outName = OS.path.split(outputFC)
    try:
        DM.CreateFeatureclass(outPath, outName, "POINT", "", ssdo.mFlag,
                              ssdo.zFlag, ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Add Count Field ####
    countFieldNameOut = ARCPY.ValidateFieldName(countFieldName, outPath)
    UTILS.addEmptyField(outputFC, countFieldNameOut, "LONG")
    fieldList = ["SHAPE@", countFieldNameOut]

    #### Set Insert Cursor ####
    rowsOut = DA.InsertCursor(outputFC, fieldList)

    #### Set Progressor for Calculation ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84007), 0, N, 1)

    #### ID List to Search ####
    rowsIN = range(N)
    maxCount = 0
    numUnique = 0

    for row in rowsIN:
        #### Get Row Coords ####
        rowInfo = gaTable[row]
        x0, y0 = rowInfo[1]
        count = 1

        #### Search For Exact Coord Match ####
        gaSearch.search_by_idx(row)
        for nh in gaSearch:
            count += 1
            rowsIN.remove(nh.idx)
            ARCPY.SetProgressorPosition()

        #### Keep Track of Max Count ####
        maxCount = max([count, maxCount])

        #### Create Output Point ####
        pnt = (x0, y0, ssdo.defaultZ)

        #### Create and Populate New Feature ####
        rowResult = [pnt, count]
        rowsOut.insertRow(rowResult)
        numUnique += 1
        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    del rowsOut, gaTable

    return countFieldNameOut, maxCount, N, numUnique
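
A hedged usage sketch for collectEvents; the paths are hypothetical, and the SSDO and ARCPY aliases are assumed to be imported as in the other examples:

inputFC = r"C:\Data\analysis.gdb\Incidents"        # hypothetical input events
outputFC = r"C:\Data\analysis.gdb\IncidentCounts"  # hypothetical weighted output
ssdo = SSDO.SSDataObject(inputFC, templateFC = outputFC)
countField, maxCount, N, numUnique = collectEvents(ssdo, outputFC)
ARCPY.AddMessage("%i events collapsed into %i unique locations" % (N, numUnique))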
Example #41
    def createOutput(self, outputFC):
        """Creates an Output Feature Class with the Median Centers.

        INPUTS:
        outputFC (str): path to the output feature class
        """

        #### Validate Output Workspace ####
        ERROR.checkOutputPath(outputFC)

        #### Shorthand Attributes ####
        ssdo = self.ssdo
        caseField = self.caseField
        attFields = self.attFields

        #### Create Output Feature Class ####
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
        outPath, outName = OS.path.split(outputFC)

        try:
            DM.CreateFeatureclass(outPath, outName, "POINT", "", ssdo.mFlag, 
                                  ssdo.zFlag, ssdo.spatialRefString)
        except:
            ARCPY.AddIDMessage("ERROR", 210, outputFC)
            raise SystemExit()

        #### Add Field Names ####
        dataFieldNames = UTILS.getFieldNames(mdcFieldNames, outPath)
        shapeFieldNames = ["SHAPE@"]
        for fieldName in dataFieldNames:
            UTILS.addEmptyField(outputFC, fieldName, "DOUBLE")

        caseIsDate = False
        if caseField:
            fcCaseField = ssdo.allFields[caseField]
            validCaseName = UTILS.validQFieldName(fcCaseField, outPath)
            caseType = UTILS.convertType[fcCaseField.type]
            UTILS.addEmptyField(outputFC, validCaseName, caseType)
            dataFieldNames.append(validCaseName)
            if caseType.upper() == "DATE":
                caseIsDate = True

        if attFields:
            for attField in attFields:
                fcAttField = ssdo.allFields[attField]
                validAttName = UTILS.validQFieldName(fcAttField, outPath)
                if caseField:
                    if validCaseName == validAttName:
                        validAttName = ARCPY.GetIDMessage(84195)
                UTILS.addEmptyField(outputFC, validAttName, "DOUBLE") 
                dataFieldNames.append(validAttName)

        outShapeFileBool = UTILS.isShapeFile(outputFC)
            
        #### Add Median X, Y, Dim ####
        allFieldNames = shapeFieldNames + dataFieldNames
        rows = DA.InsertCursor(outputFC, allFieldNames)
        for case in self.caseKeys:

            #### Median Centers ####
            medX, medY = self.medianCenter[case]
            pnt = (medX, medY, ssdo.defaultZ)
            rowResult = [pnt, medX, medY]

            #### Set Case Field ####
            if caseField:
                caseValue = case.item()
                if caseIsDate:
                    caseValue = TUTILS.iso2DateTime(caseValue)
                rowResult.append(caseValue)

            #### Set Attribute Fields ####
            if attFields:
                for attInd, attField in enumerate(self.attFields):
                    medAtt = self.attCenter[case][attInd]
                    rowResult.append(medAtt)

            rows.insertRow(rowResult)
        
        #### Clean Up ####
        del rows

        #### Set Attribute ####
        self.outputFC = outputFC
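
The median center written above is the location minimizing total Euclidean distance to all features. A minimal sketch of one standard way to compute it (Weiszfeld's algorithm) on hypothetical coordinates; it illustrates the idea and is not necessarily the exact routine this class uses:

import numpy as NUM

def weiszfeldMedianCenter(xy, iterations = 100, tol = 1e-9):
    est = xy.mean(0)                                # start from the mean center
    for _ in range(iterations):
        d = NUM.sqrt(((xy - est)**2.0).sum(1))      # distances to current estimate
        d = NUM.where(d < tol, tol, d)              # guard against zero distances
        new = (xy / d[:, None]).sum(0) / (1.0 / d).sum()
        if NUM.sqrt(((new - est)**2.0).sum()) < tol:
            break
        est = new
    return est

pts = NUM.array([[0.0, 0.0], [10.0, 0.0], [0.0, 10.0], [1.0, 1.0]])
print(weiszfeldMedianCenter(pts))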
Example #42
def collectEvents(ssdo, outputFC):
    """This utility converts event data into weighted point data by
    dissolving all coincident points into unique points with a new count
    field that contains the number of original features at that
    location.

    INPUTS:
    ssdo (obj): instance of SSDataObject for the input feature class
    outputFC (str): path to the output feature class
    """

    #### Set Default Progressor for Neighborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Validate Output Workspace ####
    ERROR.checkOutputPath(outputFC)

    #### True Centroid Warning For Non-Point FCs ####
    if ssdo.shapeType.upper() != "POINT":
        ARCPY.AddIDMessage("WARNING", 1021)

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.inputFC, spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    cnt = UTILS.getCount(ssdo.inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 4)
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs = 4)

    #### Process Any Bad Records Encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        if not ssdo.silentWarnings:
            ERROR.reportBadRecords(cnt, numBadRecs, badRecs,
                                   label = ssdo.oidName)

    #### Create k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(0.0, 0, "euclidean")

    #### Create Output Feature Class ####
    outPath, outName = OS.path.split(outputFC)
    try:
        DM.CreateFeatureclass(outPath, outName, "POINT", "", ssdo.mFlag, 
                              ssdo.zFlag, ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Add Count Field ####
    countFieldNameOut = ARCPY.ValidateFieldName(countFieldName, outPath)
    UTILS.addEmptyField(outputFC, countFieldNameOut, "LONG")
    fieldList = ["SHAPE@", countFieldNameOut]

    #### Set Insert Cursor ####
    rowsOut = DA.InsertCursor(outputFC, fieldList)

    #### Set Progressor for Calculation ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84007), 0, N, 1)

    #### ID List to Search ####
    rowsIN = range(N)
    maxCount = 0
    numUnique = 0

    for row in rowsIN:
        #### Get Row Coords ####
        rowInfo = gaTable[row]
        x0, y0 = rowInfo[1]
        count = 1

        #### Search For Exact Coord Match ####
        gaSearch.search_by_idx(row)
        for nh in gaSearch:
            count += 1
            rowsIN.remove(nh.idx)
            ARCPY.SetProgressorPosition()

        #### Keep Track of Max Count ####
        maxCount = max([count, maxCount])
        
        #### Create Output Point ####
        pnt = (x0, y0, ssdo.defaultZ)

        #### Create and Populate New Feature ####
        rowResult = [pnt, count]
        rowsOut.insertRow(rowResult)
        numUnique += 1
        ARCPY.SetProgressorPosition()
    
    #### Clean Up ####
    del rowsOut, gaTable

    return countFieldNameOut, maxCount, N, numUnique
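
The routine above leans on the GA nearest-neighbor search to find exact coordinate matches. A hedged, arcpy-free sketch of the same idea on hypothetical coordinates, collapsing coincident events with a dictionary keyed on the (x, y) tuple:

import collections as COLL

events = [(1.0, 2.0), (1.0, 2.0), (3.5, 4.0), (1.0, 2.0), (3.5, 4.0)]  # hypothetical
counts = COLL.defaultdict(int)
for x, y in events:
    counts[(x, y)] += 1                 # one count per coincident event
numUnique = len(counts)                 # unique locations
maxCount = max(counts.values())         # largest number of coincident events
print(numUnique)
print(maxCount)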
Example #43
    def __init__(self,
                 inputFC,
                 outputFC=None,
                 caseField=None,
                 orientationOnly=False):

        #### Create SSDataObject ####
        ssdo = SSDO.SSDataObject(inputFC,
                                 templateFC=outputFC,
                                 useChordal=False)
        cnt = UTILS.getCount(inputFC)
        ERROR.errorNumberOfObs(cnt, minNumObs=1)
        fieldList = [ssdo.oidName, "SHAPE@"]
        caseIsString = False
        if caseField:
            fieldList.append(caseField)
            caseType = ssdo.allFields[caseField].type.upper()
            caseIsString = caseType == "STRING"

        #### Initialize Accounting Structures ####
        xyLenVals = {}
        sinCosVals = {}

        #### Open Search Cursor ####
        try:
            rows = DA.SearchCursor(inputFC, fieldList, "",
                                   ssdo.spatialRefString)
        except:
            ARCPY.AddIDMessage("ERROR", 204)
            raise SystemExit()

        #### Keep track of Invalid Fields ####
        badIDs = []
        badLengths = []
        badRecord = False
        negativeWeights = False

        #### Create Progressor ####
        ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84001), 0, cnt, 1)

        for row in rows:
            OID = row[0]
            shapeInfo = row[1]
            badRow = row.count(None)
            try:
                centroidInfo = shapeInfo.trueCentroid
                xVal = centroidInfo.X
                yVal = centroidInfo.Y
                length = float(shapeInfo.length)
                firstPoint = shapeInfo.firstPoint
                lastPoint = shapeInfo.lastPoint
                if firstPoint == lastPoint:
                    badLengths.append(OID)
                    badRow = True
                else:
                    firstX = float(firstPoint.X)
                    firstY = float(firstPoint.Y)
                    lastX = float(lastPoint.X)
                    lastY = float(lastPoint.Y)
            except:
                badRow = True

            #### Process Good Records ####
            if not badRow:
                #### Case Field ####
                caseVal = "ALL"
                if caseField:
                    caseVal = UTILS.caseValue2Print(row[2], caseIsString)

                #### Get Angle ####
                numer = lastX - firstX
                denom = lastY - firstY
                angle = UTILS.getAngle(numer, denom)

                #### Adjust for Orientation Only ####
                if orientationOnly:
                    angle2Degree = UTILS.convert2Degree(angle)
                    if angle2Degree < 180:
                        numer = firstX - lastX
                        denom = firstY - lastY
                        angle = UTILS.getAngle(numer, denom)

                sinVal = NUM.sin(angle)
                cosVal = NUM.cos(angle)

                xyLenVal = (xVal, yVal, length)
                sinCosVal = (sinVal, cosVal)

                try:
                    xyLenVals[caseVal].append(xyLenVal)
                    sinCosVals[caseVal].append(sinCosVal)
                except:
                    xyLenVals[caseVal] = [xyLenVal]
                    sinCosVals[caseVal] = [sinCosVal]

            else:
                #### Bad Record ####
                badRecord = True
                badIDs.append(OID)

            ARCPY.SetProgressorPosition()

        del rows

        #### Get Set of Bad IDs ####
        badIDs = list(set(badIDs))
        badIDs.sort()
        badIDs = [str(i) for i in badIDs]

        #### Process any bad records encountered ####
        bn = len(badIDs)
        if badRecord:
            err = ERROR.reportBadRecords(cnt, bn, badIDs, label=ssdo.oidName)

        #### Error For Not Enough Observations ####
        goodRecs = cnt - bn
        ERROR.errorNumberOfObs(goodRecs, minNumObs=1)

        #### Report Features With No Length ####
        badLengths = list(set(badLengths))
        badLengths.sort()
        badLengths = [str(i) for i in badLengths]
        numBadLengths = len(badLengths)
        if numBadLengths > 0:
            ERROR.reportBadLengths(cnt,
                                   numBadLengths,
                                   badLengths,
                                   label=ssdo.oidName)

        #### Set up for Bad Cases ####
        badCases = []
        cases = xyLenVals.keys()
        meanCenter = {}
        dm = {}

        #### Calculate Mean Center and Standard Distance ####
        for case in cases:
            xyLens = xyLenVals[case]
            numFeatures = len(xyLens)
            if numFeatures > 0:
                #### Mean Centers and Lengths ####
                xyLens = NUM.array(xyLens)
                meanX, meanY, meanL = NUM.mean(xyLens, 0)

                #### Sum Sin and Cos ####
                scVals = NUM.array(sinCosVals[case])
                sumSin, sumCos = NUM.sum(scVals, 0)

                #### Calculate Angle ####
                radianAngle = UTILS.getAngle(sumSin, sumCos)
                degreeAngle = UTILS.convert2Degree(radianAngle)

                #### Get Start and End Points ####
                halfMeanLen = meanL / 2.0
                endX = (halfMeanLen * NUM.sin(radianAngle)) + meanX
                startX = (2.0 * meanX) - endX
                endY = (halfMeanLen * NUM.cos(radianAngle)) + meanY
                startY = (2.0 * meanY) - endY
                unstandardized = NUM.sqrt(sumSin**2.0 + sumCos**2.0)
                circVar = 1.0 - (unstandardized / (numFeatures * 1.0))

                #### Re-adjust Angle Back towards North ####
                if orientationOnly:
                    degreeAngle = degreeAngle - 180.0
                    radianAngle = UTILS.convert2Radians(degreeAngle)

                #### Populate Results Structure ####
                meanCenter[case] = (meanX, meanY)
                dm[case] = [(startX, startY), (endX, endY), meanL, radianAngle,
                            degreeAngle, circVar]

        #### Sorted Case List ####
        caseKeys = dm.keys()
        caseKeys.sort()
        self.caseKeys = caseKeys

        #### Set Attributes ####
        self.ssdo = ssdo
        self.meanCenter = meanCenter
        self.dm = dm
        self.badCases = badCases
        self.inputFC = inputFC
        self.outputFC = outputFC
        self.caseField = caseField
        self.orientationOnly = orientationOnly
        self.caseIsString = caseIsString
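
The directional mean above comes from the summed sines and cosines of each line's angle from north, and the circular variance from the length of that resultant vector. A minimal NumPy sketch on hypothetical bearings; NUM.arctan2(sumSin, sumCos) plays the role of UTILS.getAngle here:

import numpy as NUM

angles = NUM.radians(NUM.array([10.0, 20.0, 30.0]))   # hypothetical bearings from north
sumSin = NUM.sin(angles).sum()
sumCos = NUM.cos(angles).sum()
radianAngle = NUM.arctan2(sumSin, sumCos)              # mean direction (radians)
resultant = NUM.sqrt(sumSin**2.0 + sumCos**2.0)
circVar = 1.0 - resultant / float(len(angles))         # 0 = aligned, 1 = dispersed
print(NUM.degrees(radianAngle))
print(circVar)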
Example #44
    def __init__(self,
                 inputFC,
                 templateFC=None,
                 explicitSpatialRef=None,
                 silentWarnings=False,
                 useChordal=True):
        #### Validate Input Feature Class ####
        ERROR.checkFC(inputFC)
        try:
            self.inPath, self.inName = OS.path.split(inputFC)
        except:
            self.inPath = None
            self.inName = inputFC

        #### Validate Template FC ####
        if templateFC != None:
            if ARCPY.Exists(templateFC) == False:
                templateFC = None

        #### ShapeFile Boolean ####
        self.shapeFileBool = False
        if self.inPath:
            self.shapeFileBool = UTILS.isShapeFile(inputFC)

            #### Create Feature Layer if LYR File ####
            path, ext = OS.path.splitext(inputFC)
            if ext.upper() == ".LYR":
                tempFC = "SSDO_FeatureLayer"
                DM.MakeFeatureLayer(inputFC, tempFC)
                inputFC = tempFC

        #### Describe Input ####
        self.info = ARCPY.Describe(inputFC)

        #### Assure Input are Features with OIDs ####
        if not self.info.hasOID:
            ARCPY.AddIDMessage("ERROR", 339, self.inName)
            raise SystemExit()

        #### Assign Describe Objects to Class Attributes ####
        self.inputFC = inputFC
        self.catPath = self.info.CatalogPath
        self.shapeType = self.info.ShapeType
        self.oidName = self.info.oidFieldName
        self.dataType = self.info.DataType
        self.shapeField = self.info.ShapeFieldName
        self.templateFC = templateFC
        self.hasM = self.info.HasM
        self.hasZ = self.info.HasZ
        self.silentWarnings = silentWarnings

        #### Set Initial Extent Depending on DataType ####
        if self.dataType in ["FeatureLayer", "Layer"]:
            try:
                tempInfo = ARCPY.Describe(self.catPath)
                extent = tempInfo.extent
            except:
                #### in_memory, SDE, NetCDF etc... ####
                extent = self.info.extent
            self.fidSet = self.info.FIDSet
            if self.fidSet == "":
                self.selectionSet = False
            else:
                self.selectionSet = True
        else:
            extent = self.info.extent
            self.fidSet = ""
            self.selectionSet = False
        self.extent = extent

        #### Set Spatial Reference ####
        inputSpatRef = self.info.SpatialReference
        inputSpatRefName = inputSpatRef.name
        if explicitSpatialRef:
            #### Explicitly Override Spatial Reference ####
            self.templateFC = None
            self.spatialRef = explicitSpatialRef
        else:
            #### 1. Feature Dataset, 2. Env Setting, 3. Input Hierarchy ####
            self.spatialRef = UTILS.returnOutputSpatialRef(inputSpatRef,
                                                           outputFC=templateFC)
        self.spatialRefString = UTILS.returnOutputSpatialString(
            self.spatialRef)
        self.spatialRefName = self.spatialRef.name
        self.spatialRefType = self.spatialRef.type

        #### Warn if Spatial Reference Changed ####
        if not silentWarnings:
            UTILS.compareSpatialRefNames(inputSpatRefName, self.spatialRefName)

        #### Check for Projection ####
        if self.spatialRefType.upper() != "PROJECTED":
            if self.spatialRefType.upper() == "GEOGRAPHIC":
                self.useChordal = useChordal
                if not explicitSpatialRef:
                    if self.useChordal:
                        ARCPY.AddIDMessage("WARNING", 1605)
                    else:
                        ARCPY.AddIDMessage("WARNING", 916)
            else:
                self.useChordal = False
                if not explicitSpatialRef:
                    ARCPY.AddIDMessage("WARNING", 916)
        else:
            self.useChordal = False

        #### Angular/Linear Unit Info ####
        self.distanceInfo = UTILS.DistanceInfo(
            self.spatialRef, useChordalDistances=self.useChordal)

        #### Create Composition and Accounting Structure ####
        self.fields = {}
        self.master2Order = {}
        self.order2Master = {}

        #### Obtain a Full List of Field Names/Type ####
        self.allFields = {}
        listFields = self.info.fields
        for field in listFields:
            name = field.name.upper()
            self.allFields[name] = FCField(field)

        #### Set Z and M Flags and Defaults ####
        zmInfo = UTILS.setZMFlagInfo(self.hasM, self.hasZ, self.spatialRef)
        self.zFlag, self.mFlag, self.defaultZ = zmInfo
        self.zBool = self.zFlag == "ENABLED"

        #### Render Type ####
        self.renderType = UTILS.renderType[self.shapeType.upper()]
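
A hedged usage sketch of the constructor above; the path is hypothetical and the SSDO/ARCPY aliases are assumed to be imported as in the other examples:

ssdo = SSDO.SSDataObject(r"C:\Data\analysis.gdb\Blocks")   # hypothetical feature class
ARCPY.AddMessage(ssdo.oidName)          # OID field name from Describe
ARCPY.AddMessage(ssdo.shapeType)        # e.g. Point, Polyline, Polygon
ARCPY.AddMessage(ssdo.spatialRefName)   # resolved output spatial reference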
Example #45
    def createOutput(self, outputFC):
        """Creates an Output Feature Class with the Standard Distances.

        INPUTS:
        outputFC (str): path to the output feature class
        """

        #### Validate Output Workspace ####
        ERROR.checkOutputPath(outputFC)

        #### Shorthand Attributes ####
        ssdo = self.ssdo
        caseField = self.caseField

        #### Increase Extent if not Projected ####
        if ssdo.spatialRefType != "Projected":
            sdValues = self.sd.values()
            if len(sdValues):
                maxRadius = max(sdValues)
                largerExtent = UTILS.increaseExtentByConstant(
                    ssdo.extent, constant=maxRadius)
                largerExtent = [LOCALE.str(i) for i in largerExtent]
                ARCPY.env.XYDomain = " ".join(largerExtent)

        #### Create Output Feature Class ####
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
        outPath, outName = OS.path.split(outputFC)

        try:
            DM.CreateFeatureclass(outPath, outName, "POLYGON", "", ssdo.mFlag,
                                  ssdo.zFlag, ssdo.spatialRefString)
        except:
            ARCPY.AddIDMessage("ERROR", 210, outputFC)
            raise SystemExit()

        #### Add Fields to Output FC ####
        dataFieldNames = UTILS.getFieldNames(sdFieldNames, outPath)
        shapeFieldNames = ["SHAPE@"]
        for fieldName in dataFieldNames:
            UTILS.addEmptyField(outputFC, fieldName, "DOUBLE")

        caseIsDate = False
        if caseField:
            fcCaseField = ssdo.allFields[caseField]
            validCaseName = UTILS.validQFieldName(fcCaseField, outPath)
            caseType = UTILS.convertType[fcCaseField.type]
            UTILS.addEmptyField(outputFC, validCaseName, caseType)
            dataFieldNames.append(validCaseName)
            if caseType.upper() == "DATE":
                caseIsDate = True

        #### Write Output ####
        badCaseRadians = []
        allFieldNames = shapeFieldNames + dataFieldNames
        rows = DA.InsertCursor(outputFC, allFieldNames)
        for case in self.caseKeys:

            #### Get Results ####
            xVal, yVal = self.meanCenter[case]
            radius = self.sd[case]

            #### Create Empty Polygon Geometry ####
            poly = ARCPY.Array()

            #### Check for Valid Radius ####
            radiusZero = UTILS.compareFloat(0.0, radius, rTol=.0000001)
            radiusNan = NUM.isnan(radius)
            radiusBool = radiusZero + radiusNan
            if radiusBool:
                badRadian = 6
                badCase = UTILS.caseValue2Print(case, self.caseIsString)
                badCaseRadians.append(badCase)
            else:
                badRadian = 0

                #### Calculate a Point For Each ####
                #### Degree in Circle Polygon ####
                for degree in NUM.arange(0, 360):
                    try:
                        radians = NUM.pi / 180.0 * degree
                        pntX = xVal + (radius * NUM.cos(radians))
                        pntY = yVal + (radius * NUM.sin(radians))
                        pnt = ARCPY.Point(pntX, pntY, ssdo.defaultZ)
                        poly.add(pnt)
                    except:
                        badRadian += 1
                        if badRadian == 6:
                            badCase = UTILS.caseValue2Print(
                                case, self.caseIsString)
                            badCaseRadians.append(badCase)
                            break

            if badRadian < 6:
                #### Create and Populate New Feature ####
                poly = ARCPY.Polygon(poly, None, True)
                rowResult = [poly, xVal, yVal, radius]

                if caseField:
                    caseValue = case.item()
                    if caseIsDate:
                        caseValue = TUTILS.iso2DateTime(caseValue)
                    rowResult.append(caseValue)
                rows.insertRow(rowResult)

        #### Report Bad Cases Due to Geometry (coincident pts) ####
        nBadRadians = len(badCaseRadians)
        if nBadRadians:
            if caseField:
                badCaseRadians = " ".join(badCaseRadians)
                ARCPY.AddIDMessage("WARNING", 1011, caseField, badCaseRadians)
            else:
                ARCPY.AddIDMessage("ERROR", 978)
                raise SystemExit()

        #### Return Extent to Normal if not Projected ####
        if ssdo.spatialRefType != "Projected":
            ARCPY.env.XYDomain = None

        #### Clean Up ####
        del rows

        #### Set Attribute ####
        self.outputFC = outputFC
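
The circle written for each case above is just one vertex per degree around the standard distance radius. A minimal sketch of that construction with a hypothetical center and radius, using plain tuples instead of an arcpy Array:

import numpy as NUM

xVal, yVal, radius = 1000.0, 2000.0, 250.0          # hypothetical center and radius
vertices = []
for degree in NUM.arange(0, 360):
    radians = NUM.pi / 180.0 * degree
    pntX = xVal + (radius * NUM.cos(radians))
    pntY = yVal + (radius * NUM.sin(radians))
    vertices.append((pntX, pntY))
print(len(vertices))                                # 360 vertices for the circle polygon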
Example #46
    def createOutput(self, outputFC):
        #### Shorthand Attributes ####
        ssdoBase = self.ssdoBase
        ssdoCand = self.ssdoCand

        #### Validate Output Workspace ####
        ARCPY.env.overwriteOutput = True
        ERROR.checkOutputPath(outputFC)

        #### Create Output Feature Class ####
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
        outPath, outName = OS.path.split(outputFC)

        try:
            DM.CreateFeatureclass(outPath, outName, "POINT", "", ssdoBase.mFlag, 
                                  ssdoBase.zFlag, ssdoBase.spatialRefString)
        except:
            ARCPY.AddIDMessage("ERROR", 210, outputFC)
            raise SystemExit()

        #### Add Null Value Flag ####
        outIsShapeFile = UTILS.isShapeFile(outputFC)
        setNullable = outIsShapeFile == False

        #### Add Shape/ID Field Names ####
        matchID, candID = outputIDFieldNames
        outFieldNames = ["SHAPE@"] + outputIDFieldNames
        UTILS.addEmptyField(outputFC, matchID, "LONG", nullable = True)
        UTILS.addEmptyField(outputFC, candID, "LONG", nullable = True)

        #### Add Append Fields ####
        lenAppend = len(self.appendFields) 
        appendIsDate = []
        in2OutFieldNames = {}
        if lenAppend:
            for fieldName in self.appendFields:
                fcField = ssdoCand.allFields[fieldName]
                fieldType = UTILS.convertType[fcField.type]
                fieldOutName = UTILS.validQFieldName(fcField, outPath)
                in2OutFieldNames[fieldName] = fieldOutName
                if fieldType == "DATE":
                    appendIsDate.append(fieldName)
                UTILS.addEmptyField(outputFC, fieldOutName, fieldType,
                                    alias = fcField.alias)
                outFieldNames.append(fieldOutName)

        #### Add Analysis Fields ####
        for fieldName in self.fieldNames:
            fcField = ssdoBase.allFields[fieldName]
            fieldType = UTILS.convertType[fcField.type]
            fieldOutName = UTILS.validQFieldName(fcField, outPath)
            in2OutFieldNames[fieldName] = fieldOutName
            UTILS.addEmptyField(outputFC, fieldOutName, fieldType,
                                alias = fcField.alias)
            outFieldNames.append(fieldOutName)

        dataFieldNames = matchFieldInfo[self.similarType]
        dataFieldInfo = outputFieldInfo[self.matchMethod]
        baseValues = []
        for fieldName in dataFieldNames:
            outAlias, outType, baseValue = dataFieldInfo[fieldName]
            UTILS.addEmptyField(outputFC, fieldName, outType, 
                                alias = outAlias, 
                                nullable = setNullable) 
            outFieldNames.append(fieldName)
            baseValues.append(baseValue)

        #### Step Progress ####
        featureCount = ssdoBase.numObs + self.numResults
        if self.similarType == "BOTH":
            featureCount += self.numResults
        ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84003), 0,
                                                 featureCount, 1)
        #### Get Insert Cursor ####
        rows = DA.InsertCursor(outputFC, outFieldNames)
        
        #### Set Base Data ####
        useShapeNull = outIsShapeFile
        if useShapeNull:
            nullIntValue = UTILS.shpFileNull['LONG']
        else:
            nullIntValue = None

        #### Set Base Null For Append ####
        appendNull = {}
        for fieldName in self.appendFields:
            if fieldName not in ssdoBase.fields:
                if useShapeNull:
                    outType = ssdoCand.fields[fieldName].type
                    outNullValue = UTILS.shpFileNull[outType]
                else:
                    outNullValue = None
                appendNull[fieldName] = outNullValue

        #### Add Base Data ####
        for orderID in xrange(ssdoBase.numObs):
            x,y = ssdoBase.xyCoords[orderID]
            pnt = (x, y, ssdoBase.defaultZ)

            #### Insert Shape, Match_ID and NULL (Cand_ID) ####
            rowRes = [pnt, ssdoBase.order2Master[orderID], nullIntValue]

            #### Add Append Fields ####
            for fieldName in self.appendFields:
                if fieldName in appendNull:
                    rowRes.append(appendNull[fieldName])
                else:
                    value = ssdoBase.fields[fieldName].data[orderID]
                    if fieldName in appendIsDate:
                        value = TUTILS.iso2DateTime(value)
                    rowRes.append(value)

            #### Add Analysis Fields ####
            for fieldName in self.fieldNames:
                rowRes.append(ssdoBase.fields[fieldName].data[orderID])

            #### Add Null Base Values ####
            rowRes += baseValues

            rows.insertRow(rowRes)
            ARCPY.SetProgressorPosition()
        
        if self.similarType in ['MOST_SIMILAR', 'BOTH']:
            #### First Add Similar Results ####
            for ind, orderID in enumerate(self.topIDs):
                x,y = ssdoCand.xyCoords[orderID]
                pnt = (x, y, ssdoBase.defaultZ)

                #### Insert Shape, NULL (Match_ID) and Cand_ID ####
                rowRes = [pnt, nullIntValue, ssdoCand.order2Master[orderID]]

                #### Add Append Fields ####
                for fieldName in self.appendFields:
                    rowRes.append(ssdoCand.fields[fieldName].data[orderID])

                #### Add Analysis Fields ####
                for fieldName in self.fieldNames:
                    rowRes.append(ssdoCand.fields[fieldName].data[orderID])

                #### Add Results ####
                rank = ind + 1
                ss = self.totalDist[orderID]

                if self.similarType == 'BOTH':
                    rowRes += [rank, nullIntValue, ss, rank]
                else:
                    rowRes += [rank, ss, rank]

                rows.insertRow(rowRes)
                ARCPY.SetProgressorPosition()

        if self.similarType in ['LEAST_SIMILAR', 'BOTH']:
            #### Add Least Similar #### 
            for ind, orderID in enumerate(self.botIDs):
                x,y = ssdoCand.xyCoords[orderID]
                pnt = (x, y, ssdoBase.defaultZ)

                #### Insert Shape, NULL (Match_ID) and Cand_ID ####
                rowRes = [pnt, nullIntValue, ssdoCand.order2Master[orderID]]

                #### Add Append Fields ####
                for fieldName in self.appendFields:
                    rowRes.append(ssdoCand.fields[fieldName].data[orderID])

                #### Add Analysis Fields ####
                for fieldName in self.fieldNames:
                    rowRes.append(ssdoCand.fields[fieldName].data[orderID])

                #### Add Results ####
                rank = ind + 1
                labRank = rank * -1
                ss = self.totalDist[orderID]

                if self.similarType == 'BOTH':
                    rowRes += [nullIntValue, rank, ss, labRank]
                else:
                    rowRes += [rank, ss, labRank]

                rows.insertRow(rowRes)
                ARCPY.SetProgressorPosition()

        #### Clean Up ####
        del rows

        #### Symbology ####
        params = ARCPY.gp.GetParameterInfo()
        try:
            renderKey = (self.similarType, 0)
            renderLayerFile = outputRenderInfo[renderKey]
            templateDir = OS.path.dirname(OS.path.dirname(SYS.argv[0]))
            fullRLF = OS.path.join(templateDir, "Templates",
                                   "Layers", renderLayerFile)
            params[2].Symbology = fullRLF
        except:
            ARCPY.AddIDMessage("WARNING", 973)