def initialize(self):
    """Populates the instance of the Spatial Statistics Data Object
    (SSDataObject) and resolves a default distance threshold if none
    given.

    Sets: self.y, self.numObs, self.maxSet, self.threshold,
    self.thresholdStr, self.master2Order, self.swmFileBool
    """
    #### Shorthand Attributes ####
    ssdo = self.ssdo
    varName = self.varName
    concept = self.concept
    threshold = self.threshold
    exponent = self.exponent
    wType = self.wType
    rowStandard = self.rowStandard
    weightsFile = self.weightsFile
    swmFileBool = self.swmFileBool
    masterField = ssdo.masterField
    field = ssdo.fields[varName]
    self.y = field.returnDouble()
    self.numObs = ssdo.numObs
    maxSet = False

    #### Distance Threshold (wTypes 0/1/7 are distance-based) ####
    if wType in [0, 1, 7]:
        #### BUG FIX: identity test for None (was '== None') ####
        if threshold is None:
            threshold, avgDist = WU.createThresholdDist(ssdo,
                                                        concept=concept)

        #### Assures that the Threshold is Appropriate ####
        gaExtent = UTILS.get92Extent(ssdo.extent)
        fixed = (wType == 1)
        threshold, maxSet = WU.checkDistanceThreshold(ssdo, threshold,
                                                      weightType=wType)

        #### If the Threshold is Set to the Max ####
        #### Set to Zero for Script Logic ####
        if maxSet:
            #### All Locations are Related ####
            if self.numObs > 500:
                ARCPY.AddIDMessage("Warning", 717)

        self.thresholdStr = ssdo.distanceInfo.printDistance(threshold)
    else:
        self.thresholdStr = "None"

    #### Set Attributes ####
    self.maxSet = maxSet
    self.threshold = threshold
    self.master2Order = ssdo.master2Order
    self.swmFileBool = swmFileBool
def initialize(self):
    """Populates the instance of the Spatial Statistics Data Object
    (SSDataObject) and resolves a default distance threshold if none
    given.

    Sets: self.y, self.numObs, self.maxSet, self.threshold,
    self.thresholdStr, self.master2Order, self.swmFileBool
    """
    #### Shorthand Attributes ####
    ssdo = self.ssdo
    varName = self.varName
    concept = self.concept
    threshold = self.threshold
    exponent = self.exponent
    wType = self.wType
    rowStandard = self.rowStandard
    weightsFile = self.weightsFile
    swmFileBool = self.swmFileBool
    masterField = ssdo.masterField
    field = ssdo.fields[varName]
    self.y = field.returnDouble()
    self.numObs = ssdo.numObs
    maxSet = False

    #### Distance Threshold (wTypes 0/1/7 are distance-based) ####
    if wType in [0, 1, 7]:
        #### BUG FIX: identity test for None (was '== None') ####
        if threshold is None:
            threshold, avgDist = WU.createThresholdDist(ssdo,
                                                        concept=concept)

        #### Assures that the Threshold is Appropriate ####
        gaExtent = UTILS.get92Extent(ssdo.extent)
        fixed = (wType == 1)
        threshold, maxSet = WU.checkDistanceThreshold(ssdo, threshold,
                                                      weightType=wType)

        #### If the Threshold is Set to the Max ####
        #### Set to Zero for Script Logic ####
        if maxSet:
            #### All Locations are Related ####
            if self.numObs > 500:
                ARCPY.AddIDMessage("Warning", 717)

        self.thresholdStr = ssdo.distanceInfo.printDistance(threshold)
    else:
        self.thresholdStr = "None"

    #### Set Attributes ####
    self.maxSet = maxSet
    self.threshold = threshold
    self.master2Order = ssdo.master2Order
    self.swmFileBool = swmFileBool
def poly2Weights(ssdo, contiguityType="ROOK", rowStandard=True):
    """Uses GP Polygon Neighbor Tool to construct contiguity relationships
    and stores them in PySAL Sparse Spatial Weights class.

    INPUTS:
    ssdo (class): instance of SSDataObject [1]
    contiguityType {str, ROOK}: ROOK or QUEEN contiguity
    rowStandard {bool, True}: whether to row standardize the spatial weights

    RETURN: PYSAL.W spatial weights object

    NOTES:
    (1) Data must already be obtained using ssdo.obtainData()
        or ssdo.obtainDataGA()
    """
    #### Removed unused local 'weights = {}' ####
    neighbors = {}

    #### Translate Master IDs to Zero-Based Order IDs ####
    polyNeighDict = WU.polygonNeighborDict(ssdo.inputFC, ssdo.masterField,
                                           contiguityType=contiguityType)
    for masterID, neighIDs in UTILS.iteritems(polyNeighDict):
        orderID = ssdo.master2Order[masterID]
        neighbors[orderID] = [ssdo.master2Order[i] for i in neighIDs]

    #### Binary Contiguity Weights; Optionally Row-Standardized ####
    w = PYSAL.W(neighbors)
    if rowStandard:
        w.transform = 'R'

    return w
def poly2Weights(ssdo, contiguityType="ROOK", rowStandard=True):
    """Uses GP Polygon Neighbor Tool to construct contiguity relationships
    and stores them in PySAL Sparse Spatial Weights class.

    INPUTS:
    ssdo (class): instance of SSDataObject [1]
    contiguityType {str, ROOK}: ROOK or QUEEN contiguity
    rowStandard {bool, True}: whether to row standardize the spatial weights

    RETURN: PYSAL.W spatial weights object

    NOTES:
    (1) Data must already be obtained using ssdo.obtainData()
        or ssdo.obtainDataGA()
    """
    #### Removed unused local 'weights = {}' ####
    neighbors = {}

    #### Translate Master IDs to Zero-Based Order IDs ####
    polyNeighDict = WU.polygonNeighborDict(ssdo.inputFC, ssdo.masterField,
                                           contiguityType=contiguityType)
    for masterID, neighIDs in UTILS.iteritems(polyNeighDict):
        orderID = ssdo.master2Order[masterID]
        neighbors[orderID] = [ssdo.master2Order[i] for i in neighIDs]

    #### Binary Contiguity Weights; Optionally Row-Standardized ####
    w = PYSAL.W(neighbors)
    if rowStandard:
        w.transform = 'R'

    return w
def createOutput(self, rowStandard=False):
    """Write Kernel-based Weights to File (KWT or SWM format)."""
    ARCPY.SetProgressor("default",
                        "Writing Spatial Weights to Output File...")

    #### Shorthand Attributes ####
    dataObj = self.ssdo
    uniqueIDField = self.idField
    wObj = self.weightObj
    outPath = self.outputFile

    #### Get File Name Without Extension ####
    baseName = dataObj.inName.rsplit('.', 1)[0]

    if self.outputExt == EXTENSIONS[0]:
        #### KWT File: Delegate Formatting to the PySAL Writer ####
        writer = PYSAL.open(outPath, 'w')
        writer.shpName = baseName
        if uniqueIDField:
            writer.varName = uniqueIDField
        writer.write(wObj)
        writer.close()
    else:
        #### SWM File: One Entry Per Master ID, in Sorted Order ####
        masterField = uniqueIDField if uniqueIDField else 'UNKNOWN'
        swmWriter = WU.SWMWriter(outPath, masterField,
                                 dataObj.spatialRefName, wObj.n,
                                 rowStandard)
        for key in sorted(wObj.neighbors.keys()):
            swmWriter.swm.writeEntry(key, wObj.neighbors[key],
                                     wObj.weights[key])
        swmWriter.close()
def outputResults(self):
    """Creates output feature class for Local Gi*.

    RETURN: (tuple) names of the first two output fields actually used
            (Gi z-score field, p-value field).
    """
    #### Prepare Derived Variables for Output Feature Class ####
    outPath, outName = OS.path.split(self.outputFC)
    fieldOrder = UTILS.getFieldNames(giFieldNames, outPath)
    fieldData = [self.gi, self.pVals]
    fieldTypes = ["DOUBLE", "DOUBLE"]

    #### Add Pseudo-P Field ####
    if self.permutations:
        fieldOrder.append(giPseudoFieldName)
        fieldData.append(self.pseudoPVals)
        fieldTypes.append("DOUBLE")

    #### Add Gi Bin Field ####
    fieldOrder.append(giBinFieldName)
    fieldData.append(self.giBins)
    fieldTypes.append("LONG")

    #### Create Alias Field Names ####
    rowStandard = False
    if self.wType == 8:
        addString = OS.path.basename(self.weightsFile)
    elif self.wType in [0, 1, 7]:
        #### Distance-based: encode the threshold in the alias ####
        if self.maxSet:
            addString = "0"
        else:
            addString = str(int(self.threshold))
    else:
        addString = None

    aliasList = WU.createSpatialFieldAliases(fieldOrder,
                                             addString=addString,
                                             wType=self.wType,
                                             exponent=self.exponent,
                                             rowStandard=rowStandard)
    if self.applyFDR:
        #### Last field is the Gi Bin; flag FDR correction in alias ####
        aliasList[-1] += "_FDR"

    #### Create/Populate Dictionary of Candidate Fields ####
    candidateFields = {}
    for fieldInd, fieldName in enumerate(fieldOrder):
        fieldType = fieldTypes[fieldInd]
        candidateField = SSDO.CandidateField(fieldName, fieldType,
                                             fieldData[fieldInd],
                                             alias=aliasList[fieldInd])
        candidateFields[fieldName] = candidateField

    #### Input Fields to Copy to Output FC ####
    appendFields = [i for i in self.fieldNames]

    #### Add Date-Time Field If Applicable ####
    if self.swmFileBool:
        if self.swm.wType == 9:
            #### 'in' replaces has_key: valid on Python 2 and 3 ####
            if self.swm.timeField.upper() in self.ssdo.allFields:
                appendFields.insert(0, self.swm.timeField.upper())

    #### Write Data to Output Feature Class ####
    self.ssdo.output2NewFC(self.outputFC, candidateFields,
                           appendFields=appendFields,
                           fieldOrder=fieldOrder)

    return fieldOrder[0], fieldOrder[1]
def swm2Weights(swmFile, master2Order=None):
    """Convert an ArcGIS SWM file into a PySAL W spatial weights object.

    INPUTS:
    swmFile (str): path to the SWM file
    master2Order {dict, None}: master ID -> zero-based order ID mapping;
        when None the SWM IDs are used directly.

    RETURN: W spatial weights object (with _varName set to the SWM's
            master field).
    """
    swm = WU.SWMReader(swmFile)
    numObs = swm.numObs
    adjust = False

    if master2Order and len(master2Order) < numObs:
        #### BUG FIX: adjacent literals previously concatenated with no
        #### separating space ("...spatialweights!") ####
        msg = ("The spatial attributes have fewer entries than spatial "
               "weights! Weights will be adjusted dynamically...")
        ARCPY.AddWarning(msg)
        adjust = True

    neighs = {}
    w = {}
    rowStandard = swm.rowStandard
    for i in range(numObs):
        masterID, nn, nhsTemp, weightsTemp, sumUnstandard = \
            swm.swm.readEntry()
        if master2Order is None:
            #### No adjustment needed when converting directly ####
            orderID = masterID
            nhIDs = nhsTemp
            weights = weightsTemp
        elif masterID in master2Order:
            orderID = master2Order[masterID]
            if not adjust:
                nhIDs = [master2Order[nh] for nh in nhsTemp]
                weights = weightsTemp
            else:
                #### Restandardize Due to Subset/Select ####
                nhIDs = []
                weights = []
                if nn:
                    #### BUG FIX: loop variable renamed j so it no
                    #### longer shadows the outer entry index i ####
                    for j in range(nn):
                        nh = nhsTemp[j]
                        if nh in master2Order:
                            nhOrder = master2Order[nh]
                            nhIDs.append(nhOrder)
                            nhWeight = weightsTemp[j]
                            if rowStandard:
                                #### Unstandardize if Necessary ####
                                nhWeight = nhWeight * sumUnstandard[0]
                            weights.append(nhWeight)
                #### Re-Standardize ####
                if nhIDs:
                    weights = NUM.array(weights)
                    if rowStandard:
                        weights = (1.0 / weights.sum()) * weights
        else:
            #### BUG FIX: skip features outside the selection instead of
            #### re-adding the previous iteration's stale orderID data ####
            continue

        #### Add To Dict Structures ####
        neighs[orderID] = nhIDs
        w[orderID] = weights

    swm.close()
    wobj = W(neighs, w)
    wobj._varName = swm.masterField
    return wobj
def createOutput(self, rowStandard=False):
    """Write Distance-based Weights to File (GAL, GWT or SWM format)."""
    ARCPY.SetProgressor("default",
                        "Writing Spatial Weights to Output File...")

    #### Shorthand Attributes ####
    ssdo = self.ssdo
    idField = self.idField
    weightObj = self.weightObj
    outputFile = self.outputFile
    outputExt = self.outputExt

    #### Get File Name Without Extension ####
    fileName = ssdo.inName.rsplit('.', 1)[0]

    if outputExt == EXTENSIONS[0]:
        #### GAL File ####
        outputWriter = open(outputFile, 'w')
        #### Write header in the first line ####
        if not idField:
            header = "%s\n" % weightObj.n
        else:
            header = "%s %s %s %s\n" % (0, weightObj.n, idField, 'UNKNOWN')
        outputWriter.write(header)
        #### Write content: id/count line, then the neighbor id line ####
        #### BUG FIX: sorted() instead of dict.keys().sort(), which
        #### fails on Python 3 and was inconsistent with the sibling
        #### kernel-weights writer ####
        for featID in sorted(weightObj.neighbors.keys()):
            neighbors = weightObj.neighbors[featID]
            outputWriter.write("%s %s\n" % (featID, len(neighbors)))
            outputWriter.write("%s\n" %
                               (" ".join([str(nbr) for nbr in neighbors])))
        outputWriter.close()
    elif outputExt == EXTENSIONS[1]:
        #### GWT File: Delegate to the PySAL Writer ####
        outputWriter = PYSAL.open(outputFile, 'w')
        outputWriter.shpName = fileName
        if idField:
            outputWriter.varName = idField
        outputWriter.write(weightObj)
        outputWriter.close()
    else:
        #### SWM File ####
        masterField = idField if idField else 'UNKNOWN'
        swmWriter = WU.SWMWriter(outputFile, masterField,
                                 ssdo.spatialRefName, weightObj.n,
                                 rowStandard)
        for key in sorted(weightObj.neighbors.keys()):
            swmWriter.swm.writeEntry(key, weightObj.neighbors[key],
                                     weightObj.weights[key])
        swmWriter.close()
def distance2Weights(ssdo, neighborType=1, distanceBand=0.0, numNeighs=0,
                     distanceType="euclidean", exponent=1.0,
                     rowStandard=True, includeSelf=False):
    """Uses ArcGIS Neighborhood Searching Structure to create a PySAL
    Sparse Spatial Weights Matrix.

    INPUTS:
    ssdo (class): instance of SSDataObject [1]
    neighborType {int, 1}: 0 = inverse distance, 1 = fixed distance,
                           2 = k-nearest-neighbors, 3 = delaunay
    distanceBand {float, 0.0}: return all neighbors within this distance
                               for inverse/fixed distance
    numNeighs {int, 0}: number of neighbors for k-nearest-neighbor, can
                        also be used to set a minimum number of neighbors
                        for inverse/fixed distance
    distanceType {str, euclidean}: manhattan or euclidean distance [2]
    exponent {float, 1.0}: distance decay factor for inverse distance
    rowStandard {bool, True}: whether to row standardize the spatial
                              weights
    includeSelf {bool, False}: whether to return self as a neighbor

    NOTES:
    (1) Data must already be obtained using ssdo.obtainDataGA()
    (2) Chordal Distance is used for GCS Data
    """
    neighborIDs = {}
    neighborWts = {}
    searcher = GAPY.ga_nsearch(ssdo.gaTable)

    if neighborType == 3:
        #### Delaunay Triangulation Neighborhood ####
        searcher.init_delaunay()
        neighSearch = ARC._ss.NeighborWeights(ssdo.gaTable, searcher,
                                              weight_type=1)
    else:
        #### Distance / k-Nearest Neighborhood ####
        if neighborType == 2:
            #### KNN: ignore the band, force fixed-distance weighting ####
            distanceBand = 0.0
            weightType = 1
        else:
            weightType = neighborType
        concept, gaConcept = WU.validateDistanceMethod(distanceType.upper(),
                                                       ssdo.spatialRef)
        searcher.init_nearest(distanceBand, numNeighs, gaConcept)
        neighSearch = ARC._ss.NeighborWeights(ssdo.gaTable, searcher,
                                              weight_type=weightType,
                                              exponent=exponent,
                                              include_self=includeSelf)

    #### Harvest Neighbors/Weights Per Feature ####
    for orderID in range(len(neighSearch)):
        ids, wts = neighSearch[orderID]
        neighborIDs[orderID] = ids
        neighborWts[orderID] = wts

    result = PYSAL.W(neighborIDs, neighborWts)
    if rowStandard:
        result.transform = 'R'
    return result
def createOutput(self, rowStandard=False):
    """Write New Weights File (GAL, GWT, KWT or SWM format)."""
    ARCPY.SetProgressor("default",
                        "Writing new spatial weights file as output...")

    #### Shorthand Attributes ####
    ssdo = self.ssdo
    weightObj = self.weightObj
    inputIDField = self.inputIDField
    outputFile = self.outputFile
    outputExt = self.outputExt

    #### Write WeightObj to New Weights File ####
    uniqueID = weightObj._varName
    if not uniqueID:
        uniqueID = inputIDField

    if outputExt == EXTENSIONS[0]:
        #### GAL File ####
        outputWriter = open(outputFile, 'w')
        header = "%s %s %s %s\n" % \
            (0, weightObj.n, uniqueID, 'UNKNOWN')
        outputWriter.write(header)
        #### Renamed loop var: 'id' shadowed the builtin ####
        for featID in sorted(weightObj.neighbors.keys()):
            neighbors = weightObj.neighbors[featID]
            outputWriter.write("%s %s\n" % (featID, len(neighbors)))
            outputWriter.write("%s\n" %
                               (" ".join([str(nbr) for nbr in neighbors])))
        outputWriter.close()
    elif outputExt == EXTENSIONS[1] or outputExt == EXTENSIONS[2]:
        #### GWT, KWT: Delegate to the PySAL Writer ####
        outputWriter = PYSAL.open(outputFile, 'w')
        outputWriter.varName = uniqueID
        outputWriter.write(weightObj)
        outputWriter.close()
    else:
        #### SWM ####
        swmWriter = WU.SWMWriter(outputFile, uniqueID,
                                 ssdo.spatialRefName if ssdo else '#',
                                 weightObj.n, rowStandard)
        #### BUG FIX: sorted() instead of dict.keys().sort(), which
        #### fails on Python 3 and was inconsistent with the GAL
        #### branch above ####
        for key in sorted(weightObj.neighbors.keys()):
            swmWriter.swm.writeEntry(key, weightObj.neighbors[key],
                                     weightObj.weights[key])
        swmWriter.close()
def __init__(self, ssdo, dependentVar, independentVars, weightsFile,
             outputReportFile=None, outputTable=None, maxIndVars=5,
             minIndVars=1, minR2=.5, maxCoef=.01, maxVIF=5.0, minJB=.1,
             minMI=.1):
    """Initialize the model-search run: bind parameters, resolve the
    spatial weights source, and kick off model evaluation."""
    ARCPY.env.overwriteOutput = True

    #### Set Initial Attributes ####
    #### (must be first: locals() here holds exactly the arguments) ####
    UTILS.assignClassAttr(self, locals())
    self.masterField = self.ssdo.masterField
    self.warnedTProb = False

    #### Set Boolean For Passing All Moran's I ####
    self.allMIPass = UTILS.compareFloat(0.0, self.minMI, rTol=.00000001)

    #### Resolve the Spatial Weights Source ####
    if not weightsFile:
        #### No Weights File Provided: Use 8 Nearest Neighbors ####
        if ssdo.numObs <= 9:
            nn = ssdo.numObs - 2
            ARCPY.AddIDMessage("WARNING", 1500, 8, nn)
        else:
            nn = 8
        self.weightsType = "GA"
        gaSearch = GAPY.ga_nsearch(self.ssdo.gaTable)
        gaSearch.init_nearest(0.0, nn, "euclidean")
        self.weightsMatrix = gaSearch
    elif weightsFile.split(".")[-1].lower() == "swm":
        #### SWM File: Keep the Path, Read Lazily Downstream ####
        self.weightsType = "SWM"
        self.weightsMatrix = self.weightsFile
    else:
        #### Text Weights (GWT): Materialize the Dictionary Now ####
        self.weightsType = "GWT"
        self.weightsMatrix = WU.buildTextWeightDict(
            weightsFile, self.ssdo.master2Order)

    #### Initialize Data ####
    self.runModels()
def buildWeights(self):
    """Performs Contiguity-based Weights Creation.

    Sets self.weightObj (a W object, possibly higher-order).
    """
    ARCPY.SetProgressor("default", "Constructing spatial weights object...")

    #### Shorthand Attributes ####
    ssdo = self.ssdo
    isLowOrder = self.isLowOrder
    weightOrder = self.weightOrder

    #### Get Neighbor Dictionary for All Polygons ####
    master2Order = ssdo.master2Order
    polyNeighborDict = WU.polygonNeighborDict(self.inputFC,
                                              self.masterField,
                                              contiguityType=self.weightType)

    #### Assign empty list to polygons without neighbors ####
    if ssdo.numObs > len(polyNeighborDict):
        for masterKey in master2Order.keys():
            #### 'not in' replaces has_key: valid on Python 2 and 3 ####
            if masterKey not in polyNeighborDict:
                polyNeighborDict[masterKey] = []

    #### Convert DefaultDict to Real Dict, Keyed by Order ID ####
    if not self.idField:
        polyNeighborCopy = {}
        for key in polyNeighborDict.keys():
            polyNeighborCopy[master2Order[key]] = []
            for item in polyNeighborDict[key]:
                polyNeighborCopy[master2Order[key]].append(
                    master2Order[item])
        polyNeighborDict = polyNeighborCopy

    #### Create a PySAL W Object ####
    weightObj = W(polyNeighborDict)

    #### Building up Lower Order Spatial Weights ####
    if weightOrder > 1:
        ARCPY.SetProgressor("default",
                            "Building up Lower Order Spatial Weights...")
        origWeight = weightObj
        weightObj = PYSAL.higher_order(weightObj, weightOrder)
        if isLowOrder:
            #### Union in every order from weightOrder-1 down to 1 ####
            for order in xrange(weightOrder - 1, 1, -1):
                lowOrderW = PYSAL.higher_order(origWeight, order)
                weightObj = PYSAL.w_union(weightObj, lowOrderW)
            weightObj = PYSAL.w_union(weightObj, origWeight)

    #### Save weightObj Class Object for Writing Result ####
    self.weightObj = weightObj
def getFeatNumFromWeights(weightsFile):
    """Return the number of features recorded in a spatial weights file.

    INPUTS:
    weightsFile (str): path to a GAL, GWT or SWM weights file

    RETURN: (int) number of observations; None for unrecognized types.
    """
    weightType = returnWeightFileType(weightsFile)
    if weightType in ['GAL', 'GWT']:
        #### Feature Count Lives in the Text Header Line ####
        weightFile = open(weightsFile, 'r')
        try:
            info = weightFile.readline().strip().split()
        finally:
            #### BUG FIX: handle was previously never closed ####
            weightFile.close()
        if weightType == 'GAL':
            #### GAL header is either "n" or "0 n idField UNKNOWN" ####
            if len(info) == 1:
                return LOCALE.atoi(info[0])
            elif len(info) > 1:
                return LOCALE.atoi(info[1])
        else:
            #### GWT header: count is the second token ####
            return LOCALE.atoi(info[1])
    elif weightType == 'SWM':
        swm = WU.SWMReader(weightsFile)
        numObs = swm.numObs
        #### BUG FIX: close the reader, as swm2Weights does ####
        swm.close()
        return numObs
def buildWeights(self):
    """Performs Contiguity-based Weights Creation.

    Sets self.weightObj (a W object, possibly higher-order).
    """
    ARCPY.SetProgressor("default", "Constructing spatial weights object...")

    #### Shorthand Attributes ####
    ssdo = self.ssdo
    isLowOrder = self.isLowOrder
    weightOrder = self.weightOrder

    #### Get Neighbor Dictionary for All Polygons ####
    master2Order = ssdo.master2Order
    polyNeighborDict = WU.polygonNeighborDict(self.inputFC,
                                              self.masterField,
                                              contiguityType=self.weightType)

    #### Assign empty list to polygons without neighbors ####
    if ssdo.numObs > len(polyNeighborDict):
        for masterKey in master2Order.keys():
            #### 'not in' replaces has_key: valid on Python 2 and 3 ####
            if masterKey not in polyNeighborDict:
                polyNeighborDict[masterKey] = []

    #### Convert DefaultDict to Real Dict, Keyed by Order ID ####
    if not self.idField:
        polyNeighborCopy = {}
        for key in polyNeighborDict.keys():
            polyNeighborCopy[master2Order[key]] = []
            for item in polyNeighborDict[key]:
                polyNeighborCopy[master2Order[key]].append(
                    master2Order[item])
        polyNeighborDict = polyNeighborCopy

    #### Create a PySAL W Object ####
    weightObj = W(polyNeighborDict)

    #### Building up Lower Order Spatial Weights ####
    if weightOrder > 1:
        ARCPY.SetProgressor("default",
                            "Building up Lower Order Spatial Weights...")
        origWeight = weightObj
        weightObj = PYSAL.higher_order(weightObj, weightOrder)
        if isLowOrder:
            #### Union in every order from weightOrder-1 down to 1 ####
            for order in xrange(weightOrder - 1, 1, -1):
                lowOrderW = PYSAL.higher_order(origWeight, order)
                weightObj = PYSAL.w_union(weightObj, lowOrderW)
            weightObj = PYSAL.w_union(weightObj, origWeight)

    #### Save weightObj Class Object for Writing Result ####
    self.weightObj = weightObj
def getIDFieldFromWeights(weightsFile):
    """Return the unique-ID field name stored in a weights file, or None
    if the file does not record one."""
    fileType = returnWeightFileType(weightsFile)

    if fileType == 'SWM':
        #### SWM Records the Master Field Explicitly ####
        reader = WU.SWMReader(weightsFile)
        masterField = reader.masterField
        if not masterField or masterField == 'UNKNOWN':
            return None
        return masterField

    #### GAL/GWT: First Non-Numeric Header Token is the Field Name ####
    weightFile = open(weightsFile, 'r')
    headerTokens = weightFile.readline().strip().split()
    weightFile.close()
    for token in headerTokens:
        if token and not token.isdigit() and token.lower() != "unknown":
            return token
    return None
def __init__(self, ssdo, dependentVar, independentVars, weightsFile, outputReportFile = None, outputTable = None, maxIndVars = 5, minIndVars = 1, minR2 = .5, maxCoef = .01, maxVIF = 5.0, minJB = .1, minMI = .1): ARCPY.env.overwriteOutput = True #### Set Initial Attributes #### UTILS.assignClassAttr(self, locals()) self.masterField = self.ssdo.masterField self.warnedTProb = False #### Set Boolean For Passing All Moran's I #### self.allMIPass = UTILS.compareFloat(0.0, self.minMI, rTol = .00000001) #### Assess Whether SWM File Being Used #### if weightsFile: weightSuffix = weightsFile.split(".")[-1].lower() if weightSuffix == "swm": self.weightsType = "SWM" self.weightsMatrix = self.weightsFile else: self.weightsType = "GWT" self.weightsMatrix = WU.buildTextWeightDict(weightsFile, self.ssdo.master2Order) else: #### If No Weightsfile Provided, Use 8 Nearest Neighbors #### if ssdo.numObs <= 9: nn = ssdo.numObs - 2 ARCPY.AddIDMessage("WARNING", 1500, 8, nn) else: nn = 8 self.weightsType = "GA" gaSearch = GAPY.ga_nsearch(self.ssdo.gaTable) gaSearch.init_nearest(0.0, nn, "euclidean") self.weightsMatrix = gaSearch #### Initialize Data #### self.runModels()
def construct(self):
    """Constructs the neighborhood structure for each feature and
    dispatches the appropriate values for the calculation of the
    statistic.

    Neighborhood source is chosen by, in order: an SWM weights file, a
    text (GWT) weights file, polygon contiguity (wType 4/5), or an
    on-the-fly GA distance search. Each feature's row is forwarded to
    self.processRow().
    """
    #### Shorthand Attributes ####
    ssdo = self.ssdo
    varName = self.varName
    concept = self.concept
    gaConcept = concept.lower()
    threshold = self.threshold
    exponent = self.exponent
    wType = self.wType
    rowStandard = self.rowStandard
    numObs = self.numObs
    master2Order = self.master2Order
    masterField = ssdo.masterField
    weightsFile = self.weightsFile

    #### Check That All Input Values are Positive ####
    if NUM.sum(self.y < 0.0) != 0:
        ARCPY.AddIDMessage("Error", 915)
        raise SystemExit()

    #### Assure that Variance is Larger than Zero ####
    yVar = NUM.var(self.y)
    if NUM.isnan(yVar) or yVar <= 0.0:
        ARCPY.AddIDMessage("Error", 906)
        raise SystemExit()

    #### Create Base Data Structures/Variables ####
    self.numer = 0.0
    self.denom = 0.0
    self.rowSum = NUM.zeros(numObs)
    self.colSum = NUM.zeros(numObs)
    self.ySum = 0.0
    self.y2Sum = 0.0
    self.y3Sum = 0.0
    self.y4Sum = 0.0
    self.s0 = 0
    self.s1 = 0
    self.wij = {}

    #### Set Neighborhood Structure Type ####
    if self.weightsFile:
        if self.swmFileBool:
            #### Open Spatial Weights and Obtain Chars ####
            swm = WU.SWMReader(weightsFile)
            N = swm.numObs
            rowStandard = swm.rowStandard

            #### Check to Assure Complete Set of Weights ####
            if numObs > N:
                ARCPY.AddIDMessage("Error", 842, numObs, N)
                raise SystemExit()

            #### Check if Selection Set ####
            isSubSet = False
            if numObs < N:
                isSubSet = True
            #### Iterate every SWM entry; subset filtered per row ####
            iterVals = xrange(N)
        else:
            #### Warning for GWT with Bad Records/Selection ####
            if ssdo.selectionSet or ssdo.badRecords:
                ARCPY.AddIDMessage("WARNING", 1029)

            #### Build Weights Dictionary ####
            weightDict = WU.buildTextWeightDict(weightsFile, master2Order)
            iterVals = master2Order.iterkeys()
            N = numObs
    elif wType in [4, 5]:
        #### Polygon Contiguity ####
        if wType == 4:
            contiguityType = "ROOK"
        else:
            contiguityType = "QUEEN"
        contDict = WU.polygonNeighborDict(ssdo.inputFC, ssdo.oidName,
                                          contiguityType = contiguityType)
        iterVals = master2Order.keys()
        N = numObs
    else:
        #### On-The-Fly Distance-Based Neighborhood ####
        gaTable = ssdo.gaTable
        gaSearch = GAPY.ga_nsearch(gaTable)
        if wType == 7:
            #### Zone of Indiff, All Related to All ####
            gaSearch.init_nearest(threshold, numObs, gaConcept)
        else:
            #### Inverse and Fixed Distances ####
            gaSearch.init_nearest(threshold, 0, gaConcept)
        iterVals = xrange(numObs)
        N = numObs
        neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch,
                                               weight_type = wType,
                                               exponent = exponent,
                                               row_standard = rowStandard)

    #### Create Progressor ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84007), 0, N, 1)

    #### Create Neighbor Info Class ####
    ni = WU.NeighborInfo(masterField)

    #### Calculation For Each Feature ####
    for i in iterVals:
        if self.swmFileBool:
            #### Using SWM File ####
            info = swm.swm.readEntry()
            masterID = info[0]
            #### Features outside the selection are skipped below ####
            if master2Order.has_key(masterID):
                rowInfo = WU.getWeightsValuesSWM(info, master2Order,
                                                 self.y,
                                                 rowStandard = rowStandard,
                                                 isSubSet = isSubSet)
                includeIt = True
            else:
                includeIt = False
        elif self.weightsFile and not self.swmFileBool:
            #### Text Weights ####
            masterID = i
            includeIt = True
            rowInfo = WU.getWeightsValuesText(masterID, master2Order,
                                              weightDict, self.y)
        elif wType in [4, 5]:
            #### Polygon Contiguity ####
            masterID = i
            includeIt = True
            rowInfo = WU.getWeightsValuesCont(masterID, master2Order,
                                              contDict, self.y,
                                              rowStandard = rowStandard)
        else:
            #### Distance Based ####
            masterID = gaTable[i][0]
            includeIt = True
            rowInfo = WU.getWeightsValuesOTF(neighWeights, i, self.y)

        #### Subset Boolean for SWM File ####
        if includeIt:
            #### Parse Row Info ####
            orderID, yiVal, nhIDs, nhVals, weights = rowInfo

            #### Assure Neighbors Exist After Selection ####
            nn, nhIDs, nhVals, weights = ni.processInfo(masterID, nhIDs,
                                                        nhVals, weights)
            if nn:
                #### Process Feature Contribution to General G ####
                self.processRow(orderID, yiVal, nhIDs, nhVals, weights)

        #### Reset Progessor ####
        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    if self.swmFileBool:
        swm.close()

    #### Report on Features with No Neighbors ####
    ni.reportNoNeighbors()

    #### Report on Features with Large Number of Neighbors ####
    ni.reportWarnings()
    ni.reportMaximums()
    self.neighInfo = ni
def weightedCalc(self):
    """Performs weighted k-function.

    Runs the observed calculation (perm 0) plus self.permutations
    permuted runs; stores results per cutoff order in self.ld and the
    permutation envelope in self.ldMin/self.ldMax.
    """
    #### Attribute Shortcuts ####
    ssdo = self.ssdo
    reduce = self.reduce       # NOTE(review): shadows builtin reduce()
    simulate = self.simulate
    ripley = self.ripley
    numIDs = len(self.ids)
    if reduce:
        studyArea2Use = self.reduceArea
    else:
        studyArea2Use = self.studyArea
    if simulate:
        #### Weight indices of simulated points' originals ####
        simOrder = []
        for simKey, origID in self.simDict.iteritems():
            simOrder.append(self.weightDict[origID])

    self.ld = COLL.defaultdict(float)
    if self.permutations:
        self.ldMin = COLL.defaultdict(float)
        self.ldMax = COLL.defaultdict(float)
        for order in self.cutoffOrder:
            #### Sentinel so min() works on the first permutation ####
            self.ldMin[order] = 99999999999.

    permsPlus = self.permutations + 1
    for perm in xrange(0, permsPlus):
        #### Permutation Progressor ####
        pmsg = ARCPY.GetIDMessage(84184)
        progressMessage = pmsg.format(perm, permsPlus)
        ARCPY.SetProgressor("default", progressMessage)
        gaSearch = GAPY.ga_nsearch(self.kTable)
        gaSearch.init_nearest(self.stepMax, 0, "euclidean")
        N = len(self.kTable)

        #### Permutate Weights ####
        #### perm 0 uses observed weights; later perms reshuffle them ####
        if perm:
            weights = RAND.permutation(weights)
        else:
            weights = self.weightVals
        if simulate:
            simWeights = NUM.take(self.weightVals, simOrder)

        #### Set Statistic Variables ####
        weightSumVal = 0.0
        kij = COLL.defaultdict(float)
        start = 0

        #### Loop Over Entire Table ####
        for i in xrange(N):
            row = self.kTable[i]
            id0 = row[0]
            #### Calculate For Inside IDs ####
            if id0 in self.ids:
                x0, y0 = row[1]
                weightInd0 = self.weightDict[id0]
                w0 = weights[weightInd0]

                #### Weight Sum Resolution ####
                weightSumVal += (NUM.sum(w0 * weights)) - w0**2.0
                if simulate:
                    weightSumVal += (w0 * simWeights).sum()

                #### Neighbors Within Largest Distance ####
                gaSearch.search_by_idx(i)
                for nh in gaSearch:
                    neighInfo = self.kTable[nh.idx]
                    id1 = neighInfo[0]
                    x1, y1 = neighInfo[1]
                    #### Input or Simulated Point ####
                    # NOTE(review): bare except — assumes the only
                    # failure is a simulated ID missing from weightDict
                    try:
                        weightInd1 = self.weightDict[id1]
                    except:
                        origID = self.simDict[id1]
                        weightInd1 = self.weightDict[origID]
                    #### Process Neighbor Pair ####
                    w1 = weights[weightInd1]
                    dist = WU.euclideanDistance(x0, x1, y0, y1)
                    if ripley:
                        value = self.returnRipley(id0, dist)
                    else:
                        value = 1.0
                    value = w0 * (w1 * value)
                    #### Add To Cutoffs ####
                    #### cutoffs scanned largest-first; stop once the
                    #### pair distance exceeds the cutoff ####
                    for order in self.reverseOrder:
                        cutoff = self.cutoffs[order]
                        if dist > cutoff:
                            break
                        kij[order] += value
        ARCPY.SetProgressorPosition()

        #### Calculate Stats USing Dictionaries ####
        denom = NUM.pi * weightSumVal
        for order in self.cutoffOrder:
            res = kij[order]
            numer = res * studyArea2Use
            permResult = NUM.sqrt( (numer/denom) )
            if perm:
                #### Track the permutation envelope ####
                self.ldMin[order] = min(self.ldMin[order], permResult)
                self.ldMax[order] = max(self.ldMax[order], permResult)
            else:
                #### Observed Result ####
                self.ld[order] = permResult
def kNearest2SWM(inputFC, swmFile, masterField, concept = "EUCLIDEAN",
                 kNeighs = 1, rowStandard = True):
    """Creates a sparse spatial weights matrix (SWM) based on k-nearest
    neighbors.

    INPUTS:
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    concept: {str, EUCLIDEAN}: EUCLIDEAN or MANHATTAN
    kNeighs {int, 1}: number of neighbors to return
    rowStandard {bool, True}: row standardize weights?
    """
    #### Assure that kNeighs is Non-Zero ####
    if kNeighs <= 0:
        ARCPY.AddIDMessage("ERROR", 976)
        raise SystemExit()

    #### Set Default Progressor for Neigborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 2)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField,
                                    types = [0,1])

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.catPath, [masterField],
                                 spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs = 2)

    #### Process any bad records encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        err = ERROR.reportBadRecords(cnt, numBadRecs, badRecs,
                                     label = ssdo.oidName)

    #### Assure k-Nearest is Less Than Number of Features ####
    if kNeighs >= N:
        ARCPY.AddIDMessage("ERROR", 975)
        raise SystemExit()

    #### Create k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    concept, gaConcept = WU.validateDistanceMethod(concept,
                                                   ssdo.spatialRef)
    gaSearch.init_nearest(0.0, kNeighs, gaConcept)
    neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch,
                                           weight_type = 1,
                                           row_standard = False)

    #### Set Progressor for Weights Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, N, 1)

    #### Initialize Spatial Weights Matrix File ####
    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName,
                             N, rowStandard, inputFC = inputFC,
                             wType = 2, distanceMethod = concept,
                             numNeighs = kNeighs)

    #### Unique Master ID Dictionary ####
    masterSet = set([])

    for row in xrange(N):
        masterID = int(gaTable[row][2])
        #### Duplicate master IDs are a hard error ####
        if masterID in masterSet:
            ARCPY.AddIDMessage("Error", 644, masterField)
            ARCPY.AddIDMessage("Error", 643)
            raise SystemExit()
        else:
            masterSet.add(masterID)

        #### Translate neighbor row indices to master IDs ####
        neighs, weights = neighWeights[row]
        neighs = [ gaTable[nh][2] for nh in neighs ]

        #### Add Spatial Weights Matrix Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights)

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    swmWriter.close()
    del gaTable

    #### Report Warning/Max Neighbors ####
    swmWriter.reportNeighInfo()

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()
def delaunay2SWM(inputFC, swmFile, masterField, rowStandard = True):
    """Creates a sparse spatial weights matrix (SWM) based on Delaunay
    Triangulation.

    INPUTS:
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    rowStandard {bool, True}: row standardize weights?
    """
    #### Set Default Progressor for Neigborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 2)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField,
                                    types = [0,1])

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.catPath, [masterField],
                                 spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs = 2)

    #### Process any bad records encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        err = ERROR.reportBadRecords(cnt, numBadRecs, badRecs,
                                     label = ssdo.oidName)

    #### Create Delaunay Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_delaunay()
    neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch,
                                           weight_type = 1,
                                           row_standard = False)

    #### Set Progressor for Weights Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, N, 1)

    #### Initialize Spatial Weights Matrix File ####
    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName,
                             N, rowStandard, inputFC = inputFC,
                             wType = 3)

    #### Unique Master ID Dictionary ####
    masterSet = set([])

    for row in xrange(N):
        masterID = int(gaTable[row][2])
        #### Duplicate master IDs are a hard error ####
        if masterID in masterSet:
            ARCPY.AddIDMessage("Error", 644, masterField)
            ARCPY.AddIDMessage("Error", 643)
            raise SystemExit()
        else:
            masterSet.add(masterID)

        #### Translate neighbor row indices to master IDs ####
        neighs, weights = neighWeights[row]
        neighs = [ gaTable[nh][2] for nh in neighs ]

        #### Add Spatial Weights Matrix Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights)

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    swmWriter.close()
    del gaTable

    #### Report if Any Features Have No Neighbors ####
    swmWriter.reportNoNeighbors()

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()
def spaceTime2SWM(inputFC, swmFile, masterField, concept = "EUCLIDEAN",
                  threshold = None, rowStandard = True,
                  timeField = None, timeType = None,
                  timeValue = None):
    """Creates a sparse spatial weights matrix (SWM) where neighbors must be
    within both a spatial distance threshold and a temporal window.

    INPUTS:
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    concept: {str, EUCLIDEAN}: EUCLIDEAN or MANHATTAN
    threshold {float, None}: distance threshold
    rowStandard {bool, True}: row standardize weights?
    timeField {str, None}: name of the date-time field
    timeType {str, None}: ESRI enumeration of date-time intervals
    timeValue {float, None}: value forward and backward in time
    """

    #### Assure Temporal Parameters are Set ####
    if not timeField:
        ARCPY.AddIDMessage("ERROR", 1320)
        raise SystemExit()
    if not timeType:
        ARCPY.AddIDMessage("ERROR", 1321)
        raise SystemExit()
    if not timeValue or timeValue <= 0:
        ARCPY.AddIDMessage("ERROR", 1322)
        raise SystemExit()

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 2)
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84001), 0, cnt, 1)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField,
                                    types = [0,1])
    badIDs = []

    #### Create Temporal Hash ####
    # masterID -> (timestamp, window start, window end)
    timeInfo = {}
    xyCoords = NUM.empty((cnt, 2), float)

    #### Process Field Values ####
    fieldList = [masterField, "SHAPE@XY", timeField]
    try:
        rows = DA.SearchCursor(ssdo.catPath, fieldList, "",
                               ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 204)
        raise SystemExit()

    #### Add Data to GATable and Time Dictionary ####
    c = 0
    for row in rows:
        badRow = False

        #### Assure Masterfield is Valid ####
        masterID = row[0]
        if masterID == None or masterID == "":
            badRow = True

        #### Assure Date/Time is Valid ####
        timeStamp = row[-1]
        if timeStamp == None or timeStamp == "":
            badRow = True

        #### Assure Centroid is Valid ####
        badXY = row[1].count(None)
        if not badXY:
            x,y = row[1]
            xyCoords[c] = (x,y)
        else:
            badRow = True

        #### Process Data ####
        if not badRow:
            if timeInfo.has_key(masterID):
                #### Assure Uniqueness ####
                ARCPY.AddIDMessage("Error", 644, masterField)
                ARCPY.AddIDMessage("Error", 643)
                raise SystemExit()
            else:
                #### Fill Date/Time Dict ####
                startDT, endDT = TUTILS.calculateTimeWindow(timeStamp,
                                                            timeValue,
                                                            timeType)
                timeInfo[masterID] = (timeStamp, startDT, endDT)
        else:
            badIDs.append(masterID)

        #### Set Progress ####
        c += 1
        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    del rows

    #### Get Set of Bad IDs ####
    numBadObs = len(badIDs)
    badIDs = list(set(badIDs))
    badIDs.sort()
    badIDs = [ str(i) for i in badIDs ]

    #### Process any bad records encountered ####
    if numBadObs:
        ERROR.reportBadRecords(cnt, numBadObs, badIDs,
                               label = masterField)

    #### Load Neighbor Table ####
    gaTable, gaInfo = WU.gaTable(ssdo.inputFC,
                                 fieldNames = [masterField, timeField],
                                 spatRef = ssdo.spatialRefString)
    numObs = len(gaTable)
    # Trim the coordinate array to the rows that actually loaded.
    xyCoords = xyCoords[0:numObs]

    #### Set the Distance Threshold ####
    concept, gaConcept = WU.validateDistanceMethod(concept,
                                                   ssdo.spatialRef)
    if threshold == None:
        #### Set Progressor for Search ####
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84144))

        #### Create k-Nearest Neighbor Search Type ####
        gaSearch = GAPY.ga_nsearch(gaTable)
        gaSearch.init_nearest(0.0, 1, gaConcept)
        neighDist = ARC._ss.NeighborDistances(gaTable, gaSearch)
        N = len(neighDist)
        threshold = 0.0
        sumDist = 0.0

        #### Find Maximum Nearest Neighbor Distance ####
        # Default threshold = max nearest-neighbor distance, so every
        # feature has at least one spatial neighbor.
        for row in xrange(N):
            dij = neighDist[row][-1][0]
            if dij > threshold:
                threshold = dij
            sumDist += dij
            ARCPY.SetProgressorPosition()

        #### Increase For Rounding Error ####
        threshold = threshold * 1.0001
        avgDist = sumDist / (N * 1.0)

        #### Add Linear/Angular Units ####
        thresholdStr = ssdo.distanceInfo.printDistance(threshold)
        ARCPY.AddIDMessage("Warning", 853, thresholdStr)

        #### Chordal Default Check ####
        if ssdo.useChordal:
            hardMaxExtent = ARC._ss.get_max_gcs_distance(ssdo.spatialRef)
            if threshold > hardMaxExtent:
                ARCPY.AddIDMessage("ERROR", 1609)
                raise SystemExit()

        #### Clean Up ####
        del gaSearch

    #### Create Missing SSDO Info ####
    extent = UTILS.resetExtent(xyCoords)

    #### Reset Coordinates for Chordal ####
    if ssdo.useChordal:
        sliceInfo = UTILS.SpheroidSlice(extent, ssdo.spatialRef)
        maxExtent = sliceInfo.maxExtent
    else:
        env = UTILS.Envelope(extent)
        maxExtent = env.maxExtent

    threshold = checkDistanceThresholdSWM(ssdo, threshold, maxExtent)

    #### Set Default Progressor for Neigborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create Distance Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(threshold, 0, gaConcept)
    neighSearch = ARC._ss.NeighborSearch(gaTable, gaSearch)

    #### Set Progressor for Weights Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, numObs, 1)

    #### Initialize Spatial Weights Matrix File ####
    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName,
                             numObs, rowStandard, inputFC = inputFC,
                             wType = 9, distanceMethod = concept,
                             threshold = threshold, timeField = timeField,
                             timeType = timeType, timeValue = timeValue)

    for row in xrange(numObs):
        masterID = gaTable[row][2]

        #### Get Date/Time Info ####
        dt0, startDT0, endDT0 = timeInfo[masterID]

        nhs = neighSearch[row]
        neighs = []
        weights = []
        for nh in nhs:
            #### Search Through Spatial Neighbors ####
            neighID = gaTable[nh][2]

            #### Get Date/Time Info ####
            dt1, startDT1, endDT1 = timeInfo[neighID]

            #### Filter Based on Date/Time ####
            # Keep only spatial neighbors whose timestamp falls inside
            # this feature's temporal window; weights are binary (1.0).
            insideTimeWindow = TUTILS.isTimeNeighbor(startDT0, endDT0, dt1)
            if insideTimeWindow:
                neighs.append(neighID)
                weights.append(1.0)

        #### Add Spatial Weights Matrix Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights)

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    swmWriter.close()
    del gaTable

    #### Report Warning/Max Neighbors ####
    swmWriter.reportNeighInfo()

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()
def polygon2SWM(inputFC, swmFile, masterField,
                concept = "EUCLIDEAN", kNeighs = 0,
                rowStandard = True, contiguityType = "ROOK"):
    """Creates a sparse spatial weights matrix (SWM) based on polygon
    contiguity.

    INPUTS:
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    concept: {str, EUCLIDEAN}: EUCLIDEAN or MANHATTAN
    kNeighs {int, 0}: number of neighbors to return (1)
    rowStandard {bool, True}: row standardize weights?
    contiguityType {str, Rook}: {Rook = Edges Only,
                                 Queen = Edges/Vertices}

    NOTES:
    (1) kNeighs is used if polygon is not contiguous. E.g. Islands
    """

    #### Set Default Progressor for Neigborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 2)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField,
                                    types = [0,1])

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.catPath, [masterField],
                                 spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs = 2)

    #### Assure k-Nearest is Less Than Number of Features ####
    if kNeighs >= N:
        ARCPY.AddIDMessage("ERROR", 975)
        raise SystemExit()

    #### Create Nearest Neighbor Search Type For Islands ####
    # Only set up a KNN fallback when the caller asked for one; it is
    # used below to give island polygons (no contiguous neighbors)
    # at least kNeighs neighbors.
    if kNeighs > 0:
        gaSearch = GAPY.ga_nsearch(gaTable)
        concept, gaConcept = WU.validateDistanceMethod(concept,
                                                       ssdo.spatialRef)
        gaSearch.init_nearest(0.0, kNeighs, gaConcept)
        forceNeighbor = True
        neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch,
                                               weight_type = 1,
                                               row_standard = False)
    else:
        forceNeighbor = False
        neighSearch = None

    #### Create Polygon Neighbors ####
    polyNeighborDict = WU.polygonNeighborDict(inputFC, masterField,
                                  contiguityType = contiguityType)

    #### Write Poly Neighbor List (Dict) ####
    #### Set Progressor for SWM Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, N, 1)

    #### Initialize Spatial Weights Matrix File ####
    if contiguityType == "ROOK":
        wType = 4
    else:
        wType = 5

    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName,
                             N, rowStandard, inputFC = inputFC,
                             wType = wType, distanceMethod = concept,
                             numNeighs = kNeighs)

    #### Keep Track of Polygons w/o Neighbors ####
    islandPolys = []

    #### Write Polygon Contiguity to SWM File ####
    for row in xrange(N):
        rowInfo = gaTable[row]
        oid = rowInfo[0]
        masterID = rowInfo[2]
        neighs = polyNeighborDict[masterID]
        nn = len(neighs)
        if forceNeighbor:
            if nn < kNeighs:
                #### Only Force KNN If Specified & Contiguity is Less ####
                islandPolys.append(oid)
                flag = True

                knnNeighs, knnWeights = neighWeights[row]
                c = 0
                while flag:
                    # Walk the KNN list, skipping IDs already present from
                    # contiguity; the except clause ends the loop when the
                    # KNN candidates are exhausted.
                    try:
                        neighID = gaTable[knnNeighs[c]][2]
                        if neighID not in neighs:
                            neighs.append(neighID)
                            nn += 1
                            if nn == kNeighs:
                                flag = False
                        c += 1
                    except:
                        flag = False

        weights = NUM.ones(nn)

        #### Add Weights Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights)

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    #### Report on Features with No Neighbors ####
    countIslands = len(islandPolys)
    if countIslands:
        islandPolys.sort()
        if countIslands > 30:
            islandPolys = islandPolys[0:30]
        ERROR.warningNoNeighbors(N, countIslands, islandPolys,
                                 ssdo.oidName,
                                 forceNeighbor = forceNeighbor,
                                 contiguity = True)

    #### Clean Up ####
    swmWriter.close()
    del gaTable

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()

    del polyNeighborDict
def construct(self):
    """Constructs the neighborhood structure for each feature and
    dispatches the appropriate values for the calculation of the
    statistic (Local Moran's I)."""

    #### Shorthand Attributes ####
    ssdo = self.ssdo
    varName = self.varName
    concept = self.concept
    gaConcept = concept.lower()
    threshold = self.threshold
    exponent = self.exponent
    wType = self.wType
    rowStandard = self.rowStandard
    numObs = self.numObs
    master2Order = self.master2Order
    masterField = ssdo.masterField
    weightsFile = self.weightsFile

    #### Assure that Variance is Larger than Zero ####
    yVar = NUM.var(self.y)
    if NUM.isnan(yVar) or yVar <= 0.0:
        ARCPY.AddIDMessage("Error", 906)
        raise SystemExit()

    #### Create Deviation Variables ####
    self.yBar = NUM.mean(self.y)
    self.yDev = self.y - self.yBar
    self.nm1 = numObs - 1.
    self.nm2 = numObs - 2.
    self.nm12 = self.nm1 * self.nm2
    yDev2 = self.yDev**2.0
    yDev2Norm = yDev2 / self.nm1
    self.yDev2NormSum = sum(yDev2Norm)
    yDev4 = self.yDev**4.0
    yDev4Norm = yDev4 / self.nm1
    yDev4NormSum = sum(yDev4Norm)
    # Sample kurtosis term used in the analytical variance of the statistic.
    self.b2i = yDev4NormSum / (self.yDev2NormSum**2.0)

    #### Create Base Data Structures/Variables ####
    self.li = NUM.zeros(numObs)
    self.ei = NUM.zeros(numObs)
    self.vi = NUM.zeros(numObs)
    self.zi = NUM.zeros(numObs)
    self.pVals = NUM.ones(numObs)
    if self.permutations:
        self.pseudoPVals = NUM.ones(numObs)
    self.moranInfo = {}

    #### Keep Track of Features with No Neighbors ####
    self.idsNoNeighs = []

    #### Set Neighborhood Structure Type ####
    # Four sources of weights: SWM file, text (GWT) weights file,
    # polygon contiguity (wType 4/5), or on-the-fly distance weights.
    if self.weightsFile:
        if self.swmFileBool:
            #### Open Spatial Weights and Obtain Chars ####
            swm = WU.SWMReader(weightsFile)
            N = swm.numObs
            rowStandard = swm.rowStandard
            self.swm = swm

            #### Check to Assure Complete Set of Weights ####
            if numObs > N:
                ARCPY.AddIDMessage("Error", 842, numObs, N)
                raise SystemExit()

            #### Check if Selection Set ####
            isSubSet = False
            if numObs < N:
                isSubSet = True
            iterVals = xrange(N)
        else:
            #### Warning for GWT with Bad Records/Selection ####
            if ssdo.selectionSet or ssdo.badRecords:
                ARCPY.AddIDMessage("WARNING", 1029)

            #### Build Weights Dictionary ####
            weightDict = WU.buildTextWeightDict(weightsFile,
                                                master2Order)
            iterVals = master2Order.keys()
            N = numObs

    elif wType in [4, 5]:
        #### Polygon Contiguity ####
        if wType == 4:
            contiguityType = "ROOK"
        else:
            contiguityType = "QUEEN"
        contDict = WU.polygonNeighborDict(ssdo.inputFC, ssdo.oidName,
                                 contiguityType = contiguityType)
        iterVals = master2Order.keys()
        N = numObs

    else:
        gaTable = ssdo.gaTable
        gaSearch = GAPY.ga_nsearch(gaTable)
        if wType == 7:
            #### Zone of Indiff, All Related to All ####
            gaSearch.init_nearest(threshold, numObs, gaConcept)
        else:
            #### Inverse and Fixed Distances ####
            gaSearch.init_nearest(threshold, 0, gaConcept)
        iterVals = range(numObs)
        N = numObs
        neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch,
                                               weight_type = wType,
                                               exponent = exponent,
                                               row_standard = rowStandard)

    #### Create Progressor ####
    msg = ARCPY.GetIDMessage(84007)
    if self.permutations:
        msg += ": Using Permutations = %i" % self.permutations
    ARCPY.SetProgressor("step", msg , 0, N, 1)

    #### Create Neighbor Info Class ####
    ni = WU.NeighborInfo(masterField)

    #### Calculation For Each Feature ####
    for i in iterVals:
        if self.swmFileBool:
            #### Using SWM File ####
            # SWM entries are read sequentially; features outside the
            # current selection are skipped via includeIt.
            info = swm.swm.readEntry()
            masterID = info[0]
            if master2Order.has_key(masterID):
                rowInfo = WU.getWeightsValuesSWM(info, master2Order,
                                                 self.yDev,
                                                 rowStandard = rowStandard,
                                                 isSubSet = isSubSet)
                includeIt = True
            else:
                includeIt = False

        elif self.weightsFile and not self.swmFileBool:
            #### Text Weights ####
            masterID = i
            includeIt = True
            rowInfo = WU.getWeightsValuesText(masterID, master2Order,
                                              weightDict, self.yDev)

        elif wType in [4, 5]:
            #### Polygon Contiguity ####
            masterID = i
            includeIt = True
            rowInfo = WU.getWeightsValuesCont(masterID, master2Order,
                                              contDict, self.yDev,
                                              rowStandard = rowStandard)

        else:
            #### Distance Based ####
            masterID = gaTable[i][0]
            includeIt = True
            rowInfo = WU.getWeightsValuesOTF(neighWeights, i, self.yDev)

        #### Subset Boolean for SWM File ####
        if includeIt:
            #### Parse Row Info ####
            orderID, yiDev, nhIDs, nhVals, weights = rowInfo

            #### Assure Neighbors Exist After Selection ####
            nn, nhIDs, nhVals, weights = ni.processInfo(masterID, nhIDs,
                                                        nhVals, weights)

            if nn:
                #### Calculate Local I ####
                self.calculateLI(orderID, yiDev, nhVals, weights)

        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    if self.swmFileBool:
        swm.close()

    #### Report on Features with No Neighbors ####
    ni.reportNoNeighbors()
    self.setNullValues(ni.idsNoNeighs)

    #### Report on Features with Large Number of Neighbors ####
    ni.reportWarnings()
    ni.reportMaximums()
    self.neighInfo = ni

    #### Set p-values for Gi Bins ####
    if self.permutations:
        #### Use Pseudo p-values ####
        pv = self.pseudoPVals
    else:
        #### Use Traditional p-values ####
        pv = self.pVals

    #### Calculate FDR and Moran Bins ####
    toolMSG = ARCPY.GetIDMessage(84474)
    if self.applyFDR:
        #### Set Bins Using FDR ####
        msg = ARCPY.GetIDMessage(84472).format(toolMSG)
        ARCPY.SetProgressor("default", msg)
        fdrBins = STATS.fdrTransform(pv, self.li)
        self.moranBins = STATS.moranBinFromPVals(pv, self.moranInfo,
                                                 fdrBins = fdrBins)
    else:
        msg = ARCPY.GetIDMessage(84473).format(toolMSG)
        ARCPY.SetProgressor("default", msg)
        self.moranBins = STATS.moranBinFromPVals(pv, self.moranInfo)
def construct(self):
    """Constructs the neighborhood structure for each feature and
    dispatches the appropriate values for the calculation of the
    statistic (Global Moran's I accumulation via processRow)."""

    #### Shorthand Attributes ####
    ssdo = self.ssdo
    masterField = ssdo.masterField
    numObs = len(self.y)
    master2Order = self.ssdo.master2Order
    yVar = NUM.var(self.y)
    if NUM.isnan(yVar) or yVar <= 0.0:
        ARCPY.AddIDMessage("Error", 906)
        raise SystemExit()

    #### Create Deviation Variables ####
    self.yBar = NUM.mean(self.y)
    self.yDev = self.y - self.yBar

    #### Create Base Data Structures/Variables ####
    # Accumulators for the statistic; s0/s1/wij are filled by processRow.
    self.numer = 0.0
    self.denom = NUM.sum(self.yDev**2.0)
    self.rowSum = NUM.zeros(numObs)
    self.colSum = NUM.zeros(numObs)
    self.s0 = 0
    self.s1 = 0
    self.wij = {}

    #### Open Spatial Weights and Obtain Chars ####
    if self.weightsType == "SWM":
        swm = WU.SWMReader(self.weightsMatrix)
        N = swm.numObs
        rowStandard = swm.rowStandard

        #### Check to Assure Complete Set of Weights ####
        if numObs > N:
            ARCPY.AddIDMessage("Error", 842, numObs, N)
            raise SystemExit()

        #### Check if Selection Set ####
        isSubSet = False
        if numObs < N:
            isSubSet = True
        iterVals = xrange(N)

    elif self.weightsType == "GWT":
        #### Warning for GWT with Bad Records/Selection ####
        if ssdo.selectionSet or ssdo.badRecords:
            ARCPY.AddIDMessage("WARNING", 1029)

        #### Build Weights Dictionary ####
        iterVals = master2Order.keys()
        N = numObs

    else:
        #### Use GA Table, 8 Nearest Neighbors ####
        iterVals = range(numObs)
        N = numObs
        neighWeights = ARC._ss.NeighborWeights(ssdo.gaTable,
                                               self.weightsMatrix)

    #### Create Neighbor Info Class ####
    ni = WU.NeighborInfo(masterField, silent = self.silent)

    #### Calculation For Each Feature ####
    for i in iterVals:
        if self.weightsType == "SWM":
            # SWM entries are read sequentially; rows not in the current
            # selection are skipped via includeIt.
            info = swm.swm.readEntry()
            masterID = info[0]
            if master2Order.has_key(masterID):
                rowInfo = WU.getWeightsValuesSWM(info, master2Order,
                                                 self.yDev,
                                                 rowStandard = rowStandard,
                                                 isSubSet = isSubSet)
                includeIt = True
            else:
                includeIt = False

        elif self.weightsType == "GWT":
            #### Text Weights ####
            masterID = i
            includeIt = True
            rowInfo = WU.getWeightsValuesText(masterID, master2Order,
                                              self.weightsMatrix,
                                              self.yDev)

        else:
            #### Distance Based ####
            masterID = ssdo.gaTable[i][0]
            includeIt = True
            rowInfo = WU.getWeightsValuesOTF(neighWeights, i, self.yDev)

        #### Subset Boolean for SWM File ####
        if includeIt:
            #### Parse Row Info ####
            orderID, yiDev, nhIDs, nhVals, weights = rowInfo

            #### Assure Neighbors Exist After Selection ####
            nn, nhIDs, nhVals, weights = ni.processInfo(masterID, nhIDs,
                                                        nhVals, weights)

            if nn:
                #### Process Feature Contribution to Moran's I ####
                self.processRow(orderID, yiDev, nhIDs, nhVals, weights)

    #### Clean Up ####
    if self.weightsType == "SWM":
        swm.close()

    if not self.silent:
        #### Report on Features with No Neighbors ####
        ni.reportNoNeighbors()

        #### Report on Features with Large Number of Neighbors ####
        ni.reportWarnings()
        ni.reportMaximums()

    self.neighInfo = ni
def stCollectByKNN(ssdo, timeField, outputFC, inSpan, inDistance):
    """ This method applied Jacquez Space-Time K-NN to convert event data
    into weighted point data by dissolving all coincident points in space
    and time into unique points with a new count field that contains the
    number of original features at that location and time span.

    INPUTS:
    ssdo (obj): SSDataObject from input
    timeField (str): Date/Time field name in input feature
    outputFC (str): path to the output feature class
    inSpan (int): value of temporal units within the same time bin
    inDistance (int): value of spatial units considered as spatial neighbors

    OUTPUTS:
    Create new collected point feature
    """

    #### Read raw time data ####
    timeData = ssdo.fields[timeField].data

    #### Convert temporal unit ####
    # Truncate timestamps to day resolution before binning.
    time = NUM.array(timeData, dtype = 'datetime64[s]').astype('datetime64[D]')

    #### Find Start Time ####
    startTime = time.min()

    #### Create Bin for Space and Time ####
    timeBin = (time - startTime) / inSpan

    numObs = ssdo.numObs

    #### Create Sudo-fid to Find K-NN in Space and Time
    fid = [i for i in xrange(numObs)]

    #### Validate Output Workspace ####
    ERROR.checkOutputPath(outputFC)

    #### True Centroid Warning For Non-Point FCs ####
    # BUG FIX: was `ARCPY / AddIDMessage(...)` (a division expression),
    # which raised a TypeError instead of emitting the warning.
    if ssdo.shapeType.upper() != "POINT":
        ARCPY.AddIDMessage("WARNING", 1021)

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.inputFC,
                                 spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    cnt = UTILS.getCount(ssdo.inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 4)
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs = 4)

    #### Process Any Bad Records Encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        if not ssdo.silentWarnings:
            ERROR.reportBadRecords(cnt, numBadRecs, badRecs,
                                   label = ssdo.oidName)

    #### Create Output Feature Class ####
    outPath, outName = OS.path.split(outputFC)
    try:
        DM.CreateFeatureclass(outPath, outName, "POINT", "", ssdo.mFlag,
                              ssdo.zFlag, ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Create k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(inDistance, 0, "euclidean")

    #### Add Count Field ####
    # NOTE(review): countFieldName/timeFieldName appear to be module-level
    # constants defined elsewhere in this file — confirm before refactoring.
    countFieldNameOut = ARCPY.ValidateFieldName(countFieldName, outPath)
    timeFieldNameOut = ARCPY.ValidateFieldName(timeFieldName, outPath)
    UTILS.addEmptyField(outputFC, countFieldNameOut, "LONG")
    UTILS.addEmptyField(outputFC, timeFieldNameOut, "DATE")
    fieldList = ["SHAPE@", countFieldNameOut, timeFieldNameOut]

    #### Set Insert Cursor ####
    rowsOut = DA.InsertCursor(outputFC, fieldList)

    #### Detect S-T K-NN by Space and Time Bin ####
    duplicateList = []
    for record in fid:
        kNNList = [record]
        if record not in duplicateList:
            # The spatial search depends only on `record`, so run it once
            # per record (hoisted out of the pair loop; result identical).
            gaSearch.search_by_idx(record)
            spatialNeighs = [nh.idx for nh in gaSearch]
            for pair in fid:
                if pair != record:
                    # Collect spatial neighbors whenever `pair` shares the
                    # same time bin as `record`.
                    if timeBin[record] == timeBin[pair]:
                        for nhIdx in spatialNeighs:
                            kNNList.append(nhIdx)
                            duplicateList.append(nhIdx)

            #### Create and Populate New Feature ####
            kNNList = list(set(kNNList))
            count = len(kNNList)
            dt = time[record]
            x0 = ssdo.xyCoords[kNNList, 0].mean()
            y0 = ssdo.xyCoords[kNNList, 1].mean()
            pnt = (x0, y0, ssdo.defaultZ)
            rowResult = [pnt, count, dt]
            rowsOut.insertRow(rowResult)
            ARCPY.SetProgressorPosition()

    #### Clean Up ####
    del rowsOut, timeBin, kNNList, duplicateList

    return countFieldNameOut
def obtainDataGA(self, masterField, fields=[], types=[0, 1, 2, 3, 5, 6],
                 minNumObs=0, warnNumObs=0):
    """Takes a list of field names and returns it in a dictionary
    structure.

    INPUTS:
    masterField (str): name of field being used as the master
    fields {list, []}: name(s) of the field to be returned
    types (list): types of data allowed to be returned (1)
    minNumObs {int, 0}: minimum number of observations for error
    warnNumObs {int, 0}: minimum number of observations for warning

    ATTRIBUTES:
    gaTable (structure): instance of the GA Table
    fields (dict): fieldName = instance of FCField
    master2Order (dict): masterID = order in lists
    order2Master (dict): order in lists = masterID
    masterField (str): field that serves as the master
    badRecords (list): master IDs that could not be read
    xyCoords (array, nunObs x 2): xy-coordinates for feature centroids

    NOTES:
    (1) No Text Fields; short [0], long [1], float [2], double[3]
    (2) The mutable default arguments (fields/types) are only read,
        never mutated, so the shared-default pitfall does not apply.
    """

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(self.allFields, masterField,
                                    types=[0, 1, 5])

    #### Set MasterIsOID Boolean ####
    self.masterIsOID = masterField == self.oidName

    #### Set Master and Data Indices ####
    # GA rows are (id, (x, y), master?, data...); when the OID is the
    # master, no extra master column is loaded, shifting the indices.
    if self.masterIsOID:
        self.masterColumnIndex = 0
        self.dataColumnIndex = 2
        fieldList = []
    else:
        self.masterColumnIndex = 2
        self.dataColumnIndex = 3
        fieldList = [masterField]

    #### Validation and Initialization of Data Fields ####
    numFields = len(fields)
    for field in fields:
        fType = ERROR.checkField(self.allFields, field, types=types)
        fieldList.append(field)
        self.fields[field] = self.allFields[field]

    #### ZCoords Are Last ####
    getZBool = self.hasZ and (not self.renderType)
    if getZBool:
        fieldList.append("SHAPE&Z")

    #### Create GA Data Structure ####
    cnt = UTILS.getCount(self.inputFC)
    fieldList = tuple(fieldList)
    gaTable, gaInfo = WU.gaTable(self.inputFC,
                                 fieldNames=fieldList,
                                 spatRef=self.spatialRefString)

    #### Check Whether the Number of Features is Appropriate ####
    numObs = gaInfo[0]
    ERROR.checkNumberOfObs(numObs, minNumObs=minNumObs,
                           warnNumObs=warnNumObs,
                           silentWarnings=self.silentWarnings)

    #### Process any bad records encountered ####
    numBadIDs = cnt - numObs
    if numBadIDs:
        badIDs = WU.parseGAWarnings(gaTable.warnings)
        if not self.silentWarnings:
            ERROR.reportBadRecords(cnt, numBadIDs, badIDs,
                                   label=self.oidName)
    else:
        badIDs = []

    #### Initialization of Centroids ####
    xyCoords = NUM.empty((numObs, 2), float)

    #### Z Coords ####
    if self.hasZ:
        zCoords = NUM.empty((numObs, ), float)

    #### Create Empty Data Arrays ####
    for fieldName, fieldObj in self.fields.iteritems():
        fieldObj.createDataArray(numObs)

    #### Populate SSDataObject ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84001), 0,
                        numObs, 1)
    for row in xrange(numObs):
        rowInfo = gaTable[row]
        x, y = rowInfo[1]
        masterID = int(rowInfo[self.masterColumnIndex])
        # Duplicate master IDs invalidate the 1:1 mapping — hard error.
        if self.master2Order.has_key(masterID):
            ARCPY.AddIDMessage("ERROR", 644, masterField)
            ARCPY.AddIDMessage("ERROR", 643)
            raise SystemExit()
        else:
            self.master2Order[masterID] = row
            self.order2Master[row] = masterID
            xyCoords[row] = (x, y)
        if numFields:
            restFields = rowInfo[self.dataColumnIndex:]
            for fieldInd, fieldName in enumerate(fields):
                self.fields[fieldName].data[row] = restFields[fieldInd]
        if self.hasZ:
            if getZBool:
                zCoords[row] = rowInfo[-1]
            else:
                zCoords[row] = NUM.nan

        ARCPY.SetProgressorPosition()

    #### Set the Hidden Fields (E.g. Not in Use) ####
    self.setHiddenFields()

    #### Reset Extent to Honor Env and Subsets ####
    try:
        self.extent = UTILS.resetExtent(xyCoords)
    except:
        pass

    #### Reset Coordinates for Chordal ####
    if self.useChordal:
        #### Project to XY on Spheroid ####
        self.spheroidCoords = ARC._ss.lonlat_to_xy(xyCoords,
                                                   self.spatialRef)
        self.sliceInfo = UTILS.SpheroidSlice(self.extent,
                                             self.spatialRef)
    else:
        self.spheroidCoords = None
        self.sliceInfo = None

    #### Set Further Attributes ####
    self.badRecords = badIDs
    self.xyCoords = xyCoords
    self.masterField = masterField
    self.gaTable = gaTable
    self.numObs = numObs
    if self.hasZ:
        self.zCoords = zCoords
    else:
        self.zCoords = None
def calculateDistanceBand(inputFC, kNeighs, concept="EUCLIDEAN"):
    """Provides the minimum, maximum and average distance from a
    set of features based on a given neighbor count.

    INPUTS:
    inputFC (str): path to the input feature class
    kNeighs (int): number of neighbors to return
    concept {str, EUCLIDEAN}: EUCLIDEAN or MANHATTAN distance
    """

    #### Assure that kNeighs is Non-Zero ####
    if kNeighs <= 0:
        ARCPY.AddIDMessage("ERROR", 976)
        raise SystemExit()

    #### Set Default Progressor for Neigborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, useChordal=True)
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs=2)

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(inputFC, spatRef=ssdo.spatialRefString)

    #### Assure Enough Observations ####
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs=2)

    #### Process Any Bad Records Encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        err = ERROR.reportBadRecords(cnt, numBadRecs, badRecs,
                                     label=ssdo.oidName)

    #### Assure k-Nearest is Less Than Number of Features ####
    if kNeighs >= N:
        ARCPY.AddIDMessage("ERROR", 975)
        raise SystemExit()

    #### Create k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaConcept = concept.lower()
    gaSearch.init_nearest(0.0, kNeighs, gaConcept)
    neighDist = ARC._ss.NeighborDistances(gaTable, gaSearch)

    #### Set Progressor for Weights Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84007), 0, N, 1)
    distances = NUM.empty((N, ), float)

    for row in xrange(N):
        # Distance to this feature's k-th (farthest) nearest neighbor.
        distances[row] = neighDist[row][-1].max()
        ARCPY.SetProgressorPosition()

    #### Calculate and Report ####
    minDist = distances.min()
    avgDist = distances.mean()
    maxDist = distances.max()
    if ssdo.useChordal:
        # Chordal distances are only valid up to the spheroid maximum.
        hardMaxExtent = ARC._ss.get_max_gcs_distance(ssdo.spatialRef)
        if maxDist > hardMaxExtent:
            ARCPY.AddIDMessage("ERROR", 1609)
            raise SystemExit()

    minDistOut = LOCALE.format("%0.6f", minDist)
    avgDistOut = LOCALE.format("%0.6f", avgDist)
    maxDistOut = LOCALE.format("%0.6f", maxDist)

    #### Create Output Text Table ####
    header = ARCPY.GetIDMessage(84171)
    row1 = [ARCPY.GetIDMessage(84165).format(kNeighs), minDistOut]
    row2 = [ARCPY.GetIDMessage(84166).format(kNeighs), avgDistOut]
    row3 = [ARCPY.GetIDMessage(84167).format(kNeighs), maxDistOut]
    total = [row1, row2, row3]
    tableOut = UTILS.outputTextTable(total, header=header, pad=1)

    #### Add Linear/Angular Unit ####
    distanceOut = ssdo.distanceInfo.outputString
    distanceMeasuredStr = ARCPY.GetIDMessage(84344).format(distanceOut)
    tableOut += "\n%s\n" % distanceMeasuredStr

    #### Report Text Output ####
    ARCPY.AddMessage(tableOut)

    #### Set Derived Output ####
    ARCPY.SetParameterAsText(3, minDist)
    ARCPY.SetParameterAsText(4, avgDist)
    ARCPY.SetParameterAsText(5, maxDist)

    #### Clean Up ####
    del gaTable
def distance2SWM(inputFC, swmFile, masterField, fixed = 0,
                 concept = "EUCLIDEAN", exponent = 1.0, threshold = None,
                 kNeighs = 1, rowStandard = True):
    """Creates a sparse spatial weights matrix (SWM) based on fixed or
    inverse distance with an optional minimum neighbor count.

    INPUTS:
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    fixed (boolean): fixed (1) or inverse (0) distance?
    concept: {str, EUCLIDEAN}: EUCLIDEAN or MANHATTAN
    exponent {float, 1.0}: distance decay
    threshold {float, None}: distance threshold
    kNeighs (int): number of neighbors to return
    rowStandard {bool, True}: row standardize weights?
    """

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField,
                                    types = [0,1])

    #### Read Data ####
    ssdo.obtainDataGA(masterField, minNumObs = 2)
    N = ssdo.numObs
    gaTable = ssdo.gaTable
    if fixed:
        wType = 1
    else:
        wType = 0

    #### Set Default Progressor for Neigborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Set the Distance Threshold ####
    concept, gaConcept = WU.validateDistanceMethod(concept,
                                                   ssdo.spatialRef)
    if threshold == None:
        threshold, avgDist = WU.createThresholdDist(ssdo,
                                        concept = concept)

    #### Assures that the Threshold is Appropriate ####
    gaExtent = UTILS.get92Extent(ssdo.extent)
    threshold, maxSet = WU.checkDistanceThreshold(ssdo, threshold,
                                                  weightType = wType)

    #### If the Threshold is Set to the Max ####
    #### Set to Zero for Script Logic ####
    if maxSet:
        #### All Locations are Related ####
        threshold = SYS.maxint
        if N > 500:
            ARCPY.AddIDMessage("Warning", 717)

    #### Assure k-Nearest is Less Than Number of Features ####
    if kNeighs >= N and fixed:
        ARCPY.AddIDMessage("ERROR", 975)
        raise SystemExit()

    #### Create Distance/k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(threshold, kNeighs, gaConcept)
    neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch,
                                           weight_type = wType,
                                           exponent = exponent,
                                           row_standard = False)

    #### Set Progressor for Weights Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, N, 1)

    #### Initialize Spatial Weights Matrix File ####
    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName,
                             N, rowStandard, inputFC = inputFC,
                             wType = wType, distanceMethod = concept,
                             exponent = exponent, threshold = threshold)

    #### Unique Master ID Dictionary ####
    # FIX: removed the dead `masterDict = {}` local that shadowed this
    # purpose under a duplicated comment; `masterSet` is the real container.
    masterSet = set([])

    for row in xrange(N):
        masterID = int(gaTable[row][2])
        # Duplicate master IDs are a hard error: the SWM mapping must be 1:1.
        if masterID in masterSet:
            ARCPY.AddIDMessage("Error", 644, masterField)
            ARCPY.AddIDMessage("Error", 643)
            raise SystemExit()
        else:
            masterSet.add(masterID)

        neighs, weights = neighWeights[row]
        # Translate GA row indices back into master IDs before writing.
        neighs = [ gaTable[nh][2] for nh in neighs ]

        #### Add Spatial Weights Matrix Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights)

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    swmWriter.close()
    del gaTable

    #### Report Warning/Max Neighbors ####
    swmWriter.reportNeighInfo()

    #### Add Linear/Angular Unit (Distance Based Only) ####
    distanceOut = ssdo.distanceInfo.outputString
    distanceOut = [ARCPY.GetIDMessage(84344).format(distanceOut)]

    #### Report Spatial Weights Summary ####
    swmWriter.report(additionalInfo = distanceOut)

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()
def table2SWM(inputFC, masterField, swmFile, tableFile, rowStandard = True): """Converts a weigths matrix in table format into SWM format. INPUTS: inputFC (str): path to the input feature class masterField (str): field in table that serves as the mapping. swmFile (str): path to the SWM file. tableFile (str) path to the database table rowStandard {bool, True}: row standardize weights? """ #### Set Default Progressor for Neigborhood Structure #### ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84123)) #### Create SSDataObject #### ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC) #### Obtain Unique IDs from Input Feature Class #### ssdo.obtainData(masterField, minNumObs = 2) ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84123)) master2Order = ssdo.master2Order allMaster = master2Order.keys() n = ssdo.numObs #### Create Search Cursor for Input Weights Table #### neighFieldName = "NID" weightFieldName = "WEIGHT" fieldList = [masterField, neighFieldName, weightFieldName] try: rows = DA.SearchCursor(tableFile, fieldList) except: ARCPY.AddIDMessage("Error", 722) raise SystemExit() #### Initialize Spatial Weights Matrix File #### swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName, n, rowStandard, inputFC = inputFC, wType = 8, inputTable = tableFile) #### Set Progressor for SWM Reading/Writing #### c = 0 cnt = UTILS.getCount(tableFile) ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84123), 0, cnt, 1) lastID = "NULL" neighs = [] weights = [] #### Process Spatial Weights #### for row in rows: masterID = row[0] if master2Order.has_key(masterID): neighID = row[1] weight = row[2] if masterID == lastID: #### Append to Current Record #### try: testNeigh = master2Order[neighID] neighs.append(neighID) weights.append(weight) except: #### NID Does Not Exist / Not In Selection #### pass #### Set Progress #### ARCPY.SetProgressorPosition() else: #### Create New Record if not NULL #### if lastID != "NULL": allMaster.remove(lastID) 
swmWriter.swm.writeEntry(lastID, neighs, weights) #### Reset and Initialize Containers #### neighs = [neighID] weights = [weight] else: #### Create First Record #### try: testNeigh = master2Order[neighID] neighs.append(neighID) weights.append(weight) except: #### NID Does Not Exist / Not In Selection #### pass lastID = masterID #### Set Progress #### ARCPY.SetProgressorPosition() else: #### Unique Id Does Not Exist / Not In Selection #### ARCPY.SetProgressorPosition() #### Write Last Record #### swmWriter.swm.writeEntry(lastID, neighs, weights) try: allMaster.remove(lastID) except: pass #### Set Progress #### ARCPY.SetProgressorPosition() #### Write No Neighbor Features #### for masterID in allMaster: swmWriter.swm.writeEntry(masterID, [], []) #### Report Warning/Max Neighbors #### swmWriter.reportNeighInfo() #### Report Spatial Weights Summary #### swmWriter.report() #### Report SWM File is Large #### swmWriter.reportLargeSWM() #### Clean Up #### swmWriter.close() del rows
def network2SWM(inputFC, masterField, swmFile, inputNetwork,
                impedance, cutoff = "#", numberOfNeighs = "#",
                inputBarrier = "#", uturnPolicy = "ALLOW_UTURNS",
                restrictions = "#", hierarchy = 'NO_HIERARCHY',
                searchTolerance = "#", fixed = 0,
                exponent = 1.0, rowStandard = True):
    """Creates spatial weights in SWM format from a combination
    of network data and feature classes.

    INPUTS:
    inputFC (str): path to the input feature class
    masterField (str): field in table that serves as the mapping
    swmFile (str): path to the SWM file
    inputNetwork (str): path to the network dataset (*.nd)
    impedance (str): attribute from network dataset (1)
    cutoff {float, "#"}: impedance threshold
    numberOfNeighs {int, "#"}: number of neighbors to return
    inputBarrier {str, "#"}: path to the input barrier feature class
    uturnPolicy {str, ALLOW_UTURNS}: uturn policy (2)
    restrictions {str, "#"}: attribute from network dataset (3)
    hierarchy {str, NO_HIERARCHY}: NO_HIERARCHY or USE_HIERARCHY
    searchTolerance {linear measure, "#"}: snap tolerance for network (4)
    fixed {int, 0}: Invert impedance as weight or return a weight = 1?
    exponent {float, 1.0}: distance decay
    rowStandard {bool, True}: row standardize weights?

    NOTES:
    (1) E.g. MINUTES and METERS
    (2) E.g. ALLOW_UTURNS or NO_UTURNS
    (3) E.g. ONEWAY
    (4) E.g. 5000 METERS
    """

    #### Check out Network Analyst ####
    try:
        ARCPY.CheckOutExtension("Network")
    except:
        ARCPY.AddIDMessage("ERROR", 849)
        raise SystemExit()

    #### OD Matrix and Layers ####
    ODCostMatrix = "ODMatrix"
    BarriersLayerNames = {"POINT": 'Barriers',
                          "POLYLINE" : 'PolylineBarriers',
                          "LINE" : 'PolylineBarriers',
                          "POLYGON" : 'PolygonBarriers'}
    lines = ODCostMatrix + "\\Lines"
    destFCLayer = "NetSWM_Dest"

    ##### Delete Layers If They Exist ####
    cleanupNetLayer(ODCostMatrix)
    cleanupNetLayer(destFCLayer)
    cleanupNetLayer(lines)

    #### Get Master Field From inputFC ####
    ssdo = SSDO.SSDataObject(inputFC, useChordal = False)
    ssdo.obtainDataGA(masterField, minNumObs = 2)
    master2Order = ssdo.master2Order
    masterFieldObj = ssdo.allFields[masterField.upper()]
    allMaster = master2Order.keys()
    numObs = ssdo.numObs
    numPossNeighs = numObs - 1

    #### Get Spatial Ref From Net Data Set ####
    netDesc = ARCPY.Describe(inputNetwork)
    netSpatialRef = netDesc.SpatialReference
    netSpatName = netSpatialRef.Name

    #### Set Maximum Neighbor Argument ####
    #### Default to 30, then clamp into the valid range [1, numObs-1] ####
    if numberOfNeighs == "#":
        numberOfNeighs = min( [numPossNeighs, 30] )
        ARCPY.AddIDMessage("WARNING", 1012, numberOfNeighs)

    if numberOfNeighs >= numObs:
        numberOfNeighs = numPossNeighs
        ARCPY.AddIDMessage("WARNING", 1013, numberOfNeighs)

    if numberOfNeighs == 0:
        numberOfNeighs = numPossNeighs

    #### All Features are Related. Force Inverse Impedance ####
    if (numObs - numberOfNeighs) <= 1:
        if fixed:
            ARCPY.AddIDMessage("WARNING", 974)
            fixed = 0

    #### Add Self Neighbor For OD Solve ####
    numberOfNeighsOD = numberOfNeighs + 1

    #### Make OD Cost Matrix Layer ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84132))
    odCostMatrixLayer = NET.MakeODCostMatrixLayer(inputNetwork, ODCostMatrix,
                                                  impedance, cutoff,
                                                  numberOfNeighsOD, "#",
                                                  uturnPolicy, restrictions,
                                                  hierarchy, "#",
                                                  "NO_LINES").getOutput(0)

    #### OD Matrix and Layers ####
    naClassNames = NET.GetNAClassNames(odCostMatrixLayer)
    destinationLayer = ODCostMatrix + OS.sep + naClassNames["Destinations"]
    originLayer = ODCostMatrix + OS.sep + naClassNames["Origins"]
    lines = ODCostMatrix + OS.sep + naClassNames["ODLines"]

    #### Add Barriers ####
    if inputBarrier != "" and inputBarrier != "#":
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84147))
        barDesc = ARCPY.Describe(inputBarrier)
        barShapeType = barDesc.ShapeType.upper()
        if barShapeType in BarriersLayerNames:
            barString = naClassNames[BarriersLayerNames[barShapeType]]
            NET.AddLocations(ODCostMatrix, barString, inputBarrier, "",
                             searchTolerance)

    #### Add Master Field to OD for Selection ####
    masterType = UTILS.convertType[masterFieldObj.type]
    NET.AddFieldToAnalysisLayer(ODCostMatrix, naClassNames["Destinations"],
                                masterField, masterType)

    #### Add Destinations ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84133))
    masterToken = "Name " + masterField + " #;"
    masterToken += masterField + " " + masterField + " #"
    NET.AddLocations(ODCostMatrix, naClassNames["Destinations"], inputFC,
                     masterToken, searchTolerance,
                     exclude_restricted_elements = "EXCLUDE")

    #### Initialize Spatial Weights Matrix File ####
    hierarchyBool = hierarchy == 'USE_HIERARCHY'
    addConcept = WU.wTypeDispatch[fixed].split("_")[0]
    forceFixed = (fixed == True)
    swmWriter = WU.SWMWriter(swmFile, masterField,
                             netSpatName, numObs, rowStandard,
                             inputFC = inputFC, wType = 10,
                             inputNet = inputNetwork,
                             impedanceField = impedance,
                             barrierFC = inputBarrier,
                             uturnPolicy = uturnPolicy,
                             restrictions = restrictions,
                             useHierarchy = hierarchyBool,
                             searchTolerance = searchTolerance,
                             addConcept = addConcept,
                             exponent = exponent,
                             forceFixed = forceFixed)

    #### Create FieldList for Subset Searching ####
    totalImpedance = "Total_" + impedance
    fieldList = ";".join( ["NAME", totalImpedance] )

    #### Get Chunks if Necessary (bound origins per solve by row budget) ####
    numOrigins = int(10000000. / numObs)
    allMaster.sort()
    chunkedIDs = UTILS.chunk(allMaster, numOrigins)
    sqlStrings = UTILS.sqlChunkStrings(inputFC, masterField, chunkedIDs)
    numChunks = len(sqlStrings)

    #### Create Field Map for Origins ####
    masterToken = "Name " + masterField + " #"
    orgFieldMap = [masterToken, 'CurbApproach CurbApproach 0',
                   'SourceID SourceID #', 'SourceOID SourceOID #',
                   'PosAlong PosAlong #', 'SideOfEdge SideOfEdge #']
    orgFieldMap = ";".join(orgFieldMap)

    #### Keep Track of Features That Snap to Network ####
    snappedFeatures = set([])

    for chunkNum in xrange(numChunks):
        progMsg = ARCPY.GetIDMessage(84145).format(chunkNum + 1, numChunks)
        ARCPY.SetProgressor("default", progMsg)

        #### Make Origins from Chunk of Destinations ####
        sqlValue = sqlStrings[chunkNum]
        DM.MakeFeatureLayer(destinationLayer, destFCLayer, sqlValue)
        NET.AddLocations(ODCostMatrix, naClassNames["Origins"], destFCLayer,
                         orgFieldMap, "#", "#", "#", "#", "CLEAR")

        #### Solve OD Matrix and Select Data ####
        NET.Solve(ODCostMatrix, "SKIP")

        #### Count the Number of NonZero Spatial Linkages ####
        numLinks = UTILS.getCount(lines)

        #### Create Search Cursor for OD Line Info ####
        rows = ARCPY.SearchCursor(lines, "", None, fieldList)
        row = rows.next()

        #### Set Tool Progressor and Process Information ####
        ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, numLinks, 1)

        #### Process First Record ####
        #### NOTE(review): the first row is re-read by the while loop below; ####
        #### that is only harmless when it is the self pair (ID - ID), which ####
        #### the OD solve with self-neighbor presumably returns first —      ####
        #### confirm against the solver's ordering guarantees.               ####
        ODInfo = row.getValue("NAME")
        lastID, neighID = [ int(i) for i in ODInfo.split(" - ") ]
        impValue = row.getValue(totalImpedance)
        weight = WU.distance2Weight(impValue, wType = fixed,
                                    exponent = exponent)
        neighs = []
        weights = []
        if lastID != neighID:
            neighs.append(neighID)
            weights.append(weight)

        #### Process Remaining Records ####
        progMsg = ARCPY.GetIDMessage(84146).format(chunkNum + 1, numChunks)
        ARCPY.SetProgressor("step", progMsg, 0, numLinks, 1)

        while row:
            #### Get Origin and Destination Unique IDs ####
            ODInfo = row.getValue("NAME")
            masterID, neighID = [ int(i) for i in ODInfo.split(" - ") ]

            #### Obtain Impedance and Create Weight ####
            impValue = row.getValue(totalImpedance)
            weight = WU.distance2Weight(impValue, wType = fixed,
                                        exponent = exponent)

            #### Check Whether it is the Same ID ####
            if masterID == lastID:
                #### Self pairs carry no weight ####
                if masterID != neighID:
                    neighs.append(neighID)
                    weights.append(weight)
            else:
                #### New ID, Add Last ID Result to SWM File ####
                swmWriter.swm.writeEntry(lastID, neighs, weights)
                snappedFeatures.add(lastID)

                #### Reset and Initialize Containers ####
                neighs = []
                weights = []
                if masterID != neighID:
                    neighs.append(neighID)
                    weights.append(weight)
                lastID = masterID

            ARCPY.SetProgressorPosition()
            row = rows.next()

        #### Write Last ID Result ####
        swmWriter.swm.writeEntry(lastID, neighs, weights)
        snappedFeatures.add(lastID)

        #### Clean Up ####
        del rows

    ##### Delete Layers If They Exist ####
    cleanupNetLayer(ODCostMatrix)
    cleanupNetLayer(destFCLayer)
    cleanupNetLayer(lines)

    #### Add Empty SWM Entries for Features Not Snapped to Network ####
    notSnapped = snappedFeatures.symmetric_difference(allMaster)
    for masterID in notSnapped:
        swmWriter.swm.writeEntry(masterID, [], [])

    #### Report Warning/Max Neighbors ####
    swmWriter.reportNeighInfo()

    #### Clean Up ####
    swmWriter.close()

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()
def initialize(self): """Populates the instance of the Spatial Statistics Data Object (SSDataObject) and resolves a default distance threshold if none given. """ #### Shorthand Attributes #### ssdo = self.ssdo varName = self.varName concept = self.concept threshold = self.threshold exponent = self.exponent wType = self.wType weightsFile = self.weightsFile swmFileBool = self.swmFileBool masterField = ssdo.masterField potentialField = self.potentialField #### Get Data Array #### field = ssdo.fields[varName] self.y = field.returnDouble() self.numObs = ssdo.numObs maxSet = False self.fieldNames = [varName] #### Distance Threshold #### if wType in [0, 1, 7]: if threshold == None: threshold, avgDist = WU.createThresholdDist(ssdo, concept=concept) #### Assures that the Threshold is Appropriate #### gaExtent = UTILS.get92Extent(ssdo.extent) fixed = wType == 1 threshold, maxSet = WU.checkDistanceThreshold(ssdo, threshold, weightType=wType) #### If the Threshold is Set to the Max #### #### Set to Zero for Script Logic #### if maxSet: #### All Locations are Related #### if self.numObs > 500: ARCPY.AddIDMessage("WARNING", 717) #### Resolve Self Potential Field (Default to 1.0) #### if potentialField: potField = ssdo.fields[potentialField] self.potVals = potField.returnDouble() self.fieldNames.append(potentialField) #### Warn if Negative Self Weights #### sumNeg = NUM.sum(self.potVals < 0.0) if sumNeg: ARCPY.AddIDMessage("WARNING", 940) #### Set Negative Weights to Zero #### self.potVals = NUM.where(self.potVals < 0.0, 0.0, self.potVals) else: if weightsFile and not swmFileBool: self.potVals = None else: self.potVals = NUM.ones(self.numObs) #### Set Attributes #### self.maxSet = maxSet self.threshold = threshold self.master2Order = ssdo.master2Order self.swmFileBool = swmFileBool
def construct(self):
    """Constructs the neighborhood structure for each feature and
    dispatches the appropriate values for the calculation of the
    statistic (Global Moran's I).

    Neighborhoods come from one of four sources, chosen by instance
    state: an SWM file, a text weights file, polygon contiguity
    (wType 4/5), or an on-the-fly distance search (other wTypes).
    """

    #### Shorthand Attributes ####
    ssdo = self.ssdo
    varName = self.varName
    concept = self.concept
    gaConcept = concept.lower()
    threshold = self.threshold
    exponent = self.exponent
    wType = self.wType
    rowStandard = self.rowStandard
    numObs = self.numObs
    master2Order = self.master2Order
    masterField = ssdo.masterField
    weightsFile = self.weightsFile

    #### Assure that Variance is Larger than Zero ####
    yVar = NUM.var(self.y)
    if NUM.isnan(yVar) or yVar <= 0.0:
        ARCPY.AddIDMessage("Error", 906)
        raise SystemExit()

    #### Create Deviation Variables ####
    self.yBar = NUM.mean(self.y)
    self.yDev = self.y - self.yBar

    #### Create Base Data Structures/Variables ####
    self.numer = 0.0
    self.denom = NUM.sum(self.yDev**2.0)
    self.rowSum = NUM.zeros(numObs)
    self.colSum = NUM.zeros(numObs)
    self.s0 = 0
    self.s1 = 0
    self.wij = {}

    #### Set Neighborhood Structure Type ####
    if self.weightsFile:
        if self.swmFileBool:
            #### Open Spatial Weights and Obtain Chars ####
            swm = WU.SWMReader(weightsFile)
            N = swm.numObs
            rowStandard = swm.rowStandard

            #### Check to Assure Complete Set of Weights ####
            if numObs > N:
                ARCPY.AddIDMessage("Error", 842, numObs, N)
                raise SystemExit()

            #### Check if Selection Set ####
            isSubSet = False
            if numObs < N:
                isSubSet = True
            #### Iterate all SWM entries; non-selected IDs skipped below ####
            iterVals = xrange(N)
        else:
            #### Warning for GWT with Bad Records/Selection ####
            if ssdo.selectionSet or ssdo.badRecords:
                ARCPY.AddIDMessage("WARNING", 1029)

            #### Build Weights Dictionary ####
            weightDict = WU.buildTextWeightDict(weightsFile, master2Order)
            iterVals = master2Order.keys()
            N = numObs
    elif wType in [4, 5]:
        #### Polygon Contiguity ####
        if wType == 4:
            contiguityType = "ROOK"
        else:
            contiguityType = "QUEEN"
        contDict = WU.polygonNeighborDict(ssdo.inputFC, ssdo.oidName,
                                          contiguityType = contiguityType)
        iterVals = master2Order.keys()
        N = numObs
    else:
        #### Distance-based: on-the-fly GA search ####
        gaTable = ssdo.gaTable
        gaSearch = GAPY.ga_nsearch(gaTable)
        if wType == 7:
            #### Zone of Indiff, All Related to All ####
            gaSearch.init_nearest(threshold, numObs, gaConcept)
        else:
            #### Inverse and Fixed Distances ####
            gaSearch.init_nearest(threshold, 0, gaConcept)
        iterVals = range(numObs)
        N = numObs
        neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch,
                                               weight_type = wType,
                                               exponent = exponent,
                                               row_standard = rowStandard)

    #### Create Progressor ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84007), 0, N, 1)

    #### Create Neighbor Info Class ####
    ni = WU.NeighborInfo(masterField)

    #### Calculation For Each Feature ####
    for i in iterVals:
        if self.swmFileBool:
            #### Using SWM File ####
            info = swm.swm.readEntry()
            masterID = info[0]
            if master2Order.has_key(masterID):
                rowInfo = WU.getWeightsValuesSWM(info, master2Order,
                                                 self.yDev,
                                                 rowStandard = rowStandard,
                                                 isSubSet = isSubSet)
                includeIt = True
            else:
                includeIt = False
        elif self.weightsFile and not self.swmFileBool:
            #### Text Weights ####
            masterID = i
            includeIt = True
            rowInfo = WU.getWeightsValuesText(masterID, master2Order,
                                              weightDict, self.yDev)
        elif wType in [4, 5]:
            #### Polygon Contiguity ####
            masterID = i
            includeIt = True
            rowInfo = WU.getWeightsValuesCont(masterID, master2Order,
                                              contDict, self.yDev,
                                              rowStandard = rowStandard)
        else:
            #### Distance Based ####
            masterID = gaTable[i][0]
            includeIt = True
            rowInfo = WU.getWeightsValuesOTF(neighWeights, i, self.yDev)

        #### Subset Boolean for SWM File ####
        if includeIt:
            #### Parse Row Info ####
            orderID, yiDev, nhIDs, nhVals, weights = rowInfo

            #### Assure Neighbors Exist After Selection ####
            nn, nhIDs, nhVals, weights = ni.processInfo(masterID, nhIDs,
                                                        nhVals, weights)

            if nn:
                #### Process Feature Contribution to Moran's I ####
                self.processRow(orderID, yiDev, nhIDs, nhVals, weights)

        #### Reset Progessor ####
        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    if self.swmFileBool:
        swm.close()

    #### Report on Features with No Neighbors ####
    ni.reportNoNeighbors()

    #### Report on Features with Large Number of Neighbors ####
    ni.reportWarnings()
    ni.reportMaximums()
    self.neighInfo = ni
def construct(self):
    """Constructs the neighborhood structure for each feature and
    dispatches the appropriate values for the calculation of the
    statistic (Local Gi*), then derives hot/cold-spot bins from the
    (pseudo) p-values, optionally FDR-corrected.
    """

    #### Shorthand Attributes ####
    ssdo = self.ssdo
    varName = self.varName
    concept = self.concept
    gaConcept = concept.lower()
    threshold = self.threshold
    exponent = self.exponent
    wType = self.wType
    numObs = self.numObs
    master2Order = self.master2Order
    masterField = ssdo.masterField
    weightsFile = self.weightsFile
    potentialField = self.potentialField

    #### Assure that Variance is Larger than Zero ####
    yVar = NUM.var(self.y)
    if NUM.isnan(yVar) or yVar <= 0.0:
        ARCPY.AddIDMessage("ERROR", 906)
        raise SystemExit()

    #### Create Summed Variables (global moments used per feature) ####
    self.intRange = NUM.arange(numObs)
    self.floatN = self.numObs * 1.0
    ySum = self.y.sum()
    ySum2 = (self.y**2.0).sum()
    self.yBar = ySum / self.floatN
    self.S = NUM.sqrt((ySum2 / self.floatN) - self.yBar**2.0)
    self.nm1 = self.floatN - 1.0

    #### Create Base Data Structures/Variables ####
    self.gi = NUM.zeros(numObs)
    self.pVals = NUM.ones(numObs)
    if self.permutations:
        self.pseudoPVals = NUM.ones(numObs)

    #### Set Neighborhood Structure Type ####
    if self.weightsFile:
        if self.swmFileBool:
            #### Open Spatial Weights and Obtain Chars ####
            swm = WU.SWMReader(weightsFile)
            N = swm.numObs
            rowStandard = swm.rowStandard
            self.swm = swm

            #### Check to Assure Complete Set of Weights ####
            if numObs > N:
                ARCPY.AddIDMessage("ERROR", 842, numObs, N)
                raise SystemExit()

            #### Check if Selection Set ####
            isSubSet = False
            if numObs < N:
                isSubSet = True
            iterVals = xrange(N)
        else:
            #### Warning for GWT with Bad Records/Selection ####
            if ssdo.selectionSet or ssdo.badRecords:
                ARCPY.AddIDMessage("WARNING", 1029)

            #### Build Weights Dictionary ####
            weightDict = WU.buildTextWeightDict(weightsFile, master2Order)
            iterVals = master2Order.keys()
            N = numObs
    elif wType in [4, 5]:
        #### Polygon Contiguity ####
        if wType == 4:
            contiguityType = "ROOK"
        else:
            contiguityType = "QUEEN"
        contDict = WU.polygonNeighborDict(ssdo.inputFC, ssdo.oidName,
                                          contiguityType=contiguityType)
        iterVals = master2Order.keys()
        N = numObs
    else:
        gaTable = ssdo.gaTable
        gaSearch = GAPY.ga_nsearch(gaTable)
        if wType == 7:
            #### Zone of Indiff, All Related to All ####
            gaSearch.init_nearest(threshold, numObs, gaConcept)
        else:
            #### Inverse and Fixed Distances ####
            gaSearch.init_nearest(threshold, self.numNeighs, gaConcept)
        iterVals = range(numObs)
        N = numObs
        #### Gi* includes the feature itself in its own neighborhood ####
        neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch,
                                               weight_type=wType,
                                               exponent=exponent,
                                               row_standard=False,
                                               include_self=True)

    #### Create Progressor ####
    msg = ARCPY.GetIDMessage(84007)
    if self.permutations:
        msg += ": Using Permutations = %i" % self.permutations
    ARCPY.SetProgressor("step", msg, 0, N, 1)

    #### Create Neighbor Info Class ####
    ni = WU.NeighborInfo(masterField)

    #### Calculation For Each Feature ####
    for i in iterVals:
        if self.swmFileBool:
            #### Using SWM File ####
            info = swm.swm.readEntry()
            masterID = info[0]
            if master2Order.has_key(masterID):
                rowInfo = WU.getWeightsValuesSWM(info, master2Order,
                                                 self.y,
                                                 rowStandard=rowStandard,
                                                 potVals=self.potVals)
                includeIt = True
            else:
                includeIt = False
        elif self.weightsFile and not self.swmFileBool:
            #### Text Weights ####
            masterID = i
            includeIt = True
            rowInfo = WU.getWeightsValuesText(masterID, master2Order,
                                              weightDict, self.y,
                                              potVals=self.potVals,
                                              allowSelf=True)
        elif wType in [4, 5]:
            #### Polygon Contiguity ####
            masterID = i
            includeIt = True
            rowInfo = WU.getWeightsValuesCont(masterID, master2Order,
                                              contDict, self.y,
                                              rowStandard=False,
                                              potVals=self.potVals)
        else:
            #### Distance Based ####
            masterID = gaTable[i][0]
            includeIt = True
            rowInfo = WU.getWeightsValuesOTF_Potent(neighWeights, i,
                                                    self.y, self.potVals)

        #### Subset Boolean for SWM File ####
        if includeIt:
            #### Parse Row Info ####
            orderID, yiVal, nhIDs, nhVals, weights = rowInfo

            #### Assure Neighbors Exist After Selection ####
            nn, nhIDs, nhVals, weights = ni.processInfo(masterID, nhIDs,
                                                        nhVals, weights)

            if nn:
                #### Calculate Local G ####
                self.calculateGI(orderID, yiVal, nhVals, weights)

        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    if self.swmFileBool:
        swm.close()

    #### Report on Features with No Neighbors ####
    ni.reportNoNeighbors(failAllNoNeighs=False)
    self.setNullValues(ni.idsNoNeighs)

    #### Report on Features with Large Number of Neighbors ####
    ni.reportWarnings()
    ni.reportMaximums()
    self.neighInfo = ni

    #### Set p-values for Gi Bins ####
    if self.permutations:
        #### Use Pseudo p-values ####
        pv = self.pseudoPVals
    else:
        #### Use Traditional p-values ####
        pv = self.pVals

    toolMSG = ARCPY.GetIDMessage(84466)
    if self.applyFDR:
        #### Set Bins Using FDR ####
        msg = ARCPY.GetIDMessage(84472).format(toolMSG)
        ARCPY.SetProgressor("default", msg)
        self.giBins = STATS.fdrTransform(pv, self.gi)
    else:
        msg = ARCPY.GetIDMessage(84473).format(toolMSG)
        ARCPY.SetProgressor("default", msg)
        self.giBins = STATS.pValueBins(pv, self.gi)
def obtainDataGA(self, masterField, fields = [], types = [0,1,2,3,5,6],
                 minNumObs = 0, warnNumObs = 0):
    """Takes a list of field names and returns it in a dictionary
    structure.

    INPUTS:
    masterField (str): name of field being used as the master
    fields {list, []}: name(s) of the field to be returned
    types (list): types of data allowed to be returned (1)
    minNumObs {int, 0}: minimum number of observations for error
    warnNumObs {int, 0}: minimum number of observations for warning

    ATTRIBUTES:
    gaTable (structure): instance of the GA Table
    fields (dict): fieldName = instance of FCField
    master2Order (dict): masterID = order in lists
    order2Master (dict): order in lists = masterID
    masterField (str): field that serves as the master
    badRecords (list): master IDs that could not be read
    xyCoords (array, nunObs x 2): xy-coordinates for feature centroids

    NOTES:
    (1) No Text Fields; short [0], long [1], float [2], double[3]

    NOTE(review): fields/types use mutable default arguments; they are
    only read (never mutated) here, so behavior is unaffected, but new
    code should prefer None/tuple defaults.
    """

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(self.allFields, masterField,
                                    types = [0,1,5])

    #### Set MasterIsOID Boolean ####
    self.masterIsOID = masterField == self.oidName

    #### Set Master and Data Indices ####
    #### GA table rows are (id, xy, master?, data...); offsets shift ####
    #### depending on whether the OID doubles as the master field.   ####
    if self.masterIsOID:
        self.masterColumnIndex = 0
        self.dataColumnIndex = 2
        fieldList = []
    else:
        self.masterColumnIndex = 2
        self.dataColumnIndex = 3
        fieldList = [masterField]

    #### Validation and Initialization of Data Fields ####
    numFields = len(fields)
    for field in fields:
        fType = ERROR.checkField(self.allFields, field, types = types)
        fieldList.append(field)
        self.fields[field] = self.allFields[field]

    #### ZCoords Are Last ####
    getZBool = self.hasZ and (not self.renderType)
    if getZBool:
        fieldList.append("SHAPE&Z")

    #### Create GA Data Structure ####
    cnt = UTILS.getCount(self.inputFC)
    fieldList = tuple(fieldList)
    gaTable, gaInfo = WU.gaTable(self.inputFC, fieldNames = fieldList,
                                 spatRef = self.spatialRefString)

    #### Check Whether the Number of Features is Appropriate ####
    numObs = gaInfo[0]
    ERROR.checkNumberOfObs(numObs, minNumObs = minNumObs,
                           warnNumObs = warnNumObs,
                           silentWarnings = self.silentWarnings)

    #### Process any bad records encountered ####
    numBadIDs = cnt - numObs
    if numBadIDs:
        badIDs = WU.parseGAWarnings(gaTable.warnings)
        if not self.silentWarnings:
            ERROR.reportBadRecords(cnt, numBadIDs, badIDs,
                                   label = self.oidName)
    else:
        badIDs = []

    #### Initialization of Centroids ####
    xyCoords = NUM.empty((numObs, 2), float)

    #### Z Coords ####
    if self.hasZ:
        zCoords = NUM.empty((numObs, ), float)

    #### Create Empty Data Arrays ####
    for fieldName, fieldObj in self.fields.iteritems():
        fieldObj.createDataArray(numObs)

    #### Populate SSDataObject ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84001), 0, numObs, 1)
    for row in xrange(numObs):
        rowInfo = gaTable[row]
        x,y = rowInfo[1]
        masterID = int(rowInfo[self.masterColumnIndex])
        #### Duplicate master IDs are a fatal error ####
        if self.master2Order.has_key(masterID):
            ARCPY.AddIDMessage("ERROR", 644, masterField)
            ARCPY.AddIDMessage("ERROR", 643)
            raise SystemExit()
        else:
            self.master2Order[masterID] = row
            self.order2Master[row] = masterID
            xyCoords[row] = (x, y)
        if numFields:
            restFields = rowInfo[self.dataColumnIndex:]
            for fieldInd, fieldName in enumerate(fields):
                self.fields[fieldName].data[row] = restFields[fieldInd]
        if self.hasZ:
            if getZBool:
                zCoords[row] = rowInfo[-1]
            else:
                #### Z present but not requested: fill with NaN ####
                zCoords[row] = NUM.nan

        ARCPY.SetProgressorPosition()

    #### Set the Hidden Fields (E.g. Not in Use) ####
    self.setHiddenFields()

    #### Reset Extent to Honor Env and Subsets ####
    try:
        self.extent = UTILS.resetExtent(xyCoords)
    except:
        pass

    #### Reset Coordinates for Chordal ####
    if self.useChordal:
        #### Project to XY on Spheroid ####
        self.spheroidCoords = ARC._ss.lonlat_to_xy(xyCoords,
                                                   self.spatialRef)
        self.sliceInfo = UTILS.SpheroidSlice(self.extent,
                                             self.spatialRef)
    else:
        self.spheroidCoords = None
        self.sliceInfo = None

    #### Set Further Attributes ####
    self.badRecords = badIDs
    self.xyCoords = xyCoords
    self.masterField = masterField
    self.gaTable = gaTable
    self.numObs = numObs
    if self.hasZ:
        self.zCoords = zCoords
    else:
        self.zCoords = None
def outputResults(self):
    """Creates output feature class for Local I.

    Builds the candidate output fields (Local I, z-score, p-value,
    optional pseudo p-value, cluster/outlier type), derives field
    aliases from the weights configuration, and writes them — along
    with the analysis input fields — to self.outputFC.

    RETURNS: list of the first three field names plus the CO-type
    field name.
    """

    #### Prepare Derived Variables for Output Feature Class ####
    outPath, outName = OS.path.split(self.outputFC)
    fieldOrder = UTILS.getFieldNames(liFieldNames, outPath)
    fieldData = [self.li, self.zi, self.pVals]

    #### Add Pseudo-P Field ####
    if self.permutations:
        fieldOrder.append(liPseudoFieldName)
        fieldData.append(self.pseudoPVals)

    #### Add CO Type Field ####
    fieldOrder.append(liCOFieldName)

    #### BUG FIX: `self.pVals == NUM.nan` is always all-False because ####
    #### NaN never compares equal to itself; NUM.isnan is required to ####
    #### locate null p-values so their bins can be nulled out.        ####
    whereNull = NUM.where(NUM.isnan(self.pVals))[0]
    if len(whereNull):
        outBins = list(self.moranBins)
        for ind in whereNull:
            outBins[ind] = NUM.nan
        fieldData.append(outBins)
    else:
        fieldData.append(self.moranBins)

    #### Create Alias Field Names ####
    if self.wType == 8:
        #### Table-based weights: alias carries the table name ####
        addString = OS.path.basename(self.weightsFile)
        rowStandard = False
    elif self.wType in [0, 1, 7]:
        #### Distance-based: alias carries the threshold used ####
        if self.maxSet:
            addString = "0"
        else:
            addString = str(int(self.threshold))
        rowStandard = self.rowStandard
    else:
        addString = None
        rowStandard = self.rowStandard

    aliasList = WU.createSpatialFieldAliases(fieldOrder,
                                             addString=addString,
                                             wType=self.wType,
                                             exponent=self.exponent,
                                             rowStandard=rowStandard)
    if self.applyFDR:
        aliasList[-1] += "_FDR"

    #### Create/Populate Dictionary of Candidate Fields ####
    candidateFields = {}
    for fieldInd, fieldName in enumerate(fieldOrder):
        if fieldName == liCOFieldName:
            fType = "TEXT"
            length = 2
        else:
            fType = "DOUBLE"
            length = None
        candidateField = SSDO.CandidateField(fieldName, fType,
                                             fieldData[fieldInd],
                                             alias=aliasList[fieldInd],
                                             length=length)
        candidateFields[fieldName] = candidateField

    #### Input Fields to Copy to Output FC ####
    appendFields = [i for i in self.fieldNames]

    #### Add Date-Time Field If Applicable ####
    if self.swmFileBool:
        if self.swm.wType == 9:
            if self.ssdo.allFields.has_key(self.swm.timeField.upper()):
                appendFields.insert(0, self.swm.timeField.upper())

    #### Write Data to Output Feature Class ####
    self.ssdo.output2NewFC(self.outputFC, candidateFields,
                           appendFields=appendFields,
                           fieldOrder=fieldOrder)

    outFieldSet = fieldOrder[0:3] + [fieldOrder[-1]]
    return outFieldSet
def outputResults(self):
    """Creates output feature class for Local I.

    Builds the candidate output fields (Local I, z-score, p-value,
    optional pseudo p-value, cluster/outlier type), derives field
    aliases from the weights configuration, and writes them — along
    with the analysis input fields — to self.outputFC.

    RETURNS: list of the first three field names plus the CO-type
    field name.
    """

    #### Prepare Derived Variables for Output Feature Class ####
    outPath, outName = OS.path.split(self.outputFC)
    fieldOrder = UTILS.getFieldNames(liFieldNames, outPath)
    fieldData = [self.li, self.zi, self.pVals]

    #### Add Pseudo-P Field ####
    if self.permutations:
        fieldOrder.append(liPseudoFieldName)
        fieldData.append(self.pseudoPVals)

    #### Add CO Type Field ####
    fieldOrder.append(liCOFieldName)

    #### BUG FIX: `self.pVals == NUM.nan` is always all-False because ####
    #### NaN never compares equal to itself; NUM.isnan is required to ####
    #### locate null p-values so their bins can be nulled out.        ####
    whereNull = NUM.where(NUM.isnan(self.pVals))[0]
    if len(whereNull):
        outBins = list(self.moranBins)
        for ind in whereNull:
            outBins[ind] = NUM.nan
        fieldData.append(outBins)
    else:
        fieldData.append(self.moranBins)

    #### Create Alias Field Names ####
    if self.wType == 8:
        #### Table-based weights: alias carries the table name ####
        addString = OS.path.basename(self.weightsFile)
        rowStandard = False
    elif self.wType in [0, 1, 7]:
        #### Distance-based: alias carries the threshold used ####
        if self.maxSet:
            addString = "0"
        else:
            addString = str(int(self.threshold))
        rowStandard = self.rowStandard
    else:
        addString = None
        rowStandard = self.rowStandard

    aliasList = WU.createSpatialFieldAliases(fieldOrder,
                                             addString = addString,
                                             wType = self.wType,
                                             exponent = self.exponent,
                                             rowStandard = rowStandard)
    if self.applyFDR:
        aliasList[-1] += "_FDR"

    #### Create/Populate Dictionary of Candidate Fields ####
    candidateFields = {}
    for fieldInd, fieldName in enumerate(fieldOrder):
        if fieldName == liCOFieldName:
            fType = "TEXT"
            length = 2
        else:
            fType = "DOUBLE"
            length = None
        candidateField = SSDO.CandidateField(fieldName, fType,
                                             fieldData[fieldInd],
                                             alias = aliasList[fieldInd],
                                             length = length)
        candidateFields[fieldName] = candidateField

    #### Input Fields to Copy to Output FC ####
    appendFields = [i for i in self.fieldNames]

    #### Add Date-Time Field If Applicable ####
    if self.swmFileBool:
        if self.swm.wType == 9:
            if self.ssdo.allFields.has_key(self.swm.timeField.upper()):
                appendFields.insert(0, self.swm.timeField.upper())

    #### Write Data to Output Feature Class ####
    self.ssdo.output2NewFC(self.outputFC, candidateFields,
                           appendFields = appendFields,
                           fieldOrder = fieldOrder)

    outFieldSet = fieldOrder[0:3] + [fieldOrder[-1]]
    return outFieldSet
def unweightedCalc(self): """Performs unweighted k-function.""" #### Attribute Shortcuts #### ssdo = self.ssdo reduce = self.reduce simulate = self.simulate ripley = self.ripley if reduce: studyArea2Use = self.reduceArea else: studyArea2Use = self.studyArea self.ld = COLL.defaultdict(float) if self.permutations: self.ldMin = COLL.defaultdict(float) self.ldMax = COLL.defaultdict(float) for order in self.cutoffOrder: self.ldMin[order] = 99999999999. permsPlus = self.permutations + 1 for perm in xrange(0, permsPlus): #### Permutation Progressor #### pmsg = ARCPY.GetIDMessage(84184) progressMessage = pmsg.format(perm, permsPlus) ARCPY.SetProgressor("default", progressMessage) #### Permutate the XY #### if perm != 0: self.permutateTable() gaSearch = GAPY.ga_nsearch(self.kTable) gaSearch.init_nearest(self.stepMax, 0, "euclidean") N = len(self.kTable) numIDs = len(self.ids) kij = COLL.defaultdict(float) for i in xrange(N): row = self.kTable[i] id0 = row[0] if id0 in self.ids: x0,y0 = row[1] gaSearch.search_by_idx(i) for nh in gaSearch: neighInfo = self.kTable[nh.idx] nhID = neighInfo[0] x1,y1 = neighInfo[1] dist = WU.euclideanDistance(x0,x1,y0,y1) if ripley: value = self.returnRipley(id0, dist) else: value = 1.0 for order in self.reverseOrder: cutoff = self.cutoffs[order] if dist > cutoff: break kij[order] += value ARCPY.SetProgressorPosition() #### Calculate Stats USing Dictionaries #### weightSumVal = numIDs * (numIDs - 1.0) denom = NUM.pi * weightSumVal for order in self.cutoffOrder: res = kij[order] numer = res * studyArea2Use permResult = NUM.sqrt( (numer/denom) ) if perm: self.ldMin[order] = min(self.ldMin[order], permResult) self.ldMax[order] = max(self.ldMax[order], permResult) else: self.ld[order] = permResult
def collectEvents(ssdo, outputFC):
    """Converts event data into weighted point data by dissolving all
    coincident points into unique points with a new count field that
    contains the number of original features at that location.

    INPUTS:
    ssdo (obj): instance of SSDataObject for the input feature class
    outputFC (str): path to the output feature class

    RETURNS:
    tuple: (count field name, max count, total obs N, num unique points)
    """
    #### Set Default Progressor for Neigborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Validate Output Workspace ####
    ERROR.checkOutputPath(outputFC)

    #### True Centroid Warning For Non-Point FCs ####
    if ssdo.shapeType.upper() != "POINT":
        ARCPY.AddIDMessage("WARNING", 1021)

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.inputFC,
                                 spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    cnt = UTILS.getCount(ssdo.inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 4)
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs = 4)

    #### Process Any Bad Records Encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        if not ssdo.silentWarnings:
            ERROR.reportBadRecords(cnt, numBadRecs, badRecs,
                                   label = ssdo.oidName)

    #### Create k-Nearest Neighbor Search Type ####
    #### (distance 0.0 returns only exactly coincident neighbors) ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(0.0, 0, "euclidean")

    #### Create Output Feature Class ####
    outPath, outName = OS.path.split(outputFC)
    try:
        DM.CreateFeatureclass(outPath, outName, "POINT", "", ssdo.mFlag,
                              ssdo.zFlag, ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Add Count Field ####
    countFieldNameOut = ARCPY.ValidateFieldName(countFieldName, outPath)
    UTILS.addEmptyField(outputFC, countFieldNameOut, "LONG")
    fieldList = ["SHAPE@", countFieldNameOut]

    #### Set Insert Cursor ####
    rowsOut = DA.InsertCursor(outputFC, fieldList)

    #### Set Progressor for Calculation ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84007), 0, N, 1)

    #### BUG FIX: the original iterated "for row in rowsIN" while    ####
    #### calling rowsIN.remove(nh.idx) inside the loop.  Mutating a  ####
    #### list during iteration skips elements, so some locations     ####
    #### could be silently dropped from the output.  A visited set   ####
    #### preserves the intent (each coincident group is dissolved    ####
    #### exactly once) without mutating the active iterator.         ####
    visited = set()
    maxCount = 0
    numUnique = 0
    for row in xrange(N):
        if row in visited:
            #### Already Dissolved Into a Prior Unique Point ####
            continue

        #### Get Row Coords ####
        rowInfo = gaTable[row]
        x0, y0 = rowInfo[1]
        count = 1

        #### Search For Exact Coord Match ####
        gaSearch.search_by_idx(row)
        for nh in gaSearch:
            count += 1
            visited.add(nh.idx)
            ARCPY.SetProgressorPosition()

        #### Keep Track of Max Count ####
        maxCount = max([count, maxCount])

        #### Create Output Point ####
        pnt = (x0, y0, ssdo.defaultZ)

        #### Create and Populate New Feature ####
        rowResult = [pnt, count]
        rowsOut.insertRow(rowResult)
        numUnique += 1
        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    del rowsOut, gaTable

    return countFieldNameOut, maxCount, N, numUnique
def collectEvents(ssdo, outputFC):
    """Converts event data into weighted point data by dissolving all
    coincident points into unique points with a new count field that
    contains the number of original features at that location.

    NOTE(review): this definition is a verbatim duplicate of an
    earlier collectEvents in this file; the later definition shadows
    the earlier one.  Consider removing one copy.

    INPUTS:
    ssdo (obj): instance of SSDataObject for the input feature class
    outputFC (str): path to the output feature class

    RETURNS:
    tuple: (count field name, max count, total obs N, num unique points)
    """
    #### Set Default Progressor for Neigborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Validate Output Workspace ####
    ERROR.checkOutputPath(outputFC)

    #### True Centroid Warning For Non-Point FCs ####
    if ssdo.shapeType.upper() != "POINT":
        ARCPY.AddIDMessage("WARNING", 1021)

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.inputFC,
                                 spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    cnt = UTILS.getCount(ssdo.inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 4)
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs = 4)

    #### Process Any Bad Records Encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        if not ssdo.silentWarnings:
            ERROR.reportBadRecords(cnt, numBadRecs, badRecs,
                                   label = ssdo.oidName)

    #### Create k-Nearest Neighbor Search Type ####
    #### (distance 0.0 returns only exactly coincident neighbors) ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(0.0, 0, "euclidean")

    #### Create Output Feature Class ####
    outPath, outName = OS.path.split(outputFC)
    try:
        DM.CreateFeatureclass(outPath, outName, "POINT", "", ssdo.mFlag,
                              ssdo.zFlag, ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Add Count Field ####
    countFieldNameOut = ARCPY.ValidateFieldName(countFieldName, outPath)
    UTILS.addEmptyField(outputFC, countFieldNameOut, "LONG")
    fieldList = ["SHAPE@", countFieldNameOut]

    #### Set Insert Cursor ####
    rowsOut = DA.InsertCursor(outputFC, fieldList)

    #### Set Progressor for Calculation ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84007), 0, N, 1)

    #### BUG FIX: the original iterated "for row in rowsIN" while    ####
    #### calling rowsIN.remove(nh.idx) inside the loop.  Mutating a  ####
    #### list during iteration skips elements, so some locations     ####
    #### could be silently dropped from the output.  A visited set   ####
    #### preserves the intent (each coincident group is dissolved    ####
    #### exactly once) without mutating the active iterator.         ####
    visited = set()
    maxCount = 0
    numUnique = 0
    for row in xrange(N):
        if row in visited:
            #### Already Dissolved Into a Prior Unique Point ####
            continue

        #### Get Row Coords ####
        rowInfo = gaTable[row]
        x0, y0 = rowInfo[1]
        count = 1

        #### Search For Exact Coord Match ####
        gaSearch.search_by_idx(row)
        for nh in gaSearch:
            count += 1
            visited.add(nh.idx)
            ARCPY.SetProgressorPosition()

        #### Keep Track of Max Count ####
        maxCount = max([count, maxCount])

        #### Create Output Point ####
        pnt = (x0, y0, ssdo.defaultZ)

        #### Create and Populate New Feature ####
        rowResult = [pnt, count]
        rowsOut.insertRow(rowResult)
        numUnique += 1
        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    del rowsOut, gaTable

    return countFieldNameOut, maxCount, N, numUnique
def distance2Weights(ssdo, neighborType = 1, distanceBand = 0.0,
                     numNeighs = 0, distanceType = "euclidean",
                     exponent = 1.0, rowStandard = True,
                     includeSelf = False):
    """Builds a PySAL sparse spatial weights matrix from the ArcGIS
    neighborhood searching structure.

    INPUTS:
    ssdo (class): instance of SSDataObject [1]
    neighborType {int, 1}: 0 = inverse distance, 1 = fixed distance,
                           2 = k-nearest-neighbors, 3 = delaunay
    distanceBand {float, 0.0}: neighbor cutoff for inverse/fixed distance
    numNeighs {int, 0}: neighbor count for k-nearest-neighbor; also a
                        minimum neighbor count for inverse/fixed distance
    distanceType {str, euclidean}: manhattan or euclidean distance [2]
    exponent {float, 1.0}: distance decay factor for inverse distance
    rowStandard {bool, True}: row standardize the spatial weights?
    includeSelf {bool, False}: return self as a neighbor?

    NOTES:
    (1) Data must already be obtained using ssdo.obtainDataGA()
    (2) Chordal Distance is used for GCS Data
    """
    #### Initialize Neighborhood Search ####
    gaSearch = GAPY.ga_nsearch(ssdo.gaTable)

    if neighborType == 3:
        #### Delaunay Triangulation Neighbors ####
        gaSearch.init_delaunay()
        neighSearch = ARC._ss.NeighborWeights(ssdo.gaTable, gaSearch,
                                              weight_type = 1)
    else:
        #### Distance-Based Neighbors ####
        if neighborType == 2:
            #### k-Nearest: No Distance Cutoff, Fixed Weight Type ####
            distanceBand = 0.0
            weightType = 1
        else:
            weightType = neighborType
        concept, gaConcept = WU.validateDistanceMethod(distanceType.upper(),
                                                       ssdo.spatialRef)
        gaSearch.init_nearest(distanceBand, numNeighs, gaConcept)
        neighSearch = ARC._ss.NeighborWeights(ssdo.gaTable, gaSearch,
                                              weight_type = weightType,
                                              exponent = exponent,
                                              include_self = includeSelf)

    #### Harvest Neighbor/Weight Pairs Per Feature (Order ID Keys) ####
    neighborDict = {}
    weightDict = {}
    for orderID in range(len(neighSearch)):
        nhOrderIDs, nhWeights = neighSearch[orderID]
        neighborDict[orderID] = nhOrderIDs
        weightDict[orderID] = nhWeights

    #### Assemble PySAL Weights Object ####
    w = PYSAL.W(neighborDict, weightDict)
    if rowStandard:
        w.transform = 'R'

    return w
def polygon2SWM(inputFC, swmFile, masterField, concept = "EUCLIDEAN",
                kNeighs = 0, rowStandard = True, contiguityType = "ROOK"):
    """Creates a sparse spatial weights matrix (SWM) based on polygon
    contiguity.

    INPUTS:
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    concept: {str, EUCLIDEAN}: EUCLIDEAN or MANHATTAN
    kNeighs {int, 0}: number of neighbors to return (1)
    rowStandard {bool, True}: row standardize weights?
    contiguityType {str, Rook}: {Rook = Edges Only, Queen = Edges/Vertices}

    NOTES:
    (1) kNeighs is used if polygon is not contiguous. E.g. Islands
    """
    #### Set Default Progressor for Neigborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 2)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField,
                                    types = [0,1])

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.catPath, [masterField],
                                 spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs = 2)

    #### Assure k-Nearest is Less Than Number of Features ####
    if kNeighs >= N:
        ARCPY.AddIDMessage("ERROR", 975)
        raise SystemExit()

    #### Create Nearest Neighbor Search Type For Islands ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    concept, gaConcept = WU.validateDistanceMethod(concept,
                                                   ssdo.spatialRef)
    gaSearch.init_nearest(0.0, kNeighs, gaConcept)
    if kNeighs > 0:
        forceNeighbor = True
        neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch,
                                               weight_type = 1,
                                               row_standard = False)
    else:
        forceNeighbor = False
        #### BUG FIX: the original assigned "neighSearch = None" on ####
        #### this path, leaving "neighWeights" (the name actually   ####
        #### used below) unbound — a latent NameError if the island ####
        #### branch were ever reached without forceNeighbor.        ####
        neighWeights = None

    #### Create Polygon Neighbors ####
    polyNeighborDict = WU.polygonNeighborDict(inputFC, masterField,
                                 contiguityType = contiguityType)

    #### Write Poly Neighbor List (Dict) ####
    #### Set Progressor for SWM Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, N, 1)

    #### Initialize Spatial Weights Matrix File ####
    if contiguityType == "ROOK":
        wType = 4
    else:
        wType = 5

    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName,
                             N, rowStandard, inputFC = inputFC,
                             wType = wType, distanceMethod = concept,
                             numNeighs = kNeighs)

    #### Keep Track of Polygons w/o Neighbors ####
    islandPolys = []

    #### Write Polygon Contiguity to SWM File ####
    for row in xrange(N):
        rowInfo = gaTable[row]
        oid = rowInfo[0]
        masterID = rowInfo[2]
        neighs = polyNeighborDict[masterID]
        if neighs:
            weights = [ 1. for nh in neighs ]
            isIsland = False
        else:
            isIsland = True
            islandPolys.append(oid)
            weights = []

        #### Get Nearest Neighbor Based On Centroid Distance ####
        if isIsland and forceNeighbor:
            neighs, weights = neighWeights[row]
            neighs = [ gaTable[nh][2] for nh in neighs ]

        #### Add Weights Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights)

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    #### Report on Features with No Neighbors ####
    countIslands = len(islandPolys)
    if countIslands:
        islandPolys.sort()
        if countIslands > 30:
            islandPolys = islandPolys[0:30]
        ERROR.warningNoNeighbors(N, countIslands, islandPolys,
                                 ssdo.oidName,
                                 forceNeighbor = forceNeighbor,
                                 contiguity = True)

    #### Clean Up ####
    swmWriter.close()
    del gaTable

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()

    del polyNeighborDict
def initialize(self): """Populates the instance of the Spatial Statistics Data Object (SSDataObject) and resolves a default distance threshold if none given. """ #### Shorthand Attributes #### ssdo = self.ssdo varName = self.varName concept = self.concept threshold = self.threshold exponent = self.exponent wType = self.wType weightsFile = self.weightsFile swmFileBool = self.swmFileBool masterField = ssdo.masterField potentialField = self.potentialField #### Get Data Array #### field = ssdo.fields[varName] self.y = field.returnDouble() self.numObs = ssdo.numObs maxSet = False self.fieldNames = [varName] #### Distance Threshold #### if wType in [0, 1, 7]: if threshold == None: threshold, avgDist = WU.createThresholdDist(ssdo, concept=concept) #### Assures that the Threshold is Appropriate #### gaExtent = UTILS.get92Extent(ssdo.extent) fixed = (wType == 1) threshold, maxSet = WU.checkDistanceThreshold(ssdo, threshold, weightType=wType) #### If the Threshold is Set to the Max #### #### Set to Zero for Script Logic #### if maxSet: #### All Locations are Related #### if self.numObs > 500: ARCPY.AddIDMessage("WARNING", 717) #### Resolve Self Potential Field (Default to 1.0) #### if potentialField: potField = ssdo.fields[potentialField] self.potVals = potField.returnDouble() self.fieldNames.append(potentialField) #### Warn if Negative Self Weights #### sumNeg = NUM.sum(self.potVals < 0.0) if sumNeg: ARCPY.AddIDMessage("WARNING", 940) #### Set Negative Weights to Zero #### self.potVals = NUM.where(self.potVals < 0.0, 0.0, self.potVals) else: if weightsFile and not swmFileBool: self.potVals = None else: self.potVals = NUM.ones(self.numObs) #### Set Attributes #### self.maxSet = maxSet self.threshold = threshold self.master2Order = ssdo.master2Order self.swmFileBool = swmFileBool
def swm2Weights(ssdo, swmfile):
    """Converts ArcGIS Sparse Spatial Weights Matrix (*.swm) file to
    PySAL Sparse Spatial Weights Class.

    INPUTS:
    ssdo (class): instance of SSDataObject [1,2]
    swmfile (str): full path to swm file

    NOTES:
    (1) Data must already be obtained using ssdo.obtainData()
    (2) The masterField for the swm file and the ssdo object must be
        the same and may NOT be the OID/FID/ObjectID
    """
    neighbors = {}
    weights = {}

    #### Create SWM Reader Object ####
    swm = WU.SWMReader(swmfile)

    #### SWM May NOT be a Subset of the Data ####
    if ssdo.numObs > swm.numObs:
        ARCPY.AddIDMessage("ERROR", 842, ssdo.numObs, swm.numObs)
        raise SystemExit()

    #### Master Fields Must Match ####
    if swm.masterField != ssdo.masterField:
        #### BUG FIX: the original called ARCPY.AddWarning("ERROR",  ####
        #### 938) — AddWarning takes a single message string, so the ####
        #### ID-message arguments were wrong and message 938 never   ####
        #### resolved.  AddIDMessage matches every other error path  ####
        #### in this file.                                           ####
        ARCPY.AddIDMessage("ERROR", 938)
        raise SystemExit()

    #### Parse All SWM Records ####
    for r in UTILS.ssRange(swm.numObs):
        info = swm.swm.readEntry()
        masterID, nn, nhs, w, sumUnstandard = info

        #### Must Have at Least One Neighbor ####
        if nn:
            #### Must be in Selection Set (If Exists) ####
            if masterID in ssdo.master2Order:
                outNHS = []
                outW = []

                #### Transform Master ID to Order ID ####
                orderID = ssdo.master2Order[masterID]

                #### Neighbors and Weights Adjusted for Selection ####
                for nhInd, nhVal in enumerate(nhs):
                    try:
                        nhOrder = ssdo.master2Order[nhVal]
                        outNHS.append(nhOrder)
                        weightVal = w[nhInd]
                        if swm.rowStandard:
                            #### Undo Row Standardization Stored in SWM ####
                            weightVal = weightVal * sumUnstandard[0]
                        outW.append(weightVal)
                    except KeyError:
                        #### Neighbor Not in the Selection Set ####
                        pass

                #### Add Selected Neighbors/Weights ####
                if len(outNHS):
                    neighbors[orderID] = outNHS
                    weights[orderID] = outW

    swm.close()

    #### Construct PySAL Spatial Weights and Standardize as per SWM ####
    w = PYSAL.W(neighbors, weights)
    if swm.rowStandard:
        w.transform = 'R'

    return w
def outputResults(self):
    """Creates output feature class Local Gi*.

    Writes the Gi* z-scores, p-values, optional pseudo p-values and
    the Gi bin classification to self.outputFC and returns the names
    of the z-score and p-value fields.
    """
    #### Derived Output Field Setup ####
    outPath, outName = OS.path.split(self.outputFC)
    fieldOrder = UTILS.getFieldNames(giFieldNames, outPath)
    fieldData = [self.gi, self.pVals]
    fieldTypes = ["DOUBLE", "DOUBLE"]

    #### Optional Pseudo-P Field ####
    if self.permutations:
        fieldOrder.append(giPseudoFieldName)
        fieldData.append(self.pseudoPVals)
        fieldTypes.append("DOUBLE")

    #### Gi Bin Field ####
    fieldOrder.append(giBinFieldName)
    fieldData.append(self.giBins)
    fieldTypes.append("LONG")

    #### Alias Field Names ####
    rowStandard = False
    if self.wType == 8:
        addString = OS.path.basename(self.weightsFile)
    elif self.wType in [0, 1, 7]:
        addString = "0" if self.maxSet else str(int(self.threshold))
    else:
        addString = None

    aliasList = WU.createSpatialFieldAliases(fieldOrder,
                                             addString = addString,
                                             wType = self.wType,
                                             exponent = self.exponent,
                                             rowStandard = rowStandard)
    if self.applyFDR:
        aliasList[-1] += "_FDR"

    #### Candidate Field Dictionary ####
    candidateFields = {}
    fieldInfo = zip(fieldOrder, fieldTypes, fieldData, aliasList)
    for outFieldName, outType, outData, outAlias in fieldInfo:
        candidateFields[outFieldName] = SSDO.CandidateField(
                                            outFieldName, outType,
                                            outData, alias = outAlias)

    #### Input Fields to Copy to Output FC ####
    appendFields = list(self.fieldNames)

    #### Prepend Date-Time Field If Applicable ####
    if self.swmFileBool and self.swm.wType == 9:
        timeFieldUp = self.swm.timeField.upper()
        if self.ssdo.allFields.has_key(timeFieldUp):
            appendFields.insert(0, timeFieldUp)

    #### Write Data to Output Feature Class ####
    self.ssdo.output2NewFC(self.outputFC, candidateFields,
                           appendFields = appendFields,
                           fieldOrder = fieldOrder)

    return fieldOrder[0], fieldOrder[1]
def stCollectByKNN(ssdo, timeField, outputFC, inSpan, inDistance):
    """Applies Jacquez Space-Time K-NN to convert event data into
    weighted point data by dissolving all coincident points in space
    and time into unique points with a new count field that contains
    the number of original features at that location and time span.

    INPUTS:
    ssdo (obj): SSDataObject from input
    timeField (str): Date/Time field name in input feature
    outputFC (str): path to the output feature class
    inSpan (int): value of temporal units within the same time bin
    inDistance (int): value of spatial units considered as spatial
                      neighbors

    RETURNS:
    countFieldNameOut (str): name of the count field in the output FC
    """
    #### Read Raw Time Data ####
    timeData = ssdo.fields[timeField].data

    #### Convert Temporal Unit (Day Resolution) ####
    time = NUM.array(timeData, dtype = 'datetime64[s]').astype('datetime64[D]')

    #### Find Start Time ####
    startTime = time.min()

    #### Create Bin for Space and Time ####
    timeBin = (time - startTime) / inSpan
    numObs = ssdo.numObs

    #### Create Sudo-fid to Find K-NN in Space and Time ####
    fid = [i for i in xrange(numObs)]

    #### Validate Output Workspace ####
    ERROR.checkOutputPath(outputFC)

    #### True Centroid Warning For Non-Point FCs ####
    if ssdo.shapeType.upper() != "POINT":
        #### BUG FIX: the original read "ARCPY/AddIDMessage(...)" -  ####
        #### a slash instead of a dot, which divides the module by   ####
        #### the function and raises TypeError whenever a non-point  ####
        #### feature class is passed in.                             ####
        ARCPY.AddIDMessage("WARNING", 1021)

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.inputFC,
                                 spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    cnt = UTILS.getCount(ssdo.inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 4)
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs = 4)

    #### Process Any Bad Records Encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        if not ssdo.silentWarnings:
            ERROR.reportBadRecords(cnt, numBadRecs, badRecs,
                                   label = ssdo.oidName)

    #### Create Output Feature Class ####
    outPath, outName = OS.path.split(outputFC)
    try:
        DM.CreateFeatureclass(outPath, outName, "POINT", "", ssdo.mFlag,
                              ssdo.zFlag, ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Create k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(inDistance, 0, "euclidean")

    #### Add Count Field ####
    countFieldNameOut = ARCPY.ValidateFieldName(countFieldName, outPath)
    timeFieldNameOut = ARCPY.ValidateFieldName(timeFieldName, outPath)
    UTILS.addEmptyField(outputFC, countFieldNameOut, "LONG")
    UTILS.addEmptyField(outputFC, timeFieldNameOut, "DATE")
    fieldList = ["SHAPE@", countFieldNameOut, timeFieldNameOut]

    #### Set Insert Cursor ####
    rowsOut = DA.InsertCursor(outputFC, fieldList)

    #### Detect S-T K-NN by Space and Time Bin ####
    duplicateList = []
    for record in fid:
        kNNList = [record]
        if record not in duplicateList:
            for pair in fid:
                if pair != record :
                    gaSearch.search_by_idx(record)
                    for nh in gaSearch:
                        #### Same Time Bin: Dissolve Into One Point ####
                        if timeBin[record] == timeBin[pair]:
                            kNNList.append(nh.idx)
                            duplicateList.append(nh.idx)

            #### Create and Populate New Feature ####
            kNNList = list(set(kNNList))
            count = len(kNNList)
            dt = time[record]
            x0 = ssdo.xyCoords[kNNList, 0].mean()
            y0 = ssdo.xyCoords[kNNList, 1].mean()
            pnt =(x0, y0, ssdo.defaultZ)
            rowResult = [pnt, count, dt]
            rowsOut.insertRow(rowResult)
            ARCPY.SetProgressorPosition()

    #### Clean Up ####
    del rowsOut, timeBin, kNNList, duplicateList

    return countFieldNameOut