def construct(self):
    """Constructs the neighborhood structure for each feature and
    dispatches the appropriate values for the calculation of the
    statistic.

    Reads the analysis field off self, validates its variance, builds
    deviation terms for Local Moran's I, then iterates every feature
    using one of four neighborhood sources: SWM file, text weights
    file, polygon contiguity, or on-the-fly distance search.
    Raises SystemExit (after an ArcGIS error message) on invalid input.
    """

    #### Shorthand Attributes ####
    ssdo = self.ssdo
    varName = self.varName
    concept = self.concept
    gaConcept = concept.lower()
    threshold = self.threshold
    exponent = self.exponent
    wType = self.wType
    rowStandard = self.rowStandard
    numObs = self.numObs
    master2Order = self.master2Order
    masterField = ssdo.masterField
    weightsFile = self.weightsFile

    #### Assure that Variance is Larger than Zero ####
    # A constant (or all-NaN) field makes the statistic undefined.
    yVar = NUM.var(self.y)
    if NUM.isnan(yVar) or yVar <= 0.0:
        ARCPY.AddIDMessage("Error", 906)
        raise SystemExit()

    #### Create Deviation Variables ####
    # Deviations from the mean and their normalized 2nd/4th moments;
    # b2i is the sample kurtosis term used in the variance of I.
    self.yBar = NUM.mean(self.y)
    self.yDev = self.y - self.yBar
    self.nm1 = numObs - 1.
    self.nm2 = numObs - 2.
    self.nm12 = self.nm1 * self.nm2
    yDev2 = self.yDev**2.0
    yDev2Norm = yDev2 / self.nm1
    self.yDev2NormSum = sum(yDev2Norm)
    yDev4 = self.yDev**4.0
    yDev4Norm = yDev4 / self.nm1
    yDev4NormSum = sum(yDev4Norm)
    self.b2i = yDev4NormSum / (self.yDev2NormSum**2.0)

    #### Create Base Data Structures/Variables ####
    # Per-feature outputs: local I, expectation, variance, z-score,
    # p-values (pseudo p-values only when permutations requested).
    self.li = NUM.zeros(numObs)
    self.ei = NUM.zeros(numObs)
    self.vi = NUM.zeros(numObs)
    self.zi = NUM.zeros(numObs)
    self.pVals = NUM.ones(numObs)
    if self.permutations:
        self.pseudoPVals = NUM.ones(numObs)
    self.moranInfo = {}

    #### Keep Track of Features with No Neighbors ####
    self.idsNoNeighs = []

    #### Set Neighborhood Structure Type ####
    if self.weightsFile:
        if self.swmFileBool:
            #### Open Spatial Weights and Obtain Chars ####
            swm = WU.SWMReader(weightsFile)
            N = swm.numObs
            rowStandard = swm.rowStandard
            self.swm = swm

            #### Check to Assure Complete Set of Weights ####
            if numObs > N:
                ARCPY.AddIDMessage("Error", 842, numObs, N)
                raise SystemExit()

            #### Check if Selection Set ####
            # Fewer features than SWM entries means a selection is
            # active; SWM entries not in the selection are skipped.
            isSubSet = False
            if numObs < N:
                isSubSet = True
            iterVals = xrange(N)
        else:
            #### Warning for GWT with Bad Records/Selection ####
            if ssdo.selectionSet or ssdo.badRecords:
                ARCPY.AddIDMessage("WARNING", 1029)

            #### Build Weights Dictionary ####
            weightDict = WU.buildTextWeightDict(weightsFile,
                                                master2Order)
            iterVals = master2Order.keys()
            N = numObs
    elif wType in [4, 5]:
        #### Polygon Contiguity ####
        # wType 4 = edge-sharing (ROOK), 5 = edge-or-vertex (QUEEN).
        if wType == 4:
            contiguityType = "ROOK"
        else:
            contiguityType = "QUEEN"
        contDict = WU.polygonNeighborDict(ssdo.inputFC, ssdo.oidName,
                                          contiguityType=contiguityType)
        iterVals = master2Order.keys()
        N = numObs
    else:
        #### Distance-Based Weights via GA Search Structure ####
        gaTable = ssdo.gaTable
        gaSearch = GAPY.ga_nsearch(gaTable)
        if wType == 7:
            #### Zone of Indiff, All Related to All ####
            gaSearch.init_nearest(threshold, numObs, gaConcept)
        else:
            #### Inverse and Fixed Distances ####
            gaSearch.init_nearest(threshold, 0, gaConcept)
        iterVals = range(numObs)
        N = numObs
        neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch,
                                               weight_type=wType,
                                               exponent=exponent,
                                               row_standard=rowStandard)

    #### Create Progressor ####
    msg = ARCPY.GetIDMessage(84007)
    if self.permutations:
        msg += ": Using Permutations = %i" % self.permutations
    ARCPY.SetProgressor("step", msg, 0, N, 1)

    #### Create Neighbor Info Class ####
    ni = WU.NeighborInfo(masterField)

    #### Calculation For Each Feature ####
    for i in iterVals:
        if self.swmFileBool:
            #### Using SWM File ####
            # Entries are read sequentially; skip those outside the
            # current selection (includeIt False).
            info = swm.swm.readEntry()
            masterID = info[0]
            if master2Order.has_key(masterID):
                rowInfo = WU.getWeightsValuesSWM(info, master2Order,
                                                 self.yDev,
                                                 rowStandard=rowStandard,
                                                 isSubSet=isSubSet)
                includeIt = True
            else:
                includeIt = False
        elif self.weightsFile and not self.swmFileBool:
            #### Text Weights ####
            masterID = i
            includeIt = True
            rowInfo = WU.getWeightsValuesText(masterID, master2Order,
                                              weightDict, self.yDev)
        elif wType in [4, 5]:
            #### Polygon Contiguity ####
            masterID = i
            includeIt = True
            rowInfo = WU.getWeightsValuesCont(masterID, master2Order,
                                              contDict, self.yDev,
                                              rowStandard=rowStandard)
        else:
            #### Distance Based ####
            masterID = gaTable[i][0]
            includeIt = True
            rowInfo = WU.getWeightsValuesOTF(neighWeights, i,
                                             self.yDev)

        #### Subset Boolean for SWM File ####
        if includeIt:
            #### Parse Row Info ####
            orderID, yiDev, nhIDs, nhVals, weights = rowInfo

            #### Assure Neighbors Exist After Selection ####
            nn, nhIDs, nhVals, weights = ni.processInfo(masterID,
                                                        nhIDs, nhVals,
                                                        weights)

            if nn:
                #### Calculate Local I ####
                self.calculateLI(orderID, yiDev, nhVals, weights)

        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    if self.swmFileBool:
        swm.close()

    #### Report on Features with No Neighbors ####
    ni.reportNoNeighbors()
    self.setNullValues(ni.idsNoNeighs)

    #### Report on Features with Large Number of Neighbors ####
    ni.reportWarnings()
    ni.reportMaximums()
    self.neighInfo = ni

    #### Set p-values for Gi Bins ####
    if self.permutations:
        #### Use Pseudo p-values ####
        pv = self.pseudoPVals
    else:
        #### Use Traditional p-values ####
        pv = self.pVals

    #### Calculate FDR and Moran Bins ####
    toolMSG = ARCPY.GetIDMessage(84474)
    if self.applyFDR:
        #### Set Bins Using FDR ####
        msg = ARCPY.GetIDMessage(84472).format(toolMSG)
        ARCPY.SetProgressor("default", msg)
        fdrBins = STATS.fdrTransform(pv, self.li)
        self.moranBins = STATS.moranBinFromPVals(pv, self.moranInfo,
                                                 fdrBins=fdrBins)
    else:
        msg = ARCPY.GetIDMessage(84473).format(toolMSG)
        ARCPY.SetProgressor("default", msg)
        self.moranBins = STATS.moranBinFromPVals(pv, self.moranInfo)
import arcpy
import sixbynine
import time

#### Fetch Localized Messages Once ####
# Previously each message was re-fetched inline below; reuse the
# cached values instead of calling GetIDMessage twice per id.
msg1 = arcpy.GetIDMessage("dt1")
msg2 = arcpy.GetIDMessage("dt2")
msg3 = arcpy.GetIDMessage("dt3")

#### Emit Combined Warning ####
arcpy.AddWarning(msg1 + msg2)

#### Step Progressor Over Nine Labeled Steps ####
arcpy.SetProgressor("step", "", 0, 100, 1)
prog_msg = msg3 + msg2
for i in range(1, 10):
    arcpy.SetProgressorLabel(f"{i} {prog_msg}")
    # NOTE(review): 1 s pause per iteration — presumably paces the UI
    # progress display; confirm it is intentional.
    time.sleep(1)

#### Return Computed Result as the First Output Parameter ####
arcpy.SetParameter(0, sixbynine.compute())
def report(self): """Reports the results from exploratory regression analysis.""" #### Set Title #### title = self.label #### Column Labels #### labs = [ ARCPY.GetIDMessage(84021), ARCPY.GetIDMessage(84249), ARCPY.GetIDMessage(84042), ARCPY.GetIDMessage(84036), ARCPY.GetIDMessage(84284), ARCPY.GetIDMessage(84292), ARCPY.GetIDMessage(84286) ] r2Info = [labs] #### Adjusted R2, Sorted Highest to Lowest with ID Tie Breaks #### header = ARCPY.GetIDMessage(84287) numRes = xrange(len(self.bestR2Res)) r2Data = [] for i in numRes: r2Val = self.bestR2Vals[i] idVal = int(self.bestR2Res[i].split(":")[-1]) r2Data.append((r2Val, idVal)) r2Data = NUM.array(r2Data, dtype=[('r2', float), ('ids', int)]) r2SortedInds = r2Data.argsort(order=('r2', 'ids')) sortIndex = reversed(r2SortedInds) for ind in sortIndex: olsID = self.bestR2Res[ind] olsRes = self.olsResults[olsID] olsOut = olsRes.report(formatStr="%0.2f") r2Info.append(olsOut) r2Report = UTILS.outputTextTable(r2Info, header=header, justify=masterJustify) #### Passing Models #### header = ARCPY.GetIDMessage(84288) passList = [labs] r2Values = [] olsIDs = [] for olsID in self.passBools: olsRes = self.olsResults[olsID] r2Values.append(olsRes.r2) olsIDs.append(olsID) sortIndex = NUM.argsort(r2Values).tolist() sortIndex.reverse() for ind in sortIndex: olsID = olsIDs[ind] olsRes = self.olsResults[olsID] olsOut = olsRes.report(formatStr="%0.6f") passList.append(olsOut) passingReport = UTILS.outputTextTable(passList, header=header) #### Print Report #### starMess = ARCPY.GetIDMessage(84289) * 78 finalReport = [starMess, title, r2Report, passingReport] finalReport = "\n".join(finalReport) finalReport = finalReport + "\n" ARCPY.AddMessage(finalReport) return finalReport
def createCoefficientReport(self):
    """Builds the OLS coefficient summary table.

    Stores the formatted text table on self.coefTable and the raw
    rows (header + one row per model term) on self.coefRaw.
    """

    #### Table Title and Footnote Markers ####
    tableTitle = ARCPY.GetIDMessage(84075)
    footA = ARCPY.GetIDMessage(84080)
    footB = ARCPY.GetIDMessage(84086)
    footC = ARCPY.GetIDMessage(84103)

    #### Header Row (VIF column only when requested) ####
    labels = [ARCPY.GetIDMessage(84068),
              ARCPY.GetIDMessage(84049) + " " + footA,
              ARCPY.GetIDMessage(84051),
              ARCPY.GetIDMessage(84053),
              ARCPY.GetIDMessage(84055) + " " + footB,
              ARCPY.GetIDMessage(84097),
              ARCPY.GetIDMessage(84101),
              ARCPY.GetIDMessage(84102) + " " + footB]
    if self.vif:
        labels.append(ARCPY.GetIDMessage(84284) + " " + footC)
    tableRows = [labels]

    #### One Row Per Explanatory Variable ####
    for ind in UTILS.ssRange(self.k):
        entry = [self.varLabels[ind],
                 UTILS.formatValue(self.coef[ind, 0]),
                 UTILS.formatValue(self.seCoef[ind]),
                 UTILS.formatValue(self.tStats[ind]),
                 UTILS.writePVal(self.pVals[ind], padNonSig=True),
                 UTILS.formatValue(self.seCoefRob[ind]),
                 UTILS.formatValue(self.tStatsRob[ind]),
                 UTILS.writePVal(self.pValsRob[ind], padNonSig=True)]

        #### VIF Column: intercept gets a placeholder message ####
        if self.vif:
            if ind == 0:
                vifEntry = ARCPY.GetIDMessage(84092)
            else:
                value = self.vifVal[(ind - 1)]
                if abs(value) > 1000:
                    vifEntry = "> 1000.0"
                else:
                    vifEntry = LOCALE.format("%0.6f", value)
            entry.append(vifEntry)

        tableRows.append(entry)

    #### Persist Formatted and Raw Versions ####
    self.coefTable = UTILS.outputTextTable(tableRows, header=tableTitle,
                                           pad=1, justify="right")
    self.coefRaw = tableRows
def createInterpretReport(self):
    """Assembles the OLS interpretation guidance table.

    Stores the formatted table on self.interpretTable and the raw
    [label, body] rows on self.interpretRaw.
    """

    #### Table Header ####
    header = ARCPY.GetIDMessage(84081)

    #### Locale-Aware Numeric Literals Used in the Guidance Text ####
    if UTILS.returnDecimalChar() == ".":
        pValue = "0.01"
        VIF = "7.5"
    else:
        pValue = "0,01"
        VIF = "7,5"

    #### (label message id, body message id, format argument) ####
    # A None argument means the body message takes no placeholder.
    rowSpecs = [(84111, 84082, pValue),
                (84080, 84349, None),
                (84086, 84350, pValue),
                (84103, 84083, VIF),
                (84104, 84084, None),
                (84105, 84085, pValue),
                (84106, 84087, pValue),
                (84107, 84088, pValue)]

    #### Build Rows in Display Order ####
    intTotal = []
    for labelID, bodyID, arg in rowSpecs:
        bodyText = ARCPY.GetIDMessage(bodyID)
        if arg is not None:
            bodyText = bodyText.format(arg)
        intTotal.append([ARCPY.GetIDMessage(labelID), bodyText])

    #### Finalize Interpretation Table ####
    body = UTILS.outputTextTable(intTotal, pad=1,
                                 justify=["center", "left"])
    self.interpretTable = "\n%s%s" % (header, body)
    self.interpretRaw = intTotal
def spaceTime2SWM(inputFC, swmFile, masterField, concept = "EUCLIDEAN",
                  threshold = None, rowStandard = True,
                  timeField = None, timeType = None,
                  timeValue = None):
    """Creates a space-time spatial weights matrix (SWM) file: two
    features are neighbors when they are within the distance threshold
    AND the neighbor's timestamp falls in the feature's time window.

    INPUTS:
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    concept: {str, EUCLIDEAN}: EUCLIDEAN or MANHATTAN
    threshold {float, None}: distance threshold
    rowStandard {bool, True}: row standardize weights?
    timeField {str, None}: name of the date-time field
    timeType {str, None}: ESRI enumeration of date-time intervals
    timeValue {float, None}: value forward and backward in time
    """

    #### Assure Temporal Parameters are Set ####
    if not timeField:
        ARCPY.AddIDMessage("ERROR", 1320)
        raise SystemExit()
    if not timeType:
        ARCPY.AddIDMessage("ERROR", 1321)
        raise SystemExit()
    if not timeValue or timeValue <= 0:
        ARCPY.AddIDMessage("ERROR", 1322)
        raise SystemExit()

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 2)
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84001), 0, cnt, 1)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField,
                                    types = [0,1])
    badIDs = []

    #### Create Temporal Hash ####
    # masterID -> (timestamp, windowStart, windowEnd)
    timeInfo = {}
    xyCoords = NUM.empty((cnt, 2), float)

    #### Process Field Values ####
    fieldList = [masterField, "SHAPE@XY", timeField]
    try:
        rows = DA.SearchCursor(ssdo.catPath, fieldList, "",
                               ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 204)
        raise SystemExit()

    #### Add Data to GATable and Time Dictionary ####
    c = 0
    for row in rows:
        badRow = False

        #### Assure Masterfield is Valid ####
        masterID = row[0]
        if masterID == None or masterID == "":
            badRow = True

        #### Assure Date/Time is Valid ####
        timeStamp = row[-1]
        if timeStamp == None or timeStamp == "":
            badRow = True

        #### Assure Centroid is Valid ####
        # A None in the (x, y) tuple marks a null/empty geometry.
        badXY = row[1].count(None)
        if not badXY:
            x,y = row[1]
            xyCoords[c] = (x,y)
        else:
            badRow = True

        #### Process Data ####
        if not badRow:
            if timeInfo.has_key(masterID):
                #### Assure Uniqueness ####
                # Duplicate master IDs cannot be mapped; hard stop.
                ARCPY.AddIDMessage("Error", 644, masterField)
                ARCPY.AddIDMessage("Error", 643)
                raise SystemExit()
            else:
                #### Fill Date/Time Dict ####
                startDT, endDT = TUTILS.calculateTimeWindow(timeStamp,
                                                            timeValue,
                                                            timeType)
                timeInfo[masterID] = (timeStamp, startDT, endDT)
        else:
            badIDs.append(masterID)

        #### Set Progress ####
        c += 1
        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    del rows

    #### Get Set of Bad IDs ####
    numBadObs = len(badIDs)
    badIDs = list(set(badIDs))
    badIDs.sort()
    badIDs = [ str(i) for i in badIDs ]

    #### Process any bad records encountered ####
    if numBadObs:
        ERROR.reportBadRecords(cnt, numBadObs, badIDs,
                               label = masterField)

    #### Load Neighbor Table ####
    gaTable, gaInfo = WU.gaTable(ssdo.inputFC,
                                 fieldNames = [masterField, timeField],
                                 spatRef = ssdo.spatialRefString)
    numObs = len(gaTable)
    # Trim coordinate buffer to the rows actually loaded.
    xyCoords = xyCoords[0:numObs]

    #### Set the Distance Threshold ####
    concept, gaConcept = WU.validateDistanceMethod(concept,
                                                   ssdo.spatialRef)
    if threshold == None:
        #### Set Progressor for Search ####
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84144))

        #### Create k-Nearest Neighbor Search Type ####
        gaSearch = GAPY.ga_nsearch(gaTable)
        gaSearch.init_nearest(0.0, 1, gaConcept)
        neighDist = ARC._ss.NeighborDistances(gaTable, gaSearch)
        N = len(neighDist)
        threshold = 0.0
        sumDist = 0.0

        #### Find Maximum Nearest Neighbor Distance ####
        # Default threshold = max NN distance, so every feature has
        # at least one spatial neighbor.
        for row in xrange(N):
            dij = neighDist[row][-1][0]
            if dij > threshold:
                threshold = dij
            sumDist += dij
            ARCPY.SetProgressorPosition()

        #### Increase For Rounding Error ####
        threshold = threshold * 1.0001
        # NOTE(review): avgDist is computed but never used below.
        avgDist = sumDist / (N * 1.0)

        #### Add Linear/Angular Units ####
        thresholdStr = ssdo.distanceInfo.printDistance(threshold)
        ARCPY.AddIDMessage("Warning", 853, thresholdStr)

        #### Chordal Default Check ####
        if ssdo.useChordal:
            hardMaxExtent = ARC._ss.get_max_gcs_distance(
                                               ssdo.spatialRef)
            if threshold > hardMaxExtent:
                ARCPY.AddIDMessage("ERROR", 1609)
                raise SystemExit()

        #### Clean Up ####
        del gaSearch

    #### Create Missing SSDO Info ####
    extent = UTILS.resetExtent(xyCoords)

    #### Reset Coordinates for Chordal ####
    if ssdo.useChordal:
        sliceInfo = UTILS.SpheroidSlice(extent, ssdo.spatialRef)
        maxExtent = sliceInfo.maxExtent
    else:
        env = UTILS.Envelope(extent)
        maxExtent = env.maxExtent
    threshold = checkDistanceThresholdSWM(ssdo, threshold, maxExtent)

    #### Set Default Progressor for Neigborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create Distance Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(threshold, 0, gaConcept)
    neighSearch = ARC._ss.NeighborSearch(gaTable, gaSearch)

    #### Set Progressor for Weights Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0,
                        numObs, 1)

    #### Initialize Spatial Weights Matrix File ####
    # wType 9 identifies space-time weights in the SWM header.
    swmWriter = WU.SWMWriter(swmFile, masterField,
                             ssdo.spatialRefName, numObs, rowStandard,
                             inputFC = inputFC, wType = 9,
                             distanceMethod = concept,
                             threshold = threshold,
                             timeField = timeField,
                             timeType = timeType,
                             timeValue = timeValue)

    for row in xrange(numObs):
        masterID = gaTable[row][2]

        #### Get Date/Time Info ####
        dt0, startDT0, endDT0 = timeInfo[masterID]
        nhs = neighSearch[row]
        neighs = []
        weights = []
        for nh in nhs:
            #### Search Through Spatial Neighbors ####
            neighID = gaTable[nh][2]

            #### Get Date/Time Info ####
            dt1, startDT1, endDT1 = timeInfo[neighID]

            #### Filter Based on Date/Time ####
            # Keep only spatial neighbors whose timestamp lies in
            # this feature's time window; all kept weights are 1.0.
            insideTimeWindow = TUTILS.isTimeNeighbor(startDT0, endDT0,
                                                     dt1)
            if insideTimeWindow:
                neighs.append(neighID)
                weights.append(1.0)

        #### Add Spatial Weights Matrix Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights)

        #### Set Progress ####
        ARCPY.SetProgressorPosition()
    swmWriter.close()
    del gaTable

    #### Report Warning/Max Neighbors ####
    swmWriter.reportNeighInfo()

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()
def table2SWM(inputFC, masterField, swmFile, tableFile, rowStandard = True): """Converts a weigths matrix in table format into SWM format. INPUTS: inputFC (str): path to the input feature class masterField (str): field in table that serves as the mapping. swmFile (str): path to the SWM file. tableFile (str) path to the database table rowStandard {bool, True}: row standardize weights? """ #### Set Default Progressor for Neigborhood Structure #### ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84123)) #### Create SSDataObject #### ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC) #### Obtain Unique IDs from Input Feature Class #### ssdo.obtainData(masterField, minNumObs = 2) ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84123)) master2Order = ssdo.master2Order allMaster = master2Order.keys() n = ssdo.numObs #### Create Search Cursor for Input Weights Table #### neighFieldName = "NID" weightFieldName = "WEIGHT" fieldList = [masterField, neighFieldName, weightFieldName] try: rows = DA.SearchCursor(tableFile, fieldList) except: ARCPY.AddIDMessage("Error", 722) raise SystemExit() #### Initialize Spatial Weights Matrix File #### swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName, n, rowStandard, inputFC = inputFC, wType = 8, inputTable = tableFile) #### Set Progressor for SWM Reading/Writing #### c = 0 cnt = UTILS.getCount(tableFile) ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84123), 0, cnt, 1) lastID = "NULL" neighs = [] weights = [] #### Process Spatial Weights #### for row in rows: masterID = row[0] if master2Order.has_key(masterID): neighID = row[1] weight = row[2] if masterID == lastID: #### Append to Current Record #### try: testNeigh = master2Order[neighID] neighs.append(neighID) weights.append(weight) except: #### NID Does Not Exist / Not In Selection #### pass #### Set Progress #### ARCPY.SetProgressorPosition() else: #### Create New Record if not NULL #### if lastID != "NULL": allMaster.remove(lastID) 
swmWriter.swm.writeEntry(lastID, neighs, weights) #### Reset and Initialize Containers #### neighs = [neighID] weights = [weight] else: #### Create First Record #### try: testNeigh = master2Order[neighID] neighs.append(neighID) weights.append(weight) except: #### NID Does Not Exist / Not In Selection #### pass lastID = masterID #### Set Progress #### ARCPY.SetProgressorPosition() else: #### Unique Id Does Not Exist / Not In Selection #### ARCPY.SetProgressorPosition() #### Write Last Record #### swmWriter.swm.writeEntry(lastID, neighs, weights) try: allMaster.remove(lastID) except: pass #### Set Progress #### ARCPY.SetProgressorPosition() #### Write No Neighbor Features #### for masterID in allMaster: swmWriter.swm.writeEntry(masterID, [], []) #### Report Warning/Max Neighbors #### swmWriter.reportNeighInfo() #### Report Spatial Weights Summary #### swmWriter.report() #### Report SWM File is Large #### swmWriter.reportLargeSWM() #### Clean Up #### swmWriter.close() del rows
def createAnalysisSSDO(self, tempFC, varName):
    """Loads the aggregated feature class into the analysis data
    object and validates the analysis field.

    INPUTS:
    tempFC (str): path to the aggregated (temporary) feature class
    varName (str): name of the count/analysis field to load

    Raises SystemExit (after cleanup) when there are too few
    aggregated features or the analysis field has zero variance.
    """
    self.varName = varName
    self.analysisSSDO = SSDO.SSDataObject(tempFC,
                            explicitSpatialRef=self.ssdo.spatialRef,
                            useChordal=True)
    self.masterField = UTILS.setUniqueIDField(self.analysisSSDO)
    self.analysisSSDO.obtainDataGA(self.masterField, [self.varName])

    if self.aggType == 2:
        #### Verify Enough Polygons ####
        self.checkPolygons(self.analysisSSDO.numObs)

        #### Locational Outliers ####
        lo = UTILS.LocationInfo(self.analysisSSDO,
                                concept="EUCLIDEAN",
                                silentThreshold=True,
                                stdDeviations=3)
        printOHSLocationalOutliers(lo, aggType=self.aggType)

        #### Agg Header ####
        printOHSSection(84444)

        #### Do Spatial Join ####
        # (Removed a dead assignment that fetched message 84458 and
        # immediately overwrote it.)
        printOHSSubject(84458, addNewLine=False)
        msg = ARCPY.GetIDMessage(84489)
        printOHSAnswer(msg)

    #### Analyze Incident Subject ####
    msgID = aggHeaders[self.aggType]
    msg = ARCPY.GetIDMessage(msgID)
    ARCPY.SetProgressor("default", msg)
    printOHSSubject(msgID, addNewLine=False)

    #### Errors and Warnings ####
    y = self.analysisSSDO.fields[self.varName].returnDouble()
    yVar = NUM.var(y)

    #### Too Few Aggregated Features ####
    if self.analysisSSDO.numObs < 30:
        if self.boundaryFC:
            ARCPY.AddIDMessage("ERROR", 1573)
        else:
            ARCPY.AddIDMessage("ERROR", 1572)
        self.cleanUp()
        raise SystemExit()

    #### Zero Variance ####
    # Only the message id differs per aggregation type; cleanup and
    # exit are shared (previously duplicated in both branches).
    if NUM.isnan(yVar) or yVar <= 0.0:
        if self.aggType == 2:
            ARCPY.AddIDMessage("ERROR", 1534)
        else:
            ARCPY.AddIDMessage("ERROR", 1533)
        self.cleanUp()
        raise SystemExit()

    #### Count Description ####
    if self.aggType:
        msgID = 84490
    else:
        msgID = 84447
    msg = ARCPY.GetIDMessage(msgID).format(len(y))
    printOHSAnswer(msg, addNewLine=False)
    varNameCounts = ARCPY.GetIDMessage(84488)
    msg = ARCPY.GetIDMessage(84446).format(varNameCounts)
    printOHSAnswer(msg, addNewLine=False)
    printWeightAnswer(y)
def doHotSpots(self):
    """Selects an analysis scale and runs the Local Gi* hot spot
    analysis on the aggregated data.

    Scale selection: Incremental Spatial Autocorrelation first peak,
    falling back to the max peak, then to a k-nearest-neighbor-based
    distance when no peak is found. Results (with FDR correction) are
    written to self.outputFC and default symbology is applied.
    """

    #### Scale Header ####
    printOHSSection(84459)

    #### Scale Subject ####
    msg = ARCPY.GetIDMessage(84460)
    ARCPY.SetProgressor("default", msg)
    printOHSSubject(84460, addNewLine=False)

    #### Run Incremental Spatial AutoCorrelation ####
    # templateDir is two levels above the running script; reused
    # later to locate the symbology layer files.
    self.templateDir = OS.path.dirname(OS.path.dirname(SYS.argv[0]))
    mi = MI.GlobalI_Step(self.analysisSSDO, self.varName,
                         includeCoincident=False, stdDeviations=3,
                         silent=True, stopMax=500)

    #### Set Distance or KNN ####
    peakFound = False
    if mi.completed:
        if mi.firstPeakDistance:
            # Prefer the first z-score peak distance.
            distanceBand = mi.firstPeakDistance
            distanceStr = self.ssdo.distanceInfo.printDistance(
                                                     distanceBand)
            peakInd = mi.firstPeakInd
            msg = ARCPY.GetIDMessage(84461).format(distanceStr)
            printOHSAnswer(msg)
            numNeighs = 0
            wType = 1
            peakFound = True
        elif mi.maxPeakDistance:
            # Otherwise fall back to the maximum peak distance.
            distanceBand = mi.maxPeakDistance
            distanceStr = self.ssdo.distanceInfo.printDistance(
                                                     distanceBand)
            peakInd = mi.maxPeakInd
            msg = ARCPY.GetIDMessage(84461).format(distanceStr)
            printOHSAnswer(msg)
            numNeighs = 0
            wType = 1
            peakFound = True

    if not peakFound:
        #### Use KNN If No Peak OR More than 500 Neighs ####
        msg = ARCPY.GetIDMessage(84462)
        printOHSAnswer(msg)
        distanceBand = knnDecision(self.analysisSSDO)
        distanceStr = self.ssdo.distanceInfo.printDistance(
                                                 distanceBand)
        wType = 1
        numNeighs = 0
    self.distanceBand = distanceBand
    self.distanceStr = distanceStr

    #### Run Local Gi* ####
    msg = ARCPY.GetIDMessage(84466)
    ARCPY.SetProgressor("default", msg)

    #### Hot Spot Header ####
    printOHSSection(84466)

    #### Subject w/ Value - Use AddMessage Explicitly ####
    varMSG = ARCPY.GetIDMessage(84467).format(self.varString)
    ARCPY.AddMessage(varMSG)

    #### Run Analysis ####
    # numPerms is a module-level permutation count.
    gi = GISTAR.LocalG(self.analysisSSDO, self.varName, self.outputFC,
                       wType, threshold=distanceBand,
                       numNeighs=numNeighs, permutations=numPerms,
                       applyFDR=True)

    #### FDR Significance ####
    # Non-zero bins are statistically significant after FDR.
    numSig = (gi.giBins != 0).sum()
    msg = ARCPY.GetIDMessage(84470).format(numSig)
    printOHSAnswer(msg)

    #### Wrap Up Header ####
    printOHSSection(84471)

    #### Subject w/ Value - Use AddMessage Explicitly ####
    outMSG = ARCPY.GetIDMessage(84475).format(self.outputFC)
    ARCPY.AddMessage(outMSG)
    giField, pvField = gi.outputResults()
    hotMSG = ARCPY.GetIDMessage(84476).format(self.varString)
    printOHSAnswer(hotMSG, addNewLine=False)
    coldMSG = ARCPY.GetIDMessage(84477).format(self.varString)
    printOHSAnswer(coldMSG)

    #### Set the Default Symbology ####
    # Best-effort only: failure to locate/apply the layer file is
    # reduced to a warning.
    self.params = ARCPY.gp.GetParameterInfo()
    try:
        renderType = UTILS.renderType[
                         self.analysisSSDO.shapeType.upper()]
        renderLayerFile = GISTAR.giRenderDict[renderType]
        fullRLF = OS.path.join(self.templateDir, "Templates",
                               "Layers", renderLayerFile)
        self.params[1].Symbology = fullRLF
    except:
        ARCPY.AddIDMessage("WARNING", 973)
def doIntegrate(self):
    """Aggregates incident points by snapping near-coincident points
    together (Integrate) and then collecting events into counts.

    Works on a copy of the input; progressively integrates at the
    10th, 25th and 100th percentile of the qualifying nearest-neighbor
    distances, then hands the collected counts ("ICOUNT") to
    createAnalysisSSDO.
    """

    #### Initial Data Assessment ####
    printOHSSection(84428, prependNewLine=True)
    printOHSSubject(84431, addNewLine=False)

    #### Find Unique Locations ####
    msg = ARCPY.GetIDMessage(84441)
    ARCPY.SetProgressor("default", msg)
    initCount = UTILS.getCount(self.ssdo.inputFC)
    self.checkIncidents(initCount)
    collectedPointFC = UTILS.returnScratchName("Collect_InitTempFC")
    collInfo = EVENTS.collectEvents(self.ssdo, collectedPointFC)
    self.cleanUpList.append(collectedPointFC)
    collSSDO = SSDO.SSDataObject(collectedPointFC,
                       explicitSpatialRef=self.ssdo.spatialRef,
                       useChordal=True)
    collSSDO.obtainDataGA(collSSDO.oidName)
    #################################

    #### Locational Outliers ####
    lo = UTILS.LocationInfo(collSSDO, concept="EUCLIDEAN",
                            silentThreshold=True, stdDeviations=3)
    printOHSLocationalOutliers(lo, aggType=self.aggType)

    #### Raster Boundary ####
    if self.outputRaster:
        self.validateRaster(collSSDO.xyCoords)

    #### Agg Header ####
    printOHSSection(84444)

    #### Copy Features for Integrate ####
    # Integrate edits in place, so work on a scratch copy.
    msg = ARCPY.GetIDMessage(84443)
    ARCPY.SetProgressor("default", msg)
    intFC = UTILS.returnScratchName("Integrated_TempFC")
    self.cleanUpList.append(intFC)
    DM.CopyFeatures(self.ssdo.inputFC, intFC)

    #### Make Feature Layer To Avoid Integrate Bug with Spaces ####
    mfc = "Integrate_MFC_2"
    DM.MakeFeatureLayer(intFC, mfc)
    self.cleanUpList.append(mfc)

    #### Snap Subject ####
    printOHSSubject(84442, addNewLine=False)
    # Scale the snap cap by the ratio of unique locations to
    # incidents; use the smaller of average/median NN distance.
    nScale = (collSSDO.numObs * 1.0) / self.cnt
    if lo.nonZeroAvgDist < lo.nonZeroMedDist:
        useDist = lo.nonZeroAvgDist * nScale
        useType = "average"
    else:
        useDist = lo.nonZeroMedDist * nScale
        useType = "median"
    distance2Integrate = lo.distances[lo.distances < useDist]
    distance2Integrate = NUM.sort(distance2Integrate)
    numDists = len(distance2Integrate)

    #### Max Snap Answer ####
    msg = ARCPY.GetIDMessage(84445)
    useDistStr = self.ssdo.distanceInfo.printDistance(useDist)
    msg = msg.format(useDistStr)
    printOHSAnswer(msg)

    # Integrate at increasing snap distances (10th, 25th, then the
    # largest qualifying distance).
    percs = [10, 25, 100]
    indices = [int(numDists * (i * .01)) for i in percs]
    if indices[-1] >= numDists:
        indices[-1] = -1
    ARCPY.SetProgressor("default", msg)
    for pInd, dInd in enumerate(indices):
        dist = distance2Integrate[dInd]
        snap = self.ssdo.distanceInfo.linearUnitString(dist,
                                                       convert=True)
        DM.Integrate(mfc, snap)
    del collSSDO

    #### Run Collect Events ####
    collectedFC = UTILS.returnScratchName("Collect_TempFC")
    self.cleanUpList.append(collectedFC)
    intSSDO = SSDO.SSDataObject(intFC,
                       explicitSpatialRef=self.ssdo.spatialRef,
                       silentWarnings=True, useChordal=True)
    intSSDO.obtainDataGA(intSSDO.oidName)
    EVENTS.collectEvents(intSSDO, collectedFC)
    descTemp = ARCPY.Describe(collectedFC)
    oidName = descTemp.oidFieldName

    #### Delete Integrated FC ####
    del intSSDO

    #### Set VarName, MasterField, AnalysisSSDO ####
    self.createAnalysisSSDO(collectedFC, "ICOUNT")
def doFishnet(self):
    """Aggregates incident points into a fishnet (grid) polygon mesh.

    Derives a cell size from nearest-neighbor statistics of the unique
    incident locations, builds the fishnet (via a Mercator projection
    when chordal distances are in use), spatially joins incident
    counts onto the cells, removes cells outside the boundary (or
    empty/far cells when no boundary is given), and hands the result
    ("JOIN_COUNT") to createAnalysisSSDO.

    Fix: corrected the "input pointsd" typo in the user-facing
    cell-removal message.
    """

    #### Initial Data Assessment ####
    printOHSSection(84428, prependNewLine=True)
    printOHSSubject(84431, addNewLine=False)

    #### Find Unique Locations ####
    msg = ARCPY.GetIDMessage(84441)
    ARCPY.SetProgressor("default", msg)
    initCount = UTILS.getCount(self.ssdo.inputFC)
    self.checkIncidents(initCount)
    collectedPointFC = UTILS.returnScratchName("Collect_InitTempFC")
    collInfo = EVENTS.collectEvents(self.ssdo, collectedPointFC)
    self.cleanUpList.append(collectedPointFC)
    collSSDO = SSDO.SSDataObject(collectedPointFC,
                       explicitSpatialRef=self.ssdo.spatialRef,
                       useChordal=True)
    collSSDO.obtainDataGA(collSSDO.oidName)
    #################################

    if self.boundaryFC:
        #### Assure Boundary FC Has Area and Obtain Chars ####
        self.checkBoundary()

    #### Location Outliers ####
    lo = UTILS.LocationInfo(collSSDO, concept="EUCLIDEAN",
                            silentThreshold=True, stdDeviations=3)
    printOHSLocationalOutliers(lo, aggType=self.aggType)

    #### Agg Header ####
    printOHSSection(84444)
    if self.boundaryFC:
        extent = self.boundExtent
        forMercExtent = self.boundExtent
        countMSGNumber = 84453
    else:
        countMSGNumber = 84452
        extent = None
        forMercExtent = collSSDO.extent

    #### Fishnet Built in Mercator When Using Chordal Distances ####
    if collSSDO.useChordal:
        extentFC_GCS = UTILS.returnScratchName("TempGCS_Extent")
        extentFC_Merc = UTILS.returnScratchName(
                                     "TempMercator_Extent")
        points = NUM.array([[forMercExtent.XMin, forMercExtent.YMax],
                            [forMercExtent.XMax, forMercExtent.YMin]])
        UTILS.createPointFC(extentFC_GCS, points,
                            spatialRef=collSSDO.spatialRef)
        DM.Project(extentFC_GCS, extentFC_Merc, mercatorProjection)
        d = ARCPY.Describe(extentFC_Merc)
        extent = d.extent
        fishOutputCoords = mercatorProjection
    else:
        fishOutputCoords = self.ssdo.spatialRef

    #### Fish Subject ####
    printOHSSubject(84449, addNewLine=False)
    dist = scaleDecision(lo.nonZeroAvgDist, lo.nonZeroMedDist)
    area = 0.0

    #### Construct Fishnet ####
    fish = UTILS.FishnetInfo(collSSDO, area, extent,
                             explicitCellSize=dist)
    dist = fish.quadLength
    snap = self.ssdo.distanceInfo.linearUnitString(dist)

    #### Cell Size Answer ####
    snapStr = self.ssdo.distanceInfo.printDistance(dist)
    msg = ARCPY.GetIDMessage(84450).format(snapStr)
    printOHSAnswer(msg)
    self.fish = fish

    #### Fishnet Count Subject ####
    printOHSSubject(84451, addNewLine=False)

    #### Create Temp Fishnet Grid ####
    gridFC = UTILS.returnScratchName("Fishnet_TempFC")
    self.cleanUpList.append(gridFC)

    #### Apply Output Coords to Create Fishnet ####
    oldSpatRef = ARCPY.env.outputCoordinateSystem
    ARCPY.env.outputCoordinateSystem = fishOutputCoords

    #### Fish No Extent ####
    oldExtent = ARCPY.env.extent
    ARCPY.env.extent = ""

    #### Apply Max XY Tolerance ####
    fishWithXY = UTILS.funWithXYTolerance(DM.CreateFishnet,
                                          self.ssdo.distanceInfo)

    #### Execute Fishnet ####
    fishWithXY(gridFC, self.fish.origin, self.fish.rotate,
               self.fish.quadLength, self.fish.quadLength,
               self.fish.numRows, self.fish.numCols,
               self.fish.corner, "NO_LABELS", self.fish.extent,
               "POLYGON")

    #### Project Back to GCS if Use Chordal ####
    if collSSDO.useChordal:
        gridFC_ProjBack = UTILS.returnScratchName("TempFC_Proj")
        DM.Project(gridFC, gridFC_ProjBack, collSSDO.spatialRef)
        UTILS.passiveDelete(gridFC)
        gridFC = gridFC_ProjBack

    #### Set Env Output Coords Back ####
    ARCPY.env.outputCoordinateSystem = oldSpatRef

    #### Create Empty Field Mappings to Ignore Atts ####
    fieldMap = ARCPY.FieldMappings()
    fieldMap.addTable(self.ssdo.inputFC)
    fieldMap.removeAll()

    #### Fishnet Count Answer ####
    printOHSAnswer(ARCPY.GetIDMessage(countMSGNumber))

    #### Create Weighted Fishnet Grid ####
    tempFC = UTILS.returnScratchName("Optimized_TempFC")
    self.cleanUpList.append(tempFC)
    joinWithXY = UTILS.funWithXYTolerance(ANA.SpatialJoin,
                                          self.ssdo.distanceInfo)
    joinWithXY(gridFC, self.ssdo.inputFC, tempFC,
               "JOIN_ONE_TO_ONE", "KEEP_ALL", "EMPTY")

    #### Clean Up Temp FCs ####
    UTILS.passiveDelete(gridFC)

    #### Remove Locations Outside Boundary FC ####
    featureLayer = "ClippedPointFC"
    DM.MakeFeatureLayer(tempFC, featureLayer)
    if self.boundaryFC:
        # Keep only cells intersecting the boundary polygon(s).
        msg = ARCPY.GetIDMessage(84454)
        ARCPY.SetProgressor("default", msg)
        DM.SelectLayerByLocation(featureLayer, "INTERSECT",
                                 self.boundaryFC, "#",
                                 "NEW_SELECTION")
        DM.SelectLayerByLocation(featureLayer, "INTERSECT",
                                 "#", "#", "SWITCH_SELECTION")
        DM.DeleteFeatures(featureLayer)
    else:
        if additionalZeroDistScale == "ALL":
            # Drop cells containing no incidents at all.
            msg = ARCPY.GetIDMessage(84455)
            ARCPY.SetProgressor("default", msg)
            DM.SelectLayerByAttribute(featureLayer, "NEW_SELECTION",
                                      '"Join_Count" = 0')
            DM.DeleteFeatures(featureLayer)
        else:
            # Drop cells beyond a multiple of the cell size from the
            # nearest incident.
            distance = additionalZeroDistScale * fish.quadLength
            distanceStr = self.ssdo.distanceInfo.linearUnitString(
                                           distance, convert=True)
            nativeStr = self.ssdo.distanceInfo.printDistance(
                                                        distance)
            # NOTE(review): hard-coded English (not GetIDMessage);
            # typo "pointsd" fixed here.
            msg = "Removing cells further than %s from input points...."
            ARCPY.AddMessage(msg % nativeStr)
            DM.SelectLayerByLocation(featureLayer, "INTERSECT",
                                     self.ssdo.inputFC, distanceStr,
                                     "NEW_SELECTION")
            DM.SelectLayerByLocation(featureLayer, "INTERSECT",
                                     "#", "#", "SWITCH_SELECTION")
            DM.DeleteFeatures(featureLayer)
    DM.Delete(featureLayer)
    del collSSDO
    ARCPY.env.extent = oldExtent
    self.createAnalysisSSDO(tempFC, "JOIN_COUNT")
def network2SWM(inputFC, masterField, swmFile, inputNetwork,
                impedance, cutoff = "#", numberOfNeighs = "#",
                inputBarrier = "#", uturnPolicy = "ALLOW_UTURNS",
                restrictions = "#", hierarchy = 'NO_HIERARCHY',
                searchTolerance = "#", fixed = 0,
                exponent = 1.0, rowStandard = True):
    """Creates spatial weights in SWM format from a combination
    of network data and feature classes.

    INPUTS:
    inputFC (str): path to the input feature class
    masterField (str): field in table that serves as the mapping
    swmFile (str): path to the SWM file
    inputNetwork (str): path to the network dataset (*.nd)
    impedance (str): attribute from network dataset (1)
    cutoff {float, "#"}: impedance threshold
    numberOfNeighs {int, "#"}: number of neighbors to return
    inputBarrier {str, "#"}: path to the input barrier feature class
    uturnPolicy {str, ALLOW_UTURNS}: uturn policy (2)
    restrictions {str, "#"}: attribute from network dataset (3)
    hierarchy {str, NO_HIERARCHY}: NO_HIERARCHY or USE_HIERARCHY
    searchTolerance {linear measure, "#"}: snap tolerance for network (4)
    fixed {int, 0}: Invert impedance as weight or return a weight = 1?
    exponent {float, 1.0}: distance decay
    rowStandard {bool, True}: row standardize weights?

    NOTES:
    (1) E.g. MINUTES and METERS
    (2) E.g. ALLOW_UTURNS or NO_UTURNS
    (3) E.g. ONEWAY
    (4) E.g. 5000 METERS

    RAISES:
    SystemExit: if the Network Analyst extension cannot be checked out
                (849).
    """

    #### Check out Network Analyst ####
    try:
        ARCPY.CheckOutExtension("Network")
    except:
        ARCPY.AddIDMessage("ERROR", 849)
        raise SystemExit()

    #### OD Matrix and Layers ####
    ODCostMatrix = "ODMatrix"
    # Maps a barrier feature class shape type to the NA sublayer that
    # should receive it.
    BarriersLayerNames = {"POINT": 'Barriers',
                          "POLYLINE" : 'PolylineBarriers',
                          "LINE" : 'PolylineBarriers',
                          "POLYGON" : 'PolygonBarriers'}
    lines = ODCostMatrix + "\\Lines"
    destFCLayer = "NetSWM_Dest"

    ##### Delete Layers If They Exist ####
    cleanupNetLayer(ODCostMatrix)
    cleanupNetLayer(destFCLayer)
    cleanupNetLayer(lines)

    #### Get Master Field From inputFC ####
    ssdo = SSDO.SSDataObject(inputFC, useChordal = False)
    ssdo.obtainDataGA(masterField, minNumObs = 2)
    master2Order = ssdo.master2Order
    masterFieldObj = ssdo.allFields[masterField.upper()]
    allMaster = master2Order.keys()
    numObs = ssdo.numObs
    numPossNeighs = numObs - 1

    #### Get Spatial Ref From Net Data Set ####
    netDesc = ARCPY.Describe(inputNetwork)
    netSpatialRef = netDesc.SpatialReference
    netSpatName = netSpatialRef.Name

    #### Set Maximum Neighbor Argument ####
    # Default/clamp rules: unset -> min(numPossNeighs, 30); too large ->
    # all other features; zero -> all other features (no warning).
    if numberOfNeighs == "#":
        numberOfNeighs = min( [numPossNeighs, 30] )
        ARCPY.AddIDMessage("WARNING", 1012, numberOfNeighs)

    if numberOfNeighs >= numObs:
        numberOfNeighs = numPossNeighs
        ARCPY.AddIDMessage("WARNING", 1013, numberOfNeighs)

    if numberOfNeighs == 0:
        numberOfNeighs = numPossNeighs

    #### All Features are Related.  Force Inverse Impedance ####
    if (numObs - numberOfNeighs) <= 1:
        if fixed:
            ARCPY.AddIDMessage("WARNING", 974)
            fixed = 0

    #### Add Self Neighbor For OD Solve ####
    # The OD solve returns each origin as its own first destination;
    # request one extra so k real neighbors survive the self-match.
    numberOfNeighsOD = numberOfNeighs + 1

    #### Make OD Cost Matrix Layer ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84132))
    odCostMatrixLayer = NET.MakeODCostMatrixLayer(inputNetwork, ODCostMatrix,
                                                  impedance, cutoff,
                                                  numberOfNeighsOD, "#",
                                                  uturnPolicy, restrictions,
                                                  hierarchy, "#",
                                                  "NO_LINES").getOutput(0)

    #### OD Matrix and Layers ####
    naClassNames = NET.GetNAClassNames(odCostMatrixLayer)
    destinationLayer = ODCostMatrix + OS.sep + naClassNames["Destinations"]
    # NOTE(review): originLayer is assigned but never read below.
    originLayer = ODCostMatrix + OS.sep + naClassNames["Origins"]
    lines = ODCostMatrix + OS.sep + naClassNames["ODLines"]

    #### Add Barriers ####
    if inputBarrier != "" and inputBarrier != "#":
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84147))
        barDesc = ARCPY.Describe(inputBarrier)
        barShapeType = barDesc.ShapeType.upper()
        if barShapeType in BarriersLayerNames:
            barString = naClassNames[BarriersLayerNames[barShapeType]]
            NET.AddLocations(ODCostMatrix, barString, inputBarrier, "",
                             searchTolerance)

    #### Add Master Field to OD for Selection ####
    masterType = UTILS.convertType[masterFieldObj.type]
    NET.AddFieldToAnalysisLayer(ODCostMatrix, naClassNames["Destinations"],
                                masterField, masterType)

    #### Add Destinations ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84133))
    # Field map: use the master ID as both the NA "Name" and the carried
    # attribute so OD line names encode "<origin> - <destination>".
    masterToken = "Name " + masterField + " #;"
    masterToken += masterField + " " + masterField + " #"
    NET.AddLocations(ODCostMatrix, naClassNames["Destinations"], inputFC,
                     masterToken, searchTolerance,
                     exclude_restricted_elements = "EXCLUDE")

    #### Initialize Spatial Weights Matrix File ####
    hierarchyBool = hierarchy == 'USE_HIERARCHY'
    addConcept = WU.wTypeDispatch[fixed].split("_")[0]
    forceFixed = (fixed == True)
    swmWriter = WU.SWMWriter(swmFile, masterField, netSpatName,
                             numObs, rowStandard,
                             inputFC = inputFC, wType = 10,
                             inputNet = inputNetwork,
                             impedanceField = impedance,
                             barrierFC = inputBarrier,
                             uturnPolicy = uturnPolicy,
                             restrictions = restrictions,
                             useHierarchy = hierarchyBool,
                             searchTolerance = searchTolerance,
                             addConcept = addConcept,
                             exponent = exponent,
                             forceFixed = forceFixed)

    #### Create FieldList for Subset Searching ####
    totalImpedance = "Total_" + impedance
    fieldList = ";".join( ["NAME", totalImpedance] )

    #### Get Chunks if Necessary ####
    # Cap the number of origins per solve so the OD matrix stays at or
    # below ~10M origin-destination pairs.
    numOrigins = int(10000000. / numObs)
    allMaster.sort()
    chunkedIDs = UTILS.chunk(allMaster, numOrigins)
    sqlStrings = UTILS.sqlChunkStrings(inputFC, masterField, chunkedIDs)
    numChunks = len(sqlStrings)

    #### Create Field Map for Origins ####
    masterToken = "Name " + masterField + " #"
    orgFieldMap = [masterToken, 'CurbApproach CurbApproach 0',
                   'SourceID SourceID #', 'SourceOID SourceOID #',
                   'PosAlong PosAlong #', 'SideOfEdge SideOfEdge #']
    orgFieldMap = ";".join(orgFieldMap)

    #### Keep Track of Features That Snap to Network ####
    snappedFeatures = set([])

    for chunkNum in xrange(numChunks):
        progMsg = ARCPY.GetIDMessage(84145).format(chunkNum + 1, numChunks)
        ARCPY.SetProgressor("default", progMsg)

        #### Make Origins from Chunk of Destinations ####
        sqlValue = sqlStrings[chunkNum]
        DM.MakeFeatureLayer(destinationLayer, destFCLayer, sqlValue)
        NET.AddLocations(ODCostMatrix, naClassNames["Origins"], destFCLayer,
                         orgFieldMap, "#", "#", "#", "#", "CLEAR")

        #### Solve OD Matrix and Select Data ####
        NET.Solve(ODCostMatrix, "SKIP")

        #### Count the Number of NonZero Spatial Linkages ####
        numLinks = UTILS.getCount(lines)

        #### Create Search Cursor for OD Line Info ####
        rows = ARCPY.SearchCursor(lines, "", None, fieldList)
        row = rows.next()

        #### Set Tool Progressor and Process Information ####
        ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0,
                            numLinks, 1)

        #### Process First Record ####
        # OD line NAME has the form "<originID> - <destID>"; the loop
        # below relies on lines being grouped by origin ID.
        ODInfo = row.getValue("NAME")
        lastID, neighID = [ int(i) for i in ODInfo.split(" - ") ]
        impValue = row.getValue(totalImpedance)
        weight = WU.distance2Weight(impValue, wType = fixed,
                                    exponent = exponent)
        neighs = []
        weights = []
        # Skip the self-neighbor pair (origin == destination).
        if lastID != neighID:
            neighs.append(neighID)
            weights.append(weight)

        #### Process Remaining Records ####
        progMsg = ARCPY.GetIDMessage(84146).format(chunkNum + 1, numChunks)
        ARCPY.SetProgressor("step", progMsg, 0, numLinks, 1)
        while row:
            #### Get Origin and Destination Unique IDs ####
            ODInfo = row.getValue("NAME")
            masterID, neighID = [ int(i) for i in ODInfo.split(" - ") ]

            #### Obtain Impedance and Create Weight ####
            impValue = row.getValue(totalImpedance)
            weight = WU.distance2Weight(impValue, wType = fixed,
                                        exponent = exponent)

            #### Check Whether it is the Same ID ####
            if masterID == lastID:
                if masterID != neighID:
                    neighs.append(neighID)
                    weights.append(weight)
            else:
                #### New ID, Add Last ID Result to SWM File ####
                swmWriter.swm.writeEntry(lastID, neighs, weights)
                snappedFeatures.add(lastID)

                #### Reset and Initialize Containers ####
                neighs = []
                weights = []
                if masterID != neighID:
                    neighs.append(neighID)
                    weights.append(weight)
                lastID = masterID

            ARCPY.SetProgressorPosition()
            row = rows.next()

        #### Write Last ID Result ####
        swmWriter.swm.writeEntry(lastID, neighs, weights)
        snappedFeatures.add(lastID)

        #### Clean Up ####
        del rows

    ##### Delete Layers If They Exist ####
    cleanupNetLayer(ODCostMatrix)
    cleanupNetLayer(destFCLayer)
    cleanupNetLayer(lines)

    #### Add Empty SWM Entries for Features Not Snapped to Network ####
    notSnapped = snappedFeatures.symmetric_difference(allMaster)
    for masterID in notSnapped:
        swmWriter.swm.writeEntry(masterID, [], [])

    #### Report Warning/Max Neighbors ####
    swmWriter.reportNeighInfo()

    #### Clean Up ####
    swmWriter.close()

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()
def reportHTML(self, htmlFile=None): """Generates a graphical html report for Moran's I.""" #### Shorthand Attributes #### zi = self.zi #### Progress and Create HTML File Name #### writeMSG = ARCPY.GetIDMessage(84228) ARCPY.SetProgressor("default", writeMSG) ARCPY.AddMessage(writeMSG) if not htmlFile: prefix = ARCPY.GetIDMessage(84227) outputDir = UTILS.returnScratchWorkSpace() baseDir = UTILS.getBaseFolder(outputDir) htmlFile = UTILS.returnScratchName(prefix, fileType="TEXT", scratchWS=baseDir, extension="html") #### Obtain Correct Images #### imageDir = UTILS.getImageDir() clustStr = ARCPY.GetIDMessage(84243) dispStr = ARCPY.GetIDMessage(84244) if zi <= -2.58: imageFile = OS.path.join(imageDir, "dispersedValues01.png") info = ("1%", dispStr) imageBox = OS.path.join(imageDir, "dispersedBox01.png") elif (-2.58 < zi <= -1.96): imageFile = OS.path.join(imageDir, "dispersedValues05.png") info = ("5%", dispStr) imageBox = OS.path.join(imageDir, "dispersedBox05.png") elif (-1.96 < zi <= -1.65): imageFile = OS.path.join(imageDir, "dispersedValues10.png") info = ("10%", dispStr) imageBox = OS.path.join(imageDir, "dispersedBox10.png") elif (-1.65 < zi < 1.65): imageFile = OS.path.join(imageDir, "randomValues.png") imageBox = OS.path.join(imageDir, "randomBox.png") elif (1.65 <= zi < 1.96): imageFile = OS.path.join(imageDir, "clusteredValues10.png") info = ("10%", clustStr) imageBox = OS.path.join(imageDir, "clusteredBox10.png") elif (1.96 <= zi < 2.58): imageFile = OS.path.join(imageDir, "clusteredValues05.png") info = ("5%", clustStr) imageBox = OS.path.join(imageDir, "clusteredBox05.png") else: imageFile = OS.path.join(imageDir, "clusteredValues01.png") info = ("1%", clustStr) imageBox = OS.path.join(imageDir, "clusteredBox01.png") #### Footnote #### footStart = ARCPY.GetIDMessage(84230).format(zi) if abs(zi) >= 1.65: footEnd = ARCPY.GetIDMessage(84231) footEnd = footEnd.format(*info) footerText = footStart + footEnd else: footEnd = ARCPY.GetIDMessage(84232) footerText = 
footStart + footEnd #### Root Element #### title = ARCPY.GetIDMessage(84229) reportElement, reportTree = REPORT.xmlReport(title=title) #### Begin Graphic SubElement #### graphicElement = REPORT.xmlGraphic(reportElement, imageFile, footerText=footerText) #### Floating Table #### rowVals = [[ARCPY.GetIDMessage(84148), self.giString, ""], [ARCPY.GetIDMessage(84151), self.ziString, imageBox], [ARCPY.GetIDMessage(84152), self.pvString, ""]] fTable = REPORT.xmlTable(graphicElement, rowVals, tType="ssFloat") #### Moran Table #### rowVals = [[ARCPY.GetIDMessage(84148), self.giString], [ARCPY.GetIDMessage(84149), self.eiString], [ARCPY.GetIDMessage(84150), self.viString], [ARCPY.GetIDMessage(84151), self.ziString], [ARCPY.GetIDMessage(84152), self.pvString]] mTable = REPORT.xmlTable(reportElement, rowVals, title=ARCPY.GetIDMessage(84160)) #### Dataset Table #### rowVals = [ [UTILS.addColon(ARCPY.GetIDMessage(84233)), self.ssdo.inputFC], [UTILS.addColon(ARCPY.GetIDMessage(84016)), self.varName], [ UTILS.addColon(ARCPY.GetIDMessage(84234)), WU.wTypeDispatch[self.wType] ], [UTILS.addColon(ARCPY.GetIDMessage(84235)), self.concept], [UTILS.addColon(ARCPY.GetIDMessage(84236)), str(self.rowStandard)], [UTILS.addColon(ARCPY.GetIDMessage(84237)), self.thresholdStr], [UTILS.addColon(ARCPY.GetIDMessage(84238)), str(self.weightsFile)], [ UTILS.addColon(ARCPY.GetIDMessage(84418)), str(self.ssdo.selectionSet) ] ] dTable = REPORT.xmlTable(reportElement, rowVals, title=ARCPY.GetIDMessage(84239)) #### Create HTML #### html = REPORT.report2html(reportTree, htmlFile) ARCPY.AddMessage(htmlFile) return htmlFile
def construct(self):
    """Constructs the neighborhood structure for each feature and
    dispatches the appropriate values for the calculation of the
    statistic.

    Accumulates the Global Moran's I ingredients (numer, denom, rowSum,
    colSum, s0, s1, wij) by delegating each feature's row of weights to
    self.processRow.

    RAISES:
    SystemExit: if the analysis field has zero variance (906) or the
                weights file does not cover all features (842).
    """

    #### Shorthand Attributes ####
    ssdo = self.ssdo
    varName = self.varName
    concept = self.concept
    gaConcept = concept.lower()
    threshold = self.threshold
    exponent = self.exponent
    wType = self.wType
    rowStandard = self.rowStandard
    numObs = self.numObs
    master2Order = self.master2Order
    masterField = ssdo.masterField
    weightsFile = self.weightsFile

    #### Assure that Variance is Larger than Zero ####
    yVar = NUM.var(self.y)
    if NUM.isnan(yVar) or yVar <= 0.0:
        ARCPY.AddIDMessage("Error", 906)
        raise SystemExit()

    #### Create Deviation Variables ####
    self.yBar = NUM.mean(self.y)
    self.yDev = self.y - self.yBar

    #### Create Base Data Structures/Variables ####
    self.numer = 0.0
    self.denom = NUM.sum(self.yDev**2.0)
    self.rowSum = NUM.zeros(numObs)
    self.colSum = NUM.zeros(numObs)
    self.s0 = 0
    self.s1 = 0
    self.wij = {}

    #### Set Neighborhood Structure Type ####
    if self.weightsFile:
        if self.swmFileBool:
            #### Open Spatial Weights and Obtain Chars ####
            swm = WU.SWMReader(weightsFile)
            N = swm.numObs
            rowStandard = swm.rowStandard

            #### Check to Assure Complete Set of Weights ####
            if numObs > N:
                ARCPY.AddIDMessage("Error", 842, numObs, N)
                raise SystemExit()

            #### Check if Selection Set ####
            isSubSet = False
            if numObs < N:
                isSubSet = True
            # Iterate every SWM entry; entries outside the selection are
            # skipped via includeIt below.
            iterVals = xrange(N)
        else:
            #### Warning for GWT with Bad Records/Selection ####
            if ssdo.selectionSet or ssdo.badRecords:
                ARCPY.AddIDMessage("WARNING", 1029)

            #### Build Weights Dictionary ####
            weightDict = WU.buildTextWeightDict(weightsFile, master2Order)
            iterVals = master2Order.keys()
            N = numObs
    elif wType in [4, 5]:
        #### Polygon Contiguity ####
        if wType == 4:
            contiguityType = "ROOK"
        else:
            contiguityType = "QUEEN"
        contDict = WU.polygonNeighborDict(ssdo.inputFC, ssdo.oidName,
                                          contiguityType=contiguityType)
        iterVals = master2Order.keys()
        N = numObs
    else:
        gaTable = ssdo.gaTable
        gaSearch = GAPY.ga_nsearch(gaTable)
        if wType == 7:
            #### Zone of Indiff, All Related to All ####
            gaSearch.init_nearest(threshold, numObs, gaConcept)
        else:
            #### Inverse and Fixed Distances ####
            gaSearch.init_nearest(threshold, 0, gaConcept)
        iterVals = range(numObs)
        N = numObs
        neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch,
                                               weight_type=wType,
                                               exponent=exponent,
                                               row_standard=rowStandard)

    #### Create Progressor ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84007), 0, N, 1)

    #### Create Neighbor Info Class ####
    ni = WU.NeighborInfo(masterField)

    #### Calculation For Each Feature ####
    for i in iterVals:
        if self.swmFileBool:
            #### Using SWM File ####
            info = swm.swm.readEntry()
            masterID = info[0]
            # Python 2 idiom; has_key is removed in Python 3.
            if master2Order.has_key(masterID):
                rowInfo = WU.getWeightsValuesSWM(info, master2Order,
                                                 self.yDev,
                                                 rowStandard=rowStandard,
                                                 isSubSet=isSubSet)
                includeIt = True
            else:
                includeIt = False
        elif self.weightsFile and not self.swmFileBool:
            #### Text Weights ####
            masterID = i
            includeIt = True
            rowInfo = WU.getWeightsValuesText(masterID, master2Order,
                                              weightDict, self.yDev)
        elif wType in [4, 5]:
            #### Polygon Contiguity ####
            masterID = i
            includeIt = True
            rowInfo = WU.getWeightsValuesCont(masterID, master2Order,
                                              contDict, self.yDev,
                                              rowStandard=rowStandard)
        else:
            #### Distance Based ####
            masterID = gaTable[i][0]
            includeIt = True
            rowInfo = WU.getWeightsValuesOTF(neighWeights, i, self.yDev)

        #### Subset Boolean for SWM File ####
        if includeIt:
            #### Parse Row Info ####
            orderID, yiDev, nhIDs, nhVals, weights = rowInfo

            #### Assure Neighbors Exist After Selection ####
            nn, nhIDs, nhVals, weights = ni.processInfo(masterID, nhIDs,
                                                        nhVals, weights)

            if nn:
                #### Process Feature Contribution to Moran's I ####
                self.processRow(orderID, yiDev, nhIDs,
                                nhVals, weights)

        #### Reset Progessor ####
        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    if self.swmFileBool:
        swm.close()

    #### Report on Features with No Neighbors ####
    ni.reportNoNeighbors()

    #### Report on Features with Large Number of Neighbors ####
    ni.reportWarnings()
    ni.reportMaximums()
    self.neighInfo = ni
def kNearest2SWM(inputFC, swmFile, masterField, concept = "EUCLIDEAN",
                 kNeighs = 1, rowStandard = True):
    """Writes a sparse spatial weights matrix (SWM) file in which every
    feature's neighborhood is its k nearest features.

    INPUTS:
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    concept: {str, EUCLIDEAN}: EUCLIDEAN or MANHATTAN
    kNeighs {int, 1}: number of neighbors to return
    rowStandard {bool, True}: row standardize weights?
    """

    #### Reject a Non-Positive Neighbor Count ####
    if kNeighs <= 0:
        ARCPY.AddIDMessage("ERROR", 976)
        raise SystemExit()

    #### Default Progressor While Neighborhood Structure is Built ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)
    totalFeatures = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(totalFeatures, minNumObs = 2)

    #### Validation of Master Field ####
    ERROR.checkField(ssdo.allFields, masterField, types = [0,1])

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.catPath, [masterField],
                                 spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    numFeats = gaInfo[0]
    ERROR.errorNumberOfObs(numFeats, minNumObs = 2)

    #### Process any bad records encountered ####
    numBadRecs = totalFeatures - numFeats
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        ERROR.reportBadRecords(totalFeatures, numBadRecs, badRecs,
                               label = ssdo.oidName)

    #### k Must be Smaller Than the Number of Features ####
    if kNeighs >= numFeats:
        ARCPY.AddIDMessage("ERROR", 975)
        raise SystemExit()

    #### Configure the k-Nearest Neighbor Search ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    concept, gaConcept = WU.validateDistanceMethod(concept,
                                                   ssdo.spatialRef)
    gaSearch.init_nearest(0.0, kNeighs, gaConcept)
    neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch,
                                           weight_type = 1,
                                           row_standard = False)

    #### Step Progressor for Weights Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0,
                        numFeats, 1)

    #### Initialize Spatial Weights Matrix File ####
    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName,
                             numFeats, rowStandard, inputFC = inputFC,
                             wType = 2, distanceMethod = concept,
                             numNeighs = kNeighs)

    #### Write One Entry Per Feature, Guarding Against Duplicate IDs ####
    seenIDs = set([])
    for orderID in xrange(numFeats):
        masterID = int(gaTable[orderID][2])
        if masterID not in seenIDs:
            seenIDs.add(masterID)
        else:
            ARCPY.AddIDMessage("Error", 644, masterField)
            ARCPY.AddIDMessage("Error", 643)
            raise SystemExit()

        #### Translate Neighbor Orders into Master IDs ####
        nhOrders, nhWeights = neighWeights[orderID]
        nhMasterIDs = [ gaTable[nh][2] for nh in nhOrders ]

        #### Add Spatial Weights Matrix Entry ####
        swmWriter.swm.writeEntry(masterID, nhMasterIDs, nhWeights)

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    swmWriter.close()
    del gaTable

    #### Report Warning/Max Neighbors ####
    swmWriter.reportNeighInfo()

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()
def createOutput(self, outputFC):
    """Creates an Output Feature Class with the Standard Distances.

    INPUTS:
    outputFC (str): path to the output feature class

    RAISES:
    SystemExit: if the feature class cannot be created (210) or every
                case yields degenerate geometry with no case field (978).
    """

    #### Validate Output Workspace ####
    ERROR.checkOutputPath(outputFC)

    #### Shorthand Attributes ####
    ssdo = self.ssdo
    caseField = self.caseField

    #### Increase Extent if not Projected ####
    # Ellipses can extend beyond the input extent in unprojected space;
    # widen the XY domain by the largest semi-axis so inserts succeed.
    if ssdo.spatialRefType != "Projected":
        seValues = self.se.values()
        if len(seValues):
            maxSE = NUM.array([ i[0:2] for i in seValues ]).max()
            largerExtent = UTILS.increaseExtentByConstant(ssdo.extent,
                                                constant=maxSE)
            largerExtent = [ LOCALE.str(i) for i in largerExtent ]
            ARCPY.env.XYDomain = " ".join(largerExtent)

    #### Create Output Feature Class ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84003))
    outPath, outName = OS.path.split(outputFC)

    try:
        DM.CreateFeatureclass(outPath, outName, "POLYGON", "", ssdo.mFlag,
                              ssdo.zFlag, ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Add Fields to Output FC ####
    dataFieldNames = UTILS.getFieldNames(seFieldNames, outPath)
    shapeFieldNames = ["SHAPE@"]
    for fieldName in dataFieldNames:
        UTILS.addEmptyField(outputFC, fieldName, "DOUBLE")

    caseIsDate = False
    if caseField:
        fcCaseField = ssdo.allFields[caseField]
        validCaseName = UTILS.validQFieldName(fcCaseField, outPath)
        caseType = UTILS.convertType[fcCaseField.type]
        UTILS.addEmptyField(outputFC, validCaseName, caseType)
        dataFieldNames.append(validCaseName)
        if caseType.upper() == "DATE":
            caseIsDate = True

    #### Write Output ####
    badCaseRadians = []
    allFieldNames = shapeFieldNames + dataFieldNames
    rows = DA.InsertCursor(outputFC, allFieldNames)
    for case in self.caseKeys:

        #### Get Results ####
        xVal, yVal = self.meanCenter[case]
        seX, seY, degreeRotation, radianR1, radianR2 = self.se[case]
        seX2 = seX**2.0
        seY2 = seY**2.0

        #### Create Empty Polygon Geometry ####
        poly = ARCPY.Array()

        #### Check for Valid Radius ####
        # A zero or NaN semi-axis cannot form an ellipse; flag the case
        # as bad up front (badRadian = 6 skips the vertex loop).
        seXZero = UTILS.compareFloat(0.0, seX, rTol=.0000001)
        seXNan = NUM.isnan(seX)
        seXBool = seXZero + seXNan
        seYZero = UTILS.compareFloat(0.0, seY, rTol=.0000001)
        seYNan = NUM.isnan(seY)
        seYBool = seYZero + seYNan
        if seXBool or seYBool:
            badRadian = 6
            badCase = UTILS.caseValue2Print(case, self.caseIsString)
            badCaseRadians.append(badCase)
        else:
            badRadian = 0
            cosRadian = NUM.cos(radianR1)
            sinRadian = NUM.sin(radianR1)

            #### Calculate a Point For Each ####
            #### Degree in Ellipse Polygon ####
            for degree in NUM.arange(0, 360):
                try:
                    radians = UTILS.convert2Radians(degree)
                    tanVal2 = NUM.tan(radians)**2.0
                    dX = MATH.sqrt((seX2 * seY2) /
                                   (seY2 + (seX2 * tanVal2)))
                    dY = MATH.sqrt((seY2 * (seX2 - dX**2.0)) /
                                   seX2)

                    #### Adjust for Quadrant ####
                    if 90 <= degree < 180:
                        dX = -dX
                    elif 180 <= degree < 270:
                        dX = -dX
                        dY = -dY
                    elif degree >= 270:
                        dY = -dY

                    #### Rotate X and Y ####
                    dXr = dX * cosRadian - dY * sinRadian
                    dYr = dX * sinRadian + dY * cosRadian

                    #### Create Point Shifted to ####
                    #### Ellipse Centroid ####
                    pntX = dXr + xVal
                    pntY = dYr + yVal
                    pnt = ARCPY.Point(pntX, pntY, ssdo.defaultZ)
                    poly.add(pnt)
                except:
                    # Tolerate up to 5 failed vertices (e.g. tangent
                    # blow-ups); 6 failures marks the whole case bad.
                    badRadian += 1
                    if badRadian == 6:
                        badCase = UTILS.caseValue2Print(case,
                                                self.caseIsString)
                        badCaseRadians.append(badCase)
                        break

        if badRadian < 6:
            #### Create and Populate New Feature ####
            poly = ARCPY.Polygon(poly, None, True)
            rowResult = [poly, xVal, yVal, seX, seY, radianR2]

            if caseField:
                caseValue = case.item()
                if caseIsDate:
                    caseValue = TUTILS.iso2DateTime(caseValue)
                rowResult.append(caseValue)
            rows.insertRow(rowResult)

    #### Report Bad Cases Due to Geometry (coincident pts) ####
    nBadRadians = len(badCaseRadians)
    if nBadRadians:
        if caseField:
            badCaseRadians = " ".join(badCaseRadians)
            ARCPY.AddIDMessage("WARNING", 1011, caseField,
                               badCaseRadians)
        else:
            ARCPY.AddIDMessage("ERROR", 978)
            raise SystemExit()

    #### Return Extent to Normal if not Projected ####
    if ssdo.spatialRefType != "Projected":
        ARCPY.env.XYDomain = ""

    #### Clean Up ####
    del rows

    #### Set Attribute ####
    self.outputFC = outputFC
def distance2SWM(inputFC, swmFile, masterField, fixed = 0,
                 concept = "EUCLIDEAN", exponent = 1.0, threshold = None,
                 kNeighs = 1, rowStandard = True):
    """Creates a sparse spatial weights matrix (SWM) based on fixed or
    inverse distance.

    INPUTS:
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    fixed (boolean): fixed (1) or inverse (0) distance?
    concept: {str, EUCLIDEAN}: EUCLIDEAN or MANHATTAN
    exponent {float, 1.0}: distance decay
    threshold {float, None}: distance threshold
    kNeighs (int): number of neighbors to return
    rowStandard {bool, True}: row standardize weights?

    RAISES:
    SystemExit: if kNeighs >= N with fixed distance (975) or the master
                field contains duplicate IDs (644/643).
    """

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField,
                                    types = [0,1])

    #### Read Data ####
    ssdo.obtainDataGA(masterField, minNumObs = 2)
    N = ssdo.numObs
    gaTable = ssdo.gaTable
    if fixed:
        wType = 1
    else:
        wType = 0

    #### Set Default Progressor for Neigborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Set the Distance Threshold ####
    concept, gaConcept = WU.validateDistanceMethod(concept,
                                                   ssdo.spatialRef)
    # Identity check, not equality: None is a singleton sentinel.
    if threshold is None:
        threshold, avgDist = WU.createThresholdDist(ssdo,
                                                    concept = concept)

    #### Assures that the Threshold is Appropriate ####
    gaExtent = UTILS.get92Extent(ssdo.extent)
    threshold, maxSet = WU.checkDistanceThreshold(ssdo, threshold,
                                                  weightType = wType)

    #### If the Threshold is Set to the Max ####
    #### Set to Zero for Script Logic ####
    if maxSet:
        #### All Locations are Related ####
        threshold = SYS.maxint
        if N > 500:
            ARCPY.AddIDMessage("Warning", 717)

    #### Assure k-Nearest is Less Than Number of Features ####
    if kNeighs >= N and fixed:
        ARCPY.AddIDMessage("ERROR", 975)
        raise SystemExit()

    #### Create Distance/k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(threshold, kNeighs, gaConcept)
    neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch,
                                           weight_type = wType,
                                           exponent = exponent,
                                           row_standard = False)

    #### Set Progressor for Weights Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, N, 1)

    #### Initialize Spatial Weights Matrix File ####
    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName,
                             N, rowStandard, inputFC = inputFC,
                             wType = wType, distanceMethod = concept,
                             exponent = exponent, threshold = threshold)

    #### Track Unique Master IDs ####
    # (A leftover, never-used `masterDict = {}` was removed here; the
    # set below is the only duplicate-ID guard.)
    masterSet = set([])

    for row in xrange(N):
        masterID = int(gaTable[row][2])
        if masterID in masterSet:
            ARCPY.AddIDMessage("Error", 644, masterField)
            ARCPY.AddIDMessage("Error", 643)
            raise SystemExit()
        else:
            masterSet.add(masterID)

        #### Map Neighbor Table Orders Back to Master IDs ####
        neighs, weights = neighWeights[row]
        neighs = [ gaTable[nh][2] for nh in neighs ]

        #### Add Spatial Weights Matrix Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights)

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    swmWriter.close()
    del gaTable

    #### Report Warning/Max Neighbors ####
    swmWriter.reportNeighInfo()

    #### Add Linear/Angular Unit (Distance Based Only) ####
    distanceOut = ssdo.distanceInfo.outputString
    distanceOut = [ARCPY.GetIDMessage(84344).format(distanceOut)]

    #### Report Spatial Weights Summary ####
    swmWriter.report(additionalInfo = distanceOut)

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()
def initialize(self):
    """Reads data into a GA structure for neighborhood searching and
    sets the study area envelope.

    Side effects: populates self.minRect, self.envelope, self.maxDistance,
    self.dIncrement, self.begDist, self.cutoffs and related step/study-area
    attributes used by the distance-band analysis.

    RAISES:
    SystemExit: on zero weight sum (898), multi-polygon study area (936),
                or a degenerate (zero) maximum distance (1170).
    """

    #### Shorthand Attributes ####
    ssdo = self.ssdo
    weightField = self.weightField
    if weightField:
        fieldList = [weightField]
    else:
        fieldList = []

    #### Create GA Data Structure ####
    ssdo.obtainDataGA(ssdo.oidName, fieldList, minNumObs = 3,
                      warnNumObs = 30)
    N = len(ssdo.gaTable)

    #### Get Weights ####
    if weightField:
        weights = ssdo.fields[weightField].returnDouble()

        #### Report No Weights ####
        weightSum = weights.sum()
        if not weightSum > 0.0:
            ARCPY.AddIDMessage("ERROR", 898)
            raise SystemExit()

    #### Set Study Area ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84248))
    # Wrap the min-bounding-geometry helper so it runs with the extent
    # environment cleared.
    clearedMinBoundGeom = UTILS.clearExtent(UTILS.minBoundGeomPoints)

    #### Set Initial Study Area FC ####
    if self.studyAreaMethod == 1 and self.studyAreaFC:
        #### Assure Only A Single Polygon in Study Area FC ####
        polyCount = UTILS.getCount(self.studyAreaFC)
        if polyCount != 1:
            ARCPY.AddIDMessage("ERROR", 936)
            raise SystemExit()
        self.tempStudyArea = False

        #### Read User Provided Study Area ####
        polyInfo = UTILS.returnPolygon(self.studyAreaFC,
                                       spatialRef = ssdo.spatialRefString)
        self.studyAreaPoly, self.studyArea = polyInfo

        #### Create Temp Min. Enc. Rectangle and Class ####
        tempMBG_FC = UTILS.returnScratchName("tempMBG_FC")
        clearedMinBoundGeom(self.studyAreaPoly, tempMBG_FC,
                            geomType = "RECTANGLE_BY_AREA",
                            spatialRef = ssdo.spatialRef)
        self.minRect = UTILS.MinRect(tempMBG_FC)
        UTILS.passiveDelete(tempMBG_FC)

    else:
        #### Create Min. Enc. Rectangle ####
        self.studyAreaFC = UTILS.returnScratchName("regularBound_FC")
        self.tempStudyArea = True
        clearedMinBoundGeom(ssdo.xyCoords, self.studyAreaFC,
                            geomType = "RECTANGLE_BY_AREA",
                            spatialRef = ssdo.spatialRef)
        polyInfo = UTILS.returnPolygon(self.studyAreaFC,
                                       spatialRef = ssdo.spatialRefString)
        self.studyAreaPoly, self.studyArea = polyInfo

        #### Create Min. Enc. Rectangle Class ####
        self.minRect = UTILS.MinRect(self.studyAreaFC)

        if self.reduce:
            #### Only Need To Create FC if Reduce Buffer ####
            UTILS.createPolygonFC(self.studyAreaFC, self.studyAreaPoly,
                                  spatialRef = ssdo.spatialRefString)

    #### Set Extent and Envelope and Min Rect ####
    self.envelope = UTILS.Envelope(ssdo.extent)
    self.maxDistance = self.minRect.maxLength * 0.25
    if self.maxDistance > (self.minRect.minLength * .5):
        #### 25% of Max Extent is Larger Than Half Min Extent ####
        #### Results in Reduced Study Area Failure ####
        if self.reduce:
            self.maxDistance = self.minRect.minLength * 0.25

    #### Determine Distance Increment ####
    if not self.dIncrement:
        if self.begDist:
            distRange = self.maxDistance - self.begDist
        else:
            distRange = self.maxDistance
        self.dIncrement = float(distRange / self.nIncrements)

    #### Determine Starting Distance ####
    if not self.begDist:
        self.begDist = self.dIncrement

    #### Determine All Distance Cutoffs ####
    rangeInc = xrange(self.nIncrements)
    cutoffs = []
    for inc in rangeInc:
        val = (inc * self.dIncrement) + self.begDist
        cutoffs.append(val)
    stepMax = cutoffs[-1]

    #### Check Cutoff Values ###
    if self.begDist > (self.minRect.maxLength * 0.51):
        ARCPY.AddIDMessage("WARNING", 934)
    elif stepMax > self.minRect.maxLength:
        ARCPY.AddIDMessage("WARNING", 935)

    #### Set Step Attributes ####
    self.stepMax = stepMax
    self.cutoffs = NUM.array(cutoffs)
    self.reverseOrder = range(self.nIncrements - 1, -1, -1)
    self.cutoffOrder = range(self.nIncrements)
    self.largestDistBand = self.cutoffs[-1]
    self.tolerance = self.minRect.tolerance
    self.xyTolerance = ssdo.spatialRef.XYTolerance

    #### Get Linear Unit for Reduce, Simulate and Ripley ####
    floatMax = self.stepMax * 1.0
    self.simulateUnitStr = ssdo.distanceInfo.linearUnitString(floatMax,
                                                        convert = True)
    # Negative buffer distance shrinks the study area polygon.
    self.reduceUnitStr = "-" + self.simulateUnitStr

    #### Create Smaller Poly FC for Reduce ####
    if self.reduce:
        self.reducedFC = UTILS.returnScratchName("reducedBound_FC")
        if UTILS.compareFloat(floatMax, 0):
            ARCPY.AddIDMessage("ERROR", 1170)
            raise SystemExit()
        ANA.Buffer(self.studyAreaFC, self.reducedFC,
                   self.reduceUnitStr)
        reduceInfo = UTILS.returnPolygon(self.reducedFC,
                                  spatialRef = ssdo.spatialRefString)
        self.reducePoly, self.reduceArea = reduceInfo
        self.reducePath = PATH.Path(self.reducePoly)
        # A None polygon means the negative buffer consumed the whole
        # study area.
        if self.reducePoly == None:
            ARCPY.AddIDMessage("ERROR", 1170)
            raise SystemExit()
        descRed = ARCPY.Describe(self.reducedFC)
        redExtent = descRed.extent
        self.redEnvelope = UTILS.Envelope(redExtent)
        self.redTolerance = self.redEnvelope.tolerance
        UTILS.passiveDelete(self.reducedFC)

    #### Create Study Area Envelope ####
    descSA = ARCPY.Describe(self.studyAreaFC)
    self.extentSA = descSA.extent
    self.envelopeSA = UTILS.Envelope(self.extentSA)
    envCoordsSA = self.envelopeSA.envelope
    self.minX, self.minY, self.maxX, self.maxY = envCoordsSA
    self.studyAreaPath = PATH.Path(self.studyAreaPoly)
def setupWeights():
    """Retrieves the parameters from the User Interface and executes the
    appropriate commands.

    Reads tool parameters 0-12 from the geoprocessing UI, resolves the
    spatial relationship concept to a weights type code via
    WU.weightDispatch, and dispatches to the matching *2SWM builder.

    RAISES
    SystemExit: when the space concept is unknown (IDMessage 723) or a
        tabular weights run is requested without a table (IDMessage 721).
    """
    inputFC = ARCPY.GetParameterAsText(0)
    masterField = ARCPY.GetParameterAsText(1)
    swmFile = ARCPY.GetParameterAsText(2)
    spaceConcept = ARCPY.GetParameterAsText(3)
    distanceConcept = ARCPY.GetParameterAsText(4)
    exponent = UTILS.getNumericParameter(5)
    threshold = UTILS.getNumericParameter(6)
    kNeighs = UTILS.getNumericParameter(7)
    rowStandard = ARCPY.GetParameter(8)
    tableFile = ARCPY.GetParameterAsText(9)

    #### Assess Temporal Options ####
    timeField = UTILS.getTextParameter(10, fieldName = True)
    timeType = UTILS.getTextParameter(11)
    timeValue = UTILS.getNumericParameter(12)

    #### Assign to appropriate spatial weights method ####
    try:
        wType = WU.weightDispatch[spaceConcept]
    except KeyError:
        # Narrowed from a bare except: only an unknown concept key should
        # produce the "invalid conceptualization" error.
        ARCPY.AddIDMessage("Error", 723)
        raise SystemExit()

    #### EUCLIDEAN or MANHATTAN ####
    concept = WU.conceptDispatch[distanceConcept]

    if not kNeighs:
        kNeighs = 0

    if wType <= 1:
        #### Distance Based Weights ####
        ARCPY.AddMessage(ARCPY.GetIDMessage(84118))

        #### Set Options for Fixed vs. Inverse ####
        if wType == 0:
            # Inverse distance: keep the user-supplied exponent.
            fixed = 0
        else:
            # Fixed distance: force a neutral exponent.
            exponent = 1
            fixed = 1

        #### Execute Distance-Based Weights ####
        w = distance2SWM(inputFC, swmFile, masterField, fixed = fixed,
                         concept = concept, exponent = exponent,
                         threshold = threshold, kNeighs = kNeighs,
                         rowStandard = rowStandard)

    elif wType == 2:
        #### k-Nearest Neighbors Weights ####
        ARCPY.AddMessage(ARCPY.GetIDMessage(84119))
        w = kNearest2SWM(inputFC, swmFile, masterField, concept = concept,
                         kNeighs = kNeighs, rowStandard = rowStandard)

    elif wType == 3:
        #### Delaunay Triangulation Weights ####
        ARCPY.AddMessage(ARCPY.GetIDMessage(84120))
        w = delaunay2SWM(inputFC, swmFile, masterField,
                         rowStandard = rowStandard)

    elif wType == 4:
        #### Contiguity Based Weights, Edges Only ####
        ARCPY.AddMessage(ARCPY.GetIDMessage(84121))
        w = polygon2SWM(inputFC, swmFile, masterField, concept = concept,
                        kNeighs = kNeighs, rowStandard = rowStandard,
                        contiguityType = "ROOK")

    elif wType == 5:
        #### Contiguity Based Weights, Edges and Corners ####
        ARCPY.AddMessage(ARCPY.GetIDMessage(84122))
        w = polygon2SWM(inputFC, swmFile, masterField, concept = concept,
                        kNeighs = kNeighs, rowStandard = rowStandard,
                        contiguityType = "QUEEN")

    elif wType == 9:
        #### Space-Time Window Weights ####
        ARCPY.AddMessage(ARCPY.GetIDMessage(84255))
        w = spaceTime2SWM(inputFC, swmFile, masterField, concept = concept,
                          threshold = threshold, rowStandard = rowStandard,
                          timeField = timeField, timeType = timeType,
                          timeValue = timeValue)

    else:
        #### Tabular Input for Weights ####
        ARCPY.AddMessage(ARCPY.GetIDMessage(84123))
        if tableFile == "" or tableFile == "#":
            ARCPY.AddIDMessage("Error", 721)
            raise SystemExit()
        else:
            table2SWM(inputFC, masterField, swmFile, tableFile,
                      rowStandard = rowStandard)
def weightedCalc(self):
    """Performs weighted k-function.

    Accumulates weight-scaled pair counts for every distance cutoff and
    stores the L(d) estimates in self.ld.  When self.permutations > 0 the
    minimum/maximum estimates across permutation runs are tracked in
    self.ldMin / self.ldMax (the confidence envelope).
    """
    #### Attribute Shortcuts ####
    ssdo = self.ssdo
    reduce = self.reduce          # NOTE: shadows the builtin within this scope
    simulate = self.simulate
    ripley = self.ripley
    numIDs = len(self.ids)        # NOTE(review): appears unused below

    # Use the reduced-boundary area when the reduce option is active.
    if reduce:
        studyArea2Use = self.reduceArea
    else:
        studyArea2Use = self.studyArea

    if simulate:
        # Map each simulated point back to the weight index of the
        # original feature it was generated from.
        simOrder = []
        for simKey, origID in self.simDict.iteritems():
            simOrder.append(self.weightDict[origID])

    self.ld = COLL.defaultdict(float)
    if self.permutations:
        self.ldMin = COLL.defaultdict(float)
        self.ldMax = COLL.defaultdict(float)
        # Seed the minimum envelope with a large sentinel value.
        for order in self.cutoffOrder:
            self.ldMin[order] = 99999999999.

    # Run 0 is the observed pattern; runs 1..permutations are permutations.
    permsPlus = self.permutations + 1
    for perm in xrange(0, permsPlus):
        #### Permutation Progressor ####
        pmsg = ARCPY.GetIDMessage(84184)
        progressMessage = pmsg.format(perm, permsPlus)
        ARCPY.SetProgressor("default", progressMessage)

        gaSearch = GAPY.ga_nsearch(self.kTable)
        gaSearch.init_nearest(self.stepMax, 0, "euclidean")
        N = len(self.kTable)

        #### Permutate Weights ####
        # perm == 0 binds the observed weights; later iterations permute
        # the previous iteration's array (order of iterations matters).
        if perm:
            weights = RAND.permutation(weights)
        else:
            weights = self.weightVals

        if simulate:
            simWeights = NUM.take(self.weightVals, simOrder)

        #### Set Statistic Variables ####
        weightSumVal = 0.0
        kij = COLL.defaultdict(float)
        start = 0                 # NOTE(review): unused

        #### Loop Over Entire Table ####
        for i in xrange(N):
            row = self.kTable[i]
            id0 = row[0]

            #### Calculate For Inside IDs ####
            if id0 in self.ids:
                x0,y0 = row[1]
                weightInd0 = self.weightDict[id0]
                w0 = weights[weightInd0]

                #### Weight Sum Resolution ####
                # Sum of w0*wj over all j != i (the w0**2 term removes the
                # self-pair contribution).
                weightSumVal += (NUM.sum(w0 * weights)) - w0**2.0
                if simulate:
                    weightSumVal += (w0 * simWeights).sum()

                #### Neighbors Within Largest Distance ####
                gaSearch.search_by_idx(i)
                for nh in gaSearch:
                    neighInfo = self.kTable[nh.idx]
                    id1 = neighInfo[0]
                    x1,y1 = neighInfo[1]

                    #### Input or Simulated Point ####
                    # EAFP: simulated points are absent from weightDict and
                    # are resolved through simDict to their source feature.
                    try:
                        weightInd1 = self.weightDict[id1]
                    except:
                        origID = self.simDict[id1]
                        weightInd1 = self.weightDict[origID]

                    #### Process Neighbor Pair ####
                    w1 = weights[weightInd1]
                    dist = WU.euclideanDistance(x0,x1,y0,y1)
                    if ripley:
                        # Ripley edge-correction factor for this pair.
                        value = self.returnRipley(id0, dist)
                    else:
                        value = 1.0
                    value = w0 * (w1 * value)

                    #### Add To Cutoffs ####
                    # reverseOrder walks cutoffs largest-to-smallest, so we
                    # can stop as soon as the pair distance exceeds one.
                    for order in self.reverseOrder:
                        cutoff = self.cutoffs[order]
                        if dist > cutoff:
                            break
                        kij[order] += value
            ARCPY.SetProgressorPosition()

        #### Calculate Stats USing Dictionaries ####
        denom = NUM.pi * weightSumVal
        for order in self.cutoffOrder:
            res = kij[order]
            numer = res * studyArea2Use
            permResult = NUM.sqrt( (numer/denom) )
            if perm:
                # Permutation run: update the envelope extremes.
                self.ldMin[order] = min(self.ldMin[order], permResult)
                self.ldMax[order] = max(self.ldMax[order], permResult)
            else:
                # Observed run: store the L(d) estimate.
                self.ld[order] = permResult
def __init__(self, ssdo, distanceMethod="EUCLIDEAN", weightField=None,
             potentialField=None, caseField=None):
    """Computes the central feature(s) for each case.

    INPUTS
    ssdo (obj): instance of SSDataObject
    distanceMethod {str, EUCLIDEAN}: distance metric passed to nsquaredDist
    weightField {str, None}: name of the weight field (negative values are
        zeroed with a warning; the sum must be positive)
    potentialField {str, None}: name of the self-potential field (negative
        values are zeroed with a warning)
    caseField {str, None}: name of the case field; when omitted all
        features form a single case (key 1)

    Populates self.cf: {case: ([central feature OIDs], min sum distance)}.
    """
    #### Set Initial Attributes ####
    # Binds every constructor argument onto self in one call.
    UTILS.assignClassAttr(self, locals())

    #### Set Data ####
    self.xyCoords = self.ssdo.xyCoords

    #### Verify Weights ####
    if weightField:
        self.weights = self.ssdo.fields[weightField].returnDouble()

        #### Report Negative Weights ####
        lessThanZero = NUM.where(self.weights < 0.0)
        if len(lessThanZero[0]):
            # Negative weights are clamped to zero, not dropped.
            self.weights[lessThanZero] = 0.0
            ARCPY.AddIDMessage("Warning", 941)

        #### Verify Weight Sum ####
        self.weightSum = self.weights.sum()
        if not self.weightSum > 0.0:
            ARCPY.AddIDMessage("ERROR", 898)
            raise SystemExit()
    else:
        # No weight field: every feature gets unit weight.
        self.weights = NUM.ones((self.ssdo.numObs, ))

    #### Verify Potential ####
    if potentialField:
        self.potential = self.ssdo.fields[potentialField].returnDouble()

        #### Report Negative Weights ####
        lessThanZero = NUM.where(self.potential < 0.0)
        if len(lessThanZero[0]):
            self.potential[lessThanZero] = 0.0
            ARCPY.AddIDMessage("Warning", 940)
    else:
        self.potential = NUM.zeros((self.ssdo.numObs, ))

    #### Set Case Field ####
    if caseField:
        caseType = ssdo.allFields[caseField].type.upper()
        self.caseIsString = caseType == "STRING"
        self.caseVals = self.ssdo.fields[caseField].data
        cases = NUM.unique(self.caseVals)
        if self.caseIsString:
            # Empty strings are treated as missing case values.
            self.uniqueCases = cases[NUM.where(cases != "")]
        else:
            self.uniqueCases = cases
    else:
        # Single pseudo-case covering all observations.
        self.caseIsString = False
        self.caseVals = NUM.ones((self.ssdo.numObs, ), int)
        self.uniqueCases = [1]

    #### Set Result Dict ####
    cf = COLL.defaultdict(tuple)

    #### Calculate Central Feature ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84007),
                        0, self.ssdo.numObs, 1)
    for case in self.uniqueCases:
        cfOIDs = []
        indices = NUM.where(self.caseVals == case)
        potent = self.potential[indices]
        xy = self.xyCoords[indices]
        w = self.weights[indices]
        # nsquaredDist returns the order indices of the minimizing
        # feature(s) and the minimum summed distance for this case.
        cfOrder, minSumDist = nsquaredDist(xy, weights=w, potent=potent,
                                           dType=distanceMethod)
        for cfOrd in cfOrder:
            # Translate the within-case index back to a master OID.
            oid = ssdo.order2Master[indices[0][cfOrd]]
            cfOIDs.append(oid)
        cf[case] = (cfOIDs, minSumDist)

    #### Set Attributes ####
    self.ssdo = ssdo
    self.cf = cf
    self.caseField = caseField
    self.weightField = weightField
    self.potentialField = potentialField
def unweightedCalc(self):
    """Performs unweighted k-function.

    Counts point pairs within each distance cutoff and records the L(d)
    estimates in self.ld; permutation runs update the self.ldMin /
    self.ldMax confidence envelope.
    """
    #### Attribute Shortcuts ####
    ssdo = self.ssdo
    reduce = self.reduce
    simulate = self.simulate
    ripley = self.ripley
    studyArea2Use = self.reduceArea if reduce else self.studyArea

    self.ld = COLL.defaultdict(float)
    if self.permutations:
        #### Envelope Containers, Min Seeded With Large Sentinel ####
        self.ldMin = COLL.defaultdict(float)
        self.ldMax = COLL.defaultdict(float)
        for cutInd in self.cutoffOrder:
            self.ldMin[cutInd] = 99999999999.

    #### Run 0 Is Observed; Runs 1..permutations Are Permuted ####
    totalRuns = self.permutations + 1
    for runInd in xrange(0, totalRuns):
        #### Permutation Progressor ####
        msgTemplate = ARCPY.GetIDMessage(84184)
        ARCPY.SetProgressor("default", msgTemplate.format(runInd, totalRuns))

        #### Permutate the XY ####
        if runInd != 0:
            self.permutateTable()

        neighborSearch = GAPY.ga_nsearch(self.kTable)
        neighborSearch.init_nearest(self.stepMax, 0, "euclidean")
        tableLen = len(self.kTable)
        insideCount = len(self.ids)
        pairCounts = COLL.defaultdict(float)

        for rowInd in xrange(tableLen):
            rec = self.kTable[rowInd]
            baseID = rec[0]
            if baseID in self.ids:
                xBase, yBase = rec[1]
                neighborSearch.search_by_idx(rowInd)
                for neighbor in neighborSearch:
                    neighRec = self.kTable[neighbor.idx]
                    neighID = neighRec[0]
                    xNeigh, yNeigh = neighRec[1]
                    sep = WU.euclideanDistance(xBase, xNeigh, yBase, yNeigh)
                    if ripley:
                        contrib = self.returnRipley(baseID, sep)
                    else:
                        contrib = 1.0
                    #### Credit Every Cutoff At Or Beyond This Distance ####
                    for cutInd in self.reverseOrder:
                        if sep > self.cutoffs[cutInd]:
                            break
                        pairCounts[cutInd] += contrib
            ARCPY.SetProgressorPosition()

        #### Calculate Stats Using Dictionaries ####
        denom = NUM.pi * (insideCount * (insideCount - 1.0))
        for cutInd in self.cutoffOrder:
            estimate = NUM.sqrt((pairCounts[cutInd] * studyArea2Use) / denom)
            if runInd:
                self.ldMin[cutInd] = min(self.ldMin[cutInd], estimate)
                self.ldMax[cutInd] = max(self.ldMax[cutInd], estimate)
            else:
                self.ld[cutInd] = estimate
def createDiagnosticReport(self):
    """Creates a formatted summary table of the OLS diagnostics.

    Builds a 7-row, 4-column text table (label/value pairs for model fit,
    F, Wald, Breusch-Pagan and Jarque-Bera tests) and stores it as
    self.diagTable, with the raw rows in self.diagRaw and the column
    justification in self.diagJustify for the PDF report writer.
    """
    #### Create PValue Array ####
    allPVals = NUM.array(
        [self.fProb, self.waldProb, self.BPProb, self.JBProb])

    #### Check For Any Significance for Extra Padding ####
    signFlag = NUM.any(allPVals <= 0.05)

    #### Table Title ####
    header = ARCPY.GetIDMessage(84076)

    # Footnote markers d-g appended to the relevant labels below.
    feet = [84104, 84105, 84106, 84107]
    feet = [ARCPY.GetIDMessage(i) for i in feet]
    dFoot, eFoot, fFoot, gFoot = feet
    # (removed a redundant `dFoot = ARCPY.GetIDMessage(84104)` -- the value
    # was already unpacked from `feet` on the previous line)

    row1 = [ UTILS.addColon(ARCPY.GetIDMessage(84253)),
             self.ssdo.inName,
             ' ' + UTILS.addColon(ARCPY.GetIDMessage(84254)),
             UTILS.padValue(self.depVarName, significant=signFlag) ]

    aiccLab = ARCPY.GetIDMessage(84251) + " " + dFoot
    row2 = [ UTILS.addColon(ARCPY.GetIDMessage(84093)),
             str(self.n),
             ' ' + UTILS.addColon(aiccLab),
             UTILS.padValue(UTILS.formatValue(self.aicc),
                            significant=signFlag) ]

    r2Lab = ARCPY.GetIDMessage(84019) + " " + dFoot
    adjR2Lab = ARCPY.GetIDMessage(84022) + " " + dFoot
    row3 = [ UTILS.addColon(r2Lab),
             UTILS.formatValue(self.r2),
             ' ' + UTILS.addColon(adjR2Lab),
             UTILS.padValue(UTILS.formatValue(self.r2Adj),
                            significant=signFlag) ]

    fdofLab = ARCPY.GetIDMessage(84028)
    fLab = ARCPY.GetIDMessage(84025) + " " + eFoot
    row4 = [ UTILS.addColon(fLab),
             UTILS.formatValue(self.fStat),
             " " + UTILS.addColon(fdofLab.format(self.q, self.dof)),
             UTILS.writePVal(self.fProb, padNonSig=True) ]

    chiMess = ARCPY.GetIDMessage(84034)
    wLab = ARCPY.GetIDMessage(84031) + " " + eFoot
    row5 = [ UTILS.addColon(wLab),
             UTILS.formatValue(self.waldStat),
             " " + UTILS.addColon(chiMess.format(self.q)),
             UTILS.writePVal(self.waldProb, padNonSig=True) ]

    kLab = ARCPY.GetIDMessage(84037) + " " + fFoot
    row6 = [ UTILS.addColon(kLab),
             UTILS.formatValue(self.BP),
             ' ' + UTILS.addColon(chiMess.format(self.q)),
             UTILS.writePVal(self.BPProb, padNonSig=True) ]

    jbLab = ARCPY.GetIDMessage(84043) + " " + gFoot
    row7 = [ UTILS.addColon(jbLab),
             UTILS.formatValue(self.JB),
             # Jarque-Bera always has 2 degrees of freedom.
             ' ' + UTILS.addColon(chiMess.format(2)),
             UTILS.writePVal(self.JBProb, padNonSig=True) ]

    #### Finalize Diagnostic Table ####
    diagTotal = [row1, row2, row3, row4, row5, row6, row7]
    diagJustify = ["left", "right", "left", "right"]

    self.diagTable = UTILS.outputTextTable(diagTotal, header=header,
                                           pad=1, justify=diagJustify)
    self.diagRaw = diagTotal
    self.diagJustify = diagJustify
def createOutput(self, outputTable, displayIt = False):
    """Creates K-Function Output Table.

    INPUTS
    outputTable (str): path to the output table
    displayIt {bool, False}: create output graph?
    """
    #### Allow Overwrite Output ####
    ARCPY.env.overwriteOutput = 1

    #### Get Output Table Name With Extension if Appropriate ####
    outputTable, dbf = UTILS.returnTableName(outputTable)

    #### Set Progressor ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84008))

    #### Delete Table If Exists, Then Recreate It ####
    UTILS.passiveDelete(outputTable)
    outPath, outName = OS.path.split(outputTable)
    try:
        DM.CreateTable(outPath, outName)
    except:
        ARCPY.AddIDMessage("ERROR", 541)
        raise SystemExit()

    #### Add Result Fields ####
    fieldNames = UTILS.getFieldNames(kOutputFieldNames, outPath)
    expectedKName, observedKName, diffKName, lowKName, highKName = fieldNames
    outputFields = [expectedKName, observedKName, diffKName]
    if self.permutations:
        outputFields += [lowKName, highKName]

    for fieldName in outputFields:
        UTILS.addEmptyField(outputTable, fieldName, "DOUBLE")

    #### Create Insert Cursor ####
    try:
        rowWriter = DA.InsertCursor(outputTable, outputFields)
    except:
        ARCPY.AddIDMessage("ERROR", 204)
        raise SystemExit()

    #### Add Rows to Output Table ####
    for inc in xrange(self.nIncrements):
        expectedVal = self.cutoffs[inc]
        observedVal = self.ld[inc]
        newRow = [expectedVal, observedVal, observedVal - expectedVal]
        if self.permutations:
            newRow += [self.ldMin[inc], self.ldMax[inc]]
        rowWriter.insertRow(newRow)

    #### Clean Up ####
    del rowWriter

    #### Make Table Visable in TOC if *.dbf Had To Be Added ####
    if dbf:
        ARCPY.SetParameterAsText(1, outputTable)

    #### Display Results (Windows Only) ####
    if displayIt:
        if "WIN" not in SYS.platform.upper():
            ARCPY.AddIDMessage("Warning", 942)
        else:
            #### Set Progressor ####
            ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84186))

            #### Get Image Directory ####
            imageDir = UTILS.getImageDir()

            #### Make List of Fields and Set Template File ####
            yFields = [expectedKName, observedKName]
            if self.permutations:
                #### Add Confidence Envelopes ####
                yFields += [highKName, lowKName]
                tee = OS.path.join(imageDir, "KFunctionPlotEnv.tee")
            else:
                tee = OS.path.join(imageDir, "KFunctionPlot.tee")

            # Expected K serves as the x-axis for every series.
            xFields = [expectedKName] * len(yFields)

            #### Create Data Series String ####
            dataStr = UTILS.createSeriesStr(xFields, yFields, outputTable)

            #### Make Graph ####
            DM.MakeGraph(tee, dataStr, "KFunction")
            ARCPY.SetParameterAsText(11, "KFunction")
def createOutputGraphic(self, fileName):
    """Create OLS Output Report File.

    INPUTS
    fileName (str): path to output report file (*.pdf)

    Writes six report sections (coefficient table, diagnostics/notes,
    scatterplot matrices, residual histogram, residual-vs-predicted
    scatterplot, parameter page) to a single PDF, stepping the
    progressor after each section.
    """
    #### Set Progressor ####
    writeMSG = ARCPY.GetIDMessage(84186)
    ARCPY.SetProgressor("step", writeMSG, 0, 6, 1)
    ARCPY.AddMessage(writeMSG)

    #### Set Colors ####
    # Diverging blue->red ramp keyed to standardized-residual bins.
    colors = NUM.array([ "#4575B5", "#849EBA", "#C0CCBE", "#FFFFBF",
                         "#FAB984", "#ED7551", "#D62F27" ])
    cutoffs = NUM.array([-2.5, -1.5, -0.5, 0.5, 1.5, 2.5])

    #### Set Data ####
    stdRes = self.stdRedisuals.flatten()
    predicted = self.yHat.flatten()

    #### Create PDF Output ####
    pdfOutput = REPORT.openPDF(fileName)

    ##### Make Coefficient Table ####
    title = ARCPY.GetIDMessage(84075) + " - " + ARCPY.GetIDMessage(84370)
    contStr = ARCPY.GetIDMessage(84377)
    varTitlePlus = title + " " + contStr
    numCols = 9
    report = REPORT.startNewReport(9, title=title, landscape=True,
                                   titleFont=REPORT.ssTitleFont)
    grid = report.grid
    colLabs = self.coefRaw[0]
    tabVals = self.coefRaw[1:]

    #### Create Column Labels ####
    writeVarColHeaders(grid, colLabs)

    #### Loop Through Explanatory Variables ####
    for row in UTILS.ssRange(self.k):
        if grid.rowCount >= 20:
            #### Finalize Page ####
            grid.finalizeTable()
            report.write(pdfOutput)

            #### New Page ####
            report = REPORT.startNewReport(9, title=varTitlePlus,
                                           landscape=True,
                                           titleFont=REPORT.ssTitleFont)
            grid = report.grid
            writeVarColHeaders(grid, colLabs)

        #### Variable Name ####
        rowVals = tabVals[row]
        for ind, val in enumerate(rowVals):
            justify = "right"
            gridCell = PLT.subplot2grid(grid.gridInfo,
                                        (grid.rowCount, ind))
            # Columns 4 and 7 are the p-value columns; nudge unstarred
            # (non-significant) values left for alignment.
            if ind in [4, 7]:
                if not val.count("*"):
                    x0 = .925
            elif ind == 0:
                justify = "left"
                x0 = 0.0
            else:
                x0 = 1.0
            #### Limit Col Value Length to 12 ####
            if ind in [0, 1, 2, 5, 8]:
                val = val[0:12]
            PLT.text(x0, 0.5, val, fontproperties=REPORT.ssFont,
                     horizontalalignment=justify,
                     **REPORT.bAlignment)
            REPORT.clearGrid(gridCell)
        grid.stepRow()

    grid.createLineRow(grid.rowCount, startCol=0, endCol=numCols)
    grid.finalizeTable()

    #### Add To PDF ####
    report.write(pdfOutput)
    ARCPY.SetProgressorPosition()

    #### Diagnostic Table/Interpret Tables ####
    numCols = 6
    title = ARCPY.GetIDMessage(84076)
    titlePlus = title + " " + contStr
    report = REPORT.startNewReport(numCols, title=title, landscape=True,
                                   numRows=22,
                                   titleFont=REPORT.ssTitleFont)
    grid = report.grid
    # Maps diagnostic-row column index -> report grid column.
    ind2Col = {0: 0, 1: 1, 2: 3, 3: 5}
    for row in self.diagRaw:
        for ind, val in enumerate(row):
            #### Limit Col Length to 23 ####
            if ind not in [0, 2]:
                val = val[0:23]
            #### Set Col Info ####
            justify = self.diagJustify[ind]
            if ind == 2:
                colspan = 2
            else:
                colspan = 1
            col = ind2Col[ind]
            grid.writeCell((grid.rowCount, col), val,
                           justify=justify, colspan=colspan)
        grid.stepRow()
    grid.createEmptyRow()

    #### Add Footnotes ####
    notesMSG = ARCPY.GetIDMessage(84081)
    grid.writeCell((grid.rowCount, 0), notesMSG, colspan=2,
                   fontObj=REPORT.ssBoldFont, justify="left")
    grid.stepRow()

    #### Set Line Width Based on Non-Latin Font File ####
    if REPORT.fontFilePathName is None:
        splitLineAt = 145
    else:
        splitLineAt = 100

    #### Draw Interpretation Notes ####
    for note in self.interpretRaw:
        text = " ".join(note)
        lines = REPORT.splitFootnote(text, splitLineAt)
        for line in lines:
            if grid.rowCount >= 22:
                #### Finalize Page ####
                grid.finalizeTable()
                report.write(pdfOutput)

                #### New Page ####
                report = REPORT.startNewReport(
                    numCols, title=titlePlus, landscape=True,
                    numRows=22, titleFont=REPORT.ssTitleFont)
                grid = report.grid
            #### Write Footnote ####
            grid.writeCell((grid.rowCount, 0), line, colspan=2,
                           justify="left")
            grid.stepRow()
    grid.finalizeTable()

    #### Add To PDF ####
    report.write(pdfOutput)
    ARCPY.SetProgressorPosition()

    ##### Make Scatterplot Matrices ####
    k = len(self.indVarNames)
    title = ARCPY.GetIDMessage(84371)
    titlePlus = title + " " + contStr
    report = REPORT.startNewReport(6, title=title, landscape=True,
                                   numRows=4,
                                   titleFont=REPORT.ssTitleFont)
    grid = report.grid

    #### Loop Through Explanatory Variables ####
    # Variables are plotted in groups of five per page section.
    seq = list(NUM.arange(0, k, 5))
    if seq[-1] < k:
        seq.append(k)
    for ind, s in enumerate(seq[0:-1]):
        if grid.rowCount == 4:
            #### Finalize Page ####
            grid.finalizeTable()
            report.write(pdfOutput)

            #### New Page ####
            report = REPORT.startNewReport(6, title=titlePlus,
                                           landscape=True, numRows=4,
                                           titleFont=REPORT.ssTitleFont)
            grid = report.grid

        #### New Group of Vars ####
        e = seq[ind + 1]
        # Offset by one column: column 0 of self.x is assumed to be the
        # intercept -- TODO confirm against the design-matrix builder.
        values = self.x[:, (s + 1):(e + 1)]
        numVars = e - s
        varNames = self.indVarNames[s:e]
        lenRow = len(varNames)

        #### Histogram ####
        for vInd, vName in enumerate(varNames):
            data = values[:, vInd]
            gridHist = PLT.subplot2grid(grid.gridInfo,
                                        (grid.rowCount, vInd))
            PLT.hist(data)
            gridHist.xaxis.set_visible(False)
            gridHist.yaxis.set_visible(False)
            gridHist.set_title(vName[0:14],
                               fontproperties=REPORT.ssBoldFont)

        #### Add Dep Var ####
        gridHist = PLT.subplot2grid(grid.gridInfo,
                                    (grid.rowCount, lenRow))
        PLT.hist(self.y)
        gridHist.xaxis.set_visible(False)
        gridHist.yaxis.set_visible(False)
        gridHist.set_title(self.depVarName[0:14],
                           fontproperties=REPORT.ssBoldFont)
        grid.stepRow()

        # Scatter of each variable against y with a linear fit overlay.
        for vInd, vName in enumerate(varNames):
            xVals = values[:, vInd]
            m = NUM.polyfit(xVals, self.y, 1)
            yFit = NUM.polyval(m, xVals)
            gridScat = PLT.subplot2grid(grid.gridInfo,
                                        (grid.rowCount, vInd))
            PLT.scatter(xVals, self.y, s=10, edgecolors=None,
                        linewidths=0.05)
            PLT.plot(xVals, yFit, color='k', lw=1, alpha=.7)
            gridScat.xaxis.set_visible(False)
            gridScat.yaxis.set_ticks([])
            if vInd == 0:
                gridScat.yaxis.set_label_text(
                    self.depVarName[0:14],
                    fontproperties=REPORT.ssBoldFont)
        grid.stepRow()

    #### Add Help Text ####
    if grid.rowCount == 4:
        #### Finalize Page ####
        grid.finalizeTable()
        report.write(pdfOutput)

        #### New Page ####
        report = REPORT.startNewReport(6, title=titlePlus,
                                       landscape=True, numRows=4,
                                       titleFont=REPORT.ssTitleFont)
        grid = report.grid

    #### Get Help Info ####
    #### Set Line Width Based on Non-Latin Font File ####
    if REPORT.fontFilePathName is None:
        splitLineAt = 110
    else:
        splitLineAt = 55
    helpTxt1 = REPORT.splitFootnote(ARCPY.GetIDMessage(84403),
                                    splitLineAt)
    helpTxt2 = REPORT.splitFootnote(ARCPY.GetIDMessage(84404),
                                    splitLineAt)
    helpTxt1 = "\n".join(helpTxt1)
    helpTxt2 = "\n".join(helpTxt2)
    helpTxt = helpTxt1 + "\n\n" + helpTxt2
    grid.writeCell((grid.rowCount, 0), helpTxt,
                   fontObj=REPORT.ssBigFont, colspan=6, justify="left")
    grid.stepRow()

    #### Finalize Page ####
    grid.finalizeTable()

    #### Add To PDF ####
    report.write(pdfOutput)
    ARCPY.SetProgressorPosition()

    #### Histogram of Residuals ####
    title = ARCPY.GetIDMessage(84341)
    titlePlus = title + " " + contStr
    numCols = 10
    report = REPORT.startNewReport(numCols, title=title, landscape=True,
                                   titleFont=REPORT.ssTitleFont,
                                   numRows=30)
    numRows = report.numRows
    grid = report.grid
    histGrid = PLT.subplot2grid((numRows, numCols), (0, 1),
                                rowspan=22, colspan=numCols - 2)

    #### Add Histogram ####
    # NOTE(review): `normed=` and PYLAB.normpdf below are legacy
    # matplotlib APIs -- presumably this targets an old bundled
    # matplotlib; verify before upgrading.
    n, bins, patches = PLT.hist(stdRes, 15, normed=True,
                                facecolor='#8400A8', alpha=0.75)

    #### Bell Curve ####
    x = NUM.arange(-3.5, 3.5, 0.01)
    y = PYLAB.normpdf(x, 0, 1)
    PLT.plot(x, y, color='blue', lw=1, linestyle="-")

    #### Axis Info ####
    histGrid.yaxis.grid(True, linestyle='-', which='both',
                        color='lightgrey', alpha=0.5)
    PYLAB.ylabel(ARCPY.GetIDMessage(84055),
                 fontproperties=REPORT.ssLabFont)
    PYLAB.xlabel(ARCPY.GetIDMessage(84337),
                 fontproperties=REPORT.ssLabFont)

    #### Text Box ####
    grid.rowCount = 25

    #### Set Line Width Based on Non-Latin Font File ####
    if REPORT.fontFilePathName is None:
        splitLineAt = 120
    else:
        splitLineAt = 80
    infoRows = REPORT.splitFootnote(ARCPY.GetIDMessage(84421),
                                    splitLineAt)
    for row in infoRows:
        if grid.rowCount >= numRows:
            #### Finalize Page ####
            grid.finalizeTable()
            report.write(pdfOutput)

            #### New Page ####
            report = REPORT.startNewReport(numCols, title=titlePlus,
                                           landscape=True,
                                           titleFont=REPORT.ssTitleFont)
            grid = report.grid
        grid.writeCell((grid.rowCount, 0), row, colspan=numCols,
                       justify="left", fontObj=REPORT.ssBigFont)
        grid.stepRow()

    #### Add To PDF ####
    grid.finalizeTable()
    report.write(pdfOutput)
    ARCPY.SetProgressorPosition()

    #### Scatterplot of Std. Residuals and Predicted Y ####
    title = ARCPY.GetIDMessage(84336)
    numCols = 10
    report = REPORT.startNewReport(numCols, title=title, landscape=False,
                                   titleFont=REPORT.ssTitleFont,
                                   numRows=32)
    numRows = report.numRows
    grid = report.grid
    scatGrid = PLT.subplot2grid(grid.gridInfo, (0, 1),
                                rowspan=20, colspan=numCols - 2)

    #### Best Fit Line ####
    sortedYHatInd = NUM.argsort(predicted)
    sortedYHat = predicted[sortedYHatInd]
    sortedSTDRes = stdRes[sortedYHatInd]
    m = NUM.polyfit(sortedYHat, sortedSTDRes, 1)
    yFit = NUM.polyval(m, sortedYHat)
    PLT.plot(sortedYHat, yFit, color='k', lw=2, alpha=.7)

    #### Plot Values ####
    binVals = NUM.digitize(stdRes, cutoffs)
    binColors = colors[binVals]
    scat = PLT.scatter(predicted, stdRes, s=30, c=binColors)

    #### Labels ####
    PYLAB.ylabel(ARCPY.GetIDMessage(84337),
                 fontproperties=REPORT.ssLabFont)
    PYLAB.xlabel(ARCPY.GetIDMessage(84338),
                 fontproperties=REPORT.ssLabFont)
    scatGrid.yaxis.grid(True, linestyle='-', which='both',
                        color='lightgrey', alpha=0.5)

    #### Text Box ####
    grid.rowCount = 23

    #### Set Line Width Based on Non-Latin Font File ####
    if REPORT.fontFilePathName is None:
        splitLineAt = 60
    else:
        splitLineAt = 30
    infoRows = REPORT.splitFootnote(ARCPY.GetIDMessage(84422),
                                    splitLineAt)
    numLines = len(infoRows)
    if numLines > 9:
        #### Place Text and Small Scatter on Next Page ####
        grid.finalizeTable()
        report.write(pdfOutput)

        #### New Page ####
        titlePlus = title + " " + contStr
        report = REPORT.startNewReport(numCols, title=titlePlus,
                                       landscape=False,
                                       titleFont=REPORT.ssTitleFont,
                                       numRows=32)
        grid = report.grid
    # Remember where the text/scatter section starts on this page.
    startGrid = grid.rowCount * 1
    for row in infoRows:
        grid.writeCell((grid.rowCount, 0), row, colspan=7,
                       justify="left", fontObj=REPORT.ssBigFont)
        grid.stepRow()

    #### Random Scatter ####
    # Example plot of well-behaved residuals, generated from a fixed
    # seed so the report is reproducible.
    scatLines = 8
    smallScatGrid = PLT.subplot2grid(grid.gridInfo, (startGrid, 7),
                                     rowspan=scatLines, colspan=3)
    RAND.seed(seed=100)
    rN = 200
    randRes = RAND.normal(0, 1, (rN, ))
    randPred = RAND.normal(0, 1, (rN, ))
    randX = NUM.ones((rN, 2))
    randX[:, 1] = randRes
    coef, sumRes, rank, s = LA.lstsq(randX, randPred)
    randYHat = NUM.dot(randX, coef)
    randE = randPred - randYHat
    ess = (randE**2.0).sum()
    fdof = (rN - 2) * 1.0
    s2 = ess / fdof
    se = NUM.sqrt(s2)
    seRandE = randE / se
    sortedXP = NUM.argsort(randYHat)
    sRandPred = randYHat[sortedXP]
    sRandRes = seRandE[sortedXP]
    mRand = NUM.polyfit(sRandPred, sRandRes, 1)
    yRandFit = NUM.polyval(mRand, sRandPred)
    PLT.plot(sRandPred, yRandFit, color='k', lw=1, alpha=.7)
    binValsR = NUM.digitize(seRandE, cutoffs)
    binColorsR = colors[binValsR]
    scat = PLT.scatter(randYHat, seRandE, s=10, c=binColorsR,
                       edgecolors=None, linewidths=0.05)
    smallScatGrid.yaxis.grid(True, linestyle='-', which='both',
                             color='lightgrey', alpha=0.5)
    smallScatGrid.yaxis.set_ticks([0])
    smallScatGrid.yaxis.set_ticklabels([])
    meanY = randYHat.mean()
    smallScatGrid.xaxis.set_ticks([meanY])
    smallScatGrid.xaxis.set_ticklabels([ARCPY.GetIDMessage(84340)],
                                       fontproperties=REPORT.ssLabFont)
    # Re-randomize the generator after the deterministic example plot.
    RAND.seed()

    #### Adjust Row Count to End of Lines/Scatter ####
    if numLines < scatLines:
        grid.rowCount = startGrid + scatLines

    #### Add To PDF ####
    grid.finalizeTable()
    report.write(pdfOutput)
    ARCPY.SetProgressorPosition()

    ##### Add Dataset/Parameter Info ####
    paramLabels = [84253, 84359, 84360, 84112]
    paramLabels = [ARCPY.GetIDMessage(i) for i in paramLabels]
    paramValues = [ self.ssdo.inputFC, self.ssdo.masterField,
                    self.ssdo.templateFC, self.depVarName ]

    #### Set Analysis Field Names ####
    countRows = len(paramLabels) + 1
    maxVarLen = 100
    varLines = [i[0:(maxVarLen - 1)] for i in self.indVarNames]
    for ind, varLine in enumerate(varLines):
        if ind == 0:
            # First variable carries the "Explanatory Variables" label.
            paramLabels.append(ARCPY.GetIDMessage(84402))
        elif countRows >= 20:
            # Page break: repeat the label at the top of the next page.
            paramLabels.append(ARCPY.GetIDMessage(84402))
            countRows = 1
        else:
            paramLabels.append("")
            countRows += 1
        paramValues.append(varLine)

    #### Add Selection Set Boolean ####
    paramLabels.append(ARCPY.GetIDMessage(84418))
    paramValues.append(str(self.ssdo.selectionSet))

    title = ARCPY.GetIDMessage(84372)
    REPORT.createParameterPage(paramLabels, paramValues, title=title,
                               pdfOutput=pdfOutput,
                               titleFont=REPORT.ssTitleFont)
    ARCPY.SetProgressorPosition()

    #### Finish Up ####
    ARCPY.AddMessage(fileName)
    pdfOutput.close()
def polygon2SWM(inputFC, swmFile, masterField, concept = "EUCLIDEAN",
                kNeighs = 0, rowStandard = True, contiguityType = "ROOK"):
    """Creates a sparse spatial weights matrix (SWM) based on
    polygon contiguity.

    INPUTS:
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    concept: {str, EUCLIDEAN}: EUCLIDEAN or MANHATTAN
    kNeighs {int, 0}: number of neighbors to return (1)
    rowStandard {bool, True}: row standardize weights?
    contiguityType {str, Rook}: {Rook = Edges Only, Queen = Edges/Vertices}

    NOTES:
    (1) kNeighs is used if polygon is not contiguous. E.g. Islands
    """
    #### Set Default Progressor for Neigborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 2)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField,
                                    types = [0,1])

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.catPath, [masterField],
                                 spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs = 2)

    #### Assure k-Nearest is Less Than Number of Features ####
    if kNeighs >= N:
        ARCPY.AddIDMessage("ERROR", 975)
        raise SystemExit()

    #### Create Nearest Neighbor Search Type For Islands ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    concept, gaConcept = WU.validateDistanceMethod(concept,
                                                   ssdo.spatialRef)
    gaSearch.init_nearest(0.0, kNeighs, gaConcept)
    if kNeighs > 0:
        forceNeighbor = True
        neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch,
                                               weight_type = 1,
                                               row_standard = False)
    else:
        forceNeighbor = False
        # BUGFIX: the original assigned the sentinel to `neighSearch`,
        # leaving `neighWeights` undefined when kNeighs == 0.
        neighWeights = None

    #### Create Polygon Neighbors ####
    polyNeighborDict = WU.polygonNeighborDict(inputFC, masterField,
                                              contiguityType = contiguityType)

    #### Write Poly Neighbor List (Dict) ####
    #### Set Progressor for SWM Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, N, 1)

    #### Initialize Spatial Weights Matrix File ####
    if contiguityType == "ROOK":
        wType = 4
    else:
        wType = 5

    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName,
                             N, rowStandard, inputFC = inputFC,
                             wType = wType, distanceMethod = concept,
                             numNeighs = kNeighs)

    #### Keep Track of Polygons w/o Neighbors ####
    islandPolys = []

    #### Write Polygon Contiguity to SWM File ####
    for row in xrange(N):
        rowInfo = gaTable[row]
        oid = rowInfo[0]
        masterID = rowInfo[2]
        neighs = polyNeighborDict[masterID]
        if neighs:
            # Binary contiguity: unit weight per neighbor.
            weights = [ 1. for nh in neighs ]
            isIsland = False
        else:
            isIsland = True
            islandPolys.append(oid)
            weights = []

        #### Get Nearest Neighbor Based On Centroid Distance ####
        if isIsland and forceNeighbor:
            neighs, weights = neighWeights[row]
            neighs = [ gaTable[nh][2] for nh in neighs ]

        #### Add Weights Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights)

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    #### Report on Features with No Neighbors ####
    countIslands = len(islandPolys)
    if countIslands:
        islandPolys.sort()
        # Only the first 30 island OIDs are listed in the warning.
        if countIslands > 30:
            islandPolys = islandPolys[0:30]
        ERROR.warningNoNeighbors(N, countIslands, islandPolys,
                                 ssdo.oidName,
                                 forceNeighbor = forceNeighbor,
                                 contiguity = True)

    #### Clean Up ####
    swmWriter.close()
    del gaTable

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()

    del polyNeighborDict
def calculate(self, comboX):
    """Performs OLS and related diagnostics.

    INPUTS
    comboX (array): n x k design matrix; column 0 is assumed to be the
        intercept (the Wald restriction matrix excludes it) -- TODO
        confirm against the design-matrix builder.

    Returns True on success; False when the design matrix is perfectly
    (or numerically) multicollinear and estimation cannot proceed.
    Populates coefficient, fit, and diagnostic attributes on self.
    """
    #### Shorthand Attributes ####
    ssdo = self.ssdo
    x = comboX
    n, k = NUM.shape(comboX)
    y = self.y

    #### General Information ####
    fn = n * 1.0
    dof = n - k
    fdof = dof * 1.0
    xt = x.T
    yt = y.T
    xx = NUM.dot(xt, x)

    #### Check for Perfect Multicollinearity ####
    # A zero smallest singular value means X'X is singular.
    U, s, V = LA.svd(xx)
    if UTILS.compareFloat(0.0, s[-1]):
        return False

    #### Attempt to Invert Design Matrix ####
    try:
        xxi = LA.inv(xx)
    except:
        #### Perfect multicollinearity, cannot proceed ####
        return False

    #### Bad Probabilities - Near Multicollinearity ####
    badProbs = False

    #### Compute Coefficients ####
    # Normal equations: beta = (X'X)^-1 X'y
    xy = NUM.dot(xt, y)
    coef = NUM.dot(xxi, xy)

    #### Residuals, Sum Of Squares, R2, Etc. ####
    yHat = NUM.dot(x, coef)
    yBar = (y.sum()) / fn
    e = y - yHat
    ess = (NUM.dot(e.T, e))[0][0]
    s2 = (ess / fdof)
    s2mle = (ess / fn)
    seResiduals = NUM.sqrt(s2)
    ss = y - yBar
    tss = (NUM.dot(ss.T, ss))[0][0]
    r2 = 1.0 - (ess / tss)
    r2Adj = 1.0 - ((ess / (fdof)) / (tss / (fn - 1)))
    u2 = e * e

    #### Variance-Covariance for Coefficients ####
    varBeta = xxi * s2

    #### Standard Errors / t-Statistics ####
    seBeta = NUM.sqrt(varBeta.diagonal())
    tStat = (coef.T / seBeta).flatten()

    #### White's Robust Standard Errors ####
    # Heteroskedasticity-consistent (HC1) covariance with a small-sample
    # n/(n-k) degrees-of-freedom scaling.
    dofScale = (n / (n - k)) * 1.0
    sHat = NUM.dot((u2 * x).T, x) * dofScale
    varBetaRob = NUM.dot(NUM.dot(xxi, sHat), xxi)
    seBetaRob = NUM.sqrt(varBetaRob.diagonal())
    tStatRob = (coef.T / seBetaRob).flatten()

    #### DOF Warning Once for t-Stats ####
    silentVector = [True for i in range(k)]
    if (2 <= dof <= 4) and not self.warnedTProb:
        # Warn only on the first variable of the first low-DOF model.
        silentVector[0] = False
        self.warnedTProb = True

    #### Coefficient t-Tests ####
    pVals = []
    pValsRob = []
    for varInd in xrange(k):
        #### General ####
        try:
            p = STATS.tProb(tStat[varInd], dof, type=2,
                            silent=silentVector[varInd])
        except:
            p = NUM.nan
            badProbs = True
        pVals.append(p)

        #### Robust ####
        try:
            p = STATS.tProb(tStatRob[varInd], dof, type=2,
                            silent=True)
        except:
            p = NUM.nan
            badProbs = True
        pValsRob.append(p)

    #### Jarque-Bera Test For Normality of the Residuals ####
    muE = (e.sum()) / fn
    devE = e - muE
    u3 = (devE**3.0).sum() / fn
    u4 = (devE**4.0).sum() / fn
    denomS = s2mle**1.5
    denomK = s2mle**2.0
    skew = u3 / denomS
    kurt = u4 / denomK
    self.JB = (n / 6.) * (skew**2. + ((kurt - 3.)**2. / 4.))
    if self.JB >= 0.0:
        # JB is asymptotically chi-squared with 2 DOF.
        self.JBProb = STATS.chiProb(self.JB, 2, type=1)
    else:
        self.JBProb = NUM.nan
        badProbs = True

    #### Breusch-Pagan Test for Heteroskedasticity ####
    # Auxiliary regression of squared residuals on X; BP = n * R2_aux.
    u2y = NUM.dot(xt, u2)
    bpCoef = NUM.dot(xxi, u2y)
    u2Hat = NUM.dot(x, bpCoef)
    eU = u2 - u2Hat
    essU = NUM.dot(eU.T, eU)
    u2Bar = (u2.sum()) / fn
    ssU = u2 - u2Bar
    tssU = NUM.dot(ssU.T, ssU)
    r2U = 1.0 - (essU / tssU)
    self.BP = (fn * r2U)[0][0]
    if self.BP >= 0.0:
        self.BPProb = STATS.chiProb(self.BP, (k - 1), type=1)
    else:
        self.BPProb = NUM.nan
        badProbs = True

    #### Classic Joint-Hypothesis F-Test ####
    q = k - 1
    fq = q * 1.0
    self.fStat = (r2 / fq) / ((1 - r2) / (fn - k))
    try:
        self.fProb = abs(STATS.fProb(self.fStat, q, (n - k),
                                     type=1))
    except:
        self.fProb = NUM.nan
        badProbs = True

    #### Wald Robust Joint Hypothesis Test ####
    # Restriction matrix R selects all coefficients except column 0.
    R = NUM.zeros((q, k))
    R[0:, 1:] = NUM.eye(q)
    Rb = NUM.dot(R, coef)
    try:
        invRbR = LA.inv(NUM.dot(NUM.dot(R, varBetaRob), R.T))
    except:
        #### Perfect multicollinearity, cannot proceed ####
        return False
    self.waldStat = (NUM.dot(NUM.dot(Rb.T, invRbR), Rb))[0][0]
    if self.waldStat >= 0.0:
        self.waldProb = STATS.chiProb(self.waldStat, q, type=1)
    else:
        self.waldProb = NUM.nan
        badProbs = True

    #### Log-Likelihood ####
    self.logLik = -(n / 2.) * (1. + NUM.log(2. * NUM.pi)) - \
                  (n / 2.) * NUM.log(s2mle)

    #### AIC/AICc ####
    k1 = k + 1
    self.aic = -2. * self.logLik + 2. * k1
    self.aicc = -2. * self.logLik + 2. * k1 * (fn / (fn - k1 - 1))

    #### Calculate the Variance Inflation Factor ####
    if k <= 2:
        # VIF undefined with a single explanatory variable; store the
        # "not applicable" message instead.
        self.vifVal = ARCPY.GetIDMessage(84090)
        self.vif = False
    else:
        xTemp = xt[1:]
        corX = NUM.corrcoef(xTemp)
        try:
            ic = LA.inv(corX)
            self.vifVal = abs(ic.diagonal())
            # Cap runaway VIF values for display.
            self.vifVal[self.vifVal >= 1000] = 1000
            self.vif = True
        except:
            #### Perfect multicollinearity, cannot proceed ####
            return False

    #### Set Attributes ####
    self.dof = dof
    self.coef = coef
    self.yHat = yHat
    self.yBar = yBar
    self.residuals = e
    self.seResiduals = seResiduals
    self.stdRedisuals = e / self.seResiduals
    self.ess = ess
    self.tss = tss
    self.varCoef = varBeta
    self.seCoef = seBeta
    self.tStats = tStat
    self.pVals = pVals
    self.varCoefRob = varBetaRob
    self.seCoefRob = seBetaRob
    self.tStatsRob = tStatRob
    self.pValsRob = pValsRob
    self.r2 = r2
    self.r2Adj = r2Adj
    self.s2 = s2
    self.s2mle = s2mle
    self.q = q
    self.badProbs = badProbs
    self.varLabels = [ARCPY.GetIDMessage(84064)] + self.independentVars
    return True
def delaunay2SWM(inputFC, swmFile, masterField, rowStandard = True):
    """Builds a sparse spatial weights matrix (SWM) file whose
    neighborhoods are derived from a Delaunay Triangulation of the
    input features.

    INPUTS:
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    rowStandard {bool, True}: row standardize weights?
    """

    #### Default Progressor While the Neighborhood Is Constructed ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Wrap the Input Features in an SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)

    featureCount = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(featureCount, minNumObs = 2)

    #### Master Field Must Be a Valid Type ####
    ERROR.checkField(ssdo.allFields, masterField, types = [0,1])

    #### GA Table for Geometry and Master ID Access ####
    gaTable, gaInfo = WU.gaTable(ssdo.catPath, [masterField],
                                 spatRef = ssdo.spatialRefString)

    numObs = gaInfo[0]
    ERROR.errorNumberOfObs(numObs, minNumObs = 2)

    #### Report Any Records Dropped While Reading ####
    badCount = featureCount - numObs
    if badCount:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        ERROR.reportBadRecords(featureCount, badCount, badRecs,
                               label = ssdo.oidName)

    #### Delaunay Search Structure and On-The-Fly Weights ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_delaunay()
    neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch,
                                           weight_type = 1,
                                           row_standard = False)

    #### Step Progressor for the Writing Phase ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, numObs, 1)

    #### Open the SWM File for Writing ####
    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName,
                             numObs, rowStandard, inputFC = inputFC,
                             wType = 3)

    #### Write One Entry Per Feature, Rejecting Duplicate IDs ####
    seenIDs = set([])
    for idx in xrange(numObs):
        masterID = int(gaTable[idx][2])

        #### Duplicate Master IDs Are Fatal ####
        if masterID in seenIDs:
            ARCPY.AddIDMessage("Error", 644, masterField)
            ARCPY.AddIDMessage("Error", 643)
            raise SystemExit()
        seenIDs.add(masterID)

        #### Translate Neighbor Row Indices Into Master IDs ####
        nbrRows, nbrWeights = neighWeights[idx]
        nbrIDs = [gaTable[nbr][2] for nbr in nbrRows]

        #### Add Spatial Weights Matrix Entry ####
        swmWriter.swm.writeEntry(masterID, nbrIDs, nbrWeights)

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    swmWriter.close()
    del gaTable

    #### Report if Any Features Have No Neighbors ####
    swmWriter.reportNoNeighbors()

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()
def runModels(self):
    """Runs OLS for every combination of candidate explanatory
    variables, screening each model against the user thresholds
    (adj. R2, coefficient p-values, VIF, Jarque-Bera, Moran's I) and
    accumulating pass/fail counts, the optional output table rows, and
    the optional report file.

    Reads: self.ssdo, self.dependentVar, self.independentVars,
    self.minIndVars/self.maxIndVars, threshold settings, and the
    output-table/report-file options set by __init__.
    Populates: self.y, self.x, self.resultDict, self.boolResults,
    self.tableResults / self.model2Table (when a table is requested),
    plus the Jarque-Bera tracking structures.
    Raises SystemExit when the dependent variable has no variance.
    """
    #### Shorthand Attributes ####
    ssdo = self.ssdo

    #### Create Dependent Variable (column vector, n x 1) ####
    self.y = ssdo.fields[self.dependentVar].returnDouble()
    self.n = ssdo.numObs
    self.y.shape = (self.n, 1)

    #### Assure that Variance is Larger than Zero ####
    #### A constant dependent variable cannot be modeled. ####
    yVar = NUM.var(self.y)
    if NUM.isnan(yVar) or yVar <= 0.0:
        ARCPY.AddIDMessage("Error", 906)
        raise SystemExit()

    #### Validate Chosen Number of Combos ####
    #### k counts dependent + candidate fields; at most k-1 ####
    #### variables can enter any one model. ####
    k = len(ssdo.fields)
    if self.maxIndVars > (k - 1):
        ARCPY.AddIDMessage("WARNING", 1171, self.maxIndVars)
        self.maxIndVars = k - 1
        ARCPY.AddIDMessage("WARNING", 1172, self.maxIndVars)

    #### Assure Degrees of Freedom ####
    #### Cap the model size so n - (vars + intercept) > 2. ####
    withIntercept = self.maxIndVars + 1
    dof = self.n - withIntercept
    if dof <= 2:
        ARCPY.AddIDMessage("WARNING", 1128, 2)
        dofLimit = self.n - 4
        ARCPY.AddIDMessage("WARNING", 1419, dofLimit)
        self.maxIndVars = dofLimit
    if self.maxIndVars < 1:
        ARCPY.AddIDMessage("WARNING", 1173)

    #### Assure Min Vars is less than or equal to Max Vars ####
    if self.maxIndVars < self.minIndVars:
        ARCPY.AddIDMessage("WARNING", 1174)
        ARCPY.AddIDMessage("WARNING", 1175)
        self.minIndVars = self.maxIndVars

    #### Gen Range Combos ####
    #### rangeVars indexes columns 1..k-1 of the design matrix; ####
    #### rangeCombos is the set of model sizes to try. ####
    rangeVars = range(1, k)
    rangeCombos = NUM.arange(self.minIndVars, self.maxIndVars + 1)

    #### Create Base Design Matrix ####
    #### Column 0 stays all ones (intercept); candidate variables ####
    #### fill columns 1..k-1 in self.independentVars order. ####
    self.x = NUM.ones((self.n, k), dtype=float)
    for column, variable in enumerate(self.independentVars):
        self.x[:, column + 1] = ssdo.fields[variable].data

    #### Calculate Global VIF ####
    self.globalVifVals = COLL.defaultdict(float)
    if k > 2:
        #### Values Less Than One Were Forced by Psuedo-Inverse ####
        self.printVIF = True
    else:
        #### VIF is undefined with a single explanatory variable ####
        self.printVIF = False

    #### Create Output Table Info ####
    if self.outputTable:
        #### List of Results ####
        self.tableResults = []

        #### Valid Table Name and Type ####
        self.outputTable, dbf = UTILS.returnTableName(self.outputTable)
        outPath, outName = OS.path.split(self.outputTable)

        #### Set Field Names (Base) ####
        self.outFieldNames = UTILS.getFieldNames(erFieldNames, outPath)
        self.outFieldTypes = ["LONG", "DOUBLE", "DOUBLE", "DOUBLE",
                              "DOUBLE", "DOUBLE", "DOUBLE", "LONG"]

        #### Add Field Names (Independent Vars as X#) ####
        maxRange = range(1, self.maxIndVars + 1)
        self.outFieldNames += ["X" + str(j) for j in maxRange]
        self.outFieldTypes += ["TEXT"] * self.maxIndVars

        #### Calculate Max Text Length for Output Fields ####
        fieldLens = [len(i) for i in self.independentVars]
        self.maxFieldLen = max(fieldLens) + 5
        tableReportCount = 0

        #### Set NULL Values and Flag to Reset Table Name ####
        #### shapefile/dbf outputs cannot store true NULLs. ####
        isNullable = UTILS.isNullable(self.outputTable)
        if isNullable:
            self.nullValue = NUM.nan
        else:
            self.nullValue = UTILS.shpFileNull["DOUBLE"]
        self.dbf = dbf

    #### Create Output Report File ####
    if self.outputReportFile:
        fo = UTILS.openFile(self.outputReportFile, "w")

    #### Hold Results for Every Choose Combo ####
    self.resultDict = {}
    self.vifVarCount = COLL.defaultdict(int)
    self.model2Table = {}        # modelID -> row index in tableResults
    self.sumRuns = 0             # total models successfully estimated
    self.sumGI = 0
    self.boolGI = 0
    self.boolResults = NUM.zeros(4, dtype=int)  # R2/pVal/VIF/JB pass counts
    self.jbModels = []
    self.jbValues = []
    self.jbResiduals = NUM.empty((self.n, 3), dtype=float)
    self.perfectMultiWarnBool = False
    self.neighborWarn = False

    for choose in rangeCombos:
        #### Generate Index Combos ####
        comboGenerator = ITER.combinations(rangeVars, choose)

        #### Set Progressor ####
        message = ARCPY.GetIDMessage(84293).format(k - 1, choose)
        ARCPY.SetProgressor("default", message)

        #### Set Result Structure ####
        #### silent suppresses repeat neighbor warnings after the ####
        #### first model-size pass. ####
        rh = ResultHandler(self.independentVars, choose,
                           self.ssdo, self.weightsMatrix,
                           weightsType=self.weightsType,
                           minR2=self.minR2, maxCoef=self.maxCoef,
                           maxVIF=self.maxVIF, minJB=self.minJB,
                           minMI=self.minMI, silent=self.neighborWarn)

        #### Loop Through All Combinations ####
        modelCount = 0
        emptyTabValues = [""] * (self.maxIndVars - choose)
        perfectMultiModels = []
        for combo in comboGenerator:
            #### Create Design Matrix for Given Combination ####
            #### Intercept column 0 plus the chosen variables. ####
            columns = [0] + list(combo)
            comboX = self.x[0:, columns]

            #### Get Model Info for given Combination ####
            N, K = comboX.shape
            varNameList = [self.independentVars[j - 1] for j in combo]
            varNameListInt = ["Intercept"] + varNameList
            modelAll = self.dependentVar + " ~ "
            modelAll += " + ".join(varNameListInt)
            modelID = str(K) + ":" + str(modelCount)

            #### Run Linear Regression ####
            #### calculate() returns False on a singular matrix and ####
            #### otherwise stores results on self (coef, pVals, ...). ####
            runModel = self.calculate(comboX)

            #### Set Near/Perfect Multicoll Bool ####
            nearPerfectBool = False
            if K > 2 and runModel:
                nearPerfectBool = NUM.any(abs(self.vifVal) >= 1000)

            if (not runModel) or nearPerfectBool:
                #### Perfect Multicollinearity ####
                #### Unable to Invert the Matrix ####
                perfectMultiModels.append(modelAll)
            else:
                #### Keep Track of Total Number of Models Ran ####
                modelCount += 1
                self.sumRuns += 1
                residuals = self.residuals.flatten()

                #### Evaluate p-values ####
                #### Heteroskedastic (BP p < .1) models use robust ####
                #### coefficient p-values; slice [1:] drops intercept. ####
                if self.BPProb < .1:
                    #### Use Robust Coefficients ####
                    pValsOut = self.pValsRob[1:]
                else:
                    pValsOut = self.pVals[1:]
                coefOut = self.coef[1:]

                #### Process Largest VIF Values ####
                if K > 2:
                    for ind, varName in enumerate(varNameList):
                        vif = self.vifVal[ind]
                        previousVIF = self.globalVifVals[varName]
                        if vif > previousVIF:
                            self.globalVifVals[varName] = vif

                #### Set OLS Result ####
                res = OLSResult(modelID, varNameList, coefOut, pValsOut,
                                self.vifVal, self.r2Adj, self.aicc,
                                self.JBProb, self.BPProb,
                                allMIPass=self.allMIPass)

                #### Evaluate Jarque-Bera Stat ####
                keep = self.pushPopJB(res, self.residuals.flatten())
                boolReport = rh.evaluateResult(res, residuals, keep=keep)
                r2Bool, pvBool, vifBool, jbBool, giBool, keepBool = boolReport

                #### Populate Output Table ####
                if self.outputTable:
                    #### Only models passing the p-value and VIF ####
                    #### screens earn a table row. ####
                    lesserKeepModel = pvBool and vifBool
                    if lesserKeepModel:
                        #### Set VIF/Moran's I for Table ####
                        maxVIFValue = res.maxVIFValue
                        giPVal = res.miPVal
                        if giPVal == None or self.allMIPass:
                            giPVal = self.nullValue

                        #### Create List of Results ####
                        countPlus = tableReportCount + 1
                        resultValues = [countPlus, self.r2Adj, self.aicc,
                                        self.JBProb, self.BPProb,
                                        maxVIFValue, giPVal, choose]
                        resultValues += varNameList
                        resultValues += emptyTabValues
                        self.tableResults.append(resultValues)
                        self.model2Table[modelID] = tableReportCount
                        tableReportCount += 1

                #### Add Booleans for End Total Summary ####
                boolResult = [r2Bool, pvBool, vifBool, jbBool]
                self.boolResults += boolResult

                #### Delete OLS Instance if Not Necessary For Summary ####
                if not keepBool:
                    del res

        #### Run Moran's I for Highest Adj. R2 ####
        r2ResultList = rh.runR2Moran()
        self.neighborWarn = True

        #### Update Output Table Moran Value ####
        #### Backfill column 6 (Moran p-value) for models that made ####
        #### it into the table; missing IDs are silently skipped. ####
        #### NOTE(review): bare except also hides unexpected errors. ####
        if self.outputTable and not self.allMIPass:
            for id, pvalue in r2ResultList:
                try:
                    tableIndex = self.model2Table[id]
                    self.tableResults[tableIndex][6] = pvalue
                except:
                    pass

        #### Add Results to Report File ####
        result2Print = rh.report()
        if self.outputReportFile:
            fo.write(result2Print.encode('utf-8'))

        #### Warn Once, Then List Each Collinear Model ####
        if len(perfectMultiModels):
            self.perfectMultiWarnBool = True
            ARCPY.AddIDMessage("WARNING", 1304)
            for modelStr in perfectMultiModels:
                ARCPY.AddIDMessage("WARNING", 1176, modelStr)

        #### Add Choose Run to Result Dictionary ####
        self.resultDict[choose] = rh

    #### Run Moran's I on Best Jarque-Bera ####
    self.createJBReport()

    #### Final Moran Stats ####
    self.getMoranStats()

    #### Create Output Table ####
    if self.outputTable:
        self.createOutputTable()

    #### Ending Summary ####
    self.endSummary()
    if self.outputReportFile:
        fo.write(self.fullReport.encode('utf-8'))
        fo.close()
def __init__(self, value=None):
    """Stores *value*, falling back to the localized default message
    (message ID 84004) when no value is supplied.

    INPUTS:
    value {str, None}: value to store; None selects the default.
    """
    # PEP 8: compare to None with identity, not equality -- `== None`
    # would invoke __eq__ on arbitrary objects and can misfire.
    if value is None:
        value = ARCPY.GetIDMessage(84004)
    self.value = value