Example #1
def kNearest2SWM(inputFC, swmFile, masterField, concept = "EUCLIDEAN", 
                 kNeighs = 1, rowStandard = True):
    """Creates a sparse spatial weights matrix (SWM) based on k-nearest
    neighbors.

    INPUTS: 
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    concept {str, EUCLIDEAN}: EUCLIDEAN or MANHATTAN
    kNeighs {int, 1}: number of neighbors to return
    rowStandard {bool, True}: row standardize weights?
    """

    #### Assure that kNeighs is Non-Zero ####
    if kNeighs <= 0:
        ARCPY.AddIDMessage("ERROR", 976)
        raise SystemExit()

    #### Set Default Progressor for Neighborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 2)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField, types = [0,1])

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.catPath, [masterField],
                                 spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs = 2)

    #### Process any bad records encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        err = ERROR.reportBadRecords(cnt, numBadRecs, badRecs,
                                     label = ssdo.oidName)

    #### Assure k-Nearest is Less Than Number of Features ####
    if kNeighs >= N:
        ARCPY.AddIDMessage("ERROR", 975)
        raise SystemExit()

    #### Create k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    concept, gaConcept = WU.validateDistanceMethod(concept, ssdo.spatialRef)
    gaSearch.init_nearest(0.0, kNeighs, gaConcept)
    neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch, 
                                           weight_type = 1,
                                           row_standard = False)

    #### Set Progressor for Weights Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, N, 1)

    #### Initialize Spatial Weights Matrix File ####
    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName, 
                             N, rowStandard, inputFC = inputFC,
                             wType = 2, distanceMethod = concept,
                             numNeighs = kNeighs)

    #### Unique Master ID Dictionary ####
    masterSet = set([])

    for row in xrange(N):
        masterID = int(gaTable[row][2])
        if masterID in masterSet:
            ARCPY.AddIDMessage("Error", 644, masterField)
            ARCPY.AddIDMessage("Error", 643)
            raise SystemExit()
        else:
            masterSet.add(masterID)

        neighs, weights = neighWeights[row]
        neighs = [ gaTable[nh][2] for nh in neighs ]

        #### Add Spatial Weights Matrix Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights) 

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    swmWriter.close()
    del gaTable

    #### Report Warning/Max Neighbors ####
    swmWriter.reportNeighInfo()

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()
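
A minimal call sketch for the function above. The module name, dataset paths, and field name are placeholders (this function normally lives inside ESRI's Spatial Statistics script toolbox), so treat the import as an assumption:

# Hypothetical usage sketch; module name, paths, and field name are placeholders.
from my_swm_tools import kNearest2SWM   # assumed location of the function above

kNearest2SWM(inputFC=r"C:\data\study.gdb\tracts",   # placeholder feature class
             swmFile=r"C:\data\tracts_k8.swm",      # placeholder output SWM file
             masterField="MYID",                    # unique integer ID field
             concept="EUCLIDEAN",
             kNeighs=8,
             rowStandard=True)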
Example #2
    def obtainData(self, masterField, fields = [], types = [0,1,2,3,4,5,6],
                   minNumObs = 0, warnNumObs = 0, dateStr = False,
                   explicitBadRecordID = None):
        """Takes a list of field names and returns it in a dictionary
        structure.

        INPUTS:
        masterField (str): name of field being used as the master
        fields {list, []}: name(s) of the field to be returned
        types (list): types of data allowed to be returned (1)
        minNumObs {int, 0}: minimum number of observations for error
        warnNumObs {int, 0}: minimum number of observations for warning
        dateStr {bool, False}: return date fields as strings?
        explicitBadRecordID {object, None}: value passed through to bad-record
            reporting

        ATTRIBUTES:
        gaTable (structure): instance of the GA Table
        fields (dict): fieldName = instance of FCField
        master2Order (dict): masterID = order in lists
        order2Master (dict): order in lists = masterID
        masterField (str): field that serves as the master
        badRecords (list): master IDs that could not be read
        xyCoords (array, numObs x 2): xy-coordinates for feature centroids
        """

        #### Get Base Count, May Include Bad Records ####
        cnt = UTILS.getCount(self.inputFC)

        #### Validation of Master Field ####
        verifyMaster = ERROR.checkField(self.allFields, masterField,
                                        types = [0,1,5])

        #### Set MasterIsOID Boolean ####
        self.masterIsOID = masterField == self.oidName

        #### Set Master and Data Indices ####
        if self.masterIsOID:
            self.masterColumnIndex = 0
            self.dataColumnIndex = 2
            fieldList = [self.oidName, "shape@XY"]
        else:
            self.masterColumnIndex = 2
            self.dataColumnIndex = 3
            fieldList = [self.oidName, "shape@XY", masterField]

        #### Initialization of Centroids  ####
        xyCoords = NUM.empty((cnt, 2), float)

        #### Validation and Initialization of Data Fields ####
        numFields = len(fields)
        fieldTypes = {}
        hasDate = False
        for field in fields:
            fieldType = ERROR.checkField(self.allFields, field, types = types)
            fieldTypes[field] = fieldType
            fieldList.append(field)
            self.fields[field] = self.allFields[field]
            if fieldType.upper() == "DATE":
                hasDate = True
                nowTime = DT.datetime.now()

        #### Create Empty Data Arrays ####
        for fieldName, fieldObj in self.fields.iteritems():
            fieldObj.createDataArray(cnt, dateStr = dateStr)

        #### Z Coords ####
        if self.hasZ:
            zCoords = NUM.empty((cnt, ), float)
            fieldList.append("shape@Z")

        #### Keep track of Invalid Fields ####
        badIDs = []
        badRecord = 0

        #### Create Progressor Bar ####
        ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84001), 0, cnt, 1)

        #### Process Field Values ####
        try:
            rows = DA.SearchCursor(self.inputFC, fieldList, "",
                                   self.spatialRefString)
        except:
            ARCPY.AddIDMessage("ERROR", 204)
            raise SystemExit()

        c = 0
        for row in rows:
            oid = row[0]
            badXY = row[1].count(None)
            if self.hasZ:
                badValues = row[0:-1].count(None)
            else:
                badValues = row.count(None)

            #### Check Bad Record ####
            if badXY or badValues:
                badRow = 1
                badRecord = 1
                badIDs.append(oid)
            else:
                #### Get Centroid and Master ID ####
                xyCoords[c] = row[1]
                masterID = row[self.masterColumnIndex]

                #### Add Field Values ####
                if numFields:
                    restFields = row[self.dataColumnIndex:]
                    for fieldInd, fieldName in enumerate(fields):
                        fieldValue = restFields[fieldInd]
                        fieldType = fieldTypes[fieldName]
                        if fieldType.upper() == "DATE":
                            if dateStr:
                                fieldValue = str(fieldValue)
                            else:
                                fieldValue = (nowTime - fieldValue).total_seconds()
                        self.fields[fieldName].data[c] = fieldValue
                if self.hasZ:
                    zCoords[c] = row[-1]

                #### Check uniqueness of masterID field ####
                if self.master2Order.has_key(masterID):
                    del rows
                    ARCPY.AddIDMessage("ERROR", 644, masterField)
                    ARCPY.AddIDMessage("ERROR", 643)
                    raise SystemExit()
                else:
                    self.master2Order[masterID] = c
                    self.order2Master[c] = masterID
                    c += 1

            ARCPY.SetProgressorPosition()

        del rows

        #### Check Whether the Number of Features is Appropriate ####
        numObs = len(self.master2Order)
        ERROR.checkNumberOfObs(numObs, minNumObs = minNumObs,
                               warnNumObs = warnNumObs,
                               silentWarnings = self.silentWarnings)

        #### Get Set of Bad IDs ####
        badIDs = list(set(badIDs))
        badIDs.sort()
        badIDs = [ str(i) for i in badIDs ]

        #### Process any bad records encountered ####
        if badRecord != 0:
            bn = len(badIDs)
            if not self.silentWarnings:
                ERROR.reportBadRecords(cnt, bn, badIDs, label = self.oidName,
                                       explicitBadRecordID = explicitBadRecordID)

            #### Prune Data Arrays ####
            xyCoords = xyCoords[0:numObs]
            self.resizeDataArrays(numObs)
            if self.hasZ:
                zCoords = zCoords[0:numObs]

        #### Set the Hidden Fields (E.g. Not in Use) ####
        self.setHiddenFields()

        #### Reset Extent to Honor Env and Subsets ####
        try:
            self.extent = UTILS.resetExtent(xyCoords)
        except:
            pass

        #### Reset Coordinates for Chordal ####
        if self.useChordal:
            #### Project to XY on Spheroid ####
            self.spheroidCoords = ARC._ss.lonlat_to_xy(xyCoords, 
                                                self.spatialRef) 
            self.sliceInfo = UTILS.SpheroidSlice(self.extent,
                                                self.spatialRef)
        else:
            self.spheroidCoords = None
            self.sliceInfo = None

        #### Set Further Attributes ####
        self.badRecords = badIDs
        self.xyCoords = xyCoords
        self.masterField = masterField
        self.gaTable = None
        self.numObs = numObs
        if self.hasZ:
            self.zCoords = zCoords
        else:
            self.zCoords = None
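
The central bookkeeping in obtainData is the pair of dictionaries that map master IDs to row order and back, together with a uniqueness check on the master field. A minimal, self-contained sketch of that pattern (plain Python, no arcpy; the IDs are made up):

masterIDs = [101, 205, 307]              # e.g. values read from the master field
master2Order = {}
order2Master = {}
for order, masterID in enumerate(masterIDs):
    if masterID in master2Order:         # uniqueness check, as in obtainData
        raise ValueError("Duplicate master ID: %s" % masterID)
    master2Order[masterID] = order
    order2Master[order] = masterID

assert order2Master[master2Order[205]] == 205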
Example #3
def delaunay2SWM(inputFC, swmFile, masterField, rowStandard = True):
    """Creates a sparse spatial weights matrix (SWM) based on Delaunay
    Triangulation.  

    INPUTS: 
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    rowStandard {bool, True}: row standardize weights?
    """

    #### Set Default Progressor for Neighborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 2)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField, types = [0,1])

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.catPath, [masterField],
                                 spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs = 2)

    #### Process any bad records encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        err = ERROR.reportBadRecords(cnt, numBadRecs, badRecs,
                                     label = ssdo.oidName)

    #### Create Delaunay Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_delaunay()
    neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch, 
                                           weight_type = 1,
                                           row_standard = False)

    #### Set Progressor for Weights Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, N, 1)

    #### Initialize Spatial Weights Matrix File ####
    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName, 
                             N, rowStandard, inputFC = inputFC,
                             wType = 3)

    #### Unique Master ID Dictionary ####
    masterSet = set([])

    for row in xrange(N):
        masterID = int(gaTable[row][2])
        if masterID in masterSet:
            ARCPY.AddIDMessage("Error", 644, masterField)
            ARCPY.AddIDMessage("Error", 643)
            raise SystemExit()
        else:
            masterSet.add(masterID)

        neighs, weights = neighWeights[row]
        neighs = [ gaTable[nh][2] for nh in neighs ]

        #### Add Spatial Weights Matrix Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights) 

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    swmWriter.close()
    del gaTable

    #### Report if Any Features Have No Neighbors ####
    swmWriter.reportNoNeighbors()

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()
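
The rowStandard flag asks the SWM writer to row-standardize the weights, i.e. divide each feature's weights by their sum so every row of the matrix sums to 1. The writer does this internally; the NumPy sketch below only illustrates the arithmetic:

import numpy as NUM

weights = NUM.array([1.0, 1.0, 1.0])       # binary weights for three neighbors
rowStandardized = weights / weights.sum()  # each weight divided by the row sum
print(rowStandardized)                     # [0.3333... 0.3333... 0.3333...]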
Example #4
    def obtainData(self,
                   masterField,
                   fields=[],
                   types=[0, 1, 2, 3, 4, 5, 6],
                   minNumObs=0,
                   warnNumObs=0,
                   dateStr=False,
                   explicitBadRecordID=None):
        """Takes a list of field names and returns it in a dictionary
        structure.

        INPUTS:
        masterField (str): name of field being used as the master
        fields {list, []}: name(s) of the field to be returned
        types (list): types of data allowed to be returned (1)
        minNumObs {int, 0}: minimum number of observations for error
        warnNumObs {int, 0}: minimum number of observations for warning
        dateStr {bool, False}: return date fields as strings?
        explicitBadRecordID {object, None}: value passed through to bad-record
            reporting

        ATTRIBUTES:
        gaTable (structure): instance of the GA Table
        fields (dict): fieldName = instance of FCField
        master2Order (dict): masterID = order in lists
        order2Master (dict): order in lists = masterID
        masterField (str): field that serves as the master
        badRecords (list): master IDs that could not be read
        xyCoords (array, numObs x 2): xy-coordinates for feature centroids
        """

        #### Get Base Count, May Include Bad Records ####
        cnt = UTILS.getCount(self.inputFC)

        #### Validation of Master Field ####
        verifyMaster = ERROR.checkField(self.allFields,
                                        masterField,
                                        types=[0, 1, 5])

        #### Set MasterIsOID Boolean ####
        self.masterIsOID = masterField == self.oidName

        #### Set Master and Data Indices ####
        if self.masterIsOID:
            self.masterColumnIndex = 0
            self.dataColumnIndex = 2
            fieldList = [self.oidName, "shape@XY"]
        else:
            self.masterColumnIndex = 2
            self.dataColumnIndex = 3
            fieldList = [self.oidName, "shape@XY", masterField]

        #### Initialization of Centroids  ####
        xyCoords = NUM.empty((cnt, 2), float)

        #### Validation and Initialization of Data Fields ####
        numFields = len(fields)
        fieldTypes = {}
        hasDate = False
        for field in fields:
            fieldType = ERROR.checkField(self.allFields, field, types=types)
            fieldTypes[field] = fieldType
            fieldList.append(field)
            self.fields[field] = self.allFields[field]
            if fieldType.upper() == "DATE":
                hasDate = True
                nowTime = DT.datetime.now()

        #### Create Empty Data Arrays ####
        for fieldName, fieldObj in self.fields.iteritems():
            fieldObj.createDataArray(cnt, dateStr=dateStr)

        #### Z Coords ####
        if self.hasZ:
            zCoords = NUM.empty((cnt, ), float)
            fieldList.append("shape@Z")

        #### Keep track of Invalid Fields ####
        badIDs = []
        badRecord = 0

        #### Create Progressor Bar ####
        ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84001), 0, cnt, 1)

        #### Process Field Values ####
        try:
            rows = DA.SearchCursor(self.inputFC, fieldList, "",
                                   self.spatialRefString)
        except:
            ARCPY.AddIDMessage("ERROR", 204)
            raise SystemExit()

        c = 0
        for row in rows:
            oid = row[0]
            badXY = row[1].count(None)
            if self.hasZ:
                badValues = row[0:-1].count(None)
            else:
                badValues = row.count(None)

            #### Check Bad Record ####
            if badXY or badValues:
                badRow = 1
                badRecord = 1
                badIDs.append(oid)
            else:
                #### Get Centroid and Master ID ####
                xyCoords[c] = row[1]
                masterID = row[self.masterColumnIndex]

                #### Add Field Values ####
                if numFields:
                    restFields = row[self.dataColumnIndex:]
                    for fieldInd, fieldName in enumerate(fields):
                        fieldValue = restFields[fieldInd]
                        fieldType = fieldTypes[fieldName]
                        if fieldType.upper() == "DATE":
                            if dateStr:
                                fieldValue = str(fieldValue)
                            else:
                                fieldValue = (nowTime -
                                              fieldValue).total_seconds()
                        self.fields[fieldName].data[c] = fieldValue
                if self.hasZ:
                    zCoords[c] = row[-1]

                #### Check uniqueness of masterID field ####
                if self.master2Order.has_key(masterID):
                    del rows
                    ARCPY.AddIDMessage("ERROR", 644, masterField)
                    ARCPY.AddIDMessage("ERROR", 643)
                    raise SystemExit()
                else:
                    self.master2Order[masterID] = c
                    self.order2Master[c] = masterID
                    c += 1

            ARCPY.SetProgressorPosition()

        del rows

        #### Check Whether the Number of Features is Appropriate ####
        numObs = len(self.master2Order)
        ERROR.checkNumberOfObs(numObs,
                               minNumObs=minNumObs,
                               warnNumObs=warnNumObs,
                               silentWarnings=self.silentWarnings)

        #### Get Set of Bad IDs ####
        badIDs = list(set(badIDs))
        badIDs.sort()
        badIDs = [str(i) for i in badIDs]

        #### Process any bad records encountered ####
        if badRecord != 0:
            bn = len(badIDs)
            if not self.silentWarnings:
                ERROR.reportBadRecords(cnt,
                                       bn,
                                       badIDs,
                                       label=self.oidName,
                                       explicitBadRecordID=explicitBadRecordID)

            #### Prune Data Arrays ####
            xyCoords = xyCoords[0:numObs]
            self.resizeDataArrays(numObs)
            if self.hasZ:
                zCoords = zCoords[0:numObs]

        #### Set the Hidden Fields (E.g. Not in Use) ####
        self.setHiddenFields()

        #### Reset Extent to Honor Env and Subsets ####
        try:
            self.extent = UTILS.resetExtent(xyCoords)
        except:
            pass

        #### Reset Coordinates for Chordal ####
        if self.useChordal:
            #### Project to XY on Spheroid ####
            self.spheroidCoords = ARC._ss.lonlat_to_xy(xyCoords,
                                                       self.spatialRef)
            self.sliceInfo = UTILS.SpheroidSlice(self.extent, self.spatialRef)
        else:
            self.spheroidCoords = None
            self.sliceInfo = None

        #### Set Further Attributes ####
        self.badRecords = badIDs
        self.xyCoords = xyCoords
        self.masterField = masterField
        self.gaTable = None
        self.numObs = numObs
        if self.hasZ:
            self.zCoords = zCoords
        else:
            self.zCoords = None
Example #5
    def obtainDataGA(self, masterField, fields = [], types = [0,1,2,3,5,6],
                     minNumObs = 0, warnNumObs = 0):
        """Takes a list of field names and returns it in a dictionary
        structure.

        INPUTS:
        masterField (str): name of field being used as the master
        fields {list, []}: name(s) of the field to be returned
        types (list): types of data allowed to be returned (1)
        minNumObs {int, 0}: minimum number of observations for error
        warnNumObs {int, 0}: minimum number of observations for warning

        ATTRIBUTES:
        gaTable (structure): instance of the GA Table
        fields (dict): fieldName = instance of FCField
        master2Order (dict): masterID = order in lists
        order2Master (dict): order in lists = masterID
        masterField (str): field that serves as the master
        badRecords (list): master IDs that could not be read
        xyCoords (array, numObs x 2): xy-coordinates for feature centroids

        NOTES:
        (1) No Text Fields; short [0], long [1], float [2], double[3]
        """

        #### Validation of Master Field ####
        verifyMaster = ERROR.checkField(self.allFields, masterField,
                                        types = [0,1,5])

        #### Set MasterIsOID Boolean ####
        self.masterIsOID = masterField == self.oidName

        #### Set Master and Data Indices ####
        if self.masterIsOID:
            self.masterColumnIndex = 0
            self.dataColumnIndex = 2
            fieldList = []
        else:
            self.masterColumnIndex = 2
            self.dataColumnIndex = 3
            fieldList = [masterField]

        #### Validation and Initialization of Data Fields ####
        numFields = len(fields)
        for field in fields:
            fType = ERROR.checkField(self.allFields, field, types = types)
            fieldList.append(field)
            self.fields[field] = self.allFields[field]

        #### ZCoords Are Last ####
        getZBool = self.hasZ and (not self.renderType)
        if getZBool:
            fieldList.append("SHAPE&Z")

        #### Create GA Data Structure ####
        cnt = UTILS.getCount(self.inputFC)
        fieldList = tuple(fieldList)
        gaTable, gaInfo = WU.gaTable(self.inputFC, fieldNames = fieldList,
                                     spatRef = self.spatialRefString)

        #### Check Whether the Number of Features is Appropriate ####
        numObs = gaInfo[0]
        ERROR.checkNumberOfObs(numObs, minNumObs = minNumObs,
                               warnNumObs = warnNumObs,
                               silentWarnings = self.silentWarnings)

        #### Process any bad records encountered ####
        numBadIDs = cnt - numObs
        if numBadIDs:
            badIDs = WU.parseGAWarnings(gaTable.warnings)
            if not self.silentWarnings:
                ERROR.reportBadRecords(cnt, numBadIDs, badIDs,
                                       label = self.oidName)
        else:
            badIDs = []

        #### Initialization of Centroids  ####
        xyCoords = NUM.empty((numObs, 2), float)

        #### Z Coords ####
        if self.hasZ:
            zCoords = NUM.empty((numObs, ), float)

        #### Create Empty Data Arrays ####
        for fieldName, fieldObj in self.fields.iteritems():
            fieldObj.createDataArray(numObs)

        #### Populate SSDataObject ####
        ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84001), 0, numObs, 1)
        for row in xrange(numObs):
            rowInfo = gaTable[row]
            x,y = rowInfo[1]
            masterID = int(rowInfo[self.masterColumnIndex])
            if self.master2Order.has_key(masterID):
                ARCPY.AddIDMessage("ERROR", 644, masterField)
                ARCPY.AddIDMessage("ERROR", 643)
                raise SystemExit()
            else:
                self.master2Order[masterID] = row
                self.order2Master[row] = masterID
                xyCoords[row] = (x, y)
            if numFields:
                restFields = rowInfo[self.dataColumnIndex:]
                for fieldInd, fieldName in enumerate(fields):
                    self.fields[fieldName].data[row] = restFields[fieldInd]
            if self.hasZ:
                if getZBool:
                    zCoords[row] = rowInfo[-1]
                else:
                    zCoords[row] = NUM.nan

            ARCPY.SetProgressorPosition()

        #### Set the Hidden Fields (E.g. Not in Use) ####
        self.setHiddenFields()

        #### Reset Extent to Honor Env and Subsets ####
        try:
            self.extent = UTILS.resetExtent(xyCoords)
        except:
            pass

        #### Reset Coordinates for Chordal ####
        if self.useChordal:
            #### Project to XY on Spheroid ####
            self.spheroidCoords = ARC._ss.lonlat_to_xy(xyCoords,
                                                self.spatialRef) 
            self.sliceInfo = UTILS.SpheroidSlice(self.extent,
                                                self.spatialRef)
        else:
            self.spheroidCoords = None
            self.sliceInfo = None

        #### Set Further Attributes ####
        self.badRecords = badIDs
        self.xyCoords = xyCoords
        self.masterField = masterField
        self.gaTable = gaTable
        self.numObs = numObs
        if self.hasZ:
            self.zCoords = zCoords
        else:
            self.zCoords = None
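
A hedged usage sketch for obtainDataGA, assuming SSDataObject is importable as SSDO the way these scripts alias it; the feature class path and field name are placeholders:

import SSDataObject as SSDO                           # assumed import alias used by these scripts

ssdo = SSDO.SSDataObject(r"C:\data\obs.gdb\points")   # placeholder feature class
ssdo.obtainDataGA(ssdo.oidName, fields=["VALUE"])     # "VALUE" is a placeholder numeric field
print("numObs = %s" % ssdo.numObs)                    # observation count after bad-record pruning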
Example #6
def stCollectByKNN(ssdo, timeField, outputFC, inSpan, inDistance):
    """
    This method applies Jacquez Space-Time K-NN to convert event data into weighted
    point data by dissolving all coincident points in space and time into unique
    points with a new count field that contains the number of original features
    at that location and time span.

    INPUTS:
        ssdo (obj): SSDataObject from input
        timeField (str): Date/Time field name in input feature
        outputFC (str): path to the output feature class
        inSpan (int): value of temporal units within the same time bin
        inDistance (int): value of spatial units considered as spatial neighbors
    OUTPUTS:
        Create new collected point feature

    """
    #### Read raw time data ####
    timeData = ssdo.fields[timeField].data
    #### Convert temporal unit ####
    time = NUM.array(timeData, dtype='datetime64[s]').astype('datetime64[D]')
    #### Find Start Time ####
    startTime = time.min()
    #### Create Bin for Space and Time ####
    timeBin = (time - startTime) / inSpan

    numObs = ssdo.numObs
    #### Create Pseudo-FID to Find K-NN in Space and Time ####
    fid = [i for i in xrange(numObs)]

    #### Validate Output Workspace ####
    ERROR.checkOutputPath(outputFC)

    #### True Centroid Warning For Non-Point FCs ####
    if ssdo.shapeType.upper() != "POINT":
        ARCPY.AddIDMessage("WARNING", 1021)

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.inputFC, spatRef=ssdo.spatialRefString)

    #### Assure Enough Observations ####
    cnt = UTILS.getCount(ssdo.inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs=4)
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs=4)

    #### Process Any Bad Records Encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        if not ssdo.silentWarnings:
            ERROR.reportBadRecords(cnt,
                                   numBadRecs,
                                   badRecs,
                                   label=ssdo.oidName)

    #### Create Output Feature Class ####
    outPath, outName = OS.path.split(outputFC)
    try:
        DM.CreateFeatureclass(outPath, outName, "POINT", "", ssdo.mFlag,
                              ssdo.zFlag, ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Create k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(inDistance, 0, "euclidean")

    #### Add Count Field ####
    countFieldNameOut = ARCPY.ValidateFieldName(countFieldName, outPath)
    timeFieldNameOut = ARCPY.ValidateFieldName(timeFieldName, outPath)
    UTILS.addEmptyField(outputFC, countFieldNameOut, "LONG")
    UTILS.addEmptyField(outputFC, timeFieldNameOut, "DATE")
    fieldList = ["SHAPE@", countFieldNameOut, timeFieldNameOut]

    #### Set Insert Cursor ####
    rowsOut = DA.InsertCursor(outputFC, fieldList)

    #### Detect S-T K-NN by Space and Time Bin ####
    duplicateList = []
    for record in fid:
        kNNList = [record]
        if record not in duplicateList:
            for pair in fid:
                if pair != record:
                    gaSearch.search_by_idx(record)
                    for nh in gaSearch:
                        if timeBin[record] == timeBin[pair]:
                            kNNList.append(nh.idx)
                            duplicateList.append(nh.idx)
            #### Create and Populate New Feature ####
            kNNList = list(set(kNNList))
            count = len(kNNList)
            dt = time[record]
            x0 = ssdo.xyCoords[kNNList, 0].mean()
            y0 = ssdo.xyCoords[kNNList, 1].mean()
            pnt = (x0, y0, ssdo.defaultZ)
            rowResult = [pnt, count, dt]
            rowsOut.insertRow(rowResult)
            ARCPY.SetProgressorPosition()

    #### Clean Up ####
    del rowsOut, timeBin, kNNList, duplicateList

    return countFieldNameOut
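
The time-binning step in stCollectByKNN converts timestamps to day resolution and assigns each event an integer bin of width inSpan days (the original relies on Python 2 integer division). A self-contained NumPy sketch of that computation with made-up dates:

import numpy as NUM

timeData = ["2019-01-01", "2019-01-03", "2019-01-09"]      # illustrative timestamps
inSpan = 7                                                 # bin width in days

time = NUM.array(timeData, dtype="datetime64[s]").astype("datetime64[D]")
days = (time - time.min()).astype(int)                     # whole days since the first event
timeBin = days // inSpan                                   # integer time bin per event
print(timeBin)                                             # [0 0 1]: the first two events share a bin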
Example #7
    def obtainDataGA(self,
                     masterField,
                     fields=[],
                     types=[0, 1, 2, 3, 5, 6],
                     minNumObs=0,
                     warnNumObs=0):
        """Takes a list of field names and returns it in a dictionary
        structure.

        INPUTS:
        masterField (str): name of field being used as the master
        fields {list, []}: name(s) of the field to be returned
        types (list): types of data allowed to be returned (1)
        minNumObs {int, 0}: minimum number of observations for error
        warnNumObs {int, 0}: minimum number of observations for warning

        ATTRIBUTES:
        gaTable (structure): instance of the GA Table
        fields (dict): fieldName = instance of FCField
        master2Order (dict): masterID = order in lists
        order2Master (dict): order in lists = masterID
        masterField (str): field that serves as the master
        badRecords (list): master IDs that could not be read
        xyCoords (array, numObs x 2): xy-coordinates for feature centroids

        NOTES:
        (1) No Text Fields; short [0], long [1], float [2], double[3]
        """

        #### Validation of Master Field ####
        verifyMaster = ERROR.checkField(self.allFields,
                                        masterField,
                                        types=[0, 1, 5])

        #### Set MasterIsOID Boolean ####
        self.masterIsOID = masterField == self.oidName

        #### Set Master and Data Indices ####
        if self.masterIsOID:
            self.masterColumnIndex = 0
            self.dataColumnIndex = 2
            fieldList = []
        else:
            self.masterColumnIndex = 2
            self.dataColumnIndex = 3
            fieldList = [masterField]

        #### Validation and Initialization of Data Fields ####
        numFields = len(fields)
        for field in fields:
            fType = ERROR.checkField(self.allFields, field, types=types)
            fieldList.append(field)
            self.fields[field] = self.allFields[field]

        #### ZCoords Are Last ####
        getZBool = self.hasZ and (not self.renderType)
        if getZBool:
            fieldList.append("SHAPE&Z")

        #### Create GA Data Structure ####
        cnt = UTILS.getCount(self.inputFC)
        fieldList = tuple(fieldList)
        gaTable, gaInfo = WU.gaTable(self.inputFC,
                                     fieldNames=fieldList,
                                     spatRef=self.spatialRefString)

        #### Check Whether the Number of Features is Appropriate ####
        numObs = gaInfo[0]
        ERROR.checkNumberOfObs(numObs,
                               minNumObs=minNumObs,
                               warnNumObs=warnNumObs,
                               silentWarnings=self.silentWarnings)

        #### Process any bad records encountered ####
        numBadIDs = cnt - numObs
        if numBadIDs:
            badIDs = WU.parseGAWarnings(gaTable.warnings)
            if not self.silentWarnings:
                ERROR.reportBadRecords(cnt,
                                       numBadIDs,
                                       badIDs,
                                       label=self.oidName)
        else:
            badIDs = []

        #### Initialization of Centroids  ####
        xyCoords = NUM.empty((numObs, 2), float)

        #### Z Coords ####
        if self.hasZ:
            zCoords = NUM.empty((numObs, ), float)

        #### Create Empty Data Arrays ####
        for fieldName, fieldObj in self.fields.iteritems():
            fieldObj.createDataArray(numObs)

        #### Populate SSDataObject ####
        ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84001), 0, numObs, 1)
        for row in xrange(numObs):
            rowInfo = gaTable[row]
            x, y = rowInfo[1]
            masterID = int(rowInfo[self.masterColumnIndex])
            if self.master2Order.has_key(masterID):
                ARCPY.AddIDMessage("ERROR", 644, masterField)
                ARCPY.AddIDMessage("ERROR", 643)
                raise SystemExit()
            else:
                self.master2Order[masterID] = row
                self.order2Master[row] = masterID
                xyCoords[row] = (x, y)
            if numFields:
                restFields = rowInfo[self.dataColumnIndex:]
                for fieldInd, fieldName in enumerate(fields):
                    self.fields[fieldName].data[row] = restFields[fieldInd]
            if self.hasZ:
                if getZBool:
                    zCoords[row] = rowInfo[-1]
                else:
                    zCoords[row] = NUM.nan

            ARCPY.SetProgressorPosition()

        #### Set the Hidden Fields (E.g. Not in Use) ####
        self.setHiddenFields()

        #### Reset Extent to Honor Env and Subsets ####
        try:
            self.extent = UTILS.resetExtent(xyCoords)
        except:
            pass

        #### Reset Coordinates for Chordal ####
        if self.useChordal:
            #### Project to XY on Spheroid ####
            self.spheroidCoords = ARC._ss.lonlat_to_xy(xyCoords,
                                                       self.spatialRef)
            self.sliceInfo = UTILS.SpheroidSlice(self.extent, self.spatialRef)
        else:
            self.spheroidCoords = None
            self.sliceInfo = None

        #### Set Further Attributes ####
        self.badRecords = badIDs
        self.xyCoords = xyCoords
        self.masterField = masterField
        self.gaTable = gaTable
        self.numObs = numObs
        if self.hasZ:
            self.zCoords = zCoords
        else:
            self.zCoords = None
Example #8
def exportXYV(inputFC, fieldList, delimiter, outFile, outFieldNames=False):
    """Exports the X,Y Coords and Set of Field Values for a Given
    Feature Class.

    INPUTS:
    inputFC (str): path to the input feature class
    fieldList (list): list of field names to export
    delimiter (str): token to delimit output file with
    outFile (str): path to the output text file
    outFieldNames (bool): return field names in first row of text file?

    OUTPUT:
    outFile (file): output text file
    """

    #### Get Feature Class Properties ####
    ssdo = SSDO.SSDataObject(inputFC, useChordal=False)
    inputFields = [ssdo.oidName, "SHAPE@XY"] + fieldList

    #### Create Progressor Bar ####
    cnt = UTILS.getCount(inputFC)
    ARCPY.AddMessage(ARCPY.GetIDMessage(84012))
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84012), 0, cnt, 1)

    #### Keep track of Invalid Fields ####
    badIDs = []
    badRecord = 0

    #### Process Field Values ####
    try:
        rows = DA.SearchCursor(ssdo.inputFC, inputFields, "",
                               ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 204)
        raise SystemExit()

    #### Get Field Types and Set LOCALE Dictionary ####
    floatTypes = ["Single", "Double"]
    localeDict = {}
    for field in fieldList:
        fieldType = ssdo.allFields[field].type
        if fieldType in floatTypes:
            formatToken = "%f"
        else:
            formatToken = "%s"
        localeDict[field] = formatToken

    #### Create Output File ####
    fo = UTILS.openFile(outFile, 'w')

    #### Write Field Names to File ####
    if outFieldNames:
        outPath, outName = OS.path.split(outFile)
        allFieldNames = UTILS.getFieldNames(exyvFieldNames, outPath)
        allFieldNames += fieldList
        outRow = delimiter.join(allFieldNames)
        fo.write("%s\n" % outRow.encode('utf-8'))

    for row in rows:
        OID = row[0]
        badValues = row.count(None)
        badXY = row[1].count(None)
        badRow = badValues or badXY
        if not badXY:
            xCoord, yCoord = row[1]
            x = LOCALE.format("%0.8f", xCoord)
            y = LOCALE.format("%0.8f", yCoord)
        else:
            x = "NULL"
            y = "NULL"

        #### Check to see whether field values are OK ####
        rowValues = [x, y]
        for ind, field in enumerate(fieldList):
            value = row[ind + 2]
            if value == "" or value == None:
                rowValues.append("NULL")
            else:
                formatValue = LOCALE.format(localeDict[field], value)
                rowValues.append(formatValue)

        #### Keep Track of Bad Records ####
        if badRow:
            badIDs.append(OID)

        #### Continue Based on Whether a Bad Row ####
        outRow = delimiter.join(rowValues)
        fo.write("%s\n" % outRow.encode('utf-8'))

        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    del rows
    fo.close()
    ARCPY.AddMessage(outFile)

    #### Get Set of Bad IDs ####
    badIDs = list(set(badIDs))
    badIDs.sort()
    badIDs = [str(i) for i in badIDs]

    #### Process any bad records encountered ####
    bn = len(badIDs)
    if bn:
        err = ERROR.reportBadRecords(cnt,
                                     bn,
                                     badIDs,
                                     label=ssdo.oidName,
                                     allowNULLs=True)
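
exportXYV formats coordinates and numeric values through the standard locale module (aliased as LOCALE in these scripts), so the decimal separator follows the active locale. A small self-contained sketch of that behavior using locale.format_string, the non-deprecated equivalent of LOCALE.format:

import locale as LOCALE

LOCALE.setlocale(LOCALE.LC_ALL, "")          # honor the process locale settings
x = 1234.56789
print(LOCALE.format_string("%0.8f", x))      # e.g. "1234.56789000" (or "1234,56789000")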
Example #9
def collectEvents(ssdo, outputFC):
    """This utility converts event data into weighted point data by
    dissolving all coincident points into unique points with a new count
    field that contains the number of original features at that
    location.

    INPUTS: 
    ssdo (obj): SSDataObject for the input feature class
    outputFC (str): path to the output feature class
    """

    #### Set Default Progressor for Neighborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Validate Output Workspace ####
    ERROR.checkOutputPath(outputFC)

    #### True Centroid Warning For Non-Point FCs ####
    if ssdo.shapeType.upper() != "POINT":
        ARCPY.AddIDMessage("WARNING", 1021)

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.inputFC, spatRef=ssdo.spatialRefString)

    #### Assure Enough Observations ####
    cnt = UTILS.getCount(ssdo.inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs=4)
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs=4)

    #### Process Any Bad Records Encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        if not ssdo.silentWarnings:
            ERROR.reportBadRecords(cnt,
                                   numBadRecs,
                                   badRecs,
                                   label=ssdo.oidName)

    #### Create k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(0.0, 0, "euclidean")

    #### Create Output Feature Class ####
    outPath, outName = OS.path.split(outputFC)
    try:
        DM.CreateFeatureclass(outPath, outName, "POINT", "", ssdo.mFlag,
                              ssdo.zFlag, ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Add Count Field ####
    countFieldNameOut = ARCPY.ValidateFieldName(countFieldName, outPath)
    UTILS.addEmptyField(outputFC, countFieldNameOut, "LONG")
    fieldList = ["SHAPE@", countFieldNameOut]

    #### Set Insert Cursor ####
    rowsOut = DA.InsertCursor(outputFC, fieldList)

    #### Set Progressor for Calculation ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84007), 0, N, 1)

    #### ID List to Search ####
    rowsIN = range(N)
    maxCount = 0
    numUnique = 0

    for row in rowsIN:
        #### Get Row Coords ####
        rowInfo = gaTable[row]
        x0, y0 = rowInfo[1]
        count = 1

        #### Search For Exact Coord Match ####
        gaSearch.search_by_idx(row)
        for nh in gaSearch:
            count += 1
            rowsIN.remove(nh.idx)
            ARCPY.SetProgressorPosition()

        #### Keep Track of Max Count ####
        maxCount = max([count, maxCount])

        #### Create Output Point ####
        pnt = (x0, y0, ssdo.defaultZ)

        #### Create and Populate New Feature ####
        rowResult = [pnt, count]
        rowsOut.insertRow(rowResult)
        numUnique += 1
        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    del rowsOut, gaTable

    return countFieldNameOut, maxCount, N, numUnique
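
A hedged call sketch for collectEvents. Unlike the SWM builders above, it takes an already-constructed SSDataObject rather than a feature class path; the module names and paths below are placeholders/assumptions:

import SSDataObject as SSDO                    # assumed import alias used by these scripts
from CollectEventsUtils import collectEvents   # hypothetical module holding the function above

ssdo = SSDO.SSDataObject(r"C:\data\crime.gdb\incidents")             # placeholder input
outputFC = r"C:\data\crime.gdb\incidents_collected"                  # placeholder output
countField, maxCount, N, numUnique = collectEvents(ssdo, outputFC)
print("%s unique locations, max count %s" % (numUnique, maxCount))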
Example #10
def calculateDistanceBand(inputFC, kNeighs, concept="EUCLIDEAN"):
    """Provides the minimum, maximum and average distance from a
    set of features based on a given neighbor count.

    INPUTS: 
    inputFC (str): path to the input feature class
    kNeighs (int): number of neighbors to return
    concept {str, EUCLIDEAN}: EUCLIDEAN or MANHATTAN distance
    """

    #### Assure that kNeighs is Non-Zero ####
    if kNeighs <= 0:
        ARCPY.AddIDMessage("ERROR", 976)
        raise SystemExit()

    #### Set Default Progressor for Neighborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, useChordal=True)
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs=2)

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(inputFC, spatRef=ssdo.spatialRefString)

    #### Assure Enough Observations ####
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs=2)

    #### Process Any Bad Records Encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        err = ERROR.reportBadRecords(cnt,
                                     numBadRecs,
                                     badRecs,
                                     label=ssdo.oidName)

    #### Assure k-Nearest is Less Than Number of Features ####
    if kNeighs >= N:
        ARCPY.AddIDMessage("ERROR", 975)
        raise SystemExit()

    #### Create k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaConcept = concept.lower()
    gaSearch.init_nearest(0.0, kNeighs, gaConcept)
    neighDist = ARC._ss.NeighborDistances(gaTable, gaSearch)

    #### Set Progressor for Weights Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84007), 0, N, 1)
    distances = NUM.empty((N, ), float)

    for row in xrange(N):
        distances[row] = neighDist[row][-1].max()
        ARCPY.SetProgressorPosition()

    #### Calculate and Report ####
    minDist = distances.min()
    avgDist = distances.mean()
    maxDist = distances.max()
    if ssdo.useChordal:
        hardMaxExtent = ARC._ss.get_max_gcs_distance(ssdo.spatialRef)
        if maxDist > hardMaxExtent:
            ARCPY.AddIDMessage("ERROR", 1609)
            raise SystemExit()

    minDistOut = LOCALE.format("%0.6f", minDist)
    avgDistOut = LOCALE.format("%0.6f", avgDist)
    maxDistOut = LOCALE.format("%0.6f", maxDist)

    #### Create Output Text Table ####
    header = ARCPY.GetIDMessage(84171)
    row1 = [ARCPY.GetIDMessage(84165).format(kNeighs), minDistOut]
    row2 = [ARCPY.GetIDMessage(84166).format(kNeighs), avgDistOut]
    row3 = [ARCPY.GetIDMessage(84167).format(kNeighs), maxDistOut]
    total = [row1, row2, row3]
    tableOut = UTILS.outputTextTable(total, header=header, pad=1)

    #### Add Linear/Angular Unit ####
    distanceOut = ssdo.distanceInfo.outputString
    distanceMeasuredStr = ARCPY.GetIDMessage(84344).format(distanceOut)
    tableOut += "\n%s\n" % distanceMeasuredStr

    #### Report Text Output ####
    ARCPY.AddMessage(tableOut)

    #### Set Derived Output ####
    ARCPY.SetParameterAsText(3, minDist)
    ARCPY.SetParameterAsText(4, avgDist)
    ARCPY.SetParameterAsText(5, maxDist)

    #### Clean Up ####
    del gaTable
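
calculateDistanceBand summarizes, for each feature, the distance to its kth nearest neighbor and reports the minimum, mean, and maximum of those values. A self-contained brute-force NumPy sketch of that calculation on a toy Euclidean point set:

import numpy as NUM

coords = NUM.array([[0.0, 0.0], [1.0, 0.0], [0.0, 2.0], [3.0, 3.0]])
kNeighs = 2

diff = coords[:, None, :] - coords[None, :, :]     # pairwise coordinate differences
dist = NUM.sqrt((diff ** 2).sum(axis=-1))          # pairwise Euclidean distances
NUM.fill_diagonal(dist, NUM.inf)                   # ignore self-distances

kthDist = NUM.sort(dist, axis=1)[:, kNeighs - 1]   # distance to each feature's kth neighbor
print("min %s  avg %s  max %s" % (kthDist.min(), kthDist.mean(), kthDist.max()))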
Example #11
def exportXYV(inputFC, fieldList, delimiter, outFile, outFieldNames=False):
    """Exports the X,Y Coords and Set of Field Values for a Given
    Feature Class.

    INPUTS:
    inputFC (str): path to the input feature class
    fieldList (list): list of field names to export
    delimiter (str): token to delimit output file with
    outFile (str): path to the output text file
    outFieldNames (bool): return field names in first row of text file?

    OUTPUT:
    outFile (file): output text file
    """

    #### Get Feature Class Properties ####
    ssdo = SSDO.SSDataObject(inputFC, useChordal=False)
    inputFields = [ssdo.oidName, "SHAPE@XY"] + fieldList

    #### Create Progressor Bar ####
    cnt = UTILS.getCount(inputFC)
    ARCPY.AddMessage(ARCPY.GetIDMessage(84012))
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84012), 0, cnt, 1)

    #### Keep track of Invalid Fields ####
    badIDs = []
    badRecord = 0

    #### Process Field Values ####
    try:
        rows = DA.SearchCursor(ssdo.inputFC, inputFields, "", ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 204)
        raise SystemExit()

    #### Get Field Types and Set LOCALE Dictionary ####
    floatTypes = ["Single", "Double"]
    localeDict = {}
    for field in fieldList:
        fieldType = ssdo.allFields[field].type
        if fieldType in floatTypes:
            formatToken = "%f"
        else:
            formatToken = "%s"
        localeDict[field] = formatToken

    #### Create Output File ####
    fo = UTILS.openFile(outFile, "w")

    #### Write Field Names to File ####
    if outFieldNames:
        outPath, outName = OS.path.split(outFile)
        allFieldNames = UTILS.getFieldNames(exyvFieldNames, outPath)
        allFieldNames += fieldList
        outRow = delimiter.join(allFieldNames)
        fo.write("%s\n" % outRow.encode("utf-8"))

    for row in rows:
        OID = row[0]
        badValues = row.count(None)
        badXY = row[1].count(None)
        badRow = badValues or badXY
        if not badXY:
            xCoord, yCoord = row[1]
            x = LOCALE.format("%0.8f", xCoord)
            y = LOCALE.format("%0.8f", yCoord)
        else:
            x = "NULL"
            y = "NULL"

        #### Check to see whether field values are OK ####
        rowValues = [x, y]
        for ind, field in enumerate(fieldList):
            value = row[ind + 2]
            if value == "" or value == None:
                rowValues.append("NULL")
            else:
                formatValue = LOCALE.format(localeDict[field], value)
                rowValues.append(formatValue)

        #### Keep Track of Bad Records ####
        if badRow:
            badIDs.append(OID)

        #### Continue Based on Whether a Bad Row ####
        outRow = delimiter.join(rowValues)
        fo.write("%s\n" % outRow.encode("utf-8"))

        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    del rows
    fo.close()
    ARCPY.AddMessage(outFile)

    #### Get Set of Bad IDs ####
    badIDs = list(set(badIDs))
    badIDs.sort()
    badIDs = [str(i) for i in badIDs]

    #### Process any bad records encountered ####
    bn = len(badIDs)
    if bn:
        err = ERROR.reportBadRecords(cnt, bn, badIDs, label=ssdo.oidName, allowNULLs=True)
Example #12
def spaceTime2SWM(inputFC, swmFile, masterField, concept = "EUCLIDEAN",
                  threshold = None, rowStandard = True,
                  timeField = None, timeType = None,
                  timeValue = None):
    """
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    concept {str, EUCLIDEAN}: EUCLIDEAN or MANHATTAN
    threshold {float, None}: distance threshold
    rowStandard {bool, True}: row standardize weights?
    timeField {str, None}: name of the date-time field
    timeType {str, None}: ESRI enumeration of date-time intervals
    timeValue {float, None}: value forward and backward in time
    """

    #### Assure Temporal Parameters are Set ####
    if not timeField:
        ARCPY.AddIDMessage("ERROR", 1320)
        raise SystemExit()
    if not timeType:
        ARCPY.AddIDMessage("ERROR", 1321)
        raise SystemExit()
    if not timeValue or timeValue <= 0:
        ARCPY.AddIDMessage("ERROR", 1322)
        raise SystemExit()

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 2)
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84001), 0, cnt, 1)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField, types = [0,1])
    badIDs = []

    #### Create Temporal Hash ####
    timeInfo = {}
    xyCoords = NUM.empty((cnt, 2), float)

    #### Process Field Values ####
    fieldList = [masterField, "SHAPE@XY", timeField]
    try:
        rows = DA.SearchCursor(ssdo.catPath, fieldList, "", 
                               ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 204)
        raise SystemExit()

    #### Add Data to GATable and Time Dictionary ####
    c = 0
    for row in rows:
        badRow = False

        #### Assure Masterfield is Valid ####
        masterID = row[0]
        if masterID == None or masterID == "":
            badRow = True

        #### Assure Date/Time is Valid ####
        timeStamp = row[-1]
        if timeStamp == None or timeStamp == "":
            badRow = True

        #### Assure Centroid is Valid ####
        badXY = row[1].count(None)
        if not badXY:
            x,y = row[1]
            xyCoords[c] = (x,y)
        else:
            badRow = True

        #### Process Data ####
        if not badRow:
            if timeInfo.has_key(masterID):
                #### Assure Uniqueness ####
                ARCPY.AddIDMessage("Error", 644, masterField)
                ARCPY.AddIDMessage("Error", 643)
                raise SystemExit()
            else:
                #### Fill Date/Time Dict ####
                startDT, endDT = TUTILS.calculateTimeWindow(timeStamp, 
                                                            timeValue, 
                                                            timeType)
                timeInfo[masterID] = (timeStamp, startDT, endDT)

        else:
            badIDs.append(masterID)

        #### Set Progress ####
        c += 1
        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    del rows

    #### Get Set of Bad IDs ####
    numBadObs = len(badIDs)
    badIDs = list(set(badIDs))
    badIDs.sort()
    badIDs = [ str(i) for i in badIDs ]
    
    #### Process any bad records encountered ####
    if numBadObs:
        ERROR.reportBadRecords(cnt, numBadObs, badIDs, label = masterField)

    #### Load Neighbor Table ####
    gaTable, gaInfo = WU.gaTable(ssdo.inputFC, 
                                 fieldNames = [masterField, timeField],
                                 spatRef = ssdo.spatialRefString)
    numObs = len(gaTable)
    xyCoords = xyCoords[0:numObs]

    #### Set the Distance Threshold ####
    concept, gaConcept = WU.validateDistanceMethod(concept, ssdo.spatialRef)
    if threshold == None:
        #### Set Progressor for Search ####
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84144))

        #### Create k-Nearest Neighbor Search Type ####
        gaSearch = GAPY.ga_nsearch(gaTable)
        gaSearch.init_nearest(0.0, 1, gaConcept)
        neighDist = ARC._ss.NeighborDistances(gaTable, gaSearch)
        N = len(neighDist)
        threshold = 0.0
        sumDist = 0.0 

        #### Find Maximum Nearest Neighbor Distance ####
        for row in xrange(N):
            dij = neighDist[row][-1][0]
            if dij > threshold:
                threshold = dij
            sumDist += dij

            ARCPY.SetProgressorPosition()

        #### Increase For Rounding Error ####
        threshold = threshold * 1.0001
        avgDist = sumDist / (N * 1.0)

        #### Add Linear/Angular Units ####
        thresholdStr = ssdo.distanceInfo.printDistance(threshold)
        ARCPY.AddIDMessage("Warning", 853, thresholdStr)

        #### Chordal Default Check ####
        if ssdo.useChordal:
            hardMaxExtent = ARC._ss.get_max_gcs_distance(ssdo.spatialRef)
            if threshold > hardMaxExtent:
                ARCPY.AddIDMessage("ERROR", 1609)
                raise SystemExit()

        #### Clean Up ####
        del gaSearch

    #### Create Missing SSDO Info ####
    extent = UTILS.resetExtent(xyCoords)

    #### Reset Coordinates for Chordal ####
    if ssdo.useChordal:
        sliceInfo = UTILS.SpheroidSlice(extent, ssdo.spatialRef)
        maxExtent = sliceInfo.maxExtent
    else:
        env = UTILS.Envelope(extent)
        maxExtent = env.maxExtent

    threshold = checkDistanceThresholdSWM(ssdo, threshold, maxExtent)
    
    #### Set Default Progressor for Neighborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create Distance Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(threshold, 0, gaConcept)
    neighSearch = ARC._ss.NeighborSearch(gaTable, gaSearch)

    #### Set Progressor for Weights Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, numObs, 1)

    #### Initialize Spatial Weights Matrix File ####
    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName, 
                             numObs, rowStandard, inputFC = inputFC,
                             wType = 9, distanceMethod = concept,
                             threshold = threshold, timeField = timeField,
                             timeType = timeType, timeValue = timeValue)

    for row in xrange(numObs):
        masterID = gaTable[row][2]

        #### Get Date/Time Info ####
        dt0, startDT0, endDT0 = timeInfo[masterID]

        nhs = neighSearch[row]
        neighs = []
        weights = []
        for nh in nhs:
            #### Search Through Spatial Neighbors ####
            neighID = gaTable[nh][2]

            #### Get Date/Time Info ####
            dt1, startDT1, endDT1 = timeInfo[neighID]

            #### Filter Based on Date/Time ####
            insideTimeWindow = TUTILS.isTimeNeighbor(startDT0, endDT0, dt1)
            if insideTimeWindow:
                neighs.append(neighID)
                weights.append(1.0)

        #### Add Spatial Weights Matrix Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights) 

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    swmWriter.close()
    del gaTable

    #### Report Warning/Max Neighbors ####
    swmWriter.reportNeighInfo()

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()
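# A brief illustrative sketch (not part of the original tool code): the rowStandard
# option handled by WU.SWMWriter rescales each feature's weights so they sum to 1,
# turning the binary 1.0 weights written above into 1/k for a feature with k neighbors.
def _row_standardize(weights):
    total = float(sum(weights))
    return [w / total for w in weights] if total else list(weights)

# For example, four neighbors with unit weights become 0.25 each:
# _row_standardize([1.0, 1.0, 1.0, 1.0]) -> [0.25, 0.25, 0.25, 0.25]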
Ejemplo n.º 13
0
def kNearest2SWM(inputFC, swmFile, masterField, concept = "EUCLIDEAN", 
                 kNeighs = 1, rowStandard = True):
    """Creates a sparse spatial weights matrix (SWM) based on k-nearest
    neighbors.

    INPUTS: 
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    concept: {str, EUCLIDEAN}: EUCLIDEAN or MANHATTAN 
    kNeighs {int, 1}: number of neighbors to return
    rowStandard {bool, True}: row standardize weights?
    """

    #### Assure that kNeighs is Non-Zero ####
    if kNeighs <= 0:
        ARCPY.AddIDMessage("ERROR", 976)
        raise SystemExit()

    #### Set Default Progressor for Neighborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 2)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField, types = [0,1])

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.catPath, [masterField],
                                 spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs = 2)

    #### Process any bad records encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        err = ERROR.reportBadRecords(cnt, numBadRecs, badRecs,
                                     label = ssdo.oidName)

    #### Assure k-Nearest is Less Than Number of Features ####
    if kNeighs >= N:
        ARCPY.AddIDMessage("ERROR", 975)
        raise SystemExit()

    #### Create k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    concept, gaConcept = WU.validateDistanceMethod(concept, ssdo.spatialRef)
    gaSearch.init_nearest(0.0, kNeighs, gaConcept)
    neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch, 
                                           weight_type = 1,
                                           row_standard = False)

    #### Set Progressor for Weights Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, N, 1)

    #### Initialize Spatial Weights Matrix File ####
    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName, 
                             N, rowStandard, inputFC = inputFC,
                             wType = 2, distanceMethod = concept,
                             numNeighs = kNeighs)

    #### Unique Master ID Set ####
    masterSet = set([])

    for row in xrange(N):
        masterID = int(gaTable[row][2])
        if masterID in masterSet:
            ARCPY.AddIDMessage("Error", 644, masterField)
            ARCPY.AddIDMessage("Error", 643)
            raise SystemExit()
        else:
            masterSet.add(masterID)

        neighs, weights = neighWeights[row]
        neighs = [ gaTable[nh][2] for nh in neighs ]

        #### Add Spatial Weights Matrix Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights) 

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    swmWriter.close()
    del gaTable

    #### Report Warning/Max Neighbors ####
    swmWriter.reportNeighInfo()

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()
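# A minimal usage sketch for kNearest2SWM above (hypothetical paths and ID field;
# assumes this script's ArcPy-based imports are available in the running environment).
if __name__ == "__main__":
    kNearest2SWM(r"C:\data\counties.shp", r"C:\data\counties_k8.swm", "MYID",
                 concept = "EUCLIDEAN", kNeighs = 8, rowStandard = True)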
Ejemplo n.º 14
0
def delaunay2SWM(inputFC, swmFile, masterField, rowStandard = True):
    """Creates a sparse spatial weights matrix (SWM) based on Delaunay
    Triangulation.  

    INPUTS: 
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    rowStandard {bool, True}: row standardize weights?
    """

    #### Set Default Progressor for Neighborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 2)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField, types = [0,1])

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.catPath, [masterField],
                                 spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs = 2)

    #### Process any bad records encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        err = ERROR.reportBadRecords(cnt, numBadRecs, badRecs,
                                     label = ssdo.oidName)

    #### Create Delaunay Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_delaunay()
    neighWeights = ARC._ss.NeighborWeights(gaTable, gaSearch, 
                                           weight_type = 1,
                                           row_standard = False)

    #### Set Progressor for Weights Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, N, 1)

    #### Initialize Spatial Weights Matrix File ####
    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName, 
                             N, rowStandard, inputFC = inputFC,
                             wType = 3)

    #### Unique Master ID Set ####
    masterSet = set([])

    for row in xrange(N):
        masterID = int(gaTable[row][2])
        if masterID in masterSet:
            ARCPY.AddIDMessage("Error", 644, masterField)
            ARCPY.AddIDMessage("Error", 643)
            raise SystemExit()
        else:
            masterSet.add(masterID)

        neighs, weights = neighWeights[row]
        neighs = [ gaTable[nh][2] for nh in neighs ]

        #### Add Spatial Weights Matrix Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights) 

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    swmWriter.close()
    del gaTable

    #### Report if Any Features Have No Neighbors ####
    swmWriter.reportNoNeighbors()

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()
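# A minimal usage sketch for delaunay2SWM above (hypothetical paths and ID field;
# assumes this script's ArcPy-based imports are available).
if __name__ == "__main__":
    delaunay2SWM(r"C:\data\parcels.shp", r"C:\data\parcels_delaunay.swm",
                 "MYID", rowStandard = True)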
Ejemplo n.º 15
0
def spaceTime2SWM(inputFC, swmFile, masterField, concept = "EUCLIDEAN",
                  threshold = None, rowStandard = True,
                  timeField = None, timeType = None,
                  timeValue = None):
    """
    inputFC (str): path to the input feature class
    swmFile (str): path to the SWM file.
    masterField (str): field in table that serves as the mapping.
    concept: {str, EUCLIDEAN}: EUCLIDEAN or MANHATTAN 
    threshold {float, None}: distance threshold
    rowStandard {bool, True}: row standardize weights?
    timeField {str, None}: name of the date-time field
    timeType {str, None}: ESRI enumeration of date-time intervals
    timeValue {float, None}: value forward and backward in time
    """

    #### Assure Temporal Parameters are Set ####
    if not timeField:
        ARCPY.AddIDMessage("ERROR", 1320)
        raise SystemExit()
    if not timeType:
        ARCPY.AddIDMessage("ERROR", 1321)
        raise SystemExit()
    if not timeValue or timeValue <= 0:
        ARCPY.AddIDMessage("ERROR", 1322)
        raise SystemExit()

    #### Create SSDataObject ####
    ssdo = SSDO.SSDataObject(inputFC, templateFC = inputFC,
                             useChordal = True)
    cnt = UTILS.getCount(inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 2)
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84001), 0, cnt, 1)

    #### Validation of Master Field ####
    verifyMaster = ERROR.checkField(ssdo.allFields, masterField, types = [0,1])
    badIDs = []

    #### Create Temporal Hash ####
    timeInfo = {}
    xyCoords = NUM.empty((cnt, 2), float)

    #### Process Field Values ####
    fieldList = [masterField, "SHAPE@XY", timeField]
    try:
        rows = DA.SearchCursor(ssdo.catPath, fieldList, "", 
                               ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 204)
        raise SystemExit()

    #### Add Data to GATable and Time Dictionary ####
    c = 0
    for row in rows:
        badRow = False

        #### Assure Masterfield is Valid ####
        masterID = row[0]
        if masterID == None or masterID == "":
            badRow = True

        #### Assure Date/Time is Valid ####
        timeStamp = row[-1]
        if timeStamp == None or timeStamp == "":
            badRow = True

        #### Assure Centroid is Valid ####
        badXY = row[1].count(None)
        if not badXY:
            x,y = row[1]
            xyCoords[c] = (x,y)
        else:
            badRow = True

        #### Process Data ####
        if not badRow:
            if timeInfo.has_key(masterID):
                #### Assure Uniqueness ####
                ARCPY.AddIDMessage("Error", 644, masterField)
                ARCPY.AddIDMessage("Error", 643)
                raise SystemExit()
            else:
                #### Fill Date/Time Dict ####
                startDT, endDT = TUTILS.calculateTimeWindow(timeStamp, 
                                                            timeValue, 
                                                            timeType)
                timeInfo[masterID] = (timeStamp, startDT, endDT)

        else:
            badIDs.append(masterID)

        #### Set Progress ####
        c += 1
        ARCPY.SetProgressorPosition()

    #### Clean Up ####
    del rows

    #### Get Set of Bad IDs ####
    numBadObs = len(badIDs)
    badIDs = list(set(badIDs))
    badIDs.sort()
    badIDs = [ str(i) for i in badIDs ]
    
    #### Process any bad records encountered ####
    if numBadObs:
        ERROR.reportBadRecords(cnt, numBadObs, badIDs, label = masterField)

    #### Load Neighbor Table ####
    gaTable, gaInfo = WU.gaTable(ssdo.inputFC, 
                                 fieldNames = [masterField, timeField],
                                 spatRef = ssdo.spatialRefString)
    numObs = len(gaTable)
    xyCoords = xyCoords[0:numObs]

    #### Set the Distance Threshold ####
    concept, gaConcept = WU.validateDistanceMethod(concept, ssdo.spatialRef)
    if threshold == None:
        #### Set Progressor for Search ####
        ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84144))

        #### Create k-Nearest Neighbor Search Type ####
        gaSearch = GAPY.ga_nsearch(gaTable)
        gaSearch.init_nearest(0.0, 1, gaConcept)
        neighDist = ARC._ss.NeighborDistances(gaTable, gaSearch)
        N = len(neighDist)
        threshold = 0.0
        sumDist = 0.0 

        #### Find Maximum Nearest Neighbor Distance ####
        for row in xrange(N):
            dij = neighDist[row][-1][0]
            if dij > threshold:
                threshold = dij
            sumDist += dij

            ARCPY.SetProgressorPosition()

        #### Increase For Rounding Error ####
        threshold = threshold * 1.0001
        avgDist = sumDist / (N * 1.0)

        #### Add Linear/Angular Units ####
        thresholdStr = ssdo.distanceInfo.printDistance(threshold)
        ARCPY.AddIDMessage("Warning", 853, thresholdStr)

        #### Chordal Default Check ####
        if ssdo.useChordal:
            hardMaxExtent = ARC._ss.get_max_gcs_distance(ssdo.spatialRef)
            if threshold > hardMaxExtent:
                ARCPY.AddIDMessage("ERROR", 1609)
                raise SystemExit()

        #### Clean Up ####
        del gaSearch

    #### Create Missing SSDO Info ####
    extent = UTILS.resetExtent(xyCoords)

    #### Reset Coordinates for Chordal ####
    if ssdo.useChordal:
        sliceInfo = UTILS.SpheroidSlice(extent, ssdo.spatialRef)
        maxExtent = sliceInfo.maxExtent
    else:
        env = UTILS.Envelope(extent)
        maxExtent = env.maxExtent

    threshold = checkDistanceThresholdSWM(ssdo, threshold, maxExtent)
    
    #### Set Default Progressor for Neighborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Create Distance Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(threshold, 0, gaConcept)
    neighSearch = ARC._ss.NeighborSearch(gaTable, gaSearch)

    #### Set Progressor for Weights Writing ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84127), 0, numObs, 1)

    #### Initialize Spatial Weights Matrix File ####
    swmWriter = WU.SWMWriter(swmFile, masterField, ssdo.spatialRefName, 
                             numObs, rowStandard, inputFC = inputFC,
                             wType = 9, distanceMethod = concept,
                             threshold = threshold, timeField = timeField,
                             timeType = timeType, timeValue = timeValue)

    for row in xrange(numObs):
        masterID = gaTable[row][2]

        #### Get Date/Time Info ####
        dt0, startDT0, endDT0 = timeInfo[masterID]

        nhs = neighSearch[row]
        neighs = []
        weights = []
        for nh in nhs:
            #### Search Through Spatial Neighbors ####
            neighID = gaTable[nh][2]

            #### Get Date/Time Info ####
            dt1, startDT1, endDT1 = timeInfo[neighID]

            #### Filter Based on Date/Time ####
            insideTimeWindow = TUTILS.isTimeNeighbor(startDT0, endDT0, dt1)
            if insideTimeWindow:
                neighs.append(neighID)
                weights.append(1.0)

        #### Add Spatial Weights Matrix Entry ####
        swmWriter.swm.writeEntry(masterID, neighs, weights) 

        #### Set Progress ####
        ARCPY.SetProgressorPosition()

    swmWriter.close()
    del gaTable

    #### Report Warning/Max Neighbors ####
    swmWriter.reportNeighInfo()

    #### Report Spatial Weights Summary ####
    swmWriter.report()

    #### Report SWM File is Large ####
    swmWriter.reportLargeSWM()
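# A minimal usage sketch for spaceTime2SWM above (hypothetical paths, field names,
# and assumed date-time interval keyword; relies on this script's ArcPy-based imports).
if __name__ == "__main__":
    spaceTime2SWM(r"C:\data\crimes.shp", r"C:\data\crimes_st.swm", "MYID",
                  concept = "EUCLIDEAN", threshold = None, rowStandard = True,
                  timeField = "INCIDENT_DT", timeType = "DAYS", timeValue = 7.0)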
Ejemplo n.º 16
0
def collectEvents(ssdo, outputFC):
    """This utility converts event data into weighted point data by
    dissolving all coincident points into unique points with a new count
    field that contains the number of original features at that
    location.

    INPUTS: 
    ssdo (obj): instantiated SSDataObject for the input feature class
    outputFC (str): path to the output feature class
    """

    #### Set Default Progressor for Neighborhood Structure ####
    ARCPY.SetProgressor("default", ARCPY.GetIDMessage(84143))

    #### Validate Output Workspace ####
    ERROR.checkOutputPath(outputFC)

    #### True Centroid Warning For Non-Point FCs ####
    if ssdo.shapeType.upper() != "POINT":
        ARCPY.AddIDMessage("WARNING", 1021)

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.inputFC, spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    cnt = UTILS.getCount(ssdo.inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 4)
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs = 4)

    #### Process Any Bad Records Encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        if not ssdo.silentWarnings:
            ERROR.reportBadRecords(cnt, numBadRecs, badRecs,
                                   label = ssdo.oidName)

    #### Create k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(0.0, 0, "euclidean")

    #### Create Output Feature Class ####
    outPath, outName = OS.path.split(outputFC)
    try:
        DM.CreateFeatureclass(outPath, outName, "POINT", "", ssdo.mFlag, 
                              ssdo.zFlag, ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Add Count Field ####
    countFieldNameOut = ARCPY.ValidateFieldName(countFieldName, outPath)
    UTILS.addEmptyField(outputFC, countFieldNameOut, "LONG")
    fieldList = ["SHAPE@", countFieldNameOut]

    #### Set Insert Cursor ####
    rowsOut = DA.InsertCursor(outputFC, fieldList)

    #### Set Progressor for Calculation ####
    ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84007), 0, N, 1)

    #### ID List to Search ####
    rowsIN = range(N)
    maxCount = 0
    numUnique = 0

    for row in rowsIN:
        #### Get Row Coords ####
        rowInfo = gaTable[row]
        x0, y0 = rowInfo[1]
        count = 1

        #### Search For Exact Coord Match ####
        gaSearch.search_by_idx(row)
        for nh in gaSearch:
            count += 1
            rowsIN.remove(nh.idx)
            ARCPY.SetProgressorPosition()

        #### Keep Track of Max Count ####
        maxCount = max([count, maxCount])
        
        #### Create Output Point ####
        pnt = (x0, y0, ssdo.defaultZ)

        #### Create and Populate New Feature ####
        rowResult = [pnt, count]
        rowsOut.insertRow(rowResult)
        numUnique += 1
        ARCPY.SetProgressorPosition()
    
    #### Clean Up ####
    del rowsOut, gaTable

    return countFieldNameOut, maxCount, N, numUnique
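# A minimal usage sketch for collectEvents above (hypothetical paths; the SSDataObject
# construction mirrors the other examples and assumes the same module imports).
if __name__ == "__main__":
    ssdo = SSDO.SSDataObject(r"C:\data\incidents.shp", useChordal = True)
    countFieldOut, maxCount, numObs, numUnique = collectEvents(
        ssdo, r"C:\data\incidents_collected.shp")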
Ejemplo n.º 17
0
    def __init__(self,
                 inputFC,
                 outputFC=None,
                 caseField=None,
                 orientationOnly=False):

        #### Create SSDataObject ####
        ssdo = SSDO.SSDataObject(inputFC,
                                 templateFC=outputFC,
                                 useChordal=False)
        cnt = UTILS.getCount(inputFC)
        ERROR.errorNumberOfObs(cnt, minNumObs=1)
        fieldList = [ssdo.oidName, "SHAPE@"]
        caseIsString = False
        if caseField:
            fieldList.append(caseField)
            caseType = ssdo.allFields[caseField].type.upper()
            caseIsString = caseType == "STRING"

        #### Initialize Accounting Structures ####
        xyLenVals = {}
        sinCosVals = {}

        #### Open Search Cursor ####
        try:
            rows = DA.SearchCursor(inputFC, fieldList, "",
                                   ssdo.spatialRefString)
        except:
            ARCPY.AddIDMessage("ERROR", 204)
            raise SystemExit()

        #### Keep track of Invalid Fields ####
        badIDs = []
        badLengths = []
        badRecord = False
        negativeWeights = False

        #### Create Progressor ####
        ARCPY.SetProgressor("step", ARCPY.GetIDMessage(84001), 0, cnt, 1)

        for row in rows:
            OID = row[0]
            shapeInfo = row[1]
            badRow = row.count(None)
            try:
                centroidInfo = shapeInfo.trueCentroid
                xVal = centroidInfo.X
                yVal = centroidInfo.Y
                length = float(shapeInfo.length)
                firstPoint = shapeInfo.firstPoint
                lastPoint = shapeInfo.lastPoint
                if firstPoint == lastPoint:
                    badLengths.append(OID)
                    badRow = True
                else:
                    firstX = float(firstPoint.X)
                    firstY = float(firstPoint.Y)
                    lastX = float(lastPoint.X)
                    lastY = float(lastPoint.Y)
            except:
                badRow = True

            #### Process Good Records ####
            if not badRow:
                #### Case Field ####
                caseVal = "ALL"
                if caseField:
                    caseVal = UTILS.caseValue2Print(row[2], caseIsString)

                #### Get Angle ####
                numer = lastX - firstX
                denom = lastY - firstY
                angle = UTILS.getAngle(numer, denom)

                #### Adjust for Orientation Only ####
                if orientationOnly:
                    angle2Degree = UTILS.convert2Degree(angle)
                    if angle2Degree < 180:
                        numer = firstX - lastX
                        denom = firstY - lastY
                        angle = UTILS.getAngle(numer, denom)

                sinVal = NUM.sin(angle)
                cosVal = NUM.cos(angle)

                xyLenVal = (xVal, yVal, length)
                sinCosVal = (sinVal, cosVal)

                try:
                    xyLenVals[caseVal].append(xyLenVal)
                    sinCosVals[caseVal].append(sinCosVal)
                except:
                    xyLenVals[caseVal] = [xyLenVal]
                    sinCosVals[caseVal] = [sinCosVal]

            else:
                #### Bad Record ####
                badRecord = True
                badIDs.append(OID)

            ARCPY.SetProgressorPosition()

        del rows

        #### Get Set of Bad IDs ####
        badIDs = list(set(badIDs))
        badIDs.sort()
        badIDs = [str(i) for i in badIDs]

        #### Process any bad records encountered ####
        bn = len(badIDs)
        if badRecord:
            err = ERROR.reportBadRecords(cnt, bn, badIDs, label=ssdo.oidName)

        #### Error For Not Enough Observations ####
        goodRecs = cnt - bn
        ERROR.errorNumberOfObs(goodRecs, minNumObs=1)

        #### Report Features With No Length ####
        badLengths = list(set(badLengths))
        badLengths.sort()
        badLengths = [str(i) for i in badLengths]
        numBadLengths = len(badLengths)
        if numBadLengths > 0:
            ERROR.reportBadLengths(cnt,
                                   numBadLengths,
                                   badLengths,
                                   label=ssdo.oidName)

        #### Set up for Bad Cases ####
        badCases = []
        cases = xyLenVals.keys()
        meanCenter = {}
        dm = {}

        #### Calculate Mean Center and Standard Distance ####
        for case in cases:
            xyLens = xyLenVals[case]
            numFeatures = len(xyLens)
            if numFeatures > 0:
                #### Mean Centers and Lengths ####
                xyLens = NUM.array(xyLens)
                meanX, meanY, meanL = NUM.mean(xyLens, 0)

                #### Sum Sin and Cos ####
                scVals = NUM.array(sinCosVals[case])
                sumSin, sumCos = NUM.sum(scVals, 0)

                #### Calculate Angle ####
                radianAngle = UTILS.getAngle(sumSin, sumCos)
                degreeAngle = UTILS.convert2Degree(radianAngle)

                #### Get Start and End Points ####
                halfMeanLen = meanL / 2.0
                endX = (halfMeanLen * NUM.sin(radianAngle)) + meanX
                startX = (2.0 * meanX) - endX
                endY = (halfMeanLen * NUM.cos(radianAngle)) + meanY
                startY = (2.0 * meanY) - endY
                unstandardized = NUM.sqrt(sumSin**2.0 + sumCos**2.0)
                circVar = 1.0 - (unstandardized / (numFeatures * 1.0))

                #### Re-adjust Angle Back towards North ####
                if orientationOnly:
                    degreeAngle = degreeAngle - 180.0
                    radianAngle = UTILS.convert2Radians(degreeAngle)

                #### Populate Results Structure ####
                meanCenter[case] = (meanX, meanY)
                dm[case] = [(startX, startY), (endX, endY), meanL, radianAngle,
                            degreeAngle, circVar]

        #### Sorted Case List ####
        caseKeys = dm.keys()
        caseKeys.sort()
        self.caseKeys = caseKeys

        #### Set Attributes ####
        self.ssdo = ssdo
        self.meanCenter = meanCenter
        self.dm = dm
        self.badCases = badCases
        self.inputFC = inputFC
        self.outputFC = outputFC
        self.caseField = caseField
        self.orientationOnly = orientationOnly
        self.caseIsString = caseIsString
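# A self-contained sketch (standard circular statistics, not the class code above) of
# how the summed sine/cosine components yield the mean direction and the circular
# variance stored in dm[case]: circVar = 1 - R / n, where R is the resultant length.
import math

bearings = [10.0, 20.0, 30.0]                       # directions in degrees
sumSin = sum(math.sin(math.radians(b)) for b in bearings)
sumCos = sum(math.cos(math.radians(b)) for b in bearings)
meanDir = math.degrees(math.atan2(sumSin, sumCos))  # ~20.0 degrees
resultant = math.hypot(sumSin, sumCos)
circVar = 1.0 - resultant / len(bearings)           # near 0.0: directions closely aligned
print(meanDir, circVar)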
Ejemplo n.º 18
0
def stCollectByKNN(ssdo, timeField, outputFC, inSpan, inDistance):
    """
    This method applies Jacquez Space-Time K-NN to convert event data into weighted
    point data by dissolving all coincident points in space and time into unique
    points with a new count field that contains the number of original features
    at that location and time span.

    INPUTS:
        ssdo (obj): SSDataObject from input
        timeField (str): Date/Time field name in input feature
        outputFC (str): path to the output feature class
        inSpan (int): number of temporal units grouped into one time bin
        inDistance (float): distance within which features are treated as spatial neighbors
    OUTPUTS:
        Create new collected point feature

    """
    #### Read raw time data ####
    timeData = ssdo.fields[timeField].data
    #### Convert temporal unit ####
    time = NUM.array(timeData, dtype = 'datetime64[s]').astype('datetime64[D]')
    #### Find Start Time ####
    startTime = time.min()
    #### Create Bin for Space and Time ####
    timeBin = (time - startTime) / inSpan

    numObs = ssdo.numObs
    #### Create Pseudo-FID to Find K-NN in Space and Time ####
    fid = [i for i in xrange(numObs)]

    #### Validate Output Workspace ####
    ERROR.checkOutputPath(outputFC)

    #### True Centroid Warning For Non-Point FCs ####
    if ssdo.shapeType.upper() != "POINT":
        ARCPY.AddIDMessage("WARNING", 1021)

    #### Create GA Data Structure ####
    gaTable, gaInfo = WU.gaTable(ssdo.inputFC, spatRef = ssdo.spatialRefString)

    #### Assure Enough Observations ####
    cnt = UTILS.getCount(ssdo.inputFC)
    ERROR.errorNumberOfObs(cnt, minNumObs = 4)
    N = gaInfo[0]
    ERROR.errorNumberOfObs(N, minNumObs = 4)

    #### Process Any Bad Records Encountered ####
    numBadRecs = cnt - N
    if numBadRecs:
        badRecs = WU.parseGAWarnings(gaTable.warnings)
        if not ssdo.silentWarnings:
            ERROR.reportBadRecords(cnt, numBadRecs, badRecs, label = ssdo.oidName)

    #### Create Output Feature Class ####
    outPath, outName = OS.path.split(outputFC)
    try:
        DM.CreateFeatureclass(outPath, outName, "POINT", "", ssdo.mFlag,
                              ssdo.zFlag, ssdo.spatialRefString)
    except:
        ARCPY.AddIDMessage("ERROR", 210, outputFC)
        raise SystemExit()

    #### Create k-Nearest Neighbor Search Type ####
    gaSearch = GAPY.ga_nsearch(gaTable)
    gaSearch.init_nearest(inDistance, 0, "euclidean")

    #### Add Count Field ####
    countFieldNameOut = ARCPY.ValidateFieldName(countFieldName, outPath)
    timeFieldNameOut = ARCPY.ValidateFieldName(timeFieldName, outPath)
    UTILS.addEmptyField(outputFC, countFieldNameOut, "LONG")
    UTILS.addEmptyField(outputFC, timeFieldNameOut, "DATE")
    fieldList = ["SHAPE@", countFieldNameOut, timeFieldNameOut]

    #### Set Insert Cursor ####
    rowsOut = DA.InsertCursor(outputFC, fieldList)

    #### Detect S-T K-NN by Space and Time Bin ####
    duplicateList = []
    for record in fid:
        kNNList = [record]
        if record not in duplicateList:
            #### Keep Spatial Neighbors That Fall in the Same Time Bin ####
            gaSearch.search_by_idx(record)
            for nh in gaSearch:
                if timeBin[record] == timeBin[nh.idx]:
                    kNNList.append(nh.idx)
                    duplicateList.append(nh.idx)
            #### Create and Populate New Feature ####
            kNNList = list(set(kNNList))
            count = len(kNNList)
            dt = time[record]
            x0 = ssdo.xyCoords[kNNList, 0].mean()
            y0 = ssdo.xyCoords[kNNList, 1].mean()
            pnt = (x0, y0, ssdo.defaultZ)
            rowResult = [pnt, count, dt]
            rowsOut.insertRow(rowResult)
            ARCPY.SetProgressorPosition()

    #### Clean Up ####
    del rowsOut, timeBin, kNNList, duplicateList

    return countFieldNameOut
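# A minimal usage sketch for stCollectByKNN above (hypothetical paths, field name, and
# parameter values; assumes the SSDataObject already has the Date/Time field loaded
# into ssdo.fields and that this script's ArcPy-based imports are available).
if __name__ == "__main__":
    ssdo = SSDO.SSDataObject(r"C:\data\crimes.shp")
    # ... populate ssdo so that ssdo.fields["INCIDENT_DT"] is available ...
    countFieldOut = stCollectByKNN(ssdo, "INCIDENT_DT",
                                   r"C:\data\crimes_st_collected.shp",
                                   inSpan = 7, inDistance = 500.0)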