Example #1
0
    def test_siteDownstreamOnSegment(self):
        """A downstream search starting mid-segment finds the nearest
        downstream site and reports the distance to it."""
        graph = StreamGraph()
        upperNode = graph.addNode((0, 0))
        lowerNode = graph.addNode((0, -1))
        segment = graph.addSegment(upperNode, lowerNode, "1", 1, 1, 1)
        #three sites on the same segment; only site2 (at 0.9) is the
        #nearest one downstream of position 0.5
        for siteID, position in [("site1", 0.1), ("site2", 0.9), ("site3", 1)]:
            graph.addSite(siteID, "1", position)

        result = StreamGraphNavigator(graph).getNextDownstreamSite(segment, 0.5)

        #result is (siteID, distance from the query point)
        self.assertEqual(result[0], "site2")
        self.assertEqual(result[1], 0.4)
Example #2
0
    def test_findUpstreamSiteWithBacktrack(self):
        """An upstream search on a tributary backtracks through the junction
        and finds a site located on the neighboring path."""
        graph = StreamGraph()
        tribSource = graph.addNode((0, 0))
        outlet = graph.addNode((0, -1))
        mainSource = graph.addNode((1, 0))

        #this segment is a tributary of the path formed by the second segment
        tribSegment = graph.addSegment(tribSource, outlet, "1", 1, 2, 1)
        graph.addSegment(mainSource, outlet, "2", 1, 1, 1)
        graph.addSite("site1", "2", 0.2)

        #mark the region as safely inside loaded data so no expansion happens
        graph.safeDataBoundary.append(DataBoundary(point=(0, 0), radius=10))

        result = StreamGraphNavigator(graph).getNextUpstreamSite(
            tribSegment, 0.5)

        #result is (siteID, distance); 1.3 = 0.5 down the trib + 0.8 up the
        #neighboring segment to the site
        self.assertEqual(result[0], "site1")
        self.assertEqual(result[1], 1.3)
Example #3
0
def getSiteSnapAssignment (graph):
    """Choose a single snap assignment for every site on the graph.

    Each site may have several candidate snap positions ("snaps"). For every
    sink of the graph, the upstream network is traversed and the candidates
    for each site ID are ranked: first by how many site-ID ordering
    violations the choice would force ("order error"), then by a distance
    score, with name matches given priority on ties. One assignment per site
    is chosen, and ordering conflicts between chosen assignments are turned
    into warnings.

    :param graph: stream graph whose ``siteSnaps`` dict maps siteID -> list
        of candidate assignments (index 0 is assumed the most likely snap --
        TODO confirm against SnapSites).
    :return: tuple ``(assignments, warnings)`` where ``assignments`` holds
        exactly one chosen assignment per site and ``warnings`` is a list of
        ``WarningLog.Warning`` objects describing conflicts.
    """
    #a copy of the current graph used to try different possible snap operations
    #NOTE(review): the deep copy is currently disabled; this mutates the
    #original graph's snap state via refreshSiteSnaps below
    testingGraph = graph#copy.deepcopy(graph)#.clone()
    testingGraphNavigator = StreamGraphNavigator(testingGraph)

    #flatten all candidate snaps of all sites into one list
    allSnaps = []
    for snaps in graph.siteSnaps.values():
        allSnaps.extend(snaps)

    #assign all possible snaps of each site to the graph
    testingGraph.refreshSiteSnaps(allSnaps)

    assignments = []
    allRankedChoices = {}
    #pairs of site IDs whose chosen snaps force an out-of-sequence ordering;
    #each pair is stored with the larger ID first so it is only added once
    siteConflicts = set()

    def addAssignment (siteAssignment):
        #Add siteAssignment to 'assignments', keeping at most one assignment
        #per site ID (the one with the smallest snapDist wins).
        alreadyContainedAssignment = False
        for i, assignment in enumerate(assignments):
            #if we find a match
            if assignment.siteID == siteAssignment.siteID:
                #and the newly added assignment is better than the original
                if siteAssignment.snapDist < assignment.snapDist:
                    #then replace
                    assignments[i] = siteAssignment
                elif __debug__:
                    print("tried to add a second assignment")
                #at this point, we've either replaced, or not since our current assignment is worse
                return
        #if we reach this line then we don't have an assignment for this ID yet. Add one
        assignments.append(siteAssignment)

    def getSiteIndexRange (siteID, sites):
        #Return (firstIndex, lastIndex) of occurrences of siteID in 'sites',
        #or (-1, -1) if the ID does not appear at all.
        firstIndex = -1
        lastIndex = -1

        for i, site in enumerate(sites):
            if site.siteID == siteID:
                if firstIndex == -1:
                    firstIndex = i
                lastIndex = i
        return (firstIndex, lastIndex)

    def getBestRankedChoice (rankedChoices):
        #Return the ranked choice with the lowest order error. Ties on order
        #error are broken by a smaller distance score, except that a
        #non-name-matching choice never displaces a name-matching one.
        #Returns None when rankedChoices is empty.
        minOrderError = sys.maxsize
        bestScoreChoice = None
        #find the choice that minimize ordering error
        for choice in rankedChoices:
            orderError = choice[1]
            distanceScore = choice[2]
            nameMatch = choice[4]
            #if we find a better order error, always choose this option
            if orderError < minOrderError:
                bestScoreChoice = choice
                minOrderError = orderError
            elif orderError == minOrderError:
                #if we find an equal order error but smaller dist score choice, choose it
                bestDistScore = bestScoreChoice[2]
                bestScoreNameMatch = bestScoreChoice[4]
                # if this dist is better than previous
                # AND either this choice is a name match or the previous best isn't either
                if distanceScore < bestDistScore and (nameMatch or (not nameMatch and not bestScoreNameMatch)):
                    bestScoreChoice = choice
        return bestScoreChoice

    sinks = graph.getSinks()
    for sink in sinks:
        upstreamPaths = sink.getUpstreamNeighbors()
        for path in upstreamPaths:
            #collect every candidate snap upstream of this path in traversal
            #order, without expanding the graph with new queries
            upstreamSitesInfo = testingGraphNavigator.collectSortedUpstreamSites(path, path.length, siteLimit = sys.maxsize, autoExpand = False)[0]
            #trim the extra distance info off of the results. Not needed
            upstreamSites = [siteInfo[0] for siteInfo in upstreamSitesInfo]

            #for each site ID, the (first, last) index where it appears in
            #the traversal
            siteIndexRanges = {}
            for site in upstreamSites:
                siteID = site.siteID
                if siteID not in siteIndexRanges:
                    firstOccuranceIdx, lastOccuranceIdx = getSiteIndexRange(siteID, upstreamSites)
                    siteIndexRanges[siteID] = (firstOccuranceIdx, lastOccuranceIdx)

            #count all unique sites found on this branch. List them in order of appearance
            uniqueOrderedIDs = []
            for i, site in enumerate(upstreamSites):
                siteID = site.siteID
                if siteID not in uniqueOrderedIDs:
                    uniqueOrderedIDs.append(siteID)
            #then re-sort so the largest IDs get resolved first; the
            #resolved/unresolved comparison logic below relies on this order
            uniqueOrderedIDs = sorted(uniqueOrderedIDs, key=lambda site: int(Helpers.getFullID(site)), reverse=True)
            #list of sites that have already been chosen on this branch
            resolvedSites = dict()

            for orderedIdx, siteID in enumerate(uniqueOrderedIDs):
                firstOccuranceIdx, lastOccuranceIdx = siteIndexRanges[siteID]

                #get a list of possible assignments for this ID
                #Here, a choice is a tuple (assignment, index)
                siteChoices = []

                #Here, a ranked choice is a tuple
                #(assignment, orderError, distScore, upstreamSitesIdx, nameMatch, orderConflicts)
                rankedChoices = []

                for i in range(firstOccuranceIdx, lastOccuranceIdx+1):
                    if upstreamSites[i].siteID == siteID:
                        siteChoices.append((upstreamSites[i], i))

                for choice in siteChoices:
                    assignment = choice[0]
                    upstreamSitesIdx = choice[1] #the index of this site in the list 'upstreamSites'
                    orderError = 0
                    distanceScore = 0
                    nameMatch = assignment.nameMatch
                    #siteIDs that this assignment will force an out of sequence snap for
                    orderConflicts = []
                    #calculate the order error for this choice
                    for i in range(0, len(uniqueOrderedIDs)):
                        cmpSiteID = uniqueOrderedIDs[i]
                        #the case when we are comparing to a site thats been resolved
                        #is different than the case when a site we compare to is unresolved 

                        #if the cmp site is unresolved, we are looking to see if this site's choice 
                        #will force an order error for site that has yet to be chosen

                        #if the cmp site is resolved, we are looking to see if this site's choice
                        #conflicts with the choice ALREADY made for the cmp site

                        if cmpSiteID in resolvedSites:
                            #index 3 of the ranked choice tuple is its upstream sites index
                            resolvedCmpUpstreamSitesIdx = resolvedSites[cmpSiteID][3]
                            #if this cmp site is resolved it must be a larger ID than us because
                            #sites are resolved in decending order of their IDs
                            if upstreamSitesIdx < resolvedCmpUpstreamSitesIdx:
                                orderError += 1
                                orderConflicts.append(cmpSiteID)
                        else:
                            cmpFirstOccuranceIdx, cmpLastOccuranceIdx = siteIndexRanges[cmpSiteID]
                            compare = Helpers.siteIDCompare(assignment.siteID, cmpSiteID)
                            #moving forward, if I choose this choice, will I cut off all the assignments for any remaining sites?
                            if cmpLastOccuranceIdx < upstreamSitesIdx and compare > 0:
                                # by choosing this choice, I'm stranding the the last snap choice 
                                # of a site with a lower ID than us downstream from us. 
                                orderError += 1
                                orderConflicts.append(cmpSiteID)
                            if cmpFirstOccuranceIdx > upstreamSitesIdx and compare < 0:
                                # by choosing this choice, I'm stranding all of the snap options 
                                # for cmpSite upstream from our current choice even though 
                                # cmpSiteID is higher than us 
                                orderError += 1
                                orderConflicts.append(cmpSiteID)
                    #get list of sites involved in the outcome of this sites snap choice
                    #this is all site IDs that have a snap choice that appears between the first instance of the 
                    #current site id and the last instance in the traversal 
                    involvedSites = set()
                    for i in range(firstOccuranceIdx+1, lastOccuranceIdx):
                        if upstreamSites[i].siteID != siteID and upstreamSites[i].siteID not in resolvedSites:
                            involvedSites.add(upstreamSites[i].siteID)

                    #for all sites that are 'involved' (appear between the first and last occurance index of the current site),
                    #find the best nearest possible distance allowed if we choose this assignment
                    minDistOfInvolved = {}

                    # by starting this loop at the index of the choice,
                    # we won't get snap options of this involved site that occur before the index of the current 
                    # choice. This is because if we choose this choice, anything before it on the traversal can't be chosen anymore
                    # if there are no instances of an involved site that occur after this choice, it won't be counted
                    # But, then that should trigger an increase in order error.
                    # since order error is taken as higher priority than distance, the fact we don't
                    # count up the distance for the missing site shouldn't be an issue
                    for i in range(upstreamSitesIdx, len(upstreamSites)):
                        involvedID = upstreamSites[i].siteID
                        #check if this site is truely an involved site
                        if involvedID in involvedSites:
                            #keep the smallest snap distance still available for this involved site
                            if involvedID in minDistOfInvolved:
                                minDistOfInvolved[involvedID] = min(minDistOfInvolved[involvedID], upstreamSites[i].snapDist)
                            else:
                                minDistOfInvolved[involvedID] = upstreamSites[i].snapDist

                    # the total snap distance must be inceased by the snapDist of this choice
                    distanceScore += assignment.snapDist
                    # and it is increased at MINIMUM by the best choices remaining for other involved sites
                    for minDist in minDistOfInvolved.values():
                        distanceScore += minDist

                    rankedChoices.append((assignment, orderError, distanceScore, upstreamSitesIdx, nameMatch, orderConflicts))

                bestScoreChoice = getBestRankedChoice(rankedChoices)
                resolvedSites[siteID] = bestScoreChoice

                if siteID in allRankedChoices:
                    #catch case when a site gets snapped onto two networks 
                    #later on we choose which network has the best fit
                    allRankedChoices[siteID].append(bestScoreChoice)
                else:
                    allRankedChoices[siteID] = [bestScoreChoice]

    #choose an assignment from the best picked ranked choices
    #in almost all cases, there will only be one ranked choice to choose from
    #there will only be two if the site had possible snaps on networks with different sinks
    for choices in allRankedChoices.values():
        bestRankedChoice = getBestRankedChoice(choices)
        assignment = bestRankedChoice[0]
        addAssignment(assignment)

        # for each conflict forced by this choice, add a conflict to the total list going 
        # in both directions (a conflicts with b AND b conflicts with a)
        for conflictingSite in bestRankedChoice[5]:
            conflictingCmp = Helpers.siteIDCompare(conflictingSite, assignment.siteID)
            #make sure we put the larger ID first so that if this pair appears again we don't add it again (bc we use a set)
            if conflictingCmp > 0:
                siteConflicts.add((conflictingSite, assignment.siteID))
            else:
                siteConflicts.add((assignment.siteID, conflictingSite))

        if bestRankedChoice[1] > 0 and __debug__:
            print("adding " + assignment.siteID + " with " + str(bestRankedChoice[1]) + " order error:")
            for conflictingSite in bestRankedChoice[5]:
                print("\t conflicts with " + conflictingSite)      

    #verify that all site IDs are accounted for
    #this code should never really have to run
    accountedForSiteIDs = set()
    for assignment in assignments:
        accountedForSiteIDs.add(assignment.siteID)

    for siteID in graph.siteSnaps:
        if siteID not in accountedForSiteIDs:
            if __debug__:
                print("missing site! adding in post: " + str(siteID))
            #add the most likely snap for this site
            assignments.append(graph.siteSnaps[siteID][0])

    #keep track of which sites we think are causing the conflicts
    atFaultSites = []
    atFaultPairs = []
    #store all sites that may be involved in a conflict
    allImplicatedSites = set()

    #greedily attribute conflicts: repeatedly pick the site appearing in the
    #most conflicts, blame it, and remove all conflicts it participates in
    while len(siteConflicts) > 0:
        #count which sites appear in the most number of conflicts
        siteConflictCounts = dict((siteID, 0) for siteID in graph.siteSnaps)
        mostConflicts = 0
        mostConflictingSite = None

        for conflict in siteConflicts:
            #a conflict is between two sites
            conflictA = conflict[0]
            conflictB = conflict[1]

            siteConflictCounts[conflictA] += 1 
            siteConflictCounts[conflictB] += 1

            if siteConflictCounts[conflictA] > mostConflicts:
                mostConflicts = siteConflictCounts[conflictA]
                mostConflictingSite = conflictA
            
            if siteConflictCounts[conflictB] > mostConflicts:
                mostConflicts = siteConflictCounts[conflictB]
                mostConflictingSite = conflictB
        
        #catch cases when sites conflict with eachother equally and fixing either would remove issues
        
        if mostConflicts == 1:
            #find the conflict pair that caused this conflict
            for conflict in siteConflicts:
                conflictA = conflict[0]
                conflictB = conflict[1]

                if conflictA == mostConflictingSite or conflictB == mostConflictingSite:
                    atFaultPairs.append((conflictA, conflictB))
                    allImplicatedSites.add(conflictA)
                    allImplicatedSites.add(conflictB)
                    break
        else:
            #remove this conflict and keep track of it as a problem site
            atFaultSites.append((mostConflictingSite, mostConflicts))
            allImplicatedSites.add(mostConflictingSite)

        #iterate over a copy since we remove entries from the set
        siteConflictsCpy = siteConflicts.copy()
        for conflict in siteConflictsCpy:
            #a conflict is between two sites
            conflictA = conflict[0]
            conflictB = conflict[1]

            if conflictA == mostConflictingSite or conflictB == mostConflictingSite:
                siteConflicts.remove(conflict)
    #reset warnings in this category so they don't build up

    warnings = []
    assignmentWarnings = []

    for faultySite in atFaultSites:
        faultySiteID = faultySite[0]
        faultySiteConflictCount = faultySite[1]
        message = str(faultySiteID) + " conflicts with " + str(faultySiteConflictCount) + " other sites. Consider changing this site's ID"
        warnings.append(WarningLog.Warning(priority=WarningLog.MED_PRIORITY, message=message))

    for faultyPair in atFaultPairs:
        pairA = str(faultyPair[0])
        pairB = str(faultyPair[1])
        message = pairA + " conflicts with " + pairB + ". Consider changing the site ID of one of these two sites"
        warnings.append(WarningLog.Warning(priority=WarningLog.MED_PRIORITY, message=message))

    #finally, assign any warning to the site itself
    for assignment in assignments:
        assignmentID = assignment.siteID
        if assignmentID in allImplicatedSites:
            message = str(assignmentID) + " is involved in a site conflict. See story/medium priority warnings for conflict details."
            warning = WarningLog.Warning(WarningLog.HIGH_PRIORITY, message)
            assignment.assignmentWarnings.clear()
            assignment.assignmentWarnings.append(warning)

    return (assignments, warnings)
Example #4
0
    def getSiteNameContext(self):
        """Build up a dict containing contextual information to generate site names.

        The finished context object is cached on the SiteInfoCreator instance
        (``self.context``) and returned directly on subsequent calls. A
        failure code is returned instead when the base data is a failure code
        or the query point cannot be snapped to a stream segment.

        Keys produced: "streamName", "distanceToPlace", "state", "placeName",
        "lat", "long", "contextualPlaces", "source", "mouth".
        """
        lat = self.lat
        lng = self.lng
        streamGraph = self.streamGraph
        baseData = self.baseData

        if Failures.isFailureCode(baseData):
            return baseData

        #return the cached context if it was already built
        if self.context is not None:
            return self.context
        context = {}
        #point is (x, y) order, so longitude comes first
        point = (lng, lat)
        snapablePoint = SnapablePoint(point=point, name="", id="")
        snapInfo = snapPoint(snapablePoint, baseData,
                             snapCutoff=1)  #get the most likely snap

        if Failures.isFailureCode(snapInfo):
            return snapInfo

        feature = snapInfo[0].feature

        segmentID = str(feature["properties"]["OBJECTID"])

        distAlongSegment = snapInfo[0].distAlongFeature
        #get the segment ID of the snapped segment
        graphSegment = streamGraph.getCleanedSegment(segmentID)

        navigator = StreamGraphNavigator(streamGraph)

        #next lower stream-level path downstream; used both for naming an
        #unnamed stream as a tributary and for the mouth descriptor below
        downstreamSegment = navigator.findNextLowerStreamLevelPath(
            graphSegment,
            downStreamPositionOnSegment=distAlongSegment,
            expand=False)

        streamName = graphSegment.streamName
        if streamName == "":
            #unnamed stream: name it as a tributary of the downstream stream
            #when that name is known; otherwise leave a placeholder
            if not Failures.isFailureCode(
                    downstreamSegment
            ) and downstreamSegment[0].streamName != "":
                context["streamName"] = downstreamSegment[
                    0].streamName + " tributary"
            else:
                context["streamName"] = "(INSERT STREAM NAME)"
        else:
            context["streamName"] = streamName

        placeInfo = GDALData.getNearestPlace(lat, lng)
        if Failures.isFailureCode(placeInfo):
            #fall back to placeholders rather than failing the whole context
            context["distanceToPlace"] = -1
            context["state"] = "unknown"
            context["placeName"] = "unknown"
        else:
            context["distanceToPlace"] = placeInfo["distanceToPlace"]
            context["state"] = placeInfo["state"]
            context["placeName"] = placeInfo["placeName"]
        context["lat"] = lat
        context["long"] = lng

        #nearby named landmarks (bridges, tributary mouths) usable in a name
        contextualPlaces = []

        bridges = GDALData.getNearestBridges(lat, lng)
        namedTribMouths = navigator.getNamedTribMouths()
        if not Failures.isFailureCode(bridges):
            contextualPlaces.extend([{
                "name": contextBridge.name,
                "point": contextBridge.point,
                "distance": contextBridge.distance
            } for contextBridge in bridges])
        if not Failures.isFailureCode(namedTribMouths):
            contextualPlaces.extend([{
                "name":
                mouth[0],
                "point":
                mouth[1],
                "distance":
                Helpers.degDistance(mouth[1][0], mouth[1][1], lng, lat)
            } for mouth in namedTribMouths])

        context["contextualPlaces"] = contextualPlaces

        #stream distance upstream of this point: the arbolate sum minus the
        #remaining length of the snapped segment
        upstreamDistance = graphSegment.arbolateSum - (graphSegment.length -
                                                       distAlongSegment)
        #check if we are at a stream mouth
        #distances here appear to be in kilometers (converted via meters) -- TODO confirm
        upstreamDistMiles = Helpers.metersToMiles(upstreamDistance * 1000)
        if upstreamDistMiles < 1:
            context["source"] = "at source"
        elif upstreamDistMiles < 3:
            context["source"] = "near source"
        else:
            context["source"] = ""

        if Failures.isFailureCode(downstreamSegment):
            context["mouth"] = ""
        else:
            #downstreamSegment[1] is the distance to the downstream path
            downstreamDistMiles = Helpers.metersToMiles(downstreamSegment[1] *
                                                        1000)
            #make sure that the mouth distance is less than upstream dist
            #before assigning descriptor. Otherwise, we could have near mouth and near source as option
            #on the same site
            if downstreamDistMiles > upstreamDistMiles:
                context["mouth"] = ""
            else:
                #likewise, if downstream is closer, don't use "at source" type descriptors
                context["source"] = ""
                if downstreamDistMiles < 1:
                    context["mouth"] = "at mouth"
                elif downstreamDistMiles < 3:
                    context["mouth"] = "near mouth"
                else:
                    context["mouth"] = ""

        #cache the finished context for future calls
        self.context = context
        return context
Example #5
0
    def getSiteID(self, useBadSites=True, logWarnings=False):
        """ Get the siteID. Lat and Lng are provided to the constructor.
        
        :param useBadSites: When false, any site that has warnings associated with it will be ignored in calculating the new ID.
        :param logWarnings: Should this request log warnings into the SiteInfoCreator's WarningLog instance? 
        
        :return: A dict {"id": the_id, "story": the_story}"""
        lat = self.lat
        lng = self.lng
        warningLog = self.warningLog
        streamGraph = self.streamGraph
        siteIDManager = self.siteIDManager

        streamGraph.setAssignBadSitesStatus(useBadSites)

        #typically lat/long are switched to fit the x/y order paradigm
        point = (lng, lat)

        story = ""
        newID = ""
        huc = ""

        #create the json that gets resturned
        def getResults(siteID="unknown",
                       story="See warning log",
                       failed=False):
            results = dict()
            results["id"] = Helpers.formatID(siteID)

            snapLatFormatted = Helpers.getFloatTruncated(lat, 7)
            snapLngFormatted = Helpers.getFloatTruncated(lng, 7)
            storyHeader = "Requested site info at " + str(
                snapLatFormatted) + ", " + str(snapLngFormatted) + ". "
            useBadSitesStory = (
                "" if useBadSites else
                "ADONNIS ignored sites with incorrect ID's when calculating the new ID. "
            )
            results["story"] = storyHeader + useBadSitesStory + story
            return results

        if Failures.isFailureCode(self.baseData):
            return getResults(failed=True)

        #snap query point to a segment
        snapablePoint = SnapablePoint(point=point, name="", id="")
        snapInfo = snapPoint(snapablePoint, self.baseData,
                             snapCutoff=1)  #get the most likely snap
        if Failures.isFailureCode(snapInfo):
            if __debug__:
                print("could not snap")
            if logWarnings:
                warningLog.addWarning(WarningLog.HIGH_PRIORITY, snapInfo)
            return getResults(failed=True)

        feature = snapInfo[0].feature
        segmentID = str(feature["properties"]["OBJECTID"])
        distAlongSegment = snapInfo[0].distAlongFeature
        #get the segment ID of the snapped segment
        graphSegment = streamGraph.getCleanedSegment(segmentID)

        snappedPoint = streamGraph.segments[segmentID].getPointOnSegment(
            distAlongSegment)

        if __debug__:
            SnapSites.visualize(self.baseData, [])
            streamGraph.visualize(customPoints=[snappedPoint],
                                  showSegInfo=True)
            streamGraph.visualize(customPoints=[snappedPoint],
                                  showSegInfo=False)

        #build a navigator object
        #we want to terminate the search each time a query happens
        #this allows us to stagger upstream and downstream searches
        #although this means repeating parts of the search multiple times, searching a already constructed
        #graph takes practically no time at all
        navigator = StreamGraphNavigator(streamGraph,
                                         terminateSearchOnQuery=True)

        upstreamSite = None
        downstreamSite = None
        endOfUpstreamNetwork = False
        endOfDownstreamNetwork = False
        secondaryQueries = 0
        primaryQueries = 0

        #each iteration extends the graph by one query worth of data
        # in this step we try to find an upstream and downstream site
        while (
                upstreamSite is None or downstreamSite is None
        ) and secondaryQueries < MAX_SECONDARY_SITE_QUERIES and primaryQueries < MAX_PRIMARY_QUERIES and (
                endOfUpstreamNetwork is False
                or endOfDownstreamNetwork is False):
            if upstreamSite is None and endOfUpstreamNetwork is False:
                #we haven't found upstream yet
                upstreamReturn = navigator.getNextUpstreamSite(
                    graphSegment, distAlongSegment)
                if upstreamReturn == Failures.END_OF_BASIN_CODE:
                    endOfUpstreamNetwork = True
                if Failures.isFailureCode(
                        upstreamReturn
                ) is not True and upstreamReturn is not None:
                    upstreamSite = upstreamReturn

            if downstreamSite is None and endOfDownstreamNetwork is False:
                #we haven't found downstream yet
                downstreamReturn = navigator.getNextDownstreamSite(
                    graphSegment, distAlongSegment)
                if downstreamReturn == Failures.END_OF_BASIN_CODE:
                    endOfDownstreamNetwork = True
                if Failures.isFailureCode(
                        downstreamReturn
                ) is not True and downstreamReturn is not None:
                    downstreamSite = downstreamReturn

            if upstreamSite is not None or downstreamSite is not None:
                #we've found at least one site
                secondaryQueries += 1
            else:
                primaryQueries += 1

        #add warnings from found sites, collect HUC
        if upstreamSite is not None:
            siteAssignment = upstreamSite[0]
            if logWarnings:
                for warning in siteAssignment.generalWarnings:
                    warningLog.addWarningTuple(warning)
                for warning in siteAssignment.assignmentWarnings:
                    warningLog.addWarningTuple(warning)

            huc = siteAssignment.huc

        if downstreamSite is not None:
            siteAssignment = downstreamSite[0]
            if logWarnings:
                for warning in siteAssignment.generalWarnings:
                    warningLog.addWarningTuple(warning)
                for warning in siteAssignment.assignmentWarnings:
                    warningLog.addWarningTuple(warning)

            huc = siteAssignment.huc

        if logWarnings:
            for warning in streamGraph.currentAssignmentWarnings:  #apply warnings from streamGraph
                warningLog.addWarningTuple(warning)

        #handle all combinations of having an upstream site and/or a downstream site (also having neither)

        #~~~~~~~~~~~~~~~~~~~UPSTREAM AND DOWNSTREAM SITES FOUND CASE~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        if upstreamSite is not None and downstreamSite is not None:
            #we have an upstream and downstream

            upstreamSiteID = upstreamSite[0].siteID
            downstreamSiteID = downstreamSite[0].siteID
            partCode = upstreamSiteID[0:2]

            if Helpers.siteIDCompare(downstreamSiteID, upstreamSiteID) < 0:
                message = "The found upstream site is larger than found downstream site. ADONNIS output almost certainly incorrect."
                if logWarnings:
                    warningLog.addWarning(WarningLog.HIGH_PRIORITY, message)

            fullUpstreamSiteID = Helpers.getFullID(upstreamSiteID)
            fullDownstreamSiteID = Helpers.getFullID(downstreamSiteID)

            upstreamSiteIdDsnStr = fullUpstreamSiteID[2:]
            downstreamSiteIdDsnStr = fullDownstreamSiteID[2:]

            #get the downstream number portion of the ID
            upstreamSiteIdDsn = int(upstreamSiteIdDsnStr)
            downstreamSiteIdDsn = int(downstreamSiteIdDsnStr)

            totalAddressSpaceDistance = upstreamSite[1] + downstreamSite[1]
            # Tail of the "found both upstream and downstream site" case
            # (branch header is above this chunk). The new site's ID is a
            # distance-weighted interpolation between the two bounding sites.
            # Fraction of the upstream->downstream address space covered by
            # the distance to the downstream site.
            newSitePercentage = downstreamSite[1] / totalAddressSpaceDistance

            # Linear interpolation of the two downstream sequence numbers.
            # NOTE(review): assumes downstreamSiteIdDsn/upstreamSiteIdDsn and
            # totalAddressSpaceDistance were computed earlier in this branch
            # (out of view here) — verify the weighting direction there.
            newDon = int(downstreamSiteIdDsn * (1 - newSitePercentage) +
                         upstreamSiteIdDsn * newSitePercentage)

            # NOTE(review): "newDon" — the analogous no-sites branch below
            # names the same quantity "newDsn"; consider unifying.
            newID = Helpers.buildFullID(partCode, newDon)
            # beautifyID rounds/cleans the ID while keeping it strictly
            # between the two bounding site IDs.
            newID = self.beautifyID(newID,
                                    downstreamSiteID,
                                    upstreamSiteID,
                                    logWarnings=logWarnings)
            story = "Found a upstream site " + Helpers.formatID(
                upstreamSiteID) + " and a downstream site " + Helpers.formatID(
                    downstreamSiteID
                ) + ". New site is the weighted average of these two sites."

            if __debug__:
                print("found upstream is " + upstreamSiteID)
                print("found downstream is " + downstreamSiteID)
                streamGraph.visualize(customPoints=[snappedPoint])
                SnapSites.visualize(self.baseData, [])

        #~~~~~~~~~~~~~~~~~~~UPSTREAM SITE FOUND ONLY CASE~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        elif upstreamSite is not None:
            # Only an upstream site was found on the network. Derive the new
            # ID by offsetting downstream from that site, optionally bounded
            # by the next sequential downstream site from the site-ID list.
            upstreamSiteID = upstreamSite[0].siteID
            partCode = upstreamSiteID[:2]
            fullUpstreamID = Helpers.getFullID(upstreamSiteID)

            foundSiteNeighbors = siteIDManager.getNeighborIDs(
                upstreamSiteID, huc)
            if Failures.isFailureCode(foundSiteNeighbors):
                # No neighbor info available — proceed without a downstream bound.
                nextSequentialDownstreamSite = None
            else:
                # getNeighborIDs returns (upstream, downstream) neighbors;
                # index 1 is the sequential downstream neighbor.
                nextSequentialDownstreamSite = foundSiteNeighbors[1]

            upstreamSiteDSN = int(fullUpstreamID[2:])
            upstreamSiteDistance = upstreamSite[1]

            #calculate offset. If we have a sequential downstream use that as a bound
            # Reserve roughly one ID slot per MIN_SITE_DISTANCE of stream.
            siteIDOffset = math.ceil(upstreamSiteDistance / MIN_SITE_DISTANCE)
            if nextSequentialDownstreamSite is not None:
                #if we have the sequential downstream bound, don't let the new site get added any closer than halfway between
                siteIDOffset = min(
                    siteIDOffset,
                    Helpers.getSiteIDOffset(upstreamSiteID,
                                            nextSequentialDownstreamSite) / 2)

            newSiteIDDSN = upstreamSiteDSN + siteIDOffset

            #allowed wiggle room in the new site. Depending on how much distance is between the found site
            #we allow for a larger range in the final ID. Has to be at least 10% within the rule of min_site_distance
            #at most 5 digits up or down. At least, 0
            allowedError = math.floor(max(1, min(siteIDOffset / 10, 5)))

            upperBound = Helpers.buildFullID(
                partCode, upstreamSiteDSN + siteIDOffset + allowedError)
            lowerBound = Helpers.buildFullID(
                partCode, upstreamSiteDSN + siteIDOffset - allowedError)

            newID = Helpers.buildFullID(partCode, newSiteIDDSN)
            newID = self.beautifyID(newID,
                                    lowerBound,
                                    upperBound,
                                    logWarnings=logWarnings)
            # How many ID slots actually separate the (beautified) new ID
            # from the upstream site — reported in the story below.
            offsetAfterBeautify = Helpers.getSiteIDOffset(
                newID, fullUpstreamID)

            if nextSequentialDownstreamSite is None:
                story = "Only found a upstream site (" + upstreamSiteID + "). New site ID is based on upstream site while allowing space for " + str(
                    offsetAfterBeautify
                ) + " sites between upstream site and new site"
                if logWarnings:
                    warningLog.addWarning(
                        WarningLog.HIGH_PRIORITY,
                        "No downstream bound on result. Needs verification!")
            else:
                story = "Found an upstream site " + Helpers.formatID(
                    upstreamSiteID
                ) + ". Based on list of all sites, assume that " + Helpers.formatID(
                    nextSequentialDownstreamSite
                ) + " is the nearest sequential downstream site. New ID is based on the upstream site and bounded by the sequential downstream site"
                if logWarnings:
                    warningLog.addWarning(
                        WarningLog.LOW_PRIORITY,
                        "Found upstream and downstream bound. But, downstream bound is based on list of sequential sites and may not be the true downstream bound. This could result in site ID clustering."
                    )

            if __debug__:
                print("found upstream, but not downstream")
                print("upstream siteID is " + str(upstreamSiteID))
                streamGraph.visualize(customPoints=[snappedPoint])
                SnapSites.visualize(self.baseData, [])

        #~~~~~~~~~~~~~~~~~~~DOWNSTREAM SITE ONLY CASE~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        elif downstreamSite is not None:
            # Mirror of the upstream-only case: offset upstream (subtract)
            # from the found downstream site, optionally bounded by the next
            # sequential upstream site.
            downstreamSiteID = downstreamSite[0].siteID
            partCode = downstreamSiteID[:2]
            fullDownstreamID = Helpers.getFullID(downstreamSiteID)

            foundSiteNeighbors = siteIDManager.getNeighborIDs(
                downstreamSiteID, huc)
            if Failures.isFailureCode(foundSiteNeighbors):
                nextSequentialUpstreamSite = None
            else:
                # Index 0 is the sequential upstream neighbor.
                nextSequentialUpstreamSite = foundSiteNeighbors[0]

            downstreamSiteDSN = int(fullDownstreamID[2:])
            downstreamSiteDistance = downstreamSite[1]

            # One ID slot per MIN_SITE_DISTANCE of stream distance.
            siteIDOffset = math.ceil(downstreamSiteDistance /
                                     MIN_SITE_DISTANCE)

            if nextSequentialUpstreamSite is not None:
                #if we have the sequential upstream bound, don't let the new site get added any closer than halfway between
                siteIDOffset = min(
                    siteIDOffset,
                    Helpers.getSiteIDOffset(downstreamSiteID,
                                            nextSequentialUpstreamSite) / 2)

            newSiteIDDSN = downstreamSiteDSN - siteIDOffset

            # Same wiggle-room rule as the upstream-only case: at least 1,
            # at most 5, roughly 10% of the offset.
            allowedError = math.floor(max(1, min(siteIDOffset / 10, 5)))

            upperBound = Helpers.buildFullID(
                partCode, downstreamSiteDSN - siteIDOffset + allowedError)
            lowerBound = Helpers.buildFullID(
                partCode, downstreamSiteDSN - siteIDOffset - allowedError)

            newID = Helpers.buildFullID(partCode, newSiteIDDSN)
            newID = self.beautifyID(newID,
                                    lowerBound,
                                    upperBound,
                                    logWarnings=logWarnings)
            offsetAfterBeautify = Helpers.getSiteIDOffset(
                newID, fullDownstreamID)

            if nextSequentialUpstreamSite is None:
                story = "Only found a downstream site " + Helpers.formatID(
                    downstreamSiteID
                ) + ". New site ID is based on downstream site while allowing space for " + str(
                    offsetAfterBeautify
                ) + " sites between downstream site and new site"
                if logWarnings:
                    warningLog.addWarning(
                        WarningLog.HIGH_PRIORITY,
                        "No upstream bound on result. Needs verification!")
            else:
                story = "Found a downstream site " + Helpers.formatID(
                    downstreamSiteID
                ) + ". Based on list of all sites, assume that " + Helpers.formatID(
                    nextSequentialUpstreamSite
                ) + " is the nearest sequential upstream site. New ID is based on the downstream site and bounded by the sequential upstream site"
                if logWarnings:
                    warningLog.addWarning(
                        WarningLog.LOW_PRIORITY,
                        "Found upstream and downstream bound. But, upstream bound is based on list of sequential sites and may not be the true upstream bound. This could result in site ID clustering."
                    )

            if __debug__:
                print("found downstream, but not upstream")
                print("downstream siteID is " + str(downstreamSiteID))
                streamGraph.visualize(customPoints=[snappedPoint])
                SnapSites.visualize(self.baseData, [])

        #~~~~~~~~~~~~~~~~~~~NO SITES FOUND CASE~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        else:
            # Nothing found on the stream network. Fall back to estimating
            # from nearby sites queried by location: find two sites with the
            # same part code lying in roughly opposite directions from the
            # query point, then interpolate between their IDs by distance.
            # get huge radius of sites:
            sitesInfo = GDALData.loadSitesFromQuery(lat, lng, 30)
            if Failures.isFailureCode(sitesInfo):
                if logWarnings:
                    warningLog.addWarning(WarningLog.HIGH_PRIORITY, sitesInfo)
                return getResults(failed=True)

            # Collect (siteNumber, point, approxDistance, huc) tuples.
            # fastMagDist is presumably a cheap distance proxy used only for
            # relative ordering — TODO confirm it is monotone with true distance.
            sites = []
            for site in sitesInfo:
                siteNumber = site["properties"]["site_no"]
                siteHUC = site["properties"]["huc_cd"]
                sitePoint = site["geometry"]["coordinates"]
                fastDistance = Helpers.fastMagDist(sitePoint[0], sitePoint[1],
                                                   point[0], point[1])
                sites.append((siteNumber, sitePoint, fastDistance, siteHUC))

            # Nearest first.
            sortedSites = sorted(sites, key=lambda site: site[2])

            # Adopt the HUC of the nearest site.
            # NOTE(review): assumes the query returned at least one site;
            # sortedSites[0] raises IndexError otherwise — confirm upstream
            # guarantees or the failure-code check covers the empty case.
            huc = sortedSites[0][3]

            # Scan outward until two sites with the same part code point in
            # roughly opposite directions from the query point.
            oppositePairA = None
            oppositePairB = None
            foundOppositePair = False
            i = 1
            # NOTE(review): if no qualifying pair exists among the loaded
            # sites, i walks past the end of sortedSites and this loop
            # raises IndexError — confirm this is acceptable / unreachable.
            while foundOppositePair is False:
                curSite = sortedSites[i]
                curPartCode = curSite[0][:2]
                curSitePoint = curSite[1]
                # Unit direction from the query point to the candidate site.
                curDirection = Helpers.normalize(curSitePoint[0] - point[0],
                                                 curSitePoint[1] - point[1])
                for cmpSite in sortedSites[:i]:
                    cmpSitePoint = cmpSite[1]
                    cmpDirection = Helpers.normalize(
                        cmpSitePoint[0] - point[0], cmpSitePoint[1] - point[1])
                    cmpPartCode = cmpSite[0][:2]
                    dot = Helpers.dot(curDirection[0], curDirection[1],
                                      cmpDirection[0], cmpDirection[1])

                    #check if these two directions are mostly opposite
                    # (dot < 0 would mean at least perpendicular; the 0.4
                    # threshold also admits pairs slightly less than
                    # perpendicular — NOTE(review): confirm 0.4 is intended)
                    if dot < 0.4 and curPartCode == cmpPartCode:
                        foundOppositePair = True
                        oppositePairA = cmpSite
                        oppositePairB = curSite
                i += 1

            partCode = oppositePairA[0][:2]

            fullIDA = Helpers.getFullID(oppositePairA[0])
            fullIDB = Helpers.getFullID(oppositePairB[0])

            # Downstream sequence numbers (digits after the 2-char part code).
            dsnA = int(fullIDA[2:])
            dsnB = int(fullIDB[2:])

            distA = oppositePairA[2]
            distB = oppositePairB[2]

            # Interpolate the new DSN between A and B, weighted by how far
            # the query point sits between them.
            totalAddressSpaceDistance = distA + distB
            newSitePercentage = distA / totalAddressSpaceDistance

            newDsn = int(dsnA * (1 - newSitePercentage) +
                         dsnB * newSitePercentage)

            newID = Helpers.buildFullID(partCode, newDsn)
            newID = self.beautifyID(newID,
                                    fullIDA,
                                    fullIDB,
                                    logWarnings=logWarnings)

            story = "Could not find any sites on the network. Estimating based on " + Helpers.formatID(
                oppositePairA[0]) + " and " + Helpers.formatID(
                    oppositePairB[0]) + "."

            if __debug__:
                print(
                    "no sites found nearby. Estimating new ID based on nearby sites"
                )
                print("new estimate based on " + oppositePairA[0] + " and " +
                      oppositePairB[0])
                print("estimation is " + newID)
                streamGraph.visualize()
                SnapSites.visualize(self.baseData, [])

        # Success path for all branches above: hand back the computed ID and
        # the human-readable explanation of how it was derived.
        return getResults(siteID=newID, story=story)