Example #1
    def runnode(self, nodeid):
        """Run an instance for a given node

        This will run the checks for a given node
        """

        log = self.log

        log.info("Running checks for Node: {0}".format(nodeid))

        mergesession = self.mergesession()

        #First we fetch counts of all data for these items
        log.debug("--> Fetching remote counts")
        mergecounts = self.getcounts(nodeid)

        log.debug("--> Fetching Main counts")
        maincounts = self.getremotecounts(nodeid)

        #Next convert to a dictionary and run a dictdiff
        maindict = dict(maincounts)
        mergedict = dict(mergecounts)

        ddiff = DictDiff(maindict, mergedict)

        #Items that are in the Main but not in the Merged
        added = ddiff.added()
        #Items that are in the Merged but missing from the Main
        removed = ddiff.removed()
        #Items present in both but with a differing count
        changed = ddiff.changed()

        log.debug("--> Added Items {0}".format(added))
        log.debug("--> Removed Items {0}".format(removed))
        log.debug("--> Changed Items {0}".format(changed))

        #For this version we don't distinguish between days that are missing
        #and days whose counts have changed; higher-level code takes care of
        #the actual merging.
        #WARNING: If the bulk merge functionality changes this may break

        removed = removed.union(changed)
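        #From here on, "removed" covers both whole missing days and days
        #with mismatched counts; each day is re-uploaded in full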

        if removed:
            log.info("--- {0} Complete days that need adding ---".format(
                len(removed)))
            for thedate in removed:
                maincount = maindict.get(thedate, 0)
                mergecount = mergedict.get(thedate)
                log.debug("--> {0} {1}/{2} Samples in main".format(
                    thedate, maincount, mergecount))

                if maincount > mergecount:
                    log.warning(
                        "For some reason there are more items in the main DB;"
                        " assuming all is well")
                    continue
                #Get the readings themselves
                qry = (mergesession.query(
                    models.Reading).filter_by(nodeId=nodeid))
                qry = qry.filter(
                    sqlalchemy.func.date(models.Reading.time) == thedate)

                datalist = []
                for reading in qry:
                    #Check if we have mapped the location
                    if reading.locationId is None:
                        log.warning(
                            "Reading {0} has no location!".format(reading))
                        continue

                    maploc = self.locationmap.get(reading.locationId, None)
                    if maploc is None:
                        log.debug("Location {0} Has not been mapped".format(
                            reading.locationId))
                        maploc = self._maplocation(reading)

                    #log.debug("New Location is {0}.".format(maploc))
                    dictitem = reading.dict()
                    dictitem["locationId"] = maploc
                    datalist.append(dictitem)

                #log.debug("Data List {0}".format(datalist))
                #log.debug(datalist[:5])
                jsonStr = json.dumps(datalist)
                #Encode to bytes for zlib (required on Python 3)
                gzStr = zlib.compress(jsonStr.encode("utf-8"))

                #And then try to bulk upload them
                theurl = "{0}bulk/".format(self.resturl)
                restqry = requests.post(theurl, data=gzStr)
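                #NOTE: the response status is never checked, so a failed
                #bulk upload would be silently ignored here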
                #log.debug(restqry)

                #We also want to transfer the relevant nodestates
                log.info("Transfering NodeStates")
                qry = (mergesession.query(
                    models.NodeState).filter_by(nodeId=nodeid))
                qry = qry.filter(
                    sqlalchemy.func.date(models.NodeState.time) == thedate)
                #log.debug("{0} Nodestates to transfer".format(qry.count()))
                datalist = []
                for nodestate in qry:
                    dictitem = nodestate.dict()
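                    #Drop the local primary key so the receiving database
                    #can assign its own (an assumption about the bulk API)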
                    dictitem["id"] = None
                    datalist.append(dictitem)

                jsonStr = json.dumps(datalist)
                #Encode to bytes for zlib (required on Python 3)
                gzStr = zlib.compress(jsonStr.encode("utf-8"))

                #And then try to bulk upload them
                theurl = "{0}bulk/".format(self.resturl)
                restqry = requests.post(theurl, data=gzStr)
                log.debug(restqry)

        return
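
Both examples depend on a DictDiff helper whose implementation is not shown.
A minimal sketch of the interface they rely on, following the common
set-based recipe (the project's real class may differ):

class DictDiff(object):
    """Compare two {date: count} dicts: first (main) against second (merged)."""

    def __init__(self, current, past):
        self.current, self.past = current, past
        self.current_keys = set(current)
        self.past_keys = set(past)
        self.intersect = self.current_keys & self.past_keys

    def added(self):
        #Keys in the first dict (main) but not the second (merged)
        return self.current_keys - self.intersect

    def removed(self):
        #Keys in the second dict (merged) but not the first (main)
        return self.past_keys - self.intersect

    def changed(self):
        #Keys present in both dicts whose counts differ
        return set(k for k in self.intersect
                   if self.current[k] != self.past[k])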
Example #2
    def runnode(self, nodeid):
        """Run an instance for a given node

        This will run the checks for a given node
        """

        log = self.log

        log.info("Running checks for Node: {0}".format(nodeid))

        mainsession = self.mainsession()
        mergesession = self.mergesession()

        #First we fetch counts of all data for these items
        log.debug("--> Fetching remote counts")
        mergecounts = self.getcounts(nodeid)

        log.debug("--> Fetching Main counts")
        maincounts = self.getcounts(nodeid, MAIN)

        #Next convert to a dictionary and run a dictdiff
        maindict = dict(maincounts)
        mergedict = dict(mergecounts)

        ddiff = DictDiff(maindict, mergedict)

        #Items that are in the Main but not in the Merged
        added = ddiff.added()
        #Items that are in the Merged but missing from the Main
        removed = ddiff.removed()
        #Items present in both but with a differing count
        changed = ddiff.changed()

        log.debug("--> Added Items {0}".format(added))
        log.debug("--> Removed Items {0}".format(removed))
        log.debug("--> Changed Items {0}".format(changed))

        #The simplest change is to add the "removed" data, since these days
        #do not exist in the remote database at all

        if removed:
            log.info("--- {0} Complete days that need adding ---".format(
                len(removed)))
            for thedate in removed:
                maincount = maindict.get(thedate, 0)
                mergecount = mergedict.get(thedate)
                log.debug("--> {0} {1}/{2} Samples in main".format(
                    thedate, maincount, mergecount))

                #Get the readings themselves
                qry = (mergesession.query(
                    models.Reading).filter_by(nodeId=nodeid))
                qry = qry.filter(
                    sqlalchemy.func.date(models.Reading.time) == thedate)

                for reading in qry:
                    #Check if we have mapped the location
                    if reading.locationId is None:
                        log.warning(
                            "Reading {0} has no location!".format(reading))
                        continue

                    maploc = self.locationmap.get(reading.locationId, None)
                    if maploc is None:
                        log.debug("Location {0} Has not been mapped".format(
                            reading.locationId))
                        maploc = self._maplocation(reading)

                    #log.debug("New Location is {0}.".format(maploc))
                    #make a copy and add to the new session
                    mainsession.add(
                        models.Reading(time=reading.time,
                                       nodeId=reading.nodeId,
                                       locationId=maploc,
                                       typeId=reading.typeId,
                                       value=reading.value))

                #We also want to transfer the relevant nodestates
                log.info("Transfering NodeStates")
                qry = (mergesession.query(
                    models.NodeState).filter_by(nodeId=nodeid))
                qry = qry.filter(
                    sqlalchemy.func.date(models.NodeState.time) == thedate)
                log.debug("{0} Nodestates to transfer".format(qry.count()))
                for nodestate in qry:
                    mainsession.add(
                        models.NodeState(time=nodestate.time,
                                         nodeId=nodestate.nodeId,
                                         parent=nodestate.parent,
                                         localtime=nodestate.localtime,
                                         seq_num=nodestate.seq_num,
                                         rssi=nodestate.rssi))
                #Commit so the transferred rows actually persist; a bare
                #flush() would be rolled back when the session closes
                mainsession.commit()

        if changed:
            log.debug("---- Dealing with changed items ----")
            #For the moment we don't worry about merging and duplicates;
            #the problem can be fixed up later (nodestate table bugfix)
            log.info("--- {0} days that need merging ---".format(len(changed)))
            for thedate in changed:
                maincount = maindict.get(thedate, 0)
                mergecount = mergedict.get(thedate)
                log.debug("--> {0} {1}/{2} Samples in main".format(
                    thedate, maincount, mergecount))

                if maincount > mergecount:
                    log.warning(
                        "For some reason there are more items in the main DB")
                    continue

                #Get the readings themselves
                qry = (mergesession.query(
                    models.Reading).filter_by(nodeId=nodeid))
                qry = qry.filter(models.Reading.locationId.isnot(None))
                qry = qry.filter(
                    sqlalchemy.func.date(models.Reading.time) == thedate)

                log.debug("--> Total of {0} readings to merge".format(
                    qry.count()))

                for reading in qry:
                    #Check if we have mapped the location
                    if reading.locationId is None:
                        log.warning(
                            "Reading {0} has no location!".format(reading))
                        continue

                    maploc = self.locationmap.get(reading.locationId, None)
                    if maploc is None:
                        log.debug("Location {0} Has not been mapped".format(
                            reading.locationId))
                        maploc = self._maplocation(reading)

                    #log.debug("New Location is {0}.".format(maploc))
                    #make a copy and merge into the main session
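                    #Unlike add() in the "removed" block above, merge() looks
                    #up an existing row by primary key and updates it in
                    #place, avoiding duplicates (assuming the key covers
                    #these columns)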
                    mainsession.merge(
                        models.Reading(time=reading.time,
                                       nodeId=reading.nodeId,
                                       locationId=maploc,
                                       typeId=reading.typeId,
                                       value=reading.value))

                #We also want to transfer the relevant nodestates
                log.info("Transfering NodeStates")
                qry = (mergesession.query(
                    models.NodeState).filter_by(nodeId=nodeid))
                qry = qry.filter(
                    sqlalchemy.func.date(models.NodeState.time) == thedate)
                log.debug("{0} Nodestates to transfer".format(qry.count()))
                for nodestate in qry:
                    mainsession.merge(
                        models.NodeState(time=nodestate.time,
                                         nodeId=nodestate.nodeId,
                                         parent=nodestate.parent,
                                         localtime=nodestate.localtime,
                                         seq_num=nodestate.seq_num,
                                         rssi=nodestate.rssi))
                #Commit so the transferred rows actually persist; a bare
                #flush() would be rolled back when the session closes
                mainsession.commit()
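
For reference, the bulk/ endpoint used in Example #1 receives a
zlib-compressed JSON list, so a receiving view only needs to reverse the two
encoding steps. A minimal sketch of that round trip (the helper name is
hypothetical; only the payload format comes from the examples):

import json
import zlib

def decode_bulk_payload(body):
    """Reverse the encoding from Example #1: zlib-compressed JSON bytes."""
    return json.loads(zlib.decompress(body).decode("utf-8"))

#Round trip matching the upload side
payload = zlib.compress(
    json.dumps([{"nodeId": 1, "value": 20.5}]).encode("utf-8"))
assert decode_bulk_payload(payload) == [{"nodeId": 1, "value": 20.5}]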