Example #1
    def testNEQ(self):
        """Test for Inequality"""
        item1 = models.Reading(time=NOW,
                               nodeId=1,
                               typeId=2,
                               locationId=3,
                               value=45.0)

        item2 = models.Reading(time=NOW,
                               nodeId=1,
                               typeId=2,
                               locationId=3,
                               value=45.0)

        self.assertEqual(item1, item2)

        item2.time = datetime.datetime.utcnow()
        self.assertReallyNotEqual(item1, item2)

        item2.time = item1.time
        item2.nodeId = 10
        self.assertReallyNotEqual(item1, item2)

        item2.nodeId = 1
        item2.typeId = 10
        self.assertReallyNotEqual(item1, item2)

        item2.locationId = 3
        item2.value = 0.0
        self.assertReallyNotEqual(item1, item2)
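
The assertReallyEqual / assertReallyNotEqual helpers used in these tests are not shown in the examples. A minimal sketch of what such a mixin (mixed into a unittest.TestCase subclass) typically checks; the class name and the exact set of checks are assumptions, not the original source:

class EqualityAssertionsMixin(object):
    """Hypothetical mixin; exercises == and != in both directions so a
    broken __eq__/__ne__ pair cannot slip through."""

    def assertReallyEqual(self, a, b):
        self.assertEqual(a, b)
        self.assertEqual(b, a)
        self.assertFalse(a != b)
        self.assertFalse(b != a)

    def assertReallyNotEqual(self, a, b):
        self.assertNotEqual(a, b)
        self.assertNotEqual(b, a)
        self.assertTrue(a != b)
        self.assertTrue(b != a)
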
Example #2
 def _serialobj(self):
     """Helper Method to provde an object to serialise"""
     theItem = models.Reading(time=NOW,
                              nodeId=1,
                              typeId=2,
                              locationId=3,
                              value=45.0)
     return theItem
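
A hedged example of how a helper like _serialobj is typically used; the pickle round-trip and the test method name are assumptions, and the method would live in the same TestCase as _serialobj:

 import pickle

 def testPickle(self):
     """Hypothetical test: the object from _serialobj should survive a
     pickle round-trip unchanged."""
     original = self._serialobj()
     restored = pickle.loads(pickle.dumps(original))
     self.assertEqual(original, restored)
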
Example #3
    def testEq(self):
        """Test for Equality"""
        item1 = models.Reading(time=NOW,
                               nodeId=1,
                               typeId=2,
                               locationId=3,
                               value=45.0)

        item2 = models.Reading(time=NOW,
                               nodeId=1,
                               typeId=2,
                               locationId=3,
                               value=45.0)

        self.assertEqual(item1, item2)
        self.assertReallyEqual(item1, item2)

        #Changing the location id should not affect equality
        item2.locationId = 10
        self.assertReallyEqual(item1, item2)
Example #4
    def testCmp(self):
        #     """Test Compaison function
        #     (actually __lt__ for Py3K Comat)"""

        item1 = models.Reading(time=NOW,
                               nodeId=1,
                               typeId=2,
                               locationId=3,
                               value=45.0)

        item2 = models.Reading(time=NOW,
                               nodeId=1,
                               typeId=2,
                               locationId=3,
                               value=45.0)

        self.assertEqual(item1, item2)

        item2.time = datetime.datetime.utcnow()
        self.assertGreater(item2, item1)
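
testEq and testCmp together suggest that Reading compares on its field values (with locationId apparently excluded, given Example #3) and that ordering is provided via __lt__ for Python 3 compatibility. A minimal sketch of such a model, purely as an assumption about what models.Reading might look like (the real class is a SQLAlchemy model):

class Reading(object):
    """Hypothetical stand-in for models.Reading, illustrating only the
    comparison behaviour exercised by the tests above."""

    def __init__(self, time, nodeId, typeId, locationId, value):
        self.time = time
        self.nodeId = nodeId
        self.typeId = typeId
        self.locationId = locationId
        self.value = value

    def _key(self):
        # locationId is left out: Example #3 keeps items equal after
        # changing it (an assumption based on that test)
        return (self.time, self.nodeId, self.typeId, self.value)

    def __eq__(self, other):
        return self._key() == other._key()

    def __ne__(self, other):
        return not self == other

    def __lt__(self, other):
        # __lt__ rather than __cmp__, for Py3K compatibility
        return self._key() < other._key()
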
Example #5
 def uploadFakeReadings(self):
     import datetime
     now = datetime.datetime.utcnow()
     session = self.localSession()
     for x in range(100000):
         thisTime = now + datetime.timedelta(seconds=x)
         theSample = models.Reading(time=thisTime,
                                    nodeId=837,
                                    locationId=1,
                                    typeId=0,
                                    value=x)
         session.add(theSample)
     session.flush()
     session.commit()
Example #6
    def saveData(self, nodeId, values):
        """Save a reading in the Database

        :var nodeId: String Containing the Current Cost Node Id
        :var values: Tuple containing sensor values as returned by ploggParseValue
        """
        log = self.log
        log.debug("Saving data for {0} {1}".format(nodeId, values))
        session = meta.Session()
        mappedId = NODEMAP[nodeId]
        theNode = session.query(models.Node).filter_by(id=mappedId).first()

        #Fetch Sensor Types
        wattSensor = session.query(
            models.SensorType).filter_by(name="Plogg Watts").first()
        log.debug("Watt Sensor {0}".format(wattSensor))
        kWhSensor = session.query(
            models.SensorType).filter_by(name="Plogg kWh").first()
        log.debug("kW Sensor {0}".format(kWhSensor))
        currentSensor = session.query(
            models.SensorType).filter_by(name="Plogg Current").first()
        log.debug("A Sensor {0}".format(currentSensor))

        #Create the node if it doesn't exist
        if not theNode:
            log.info("Node {0} / {1} does not exist, Creating".format(
                nodeId, mappedId))
            theNode = models.Node(id=mappedId, locationId=None)
            session.add(theNode)
            session.flush()
            log.debug("Node is {0}".format(theNode))
            #And we need to add a set of sensors
            for item in [wattSensor, kWhSensor, currentSensor]:
                theSensor = models.Sensor(sensorTypeId=item.id,
                                          nodeId=theNode.id,
                                          calibrationSlope=1.0,
                                          calibrationOffset=0.0)
                session.add(theSensor)
            session.flush()

        sampleTime, sampleWatts, samplekWh, sampleCurrent = values
        #Then Add the Readings
        theReading = models.Reading(time=sampleTime,
                                    nodeId=theNode.id,
                                    locationId=theNode.locationId,
                                    typeId=wattSensor.id,
                                    value=sampleWatts)
        session.add(theReading)

        theReading = models.Reading(time=sampleTime,
                                    nodeId=theNode.id,
                                    locationId=theNode.locationId,
                                    typeId=kWhSensor.id,
                                    value=samplekWh)
        session.add(theReading)

        theReading = models.Reading(time=sampleTime,
                                    nodeId=theNode.id,
                                    locationId=theNode.locationId,
                                    typeId=currentSensor.id,
                                    value=sampleCurrent)
        session.add(theReading)

        #And add a nodeState
        theNodeState = models.NodeState(time=sampleTime,
                                        nodeId=theNode.id,
                                        parent=theNode.id,
                                        localtime=sampleTime)

        session.add(theNodeState)
        session.flush()
        session.commit()
        session.close()
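
Inside saveData the values tuple is unpacked as (sampleTime, sampleWatts, samplekWh, sampleCurrent). A hedged usage sketch; the handler instance and the node id string are placeholders, and the id would need an entry in NODEMAP:

import datetime

values = (datetime.datetime.utcnow(),  # sampleTime
          231.5,                       # sampleWatts (illustrative)
          0.42,                        # samplekWh (illustrative)
          0.97)                        # sampleCurrent (illustrative)
handler.saveData("plogg-01", values)   # "handler" and the id are placeholders
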
Example #7
    def run(self):
        session = meta.Session()
        localCount = 0
        stateOne = True

        node37 = self.node37
        node38 = self.node38

        try:
            while True:
                #Add a reading every N seconds
                log.debug("Adding New Reading {0}".format(datetime.utcnow()))

                theReading = models.Reading(time=datetime.utcnow(),
                                            nodeId=node37.id,
                                            locationId=node37.locationId,
                                            value=localCount,
                                            typeId=0)
                session.add(theReading)

                theReading = models.Reading(time=datetime.utcnow(),
                                            nodeId=node38.id,
                                            locationId=node38.locationId,
                                            value=100 - localCount,
                                            typeId=0)
                session.add(theReading)
                session.flush()
                if localCount == STATE_SWITCH:
                    log.debug("Switching States")
                    localCount = 0

                    #Add a node state
                    if stateOne:
                        theState = models.NodeState(time=datetime.utcnow(),
                                                    nodeId=node37.id,
                                                    parent=1024,
                                                    localtime=0)
                        session.add(theState)

                        theState = models.NodeState(time=datetime.utcnow(),
                                                    nodeId=node38.id,
                                                    parent=1024,
                                                    localtime=0)
                        session.add(theState)
                    else:
                        theState = models.NodeState(time=datetime.utcnow(),
                                                    nodeId=node37.id,
                                                    parent=node38.id,
                                                    localtime=0)
                        session.add(theState)

                        theState = models.NodeState(time=datetime.utcnow(),
                                                    nodeId=node38.id,
                                                    parent=1024,
                                                    localtime=0)
                        session.add(theState)

                    stateOne = not stateOne
                    session.flush()
                else:
                    localCount += 1

                time.sleep(READING_GAP)
                session.commit()
        except KeyboardInterrupt:
            log.debug("Closing Everything down")
            session.flush()
            session.commit()
Example #8
    def addMany(self):
        """Add around 1 million records to the database"""
        """Add about 2000 Records to the Database"""
        session = meta.Session()
        localCount = 0
        stateOne = True
        fakeTime = datetime.utcnow()

        node37 = self.node37
        node38 = self.node38

        #Work out a better start time
        currentTime = datetime.utcnow()
        #Calculate total seconds for samples
        deploymentSeconds = (BULK_SAMPLES * READING_GAP) * BULK_OFFSET
        fakeTime = currentTime - timedelta(seconds=deploymentSeconds)
        log.debug("Current Time is {0}  -> Start time is {1}".format(
            currentTime, fakeTime))

        totalCount = 0
        try:
            #while totalCount < 500000:
            while totalCount < BULK_SAMPLES:
                #Add a reading every N seconds
                #log.debug("Adding New Reading {0}".format(fakeTime))

                thisRRD = RRDLIST.get((node37.id, 0, node37.locationId), None)
                if thisRRD is None:
                    thisRRD = rrdstore.RRDStore(node37.id,
                                                0,
                                                node37.locationId,
                                                startTime=fakeTime)
                    RRDLIST[(node37.id, 0, node37.locationId)] = thisRRD

                theReading = models.Reading(time=fakeTime,
                                            nodeId=node37.id,
                                            locationId=node37.locationId,
                                            value=localCount,
                                            typeId=0)

                session.add(theReading)

                thisRRD.update(fakeTime, localCount)

                # ---- AND Node 38
                thisRRD = RRDLIST.get((node38.id, 0, node38.locationId), None)
                if thisRRD is None:
                    thisRRD = rrdstore.RRDStore(node38.id,
                                                0,
                                                node38.locationId,
                                                startTime=fakeTime)

                    RRDLIST[(node38.id, 0, node38.locationId)] = thisRRD

                thisReading = models.Reading(time=fakeTime,
                                             nodeId=node38.id,
                                             locationId=node38.locationId,
                                             value=100 - localCount,
                                             typeId=0)
                session.add(thisReading)

                thisRRD.update(fakeTime, 100 - localCount)  #Mirror the value stored for node 38

                session.flush()
                if localCount == STATE_SWITCH:
                    log.debug("Switching States")
                    localCount = 0

                    #Add a node state
                    if stateOne:
                        theState = models.NodeState(time=fakeTime,
                                                    nodeId=node37.id,
                                                    parent=1024,
                                                    localtime=0)
                        session.add(theState)

                        theState = models.NodeState(time=fakeTime,
                                                    nodeId=node38.id,
                                                    parent=1024,
                                                    localtime=0)
                        session.add(theState)
                    else:
                        theState = models.NodeState(time=fakeTime,
                                                    nodeId=node37.id,
                                                    parent=node38.id,
                                                    localtime=0)
                        session.add(theState)

                        theState = models.NodeState(time=fakeTime,
                                                    nodeId=node38.id,
                                                    parent=1024,
                                                    localtime=0)
                        session.add(theState)

                    stateOne = not stateOne
                    session.flush()
                    session.commit()
                    log.debug("Committing Samples {0}".format(totalCount))
                else:
                    localCount += 1

                #time.sleep(READING_GAP)
                totalCount += 1
                fakeTime = fakeTime + timedelta(seconds=READING_GAP)
                #session.commit()
        except KeyboardInterrupt:
            log.debug("Closing Everything down")
            session.flush()
            session.commit()

        session.flush()
        session.commit()
Example #9
def populate_readings(session = None):

    #The Deployment
    if not session:
        print "Creating a new Session"
        session = meta.Session()

    now = datetime.datetime(2013, 1, 1, 0, 0, 0)

    #Now we want to add a load of readings / Nodestates
    thetime = now  # - datetime.timedelta(days=10)
    endtime = now + datetime.timedelta(days=10)
    #print "START TIME {0}".format(starttime)

    thecount = 0.0
    seqnum = -1
    while thetime < endtime:
        #Increment and roll over the sequence number
        seqnum += 1
        if seqnum > 255:
            seqnum = seqnum - 255

        for nid in [837, 838, 1061, 1063]:

            locationid = 1
            if nid == 838:
                locationid = 2
            elif nid == 1061:
                locationid = 3
                #Sample every 10 minutes (50% yield)
                if thetime.minute % 10 == 0:
                    continue
            elif nid == 1063:
                locationid = 4
                #And remove every 3rd sample
                if thetime.minute % 15 == 0:
                    continue

            ns = models.NodeState(nodeId = nid,
                                  parent = 1,
                                  time = thetime,
                                  seq_num = seqnum)

            session.add(ns)


            reading = models.Reading(nodeId = nid,
                                     typeId = 0,
                                     time = thetime,
                                     locationId = locationid,
                                     value = 18.0+(2.0*math.sin(thecount)),
                                     )
            session.add(reading)

        #Increment the time
        thetime = thetime + datetime.timedelta(minutes=5)
        thecount = thecount + (3.14 / 144)

    session.commit()
    transaction.commit()
    session.commit()
    session.close()
Example #10
    def test_uploadreadings(self):
        """Does the uploading of readings happen correctly"""

        self.pusher.log.setLevel(logging.DEBUG)
        rurl = "{0}Reading/".format(RESTURL)
        #Clean up
        cutdate = datetime.datetime(2013, 2, 1, 0, 0, 0)

        self.pusher.mappingConfig["lastupdate"] = {}

        #We also need to fake the mappings
        self.pusher.mappedLocations = {1: 1, 2: 2, 3: 3, 4: 4}

        session = self.Session()
        thehouse = session.query(models.House).filter_by(id=1).first()
        secondhouse = session.query(models.House).filter_by(id=2).first()
        #Limit to stuff after the cutoff date
        output = self.pusher.upload_readings(thehouse, cutdate)
        #The first time around we should have no readings transferred (as
        #everything should match)
        txcount, lasttx = output

        self.assertEqual(txcount, 0)
        self.assertEqual(lasttx, cutdate)

        #So let's transfer some readings
        currentdate = cutdate
        enddate = datetime.datetime(2013, 2, 2, 0, 0, 0)  #One day
        session = self.Session()
        while currentdate < enddate:
            thesample = models.Reading(time=currentdate,
                                       nodeId=837,
                                       locationId=1,
                                       typeId=0,
                                       value=200)
            session.add(thesample)
            currentdate = currentdate + datetime.timedelta(minutes=5)
        session.flush()
        session.commit()

        output = self.pusher.upload_readings(thehouse, cutdate)
        txcount, lasttx = output
        self.assertEqual(txcount, 287)
        #Remove 5 mins as that is the actual last sample transferred
        self.assertEqual(lasttx, currentdate - datetime.timedelta(minutes=5))

        #We should also now have about 11 days worth of samples
        #expectedcount = ((288*10)*2)+288
        qry = requests.get(rurl,
                           params={
                               "time": [
                                   "ge_{0}".format(cutdate.isoformat()),
                                   "le_{0}".format(currentdate.isoformat())
                               ]
                           })

        self.assertEqual(287, len(qry.json()))

        #And now if we transfer there should be nothing pushed across
        output = self.pusher.upload_readings(thehouse, currentdate)
        txcount, lasttx = output
        self.assertEqual(txcount, 0)
        self.assertEqual(lasttx, currentdate)

        #So let's add readings for multiple locations and houses
        enddate = datetime.datetime(2013, 2, 3, 0, 0, 0)  #One day
        session = self.Session()
        while currentdate <= enddate:
            thesample = models.Reading(time=currentdate,
                                       nodeId=837,
                                       locationId=1,
                                       typeId=0,
                                       value=400)
            session.add(thesample)
            thesample = models.Reading(time=currentdate,
                                       nodeId=838,
                                       locationId=2,
                                       typeId=0,
                                       value=400)
            session.add(thesample)
            thesample = models.Reading(time=currentdate,
                                       nodeId=1061,
                                       locationId=3,
                                       typeId=0,
                                       value=400)
            session.add(thesample)
            thesample = models.Reading(time=currentdate,
                                       nodeId=1063,
                                       locationId=4,
                                       typeId=0,
                                       value=400)
            session.add(thesample)

            currentdate = currentdate + datetime.timedelta(minutes=5)
        session.flush()
        session.commit()

        cutdate = lasttx
        output = self.pusher.upload_readings(thehouse, cutdate)
        txcount, lasttx = output
        self.assertEqual(txcount, 288 * 2)
        self.assertEqual(lasttx, currentdate - datetime.timedelta(minutes=5))

        #And double check everything on the remote server
        #TODO: REPLICATE THIS WITH REQUESTS
        #House 1, location 1 should now have 12 days of readings (11 days with 2 locations, 1 with only one)
        expected = (12 * 288) - 1
        qry = requests.get(rurl, params={"nodeId": 837})
        self.assertEqual(expected, len(qry.json()))

        #House 1, location 2 should have 11 days of readings
        expected = 11 * 288
        qry = requests.get(rurl, params={"nodeId": 838})
        self.assertEqual(expected, len(qry.json()))

        #House 2 is a bit trickier as we skipped samples for the yield calculations
        expected = (288 / 2) * 10  #Skips every other sample
        qry = requests.get(rurl, params={"nodeId": 1061})
        self.assertEqual(expected, len(qry.json()))

        expected = round((288 * 0.6666) * 10)  #Approximately 1/3 missing
        qry = requests.get(rurl, params={"nodeId": 1063})
        self.assertEqual(expected, len(qry.json()))

        #Finally we want to push stuff from house 2
        session = self.Session()
        secondhouse = session.query(models.House).filter_by(id=2).first()
        output = self.pusher.upload_readings(secondhouse, cutdate)
        txcount, lasttx = output
        self.assertEqual(txcount, 288 * 2)
        self.assertEqual(lasttx, currentdate - datetime.timedelta(minutes=5))
        session.close()

        cutdate = lasttx

        #Finally, what happens if we hit the maximum number of samples to be transmitted
        self.pusher.pushLimit = 144  #1/2 day
        #self.pusher.log.setLevel(logging.DEBUG)
        #Add some samples
        session = self.Session()
        enddate = datetime.datetime(2013, 2, 4, 0, 0, 0)  #One day
        while currentdate < enddate:
            thesample = models.Reading(time=currentdate,
                                       nodeId=837,
                                       locationId=1,
                                       typeId=0,
                                       value=600)
            session.add(thesample)
            thesample = models.Reading(time=currentdate,
                                       nodeId=838,
                                       locationId=2,
                                       typeId=0,
                                       value=600)
            session.add(thesample)
            currentdate = currentdate + datetime.timedelta(minutes=5)
        session.flush()
        session.commit()
        session.close()

        session = self.Session()
        thehouse = session.query(models.House).filter_by(id=1).first()

        output = self.pusher.upload_readings(thehouse, lasttx)
        txcount, lasttx = output
        self.assertEqual(txcount, (288 - 1) * 2)

        #Then we want to ensure that there is nothing left
        output = self.pusher.upload_readings(thehouse, lasttx)
        txcount, lasttx = output
        self.assertEqual(txcount, 0)
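
The expected counts in the assertions above all derive from the 5-minute sampling interval; as a quick sanity check of the arithmetic (plain Python, nothing project-specific):

SAMPLE_MINUTES = 5
samples_per_day = (24 * 60) // SAMPLE_MINUTES   # 288

# 287           -> one day minus the boundary sample that is not re-sent
# 288 * 2       -> a full day for two nodes
# 11 * 288 etc. -> whole days of accumulated history per node
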
Example #11
    def test_uploadnodestate(self):
        """Do we upload nodestates correctly"""
        #self.pusher.log.setLevel(logging.DEBUG)
        rurl = "{0}NodeState/".format(RESTURL)
        cutdate = datetime.datetime(2013, 2, 1, 0, 0, 0)

        #So now it's time to check if nodestates are updated correctly
        session = self.Session()
        thehouse = session.query(models.House).filter_by(id=1).first()
        #As this will be passed in by upload_readings we need to fetch it now
        lastupdate = self.pusher.get_lastupdate(thehouse)
        expectdate = datetime.datetime(2013, 1, 10, 23, 55, 1)
        self.assertEqual(lastupdate, expectdate)

        #First off let's make sure that a run without anything to transfer works properly
        txcount = self.pusher.upload_nodestate(thehouse, lastupdate)
        self.assertEqual(txcount, 0)

        #Now add a load of nodestates for house One
        currentdate = cutdate

        enddate = datetime.datetime(2013, 2, 2, 0, 0, 0)  #One day
        session = self.Session()
        while currentdate < enddate:
            theitem = models.NodeState(time=currentdate, nodeId=837)
            session.add(theitem)
            theitem = models.Reading(time=currentdate,
                                     nodeId=837,
                                     locationId=1,
                                     typeId=0)
            session.add(theitem)
            currentdate = currentdate + datetime.timedelta(minutes=5)
        session.flush()
        session.commit()

        txcount = self.pusher.upload_nodestate(thehouse, lastupdate)
        #Modify last update

        self.assertEqual(txcount, 288)
        lastupdate = currentdate

        #currentdate = currentdate - datetime.timedelta(minutes=5)
        #Add nodestates for house One and Two but only push house 1
        enddate = datetime.datetime(2013, 2, 3, 0, 0, 0)
        while currentdate <= enddate:
            theitem = models.NodeState(time=currentdate, nodeId=837)
            session.add(theitem)
            theitem = models.Reading(time=currentdate,
                                     nodeId=837,
                                     locationId=1,
                                     typeId=0)
            session.add(theitem)
            theitem = models.NodeState(time=currentdate, nodeId=838)
            session.add(theitem)
            theitem = models.Reading(time=currentdate,
                                     nodeId=838,
                                     locationId=2,
                                     typeId=0)
            session.add(theitem)
            theitem = models.NodeState(time=currentdate, nodeId=1061)
            session.add(theitem)
            theitem = models.Reading(time=currentdate,
                                     nodeId=1061,
                                     locationId=3,
                                     typeId=0)
            session.add(theitem)
            theitem = models.NodeState(time=currentdate, nodeId=1063)
            session.add(theitem)
            theitem = models.Reading(time=currentdate,
                                     nodeId=1063,
                                     typeId=0,
                                     locationId=4)
            session.add(theitem)
            currentdate = currentdate + datetime.timedelta(minutes=5)
        session.flush()
        session.commit()
        session.close()
        txcount = self.pusher.upload_nodestate(thehouse, lastupdate)
        self.assertEqual(txcount, 288 * 2)

        #Check nothing has been transferred to house 2
        node1061expected = 144 * 10
        node1063expected = 1920  #288 * (2/3) * 10 - one third of samples skipped

        ##session = self.rSession()
        qry = requests.get(rurl, params={"nodeId": 837})
        self.assertEqual(288 * 12, len(qry.json()))
        qry = requests.get(rurl, params={"nodeId": 838})
        self.assertEqual(288 * 11, len(qry.json()))
        qry = requests.get(rurl, params={"nodeId": 1061})
        self.assertEqual(node1061expected, len(qry.json()))
        qry = requests.get(rurl, params={"nodeId": 1063})
        self.assertEqual(node1063expected, len(qry.json()))
        # qry = session.query(models.NodeState).filter_by(nodeId=837)
        # self.assertEqual(qry.count(), 288*12)
        # qry = session.query(models.NodeState).filter_by(nodeId=838)
        # self.assertEqual(qry.count(), 288*11)
        # qry = session.query(models.NodeState).filter_by(nodeId=1061)
        # self.assertEqual(qry.count(), node1061expected)
        # qry = session.query(models.NodeState).filter_by(nodeId=1063)
        # self.assertEqual(qry.count(), node1063expected)
        # session.close()

        #Push house2
        session = self.Session()
        thehouse = session.query(models.House).filter_by(id=2).first()
        txcount = self.pusher.upload_nodestate(thehouse, lastupdate)
        self.assertEqual(txcount, 288 * 2)
        session.close()

        qry = requests.get(rurl, params={"nodeId": 837})
        self.assertEqual(288 * 12, len(qry.json()))
        qry = requests.get(rurl, params={"nodeId": 838})
        self.assertEqual(288 * 11, len(qry.json()))
        qry = requests.get(rurl, params={"nodeId": 1061})
        self.assertEqual(node1061expected + 288, len(qry.json()))
        qry = requests.get(rurl, params={"nodeId": 1063})
        self.assertEqual(node1063expected + 288, len(qry.json()))
Example #12
    def runnode(self, nodeid):
        """Run an instance for a given node

        This will run the checks for a given node
        """

        log = self.log

        log.info("Running checks for Node: {0}".format(nodeid))

        mainsession = self.mainsession()
        mergesession = self.mergesession()

        #First we fetch counts of all data for these items
        log.debug("--> Fetching remote counts")
        mergecounts = self.getcounts(nodeid)

        log.debug("--> Fetching Main counts")
        maincounts = self.getcounts(nodeid, MAIN)

        #Next convert to a dictionary and run a dictdiff
        maindict = dict(maincounts)
        mergedict = dict(mergecounts)

        ddiff = DictDiff(maindict, mergedict)

        #Items that are in the Main but not in the Merged
        added = ddiff.added()
        #Items that are in the Merged but completely missing from the Main
        removed = ddiff.removed()
        #Items present in both but with differing counts
        changed = ddiff.changed()

        log.debug("--> Added Items {0}".format(added))
        log.debug("--> Removed Items {0}".format(removed))
        log.debug("--> Changed Items {0}".format(changed))

        #The simplest change is to add the "removed" data, as these days do
        #not yet exist in the main database

        if removed:
            log.info("--- {0} Complete days that need adding ---".format(
                len(removed)))
            for thedate in removed:
                maincount = maindict.get(thedate, 0)
                mergecount = mergedict.get(thedate)
                log.debug("--> {0} {1}/{2} Samples in main".format(
                    thedate, maincount, mergecount))

                #Get the readings themselves
                qry = (mergesession.query(
                    models.Reading).filter_by(nodeId=nodeid))
                qry = qry.filter(
                    sqlalchemy.func.date(models.Reading.time) == thedate)

                for reading in qry:
                    #Check if we have mapped the location
                    if reading.locationId is None:
                        log.warning("Reading {0} has no location !!!!!".format(
                            reading))
                        continue

                    maploc = self.locationmap.get(reading.locationId, None)
                    if maploc is None:
                        log.debug("Location {0} Has not been mapped".format(
                            reading.locationId))
                        maploc = self._maplocation(reading)

                    #log.debug("New Location is {0}.".format(maploc))
                    #make a copy and add to the new session
                    mainsession.add(
                        models.Reading(time=reading.time,
                                       nodeId=reading.nodeId,
                                       locationId=maploc,
                                       typeId=reading.typeId,
                                       value=reading.value))

                #We also want to transfer the relevant nodestates
                log.info("Transfering NodeStates")
                qry = (mergesession.query(
                    models.NodeState).filter_by(nodeId=nodeid))
                qry = qry.filter(
                    sqlalchemy.func.date(models.NodeState.time) == thedate)
                log.debug("{0} Nodestates to transfer".format(qry.count()))
                for nodestate in qry:
                    mainsession.add(
                        models.NodeState(time=nodestate.time,
                                         nodeId=nodestate.nodeId,
                                         parent=nodestate.parent,
                                         localtime=nodestate.localtime,
                                         seq_num=nodestate.seq_num,
                                         rssi=nodestate.rssi))
                #Close our sessions
                mainsession.flush()
                mainsession.close()

        if changed:
            log.debug("---- Dealing with changed items ----")
            #For the moment I don't really care about merging and duplicates
            #We can fix the problem up later (nodestate table bugfix)
            log.info("--- {0} days that need merging ---".format(len(changed)))
            for thedate in changed:
                maincount = maindict.get(thedate, 0)
                mergecount = mergedict.get(thedate)
                log.debug("--> {0} {1}/{2} Samples in main".format(
                    thedate, maincount, mergecount))

                if maincount > mergecount:
                    log.warning(
                        "For Some Reason there are more items in the main db")
                    continue

                #Get the readings themselves
                qry = (mergesession.query(
                    models.Reading).filter_by(nodeId=nodeid))
                qry = qry.filter(models.Reading.locationId != None)
                qry = qry.filter(
                    sqlalchemy.func.date(models.Reading.time) == thedate)

                log.debug("--> Total of {0} readings to merge".format(
                    qry.count()))

                for reading in qry:
                    #Check if we have mapped the location
                    if reading.locationId is None:
                        log.warning("Reading {0} has no location !!!!!".format(
                            reading))
                        continue

                    maploc = self.locationmap.get(reading.locationId, None)
                    if maploc is None:
                        log.debug("Location {0} Has not been mapped".format(
                            reading.locationId))
                        maploc = self._maplocation(reading)

                    #log.debug("New Location is {0}.".format(maploc))
                    #make a copy and add to the new session
                    mainsession.merge(
                        models.Reading(time=reading.time,
                                       nodeId=reading.nodeId,
                                       locationId=maploc,
                                       typeId=reading.typeId,
                                       value=reading.value))

                #We also want to transfer the relevant nodestates
                log.info("Transfering NodeStates")
                qry = (mergesession.query(
                    models.NodeState).filter_by(nodeId=nodeid))
                qry = qry.filter(
                    sqlalchemy.func.date(models.NodeState.time) == thedate)
                log.debug("{0} Nodestates to transfer".format(qry.count()))
                for nodestate in qry:
                    mainsession.merge(
                        models.NodeState(time=nodestate.time,
                                         nodeId=nodestate.nodeId,
                                         parent=nodestate.parent,
                                         localtime=nodestate.localtime,
                                         seq_num=nodestate.seq_num,
                                         rssi=nodestate.rssi))
                #Close our sessions
                mainsession.flush()
                mainsession.close()
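
runnode relies on a DictDiff helper whose added(), removed() and changed() methods drive the merge, but the class itself is not shown. A minimal sketch consistent with how it is used above, following the common dict-difference recipe (the real implementation may differ):

class DictDiff(object):
    """Hedged sketch: compare two dicts of per-day counts, with the main
    db counts as 'current' and the merge db counts as 'past'."""

    def __init__(self, current, past):
        self.current = current
        self.past = past
        self.current_keys = set(current.keys())
        self.past_keys = set(past.keys())
        self.intersect = self.current_keys & self.past_keys

    def added(self):
        #Keys only in the main (current) dict
        return self.current_keys - self.intersect

    def removed(self):
        #Keys only in the merge (past) dict
        return self.past_keys - self.intersect

    def changed(self):
        #Keys in both, where the counts differ
        return set(k for k in self.intersect
                   if self.current[k] != self.past[k])
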
Example #13
    def run(self):
        """Single iteration of the mainloop"""
        #Wait for the next line of data from the connection
        data = self.con.readline()
        log = self.log
        if data:
            session = meta.Session()
            log.debug("> {0}".format(data.strip()))
            if "PKT:" in data:
                now = datetime.datetime.now()
                pktdata = data.strip().split(":")  #Get the main packet data
                pktitems = [int(x) for x in pktdata[1].split(",")]
                log.debug(">>PKT. {0}".format(pktitems))

                (nodeid, time, ctp_seq, hops, tx_pwr, msg_seq, parent, n_count,
                 temp, hum) = pktitems

                #Temperature / Humidity conversion
                temp = float(temp)
                temp = -39.6 + 0.01 * temp
                hum = float(hum)
                hum = -4 + 0.0405 * hum - 0.0000028 * (hum * hum)

                qry = session.query(models.Node).filter_by(id=nodeid)
                thenode = qry.first()
                if thenode is None:
                    log.info("No such node {0}".format(nodeid))
                    thenode = models.Node(id=nodeid)
                    session.add(thenode)
                    session.flush()

                #Then we can create a nodestate
                ns = models.NodeState(time=now,
                                      nodeId=nodeid,
                                      localtime=time,
                                      seq_num=msg_seq,
                                      parent=parent)
                session.add(ns)

                #And Readings
                rdg = models.Reading(time=now,
                                     nodeId=nodeid,
                                     typeId=HOPS,
                                     locationId=thenode.locationId,
                                     value=hops)
                session.add(rdg)

                rdg = models.Reading(time=now,
                                     nodeId=nodeid,
                                     typeId=TX_PWR,
                                     locationId=thenode.locationId,
                                     value=tx_pwr)
                session.add(rdg)

                rdg = models.Reading(time=now,
                                     nodeId=nodeid,
                                     typeId=N_COUNT,
                                     locationId=thenode.locationId,
                                     value=n_count)
                session.add(rdg)

                rdg = models.Reading(time=now,
                                     nodeId=nodeid,
                                     typeId=CTP_SEQ,
                                     locationId=thenode.locationId,
                                     value=ctp_seq)
                session.add(rdg)

                #Temperature
                rdg = models.Reading(time=now,
                                     nodeId=nodeid,
                                     typeId=0,
                                     locationId=thenode.locationId,
                                     value=temp)
                session.add(rdg)

                rdg = models.Reading(time=now,
                                     nodeId=nodeid,
                                     typeId=2,
                                     locationId=thenode.locationId,
                                     value=hum)
                session.add(rdg)
                session.commit()

                #Now neighbor table info
                print pktdata
                if len(pktdata) > 2:
                    neighinfo = pktdata[2:]
                    log.info("Neighbor Table is {0}".format(neighinfo))
                    for idx, item in enumerate(neighinfo):
                        print item, idx
                        vals = item.split(",")
                        rdg = models.Reading(time=now,
                                             nodeId=nodeid,
                                             typeId=2000 + idx,
                                             locationId=thenode.locationId,
                                             value=float(vals[0]))
                        session.add(rdg)
                session.commit()
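
The raw temperature and humidity values are converted with fixed linear/quadratic calibration coefficients (they look like the usual SHT1x-style formulas, though the exact sensor is an assumption). Pulled out as standalone helpers, the conversion reads:

def convert_temperature(raw):
    """Raw temperature count -> degrees Celsius (same coefficients as run())."""
    return -39.6 + 0.01 * float(raw)


def convert_humidity(raw):
    """Raw humidity count -> relative humidity in % (same coefficients as run())."""
    raw = float(raw)
    return -4.0 + 0.0405 * raw - 0.0000028 * (raw * raw)
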