def run(self):
    """Generate fake sensor data forever until interrupted (Ctrl-C).

    Every READING_GAP seconds one Reading is stored for node37 (value
    counts up) and one for node38 (value counts down from 100).  Each
    time the counter reaches STATE_SWITCH a pair of NodeStates is
    written and the reported topology flips between "both nodes report
    to parent 1024" and "node37 reports via node38".
    """
    session = meta.Session()
    counter = 0
    directTopology = True  # True -> both nodes claim parent 1024
    sensorA = self.node37
    sensorB = self.node38
    try:
        while True:
            # Add a reading every N seconds
            log.debug("Adding New Reading {0}".format(datetime.utcnow()))
            session.add(models.Reading(time=datetime.utcnow(),
                                       nodeId=sensorA.id,
                                       locationId=sensorA.locationId,
                                       value=counter,
                                       typeId=0))
            session.add(models.Reading(time=datetime.utcnow(),
                                       nodeId=sensorB.id,
                                       locationId=sensorB.locationId,
                                       value=100 - counter,
                                       typeId=0))
            session.flush()
            if counter == STATE_SWITCH:
                log.debug("Switching States")
                counter = 0
                # Record the current (fake) routing topology as NodeStates
                if directTopology:
                    parents = [(sensorA.id, 1024), (sensorB.id, 1024)]
                else:
                    parents = [(sensorA.id, sensorB.id), (sensorB.id, 1024)]
                for childid, parentid in parents:
                    session.add(models.NodeState(time=datetime.utcnow(),
                                                 nodeId=childid,
                                                 parent=parentid,
                                                 localtime=0))
                directTopology = not directTopology
                session.flush()
            else:
                counter += 1
            time.sleep(READING_GAP)
            session.commit()
    except KeyboardInterrupt:
        # Persist whatever is pending before shutting down
        log.debug("Closing Everything down")
        session.flush()
        session.commit()
def saveData(self, nodeId, values):
    """Save a reading in the Database

    :var nodeId: String Containing the Current Cost Node Id
    :var values: Tuple containing sensor values as returned by
        ploggParseValue: (sampleTime, watts, kWh, current)
    """
    log = self.log
    log.debug("Saving data for {0} {1}".format(nodeId, values))
    session = meta.Session()
    # Map the Plogg/Current Cost node id onto our database node id
    mappedId = NODEMAP[nodeId]
    theNode = session.query(models.Node).filter_by(id=mappedId).first()
    #Fetch Sensor Types
    wattSensor = session.query(
        models.SensorType).filter_by(name="Plogg Watts").first()
    log.debug("Watt Sensor {0}".format(wattSensor))
    kWhSensor = session.query(
        models.SensorType).filter_by(name="Plogg kWh").first()
    log.debug("kW Sensor {0}".format(kWhSensor))
    currentSensor = session.query(
        models.SensorType).filter_by(name="Plogg Current").first()
    log.debug("A Sensor {0}".format(currentSensor))
    #Create if it doesnt Exist
    if not theNode:
        log.info("Node {0} / {1} does not exist, Creating".format(
            nodeId, mappedId))
        theNode = models.Node(id=mappedId, locationId=None)
        session.add(theNode)
        session.flush()
        log.debug("Node is {0}".format(theNode))
        #And we need to add a set of sensors
        for item in [wattSensor, kWhSensor, currentSensor]:
            theSensor = models.Sensor(sensorTypeId=item.id,
                                      nodeId=theNode.id,
                                      calibrationSlope=1.0,
                                      calibrationOffset=0.0)
            session.add(theSensor)
        session.flush()
    sampleTime, sampleWatts, samplekWh, sampleCurrent = values
    #Then Add the Readings
    # BUGFIX: these Readings previously passed the keyword ``location=``;
    # every other Reading constructed in this module uses ``locationId=``,
    # so the integer id was being handed to the wrong attribute.
    for sensorType, sampleValue in [(wattSensor, sampleWatts),
                                    (kWhSensor, samplekWh),
                                    (currentSensor, sampleCurrent)]:
        theReading = models.Reading(time=sampleTime,
                                    nodeId=theNode.id,
                                    locationId=theNode.locationId,
                                    typeId=sensorType.id,
                                    value=sampleValue)
        session.add(theReading)
    #And add a nodeState
    # NOTE(review): ``localtime`` receives the sample datetime here while
    # other callers in this file pass an integer (0) -- confirm the column
    # type accepts a datetime.
    theNodeState = models.NodeState(time=sampleTime,
                                    nodeId=theNode.id,
                                    parent=theNode.id,
                                    localtime=sampleTime)
    session.add(theNodeState)
    session.flush()
    session.commit()
    session.close()
def addMany(self):
    """Bulk-load BULK_SAMPLES back-dated fake readings into the database.

    Works like run() but instead of sleeping between samples, the sample
    timestamps start ``BULK_SAMPLES * READING_GAP * BULK_OFFSET`` seconds
    in the past and step forward by READING_GAP.  Each reading is also
    pushed into the matching RRD store (created on demand in RRDLIST).
    """
    session = meta.Session()
    localCount = 0
    stateOne = True
    node37 = self.node37
    node38 = self.node38
    #Work out a better start time
    currentTime = datetime.utcnow()
    #Calculate total seconds for samples
    deploymentSeconds = (BULK_SAMPLES * READING_GAP) * BULK_OFFSET
    fakeTime = currentTime - timedelta(seconds=deploymentSeconds)
    log.debug("Current Time is {0} -> Start time is {1}".format(
        currentTime, fakeTime))
    totalCount = 0
    try:
        while totalCount < BULK_SAMPLES:
            # ---- Node 37: reading counts up from 0
            thisRRD = RRDLIST.get((node37.id, 0, node37.locationId), None)
            if thisRRD is None:
                thisRRD = rrdstore.RRDStore(node37.id, 0, node37.locationId,
                                            startTime=fakeTime)
                RRDLIST[(node37.id, 0, node37.locationId)] = thisRRD
            theReading = models.Reading(time=fakeTime,
                                        nodeId=node37.id,
                                        locationId=node37.locationId,
                                        value=localCount,
                                        typeId=0)
            session.add(theReading)
            thisRRD.update(fakeTime, localCount)
            # ---- AND Node 38: reading counts down from 100
            thisRRD = RRDLIST.get((node38.id, 0, node38.locationId), None)
            if thisRRD is None:
                thisRRD = rrdstore.RRDStore(node38.id, 0, node38.locationId,
                                            startTime=fakeTime)
                RRDLIST[(node38.id, 0, node38.locationId)] = thisRRD
            thisReading = models.Reading(time=fakeTime,
                                         nodeId=node38.id,
                                         locationId=node38.locationId,
                                         value=100 - localCount,
                                         typeId=0)
            # BUGFIX: previously re-added ``theReading`` (node37's object),
            # so node38's reading was never stored.
            session.add(thisReading)
            # BUGFIX: the RRD must mirror the stored value (100 - localCount),
            # not node37's counter.
            thisRRD.update(fakeTime, 100 - localCount)
            session.flush()
            if localCount == STATE_SWITCH:
                log.debug("Switching States")
                localCount = 0
                #Add a node state recording the (fake) topology
                if stateOne:
                    theState = models.NodeState(time=fakeTime,
                                                nodeId=node37.id,
                                                parent=1024,
                                                localtime=0)
                    session.add(theState)
                    theState = models.NodeState(time=fakeTime,
                                                nodeId=node38.id,
                                                parent=1024,
                                                localtime=0)
                    session.add(theState)
                else:
                    theState = models.NodeState(time=fakeTime,
                                                nodeId=node37.id,
                                                parent=node38.id,
                                                localtime=0)
                    session.add(theState)
                    theState = models.NodeState(time=fakeTime,
                                                nodeId=node38.id,
                                                parent=1024,
                                                localtime=0)
                    session.add(theState)
                stateOne = not stateOne
                session.flush()
                # BUGFIX: was ``session.commit`` without parentheses -- a
                # bare attribute access that never actually committed.
                session.commit()
                log.debug("Commiting Samples {0}".format(totalCount))
            else:
                localCount += 1
            totalCount += 1
            fakeTime = fakeTime + timedelta(seconds=READING_GAP)
    except KeyboardInterrupt:
        log.debug("Closing Everything down")
        session.flush()
        session.commit()
    # Persist anything added since the last state-switch commit
    session.flush()
    session.commit()
def populate_readings(session = None):
    """Populate the database with 10 days of synthetic 5-minute samples.

    For nodes 837, 838, 1061 and 1063 (locations 1-4) a NodeState and a
    Reading are added every 5 minutes from 2013-01-01.  Reading values
    trace a slow sine wave around 18.0.  Node 1061 deliberately drops
    samples on 10-minute boundaries and node 1063 on 15-minute
    boundaries, to simulate imperfect yield.

    NOTE: Python 2 syntax (print statement, ``01`` octal-style literals).

    :param session: optional existing SQLAlchemy session; a new
        ``meta.Session()`` is created when omitted.
    """
    #The Deployment
    if not session:
        print "Creating a new Session"
        session = meta.Session()
    now = datetime.datetime(2013, 01, 01, 00, 00, 00)
    #Now we want to add a load of readings / Nodestates
    thetime = now# - datetime.timedelta(days = 10)
    endtime = now + datetime.timedelta(days=10)
    #print "START TIME {0}".format(starttime)
    thecount = 0.0
    seqnum = -1
    while thetime < endtime:
        #Increment and roll over the sequence number (one byte: 0..255)
        seqnum += 1
        if seqnum > 255:
            seqnum = seqnum - 255
        for nid in [837, 838, 1061, 1063]:
            locationid = 1
            if nid == 838:
                locationid = 2
            elif nid == 1061:
                locationid = 3
                #Sample very 10 minutes (50% Yield)
                # ``continue`` skips BOTH the NodeState and Reading insert
                if thetime.minute % 10 == 0:
                    continue
            elif nid == 1063:
                locationid = 4
                #And remove every 3rd sample
                if thetime.minute % 15 == 0:
                    continue
            ns = models.NodeState(nodeId = nid,
                                  parent = 1,
                                  time = thetime,
                                  seq_num = seqnum)
            session.add(ns)
            reading = models.Reading(nodeId = nid,
                                     typeId = 0,
                                     time = thetime,
                                     locationId = locationid,
                                     # sine wave 18 +/- 2 degrees
                                     value = 18.0+(2.0*math.sin(thecount)),
                                     )
            session.add(reading)
        #Increment the time
        thetime = thetime + datetime.timedelta(minutes=5)
        thecount = thecount + (3.14 / 144)
    # NOTE(review): commit appears three times (session / transaction /
    # session) -- presumably to satisfy both plain SQLAlchemy and a
    # zope.transaction-managed session; confirm which is actually needed.
    session.commit()
    transaction.commit()
    session.commit()
    session.close()
def test_uploadnodestate(self):
    """Do we upload nodestates correctly

    Seeds the local database with batches of NodeState/Reading rows at
    5-minute intervals, runs ``pusher.upload_nodestate`` per house, and
    checks both the returned transfer counts and the row counts visible
    through the remote REST interface.
    """
    #self.pusher.log.setLevel(logging.DEBUG)
    rurl = "{0}NodeState/".format(RESTURL)
    cutdate = datetime.datetime(2013, 2, 1, 0, 0, 0)
    #So now its time to check if nodestates are updated correctly
    session = self.Session()
    thehouse = session.query(models.House).filter_by(id=1).first()
    #As this will get passed in by the upload readings we need to fetch it now
    lastupdate = self.pusher.get_lastupdate(thehouse)
    expectdate = datetime.datetime(2013, 1, 10, 23, 55, 1)
    self.assertEqual(lastupdate, expectdate)
    #First off lets make sure that a run without anything to transfer works properly
    txcount = self.pusher.upload_nodestate(thehouse, lastupdate)
    self.assertEqual(txcount, 0)
    #Now add a load of nodestates for house One
    currentdate = cutdate
    enddate = datetime.datetime(2013, 2, 2, 0, 0, 0)  #One day
    session = self.Session()
    while currentdate < enddate:
        theitem = models.NodeState(time=currentdate, nodeId=837)
        session.add(theitem)
        theitem = models.Reading(time=currentdate, nodeId=837,
                                 locationId=1, typeId=0)
        session.add(theitem)
        currentdate = currentdate + datetime.timedelta(minutes=5)
    session.flush()
    session.commit()
    txcount = self.pusher.upload_nodestate(thehouse, lastupdate)
    #Modify last update
    # one day at 5-minute intervals == 288 nodestates transferred
    self.assertEqual(txcount, 288)
    lastupdate = currentdate
    #currentdate = currentdate - datetime.timedelta(minutes=5)
    #Add nodestates for house One and Two but only push house 1
    enddate = datetime.datetime(2013, 2, 3, 0, 0, 0)
    while currentdate <= enddate:
        theitem = models.NodeState(time=currentdate, nodeId=837)
        session.add(theitem)
        theitem = models.Reading(time=currentdate, nodeId=837,
                                 locationId=1, typeId=0)
        session.add(theitem)
        theitem = models.NodeState(time=currentdate, nodeId=838)
        session.add(theitem)
        theitem = models.Reading(time=currentdate, nodeId=838,
                                 locationId=2, typeId=0)
        session.add(theitem)
        theitem = models.NodeState(time=currentdate, nodeId=1061)
        session.add(theitem)
        theitem = models.Reading(time=currentdate, nodeId=1061,
                                 locationId=3, typeId=0)
        session.add(theitem)
        theitem = models.NodeState(time=currentdate, nodeId=1063)
        session.add(theitem)
        theitem = models.Reading(time=currentdate, nodeId=1063,
                                 typeId=0, locationId=4)
        session.add(theitem)
        currentdate = currentdate + datetime.timedelta(minutes=5)
    session.flush()
    session.commit()
    session.close()
    txcount = self.pusher.upload_nodestate(thehouse, lastupdate)
    self.assertEqual(txcount, 288 * 2)
    #Check nothing has been transfered to house2
    node1061expected = 144 * 10
    node1063expected = 1920  #round(288*0.666*10)
    ##session = self.rSession()
    qry = requests.get(rurl, params={"nodeId": 837})
    self.assertEqual(288 * 12, len(qry.json()))
    qry = requests.get(rurl, params={"nodeId": 838})
    self.assertEqual(288 * 11, len(qry.json()))
    qry = requests.get(rurl, params={"nodeId": 1061})
    self.assertEqual(node1061expected, len(qry.json()))
    qry = requests.get(rurl, params={"nodeId": 1063})
    self.assertEqual(node1063expected, len(qry.json()))
    # qry = session.query(models.NodeState).filter_by(nodeId=837)
    # self.assertEqual(qry.count(), 288*12)
    # qry = session.query(models.NodeState).filter_by(nodeId=838)
    # self.assertEqual(qry.count(), 288*11)
    # qry = session.query(models.NodeState).filter_by(nodeId=1061)
    # self.assertEqual(qry.count(), node1061expected)
    # qry = session.query(models.NodeState).filter_by(nodeId=1063)
    # self.assertEqual(qry.count(), node1063expected)
    # session.close()
    #Push house2
    session = self.Session()
    thehouse = session.query(models.House).filter_by(id=2).first()
    txcount = self.pusher.upload_nodestate(thehouse, lastupdate)
    self.assertEqual(txcount, 288 * 2)
    session.close()
    qry = requests.get(rurl, params={"nodeId": 837})
    self.assertEqual(288 * 12, len(qry.json()))
    qry = requests.get(rurl, params={"nodeId": 838})
    self.assertEqual(288 * 11, len(qry.json()))
    qry = requests.get(rurl, params={"nodeId": 1061})
    self.assertEqual(node1061expected + 288, len(qry.json()))
    qry = requests.get(rurl, params={"nodeId": 1063})
    self.assertEqual(node1063expected + 288, len(qry.json()))
def runnode(self, nodeid):
    """Run an instance for a given node

    This will run the checks for a given node: compare per-day sample
    counts between the merge (remote) database and the main database,
    copy whole missing days across, and merge days where the counts
    differ.

    :param nodeid: database id of the node to reconcile
    """
    log = self.log
    log.info("Running checks for Node: {0}".format(nodeid))
    mainsession = self.mainsession()
    mergesession = self.mergesession()
    #First we fetch counts of all data for these items
    log.debug("--> Fetching remote counts")
    mergecounts = self.getcounts(nodeid)
    log.debug("--> Fetching Main counts")
    maincounts = self.getcounts(nodeid, MAIN)
    #Next convert to a dictionary and run a dictdiff
    maindict = dict(maincounts)
    mergedict = dict(mergecounts)
    ddiff = DictDiff(maindict, mergedict)
    #Items that are in the Main but not in the Merged
    added = ddiff.added()
    #Items that are completely missing from the Merged
    removed = ddiff.removed()
    #Items where there is a different count than in the merged
    changed = ddiff.changed()
    log.debug("--> Added Items {0}".format(added))
    log.debug("--> Removed Items {0}".format(removed))
    log.debug("--> Changed Items {0}".format(changed))
    #The first nice and simple changes are to add the "removed" data as this
    #does not exist in the remote database
    if removed:
        log.info("--- {0} Complete days that need adding ---".format(
            len(removed)))
        for thedate in removed:
            maincount = maindict.get(thedate, 0)
            mergecount = mergedict.get(thedate)
            log.debug("--> {0} {1}/{2} Samples in main".format(
                thedate, maincount, mergecount))
            # Whole day is missing remotely: plain adds are safe
            self._transferreadings(mergesession, mainsession, nodeid,
                                   thedate, usemerge=False)
            self._transfernodestates(mergesession, mainsession, nodeid,
                                     thedate, usemerge=False)
        #Close our sessions
        # NOTE(review): flush() without commit() before close() -- confirm
        # the session is configured to persist on close, otherwise these
        # copies are lost.  mainsession is also reused below after close().
        mainsession.flush()
        mainsession.close()
    if changed:
        log.debug("---- Dealing with changed items ----")
        #For the moment I dont really care about merging and duplicates
        #We can fix the problem up later (nodestate table bugfix)
        log.info("--- {0} days that need merging ---".format(len(changed)))
        for thedate in changed:
            maincount = maindict.get(thedate, 0)
            mergecount = mergedict.get(thedate)
            log.debug("--> {0} {1}/{2} Samples in main".format(
                thedate, maincount, mergecount))
            if maincount > mergecount:
                log.warning(
                    "For Some Reason there are more items in the main db")
                continue
            # Day exists on both sides with differing counts: use merge so
            # duplicates are reconciled rather than re-inserted
            self._transferreadings(mergesession, mainsession, nodeid,
                                   thedate, usemerge=True)
            self._transfernodestates(mergesession, mainsession, nodeid,
                                     thedate, usemerge=True)
        #Close our sessions
        mainsession.flush()
        mainsession.close()

def _transferreadings(self, mergesession, mainsession, nodeid, thedate,
                      usemerge=False):
    """Copy one day of Readings for ``nodeid`` from merge DB to main DB.

    Locations are remapped through ``self.locationmap`` (populating it
    via ``self._maplocation`` on demand).  When ``usemerge`` is True,
    rows are reconciled with ``Session.merge`` and location-less rows
    are filtered out in SQL; otherwise rows are simply added.
    """
    log = self.log
    #Get the readings themselves
    qry = (mergesession.query(models.Reading).filter_by(nodeId=nodeid))
    if usemerge:
        # Keep ``!= None``: this is a SQLAlchemy column expression
        # (rendered as IS NOT NULL), not a Python identity test.
        qry = qry.filter(models.Reading.locationId != None)
    qry = qry.filter(
        sqlalchemy.func.date(models.Reading.time) == thedate)
    if usemerge:
        log.debug("--> Total of {0} readings to merge".format(qry.count()))
    for reading in qry:
        #Check if we have mapped the location
        if reading.locationId is None:
            log.warning("Reading {0} has no location !!!!!".format(
                reading))
            continue
        maploc = self.locationmap.get(reading.locationId, None)
        if maploc is None:
            log.debug("Location {0} Has not been mapped".format(
                reading.locationId))
            maploc = self._maplocation(reading)
        #make a copy and add to the new session
        newreading = models.Reading(time=reading.time,
                                    nodeId=reading.nodeId,
                                    locationId=maploc,
                                    typeId=reading.typeId,
                                    value=reading.value)
        if usemerge:
            mainsession.merge(newreading)
        else:
            mainsession.add(newreading)

def _transfernodestates(self, mergesession, mainsession, nodeid, thedate,
                        usemerge=False):
    """Copy one day of NodeStates for ``nodeid`` from merge DB to main DB.

    When ``usemerge`` is True rows go through ``Session.merge`` instead
    of ``Session.add``.
    """
    log = self.log
    #We also want to transfer the relevant nodestates
    log.info("Transfering NodeStates")
    qry = (mergesession.query(models.NodeState).filter_by(nodeId=nodeid))
    qry = qry.filter(
        sqlalchemy.func.date(models.NodeState.time) == thedate)
    log.debug("{0} Nodestates to transfer".format(qry.count()))
    for nodestate in qry:
        newstate = models.NodeState(time=nodestate.time,
                                    nodeId=nodestate.nodeId,
                                    parent=nodestate.parent,
                                    localtime=nodestate.localtime,
                                    seq_num=nodestate.seq_num,
                                    rssi=nodestate.rssi)
        if usemerge:
            mainsession.merge(newstate)
        else:
            mainsession.add(newstate)
def run(self):
    """Single iteration of the mainloop

    Reads one line from the serial connection; if it is a "PKT:" line,
    parses the packet fields, converts the raw temperature/humidity
    values, ensures the source Node row exists, then stores one
    NodeState plus Readings for hops, tx power, neighbour count, CTP
    sequence, temperature and humidity, and finally one Reading per
    neighbour-table entry.

    NOTE: Python 2 syntax (print statements).
    """
    # Wait for a line of data from the serial connection
    data = self.con.readline()
    log = self.log
    if data:
        session = meta.Session()
        log.debug("> {0}".format(data.strip()))
        if "PKT:" in data:
            now = datetime.datetime.now()
            # Colon-separated: "PKT" : main fields : neighbour entries...
            pktdata = data.strip().split(":")
            #Get the main packet data (comma-separated integers)
            pktitems = [int(x) for x in pktdata[1].split(",")]
            log.debug(">>PKT. {0}".format(pktitems))
            # NOTE: ``time`` here shadows any time module in this scope
            (nodeid, time, ctp_seq, hops, tx_pwr, msg_seq, parent,
             n_count, temp, hum) = pktitems
            #Temperature / Humidity conversion
            # NOTE(review): coefficients look like the SHT1x sensor
            # conversion formulas -- confirm against the sensor datasheet
            temp = float(temp)
            temp = -39.6 + 0.01 * temp
            hum = float(hum)
            hum = -4 + 0.0405 * hum - 0.0000028 * (hum * hum)
            # Ensure the sending node exists in the database
            qry = session.query(models.Node).filter_by(id=nodeid)
            thenode = qry.first()
            if thenode is None:
                log.info("No such node {0}".format(nodeid))
                thenode = models.Node(id=nodeid)
                session.add(thenode)
                session.flush()
            #Then we can create a nodestate
            ns = models.NodeState(time=now,
                                  nodeId=nodeid,
                                  localtime=time,
                                  seq_num=msg_seq,
                                  parent=parent)
            session.add(ns)
            #And Readings (one per packet metric)
            rdg = models.Reading(time=now, nodeId=nodeid, typeId=HOPS,
                                 locationId=thenode.locationId, value=hops)
            session.add(rdg)
            rdg = models.Reading(time=now, nodeId=nodeid, typeId=TX_PWR,
                                 locationId=thenode.locationId, value=tx_pwr)
            session.add(rdg)
            rdg = models.Reading(time=now, nodeId=nodeid, typeId=N_COUNT,
                                 locationId=thenode.locationId, value=n_count)
            session.add(rdg)
            rdg = models.Reading(time=now, nodeId=nodeid, typeId=CTP_SEQ,
                                 locationId=thenode.locationId, value=ctp_seq)
            session.add(rdg)
            #Temperature (typeId 0) and humidity (typeId 2)
            rdg = models.Reading(time=now, nodeId=nodeid, typeId=0,
                                 locationId=thenode.locationId, value=temp)
            session.add(rdg)
            rdg = models.Reading(time=now, nodeId=nodeid, typeId=2,
                                 locationId=thenode.locationId, value=hum)
            session.add(rdg)
            session.commit()
            #Now neighbor table info
            print pktdata
            if len(pktdata) > 2:
                neighinfo = pktdata[2:]
                log.info("Neighbor Table is {0}".format(neighinfo))
                # One Reading per neighbour entry, typeIds 2000, 2001, ...
                for idx, item in enumerate(neighinfo):
                    print item, idx
                    # NOTE(review): only the first comma-separated field is
                    # stored, and as a string -- confirm intended
                    vals = item.split(",")
                    rdg = models.Reading(time=now, nodeId=nodeid,
                                         typeId=2000 + idx,
                                         locationId=thenode.locationId,
                                         value=vals[0])
                    session.add(rdg)
                session.commit()