    def test_clear(self):
        s = self.device["mystream"]

        # This time we test an existing stream
        s.create({"type": "string"})

        l = Logger("test.db")
        l.serverurl = TEST_URL
        l.apikey = self.apikey

        l.addStream("mystream")

        l.insert("mystream", "test1")
        l.insert("mystream", "test2")

        l.cleardata()
        self.assertEqual(len(l), 0)
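
    # cleardata() drops all locally buffered datapoints without syncing them,
    # which is what the final len(l) == 0 assertion verifies.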
class LaptopLogger():
    def __init__(self,firstrun_callback=None):
        self.firstrun_callback = firstrun_callback

        self.syncer = None
        self.isrunning = False
        self.issyncing = False

        # Get the data gatherers. currentgatherers holds the ones that are ON;
        # gatherers holds ALL of them
        self.currentgatherers = {}
        self.gatherers = {}
        for p in getplugins():
            g = p()
            self.currentgatherers[g.streamname] = g
            self.gatherers[g.streamname] = g

        filedir = files.getFileFolder()
        cachefile = os.path.join(filedir,"cache.db")
        logging.info("Opening database " + cachefile)
        self.cache = Logger(cachefile,on_create=self.create_callback)

        # Disable the relevant gatherers
        for g in self.cache.data["disabled_gatherers"]:
            if g in self.currentgatherers:
                del self.currentgatherers[g]

        # If ConnectorDB is managed, start the executable
        self.localdir = os.path.join(filedir,"db")
        self.localrunning = False
        self.runLocal()

        # Start running the logger if it is supposed to be running
        if self.cache.data["isrunning"]:
            self.start()
        if self.cache.data["isbgsync"]:
            self.startsync()

    # This can be used to start a local version of ConnectorDB
    def runLocal(self):
        if self.cache.data["runlocal"] and not self.localrunning:
            logging.info("Starting ConnectorDB server")
            try:
                self.localrunning = True
                retcode = cdbmanager.Manager(self.localdir).start()
                # The startup method used on Windows doesn't return an error
                # code, so assume success there
                if platform.system() == "Windows":
                    return True
                logging.debug("Start return code: " + str(retcode))
                return retcode == 0
            except Exception as e:
                logging.error(str(e))
            self.localrunning = False
            return False
        return False

    def create_callback(self,c):
        logging.info("Creating new cache file...")

        c.data = {
            "runlocal": False,        # Whether to run a local ConnectorDB instance (the ConnectorDB server)
            "isrunning": False,       # Whether the logger is currently gathering data. NEEDS to start False - set to True later
            "isbgsync": False,        # Whether the logger automatically syncs with ConnectorDB. Needs to start False - set to True later
            "gathertime": 4.0,        # The logger gathers datapoints every this many seconds
            "disabled_gatherers": [], # The names of disabled gatherers
        }
        c.syncperiod = 60*60    # Sync once an hour

        # We now need to set the API key
        if self.firstrun_callback is not None:
            self.firstrun_callback(c)

    def removegatherer(self,g):
        logging.info("Removing gatherer " + g)
        if g in self.currentgatherers:
            del self.currentgatherers[g]
            if self.isrunning:
                self.gatherers[g].stop()
        # Save the setting
        d = self.cache.data
        if g not in d["disabled_gatherers"]:
            d["disabled_gatherers"].append(g)
            self.cache.data = d

    def addgatherer(self,g):
        logging.info("Adding gatherer " + g)
        if g not in self.currentgatherers:
            if self.isrunning:
                self.gatherers[g].start(self.cache)
            self.currentgatherers[g] = self.gatherers[g]
        # Save the setting
        d = self.cache.data
        if g in d["disabled_gatherers"]:
            d["disabled_gatherers"].remove(g)
            self.cache.data = d

    def gather(self):
        for g in self.currentgatherers:
            self.currentgatherers[g].run(self.cache)

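        # gather() re-arms itself: a fresh one-shot Timer fires again after
        # "gathertime" seconds, until stop() cancels it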
        self.syncer = threading.Timer(self.cache.data["gathertime"],self.gather)
        self.syncer.daemon = True
        self.syncer.start()

    # Start gathering data from the enabled gatherers
    def start(self):
        if not self.isrunning:
            logging.info("Start acquisition")
            d = self.cache.data
            d["isrunning"] = True
            self.cache.data = d

            # First, make sure all streams are ready to go in the cache
            for g in self.gatherers:
                if g not in self.cache:
                    gatherer = self.gatherers[g]
                    logging.info("Adding {} stream ({})".format(g, gatherer.streamschema))
                    nickname = ""
                    if hasattr(gatherer, "nickname"):
                        nickname = gatherer.nickname
                    datatype = ""
                    if hasattr(gatherer, "datatype"):
                        datatype = gatherer.datatype
                    self.cache.addStream(g, gatherer.streamschema,
                                         description=gatherer.description,
                                         nickname=nickname,
                                         datatype=datatype)

            for g in self.currentgatherers:
                self.currentgatherers[g].start(self.cache)

            self.isrunning = True

            self.gather()

    # Start the background sync with the ConnectorDB server
    def startsync(self):
        if not self.issyncing:
            logging.info("Start background sync")
            d = self.cache.data
            d["isbgsync"] = True
            self.cache.data = d
            self.cache.start()
            self.issyncing = True


    def stop(self,temporary=False):
        logging.info("Stop acquisition")

        if self.syncer is not None:
            self.syncer.cancel()
            self.syncer = None

        for g in self.currentgatherers:
            self.currentgatherers[g].stop()

        if not temporary:
            d = self.cache.data
            d["isrunning"] = False
            self.cache.data = d

        self.isrunning = False

    def stopsync(self):
        logging.info("Stop background sync")
        self.cache.stop()
        d = self.cache.data
        d["isbgsync"] = False
        self.cache.data = d
        self.issyncing = False

    def exit(self):
        # exit performs cleanup - in this case, shutting down the ConnectorDB database on exit
        if self.cache.data["runlocal"] and self.localrunning:
            logging.info("Shutting down ConnectorDB server")
            try:
                cdbmanager.Manager(self.localdir).stop()
                self.localrunning = False
            except Exception as e:
                logging.error(str(e))
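
# A minimal usage sketch, assuming the class above: the firstrun callback
# receives the freshly created cache (a Logger), whose serverurl/apikey are
# set the same way in the tests elsewhere in this file. Values are placeholders.
def example_firstrun(cache):
    cache.serverurl = "https://cdb.example.com"  # placeholder server URL
    cache.apikey = "your-device-apikey"          # placeholder API key

if __name__ == "__main__":
    laptoplogger = LaptopLogger(firstrun_callback=example_firstrun)
    laptoplogger.start()      # begin gathering datapoints
    laptoplogger.startsync()  # begin background syncing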
    def test_inserting(self):
        s = self.device["mystream"]

        def test_create(l):
            l.apikey = self.apikey
            l.serverurl = TEST_URL
            l.data = "Hello World!!!"
            l.syncperiod = 3.3

            l.addStream("mystream", {"type": "string"}, nickname="My nickname")

            haderror = False
            try:
                l.addStream("stream_DNE")
            except:
                haderror = True

            self.assertTrue(haderror)

        self.assertFalse(s.exists())
        l = Logger("test.db", on_create=test_create)
        l.ping()
        self.assertTrue(s.exists())
        self.assertTrue(s.nickname == "My nickname")

        self.assertEqual("logger_test/mydevice", l.name)
        self.assertEqual(self.apikey, l.apikey)
        self.assertEqual(TEST_URL, l.serverurl)

        self.assertEqual(0, len(l))

        self.assertTrue("mystream" in l)
        self.assertFalse("stream_DNE" in l)

        l.insert("mystream", "Hello World!")

        self.assertEqual(1, len(l))
        self.assertEqual("Hello World!!!", l.data)

        l.close()

        def nocreate(l):
            raise Exception("OnCreate was called on existing database!")

        # Now reload from file and make sure everything was saved
        l = Logger("test.db", on_create=nocreate)
        self.assertEqual(1, len(l))
        self.assertEqual(l.name, "logger_test/mydevice")
        self.assertTrue("mystream" in l)
        self.assertEqual(self.apikey, l.apikey)
        self.assertEqual(TEST_URL, l.serverurl)
        self.assertEqual(3.3, l.syncperiod)

        haderror = False
        try:
            l.insert("mystream", 5)  # Make sure that the schema is checked correctly
        except:
            haderror = True
        self.assertTrue(haderror)

        l.insert("mystream", "hi")

        self.assertEqual(2, len(l))
        self.assertEqual(0, len(s))
        l.sync()
        self.assertEqual(0, len(l))
        self.assertEqual(2, len(s))

        self.assertGreater(l.lastsynctime, time.time() - 1)

        self.assertEqual("Hello World!!!", l.data)

        self.assertEqual(s[0]["d"], "Hello World!")
        self.assertEqual(s[1]["d"], "hi")
        self.assertGreater(s[1]["t"], time.time() - 1)

        l.close()
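
    # Note: on_create runs only when the cache file does not yet exist, which
    # is why reopening test.db with the nocreate callback verifies persistence.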
    def test_overflow(self):
        # Patch the batch limit on the logger module itself (assumed to be
        # defined there) so that sync() is forced to split the upload
        import connectordb.logger
        dil = connectordb.logger.DATAPOINT_INSERT_LIMIT
        connectordb.logger.DATAPOINT_INSERT_LIMIT = 2
        s = self.device["mystream"]

        # This time we test an existing stream
        s.create({"type": "string"})

        l = Logger("test.db")
        l.serverurl = TEST_URL
        l.apikey = self.apikey

        l.addStream("mystream")

        l.insert("mystream", "test1")
        l.insert("mystream", "test2")
        l.insert("mystream", "test3")

        l.sync()

        self.assertEqual(3, len(s))
        self.assertEqual(0, len(l))

        connectordb.logger.DATAPOINT_INSERT_LIMIT = dil
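
    # With the limit forced to 2, the three datapoints cannot go out in one
    # request, so sync() must upload them in batches - and all three arrive.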
    def test_bgsync(self):
        s = self.device["mystream"]

        # This time we test an existing stream
        s.create({"type": "string"})

        l = Logger("test.db")
        l.serverurl = TEST_URL
        l.apikey = self.apikey

        l.addStream("mystream")

        l.syncperiod = 1

        self.assertEqual(0, len(s))
        self.assertEqual(0, len(l))

        l.start()
        l.insert("mystream", "hi")
        l.insert("mystream", "hello")
        self.assertEqual(0, len(s))
        self.assertEqual(2, len(l))
        time.sleep(1.1)
        self.assertEqual(2, len(s))
        self.assertEqual(0, len(l))
        l.insert("mystream", "har")
        self.assertEqual(2, len(s))
        self.assertEqual(1, len(l))
        time.sleep(1.1)
        self.assertEqual(3, len(s))
        self.assertEqual(0, len(l))
        l.stop()

        l.insert("mystream", "stopped")
        time.sleep(1.3)
        self.assertEqual(3, len(s))
        self.assertEqual(1, len(l))

        l.close()
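
# A condensed sketch of the workflow the tests above exercise, assuming the
# connectordb package layout (connectordb.logger) and a reachable server;
# the URL and API key are placeholders.
from connectordb.logger import Logger

def demo_setup(logger):
    # Runs once, when the cache file is first created
    logger.serverurl = "https://cdb.example.com"  # placeholder
    logger.apikey = "your-device-apikey"          # placeholder
    logger.addStream("mystream", {"type": "string"})
    logger.syncperiod = 60  # sync once a minute

if __name__ == "__main__":
    demo = Logger("cache.db", on_create=demo_setup)
    demo.start()                      # start background syncing
    demo.insert("mystream", "hello")  # buffered locally, uploaded on next sync
    demo.close()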
# Optionally wait before starting (e.g. for the network or database to come up)
if args.wait:
    time.sleep(args.wait)

print("cdbcontrol: Starting")

r = RemoteControl(sdevice="/dev/ttyS0")

def toggle(stream, datapoint):
    d = datapoint[-1]["d"]
    print(stream, datapoint)
    if stream.endswith("/rc3/downlink"):
        r.toggle(3, d)
    elif stream.endswith("/rc2/downlink"):
        r.toggle(2, d)
    # Echo the value back upstream to acknowledge the downlink
    return [{"t": time.time(), "d": d}]

l = Logger("/home/pi/Desktop/arduino/remotecontrol/database.db",on_create=initlogger)
cdb = l.connectordb

def create(stream, description="Remote controlled electric socket", downlink=True, schema={"type": "boolean"}):
    if not stream.exists():
        stream.create(schema, downlink=downlink, description=description)

create(cdb["rc3"])
create(cdb["rc2"])

# Start the logger in the background
l.start()

if args.notify:
    print("cdbcontrol: Notifying")
    # Now we notify that we're good