Example #1
    def setUp(self):

        self.parser = KnxParser("enheter.xml", "groupaddresses.csv", False,
                                False, {
                                    "1/1/14": "onoff",
                                    "1/1/15": "temp",
                                    "1/1/16": "time",
                                    "1/1/17": "%%"
                                })
Example #2
    def setUp(self):

        self.parser = KnxParser("enheter.xml", "groupaddresses.csv", False, False,
                      { "1/1/14" : "onoff",
                        "1/1/15" : "temp",
                        "1/1/16" : "time",
                        "1/1/17" : "%%"})
Example #3
    def test_init(self):

        p = KnxParser("enheter.xml", "groupaddresses.csv", False, False, {
            "1/1/14": "onoff",
            "1/1/15": "temp",
            "1/1/16": "time",
            "1/1/17": "%%"
        })
        self.assertIsInstance(p, KnxParser)
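
A note on the constructor shared by Examples #1-#3: judging by the call in Example #4 below, the two booleans are dumpGAtable and flanksOnly, and the final argument maps KNX group addresses to the value types the parser should decode. A minimal sketch, assuming a hypothetical import path and that "onoff", "temp", "time" and "%%" are the recognised type keys:

# Hypothetical import path; adjust to wherever KnxParser lives in the project.
from knxparser import KnxParser

GA_TYPES = {
    "1/1/14": "onoff",  # binary switch
    "1/1/15": "temp",   # temperature
    "1/1/16": "time",   # time of day
    "1/1/17": "%%",     # percentage
}

parser = KnxParser("enheter.xml",         # device definitions (XML)
                   "groupaddresses.csv",  # group address table (CSV)
                   False,                 # dumpGAtable
                   False,                 # flanksOnly
                   GA_TYPES)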
Example #4
class KnxLogViewer(object):
    def _readLinesFromFileOrCache(self, infile):

        # "infile" is already an open file object handed in by the caller;
        # plain assignment cannot raise, so no error handling is needed here.
        inf = infile

        print "Reading file: %s" % infile.name
        l = inf.readlines()
        inf.close()

        # Ok, so now we have the file content. However, parsing it
        # is expensive, so look for an already parsed cache of the file.
        # The cache files are named using the md5 of the original file,
        # so get that first...

        hsh = hashlib.md5()
        for ll in l:
            hsh.update(ll)

        cachename = hsh.hexdigest() + ".hex"
        try:
            inf = open(cachename, "r")
            clines = inf.readlines()
            # sanity check...
            if len(clines) == len(l):
                # Ok, seems good...
                print "Using cached input for file %s" % infile.name
                return (None, clines)
            else:
                print "Cached file found, but invalid length (%d != %d)" % (
                    len(clines), len(l))
        except IOError:
            # No luck in getting cached input, just use the new...
            print "No cached input for file %s found..." % infile.name

        return (cachename, l)

    def __init__(self,
                 devicesfilename,
                 groupaddrfilename,
                 infiles,
                 dumpGAtable,
                 types,
                 flanksOnly,
                 tail,
                 groupAddressSet=None,
                 hourly_avg=False,
                 start_time=None):

        self.delta = 0
        self.delta2 = 0
        self.pduCount = 0
        self.pduSkipped = 0
        self.h_avg = hourly_avg if hourly_avg != None else False
        self.dbgMsg = "groupAddressSet = %s" % str(groupAddressSet)
        start = time()

        #
        # Read in all the files...
        #
        lines = []
        lines_meta = []
        nextStart = 1  # starting line of the next file in the concatenated input
        for infile in infiles:
            cachename, newLines = self._readLinesFromFileOrCache(infile)
            lines.extend(newLines)
            lines_meta.append((infile.name, cachename, nextStart, len(newLines)))
            nextStart += len(newLines)

        print "Creating parser..."
        self.knx = KnxParser(devicesfilename, groupaddrfilename, dumpGAtable,
                             flanksOnly, types)

        if tail != 0:
            if tail < len(lines):
                lines = lines[len(lines) - tail:]

        if start_time != None:
            self.found_start = "Trying to locate start time..."
            print "Trying to locate start time..."
            for i in range(len(lines) - 1, 0, -1):
                try:
                    timestamp, pdu = lines[i].split(":LPDU:")
                except ValueError:
                    timestamp, pdu = lines[i].split("LPDU:")
                ts = mktime(strptime(timestamp, "%a %b %d %H:%M:%S %Y"))
                if ts < start_time:
                    print "Found start time!"
                    self.found_start = "Found start time!"
                    lines = lines[i + 1:]
                    break
        else:
            self.found_start = "not relevant"

        #
        # Parsing the input...
        #
        basetime = 0
        lineNo = 0
        origfilename, cachefilename, startLine, numLines = lines_meta.pop(0)

        for line in lines:
            # Skip empty lines...
            if len(line.strip()) < 1:
                continue

            # If filter specified, skip unwanted GAs
            if groupAddressSet != None:
                ignore = True
                for ga in groupAddressSet:
                    if line.find(ga) != -1:
                        ignore = False
                        break
                if ignore:
                    self.pduSkipped += 1
                    continue

            lineNo += 1

            # Differentiate between parsing new files and loading cached input
            if line[:2] == "@@":
                pass
                #print "loading: %s" %line.strip().decode("utf-8")
            else:
                # Split timestamp from rest...
                try:
                    timestamp, pdu = line.split(":LPDU:")
                except ValueError:
                    timestamp, pdu = line.split("LPDU:")

                try:
                    if basetime == 0:
                        basetime = mktime(
                            strptime(timestamp, "%a %b %d %H:%M:%S %Y"))
                        self.knx.setTimeBase(basetime)
                except ValueError:
                    printVerbose("timestamp error: %s" % timestamp)

                try:
                    self.knx.parseVbusOutput(lineNo, timestamp, pdu)
                    self.pduCount += 1
                except KnxParseException:
                    print "Failed: %s:  %s" % (lineNo, pdu)
                    sys.exit(1)

            # Check whether we have crossed into a new file, in which case we
            # should potentially update the cache file for the previous one...
            # Note that the --tail option disables creation of cache files.
            if (tail == 0) and lineNo == startLine + numLines - 1:
                if cachefilename != None:
                    print "update cache file for %s (%s) at %s" % (
                        origfilename, cachefilename, lineNo)
                    try:
                        of = open(cachefilename, "w")
                    except IOError:
                        print cachefilename
                    else:
                        self.knx.storeCachedInput(of, startLine)
                # Shift meta data to new file...
                try:
                    origfilename, cachefilename, startLine, numLines = lines_meta.pop(
                        0)
                except IndexError:  # lines_meta exhausted: that was the last file
                    print "Last file done, line no (%s)" % lineNo
                    origfilename, cachefilename, startLine, numLines = (None,
                                                                        None,
                                                                        None,
                                                                        None)

            if lineNo % 10000 == 0:
                print "Parsed %d lines..." % lineNo

        print "Parsed %d lines..." % lineNo
        self.dbgMsg += "Parsed %d lines..." % lineNo

        self.delta = time() - start

    def getPerfData(self):

        s = "<p>"
        s += "found_start: %s<p>" % self.found_start
        if self.delta != 0:
            s += "KnxLogViewer: Time used for init:    %f (%d PDUs parsed, %d skipped)<p>" % (
                self.delta, self.pduCount, self.pduSkipped)
            s += "Debug: %s<p>GlobalDebug:%s<p>" % (self.dbgMsg, globDbgMsg)
            self.delta = 0
        s += "KnxLogViewer: Time used for plotgen: %f<p>" % self.delta2
        s += "<p>"
        return s

    def getMinMaxValues(self, groupAddr):
        return self.knx.getStreamMinMaxValues(groupAddr)

    def plotLog(self, groupAddrs, plotImage, addHorLine=None):
        start = time()
        self.knx.plotStreams(groupAddrs, plotImage, addHorLine)
        self.delta2 = time() - start

    def printLog(self, groupAddrs):

        self.knx.printStreams(groupAddrs)
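
For context, a usage sketch of the class above (import path and log file names hypothetical). The constructor wants already-open file objects (it reads and closes them itself), tail as a trailing line count (0 disables tailing and enables cache writing), and start_time as seconds since the epoch:

from time import mktime, strptime

from knxmonitor import KnxLogViewer  # hypothetical import path

# The constructor expects open file objects, not file names.
logs = [open("front_door.hex"), open("living_room.hex")]  # example names

start = mktime(strptime("Fri Sep  4 06:00:00 2015", "%a %b %d %H:%M:%S %Y"))

viewer = KnxLogViewer("enheter.xml",          # devices XML
                      "groupaddresses.csv",   # group address CSV
                      logs,                   # open input files
                      False,                  # dumpGAtable
                      {"1/1/15": "temp"},     # group address -> type map
                      False,                  # flanksOnly
                      0,                      # tail: 0 = parse all, write caches
                      groupAddressSet=None,   # no GA filter
                      start_time=start)       # skip PDUs before this time

viewer.printLog(["1/1/15"])
print(viewer.getPerfData())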
Example #5
    def __init__(self,
                 devicesfilename,
                 groupaddrfilename,
                 infiles,
                 dumpGAtable,
                 types,
                 flanksOnly,
                 tail,
                 groupAddressSet=None,
                 hourly_avg=False,
                 start_time=None):

        self.delta = 0
        self.delta2 = 0
        self.pduCount = 0
        self.pduSkipped = 0
        self.h_avg = hourly_avg if hourly_avg != None else False
        self.dbgMsg = "groupAddressSet = %s" % str(groupAddressSet)
        start = time()

        #
        # Read in all the files...
        #
        lines = []
        lines_meta = []
        nextStart = 1  # starting line of the next file in the concatenated input
        for infile in infiles:
            cachename, newLines = self._readLinesFromFileOrCache(infile)
            lines.extend(newLines)
            lines_meta.append((infile.name, cachename, nextStart, len(newLines)))
            nextStart += len(newLines)

        print "Creating parser..."
        self.knx = KnxParser(devicesfilename, groupaddrfilename, dumpGAtable,
                             flanksOnly, types)

        if tail != 0:
            if tail < len(lines):
                lines = lines[len(lines) - tail:]

        if start_time != None:
            self.found_start = "Trying to locate start time..."
            print "Trying to locate start time..."
            for i in range(len(lines) - 1, 0, -1):
                try:
                    timestamp, pdu = lines[i].split(":LPDU:")
                except ValueError:
                    timestamp, pdu = lines[i].split("LPDU:")
                ts = mktime(strptime(timestamp, "%a %b %d %H:%M:%S %Y"))
                if ts < start_time:
                    print "Found start time!"
                    self.found_start = "Found start time!"
                    lines = lines[i + 1:]
                    break
        else:
            self.found_start = "not relevant"

        #
        # Parsing the input...
        #
        basetime = 0
        lineNo = 0
        origfilename, cachefilename, startLine, numLines = lines_meta.pop(0)

        for line in lines:
            # Skip empty lines...
            if len(line.strip()) < 1:
                continue

            # If filter specified, skip unwanted GAs
            if groupAddressSet != None:
                ignore = True
                for ga in groupAddressSet:
                    if line.find(ga) != -1:
                        ignore = False
                        break
                if ignore:
                    self.pduSkipped += 1
                    continue

            lineNo += 1

            # Differentiate between parsing new files and loading cached input
            if line[:2] == "@@":
                pass
                #print "loading: %s" %line.strip().decode("utf-8")
            else:
                # Split timestamp from rest...
                try:
                    timestamp, pdu = line.split(":LPDU:")
                except ValueError:
                    timestamp, pdu = line.split("LPDU:")

                try:
                    if basetime == 0:
                        basetime = mktime(
                            strptime(timestamp, "%a %b %d %H:%M:%S %Y"))
                        self.knx.setTimeBase(basetime)
                except ValueError:
                    printVerbose("timestamp error: %s" % timestamp)

                try:
                    self.knx.parseVbusOutput(lineNo, timestamp, pdu)
                    self.pduCount += 1
                except KnxParseException:
                    print "Failed: %s:  %s" % (lineNo, pdu)
                    sys.exit(1)

            # Check whether we have crossed into a new file, in which case we
            # should potentially update the cache file for the previous one...
            # Note that the --tail option disables creation of cache files.
            if (tail == 0) and lineNo == startLine + numLines - 1:
                if cachefilename != None:
                    print "update cache file for %s (%s) at %s" % (
                        origfilename, cachefilename, lineNo)
                    try:
                        of = open(cachefilename, "w")
                    except IOError:
                        print cachefilename
                    else:
                        self.knx.storeCachedInput(of, startLine)
                # Shift meta data to new file...
                try:
                    origfilename, cachefilename, startLine, numLines = lines_meta.pop(
                        0)
                except IndexError:  # lines_meta exhausted: that was the last file
                    print "Last file done, line no (%s)" % lineNo
                    origfilename, cachefilename, startLine, numLines = (None,
                                                                        None,
                                                                        None,
                                                                        None)

            if lineNo % 10000 == 0:
                print "Parsed %d lines..." % lineNo

        print "Parsed %d lines..." % lineNo
        self.dbgMsg += "Parsed %d lines..." % lineNo

        self.delta = time() - start
Example #6
class KnxLogViewer(object):

    def _readLinesFromFileOrCache(self, infile):

        # "infile" is already an open file object handed in by the caller;
        # plain assignment cannot raise, so no error handling is needed here.
        inf = infile

        print "Reading file: %s" % infile.name
        l = inf.readlines()
        inf.close()

        # Ok, so now we have the file content. However, parsing it
        # is expensive, so look for an already parsed cache of the file.
        # The cache file's first line is the MD5 sum of the input file, which
        # we use to see whether the cache is up to date. If it is not, we
        # re-parse the whole input file and update the cache. A future
        # enhancement could be to reuse the part of the cache that is valid.

        hsh = hashlib.md5()
        for ll in l:
            hsh.update(ll)
        infile_md5 = hsh.hexdigest()

        cachename = infile.name.replace(".hex",".cache")
        try:
            inf = open(cachename, "r")
            clines = inf.readlines()

            cache_md5 = clines.pop(0).strip()

            if cache_md5 == infile_md5:
                # Ok, seems good...
                print "Using cached input for file %s" %infile.name
                return (None, infile_md5, clines)
            else:
                print "Cached file found, but hash mismatch"
                print "FILE:  %s" %infile_md5
                print "CACHE: %s" %cache_md5
        except IOError:
            # No luck in getting cached input, just use the new...
            print "No cached input for file %s found..." %infile.name

        return (cachename, infile_md5, l)


    def __init__(self, devicesfilename, groupaddrfilename, infiles,
                 dumpGAtable, types, flanksOnly, tail, groupAddressSet = None,
                 hourly_avg = False, start_time=None):

        self.delta = 0
        self.delta2 = 0
        self.pduCount = 0
        self.pduSkipped = 0
        self.h_avg = hourly_avg if hourly_avg != None else False
        self.dbgMsg = "groupAddressSet = %s" %str(groupAddressSet)
        start = time()

        #
        # Read in all the files...
        #
        lines = []
        lines_meta = []
        nextStart = 1  # starting line of the next file in the concatenated input
        for infile in infiles:
            cachename, hash, newLines = self._readLinesFromFileOrCache(infile)
            lines.extend(newLines)
            lines_meta.append((infile.name, cachename, hash,
                               nextStart, len(newLines)))
            nextStart += len(newLines)


        print "Creating parser..."
        self.knx = KnxParser(devicesfilename, groupaddrfilename,
                             dumpGAtable, flanksOnly, types)


        if tail != 0:
            if tail < len(lines):
                lines = lines[len(lines) - tail :]


        if start_time != None:
            self.found_start = "Trying to locate start time..."
            print "Trying to locate start time..."
            for i in range(len(lines)-1, 0, -1):
                try:
                    timestamp, pdu = lines[i].split(":LPDU:")
                except ValueError:
                    timestamp, pdu = lines[i].split("LPDU:")
                ts = mktime(strptime(timestamp, "%a %b %d %H:%M:%S %Y"))
                if ts < start_time:
                    print "Found start time!"
                    self.found_start = "Found start time!"
                    lines = lines[i+1:]
                    break
        else:
            self.found_start = "not relevant"

        #
        # Parsing the input...
        #
        basetime = 0
        lineNo = 0
        origfilename, cachefilename, hash, startLine, numLines = lines_meta.pop(0)

        for line in lines:
            # Skip empty lines...
            if len(line.strip()) < 1:
                continue

            # If filter specified, skip unwanted GAs
            if groupAddressSet != None:
                ignore = True
                for ga in groupAddressSet:
                    if line.find(ga) != -1:
                        ignore = False
                        break
                if ignore:
                    self.pduSkipped += 1
                    continue

            lineNo += 1

            # Differentiate between parsing new files and loading cached input
            if line[:2] == "@@":
                pass
                #print "loading: %s" %line.strip().decode("utf-8")
            else:
                # Split timestamp from rest...
                try:
                    timestamp, pdu = line.split(":LPDU:")
                except ValueError:
                    timestamp, pdu = line.split("LPDU:")

                try:
                    if basetime == 0:
                        basetime = mktime(strptime(timestamp,
                                                "%a %b %d %H:%M:%S %Y"))
                        self.knx.setTimeBase(basetime)
                except ValueError:
                    printVerbose("timestamp error: %s" %timestamp)

                try:
                    self.knx.parseVbusOutput(lineNo, timestamp, pdu)
                    self.pduCount += 1
                except KnxParseException:
                    print "Failed: %s:  %s" %(lineNo, pdu)
                    sys.exit(1)


            # Check whether we have crossed into a new file, in which case we
            # should potentially update the cache file for the previous one...
            # Note that the --tail option disables creation of cache files.
            if (tail == 0) and lineNo == startLine + numLines - 1:
                if cachefilename != None:

                    print "update cache file for %s (%s) at %s" %(origfilename,
                                                                  cachefilename,
                                                                  lineNo)
                    try:
                        of = open(cachefilename, "w")
                    except IOError:
                        print cachefilename
                    else:
                        # write hash at first line
                        of.write("%s\n" % hash)
                        self.knx.storeCachedInput(of, startLine)

                # Shift meta data to new file...
                try:
                    origfilename, cachefilename, hash, startLine, numLines = lines_meta.pop(0)
                except IndexError:  # lines_meta exhausted: that was the last file
                    print "Last file done, line no (%s)" %lineNo
                    origfilename, cachefilename, hash, startLine, numLines = (None, None, None, None, None)


            if lineNo % 10000 == 0:
                print "Parsed %d lines..." %lineNo

        print "Parsed %d lines..." %lineNo
        self.dbgMsg += "Parsed %d lines..." %lineNo

        self.delta = time() - start

    def getPerfData(self):

        s = "<p>"
        s += "found_start: %s<p>"%self.found_start
        if self.delta != 0:
            s += "KnxLogViewer: Time used for init:    %f (%d PDUs parsed, %d skipped)<p>" %(self.delta, self.pduCount, self.pduSkipped)
            s += "Debug: %s<p>GlobalDebug:%s<p>" %(self.dbgMsg, globDbgMsg)
            self.delta = 0
        s += "KnxLogViewer: Time used for plotgen: %f<p>" %self.delta2
        s += "<p>"
        return s

    def getMinMaxValues(self, groupAddr):
        return self.knx.getStreamMinMaxValues(groupAddr)

    def plotLog(self, groupAddrs, plotImage, addHorLine=None):
        start = time()
        self.knx.plotStreams(groupAddrs, plotImage, addHorLine)
        self.delta2 = time() - start

    def printLog(self, groupAddrs):

        self.knx.printStreams(groupAddrs)

    def printJSON(self, groupAddrs):

        self.knx.printStreams(groupAddrs, "JSON")
Example #7
    def __init__(self, devicesfilename, groupaddrfilename, infiles,
                 dumpGAtable, types, flanksOnly, tail, groupAddressSet = None,
                 hourly_avg = False, start_time=None):

        self.delta = 0
        self.delta2 = 0
        self.pduCount = 0
        self.pduSkipped = 0
        self.h_avg = hourly_avg if hourly_avg != None else False
        self.dbgMsg = "groupAddressSet = %s" %str(groupAddressSet)
        start = time()

        #
        # Read in all the files...
        #
        lines = []
        lines_meta = []
        nextStart = 1  # starting line of the next file in the concatenated input
        for infile in infiles:
            cachename, hash, newLines = self._readLinesFromFileOrCache(infile)
            lines.extend(newLines)
            lines_meta.append((infile.name, cachename, hash,
                               nextStart, len(newLines)))
            nextStart += len(newLines)


        print "Creating parser..."
        self.knx = KnxParser(devicesfilename, groupaddrfilename,
                             dumpGAtable, flanksOnly, types)


        if tail != 0:
            if tail < len(lines):
                lines = lines[len(lines) - tail :]


        if start_time != None:
            self.found_start = "Trying to locate start time..."
            print "Trying to locate start time..."
            for i in range(len(lines)-1, 0, -1):
                try:
                    timestamp, pdu = lines[i].split(":LPDU:")
                except ValueError:
                    timestamp, pdu = lines[i].split("LPDU:")
                ts = mktime(strptime(timestamp, "%a %b %d %H:%M:%S %Y"))
                if ts < start_time:
                    print "Found start time!"
                    self.found_start = "Found start time!"
                    lines = lines[i+1:]
                    break
        else:
            self.found_start = "not relevant"

        #
        # Parsing the input...
        #
        basetime = 0
        lineNo = 0
        origfilename, cachefilename, hash, startLine, numLines = lines_meta.pop(0)

        for line in lines:
            # Skip empty lines...
            if len(line.strip()) < 1:
                continue

            # If filter specified, skip unwanted GAs
            if groupAddressSet != None:
                ignore = True
                for ga in groupAddressSet:
                    if line.find(ga) != -1:
                        ignore = False
                        break
                if ignore:
                    self.pduSkipped += 1
                    continue

            lineNo += 1

            # Differentiate between parsing new files and loading cached input
            if line[:2] == "@@":
                pass
                #print "loading: %s" %line.strip().decode("utf-8")
            else:
                # Split timestamp from rest...
                try:
                    timestamp, pdu = line.split(":LPDU:")
                except ValueError:
                    timestamp, pdu = line.split("LPDU:")

                try:
                    if basetime == 0:
                        basetime = mktime(strptime(timestamp,
                                                "%a %b %d %H:%M:%S %Y"))
                        self.knx.setTimeBase(basetime)
                except ValueError:
                    printVerbose("timestamp error: %s" %timestamp)

                try:
                    self.knx.parseVbusOutput(lineNo, timestamp, pdu)
                    self.pduCount += 1
                except KnxParseException:
                    print "Failed: %s:  %s" %(lineNo, pdu)
                    sys.exit(1)


            # Check whether we have crossed into a new file, in which case we
            # should potentially update the cache file for the previous one...
            # Note that the --tail option disables creation of cache files.
            if (tail == 0) and lineNo == startLine + numLines - 1:
                if cachefilename != None:

                    print "update cache file for %s (%s) at %s" %(origfilename,
                                                                  cachefilename,
                                                                  lineNo)
                    try:
                        of = open(cachefilename, "w")
                    except IOError:
                        print cachefilename
                    else:
                        # write hash at first line
                        of.write("%s\n" % hash)
                        self.knx.storeCachedInput(of, startLine)

                # Shift meta data to new file...
                try:
                    origfilename, cachefilename, hash, startLine, numLines = lines_meta.pop(0)
                except IndexError:  # lines_meta exhausted: that was the last file
                    print "Last file done, line no (%s)" %lineNo
                    origfilename, cachefilename, hash, startLine, numLines = (None, None, None, None, None)


            if lineNo % 10000 == 0:
                print "Parsed %d lines..." %lineNo

        print "Parsed %d lines..." %lineNo
        self.dbgMsg += "Parsed %d lines..." %lineNo

        self.delta = time() - start
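
Both versions of the parsing loop split each line on ":LPDU:", falling back to plain "LPDU:" for older logs, before running the timestamp through strptime. That two-step split, extracted as a sketch (the sample line is taken from the tests below):

from time import mktime, strptime

def split_log_line(line):
    # Newer vbusmonitor-style logs use ":LPDU:" as separator, older ones
    # "LPDU:"; unpacking into two names raises ValueError when the first
    # separator is absent, which triggers the fallback.
    try:
        timestamp, pdu = line.split(":LPDU:")
    except ValueError:
        timestamp, pdu = line.split("LPDU:")
    return mktime(strptime(timestamp, "%a %b %d %H:%M:%S %Y")), pdu

ts, pdu = split_log_line(
    "Fri Sep  4 06:15:03 2015:LPDU: BC 11 03 12 00 E2 00 80 00 21 "
    ":L_Data low from 6.12.31 to 1/1/15 hops: 06 T_DATA_XXX_REQ "
    "A_GroupValue_Write 01 ff")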
Example #8
class TestKnxParser(unittest.TestCase):

    def setUp(self):

        self.parser = KnxParser("enheter.xml", "groupaddresses.csv", False, False,
                      { "1/1/14" : "onoff",
                        "1/1/15" : "temp",
                        "1/1/16" : "time",
                        "1/1/17" : "%%"})

    def test_init(self):

        p = KnxParser("enheter.xml", "groupaddresses.csv", False, False,
                      { "1/1/14" : "onoff",
                        "1/1/15" : "temp",
                        "1/1/16" : "time",
                        "1/1/17" : "%%"})
        self.assertIsInstance(p, KnxParser)

    def test_setTimeBase(self):

        basetime = mktime(strptime("Fri Sep  4 06:15:03 2015",
                                "%a %b %d %H:%M:%S %Y"))
        try:
            self.parser.setTimeBase(basetime)
        except:
            self.fail("setTimeBase raised exception")

    def test_parseVbusOutput(self):

        self.parser.parseVbusOutput(0, "Fri Sep  4 06:15:03 2015", "Fri Sep  4 06:15:03 2015:LPDU: BC 11 03 12 00 E2 00 80 00 21 :L_Data low from 6.12.31 to 1/1/15 hops: 06 T_DATA_XXX_REQ A_GroupValue_Write 01 ff")
        self.assertEqual(len(self.parser.knxAddrStream["1/1/15"].telegrams), 1)

        self.parser.parseVbusOutput(1, "Fri Sep  4 06:15:03 2015", "Fri Sep  4 06:15:03 2015:LPDU: BC 11 03 12 00 E2 00 80 00 21 :L_Data low from 6.12.31 to 1/1/15 hops: 06 T_DATA_XXX_REQ A_GroupValue_Write 81 ff")
        self.assertEqual(len(self.parser.knxAddrStream["1/1/15"].telegrams), 2)

        self.parser.parseVbusOutput(2, "Fri Dec 10 14:08:59 2010", "Fri Dec 10 14:08:59 2010:LPDU: B0 FF FF 00 00 E3 00 C0 11 1B 66 :L_Data system from 15.15.255 to 0/0/0 hops: 06 T_DATA_XXX_REQ A_IndividualAddress_Write 1.1.27")
        self.assertEqual(len(self.parser.knxAddrStream["1/1/15"].telegrams), 2)

        self.parser.parseVbusOutput(3, "Fri Sep  4 06:15:03 2015", "Fri Sep  4 06:15:03 2015:LPDU: BC 11 03 12 00 E2 00 80 00 21 :L_Data low from 6.12.31 to 2/7/15 hops: 06 T_DATA_XXX_REQ A_GroupValue_Write 81 ff")
        self.assertEqual(len(self.parser.knxAddrStream["1/1/15"].telegrams), 2)

    @unittest.skip("Cache functionality not finished yet.")
    def test_storeCachedInput(self):

        pass

    def test_getStreamMinMaxValues(self):

        self.assertEqual(self.parser.getStreamMinMaxValues("1/1/15"), (None, None))
        self.parser.parseVbusOutput(0, "Fri Sep  4 06:15:03 2015", "Fri Sep  4 06:15:03 2015:LPDU: BC 11 03 12 00 E2 00 80 00 21 :L_Data low from 6.12.31 to 1/1/15 hops: 06 T_DATA_XXX_REQ A_GroupValue_Write 01 ff")
        self.parser.parseVbusOutput(1, "Fri Sep  4 06:15:03 2015", "Fri Sep  4 06:15:03 2015:LPDU: BC 11 03 12 00 E2 00 80 00 21 :L_Data low from 6.12.31 to 1/1/15 hops: 06 T_DATA_XXX_REQ A_GroupValue_Write 81 ff")
        self.assertEqual(self.parser.getStreamMinMaxValues("1/1/15"), ("-15.37","5.11"))

        self.assertEqual(self.parser.getStreamMinMaxValues("666/1/15"), (None, None))

    def test_printStreams(self):

        self.parser.parseVbusOutput(0, "Fri Sep  4 06:15:03 2015", "Fri Sep  4 06:15:03 2015:LPDU: BC 11 03 12 00 E2 00 80 00 21 :L_Data low from 6.12.31 to 1/1/15 hops: 06 T_DATA_XXX_REQ A_GroupValue_Write 01 ff")
        self.parser.parseVbusOutput(1, "Fri Sep  4 06:15:03 2015", "Fri Sep  4 06:15:03 2015:LPDU: BC 11 03 12 00 E2 00 80 00 21 :L_Data low from 6.12.31 to 1/1/15 hops: 06 T_DATA_XXX_REQ A_GroupValue_Write 81 ff")
        self.parser.printStreams(["1/1/15"])

    @unittest.skip("Does not play well with Travis CI environment at the moment...")
    def test_plotStreams(self):

        basetime = mktime(strptime("Fri Sep  4 06:15:00 2015",
                                "%a %b %d %H:%M:%S %Y"))
        self.parser.setTimeBase(basetime)
        self.parser.parseVbusOutput(0, "Fri Sep  4 06:15:03 2015", "Fri Sep  4 06:15:03 2015:LPDU: BC 11 03 12 00 E2 00 80 00 21 :L_Data low from 6.12.31 to 1/1/15 hops: 06 T_DATA_XXX_REQ A_GroupValue_Write 01 ff")
        self.parser.parseVbusOutput(1, "Fri Sep  4 06:15:06 2015", "Fri Sep  4 06:15:03 2015:LPDU: BC 11 03 12 00 E2 00 80 00 21 :L_Data low from 6.12.31 to 1/1/15 hops: 06 T_DATA_XXX_REQ A_GroupValue_Write 81 ff")
        self.parser.plotStreams(["1/1/15"], "testimg.png", 0.0)
Example #9
class TestKnxParser(unittest.TestCase):
    def setUp(self):

        self.parser = KnxParser("enheter.xml", "groupaddresses.csv", False,
                                False, {
                                    "1/1/14": "onoff",
                                    "1/1/15": "temp",
                                    "1/1/16": "time",
                                    "1/1/17": "%%"
                                })

    def test_init(self):

        p = KnxParser("enheter.xml", "groupaddresses.csv", False, False, {
            "1/1/14": "onoff",
            "1/1/15": "temp",
            "1/1/16": "time",
            "1/1/17": "%%"
        })
        self.assertIsInstance(p, KnxParser)

    def test_setTimeBase(self):

        basetime = mktime(
            strptime("Fri Sep  4 06:15:03 2015", "%a %b %d %H:%M:%S %Y"))
        try:
            self.parser.setTimeBase(basetime)
        except:
            self.fail("setTimeBase raised exception")

    def test_parseVbusOutput(self):

        self.parser.parseVbusOutput(
            0, "Fri Sep  4 06:15:03 2015",
            "Fri Sep  4 06:15:03 2015:LPDU: BC 11 03 12 00 E2 00 80 00 21 :L_Data low from 6.12.31 to 1/1/15 hops: 06 T_DATA_XXX_REQ A_GroupValue_Write 01 ff"
        )
        self.assertEqual(len(self.parser.knxAddrStream["1/1/15"].telegrams), 1)

        self.parser.parseVbusOutput(
            1, "Fri Sep  4 06:15:03 2015",
            "Fri Sep  4 06:15:03 2015:LPDU: BC 11 03 12 00 E2 00 80 00 21 :L_Data low from 6.12.31 to 1/1/15 hops: 06 T_DATA_XXX_REQ A_GroupValue_Write 81 ff"
        )
        self.assertEqual(len(self.parser.knxAddrStream["1/1/15"].telegrams), 2)

        self.parser.parseVbusOutput(
            2, "Fri Dec 10 14:08:59 2010",
            "Fri Dec 10 14:08:59 2010:LPDU: B0 FF FF 00 00 E3 00 C0 11 1B 66 :L_Data system from 15.15.255 to 0/0/0 hops: 06 T_DATA_XXX_REQ A_IndividualAddress_Write 1.1.27"
        )
        self.assertEqual(len(self.parser.knxAddrStream["1/1/15"].telegrams), 2)

        self.parser.parseVbusOutput(
            3, "Fri Sep  4 06:15:03 2015",
            "Fri Sep  4 06:15:03 2015:LPDU: BC 11 03 12 00 E2 00 80 00 21 :L_Data low from 6.12.31 to 2/7/15 hops: 06 T_DATA_XXX_REQ A_GroupValue_Write 81 ff"
        )
        self.assertEqual(len(self.parser.knxAddrStream["1/1/15"].telegrams), 2)

    @unittest.skip("Cache functionality not finished yet.")
    def test_storeCachedInput(self):

        pass

    def test_getStreamMinMaxValues(self):

        self.assertEqual(self.parser.getStreamMinMaxValues("1/1/15"),
                         (None, None))
        self.parser.parseVbusOutput(
            0, "Fri Sep  4 06:15:03 2015",
            "Fri Sep  4 06:15:03 2015:LPDU: BC 11 03 12 00 E2 00 80 00 21 :L_Data low from 6.12.31 to 1/1/15 hops: 06 T_DATA_XXX_REQ A_GroupValue_Write 01 ff"
        )
        self.parser.parseVbusOutput(
            1, "Fri Sep  4 06:15:03 2015",
            "Fri Sep  4 06:15:03 2015:LPDU: BC 11 03 12 00 E2 00 80 00 21 :L_Data low from 6.12.31 to 1/1/15 hops: 06 T_DATA_XXX_REQ A_GroupValue_Write 81 ff"
        )
        self.assertEqual(self.parser.getStreamMinMaxValues("1/1/15"),
                         ("-15.37", "5.11"))

        self.assertEqual(self.parser.getStreamMinMaxValues("666/1/15"),
                         (None, None))

    def test_printStreams(self):

        self.parser.parseVbusOutput(
            0, "Fri Sep  4 06:15:03 2015",
            "Fri Sep  4 06:15:03 2015:LPDU: BC 11 03 12 00 E2 00 80 00 21 :L_Data low from 6.12.31 to 1/1/15 hops: 06 T_DATA_XXX_REQ A_GroupValue_Write 01 ff"
        )
        self.parser.parseVbusOutput(
            1, "Fri Sep  4 06:15:03 2015",
            "Fri Sep  4 06:15:03 2015:LPDU: BC 11 03 12 00 E2 00 80 00 21 :L_Data low from 6.12.31 to 1/1/15 hops: 06 T_DATA_XXX_REQ A_GroupValue_Write 81 ff"
        )
        self.parser.printStreams(["1/1/15"])

    @unittest.skip(
        "Does not play well with Travis CI environment at the moment...")
    def test_plotStreams(self):

        basetime = mktime(
            strptime("Fri Sep  4 06:15:00 2015", "%a %b %d %H:%M:%S %Y"))
        self.parser.setTimeBase(basetime)
        self.parser.parseVbusOutput(
            0, "Fri Sep  4 06:15:03 2015",
            "Fri Sep  4 06:15:03 2015:LPDU: BC 11 03 12 00 E2 00 80 00 21 :L_Data low from 6.12.31 to 1/1/15 hops: 06 T_DATA_XXX_REQ A_GroupValue_Write 01 ff"
        )
        self.parser.parseVbusOutput(
            1, "Fri Sep  4 06:15:06 2015",
            "Fri Sep  4 06:15:03 2015:LPDU: BC 11 03 12 00 E2 00 80 00 21 :L_Data low from 6.12.31 to 1/1/15 hops: 06 T_DATA_XXX_REQ A_GroupValue_Write 81 ff"
        )
        self.parser.plotStreams(["1/1/15"], "testimg.png", 0.0)