Example #1
    def test_SplitTokens(self):
        data_file = HighlightedFile(DATA_FILE)

        # get the set of self-describing lines
        lines = data_file.lines()

        first_line = lines[0]
        assert first_line is not None

        tokens = first_line.tokens()
        self.assertEqual(7, len(tokens))

        first_token = tokens[0]

        assert first_token is not None

        self.assertEqual("951212", tokens[0].text())
        self.assertEqual("050000.000", tokens[1].text())
        self.assertEqual("MONDEO_44", tokens[2].text())
        self.assertEqual("@C", tokens[3].text())
        self.assertEqual("269.7", tokens[4].text())
        self.assertEqual("10.0", tokens[5].text())
        self.assertEqual("10", tokens[6].text())

        second_line = lines[1]
        assert second_line is not None

        tokens = second_line.tokens()
        self.assertEqual(5, len(tokens))

        self.assertEqual("//", tokens[0].text())
        self.assertEqual("EVENT", tokens[1].text())
        self.assertEqual("951212", tokens[2].text())
        self.assertEqual("050300.000", tokens[3].text())
        self.assertEqual("BRAVO", tokens[4].text())
Example #2
    def test_zero_number(self):
        with self.assertRaises(SystemExit) as cm:
            data_file = HighlightedFile(DATA_FILE, 0)
            lines = data_file.lines()
            print(lines)  # to avoid unused variable warning

        self.assertEqual(cm.exception.code, 1)
Example #3
    def test_SplitLines(self):
        data_file = HighlightedFile(DATA_FILE)

        # get the set of self-describing lines
        lines = data_file.lines()

        self.assertEqual(7, len(lines))
Example #4
    def test_all_lines(self):
        data_file = HighlightedFile(DATA_FILE)

        # get the set of self-describing lines
        lines = data_file.lines()

        chars = data_file.chars_debug()
        self.assertEqual(323, len(chars))
        self.assertEqual(7, len(lines))

        usages = chars[0].usages
        self.assertTrue(usages is not None, "usages should be declared")
Example #5
    def test_multi_lines(self):
        dataFile = HighlightedFile(DATA_FILE)

        tool = "TOOL"

        # get the set of self-describing lines
        lines = dataFile.lines()

        # check the contents of the print statement
        lineStr = str(lines[0])
        self.assertEqual("(0+(0, 55), 951212 050000.000 MONDEO_44   @C   269.7   10.0      10)", lineStr)

        for line in lines:
            tokens = line.tokens()

            if tokens[0].text() == "//":
                dateToken = tokens[2]
                timeToken = tokens[3]
                messageToken = tokens[4]

                dateVal = self.parse_timestamp(dateToken.text(), timeToken.text())
                dateTimeToken = combine_tokens(dateToken, timeToken)
                dateTimeToken.record(tool, "Event DTG", dateVal, "N/A")

                messageToken.record(tool, "Message", messageToken.text(), "N/A")
            else:
                dateToken = tokens[0]
                timeToken = tokens[1]
                vehicleToken = tokens[2]
                courseToken = tokens[4]
                speedToken = tokens[5]
                tempToken = tokens[6]

                dateVal = self.parse_timestamp(dateToken.text(), timeToken.text())
                dateTimeToken = combine_tokens(dateToken, timeToken)
                dateTimeToken.record(tool, "DTG", dateVal, "N/A")
                vehicleToken.record(tool, "NAME", vehicleToken.text(), "N/A")
                courseToken.record(tool, "Course", courseToken.text(), "Degs")
                speedToken.record(tool, "Speed", speedToken.text(), "M/Sec")
                tempToken.record(tool, "Temperature", tempToken.text(), "Deg C")

                # also send the temperature somewhere else
                tempToken.record("Third Party Temp Tracker", "Env Tmp", tempToken.text(), "Deg C")

        dataFile.export("track_lines.html", True)
Example #6
    def test_SplitCommaTokens(self):
        data_file = HighlightedFile(COMMA_FILE)

        # get the set of self-describing lines
        lines = data_file.lines()

        first_line = lines[0]
        assert first_line is not None

        # FixMe - this next constant should be declared in class module
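        # CSV-splitting pattern: the first alternative captures the contents of
        # quoted fields, the second captures unquoted fields, and the third
        # captures line breaks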
        csv_delim = r'(?:,"|^")(""|[\w\W]*?)(?=",|"$)|(?:,(?!")|^(?!"))([^,]*?)(?=$|,)|(\r\n|\n)'

        tokens = first_line.tokens(csv_delim, ",")
        self.assertEqual(7, len(tokens))

        self.assertEqual("951212", tokens[0].text())
Example #7
    def test_CreateChars(self):
        dataFile = HighlightedFile(DATA_FILE)

        # get the set of self-describing lines
        lines = dataFile.lines()
        self.assertEqual(7, len(lines))

        chars = dataFile.chars_debug()
        assert chars is not None

        self.assertEqual(323, len(chars))

        self.assertEqual("9", chars[0].letter)
        self.assertEqual("5", chars[1].letter)

        usages = chars[0].usages
        self.assertTrue(usages is not None, "usages should be declared")
        self.assertEqual(0, len(usages), "usages should start empty")
Example #8
    def test_RecordTokens(self):
        dataFile = HighlightedFile(DATA_FILE)

        # get the set of self-describing lines
        lines = dataFile.lines()

        firstLine = lines[0]
        assert firstLine is not None

        tokens = firstLine.tokens()
        self.assertEqual(7, len(tokens))

        tool = "TOOL"
        field = "FIELD"
        value = "VALUE"
        units = "UNITS"

        tokens[0].record(tool, field, value, units)

        chars = dataFile.chars_debug()
        assert chars is not None

        first_entry = chars[0]
        self.assertEqual("9", first_entry.letter)
        self.assertEqual(1, len(first_entry.usages))

        first_usage = first_entry.usages[0]
        self.assertTrue(first_usage is not None, "should have a usage")
        self.assertEqual("TOOL/FIELD", first_usage.tool_field)
        self.assertEqual("Value:VALUE Units:UNITS", first_usage.message)

        # make another record
        firstLine.record(field, value)
        self.assertEqual(2, len(first_entry.usages))
        second_usage = first_entry.usages[1]
        self.assertTrue(second_usage is not None, "should have a usage")
        self.assertEqual("FIELD", second_usage.tool_field)
        self.assertEqual("VALUE", second_usage.message)
Example #9
    def test_CombineSingleLine(self):
        dataFile = HighlightedFile(DATA_FILE, 1)

        # get the set of self-describing lines
        lines = dataFile.lines()

        tokens = lines[0].tokens()

        self.assertEqual(7, len(tokens))

        dateToken = tokens[0]
        timeToken = tokens[1]
        dateTimeToken = combine_tokens(dateToken, timeToken)

        date_time = self.parse_timestamp(dateToken.text(), timeToken.text())

        dateTimeToken.record("TOOL", "Date-Time", date_time, "N/A")

        chars = dataFile.chars_debug()
        assert chars is not None

        ctr = 0
        for char in chars:
            if ctr == 22:
                break
            ctr = ctr + 1

            if ctr > 0 and ctr <= 6:
                usages = char.usages
                self.assertEqual(1, len(usages))
                self.assertEqual("Value:1995-12-12 05:00:00 Units:N/A",
                                 usages[0].message)
            elif ctr > 7 and ctr <= 17:
                usages = char.usages
                self.assertEqual(1, len(usages))
                self.assertEqual("Value:1995-12-12 05:00:00 Units:N/A",
                                 usages[0].message)
Example #10
from data_highlight.highlighter import HighlightedFile

# NORMAL FILE
dataFile = HighlightedFile('data_highlight/file.txt')

# get the set of self-describing lines
lines = dataFile.lines()

for thisLine in lines:
    tokens = thisLine.tokens()

    # check the type
    firstToken = tokens[0]

    if firstToken.text() == "//":
        # event marker
        eventImporter = "Simple Event importer"
        dateToken = tokens[2]
        dateToken.record(eventImporter, "Date", dateToken.text())
        timeToken = tokens[3]
        timeToken.record(eventImporter, "Time", timeToken.text())
        eventToken = tokens[4]
        eventToken.record(eventImporter, "Event", eventToken.text())

        # and the whole-line record
        thisLine.record(eventImporter, "Whole line")

# output to file, display
dataFile.export('data_highlight/file.html', True)  # output file name is a placeholder (assumption)

for i in dataFile.chars_debug():
    print(i.letter, i.usages)  # assumed completion: the original snippet was truncated here
Example #11
    def test_more_than_lines_number(self):
        data_file = HighlightedFile(DATA_FILE, 200)

        lines = data_file.lines()
        self.assertEqual(len(lines), 7)
Example #12
    def test_SplitLoadFile(self):
        data_file = HighlightedFile(DATA_FILE)
        assert data_file is not None
Example #13
    def test_CombineLinesOnMultipleLines(self):
        dataFile = HighlightedFile(NMEA_FILE, 50)

        # get the set of self-describing lines
        lines = dataFile.lines()

        nmea_delim = "([^,]+|(?<=,)(?=,)|^(?=,)|(?<=,)$)"
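        # the pattern above splits an NMEA sentence on commas while preserving
        # empty fields: it matches runs of non-comma characters plus the
        # zero-width gaps between, before, or after commas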

        lat_tok = None
        lat_hem_tok = None
        long_tok = None
        long_hem_tok = None
        date_tok = None
        time_tok = None
        hdg_tok = None
        spd_tok = None

        date_line = None
        loc_line = None
        hdg_line = None
        spd_line = None

        for line in lines:
            tokens = line.tokens(nmea_delim, ",")
            if len(tokens) > 0:

                msg_type = tokens[1].text()

                if msg_type == "DZA":
                    date_tok = tokens[2]
                    time_tok = tokens[3]
                    date_line = line
                elif msg_type == "VEL":
                    spd_tok = tokens[6]
                    spd_line = line
                elif msg_type == "HDG":
                    hdg_tok = tokens[2]
                    hdg_line = line
                elif msg_type == "POS":
                    lat_tok = tokens[3]
                    lat_hem_tok = tokens[4]
                    long_tok = tokens[5]
                    long_hem_tok = tokens[6]
                    loc_line = line

                # do we have all we need?
                if date_tok and spd_tok and hdg_tok and lat_tok:

                    date_time = self.parse_timestamp(date_tok.text(),
                                                     time_tok.text())

                    loc = self.parse_location(lat_tok.text(),
                                              lat_hem_tok.text(),
                                              long_tok.text(),
                                              long_hem_tok.text())
                    spd = float(spd_tok.text())
                    hdg = float(hdg_tok.text())

                    fStr = "{:8.2f}"

                    msg = "Date:" + str(date_time) + ", Loc:(" + fStr.format(loc[0]) + ", " \
                        + fStr.format(loc[1]) + "), Spd:" + \
                        fStr.format(spd) + ", Hdg:" + fStr.format(hdg)

                    line_composite = combine_tokens(date_line, loc_line,
                                                    spd_line, hdg_line)

                    line_composite.record("NMEA Import",
                                          "Date:" + str(date_time), msg, "N/A")

                    date_tok = None
                    spd_tok = None
                    hdg_tok = None
                    lat_tok = None

        dataFile.export("nmea2.html")