Example No. 1
def testParseDataWithMissingOffSet(getFixtures):
    try:
        parser = DataParser()
        parsedData = parser.parseData(getFixtures.id, None, getFixtures.data)
        fail("parser.parseData should have thrown an error on null offset")
    except Exception as ex:
        assert (type(ex) is ValueError)
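These parseData tests (and the similar ones in Examples No. 2, 5, 7 and 8) assert the expected exception through a try/except/fail pattern. A more compact equivalent, assuming pytest is the test runner (which the getFixtures fixture and fail() suggest), is a minimal sketch with pytest.raises:

import pytest

def testParseDataWithMissingOffSet(getFixtures):
    parser = DataParser()
    # pytest.raises fails the test automatically if no ValueError is raised
    with pytest.raises(ValueError):
        parser.parseData(getFixtures.id, None, getFixtures.data)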
Example No. 2
def testParseDataWithNullData(getFixtures):
    try:
        parser = DataParser()
        parsedData = parser.parseData(getFixtures.id, 1, None)
        fail("parser.parseData should have thrown an error on null data")
    except Exception as ex:
        assert (type(ex) is ValueError)
Example No. 3
    def fillTableWithDataFromFile(self, fileName):
        # Parse data into booking class
        dataParser = DataParser(fileName)
        bookings = dataParser.GetAllBookings()

        # Show data in UI table
        self.dataTable.setRowCount(0)
        for row, booking in enumerate(bookings):
            self.dataTable.insertRow(row)

            dateItem = QtWidgets.QTableWidgetItem(booking.date)
            bookingTypeItem = QtWidgets.QTableWidgetItem(booking.bookingType)
            nameItem = QtWidgets.QTableWidgetItem(booking.name)
            purposeItem = QtWidgets.QTableWidgetItem(booking.purpose)
            valueItem = QtWidgets.QTableWidgetItem(booking.value)

            valueItem.setTextAlignment(Qt.AlignRight)

            self.dataTable.setItem(row, 0, dateItem)
            self.dataTable.setItem(row, 1, bookingTypeItem)
            self.dataTable.setItem(row, 2, nameItem)
            self.dataTable.setItem(row, 3, purposeItem)
            self.dataTable.setItem(row, 4, valueItem)

        self.dataTable.resizeColumnsToContents()
Example No. 4
    def __init__(self, parameters, sc):

        parameters = json.loads(parameters)
        schema = parameters.get('schema', None)
        header = parameters.get('header', False)
        self._parser = DataParser(schema, header)
        self._sc = sc
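The constructor above takes parameters as a JSON string and reads only the schema and header keys before building the parser. A minimal call sketch under that assumption (the class name MyReader, the schema value, and the SparkContext sc are hypothetical, not from the snippet):

import json

# Hypothetical wrapper class and SparkContext; only the JSON keys come from the snippet above.
params = json.dumps({"schema": "id INT, name STRING", "header": True})
reader = MyReader(params, sc)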
Example No. 5
def testParseDataWithNonJsonData(getFixtures):
    try:
        parser = DataParser()
        data = "this is not JSON"
        parsedData = parser.parseData(getFixtures.id, 1, data)
        fail("parser.parseData should have thrown an error on non JSON data")
    except Exception as ex:
        assert (type(ex) is ValueError)
Example No. 6
    def test_04_parser_parse_raw_data(self):
        input = "empid=D011\ngender=M\nage=29"
        parser = DataParser()
        parser.parse_raw_data(input)

        expected = [{'empid': 'D011', 'gender': 'M', 'age': '29'}]
        actual = parser.get_data()
        self.assertEqual(expected, actual)
Example No. 7
def testOffsetTimeGenerationWithInvalidArgs(getFixtures):
    parser = DataParser()
    try:

        strTimeNow = "2016-10-09T15:48:54"
        dtOffset = parser.getOffsetTime(strTimeNow, getFixtures.offset)
        fail("should have thrown exception b/c invalid timestamp")
    except Exception as ex:
        assert (type(ex) is ValueError)
Example No. 8
def testParseDataWithInvalidJsonData(getFixtures):
    try:
        parser = DataParser()
        data = {'foobar': 'goo'}
        parsedData = parser.parseData(getFixtures.id, 1, data)
        fail(
            "parser.parseData should have thrown an error on incorrect JSON data"
        )
    except Exception as ex:
        assert (type(ex) is KeyError)
Example No. 9
    def setUp(self):
        self.parser = DataParser()
        self.cmd_view = CmdView()
        self.file_view = FileView()
        self.validator = Validator()
        self.db = DatabaseView("test.db")
        self.vis = Visualiser()
        # self.val = Validator()
        self.controller = Controller(self.cmd_view, self.file_view,
                                     self.parser, self.validator, self.db,
                                     self.vis)
Example No. 10
    def setUp(self):
        self.parser = DataParser()
        self.cmd_view = CmdView()
        self.file_reader = FileReader()
        self.validator = Validator()
        self.db = Database("test.db")
        self.vis = Visualiser()
        self.val = Validator()
        self.serial = Serializer()
        self.controller = Controller(self.cmd_view, self.file_reader,
                                     self.parser, self.validator, self.db,
                                     self.vis, self.serial)
        self.init()
Example No. 11
    def test_10_controller_validate_fail(self):
        self.controller = Controller(self.cmd_view, self.file_reader,
                                     DataParser(), self.validator, self.db,
                                     self.vis, self.serial)
        captured_output = io.StringIO()
        sys.stdout = captured_output
        self.controller.validate()

        expected = "* No data has been read.\n-- Type 'help get' for more details.\n"
        actual = captured_output.getvalue()

        sys.stdout = sys.__stdout__
        self.assertEqual(expected, actual)
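This test swaps sys.stdout by hand and restores it afterwards; contextlib.redirect_stdout does the same bookkeeping and restores the stream even if validate() raises. A sketch of the same check under that approach, reusing the names from this test:

import io
from contextlib import redirect_stdout

def test_10_controller_validate_fail(self):
    self.controller = Controller(self.cmd_view, self.file_reader,
                                 DataParser(), self.validator, self.db,
                                 self.vis, self.serial)
    captured_output = io.StringIO()
    # stdout is restored automatically when the with-block exits
    with redirect_stdout(captured_output):
        self.controller.validate()

    expected = "* No data has been read.\n-- Type 'help get' for more details.\n"
    self.assertEqual(expected, captured_output.getvalue())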
Example No. 12
    def parse(self):
        struct = self.dictFromListBox(self.fields_list)
        struct["separator"] = self.tokenSeparator.get()
        parser = DataParser(struct)
        with open(self.logFilename.get(), 'r') as logfile:
            testline = logfile.readline()
            print(testline, struct)
            data = parser.parse_line(testline)
            if data is None:
                print("Could not match line structure to the log's [first] line")
                return None
        all_data = parser.parse_file(self.logFilename.get())
        self.generateStatistics(all_data)
Example No. 13
    def analyze(self):
        if self.fname is None:
            print("File is not set.")
            return
        datagetter = DataParser(self.fname[0])
        if self.regexTextField.toPlainText() is None:
            print("Regexp is not set.")
            return
        datagetter.init_regex(self.regexTextField.toPlainText())
        datagetter.filled_data()
        if self.allowStatistics.isChecked():
            datagetter.print_statistics()
        if self.csvReport.isChecked():
            datagetter.save_to_csv()
        if self.allowGraphs.isChecked():
            datagetter.print_graphs(self.separateGraphs.isChecked())
Example No. 14
def testParsedDataWithValidArgs(getFixtures):
    try:
        parser = DataParser()
        parsedData = parser.parseData(getFixtures.id, getFixtures.offset,
                                      getFixtures.data)
        assert parsedData is not None
        #logging.debug(parsedData)

        assert len(parsedData) > 0

        for data in parsedData:
            assert data['id'] is not None
            assert data['time'] is not None
            assert data['heartRate'] is not None
            assert data['coordinates'] is not None

    except Exception as ex:
        logging.debug(str(ex))
        fail("should not have an exception when parsedData has valid input")  # cause a failure
Example No. 15
def testOffsetTimeGeneration(getFixtures):
    try:
        parser = DataParser()

        strTimeNow = "2016-10-09T15:48:54Z"
        dtOffset = parser.getOffsetTime(strTimeNow, getFixtures.offset)
        assert (dtOffset.second == 59)

        strTimeNow = "2016-10-09T15:48:55Z"
        dtOffset = parser.getOffsetTime(strTimeNow, getFixtures.offset)
        assert (dtOffset.second == 0)
        assert (dtOffset.minute == 49)

        strTimeNow = "2016-10-09T15:48:56Z"
        dtOffset = parser.getOffsetTime(strTimeNow, getFixtures.offset)
        assert (dtOffset.second == 1)
        assert (dtOffset.minute == 49)

    except Exception as ex:
        logging.debug(str(ex))
        fail("should not have an exception when parsing valid time and offset"
             )  # cause a failure
Example No. 16
# Instantiate the models
model1 = BCNN()
model2 = TCNN()

# Load models from last saved state
model1.load_state_dict(torch.load("./bcnn_model.pt"))
model2.load_state_dict(torch.load("./tcnn_model.pt"))
model1.cuda()
model2.cuda()

# Set optimizer as Adam
optimizer = torch.optim.Adam(
    (list(model1.parameters()) + list(model2.parameters())), lr=1e-4)

# Load Training data
trainset = DataParser('01')
trainloader = torch.utils.data.DataLoader(trainset, batch_size=8, shuffle=True)

# Load testing data
testset = DataParser('04')
testloader = torch.utils.data.DataLoader(testset, batch_size=1, shuffle=True)

# criterion is MSELoss
criterion = nn.MSELoss().cuda()

epochs = 15
i = 0

for e in range(epochs):
    model1.train()
    model2.train()
Example No. 17
__author__ = 'Radim Spigel'
__version__ = '1.0'
import sys
from dataparser import print_help, DataParser
from qtgui import qt_main

if __name__ == "__main__":
    if len(sys.argv) > 1:
        print(sys.argv)
        if '-h' in sys.argv:
            print_help()
            sys.exit()
        datagetter = DataParser(sys.argv[1])
        datagetter.from_command_line(sys.argv[1:])
    elif len(sys.argv) > 6:
        print_help()
    else:
        qt_main()
Example No. 18
if __name__ == '__main__':
    parser = OptionParser()
    parser.add_option("-d", "--dataset", dest="db_type", default="berlin")
    parser.add_option("-p", "--dataset_path", dest="path", default="")

    (options, args) = parser.parse_args(sys.argv)

    db_type = options.db_type
    path = options.path

    print("Loading data from " + db_type + " dataset...")
    if db_type not in ('berlin',):
        sys.exit("Dataset not registered. Please create a method to read it")

    db = DataParser(path, db_type)

    # k_folds = len(db.test_sets)
    # splits = zip(db.train_sets, db.test_sets)

    callback_list = [
        EarlyStopping(
            monitor='acc',
            patience=1,
            verbose=1
        ),
        ModelCheckpoint(
            filepath='cnnlstm_model.h5',
            monitor='val_loss',
            save_best_only=True
        )
    ]
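The snippet stops at the callback definitions; in Keras these take effect when the list is passed to fit via its callbacks argument. A minimal sketch under that assumption (model and the training arrays are hypothetical names, not from the snippet):

# Hypothetical model and data arrays; callbacks= is the standard Keras hook.
history = model.fit(x_train, y_train,
                    validation_data=(x_val, y_val),
                    epochs=50,
                    batch_size=32,
                    callbacks=callback_list)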
Example No. 19
# Instantiate models
model1 = BCNN()
model2 = TCNN()

# Load models from files
model1.load_state_dict(torch.load("./bcnn_model.pt"))
model2.load_state_dict(torch.load("./tcnn_model.pt"))
model1 = model1.cuda()
model2 = model2.cuda()

# Set to eval mode
model1.eval()
model2.eval()

# Load Data from testing set
testset = DataParser('04')
testloader = torch.utils.data.DataLoader(testset, batch_size=1, shuffle=True)

total = 0
err_x = 0
err_z = 0
err_t = 0

# Run for all testing data
for counter, d in enumerate(testloader, 0):
    dtype = torch.cuda.FloatTensor
    x1 = d["img_l1"].type(dtype)
    x2 = d["img_l2"].type(dtype)
    yx = d["dx"].type(dtype)
    yz = d["dz"].type(dtype)
    yt = d["dth"].type(dtype)
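The per-tensor casts above pin the loop to CUDA; the device-agnostic way to do the same moves is .to(device), which also falls back to CPU when no GPU is present. A sketch over the same batch keys (only the device selection is new):

import torch

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

for counter, d in enumerate(testloader, 0):
    # .to(device) replaces the explicit torch.cuda.FloatTensor casts
    x1 = d["img_l1"].float().to(device)
    x2 = d["img_l2"].float().to(device)
    yx = d["dx"].float().to(device)
    yz = d["dz"].float().to(device)
    yt = d["dth"].float().to(device)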
Example No. 20
def get_logs():
    dp = DataParser(constants.log_file_path)
    mentions_by_ticker = dp.mentions_by_ticker()
Example No. 21
import time
from pprint import pprint

import matplotlib.pyplot as plt
import requests
import smopy
from google.transit import gtfs_realtime_pb2
from protobuf_to_dict import protobuf_to_dict

from dataparser import DataParser

FEED_URL = "https://gtfsrt.api.translink.com.au/Feed/SEQ"
MAP_FILENAME = "bne.bin"
UPDATE_EVERY = 30
BBOX = (152.7, 153.3, -27.7, -27.2)
data = DataParser()


def get_entity_type(routes_dict, route_id):
    # Get TransLink URL and extract portion of path with vehicle type
    # Edit this for different feeds
    return routes_dict[route_id]['route_url'].split('/')[5]


def get_feed(url):
    while True:
        response = requests.get(url)
        if response.status_code == 200:
            break
        print(f"Got status code {response.status_code}, retrying...")
        time.sleep(0.5)
Example No. 22
#!/usr/bin/python3
from dataparser import DataParser
import statistics

structure = {
    "my_date": "date",
    "my_int": "integer",
    "my_float": "float",
    "my_str": "str",
    "separator": ";"
}

parser = DataParser(structure)

## LINE PARSING
line = "1-12-2021;123;-123.456;xa"
x = parser.parse_line(line)
print("Single Line:\n" + str(x))

## FILE PARSING
all_data = parser.parse_file('my_log.log')
print("\nFull File: \n" + str(all_data))

## GENERATE STATISTICS
g = statistics.generate_data_x_data(all_data['my_int'], all_data['my_float'],
                                    "my_int", "my_float")
print(g)
Example No. 23
    def test_02_parser_to_dict(self):
        expected = {'empid': 'D011', 'gender': 'M', 'age': '29'}
        actual = DataParser()._to_dict(['empid=D011', 'gender=M', 'age=29'])
        self.assertEqual(expected, actual)
Example No. 24
    def test_01_parser_to_list(self):
        expected = ['empid=D011', 'gender=M', 'age=29']
        actual = DataParser()._to_list("empid=D011\ngender=M\nage=29")
        self.assertEqual(expected, actual)
Example No. 25
from controller import Controller
from cmdview import CmdView
from file_reader import FileReader
from dataparser import DataParser
from validator import Validator
from database import Database
from visualiser import Visualiser
from serializer import Serializer

if __name__ == "__main__":
    parser = DataParser()
    cmd_view = CmdView()
    file_reader = FileReader()
    validator = Validator()
    db = Database("test.db")
    vis = Visualiser()
    serial = Serializer()

    con = Controller(cmd_view, file_reader, parser, validator, db, vis, serial)
    cmd_view.set_controller(con)

    # run program
    cmd_view.cmdloop()
Example No. 26
    def test_03_parser_scrub_db_list(self):
        expected = [14, 25]
        actual = DataParser().scrub_db_list([(14, ), (25, )])
        self.assertEqual(expected, actual)
Example No. 27
    def getDataParser(self):
        return DataParser()