Example #1
0
 def test_inline_definitions_with_metadata(self):
     """When metadata and a metadata_interval are supplied, parsed records
     should carry a 'metadata' dict mapping matched fields to their
     metadata entries."""
     field_metadata = {
         'CounterUnits': {'CounterUnitsMetadata'},
         'GravityValue': {'GravityValueMetadata'},
         'GravityError': {'GravityErrorMetadata'}
     }
     parser = RecordParser(
         record_format='{timestamp:ti} {field_string}',
         field_patterns=['{:d}:{GravityValue:d} {GravityError:d}'],
         metadata=field_metadata,
         metadata_interval=1)
     record = parser.parse_record('2017-11-10T01:00:06.572Z 01:024557 00')
     expected = {
         'timestamp': 1510275606.572,
         'fields': {
             'GravityValue': 24557,
             'GravityError': 0
         },
         'metadata': {
             'fields': {
                 'GravityError': {'GravityErrorMetadata'},
                 'GravityValue': {'GravityValueMetadata'}
             }
         }
     }
     self.assertDictEqual(record, expected)
Example #2
0
    def test_read(self):
        """Round-trip test of DatabaseWriter/DatabaseReader: write parsed
        sample records, read them back one at a time, then via read_range()
        and read_time_range().

        NOTE(review): requires a local MySQL 'test' database and the NBP
        device definition file; the '******' credentials are masked
        placeholders that must be filled in for a real run.
        """
        # Create records using synthetic, randomized data id and write to db
        parser = RecordParser(definition_path='local/usap/nbp/devices/nbp_devices.yaml',
                              return_das_record=True)
        writer = DatabaseWriter(database='test', host='localhost',
                                user='******', password='******')
        writer.db.exec_sql_command('truncate table data')

        reader = DatabaseReader(database='test', host='localhost',
                                user='******', password='******')

        # Write to database, automatically creating table
        records = [parser.parse_record(s) for s in SAMPLE_DATA]
        index = 0
        for record in records:
            logging.debug('Writing record "%s"', str(record))
            writer.write(record)

            # Drain the writer's own connection after each write; every
            # non-empty read must match the next entry in SAMPLE_RESULTS.
            result = True
            while result:
                result = writer.db.read()
                logging.info('Read %d: %s', index, result)
                if result:
                    self.assertEqual(result, SAMPLE_RESULTS[index])
                    index += 1

        # Test range: read a range that should include 3 records
        results = reader.read_range(start=2, stop=5)
        self.assertEqual(results, {'S330GPSDay': [(1509778447.17313, 7)],
                                   'S330GPSMonth': [(1509778447.17313, 8)],
                                   'S330GPSYear': [(1509778447.17313, 2014)]})

        # Next record should be one after that
        result = reader.read()
        self.assertEqual(result, {'S330GPSTime': [(1509778449.210395, 2034.17)]})

        # Test time_range: read a range that should include 3 records
        results = reader.read_time_range(start_time=1509778449.210395,
                                         stop_time=1509778453.290818)
        self.assertEqual(results, {'S330GPSTime': [(1509778451.248784, 2035.17),
                                                   (1509778453.290817, 2036.17)],
                                   'S330GPSDay': [(1509778451.248784, 7),
                                                  (1509778453.290817, 7)],
                                   'S330GPSMonth': [(1509778451.248784, 8),
                                                    (1509778453.290817, 8)],
                                   'S330GPSYear': [(1509778451.248784, 2014),
                                                   (1509778453.290817, 2014)]})
        # Next record should be one after that
        result = reader.read()
        self.assertEqual(result, {'S330GPSTime': [(1509778455.328116, 2037.17)]})

        writer.db.close()
        reader.db.close()
Example #3
0
    def test_default_parser(self):
        """Smoke test: parse every sample record from each device type with
        the default parser configuration, logging each line and result."""
        parser = RecordParser(definition_path=self.device_filename)

        all_lines = GRV1_RECORDS + KNUD_RECORDS + SEAP_RECORDS
        for line in all_lines:
            logging.info('line:\n%s', line)
            parsed = parser.parse_record(line)
            logging.info('record:\n%s', pprint.pformat(parsed))
Example #4
0
    def test_inline_definitions(self):
        """Parsers built from inline record_format/field_patterns (no
        definition file) should extract the expected fields."""
        parser = RecordParser(record_format='{timestamp:ti} {field_string}',
                              field_patterns=[
                                  '{CounterUnits:d}:{GravityValue:d} {GravityError:d}'])
        parsed = parser.parse_record('2017-11-10T01:00:06.572Z 01:024557 00')
        expected = {'timestamp': 1510275606.572,
                    'fields': {'CounterUnits': 1,
                               'GravityValue': 24557,
                               'GravityError': 0}}
        self.assertDictEqual(parsed, expected)

        # Multiple patterns: the parser should pick the matching $PSXN message
        parser = RecordParser(
            record_format='{timestamp:ti} {field_string}',
            field_patterns=[
                '$PSXN,20,{HorizQual:d},{HeightQual:d},{HeadingQual:d},{RollPitchQual:d}*{:x}',
                '$PSXN,22,{GyroCal:f},{GyroOffset:f}*{:x}',
                '$PSXN,23,{Roll:f},{Pitch:f},{HeadingTrue:f},{Heave:f}*{:x}',
            ])

        parsed = parser.parse_record('2017-11-04T07:00:39.291859Z $PSXN,20,1,0,0,0*3A')
        expected = {'timestamp': 1509778839.291859,
                    'fields': {'HorizQual': 1,
                               'HeightQual': 0,
                               'HeadingQual': 0,
                               'RollPitchQual': 0}}
        self.assertDictEqual(parsed, expected)

        parsed = parser.parse_record('2017-11-04T07:00:39.547251Z $PSXN,22,0.44,0.74*3A')
        expected = {'timestamp': 1509778839.547251,
                    'fields': {'GyroCal': 0.44,
                               'GyroOffset': 0.74}}
        self.assertDictEqual(parsed, expected)

        parsed = parser.parse_record('2017-11-04T07:00:39.802690Z $PSXN,23,-1.47,0.01,235.77,-0.38*34')
        expected = {'timestamp': 1509778839.802690,
                    'fields': {'Roll': -1.47,
                               'Pitch': 0.01,
                               'HeadingTrue': 235.77,
                               'Heave': -0.38}}
        self.assertDictEqual(parsed, expected)
Example #5
0
    def test_mysql_connector(self):
        """Exercise MySQLConnector reads: sequential single-record reads,
        field-filtered reads after a seek, and batch (num_records=None)
        reads.

        Requires a local MySQL 'test' database (the '******' credentials
        are masked placeholders) and the NBP device definition file.
        """
        parser = RecordParser(
            definition_path='local/usap/nbp/devices/nbp_devices.yaml',
            return_das_record=True)
        try:
            db = MySQLConnector(database='test',
                                host='localhost',
                                user='******',
                                password='******')
            db.exec_sql_command('truncate table data')
        except Exception:
            # self.fail() is the idiomatic unittest way to abort with a
            # message (rather than assertTrue(False, ...)).
            self.fail('Unable to create database connection. Have you '
                      'set up the appropriate setup script in database/setup?')

        records = [parser.parse_record(s) for s in SAMPLE_DATA]
        for record in records:
            db.write_record(record)

        # Sequential single-record reads must match SINGLE_RESULTS in
        # order, then come up empty ({}) when exhausted.
        for r in SINGLE_RESULTS:
            result = db.read()
            self.assertEqual(result, r)
            logging.info('Read record: %s', str(result))
        self.assertEqual(db.read(), {})

        logging.info('###### Resetting')
        db.seek(0, 'start')
        # Field-filtered reads after rewinding must match RESET_RESULTS.
        for r in RESET_RESULTS:
            result = db.read('S330CourseTrue,S330CourseMag')
            self.assertEqual(result, r)
            logging.info('Read record: %s', str(result))
        self.assertEqual(db.read('S330CourseTrue,S330CourseMag'), {})

        logging.info('###### Resetting')
        db.seek(0, 'start')
        # num_records=None requests all available records at once.
        for r in BATCH_RESULTS:
            result = db.read('S330CourseTrue,S330CourseMag', num_records=None)
            self.assertEqual(result, r)
            logging.info('Read record: %s', str(result))
        self.assertEqual(
            db.read('S330CourseTrue,S330CourseMag', num_records=None), {})

        db.close()
Example #6
0
    def test_parse_records_das_record(self):
        """With return_das_record=True, parse_record() should return
        DASRecord objects equal to the expected field sets."""
        parser = RecordParser(definition_path=self.device_filename,
                              return_das_record=True)

        expected = DASRecord(data_id='grv1', timestamp=1510275606.572,
                             fields={'Grv1Error': 0, 'Grv1Value': 24557})
        self.assertEqual(parser.parse_record(GRV1_RECORDS[0]), expected)

        expected = DASRecord(data_id='seap', timestamp=1509778839.291859,
                             message_type='PSXN20',
                             fields={'Seap200HorizQual': 1,
                                     'Seap200HeightQual': 0,
                                     'Seap200HeadingQual': 0,
                                     'Seap200RollPitchQual': 0})
        self.assertEqual(parser.parse_record(SEAP_RECORDS[0]), expected)

        expected = DASRecord(data_id='seap', timestamp=1509778839.547251,
                             message_type='PSXN22',
                             fields={'Seap200GyroCal': 0.44,
                                     'Seap200GyroOffset': 0.74})
        self.assertEqual(parser.parse_record(SEAP_RECORDS[1]), expected)

        expected = DASRecord(data_id='seap', timestamp=1509778839.802690,
                             message_type='PSXN23',
                             fields={'Seap200Roll': -1.47,
                                     'Seap200Pitch': 0.01,
                                     'Seap200HeadingTrue': 235.77,
                                     'Seap200Heave': -0.38})
        self.assertEqual(parser.parse_record(SEAP_RECORDS[2]), expected)
Example #7
0
    def test_parse_records_json(self):
        """With return_json=True, parse_record() returns a JSON-encoded
        string; decode each result and compare against the expected dict."""
        parser = RecordParser(definition_path=self.device_filename, return_json=True)

        decoded = json.loads(parser.parse_record(GRV1_RECORDS[0]))
        self.assertDictEqual(decoded,
                             {'data_id': 'grv1', 'timestamp': 1510275606.572,
                              'fields': {'Grv1Error': 0, 'Grv1Value': 24557}})

        decoded = json.loads(parser.parse_record(SEAP_RECORDS[0]))
        self.assertDictEqual(decoded,
                             {'data_id': 'seap',
                              'timestamp': 1509778839.291859,
                              'message_type': 'PSXN20',
                              'fields': {'Seap200HorizQual': 1,
                                         'Seap200HeightQual': 0,
                                         'Seap200HeadingQual': 0,
                                         'Seap200RollPitchQual': 0}})

        decoded = json.loads(parser.parse_record(SEAP_RECORDS[1]))
        self.assertDictEqual(decoded,
                             {'data_id': 'seap',
                              'timestamp': 1509778839.547251,
                              'message_type': 'PSXN22',
                              'fields': {'Seap200GyroCal': 0.44,
                                         'Seap200GyroOffset': 0.74}})

        decoded = json.loads(parser.parse_record(SEAP_RECORDS[2]))
        self.assertDictEqual(decoded,
                             {'data_id': 'seap', 'timestamp': 1509778839.802690,
                              'message_type': 'PSXN23',
                              'fields': {'Seap200Roll': -1.47,
                                         'Seap200Pitch': 0.01,
                                         'Seap200HeadingTrue': 235.77,
                                         'Seap200Heave': -0.38}})
Example #8
0
    def test_new_parse_records(self):
        """Test the "new" style of device/device_type definitions, where files
        are YAML dicts with top-level devices/device_types/includes keys.
        """
        parser = RecordParser(definition_path=self.new_device_filename)

        expected = {'data_id': 'grv1', 'timestamp': 1510275606.572,
                    'fields': {'Grv1Error': 0, 'Grv1Value': 24557}}
        self.assertDictEqual(parser.parse_record(GRV1_RECORDS[0]), expected)

        expected = {'data_id': 'seap',
                    'timestamp': 1509778839.291859,
                    'message_type': 'PSXN20',
                    'fields': {'Seap200HorizQual': 1,
                               'Seap200HeightQual': 0,
                               'Seap200HeadingQual': 0,
                               'Seap200RollPitchQual': 0}}
        self.assertDictEqual(parser.parse_record(SEAP_RECORDS[0]), expected)

        expected = {'data_id': 'seap',
                    'timestamp': 1509778839.547251,
                    'message_type': 'PSXN22',
                    'fields': {'Seap200GyroCal': 0.44,
                               'Seap200GyroOffset': 0.74}}
        self.assertDictEqual(parser.parse_record(SEAP_RECORDS[1]), expected)

        expected = {'data_id': 'seap', 'timestamp': 1509778839.802690,
                    'message_type': 'PSXN23',
                    'fields': {'Seap200Roll': -1.47,
                               'Seap200Pitch': 0.01,
                               'Seap200HeadingTrue': 235.77,
                               'Seap200Heave': -0.38}}
        self.assertDictEqual(parser.parse_record(SEAP_RECORDS[2]), expected)
Example #9
0
    def test_database_writer(self):
        """Write parsed sample records via DatabaseWriter and verify that
        each record read back matches SINGLE_RESULTS, in order."""
        parser = RecordParser(
            definition_path='local/usap/nbp/devices/nbp_devices.yaml',
            return_das_record=True)
        writer = DatabaseWriter(database='test',
                                host='localhost',
                                user='******',
                                password='******')
        writer.db.exec_sql_command('truncate table data')

        result_index = 0
        records = [parser.parse_record(s) for s in SAMPLE_DATA]
        for record in records:
            writer.write(record)

            # Drain the writer's connection; each non-empty read must match
            # the next expected result. The final (empty) read is still
            # logged, matching the original drain loop's trace.
            while True:
                result = writer.db.read()
                logging.debug('Read %d: %s', result_index, result)
                if not result:
                    break
                self.assertEqual(result, SINGLE_RESULTS[result_index])
                result_index += 1
Example #10
0
    def test_parse_bad_record(self):
        """A malformed record should log a warning unless the parser was
        created with quiet=True.

        The return values of parse_record() are intentionally discarded;
        only the logging behavior is under test here.
        """
        # Should log a warning on a bad record...
        p = RecordParser(definition_path=self.device_filename)
        with self.assertLogs(logging.getLogger(), logging.WARNING):
            p.parse_record(BAD_RECORD)

        # But shouldn't log anything if we're in quiet mode; assertLogs
        # raises AssertionError when no matching log record was emitted.
        p = RecordParser(definition_path=self.device_filename, quiet=True)
        with self.assertRaises(AssertionError):
            with self.assertLogs(logging.getLogger(), logging.WARNING):
                p.parse_record(BAD_RECORD)
Example #11
0
    def test_mysql_connector(self):
        """Exercise the MySQLRecordConnector: per-record table creation,
        write/read round trips, range reads, seek semantics and cleanup.

        Requires a local MySQL 'test' database (the '******' credentials
        are masked placeholders) and the NBP device definition file.
        """
        parser = RecordParser(
            definition_path='local/usap/nbp/devices/nbp_devices.yaml',
            return_das_record=True)
        try:
            db = MySQLRecordConnector(database='test',
                                      host='localhost',
                                      user='******',
                                      password='******')
        except Exception:
            # self.fail() is the idiomatic unittest way to abort with a
            # message (rather than assertTrue(False, ...)).
            self.fail('Unable to create database connection. Have you '
                      'set up the appropriate setup script in database/setup?')

        # Prefix data ids with a random test number so repeated runs don't
        # collide on table names; drop any stale tables from earlier runs.
        test_num = random.randint(0, 100000)
        records = [parser.parse_record(s) for s in SAMPLE_DATA]
        for i in range(len(records)):
            records[i].data_id = '%d_%s' % (test_num, records[i].data_id)
            table_name = db.table_name_from_record(records[i])
            logging.info('Deleting table %s', table_name)
            if db.table_exists(table_name):
                db.delete_table(table_name)
            self.assertFalse(db.table_exists(table_name))

        # Delete the mapping table so that we can test its automatic creation
        if db.table_exists(db.FIELD_NAME_MAPPING_TABLE):
            try:
                db.delete_table(db.FIELD_NAME_MAPPING_TABLE)
            except ProgrammingError:
                # Table may already be gone between the existence check and
                # the delete; acceptable for this test.
                pass

        # Create a table per record, write it, and verify a read round-trip.
        for record in records:
            table_name = db.table_name_from_record(record)

            db.create_table_from_record(record)
            self.assertTrue(db.table_exists(table_name))

            logging.info('Created table %s for %s', table_name, record)
            db.write_record(record)
            result = db.read(table_name)
            logging.info('Read record: %s', str(result))
            logging.info('Result: %s', result)
            self.assertEqual(record.timestamp, result.timestamp)
            self.assertEqual(record.data_id, result.data_id)
            self.assertDictEqual(record.fields, result.fields)

            # Some fields can come from more than one record, and we only
            # map to the first such record/table_name.

            # Make sure we don't get anything when we try a second read
            self.assertFalse(db.read(table_name))

        # A second write means read_range(start=1) should return two copies.
        for record in records:
            table_name = db.table_name_from_record(record)
            db.write_record(record)
            results = db.read_range(table_name, start=1)
            logging.debug('Read records: %s', [str(r) for r in results])
            self.assertEqual(len(results), 2)

        table_name = db.table_name_from_record(records[0])

        # Seek semantics: rewind to 'start', relative 'current', and 'end'.
        db.seek(table_name, 0, 'start')
        self.assertEqual(db.read(table_name), records[0])
        self.assertEqual(db.read(table_name), records[0])
        self.assertEqual(db.read(table_name), None)
        db.seek(table_name, -2, 'current')
        self.assertEqual(db.read(table_name), records[0])
        self.assertEqual(db.read(table_name), records[0])
        self.assertEqual(db.read(table_name), None)
        db.seek(table_name, -1, 'end')
        self.assertEqual(db.read(table_name), records[0])
        self.assertEqual(db.read(table_name), None)

        # Finally, clean up
        for record in records:
            table_name = db.table_name_from_record(record)
            db.delete_table(table_name)
            self.assertFalse(db.table_exists(table_name))

        db.close()