def test_read_from_write_stream(self):
    """A stream opened for writing rejects read operations."""
    with gpsd_format.open(TYPES_MSG_GZ_FILE) as reader:
        with gpsd_format.open(self.tempfile.name, 'w', driver='newlinejson') as writer:
            # Copy every record across, then prove the writer cannot be iterated.
            for record in reader:
                writer.write(record)
            with self.assertRaises(IOError):
                next(writer)
def test_standard(self):
    """All container/compression variants of the same data decode to equivalent messages."""
    with gpsd_format.open(TYPES_MSG_FILE) as msg_stream:
        with gpsd_format.open(TYPES_MSG_GZ_FILE) as msg_gz_stream:
            with gpsd_format.open(TYPES_JSON_FILE) as json_stream:
                with gpsd_format.open(TYPES_JSON_GZ_FILE) as json_gz_stream:
                    streams = (msg_stream, msg_gz_stream, json_stream, json_gz_stream)
                    # Walk the four files in lockstep and compare every pairing
                    # of the rows read at each position.
                    for row_group in zip(*streams):
                        for left, right in itertools.combinations(row_group, 2):
                            self.assertTrue(compare_msg(left, right))
def convert(ctx, infile, outfile):
    """
    Converts between JSON and msgpack container formats

    Reads every row from ``infile`` and writes it to ``outfile``; the
    container format of each side is inferred by ``gpsd_format.open``.
    """
    with open(infile) as inf:
        with open(outfile, "w") as of:
            reader = gpsd_format.open(inf, 'r')
            # 'w' must be explicit: gpsd_format.open() defaults to read mode,
            # and writing to a read-mode stream raises IOError.
            writer = gpsd_format.open(of, 'w')
            for row in reader:
                writer.write(row)
def test_write_to_read_stream(self):
    """writeheader() is rejected on streams not opened in write ('w') mode."""
    with tempfile.NamedTemporaryFile(mode='r+') as tmp:
        for open_mode in ('r', 'a'):
            with gpsd_format.open(tmp.name, mode=open_mode, driver='msgpack') as stream:
                with self.assertRaises(IOError):
                    stream.writeheader()
def test_io_on_closed_stream(self):
    """Every I/O operation on a closed stream raises IOError, in every mode."""
    for open_mode in ('r', 'w', 'a'):
        with gpsd_format.open(self.tempfile.name, mode=open_mode, driver='newlinejson') as stream:
            stream.close()
            self.assertTrue(stream.closed)
            # Reading, writing, and header-writing must all fail once closed.
            with self.assertRaises(IOError):
                next(stream)
            with self.assertRaises(IOError):
                stream.write(None)
            with self.assertRaises(IOError):
                stream.writeheader()
def test_wrong_extension(self):
    """An explicit driver/compression overrides whatever the file extension implies."""
    # Copy msgpack data into a file whose suffix does not say msgpack,
    # then read it back with the driver forced.
    with open(TYPES_MSG_FILE) as src:
        self.tempfile.write(src.read())
    self.tempfile.seek(0)
    with gpsd_format.open(self.tempfile.name, driver='msgpack') as actual:
        with gpsd_format.open(TYPES_MSG_FILE) as expected:
            for want, got in zip(expected, actual):
                self.assertDictEqual(want, got)
    # Same idea for gzipped msgpack behind a misleading name, with
    # compression forced as well.
    with tempfile.NamedTemporaryFile(mode='r+') as tfile:
        with open(TYPES_MSG_GZ_FILE) as src:
            tfile.write(src.read())
        tfile.seek(0)
        with gpsd_format.open(tfile.name, driver='msgpack', compression='gzip') as actual:
            with gpsd_format.open(TYPES_MSG_GZ_FILE) as expected:
                for want, got in zip(expected, actual):
                    self.assertDictEqual(want, got)
def test_nonjson(self):
    """validate keeps going (and still reports) when a file ends in non-JSON garbage."""
    infile = os.path.join(self.dir, "rows.mmsi=123.json")
    with open(infile, "w") as handle:
        writer = gpsd_format.open(handle, 'w')
        for record in self.rows:
            writer.write(record)
        # Append deliberately unparseable lines straight to the file handle.
        for _ in six.moves.range(0, self.num_invalid_rows):
            handle.write("N\n")
    result = self.runcmd("validate", "--verbose", "--msg-hist", "--mmsi-hist", infile)
    out = result.output
    self.assertIn("All rows are sorted: True", out)
    self.assertIn("->", out)
def test_sorted(self):
    """validate --print-json emits exactly the expected stats for a sorted file."""
    infile = os.path.join(self.dir, "rows.mmsi=123.json")
    with open(infile, "w") as handle:
        writer = gpsd_format.open(handle, 'w')
        for record in self.rows:
            writer.write(record)
        # Append invalid bytes (no trailing newline) directly to the handle.
        for _ in six.moves.range(0, self.num_invalid_rows):
            handle.write("N")
    self.expected[u'file'] = str(infile)
    raw = self.runcmd("validate", "--print-json", infile).output
    actual = json.loads(raw)
    self.assertDictEqual(self.expected, actual)
def test_attrs(self):
    """Streams expose a string repr and the Python 3 iterator protocol."""
    with gpsd_format.open(TYPES_MSG_FILE) as stream:
        self.assertIsInstance(stream.__repr__(), six.string_types)
        self.assertTrue(hasattr(stream, '__next__'))
def test_no_detect_compression(self):
    """Disabling compression detection still reads an uncompressed file correctly."""
    with gpsd_format.open(TYPES_MSG_FILE, compression=False) as actual:
        with gpsd_format.open(TYPES_MSG_FILE) as expected:
            for want, got in zip(expected, actual):
                self.assertDictEqual(want, got)
def test_default_mode_is_read(self):
    """Omitting the mode argument opens the stream for reading."""
    stream = gpsd_format.open(TYPES_MSG_FILE)
    with stream:
        self.assertEqual(stream.mode, 'r')
def validate(ctx, infile, print_json, verbose, msg_hist, mmsi_hist):
    """
    Print info about a GPSD format AIS/GPS file
    """
    # `infile` may be a single file or a directory; a directory is expanded
    # to its immediate children (no recursion) and processed in sorted order.
    if os.path.isdir(infile):
        files = [os.path.join(infile, name) for name in os.listdir(infile)]
    else:
        files = [infile]
    files.sort()
    # Accumulate stats across all files by repeatedly merging per-file info.
    stats = {}
    for name in files:
        sys.stderr.write("Collecting stats for {infile} ...\n".format(infile=name))
        with gpsd_format.open(name, "r", skip_failures=True, force_message=False) as f:
            if verbose:
                # In verbose mode, report each failure to stderr as it happens;
                # otherwise failures are skipped silently (error_cb=None).
                def error_cb(type, msg, exc=None, trace=None):
                    if exc:
                        sys.stderr.write("%s: %s: %s: %s\n" % (name, type.title(), exc, msg))
                        if trace:
                            sys.stderr.write("%s\n" % (trace,))
                    else:
                        sys.stderr.write("%s: %s: %s\n" % (name, type.title(), msg))
            else:
                error_cb = None
            stats = gpsd_format.validate.merge_info(stats, gpsd_format.validate.collect_info(f, error_cb=error_cb))
    if print_json:
        # Machine-readable output: serialize datetimes, then emit one JSON
        # object and exit successfully.
        for key, value in six.iteritems(stats):
            if isinstance(value, datetime.datetime):
                stats[key] = value.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
        stats['file'] = infile
        sys.stdout.write(json.dumps(stats) + "\n")
        sys.exit(0)
    else:
        # Human-readable report of the merged statistics.
        click.echo("")
        click.echo("=== Report for %s ===" % infile)
        click.echo(" Number of rows: %s" % stats['num_rows'])
        click.echo(" Number of incomplete rows: %s" % stats['num_incomplete_rows'])
        click.echo(" Number of invalid rows: %s" % stats['num_invalid_rows'])
        click.echo(" All files are sorted: %s" % stats['is_sorted_files'])
        click.echo(" All rows are sorted: %s" % stats['is_sorted'])
        # Only shown when the filename declared an MMSI to validate against.
        if stats['mmsi_declaration'] is not None:
            click.echo(" All rows match declared MMSI: %s" % stats['mmsi_declaration'])
        click.echo(" Number of unique MMSI's: %s" % len(stats['mmsi_hist']))
        click.echo(" Number of message types: %s" % len(stats['msg_type_hist']))
        click.echo("")
        # Bounding box of all positions seen.
        click.echo(" X Min: %s" % stats['lon_min'])
        click.echo(" Y Min: %s" % stats['lat_min'])
        click.echo(" X Max: %s" % stats['lon_max'])
        click.echo(" Y Max: %s" % stats['lat_max'])
        click.echo("")
        # Timestamps may be absent; render None rather than crashing on format.
        if stats['min_timestamp'] is not None:
            _min_t = gpsd_format.schema.datetime2str(stats['min_timestamp'])
        else:
            _min_t = None
        if stats['max_timestamp'] is not None:
            _max_t = gpsd_format.schema.datetime2str(stats['max_timestamp'])
        else:
            _max_t = None
        click.echo(" Min timestamp: %s" % _min_t)
        click.echo(" Max timestamp: %s" % _max_t)
        # Optional histograms, each gated by its own CLI flag.
        if mmsi_hist:
            click.echo("")
            click.echo(" MMSI histogram:")
            for mmsi in sorted(stats['mmsi_hist'].keys()):
                click.echo(" %s -> %s" % (mmsi, stats['mmsi_hist'][mmsi]))
        if msg_hist:
            click.echo("")
            click.echo(" Message type histogram:")
            for msg_type in sorted(stats['msg_type_hist'].keys()):
                click.echo(" %s -> %s" % (msg_type, stats['msg_type_hist'][msg_type]))
        click.echo("")
        sys.exit(0)