def test_overall(self):
    entries = [
        parse_line(make_line(method='PUT')),
        parse_line(make_line(method='GET')),
    ]
    output = entries_to_csv(entries)
    csv = DictReader(output.splitlines(), CSV_FIELDS)
    assert list(csv) == [serialize_entry(e) for e in entries]
def parse_apt_line(line):
    "Parse a single line in the APT file"
    record_type = line[:3]
    r = parse_line(line, APT_RECORD_MAP[record_type])

    # Parse out useful coordinates
    if record_type == 'APT':
        r['lat'] = convert_dashed_dms_to_float(r['point_latitude_formatted'])
        r['lon'] = convert_dashed_dms_to_float(r['point_longitude_formatted'])
        r['control_tower'] = convert_boolean(r['control_tower'])
    if record_type == 'RWY':
        if r.get('base_end_latitude_physical_runway_end_formatted', False):
            r['base_end_lat'] = convert_dashed_dms_to_float(r['base_end_latitude_physical_runway_end_formatted'])
        if r.get('base_end_longitude_physical_runway_end_formatted', False):
            r['base_end_lon'] = convert_dashed_dms_to_float(r['base_end_longitude_physical_runway_end_formatted'])
        if r.get('base_end_latitude_displaced_threshold_formatted', False):
            r['base_end_displaced_threshold_lat'] = convert_dashed_dms_to_float(r['base_end_latitude_displaced_threshold_formatted'])
        if r.get('base_end_longitude_displaced_threshold_formatted', False):
            r['base_end_displaced_threshold_lon'] = convert_dashed_dms_to_float(r['base_end_longitude_displaced_threshold_formatted'])
        if r.get('reciprocal_end_latitude_physical_runway_end_formatted', False):
            r['reciprocal_end_lat'] = convert_dashed_dms_to_float(r['reciprocal_end_latitude_physical_runway_end_formatted'])
        if r.get('reciprocal_end_longitude_physical_runway_end_formatted', False):
            r['reciprocal_end_lon'] = convert_dashed_dms_to_float(r['reciprocal_end_longitude_physical_runway_end_formatted'])
        if r.get('reciprocal_end_latitude_displaced_threshold_formatted', False):
            r['reciprocal_end_displaced_threshold_lat'] = convert_dashed_dms_to_float(r['reciprocal_end_latitude_displaced_threshold_formatted'])
        if r.get('reciprocal_end_longitude_displaced_threshold_formatted', False):
            r['reciprocal_end_displaced_threshold_lon'] = convert_dashed_dms_to_float(r['reciprocal_end_longitude_displaced_threshold_formatted'])
    return r
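# convert_dashed_dms_to_float is not shown in this file. A minimal sketch is
# given below, assuming inputs shaped like the FAA NASR "formatted" coordinate
# strings, e.g. '35-26-14.0000N'; the name and field layout here are
# assumptions, so the project's real helper may differ.
def convert_dashed_dms_to_float_sketch(dms):
    # Split '35-26-14.0000N' into degrees, minutes, and seconds + hemisphere
    degrees, minutes, rest = dms.split('-')
    seconds, hemisphere = rest[:-1], rest[-1]
    value = int(degrees) + int(minutes) / 60.0 + float(seconds) / 3600.0
    # South and West hemispheres are negative
    return -value if hemisphere in ('S', 'W') else value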
def main():
    arguments = parse_args()
    filename, extension = arguments.input.rsplit('.', 1)
    temp_file = f'{filename}_temp.png'
    try:
        quantitize_image(filename, extension, arguments.colors)
        imagick_output = generate_histogram(temp_file)
        delete_file(temp_file)
    except Exception:
        print(
            'Something went wrong with ImageMagick, is it installed correctly? '
            'Its version needs to be at least 7.0.9'
        )
    else:
        output_lines = imagick_output.split('\n')
        parsed_colors = [parse_line(line) for line in output_lines]
        sorted_colors = sorted(parsed_colors, key=lambda l: int(l['frequency']), reverse=True)
        image = generate_palette(sorted_colors, arguments.values, arguments.percentage)
        try:
            output, extension = arguments.output.rsplit('.', 1)
            image.save(f'{output}.{extension}')
        except OSError as oserror:
            print(f"Couldn't save the image for some reason\n{oserror.strerror}")
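# The parse_line used by main() above is not shown. A sketch is given below,
# assuming it reads ImageMagick histogram output lines such as
# '     8: (  0,  0,  0) #000000 black'; the function name and returned keys
# are assumptions for illustration only.
import re

def parse_histogram_line_sketch(line):
    # Pull out the pixel count and the hex color from one histogram line
    match = re.match(r'\s*(\d+):\s*\([^)]*\)\s*(#[0-9A-Fa-f]{6,8})', line)
    if not match:
        return None
    return {'frequency': match.group(1), 'hex': match.group(2)}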
def listen_and_parse(access_log_path):
    tail = open_tail(access_log_path)
    while True:
        line = tail.readline()
        assert line
        data = parse_line(line)
        if data:
            yield data
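# Hypothetical usage of listen_and_parse: the generator tails the log and
# yields one parsed entry per accepted line, so a consumer just iterates over
# it. The path and the handle_entry callback below are placeholders, not part
# of the original code.
def follow_log(path='access.log', handle_entry=print):
    for entry in listen_and_parse(path):
        handle_entry(entry)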
def parse_awos_line(line):
    "Parse a single line in the AWOS file"
    r = parse_line(line, AWOS_RECORDS)

    # Parse out useful coordinates
    if r['record_type'] == 'AWOS1':  # only if it's a record type 1
        if r['latitude']:
            r['lat'] = convert_dashed_dms_to_float(r['latitude'])
        if r['longitude']:
            r['lon'] = convert_dashed_dms_to_float(r['longitude'])
    return r
def test_ignored(self):
    lines = [
        make_line(volume='?clusterMeta'),
        make_line(volume='.status'),
        make_line(path='/.upload'),
        make_line(path='?o=mod'),
        make_line(path='/?o=mod'),
        make_line(method='WHATEVER'),
        make_line(method='PUT', path=''),
        make_line(token='CLUSTER/ALLNODE/ROOT/USER<token>'),
    ]
    for i, line in enumerate(lines):
        assert parse_line(line) is None, i
def test_overall(self):
    line = make_line()
    entry = parse_line(line)
    assert entry['ip'] == DEFAULT_PARAMS['ip']
    datetime = DEFAULT_PARAMS['datetime']
    datetime = dateutil_parse(datetime.replace(':', ' ', 1))
    assert entry['datetime'] == datetime
    assert entry['operation'] == Operation.UPLOAD
    assert entry['volume'] == 'sxmonitor'
    assert entry['path'] == '/nagios-test-file'
    assert entry['user'] == DEFAULT_PARAMS['token']
    assert entry['user_agent'] == DEFAULT_PARAMS['user_agent']
    assert len(entry) == 7, "Tests all entry keys"
def insert_script(script_number):
    '''
    Insert messages into the messages table from a script file.

    :parameter {script_number} : script file index.
    :return {} : None
    '''
    lines = get_lines('scripts/%02d.txt' % script_number)
    count = 0
    for line in lines:
        charactor, message = parse_line(line)
        if message is None:
            continue
        insert_script_with_charactor_name(1, charactor, message)
        print(count)
        count += 1
def main():
    if not os.path.exists(assembly_file):
        raise Exception('No file found at {0}'.format(assembly_file))

    tokens = []
    line_num = 1
    with open(assembly_file) as f:
        for line in f:
            try:
                sanitized = sanitize_line(line)
                if sanitized:
                    tokens.append(parse.parse_line(sanitized, line_num))
            except parse.ParseException as e:
                raise parse.ParseException(
                    'Error at line number {0}: {1}, {2}'.format(line_num, line.strip(), str(e)))
            line_num += 1
    generate.generate(tokens)
def test_delete(self):
    line = make_line(method='DELETE')
    entry = parse_line(line)
    assert entry['operation'] == Operation.DELETE
def test_parse_line_letter(self):
    s = parse.parse_line(
        '161 19:22:50.9 +26:15:45 59.78 5.32 V 5.18 B6III'
        ' -0.001 -0.010 -012 182255 3Lam ',
        'abc')
    self.assertEqual(s.letter, 'λ')
def test_parse_line_color(self):
    s = parse.parse_line(
        '161 19:22:50.9 +26:15:45 59.78 5.32 V 5.18 B6III'
        ' -0.001 -0.010 -012 182255 3 ',
        'abc')
    self.assertEqual(s.color, 'B')
def test_fail(self):
    assert parse_line('') is None
    assert parse_line('garbage') is None
    line = make_line(resp_code=404)
    assert parse_line(line) is None
def test_list(self):
    line = make_line(method='GET', path='')
    entry = parse_line(line)
    assert entry['operation'] == Operation.LIST
def test_query(self):
    line = make_line(method='GET', path='?recursive')
    entry = parse_line(line)
    assert entry is not None
def test_query_ignored(self):
    line = make_line(method='GET', path='file?rev=foo&fileMeta')
    entry = parse_line(line)
    assert entry is None
#!/usr/bin/env python3
from binascii import hexlify
import sys

import serial

import parse

if len(sys.argv) != 2:
    print("read_serial /dev/ttyUSB4")
    sys.exit(1)

# ser = serial.Serial(sys.argv[1], 115200)
# ser = serial.Serial(sys.argv[1], 2_000_000)
ser = serial.Serial(sys.argv[1], 921600)

while True:
    line = ser.readline()
    line = line.strip(b'\r\n')
    print(hexlify(line))
    lpc = parse.parse_line(line)
    if not lpc:
        continue
    lpctype, direction, address, data = lpc
    print('%3s: %5s %8s: %4s' % (lpctype, direction, address, data))
def test_download(self):
    line = make_line(method='GET')
    entry = parse_line(line)
    assert entry['operation'] == Operation.DOWNLOAD
def parse_natfix_line(line):
    r = parse_line(line[:-1], NATFIX_RECORDS)

    # add in lat/lon converted to a simple float
    r['lat'] = convert_dms_to_float(r['latitude_string'])
    r['lon'] = convert_dms_to_float(r['longitude_string'])
    return r
def test_serialize_entry():
    entry = parse_line(make_line())
    serialized = serialize_entry(entry)
    assert isinstance(serialized['datetime'], str)
    assert isinstance(serialized['operation'], str)
    assert set(serialized.keys()) == set(CSV_FIELDS)
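# The tests above only pin down serialize_entry's contract: datetime and
# operation become strings, and the keys match CSV_FIELDS. A minimal sketch
# consistent with that contract is given below, assuming entry['datetime'] is
# a datetime and Operation is an Enum; this is not necessarily the project's
# implementation.
def serialize_entry_sketch(entry):
    serialized = dict(entry)
    serialized['datetime'] = entry['datetime'].isoformat()
    serialized['operation'] = entry['operation'].name
    # Emit exactly the CSV_FIELDS keys, blanking any field the entry lacks
    return {field: serialized.get(field, '') for field in CSV_FIELDS}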