def __init__(self, reactor, client, name, aprs_filter):
    """Connect the given APRS-IS client and start a daemon reader thread.

    reactor -- Twisted reactor; used to hop received lines from the reader
        thread onto the reactor thread via callFromThread.
    client -- object providing connect(aprs_filter=...) and a blocking
        receive(callback=...) loop.
    name -- used only to label the reader thread for debugging.
    aprs_filter -- filter expression forwarded to client.connect().
    """
    # not specifically expecting more than one but this neatly handles zero-or-one
    self.__device_contexts = []
    client.connect(aprs_filter=aprs_filter)  # TODO either expect the user to do this or forward args

    def main_callback(line):
        # Runs on the reactor thread: parse the raw TNC2 line and fan the
        # resulting message out to every registered device context.
        message = parse_tnc2(line, time.time(), log=self.__log)
        for c in self.__device_contexts:
            c.output_message(message)

    # client blocks in a loop, so set up a thread
    def threaded_callback(line):
        # Runs on the reader thread.
        # NOTE(review): self.__alive and self.__log are not assigned in this
        # method; presumably the enclosing class sets them elsewhere — confirm.
        if not self.__alive:
            # StopIteration unwinds client.receive()'s internal loop (caught
            # in thread_body below), giving us a cooperative shutdown path.
            raise StopIteration()
        reactor.callFromThread(main_callback, line)

    def thread_body():
        try:
            client.receive(callback=threaded_callback)
        except StopIteration:  # thrown by callback
            pass

    thread = threading.Thread(
        name='APRSISRXClient({}) reader thread'.format(repr_no_string_tag(name)),
        target=thread_body)
    thread.daemon = True  # Allow clean process shutdown without waiting for us
    thread.start()
def _parse_telemetry_value(facts, errors, value_str, channel):
    """Parse one analog telemetry channel value.

    On success appends a Telemetry fact to facts; on a malformed value
    appends a description to errors instead. Both lists are mutated in place.
    """
    try:
        numeric_value = float(value_str)
    except ValueError:
        errors.append('Telemetry channel {} did not parse: {}'.format(
            channel, repr_no_string_tag(value_str)))
    else:
        facts.append(Telemetry(channel=channel, value=numeric_value))
def parse_tnc2(line, receive_time, log=None):
    """Parse "TNC2 text format" APRS messages."""
    if not isinstance(line, six.text_type):
        # TODO: Is there a more-often-than-not used encoding beyond ASCII, that we should use here?
        line = six.text_type(line, 'ascii', 'replace')
    facts = []
    errors = []
    m = re.match(r'^([^:>,]+?)>([^:>,]+)((?:,[^:>]+)*):(.*?)$', line)
    if m is not None:
        source, destination, via, payload = m.groups()
        comment = _parse_payload(facts, errors, source, destination, payload, receive_time)
        parsed = APRSMessage(receive_time, source, destination, via, payload, facts, errors, comment)
    else:
        errors.append('Could not parse TNC2')
        parsed = APRSMessage(receive_time, '', '', '', line, facts, errors, line)
    if log:
        # repr here provides robustness against control characters.
        log.info('APRS: {line}\n -> {aprs_message}', line=repr_no_string_tag(line), aprs_message=parsed)
    return parsed
def __init__(self, reactor, client, name, aprs_filter):
    """Connect the given APRS-IS client and start a daemon reader thread.

    reactor -- Twisted reactor; used to hop received lines from the reader
        thread onto the reactor thread via callFromThread.
    client -- object providing connect(aprs_filter=...) and a blocking
        receive(callback=...) loop.
    name -- used only to label the reader thread for debugging.
    aprs_filter -- filter expression forwarded to client.connect().
    """
    # not specifically expecting more than one but this neatly handles zero-or-one
    self.__device_contexts = []
    client.connect(aprs_filter=aprs_filter)  # TODO either expect the user to do this or forward args

    def main_callback(line):
        # Runs on the reactor thread: parse the raw TNC2 line and fan the
        # resulting message out to every registered device context.
        message = parse_tnc2(line, time.time(), log=self.__log)
        for c in self.__device_contexts:
            c.output_message(message)

    # client blocks in a loop, so set up a thread
    def threaded_callback(line):
        # Runs on the reader thread.
        # NOTE(review): self.__alive and self.__log are not assigned in this
        # method; presumably the enclosing class sets them elsewhere — confirm.
        if not self.__alive:
            # StopIteration unwinds client.receive()'s internal loop (caught
            # in thread_body below), giving us a cooperative shutdown path.
            raise StopIteration()
        reactor.callFromThread(main_callback, line)

    def thread_body():
        try:
            client.receive(callback=threaded_callback)
        except StopIteration:  # thrown by callback
            pass

    thread = threading.Thread(
        name='APRSISRXClient({}) reader thread'.format(repr_no_string_tag(name)),
        target=thread_body)
    thread.daemon = True  # Allow clean process shutdown without waiting for us
    thread.start()
def __init__(self, reactor, client, name, aprs_filter):
    """Connect the given APRS-IS client and start a daemon reader thread.

    Unlike the plain RX-client variant, this component routes messages
    through self.receive() (inherited behavior) rather than fanning out
    to device contexts directly.

    reactor -- Twisted reactor; doubles as the superclass time_source and
        as the thread-hop mechanism via callFromThread.
    client -- object providing connect(aprs_filter=...) and a blocking
        receive(callback=...) loop.
    name -- used only to label the reader thread for debugging.
    aprs_filter -- filter expression forwarded to client.connect().
    """
    super(_APRSISComponent, self).__init__(time_source=reactor)
    client.connect(aprs_filter=aprs_filter)  # TODO either expect the user to do this or forward args

    def main_callback(line):
        # Runs on the reactor thread.
        # TODO: This print-both-formats code is duplicated from multimon.py; it should be a utility in this module instead. Also, we should maybe have a try block.
        message = parse_tnc2(line, time.time(), log=self.__log)
        self.receive(message)

    # client blocks in a loop, so set up a thread
    def threaded_callback(line):
        # Runs on the reader thread.
        # NOTE(review): self.__alive and self.__log are not assigned in this
        # method; presumably set elsewhere in the class — confirm.
        if not self.__alive:
            # StopIteration unwinds client.receive()'s loop (caught below),
            # giving a cooperative shutdown path.
            raise StopIteration()
        reactor.callFromThread(main_callback, line)

    def thread_body():
        try:
            client.receive(callback=threaded_callback)
        except StopIteration:  # thrown by callback
            pass

    thread = threading.Thread(
        name='APRSISRXClient({}) reader thread'.format(repr_no_string_tag(name)),
        target=thread_body)
    thread.daemon = True  # Allow clean process shutdown without waiting for us
    thread.start()
def _parse_telemetry_value(facts, errors, value_str, channel):
    """Record one analog telemetry channel reading.

    Mutates facts (adds a Telemetry fact) or, if value_str is not a valid
    float, mutates errors (adds a description) and records nothing.
    """
    try:
        reading = float(value_str)
    except ValueError:
        problem = 'Telemetry channel {} did not parse: {}'.format(
            channel, repr_no_string_tag(value_str))
        errors.append(problem)
        return
    facts.append(Telemetry(channel=channel, value=reading))
def __lineReceived(self, line):
    """Decode one line of rtl_433 JSON output and forward it to the target."""
    # rtl_433's JSON encoder is not perfect (e.g. it will emit unescaped newlines), so protect against parse failures
    try:
        decoded = json.loads(line)
    except ValueError:
        self.__log.warn('bad JSON from rtl_433: {rtl_433_line}',
                        rtl_433_line=repr_no_string_tag(line))
        return
    self.__log.info('rtl_433 message: {rtl_433_json!r}', rtl_433_json=decoded)
    # rtl_433 provides a time field, but when in file-input mode it assumes the input is not real-time and generates start-of-file-relative timestamps, so we can't use them directly.
    self.__target(RTL433MessageWrapper(decoded, time.time()))
def __lineReceived(self, line):
    """Handle one line from rtl_433: parse JSON, log it, wrap and deliver."""
    # rtl_433's JSON encoder is not perfect (e.g. it will emit unescaped newlines), so protect against parse failures
    try:
        message = json.loads(line)
    except ValueError:
        self.__log.warn('bad JSON from rtl_433: {rtl_433_line}', rtl_433_line=repr_no_string_tag(line))
    else:
        self.__log.info('rtl_433 message: {rtl_433_json!r}', rtl_433_json=message)
        # rtl_433 provides a time field, but when in file-input mode it assumes the input is not real-time and generates start-of-file-relative timestamps, so we can't use them directly.
        wrapper = RTL433MessageWrapper(message, time.time())
        self.__target(wrapper)
def parse_tnc2(line, receive_time, log=None):
    """Parse "TNC2 text format" APRS messages."""
    if not isinstance(line, six.text_type):
        # TODO: Is there a more-often-than-not used encoding beyond ASCII, that we should use here?
        line = six.text_type(line, 'ascii', 'replace')
    facts = []
    errors = []
    header_match = re.match(r'^([^:>,]+?)>([^:>,]+)((?:,[^:>]+)*):(.*?)$', line)
    if header_match:
        source, destination, via, payload = header_match.groups()
        comment = _parse_payload(facts, errors, source, destination, payload, receive_time)
        result = APRSMessage(receive_time, source, destination, via, payload, facts, errors, comment)
    else:
        errors.append('Could not parse TNC2')
        result = APRSMessage(receive_time, '', '', '', line, facts, errors, line)
    if log:
        # repr here provides robustness against control characters.
        log.info('APRS: {line}\n -> {aprs_message}', line=repr_no_string_tag(line), aprs_message=result)
    return result
def failure(msg, **kwargs):
    """Format an assertion-failure message.

    Fills {cmd} and {substring} from the enclosing scope's `args` and
    `substring` (free variables of this closure), plus any extra kwargs.
    """
    if isinstance(args, six.string_types):
        cmd_text = args
    else:
        cmd_text = ' '.join(args)
    return msg.format(
        cmd=cmd_text,
        substring=repr_no_string_tag(substring),
        **kwargs)
def _parse_payload(facts, errors, source, destination, payload, receive_time):
    """Parse the information field (payload) of an APRS packet.

    Appends parsed facts to `facts` and problem descriptions to `errors`
    (both lists are mutated in place) and returns the remaining
    human-readable comment text, or the raw payload when it cannot be
    interpreted. For Mic-E packets the `destination` address carries
    encoded position data; `receive_time` is used to resolve partial
    timestamps.
    """
    # pylint: disable=unused-variable
    # (variables for information we're not yet using)
    if len(payload) < 1:
        errors.append('zero length information')
        return payload
    # First byte of the information field is the APRS "data type identifier".
    data_type = payload[0]
    if data_type == '!' or data_type == '=':
        # Position Without Timestamp
        facts.append(Messaging(data_type == '='))
        return _parse_position_and_symbol(facts, errors, payload[1:])
    if data_type == '/' or data_type == '@':
        # Position With Timestamp
        facts.append(Messaging(data_type == '@'))
        match = re.match(r'^.(.{7})(.*)$', payload)
        if not match:
            errors.append('Position With Timestamp is too short')
            return payload
        else:
            time_str, position_str = match.groups()
            _parse_dhm_hms_timestamp(facts, errors, time_str, receive_time)
            return _parse_position_and_symbol(facts, errors, position_str)
    elif data_type == '<':
        # Capabilities
        facts.append(Capabilities(dict(map(_parse_capability, payload[1:].split(',')))))
        return ''
    elif data_type == '>':
        # Status
        # TODO: parse timestamp
        facts.append(Status(payload[1:]))
        return ''
    elif data_type == '`' or data_type == "'":
        # Mic-E position
        match = re.match(r'^.(.)(.)(.)(.)(.)(.)(..)(.*)$', payload)
        if not match:
            errors.append('Mic-E Information is too short')
            return payload
        elif len(destination) < 6:
            errors.append('Mic-E Destination Address is too short')
            return payload
        else:
            # TODO: deal with ssid/7th byte
            # This is a generic application of the decoding table: note that not all of the resulting values are meaningful (e.g. only ns_bits[3] is a north/south value).
            lat_digits, message_bits, ns_bits, lon_offset_bits, ew_bits = zip(
                *[_mic_e_addr_decode_table[x] for x in destination[0:6]])
            # Latitude is spelled out digit-by-digit in the destination address.
            latitude_string = ''.join(lat_digits[0:4]) + '.' + ''.join(lat_digits[4:6]) + ns_bits[3]
            latitude = _parse_angle(latitude_string)
            longitude_offset = lon_offset_bits[4]
            # TODO: parse Mic-E "message"/"position comment" bits
            # TODO: interpret data type ID values (note spec revisions about it)
            d28, m28, h28, sp28, dc28, se28, symbol_rev, type_and_more = match.groups()
            # decode longitude, as specified in http://www.aprs.org/doc/APRS101.PDF page 48
            lon_d = ord(d28) - 28 + longitude_offset
            if 180 <= lon_d <= 189:
                lon_d -= 80
            elif 190 <= lon_d <= 199:
                lon_d -= 190
            lon_m = ord(m28) - 28
            if lon_m >= 60:
                lon_m -= 60
            lon_s = ord(h28) - 28
            # NOTE(review): relies on true division; presumably the file has
            # `from __future__ import division` (not visible here) — confirm,
            # otherwise this truncates under Python 2.
            longitude = ew_bits[5] * (lon_d + (lon_m + lon_s / 100) / 60)
            # TODO: interpret position ambiguity from latitude
            if latitude is not None:
                facts.append(Position(latitude, longitude))
            else:
                errors.append('Mic-E latitude does not parse: {}'.format(
                    repr_no_string_tag(latitude_string)))
            # decode course and speed, as specified in http://www.aprs.org/doc/APRS101.PDF page 52
            dc = ord(dc28) - 28
            speed = (ord(sp28) - 28) * 10 + dc // 10
            course = dc % 10 + (ord(se28) - 28)
            if speed >= 800:
                speed -= 800
            if course >= 400:
                course -= 400
            facts.append(Velocity(speed_knots=speed, course_degrees=course))
            # Mic-E transmits the symbol pair reversed relative to other formats.
            _parse_symbol(facts, errors, symbol_rev[1] + symbol_rev[0])
            # Type code per http://www.aprs.org/aprs12/mic-e-types.txt
            # TODO: parse and process manufacturer codes
            type_match = re.match(r"^([] >`'])(?:(...)\})?(.*)$", type_and_more)
            if type_match is None:
                errors.append('Mic-E contained non-type-code text: {}'.format(
                    repr_no_string_tag(type_and_more)))
                return type_and_more
            else:
                type_code, opt_altitude, more_text = type_match.groups()
                # TODO: process type code
                if opt_altitude is not None:
                    # Altitude is base-91 encoded with a 10 km offset.
                    facts.append(Altitude(value=_parse_base91(opt_altitude) - 10000,
                                          feet_not_meters=False))
                return more_text  # or should this be a status fact?
    elif data_type == ';':
        # Object
        match = re.match(r'^.(.{9})([*_])(.{7})(.*)$', payload)
        if not match:
            errors.append('Object Information did not parse')
            return payload
        else:
            name, live_str, time_str, position_ext_and_comment = match.groups()
            # The object's facts are nested inside an ObjectItemReport rather
            # than added to the packet's own fact list.
            obj_facts = []
            _parse_dhm_hms_timestamp(obj_facts, errors, time_str, receive_time)
            comment = _parse_position_and_symbol(obj_facts, errors, position_ext_and_comment)
            facts.append(ObjectItemReport(
                object=True,
                name=name,
                live=live_str == '*',
                facts=obj_facts))
            return comment
    elif data_type == 'T':
        # Telemetry (1.0.1 format)
        # more lenient than spec because a real packet I saw had decimal points and variable field lengths
        match = re.match(r'^T#([^,]*|MIC),?([^,]*),([^,]*),([^,]*),([^,]*),([^,]*),([01]{8})(.*)$', payload)
        if not match:
            errors.append('Telemetry did not parse: {}'.format(
                repr_no_string_tag(payload)))
            return ''
        else:
            seq, a1, a2, a3, a4, a5, digital, comment = match.groups()
            _parse_telemetry_value(facts, errors, a1, 1)
            _parse_telemetry_value(facts, errors, a2, 2)
            _parse_telemetry_value(facts, errors, a3, 3)
            _parse_telemetry_value(facts, errors, a4, 4)
            _parse_telemetry_value(facts, errors, a5, 5)
            # TODO: handle seq # (how is it used in practice?) and digital
            return comment
    else:
        errors.append('unrecognized data type: {}'.format(
            repr_no_string_tag(data_type)))
        return payload
def _parse_payload(facts, errors, source, destination, payload, receive_time):
    """Parse the information field (payload) of an APRS packet.

    Appends parsed facts to `facts` and problem descriptions to `errors`
    (both lists are mutated in place) and returns the remaining
    human-readable comment text, or the raw payload when it cannot be
    interpreted. For Mic-E packets the `destination` address carries
    encoded position data; `receive_time` is used to resolve partial
    timestamps.
    """
    # pylint: disable=unused-variable
    # (variables for information we're not yet using)
    if len(payload) < 1:
        errors.append('zero length information')
        return payload
    # First byte of the information field is the APRS "data type identifier".
    data_type = payload[0]
    if data_type == '!' or data_type == '=':
        # Position Without Timestamp
        facts.append(Messaging(data_type == '='))
        return _parse_position_and_symbol(facts, errors, payload[1:])
    if data_type == '/' or data_type == '@':
        # Position With Timestamp
        facts.append(Messaging(data_type == '@'))
        match = re.match(r'^.(.{7})(.*)$', payload)
        if not match:
            errors.append('Position With Timestamp is too short')
            return payload
        else:
            time_str, position_str = match.groups()
            _parse_dhm_hms_timestamp(facts, errors, time_str, receive_time)
            return _parse_position_and_symbol(facts, errors, position_str)
    elif data_type == '<':
        # Capabilities
        facts.append(Capabilities(dict(map(_parse_capability, payload[1:].split(',')))))
        return ''
    elif data_type == '>':
        # Status
        # TODO: parse timestamp
        facts.append(Status(payload[1:]))
        return ''
    elif data_type == '`' or data_type == "'":
        # Mic-E position
        match = re.match(r'^.(.)(.)(.)(.)(.)(.)(..)(.*)$', payload)
        if not match:
            errors.append('Mic-E Information is too short')
            return payload
        elif len(destination) < 6:
            errors.append('Mic-E Destination Address is too short')
            return payload
        else:
            # TODO: deal with ssid/7th byte
            # This is a generic application of the decoding table: note that not all of the resulting values are meaningful (e.g. only ns_bits[3] is a north/south value).
            lat_digits, message_bits, ns_bits, lon_offset_bits, ew_bits = zip(
                *[_mic_e_addr_decode_table[x] for x in destination[0:6]])
            # Latitude is spelled out digit-by-digit in the destination address.
            latitude_string = ''.join(lat_digits[0:4]) + '.' + ''.join(lat_digits[4:6]) + ns_bits[3]
            latitude = _parse_angle(latitude_string)
            longitude_offset = lon_offset_bits[4]
            # TODO: parse Mic-E "message"/"position comment" bits
            # TODO: interpret data type ID values (note spec revisions about it)
            d28, m28, h28, sp28, dc28, se28, symbol_rev, type_and_more = match.groups()
            # decode longitude, as specified in http://www.aprs.org/doc/APRS101.PDF page 48
            lon_d = ord(d28) - 28 + longitude_offset
            if 180 <= lon_d <= 189:
                lon_d -= 80
            elif 190 <= lon_d <= 199:
                lon_d -= 190
            lon_m = ord(m28) - 28
            if lon_m >= 60:
                lon_m -= 60
            lon_s = ord(h28) - 28
            # NOTE(review): relies on true division; presumably the file has
            # `from __future__ import division` (not visible here) — confirm,
            # otherwise this truncates under Python 2.
            longitude = ew_bits[5] * (lon_d + (lon_m + lon_s / 100) / 60)
            # TODO: interpret position ambiguity from latitude
            if latitude is not None:
                facts.append(Position(latitude, longitude))
            else:
                errors.append('Mic-E latitude does not parse: {}'.format(
                    repr_no_string_tag(latitude_string)))
            # decode course and speed, as specified in http://www.aprs.org/doc/APRS101.PDF page 52
            dc = ord(dc28) - 28
            speed = (ord(sp28) - 28) * 10 + dc // 10
            course = dc % 10 + (ord(se28) - 28)
            if speed >= 800:
                speed -= 800
            if course >= 400:
                course -= 400
            facts.append(Velocity(speed_knots=speed, course_degrees=course))
            # Mic-E transmits the symbol pair reversed relative to other formats.
            _parse_symbol(facts, errors, symbol_rev[1] + symbol_rev[0])
            # Type code per http://www.aprs.org/aprs12/mic-e-types.txt
            # TODO: parse and process manufacturer codes
            type_match = re.match(r"^([] >`'])(?:(...)\})?(.*)$", type_and_more)
            if type_match is None:
                errors.append('Mic-E contained non-type-code text: {}'.format(
                    repr_no_string_tag(type_and_more)))
                return type_and_more
            else:
                type_code, opt_altitude, more_text = type_match.groups()
                # TODO: process type code
                if opt_altitude is not None:
                    # Altitude is base-91 encoded with a 10 km offset.
                    facts.append(Altitude(value=_parse_base91(opt_altitude) - 10000,
                                          feet_not_meters=False))
                return more_text  # or should this be a status fact?
    elif data_type == ';':
        # Object
        match = re.match(r'^.(.{9})([*_])(.{7})(.*)$', payload)
        if not match:
            errors.append('Object Information did not parse')
            return payload
        else:
            name, live_str, time_str, position_ext_and_comment = match.groups()
            # The object's facts are nested inside an ObjectItemReport rather
            # than added to the packet's own fact list.
            obj_facts = []
            _parse_dhm_hms_timestamp(obj_facts, errors, time_str, receive_time)
            comment = _parse_position_and_symbol(obj_facts, errors, position_ext_and_comment)
            facts.append(ObjectItemReport(
                object=True,
                name=name,
                live=live_str == '*',
                facts=obj_facts))
            return comment
    elif data_type == 'T':
        # Telemetry (1.0.1 format)
        # more lenient than spec because a real packet I saw had decimal points and variable field lengths
        match = re.match(r'^T#([^,]*|MIC),?([^,]*),([^,]*),([^,]*),([^,]*),([^,]*),([01]{8})(.*)$', payload)
        if not match:
            errors.append('Telemetry did not parse: {}'.format(
                repr_no_string_tag(payload)))
            return ''
        else:
            seq, a1, a2, a3, a4, a5, digital, comment = match.groups()
            _parse_telemetry_value(facts, errors, a1, 1)
            _parse_telemetry_value(facts, errors, a2, 2)
            _parse_telemetry_value(facts, errors, a3, 3)
            _parse_telemetry_value(facts, errors, a4, 4)
            _parse_telemetry_value(facts, errors, a5, 5)
            # TODO: handle seq # (how is it used in practice?) and digital
            return comment
    else:
        errors.append('unrecognized data type: {}'.format(
            repr_no_string_tag(data_type)))
        return payload
def failure(msg, **kwargs):
    """Build a failure message by filling in {cmd} and {substring}.

    `args` and `substring` are free variables from the enclosing scope;
    extra keyword arguments are passed straight through to str.format.
    """
    cmd_description = args if isinstance(args, six.string_types) else ' '.join(args)
    return msg.format(
        cmd=cmd_description,
        substring=repr_no_string_tag(substring),
        **kwargs)
def write_default_config(new_config_path):
    """Create a new configuration directory at new_config_path.

    Writes a commented default config.py (adapted to which devices appear
    to be available on this system) and an empty 'dbs-read-only'
    subdirectory. Raises OSError if new_config_path already exists.
    """
    # TODO: support enumerating osmosdr devices and configuring specifically for them
    # TODO: support more than one audio device (moot currently because gnuradio doesn't have a enumeration operation)
    from shinysdr.devices import find_audio_rx_names
    audio_rx_names = find_audio_rx_names()
    if audio_rx_names:
        has_audio = True
        audio_rx_name = audio_rx_names[0]
    else:
        has_audio = False
        audio_rx_name = ''
    # Probe for the osmosdr plugin; if absent, its config lines are
    # commented out in the generated file rather than omitted.
    try:
        importlib.import_module('shinysdr.plugins.osmosdr')
        has_osmosdr = True
    except ImportError:
        has_osmosdr = False
    # NOTE(review): internal line breaks of this template were reconstructed
    # from the comment markers; confirm against the generated config.py.
    config_text = dedent("""\
    # -*- coding: utf-8 -*-

    # This is a ShinySDR configuration file. For more information about what can
    # be put here, read the manual section on it, available from the running
    # ShinySDR server at: http://localhost:8100/manual/configuration

    from shinysdr.devices import AudioDevice
    {osmosdr_comment}from shinysdr.plugins.osmosdr import OsmoSDRDevice
    from shinysdr.plugins.simulate import SimulatedDevice

    # OsmoSDR generic driver; handles USRP, RTL-SDR, FunCube Dongle, HackRF, etc.
    # To select a specific device, replace '' with 'rtl=0' etc.
    {osmosdr_comment}config.devices.add(u'osmo', OsmoSDRDevice(''))

    # For hardware which uses a sound-card as its ADC or appears as an
    # audio device.
    {audio_comment}config.devices.add(u'audio', AudioDevice(rx_device={audio_rx_name_quoted}))

    # Locally generated RF signals for test purposes.
    config.devices.add(u'sim', SimulatedDevice())

    config.serve_web(
        # These are in Twisted endpoint description syntax:
        # <http://twistedmatrix.com/documents/current/api/twisted.internet.endpoints.html#serverFromString>
        # Note: ws_endpoint must currently be 1 greater than http_endpoint; if one
        # is SSL then both must be. These restrictions will be relaxed later.
        http_endpoint='tcp:8100',
        ws_endpoint='tcp:8101',

        # A secret placed in the URL as simple access control. Does not
        # provide any real security unless using HTTPS. The default value
        # in this file has been automatically generated from 128 random bits.
        # Set to None to not use any secret.
        root_cap={root_cap_quoted},

        # Page title / station name
        title='ShinySDR')
    """).format(
        root_cap_quoted=repr_no_string_tag(generate_cap()),
        audio_rx_name_quoted=repr_no_string_tag(audio_rx_name),
        audio_comment='' if has_audio else '# ',
        osmosdr_comment='' if has_osmosdr else '# ',
    )
    os.mkdir(new_config_path)
    # Binary mode + explicit UTF-8 encode keeps Python 2/3 behavior identical.
    with open(os.path.join(new_config_path, 'config.py'), 'wb') as f:
        f.write(config_text.encode('utf-8'))
    os.mkdir(os.path.join(new_config_path, 'dbs-read-only'))
def _coerce_and_validate_base_url(url_value, label, allowed_schemes, allow_path=False):
    """Convert url_value to string or None and validate it is a suitable base URL."""
    if url_value is None:
        return None
    coerced = str(url_value)
    scheme, _netloc, path_bytes, _params, _query_bytes, _fragment = urlparse(bytes_or_ascii(coerced))
    # Ensure that the protocol is compatible.
    if scheme.lower() not in allowed_schemes:
        scheme_description = ' or '.join(repr_no_string_tag(s + ':') for s in allowed_schemes)
        raise ConfigException('config.serve_web: {} must be a {} URL but was {}'.format(
            label, scheme_description, repr_no_string_tag(coerced)))
    # Ensure that there are no path components. There are two reasons for this:
    # 1. The client makes use of host-relative URLs.
    # 2. Because ShinySDR makes heavy use of localStorage, and may in the future use other origin-scoped features, it is not safe to run ShinySDR on the same origin as another web application as they might collide with each other. Trying to reverse-proxy with an added path component does not _necessarily_ indicate an attempt to do this, but it'd be more work to support it so let's not bother.
    # However, neither reason applies to WebSocket addresses, so those are allowed to have directory paths.
    if allow_path:
        if not path_bytes.endswith(b'/'):
            raise ConfigException('config.serve_web: {}\'s path must end in a slash, but had {}'.format(
                label, repr_no_string_tag(path_bytes)))
    elif path_bytes != b'/':
        raise ConfigException('config.serve_web: {} must not have any path components, but had {}'.format(
            label, repr_no_string_tag(path_bytes)))
    return coerced