def main():
    """
    auto_rx main entry point / main loop.

    Parses command-line arguments, configures logging, reads the station
    configuration file, starts the web interface and all enabled telemetry
    exporters, then runs the scan/decode task loop until the optional
    timeout expires or no working SDRs remain.
    """
    global config, exporter_objects, exporter_functions, logging_level, rs92_ephemeris, gpsd_adaptor, email_exporter

    # Specify command arguments
    epilog_msg = '''\
See above examples
- [?]$ ./{0} -t 0
'''.format(FILE_NAME)

    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description="The main script to scan frequency range and decode sonde",
        epilog=epilog_msg)
    parser.add_argument("-c", "--config",
                        default="./config/station.cfg",
                        help="Receive station configuration file")
    parser.add_argument("-l", "--log",
                        default="./log/",
                        help="Receive station log path")
    parser.add_argument(
        "-f", "--frequency",
        type=float,
        default=0.0,
        help="Sonde frequency override (Unit : MHz). This overrides the scan whitelist with the supplied frequency"
    )
    parser.add_argument(
        "-m", "--model",
        action='store',
        type=str,
        choices=VALID_SONDE_TYPES,
        default=None,
        help="Immediately start a decoder for a provided and valid sonde type")
    parser.add_argument(
        "-t", "--timeout",
        type=int,
        default=0,
        help="Close auto_rx system after N minutes. Use 0 to run continuously."
    )
    parser.add_argument("-v", "--verbose",
                        action="store_true",
                        help="Enable debug output.")
    parser.add_argument(
        "-e", "--ephemeris",
        type=str,
        default="None",
        help="Use a manually obtained ephemeris file when decoding RS92 Sondes."
    )
    parser.add_argument(
        "--systemlog",
        action="store_true",
        default=False,
        help="Write a auto_rx system log-file to ./log/ (Default=False)")
    args = parser.parse_args()

    # Copy out timeout value, and convert to seconds,
    _timeout = args.timeout * 60

    # Copy out RS92 ephemeris value, if provided.
    if args.ephemeris != "None":
        rs92_ephemeris = args.ephemeris

    # Set log-level to DEBUG if requested
    if args.verbose:
        logging_level = logging.DEBUG

    # Validate the user supplied log path
    if Path(args.log).is_dir():
        logging_path = Path(args.log).absolute()
    else:
        # Define the default logging path
        logging_path = Path("./log/").absolute()
        # Using print because logging may not be established yet
        print("Invalid logging path, using default. Does the folder exist?")

    # Configure logging
    _log_suffix = datetime.datetime.utcnow().strftime(
        "%Y%m%d-%H%M%S_system.log")
    _log_path = logging_path / _log_suffix

    if args.systemlog:
        # Only write out a logs to a system log file if we have been asked to.
        # Systemd will capture and logrotate our logs anyway, so writing to our own log file is less useful.
        logging.basicConfig(
            format="%(asctime)s %(levelname)s:%(message)s",
            filename=_log_path,
            level=logging_level,
        )
        logging.info("Opened new system log file: {}".format(_log_path))
        # Also add a separate stdout logger.
        stdout_format = logging.Formatter(
            "%(asctime)s %(levelname)s:%(message)s")
        stdout_handler = logging.StreamHandler(sys.stdout)
        stdout_handler.setFormatter(stdout_format)
        logging.getLogger().addHandler(stdout_handler)
    else:
        # Otherwise, we only need the stdout logger, which if we don't specify a filename to logging.basicConfig,
        # is the default...
        logging.basicConfig(format="%(asctime)s %(levelname)s:%(message)s",
                            level=logging_level)

    # Add the web interface logging handler.
    web_handler = WebHandler()
    logging.getLogger().addHandler(web_handler)

    # Set the requests/socketio loggers (and related) to only display critical log messages.
    logging.getLogger("requests").setLevel(logging.CRITICAL)
    logging.getLogger("urllib3").setLevel(logging.CRITICAL)
    logging.getLogger("werkzeug").setLevel(logging.ERROR)
    logging.getLogger("socketio").setLevel(logging.ERROR)
    logging.getLogger("engineio").setLevel(logging.ERROR)
    logging.getLogger("geventwebsocket").setLevel(logging.ERROR)

    # Attempt to read in config file
    logging.info("Reading configuration file...")
    _temp_cfg = read_auto_rx_config(args.config)
    if _temp_cfg is None:
        logging.critical("Error in configuration file! Exiting...")
        sys.exit(ReturnValues.EXIT_FAILURE)
    else:
        config = _temp_cfg
        autorx.sdr_list = config["sdr_settings"]

    # Check all the RS utilities exist.
    if not check_rs_utils():
        sys.exit(ReturnValues.EXIT_FAILURE)

    # If a sonde type has been provided, insert an entry into the scan results,
    # and immediately start a decoder. This also sets the decoder time to 0, which
    # allows it to run indefinitely.
    if args.model is not None:
        logging.warning(
            "Overriding RX timeout for manually specified radiosonde type. Decoders will not automatically stop!"
        )
        config["rx_timeout"] = 0
        # BUGFIX: the parser stores the sonde type as args.model ("-m"/"--model"),
        # not args.type - referencing args.type raised an AttributeError here.
        # Frequency is converted MHz -> Hz for the scan-result entry.
        autorx.scan_results.put([[args.frequency * 1e6, args.model]])

    # Start up the flask server.
    # This needs to occur AFTER logging is setup, else logging breaks horribly for some reason.
    start_flask(host=config["web_host"], port=config["web_port"])

    # If we have been supplied a frequency via the command line, override the whitelist settings
    # to only include the supplied frequency.
    if args.frequency != 0.0:
        config["whitelist"] = [args.frequency]

    # Start our exporter options
    # Telemetry Logger
    if config["per_sonde_log"]:
        _logger = TelemetryLogger(log_directory=logging_path)
        exporter_objects.append(_logger)
        exporter_functions.append(_logger.add)

    # Email notifications on launch / landing events.
    if config["email_enabled"]:
        _email_notification = EmailNotification(
            smtp_server=config["email_smtp_server"],
            smtp_port=config["email_smtp_port"],
            smtp_authentication=config["email_smtp_authentication"],
            smtp_login=config["email_smtp_login"],
            smtp_password=config["email_smtp_password"],
            mail_from=config["email_from"],
            mail_to=config["email_to"],
            mail_subject=config["email_subject"],
            station_position=(
                config["station_lat"],
                config["station_lon"],
                config["station_alt"],
            ),
            launch_notifications=config["email_launch_notifications"],
            landing_notifications=config["email_landing_notifications"],
            landing_range_threshold=config["email_landing_range_threshold"],
            landing_altitude_threshold=config[
                "email_landing_altitude_threshold"],
        )
        # Kept in a global so other parts of the system can send error emails.
        email_exporter = _email_notification

        exporter_objects.append(_email_notification)
        exporter_functions.append(_email_notification.add)

    # Habitat Uploader
    if config["habitat_enabled"]:
        # Only upload a listener position if the user has opted in.
        if config["habitat_upload_listener_position"] is False:
            _habitat_station_position = None
        else:
            _habitat_station_position = (
                config["station_lat"],
                config["station_lon"],
                config["station_alt"],
            )

        _habitat = HabitatUploader(
            user_callsign=config["habitat_uploader_callsign"],
            user_antenna=config["habitat_uploader_antenna"],
            station_position=_habitat_station_position,
            synchronous_upload_time=config["habitat_upload_rate"],
            callsign_validity_threshold=config["payload_id_valid"],
            url=config["habitat_url"],
        )

        exporter_objects.append(_habitat)
        exporter_functions.append(_habitat.add)

    # APRS Uploader
    if config["aprs_enabled"]:
        # Only use a custom object ID if it is valid and the user asked for it.
        if (config["aprs_object_id"] == "<id>") or (
                config["aprs_use_custom_object_id"] == False):
            _aprs_object = None
        else:
            _aprs_object = config["aprs_object_id"]

        _aprs = APRSUploader(
            aprs_callsign=config["aprs_user"],
            aprs_passcode=config["aprs_pass"],
            object_name_override=_aprs_object,
            object_comment=config["aprs_custom_comment"],
            position_report=config["aprs_position_report"],
            aprsis_host=config["aprs_server"],
            aprsis_port=config["aprs_port"],
            synchronous_upload_time=config["aprs_upload_rate"],
            callsign_validity_threshold=config["payload_id_valid"],
            station_beacon=config["station_beacon_enabled"],
            station_beacon_rate=config["station_beacon_rate"],
            station_beacon_position=(
                config["station_lat"],
                config["station_lon"],
                config["station_alt"],
            ),
            station_beacon_comment=config["station_beacon_comment"],
            station_beacon_icon=config["station_beacon_icon"],
        )

        exporter_objects.append(_aprs)
        exporter_functions.append(_aprs.add)

    # OziExplorer
    if config["ozi_enabled"] or config["payload_summary_enabled"]:
        if config["ozi_enabled"]:
            _ozi_port = config["ozi_port"]
        else:
            _ozi_port = None

        if config["payload_summary_enabled"]:
            _summary_port = config["payload_summary_port"]
        else:
            _summary_port = None

        _ozimux = OziUploader(
            ozimux_port=_ozi_port,
            payload_summary_port=_summary_port,
            update_rate=config["ozi_update_rate"],
            station=config["habitat_uploader_callsign"],
        )

        exporter_objects.append(_ozimux)
        exporter_functions.append(_ozimux.add)

    # Rotator
    if config["rotator_enabled"]:
        _rotator = Rotator(
            station_position=(
                config["station_lat"],
                config["station_lon"],
                config["station_alt"],
            ),
            rotctld_host=config["rotator_hostname"],
            rotctld_port=config["rotator_port"],
            rotator_update_rate=config["rotator_update_rate"],
            rotator_update_threshold=config["rotation_threshold"],
            rotator_homing_enabled=config["rotator_homing_enabled"],
            rotator_homing_delay=config["rotator_homing_delay"],
            rotator_home_position=[
                config["rotator_home_azimuth"],
                config["rotator_home_elevation"],
            ],
        )

        exporter_objects.append(_rotator)
        exporter_functions.append(_rotator.add)

    # Sondehub Uploader
    if config["sondehub_enabled"]:
        # Re-uses the habitat listener-position opt-in flag.
        if config["habitat_upload_listener_position"] is False:
            _sondehub_station_position = None
        else:
            _sondehub_station_position = (
                config["station_lat"],
                config["station_lon"],
                config["station_alt"],
            )

        _sondehub = SondehubUploader(
            user_callsign=config["habitat_uploader_callsign"],
            user_position=_sondehub_station_position,
            user_antenna=config["habitat_uploader_antenna"],
            upload_rate=config["sondehub_upload_rate"],
        )

        exporter_objects.append(_sondehub)
        exporter_functions.append(_sondehub.add)

    # Web interface telemetry archive.
    _web_exporter = WebExporter(max_age=config["web_archive_age"])
    exporter_objects.append(_web_exporter)
    exporter_functions.append(_web_exporter.add)

    # GPSD Startup
    if config["gpsd_enabled"]:
        gpsd_adaptor = GPSDAdaptor(
            hostname=config["gpsd_host"],
            port=config["gpsd_port"],
            callback=station_position_update,
        )

    check_autorx_version()

    # Note the start time.
    _start_time = time.time()

    # If we have been asked to start decoding a specific radiosonde type, we need to start up
    # the decoder immediately, before a scanner thread is started.
    if args.model is not None:
        handle_scan_results()

    # Loop.
    while True:
        # Check for finished tasks.
        clean_task_list()
        # Handle any new scan results.
        handle_scan_results()
        # Sleep a little bit.
        time.sleep(2)

        # If all SDRs have been marked as failed, there is nothing left to do.
        if len(autorx.sdr_list) == 0:
            # No Functioning SDRs!
            logging.critical(
                "Task Manager - No SDRs available! Cannot continue...")
            email_error(
                "auto_rx exited due to all SDRs being marked as failed.")
            raise IOError("No SDRs available!")

        # Allow a timeout after a set time, for users who wish to run auto_rx
        # within a cronjob.
        if (_timeout > 0) and ((time.time() - _start_time) > _timeout):
            logging.info("Shutdown time reached. Closing.")
            stop_flask(host=config["web_host"], port=config["web_port"])
            stop_all()
            break
def main():
    """
    Main Loop.

    Older/minimal auto_rx entry point: parses command-line arguments, sets up
    logging, reads the station config, starts the web interface and the enabled
    exporters, then loops handling scan results until the optional timeout.
    """
    # NOTE(review): relies on module-level globals and helpers
    # (read_auto_rx_config, start_flask, exporter classes, etc.).
    global config, exporter_objects, exporter_functions, logging_level, rs92_ephemeris

    # Command line arguments.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-c", "--config",
        default="station.cfg",
        help="Receive Station Configuration File. Default: station.cfg")
    parser.add_argument(
        "-f", "--frequency",
        type=float,
        default=0.0,
        help="Sonde Frequency Override (MHz). This overrides the scan whitelist with the supplied frequency."
    )
    parser.add_argument(
        "-t", "--timeout",
        type=int,
        default=0,
        help="Close auto_rx system after N minutes. Use 0 to run continuously."
    )
    parser.add_argument("-v", "--verbose",
                        help="Enable debug output.",
                        action="store_true")
    parser.add_argument(
        "-e", "--ephemeris",
        type=str,
        default="None",
        help="Use a manually obtained ephemeris file when decoding RS92 Sondes."
    )
    args = parser.parse_args()

    # Copy out timeout value, and convert to seconds,
    _timeout = args.timeout * 60

    # Copy out RS92 ephemeris value, if provided.
    if args.ephemeris != "None":
        rs92_ephemeris = args.ephemeris

    # Set log-level to DEBUG if requested
    if args.verbose:
        logging_level = logging.DEBUG

    # Configure logging
    # Writes a timestamped system log file into ./log/ (path is hard-coded here)
    # and mirrors all log output to stdout via a second handler.
    logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s',
                        filename=datetime.datetime.utcnow().strftime(
                            "log/%Y%m%d-%H%M%S_system.log"),
                        level=logging_level)
    stdout_format = logging.Formatter('%(asctime)s %(levelname)s:%(message)s')
    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setFormatter(stdout_format)
    logging.getLogger().addHandler(stdout_handler)
    # Web interface log handler, so log lines appear in the browser UI.
    web_handler = WebHandler()
    logging.getLogger().addHandler(web_handler)

    # Set the requests/socketio logger to only display critical log messages.
    logging.getLogger("requests").setLevel(logging.CRITICAL)
    logging.getLogger("urllib3").setLevel(logging.CRITICAL)
    logging.getLogger('werkzeug').setLevel(logging.ERROR)
    logging.getLogger('socketio').setLevel(logging.ERROR)
    logging.getLogger('engineio').setLevel(logging.ERROR)

    # Attempt to read in config file
    logging.info("Reading configuration file...")
    _temp_cfg = read_auto_rx_config(args.config)
    if _temp_cfg is None:
        logging.critical("Error in configuration file! Exiting...")
        sys.exit(1)
    else:
        config = _temp_cfg
        autorx.sdr_list = config['sdr_settings']

    # Check all the RS utilities exist.
    if not check_rs_utils():
        sys.exit(1)

    # Start up the flask server.
    # This needs to occur AFTER logging is setup, else logging breaks horribly for some reason.
    start_flask(port=config['web_port'])

    # If we have been supplied a frequency via the command line, override the whitelist settings
    # to only include the supplied frequency.
    if args.frequency != 0.0:
        config['whitelist'] = [args.frequency]

    # Start our exporter options
    # Telemetry Logger
    if config['per_sonde_log']:
        _logger = TelemetryLogger(log_directory="./log/")
        exporter_objects.append(_logger)
        exporter_functions.append(_logger.add)

    # Email notifications.
    if config['email_enabled']:
        _email_notification = EmailNotification(
            smtp_server=config['email_smtp_server'],
            mail_from=config['email_from'],
            mail_to=config['email_to'])
        exporter_objects.append(_email_notification)
        exporter_functions.append(_email_notification.add)

    # Habitat Uploader
    if config['habitat_enabled']:
        # "<id>" is the sentinel for "use the sonde's own ID" (no override).
        if config['habitat_payload_callsign'] == "<id>":
            _habitat_payload_call = None
        else:
            _habitat_payload_call = config['habitat_payload_callsign']

        # Only upload a listener position if the user has opted in.
        if config['habitat_upload_listener_position'] is False:
            _habitat_user_position = None
        else:
            _habitat_user_position = (config['station_lat'],
                                      config['station_lon'],
                                      config['station_alt'])

        _habitat = HabitatUploader(
            user_callsign=config['habitat_uploader_callsign'],
            user_antenna=config['habitat_uploader_antenna'],
            user_position=_habitat_user_position,
            payload_callsign_override=_habitat_payload_call,
            synchronous_upload_time=config['habitat_upload_rate'],
            callsign_validity_threshold=config['payload_id_valid'])
        exporter_objects.append(_habitat)
        exporter_functions.append(_habitat.add)

    # APRS Uploader
    if config['aprs_enabled']:
        # "<id>" sentinel again: no custom APRS object name override.
        if config['aprs_object_id'] == "<id>":
            _aprs_object = None
        else:
            _aprs_object = config['aprs_object_id']

        _aprs = APRSUploader(
            aprs_callsign=config['aprs_user'],
            aprs_passcode=config['aprs_pass'],
            object_name_override=_aprs_object,
            object_comment=config['aprs_custom_comment'],
            aprsis_host=config['aprs_server'],
            synchronous_upload_time=config['aprs_upload_rate'],
            callsign_validity_threshold=config['payload_id_valid'])
        exporter_objects.append(_aprs)
        exporter_functions.append(_aprs.add)

    # OziExplorer
    if config['ozi_enabled'] or config['payload_summary_enabled']:
        if config['ozi_enabled']:
            _ozi_port = config['ozi_port']
        else:
            _ozi_port = None

        if config['payload_summary_enabled']:
            _summary_port = config['payload_summary_port']
        else:
            _summary_port = None

        _ozimux = OziUploader(ozimux_port=_ozi_port,
                              payload_summary_port=_summary_port,
                              update_rate=config['ozi_update_rate'])
        exporter_objects.append(_ozimux)
        exporter_functions.append(_ozimux.add)

    # Web interface telemetry archive.
    _web_exporter = WebExporter(max_age=config['web_archive_age'])
    exporter_objects.append(_web_exporter)
    exporter_functions.append(_web_exporter.add)

    # MQTT (?) - TODO

    # Note the start time.
    _start_time = time.time()

    # Loop.
    while True:
        # Check for finished tasks.
        clean_task_list()
        # Handle any new scan results.
        handle_scan_results()
        # Sleep a little bit.
        time.sleep(2)

        # Allow a timeout after a set time, for users who wish to run auto_rx
        # within a cronjob.
        if (_timeout > 0) and ((time.time() - _start_time) > _timeout):
            logging.info("Shutdown time reached. Closing.")
            stop_flask(port=config['web_port'])
            stop_all()
            break
def main():
    """
    Main Loop.

    Intermediate auto_rx entry point: parses command-line arguments
    (including a log path and an immediate-decode sonde type), configures
    optional system-file logging, reads the station config, starts the web
    interface and enabled exporters (including the rotator), then loops
    handling scan results until the optional timeout.
    """
    global config, exporter_objects, exporter_functions, logging_level, rs92_ephemeris

    # Command line arguments.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-c", "--config",
        default="station.cfg",
        help="Receive Station Configuration File. Default: station.cfg")
    parser.add_argument("-l", "--log",
                        default="./log/",
                        help="Receive Station Log Path. Default: ./log/")
    parser.add_argument(
        "-f", "--frequency",
        type=float,
        default=0.0,
        help="Sonde Frequency Override (MHz). This overrides the scan whitelist with the supplied frequency."
    )
    parser.add_argument(
        "-m", "--type",
        type=str,
        default=None,
        help="Immediately start a decoder for a provided sonde type (RS41, RS92, DFM, M10, etc)"
    )
    parser.add_argument(
        "-t", "--timeout",
        type=int,
        default=0,
        help="Close auto_rx system after N minutes. Use 0 to run continuously."
    )
    parser.add_argument("-v", "--verbose",
                        help="Enable debug output.",
                        action="store_true")
    parser.add_argument(
        "-e", "--ephemeris",
        type=str,
        default="None",
        help="Use a manually obtained ephemeris file when decoding RS92 Sondes."
    )
    parser.add_argument(
        "--systemlog",
        action='store_true',
        default=False,
        help="Write a auto_rx system log-file to ./log/ (default=False)")
    args = parser.parse_args()

    # Copy out timeout value, and convert to seconds,
    _timeout = args.timeout * 60

    # Copy out RS92 ephemeris value, if provided.
    if args.ephemeris != "None":
        rs92_ephemeris = args.ephemeris

    # Set log-level to DEBUG if requested
    if args.verbose:
        logging_level = logging.DEBUG

    # Define the default logging path
    logging_path = "./log/"

    # Validate the user supplied log path
    if os.path.isdir(args.log):
        logging_path = os.path.abspath(args.log)
    else:
        #Using print because logging may not be established yet
        print("Invalid logging path, using default. Does the folder exist?")

    # Configure logging
    _log_suffix = datetime.datetime.utcnow().strftime(
        "%Y%m%d-%H%M%S_system.log")
    _log_path = os.path.join(logging_path, _log_suffix)

    if args.systemlog:
        # Only write out a logs to a system log file if we have been asked to.
        # Systemd will capture and logrotate our logs anyway, so writing to our own log file is less useful.
        logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s',
                            filename=_log_path,
                            level=logging_level)
        logging.info("Opened new system log file: %s" % _log_path)
        # Also add a separate stdout logger.
        stdout_format = logging.Formatter(
            '%(asctime)s %(levelname)s:%(message)s')
        stdout_handler = logging.StreamHandler(sys.stdout)
        stdout_handler.setFormatter(stdout_format)
        logging.getLogger().addHandler(stdout_handler)
    else:
        # Otherwise, we only need the stdout logger, which if we don't specify a filename to logging.basicConfig,
        # is the default...
        logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s',
                            level=logging_level)

    # Add the web interface logging handler.
    web_handler = WebHandler()
    logging.getLogger().addHandler(web_handler)

    # Set the requests/socketio loggers (and related) to only display critical log messages.
    logging.getLogger("requests").setLevel(logging.CRITICAL)
    logging.getLogger("urllib3").setLevel(logging.CRITICAL)
    logging.getLogger('werkzeug').setLevel(logging.ERROR)
    logging.getLogger('socketio').setLevel(logging.ERROR)
    logging.getLogger('engineio').setLevel(logging.ERROR)
    logging.getLogger('geventwebsocket').setLevel(logging.ERROR)

    # Attempt to read in config file
    logging.info("Reading configuration file...")
    _temp_cfg = read_auto_rx_config(args.config)
    if _temp_cfg is None:
        logging.critical("Error in configuration file! Exiting...")
        sys.exit(1)
    else:
        config = _temp_cfg
        autorx.sdr_list = config['sdr_settings']

    # Check all the RS utilities exist.
    if not check_rs_utils():
        sys.exit(1)

    # Start up the flask server.
    # This needs to occur AFTER logging is setup, else logging breaks horribly for some reason.
    start_flask(port=config['web_port'])

    # If we have been supplied a frequency via the command line, override the whitelist settings
    # to only include the supplied frequency.
    if args.frequency != 0.0:
        config['whitelist'] = [args.frequency]

    # Start our exporter options
    # Telemetry Logger
    if config['per_sonde_log']:
        _logger = TelemetryLogger(log_directory=logging_path)
        exporter_objects.append(_logger)
        exporter_functions.append(_logger.add)

    # Email notifications.
    if config['email_enabled']:
        _email_notification = EmailNotification(
            smtp_server=config['email_smtp_server'],
            mail_from=config['email_from'],
            mail_to=config['email_to'])
        exporter_objects.append(_email_notification)
        exporter_functions.append(_email_notification.add)

    # Habitat Uploader
    if config['habitat_enabled']:
        # "<id>" is the sentinel for "use the sonde's own ID" (no override).
        if config['habitat_payload_callsign'] == "<id>":
            _habitat_payload_call = None
        else:
            _habitat_payload_call = config['habitat_payload_callsign']

        # Only upload a listener position if the user has opted in.
        if config['habitat_upload_listener_position'] is False:
            _habitat_user_position = None
        else:
            _habitat_user_position = (config['station_lat'],
                                      config['station_lon'],
                                      config['station_alt'])

        _habitat = HabitatUploader(
            user_callsign=config['habitat_uploader_callsign'],
            user_antenna=config['habitat_uploader_antenna'],
            user_position=_habitat_user_position,
            payload_callsign_override=_habitat_payload_call,
            synchronous_upload_time=config['habitat_upload_rate'],
            callsign_validity_threshold=config['payload_id_valid'])
        exporter_objects.append(_habitat)
        exporter_functions.append(_habitat.add)

    # APRS Uploader
    if config['aprs_enabled']:
        # "<id>" sentinel again: no custom APRS object name override.
        if config['aprs_object_id'] == "<id>":
            _aprs_object = None
        else:
            _aprs_object = config['aprs_object_id']

        _aprs = APRSUploader(
            aprs_callsign=config['aprs_user'],
            aprs_passcode=config['aprs_pass'],
            object_name_override=_aprs_object,
            object_comment=config['aprs_custom_comment'],
            position_report=config['aprs_position_report'],
            aprsis_host=config['aprs_server'],
            synchronous_upload_time=config['aprs_upload_rate'],
            callsign_validity_threshold=config['payload_id_valid'],
            station_beacon=config['station_beacon_enabled'],
            station_beacon_rate=config['station_beacon_rate'],
            # NOTE(review): beacon position here is [lat, lon] only - no
            # altitude, unlike the rotator position below. Confirm intended.
            station_beacon_position=[
                config['station_lat'], config['station_lon']
            ],
            station_beacon_comment=config['station_beacon_comment'],
            station_beacon_icon=config['station_beacon_icon'])
        exporter_objects.append(_aprs)
        exporter_functions.append(_aprs.add)

    # OziExplorer
    if config['ozi_enabled'] or config['payload_summary_enabled']:
        if config['ozi_enabled']:
            _ozi_port = config['ozi_port']
        else:
            _ozi_port = None

        if config['payload_summary_enabled']:
            _summary_port = config['payload_summary_port']
        else:
            _summary_port = None

        _ozimux = OziUploader(ozimux_port=_ozi_port,
                              payload_summary_port=_summary_port,
                              update_rate=config['ozi_update_rate'])
        exporter_objects.append(_ozimux)
        exporter_functions.append(_ozimux.add)

    # Rotator
    if config['rotator_enabled']:
        _rotator = Rotator(
            station_position=[
                config['station_lat'], config['station_lon'],
                config['station_alt']
            ],
            rotctld_host=config['rotator_hostname'],
            rotctld_port=config['rotator_port'],
            rotator_update_rate=config['rotator_update_rate'],
            rotator_update_threshold=config['rotation_threshold'],
            rotator_homing_enabled=config['rotator_homing_enabled'],
            rotator_homing_delay=config['rotator_homing_delay'],
            rotator_home_position=[
                config['rotator_home_azimuth'],
                config['rotator_home_elevation']
            ])
        exporter_objects.append(_rotator)
        exporter_functions.append(_rotator.add)

    # Web interface telemetry archive.
    _web_exporter = WebExporter(max_age=config['web_archive_age'])
    exporter_objects.append(_web_exporter)
    exporter_functions.append(_web_exporter.add)

    # MQTT (?) - TODO

    # Note the start time.
    _start_time = time.time()

    # If a sonde type has been provided, insert an entry into the scan results,
    # and immediately start a decoder. If decoding fails, then we continue into
    # the main scanning loop.
    if args.type != None:
        # Frequency is converted MHz -> Hz for the scan-result entry.
        # NOTE(review): uses a bare `scan_results` name here (not
        # autorx.scan_results as in other versions) - verify it is in scope.
        scan_results.put([[args.frequency * 1e6, args.type]])
        handle_scan_results()

    # Loop.
    while True:
        # Check for finished tasks.
        clean_task_list()
        # Handle any new scan results.
        handle_scan_results()
        # Sleep a little bit.
        time.sleep(2)

        # Allow a timeout after a set time, for users who wish to run auto_rx
        # within a cronjob.
        if (_timeout > 0) and ((time.time() - _start_time) > _timeout):
            logging.info("Shutdown time reached. Closing.")
            stop_flask(port=config['web_port'])
            stop_all()
            break
# Small manual test script: push one PAYLOAD_SUMMARY packet through the
# OziUploader exporter, then wait so the asynchronous sender can flush it.
import time
import datetime
from autorx.ozimux import OziUploader
from time import gmtime, localtime

# BUGFIX: Python 3 rejects integer literals with leading zeros (e.g. 03, 04),
# and requires print() to be called as a function.
dt = datetime.datetime(2019, 12, 29, 20, 3, 4, 79043)
gmt = gmtime()

# No ozimux output port - only the payload-summary UDP port is exercised here.
ozimux = OziUploader(ozimux_port=None,
                     payload_summary_port=55673,
                     update_rate=5,
                     station='KD2EAT')

# Minimal telemetry dictionary with the fields the exporter reads.
packet = {
    'frame': '1',
    'id': '1',
    'datetime': gmt,
    'lat': 42.4417,
    'lon': -76.4985,
    'alt': 22345,
    'temp': 32,
    'type': 'PAYLOAD_SUMMARY',
    'freq': '1678',
    'freq_float': 1678.0,
    'datetime_dt': dt,
}

_short_time = packet['datetime_dt'].strftime("%H:%M:%S")

ozimux.add(packet)
print("Sent. sleeping 10 to avoid race condition where we shut down before sending the packet.")
time.sleep(10)