def main():
    """Decode a binary sequence file and print it as text."""
    log.begin()

    argparser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )

    # Required positional argument: path to the encoded sequence file.
    argparser.add_argument('filename')

    opts = vars(argparser.parse_args())
    path = os.path.abspath(opts['filename'])

    # Warn (but continue) when the file does not look like a binary sequence.
    ext = os.path.splitext(path)[1]
    if ext.lower() != '.bin':
        log.warn("Filename '%s' does not have a '.bin' extension", path)

    sequence = seq.Seq(path)

    if not sequence.validate():
        for msg in sequence.messages:
            log.error(msg)

    sequence.printText()
    log.end()
def main():
    """Decode a binary sequence file and write it back out as text."""
    log.begin()

    argparser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )

    # Required positional argument: path to the encoded sequence file.
    argparser.add_argument('filename', default=None)

    opts = vars(argparser.parse_args())
    path = os.path.abspath(opts['filename'])

    ext = os.path.splitext(path)[1]
    if ext.lower() != '.bin':
        log.warn("Filename '%s' does not have a '.bin' extension", path)

    sequence = seq.Seq(path)

    if not sequence.validate():
        for msg in sequence.messages:
            log.error(msg)
    else:
        msg = "Writing %s (seqid=0x%04x, version=%u)."
        log.info(msg, sequence.txtpath, sequence.seqid, sequence.version)
        sequence.writeText()

    log.end()
def _subscribe_all(self):
    """
    Subscribes all streams to their input.
    Subscribes all plugins to all their inputs.
    Subscribes all plugin outputs to the plugin.
    """
    # Wire every stream to each of its named (non-port) inputs.
    for stream in self.inbound_streams + self.outbound_streams:
        for input_ in stream.inputs:
            if input_ is not None and not type(input_) is int:
                self._subscribe(stream, input_)

    for plugin in self.plugins:
        # Plugins subscribe to every one of their declared inputs.
        for input_ in plugin.inputs:
            self._subscribe(plugin, input_)

        # Each declared output must match an outbound stream by name;
        # otherwise messages published to it would be dropped.
        for output in plugin.outputs:
            subscriber = next(
                (s for s in self.outbound_streams if s.name == output), None)
            if subscriber is not None:
                self._subscribe(subscriber, plugin.name)
            else:
                log.warn("The outbound stream {} does not "
                         "exist so will not receive messages "
                         "from {}".format(output, plugin))

    # Lastly setup the outputstream to receive commands
    self._subscribe_cmdr()
def _load_leap_second_data(self):
    """Load leap second data from the configured pickle file.

    Falls back to fetching fresh data when the file is missing or stale;
    raises ValueError if no usable data can be obtained.
    """
    ls_file = ait.config.get(
        "leapseconds.filename",
        os.path.join(ait.config._directory, _DEFAULT_FILE_NAME),
    )

    try:
        log.info("Attempting to load leapseconds.dat")
        with open(ls_file, "rb") as stream:
            self._data = pickle.load(stream)
        log.info("Loaded leapseconds config file successfully")
    except IOError:
        log.info("Unable to locate leapseconds config file")

    if self._data and self.is_valid():
        # Cached data is present and still valid; report its expiry.
        valid_until = self._data["valid"].strftime("%m/%d/%Y")
        log.info("Leapseconds data valid until %s", valid_until)
        return

    # Data missing or expired: try to refresh it.
    try:
        self._update_leap_second_data()
    except ValueError:
        log.warn("Leapsecond data update failed. "
                 "This may cause problems with some functionality")
        if self._data:
            log.warn("Continuing with out of date leap second data")
        else:
            raise ValueError("Could not load leap second data")
def _db_connect(self):
    """Connect to the database configured for the DataArchive plugin.

    Looks up the DataArchive plugin entry in the server configuration,
    imports its datastore class, and connects using the remaining plugin
    options as keyword arguments.  On failure, telemetry playback is
    disabled with a warning instead of raising.
    """
    # Get datastore from config
    plugins = ait.config.get('server.plugins', [])
    datastore = None
    other_args = {}

    # Iterate the plugin entries directly instead of via range(len(...)).
    for entry in plugins:
        plugin_cfg = entry['plugin']
        if plugin_cfg['name'] == 'ait.core.server.plugins.DataArchive':
            datastore = plugin_cfg['datastore']
            # Deep-copy so the pops below do not mutate the live config.
            other_args = copy.deepcopy(plugin_cfg)
            other_args.pop('name')
            other_args.pop('inputs', None)
            other_args.pop('outputs', None)
            other_args.pop('datastore', None)
            break

    if datastore:
        try:
            mod, cls = datastore.rsplit('.', 1)

            # Connect to database
            self.dbconn = getattr(importlib.import_module(mod), cls)()
            self.dbconn.connect(**other_args)
        except Exception as e:
            log.error('Error connecting to datastore {}: {}'.format(
                datastore, e))
            log.warn('Disabling telemetry playback.')
    else:
        msg = ('[GUI Playback Configuration]'
               'Unable to locate DataArchive plugin configuration for '
               'historical data queries. Historical telemetry playback '
               'will be disabled in monitoring UI and server endpoints.')
        log.warn(msg)
def main():
    """Compile text sequence file(s) into binary sequences."""
    log.begin()

    try:
        parser = argparse.ArgumentParser(
            description=__doc__,
            formatter_class=argparse.RawDescriptionHelpFormatter,
        )

        # Add required command line arguments
        parser.add_argument(
            "filename",
            nargs="+",
            metavar="</path/to/seq>",
            help="File or collection of sequence file(s)",
        )

        # Add optional command line arguments
        args = parser.parse_args()

        for fname in args.filename:
            filename = os.path.abspath(fname)
            if not os.path.isfile(filename):
                # BUG FIX: the message contained a literal placeholder
                # instead of interpolating the offending path.
                raise Exception(f"File not found: {filename}")

            extension = os.path.splitext(filename)[1]
            if extension.lower() != ".txt":
                # BUG FIX: interpolate the actual path into the warning.
                log.warn(f"Filename '{filename}' does not have a '.txt' extension")

            # The sequence id is the final underscore-delimited component
            # of the basename (minus its extension).
            parts = os.path.basename(filename).split("_")
            seqid = os.path.splitext(parts[-1])[0]

            try:
                seqid = int(seqid)
            except ValueError:
                # BUG FIX: this message was missing its f-string prefix, so
                # the braces were emitted verbatim instead of interpolated.
                raise Exception(
                    f'Invalid filename "{os.path.basename(filename)}": . {__doc__}'
                )

            sequence = seq.createSeq(filename, id=seqid)

            if not sequence.validate():
                for msg in sequence.log.messages:
                    log.error(msg)
            else:
                binpath = sequence.binpath
                seqid = sequence.seqid
                log.info(f"Writing {binpath} (seqid=0x{seqid:04X}).")
                sequence.writeBinary()

        exit = 0
    except Exception as e:
        log.error(e)
        exit = 1

    log.end()
    sys.exit(exit)
def get_realtime_tlm(self):
    """Handles realtime packet dispatch via websocket layers

    Pops packets from the telemetry queue, formats them for OpenMCT and
    pushes them over the client websocket; sends an empty JSON object as
    a keep-alive probe when the queue is idle.
    """
    websocket = bottle.request.environ.get('wsgi.websocket')

    if not websocket:
        bottle.abort(400, 'Expected WebSocket request.')

    # default empty object for probing websocket connection
    empty_map = dict()

    req_env = bottle.request.environ
    client_ip = (req_env.get('HTTP_X_FORWARDED_FOR')
                 or req_env.get('REMOTE_ADDR') or "(unknown)")

    self.dbg_message('Creating a new web-socket session with client IP ' +
                     client_ip)

    try:
        while not websocket.closed:
            try:
                self.dbg_message("Polling Telemtry queue...")
                uid, data = self._tlmQueue.popleft(timeout=30)
                pkt_defn = self._get_tlm_packet_def(uid)
                if not pkt_defn:
                    continue

                ait_pkt = ait.core.tlm.Packet(pkt_defn, data=data)
                openmct_pkt = self.format_tlmpkt_for_openmct(ait_pkt)
                openmct_pkt_jsonstr = json.dumps(
                    openmct_pkt, default=self.datetime_jsonifier)

                self.dbg_message("Sending realtime telemtry websocket msg: " +
                                 openmct_pkt_jsonstr)

                websocket.send(openmct_pkt_jsonstr)
            except IndexError:
                # If no telemetry has been received by the GUI
                # server after timeout seconds, "probe" the client
                # websocket connection to make sure it's still
                # active and if so, keep it alive. This is
                # accomplished by sending an empty JSON object.
                self.dbg_message("Telemtry queue is empty.")
                if not websocket.closed:
                    websocket.send(json.dumps(empty_map))

        self.dbg_message('Web-socket session closed with client IP ' +
                         client_ip)
    # BUG FIX: Python 2 'except E, name' syntax is a SyntaxError in
    # Python 3; use 'except E as name'.  Unused locals 'pad' and
    # 'pkt_name' were also removed.
    except geventwebsocket.WebSocketError as wser:
        log.warn('Web-socket session had an error with client IP ' +
                 client_ip + ': ' + str(wser))
def main():
    """Compile text sequence file(s) into binary sequences."""
    log.begin()

    try:
        parser = argparse.ArgumentParser(
            description=__doc__,
            formatter_class=argparse.RawDescriptionHelpFormatter)

        # Add required command line arguments
        parser.add_argument('filename', nargs='+', metavar='</path/to/seq>',
                            help='File or collection of sequence file(s)')

        # Add optional command line arguments
        args = parser.parse_args()

        for fname in args.filename:
            filename = os.path.abspath(fname)
            if not os.path.isfile(filename):
                raise Exception('File not found: %s ' % filename)

            extension = os.path.splitext(filename)[1]
            if extension.lower() != '.txt':
                log.warn("Filename '%s' does not have a '.txt' extension",
                         filename)

            # The sequence id is the final underscore-delimited component
            # of the basename (minus its extension).  The unused 'desc',
            # 'subsys' and 'defaults' locals were removed.
            parts = os.path.basename(filename).split('_')
            seqid = os.path.splitext(parts[-1])[0]

            try:
                int(seqid)
            except ValueError:
                raise Exception('Invalid filename "%s": . %s' %
                                (os.path.basename(filename), __doc__))

            sequence = seq.Seq(filename, id=seqid)

            if not sequence.validate():
                for msg in sequence.log.messages:
                    log.error(msg)
            else:
                log.info("Writing %s (seqid=0x%04x).",
                         sequence.binpath, sequence.seqid)
                sequence.writeBinary()

        exit = 0
    # BUG FIX: Python 2 'except Exception, e' is a SyntaxError in Python 3.
    except Exception as e:
        log.error(e)
        exit = 1
def _create_outbound_stream(self, config=None):
    """
    Creates an outbound stream from its config.

    Params:
        config:       stream configuration as read by ait.config
    Returns:
        stream:       a Stream
    Raises:
        ValueError:   if any of the required config values are missing
    """
    if config is None:
        raise ValueError("No stream config to create stream from.")

    name = self._get_stream_name(config)
    stream_handlers = self._get_stream_handlers(config, name)
    stream_input = config.get("input", None)
    stream_output = config.get("output", None)

    # Normalize the command-subscriber flag to a boolean.
    cmd_sub = config.get("command-subscriber", None)
    if cmd_sub:
        cmd_sub = str(cmd_sub).lower() in ["true", "enabled", "1"]

    zmq_args = {
        "zmq_context": self.broker.context,
        "zmq_proxy_xsub_url": self.broker.XSUB_URL,
        "zmq_proxy_xpub_url": self.broker.XPUB_URL,
    }

    if type(stream_output) is int:
        # An integer output is a port to publish on.
        ostream = PortOutputStream(name, stream_input, stream_output,
                                   stream_handlers, zmq_args=zmq_args)
    else:
        if stream_output is not None:
            log.warn("Output of stream {} is not an integer port. "
                     "Stream outputs can only be ports.".format(name))
        ostream = ZMQStream(name, stream_input, stream_handlers,
                            zmq_args=zmq_args)

    # Set the cmd subscriber field for the stream
    ostream.cmd_subscriber = cmd_sub is True
    return ostream
def getDefaultFilename(self):
    # Prefer the explicit AIT_CONFIG environment variable; otherwise fall
    # back to a config.yaml inside this object's directory.
    if 'AIT_CONFIG' not in os.environ:
        log.warn('AIT_CONFIG not set. Falling back to AIT_ROOT or CWD')
        return os.path.join(self._directory, 'config.yaml')

    return os.path.abspath(os.environ.get('AIT_CONFIG'))
def get_default_filename(self):
    # Prefer the explicit AIT_CONFIG environment variable; otherwise fall
    # back to a config.yaml inside this object's directory.
    if "AIT_CONFIG" not in os.environ:
        log.warn("AIT_CONFIG not set. Falling back to AIT_ROOT or CWD")
        return os.path.join(self._directory, "config.yaml")

    return os.path.abspath(os.environ.get("AIT_CONFIG"))
def _create_plugin(self, config):
    """
    Creates a plugin from its config.

    Params:
        config:       plugin configuration as read by ait.config
    Returns:
        plugin:       a Plugin
    Raises:
        ValueError:   if any of the required config values are missing
    """
    if config is None:
        raise ValueError("No plugin config to create plugin from.")

    other_args = copy.deepcopy(config)

    name = other_args.pop("name", None)
    if name is None:
        raise (cfg.AitConfigMissing("plugin name"))

    # TODO I don't think we actually care about this being unique? Left over from
    # previous conversations about stuff?
    module_name = name.rsplit(".", 1)[0]
    class_name = name.rsplit(".", 1)[-1]

    # Reject a plugin whose class name collides with any existing
    # stream, server or plugin.
    existing = (self.outbound_streams + self.inbound_streams +
                self.servers + self.plugins)
    if class_name in [x.name for x in existing]:
        raise ValueError(
            'Plugin "{}" already loaded. Only one plugin of a given name is allowed'
            .format(class_name))

    plugin_inputs = other_args.pop("inputs", None)
    if plugin_inputs is None:
        log.warn("No plugin inputs specified for {}".format(name))
        plugin_inputs = []

    subscribers = other_args.pop("outputs", None)
    if subscribers is None:
        log.warn("No plugin outputs specified for {}".format(name))
        subscribers = []

    # try to create plugin
    module = import_module(module_name)
    plugin_class = getattr(module, class_name)
    instance = plugin_class(
        plugin_inputs,
        subscribers,
        zmq_args={
            "zmq_context": self.broker.context,
            "zmq_proxy_xsub_url": self.broker.XSUB_URL,
            "zmq_proxy_xpub_url": self.broker.XPUB_URL,
        },
        **other_args,
    )

    return instance
def process(self, input_data, topic=None, **kwargs):
    """Check a pickled (uid, data) telemetry message against limits.

    Decodes the packet, evaluates each monitored field's limit
    definition, and triggers a notification once the configured repeat
    threshold is reached (and every notif_freq repeats thereafter).
    Undecodable input is logged and skipped.
    """
    try:
        load = pickle.loads(input_data)
        pkt_id, pkt_data = int(load[0]), load[1]
        packet = self.packet_dict[pkt_id]
        decoded = tlm.Packet(packet, data=bytearray(pkt_data))
    except Exception as e:
        log.error('TelemetryLimitMonitor: {}'.format(e))
        log.error(
            'TelemetryLimitMonitor received input_data that it is unable to process. Skipping input ...'
        )
        return

    if packet.name in self.limit_dict:
        # BUG FIX: dict.iteritems() was removed in Python 3; use items().
        for field, defn in self.limit_dict[packet.name].items():
            v = decoded._getattr(field)

            # Lazily initialize the repeat counters for this packet/field.
            if packet.name not in self.limit_trip_repeats:
                self.limit_trip_repeats[packet.name] = {}
            if field not in self.limit_trip_repeats[packet.name]:
                self.limit_trip_repeats[packet.name][field] = 0

            if defn.error(v):
                msg = 'Field {} error out of limit with value {}'.format(
                    field, v)
                log.error(msg)

                self.limit_trip_repeats[packet.name][field] += 1
                repeats = self.limit_trip_repeats[packet.name][field]

                # Notify on the first threshold hit, then every
                # notif_freq repeats thereafter.
                if (repeats == self.notif_thrshld
                        or (repeats > self.notif_thrshld and
                            (repeats - self.notif_thrshld) % self.notif_freq == 0)):
                    notify.trigger_notification('limit-error', msg)

            elif defn.warn(v):
                msg = 'Field {} warning out of limit with value {}'.format(
                    field, v)
                log.warn(msg)

                self.limit_trip_repeats[packet.name][field] += 1
                repeats = self.limit_trip_repeats[packet.name][field]

                if (repeats == self.notif_thrshld
                        or (repeats > self.notif_thrshld and
                            (repeats - self.notif_thrshld) % self.notif_freq == 0)):
                    notify.trigger_notification('limit-warn', msg)

            else:
                # Back within limits: reset the repeat counter.
                self.limit_trip_repeats[packet.name][field] = 0
def __init__(self, inputs, outputs, zmq_args=None, **kwargs):
    """Initialize the GUI plugin.

    Configures the static file root from the 'html: directory:' config
    entry when present (falling back to the built-in default otherwise)
    and spawns the web application greenlet.
    """
    super(AITGUIPlugin, self).__init__(inputs, outputs, zmq_args, **kwargs)

    try:
        HTMLRoot.User = kwargs['html']['directory']
        log.info('[GUI Plugin Configuration] Static file directory is set to {}'.format(HTMLRoot.User))
    # BUG FIX: a bare 'except:' also swallowed SystemExit and
    # KeyboardInterrupt; only a missing/invalid 'html' config entry
    # should trigger the fallback.
    except (KeyError, TypeError):
        log.warn('[GUI Plugin Configuration] Unable to locate static file directory in config.yaml. '
                 'The directory is set to {}'.format(HTMLRoot.User))

    bottle.TEMPLATE_PATH.append(HTMLRoot.User)

    gevent.spawn(self.init)
def _get_stream_handlers(self, config, name): stream_handlers = [ ] if 'handlers' in config: if config['handlers'] is not None: for handler in config['handlers']: hndlr = self._create_handler(handler) stream_handlers.append(hndlr) log.info('Created handler {} for stream {}'.format(type(hndlr).__name__, name)) else: log.warn('No handlers specified for stream {}'.format(name)) return stream_handlers
def main():
    """Decode binary sequence file(s) and write them back out as text."""
    log.begin()

    argparser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )

    # Add required command line arguments
    argparser.add_argument(
        "filename",
        nargs="+",
        metavar="</path/to/seq>",
        help="encoded sequence file(s)",
    )

    # Get command line arguments
    args = argparser.parse_args()

    for fname in args.filename:
        filename = os.path.abspath(fname)
        if not os.path.isfile(filename):
            raise Exception("File not found: %s " % filename)

        extension = os.path.splitext(filename)[1]
        if extension.lower() != ".bin":
            log.warn("Filename '%s' does not have a '.bin' extension", filename)

        # The sequence id is the final underscore-delimited component of
        # the basename (minus its extension).
        parts = os.path.basename(filename).split("_")
        seqid = os.path.splitext(parts[-1])[0]

        try:
            int(seqid)
        except ValueError:
            raise Exception(
                'Invalid filename "%s": . %s' % (os.path.basename(filename), __doc__)
            )

        sequence = seq.createSeq(filename, id=seqid)

        if not sequence.validate():
            for msg in sequence.messages:
                log.error(msg)
        else:
            log.info(
                "Writing %s (seqid=0x%04x, version=%u).",
                sequence.txtpath,
                sequence.seqid,
                sequence.version,
            )
            sequence.writeText()

    log.end()
def managed_web_socket_send(mws, message):
    '''
    Sends message to underlying web-socket
    :param mws: Managed web-socket instance
    :param message: Message to be sent
    '''
    # Skip sockets already known to be dead.
    if not mws.is_alive:
        return

    try:
        mws.web_socket.send(message)
    except geventwebsocket.WebSocketError as wserr:
        # Record the failure so the session manager can reap this socket.
        log.warn(
            f"Error while writing to web-socket {mws.id}; Message:'{message}'; Error: {wserr}"
        )
        mws.set_error()
def managed_web_socket_recv(mws):
    '''
    Attempts to read message from the websocket with timeout.
    :param mws: Managed web-socket instance
    :return: Message retrieved from underlying-websocket, or None
    '''
    result = None
    try:
        # Bounded read: the Timeout context gives up silently (False)
        # after the configured number of seconds, leaving result as None.
        with Timeout(AITOpenMctPlugin.DEFAULT_WS_RECV_TIMEOUT_SECS, False):
            result = mws.web_socket.receive()
    except geventwebsocket.WebSocketError as wserr:
        log.warn(f"Error while reading from web-socket {mws.id}; Error: {wserr}")
        mws.set_error()
    return result
def load(self, content):
    """Load EVR definitions from a YAML file path or an open stream.

    Skips reloading if this dictionary was already initialized.
    """
    if self.filename:
        log.warn('EVRDict: Skipping load() attempt after previous initialization')
        return

    # Accept either a path to a YAML file or an already-open stream.
    if os.path.isfile(content):
        self.filename = content
        stream = open(self.filename, 'rb')
    else:
        stream = content

    try:
        # NOTE(review): yaml.load requires an explicit Loader on modern
        # PyYAML; yaml.Loader matches the sibling implementation in this
        # file.  safe_load would be preferable if the EVR YAML uses only
        # plain types -- confirm before switching.
        evrs = yaml.load(stream, Loader=yaml.Loader)
    # BUG FIX: Python 2 'except E, name' syntax is a SyntaxError in
    # Python 3; use 'except E as name'.
    except IOError as e:
        msg = "Could not load EVR YAML '{}': '{}'".format(stream, str(e))
        log.error(msg)
        return
def getPacketDefn(uid):
    """
    Returns packet defn from tlm dict matching uid.

    Logs warning and returns None if no defn matching uid is found.
    """
    global packet_defns

    # Fast path: previously resolved UIDs are cached in packet_defns.
    if uid in packet_defns:
        return packet_defns[uid]

    # Slow path: scan the default telemetry dictionary, caching any hit.
    for defn in ait.core.tlm.getDefaultDict().values():
        if defn.uid == uid:
            packet_defns[uid] = defn
            return defn

    log.warn('No packet defn matching UID {}'.format(uid))
    return None
def _create_outbound_stream(self, config=None):
    """
    Creates an outbound stream from its config.

    Params:
        config:       stream configuration as read by ait.config
    Returns:
        stream:       a Stream
    Raises:
        ValueError:   if any of the required config values are missing
    """
    if config is None:
        raise ValueError('No stream config to create stream from.')

    name = self._get_stream_name(config)
    stream_handlers = self._get_stream_handlers(config, name)
    stream_input = config.get('input', None)
    stream_output = config.get('output', None)

    zmq_args = {
        'zmq_context': self.broker.context,
        'zmq_proxy_xsub_url': self.broker.XSUB_URL,
        'zmq_proxy_xpub_url': self.broker.XPUB_URL
    }

    # An integer output is a port to publish on; anything else falls
    # back to a plain ZMQ stream.
    if type(stream_output) is int:
        return PortOutputStream(name, stream_input, stream_output,
                                stream_handlers, zmq_args=zmq_args)

    if stream_output is not None:
        log.warn("Output of stream {} is not an integer port. "
                 "Stream outputs can only be ports.".format(name))

    return ZMQStream(name, stream_input, stream_handlers, zmq_args=zmq_args)
def start_browser(self, url, name=None):
    """Open *url* in the requested browser.

    Falls back to the system default when the named browser is
    unavailable; text-based browsers are never started.
    """
    browser = None

    # Explicit opt-out via --browser=none.
    if name is not None and name.lower() == "none":
        log.info("Will not start any browser since --browser=none")
        return

    try:
        browser = webbrowser.get(name)
    except webbrowser.Error:
        msg = "Could not find browser: %s. Will use: %s."
        browser = webbrowser.get()
        log.warn(msg, name, self.getBrowserName(browser))

    if type(browser) is webbrowser.GenericBrowser:
        msg = "Will not start text-based browser: %s."
        log.info(msg % self.getBrowserName(browser))
    elif browser is not None:
        log.info("Starting browser: %s" % self.getBrowserName(browser))
        browser.open_new(url)
def expand_config_paths(
    config, prefix=None, datetime=None, pathvars=None, parameter_key="", *keys
):
    """Updates all relative configuration paths in dictionary config, which
    contain a key in keys, by prepending prefix.

    If keys is omitted, it defaults to 'directory', 'file', 'filename',
    'path', 'pathname'.

    See util.expandPath().
    """
    if len(keys) == 0:
        keys = PATH_KEYS

    for name, value in config.items():
        # NOTE(review): 'type(name) is str' is redundant after 'name in keys'
        # (PATH_KEYS entries are strings) -- possibly intended to check the
        # type of 'value' instead; confirm before changing.
        if name in keys and type(name) is str:
            # Expand the path and substitute datetime/path variables.  A
            # single expansion collapses back to a scalar; multiple stay a
            # list.
            expanded = util.expandPath(value, prefix)
            cleaned = replace_variables(expanded, datetime=datetime, pathvars=pathvars)

            # Warn (but keep going) for any resulting path that does not
            # exist on disk yet.
            for p in cleaned:
                if not os.path.exists(p):
                    msg = "Config parameter {}.{} specifies nonexistent path {}".format(
                        parameter_key, name, p
                    )
                    log.warn(msg)

            config[name] = cleaned[0] if len(cleaned) == 1 else cleaned

        elif isinstance(value, dict):
            # Recurse into nested config sections, extending the dotted
            # parameter key for warning messages.
            param_key = name if parameter_key == "" else parameter_key + "." + name
            expand_config_paths(value, prefix, datetime, pathvars, param_key, *keys)
        elif isinstance(value, list):
            # Recurse into any dict items inside list-valued sections.
            for item in value:
                if isinstance(item, dict):
                    param_key = (
                        name if parameter_key == "" else parameter_key + "." + name
                    )
                    expand_config_paths(
                        item, prefix, datetime, pathvars, param_key, *keys
                    )
def start_browser(self, url, name=None):
    """Open *url* in the requested browser.

    Falls back to the system default when the named browser is
    unavailable; text-based browsers are never started.
    """
    browser = None

    if name is not None and name.lower() == 'none':
        log.info('Will not start any browser since --browser=none')
        return

    try:
        browser = webbrowser.get(name)
    except webbrowser.Error:
        # BUG FIX: 'old' was computed but never used; pass it to the warning
        # so the message reads 'default' instead of 'None' when no browser
        # name was supplied.
        old = name or 'default'
        msg = 'Could not find browser: %s. Will use: %s.'
        browser = webbrowser.get()
        log.warn(msg, old, self.getBrowserName(browser))

    if type(browser) is webbrowser.GenericBrowser:
        msg = 'Will not start text-based browser: %s.'
        log.info(msg % self.getBrowserName(browser))
    elif browser is not None:
        log.info('Starting browser: %s' % self.getBrowserName(browser))
        browser.open_new(url)
def _load_plugins(self):
    """
    Reads, parses and creates plugins specified in config.yaml.
    """
    plugins = ait.config.get('server.plugins')

    if plugins is None:
        log.warn('No plugins specified in config.')
        return

    for index, p in enumerate(plugins):
        try:
            plugin = self._create_plugin(p['plugin'])
            self.plugins.append(plugin)
            log.info('Added plugin {}'.format(plugin))
        except Exception:
            # One bad plugin config must not prevent the others from loading.
            exc_type, value, tb = sys.exc_info()
            log.error('{} creating plugin {}: {}'.format(
                exc_type, index, value))

    if not self.plugins:
        log.warn('No valid plugin configurations found. No plugins will be added.')
def handle():
    """
    Run a script

    Scripts are located via the script.directory configuration parameter.

    :formparam scriptPath: The name of the script to load. This should be
                           one of the values returned by **/scripts**.

    :statuscode 400: When the script name cannot be located
    """
    # Module-level handle on the single running script greenlet; only one
    # script may execute at a time.
    global _RUNNING_SCRIPT

    if _RUNNING_SCRIPT is None:
        with Sessions.current() as session:
            script_name = bottle.request.forms.get('scriptPath')
            # NOTE(review): script_name comes straight from the request and
            # is joined onto ScriptRoot unchecked -- a '../' value could
            # escape the script directory (path traversal). Confirm whether
            # upstream validation exists before relying on this endpoint.
            script_path = os.path.join(ScriptRoot, script_name)

            if not os.path.exists(script_path):
                bottle.abort(400, "Script cannot be located")

            # Execute in a background greenlet so the request can return.
            _RUNNING_SCRIPT = gevent.spawn(bgExecScript, script_path)
    else:
        msg = ('Attempted to execute script while another script is running. '
               'Please wait until the previous script completes and try again')
        log.warn(msg)
def load(self, content):
    """Load EVR definitions from a YAML file path or an open stream.

    Skips reloading if this dictionary was already initialized; on
    success each parsed EVR definition is added to the dictionary.
    """
    if self.filename:
        log.warn(
            "EVRDict: Skipping load() attempt after previous initialization"
        )
        return

    # Accept either a path to a YAML file or an already-open stream.
    opened_here = False
    if os.path.isfile(content):
        self.filename = content
        stream = open(self.filename, "rb")
        opened_here = True
    else:
        stream = content

    try:
        # NOTE(review): yaml.Loader permits arbitrary object construction;
        # yaml.safe_load would be safer if the EVR YAML uses only plain
        # types -- confirm before switching.
        evrs = yaml.load(stream, Loader=yaml.Loader)
    except IOError as e:
        msg = "Could not load EVR YAML '{}': '{}'".format(stream, str(e))
        log.error(msg)
        return
    finally:
        # BUG FIX: the file handle opened above was never closed (resource
        # leak). Only close streams this method itself opened.
        if opened_here:
            stream.close()

    for evr in evrs:
        self.add(evr)
def decode(self, bytes, raw=False):
    """decode(bytearray, raw=False) -> value

    Decodes the given bytearray according the corresponding EVR
    Definition (:class:`EVRDefn`) for the underlying 'MSB_U16' EVR
    code.

    If the optional parameter ``raw`` is ``True``, the EVR code itself
    will be returned instead of the EVR Definition (:class:`EVRDefn`).
    """
    code = super(EVRType, self).decode(bytes)

    # Raw mode short-circuits the definition lookup entirely.
    if raw:
        return code

    # Unknown codes fall back to the numeric code with a warning.
    if code not in self.evrs.codes:
        log.warn('Unrecognized EVR code: %d' % code)
        return code

    return self.evrs.codes[code]
def _load_leap_second_data(self):
    """Load pickled leap second data, refreshing it when missing or stale.

    Raises ValueError if no usable data can be obtained at all.
    """
    ls_file = ait.config.get(
        'leapseconds.filename',
        os.path.join(ait.config._directory, _DEFAULT_FILE_NAME))

    try:
        with open(ls_file, 'rb') as stream:
            self._data = pickle.load(stream)
    except IOError:
        log.info('Unable to locate leapseconds config file')

    if self._data and self.is_valid():
        # Cached data is present and still valid; nothing more to do.
        return

    # Data missing or expired: attempt a refresh.
    try:
        self._update_leap_second_data()
    except ValueError:
        log.warn('Leapsecond data update failed. '
                 'This may cause problems with some functionality')
        if self._data:
            log.warn('Continuing with out of date leap second data')
        else:
            raise ValueError('Could not load leap second data')
def _subscribe_cmdr(self):
    """
    Setup for the appropriate outbound stream that is configured
    to accept command messages. If no stream is specified, it
    looks for the first outbound stream.
    """
    # If command topic was not supplied, report error and return
    # Technically "shouldn't happen" but better to be safe.
    if not self.command_topic:
        log.error("Cannot create entry point for command subscriber, "
                  "required topic name is missing.")
        return

    cmd_sub_flag_field = "cmd_subscriber"

    # Collect all outbound streams explicitly flagged as command
    # subscribers and pick the first one (if any).
    flagged = [s for s in self.outbound_streams
               if hasattr(s, cmd_sub_flag_field)
               and getattr(s, cmd_sub_flag_field)]
    cmd_stream = flagged[0] if flagged else None

    # Warn about multiple matches
    if cmd_stream and len(flagged) > 1:
        log.warn("Multiple output streams found with {} field enabled, "
                 "{} was selected as the default.".format(
                     cmd_sub_flag_field, cmd_stream.name))

    # No stream yet, so just grab the first output stream
    if not cmd_stream:
        cmd_stream = next(iter(self.outbound_streams), None)
        if cmd_stream:
            log.warn(
                "No output stream was designated as the command subscriber, "
                "{} was selected as the default.".format(cmd_stream.name))

    if cmd_stream:
        self._subscribe(cmd_stream, self.command_topic)
    else:
        log.warn(
            "No output stream was designated as the command subscriber. "
            "Commands from other processes will not be dispatched by the server."
        )