def process_pdu(raf_mngr):
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

    while True:
        gevent.sleep(0)
        if raf_mngr._data_queue.empty():
            continue

        log.info('Empty {}'.format(raf_mngr._data_queue.empty()))
        pdu = raf_mngr._data_queue.get()

        try:
            decoded_pdu, remainder = raf_mngr.decode(pdu)
        except pyasn1.error.PyAsn1Error as e:
            log.error('Unable to decode PDU. Skipping ...')
            continue
        except TypeError as e:
            log.error('Unable to decode PDU due to type error ...')
            continue

        if ('data' in decoded_pdu['rafTransferBuffer'][0]['annotatedFrame'] and
                decoded_pdu['rafTransferBuffer'][0]['annotatedFrame']['data'].isValue):
            # Data is present and initialized. Processing telemetry ...
            trans_data = decoded_pdu['rafTransferBuffer'][0]['annotatedFrame']['data'].asOctets()
        else:
            # Object does not contain data or data is not initialized. Skipping ...
            continue

        tmf = ait.dsn.sle.frames.TMTransFrame(trans_data)
        log.info('Emitting {} bytes of telemetry to GUI'.format(len(tmf._data[0])))
        sock.sendto(tmf._data[0], ('localhost', 3076))
def main():
    log.begin()

    try:
        parser = argparse.ArgumentParser(
            description=__doc__,
            formatter_class=argparse.RawDescriptionHelpFormatter,
        )

        # Add required command line arguments
        parser.add_argument(
            "filename",
            nargs="+",
            metavar="</path/to/seq>",
            help="File or collection of sequence file(s)",
        )

        # Add optional command line arguments

        args = parser.parse_args()

        for fname in args.filename:
            filename = os.path.abspath(fname)
            if not os.path.isfile(filename):
                raise Exception(f"File not found: {filename}")

            extension = os.path.splitext(filename)[1]
            if extension.lower() != ".txt":
                log.warn(f"Filename '{filename}' does not have a '.txt' extension")

            # Parse the filename for the applicable information
            parts = os.path.basename(filename).split("_")
            seqid = os.path.splitext(parts[-1])[0]

            try:
                seqid = int(seqid)
            except ValueError:
                raise Exception(
                    f'Invalid filename "{os.path.basename(filename)}". {__doc__}'
                )

            sequence = seq.createSeq(filename, id=seqid)

            if not sequence.validate():
                for msg in sequence.log.messages:
                    log.error(msg)
            else:
                binpath = sequence.binpath
                seqid = sequence.seqid

                log.info(f"Writing {binpath} (seqid=0x{seqid:04X}).")
                sequence.writeBinary()

        exit = 0
    except Exception as e:
        log.error(e)
        exit = 1

    log.end()
    sys.exit(exit)
def createDirStruct(paths, verbose=True):
    '''Loops over the ait.config._datapaths from AIT_CONFIG and creates the
    directories. Replaces year and doy with the respective year and
    day-of-year. If neither is given, the current UTC day and year are used.

    Args:
        paths:
            list of directory paths you would like to create. doy and year
            will be replaced by the datetime day and year, respectively.
        verbose:
            Optional. Set to True (default) to log each created directory.
    '''
    for k, path in paths.items():
        p = None
        try:
            pathlist = path if type(path) is list else [path]
            for p in pathlist:
                os.makedirs(p)
                if verbose:
                    log.info('Creating directory: ' + p)
        except OSError as e:
            # Ignore the error if the directory already exists
            if e.errno == errno.EEXIST and os.path.isdir(p):
                pass
            else:
                raise
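# A minimal usage sketch for createDirStruct (keys and paths are hypothetical,
# not from a real AIT_CONFIG; year/doy are shown already substituted):
example_paths = {
    'data': '/tmp/ait/2021/123/downlink',
    'logs': ['/tmp/ait/2021/123/logs', '/tmp/ait/2021/123/reports'],
}
createDirStruct(example_paths, verbose=True)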
def query(starttime, endtime, output=None, *filenames):
    '''Given a time range and input file, query creates a new file with only
    that subset of data. If no output file name is given, the new file name
    is the old file name with the time range appended.

    Args:
        starttime:
            The datetime of the beginning of the time range to be extracted
            from the files.
        endtime:
            The datetime of the end of the time range to be extracted from
            the files.
        output:
            Optional. The output file name. Defaults to
            [first filename in filenames][starttime]-[endtime].pcap
        filenames:
            A tuple of one or more file names to extract data from.
    '''
    if not output:
        output = (filenames[0].replace('.pcap', '') + starttime.isoformat() +
                  '-' + endtime.isoformat() + '.pcap')

    # Note: `open` here is this pcap module's open(), which yields
    # (header, packet) pairs and supports writing packets with headers.
    with open(output, 'w') as outfile:
        for filename in filenames:
            log.info("pcap.query: processing %s..." % filename)
            with open(filename, 'r') as stream:
                for header, packet in stream:
                    if packet is not None:
                        if header.timestamp >= starttime and header.timestamp <= endtime:
                            outfile.write(packet, header=header)
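# Usage sketch (hypothetical file names). Writes packets captured between the
# two datetimes to 'passA' + start.isoformat() + '-' + end.isoformat() + '.pcap':
import datetime

start = datetime.datetime(2021, 5, 3, 12, 0, 0)
end = datetime.datetime(2021, 5, 3, 13, 0, 0)
query(start, end, None, 'passA.pcap')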
def main():
    log.begin()

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    # Add required command line arguments
    parser.add_argument('filename', default=None)

    # Get command line arguments
    args = vars(parser.parse_args())

    filename = os.path.abspath(args['filename'])
    extension = os.path.splitext(filename)[1]

    if extension.lower() != '.bin':
        log.warn("Filename '%s' does not have a '.bin' extension", filename)

    sequence = seq.Seq(filename)

    if not sequence.validate():
        for msg in sequence.messages:
            log.error(msg)
    else:
        txtpath = sequence.txtpath
        seqid = sequence.seqid
        version = sequence.version

        msg = "Writing %s (seqid=0x%04x, version=%u)."
        log.info(msg, txtpath, seqid, version)

        sequence.writeText()

    log.end()
def main():
    ap = argparse.ArgumentParser(
        epilog=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    ap.add_argument(
        "-n",
        "--dry-run",
        action="store_true",
        help="Dry run; do not actually write files",
    )
    ap.add_argument("-b", "--bytes", help="Segment every B bytes", metavar="B", type=int)
    ap.add_argument("-p", "--packets", help="Segment every P packets", metavar="P", type=int)
    ap.add_argument(
        "-s",
        "--seconds",
        help="Segment when first and last pcap timestamps span S seconds",
        metavar="S",
        type=int,
    )
    ap.add_argument("format", help="Segment filename (should include strftime(3) time format)")
    ap.add_argument("file", nargs="+", help="Packet Capture (.pcap) file(s)")

    args = ap.parse_args()

    if args.bytes is None and args.packets is None and args.seconds is None:
        msg = "At least one of -b, -p, or -s is required."
        ap.error(msg)

    try:
        pcap.segment(
            filenames=args.file,
            format=args.format,
            nbytes=args.bytes,
            npackets=args.packets,
            nseconds=args.seconds,
            dryrun=args.dry_run,
        )
    except KeyboardInterrupt:
        log.info("Received Ctrl-C. Aborting pcap segmentation.")
    except IOError as e:
        log.error(str(e))

    log.end()
def main():
    log.begin()

    try:
        defaults = {}

        parser = argparse.ArgumentParser(
            description=__doc__,
            formatter_class=argparse.RawDescriptionHelpFormatter)

        # Add required command line arguments
        parser.add_argument('filename', nargs='+', metavar='</path/to/seq>',
                            help='File or collection of sequence file(s)')

        # Add optional command line arguments
        args = parser.parse_args()

        for fname in args.filename:
            filename = os.path.abspath(fname)
            if not os.path.isfile(filename):
                raise Exception('File not found: %s' % filename)

            extension = os.path.splitext(filename)[1]
            if extension.lower() != '.txt':
                log.warn("Filename '%s' does not have a '.txt' extension", filename)

            # Parse the filename for the applicable information
            parts = os.path.basename(filename).split('_')
            seqid = os.path.splitext(parts[-1])[0]
            desc = parts[-2]
            subsys = parts[-3]

            try:
                int(seqid)
            except ValueError:
                raise Exception('Invalid filename "%s". %s' %
                                (os.path.basename(filename), __doc__))

            sequence = seq.Seq(filename, id=seqid)

            if not sequence.validate():
                for msg in sequence.log.messages:
                    log.error(msg)
            else:
                binpath = sequence.binpath
                seqid = sequence.seqid

                log.info("Writing %s (seqid=0x%04x).", binpath, seqid)
                sequence.writeBinary()

        exit = 0
    except Exception as e:
        log.error(e)
        exit = 1

    log.end()
    sys.exit(exit)
def main():
    log.begin()

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    # Add required command line argument
    parser.add_argument('filename', default=None)

    # Add optional command line arguments
    parser.add_argument('--port', default=ait.config.get('command.port', ait.DEFAULT_CMD_PORT), type=int)
    parser.add_argument('--verbose', default=0, type=int)

    # Get command line arguments
    args = vars(parser.parse_args())

    host = '127.0.0.1'
    port = args['port']
    verbose = args['verbose']

    cmd = api.CmdAPI(port, verbose=verbose)
    filename = args['filename']

    try:
        with open(filename, 'r') as stream:
            for line in stream.readlines():
                line = line.strip()

                # Skip blank lines and comments
                if len(line) == 0 or line.startswith('#'):
                    continue

                # Meta-command
                elif line.startswith('%'):
                    command = line[1:].strip()
                    system(command)

                # Sequence command
                else:
                    tokens = line.split()
                    delay = float(tokens[0])
                    name = tokens[1]
                    args = [util.toNumber(t, t) for t in tokens[2:]]
                    args = cmd.parseArgs(name, *args)
                    time.sleep(delay)
                    log.info(line)
                    cmd.send(name, *args)
    except IOError:
        log.error("Could not open '%s' for reading." % filename)

    log.end()
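# Example input accepted by the loop above (a sketch, not a real mission
# sequence). Each non-comment line is "<delay-seconds> <command> [args...]";
# lines beginning with '%' are run as shell meta-commands:
#
#   # wait 0s before the first command, 2s between the rest
#   0.0  NO_OP
#   2.0  SET_MODE  SAFE
#   %    echo "halfway through sequence"
#   2.0  SET_MODE  NOMINAL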
def handle():
    """Abort the active running sequence"""
    global _RUNNING_SEQ

    with Sessions.current() as session:
        if _RUNNING_SEQ:
            _RUNNING_SEQ.kill()
            _RUNNING_SEQ = None
            log.info('Sequence aborted by user')
            Sessions.addEvent('seq:err', 'Sequence aborted by user')
def wait(self):
    """
    Starts all greenlets for concurrent processing.
    Joins over all greenlets that are not servers.
    """
    for greenlet in (self.greenlets + self.servers):
        log.info("Starting {} greenlet...".format(greenlet))
        greenlet.start()

    gevent.joinall(self.greenlets)
def set_notif_options(thrshld=None, freq=None):
    pathvars = {}

    if thrshld:
        pathvars['notifications.options.threshold'] = thrshld
        log.info('Changing notif threshold to {}.'.format(thrshld))

    if freq:
        pathvars['notifications.options.frequency'] = freq
        log.info('Changing notif freq to {}.'.format(freq))

    ait.config.addPathVariables(pathvars)
    ait.config.reload()
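# Usage sketch (values illustrative): notify on the third consecutive limit
# trip, then re-notify every tenth repeat thereafter.
set_notif_options(thrshld=3, freq=10)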
def __init__(self, inputs, outputs, zmq_args=None, **kwargs):
    super(AITGUIPlugin, self).__init__(inputs, outputs, zmq_args, **kwargs)

    try:
        HTMLRoot.User = kwargs['html']['directory']
        log.info('[GUI Plugin Configuration] Static file directory is set to {}'.format(HTMLRoot.User))
    except KeyError:
        log.warn('[GUI Plugin Configuration] Unable to locate static file directory in config.yaml. '
                 'The directory is set to {}'.format(HTMLRoot.User))

    bottle.TEMPLATE_PATH.append(HTMLRoot.User)

    gevent.spawn(self.init)
def _get_stream_handlers(self, config, name):
    stream_handlers = []

    if 'handlers' in config:
        if config['handlers'] is not None:
            for handler in config['handlers']:
                hndlr = self._create_handler(handler)
                stream_handlers.append(hndlr)
                log.info('Created handler {} for stream {}'.format(
                    type(hndlr).__name__, name))
    else:
        log.warn('No handlers specified for stream {}'.format(name))

    return stream_handlers
def main():
    log.begin()

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter
    )

    # Add required command line arguments
    parser.add_argument(
        "filename", nargs="+", metavar="</path/to/seq>", help="encoded sequence file(s)"
    )

    # Get command line arguments
    args = parser.parse_args()

    for fname in args.filename:
        filename = os.path.abspath(fname)
        if not os.path.isfile(filename):
            raise Exception("File not found: %s" % filename)

        extension = os.path.splitext(filename)[1]
        if extension.lower() != ".bin":
            log.warn("Filename '%s' does not have a '.bin' extension", filename)

        # Parse the filename for the applicable information
        parts = os.path.basename(filename).split("_")
        seqid = os.path.splitext(parts[-1])[0]

        try:
            int(seqid)
        except ValueError:
            raise Exception(
                'Invalid filename "%s". %s' % (os.path.basename(filename), __doc__)
            )

        sequence = seq.createSeq(filename, id=seqid)

        if not sequence.validate():
            for msg in sequence.messages:
                log.error(msg)
        else:
            txtpath = sequence.txtpath
            seqid = sequence.seqid
            version = sequence.version

            msg = "Writing %s (seqid=0x%04x, version=%u)."
            log.info(msg, txtpath, seqid, version)

            sequence.writeText()

    log.end()
def check_yaml_timestamps(yaml_file_name, cache_name):
    """
    Checks the YAML configuration file timestamp, and the timestamp of any
    'included' YAML configuration files, against the pickle cache file
    timestamp.

    The term 'dirty' means that a yaml config file has a more recent
    timestamp than the pickle cache file. If the pickle cache file is found
    to be 'dirty' (return True) it is not up-to-date, and a new pickle cache
    file must be generated. If the cache file is not 'dirty' (return False)
    the existing pickle binary will be loaded.

    param: yaml_file_name: str
        Name of the yaml configuration file to be tested
    param: cache_name: str
        Filename with path to the cached pickle file for this config file.

    return: boolean
        True: Indicates a 'dirty' pickle cache, i.e. the file is not
            current; generate a new binary.
        False: Load the current cache file.
    """
    # If no pickle cache exists return True to make a new one.
    if not os.path.exists(cache_name):
        log.debug('No pickle cache exists, make a new one')
        return True

    # Has the yaml config file been modified since the creation of the pickle cache?
    if os.path.getmtime(yaml_file_name) > os.path.getmtime(cache_name):
        log.info(f'{yaml_file_name} modified - make a new binary pickle cache file.')
        return True

    # Get the directory of the yaml config file to be parsed
    dir_name = os.path.dirname(yaml_file_name)

    # Open the yaml config file to look for '!include's to be tested on the next iteration
    with open(yaml_file_name, "r") as file:
        try:
            for line in file:
                if not line.strip().startswith("#") and "!include" in line:
                    check = check_yaml_timestamps(
                        os.path.join(dir_name, line.strip().split(" ")[2]),
                        cache_name)
                    if check:
                        return True
        except RecursionError as e:
            print(
                f'ERROR: {e}: Infinite loop: check that yaml config files are not looping '
                f'back and forth to one another through the "!include" statements.'
            )

    return False
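# Usage sketch (hypothetical paths): rebuild the pickle cache only when the
# YAML dictionary (or anything it '!include's) is newer than the cache.
if check_yaml_timestamps('config/tlm.yaml', 'config/tlm.pkl'):
    log.info('Cache is dirty; regenerate and re-pickle the dictionary')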
def _load_leap_second_data(self):
    ls_file = ait.config.get(
        "leapseconds.filename",
        os.path.join(ait.config._directory, _DEFAULT_FILE_NAME),
    )

    try:
        log.info("Attempting to load leapseconds.dat")
        with open(ls_file, "rb") as infile:
            self._data = pickle.load(infile)
        log.info("Loaded leapseconds config file successfully")
    except IOError:
        log.info("Unable to locate leapseconds config file")

    if not (self._data and self.is_valid()):
        try:
            self._update_leap_second_data()
        except ValueError:
            msg = ("Leapsecond data update failed. "
                   "This may cause problems with some functionality")
            log.warn(msg)

            if self._data:
                log.warn("Continuing with out of date leap second data")
            else:
                raise ValueError("Could not load leap second data")
    else:
        t = self._data["valid"]
        log_t = t.strftime("%m/%d/%Y")
        log.info("Leapseconds data valid until %s", log_t)
def _send_email(message, recipients):
    """Send `message` to a list of email `recipients` via SMTP."""
    if type(recipients) != list:
        recipients = [recipients]

    if len(recipients) == 0 or any([i is None for i in recipients]):
        m = ("Email recipient list error. Unable to send email. "
             "Recipient list length: {} Recipients: {}").format(
                 len(recipients), ", ".join(str(r) for r in recipients))
        log.error(m)
        return

    server = ait.config.get("notifications.smtp.server", None)
    port = ait.config.get("notifications.smtp.port", None)
    un = ait.config.get("notifications.smtp.username", None)
    pw = ait.config.get("notifications.smtp.password", None)

    if server is None or port is None:
        log.error("Email SMTP connection parameter error. Please check config.")
        return

    subject = ait.config.get("notifications.smtp.subject", "AIT Notification")

    # From address must have a valid @server, otherwise texts will not work
    fromaddr = ait.config.get("notifications.smtp.from", "ait-notify@%s" % server)

    msg = MIMEText(message)
    msg["Subject"] = subject
    msg["To"] = ", ".join(recipients)
    msg["From"] = fromaddr

    try:
        if un is None or pw is None:
            s = smtplib.SMTP()
            s.connect(server, port)
            s.sendmail(fromaddr, recipients, msg.as_string())
            s.quit()
        else:
            s = smtplib.SMTP_SSL(server, port)
            s.login(un, pw)
            s.sendmail(fromaddr, recipients, msg.as_string())
            s.quit()

        log.info("Email notification sent")
    except smtplib.SMTPException as e:
        log.error("Failed to send email notification.")
        log.error(e)
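# Usage sketch (assumes notifications.smtp.* values are set in config.yaml;
# addresses are illustrative). Texts work by emailing a carrier's SMS
# gateway address, hence the valid-@server requirement on the From address:
_send_email('Limit trip on battery voltage',
            ['ops@example.com', '5551234567@txt.example.net'])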
def load(self):
    if self.fswtabdict is None:
        if self.dirty:
            self.fswtabdict = FSWTabDict(self.filename)
            util.update_cache(self.filename, self.cachename, self.fswtabdict)
            log.info(f'Loaded new pickle file: {self.cachename}')
        else:
            with open(self.cachename, "rb") as stream:
                self.fswtabdict = pickle.load(stream)
            log.info(f'Current pickle file loaded: {self.cachename.split("/")[-1]}')

    return self.fswtabdict
def validate(validator, yml, schema):
    msgs = []
    validator = validator(yml, schema)
    valid = validator.validate(messages=msgs)

    msg = "Validation: %s: yml=%s, schema=%s"

    if valid:
        log.info(msg % ('SUCCESS', yml, schema))
        return 0
    else:
        log.error(msg % ('FAILED', yml, schema))
        for msg in msgs:
            log.error(msg)
        return 1
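# Usage sketch (hypothetical paths; assumes a validator class such as
# ait.core.val.CmdValidator that takes (yml, schema) and exposes
# validate(messages=...)):
rc = validate(CmdValidator, 'config/cmd.yaml', 'config/cmd_schema.json')
sys.exit(rc)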
def _update_leap_second_data(self):
    """Updates the system's leap second information

    Pulls the latest leap second information from
    https://www.ietf.org/timezones/data/leap-seconds.list
    and updates the leapsecond config file.

    Raises:
        ValueError: If the connection to IETF does not return 200
        IOError: If the path to the leap seconds file is not valid
    """
    log.info('Attempting to acquire latest leapsecond data')

    ls_file = ait.config.get(
        'leapseconds.filename',
        os.path.join(ait.config._directory, _DEFAULT_FILE_NAME))

    url = 'https://www.ietf.org/timezones/data/leap-seconds.list'
    r = requests.get(url)

    if r.status_code != 200:
        msg = 'Unable to locate latest timezone data. Connection to IETF failed'
        log.error(msg)
        raise ValueError(msg)

    text = r.text.split('\n')
    lines = [l for l in text if l.startswith('#@') or not l.startswith('#')]

    data = {'valid': None, 'leapseconds': []}
    data['valid'] = datetime.datetime(1900, 1, 1) + datetime.timedelta(
        seconds=int(lines[0].split('\t')[1]))

    leap = 1
    for l in lines[1:-1]:
        t = datetime.datetime(1900, 1, 1) + datetime.timedelta(
            seconds=int(l.split('\t')[0]))
        if t < GPS_Epoch:
            continue

        data['leapseconds'].append((t, leap))
        leap += 1

    self._data = data
    with open(ls_file, 'wb') as outfile:
        pickle.dump(data, outfile)
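# Shape of the resulting self._data, per the parsing above (dates and counts
# illustrative only): 'valid' is the expiration of the IETF list, and
# 'leapseconds' holds (datetime, cumulative-leaps-since-GPS-epoch) tuples.
#
#   {
#       'valid': datetime.datetime(2021, 12, 28, 0, 0),
#       'leapseconds': [
#           (datetime.datetime(1981, 7, 1, 0, 0), 1),
#           (datetime.datetime(1982, 7, 1, 0, 0), 2),
#           ...
#       ]
#   }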
def _run(self):
    self._setup_proxy()
    self._subscribe_all()

    log.info("Starting broker...")

    while True:
        gevent.sleep(0)
        socks = dict(self.poller.poll())

        if socks.get(self.frontend) == zmq.POLLIN:
            message = self.frontend.recv_multipart()
            self.backend.send_multipart(message)

        if socks.get(self.backend) == zmq.POLLIN:
            message = self.backend.recv_multipart()
            self.frontend.send_multipart(message)
def __init__(self, inputs, outputs, **kwargs):
    super(TelemetryLimitMonitor, self).__init__(inputs, outputs, **kwargs)

    self.limit_dict = defaultdict(dict)
    for k, v in limits.getDefaultDict().items():
        packet, field = k.split('.')
        self.limit_dict[packet][field] = v

    self.packet_dict = defaultdict(dict)
    for k, v in tlm.getDefaultDict().items():
        self.packet_dict[v.uid] = v

    self.notif_thrshld = ait.config.get('notifications.options.threshold', 1)
    self.notif_freq = ait.config.get('notifications.options.frequency', float('inf'))

    self.limit_trip_repeats = {}

    log.info('Starting telemetry limit monitoring')
def main():
    try:
        log.begin()

        parser = argparse.ArgumentParser(
            description=__doc__,
            formatter_class=argparse.RawDescriptionHelpFormatter)

        # Add optional command line arguments
        parser.add_argument("--port", default=3076, type=int)
        parser.add_argument("--host", default="127.0.0.1", type=str)
        parser.add_argument("--packetName", default=None)
        parser.add_argument("--packetFill", default=None)

        # Get command line arguments
        args = vars(parser.parse_args())

        port = args["port"]
        host = args["host"]
        fill = args["packetFill"]
        name = args["packetName"]

        if name:
            defn = tlm.getDefaultDict()[name]
        else:
            defn = list(tlm.getDefaultDict().values())[0]

        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        packet = defn.simulate(fill=fill)

        while True:
            sock.sendto(packet._data, (host, port))
            log.info("Sent telemetry (%d bytes) to %s:%d" % (packet.nbytes, host, port))
            time.sleep(1)
    except KeyboardInterrupt:
        log.info("Received Ctrl-C. Stopping telemetry stream.")
    except Exception as e:
        log.error("TLM send error: %s" % str(e))

    log.end()
def __init__(self, inputs, outputs, zmq_args=None, **kwargs):
    """
    Params:
        inputs:     names of inbound streams plugin receives data from
        outputs:    names of outbound streams plugin sends its data to
        zmq_args:   dict containing the following keys:
                        zmq_context
                        zmq_proxy_xsub_url
                        zmq_proxy_xpub_url
                    Defaults to empty dict. Default values assigned during
                    instantiation of the parent class.
        **kwargs:   (optional) Dependent on requirements of child class.
    """
    super(AITOpenMctPlugin, self).__init__(inputs, outputs, zmq_args, **kwargs)

    log.info('Running AIT OpenMCT Plugin')

    # Initialize state fields
    # Debug state fields
    self._debugEnabled = AITOpenMctPlugin.DEFAULT_DEBUG
    self._debugMimicRepeat = False

    # Port value for the server
    self._servicePort = AITOpenMctPlugin.DEFAULT_PORT

    # Set up server state
    self._app = bottle.Bottle()
    self._servers = []

    # Queues for AIT events
    self._tlmQueue = api.GeventDeque(maxlen=100)
    self._logQueue = api.GeventDeque(maxlen=100)

    # Load AIT tlm dict and create OpenMCT format of it
    self._aitTlmDict = tlm.getDefaultDict()
    self._mctTlmDict = self.format_tlmdict_for_openmct(self._aitTlmDict)

    # Create lookup from packet-uid to packet def
    self._uidToPktDefMap = self.create_uid_pkt_map(self._aitTlmDict)

    # Check for AIT config overrides
    self._checkConfig()

    gevent.spawn(self.init)
def __init__(self, inputs, outputs, datastore='ait.core.db.InfluxDBBackend', **kwargs):
    """
    Attempts to connect to the database backend. The plugin will not be
    created if the connection fails.

    Creates a base packet dictionary for decoding packets, with packet UIDs
    as keys and packet definitions as values.

    Params:
        inputs:     list of names of input streams to plugin
        outputs:    list of names of plugin output streams
        datastore:  path to database backend to use
        **kwargs:   any args required for connecting to backend database

    Raises:
        ImportError: raised if the provided database backend does not exist
            or cannot be imported
        Exception: raised if the backend database cannot be connected to
            for any reason
    """
    super(DataArchive, self).__init__(inputs, outputs, **kwargs)

    self.datastore = datastore
    self.packet_dict = defaultdict(dict)
    for k, v in tlm.getDefaultDict().items():
        self.packet_dict[v.uid] = v

    try:
        mod, cls = self.datastore.rsplit('.', 1)
        self.dbconn = getattr(importlib.import_module(mod), cls)()
        self.dbconn.connect(**kwargs)
        log.info('Starting telemetry data archiving')
    except ImportError as e:
        log.error("Could not import specified datastore {}".format(self.datastore))
        raise e
    except Exception as e:
        log.error("Unable to connect to {} backend. Disabling data archive.".format(self.datastore))
        raise e
def update_cache(yaml_file_name, cache_file_name, object_to_serialize):
    """
    Caches the result of the loader (yaml_file_name) to a pickle binary
    (cache_file_name) if the yaml config file has been modified since the
    last pickle cache was created, i.e. the binary pickle cache was declared
    'dirty' by 'check_yaml_timestamps()'.

    param: yaml_file_name: str
        Name of the yaml configuration file to be serialized ('pickled')
    param: cache_file_name: str
        File name with path to the new serialized cached pickle file for
        this config file.
    param: object_to_serialize: object
        Object to serialize ('pickle'), e.g. an instance of
        'ait.core.cmd.CmdDict'
    """
    msg = f'Saving updates from more recent {yaml_file_name} to {cache_file_name}.'
    log.info(msg)

    with open(cache_file_name, "wb") as output:
        pickle.dump(object_to_serialize, output, -1)
def load(self):
    """
    Loads the Python object, either via the loader (filename) or the
    pickled cache file, whichever was modified most recently.
    """
    if self._dict is None:
        if self.dirty:
            self._dict = self._loader(self.filename)
            update_cache(self.filename, self.cachename, self._dict)
            log.info(f'Loaded new pickle file: {self.cachename}')
        else:
            with open(self.cachename, "rb") as stream:
                self._dict = pickle.load(stream)
            log.info(f'Current pickle file loaded: {self.cachename.split("/")[-1]}')

    return self._dict
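# Sketch of how the caching pieces above could fit together (names are
# illustrative, not the real class): `dirty` comes from
# check_yaml_timestamps(), and update_cache() re-pickles only when the YAML
# is newer than the cache.
class DictCache:
    def __init__(self, filename, cachename, loader):
        self.filename = filename
        self.cachename = cachename
        self._loader = loader
        self._dict = None
        # True when the YAML (or an !include) is newer than the pickle
        self.dirty = check_yaml_timestamps(filename, cachename)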
def _load_streams(self):
    """
    Reads, parses and creates streams specified in config.yaml.
    """
    common_err_msg = "No valid {} stream configurations found. "
    specific_err_msg = {
        "inbound": "No data will be received (or displayed).",
        "outbound": "No data will be published.",
    }
    err_msgs = {}

    for stream_type in ["inbound", "outbound"]:
        err_msgs[stream_type] = (common_err_msg.format(stream_type) +
                                 specific_err_msg[stream_type])
        streams = ait.config.get("server.{}-streams".format(stream_type))

        if streams is None:
            log.warn(err_msgs[stream_type])
        else:
            for index, s in enumerate(streams):
                try:
                    if stream_type == "inbound":
                        strm = self._create_inbound_stream(s["stream"])
                        if type(strm) == PortInputStream:
                            self.servers.append(strm)
                        else:
                            self.inbound_streams.append(strm)
                    elif stream_type == "outbound":
                        strm = self._create_outbound_stream(s["stream"])
                        self.outbound_streams.append(strm)

                    log.info("Added {} stream {}".format(stream_type, strm))
                except Exception:
                    exc_type, value, tb = sys.exc_info()
                    log.error("{} creating {} stream {}: {}".format(
                        exc_type, stream_type, index, value))

    if not self.inbound_streams and not self.servers:
        log.warn(err_msgs["inbound"])

    if not self.outbound_streams:
        log.warn(err_msgs["outbound"])
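# A config.yaml fragment of the shape this method expects (stream names,
# ports, and handler details are illustrative, not a canonical config):
#
#   server:
#       inbound-streams:
#           - stream:
#               name: telem_in
#               input:
#                   - 3076
#               handlers:
#                   - name: ait.core.server.handlers.PacketHandler
#                     packet: HS_Packet
#       outbound-streams:
#           - stream:
#               name: telem_out
#               input:
#                   - telem_in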