class ThreadedStatistics(Statistics, Threaded, threading.Thread):
    """ ThreadedStatistics plugins process client statistics in a
    separate thread.

    Statistics are queued by :func:`process_statistics` (called from
    the server core) and drained by the worker thread in :func:`run`.
    On shutdown any unprocessed items are pickled to ``pending_file``
    and reloaded on the next start.
    """

    def __init__(self, core, datastore):
        Statistics.__init__(self, core, datastore)
        Threaded.__init__(self)
        threading.Thread.__init__(self)
        # Event from the core signaling an exit
        self.terminate = core.terminate
        # bounded queue of (metadata, stats-XML) tuples awaiting
        # processing; bounded so a stalled consumer cannot exhaust memory
        self.work_queue = Queue(100000)
        # file used to persist unprocessed queue items across restarts
        self.pending_file = os.path.join(datastore, "etc",
                                         "%s.pending" % self.name)
        self.daemon = False

    def start_threads(self):
        self.start()

    def _save(self):
        """Save any pending data to a file."""
        pending_data = []
        try:
            while not self.work_queue.empty():
                (metadata, data) = self.work_queue.get_nowait()
                # serialize the XML now; only (hostname, xml-string)
                # pairs are pickled, never live objects
                pending_data.append(
                    (metadata.hostname,
                     lxml.etree.tostring(
                         data,
                         xml_declaration=False).decode("UTF-8")))
        except Empty:
            pass
        try:
            # binary mode: required for pickle streams on Python 3 and
            # on platforms with text-mode newline translation; the
            # 'with' block guarantees the file is closed even if
            # cPickle.dump raises
            with open(self.pending_file, 'wb') as savefile:
                cPickle.dump(pending_data, savefile)
            self.logger.info("Saved pending %s data" % self.name)
        except (IOError, TypeError):
            err = sys.exc_info()[1]
            self.logger.warning("Failed to save pending data: %s" % err)

    def _load(self):
        """Load any pending data from a file.

        :returns: bool - True on success (including "nothing to load"),
                  False if loading was aborted by shutdown or an error
        """
        if not os.path.exists(self.pending_file):
            return True
        pending_data = []
        try:
            with open(self.pending_file, 'rb') as savefile:
                pending_data = cPickle.load(savefile)
        except (IOError, cPickle.UnpicklingError):
            err = sys.exc_info()[1]
            self.logger.warning("Failed to load pending data: %s" % err)
            return False
        for (pmetadata, pdata) in pending_data:
            # check that shutdown wasn't called early
            if self.terminate.isSet():
                return False
            try:
                while True:
                    try:
                        metadata = self.core.build_metadata(pmetadata)
                        break
                    except MetadataRuntimeError:
                        # metadata store not ready yet -- retry below
                        pass
                    # wait before retrying, bailing out if shutdown is
                    # requested in the meantime
                    self.terminate.wait(5)
                    if self.terminate.isSet():
                        return False
                self.work_queue.put_nowait(
                    (metadata,
                     lxml.etree.XML(pdata,
                                    parser=Bcfg2.Server.XMLParser)))
            except Full:
                self.logger.warning("Queue.Full: Failed to load queue data")
                break
            except lxml.etree.LxmlError:
                lxml_error = sys.exc_info()[1]
                self.logger.error("Unable to load saved interaction: %s" %
                                  lxml_error)
            except MetadataConsistencyError:
                self.logger.error("Unable to load metadata for save "
                                  "interaction: %s" % pmetadata)
        try:
            os.unlink(self.pending_file)
        except OSError:
            self.logger.error("Failed to unlink save file: %s" %
                              self.pending_file)
        self.logger.info("Loaded pending %s data" % self.name)
        return True

    def run(self):
        """Worker loop: drain the queue, then persist leftovers."""
        if not self._load():
            return
        # poll with a timeout so the terminate event is rechecked at
        # least every 2 seconds
        while not self.terminate.isSet() and self.work_queue is not None:
            try:
                (client, xdata) = self.work_queue.get(block=True, timeout=2)
            except Empty:
                continue
            except Exception:
                # log and keep the worker alive; a narrow bare except
                # here would also swallow SystemExit/KeyboardInterrupt
                err = sys.exc_info()[1]
                self.logger.error("ThreadedStatistics: %s" % err)
                continue
            self.handle_statistic(client, xdata)
        # persist whatever is still queued so it survives the restart
        if self.work_queue is not None and not self.work_queue.empty():
            self._save()

    def process_statistics(self, metadata, data):
        """Queue a copy of the statistics data for the worker thread."""
        try:
            self.work_queue.put_nowait((metadata, copy.copy(data)))
        except Full:
            self.logger.warning("%s: Queue is full. Dropping interactions." %
                                self.name)

    def handle_statistic(self, metadata, data):
        """ Process the given XML statistics data for the specified
        client object.  This differs from the
        :func:`Statistics.process_statistics` method only in that
        ThreadedStatistics first adds the data to a queue, and then
        processes them in a separate thread.

        :param metadata: The client metadata
        :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata
        :param data: The statistics data
        :type data: lxml.etree._Element
        :return: None
        """
        raise NotImplementedError
class ThreadedStatistics(Statistics, Threaded, threading.Thread):
    """ ThreadedStatistics plugins process client statistics in a
    separate thread.

    Statistics are queued by :func:`process_statistics` (called from
    the server core) and drained by the worker thread in :func:`run`.
    On shutdown any unprocessed items are pickled to ``pending_file``
    and reloaded on the next start.
    """

    def __init__(self, core, datastore):
        Statistics.__init__(self, core, datastore)
        Threaded.__init__(self)
        threading.Thread.__init__(self)
        # Event from the core signaling an exit
        self.terminate = core.terminate
        # bounded queue of (metadata, stats-XML) tuples awaiting processing
        self.work_queue = Queue(100000)
        # file used to persist unprocessed queue items across restarts
        self.pending_file = os.path.join(datastore, "etc",
                                         "%s.pending" % self.name)
        self.daemon = False

    def start_threads(self):
        self.start()

    def _save(self):
        """Save any pending data to a file."""
        pending_data = []
        try:
            while not self.work_queue.empty():
                (metadata, xdata) = self.work_queue.get_nowait()
                # serialize the XML now; only (hostname, xml-string)
                # pairs are pickled, never live objects
                data = \
                    lxml.etree.tostring(xdata,
                                        xml_declaration=False).decode("UTF-8")
                pending_data.append((metadata.hostname, data))
        except Empty:
            pass
        try:
            savefile = open(self.pending_file, 'w')
            cPickle.dump(pending_data, savefile)
            savefile.close()
            self.logger.info("Saved pending %s data" % self.name)
        except (IOError, TypeError):
            err = sys.exc_info()[1]
            self.logger.warning("Failed to save pending data: %s" % err)

    def _load(self):
        """Load any pending data from a file.

        :returns: bool - True on success (including "nothing to load"),
                  False if loading was aborted by shutdown or an error
        """
        if not os.path.exists(self.pending_file):
            return True
        pending_data = []
        try:
            savefile = open(self.pending_file, 'r')
            pending_data = cPickle.load(savefile)
            savefile.close()
        except (IOError, cPickle.UnpicklingError):
            err = sys.exc_info()[1]
            self.logger.warning("Failed to load pending data: %s" % err)
            return False
        for (pmetadata, pdata) in pending_data:
            # check that shutdown wasnt called early
            if self.terminate.isSet():
                return False
            try:
                while True:
                    try:
                        metadata = self.core.build_metadata(pmetadata)
                        break
                    except MetadataRuntimeError:
                        # metadata store not ready yet -- retry below
                        pass
                    # wait before retrying, bailing out if shutdown is
                    # requested in the meantime
                    self.terminate.wait(5)
                    if self.terminate.isSet():
                        return False
                self.work_queue.put_nowait(
                    (metadata,
                     lxml.etree.XML(pdata,
                                    parser=Bcfg2.Server.XMLParser)))
            except Full:
                self.logger.warning("Queue.Full: Failed to load queue data")
                break
            except lxml.etree.LxmlError:
                lxml_error = sys.exc_info()[1]
                self.logger.error("Unable to load saved interaction: %s" %
                                  lxml_error)
            except MetadataConsistencyError:
                self.logger.error("Unable to load metadata for save "
                                  "interaction: %s" % pmetadata)
        try:
            os.unlink(self.pending_file)
        except OSError:
            self.logger.error("Failed to unlink save file: %s" %
                              self.pending_file)
        self.logger.info("Loaded pending %s data" % self.name)
        return True

    def run(self):
        """Worker loop: drain the queue, then persist leftovers."""
        if not self._load():
            return
        # poll with a timeout so the terminate event is rechecked at
        # least every 2 seconds
        while not self.terminate.isSet() and self.work_queue is not None:
            try:
                (client, xdata) = self.work_queue.get(block=True, timeout=2)
            except Empty:
                continue
            except:
                # deliberately broad: keep the worker thread alive no
                # matter what the queue raises
                err = sys.exc_info()[1]
                self.logger.error("ThreadedStatistics: %s" % err)
                continue
            self.handle_statistic(client, xdata)
        # persist whatever is still queued so it survives the restart
        if self.work_queue is not None and not self.work_queue.empty():
            self._save()

    def process_statistics(self, metadata, data):
        """Queue a copy of the statistics data for the worker thread."""
        try:
            self.work_queue.put_nowait((metadata, copy.copy(data)))
        except Full:
            self.logger.warning("%s: Queue is full. Dropping interactions." %
                                self.name)

    def handle_statistic(self, metadata, data):
        """ Process the given XML statistics data for the specified
        client object.  This differs from the
        :func:`Statistics.process_statistics` method only in that
        ThreadedStatistics first adds the data to a queue, and then
        processes them in a separate thread.

        :param metadata: The client metadata
        :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata
        :param data: The statistics data
        :type data: lxml.etree._Element
        :return: None
        """
        raise NotImplementedError
class DirectStore(TransportBase, threading.Thread):
    """ Reporting transport that imports interactions directly into the
    storage backend from a worker thread; no separate collector process
    is used.  Interactions still queued at shutdown are pickled to
    ``save_file`` and reloaded on the next start. """

    def __init__(self, setup):
        TransportBase.__init__(self, setup)
        threading.Thread.__init__(self)
        # file used to persist queued interactions across restarts
        self.save_file = os.path.join(self.data, ".saved")
        self.storage = load_storage_from_config(setup)
        # bounded queue so a stalled storage backend cannot exhaust memory
        self.queue = Queue(100000)
        self.terminate = threading.Event()
        self.start()

    def shutdown(self):
        # NOTE(review): only signals the worker thread; pending data is
        # flushed by run() after the loop exits, so callers needing the
        # flush must wait for the thread to finish
        self.terminate.set()

    def store(self, hostname, metadata, stats):
        """Queue a single client interaction for asynchronous import."""
        try:
            self.queue.put_nowait(dict(
                hostname=hostname,
                metadata=metadata,
                stats=stats))
        except Full:
            self.logger.warning("Reporting: Queue is full, "
                                "dropping statistics")

    def run(self):
        """Worker loop: drain the queue into the storage backend."""
        if not self._load():
            return
        # self.timeout is presumably provided by TransportBase -- it is
        # not defined in this class; TODO confirm
        while not self.terminate.isSet() and self.queue is not None:
            try:
                interaction = self.queue.get(block=True,
                                             timeout=self.timeout)
                self.storage.import_interaction(interaction)
            except Empty:
                continue
            except Exception:
                # log and keep the worker alive; a bare except here
                # would also swallow SystemExit/KeyboardInterrupt
                err = sys.exc_info()[1]
                self.logger.error("Reporting: Could not import interaction: %s"
                                  % err)
                continue
        # persist anything still queued so it survives the restart
        if self.queue is not None and not self.queue.empty():
            self._save()

    def fetch(self):
        """ no collector is necessary with this backend """
        pass

    def start_monitor(self, collector):
        """ no collector is necessary with this backend """
        pass

    def rpc(self, method, *args, **kwargs):
        """Call ``method`` on the storage backend, wrapping any failure
        in :class:`TransportError`."""
        try:
            return getattr(self.storage, method)(*args, **kwargs)
        except Exception:
            msg = "Reporting: RPC method %s failed: %s" % (method,
                                                           sys.exc_info()[1])
            self.logger.error(msg)
            raise TransportError(msg)

    def _save(self):
        """ Save any saved data to a file """
        saved_data = []
        try:
            while not self.queue.empty():
                saved_data.append(self.queue.get_nowait())
        except Empty:
            pass
        try:
            # binary mode: required for pickle streams on Python 3 and
            # on platforms with text-mode newline translation; the
            # 'with' block guarantees the file is closed even if
            # cPickle.dump raises
            with open(self.save_file, 'wb') as savefile:
                cPickle.dump(saved_data, savefile)
            self.logger.info("Saved pending Reporting data")
        except (IOError, TypeError):
            err = sys.exc_info()[1]
            self.logger.warning("Failed to save pending data: %s" % err)

    def _load(self):
        """ Load any saved data from a file.

        :returns: bool - True on success (including "nothing to load"),
                  False if loading was aborted by shutdown or an error
        """
        if not os.path.exists(self.save_file):
            return True
        saved_data = []
        try:
            with open(self.save_file, 'rb') as savefile:
                saved_data = cPickle.load(savefile)
        except (IOError, cPickle.UnpicklingError):
            err = sys.exc_info()[1]
            self.logger.warning("Failed to load saved data: %s" % err)
            return False
        for interaction in saved_data:
            # check that shutdown wasnt called early
            if self.terminate.isSet():
                return False
            try:
                self.queue.put_nowait(interaction)
            except Full:
                self.logger.warning("Reporting: Queue is full, failed to "
                                    "load saved interaction data")
                break
        try:
            os.unlink(self.save_file)
        except OSError:
            self.logger.error("Reporting: Failed to unlink save file: %s" %
                              self.save_file)
        self.logger.info("Reporting: Loaded saved interaction data")
        return True