def writeFile(self):
    """Flush the buffered JSON change-log entries to the output file and re-arm the timer.

    Runs periodically on a ``threading.Timer``; re-schedules itself unless the
    stop event has been set.
    """
    if len(self.buffer) > 0:
        try:
            # TYPE is a module-level open() mode constant defined elsewhere in this file.
            with open(self.outFile, TYPE) as fileHandler:
                for entry in self.buffer:
                    print(entry, file=fileHandler)
        except OSError:
            # Fix: narrowed from a bare `except:` so that programming errors and
            # KeyboardInterrupt are no longer silently swallowed.
            self.logger.error(
                "Cannot print JSON change log to file {0}.".format(self.outFile))
    # NOTE(review): self.buffer is never cleared, so every flush rewrites all
    # entries; presumably TYPE is a truncating mode ("w") — confirm.
    if not self.__stopEvent.is_set():  # is_set(): camelCase isSet() was removed in Python 3.12
        self.writer = Timer(WRITE_INTERVAL, partial(self.writeFile))
        self.writer.start()
def __init__(self, onNewEvent, onDeletedEvent, onChangedEvent, logger, outputDirectory):
    """Register graph-change handlers and start the periodic writer timer.

    :param onNewEvent: Event raised when a new object is added; handler is registered via ``add``.
    :param onDeletedEvent: Event raised when an object is deleted.
    :param onChangedEvent: Event raised when an object changes.
    :param logger: Logger used for diagnostics.
    :param outputDirectory: Directory receiving ``jsonChangeLog.txt``.
    """
    onNewEvent.add(partial(self.onNewHandler))
    onChangedEvent.add(partial(self.onChangedHandler))
    onDeletedEvent.add(partial(self.onDeletedHandler))
    self.buffer = list()
    self.outFile = os.path.join(outputDirectory, "jsonChangeLog.txt")
    self.logger = logger
    # Fix: create the stop flag BEFORE starting the writer timer. Previously it
    # was assigned last, so writeFile() firing early (or stop() being called
    # right after construction) would hit a missing attribute.
    self.__stopEvent = threading.Event()
    self.writer = Timer(WRITE_INTERVAL, partial(self.writeFile))
    self.writer.start()
def initScanningSchedule(self):
    """
    This method initializes the scanning schedule. The scanning interval for each scanner will be
    loaded and a timer for each scanning-module will be launched.

    Modules without an interval (or interval -1) are queued exactly once and
    get no repeating timer.

    :returns: True
    """
    for collectorName in self.collectorModules.keys():
        config = self.config["modules"][collectorName]
        # Only configobj sections describe collector modules; skip scalar entries.
        if config.__class__ != configobj.Section:
            continue
        interval = int(config["interval"] if "interval" in config else -1)
        if interval == -1:
            self.logger.warning(
                "No intervall/ interval -1 defined for collector module {0}. This module will be started only once!"
                .format(collectorName))
            # Queue the one-shot job; no timer for this module.
            self.queue.put((PRIORITY, interval, collectorName), True, TIMEOUT)
            self.timers[collectorName] = None
        else:
            self.timers[collectorName] = Timer(int(interval), self.executeScan,
                                               [collectorName])
    for timer in self.timers.keys():
        # Fix: run-once modules are stored as None — unconditionally calling
        # .start() on them raised AttributeError before.
        if self.timers[timer] is not None:
            self.timers[timer].start()
    return True
def startTriggeredExporter(self, exporter, interval, configuration=None):
    """Run the named triggered exporter once and re-arm its trigger timer.

    :param exporter: Key of the exporter in ``self.triggeredExporters``.
    :param interval: Seconds until the next trigger.
    :param configuration: Optional configuration; when given, the exporter
        receives a graph copy built from it instead of the live graph.
    """
    # Export either the live graph or a copy restricted to the configuration.
    if configuration:
        snapshot = Graph.copy(configuration)
    else:
        snapshot = self.graph
    exportFunction = self.triggeredExporters[exporter]
    exportFunction(self.dataPath, snapshot)
    # Re-arm the trigger for the next run.
    # NOTE(review): the Timer is stored but not start()ed here — presumably
    # started elsewhere; confirm.
    self.exportTrigger[exporter] = Timer(
        interval, partial(self.startTriggeredExporter, exporter, interval))
def verify(self, collectorName, timeout):
    """
    Verify this edge and start the timer.

    Keyword arguments:
        collectorName -- Name of the scanner that verifies this edge.
        timeout -- Timeout in seconds. After this timeout the scanner will be deleted from
                   the list of verifying scanners.
    """
    if (collectorName is not None) and (timeout is not None):
        # Fix: use the getScanners() accessor consistently instead of mixing
        # direct self.__scanners access with accessor calls (matches the node
        # variant of verify()).
        scanners = self.getScanners()
        if collectorName in scanners:
            # Re-verification: cancel the pending removal timer before replacing it.
            scanners[collectorName].cancel()
        scanners[collectorName] = Timer(timeout, self.removeVerification,
                                        [collectorName])
        scanners[collectorName].start()
def verify(self, collectorName, timeout):
    """Mark this node as verified by a scanner and (re)start its removal timer.

    :param collectorName: Name of the scanner that verifies this node
    :type collectorName: str
    :param timeout: Timeout in seconds. After this timeout the scanner will be
        deleted from the list of verifying scanners
    :type timeout: int
    """
    # Guard clause: both values are required; do nothing otherwise.
    if collectorName is None or timeout is None:
        return
    scanners = self.getScanners()
    if collectorName in scanners:
        # A timer for this scanner is already pending — cancel it before
        # installing the replacement.
        scanners[collectorName].cancel()
    removalTimer = Timer(timeout, self.removeVerificationTimeout, [collectorName])
    scanners[collectorName] = removalTimer
    removalTimer.start()
def initExporters(self):
    """Load and register all exporters configured for this environment.

    Continuous exporters (config key ``continuousExporters``) are imported
    from ``insalata.export.continuous`` and instantiated with the graph's
    change events. Triggered exporters (config section ``triggeredExporters``)
    are imported from ``insalata.export.triggered`` and scheduled via a Timer
    using their configured ``interval``. Import/attribute failures are logged
    and skipped.
    """
    # Continuous exporters: the config value may be a single name or a list.
    if "continuousExporters" in self.config.keys():
        exporters = self.config["continuousExporters"]
        if not isinstance(exporters, list):
            exporters = [exporters]
        for exporter in exporters:
            try:
                module = importlib.import_module(
                    "insalata.export.continuous.{0}".format(exporter))
                self.logger.debug(str(module))
                self.continuousExporters[exporter] = getattr(module, "Exporter")(
                    self.graph.getObjectNewEvent(),
                    self.graph.getObjectDeletedEvent(),
                    self.graph.getObjectChangedEvent(),
                    self.logger,
                    self.dataPath)
                self.logger.debug(
                    "Added continuous exporter {0}.".format(exporter))
            except ImportError:
                self.logger.error(
                    "No exporter {0}.py in insalata.export.continuous!".format(exporter))
                continue
            except AttributeError as e:
                self.logger.debug(str(e))
                self.logger.error(
                    "No class 'Exporter' in module {0}!".format(exporter))
                continue
    # Triggered exporters: one config section per exporter carrying "interval".
    if "triggeredExporters" in self.config.keys():
        for exporter in list(self.config["triggeredExporters"].keys()):
            try:
                module = importlib.import_module(
                    "insalata.export.triggered.{0}".format(exporter))
                self.triggeredExporters[exporter] = getattr(module, "export")
                section = self.config["triggeredExporters"][exporter]
                # Fix: read the interval VALUE from the exporter's section.
                # Previously the section object itself was compared to -1
                # (always unequal) and then compared to 1 (a TypeError).
                if "interval" in section and int(section["interval"]) != -1:
                    interval = int(section["interval"])
                    # Fix: the validity check was inverted (interval < 1 was
                    # accepted); intervals of at least one second are valid.
                    if interval >= 1:
                        # NOTE(review): Timer is created but not start()ed —
                        # confirm triggers are started elsewhere.
                        self.exportTrigger[exporter] = Timer(
                            interval,
                            partial(self.startTriggeredExporter, exporter, interval))
                        self.logger.debug(
                            "Started continuous exporter '{0}'.".format(exporter))
                    else:
                        self.logger.error(
                            "Invalid interval configured for exporter '{0}'!".format(exporter))
                else:
                    self.logger.error(
                        "No interval configured for triggered exporter '{0}'".format(exporter))
            except ImportError:
                # Fix: this branch loads triggered exporters — the message
                # previously named the continuous package.
                self.logger.error(
                    "No exporter {0}.py in insalata.export.triggered!".format(exporter))
                continue
            except AttributeError:
                self.logger.error(
                    "No 'export' method in module {0}!".format(exporter))
                continue
def run(self):
    """
    This method is executed when the thread of this handler is started.
    It triggers the collector modules in their specified time intervals.
    """
    try:
        # Abort immediately if the environment was stopped before the thread ran.
        if self.__stopEvent.isSet():
            self.logger.critical(
                "Environment is not set on running state!")
            return
        if self.initScanner() is None:
            self.logger.error("Error while initializing scanner.")
            return
        self.initExporters()
        self.logger.info("Environment running...")
        # Main scheduling loop: pull (priority, interval, name) jobs from the
        # queue and launch a Worker for each collector module until stopped.
        while not self.__stopEvent.isSet():
            try:
                _, interval, name = self.queue.get(
                    True, TIMEOUT)  #Priority is only used by queue
                self.logger.debug(
                    "Starting collector module {}.".format(name))
                if "config" not in self.config["modules"][name]:
                    self.logger.error(
                        "No configuration given for collector {0}.".format(
                            name))
                else:
                    configPath = self.config["modules"][name]["config"]
                    # Relative config paths are resolved against this handler's base path.
                    configPath = configPath if os.path.isabs(
                        configPath) else os.path.join(
                            self.path, configPath)
                    connectionInfo = ConfigObj(configPath)
                    if connectionInfo == {}:
                        self.logger.warning(
                            "Connection information for module {0} empty.".
                            format(name))
                    connectionInfo["name"] = name
                    #interval -1 means "No restart"
                    if interval != -1:
                        # Re-arm a timer so the module is scanned again after `interval`.
                        self.timers[name] = Timer(int(interval),
                                                  self.executeScan, [name])
                    # Run the collector in a Worker thread; finishedCallback is
                    # invoked with the module name and interval when it completes.
                    worker = Worker(
                        partial(self.collectorModules[name], self.graph,
                                connectionInfo, self.logger), name,
                        partial(self.finishedCallback, name, interval),
                        self.logger)
                    self.workers.append(worker)
                    worker.start()
            except ConfigObjError:
                self.logger.error(
                    "Can not parse connectionInfo for module {0}: Path: {1}."
                    .format(name, configPath))
            except queue.Empty:
                #Just do nothing. This is a normal case
                self.logger.debug("No job to handle.")
            except KeyError as e:
                self.logger.error(
                    "Missing key '{0}' in configuration file for module {1}."
                    .format(e.args[0], name))
            except Exception as e:
                # Catch-all per job: log the traceback and keep the loop alive.
                self.logger.debug("{0}: {1}".format(
                    type(e), traceback.format_exc()))
                self.logger.error("Error while executing scan!")
    except Exception as e:
        # Last-resort guard so the handler thread never dies silently.
        self.logger.critical("Error in EnvironmentHandler: {}".format(
            str(e)))
class Exporter:
    """Continuous exporter that records graph change events as JSON lines.

    Event handlers append one JSON document per change to an in-memory buffer;
    a background ``threading.Timer`` periodically flushes the buffer to
    ``jsonChangeLog.txt`` in the configured output directory.
    """

    def __init__(self, onNewEvent, onDeletedEvent, onChangedEvent, logger, outputDirectory):
        """Register the change handlers and start the periodic writer timer.

        :param onNewEvent: Event raised for new graph objects (handler registered via ``add``).
        :param onDeletedEvent: Event raised for deleted graph objects.
        :param onChangedEvent: Event raised for changed graph objects.
        :param logger: Logger used for diagnostics.
        :param outputDirectory: Directory receiving ``jsonChangeLog.txt``.
        """
        onNewEvent.add(partial(self.onNewHandler))
        onChangedEvent.add(partial(self.onChangedHandler))
        onDeletedEvent.add(partial(self.onDeletedHandler))
        self.buffer = list()
        self.outFile = os.path.join(outputDirectory, "jsonChangeLog.txt")
        self.logger = logger
        # Fix: create the stop flag BEFORE starting the timer; previously it
        # was assigned last, so an early writeFile() fire or stop() call could
        # hit a missing attribute.
        self.__stopEvent = threading.Event()
        self.writer = Timer(WRITE_INTERVAL, partial(self.writeFile))
        self.writer.start()

    def stop(self):
        """Cancel the pending writer timer and prevent any re-scheduling."""
        self.writer.cancel()
        self.__stopEvent.set()

    def onNewHandler(self, sender, args):
        """Buffer a 'new object' record; ``args`` must carry ``objectType`` and ``values``."""
        self.logger.debug("Received onNewEvent writing to file: {0}".format(
            self.outFile))
        timestamp = datetime.datetime.now().strftime("%Y-%m-%d@%H:%M:%S")
        message = {
            "time": timestamp,
            "type": "new",
            "objectType": args["objectType"],
            "initialValues": args["values"]
        }
        self.buffer.append(json.dumps(message))

    def onChangedHandler(self, sender, args):
        """Buffer a 'changed object' record.

        ``args`` must carry ``type``, ``objectType``, ``object`` and ``value``;
        ``member`` is optional. Malformed events are logged and dropped.
        """
        self.logger.debug(
            "Received onChangedEvent writing to file: {0}".format(
                self.outFile))
        try:
            timestamp = datetime.datetime.now().strftime("%Y-%m-%d@%H:%M:%S")
            message = {
                "time": timestamp,
                "type": "change_" + args["type"],
                "objectType": args["objectType"],
                "object": args["object"],
                "value": args["value"]
            }
            if "member" in args:
                message["member"] = args["member"]
            self.buffer.append(json.dumps(message))
        except KeyError as e:
            # Fix: log the malformed event instead of a leftover debug print().
            self.logger.error(
                "Malformed change event, missing key {0}: {1}".format(
                    e, str(args)))

    def onDeletedHandler(self, sender, args):
        """Buffer a 'deleted object' record; ``args`` must carry ``objectType`` and ``object``."""
        self.logger.debug(
            "Received onDeletedEvent writing to file: {0}".format(
                self.outFile))
        timestamp = datetime.datetime.now().strftime("%Y-%m-%d@%H:%M:%S")
        message = {
            "time": timestamp,
            "type": "delete",
            "objectType": args["objectType"],
            "object": args["object"]
        }
        self.buffer.append(json.dumps(message))

    def writeFile(self):
        """Flush the buffered entries to the output file and re-arm the timer."""
        if len(self.buffer) > 0:
            try:
                # TYPE is a module-level open() mode constant defined elsewhere in this file.
                with open(self.outFile, TYPE) as fileHandler:
                    for entry in self.buffer:
                        print(entry, file=fileHandler)
            except OSError:
                # Fix: narrowed from a bare `except:`; only I/O failures are expected here.
                self.logger.error(
                    "Cannot print JSON change log to file {0}.".format(
                        self.outFile))
        # NOTE(review): self.buffer is never cleared, so every flush rewrites
        # all entries; presumably TYPE is a truncating mode ("w") — confirm.
        if not self.__stopEvent.is_set():  # is_set(): camelCase isSet() removed in Python 3.12
            self.writer = Timer(WRITE_INTERVAL, partial(self.writeFile))
            self.writer.start()