def build(self):
    """Bootstrap the application and return the main window.

    Loads persisted settings, connects only when credentials were
    previously stored, scans grounds, loads the configured modules,
    then constructs the MainWindow.
    """
    data.loadSettings()
    # Only connect when credentials are already defined.
    if data.credentials.isDefined():
        self.connect()
    data.scanGrounds()
    modules.load(data.moduleNames)
    self._main = MainWindow()
    return self._main
def onModuleToggled(self, renderer, path):
    """A module has been enabled/disabled.

    Toggles the module at the given tree path: unloads it when it was
    enabled, otherwise tries to load it and reports failures in an error
    dialog. The list is refreshed afterwards either way.
    """
    row = self.list.getRow(path)
    name = row[ROW_MODINFO][modules.MODINFO_NAME]
    if row[ROW_ENABLED]:
        modules.unload(name)
    else:
        try:
            modules.load(name)
        # Py2-only "except E, e" replaced with the portable "as" form,
        # matching the sibling implementation of this handler.
        except modules.LoadException as e:
            gui.errorMsgBox(self.window, _('Unable to load this module.'), str(e))
    self.fillList()
def onModuleToggled(self, renderer, path):
    """A module has been enabled/disabled.

    Enabled rows are unloaded; disabled rows are loaded, with load
    failures surfaced in a modal error box. The list view is rebuilt
    after every toggle.
    """
    entry = self.list.getRow(path)
    mod_name = entry[ROW_MODINFO][modules.MODINFO_NAME]
    if not entry[ROW_ENABLED]:
        try:
            modules.load(mod_name)
        except modules.LoadException as err:
            gui.errorMsgBox(self.window, _('Unable to load this module.'), str(err))
    else:
        modules.unload(mod_name)
    self.fillList()
def loadModule(self, modName):
    """Load the named analysis module into this host window and dock its UI.

    Only one module may be hosted per window; a second call raises.
    Each element the module exposes gets its own Dock, positioned by the
    element's own pos() hint.
    """
    if self.mod is not None:
        raise Exception("No fair loading extra modules in one host.")
    self.mod = modules.load(modName, self)
    elems = self.mod.listElements()
    for name, el in elems.iteritems():
        w = self.mod.getElement(name, create=True)
        d = dockarea.Dock(name=name, size=el.size())
        if w is not None:
            d.addWidget(w)
        pos = el.pos()
        if pos is None:
            pos = ()
        #print d, pos
        # addDock takes positional hints; a bare string is a single hint.
        if isinstance(pos, basestring):
            pos = (pos,)
        self.dockArea.addDock(d, *pos)
    self.elements = elems
    self.setWindowTitle(modName)
    acq4.Manager.getManager().declareInterface(modName, 'analysisMod', self.mod)
    # ask module for prefered size
    self.resize(*self.mod.sizeHint())
def __init__(self, agentConfig, emitters, systemStats):
    """Set up the collector: system checks, legacy checks and custom checks."""
    self.emit_duration = None
    self.agentConfig = agentConfig
    # system stats is generated by config.get_system_stats
    self.agentConfig["system_stats"] = systemStats
    # agent config is used during checks, system_stats can be accessed through the config
    self.os = get_os()
    self.plugins = None
    self.emitters = emitters
    self.metadata_interval = int(agentConfig.get("metadata_interval", 10 * 60))
    self.metadata_start = time.time()
    socket.setdefaulttimeout(15)
    self.run_count = 0
    self.continue_running = True
    self.metadata_cache = None
    self.initialized_checks_d = []
    self.init_failed_checks_d = []

    # Unix System Checks
    self._unix_system_checks = {
        "disk": u.Disk(log),
        "io": u.IO(log),
        "load": u.Load(log),
        "memory": u.Memory(log),
        "processes": u.Processes(log),
        "cpu": u.Cpu(log),
    }

    # Win32 System Checks
    self._win32_system_checks = {
        "disk": w32.Disk(log),
        "io": w32.IO(log),
        "proc": w32.Processes(log),
        "memory": w32.Memory(log),
        "network": w32.Network(log),
        "cpu": w32.Cpu(log),
    }

    # Old-style metric checks
    self._ganglia = Ganglia(log)
    self._dogstream = Dogstreams.init(log, self.agentConfig)
    self._ddforwarder = DdForwarder(log, self.agentConfig)

    # Agent Metrics
    self._agent_metrics = CollectorMetrics(log)

    self._metrics_checks = []

    # Custom metric checks
    for module_spec in [s.strip() for s in self.agentConfig.get("custom_checks", "").split(",")]:
        if len(module_spec) == 0:
            continue
        try:
            self._metrics_checks.append(modules.load(module_spec, "Check")(log))
            log.info("Registered custom check %s" % module_spec)
            log.warning(
                "Old format custom checks are deprecated. They should be moved to the checks.d interface as old custom checks will be removed in a next version"
            )
        # Py2-only "except Exception, e" replaced; `e` was unused and
        # log.exception() already records the traceback.
        except Exception:
            log.exception("Unable to load custom check module %s" % module_spec)
def init(cls, logger, log_path, parser_spec=None, parser_args=None, config=None):
    """Build a dogstream for *log_path*.

    Resolves *parser_spec* (via modules.load) to either a class-based or a
    function-based line parser; on failure, or when no spec is given, the
    default parser is used (parse_func stays None).
    """
    class_based = False
    parse_func = None
    parse_args = tuple(parser_args or ())

    if parser_spec:
        try:
            parse_func = modules.load(parser_spec)
            if isinstance(parse_func, type):
                logger.info('Instantiating class-based dogstream')
                # Class parsers consume the user args at construction time,
                # so per-line args are cleared afterwards.
                parse_func = parse_func(
                    user_args=parse_args or (),
                    logger=logger,
                    log_path=log_path,
                    config=config,
                )
                parse_args = ()
                class_based = True
            else:
                logger.info('Instantiating function-based dogstream')
        except Exception:
            logger.exception(traceback.format_exc())
            logger.error('Could not load Dogstream line parser "%s" PYTHONPATH=%s' % (
                parser_spec,
                os.environ.get('PYTHONPATH', ''))
            )
        logger.info("dogstream: parsing %s with %s (requested %s)" % (log_path, parse_func, parser_spec))
    else:
        logger.info("dogstream: parsing %s with default parser" % log_path)

    return cls(logger, log_path, parse_func, parse_args, class_based=class_based)
def __init__(self, profile_path="call_response_index.json", decay_in=100, help_msg=""):
    """
    Will load in call_response_index.json if it exists. Otherwise it
    initialises an empty call response index. The index file is only
    saved when an entry is added to it (see self.add_response).
    """
    if os.path.exists(profile_path):
        # Context manager closes the handle; the original leaked it.
        with open(profile_path, "r") as index_file:
            self.index = json.load(index_file)
    else:
        self.index = {}
    self.decay_in = decay_in
    self.help_msg = help_msg
    self.profile_path = profile_path
    self.modules = load_module.load()
    if len(self.modules) > 0:
        self.help_msg += ("\nCommands from modules:")
        for module in self.modules:
            try:
                self.help_msg += ("\n" + module.help_str)
            # Narrowed from a bare except: only a missing help_str is expected.
            except AttributeError:
                # Fixed missing space after the module name in the message.
                self.help_msg += ("\nModule " + module.__name__ + " has no help string!")
def __init__(self, agentConfig, emitters, systemStats):
    """Set up the collector: system checks, legacy checks and custom checks."""
    self.emit_duration = None
    self.agentConfig = agentConfig
    # system stats is generated by config.get_system_stats
    self.agentConfig['system_stats'] = systemStats
    # agent config is used during checks, system_stats can be accessed through the config
    self.os = get_os()
    self.plugins = None
    self.emitters = emitters
    self.metadata_interval = int(agentConfig.get('metadata_interval', 10 * 60))
    self.metadata_start = time.time()
    socket.setdefaulttimeout(15)
    self.run_count = 0
    self.continue_running = True
    self.metadata_cache = None
    self.checks_d = []

    # Unix System Checks
    self._unix_system_checks = {
        'disk': u.Disk(log),
        'io': u.IO(log),
        'load': u.Load(log),
        'memory': u.Memory(log),
        'processes': u.Processes(log),
        'cpu': u.Cpu(log)
    }

    # Win32 System Checks
    self._win32_system_checks = {
        'disk': w32.Disk(log),
        'io': w32.IO(log),
        'proc': w32.Processes(log),
        'memory': w32.Memory(log),
        'network': w32.Network(log),
        'cpu': w32.Cpu(log)
    }

    # Old-style metric checks
    self._ganglia = Ganglia(log)
    self._cassandra = Cassandra()
    self._dogstream = Dogstreams.init(log, self.agentConfig)
    self._ddforwarder = DdForwarder(log, self.agentConfig)

    # Agent Metrics
    self._agent_metrics = CollectorMetrics(log)

    # Metric Checks
    self._metrics_checks = [
        Memcache(log),
    ]

    # Custom metric checks
    for module_spec in [s.strip() for s in self.agentConfig.get('custom_checks', '').split(',')]:
        if len(module_spec) == 0:
            continue
        try:
            self._metrics_checks.append(modules.load(module_spec, 'Check')(log))
            log.info("Registered custom check %s" % module_spec)
        # Py2-only "except Exception, e" replaced; `e` was unused and
        # log.exception() already records the traceback.
        except Exception:
            log.exception('Unable to load custom check module %s' % module_spec)
def get_emitters(self):
    """Return the emitter list: the default HTTP emitter plus any
    custom emitters named in the "custom_emitters" config entry."""
    result = [http_emitter]
    for raw_spec in self.config.get("custom_emitters", "").split(","):
        spec = raw_spec.strip()
        if spec:
            result.append(modules.load(spec, "emitter"))
    return result
def _get_emitters(self, agentConfig):
    """Return the emitter list: the default HTTP emitter plus any
    custom emitters named in the 'custom_emitters' config entry."""
    emitters = [http_emitter]
    for emitter_spec in [s.strip() for s in agentConfig.get('custom_emitters', '').split(',')]:
        # Idiomatic emptiness test (splitting "" yields one empty entry).
        if not emitter_spec:
            continue
        emitters.append(modules.load(emitter_spec, 'emitter'))
    return emitters
def test_modname_load_specified(self):
    """When the specifier contains a module name, any provided default
    should be overridden"""
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(
        modules.load(
            '{0}:specified_target'.format(TARGET_MODULE),
            'default_target'),
        'SPECIFIED'
    )
def test_modname_load_default(self):
    """When the specifier contains no module name, any provided default
    should be used"""
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(
        modules.load(
            TARGET_MODULE,
            'default_target'),
        'DEFAULT'
    )
def __init__(self, agentConfig, emitters, systemStats):
    """Set up the collector: system checks, legacy checks and custom checks."""
    self.emit_duration = None
    self.agentConfig = agentConfig
    # system stats is generated by config.get_system_stats
    self.agentConfig['system_stats'] = systemStats
    # agent config is used during checks, system_stats can be accessed through the config
    self.os = get_os()
    self.plugins = None
    self.emitters = emitters
    self.metadata_interval = int(agentConfig.get('metadata_interval', 10 * 60))
    self.metadata_start = time.time()
    socket.setdefaulttimeout(15)
    self.run_count = 0
    self.continue_running = True
    self.metadata_cache = None
    self.initialized_checks_d = []
    self.init_failed_checks_d = []

    # Unix System Checks
    self._unix_system_checks = {
        'disk': u.Disk(log),
        'io': u.IO(log),
        'load': u.Load(log),
        'memory': u.Memory(log),
        'processes': u.Processes(log),
        'cpu': u.Cpu(log)
    }

    # Win32 System Checks
    self._win32_system_checks = {
        'disk': w32.Disk(log),
        'io': w32.IO(log),
        'proc': w32.Processes(log),
        'memory': w32.Memory(log),
        'network': w32.Network(log),
        'cpu': w32.Cpu(log)
    }

    # Old-style metric checks
    self._ganglia = Ganglia(log)
    self._dogstream = Dogstreams.init(log, self.agentConfig)
    self._ddforwarder = DdForwarder(log, self.agentConfig)

    # Agent Metrics
    self._agent_metrics = CollectorMetrics(log)

    self._metrics_checks = []

    # Custom metric checks
    for module_spec in [s.strip() for s in self.agentConfig.get('custom_checks', '').split(',')]:
        if len(module_spec) == 0:
            continue
        try:
            self._metrics_checks.append(modules.load(module_spec, 'Check')(log))
            log.info("Registered custom check %s" % module_spec)
            log.warning("Old format custom checks are deprecated. They should be moved to the checks.d interface as old custom checks will be removed in a next version")
        # Py2-only "except Exception, e" replaced; `e` was unused and
        # log.exception() already records the traceback.
        except Exception:
            log.exception('Unable to load custom check module %s' % module_spec)
def test_modname_load_specified(self):
    """When the specifier contains a module name, any provided default
    should be overridden"""
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(
        modules.load(
            'tests.target_module:specified_target',
            'default_target'),
        'SPECIFIED'
    )
def test_modname_load_default(self):
    """When the specifier contains no module name, any provided default
    should be used"""
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(
        modules.load(
            'tests.target_module',
            'default_target'),
        'DEFAULT'
    )
def get_emitters(self):
    """Return the emitter list: the default HTTP emitter plus any
    custom emitters named in the 'custom_emitters' config entry."""
    result = [http_emitter]
    for raw_spec in self.config.get('custom_emitters', '').split(','):
        spec = raw_spec.strip()
        if spec:
            result.append(modules.load(spec, 'emitter'))
    return result
def __init__(self, agentConfig, emitters):
    """Set up the legacy checks runner: one instance of every service
    check plus any custom metric checks from the config."""
    self.agentConfig = agentConfig
    self.plugins = None
    self.emitters = emitters
    self.os = None

    self.checksLogger = logging.getLogger("checks")
    socket.setdefaulttimeout(15)

    self._apache = Apache(self.checksLogger)
    self._nginx = Nginx(self.checksLogger)
    self._disk = Disk(self.checksLogger)
    self._io = IO()
    self._load = Load(self.checksLogger)
    self._memory = Memory(self.checksLogger)
    self._network = Network(self.checksLogger)
    self._processes = Processes()
    self._cpu = Cpu()
    self._couchdb = CouchDb(self.checksLogger)
    self._mongodb = MongoDb(self.checksLogger)
    self._mysql = MySql(self.checksLogger)
    self._pgsql = PostgreSql(self.checksLogger)
    self._rabbitmq = RabbitMq()
    self._ganglia = Ganglia(self.checksLogger)
    self._cassandra = Cassandra()
    self._redis = Redis(self.checksLogger)
    self._jvm = Jvm(self.checksLogger)
    self._tomcat = Tomcat(self.checksLogger)
    self._activemq = ActiveMQ(self.checksLogger)
    self._solr = Solr(self.checksLogger)
    self._memcache = Memcache(self.checksLogger)
    self._dogstream = Dogstreams.init(self.checksLogger, self.agentConfig)
    self._ddforwarder = DdForwarder(self.checksLogger, self.agentConfig)

    # All new checks should be metrics checks:
    self._metrics_checks = [
        Cacti(self.checksLogger),
        Redis(self.checksLogger),
        Varnish(self.checksLogger),
        ElasticSearch(self.checksLogger),
    ]
    for module_spec in [s.strip() for s in self.agentConfig.get("custom_checks", "").split(",")]:
        if len(module_spec) == 0:
            continue
        try:
            self._metrics_checks.append(modules.load(module_spec, "Check")(self.checksLogger))
            self.checksLogger.info("Registered custom check %s" % module_spec)
        # Py2-only "except Exception, e" replaced; `e` was unused and
        # exception() already records the traceback.
        except Exception:
            self.checksLogger.exception("Unable to load custom check module %s" % module_spec)
def __init__(self, name="default"):
    """Open the outgoing link socket for *name* and attach its protocol.

    Reads the link's settings from config.link[name] and immediately
    starts the asyncore connection.
    """
    asyncore.dispatcher.__init__(self)
    self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
    self.name = name
    conf = config.link[name]
    self.connectedto = conf['remote']
    # PEP 8: no space before call parentheses (matches the sibling copy
    # of this class elsewhere in the codebase).
    self.connect(self.connectedto)
    self.sid = conf['sid']
    self.conf = conf
    self.sendq = ''
    self.recvq = ''
    self.protocol = modules.load(conf['protocol'], self)
def __init__(self, name="default"):
    """Open the outgoing link socket for *name* and attach its protocol.

    Reads the link's settings from config.link[name] and immediately
    starts the asyncore connection.
    """
    asyncore.dispatcher.__init__(self)
    self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
    self.name = name
    conf = config.link[name]
    self.connectedto = conf['remote']
    self.connect(self.connectedto)
    self.sid = conf['sid']
    self.conf = conf
    # Outgoing/incoming buffers consumed by the dispatcher loop.
    self.sendq = ''
    self.recvq = ''
    self.protocol = modules.load(conf['protocol'], self)
def run(self, agentConfig=None, run_forever=True):
    """Main loop of the collector"""
    agentLogger = logging.getLogger('agent')
    systemStats = get_system_stats()

    if agentConfig is None:
        agentConfig = get_config()

    # Load the checks.d checks
    checksd = load_check_directory(agentConfig)

    # Try to fetch instance Id from EC2 if not hostname has been set
    # in the config file.
    # DEPRECATED
    if agentConfig.get('hostname') is None and agentConfig.get(
            'use_ec2_instance_id'):
        instanceId = EC2.get_instance_id()
        if instanceId is not None:
            agentLogger.info("Running on EC2, instanceId: %s" % instanceId)
            agentConfig['hostname'] = instanceId
        else:
            agentLogger.info(
                'Not running on EC2, using hostname to identify this server'
            )

    # Default HTTP emitter plus any configured custom emitters.
    emitters = [http_emitter]
    for emitter_spec in [
            s.strip()
            for s in agentConfig.get('custom_emitters', '').split(',')
    ]:
        if len(emitter_spec) == 0:
            continue
        emitters.append(modules.load(emitter_spec, 'emitter'))

    check_freq = int(agentConfig['check_freq'])

    # Checks instance
    collector = Collector(agentConfig, emitters, systemStats)

    # Watchdog
    watchdog = None
    if agentConfig.get("watchdog", True):
        watchdog = Watchdog(check_freq * WATCHDOG_MULTIPLIER)
        watchdog.reset()

    # Main loop: run all checks, pet the watchdog, sleep one period.
    while run_forever:
        collector.run(checksd=checksd)
        if watchdog is not None:
            watchdog.reset()
        time.sleep(check_freq)
def __init__(self, config):
    """Start one EmitterThread per configured custom emitter.

    Threads that fail to start are logged (with traceback) and skipped.
    """
    self.agentConfig = config
    self.emitterThreads = []
    for emitter_spec in [s.strip() for s in self.agentConfig.get("custom_emitters", "").split(",")]:
        if len(emitter_spec) == 0:
            continue
        logging.info("Setting up custom emitter %r", emitter_spec)
        try:
            thread = EmitterThread(
                name=emitter_spec,
                emitter=modules.load(emitter_spec, "emitter"),
                logger=logging,
                config=config
            )
            thread.start()
            self.emitterThreads.append(thread)
        # Py2-only "except Exception, e" replaced; `e` was unused since
        # exc_info=True already logs the traceback.
        except Exception:
            logging.error("Unable to start thread for emitter: %r", emitter_spec, exc_info=True)
def run(self, agentConfig=None, run_forever=True):
    """Main loop of the collector"""
    agentLogger = logging.getLogger('agent')
    systemStats = get_system_stats()
    agentLogger.debug('System Properties: ' + str(systemStats))

    if agentConfig is None:
        agentConfig = get_config()

    # Load the checks.d checks
    checksd = load_check_directory(agentConfig)

    # Try to fetch instance Id from EC2 if not hostname has been set
    # in the config file.
    # DEPRECATED
    if agentConfig.get('hostname') is None and agentConfig.get('use_ec2_instance_id'):
        instanceId = EC2.get_instance_id()
        if instanceId is not None:
            agentLogger.info("Running on EC2, instanceId: %s" % instanceId)
            agentConfig['hostname'] = instanceId
        else:
            agentLogger.info('Not running on EC2, using hostname to identify this server')

    # Default HTTP emitter plus any configured custom emitters.
    emitters = [http_emitter]
    for emitter_spec in [s.strip() for s in agentConfig.get('custom_emitters', '').split(',')]:
        if len(emitter_spec) == 0:
            continue
        emitters.append(modules.load(emitter_spec, 'emitter'))

    check_freq = int(agentConfig['check_freq'])

    # Checks instance
    c = checks(agentConfig, emitters)

    # Watchdog
    watchdog = None
    if agentConfig.get("watchdog", True):
        watchdog = Watchdog(check_freq * WATCHDOG_MULTIPLIER)
        watchdog.reset()

    # Run checks once, to get once-in-a-run data
    c.doChecks(True, systemStats, checksd)

    # Main loop: pet the watchdog, sleep one period, run the checks.
    while run_forever:
        if watchdog is not None:
            watchdog.reset()
        time.sleep(check_freq)
        c.doChecks(checksd=checksd)
def __init__(self, config):
    """Start one EmitterThread per configured custom emitter.

    Threads that fail to start are logged (with traceback) and skipped.
    """
    self.agentConfig = config
    self.emitterThreads = []
    for emitter_spec in [s.strip() for s in self.agentConfig.get('custom_emitters', '').split(',')]:
        if len(emitter_spec) == 0:
            continue
        logging.info('Setting up custom emitter %r', emitter_spec)
        try:
            thread = EmitterThread(
                name=emitter_spec,
                emitter=modules.load(emitter_spec, 'emitter'),
                logger=logging,
                config=config,
            )
            thread.start()
            self.emitterThreads.append(thread)
        # Py2-only "except Exception, e" replaced; `e` was unused since
        # exc_info=True already logs the traceback (matches sibling copy).
        except Exception:
            logging.error('Unable to start thread for emitter: %r', emitter_spec, exc_info=True)
def __init__(self, config):
    """Start one EmitterThread per configured custom emitter; failures
    are logged with traceback and the emitter is skipped."""
    self.agentConfig = config
    self.emitterThreads = []
    raw_specs = self.agentConfig.get('custom_emitters', '').split(',')
    for emitter_spec in (s.strip() for s in raw_specs):
        if not emitter_spec:
            continue
        logging.info('Setting up custom emitter %r', emitter_spec)
        try:
            emitter_fn = modules.load(emitter_spec, 'emitter')
            worker = EmitterThread(
                name=emitter_spec,
                emitter=emitter_fn,
                logger=logging,
                config=config,
            )
            worker.start()
            self.emitterThreads.append(worker)
        except Exception:
            logging.error('Unable to start thread for emitter: %r', emitter_spec, exc_info=True)
    logging.info('Done with custom emitters')
def loadModule(self, modName):
    """Load the named analysis module into this host window and dock its UI.

    Only one module may be hosted per window; a second call raises.
    Each element the module exposes gets its own Dock, positioned by the
    element's own pos() hint.
    """
    if self.mod is not None:
        raise Exception("No fair loading extra modules in one host.")
    self.mod = modules.load(modName, self)
    elems = self.mod.listElements()
    for name, el in elems.iteritems():
        w = self.mod.getElement(name, create=True)
        d = dockarea.Dock(name=name, size=el.size())
        if w is not None:
            d.addWidget(w)
        pos = el.pos()
        if pos is None:
            pos = ()
        #print d, pos
        # addDock takes positional hints; a bare string is a single hint.
        if isinstance(pos, basestring):
            pos = (pos,)
        self.dockArea.addDock(d, *pos)
    self.elements = elems
    self.setWindowTitle(modName)
    acq4.Manager.getManager().declareInterface(modName, 'analysisMod', self.mod)
        # NOTE(review): these two lines are the tail of a function whose
        # definition lies before this chunk (presumably word2id); the
        # reconstructed indentation is an assumption — confirm upstream.
        return word
    return None


def generate_desc(model, tokenizer, photo, max_length):
    """Greedy caption decoding: starting from 'startseq', repeatedly predict
    the next word until 'endseq' or an unknown id is produced, or
    max_length words have been emitted."""
    in_text = 'startseq'
    for i in range(max_length):
        sequence = tokenizer.texts_to_sequences([in_text])[0]
        sequence = m.pad_sequences([sequence], maxlen=max_length)
        ypred = model.predict([photo, sequence], verbose=0)
        ypred = m.argmax(ypred)
        word = word2id(ypred, tokenizer)
        if word is None:
            break
        in_text += ' ' + word
        if word == 'endseq':
            break
    return in_text


# Script body: load tokenizer/model, caption one test image and show it.
tokenizer = m.load(open('tokenizer.pkl', 'rb'))
max_length = 34
model = m.load_model('model_9.h5')
pic = 'test14.jpg'
photo = extract_features(pic)
description = generate_desc(model, tokenizer, photo, max_length)
im = m.array(m.Image.open(pic))
m.plt.imshow(im)
print(description)
del generate_desc, extract_features
def test_cached_module(self):
    """Modules already in the cache should be reused"""
    spec = '%s:has_been_mutated' % __name__
    self.assertTrue(modules.load(spec))
def __init__(self, agentConfig, emitters, systemStats, hostname):
    """Set up the collector: push timers, system checks and custom checks."""
    self.emit_duration = None
    self.agentConfig = agentConfig
    self.hostname = hostname
    # system stats is generated by config.get_system_stats
    self.agentConfig['system_stats'] = systemStats
    # agent config is used during checks, system_stats can be accessed through the config
    self.os = get_os()
    self.plugins = None
    self.emitters = emitters
    self.check_timings = agentConfig.get('check_timings')
    # Per-payload push schedules: each entry records the last push time
    # and the interval between pushes.
    self.push_times = {
        'host_metadata': {
            'start': time.time(),
            'interval': int(agentConfig.get('metadata_interval', 4 * 60 * 60))
        },
        'external_host_tags': {
            'start': time.time() - 3 * 60,  # Wait for the checks to init
            'interval': int(agentConfig.get('external_host_tags', 5 * 60))
        },
        'agent_checks': {
            'start': time.time(),
            'interval': int(agentConfig.get('agent_checks_interval', 10 * 60))
        },
        'processes': {
            'start': time.time(),
            'interval': int(agentConfig.get('processes_interval', 60))
        }
    }
    socket.setdefaulttimeout(15)
    self.run_count = 0
    self.continue_running = True
    self.hostname_metadata_cache = None
    self.initialized_checks_d = []
    self.init_failed_checks_d = {}

    # Unix System Checks
    self._unix_system_checks = {
        'io': u.IO(log),
        'load': u.Load(log),
        'memory': u.Memory(log),
        'processes': u.Processes(log),
        'cpu': u.Cpu(log),
        'system': u.System(log)
    }

    # Win32 System Checks
    self._win32_system_checks = {
        'io': w32.IO(log),
        'proc': w32.Processes(log),
        'memory': w32.Memory(log),
        'network': w32.Network(log),
        'cpu': w32.Cpu(log),
        'system': w32.System(log)
    }

    # Old-style metric checks
    self._ganglia = Ganglia(log)
    self._dogstream = Dogstreams.init(log, self.agentConfig)
    self._ddforwarder = DdForwarder(log, self.agentConfig)

    # Agent performance metrics check
    self._agent_metrics = None

    self._metrics_checks = []

    # Custom metric checks
    for module_spec in [s.strip() for s in self.agentConfig.get('custom_checks', '').split(',')]:
        if len(module_spec) == 0:
            continue
        try:
            self._metrics_checks.append(modules.load(module_spec, 'Check')(log))
            log.info("Registered custom check %s" % module_spec)
            log.warning("Old format custom checks are deprecated. They should be moved to the checks.d interface as old custom checks will be removed in a next version")
        except Exception:
            log.exception('Unable to load custom check module %s' % module_spec)
    # NOTE(review): this return is the tail of get_args(), whose definition
    # lies before this chunk; the indentation is an assumption — confirm.
    return parser.parse_args()


if __name__ == '__main__':
    args = get_args()
    if args.verbose > 0:
        logger.set_level(logger.DEBUG)

    mproc.current_process().name = "mqttany"
    killer = GracefulKiller()
    poison_pill = False

    log.info("MQTTany {version} starting".format(version=__version__))

    try:
        if not modules.load(args.config_file):
            exit(1)

        # Poll the queue until a signal arrives or a poison pill is seen.
        while not killer.kill_now:
            try:  # to get an item from the queue
                message = queue.get_nowait()
            except QueueEmptyError:
                time.sleep(0.1)  # 100ms
            else:
                poison_pill = True
                log.debug("Received poison pill")
    except:
        # Bare except so cleanup also runs for KeyboardInterrupt/SystemExit;
        # the exception is re-raised after unload.
        modules.unload()
        logger.uninit()
        raise
async def on_ready():
    """Discord gateway ready handler: load all bot modules and log identity."""
    modules.load(client)
    log.info(
        f"[{str(datetime.now())}] Logged in as {client.user.name}#{client.user.discriminator} ({client.user.id})"
    )
def test_pathname_load_finds_package(self):
    """"Loading modules by absolute path should correctly set the name of
    the loaded module to include any package containing it."""
    m = modules.load(os.path.join(os.getcwd(), TARGET_MODULE.replace('.', '/')))
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(m.__name__, TARGET_MODULE)
def load_photos(file, dataset):
    """Return the subset of pre-extracted photo features keyed by *dataset*.

    *file* is a path to a pickled {photo_id: features} mapping.
    NOTE: the parameter name shadows the `file` builtin; kept for
    backward compatibility with existing keyword callers.
    """
    # Context manager closes the handle; the original leaked it.
    with open(file, 'rb') as fh:
        all_features = m.load(fh)
    f = {k: all_features[k] for k in dataset}
    return f
#!/usr/bin/env python # -*- coding: utf-8 -*- import sys from os.path import abspath, dirname sys.path.append('%s/..' % abspath(dirname(__file__))) import modules ################################################################################ ## ## Datastorage creator. ## ################################################################################ for i in modules.get_list(): modules.load(i) from dcu import db db.create_all()
def test_pathname_load_finds_package(self):
    """"Loading modules by absolute path should correctly set the name of
    the loaded module to include any package containing it."""
    m = modules.load(os.path.join(os.getcwd(), TARGET_MODULE.replace(".", "/")))
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(m.__name__, TARGET_MODULE)
def __init__(self, agentConfig, emitters, systemStats, hostname):
    """Set up the collector: push timers, system checks and custom checks."""
    self.ip = get_ip(agentConfig)
    self.emit_duration = None
    self.agentConfig = agentConfig
    self.hostname = hostname
    # system stats is generated by config.get_system_stats
    self.agentConfig['system_stats'] = systemStats
    self.os = get_os()
    self.plugins = None
    self.emitters = emitters
    self.check_timings = agentConfig.get('check_timings')
    # Per-payload push schedules: last push time plus push interval.
    self.push_times = {
        'host_metadata': {
            'start': time.time(),
            'interval': int(agentConfig.get('metadata_interval', 4 * 60 * 60))
        },
        'external_host_tags': {
            'start': time.time() - 3 * 60,  # Wait for the checks to init
            'interval': int(agentConfig.get('external_host_tags', 5 * 60))
        },
        'agent_checks': {
            'start': time.time(),
            'interval': int(agentConfig.get('agent_checks_interval', 10 * 60))
        },
    }
    socket.setdefaulttimeout(15)
    self.run_count = 0
    self.continue_running = True
    self.hostname_metadata_cache = None
    self.initialized_checks_d = []
    self.init_failed_checks_d = {}

    # Unix System Checks
    self._unix_system_checks = {
        'io': u.IO(log),
        'load': u.Load(log),
        'memory': u.Memory(log),
        'processes': u.Processes(log),
        'cpu': u.Cpu(log),
        'system': u.System(log)
    }

    # Win32 System Checks
    self._win32_system_checks = {
        'io': w32.IO(log),
        'proc': w32.Processes(log),
        'memory': w32.Memory(log),
        'network': w32.Network(log),
        'cpu': w32.Cpu(log),
        'system': w32.System(log)
    }

    # Old-style metric checks
    self._ganglia = Ganglia(log)
    self._monitorstream = monitorstreams.init(log, self.agentConfig)
    self._ddforwarder = DdForwarder(log, self.agentConfig)

    # Agent performance metrics check
    self._agent_metrics = None

    self._metrics_checks = []

    # Custom metric checks
    for module_spec in [s.strip() for s in self.agentConfig.get('custom_checks', '').split(',')]:
        if len(module_spec) == 0:
            continue
        try:
            self._metrics_checks.append(modules.load(module_spec, 'Check')(log))
            log.info("Registered custom check %s" % module_spec)
            log.warning(
                "Old format custom checks are deprecated. They should be moved to the checks.d interface as old custom checks will be removed in a next version")
        except Exception:
            log.exception('Unable to load custom check module %s' % module_spec)
def menu():
    # Load persisted state first, then display the main menu.
    load()
    main().menu()
def __init__(self):
    """Initialise the widget, build its UI and load all modules."""
    super().__init__()
    self.buildUI()
    modules.load()
#!/usr/bin/env python # -*- coding: utf-8 -*- from os.path import abspath, dirname import sys ################################################################################ ## ## Simple module tester ## ################################################################################ if __name__ == '__main__': sys.path.append('%s/..' % abspath(dirname(__file__))) from modules import get_list, load print '=> DCU-F Module tester <=' for module in get_list(): print '==> %s: Test started...' % module load(module).test.start()
def test_pathname_load_finds_package(self):
    """"Loading modules by absolute path should correctly set the name of
    the loaded module to include any package containing it."""
    m = modules.load(os.getcwd() + '/tests/target_module.py')
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(m.__name__, 'tests.target_module')
def test_cache_population(self):
    """Python module cache should be populated"""
    # assertIn/assertNotIn give clearer failure messages than
    # assertTrue(x in y).
    self.assertNotIn(TARGET_MODULE, sys.modules)
    modules.load(TARGET_MODULE)
    self.assertIn(TARGET_MODULE, sys.modules)
def test_modname_load_default(self):
    """When the specifier contains no module name, any provided default
    should be used"""
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(modules.load(TARGET_MODULE, "default_target"), "DEFAULT")
def __init__(self, agentConfig, emitters, systemStats):
    """Set up the collector: system checks, legacy checks and custom checks."""
    self.agentConfig = agentConfig
    # system stats is generated by config.get_system_stats
    self.agentConfig['system_stats'] = systemStats
    # agent config is used during checks, system_stats can be accessed through the config
    self.os = getOS()
    self.plugins = None
    self.emitters = emitters
    self.metadata_interval = int(
        agentConfig.get('metadata_interval', 10 * 60))
    self.metadata_start = time.time()
    socket.setdefaulttimeout(15)
    self.run_count = 0
    self.continue_running = True

    # Unix System Checks
    self._unix_system_checks = {
        'disk': u.Disk(checks_logger),
        'io': u.IO(),
        'load': u.Load(checks_logger),
        'memory': u.Memory(checks_logger),
        'network': u.Network(checks_logger),
        'processes': u.Processes(),
        'cpu': u.Cpu(checks_logger)
    }

    # Win32 System Checks
    self._win32_system_checks = {
        'disk': w32.Disk(checks_logger),
        'io': w32.IO(checks_logger),
        'proc': w32.Processes(checks_logger),
        'memory': w32.Memory(checks_logger),
        'network': w32.Network(checks_logger),
        'cpu': w32.Cpu(checks_logger)
    }

    # Old-style metric checks
    self._couchdb = CouchDb(checks_logger)
    self._mongodb = MongoDb(checks_logger)
    self._mysql = MySql(checks_logger)
    self._rabbitmq = RabbitMq()
    self._ganglia = Ganglia(checks_logger)
    self._cassandra = Cassandra()
    self._dogstream = Dogstreams.init(checks_logger, self.agentConfig)
    self._ddforwarder = DdForwarder(checks_logger, self.agentConfig)
    self._ec2 = EC2(checks_logger)

    # Metric Checks
    self._metrics_checks = [
        ElasticSearch(checks_logger),
        Jvm(checks_logger),
        Tomcat(checks_logger),
        ActiveMQ(checks_logger),
        Solr(checks_logger),
        WMICheck(checks_logger),
        Memcache(checks_logger),
    ]

    # Custom metric checks
    for module_spec in [
            s.strip()
            for s in self.agentConfig.get('custom_checks', '').split(',')
    ]:
        if len(module_spec) == 0:
            continue
        try:
            self._metrics_checks.append(
                modules.load(module_spec, 'Check')(checks_logger))
            logger.info("Registered custom check %s" % module_spec)
        # Py2-only "except Exception, e" replaced; `e` was unused and
        # logger.exception() already records the traceback.
        except Exception:
            logger.exception('Unable to load custom check module %s' % module_spec)
def __init__(self, agentConfig, emitters, systemStats, hostname):
    """Set up the collector: push timers, system checks and custom checks."""
    self.emit_duration = None
    self.agentConfig = agentConfig
    self.hostname = hostname
    # system stats is generated by config.get_system_stats
    self.agentConfig['system_stats'] = systemStats
    # agent config is used during checks, system_stats can be accessed through the config
    self.os = get_os()
    self.plugins = None
    self.emitters = emitters
    self.check_timings = agentConfig.get('check_timings')
    # Per-payload push schedules: last push time plus push interval.
    self.push_times = {
        'host_metadata': {
            'start': time.time(),
            'interval': int(agentConfig.get('metadata_interval', 4 * 60 * 60))
        },
        'external_host_tags': {
            'start': time.time() - 3 * 60,  # Wait for the checks to init
            'interval': int(agentConfig.get('external_host_tags', 5 * 60))
        },
        'agent_checks': {
            'start': time.time(),
            'interval': int(agentConfig.get('agent_checks_interval', 10 * 60))
        },
        'processes': {
            'start': time.time(),
            'interval': int(agentConfig.get('processes_interval', 60))
        }
    }
    socket.setdefaulttimeout(15)
    self.run_count = 0
    self.continue_running = True
    self.hostname_metadata_cache = None
    self.initialized_checks_d = []
    self.init_failed_checks_d = {}

    # Point psutil at a non-default procfs mount when configured.
    if Platform.is_linux() and psutil is not None:
        procfs_path = agentConfig.get('procfs_path', '/proc').rstrip('/')
        psutil.PROCFS_PATH = procfs_path

    # Unix System Checks
    self._unix_system_checks = {
        'io': u.IO(log),
        'load': u.Load(log),
        'memory': u.Memory(log),
        'processes': u.Processes(log),
        'cpu': u.Cpu(log),
        'system': u.System(log)
    }

    # Win32 System Checks
    self._win32_system_checks = {
        'io': w32.IO(log),
        'proc': w32.Processes(log),
        'memory': w32.Memory(log),
        'network': w32.Network(log),
        'cpu': w32.Cpu(log),
        'system': w32.System(log)
    }

    # Old-style metric checks; both are optional and stay None when
    # their config entries are absent.
    self._ganglia = Ganglia(log) if self.agentConfig.get('ganglia_host', '') != '' else None
    self._dogstream = None if self.agentConfig.get('dogstreams') is None else Dogstreams.init(log, self.agentConfig)

    # Agent performance metrics check
    self._agent_metrics = None

    self._metrics_checks = []

    # Custom metric checks
    for module_spec in [s.strip() for s in self.agentConfig.get('custom_checks', '').split(',')]:
        if len(module_spec) == 0:
            continue
        try:
            self._metrics_checks.append(modules.load(module_spec, 'Check')(log))
            log.info("Registered custom check %s" % module_spec)
            log.warning("Old format custom checks are deprecated. They should be moved to the checks.d interface as old custom checks will be removed in a next version")
        except Exception:
            log.exception('Unable to load custom check module %s' % module_spec)
def dcu_handler():
    '''
    Main DCU handler.

    With a 'uid' parameter: handles logout, proxying to a receiver, or
    proxying to the client's own module. Without one: registers a new
    client from 'name' and 'module' parameters.
    '''
    if 'uid' in request.values:
        uid = request.values['uid']
        c = Client.query.filter_by(uid=uid).first()
        if c is None:
            return make_response('UID not registered', 403)
        if 'logout' in request.values:
            db.session.delete(c)
            db.session.commit()
            return make_response(dumps({'logout':True}), 200)
        if 'recv' in request.values:
            receiver = request.values['recv']
            r = Client.query.filter_by(name=receiver).first()
            if r is None:
                return make_response('Receiver not registered', 403)
            # Idiomatic `x not in y` (was `not x in y`).
            if r.name not in loads(c.access):
                return make_response('Access denied to \'%s\'' % r.name, 403)
            module = r.module
            client = r
        else:
            module = c.module
            client = c
        if module in modules.get_list():
            proxy = modules.load(module).proxy
            return proxy.run(client)
        else:
            return make_response('Unknown module \'%s\'' % c.module, 400)
    else:
        try:
            name = request.values['name']
            module = request.values['module']
            if len(name) < 4 or len(name) > 15:
                return make_response('Name error, must be from 4 to 15', 400)
            if module in modules.get_list():
                c = Client(name, module)
                db.session.add(c)
                db.session.commit()
                return make_response(dumps({'uid':c.uid}), 200)
            else:
                return make_response('Unknown module \'%s\'' % module, 400)
        except KeyError:
            return make_response('Not sended name or module', 400)
        except OperationalError as e:
            if DEBUG:
                return make_response('Database error: %s' % e, 400)
            else:
                return make_response('Database error', 400)
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        except Exception:
            return make_response('Client create unknown exception', 500)
async def on_message(message):
    """Discord message handler: log traffic, route prefixed commands to modules,
    handle 'help' and 'reload' specially, and report internal errors back to the
    channel as an embed."""
    try:
        if (message.author.id == client.user.id) or message.author.bot:
            # ignore message if from self or another bot
            return
        if message.channel.id not in config.log_channel_ignore:
            # don't log messages from channels in the blacklist
            log.debug(
                f"[{str(datetime.now())}] [{message.guild.name}] [#{message.channel.name}] {message.author.name}#{message.author.discriminator}: {message.content}"
            )
        if not any(
                [message.content.startswith(prefix) for prefix in config.prefixes]):
            return  # ignore message if doesn't start with prefix
        # parts[0] is the prefix word, parts[1] the module/command name.
        # NOTE(review): str.replace removes EVERY occurrence of parts[0]+" "
        # in the message, not just the leading one -- confirm intended.
        parts = message.content.split(" ", 2)
        command = message.content.replace(parts[0] + " ", "")
        try:
            # we'll see if the user provided a second command, if not print generic help
            if parts[1] == "help":
                client.command_count += 1
                await handleHelp(command=command, client=client, message=message)
                return
        except IndexError:
            # Bare prefix with no arguments: show generic help.
            client.command_count += 1
            await handleHelp(command="", client=client, message=message)
            return
        if command == "reload":
            # Hot-reload the modules package and re-register with the client.
            client.command_count += 1
            importlib.reload(modules)
            modules.load(client)
            await message.add_reaction("☑")
            return
        try:
            client.command_count += 1
            # Dispatch to the module named by the first argument.
            # NOTE(review): this except also masks KeyErrors raised INSIDE the
            # handler itself -- confirm that is acceptable.
            await modules.module[parts[1]].handle(command=command,
                                                  client=client,
                                                  message=message)
        except KeyError:
            await message.channel.send(
                f"Sorry, that command wasn't found. (command: {parts[1]})")
    except Exception as e:
        if isinstance(e, UnicodeEncodeError):
            # NOTE(review): falls through and still reports the error below --
            # confirm this should not 'return' instead.
            pass
        if isinstance(e, SystemExit):
            raise
        if isinstance(e, discord.errors.HTTPException):
            await message.channel.send(
                f"{message.author.mention} Sorry, but it looks like there was an error sending the response back to Discord. You probably issued a command that caused me to exceed my 2000 character message limit."
            )
            return
        # Any other error: show the traceback to the user and log it.
        stackdump = ''.join(traceback.format_exc())
        embed = discord.Embed(
            title="Internal error",
            description=
            f"Looks like there was an error, sorry. Please ping or message {config.owner} with this stackdump:\n```{stackdump}```",
            colour=0xf00000)
        embed = embed.set_footer(text="Error occurred at " + str(datetime.now()))
        await message.channel.send(embed=embed)
        log.exception(
            f"[{str(datetime.now())}] Error processing message: {message.content}"
        )
        pass
def __init__(self, agentConfig, emitters, systemStats):
    """Set up the collector: store config and instantiate all checks.

    Args:
        agentConfig: dict-like agent configuration (mutated here: a
            "system_stats" entry is added).
        emitters: emitter callables stored for use elsewhere on the instance.
        systemStats: output of config.get_system_stats, stashed on the config
            so checks can reach it.
    """
    self.emit_duration = None
    self.agentConfig = agentConfig
    # system stats is generated by config.get_system_stats
    self.agentConfig["system_stats"] = systemStats
    # agent config is used during checks, system_stats can be accessed through the config
    self.os = get_os()
    self.plugins = None
    self.emitters = emitters
    self.metadata_interval = int(agentConfig.get("metadata_interval", 10 * 60))
    self.metadata_start = time.time()
    # Global default timeout for all socket operations in this process.
    socket.setdefaulttimeout(15)
    self.run_count = 0
    self.continue_running = True
    self.metadata_cache = None
    self.checks_d = []

    # Unix System Checks
    self._unix_system_checks = {
        "disk": u.Disk(log),
        "io": u.IO(log),
        "load": u.Load(log),
        "memory": u.Memory(log),
        "network": u.Network(log),
        "processes": u.Processes(log),
        "cpu": u.Cpu(log),
    }

    # Win32 System Checks
    self._win32_system_checks = {
        "disk": w32.Disk(log),
        "io": w32.IO(log),
        "proc": w32.Processes(log),
        "memory": w32.Memory(log),
        "network": w32.Network(log),
        "cpu": w32.Cpu(log),
    }

    # Old-style metric checks
    self._mongodb = MongoDb(log)
    self._mysql = MySql(log)
    self._rabbitmq = RabbitMq()
    self._ganglia = Ganglia(log)
    self._cassandra = Cassandra()
    self._dogstream = Dogstreams.init(log, self.agentConfig)
    self._ddforwarder = DdForwarder(log, self.agentConfig)
    self._ec2 = EC2(log)

    # Agent Metrics
    self._agent_metrics = CollectorMetrics(log)

    # Metric Checks
    self._metrics_checks = [Memcache(log)]

    # Custom metric checks: "custom_checks" is a comma-separated list of
    # module specs; each must expose a "Check" class taking a logger.
    for module_spec in [s.strip() for s in self.agentConfig.get("custom_checks", "").split(",")]:
        if len(module_spec) == 0:
            continue
        try:
            self._metrics_checks.append(modules.load(module_spec, "Check")(log))
            log.info("Registered custom check %s" % module_spec)
        except Exception:
            # BUG FIX: was Python-2-only "except Exception, e" with an unused
            # binding; log.exception already records the traceback.
            log.exception("Unable to load custom check module %s" % module_spec)
def reload(bot, server, target, source, message, parsed, private):
    """Force-reload the module named in the command's first capture group,
    then confirm on the originating channel."""
    mod_name = parsed.group(1)
    modules.load(mod_name, reload=True)
    confirmation = _('Module {mod} reloaded.', mod=mod_name)
    bot.privmsg(target, confirmation)
def __init__(self, agentConfig, emitters, systemStats, hostname):
    """Set up the collector: config, push-time schedule, system checks, custom checks.

    Args:
        agentConfig: dict-like agent configuration (mutated here: a
            "system_stats" entry is added).
        emitters: emitter callables stored for use elsewhere on the instance.
        systemStats: output of config.get_system_stats, stashed on the config
            so checks can reach it.
        hostname: resolved hostname stored on the instance.
    """
    self.emit_duration = None
    self.agentConfig = agentConfig
    self.hostname = hostname
    # system stats is generated by config.get_system_stats
    self.agentConfig["system_stats"] = systemStats
    # agent config is used during checks, system_stats can be accessed through the config
    self.os = get_os()
    self.plugins = None
    self.emitters = emitters
    self.check_timings = agentConfig.get("check_timings")
    # Per-payload schedule: each entry records the last push time ("start")
    # and how often the payload should be re-sent ("interval", in seconds).
    self.push_times = {
        "host_metadata": {"start": time.time(), "interval": int(agentConfig.get("metadata_interval", 4 * 60 * 60))},
        "external_host_tags": {
            "start": time.time() - 3 * 60,  # Wait for the checks to init
            "interval": int(agentConfig.get("external_host_tags", 5 * 60)),
        },
        "agent_checks": {"start": time.time(), "interval": int(agentConfig.get("agent_checks_interval", 10 * 60))},
        "processes": {"start": time.time(), "interval": int(agentConfig.get("processes_interval", 60))},
    }
    # Global default timeout for all socket operations in this process.
    socket.setdefaulttimeout(15)
    self.run_count = 0
    self.continue_running = True
    self.hostname_metadata_cache = None
    self.initialized_checks_d = []
    self.init_failed_checks_d = {}

    # Unix System Checks
    self._unix_system_checks = {
        "io": u.IO(log),
        "load": u.Load(log),
        "memory": u.Memory(log),
        "processes": u.Processes(log),
        "cpu": u.Cpu(log),
        "system": u.System(log),
    }

    # Win32 System Checks
    self._win32_system_checks = {
        "io": w32.IO(log),
        "proc": w32.Processes(log),
        "memory": w32.Memory(log),
        "network": w32.Network(log),
        "cpu": w32.Cpu(log),
        "system": w32.System(log),
    }

    # Old-style metric checks: only instantiated when configured.
    self._ganglia = Ganglia(log) if self.agentConfig.get("ganglia_host", "") != "" else None
    self._dogstream = None if self.agentConfig.get("dogstreams") is None else Dogstreams.init(log, self.agentConfig)

    # Agent performance metrics check
    self._agent_metrics = None

    self._metrics_checks = []

    # Custom metric checks: "custom_checks" is a comma-separated list of
    # module specs; each must expose a "Check" class taking a logger.
    for module_spec in [s.strip() for s in self.agentConfig.get("custom_checks", "").split(",")]:
        if len(module_spec) == 0:
            continue
        try:
            self._metrics_checks.append(modules.load(module_spec, "Check")(log))
            log.info("Registered custom check %s" % module_spec)
            log.warning(
                "Old format custom checks are deprecated. They should be moved to the checks.d interface as old custom checks will be removed in a next version"
            )
        except Exception:
            # Best-effort: a broken custom check must not kill the agent.
            log.exception("Unable to load custom check module %s" % module_spec)
def __init__(self, agentConfig, emitters, systemStats):
    """Set up the collector: store config and instantiate all checks.

    Args:
        agentConfig: dict-like agent configuration (mutated here: a
            'system_stats' entry is added).
        emitters: emitter callables stored for use elsewhere on the instance.
        systemStats: output of config.get_system_stats, stashed on the config
            so checks can reach it.
    """
    self.agentConfig = agentConfig
    # system stats is generated by config.get_system_stats
    self.agentConfig['system_stats'] = systemStats
    # agent config is used during checks, system_stats can be accessed through the config
    self.os = getOS()
    self.plugins = None
    self.emitters = emitters
    self.metadata_interval = int(agentConfig.get('metadata_interval', 10 * 60))
    self.metadata_start = time.time()
    # Global default timeout for all socket operations in this process.
    socket.setdefaulttimeout(15)
    self.run_count = 0
    self.continue_running = True

    # Unix System Checks
    self._unix_system_checks = {
        'disk': u.Disk(checks_logger),
        'io': u.IO(),
        'load': u.Load(checks_logger),
        'memory': u.Memory(checks_logger),
        'network': u.Network(checks_logger),
        'processes': u.Processes(),
        'cpu': u.Cpu(checks_logger)
    }

    # Win32 System Checks
    self._win32_system_checks = {
        'disk': w32.Disk(checks_logger),
        'io': w32.IO(checks_logger),
        'proc': w32.Processes(checks_logger),
        'memory': w32.Memory(checks_logger),
        'network': w32.Network(checks_logger),
        'cpu': w32.Cpu(checks_logger)
    }

    # Old-style metric checks
    self._couchdb = CouchDb(checks_logger)
    self._mongodb = MongoDb(checks_logger)
    self._mysql = MySql(checks_logger)
    self._rabbitmq = RabbitMq()
    self._ganglia = Ganglia(checks_logger)
    self._cassandra = Cassandra()
    self._dogstream = Dogstreams.init(checks_logger, self.agentConfig)
    self._ddforwarder = DdForwarder(checks_logger, self.agentConfig)
    self._ec2 = EC2(checks_logger)

    # Metric Checks
    self._metrics_checks = [
        ElasticSearch(checks_logger),
        WMICheck(checks_logger),
        Memcache(checks_logger),
    ]

    # Custom metric checks: 'custom_checks' is a comma-separated list of
    # module specs; each must expose a 'Check' class taking a logger.
    # NOTE(review): this loop logs via 'logger' while everything else uses
    # 'checks_logger' -- confirm which is intended.
    for module_spec in [s.strip() for s in self.agentConfig.get('custom_checks', '').split(',')]:
        if len(module_spec) == 0:
            continue
        try:
            self._metrics_checks.append(modules.load(module_spec, 'Check')(checks_logger))
            logger.info("Registered custom check %s" % module_spec)
        except Exception:
            # BUG FIX: was Python-2-only "except Exception, e" with an unused
            # binding; logger.exception already records the traceback.
            logger.exception('Unable to load custom check module %s' % module_spec)
def test_cache_population(self):
    """Loading a module through modules.load should register it in sys.modules."""
    target = 'tests.target_module'
    self.assertTrue(target not in sys.modules)
    modules.load(target)
    self.assertTrue(target in sys.modules)