def __init__(self, mod_conf, api_key, secret, default_template):
    """Store the API credentials; the driver/connection are opened later."""
    BaseModule.__init__(self, mod_conf)
    self.api_key = api_key
    self.secret = secret
    self.default_template = default_template
    # Lazily initialised by a later connect step
    self.driver = None
    self.con = None
def __init__(self, mod_conf):
    """Read the NSCA check-forwarding configuration from the module definition."""
    BaseModule.__init__(self, mod_conf)
    logger.info("[Checks forward] module initialization")
    try:
        # Module configuration
        self.glpi_entities = getattr(mod_conf, 'glpi_entities', '')
        self.glpi_entities = self.glpi_entities.split(',')
        if len(self.glpi_entities) > 0 and self.glpi_entities[0] == '':
            # An empty setting means: forward for every host/service
            self.glpi_entities = None

        self.send_nsca_bin = str(getattr(mod_conf, 'send_nsca_bin', '/usr/sbin/send_nsca'))
        self.send_nsca_config = str(getattr(mod_conf, 'send_nsca_config', '/etc/send_nsca.cfg'))
        self.nsca_server_host = str(getattr(mod_conf, 'nsca_server_host', '127.0.0.1'))
        self.nsca_server_port = int(getattr(mod_conf, 'nsca_server_port', 5667))

        logger.info("[Checks forward] module configuration, forward to: %s:%s, using %s with configuration %s" % (self.nsca_server_host, self.nsca_server_port, self.send_nsca_bin, self.send_nsca_config))
        if self.glpi_entities:
            logger.info("[Checks forward] module configuration, forward checks for GLPI entities: %s" % str(self.glpi_entities))
        else:
            logger.info("[Checks forward] module configuration, forward checks for all hosts/services")

        # Internal cache for host entities id
        self.cache_host_entities_id = {}
    except AttributeError:
        logger.error("[Checks forward] The module is missing a property, check module configuration")
        raise
def __init__(self, mod_conf, host, login, password, database, reqlist):
    """Keep the database connection settings and the request list for later use."""
    BaseModule.__init__(self, mod_conf)
    self.host = host
    self.login = login
    self.password = password
    self.database = database
    self.reqlist = reqlist
def __init__(self, modconf, host, port, encryption_method, password):
    """Record the NSCA endpoint/crypto settings and seed the RNG from the password."""
    BaseModule.__init__(self, modconf)
    self.host = host
    self.port = port
    self.encryption_method = encryption_method
    self.password = password
    # Deterministic RNG seeded by the shared password
    self.rng = random.Random(password)
def __init__(self, mod_conf, host, port, socket, allowed_hosts, database_file, max_logs_age, pnp_path):
    """Store the listener settings and create the empty regenerated-object caches."""
    BaseModule.__init__(self, mod_conf)
    self.host = host
    self.port = port
    self.socket = socket
    self.allowed_hosts = allowed_hosts
    self.database_file = database_file
    self.max_logs_age = max_logs_age
    self.pnp_path = pnp_path
    # Our datas
    self.configs = {}
    self.hosts = {}
    self.services = {}
    self.contacts = {}
    self.hostgroups = {}
    self.servicegroups = {}
    self.contactgroups = {}
    self.timeperiods = {}
    self.commands = {}
    # Now satellites
    self.schedulers = {}
    self.pollers = {}
    self.reactionners = {}
    self.brokers = {}
    self.instance_ids = []
    self.hostname_lookup_table = {}
    self.servicename_lookup_table = {}
    self.number_of_objects = 0
    self.last_need_data_send = time.time()
def __init__(self, modconf, config_file, perfdata_file, perfdata_spool_dir, perfdata_spool_filename, sleep_time): BaseModule.__init__(self, modconf) self.config_file = config_file self.perfdata_file = perfdata_file self.perfdata_spool_dir = perfdata_spool_dir self.perfdata_spool_filename = perfdata_spool_filename self.sleep_time = sleep_time self.process_performance_data = True # this can be reset and set by program_status_broks self.processed_lines = 0 self.host_commands = {} self.service_commands = {} if self.config_file and not self.process_config_file(): print "npcdmod: An error occurred process your config file. Check your perfdata_file or perfdata_spool_dir" raise if not self.perfdata_spool_dir and not self.perfdata_file: print "npcdmod: An error occurred while attempting to process module arguments" raise try: # We open the file with line buffering, so we can better watch it with tail -f self.logfile = open(self.perfdata_file, 'a', 1) except: print "could not open file %s" % self.perfdata_file raise # use so we do nto ask a reinit ofan instance too quickly self.last_need_data_send = time.time()
def __init__(self, conf):
    """Build the NDO field-name/transform mapping and keep the DB settings."""
    BaseModule.__init__(self, conf)
    # Mapping for name of data and transform function
    self.mapping = {
        'program_status': {
            'program_start': {'name': 'program_start_time', 'transform': de_unixify},
            'pid': {'name': 'process_id', 'transform': None},
            'last_alive': {'name': 'status_update_time', 'transform': de_unixify},
            'is_running': {'name': 'is_currently_running', 'transform': None},
        },
    }
    self.host = conf.host
    self.user = conf.user
    self.password = conf.password
    self.database = conf.database
    self.character_set = conf.character_set
def __init__(self, modconf, server, port, password, db, expire_time):
    """Keep the Redis connection parameters and the retention expiry time."""
    BaseModule.__init__(self, modconf)
    self.server = server
    self.port = port
    self.password = password
    self.db = db
    self.expire_time = expire_time
def __init__(self, mod_conf):
    """Read the MongoDB scheduler-retention configuration from the module definition."""
    BaseModule.__init__(self, mod_conf)
    self.uri = getattr(mod_conf, 'uri', 'mongodb://localhost')
    logger.info('[Mongodb-Scheduler-Retention] mongo uri: %s', self.uri)
    self.replica_set = getattr(mod_conf, 'replica_set', None)
    # Replica sets need pymongo >= 3; bail out of initialisation otherwise.
    if self.replica_set and int(pymongo.version[0]) < 3:
        logger.error('[Mongodb-Scheduler-Retention] Can not initialize module with '
                     'replica_set because your pymongo lib is too old. '
                     'Please install it with a 3.x+ version from '
                     'https://pypi.python.org/pypi/pymongo')
        return None
    self.path = getattr(mod_conf, 'path', None)
    logger.info('[Mongodb-Scheduler-Retention] old file path: %s', self.path)
    self.database = getattr(mod_conf, 'database', 'shinken')
    logger.info('[Mongodb-Scheduler-Retention] database: %s', self.database)
    self.hosts_collection_name = getattr(mod_conf, 'hosts_collection_name', 'retention_hosts')
    logger.info('[Mongodb-Scheduler-Retention] hosts retention collection: %s', self.hosts_collection_name)
    self.services_collection_name = getattr(mod_conf, 'services_collection_name', 'retention_services')
    logger.info('[Mongodb-Scheduler-Retention] services retention collection: %s', self.services_collection_name)
    self.comments_collection_name = getattr(mod_conf, 'comments_collection_name', 'retention_comments')
    logger.info('[Mongodb-Scheduler-Retention] comments retention collection: %s', self.comments_collection_name)
    self.downtimes_collection_name = getattr(mod_conf, 'downtimes_collection_name', 'retention_downtimes')
    logger.info('[Mongodb-Scheduler-Retention] downtimes retention collection: %s', self.downtimes_collection_name)
    # Opened later, in init()
    self.connection = None
    self.task = None
def __init__(self, mod_conf, uri, database):
    """Keep the MongoDB uri/database; the connection is established later."""
    BaseModule.__init__(self, mod_conf)
    self.uri = uri
    self.database = database
    # Some used variable init
    self.con = None
    self.db = None
def __init__(self, modconf, server, sentinel_servers, redis_instance, wait_for_failover):
    """Parse the Redis/Sentinel retention settings.

    `sentinel_servers` is a comma separated "host:port,host:port" string and
    is converted into a list of (host, port) tuples.
    """
    BaseModule.__init__(self, modconf)
    self.server = server
    self.sentinel_servers = sentinel_servers
    self.redis_instance = redis_instance
    self.wait_for_failover = wait_for_failover
    # Normalise the failover wait into an int (0 when unset/empty)
    if self.wait_for_failover:
        self.wait_for_failover = int(self.wait_for_failover)
    else:
        self.wait_for_failover = 0
    if self.sentinel_servers:
        if not Sentinel:
            logger.error('[RedisRetention]: Can not initialize module with '
                         'sentinel because your redis-py lib is too old. '
                         'Please install it with a 2.9+ version from '
                         'https://pypi.python.org/pypi/redis')
            return None
        else:
            server_list = []
            for sentinel_server in self.sentinel_servers.split(','):
                server_list.append(tuple(sentinel_server.split(':')))
            self.sentinel_servers = server_list
    # Connection, created in init()
    self.mc = None
def __init__(self, modconf, config_file, perfdata_file, perfdata_spool_dir, perfdata_spool_filename, sleep_time): BaseModule.__init__(self, modconf) self.config_file = config_file self.perfdata_file = perfdata_file self.perfdata_spool_dir = perfdata_spool_dir self.perfdata_spool_filename = perfdata_spool_filename self.sleep_time = sleep_time self.process_performance_data = True # this can be reset and set by program_status_broks self.processed_lines = 0 self.host_commands = {} self.service_commands = {} if self.config_file and not self.process_config_file(): print "npcdmod: An error occurred process your config file. Check your perfdata_file or perfdata_spool_dir" raise Exception('npcdmod: An error occurred process your config file. Check your perfdata_file or perfdata_spool_dir') if not self.perfdata_spool_dir and not self.perfdata_file: print "npcdmod: An error occurred while attempting to process module arguments" raise Exception('npcdmod: An error occurred while attempting to process module arguments') try: # We open the file with line buffering, so we can better watch it with tail -f self.logfile = codecs.open(self.perfdata_file, 'a', 'utf-8', 'replace', 1) except: print "could not open file %s" % self.perfdata_file raise Exception('could not open file %s" % self.perfdata_file') # use so we do nto ask a reinit ofan instance too quickly self.last_need_data_send = time.time()
def __init__(self, modconf):
    """Set up the WebUI LDAP authentication module.

    Supports two modes, Active Directory ("ad") and OpenLDAP ("openldap");
    each mode selects its own attribute names for lookups, photos and ids.
    """
    BaseModule.__init__(self, modconf)
    self.ldap_uri = getattr(modconf, "ldap_uri", None)
    self.username = getattr(modconf, "username", "")
    self.password = getattr(modconf, "password", "")
    self.basedn = getattr(modconf, "basedn", "")
    # If we got no uri, we bailout...
    self.active = bool(self.ldap_uri)
    self.con = None
    # Switch between active directory and OpenLdap mode
    self.mode = getattr(modconf, "mode", "ad")
    if self.mode not in ["ad", "openldap"]:
        raise Exception("WebUI Auth ldap module error, mode is not in ad or openldap")
    # Per-mode attribute names
    self.retrieveAttributes = {
        "ad": ["userPrincipalName", "thumbnailPhoto", "samaccountname", "email"],
        "openldap": ["cn", "jpegphoto", "uid", "mail"],
    }[self.mode]
    self.photo_attr = {"ad": "thumbnailPhoto", "openldap": "jpegPhoto"}[self.mode]
    self.name_id = {"ad": "userPrincipalName", "openldap": "uid"}[self.mode]
    self.auth_key = {"ad": "userPrincipalName", "openldap": "dn"}[self.mode]
    self.search_format = {"ad": "(| (samaccountname=%s)(mail=%s))", "openldap": "(| (uid=%s)(mail=%s))"}[self.mode]
def __init__(self, mod_conf, key, secret, ca, default_template, https_proxy):
    """Store the API credentials, CA bundle, template and proxy settings."""
    BaseModule.__init__(self, mod_conf)
    self.key = key
    self.secret = secret
    self.ca = ca
    self.default_template = default_template
    self.https_proxy = https_proxy
def __init__(self, mod_conf, pub_endpoint, serialize_to):
    """Bind the ZeroMQ PUB socket and pick the broks serialization function."""
    from zmq import Context, PUB
    BaseModule.__init__(self, mod_conf)
    self.pub_endpoint = pub_endpoint
    self.serialize_to = serialize_to
    logger.info("[Zmq Broker] Binding to endpoint " + self.pub_endpoint)
    # This doesn't work properly in init()
    # sometimes it ends up beings called several
    # times and the address becomes already in use.
    self.context = Context()
    self.s_pub = self.context.socket(PUB)
    self.s_pub.bind(self.pub_endpoint)
    # Load the correct serialization function
    # depending on the serialization method
    # chosen in the configuration.
    if self.serialize_to == "msgpack":
        from msgpack import Packer
        packer = Packer(default=encode_monitoring_data)
        self.serialize = lambda msg: packer.pack(msg)
    elif self.serialize_to == "json":
        self.serialize = lambda msg: json.dumps(msg, cls=SetEncoder)
    else:
        raise Exception("[Zmq Broker] No valid serialization method defined (Got " + str(self.serialize_to) + ")!")
def __init__(self, conf):
    """Build the NDO field mapping and keep the MySQL connection settings."""
    BaseModule.__init__(self, conf)
    # Mapping for name of data and transform function
    self.mapping = {
        'program_status': {
            'program_start': {'name': 'program_start_time', 'transform': de_unixify},
            'pid': {'name': 'process_id', 'transform': None},
            'last_alive': {'name': 'status_update_time', 'transform': de_unixify},
            'is_running': {'name': 'is_currently_running', 'transform': None},
            'last_log_rotation': {'name': 'last_log_rotation', 'transform': de_unixify},
            'last_command_check': {'name': 'last_command_check', 'transform': de_unixify},
        },
    }
    self.host = conf.host
    self.user = conf.user
    self.password = conf.password
    self.database = conf.database
    self.character_set = conf.character_set
    self.port = int(getattr(conf, 'port', '3306'))
    self.prefix = getattr(conf, 'prefix', 'nagios_')
    # Centreon ndo add some fields like long_output
    # that are not in the vanilla ndo
    self.centreon_version = False
    self.synchronize_database_id = int(conf.synchronize_database_id)
def __init__(self, modconf):
    """Keep the MongoDB uri and target database for this module."""
    BaseModule.__init__(self, modconf)
    # mongodb://host1,host2,host3/?safe=true;w=2;wtimeoutMS=2000
    self.mongodb_uri = getattr(modconf, 'mongodb_uri', None)
    self.database = getattr(modconf, 'database', 'shinken')
    self.version = version
    self.pp = pprint.PrettyPrinter()
def __init__(self, modconf):
    """Initialise the plugin list and reset the log-line counter."""
    BaseModule.__init__(self, modconf)
    self.plugins = []
    # Now sleep one second, so that won't get lineno collisions with the last second
    time.sleep(1)
    Logline.lineno = 0
def __init__(self, modconf, config_file, perfdata_file, perfdata_spool_dir, perfdata_spool_filename, sleep_time): BaseModule.__init__(self, modconf) self.config_file = config_file self.perfdata_file = perfdata_file self.perfdata_spool_dir = perfdata_spool_dir self.perfdata_spool_filename = perfdata_spool_filename self.sleep_time = sleep_time self.process_performance_data = True # this can be reset and set by program_status_broks self.processed_lines = 0 self.host_commands = {} self.service_commands = {} if self.config_file and not self.process_config_file(): print "npcdmod: An error occurred process your config file. Check your perfdata_file or perfdata_spool_dir" raise if not self.perfdata_spool_dir and not self.perfdata_file: print "npcdmod: An error occurred while attempting to process module arguments" raise try: # We open the file with line buffering, so we can better watch it with tail -f self.logfile = open(self.perfdata_file, 'a', 1) except: print "could not open file %s" % self.perfdata_file raise
def __init__(self, modconf):
    """Read the Graphite/carbon export settings from the module definition."""
    BaseModule.__init__(self, modconf)
    self.host = getattr(modconf, 'host', 'localhost')
    self.use_pickle = getattr(modconf, 'use_pickle', '0') == '1'
    # Pickle and plaintext protocols use different default carbon ports
    default_port = '2004' if self.use_pickle else '2003'
    self.port = int(getattr(modconf, 'port', default_port))
    self.tick_limit = int(getattr(modconf, 'tick_limit', '300'))
    # Used to reset check time into the scheduled time.
    # Carbon/graphite does not like latency data and creates blanks in graphs
    # Every data with "small" latency will be considered create at scheduled time
    self.ignore_latency_limit = int(getattr(modconf, 'ignore_latency_limit', '0'))
    if self.ignore_latency_limit < 0:
        self.ignore_latency_limit = 0
    self.buffer = []
    self.ticks = 0
    self.host_dict = {}
    self.svc_dict = {}
    self.multival = re.compile(r'_(\d+)$')
    self.chunk_size = 200
    self.max_chunk_size = 100000
    # optional "sub-folder" in graphite to hold the data of a specific host
    self.graphite_data_source = self.illegal_char.sub('_', getattr(modconf, 'graphite_data_source', ''))
def __init__(self, mod_conf):
    """Configure the Redis queue broker: brok filters and Redis/Sentinel settings."""
    BaseModule.__init__(self, mod_conf)
    # create shared queues
    #manager = Manager()
    #self.create_queues(manager=manager)
    # store only host and service check results
    self.host_valid_broks = ['host_check_result']
    self.service_valid_broks = ['service_check_result']
    # need only these keys out of complete check result
    self.host_valid_attrs = ['address', 'state', 'last_chk', 'last_state_change', 'host_name', 'perf_data']
    self.service_valid_attrs = self.host_valid_attrs + ['service_description']
    # need to save plugin output for these services as well
    self.need_plugin_out = ['wimax_topology', 'cambium_topology']
    self.redis_conf = {
        'host': getattr(mod_conf, 'host', 'localhost'),
        'port': getattr(mod_conf, 'port', 6379),
        'db': int(getattr(mod_conf, 'db', 0)),
    }
    # Parse "host,port,host,port,..." into (host, port) pairs
    sentinels = []
    sentinels_conf = getattr(mod_conf, 'sentinels', None)
    if sentinels_conf:
        sentinels_conf = sentinels_conf.split(',')
        while sentinels_conf:
            sentinels.append(tuple(sentinels_conf[:2]))
            sentinels_conf = sentinels_conf[2:]
    sentinels_service_name = getattr(mod_conf, 'service_name', 'mymaster')
    self.redis_conf.update({'sentinels': sentinels, 'sentinels_service_name': sentinels_service_name})
    # NOTE(review): read but never used below — presumably meant for the
    # Sentinel connection; confirm against RedisQueue.
    min_other_sentinels = getattr(mod_conf, 'min_other_sentinels', 0)
    # redis queue
    self.queue = RedisQueue(**self.redis_conf)
def __init__(self, mod_conf, uri, login_name, login_password, use_property):
    """Keep the WS endpoint, credentials, and the property used for lookups."""
    BaseModule.__init__(self, mod_conf)
    self.uri = uri
    self.login_name = login_name
    self.login_password = login_password
    self.use_property = use_property
def __init__(self, mod_conf, host, port, socket, allowed_hosts, database_file, max_logs_age, pnp_path, debug=None, debug_queries=False):
    """Store the livestatus listener settings and create empty object caches."""
    BaseModule.__init__(self, mod_conf)
    self.host = host
    self.port = port
    self.socket = socket
    self.allowed_hosts = allowed_hosts
    self.database_file = database_file
    self.max_logs_age = max_logs_age
    self.pnp_path = pnp_path
    self.debug = debug
    self.debug_queries = debug_queries
    # Our datas
    self.configs = {}
    self.hosts = SortedDict()
    self.services = SortedDict()
    self.contacts = SortedDict()
    self.hostgroups = SortedDict()
    self.servicegroups = SortedDict()
    self.contactgroups = SortedDict()
    self.timeperiods = SortedDict()
    self.commands = SortedDict()
    # Now satellites
    self.schedulers = SortedDict()
    self.pollers = SortedDict()
    self.reactionners = SortedDict()
    self.brokers = SortedDict()
    self.service_id_cache = {}
    self.instance_ids = []
    self.number_of_objects = 0
    self.last_need_data_send = time.time()
def __init__(self, modconf, user, password, database):
    """Build the NDO field mapping and keep the database credentials."""
    # Mapping for name of data and transform function
    self.mapping = {
        'program_status': {
            'program_start': {'name': 'program_start_time', 'transform': de_unixify},
            'pid': {'name': 'process_id', 'transform': None},
            'last_alive': {'name': 'status_update_time', 'transform': de_unixify},
            'is_running': {'name': 'is_currently_running', 'transform': None},
        },
    }
    BaseModule.__init__(self, modconf)
    self.user = user
    self.password = password
    self.database = database
def __init__(self, modconf): BaseModule.__init__(self, modconf) try: logger.debug("[WS_Nocout] Configuration starting ...") self.username = getattr(modconf, 'username', 'anonymous') self.password = getattr(modconf, 'password', '') self.port = int(getattr(modconf, 'port', '7760')) self.host = getattr(modconf, 'host', '0.0.0.0') # adding inventory load celery task here [being called from do_local_sync] ETL_BASE_DIR = getattr(modconf, 'etl_base_dir', '/omd/nocout_etl') sys.path.insert(0, ETL_BASE_DIR) logger.info( "[WS_Nocout] Configuration done, host: %s(%s), username: %s)" % (self.host, self.port, self.username) ) except AttributeError: logger.error( "[WS_Nocout] The module is missing a property, " "check module declaration in shinken-specific.cfg" ) raise except Exception, e: logger.error("[WS_Nocout] Exception : %s" % str(e)) raise
def __init__(self, mod_conf, pub_endpoint, serialize_to):
    """Bind the ZeroMQ PUB socket and select the broks serializer (msgpack/json)."""
    from zmq import Context, PUB
    BaseModule.__init__(self, mod_conf)
    self.pub_endpoint = pub_endpoint
    self.serialize_to = serialize_to
    logger.info("[Zmq Broker] Binding to endpoint " + self.pub_endpoint)
    # This doesn't work properly in init()
    # sometimes it ends up beings called several
    # times and the address becomes already in use.
    self.context = Context()
    self.s_pub = self.context.socket(PUB)
    self.s_pub.bind(self.pub_endpoint)
    # Load the correct serialization function
    # depending on the serialization method
    # chosen in the configuration.
    if self.serialize_to == "msgpack":
        from msgpack import Packer
        packer = Packer(default=encode_monitoring_data)
        self.serialize = lambda msg: packer.pack(msg)
    elif self.serialize_to == "json":
        self.serialize = lambda msg: json.dumps(msg, cls=SetEncoder)
    else:
        raise Exception(
            "[Zmq Broker] No valid serialization method defined (Got "
            + str(self.serialize_to) + ")!")
def __init__(self, conf):
    """Build the NDO field mapping and keep the MySQL connection settings."""
    BaseModule.__init__(self, conf)
    # Mapping for name of data and transform function
    self.mapping = {
        "program_status": {
            "program_start": {"name": "program_start_time", "transform": de_unixify},
            "pid": {"name": "process_id", "transform": None},
            "last_alive": {"name": "status_update_time", "transform": de_unixify},
            "is_running": {"name": "is_currently_running", "transform": None},
            "last_log_rotation": {"name": "last_log_rotation", "transform": de_unixify},
            "last_command_check": {"name": "last_command_check", "transform": de_unixify},
        }
    }
    self.host = conf.host
    self.user = conf.user
    self.password = conf.password
    self.database = conf.database
    self.character_set = conf.character_set
    self.port = int(getattr(conf, "port", "3306"))
    self.prefix = getattr(conf, "prefix", "nagios_")
    # Centreon ndo add some fields like long_output
    # that are not in the vanilla ndo
    self.centreon_version = False
    self.synchronize_database_id = int(conf.synchronize_database_id)
def __init__(self, modconf):
    """Read the (hosted) Graphite export settings from the module definition."""
    BaseModule.__init__(self, modconf)
    self.host = getattr(modconf, 'host', 'localhost')
    self.use_pickle = getattr(modconf, 'use_pickle', '0') == '1'
    # Pickle and plaintext protocols use different default carbon ports
    default_port = '2004' if self.use_pickle else '2003'
    self.port = int(getattr(modconf, 'port', default_port))
    self.tick_limit = int(getattr(modconf, 'tick_limit', '300'))
    # Used to reset check time into the scheduled time.
    # Carbon/graphite does not like latency data and creates blanks in graphs
    # Every data with "small" latency will be considered create at scheduled time
    self.ignore_latency_limit = int(getattr(modconf, 'ignore_latency_limit', '0'))
    if self.ignore_latency_limit < 0:
        self.ignore_latency_limit = 0
    self.buffer = []
    self.ticks = 0
    self.host_dict = {}
    self.svc_dict = {}
    self.multival = re.compile(r'_(\d+)$')
    self.chunk_size = 200
    self.max_chunk_size = 100000
    self.hosted_graphite_api_key = getattr(modconf, 'hosted_graphite_api_key', '')
    # optional "sub-folder" in graphite to hold the data of a specific host
    self.graphite_data_source = self.illegal_char.sub('_', getattr(modconf, 'graphite_data_source', ''))
def __init__(self, modconf):
    """Read the GLPI database settings and the per-table update switches."""
    BaseModule.__init__(self, modconf)
    self.hosts_cache = {}
    self.services_cache = {}
    # Database configuration
    self.host = getattr(modconf, 'host', '127.0.0.1')
    self.user = getattr(modconf, 'user', 'shinken')
    self.password = getattr(modconf, 'password', 'shinken')
    self.database = getattr(modconf, 'database', 'glpidb')
    self.character_set = getattr(modconf, 'character_set', 'utf8')
    logger.info("[glpidb] using '%s' database on %s (user = %s)", self.database, self.host, self.user)
    # Database tables update configuration: each flag is the string '1' to enable
    self.update_availability = bool(getattr(modconf, 'update_availability', '0') == '1')
    self.update_shinken_state = bool(getattr(modconf, 'update_shinken_state', '0') == '1')
    self.update_services_events = bool(getattr(modconf, 'update_services_events', '0') == '1')
    self.update_hosts = bool(getattr(modconf, 'update_hosts', '0') == '1')
    self.update_services = bool(getattr(modconf, 'update_services', '0') == '1')
    self.update_acknowledges = bool(getattr(modconf, 'update_acknowledges', '0') == '1')
    logger.info("[glpidb] updating availability: %s", self.update_availability)
    logger.info("[glpidb] updating Shinken state: %s", self.update_shinken_state)
    logger.info("[glpidb] updating services events: %s", self.update_services_events)
    logger.info("[glpidb] updating hosts states: %s", self.update_hosts)
    logger.info("[glpidb] updating services states: %s", self.update_services)
    logger.info("[glpidb] updating acknowledges states: %s", self.update_acknowledges)
def __init__(self, mod_conf):
    """Read the NSCA check-forwarding configuration (verbose init logging variant)."""
    BaseModule.__init__(self, mod_conf)
    logger.info("[Checks forward] module init")
    try:
        logger.debug("[Checks forward] module init : get parameters entities")
        # Module configuration
        self.glpi_entities = getattr(mod_conf, 'glpi_entities', '')
        self.glpi_entities = self.glpi_entities.split(',')
        if len(self.glpi_entities) > 0 and self.glpi_entities[0] == '':
            # An empty setting means: forward for every host/service
            self.glpi_entities = None
        logger.warning("[Checks forward] module init : get parameters nsca")
        self.send_nsca_bin = str(getattr(mod_conf, 'send_nsca_bin', '/usr/sbin/send_nsca'))
        self.send_nsca_config = str(getattr(mod_conf, 'send_nsca_config', '/etc/send_nsca.cfg'))
        logger.warning("[Checks forward] module init : get parameters nsca server")
        self.nsca_server_host = str(getattr(mod_conf, 'nsca_server_host', '127.0.0.1'))
        self.nsca_server_port = int(getattr(mod_conf, 'nsca_server_port', 5667))
        logger.warning("[Checks forward] module init : log info")
        logger.info("[Checks forward] module configuration, forward to: %s:%s, using %s with configuration %s" % (self.nsca_server_host, self.nsca_server_port, self.send_nsca_bin, self.send_nsca_config))
        if self.glpi_entities:
            logger.info("[Checks forward] module configuration, forward checks for GLPI entities: %s" % str(self.glpi_entities))
        else:
            logger.info("[Checks forward] module configuration, forward checks for all hosts/services")
        # Internal cache for host entities id
        self.cache_host_entities_id = {}
    except AttributeError:
        logger.error("[Checks forward] The module is missing a property, check module configuration")
        raise
def __init__(self, modconf, host, port, user, password, virtual_host, exchange_name):
    """Keep the AMQP broker connection and exchange settings."""
    BaseModule.__init__(self, modconf)
    self.host = host
    self.port = port
    self.user = user
    self.password = password
    self.virtual_host = virtual_host
    self.exchange_name = exchange_name
def __init__(self, modconf, path, archive_path):
    """Store the log paths and make sure the archive directory exists.

    Creates `archive_path` when it is missing.
    """
    BaseModule.__init__(self, modconf)
    self.path = path
    self.archive_path = archive_path
    # Narrowed from a bare except: os.stat only raises OSError for a missing
    # path, and a bare except would also swallow KeyboardInterrupt/SystemExit.
    try:
        os.stat(archive_path)
    except OSError:
        os.mkdir(archive_path)
def __init__(self, mod_conf, api_key, secret, default_template, ignore_tag, regions, poller_tag):
    """Store the cloud API credentials and discovery filters; connections come later."""
    BaseModule.__init__(self, mod_conf)
    self.api_key = api_key
    self.secret = secret
    self.default_template = default_template
    self.ignore_tag = ignore_tag
    self.regions = regions
    self.poller_tag = poller_tag
    # One connection per region, filled in later
    self.cons = []
def __init__(self, modconf):
    """Read the GLPI webservices endpoint used for WS authentication."""
    BaseModule.__init__(self, modconf)
    logger.info("[auth-ws-glpi] Trying to initialize the Glpi WS authentication module")
    try:
        self.uri = getattr(modconf, 'uri', 'http://localhost/glpi/plugins/webservices/xmlrpc.php')
    except AttributeError:
        logger.error("[auth-ws-glpi] The module is missing a property, check module configuration in auth-ws-glpi.cfg")
        raise
def __init__(self, mod_conf, uri, database, username, password):
    """Keep the MongoDB uri, database and credentials; the connection comes later."""
    BaseModule.__init__(self, mod_conf)
    self.uri = uri
    self.database = database
    self.username = username
    self.password = password
    # Some used variable init
    self.con = None
    self.db = None
def __init__(self, modconf, path, mode, template):
    """Store the output path/mode and unescape \\t and \\n in the template."""
    BaseModule.__init__(self, modconf)
    self.path = path
    self.mode = mode
    self.template = template
    # Make some raw change: turn literal \t / \n sequences into real characters
    self.template = self.template.replace(r'\t', '\t')
    self.template = self.template.replace(r'\n', '\n')
def __init__(self, modconf, host, port, encryption_method, password, max_packet_age, check_future_packet):
    """Record the NSCA endpoint, crypto settings and packet-age policy."""
    BaseModule.__init__(self, modconf)
    self.host = host
    self.port = port
    self.encryption_method = encryption_method
    self.password = password
    # Deterministic RNG seeded by the shared password
    self.rng = random.Random(password)
    self.max_packet_age = max_packet_age
    self.check_future_packet = check_future_packet
def __init__(self, mod_conf, host, login, password, database, reqhosts, reqservices, reqcontacts):
    """Keep the DB credentials and the host/service/contact request lists."""
    BaseModule.__init__(self, mod_conf)
    self.host = host
    self.login = login
    self.password = password
    self.database = database
    self.reqhosts = reqhosts
    self.reqservices = reqservices
    self.reqcontacts = reqcontacts
def __init__(self, modconf):
    """Initialise the hokuto log cacher; requires db_path in the module config.

    Raises Exception when no database path is configured.
    """
    BaseModule.__init__(self, modconf)
    logger.debug('[hokuto-log-cacher] Initializing')
    self.regen = Regenerator()  # TODO: Keep this ? seems useless
    self.db_path = getattr(modconf, 'db_path', None)
    if self.db_path is None:
        logger.error(
            '[hokuto-log-cacher] No database path configured. Please specify one with db_path in the module configuration file.'
        )
        # Bug fix: a bare `raise` outside an except handler has no active
        # exception to re-raise; raise an explicit one instead.
        raise Exception('[hokuto-log-cacher] No database path configured. Please specify one with db_path in the module configuration file.')
def __init__(self, modconf):
    """Read the Ws_arbiter listener credentials and bind address from the config."""
    BaseModule.__init__(self, modconf)
    try:
        self.username = getattr(modconf, 'username', 'anonymous')
        self.password = getattr(modconf, 'password', '')
        self.port = int(getattr(modconf, 'port', '7760'))
        self.host = getattr(modconf, 'host', '0.0.0.0')
    except AttributeError:
        logger.error("[Ws_arbiter] The module is missing a property, check module declaration in shinken-specific.cfg")
        raise
def __init__(self, modconf, uri, database, replica_set):
    """Keep the MongoDB retention settings; refuse to start on a too-old pymongo."""
    BaseModule.__init__(self, modconf)
    self.uri = uri
    self.database = database
    self.replica_set = replica_set
    self.max_workers = 4
    # Older versions don't handle replicasets and don't have the fsync option
    if version < 2:
        logger.error('[MongodbRetention] Your pymongo lib is too old. '
                     'Please install at least a 2.x+ version.')
        return None
def __init__(self, mod_conf, ip_range, prop, value, method, ignore_hosts=None):
    """Tag hosts in an IP range with a property value.

    `ignore_hosts` is a ", "-separated string of host names to skip.
    """
    BaseModule.__init__(self, mod_conf)
    self.ip_range = IP(ip_range)
    self.property = prop
    self.value = value
    self.method = method
    if ignore_hosts:
        # NOTE: the separator is comma-space, per the module's config format
        self.ignore_hosts = ignore_hosts.split(', ')
        logger.debug("[IP Tag] Ignoring hosts : %s" % self.ignore_hosts)
    else:
        self.ignore_hosts = []
def __init__(self, mod_conf):
    """Read the SNMP booster configuration (datasource file and Redis backend)."""
    BaseModule.__init__(self, mod_conf)
    self.version = "1.99.7"
    self.datasource_file = getattr(mod_conf, 'datasource', None)
    self.db_host = getattr(mod_conf, 'db_host', "127.0.0.1")
    self.db_port = to_int(getattr(mod_conf, 'db_port', 6379))
    self.db_name = getattr(mod_conf, 'db_name', 'booster_snmp')
    self.loaded_by = getattr(mod_conf, 'loaded_by', None)
    # Filled in later, during init
    self.datasource = None
    self.db_client = None
    self.i_am_dying = False
def __init__(self, modconf, uri, database, replica_set):
    """Keep the MongoDB retention settings; replica sets need a recent pymongo."""
    BaseModule.__init__(self, modconf)
    self.uri = uri
    self.database = database
    self.replica_set = replica_set
    if self.replica_set and not ReplicaSetConnection:
        logger.error('[MongodbRetention] Can not initialize module with '
                     'replica_set because your pymongo lib is too old. '
                     'Please install it with a 2.x+ version from '
                     'https://github.com/mongodb/mongo-python-driver/downloads')
        return None
def __init__(self, mod_conf, path, prop, value, method, ignore_hosts=None):
    """Tag hosts listed in a file with a property value.

    `ignore_hosts` is a ", "-separated string of host names to skip.
    """
    BaseModule.__init__(self, mod_conf)
    self.path = path
    self.property = prop
    self.value = value
    self.method = method
    if ignore_hosts:
        # NOTE: the separator is comma-space, per the module's config format
        self.ignore_hosts = ignore_hosts.split(', ')
        logger.debug("[File Tag] Ignoring hosts : %s" % self.ignore_hosts)
    else:
        self.ignore_hosts = []
    self.hosts = []
def __init__(self, modconf):
    """Read the LDAP connection settings; the module is inactive without an uri."""
    BaseModule.__init__(self, modconf)
    self.ldap_uri = getattr(modconf, 'ldap_uri', None)
    self.username = getattr(modconf, 'username', '')
    self.password = getattr(modconf, 'password', '')
    self.basedn = getattr(modconf, 'basedn', '')
    # If we got no uri, we bailout...
    self.active = bool(self.ldap_uri)
    self.con = None
def __init__(self, modconf, mapping_file, mapping_command, mapping_command_interval, mapping_command_timeout, in_debug=False):
    """Keep the host-mapping file settings and the external refresh command state."""
    BaseModule.__init__(self, modconf)
    self.mapping_file = mapping_file
    self.last_update = 0
    self.last_mapping = set()
    self.mapping = set()
    # The external process part
    self.mapping_command = mapping_command
    self.mapping_command_interval = mapping_command_interval
    self.last_cmd_launch = 0
    self.process = None
    self.mapping_command_timeout = mapping_command_timeout
    self.in_debug = in_debug
def __init__(self, modconf, host, port, buffer_length, payload_length, encryption_method, password, max_packet_age, check_future_packet, backlog):
    """Record the NSCA server socket, packet and crypto settings, then log them."""
    BaseModule.__init__(self, modconf)
    self.host = host
    self.port = port
    self.backlog = backlog
    self.buffer_length = buffer_length
    self.payload_length = payload_length
    self.encryption_method = encryption_method
    self.password = password
    # Deterministic RNG seeded by the shared password
    self.rng = random.Random(password)
    self.max_packet_age = max_packet_age
    self.check_future_packet = check_future_packet
    logger.info("[NSCA] configuration, allowed hosts : '%s'(%s), buffer length: %s, payload length: %s, encryption: %s, max packet age: %s, check future packet: %s, backlog: %d",
                self.host, self.port, self.buffer_length, self.payload_length, self.encryption_method, self.max_packet_age, self.check_future_packet, self.backlog)
def __init__(self, modconf, path, mode, template):
    """Store the output path/mode and normalise the template (unescape, final \\n)."""
    BaseModule.__init__(self, modconf)
    self.path = path
    self.mode = mode
    self.template = template
    # Make some raw change: turn literal \t / \n sequences into real characters
    self.template = self.template.replace(r'\t', '\t')
    self.template = self.template.replace(r'\n', '\n')
    # In Nagios it's said to force a return in line
    if not self.template.endswith('\n'):
        self.template += '\n'