def __set_monitor_list(self, section, key):
    """Init the monitored processes list.

    The list is defined in the Glances configuration file.
    """
    for l in range(1, self.__monitor_list_max_size + 1):
        value = {}
        key = "list_" + str(l) + "_"
        try:
            description = self.config.get_value(section, key + 'description')
            regex = self.config.get_value(section, key + 'regex')
            command = self.config.get_value(section, key + 'command')
            countmin = self.config.get_value(section, key + 'countmin')
            countmax = self.config.get_value(section, key + 'countmax')
        except Exception as e:
            logger.error("Cannot read monitored list: {0}".format(e))
        else:
            if description is not None and regex is not None:
                # Build the new item
                value["description"] = description
                try:
                    re.compile(regex)
                except Exception:
                    continue
                else:
                    value["regex"] = regex
                value["command"] = command
                value["countmin"] = countmin
                value["countmax"] = countmax
                value["count"] = None
                value["result"] = None
                # Add the item to the list
                self.__monitor_list.append(value)
def export(self, name, columns, points): """Write the points to the ES server.""" logger.debug("Export {} stats to ElasticSearch".format(name)) # Create DB input # https://elasticsearch-py.readthedocs.io/en/master/helpers.html actions = [] for c, p in zip(columns, points): dtnow = datetime.utcnow() action = { "_index": self.index, "_id": '{}.{}'.format(name,c), "_type": "glances", "_source": { "plugin": name, "metric": c, "value": str(p), "timestamp": dtnow.isoformat('T') } } logger.debug("Exporting the following object to elasticsearch: {}".format(action)) actions.append(action) # Write input to the ES index try: helpers.bulk(self.client, actions) except Exception as e: logger.error("Cannot export {} stats to ElasticSearch ({})".format(name, e))
def export(self, name, columns, points): """Write the points to the ES server.""" logger.debug("Export {} stats to ElasticSearch".format(name)) # Generate index name with the index field + current day index = '{}-{}'.format(self.index, datetime.utcnow().strftime("%Y.%m.%d")) # Create DB input # https://elasticsearch-py.readthedocs.io/en/master/helpers.html actions = [] dtnow = datetime.utcnow().isoformat('T') action = { "_index": index, "_id": '{}.{}'.format(name, dtnow), "_type": 'glances-{}'.format(name), "_source": { "plugin": name, "timestamp": dtnow } } action['_source'].update(zip(columns, [str(p) for p in points])) actions.append(action) logger.debug( "Exporting the following object to elasticsearch: {}".format( action)) # Write input to the ES index try: helpers.bulk(self.client, actions) except Exception as e: logger.error("Cannot export {} stats to ElasticSearch ({})".format( name, e))
def __set_folder_list(self, section):
    """Init the monitored folder list.

    The list is defined in the Glances configuration file.
    """
    for l in range(1, self.__folder_list_max_size + 1):
        value = {}
        key = 'folder_' + str(l) + '_'

        # Path is mandatory
        try:
            value['path'] = self.config.get_value(section, key + 'path')
        except Exception as e:
            logger.error("Cannot read folder list: {0}".format(e))
            continue
        if value['path'] is None:
            continue

        # Optional conf keys
        for i in ['careful', 'warning', 'critical']:
            try:
                value[i] = self.config.get_value(section, key + i)
            except Exception:
                value[i] = None
                logger.debug("No {0} threshold for folder {1}".format(i, value["path"]))

        # Add the item to the list
        self.__folder_list.append(value)
def filter(self, value): """Set the filter (as a sting) and compute the regular expression A filter could be one of the following: - python > Process name of cmd start with python - .*python.* > Process name of cmd contain python - username:nicolargo > Process of nicolargo user """ self._filter_input = value if value is None: self._filter = None self._filter_key = None else: new_filter = value.split(':') if len(new_filter) == 1: self._filter = new_filter[0] self._filter_key = None else: self._filter = new_filter[1] self._filter_key = new_filter[0] self._filter_re = None if self.filter is not None: logger.info("Set filter to {} on key {}".format( self.filter, self.filter_key)) # Compute the regular expression try: self._filter_re = re.compile(self.filter) logger.debug("Filter regex compilation OK: {}".format( self.filter)) except Exception as e: logger.error("Cannot compile filter regex: {} ({})".format( self.filter, e)) self._filter = None self._filter_re = None self._filter_key = None
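# A minimal standalone sketch of the "key:regex" parsing done by the setter
# above; parse_filter is a hypothetical helper written for illustration, not
# part of Glances:
import re

def parse_filter(value):
    """Split an input such as 'username:nicolargo' into (key, compiled regex)."""
    parts = value.split(':')
    if len(parts) == 1:
        key, pattern = None, parts[0]
    else:
        key, pattern = parts[0], parts[1]
    return key, re.compile(pattern)

key, regex = parse_filter('username:nicolargo')
assert key == 'username' and regex.match('nicolargo') is not None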
def update(self, stats, duration=3):
    """Display stats to stdout.

    Refresh every 'duration' seconds.
    """
    for plugin, attribute in self.plugins_list:
        # Check that the plugin exists and is enabled
        if plugin in stats.getPluginsList() and \
                stats.get_plugin(plugin).is_enable():
            stat = stats.get_plugin(plugin).get_export()
        else:
            continue
        # Display stats
        if attribute is not None:
            # With attribute
            try:
                print("{}.{}: {}".format(plugin, attribute, stat[attribute]))
            except KeyError as err:
                logger.error("Cannot display stat {}.{} ({})".format(plugin, attribute, err))
        else:
            # Without attribute
            print("{}: {}".format(plugin, stat))

    # Wait until the next refresh
    if duration > 0:
        time.sleep(duration)
def __connect_old(self, version): """Connect to the Docker server with the 'old school' method""" # Glances is compatible with both API 2.0 and <2.0 # (thanks to the @bacondropped patch) if hasattr(docker, 'APIClient'): # Correct issue #1000 for API 2.0 init_docker = docker.APIClient elif hasattr(docker, 'Client'): # < API 2.0 init_docker = docker.Client else: # Can not found init method (new API version ?) logger.error("docker plugin - Can not found any way to init the Docker API") return None # Init connection to the Docker API try: if WINDOWS: url = 'npipe:////./pipe/docker_engine' else: url = 'unix://var/run/docker.sock' if version is None: ret = init_docker(base_url=url) else: ret = init_docker(base_url=url, version=version) except NameError: # docker lib not found return None return ret
def export(self, name, columns, points): """Write the points to the ES server.""" logger.debug("Export {} stats to ElasticSearch".format(name)) # Create DB input # https://elasticsearch-py.readthedocs.io/en/master/helpers.html actions = [] for c, p in zip(columns, points): action = { "_index": self.index, "_type": name, "_id": c, "_source": { "value": str(p), "timestamp": datetime.now() } } actions.append(action) # Write input to the ES index try: helpers.bulk(self.client, actions) except Exception as e: logger.error("Cannot export {} stats to ElasticSearch ({})".format( name, e))
def __init__(self, hostname, args=None):
    if zeroconf_tag:
        zeroconf_bind_address = args.bind_address
        try:
            self.zeroconf = Zeroconf()
        except socket.error as e:
            logger.error("Cannot start zeroconf: {0}".format(e))

        # XXX *BSDs: Segmentation fault (core dumped)
        # -- https://bitbucket.org/al45tair/netifaces/issues/15
        if not BSD:
            try:
                # -B @ overwrite the dynamic IPv4 choice
                if zeroconf_bind_address == '0.0.0.0':
                    zeroconf_bind_address = self.find_active_ip_address()
            except KeyError:
                # Issue #528 (no network interface available)
                pass

        print("Announce the Glances server on the LAN (using {0} IP address)".format(zeroconf_bind_address))

        self.info = ServiceInfo(
            zeroconf_type,
            '{0}:{1}.{2}'.format(hostname, args.port, zeroconf_type),
            address=socket.inet_aton(zeroconf_bind_address),
            port=args.port,
            weight=0,
            priority=0,
            properties={},
            server=hostname)
        self.zeroconf.register_service(self.info)
    else:
        logger.error("Cannot announce Glances server on the network: zeroconf library not found.")
def __connect_old(self, version): """Connect to the Docker server with the 'old school' method""" # Glances is compatible with both API 2.0 and <2.0 # (thanks to the @bacondropped patch) if hasattr(docker, 'APIClient'): # Correct issue #1000 for API 2.0 init_docker = docker.APIClient elif hasattr(docker, 'Client'): # < API 2.0 init_docker = docker.Client else: # Can not found init method (new API version ?) logger.error("Can not found any way to init the Docker API") return None # Init connection to the Docker API try: if version is None: ret = init_docker(base_url='unix://var/run/docker.sock') else: ret = init_docker(base_url='unix://var/run/docker.sock', version=version) except NameError: # docker lib not found return None return ret
def run(self, stat_name, criticity, commands, mustache_dict=None):
    """Run the commands (in background).

    - stat_name: plugin_name (+ header)
    - criticity: criticity of the trigger
    - commands: a list of command lines with optional {{mustache}} tags
    - mustache_dict: plugin stats (can be used within {{mustache}})

    Return True if the commands have been run.
    """
    if self.get(stat_name) == criticity or not self.start_timer.finished():
        # Action already executed => Exit
        return False

    logger.debug("Run action {} for {} ({}) with stats {}".format(
        commands, stat_name, criticity, mustache_dict))

    # Run all actions in background
    for cmd in commands:
        # Replace {{arg}} with values from the dict (mustache templating)
        if pystache_tag:
            cmd_full = pystache.render(cmd, mustache_dict)
        else:
            cmd_full = cmd
        # Execute the action
        logger.info("Action triggered for {} ({}): {}".format(stat_name, criticity, cmd_full))
        logger.debug("Stats value for the trigger: {}".format(mustache_dict))
        try:
            Popen(cmd_full, shell=True)
        except OSError as e:
            logger.error("Can't execute the action ({})".format(e))

    self.set(stat_name, criticity)
    return True
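# A minimal sketch of the mustache substitution used above, assuming the
# pystache package is installed; the command string and stats dict are
# illustrative, not taken from a real Glances trigger:
import pystache

cmd = 'notify-send "CPU user is at {{user}}%"'
stats = {'user': 97.1}
print(pystache.render(cmd, stats))  # notify-send "CPU user is at 97.1%"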
def export(self, name, columns, points): """Write the points to the ZeroMQ server.""" logger.debug("Export {} stats to ZeroMQ".format(name)) # Create DB input data = dict(zip(columns, points)) # Do not publish empty stats if data == {}: return False # Glances envelopes the stats in a publish message with two frames: # - First frame containing the following prefix (STRING) # - Second frame with the Glances plugin name (STRING) # - Third frame with the Glances plugin stats (JSON) message = [b(self.prefix), b(name), asbytes(json.dumps(data))] # Write data to the ZeroMQ bus # Result can be view: tcp://host:port try: self.client.send_multipart(message) except Exception as e: logger.error("Cannot export {} stats to ZeroMQ ({})".format( name, e)) return True
def export(self, name, columns, points): """Write the points to the InfluxDB server.""" logger.debug("Export {} stats to InfluxDB".format(name)) # Manage prefix if self.prefix is not None: name = self.prefix + '.' + name # Create DB input if self.version == INFLUXDB_08: data = [{'name': name, 'columns': columns, 'points': [points]}] else: # Convert all int to float (mandatory for InfluxDB>0.9.2) # Correct issue#750 and issue#749 for i, _ in enumerate(points): try: points[i] = float(points[i]) except (TypeError, ValueError) as e: logger.debug("InfluxDB error during stat convertion %s=%s (%s)" % (columns[i], points[i], e)) data = [{'measurement': name, 'tags': self.parse_tags(self.tags), 'fields': dict(zip(columns, points))}] # Write input to the InfluxDB database try: self.client.write_points(data) except Exception as e: logger.error("Cannot export {} stats to InfluxDB ({})".format(name, e))
def export(self, name, columns, points): """Write the points to the ZeroMQ server.""" logger.debug("Export {} stats to ZeroMQ".format(name)) # Create DB input data = dict(zip(columns, points)) # Do not publish empty stats if data == {}: return False # Glances envelopes the stats in a publish message with two frames: # - First frame containing the following prefix (STRING) # - Second frame with the Glances plugin name (STRING) # - Third frame with the Glances plugin stats (JSON) message = [b(self.prefix), b(name), asbytes(json.dumps(data))] # Write data to the ZeroMQ bus # Result can be view: tcp://host:port try: self.client.send_multipart(message) except Exception as e: logger.error("Cannot export {} stats to ZeroMQ ({})".format(name, e)) return True
def _load_plugin(self, plugin_script, args=None, config=None):
    """Load the plugin (script), init it and add it to the _plugins dict."""
    # The key is the plugin name
    # for example, the file glances_xxx.py
    # generates self._plugins_list["xxx"] = ...
    name = plugin_script[len(self.header):-3].lower()
    try:
        # Import the plugin
        plugin = __import__(plugin_script[:-3])
        # Init and add the plugin to the dictionary
        if name in ('help', 'amps', 'ports', 'folders'):
            self._plugins[name] = plugin.Plugin(args=args, config=config)
        else:
            self._plugins[name] = plugin.Plugin(args=args)
    except Exception as e:
        # If a plugin can not be loaded, display a critical message
        # on the console but do not crash
        logger.critical("Error while initializing the {} plugin ({})".format(name, e))
        logger.error(traceback.format_exc())
        # Disable the plugin (disable_<name> set to True means disabled)
        if args is not None:
            setattr(args, 'disable_' + name, True)
    else:
        # Set disable_<name> to False by default
        if args is not None:
            setattr(args, 'disable_' + name, getattr(args, 'disable_' + name, False))
def get_stats_history(self, item=None, nb=0):
    """Return the stats history as a JSON object (dict or None).

    Limit to the last nb items (all if nb=0).
    """
    s = self.get_json_history(nb=nb)

    if item is None:
        return self._json_dumps(s)

    if isinstance(s, dict):
        try:
            return self._json_dumps({item: s[item]})
        except KeyError as e:
            logger.error("Cannot get item history {0} ({1})".format(item, e))
            return None
    elif isinstance(s, list):
        try:
            # Source:
            # http://stackoverflow.com/questions/4573875/python-get-index-of-dictionary-item-in-list
            # list() is needed because map() returns an iterator on Python 3,
            # which is not JSON serializable
            return self._json_dumps({item: list(map(itemgetter(item), s))})
        except (KeyError, ValueError) as e:
            logger.error("Cannot get item history {0} ({1})".format(item, e))
            return None
    else:
        return None
def log_and_exit(self, msg=''):
    """Log and exit."""
    if not self.return_to_browser:
        logger.critical(msg)
        sys.exit(2)
    else:
        logger.error(msg)
def __init__(self): """Init sensors stats.""" # Temperatures self.init_temp = False self.stemps = {} try: # psutil>=5.1.0 is required self.stemps = psutil.sensors_temperatures() except AttributeError: logger.warning("PsUtil 5.1.0 or higher is needed to grab temperatures sensors") except OSError as e: # FreeBSD: If oid 'hw.acpi.battery' not present, Glances wont start #1055 logger.error("Can not grab temperatures sensors ({})".format(e)) else: self.init_temp = True # Fans self.init_fan = False self.sfans = {} try: # psutil>=5.2.0 is required self.sfans = psutil.sensors_fans() except AttributeError: logger.warning("PsUtil 5.2.0 or higher is needed to grab fans sensors") except OSError as e: logger.error("Can not grab fans sensors ({})".format(e)) else: self.init_fan = True # !!! Disable Fan: High CPU consumption is PSUtil 5.2.0 # Delete the following line when corrected self.init_fan = False # Init the stats self.reset()
def export(self, name, columns, points): """Write the points to the InfluxDB server.""" logger.debug("Export {} stats to InfluxDB".format(name)) # Manage prefix if self.prefix is not None: name = self.prefix + '.' + name # Create DB input if self.version == INFLUXDB_08: data = [{'name': name, 'columns': columns, 'points': [points]}] else: # Convert all int to float (mandatory for InfluxDB>0.9.2) # Correct issue#750 and issue#749 for i, _ in enumerate(points): try: points[i] = float(points[i]) except (TypeError, ValueError) as e: logger.debug( "InfluxDB error during stat convertion %s=%s (%s)" % (columns[i], points[i], e)) data = [{ 'measurement': name, 'tags': self.parse_tags(self.tags), 'fields': dict(zip(columns, points)) }] # Write input to the InfluxDB database try: self.client.write_points(data) except Exception as e: logger.error("Cannot export {} stats to InfluxDB ({})".format( name, e))
def connect(self): """Connect to the Docker server.""" try: ret = docker.from_env() except Exception as e: logger.error("docker plugin - Can not connect to Docker ({})".format(e)) ret = None return ret
def load(self, config):
    """Load the server list from the configuration file."""
    server_list = []

    if config is None:
        logger.debug("No configuration file available. Cannot load server list.")
    elif not config.has_section(self._section):
        logger.warning("No [%s] section in the configuration file. Cannot load server list." % self._section)
    else:
        logger.info("Start reading the [%s] section in the configuration file" % self._section)
        for i in range(1, 256):
            new_server = {}
            postfix = 'server_%s_' % str(i)
            # Read the server name (mandatory)
            for s in ['name', 'port', 'alias']:
                new_server[s] = config.get_value(self._section, '%s%s' % (postfix, s))
            if new_server['name'] is not None:
                # Manage optional information
                if new_server['port'] is None:
                    new_server['port'] = '61209'
                new_server['username'] = '******'
                # By default, try an empty (i.e. no) password
                new_server['password'] = ''
                try:
                    new_server['ip'] = gethostbyname(new_server['name'])
                except gaierror as e:
                    logger.error("Cannot get IP address for server %s (%s)" % (new_server['name'], e))
                    continue
                new_server['key'] = new_server['name'] + ':' + new_server['port']

                # Default status is 'UNKNOWN'
                new_server['status'] = 'UNKNOWN'

                # Server type is 'STATIC'
                new_server['type'] = 'STATIC'

                # Add the server to the list
                logger.debug("Add server %s to the static list" % new_server['name'])
                server_list.append(new_server)

        # Server list loaded
        logger.info("%s server(s) loaded from the configuration file" % len(server_list))
        logger.debug("Static server list: %s" % server_list)

    return server_list
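# Hypothetical configuration snippet that the loader above would parse; the
# actual section name comes from self._section and the values are made up:
#
#   [serverlist]
#   server_1_name=xps
#   server_1_alias=xps.local
#   server_1_port=61209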
def get_item_key(self, item):
    """Return the value of the item 'key'."""
    try:
        ret = item[item['key']]
    except KeyError:
        # Return None instead of leaving ret unbound (which would raise
        # a NameError below)
        logger.error("No 'key' available in {}".format(item))
        return None
    if isinstance(ret, list):
        return ret[0]
    else:
        return ret
def __init__(self, bind_address, bind_port=61209, requestHandler=GlancesXMLRPCHandler): try: self.address_family = socket.getaddrinfo(bind_address, bind_port)[0][0] except socket.error as e: logger.error("Couldn't open socket: {}".format(e)) sys.exit(1) super(GlancesXMLRPCServer, self).__init__((bind_address, bind_port), requestHandler)
def _save_cache(self):
    """Save data to the cache file."""
    # Create the cache directory
    safe_makedirs(self.cache_dir)

    # Create/overwrite the cache file
    try:
        with open(self.cache_file, 'wb') as f:
            pickle.dump(self.data, f)
    except Exception as e:
        logger.error("Cannot write version to cache file {} ({})".format(self.cache_file, e))
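# Hypothetical load-side counterpart of the cache writer above, written as a
# sketch (same attribute names and module-level logger assumed; this method
# is illustrative, not Glances code):
import pickle

def _load_cache(self):
    """Return the cached data, or an empty dict if the file is unreadable."""
    try:
        with open(self.cache_file, 'rb') as f:
            return pickle.load(f)
    except Exception as e:
        logger.error("Cannot read cache file {} ({})".format(self.cache_file, e))
        return {}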
def update(self): """Update the stats using SNMP.""" # For each plugins, call the update method for p in self._plugins: # Set the input method to SNMP self._plugins[p].input_method = 'snmp' self._plugins[p].short_system_name = self.system_name try: self._plugins[p].update() except Exception as e: logger.error("Update {0} failed: {1}".format(p, e))
def update(self, stats): """Update stats in the CSV output file.""" # Get the stats all_stats = stats.getAllExportsAsDict( plugin_list=self.plugins_to_export()) # Init data with timestamp (issue#708) if self.first_line: csv_header = ['timestamp'] csv_data = [time.strftime('%Y-%m-%d %H:%M:%S')] # Loop over plugins to export for plugin in self.plugins_to_export(): if isinstance(all_stats[plugin], list): for stat in all_stats[plugin]: # First line: header if self.first_line: csv_header += ('{}_{}_{}'.format( plugin, self.get_item_key(stat), item) for item in stat) # Others lines: stats csv_data += itervalues(stat) elif isinstance(all_stats[plugin], dict): # First line: header if self.first_line: fieldnames = iterkeys(all_stats[plugin]) csv_header += ('{}_{}'.format(plugin, fieldname) for fieldname in fieldnames) # Others lines: stats csv_data += itervalues(all_stats[plugin]) # Export to CSV # Manage header if self.first_line: if self.old_header is None: # New file, write the header on top on the CSV file self.writer.writerow(csv_header) # File already exist, check if header are compatible if self.old_header != csv_header: # Header are differents, log an error and do not write data logger.error( "Cannot append data to existing CSV file. Headers are differents." ) logger.debug("Old header: {}".format(self.old_header)) logger.debug("New header: {}".format(csv_header)) else: # Header are equals, ready to write data self.old_header = None # Only do this once self.first_line = False # Manage data if self.old_header is None: self.writer.writerow(csv_data) self.csv_file.flush()
def update(self): """Update the stats using SNMP.""" # For each plugins, call the update method for p in self._plugins: # Set the input method to SNMP self._plugins[p].input_method = 'snmp' self._plugins[p].short_system_name = self.system_name try: self._plugins[p].update() except Exception as e: logger.error("Update {} failed: {}".format(p, e))
def export(self, name, columns, points): """Export the stats to the Statsd server.""" for i in range(len(columns)): if not isinstance(points[i], Number): continue stat_name = '{}.{}'.format(name, columns[i]) stat_value = points[i] try: self.client.gauge(stat_name, stat_value) except Exception as e: logger.error("Can not export stats to Statsd (%s)" % e) logger.debug("Export {} stats to Statsd".format(name))
def remove_server(self, name): """Remove a server from the dict.""" for i in self._server_list: if i['key'] == name: try: self._server_list.remove(i) logger.debug("Remove server %s from the list" % name) logger.debug("Updated servers list (%s servers): %s" % (len(self._server_list), self._server_list)) except ValueError: logger.error("Cannot remove server %s from the list" % name)
def export(self, name, columns, points): """Write the points in Riemann.""" for i in range(len(columns)): if not isinstance(points[i], Number): continue else: data = {'host': self.hostname, 'service': name + " " + columns[i], 'metric': points[i]} logger.debug(data) try: self.client.send(data) except Exception as e: logger.error("Cannot export stats to Riemann (%s)" % e)
def connect(self, version=None):
    """Connect to the Docker server."""
    if hasattr(docker, 'from_env') and version is not None:
        # Connect to Docker using the default socket or
        # the configuration in your environment
        ret = docker.from_env()
    else:
        ret = self.__connect_old(version=version)

    # Check the server connection with the version() method
    try:
        ret.version()
    except requests.exceptions.ConnectionError as e:
        # Connection error (Docker not detected)
        # Leave this message in debug mode
        logger.debug("docker plugin - Can't connect to the Docker server (%s)" % e)
        return None
    except docker.errors.APIError as e:
        if version is None:
            # API error (version mismatch?)
            logger.debug("docker plugin - Docker API error (%s)" % e)
            # Try the connection with the server version
            version = re.search(r'(?:server API version|server)\:\ (.*)\)\".*\)', str(e))
            if version:
                logger.debug("docker plugin - Try connection with Docker API version %s" % version.group(1))
                ret = self.connect(version=version.group(1))
            else:
                logger.debug("docker plugin - Cannot retrieve the Docker server version")
                ret = None
        else:
            # API error
            logger.error("docker plugin - Docker API error (%s)" % e)
            ret = None
    except Exception as e:
        # Other exceptions...
        # Connection error (Docker not detected)
        logger.error("docker plugin - Can't connect to the Docker server (%s)" % e)
        ret = None

    # Log an info if the Docker plugin is disabled
    if ret is None:
        logger.debug("docker plugin - Docker plugin is disabled because an error has been detected")

    return ret
def export(self, name, columns, points): """Export the stats to the Statsd server.""" for i in range(len(columns)): if not isinstance(points[i], Number): continue stat_name = '{0}.{1}'.format(name, columns[i]) stat_value = points[i] try: self.client.gauge(stat_name, stat_value) except Exception as e: logger.error("Can not export stats to Statsd (%s)" % e) logger.debug("Export {0} stats to Statsd".format(name))
def export(self, name, columns, points): """Write the points in Riemann.""" for i in range(len(columns)): if not isinstance(points[i], Number): continue else: data = {'host': self.hostname, 'service': name + " " + columns[i], 'metric': points[i]} logger.debug(data) try: self.client.send(data) except Exception as e: logger.error("Can not export stats to Riemann (%s)" % e)
def process_filter(self, value):
    """Set the process filter."""
    logger.info("Set process filter to {0}".format(value))
    self._process_filter = value
    if value is not None:
        try:
            self._process_filter_re = re.compile(value)
            logger.debug("Process filter regex compilation OK: {0}".format(self.process_filter))
        except Exception:
            logger.error("Cannot compile process filter regex: {0}".format(value))
            self._process_filter_re = None
    else:
        self._process_filter_re = None
def export(self, name, columns, points): """Write the points to the InfluxDB server.""" # Manage prefix if self.prefix is not None: name = self.prefix + '.' + name # Write input to the InfluxDB database try: self.client.write_points(self._normalize(name, columns, points)) except Exception as e: logger.error("Cannot export {} stats to InfluxDB ({})".format(name, e)) else: logger.debug("Export {} stats to InfluxDB".format(name))
def __init__(self, config):
    """Init the folder list from the configuration file, if it exists."""
    self.config = config

    if self.config is not None and self.config.has_section('folders'):
        if scandir_tag:
            # Process monitoring list
            logger.debug("Folder list configuration detected")
            self.__set_folder_list('folders')
        else:
            logger.error('Scandir not found. Please use Python 3.5+ or install the scandir lib')
    else:
        self.__folder_list = []
def export(self, name, columns, points): """Export the stats to the Statsd server.""" for i in range(len(columns)): if not isinstance(points[i], Number): continue stat_name = '{}.{}.{}'.format(self.prefix, name, columns[i]) stat_value = points[i] tags = self.parse_tags(self.tags) try: self.client.send(stat_name, stat_value, **tags) except Exception as e: logger.error("Can not export stats %s to OpenTSDB (%s)" % (name, e)) logger.debug("Export {} stats to OpenTSDB".format(name))
def _init_history(self): """Init the history option.""" self.reset_history_tag = False self.graph_tag = False if self.args.export_graph: logger.info('Export graphs function enabled with output path %s' % self.args.path_graph) from glances.exports.graph import GlancesGraph self.glances_graph = GlancesGraph(self.args.path_graph) if not self.glances_graph.graph_enabled(): self.args.export_graph = False logger.error('Export graphs disabled')
def save_password(self, hashed_password):
    """Save the hashed password to the Glances folder."""
    # Check if the Glances folder already exists
    if not os.path.exists(self.password_path):
        # Create the Glances folder
        try:
            os.makedirs(self.password_path)
        except OSError as e:
            logger.error("Cannot create Glances directory: {0}".format(e))
            return

    # Create/overwrite the password file
    with open(self.password_filepath, 'wb') as file_pwd:
        file_pwd.write(b(hashed_password))
def _init_history(self):
    """Init the history option."""
    self.reset_history_tag = False
    self.history_tag = False
    if self.args.enable_history:
        logger.info('Stats history enabled with output path %s' % self.args.path_history)
        from glances.exports.glances_history import GlancesHistory
        self.glances_history = GlancesHistory(self.args.path_history)
        if not self.glances_history.graph_enabled():
            self.args.enable_history = False
            logger.error('Stats history disabled because Matplotlib is not installed')
def export(self, name, columns, points): """Write the points in RabbitMQ.""" data = ('hostname=' + self.hostname + ', name=' + name + ', dateinfo=' + datetime.datetime.utcnow().isoformat()) for i in range(len(columns)): if not isinstance(points[i], Number): continue else: data += ", " + columns[i] + "=" + str(points[i]) logger.debug(data) try: self.client.basic_publish(exchange='', routing_key=self.queue, body=data) except Exception as e: logger.error("Can not export stats to RabbitMQ (%s)" % e)
def export(self, name, columns, points): """Write the points in RabbitMQ.""" data = ('hostname=' + self.hostname + ', name=' + name + ', dateinfo=' + datetime.datetime.utcnow().isoformat()) for i in range(len(columns)): if not isinstance(points[i], Number): continue else: data += ", " + columns[i] + "=" + str(points[i]) logger.debug(data) try: self.client.basic_publish(exchange='', routing_key=self.rabbitmq_queue, body=data) except Exception as e: logger.error("Can not export stats to RabbitMQ (%s)" % e)
def get_stats_value(self, item, value):
    """Return the stats object for a specific item=value in JSON format.

    Stats should be a list of dicts (processlist, network...).
    """
    if not isinstance(self.stats, list):
        return None
    else:
        if value.isdigit():
            value = int(value)
        try:
            return json.dumps({value: [i for i in self.stats if i[item] == value]})
        except (KeyError, ValueError) as e:
            logger.error("Cannot get item({0})=value({1}) ({2})".format(item, value, e))
            return None
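# Standalone sketch of the item=value lookup performed above; the stats list
# is made up for illustration:
import json

stats = [{'pid': 1234, 'name': 'python'}, {'pid': 4321, 'name': 'bash'}]
print(json.dumps({1234: [i for i in stats if i['pid'] == 1234]}))
# {"1234": [{"pid": 1234, "name": "python"}]}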
def export(self, name, columns, points): """Write the points to the kafka server.""" logger.debug("Export {} stats to Kafka".format(name)) # Create DB input data = dict(zip(columns, points)) # Send stats to the kafka topic # key=<plugin name> # value=JSON dict try: self.client.send(self.topic, key=name, value=data) except Exception as e: logger.error("Cannot export {} stats to Kafka ({})".format(name, e))
def __init__(self, args=None):
    if zeroconf_tag:
        logger.info("Init autodiscover mode (Zeroconf protocol)")
        try:
            self.zeroconf = Zeroconf()
        except socket.error as e:
            logger.error("Cannot start Zeroconf (%s)" % e)
            self.zeroconf_enable_tag = False
        else:
            self.listener = GlancesAutoDiscoverListener()
            self.browser = ServiceBrowser(self.zeroconf, zeroconf_type, self.listener)
            self.zeroconf_enable_tag = True
    else:
        logger.error("Cannot start autodiscover mode (Zeroconf lib is not installed)")
        self.zeroconf_enable_tag = False