def restart_switch(old_conf, new_conf):
    os.unlink(old_conf)
    os.rename(new_conf, old_conf)
    # because it is not called by circus in this case
    BashWrapper("before_start_step.switch.main")
    x = BashWrapper("_switch.stop")
    if not x:
        LOGGER.warning(x)

def restart_directory_observer(old_conf, new_conf):
    os.unlink(old_conf)
    os.rename(new_conf, old_conf)
    # because it is not called by circus in this case
    BashWrapper("before_start_directory_observer")
    x = BashWrapper("_directory_observer.stop")
    if not x:
        LOGGER.warning(x)

def get_plugin_info(name_or_filepath, mode="auto", plugins_base_dir=None):
    plugins_base_dir = _get_plugins_base_dir(plugins_base_dir)
    _assert_plugins_base_initialized(plugins_base_dir)
    res = {}
    if mode == "auto":
        mode = "name"
        if '/' in name_or_filepath or '.' in name_or_filepath:
            mode = "file"
        else:
            if os.path.isfile(name_or_filepath):
                mode = "file"
    if mode == "file":
        cmd = _get_rpm_cmd('-qi', '-p %s' % name_or_filepath,
                           plugins_base_dir=plugins_base_dir)
    elif mode == "name":
        if _is_dev_link_plugin(name_or_filepath,
                               plugins_base_dir=plugins_base_dir):
            res['metadatas'] = {}
            res['metadatas']['name'] = name_or_filepath
            res['metadatas']['release'] = 'dev_link'
            res['metadatas']['version'] = 'dev_link'
            res['raw_metadata_output'] = 'DEV LINK'
            res['raw_files_output'] = 'DEV LINK'
            res['files'] = []
            return res
        cmd = _get_rpm_cmd('-qi', name_or_filepath,
                           plugins_base_dir=plugins_base_dir)
    else:
        __get_logger().warning("unknown mode [%s]" % mode)
        return None
    metadata_output = BashWrapper(cmd)
    if not metadata_output:
        return None
    res['raw_metadata_output'] = metadata_output.stdout
    for line in metadata_output.stdout.split('\n'):
        tmp = line.strip().split(':', 1)
        if len(tmp) <= 1:
            continue
        name = tmp[0].strip().lower()
        value = tmp[1].strip()
        if 'metadatas' not in res:
            res['metadatas'] = {}
        res['metadatas'][name] = value
    if mode == "file":
        cmd = _get_rpm_cmd('-ql -p %s' % name_or_filepath,
                           plugins_base_dir=plugins_base_dir)
    else:
        cmd = _get_rpm_cmd('-ql %s' % name_or_filepath,
                           plugins_base_dir=plugins_base_dir)
    files_output = BashWrapper(cmd)
    if not files_output:
        return None
    res['files'] = [x.strip() for x in files_output.stdout.split('\n')]
    res['raw_files_output'] = files_output.stdout
    return res

def deploy_crontab(old_conf, new_conf):
    os.unlink(old_conf)
    os.rename(new_conf, old_conf)
    cmd = "_uninstall_crontab.sh"
    x = BashWrapper(cmd)
    if not x:
        LOGGER.warning(x)
    cmd = "deploycron_file '%s'" % old_conf
    x = BashWrapper(cmd)
    if not x:
        LOGGER.warning(x)

def get_stats():
    stats = {}
    results = BashWrapper(CMD)
    if not results:
        LOGGER.warning("can't execute %s: %s" % (CMD, results))
        return None
    try:
        processes = json.loads(results.stdout)
    except Exception:
        LOGGER.warning("can't parse %s output as JSON" % CMD)
        return None
    plugins = set([x['plugin'] for x in processes if x['plugin'] != ''])
    plugins.add('#monitoring#')
    if not IS_MONITORING_MODULE:
        plugins.add('#core#')
    for plugin in plugins:
        if plugin not in stats:
            stats[plugin] = {}
        for key in ('mem_percent', 'num_threads', 'cpu_percent', 'num_fds'):
            search_plugin = plugin if not plugin.startswith('#') else ''
            if plugin != '#monitoring#':
                stats[plugin][key] = sum([
                    x[key] for x in processes
                    if x['plugin'] == search_plugin
                    and not is_cmdline_monitoring(x['cmdline'])
                ])
            else:
                stats[plugin][key] = sum([
                    x[key] for x in processes
                    if x['plugin'] == search_plugin
                    and is_cmdline_monitoring(x['cmdline'])
                ])
    return stats

def transform(self, xaf):
    cmd = self.get_command(xaf.filepath)
    self.info("Calling %s ...", cmd)
    x = BashWrapper(cmd)
    if not x:
        self.warning("%s returned a bad return code: %i, details: %s",
                     cmd, x.code, x)
        return
    else:
        self.debug("%s returned a good return code, output: %s", cmd, x)
    if self.command_returning_path:
        paths = []
        lines = [tmp.strip() for tmp in x.stdout.split("\n")]
        for line in lines:
            if line.startswith("FILEPATH:"):
                path = line[len("FILEPATH:"):].strip()
                if not path.startswith('/'):
                    self.warning("returned path: %s does not start with / "
                                 "=> ignoring" % path)
                    return
                if not os.path.exists(path):
                    self.warning("returned path: %s does not exist "
                                 "=> ignoring" % path)
                    return
                self.debug("returned path = %s" % path)
                paths.append(path)
        return paths

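# Hedged sketch (not part of the original source): a minimal standalone command
# compatible with the "FILEPATH:" convention parsed by transform() above.
# The destination path is hypothetical, and receiving the incoming file path
# as the first argument is an assumption, not something the source guarantees.
import shutil
import sys

src = sys.argv[1]                      # incoming file path (assumed argument)
dst = "/tmp/transformed_output.dat"    # hypothetical destination path
shutil.copy(src, dst)
# transform() only keeps absolute paths that exist on disk
print("FILEPATH:%s" % dst)
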
def test_bash_wrapper2(self):
    x = BashWrapper("ls %s" % TEST_DIRECTORY)
    if not x:
        raise Exception("this exception must not be raised")
    self.assertTrue(x.code == 0)
    self.assertTrue(len(x.stdout) > 0)
    self.assertTrue(len(x.stderr) == 0)
    self.assertTrue(len("%s" % x) > 0)

def test_bash_wrapper(self):
    x = BashWrapper("ls /foo/bar")
    if x:
        raise Exception("this exception must not be raised")
    self.assertTrue(x.code != 0)
    self.assertTrue(len(x.stderr) > 0)
    self.assertTrue(len(x.stdout) == 0)
    self.assertTrue(len("%s" % x) > 0)

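# Minimal usage sketch (not from the original source), assuming mfutil is
# installed. It relies only on the behaviour exercised by the two tests above:
# a BashWrapper instance is truthy when the wrapped command exits with code 0,
# and exposes .code, .stdout and .stderr.
from mfutil import BashWrapper

x = BashWrapper("ls /tmp")
if x:
    # exit code 0: the command output is available on x.stdout
    print("ok (code=%i): %s" % (x.code, x.stdout))
else:
    # non-zero exit code: error details are available on x.stderr
    print("failed (code=%i): %s" % (x.code, x.stderr))
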
def _preuninstall_plugin(name, version, release, quiet=False):
    res = BashWrapper("_plugins.preuninstall %s %s %s" %
                      (name, version, release))
    if not res:
        if not quiet:
            __get_logger().warning("error during preuninstall: %s", res)
        return False
    return True

def _call(cmd):
    LOGGER.info("Calling %s..." % cmd)
    r = BashWrapper(cmd)
    if r.code != 0:
        msg = "Bad return code: %i from cmd: %s with output: %s" % \
            (r.code, cmd, str(r).replace("\n", " "))
        LOGGER.warning(msg)
        return False
    return True

def get_processes_to_kill():
    x = BashWrapper("list_metwork_processes.py --pids-only "
                    "--output-format=json")
    if not x:
        LOG.warning("can't execute: %s" % x)
        sys.exit(1)
    try:
        pids = json.loads(x.stdout)
    except Exception:
        LOG.warning("bad output: %s" % x)
        sys.exit(1)
    return pids

def _conditional_call(prefix, watcher_name, params=None):
    if watcher_name is not None:
        cmd = "%s_%s" % (prefix, watcher_name)
    else:
        cmd = prefix
    r = BashWrapper("which %s" % cmd)
    if r.code == 0:
        if params is not None:
            cmd = "%s %s" % (cmd, " ".join(params))
        return _call(cmd)
    else:
        return True

def _call(cmd):
    LOGGER.info("Calling %s..." % cmd)
    r = BashWrapper(cmd)
    if r.code == 200:
        # if the exit code is 200, we return False but without any errors
        # it can be used (for example) with before_signal_shell to block
        # signals and use a custom shutdown procedure
        # https://circus.readthedocs.io/en/latest/for-devs/writing-hooks/
        return False
    elif r.code != 0:
        msg = "Bad return code: %i from cmd: %s with output: %s" % \
            (r.code, cmd, str(r).replace("\n", " "))
        LOGGER.warning(msg)
        return False
    return True

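# Hedged sketch (hypothetical hook command, not part of the original source):
# a script that could be wired as a circus before_signal hook and exits with
# code 200, which _call() above treats as "return False without logging a
# warning", e.g. to block the signal and run a custom shutdown procedure.
import sys

# ...custom shutdown logic would go here...
sys.exit(200)
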
def get_stats():
    results = BashWrapper(CMD)
    if not results:
        LOGGER.warning("can't execute %s: %s" % (CMD, results))
        return None
    lines = results.stdout.splitlines()
    stats = {x: 0 for x in STATES_VALUES}
    for line in lines[1:]:
        tmp = line.strip().split()
        if tmp[0] not in STATES_KEYS:
            stats["tcp_none"] = stats.get("tcp_none") + 1
            continue
        stats[STATES[tmp[0]]] = stats.get(STATES[tmp[0]]) + 1
    stats["tcp_total"] = sum(stats.values())
    return stats

def get_stats2():
    stats = {}
    results = BashWrapper(CMD2)
    if not results:
        LOGGER.warning("can't execute %s: %s" % (CMD2, results))
        return None
    lines = results.stdout.splitlines()
    for line in lines:
        tmp = line.strip()
        for system_name, name in STATES2.items():
            if tmp.startswith(system_name):
                try:
                    stats[name] = int(tmp.split()[1])
                except Exception:
                    pass
                continue
    return stats

def get_stats():
    stats = {}
    results = BashWrapper(CMD)
    if not results:
        LOGGER.warning("can't execute %s: %s" % (CMD, results))
        return None
    try:
        processes = json.loads(results.stdout)
    except Exception:
        LOGGER.warning("can't parse %s output as JSON" % CMD)
        return None
    plugins = set(
        [x['plugin'] if x['plugin'] != '' else '#core#' for x in processes])
    for plugin in plugins:
        if plugin not in stats:
            stats[plugin] = {}
        for key in ('mem_percent', 'num_threads', 'cpu_percent', 'num_fds'):
            search_plugin = plugin if plugin != '#core#' else ''
            stats[plugin][key] = sum(
                [x[key] for x in processes if x['plugin'] == search_plugin])
    return stats

def is_dangerous_plugin(name, plugins_base_dir=None):
    """Display the result of the is_dangerous_plugin command.

    Display on the standard output (stdout) the result of the
    ``_plugins.is_dangerous`` command for a plugin.

    The ``_plugins.is_dangerous`` command displays warnings for "dangerous"
    plugins, i.e. plugins likely to have impacts on other modules and/or
    other plugins.

    Args:
        name: name of the plugin.
        plugins_base_dir (string): (optional) the plugin base directory path.
            If not set, the default plugins base directory path is used.

    """
    with PluginsBaseDir(plugins_base_dir):
        res = BashWrapper("_plugins.is_dangerous %s" % (name,))
        if not res:
            __get_logger().warning("error during %s", res)
            return
        if res.stdout and len(res.stdout) > 0:
            print(res.stdout)

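# Minimal usage sketch (not from the original source); the plugin name "foo"
# and the base directory path are hypothetical. The function prints the
# warnings emitted by _plugins.is_dangerous (if any) and returns nothing.
is_dangerous_plugin("foo")
is_dangerous_plugin("foo", plugins_base_dir="/path/to/plugins")
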
#!/usr/bin/env python3

import os
import json
from mfutil import BashWrapper

# List partitions of the system (and not block devices)
# see https://github.com/sysstat/sysstat/issues/185
cmd = "cat /proc/diskstats |awk '{print $3;}'"
partitions = []
output = BashWrapper(cmd)
if output:
    for dev in output.stdout.split():
        if not os.path.islink("/sys/block/%s" % dev):
            partitions.append(dev)
print(json.dumps(partitions))

def get_plugin_info(name_or_filepath, mode="auto", plugins_base_dir=None):
    """Get detailed information about a plugin.

    Args:
        name_or_filepath (string): name or file path of the plugin.
        mode (string):
            - "name": get information from the plugin name
              (name_or_filepath is the name of the plugin).
            - "file": get information from the plugin file
              (name_or_filepath is the plugin file path).
            - "auto": guess if the name_or_filepath parameter is the name
              or the file path of the plugin.
        plugins_base_dir (string): (optional) the plugin base directory path.
            If not set, the default plugins base directory path is used.

    Returns:
        (dict): dictionary containing plugin information.

    Raises:
        MFUtilPluginBaseNotInitialized: if the plugins base is not
            initialized.

    """
    plugins_base_dir = _get_plugins_base_dir(plugins_base_dir)
    _assert_plugins_base_initialized(plugins_base_dir)
    res = {}
    if mode == "auto":
        mode = "name"
        if '/' in name_or_filepath or '.' in name_or_filepath:
            mode = "file"
        else:
            if os.path.isfile(name_or_filepath):
                mode = "file"
    if mode == "file":
        cmd = _get_rpm_cmd('-qi', '-p %s' % name_or_filepath,
                           plugins_base_dir=plugins_base_dir)
    elif mode == "name":
        if _is_dev_link_plugin(name_or_filepath,
                               plugins_base_dir=plugins_base_dir):
            res['metadatas'] = {}
            res['metadatas']['name'] = name_or_filepath
            res['metadatas']['release'] = 'dev_link'
            res['metadatas']['version'] = 'dev_link'
            res['raw_metadata_output'] = 'DEV LINK'
            res['raw_files_output'] = 'DEV LINK'
            res['files'] = []
            res['home'] = get_layer_home_from_plugin_name(
                name_or_filepath, plugins_base_dir=plugins_base_dir)
            return res
        cmd = _get_rpm_cmd('-qi', name_or_filepath,
                           plugins_base_dir=plugins_base_dir)
    else:
        __get_logger().warning("unknown mode [%s]" % mode)
        return None
    metadata_output = BashWrapper(cmd)
    if not metadata_output:
        return None
    res['raw_metadata_output'] = metadata_output.stdout
    for line in metadata_output.stdout.split('\n'):
        tmp = line.strip().split(':', 1)
        if len(tmp) <= 1:
            continue
        name = tmp[0].strip().lower()
        value = tmp[1].strip()
        if 'metadatas' not in res:
            res['metadatas'] = {}
        res['metadatas'][name] = value
    if mode == "name":
        res["home"] = \
            get_layer_home_from_plugin_name(
                name_or_filepath, plugins_base_dir=plugins_base_dir)
    if mode == "file":
        cmd = _get_rpm_cmd('-ql -p %s' % name_or_filepath,
                           plugins_base_dir=plugins_base_dir)
    else:
        cmd = _get_rpm_cmd('-ql %s' % name_or_filepath,
                           plugins_base_dir=plugins_base_dir)
    files_output = BashWrapper(cmd)
    if not files_output:
        return None
    res['files'] = [x.strip() for x in files_output.stdout.split('\n')]
    res['raw_files_output'] = files_output.stdout
    return res

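# Hedged usage sketch (not from the original source); the plugin name "foo"
# and the plugin file path are hypothetical. It only reads the keys built by
# get_plugin_info() above: 'metadatas', 'files' and 'home'.
info = get_plugin_info("foo", mode="name")
if info is not None:
    print(info['metadatas'].get('name'), info['metadatas'].get('version'))
    print(info.get('home'))
    print("%i files" % len(info['files']))

# The same call starting from a plugin file instead of an installed plugin name:
info = get_plugin_info("/tmp/foo-1.0.0.plugin", mode="file")
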
def _touch_conf_monitor_control_file():
    BashWrapper("touch %s/var/conf_monitor" % RUNTIME_HOME)

def _postinstall_plugin(name, version, release, plugins_base_dir=None):
    with PluginsBaseDir(plugins_base_dir):
        return BashWrapper("_plugins.postinstall %s %s %s" %
                           (name, version, release))

def restart_nginx(old_conf, new_conf):
    os.unlink(old_conf)
    os.rename(new_conf, old_conf)
    x = BashWrapper("_nginx.reload")
    if not x:
        LOGGER.warning(x)

def restart_switch(old_conf, new_conf):
    os.unlink(old_conf)
    os.rename(new_conf, old_conf)
    x = BashWrapper("_switch.stop")
    if not x:
        LOGGER.warning(x)

def restart_directory_observer(old_conf, new_conf):
    os.unlink(old_conf)
    os.rename(new_conf, old_conf)
    x = BashWrapper("_directory_observer.stop")
    if not x:
        LOGGER.warning(x)

import os
import sys

from mflog import get_logger
from mfutil import BashWrapper, get_tmp_filepath
from mfplugin.compat import get_installed_plugins

MFMODULE_HOME = os.environ.get("MFMODULE_HOME", None)
MFMODULE = os.environ.get("MFMODULE", None)
LOGGER = get_logger("_make_crontab.py")

if not os.path.isfile(f"{MFMODULE_HOME}/config/crontab"):
    sys.exit(0)

# FIXME: deprecated => remove for 0.11 release
os.environ["RUNTIME_SUFFIX"] = ""

x = BashWrapper(f"cat {MFMODULE_HOME}/config/crontab "
                "|envtpl --reduce-multi-blank-lines")
if not x:
    LOGGER.critical("can't build module level crontab, details: %s" % x)
    sys.exit(1)
print(x.stdout)

plugins = []
try:
    plugins = get_installed_plugins()
except Exception:
    pass
for plugin in plugins:
    if not os.path.isfile(f"{plugin['home']}/crontab"):
        continue
    x = BashWrapper(f"cat {plugin['home']}/crontab "
                    "| grep -v '^#' |grep [^[:space:]] "

def _postuninstall_plugin(name, version, release):
    res = BashWrapper("_plugins.postuninstall %s %s %s" %
                      (name, version, release))
    if not res:
        __get_logger().warning("error during postuninstall: %s", res)

def get_file_count(directory):
    cmd = "find %s -type f 2>/dev/null |wc -l" % directory
    x = BashWrapper(cmd)
    if x:
        return int(x.stdout)

import argparse
import sys
import time
import os
import datetime

from mfutil import BashWrapper

DESCRIPTION = "block until influxdb is up (return code: 0) or " \
    "30s timeout (return code: 1)"
INFLUXDB_PORT = int(os.environ['MFADMIN_INFLUXDB_HTTP_PORT'])
INFLUXDB_COMMAND = "influx -port %i -precision rfc3339 " \
    "-execute 'SHOW DATABASES'" % INFLUXDB_PORT

parser = argparse.ArgumentParser(description=DESCRIPTION)
parser.parse_args()

before = datetime.datetime.now()
while True:
    elapsed = (datetime.datetime.now() - before).total_seconds()
    if elapsed > 30:
        print("timeout")
        sys.exit(1)
    x = BashWrapper(INFLUXDB_COMMAND)
    if x:
        break
    else:
        print(x)
    time.sleep(1)
sys.exit(0)