def remove(path, chmod=False):
    """Remove a file or directory located on the filesystem at path.

    If chmod is True, chmod -R 755 is executed on the path before
    rm -rf path is called.

    Parameters
    ----------
    path : str
        file system path to an existing file or directory
    chmod : bool
        If True, chmod -R 755 is executed on path before it's removed.

    Raises
    ------
    CalledProcessError
        If any part of the removal process fails.
    """
    if not os.path.exists(path):
        # Nothing to do; treat a missing path as already removed.
        return
    if chmod:
        # BUG FIX: quote the path the same way the "rm -rf" command below
        # does, so paths containing spaces are handled correctly.
        cmd = 'chmod -R 755 "{p}"'.format(p=path)
        subproc.call(cmd)
    cmd = 'rm -rf "{p}"'.format(p=path)
    subproc.call(cmd)
def from_path(self, path, extract_dir=None):
    """Point this instance at path, extracting it first if it is a tar file.

    If path is already a directory it is used as-is; otherwise the archive
    is unpacked into a fresh temporary directory. Returns self.
    """
    if os.path.isdir(path):
        # Already extracted content; no work needed.
        self.tmp_dir = path
        return self
    self._assert_type(path, False)
    tar_flag = self.TAR_FLAGS.get(self.content_type)
    self.tmp_dir = tempfile.mkdtemp(dir=extract_dir)
    command = "tar %s -x --exclude=*/dev/null -f %s -C %s" % (tar_flag, path, self.tmp_dir)
    logging.info("Extracting files in '%s'", self.tmp_dir)
    subproc.call(command, timeout=self.timeout)
    return self
def from_path(self, path, extract_dir=None, content_type=None):
    """Point this instance at path, extracting tar content when needed.

    A directory path is used directly. Otherwise the content type is
    detected (or taken from the content_type argument), the archive is
    unpacked into a new "insights-" temporary directory, and
    created_tmp_dir is flagged so cleanup knows to remove it.
    Returns self.
    """
    if os.path.isdir(path):
        self.tmp_dir = path
        return self
    self.content_type = content_type or content_type_from_file(path)
    tar_flag = self._tar_flag_for_content_type(self.content_type)
    self.tmp_dir = tempfile.mkdtemp(prefix="insights-", dir=extract_dir)
    self.created_tmp_dir = True
    command = "tar --delay-directory-restore %s -x --exclude=*/dev/null -f %s -C %s" % (tar_flag, path, self.tmp_dir)
    logging.debug("Extracting files in '%s'", self.tmp_dir)
    subproc.call(command, timeout=self.timeout)
    return self
def create_archive(path, remove_path=True):
    """
    Create a tar.gz of path, named after the path basename plus ".tar.gz".

    The resulting file is placed in the parent directory of the original
    path. When remove_path is True the original path is removed afterwards.
    Returns the full path to the created archive.
    """
    parent_dir = os.path.dirname(path)
    base_name = os.path.basename(path)
    archive_path = path + ".tar.gz"
    # -C keeps only the basename (not the full path) inside the archive.
    call([["tar", "-C", parent_dir, "-czf", archive_path, base_name]], env=SAFE_ENV)
    if remove_path:
        fs.remove(path)
    return archive_path
def check_output(self, cmd, timeout=None, keep_rc=False, env=None):
    """
    Run cmd and return its output via subproc.call.

    Subclasses can override to provide special environment setup,
    command prefixes, etc.
    """
    effective_timeout = timeout or self.timeout
    return subproc.call(cmd, timeout=effective_timeout, keep_rc=keep_rc, env=env)
def check_output(self, cmd, timeout=None, keep_rc=False):
    """
    Run cmd and return its output, folding stderr into stdout.

    Subclasses can override to provide special environment setup,
    command prefixes, etc.
    """
    effective_timeout = timeout or self.timeout
    return call(cmd, timeout=effective_timeout, stderr=STDOUT, keep_rc=keep_rc)
def _assert_oscap_rpms_exist(self):
    """Exit with sig_kill_bad unless every REQUIRED_PACKAGES rpm is installed."""
    rc, rpm = call('rpm -qa ' + ' '.join(REQUIRED_PACKAGES), keep_rc=True)
    if rc:
        logger.error('Tried running rpm -qa but failed: {0}.\n'.format(rpm))
        exit(constants.sig_kill_bad)
    # One output line per installed package; fewer lines than required
    # packages means at least one is missing.
    installed = rpm.strip().split('\n')
    if len(installed) < len(REQUIRED_PACKAGES):
        logger.error('Missing required packages for compliance scanning. Please ensure the following packages are installed: {0}\n'.format(', '.join(REQUIRED_PACKAGES)))
        exit(constants.sig_kill_bad)
def find_scap_policy(self, profile_ref_id):
    """Return the first SCAP XML profile file that contains profile_ref_id.

    Greps the known profile files for the ref_id; exits with sig_kill_bad
    when grep fails or no matching filename can be extracted.
    """
    rc, grep = call(('grep ' + profile_ref_id + ' ' + ' '.join(self.profile_files())).encode(), keep_rc=True)
    if rc:
        logger.error('XML profile file not found matching ref_id {0}\n{1}\n'.format(profile_ref_id, grep))
        exit(constants.sig_kill_bad)
    filenames = findall('/usr/share/xml/scap/.+xml', grep)
    if not filenames:
        # BUG FIX: this branch previously logged ' '.join(filenames), which
        # is always the empty string here; log the grep output instead so
        # the message is actually diagnostic (matching the branch above).
        logger.error('No XML profile files found matching ref_id {0}\n{1}\n'.format(profile_ref_id, grep))
        exit(constants.sig_kill_bad)
    return filenames[0]
def load(self):
    """Read and return the file's lines, honoring datasource filters.

    When filters exist for self.ds, only matching lines are returned via
    grep -F; otherwise the whole file is read with cat. Raises
    ContentException when the resulting lines fail validation.
    """
    filters = False
    if self.ds:
        filters = "\n".join(get_filters(self.ds))
    if filters:
        cmd = "/bin/grep -F '{0}' {1}".format(filters, self.path)
        rc, out = subproc.call(cmd.encode("utf-8"), shell=False, keep_rc=True)
        # Any failure or empty output means no matching content.
        if rc != 0 or out == '':
            return []
        results = out.splitlines()
    else:
        out = subproc.call("cat {0}".format(self.path), shell=False)
        results = out.splitlines()
    if not self.validate_lines(results):
        first = results[0] if results else "<no content>"
        raise ContentException(self.relative_path + ": " + first)
    return results
def run_scan(self, profile_ref_id, policy_xml):
    """Run an oscap xccdf evaluation; exit unless it succeeds or is merely noncompliant."""
    logger.info('Running scan... this may take a while')
    oscap_cmd = ('oscap xccdf eval --profile ' + profile_ref_id +
                 ' --results ' + OSCAP_RESULTS_OUTPUT + ' ' + policy_xml)
    rc, oscap = call(oscap_cmd, keep_rc=True)
    # NONCOMPLIANT_STATUS is an expected outcome, not a scan failure.
    if rc and rc != NONCOMPLIANT_STATUS:
        logger.error('Scan failed')
        logger.error(oscap)
        exit(constants.sig_kill_bad)
def collect(manifest=default_manifest, tmp_path=None, compress=False):
    """
    This is the collection entry point. It accepts a manifest, a temporary
    directory in which to store output, and a boolean for optional compression.

    Args:
        manifest (str or dict): json document or dictionary containing the
            collection manifest. See default_manifest for an example.
        tmp_path (str): The temporary directory that will be used to create a
            working directory for storing component output as well as the final
            tar.gz if one is generated.
        compress (boolean): True to create a tar.gz and remove the original
            workspace containing output. False to leave the workspace without
            creating a tar.gz

    Returns:
        The full path to the created tar.gz or workspace.
    """
    manifest = load_manifest(manifest)
    client = manifest.get("client", {})
    plugins = manifest.get("plugins", {})
    run_strategy = client.get("run_strategy", {"name": "parallel"})

    load_packages(plugins.get("packages", []))
    apply_default_enabled(plugins)
    apply_configs(plugins)
    apply_blacklist(client.get("blacklist", {}))

    to_persist = get_to_persist(client.get("persist", set()))

    # BUG FIX: "hostname -f" fails on hosts without a configured FQDN.
    # Fall back to the short hostname, consistent with the other collect()
    # variant in this file. (Assumes CalledProcessError is imported from
    # the subproc module alongside call — confirm in this module's imports.)
    try:
        hostname = call("hostname -f", env=SAFE_ENV).strip()
    except CalledProcessError:
        hostname = call("hostname", env=SAFE_ENV).strip()

    suffix = datetime.utcnow().strftime("%Y%m%d%H%M%S")
    relative_path = "insights-%s-%s" % (hostname, suffix)
    tmp_path = tmp_path or tempfile.gettempdir()
    output_path = os.path.join(tmp_path, relative_path)
    fs.ensure_path(output_path)
    fs.touch(os.path.join(output_path, "insights_archive.txt"))

    broker = dr.Broker()
    ctx = create_context(client.get("context", {}))
    broker[ctx.__class__] = ctx

    parallel = run_strategy.get("name") == "parallel"
    pool_args = run_strategy.get("args", {})
    with get_pool(parallel, pool_args) as pool:
        h = Hydration(output_path, pool=pool)
        broker.add_observer(h.make_persister(to_persist))
        dr.run_all(broker=broker, pool=pool)

    if compress:
        return create_archive(output_path)
    return output_path
def get_ssg_version(self):
    """Return the installed SSG package version, or None if the rpm query fails."""
    rpmcmd = 'rpm -qa --qf "%{VERSION}" ' + SSG_PACKAGE
    if not six.PY3:
        # Python 2: the subprocess helper expects byte strings.
        rpmcmd = rpmcmd.encode()
    rc, ssg_version = call(rpmcmd, keep_rc=True)
    if rc:
        # BUG FIX: corrected typo in the log message
        # ("determinig" -> "determining").
        logger.warning('Tried determining SSG version but failed: {0}.\n'.format(ssg_version))
        return
    logger.info('System uses SSG version %s', ssg_version)
    return ssg_version
def run_scan(self, profile_ref_id, policy_xml, output_path, tailoring_file_path=None):
    """Run the oscap scan in UTC and copy its results into the archive.

    Exits with sig_kill_bad on any return code other than success or
    NONCOMPLIANT_STATUS.
    """
    logger.info('Running scan for {0}... this may take a while'.format(profile_ref_id))
    # Force UTC so timestamps in the results are timezone-independent.
    env = os.environ.copy()
    env['TZ'] = 'UTC'
    oscap_command = self.build_oscap_command(profile_ref_id, policy_xml, output_path, tailoring_file_path)
    rc, oscap = call(oscap_command.encode(), keep_rc=True, env=env)
    if rc and rc != NONCOMPLIANT_STATUS:
        logger.error('Scan failed')
        logger.error(oscap)
        exit(constants.sig_kill_bad)
    self.archive.copy_file(output_path)
def run_scan(self, profile_ref_id, policy_xml, output_path):
    """Run the oscap scan and copy its results file into the archive.

    Exits with sig_kill_bad on any return code other than success or
    NONCOMPLIANT_STATUS.
    """
    logger.info('Running scan for {0}... this may take a while'.format(profile_ref_id))
    oscap_cmd = ('oscap xccdf eval --profile ' + profile_ref_id +
                 ' --results ' + output_path + ' ' + policy_xml)
    rc, oscap = call(oscap_cmd, keep_rc=True)
    if rc and rc != NONCOMPLIANT_STATUS:
        logger.error('Scan failed')
        logger.error(oscap)
        exit(constants.sig_kill_bad)
    self.archive.copy_file(output_path)
def load(self):
    """Read and return the file's lines, honoring datasource filters.

    When filters exist for self.ds, only matching lines are returned via
    grep -F (the safe list-of-args form); otherwise the whole file is
    read directly.
    """
    filters = False
    if self.ds:
        filters = "\n".join(get_filters(self.ds))
    if filters:
        cmd = [["grep", "-F", filters, self.path]]
        rc, out = subproc.call(cmd, shell=False, keep_rc=True, env=SAFE_ENV)
        if rc == 0 and out != '':
            results = out.splitlines()
        else:
            return []
    else:
        # BUG FIX: the "U" open mode was deprecated and removed in
        # Python 3.11 (raises ValueError); plain "r" already provides
        # universal-newline handling on Python 3.
        with open(self.path, "r") as f:
            results = [l.rstrip("\n") for l in f]
    return results
def collect(manifest=default_manifest, tmp_path=None, compress=False, rm_conf=None, client_timeout=None):
    """
    This is the collection entry point. It accepts a manifest, a temporary
    directory in which to store output, and a boolean for optional compression.

    Args:
        manifest (str or dict): json document or dictionary containing the
            collection manifest. See default_manifest for an example.
        tmp_path (str): The temporary directory that will be used to create a
            working directory for storing component output as well as the final
            tar.gz if one is generated.
        compress (boolean): True to create a tar.gz and remove the original
            workspace containing output. False to leave the workspace without
            creating a tar.gz
        rm_conf (dict): Client-provided python dict containing keys
            "commands", "files", and "keywords", to be injected
            into the manifest blacklist.
        client_timeout (int): Client-provided command timeout value
    Returns:
        The full path to the created tar.gz or workspace.
    """
    # Resolve the manifest and its client/plugin sections, then configure
    # the plugin machinery before any collection runs.
    manifest = load_manifest(manifest)
    client = manifest.get("client", {})
    plugins = manifest.get("plugins", {})
    run_strategy = client.get("run_strategy", {"name": "parallel"})

    load_packages(plugins.get("packages", []))
    apply_default_enabled(plugins)
    apply_configs(plugins)
    apply_blacklist(client.get("blacklist", {}))

    # insights-client
    if client_timeout:
        try:
            client['context']['args']['timeout'] = client_timeout
        except LookupError:
            # Missing 'context'/'args' keys; leave the default timeout.
            log.warning('Could not set timeout option.')
    rm_conf = rm_conf or {}
    apply_blacklist(rm_conf)
    # Disable any components the client blacklisted; warn on unknown names.
    for component in rm_conf.get('components', []):
        if not dr.get_component_by_name(component):
            log.warning('WARNING: Unknown component in blacklist: %s' % component)
        else:
            dr.set_enabled(component, enabled=False)
            log.warning('WARNING: Skipping component: %s', component)

    to_persist = get_to_persist(client.get("persist", set()))

    # Filters are optional: a missing or unparsable filters file is only
    # logged at debug level, never fatal.
    try:
        filters.load()
    except IOError as e:
        # could not load filters file
        log.debug("No filters available: %s", str(e))
    except AttributeError as e:
        # problem parsing the filters
        log.debug("Could not parse filters: %s", str(e))

    try:
        hostname = call("hostname -f", env=SAFE_ENV).strip()
    except CalledProcessError:
        # problem calling hostname -f
        hostname = call("hostname", env=SAFE_ENV).strip()

    # Build a unique, timestamped workspace under tmp_path.
    suffix = datetime.utcnow().strftime("%Y%m%d%H%M%S")
    relative_path = "insights-%s-%s" % (hostname, suffix)
    tmp_path = tmp_path or tempfile.gettempdir()
    output_path = os.path.join(tmp_path, relative_path)
    fs.ensure_path(output_path)
    fs.touch(os.path.join(output_path, "insights_archive.txt"))

    # Seed the broker with the execution context, then run all components,
    # persisting the selected component output into the workspace.
    broker = dr.Broker()
    ctx = create_context(client.get("context", {}))
    broker[ctx.__class__] = ctx

    parallel = run_strategy.get("name") == "parallel"
    pool_args = run_strategy.get("args", {})
    with get_pool(parallel, pool_args) as pool:
        h = Hydration(output_path, pool=pool)
        broker.add_observer(h.make_persister(to_persist))
        dr.run_all(broker=broker, pool=pool)

    if compress:
        return create_archive(output_path)
    return output_path
def test_call_invalid_args():
    # Non-string command elements must be rejected with CalledProcessError.
    bad_cmd = [1, 2, 3]
    with pytest.raises(subproc.CalledProcessError):
        subproc.call(bad_cmd)
def test_call():
    # A simple echo returns the command's stdout.
    assert subproc.call('echo -n hello') == 'hello'
def test_call_timeout():
    # Timeouts don't work on OS X, so skip the check there.
    if sys.platform == "darwin":
        return
    with pytest.raises(subproc.CalledProcessError):
        subproc.call('sleep 3', timeout=1)
def test_call_list_of_lists():
    # Pipe echo output through grep using the list-of-lists pipeline form.
    pipeline = [shlex.split("echo -n ' hello '"), ["grep", "-F", "hello"]]
    result = subproc.call(pipeline)
    assert "hello" in result
def from_path(self, path, extract_dir=None):
    """Extract the zip archive at path into a fresh temporary directory.

    Returns self so calls can be chained.
    """
    self.tmp_dir = tempfile.mkdtemp(dir=extract_dir)
    cmd = "unzip -q -d %s %s" % (self.tmp_dir, path)
    subproc.call(cmd, timeout=self.timeout)
    return self
def from_path(self, path, extract_dir=None, content_type=None):
    """Extract the zip archive at path into a new "insights-" temp dir.

    Uses unzip -n (never overwrite existing files) and records that the
    temp dir was created here so cleanup can remove it. Returns self.
    """
    self.tmp_dir = tempfile.mkdtemp(prefix="insights-", dir=extract_dir)
    self.created_tmp_dir = True
    cmd = "unzip -n -q -d %s %s" % (self.tmp_dir, path)
    subproc.call(cmd, timeout=self.timeout)
    return self