Example #1
def _get_system_facts(archive_path):
    facts = {}
    default_packages = (
        "insights.specs.default",
        "insights.specs.insights_archive",
        "insights.combiners",
        "insights.parsers"
    )
    for pkg in default_packages:
        dr.load_components(pkg)
    broker = dr.Broker()
    try:
        with archives.extract(archive_path) as ex:
            ctx = create_context(ex.tmp_dir, HostArchiveContext)
            broker[ctx.__class__] = ctx
            broker = dr.run(components=[Specs.machine_id, LsEtc],
                            broker=broker)
            if Specs.machine_id in broker:
                facts["id"] = broker[Specs.machine_id].content[0].strip()
            if LsEtc in broker:
                facts["satellite_managed"] = any(
                    broker[LsEtc].dir_contains(*satellite_file)
                    for satellite_file in SATELLITE_MANAGED_FILES.values()
                    if satellite_file[0] in broker[LsEtc]
                )
    except InvalidContentType:
        LOGGER.error("Unable to parse archive.")
    return facts
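
The snippets on this page omit their imports. A minimal sketch of what the pattern above appears to rely on, with module paths assumed from the insights-core layout (SATELLITE_MANAGED_FILES is application-defined and not shown):

# Likely imports for the archive-facts pattern above; exact module paths
# are assumptions based on the insights-core package layout.
import logging

from insights.core import dr
from insights.core import archives
from insights.core.archives import InvalidContentType
from insights.core.hydration import create_context
from insights.core.context import HostArchiveContext
from insights.parsers.ls_etc import LsEtc
from insights.specs import Specs

LOGGER = logging.getLogger(__name__)  # assumed module-level logger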
Example #2
    def process(self, broker, path):
        for w in self.watchers:
            w.watch_broker(broker)

        result = None

        try:
            self.fire("pre_extract", broker, path)

            with extract(path,
                         timeout=self.extract_timeout,
                         extract_dir=self.extract_tmp_dir) as extraction:
                ctx, broker = initialize_broker(extraction.tmp_dir,
                                                broker=broker)

                self.fire("on_extract", ctx, broker, extraction)

                output = StringIO()
                with self.Formatter(broker, stream=output):
                    dr.run(self.target_components, broker=broker)
                output.seek(0)
                result = output.read()
                self.fire("on_engine_success", broker, result)
                return result
        except Exception as ex:
            self.fire("on_engine_failure", broker, ex)
            raise
        finally:
            self.fire("on_engine_complete", broker)
Example #3
    def process(self, broker, path):
        for w in self.watchers:
            w.watch_broker(broker)

        result = None

        try:
            self.fire("pre_extract", broker, path)

            with extract(path, timeout=self.timeout,
                         extract_dir=self.tmp_dir) as extraction:
                ctx = create_context(extraction.tmp_dir)
                broker[ctx.__class__] = ctx

                self.fire("on_extract", ctx, broker, extraction)

                output = StringIO()
                with self.Format(broker, stream=output):
                    dr.run(self.comps, broker=broker)
                output.seek(0)
                result = output.read()
                self.fire("on_engine_success", broker, result)
                return result
        except Exception as ex:
            self.fire("on_engine_failure", broker, ex)
            raise
        finally:
            self.fire("on_engine_complete", broker)
Example #4
def process_archives(archives):
    for archive in archives:
        with extract(archive) as ex:
            ctx = create_context(ex.tmp_dir)
            broker = dr.Broker()
            broker[ctx.__class__] = ctx
            yield dr.run(broker=broker)
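
A hedged usage sketch for the generator above; the archive paths are hypothetical. With no component list, dr.run appears to evaluate every component loaded so far, so dr.load_components must have been called beforehand:

# Hypothetical archives; each iteration yields the evaluated broker.
for broker in process_archives(["/tmp/a.tar.gz", "/tmp/b.tar.gz"]):
    if Specs.machine_id in broker:
        print(broker[Specs.machine_id].content[0].strip())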
Example #5
def get_rule_hit_info(archive, rule_name, timeout=None, tmp_dir=None):
    # We have to load everything again for multiprocessing or clustering to
    # work. Even though imports are cached internally after the first call, we
    # can still optimize a bit with this LOADED hack.
    rule_func = dr.get_component(rule_name)

    if not LOADED[0]:
        load_default_plugins()
        LOADED[0] = True

    # this is also cached behind get_deps after the first call.
    graph, bool_deps = get_deps(rule_func)

    with extract(archive, timeout=timeout, extract_dir=tmp_dir) as arc:
        ctx = create_context(arc.tmp_dir, None)
        broker = dr.Broker()
        broker[ctx.__class__] = ctx

        results = dr.run(graph, broker=broker)

        rule_result = results.get(rule_func)
        rhr = results.get(RedHatRelease)

        result = extract_hits(bool_deps, results)

        result["archive"] = archive
        result["key"] = rule_result.get_key() if rule_result else None
        result["type"] = rule_result.__class__.__name__ if rule_result else None
        result["make_fail"] = True if rule_result and isinstance(rule_result, make_response) else False
        result["major"] = rhr.major if rhr else -1
        result["minor"] = rhr.minor if rhr else -1
        return result
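
LOADED and load_default_plugins are defined outside this snippet. A plausible reconstruction, assuming the same default package list the other examples on this page load (both names are taken from the snippet, not verified):

LOADED = [False]  # one-element list: a mutable once-per-process flag

def load_default_plugins():
    # Assumed to mirror the package list used elsewhere on this page.
    for pkg in ("insights.specs.default", "insights.specs.insights_archive",
                "insights.combiners", "insights.parsers"):
        dr.load_components(pkg)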
Example #6
    def parse(self):
        # pylint: disable=too-many-branches
        """Parse given archive."""
        ARCHIVE_PARSE_COUNT.inc()
        default_packages = ("insights.specs.default",
                            "insights.specs.insights_archive",
                            "insights.combiners", "insights.parsers")
        for pkg in default_packages:
            dr.load_components(pkg)
        broker = dr.Broker()

        with archives.extract(self.archive_path) as ex:
            ctx = create_context(ex.tmp_dir, HostArchiveContext)
            broker[ctx.__class__] = ctx
            broker = dr.run(components=[Installed, DnfModules, YumReposD],
                            broker=broker)

            if Installed in broker:
                pkglist = broker[Installed]
                self._delete_blacklisted_packages(pkglist.packages)
                for pkg_name in pkglist.packages:
                    pkg = pkglist.get_max(pkg_name)
                    self.package_list.append("%s-%s:%s-%s.%s" %
                                             (pkg.name, pkg.epoch, pkg.version,
                                              pkg.release, pkg.arch))
            else:
                RPMDB_PARSE_FAILURE.inc()
                LOGGER.error("Unable to parse package list from archive.")
                return

            if YumReposD in broker:
                repolist = broker[YumReposD]
                for repo_file in repolist:
                    if repo_file.file_name == 'redhat.repo':
                        for repo in repo_file:
                            enabled = repo_file[repo].get('enabled', '1').lower()
                            if enabled in ('1', 'true', 'enabled', 'yes', 'on'):
                                self.repo_list.append(repo)
                        break

            if not self.repo_list:
                REPOLIST_PARSE_FAILURE.inc()
                LOGGER.warning("Unable to parse RHSM repo list from archive.")

            if DnfModules in broker:
                for module in broker[DnfModules]:
                    for module_name in module.sections():
                        self.modules_list.append({
                            'module_name': module_name,
                            'module_stream': module.get(module_name, 'stream')
                        })
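
The enabled-repo test above also appears in Example #7. A hypothetical helper that names the repeated condition (a refactoring sketch, not code from the source):

def _repo_enabled(section):
    # A repo with no 'enabled' key counts as enabled, matching the inline check.
    return section.get('enabled', '1').lower() in ('1', 'true', 'enabled',
                                                   'yes', 'on')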
Example #7
    def _get_system_profile(archive_path):
        profile = {}
        default_packages = ("insights.specs.default",
                            "insights.specs.insights_archive",
                            "insights.combiners", "insights.parsers")
        for pkg in default_packages:
            dr.load_components(pkg)
        broker = dr.Broker()
        try:
            with archives.extract(archive_path) as ex:
                ctx = create_context(ex.tmp_dir, HostArchiveContext)
                broker[ctx.__class__] = ctx
                broker = dr.run(components=[Specs.machine_id, Installed,
                                            DnfModules, YumReposD],
                                broker=broker)
                if Specs.machine_id in broker:
                    profile["id"] = broker[Specs.machine_id].content[0].strip()
                profile["installed_packages"] = []
                if Installed in broker:
                    pkglist = broker[Installed]
                    for pkg_name in pkglist.packages:
                        pkg = pkglist.get_max(pkg_name)
                        profile["installed_packages"].append(pkg.nevra)

                profile["yum_repos"] = []
                if YumReposD in broker:
                    repolist = broker[YumReposD]
                    for repo_file in repolist:
                        if repo_file.file_name == 'redhat.repo':
                            for repo in repo_file:
                                enabled = repo_file[repo].get('enabled', '1').lower()
                                if enabled in ('1', 'true', 'enabled', 'yes', 'on'):
                                    profile["yum_repos"].append(repo)
                            break

                profile["dnf_modules"] = []
                if DnfModules in broker:
                    for module in broker[DnfModules]:
                        for module_name in module.sections():
                            profile["dnf_modules"].append({
                                'name':
                                module_name,
                                'stream':
                                module.get(module_name, 'stream')
                            })
                LOGGER.info(profile)
        except InvalidContentType:
            LOGGER.error("Unable to parse archive.")
        return profile
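
A hedged usage sketch; the archive path is hypothetical, and the method reads like a staticmethod since it takes no self. On success the profile carries the keys set above: id (when a machine-id is present), installed_packages, yum_repos, and dnf_modules.

# Hypothetical call; an unparseable archive returns whatever was filled in
# before InvalidContentType was raised (possibly an empty dict).
profile = _get_system_profile("/tmp/insights-archive.tar.gz")
print(profile.get("id"), len(profile.get("installed_packages", [])))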
Example #8
    def parse(self):
        # pylint: disable=too-many-branches
        """Parse given archive."""
        ARCHIVE_PARSE_COUNT.inc()
        default_packages = ("insights.specs.default",
                            "insights.specs.insights_archive",
                            "insights.combiners", "insights.parsers")
        for pkg in default_packages:
            dr.load_components(pkg)
        broker = dr.Broker()

        with archives.extract(self.archive_path) as ex:
            ctx = create_context(ex.tmp_dir, HostArchiveContext)
            broker[ctx.__class__] = ctx
            broker = dr.run(components=[Installed,
                                        SubscriptionManagerReposListEnabled,
                                        DnfModules],
                            broker=broker)

            if Installed in broker:
                pkglist = broker[Installed]
                self._delete_blacklisted_packages(pkglist.packages)
                for pkg_name in pkglist.packages:
                    pkg = pkglist.get_max(pkg_name)
                    self.package_list.append("%s-%s:%s-%s.%s" %
                                             (pkg.name, pkg.epoch, pkg.version,
                                              pkg.release, pkg.arch))
            else:
                RPMDB_PARSE_FAILURE.inc()
                LOGGER.error("Unable to parse package list from archive.")
                return

            if SubscriptionManagerReposListEnabled in broker:
                repolist = broker[SubscriptionManagerReposListEnabled]
                for repo_record in repolist.records:
                    repo_label = repo_record.get("Repo ID", None)
                    if repo_label:
                        self.repo_list.append(repo_label)

            if not self.repo_list:
                REPOLIST_PARSE_FAILURE.inc()
                LOGGER.warning("Unable to parse RHSM repo list from archive.")

            if DnfModules in broker:
                for module in broker[DnfModules]:
                    for module_name in module.sections():
                        self.modules_list.append({
                            'module_name': module_name,
                            'module_stream': module.get(module_name, 'stream')
                        })
Example #9
def process_archives(graph, archives):
    for archive in archives:
        if os.path.isfile(archive):
            with extract(archive) as ex:
                ctx = create_context(ex.tmp_dir)
                broker = dr.Broker()
                broker[ctx.__class__] = ctx
                # Pass the graph here too; the original dropped it in this branch.
                yield dr.run(graph, broker=broker)
        else:
            ctx = create_context(archive)
            broker = dr.Broker()
            broker[ctx.__class__] = ctx
            yield dr.run(graph, broker=broker)
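
A hedged usage sketch; paths are hypothetical. Packed archives are extracted first, while already-extracted directories are used in place:

# Hypothetical mix of a packed archive and an extracted directory; the
# graph is assumed to come from dr.get_dependency_graph.
graph = dr.get_dependency_graph(Specs.machine_id)
for broker in process_archives(graph, ["/tmp/a.tar.gz", "/tmp/extracted"]):
    print(Specs.machine_id in broker)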
Example #10
def upload(system_id):
    user_agent = request.headers.get("User-Agent", "Unknown")
    setattr(util.thread_context, "request_id",
            request.headers.get("X-Request-Id", "Unknown"))
    account_number = request.headers.get("X-Account", "")
    file_size, file_loc = extract()
    with archives.extract(file_loc) as ex:
        results = handle(ex.tmp_dir, system_id, config=config)
        response = handle_results(results, file_size, user_agent)
    s3.save(file_loc, results["system"].get("system_id"), ex.content_type,
            account_number)
    shutil.rmtree(os.path.dirname(file_loc))
    update_stats(results, user_agent)
    return response
Example #11
def analyze(paths, excludes=None):
    if not isinstance(paths, list):
        paths = [paths]

    results = []
    for path in paths:
        if content_type.from_file(path) == "text/plain":
            results.append(_load(path))
        elif os.path.isdir(path):
            results.extend(_process(path, excludes))
        else:
            with extract(path) as ex:
                results.extend(_process(ex.tmp_dir, excludes))

    return Result(children=results)
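
A hedged usage sketch; the paths are hypothetical. analyze dispatches on input type: plain-text files go through _load, directories through _process, and anything else is treated as an archive and extracted first:

# Hypothetical inputs: a packed sosreport and a plain log file.
combined = analyze(["/tmp/sosreport.tar.xz", "/var/log/messages"])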
Example #12
    def process(self, path, broker=None):
        broker = dr.Broker() if broker is None else broker

        if os.path.isdir(path):
            results = {"results": self._evaluate(broker, path)}
        else:
            with extract(path) as extraction:
                results = {
                    "results": self._evaluate(broker, extraction.tmp_dir)
                }

        if self._include_timings:
            results["timings"] = self._get_timings(broker)

        if self._include_tracebacks:
            results["tracebacks"] = self._get_tracebacks(broker)

        return results
Example #13
def _get_system_facts(archive_path):
    facts = {}
    default_packages = ("insights.specs.default",
                        "insights.specs.insights_archive",
                        "insights.combiners", "insights.parsers")
    for pkg in default_packages:
        dr.load_components(pkg)
    broker = dr.Broker()
    try:
        with archives.extract(archive_path) as ex:
            ctx = create_context(ex.tmp_dir, HostArchiveContext)
            broker[ctx.__class__] = ctx
            broker = dr.run(components=[Specs.machine_id], broker=broker)
            if Specs.machine_id in broker:
                facts["id"] = broker[Specs.machine_id].content[0].strip()
    except InvalidContentType:
        LOGGER.error("Unable to parse archive.")
    return facts
Example #14
def test_with_zip():
    tmp_dir = tempfile.mkdtemp()

    d = os.path.join(tmp_dir, 'sys', 'kernel')
    os.makedirs(d)
    with open(os.path.join(d, 'kexec_crash_size'), "w") as f:
        f.write("ohyeahbaby")

    try:
        os.unlink("/tmp/test.zip")
    except OSError:
        pass

    # stolen from zipfile.py:main
    def _add_to_zip(zf, path, zippath):
        if os.path.isfile(path):
            zf.write(path, zippath, zipfile.ZIP_DEFLATED)
        elif os.path.isdir(path):
            if zippath:
                zf.write(path, zippath)
            for nm in os.listdir(path):
                _add_to_zip(zf, os.path.join(path, nm),
                            os.path.join(zippath, nm))
        # else: ignore

    with closing(zipfile.ZipFile("/tmp/test.zip", "w")) as zf:
        _add_to_zip(zf, tmp_dir, os.path.basename(tmp_dir))

    try:
        with extract("/tmp/test.zip") as ex:
            assert any(
                f.endswith("/sys/kernel/kexec_crash_size")
                for f in archives.get_all_files(ex.tmp_dir))

    finally:
        os.unlink("/tmp/test.zip")

    shutil.rmtree(tmp_dir)
Example #15
    def process(self, path, broker=None):
        """
        Use this method to process archives. Pass a path to an archive and an
        optional seed broker to use when evaluating it.

        Archives are extracted outside the sandbox, and the temporary path is
        sent inside for processing.

        Arguments:
            path (str): Path to an archive or directory to analyze.
                Directories must be inside the temporary directory so the
                sandbox can see them.
            broker (insights.core.dr.Broker): Seed broker to use during the
                evaluation. Observers registered with the seed broker won't
                be fired since it is serialized into the sandbox before
                evaluation.
        """
        if os.path.isdir(path):
            return self._runner_adapter_proxy.process(path, broker=broker)
        else:
            with extract(path) as extraction:
                p = extraction.tmp_dir
                return self._runner_adapter_proxy.process(p, broker=broker)
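
A hedged sketch of driving this sandboxed runner, following its docstring; the runner instance and archive path are hypothetical, and dr.Broker is the documented seed type:

# Hypothetical caller: seed a broker, then hand an archive to the runner.
seed = dr.Broker()
results = runner.process("/tmp/insights-archive.tar.gz", broker=seed)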
Example #16
    def process(self, path):
        with extract(path) as ext:
            for item in self.process_dir(ext.tmp_dir):
                yield item