    def testAssertCheckUndetected(self):
        """Tests for the asertCheckUndetected() method."""
        anomaly = {
            "finding": ["Adware 2.1.1 is installed"],
            "symptom": "Found: Malicious software.",
            "type": "ANALYSIS_ANOMALY"
        }

        # Simple no anomaly case.
        no_anomaly = {"SW-CHECK": checks.CheckResult(check_id="SW-CHECK")}
        self.assertCheckUndetected("SW-CHECK", no_anomaly)

        # The case where there is an anomaly in the results, just not for the
        # check we are looking for.
        other_anomaly = {
            "SW-CHECK":
            checks.CheckResult(check_id="SW-CHECK"),
            "OTHER":
            checks.CheckResult(check_id="OTHER",
                               anomaly=[rdf_anomaly.Anomaly(**anomaly)])
        }
        self.assertCheckUndetected("SW-CHECK", other_anomaly)

        # Check the simple failure case works.
        has_anomaly = {
            "SW-CHECK":
            checks.CheckResult(check_id="SW-CHECK",
                               anomaly=[rdf_anomaly.Anomaly(**anomaly)])
        }
        self.assertRaises(AssertionError, self.assertCheckUndetected,
                          "SW-CHECK", has_anomaly)
Example #2
    def testWithAnomaly(self):
        checkresult = checks.CheckResult(
            check_id="check-id-2",
            anomaly=[
                rdf_anomaly.Anomaly(
                    type="PARSER_ANOMALY",
                    symptom="something was wrong on the system"),
                rdf_anomaly.Anomaly(
                    type="MANUAL_ANOMALY",
                    symptom="manually found wrong stuff",
                    anomaly_reference_id=["id1", "id2"],
                    finding=["file has bad permissions: /tmp/test"]),
            ])
        converter = check_result.CheckResultConverter()
        results = list(converter.Convert(self.metadata, checkresult))

        self.assertLen(results, 2)
        self.assertEqual(results[0].check_id, checkresult.check_id)
        self.assertEqual(results[0].anomaly.type, checkresult.anomaly[0].type)
        self.assertEqual(results[0].anomaly.symptom,
                         checkresult.anomaly[0].symptom)
        self.assertEqual(results[1].check_id, checkresult.check_id)
        self.assertEqual(results[1].anomaly.type, checkresult.anomaly[1].type)
        self.assertEqual(results[1].anomaly.symptom,
                         checkresult.anomaly[1].symptom)
        self.assertEqual(
            results[1].anomaly.anomaly_reference_id,
            "\n".join(checkresult.anomaly[1].anomaly_reference_id))
        self.assertEqual(results[1].anomaly.finding,
                         checkresult.anomaly[1].finding[0])
Example #3
  def testSystemAccountAnomaly(self):
    passwd = [
        "root:x:0:0::/root:/bin/sash",
        "miss:x:1000:100:Missing:/home/miss:/bin/bash",
        "bad1:x:0:1001:Bad 1:/home/bad1:/bin/bash",
        "bad2:x:1002:0:Bad 2:/home/bad2:/bin/bash"
    ]
    shadow = [
        "root:{UNSET}:16000:0:99999:7:::", "ok:{SHA512}:16000:0:99999:7:::",
        "bad1::16333:0:99999:7:::", "bad2:{DES}:16333:0:99999:7:::"
    ]
    group = [
        "root:x:0:root", "miss:x:1000:miss", "bad1:x:1001:bad1",
        "bad2:x:1002:bad2"
    ]
    gshadow = ["root:::root", "miss:::miss", "bad1:::bad1", "bad2:::bad2"]
    pathspecs, files = self._GenFiles(passwd, shadow, group, gshadow)

    no_grp = {
        "symptom": "Accounts with invalid gid.",
        "finding": ["gid 100 assigned without /etc/groups entry: miss"],
        "type": "PARSER_ANOMALY"
    }
    uid = {
        "symptom": "Accounts with shared uid.",
        "finding": ["uid 0 assigned to multiple accounts: bad1,root"],
        "type": "PARSER_ANOMALY"
    }
    gid = {
        "symptom": "Privileged group with unusual members.",
        "finding": ["Accounts in 'root' group: bad2"],
        "type": "PARSER_ANOMALY"
    }
    no_match = {
        "symptom":
            "Mismatched passwd and shadow files.",
        "finding": [
            "Present in passwd, missing in shadow: miss",
            "Present in shadow, missing in passwd: ok"
        ],
        "type":
            "PARSER_ANOMALY"
    }
    expected = [
        rdf_anomaly.Anomaly(**no_grp),
        rdf_anomaly.Anomaly(**uid),
        rdf_anomaly.Anomaly(**gid),
        rdf_anomaly.Anomaly(**no_match)
    ]

    parser = linux_file_parser.LinuxSystemPasswdParser()
    rdfs = parser.ParseFiles(None, pathspecs, files)
    results = [r for r in rdfs if isinstance(r, rdf_anomaly.Anomaly)]

    self.assertLen(results, len(expected))
    for expect, result in zip(expected, results):
      self.assertEqual(expect.symptom, result.symptom)
      # Expand the repeated finding fields into plain lists for comparison.
      self.assertCountEqual(list(expect.finding), list(result.finding))
      self.assertEqual(expect.type, result.type)
Example #4
    def testSystemGroupParserAnomaly(self):
        """Detect anomalies in group/gshadow files."""
        group = [
            "root:x:0:root,usr1", "adm:x:1:syslog,usr1",
            "users:x:1000:usr1,usr2,usr3,usr4"
        ]
        gshadow = ["root::usr4:root", "users:{DES}:usr1:usr2,usr3,usr4"]
        stats, files = self._GenFiles(None, None, group, gshadow)

        # Set up expected anomalies.
        member = {
            "symptom":
            "Group/gshadow members differ in group: root",
            "finding": [
                "Present in group, missing in gshadow: usr1",
                "Present in gshadow, missing in group: usr4"
            ],
            "type":
            "PARSER_ANOMALY"
        }
        group = {
            "symptom": "Mismatched group and gshadow files.",
            "finding": ["Present in group, missing in gshadow: adm"],
            "type": "PARSER_ANOMALY"
        }
        expected = [
            rdf_anomaly.Anomaly(**member),
            rdf_anomaly.Anomaly(**group)
        ]

        parser = linux_file_parser.LinuxSystemGroupParser()
        rdfs = parser.ParseMultiple(stats, files, None)
        results = [r for r in rdfs if isinstance(r, rdf_anomaly.Anomaly)]
        self.assertEqual(expected, results)
Example #5
    def ParseFile(
        self,
        knowledge_base: rdf_client.KnowledgeBase,
        pathspec: rdf_paths.PathSpec,
        filedesc: IO[bytes],
    ) -> Iterator[rdf_protodict.AttributedDict]:
        del knowledge_base  # Unused.

        lines = set([
            l.strip()
            for l in utils.ReadFileBytesAsUnicode(filedesc).splitlines()
        ])

        users = []
        bad_lines = []
        for line in lines:
            # Behaviour of At/Cron is undefined for lines with
            # whitespace-separated fields/usernames.
            if " " in line:
                bad_lines.append(line)
            elif line:  # drop empty lines
                users.append(line)

        filename = pathspec.path
        cfg = {"filename": filename, "users": users}
        yield rdf_protodict.AttributedDict(**cfg)

        if bad_lines:
            yield rdf_anomaly.Anomaly(type="PARSER_ANOMALY",
                                      symptom="Dodgy entries in %s." %
                                      (filename),
                                      reference_pathspec=pathspec,
                                      finding=bad_lines)
Example #6
  def Parse(self, cmd, args, stdout, stderr, return_val, time_taken,
            knowledge_base):
    _ = cmd, args, stdout, stderr, return_val, time_taken, knowledge_base
    packages = []
    installed = rdf_client.SoftwarePackage.InstallState.INSTALLED
    packages.append(
        rdf_client.SoftwarePackage(
            name="Package1",
            description="Desc1",
            version="1",
            architecture="amd64",
            install_state=installed))
    packages.append(
        rdf_client.SoftwarePackage(
            name="Package2",
            description="Desc2",
            version="1",
            architecture="i386",
            install_state=installed))

    yield rdf_client.SoftwarePackages(packages=packages)

    # Also yield something random so we can test return type filtering.
    yield rdf_client_fs.StatEntry()

    # Also yield an anomaly to test that.
    yield rdf_anomaly.Anomaly(
        type="PARSER_ANOMALY", symptom="could not parse gremlins.")
Example #7
    def ParseMultiple(self, stats, file_objects, knowledge_base):
        """Parse the found release files."""
        _ = knowledge_base

        # Collate files into path: contents dictionary.
        found_files = self._Combine(stats, file_objects)

        # Determine collected files and apply weighting.
        weights = [w for w in self.WEIGHTS if w.path in found_files]
        weights = sorted(weights, key=lambda x: x.weight)

        for _, path, handler in weights:
            contents = found_files[path]
            obj = handler(contents)

            complete, result = obj.Parse()
            if result is None:
                continue
            elif complete:
                yield rdf_protodict.Dict({
                    'os_release': result.release,
                    'os_major_version': result.major,
                    'os_minor_version': result.minor
                })
                break
        else:
            # No successful parse.
            yield rdf_anomaly.Anomaly(
                type='PARSER_ANOMALY',
                symptom='Unable to determine distribution.')
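The for/else above is easy to misread: the else branch runs only when the loop finishes without hitting break, i.e. when no handler produced a complete parse. A minimal sketch of the same control flow in plain Python (no GRR dependencies; the parser lambdas are illustrative):

def first_complete(parsers):
    # Each parser returns (complete, result), mirroring obj.Parse() above.
    for parse in parsers:
        complete, result = parse()
        if result is None:
            continue
        elif complete:
            yield result
            break
    else:
        # Runs only if the loop never hit `break`: no successful parse.
        yield "PARSER_ANOMALY: Unable to determine distribution."

# Neither parser yields a complete result, so only the anomaly is produced.
print(list(first_complete([lambda: (False, None), lambda: (False, "partial")])))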
Example #8
    def ParseFile(
        self,
        knowledge_base: rdf_client.KnowledgeBase,
        pathspec: rdf_paths.PathSpec,
        filedesc: IO[bytes],
    ) -> Iterator[rdf_client.SoftwarePackages]:
        del knowledge_base  # Unused.
        del pathspec  # Unused.

        packages = []
        sw_data = utils.ReadFileBytesAsUnicode(filedesc)
        try:
            for pkg in self._deb822.Packages.iter_paragraphs(
                    sw_data.splitlines()):
                if self.installed_re.match(pkg["Status"]):
                    packages.append(
                        rdf_client.SoftwarePackage(
                            name=pkg["Package"],
                            description=pkg["Description"],
                            version=pkg["Version"],
                            architecture=pkg["Architecture"],
                            publisher=pkg["Maintainer"],
                            install_state="INSTALLED"))
        except SystemError:
            yield rdf_anomaly.Anomaly(type="PARSER_ANOMALY",
                                      symptom="Invalid dpkg status file")
        finally:
            if packages:
                yield rdf_client.SoftwarePackages(packages=packages)
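Note the generator shape above: the anomaly is yielded from the except block, and the partially parsed package list is still yielded from the finally block afterwards. A minimal sketch of that ordering in plain Python (no GRR or deb822 dependencies; the record strings are illustrative):

def parse(records):
    packages = []
    try:
        for rec in records:
            if rec == "corrupt":
                raise SystemError("invalid status paragraph")
            packages.append(rec)
    except SystemError:
        # The anomaly goes out first ...
        yield "PARSER_ANOMALY: Invalid dpkg status file"
    finally:
        # ... and whatever was parsed before the failure is still emitted.
        if packages:
            yield packages

print(list(parse(["pkg1", "pkg2", "corrupt", "pkg3"])))
# ['PARSER_ANOMALY: Invalid dpkg status file', ['pkg1', 'pkg2']]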
Example #9
    def Issue(self, state, results):
        """Collect anomalous findings into a CheckResult.

    Comparisons with anomalous conditions collect anomalies into a single
    CheckResult message. The contents of the result vary depending on whether
    the method making the comparison is a Check, Method or Probe.
    - Probes evaluate raw host data and generate Anomalies. These are condensed
      into a new CheckResult.
    - Checks and Methods evaluate the results of probes (i.e. CheckResults). If
      there are multiple probe results, all probe anomalies are aggregated into
      a single new CheckResult for the Check or Method.

    Args:
      state: A text description of which combination of results was anomalous
        (e.g. some condition was missing or present).
      results: Anomalies or CheckResult messages.

    Returns:
      A CheckResult message.
    """
        result = CheckResult()
        # If there are CheckResults we're aggregating methods or probes.
        # Merge all current results into one CheckResult.
        # Otherwise, the results are raw host data.
        # Generate a new CheckResult and add the specific findings.
        if results and all(isinstance(r, CheckResult) for r in results):
            result.ExtendAnomalies(results)
        else:
            result.anomaly = rdf_anomaly.Anomaly(
                type=anomaly_pb2.Anomaly.AnomalyType.Name(
                    anomaly_pb2.Anomaly.ANALYSIS_ANOMALY),
                symptom=self.hint.Problem(state),
                finding=self.hint.Render(results),
                explanation=self.hint.Fix())
        return result
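A minimal, dependency-free sketch of the branch above, using hypothetical stand-ins (FakeCheckResult, issue) in place of the real CheckResult and hint machinery: when every input is already a CheckResult the anomalies are merged; otherwise the raw findings are wrapped in a single new anomaly.

class FakeCheckResult:  # hypothetical stand-in for checks.CheckResult
    def __init__(self, anomaly=None):
        self.anomaly = list(anomaly or [])

    def ExtendAnomalies(self, others):
        for other in others:
            self.anomaly.extend(other.anomaly)


def issue(state, results):  # hypothetical stand-in for Issue() above
    result = FakeCheckResult()
    if results and all(isinstance(r, FakeCheckResult) for r in results):
        result.ExtendAnomalies(results)  # aggregating probe/method results
    else:
        # Raw host data: wrap the findings in a single new anomaly.
        result.anomaly = [{"symptom": state, "finding": list(results)}]
    return result


print(issue("found", [FakeCheckResult(["a1"]), FakeCheckResult(["a2"])]).anomaly)  # ['a1', 'a2']
print(issue("found", ["raw finding"]).anomaly)  # [{'symptom': 'found', 'finding': ['raw finding']}]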
Example #10
  def Parse(self, stat, file_obj, unused_knowledge_base):
    lines = set([
        l.strip() for l in utils.ReadFileBytesAsUnicode(file_obj).splitlines()
    ])

    users = []
    bad_lines = []
    for line in lines:
      # Behaviour of At/Cron is undefined for lines with
      # whitespace-separated fields/usernames.
      if " " in line:
        bad_lines.append(line)
      elif line:  # drop empty lines
        users.append(line)

    filename = stat.pathspec.path
    cfg = {"filename": filename, "users": users}
    yield rdf_protodict.AttributedDict(**cfg)

    if bad_lines:
      yield rdf_anomaly.Anomaly(
          type="PARSER_ANOMALY",
          symptom="Dodgy entries in %s." % (filename),
          reference_pathspec=stat.pathspec,
          finding=bad_lines)
Example #11
 def _CheckMultipleSymPerCheck(self, check_id, results, sym_list, found_list):
   """Ensure results for a check containing multiple symptoms match."""
   anom = []
   for sym, found in zip(sym_list, found_list):
     anom.append(
         rdf_anomaly.Anomaly(
             symptom=sym, finding=found, type="ANALYSIS_ANOMALY"))
   expected = checks.CheckResult(check_id=check_id, anomaly=anom)
   self.assertResultEqual(expected, results[check_id])
Example #12
    def ParseFiles(
        self,
        knowledge_base: rdf_client.KnowledgeBase,
        pathspecs: Iterable[rdf_paths.PathSpec],
        filedescs: Iterable[IO[bytes]],
    ) -> Iterator[rdf_protodict.Dict]:
        del knowledge_base  # Unused.

        # Collate files into path: contents dictionary.
        found_files = self._Combine(pathspecs, filedescs)

        # Determine collected files and apply weighting.
        weights = [w for w in self.WEIGHTS if w.path in found_files]
        weights = sorted(weights, key=lambda x: x.weight)

        for _, path, handler in weights:
            contents = found_files[path]
            obj = handler(contents)

            complete, result = obj.Parse()
            if result is None:
                continue
            elif complete:
                yield rdf_protodict.Dict({
                    'os_release': result.release,
                    'os_major_version': result.major,
                    'os_minor_version': result.minor
                })
                return

        # Amazon AMIs place release info in /etc/system-release.
        # https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/amazon-linux-ami-basics.html
        system_release = found_files.get('/etc/system-release', None)
        if system_release and 'Amazon Linux' in system_release:
            match_object = ReleaseFileParseHandler.RH_RE.search(system_release)
            if match_object and match_object.lastindex > 1:
                yield rdf_protodict.Dict({
                    'os_release':
                    'AmazonLinuxAMI',
                    'os_major_version':
                    int(match_object.group(1)),
                    'os_minor_version':
                    int(match_object.group(2))
                })
                return

        # Fall back to /etc/os-release.
        results_dict = self._ParseOSReleaseFile(found_files)
        if results_dict is not None:
            yield results_dict
            return

        # No successful parse.
        yield rdf_anomaly.Anomaly(type='PARSER_ANOMALY',
                                  symptom='Unable to determine distribution.')
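The loops above unpack each WEIGHTS entry as (weight, path, handler) while also reading .weight and .path attributes, which suggests a namedtuple-like definition. A small sketch under that assumption (WeightedPath and the sample handlers are hypothetical names, not the real GRR definition):

import collections

WeightedPath = collections.namedtuple("WeightedPath", ["weight", "path", "handler"])

weights = [
    WeightedPath(weight=2, path="/etc/os-release", handler=str.upper),
    WeightedPath(weight=1, path="/etc/lsb-release", handler=str.lower),
]
found_files = {"/etc/lsb-release": "Ubuntu 20.04"}

# Keep only collected files and try them in ascending weight order.
candidates = sorted((w for w in weights if w.path in found_files), key=lambda x: x.weight)
for _, path, handler in candidates:  # tuple unpacking, as in ParseFiles above
    print(path, handler(found_files[path]))  # /etc/lsb-release ubuntu 20.04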
Example #13
 def testExtendAnomalies(self):
   anomaly1 = {
       "finding": ["Adware 2.1.1 is installed"],
       "symptom": "Found: Malicious software.",
       "explanation": "Remove software.",
       "type": "ANALYSIS_ANOMALY"
   }
   anomaly2 = {
       "finding": ["Java 6.0.240 is installed"],
       "symptom": "Found: Old Java installation.",
       "explanation": "Update Java.",
       "type": "ANALYSIS_ANOMALY"
   }
   result = checks.CheckResult(
       check_id="SW-CHECK", anomaly=[rdf_anomaly.Anomaly(**anomaly1)])
   other = checks.CheckResult(
       check_id="SW-CHECK", anomaly=[rdf_anomaly.Anomaly(**anomaly2)])
   result.ExtendAnomalies([other])
   expect = {"check_id": "SW-CHECK", "anomaly": [anomaly1, anomaly2]}
   self.assertDictEqual(expect, result.ToPrimitiveDict())
Example #14
  def setUp(self):
    super(ProcessHostDataTests, self).setUp()
    registered = set(iterkeys(checks.CheckRegistry.checks))
    if "SW-CHECK" not in registered:
      checks.LoadChecksFromFiles([os.path.join(CHECKS_DIR, "sw.yaml")])
    if "SSHD-CHECK" not in registered:
      checks.LoadChecksFromFiles([os.path.join(CHECKS_DIR, "sshd.yaml")])
    self.netcat = checks.CheckResult(
        check_id="SW-CHECK",
        anomaly=[
            rdf_anomaly.Anomaly(
                finding=["netcat-traditional 1.10-40 is installed"],
                symptom="Found: l337 software installed",
                type="ANALYSIS_ANOMALY")
        ])
    self.sshd = checks.CheckResult(
        check_id="SSHD-CHECK",
        anomaly=[
            rdf_anomaly.Anomaly(
                finding=["Configured protocols: 2,1"],
                symptom="Found: Sshd allows protocol 1.",
                type="ANALYSIS_ANOMALY")
        ])
    self.windows = checks.CheckResult(
        check_id="SW-CHECK",
        anomaly=[
            rdf_anomaly.Anomaly(
                finding=["Java 6.0.240 is installed"],
                symptom="Found: Old Java installation.",
                type="ANALYSIS_ANOMALY"),
            rdf_anomaly.Anomaly(
                finding=["Adware 2.1.1 is installed"],
                symptom="Found: Malicious software.",
                type="ANALYSIS_ANOMALY")
        ])

    self.data = {
        "WMIInstalledSoftware": self.SetArtifactData(parsed=GetWMIData()),
        "DebianPackagesStatus": self.SetArtifactData(parsed=GetDPKGData()),
        "SshdConfigFile": self.SetArtifactData(parsed=GetSSHDConfig())
    }
Example #15
 def Parse(self, stat, file_object, knowledge_base):
     """Parse the status file."""
     _, _ = stat, knowledge_base
     try:
         sw_data = file_object.read()
         for pkg in deb822.Packages.iter_paragraphs(sw_data.splitlines()):
             if self.installed_re.match(pkg["Status"]):
                 soft = rdf_client.SoftwarePackage(
                     name=pkg["Package"],
                     description=pkg["Description"],
                     version=pkg["Version"],
                     architecture=pkg["Architecture"],
                     publisher=pkg["Maintainer"],
                     install_state="INSTALLED")
                 yield soft
     except SystemError:
         yield rdf_anomaly.Anomaly(type="PARSER_ANOMALY",
                                   symptom="Invalid dpkg status file")
Example #16
 def Parse(self, cmd, args, stdout, stderr, return_val, time_taken,
           knowledge_base):
   """Parse the rpm -qa output."""
   _ = time_taken, args, knowledge_base  # Unused.
   rpm_re = re.compile(r"^(\w[-\w\+]+?)-(\d.*)$")
   self.CheckReturn(cmd, return_val)
   for line in stdout.splitlines():
     pkg_match = rpm_re.match(line.strip())
     if pkg_match:
       name, version = pkg_match.groups()
       status = rdf_client.SoftwarePackage.InstallState.INSTALLED
       yield rdf_client.SoftwarePackage(
           name=name, version=version, install_state=status)
   for line in stderr.splitlines():
     if "error: rpmdbNextIterator: skipping h#" in line:
       yield rdf_anomaly.Anomaly(
           type="PARSER_ANOMALY", symptom="Broken rpm database.")
       break
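A quick check of the name/version split performed by rpm_re: the non-greedy name group stops at the first "-" that is followed by a digit. Runnable with the standard library only; the package strings are illustrative.

import re

rpm_re = re.compile(r"^(\w[-\w\+]+?)-(\d.*)$")
for line in ["openssl-1.0.2k-19.el7", "python3-libs-3.6.8-10.el7"]:
    name, version = rpm_re.match(line.strip()).groups()
    print(name, version)
# openssl 1.0.2k-19.el7
# python3-libs 3.6.8-10.el7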
Example #17
    def ParseMultiple(self, result_dicts):
        """Parse WMI Event Consumers."""
        for result_dict in result_dicts:
            wmi_dict = result_dict.ToDict()

            try:
                creator_sid_bytes = bytes(wmi_dict["CreatorSID"])
                wmi_dict["CreatorSID"] = BinarySIDtoStringSID(
                    creator_sid_bytes)
            except ValueError:
                # We recover from a corrupt SID by outputting the raw value as a string
                wmi_dict["CreatorSID"] = compatibility.Repr(
                    wmi_dict["CreatorSID"])
            except KeyError:
                pass

            for output_type in self.output_types:
                anomalies = []

                output = rdfvalue.RDFValue.classes[output_type.__name__]()
                for k, v in wmi_dict.items():
                    try:
                        output.Set(k, v)
                    except AttributeError as e:
                        # Skip any attribute we don't know about
                        anomalies.append("Unknown field %s, with value %s" %
                                         (k, v))
                    except ValueError as e:
                        anomalies.append("Invalid value %s for field %s: %s" %
                                         (v, k, e))

                # Yield anomalies first to help with debugging
                if anomalies:
                    yield rdf_anomaly.Anomaly(
                        type="PARSER_ANOMALY",
                        generated_by=self.__class__.__name__,
                        finding=anomalies)

                # Raise if the parser generated no output but there were fields.
                if wmi_dict and not output:
                    raise ValueError(
                        "Non-empty dict %s returned empty output." % wmi_dict)

                yield output
Example #18
    def Parse(self, query, result, knowledge_base):
        """Parse a WMI Event Consumer."""
        _ = query, knowledge_base

        wmi_dict = result.ToDict()

        try:
            wmi_dict["CreatorSID"] = BinarySIDtoStringSID("".join(
                [chr(i).encode("latin-1") for i in wmi_dict["CreatorSID"]]))
        except (ValueError, TypeError) as e:
            # We recover from a corrupt SID by outputting the raw value as a string
            wmi_dict["CreatorSID"] = str(wmi_dict["CreatorSID"])
        except KeyError as e:
            pass

        for output_type in self.output_types:
            anomalies = []

            output = rdfvalue.RDFValue.classes[output_type]()
            for k, v in wmi_dict.iteritems():
                try:
                    output.Set(k, v)
                except AttributeError as e:
                    # Skip any attribute we don't know about
                    anomalies.append("Unknown field %s, with value %s" %
                                     (k, v))
                except ValueError as e:
                    anomalies.append("Invalid value %s for field %s: %s" %
                                     (v, k, e))

            # Yield anomalies first to help with debugging
            if anomalies:
                yield rdf_anomaly.Anomaly(type="PARSER_ANOMALY",
                                          generated_by=self.__class__.__name__,
                                          finding=anomalies)

            # Raise if the parser generated no output but there were fields.
            if wmi_dict and not output:
                raise ValueError("Non-empty dict %s returned empty output." %
                                 wmi_dict)

            yield output
Example #19
    def ParseLines(cls, lines):
        users = set()
        filter_regexes = [
            re.compile(x)
            for x in config.CONFIG["Artifacts.netgroup_filter_regexes"]
        ]
        username_regex = re.compile(cls.USERNAME_REGEX)
        blacklist = config.CONFIG["Artifacts.netgroup_user_blacklist"]
        for index, line in enumerate(lines):
            if line.startswith("#"):
                continue

            splitline = line.split(" ")
            group_name = splitline[0]

            if filter_regexes:
                filter_match = False
                for regex in filter_regexes:
                    if regex.search(group_name):
                        filter_match = True
                        break
                if not filter_match:
                    continue

            for member in splitline[1:]:
                if member.startswith("("):
                    try:
                        _, user, _ = member.split(",")
                        if user not in users and user not in blacklist:
                            if not username_regex.match(user):
                                yield rdf_anomaly.Anomaly(
                                    type="PARSER_ANOMALY",
                                    symptom="Invalid username: %s" % user)
                            else:
                                users.add(user)
                                yield rdf_client.User(
                                    username=utils.SmartUnicode(user))
                    except ValueError:
                        raise parser.ParseError(
                            "Invalid netgroup file at line %d: %s" %
                            (index + 1, line))
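A minimal sketch in plain Python of the member handling above: each netgroup member triple looks like "(host,user,domain)", so splitting on "," leaves the username as the middle element. The sample line is illustrative.

line = "sysadmins (-,alice,) (host1,bob,example.com)"
splitline = line.split(" ")
group_name = splitline[0]
for member in splitline[1:]:
    if member.startswith("("):
        _, user, _ = member.split(",")  # middle element is the username
        print(group_name, user)
# sysadmins alice
# sysadmins bob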
Example #20
  def ParseMultiple(self, stats, unused_file_obj, unused_kb):
    """Identify the init scripts and the start/stop scripts at each runlevel.

    Evaluate all the stat entries collected from the system.
    If the path name matches a runlevel spec and the filename matches a sysv
    init symlink, process the link as a service.

    Args:
      stats: An iterator of StatEntry rdfs.
      unused_file_obj: An iterator of file contents. Not needed as the parser
        only evaluates link attributes.
      unused_kb: Unused KnowledgeBase rdf.

    Yields:
      rdf_anomaly.Anomaly if the startup link seems weird.
      rdf_client.LinuxServiceInformation for each detected service.
    """
    services = {}
    for stat_entry in stats:
      path = stat_entry.pathspec.path
      runlevel = self.runlevel_re.match(os.path.dirname(path))
      runscript = self.runscript_re.match(os.path.basename(path))
      if runlevel and runscript:
        svc = runscript.groupdict()
        service = services.setdefault(
            svc["name"],
            rdf_client.LinuxServiceInformation(
                name=svc["name"], start_mode="INIT"))
        runlvl = GetRunlevelsNonLSB(runlevel.group(1))
        if svc["action"] == "S" and runlvl:
          service.start_on.append(runlvl.pop())
          service.starts = True
        elif runlvl:
          service.stop_on.append(runlvl.pop())
        if not stat.S_ISLNK(int(stat_entry.st_mode)):
          yield rdf_anomaly.Anomaly(
              type="PARSER_ANOMALY",
              finding=[path],
              explanation="Startup script is not a symlink.")
    for svc in itervalues(services):
      yield svc
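A small sanity check (standard library only, POSIX) of the symlink test used above: stat.S_ISLNK() is true for the lstat() mode of the link itself, not for the file it points to. The file names are illustrative.

import os
import stat
import tempfile

with tempfile.TemporaryDirectory() as d:
    target = os.path.join(d, "sshd")           # the real init script
    link = os.path.join(d, "S20sshd")          # the runlevel start link
    open(target, "w").close()
    os.symlink(target, link)
    print(stat.S_ISLNK(os.lstat(link).st_mode))    # True
    print(stat.S_ISLNK(os.lstat(target).st_mode))  # False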
Example #21
    def Parse(self, cmd, args, stdout, stderr, return_val, knowledge_base):
        """Parse the rpm -qa output."""
        _ = args, knowledge_base  # Unused.
        rpm_re = re.compile(r"^(\w[-\w\+]+?)-(\d.*)$")
        self.CheckReturn(cmd, return_val)
        packages = []
        for line in stdout.decode("utf-8").splitlines():
            pkg_match = rpm_re.match(line.strip())
            if pkg_match:
                name, version = pkg_match.groups()
                packages.append(
                    rdf_client.SoftwarePackage.Installed(name=name,
                                                         version=version))
        if packages:
            yield rdf_client.SoftwarePackages(packages=packages)

        for line in stderr.decode("utf-8").splitlines():
            if "error: rpmdbNextIterator: skipping h#" in line:
                yield rdf_anomaly.Anomaly(type="PARSER_ANOMALY",
                                          symptom="Broken rpm database.")
                break
Example #22
    def Parse(self, stat, file_object, knowledge_base):
        """Parse the status file."""
        _, _ = stat, knowledge_base

        packages = []
        sw_data = utils.ReadFileBytesAsUnicode(file_object)
        try:
            for pkg in self._deb822.Packages.iter_paragraphs(
                    sw_data.splitlines()):
                if self.installed_re.match(pkg["Status"]):
                    packages.append(
                        rdf_client.SoftwarePackage(
                            name=pkg["Package"],
                            description=pkg["Description"],
                            version=pkg["Version"],
                            architecture=pkg["Architecture"],
                            publisher=pkg["Maintainer"],
                            install_state="INSTALLED"))
        except SystemError:
            yield rdf_anomaly.Anomaly(type="PARSER_ANOMALY",
                                      symptom="Invalid dpkg status file")
        finally:
            if packages:
                yield rdf_client.SoftwarePackages(packages=packages)
Example #23
 def _Anomaly(self, msg, found):
     return rdf_anomaly.Anomaly(type="PARSER_ANOMALY",
                                symptom=msg,
                                finding=found)
 def testAnomaly(self):
     """Test the knowledge base stays uninitialized if an anomaly is returned."""
     self.request = self.InitializeRequest()
     self.response = rdf_anomaly.Anomaly()
     knowledge_base = self.GetUpdatedKnowledgeBase()
     self.assertEqual(knowledge_base, rdf_client.KnowledgeBase())