Exemplo n.º 1
0
    def testDownloadCollectionWithFlattenOption(self):
        """Verify flattened downloads create a symlinked "files" directory."""
        # Build a collection holding three different kinds of file reference.
        collection = sequential_collection.GeneralIndexedCollection(
            self.collection_urn)
        with data_store.DB.GetMutationPool() as pool:
            collection.Add(
                rdfvalue.RDFURN(self.out.Add("testfile1")), mutation_pool=pool)
            collection.Add(
                rdf_client.StatEntry(
                    pathspec=rdf_paths.PathSpec(
                        path="testfile2", pathtype="OS")),
                mutation_pool=pool)
            collection.Add(
                rdf_file_finder.FileFinderResult(
                    stat_entry=rdf_client.StatEntry(
                        pathspec=rdf_paths.PathSpec(
                            path="testfile5", pathtype="OS"))),
                mutation_pool=pool)

        with utils.TempDirectory() as tmpdir:
            export_utils.DownloadCollection(self.collection_urn,
                                            tmpdir,
                                            overwrite=True,
                                            dump_client_info=True,
                                            flatten=True,
                                            token=self.token,
                                            max_threads=2)

            # The "files" folder should hold one symlink per collection item.
            files_dir = os.path.join(tmpdir, "files")
            symlinks = sorted(os.listdir(files_dir))
            self.assertEqual(len(symlinks), 3)
            self.assertListEqual(symlinks, [
                "C.1000000000000000_fs_os_testfile1",
                "C.1000000000000000_fs_os_testfile2",
                "C.1000000000000000_fs_os_testfile5"
            ])
            # Each symlink must point at the corresponding downloaded file.
            link = os.path.join(files_dir,
                                "C.1000000000000000_fs_os_testfile1")
            target = os.path.join(tmpdir, "C.1000000000000000", "fs", "os",
                                  "testfile1")
            self.assertEqual(os.readlink(link), target)
Exemplo n.º 2
0
    def testHuntAuthorizationIsRequiredToGenerateResultsArchive(self):
        """Downloading hunt results without approval must prompt for one."""
        pathspec = rdf_paths.PathSpec(
            path="/foo/bar", pathtype=rdf_paths.PathSpec.PathType.OS)
        hunt_results = [
            rdf_file_finder.FileFinderResult(
                stat_entry=rdf_client.StatEntry(pathspec=pathspec))
        ]
        self.CreateGenericHuntWithCollection(values=hunt_results)

        # Navigate to the hunt's results and try to generate the archive.
        self.Open("/")
        for selector in ("css=a[grrtarget=hunts]",
                         "css=td:contains('GenericHunt')",
                         "css=li[heading=Results]",
                         "css=button.DownloadButton"):
            self.Click(selector)

        self.WaitUntil(self.IsTextPresent, "Create a new approval request")
Exemplo n.º 3
0
 def _GenFiles(self, passwd, shadow, group, gshadow):
     """Build parallel lists of stat entries and in-memory file objects."""
     paths = ["/etc/passwd", "/etc/shadow", "/etc/group", "/etc/gshadow"]
     stats = [
         rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(path=path))
         for path in paths
     ]
     files = []
     for data in (passwd, shadow, group, gshadow):
         # A missing section (None) is treated as an empty file.
         entries = [] if data is None else data
         lines = "\n".join(entries).format(**self.crypt)
         files.append(StringIO.StringIO(lines))
     return stats, files
Exemplo n.º 4
0
    def Stat(self, responses):
        """Record the directory's stat entry and derive its AFF4 URN."""
        if not responses.success:
            # The client could not stat the directory; report and stop here.
            self.Error("Could not stat directory: %s" % responses.status)
            return

        # Keep the stat response around for later processing.
        stat_entry = rdf_client.StatEntry(responses.First())
        self.state.stat = stat_entry

        # The full path of the object is the combination of the client_id
        # and the path.
        self.state.urn = stat_entry.pathspec.AFF4Path(self.client_id)
Exemplo n.º 5
0
    def testPackageSourceData(self):
        """The yum parser should extract every baseurl variant."""
        test_data = r"""
    # comment 1
    [centosdvdiso]
    name=CentOS DVD ISO
    baseurl=file:///mnt
    http://mirror1.centos.org/CentOS/6/os/i386/
    baseurl =ssh://mirror2.centos.org/CentOS/6/os/i386/
    enabled=1
    gpgcheck=1
    gpgkey=file:///mnt/RPM-GPG-KEY-CentOS-6

    # comment2
    [examplerepo]
    name=Example Repository
    baseurl = https://mirror3.centos.org/CentOS/6/os/i386/
    enabled=1
    gpgcheck=1
    gpgkey=http://mirror.centos.org/CentOS/6/os/i386/RPM-GPG-KEY-CentOS-6

    """
        stat = rdf_client.StatEntry(
            pathspec=rdf_paths.PathSpec(path="/etc/yum.repos.d/test1.repo"))
        parser = config_file.YumPackageSourceParser()
        results = list(
            parser.Parse(stat, StringIO.StringIO(test_data), None))

        result = [
            d for d in results if isinstance(d, rdf_protodict.AttributedDict)
        ][0]

        self.assertEqual("/etc/yum.repos.d/test1.repo", result.filename)
        self.assertEqual(4, len(result.uris))

        # Each tuple is (transport, host, path) for the corresponding uri.
        expected_uris = [
            ("file", "", "/mnt"),
            ("http", "mirror1.centos.org", "/CentOS/6/os/i386/"),
            ("ssh", "mirror2.centos.org", "/CentOS/6/os/i386/"),
            ("https", "mirror3.centos.org", "/CentOS/6/os/i386/"),
        ]
        for uri, (transport, host, path) in zip(result.uris, expected_uris):
            self.assertEqual(transport, uri.transport)
            self.assertEqual(host, uri.host)
            self.assertEqual(path, uri.path)
Exemplo n.º 6
0
  def testStatEntryFromSimpleFile(self):
    """Migrating a file with a STAT attribute preserves its stat entry."""
    client_urn = self.SetupClient(0)

    # Write a stat entry into the AFF4 representation of fs/os/foo.
    with self._Aff4Open(client_urn.Add("fs/os").Add("foo")) as fd:
      fd.Set(fd.Schema.STAT, rdf_client.StatEntry(st_mode=1337, st_size=42))

    data_migration.MigrateClientVfs(client_urn)

    # The relational database must now expose the same stat values.
    path_info = data_store.REL_DB.ReadPathInfo(
        client_id=client_urn.Basename(),
        path_type=rdf_objects.PathInfo.PathType.OS,
        components=("foo",))
    self.assertEqual(path_info.stat_entry.st_mode, 1337)
    self.assertEqual(path_info.stat_entry.st_size, 42)
Exemplo n.º 7
0
    def _FakeDirStat(self, vfs_type=None):
        """Return a canned directory StatEntry for the current path.

        We return some fake data; this makes writing tests easier for some
        things, but we warn the tester as it is often not what you want.

        Args:
          vfs_type: Optional VFS type forwarded to the path-case normalizer.

        Returns:
          An `rdf_client.StatEntry` with fixed stat values and the
          case-normalized pathspec.
        """
        # `logging.warn` is a deprecated alias; `logging.warning` is the
        # documented API.
        logging.warning("Fake value for %s under %s", self.path, self.prefix)

        # Normalize the case of every component in the pathspec chain.
        for path in self.pathspec:
            path.path = self._NormalizeCaseForPath(self.path,
                                                   vfs_type=vfs_type)

        return rdf_client.StatEntry(pathspec=self.pathspec,
                                    st_mode=16877,
                                    st_size=12288,
                                    st_atime=1319796280,
                                    st_dev=1)
Exemplo n.º 8
0
  def _MakeRegStat(self, path, value, registry_type):
    """Build a registry StatEntry for the given key path and value."""
    pathspec = rdf_paths.PathSpec(
        path=path,
        path_options=rdf_paths.PathSpec.Options.CASE_LITERAL,
        pathtype=rdf_paths.PathSpec.PathType.REGISTRY)

    # REG_MULTI_SZ values are wrapped in a BlobArray; everything else is
    # stored directly in a DataBlob.
    if registry_type == rdf_client.StatEntry.RegistryType.REG_MULTI_SZ:
      blob = rdf_protodict.DataBlob(string=value)
      reg_data = rdf_protodict.DataBlob(
          list=rdf_protodict.BlobArray(content=blob))
    else:
      reg_data = rdf_protodict.DataBlob().SetValue(value)

    return rdf_client.StatEntry(
        pathspec=pathspec, registry_data=reg_data, registry_type=registry_type)
Exemplo n.º 9
0
  def testShowsGenerateArchiveButtonForFileFinderHunt(self):
    """The results view should offer an archive for file-based results."""
    pathspec = rdf_paths.PathSpec(
        path="/foo/bar", pathtype=rdf_paths.PathSpec.PathType.OS)
    hunt_results = [
        rdf_file_finder.FileFinderResult(
            stat_entry=rdf_client.StatEntry(pathspec=pathspec))
    ]
    self.CreateGenericHuntWithCollection(values=hunt_results)

    # Navigate to the hunt's Results tab.
    self.Open("/")
    self.Click("css=a[grrtarget=hunts]")
    self.Click("css=td:contains('GenericHunt')")
    self.Click("css=li[heading=Results]")

    self.WaitUntil(self.IsTextPresent,
                   "Files referenced in this collection can be downloaded")
Exemplo n.º 10
0
 def testDownloadHuntResultCollection(self):
     """Check we can download files referenced in a HuntResultCollection."""
     # Payloads exercising every supported result wrapper type.
     payloads = [
         rdfvalue.RDFURN(self.out.Add("testfile1")),
         rdf_client.StatEntry(
             pathspec=rdf_paths.PathSpec(path="testfile2", pathtype="OS")),
         rdf_file_finder.FileFinderResult(
             stat_entry=rdf_client.StatEntry(
                 pathspec=rdf_paths.PathSpec(
                     path="testfile5", pathtype="OS"))),
         collectors.ArtifactFilesDownloaderResult(
             downloaded_file=rdf_client.StatEntry(
                 pathspec=rdf_paths.PathSpec(
                     path="testfile6", pathtype="OS"))),
     ]
     collection = results.HuntResultCollection(self.collection_urn)
     with data_store.DB.GetMutationPool() as pool:
         for payload in payloads:
             collection.AddAsMessage(payload, self.client_id,
                                     mutation_pool=pool)
     self._VerifyDownload()
Exemplo n.º 11
0
    def testShowsPerFileDownloadButtonForFileFinderHunt(self):
        """Every file result row should expose a per-file download button."""
        pathspec = rdf_paths.PathSpec(
            path="/foo/bar", pathtype=rdf_paths.PathSpec.PathType.OS)
        hunt_results = [
            rdf_file_finder.FileFinderResult(
                stat_entry=rdf_client.StatEntry(pathspec=pathspec))
        ]
        self.CreateGenericHuntWithCollection(values=hunt_results)

        # Navigate to the hunt's Results tab.
        self.Open("/")
        self.Click("css=a[grrtarget=hunts]")
        self.Click("css=td:contains('GenericHunt')")
        self.Click("css=li[heading=Results]")

        self.WaitUntil(
            self.IsElementPresent,
            "css=grr-results-collection button:has(span.glyphicon-download)")
Exemplo n.º 12
0
    def testBigQueryPluginWithValuesOfSameType(self):
        responses = []
        for i in range(10):
            responses.append(
                rdf_client.StatEntry(
                    pathspec=rdf_paths.PathSpec(path="/foo/bar/%d" % i,
                                                pathtype="OS"),
                    st_mode=33184,  # octal = 100640 => u=rw,g=r,o= => -rw-r-----
                    st_ino=1063090,
                    st_dev=64512L,
                    st_nlink=1 + i,
                    st_uid=139592,
                    st_gid=5000,
                    st_size=0,
                    st_atime=1336469177,
                    st_mtime=1336129892,
                    st_ctime=1336129892))

        output = self.ProcessResponses(
            plugin_args=bigquery_plugin.BigQueryOutputPluginArgs(),
            responses=responses)

        self.assertEqual(len(output), 1)
        _, stream, schema, job_id = output[0]

        self.assertEqual(job_id,
                         "C-1000000000000000_Results_ExportedFile_1445995873")

        self.CompareSchemaToKnownGood(schema)

        actual_fd = gzip.GzipFile(
            None, "r",
            bigquery_plugin.BigQueryOutputPlugin.GZIP_COMPRESSION_LEVEL,
            stream)

        # Compare to our stored data.
        expected_fd = open(
            os.path.join(config.CONFIG["Test.data_dir"], "bigquery",
                         "ExportedFile.json"), "rb")

        # Bigquery expects a newline separarted list of JSON dicts, but this isn't
        # valid JSON so we can't just load the whole thing and compare.
        counter = 0
        for actual, expected in zip(actual_fd, expected_fd):
            self.assertEqual(json.loads(actual), json.loads(expected))
            counter += 1

        self.assertEqual(counter, 10)
Exemplo n.º 13
0
  def testExportCommandIsShownForStatEntryResults(self):
    """The UI should display a ready-to-run API export shell command."""
    stat_entry = rdf_client.StatEntry(
        pathspec=rdf_paths.PathSpec(
            path="/foo/bar", pathtype=rdf_paths.PathSpec.PathType.OS))
    hunt_urn = self.CreateGenericHuntWithCollection(
        values=[rdf_file_finder.FileFinderResult(stat_entry=stat_entry)])

    self.Open("/#/hunts/%s/results" % hunt_urn.Basename())
    self.Click("link=Show export command")

    # The suggested command embeds the hunt id (":" mapped to "_" in the
    # output file name).
    hunt_id = hunt_urn.Basename()
    expected_text = (
        "/usr/bin/grr_api_shell 'http://localhost:8000/' "
        "--exec_code 'grrapi.Hunt(\"%s\").GetFilesArchive()."
        "WriteToFile(\"./hunt_results_%s.zip\")'" %
        (hunt_id, hunt_id.replace(":", "_")))
    self.WaitUntil(self.IsTextPresent, expected_text)
Exemplo n.º 14
0
 def _GenFileData(cls, paths, data, stats=None, files=None, modes=None):
     """Generate a tuple of list of stats and list of file contents."""
     stats = stats if stats is not None else []
     files = files if files is not None else []
     modes = modes if modes is not None else {}
     # Fill in default ownership/permissions for any unspecified field
     # (note: intentionally mutates a caller-supplied `modes` dict).
     for key, default in (("st_uid", 0), ("st_gid", 0),
                          ("st_mode", 0o0100644)):
         modes.setdefault(key, default)
     for path in paths:
         pathspec = rdf_paths.PathSpec(path=path, pathtype="OS")
         stats.append(rdf_client.StatEntry(pathspec=pathspec, **modes))
     for val in data:
         files.append(StringIO.StringIO(val))
     return stats, files
Exemplo n.º 15
0
    def testYamlPluginWithValuesOfMultipleTypes(self):
        """Mixed-type values produce one YAML file per exported type."""
        zip_fd, prefix = self.ProcessValuesToZip({
            rdf_client.StatEntry: [
                rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
                    path="/foo/bar", pathtype="OS"))
            ],
            rdf_client.Process: [rdf_client.Process(pid=42)]
        })

        manifest_name = "%s/MANIFEST" % prefix
        stat_yaml = "%s/ExportedFile/from_StatEntry.yaml" % prefix
        process_yaml = "%s/ExportedProcess/from_Process.yaml" % prefix
        self.assertEqual(
            set(zip_fd.namelist()), {manifest_name, stat_yaml, process_yaml})

        # The manifest records one exported value per type.
        parsed_manifest = yaml.load(zip_fd.read(manifest_name))
        self.assertEqual(
            parsed_manifest, {
                "export_stats": {
                    "StatEntry": {
                        "ExportedFile": 1
                    },
                    "Process": {
                        "ExportedProcess": 1
                    }
                }
            })

        parsed_output = yaml.load(zip_fd.read(stat_yaml))
        self.assertEqual(len(parsed_output), 1)
        entry = parsed_output[0]

        # Only the client_urn is filled in by the plugin. Doing lookups for
        # all the clients metadata is possible but expensive. It doesn't seem to
        # be worth it.
        self.assertEqual(entry["metadata"]["client_urn"], str(self.client_id))
        self.assertEqual(entry["metadata"]["source_urn"],
                         str(self.results_urn))
        self.assertEqual(entry["urn"], self.client_id.Add("/fs/os/foo/bar"))

        parsed_output = yaml.load(zip_fd.read(process_yaml))
        self.assertEqual(len(parsed_output), 1)
        self.assertEqual(parsed_output[0]["pid"], "42")
Exemplo n.º 16
0
    def ProcessListDirectory(self, responses):
        """Processes the results of the ListDirectory client action.

    Args:
      responses: a flow Responses object.

    Raises:
      FlowError: if the client action did not succeed.
    """
        if not responses.success:
            raise flow.FlowError("Unable to list directory.")

        # Persist one AFF4 object per returned entry and forward its stat.
        with data_store.DB.GetMutationPool() as pool:
            for response in responses:
                entry = rdf_client.StatEntry(response)
                filesystem.CreateAFF4Object(
                    entry, self.client_id, pool, token=self.token)
                self.SendReply(entry)
Exemplo n.º 17
0
  def testGenerateZipButtonGetsDisabledAfterClick(self):
    """Starting archive generation must disable the download button."""
    stat_entry = rdf_client.StatEntry(
        pathspec=rdf_paths.PathSpec(
            path="/foo/bar", pathtype=rdf_paths.PathSpec.PathType.OS))
    hunt_urn = self.CreateGenericHuntWithCollection(
        values=[rdf_file_finder.FileFinderResult(stat_entry=stat_entry)])
    self.RequestAndGrantHuntApproval(hunt_urn.Basename())

    # Navigate to the results tab and kick off archive generation.
    self.Open("/")
    self.Click("css=a[grrtarget=hunts]")
    self.Click("css=td:contains('GenericHunt')")
    self.Click("css=li[heading=Results]")
    self.Click("css=button.DownloadButton")

    self.WaitUntil(self.IsElementPresent, "css=button.DownloadButton[disabled]")
    self.WaitUntil(self.IsTextPresent, "Generation has started")
Exemplo n.º 18
0
    def testFromStatEntryMetadata(self):
        """PathInfo.FromStatEntry must carry over path, type and stat fields."""
        stat_entry = rdf_client.StatEntry()
        stat_entry.pathspec.path = "foo/bar"
        stat_entry.pathspec.pathtype = rdf_paths.PathSpec.PathType.OS

        # Use gettempdir(): `tempfile.tempdir` is documented to be None
        # unless explicitly set, which would make os.stat() fail.
        stat_obj = os.stat(tempfile.gettempdir())
        stat_entry.st_mode = stat_obj.st_mode
        stat_entry.st_ino = stat_obj.st_ino
        stat_entry.st_dev = stat_obj.st_dev

        path_info = rdf_objects.PathInfo.FromStatEntry(stat_entry)
        self.assertEqual(path_info.path_type, rdf_objects.PathInfo.PathType.OS)
        self.assertEqual(path_info.components, ["foo", "bar"])
        # The temp dir is a directory, so the PathInfo must be marked as one.
        self.assertTrue(path_info.directory)
        self.assertEqual(path_info.stat_entry.st_mode, stat_obj.st_mode)
        self.assertEqual(path_info.stat_entry.st_ino, stat_obj.st_ino)
        self.assertEqual(path_info.stat_entry.st_dev, stat_obj.st_dev)
Exemplo n.º 19
0
  def testStatFromTree(self):
    """Only the leaf of a migrated tree should carry the stat entry."""
    client_urn = self.SetupClient(0)

    with self._Aff4Open(client_urn.Add("fs/os").Add("foo/bar/baz")) as fd:
      fd.Set(fd.Schema.STAT, rdf_client.StatEntry(st_mtime=101))

    data_migration.MigrateClientVfs(client_urn)

    components = [("foo",), ("foo", "bar"), ("foo", "bar", "baz")]
    path_infos = data_store.REL_DB.ReadPathInfos(
        client_id=client_urn.Basename(),
        path_type=rdf_objects.PathInfo.PathType.OS,
        components_list=components)

    # Intermediate directories get no stat; the leaf keeps its st_mtime.
    self.assertEqual(path_infos[("foo",)].stat_entry.st_mtime, None)
    self.assertEqual(path_infos[("foo", "bar")].stat_entry.st_mtime, None)
    self.assertEqual(path_infos[("foo", "bar", "baz")].stat_entry.st_mtime, 101)
Exemplo n.º 20
0
    def testYamlPluginWritesMoreThanOneBatchOfRowsCorrectly(self):
        """All rows must survive batching across multiple row batches."""
        num_rows = self.__class__.plugin_cls.ROW_BATCH * 2 + 1

        # Enough responses to force more than two write batches.
        responses = [
            rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
                path="/foo/bar/%d" % i, pathtype="OS"))
            for i in range(num_rows)
        ]

        zip_fd, prefix = self.ProcessValuesToZip(
            {rdf_client.StatEntry: responses})
        parsed_output = yaml.load(
            zip_fd.open("%s/ExportedFile/from_StatEntry.yaml" % prefix))
        self.assertEqual(len(parsed_output), num_rows)
        # Rows must come out in the same order they went in.
        for i, row in enumerate(parsed_output):
            self.assertEqual(row["urn"],
                             self.client_id.Add("/fs/os/foo/bar/%d" % i))
Exemplo n.º 21
0
    def ProcessFileStats(self, responses):
        """Extract DataBlob from Stat response."""
        if not responses.success:
            return

        # Directory names that identify a Windows system root.
        system_root_paths = ["Windows", "WinNT", "WINNT35", "WTSRV", "WINDOWS"]
        for response in responses:
            # NOTE(review): the slicing assumes paths shaped like "/C:/Windows"
            # — [4:] strips the "/X:/" drive prefix and [1:3] extracts the
            # "X:" drive letter. Confirm single-letter drive specs upstream.
            if response.pathspec.path[4:] in system_root_paths:
                systemdrive = response.pathspec.path[1:3]
                systemroot = "%s\\%s" % (systemdrive,
                                         response.pathspec.path[4:])

                # Put the data back into the original format expected for the artifact
                data = rdf_protodict.DataBlob().SetValue(systemroot)
                self.SendReply(rdf_client.StatEntry(registry_data=data))
                self.state.success = True
                # Only the first matching response is reported.
                break
Exemplo n.º 22
0
    def _ParsePCIDeviceTestData(self, test_data):
        """Given test_data dictionary, parse it using PCIDevicesInfoParser."""
        parser = linux_file_parser.PCIDevicesInfoParser()

        # Build the parallel stats/file_objs/kb_objs lists the parser needs.
        stats = []
        file_objs = []
        kb_objs = []
        for filename, data in test_data.items():
            stats.append(
                rdf_client.StatEntry(
                    pathspec=rdf_paths.PathSpec(path=filename, pathtype="OS")))
            file_objs.append(StringIO.StringIO(data))
            kb_objs.append(None)

        return list(parser.ParseMultiple(stats, file_objs, kb_objs))
Exemplo n.º 23
0
    def testYamlPluginWritesUnicodeValuesCorrectly(self):
        """Non-ASCII paths must round-trip through the YAML export."""
        zip_fd, prefix = self.ProcessValuesToZip({
            rdf_client.StatEntry: [
                rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
                    path="/中国新闻网新闻中", pathtype="OS"))
            ]
        })

        manifest_name = "%s/MANIFEST" % prefix
        output_name = "%s/ExportedFile/from_StatEntry.yaml" % prefix
        self.assertEqual(set(zip_fd.namelist()), {manifest_name, output_name})

        parsed_output = yaml.load(zip_fd.open(output_name))

        self.assertEqual(len(parsed_output), 1)
        self.assertEqual(parsed_output[0]["urn"],
                         self.client_id.Add("/fs/os/中国新闻网新闻中"))
Exemplo n.º 24
0
    def testOSXLaunchdPlistParser(self):
        """Both XML and binary launchd plists parse to the same service."""
        parser = osx_file_parser.OSXLaunchdPlistParser()
        plists = ["com.google.code.grr.plist", "com.google.code.grr.bplist"]
        results = []
        for plist in plists:
            path = os.path.join(self.base_path, "parser_test", plist)
            stat = rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
                path=path, pathtype=rdf_paths.PathSpec.PathType.OS),
                                        st_mode=16877)
            # Close each plist file deterministically instead of leaking the
            # handle (the original never closed it).
            with open(path, "rb") as plist_file:
                results.extend(list(parser.Parse(stat, plist_file, None)))

        for result in results:
            self.assertEqual(result.Label, "com.google.code.grr")
            self.assertItemsEqual(result.ProgramArguments, [
                "/usr/lib/grr/grr_3.0.0.5_amd64/grr",
                "--config=/usr/lib/grr/grr_3.0.0.5_amd64/grr.yaml"
            ])
Exemplo n.º 25
0
  def testStatAndHashEntryFromSimpleFile(self):
    """Migration must preserve both the stat and hash attributes."""
    client_urn = self.SetupClient(0)

    # Store a stat entry and a hash entry on the same AFF4 file.
    with self._Aff4Open(client_urn.Add("fs/os").Add("foo")) as fd:
      fd.Set(fd.Schema.STAT, rdf_client.StatEntry(st_mode=108))
      fd.Set(fd.Schema.HASH, rdf_crypto.Hash(sha256=b"quux"))

    data_migration.MigrateClientVfs(client_urn)

    path_info = data_store.REL_DB.ReadPathInfo(
        client_id=client_urn.Basename(),
        path_type=rdf_objects.PathInfo.PathType.OS,
        components=("foo",))
    self.assertEqual(path_info.stat_entry.st_mode, 108)
    self.assertEqual(path_info.hash_entry.sha256, b"quux")
Exemplo n.º 26
0
def StatEntryFromPath(path, pathspec, ext_attrs=True):
  """Builds a stat entry object from a given path.

  Args:
    path: A path (string value) to stat.
    pathspec: A `PathSpec` corresponding to the `path`.
    ext_attrs: Whether to include extended file attributes in the result.

  Returns:
    `StatEntry` object.
  """
  try:
    stat = utils.Stat(path)
  except (IOError, OSError) as error:
    logging.error("Failed to obtain stat for '%s': %s", pathspec, error)
  else:
    return StatEntryFromStat(stat, pathspec, ext_attrs=ext_attrs)

  # Statting failed — fall back to an entry carrying only the pathspec.
  return rdf_client.StatEntry(pathspec=pathspec)
Exemplo n.º 27
0
    def _InitializeFiles(self, hashing=False):
        """Create two test files (one with a unicode name) and their stats."""
        basename = self.client_id.Basename()

        path1 = self.client_id.Add("fs/os/foo/bar/hello1.txt")
        archive_path1 = (u"test_prefix/%s/fs/os/foo/bar/hello1.txt" % basename)
        self._CreateFile(path=path1, content="hello1", hashing=hashing)

        path2 = self.client_id.Add(u"fs/os/foo/bar/中国新闻网新闻中.txt")
        archive_path2 = (u"test_prefix/%s/fs/os/foo/bar/"
                         u"中国新闻网新闻中.txt") % basename
        self._CreateFile(path=path2, content="hello2", hashing=hashing)

        self.paths = [path1, path2]
        self.archive_paths = [archive_path1, archive_path2]
        # Stat entries reference the client-relative "foo/bar/<name>" paths.
        self.stat_entries = [
            rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
                path="foo/bar/" + str(path).split("/")[-1],
                pathtype=rdf_paths.PathSpec.PathType.OS))
            for path in self.paths
        ]
Exemplo n.º 28
0
    def testParse(self):
        """End-to-end check of the Windows persistence mechanisms parser."""
        parser = windows_persistence.WindowsPersistenceMechanismsParser()
        path = (r"HKEY_LOCAL_MACHINE\Software\Microsoft\Windows\CurrentVersion"
                r"\Run\test")
        pathspec = rdf_paths.PathSpec(
            path=path, pathtype=rdf_paths.PathSpec.PathType.REGISTRY)
        # A Run-key registry value whose data carries a trailing "/v" argument.
        reg_data = "C:\\blah\\some.exe /v"
        reg_type = rdf_client.StatEntry.RegistryType.REG_SZ
        stat = rdf_client.StatEntry(
            pathspec=pathspec,
            registry_type=reg_type,
            registry_data=rdf_protodict.DataBlob(string=reg_data))

        persistence = [stat]
        # Service image paths in three common on-disk notations.
        image_paths = [
            "system32\\drivers\\ACPI.sys",
            "%systemroot%\\system32\\svchost.exe -k netsvcs",
            "\\SystemRoot\\system32\\drivers\\acpipmi.sys"
        ]
        reg_key = "HKEY_LOCAL_MACHINE/SYSTEM/CurrentControlSet/services/AcpiPmi"
        for path in image_paths:
            serv_info = rdf_client.WindowsServiceInformation(
                name="blah",
                display_name="GRRservice",
                image_path=path,
                registry_key=reg_key)
            persistence.append(serv_info)

        knowledge_base = rdf_client.KnowledgeBase()
        knowledge_base.environ_systemroot = "C:\\Windows"

        # Expected normalized paths; order matches `persistence` above
        # (registry value first, then the three services).
        expected = [
            "C:\\blah\\some.exe", "C:\\Windows\\system32\\drivers\\ACPI.sys",
            "C:\\Windows\\system32\\svchost.exe",
            "C:\\Windows\\system32\\drivers\\acpipmi.sys"
        ]

        for index, item in enumerate(persistence):
            results = list(
                parser.Parse(item, knowledge_base,
                             rdf_paths.PathSpec.PathType.OS))
            # Each input yields exactly one result with the normalized path.
            self.assertEqual(results[0].pathspec.path, expected[index])
            self.assertEqual(len(results), 1)
Exemplo n.º 29
0
    def testBigQueryPluginWithValuesOfMultipleTypes(self):
        """Distinct value types go to distinct BigQuery output streams."""
        responses = [
            rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
                path="/中国新闻网新闻中", pathtype="OS")),
            rdf_client.Process(pid=42)
        ]
        output = self.ProcessResponses(
            plugin_args=bigquery_plugin.BigQueryOutputPluginArgs(),
            responses=responses,
            process_responses_separately=True)

        # Should have two separate output streams for the two types
        self.assertEqual(len(output), 2)

        expected_job_ids = [
            "C-1000000000000000_Results_ExportedFile_1445995873",
            "C-1000000000000000_Results_ExportedProcess_1445995873"
        ]
        for name, stream, _, job_id in output:
            self.assertTrue(job_id in expected_job_ids)
            self._parseOutput(name, stream)
Exemplo n.º 30
0
class SqliteInstantOutputPluginTest(test_plugins.InstantOutputPluginTestBase):
    """Tests the SQLite instant output plugin."""

    # Plugin class exercised by the shared base-class test machinery.
    plugin_cls = sqlite_plugin.SqliteInstantOutputPlugin

    # Ten StatEntry fixtures differing only in path and st_nlink.
    STAT_ENTRY_RESPONSES = [
        rdf_client.StatEntry(
            pathspec=rdf_paths.PathSpec(path="/foo/bar/%d" % i, pathtype="OS"),
            st_mode=33184,  # octal = 100640 => u=rw,g=r,o= => -rw-r-----
            st_ino=1063090,
            st_dev=64512L,
            st_nlink=1 + i,
            st_uid=139592,
            st_gid=5000,
            st_size=0,
            st_atime=1493596800,  # Midnight, 01.05.2017 UTC in seconds
            st_mtime=1493683200,  # Midnight, 02.05.2017 UTC in seconds
            st_ctime=1493683200) for i in range(10)
    ]