Example #1
    def CreateFileWithTimeline(client_id, path, path_type, token):
        """Add a file with timeline."""

        # Add a version of the file at gui_test_lib.TIME_0. Since we write all MAC
        # times, this will result in three timeline items.
        stat_entry = rdf_client.StatEntry()
        stat_entry.pathspec.path = path
        stat_entry.pathspec.pathtype = path_type
        stat_entry.st_atime = gui_test_lib.TIME_0.AsSecondsSinceEpoch() + 1000
        stat_entry.st_mtime = gui_test_lib.TIME_0.AsSecondsSinceEpoch()
        stat_entry.st_ctime = gui_test_lib.TIME_0.AsSecondsSinceEpoch() - 1000

        with test_lib.FakeTime(gui_test_lib.TIME_0):
            filesystem.WriteStatEntries([stat_entry],
                                        client_id.Basename(),
                                        mutation_pool=None,
                                        token=token)

        # Add a version with a stat entry, but without timestamps.
        stat_entry = rdf_client.StatEntry()
        stat_entry.pathspec.path = path
        stat_entry.pathspec.pathtype = path_type
        stat_entry.st_ino = 99

        with test_lib.FakeTime(gui_test_lib.TIME_1):
            filesystem.WriteStatEntries([stat_entry],
                                        client_id.Basename(),
                                        mutation_pool=None,
                                        token=token)
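
A test would then call the helper once per fixture file; a minimal sketch, assuming client_id and token come from the test harness (the path and path type values here are hypothetical):

    CreateFileWithTimeline(client_id, "/tmp/foo/bar",
                           rdf_paths.PathSpec.PathType.OS, token)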
Example #2
    def testMultiWriteHistoryTwoPathTypes(self):
        client_id = self.InitializeClient()

        path_info_1 = rdf_objects.PathInfo.OS(components=["foo"])
        path_info_1.timestamp = rdfvalue.RDFDatetime.FromHumanReadable(
            "1999-01-01")

        path_info_2 = rdf_objects.PathInfo.TSK(components=["bar"])
        path_info_2.timestamp = rdfvalue.RDFDatetime.FromHumanReadable(
            "1988-01-01")

        self.db.WritePathInfos(client_id, [path_info_1, path_info_2])

        stat_entries = {
            path_info_1: rdf_client.StatEntry(st_mode=1337),
            path_info_2: rdf_client.StatEntry(st_mode=108),
        }
        self.db.MultiWritePathHistory(client_id, stat_entries, {})

        path_info = self.db.ReadPathInfo(client_id,
                                         rdf_objects.PathInfo.PathType.OS,
                                         components=("foo", ))
        self.assertEqual(path_info.stat_entry.st_mode, 1337)
        self.assertEqual(path_info.last_stat_entry_timestamp,
                         rdfvalue.RDFDatetime.FromHumanReadable("1999-01-01"))

        path_info = self.db.ReadPathInfo(client_id,
                                         rdf_objects.PathInfo.PathType.TSK,
                                         components=("bar", ))
        self.assertEqual(path_info.stat_entry.st_mode, 108)
        self.assertEqual(path_info.last_stat_entry_timestamp,
                         rdfvalue.RDFDatetime.FromHumanReadable("1988-01-01"))
Example #3
    def testDownloadCollectionWithFoldersEntries(self):
        """Check we can download a collection that also references folders."""
        fd = sequential_collection.GeneralIndexedCollection(
            self.collection_urn)
        with data_store.DB.GetMutationPool() as pool:
            fd.Add(rdf_file_finder.FileFinderResult(
                stat_entry=rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
                    path="testfile5", pathtype="OS"))),
                   mutation_pool=pool)
            fd.Add(rdf_file_finder.FileFinderResult(
                stat_entry=rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
                    path="testdir1", pathtype="OS"),
                                                st_mode=stat.S_IFDIR)),
                   mutation_pool=pool)

        with utils.TempDirectory() as tmpdir:
            export_utils.DownloadCollection(self.collection_urn,
                                            tmpdir,
                                            overwrite=True,
                                            dump_client_info=True,
                                            token=self.token,
                                            max_threads=2)
            expected_outdir = os.path.join(tmpdir, self.out.Path()[1:])

            # Check we found both entries (the file and the directory).
            self.assertTrue("testfile5" in os.listdir(expected_outdir))
            self.assertTrue("testdir1" in os.listdir(expected_outdir))
Example #4
    def testReadPathInfosHistoriesWithTwoFilesWithTwoHistoryItems(self):
        client_id = self.InitializeClient()

        path_info_1_a = rdf_objects.PathInfo.OS(components=["foo"])
        path_info_1_a.timestamp = rdfvalue.RDFDatetime.FromHumanReadable(
            "1999-01-01")

        path_info_1_b = rdf_objects.PathInfo.OS(components=["foo"])
        path_info_1_b.timestamp = rdfvalue.RDFDatetime.FromHumanReadable(
            "1999-01-02")

        path_info_2_a = rdf_objects.PathInfo.OS(components=["bar"])
        path_info_2_a.timestamp = rdfvalue.RDFDatetime.FromHumanReadable(
            "1988-01-01")

        path_info_2_b = rdf_objects.PathInfo.OS(components=["bar"])
        path_info_2_b.timestamp = rdfvalue.RDFDatetime.FromHumanReadable(
            "1988-01-02")

        self.db.WritePathInfos(client_id, [path_info_1_a, path_info_2_a])

        stat_entries = {
            path_info_1_a: rdf_client.StatEntry(st_mode=1337),
            path_info_1_b: rdf_client.StatEntry(st_mode=1338),
            path_info_2_a: rdf_client.StatEntry(st_mode=109),
            path_info_2_b: rdf_client.StatEntry(st_mode=110),
        }
        self.db.MultiWritePathHistory(client_id, stat_entries, {})

        path_infos = self.db.ReadPathInfosHistories(
            client_id, rdf_objects.PathInfo.PathType.OS, [("foo", ),
                                                          ("bar", )])
        self.assertEqual(len(path_infos), 2)

        pi = path_infos[("bar", )]
        self.assertEqual(len(pi), 2)

        self.assertEqual(pi[0].components, ("bar", ))
        self.assertEqual(pi[0].stat_entry.st_mode, 109)
        self.assertEqual(pi[0].timestamp,
                         rdfvalue.RDFDatetime.FromHumanReadable("1988-01-01"))

        self.assertEqual(pi[1].components, ("bar", ))
        self.assertEqual(pi[1].stat_entry.st_mode, 110)
        self.assertEqual(pi[1].timestamp,
                         rdfvalue.RDFDatetime.FromHumanReadable("1988-01-02"))

        pi = path_infos[("foo", )]
        self.assertEqual(len(pi), 2)

        self.assertEqual(pi[0].components, ("foo", ))
        self.assertEqual(pi[0].stat_entry.st_mode, 1337)
        self.assertEqual(pi[0].timestamp,
                         rdfvalue.RDFDatetime.FromHumanReadable("1999-01-01"))

        self.assertEqual(pi[1].components, ("foo", ))
        self.assertEqual(pi[1].stat_entry.st_mode, 1338)
        self.assertEqual(pi[1].timestamp,
                         rdfvalue.RDFDatetime.FromHumanReadable("1999-01-02"))
Example #5
 def testFindPaths(self):
     # TODO(user): Deal with cases where multiple vars are exported.
     # export TERM PERLLIB=.:shouldntbeignored
     bashrc_data = io.BytesIO("""
   IGNORE='bad' PATH=${HOME}/bin:$PATH
  { PYTHONPATH=/path1:/path2 }
   export TERM=screen-256color
   export http_proxy="http://proxy.example.org:3128/"
   export HTTP_PROXY=$http_proxy
   if [[ "$some_condition" ]]; then
     export PATH=:$PATH; LD_LIBRARY_PATH=foo:bar:$LD_LIBRARY_PATH
     PYTHONPATH=$PATH:"${PYTHONPATH}"
     CLASSPATH=
   fi
   echo PATH=/should/be/ignored
   # Ignore PATH=foo:bar
   TERM=vt100 PS=" Foo" PERL5LIB=:shouldntbeignored
 """)
     cshrc_data = io.BytesIO("""
   setenv PATH ${HOME}/bin:$PATH
   setenv PYTHONPATH /path1:/path2
   set term = (screen-256color)
   setenv http_proxy "http://proxy.example.org:3128/"
   setenv HTTP_PROXY $http_proxy
   if ( -e "$some_condition" ) then
     set path =  (. $path); setenv LD_LIBRARY_PATH foo:bar:$LD_LIBRARY_PATH
     setenv PYTHONPATH $PATH:"${PYTHONPATH}"
     setenv CLASSPATH
   endif
   echo PATH=/should/be/ignored
   setenv PERL5LIB :shouldntbeignored
 """)
     parser = linux_file_parser.PathParser()
     bashrc_stat = rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
         path="/home/user1/.bashrc", pathtype="OS"))
     cshrc_stat = rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
         path="/home/user1/.cshrc", pathtype="OS"))
     bashrc = {
         r.name: r.vals
         for r in parser.Parse(bashrc_stat, bashrc_data, None)
     }
     cshrc = {
         r.name: r.vals
         for r in parser.Parse(cshrc_stat, cshrc_data, None)
     }
     expected = {
         "PATH": [".", "${HOME}/bin", "$PATH"],
         "PYTHONPATH": [".", "${HOME}/bin", "$PATH", "/path1", "/path2"],
         "LD_LIBRARY_PATH": ["foo", "bar", "$LD_LIBRARY_PATH"],
         "CLASSPATH": [],
         "PERL5LIB": [".", "shouldntbeignored"]
     }
     # Check we extracted the same environment variable names from both files.
     self.assertItemsEqual(expected, bashrc)
     self.assertItemsEqual(expected, cshrc)
     # The path values are expanded correctly.
     for var_name in ("PATH", "PYTHONPATH", "LD_LIBRARY_PATH"):
         self.assertEqual(expected[var_name], bashrc[var_name])
         self.assertEqual(expected[var_name], cshrc[var_name])
Example #6
  def testReadPathInfoTimestampStatAndHashEntry(self):
    client_id = self.InitializeClient()

    path_info = rdf_objects.PathInfo.OS(components=["foo"])

    path_info.stat_entry = rdf_client.StatEntry(st_mode=42)
    path_info.hash_entry = None
    self.db.WritePathInfos(client_id, [path_info])
    timestamp_1 = rdfvalue.RDFDatetime.Now()

    path_info.stat_entry = None
    path_info.hash_entry = rdf_crypto.Hash(md5=b"quux")
    self.db.WritePathInfos(client_id, [path_info])
    timestamp_2 = rdfvalue.RDFDatetime.Now()

    path_info.stat_entry = rdf_client.StatEntry(st_mode=1337)
    path_info.hash_entry = None
    self.db.WritePathInfos(client_id, [path_info])
    timestamp_3 = rdfvalue.RDFDatetime.Now()

    path_info.stat_entry = rdf_client.StatEntry(st_mode=4815162342)
    path_info.hash_entry = rdf_crypto.Hash(md5=b"norf")
    self.db.WritePathInfos(client_id, [path_info])
    timestamp_4 = rdfvalue.RDFDatetime.Now()

    path_info_1 = self.db.ReadPathInfo(
        client_id,
        rdf_objects.PathInfo.PathType.OS,
        components=("foo",),
        timestamp=timestamp_1)
    self.assertEqual(path_info_1.stat_entry.st_mode, 42)
    self.assertFalse(path_info_1.HasField("hash_entry"))

    # Reading back at timestamp_2 should combine the stat entry from the
    # first write with the hash entry from the second write.
    path_info_2 = self.db.ReadPathInfo(
        client_id,
        rdf_objects.PathInfo.PathType.OS,
        components=("foo",),
        timestamp=timestamp_2)
    self.assertEqual(path_info_2.stat_entry.st_mode, 42)
    self.assertEqual(path_info_2.hash_entry.md5, b"quux")

    path_info_3 = self.db.ReadPathInfo(
        client_id,
        rdf_objects.PathInfo.PathType.OS,
        components=("foo",),
        timestamp=timestamp_3)
    self.assertEqual(path_info_3.stat_entry.st_mode, 1337)
    self.assertEqual(path_info_3.hash_entry.md5, b"quux")

    path_info_4 = self.db.ReadPathInfo(
        client_id,
        rdf_objects.PathInfo.PathType.OS,
        components=("foo",),
        timestamp=timestamp_4)
    self.assertEqual(path_info_4.stat_entry.st_mode, 4815162342)
    self.assertEqual(path_info_4.hash_entry.md5, b"norf")
Example #7
    def testUpdateFromStatEntryOverride(self):
        stat_entry = rdf_client.StatEntry(st_mode=707)
        dst = rdf_objects.PathInfo(components=["foo", "bar"],
                                   stat_entry=stat_entry)

        stat_entry = rdf_client.StatEntry(st_mode=1337)
        src = rdf_objects.PathInfo(components=["foo", "bar"],
                                   stat_entry=stat_entry)

        dst.UpdateFrom(src)
        self.assertEqual(dst.stat_entry.st_mode, 1337)
Example #8
  def testReadPathInfoTimestampStatEntry(self):
    client_id = self.InitializeClient()

    pathspec = rdf_paths.PathSpec(
        path="foo/bar/baz", pathtype=rdf_paths.PathSpec.PathType.OS)

    stat_entry = rdf_client.StatEntry(pathspec=pathspec, st_size=42)
    self.db.WritePathInfos(client_id,
                           [rdf_objects.PathInfo.FromStatEntry(stat_entry)])
    timestamp_1 = rdfvalue.RDFDatetime.Now()

    stat_entry = rdf_client.StatEntry(pathspec=pathspec, st_size=101)
    self.db.WritePathInfos(client_id,
                           [rdf_objects.PathInfo.FromStatEntry(stat_entry)])
    timestamp_2 = rdfvalue.RDFDatetime.Now()

    stat_entry = rdf_client.StatEntry(pathspec=pathspec, st_size=1337)
    self.db.WritePathInfos(client_id,
                           [rdf_objects.PathInfo.FromStatEntry(stat_entry)])
    timestamp_3 = rdfvalue.RDFDatetime.Now()

    path_info_last = self.db.ReadPathInfo(
        client_id,
        rdf_objects.PathInfo.PathType.OS,
        components=("foo", "bar", "baz"))
    self.assertEqual(path_info_last.stat_entry.st_size, 1337)
    self.assertEqual(path_info_last.components, ["foo", "bar", "baz"])

    path_info_1 = self.db.ReadPathInfo(
        client_id,
        rdf_objects.PathInfo.PathType.OS,
        components=("foo", "bar", "baz"),
        timestamp=timestamp_1)
    self.assertEqual(path_info_1.stat_entry.st_size, 42)
    self.assertEqual(path_info_1.components, ["foo", "bar", "baz"])

    path_info_2 = self.db.ReadPathInfo(
        client_id,
        rdf_objects.PathInfo.PathType.OS,
        components=("foo", "bar", "baz"),
        timestamp=timestamp_2)
    self.assertEqual(path_info_2.stat_entry.st_size, 101)
    self.assertEqual(path_info_2.components, ["foo", "bar", "baz"])

    path_info_3 = self.db.ReadPathInfo(
        client_id,
        rdf_objects.PathInfo.PathType.OS,
        components=("foo", "bar", "baz"),
        timestamp=timestamp_3)
    self.assertEqual(path_info_3.stat_entry.st_size, 1337)
    self.assertEqual(path_info_3.components, ["foo", "bar", "baz"])
Example #9
  def testMultiWriteHistoryDoesNotAllowOverridingStat(self):
    datetime = rdfvalue.RDFDatetime.FromHumanReadable

    client_id = self.InitializeClient()

    path_info = rdf_objects.PathInfo.OS(components=("foo", "bar", "baz"))
    self.db.WritePathInfos(client_id, [path_info])

    path_info.timestamp = datetime("2001-01-01")
    stat_entry = rdf_client.StatEntry(st_size=42)
    self.db.MultiWritePathHistory(client_id, {path_info: stat_entry}, {})

    with self.assertRaises(db.Error):
      stat_entry = rdf_client.StatEntry(st_size=108)
      self.db.MultiWritePathHistory(client_id, {path_info: stat_entry}, {})
Example #10
    def NotifyAboutEnd(self):
        super(GetFile, self).NotifyAboutEnd()

        if not self.runner.ShouldSendNotifications():
            return

        stat_entry = self.state.stat_entry
        if not stat_entry:
            stat_entry = rdf_client.StatEntry(pathspec=self.args.pathspec)

        urn = stat_entry.AFF4Path(self.client_id)
        components = urn.Split()
        file_ref = None
        if len(components) > 3:
            file_ref = rdf_objects.VfsFileReference(
                client_id=components[0],
                path_type=components[2].upper(),
                path_components=components[3:])

        if not self.state.get("success"):
            notification.Notify(
                self.token.username,
                rdf_objects.UserNotification.Type.TYPE_VFS_FILE_COLLECTION_FAILED,
                "File transfer failed.",
                rdf_objects.ObjectReference(
                    reference_type=rdf_objects.ObjectReference.Type.VFS_FILE,
                    vfs_file=file_ref))
        else:
            notification.Notify(
                self.token.username,
                rdf_objects.UserNotification.Type.TYPE_VFS_FILE_COLLECTED,
                "File transferred successfully.",
                rdf_objects.ObjectReference(
                    reference_type=rdf_objects.ObjectReference.Type.VFS_FILE,
                    vfs_file=file_ref))
Example #11
    def Stat(self, responses):
        """Fix up the pathspec of the file."""
        response = responses.First()
        if responses.success and response:
            self.state.stat_entry = response
        else:
            if not self.args.ignore_stat_failure:
                raise IOError("Error: %s" % responses.status)

            # Just fill up a bogus stat entry.
            self.state.stat_entry = rdf_client.StatEntry(
                pathspec=self.args.pathspec)

        # Adjust the size from st_size if read length is not specified.
        if self.args.read_length == 0:
            self.state.file_size = self.state.stat_entry.st_size
        else:
            self.state.file_size = self.args.read_length

        self.state.max_chunk_number = (self.state.file_size //
                                       self.CHUNK_SIZE) + 1

        self.FetchWindow(
            min(
                self.WINDOW_SIZE, self.state.max_chunk_number -
                self.state["current_chunk_number"]))
Example #12
    def ListFiles(self, ext_attrs=None):
        """A generator of all keys and values."""
        del ext_attrs  # Unused.

        if not self.IsDirectory():
            return

        if self.hive is None:
            for name in dir(_winreg):
                if name.startswith("HKEY_"):
                    response = rdf_client.StatEntry(st_mode=stat.S_IFDIR)
                    response_pathspec = self.pathspec.Copy()
                    response_pathspec.last.path = utils.JoinPath(
                        response_pathspec.last.path, name)
                    response.pathspec = response_pathspec

                    yield response
            return

        try:
            with OpenKey(self.hive, self.local_path) as key:
                (self.number_of_keys, self.number_of_values,
                 self.last_modified) = QueryInfoKey(key)

                # First the keys - these will look like directories.
                for i in xrange(self.number_of_keys):
                    try:
                        name = EnumKey(key, i)
                        key_name = utils.JoinPath(self.local_path, name)

                        try:
                            # Store the default value in the stat response for values.
                            with OpenKey(self.hive, key_name) as subkey:
                                value, value_type = QueryValueEx(subkey, "")
                        except exceptions.WindowsError:
                            value, value_type = None, None

                        response = self._Stat(name, value, value_type)
                        # Keys look like Directories in the VFS.
                        response.st_mode = stat.S_IFDIR

                        yield response
                    except exceptions.WindowsError:
                        pass

                # Now the values - these will look like files.
                for i in xrange(self.number_of_values):
                    try:
                        name, value, value_type = EnumValue(key, i)
                        response = self._Stat(name, value, value_type)

                        # Values look like files in the VFS.
                        response.st_mode = stat.S_IFREG

                        yield response

                    except exceptions.WindowsError:
                        pass
        except exceptions.WindowsError as e:
            raise IOError("Unable to list key %s: %s" % (self.key_name, e))
Example #13
 def testStatModeFormat(self):
     rdf = rdf_client.StatEntry(st_mode=33204)
     expected = "-rw-rw-r--"
     template = "{st_mode}"
     hinter = hints.Hinter(template=template)
     result = hinter.Render(rdf)
     self.assertEqual(expected, result)
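
For reference, the expected rendering can be reproduced with the standard stat module (a quick sketch; stat.filemode requires Python 3.3+, whereas the code above targets Python 2):

    import stat

    mode = 33204
    print(oct(mode))            # 0o100664: S_IFREG | 0664
    print(stat.filemode(mode))  # '-rw-rw-r--', matching the expected output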
Example #14
def StatEntryFromStat(stat, pathspec, ext_attrs=True):
    """Build a stat entry object from a given stat object.

  Args:
    stat: A `Stat` object.
    pathspec: A `PathSpec` from which `stat` was obtained.
    ext_attrs: Whether to include extended file attributes in the result.

  Returns:
    `StatEntry` object.
  """
    result = rdf_client.StatEntry(pathspec=pathspec)

    for attr in _STAT_ATTRS:
        value = getattr(stat.GetRaw(), attr, None)
        if value is None:
            continue

        # TODO(hanuszczak): Why are we doing this?
        value = int(value)
        if value < 0:
            value &= 0xFFFFFFFF

        setattr(result, attr, value)

    result.st_flags_linux = stat.GetLinuxFlags()
    result.st_flags_osx = stat.GetOsxFlags()
    if ext_attrs:
        # TODO(hanuszczak): Can we somehow incorporate extended attribute getter to
        # the `Stat` class? That would make the code a lot prettier but would force
        # `utils` to depend on `xattrs`.
        result.ext_attrs = list(GetExtAttrs(stat.GetPath()))

    return result
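
A minimal sketch of how this helper might be exercised; FakeStat is a hypothetical stand-in that exposes only the methods the function above actually calls:

    import os

    class FakeStat(object):
        """Hypothetical stand-in for the `Stat` wrapper, for illustration only."""

        def __init__(self, path):
            self._path = path
            self._raw = os.lstat(path)  # os.stat_result carries the st_* attributes.

        def GetRaw(self):
            return self._raw

        def GetPath(self):
            return self._path

        def GetLinuxFlags(self):
            return 0  # The real wrapper would query the kernel, e.g. FS_IOC_GETFLAGS.

        def GetOsxFlags(self):
            return 0  # The real wrapper would read st_flags on macOS.

    # pathspec = rdf_paths.PathSpec(path="/etc/passwd", pathtype="OS")
    # entry = StatEntryFromStat(FakeStat("/etc/passwd"), pathspec, ext_attrs=False)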
Example #15
    def testWritePathInfoHashAndStatEntrySeparateWrites(self):
        client_id = self.InitializeClient()

        stat_entry = rdf_client.StatEntry(st_mode=1337)
        stat_entry_path_info = rdf_objects.PathInfo.OS(components=["foo"],
                                                       stat_entry=stat_entry)

        stat_entry_timestamp = rdfvalue.RDFDatetime.Now()
        self.db.WritePathInfos(client_id, [stat_entry_path_info])

        hash_entry = rdf_crypto.Hash(sha256=hashlib.sha256("foo").digest())
        hash_entry_path_info = rdf_objects.PathInfo.OS(components=["foo"],
                                                       hash_entry=hash_entry)

        hash_entry_timestamp = rdfvalue.RDFDatetime.Now()
        self.db.WritePathInfos(client_id, [hash_entry_path_info])

        result = self.db.ReadPathInfo(client_id,
                                      rdf_objects.PathInfo.PathType.OS,
                                      components=("foo", ))

        now = rdfvalue.RDFDatetime.Now()

        self.assertEqual(result.components, ["foo"])
        self.assertTrue(result.HasField("stat_entry"))
        self.assertTrue(result.HasField("hash_entry"))
        self.assertEqual(result.stat_entry, stat_entry)
        self.assertEqual(result.hash_entry, hash_entry)
        self.assertGreater(result.last_stat_entry_timestamp,
                           stat_entry_timestamp)
        self.assertLess(result.last_stat_entry_timestamp, hash_entry_timestamp)
        self.assertGreater(result.last_hash_entry_timestamp,
                           hash_entry_timestamp)
        self.assertLess(result.last_hash_entry_timestamp, now)
Example #16
    def PopulateCache(self):
        """Parse the paths from the fixture."""
        if self.paths:
            return

        # The cache is attached to the class so it can be shared by all instances.
        self.paths = self.__class__.cache[self.prefix] = {}
        for path, (vfs_type, attributes) in client_fixture.VFS:
            if not path.startswith(self.prefix):
                continue

            path = utils.NormalizePath(path[len(self.prefix):])
            if path == "/":
                continue

            stat = rdf_client.StatEntry()
            args = {"client_id": "C.1234"}
            attrs = attributes.get("aff4:stat")

            if attrs:
                attrs %= args  # Remove any %% and interpolate client_id.
                stat = rdf_client.StatEntry.FromTextFormat(
                    utils.SmartStr(attrs))

            stat.pathspec = rdf_paths.PathSpec(
                pathtype=self.supported_pathtype, path=path)

            # TODO(user): Once we add tests around not crossing device boundaries,
            # we need to be smarter here, especially for the root entry.
            stat.st_dev = 1
            path = self._NormalizeCaseForPath(path, vfs_type)
            self.paths[path] = (vfs_type, stat)

        self.BuildIntermediateDirectories()
Example #17
class MockVFSHandler(vfs.VFSHandler):
  """A mock VFS handler with fake files."""
  children = []
  for x in range(10):
    child = rdf_client.StatEntry(
        pathspec=rdf_paths.PathSpec(
            path="Foo%s" % x, pathtype=rdf_paths.PathSpec.PathType.OS))
    children.append(child)

  supported_pathtype = rdf_paths.PathSpec.PathType.OS

  def __init__(self,
               base_fd,
               pathspec=None,
               progress_callback=None,
               full_pathspec=None):
    super(MockVFSHandler, self).__init__(
        base_fd,
        pathspec=pathspec,
        progress_callback=progress_callback,
        full_pathspec=full_pathspec)

    self.pathspec.Append(pathspec)

  def ListFiles(self, ext_attrs=None):
    del ext_attrs  # Unused.
    return self.children

  def IsDirectory(self):
    return self.pathspec.path == "/"
Example #18
  def testExportedFilenamesAndManifestForValuesOfMultipleTypes(self):
    zip_fd, prefix = self.ProcessValuesToZip({
        rdf_client.StatEntry: [
            rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
                path="/foo/bar", pathtype="OS"))
        ],
        rdf_client.Process: [rdf_client.Process(pid=42)]
    })
    self.assertEqual(
        set(zip_fd.namelist()), {
            "%s/MANIFEST" % prefix,
            "%s/ExportedFile_from_StatEntry.sql" % prefix,
            "%s/ExportedProcess_from_Process.sql" % prefix
        })

    parsed_manifest = yaml.load(zip_fd.read("%s/MANIFEST" % prefix))
    self.assertEqual(parsed_manifest, {
        "export_stats": {
            "StatEntry": {
                "ExportedFile": 1
            },
            "Process": {
                "ExportedProcess": 1
            }
        }
    })
Example #19
 def _AddTestData(self, fd):
     with data_store.DB.GetMutationPool() as pool:
         fd.Add(rdfvalue.RDFURN(self.out.Add("testfile1")),
                mutation_pool=pool)
         fd.Add(rdf_client.StatEntry(
             pathspec=rdf_paths.PathSpec(path="testfile2", pathtype="OS")),
                mutation_pool=pool)
         fd.Add(rdf_file_finder.FileFinderResult(
             stat_entry=rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
                 path="testfile5", pathtype="OS"))),
                mutation_pool=pool)
         fd.Add(collectors.ArtifactFilesDownloaderResult(
             downloaded_file=rdf_client.StatEntry(
                 pathspec=rdf_paths.PathSpec(path="testfile6",
                                             pathtype="OS"))),
                mutation_pool=pool)
Example #20
  def testEmptySourceData(self):
    test_data = ("# comment 1\n"
                 "# deb http://security.debian.org/ wheezy/updates main\n"
                 "URI :\n"
                 "URI:\n"
                 "# Trailing whitespace on purpose\n"
                 "URI:          \n"
                 "\n"
                 "URIs :\n"
                 "URIs:\n"
                 "# Trailing whitespace on purpose\n"
                 "URIs:        \n"
                 "# comment 2\n")

    file_obj = io.BytesIO(test_data)
    pathspec = rdf_paths.PathSpec(path="/etc/apt/sources.list.d/test.list")
    stat = rdf_client.StatEntry(pathspec=pathspec)
    parser = config_file.APTPackageSourceParser()
    results = list(parser.Parse(stat, file_obj, None))

    result = [
        d for d in results if isinstance(d, rdf_protodict.AttributedDict)
    ][0]

    self.assertEqual("/etc/apt/sources.list.d/test.list", result.filename)
    self.assertEqual(0, len(result.uris))
Example #21
  def SetupTestTimeline(self):
    client_id = self.SetupClient(0)
    fixture_test_lib.ClientFixture(client_id, token=self.token)

    # Choose a directory from the ClientFixture that has a pathspec.
    self.category_path = "fs/os"
    self.folder_path = self.category_path + "/Users/中国新闻网新闻中/Shared"
    self.file_path = self.folder_path + "/a.txt"

    file_urn = client_id.Add(self.file_path)
    for i in range(0, 5):
      with test_lib.FakeTime(i):
        stat_entry = rdf_client.StatEntry()
        stat_entry.st_mtime = rdfvalue.RDFDatetimeSeconds.Now()
        stat_entry.pathspec.path = self.file_path[len(self.category_path):]
        stat_entry.pathspec.pathtype = rdf_paths.PathSpec.PathType.OS

        hash_entry = rdf_crypto.Hash(
            sha256=("0e8dc93e150021bb4752029ebbff51394aa36f069cf19901578"
                    "e4f06017acdb5").decode("hex"))

        with aff4.FACTORY.Create(
            file_urn, aff4_grr.VFSFile, mode="w", token=self.token) as fd:
          fd.Set(fd.Schema.STAT, stat_entry)
          fd.Set(fd.Schema.HASH, hash_entry)

        if data_store.RelationalDBWriteEnabled():
          cid = client_id.Basename()
          path_info = rdf_objects.PathInfo.FromStatEntry(stat_entry)
          path_info.hash_entry = hash_entry
          data_store.REL_DB.WritePathInfos(cid, [path_info])

    return client_id
Example #22
  def testDownloadFilesPanelIsShownWhenNewResultsAreAdded(self):
    f = flow.StartFlow(
        client_id=self.client_id,
        flow_name=gui_test_lib.RecursiveTestFlow.__name__,
        token=self.token)

    with data_store.DB.GetMutationPool() as pool:
      flow.GRRFlow.ResultCollectionForFID(f).Add(
          rdfvalue.RDFString("foo-result"), mutation_pool=pool)

    self.Open("/#/clients/%s" % self.client_id)
    # Ensure auto-refresh updates happen every second.
    self.GetJavaScriptValue(
        "grrUi.core.resultsCollectionDirective.setAutoRefreshInterval(1000);")

    # Go to the flows page without refreshing the page, so that
    # AUTO_REFRESH_INTERVAL_MS setting is not reset.
    self.Click("css=a[grrtarget='client.flows']")
    self.Click("css=tr:contains('%s')" % f.Basename())
    self.Click("css=li[heading=Results]:not([disabled]")

    self.WaitUntil(self.IsElementPresent,
                   "css=grr-results-collection td:contains('foo-result')")
    self.WaitUntilNot(
        self.IsElementPresent,
        "css=grr-results-collection grr-download-collection-files")

    stat_entry = rdf_client.StatEntry(
        pathspec=rdf_paths.PathSpec(
            path="/foo/bar", pathtype=rdf_paths.PathSpec.PathType.OS))
    with data_store.DB.GetMutationPool() as pool:
      flow.GRRFlow.ResultCollectionForFID(f).Add(stat_entry, mutation_pool=pool)

    self.WaitUntil(self.IsElementPresent,
                   "css=grr-results-collection grr-download-collection-files")
Example #23
    def ListDirectory(self, list_directory_request):
        """A mock list directory."""
        pathspec = list_directory_request.pathspec
        if not pathspec:
            raise RuntimeError("Missing pathspec.")

        if (pathspec.path != r"\\.\HarddiskVolumeShadowCopy3"
                or pathspec.pathtype != rdf_paths.PathSpec.PathType.OS):
            raise RuntimeError("Invalid pathspec.")

        if not pathspec.nested_path:
            raise RuntimeError("Missing nested pathspec.")

        if (pathspec.nested_path.path != "/" or pathspec.nested_path.pathtype
                != rdf_paths.PathSpec.PathType.TSK):
            raise RuntimeError("Invalid nested pathspec.")

        result = []
        for i in range(10):
            mock_pathspec = pathspec.Copy()
            mock_pathspec.last.path = "/file %s" % i
            result.append(
                rdf_client.StatEntry(pathspec=mock_pathspec,
                                     st_mode=stat.S_IFDIR))

        return result
Example #24
  def Parse(self, cmd, args, stdout, stderr, return_val, time_taken,
            knowledge_base):
    _ = cmd, args, stdout, stderr, return_val, time_taken, knowledge_base
    installed = rdf_client.SoftwarePackage.InstallState.INSTALLED
    soft = rdf_client.SoftwarePackage(
        name="Package1",
        description="Desc1",
        version="1",
        architecture="amd64",
        install_state=installed)
    yield soft
    soft = rdf_client.SoftwarePackage(
        name="Package2",
        description="Desc2",
        version="1",
        architecture="i386",
        install_state=installed)
    yield soft

    # Also yield something random so we can test return type filtering.
    yield rdf_client.StatEntry()

    # Also yield an anomaly to test that.
    yield rdf_anomaly.Anomaly(
        type="PARSER_ANOMALY", symptom="could not parse gremlins.")
Example #25
    def DoStat(self, path):
        result = rdf_client.StatEntry()
        if path.startswith("/mock2/directory3"):
            result.st_dev = 1
        else:
            result.st_dev = 2
        f = self.filesystem[path]
        if isinstance(f, str):
            if path.startswith("/mock2/directory1/directory2"):
                result.st_mode = 0o0100644  # u=rw,g=r,o=r on regular file
                result.st_uid = 50
                result.st_gid = 500
            elif path.startswith("/mock2/directory3"):
                result.st_mode = 0o0100643  # u=rw,g=r,o=wx on regular file
                result.st_uid = 60
                result.st_gid = 600
            else:
                result.st_mode = 0o0104666  # setuid, u=rw,g=rw,o=rw on regular file
                result.st_uid = 90
                result.st_gid = 900
        else:
            result.st_mode = 0o0040775  # u=rwx,g=rwx,o=rx on directory
            result.st_uid = 0
            result.st_gid = 4
        result.st_size = len(f)
        result.st_mtime = 1373185602

        return result
Example #26
    def testWritePathInfosStatEntry(self):
        client_id = self.InitializeClient()

        stat_entry = rdf_client.StatEntry()
        stat_entry.pathspec.path = "foo/bar"
        stat_entry.pathspec.pathtype = rdf_paths.PathSpec.PathType.OS
        stat_entry.st_mode = 1337
        stat_entry.st_mtime = 108
        stat_entry.st_atime = 4815162342

        path_info = rdf_objects.PathInfo.FromStatEntry(stat_entry)
        self.db.WritePathInfos(client_id, [path_info])

        results = self.db.ReadPathInfos(client_id,
                                        rdf_objects.PathInfo.PathType.OS, [
                                            (),
                                            ("foo", ),
                                            ("foo", "bar"),
                                        ])

        root_path_info = results[()]
        self.assertFalse(root_path_info.HasField("stat_entry"))

        foo_path_info = results[("foo", )]
        self.assertFalse(foo_path_info.HasField("stat_entry"))

        foobar_path_info = results[("foo", "bar")]
        self.assertTrue(foobar_path_info.HasField("stat_entry"))
        self.assertFalse(foobar_path_info.HasField("hash_entry"))
        self.assertEqual(foobar_path_info.stat_entry.st_mode, 1337)
        self.assertEqual(foobar_path_info.stat_entry.st_mtime, 108)
        self.assertEqual(foobar_path_info.stat_entry.st_atime, 4815162342)
Example #27
 def _GenStat(self,
              path="/etc/passwd",
              st_mode=33184,
              st_ino=1063090,
              st_dev=64512,
              st_nlink=1,
              st_uid=1001,
              st_gid=5000,
              st_size=1024,
              st_atime=1336469177,
              st_mtime=1336129892,
              st_ctime=1336129892):
     """Generate a StatEntry RDF value."""
     pathspec = rdf_paths.PathSpec(path=path,
                                   pathtype=rdf_paths.PathSpec.PathType.OS)
     return rdf_client.StatEntry(pathspec=pathspec,
                                 st_mode=st_mode,
                                 st_ino=st_ino,
                                 st_dev=st_dev,
                                 st_nlink=st_nlink,
                                 st_uid=st_uid,
                                 st_gid=st_gid,
                                 st_size=st_size,
                                 st_atime=st_atime,
                                 st_mtime=st_mtime,
                                 st_ctime=st_ctime)
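
Tests in the same class can then build a StatEntry in one line, overriding only the fields under test (values here are hypothetical):

    entry = self._GenStat(path="/etc/shadow", st_mode=33184)  # 0o100640, -rw-r-----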
Example #28
  def testParseCronData(self):
    test_data = r"""root
    user

    user2 user3
    root
    hi hello
    user
    pparth"""
    file_obj = io.BytesIO(test_data)
    pathspec = rdf_paths.PathSpec(path="/etc/at.allow")
    stat = rdf_client.StatEntry(pathspec=pathspec)
    parser = config_file.CronAtAllowDenyParser()
    results = list(parser.Parse(stat, file_obj, None))

    result = [
        d for d in results if isinstance(d, rdf_protodict.AttributedDict)
    ][0]
    filename = result.filename
    users = result.users
    self.assertEqual("/etc/at.allow", filename)
    self.assertEqual(sorted(["root", "user", "pparth"]), sorted(users))

    anomalies = [a for a in results if isinstance(a, rdf_anomaly.Anomaly)]
    self.assertEqual(1, len(anomalies))
    anom = anomalies[0]
    self.assertEqual("Dodgy entries in /etc/at.allow.", anom.symptom)
    self.assertEqual(sorted(["user2 user3", "hi hello"]), sorted(anom.finding))
    self.assertEqual(pathspec, anom.reference_pathspec)
    self.assertEqual("PARSER_ANOMALY", anom.type)
Example #29
  def testYamlPluginWithValuesOfSameType(self):
    responses = []
    for i in range(10):
      responses.append(
          rdf_client.StatEntry(
              pathspec=rdf_paths.PathSpec(
                  path="/foo/bar/%d" % i, pathtype="OS"),
              st_mode=33184,  # octal = 100640 => u=rw,g=r,o= => -rw-r-----
              st_ino=1063090,
              st_dev=64512,
              st_nlink=1 + i,
              st_uid=139592,
              st_gid=5000,
              st_size=0,
              st_atime=1336469177,
              st_mtime=1336129892,
              st_ctime=1336129892))

    zip_fd, prefix = self.ProcessValuesToZip({rdf_client.StatEntry: responses})
    self.assertEqual(
        set(zip_fd.namelist()), {
            "%s/MANIFEST" % prefix,
            "%s/ExportedFile/from_StatEntry.yaml" % prefix
        })

    parsed_manifest = yaml.load(zip_fd.read("%s/MANIFEST" % prefix))
    self.assertEqual(parsed_manifest,
                     {"export_stats": {
                         "StatEntry": {
                             "ExportedFile": 10
                         }
                     }})

    parsed_output = yaml.load(
        zip_fd.read("%s/ExportedFile/from_StatEntry.yaml" % prefix))
    self.assertEqual(len(parsed_output), 10)
    for i in range(10):
      # Only the client_urn is filled in by the plugin. Doing lookups for
      # all the clients' metadata is possible but expensive, and it doesn't
      # seem worth it.
      self.assertEqual(parsed_output[i]["metadata"]["client_urn"],
                       str(self.client_id))
      self.assertEqual(parsed_output[i]["metadata"]["source_urn"],
                       str(self.results_urn))
      self.assertEqual(parsed_output[i]["urn"],
                       self.client_id.Add("/fs/os/foo/bar").Add(str(i)))
      self.assertEqual(parsed_output[i]["st_mode"], "-rw-r-----")
      self.assertEqual(parsed_output[i]["st_ino"], "1063090")
      self.assertEqual(parsed_output[i]["st_dev"], "64512")
      self.assertEqual(parsed_output[i]["st_nlink"], str(1 + i))
      self.assertEqual(parsed_output[i]["st_uid"], "139592")
      self.assertEqual(parsed_output[i]["st_gid"], "5000")
      self.assertEqual(parsed_output[i]["st_size"], "0")
      self.assertEqual(parsed_output[i]["st_atime"], "2012-05-08 09:26:17")
      self.assertEqual(parsed_output[i]["st_mtime"], "2012-05-04 11:11:32")
      self.assertEqual(parsed_output[i]["st_ctime"], "2012-05-04 11:11:32")
      self.assertEqual(parsed_output[i]["st_blksize"], "0")
      self.assertEqual(parsed_output[i]["st_rdev"], "0")
      self.assertEqual(parsed_output[i]["symlink"], "")
Example #30
    def testCSVPluginWithValuesOfMultipleTypes(self):
        zip_fd, prefix = self.ProcessValuesToZip({
            rdf_client.StatEntry: [
                rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
                    path="/foo/bar", pathtype="OS"))
            ],
            rdf_client.Process: [rdf_client.Process(pid=42)]
        })
        self.assertEqual(
            set(zip_fd.namelist()),
            set([
                "%s/MANIFEST" % prefix,
                "%s/ExportedFile/from_StatEntry.csv" % prefix,
                "%s/ExportedProcess/from_Process.csv" % prefix
            ]))

        parsed_manifest = yaml.load(zip_fd.read("%s/MANIFEST" % prefix))
        self.assertEqual(
            parsed_manifest, {
                "export_stats": {
                    "StatEntry": {
                        "ExportedFile": 1
                    },
                    "Process": {
                        "ExportedProcess": 1
                    }
                }
            })

        parsed_output = list(
            csv.DictReader(
                zip_fd.open("%s/ExportedFile/from_StatEntry.csv" % prefix)))
        self.assertEqual(len(parsed_output), 1)

        # Make sure metadata is filled in.
        self.assertEqual(parsed_output[0]["metadata.client_urn"],
                         self.client_id)
        self.assertEqual(parsed_output[0]["metadata.hostname"], "Host-0")
        self.assertEqual(parsed_output[0]["metadata.mac_address"],
                         "aabbccddee00\nbbccddeeff00")
        self.assertEqual(parsed_output[0]["metadata.source_urn"],
                         self.results_urn)
        self.assertEqual(parsed_output[0]["urn"],
                         self.client_id.Add("/fs/os/foo/bar"))

        parsed_output = list(
            csv.DictReader(
                zip_fd.open("%s/ExportedProcess/from_Process.csv" % prefix)))
        self.assertEqual(len(parsed_output), 1)

        self.assertEqual(parsed_output[0]["metadata.client_urn"],
                         self.client_id)
        self.assertEqual(parsed_output[0]["metadata.hostname"], "Host-0")
        self.assertEqual(parsed_output[0]["metadata.mac_address"],
                         "aabbccddee00\nbbccddeeff00")
        self.assertEqual(parsed_output[0]["metadata.source_urn"],
                         self.results_urn)
        self.assertEqual(parsed_output[0]["pid"], "42")