Example #1
    def testVFSVirtualRoot(self):

        # Let's open a file in the virtual root.
        os_root = "os:%s" % self.base_path
        with test_lib.ConfigOverrider({"Client.vfs_virtualroots": [os_root]}):
            # We need to reset the vfs.VFS_VIRTUALROOTS too.
            vfs.VFSInit().Run()

            fd = vfs.VFSOpen(
                rdf_paths.PathSpec(path="/morenumbers.txt",
                                   pathtype=rdf_paths.PathSpec.PathType.OS))
            data = fd.read(10)
            self.assertEqual(data, "1\n2\n3\n4\n5\n")

        # This should also work with TSK.
        tsk_root = "tsk:%s" % os.path.join(self.base_path, "test_img.dd")
        with test_lib.ConfigOverrider({"Client.vfs_virtualroots": [tsk_root]}):
            vfs.VFSInit().Run()

            image_file_ps = rdf_paths.PathSpec(
                path=u"איןד ןד ש אקדא/איןד.txt",
                pathtype=rdf_paths.PathSpec.PathType.TSK)

            fd = vfs.VFSOpen(image_file_ps)

            data = fd.read(10)
            self.assertEqual(data, "1\n2\n3\n4\n5\n")

            # This should not influence vfs handlers other than OS and TSK.
            reg_type = rdf_paths.PathSpec.PathType.REGISTRY
            os_handler = vfs.VFS_HANDLERS[rdf_paths.PathSpec.PathType.OS]
            with vfs_test_lib.VFSOverrider(reg_type, os_handler):
                with self.assertRaises(IOError):
                    image_file_ps.pathtype = reg_type
                    vfs.VFSOpen(image_file_ps)
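
The virtual-root mechanism exercised in this example boils down to prefixing every client path with the configured root before it reaches the underlying handler. A minimal sketch of that mapping, for illustration only (not GRR's actual handler code):

import os

def apply_virtual_root(virtual_root, client_path):
    # A client request for "/morenumbers.txt" under an os:/base virtual root
    # is served from "/base/morenumbers.txt" on the real filesystem.
    return os.path.join(virtual_root, client_path.lstrip("/"))

print(apply_virtual_root("/base", "/morenumbers.txt"))  # /base/morenumbers.txt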
Example #2
    def testDownloadCollectionWithFoldersEntries(self):
        """Check we can download a collection that also references folders."""
        fd = sequential_collection.GeneralIndexedCollection(
            self.collection_urn)
        with data_store.DB.GetMutationPool() as pool:
            fd.Add(rdf_file_finder.FileFinderResult(
                stat_entry=rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
                    path="testfile5", pathtype="OS"))),
                   mutation_pool=pool)
            fd.Add(rdf_file_finder.FileFinderResult(
                stat_entry=rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
                    path="testdir1", pathtype="OS"),
                                                st_mode=stat.S_IFDIR)),
                   mutation_pool=pool)

        with utils.TempDirectory() as tmpdir:
            export_utils.DownloadCollection(self.collection_urn,
                                            tmpdir,
                                            overwrite=True,
                                            dump_client_info=True,
                                            token=self.token,
                                            max_threads=2)
            expected_outdir = os.path.join(tmpdir, self.out.Path()[1:])

            # Check we found both files.
            self.assertTrue("testfile5" in os.listdir(expected_outdir))
            self.assertTrue("testdir1" in os.listdir(expected_outdir))
Example #3
    def testOpenFilehandlesExpire(self):
        """Test that file handles expire from cache."""
        files.FILE_HANDLE_CACHE = utils.FastStore(max_size=10)

        current_process = psutil.Process(os.getpid())
        num_open_files = len(current_process.open_files())

        path = os.path.join(self.base_path, "morenumbers.txt")
        fd = vfs.VFSOpen(
            rdf_paths.PathSpec(path=path,
                               pathtype=rdf_paths.PathSpec.PathType.OS))

        fds = []
        for filename in fd.ListNames():
            child_fd = vfs.VFSOpen(
                rdf_paths.PathSpec(path=os.path.join(path, filename),
                                   pathtype=rdf_paths.PathSpec.PathType.OS))
            child_fd.read(20)
            fds.append(child_fd)

        # This should not create any new file handles.
        self.assertTrue(len(current_process.open_files()) - num_open_files < 5)

        # Make sure we exceeded the size of the cache.
        self.assertGreater(len(fds), 20)
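
utils.FastStore above is a size-limited cache of open file handles. The expiry behaviour the test depends on can be sketched with a plain OrderedDict; this is a stand-in for illustration, not GRR's FastStore implementation:

import collections

class TinyStore(object):
    """Minimal size-limited store: adding past max_size evicts the oldest entry."""

    def __init__(self, max_size):
        self.max_size = max_size
        self.data = collections.OrderedDict()

    def Put(self, key, value):
        if len(self.data) >= self.max_size:
            self.data.popitem(last=False)  # Evict the oldest entry.
        self.data[key] = value

store = TinyStore(max_size=10)
for i in range(25):
    store.Put(i, "handle-%d" % i)
print(len(store.data))  # 10 -- the older handles expired, as the test expects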
Example #4
File: flow_test.py Project: rainser/grr
    def testIteratedDirectoryListing(self):
        """Test that the client iterator works."""
        with vfs_test_lib.VFSOverrider(rdf_paths.PathSpec.PathType.OS,
                                       MockVFSHandler):
            path = "/"
            # Run the flow in the simulated way
            client_mock = action_mocks.ActionMock(
                standard.IteratedListDirectory)
            flow_test_lib.TestFlowHelper(
                filesystem.IteratedListDirectory.__name__,
                client_mock,
                client_id=self.client_id,
                pathspec=rdf_paths.PathSpec(
                    path="/", pathtype=rdf_paths.PathSpec.PathType.OS),
                token=self.token)

            fd = aff4.FACTORY.Open(self.client_id.Add("fs/os").Add(path),
                                   token=self.token)
            directory = [ch for ch in fd.OpenChildren()]
            pb = rdf_paths.PathSpec(path=path,
                                    pathtype=rdf_paths.PathSpec.PathType.OS)
            directory2 = list(vfs.VFSOpen(pb).ListFiles())
            directory.sort()
            result = [x.Get(x.Schema.STAT) for x in directory]

            # Make sure that the resulting directory is what it should be
            for x, y in zip(result, directory2):
                self.assertEqual(x.st_mode, y.st_mode)
                self.assertRDFValuesEqual(x, y)
Example #5
    def testTSKInodeHandling(self):
        """Test that we can open files by inode."""
        path = os.path.join(self.base_path, "ntfs_img.dd")
        ps2 = rdf_paths.PathSpec(inode=65,
                                 ntfs_type=128,
                                 ntfs_id=0,
                                 path="/this/will/be/ignored",
                                 pathtype=rdf_paths.PathSpec.PathType.TSK)

        ps = rdf_paths.PathSpec(path=path,
                                pathtype=rdf_paths.PathSpec.PathType.OS,
                                offset=63 * 512)
        ps.Append(ps2)
        fd = vfs.VFSOpen(ps)

        self.assertEqual(fd.Read(100), "Hello world\n")

        ps2 = rdf_paths.PathSpec(inode=65,
                                 ntfs_type=128,
                                 ntfs_id=4,
                                 pathtype=rdf_paths.PathSpec.PathType.TSK)
        ps = rdf_paths.PathSpec(path=path,
                                pathtype=rdf_paths.PathSpec.PathType.OS,
                                offset=63 * 512)
        ps.Append(ps2)
        fd = vfs.VFSOpen(ps)

        self.assertEqual(fd.read(100), "I am a real ADS\n")

        # Make sure the size is correct:
        self.assertEqual(fd.Stat().st_size, len("I am a real ADS\n"))
Example #6
  def testAFF4Path(self):
    """Test the pathspec to URN conversion function."""
    pathspec = rdf_paths.PathSpec(
        path="\\\\.\\Volume{1234}\\",
        pathtype=rdf_paths.PathSpec.PathType.OS,
        mount_point="/c:/").Append(
            path="/windows", pathtype=rdf_paths.PathSpec.PathType.TSK)

    urn = pathspec.AFF4Path(rdf_client.ClientURN("C.1234567812345678"))
    self.assertEqual(
        urn,
        rdfvalue.RDFURN(
            r"aff4:/C.1234567812345678/fs/tsk/\\.\Volume{1234}\/windows"))

    # Test an ADS
    pathspec = rdf_paths.PathSpec(
        path="\\\\.\\Volume{1234}\\",
        pathtype=rdf_paths.PathSpec.PathType.OS,
        mount_point="/c:/").Append(
            pathtype=rdf_paths.PathSpec.PathType.TSK,
            path="/Test Directory/notes.txt:ads",
            inode=66,
            ntfs_type=128,
            ntfs_id=2)

    urn = pathspec.AFF4Path(rdf_client.ClientURN("C.1234567812345678"))
    self.assertEqual(
        urn,
        rdfvalue.RDFURN(r"aff4:/C.1234567812345678/fs/tsk/\\.\Volume{1234}\/"
                        "Test Directory/notes.txt:ads"))
Example #7
    def ConvertGlobIntoPathComponents(self, pattern):
        r"""Converts a glob pattern into a list of pathspec components.

    Wildcards are also converted to regular expressions. The pathspec components
    do not span directories, and are marked as a regex or a literal component.

    We also support recursion into directories using the ** notation.  For
    example, /home/**2/foo.txt will find all files named foo.txt recursed 2
    directories deep. If the directory depth is omitted, it defaults to 3.

    Example:
     /home/test/* -> ['home', 'test', '.*\\Z(?ms)']

    Args:
      pattern: A glob expression with wildcards.

    Returns:
      A list of PathSpec instances for each component.

    Raises:
      ValueError: If the glob is invalid.
    """

        components = []
        for path_component in pattern.split("/"):
            # A ** in the path component means recurse into directories that match the
            # pattern.
            m = rdf_paths.GlobExpression.RECURSION_REGEX.search(path_component)
            if m:
                path_component = path_component.replace(m.group(0), "*")

                component = rdf_paths.PathSpec(
                    path=fnmatch.translate(path_component),
                    pathtype=self.state.pathtype,
                    path_options=rdf_paths.PathSpec.Options.RECURSIVE)

                # Allow the user to override the recursion depth.
                if m.group(1):
                    component.recursion_depth = int(m.group(1))

            elif self.GLOB_MAGIC_CHECK.search(path_component):
                component = rdf_paths.PathSpec(
                    path=fnmatch.translate(path_component),
                    pathtype=self.state.pathtype,
                    path_options=rdf_paths.PathSpec.Options.REGEX)
            else:
                pathtype = self.state.pathtype
                # TODO(amoser): This is a backwards compatibility hack. Remove when
                # all clients reach 3.0.0.2.
                if (pathtype == rdf_paths.PathSpec.PathType.TSK
                        and re.match("^.:$", path_component)):
                    path_component = "%s\\" % path_component
                component = rdf_paths.PathSpec(
                    path=path_component,
                    pathtype=pathtype,
                    path_options=rdf_paths.PathSpec.Options.CASE_INSENSITIVE)

            components.append(component)

        return components
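
To make the conversion concrete, here is a self-contained simplification of the same logic, using fnmatch.translate as the method does. The RECURSION_REGEX pattern is an assumption modelled on the **<depth> notation the docstring describes, and plain dicts stand in for PathSpec:

import fnmatch
import re

RECURSION_REGEX = re.compile(r"\*\*(\d*)")  # Assumed shape of GlobExpression.RECURSION_REGEX.
GLOB_MAGIC_CHECK = re.compile("[*?[]")

def glob_to_components(pattern):
    components = []
    for part in pattern.strip("/").split("/"):
        m = RECURSION_REGEX.search(part)
        if m:
            # "**2" recurses 2 levels deep; the depth defaults to 3 if omitted.
            depth = int(m.group(1)) if m.group(1) else 3
            regex = fnmatch.translate(part.replace(m.group(0), "*"))
            components.append({"path": regex, "recursive": True, "depth": depth})
        elif GLOB_MAGIC_CHECK.search(part):
            # Wildcard components become regular expressions.
            components.append({"path": fnmatch.translate(part), "regex": True})
        else:
            # Everything else is matched as a literal component.
            components.append({"path": part, "literal": True})
    return components

print(glob_to_components("/home/**2/foo.txt"))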
Example #8
 def testFindPaths(self):
     # TODO(user): Deal with cases where multiple vars are exported.
     # export TERM PERLLIB=.:shouldntbeignored
     bashrc_data = StringIO.StringIO("""
   IGNORE='bad' PATH=${HOME}/bin:$PATH
  { PYTHONPATH=/path1:/path2 }
   export TERM=screen-256color
   export http_proxy="http://proxy.example.org:3128/"
   export HTTP_PROXY=$http_proxy
   if [[ "$some_condition" ]]; then
     export PATH=:$PATH; LD_LIBRARY_PATH=foo:bar:$LD_LIBRARY_PATH
     PYTHONPATH=$PATH:"${PYTHONPATH}"
     CLASSPATH=
   fi
   echo PATH=/should/be/ignored
   # Ignore PATH=foo:bar
   TERM=vt100 PS=" Foo" PERL5LIB=:shouldntbeignored
 """)
     cshrc_data = StringIO.StringIO("""
   setenv PATH ${HOME}/bin:$PATH
   setenv PYTHONPATH /path1:/path2
   set term = (screen-256color)
   setenv http_proxy "http://proxy.example.org:3128/"
   setenv HTTP_PROXY $http_proxy
   if ( -e "$some_condition" ) then
     set path =  (. $path); setenv LD_LIBRARY_PATH foo:bar:$LD_LIBRARY_PATH
     setenv PYTHONPATH $PATH:"${PYTHONPATH}"
     setenv CLASSPATH
   endif
   echo PATH=/should/be/ignored
   setenv PERL5LIB :shouldntbeignored
 """)
     parser = linux_file_parser.PathParser()
     bashrc_stat = rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
         path="/home/user1/.bashrc", pathtype="OS"))
     cshrc_stat = rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
         path="/home/user1/.cshrc", pathtype="OS"))
     bashrc = {
         r.name: r.vals
         for r in parser.Parse(bashrc_stat, bashrc_data, None)
     }
     cshrc = {
         r.name: r.vals
         for r in parser.Parse(cshrc_stat, cshrc_data, None)
     }
     expected = {
         "PATH": [".", "${HOME}/bin", "$PATH"],
         "PYTHONPATH": [".", "${HOME}/bin", "$PATH", "/path1", "/path2"],
         "LD_LIBRARY_PATH": ["foo", "bar", "$LD_LIBRARY_PATH"],
         "CLASSPATH": [],
         "PERL5LIB": [".", "shouldntbeignored"]
     }
     # Got the same environment variables for bash and cshrc files.
     self.assertItemsEqual(expected, bashrc)
     self.assertItemsEqual(expected, cshrc)
     # The path values are expanded correctly.
     for var_name in ("PATH", "PYTHONPATH", "LD_LIBRARY_PATH"):
         self.assertEqual(expected[var_name], bashrc[var_name])
         self.assertEqual(expected[var_name], cshrc[var_name])
Example #9
File: transfer_test.py Project: qsdj/grr
    def testMultiGetFileOfSpecialFiles(self):
        """Test that special /proc/ files are handled correctly.

    /proc/ files have the property that they are non seekable from their end
    (i.e. seeking them relative to the end is not supported). They also return
    an st_size of 0. For example:

    $ stat /proc/self/maps
    File: '/proc/self/maps'
    Size: 0   Blocks: 0   IO Block: 1024 regular empty file

    $ head /proc/self/maps
    00400000-00409000 r-xp 00000000 fc:01 9180740 /usr/bin/head
    00608000-00609000 r--p 00008000 fc:01 9180740 /usr/bin/head
    ...

    When we try to use the MultiGetFile flow, it deduplicates the files and
    since it thinks the file has a zero size, the flow will not download the
    file, and instead copy the zero size file into it.
    """
        client_mock = action_mocks.MultiGetFileClientMock()

        # Create a zero-sized file.
        zero_sized_filename = os.path.join(self.temp_dir, "zero_size")
        with open(zero_sized_filename, "wb") as fd:
            pass

        pathspec = rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.OS,
                                      path=zero_sized_filename)

        flow_test_lib.TestFlowHelper(transfer.MultiGetFile.__name__,
                                     client_mock,
                                     token=self.token,
                                     file_size="1MiB",
                                     client_id=self.client_id,
                                     pathspecs=[pathspec])

        # Now if we try to fetch a real /proc/ filename this will fail because the
        # filestore already contains the zero length file
        # aff4:/files/nsrl/da39a3ee5e6b4b0d3255bfef95601890afd80709.
        pathspec = rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.OS,
                                      path="/proc/self/environ")

        flow_test_lib.TestFlowHelper(transfer.MultiGetFile.__name__,
                                     client_mock,
                                     token=self.token,
                                     file_size=1024 * 1024,
                                     client_id=self.client_id,
                                     pathspecs=[pathspec])

        data = open(pathspec.last.path, "rb").read()

        # Test the AFF4 file that was created - it should be empty since by default
        # we judge the file size based on its stat.st_size.
        urn = pathspec.AFF4Path(self.client_id)
        fd = aff4.FACTORY.Open(urn, token=self.token)
        self.assertEqual(fd.size, len(data))
        self.assertMultiLineEqual(fd.read(len(data)), data)
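
The zero st_size property described in the docstring is easy to reproduce outside GRR. A quick Linux-only illustration using only the standard library:

import os

path = "/proc/self/maps"
print(os.stat(path).st_size)  # 0, even though the file has content.

with open(path, "rb") as fd:
    data = fd.read()  # Reading works fine; only st_size is misleading.
print(len(data) > 0)  # True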
Example #10
 def testTSKListDirectory(self):
     """Test directory listing in sleuthkit."""
     path = os.path.join(self.base_path, u"test_img.dd")
     ps2 = rdf_paths.PathSpec(path=u"入乡随俗 海外春节别样过法",
                              pathtype=rdf_paths.PathSpec.PathType.TSK)
     ps = rdf_paths.PathSpec(path=path,
                             pathtype=rdf_paths.PathSpec.PathType.OS)
     ps.Append(ps2)
     directory = vfs.VFSOpen(ps)
     self.CheckDirectoryListing(directory, u"入乡随俗.txt")
Example #11
    def testFileCasing(self):
        """Test our ability to read the correct casing from filesystem."""
        try:
            os.lstat(os.path.join(self.base_path, "nUmBeRs.txt"))
            os.lstat(os.path.join(self.base_path, "nuMbErs.txt"))
            # If we reached this point we are on a case insensitive file system
            # and the tests below do not make any sense.
            logging.warning(
                "Case insensitive file system detected. Skipping test.")
            return
        except (IOError, OSError):
            pass

        # Create 2 files with names that differ only in casing.
        with utils.TempDirectory() as temp_dir:
            path1 = os.path.join(temp_dir, "numbers.txt")
            shutil.copy(os.path.join(self.base_path, "numbers.txt"), path1)

            path2 = os.path.join(temp_dir, "numbers.TXT")
            shutil.copy(os.path.join(self.base_path, "numbers.txt.ver2"),
                        path2)

            fd = vfs.VFSOpen(
                rdf_paths.PathSpec(path=path1,
                                   pathtype=rdf_paths.PathSpec.PathType.OS))
            self.assertEqual(fd.pathspec.Basename(), "numbers.txt")

            fd = vfs.VFSOpen(
                rdf_paths.PathSpec(path=path2,
                                   pathtype=rdf_paths.PathSpec.PathType.OS))
            self.assertEqual(fd.pathspec.Basename(), "numbers.TXT")

            path = os.path.join(self.base_path, "Numbers.txt")
            fd = vfs.VFSOpen(
                rdf_paths.PathSpec(path=path,
                                   pathtype=rdf_paths.PathSpec.PathType.OS))
            read_path = fd.pathspec.Basename()

            # The exact file opened is non-deterministic but should be one of the two:
            if read_path != "numbers.txt" and read_path != "numbers.TXT":
                raise RuntimeError("read path is %s" % read_path)

            # Ensure that the produced pathspec specified no case folding:
            s = fd.Stat()
            self.assertEqual(s.pathspec.path_options,
                             rdf_paths.PathSpec.Options.CASE_LITERAL)

            # Case folding will only occur when requested - this should raise because
            # we have the CASE_LITERAL option:
            pathspec = rdf_paths.PathSpec(
                path=path,
                pathtype=rdf_paths.PathSpec.PathType.OS,
                path_options=rdf_paths.PathSpec.Options.CASE_LITERAL)
            self.assertRaises(IOError, vfs.VFSOpen, pathspec)
Example #12
    def testTSKFile(self):
        """Test our ability to read from image files."""
        path = os.path.join(self.base_path, "test_img.dd")
        path2 = "Test Directory/numbers.txt"

        p2 = rdf_paths.PathSpec(path=path2,
                                pathtype=rdf_paths.PathSpec.PathType.TSK)
        p1 = rdf_paths.PathSpec(path=path,
                                pathtype=rdf_paths.PathSpec.PathType.OS)
        p1.Append(p2)
        fd = vfs.VFSOpen(p1)
        self.TestFileHandling(fd)
Example #13
    def testUnicodeFile(self):
        """Test ability to read unicode files from images."""
        path = os.path.join(self.base_path, "test_img.dd")
        path2 = os.path.join(u"איןד ןד ש אקדא", u"איןד.txt")

        ps2 = rdf_paths.PathSpec(path=path2,
                                 pathtype=rdf_paths.PathSpec.PathType.TSK)

        ps = rdf_paths.PathSpec(path=path,
                                pathtype=rdf_paths.PathSpec.PathType.OS)
        ps.Append(ps2)
        fd = vfs.VFSOpen(ps)
        self.TestFileHandling(fd)
Example #14
    def testRecursiveImages(self):
        """Test directory listing in sleuthkit."""
        p3 = rdf_paths.PathSpec(path="/home/a.txt",
                                pathtype=rdf_paths.PathSpec.PathType.TSK)
        p2 = rdf_paths.PathSpec(path="/home/image2.img",
                                pathtype=rdf_paths.PathSpec.PathType.TSK)
        p1 = rdf_paths.PathSpec(path=os.path.join(self.base_path,
                                                  "test_img.dd"),
                                pathtype=rdf_paths.PathSpec.PathType.OS)
        p2.Append(p3)
        p1.Append(p2)
        f = vfs.VFSOpen(p1)

        self.assertEqual(f.read(3), "yay")
Example #15
File: osx_launchd.py Project: rainser/grr
  def Parse(self, persistence, knowledge_base, download_pathtype):
    """Convert persistence collector output to downloadable rdfvalues."""
    pathspecs = []

    if isinstance(persistence, rdf_client.OSXServiceInformation):
      if persistence.program:
        pathspecs = rdf_paths.PathSpec(
            path=persistence.program, pathtype=download_pathtype)
      elif persistence.args:
        pathspecs = rdf_paths.PathSpec(
            path=persistence.args[0], pathtype=download_pathtype)

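    # Note: pathspecs may be a list (the [] default above) or a single
    # PathSpec; the loop below relies on the PathSpec itself being iterable.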
    for pathspec in pathspecs:
      yield rdf_standard.PersistenceFile(pathspec=pathspec)
Example #16
    def testFindFilesWithGlob(self):
        """Test that the Find flow works with glob."""
        client_mock = action_mocks.ActionMock(searching.Find)

        # Prepare a findspec.
        findspec = rdf_client.FindSpec(
            path_glob="bash*",
            pathspec=rdf_paths.PathSpec(
                path="/", pathtype=rdf_paths.PathSpec.PathType.OS))

        session_id = flow_test_lib.TestFlowHelper(find.FindFiles.__name__,
                                                  client_mock,
                                                  client_id=self.client_id,
                                                  token=self.token,
                                                  findspec=findspec)

        # Check the results collection.
        fd = flow.GRRFlow.ResultCollectionForFID(session_id)

        # Make sure that bash is a file.
        matches = set([x.AFF4Path(self.client_id).Basename() for x in fd])
        self.assertEqual(sorted(matches), ["bash"])

        self.assertEqual(len(fd), 2)
        for child in fd:
            path = utils.SmartStr(child.AFF4Path(self.client_id))
            self.assertTrue(path.endswith("bash"))
            self.assertEqual(child.__class__.__name__, "StatEntry")
Example #17
    def testFindDirectories(self):
        """Test that the Find flow works with directories."""

        client_mock = action_mocks.ActionMock(searching.Find)

        # Prepare a findspec.
        findspec = rdf_client.FindSpec(
            path_regex="bin",
            pathspec=rdf_paths.PathSpec(
                path="/", pathtype=rdf_paths.PathSpec.PathType.OS))

        session_id = flow_test_lib.TestFlowHelper(find.FindFiles.__name__,
                                                  client_mock,
                                                  client_id=self.client_id,
                                                  token=self.token,
                                                  findspec=findspec)

        # Check the results collection.
        fd = flow.GRRFlow.ResultCollectionForFID(session_id)

        # Make sure that bin is a directory
        self.assertEqual(len(fd), 2)
        for child in fd:
            path = utils.SmartStr(child.AFF4Path(self.client_id))
            self.assertTrue("bin" in path)
            self.assertEqual(child.__class__.__name__, "StatEntry")
Example #18
File: transfer_test.py Project: qsdj/grr
    def testMultiGetFileSetsFileHashAttributeWhenMultipleChunksDownloaded(
            self):
        client_mock = action_mocks.MultiGetFileClientMock()
        pathspec = rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.OS,
                                      path=os.path.join(
                                          self.base_path, "test_img.dd"))

        args = transfer.MultiGetFileArgs(pathspecs=[pathspec])
        flow_test_lib.TestFlowHelper(transfer.MultiGetFile.__name__,
                                     client_mock,
                                     token=self.token,
                                     client_id=self.client_id,
                                     args=args)

        # Fix path for Windows testing.
        pathspec.path = pathspec.path.replace("\\", "/")
        # Test the AFF4 file that was created.
        urn = pathspec.AFF4Path(self.client_id)
        fd_hash = data_store_utils.GetUrnHashEntry(urn)

        self.assertTrue(fd_hash)

        h = hashlib.sha256()
        with open(os.path.join(self.base_path, "test_img.dd"),
                  "rb") as model_fd:
            h.update(model_fd.read())
        self.assertEqual(fd_hash.sha256, h.digest())
Example #19
File: transfer_test.py Project: qsdj/grr
    def testMultiGetFileSizeLimit(self):
        client_mock = action_mocks.MultiGetFileClientMock()
        image_path = os.path.join(self.base_path, "test_img.dd")
        pathspec = rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.OS,
                                      path=image_path)

        # Read a bit more than one chunk (600 * 1024).
        expected_size = 750 * 1024
        args = transfer.MultiGetFileArgs(pathspecs=[pathspec],
                                         file_size=expected_size)
        flow_test_lib.TestFlowHelper(transfer.MultiGetFile.__name__,
                                     client_mock,
                                     token=self.token,
                                     client_id=self.client_id,
                                     args=args)

        urn = pathspec.AFF4Path(self.client_id)
        blobimage = aff4.FACTORY.Open(urn, token=self.token)
        # Make sure a VFSBlobImage got written.
        self.assertTrue(isinstance(blobimage, aff4_grr.VFSBlobImage))

        self.assertEqual(len(blobimage), expected_size)
        data = blobimage.read(100 * expected_size)
        self.assertEqual(len(data), expected_size)

        expected_data = open(image_path, "rb").read(expected_size)

        self.assertEqual(data, expected_data)
        hash_obj = data_store_utils.GetFileHashEntry(blobimage)

        d = hashlib.sha1()
        d.update(expected_data)
        expected_hash = d.hexdigest()

        self.assertEqual(hash_obj.sha1, expected_hash)
Example #20
File: transfer_test.py Project: qsdj/grr
    def testMultiGetFile(self):
        """Test MultiGetFile."""

        client_mock = action_mocks.MultiGetFileClientMock()
        pathspec = rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.OS,
                                      path=os.path.join(
                                          self.base_path, "test_img.dd"))

        args = transfer.MultiGetFileArgs(pathspecs=[pathspec, pathspec])
        with test_lib.Instrument(transfer.MultiGetFile,
                                 "StoreStat") as storestat_instrument:
            flow_test_lib.TestFlowHelper(transfer.MultiGetFile.__name__,
                                         client_mock,
                                         token=self.token,
                                         client_id=self.client_id,
                                         args=args)

            # We should only have called StoreStat once because the two paths
            # requested were identical.
            self.assertEqual(len(storestat_instrument.args), 1)

        # Fix path for Windows testing.
        pathspec.path = pathspec.path.replace("\\", "/")
        # Test the AFF4 file that was created.
        urn = pathspec.AFF4Path(self.client_id)
        fd1 = aff4.FACTORY.Open(urn, token=self.token)
        fd2 = open(pathspec.path, "rb")
        fd2.seek(0, 2)

        self.assertEqual(fd2.tell(), int(fd1.Get(fd1.Schema.SIZE)))
        self.CompareFDs(fd1, fd2)
Example #21
File: transfer_test.py Project: qsdj/grr
    def testMultiGetFileDeduplication(self):
        client_mock = action_mocks.MultiGetFileClientMock()

        pathspecs = []
        # Make 10 files to download.
        for i in xrange(10):
            path = os.path.join(self.temp_dir, "test_%s.txt" % i)
            with open(path, "wb") as fd:
                fd.write("Hello")

            pathspecs.append(
                rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.OS,
                                   path=path))

        # All those files are the same so the individual chunks should
        # only be downloaded once. By forcing maximum_pending_files=1,
        # there should only be a single TransferBuffer call.
        args = transfer.MultiGetFileArgs(pathspecs=pathspecs,
                                         maximum_pending_files=1)
        flow_test_lib.TestFlowHelper(transfer.MultiGetFile.__name__,
                                     client_mock,
                                     token=self.token,
                                     client_id=self.client_id,
                                     args=args)

        self.assertEqual(client_mock.action_counts["TransferBuffer"], 1)
Example #22
File: transfer_test.py Project: qsdj/grr
    def testGetFilePathCorrection(self):
        """Tests that the pathspec returned is used for the aff4path."""
        client_mock = action_mocks.GetFileClientMock()
        # Deliberately using the wrong casing.
        pathspec = rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.OS,
                                      path=os.path.join(
                                          self.base_path, "TEST_IMG.dd"))

        session_id = flow_test_lib.TestFlowHelper(transfer.GetFile.__name__,
                                                  client_mock,
                                                  token=self.token,
                                                  client_id=self.client_id,
                                                  pathspec=pathspec)

        results = flow.GRRFlow.ResultCollectionForFID(session_id)
        self.assertEqual(len(results), 1)
        res_pathspec = results[0].pathspec

        # Fix path for Windows testing.
        pathspec.path = pathspec.path.replace("\\", "/")
        # Test the AFF4 file that was created.
        urn = res_pathspec.AFF4Path(self.client_id)
        fd1 = aff4.FACTORY.Open(urn, token=self.token)
        fd2 = open(res_pathspec.path, "rb")
        fd2.seek(0, 2)

        self.assertEqual(fd2.tell(), int(fd1.Get(fd1.Schema.SIZE)))
        self.CompareFDs(fd1, fd2)
Example #23
File: standard.py Project: rainser/grr
    def Run(self, args):
        if platform.system() == "Windows":
            raise RuntimeError("os.statvfs not available on Windows")

        for path in args.path_list:

            try:
                fd = vfs.VFSOpen(rdf_paths.PathSpec(path=path,
                                                    pathtype=args.pathtype),
                                 progress_callback=self.Progress)
                st = fd.StatFS()
                mount_point = fd.GetMountPoint()
            except (IOError, OSError) as e:
                self.SetStatus(rdf_flows.GrrStatus.ReturnedStatus.IOERROR, e)
                continue

            unix = rdf_client.UnixVolume(mount_point=mount_point)

            # On Linux, pre-2.6 kernels don't have frsize, so we fall back to bsize.
            # The actual_available_allocation_units attribute is set to blocks
            # available to the unprivileged user, root may have some additional
            # reserved space.
            self.SendReply(
                rdf_client.Volume(
                    bytes_per_sector=(st.f_frsize or st.f_bsize),
                    sectors_per_allocation_unit=1,
                    total_allocation_units=st.f_blocks,
                    actual_available_allocation_units=st.f_bavail,
                    unixvolume=unix))
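
The fields used above map directly onto Python's os.statvfs result, so the frsize/bsize fallback is easy to check on any POSIX machine:

import os

st = os.statvfs("/")
bytes_per_sector = st.f_frsize or st.f_bsize  # Old kernels may leave f_frsize at 0.
total_bytes = st.f_blocks * bytes_per_sector
available_to_unprivileged = st.f_bavail * bytes_per_sector  # Root may have extra reserved space.
print(total_bytes, available_to_unprivileged)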
Example #24
File: vfs_test_lib.py Project: rainser/grr
    def PopulateCache(self):
        """Parse the paths from the fixture."""
        if self.paths:
            return

        # The cache is attached to the class so it can be shared by all instances.
        self.paths = self.__class__.cache[self.prefix] = {}
        for path, (vfs_type, attributes) in client_fixture.VFS:
            if not path.startswith(self.prefix):
                continue

            path = utils.NormalizePath(path[len(self.prefix):])
            if path == "/":
                continue

            stat = rdf_client.StatEntry()
            args = {"client_id": "C.1234"}
            attrs = attributes.get("aff4:stat")

            if attrs:
                attrs %= args  # Remove any %% and interpolate client_id.
                stat = rdf_client.StatEntry.FromTextFormat(
                    utils.SmartStr(attrs))

            stat.pathspec = rdf_paths.PathSpec(
                pathtype=self.supported_pathtype, path=path)

            # TODO(user): Once we add tests around not crossing device boundaries,
            # we need to be smarter here, especially for the root entry.
            stat.st_dev = 1
            path = self._NormalizeCaseForPath(path, vfs_type)
            self.paths[path] = (vfs_type, stat)

        self.BuildIntermediateDirectories()
Example #25
    def testCronJobPreservesFlowNameAndArguments(self):
        """Testing initialization of a ConfigManager."""
        pathspec = rdf_paths.PathSpec(path="/foo",
                                      pathtype=rdf_paths.PathSpec.PathType.TSK)

        cron_manager = aff4_cronjobs.GetCronManager()

        cron_args = rdf_cronjobs.CreateCronJobFlowArgs(periodicity="1d",
                                                       allow_overruns=False)

        cron_args.flow_runner_args.flow_name = transfer.GetFile.__name__
        cron_args.flow_args.pathspec = pathspec

        job_id = cron_manager.CreateJob(cron_args=cron_args)

        # Check that CronJob definition is saved properly
        jobs = cron_manager.ListJobs(token=self.token)
        self.assertEqual(len(jobs), 1)
        self.assertEqual(jobs[0], job_id)

        cron_job = cron_manager.ReadJob(job_id, token=self.token)
        self.assertEqual(cron_job.cron_args.flow_runner_args.flow_name,
                         transfer.GetFile.__name__)

        self.assertEqual(cron_job.cron_args.flow_args.pathspec, pathspec)

        self.assertEqual(cron_job.cron_args.periodicity,
                         rdfvalue.Duration("1d"))
        self.assertEqual(cron_job.cron_args.allow_overruns, False)
Example #26
    def GetRegistryValue(self, source):
        """Retrieve directly specified registry values, returning Stat objects."""
        new_paths = set()
        has_glob = False
        for kvdict in source.attributes["key_value_pairs"]:
            if "*" in kvdict["key"] or rdf_paths.GROUPING_PATTERN.search(
                    kvdict["key"]):
                has_glob = True

            if kvdict["value"]:
                # This currently only supports key value pairs specified using forward
                # slash.
                path = "\\".join((kvdict["key"], kvdict["value"]))
            else:
                # If value is not set, we want to get the default value. In
                # GRR this is done by specifying the key only, so this is what
                # we do here.
                path = kvdict["key"]

            expanded_paths = artifact_utils.InterpolateKbAttributes(
                path,
                self.state.knowledge_base,
                ignore_errors=self.args.ignore_interpolation_errors)
            new_paths.update(expanded_paths)

        if has_glob:
            self.CallFlow(filesystem.Glob.__name__,
                          paths=new_paths,
                          pathtype=rdf_paths.PathSpec.PathType.REGISTRY,
                          request_data={
                              "artifact_name": self.current_artifact_name,
                              "source": source.ToPrimitiveDict()
                          },
                          next_state="ProcessCollected")
        else:
            # We call statfile directly for keys that don't include globs because it
            # is faster and some artifacts rely on getting an IOError to trigger
            # fallback processing.
            for new_path in new_paths:
                pathspec = rdf_paths.PathSpec(
                    path=new_path,
                    pathtype=rdf_paths.PathSpec.PathType.REGISTRY)

                # TODO(hanuszczak): Support for old clients ends on 2021-01-01.
                # This conditional should be removed after that date.
                if self.client_version >= 3221:
                    stub = server_stubs.GetFileStat
                    request = rdf_client.GetFileStatRequest(pathspec=pathspec)
                else:
                    stub = server_stubs.StatFile
                    request = rdf_client.ListDirRequest(pathspec=pathspec)

                self.CallClient(stub,
                                request,
                                request_data={
                                    "artifact_name":
                                    self.current_artifact_name,
                                    "source": source.ToPrimitiveDict()
                                },
                                next_state="ProcessCollectedRegistryStatEntry")
Example #27
    def testListDirectory(self):
        """Test our ability to list a directory."""
        directory = vfs.VFSOpen(
            rdf_paths.PathSpec(path=self.base_path,
                               pathtype=rdf_paths.PathSpec.PathType.OS))

        self.CheckDirectoryListing(directory, "morenumbers.txt")
Example #28
  def Start(self):
    """Schedules the ReadBuffer client action."""
    pathspec = rdf_paths.PathSpec(
        path="\\\\.\\PhysicalDrive0\\",
        pathtype=rdf_paths.PathSpec.PathType.OS,
        path_options=rdf_paths.PathSpec.Options.CASE_LITERAL)

    self.state.bytes_downloaded = 0
    # An array to collect buffers. This is not very efficient, MBR
    # data should be kept short though so this is not a big deal.
    self.state.buffers = []

    buffer_size = constants.CLIENT_MAX_BUFFER_SIZE
    buffers_we_need = self.args.length / buffer_size
    if self.args.length % buffer_size:
      buffers_we_need += 1

    bytes_we_need = self.args.length

    for i in xrange(buffers_we_need):
      request = rdf_client.BufferReference(
          pathspec=pathspec,
          offset=i * buffer_size,
          length=min(bytes_we_need, buffer_size))
      self.CallClient(server_stubs.ReadBuffer, request, next_state="StoreMBR")
      bytes_we_need -= buffer_size
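
The loop in Start amounts to a ceiling division of the requested length into fixed-size reads. The same offsets and lengths can be computed directly; here chunk stands in for constants.CLIENT_MAX_BUFFER_SIZE:

def buffer_requests(length, chunk):
    """Yield (offset, length) pairs covering `length` bytes in chunk-sized reads."""
    for i in range((length + chunk - 1) // chunk):  # Ceiling division.
        yield i * chunk, min(length - i * chunk, chunk)

print(list(buffer_requests(2500, chunk=1024)))
# [(0, 1024), (1024, 1024), (2048, 452)]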
Example #29
    def testFileSizeDistributionReportPlugin(self):
        filename = "winexec_img.dd"
        client_id = self.SetupClient(0)

        # Add a file to be reported.
        filestore_test_lib.AddFileToFileStore(rdf_paths.PathSpec(
            pathtype=rdf_paths.PathSpec.PathType.OS,
            path=os.path.join(self.base_path, filename)),
                                              client_id=client_id,
                                              token=self.token)

        # Scan for files to be reported (the one we just added).
        flow_test_lib.TestFlowHelper(
            filestore_stats.FilestoreStatsCronFlow.__name__, token=self.token)

        report = report_plugins.GetReportByName(
            filestore_report_plugins.FileSizeDistributionReportPlugin.__name__)

        api_report_data = report.GetReportData(
            stats_api.ApiGetReportArgs(name=report.__class__.__name__),
            token=self.token)

        self.checkStaticData(api_report_data)

        for series in api_report_data.stack_chart.data:
            if series.label == "976.6 KiB - 4.8 MiB":
                self.assertEqual([p.y for p in series.points], [1])
            else:
                self.assertEqual([p.y for p in series.points], [0])
Example #30
    def testRecursiveListNames(self):
        """Test our ability to walk over a directory tree."""
        path = os.path.join(self.base_path, "a")

        directory = vfs.VFSOpen(
            rdf_paths.PathSpec(path=path,
                               pathtype=rdf_paths.PathSpec.PathType.OS))

        # Test the helper method
        self.assertEqual(directory._GetDepth("/"), 0)
        self.assertEqual(directory._GetDepth("/foo/bar/baz"), 3)
        # Relative paths aren't supported
        with self.assertRaises(RuntimeError):
            directory._GetDepth("foo/bar")
        # Multiple separators are redundant
        self.assertEqual(directory._GetDepth("/////foo///bar//////baz//"), 3)

        # Test the whole thing
        walk_tups_0 = list(directory.RecursiveListNames())
        walk_tups_1 = list(directory.RecursiveListNames(depth=1))
        walk_tups_2 = list(directory.RecursiveListNames(depth=2))
        walk_tups_inf = list(directory.RecursiveListNames(depth=float("inf")))

        self.assertEqual(walk_tups_0, [(path, ["b"], [])])
        self.assertEqual(walk_tups_1, [(path, ["b"], []),
                                       ("%s/b" % path, ["c", "d"], [])])
        self.assertEqual(walk_tups_2, [(path, ["b"], []),
                                       ("%s/b" % path, ["c", "d"], []),
                                       ("%s/b/c" % path, [], ["helloc.txt"]),
                                       ("%s/b/d" % path, [], ["hellod.txt"])])
        self.assertEqual(walk_tups_inf,
                         [(path, ["b"], []), ("%s/b" % path, ["c", "d"], []),
                          ("%s/b/c" % path, [], ["helloc.txt"]),
                          ("%s/b/d" % path, [], ["hellod.txt"])])