Example #1
 def testNTFSFile(self):
     pathspec = rdf_paths.PathSpec(
         path=os.path.join(self.base_path, "ntfs.img"),
         pathtype=rdf_paths.PathSpec.PathType.OS,
         path_options=rdf_paths.PathSpec.Options.CASE_LITERAL,
         nested_path=rdf_paths.PathSpec(
             path="numbers.txt", pathtype=rdf_paths.PathSpec.PathType.NTFS))
     fd = vfs.VFSOpen(pathspec)
     self.TestFileHandling(fd)
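The same nesting can also be built with `Append`, the pattern used in the TSK examples below; a minimal sketch, assuming the same `ntfs.img` test image and its `numbers.txt` entry:

    outer = rdf_paths.PathSpec(
        path=os.path.join(self.base_path, "ntfs.img"),
        pathtype=rdf_paths.PathSpec.PathType.OS)
    # Append() attaches the inner pathspec as the nested_path of the outer one.
    outer.Append(
        rdf_paths.PathSpec(
            path="numbers.txt", pathtype=rdf_paths.PathSpec.PathType.NTFS))
    fd = vfs.VFSOpen(outer)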
Example #2
 def testNTFSNestedFile(self):
     pathspec = self._GetNTFSPathSpec("/a/b1/c1/d")
     fd = vfs.VFSOpen(pathspec)
     self.assertEqual(fd.Read(100), b"foo\n")
     result = fd.Stat()
     self.assertEqual(
         result.pathspec,
         self._GetNTFSPathSpec("/a/b1/c1/d", A_B1_C1_D_FILE_REF,
                               rdf_paths.PathSpec.Options.CASE_LITERAL))
Example #3
    def Run(self, args):
        """Fingerprint a file."""
        with vfs.VFSOpen(args.pathspec,
                         progress_callback=self.Progress) as file_obj:
            fingerprinter = Fingerprinter(self.Progress, file_obj)
            response = rdf_client_action.FingerprintResponse()
            response.pathspec = file_obj.pathspec
            if args.tuples:
                tuples = args.tuples
            else:
                # There are none selected -- we will cover everything
                tuples = list()
                for k in self._fingerprint_types:
                    tuples.append(
                        rdf_client_action.FingerprintTuple(fp_type=k))

            for finger in tuples:
                hashers = [self._hash_types[h] for h in finger.hashers] or None
                if finger.fp_type in self._fingerprint_types:
                    invoke = self._fingerprint_types[finger.fp_type]
                    res = invoke(fingerprinter, hashers)
                    if res:
                        response.matching_types.append(finger.fp_type)
                else:
                    raise RuntimeError(
                        "Encountered unknown fingerprint type. %s" %
                        finger.fp_type)

            # Structure of the results is a list of dicts, each containing the
            # name of the hashing method, hashes for enabled hash algorithms,
            # and auxiliary data where present (e.g. signature blobs).
            # Also see Fingerprint:HashIt()
            response.results = fingerprinter.HashIt()

            # We now return data in a more structured form.
            for result in response.results:
                if result.GetItem("name") == "generic":
                    for hash_type in ["md5", "sha1", "sha256"]:
                        value = result.GetItem(hash_type)
                        if value is not None:
                            setattr(response.hash, hash_type, value)

                if result["name"] == "pecoff":
                    for hash_type in ["md5", "sha1", "sha256"]:
                        value = result.GetItem(hash_type)
                        if value:
                            setattr(response.hash, "pecoff_" + hash_type,
                                    value)

                    signed_data = result.GetItem("SignedData", [])
                    for data in signed_data:
                        response.hash.signed_data.Append(revision=data[0],
                                                         cert_type=data[1],
                                                         certificate=data[2])

            self.SendReply(response)
Example #4
 def testNTFSReadUnicode(self):
     with tempfile.TemporaryDirectory() as tmp_dir:
         path = os.path.join(tmp_dir, "入乡随俗 海外春节别样过法")
         file_data = "中国新闻"
         with open(path, "w", encoding="utf-8") as f:
             f.write(file_data)
         pathspec = rdf_paths.PathSpec(
             path=path, pathtype=rdf_paths.PathSpec.PathType.NTFS)
         fd = vfs.VFSOpen(pathspec)
         self.assertEqual(fd.Read(100).decode("utf-8"), file_data)
Example #5
 def testNTFSListNames(self):
   pathspec = self._GetNTFSPathSpec("/")
   fd = vfs.VFSOpen(pathspec)
   names = fd.ListNames()
   expected_names = [
       "$AttrDef", "$BadClus", "$Bitmap", "$Boot", "$Extend", "$LogFile",
       "$MFT", "$MFTMirr", "$Secure", "$UpCase", "$Volume", "a", "ads",
       "numbers.txt"
   ]
   self.assertSameElements(names, expected_names)
Example #6
 def testFileStat(self):
   fd = vfs.VFSOpen(
       rdf_paths.PathSpec(
           path=r"/HKEY_LOCAL_MACHINE\SOFTWARE\GRR_TEST\aaa",
           pathtype="REGISTRY"))
   stat = fd.Stat()
   self.assertIn(stat.pathspec.path,
                 "/HKEY_LOCAL_MACHINE/SOFTWARE/GRR_TEST/aaa")
   self.assertEqual(stat.pathspec.pathtype, "REGISTRY")
   self.assertEqual(stat.st_size, 6)
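A small follow-up sketch, assuming the registry value's data can be read back through the same VFS handle (the read size reuses the 6-byte `st_size` from the Stat call above):

    # Assumed follow-up: read the value's data via the VFS handle.
    data = fd.Read(stat.st_size)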
Example #7
 def testTSKListDirectory(self):
     """Test directory listing in sleuthkit."""
     path = os.path.join(self.base_path, u"test_img.dd")
     ps2 = rdf_paths.PathSpec(path=u"入乡随俗 海外春节别样过法",
                              pathtype=rdf_paths.PathSpec.PathType.TSK)
     ps = rdf_paths.PathSpec(path=path,
                             pathtype=rdf_paths.PathSpec.PathType.OS)
     ps.Append(ps2)
     directory = vfs.VFSOpen(ps)
     self.CheckDirectoryListing(directory, u"入乡随俗.txt")
Example #8
    def testGuessPathSpec(self):
        """Test that we can guess a pathspec from a path."""
        path = os.path.join(self.base_path, "test_img.dd", "home/image2.img",
                            "home/a.txt")

        pathspec = rdf_paths.PathSpec(path=path,
                                      pathtype=rdf_paths.PathSpec.PathType.OS)

        fd = vfs.VFSOpen(pathspec)
        self.assertEqual(fd.read(3), "yay")
Example #9
    def Run(self, args):
        try:
            fd = vfs.VFSOpen(args.pathspec, progress_callback=self.Progress)

            stat_entry = fd.Stat(ext_attrs=args.collect_ext_attrs,
                                 follow_symlink=args.follow_symlink)

            self.SendReply(stat_entry)
        except (IOError, OSError) as error:
            self.SetStatus(rdf_flows.GrrStatus.ReturnedStatus.IOERROR, error)
Example #10
    def Run(self, args):
        """Run."""
        # This action might crash the box so we need to flush the transaction log.
        self.SyncTransactionLog()

        if args.pathtype != "MEMORY":
            raise RuntimeError(
                "Can only GetMemoryInformation on memory devices.")

        with vfs.VFSOpen(args) as fd:
            self.SendReply(fd.GetMemoryInformation())
Example #11
 def _OpenAndCheckImplementationType(
         self, pathspec: rdf_paths.PathSpec,
         implementation_type: rdf_paths.PathSpec.ImplementationType
 ) -> None:
     with vfs.VFSOpen(pathspec) as f:
         self._CheckHasImplementationType(f.pathspec, implementation_type)
         self._CheckHasImplementationType(f.Stat().pathspec,
                                          implementation_type)
         for child in f.ListFiles():
             self._CheckHasImplementationType(child.pathspec,
                                              implementation_type)
Example #12
def _CheckConditionsShortCircuit(content_conditions, pathspec):
    """Checks all `content_conditions` until one yields no matches."""
    matches = []
    for cond in content_conditions:
        with vfs.VFSOpen(pathspec) as vfs_file:
            cur_matches = list(cond.Search(vfs_file))
        if cur_matches:
            matches.extend(cur_matches)
        else:  # As soon as one condition does not match, we skip the file.
            return []  # Return no matches to indicate skipping this file.
    return matches
Example #13
  def testTSKFile(self):
    """Test our ability to read from image files."""
    path = os.path.join(self.base_path, "test_img.dd")
    path2 = "Test Directory/numbers.txt"

    p2 = rdf_paths.PathSpec(
        path=path2, pathtype=rdf_paths.PathSpec.PathType.TSK)
    p1 = rdf_paths.PathSpec(path=path, pathtype=rdf_paths.PathSpec.PathType.OS)
    p1.Append(p2)
    fd = vfs.VFSOpen(p1)
    self.TestFileHandling(fd)
Example #14
 def testNTFSListFiles_alternateDataStreams(self):
   pathspec = self._GetNTFSPathSpec("/ads")
   fd = vfs.VFSOpen(pathspec)
   files = fd.ListFiles()
   files = list(files)
   files.sort(key=lambda x: x.pathspec.Basename())
   expected_files = [
       rdf_client_fs.StatEntry(
           pathspec=self._GetNTFSPathSpec(
               "/ads/ads.txt",
               inode=ADS_ADS_TXT_FILE_REF,
               path_options=rdf_paths.PathSpec.Options.CASE_LITERAL),
           st_atime=rdfvalue.RDFDatetimeSeconds.FromHumanReadable(
               "2020-04-07 13:48:51"),
           st_crtime=rdfvalue.RDFDatetimeSeconds.FromHumanReadable(
               "2020-04-07 13:18:53"),
           st_mtime=rdfvalue.RDFDatetimeSeconds.FromHumanReadable(
               "2020-04-07 13:48:56"),
           st_mode=stat.S_IFREG,
           st_size=5,
       ),
       rdf_client_fs.StatEntry(
           pathspec=self._GetNTFSPathSpec(
               "/ads/ads.txt",
               inode=ADS_ADS_TXT_FILE_REF,
               path_options=rdf_paths.PathSpec.Options.CASE_LITERAL,
               stream_name="one"),
           st_atime=rdfvalue.RDFDatetimeSeconds.FromHumanReadable(
               "2020-04-07 13:48:51"),
           st_crtime=rdfvalue.RDFDatetimeSeconds.FromHumanReadable(
               "2020-04-07 13:18:53"),
           st_mtime=rdfvalue.RDFDatetimeSeconds.FromHumanReadable(
               "2020-04-07 13:48:56"),
           st_mode=stat.S_IFREG,
           st_size=6,
       ),
       rdf_client_fs.StatEntry(
           pathspec=self._GetNTFSPathSpec(
               "/ads/ads.txt",
               inode=ADS_ADS_TXT_FILE_REF,
               path_options=rdf_paths.PathSpec.Options.CASE_LITERAL,
               stream_name="two"),
           st_atime=rdfvalue.RDFDatetimeSeconds.FromHumanReadable(
               "2020-04-07 13:48:51"),
           st_crtime=rdfvalue.RDFDatetimeSeconds.FromHumanReadable(
               "2020-04-07 13:18:53"),
           st_mtime=rdfvalue.RDFDatetimeSeconds.FromHumanReadable(
               "2020-04-07 13:48:56"),
           st_mode=stat.S_IFREG,
           st_size=7,
       ),
   ]
   # print("XX", str(files).replace("\\n", "\n"))
   self.assertEqual(files, expected_files)
Example #15
    def testTSKBTime(self):
        pathspec = rdf_paths.PathSpec(
            path=os.path.join(self.base_path, "ntfs_img.dd"),
            pathtype=rdf_paths.PathSpec.PathType.OS,
            offset=63 * 512,
            nested_path=rdf_paths.PathSpec(
                path="/Test Directory/notes.txt",
                pathtype=rdf_paths.PathSpec.PathType.TSK))

        fd = vfs.VFSOpen(pathspec)
        st = fd.Stat()
        self.assertEqual(str(st.st_btime), "2011-12-17 00:14:37")
Example #16
    def _GenerateLiteralMatch(self, dirpath):
        if PATH_GLOB_REGEX.search(self._glob) is not None:
            return None

        new_path = os.path.join(dirpath, self._glob)
        pathspec = rdf_paths.PathSpec(path=new_path,
                                      pathtype=self.opts.pathtype)
        try:
            fd = vfs.VFSOpen(pathspec)
            return os.path.basename(fd.path)
        except IOError:
            return None  # Indicate "File not found" by returning None.
Example #17
def CollectLargeFile(
    args: rdf_large_file.CollectLargeFileArgs,
) -> Iterator[rdf_large_file.CollectLargeFileResult]:
  """Implements the large file collection action procedure."""
  with vfs.VFSOpen(args.path_spec) as file:
    session = gcs.UploadSession.Open(args.signed_url)

    result = rdf_large_file.CollectLargeFileResult()
    result.session_uri = session.uri
    yield result

    session.SendFile(file)
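A hypothetical driver sketch for the generator above, assuming the caller is a client action that relays each yielded result (the first one carries the upload session URI) back to the server:

    # Hypothetical wrapper around CollectLargeFile: forward every result.
    for result in CollectLargeFile(args):
        self.SendReply(result)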
Example #18
  def testUnicodeFile(self):
    """Test ability to read unicode files from images."""
    path = os.path.join(self.base_path, "test_img.dd")
    path2 = os.path.join(u"איןד ןד ש אקדא", u"איןד.txt")

    ps2 = rdf_paths.PathSpec(
        path=path2, pathtype=rdf_paths.PathSpec.PathType.TSK)

    ps = rdf_paths.PathSpec(path=path, pathtype=rdf_paths.PathSpec.PathType.OS)
    ps.Append(ps2)
    fd = vfs.VFSOpen(ps)
    self.TestFileHandling(fd)
Example #19
    def testOpenFilehandlesExpire(self):
        """Test that file handles expire from cache."""
        files.FILE_HANDLE_CACHE = utils.FastStore(max_size=10)

        current_process = psutil.Process(os.getpid())
        num_open_files = len(current_process.open_files())

        path = os.path.join(self.base_path, "morenumbers.txt")
        fd = vfs.VFSOpen(
            rdf_paths.PathSpec(path=path,
                               pathtype=rdf_paths.PathSpec.PathType.OS))

        fds = []
        for filename in fd.ListNames():
            child_fd = vfs.VFSOpen(
                rdf_paths.PathSpec(path=os.path.join(path, filename),
                                   pathtype=rdf_paths.PathSpec.PathType.OS))
            fd.read(20)
            fds.append(child_fd)

        # This should not create any new file handles.
        self.assertLess(len(current_process.open_files()) - num_open_files, 5)
Example #20
  def testNTFSStat(self):
    pathspec = self._GetNTFSPathSpec("numbers.txt")

    fd = vfs.VFSOpen(pathspec)
    s = fd.Stat()
    self.assertEqual(
        s.pathspec,
        self._GetNTFSPathSpec("/numbers.txt", NUMBERS_TXT_FILE_REF,
                              rdf_paths.PathSpec.Options.CASE_LITERAL))
    self.assertEqual(str(s.st_atime), "2020-03-03 20:10:46")
    self.assertEqual(str(s.st_mtime), "2020-03-03 20:10:46")
    self.assertEqual(str(s.st_crtime), "2020-03-03 16:46:00")
    self.assertEqual(s.st_size, 3893)
Example #21
  def Run(self, args):
    """Lists a directory."""
    try:
      directory = vfs.VFSOpen(args.pathspec, progress_callback=self.Progress)
    except (IOError, OSError) as e:
      self.SetStatus(rdf_flows.GrrStatus.ReturnedStatus.IOERROR, e)
      return

    files = list(directory.ListFiles())
    files.sort(key=lambda x: x.pathspec.path)

    for response in files:
      self.SendReply(response)
Example #22
 def testNTFSListFiles(self):
   pathspec = self._GetNTFSPathSpec("/")
   fd = vfs.VFSOpen(pathspec)
   files = fd.ListFiles()
   files = [f for f in files if not f.pathspec.Basename().startswith("$")]
   files = list(files)
   files.sort(key=lambda x: x.pathspec.Basename())
   expected_files = [
       rdf_client_fs.StatEntry(
           pathspec=self._GetNTFSPathSpec(
               "/a",
               inode=A_FILE_REF,
               path_options=rdf_paths.PathSpec.Options.CASE_LITERAL),
           st_atime=rdfvalue.RDFDatetimeSeconds.FromHumanReadable(
               "2020-03-03 16:48:16"),
           st_crtime=rdfvalue.RDFDatetimeSeconds.FromHumanReadable(
               "2020-03-03 16:47:43"),
           st_mtime=rdfvalue.RDFDatetimeSeconds.FromHumanReadable(
               "2020-03-03 16:47:50"),
           st_mode=stat.S_IFDIR,
       ),
       rdf_client_fs.StatEntry(
           pathspec=self._GetNTFSPathSpec(
               "/ads",
               inode=ADS_FILE_REF,
               path_options=rdf_paths.PathSpec.Options.CASE_LITERAL),
           st_atime=rdfvalue.RDFDatetimeSeconds.FromHumanReadable(
               "2020-04-07 14:57:02"),
           st_crtime=rdfvalue.RDFDatetimeSeconds.FromHumanReadable(
               "2020-04-07 13:23:07"),
           st_mtime=rdfvalue.RDFDatetimeSeconds.FromHumanReadable(
               "2020-04-07 14:56:47"),
           st_mode=stat.S_IFDIR,
       ),
       rdf_client_fs.StatEntry(
           pathspec=self._GetNTFSPathSpec(
               "/numbers.txt",
               inode=NUMBERS_TXT_FILE_REF,
               path_options=rdf_paths.PathSpec.Options.CASE_LITERAL),
           st_atime=rdfvalue.RDFDatetimeSeconds.FromHumanReadable(
               "2020-03-03 20:10:46"),
           st_crtime=rdfvalue.RDFDatetimeSeconds.FromHumanReadable(
               "2020-03-03 16:46:00"),
           st_mtime=rdfvalue.RDFDatetimeSeconds.FromHumanReadable(
               "2020-03-03 20:10:46"),
           st_mode=stat.S_IFREG,
           st_size=3893,
       ),
   ]
   # print("XX", str(files).replace("\\n", "\n"))
   self.assertEqual(files, expected_files)
Example #23
    def ListDirectory(self, pathspec, state, depth=0):
        """A recursive generator of files."""
        # Limit recursion depth
        if depth >= self.request.max_depth:
            return

        try:
            fd = vfs.VFSOpen(pathspec, progress_callback=self.Progress)
            files = fd.ListFiles()
        except (IOError, OSError) as e:
            if depth == 0:
                # We failed to open the directory the server asked for because dir
                # doesn't exist or some other reason. So we set status and return
                # back to the caller ending the Iterator.
                self.SetStatus(rdf_flows.GrrStatus.ReturnedStatus.IOERROR, e)
            else:
                # Can't open the directory we're searching, ignore the directory.
                logging.info("Find failed to ListDirectory for %s. Err: %s",
                             pathspec, e)
            return

        # If we are not supposed to cross devices, and don't know yet
        # which device we are on, we need to find out.
        if not self.request.cross_devs and self.filesystem_id is None:
            dir_stat = fd.Stat()
            self.filesystem_id = dir_stat.st_dev

        # Recover the start point for this directory from the state dict so we can
        # resume.
        start = state.get(pathspec.CollapsePath(), 0)

        for i, file_stat in enumerate(files):
            # Skip the files we already did before
            if i < start:
                continue

            if stat.S_ISDIR(file_stat.st_mode):
                # Do not traverse directories in a different filesystem.
                if self.request.cross_devs or self.filesystem_id == file_stat.st_dev:
                    for child_stat in self.ListDirectory(
                            file_stat.pathspec, state, depth + 1):
                        yield child_stat

            state[pathspec.CollapsePath()] = i + 1
            yield file_stat

        # Now remove this from the state dict to prevent it from getting too large
        try:
            del state[pathspec.CollapsePath()]
        except KeyError:
            pass
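A hypothetical driver sketch for the resumable generator above; the `state` dict is kept between runs so a restarted walk skips entries it has already yielded (the `pathspec` field on the request is an assumption):

    # Hypothetical driver: `state` persists across runs to support resumption.
    state = {}
    for stat_entry in self.ListDirectory(self.request.pathspec, state):
        self.SendReply(stat_entry)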
Example #24
def RegistryKeyFromClient(args: rdf_file_finder.FileFinderArgs):
    """This function expands paths from the args and returns registry keys.

  Args:
    args: An `rdf_file_finder.FileFinderArgs` object.

  Yields:
    `rdf_client_fs.StatEntry` instances.
  """
    for path in _GetExpandedPaths(args):
        pathspec = rdf_paths.PathSpec(
            path=path, pathtype=rdf_paths.PathSpec.PathType.REGISTRY)
        with vfs.VFSOpen(pathspec) as file_obj:
            yield file_obj.Stat()
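A hedged caller sketch; the `FileFinderArgs` field names used here (`paths`, `pathtype`) are assumptions about how `_GetExpandedPaths` consumes the arguments:

    # Hypothetical caller; the FileFinderArgs field names are assumptions.
    args = rdf_file_finder.FileFinderArgs(
        paths=[r"HKEY_LOCAL_MACHINE\SOFTWARE\*"],
        pathtype=rdf_paths.PathSpec.PathType.REGISTRY)
    for stat_entry in RegistryKeyFromClient(args):
        print(stat_entry.pathspec.path)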
Example #25
    def testRecursiveImages(self):
        """Test directory listing in sleuthkit."""
        p3 = rdf_paths.PathSpec(path="/home/a.txt",
                                pathtype=rdf_paths.PathSpec.PathType.TSK)
        p2 = rdf_paths.PathSpec(path="/home/image2.img",
                                pathtype=rdf_paths.PathSpec.PathType.TSK)
        p1 = rdf_paths.PathSpec(path=os.path.join(self.base_path,
                                                  "test_img.dd"),
                                pathtype=rdf_paths.PathSpec.PathType.OS)
        p2.Append(p3)
        p1.Append(p2)
        f = vfs.VFSOpen(p1)

        self.assertEqual(f.read(3), "yay")
Example #26
    def testGuessPathSpecPartial(self):
        """Test that we can guess a pathspec from a partial pathspec."""
        path = os.path.join(self.base_path, "test_img.dd")
        pathspec = rdf_paths.PathSpec(path=path,
                                      pathtype=rdf_paths.PathSpec.PathType.OS)
        pathspec.nested_path.path = "/home/image2.img/home/a.txt"
        pathspec.nested_path.pathtype = rdf_paths.PathSpec.PathType.TSK

        fd = vfs.VFSOpen(pathspec)
        self.assertEqual(fd.read(3), "yay")

        # Open as a directory
        pathspec.nested_path.path = "/home/image2.img/home/"

        fd = vfs.VFSOpen(pathspec)

        names = []
        for s in fd.ListFiles():
            # Make sure that the stat pathspec is correct - it should be 3 levels
            # deep.
            self.assertEqual(s.pathspec.nested_path.path, "/home/image2.img")
            names.append(s.pathspec.nested_path.nested_path.path)

        self.assertTrue("home/a.txt" in names)
Example #27
  def testFileSizeOverride(self):

    # We assume /dev/null exists and has a 0 size.
    fname = "/dev/null"
    try:
      st = os.stat(fname)
    except OSError:
      self.skipTest("%s not accessible." % fname)
    if st.st_size != 0:
      self.skipTest("%s doesn't have 0 size." % fname)

    pathspec = rdf_paths.PathSpec(
        path=fname, pathtype="OS", file_size_override=100000000)
    fd = vfs.VFSOpen(pathspec)
    self.assertEqual(fd.size, 100000000)
Example #28
  def Run(self, args):
    hash_types = set()
    for t in args.tuples:
      for hash_name in t.hashers:
        hash_types.add(str(hash_name).lower())

    hasher = client_utils_common.MultiHasher(hash_types, progress=self.Progress)
    with vfs.VFSOpen(args.pathspec, progress_callback=self.Progress) as fd:
      hasher.HashFile(fd, args.max_filesize)

    hash_object = hasher.GetHashObject()
    response = rdf_client.FingerprintResponse(
        pathspec=fd.pathspec,
        bytes_read=hash_object.num_bytes,
        hash=hash_object)
    self.SendReply(response)
Example #29
    def UploadFile(self, args):
        """Just copy the file into the filestore."""
        file_fd = vfs.VFSOpen(args.pathspec)

        fs = file_store.FileUploadFileStore()
        fd = fs.CreateFileStoreFile()
        while True:
            data = file_fd.read(self.BUFFER_SIZE)
            if not data:
                break
            fd.write(data)
        file_id = fd.Finalize()

        return [
            rdf_client.UploadedFile(stat_entry=file_fd.Stat(), file_id=file_id)
        ]
Example #30
  def testNTFSStat_alternateDataStreams(self):
    pathspec = self._GetNTFSPathSpec("/ads/ads.txt", stream_name="ONE")

    fd = vfs.VFSOpen(pathspec)
    s = fd.Stat()
    self.assertEqual(
        s.pathspec,
        self._GetNTFSPathSpec(
            "/ads/ads.txt",
            ADS_ADS_TXT_FILE_REF,
            stream_name="one",
            path_options=rdf_paths.PathSpec.Options.CASE_LITERAL))
    self.assertEqual(str(s.st_atime), "2020-04-07 13:48:51")
    self.assertEqual(str(s.st_mtime), "2020-04-07 13:48:56")
    self.assertEqual(str(s.st_crtime), "2020-04-07 13:18:53")
    self.assertEqual(s.st_size, 6)