Code example #1
File: filesystem.py  Project: ksmaheshkumar/grr
  def ConvertGlobIntoPathComponents(self, pattern):
    """Converts a glob pattern into a list of pathspec components.

    Wildcards are also converted to regular expressions. The pathspec components
    do not span directories, and are marked as a regex or a literal component.

    We also support recursion into directories using the ** notation.  For
    example, /home/**2/foo.txt will find all files named foo.txt recursed 2
    directories deep. If the directory depth is omitted, it defaults to 3.

    Example:
     /home/test/* -> ['home', 'test', '.*\\Z(?ms)']

    Args:
      pattern: A glob expression with wildcards.

    Returns:
      A list of PathSpec instances for each component.

    Raises:
      ValueError: If the glob is invalid.
    """

    components = []
    for path_component in pattern.split("/"):
      # A ** in the path component means recurse into directories that match the
      # pattern.
      m = rdfvalue.GlobExpression.RECURSION_REGEX.search(path_component)
      if m:
        path_component = path_component.replace(m.group(0), "*")

        component = rdfvalue.PathSpec(
            path=fnmatch.translate(path_component),
            pathtype=self.state.pathtype,
            path_options=rdfvalue.PathSpec.Options.RECURSIVE)

        # Allow the user to override the recursion depth.
        if m.group(1):
          component.recursion_depth = int(m.group(1))

      elif self.GLOB_MAGIC_CHECK.search(path_component):
        component = rdfvalue.PathSpec(
            path=fnmatch.translate(path_component),
            pathtype=self.state.pathtype,
            path_options=rdfvalue.PathSpec.Options.REGEX)
      else:
        pathtype = self.state.pathtype
        # TODO(user): This is a backwards compatibility hack. Remove when
        # all clients reach 3.0.0.2.
        if (pathtype == rdfvalue.PathSpec.PathType.TSK and
            re.match("^.:$", path_component)):
          path_component = "%s\\" % path_component
        component = rdfvalue.PathSpec(
            path=path_component,
            pathtype=pathtype,
            path_options=rdfvalue.PathSpec.Options.CASE_INSENSITIVE)

      components.append(component)

    return components
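
The docstring above describes two mechanics: each glob component is translated to a regular expression with fnmatch.translate, and a ** component may carry an optional depth suffix that overrides the default recursion depth of 3. The standalone sketch below (standard library only) illustrates both; RECURSION_RE and GLOB_MAGIC_RE are simplified stand-ins assumed for illustration, not GRR's GlobExpression.RECURSION_REGEX or GLOB_MAGIC_CHECK.

import fnmatch
import re

# Assumed, simplified stand-ins for the GRR regexes referenced above.
RECURSION_RE = re.compile(r"\*\*(\d*)")   # "**" with an optional depth suffix
GLOB_MAGIC_RE = re.compile(r"[*?[\]]")    # component contains glob wildcards


def DescribeComponent(component, default_depth=3):
  """Classify one path component the way ConvertGlobIntoPathComponents does."""
  m = RECURSION_RE.search(component)
  if m:
    depth = int(m.group(1)) if m.group(1) else default_depth
    regex = fnmatch.translate(component.replace(m.group(0), "*"))
    return ("RECURSIVE", regex, depth)
  if GLOB_MAGIC_RE.search(component):
    return ("REGEX", fnmatch.translate(component), None)
  return ("LITERAL", component, None)


if __name__ == "__main__":
  for part in "home/**2/foo*.txt".split("/"):
    print("%s -> %s" % (part, DescribeComponent(part)))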
Code example #2
  def testTSKFileCasing(self):
    """Test our ability to read the correct casing from image."""
    path = os.path.join(self.base_path, "test_img.dd")
    path2 = os.path.join("test directory", "NuMbErS.TxT")

    ps2 = rdfvalue.PathSpec(
        path=path2,
        pathtype=rdfvalue.PathSpec.PathType.TSK)

    ps = rdfvalue.PathSpec(path=path,
                           pathtype=rdfvalue.PathSpec.PathType.OS)
    ps.Append(ps2)
    fd = vfs.VFSOpen(ps)

    # This fixes Windows paths.
    path = path.replace("\\", "/")
    # The pathspec should have 2 components.

    self.assertEqual(fd.pathspec.first.path,
                     utils.NormalizePath(path))
    self.assertEqual(fd.pathspec.first.pathtype,
                     rdfvalue.PathSpec.PathType.OS)

    nested = fd.pathspec.last
    self.assertEqual(nested.path, u"/Test Directory/numbers.txt")
    self.assertEqual(nested.pathtype, rdfvalue.PathSpec.PathType.TSK)
Code example #3
File: registry.py  Project: zzzzpaul/grr
    def runTest(self):
        """Launch our flows."""
        for flow, args in [
            ("ListDirectory", {
                "pathspec":
                rdfvalue.PathSpec(pathtype=rdfvalue.PathSpec.PathType.REGISTRY,
                                  path=self.reg_path)
            }),
            ("FindFiles", {
                "findspec":
                rdfvalue.FindSpec(pathspec=rdfvalue.PathSpec(
                    path=self.reg_path,
                    pathtype=rdfvalue.PathSpec.PathType.REGISTRY),
                                  path_regex="ProfileImagePath"),
                "output":
                self.output_path
            })
        ]:

            if self.local_worker:
                self.session_id = debugging.StartFlowAndWorker(
                    self.client_id, flow, **args)
            else:
                self.session_id = flow_utils.StartFlowAndWait(self.client_id,
                                                              flow_name=flow,
                                                              token=self.token,
                                                              **args)

        self.CheckFlow()
Code example #4
  def GetFindSpecs(self):
    """Determine the Find specifications.

    Yields:
      rdfvalue.FindSpec instances describing the paths to search.

    Raises:
      OSError: If the client operating system is not supported.
    """
    path_spec = rdfvalue.PathSpec(
        path=self.GetSophosAVInfectedPath(),
        pathtype=self.args.pathtype)

    yield rdfvalue.FindSpec(
        pathspec=path_spec,
        path_regex=".*",
        max_depth=1)

    path_spec = rdfvalue.PathSpec(
        path=self.GetSophosAVLogsPath(),
        pathtype=self.args.pathtype)

    yield rdfvalue.FindSpec(
        pathspec=path_spec,
        path_regex=self.GetSophosAVLogsPathRegex(),
        max_depth=1)
Code example #5
File: osx_file_parser_test.py  Project: zzzzpaul/grr
    def testOSXUsersParser(self):
        """Ensure we can extract users from a passwd file."""
        paths = ["/Users/user1", "/Users/user2", "/Users/Shared"]
        statentries = []
        client = "C.1000000000000000"
        for path in paths:
            statentries.append(
                rdfvalue.StatEntry(
                    aff4path=rdfvalue.ClientURN(client).Add("fs/os").Add(path),
                    pathspec=rdfvalue.PathSpec(
                        path=path, pathtype=rdfvalue.PathSpec.PathType.OS),
                    st_mode=16877))

        statentries.append(
            rdfvalue.StatEntry(aff4path=rdfvalue.ClientURN(client).Add(
                "fs/os").Add("/Users/.localized"),
                               pathspec=rdfvalue.PathSpec(
                                   path="/Users/.localized",
                                   pathtype=rdfvalue.PathSpec.PathType.OS),
                               st_mode=33261))

        parser = osx_file_parser.OSXUsersParser()
        out = list(parser.Parse(statentries, None, None))
        self.assertItemsEqual([x.username for x in out], ["user1", "user2"])
        self.assertItemsEqual([x.homedir for x in out],
                              ["/Users/user1", "/Users/user2"])
Code example #6
File: discovery.py  Project: pombredanne/grr-insider
  def EnumerateFilesystems(self, responses):
    """Store all the local filesystems in the client."""
    if responses.success and len(responses):
      filesystems = self.client.Schema.FILESYSTEM()
      for response in responses:
        filesystems.Append(response)

        if response.type == "partition":
          (device, offset) = response.device.rsplit(":", 1)

          offset = int(offset)

          pathspec = rdfvalue.PathSpec(
              path=device, pathtype=rdfvalue.PathSpec.PathType.OS,
              offset=offset)

          pathspec.Append(path="/",
                          pathtype=rdfvalue.PathSpec.PathType.TSK)

          urn = self.client.PathspecToURN(pathspec, self.client.urn)
          fd = aff4.FACTORY.Create(urn, "VFSDirectory", token=self.token)
          fd.Set(fd.Schema.PATHSPEC(pathspec))
          fd.Close()
          continue

        if response.device:
          # Create the raw device
          urn = "devices/%s" % response.device

          pathspec = rdfvalue.PathSpec(
              path=response.device,
              pathtype=rdfvalue.PathSpec.PathType.OS)

          pathspec.Append(path="/",
                          pathtype=rdfvalue.PathSpec.PathType.TSK)

          fd = aff4.FACTORY.Create(urn, "VFSDirectory", token=self.token)
          fd.Set(fd.Schema.PATHSPEC(pathspec))
          fd.Close()

          # Create the TSK device
          urn = self.client.PathspecToURN(pathspec, self.client.urn)
          fd = aff4.FACTORY.Create(urn, "VFSDirectory", token=self.token)
          fd.Set(fd.Schema.PATHSPEC(pathspec))
          fd.Close()

        if response.mount_point:
          # Create the OS device
          pathspec = rdfvalue.PathSpec(
              path=response.mount_point,
              pathtype=rdfvalue.PathSpec.PathType.OS)

          urn = self.client.PathspecToURN(pathspec, self.client.urn)
          fd = aff4.FACTORY.Create(urn, "VFSDirectory", token=self.token)
          fd.Set(fd.Schema.PATHSPEC(pathspec))
          fd.Close()

      self.client.Set(self.client.Schema.FILESYSTEM, filesystems)
    else:
      self.Log("Could not enumerate file systems.")
Code example #7
    def testListsBinaries(self):
        process1_exe = "\\WINDOWS\\bar.exe"
        process2_exe = "\\WINDOWS\\foo.exe"

        client_mock = ListVADBinariesActionMock([process1_exe, process2_exe])
        output_path = "analysis/ListVADBinariesTest1"

        for _ in test_lib.TestFlowHelper("ListVADBinaries",
                                         client_mock,
                                         client_id=self.client_id,
                                         token=self.token,
                                         output=output_path):
            pass

        fd = aff4.FACTORY.Open(self.client_id.Add(output_path),
                               token=self.token)

        # Sorting output collection to make the test deterministic
        binaries = sorted(fd, key=lambda x: x.CollapsePath())
        self.assertListEqual(binaries, [
            rdfvalue.PathSpec(path="C:" + process1_exe,
                              pathtype=rdfvalue.PathSpec.PathType.OS),
            rdfvalue.PathSpec(path="C:" + process2_exe,
                              pathtype=rdfvalue.PathSpec.PathType.OS)
        ])
Code example #8
    def testTSKInodeHandling(self):
        """Test that we can open files by inode."""
        path = os.path.join(self.base_path, "ntfs_img.dd")
        ps2 = rdfvalue.PathSpec(inode=65,
                                ntfs_type=128,
                                ntfs_id=0,
                                path="/this/will/be/ignored",
                                pathtype=rdfvalue.PathSpec.PathType.TSK)

        ps = rdfvalue.PathSpec(path=path,
                               pathtype=rdfvalue.PathSpec.PathType.OS,
                               offset=63 * 512)
        ps.Append(ps2)
        fd = vfs.VFSOpen(ps)

        self.assertEqual(fd.Read(100), "Hello world\n")

        ps2 = rdfvalue.PathSpec(inode=65,
                                ntfs_type=128,
                                ntfs_id=4,
                                pathtype=rdfvalue.PathSpec.PathType.TSK)
        ps = rdfvalue.PathSpec(path=path,
                               pathtype=rdfvalue.PathSpec.PathType.OS,
                               offset=63 * 512)
        ps.Append(ps2)
        fd = vfs.VFSOpen(ps)

        self.assertEqual(fd.read(100), "I am a real ADS\n")

        # Make sure the size is correct:
        self.assertEqual(fd.Stat().st_size, len("I am a real ADS\n"))
Code example #9
    def testOpenFilehandlesExpire(self):
        """Test that file handles expire from cache."""
        files.FILE_HANDLE_CACHE = utils.FastStore(max_size=10)

        current_process = psutil.Process(os.getpid())
        num_open_files = len(current_process.open_files())

        path = os.path.join(self.base_path, "morenumbers.txt")
        fd = vfs.VFSOpen(
            rdfvalue.PathSpec(path=path,
                              pathtype=rdfvalue.PathSpec.PathType.OS))

        fds = []
        for filename in fd.ListNames():
            child_fd = vfs.VFSOpen(
                rdfvalue.PathSpec(path=os.path.join(path, filename),
                                  pathtype=rdfvalue.PathSpec.PathType.OS))
            child_fd.read(20)
            fds.append(child_fd)

        # This should not create any new file handles.
        self.assertTrue(len(current_process.open_files()) - num_open_files < 5)

        # Make sure we exceeded the size of the cache.
        self.assertTrue(len(fds) > 20)
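
This test relies on files.FILE_HANDLE_CACHE being a bounded store (utils.FastStore(max_size=10)) that drops its oldest entries once full, which is what keeps the number of open OS handles small even though more than 20 children are opened. The sketch below is a minimal standalone model of that assumed eviction behaviour; the class name is illustrative and this is not GRR's FastStore implementation.

import collections


class BoundedHandleCache(object):
  """Minimal model of a size-limited handle cache with eviction on overflow."""

  def __init__(self, max_size=10):
    self.max_size = max_size
    self._store = collections.OrderedDict()

  def Put(self, key, handle):
    if key in self._store:
      del self._store[key]            # refresh insertion order for this key
    self._store[key] = handle
    while len(self._store) > self.max_size:
      _, oldest = self._store.popitem(last=False)
      oldest.close()                  # evicted handles release their descriptor

  def Get(self, key):
    return self._store[key]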
Code example #10
File: flow_test.py  Project: thatarchguy/grr
  def testIteratedDirectoryListing(self):
    """Test that the client iterator works."""
    # Install the mock
    vfs.VFS_HANDLERS[rdfvalue.PathSpec.PathType.OS] = MockVFSHandler
    path = "/"

    # Run the flow in the simulated way
    client_mock = action_mocks.ActionMock("IteratedListDirectory")
    for _ in test_lib.TestFlowHelper(
        "IteratedListDirectory", client_mock, client_id=self.client_id,
        pathspec=rdfvalue.PathSpec(path="/",
                                   pathtype=rdfvalue.PathSpec.PathType.OS),
        token=self.token):
      pass

    fd = aff4.FACTORY.Open(self.client_id.Add("fs/os").Add(path),
                           token=self.token)
    directory = [ch for ch in fd.OpenChildren()]
    pb = rdfvalue.PathSpec(path=path,
                           pathtype=rdfvalue.PathSpec.PathType.OS)
    directory2 = list(vfs.VFSOpen(pb).ListFiles())
    directory.sort()
    result = [x.Get(x.Schema.STAT) for x in directory]

    # Make sure that the resulting directory is what it should be
    for x, y in zip(result, directory2):
      x.aff4path = None

      self.assertEqual(x.st_mode, y.st_mode)
      self.assertProtoEqual(x, y)
Code example #11
File: standard.py  Project: pombredanne/grr-insider
    def Start(self, responses):
        """Start."""
        client_id = responses.request.client_id

        if not self.args.files:
            self.args.files = {}

        self.args.files[client_id] = 0
        for filename in self.registry_files:
            pathspec = rdfvalue.PathSpec(
                pathtype=rdfvalue.PathSpec.PathType.TSK,
                path=r"C:\windows\system32\config\%s" % filename)

            self.args.files[client_id] += 1
            self.CallFlow("GetFile",
                          pathspec=pathspec,
                          next_state="StoreResults",
                          client_id=client_id)

        client = aff4.FACTORY.Open(rdfvalue.ClientURN(client_id),
                                   mode="r",
                                   token=self.token)
        users = client.Get(client.Schema.USER) or []
        for user in users:
            pathspec = rdfvalue.PathSpec(
                pathtype=rdfvalue.PathSpec.PathType.TSK,
                path=user.homedir + r"\NTUSER.DAT")
            self.args.files[client_id] += 1
            self.CallFlow("GetFile",
                          pathspec=pathspec,
                          next_state="StoreResults",
                          client_id=client_id)
Code example #12
File: aff4_grr_test.py  Project: ksmaheshkumar/grr
    def testPathspecToURN(self):
        """Test the pathspec to URN conversion function."""
        pathspec = rdfvalue.PathSpec(
            path="\\\\.\\Volume{1234}\\",
            pathtype=rdfvalue.PathSpec.PathType.OS,
            mount_point="/c:/").Append(path="/windows",
                                       pathtype=rdfvalue.PathSpec.PathType.TSK)

        urn = aff4.AFF4Object.VFSGRRClient.PathspecToURN(
            pathspec, "C.1234567812345678")
        self.assertEqual(
            urn,
            rdfvalue.RDFURN(
                r"aff4:/C.1234567812345678/fs/tsk/\\.\Volume{1234}\/windows"))

        # Test an ADS
        pathspec = rdfvalue.PathSpec(
            path="\\\\.\\Volume{1234}\\",
            pathtype=rdfvalue.PathSpec.PathType.OS,
            mount_point="/c:/").Append(pathtype=rdfvalue.PathSpec.PathType.TSK,
                                       path="/Test Directory/notes.txt:ads",
                                       inode=66,
                                       ntfs_type=128,
                                       ntfs_id=2)

        urn = aff4.AFF4Object.VFSGRRClient.PathspecToURN(
            pathspec, "C.1234567812345678")
        self.assertEqual(
            urn,
            rdfvalue.RDFURN(
                r"aff4:/C.1234567812345678/fs/tsk/\\.\Volume{1234}\/"
                "Test Directory/notes.txt:ads"))
Code example #13
  def testFlowExecution(self):
    client_mock = action_mocks.ActionMock("ListDirectory", "StatFile")

    rollover = config_lib.CONFIG["Logging.aff4_audit_log_rollover"]
    # Set time to epoch + 20 intervals
    with test_lib.FakeTime(20 * rollover):
      for _ in test_lib.TestFlowHelper(
          "ListDirectory", client_mock, client_id=self.client_id,
          pathspec=rdfvalue.PathSpec(
              path=os.path.join(self.base_path, "test_img.dd/test directory"),
              pathtype=rdfvalue.PathSpec.PathType.OS),
          token=self.token):
        pass

      for _ in test_lib.TestFlowHelper(
          "ListDirectory", client_mock, client_id=self.client_id,
          pathspec=rdfvalue.PathSpec(
              path=os.path.join(self.base_path, "test_img.dd/test directory"),
              pathtype=rdfvalue.PathSpec.PathType.OS),
          token=self.token):
        pass

      parentdir = aff4.FACTORY.Open("aff4:/audit/logs", "AFF4Volume", mode="r",
                                    token=self.token)
      logs = list(parentdir.ListChildren())
      self.assertEqual(len(logs), 1)
      log = aff4.CurrentAuditLog()
      events = list(aff4.FACTORY.Open(log, token=self.token))

      self.assertEqual(len(events), 2)
      for event in events:
        self.assertEqual(event.action, rdfvalue.AuditEvent.Action.RUN_FLOW)
        self.assertEqual(event.flow_name, "ListDirectory")
        self.assertEqual(event.user, self.token.username)

    # Set time to epoch + 22 intervals
    with test_lib.FakeTime(22 * rollover):
      for _ in test_lib.TestFlowHelper(
          "ListDirectory", client_mock, client_id=self.client_id,
          pathspec=rdfvalue.PathSpec(
              path=os.path.join(self.base_path, "test_img.dd/test directory"),
              pathtype=rdfvalue.PathSpec.PathType.OS),
          token=self.token):
        pass

      parentdir = aff4.FACTORY.Open("aff4:/audit/logs", "AFF4Volume", mode="r",
                                    token=self.token)
      # Now we should have two collections
      logs = list(parentdir.ListChildren())
      self.assertEqual(len(logs), 2)

      # One with two events
      events = list(aff4.FACTORY.Open(logs[0], token=self.token))
      self.assertEqual(len(events), 2)

      # The other with one
      events = list(aff4.FACTORY.Open(logs[1], token=self.token))
      self.assertEqual(len(events), 1)
Code example #14
  def testTSKListDirectory(self):
    """Test directory listing in sleuthkit."""
    path = os.path.join(self.base_path, u"test_img.dd")
    ps2 = rdfvalue.PathSpec(path=u"入乡随俗 海外春节别样过法",
                            pathtype=rdfvalue.PathSpec.PathType.TSK)
    ps = rdfvalue.PathSpec(path=path,
                           pathtype=rdfvalue.PathSpec.PathType.OS)
    ps.Append(ps2)
    directory = vfs.VFSOpen(ps)
    self.CheckDirectoryListing(directory, u"入乡随俗.txt")
Code example #15
    def testTSKFile(self):
        """Test our ability to read from image files."""
        path = os.path.join(self.base_path, "test_img.dd")
        path2 = "Test Directory/numbers.txt"

        p2 = rdfvalue.PathSpec(path=path2,
                               pathtype=rdfvalue.PathSpec.PathType.TSK)
        p1 = rdfvalue.PathSpec(path=path,
                               pathtype=rdfvalue.PathSpec.PathType.OS)
        p1.Append(p2)
        fd = vfs.VFSOpen(p1)
        self.TestFileHandling(fd)
Code example #16
    def testUnicodeFile(self):
        """Test ability to read unicode files from images."""
        path = os.path.join(self.base_path, "test_img.dd")
        path2 = os.path.join(u"איןד ןד ש אקדא", u"איןד.txt")

        ps2 = rdfvalue.PathSpec(path=path2,
                                pathtype=rdfvalue.PathSpec.PathType.TSK)

        ps = rdfvalue.PathSpec(path=path,
                               pathtype=rdfvalue.PathSpec.PathType.OS)
        ps.Append(ps2)
        fd = vfs.VFSOpen(ps)
        self.TestFileHandling(fd)
Code example #17
  def testRecursiveImages(self):
    """Test directory listing in sleuthkit."""
    p3 = rdfvalue.PathSpec(path="/home/a.txt",
                           pathtype=rdfvalue.PathSpec.PathType.TSK)
    p2 = rdfvalue.PathSpec(path="/home/image2.img",
                           pathtype=rdfvalue.PathSpec.PathType.TSK)
    p1 = rdfvalue.PathSpec(path=os.path.join(self.base_path, "test_img.dd"),
                           pathtype=rdfvalue.PathSpec.PathType.OS)
    p2.Append(p3)
    p1.Append(p2)
    f = vfs.VFSOpen(p1)

    self.assertEqual(f.read(3), "yay")
Code example #18
    def Parse(self, persistence, knowledge_base, download_pathtype):
        """Convert persistence collector output to downloadable rdfvalues."""
        pathspecs = []

        if isinstance(persistence, rdfvalue.OSXServiceInformation):
            if persistence.program:
                pathspecs = rdfvalue.PathSpec(path=persistence.program,
                                              pathtype=download_pathtype)
            elif persistence.args:
                pathspecs = rdfvalue.PathSpec(path=persistence.args[0],
                                              pathtype=download_pathtype)

        for pathspec in pathspecs:
            yield rdfvalue.PersistenceFile(pathspec=pathspec)
Code example #19
    def testFileCasing(self):
        """Test our ability to read the correct casing from filesystem."""
        path = os.path.join(self.base_path, "numbers.txt")
        try:
            os.lstat(os.path.join(self.base_path, "nUmBeRs.txt"))
            os.lstat(os.path.join(self.base_path, "nuMbErs.txt"))
            # If we reached this point we are on a case insensitive file system
            # and the tests below do not make any sense.
            logging.warning(
                "Case insensitive file system detected. Skipping test.")
            return
        except (IOError, OSError):
            pass

        fd = vfs.VFSOpen(
            rdfvalue.PathSpec(path=path,
                              pathtype=rdfvalue.PathSpec.PathType.OS))
        self.assertEqual(fd.pathspec.Basename(), "numbers.txt")

        path = os.path.join(self.base_path, "numbers.TXT")

        fd = vfs.VFSOpen(
            rdfvalue.PathSpec(path=path,
                              pathtype=rdfvalue.PathSpec.PathType.OS))
        self.assertEqual(fd.pathspec.Basename(), "numbers.TXT")

        path = os.path.join(self.base_path, "Numbers.txt")
        fd = vfs.VFSOpen(
            rdfvalue.PathSpec(path=path,
                              pathtype=rdfvalue.PathSpec.PathType.OS))
        read_path = fd.pathspec.Basename()

        # The exact casing returned is non-deterministic, but it should be one of the two:
        if read_path != "numbers.txt" and read_path != "numbers.TXT":
            raise RuntimeError("read path is %s" % read_path)

        # Ensure that the produced pathspec specified no case folding:
        s = fd.Stat()
        self.assertEqual(s.pathspec.path_options,
                         rdfvalue.PathSpec.Options.CASE_LITERAL)

        # Case folding will only occur when requested - this should raise because we
        # have the CASE_LITERAL option:
        pathspec = rdfvalue.PathSpec(
            path=path,
            pathtype=rdfvalue.PathSpec.PathType.OS,
            path_options=rdfvalue.PathSpec.Options.CASE_LITERAL)
        self.assertRaises(IOError, vfs.VFSOpen, pathspec)
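
The try/except probe at the top of this test checks whether lstat succeeds on a differently-cased name, which only happens on a case insensitive file system. A standalone version of that probe, using only the standard library, might look like the sketch below; the helper name is illustrative and not part of GRR.

import os
import tempfile


def FilesystemIsCaseInsensitive(directory=None):
  """Probe a directory by lstat'ing a temp file under a different casing."""
  handle, probe = tempfile.mkstemp(prefix="CaseProbe", dir=directory)
  os.close(handle)
  try:
    swapped = os.path.join(os.path.dirname(probe),
                           os.path.basename(probe).swapcase())
    try:
      os.lstat(swapped)   # resolves only if the file system folds case
      return True
    except OSError:
      return False
  finally:
    os.unlink(probe)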
Code example #20
    def Start(self):
        """Check if driver is already loaded."""
        self.state.Register("device_urn", self.client_id.Add("devices/memory"))
        self.state.Register("installer_urns", [])
        self.state.Register("current_installer", None)

        if not self.args.driver_installer:
            # Fetch the driver installer from the data store.
            self.state.installer_urns = GetMemoryModules(self.client_id,
                                                         token=self.token)

            # Create a protobuf containing the request.
            if not self.state.installer_urns:
                raise IOError(
                    "Could not determine path for memory driver. No module "
                    "available for this platform.")

        if self.args.reload_if_loaded:
            self.CallStateInline(next_state="LoadDriver")
        else:
            # We just check for one of the drivers, assuming that they all use the
            # same device path.
            installer = GetDriverFromURN(self.state.installer_urns[0],
                                         token=self.token)

            self.CallClient("GetMemoryInformation",
                            rdfvalue.PathSpec(
                                path=installer.device_path,
                                pathtype=rdfvalue.PathSpec.PathType.MEMORY),
                            next_state="CheckMemoryInformation")
Code example #21
File: transfer.py  Project: wwwiretap/grr
class TestGetFileTSKWindows(TestGetFileOSWindows):
  """Tests if GetFile works on Windows using TSK."""
  args = {"pathspec": rdfvalue.PathSpec(
      path="C:\\Windows\\regedit.exe",
      pathtype=rdfvalue.PathSpec.PathType.TSK)}

  def CheckFlow(self):
    urn = self.client_id.Add("/fs/tsk")
    fd = aff4.FACTORY.Open(urn, mode="r", token=self.token)
    volumes = list(fd.OpenChildren())
    found = False
    for volume in volumes:
      file_urn = volume.urn.Add("Windows/regedit.exe")
      fd = aff4.FACTORY.Open(file_urn, mode="r",
                             token=self.token)
      try:
        data = fd.Read(10)
        if data[:2] == "MZ":
          found = True
          self.delete_urns.add(file_urn)
          break
      except AttributeError:
        # If the file does not exist on this volume, Open returns an
        # AFF4Volume, which does not have a Read method.
        pass
    self.assertTrue(found)
Code example #22
File: transfer.py  Project: wwwiretap/grr
class TestGetFileTSKLinux(base.AutomatedTest):
  """Tests if GetFile works on Linux using Sleuthkit."""
  platforms = ["Linux"]
  flow = "GetFile"
  args = {"pathspec": rdfvalue.PathSpec(
      path="/bin/ls",
      pathtype=rdfvalue.PathSpec.PathType.TSK)}

  # Interpolate for /dev/mapper-...
  test_output_path = "/fs/tsk/.*/bin/ls"

  def CheckFlow(self):
    pos = self.test_output_path.find("*")
    if pos > 0:
      prefix = self.client_id.Add(self.test_output_path[:pos])
      for urn in base.RecursiveListChildren(prefix=prefix):
        if re.search(self.test_output_path + "$", str(urn)):
          self.delete_urns.add(urn)
          return self.CheckFile(aff4.FACTORY.Open(urn, token=self.token))

      self.fail(("Output file not found. Maybe the GRR client "
                 "is not running with root privileges?"))

    else:
      urn = self.client_id.Add(self.test_output_path)
      fd = aff4.FACTORY.Open(urn, token=self.token)
      if isinstance(fd, aff4.BlobImage):
        return self.CheckFile(fd)
      self.fail("Output file %s not found." % urn)

  def CheckFile(self, fd):
    data = fd.Read(10)
    self.assertEqual(data[1:4], "ELF")
Code example #23
File: transfer_test.py  Project: zzzzpaul/grr
    def testGetFile(self):
        """Test that the GetFile flow works."""

        client_mock = action_mocks.ActionMock("TransferBuffer", "StatFile")
        pathspec = rdfvalue.PathSpec(pathtype=rdfvalue.PathSpec.PathType.OS,
                                     path=os.path.join(self.base_path,
                                                       "test_img.dd"))

        for _ in test_lib.TestFlowHelper("GetFile",
                                         client_mock,
                                         token=self.token,
                                         client_id=self.client_id,
                                         pathspec=pathspec):
            pass

        # Fix path for Windows testing.
        pathspec.path = pathspec.path.replace("\\", "/")
        # Test the AFF4 file that was created.
        urn = aff4.AFF4Object.VFSGRRClient.PathspecToURN(
            pathspec, self.client_id)
        fd1 = aff4.FACTORY.Open(urn, token=self.token)
        fd2 = open(pathspec.path)
        fd2.seek(0, 2)

        self.assertEqual(fd2.tell(), int(fd1.Get(fd1.Schema.SIZE)))
        self.CompareFDs(fd1, fd2)
Code example #24
File: action_test.py  Project: ksmaheshkumar/grr
    def testIteratedListDirectory(self):
        """Tests iterated listing of directories."""
        p = rdfvalue.PathSpec(path=self.base_path,
                              pathtype=rdfvalue.PathSpec.PathType.OS)
        non_iterated_results = self.RunAction(
            "ListDirectory", rdfvalue.ListDirRequest(pathspec=p))

        # Make sure we get some results.
        l = len(non_iterated_results)
        self.assertTrue(l > 0)

        iterated_results = []
        request = rdfvalue.ListDirRequest(pathspec=p)
        request.iterator.number = 2
        while True:
            responses = self.RunAction("IteratedListDirectory", request)
            results = responses[:-1]
            if not results: break

            for result in results:
                iterated_results.append(result)

        for x, y in zip(non_iterated_results, iterated_results):
            # Reset the st_atime in the results to avoid potential flakiness.
            x.st_atime = y.st_atime = 0

            self.assertRDFValueEqual(x, y)
Code example #25
File: transfer_test.py  Project: zzzzpaul/grr
    def testMultiGetFile(self):
        """Test MultiGetFile."""

        client_mock = action_mocks.ActionMock("TransferBuffer",
                                              "FingerprintFile", "StatFile",
                                              "HashBuffer")
        pathspec = rdfvalue.PathSpec(pathtype=rdfvalue.PathSpec.PathType.OS,
                                     path=os.path.join(self.base_path,
                                                       "test_img.dd"))

        args = rdfvalue.MultiGetFileArgs(pathspecs=[pathspec, pathspec])
        with test_lib.Instrument(transfer.MultiGetFile,
                                 "StoreStat") as storestat_instrument:
            for _ in test_lib.TestFlowHelper("MultiGetFile",
                                             client_mock,
                                             token=self.token,
                                             client_id=self.client_id,
                                             args=args):
                pass

            # We should only have called StoreStat once because the two paths
            # requested were identical.
            self.assertEqual(len(storestat_instrument.args), 1)

        # Fix path for Windows testing.
        pathspec.path = pathspec.path.replace("\\", "/")
        # Test the AFF4 file that was created.
        urn = aff4.AFF4Object.VFSGRRClient.PathspecToURN(
            pathspec, self.client_id)
        fd1 = aff4.FACTORY.Open(urn, token=self.token)
        fd2 = open(pathspec.path)
        fd2.seek(0, 2)

        self.assertEqual(fd2.tell(), int(fd1.Get(fd1.Schema.SIZE)))
        self.CompareFDs(fd1, fd2)
Code example #26
File: searching_test.py  Project: ksmaheshkumar/grr
  def testFindAction(self):
    """Test the find action."""
    # First get all the files at once
    pathspec = rdfvalue.PathSpec(path="/mock2/",
                                 pathtype=rdfvalue.PathSpec.PathType.OS)
    request = rdfvalue.FindSpec(pathspec=pathspec, path_regex=".")
    request.iterator.number = 200
    result = self.RunAction("Find", request)
    all_files = [x.hit for x in result if isinstance(x, rdfvalue.FindSpec)]

    # Ask for the files one at the time
    files = []
    request = rdfvalue.FindSpec(pathspec=pathspec, path_regex=".")
    request.iterator.number = 1

    while True:
      result = self.RunAction("Find", request)
      if request.iterator.state == rdfvalue.Iterator.State.FINISHED:
        break

      self.assertEqual(len(result), 2)
      self.assertTrue(isinstance(result[0], rdfvalue.FindSpec))
      self.assertTrue(isinstance(result[1], rdfvalue.Iterator))
      files.append(result[0].hit)

      request.iterator = result[1].Copy()

    for x, y in zip(all_files, files):
      self.assertRDFValueEqual(x, y)

    # Make sure the iterator is finished
    self.assertEqual(request.iterator.state, rdfvalue.Iterator.State.FINISHED)

    # Ensure we remove old states from client_state
    self.assertEqual(len(request.iterator.client_state.dat), 0)
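
The paging loop above follows a simple iterator protocol: the client asks for at most iterator.number hits per call, copies the returned iterator state into the next request, and stops once that state reaches FINISHED. The sketch below models the protocol with plain Python objects; the Iterator and RunFind names are illustrative, not GRR's action API.

class Iterator(object):
  RUNNING, FINISHED = "RUNNING", "FINISHED"

  def __init__(self, number=1, offset=0, state=RUNNING):
    self.number, self.offset, self.state = number, offset, state

  def Copy(self):
    return Iterator(self.number, self.offset, self.state)


def RunFind(all_hits, iterator):
  """Return one page of hits plus the updated iterator state."""
  page = all_hits[iterator.offset:iterator.offset + iterator.number]
  updated = iterator.Copy()
  updated.offset += len(page)
  if updated.offset >= len(all_hits):
    updated.state = Iterator.FINISHED
  return page, updated


hits, request_iterator = [], Iterator(number=1)
while True:
  page, request_iterator = RunFind(["a.txt", "b.txt", "c.txt"], request_iterator)
  hits.extend(page)
  if request_iterator.state == Iterator.FINISHED:
    break

assert hits == ["a.txt", "b.txt", "c.txt"]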
Code example #27
File: flow_test.py  Project: thatarchguy/grr
  def testFlowNotification(self):
    FlowDoneListener.received_events = []

    # Install the mock
    vfs.VFS_HANDLERS[rdfvalue.PathSpec.PathType.OS] = MockVFSHandler
    path = rdfvalue.PathSpec(path="/",
                             pathtype=rdfvalue.PathSpec.PathType.OS)

    # Run the flow in the simulated way
    client_mock = action_mocks.ActionMock("IteratedListDirectory")
    for _ in test_lib.TestFlowHelper(
        "IteratedListDirectory", client_mock, client_id=self.client_id,
        notification_urn=rdfvalue.SessionID(queue=rdfvalue.RDFURN("EV"),
                                            flow_name="FlowDone"),
        pathspec=path, token=self.token):
      pass

    # The event goes to an external queue so we need another worker.
    worker = test_lib.MockWorker(queues=[rdfvalue.RDFURN("EV")],
                                 token=self.token)
    worker.Simulate()

    self.assertEqual(len(FlowDoneListener.received_events), 1)

    flow_event = FlowDoneListener.received_events[0].payload
    self.assertEqual(flow_event.flow_name, "IteratedListDirectory")
    self.assertEqual(flow_event.client_id, "aff4:/C.1000000000000000")
    self.assertEqual(flow_event.status, rdfvalue.FlowNotification.Status.OK)
Code example #28
  def CreateSampleHunt(self, stopped=False):
    self.client_ids = self.SetupClients(10)

    with hunts.GRRHunt.StartHunt(
        hunt_name="GenericHunt",
        flow_runner_args=rdfvalue.FlowRunnerArgs(
            flow_name="GetFile"),
        flow_args=rdfvalue.GetFileArgs(
            pathspec=rdfvalue.PathSpec(
                path="/tmp/evil.txt",
                pathtype=rdfvalue.PathSpec.PathType.OS,
                )
            ),
        regex_rules=[rdfvalue.ForemanAttributeRegex(
            attribute_name="GRR client",
            attribute_regex="GRR")],
        output_plugins=[],
        client_rate=0, token=self.token) as hunt:
      if not stopped:
        hunt.Run()

    with aff4.FACTORY.Open("aff4:/foreman", mode="rw",
                           token=self.token) as foreman:

      for client_id in self.client_ids:
        foreman.AssignTasksToClient(client_id)

    self.hunt_urn = hunt.urn
    return aff4.FACTORY.Open(hunt.urn, mode="rw", token=self.token,
                             age=aff4.ALL_TIMES)
Code example #29
    def RunHunt(self, plugin_name, plugin_args):
        with hunts.GRRHunt.StartHunt(
                hunt_name="GenericHunt",
                flow_runner_args=rdfvalue.FlowRunnerArgs(flow_name="GetFile"),
                flow_args=rdfvalue.GetFileArgs(pathspec=rdfvalue.PathSpec(
                    path="/tmp/evil.txt",
                    pathtype=rdfvalue.PathSpec.PathType.OS)),
                regex_rules=[
                    rdfvalue.ForemanAttributeRegex(attribute_name="GRR client",
                                                   attribute_regex="GRR")
                ],
                output_plugins=[
                    rdfvalue.OutputPlugin(plugin_name=plugin_name,
                                          plugin_args=plugin_args)
                ],
                client_rate=0,
                token=self.token) as hunt:
            hunt.Run()

            hunt.StartClients(hunt.session_id, self.client_ids)

            # Run the hunt.
            client_mock = test_lib.SampleHuntMock()
            test_lib.TestHuntHelper(client_mock, self.client_ids, False,
                                    self.token)

            # Stop the hunt now.
            hunt.GetRunner().Stop()

        # Run cron flow that executes actual output plugins
        for _ in test_lib.TestFlowHelper("ProcessHuntResultsCronFlow",
                                         token=self.token):
            pass

        return hunt.urn
Code example #30
File: standard.py  Project: wwwiretap/grr
  def RunClient(self, responses):
    pathspec = rdfvalue.PathSpec(pathtype=rdfvalue.PathSpec.PathType.OS,
                                 path=self.args.filename)

    for client_id in responses:
      self.CallFlow("GetFile", pathspec=pathspec, next_state="StoreResults",
                    client_id=client_id)