Example #1
    def GetRegistryValue(self, source):
        """Retrieve directly specified registry values, returning Stat objects."""
        new_paths = set()
        for kvdict in source.attributes["key_value_pairs"]:
            # TODO(user): this needs to be improved to support globbing for both
            # key and value, and possibly also support forward slash.
            path = "\\".join((kvdict["key"], kvdict["value"]))

            expanded_paths = artifact_lib.InterpolateKbAttributes(
                path, self.state.knowledge_base)
            new_paths.update(expanded_paths)

        for new_path in new_paths:
            pathspec = paths.PathSpec(
                path=new_path, pathtype=paths.PathSpec.PathType.REGISTRY)
            self.CallClient("StatFile",
                            pathspec=pathspec,
                            request_data={
                                "artifact_name": self.current_artifact_name,
                                "source": source.ToPrimitiveDict()
                            },
                            next_state="ProcessCollected")
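
A minimal sketch of the source attributes this state consumes; the dict shape is assumed from how kvdict["key"] and kvdict["value"] are joined above, and the %%...%% markers are knowledge-base patterns that InterpolateKbAttributes expands (e.g. once per user SID):

source_attributes = {
    "key_value_pairs": [{
        # Hypothetical REGISTRY_VALUE source entry.
        "key": r"HKEY_USERS\%%users.sid%%\Environment",
        "value": "TEMP",
    }]
}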
Example #2
  def testCronTabParser(self):
    """Ensure we can extract jobs from a crontab file."""
    parser = cron_file_parser.CronTabParser()
    results = []

    path = os.path.join(self.base_path, "parser_test", "crontab")
    crontab_file = open(path, "rb")
    stat = rdf_client.StatEntry(
        pathspec=rdf_paths.PathSpec(
            path=path, pathtype=rdf_paths.PathSpec.PathType.OS),
        st_mode=16877)
    results.extend(list(parser.Parse(stat, crontab_file, None)))

    self.assertEqual(len(results), 1)

    for result in results:
      self.assertEqual(result.jobs[0].minute, "1")
      self.assertEqual(result.jobs[0].hour, "2")
      self.assertEqual(result.jobs[0].dayofmonth, "3")
      self.assertEqual(result.jobs[0].month, "4")
      self.assertEqual(result.jobs[0].dayofweek, "5")
      self.assertEqual(result.jobs[0].command, "/usr/bin/echo \"test\"")
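
The assertions above imply the fixture file contains a crontab entry whose five time fields are 1 through 5 in order (minute, hour, day-of-month, month, day-of-week), followed by the command. A sketch of the implied line, plus a decode of the magic st_mode:

# Implied content of the "parser_test/crontab" fixture:
crontab_line = '1 2 3 4 5 /usr/bin/echo "test"'

# The st_mode set on the StatEntry decodes to a directory with 0755 bits:
assert 16877 == 0o40755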
Example #3
    def testEmptySourceData(self):
        test_data = ("# comment 1\n"
                     "baseurl=\n"
                     "# Trailing whitespace on purpose\n"
                     "baseurl=      \n"
                     "# Trailing whitespace on purpose\n"
                     "baseurl =            \n"
                     "baseurl\n"
                     "# comment 2\n")

        file_obj = StringIO.StringIO(test_data)
        pathspec = rdf_paths.PathSpec(path="/etc/yum.repos.d/emptytest.repo")
        stat = rdf_client.StatEntry(pathspec=pathspec)
        parser = config_file.YumPackageSourceParser()
        results = list(parser.Parse(stat, file_obj, None))

        result = [
            d for d in results if isinstance(d, rdf_protodict.AttributedDict)
        ][0]

        self.assertEqual("/etc/yum.repos.d/emptytest.repo", result.filename)
        self.assertEqual(0, len(result.uris))
Example #4
    def Start(self):
        """Create some files to transfer.

    Using /dev/urandom ensures the file actually gets transferred and we don't
    just test the cache. The files created on the client will be automatically
    deleted.  If you need the client files for debugging, remove the lifetime
    parameter from CopyPathToFile.
    """
        self.state.Register("client_hashes", {})
        urandom = rdf_paths.PathSpec(path="/dev/urandom",
                                     pathtype=rdf_paths.PathSpec.PathType.OS)

        for _ in range(self.args.file_limit):
            self.CallClient(
                "CopyPathToFile",
                offset=0,
                length=2 * 1024 * 1024,  # 4 default sized blobs
                src_path=urandom,
                dest_dir="",
                gzip_output=False,
                lifetime=600,
                next_state="HashFile")
Example #5
  def testVFSFileStartsOnlyOneMultiGetFileFlowOnUpdate(self):
    """File updates should only start one MultiGetFile at any point in time."""
    client_id = self.SetupClient(0)
    # We need to create a file path that has a pathspec.
    path = "fs/os/c/bin/bash"

    with aff4.FACTORY.Create(
        client_id.Add(path),
        aff4_type=aff4_grr.VFSFile,
        mode="rw",
        token=self.token) as file_fd:
      file_fd.Set(
          file_fd.Schema.STAT,
          rdf_client.StatEntry(
              pathspec=rdf_paths.PathSpec(path="/bin/bash", pathtype="OS")))

      # Starts a MultiGetFile flow.
      file_fd.Update()

    # Check that there is exactly one flow on the client.
    flows_fd = aff4.FACTORY.Open(client_id.Add("flows"), token=self.token)
    flows = list(flows_fd.ListChildren())
    self.assertEqual(len(flows), 1)

    # The flow is the MultiGetFile flow holding the lock on the file.
    flow_obj = aff4.FACTORY.Open(flows[0], token=self.token)
    self.assertEqual(
        flow_obj.Get(flow_obj.Schema.TYPE), transfer.MultiGetFile.__name__)
    self.assertEqual(flow_obj.urn, file_fd.Get(file_fd.Schema.CONTENT_LOCK))

    # Since there is already a running flow having the lock on the file,
    # this call shouldn't do anything.
    file_fd.Update()

    # There should still be only one flow on the client.
    flows_fd = aff4.FACTORY.Open(client_id.Add("flows"), token=self.token)
    flows = list(flows_fd.ListChildren())
    self.assertEqual(len(flows), 1)
Example #6
    def testFindAction(self):
        """Test the find action."""
        # First get all the files at once
        pathspec = rdf_paths.PathSpec(path="/mock2/",
                                      pathtype=rdf_paths.PathSpec.PathType.OS)
        request = rdf_client.FindSpec(pathspec=pathspec, path_regex=".")
        request.iterator.number = 200
        result = self.RunAction(searching.Find, request)
        all_files = [
            x.hit for x in result if isinstance(x, rdf_client.FindSpec)
        ]

        # Ask for the files one at a time
        files = []
        request = rdf_client.FindSpec(pathspec=pathspec, path_regex=".")
        request.iterator.number = 1

        while True:
            result = self.RunAction(searching.Find, request)
            if request.iterator.state == rdf_client.Iterator.State.FINISHED:
                break

            self.assertEqual(len(result), 2)
            self.assertTrue(isinstance(result[0], rdf_client.FindSpec))
            self.assertTrue(isinstance(result[1], rdf_client.Iterator))
            files.append(result[0].hit)

            request.iterator = result[1].Copy()

        for x, y in zip(all_files, files):
            self.assertRDFValuesEqual(x, y)

        # Make sure the iterator is finished
        self.assertEqual(request.iterator.state,
                         rdf_client.Iterator.State.FINISHED)

        # Ensure we remove old states from client_state
        self.assertEqual(len(request.iterator.client_state.dat), 0)
Example #7
  def testCopyHuntPreservesRuleType(self):
    implementation.GRRHunt.StartHunt(
        hunt_name=standard.GenericHunt.__name__,
        description="model hunt",
        flow_runner_args=rdf_flows.FlowRunnerArgs(
            flow_name=transfer.GetFile.__name__),
        flow_args=transfer.GetFileArgs(pathspec=rdf_paths.PathSpec(
            path="/tmp/evil.txt",
            pathtype=rdf_paths.PathSpec.PathType.TSK,)),
        client_rule_set=rdf_foreman.ForemanClientRuleSet(
            rules=[
                rdf_foreman.ForemanClientRule(
                    rule_type=rdf_foreman.ForemanClientRule.Type.OS,
                    os=rdf_foreman.ForemanOsClientRule(os_darwin=True))
            ]),
        token=self.token)

    self.Open("/#main=ManageHunts")
    self.Click("css=tr:contains('model hunt')")
    self.Click("css=button[name=CopyHunt]:not([disabled])")

    # Wait until dialog appears.
    self.WaitUntil(self.IsElementPresent,
                   "css=grr-wizard-form:contains('What to run?')")
    # Click on "Next" button
    self.Click("css=grr-new-hunt-wizard-form button.Next")
    self.WaitUntil(self.IsElementPresent,
                   "css=grr-wizard-form:contains('Hunt parameters')")
    # Click on "Next" button.
    self.Click("css=grr-new-hunt-wizard-form button.Next")
    self.WaitUntil(self.IsElementPresent,
                   "css=grr-wizard-form:contains('How to process results')")
    # Click on "Next" button
    self.Click("css=grr-new-hunt-wizard-form button.Next")
    self.WaitUntil(self.IsElementPresent,
                   "css=grr-wizard-form:contains('Where to run?')")
    self.WaitUntil(self.IsElementPresent, "css=grr-new-hunt-wizard-form "
                   "label:contains('Os darwin') ~ * input:checked")
Example #8
    def testShowsNotificationIfArchiveStreamingFailsInProgress(self):
        pathspec = rdf_paths.PathSpec(path=os.path.join(
            self.base_path, "test.plist"),
                                      pathtype=rdf_paths.PathSpec.PathType.OS)
        flow_urn = flow.GRRFlow.StartFlow(
            flow_name=flows_transfer.GetFile.__name__,
            client_id=self.client_id,
            pathspec=pathspec,
            token=self.token)

        with self.ACLChecksDisabled():
            for _ in test_lib.TestFlowHelper(flow_urn,
                                             self.action_mock,
                                             client_id=self.client_id,
                                             token=self.token):
                pass

        def RaisingStub(*unused_args, **unused_kwargs):
            yield "foo"
            yield "bar"
            raise RuntimeError("something went wrong")

        with utils.Stubber(api_call_handler_utils.CollectionArchiveGenerator,
                           "Generate", RaisingStub):
            self.Open("/#c=C.0000000000000001")

            self.Click("css=a[grrtarget='client.flows']")
            self.Click("css=td:contains('GetFile')")
            self.Click("link=Results")
            self.Click("css=button.DownloadButton")

            self.WaitUntil(
                self.IsUserNotificationPresent,
                "Archive generation failed for flow %s" % flow_urn.Basename())
            # There will be no failure message, as we can't get a status from an
            # iframe that triggers the download.
            self.WaitUntilNot(self.IsTextPresent,
                              "Can't generate archive: Unknown error")
Example #9
    def testMultiGetFileSizeLimit(self):
        client_mock = action_mocks.ActionMock("TransferBuffer", "HashFile",
                                              "StatFile", "HashBuffer")
        image_path = os.path.join(self.base_path, "test_img.dd")
        pathspec = rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.OS,
                                      path=image_path)

        # Read a bit more than one chunk (600 * 1024).
        expected_size = 750 * 1024
        args = transfer.MultiGetFileArgs(pathspecs=[pathspec],
                                         file_size=expected_size)
        for _ in test_lib.TestFlowHelper("MultiGetFile",
                                         client_mock,
                                         token=self.token,
                                         client_id=self.client_id,
                                         args=args):
            pass

        urn = aff4.AFF4Object.VFSGRRClient.PathspecToURN(
            pathspec, self.client_id)
        blobimage = aff4.FACTORY.Open(urn, token=self.token)
        # Make sure a VFSBlobImage got written.
        self.assertTrue(isinstance(blobimage, aff4_grr.VFSBlobImage))

        self.assertEqual(len(blobimage), expected_size)
        data = blobimage.read(100 * expected_size)
        self.assertEqual(len(data), expected_size)

        expected_data = open(image_path, "rb").read(expected_size)

        self.assertEqual(data, expected_data)
        hash_obj = blobimage.Get(blobimage.Schema.HASH)

        d = hashlib.sha1()
        d.update(expected_data)
        expected_hash = d.hexdigest()

        self.assertEqual(hash_obj.sha1, expected_hash)
Example #10
    def checkClickingOnDownloadAsStartsDownloadForType(self, mock_method,
                                                       plugin,
                                                       plugin_display_name):
        pathspec = rdf_paths.PathSpec(path=os.path.join(
            self.base_path, "test.plist"),
                                      pathtype=rdf_paths.PathSpec.PathType.OS)
        flow_urn = flow.GRRFlow.StartFlow(
            flow_name=flows_transfer.GetFile.__name__,
            client_id=self.client_id,
            pathspec=pathspec,
            token=self.token)
        flow_test_lib.TestFlowHelper(flow_urn,
                                     self.action_mock,
                                     client_id=self.client_id,
                                     token=self.token)

        self.Open("/#/clients/%s/flows/%s" %
                  (self.client_id, flow_urn.Basename()))
        self.Click("link=Results")
        self.Select("id=plugin-select", plugin_display_name)
        self.Click("css=grr-download-collection-as button[name='download-as']")

        def MockMethodIsCalled():
            try:
                # The mock should be called twice: once for the HEAD request
                # (to check permissions) and once for the GET request.
                mock_method.assert_called_with(
                    api_flow.ApiGetExportedFlowResultsArgs(
                        client_id=self.client_id,
                        flow_id=flow_urn.Basename(),
                        plugin_name=plugin),
                    token=mock.ANY)

                return True
            except AssertionError:
                return False

        self.WaitUntil(MockMethodIsCalled)
Example #11
    def testDeleteMultipleRoots(self):
        temp_dir = "grr_temp"
        test_data = self._SetUpTempDirStructure(temp_dir)
        roots, _, invalid_temp_dirs, temp_files, other_files = test_data

        with test_lib.ConfigOverrider({
                "Client.tempdir_roots": roots,
                "Client.grr_tempdir": temp_dir
        }):

            result = self.RunAction(tempfiles.DeleteGRRTempFiles,
                                    rdf_paths.PathSpec())
            self.assertEqual(len(result), 1)
            log = result[0].data
            for f in temp_files:
                self.assertIn(f, log)
            for f in invalid_temp_dirs:
                self.assertNotIn(f, log)

        for f in temp_files:
            self.assertFalse(os.path.exists(f))
        for f in other_files:
            self.assertTrue(os.path.exists(f))
Example #12
  def testFindWithMaxFiles(self):
    """Test that the Find flow works when specifying proto directly."""

    client_mock = action_mocks.ActionMock("Find")
    output_path = "analysis/FindFlowTest4"

    # Prepare a findspec.
    findspec = rdf_client.FindSpec(
        path_regex=".*",
        pathspec=rdf_paths.PathSpec(path="/",
                                    pathtype=rdf_paths.PathSpec.PathType.OS))

    for _ in test_lib.TestFlowHelper(
        "FindFiles", client_mock, client_id=self.client_id, token=self.token,
        findspec=findspec, iteration_count=3, output=output_path,
        max_results=7):
      pass

    # Check the output file is created
    fd = aff4.FACTORY.Open(self.client_id.Add(output_path), token=self.token)

    # Make sure we got the right number of results.
    self.assertEqual(len(fd), 7)
Example #13
  def testExistingFileStat(self):
    bash_stat = {
        "st_ctime": rdfvalue.RDFDatetimeSeconds(1299502221),
        "st_rdev": 0,
        "st_mtime": rdfvalue.RDFDatetimeSeconds(1284154642),
        "st_blocks": 16,
        "st_nlink": 1,
        "st_gid": 0,
        "st_blksize": 4096,
        "pathspec": rdf_paths.PathSpec(
            path="/bin/bash",
            pathtype="OS",
            path_options="CASE_LITERAL"),
        "st_dev": 51713,
        "st_size": 4874,
        "st_ino": 1026148,
        "st_uid": 0,
        "st_mode": rdf_client.StatMode(33261),
        "st_atime": rdfvalue.RDFDatetimeSeconds(1299502220)
    }

    bash_path = os.path.join("/", self.client_name, "fs/os/c/bin/bash")
    self.assertItemsEqual(self.passthrough.getattr(bash_path), bash_stat)
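
The numeric fields above are ordinary stat(2) values; for instance, the mode decodes to a regular file with 0755 permissions:

assert 33261 == 0o100755  # S_IFREG | rwxr-xr-x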
Example #14
class TestNetworkFlowLimit(base.AutomatedTest):
  """Test limit on bytes transferred for a flow."""
  platforms = ["Linux", "Darwin"]
  flow = "GetFile"
  args = {
      "pathspec": rdf_paths.PathSpec(path="/bin/bash",
                                     pathtype=rdf_paths.PathSpec.PathType.OS),
      "network_bytes_limit": 500 * 1024
  }

  test_output_path = "/fs/os/bin/bash"

  def CheckFlow(self):
    # Reopen the object to check the state.  We use OpenWithLock to avoid
    # reading old state.
    with aff4.FACTORY.OpenWithLock(
        self.session_id, token=self.token) as flow_obj:
      # Make sure we transferred approximately the right amount of data.
      self.assertAlmostEqual(flow_obj.state.context.network_bytes_sent,
                             500 * 1024, delta=30000)
      backtrace = flow_obj.state.context.get("backtrace", "")
      self.assertIsNotNone(backtrace)
      self.assertTrue("Network bytes limit exceeded." in backtrace)
Example #15
  def setUp(self):
    super(DeleteGRRTempFiles, self).setUp()
    filename = "%s_blah" % config_lib.CONFIG["Client.tempfile_prefix"]
    self.tempfile = utils.JoinPath(self.temp_dir,
                                   "delete_test", filename)
    self.dirname = os.path.dirname(self.tempfile)
    os.makedirs(self.dirname)
    self.tempdir_overrider = test_lib.ConfigOverrider({
        "Client.tempdir_roots": [os.path.dirname(self.dirname)],
        "Client.grr_tempdir": os.path.basename(self.dirname)})
    self.tempdir_overrider.Start()

    self.not_tempfile = os.path.join(self.temp_dir, "notatempfile")
    open(self.not_tempfile, "w").write("something")

    self.temp_fd = tempfiles.CreateGRRTempFile(self.dirname)
    self.temp_fd2 = tempfiles.CreateGRRTempFile(self.dirname)
    self.assertTrue(os.path.exists(self.not_tempfile))
    self.assertTrue(os.path.exists(self.temp_fd.name))
    self.assertTrue(os.path.exists(self.temp_fd2.name))

    self.pathspec = rdf_paths.PathSpec(
        path=self.dirname, pathtype=rdf_paths.PathSpec.PathType.OS)
Example #16
  def testGetFile(self):
    """Test that the GetFile flow works."""

    client_mock = action_mocks.ActionMock("TransferBuffer", "StatFile")
    pathspec = rdf_paths.PathSpec(
        pathtype=rdf_paths.PathSpec.PathType.OS,
        path=os.path.join(self.base_path, "test_img.dd"))

    for _ in test_lib.TestFlowHelper("GetFile", client_mock, token=self.token,
                                     client_id=self.client_id,
                                     pathspec=pathspec):
      pass

    # Fix path for Windows testing.
    pathspec.path = pathspec.path.replace("\\", "/")
    # Test the AFF4 file that was created.
    urn = aff4.AFF4Object.VFSGRRClient.PathspecToURN(pathspec, self.client_id)
    fd1 = aff4.FACTORY.Open(urn, token=self.token)
    fd2 = open(pathspec.path, "rb")
    fd2.seek(0, 2)

    self.assertEqual(fd2.tell(), int(fd1.Get(fd1.Schema.SIZE)))
    self.CompareFDs(fd1, fd2)
Example #17
  def testShowsNotificationWhenArchiveGenerationIsDone(self):
    pathspec = rdf_paths.PathSpec(
        path=os.path.join(self.base_path, "test.plist"),
        pathtype=rdf_paths.PathSpec.PathType.OS)
    flow_urn = flow.GRRFlow.StartFlow(
        flow_name=flows_transfer.GetFile.__name__,
        client_id=self.client_id,
        pathspec=pathspec,
        token=self.token)

    for _ in flow_test_lib.TestFlowHelper(
        flow_urn, self.action_mock, client_id=self.client_id, token=self.token):
      pass

    self.Open("/#c=C.0000000000000001")

    self.Click("css=a[grrtarget='client.flows']")
    self.Click("css=td:contains('GetFile')")
    self.Click("link=Results")
    self.Click("css=button.DownloadButton")
    self.WaitUntil(self.IsTextPresent, "Generation has started")
    self.WaitUntil(self.IsUserNotificationPresent,
                   "Downloaded archive of flow %s" % flow_urn.Basename())
Example #18
def CreateSampleHunt(description, token=None):
    hunts.GRRHunt.StartHunt(
        hunt_name="GenericHunt",
        description=description,
        flow_runner_args=rdf_flows.FlowRunnerArgs(flow_name="GetFile"),
        flow_args=transfer.GetFileArgs(pathspec=rdf_paths.PathSpec(
            path="/tmp/evil.txt",
            pathtype=rdf_paths.PathSpec.PathType.TSK,
        )),
        client_rule_set=rdf_foreman.ForemanClientRuleSet(rules=[
            rdf_foreman.ForemanClientRule(
                rule_type=rdf_foreman.ForemanClientRule.Type.REGEX,
                regex=rdf_foreman.ForemanRegexClientRule(
                    attribute_name="GRR client", attribute_regex="GRR"))
        ]),
        output_plugins=[
            output_plugin.OutputPluginDescriptor(
                plugin_name="DummyOutputPlugin",
                plugin_args=DummyOutputPlugin.args_type(
                    filename_regex="blah!", fetch_binaries=True))
        ],
        client_rate=60,
        token=token)
Example #19
    def testRegistryListing(self):
        """Test our ability to list registry keys."""
        reg = rdf_paths.PathSpec.PathType.REGISTRY
        with test_lib.VFSOverrider(reg, test_lib.FakeRegistryVFSHandler):
            pathspec = rdf_paths.PathSpec(
                pathtype=rdf_paths.PathSpec.PathType.REGISTRY,
                path=("/HKEY_USERS/S-1-5-20/Software/Microsoft"
                      "/Windows/CurrentVersion/Run"))

            expected_names = {
                "MctAdmin": stat.S_IFDIR,
                "Sidebar": stat.S_IFDIR
            }
            expected_data = [
                u"%ProgramFiles%\\Windows Sidebar\\Sidebar.exe /autoRun",
                u"%TEMP%\\Sidebar.exe"
            ]

            for f in vfs.VFSOpen(pathspec).ListFiles():
                base, name = os.path.split(f.pathspec.CollapsePath())
                self.assertEqual(base, pathspec.CollapsePath())
                self.assertIn(name, expected_names)
                self.assertIn(f.registry_data.GetValue(), expected_data)
Example #20
    def testNestedProtobufAssignment(self):
        """Check that we can assign a nested protobuf."""
        container = rdf_rekall_types.RekallRequest()
        pathspec = rdf_paths.PathSpec(path=r"\\.\pmem", pathtype=1)

        # Should raise - incompatible RDFType.
        self.assertRaises(ValueError, setattr, container, "device",
                          rdfvalue.RDFString("hello"))

        # Should raise - incompatible RDFProto type.
        self.assertRaises(ValueError, setattr, container, "device",
                          rdf_client.StatEntry(st_size=5))

        # Assign directly.
        container.device = pathspec

        self.assertEqual(container.device.path, r"\\.\pmem")

        # Clear the field.
        container.device = None

        # Check the protobuf does not have the field set at all.
        self.assertFalse(container.HasField("device"))
Example #21
    def ReadTestImage(self, size_threshold):
        path = os.path.join(self.base_path, "test_img.dd")

        urn = rdfvalue.RDFURN(self.client_id.Add("fs/os").Add(path))

        pathspec = rdf_paths.PathSpec(path=path,
                                      pathtype=rdf_paths.PathSpec.PathType.OS)

        client_mock = action_mocks.ActionMock("FingerprintFile", "HashBuffer",
                                              "HashFile", "StatFile", "Find",
                                              "TransferBuffer", "ReadBuffer")

        # Get everything as an AFF4SparseImage
        for _ in test_lib.TestFlowHelper("MakeNewAFF4SparseImage",
                                         client_mock,
                                         client_id=self.client_id,
                                         token=self.token,
                                         size_threshold=size_threshold,
                                         pathspec=pathspec):
            pass

        fd = aff4.FACTORY.Open(urn, token=self.token)
        return fd
Example #22
class MockVFSHandler(vfs.VFSHandler):
  """A mock VFS handler with fake files."""
  children = []
  for x in range(10):
    child = rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
        path="Foo%s" % x, pathtype=rdf_paths.PathSpec.PathType.OS))
    children.append(child)

  supported_pathtype = rdf_paths.PathSpec.PathType.OS

  def __init__(self, base_fd, pathspec=None, progress_callback=None,
               full_pathspec=None):
    super(MockVFSHandler, self).__init__(
        base_fd, pathspec=pathspec, progress_callback=progress_callback,
        full_pathspec=full_pathspec)

    self.pathspec.Append(pathspec)

  def ListFiles(self):
    return self.children

  def IsDirectory(self):
    return self.pathspec.path == "/"
Example #23
  def ParseRunKeys(self, responses):
    """Get filenames from the RunKeys and download the files."""
    filenames = []
    client = aff4.FACTORY.Open(self.client_id, mode="r", token=self.token)
    kb = artifact.GetArtifactKnowledgeBase(client)

    for response in responses:
      runkey = response.registry_data.string

      environ_vars = artifact_utils.GetWindowsEnvironmentVariablesMap(kb)
      path_guesses = path_detection_windows.DetectExecutablePaths([runkey],
                                                                  environ_vars)

      if not path_guesses:
        self.Log("Couldn't guess path for %s", runkey)

      for path in path_guesses:
        filenames.append(
            rdf_paths.PathSpec(
                path=path, pathtype=rdf_paths.PathSpec.PathType.TSK))

    if filenames:
      self.CallFlow("MultiGetFile", pathspecs=filenames, next_state="Done")
Example #24
  def testTSKFileInode(self):
    """Test opening a file through an indirect pathspec."""
    pathspec = rdf_paths.PathSpec(
        path=os.path.join(self.base_path, "test_img.dd"),
        pathtype=rdf_paths.PathSpec.PathType.OS)
    pathspec.Append(
        pathtype=rdf_paths.PathSpec.PathType.TSK,
        inode=12,
        path="/Test Directory")
    pathspec.Append(
        pathtype=rdf_paths.PathSpec.PathType.TSK, path="numbers.txt")

    fd = vfs.VFSOpen(pathspec)

    # Check that the new pathspec is correctly reduced to two components.
    self.assertEqual(
        fd.pathspec.first.path,
        utils.NormalizePath(os.path.join(self.base_path, "test_img.dd")))
    self.assertEqual(fd.pathspec[1].path, "/Test Directory/numbers.txt")

    # And the correct inode is placed in the final branch.
    self.assertEqual(fd.Stat().pathspec.nested_path.inode, 15)
    self.TestFileHandling(fd)
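
A sketch of the reduction the assertions check (illustrative, following the two assertEqual calls above): the two TSK components are merged into one nested component with their paths joined, and that merged component carries the resolved inode of numbers.txt (15) rather than the requested directory inode (12).

# Before VFSOpen: OS("test_img.dd") -> TSK(inode=12, "/Test Directory")
#                                   -> TSK("numbers.txt")
# After VFSOpen:  OS("test_img.dd") -> TSK(inode=15,
#                                          "/Test Directory/numbers.txt")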
Example #25
  def testFindActionCrossDev(self):
    """Test that devices boundaries don't get crossed, also by default."""
    pathspec = rdf_paths.PathSpec(
        path="/mock2/", pathtype=rdf_paths.PathSpec.PathType.OS)
    request = rdf_client.FindSpec(
        pathspec=pathspec, cross_devs=True, path_regex=".")
    request.iterator.number = 200
    results = self.RunAction(searching.Find, request)
    all_files = [x.hit for x in results if isinstance(x, rdf_client.FindSpec)]
    self.assertEqual(len(all_files), 9)

    request = rdf_client.FindSpec(
        pathspec=pathspec, cross_devs=False, path_regex=".")
    request.iterator.number = 200
    results = self.RunAction(searching.Find, request)
    all_files = [x.hit for x in results if isinstance(x, rdf_client.FindSpec)]
    self.assertEqual(len(all_files), 7)

    request = rdf_client.FindSpec(pathspec=pathspec, path_regex=".")
    request.iterator.number = 200
    results = self.RunAction(searching.Find, request)
    all_files = [x.hit for x in results if isinstance(x, rdf_client.FindSpec)]
    self.assertEqual(len(all_files), 7)
Example #26
    def testDownloadFilesPanelIsShownWhenNewResultsAreAdded(self):
        f = flow.GRRFlow.StartFlow(
            client_id=self.client_id,
            flow_name=gui_test_lib.RecursiveTestFlow.__name__,
            token=self.token)

        with data_store.DB.GetMutationPool() as pool:
            flow.GRRFlow.ResultCollectionForFID(f).Add(
                rdfvalue.RDFString("foo-result"), mutation_pool=pool)

        self.Open("/#/clients/%s" % self.client_id)
        # Ensure auto-refresh updates happen every second.
        self.GetJavaScriptValue(
            "grrUi.core.resultsCollectionDirective.setAutoRefreshInterval(1000);"
        )

        # Go to the flows page without refreshing the page, so that
        # AUTO_REFRESH_INTERVAL_MS setting is not reset.
        self.Click("css=a[grrtarget='client.flows']")
        self.Click("css=tr:contains('%s')" % f.Basename())
        self.Click("css=li[heading=Results]:not([disabled]")

        self.WaitUntil(self.IsElementPresent,
                       "css=grr-results-collection td:contains('foo-result')")
        self.WaitUntilNot(
            self.IsElementPresent,
            "css=grr-results-collection grr-download-collection-files")

        stat_entry = rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
            path="/foo/bar", pathtype=rdf_paths.PathSpec.PathType.OS))
        with data_store.DB.GetMutationPool() as pool:
            flow.GRRFlow.ResultCollectionForFID(f).Add(stat_entry,
                                                       mutation_pool=pool)

        self.WaitUntil(
            self.IsElementPresent,
            "css=grr-results-collection grr-download-collection-files")
Example #27
  def Start(self):
    """Issue the find request."""
    super(FileFinder, self).Start()

    if not self.args.paths:
      # Nothing to do.
      return

    self.state.Register("files_found", 0)
    self.state.Register("sorted_conditions",
                        sorted(self.args.conditions, key=self._ConditionWeight))

    self.state.file_size = self.args.file_size

    if self.args.pathtype in (rdf_paths.PathSpec.PathType.MEMORY,
                              rdf_paths.PathSpec.PathType.REGISTRY):
      # Memory and Registry StatEntries won't pass the file type check.
      self.args.no_file_type_check = True

    if self.args.pathtype == rdf_paths.PathSpec.PathType.MEMORY:
      # If pathtype is MEMORY, we're treating provided paths not as globs,
      # but as paths to memory devices.
      for path in self.args.paths:
        pathspec = rdf_paths.PathSpec(
            path=utils.SmartUnicode(path),
            pathtype=rdf_paths.PathSpec.PathType.MEMORY)

        aff4path = aff4.AFF4Object.VFSGRRClient.PathspecToURN(
            pathspec, self.client_id)

        stat_entry = rdf_client.StatEntry(aff4path=aff4path, pathspec=pathspec)
        self.ApplyCondition(FileFinderResult(stat_entry=stat_entry),
                            condition_index=0)

    else:
      self.GlobForPaths(self.args.paths, pathtype=self.args.pathtype,
                        no_file_type_check=self.args.no_file_type_check)
Example #28
  def DumpProcess(self, psutil_process, args):
    response = rdf_yara.YaraProcessDumpInformation()
    response.process = rdf_client.Process.FromPsutilProcess(psutil_process)

    process = client_utils.OpenProcessForMemoryAccess(pid=psutil_process.pid)

    bytes_limit = args.size_limit

    with process:
      streamer = streaming.MemoryStreamer(process, chunk_size=args.chunk_size)

      with tempfiles.TemporaryDirectory(cleanup=False) as tmp_dir:
        for start, length in client_utils.MemoryRegions(process, args):

          if bytes_limit and self.bytes_written + length > bytes_limit:
            response.error = ("Byte limit exceeded. Wrote %d bytes, "
                              "next block is %d bytes, limit is %d." %
                              (self.bytes_written, length, bytes_limit))
            return response

          end = start + length
          filename = "%s_%d_%x_%x.tmp" % (psutil_process.name(),
                                          psutil_process.pid, start, end)
          filepath = os.path.join(tmp_dir.path, filename)

          bytes_written = self._SaveMemDumpToFilePath(filepath, streamer, start,
                                                      length)

          if not bytes_written:
            continue

          self.bytes_written += bytes_written
          response.dump_files.Append(
              rdf_paths.PathSpec(
                  path=filepath, pathtype=rdf_paths.PathSpec.PathType.TMPFILE))

    return response
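
For illustration, the "%s_%d_%x_%x.tmp" format above renders the region's start and end offsets in hex (hypothetical process name, pid and region):

filename = "%s_%d_%x_%x.tmp" % ("svchost.exe", 1234, 0x400000, 0x401000)
assert filename == "svchost.exe_1234_400000_401000.tmp"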
Example #29
def CreateGRRTempFileVFS(directory=None,
                         filename=None,
                         lifetime=0,
                         mode="w+b",
                         suffix=""):
  """Creates a GRR VFS temp file.

  This function is analogous to CreateGRRTempFile but returns an open VFS handle
  to the newly created file. Arguments are the same as for CreateGRRTempFile:

  Args:
    directory: string representing absolute directory where file should be
               written. If None, use 'tmp' under the directory we're running
               from.

    filename: The name of the file to use. Note that setting both filename and
       directory name is not allowed.

    lifetime: time in seconds before we should delete this tempfile.

    mode: The mode to open the file.

    suffix: optional suffix to use for the temp file

  Returns:
    An open file handle to the new file and the corresponding pathspec.
  """

  fd = CreateGRRTempFile(
      directory=directory,
      filename=filename,
      lifetime=lifetime,
      mode=mode,
      suffix=suffix)
  pathspec = rdf_paths.PathSpec(
      path=fd.name, pathtype=rdf_paths.PathSpec.PathType.TMPFILE)
  return fd, pathspec
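
A hedged usage sketch of the helper above; the lifetime semantics follow the docstring, and treating lifetime=0 as "no scheduled deletion" is an assumption:

fd, pathspec = CreateGRRTempFileVFS(lifetime=60, suffix=".dmp")
fd.write(b"dump data")  # fd is a regular file object opened in mode "w+b"
fd.close()
# pathspec.pathtype is TMPFILE, so the new file is addressable through the VFS.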
Example #30
    def testAttributesOfFileFoundInHashFileStoreAreSetCorrectly(self):
        client_ids = self.SetupClients(2)

        filename = os.path.join(self.base_path, "tcpip.sig")
        pathspec = rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.OS,
                                      path=filename)
        urn1 = pathspec.AFF4Path(client_ids[0])
        urn2 = pathspec.AFF4Path(client_ids[1])

        for client_id in client_ids:
            client_mock = action_mocks.FileFinderClientMock()
            for _ in flow_test_lib.TestFlowHelper(
                    file_finder.FileFinder.__name__,
                    client_mock,
                    token=self.token,
                    client_id=client_id,
                    paths=[filename],
                    action=rdf_file_finder.FileFinderAction(
                        action_type=rdf_file_finder.FileFinderAction.Action.
                        DOWNLOAD)):
                pass
            # Run the worker to make sure the FileStore.AddFileToStore event is
            # processed by the worker.
            worker = worker_test_lib.MockWorker(token=self.token)
            worker.Simulate()

        fd1 = aff4.FACTORY.Open(urn1, token=self.token)
        self.assertTrue(isinstance(fd1, aff4_grr.VFSBlobImage))

        fd2 = aff4.FACTORY.Open(urn2, token=self.token)
        self.assertTrue(isinstance(fd2, aff4_grr.VFSBlobImage))

        self.assertTrue(fd1.Get(fd1.Schema.STAT))
        self.assertTrue(fd2.Get(fd2.Schema.STAT))
        self.assertEqual(fd1.Get(fd1.Schema.SIZE), fd2.Get(fd2.Schema.SIZE))
        self.assertEqual(fd1.Get(fd1.Schema.CONTENT_LAST),
                         fd2.Get(fd2.Schema.CONTENT_LAST))