Example 1
  def testMultiGetFileSetsFileHashAttributeWhenMultipleChunksDownloaded(self):
    """Test that the HASH attribute is set for a multi-chunk download."""
    client_mock = action_mocks.ActionMock("TransferBuffer", "HashFile",
                                          "StatFile", "HashBuffer")
    pathspec = rdf_paths.PathSpec(
        pathtype=rdf_paths.PathSpec.PathType.OS,
        path=os.path.join(self.base_path, "test_img.dd"))

    args = transfer.MultiGetFileArgs(pathspecs=[pathspec])
    for _ in test_lib.TestFlowHelper("MultiGetFile", client_mock,
                                     token=self.token,
                                     client_id=self.client_id, args=args):
      pass

    # Fix path for Windows testing.
    pathspec.path = pathspec.path.replace("\\", "/")
    # Test the AFF4 file that was created.
    urn = aff4.AFF4Object.VFSGRRClient.PathspecToURN(pathspec, self.client_id)
    fd = aff4.FACTORY.Open(urn, token=self.token)
    fd_hash = fd.Get(fd.Schema.HASH)

    self.assertTrue(fd_hash)

    h = hashlib.sha256()
    with open(os.path.join(self.base_path, "test_img.dd"), "rb") as model_fd:
      h.update(model_fd.read())
    self.assertEqual(fd_hash.sha256, h.digest())
Example 2
  def testMultiGetFile(self):
    """Test MultiGetFile."""

    client_mock = action_mocks.ActionMock("TransferBuffer", "HashFile",
                                          "StatFile", "HashBuffer")
    pathspec = rdf_paths.PathSpec(
        pathtype=rdf_paths.PathSpec.PathType.OS,
        path=os.path.join(self.base_path, "test_img.dd"))

    args = transfer.MultiGetFileArgs(pathspecs=[pathspec, pathspec])
    with test_lib.Instrument(
        transfer.MultiGetFile, "StoreStat") as storestat_instrument:
      for _ in test_lib.TestFlowHelper("MultiGetFile", client_mock,
                                       token=self.token,
                                       client_id=self.client_id, args=args):
        pass

      # We should only have called StoreStat once because the two paths
      # requested were identical.
      self.assertEqual(len(storestat_instrument.args), 1)

    # Fix path for Windows testing.
    pathspec.path = pathspec.path.replace("\\", "/")
    # Test the AFF4 file that was created.
    urn = aff4.AFF4Object.VFSGRRClient.PathspecToURN(pathspec, self.client_id)
    fd1 = aff4.FACTORY.Open(urn, token=self.token)
    fd2 = open(pathspec.path, "rb")
    fd2.seek(0, 2)

    self.assertEqual(fd2.tell(), int(fd1.Get(fd1.Schema.SIZE)))
    self.CompareFDs(fd1, fd2)
Example 3
    def testMultiGetFileDeduplication(self):
        """Test that identical files are downloaded only once."""
        client_mock = action_mocks.MultiGetFileClientMock()

        pathspecs = []
        # Make 10 files to download.
        for i in xrange(10):
            path = os.path.join(self.temp_dir, "test_%s.txt" % i)
            with open(path, "wb") as fd:
                fd.write("Hello")

            pathspecs.append(
                rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.OS,
                                   path=path))

        # All those files are the same so the individual chunks should
        # only be downloaded once. By forcing maximum_pending_files=1,
        # there should only be a single TransferBuffer call.
        args = transfer.MultiGetFileArgs(pathspecs=pathspecs,
                                         maximum_pending_files=1)
        for _ in test_lib.TestFlowHelper("MultiGetFile",
                                         client_mock,
                                         token=self.token,
                                         client_id=self.client_id,
                                         args=args):
            pass

        self.assertEqual(client_mock.action_counts["TransferBuffer"], 1)
Example 4
    def CreateMultiGetFileFlow(self, client_id, file_path, token):
        """Start a MultiGetFile flow for a single path and return its id."""
        pathspec = rdf_paths.PathSpec(path=file_path,
                                      pathtype=rdf_paths.PathSpec.PathType.OS)
        flow_args = transfer.MultiGetFileArgs(pathspecs=[pathspec])

        return flow.GRRFlow.StartFlow(client_id=client_id,
                                      flow_name="MultiGetFile",
                                      args=flow_args,
                                      token=token)
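
Unlike the other examples, Example 4 only builds the MultiGetFileArgs and starts the flow; it does not run it to completion. Below is a minimal, hypothetical usage sketch that assumes the same test-fixture attributes seen in the other examples (self.client_id, self.base_path, self.token); it is not part of the original snippet.

    # Hypothetical caller for the helper above; the fixture attributes are
    # assumed, not taken from Example 4 itself.
    session_id = self.CreateMultiGetFileFlow(
        client_id=self.client_id,
        file_path=os.path.join(self.base_path, "test_img.dd"),
        token=self.token)

    # Once the flow has been run, the returned session id can be opened to
    # inspect the flow object, as Example 5 does inside its TestFlowHelper loop.
    flow_obj = aff4.FACTORY.Open(session_id, mode="r", token=self.token)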
Example 5
    def testMultiGetFileMultiFiles(self):
        """Test MultiGetFile downloading many files at once."""
        client_mock = action_mocks.MultiGetFileClientMock()

        pathspecs = []
        # Make 30 files to download.
        for i in xrange(30):
            path = os.path.join(self.temp_dir, "test_%s.txt" % i)
            with open(path, "wb") as fd:
                fd.write("Hello")

            pathspecs.append(
                rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.OS,
                                   path=path))

        args = transfer.MultiGetFileArgs(pathspecs=pathspecs,
                                         maximum_pending_files=10)
        for session_id in test_lib.TestFlowHelper("MultiGetFile",
                                                  client_mock,
                                                  token=self.token,
                                                  client_id=self.client_id,
                                                  args=args):
            # Check up on the internal flow state.
            flow_obj = aff4.FACTORY.Open(session_id,
                                         mode="r",
                                         token=self.token)
            flow_state = flow_obj.state
            # All the pathspecs should be in this list.
            self.assertEqual(len(flow_state.indexed_pathspecs), 30)

            # At any one time, there should not be more than 10 files or hashes
            # pending.
            self.assertLessEqual(len(flow_state.pending_files), 10)
            self.assertLessEqual(len(flow_state.pending_hashes), 10)

        # When we finish there should be no pathspecs stored in the flow state.
        for flow_pathspec in flow_state.indexed_pathspecs:
            self.assertIsNone(flow_pathspec)
        for flow_request_data in flow_state.request_data_list:
            self.assertIsNone(flow_request_data)

        # Now open each file and make sure the data is there.
        for pathspec in pathspecs:
            urn = pathspec.AFF4Path(self.client_id)
            fd = aff4.FACTORY.Open(urn, token=self.token)
            self.assertEqual("Hello", fd.Read(100000))
Example 6
    def testMultiGetFileSizeLimit(self):
        """Test that downloads are truncated at the file_size limit."""
        client_mock = action_mocks.ActionMock("TransferBuffer", "HashFile",
                                              "StatFile", "HashBuffer")
        image_path = os.path.join(self.base_path, "test_img.dd")
        pathspec = rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.OS,
                                      path=image_path)

        # Read a bit more than one chunk (600 * 1024).
        expected_size = 750 * 1024
        args = transfer.MultiGetFileArgs(pathspecs=[pathspec],
                                         file_size=expected_size)
        for _ in test_lib.TestFlowHelper("MultiGetFile",
                                         client_mock,
                                         token=self.token,
                                         client_id=self.client_id,
                                         args=args):
            pass

        urn = aff4.AFF4Object.VFSGRRClient.PathspecToURN(
            pathspec, self.client_id)
        blobimage = aff4.FACTORY.Open(urn, token=self.token)
        # Make sure a VFSBlobImage got written.
        self.assertIsInstance(blobimage, aff4_grr.VFSBlobImage)

        self.assertEqual(len(blobimage), expected_size)
        data = blobimage.read(100 * expected_size)
        self.assertEqual(len(data), expected_size)

        with open(image_path, "rb") as model_fd:
            expected_data = model_fd.read(expected_size)

        self.assertEqual(data, expected_data)
        hash_obj = blobimage.Get(blobimage.Schema.HASH)

        d = hashlib.sha1()
        d.update(expected_data)
        expected_hash = d.hexdigest()

        self.assertEqual(hash_obj.sha1, expected_hash)