Ejemplo n.º 1
0
    def testMultiGetFileProgressReportsSkippedDuplicatesCorrectly(self):
        """A second collection of the same file must be reported as skipped."""
        mock_client = action_mocks.MultiGetFileClientMock()
        target = rdf_paths.PathSpec(
            pathtype=rdf_paths.PathSpec.PathType.OS,
            path=os.path.join(self.base_path, "test_img.dd"))

        flow_args = transfer.MultiGetFileArgs(pathspecs=[target])
        # First run: the file gets collected.
        flow_test_lib.TestFlowHelper(transfer.MultiGetFile.__name__,
                                     mock_client,
                                     token=self.token,
                                     client_id=self.client_id,
                                     args=flow_args)

        # Second run: the identical file should be detected as a duplicate.
        flow_id = flow_test_lib.TestFlowHelper(transfer.MultiGetFile.__name__,
                                               mock_client,
                                               token=self.token,
                                               client_id=self.client_id,
                                               args=flow_args)

        flow_obj = flow_test_lib.GetFlowObj(self.client_id, flow_id)
        flow_instance = transfer.MultiGetFile(flow_obj)

        progress = flow_instance.GetProgress()
        # Nothing newly collected or failed; exactly one skip.
        self.assertEqual(progress.num_collected, 0)
        self.assertEqual(progress.num_failed, 0)
        self.assertEqual(progress.num_skipped, 1)

        self.assertLen(progress.pathspecs_progress, 1)
        self.assertEqual(progress.pathspecs_progress[0].pathspec, target)
        self.assertEqual(progress.pathspecs_progress[0].status,
                         transfer.PathSpecProgress.Status.SKIPPED)
Ejemplo n.º 2
0
  def testMultiGetFileSetsFileHashAttributeWhenMultipleChunksDownloaded(self):
    """The stored hash entry must match the SHA-256 of the source file."""
    mock_client = action_mocks.MultiGetFileClientMock()
    image_path = os.path.join(self.base_path, "test_img.dd")
    pathspec = rdf_paths.PathSpec(
        pathtype=rdf_paths.PathSpec.PathType.OS, path=image_path)

    flow_test_lib.TestFlowHelper(
        transfer.MultiGetFile.__name__,
        mock_client,
        token=self.token,
        client_id=self.client_id,
        args=transfer.MultiGetFileArgs(pathspecs=[pathspec]))

    # Fix path for Windows testing.
    pathspec.path = pathspec.path.replace("\\", "/")
    # Look up the hash entry recorded for the created AFF4 file.
    fd_hash = data_store_utils.GetUrnHashEntry(
        pathspec.AFF4Path(self.client_id))
    self.assertTrue(fd_hash)

    # Reference digest computed directly from the file on disk.
    digest = hashlib.sha256()
    with open(image_path, "rb") as model_fd:
      digest.update(model_fd.read())
    self.assertEqual(fd_hash.sha256, digest.digest())
Ejemplo n.º 3
0
    def testCorrectlyDisplaysMultiGetFileResults(self):
        """Checks that the UI renders MultiGetFile results in its table.

        Drives a browser-based UI test: starts a flow with ten pathspecs,
        pins the flow's reported progress, adds three sample results and
        verifies they appear under the results accordion.
        """
        # Ten pathspecs so the accordion title reads "/somefile0 + 9 more".
        flow_args = transfer.MultiGetFileArgs(pathspecs=[
            rdf_paths.PathSpec.OS(path=f"/somefile{i}") for i in range(10)
        ])
        flow_id = flow_test_lib.StartFlow(transfer.MultiGetFile,
                                          creator=self.test_username,
                                          client_id=self.client_id,
                                          flow_args=flow_args)

        # Override the flow's progress so the UI state under test is
        # deterministic rather than depending on actual execution.
        with flow_test_lib.FlowProgressOverride(
                transfer.MultiGetFile,
                transfer.MultiGetFileProgress(num_pending_hashes=0,
                                              num_pending_files=2,
                                              num_skipped=0,
                                              num_collected=3,
                                              num_failed=5)):
            self.Open(f"/v2/clients/{self.client_id}")
            self.WaitUntil(self.IsElementPresent,
                           "css=.flow-title:contains('MultiGetFile')")

            # Attach three sample results to the flow under test.
            flow_test_lib.AddResultsToFlow(
                self.client_id, flow_id,
                [self._GenSampleResult(i) for i in range(3)])

            # Expand the results accordion by clicking its title.
            self.Click(
                "css=multi-get-file-flow-details result-accordion .title:contains('/somefile0 + 9 more')"
            )
            # Each of the three added results should appear as a table cell.
            for i in range(3):
                self.WaitUntil(
                    self.IsElementPresent,
                    f"css=multi-get-file-flow-details td:contains('/somefile{i}')"
                )
Ejemplo n.º 4
0
  def testMultiGetFileDeduplication(self):
    """Identical files should trigger only a single TransferBuffer call."""
    client_mock = action_mocks.MultiGetFileClientMock()

    pathspecs = []
    # Make 10 files to download.
    for i in range(10):
      path = os.path.join(self.temp_dir, "test_%s.txt" % i)
      # The file is opened in binary mode, so the payload must be bytes
      # (writing a str here raises TypeError on Python 3).
      with open(path, "wb") as fd:
        fd.write(b"Hello")

      pathspecs.append(
          rdf_paths.PathSpec(
              pathtype=rdf_paths.PathSpec.PathType.OS, path=path))

    # All those files are the same so the individual chunks should
    # only be downloaded once. By forcing maximum_pending_files=1,
    # there should only be a single TransferBuffer call.
    args = transfer.MultiGetFileArgs(
        pathspecs=pathspecs, maximum_pending_files=1)
    flow_test_lib.TestFlowHelper(
        transfer.MultiGetFile.__name__,
        client_mock,
        token=self.token,
        client_id=self.client_id,
        args=args)

    self.assertEqual(client_mock.action_counts["TransferBuffer"], 1)
Ejemplo n.º 5
0
  def testExistingChunks(self):
    """Chunks already present in the blob store must not be re-fetched."""
    client_mock = action_mocks.MultiGetFileClientMock()

    # Make a file to download that is three chunks long.
    # For the second run, we change the middle chunk. This will lead to a
    # different hash for the whole file and three chunks to download of which
    # we already have two.
    chunk_size = transfer.MultiGetFile.CHUNK_SIZE
    # The payloads are bytes because the file is written in binary mode
    # (str content would raise TypeError on Python 3).
    for data in [
        b"A" * chunk_size + b"B" * chunk_size + b"C" * 100,
        b"A" * chunk_size + b"X" * chunk_size + b"C" * 100
    ]:
      path = os.path.join(self.temp_dir, "test.txt")
      with open(path, "wb") as fd:
        fd.write(data)

      pathspec = rdf_paths.PathSpec(
          pathtype=rdf_paths.PathSpec.PathType.OS, path=path)

      args = transfer.MultiGetFileArgs(pathspecs=[pathspec])
      flow_test_lib.TestFlowHelper(
          transfer.MultiGetFile.__name__,
          client_mock,
          token=self.token,
          client_id=self.client_id,
          args=args)

      # Verify the collected file's size and full contents.
      urn = pathspec.AFF4Path(self.client_id)
      blobimage = aff4.FACTORY.Open(urn)
      self.assertEqual(blobimage.size, len(data))
      self.assertEqual(blobimage.read(blobimage.size), data)

    # Three chunks to get for the first file, only one for the second.
    self.assertEqual(client_mock.action_counts["TransferBuffer"], 4)
Ejemplo n.º 6
0
    def testMultiGetFileSizeLimit(self):
        """The file_size argument must cap how much of the file is collected."""
        client_mock = action_mocks.MultiGetFileClientMock()
        image_path = os.path.join(self.base_path, "test_img.dd")
        pathspec = rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.OS,
                                      path=image_path)

        # Read a bit more than one chunk (600 * 1024).
        expected_size = 750 * 1024
        args = transfer.MultiGetFileArgs(pathspecs=[pathspec],
                                         file_size=expected_size)
        flow_test_lib.TestFlowHelper(transfer.MultiGetFile.__name__,
                                     client_mock,
                                     token=self.token,
                                     client_id=self.client_id,
                                     args=args)

        urn = pathspec.AFF4Path(self.client_id)
        blobimage = aff4.FACTORY.Open(urn, token=self.token)
        # Make sure a VFSBlobImage got written.
        self.assertIsInstance(blobimage, aff4_grr.VFSBlobImage)

        self.assertEqual(len(blobimage), expected_size)
        # Reading far past the limit must still yield only expected_size bytes.
        data = blobimage.read(100 * expected_size)
        self.assertEqual(len(data), expected_size)

        # Use a context manager so the model file descriptor is closed
        # deterministically instead of being leaked.
        with open(image_path, "rb") as model_fd:
            expected_data = model_fd.read(expected_size)

        self.assertEqual(data, expected_data)
        hash_obj = data_store_utils.GetFileHashEntry(blobimage)

        d = hashlib.sha1()
        d.update(expected_data)
        expected_hash = d.hexdigest()

        self.assertEqual(hash_obj.sha1, expected_hash)
Ejemplo n.º 7
0
  def testMultiGetFile(self):
    """Test MultiGetFile.

    Passes the same pathspec twice to check that stat storage is
    deduplicated, then verifies the collected AFF4 file's size and contents.
    """
    client_mock = action_mocks.MultiGetFileClientMock()
    pathspec = rdf_paths.PathSpec(
        pathtype=rdf_paths.PathSpec.PathType.OS,
        path=os.path.join(self.base_path, "test_img.dd"))

    args = transfer.MultiGetFileArgs(pathspecs=[pathspec, pathspec])
    with test_lib.Instrument(transfer.MultiGetFile,
                             "StoreStat") as storestat_instrument:
      flow_test_lib.TestFlowHelper(
          transfer.MultiGetFile.__name__,
          client_mock,
          token=self.token,
          client_id=self.client_id,
          args=args)

      # We should only have called StoreStat once because the two paths
      # requested were identical.
      self.assertLen(storestat_instrument.args, 1)

    # Fix path for Windows testing.
    pathspec.path = pathspec.path.replace("\\", "/")
    # Test the AFF4 file that was created. The model file is opened in a
    # context manager so the descriptor is not leaked.
    urn = pathspec.AFF4Path(self.client_id)
    fd1 = aff4.FACTORY.Open(urn, token=self.token)
    with open(pathspec.path, "rb") as fd2:
      fd2.seek(0, 2)
      self.assertEqual(fd2.tell(), int(fd1.Get(fd1.Schema.SIZE)))
      self.CompareFDs(fd1, fd2)
Ejemplo n.º 8
0
    def testMultiGetFileSetsFileHashAttributeWhenMultipleChunksDownloaded(
            self):
        """The REL DB hash entry must equal the on-disk file's SHA-256."""
        mock_client = action_mocks.MultiGetFileClientMock()
        image_path = os.path.join(self.base_path, "test_img.dd")
        pathspec = rdf_paths.PathSpec(
            pathtype=rdf_paths.PathSpec.PathType.OS, path=image_path)

        flow_test_lib.TestFlowHelper(
            transfer.MultiGetFile.__name__,
            mock_client,
            token=self.token,
            client_id=self.client_id,
            args=transfer.MultiGetFileArgs(pathspecs=[pathspec]))

        # Reference digest computed directly from the file on disk.
        expected = hashlib.sha256()
        with open(image_path, "rb") as model_fd:
            expected.update(model_fd.read())

        client_path = db.ClientPath.FromPathSpec(self.client_id, pathspec)
        collected = file_store.OpenFile(client_path)
        self.assertEqual(collected.hash_id.AsBytes(), expected.digest())

        # Check that SHA256 hash of the file matches the contents
        # hash and that MD5 and SHA1 are set.
        history = data_store.REL_DB.ReadPathInfoHistory(
            client_path.client_id, client_path.path_type,
            client_path.components)
        newest_entry = history[-1].hash_entry
        self.assertEqual(newest_entry.sha256, collected.hash_id.AsBytes())
        self.assertIsNotNone(newest_entry.sha1)
        self.assertIsNotNone(newest_entry.md5)
Ejemplo n.º 9
0
    def CreateMultiGetFileFlow(self, client_id, file_path):
        """Starts a MultiGetFile flow for a single file and returns its id."""
        target = rdf_paths.PathSpec(
            path=file_path, pathtype=rdf_paths.PathSpec.PathType.OS)
        return flow.StartFlow(
            client_id=client_id,
            flow_cls=transfer.MultiGetFile,
            flow_args=transfer.MultiGetFileArgs(pathspecs=[target]))
Ejemplo n.º 10
0
    def CreateMultiGetFileFlow(self, client_id, file_path, token):
        """Starts a legacy AFF4 MultiGetFile flow for a single file."""
        target = rdf_paths.PathSpec(
            path=file_path, pathtype=rdf_paths.PathSpec.PathType.OS)
        return flow.StartAFF4Flow(
            client_id=client_id,
            flow_name=transfer.MultiGetFile.__name__,
            args=transfer.MultiGetFileArgs(pathspecs=[target]),
            token=token)
Ejemplo n.º 11
0
    def testMultiGetFileSizeLimit(self):
        """The file_size argument must cap how much of the file is collected.

        Exercises both the relational-DB and the legacy AFF4 read paths,
        selected via data_store.RelationalDBReadEnabled().
        """
        client_mock = action_mocks.MultiGetFileClientMock()
        image_path = os.path.join(self.base_path, "test_img.dd")
        pathspec = rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.OS,
                                      path=image_path)

        # Read a bit more than one chunk (600 * 1024).
        expected_size = 750 * 1024
        args = transfer.MultiGetFileArgs(pathspecs=[pathspec],
                                         file_size=expected_size)
        flow_test_lib.TestFlowHelper(transfer.MultiGetFile.__name__,
                                     client_mock,
                                     token=self.token,
                                     client_id=self.client_id,
                                     args=args)

        # Use a context manager so the model file descriptor is closed
        # deterministically instead of being leaked.
        with open(image_path, "rb") as model_fd:
            expected_data = model_fd.read(expected_size)

        if data_store.RelationalDBReadEnabled():
            cp = db.ClientPath.FromPathSpec(self.client_id.Basename(),
                                            pathspec)
            fd_rel_db = file_store.OpenFile(cp)

            self.assertEqual(fd_rel_db.size, expected_size)

            # Reading past the limit must yield only expected_size bytes.
            data = fd_rel_db.read(2 * expected_size)
            self.assertLen(data, expected_size)

            d = hashlib.sha256()
            d.update(expected_data)
            self.assertEqual(fd_rel_db.hash_id.AsBytes(), d.digest())

            # Check that SHA256 hash of the file matches the contents
            # hash and that MD5 and SHA1 are set.
            history = data_store.REL_DB.ReadPathInfoHistory(
                cp.client_id, cp.path_type, cp.components)
            self.assertEqual(history[-1].hash_entry.sha256,
                             fd_rel_db.hash_id.AsBytes())
            self.assertIsNotNone(history[-1].hash_entry.sha1)
            self.assertIsNotNone(history[-1].hash_entry.md5)
        else:
            urn = pathspec.AFF4Path(self.client_id)
            blobimage = aff4.FACTORY.Open(urn, token=self.token)
            # Make sure a VFSBlobImage got written.
            self.assertIsInstance(blobimage, aff4_grr.VFSBlobImage)

            self.assertLen(blobimage, expected_size)
            data = blobimage.read(100 * expected_size)
            self.assertLen(data, expected_size)

            self.assertEqual(data, expected_data)
            hash_obj = data_store_utils.GetFileHashEntry(blobimage)

            d = hashlib.sha1()
            d.update(expected_data)
            self.assertEqual(hash_obj.sha1, d.digest())
Ejemplo n.º 12
0
        def _Check(expected_size):
            """Runs MultiGetFile with a size cap and checks the stored size."""
            flow_test_lib.TestFlowHelper(
                transfer.MultiGetFile.__name__,
                client_mock,
                token=self.token,
                client_id=self.client_id,
                args=transfer.MultiGetFileArgs(pathspecs=[pathspec],
                                               file_size=expected_size))

            # Test the file that was created.
            client_path = db.ClientPath.FromPathSpec(self.client_id, pathspec)
            collected = file_store.OpenFile(client_path)
            self.assertEqual(collected.size, expected_size)
Ejemplo n.º 13
0
    def testExistingChunks(self):
        """Chunks already present in the blob store must not be re-fetched."""
        client_mock = action_mocks.MultiGetFileClientMock()

        # Make a file to download that is three chunks long.
        # For the second run, we change the middle chunk. This will lead to a
        # different hash for the whole file and three chunks to download of
        # which we already have two.
        chunk_size = transfer.MultiGetFile.CHUNK_SIZE
        # The payloads are bytes because the file is written in binary mode
        # (str content would raise TypeError on Python 3).
        for data in [
                b"A" * chunk_size + b"B" * chunk_size + b"C" * 100,
                b"A" * chunk_size + b"X" * chunk_size + b"C" * 100
        ]:
            path = os.path.join(self.temp_dir, "test.txt")
            with open(path, "wb") as fd:
                fd.write(data)

            pathspec = rdf_paths.PathSpec(
                pathtype=rdf_paths.PathSpec.PathType.OS, path=path)

            args = transfer.MultiGetFileArgs(pathspecs=[pathspec])
            flow_test_lib.TestFlowHelper(transfer.MultiGetFile.__name__,
                                         client_mock,
                                         token=self.token,
                                         client_id=self.client_id,
                                         args=args)

            if data_store.RelationalDBReadEnabled():
                cp = db.ClientPath.FromPathSpec(self.client_id.Basename(),
                                                pathspec)
                fd_rel_db = file_store.OpenFile(cp)
                self.assertEqual(fd_rel_db.size, len(data))
                self.assertEqual(fd_rel_db.read(), data)

                # Check that SHA256 hash of the file matches the contents
                # hash and that MD5 and SHA1 are set.
                history = data_store.REL_DB.ReadPathInfoHistory(
                    cp.client_id, cp.path_type, cp.components)
                self.assertEqual(history[-1].hash_entry.sha256,
                                 fd_rel_db.hash_id.AsBytes())
                self.assertIsNotNone(history[-1].hash_entry.sha1)
                self.assertIsNotNone(history[-1].hash_entry.md5)
            else:
                urn = pathspec.AFF4Path(self.client_id)
                blobimage = aff4.FACTORY.Open(urn)
                self.assertEqual(blobimage.size, len(data))
                self.assertEqual(blobimage.read(blobimage.size), data)

        # Three chunks to get for the first file, only one for the second.
        self.assertEqual(client_mock.action_counts["TransferBuffer"], 4)
Ejemplo n.º 14
0
  def CreateMultiGetFileFlow(self, client_id, file_path, token):
    """Launches a MultiGetFile flow for file_path on either datastore path."""
    flow_args = transfer.MultiGetFileArgs(pathspecs=[
        rdf_paths.PathSpec(
            path=file_path, pathtype=rdf_paths.PathSpec.PathType.OS)
    ])

    if data_store.RelationalDBFlowsEnabled():
      # Relational flows are started with the plain client id string.
      return flow.StartFlow(
          client_id=client_id.Basename(),
          flow_cls=transfer.MultiGetFile,
          flow_args=flow_args)

    # Legacy AFF4 path: start by flow name and return the session id basename.
    return flow.StartAFF4Flow(
        client_id=client_id,
        flow_name=transfer.MultiGetFile.__name__,
        args=flow_args,
        token=token).Basename()
Ejemplo n.º 15
0
    def testMultiGetFileMultiFiles(self):
        """Test MultiGetFile downloading many files at once."""
        client_mock = action_mocks.MultiGetFileClientMock()

        pathspecs = []
        # Make 30 files to download. Use range (xrange is Python 2 only) and
        # a bytes payload, required for a file opened in binary mode.
        for i in range(30):
            path = os.path.join(self.temp_dir, "test_%s.txt" % i)
            with open(path, "wb") as fd:
                fd.write(b"Hello")

            pathspecs.append(
                rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.OS,
                                   path=path))

        args = transfer.MultiGetFileArgs(pathspecs=pathspecs,
                                         maximum_pending_files=10)
        for session_id in flow_test_lib.TestFlowHelper(
                transfer.MultiGetFile.__name__,
                client_mock,
                token=self.token,
                client_id=self.client_id,
                args=args):
            # Check up on the internal flow state.
            flow_obj = aff4.FACTORY.Open(session_id,
                                         mode="r",
                                         token=self.token)
            flow_state = flow_obj.state
            # All the pathspecs should be in this list.
            self.assertEqual(len(flow_state.indexed_pathspecs), 30)

            # At any one time, there should not be more than 10 files or hashes
            # pending.
            self.assertLessEqual(len(flow_state.pending_files), 10)
            self.assertLessEqual(len(flow_state.pending_hashes), 10)

        # When we finish there should be no pathspecs stored in the flow state.
        for flow_pathspec in flow_state.indexed_pathspecs:
            self.assertIsNone(flow_pathspec)
        for flow_request_data in flow_state.request_data_list:
            self.assertIsNone(flow_request_data)

        # Now open each file and make sure the data is there.
        for pathspec in pathspecs:
            urn = pathspec.AFF4Path(self.client_id)
            fd = aff4.FACTORY.Open(urn, token=self.token)
            # Contents are bytes, matching what was written above.
            self.assertEqual(b"Hello", fd.read())
Ejemplo n.º 16
0
    def testMultiGetFile(self):
        """Test MultiGetFile.

        Passes the same pathspec twice to check stat-storage deduplication,
        then verifies the collected file on whichever datastore path is
        enabled.
        """
        client_mock = action_mocks.MultiGetFileClientMock()
        pathspec = rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.OS,
                                      path=os.path.join(
                                          self.base_path, "test_img.dd"))

        args = transfer.MultiGetFileArgs(pathspecs=[pathspec, pathspec])
        with test_lib.Instrument(transfer.MultiGetFileMixin,
                                 "StoreStat") as storestat_instrument:
            flow_test_lib.TestFlowHelper(transfer.MultiGetFile.__name__,
                                         client_mock,
                                         token=self.token,
                                         client_id=self.client_id,
                                         args=args)

            # We should only have called StoreStat once because the two paths
            # requested were identical.
            self.assertLen(storestat_instrument.args, 1)

        # Fix path for Windows testing.
        pathspec.path = pathspec.path.replace("\\", "/")
        # Open the model file via a context manager so the descriptor is not
        # leaked when the test finishes.
        with open(pathspec.path, "rb") as fd2:
            # Test the AFF4 file that was created.
            if data_store.RelationalDBReadEnabled():
                cp = db.ClientPath.FromPathSpec(self.client_id.Basename(),
                                                pathspec)
                fd_rel_db = file_store.OpenFile(cp)
                self.CompareFDs(fd2, fd_rel_db)

                # Check that SHA256 hash of the file matches the contents
                # hash and that MD5 and SHA1 are set.
                history = data_store.REL_DB.ReadPathInfoHistory(
                    cp.client_id, cp.path_type, cp.components)
                self.assertEqual(history[-1].hash_entry.sha256,
                                 fd_rel_db.hash_id.AsBytes())
                self.assertIsNotNone(history[-1].hash_entry.sha1)
                self.assertIsNotNone(history[-1].hash_entry.md5)
            else:
                urn = pathspec.AFF4Path(self.client_id)
                fd1 = aff4.FACTORY.Open(urn, token=self.token)
                fd2.seek(0, 2)
                self.assertEqual(fd2.tell(), int(fd1.Get(fd1.Schema.SIZE)))
                self.CompareFDs(fd1, fd2)
Ejemplo n.º 17
0
  def Handle(self, args, token=None):
    """Starts a MultiGetFile flow that re-collects the given VFS file.

    Raises:
      FileNotFoundError: if no pathspec is recorded for the file.
    """
    path_type, components = rdf_objects.ParseCategorizedPath(args.file_path)

    path_info = data_store.REL_DB.ReadPathInfo(
        str(args.client_id), path_type, components)

    # Without a recorded pathspec there is nothing to re-download.
    has_pathspec = (path_info and path_info.stat_entry and
                    path_info.stat_entry.pathspec)
    if not has_pathspec:
      raise FileNotFoundError("Unable to download file %s." % args.file_path)

    flow_id = flow.StartFlow(
        client_id=str(args.client_id),
        flow_cls=transfer.MultiGetFile,
        flow_args=transfer.MultiGetFileArgs(
            pathspecs=[path_info.stat_entry.pathspec]),
        creator=token.username)

    return ApiUpdateVfsFileContentResult(operation_id=flow_id)
Ejemplo n.º 18
0
    def testMultiGetFileDeduplication(self):
        """Identical files should cause only one TransferBuffer request."""
        client_mock = action_mocks.MultiGetFileClientMock()

        pathspecs = []
        # Make 10 files to download.
        for i in range(10):
            path = os.path.join(self.temp_dir, "test_%s.txt" % i)
            # The file is opened in binary mode, so the payload must be
            # bytes (a str here raises TypeError on Python 3).
            with open(path, "wb") as fd:
                fd.write(b"Hello")

            pathspecs.append(
                rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.OS,
                                   path=path))

        # All those files are the same so the individual chunks should
        # only be downloaded once. By forcing maximum_pending_files=1,
        # there should only be a single TransferBuffer call.
        args = transfer.MultiGetFileArgs(pathspecs=pathspecs,
                                         maximum_pending_files=1)
        flow_test_lib.TestFlowHelper(transfer.MultiGetFile.__name__,
                                     client_mock,
                                     token=self.token,
                                     client_id=self.client_id,
                                     args=args)

        self.assertEqual(client_mock.action_counts["TransferBuffer"], 1)

        if data_store.RelationalDBReadEnabled():
            for pathspec in pathspecs:
                # Check that each referenced file can be read.
                cp = db.ClientPath.FromPathSpec(self.client_id.Basename(),
                                                pathspec)
                fd_rel_db = file_store.OpenFile(cp)
                # Contents are bytes, matching what was written above.
                self.assertEqual(b"Hello", fd_rel_db.read())

                # Check that SHA256 hash of the file matches the contents
                # hash and that MD5 and SHA1 are set.
                history = data_store.REL_DB.ReadPathInfoHistory(
                    cp.client_id, cp.path_type, cp.components)
                self.assertEqual(history[-1].hash_entry.sha256,
                                 fd_rel_db.hash_id.AsBytes())
                self.assertIsNotNone(history[-1].hash_entry.sha1)
                self.assertIsNotNone(history[-1].hash_entry.md5)
Ejemplo n.º 19
0
    def testMultiGetFileSetsFileHashAttributeWhenMultipleChunksDownloaded(
            self):
        """The stored file hash must equal the SHA-256 of the source file.

        Exercises both the legacy AFF4 path and the relational-DB path,
        selected via data_store.RelationalDBReadEnabled().
        """
        client_mock = action_mocks.MultiGetFileClientMock()
        pathspec = rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.OS,
                                      path=os.path.join(
                                          self.base_path, "test_img.dd"))

        args = transfer.MultiGetFileArgs(pathspecs=[pathspec])
        flow_test_lib.TestFlowHelper(transfer.MultiGetFile.__name__,
                                     client_mock,
                                     token=self.token,
                                     client_id=self.client_id,
                                     args=args)

        # Reference digest computed directly from the file on disk.
        h = hashlib.sha256()
        with open(os.path.join(self.base_path, "test_img.dd"),
                  "rb") as model_fd:
            h.update(model_fd.read())

        if not data_store.RelationalDBReadEnabled():
            # Fix path for Windows testing.
            pathspec.path = pathspec.path.replace("\\", "/")
            # Test the AFF4 file that was created.
            urn = pathspec.AFF4Path(self.client_id)
            fd_hash = data_store_utils.GetUrnHashEntry(urn)

            self.assertTrue(fd_hash)
            self.assertEqual(fd_hash.sha256, h.digest())

        if data_store.RelationalDBReadEnabled():
            cp = db.ClientPath.FromPathSpec(self.client_id.Basename(),
                                            pathspec)
            fd_rel_db = file_store.OpenFile(cp)
            self.assertEqual(fd_rel_db.hash_id.AsBytes(), h.digest())

            # Check that SHA256 hash of the file matches the contents
            # hash and that MD5 and SHA1 are set.
            history = data_store.REL_DB.ReadPathInfoHistory(
                cp.client_id, cp.path_type, cp.components)
            self.assertEqual(history[-1].hash_entry.sha256,
                             fd_rel_db.hash_id.AsBytes())
            self.assertIsNotNone(history[-1].hash_entry.sha1)
            self.assertIsNotNone(history[-1].hash_entry.md5)
Ejemplo n.º 20
0
    def testMultiGetFile(self):
        """Test MultiGetFile."""

        mock_client = action_mocks.MultiGetFileClientMock()
        image_path = os.path.join(self.base_path, "test_img.dd")
        pathspec = rdf_paths.PathSpec(
            pathtype=rdf_paths.PathSpec.PathType.OS, path=image_path)
        expected_size = os.path.getsize(image_path)

        # The duplicate pathspec exercises deduplication inside the flow.
        args = transfer.MultiGetFileArgs(pathspecs=[pathspec, pathspec])
        with test_lib.Instrument(transfer.MultiGetFile,
                                 "_StoreStat") as storestat_instrument:
            flow_test_lib.TestFlowHelper(transfer.MultiGetFile.__name__,
                                         mock_client,
                                         token=self.token,
                                         client_id=self.client_id,
                                         args=args)

            # We should only have called StoreStat once because the two paths
            # requested were identical.
            self.assertLen(storestat_instrument.args, 1)

        # Fix path for Windows testing.
        pathspec.path = pathspec.path.replace("\\", "/")

        # Compare the collected file against the original on disk.
        cp = db.ClientPath.FromPathSpec(self.client_id, pathspec)
        fd_rel_db = file_store.OpenFile(cp)
        with open(pathspec.path, "rb") as model_fd:
            self.CompareFDs(model_fd, fd_rel_db)

        # Check that SHA256 hash of the file matches the contents
        # hash and that MD5 and SHA1 are set.
        history = data_store.REL_DB.ReadPathInfoHistory(
            cp.client_id, cp.path_type, cp.components)
        newest_entry = history[-1].hash_entry
        self.assertEqual(newest_entry.sha256, fd_rel_db.hash_id.AsBytes())
        self.assertEqual(newest_entry.num_bytes, expected_size)
        self.assertIsNotNone(newest_entry.sha1)
        self.assertIsNotNone(newest_entry.md5)
Ejemplo n.º 21
0
    def testMultiGetFileSizeLimit(self):
        """The file_size argument must cap the amount of data collected."""
        mock_client = action_mocks.MultiGetFileClientMock()
        image_path = os.path.join(self.base_path, "test_img.dd")
        pathspec = rdf_paths.PathSpec(
            pathtype=rdf_paths.PathSpec.PathType.OS, path=image_path)

        # Read a bit more than one chunk (600 * 1024).
        expected_size = 750 * 1024
        flow_test_lib.TestFlowHelper(
            transfer.MultiGetFile.__name__,
            mock_client,
            token=self.token,
            client_id=self.client_id,
            args=transfer.MultiGetFileArgs(pathspecs=[pathspec],
                                           file_size=expected_size))

        with open(image_path, "rb") as fd:
            expected_data = fd.read(expected_size)

        cp = db.ClientPath.FromPathSpec(self.client_id, pathspec)
        fd_rel_db = file_store.OpenFile(cp)

        self.assertEqual(fd_rel_db.size, expected_size)

        # Reading past the limit must still return only expected_size bytes.
        data = fd_rel_db.read(2 * expected_size)
        self.assertLen(data, expected_size)

        digest = hashlib.sha256()
        digest.update(expected_data)
        self.assertEqual(fd_rel_db.hash_id.AsBytes(), digest.digest())

        # Check that SHA256 hash of the file matches the contents
        # hash and that MD5 and SHA1 are set.
        history = data_store.REL_DB.ReadPathInfoHistory(
            cp.client_id, cp.path_type, cp.components)
        newest_entry = history[-1].hash_entry
        self.assertEqual(newest_entry.sha256, fd_rel_db.hash_id.AsBytes())
        self.assertEqual(newest_entry.num_bytes, expected_size)
        self.assertIsNotNone(newest_entry.sha1)
        self.assertIsNotNone(newest_entry.md5)
Ejemplo n.º 22
0
    def testMultiGetFileMultiFiles(self):
        """Test MultiGetFile downloading many files at once."""
        mock_client = action_mocks.MultiGetFileClientMock()

        # Create 30 identical small files to download.
        targets = []
        for idx in range(30):
            file_path = os.path.join(self.temp_dir, "test_%s.txt" % idx)
            with io.open(file_path, "wb") as out:
                out.write(b"Hello")
            targets.append(
                rdf_paths.PathSpec(
                    pathtype=rdf_paths.PathSpec.PathType.OS, path=file_path))

        flow_test_lib.TestFlowHelper(
            transfer.MultiGetFile.__name__,
            mock_client,
            token=self.token,
            client_id=self.client_id,
            args=transfer.MultiGetFileArgs(pathspecs=targets,
                                           maximum_pending_files=10))

        # Now open each file and make sure the data is there.
        for target in targets:
            cp = db.ClientPath.FromPathSpec(self.client_id, target)
            stored = file_store.OpenFile(cp)
            self.assertEqual(b"Hello", stored.read())

            # Check that SHA256 hash of the file matches the contents
            # hash and that MD5 and SHA1 are set.
            history = data_store.REL_DB.ReadPathInfoHistory(
                cp.client_id, cp.path_type, cp.components)
            newest_entry = history[-1].hash_entry
            self.assertEqual(newest_entry.sha256, stored.hash_id.AsBytes())
            self.assertEqual(newest_entry.num_bytes, 5)
            self.assertIsNotNone(newest_entry.sha1)
            self.assertIsNotNone(newest_entry.md5)
# Ejemplo n.º 23
  def _ArgsToHuntArgs(
      self, args: ApiCreatePerClientFileCollectionHuntArgs
  ) -> rdf_hunt_objects.HuntArguments:
    """Builds variable-hunt arguments from per-client file collection args.

    Each per-client entry becomes one MultiGetFile flow group targeting that
    single client, with one pathspec per requested path.
    """
    flow_name = transfer.MultiGetFile.__name__

    flow_groups = []
    for client_arg in args.per_client_args:
      # One pathspec per requested path, all sharing the client's path type.
      pathspecs = [
          rdf_paths.PathSpec(path=p, pathtype=client_arg.path_type)
          for p in client_arg.paths
      ]
      flow_args = transfer.MultiGetFileArgs(pathspecs=pathspecs)

      flow_groups.append(
          rdf_hunt_objects.VariableHuntFlowGroup(
              client_ids=[client_arg.client_id],
              flow_name=flow_name,
              flow_args=rdf_structs.AnyValue.Pack(flow_args)))

    return rdf_hunt_objects.HuntArguments(
        hunt_type=rdf_hunt_objects.HuntArguments.HuntType.VARIABLE,
        variable=rdf_hunt_objects.HuntArgumentsVariable(
            flow_groups=flow_groups))
# Ejemplo n.º 24
    def testMultiGetFileProgressReportsFailuresAndSuccessesCorrectly(self):
        """Progress must report one collected and one failed pathspec."""
        client_mock = action_mocks.MultiGetFileClientMock()
        image_path = os.path.join(self.base_path, "test_img.dd")

        # One pathspec that exists and one that is guaranteed to fail.
        good_pathspec = rdf_paths.PathSpec(
            pathtype=rdf_paths.PathSpec.PathType.OS, path=image_path)
        bad_pathspec = rdf_paths.PathSpec(
            pathtype=rdf_paths.PathSpec.PathType.OS, path="/non/existing/path")

        flow_args = transfer.MultiGetFileArgs(
            pathspecs=[good_pathspec, bad_pathspec])
        flow_id = flow_test_lib.TestFlowHelper(
            transfer.MultiGetFile.__name__,
            client_mock,
            token=self.token,
            client_id=self.client_id,
            args=flow_args)

        flow_obj = flow_test_lib.GetFlowObj(self.client_id, flow_id)
        progress = transfer.MultiGetFile(flow_obj).GetProgress()

        self.assertEqual(progress.num_pending_hashes, 0)
        self.assertEqual(progress.num_pending_files, 0)
        self.assertEqual(progress.num_skipped, 0)
        self.assertEqual(progress.num_collected, 1)
        self.assertEqual(progress.num_failed, 1)

        # Check that pathspecs in the progress proto are returned in the same
        # order as in the args proto.
        self.assertEqual(progress.pathspecs_progress[0].pathspec, good_pathspec)
        self.assertEqual(progress.pathspecs_progress[1].pathspec, bad_pathspec)
        # Check that per-pathspecs statuses are correct.
        self.assertEqual(progress.pathspecs_progress[0].status,
                         transfer.PathSpecProgress.Status.COLLECTED)
        self.assertEqual(progress.pathspecs_progress[1].status,
                         transfer.PathSpecProgress.Status.FAILED)
# Ejemplo n.º 25
    def testMultiGetFileMultiFiles(self):
        """Test MultiGetFile downloading many files at once.

        Creates 30 small files, runs MultiGetFile over all of them with a cap
        of 10 pending transfers, and verifies the collected contents and
        internal flow state.
        """
        client_mock = action_mocks.MultiGetFileClientMock()

        pathspecs = []
        # Make 30 files to download.
        for i in range(30):
            path = os.path.join(self.temp_dir, "test_%s.txt" % i)
            # The file is opened in binary mode, so the payload must be bytes:
            # writing a str here raises TypeError on Python 3.
            with open(path, "wb") as fd:
                fd.write(b"Hello")

            pathspecs.append(
                rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.OS,
                                   path=path))

        args = transfer.MultiGetFileArgs(pathspecs=pathspecs,
                                         maximum_pending_files=10)
        session_id = flow_test_lib.TestFlowHelper(
            transfer.MultiGetFile.__name__,
            client_mock,
            token=self.token,
            client_id=self.client_id,
            args=args)

        if data_store.RelationalDBReadEnabled():
            # Now open each file and make sure the data is there.
            for pathspec in pathspecs:
                cp = db.ClientPath.FromPathSpec(self.client_id.Basename(),
                                                pathspec)
                fd_rel_db = file_store.OpenFile(cp)
                # File-store reads return bytes, so compare against bytes.
                self.assertEqual(b"Hello", fd_rel_db.read())

                # Check that SHA256 hash of the file matches the contents
                # hash and that MD5 and SHA1 are set.
                history = data_store.REL_DB.ReadPathInfoHistory(
                    cp.client_id, cp.path_type, cp.components)
                self.assertEqual(history[-1].hash_entry.sha256,
                                 fd_rel_db.hash_id.AsBytes())
                self.assertIsNotNone(history[-1].hash_entry.sha1)
                self.assertIsNotNone(history[-1].hash_entry.md5)
        else:
            # Check up on the internal flow state.
            flow_state = flow_test_lib.GetFlowState(self.client_id,
                                                    session_id,
                                                    token=self.token)
            # All the pathspecs should be in this list.
            self.assertLen(flow_state.indexed_pathspecs, 30)

            # At any one time, there should not be more than 10 files or hashes
            # pending.
            self.assertLessEqual(len(flow_state.pending_files), 10)
            self.assertLessEqual(len(flow_state.pending_hashes), 10)

            # When we finish there should be no pathspecs stored in the flow state.
            for flow_pathspec in flow_state.indexed_pathspecs:
                self.assertIsNone(flow_pathspec)
            for flow_request_data in flow_state.request_data_list:
                self.assertIsNone(flow_request_data)

            for pathspec in pathspecs:
                urn = pathspec.AFF4Path(self.client_id)
                fd = aff4.FACTORY.Open(urn, token=self.token)
                # AFF4 file reads return bytes as well.
                self.assertEqual(b"Hello", fd.read())