Example #1
  def testRaisesWhenTryingToReadTooMuchDataAtOnce(self):
    with test_lib.ConfigOverrider({"Server.max_unbound_read_size": 4}):
      # Recreate to make sure the new config option value is applied.
      self.blob_stream = file_store.BlobStream(None, self.blob_refs, None)

      self.blob_stream.read(4)
      with self.assertRaises(file_store.OversizedReadError):
        self.blob_stream.read()  # This would implicitly read 6 bytes.
Example #2
    def testRaisesWhenTryingToReadTooMuchDataAtOnce(self):
        with test_lib.ConfigOverrider(
            {"Server.max_unbound_read_size": self.blob_size}):
            # Recreate to make sure the new config option value is applied.
            self.blob_stream = file_store.BlobStream(self.blob_refs, None)

            self.blob_stream.read(self.blob_size)
            with self.assertRaises(file_store.OversizedRead):
                self.blob_stream.read(self.blob_size + 1)
Example #3
    def setUp(self):
        super(BlobStreamTest, self).setUp()

        self.blob_size = 10
        self.blob_data, self.blob_refs = _GenerateBlobRefs(
            self.blob_size, b"abcde12345")
        blob_ids = [ref.blob_id for ref in self.blob_refs]
        data_store.BLOBS.WriteBlobs(dict(zip(blob_ids, self.blob_data)))

        self.blob_stream = file_store.BlobStream(None, self.blob_refs, None)
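The _GenerateBlobRefs helper called in the setUp above is defined elsewhere in the test module and is not shown here. A minimal sketch of what it might look like, inferred from the inline version in Example #5; the exact signature and body are assumptions, not the project's actual helper:

  def _GenerateBlobRefs(blob_size, chars):
    # Hypothetical reconstruction: one blob of blob_size bytes per character
    # (e.g. b"aaaaaaaaaa", b"bbbbbbbbbb", ...), laid out back to back.
    blob_data = [bytes([c]) * blob_size for c in chars]
    blob_refs = []
    for i, bd in enumerate(blob_data):
      blob_id = rdf_objects.BlobID.FromBlobData(bd)
      blob_refs.append(
          rdf_objects.BlobReference(
              offset=i * blob_size, size=blob_size, blob_id=blob_id))
    return blob_data, blob_refs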
Example #4
    def testWhenReadingWholeFileAndWholeFileSizeIsTooBig(self):
        self.blob_stream.read()
        self.blob_stream.seek(0)

        # The file is ten blobs of blob_size bytes each, so this limit is one
        # byte short of the total file size; an unbounded read must now fail.
        with test_lib.ConfigOverrider(
            {"Server.max_unbound_read_size": self.blob_size * 10 - 1}):
            # Recreate to make sure the new config option value is applied.
            self.blob_stream = file_store.BlobStream(self.blob_refs, None)

            with self.assertRaises(file_store.OversizedRead):
                self.blob_stream.read()
Example #5
  def setUp(self):
    super(BlobStreamTest, self).setUp()

    self.blob_size = 10
    # Iterating over bytes yields ints in Python 3, so wrap each value back
    # into a bytes object before repeating it.
    self.blob_data = [bytes([c]) * self.blob_size for c in b"abcde12345"]
    self.blob_ids = [
        rdf_objects.BlobID.FromBlobData(bd) for bd in self.blob_data
    ]
    self.blob_refs = [
        rdf_objects.BlobReference(
            offset=i * self.blob_size, size=self.blob_size, blob_id=blob_id)
        for i, blob_id in enumerate(self.blob_ids)
    ]
    data_store.BLOBS.WriteBlobs(dict(zip(self.blob_ids, self.blob_data)))

    self.blob_stream = file_store.BlobStream(self.blob_refs, None)
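For reference, a short sketch of how the fixture above behaves under the read and seek calls the tests exercise; the expected byte values follow from the generated data, and these assertions are illustrative rather than taken from the source:

  stream = file_store.BlobStream(self.blob_refs, None)
  assert stream.read(self.blob_size) == b"a" * self.blob_size  # First blob.
  stream.seek(self.blob_size * 2)  # Jump to the start of the third blob.
  assert stream.read(5) == b"ccccc"  # A sized read can start mid-stream.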
Example #6
  def testAllowsReadingAboveLimitWhenSpecifiedManually(self):
    with test_lib.ConfigOverrider({"Server.max_unbound_read_size": 1}):
      # Recreate to make sure the new config option value is applied.
      self.blob_stream = file_store.BlobStream(None, self.blob_refs, None)
      # The limit only applies to unbounded reads; passing an explicit size
      # lets the read exceed Server.max_unbound_read_size without raising.
      self.blob_stream.read(self.blob_size)
Example #7
  def testRaisesIfBlobIsMissing(self):
    # GenerateBlobRefs only builds references; unlike in the setUp examples,
    # the blobs are never written to the store, so reading through them fails.
    _, missing_blob_refs = vfs_test_lib.GenerateBlobRefs(self.blob_size, "0")
    blob_stream = file_store.BlobStream(None, missing_blob_refs, None)
    with self.assertRaises(file_store.BlobNotFoundError):
      blob_stream.read(1)