Esempio n. 1
0
    def _testSeekForward(self, initial_seek):
        """Seeks forward from position zero, then reads and verifies the hash.

        Simulates an upload resumed after a process break: the wrapper jumps
        directly from 0 to initial_seek (as if the server already held those
        bytes), reads the remainder of the file, and the accumulated digest
        must still match the original file's MD5.

        Args:
          initial_seek: Number of bytes to initially seek.

        Raises:
          AssertionError on wrong amount of data remaining or hash mismatch.
        """
        test_path = self._GetTestFile()
        file_size = os.path.getsize(test_path)

        self.assertLess(
            initial_seek, file_size,
            'initial_seek must be less than test file size %s '
            '(but was actually: %s)' % (file_size, initial_seek))

        digesters = {'md5': GetMd5()}
        with open(test_path, 'rb') as fp:
            wrapped = HashingFileUploadWrapper(fp, digesters, {'md5': GetMd5},
                                               self._dummy_url, self.logger)
            wrapped.seek(initial_seek)
            self.assertEqual(wrapped.tell(), initial_seek)
            remainder = wrapped.read()
            self.assertEqual(len(remainder), file_size - initial_seek)
        with open(test_path, 'rb') as fp:
            expected = CalculateMd5FromContents(fp)
        self.assertEqual(expected, digesters['md5'].hexdigest())
Esempio n. 2
0
 def testReadInChunksWithSeekToBeginning(self):
   """Reads one buffer, then seeks to 0 and reads chunks until the end."""
   test_path = self._GetTestFile()
   # Exercise initial reads straddling 1x, 2x and 3x the transfer buffer.
   initial_read_sizes = (TRANSFER_BUFFER_SIZE - 1,
                         TRANSFER_BUFFER_SIZE,
                         TRANSFER_BUFFER_SIZE + 1,
                         TRANSFER_BUFFER_SIZE * 2 - 1,
                         TRANSFER_BUFFER_SIZE * 2,
                         TRANSFER_BUFFER_SIZE * 2 + 1,
                         TRANSFER_BUFFER_SIZE * 3 - 1,
                         TRANSFER_BUFFER_SIZE * 3,
                         TRANSFER_BUFFER_SIZE * 3 + 1)
   buffer_sizes = (TRANSFER_BUFFER_SIZE - 1,
                   TRANSFER_BUFFER_SIZE,
                   TRANSFER_BUFFER_SIZE + 1,
                   self._temp_test_file_len - 1,
                   self._temp_test_file_len,
                   self._temp_test_file_len + 1)
   for initial_read in initial_read_sizes:
     for buffer_size in buffer_sizes:
       # Can't seek to 0 if the buffer is too small, so we expect an
       # exception.
       expect_exception = buffer_size < self._temp_test_file_len
       with open(test_path, 'rb') as fp:
         wrapped = ResumableStreamingJsonUploadWrapper(
             fp, buffer_size, test_small_buffer=True)
         wrapped.read(initial_read)
         # CalculateMd5FromContents seeks to 0, reads in chunks, then seeks
         # to 0 again.
         try:
           hex_digest = CalculateMd5FromContents(wrapped)
           if expect_exception:
             self.fail('Did not get expected CommandException for '
                       'initial read size %s, buffer size %s' %
                       (initial_read, buffer_size))
         except CommandException as e:
           if not expect_exception:
             self.fail('Got unexpected CommandException "%s" for '
                       'initial read size %s, buffer size %s' %
                       (str(e), initial_read, buffer_size))
       if not expect_exception:
         with open(test_path, 'rb') as fp:
           expected = CalculateMd5FromContents(fp)
         self.assertEqual(
             expected, hex_digest,
             'Digests not equal for initial read size %s, buffer size %s' %
             (initial_read, buffer_size))
Esempio n. 3
0
 def testReadToEOF(self):
   """Reads a whole file through the wrapper and checks the final digest."""
   digesters = {'md5': md5()}
   contents = 'a' * (TRANSFER_BUFFER_SIZE * 4)
   test_path = self.CreateTempFile(contents=contents)
   with open(test_path, 'rb') as fp:
     wrapped = HashingFileUploadWrapper(fp, digesters, {'md5': md5},
                                        self._dummy_url, self.logger)
     wrapped.read()
   with open(test_path, 'rb') as fp:
     expected = CalculateMd5FromContents(fp)
   self.assertEqual(expected, digesters['md5'].hexdigest())
Esempio n. 4
0
 def testReadInChunks(self):
   """Hashes a file in buffer-sized chunks through the streaming wrapper."""
   test_path = self._GetTestFile()
   with open(test_path, 'rb') as fp:
     wrapped = ResumableStreamingJsonUploadWrapper(
         fp, TRANSFER_BUFFER_SIZE, test_small_buffer=True)
     hash_dict = {'md5': md5()}
     # CalculateHashesFromContents reads in chunks, but does not seek.
     CalculateHashesFromContents(wrapped, hash_dict)
   with open(test_path, 'rb') as fp:
     expected = CalculateMd5FromContents(fp)
   self.assertEqual(expected, hash_dict['md5'].hexdigest())
Esempio n. 5
0
    def _testSeekBack(self, initial_position, seek_back_amount):
        """Reads forward to a position, then seeks backwards and re-reads.

        Simulates an upload resumed after a connection break: one transfer
        buffer at a time is read until initial_position is reached, then the
        stream seeks backwards (as if the server missed some of the bytes)
        and reads through to EOF. The accumulated digest must match the MD5
        of the original file.

        Args:
          initial_position: Initial number of bytes to read before seek.
          seek_back_amount: Number of bytes to seek backward.

        Raises:
          AssertionError on wrong amount of data remaining or hash mismatch.
        """
        test_path = self._GetTestFile()
        file_size = os.path.getsize(test_path)

        self.assertGreaterEqual(
            initial_position, seek_back_amount,
            'seek_back_amount must be less than initial position %s '
            '(but was actually: %s)' % (initial_position, seek_back_amount))
        self.assertLess(
            initial_position, file_size,
            'initial_position must be less than test file size %s '
            '(but was actually: %s)' % (file_size, initial_position))

        digesters = {'md5': GetMd5()}
        with open(test_path, 'rb') as fp:
            wrapped = HashingFileUploadWrapper(fp, digesters, {'md5': GetMd5},
                                               self._dummy_url, self.logger)
            # Advance in whole transfer buffers, then read the final partial
            # chunk so exactly initial_position bytes have been consumed.
            consumed = 0
            while consumed < initial_position - TRANSFER_BUFFER_SIZE:
                chunk = wrapped.read(TRANSFER_BUFFER_SIZE)
                consumed += len(chunk)
            wrapped.read(initial_position - consumed)
            target = initial_position - seek_back_amount
            wrapped.seek(target)
            self.assertEqual(wrapped.tell(), target)
            remainder = wrapped.read()
            self.assertEqual(len(remainder), file_size - target)
        with open(test_path, 'rb') as fp:
            expected = CalculateMd5FromContents(fp)
        self.assertEqual(expected, digesters['md5'].hexdigest())
Esempio n. 6
0
    def _testSeekAway(self, initial_read):
        """Reads to an initial position, seeks to EOF and back, then reads on.

        Simulates a size check on the input file: after reading initial_read
        bytes, the stream seeks to the end of the file and then back to the
        saved position before reading the remainder. The accumulated digest
        must match the MD5 of the original file.

        Args:
          initial_read: Number of bytes to initially read.

        Raises:
          AssertionError on wrong amount of data remaining or hash mismatch.
        """
        test_path = self._GetTestFile()
        file_size = os.path.getsize(test_path)

        self.assertLess(
            initial_read, file_size,
            'initial_read must be less than test file size %s '
            '(but was actually: %s)' % (file_size, initial_read))

        digesters = {'md5': GetMd5()}
        with open(test_path, 'rb') as fp:
            wrapped = HashingFileUploadWrapper(fp, digesters, {'md5': GetMd5},
                                               self._dummy_url, self.logger)
            wrapped.read(initial_read)
            self.assertEqual(wrapped.tell(), initial_read)
            # Size-check detour: jump to EOF, confirm the position, return.
            wrapped.seek(0, os.SEEK_END)
            self.assertEqual(wrapped.tell(), file_size)
            wrapped.seek(initial_read, os.SEEK_SET)
            remainder = wrapped.read()
            self.assertEqual(len(remainder), file_size - initial_read)
        with open(test_path, 'rb') as fp:
            expected = CalculateMd5FromContents(fp)
        self.assertEqual(expected, digesters['md5'].hexdigest())