def _calculate_required_part_size(self, total_size):
    min_part_size_required = minimum_part_size(total_size)
    if self._part_size >= min_part_size_required:
        part_size = self._part_size
    else:
        part_size = min_part_size_required
        log.debug("The part size specified (%s) is smaller than "
                  "the minimum required part size. Using a part "
                  "size of: %s", self._part_size, part_size)
    total_parts = int(math.ceil(total_size / float(part_size)))
    return total_parts, part_size
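# A quick check of the part-count arithmetic above (a standalone,
# hypothetical snippet, not part of the class this method lives in):
# a 10 GB archive with the 4 MB default part size splits into exactly
# 2,560 parts.
import math

total_size = 10 * 1024 * 1024 * 1024   # a 10 GB archive
part_size = 4 * 1024 * 1024            # the 4 MB default part size
total_parts = int(math.ceil(total_size / float(part_size)))
assert total_parts == 2560             # 10 GB / 4 MB, rounded up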
def create_archive_from_file(self, filename=None, file_obj=None,
                             description=None, upload_id_callback=None):
    """
    Create a new archive and upload the data from the given file
    or file-like object.

    :type filename: str
    :param filename: A filename to upload

    :type file_obj: file
    :param file_obj: A file-like object to upload

    :type description: str
    :param description: An optional description for the archive.

    :type upload_id_callback: function
    :param upload_id_callback: if set, call with the upload_id as the
        only parameter when it becomes known, to enable future calls
        to resume_archive_from_file in case resume is needed.

    :rtype: str
    :return: The archive id of the newly created archive
    """
    part_size = self.DefaultPartSize
    if not file_obj:
        file_size = os.path.getsize(filename)
        try:
            part_size = minimum_part_size(file_size, part_size)
        except ValueError:
            raise UploadArchiveError("File size of %s bytes exceeds "
                                     "40,000 GB archive limit of "
                                     "Glacier." % file_size)
        file_obj = open(filename, "rb")
    writer = self.create_archive_writer(
        description=description,
        part_size=part_size)
    if upload_id_callback:
        upload_id_callback(writer.upload_id)
    while True:
        data = file_obj.read(part_size)
        if not data:
            break
        writer.write(data)
    writer.close()
    return writer.get_archive_id()
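# A usage sketch for create_archive_from_file, assuming a boto Layer2
# connection; the region, vault name, filename, and callback here are
# all placeholders, not taken from this excerpt.
import boto.glacier

glacier = boto.glacier.connect_to_region('us-east-1')
vault = glacier.get_vault('my-vault')

def remember_upload_id(upload_id):
    # Persist the upload id so a failed upload can later be resumed
    # with resume_archive_from_file.
    print('upload id: %s' % upload_id)

archive_id = vault.create_archive_from_file(
    'backup.tar.gz',
    description='nightly backup',
    upload_id_callback=remember_upload_id)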
def test_default_part_size_can_be_specified(self):
    default_part_size = 2 * 1024 * 1024
    self.assertEqual(
        minimum_part_size(8 * 1024 * 1024, default_part_size),
        default_part_size)

def test_file_size_too_large(self):
    with self.assertRaises(ValueError):
        minimum_part_size((40000 * 1024 * 1024 * 1024) + 1)

def test_terabyte_size(self):
    # For a 4 TB file we need at least a 512 MB part size.
    self.assertEqual(minimum_part_size(4 * 1024 * 1024 * 1024 * 1024),
                     512 * 1024 * 1024)

def test_gigabyte_size(self):
    # If we're over the maximum default part size, we go up to the
    # next power of two until we find a part size that keeps us under
    # 10,000 parts.
    self.assertEqual(minimum_part_size(8 * 1024 * 1024 * 10000),
                     8 * 1024 * 1024)

def test_under_the_maximum_value(self):
    # If we're under the maximum, we can use 4 MB part sizes.
    self.assertEqual(minimum_part_size(8 * 1024 * 1024),
                     4 * 1024 * 1024)

def test_small_values_still_use_default_part_size(self):
    self.assertEqual(minimum_part_size(1), 4 * 1024 * 1024)
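# The tests above pin down the contract of minimum_part_size even though
# its body is not shown here. A minimal sketch that satisfies them might
# look like the following; the constant names and the doubling strategy
# are assumptions, not necessarily the library's own implementation.
_MEGABYTE = 1024 * 1024
_MAX_PARTS = 10000                          # Glacier's per-upload part limit
_MAX_FILE_SIZE = 40000 * 1024 * _MEGABYTE   # the 40,000 GB archive ceiling

def minimum_part_size(size_in_bytes, default_part_size=4 * _MEGABYTE):
    # Reject archives over Glacier's 40,000 GB limit.
    if size_in_bytes > _MAX_FILE_SIZE:
        raise ValueError("File size too large: %s" % size_in_bytes)
    # The requested default is fine as long as it keeps the upload at
    # or under 10,000 parts.
    if size_in_bytes <= default_part_size * _MAX_PARTS:
        return default_part_size
    # Otherwise double the part size until 10,000 parts is enough,
    # which keeps part sizes at megabyte powers of two.
    part_size = default_part_size
    while part_size * _MAX_PARTS < size_in_bytes:
        part_size *= 2
    return part_size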