Example #1
0
 def test_fileobj_closed_when_thread_shuts_down(self):
     # Build a worker thread and keep a handle on its backing file
     # object so we can inspect it after the thread finishes.
     worker = UploadWorkerThread(mock.Mock(), 'vault_name', self.filename,
                                 'upload_id', Queue(), Queue())
     backing_file = worker._fileobj
     self.assertFalse(backing_file.closed)
     # Flipping should_continue off makes run() return immediately,
     # which lets us verify the cleanup path in isolation.
     worker.should_continue = False
     worker.run()
     self.assertTrue(backing_file.closed)
Example #2
0
    def upload(self, filename, description=None):
        """Concurrently create an archive.

        The part_size value specified when the class was constructed
        will be used *unless* it is smaller than the minimum required
        part size needed for the size of the given file.  In that case,
        the part size used will be the minimum part size required
        to properly upload the given file.

        :type filename: str
        :param filename: The filename to upload

        :type description: str
        :param description: The description of the archive.

        :rtype: str
        :return: The archive id of the newly created archive.

        :raises UploadArchiveError: If any worker thread fails to upload
            its part; the multipart upload is aborted before re-raising.

        """
        total_size = os.stat(filename).st_size
        total_parts, part_size = self._calculate_required_part_size(total_size)
        # Pre-size the list so each worker can drop its tree hash into the
        # slot matching its part index.
        hash_chunks = [None] * total_parts
        worker_queue = Queue()
        result_queue = Queue()
        response = self._api.initiate_multipart_upload(self._vault_name,
                                                       part_size,
                                                       description)
        upload_id = response['UploadId']
        # The basic idea is to add the chunks (the offsets not the actual
        # contents) to a work queue, start up a thread pool, let the threads
        # crank through the items in the work queue, and then place their
        # results in a result queue which we use to complete the multipart
        # upload.
        self._add_work_items_to_queue(total_parts, worker_queue, part_size)
        self._start_upload_threads(result_queue, upload_id,
                                   worker_queue, filename)
        try:
            self._wait_for_upload_threads(hash_chunks, result_queue,
                                          total_parts)
        except UploadArchiveError:
            log.debug("An error occurred while uploading an archive, "
                      "aborting multipart upload.")
            # Best-effort cleanup of the partial upload before propagating.
            self._api.abort_multipart_upload(self._vault_name, upload_id)
            # Bare raise preserves the original traceback ("raise e" would
            # truncate it on Python 2).
            raise
        log.debug("Completing upload.")
        response = self._api.complete_multipart_upload(
            self._vault_name, upload_id, bytes_to_hex(tree_hash(hash_chunks)),
            total_size)
        log.debug("Upload finished.")
        return response['ArchiveId']
Example #3
0
    def test_num_retries_is_obeyed(self):
        # Total attempts is 1 + num_retries: one initial attempt plus up
        # to num_retries additional tries, so num_retries=2 means three
        # upload_part calls in the worst case.
        api = mock.Mock()
        api.upload_part.side_effect = Exception()
        work_queue = Queue()
        results = Queue()
        worker = UploadWorkerThread(
            api, 'vault_name', self.filename,
            'upload_id', work_queue, results, num_retries=2,
            time_between_retries=0)
        for item in ((0, 1024), _END_SENTINEL):
            work_queue.put(item)

        worker.run()
        self.assertEqual(api.upload_part.call_count, 3)
Example #4
0
    def test_upload_errors_have_exception_messages(self):
        # A failure placed on the result queue should carry the text of
        # the underlying exception so callers can report it.
        api = mock.Mock()
        api.upload_part.side_effect = Exception("exception message")
        work_queue = Queue()
        results = Queue()
        worker = UploadWorkerThread(
            api, 'vault_name', self.filename,
            'upload_id', work_queue, results, num_retries=1,
            time_between_retries=0)
        work_queue.put((0, 1024))
        work_queue.put(_END_SENTINEL)

        worker.run()
        failure = results.get(timeout=1)
        self.assertIn("exception message", str(failure))
Example #5
0
    def download(self, filename):
        """
        Concurrently download an archive.

        :type filename: str
        :param filename: The filename to download the archive to

        :raises DownloadArchiveError: If any worker thread reports a
            failure while retrieving its part of the archive.

        """
        total_size = self._job.archive_size
        total_parts, part_size = self._calculate_required_part_size(total_size)
        worker_queue = Queue()
        result_queue = Queue()
        # Queue up the per-part work items, then let the thread pool pull
        # them down and report back through result_queue.
        self._add_work_items_to_queue(total_parts, worker_queue, part_size)
        self._start_download_threads(result_queue, worker_queue)
        try:
            self._wait_for_download_threads(filename, result_queue, total_parts)
        except DownloadArchiveError as e:
            log.debug("An error occurred while downloading an archive: %s", e)
            # Bare raise preserves the original traceback ("raise e" would
            # truncate it on Python 2).
            raise
        log.debug("Download completed.")
Example #6
0
    def test_upload_errors_have_exception_messages(self):
        # Verify that a failed part upload surfaces the original
        # exception text through the result queue.
        mock_api = mock.Mock()
        mock_api.upload_part.side_effect = Exception("exception message")
        pending = Queue()
        completed = Queue()
        thread = UploadWorkerThread(
            mock_api, 'vault_name', self.filename,
            'upload_id', pending, completed, num_retries=1,
            time_between_retries=0)
        pending.put((0, 1024))
        pending.put(_END_SENTINEL)

        thread.run()
        outcome = completed.get(timeout=1)
        self.assertIn("exception message", str(outcome))
Example #7
0
    def test_num_retries_is_obeyed(self):
        # num_retries counts *additional* attempts beyond the first, so
        # with num_retries=2 a persistently failing part is attempted
        # three times in total.
        mock_api = mock.Mock()
        mock_api.upload_part.side_effect = Exception()
        pending = Queue()
        completed = Queue()
        thread = UploadWorkerThread(
            mock_api, 'vault_name', self.filename,
            'upload_id', pending, completed, num_retries=2,
            time_between_retries=0)
        pending.put((0, 1024))
        pending.put(_END_SENTINEL)

        thread.run()
        self.assertEqual(mock_api.upload_part.call_count, 3)