def _download(self, manager, bucket, key):
    """
    Stream the specified object to stdout.

    :type manager: s3transfer.manager.TransferManager
    :param manager: The transfer manager to use for the download.

    :type bucket: str
    :param bucket: The bucket to download the object from.

    :type key: str
    :param key: The name of the key to download.

    :return: A CommandResult representing the download status.
    """
    extra_args = {}
    # `download` issues the head_object too, but both operations take
    # the same parameters, so a single mapping covers everything.
    RequestParamsMapper.map_get_object_params(extra_args, self.params)
    with manager:
        future = manager.download(
            fileobj=StdoutBytesWriter(),
            bucket=bucket,
            key=key,
            extra_args=extra_args,
        )
        return self._process_transfer(future)
def test_get_object(self):
    """SSE-C settings should be carried into the GetObject params."""
    mapped = {}
    RequestParamsMapper.map_get_object_params(mapped, self.cli_params)
    expected = {
        'SSECustomerAlgorithm': 'AES256',
        'SSECustomerKey': 'my-sse-c-key',
    }
    self.assertEqual(mapped, expected)
def test_get_object(self):
    """The SSE-C algorithm and key are mapped for GetObject."""
    request_params = {}
    RequestParamsMapper.map_get_object_params(
        request_params, self.cli_params)
    self.assertEqual(
        {'SSECustomerAlgorithm': 'AES256',
         'SSECustomerKey': 'my-sse-c-key'},
        request_params,
    )
def download(self):
    """
    Fetch the object from S3 in a single GetObject request and save it
    to the destination.  (Large files are presumably routed to the
    multipart download path before reaching this method -- the original
    docstring says so, but that dispatch is not visible here.)
    """
    bucket, key = find_bucket_key(self.src)
    get_args = {'Bucket': bucket, 'Key': key}
    RequestParamsMapper.map_get_object_params(get_args, self.parameters)
    response = self.client.get_object(**get_args)
    save_file(self.dest, response, self.last_update, self.is_stream)
def _download_part(self):
    """Download this task's byte range of the object, retrying on
    transient network errors.

    On success the streamed body is handed to the write queue, the part
    is announced as completed, and a PrintTask is pushed onto the
    result queue.  After TOTAL_ATTEMPTS failures, raises
    RetriesExeededError.
    """
    total_file_size = self._filename.size
    # Byte offset where this part begins.
    start_range = self._part_number * self._chunk_size
    if self._part_number == int(total_file_size / self._chunk_size) - 1:
        # Last part: leave the end empty ('bytes=N-') so the range
        # request runs through to the end of the object.
        end_range = ''
    else:
        end_range = start_range + self._chunk_size - 1
    range_param = 'bytes=%s-%s' % (start_range, end_range)
    LOGGER.debug("Downloading bytes range of %s for file %s", range_param,
                 self._filename.dest)
    bucket, key = find_bucket_key(self._filename.src)
    params = {'Bucket': bucket, 'Key': key, 'Range': range_param}
    # Merge in any user-supplied GetObject parameters (e.g. SSE-C).
    RequestParamsMapper.map_get_object_params(params, self._params)
    for i in range(self.TOTAL_ATTEMPTS):
        try:
            LOGGER.debug("Making GetObject requests with byte range: %s",
                         range_param)
            response_data = self._client.get_object(**params)
            LOGGER.debug("Response received from GetObject")
            body = response_data['Body']
            # Hand the streaming body off to the writer queue before
            # announcing completion -- order matters here.
            self._queue_writes(body)
            self._context.announce_completed_part(self._part_number)
            message = print_operation(self._filename, 0)
            total_parts = int(self._filename.size / self._chunk_size)
            result = {
                'message': message,
                'error': False,
                'total_parts': total_parts
            }
            self._result_queue.put(PrintTask(**result))
            LOGGER.debug("Task complete: %s", self)
            return
        except (socket.timeout, socket.error, ReadTimeoutError) as e:
            # Transient network failure: log and retry the request.
            LOGGER.debug(
                "Timeout error caught, retrying request, "
                "(attempt %s / %s)", i, self.TOTAL_ATTEMPTS,
                exc_info=True)
            continue
        except IncompleteReadError as e:
            # Body ended before the advertised content length; retry.
            LOGGER.debug("Incomplete read detected: %s, (attempt %s / %s)",
                         e, i, self.TOTAL_ATTEMPTS)
            continue
    # NOTE(review): 'RetriesExeededError' spelling appears to match the
    # project-defined exception name -- confirm before "fixing" it.
    raise RetriesExeededError("Maximum number of attempts exceeded: %s" %
                              self.TOTAL_ATTEMPTS)
def _download_part(self):
    """Download one byte-range chunk of the source object with retries.

    Queues the received body for writing, marks the part complete, and
    reports progress via a PrintTask on the result queue; raises
    RetriesExeededError once TOTAL_ATTEMPTS are exhausted.
    """
    total_file_size = self._filename.size
    # Starting byte offset for this part number.
    start_range = self._part_number * self._chunk_size
    if self._part_number == int(total_file_size / self._chunk_size) - 1:
        # Final part uses an open-ended range ('bytes=N-') so it reads
        # to the end of the object regardless of exact size.
        end_range = ''
    else:
        end_range = start_range + self._chunk_size - 1
    range_param = 'bytes=%s-%s' % (start_range, end_range)
    LOGGER.debug("Downloading bytes range of %s for file %s", range_param,
                 self._filename.dest)
    bucket, key = find_bucket_key(self._filename.src)
    params = {'Bucket': bucket, 'Key': key, 'Range': range_param}
    # Fold user-specified GetObject parameters (e.g. SSE-C) into the call.
    RequestParamsMapper.map_get_object_params(params, self._params)
    for i in range(self.TOTAL_ATTEMPTS):
        try:
            LOGGER.debug("Making GetObject requests with byte range: %s",
                         range_param)
            response_data = self._client.get_object(**params)
            LOGGER.debug("Response received from GetObject")
            body = response_data['Body']
            # Stream the body into the write queue, then announce the
            # part as done; this ordering is relied upon.
            self._queue_writes(body)
            self._context.announce_completed_part(self._part_number)
            message = print_operation(self._filename, 0)
            total_parts = int(self._filename.size / self._chunk_size)
            result = {'message': message, 'error': False,
                      'total_parts': total_parts}
            self._result_queue.put(PrintTask(**result))
            LOGGER.debug("Task complete: %s", self)
            return
        except (socket.timeout, socket.error, ReadTimeoutError) as e:
            # Transient socket/read timeout: retry the GetObject.
            LOGGER.debug("Timeout error caught, retrying request, "
                         "(attempt %s / %s)", i, self.TOTAL_ATTEMPTS,
                         exc_info=True)
            continue
        except IncompleteReadError as e:
            # Body was truncated mid-stream; retry the whole part.
            LOGGER.debug("Incomplete read detected: %s, (attempt %s / %s)",
                         e, i, self.TOTAL_ATTEMPTS)
            continue
    # NOTE(review): the misspelled 'RetriesExeededError' looks like the
    # project's actual exception class name -- verify before renaming.
    raise RetriesExeededError("Maximum number of attempts exceeded: %s" %
                              self.TOTAL_ATTEMPTS)
def test_get_object(self):
    """Verify SSE-C values propagate into the GetObject params."""
    result = {}
    RequestParamsMapper.map_get_object_params(result, self.cli_params)
    expected = dict(
        SSECustomerAlgorithm="AES256",
        SSECustomerKey="my-sse-c-key",
    )
    self.assertEqual(result, expected)
def test_get_object(self):
    """The requester-pays flag should be mapped for GetObject."""
    mapped_params = {}
    RequestParamsMapper.map_get_object_params(
        mapped_params, self.cli_params)
    expected = {'RequestPayer': 'requester'}
    self.assertEqual(mapped_params, expected)