def __call__(self):
    try:
        part_info = self.part_queue.get(True, QUEUE_TIMEOUT_GET)
        with self.counter_lock:
            self.part_counter.count += 1
        try:
            filename = part_info[0]
            upload_id = part_info[1]
            part_number = part_info[2]
            part_size = part_info[3]
            body = self.read_part(filename, part_number, part_size)
            bucket, key = find_bucket_key(filename.dest)
            # Python 2.6 needs a file-like body; later versions can
            # send a bytearray directly.
            if sys.version_info[:2] == (2, 6):
                stream_body = StringIO(body)
            else:
                stream_body = bytearray(body)
            params = {'endpoint': self.endpoint, 'bucket': bucket,
                      'key': key, 'part_number': str(part_number),
                      'upload_id': upload_id, 'body': stream_body}
            response_data, http = operate(self.service, 'UploadPart',
                                          params)
            etag = retrieve_http_etag(http)
            check_etag(etag, body)
            parts = {'ETag': etag, 'PartNumber': part_number}
            self.dest_queue.put((part_number, parts))
            print_str = print_operation(filename, 0)
            print_result = {'result': print_str}
            total = int(math.ceil(filename.size / float(part_size)))
            part_str = {'total': total}
            print_result['part'] = part_str
            self.printQueue.put(print_result)
        except requests.ConnectionError as e:
            connect_error = str(e)
            LOGGER.debug("%s part upload failure: %s" %
                         (part_info[0].src, connect_error))
            # Re-queue the part and resubmit a copy of this task so the
            # upload is retried.
            self.part_queue.put(part_info)
            self.executer.submit(copy.copy(self))
        except MD5Error:
            LOGGER.debug("%s part upload failure: Data "
                         "was corrupted" % part_info[0].src)
            self.part_queue.put(part_info)
            self.executer.submit(copy.copy(self))
        except Exception as e:
            LOGGER.debug('%s' % str(e))
        self.part_queue.task_done()
        with self.counter_lock:
            self.part_counter.count -= 1
    except Queue.Empty:
        pass
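# For context, a minimal sketch of the integrity check the worker above
# relies on: check_etag is assumed to recompute the MD5 of the uploaded
# bytes and compare it against the ETag S3 returned, raising MD5Error so
# the part is re-queued and retried. The '-' guard and the exact comparison
# are assumptions for illustration, not the verbatim library code.
import hashlib

class MD5Error(Exception):
    """Raised when an uploaded body's MD5 does not match its ETag."""

def check_etag(etag, body):
    # ETags of multipart objects contain '-' and are not plain MD5
    # digests, so only single-part ETags are verified here.
    if '-' not in etag:
        if hashlib.md5(body).hexdigest() != etag.strip('"'):
            raise MD5Error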
def upload(self):
    """
    Redirects the file to the multipart upload function if the file
    is large.  If it is small enough, it puts the file as an object
    in s3.
    """
    body = read_file(self.src)
    bucket, key = find_bucket_key(self.dest)
    if sys.version_info[:2] == (2, 6):
        stream_body = StringIO(body)
    else:
        stream_body = bytearray(body)
    params = {'endpoint': self.endpoint, 'bucket': bucket, 'key': key}
    if body:
        params['body'] = stream_body
    self._handle_object_params(params)
    response_data, http = operate(self.service, 'PutObject', params)
    etag = retrieve_http_etag(http)
    check_etag(etag, body)
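# A hedged sketch of the path helper used in every version above:
# find_bucket_key is assumed to split a "bucket/key" style S3 path on the
# first slash. Illustration only, not the verbatim helper.
def find_bucket_key(s3_path):
    parts = s3_path.split('/', 1)
    bucket = parts[0]
    key = parts[1] if len(parts) == 2 else ''
    return bucket, key

# Example: find_bucket_key('mybucket/photos/cat.jpg')
# returns ('mybucket', 'photos/cat.jpg').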
def upload(self):
    """
    Redirects the file to the multipart upload function if the file
    is large.  If it is small enough, it puts the file as an object
    in s3.
    """
    if not self.is_multi:
        body = read_file(self.src)
        bucket, key = find_bucket_key(self.dest)
        if sys.version_info[:2] == (2, 6):
            stream_body = StringIO(body)
        else:
            stream_body = bytearray(body)
        params = {'endpoint': self.endpoint, 'bucket': bucket,
                  'key': key}
        if body:
            params['body'] = stream_body
        if self.parameters['acl']:
            params['acl'] = self.parameters['acl'][0]
        response_data, http = operate(self.service, 'PutObject',
                                      params)
        etag = retrieve_http_etag(http)
        check_etag(etag, body)
    else:
        self.multi_upload()
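# A hedged sketch of how is_multi might be decided: compare the file size
# against a multipart cutoff so small files take the single PutObject path
# above. The helper name and the 8 MB threshold are assumptions for
# illustration only.
MULTI_THRESHOLD = 8 * 1024 * 1024

def is_multipart_candidate(size):
    return size > MULTI_THRESHOLD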
def __call__(self):
    LOGGER.debug("Uploading part %s for filename: %s",
                 self._part_number, self._filename.src)
    try:
        LOGGER.debug("Waiting for upload id.")
        upload_id = self._upload_context.wait_for_upload_id()
        body = self._read_part()
        bucket, key = find_bucket_key(self._filename.dest)
        if sys.version_info[:2] == (2, 6):
            body = StringIO(body)
        else:
            body = bytearray(body)
        params = {'endpoint': self._filename.endpoint, 'bucket': bucket,
                  'key': key, 'part_number': str(self._part_number),
                  'upload_id': upload_id, 'body': body}
        response_data, http = operate(
            self._filename.service, 'UploadPart', params)
        etag = retrieve_http_etag(http)
        self._upload_context.announce_finished_part(
            etag=etag, part_number=self._part_number)
        print_str = print_operation(self._filename, 0)
        print_result = {'result': print_str}
        total = int(math.ceil(
            self._filename.size / float(self._chunk_size)))
        part_str = {'total': total}
        print_result['part'] = part_str
        self._print_queue.put(print_result)
    except Exception as e:
        LOGGER.debug('Error during part upload: %s',
                     e, exc_info=True)
        self._upload_context.cancel_upload()
    else:
        LOGGER.debug("Part number %s completed for filename: %s",
                     self._part_number, self._filename.src)
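# A minimal sketch, assuming the upload context coordinates workers with a
# condition variable: part tasks block in wait_for_upload_id() until the
# task that created the multipart upload announces it, and finished parts
# are collected for the final CompleteMultipartUpload. Method names mirror
# the calls above; the internals are assumptions.
import threading

class UploadContextSketch(object):
    def __init__(self):
        self._cond = threading.Condition()
        self._upload_id = None
        self._finished_parts = []

    def announce_upload_id(self, upload_id):
        with self._cond:
            self._upload_id = upload_id
            self._cond.notify_all()

    def wait_for_upload_id(self):
        with self._cond:
            while self._upload_id is None:
                self._cond.wait()
            return self._upload_id

    def announce_finished_part(self, etag, part_number):
        with self._cond:
            self._finished_parts.append(
                {'ETag': etag, 'PartNumber': part_number})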
def upload(self):
    """
    Redirects the file to the multipart upload function if the file
    is large.  If it is small enough, it puts the file as an object
    in s3.
    """
    if not self.is_multi:
        body = read_file(self.src)
        bucket, key = find_bucket_key(self.dest)
        if sys.version_info[:2] == (2, 6):
            stream_body = StringIO(body)
        else:
            stream_body = bytearray(body)
        params = {'endpoint': self.endpoint, 'bucket': bucket,
                  'key': key}
        if body:
            params['body'] = stream_body
        if self.parameters['acl']:
            params['acl'] = self.parameters['acl'][0]
        if self.parameters['guess_mime_type']:
            self._inject_content_type(params, self.src)
        response_data, http = operate(self.service, 'PutObject',
                                      params)
        etag = retrieve_http_etag(http)
        check_etag(etag, body)
    else:
        self.multi_upload()
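# A hedged sketch of _inject_content_type: guess the MIME type from the
# file name with the standard mimetypes module and pass it along as the
# object's content_type parameter when a guess exists. The parameter key
# is an assumption.
import mimetypes

def _inject_content_type(params, filename):
    guessed_type, _ = mimetypes.guess_type(filename)
    if guessed_type is not None:
        params['content_type'] = guessed_type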