async def main():
    """Ad-hoc test driver: initiate a multipart upload (POST ?uploads).

    Signs the request with AWS V4, posts it, and prints whether the
    endpoint answered 200.
    """
    async with aiohttp.ClientSession() as session:
        config = Config()
        bucket_name = config.source_bucket_name
        object_name = config.object_name_prefix
        request_uri = AWSV4Signer.fmt_s3_request_uri(bucket_name,
                                                     object_name)
        query_params = urllib.parse.urlencode({'uploads': ''})
        body = ""
        signer = AWSV4Signer(
            config.endpoint,
            config.s3_service_name,
            config.s3_region,
            config.access_key,
            config.secret_key)
        headers = signer.prepare_signed_header(
            'POST', request_uri, query_params, body)
        # Abort the test when signing produced no Authorization header.
        if headers['Authorization'] is None:
            print("Failed to generate v4 signature")
            sys.exit(-1)
        url = config.endpoint + request_uri
        print('POST on {}'.format(url))
        async with session.post(url, params=query_params,
                                headers=headers) as resp:
            http_status = resp.status
            print("Response of POST request {} ".format(resp))
            if http_status == 200:
                print("HTTP status {} OK!".format(http_status))
            else:
                print("ERROR : BAD RESPONSE! status = {}".format(
                    http_status))
async def main():
    """Ad-hoc test driver: GET the bucket replication configuration.

    Signs the request with AWS V4 and streams the response body in
    1 KiB chunks, printing each chunk as it arrives.
    """
    async with aiohttp.ClientSession() as session:
        config = Config()
        # Ensure bucket is exists before test.
        bucket_name = config.source_bucket_name
        request_uri = AWSV4Signer.fmt_s3_request_uri(bucket_name)
        query_params = urllib.parse.urlencode({'replication': None})
        body = ""
        signer = AWSV4Signer(config.endpoint,
                             config.s3_service_name,
                             config.s3_region,
                             config.access_key,
                             config.secret_key)
        headers = signer.prepare_signed_header(
            'GET', request_uri, query_params, body)
        if headers['Authorization'] is None:
            print("Failed to generate v4 signature")
            sys.exit(-1)
        # Request url
        url = config.endpoint + request_uri
        print('GET on {}'.format(url))
        async with session.get(url, params=query_params,
                               headers=headers) as resp:
            print("Response url {}".format(resp.url))
            print("Received reponse {}".format(resp))
            total_received = 0
            # Drain the payload until the stream is exhausted.
            while chunk := await resp.content.read(1024):
                total_received += len(chunk)
                print("Received chunk {}".format(chunk))
async def create(self):
    """Initiate a multipart upload (POST ?uploads) and store the UploadId.

    On a non-200 response marks the request FAILED; on connection
    failure also flags the remote endpoint as down.
    """
    request_uri = AWSV4Signer.fmt_s3_request_uri(self._bucket_name,
                                                 self._object_name)
    query_params = urllib.parse.urlencode({'uploads': ''})
    body = ""
    headers = AWSV4Signer(
        self._session.endpoint,
        self._session.service_name,
        self._session.region,
        self._session.access_key,
        self._session.secret_key).prepare_signed_header(
        'POST', request_uri, query_params, body)
    if (headers['Authorization'] is None):
        self._logger.error(
            fmt_reqid_log(self._request_id) +
            "Failed to generate v4 signature")
        sys.exit(-1)
    self._logger.info(
        fmt_reqid_log(self._request_id) +
        'POST on {}'.format(self._session.endpoint + request_uri))
    self._logger.debug(
        fmt_reqid_log(self._request_id) +
        "POST Request Header {}".format(headers))
    self._timer.start()
    try:
        async with self._session.get_client_session().post(
                self._session.endpoint + request_uri,
                params=query_params, headers=headers) as resp:
            self._logger.info(
                fmt_reqid_log(self._request_id) +
                'POST response received with' +
                ' status code: {}'.format(resp.status))
            self._logger.info(
                'Response url {}'.format(self._session.endpoint +
                                         request_uri))
            if resp.status == 200:
                self._response_headers = resp.headers
                self._logger.info('Response headers {}'.format(
                    self._response_headers))
                # UploadId identifies this multipart session for the
                # later part uploads and the complete call.
                self._upload_id = self._response_headers.get(
                    "UploadId", None)
            else:
                self._state = S3RequestState.FAILED
                error_msg = await resp.text()
                self._logger.error(
                    fmt_reqid_log(self._request_id) +
                    'POST failed with http status: {}'.format(
                        resp.status) +
                    ' Error Response: {}'.format(error_msg))
                # Fix: stop the timer before this early return; it was
                # previously left running on the non-200 path.
                self._timer.stop()
                return
    except aiohttp.client_exceptions.ClientConnectorError as e:
        self._remote_down = True
        self._state = S3RequestState.FAILED
        self._logger.error(
            fmt_reqid_log(self._request_id) +
            "Failed to connect to S3: " + str(e))
    self._timer.stop()
    return
async def fetch(self, chunk_size):
    """Stream object (or byte-range) data from S3, yielding chunks.

    Async generator: yields data chunks of up to chunk_size bytes and
    tracks transfer progress in self._state (RUNNING / COMPLETED /
    FAILED / ABORTED).
    """
    request_uri = AWSV4Signer.fmt_s3_request_uri(
        self._bucket_name, self._object_name)
    query_params = ""
    body = ""
    # check for range read request
    if self._range_read_length >= 0:
        # get object range read
        start_bytes = self._range_read_offset
        end_bytes = self._range_read_offset + self._range_read_length
        object_range = "bytes=" + str(start_bytes) + "-" + str(end_bytes)
        # HTTP Range is inclusive on both ends, hence the +1.
        total_to_fetch = (end_bytes - start_bytes) + 1
    else:
        # get whole object
        object_range = None
        total_to_fetch = self._object_size
    headers = AWSV4Signer(
        self._session.endpoint,
        self._session.service_name,
        self._session.region,
        self._session.access_key,
        self._session.secret_key).prepare_signed_header(
        'GET', request_uri, query_params, body, object_range)
    if (headers['Authorization'] is None):
        self._logger.error(fmt_reqid_log(self._request_id) +
                           "Failed to generate v4 signature")
        sys.exit(-1)
    self._logger.info(fmt_reqid_log(self._request_id) +
                      'GET on {}'.format(
        self._session.endpoint + request_uri))
    self._logger.debug(fmt_reqid_log(self._request_id) +
                       "GET with headers {}".format(headers))
    self._timer.start()
    try:
        async with self._session.get_client_session().get(
                self._session.endpoint + request_uri,
                headers=headers) as resp:
            # Fix: stray print() replaced with a debug log entry.
            self._logger.debug(
                fmt_reqid_log(self._request_id) +
                "response {} ".format(resp))
            self._http_status = resp.status
            self._response_headers = resp.headers
            if object_range is None:
                if resp.status == 200:
                    # get object successful with 200 status code
                    self._logger.info(
                        fmt_reqid_log(self._request_id) +
                        'GET Object completed with http status: {}'.format(
                            resp.status))
                else:
                    self._state = S3RequestState.FAILED
                    error_msg = await resp.text()
                    self._logger.error(
                        fmt_reqid_log(self._request_id) +
                        'GET Object failed with http status: {}'.
                        format(resp.status) +
                        '\nError Response: {}'.format(error_msg))
                    return
            else:
                if resp.status == 206:
                    # get object range read successful with 206 status code
                    self._logger.info(fmt_reqid_log(
                        self._request_id) + 'GET object range read'
                        'completed with http status: {}'.format(
                            resp.status))
                else:
                    self._state = S3RequestState.FAILED
                    error_msg = await resp.text()
                    self._logger.error(
                        fmt_reqid_log(self._request_id) +
                        'GET object range read failed '
                        'with http status: {}'.
                        format(resp.status) +
                        ' Error Response: {}'.format(error_msg))
                    return
            self._state = S3RequestState.RUNNING
            while True:
                # If abort requested, stop the loop and return.
                if self._state == S3RequestState.ABORTED:
                    # Fix: pass the %-style args individually — the
                    # original passed one tuple for two %d placeholders,
                    # which breaks logging's msg % args expansion — and
                    # join the adjacent literals with a space.
                    self._logger.debug(
                        fmt_reqid_log(self._request_id) +
                        "Aborted after reading %d bytes "
                        "for object size of %d",
                        self._object_size - total_to_fetch,
                        self._object_size)
                    break
                data_chunk = await resp.content.read(chunk_size)
                self._object_range = len(data_chunk)
                if not data_chunk:
                    break
                self._logger.debug(
                    fmt_reqid_log(self._request_id) +
                    "Received data_chunk of size {} bytes.".format(
                        len(data_chunk)))
                yield data_chunk
                total_to_fetch = total_to_fetch - len(data_chunk)
                if total_to_fetch == 0:
                    # Completed reading all expected data.
                    self._state = S3RequestState.COMPLETED
                    break
                elif total_to_fetch < 0:
                    self._state = S3RequestState.FAILED
                    # Fix: same logging-args repair as above.
                    self._logger.error(
                        fmt_reqid_log(self._request_id) +
                        "Received %d more bytes than "
                        "expected object size of %d",
                        total_to_fetch * -1, self._object_size)
            # end of While True
            if self._state != S3RequestState.ABORTED:
                if total_to_fetch > 0:
                    self._state = S3RequestState.FAILED
                    self._logger.error(
                        fmt_reqid_log(self._request_id) +
                        "Received partial object."
                        "Expected object size (%d), "
                        "Actual received size (%d)",
                        self._object_size,
                        self._object_size - total_to_fetch)
    except aiohttp.client_exceptions.ClientConnectorError as e:
        # Fix: was `self.remote_down` — a new, never-read attribute.
        # The flag used by the rest of this module is `_remote_down`.
        self._remote_down = True
        self._state = S3RequestState.FAILED
        self._logger.error(fmt_reqid_log(self._request_id) +
                           "Failed to connect to S3: " + str(e))
    self._timer.stop()
    return
async def complete_upload(self):
    """Finish a multipart upload by POSTing the part/ETag list as XML.

    Builds the CompleteMultipartUpload document from self._etag_dict,
    posts it with the stored uploadId, and records the final ETag from
    the response body on success.
    """
    self._state = S3RequestState.RUNNING
    request_uri = AWSV4Signer.fmt_s3_request_uri(self._bucket_name,
                                                 self._object_name)
    query_params = urllib.parse.urlencode({'uploadId': self._upload_id})
    body = ""
    # Prepare xml format
    etag_str = "<CompleteMultipartUpload>" + "".join(
        "<Part><ETag>" + str(etag) + "</ETag><PartNumber>" +
        str(part) + "</PartNumber></Part>"
        for part, etag in self._etag_dict.items()
    ) + "</CompleteMultipartUpload>"
    signer = AWSV4Signer(self._session.endpoint,
                         self._session.service_name,
                         self._session.region,
                         self._session.access_key,
                         self._session.secret_key)
    headers = signer.prepare_signed_header(
        'POST', request_uri, query_params, body)
    # check the header signature
    if headers['Authorization'] is None:
        self._logger.error(
            fmt_reqid_log(self._request_id) +
            "Failed to generate v4 signature")
        sys.exit(-1)
    self._logger.info(
        fmt_reqid_log(self._request_id) +
        'POST on {}'.format(self._session.endpoint + request_uri))
    self._logger.debug(
        fmt_reqid_log(self._request_id) +
        "POST Request Header {}".format(headers))
    self._timer.start()
    try:
        async with self._session.get_client_session().post(
                self._session.endpoint + request_uri,
                data=etag_str, params=query_params,
                headers=headers) as resp:
            self._logger.info(
                fmt_reqid_log(self._request_id) +
                'POST response received with' +
                ' status code: {}'.format(resp.status))
            self._logger.info(
                'Response url {}'.format(self._session.endpoint +
                                         request_uri))
            if resp.status != 200:
                # show the error messages
                self._state = S3RequestState.FAILED
                error_msg = await resp.text()
                self._logger.error(
                    fmt_reqid_log(self._request_id) +
                    'POST failed with http status: {}'.format(
                        resp.status) +
                    ' Error Response: {}'.format(error_msg))
                return
            self._state = S3RequestState.COMPLETED
            # Get the response header and body
            self._response_headers = resp.headers
            self._logger.info('Response headers {}'.format(
                self._response_headers))
            # Response body
            resp_body = await resp.text()
            # Remove the namespace from response body elements
            resp_body = re.sub('xmlns="[^"]+"', '', resp_body)
            xml_dict = fromstring(resp_body)
            # Get the ETag from response body
            self._final_etag = xml_dict.find('ETag').text
    except aiohttp.client_exceptions.ClientConnectorError as e:
        self._remote_down = True
        self._state = S3RequestState.FAILED
        self._logger.error(
            fmt_reqid_log(self._request_id) +
            "Failed to connect to S3: " + str(e))
    self._timer.stop()
    return
async def fetch(self):
    """GET the object's tag set (?tagging) and cache it as a dict.

    On success parses the XML response into self._response_tags_dict
    ({Key: Value}); on failure marks the request FAILED.
    """
    request_uri = AWSV4Signer.fmt_s3_request_uri(self._bucket_name,
                                                 self._object_name)
    query_params = urllib.parse.urlencode({'tagging': None})
    body = ""
    signer = AWSV4Signer(self._session.endpoint,
                         self._session.service_name,
                         self._session.region,
                         self._session.access_key,
                         self._session.secret_key)
    headers = signer.prepare_signed_header(
        'GET', request_uri, query_params, body)
    if headers['Authorization'] is None:
        self._logger.error(
            fmt_reqid_log(self._request_id) +
            "Failed to generate v4 signature")
        sys.exit(-1)
    self._logger.info(
        fmt_reqid_log(self._request_id) +
        'GET on {}'.format(self._session.endpoint + request_uri))
    self._logger.debug(
        fmt_reqid_log(self._request_id) +
        "GET Request Header {}".format(headers))
    self._timer.start()
    try:
        async with self._session.get_client_session().get(
                self._session.endpoint + request_uri,
                params=query_params, headers=headers) as resp:
            self._logger.info(
                fmt_reqid_log(self._request_id) +
                'GET response received with' +
                ' status code: {}'.format(resp.status))
            self._logger.info(
                'Response url {}'.format(self._session.endpoint +
                                         request_uri))
            if resp.status == 200:
                self._response_headers = resp.headers
                received_tagset = await resp.text()
                self._logger.info(
                    "Received tagset {}".format(received_tagset))
                # Remove namespace using regular expression
                # search and replace given pattern from the given string
                received_tagset = re.sub('xmlns="[^"]+"', '',
                                         received_tagset)
                # Parse XML response
                root = fromstring(received_tagset)
                # Find all Tag elements anywhere in the tree and build
                # the Key -> Value mapping in a single pass.
                self._response_tags_dict = {
                    ele.find('Key').text: ele.find('Value').text
                    for ele in root.findall(".//Tag")
                }
            else:
                self._state = S3RequestState.FAILED
                error_msg = await resp.text()
                self._logger.error(
                    fmt_reqid_log(self._request_id) +
                    'GET failed with http status: {}'.format(
                        resp.status) +
                    'Error Response: {}'.format(error_msg))
                return
    except aiohttp.client_exceptions.ClientConnectorError as e:
        self._remote_down = True
        self._state = S3RequestState.FAILED
        self._logger.error(
            fmt_reqid_log(self._request_id) +
            "Failed to connect to S3: " + str(e))
    self._timer.stop()
    return
async def send(self):
    """PUT the object's tag set (?tagging) built from self._tag_set.

    Serializes self._tag_set into a Tagging XML document and PUTs it;
    marks the request FAILED on a non-200 response or connection error.
    """
    request_uri = AWSV4Signer.fmt_s3_request_uri(self._bucket_name,
                                                 self._object_name)
    query_params = urllib.parse.urlencode({'tagging': ''})
    body = ""
    # Prepare tag xml format
    tag_str1 = "<Tagging><TagSet>"
    tag_str2 = "</TagSet></Tagging>"
    result = ""
    for key, val in (self._tag_set).items():
        result = result + "<Tag><Key>" + key + "</Key><Value>" + \
            val + "</Value></Tag>"
    tagset = tag_str1 + result + tag_str2
    headers = AWSV4Signer(self._session.endpoint,
                          self._session.service_name,
                          self._session.region,
                          self._session.access_key,
                          self._session.secret_key).prepare_signed_header(
        'PUT', request_uri, query_params, body)
    if (headers['Authorization'] is None):
        self._logger.error(
            fmt_reqid_log(self._request_id) +
            "Failed to generate v4 signature")
        sys.exit(-1)
    self._logger.info(
        fmt_reqid_log(self._request_id) +
        'PUT on {}'.format(self._session.endpoint + request_uri))
    self._logger.debug(
        fmt_reqid_log(self._request_id) +
        "PUT Request Header {}".format(headers))
    self._timer.start()
    try:
        async with self._session.get_client_session().put(
                self._session.endpoint + request_uri,
                data=tagset, params=query_params,
                headers=headers) as resp:
            self._logger.info(
                fmt_reqid_log(self._request_id) +
                'PUT response received with' +
                ' status code: {}'.format(resp.status))
            self._logger.info(
                'Response url {}'.format(self._session.endpoint +
                                         request_uri))
            if resp.status == 200:
                self._response_headers = resp.headers
                self._logger.info('Response headers {}'.format(
                    self._response_headers))
                # Delete temporary tagset file.
                # Fix: replaced `os.system('rm -rf tagset.xml')` with a
                # direct, shell-free removal; a missing file is not an
                # error (matching rm -f behaviour).
                try:
                    os.remove('tagset.xml')
                except OSError:
                    pass
            else:
                self._state = S3RequestState.FAILED
                error_msg = await resp.text()
                self._logger.error(
                    fmt_reqid_log(self._request_id) +
                    'PUT failed with http status: {}'.format(
                        resp.status) +
                    ' Error Response: {}'.format(error_msg))
                return
    except aiohttp.client_exceptions.ClientConnectorError as e:
        self._remote_down = True
        self._state = S3RequestState.FAILED
        self._logger.error(
            fmt_reqid_log(self._request_id) +
            "Failed to connect to S3: " + str(e))
    self._timer.stop()
    return
async def get(self, part_number):
    """HEAD one part of a multipart object and cache its response headers.

    Queries partNumber/versionId via a signed HEAD request; on success
    stores the headers in self._response_headers, otherwise marks the
    request FAILED.
    """
    request_uri = AWSV4Signer.fmt_s3_request_uri(self._bucket_name,
                                                 self._object_name)
    self._part_number = part_number
    query_params = urllib.parse.urlencode({
        'partNumber': self._part_number,
        'versionId': self._version_id
    })
    body = ""
    headers = AWSV4Signer(self._session.endpoint,
                          self._session.service_name,
                          self._session.region,
                          self._session.access_key,
                          self._session.secret_key).prepare_signed_header(
        'HEAD', request_uri, query_params, body)
    if (headers['Authorization'] is None):
        self._logger.error(
            fmt_reqid_log(self._request_id) +
            "Failed to generate v4 signature")
        sys.exit(-1)
    self._logger.info(
        fmt_reqid_log(self._request_id) +
        'HEAD on {}'.format(self._session.endpoint + request_uri))
    self._logger.debug(
        fmt_reqid_log(self._request_id) +
        "HEAD Request Header {}".format(headers))
    self._timer.start()
    try:
        async with self._session.get_client_session().head(
                self._session.endpoint + request_uri,
                params=query_params, headers=headers) as resp:
            if resp.status == 200:
                self._response_headers = dict(resp.headers)
                self._logger.info(
                    fmt_reqid_log(self._request_id) +
                    'HEAD Object response received with' +
                    ' status code: {}'.format(resp.status))
                self._logger.info('received reponse header {}'.format(
                    self._response_headers))
            else:
                self._state = S3RequestState.FAILED
                error_msg = await resp.text()
                self._logger.error(
                    fmt_reqid_log(self._request_id) +
                    'HEAD Object failed with http status: {}'.format(
                        resp.status) +
                    ' Error Response: {}'.format(error_msg))
                return
        self._state = S3RequestState.RUNNING
    except aiohttp.client_exceptions.ClientConnectorError as e:
        # Fix: was `self.remote_down` — a new, never-read attribute.
        # The connection-failure flag used elsewhere is `_remote_down`.
        self._remote_down = True
        self._state = S3RequestState.FAILED
        self._logger.error(
            fmt_reqid_log(self._request_id) +
            "Failed to connect to S3: " + str(e))
    self._timer.stop()
    return
async def get(self):
    """Yields data chunk for given size."""
    request_uri = AWSV4Signer.fmt_s3_request_uri(self._bucket_name)
    self._logger.debug(
        fmt_reqid_log(self._request_id) +
        f"request_uri : {request_uri}")
    query_params = urllib.parse.urlencode({'replication': None})
    body = ""
    signer = AWSV4Signer(self._session.endpoint,
                         self._session.service_name,
                         self._session.region,
                         self._session.access_key,
                         self._session.secret_key)
    headers = signer.prepare_signed_header(
        'GET', request_uri, query_params, body)
    if headers['Authorization'] is None:
        self._logger.error(
            fmt_reqid_log(self._request_id) +
            "Failed to generate v4 signature")
        sys.exit(-1)
    # Request url
    url = self._session.endpoint + request_uri
    self._logger.info(
        fmt_reqid_log(self._request_id) + f'GET on {url}')
    self._timer.start()
    try:
        async with self._session.get_client_session().get(
                url, params=query_params, headers=headers) as resp:
            self._logger.debug(
                fmt_reqid_log(self._request_id) +
                f"Response url {resp.url}")
            self._logger.debug(
                fmt_reqid_log(self._request_id) +
                f"Received response url {resp}")
            if resp.status == 200:
                self._logger.info(
                    fmt_reqid_log(self._request_id) +
                    f"Received reponse [{resp.status} OK]")
                xml_resp = await resp.text()
                # Parse the replication configuration XML into a dict.
                self._response_dict = xmltodict.parse(xml_resp)
                self._logger.debug(
                    f'Response xml : {self._response_dict}\n')
            else:
                self._state = S3RequestState.FAILED
                error_msg = await resp.text()
                self._logger.error(
                    fmt_reqid_log(self._request_id) +
                    f'Error Response: {error_msg}')
    except Exception as e:
        self._logger.error(
            fmt_reqid_log(self._request_id) +
            f"Error: Exception '{e}' occured!")
    self._timer.stop()
    self._logger.debug(
        fmt_reqid_log(self._request_id) +
        f"execution time is : {self.get_execution_time()}")
    return
async def upload(self, data_reader, part_no, chunk_size):
    """Upload one part of a multipart upload, streaming from data_reader.

    Records the part's ETag in self._etag_dict on success and sets
    self._state to COMPLETED/FAILED accordingly.
    """
    self._state = S3RequestState.RUNNING
    self._part_no = part_no
    request_uri = AWSV4Signer.fmt_s3_request_uri(self._bucket_name,
                                                 self._object_name)
    # Fix: stray print() replaced with a debug log entry.
    self._logger.debug(
        fmt_reqid_log(self._request_id) +
        "Part Number : {}".format(self._part_no))
    query_params = urllib.parse.urlencode({
        'partNumber': self._part_no,
        'uploadId': self._upload_id
    })
    body = ""
    headers = AWSV4Signer(self._session.endpoint,
                          self._session.service_name,
                          self._session.region,
                          self._session.access_key,
                          self._session.secret_key).prepare_signed_header(
        'PUT', request_uri, query_params, body)
    if (headers['Authorization'] is None):
        self._logger.error(
            fmt_reqid_log(self._request_id) +
            "Failed to generate v4 signature")
        sys.exit(-1)
    headers["Content-Length"] = str(chunk_size)
    self._logger.info(
        fmt_reqid_log(self._request_id) +
        "PUT on {}".format(self._session.endpoint + request_uri))
    self._logger.debug(
        fmt_reqid_log(self._request_id) +
        "PUT with headers {}".format(headers))
    self._timer.start()
    try:
        async with self._session.get_client_session().put(
                self._session.endpoint + request_uri,
                headers=headers,
                params=query_params,
                data=data_reader.fetch(chunk_size)) as resp:
            self._timer.stop()
            self._http_status = resp.status
            self._response_headers = resp.headers
            self._logger.info(
                fmt_reqid_log(self._request_id) +
                'PUT Object completed with http status: {}'
                '\n header{}'.format(resp.status, self._response_headers))
            if resp.status == 200:
                # Fix: record the part's ETag only on success. The
                # original read headers["Etag"] unconditionally, which
                # raises KeyError on error responses that carry no ETag
                # header and would record an etag for a failed part.
                self._etag_dict[self._part_no] = \
                    self._response_headers["Etag"]
                self._state = S3RequestState.COMPLETED
            else:
                error_msg = await resp.text()
                self._logger.error(
                    fmt_reqid_log(self._request_id) +
                    'Error Response: {}'.format(error_msg))
                self._state = S3RequestState.FAILED
    except aiohttp.client_exceptions.ClientConnectorError as e:
        self._timer.stop()
        # Fix: was `self.remote_down`; the flag read elsewhere in this
        # module is `_remote_down`.
        self._remote_down = True
        self._state = S3RequestState.FAILED
        self._logger.error(
            fmt_reqid_log(self._request_id) +
            "Failed to connect to S3: " + str(e))
    return
async def send(self, data_reader, transfer_size):
    """PUT the whole object, streaming transfer_size bytes from data_reader.

    Marks COMPLETED only when the response is 200 AND the uploaded
    object's ETag matches the reader's ETag; otherwise marks FAILED.
    """
    self._state = S3RequestState.RUNNING
    self._data_reader = data_reader
    request_uri = AWSV4Signer.fmt_s3_request_uri(self._bucket_name,
                                                 self._object_name)
    query_params = ""
    body = ""
    headers = AWSV4Signer(self._session.endpoint,
                          self._session.service_name,
                          self._session.region,
                          self._session.access_key,
                          self._session.secret_key).prepare_signed_header(
        'PUT', request_uri, query_params, body)
    if (headers['Authorization'] is None):
        self._logger.error(
            fmt_reqid_log(self._request_id) +
            "Failed to generate v4 signature")
        sys.exit(-1)
    headers["Content-Length"] = str(self._object_size)
    self._logger.info(
        fmt_reqid_log(self._request_id) +
        "PUT on {}".format(self._session.endpoint + request_uri))
    self._logger.debug(
        fmt_reqid_log(self._request_id) +
        "PUT with headers {}".format(headers))
    self._timer.start()
    try:
        async with self._session.get_client_session().put(
                self._session.endpoint + request_uri,
                headers=headers,
                # Read all data from data_reader
                data=data_reader.fetch(transfer_size)) as resp:
            self._timer.stop()
            if data_reader.get_state() != S3RequestState.ABORTED:
                self._http_status = resp.status
                self._response_headers = resp.headers
                self._logger.info(
                    fmt_reqid_log(self._request_id) +
                    'PUT Object completed with http status: {}'.format(
                        resp.status))
                if resp.status == 200:
                    # Validate if upload object etag matches.
                    # Fix: previously an ETag mismatch set FAILED but the
                    # unconditional status==200 check right after
                    # overwrote it with COMPLETED, silently accepting a
                    # corrupt upload. COMPLETED now requires both a 200
                    # status and a matching ETag.
                    if self.get_etag() != data_reader.get_etag():
                        self._state = S3RequestState.FAILED
                        error_msg = "ETag mismatch."
                        self._logger.error(
                            fmt_reqid_log(self._request_id) +
                            'Error Response: {}'.format(error_msg))
                    else:
                        self._state = S3RequestState.COMPLETED
                else:
                    error_msg = await resp.text()
                    self._logger.error(
                        fmt_reqid_log(self._request_id) +
                        'Error Response: {}'.format(error_msg))
                    self._state = S3RequestState.FAILED
    except aiohttp.client_exceptions.ClientConnectorError as e:
        self._timer.stop()
        # Fix: was `self.remote_down`; the flag read elsewhere in this
        # module is `_remote_down`.
        self._remote_down = True
        self._state = S3RequestState.FAILED
        self._logger.error(
            fmt_reqid_log(self._request_id) +
            "Failed to connect to S3: " + str(e))
    return