async def aio_s3_object_access(
    s3_uri: str, config: AioAWSConfig, s3_client: AioBaseClient
) -> Tuple[str, bool]:
    """
    Asynchronous coroutine that issues a HEAD request to check whether
    access to an s3 object is allowed.

    :param s3_uri: an s3 URI
    :param config: an AioAWSConfig
    :param s3_client: an AioBaseClient for s3
    :return: a tuple of ``(s3_uri: str, access: bool)``
    """
    try:
        head_response = await aio_s3_object_head(
            s3_uri, config=config, s3_client=s3_client
        )
        # A successful HEAD means the object exists and is readable.
        return s3_uri, bool(response_success(head_response))
    except ClientError as err:
        # handle_head_error_code returns False for access-denied/not-found
        # style errors; anything else is unexpected and propagates.
        if handle_head_error_code(err, s3_uri) is False:
            return s3_uri, False
        raise
async def s3_file_info(s3_uri: Union[S3URI, str], s3_client: AioBaseClient) -> S3Info:
    """
    Collect data from an S3 HEAD request for an S3URI

    :param s3_uri: a fully qualified S3 URI for the s3 object to read
    :param s3_client: a required aiobotocore.client.AioBaseClient for s3
    :return: an S3Info object with HEAD data on success;
        on failure the S3Info object has no HEAD data
    """
    uri = S3URI(s3_uri) if isinstance(s3_uri, str) else s3_uri
    info = S3Info(s3_uri=uri)
    try:
        head = await s3_client.head_object(Bucket=uri.bucket, Key=uri.key)
    except ClientError as err:
        # Best-effort: a failed HEAD leaves the S3Info without HEAD data.
        LOGGER.debug("Failed S3URI info: %s", uri)
        LOGGER.debug(err)
    else:
        if response_success(head):
            # LastModified is a datetime.datetime
            info.last_modified = head["LastModified"]
            info.s3_size = int(head["ContentLength"])
            LOGGER.debug("Success S3URI info: %s", uri)
    return info
async def test_aio_s3_list_buckets(aio_aws_s3_client, aio_s3_buckets):
    # The async client should list exactly the fixture buckets, in order.
    response = await aio_aws_s3_client.list_buckets()
    assert response_success(response)
    names = [bucket["Name"] for bucket in response["Buckets"]]
    assert names == aio_s3_buckets
def test_s3_list_buckets(aws_s3_client, s3_buckets):
    # The sync client should list exactly the fixture buckets, in order.
    response = aws_s3_client.list_buckets()
    assert response_success(response)
    names = [bucket["Name"] for bucket in response["Buckets"]]
    assert names == s3_buckets
async def aio_s3_objects_list(
    bucket_name: str,
    bucket_prefix: str,
    config: AioAWSConfig,
    s3_client: AioBaseClient,
) -> List[Dict]:
    """
    Asynchronous coroutine to collect all objects in a bucket prefix,
    following continuation tokens through every page of results.

    :param bucket_name: param passed to s3_client.list_objects_v2 'Bucket'
    :param bucket_prefix: param passed to s3_client.list_objects_v2 'Prefix'
    :param config: an AioAWSConfig
    :param s3_client: an AioBaseClient for s3
    :return: a list of s3 object data, e.g.

    .. code-block::

        >>> aio_s3_objects[0]
        {'ETag': '"192e29f360ea8297b5876b33b8419741"',
         'Key': 'ABI-L2-ADPC/2019/337/13/OR_ABI-L2-ADPC-M6_G16_s20193371331167_e20193371333539_c20193371334564.nc',
         'LastModified': datetime.datetime(2019, 12, 3, 14, 27, 5, tzinfo=tzutc()),
         'Size': 892913,
         'StorageClass': 'INTELLIGENT_TIERING'}

    :raises: botocore.exceptions.ClientError for any non-throttling client
        error; RuntimeError when throttling retries are exhausted

    .. seealso:
        - https://botocore.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Client.list_objects_v2
    """
    for tries in range(config.retries + 1):
        try:
            LOGGER.debug(
                "AWS S3 list objects, get first page: %s/%s",
                bucket_name,
                bucket_prefix,
            )
            response = await s3_client.list_objects_v2(
                Bucket=bucket_name, Prefix=bucket_prefix
            )
            LOGGER.debug("AWS S3 list objects OK? %s", response_success(response))
            s3_objects = []
            while response_success(response):
                # A successful listing with zero matches has no 'Contents'
                # key at all; default to an empty page rather than KeyError.
                s3_objects += response.get("Contents", [])
                if not response.get("IsTruncated"):
                    break
                LOGGER.debug(
                    "AWS S3 list objects, get next page: %s/%s",
                    bucket_name,
                    bucket_prefix,
                )
                next_token = response["NextContinuationToken"]
                response = await s3_client.list_objects_v2(
                    Bucket=bucket_name,
                    Prefix=bucket_prefix,
                    ContinuationToken=next_token,
                )
                LOGGER.debug(
                    "AWS S3 list objects OK? %s", response_success(response)
                )
            return s3_objects
        except ClientError as err:
            LOGGER.debug("AWS S3 list objects error: %s", err.response)
            err_code = err.response.get("Error", {}).get("Code")
            # Only throttling errors are retried (with jitter); any other
            # client error propagates to the caller instead of being
            # silently retried in a tight loop.
            if err_code == "TooManyRequestsException" and tries < config.retries:
                await jitter("s3-list-objects", config.min_jitter, config.max_jitter)
                continue  # allow it to retry, if possible
            raise
    raise RuntimeError("AWS S3 list objects exceeded retries")
async def invoke(
    self, config: AioAWSConfig, lambda_client: aiobotocore.client.AioBaseClient
) -> "AWSLambdaFunction":
    """
    Asynchronous coroutine to invoke a lambda function; this updates the
    ``response`` and calls the py:meth:`.read_response` method to handle
    the response.

    Throttling errors (``TooManyRequestsException``) are retried up to
    ``config.retries`` times with a random jitter delay; any other client
    error is logged and re-raised immediately.  Concurrency is bounded by
    ``config.semaphore``.

    :param config: aio session and client settings
    :param lambda_client: aio client for lambda
    :return: a lambda response
    :raises: botocore.exceptions.ClientError,
        botocore.exceptions.ParamValidationError
    """
    # Limit concurrent invocations across tasks sharing this config.
    async with config.semaphore:
        for tries in range(config.retries + 1):
            try:
                LOGGER.debug("AWS Lambda params: %s", self.params)
                response = await lambda_client.invoke(**self.params)
                # Record the raw response before inspecting it so callers
                # can examine it even when parsing below fails.
                self.response = response
                LOGGER.debug("AWS Lambda response: %s", self.response)
                if response_success(self.response):
                    await self.read_response()  # updates self.data
                    if self.data:
                        LOGGER.info("AWS Lambda invoked OK: %s", self.name)
                    else:
                        # HTTP-level success but no payload data; surface
                        # any function error reported in the response.
                        error = self.error
                        if error:
                            LOGGER.error(
                                "AWS Lambda error: %s, %s", self.name, error
                            )
                else:
                    # TODO: are there some failures that could be recovered here?
                    LOGGER.error("AWS Lambda invoke failure: %s", self.name)
                # Success and non-retryable failure both return self here;
                # only the throttling path below loops again.
                return self
            except botocore.exceptions.ClientError as err:
                response = err.response
                LOGGER.warning(
                    "AWS Lambda client error: %s, %s", self.name, response
                )
                error = response.get("Error", {})
                if error.get("Code") == "TooManyRequestsException":
                    if tries < config.retries:
                        # Random sleep to spread out retry storms.
                        await jitter(
                            "lambda-retry", config.min_jitter, config.max_jitter
                        )
                        continue  # allow it to retry, if possible
                    else:
                        LOGGER.error(
                            "AWS Lambda too many retries: %s, %s",
                            self.name,
                            response,
                        )
                else:
                    LOGGER.error(
                        "AWS Lambda client error: %s, %s", self.name, response
                    )
                # Preserve the error response on the instance before
                # propagating the exception.
                self.response = response
                raise
    # Defensive: only reachable if the loop exits without return/raise.
    raise RuntimeError("AWS Lambda invoke exceeded retries")
lambda_funcs = [] for i in range(N_lambdas): event = {"i": i} # event = {"action": "too-large"} # event = {"action": "runtime-error"} payload = json.dumps(event).encode() func = AWSLambdaFunction(name="lambda_dev", payload=payload) lambda_funcs.append(func) asyncio.run(run_lambda_functions(lambda_funcs)) # # Note: a thread pool executor is not faster than asyncio alone # asyncio.run(run_lambda_function_thread_pool(lambda_funcs, n_tasks=N_lambdas)) responses = [] for func in lambda_funcs: assert response_success(func.response) if N_lambdas < 3: print() print("Params:") pprint(func.params) print("Response:") pprint(func.response) print("Data:") pprint(func.data) print("JSON:") pprint(func.json) print("Error:") pprint(func.error) print("Logs") pprint(func.logs) elif N_lambdas < 20: