async def test_timeit(self, monkeypatch, mockConfig):
    from cloudwatch_metrics.metric_recorder_async import CloudwatchMetricRecorderAsync

    async with aioboto3.client(
            MockAwsCredentials.service,
            region_name=MockAwsCredentials.region,
            endpoint_url=MockAwsCredentials.endpoint_url) as mock_client:
        monkeypatch.setattr(CloudwatchMetricRecorderAsync, "client", mock_client)
        mock_class_first_iteration = await CloudwatchMetricRecorderAsync(
            config=mockConfig).timeit()
        mock_class_second_iteration = await mock_class_first_iteration(MockClass())
        mock_class_third_iteration = await mock_class_second_iteration(MockClass)

    async with aioboto3.client(
            MockAwsCredentials.service,
            region_name=MockAwsCredentials.region,
            endpoint_url=MockAwsCredentials.endpoint_url) as mock_client:
        metrics = await mock_client.list_metrics()
        metric = metrics["Metrics"][1]
        assert metric["Namespace"] == mockConfig.namespace
        assert metric["MetricName"] == MockClass().__name__
        assert metric["Dimensions"][0]["Name"] == MockClass().__name__
async def test_put_metric(self, monkeypatch, mockConfig):
    from cloudwatch_metrics.metric_recorder_async import CloudwatchMetricRecorderAsync

    async with aioboto3.client(
            MockAwsCredentials.service,
            region_name=MockAwsCredentials.region,
            endpoint_url=MockAwsCredentials.endpoint_url) as mock_client:
        monkeypatch.setattr(CloudwatchMetricRecorderAsync, "client", mock_client)
        await CloudwatchMetricRecorderAsync(config=mockConfig).put_metric(
            MockMetrics.mock_put_metric_name,
            MockMetrics.mock_put_metric_d_value,
            MockMetrics.mock_put_metric_value,
            MockMetrics.mock_put_metric_unit)

    async with aioboto3.client(
            MockAwsCredentials.service,
            region_name=MockAwsCredentials.region,
            endpoint_url=MockAwsCredentials.endpoint_url) as mock_client:
        metrics = await mock_client.list_metrics()
        metric = metrics["Metrics"][0]
        assert metric["Namespace"] == mockConfig.namespace
        assert metric["MetricName"] == MockMetrics.mock_put_metric_name
        assert metric["Dimensions"][0]["Value"] == MockMetrics.mock_put_metric_d_value
        assert metric["Dimensions"][0]["Name"] == MockMetrics.mock_put_metric_name
async def concurrent(self, func):
    regions = await self.get_regions()
    for region in regions:
        async with aioboto3.client(self.service_name, region_name=region) as client:
            r = await func(client)
            yield r
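# A minimal usage sketch for the `concurrent` helper above: it is an async
# generator, so callers consume results with `async for`. `Service` and
# `count_instances` are hypothetical names assumed for illustration.
async def count_instances_everywhere():
    service = Service(service_name="ec2")

    async def count_instances(client):
        response = await client.describe_instances()
        return sum(len(r["Instances"]) for r in response["Reservations"])

    async for per_region_count in service.concurrent(count_instances):
        print(per_region_count)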
async def notifications(self, msg):
    async with aioboto3.client(
            "sns",
            aws_access_key_id=self.config.aws_key_id,
            aws_secret_access_key=self.config.aws_secret_key,
            region_name=self.config.aws_region) as client:
        await client.publish(PhoneNumber=self.config.phone, Message=msg)
async def bulk_read(bucket, keys):
    tasks = []
    async with aioboto3.client("s3") as client:
        for key in keys:
            tasks.append(async_read_object(client, bucket, key))
        # gather inside the context manager so the client stays open
        # while the reads run
        data = await asyncio.gather(*tasks)
    return data
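# A sketch of the `async_read_object` helper that `bulk_read` above assumes:
# it likely awaits `get_object` and reads the body. The bucket and key names
# in the driver below are hypothetical.
import asyncio

async def async_read_object(client, bucket, key):
    response = await client.get_object(Bucket=bucket, Key=key)
    return await response["Body"].read()

if __name__ == "__main__":
    bodies = asyncio.run(bulk_read("example-bucket", ["a.json", "b.json"]))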
async def create_forward(self, fqdn, ip_address):
    if not self.dns_forward_enable:
        self.log.info("{0}:aws: forward DNS is disabled".format(self.name))
        return
    if self.dry_run:
        return
    async with aioboto3.client('route53') as client:
        try:
            await client.change_resource_record_sets(
                HostedZoneId=self.dns_forward_zone,
                ChangeBatch={
                    "Comment": "CatWeazle: add {0} in A {1}".format(fqdn, ip_address),
                    "Changes": [{
                        "Action": "UPSERT",
                        "ResourceRecordSet": {
                            "Name": fqdn,
                            "Type": "A",
                            "TTL": 300,
                            "ResourceRecords": [{"Value": ip_address}]
                        }
                    }]
                })
        except ClientError:
            pass
async def delete_arpa(self, fqdn, ip_address):
    if not self.dns_arpa_enable:
        self.log.info("{0}:aws: reverse DNS is disabled".format(self.name))
        return
    ip_addr = ipaddress.IPv4Address(ip_address)
    subnet_id = self.arpa_responsible(ip_address=ip_addr)
    if not subnet_id:
        return
    if self.dry_run:
        return
    async with aioboto3.client('route53') as client:
        try:
            await client.change_resource_record_sets(
                HostedZoneId=subnet_id,
                ChangeBatch={
                    "Comment": "CatWeazle: remove {0} in PTR {1}".format(fqdn, ip_address),
                    "Changes": [{
                        "Action": "DELETE",
                        "ResourceRecordSet": {
                            "Name": ip_addr.reverse_pointer,
                            "Type": "PTR",
                            "TTL": 300,
                            "ResourceRecords": [{"Value": fqdn}]
                        }
                    }]
                })
        except ClientError:
            pass
async def _report(self):
    import aioboto3
    async with self.lock:
        num_metrics = len(self.metrics) + len(self.statistics)
        metric_data = self._calculate_metrics() + self._calculate_statistics()
        # CloudWatch caps the number of data points per put_metric_data call,
        # so report in batches of MAX_METRICS_PER_REPORT
        for n in range(
                math.ceil(len(metric_data) /
                          CloudWatchAsyncMetricReporter.MAX_METRICS_PER_REPORT)):
            batch = metric_data[
                n * CloudWatchAsyncMetricReporter.MAX_METRICS_PER_REPORT:
                (n + 1) * CloudWatchAsyncMetricReporter.MAX_METRICS_PER_REPORT]
            if CloudWatchAsyncMetrics.debug_level > 1:
                log.debug('Namespace: {}'.format(CloudWatchAsyncMetrics.namespace))
                log.debug('Metric data: {}'.format(batch))
            async with aioboto3.client('cloudwatch') as client:
                response = await client.put_metric_data(
                    Namespace=CloudWatchAsyncMetrics.namespace,
                    MetricData=batch)
                if response.get('ResponseMetadata', {}).get('HTTPStatusCode') != 200:
                    log.warning(
                        'Failed reporting metrics to CloudWatch; response={}'.format(response))
        log.debug('Reported {} metrics to CloudWatch'.format(num_metrics))
async def notifications(msg):
    async with aioboto3.client('sns',
                               aws_access_key_id=aws_key_id,
                               aws_secret_access_key=aws_secret_key,
                               region_name=aws_region) as client:
        await client.publish(PhoneNumber=phone, Message=msg)
async def main():
    limiter = AsyncLimiter(40, 1)
    tasks = set()
    # load the image IDs we've already processed from previous runs
    img_ids = set(
        os.path.basename(s)[6:].split(".")[0]
        for s in glob.glob(os.path.join(OUTPUT_PATH, "image_*"))
    )
    async with NASA_API as napi:
        async with aiohttp.ClientSession() as session:
            async with aioboto3.client("rekognition") as rk:
                # the lower progress bar represents the progress of the `rekognize` tasks
                pbar = tqdm.tqdm(position=1, total=0)
                # these search parameters can be changed to get a variety of images
                search = await napi.search(center="JSC", media_type="image", q="dock")
                # the upper progress bar represents the progress of the NASA API search
                async for item in tqdm.asyncio.tqdm(search, position=0):
                    # skip images that have already been processed
                    if item["nasa_id"] not in img_ids:
                        img_ids.add(item["nasa_id"])
                        # enqueue a task that will fetch the image data, run AWS
                        # Rekognition, and then write the output to disk
                        tasks.add(
                            asyncio.create_task(
                                rekognize(session, rk, pbar, limiter, item)
                            )
                        )
                        pbar.total += 1
                # wait for all rekognition tasks to finish
                await asyncio.gather(*tasks)
async def get_regions(self):
    # regions are cached on the class after the first lookup
    if hasattr(Service, "regions"):
        return Service.regions
    async with aioboto3.client("ec2") as client:
        response = await client.describe_regions()
        Service.regions = [r["RegionName"] for r in response["Regions"]]
    return Service.regions
async def aio_sts_assume_role(src_role_arn, dest_role_arn, dest_external_id=None):
    session_name = ''.join(random.choice('0123456789ABCDEF') for i in range(16))
    async with aioboto3.client('sts') as sts:
        src_role = await sts.assume_role(
            RoleArn=src_role_arn, RoleSessionName=session_name)
    async with aioboto3.Session(
            aws_access_key_id=src_role['Credentials']['AccessKeyId'],
            aws_secret_access_key=src_role['Credentials']['SecretAccessKey'],
            aws_session_token=src_role['Credentials']['SessionToken'],
    ).client('sts') as sts_client:
        sts_role = await (
            sts_client.assume_role(
                RoleArn=dest_role_arn,
                RoleSessionName=session_name,
                ExternalId=dest_external_id,
            )
            if dest_external_id
            else sts_client.assume_role(
                RoleArn=dest_role_arn, RoleSessionName=session_name)
        )
    return aioboto3.Session(
        aws_access_key_id=sts_role['Credentials']['AccessKeyId'],
        aws_secret_access_key=sts_role['Credentials']['SecretAccessKey'],
        aws_session_token=sts_role['Credentials']['SessionToken'],
    )
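# A usage sketch for the double-hop assume-role helper above; both role ARNs
# are placeholders. The returned aioboto3.Session carries the destination
# role's temporary credentials, so any client created from it acts as that role.
async def list_buckets_as_role():
    session = await aio_sts_assume_role(
        src_role_arn="arn:aws:iam::111111111111:role/intermediate",
        dest_role_arn="arn:aws:iam::222222222222:role/target")
    async with session.client('s3') as s3:
        print(await s3.list_buckets())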
async def test_getting_client(event_loop):
    """Simple getting of client."""
    client = aioboto3.client('ssm', loop=event_loop, region_name='eu-central-1')
    assert isinstance(client, AioBaseClient)
    await client.close()
async def send_request(self, file):
    """PUT request to S3 from a local file."""
    print(f'PUT Request for: {file}')
    path = os.path.join(os.path.dirname(__file__), file)
    async with aioboto3.client('s3', **self.client_params) as s3:
        await s3.upload_file(Filename=path, **self._req_params(file))
    print('PUT Request Completed Successfully')
def __init__(self, stream_name, checkpoint_table=None, host_key=None,
             shard_iterator_type=None, iterator_timestamp=None,
             shard_iterators=None, recover_from_dynamo=False,
             iterator_sequence_number=None, custom_kinesis_client=None):
    """
    Initialize Async Kinesis Consumer
    :param stream_name: stream name to read from
    :param checkpoint_table: DynamoDB table for checkpointing; if not set, checkpointing is not used
    :param host_key: key to identify the reader instance; if not set, defaults to the FQDN
    :param shard_iterator_type: type of shard iterator, see
        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/kinesis.html#Kinesis.Client.get_shard_iterator
    :param iterator_timestamp: timestamp (datetime type) for a shard iterator of type 'AT_TIMESTAMP'; see link above
    :param iterator_sequence_number: sequence number for a shard iterator of type 'AT_SEQUENCE_NUMBER'; see link above
    :param shard_iterators: list of shard iterators; if given, the consumer reads only those shards and ignores others
    :param recover_from_dynamo: if True, try to recover the last read sequence number from DynamoDB during
        initialization; if successful, shard_iterator_type is ignored
    :param custom_kinesis_client (aiobotocore.client.Kinesis, optional): custom Kinesis client to use instead of
        the basic client instantiated in this class; leave as None for the default behaviour
    """
    super(AsyncKinesisConsumer, self).__init__()
    self.stream_name = stream_name
    self.shard_iterator_type = shard_iterator_type
    self.iterator_timestamp = iterator_timestamp
    self.iterator_sequence_number = iterator_sequence_number
    self.restricted_shard_iterators = shard_iterators
    if recover_from_dynamo and not checkpoint_table:
        raise RuntimeError('Can not use recover_from_dynamo without checkpoint table')
    self.recover_from_dynamodb = recover_from_dynamo
    # Allow a custom kinesis client to be passed in. This allows for setting of any
    # additional parameters in the client without needing to track them in this library.
    if custom_kinesis_client is not None:
        self.kinesis_client = custom_kinesis_client
    else:
        self.kinesis_client = aioboto3.client('kinesis')
    self.checkpoint_table = checkpoint_table
    self.checkpoint_callback = None
    self.host_key = host_key
    self.shard_readers = {}
    self.dynamodb_instances = {}
    self.stream_data = None
    self.force_rescan = True
    self.checkpoint_interval = AsyncKinesisConsumer.DEFAULT_CHECKPOINT_INTERVAL
    self.lock_holding_time = AsyncKinesisConsumer.DEFAULT_LOCK_HOLDING_TIME
    self.reader_sleep_time = AsyncKinesisConsumer.DEFAULT_SLEEP_TIME
    self.fallback_time_delta = AsyncKinesisConsumer.DEFAULT_FALLBACK_TIME_DELTA
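# A minimal construction sketch for the consumer above, with DynamoDB
# checkpointing enabled; the stream and table names are hypothetical.
consumer = AsyncKinesisConsumer(
    stream_name="example-stream",
    checkpoint_table="example-checkpoints",
    shard_iterator_type="LATEST",
    recover_from_dynamo=True)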
async def get_raw_request(self, file):
    """GET request from S3, returning the object body as a string."""
    print(f'GET Request for: {file} - Returning raw data')
    async with aioboto3.client('s3', **self.client_params) as s3:
        resp = await s3.get_object(**self._req_params(file))
        raw_out = await resp['Body'].read()
    print('GET Request Completed Successfully')
    return raw_out.decode('utf-8')
async def get_request(self, file):
    """GET request from S3, saving to a local file."""
    print(f'GET Request for: {file}')
    async with aioboto3.client('s3', **self.client_params) as s3:
        save_path = os.path.join(os.path.dirname(__file__), file)
        await s3.download_file(Filename=save_path, **self._req_params(file))
    print('GET Request Completed Successfully')
async def setup(self):
    # asyncio.get_running_loop is only available from Python 3.7 on
    if sys.version_info < (3, 7):
        self._loop = asyncio.get_event_loop()
    else:
        self._loop = asyncio.get_running_loop()
    self._s3_client = aioboto3.client('s3', **self._s3_client_args)
    await self._crypto_context.setup()
async def _get_route53_client(role):
    route53_role = await sts_client.assume_role(
        RoleArn=role, RoleSessionName="ScanPlanSession")
    assumed_creds = route53_role["Credentials"]
    return aioboto3.client(
        "route53",
        aws_access_key_id=assumed_creds['AccessKeyId'],
        aws_secret_access_key=assumed_creds['SecretAccessKey'],
        aws_session_token=assumed_creds['SessionToken'])
async def __aenter__(self):
    self.ssm_client = aioboto3.client("ssm", region_name=self.region)
    self.sqs_client = aioboto3.client("sqs", region_name=self.region)
    self.sns_client = aioboto3.client("sns", region_name=self.region)
    to_load = self.ssm_parameters_to_load()
    print(f"Loading params {to_load}")
    params = await self.ssm_client.get_parameters(Names=to_load)
    self.ssm_params = {p["Name"]: p["Value"] for p in params["Parameters"]}
    print(f"Loaded params {self.ssm_params}")
    self.sqs_input_queue_url = self.ssm_params[self.sqs_input_queue]
    self.sns_output_notifier_arn = self.ssm_params[self.sns_output_notifier]
    print(f"Testing scan from queue {self.sqs_input_queue_url} "
          f"to topic {self.sns_output_notifier_arn}")
    return self
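# Since __aenter__ above returns self after loading its SSM parameters, the
# enclosing class is used as an async context manager; `ScanTester` is a
# hypothetical name for that class.
async def run_scan_test():
    async with ScanTester(region="eu-west-1") as tester:
        print(tester.sqs_input_queue_url, tester.sns_output_notifier_arn)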
async def put(self, data: bytes, metadata: Metadata) -> bytes:
    name = sha256(data, metadata.to_dict())
    async with aioboto3.client("s3", endpoint_url=self.endpoint_url) as s3:
        await s3.put_object(
            Key=self.path_for_name(name),
            Bucket=self.bucket,
            Body=gzip.compress(data),
            Metadata=metadata.to_dict(),
        )
    return name
async def __call__(self, file_like: IO, file_name: str) -> AnyHttpUrl:
    """Uploads the file."""
    async with aioboto3.client("s3", **asdict(self.config)) as s3:
        await s3.upload_fileobj(file_like, self.bucket, f"{self.dir}/{file_name}")
    return cast(
        AnyHttpUrl,
        f"{self.config.endpoint_url}/{self.bucket}/{self.dir}/{file_name}",
    )
async def get_metadata(self, name: str) -> Metadata:
    async with aioboto3.client("s3", endpoint_url=self.endpoint_url) as s3:
        try:
            response = await s3.head_object(
                Key=self.path_for_name(name), Bucket=self.bucket)
            metadata = Metadata.from_dict(response["Metadata"])
        # note: botocore reports a missing key on head_object as a generic
        # ClientError with a 404 status rather than NoSuchKey, so this
        # clause may not catch it
        except s3.exceptions.NoSuchKey:
            return None
    return metadata
async def delete_file(name):
    conf = {
        'service_name': 's3',
        'endpoint_url': 'https://storage.yandexcloud.net',
        'aws_access_key_id': aws_key_id,
        'aws_secret_access_key': aws_secret_key
    }
    async with aioboto3.client(**conf) as s3:
        forDeletion = [{'Key': name}]
        await s3.delete_objects(Bucket='speechrecognition',
                                Delete={'Objects': forDeletion})
async def get(self, name: str) -> (bytes, Metadata):
    async with aioboto3.client("s3", endpoint_url=self.endpoint_url) as s3:
        try:
            response = await s3.get_object(
                Key=self.path_for_name(name),
                Bucket=self.bucket,
            )
            data = gzip.decompress(await response["Body"].read())
            metadata = Metadata.from_dict(response["Metadata"])
        except s3.exceptions.NoSuchKey:
            return None
    return (data, metadata)
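# A round-trip sketch over the store above: put() returns the content-derived
# name, and get() returns the decompressed bytes and metadata, or None when
# the key is missing. The `store`, `payload`, and `metadata` arguments are
# assumptions for illustration.
async def round_trip(store, payload, metadata):
    name = await store.put(payload, metadata)
    result = await store.get(name)
    assert result is not None
    data, meta = result
    assert data == payload
    return meta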
async def check_account(entry, data):
    async with aioboto3.client(
            'lambda',
            region_name=random.choice(functions).split(':')[3]) as client:
        failure = True
        tries = 0
        max_tries = 10
        while failure and tries < max_tries:
            response = await client.invoke(
                FunctionName=random.choice(functions),
                InvocationType='RequestResponse',
                LogType='None',
                Payload=json.dumps({
                    'site': 'LOLNA',
                    'user_name': data[0],
                    'password': data[1]
                }))
            result = json.loads(await response['Payload'].read())
            if 'HTTPError' in result:
                failure = True
                result_stats['retries'] += 1
                # asyncio.sleep, not time.sleep: time.sleep is not awaitable
                # and would block the event loop
                await asyncio.sleep(15)
                tries += 1
                continue
            if 'message' not in result:
                result_stats['retries'] += 1
                tries += 1
                continue
            failure = False
        if tries == max_tries and failure:
            # all attempts failed; record the entry for a later retry and stop,
            # since `result` has no 'message' to parse
            retries.append(entry)
            return
        token_response = json.loads(result['message'])
        # sometimes lambda will give us back a bad response
        if not token_response:
            return
        # determine if invalid, error, or hit
        if 'access_token' in token_response:
            result_list.write("{}\n".format(entry))
            result_stats['hits'] += 1
        elif 'Access denied' in token_response:
            result_stats['retries'] += 1
        elif 'invalid_credentials' in token_response:
            result_stats['failures'] += 1
async def detect(self, data: FileImages):
    """
    Face recognition method
    :param list image_files: list of file names
    """
    async with aioboto3.client(
            'rekognition',
            region_name=CONFIG['AWS_DEFAULT_REGION'],
            aws_access_key_id=CONFIG['AWS_ACCESS_KEY_ID'],
            aws_secret_access_key=CONFIG['AWS_SECRET_ACCESS_KEY'],
    ) as client:
        await asyncio.gather(
            *[self.__single_file(image, client) for image in data.images])
def __init__(self, stream_name, ordered=True):
    self.stream_name = stream_name
    self.ordered = ordered
    self.seq = '0'
    self.record_buf = []
    self.buf_size = 0
    client = aioboto3.client('kinesis')
    self.kinesis_client = RetriableKinesisProducer(client=client)
    log.debug("Configured kinesis producer for stream '%s'; ordered=%s",
              stream_name, ordered)
def client(self):
    try:
        url, key, secret = None, None, None
        # the caller owns the returned client and is responsible for closing it
        return aioboto3.client('s3',
                               endpoint_url=url,
                               aws_access_key_id=key,
                               aws_secret_access_key=secret)
    except Exception as e:
        print(e)
        raise Exception('Error on creating S3 Client') from e
async def copy_object_async(src_bucket, src_prefix, dest_bucket, dest_prefix, s3_key):
    s3_filename = s3_key[(s3_key.index('/') + 1):]
    dest_key = f"{dest_prefix}/{s3_filename}"
    async with aioboto3.client("s3") as s3_async:
        # print(f"{threading.get_ident()}: begin copying {src_bucket}/{s3_key} to {dest_bucket}/{dest_key}")
        task = await s3_async.copy_object(Bucket=dest_bucket,
                                          Key=dest_key,
                                          CopySource={
                                              'Bucket': src_bucket,
                                              'Key': s3_key
                                          })
        # print(f"{threading.get_ident()}: done copying {src_bucket}/{s3_key} to {dest_bucket}/{dest_key}")
    return task
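# A fan-out sketch for `copy_object_async` above: one coroutine per key, run
# concurrently with asyncio.gather. The bucket, prefix, and key values are
# hypothetical.
import asyncio

async def copy_many(keys):
    await asyncio.gather(*[
        copy_object_async("src-bucket", "in", "dst-bucket", "out", key)
        for key in keys])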