import os

import boto3


def main(event, _ctxt=None, sns_client=None):
    print(f'event={event!r}')

    sns_client = sns_client or boto3.client('sns')
    topic_arn = os.environ['TOPIC_ARN']

    for sns_event in extract_sns_messages_from_lambda_event(event):
        image = sns_event.message

        # Only create a reindex job if the desired version is ahead of
        # the current version; otherwise skip this shard.
        if image['desiredVersion'] > image['currentVersion']:
            print(f"{image['desiredVersion']} > {image['currentVersion']}, creating job")
        else:
            print(f"{image['desiredVersion']} <= {image['currentVersion']}, nothing to do")
            continue

        message = {
            'shardId': image['shardId'],
            'desiredVersion': image['desiredVersion'],
        }
        publish_sns_message(
            sns_client=sns_client,
            topic_arn=topic_arn,
            message=message,
            subject='Reindex job from reindex_job_creator'
        )
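

# Illustrative only: the shape of the decoded SNS message this handler
# expects, inferred from the fields read above (all values made up).
example_image = {
    'shardId': 'example-shard',   # hypothetical shard identifier
    'currentVersion': 1,
    'desiredVersion': 2,
}
# Because desiredVersion > currentVersion here, main() would publish
# {'shardId': 'example-shard', 'desiredVersion': 2} to TOPIC_ARN.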


import logging
import uuid

logger = logging.getLogger(__name__)


def post_ingest_request(event, sns_client, topic_arn):
    request = event['body']
    path = event.get('path', '')
    try:
        upload_url = request['uploadUrl']
        callback_url = request.get('callbackUrl', None)
    except TypeError:
        raise TypeError(f"[BadRequest] Invalid request, not JSON: {request}")
    except KeyError as key_error:
        raise KeyError(
            f"[BadRequest] Invalid request, missing '{key_error.args[0]}' in {request}"
        )

    ingest_request_id = str(uuid.uuid4())
    logger.debug('ingest_request_id: %r', ingest_request_id)

    message = archive_bag_message(ingest_request_id, upload_url, callback_url)
    logger.debug('sns-message: %r', message)

    topic_name = topic_arn.split(':')[-1]
    publish_sns_message(
        sns_client=sns_client,
        topic_arn=topic_arn,
        message=message,
        subject=f'source: archive_ingest ({topic_name})'
    )
    logger.debug('published: %r to %r', message, topic_arn)

    return {
        'id': ingest_request_id,
        'location': join_url((path, ingest_request_id))
    }
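

# A sketch of how post_ingest_request might be exercised without real
# AWS. FakeSNSClient and the event below are hypothetical, and this
# assumes publish_sns_message ultimately calls sns_client.publish(...).
class FakeSNSClient:
    """Records publish calls instead of talking to SNS."""

    def __init__(self):
        self.published = []

    def publish(self, **kwargs):
        self.published.append(kwargs)
        return {'MessageId': 'fake-message-id'}


fake_event = {
    'body': {'uploadUrl': 's3://example-bucket/bag.zip'},  # hypothetical URL
    'path': '/ingests',
}
result = post_ingest_request(
    event=fake_event,
    sns_client=FakeSNSClient(),
    topic_arn='arn:aws:sns:eu-west-1:123456789012:example-topic',
)
# result looks like {'id': '<uuid4>', 'location': join_url((path, id))}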


import os

import attr
import boto3


def main(event=None, _ctxt=None, sns_client=None):
    print(os.environ)
    sns_client = sns_client or boto3.client('sns')

    topic_arn = os.environ['TOPIC_ARN']
    public_bucket_name = os.environ['PUBLIC_BUCKET_NAME']
    public_object_key_v1 = os.environ['PUBLIC_OBJECT_KEY_V1']
    public_object_key_v2 = os.environ['PUBLIC_OBJECT_KEY_V2']

    # Request one snapshot per API version.
    for (api_version, public_object_key) in [
        ('v1', public_object_key_v1),
        ('v2', public_object_key_v2),
    ]:
        snapshot_request_message = SnapshotRequest(
            apiVersion=api_version,
            publicBucketName=public_bucket_name,
            publicObjectKey=public_object_key,
        )
        publish_sns_message(
            sns_client=sns_client,
            topic_arn=topic_arn,
            message=attr.asdict(snapshot_request_message),
            subject='source: snapshot_scheduler.main'
        )
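

# The real SnapshotRequest class lives elsewhere in the codebase; a
# minimal attrs definition consistent with the constructor call and the
# attr.asdict() usage above might look like this (a sketch, not the
# actual class):
import attr


@attr.s
class SnapshotRequest:
    apiVersion = attr.ib()
    publicBucketName = attr.ib()
    publicObjectKey = attr.ib()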


import json
from datetime import datetime

import boto3

import sns_utils


def test_publish_sns_message(sns_sqs):
    sns_client = boto3.client('sns')
    sqs_client = boto3.client('sqs')
    topic_arn, queue_url = sns_sqs

    test_message = {
        'string': 'a',
        'number': 1,
        'date': datetime.strptime('Jun 1 2005 1:33PM', '%b %d %Y %I:%M%p')
    }
    # publish_sns_message is expected to serialise the datetime to an
    # ISO 8601 string on the way through.
    expected_decoded_message = {
        'string': 'a',
        'number': 1,
        'date': '2005-06-01T13:33:00'
    }

    sns_utils.publish_sns_message(sns_client, topic_arn, test_message)

    # The queue is subscribed to the topic, so the published message
    # arrives wrapped in the standard SNS notification envelope.
    messages = sqs_client.receive_message(
        QueueUrl=queue_url,
        MaxNumberOfMessages=1
    )
    message_body = messages['Messages'][0]['Body']
    inner_message = json.loads(message_body)['Message']
    actual_decoded_message = json.loads(inner_message)

    assert (
        json.loads(actual_decoded_message['default'])
        == expected_decoded_message
    )
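

# One plausible implementation of the sns_sqs fixture used above: build
# a topic and a queue with moto and subscribe one to the other. This is
# a sketch assuming moto >= 5 (mock_aws); the real fixture may differ.
import os

import boto3
import pytest
from moto import mock_aws


@pytest.fixture
def sns_sqs():
    # Give boto3 a region and fake credentials so clients created
    # without explicit configuration (as in the test above) still work.
    os.environ.setdefault('AWS_DEFAULT_REGION', 'us-east-1')
    os.environ.setdefault('AWS_ACCESS_KEY_ID', 'testing')
    os.environ.setdefault('AWS_SECRET_ACCESS_KEY', 'testing')

    with mock_aws():
        sns_client = boto3.client('sns')
        sqs_client = boto3.client('sqs')

        topic_arn = sns_client.create_topic(Name='test-topic')['TopicArn']
        queue_url = sqs_client.create_queue(QueueName='test-queue')['QueueUrl']
        queue_arn = sqs_client.get_queue_attributes(
            QueueUrl=queue_url,
            AttributeNames=['QueueArn'],
        )['Attributes']['QueueArn']

        sns_client.subscribe(
            TopicArn=topic_arn,
            Protocol='sqs',
            Endpoint=queue_arn,
        )
        yield topic_arn, queue_url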


import os

import boto3


def main(event=None, _ctxt=None, sns_client=None):
    sns_client = sns_client or boto3.client("sns")

    topic_arn = os.environ["TOPIC_ARN"]
    window_length_minutes = int(os.environ["WINDOW_LENGTH_MINUTES"])
    print(
        f"topic_arn={topic_arn}, window_length_minutes={window_length_minutes}"
    )

    message = build_window(minutes=window_length_minutes)
    publish_sns_message(
        sns_client=sns_client,
        topic_arn=topic_arn,
        message=message,
        subject="source: sierra_window_generator.main",
    )
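

# Hypothetical smoke test: run the window generator against a stub so
# nothing is published to real SNS. This assumes publish_sns_message
# calls sns_client.publish(...); all values below are illustrative.
class _StubSNSClient:
    def publish(self, **kwargs):
        print('would publish:', kwargs)
        return {'MessageId': 'stub'}


os.environ['TOPIC_ARN'] = 'arn:aws:sns:eu-west-1:123456789012:windows'
os.environ['WINDOW_LENGTH_MINUTES'] = '30'
main(sns_client=_StubSNSClient())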


import os

import boto3


def main(event, _ctxt=None, sns_client=None):
    print(f'Received event: {event!r}')

    topic_arn = os.environ['TOPIC_ARN']
    stream_view_type = os.environ.get('STREAM_VIEW_TYPE', 'FULL_EVENT')
    sns_client = sns_client or boto3.client('sns')

    # Forward every record from the triggering stream event to SNS.
    for message in get_sns_messages(
        trigger_event=event,
        stream_view_type=stream_view_type
    ):
        publish_sns_message(
            sns_client=sns_client,
            topic_arn=topic_arn,
            message=message
        )
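

# For reference, a trimmed DynamoDB stream trigger event of the kind
# this handler receives (standard AWS envelope; values illustrative).
example_trigger_event = {
    'Records': [
        {
            'eventName': 'MODIFY',
            'eventSource': 'aws:dynamodb',
            'dynamodb': {
                'Keys': {'id': {'S': 'example-id'}},
                'NewImage': {'id': {'S': 'example-id'}},
                'StreamViewType': 'NEW_AND_OLD_IMAGES',
            },
        }
    ]
}
# get_sns_messages() turns records like this into SNS messages; which
# parts of each record are forwarded is controlled by STREAM_VIEW_TYPE.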


import os

import boto3


def main(event, _):
    table_name = os.environ["TABLE_NAME"]
    topic_arn = os.environ["TOPIC_ARN"]
    age_boundary_mins = int(os.environ["AGE_BOUNDARY_MINS"])

    sns_client = boto3.client('sns')
    dynamodb = boto3.resource('dynamodb')
    table = dynamodb.Table(table_name)

    deployments = get_deployments_from_dynamo(table)
    old_deployments = filter_old_deployments(deployments, age_boundary_mins)

    # Only publish when there are deployments older than the age boundary.
    if old_deployments:
        publish_sns_message(
            sns_client=sns_client,
            topic_arn=topic_arn,
            message=old_deployments
        )
    print(f'old_deployments = {old_deployments!r}')
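

# A minimal sketch of what filter_old_deployments might do, assuming
# each deployment dict carries an ISO 8601 'deployedAt' timestamp. The
# helper name suffix and the field name are hypothetical; the real
# implementation lives elsewhere in the codebase and may differ.
from datetime import datetime, timedelta


def filter_old_deployments_sketch(deployments, age_boundary_mins):
    boundary = datetime.utcnow() - timedelta(minutes=age_boundary_mins)
    return [
        d for d in deployments
        if datetime.fromisoformat(d['deployedAt']) < boundary
    ]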


import json
import os

import boto3

import s3_utils
import sns_utils


def main(event, _ctxt=None, s3_client=None, sns_client=None):
    topic_arn = os.environ["TOPIC_ARN"]
    s3_client = s3_client or boto3.client('s3')
    sns_client = sns_client or boto3.client('sns')

    s3_events = s3_utils.parse_s3_record(event=event)
    assert len(s3_events) == 1
    s3_event = s3_events[0]

    # Fetch the JSON file named in the event, then fan out one SNS
    # message per record it contains.
    resp = s3_client.get_object(
        Bucket=s3_event['bucket_name'],
        Key=s3_event['object_key']
    )
    body = resp['Body'].read()
    records = json.loads(body)

    for r in records:
        sns_utils.publish_sns_message(
            sns_client=sns_client,
            topic_arn=topic_arn,
            message=r,
            subject='source: s3_demultiplexer.main'
        )
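

# Illustrative only: the S3 object is expected to hold a JSON array,
# and each element becomes its own SNS message (values made up).
example_body = b'[{"id": "record-1"}, {"id": "record-2"}]'
# json.loads(example_body) yields two records, so main() would make
# two separate publish calls against TOPIC_ARN.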


import os

import attr
import boto3


def main(event=None, _ctxt=None, sns_client=None):
    print(os.environ)
    sns_client = sns_client or boto3.client("sns")

    topic_arn = os.environ["TOPIC_ARN"]
    public_bucket_name = os.environ["PUBLIC_BUCKET_NAME"]
    public_object_key_v2 = os.environ["PUBLIC_OBJECT_KEY_V2"]

    # Only the v2 API gets a snapshot in this version of the scheduler.
    for (api_version, public_object_key) in [("v2", public_object_key_v2)]:
        snapshot_request_message = SnapshotRequest(
            apiVersion=api_version,
            publicBucketName=public_bucket_name,
            publicObjectKey=public_object_key,
        )
        publish_sns_message(
            sns_client=sns_client,
            topic_arn=topic_arn,
            message=attr.asdict(snapshot_request_message),
            subject="source: snapshot_scheduler.main",
        )


import json
import time


def drain_ecs_container_instance(asg_client, ec2_client, ecs_client,
                                 sns_client, event):
    topic_arn = event['Records'][0]['Sns']['TopicArn']
    message = event['Records'][0]['Sns']['Message']
    message_data = json.loads(message)

    # Only lifecycle hook notifications are interesting; ignore
    # anything else published to the topic.
    if 'AutoScalingGroupName' not in message_data:
        return
    if 'LifecycleHookName' not in message_data:
        return

    ec2_instance_id = message_data['EC2InstanceId']
    asg_group_name = message_data['AutoScalingGroupName']
    lifecycle_hook_name = message_data['LifecycleHookName']
    lifecycle_transition = message_data['LifecycleTransition']
    lifecycle_action_token = message_data['LifecycleActionToken']

    if lifecycle_transition == 'autoscaling:EC2_INSTANCE_TERMINATING':
        tags_dict = get_ec2_tags(ec2_client, ec2_instance_id)
        try:
            cluster_arn = tags_dict['clusterArn']
            ecs_container_instance_arn = tags_dict['containerInstanceArn']
        except KeyError:
            # The instance isn't tagged as an ECS container instance,
            # so there's nothing to drain: let termination proceed.
            continue_lifecycle_action(
                asg_client, asg_group_name, ec2_instance_id,
                lifecycle_hook_name
            )
            return

        running_tasks = ecs_client.list_tasks(
            cluster=cluster_arn,
            containerInstance=ecs_container_instance_arn
        )
        print(f"running tasks: {running_tasks['taskArns']}")

        if not running_tasks['taskArns']:
            # No tasks left on the instance: allow termination.
            continue_lifecycle_action(
                asg_client, asg_group_name, ec2_instance_id,
                lifecycle_hook_name
            )
        else:
            # Tasks are still running: keep the lifecycle hook alive,
            # make sure the instance is draining, then re-publish the
            # message to the same topic so this handler runs again and
            # re-checks later.
            asg_client.record_lifecycle_action_heartbeat(
                LifecycleHookName=lifecycle_hook_name,
                AutoScalingGroupName=asg_group_name,
                LifecycleActionToken=lifecycle_action_token,
                InstanceId=ec2_instance_id,
            )

            container_instance_info = ecs_client.describe_container_instances(
                cluster=cluster_arn,
                containerInstances=[ecs_container_instance_arn],
            )
            status = container_instance_info['containerInstances'][0]['status']
            if status != 'DRAINING':
                set_container_instance_to_draining(
                    ecs_client, cluster_arn, ecs_container_instance_arn
                )

            time.sleep(30)
            publish_sns_message(
                sns_client=sns_client,
                topic_arn=topic_arn,
                message=message_data
            )
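

# For reference, the fields this handler reads from the notification;
# this is the standard shape of an Auto Scaling lifecycle hook SNS
# message (values illustrative).
example_lifecycle_message = {
    'AutoScalingGroupName': 'example-asg',
    'LifecycleHookName': 'example-hook',
    'EC2InstanceId': 'i-0123456789abcdef0',
    'LifecycleTransition': 'autoscaling:EC2_INSTANCE_TERMINATING',
    'LifecycleActionToken': 'example-token',
}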