Example #1
def parse_notification_configuration(notification_config: Dict,
                                     pool=None) -> List[S3Notification]:
    """Convert a bucket notification configuration into S3Notification objects.

    ``notification_config`` is the response of
    https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Client.get_bucket_notification_configuration

    :param notification_config: dict keyed by configuration type
        (``QueueConfigurations``, ``TopicConfigurations``,
        ``LambdaFunctionConfigurations``); other keys are ignored
    :param pool: optional cache dict forwarded to ``EventSource.get``
    :return: list of S3Notification objects with their ``target`` resolved
    """
    # maps each configuration type to the key holding the target ARN
    arn_key_by_type = {
        "QueueConfigurations": "QueueArn",
        "TopicConfigurations": "TopicArn",
        "LambdaFunctionConfigurations": "LambdaFunctionArn",
    }
    result = []
    for config_type, entries in notification_config.items():
        arn_key = arn_key_by_type.get(config_type)
        if arn_key is None:
            # skip non-configuration keys (e.g. ResponseMetadata)
            continue
        for entry in entries:
            # best effort per entry: a malformed entry or failed lookup is
            # logged and skipped, the remaining entries are still processed
            try:
                target = EventSource.get(entry[arn_key], pool=pool)
                notification = S3Notification(target.id)
                notification.target = target
                result.append(notification)
            except Exception as e:
                LOG.warning("error parsing s3 notification: %s", e)
    return result
Example #2
def extract_endpoints(code_map, pool=None):
    """Extract references to other AWS resources from Lambda source code.

    Scans each code snippet in ``code_map`` with regexes for Elasticsearch
    domain endpoints, Firehose ``put_record_batch`` calls, DynamoDB-style
    ``insert/get_document`` calls and S3 ``upload_file`` calls, and resolves
    each identifier found to an ``EventSource``.

    :param code_map: dict mapping keys (e.g. file names) to code strings
    :param pool: optional cache dict forwarded to ``EventSource.get``
    :return: list of resolved endpoint objects (duplicates removed)
    """
    if pool is None:
        pool = {}
    result = []
    seen = set()  # identifiers already handled; set gives O(1) membership

    def collect(identifier, source_type):
        # Resolve an identifier once; append the endpoint if it resolves.
        if identifier in seen:
            return
        seen.add(identifier)
        endpoint = EventSource.get(identifier, pool=pool, type=source_type)
        if endpoint:
            result.append(endpoint)

    for key, code in code_map.items():
        # Elasticsearch references (domain endpoints embedded as strings)
        for es in re.findall(r'[\'"](.*\.es\.amazonaws\.com)[\'"]', code):
            collect(es, ElasticSearch)
        # Firehose references (comment previously mislabeled these as
        # Elasticsearch)
        for firehose in re.findall(
                r"\.put_record_batch\([^,]+,\s*([^,\s]+)\s*,", code):
            collect(firehose, FirehoseStream)
        # DynamoDB references
        # TODO fix pattern to be generic
        for op, dynamo in re.findall(
                r"\.(insert|get)_document\s*\([^,]+,\s*([^,\s]+)\s*,", code):
            collect(resolve_string_or_variable(dynamo, code_map), DynamoDB)
        # S3 references
        for s3 in re.findall(r"\.upload_file\([^,]+,\s*([^,\s]+)\s*,", code):
            collect(resolve_string_or_variable(s3, code_map), S3Bucket)
    return result
Example #3
def extract_endpoints(code_map, pool=None):
    """Extract references to other AWS resources from Lambda source code.

    :param code_map: dict mapping keys (e.g. file names) to code strings
    :param pool: optional cache dict forwarded to ``EventSource.get``; a
        fresh dict is created per call (the previous mutable default
        ``pool={}`` was shared across calls)
    :return: list of resolved endpoint objects (duplicates removed)
    """
    if pool is None:
        pool = {}
    result = []
    identifiers = set()  # O(1) duplicate detection instead of list scans
    for key, code in code_map.items():
        # Elasticsearch references
        pattern = r'[\'"](.*\.es\.amazonaws\.com)[\'"]'
        for es in re.findall(pattern, code):
            if es not in identifiers:
                identifiers.add(es)
                es = EventSource.get(es, pool=pool, type=ElasticSearch)
                if es:
                    result.append(es)
        # Firehose references (previously mislabeled as Elasticsearch)
        pattern = r'\.put_record_batch\([^,]+,\s*([^,\s]+)\s*,'
        for firehose in re.findall(pattern, code):
            if firehose not in identifiers:
                identifiers.add(firehose)
                firehose = EventSource.get(firehose, pool=pool, type=FirehoseStream)
                if firehose:
                    result.append(firehose)
        # DynamoDB references
        # TODO fix pattern to be generic
        pattern = r'\.(insert|get)_document\s*\([^,]+,\s*([^,\s]+)\s*,'
        for (op, dynamo) in re.findall(pattern, code):
            dynamo = resolve_string_or_variable(dynamo, code_map)
            if dynamo not in identifiers:
                identifiers.add(dynamo)
                dynamo = EventSource.get(dynamo, pool=pool, type=DynamoDB)
                if dynamo:
                    result.append(dynamo)
        # S3 references
        pattern = r'\.upload_file\([^,]+,\s*([^,\s]+)\s*,'
        for s3 in re.findall(pattern, code):
            s3 = resolve_string_or_variable(s3, code_map)
            if s3 not in identifiers:
                identifiers.add(s3)
                s3 = EventSource.get(s3, pool=pool, type=S3Bucket)
                if s3:
                    result.append(s3)
    return result
 def handle(func):
     """Register a single Lambda function in the enclosing result/pool.

     Relies on ``filter``, ``pool``, ``result``, ``details`` and ``env``
     from the enclosing scope.
     """
     func_name = func['FunctionName']
     if not re.match(filter, func_name):
         return
     arn = func['FunctionArn']
     lambda_func = LambdaFunction(arn)
     pool[arn] = lambda_func
     result.append(lambda_func)
     if not details:
         return
     # attach the function's configured event sources (triggers)
     for src in get_lambda_event_sources(lambda_func.name(), env=env):
         source = EventSource.get(src['EventSourceArn'], pool=pool)
         lambda_func.event_sources.append(source)
     # best effort: parse the function's code for outbound endpoints
     try:
         code_map = get_lambda_code(func_name, env=env)
         lambda_func.targets = extract_endpoints(code_map, pool)
     except Exception:
         LOG.warning("Unable to get code for lambda '%s'" % func_name)
Example #5
 def handle(func):
     """Collect one Lambda function record into the surrounding results.

     NOTE(review): ``filter``, ``pool``, ``result``, ``details`` and ``env``
     are free names — presumably bound in the enclosing function; confirm
     against the caller.
     """
     func_name = func['FunctionName']
     # only functions whose name matches the regex filter are considered
     if re.match(filter, func_name):
         arn = func['FunctionArn']
         f = LambdaFunction(arn)
         # register in the shared pool so later ARN lookups reuse this object
         pool[arn] = f
         result.append(f)
         if details:
             # resolve the function's configured event sources (triggers)
             sources = get_lambda_event_sources(f.name(), env=env)
             for src in sources:
                 arn = src['EventSourceArn']
                 f.event_sources.append(EventSource.get(arn, pool=pool))
             try:
                 # best effort: scan the function code for outbound endpoints
                 code_map = get_lambda_code(func_name, env=env)
                 f.targets = extract_endpoints(code_map, pool)
             except Exception:
                 LOG.warning("Unable to get code for lambda '%s'" % func_name)
Example #6
def get_firehose_streams(filter=".*", pool=None, env=None, region=None):
    """Return FirehoseStream objects for delivery streams matching ``filter``,
    with their S3 destinations resolved via ``EventSource.get``.

    :param filter: regex the stream name must match (name kept for backward
        compatibility although it shadows the builtin ``filter``)
    :param pool: optional cache dict forwarded to ``EventSource.get``;
        defaults to a fresh dict per call (the previous mutable default
        ``pool={}`` was shared across calls)
    :param env: environment handle passed to ``_connect``
    :param region: AWS region passed to ``_connect``
    :return: list of FirehoseStream objects; empty on any error
    """
    if pool is None:
        pool = {}
    result = []
    try:
        firehose_client = _connect("firehose", env=env, region=region)
        out = firehose_client.list_delivery_streams()
        for stream_name in out["DeliveryStreamNames"]:
            if not re.match(filter, stream_name):
                continue
            details = firehose_client.describe_delivery_stream(
                DeliveryStreamName=stream_name)["DeliveryStreamDescription"]
            stream = FirehoseStream(details["DeliveryStreamARN"])
            for dest in details["Destinations"]:
                bucket = EventSource.get(
                    dest["S3DestinationDescription"]["BucketARN"], pool=pool)
                stream.destinations.append(bucket)
            result.append(stream)
    except Exception:
        # best-effort listing: API/permission failures yield an empty result
        pass
    return result
Example #7
 def handle(bucket):
     """Register a single S3 bucket in the enclosing result/pool.

     Relies on ``filter``, ``result``, ``pool``, ``details`` and
     ``s3_client`` from the enclosing scope.
     """
     bucket_name = bucket["Name"]
     if not re.match(filter, bucket_name):
         return
     arn = "arn:aws:s3:::%s" % bucket_name
     s3_bucket = S3Bucket(arn)
     result.append(s3_bucket)
     pool[arn] = s3_bucket
     if not details:
         return
     # best effort: look up the (legacy) notification configuration
     try:
         out = s3_client.get_bucket_notification(Bucket=bucket_name)
         if out and "CloudFunctionConfiguration" in out:
             func = EventSource.get(
                 out["CloudFunctionConfiguration"]["CloudFunction"], pool=pool)
             notification = S3Notification(func.id)
             notification.target = func
             s3_bucket.notifications.append(notification)
     except Exception as e:
         print("WARNING: Unable to get details for bucket: %s" % e)
def get_firehose_streams(filter='.*', pool=None, env=None):
    """Return FirehoseStream objects for delivery streams matching ``filter``,
    using the AWS CLI (``cmd_firehose``), with S3 destinations resolved.

    :param filter: regex the stream name must match (name kept for backward
        compatibility although it shadows the builtin ``filter``)
    :param pool: optional cache dict forwarded to ``EventSource.get``;
        defaults to a fresh dict per call (the previous mutable default
        ``pool={}`` was shared across calls)
    :param env: environment handle passed to ``cmd_firehose``
    :return: list of FirehoseStream objects; empty on network errors
    """
    if pool is None:
        pool = {}
    result = []
    try:
        out = json.loads(cmd_firehose('list-delivery-streams', env))
        for stream_name in out['DeliveryStreamNames']:
            if not re.match(filter, stream_name):
                continue
            details = cmd_firehose(
                'describe-delivery-stream --delivery-stream-name %s' % stream_name, env)
            details = json.loads(details)['DeliveryStreamDescription']
            stream = FirehoseStream(details['DeliveryStreamARN'])
            for dest in details['Destinations']:
                dest_s3 = dest['S3DestinationDescription']['BucketARN']
                stream.destinations.append(EventSource.get(dest_s3, pool=pool))
            result.append(stream)
    except socket.error:
        # only network failures are swallowed; other errors propagate
        pass
    return result
 def handle(bucket):
     """Collect one S3 bucket record into the surrounding results.

     NOTE(review): ``filter``, ``result``, ``pool``, ``details`` and ``env``
     are free names — presumably bound in the enclosing function; confirm
     against the caller.
     """
     bucket_name = bucket['Name']
     # only buckets whose name matches the regex filter are considered
     if re.match(filter, bucket_name):
         # construct the bucket ARN from its name
         arn = 'arn:aws:s3:::%s' % bucket_name
         bucket = S3Bucket(arn)
         result.append(bucket)
         # register in the shared pool so later ARN lookups reuse this object
         pool[arn] = bucket
         if details:
             # best effort: fetch the notification configuration via the CLI
             try:
                 out = cmd_s3api('get-bucket-notification-configuration --bucket %s' % bucket_name, env=env)
                 if out:
                     out = json.loads(out)
                     # only the legacy CloudFunction notification format is handled
                     if 'CloudFunctionConfiguration' in out:
                         func = out['CloudFunctionConfiguration']['CloudFunction']
                         func = EventSource.get(func, pool=pool)
                         n = S3Notification(func.id)
                         n.target = func
                         bucket.notifications.append(n)
             except Exception as e:
                 print('WARNING: Unable to get details for bucket: %s' % e)
Example #10
def get_firehose_streams(filter='.*', pool=None, env=None):
    """List Kinesis Firehose delivery streams via the AWS CLI
    (``cmd_firehose``) and resolve each stream's S3 destinations.

    :param filter: regex the stream name must match (name kept for backward
        compatibility although it shadows the builtin ``filter``)
    :param pool: optional cache dict forwarded to ``EventSource.get``;
        defaults to a fresh dict per call (the previous mutable default
        ``pool={}`` was shared across calls)
    :param env: environment handle passed to ``cmd_firehose``
    :return: list of FirehoseStream objects; empty on network errors
    """
    if pool is None:
        pool = {}
    result = []
    try:
        out = json.loads(cmd_firehose('list-delivery-streams', env))
        for stream_name in out['DeliveryStreamNames']:
            if not re.match(filter, stream_name):
                continue
            details = cmd_firehose(
                'describe-delivery-stream --delivery-stream-name %s' % stream_name, env)
            details = json.loads(details)['DeliveryStreamDescription']
            stream = FirehoseStream(details['DeliveryStreamARN'])
            for dest in details['Destinations']:
                dest_s3 = dest['S3DestinationDescription']['BucketARN']
                stream.destinations.append(EventSource.get(dest_s3, pool=pool))
            result.append(stream)
    except socket.error:
        # only network failures are swallowed; other errors propagate
        pass
    return result
Example #11
 def handle(bucket):
     """Register a single S3 bucket in the enclosing result/pool.

     Relies on ``filter``, ``result``, ``pool``, ``details`` and ``env``
     from the enclosing scope.
     """
     bucket_name = bucket['Name']
     if not re.match(filter, bucket_name):
         return
     arn = 'arn:aws:s3:::%s' % bucket_name
     s3_bucket = S3Bucket(arn)
     result.append(s3_bucket)
     pool[arn] = s3_bucket
     if not details:
         return
     # best effort: fetch the notification configuration via the CLI
     try:
         out = cmd_s3api('get-bucket-notification-configuration --bucket %s' % bucket_name, env=env)
         if out:
             config = json.loads(out)
             # only the legacy CloudFunction notification format is handled
             if 'CloudFunctionConfiguration' in config:
                 func = EventSource.get(
                     config['CloudFunctionConfiguration']['CloudFunction'],
                     pool=pool)
                 notification = S3Notification(func.id)
                 notification.target = func
                 s3_bucket.notifications.append(notification)
     except Exception as e:
         print('WARNING: Unable to get details for bucket: %s' % e)
Example #12
def get_firehose_streams(filter='.*', pool=None, env=None):
    """Return FirehoseStream objects for delivery streams matching ``filter``,
    with their S3 destinations resolved via ``EventSource.get``.

    :param filter: regex the stream name must match (name kept for backward
        compatibility although it shadows the builtin ``filter``)
    :param pool: optional cache dict forwarded to ``EventSource.get``;
        defaults to a fresh dict per call (the previous mutable default
        ``pool={}`` was shared across calls)
    :param env: unused in this variant; kept for signature compatibility
    :return: list of FirehoseStream objects; empty on any error
    """
    if pool is None:
        pool = {}
    result = []
    try:
        firehose_client = aws_stack.connect_to_service('firehose')
        out = firehose_client.list_delivery_streams()
        for stream_name in out['DeliveryStreamNames']:
            if not re.match(filter, stream_name):
                continue
            details = firehose_client.describe_delivery_stream(
                DeliveryStreamName=stream_name)
            details = details['DeliveryStreamDescription']
            stream = FirehoseStream(details['DeliveryStreamARN'])
            for dest in details['Destinations']:
                dest_s3 = dest['S3DestinationDescription']['BucketARN']
                stream.destinations.append(EventSource.get(dest_s3, pool=pool))
            result.append(stream)
    except Exception:
        # best-effort listing: any failure yields an empty result
        pass
    return result
Example #13
 def handle(bucket):
     """Collect one S3 bucket record into the surrounding results.

     NOTE(review): ``filter``, ``result``, ``pool`` and ``details`` are free
     names — presumably bound in the enclosing function; confirm against the
     caller.
     """
     bucket_name = bucket['Name']
     # only buckets whose name matches the regex filter are considered
     if re.match(filter, bucket_name):
         # construct the bucket ARN from its name
         arn = 'arn:aws:s3:::%s' % bucket_name
         bucket = S3Bucket(arn)
         result.append(bucket)
         # register in the shared pool so later ARN lookups reuse this object
         pool[arn] = bucket
         if details:
             # best effort: fetch the bucket's notification configuration
             try:
                 s3_client = aws_stack.connect_to_service('s3')
                 out = s3_client.get_bucket_notification(Bucket=bucket_name)
                 if out:
                     # only the legacy CloudFunction notification format is handled
                     if 'CloudFunctionConfiguration' in out:
                         func = out['CloudFunctionConfiguration'][
                             'CloudFunction']
                         func = EventSource.get(func, pool=pool)
                         n = S3Notification(func.id)
                         n.target = func
                         bucket.notifications.append(n)
             except Exception as e:
                 print('WARNING: Unable to get details for bucket: %s' % e)