Example #1
    def test_s3_lambda(self):
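        """Deploy info for an S3 -> Lambda pipeline: the bucket should be
        registered with its region, and the pipeline-qualified lambda with its
        S3 notification for that bucket."""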
        pipeline_name = 'two-node-pipe'
        lambda_name = 'Echo1'
        qualified_lambda_name = deploy.lambda_name(pipeline_name, lambda_name,
                                                   True)
        s1 = S3(BUCKET1, DEFAULT_REGION, events=[FILE_CREATED_EVENT])
        l1 = echo_lambda(lambda_name)

        p = s1 >> l1
        piper = Pipeline(pipeline_name, [p])

        lambda_with_modified_name = deploy.copy_lambda(qualified_lambda_name,
                                                       p.children()[0],
                                                       copy.copy)

        info = deploy.deploy_info(piper, test_mode=True)

        exp_agg = deploy.init_aggregator()
        exp_agg[deploy.S3] = {BUCKET1: {deploy.REGION_NAME: DEFAULT_REGION}}
        exp_agg[deploy.LAMBDA] = {
            qualified_lambda_name: {
                deploy.LAMBDA_INSTANCE: lambda_with_modified_name,
                deploy.S3_NOTIFICATION: {
                    BUCKET1: [
                        S3.event(S3.ALL_CREATED_OBJECTS,
                                 service_name=lambda_name)
                    ]
                }
            }
        }

        self.assertEqual(info, exp_agg)
Example #2
    def test_s3_lambda_name_mismatch(self):
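        """If an S3 event names a service (here 'WrongLambdaName') that does not
        match the downstream lambda, deploy_info should raise a ValueError."""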
        pipeline_name = 'two-node-pipe'
        lambda_name = 'Echo1'
        s1 = S3(BUCKET1,
                DEFAULT_REGION,
                events=[
                    S3.event(S3.ALL_CREATED_OBJECTS,
                             service_name='WrongLambdaName')
                ])
        l1 = echo_lambda(lambda_name)

        p = s1 >> l1
        piper = Pipeline(pipeline_name, [p])

        with self.assertRaises(ValueError) as cx:
            deploy.deploy_info(piper, test_mode=True)
Example #3
    def test_subset_tree(self):
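        """subset_tree should return the path from the root down to the named
        lambda, pruning anything below it, or None when that lambda is absent
        from the tree."""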
        lambda_name = 'Echo1'
        s1 = S3(BUCKET2,
                DEFAULT_REGION,
                events=[S3.event(S3.ALL_CREATED_OBJECTS)])
        s2 = S3(BUCKET2, DEFAULT_REGION)

        l1 = echo_lambda(lambda_name)
        l2 = echo_lambda('OtherEcho')

        inp_exp = [(s1, None), (l1, l1), (l2, None), (s1 >> l1, s1 >> l1),
                   (s1 >> l2, None), (s1 >> l1 >> s2, s1 >> l1),
                   (s1 >> l2 >> s2, None), (l1 >> s1, l1), (l2 >> s2, None)]

        for inp, exp in inp_exp:
            tree = deploy.subset_tree(inp, None, lambda_name)
            self.assertEqual(tree, exp)
Example #4
    def test_non_qualified_names(self):
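        """With qualify_lambda_name=False, the lambda should be keyed in the
        aggregator by the non-qualified name returned by deploy.lambda_name."""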
        lambda_name = 'CopyS3Lambda1'
        pipeline_name = 'three-node-pipe'
        qualify_lambda_name = False
        qualified_lambda_name = deploy.lambda_name(pipeline_name, lambda_name,
                                                   qualify_lambda_name)
        s1 = S3(BUCKET1,
                DEFAULT_REGION,
                events=[S3.event(S3.ALL_CREATED_OBJECTS)])
        s2 = S3(BUCKET2, DEFAULT_REGION)

        l1 = copy_lambda(lambda_name)
        p = s1 >> l1 >> s2
        piper = Pipeline(pipeline_name, [p])

        lambda_with_modified_name = deploy.copy_lambda(qualified_lambda_name,
                                                       p.children()[0].root(),
                                                       copy.copy)

        exp_agg = deploy.init_aggregator()
        exp_agg[deploy.S3] = {
            BUCKET1: {
                deploy.REGION_NAME: DEFAULT_REGION
            },
            BUCKET2: {
                deploy.REGION_NAME: DEFAULT_REGION
            }
        }
        exp_agg[deploy.LAMBDA] = {
            qualified_lambda_name: {
                deploy.LAMBDA_INSTANCE: lambda_with_modified_name,
                deploy.S3_NOTIFICATION: {
                    BUCKET1: [
                        S3.event(S3.ALL_CREATED_OBJECTS,
                                 service_name=lambda_name)
                    ]
                }
            }
        }

        info = deploy.deploy_info(piper,
                                  test_mode=True,
                                  qualify_lambda_name=qualify_lambda_name)
        self.assertEqual(info, exp_agg)
Example #5
    def test_single_s3_bucket(self):
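        """A pipeline consisting of a single S3 bucket should only produce an
        S3 entry (bucket -> region) in the deploy aggregator."""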
        p = S3(BUCKET1, DEFAULT_REGION)

        piper = Pipeline('my-first', [p])

        info = deploy.deploy_info(piper, test_mode=True)
        exp_agg = deploy.init_aggregator()
        exp_agg[deploy.S3] = {BUCKET1: {deploy.REGION_NAME: DEFAULT_REGION}}

        self.assertEqual(exp_agg, info)
Example #6
    def test_unique_lambda_names(self):
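        """Two lambdas sharing the same name within one pipeline should make
        deploy_info raise a ValueError."""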
        lambda_name = 'CopyS3Lambda1'
        pipeline_name = 'incorrect-pipe-with-multiple-lambdas-with-same-name'

        s1 = S3(BUCKET1,
                DEFAULT_REGION,
                events=[S3.event(S3.ALL_CREATED_OBJECTS)])
        s2 = S3(BUCKET2,
                DEFAULT_REGION,
                events=[S3.event(S3.ALL_CREATED_OBJECTS)])

        l1 = copy_lambda(lambda_name)
        s3 = S3(BUCKET3, DEFAULT_REGION)

        same_named_lambda = copy_lambda(lambda_name)
        p = s1 >> l1 >> s2 >> same_named_lambda >> s3
        piper = Pipeline(pipeline_name, [p])
        with self.assertRaises(ValueError) as cx:
            deploy.deploy_info(piper, test_mode=True)
Example #7
    def test_multi_prefix_multi_children(self):
        pipeline_name = 'two-node-pipe'
        lambda_name1 = 'Echo1'
        lambda_name2 = 'Echo2'
        s1 = S3(BUCKET1,
                DEFAULT_REGION,
                events=[
                    S3.event(S3.ALL_CREATED_OBJECTS),
                    S3.event(S3.ALL_REMOVED_OBJECTS)
                ])
        l1 = echo_lambda(lambda_name1)

        l2 = copy.deepcopy(l1)
        l2.name = lambda_name2
        p = s1 >> [l1, l2]
        piper = Pipeline(pipeline_name, [p])

        # Neither S3 event notification is mapped to a specific lambda service,
        # so with two downstream lambdas the mapping is ambiguous and
        # deploy_info should raise a ValueError.
        with self.assertRaises(ValueError) as cx:
            deploy.deploy_info(piper, test_mode=True)
Example #8
def pipeline(args: Dict[str, str], os_environ):
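    """Build the JayaCopyPipeline: object-created events in the source bucket
    feed jaya_copy_lambda (a CopyHandler), whose downstream node is the
    destination bucket."""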
    region_name = args['region_name']

    handler = CopyHandler(args)
    copy_lambda = AWSLambda(
        'jaya_copy_lambda',
        handler,
        region_name,
        virtual_environment_path=os_environ['VIRTUAL_ENV'],
        role_name=args['role'],
        description="This project was inspired by a woman who codes",
        dependencies=[jaya])

    s1 = S3(bucket_name=args['source_bucket'],
            region_name=region_name,
            events=[S3.event(S3.ALL_CREATED_OBJECTS)])

    s2 = S3(bucket_name=args['destination_bucket'], region_name=region_name)
    p = s1 >> copy_lambda >> s2

    piper = Pipeline('JayaCopyPipeline', [p])
    return piper
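
# Minimal usage sketch (not part of the original module): the argument keys
# mirror what pipeline() reads above, with placeholder bucket/role values,
# and the deploy_info call is the same one the unit tests exercise. Note that
# pipeline() reads VIRTUAL_ENV from the environment mapping it is given.
if __name__ == '__main__':
    import os
    from jaya.deployment import deploy

    example_args = {
        'region_name': 'us-east-1',
        'role': 'my-lambda-role',  # placeholder IAM role name
        'source_bucket': 'my-source-bucket',  # placeholder bucket names
        'destination_bucket': 'my-dest-bucket',
    }
    example_pipeline = pipeline(example_args, os.environ)
    print(deploy.deploy_info(example_pipeline, test_mode=True))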
Example #9
import unittest
from .handlers import echo_handler_lambda, CopyHandler
from jaya import S3, AWSLambda, Pipeline
from jaya.deployment import deploy
import json

import copy
import jaya

FILE_CREATED_EVENT = S3.event(S3.ALL_CREATED_OBJECTS)

DEFAULT_REGION = 'us-east-1'
BUCKET1 = 'tsa-test-bucket1'
BUCKET2 = 'tsa-lambda-dest-bucket'
BUCKET3 = 'tsa-bucket3'
ENVIRONMENT = 'development'


def copy_lambda(lambda_name):
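    """Test helper: an AWSLambda named lambda_name wrapping CopyHandler,
    aliased to the development environment."""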
    copy_handler = CopyHandler({})
    l1 = AWSLambda(lambda_name,
                   copy_handler,
                   DEFAULT_REGION,
                   alias=ENVIRONMENT,
                   dependencies=[jaya])
    return l1


def echo_lambda(lambda_name):
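    """Test helper: an echo lambda named lambda_name, built via
    echo_handler_lambda."""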
    return echo_handler_lambda(lambda_name, ENVIRONMENT, 'Unit Test Echo')