Example #1
    def test_lambda_create_exposes_boto3_create_api(self, monkeypatch):
        """LambdaCreate.run() should call boto3's ``create_function`` API,
        forwarding the task's configuration and filling every unset option
        with its default value.
        """
        task = LambdaCreate(
            function_name="test",
            runtime="python3.7",
            role="aws_role",
            handler="file.handler",
        )
        client = MagicMock()
        # task.run() calls boto3.client(...); the service client it gets back
        # is client.return_value.  Configure the mock there so the object we
        # assert on below is the one the task actually used (previously the
        # mock was set on the factory itself and was never exercised).
        client.return_value.create_function = MagicMock()
        boto3 = MagicMock(client=client)
        monkeypatch.setattr("prefect.utilities.aws.boto3", boto3)
        task.run()

        client.return_value.create_function.assert_called_once_with(
            FunctionName="test",
            Runtime="python3.7",
            Role="aws_role",
            Handler="file.handler",
            Code={"S3Bucket": "", "S3Key": ""},
            Description="",
            Timeout=3,
            MemorySize=128,
            Publish=True,
            VpcConfig={},
            DeadLetterConfig={},
            Environment={"Variables": {}},
            KMSKeyArn="",
            TracingConfig={"Mode": "PassThrough"},
            Tags={},
            Layers=[],
        )
Example #2
    def test_lambda_create_exposes_boto3_create_api(self, monkeypatch):
        """LambdaCreate.run() should invoke boto3's ``create_function`` API
        exactly once, passing the configured function name.
        """
        task = LambdaCreate(
            function_name="test",
            runtime="python3.6",
            role="aws_role",
            handler="file.handler",
        )
        client = MagicMock()
        client.create_function = MagicMock()
        boto3 = MagicMock(client=client)
        monkeypatch.setattr("prefect.utilities.aws.boto3", boto3)
        task.run()

        called_method = client.mock_calls[1]
        assert called_method[0] == "().create_function"
        # BUG FIX: mock_calls entries are ``call`` tuples, not mocks —
        # calling assert_called_once_with(...) on one silently builds a new
        # ``call`` object and asserts nothing.  Assert on the real mock.
        create_function = client.return_value.create_function
        create_function.assert_called_once()
        assert create_function.call_args[1]["FunctionName"] == "test"
Example #3
 def test_initialization(self):
     """A bucket/bucket_key pair must be stored as the boto3 Code mapping."""
     expected_code = {"S3Bucket": "s3_bucket", "S3Key": "bucket_key"}
     task = LambdaCreate(
         function_name="test",
         runtime="python3.7",
         role="aws_role",
         handler="file.handler",
         bucket="s3_bucket",
         bucket_key="bucket_key",
     )
     assert task.code == expected_code
Example #4
 def test_initialization(self):
     """The Code mapping is built from bucket/bucket_key, and the
     credentials secret name defaults to AWS_CREDENTIALS.
     """
     task = LambdaCreate(
         function_name="test",
         runtime="python3.6",
         role="aws_role",
         handler="file.handler",
         bucket="s3_bucket",
         bucket_key="bucket_key",
     )
     expected_code = {"S3Bucket": "s3_bucket", "S3Key": "bucket_key"}
     assert task.code == expected_code
     assert task.aws_credentials_secret == "AWS_CREDENTIALS"
Example #5
 def test_creds_are_pulled_from_secret(self, monkeypatch):
     """run() must build the boto3 client with the access keys stored in
     the local AWS_CREDENTIALS secret.
     """
     task = LambdaCreate(
         function_name="test",
         runtime="python3.6",
         role="aws_role",
         handler="file.handler",
         bucket="s3_bucket",
         bucket_key="bucket_key",
     )
     client = MagicMock()
     monkeypatch.setattr(
         "prefect.tasks.aws.lambda_function.boto3", MagicMock(client=client)
     )
     secrets = dict(
         AWS_CREDENTIALS={"ACCESS_KEY": "42", "SECRET_ACCESS_KEY": "99"}
     )
     with set_temporary_config({"cloud.use_local_secrets": True}), prefect.context(
         secrets=secrets
     ):
         task.run()
     # The keyword arguments given to boto3.client(...) carry the creds.
     assert client.call_args[1] == {
         "aws_access_key_id": "42",
         "aws_secret_access_key": "99",
     }
    Docker Storage - ECR
        one image per flow
    ECS RUN
        Infra can be abstracted to another file

Execution
    Run on ECS Agent
    One K8s cluster, two agents
    1 - ECS Agent to submit Fargate Tasks, "serverless" labels 
    2 - Kubernetes Agent to submit K8s Jobs, "eks" labels
    
"""
# Secret holding the S3 key to read; its value is resolved at flow runtime.
my_s3_key = PrefectSecret(name="Secret Key")
# Flow parameter listing the buckets to process.
buckets = Parameter(name="Bucket List", default=["users", "transactions"])

# NOTE(review): bound to an S3*Download* task despite the "upload_data"
# name -- confirm intent and consider renaming.
upload_data = S3Download(key=my_s3_key)
event_trigger = LambdaCreate()


@task
def transform_data(data):
    """Placeholder transform step; currently a no-op that returns None."""


@task
def upload_to_redshift(data):
    """Placeholder load step; currently a no-op that returns None."""


with Flow("S3 to Redshift") as flow:
    upload_data()