示例#1
0
def s3_events_to_db(
    graph_attr: dict = GRAPH_ATTR,
    output_format: str = OUTPUT_FORMAT,
    output_path: str = OUTPUT_PATH,
) -> str:
    """Render the S3-events-to-DB diagram and return the output file path."""
    output = f"{output_path}/event_processing"

    with Diagram(
            "Event Processing",
            show=False,
            outformat=output_format,
            # graph_attr=graph_attr,
            filename=output,
    ):
        with Cluster("Serverless"):
            events = S3("S3 events")

            with Cluster("Event Flows"):
                with Cluster("Concurrent Processing"):
                    # Three identical workers drawn side by side.
                    workers = [Lambda("Python") for _ in range(3)]

            index = Dynamodb("S3 metadata index")

            events >> workers
            workers >> index

    return f"{output}.{output_format}"
示例#2
0
def backend(
    graph_attr: dict = GRAPH_ATTR,
    output_format: str = OUTPUT_FORMAT,
    output_path: str = OUTPUT_PATH,
) -> str:
    """Render the backend diagram and return the output file path."""
    output = f"{output_path}/backend"

    with Diagram(
            "Backend",
            show=False,
            outformat=output_format,
            # graph_attr=graph_attr,
            filename=output,
    ):

        with Cluster("Serverless"):
            gateway = APIGateway("API Gateway")

            with Cluster("Concurrent Processing"):
                api_fns = [Lambda("API (FastAPI)") for _ in range(3)]

            store = Dynamodb("data store")

        # Round trip: requests flow in, responses flow back out.
        gateway >> api_fns >> store >> api_fns >> gateway

    return f"{output}.{output_format}"
def apigw_dynamodb_lambda():
    """Draw the API Gateway -> Lambda -> DynamoDB architecture diagram."""
    stack_objective = "apigw-dynamodb-lambda"
    with Diagram(stack_objective,
                 outformat="png",
                 filename=f"{stack_objective}/pics/arch",
                 show=False):
        apigw = APIGateway("/task")
        dynamodb = Dynamodb("DynamoDB")

        # One labelled request/response path per REST route.
        apigw >> Edge(label="POST /example/update") \
            >> Lambda("update status") \
            >> Edge(label="update item") >> dynamodb
        apigw >> Edge(label="POST /example") \
            >> Lambda("producer") \
            >> Edge(label="put item") >> dynamodb
        apigw >> Edge(label="GET /example") \
            >> Lambda("consumer") \
            >> Edge(label="read all item") >> dynamodb
示例#4
0
def process_media(
    graph_attr: dict = GRAPH_ATTR,
    output_format: str = OUTPUT_FORMAT,
    output_path: str = OUTPUT_PATH,
) -> str:
    """Render the media-processing diagram and return the output file path."""
    output = f"{output_path}/media_processing"

    with Diagram(
            "Media Processing",
            show=False,
            outformat=output_format,
            # graph_attr=graph_attr,
            filename=output,
    ):
        cli = Python("CLI")

        with Cluster("Serverless"):
            queue = SQS("tasks queue")

            with Cluster("Concurrent Processing"):
                worker = Lambda("convert image\nencode video")

                with Cluster("Lambda layers"):
                    # Layers are displayed but not wired to anything.
                    [Layer("ImageMagic"), Layer("ffmpeg")]

            media_in = S3("input\nmedia files")
            media_out = S3("output\nmedia files")

            cli >> queue >> worker << media_in
            worker >> media_out

    return f"{output}.{output_format}"
def apigw_lambda():
    """Draw the minimal API Gateway -> Lambda integration diagram."""
    stack_objective = "apigw-lambda"
    with Diagram(stack_objective,
                 outformat="png",
                 filename=f"{stack_objective}/pics/arch",
                 show=False):
        gateway = APIGateway("APIGateway")
        gateway >> Edge(label="integration") >> Lambda("task")
示例#6
0
    def test4():
        """Render the event-processing sample diagram into ``img_dir``."""
        # The output file is named after this test function.
        filename = os.path.join(img_dir, sys._getframe().f_code.co_name)
        with Diagram("Event Processing", show=False, filename=filename):
            origin = EKS("k8s source")

            with Cluster("Event Flows"):
                with Cluster("Event Workers"):
                    fan_out = [ECS("worker1"), ECS("worker2"), ECS("worker3")]

                queue = SQS("event queue")

                with Cluster("Processing"):
                    procs = [Lambda("proc1"), Lambda("proc2"), Lambda("proc3")]

            bucket = S3("events store")
            warehouse = Redshift("analytics")

            origin >> fan_out >> queue >> procs
            procs >> bucket
            procs >> warehouse
def apigw_dynamodb_sfn_with_heavytask():
    """Draw the API Gateway / DynamoDB / Step Functions heavy-task diagram."""
    stack_objective = "apigw-dynamodb-sfn-with-heavytask"
    with Diagram(stack_objective,
                 outformat="png",
                 filename=f"{stack_objective}/pics/arch",
                 show=False):
        queue = SQS("SQS")
        # The API entry point fans out to the queue and the table.
        apigw = APIGateway("/task") >> Lambda("integration") >> [
            queue, Dynamodb("DynamoDB")
        ]

        timer = Lambda("timer lambda")
        queue << Edge(label="dequeue") << timer << Cloudwatch("cron")

        with Cluster(label="StepFunctions", direction="TB"):
            entry = SFn_TASK("update DynamoDB\nset `running`")
            entry >> Lambda("Some Task") >> [
                SFn_TASK("update DynamoDB\nset `success`"),
                SFn_TASK("update DynamoDB\nset `failure`"),
            ]

        # The timer lambda kicks off the state machine.
        timer >> entry
示例#8
0
def main():
    """Draw the "New SFTP Process" diagram for the Repay CDE environment."""
    # `diag` is bound for the context manager but not otherwise used.
    with Diagram('New SFTP Process') as diag:
        user = Windows('User')
        with Cluster('Repay-CDE-PROD-SFTP', direction='TB'):
            sftp_service = TransferForSftp('SFTP')
            s3_sftp_bucket = S3('channels_sftp_transfer')
            sftp_service - s3_sftp_bucket
            # NOTE(review): this group is never referenced afterwards.
            aws_sftp_group = [sftp_service, s3_sftp_bucket]

        user >> Edge(label='Upload a BLF into SFTP') >> sftp_service

        with Cluster('repay-cde-prod-channels', direction='TB'):
            lambda_blf_copy = Lambda('s3_copy_lambda')
            s3_blf_processor = S3('blf_processor bucket')
            sns_blf_uploaded_to_s3 = SNS('SNS - blf_uploaded_to_s3')
            redis = ElastiCache('(redis)')
            elasticsearch = ES('1 index per BLF')
            lambda_blf_copy >> s3_blf_processor
            s3_blf_processor >> sns_blf_uploaded_to_s3
            cde_group = [
                lambda_blf_copy, s3_blf_processor, sns_blf_uploaded_to_s3,
                redis, elasticsearch
            ]

        with Cluster('repay-cde-prod-k8s', direction='TB'):
            k8s_api_pod = Pod('Channels API\n/api/v1/blf_upload_sns')
            k8s_blf_processor_job = Job('process-blf-k8s')
            k8s_api_pod >> Edge(
                label='Create job if no BLF lock exists for org/filename'
            ) >> k8s_blf_processor_job
            k8s_group = [k8s_api_pod, k8s_blf_processor_job]

        # style='invis' edges are not drawn; they only nudge graphviz
        # layout so the clusters line up in the intended order.
        # TODO - MAKE SURE TO HIGHLIGHT THE USE OF INVISIBLE EDGES
        s3_sftp_bucket >> Edge(style='invis') >> cde_group
        redis >> Edge(style='invis') >> k8s_group
        elasticsearch >> Edge(style='invis') >> k8s_group

        # Visible, labelled data flows between the three clusters.
        k8s_blf_processor_job << Edge(
            label='Download file from s3') << s3_blf_processor
        s3_sftp_bucket >> Edge(
            label='S3 notification Object Created') >> lambda_blf_copy
        sns_blf_uploaded_to_s3 >> Edge(
            label='HTTP request with BLF file name') >> k8s_api_pod
        k8s_api_pod >> Edge(
            label='Create BLF lock - 5 minute expiration') >> redis
        k8s_blf_processor_job >> Edge(
            label='Delete BLF lock when done') >> redis
        k8s_blf_processor_job >> Edge(label='Create ES index') >> elasticsearch
示例#9
0
def frontend(
    graph_attr: dict = GRAPH_ATTR,
    output_format: str = OUTPUT_FORMAT,
    output_path: str = OUTPUT_PATH,
) -> str:
    """Render the frontend diagram and return the output file path."""
    output = f"{output_path}/frontend"

    with Diagram(
        "Frontend",
        show=False,
        outformat=output_format,
        # graph_attr=graph_attr,
        filename=output,
    ):

        client = Client("client")

        with Cluster("Serverless"):

            with Cluster("UI"):
                with Cluster("cache"):
                    ui_cdn = CloudFront("CDN\nUI")

                with Cluster("static"):
                    ui_bucket = S3("web")
                    app = React("app")

            with Cluster("Static Assets"):
                with Cluster("cache"):
                    asset_cdn = CloudFront("CDN\nassets")
                    asset_api = APIGateway("API Gateway")

                asset_bucket = S3("assets\nimages")

                with Cluster("media processing"):
                    resize_fn = Lambda("generate image")
                    magick = Layer("layer\nImageMagick")

        # Delivery paths are drawn from consumer back toward origin.
        ui_cdn << app << ui_bucket
        asset_cdn << asset_api << resize_fn << magick << asset_bucket
        client - Edge(color="orange") << asset_cdn
        client - Edge(color="orange") << ui_cdn
        asset_api >> resize_fn
        asset_cdn >> asset_api

    return f"{output}.{output_format}"
示例#10
0
def main():
    """Draw the Recipeify high-level architecture diagram."""
    with Diagram("Recipeify Architecture", direction='LR'):

        with Cluster("Github"):
            pipeline = Github("Actions CI/CD")
            Github("Open Source Repo") >> pipeline

        with Cluster("Cloudfare"):
            edge_cdn = Cloudflare("CDN\n DDoS Protection")

        with Cluster("AWS", direction='LR'):
            with Cluster("VPC"):
                balancer = ELB("Elastic Load Balancer")
                beanstalk = EB("Elastic Beanstalk")
                search = ES("Elasticsearch")
                fn = Lambda("Lambda")
            registry = ECR("Container Registry")

        with Cluster("3rd Party SaaS", direction='LR'):
            with Cluster("MongoDB Atlas"):
                users_db = MongoDB("Users DB")

            with Cluster("Recombee"):
                recommender = Recombee("Recommendation\n Engine")

            with Cluster("Auth0"):
                oauth = Auth0("Oauth2 Integrator")

            with Cluster("Cloudinary"):
                images = Cloudinary("Image Caching\n and Proccessing")

            # Invisible edges keep the SaaS boxes in a stable order.
            oauth >> Edge(style="invis") >> users_db >> Edge(
                style="invis") >> images >> Edge(style="invis") >> recommender

        # The application server talks to every SaaS dependency.
        beanstalk >> oauth
        beanstalk >> users_db
        beanstalk >> images
        beanstalk >> recommender

        beanstalk << registry << pipeline
        search << fn
        edge_cdn >> balancer >> beanstalk >> search
示例#11
0
def cli(
    graph_attr: dict = GRAPH_ATTR,
    output_format: str = OUTPUT_FORMAT,
    output_path: str = OUTPUT_PATH,
) -> str:
    """Render the CLI diagram and return the output file path."""
    output = f"{output_path}/cli"

    with Diagram(
            "CLI",
            show=False,
            outformat=output_format,
            # graph_attr=graph_attr,
            filename=output,
    ):
        tool = Python("CLI")

        with Cluster("targets"):

            with Cluster("local resources"):
                objects = Storage("build objects\nassets")
                lambda_layers = Layer("build\nLambda layers")
                media_server = Server("media\nlocal build")

            with Cluster("cloud resources"):
                table = Dynamodb("create DB\nseed DB")
                bucket = S3("sync data")
                fn = Lambda("resize images\nencode video")
                task_queue = SQS("queue\nmedia generate\ntasks")

        # Local build artifacts flow both ways.
        tool >> objects >> tool
        # One arrow from the CLI to every other target.
        for target in (bucket, table, fn, task_queue, lambda_layers,
                       media_server):
            tool >> target

    return f"{output}.{output_format}"
示例#12
0
class Extended(Diagram):
    """Diagram subclass that post-processes SVG output.

    After rendering, SVG files are parsed and their image links rewritten
    via ``fix_image_links`` so the result can be embedded directly.
    """

    def render(self):
        # Render through graphviz without opening a viewer window.
        self.dot.render(format=self.outformat, view=False, quiet=True)
        if self.outformat == "svg":
            # Rewrite image references in the generated SVG in place.
            path = pathlib.Path(f"{self.filename}.{self.outformat}")
            t = ET.parse(str(path))
            fix_image_links(t)
            logger.info("fix links %s", self.filename)
            t.write(str(path))

        # TODO: support self.show


# Render the overall architecture as an embeddable SVG.
with Extended(
    "architecture",
    show=True, # not supported yet
    direction="LR",
    outformat="svg",
    filename="architecture.embed",
):
    hosting = Amplify("AWS Amplify")
    gateway = APIGateway("Amazon API Gateway")
    fn = Lambda("AWS lambda")
    identity = IAM("AWS Identity and Access Management")
    table = Dynamodb("Amazon DynamoDB")

    hosting >> gateway >> fn >> table
    identity >> fn

# print("@", to_base64.cache_info())
from diagrams import Diagram
from diagrams.aws.compute import Lambda
from diagrams.aws.database import DynamodbTable
from diagrams.aws.integration import SNS
from diagrams.aws.network import APIGateway

with Diagram("User Profile", show=False, direction="TB"):

    profile_fn = Lambda("user profile")
    # topic = SNS("user profile")

    api = APIGateway("user profile api")
    table = DynamodbTable("user profiles")
    # Round trip: the function writes to and reads back from the table.
    api >> profile_fn >> table >> profile_fn
    # profile_fn >> topic
from diagrams import Cluster, Diagram
from diagrams.aws.compute import Lambda, Fargate
from diagrams.aws.database import Dynamodb
from diagrams.aws.integration import SQS, SNS, SF
from diagrams.aws.network import APIGateway
from diagrams.aws.storage import S3
from diagrams.aws.analytics import Kinesis, KinesisDataFirehose

with Diagram("SNS Integration", direction="LR", show=False):

    producer = APIGateway("Producer (http post)")
    fanout = SNS("Topic")

    with Cluster("Consumers"):
        consumer_fns = [Lambda("Consumer") for _ in range(3)]

    producer >> fanout
    consumer_fns >> fanout
示例#15
0
            curated = S3("CuratedData")

            submissions_crawler = GlueCrawlers("submissions crawler")
            curated_crawler = Glue("ETL")

            ctas = Athena("train/eval split")

            catalog = GlueDataCatalog("data catalog")

            notebooks = SagemakerNotebook("Build Model")
            job = SagemakerTrainingJob("Train Model")
            model = SagemakerModel("Fitted Model")

        with Cluster("Inference"):

            endpointLambda = Lambda("call endpoint")
            with Cluster("Multi AZ endpoints") as az:
                endpoints = [
                    Endpoint("us-east-1a"),
                    Endpoint("us-east-1b"),
                    Endpoint("us-east-1c"),
                ]

            published = S3("Monitor data")
            monitor_sched = EMR("model monitor")

    source >> submissions >> submissions_crawler >> curated_crawler >> curated
    submissions >> catalog
    iot >> inference >> source
    curated >> ctas >> [catalog, job]
    notebooks >> job >> model
示例#16
0
    "fontsize": "45"
}
node_attr = {
    "esep": "+20",
    "fontsize": "10",
}


with Diagram("On Demand Athena Reports", show=True, direction='LR',
             graph_attr=graph_attr, node_attr=node_attr):

    reports_table = DDB('DynamoDB Table \nReports Definitions (queries)')

    publisher = Lambda('Lambda Publisher \nPOST {report_id: 100, client_id:1}')
    # The publisher both reads and writes the report definitions.
    publisher << Edge() >> reports_table

    api = APIGateway('Rest API')
    requester = Client('Client API Request Report ID=100')
    requester >> api
    api >> Edge(color="darkgreen") >> publisher

    with Cluster("Reports Queue "):
        request_queue = SQS('Amazon SQS\nReport Request Queue')
        publisher >> request_queue
        subscriber = Lambda('Lambda Subscriber \nProcess Queue Messages \n (start query)')
        request_queue >> subscriber
    
from diagrams import Cluster, Diagram
from diagrams.aws.compute import Lambda

# A brainstorming mind-map abusing Lambda nodes as sticky notes.
with Diagram("The Secret Formula", show=False, direction="TB"):
    who = Lambda("Who?")
    where = Lambda("Where?")
    bait = Lambda("Bait?")
    result = Lambda("Result?")

    with Cluster("Who?", direction="TB"):
        who_questions = [
            Lambda("Who is your dream client?"),
            Lambda("Who do I actually want to work with?"),
            Lambda("What are they passionate about?"),
            Lambda("What are their goals, dreams, and desires?"),
        ]

    with Cluster("Where?"):
        where_questions = [
            Lambda("Where can you find your dream clients?"),
            Lambda("What groups are they a part of?"),
            Lambda("What interests do they have?"),
            Lambda("Where do they hang out online?"),
        ]

    with Cluster("Bait?"):
        bait_questions = [
            Lambda("What bait will you use to attract your dream clients?"),
            Lambda("Is your bait a physical book?"),
            Lambda("Is your bait a CD?"),
            Lambda("Is your bait an audio file?"),
        ]
示例#18
0
from diagrams import Diagram, Edge
from diagrams.aws.compute import Lambda
from diagrams.aws.engagement import SES
from diagrams.aws.integration import SNS

with Diagram("Calendar Mail Send", show=False, direction="TB"):

    calendar_topic = SNS("calendar topic")
    mailer = SES("simple email service")

    sender = Lambda("calendar mail send")
    calendar_topic >> Edge(label="published calendar") >> sender
    sender >> Edge(label="multipart email") >> mailer
示例#19
0
# diagram.py
from diagrams import Cluster, Diagram, Edge
from diagrams.aws.compute import Lambda
from diagrams.aws.network import APIGateway

# Transparent background so the image embeds cleanly in docs.
graph_attr = {"bgcolor": "transparent"}
with Diagram("CSV to Heartbeat YAML",
             show=False,
             outformat="png",
             filename="overview",
             graph_attr=graph_attr):

    converter = Lambda("Converter")
    # Shown on the diagram but not connected to anything yet.
    retention = Lambda("Lambda Retention policy")
    gateway = APIGateway("API Gateway")
    gateway >> converter
示例#20
0
#!/usr/bin/env python
from diagrams import Diagram
from diagrams.aws.compute import Lambda
from diagrams.aws.ml import Personalize
from diagrams.aws.analytics import KinesisDataStreams, KinesisDataFirehose, Athena, Quicksight, Glue
from diagrams.onprem.client import Client
from diagrams.aws.storage import S3

with Diagram('イベントストリーミング'):
    # Nodes are created by instantiation.
    # Each node takes a label; "\n" inside the label inserts a line break.
    stream = KinesisDataStreams('Kinesis\nData Streams')
    s3 = S3('S3')
    athena = Athena('Athena')

    # Build flows starting from the nodes defined above.
    # Nodes can also be created inline without binding them to a variable.
    Client() >> stream >> Lambda('Lambda') >> Personalize('Personalize\nEventTracker')
    stream >> KinesisDataFirehose('Kinesis\nData Firehose') >> s3
    s3 - athena >> Quicksight('QuickSight') << Client()
    s3 >> Glue('Glue') >> athena
from diagrams import Diagram
from diagrams.aws.compute import Lambda
from diagrams.aws.integration import SNS
from diagrams.aws.network import APIGateway

with Diagram("Calendar User Agent", show=False, direction="TB"):

    agent = Lambda("calendar user agent")

    # APIGateway("calendar user agent api") >> agent

    calendar_topic = SNS("calendar topic")
    events_api = APIGateway("calendar events")
    tasks_api = APIGateway("calendar tasks")
    journal_api = APIGateway("calendar journal")
    availability_api = APIGateway("calendar availability")

    # Every endpoint sends to the agent and receives replies back.
    for endpoint in (calendar_topic, events_api, tasks_api, journal_api,
                     availability_api):
        endpoint >> agent >> endpoint
示例#22
0
from diagrams import Cluster, Diagram
from diagrams.aws.compute import Lambda, Fargate
from diagrams.aws.database import Dynamodb
from diagrams.aws.integration import Eventbridge
from diagrams.aws.network import APIGateway
from diagrams.aws.storage import S3

with Diagram("Message Broker Refactored", show=False):
    with Cluster("Event Processors"):
        with Cluster("System C", direction="TB"):
            webhook = APIGateway("webhook")
            c_handler = Lambda("handler")

        with Cluster("System D", direction="TB"):

            d_handler = Lambda("handler")
            database = Dynamodb("database")
            d_stream_listener = Lambda("processor")

        bus = Eventbridge("message broker")

        with Cluster("System B", direction="TB"):
            # System B's nodes are drawn but not wired up here.
            b_handler = Lambda("handler")
            b_bucket = S3("S3")
            b_processor = Lambda("processor")

        webhook >> c_handler >> bus >> d_handler

        d_handler >> database >> d_stream_listener >> bus
示例#23
0
from diagrams.programming.framework import Vue, Django
from diagrams.aws.compute import Lambda
from diagrams.aws.engagement import Connect
from diagrams.aws.analytics import Kinesis
from diagrams.aws.database import Dynamodb
from pathlib import Path

with Diagram(
    "CC3 AWS Connect High Level Architecture",
    filename="architecture",
    outformat="png",
    show=False,
    direction="BT",
):
    connect = Connect("Connect")
    connect_fn = Lambda("awsConnect")
    web_client = Vue("CCU3 Web")
    api_backend = Django("CCU3 Api")
    users = [Users("Users"), Users("Agents")]

    with Cluster("CCU3 awsconnect"):
        ws_gateway = APIGateway("Websocket Gateway")
        clients_db = Dynamodb("ConnectClientsDB")

        with Cluster("Websocket Handler"):

            with Cluster("Connection"):
                # One $connect handler per shard ($connect0..$connect2).
                conn_handlers = [Lambda("$connect" + str(i)) for i in range(3)]
                ws_gateway - conn_handlers
                conn_handlers >> clients_db
    # NOTE(review): this cluster lives inside a `with Diagram` block that
    # begins earlier in the file; `users` referenced below is defined there.
    with Cluster("AWS"):

        security = Cognito("Cognito")
        gateway = APIGateway("Gateway")
        route = Route53("Route53")
        db = DDB("DynamoDB")
        email_service = SES("SES")
        monitoring = Cloudwatch("AWS CloudWatch ")
        firewall = WAF("AWS WAF")
        identity = IAM("AWS IAM")

        with Cluster("CDN"):
            # presumably `cdn` ends up bound to the CloudFront node,
            # since `>>` chains left to right — TODO confirm.
            cdn = S3("S3") >> CF("CloudFront CDN")

        with Cluster("Functions") as xyz:
            func_send_mail = Lambda("Send Email")
            func_store_data = Lambda("Store Data")
            functions = [func_send_mail, func_store_data]
            # Bidirectional link between the gateway and both functions.
            gateway >> Edge() << functions

        functions >> Edge() << identity

        func_send_mail >> Edge() >> email_service >> users
        func_store_data - Edge() - db
        cdn >> Edge() << route

        # Monitoring
        # NOTE(review): a single Edge instance is reused for all three
        # links below — verify this renders as three dotted edges.
        log_connection = Edge(color="darkpink", style="dotted")
        monitoring >> log_connection << gateway
        monitoring >> log_connection << [func_send_mail, func_store_data]
        monitoring >> log_connection << firewall
示例#25
0
from diagrams import Diagram
from diagrams.aws.compute import Lambda

with Diagram("AWS Lambda Layer", show=False, direction="TB"):
    # A single, unconnected node.
    Lambda("lambda layer")
示例#26
0
with Diagram(None, filename="aws-cross-account-pipeline", show=False):
    devs = Users("Developers")

    with Cluster("Developer Account"):
        repo = Codecommit("CodeCommit")
        repo << Edge(label="merge pr") << devs

    with Cluster("Shared Services Account"):
        with Cluster("Pipeline"):
            cicd = Codepipeline("CodePipeline")
            builder = Codebuild("Codebuild")
        artifact_bucket = S3("Build Artifacts")
        repo >> Edge(label="trigger") >> cicd
        devs >> Edge(label="manual approval") >> cicd
        cicd >> builder >> Edge(label="yaml file") >> artifact_bucket

    # Test and prod accounts mirror each other.
    with Cluster("Test Workload Account"):
        test_stack = Cloudformation("CloudFormation")
        test_fn = Lambda("Lambda")
        test_gateway = APIGateway("API Gateway")
        cicd >> test_stack
        test_gateway >> test_fn

    with Cluster("Prod Workload Account"):
        prod_stack = Cloudformation("CloudFormation")
        prod_fn = Lambda("Lambda")
        prod_gateway = APIGateway("API Gateway")
        cicd >> prod_stack
        prod_gateway >> prod_fn
示例#27
0
from diagrams import Cluster, Diagram
from diagrams.aws.compute import ECS, EKS, Lambda
from diagrams.aws.database import Redshift
from diagrams.aws.integration import SQS
from diagrams.aws.storage import S3

with Diagram("Event Processing", show=False):
    origin = EKS("k8s source")

    with Cluster("Event Flows"):
        with Cluster("Event Workers"):
            worker_pool = [ECS("worker1"), ECS("worker2"), ECS("worker3")]

        event_queue = SQS("event queue")

        with Cluster("Processing"):
            processors = [Lambda("proc1"), Lambda("proc2"), Lambda("proc3")]

    event_store = S3("events store")
    analytics = Redshift("analytics")

    origin >> worker_pool >> event_queue >> processors
    processors >> event_store
    processors >> analytics
示例#28
0
from diagrams import Diagram, Cluster

from diagrams.aws.storage import S3
from diagrams.aws.compute import Lambda
from diagrams.aws.integration import SNS, Eventbridge
from diagrams.aws.management import Cloudwatch
from diagrams.onprem.queue import ActiveMQ

with Diagram("Alerting Workflow", show=True):
    with Cluster('main account'):
        alert_topic = SNS('SNS Topic')

        with Cluster('Lambda'):
            handler = Lambda('processor')
            alert_topic >> handler
            S3('lambda source') - handler

        watch = Cloudwatch('Cloudwatch')
        handler >> watch

        rule = Eventbridge('Cloudwatch\nevent rule')
        watch >> rule

    with Cluster('Event Bus'):
        bus = ActiveMQ('bus')
        rule >> bus
示例#29
0
File: lambda.py  Project: nimkar/diagrams
from diagrams import Diagram
from diagrams.aws.network import APIGateway
from diagrams.aws.compute import Lambda
from diagrams.aws.database import Aurora

with Diagram("Lambda Invocation", show=False):
    gateway = APIGateway("API Gateway")
    handler = Lambda("Lambda")
    database = Aurora("Aurora Database")

    # Request path: gateway invokes the function, which hits the DB.
    gateway >> handler >> database




示例#30
0
from diagrams.aws.compute import Lambda
from diagrams.aws.storage import S3
from diagrams.aws.network import APIGateway
from diagrams.aws.database import DynamodbTable
from diagrams.aws.security import IdentityAndAccessManagementIam
from diagrams.aws.devtools import Codebuild
from diagrams.aws.devtools import Codecommit
from diagrams.aws.devtools import Codedeploy
from diagrams.aws.devtools import Codepipeline
from diagrams.aws.management import Cloudformation
from diagrams.aws.devtools import CommandLineInterface

with Diagram("Serverless Web Apps", show=False, direction="TB"):

    with Cluster("CloudFormation"):
        cloudformation = Cloudformation("Stack")
        cloudformation >> IdentityAndAccessManagementIam("IAM") \
            >> Codecommit("CodeCommit") \
            >> Codebuild("CodeBuild") \
            >> S3("S3") \
            >> Codepipeline("CodePipeline")

    with Cluster("CodePipeline"):
        codepipeline = Codepipeline("Pipeline")
        codepipeline >> Codecommit("CodeCommit") \
            >> Codebuild("CodeBuild") \
            >> Cloudformation("CloudFormation")

    with Cluster("Serverless Application Model"):
        sam = Cloudformation("SAM Template")
        sam >> APIGateway("API Gateway") \
            >> Lambda("Lambda") \
            >> DynamodbTable("DynamoDB")
        cloudformation >> codepipeline >> sam