Example no. 1
# diagram.py
from diagrams import Diagram, Cluster, Edge
from diagrams.aws.compute import EC2
from diagrams.aws.database import RDS
from diagrams.aws.network import ELB
from diagrams.aws.storage import S3

with Diagram("AIK Architecture Diagram Solution", show=False):

    bucket = S3("Bucket x3")
    vpc = Cluster("VPC")

    with vpc:
        loadBal = ELB("ELB")
        jenkins = EC2("Jenkins CI/CD Server")
        with Cluster("EC2 Instances - AutoScaling Group"):
            ec2_1 = EC2("AIK App")
            ec2_2 = EC2("AIK App")
        with Cluster("DB RDS MySQL"):
            rds = RDS("")

    rds << Edge(label="") >> ec2_1
    rds << Edge(label="") >> ec2_2

    loadBal >> ec2_1
    loadBal >> ec2_2

    ec2_1 >> bucket
    ec2_2 >> bucket
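
Example no. 1 already shows the three connector operators that diagrams uses everywhere: >> draws a left-to-right arrow, << a reversed one, and - a plain line, with Edge(...) carrying labels, colors, and styles. As a reference, a minimal self-contained sketch of just those operators (not taken from any snippet here; it only assumes diagrams and Graphviz are installed):

from diagrams import Diagram, Edge
from diagrams.aws.compute import EC2
from diagrams.aws.network import ELB

with Diagram("Edge Operators", show=False):
    lb = ELB("lb")
    web = EC2("web")
    stat = EC2("stat")
    lb >> web                         # directed edge: lb -> web
    web << stat                       # directed edge: stat -> web
    lb - Edge(style="dashed") - stat  # undirected, styled edge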
Example no. 2
from diagrams import Cluster, Diagram, Edge
from diagrams.aws.compute import ECS
from diagrams.aws.database import RDS
from diagrams.aws.network import ELB
from diagrams.aws.network import Route53
from diagrams.aws.management import Cloudwatch
from diagrams.aws.general import InternetGateway
from diagrams.aws.security import CertificateManager, IAM
from diagrams.aws.general import Users
from diagrams.aws.storage import S3

with Diagram("Architecture", show=False):

    cw = Cloudwatch("Cloudwatch")
    s3 = S3("S3")
    users = Users("Users")
    iam = IAM("IAM")
    dns = Route53("DNS")
    cm = CertificateManager("CertificateManager")
    listener = InternetGateway(":443")

    with Cluster("VPC: 172.16.x.x"):
        lb = ELB("ALB")

        with Cluster("ECS"):
            with Cluster("Public Subnet #1"):
                ecs_stack_1 = ECS("rshiny-app-1")

            with Cluster("Public Subnet #2"):
                ecs_stack_2 = ECS("rshiny-app-2")
Example no. 3
from diagrams import Cluster, Diagram
from diagrams.aws.compute import ECS, EKS, Lambda
from diagrams.aws.database import Redshift
from diagrams.aws.integration import SQS
from diagrams.aws.storage import S3

with Diagram("Sample", show=False):
    source = EKS("k8s source")

    with Cluster("Event Flows"):
        with Cluster("Event Workers"):
            workers = [ECS("worker1"), ECS("worker2"), ECS("worker3")]

        queue = SQS("event queue")

        with Cluster("Processing"):
            handlers = [Lambda("proc1"), Lambda("proc2"), Lambda("proc3")]

    store = S3("events store")
    dw = Redshift("analytics")

    source >> workers >> queue >> handlers
    handlers >> store
    handlers >> dw
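
Example no. 3 connects plain Python lists of nodes (source >> workers >> queue >> handlers): diagrams fans the connection out, drawing one edge per list element. A minimal sketch of that behaviour in isolation, assuming only diagrams and Graphviz:

from diagrams import Diagram
from diagrams.aws.compute import ECS
from diagrams.aws.integration import SQS

with Diagram("Fan Out", show=False):
    queue = SQS("queue")
    queue >> [ECS("w1"), ECS("w2"), ECS("w3")]  # one edge per worker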
Example no. 4
                aaCloud = Cloudformation("CloudFormation")
                aaCli = CommandLineInterface("AWS Cli")
                aaDpA = ConsoleMobileApplication("Deployed Application")

        with Cluster("Sentry Exception Monitoring"):
            semELB = ElasticLoadBalancing("Load Balancer")
            with Cluster("Sentry Application Services"):
                sasECS1 = ECS("ECS Container")
                sasECS2 = ECS("ECS Container")
                sasECS3 = ECS("ECS Container")
                ses = SimpleEmailServiceSes("SES Email")
            with Cluster("Redis Cache"):
                rcECS1 = ECS("ECS Container")
                rcCloudmap = CloudMap("AWS CloudMap")
            with Cluster("Sentry Artifact Store"):
                s3 = S3("S3 Bucket")
            with Cluster("Sentry Database"):
                sdRDS = RDS("Relational Database Service for Postgres")
            


# STEP 2: set up relationships
    # format entities within one group
    gsIamUser - Edge(color="transparent") - gsServer
    genericSamlToken - Edge(color="transparent") - codeRepo
    aaCloud - Edge(color="transparent") - aaCli
    ses - Edge(color="transparent") - sasECS1
    sasECS2 - Edge(color="transparent") - sasECS3
    sasECS1 - Edge(color="transparent") - rcECS1 - Edge(color="transparent") - s3 - Edge(color="transparent") - sdRDS

    # connect between entities
Example no. 5
                PrivateSubnet("Subnet zone d"),
                ]
            with Cluster("Kubernetes cluster"):
                autoscaling_group = AutoScaling("Autoscaling group")
                autoscaling_group_instances = [
                    EC2("K8s worker zone a"),
                    EC2("K8s worker zone b"),
                    EC2("K8s worker zone c"),
                    EC2("K8s worker zone d"),
                ]
                ingress = Ingress("Ingress gateway")
                services = Service("Services")
                pods = Pod("Container pods")

    ci_pipeline = GitlabCI("CI pipeline")
    terraform_repo = Terraform("Infra as code")
    remote_state = S3("Remote state")

    ssl_certificate - dns_name
    dns_name - load_balancer
    load_balancer - public_subnets
    public_subnets - nat_gateway
    nat_gateway - private_subnets
    private_subnets - autoscaling_group
    autoscaling_group - autoscaling_group_instances
    autoscaling_group_instances - ingress
    ingress - services
    services - pods
    ci_pipeline - terraform_repo
    terraform_repo - remote_state
Example no. 6
from diagrams import Diagram, Cluster

from diagrams.aws.storage import S3
from diagrams.aws.compute import Lambda
from diagrams.aws.integration import SNS, Eventbridge
from diagrams.aws.management import Cloudwatch
from diagrams.onprem.queue import ActiveMQ

with Diagram("Alerting Workflow", show=True):
    with Cluster('main account'):
        topic = SNS('SNS Topic')

        with Cluster('Lambda'):
            processor = Lambda('processor')
            topic >> processor
            S3('lambda source') - processor

        cl = Cloudwatch('Cloudwatch')
        processor >> cl

        event = Eventbridge('Cloudwatch\nevent rule')
        cl >> event

    with Cluster('Event Bus'):
        event_bus = ActiveMQ('bus')
        event >> event_bus
Example no. 7
    "fontname": "Helvetica",
    "style": "rounded",
    "bgcolor": "transparent"
}

cluster = {
    "fontsize": "16",
    "fontname": "Helvetica",
    "style": "rounded",
    "bgcolor": "transparent"
}

with Diagram("Codebuild", graph_attr=graph_attr, direction="LR"):

    with Cluster("Code change", graph_attr=major_cluster):

        with Cluster("Trigger", graph_attr=cluster):
            Trigger = Cloudwatch("Event Trigger")
            IAMRole("Trigger Role") >> Trigger
            Codecommit("Code Change") >> Edge(
                color="firebrick") >> Cloudwatch("Event Rule") >> Trigger

        with Cluster("Build", graph_attr=cluster):
            Build = Codebuild("Codebuild")
            IAMRole("Codebuild Role") >> Build
            Build << Edge(color="black") >> ParameterStore("Build No")
            Build << Edge(color="black") >> ParameterStore("Latest")
            Build >> Edge(color="darkgreen") >> S3("Artefact Store")

    Trigger >> Edge(color="firebrick") >> Build
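
Example no. 7 arrives truncated: the dict whose tail opens the snippet has lost its first lines, and major_cluster is defined above the excerpt. The pattern it relies on, per-cluster Graphviz attributes passed as Cluster(..., graph_attr=...), looks like this in isolation; the attribute values below are illustrative, not from the original:

from diagrams import Cluster, Diagram
from diagrams.aws.compute import EC2

rounded = {"fontsize": "16", "style": "rounded", "bgcolor": "transparent"}

with Diagram("Cluster Styling", show=False):
    with Cluster("Outer", graph_attr=rounded):
        with Cluster("Inner", graph_attr=rounded):
            EC2("node")  # clusters nest; each takes its own attributes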
Example no. 8
             outformat='png'):
    with Cluster('Amazon Web Services'):
        with Cluster('Virtual Private Cloud'):
            kube = EKS('Elastic Kubernetes\nService')
            instance_1 = EC2('EC2 Instance 1\n(m5.xlarge)')
            disk_1 = EBS('EBS Disk 1\n(gp2)')
            instance_2 = EC2('EC2 Instance 2\n(m5.xlarge)')
            disk_2 = EBS('EBS Disk 2\n(gp2)')

            instance_1 - disk_1
            instance_2 - disk_2
            kube - instance_1
            kube - instance_2
            kube - RDS('Amazon RDS\nfor PostgreSQL\n(db.t3.large)')

        kube - S3('S3')
        kube - Cloudwatch('Amazon CloudWatch')
        dns = Route53('Route53')
        lb = ELB('Elastic Load Balancer')
        lb >> kube
        dns >> lb

    [Client('SDKs'), Users('Users')] >> dns

with Diagram('Google Cloud Platform resources',
             show=False,
             filename='gke',
             outformat='png'):
    with Cluster('Google Cloud Platform'):
        with Cluster('Virtual Private Cloud'):
            kube = GKE('Google Kubernetes\nEngine')
Example no. 9
        db_master - [onprMysql("userdb")]

    users - dns >> web_servers
    web_servers >> Backbone
    Backbone >> app_servers
    app_servers >> db_master
    app_servers >> nfs

with Diagram("AWS web application", show=True):
    users = Users('website/mobile users')

    with Cluster("Ingress"):
        dns = Route53("Route53")

        with Cluster("Cloudfront CDN"):
            s3_content = S3('Shared content')
            cf = CF('Cloudfront CDN')

    with Cluster('VPC'):
        with Cluster("WebProxy AutoScalingGroup (ASG)"):
            web_asg = AutoScaling('ASG')
            web_lb = ELB("NLB")

        with Cluster("Application servers AutoScalingGroup (ASG)"):
            app_asg = AutoScaling('ASG')
            app_lb = ELB("NLB")

        with Cluster("AWS Batch"):
            cwa = cw('CW Event')
            batch_s3 = S3('Batch data')
            batch = Batch('AWS Batch')
Example no. 10
from diagrams.aws.database import Dynamodb
from diagrams.aws.ml import Sagemaker
from diagrams.aws.network import APIGateway
from diagrams.aws.storage import S3
from diagrams.onprem.analytics import Spark
from diagrams.onprem.client import User
from diagrams.onprem.compute import Server
from diagrams.onprem.network import Internet

with Diagram(name="", show=False, direction="LR",
             filename="setup/architecture",
             graph_attr={"dpi": "300"}) as diag:
    with Cluster("Source"):
        source = Server("HTTP")
    with Cluster("Data load"):
        storage = S3("Data storage")
        download = ECS("ECS download task")
        unzip_trigger = Lambda("S3 event trigger")
        unzip = ECS("ECS unzip task")
    with Cluster("Data processing"):
        parse = Glue("Glue Spark XML\nparser")
        catalog = GlueDataCatalog("Data Catalog")
        with Cluster("Feature engineering"):
            train_features = Glue("Glue Spark job:\ntrain features")
            predict_features = Glue("Glue Spark job:\nprediction features")
        prediction_db = Dynamodb("Prediction database")
    with Cluster("ML model"):
        cluster = EMRCluster("EMR Cluster")
        model_fit = Spark("Spark + MLeap")
        model = Sagemaker("Sagemaker endpoint")
    with Cluster("Serving"):
Example no. 11

# missing imports reconstructed from the classes used below;
# the module paths are assumed, not part of the original snippet
from diagrams import Cluster, Diagram, Edge
from diagrams.aws.compute import ECS, Lambda
from diagrams.aws.integration import SNS
from diagrams.aws.management import Cloudwatch
from diagrams.aws.network import APIGateway
from diagrams.aws.storage import S3
from diagrams.onprem.client import User
from diagrams.onprem.compute import Server
with Diagram("BLT Kata final container diagram", show=True):

    payment_gw = Server("Payment Gateway")
    customer = User("Customer")
    marketing = User("Marketing")

    with Cluster("System"):
        with Cluster("Promotions"):
            promotions_cms = ECS("CMS")
            promotions_gw = APIGateway("Promotions API")
            promotions_be = Lambda("Promotions BE")
            promotions_cms >> promotions_gw >> promotions_be

        with Cluster("In Store"):
            printer_fe = S3("Docket Printer Front-end")
            docket_printer = Server("Docket Printer")
            cashier = User("Cashier")
            store_fe = S3("Store Front-end")
            store_api = APIGateway("Store API")
            store_be = Lambda("Store BE")
            cashier >> Edge(label="Retrieve printed docket") >> docket_printer
            printer_fe >> docket_printer
            store_db = S3("Store DB")
            cashier >> store_fe >> store_api >> store_be >> store_db

        with Cluster("Alerting"):
            logs = Cloudwatch("CloudWatch")
            alerts = SNS("SNS")
            logs >> alerts
Example no. 12
"""
quick start from diagrams
"""
from diagrams import Diagram
from diagrams.aws.compute import EC2
from diagrams.aws.database import RDS
from diagrams.aws.network import ELB, CF
from diagrams.aws.storage import S3, EFS

with Diagram("Web Service", show=True):
    workers = [
        EC2("1-web"),
    ]
    shared = EFS('wp-content')
    balancer = ELB('lb')
    cdn = CF('cdn')
    static = S3('wp-static')
    db = RDS('wp-tables')

    balancer >> workers >> db
    workers >> static
    workers >> shared
    cdn >> static
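
The quick start above writes web_service.png next to the script: Diagram slugifies its title into a file name and renders PNG by default. A minimal sketch of overriding both, with illustrative values:

from diagrams import Diagram
from diagrams.aws.compute import EC2
from diagrams.aws.network import ELB

# filename and outformat are optional; by default the title is slugified
# ("Web Service" -> web_service) and rendered as PNG
with Diagram("Web Service", show=False, filename="web_service", outformat="svg"):
    ELB("lb") >> EC2("web")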
Example no. 13
from diagrams import Diagram
from diagrams.saas.analytics import Snowflake
from diagrams.aws.storage import S3
from diagrams.programming.framework import FastAPI
from diagrams.saas.identity import Auth0
from diagrams.custom import Custom

with Diagram("Event Processing", show=False):
    cc_csv = Custom("csv file", "./csv.png")
    source = S3("Upload csv to S3")
    download = S3("Download csv from S3")
    ingestion = Snowflake("Snowflake Database")
    api = FastAPI("API Endpoints")
    authentication = Auth0("API Auth")
    cc_test = Custom("Pytest", "./pytest.png")
    cc_locust = Custom("Locust", "./Locust.png")

    cc_csv >> source >> download >> ingestion >> api >> authentication >> cc_test >> cc_locust
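
Example no. 13's Custom nodes assume ./csv.png, ./pytest.png, and ./Locust.png already exist on disk; Custom takes only a label and a local icon path. A hedged sketch of fetching an icon first, the way Example no. 25 further down does with urlretrieve (the URL here is hypothetical):

from urllib.request import urlretrieve

from diagrams import Diagram
from diagrams.custom import Custom

icon_url = "https://example.com/csv.png"  # hypothetical icon location
urlretrieve(icon_url, "./csv.png")        # fetch the icon before building the diagram

with Diagram("Custom Icon", show=False):
    Custom("csv file", "./csv.png")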

Example no. 14
        user = User("User")
        console = Client("Browser")
        user >> console

    with Cluster("Cloud9", direction="LR"):
        builder = User("Builder")
        cli = CommandLineInterface("AWS CLI")
        builder >> cli

    with Cluster("CloudFormation"):
        cloudformation = Cloudformation("Stack")
        cloudformation >> IdentityAndAccessManagementIam("IAM")
        cloudformation >> Codecommit("CodeCommit")
        cloudformation >> Codebuild("CodeBuild")
        cloudformation >> Codepipeline("CodePipeline")
        cloudformation >> S3("S3")
        cli >> cloudformation

    with Cluster("CodePipeline"):
        codepipeline = Codepipeline("Pipeline")
        codepipeline >> Codecommit("CodeCommit")
        codepipeline >> Codebuild("CodeBuild")
        codepipeline >> Cloudformation("CloudFormation")

    with Cluster("Serverless Application Model"):
        sam = Cloudformation("SAM Template")
        apigateway = APIGateway("API Gateway")
        mylambda = Lambda("Lambda")
        ddb = DynamodbTable("DynamoDB")
        sam >> apigateway
        sam >> mylambda
Example no. 15

# diagram.py
# Needs diagrams from pip and Graphviz installed
from diagrams import Cluster, Diagram
#from diagrams.aws.network.ElasticLoadBalancing import ELB
from diagrams.aws.compute import AutoScaling
from diagrams.aws.compute import EC2
from diagrams.aws.database import RDS
from diagrams.aws.network import Route53
from diagrams.aws.network import ELB
from diagrams.aws.storage import S3

with Diagram("Artifactory", show=False):
    dns = Route53("dns")
    lb = ELB("lb")

    with Cluster("DB Security Group"):
        RDS("MySql")
        S3("Artifact Store")
Example no. 16

# missing imports reconstructed from the classes used below;
# the module paths are assumed, not part of the original snippet
from diagrams import Cluster, Diagram, Edge
from diagrams.aws.compute import EC2
from diagrams.aws.database import RDS
from diagrams.aws.network import ELB
from diagrams.aws.storage import S3
from diagrams.onprem.ci import Jenkins
from diagrams.onprem.client import Client, User, Users
from diagrams.onprem.container import Docker
from diagrams.onprem.database import PostgreSQL
from diagrams.onprem.monitoring import Grafana, Prometheus
from diagrams.onprem.network import Internet
from diagrams.programming.framework import Spring, React

graph_attr = {
    "fontsize": "20",
    "bgcolor": "white"  #transparent
}

with Diagram("Application Architecture",
             graph_attr=graph_attr,
             outformat="png",
             filename="application_architecture"):
    ELB("lb") >> EC2("web") >> RDS("userdb") >> S3("store")
    ELB("lb") >> EC2("web") >> RDS("userdb") << EC2("stat")
    (ELB("lb") >> EC2("web")) - EC2("web") >> RDS("userdb")

    with Cluster("Application Context"):
        app = EC2("Spring Boot")

    ELB("lb") >> app

    metrics = Prometheus("metric")
    metrics << Edge(color="firebrick", style="dashed") << Grafana("monitoring")

    Jenkins("CI")
    client = Client("A")
    client >> User("B") >> Users("S")
    client >> PostgreSQL("Database")
Example no. 17
# missing imports reconstructed from the classes used below;
# the module paths are assumed, not part of the original snippet
from diagrams import Cluster, Diagram, Edge
from diagrams.aws.compute import Lambda
from diagrams.aws.devtools import Codebuild, Codecommit, Codepipeline
from diagrams.aws.general import Users
from diagrams.aws.management import Cloudformation
from diagrams.aws.mobile import APIGateway
from diagrams.aws.storage import S3

with Diagram(None, filename="aws-cross-account-pipeline", show=False):
    developers = Users("Developers")

    with Cluster("Developer Account"):
        source_code = Codecommit("CodeCommit")
        source_code << Edge(label="merge pr") << developers

    with Cluster("Shared Services Account"):
        with Cluster("Pipeline"):
            pipeline = Codepipeline("CodePipeline")
            build = Codebuild("Codebuild")
        artifacts = S3("Build Artifacts")
        source_code >> Edge(label="trigger") >> pipeline
        developers >> Edge(label="manual approval") >> pipeline
        pipeline >> build >> Edge(label="yaml file") >> artifacts

    with Cluster("Test Workload Account"):
        test_stack = Cloudformation("CloudFormation")
        test_function = Lambda("Lambda")
        test_api = APIGateway("API Gateway")
        pipeline >> test_stack
        test_api >> test_function

    with Cluster("Prod Workload Account"):
        prod_stack = Cloudformation("CloudFormation")
        prod_function = Lambda("Lambda")
        prod_api = APIGateway("API Gateway")
Example no. 18
# missing imports reconstructed from the classes used below (module paths assumed)
from diagrams import Diagram
from diagrams.aws.analytics import (Athena, Glue, KinesisDataFirehose,
                                    KinesisDataStreams, Quicksight)
from diagrams.aws.compute import Lambda
from diagrams.aws.ml import Personalize
from diagrams.onprem.client import Client
from diagrams.aws.storage import S3

graph_attr = {'bgcolor': 'gray', 'fontsize': '28'}

node_attr = {'fontsize': '14'}

edge_attr = {'arrowsize': '2.0', 'penwidth': '5.0'}

with Diagram(name='Event Streaming 2',
             filename='event_streaming',
             show=True,
             direction='TB',
             graph_attr=graph_attr,
             edge_attr=edge_attr,
             node_attr=node_attr):

    # Create nodes by instantiating classes.
    # Nodes take a label; a \n in the label produces a line break.
    stream = KinesisDataStreams('Kinesis\nData Streams')
    s3 = S3('S3')
    athena = Athena('Athena')

    # Build flows starting from the nodes defined above.
    # Nodes can also be created inline, without assigning them to variables.
    Client() >> stream >> Lambda('Lambda') >> Personalize(
        'Personalize\nEventTracker')
    stream >> KinesisDataFirehose('Kinesis\nData Firehose') >> s3
    s3 - athena >> Quicksight('QuickSight') << Client()
    s3 >> Glue('Glue') >> athena
Example no. 19
from diagrams import Cluster, Diagram
from diagrams.aws.compute import Lambda
from diagrams.aws.storage import S3
from diagrams.aws.network import APIGateway
from diagrams.aws.database import DynamodbTable
from diagrams.aws.security import IdentityAndAccessManagementIam
from diagrams.aws.devtools import Codebuild
from diagrams.aws.devtools import Codecommit
from diagrams.aws.devtools import Codedeploy
from diagrams.aws.devtools import Codepipeline
from diagrams.aws.management import Cloudformation
from diagrams.aws.devtools import CommandLineInterface

with Diagram("Serverless Web Apps", show=False, direction="TB"):

    with Cluster("CloudFormation"):
        cloudformation = Cloudformation("Stack")
        cloudformation >> IdentityAndAccessManagementIam("IAM") >> Codecommit(
            "CodeCommit") >> Codebuild("CodeBuild") >> S3(
                "S3") >> Codepipeline("CodePipeline")

    with Cluster("CodePipeline"):
        codepipeline = Codepipeline("Pipeline")
        codepipeline >> Codecommit("CodeCommit") >> Codebuild(
            "CodeBuild") >> Cloudformation("CloudFormation")

    with Cluster("Serverless Application Model"):
        sam = Cloudformation("SAM Template")
        sam >> APIGateway("API Gateway") >> Lambda("Lambda") >> DynamodbTable(
            "DynamoDB")
        cloudformation >> codepipeline >> sam
Example no. 20

    users = Users()

    with Cluster("AWS"):

        security = Cognito("Cognito")
        gateway = APIGateway("Gateway")
        route = Route53("Route53")
        db = DDB("DynamoDB")
        email_service = SES("SES")
        monitoring = Cloudwatch("AWS CloudWatch ")
        firewall = WAF("AWS WAF")
        identity = IAM("AWS IAM")

        with Cluster("CDN"):
            cdn = S3("S3") >> CF("CloudFront CDN")

        with Cluster("Functions") as xyz:
            func_send_mail = Lambda("Send Email")
            func_store_data = Lambda("Store Data")
            functions = [func_send_mail, func_store_data]
            gateway >> Edge() << functions

        functions >> Edge() << identity

        func_send_mail >> Edge() >> email_service >> users
        func_store_data - Edge() - db
        cdn >> Edge() << route

        # Monitoring
        log_connection = Edge(color="deeppink", style="dotted")  # "darkpink" is not a Graphviz color name
Example no. 21
    _client >> _api
    _api >> Edge(color="darkgreen") >> _lambda


    with Cluster("Reports Queue"):
        _sqs = SQS('Amazon SQS\nReport Request Queue')
        _lambda >> _sqs
        _lambda2 = Lambda('Lambda Subscriber\nProcess Queue Messages\n(start query)')
        _sqs >> _lambda2

    with Cluster('Report Process'):
        with Cluster('Data'):
            _athena = Athena('Amazon Athena')
            data_stack = [S3('Dataset'),
                          GlueDataCatalog('Catalog')]
            _athena << Edge(color="darkgreen") >> data_stack

        with Cluster("Query Status"):
            _sqs2 = SQS('Amazon SQS\nOngoing queries')
            _lambda3 = Lambda('Poll status queue\nif SUCCESS generate presigned URL with results')
            _sqs2 << Edge() >> _lambda3

    lambda_destinations = [_sqs2, _athena]

    _lambda2 >> Edge() >> lambda_destinations

    _lambda3 << Edge(label='Get query execution') >> _athena
Example no. 22
from diagrams import Diagram, Cluster
from diagrams.aws.compute import EC2
from diagrams.aws.database import RDS
from diagrams.aws.network import ELB
from diagrams.aws.storage import S3
from diagrams.onprem.workflow import Airflow

with Diagram("Grouped Workers", show=False, direction="TB"):
    di_store = S3('DI DataLake')

    with Cluster('DI Airflow Cluster'):
        di_store >> Airflow('DI')
Example no. 23
    GlueDataCatalog,
    KinesisDataFirehose,
    Kinesis,
    EMR,
    Quicksight,
)
from diagrams.aws.ml import SagemakerNotebook, SagemakerTrainingJob, SagemakerModel

with Diagram("AWS ML Lab", show=False):
    iot = IotRule("Engine devices")
    inference = Kinesis("real-time")
    source = KinesisDataFirehose("batches")
    quicksight = Quicksight("dashboard")
    with Cluster("VPC"):
        with Cluster("Training"):
            submissions = S3("Submissions")
            curated = S3("CuratedData")

            submissions_crawler = GlueCrawlers("submissions crawler")
            curated_crawler = Glue("ETL")

            ctas = Athena("train/eval split")

            catalog = GlueDataCatalog("data catalog")

            notebooks = SagemakerNotebook("Build Model")
            job = SagemakerTrainingJob("Train Model")
            model = SagemakerModel("Fitted Model")

        with Cluster("Inference"):
Example no. 24
# diagram.py
from diagrams import Diagram
from diagrams.aws.network import CF
from diagrams.aws.storage import S3


with Diagram("Static site", show=False):
    S3("Logging Bucket") << S3("Private Bucket") << CF("Public website")
Example no. 25

urlretrieve(terraform_url, terraform_icon)

dask_icon = "images/dask-icon.png"


with Diagram("Multi-Cloud", show=False):
    terraform = Custom("Terraform", terraform_icon)
    client = Client("Client Notebook")

    with Cluster("AWS"):
        with Cluster("Kubernetes"):
            dask1 = Custom("\nDask", dask_icon)
            worker = RS("Dask Worker")
            worker >> Edge(color="orange") << dask1

        s3 = S3("LENS")

        terraform >> worker
        worker >> s3

    with Cluster("GCP"):
        with Cluster("Kubernetes"):
            dask2 = Custom("\nDask", dask_icon)
            worker2 = RS("Dask Worker")
            worker2 >> Edge(color="orange") << dask2

        gcs = GCS("ERA5")

        terraform >> worker2
        worker2 >> gcs
Example no. 26
from diagrams import Cluster, Diagram
from diagrams.aws.storage import S3
from diagrams.saas.analytics import Snowflake
from diagrams.programming.framework import FastAPI
from diagrams.firebase.develop import Authentication
from diagrams.programming.flowchart import Preparation
from diagrams.onprem.workflow import Airflow
from diagrams.programming.flowchart import MultipleDocuments

with Diagram("API Architecture", show=False):

    airflow = Airflow("Airflow")

    with Cluster("Airflow Process"):
        input_data = MultipleDocuments("Input CSVs")
        prep = Preparation("Preprocess")
        with Cluster("Snowflake"):
            snowflake = Snowflake("Staging")
            database = Snowflake("Database")

    with Cluster("FastAPI"):
        with Cluster("Methods"):
            methods = [FastAPI("FastAPI Get"), FastAPI("FastAPI Post")]

        auth = Authentication("Authenticate")
        fetch = Snowflake("Fetch Data")
        out = S3("Output")

    airflow >> input_data >> prep >> snowflake >> database >> methods >> auth >> fetch >> out
Example no. 27
from diagrams import Diagram
from diagrams.aws.storage import S3

with Diagram("AWS S3 Bucket", show=False, direction="TB"):

    S3("s3 bucket") << S3("public access block")
Example no. 28
from diagrams.onprem.vcs import Github 

with Diagram(name="ningenme.net", filename="public/diagrams/diagram", show=False):
    user = User("Client")

    with Cluster("AWS"):
        with Cluster("route53"):
            route53NingenmeNet       = Route53("ningenme.net")
            route53ApiNingenmeNet    = Route53("api.ningenme.net")
            route53StaticNingenmeNet = Route53("static.ningenme.net")
        with Cluster("cloud front"):
            cloudFrontNingenmeNet       = CloudFront("ningenme.net")
            cloudFrontApiNingenmeNet    = CloudFront("api.ningenme.net")
            cloudFrontStaticNingenmeNet = CloudFront("static.ningenme.net")
        with Cluster("s3"):
            s3StaticNingenmeNet = S3("static.ningenme.net")
        with Cluster("alb"):
            albNetFront = ALB("net-front")
            albNetApi   = ALB("net-api")
        with Cluster("ecs-cluster"):
            ecsNetFront = ECS("net-front")
            ecsNetApi   = ECS("net-api")
        with Cluster("db"):
            mysql = RDS("ningenme-mysql")
        with Cluster("code deploy"):
            codeDeployNetFront = Codedeploy("net-front")
            codeDeployNetApi   = Codedeploy("net-api")
    with Cluster("GitHub"):
        with Cluster("github actions"):
            actionsNetFront = GithubActions("net-front")
            actionsNetApi   = GithubActions("net-api")
Example no. 29
# missing imports restored; module paths assumed from the classes used below
from diagrams import Cluster, Diagram
from diagrams.aws.compute import Lambda
from diagrams.aws.database import Dynamodb
from diagrams.aws.integration import Eventbridge
from diagrams.aws.network import APIGateway
from diagrams.aws.storage import S3

with Diagram("Message Broker Refactored", show=False):
    with Cluster("Event Processors"):
        with Cluster("System C", direction="TB"):
            apig = APIGateway("webhook")
            handler = Lambda("handler")

        with Cluster("System D", direction="TB"):

            processor = Lambda("handler")
            ddb = Dynamodb("database")
            stream_listener = Lambda("processor")

        broker = Eventbridge("message broker")

        with Cluster("System B", direction="TB"):
            event_handler = Lambda("handler")
            bucket = S3("S3")
            file_processor = Lambda("processor")


        apig >> handler >> broker >> processor

        processor >> ddb >> stream_listener >> broker

        broker >> event_handler >> bucket >> file_processor
Example no. 30
from diagrams import Cluster, Diagram
from diagrams.aws.compute import ECS
from diagrams.aws.database import Redshift, RDS
from diagrams.aws.integration import SQS
from diagrams.aws.storage import S3

with Diagram("FIN DEP POLLUTION BASELINE CITY",
             show=False,
             filename="IMAGES/IMAGES/fin_dep_pollution_baseline_city",
             outformat="jpg"):

    temp_1 = S3('china_city_sector_pollution')
    temp_2 = S3('china_city_code_normalised')
    temp_3 = S3('china_city_reduction_mandate')
    temp_4 = S3('china_city_tcz_spz')
    temp_5 = S3('ind_cic_2_name')
    temp_6 = S3('china_credit_constraint')
    temp_7 = S3('province_credit_constraint')
    temp_8 = ECS('asif_firms_prepared')
    temp_12 = SQS('asif_industry_financial_ratio_city')
    temp_13 = SQS('china_sector_pollution_threshold')
    temp_14 = SQS('asif_tfp_firm_level')
    temp_15 = SQS('asif_industry_characteristics_ownership')
    temp_16 = SQS('asif_city_characteristics_ownership')

    with Cluster("FINAL"):

        temp_final_0 = Redshift('fin_dep_pollution_baseline_city')

    temp_final_0 << temp_1
    temp_final_0 << temp_2