def process_media(
    graph_attr: dict = GRAPH_ATTR,
    output_format: str = OUTPUT_FORMAT,
    output_path: str = OUTPUT_PATH,
) -> str:
    """ Generates media processing diagrams """
    output = f"{output_path}/media_processing"

    with Diagram(
            "Media Processing",
            show=False,
            outformat=output_format,
            # graph_attr=graph_attr,
            filename=output,
    ):
        cli = Python("CLI")

        with Cluster("Serverless"):
            source = SQS("tasks queue")

            with Cluster("Concurrent Processing"):
                handlers = Lambda("convert image\nencode video")

                with Cluster("Lambda layers"):
                    [Layer("ImageMagick"), Layer("ffmpeg")]

            src = S3("input\nmedia files")
            dest = S3("output\nmedia files")

            cli >> source >> handlers << src
            handlers >> dest

    return f"{output}.{output_format}"
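
This function is clipped from a larger module: GRAPH_ATTR, OUTPUT_FORMAT, OUTPUT_PATH, and the node classes are defined elsewhere. A minimal header sketch that would let the snippet run on its own, with assumed values for the constants (Layer is not a stock diagrams class, so a hypothetical stand-in is used):

from diagrams import Cluster, Diagram
from diagrams.aws.compute import Lambda
from diagrams.aws.integration import SQS
from diagrams.aws.storage import S3
from diagrams.programming.language import Python

Layer = Lambda                   # hypothetical stand-in for the project's Layer node
GRAPH_ATTR = {"fontsize": "20"}  # assumed default styling
OUTPUT_FORMAT = "png"            # assumed
OUTPUT_PATH = "generated"        # assumed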
Example #2
def webhooked_vault_agent_architecture():
    with Diagram(name="Mutating Webhook", show=False):
        with Cluster("Control Plane"):
            apiserver = APIServer()

        with Cluster(""):
            webhook = SQS("Mutating Webhook")

        with Cluster(""):
            vault = Vault("Vault")

        with Cluster("Secure Pod"):
            with Cluster("Injected"):
                vault_init_agent = Custom("Init Vault Agent", crio_icon)
                vault_agent = Custom("Vault Agent", crio_icon)
                injected = [vault_init_agent, vault_agent]

            app_container = Custom("App", crio_icon)
            inMemory = Vol("In Memory")

            vault_init_agent >> inMemory
            app_container << inMemory

            vault >> Edge() << vault_agent >> Edge() << app_container

        apiserver >> Edge() << webhook
        apiserver >> Edge() << vault
        webhook >> vault_agent
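
The imports for this example are also clipped. A plausible set, assuming the standard diagrams modules for these nodes; crio_icon would be a local image path, which the snippet leaves unspecified:

from diagrams import Cluster, Diagram, Edge
from diagrams.aws.integration import SQS
from diagrams.custom import Custom
from diagrams.k8s.controlplane import APIServer
from diagrams.k8s.storage import Vol
from diagrams.onprem.security import Vault

crio_icon = "cri-o.png"  # hypothetical local icon file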
Example #3
    def test4():
        filename = os.path.join(img_dir, sys._getframe().f_code.co_name)
        with Diagram("Event Processing", show=False, filename=filename):
            source = EKS("k8s source")

            with Cluster("Event Flows"):
                with Cluster("Event Workers"):
                    workers = [ECS("worker1"), ECS("worker2"), ECS("worker3")]

                queue = SQS("event queue")

                with Cluster("Processing"):
                    handlers = [Lambda("proc1"), Lambda("proc2"), Lambda("proc3")]

            store = S3("events store")
            dw = Redshift("analytics")

            source >> workers >> queue >> handlers
            handlers >> store
            handlers >> dw
Example #4
def cli(
    graph_attr: dict = GRAPH_ATTR,
    output_format: str = OUTPUT_FORMAT,
    output_path: str = OUTPUT_PATH,
) -> str:
    """ Generates CLI diagrams """
    output = f"{output_path}/cli"

    with Diagram(
            "CLI",
            show=False,
            outformat=output_format,
            # graph_attr=graph_attr,
            filename=output,
    ):
        cli = Python("CLI")

        with Cluster("targets"):

            with Cluster("local resources"):
                data = Storage("build objects\nassets")
                layers = Layer("build\nLambda layers")
                server = Server("media\nlocal build")

            with Cluster("cloud resources"):
                db = Dynamodb("create DB\nseed DB")
                s3 = S3("sync data")
                lbd = Lambda("resize images\nencode video")
                sqs = SQS("queue\nmedia generate\ntasks")

        cli >> data >> cli
        cli >> s3
        cli >> db
        cli >> lbd
        cli >> sqs
        cli >> layers
        cli >> server

    return f"{output}.{output_format}"
Example #5

def apigw_dynamodb_sfn_with_heavytask():
    stack_objective = "apigw-dynamodb-sfn-with-heavytask"
    with Diagram(stack_objective,
                 outformat="png",
                 filename=f"{stack_objective}/pics/arch",
                 show=False):
        sqs = SQS("SQS")
        apigw = APIGateway("/task") >> Lambda("integration") >> [
            sqs, Dynamodb("DynamoDB")
        ]

        timer_lambda = Lambda("timer lambda")
        sqs << Edge(label="dequeue") << timer_lambda << Cloudwatch("cron")

        with Cluster(label="StepFunctions", direction="TB"):
            sfn_start = SFn_TASK("update DynamoDB\nset `running`")
            sfn_start \
                >> Lambda("Some Task") \
                >> [SFn_TASK("update DynamoDB\nset `success`"), SFn_TASK("update DynamoDB\nset `failure`")]

        # invoke sfn from Lambda
        timer_lambda >> sfn_start
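
The imports here are clipped as well. A plausible reconstruction; the SFn_TASK alias in particular is a guess at how the author renamed StepFunctions:

from diagrams import Cluster, Diagram, Edge
from diagrams.aws.compute import Lambda
from diagrams.aws.database import Dynamodb
from diagrams.aws.integration import SQS, StepFunctions as SFn_TASK
from diagrams.aws.management import Cloudwatch
from diagrams.aws.network import APIGateway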
Example #6
# diagram.py
from diagrams import Diagram
from diagrams.aws.compute import EC2
from diagrams.aws.database import RDS
from diagrams.aws.network import ELB
from diagrams.aws.integration import SQS

with Diagram("PubSub", show=False):
    EC2("Web-Server") >> SQS("Event") >> RDS("database")
Example #7

from diagrams import Cluster, Diagram
from diagrams.aws.compute import Lambda, Fargate
from diagrams.aws.database import Dynamodb
from diagrams.aws.integration import SQS, SNS, SF
from diagrams.aws.network import APIGateway
from diagrams.aws.storage import S3
from diagrams.aws.analytics import Kinesis, KinesisDataFirehose

with Diagram("SQS Integration", direction="LR", show=False):

    queue = SQS("Queue")
    apig = APIGateway("Producer (http post)")

    with Cluster("Consumers"):
        consumer_a = Lambda("Proc 1")
        consumer_b = Lambda("Proc 2")
        consumer_c = Lambda("Proc 3")

    apig >> queue
    queue << consumer_b
Example #8
from diagrams import Cluster, Diagram
from diagrams.aws.compute import ECS, EKS, Lambda
from diagrams.aws.database import Redshift
from diagrams.aws.integration import SQS
from diagrams.aws.storage import S3
from diagrams.programming.language import Bash

with Diagram("Falco For Security", show=False):
    source = EKS("Syscall Events")

    with Cluster("Falco"):
        with Cluster("Falco Processing"):
            workers = [ECS("Falco Daemon")]

        queue = SQS("Falco Sidekick")

        with Cluster("Sidekick outputs"):
            handlers = [Lambda("slack"), Lambda("logdna"), Lambda("loki")]

    store = S3("store")
    dw = Redshift("analytics")

    rules = Bash("Rules Definitions")
    source >> workers >> queue >> handlers
    rules >> workers
    handlers >> store
    handlers >> dw
Example #9

from diagrams import Cluster, Diagram
from diagrams.aws.compute import ECS, EKS, Lambda
from diagrams.aws.database import Redshift
from diagrams.aws.integration import SQS
from diagrams.aws.storage import S3

with Diagram("Event Processing", show=False):
    source = EKS("k8s source")

    with Cluster("Event Flows"):
        with Cluster("Event Workers"):
            workers = [ECS("worker1"), ECS("worker2"), ECS("worker3")]

        queue = SQS("event queue")

        with Cluster("Processing"):
            handlers = [Lambda("proc1"), Lambda("proc2"), Lambda("proc3")]

    store = S3("events store")
    dw = Redshift("analytics")

    source >> workers >> queue >> handlers
    handlers >> store
    handlers >> dw
    "fontsize": "20",
    "bgcolor": "white"  #transparent
}

with Diagram("Application Architecture",
             graph_attr=graph_attr,
             outformat="png",
             filename="application_architecture"):
    ELB("lb") >> EC2("web") >> RDS("userdb") >> S3("store")
    ELB("lb") >> EC2("web") >> RDS("userdb") << EC2("stat")
    (ELB("lb") >> EC2("web")) - EC2("web") >> RDS("userdb")

    with Cluster("Application Context"):
        app = EC2("Spring Boot")

    ELB("lb") >> app

    metrics = Prometheus("metric")
    metrics << Edge(color="firebrick", style="dashed") << Grafana("monitoring")

    Jenkins("CI")
    client = Client("A")
    client >> User("B") >> Users("S")
    client >> PostgreSQL("Database")
    client >> Internet("Remote API")
    client >> Docker("Docker")
    client >> Server("Server")
    client >> SQS("Sync Books")
    client >> Spring("Backend")
    client >> React("React")
Example #11

#
#  (C) Copyright 2021  Pavel Tisnovsky
#
#  All rights reserved. This program and the accompanying materials
#  are made available under the terms of the Eclipse Public License v1.0
#  which accompanies this distribution, and is available at
#  http://www.eclipse.org/legal/epl-v10.html
#
#  Contributors:
#      Pavel Tisnovsky
#

from diagrams import Diagram
from diagrams.aws.compute import EC2
from diagrams.aws.integration import SQS

# a new diagram with its basic properties set
with Diagram("AWS", show=True, direction="TB"):
    # node definition - consumer
    consumer = SQS("input stream")

    # fan-out - multiple workers
    workers = [EC2("worker #1"), EC2("worker #2"), EC2("worker #3")]

    # node definition - producer
    producer = SQS("output stream")

    # connect the graph nodes with directed edges
    consumer >> workers >> producer
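
The consumer >> workers >> producer line is the library's fan-out/fan-in shorthand: connecting a node to a list draws one edge per list element. A minimal sketch of the same pattern:

from diagrams import Diagram
from diagrams.aws.compute import EC2
from diagrams.aws.integration import SQS

with Diagram("Fan-out fan-in", show=False):
    # one edge from the queue to each worker, and one from each worker onward
    SQS("in") >> [EC2("w1"), EC2("w2")] >> SQS("out")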
Example #12

from diagrams import Cluster, Diagram
from diagrams.aws.compute import Lambda, Fargate
from diagrams.aws.database import Dynamodb
from diagrams.aws.integration import SQS, SNS, SF
from diagrams.aws.network import APIGateway
from diagrams.aws.storage import S3
from diagrams.aws.analytics import Kinesis, KinesisDataFirehose

with Diagram("Lambda Pinball", show=False):

    bucket = S3("bucket")
    ddb = Dynamodb("database")
    apig = APIGateway("HTTP")
    tsp = Lambda("Transpangler")
    steps = [SF("Step 1"), SF("Step 2"), SF("Step 3")]

    (apig >> Lambda("POST handler") >> ddb
          >> Lambda("Stream Listener") >> Kinesis("Event stream")
          >> KinesisDataFirehose("Firehose") >> bucket
          >> SNS("Notifier") >> SQS("Job queue") >> tsp
          >> steps >> bucket >> SNS("Finished")
          >> Lambda("on finished") >> ddb)

    tsp >> ddb
    tsp >> bucket
    steps >> ddb

    apig >> Lambda("GET handler") >> ddb
Example #13

# diagram.py
from diagrams import Diagram, Edge
from diagrams.aws.compute import Lambda
from diagrams.aws.integration import SQS
from diagrams.aws.network import APIGateway
from diagrams.aws.storage import S3
from diagrams.elastic.elasticsearch import Beats, Elasticsearch, Kibana, Logstash
from diagrams.onprem.network import Internet


with Diagram("node-sass mirror", show=False, outformat="png", filename="overview"):

    inet = Internet("Internet")
    s3_bucket = S3(label="S3 Bucket")
    gw = APIGateway(label="API Gateway")
    lambda_function = Lambda("Lambda function")
    message_queue = SQS(label="Processing Queue")
    dl_queue = SQS(label="Dead Letter Queue")
    inet >> gw >> message_queue
    message_queue >> dl_queue
    message_queue >> Edge(label="triggers") >> lambda_function
    lambda_function >> Edge(label="uploads asset to") >> s3_bucket
    s3_bucket >> Edge(label="download for whitelisted addresses") >> inet
Example #14
from diagrams import Cluster, Diagram
from diagrams.aws.iot import InternetOfThings, IotCore, IotEvents  # assumed home of these nodes
from diagrams.aws.management import Cloudwatch
from diagrams.aws.compute import Lambda
from diagrams.aws.storage import S3
from diagrams.aws.database import Dynamodb
from diagrams.aws.integration import SQS, SNS
from diagrams.aws.ml import Rekognition

with Diagram("IOT Diagram", show=True, direction="TB"):

    _iotoutside = InternetOfThings("ESP-32 Board")

    with Cluster("AWS Serverless IOT"):

        _iotcore = IotCore("ESP-32 Iot Core")
        _iotevent = IotEvents("Event trigger SQS")
        _sqsesp32 = SQS("SQS Queue ESP32")
        _logsesp32 = Cloudwatch("Log Operations")
        _eventtriggeresp32 = Cloudwatch("Event Trigger to LAMBDA")
        _lambdaprocessimages = Lambda("Lambda process images")
        _imgrekog = Rekognition("Rekognition process")
        _tabledynamo = Dynamodb("Table history")
        _s3bucket = S3("S3 bucket images converted")
        _snstopico = SNS("Alert cat found")

    _iotoutside >> _iotcore >> _iotevent >> _sqsesp32
    _iotcore >> _logsesp32
    _eventtriggeresp32 >> _lambdaprocessimages >> _sqsesp32
    _lambdaprocessimages >> _imgrekog
    _imgrekog >> _lambdaprocessimages
    _lambdaprocessimages >> _tabledynamo
    _lambdaprocessimages >> _s3bucket
Example #15
            with Cluster("Config API - Web"):
                cawApi = APIGateway("API Gateway")
                cawLambda = Lambda("Lambda")

            with Cluster("Config Web App"):
                cwaCloudFront = CloudFront("CloudFront")
                cwaS3 = S3("S3 Bucket")

        with Cluster("Archive"):
            with Cluster("Analyst Web App"):
                awaCloudFront = CloudFront("CloudFront")
                awaS3 = S3("S3 Bucket")

        with Cluster("Data Management"):
            with Cluster("AV Scanning"):
                asSQS = SQS("SQS Queue")
                asECS = ECS("ECS Container")

            with Cluster("Data Validation"):
                dvSQS = SQS("SQS Queue")
                dvECS = ECS("ECS Container")

            with Cluster("Data Quarantine"):
                dqS3 = S3("S3 Bucket")
                sns = SimpleNotificationServiceSns("SNS Notification")
                # couldn't find icons
                snsEmail1 = Blank("")
                snsEmail2 = Blank("")

            with Cluster("Data Archive"):
                daS3 = S3("S3 Bucket")
Example #16

# diagram.py
# Needs the diagrams package from pip and Graphviz installed
from diagrams import Cluster, Diagram
from diagrams.aws.security import KMS
from diagrams.aws.integration import SQS

with Diagram("Simple Queue Service", show=False):
    KMS("Key") - SQS("Queue")
Example #17
            cewaECS = ECS("ECS Container")

        with Cluster("Open311 API"):
            api = APIGateway("API Gateway")
            lambda1 = Lambda("Lambda Function")

        with Cluster("Management App | Configuration API"):
            macaELB = ElasticLoadBalancing("Load Balancer")
            macaECS = ECS("ECS Container")

        with Cluster("Open311 Data Store"):
            s3a = S3("S3 Bucket")
            s3b = S3("S3 Bucket")
            s3c = S3("S3 Bucket")
            s3d = S3("S3 Bucket")
            sqs = SQS("SQS Queue")

        with Cluster("Integration Process | Management App"):
            ipmaECS1 = ECS("ECS Container")
            ipmaECS2 = ECS("ECS Container")
            ses = SimpleEmailServiceSes("Simple Email Service")

        with Cluster("Management Database"):
            elasticacheForRedis = Elasticache("Elasticache for Redis")
            mdRDS = RDS("Relational Database Service for Postgres")

    # format within Public and Rangers
    pUser - Edge(color="transparent") - pDevice
    rUser - Edge(color="transparent") - rDevice
    # format within Dept
    dsUser - Edge(color="transparent") - dsDevice
Example #18
from diagrams import Cluster, Diagram
from diagrams.aws.compute import ECS
from diagrams.aws.database import Redshift
from diagrams.aws.integration import SQS
from diagrams.aws.storage import S3

with Diagram("FIN DEP POLLUTION BASELINE CITY",
             show=False,
             filename="IMAGES/IMAGES/fin_dep_pollution_baseline_city",
             outformat="jpg"):

    temp_1 = S3('china_city_sector_pollution')
    temp_2 = S3('china_city_code_normalised')
    temp_3 = S3('china_city_reduction_mandate')
    temp_4 = S3('china_city_tcz_spz')
    temp_5 = S3('ind_cic_2_name')
    temp_6 = S3('china_credit_constraint')
    temp_7 = S3('province_credit_constraint')
    temp_8 = ECS('asif_firms_prepared')
    temp_12 = SQS('asif_industry_financial_ratio_city')
    temp_13 = SQS('china_sector_pollution_threshold')
    temp_14 = SQS('asif_tfp_firm_level')
    temp_15 = SQS('asif_industry_characteristics_ownership')
    temp_16 = SQS('asif_city_characteristics_ownership')

    with Cluster("FINAL"):

        temp_final_0 = Redshift('fin_dep_pollution_baseline_city')

    temp_final_0 << temp_1
    temp_final_0 << temp_2
    temp_final_0 << temp_3
    temp_final_0 << temp_4
    temp_final_0 << temp_5
    temp_final_0 << temp_6
Example #19
        filename="/home/ec2-user/VAT_rebate_quality_china/utils/IMAGES/china_vat_quality",
        outformat="jpg"):

    temp_1 = S3('world_bank_gdp_per_capita')
    temp_2 = S3('hs6_homogeneous')
    temp_3 = S3('industry_high_tech')
    temp_4 = S3('industry_energy')
    temp_5 = S3('industry_skilled_oriented')
    temp_6 = S3('industry_rd_oriented')
    input_china_import_export = S3("china_import_export")
    input_city_cn_en = S3("city_cn_en")
    input_china_country_name = S3("china_country_name")
    input_china_applied_mfn_tariffs_hs2 = S3("china_applied_mfn_tariffs_hs2")
    input_hs6_china_vat_rebate = S3("hs6_china_vat_rebate")
    temp_7 = SQS('china_export_tariff_tax')
    temp_8 = SQS('china_product_quality')
    temp_10 = SQS('export_foreign_city_product')
    temp_11 = SQS('export_soe_city_product')

    with Cluster("FINAL"):

        temp_final_0 = Redshift('china_vat_quality')

    temp_final_0 << temp_1
    temp_final_0 << temp_2
    temp_final_0 << temp_3
    temp_final_0 << temp_4
    temp_final_0 << temp_5
    temp_final_0 << temp_6
    input_china_import_export >> temp_7
Example #20
        with Cluster("Reader"):
            planner = Dataflow("Planner")
            redis >> planner
            ai_server >> planner >> ai_server
            planner >> commands
            # game_server >> planner >> game_server

with Diagram("Ruler AI Proactive", show=True):
    lb = ELB("lb")

    game_server = ECS("Game Server")
    game_server_cp = ECS("Game Server")
    ai_server = ECS("AI Server")

    commands = BigTable("Commands")
    executor = SQS("Executor")
    commands >> executor >> game_server_cp

    with Cluster("Data Crawler"):
        workers = [ECS("worker1"), ECS("worker2"), ECS("worker3")]
        game_server >> workers

    with Cluster("DB Cluster"):
        redis = RDS("NPC State")

    with Cluster("Backend Server"):
        with Cluster("Writer"):
            state_flow = Dataflow("Collector")
            workers >> lb >> state_flow >> redis

        with Cluster("Reader"):
Example #21
from diagrams import Cluster, Diagram
from diagrams.aws.database import RDS
from diagrams.aws.compute import ECS, EKS, Lambda
from diagrams.aws.database import Redshift
from diagrams.aws.integration import SQS

with Diagram("axie diagrams", show=False):
    source = EKS("multi-instance manager")
    source2 = EKS("multi-instance manager")

    with Cluster("Axie event processing"):
        with Cluster("multi-instance manager"):
            accounts = [ECS("account 1"), ECS("account 2"), ECS("account 3")]

        with Cluster("fetch current game state and data via the game API"):
            state = SQS("game state")
            data = RDS("game data")
            info = [state, data]

        with Cluster("multi-process YOLO service\nclick positions from rules and AI"):
            handlers = [Lambda("proc1"), Lambda("proc2"), Lambda("proc3")]

    click_operate = Redshift("perform click actions")

    source >> accounts >> state
    accounts >> data

    data >> handlers
    state >> handlers

    handlers >> click_operate
Example #22
    _ddb = DDB('DynamoDB Table \nReports Definitions (queries)')

    _lambda = Lambda('Lambda Publisher \nPOST {report_id: 100, client_id:1}')
    _lambda << Edge() >> _ddb

    _api = APIGateway('Rest API')
    _client = Client('Client API Request Report ID=100')
    _client >> _api
    _api >> Edge(color="darkgreen") >> _lambda

    with Cluster("Reports Queue"):
        _sqs = SQS('Amazon SQS\nReport Request Queue')
        _lambda >> _sqs
        _lambda2 = Lambda('Lambda Subscriber \nProcess Queue Messages \n (start query)')
        _sqs >> _lambda2

    with Cluster('Report Process'):
        with Cluster('Data'):
            _athena = Athena('Amazon Athena')
            data_stack = [S3('Dataset'),
                          GlueDataCatalog('Catalog')]
            _athena << Edge(color="darkgreen") >> data_stack

        with Cluster("Query Status"):
            _sqs2 = SQS('Amazon SQS\nOngoing queries')
Example #23
from diagrams import Cluster, Diagram
from diagrams.aws.compute import Compute, Lambda
from diagrams.aws.database import Dynamodb
from diagrams.aws.integration import SQS, SNS, Eventbridge
from diagrams.aws.network import APIGateway
from diagrams.aws.storage import S3
from diagrams.aws.analytics import Kinesis, KinesisDataFirehose

with Diagram("Procurement services", show=False):

    broker = Eventbridge()

    with Cluster("Lot Selection"):
        inbox = S3("Listings")
        listing_queue = SQS()
        textract = Lambda("Extract lots")
        listings_db = Dynamodb("Listings db")
        selector = Lambda("Select lots")

        inbox >> listing_queue >> textract >> listings_db >> selector

    with Cluster("Buying app"):
        pricer = Lambda("Bid calculator")
        purchases = Dynamodb("Buying db")
        apig = APIGateway("Manager app")
        ls = Lambda("View proposals")
        put = Lambda("Set price")
        invoices = S3("Invoice bucket")
        invoice_listener = Lambda("Invoice listener")
        listener = Lambda("Lot listener")
Example #24
from diagrams import Cluster, Diagram
from diagrams.aws.compute import ECS
from diagrams.aws.database import Redshift
from diagrams.aws.integration import SQS
from diagrams.aws.storage import S3

with Diagram("ASIF TFP CREDIT CONSTRAINT",
             show=False,
             filename="IMAGES/IMAGES/asif_tfp_credit_constraint",
             outformat="jpg"):

    temp_1 = S3('china_city_code_normalised')
    temp_2 = S3('china_city_sector_pollution')
    temp_3 = S3('china_city_reduction_mandate')
    temp_4 = S3('china_city_tcz_spz')
    temp_5 = S3('ind_cic_2_name')
    temp_6 = S3('province_credit_constraint')
    temp_7 = S3('china_credit_constraint')
    temp_8 = ECS('asif_firms_prepared')
    temp_9 = SQS('asif_tfp_firm_level')

    with Cluster("FINAL"):

        temp_final_1 = Redshift('asif_tfp_credit_constraint')

    temp_final_1 << temp_1
    temp_final_1 << temp_2
    temp_final_1 << temp_3
    temp_final_1 << temp_4
    temp_final_1 << temp_5
    temp_final_1 << temp_6
    temp_final_1 << temp_7
    temp_1 >> temp_8 >> temp_9 >> temp_final_1
Example #25
from diagrams import Cluster, Diagram
from diagrams.aws.integration import SQS
from diagrams.onprem.client import Users    # restored; module path assumed
from diagrams.onprem.compute import Server  # restored; module path assumed
from diagrams.onprem.database import PostgreSQL
from diagrams.onprem.network import Internet
from diagrams.programming.framework import Spring, React

graph_attr = {
    "fontsize": "20",
    "bgcolor": "white"  # transparent
}

with Diagram("",
             direction="LR",
             graph_attr=graph_attr,
             outformat="png",
             filename="book-reviewr-application-architecture"):
    with Cluster("Book Reviewr"):
        frontend = React("Frontend")
        backend = Spring("Backend")

    queue = SQS("SQS (Messaging Queue)")
    users = Users("Users")
    database = PostgreSQL("PostgreSQL (Database)")
    keycloak = Server("Keycloak (Identity Provider)")
    api = Internet("Open Library (REST API)")

    keycloak << [frontend, backend]

    users >> frontend
    frontend >> backend >> database
    backend >> api
    backend << queue