Example no. 1
import os
import sys

from diagrams import Cluster, Diagram
from diagrams.aws.compute import ECS, EKS, Lambda
from diagrams.aws.database import Redshift
from diagrams.aws.integration import SQS
from diagrams.aws.storage import S3

# The original snippet relies on an img_dir defined elsewhere in its
# test module; a placeholder is assumed here so the example runs as-is.
img_dir = "."

def test4():
    # Name the output file after the enclosing function ("test4").
    filename = os.path.join(img_dir, sys._getframe().f_code.co_name)
    with Diagram("Event Processing", show=False, filename=filename):
        source = EKS("k8s source")

        with Cluster("Event Flows"):
            with Cluster("Event Workers"):
                workers = [ECS("worker1"), ECS("worker2"), ECS("worker3")]

            queue = SQS("event queue")

            with Cluster("Processing"):
                handlers = [Lambda("proc1"), Lambda("proc2"), Lambda("proc3")]

        store = S3("events store")
        dw = Redshift("analytics")

        source >> workers >> queue >> handlers
        handlers >> store
        handlers >> dw
Example no. 2
from diagrams import Cluster, Diagram
from diagrams.aws.compute import ECS, EKS, Lambda
from diagrams.aws.database import Redshift
from diagrams.aws.integration import SQS
from diagrams.aws.storage import S3

with Diagram("Event Processing", show=False):
    source = EKS("k8s source")

    with Cluster("Event Flows"):
        with Cluster("Event Workers"):
            workers = [ECS("worker1"), ECS("worker2"), ECS("worker3")]

        queue = SQS("event queue")

        with Cluster("Processing"):
            handlers = [Lambda("proc1"), Lambda("proc2"), Lambda("proc3")]

    store = S3("events store")
    dw = Redshift("analytics")

    source >> workers >> queue >> handlers
    handlers >> store
    handlers >> dw
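
Note: examples no. 1 and 2 lean on the operator overloading in diagrams: >> draws a directed edge, - an undirected one, and chaining through a plain Python list fans the edge out to every node in it (so "source >> workers" above creates three edges). A minimal sketch of just the edge semantics, with throwaway node names:

from diagrams import Diagram
from diagrams.aws.compute import EC2

with Diagram("edge operators", show=False):
    a = EC2("a")
    fan = [EC2("b1"), EC2("b2")]

    a >> fan         # one directed edge from a to each node in the list
    fan >> EC2("c")  # one directed edge from each node in the list to c
    a - EC2("d")     # a single undirected edge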
Example no. 3
# The first half of this snippet is the tail end of a Google Cloud
# diagram; the lines defining `core` and `ing` (and their enclosing
# `with Diagram(...)` / `with Cluster(...)` blocks) were cut off, so
# only the imports below can be reconstructed with certainty.
from diagrams import Cluster, Diagram
from diagrams.aws.compute import EC2, EKS
from diagrams.aws.database import RDS
from diagrams.aws.management import Cloudwatch
from diagrams.aws.network import ELB, Route53
from diagrams.aws.storage import EBS, S3
from diagrams.gcp.network import DNS, LoadBalancing
from diagrams.gcp.storage import GCS
from diagrams.onprem.client import Client, Users

        core - GCS('Object Storage')
        lb = LoadBalancing('Load Balancer')
        dns = DNS('DNS')
        lb >> ing
        dns >> lb

    [Client('SDKs'), Users('Users')] >> dns

with Diagram('Amazon Web Services resources',
             show=False,
             filename='aws',
             outformat='png'):
    with Cluster('Amazon Web Services'):
        with Cluster('Virtual Private Cloud'):
            kube = EKS('Elastic Kubernetes\nService')
            instance_1 = EC2('EC2 Instance 1\n(m5.xlarge)')
            disk_1 = EBS('EBS Disk 1\n(gp2)')
            instance_2 = EC2('EC2 Instance 2\n(m5.xlarge)')
            disk_2 = EBS('EBS Disk 2\n(gp2)')

            instance_1 - disk_1
            instance_2 - disk_2
            kube - instance_1
            kube - instance_2
            kube - RDS('Amazon RDS\nfor PostgreSQL\n(db.t3.large)')

        kube - S3('S3')
        kube - Cloudwatch('Amazon CloudWatch')
        dns = Route53('Route53')
        lb = ELB('Elastic Load Balancer')
        # ... snippet truncated here: dns and lb are presumably wired
        # up in lines not shown.
Example no. 4
from diagrams import Cluster, Diagram
from diagrams.aws.compute import EKS
from diagrams.aws.network import ELB, InternetGateway, VPCRouter
from diagrams.onprem.network import Internet

# The original `with Diagram(...)` line was cut off in the excerpt;
# the title here is an assumed placeholder.
with Diagram("Three-AZ VPC", show=False):
    # Grouping step
    with Cluster("VPC"):
        igw = InternetGateway("IGW")
        with Cluster("Public Subnet"):
            with Cluster("Availability Zone A"):
                alb_leg_a = ELB("elb-interface-1")
            with Cluster("Availability Zone B"):
                alb_leg_b = ELB("elb-interface-2")
            with Cluster("Availability Zone C"):
                alb_leg_c = ELB("elb-interface-3")

        vpc_router = VPCRouter("Internal router")

        with Cluster("Private Subnet"):
            with Cluster("Availability Zone A"):
                eks1 = EKS("Kubernetes node 1")
            with Cluster("Availability Zone B"):
                eks2 = EKS("Kubernetes node 2")
            with Cluster("Availability Zone C"):
                eks3 = EKS("Kubernetes node 3")

    # Drawing step.
    Internet("Internet") >> igw

    igw >> alb_leg_a
    igw >> alb_leg_b
    igw >> alb_leg_c

    alb_leg_a >> vpc_router
    alb_leg_b >> vpc_router
    alb_leg_c >> vpc_router
Example no. 5
from diagrams import Cluster, Diagram
from diagrams.aws.compute import ECS, EKS, Lambda
from diagrams.aws.database import RDS, Redshift
from diagrams.aws.integration import SQS

with Diagram("axie diagrams", show=False):
    source = EKS("multi-instance manager")
    source2 = EKS("multi-instance manager")  # rendered as an unconnected node

    with Cluster("Axie event processing"):
        with Cluster("multi-instance manager"):
            accounts = [ECS("account1"), ECS("account2"), ECS("account3")]

        with Cluster("fetch current game state and data via the game API"):
            state = SQS("game state")
            data = RDS("game data")

        with Cluster("multi-process YOLO service\n(click positions from rules and AI)"):
            handlers = [Lambda("proc1"), Lambda("proc2"), Lambda("proc3")]

    click_operate = Redshift("perform click actions")

    source >> accounts >> state
    accounts >> data

    data >> handlers
    state >> handlers
    # Presumably the intended final edge; the original defines
    # click_operate but never connects it.
    handlers >> click_operate
Example no. 6
    with Cluster("odhk-data-vpc"):
        cf = CloudFront("odhk-data-cf")
        waf = WAF("odhk-data-waf")
        s3 = S3("odhk-data-s3")
        rt = RouteTable("odhk-data-rt")
        natrt = RouteTable("odhk-data-nat-rt")
        igw = InternetGateway("odhk-data-igw")
        with Cluster("odhk-data-gw-subnet"):
            natgw = NATGateway("odhk-data-nat-gw")
        with Cluster("data-public-subnet"):
            elb = ELB("data-pipeline-elb")
            with Cluster("data-pg-sg"):
                ppg = RDS("pipeline-pg")
                wpg = RDS("warehouse-pg")
        with Cluster("data-pipeline-subnet"):
            eks = EKS("data-pipeline-eks")
            with Cluster("data-pipelie-eks-ng"):
                ng = EC2("data-pipelie-eks-node")
        with Cluster("data-pipelie-redis-sg"):
            ec = ElastiCache("data-pipeline-ec")
        with Cluster("odhk-data-integration-subnet"):
            alb = ELB("odhk-data-integration-alb")
            ecs = ECS("odhk-data-integration-ecs")
            with Cluster("data-integration-tg"):
                node = EC2("data-integration-ecss-node")
                fg = Fargate("odhk-data-integration-fg")

    dns >> cf >> elb >> eks >> ec
    cf - waf
    eks - ng
    ng >> ppg - wpg << office
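
Note on the last line: Python evaluates - before >> and <<, so "ng >> ppg - wpg << office" draws the undirected ppg - wpg edge first and then points both ng and office at wpg, not at ppg. Split the chain into separate statements if a strict left-to-right wiring is intended.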
Example no. 7
from diagrams import Cluster, Diagram
from diagrams.aws.compute import ECS, EKS, Lambda
from diagrams.aws.database import Redshift
from diagrams.aws.integration import SQS
from diagrams.aws.storage import S3
from diagrams.programming.language import Bash

with Diagram("Falco For Security", show=False):
    source = EKS("Syscall Events")

    with Cluster("Falco"):
        with Cluster("Falco Processing"):
            workers = [ECS("Falco Daemon")]

        queue = SQS("Falco Sidekick")

        with Cluster("Sidekick outputs"):
            handlers = [Lambda("slack"), Lambda("logdna"), Lambda("loki")]

    store = S3("store")
    dw = Redshift("analytics")

    rules = Bash("Rules Definitions")
    source >> workers >> queue >> handlers
    rules >> workers
    handlers >> store
    handlers >> dw
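
Every script on this page renders with a plain "python script.py" and needs the Graphviz dot binary on the PATH; show=False writes the image without opening a viewer. When no filename argument is given (all examples except no. 1 and no. 3), diagrams derives the output name from the diagram title, so this last script writes falco_for_security.png.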