from diagrams import Cluster, Diagram
from diagrams.gcp.analytics import BigQuery, Dataflow, PubSub
from diagrams.gcp.compute import AppEngine, Functions
from diagrams.gcp.database import BigTable
from diagrams.gcp.iot import IotCore
from diagrams.gcp.storage import GCS

with Diagram("Message Collecting", show=False):
    # Central message bus: IoT sources publish here, targets consume from it.
    pubsub = PubSub("pubsub")

    with Cluster("Source of Data"):
        # Three IoT cores all publish into the shared Pub/Sub topic.
        cores = [IotCore(f"core{n}") for n in (1, 2, 3)]
        cores >> pubsub

    with Cluster("Targets"):
        with Cluster("Data Flow"):
            # Streaming pipeline that fans messages out to every target below.
            flow = Dataflow("data flow")

        with Cluster("Data Lake"):
            warehouse = BigQuery("bq")
            bucket = GCS("storage")
            flow >> [warehouse, bucket]

        with Cluster("Event Driven"):
            with Cluster("Processing"):
                engine = AppEngine("engine")
                table = BigTable("bigtable")
                flow >> engine >> table

            with Cluster("Serverless"):
                func = Functions("func")
                app = AppEngine("appengine")
                flow >> func >> app

    # Connect the bus to the pipeline once all consumers are declared.
    pubsub >> flow
# ===== Esempio n. 2 =====
from diagrams import Cluster, Diagram
from diagrams.gcp.analytics import BigQuery, Dataflow, PubSub
from diagrams.gcp.compute import AppEngine, Functions
from diagrams.gcp.database import BigTable
from diagrams.gcp.iot import IotCore
from diagrams.gcp.storage import GCS

with Diagram("Media Monitoring Storage Architecture", show=False) as med_diag:
    # Shared message bus and the streaming pipeline it feeds.
    pubsub = PubSub("pubsub")
    flow = Dataflow("DataFlow")

    with Cluster("Data Collection"):
        # Each ingestion webhook is a Cloud Function publishing to Pub/Sub.
        collectors = [
            Functions(name)
            for name in ("RSS Feed Webhook", "Twitter Webhook", "Press Release")
        ]
        collectors >> pubsub >> flow

    with Cluster("Storage"):
        with Cluster("Data Lake"):
            warehouse = BigQuery("BigQuery")
            bucket = GCS("Storage")
            flow >> [warehouse, bucket]

        with Cluster("Event Driven"):
            with Cluster("Processing"):
                gae = AppEngine("GAE")
                table = BigTable("BigTable")
                flow >> gae >> table

            with Cluster("Serverless"):
                fn = Functions("Function")
                app = AppEngine("AppEngine")
                flow >> fn >> app

    # NOTE(review): this repeats the pubsub -> flow edge already created in
    # the collection chain above; kept to mirror the original wiring exactly.
    pubsub >> flow
# ===== Esempio n. 3 =====
            # Messenger module authenticates users against MS AD over HTTPS
            # (label: "user authorization [HTTPS]").
            msad << Edge(color="blue",
                         label="авторизация пользователя [HTTPS]") << msgmod

            # Text-to-speech writes files to Ceph; OCR and speech-to-text read
            # them back (labels: "write file [HTTPS]" / "read file [HTTPS]").
            t2s >> Edge(color="blue", label="запись файла [HTTPS]") >> ceph
            [ocr, s2t] << Edge(color="blue",
                               label="чтение файла [HTTPS]") << ceph
            # Mixed << ... >> draws a bidirectional edge: email and messenger
            # modules both read and write files in Ceph
            # (label: "read/write file [HTTPS]").
            [emailmod, msgmod] << Edge(
                color="blue", label="чтение/запись файла [HTTPS]") >> ceph

            # Knowledge-base module pulls data from Confluence and ITSM and
            # refreshes the KB store over JDBC (labels: "read data [HTTPS]" /
            # "update knowledge base [JDBC]").
            confluence << Edge(color="blue",
                               label="чтение данных [HTTPS]") << kbmod
            itsm << Edge(color="blue", label="чтение данных [HTTPS]") << kbmod
            kb << Edge(color="blue", label="актуализация БДВО [JDBC]") << kbmod

        with Cluster("Data Operations Workflow"):
            # Batch path: logs land in Datastore, then flow through
            # ETL -> catalog -> aligner -> data fabric over JDBC.
            fabric = Dataflow("Data Fabric")
            etl = GKE("Online ETL")
            align = GKE("Data Aligner")
            datalake = Datastore("Log Storage")
            catalog = DataCatalog("Data Catalog")

            emailmod >> Edge(color="blue", label="logging [HTTPS]") >> datalake
            msgmod >> Edge(color="blue", label="logging [HTTPS]") >> datalake
            datalake >> Edge(color="blue", label="batching [JDBC]") >> etl
            etl >> Edge(color="blue", label="batching [JDBC]") >> catalog
            catalog >> Edge(color="blue", label="batching [JDBC]") >> align
            align >> Edge(color="blue", label="batching [JDBC]") >> fabric

        with Cluster("ML Operations Workflow"):
            # NOTE(review): fragment is truncated here — the wiring of these
            # ML nodes continues beyond the visible excerpt.
            textomator = GKE("Workflow Processor")
            jupyter = GKE("Jupyter Notebook")
# ===== Esempio n. 4 =====
from diagrams.aws.integration import SQS
from diagrams.gcp.analytics import Dataflow

with Diagram("Ruler AI", show=True):
    # Front door plus the core game/AI services and the command sink.
    balancer = ELB("lb")
    game_server = ECS("Game Server")
    ai_server = ECS("AI Server")
    command_store = BigTable("Commands")

    with Cluster("DB Cluster"):
        # Shared NPC state read by the planner and written by the collector.
        npc_state = RDS("NPC State")

    with Cluster("Backend Server"):
        with Cluster("Writer"):
            # Write path: game traffic goes through the LB into the collector.
            collector = Dataflow("Collector")
            game_server >> balancer >> collector >> npc_state

        with Cluster("Reader"):
            # Read path: planner consumes state, answers the AI server,
            # and emits commands.
            planner = Dataflow("Planner")
            npc_state >> planner
            ai_server >> planner >> ai_server
            planner >> command_store
            # game_server >> planner >> game_server

with Diagram("Ruler AI Proactive", show=True):
    # NOTE(review): this diagram is truncated in the excerpt — the wiring of
    # these nodes continues beyond the visible lines.
    lb = ELB("lb")

    # Two game-server nodes share the same label; diagrams still treats them
    # as distinct nodes.
    game_server = ECS("Game Server")
    game_server_cp = ECS("Game Server")
    ai_server = ECS("AI Server")
# ===== Esempio n. 5 =====
             graph_attr=graph_attr,
             show=False,
             filename="images/data_science"):
    # External client that ultimately receives the analysis results.
    webshop_1 = Client("Webshop")

    with Cluster("Operational Data Hub Platform"):
        with Cluster("Ingest Project"):
            # Entry function that pushes webshop events onto topic X
            # and relays results back onto topic Z.
            function_1 = Functions("Ingest")

        with Cluster("Operational Data Hub"):
            with Cluster("Pub/Sub Topic X"):
                pubsub_1_1 = PubSub("Subscription XA")

            with Cluster("Pub/Sub Topic Z"):
                pubsub_2_1 = PubSub("Subscription ZA")

        with Cluster("Consume Project"):
            dataflow_1 = Dataflow("Dataflow")

            with Cluster("Analyze"):
                # NOTE(review): `Bigquery` (lowercase q) — the diagrams class
                # is usually `BigQuery`; confirm against the imports above
                # this excerpt.
                bigquery_1 = Bigquery("BigQuery")
                aiplatform_1 = AIPlatform("AI Platform")
                firestore_2 = Firestore("Database")
                dataflow_2 = Dataflow("Backfill/reprocess")

    # Main flow: subscription XA feeds Dataflow, which fans out to the
    # analysis stores; those can be re-read by the backfill pipeline.
    pubsub_1_1 >> dataflow_1
    dataflow_1 >> [bigquery_1, aiplatform_1, firestore_2] >> dataflow_2
    # Result path (orange edges): BigQuery results go back through the ingest
    # function onto topic Z and finally to the webshop.
    bigquery_1 >> Edge(label="Results", color="orange") >> function_1 >> Edge(
        color="orange") >> pubsub_2_1
    pubsub_2_1 >> Edge(label="Results", color="orange") >> webshop_1