from diagrams import Cluster, Diagram
from diagrams.gcp.analytics import BigQuery, Dataflow, PubSub
from diagrams.gcp.compute import AppEngine, Functions
from diagrams.gcp.database import BigTable
from diagrams.gcp.iot import IotCore
from diagrams.gcp.storage import GCS

# Message-collecting architecture: IoT cores publish into a Pub/Sub topic,
# which feeds a Dataflow pipeline that writes to a data lake (BigQuery + GCS)
# and to two event-driven processing paths.
with Diagram("Message Collecting", show=False):
    topic = PubSub("pubsub")

    with Cluster("Source of Data"):
        # Three IoT cores all publish into the shared topic.
        [IotCore(f"core{n}") for n in (1, 2, 3)] >> topic

    with Cluster("Targets"):
        with Cluster("Data Flow"):
            pipeline = Dataflow("data flow")

        with Cluster("Data Lake"):
            lake = [BigQuery("bq"), GCS("storage")]
            pipeline >> lake

        with Cluster("Event Driven"):
            with Cluster("Processing"):
                engine = AppEngine("engine")
                pipeline >> engine >> BigTable("bigtable")

            with Cluster("Serverless"):
                func = Functions("func")
                pipeline >> func >> AppEngine("appengine")

    topic >> pipeline
# ---- 예제 #2 (Example #2) ----

# Multi-cloud setup: Terraform provisions Dask workers on Kubernetes in both
# AWS and GCP; each side writes to its own object store, and a client
# notebook talks to both Dask schedulers.
# NOTE(review): Custom, Client, RS, S3, Edge, terraform_icon and dask_icon
# are presumably imported/defined earlier in the file — confirm.
with Diagram("Multi-Cloud", show=False):
    tf = Custom("Terraform", terraform_icon)
    notebook = Client("Client Notebook")

    with Cluster("AWS"):
        with Cluster("Kubernetes"):
            aws_dask = Custom("\nDask", dask_icon)
            aws_worker = RS("Dask Worker")
            aws_worker >> Edge(color="orange") << aws_dask

        lens_bucket = S3("LENS")

        tf >> aws_worker
        aws_worker >> lens_bucket

    with Cluster("GCP"):
        with Cluster("Kubernetes"):
            gcp_dask = Custom("\nDask", dask_icon)
            gcp_worker = RS("Dask Worker")
            gcp_worker >> Edge(color="orange") << gcp_dask

        era5_bucket = GCS("ERA5")

        tf >> gcp_worker
        gcp_worker >> era5_bucket

    notebook >> aws_dask
    notebook >> gcp_dask
# ---- 예제 #3 (Example #3) ----
from diagrams import Cluster, Diagram, Edge
from diagrams.gcp.compute import GKE
from diagrams.gcp.storage import GCS
from diagrams.k8s.compute import Pod
from diagrams.k8s.others import CRD
from diagrams.onprem.analytics import Spark
from diagrams.onprem.monitoring import Grafana, Prometheus

# Spark-on-GKE monitoring: Prometheus pulls from a push gateway, discovers
# the Spark driver pod through a ServiceMonitor CRD, and Grafana queries
# Prometheus for dashboards.
with Diagram(show=False, outformat="png",
             filename="diagrams/tbd",
             direction="LR",
             node_attr={"fontsize": "14"},
             edge_attr={"labelfontsize": "14"}):
    with Cluster("Google Cloud") as cloud:
        storage = GCS("Google Cloud Storage")
        with Cluster("Kubernetes cluster"):
            with Cluster("Monitoring") as monitoring:
                prom = Prometheus("Prometheus")
                gateway = Prometheus("Prometheus Gateway")
                # Prometheus scrapes the push gateway rather than the jobs.
                gateway << Edge(label="pull metrics") << prom
                monitor = CRD("ServiceMonitor")
                monitor << Edge(label="watch") << prom
                prom << Edge(label="query metrics") << Grafana("Grafana")
            with Cluster("Apache Spark application") as spark:
                Spark()
                driver = Pod("driver")
                driver << monitor
                # Only the first executor is wired to the driver here;
                # the others are rendered unconnected, as in the original.
                executors = [Pod(f"executor {i}") for i in (1, 2, 3)]
                driver << executors[0]
# ---- 예제 #4 (Example #4) ----
from diagrams import Cluster, Diagram  # fix: was missing, but Diagram/Cluster are used below
from diagrams.gcp.analytics import BigQuery, Dataflow, PubSub
from diagrams.gcp.compute import AppEngine, Functions
from diagrams.gcp.database import BigTable
from diagrams.gcp.iot import IotCore
from diagrams.gcp.storage import GCS

# Media-monitoring architecture: three webhook Cloud Functions publish into
# Pub/Sub; a Dataflow pipeline fans the stream out to a data lake
# (BigQuery + GCS) and to event-driven processing sinks.
with Diagram("Media Monitoring Storage Architecture", show=False) as med_diag:
    pubsub = PubSub("pubsub")
    flow = Dataflow("DataFlow")

    with Cluster("Data Collection"):
        [
            Functions("RSS Feed Webhook"),
            Functions("Twitter Webhook"),
            Functions("Press Release")
        ] >> pubsub >> flow

    with Cluster("Storage"):
        with Cluster("Data Lake"):
            flow >> [BigQuery("BigQuery"), GCS("Storage")]

        with Cluster("Event Driven"):
            with Cluster("Processing"):
                flow >> AppEngine("GAE") >> BigTable("BigTable")

            with Cluster("Serverless"):
                flow >> Functions("Function") >> AppEngine("AppEngine")

    pubsub >> flow

med_diag  # no-op at script level; presumably displayed inline in a notebook
# ---- 예제 #5 (Example #5) ----
from diagrams import Diagram, Cluster
from diagrams.gcp.database import Firestore
from diagrams.gcp.network import CDN, DNS, LoadBalancing
from diagrams.gcp.storage import GCS
from diagrams.gcp.compute import GKE
from diagrams.onprem.compute import Server
from diagrams.oci.network import Internetgateway

# Gran GCP design: a Nuxt.js client enters through an internet gateway;
# Cloud DNS routes static content through the CDN to a storage bucket and
# dynamic traffic through the load balancer to GKE, which reads Firestore.
with Diagram("Gran - GCP Design", show=False):
    client = Server("Client (Nuxt.js)")

    with Cluster("Google Cloud Platform"):
        db = Firestore("Firebase Firestore")
        content_cdn = CDN("Cloud CDN")
        cloud_dns = DNS("Cloud DNS")
        balancer = LoadBalancing("Cloud Load Balancing")
        bucket = GCS("Cloud Storage")
        app_cluster = GKE("Kubernetes Engine")
        gateway = Internetgateway("Gateway")

        # Static path vs. dynamic path.
        cloud_dns >> content_cdn >> bucket
        cloud_dns >> balancer >> app_cluster >> db

    client >> gateway
    gateway >> cloud_dns
# ---- 예제 #6 (Example #6) ----
    # Sysdig "Secure for Cloud" on GCP — fragment of a larger diagram body.
    # NOTE(review): this appears to be the interior of an enclosing
    # `with Diagram(...)`; Custom, General, Iam, PubSub, Code, Run, GCS,
    # KMS, Build, GCR and Edge are presumably imported above — confirm.
    with Cluster("GCP account (sysdig)", graph_attr={"bgcolor": "lightblue"}):
        sds = Custom("Sysdig Secure", "../../resources/diag-sysdig-icon.png")
        bench = General("Cloud Bench")
        # Sysdig triggers benchmark runs on a (randomized) cron schedule.
        sds >> Edge(label="schedule on rand rand * * *") >> bench

    with Cluster("GCP organization project", graph_attr={"bgcolor": "pink"}):
        ccProjectSink = Custom("\nLog Router \n Sink",
                               "../../resources/sink.png")
        orgBenchRole = Iam("Cloud Bench Role")

    with Cluster("Secure for Cloud (children project)"):
        ccBenchRole = Iam("Cloud Bench Role")
        ccPubSub = PubSub("CC PubSub Topic")
        ccEventarc = Code("CloudRun\nEventarc Trigger")
        ccCloudRun = Run("Cloud Connector")
        bucket = GCS("Bucket\nCC Config")
        keys = KMS("Sysdig \n Secure Keys")

        # Dashed edges: configuration/credential reads, not event flow.
        ccCloudRun << Edge(style="dashed") << keys
        bucket << Edge(style="dashed",
                       label="Get CC \n config file") << ccCloudRun
        # Event path: log-router sink -> Pub/Sub -> Eventarc -> Cloud Run.
        ccEventarc >> ccCloudRun
        ccEventarc << ccPubSub
        ccProjectSink >> ccPubSub

        # Registry-scanning side: some of these nodes (gcrPubSub,
        # csCloudBuild, gcr) are not wired here — possibly connected in
        # lines beyond this fragment.
        gcrPubSub = PubSub("GCR PubSub Topic\n(gcr named)")
        gcrSubscription = Code("GCR PubSub\nSubscription")
        csCloudBuild = Build("Triggered\n Cloud Builds")
        gcr = GCR("Google \n Cloud Registry")

        gcrSubscription >> ccCloudRun
# ---- 예제 #7 (Example #7) ----
        # Fragment: tail of a GCP resources diagram. Depends on names defined
        # before this excerpt (node_1..node_3, instance_1, disk_1,
        # pv_workspace, core, ing) — not visible here; verify against the
        # full file.
        disk_2 = PersistentDisk('OS Disk 2')
        instance_3 = ComputeEngine('Server 3')
        disk_3 = PersistentDisk('OS Disk 3')

        pv_disk = PersistentDisk('SSD Disk 1')
        # Undirected `-` edges pair each k8s node with its VM and each VM
        # with its OS disk.
        node_1 - instance_1
        node_2 - instance_2
        node_3 - instance_3
        instance_1 - disk_1
        instance_2 - disk_2
        instance_3 - disk_3
        instance_3 - pv_disk
        pv_workspace - pv_disk
        core - SQL('PostgreSQL')

        core - GCS('Object Storage')
        lb = LoadBalancing('Load Balancer')
        dns = DNS('DNS')
        # Inbound traffic: DNS -> load balancer -> ingress (`ing`, defined
        # above this excerpt).
        lb >> ing
        dns >> lb

    [Client('SDKs'), Users('Users')] >> dns

# Second diagram of this example; its body appears truncated at the end of
# this excerpt.
with Diagram('Amazon Web Services resources',
             show=False,
             filename='aws',
             outformat='png'):
    with Cluster('Amazon Web Services'):
        with Cluster('Virtual Private Cloud'):
            kube = EKS('Elastic Kubernetes\nService')
            instance_1 = EC2('EC2 Instance 1\n(m5.xlarge)')
# ---- 예제 #8 (Example #8) ----
from diagrams import Diagram, Cluster
from diagrams.gcp.compute import GCF
from diagrams.gcp.storage import GCS

from diagrams.onprem.iac import Terraform
from diagrams.programming.language import Python

# Shrink the Terraform and Python node icons slightly for this diagram.
for _node_cls in (Terraform, Python):
    _node_cls._height = 0.9

# Shared Diagram options (left-to-right layout, rendered to docs/diagram).
diagram_kwargs = {"direction": "LR", "filename": "docs/diagram", "show": False}


# GCF data-mining example: source code and Terraform state buckets feed the
# build/deploy tools, which produce a Cloud Function writing to a data bucket.
with Diagram("GCF Data Mining Example", **diagram_kwargs):

    with Cluster("DevOps & Source Code") as devops:
        src_bucket = GCS("Source Code Bucket")
        py = Python()

        state_bucket = GCS("Terraform State Bucket")
        tf = Terraform()

    with Cluster("Application") as app:
        miner = GCF("Data Mining Service")
        data_bucket = GCS("Data Bucket")

    src_bucket >> py >> miner
    state_bucket >> tf >> miner
    miner >> data_bucket
# ---- 예제 #9 (Example #9) ----
from diagrams import Cluster, Diagram  # fix: was missing, but Diagram/Cluster are used below
from diagrams.gcp.analytics import PubSub
from diagrams.gcp.compute import Functions
from diagrams.gcp.storage import GCS
from diagrams.gcp.analytics import BigQuery
from diagrams.gcp.database import Firestore
from diagrams.gcp.devtools import Scheduler

# Strava leaderboard ELT pipeline: a scheduler drives worker functions that
# extract/load raw data; a transform stage produces the analysis dataset.
# NOTE: show=True opens the rendered image, as in the original example.
with Diagram("Strava Leaderboard Architecture Diagram ", show=True):
    source = Functions("generate grid")

    with Cluster("Data Pipeline"):
        grid_queue = PubSub("grid queue")  # fix: local typo "gird_queue" (name was never referenced)

        credential = Firestore("credentials store")

        store = GCS("raw JSON")

        with Cluster("Extract-Load"):
            with Cluster("scheduler"):
                scheduler = Scheduler("scheduler")
                schedule_queue = PubSub("schedule queue")

            extract_load = Functions("worker")

            staging = BigQuery("BigQuery staging dataset")

        with Cluster("Transform"):
            transform = Functions("transform worker")
            analysis = BigQuery("BigQuery analysis dataset")

    # Only the scheduling path is wired; the remaining nodes are rendered
    # unconnected, exactly as in the original example.
    scheduler >> schedule_queue >> extract_load
# ---- 예제 #10 (Example #10) ----
# NOTE(review): this `}` closes a graph_attr dict whose opening lines are
# above this excerpt; BareMetal, TPU, GCS, SQL, Filestore, Memorystore,
# Polyaxon and Edge are presumably imported there too — confirm.
}

with Diagram("GCP Architecture",
             show=False,
             outformat="png",
             graph_attr=graph_attr):

    with Cluster("Local") as local:
        local_machine = BareMetal("Local machine")

    with Cluster("Google Cloud") as gcp:

        tpu = TPU("Cloud TPU")

        with Cluster("Google Cloud Storage") as gcs:
            bucket_data = GCS("polyaxon data")
            bucket_outputs = GCS("polyaxon outputs")
            bucket_logs = GCS("polyaxon logs")

        with Cluster("Private VPC") as vpc:
            cloud_sql = SQL("CloudSQL\n(Postgres)")
            polyaxon_repos = Filestore("Google Filestore\n(polyaxon-repos)")
            redis = Memorystore("Cloud Memorystore\n(optional)")

            with Cluster("GKE") as gke:
                polyaxon = Polyaxon("polyaxon")

    # Operator reaches Polyaxon over an SSH tunnel; Polyaxon fans out to all
    # backing services. NOTE(review): the list below is truncated in this
    # excerpt — the closing `]` lies beyond the visible lines.
    local_machine - Edge(label="SSH tunnel", minlen="3.0") - polyaxon
    polyaxon - [
        cloud_sql, polyaxon_repos, redis, tpu, bucket_data, bucket_logs,
        bucket_outputs
# ---- 예제 #11 (Example #11) ----
from diagrams import Cluster, Diagram, Edge
from diagrams.onprem.client import Users
from diagrams.gcp.analytics import BigQuery, Dataflow, PubSub, DataCatalog
from diagrams.gcp.compute import AppEngine, Functions, GPU, ComputeEngine, GKE
from diagrams.gcp.database import BigTable, Spanner, Firestore, SQL, Datastore
from diagrams.gcp.storage import GCS

# Reference architecture of a service-center robot (labels are in Russian;
# GCP icons are used generically for corporate systems). This diagram body
# appears truncated at the end of the excerpt.
with Diagram("Референсная архитектура робота сервисного центра"):
    users = Users("Пользователи")                 # end users
    analytics = Users("Аналитики заказчика")      # customer analysts
    modellers = Users("Модельеры исполнителя")    # contractor model builders

    # Corporate systems the robot integrates with.
    with Cluster("Корпоративные сиcтемы"):
        itsm = GCS("ITSM")
        exchange = GCS("Exchange")
        teams = GCS("Teams")
        msad = GCS("AD")
        confluence = GCS("Confluence")

    # Robot components cluster: main business process with inbound-traffic
    # handling and shared NLP/speech components.
    with Cluster("Кластер с компонентами робота"):
        with Cluster("Основной бизнес-процесс"):
            with Cluster("Обработка входящего трафика"):
                emailmod = GKE("Обработка писем")     # e-mail processing
                msgmod = GKE("Обработка сообщений")   # message processing
            with Cluster("Общие компоненты"):
                ocr = GKE("OCR")
                s2t = GKE("Speech to Text")
                t2s = GKE("Text to Speech")
                cls = GKE("Text Classifiers")
                ceph = GCS("Object Storage")
            with Cluster("База знаний"):
# ---- 예제 #12 (Example #12) ----
from diagrams import Cluster, Diagram
from diagrams.gcp.analytics import BigQuery, Dataflow, PubSub
from diagrams.gcp.compute import AppEngine, Functions
from diagrams.gcp.database import BigTable
from diagrams.gcp.iot import IotCore
from diagrams.gcp.storage import GCS

# Message-collecting pipeline: three IoT cores publish to Pub/Sub, which
# feeds a Dataflow stage; Dataflow writes to the data lake and to two
# event-driven targets.
with Diagram("Message Collecting", show=False):
    ingress = PubSub("pubsub")

    with Cluster("Source of Data"):
        cores = [IotCore("core1"), IotCore("core2"), IotCore("core3")]
        cores >> ingress

    with Cluster("Targets"):
        with Cluster("Data Flow"):
            etl = Dataflow("data flow")

        with Cluster("Data Lake"):
            etl >> [BigQuery("bq"), GCS("storage")]

        with Cluster("Event Driven"):
            with Cluster("Processing"):
                etl >> AppEngine("engine") >> BigTable("bigtable")

            with Cluster("Serverless"):
                etl >> Functions("func") >> AppEngine("appengine")

    ingress >> etl