from diagrams import Cluster, Diagram
from diagrams.onprem.database import Postgresql
from diagrams.onprem.inmemory import Memcached, Redis
from diagrams.onprem.network import Haproxy, Nginx
from diagrams.onprem.queue import Celery, Rabbitmq


def simple_main():
    # graph_attr holds the parameters used to build the diagram.
    graph_attr = {"fontsize": "32", "fontcolor": "#1D3B52", "bgcolor": "white"}

    # Create the base diagram for our map
    with Diagram('fboaventura.dev', direction='LR', filename='simple_fboaventura_dev',
                 outformat='png', graph_attr=graph_attr, show=False) as diag:
        # Add the nodes to the map
        ingress = Haproxy('loadbalancer')
        webserver = Nginx('django')
        db = Postgresql('database')
        memcached = Memcached('sessions')

        # Create a cluster for the Celery components, which work together
        with Cluster('Celery'):
            beat = Celery('beat')
            workers = [Celery('worker1'), Celery('worker2')]
            flower = Celery('flower')

        broker = Rabbitmq('broker')
        logs = Redis('logs')

        # Build the map of relationships between the nodes
        ingress >> webserver
        webserver >> broker
        beat >> broker
        workers >> beat
        webserver >> db
        db >> broker
        webserver >> memcached
        broker >> logs
        workers >> logs
        flower >> beat
        flower >> workers
        beat >> logs
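# A minimal usage sketch (the entry-point guard is an addition, not part of the
# original snippet). Running the script renders the diagram to
# "simple_fboaventura_dev.png" in the current directory; show=False only skips
# opening the image afterwards, the file is still written.
if __name__ == "__main__":
    simple_main()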
#
#  (C) Copyright 2021  Pavel Tisnovsky
#
#  All rights reserved. This program and the accompanying materials
#  are made available under the terms of the Eclipse Public License v1.0
#  which accompanies this distribution, and is available at
#  http://www.eclipse.org/legal/epl-v10.html
#
#  Contributors:
#      Pavel Tisnovsky
#

from diagrams import Diagram
from diagrams.onprem.queue import Kafka, Rabbitmq
from diagrams.programming.language import Go

# a new diagram with its basic properties set
with Diagram("OnPrem #1", show=True):
    # node definition - consumer
    consumer = Kafka("input stream")
    # node definition - worker
    worker = Go("worker")
    # node definition - producer
    producer = Rabbitmq("output stream")

    # connect the graph nodes with directed edges
    consumer >> worker >> producer
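# A minimal sketch of the same pipeline with labelled edges (the labels and the
# dashed style are illustrative additions, not part of the example above).
# diagrams.Edge attaches a label, colour, or style to a connection instead of a
# bare ">>".
from diagrams import Diagram, Edge
from diagrams.onprem.queue import Kafka, Rabbitmq
from diagrams.programming.language import Go

with Diagram("OnPrem #1 with edge labels", show=False):
    consumer = Kafka("input stream")
    worker = Go("worker")
    producer = Rabbitmq("output stream")

    # label the hops between the nodes
    consumer >> Edge(label="consume") >> worker >> Edge(label="publish", style="dashed") >> producer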
# Same imports as in the simple_main() example above
from diagrams import Cluster, Diagram
from diagrams.onprem.database import Postgresql
from diagrams.onprem.inmemory import Memcached, Redis
from diagrams.onprem.network import Haproxy, Nginx
from diagrams.onprem.queue import Celery, Rabbitmq

# graph_attr holds the parameters used to build the diagram (as defined above)
graph_attr = {"fontsize": "32", "fontcolor": "#1D3B52", "bgcolor": "white"}

# Create the base diagram for our map
with Diagram('fboaventura.dev', direction='LR', filename='simple_fboaventura_dev',
             outformat='png', graph_attr=graph_attr) as diag:
    # Add the nodes to the map
    ingress = Haproxy('loadbalancer')
    webserver = Nginx('django')
    db = Postgresql('database')
    memcached = Memcached('sessions')

    # Create a cluster for the Celery components, which work together
    with Cluster('Celery'):
        beat = Celery('beat')
        workers = [Celery('worker1'), Celery('worker2')]
        flower = Celery('flower')

    broker = Rabbitmq('broker')
    logs = Redis('logs')

    # Build the map of relationships between the nodes
    ingress >> webserver
    webserver >> broker
    beat >> broker
    workers >> beat
    webserver >> db
    db >> broker
    webserver >> memcached
    broker >> logs
    workers >> logs
    flower >> beat
    flower >> workers
    beat >> logs
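# A minimal sketch of the output settings used above (the SVG filename and the
# single Nginx node are illustrative). The outformat argument also accepts other
# Graphviz formats such as "svg" or "pdf", and direction can be "TB", "BT", "LR"
# or "RL".
from diagrams import Diagram
from diagrams.onprem.network import Nginx

with Diagram('fboaventura.dev (svg)', direction='TB', filename='fboaventura_dev_svg',
             outformat='svg', show=False):
    Nginx('django')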
from diagrams import Cluster, Diagram
from diagrams.aws.compute import EC2
from diagrams.onprem.database import Mongodb
from diagrams.onprem.queue import Rabbitmq
from diagrams.gcp.analytics import BigQuery, Dataflow, PubSub
from diagrams.programming.language import Nodejs
from diagrams.onprem.monitoring import Grafana, Prometheus
from diagrams.aws.database import RDS
from diagrams.aws.network import ELB
from diagrams.aws.storage import S3
from diagrams.onprem.network import Traefik
from diagrams.custom import Custom
from diagrams.onprem.tracing import Jaeger
from diagrams.onprem.database import Cassandra

with Diagram("OpenFlow Basic"):
    with Cluster("Backend"):
        b = [Mongodb("MongoDB"),
             Rabbitmq("RabbitMQ")]

    with Cluster("Remote Clients"):
        rc = [Custom("OpenRPA", "./my_resources/open_rpa128.png"),
              Custom("PowerShell", "./my_resources/PowerShell_5.0_icon.png"),
              Custom("NodeRED", "./my_resources/node-red-icon.png")]

    with Cluster("Frontend + API"):
        api = EC2("WEB-API")
        Custom("NodeRED", "./my_resources/node-red-icon.png")

    b << api
    api << rc

with Diagram("OpenFlow with Traefik"):
    with Cluster("Backend"):
        b = [Mongodb("MongoDB"),
             Rabbitmq("RabbitMQ")]

    with Cluster("Remote Clients"):
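# A minimal sketch of how the Custom nodes above resolve their icons (assumption:
# "./my_resources/node-red-icon.png" already exists on disk; the diagram name is
# illustrative). Custom takes a label and a path to any image file, which is why
# the ./my_resources/ icons must be present before rendering.
from diagrams import Diagram
from diagrams.custom import Custom

with Diagram("Custom node sketch", show=False):
    # reuse a locally stored icon as the node image
    Custom("NodeRED", "./my_resources/node-red-icon.png")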
from diagrams import Cluster, Diagram
from diagrams.onprem.analytics import Flink, Hadoop, Spark
from diagrams.onprem.queue import Activemq, Kafka, Rabbitmq, Zeromq
from diagrams.azure.analytics import Hdinsightclusters
from diagrams.alibabacloud.analytics import ElaticMapReduce
from diagrams.aws.analytics import EMR

node_attr = {
    "fontsize": "20"
}

graph_attr = {
    "fontsize": "28"
}

with Diagram("", show=False, node_attr=node_attr):
    with Cluster("Brokers", graph_attr=graph_attr):
        kafka = Kafka("\nKafka")
        activemq = Activemq("\nActiveMQ")
        rabbitmq = Rabbitmq("\nRabbitMQ")
        zeromq = Zeromq("\nZeroMQ")
        kafka - activemq
        rabbitmq - zeromq

    with Cluster("Speed Layer", graph_attr=graph_attr):
        kstream = Kafka("\nKafka\nStreams")
        sparks = Spark("\nSpark Streaming")
        flink = Flink("\nFlink")
        # stream_group = [kstream, sparks, flink]
        kstream - [sparks] - flink

    with Cluster("Batch Layer", graph_attr=graph_attr):
        hdfs = Hadoop("\nHDFS")

    with Cluster("Serving Layer", graph_attr=graph_attr):
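# A minimal sketch of the attribute dictionaries used above (the specific values
# here are illustrative). node_attr, edge_attr and graph_attr are passed straight
# through to Graphviz, so any dot attribute name works, and a Cluster can override
# the diagram-level graph_attr with its own.
from diagrams import Cluster, Diagram
from diagrams.onprem.queue import Kafka

with Diagram("attr sketch", show=False,
             graph_attr={"fontsize": "28", "bgcolor": "transparent"},
             node_attr={"fontsize": "20"},
             edge_attr={"color": "darkgreen"}):
    with Cluster("Brokers", graph_attr={"fontsize": "24"}):
        Kafka("Kafka")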