Example #1
def cb(c):
    with Diagram('cb', show=False):
        _kafka = Kafka('Kafka')
        _zk = Zookeeper('Zookeeper')
        _logstash = Logstash('Logstash')
        _elasticsearch = Elasticsearch('Elasticsearch')
        _cb_man = Python('cb-manager')

        with Cluster('elasticsearch-config'):
            _elasticsearch_cfg = [
                CM('elasticsearch.yml'),
                CM('log4j2.properties')
            ]
        _ = _elasticsearch_cfg - _elasticsearch

        with Cluster('logstash-config'):
            _logstash_cfg = [CM('logstash.yml'), CM('log4j2.properties')]
        _ = _logstash_cfg - _logstash

        with Cluster('logstash-pipeline'):
            _logstash_pipe = [CM('data.conf')]
        _ = _logstash_pipe - _logstash

        _zk - _kafka >> _logstash >> _elasticsearch << _cb_man
        _logstash << _cb_man >> _kafka
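Examples #1 to #3 are pasted here without their import header. A minimal header that should let them render, assuming the module layout of recent mingrammer/diagrams releases, could look like the sketch below; a dedicated Zookeeper node is not guaranteed to exist in every release, so it is aliased to the Kafka icon purely as a stand-in.

from diagrams import Cluster, Diagram
from diagrams.elastic.elasticsearch import Beats, Elasticsearch, Kibana, Logstash
from diagrams.k8s.podconfig import CM
from diagrams.onprem.queue import Kafka
from diagrams.programming.language import Python

# Stand-in only: reuse the Kafka icon for Zookeeper so the snippets run as-is;
# the original project may import or define a real Zookeeper node class.
Zookeeper = Kafka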
Example #2

def cb(c):
    with Diagram('Context Broker Pod',
                 filename='cb',
                 show=False,
                 graph_attr={'pad': '0.0'}):
        _kafka = Kafka('Kafka')
        _zk = Zookeeper('Zookeeper')
        _logstash = Logstash('Logstash')
        _elasticsearch = Elasticsearch('Elasticsearch')
        _kibana = Kibana('Kibana')
        _cb_man = Python('cb-manager')

        with Cluster('elasticsearch-config'):
            _elasticsearch_cfg = [
                CM('elasticsearch.yml'),
                CM('log4j2.properties')
            ]
        _ = _elasticsearch_cfg - _elasticsearch

        with Cluster('logstash-config'):
            _logstash_cfg = [CM('logstash.yml'), CM('log4j2.properties')]
        _ = _logstash_cfg - _logstash

        with Cluster('logstash-pipeline'):
            _logstash_pipe = [CM('data.conf')]
        _ = _logstash_pipe - _logstash

        _zk - _kafka >> _logstash >> _elasticsearch << _cb_man
        _elasticsearch << _kibana
        _logstash << _cb_man >> _kafka
Example #3
def cb(c, version):
    with Diagram(f'Context Broker (ver. {version}) Pod', filename=f'cb-{version}', show=False, graph_attr={'pad': '0.0'}):
        _metricbeat = Beats('Metricbeat')
        _heartbeat = Beats('Heartbeat')
        _kafka = Kafka('Kafka')
        _zk = Zookeeper('Zookeeper')
        _logstash = Logstash('Logstash')
        _elasticsearch = Elasticsearch('Elasticsearch')
        _kibana = Kibana('Kibana')

        with Cluster('elasticsearch-config'):
            _elasticsearch_cfg = [CM(f'elasticsearch-{version}.yml'), CM('log4j2.properties')]
        _ = _elasticsearch_cfg - _elasticsearch

        with Cluster('heartbeat-config'):
            _heartbeat_cfg = [CM('heartbeat.yml')]
        _ = _heartbeat_cfg - _heartbeat

        with Cluster('heartbeat-monitor'):
            _heartbeat_monitor = [CM('elasticsearch.yml'), CM('host.yml'), CM('kafka.yml'),
                                  CM('kibana.yml'), CM('logstash.yml'), CM('zookeeper.yml')]
        _ = _heartbeat_monitor - _heartbeat

        with Cluster('kibana-config'):
            _kibana_cfg = [CM('kibana.yml')]
        _ = _kibana_cfg - _kibana

        with Cluster('logstash-config'):
            _logstash_cfg = [CM('logstash.yml'), CM('pipelines.yml'), CM('log4j2.properties')]
        _ = _logstash_cfg - _logstash

        with Cluster('logstash-pipeline'):
            _logstash_pipe = [CM('apache.conf'), CM('mysql.conf'), CM('ssh-server.conf'), CM('system.conf')]
        _ = _logstash_pipe - _logstash

        with Cluster('metricbeat-config'):
            _metricbeat_cfg = [CM('metricbeat.yml')]
        _ = _metricbeat_cfg - _metricbeat

        with Cluster('metricbeat-modules'):
            _metricbeat_mod = [CM('kafka.yml')]
        _ = _metricbeat_mod - _metricbeat

        _zk - _kafka >> _logstash >> _elasticsearch
        _elasticsearch << _kibana
        _logstash << _metricbeat
        _logstash << _heartbeat
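The `c` argument suggests these cb functions are meant to be Invoke tasks rather than plain scripts. A hypothetical tasks.py registration (the decorator and the default value are assumptions, not shown in the source) would be:

from invoke import task

@task
def cb(c, version='latest'):
    # body as in Example #3 above; with that body,
    # `invoke cb --version latest` writes cb-latest.png next to tasks.py
    ...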
Example #4
# kubernetes-diagram.py
# Run `python3 cyri-lan-archi-diagram.py` to generate the PNG file.
from diagrams import Cluster, Diagram
from diagrams.generic.network import Switch, Router
from diagrams.generic.storage import Storage
from diagrams.k8s.compute import Pod
from diagrams.k8s.network import Ingress, Service
from diagrams.k8s.storage import PV, PVC, StorageClass
from diagrams.elastic.elasticsearch import Elasticsearch, Logstash, Kibana
from diagrams.oci.connectivity import DNS
from diagrams.onprem.compute import Server, Nomad

with Diagram("Kubernetes Diagram", show=False):
    synology = DNS("reverse DNS")

    with Cluster("RaspberryPi4 + K3S"):
        ingress = Ingress("cyri.intra")
        svc = Service("services")
        pvc = PVC("pv claim")
        with Cluster("apps"):
            logstash = Logstash("logstash-oss")
            elasticsearch = Elasticsearch("elasticsearch")
            squid = Server("squid")
            elk = [elasticsearch - logstash - Kibana("kibana")]
        with Cluster("local-storage"):
            pv = [StorageClass("storage class") >> PV("persistent volume")]
        k8s = ingress >> svc
        k8s >> squid >> pvc << pv
        k8s >> logstash >> pvc << pv

    synology << ingress
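Example #4 leans entirely on the operator overloads of the diagrams package: `>>` draws an arrow left to right, `<<` draws it right to left, and `-` draws an undirected line. A minimal standalone sketch of just that mechanic:

from diagrams import Diagram
from diagrams.k8s.compute import Pod
from diagrams.k8s.network import Ingress, Service

with Diagram("operator-demo", show=False):
    ing = Ingress("ingress")
    svc = Service("service")
    pod = Pod("pod")
    ing >> svc >> pod  # arrows: ingress -> service -> pod
    pod << ing         # arrow: ingress -> pod (<< reverses the direction)
    svc - pod          # plain line, no arrowhead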
Example #5
def main():
    graph_attr = {
        "fontsize": "45",
        'overlap_scaling': '100',
        'size': '24!',
        'ratio': 'expand'
    }

    with Diagram(name='Automation Framework Swarm', direction='LR', graph_attr=graph_attr):
        with Cluster('Docker Cluster'):
            docker = Docker('Docker')

            with Cluster('container1'):
                python_container = Python('APIs\nOther Microservices')

        with Cluster('Kafka Cluster'):
            with Cluster('Zookeeper'):
                Zookeeper('Zookeeper\ntcp:2181')

            with Cluster('REST Proxy'):
                rest_proxy = Custom('REST Proxy\ntcp:8082', 'custom_icons/REST-API.png')

            with Cluster('Control Center'):
                control_center = Kafka('Control Center\ntcp:9021')

            with Cluster('Schema Registry'):
                schema_registry = Storage('Schema Registry\ntcp:8081')

            with Cluster('Brokers'):
                broker_1 = Kafka('Broker 1\ntcp:9092')
                kafka_brokers = [
                    broker_1,
                    Kafka('Broker 2\ntcp:9093'),
                    Kafka('Broker 3\ntcp:9094')
                ]

        with Cluster('Secrets Managers'):
            vault = Vault('HashiCorp Vault\ntcp:8200')
            secrets_managers = [
                vault,
            ]

        with Cluster('Logging and Search'):
            with Cluster('Search and Logging'):
                elastic_search = Elasticsearch('Elastic Search\ntcp:9200')
                kibana = Kibana('Kibana\ntcp:5601')
                logstash = Logstash('Logstash\ntcp:5044')
                search_log = [
                    elastic_search,
                    kibana,
                    logstash
                ]

        with Cluster('Inventory and Connectivity'):
            with Cluster('Inventory'):
                nautobot = Custom('Nautobot\ntcp:8000', 'custom_icons/Nautobot.jpeg')

        kafka_brokers - python_container

        python_container - vault

        python_container - nautobot

        nautobot - logstash
        python_container - logstash
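Like the earlier snippets, Examples #5 and #7 omit their imports; the custom_icons/*.png and *.jpeg files are local assets shipped with the original script. Assuming the usual diagrams module layout (and again aliasing Zookeeper to the Kafka icon as a stand-in), a header along these lines should cover both examples:

from diagrams import Cluster, Diagram
from diagrams.custom import Custom
from diagrams.elastic.elasticsearch import Elasticsearch, Kibana, Logstash
from diagrams.generic.storage import Storage
from diagrams.onprem.ci import Teamcity as TC
from diagrams.onprem.container import Docker
from diagrams.onprem.database import Influxdb, Mongodb
from diagrams.onprem.monitoring import Grafana
from diagrams.onprem.queue import Kafka
from diagrams.onprem.security import Vault
from diagrams.programming.language import Python

Zookeeper = Kafka  # stand-in, see the note under Example #1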
Example #6

    # Step 3
    with Diagram(show=True, filename=files[2]):
        with Cluster("Web"):
            web_clients = [React("Client 1"), React("Client 2")]
        with Cluster("API and Database"):
            with Cluster("Heroku"):
                hasura = Server("GraphQL")
            with Cluster("Aiven"):
                pg = PostgreSQL("DB")
        web_clients << Edge(color="green") >> hasura >> Edge(
            color="green") << pg

        with Cluster("Aiven"):
            kfk = Kafka("Kafka")
        web_clients << Edge(color="red", label="Produce/Consume") >> kfk
        kfk >> Edge(color="red", label="Postgres Sink Connector") >> pg

        with Cluster("Message Search"):
            es = Elasticsearch("Elasticsearch")
        kfk >> Edge(color="blue", label="Elasticsearch Sink Connector") >> es
        es << Edge(color="blue", label="Search") >> web_clients

    # Step 4
    with Diagram(show=True, filename=files[3]):
        with Cluster("Web"):
            web_clients = [React("Client 1"), React("Client 2")]
        with Cluster("API and Database"):
            with Cluster("Heroku"):
                hasura = Server("GraphQL")
            with Cluster("Aiven"):
                pg = PostgreSQL("DB")
        web_clients << Edge(color="green") >> hasura >> Edge(
            color="green") << pg
Example #7
def main():
    graph_attr = {
        "fontsize": "45",
        'overlap_scaling': '100',
        'size': '24!',
        'ratio': 'expand'
    }

    with Diagram(name='Automation Framework Compose',
                 direction='LR',
                 graph_attr=graph_attr):
        with Cluster('Docker Cluster'):
            docker = Docker('Docker')

            with Cluster('container1'):
                python_container = Python('APIs\nOther Microservices')

            with Cluster('Docker Registry'):
                docker_registry_container = Docker('Docker Registry\ntcp:5000')

            with Cluster('Docker Registry Browser'):
                docker_registry_browser_container = Python(
                    'Docker Registry Browser\ntcp:8088')

            with Cluster('BatFish'):
                batfish_container = Custom(
                    'BatFish\ntcp:8888\ntcp:9997\ntcp:9996',
                    'custom_icons/BatFish.png')

        with Cluster('Kafka Cluster'):
            with Cluster('Zookeeper'):
                Zookeeper('Zookeeper\ntcp:2181')

            with Cluster('REST Proxy'):
                rest_proxy = Custom('REST Proxy\ntcp:8082',
                                    'custom_icons/REST-API.png')

            with Cluster('Control Center'):
                control_center = Kafka('Control Center\ntcp:9021')

            with Cluster('Schema Registry'):
                schema_registry = Storage('Schema Registry\ntcp:8081')

            with Cluster('Brokers'):
                broker_1 = Kafka('Broker 1\ntcp:9092')
                kafka_brokers = [
                    broker_1,
                    Kafka('Broker 2\ntcp:9093'),
                    Kafka('Broker 3\ntcp:9094')
                ]

        with Cluster('Secrets Managers'):
            vault = Vault('HashiCorp Vault\ntcp:8200')
            secrets_managers = [
                vault,
            ]

        with Cluster('Logging and Search'):
            with Cluster('ELK Stack'):
                elastic_search = Elasticsearch('Elastic Search\ntcp:9200')
                kibana = Kibana('Kibana\ntcp:5601')
                logstash = Logstash('Logstash\ntcp:5044')
                search_log = [elastic_search, kibana, logstash]

            with Cluster('Influxdb'):
                influxdb = Influxdb('Influxdb\ntcp:8086')

            with Cluster('Grafana'):
                grafana = Grafana('Grafana\ntcp:3000')

        with Cluster('Inventory and Connectivity'):
            with Cluster('Inventory'):
                nautobot = Custom('Nautobot\ntcp:8000',
                                  'custom_icons/Nautobot.jpeg')

        with Cluster('Database'):
            with Cluster('Mongo dB'):
                mongodb = Mongodb('MongoDb\ntcp:27017')
                mongodb_express = Mongodb('MongoDb Express\ntcp:8181')
                mongo_group = [mongodb, mongodb_express]

        with Cluster('CI/CD'):
            team_city = TC('TeamCity')

        kafka_brokers - python_container

        python_container - vault

        python_container - nautobot

        nautobot - logstash
        python_container - logstash

        nautobot - influxdb
        python_container - influxdb

        python_container - mongodb
with Diagram("", show=False, node_attr=node_attr):
    with Cluster("Logging", graph_attr=graph_attr):
        logstash = Logstash("\nLogstash")
        fluentd = Fluentd("\nFluentd")
        loki = Loki("\nLoki")
        logstash - [fluentd] - loki

    with Cluster("Monitoring", graph_attr=graph_attr):
        prometheus = Prometheus("\nPrometheus")
        thanos = Thanos("\nThanos")
        prometheus - thanos

    with Cluster("Storage", graph_attr=graph_attr):
        with Cluster("Logs", graph_attr=graph_attr):
            elasticsearch = Elasticsearch("\nElasticsearch")
            solr = Solr("\nSolr")
            mongodb = Mongodb("\nMongoDB")
            elasticsearch - solr - mongodb

        with Cluster("Metrics", graph_attr=graph_attr):
            influx = Influxdb("\nInfluxDB")
            prometheus2 = Prometheus("\nPrometheus")
            prometheus2 - influx

        loki >> elasticsearch
        thanos >> prometheus2

    with Cluster("Visualization", graph_attr=graph_attr):
        kibana = Kibana("\nKibana")
        grafana = Grafana("\nGrafana")
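The last snippet assumes `node_attr` and `graph_attr` dictionaries plus its imports from earlier in its source file. A minimal stand-in, with placeholder attribute values (any Graphviz attributes would do), might be:

from diagrams import Cluster, Diagram
from diagrams.elastic.elasticsearch import Elasticsearch, Kibana, Logstash
from diagrams.onprem.aggregator import Fluentd
from diagrams.onprem.database import Influxdb, Mongodb
from diagrams.onprem.logging import Loki
from diagrams.onprem.monitoring import Grafana, Prometheus, Thanos
from diagrams.onprem.search import Solr

node_attr = {"fontsize": "14"}   # placeholder node attributes
graph_attr = {"fontsize": "16"}  # placeholder cluster attributes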