from diagrams import Diagram, Cluster
from diagrams.aws.storage import S3
from diagrams.aws.compute import Lambda
from diagrams.aws.integration import SNS, Eventbridge
from diagrams.aws.management import Cloudwatch
from diagrams.onprem.queue import ActiveMQ

with Diagram("Alerting Workflow", show=True):
    with Cluster("main account"):
        topic = SNS("SNS Topic")

        with Cluster("Lambda"):
            fn = Lambda("processor")

        topic >> fn
        S3("lambda source") - fn

        cl = Cloudwatch("Cloudwatch")
        fn >> cl

        event = Eventbridge("Cloudwatch\nevent rule")
        cl >> event

        with Cluster("Event Bus"):
            event_bus = ActiveMQ("bus")

        event >> event_bus

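# A note on the edge operators used above: in the diagrams DSL, `>>` draws a
# left-to-right arrow, `<<` a right-to-left arrow, and `-` an undirected line
# (how the source bucket is tied to the Lambda here). Edges can also carry
# labels and styles; a minimal sketch, with illustrative node names of my own:
from diagrams import Diagram, Edge
from diagrams.aws.compute import Lambda
from diagrams.aws.storage import S3

with Diagram("Edge Operators", show=False):
    src = S3("source")
    fn = Lambda("handler")
    src - fn                                             # undirected link
    src >> Edge(label="notifies", style="dashed") >> fn  # styled, directed
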
from diagrams import Cluster, Diagram
from diagrams.aws.compute import Lambda
from diagrams.aws.database import Dynamodb
from diagrams.aws.integration import Eventbridge
from diagrams.aws.network import APIGateway
from diagrams.aws.storage import S3

with Diagram("Message Broker Refactored", show=False):
    with Cluster("Event Processors"):
        with Cluster("System C", direction="TB"):
            apig = APIGateway("webhook")
            handler = Lambda("handler")

        with Cluster("System D", direction="TB"):
            processor = Lambda("handler")
            ddb = Dynamodb("database")
            stream_listener = Lambda("processor")

        broker = Eventbridge("message broker")

        with Cluster("System B", direction="TB"):
            event_handler = Lambda("handler")
            bucket = S3("S3")
            file_processor = Lambda("processor")

    apig >> handler >> broker >> processor
    processor >> ddb >> stream_listener >> broker
    broker >> event_handler >> bucket >> file_processor

from diagrams import Diagram
from diagrams.aws.compute import (
    Lambda, Batch, ECS, EKS, ElasticBeanstalk, Compute, AutoScaling,
    Fargate, Lightsail, SAR, ServerlessApplicationRepository,
)
from diagrams.aws.storage import S3, Backup, EBS, EFS, S3Glacier, Storage, StorageGateway
from diagrams.aws.database import Dynamodb, RDS, Redshift, DB, ElastiCache, Neptune, Timestream
from diagrams.aws.network import APIGateway, CloudFront, GlobalAccelerator, Route53, VPC
from diagrams.aws.integration import Eventbridge, SNS, SQS, Appsync, StepFunctions
from diagrams.aws.analytics import (
    Kinesis, Athena, Quicksight, ES, ElasticsearchService, Analytics,
    DataPipeline, Glue, EMR, KinesisDataAnalytics, KinesisDataFirehose,
    KinesisDataStreams, LakeFormation, ManagedStreamingForKafka,
)
from diagrams.aws.ml import (
    Sagemaker, Comprehend, Rekognition, Forecast, Personalize, Polly,
    Textract, Transcribe, Translate, MachineLearning, Lex,
    SagemakerNotebook, SagemakerModel, SagemakerTrainingJob,
)

# Most of these node classes are imported for reference only; the diagram
# below uses just Eventbridge, APIGateway, Lambda, and Dynamodb.
with Diagram("Event-based Application", show=False):
    eb = Eventbridge("EventBridge (Event Router)")
    apig = APIGateway("REST API")
    apig >> Lambda("API Handler(s)") >> Dynamodb("Data")

from diagrams import Diagram
from diagrams.aws.compute import Lambda, ECS
from diagrams.aws.devtools import Codebuild
from diagrams.aws.integration import Eventbridge
from diagrams.aws.database import Dynamodb, Timestream
from diagrams.onprem.client import User

with Diagram("SLO Infrastructure", direction="TB"):
    ci = Codebuild("CI/CD")
    producer = Eventbridge("Event Bus")
    function = Lambda("Event Handler")
    event_store = Dynamodb("EventStore")
    series_store = Timestream("SLO/Events")
    stores = [event_store, series_store]
    grafana = ECS("Dashboard")
    user = User()

    ci >> producer >> function >> stores
    user >> grafana >> series_store

from diagrams import Diagram, Cluster
from diagrams.aws.network import Route53
from diagrams.aws.storage import S3
from diagrams.aws.mobile import APIGateway
from diagrams.aws.compute import Lambda
from diagrams.aws.integration import Eventbridge
from diagrams.programming.language import Python, Javascript
from diagrams.aws.database import DDB
from diagrams.aws.engagement import SES
from diagrams.aws.ml import Textract

with Diagram("architecture", show=False):
    with Cluster("Notification delivery"):
        flux_2 = Eventbridge(
            "schedules the scan_user\nfunction to run Monday\nthrough Friday at 8:00 AM"
        ) >> Lambda("Handler: lambda.scan_handler")

    # with Cluster("User registration"):
    flux_1 = Route53(
        "routes traffic for\nmtchou-mouh.mongulu.cm\nto the S3 bucket of the same name"
    ) >> S3(
        "3 web pages:\n-index.html\n-demo.html\n-error.html"
    ) >> APIGateway(
        "-OPTIONS: resolves the\nCORS issue\n-POST: passes the\ninformation to Lambda"
    ) >> Lambda(
        "Handler: lambda.register_handler\n-Stores the information in the\nDynamoDB Register table\n-Sends a verification email\nvia Amazon SES"
    )

    with Cluster("Programming"):
        languages = [Python("BackEnd"), Javascript("FrontEnd")]

from diagrams import Cluster, Diagram
from diagrams.aws.compute import Compute, Lambda
from diagrams.aws.database import Dynamodb
from diagrams.aws.integration import SQS, SNS, Eventbridge
from diagrams.aws.network import APIGateway
from diagrams.aws.storage import S3
from diagrams.aws.analytics import Kinesis, KinesisDataFirehose

with Diagram("Procurement services", show=False):
    broker = Eventbridge()

    with Cluster("Lot Selection"):
        inbox = S3("Listings")
        listing_queue = SQS()
        textract = Lambda("Extract lots")
        listings_db = Dynamodb("Listings db")
        selector = Lambda("Select lots")

        inbox >> listing_queue >> textract >> listings_db >> selector

    with Cluster("Buying app"):
        pricer = Lambda("Bid calculator")
        purchases = Dynamodb("Buying db")
        apig = APIGateway("Manager app")
        ls = Lambda("View proposals")
        put = Lambda("Set price")
        invoices = S3("Invoice bucket")
        invoice_listener = Lambda("Invoice listener")
        listener = Lambda("Lot listener")

from diagrams import Cluster, Diagram
from diagrams.aws.analytics import ES, Kinesis
from diagrams.aws.compute import Lambda
from diagrams.aws.integration import Eventbridge
from diagrams.aws.storage import S3

with Diagram("Event capture", show=False):
    with Cluster("Production Account"):
        prod_lambdas = [Lambda("Func"), Lambda("Func"), Lambda("Func")]
        prod_bus = Eventbridge("Default")
        prod_lambdas >> prod_bus

    with Cluster("Test Account"):
        test_lambdas = [Lambda("Func"), Lambda("Func"), Lambda("Func")]
        test_bus = Eventbridge("Default")
        test_lambdas >> test_bus

    with Cluster("Logging Account"):
        event_bus = Eventbridge("Shared")
        prod_bus >> event_bus
        test_bus >> event_bus

        stream = Kinesis("Event stream")
        event_bus >> stream
        stream >> S3("Archive")
        stream >> ES("Analytics")

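# Worth noting from the script above: connecting a Python list to a node
# (`prod_lambdas >> prod_bus`) draws one edge per list element. A minimal
# fan-in sketch of that behavior, with illustrative names:
from diagrams import Diagram
from diagrams.aws.compute import Lambda
from diagrams.aws.integration import Eventbridge

with Diagram("Fan-in", show=False):
    workers = [Lambda(f"worker-{i}") for i in range(3)]
    workers >> Eventbridge("bus")  # three edges, one per Lambda
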
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from diagrams import Diagram
from diagrams.aws.integration import Eventbridge
from diagrams.aws.compute import Lambda

graph_attr = {
    "fontsize": "10",
    "bgcolor": "white",
}

with Diagram(
    "EventBridge -> DynamoDB table (dynamodb-logs) reads",
    show=True,
    filename="reads",
    graph_attr=graph_attr,
):
    handler = Lambda("Lambda")
    event_bridge = Eventbridge("EventBridge")
    event_bridge >> handler

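# Diagram() also accepts `outformat` and `direction`, so the same graph can be
# rendered as an SVG laid out top-to-bottom instead of the default PNG. A
# minimal sketch reusing graph_attr from above; the filename is illustrative:
with Diagram(
    "EventBridge reads (SVG)",
    show=False,
    filename="reads_svg",
    outformat="svg",
    direction="TB",
    graph_attr=graph_attr,
):
    Eventbridge("EventBridge") >> Lambda("Lambda")
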
Blank("") Blank("") Blank("") # with Cluster("Private Subnet (2)") as priv2: tableau = Tableau("Tableau Server\n(EC2)") with Cluster("S3 Data Lake"): s3data = storage.S3("Data Bucket") s3meta = storage.S3("Metadata Bucket") s3logs = storage.S3("Logging Bucket") sftp = TransferForSftp("SFTP\nTransfer Service") py_fn1 = compute.Lambda("File Listener\n(Lambda Python)") glue = Glue("Spark Transforms\n(Glue)") # with Cluster("AWS Serverless"): events = Eventbridge("Event Triggers\n(AWS Eventbridge)") secrets = security.SecretsManager("AWS Secrets\nManager") cw = Cloudwatch("Cloudwatch Logs") source = Internet("External\nData Source") py_fn1 << s3data << py_fn1 glue << s3data << glue nat << singer1 nat >> source elb >> tableau s3meta >> singer1 >> s3data singer1 << secrets singer1 << events rs1 << singer1