from diagrams import Cluster, Diagram
from diagrams.gcp.analytics import BigQuery, Dataflow, PubSub
from diagrams.gcp.compute import AppEngine, Functions
from diagrams.gcp.database import BigTable
from diagrams.gcp.iot import IotCore
from diagrams.gcp.storage import GCS

with Diagram("Message Collecting", show=False):
    pubsub = PubSub("pubsub")

    with Cluster("Source of Data"):
        [IotCore("core1"),
         IotCore("core2"),
         IotCore("core3")] >> pubsub

    with Cluster("Targets"):
        with Cluster("Data Flow"):
            flow = Dataflow("data flow")

        with Cluster("Data Lake"):
            flow >> [BigQuery("bq"),
                     GCS("storage")]

        with Cluster("Event Driven"):
            with Cluster("Processing"):
                flow >> AppEngine("engine") >> BigTable("bigtable")

            with Cluster("Serverless"):
                flow >> Functions("func") >> AppEngine("appengine")

        pubsub >> flow
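# A minimal sketch (not part of the examples above) showing that the same ">>"
# chains used in these scripts can carry labelled and styled connections via
# diagrams.Edge, should the plain arrows need annotation.
from diagrams import Diagram, Edge
from diagrams.gcp.analytics import BigQuery, Dataflow, PubSub

with Diagram("Labelled Edges Sketch", show=False):
    PubSub("pubsub") >> Edge(label="subscribe") \
        >> Dataflow("flow") \
        >> Edge(color="firebrick", style="dashed") \
        >> BigQuery("bq")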
from diagrams import Cluster, Diagram
from diagrams.gcp.analytics import BigQuery, Dataflow, PubSub
from diagrams.gcp.compute import AppEngine, Functions
from diagrams.gcp.database import BigTable
from diagrams.gcp.storage import GCS
from diagrams.programming.framework import React

with Diagram("Data project3: Bitcoin"):
    pubsub = PubSub("Pub Sub")

    with Cluster("Source data"):
        yfsd = Functions("yahoo finance")
        yfsd >> pubsub

    with Cluster("Artificial Intelligence - Bitcoin"):
        flow = Functions("ETL process")

        with Cluster("Data base"):
            db = BigQuery("Big Query")
        flow >> db

        predict = Functions("Calculate Prediction")

        with Cluster("Update model"):
            CTF = Functions("Control Training")
            train = Functions("Training")

    with Cluster("Web"):
        webpage = React("React.App")

    pubsub >> flow
    # db >> train
    db >> predict
    db >> CTF
from diagrams import Cluster, Diagram
from diagrams.gcp.analytics import BigQuery, Dataflow, PubSub
from diagrams.gcp.compute import AppEngine, Functions
from diagrams.gcp.database import BigTable
from diagrams.gcp.iot import IotCore
from diagrams.gcp.storage import GCS

with Diagram("Media Monitoring Storage Architecture", show=False) as med_diag:
    pubsub = PubSub("pubsub")
    flow = Dataflow("DataFlow")

    with Cluster("Data Collection"):
        [Functions("RSS Feed Webhook"),
         Functions("Twitter Webhook"),
         Functions("Press Release")] >> pubsub >> flow

    with Cluster("Storage"):
        with Cluster("Data Lake"):
            flow >> [BigQuery("BigQuery"),
                     GCS("Storage")]

        with Cluster("Event Driven"):
            with Cluster("Processing"):
                flow >> AppEngine("GAE") >> BigTable("BigTable")

            with Cluster("Serverless"):
                flow >> Functions("Function") >> AppEngine("AppEngine")

    pubsub >> flow

# Bare expression renders the diagram inline (e.g. in a notebook cell)
med_diag
from diagrams import Cluster, Diagram
from diagrams.gcp.analytics import BigQuery, PubSub
from diagrams.gcp.compute import Functions
from diagrams.gcp.database import Firestore
from diagrams.gcp.devtools import Scheduler
from diagrams.gcp.storage import GCS

with Diagram("Strava Leaderboard Architecture Diagram", show=True):
    source = Functions("generate grid")

    with Cluster("Data Pipeline"):
        grid_queue = PubSub("grid queue")
        credential = Firestore("credentials store")
        store = GCS("raw JSON")

        with Cluster("Extract-Load"):
            with Cluster("scheduler"):
                scheduler = Scheduler("scheduler")
                schedule_queue = PubSub("schedule queue")
            extract_load = Functions("worker")
            staging = BigQuery("BigQuery staging dataset")

        with Cluster("Transform"):
            transform = Functions("transform worker")
            analysis = BigQuery("BigQuery analysis dataset")

    scheduler >> schedule_queue >> extract_load
    source >> grid_queue >> extract_load
    credential >> extract_load
    extract_load >> store
    extract_load >> staging >> transform >> analysis
from diagrams import Diagram
from diagrams.gcp.analytics import BigQuery
from diagrams.gcp.compute import AppEngine
from diagrams.gcp.database import Datastore


def main():
    with Diagram("sample", show=False):
        AppEngine("appengine") >> Datastore("datastore") >> BigQuery("bigquery")


if __name__ == "__main__":
    main()
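# A minimal sketch (assumed parameter values, not part of the sample above)
# showing how Diagram's filename, outformat, and direction arguments control
# where and how the rendered image is written.
from diagrams import Diagram
from diagrams.gcp.compute import AppEngine
from diagrams.gcp.database import Datastore

with Diagram("sample", show=False, filename="sample_diagram",
             outformat="png", direction="LR"):
    AppEngine("appengine") >> Datastore("datastore")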