import prefect


def register_flow(f: prefect.Flow, project_name: str) -> None:
    """Registers f to the given project (e.g. "Monitorfich").

    Args:
        f (prefect.Flow): Prefect flow
        project_name (str): name of the project to register the flow under
    """
    f.register(project_name)
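# a minimal usage sketch for register_flow (not in the original source); it assumes
# the target project already exists on the Prefect backend, and the flow name below
# is a hypothetical example
example_flow = prefect.Flow("example-flow")
register_flow(example_flow, project_name="Monitorfich")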
import time
from datetime import timedelta

from prefect import task, Flow
from prefect.environments.storage import Docker


@task(tags=["test"])
def fail_me():
    print("I am starting")
    time.sleep(600)
    print("I am done sleeping")


f = Flow(
    "Tag1-Flow",
    tasks=[fail_me],
    storage=Docker(prefect_version="q-schedule"),
)
f.register(project_name="Demo")
import prefect
from prefect import task, Flow


@task
def welcome_logger():
    logger = prefect.context["logger"]
    with open("/ascii-welcome.txt", "r") as f:
        lines = "\n\n" + "".join(f.readlines()) + "\n\n"
    logger.info(lines)


f = Flow("Welcome Flow", tasks=[welcome_logger])

## if we don't provide our own storage, Prefect will default to storing
## your flow in ~/.prefect/flows, and only agents running on this machine
## will be able to submit this flow for execution.
## At this time, your environment will be automatically labeled with
## the labels ["local", "welcome-flow"]
f.register("Jenny")
# f.run()

## now, we can run an appropriately configured agent for this flow
## immediately in-process; this agent will listen for scheduled work
## from Prefect Cloud:
# f.run_agent()  # spawns a local agent

## we can also run an agent via CLI in the same way as before:
## (local agents will always label themselves "local")
# prefect agent start local -t TOKEN -l welcome-flow

## if we exit the process in which we created this Flow, we can always
## restore the Flow object via the Flow.load interface;
## if we were to recreate the Flow, we must call flow.deploy() again
## to re-register the Flow with its new Task IDs
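## a minimal sketch (not in the original) of restoring this flow in a fresh process
## via Flow.load; the filename below is an assumption based on the flow name and
## the default ~/.prefect/flows location mentioned above
# restored = Flow.load("~/.prefect/flows/welcome-flow.prefect")
# restored.register("Jenny")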
import prefect
from prefect import task, Flow


@task(log_stdout=True)
def check():
    print(prefect.context.get("flow_id"))
    print(prefect.context.get("TEST"))
    print(prefect.context.get("logging", {}).get("level"))


f = Flow(
    "ctx-check",
    tasks=[check],
    storage=prefect.environments.storage.Local(
        stored_as_script=True,
        path="/Users/josh/Desktop/code/Dummy-Flows/ctx_check.py",
    ),
)
f.register("Demo")
from prefect import task, Flow

from musedashboard.mongo_job_flow import MongoJobFlow


@task
def update_history_muse_dashboard():
    MongoJobFlow.save_history_in_mongo_db()


flow = Flow("muse_dashboard", tasks=[update_history_muse_dashboard])
flow.register(project_name="muse_dashboard")
def register_update_flow():
    # `update_task` and `prefect_project_name` are defined elsewhere in this module
    flow = Flow("update_task", tasks=[update_task])
    flow.register(project_name=prefect_project_name)
from datetime import timedelta

from prefect import Flow
from prefect.schedules import Schedule
from prefect.schedules.clocks import IntervalClock

# run every 65 seconds
schedule = Schedule(clocks=[IntervalClock(timedelta(seconds=65))])

my_flow = Flow("Interval Test", schedule=schedule)
my_flow.register(project_name="experimental")
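# a quick sanity check (not in the original): Schedule.next(n) returns the next n
# scheduled run times, which makes it easy to confirm the 65-second interval
# before registering
print(schedule.next(3))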
from prefect import Flow
from prefect.environments import KubernetesJobEnvironment
from prefect.environments.storage import Docker
from prefect.tasks.docker import (
    CreateContainer,
    GetContainerLogs,
    StartContainer,
    WaitOnContainer,
)
from prefect.triggers import always_run

container = CreateContainer(
    docker_server_url="tcp://localhost:2375",
    image_name="prefecthq/prefect:latest",
    command='''python -c "from prefect import Flow; f = Flow('empty'); f.run()"''',
)
start = StartContainer(docker_server_url="tcp://localhost:2375")
logs = GetContainerLogs(docker_server_url="tcp://localhost:2375", trigger=always_run)
status_code = WaitOnContainer(docker_server_url="tcp://localhost:2375")

flow = Flow(
    "Run a Prefect Flow in Docker",
    environment=KubernetesJobEnvironment(job_spec_file="job_spec.yaml"),
    storage=Docker(
        registry_url="joshmeek18",
        image_name="flows",
    ),
)

# set task dependencies using the imperative API
# (`image`, an image-pull task, is assumed to be defined earlier in the original script)
container.set_upstream(image, flow=flow)
start.set_upstream(container, flow=flow, key="container_id")
logs.set_upstream(container, flow=flow, key="container_id")
status_code.set_upstream(container, flow=flow, key="container_id")
status_code.set_upstream(start, flow=flow)
logs.set_upstream(status_code, flow=flow)

flow.register("Demo")
#!/usr/bin/env python
import prefect
from prefect import task, Flow
from prefect.environments.storage import Docker


@task
def hello_task():
    logger = prefect.context.get("logger")
    logger.info("test message")


flow = Flow("prefect-base-logs", tasks=[hello_task])

base_image = "prefect-base-logs:latest"
flow.storage = Docker(base_image=base_image, local_image=True)

flow.register(project_name="test")
import prefect from prefect import task, Flow @task def hello_task(): logger = prefect.context.get("logger") logger.info("Hello World!") flow = Flow("Hi World", tasks=[hello_task]) # to run locally use flow.run() #flow.run() # to register to the prefect server use flow.register # this assumes you have a project named ExtractLoad flow.register(project_name="ExtractLoad")
def register_init_flow():
    flow = Flow("init_task", tasks=[init_task])
    flow.register(project_name=prefect_project_name)
import prefect
from prefect import task, Flow
from prefect.environments.storage import Docker


@task
def hello_task():
    logger = prefect.context.get("logger")
    logger.info("Hello, Cloud!")


flow = Flow("hello-flow", tasks=[hello_task])
flow.storage = Docker(registry_url="docker.io/joshmeek18")

flow.register(project_name="Hello, World!")
import itertools
import random

from prefect import Flow, Task

# SEED, N_LEVELS, and MIN_WIDTH are assumed to be defined earlier in the original script
MAX_WIDTH = 11
PROB_EDGE = 0.1
VERSION = 6

random.seed(SEED)

flow = Flow(f"{SEED} Seed Flow {VERSION}")

# build a random DAG: N_LEVELS levels of tasks, with random edges between adjacent levels
LEVELS = dict()
for level in range(N_LEVELS):
    width = random.randint(MIN_WIDTH, MAX_WIDTH)
    LEVELS[level] = [
        Task(name=f"Task {level}-{i}") for i, _ in enumerate(range(width))
    ]
    for task in LEVELS[level]:
        flow.add_task(task)
    if level:
        for a, b in itertools.product(LEVELS[level - 1], LEVELS[level]):
            if random.random() > PROB_EDGE:
                flow.add_edge(a, b)

# flow.storage = Docker(
#     base_image="python:3.8",
#     python_dependencies=[],
#     registry_url="znicholasbrown",
#     image_name=f"random_seed-{VERSION}",
#     image_tag=f"random-seed-flow-{VERSION}",
# )

flow.register(project_name="Community Support Flows")