    # Initial and final simulation dates in WRF format
    initial = start_date.strftime("%Y-%m-%d_%H:%M:%S")
    final = end_date.strftime("%Y-%m-%d_%H:%M:%S")

    # Create the orchestration workflow
    workflow = Workflow("CCMMMA")

    # Log the main workflow parameters
    workflow.logger.info("initialization date: %s", i_date)
    workflow.logger.info("data dir: %s", data_dir)
    workflow.logger.info("initial: %s", initial)
    workflow.logger.info("final: %s", final)

    # The makeWpsNamelist task executed locally
    taskMakeWpsNameList = batch.Batch(
        "makeWpsNameList", command_dir_base + "/makeWpsNamelist." + wrf_model +
        " " + initial + " " + final)

    # The geogrid task executed using Slurm
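    # The workflow:///makeWpsNameList/namelist.wps argument refers to the
    # namelist.wps file produced by the makeWpsNameList task defined above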
    taskGeogrid = Slurm(
        "geogrid", command_dir_base + "/geogrid " + i_date +
        " workflow:///makeWpsNameList/namelist.wps", "hicpu", 1)

    # The ungrib task executed using Slurm
    taskUngrib = Slurm(
        "ungrib", command_dir_base + "/ungrib " + i_date + " " + data_dir +
        " workflow:///makeWpsNameList/namelist.wps", "hicpu", 1)

    # The metgrid task executed using Slurm
    taskMetgrid = Slurm(
        "metgrid", command_dir_base + "/metgrid " + i_date +
Example #2
import datetime
import os.path

from dagon import Workflow
from dagon import batch

# Check if this is the main module
if __name__ == '__main__':

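  # Workflow configuration: scratch_dir_base sets where task scratch directories
  # are created, and remove_dir presumably controls whether they are removed
  # after the run (exact semantics depend on the dagon version in use)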
  config = {
    "scratch_dir_base": "/tmp/test6",
    "remove_dir": False
  }

  # Create the orchestration workflow
  workflow = Workflow("DataFlow-Demo", config)
  
  # Task A: creates an output directory and writes a directory listing into it
  taskA = batch.Batch("Tokio", "mkdir output;ls > output/f1.txt")
  
  # Task B: writes a random number, then appends task A's file via a workflow:// reference
  taskB = batch.Batch("Berlin", "echo $RANDOM > f2.txt; cat workflow://Tokio/output/f1.txt >> f2.txt")
  
  # Task C: same as task B, with its own copy of f2.txt
  taskC = batch.Batch("Nairobi", "echo $RANDOM > f2.txt; cat workflow://Tokio/output/f1.txt >> f2.txt")
  
  # Task D: concatenates the outputs of tasks B and C
  taskD = batch.Batch("Mosco", "cat workflow://Berlin/f2.txt workflow://Nairobi/f2.txt > f3.txt")
  
  # Add the tasks to the workflow
  workflow.add_task(taskA)
  workflow.add_task(taskB)
  workflow.add_task(taskC)
  workflow.add_task(taskD)
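
  # A complete run would typically also resolve the dependencies implied by the
  # workflow:// references and execute the workflow (e.g., via the workflow
  # object's make_dependencies() and run() methods, assuming the dagon version
  # in use provides them)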
Example #3
import json
from dagon import Workflow
from dagon import batch

if __name__ == '__main__':

    config = {"scratch_dir_base": "/tmp/", "remove_dir": False}

    # Create the orchestration workflow
    workflow = Workflow("Taskflow-Demo", config)

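    # Four batch tasks; "Mosco" reads the output of "Tokio" through a workflow:// reference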
    taskA = batch.Batch("Tokio", "/bin/hostname >tokio.out")
    taskB = batch.Batch("Berlin", "/bin/date")
    taskC = batch.Batch("Nairobi", "/usr/bin/uptime")
    taskD = batch.Batch("Mosco", "cat workflow://Tokio/tokio.out")

    workflow.add_task(taskA)
    workflow.add_task(taskB)
    workflow.add_task(taskC)
    workflow.add_task(taskD)

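    # Declare the dependencies explicitly: B and C depend on A, D depends on B and C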
    taskB.add_dependency_to(taskA)
    taskC.add_dependency_to(taskA)
    taskD.add_dependency_to(taskB)
    taskD.add_dependency_to(taskC)

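    # Serialize the workflow graph to JSON and write it to disk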
    jsonWorkflow = workflow.asJson()
    with open('taskflow-demo.json', 'w') as outfile:
        stringWorkflow = json.dumps(jsonWorkflow, sort_keys=True, indent=2)
        outfile.write(stringWorkflow)