Example #1
                              "mkdir output;hostname > output/f1.txt")

            # The task f
            taskF = DagonTask(
                TaskType.BATCH, "F",
                "echo $RANDOM > f2.txt; cat workflow://wf1-transversal-demo/A/output/f1.txt >> f2.txt; cat workflow:///E/output/f1.txt >> f2.txt"
            )

            # The task g
            taskG = DagonTask(
                TaskType.BATCH, "G",
                "cat workflow:///F/f2.txt >> f3.txt; cat workflow://wf1-transversal-demo/C/f2.txt >> f3.txt"
            )

            # add tasks to the workflow 2
            workflow2.add_task(taskE)
            workflow2.add_task(taskF)
            workflow2.add_task(taskG)

            workflow2.make_dependencies()
            break
        except Exception as exe:
            logging.debug(exe)
            time.sleep(1)  # retry while the transversal workflow does not exist yet

    # run the workflow
    workflow2.run()

    # if workflow.get_dry() is False:
    #     # set the result filename
    #     result_filename = taskD.get_scratch_dir() + "/f3.txt"
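The commented-out lines above hint at how a result can be read back once dry mode is off; a minimal sketch along those lines, adapted to taskG of the second workflow (which writes f3.txt) and using the get_dry()/get_scratch_dir() calls shown in the comments:

    # Sketch (not in the original example): read back taskG's output after the run.
    if workflow2.get_dry() is False:
        result_filename = taskG.get_scratch_dir() + "/f3.txt"
        with open(result_filename) as result_file:
            print(result_file.read())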
Example #2
        "echo Soy Berlin > f2.txt; cat workflow://Tokio/f1.txt >> f2.txt",
        "ubuntu")

    # The task c
    taskC = DockerTask(
        "Nairobi",
        "echo Soy Nairobi > f2.txt; cat workflow://Tokio/f1.txt >> f2.txt",
        "ubuntu")

    # The task d
    taskD = DockerTask(
        "Mosco",
        "cat workflow://Berlin/f2.txt workflow://Nairobi/f2.txt > f3.txt",
        "ubuntu")

    # add tasks to the workflow
    workflow.add_task(taskA)
    workflow.add_task(taskB)
    workflow.add_task(taskC)
    workflow.add_task(taskD)

    workflow.make_dependencies()

    jsonWorkflow = workflow.asJson()
    with open('dataflow-demo-docker.json', 'w') as outfile:
        stringWorkflow = json.dumps(jsonWorkflow, sort_keys=True, indent=2)
        outfile.write(stringWorkflow)

    # run the workflow
    workflow.run()
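If needed, the JSON description written above can be reloaded for inspection; a minimal sketch reusing the same filename and the json module already used by the example:

    # Sketch: reload the serialized workflow description written before run().
    with open('dataflow-demo-docker.json') as infile:
        wf_description = json.load(infile)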
        wf_b = Workflow("TestTPS_WFb")
        taskD = DagonTask(
            TaskType.BATCH, "AdquisitionB", "cp -r " + command_dir +
            "/launcher/lib $PWD;java -jar " + command_dir +
            "launcher/launcher.jar 1 1 01-03-2019 02-03-2019 $PWD/ adq_tps_b")
        taskE = DockerTask(
            "InterpolationB",
            "python /home/Interpolacion.py -i workflow:///AdquisitionB/metadata/FilesNc -w 1 ",
            image="module_interpolation:v1")
        taskF = DockerTask(
            "UploaderB",
            "python /home/upload.py -i workflow:///InterpolationB/output -w 1 ",
            image="module_todb:v1")

        wf_a.add_task(taskA)
        wf_a.add_task(taskB)
        wf_a.add_task(taskC)

        wf_b.add_task(taskD)
        wf_b.add_task(taskE)
        wf_b.add_task(taskF)

        meta_workflow.add_workflow(wf_a)
        meta_workflow.add_workflow(wf_b)

        meta_workflow.make_dependencies()

        end_building = time.time()  # end building
        start_validation = time.time()  # start validation
        meta_workflow.Validate_WF()
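The snippet cuts off after validation begins; a hedged continuation, assuming the timing pattern above is closed the same way and that DAG_TPS can be run directly (run() is only shown for Workflow elsewhere in these examples):

        end_validation = time.time()  # assumed counterpart to start_validation
        meta_workflow.run()  # assumption: DAG_TPS exposes run() like Workflow.run()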
    taskD = DagonTask(TaskType.BATCH, "D", "cat workflow:///B/f2.txt >> f3.txt; cat workflow:///C/f2.txt >> f3.txt")

    # second workflow
    workflow2 = Workflow("DataFlow-transversal")
    workflow2.set_dry(False)
    # The task E
    taskE = DagonTask(TaskType.BATCH, "E", "mkdir output;cat /tmp/pruebas/merra.csv > output/f1.csv")

    # The task f
    taskF = DagonTask(TaskType.BATCH, "F", "echo $RANDOM > f2.txt; cat workflow://DataFlow-Demo-Server/A/output/f1.csv >> f2.txt; cat workflow:///E/output/f1.csv >> f2.txt")

    # The task g
    taskG = DagonTask(TaskType.BATCH, "G", "cat workflow:///F/f2.txt >> f3.txt; cat workflow://DataFlow-Demo-Server/C/f2.txt >> f3.txt")

    # add tasks to the workflow 1
    workflow.add_task(taskA)
    workflow.add_task(taskB)
    workflow.add_task(taskC)
    workflow.add_task(taskD)

    # add tasks to the workflow 2
    workflow2.add_task(taskE)
    workflow2.add_task(taskF)
    workflow2.add_task(taskG)

    # list of the workflows
    #WF =[workflow,workflow2]
    metaworkflow = DAG_TPS("NewDAG")
    metaworkflow.add_workflow(workflow)
    metaworkflow.add_workflow(workflow2)
    metaworkflow.make_dependencies()
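A hedged sketch of the steps that typically follow once the cross-workflow dependencies are built, borrowing Validate_WF() from the TestTPS snippet above and assuming the meta-workflow can be run directly:

    metaworkflow.Validate_WF()  # validate the combined DAG, as in the TestTPS example
    metaworkflow.run()  # assumption: DAG_TPS exposes run() like Workflow.run()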
        "metgrid", command_dir_base + "/metgrid " + i_date +
        " workflow:///makeWpsNameList/namelist.wps workflow:///ungrib/FILE\* workflow:///geogrid/geo_em.\*"
    )

    taskMakeInputNameList = batch.Batch(
        "makeInputNameList", command_dir_base + "/makeInputNamelist." +
        wrf_model + " " + i_date + " " + f_date + " 3 " + useRestart + " 1440")

    # The real task executed using Slurm
    taskReal = Slurm(
        "real", command_dir_base + "/real " + i_date +
        " workflow:///makeInputNameList/namelist.input workflow:///metgrid/met_em.\*",
        "hicpu", 1)

    # add tasks to the workflow
    workflow.add_task(taskMakeWpsNameList)
    workflow.add_task(taskGeogrid)
    workflow.add_task(taskUngrib)
    workflow.add_task(taskMetgrid)

    workflow.add_task(taskMakeInputNameList)
    workflow.add_task(taskReal)

    days = hours // 24  # integer division so range() below receives an int

    for day in range(0, days):
        i_date1 = start_date + datetime.timedelta(hours=24 * day)
        f_date1 = start_date + datetime.timedelta(hours=24 * (day + 1))

        i_date1s = i_date1.strftime("%Y%m%dZ%H")
        f_date1s = f_date1.strftime("%Y%m%dZ%H")
Example #6
    # ACQUISITION PHASE

    acq_cores = 16
    acq_state = "yuc camp qroo"
    acq_partitions = 10
    acq_volume = "data"
    acq_command = "java -jar acquisition.jar %d %d %s %s" % (acq_cores, acq_partitions, acq_state, acq_volume)

    task_acquisition = DagonTask(TaskType.BATCH, "ACQ", acq_command, ip="148.247.201.227", ssh_username="******",
                                 working_dir="/home/hreyes/pruebas_dante/tasks/acquisition")
    # PARSING PHASE
    parsing_input = "workflow:///ACQ/%s/documentos/lote" % acq_volume
    parsing_command = "python /home/task/parser.py -i %s -o res"

    workflow.add_task(task_acquisition)

    for i in range(1, acq_partitions + 1):
        input_element = parsing_input + str(i)
        command = parsing_command % input_element

        task_parsing = DagonTask(TaskType.DOCKER, "P%d" % i, command,
                                 ip="ec2-34-208-132-217.us-west-2.compute.amazonaws.com",
                                 ssh_username="******", keypath="dagon_services.pem", image="ddomizzi/parser")
        workflow.add_task(task_parsing)

    # TRANSFORM THE DATA

    transform_command = "Rscript /home/batman/Transform.R "

    for i in range(1, acq_partitions + 1):
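        # NOTE: the example is truncated here. A hypothetical loop body following the
        # parsing-phase pattern above: each transform consumes the matching parser
        # output (the task names "T%d" and the workflow:// input path are assumptions).
        command = transform_command + "workflow:///P%d/res" % i
        task_transform = DagonTask(TaskType.BATCH, "T%d" % i, command)
        workflow.add_task(task_transform)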
        TaskType.BATCH, "H",
        "mkdir output;hostname > output/f1.txt; cat workflow://WF-2/G/f3.txt >> output/f1.txt"
    )

    # The task i
    taskI = DagonTask(
        TaskType.BATCH, "I",
        "echo $RANDOM > f2.txt; cat workflow:///H/output/f1.txt >> f2.txt")

    # The task j
    taskJ = DagonTask(
        TaskType.BATCH, "J",
        "echo $RANDOM > f3.txt; cat workflow:///I/f2.txt >> f3.txt")

    # add tasks to the workflow 1
    workflow.add_task(taskA)
    workflow.add_task(taskB)
    workflow.add_task(taskC)
    workflow.add_task(taskD)

    # add tasks to the workflow 2
    workflow2.add_task(taskE)
    workflow2.add_task(taskF)
    workflow2.add_task(taskG)

    # add tasks to the workflow 3
    workflow3.add_task(taskH)
    workflow3.add_task(taskI)
    workflow3.add_task(taskJ)

    # list of the workflows
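The snippet is truncated at this point; based on the DAG_TPS pattern shown in the earlier transversal example, the three workflows would presumably be registered and linked along these lines (the meta-workflow name is a placeholder):

    metaworkflow = DAG_TPS("NewDAG")  # placeholder name reused from the earlier example
    metaworkflow.add_workflow(workflow)
    metaworkflow.add_workflow(workflow2)
    metaworkflow.add_workflow(workflow3)
    metaworkflow.make_dependencies()
    metaworkflow.run()  # assumption: DAG_TPS exposes run() like Workflow.run()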