Example #1
    def load_json(self, json_data):
        """Rebuild this workflow from its JSON representation."""
        from dagon.task import DagonTask, TaskType
        self.name = json_data['name']
        self.workflow_id = json_data['id']
        # Recreate each serialized task from its type, name and shell command
        for task_name in json_data['tasks']:
            task_data = json_data['tasks'][task_name]
            task = DagonTask(TaskType[task_data['type'].upper()],
                             task_data['name'], task_data['command'])
            self.add_task(task)
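
For context, a minimal round-trip sketch pairing this method with the as_json() serialization from Example #13, assuming load_json lives on Workflow and that as_json() emits the same 'name'/'id'/'tasks' schema this method consumes (the file name and task are illustrative):

import json

from dagon import Workflow
from dagon.task import DagonTask, TaskType

# Build and serialize a one-task workflow
workflow = Workflow("roundtrip-demo")
workflow.add_task(DagonTask(TaskType.BATCH, "A", "hostname > f1.txt"))
with open('roundtrip-demo.json', 'w') as outfile:
    outfile.write(json.dumps(workflow.as_json(), indent=2))

# Rebuild a fresh workflow from the file via the load_json method above
restored = Workflow("placeholder")
with open('roundtrip-demo.json') as infile:
    restored.load_json(json.load(infile))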
Example #2
import logging

from dagon import Workflow
from dagon.task import DagonTask, TaskType

logging.debug('This message should go to the log file')
logging.info('So should this')
logging.warning('And this, too')

# Check if this is the main program
if __name__ == '__main__':

    config = {"scratch_dir_base": "/tmp/test6", "remove_dir": False}

    # Second workflow
    while True:
        try:
            workflow2 = Workflow("wf2-transversal-demo")
            workflow2.set_dry(False)
            # The task E
            taskE = DagonTask(TaskType.BATCH, "E",
                              "mkdir output;hostname > output/f1.txt")

            # The task F
            taskF = DagonTask(
                TaskType.BATCH, "F",
                "echo $RANDOM > f2.txt; cat workflow://wf1-transversal-demo/A/output/f1.txt >> f2.txt; cat workflow:///E/output/f1.txt >> f2.txt"
            )

            # The task G
            taskG = DagonTask(
                TaskType.BATCH, "G",
                "cat workflow:///F/f2.txt >> f3.txt; cat workflow://wf1-transversal-demo/C/f2.txt >> f3.txt"
            )

            # add tasks to the workflow 2
            workflow2.add_task(taskE)
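
The listing is cut off at this point; a hedged completion following the pattern of the other transversal examples (the remaining registrations, the run() call, and the except clause are assumptions):

            workflow2.add_task(taskF)
            workflow2.add_task(taskG)
            workflow2.make_dependencies()
            workflow2.run()
            break
        except Exception as e:
            # Assumed retry loop: log the failure and try again
            logging.error(e)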
Example #3
from dagon import Workflow
from dagon.task import DagonTask, TaskType

# Check if this is the main program
if __name__ == '__main__':

    config = {
        "scratch_dir_base": "/tmp/test6",
        "remove_dir": False
    }

    # Create the orchestration workflow
    workflow = Workflow("DataFlow-Demo-Server")

    # Set the dry
    workflow.set_dry(False)

    # The task a
    taskA = DagonTask(TaskType.BATCH, "A", "mkdir output;hostname > output/f1.txt")

    # The task b
    taskB = DagonTask(TaskType.BATCH, "B", "echo $RANDOM > f2.txt; cat workflow:///A/output/f1.txt >> f2.txt")

    # The task c
    taskC = DagonTask(TaskType.BATCH, "C", "echo $RANDOM > f2.txt; cat workflow:///A/output/f1.txt >> f2.txt")

    # The task d
    taskD = DagonTask(TaskType.BATCH, "D", "cat workflow:///B/f2.txt >> f3.txt; cat workflow:///C/f2.txt >> f3.txt")

    # add tasks to the workflow
    workflow.add_task(taskA)
    workflow.add_task(taskB)
    workflow.add_task(taskC)
    workflow.add_task(taskD)
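
The listing stops after the tasks are registered; following Example #13, the closing steps would presumably be:

    # Resolve the workflow:/// references into dependencies, then execute
    workflow.make_dependencies()
    workflow.run()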
Example #4
        "cloudargs": {
            "name": "new_key",
            "public_key": keysDO[1],
            "private_key": keysDO[0]
        }
    }
    do_flavour = {
        "image": "39769319",
        "size": "1gb",
        "location": "nyc1"
    }

    workflow = Workflow("DataFlow-Demo-Cloud")

    # The task a
    taskA = DagonTask(TaskType.CLOUD, "A", "mkdir output;echo I am A > output/f1.txt", Provider.EC2, "ubuntu", ssh_key_ec2,
                      instance_id="i-0792e2eeb013b0b2b", endpoint="880105d0-f2eb-11e8-8cc0-0a1d4c5c824a")

    # The task b (cloud)
    taskB = DagonTask(TaskType.CLOUD, "B", "echo $RANDOM > f2.txt; ls workflow:///A/output/f1.txt >> f2.txt", Provider.EC2,
                      "ubuntu", ssh_key_ec2, instance_id="i-0136ac7985609c759", endpoint="4ef4630c-f2f2-11e8-8cc0-0a1d4c5c824a")

    # add tasks to the workflow
    workflow.add_task(taskA)
    workflow.add_task(taskB)
    workflow.make_dependencies()

    jsonWorkflow = workflow.as_json()
    with open('dataflow-demo-docker.json', 'w') as outfile:
        stringWorkflow = json.dumps(jsonWorkflow, sort_keys=True, indent=2)
        outfile.write(stringWorkflow)
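
This example also ends before execution; following Example #13 it would presumably close with:

    # Execute the cloud workflow
    workflow.run()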
Example #5
    command_dir = sys.argv[1]
    no_TPS = int(sys.argv[2])
    iterations = 1

    logfile = open("../logs/" + str(no_TPS) + "_TPS_test.txt", "a+")
    logfile.write("building,validation,runtime,extraction,processing,\n")

    # Create the orchestration workflow
    for i in range(0, iterations):

        meta_workflow = DAG_TPS("DAGtp_2WF_" + str(no_TPS) + "TPS")
        start_building = time.time()

        wf_a = Workflow("TestTPS_WFa")
        taskA = DagonTask(
            TaskType.BATCH, "AdquisitionA", "cp -r " + command_dir +
            "/launcher/lib $PWD;java -jar " + command_dir +
            "launcher/launcher.jar 1 1 01-01-2019 02-01-2019 $PWD/ adq_tps_a")
        taskB = DockerTask(
            "InterpolationA",
            "python /home/Interpolacion.py -i workflow:///AdquisitionA/metadata/FilesNc -w 1 ",
            image="module_interpolation:v1")
        taskC = DockerTask(
            "UploaderA",
            "python /home/upload.py -i workflow:///InterpolationA/output -w 1 ",
            image="module_todb:v1")

        wf_b = Workflow("TestTPS_WFb")
        taskD = DagonTask(
            TaskType.BATCH, "AdquisitionB", "cp -r " + command_dir +
            "/launcher/lib $PWD;java -jar " + command_dir +
            "launcher/launcher.jar 1 1 01-03-2019 02-03-2019 $PWD/ adq_tps_b")
Example #6
from dagon import Workflow
from dagon.task import DagonTask, TaskType

# Check if this is the main program
if __name__ == '__main__':

    # Create the orchestration workflow
    workflow = Workflow("DataFlow-Demo-Server")

    # Set the dry
    workflow.set_dry(False)

    # The task a
    taskA = DagonTask(TaskType.BATCH,
                      "A",
                      "mkdir output;hostname > output/f1.txt",
                      ip="disys0.tamps.cinvestav.mx",
                      ssh_username="******")

    # The task b
    taskB = DagonTask(
        TaskType.BATCH,
        "B",
        "echo $RANDOM > f2.txt; cat workflow:///A/output/f1.txt >> f2.txt",
        ip="disys0.tamps.cinvestav.mx",
        ssh_username="******")

    # The task c (truncated in the listing; the remaining arguments are
    # assumed to mirror task B)
    taskC = DagonTask(
        TaskType.BATCH,
        "C",
        "echo $RANDOM > f2.txt; cat workflow:///A/output/f1.txt >> f2.txt",
        ip="disys0.tamps.cinvestav.mx",
        ssh_username="******")
Example #7
    command_dir = sys.argv[1]
    init_date = sys.argv[2]
    end_date = sys.argv[3]
    id_test = str(sys.argv[4])  # also the number of iterations
    # Create the orchestration workflow
    if not os.path.exists("../logs/"):
        os.makedirs("../logs/")

    logfile = open("../logs/LOG_single_test_No-" + id_test + ".txt", "a+")
    logfile.write("building,validation,runtime\n")

    for i in range(0, int(id_test)):
        workflow = Workflow("Merra-Docker")

        # The task a
        start_building = time.time()
        taskA = DagonTask(TaskType.BATCH, "Adquisition",
                          "cp -r " + command_dir + "/launcher/lib $PWD;" +
                          "java -jar " + command_dir + "launcher/launcher.jar 1 1 " +
                          init_date + " " + end_date + " $PWD/ aq_" + id_test)
        # The task b
        taskB = DockerTask("Interpolation", "python /home/Interpolacion.py -i workflow:///Adquisition/metadata/FilesNc -w 1 ", image="module_interpolation:v1")
        
        taskC = DockerTask("Uploader", "python /home/upload.py -i workflow:///Interpolation/output -w 1 ", image="module_todb:v1")

        # add tasks to the workflow
        workflow.add_task(taskA)
        workflow.add_task(taskB)
        workflow.add_task(taskC)

        workflow.make_dependencies()
        end_building = time.time()
        start_validation = time.time()
        workflow.Validate_WF()
        end_validation = time.time()
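
The loop body is cut off after validation; given the logfile header ("building,validation,runtime"), a plausible hedged continuation is:

        # Execute the workflow and record the three measured phases
        start_runtime = time.time()
        workflow.run()
        end_runtime = time.time()
        logfile.write("%f,%f,%f\n" % (end_building - start_building,
                                      end_validation - start_validation,
                                      end_runtime - start_runtime))
    logfile.close()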
Example #8
from dagon import Workflow
from dagon.task import DagonTask, TaskType

# Check if this is the main program
if __name__ == '__main__':

    config = {
        "scratch_dir_base": "/tmp/test6",
        "remove_dir": False
    }

    # Create the orchestration workflow
    workflow = Workflow("DataFlow-Demo-Server")

    # Set the dry
    workflow.set_dry(False)

    # The task a
    taskA = DagonTask(TaskType.BATCH, "A", "mkdir output;cat /tmp/pruebas/conHeaders.csv > output/f1.csv")

    # The task b
    taskB = DagonTask(TaskType.BATCH, "B", "echo $RANDOM > f2.txt; cat workflow:///A/output/f1.csv >> f2.txt")

    # The task c
    taskC = DagonTask(TaskType.BATCH, "C", "echo $RANDOM > f2.txt; cat workflow:///A/output/f1.csv >> f2.txt")

    # The task d
    taskD = DagonTask(TaskType.BATCH, "D", "cat workflow:///B/f2.txt >> f3.txt; cat workflow:///C/f2.txt >> f3.txt")

    # Second workflow
    workflow2 = Workflow("DataFlow-transversal")
    workflow2.set_dry(False)
    # The task E
    taskE = DagonTask(TaskType.BATCH, "E", "mkdir output;cat /tmp/pruebas/merra.csv > output/f1.csv")
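
The listing is cut off after task E; mirroring Example #2, the transversal workflow would plausibly continue with a task reading across both workflows before everything is registered (all of the following is an assumed sketch):

    # Assumed task F: reads A's output from the first workflow and E's from this one
    taskF = DagonTask(
        TaskType.BATCH, "F",
        "cat workflow://DataFlow-Demo-Server/A/output/f1.csv > f2.csv; cat workflow:///E/output/f1.csv >> f2.csv")

    # Register the tasks and resolve dependencies in both workflows
    workflow.add_task(taskA)
    workflow.add_task(taskB)
    workflow.add_task(taskC)
    workflow.add_task(taskD)
    workflow2.add_task(taskE)
    workflow2.add_task(taskF)
    workflow.make_dependencies()
    workflow2.make_dependencies()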
Example #9
from dagon import Workflow
from dagon.task import DagonTask, TaskType

if __name__ == '__main__':

    # Create the orchestration workflow
    workflow = Workflow("CONAGUASlurm5")

    # ACQUISITION PHASE

    acq_cores = 16
    acq_state = "yuc camp qroo"
    acq_partitions = 10
    acq_volume = "data"
    acq_command = "java -jar acquisition.jar %d %d %s %s" % (acq_cores, acq_partitions, acq_state, acq_volume)

    task_acquisition = DagonTask(TaskType.BATCH, "ACQ", acq_command, ip="148.247.201.227", ssh_username="******",
                                 working_dir="/home/hreyes/pruebas_dante/tasks/acquisition")
    # PARSING PHASE
    parsing_input = "workflow:///ACQ/%s/documentos/lote" % acq_volume
    parsing_command = "python /home/task/parser.py -i %s -o res"

    workflow.add_task(task_acquisition)

    for i in range(1, acq_partitions + 1):
        input_element = parsing_input + str(i)
        command = parsing_command % input_element

        task_parsing = DagonTask(TaskType.DOCKER, "P%d" % i, command,
                                 ip="ec2-34-208-132-217.us-west-2.compute.amazonaws.com",
                                 ssh_username="******", keypath="dagon_services.pem", image="ddomizzi/parser")
        workflow.add_task(task_parsing)
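
After the loop, the workflow would presumably be wired and executed as in Example #13:

    # Resolve the workflow:/// references and execute
    workflow.make_dependencies()
    workflow.run()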
Example #10
import logging

from dagon import Workflow
from dagon.task import DagonTask, TaskType

logging.info('So should this')
logging.warning('And this, too')

# Check if this is the main program
if __name__ == '__main__':

    config = {"scratch_dir_base": "/tmp/test6", "remove_dir": False}

    # Create the orchestration workflow
    workflow = Workflow("wf1-transversal-demo")

    # Set the dry
    workflow.set_dry(False)

    # The task a
    taskA = DagonTask(TaskType.BATCH, "A",
                      "mkdir output;hostname > output/f1.txt")

    # The task b
    taskB = DagonTask(
        TaskType.BATCH, "B",
        "echo $RANDOM > f2.txt; cat workflow:///A/output/f1.txt >> f2.txt;sleep 10"
    )

    # The task c
    taskC = DagonTask(
        TaskType.BATCH, "C",
        "echo $RANDOM > f2.txt; cat workflow:///A/output/f1.txt >> f2.txt;sleep 10"
    )

    # The task d (truncated in the listing; the command is assumed to
    # follow the D task of the sibling examples)
    taskD = DagonTask(
        TaskType.BATCH, "D",
        "cat workflow:///B/f2.txt >> f3.txt; cat workflow:///C/f2.txt >> f3.txt")
Example #11
import logging

from dagon import Workflow
from dagon.task import DagonTask, TaskType

logging.warning('And this, too')

# Check if this is the main program
if __name__ == '__main__':

    config = {"scratch_dir_base": "/tmp/test6", "remove_dir": False}

    # Create the orchestration workflow
    workflow = Workflow("WF-1")

    # Set the dry
    workflow.set_dry(False)

    # The task a
    taskA = DagonTask(
        TaskType.BATCH, "A",
        "$SC:5 ;mkdir output;hostname > output/f1.txt; cat workflow://WF-3/H/output/f1.txt >> output/f1.txt"
    )

    # The task b
    taskB = DagonTask(
        TaskType.BATCH, "B",
        "echo $RANDOM > f2.txt; cat workflow:///A/output/f1.txt >> f2.txt")

    # The task c
    taskC = DagonTask(
        TaskType.BATCH, "C",
        "echo $RANDOM > f2.txt; cat workflow:///A/output/f1.txt >> f2.txt")

    # The task d (truncated in the listing; the command is assumed to
    # mirror the D task of the sibling examples)
    taskD = DagonTask(
        TaskType.BATCH, "D",
        "cat workflow:///B/f2.txt >> f3.txt; cat workflow:///C/f2.txt >> f3.txt")
Example #12
        idate = datetime.strptime(init_date, '%d-%m-%Y')
        edate = datetime.strptime(end_date, '%d-%m-%Y')
        daterange = (edate-idate)/no_workflows
        start_building = time.time()

        meta_workflow = DAG_TPS("DAGtp_"+str(no_workflows)+"_workflows")
        adquisition_list = "'"
        for i in range(1, no_workflows + 1):
            init_date = idate.strftime('%d-%m-%Y')
            end_date = (idate + daterange).strftime('%d-%m-%Y')
            workflow_name = "Merra-Docker_%s" % i
            workflow = Workflow(workflow_name)
            adquisition_list += "workflow://%s/Adquisition%s/metadata/FilesNc '" % (workflow_name, i)

            taskA = DagonTask(TaskType.BATCH, "Adquisition"+str(i) , "cp -r "+command_dir+"/launcher/lib $PWD;java -jar "+command_dir+"launcher/launcher.jar 1 1 "+str(init_date)+" "+str(end_date)+" $PWD/ aqui"+str(i))
            # The task b
            taskB = DockerTask("Interpolation"+str(i), "python /home/Interpolacion.py -i "+adquisition_list+" -w 1 ", image="module_interpolation:v1")
            
            taskC = DockerTask("Uploader"+str(i), "python /home/upload.py -i workflow:///Interpolation"+str(i)+"/output -w 1 ", image="module_todb:v1")
            # add tasks to the workflow
            workflow.add_task(taskA)
            workflow.add_task(taskB)
            workflow.add_task(taskC)
            meta_workflow.add_workflow(workflow)
            adquisition_list = adquisition_list[:-1]
            adquisition_list+=","
            idate = datetime.strptime(end_date, '%d-%m-%Y') + timedelta(days=1)  # advance the window by one day

        # Resolve cross-workflow dependencies for the meta-workflow
        meta_workflow.make_dependencies()
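
The listing ends at the dependency step; as in the other timing examples, the loop would presumably close by stopping the build timer and running the meta-workflow (a hedged sketch):

        # Assumed continuation: close the build timer and execute
        end_building = time.time()
        meta_workflow.run()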
Example #13
from dagon import Workflow
from dagon.task import TaskType, DagonTask
import json

if __name__ == '__main__':

    # Create the orchestration workflow
    workflow = Workflow("Taskflow-Demo")

    taskA = DagonTask(TaskType.BATCH, "Tokio", "/bin/hostname >tokio.out")
    taskB = DagonTask(TaskType.BATCH, "Berlin", "/bin/date")
    taskC = DagonTask(TaskType.BATCH, "Nairobi", "/usr/bin/uptime")
    taskD = DagonTask(TaskType.BATCH, "Mosco",
                      "cat workflow:///Tokio/tokio.out")

    workflow.add_task(taskA)
    workflow.add_task(taskB)
    workflow.add_task(taskC)
    workflow.add_task(taskD)

    # Diamond graph: B and C depend on A, and D depends on both
    taskB.add_dependency_to(taskA)
    taskC.add_dependency_to(taskA)
    taskD.add_dependency_to(taskB)
    taskD.add_dependency_to(taskC)

    jsonWorkflow = workflow.as_json()
    with open('taskflow-demo.json', 'w') as outfile:
        stringWorkflow = json.dumps(jsonWorkflow, sort_keys=True, indent=2)
        outfile.write(stringWorkflow)

    workflow.run()