Code example #1
def main():
    try:
        # this describes the parameters and requirements for our pilot job
        pilot_description = pilot.PilotComputeDescription()
        pilot_description.service_url = "fork://%s" % HOSTNAME
        pilot_description.number_of_processes = 4 
        pilot_description.working_directory = WORKDIR
        pilot_description.walltime = 10

        # create a new pilot job
        pilot_compute_service = pilot.PilotComputeService(COORD)
        pilotjob = pilot_compute_service.create_pilot(pilot_description)

        # submit 'A' tasks to pilot job
        task_set_A = list()
        for i in range(NUMBER_JOBS):
            task_desc = pilot.ComputeUnitDescription()
            task_desc.executable = '/bin/echo'
            task_desc.arguments = ['I am an $TASK_SET task with id $TASK_NO', ]
            task_desc.environment = {'TASK_SET': 'A', 'TASK_NO': i}
            task_desc.number_of_processes = 1
            task_desc.output = 'A-stdout.txt'
            task_desc.error  = 'A-stderr.txt'

            # Submit task to PilotJob
            task = pilotjob.submit_compute_unit(task_desc)
            print "* Submitted 'A' task '%s' with id '%s'" % (i, task.get_id())
            task_set_A.append(task)

        # Chaining tasks, i.e. submit a compute unit only once the corresponding compute unit from set A has successfully executed.
        # A 'B' task reads the content of the output file of an 'A' task and writes it into its own
        # output file.
        task_set_B = list()
        while len(task_set_A) > 0:
            for a_task in task_set_A:
                if a_task.get_state() == "Done":
                    print "One 'A' task %s finished. Launching a 'B' task." % (a_task.get_id())
                    task_desc = pilot.ComputeUnitDescription()
                    task_desc.executable = '/bin/echo'
                    task_desc.arguments = ['I am an $TASK_SET task with id $TASK_NO', ]
                    task_desc.environment = {'TASK_SET': 'B', 'TASK_NO': a_task.get_id()}
                    task_desc.number_of_processes = 1
                    task_desc.output = 'B-stdout.txt'
                    task_desc.error  = 'B-stderr.txt'

                    # Submit task to Pilot Job
                    task = pilotjob.submit_compute_unit(task_desc)
                    print "* Submitted 'B' task '%s' with id '%s'" % (i, task.get_id())
                    task_set_B.append(task)
                    task_set_A.remove(a_task)

    except Exception, ex:
        print "AN ERROR OCCURRED: %s" % ((str(ex)))
        # print a stack trace in case of an exception -
        # this can be helpful for debugging the problem
        traceback.print_exc()
        sys.exit(-1)
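All of the excerpts in this list reference module-level names (pilot, COORD, HOSTNAME, WORKDIR, NUMBER_JOBS and so on) that the snippets themselves never define. A minimal sketch of the kind of preamble they assume could look like the following; every value here is an illustrative assumption, not taken from the original projects.

import os
import sys
import traceback

import pilot  # BigJob / Pilot-API package providing PilotComputeService etc.

# Illustrative values only -- adjust for your own environment (assumptions).
COORD       = "redis://localhost:6379"            # Redis coordination URL
HOSTNAME    = "localhost"                         # execution host
WORKDIR     = os.path.join(os.getcwd(), "agent")  # pilot working directory
NUMBER_JOBS = 4                                   # tasks per set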
Code example #2
def main():
    try:
        # this describes the parameters and requirements for our pilot job
        pilot_description = pilot.PilotComputeDescription()
        pilot_description.service_url = "ssh://%s" % HOSTNAME
        pilot_description.number_of_processes = 1
        pilot_description.working_directory = WORKDIR
        pilot_description.walltime = 10

        # create a new pilot job
        pilot_compute_service = pilot.PilotComputeService(COORD)
        pilot_compute_service.create_pilot(pilot_description)

        # Compute Data Service
        compute_data_service = pilot.ComputeDataService()
        compute_data_service.add_pilot_compute_service(pilot_compute_service)

        # submit tasks to pilot job
        tasks = list()
        for i in range(NUMBER_JOBS):
            task_desc = pilot.ComputeUnitDescription()
            task_desc.executable = '/bin/echo'
            task_desc.arguments = [
                'Hello, I am an $TASK_SET task with number $TASK_NO',
            ]
            task_desc.environment = {'TASK_SET': 'A', 'TASK_NO': i}
            task_desc.number_of_processes = 1
            task_desc.output = 'simple-ensemble-stdout.txt'
            task_desc.error = 'simple-ensemble-stderr.txt'

            task = compute_data_service.submit_compute_unit(task_desc)
            print "* Submitted task '%s' with id '%s' to %s" % (
                i, task.get_id(), HOSTNAME)
            tasks.append(task)

        print "Waiting for tasks to finish..."
        compute_data_service.wait()

        # all compute units have finished. now we can use saga-python
        # to transfer back the output files...
        for task in tasks:
            print task.get_local_working_directory()
        #    d = saga.filesystem.Directory("sftp://%s/%s" % (HOSTNAME, task.get_local_working_directory()))
        #    local_filename = "stdout-%s.txt" % (task.get_id())
        #    d.copy("simple-ensemble-stdout.txt", "file://localhost/%s/%s" % (os.getcwd(), local_filename))
        #    print "* Output for '%s' can be found locally in: './%s'" % (task.get_id(), local_filename)

        return (0)

    except Exception, ex:
        print "AN ERROR OCCURED: %s" % ((str(ex)))
        # print a stack trace in case of an exception -
        # this can be helpful for debugging the problem
        traceback.print_exc()
        return (-1)
Code example #3
def main():
    try:
        # this describes the parameters and requirements for our pilot job
        pilot_description = pilot.PilotComputeDescription()
        pilot_description.service_url = "%s://%s@%s" % (SAGA_ADAPTOR,
                                                        USER_NAME, HOSTNAME)
        pilot_description.queue = QUEUE
        pilot_description.project = PROJECT
        pilot_description.number_of_processes = PILOT_SIZE
        pilot_description.working_directory = WORKDIR
        pilot_description.walltime = WALLTIME
        pilot_description.processes_per_node = PROCESSES_PER_NODE
        pilot_description.spmd_variation = SPMD_VARIATION

        # create a new pilot job
        pilot_compute_service = pilot.PilotComputeService(REDIS_URL)
        pilotjob = pilot_compute_service.create_pilot(pilot_description)

        # submit tasks to pilot job
        tasks = list()
        for i in range(NUMBER_JOBS):
            # -------- BEGIN USER DEFINED TASK DESCRIPTION --------- #
            task_desc = pilot.ComputeUnitDescription()
            task_desc.executable = '/bin/echo'
            task_desc.arguments = ['I am task number $TASK_NO']
            task_desc.environment = {'TASK_NO': i}
            task_desc.number_of_processes = 1
            task_desc.spmd_variation = "single"  # Valid values are single or mpi
            task_desc.output = 'simple-ensemble-stdout.txt'
            task_desc.error = 'simple-ensemble-stderr.txt'
            # -------- END USER DEFINED TASK DESCRIPTION --------- #

            task = pilotjob.submit_compute_unit(task_desc)
            print "* Submitted task '%s' with id '%s' to %s" % (
                i, task.get_id(), HOSTNAME)
            tasks.append(task)

        print "Waiting for tasks to finish..."
        pilotjob.wait()

        return (0)

    except Exception, ex:
        print "AN ERROR OCCURRED: %s" % ((str(ex)))
        # print a stack trace in case of an exception -
        # this can be helpful for debugging the problem
        traceback.print_exc()
        return (-1)
Code example #4
def example_datatransfer():
    """Example entry point.
    """
    # we use this dictionary to store the paths of the
    # individual tasks output directories::
    #
    #     {'task_id' : 'output_dir'}
    #
    output_paths = {}

    ###################################
    # Step 1: Submit tasks via BigJob #
    ###################################
    try:
        pilot_description = pilot.PilotComputeDescription()
        pilot_description.service_url = "slurm+ssh://" + EXEC_HOST
        pilot_description.number_of_processes = 16
        pilot_description.walltime = 1
        pilot_description.project = "TG-MCB090174"
        pilot_description.queue = "normal"
        pilot_description.working_directory = WORKING_DIR

        pilot_service = pilot.PilotComputeService(REDIS_SERVER)
        pilotjob = pilot_service.create_pilot(pilot_description)

        for t in range(0, 32):
            # Task output will end up in WORKING_DIR+/task-<t>+/stdout::
            #
            #     /home1/00988/tg802352/mysim/task-<t>/stdout
            #
            output_path = "%s/task-%s" % (WORKING_DIR, t)
            output_paths[t] = output_path

            task = pilot.ComputeUnitDescription()
            task.executable = "/bin/echo"
            task.working_directory = output_path
            task.arguments = ["Hello from task %s" % t]
            pilotjob.submit_compute_unit(task)

        print "Waiting for tasks to finish..."
        pilotjob.wait()
        print "FINISHED"
        pilot_service.cancel()

    except Exception, ex:
        print "AN ERROR OCCURED: %s" % ((str(ex)))
        traceback.print_exc()
        return (-1)
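The excerpt stops after step 1, although the output_paths dictionary it fills is clearly meant for retrieving results later. A possible step 2 is sketched below, following the sftp transfer pattern of code examples #8 and #10; the file name 'stdout' comes from the comment in step 1, while the exact URLs and local file names are assumptions.

    #######################################################
    # Step 2 (sketch): Retrieve the task output via SAGA  #
    #######################################################
    try:
        for task_id, output_path in output_paths.items():
            # the remote layout WORKING_DIR/task-<t>/stdout follows the
            # comment in step 1 above
            remote_dir = saga.filesystem.Directory("sftp://%s/%s" %
                                                   (EXEC_HOST, output_path))
            local_name = "task-%s-stdout.txt" % task_id
            remote_dir.copy("stdout", "file://localhost/%s/%s" %
                            (os.getcwd(), local_name))
            print "* Output of task %s copied to './%s'" % (task_id, local_name)
        return (0)

    except Exception, ex:
        print "AN ERROR OCCURRED: %s" % (str(ex))
        traceback.print_exc()
        return (-1)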
Code example #5
def main():
    try:
        # this describes the parameters and requirements for our pilot job
        pilot_description = pilot.PilotComputeDescription()
        pilot_description.service_url = "ssh://%s" % HOSTNAME
        pilot_description.number_of_processes = 1
        pilot_description.working_directory = WORKDIR
        pilot_description.walltime = 10

        # create a new pilot job
        pilot_compute_service = pilot.PilotComputeService(COORD)
        pilot_compute_service.create_pilot(pilot_description)

        # Compute Data Service
        compute_data_service = pilot.ComputeDataService()
        compute_data_service.add_pilot_compute_service(pilot_compute_service)

        # submit tasks to pilot job
        tasks = list()
        for i in range(NUMBER_JOBS):
            task_desc = pilot.ComputeUnitDescription()
            task_desc.executable = '/bin/echo'
            task_desc.arguments = [
                'Hello, I am task number $TASK_NO',
            ]
            task_desc.environment = ['TASK_NO=%s' % i]
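            # Note: the environment is given here as a list of KEY=VALUE
            # strings; other examples in this list pass a dict instead, and
            # both forms appear in the original snippets.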
            task_desc.number_of_processes = 1
            task_desc.output = 'simple-ensemble-stdout.txt'
            task_desc.error = 'simple-ensemble-stderr.txt'

            task = compute_data_service.submit_compute_unit(task_desc)
            print "* Submitted task '%s' with id '%s' to %s" % (
                i, task.get_id(), HOSTNAME)
            tasks.append(task)

        print "Waiting for tasks to finish..."
        compute_data_service.wait()

        return (0)

    except Exception, ex:
        print "AN ERROR OCCURED: %s" % ((str(ex)))
        # print a stack trace in case of an exception -
        # this can be helpful for debugging the problem
        traceback.print_exc()
        return (-1)
Code example #6
def main():
    try:
        pilot_description = pilot.PilotComputeDescription()
        pilot_description.service_url = HOST
        pilot_description.number_of_processes = 1
        pilot_description.working_directory = os.getcwd()

        pilot_service = pilot.PilotComputeService(COORD)

        ### This is broken !!! -> https://github.com/saga-project/BigJob/issues/118
        #pilotjob = pilot_service.create_pilot(pilot_compute_description)
        pilotjob = pilot_service.create_pilot(
            pilot_compute_description=pilot_description)

        task = pilot.ComputeUnitDescription()
        task.executable = "/bin/sleep"
        task.arguments = ["10"]

        pilotjob.submit_compute_unit(task)

        # do something useful here, wait or whatever. print some information.
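        # For instance (sketch): block until the submitted unit has finished
        # before moving on to the cancel calls below.
        pilotjob.wait()
        print "Compute unit finished."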

        # Not sure how to cancel properly
        #   - see https://github.com/saga-project/BigJob/issues/121
        #   - see https://github.com/saga-project/BigJob/issues/131
        print "cancel pilot"
        pilotjob.cancel()

        pilot_service.cancel()

        return (0)

    except Exception, ex:
        print "AN ERROR OCCURED: %s" % ((str(ex)))
        # print a stack trace in case of an exception -
        # this can be helpful for debugging the problem
        traceback.print_exc()
        return (-1)
Code example #7
        # this describes the parameters and requirements for our pilot job
        pilot_description = pilot.PilotComputeDescription()
        pilot_description.service_url = "slurm+ssh://%s" % HOSTNAME
        pilot_description.queue = QUEUE
        pilot_description.number_of_processes = NUMBER_JOBS
        pilot_description.working_directory = WORKDIR
        pilot_description.walltime = 10

        # create a new pilot job
        pilot_compute_service = pilot.PilotComputeService(COORD)
        pilotjob = pilot_compute_service.create_pilot(pilot_description)

        # submit 'A' tasks to pilot job
        task_set_A = list()
        for i in range(NUMBER_JOBS):
            task_desc = pilot.ComputeUnitDescription()
            task_desc.executable = '/bin/echo'
            task_desc.arguments = [
                'I am an $TASK_SET task with id $TASK_NO',
            ]
            task_desc.environment = {'TASK_SET': 'A', 'TASK_NO': i}
            task_desc.number_of_processes = 1
            task_desc.output = 'A-stdout.txt'
            task_desc.error = 'A-stderr.txt'
            task = pilotjob.submit_compute_unit(task_desc)
            print "* Submitted 'A' task '%s' with id '%s'" % (i, task.get_id())
            task_set_A.append(task)

        # submit 'B' tasks to pilot job
        task_set_B = list()
        for i in range(NUMBER_JOBS):
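            # (The excerpt is cut off here. Judging from the matching 'A'
            # block above and the complete chained version in code example #9,
            # the 'B' loop most likely continues along these lines -- sketch
            # only, not part of the original excerpt.)
            task_desc = pilot.ComputeUnitDescription()
            task_desc.executable = '/bin/echo'
            task_desc.arguments = [
                'I am a $TASK_SET task with id $TASK_NO',
            ]
            task_desc.environment = {'TASK_SET': 'B', 'TASK_NO': i}
            task_desc.number_of_processes = 1
            task_desc.output = 'B-stdout.txt'
            task_desc.error = 'B-stderr.txt'
            task = pilotjob.submit_compute_unit(task_desc)
            print "* Submitted 'B' task '%s' with id '%s'" % (i, task.get_id())
            task_set_B.append(task)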
Code example #8
def main():
    try:
        # copy the executable and wrapper script to the remote host
        workdir = saga.filesystem.Directory(
            "sftp://%s/%s" % (HOSTNAME, WORKDIR),
            saga.filesystem.CREATE_PARENTS)
        mbwrapper = saga.filesystem.File("file://localhost/%s/mandelbrot.sh" %
                                         os.getcwd())
        mbwrapper.copy(workdir.get_url())
        mbexe = saga.filesystem.File("file://localhost/%s/mandelbrot.py" %
                                     os.getcwd())
        mbexe.copy(workdir.get_url())

        # this describes the parameters and requirements for our pilot job
        pilot_description = pilot.PilotComputeDescription()
        pilot_description.service_url = "slurm+ssh://%s" % HOSTNAME
        pilot_description.queue = QUEUE
        pilot_description.number_of_processes = 32
        pilot_description.working_directory = WORKDIR
        pilot_description.walltime = 10

        # create a new pilot job
        pilot_compute_service = pilot.PilotComputeService(COORD)
        pilotjob = pilot_compute_service.create_pilot(pilot_description)

        # submit tasks to pilot job
        tasks = list()

        for x in range(0, TILESX):
            for y in range(0, TILESY):
                # describe a single Mandelbrot job. we're using the
                # directory created above as the job's working directory
                task_desc = pilot.ComputeUnitDescription()
                task_desc.executable = '/bin/sh'
                task_desc.arguments = [
                    "/%s/mandelbrot.sh" % WORKDIR, IMGX, IMGY,
                    (IMGX / TILESX * x), (IMGX / TILESX * (x + 1)),
                    (IMGY / TILESY * y), (IMGY / TILESY * (y + 1)),
                    '%s/tile_x%s_y%s.gif' % (WORKDIR, x, y)
                ]
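                # Example of the tile arithmetic above (values illustrative):
                # with IMGX = 2048 and TILESX = 2, tile x = 0 covers pixel
                # columns 0..1024 and tile x = 1 covers 1024..2048; the y
                # bounds are computed the same way from IMGY and TILESY.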

                task_desc.wall_time_limit = 10
                task_desc.number_of_processes = 1

                task = pilotjob.submit_compute_unit(task_desc)
                print "* Submitted task '%s' to %s" % (task.get_id(), HOSTNAME)
                tasks.append(task)

        # ---------------------------------------------------------------------
        print "Waiting for tasks to finish..."
        pilotjob.wait()
        # ---------------------------------------------------------------------

        # copy image tiles back to our 'local' directory
        for image in workdir.list('*.gif'):
            print ' * Copying %s/%s back to %s' % (workdir.get_url(), image,
                                                   os.getcwd())
            workdir.copy(image, 'file://localhost/%s/' % os.getcwd())

        # stitch together the final image
        fullimage = Image.new('RGB', (IMGX, IMGY), (255, 255, 255))
        print ' * Stitching together the whole fractal: mandelbrot_full.gif'
        for x in range(0, TILESX):
            for y in range(0, TILESY):
                partimage = Image.open('tile_x%s_y%s.gif' % (x, y))
                fullimage.paste(
                    partimage,
                    (IMGX / TILESX * x, IMGY / TILESY * y, IMGX / TILESX *
                     (x + 1), IMGY / TILESY * (y + 1)))
        fullimage.save("mandelbrot_full.gif", "GIF")
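        # Note: the tile-box arithmetic above relies on Python 2 integer
        # division; under Python 3 the same expressions would need '//'.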

        return (0)

    except Exception, ex:
        print "AN ERROR OCCURED: %s" % ((str(ex)))
        # print a stack trace in case of an exception -
        # this can be helpful for debugging the problem
        traceback.print_exc()
        return (-1)
Code example #9
def main():
    try:
        # this describes the parameters and requirements for our pilot job
        pilot_description = pilot.PilotComputeDescription()
        pilot_description.service_url = "%s://%s@%s" % (SAGA_ADAPTOR,
                                                        USER_NAME, HOSTNAME)
        pilot_description.queue = QUEUE
        pilot_description.project = PROJECT
        pilot_description.number_of_processes = PILOT_SIZE
        pilot_description.working_directory = WORKDIR
        pilot_description.walltime = WALLTIME
        pilot_description.processes_per_node = PROCESSES_PER_NODE
        pilot_description.spmd_variation = SPMD_VARIATION

        # create a new pilot job
        pilot_compute_service = pilot.PilotComputeService(REDIS_URL)
        pilotjob = pilot_compute_service.create_pilot(pilot_description)

        # submit 'A' tasks to pilot job
        task_set_A = list()
        for i in range(NUMBER_JOBS):

            # -------- BEGIN USER DEFINED TASK 1 DESCRIPTION --------- #
            task_desc = pilot.ComputeUnitDescription()
            task_desc.executable = '/bin/echo'
            task_desc.arguments = [
                'I am an $TASK_SET task with id $TASK_NO',
            ]
            task_desc.environment = {'TASK_SET': 'A', 'TASK_NO': i}
            task_desc.spmd_variation = 'single'
            task_desc.number_of_processes = 1
            task_desc.output = 'A-stdout.txt'
            task_desc.error = 'A-stderr.txt'
            # -------- END USER DEFINED TASK 1 DESCRIPTION --------- #

            task = pilotjob.submit_compute_unit(task_desc)
            print "* Submitted 'A' task '%s' with id '%s'" % (i, task.get_id())
            task_set_A.append(task)

        # submit 'B' tasks to pilot job
        task_set_B = list()
        for i in range(NUMBER_JOBS):

            # -------- BEGIN USER DEFINED TASK 2 DESCRIPTION --------- #
            task_desc = pilot.ComputeUnitDescription()
            task_desc.executable = '/bin/echo'
            task_desc.arguments = ['I am a $TASK_SET task with id $TASK_NO']
            task_desc.environment = {'TASK_SET': 'B', 'TASK_NO': i}
            task_desc.spmd_variation = 'single'
            task_desc.number_of_processes = 1
            task_desc.output = 'B-stdout.txt'
            task_desc.error = 'B-stderr.txt'
            # -------- END USER DEFINED TASK 2 DESCRIPTION --------- #

            task = pilotjob.submit_compute_unit(task_desc)
            print "* Submitted 'B' task '%s' with id '%s'" % (i, task.get_id())
            task_set_B.append(task)

        # ---------------------------------------------------------------------
        print "Waiting for 'A' and 'B' tasks to complete..."
        pilotjob.wait()
        print "Executing 'C' tasks now..."
        # ---------------------------------------------------------------------

        # submit 'C' tasks to pilot job. each 'C' task takes the output of
        # an 'A' and a 'B' task and puts them together.
        task_set_C = list()
        for i in range(NUMBER_JOBS):
            # -------- BEGIN USER DEFINED TASK 3 DESCRIPTION --------- #
            task_desc = pilot.ComputeUnitDescription()
            task_desc.executable = '/bin/echo'
            task_desc.arguments = ['I am a $TASK_SET task with id $TASK_NO']
            task_desc.environment = {'TASK_SET': 'C', 'TASK_NO': i}
            task_desc.spmd_variation = 'single'
            task_desc.number_of_processes = 1
            task_desc.output = 'C-stdout.txt'
            task_desc.error = 'C-stderr.txt'
            # -------- END USER DEFINED TASK 3 DESCRIPTION --------- #

            task = pilotjob.submit_compute_unit(task_desc)
            print "* Submitted 'C' task '%s' with id '%s'" % (i, task.get_id())
            task_set_C.append(task)

        # ---------------------------------------------------------------------
        print "Waiting for 'C' tasks to complete..."
        pilotjob.wait()
        # ---------------------------------------------------------------------

        return (0)

    except Exception, ex:
        print "AN ERROR OCCURRED: %s" % ((str(ex)))
        # print a stack trace in case of an exception -
        # this can be helpful for debugging the problem
        traceback.print_exc()
        return (-1)
Code example #10
def main():
    try:
        # this describes the parameters and requirements for our pilot job
        pilot_description = pilot.PilotComputeDescription()
        pilot_description.service_url = "%s://%s@%s" % (SAGA_ADAPTOR,
                                                        USER_NAME, HOSTNAME)
        pilot_description.queue = QUEUE
        pilot_description.project = PROJECT
        pilot_description.number_of_processes = PILOT_SIZE
        pilot_description.working_directory = WORKDIR
        pilot_description.walltime = WALLTIME
        pilot_description.processes_per_node = PROCESSES_PER_NODE
        pilot_description.spmd_variation = SPMD_VARIATION

        # create a new pilot job
        pilot_compute_service = pilot.PilotComputeService(REDIS_URL)
        pilotjob = pilot_compute_service.create_pilot(pilot_description)

        # submit tasks to pilot job
        tasks = list()
        for i in range(NUMBER_JOBS):
            # -------- BEGIN USER DEFINED TASK DESCRIPTION --------- #
            task_desc = pilot.ComputeUnitDescription()
            task_desc.executable = '/bin/echo'
            task_desc.arguments = [
                'I am task number $TASK_NO',
            ]
            task_desc.environment = {'TASK_NO': i}
            task_desc.number_of_processes = 1
            task_desc.spmd_variation = "single"  # Valid values are single or mpi
            task_desc.output = 'stdout.txt'
            task_desc.error = 'stderr.txt'
            # -------- END USER DEFINED TASK DESCRIPTION --------- #

            task = pilotjob.submit_compute_unit(task_desc)
            print "* Submitted task '%s' with id '%s' to %s" % (
                i, task.get_id(), HOSTNAME)
            tasks.append(task)

        print "Waiting for tasks to finish..."
        pilotjob.wait()

        # ------------ BEGIN FILE TRANSFER LOGIC ------------- #
        # all compute units have finished. now we can use saga-python
        # to transfer back the output files...
        d = saga.filesystem.Directory("sftp://%s/" % (HOSTNAME))
        for task in tasks:
            local_filename = "ex-2-stdout-%s.txt" % (task.get_id())
            d.copy("%s/stdout.txt" % (task.get_local_working_directory()),
                   "file://localhost/%s/%s" % (os.getcwd(), local_filename))
            print "* Output for '%s' copied to: './%s'" % (task.get_id(),
                                                           local_filename)

        # ------------ END FILE TRANSFER LOGIC ------------- #

        return (0)

    except Exception, ex:
        print "AN ERROR OCCURRED: %s" % ((str(ex)))
        # print a stack trace in case of an exception -
        # this can be helpful for debugging the problem
        traceback.print_exc()
        return (-1)
Code example #11
File: heisenbug.py  Project: zonca/saga-python
def main():
    pilotjob1              = None
    pilot_compute_service1 = None
    pilotjob2              = None
    pilot_compute_service2 = None

    try:
        # this describes the parameters and requirements for our pilot job1
        pilot_compute_description_amazon_west1 = pilot.PilotComputeDescription()
        pilot_compute_description_amazon_west1 = {
                             "service_url": 'ec2+ssh://%s' % EC2url,
                             "number_of_processes": 2,
                             "vm_id":AMI,
                             "vm_ssh_username": USER,
                             "vm_ssh_keyname": KEY,
                             "vm_ssh_keyfile": SSHKEYFILE,
                             "vm_type":VM,
                             "region" :REGION,
                             "access_key_id":ACCESS_KEY_ID,
                             "secret_access_key":SECRET_ACCESS_KEY,
                            # "affinity_machine_label": ""
                            }
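        # Note: the PilotComputeDescription created above is immediately
        # replaced by a plain dictionary; create_pilot() below is called with
        # the dict, so the first assignment is effectively unused.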
 
        # create a new pilot job1
        pilot_compute_service1 = pilot.PilotComputeService(COORD)
        pilotjob1 = pilot_compute_service1.create_pilot(pilot_compute_description_amazon_west1)
 
        # this describes the parameters and requirements for our pilot job2
        pilot_compute_description_amazon_west2 = pilot.PilotComputeDescription()
        pilot_compute_description_amazon_west2 = {
                             "service_url": 'ec2+ssh://%s' % EC2url,
                             "number_of_processes": 2,
                             "vm_id": AMI,
                             "vm_ssh_username": USER,
                             "vm_ssh_keyname": KEY,
                             "vm_ssh_keyfile": SSHKEYFILE,
                             "vm_type": VM,
                             "region" : REGION,
                             "access_key_id":ACCESS_KEY_ID,
                             "secret_access_key":SECRET_ACCESS_KEY,
                            # "affinity_machine_label": ""
                            }
 
        # create a new pilot job2
        pilot_compute_service2 = pilot.PilotComputeService(COORD)
        pilotjob2 = pilot_compute_service2.create_pilot(pilot_compute_description_amazon_west2)
 
        # submit tasks1 to pilot job1
        tasks1 = list()
        for i in range(NUMBER_JOBS):
            task_desc1 = pilot.ComputeUnitDescription()
            task_desc1.working_directory = WORKDIR
            task_desc1.executable = '/bin/echo'
            task_desc1.arguments = ['I am task number $TASK_NO from pj1', ]
            task_desc1.environment = {'TASK_NO': i}
            task_desc1.number_of_processes = 1
            task_desc1.output = 'stdout1.txt'
            task_desc1.error = 'stderr1.txt'
 
            task1 = pilotjob1.submit_compute_unit(task_desc1)
            print "* Submitted task '%s' with id '%s' to %s" % (i, task1.get_id(), EC2url)
            tasks1.append(task1)
 
        print "Waiting for tasks to finish..."
        pilotjob1.wait()
 
        # submit tasks2 to pilot job2
        tasks2 = list()
        for i in range(NUMBER_JOBS):
            task_desc2 = pilot.ComputeUnitDescription()
            task_desc2.working_directory = WORKDIR
            task_desc2.executable = '/bin/echo'
            task_desc2.arguments = ['I am task number $TASK_NO from pj2', ]
            task_desc2.environment = {'TASK_NO': i}
            task_desc2.number_of_processes = 1
            task_desc2.output = 'stdout2.txt'
            task_desc2.error = 'stderr2.txt'
 
            task2 = pilotjob2.submit_compute_unit(task_desc2)
            print "* Submitted task '%s' with id '%s' to %s" % (i, task2.get_id(), EC2url)
            tasks2.append(task2)
 
        print "Waiting for tasks to finish..."
        pilotjob2.wait()
        
        return(0)
 
    except Exception, ex:
            print "AN ERROR OCCURED: %s" % ((str(ex)))
            # print a stack trace in case of an exception -
            # this can be helpful for debugging the problem
            traceback.print_exc()
            return(-1)
Code example #12
def main():
    try:
        # this describes the parameters and requirements for our pilot job
        pilot_description = pilot.PilotComputeDescription()
        pilot_description.service_url = "%s://%s@%s" % (SAGA_ADAPTOR,
                                                        USER_NAME, HOSTNAME)
        pilot_description.queue = QUEUE
        pilot_description.project = PROJECT
        pilot_description.number_of_processes = PILOT_SIZE
        pilot_description.working_directory = WORKDIR
        pilot_description.walltime = WALLTIME
        pilot_description.processes_per_node = PROCESSES_PER_NODE
        pilot_description.spmd_variation = SPMD_VARIATION

        # create a new pilot job
        pilot_compute_service = pilot.PilotComputeService(REDIS_URL)
        pilotjob = pilot_compute_service.create_pilot(pilot_description)

        # submit 'A' tasks to pilot job
        task_set_A = list()
        for i in range(NUMBER_JOBS):

            # -------- BEGIN USER DEFINED TASK 1 DESCRIPTION --------- #
            task_desc = pilot.ComputeUnitDescription()
            task_desc.executable = '/bin/echo'
            task_desc.arguments = [
                'I am an $TASK_SET task with id $TASK_NO',
            ]
            task_desc.environment = {'TASK_SET': 'A', 'TASK_NO': i}
            task_desc.spmd_variation = 'single'
            task_desc.number_of_processes = 1
            task_desc.output = 'A-stdout.txt'
            task_desc.error = 'A-stderr.txt'
            # -------- END USER DEFINED TASK 1 DESCRIPTION --------- #

            # Submit task to PilotJob
            task = pilotjob.submit_compute_unit(task_desc)
            print "* Submitted 'A' task '%s' with id '%s'" % (i, task.get_id())
            task_set_A.append(task)

        # Chaining tasks, i.e. submit a compute unit only once the corresponding compute unit from set A has successfully executed.
        # A 'B' task reads the content of the output file of an 'A' task and writes it into its own
        # output file.
        task_set_B = list()
        while len(task_set_A) > 0:
            for a_task in task_set_A:
                if a_task.get_state() == "Done":
                    print "One 'A' task %s finished. Launching a 'B' task." % (
                        a_task.get_id())

                    # -------- BEGIN USER DEFINED TASK 2 DESCRIPTION --------- #
                    task_desc = pilot.ComputeUnitDescription()
                    task_desc.executable = '/bin/echo'
                    task_desc.arguments = [
                        'I am a $TASK_SET task with id $TASK_NO',
                    ]
                    task_desc.environment = {
                        'TASK_SET': 'B',
                        'TASK_NO': a_task.get_id()
                    }
                    task_desc.spmd_variation = 'single'
                    task_desc.number_of_processes = 1
                    task_desc.output = 'B-stdout.txt'
                    task_desc.error = 'B-stderr.txt'
                    # -------- END USER DEFINED TASK 2 DESCRIPTION --------- #

                    # Submit task to Pilot Job
                    task = pilotjob.submit_compute_unit(task_desc)
                    print "* Submitted 'B' task '%s' with id '%s'" % (
                        i, task.get_id())
                    task_set_B.append(task)
                    task_set_A.remove(a_task)

        return (0)

    except Exception, ex:
        print "AN ERROR OCCURRED: %s" % ((str(ex)))
        # print a stack trace in case of an exception -
        # this can be helpful for debugging the problem
        traceback.print_exc()
        return (-1)
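The comments in code examples #1 and #12 say that each 'B' task reads the output file of a finished 'A' task, yet the tasks themselves only call /bin/echo. Below is a sketch of a 'B' task description that actually performs the read; the use of get_local_working_directory() mirrors code examples #2 and #10, and the assumption is that the finished 'A' task wrote A-stdout.txt into that directory.

# Sketch only: a 'B' task that concatenates the finished 'A' task's output
# into its own output file instead of just echoing a message.
task_desc = pilot.ComputeUnitDescription()
task_desc.executable = '/bin/cat'
# assumption: A-stdout.txt sits in the 'A' task's local working directory
task_desc.arguments = ['%s/A-stdout.txt' % a_task.get_local_working_directory()]
task_desc.number_of_processes = 1
task_desc.output = 'B-stdout.txt'
task_desc.error = 'B-stderr.txt'
task = pilotjob.submit_compute_unit(task_desc)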
Code example #13
    print "start  %3d" % i

    pilot_description = pilot.PilotComputeDescription()
    pilot_description.service_url = HOST
    pilot_description.number_of_processes = 1
    pilot_description.working_directory = os.getcwd()

    pilot_service = pilot.PilotComputeService(COORD)

    ### This is broken !!! -> https://github.com/saga-project/BigJob/issues/118
    #pilotjob = pilot_service.create_pilot(pilot_compute_description)
    pilotjob = pilot_service.create_pilot(pilot_compute_description=pilot_description)

    pjs.append(pilotjob)

    task = pilot.ComputeUnitDescription()
    task.executable = "/bin/sleep"
    task.arguments = ["10"]

    pilotjob.submit_compute_unit(task)

stop = time.time()

# see https://github.com/saga-project/BigJob/issues/121
# see https://github.com/saga-project/BigJob/issues/131

for i, pj in enumerate(pjs):
    print "cancel %3d" % i
    pj.cancel()

pilot_service.cancel()