Example #1
0
 def submit_pilot_by_description(self, coordination_url="redis://localhost/", pilot_compute_description=None):
     """Create a pilot compute from a description and record its URL.

     Parameters:
         coordination_url: URL of the Redis coordination backend.
         pilot_compute_description: dict describing the pilot
             (service_url, number_of_processes, ...). Defaults to an
             empty description.
     """
     # Fix: the original used a mutable default argument ({}), which is
     # shared across calls; use None as the sentinel instead.
     if pilot_compute_description is None:
         pilot_compute_description = {}
     pilot_compute_service = PilotComputeService(coordination_url=coordination_url)
     pilot_compute = pilot_compute_service.create_pilot(pilot_compute_description=pilot_compute_description)
     pilot_url = pilot_compute.get_url()
     # Remember the pilot so it can be listed/cancelled later, then persist.
     self.pilots.append(pilot_url)
     print("Started Pilot: %s" % (pilot_url))
     self.__persist()
Example #2
0
File: tasks.py  Project: Python3pkg/DARE
def start_run_pilot(pilot_id, coordination_url=COORD_URL):
    """Submit the stored DareBigJobPilot with the given id and mark it Submitted.

    Parameters:
        pilot_id: primary key of the DareBigJobPilot row to launch.
        coordination_url: Redis coordination backend URL (defaults to COORD_URL).
    """
    pilot = DareBigJobPilot.objects.get(id=pilot_id)
    # Fix: honor the coordination_url argument; the original ignored it and
    # always used the module-level COORD_URL constant.
    pilot_compute_service = PilotComputeService(coordination_url=coordination_url)
    print(pilot.get_pilot_info())
    pilot_compute = pilot_compute_service.create_pilot(
        pilot_compute_description=pilot.get_pilot_info())
    pilot.pilot_url = pilot_compute.get_url()
    pilot.status = "Submitted"
    pilot.save()
    # Fix: print message and id as separate arguments; the original (a 2to3
    # artifact) wrapped them in a tuple, printing "('Started Pilot: ...', 3)".
    print("Started Pilot: %s " % (pilot.pilot_url), pilot.id)
Example #3
0
def start_pilot(pilot_compute_description=None):
    """Start a pilot against the local Redis coordination backend.

    Parameters:
        pilot_compute_description: optional dict describing the pilot;
            when None, a small local fork-based pilot is used.

    Returns:
        The pilot compute handle returned by create_pilot().
    """
    COORDINATION_URL = "redis://localhost:6379"
    pilot_compute_service = PilotComputeService(coordination_url=COORDINATION_URL)
    # Fix: compare to None with `is`, not `==` (PEP 8).
    if pilot_compute_description is None:
        # Default: 2 local processes working under ./work/.
        pilot_compute_description = {
            "service_url": 'fork://localhost',
            "number_of_processes": 2,
            "working_directory": os.getcwd() + "/work/",
        }
    pilot = pilot_compute_service.create_pilot(pilot_compute_description=pilot_compute_description)
    return pilot
Example #4
0
    def start(self):
        """Bring up the pilot compute/data services, launch every pilot
        declared in the workflow, run each workflow step in its own
        thread, and block until all step threads have finished.

        NOTE(review): COORDINATION_URL, darelogger, self.workflow,
        self.step_threads and self.data_pilot_service_repo are
        initialised elsewhere — only their use is visible here.
        """
        darelogger.info("Creating Compute Engine service ")
        self.pilot_compute_service = PilotComputeService(
            coordination_url=COORDINATION_URL)
        self.pilot_data_service = PilotDataService(
            coordination_url=COORDINATION_URL)

        # One compute pilot per description in the workflow repo.
        for compute_pilot, desc in list(
                self.workflow.compute_pilot_repo.items()):
            self.pilot_compute_service.create_pilot(
                pilot_compute_description=desc)

        # One data pilot per description; keep each handle for later use.
        for data_pilot, desc in list(self.workflow.data_pilot_repo.items()):
            self.data_pilot_service_repo.append(
                self.pilot_data_service.create_pilot(
                    pilot_data_description=desc))

        # Tie the compute and data services together behind one facade.
        self.compute_data_service = ComputeDataServiceDecentral()
        self.compute_data_service.add_pilot_compute_service(
            self.pilot_compute_service)
        self.compute_data_service.add_pilot_data_service(
            self.pilot_data_service)

        ### run the steps
        self.step_start_lock = threading.RLock()
        self.step_run_lock = threading.RLock()

        for step_id in list(self.workflow.step_units_repo.keys()):
            darelogger.info(" Sumitted step %s " % step_id)
            # start_thread_step_id is the shared slot telling the newly
            # spawned thread which step to run; the lock serialises the
            # handoff of that slot to self.start_step.
            self.step_start_lock.acquire()
            self.start_thread_step_id = step_id
            self.step_start_lock.release()
            self.step_threads[step_id] = threading.Thread(
                target=self.start_step)
            self.step_threads[step_id].start()

        # Poll every 10 s until no step thread is alive (and at least one
        # thread was started).
        while (1):
            count_step = [
                v.is_alive() for k, v in list(self.step_threads.items())
            ]
            darelogger.info('count_step %s' % count_step)
            if not True in count_step and len(count_step) > 0:
                break
            time.sleep(10)

        darelogger.info(" All Steps Done processing")

        self.quit(message='quit gracefully')
Example #5
0
import sys
import os
import time
import logging
from pilot import PilotComputeService, ComputeDataService, State
logging.basicConfig(level=logging.WARNING)

COORDINATION_URL = "redis://[email protected]:6379"

if __name__ == "__main__":

    pilot_compute_service = PilotComputeService(
        coordination_url=COORDINATION_URL)
    pilot_compute_description = []

    # create pilot job service and initiate a pilot job
    pilot_compute_description.append({
        "service_url":
        'sge-ssh://[email protected]',
        "number_of_processes":
        24,
        "walltime":
        10,
        "processes_per_node":
        12,
        "queue":
        "normal",
        "allocation":
        "TG-MCB090174",
        "working_directory":
        "/home1/01539/pmantha/agent",
Example #6
0
# the dimension (in pixel) of the whole fractal
imgx = 8192
imgy = 8192

# the number of tiles in X and Y direction
tilesx = 2
tilesy = 2

### This is the number of jobs you want to run
NUMBER_JOBS = 4
COORDINATION_URL = "redis://localhost:6379"

if __name__ == "__main__":

    pilot_compute_service = PilotComputeService(COORDINATION_URL)

    # copy image tiles back to our 'local' directory
    dirname = 'sftp://localhost/%s/PJ-mbrot/' % '/tmp'
    workdir = saga.filesystem.Directory(dirname, saga.filesystem.Create)

    pilot_compute_description = {
        "service_url": "fork://localhost",
        "number_of_processes": 12,
        "working_directory": workdir.get_url().path,
        "walltime": 10
    }

    pilot_compute_service.create_pilot(pilot_compute_description)

    compute_data_service = ComputeDataService()