def _setup_bulk_subjobs(self, dirac_ids, dirac_script):
    """
    Construct the subjobs for a parametric job submitted with the old bulk-submit method.

    Args:
        dirac_ids (list): List of the DIRAC ids which have been created
        dirac_script (str): Name of the DIRAC API script which contains the job JDL
    """
    with open(dirac_script, 'r') as f:
        parametric_datasets = get_parametric_datasets(f.read().split('\n'))
    if len(parametric_datasets) != len(dirac_ids):
        raise BackendError('Dirac', 'Mismatch between the number of datasets defined in the DIRAC API script and those returned by DIRAC')
    from Ganga.GPIDev.Lib.Job.Job import Job
    master_job = self.getJobObject()
    master_job.subjobs = []
    # Each subjob inherits the master's configuration but gets its own DIRAC id and dataset
    for i in range(len(dirac_ids)):
        j = Job()
        j.copyFrom(master_job)
        j.splitter = None
        j.backend.id = dirac_ids[i]
        j.id = i
        j.inputdata = self._setup_subjob_dataset(parametric_datasets[i])
        j.status = 'submitted'
        j.time.timenow('submitted')
        master_job.subjobs.append(j)
    return True
def master_setup_bulk_subjobs(self, jobs, jdefids):
    """ Construct one Panda subjob per PanDA job definition id returned by the bulk submission. """
    from Ganga.GPIDev.Lib.Job.Job import Job
    master_job = self.getJobObject()
    for i in range(len(jdefids)):
        j = Job()
        j.copyFrom(master_job)
        j.splitter = None
        j.backend = Panda()
        j.backend.id = jdefids[i]
        j.id = i
        j.status = 'submitted'
        j.time.timenow('submitted')
        master_job.subjobs.append(j)
    return True
def createSubjob(self, job, additional_skip_args=None):
    """ Create a new subjob by copying the master job and setting all fields correctly. """
    from Ganga.GPIDev.Lib.Job.Job import Job
    if additional_skip_args is None:
        additional_skip_args = []

    j = Job()
    skipping_args = ['splitter', 'inputsandbox', 'inputfiles', 'inputdata', 'subjobs']
    for arg in additional_skip_args:
        skipping_args.append(arg)
    j.copyFrom(job, skipping_args)
    j.splitter = None
    j.inputsandbox = []
    j.inputfiles = []
    j.inputdata = None
    return j
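# A minimal usage sketch, not taken from the Ganga sources: given a splitter-like object
# that provides createSubjob() as above and a master Job, build one subjob per argument set.
# The 'application.args' attribute and the helper name split_by_args are illustrative
# assumptions, not part of the API shown here.
def split_by_args(splitter, master_job, arg_sets):
    subjobs = []
    for args in arg_sets:
        sj = splitter.createSubjob(master_job)  # copy of the master with inputs reset
        sj.application.args = args              # assumes the application exposes an 'args' field
        subjobs.append(sj)
    return subjobs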
def _setup_bulk_subjobs(self, dirac_ids, dirac_script):
    with open(dirac_script, 'r') as f:
        parametric_datasets = get_parametric_datasets(f.read().split('\n'))
    if len(parametric_datasets) != len(dirac_ids):
        raise BackendError('Dirac', 'Mismatch between the number of datasets defined in the DIRAC API script and those returned by DIRAC')
    from Ganga.GPIDev.Lib.Job.Job import Job
    master_job = self.getJobObject()
    for i in range(len(dirac_ids)):
        j = Job()
        j.copyFrom(master_job)
        j.splitter = None
        j.backend.id = dirac_ids[i]
        j.id = i
        j.inputdata = self._setup_subjob_dataset(parametric_datasets[i])
        j.status = 'submitted'
        j.time.timenow('submitted')
        master_job.subjobs.append(j)
    master_job._commit()
    return True
def _setup_bulk_subjobs(self, dirac_ids, dirac_script):
    with open(dirac_script, 'r') as f:
        parametric_datasets = get_parametric_datasets(f.read().split('\n'))
    if len(parametric_datasets) != len(dirac_ids):
        raise BackendError('Dirac', 'Mismatch between the number of datasets defined in the DIRAC API script and those returned by DIRAC')
    from Ganga.GPIDev.Lib.Job.Job import Job
    master_job = self.getJobObject()
    master_job.subjobs = []
    for i in range(len(dirac_ids)):
        j = Job()
        j.copyFrom(master_job)
        j.splitter = None
        j.backend.id = dirac_ids[i]
        j.id = i
        j.inputdata = self._setup_subjob_dataset(parametric_datasets[i])
        j.status = 'submitted'
        j.time.timenow('submitted')
        master_job.subjobs.append(j)
    master_job._commit()
    return True
def submit(N, K):
    """ Submit K LCG jobs, each split into N subjobs, and wait until all of them finish. """
    import time
    jobs = []
    for i in range(K):
        j = Job()
        j._auto__init__()
        j.backend = LCG()
        j.backend.middleware = 'GLITE'
        j.splitter = GenericSplitter()
        j.splitter.attribute = 'application.args'
        j.splitter.values = [['x']] * N  # N identical argument sets -> N subjobs
        j.submit()
        jobs.append(j)

    def finished():
        # All jobs must have reached a terminal state
        for j in jobs:
            if j.status not in ['failed', 'completed']:
                return False
        return True

    while not finished():
        time.sleep(1)
    return jobs
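# Hedged usage sketch: submit two jobs of three subjobs each and report their final states.
# Assumes an interactive Ganga session where Job, LCG and GenericSplitter are exported to the GPI.
jobs = submit(3, 2)
for j in jobs:
    print('%s %s %s' % (j.id, j.status, len(j.subjobs)))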