def _submit(self, method, options, datasets, jobs, caption=None, wait=True, why_build=False, workdir=None):
	"""Submit a job to the server and conditionally wait for it to finish.

	Builds a setup for *method* from the given options/datasets/jobs,
	posts it to the server and records (setup, return) in self.history.
	When ``wait`` is true and the server reports the job as not done,
	blocks in ``self.wait`` until completion.

	Returns a tuple (jobid, server return value).
	"""
	self.job_method = method
	# A 'why_build' entry in self.flags overrides an unset argument.
	if 'why_build' in self.flags and not why_build:
		why_build = 'on_build'
	# Monitor output is suppressed for why_build runs; remember the
	# decision so submit/done are always paired.
	monitored = self.monitor and not why_build
	if monitored:
		self.monitor.submit(method)
	params = {method: dict(options=options, datasets=datasets, jobs=jobs)}
	data = setupfile.generate(caption or '', method, params, why_build=why_build)
	if self.subjob_cookie:
		# Mark this submission as a subjob of the current process.
		data.subjob_cookie = self.subjob_cookie
		data.parent_pid = os.getpid()
	if workdir:
		data.workdir = workdir
	submit_time = time.time()
	self.job_retur = self._server_submit(data)
	self.history.append((data, self.job_retur))
	if wait and not self.job_retur.done:
		self.wait(submit_time)
	if monitored:
		self.monitor.done()
	return self.jobid(method), self.job_retur
def _submit(self, method, options, datasets, jobs, caption=None, wait=True, why_build=False, workdir=None, concurrency=None):
	"""Submit a job to the server and conditionally wait for it to finish.

	Generates a setup for *method*, applies any optional overrides
	(workdir, concurrency, concurrency_map, subjob cookie), posts it to
	the server and records (setup, return) in self.history.  When
	``wait`` is true and the job is not already done, blocks in
	``self.wait`` until completion.

	Returns a tuple (jobid, server return value).
	"""
	self.job_method = method
	# A 'why_build' entry in self.flags overrides an unset argument.
	if not why_build and 'why_build' in self.flags:
		why_build = 'on_build'
	# Progress output is suppressed for why_build runs; remember the
	# decision so submit/done are always paired.
	show_progress = bool(self.monitor) and not why_build
	if show_progress:
		self.monitor.submit(method)
	data = setupfile.generate(caption or '', method, options, datasets, jobs, why_build=why_build)
	if self.subjob_cookie:
		# Mark this submission as a subjob of the current process.
		data.subjob_cookie = self.subjob_cookie
		data.parent_pid = os.getpid()
	# Optional overrides are only attached when actually provided.
	if workdir:
		data.workdir = workdir
	if concurrency:
		data.concurrency = concurrency
	if self.concurrency_map:
		data.concurrency_map = self.concurrency_map
	self.job_retur = self._server_submit(data)
	self.history.append((data, self.job_retur))
	if wait and not self.job_retur.done:
		self.wait()
	if show_progress:
		self.monitor.done()
	return self.jobid(method), self.job_retur
def initialise_jobs(setup, target_WorkSpace, DataBase, Methods, verbose=False):
	"""Work out which jobs must be built and allocate jobids for them.

	Builds a dependency tree for the request in *setup*, links any jobs
	that already exist in *DataBase*, and for the remaining ("make")
	jobs allocates jobids in *target_WorkSpace* and saves a setup file
	for each.  If *setup* asks for why_build, no jobs are allocated and
	an explanation structure is returned instead.

	Parameters:
		setup            -- job request (options, why_build flag, caption, ...)
		target_WorkSpace -- workspace to allocate new jobids in
		DataBase         -- database of already-built jobs
		Methods          -- method registry (db, hash, typing, ...)
		verbose          -- accepted for interface compatibility; currently unused

	Returns:
		(new_jobid_list, info) where info is either
		{'why_build': ...} (and new_jobid_list is []) or {'jobs': ...}.

	Fixes vs. previous revision (behavior unchanged): the inner loop
	variable shadowing the ``method`` local is renamed, and the local
	``typing`` no longer shadows the stdlib module name.
	"""
	# Create a DepTree object used to track options and make status.
	DepTree = deptree.DepTree(Methods, setup)
	# Compare database to deptree: exact matches are reused, not rebuilt.
	reqlist = DepTree.get_reqlist()
	for uid, job in DataBase.match_exact(reqlist):
		DepTree.set_link(uid, job)
	DepTree.propagate_make()
	why_build = setup.get('why_build')
	if why_build:
		# Snapshot the tree before defaults are filled in, so the
		# why_build report reflects the original request.
		orig_tree = deepcopy(DepTree.tree)
	DepTree.fill_in_default_options()
	# Jobs in execution order; the ones flagged 'make' must be built.
	joblist = DepTree.get_sorted_joblist()
	newjoblist = [x for x in joblist if x['make']]
	num_new_jobs = len(newjoblist)
	# NOTE: '== True' is deliberate -- why_build may also be the truthy
	# string 'on_build', which should only report when jobs would build.
	if why_build == True or (why_build and num_new_jobs):
		res = OrderedDict()
		DepTree.tree = orig_tree
		joblist = DepTree.get_sorted_joblist()
		for job in joblist:
			if job['make']:
				res[job['method']] = find_possible_jobs(DataBase, Methods, job)
			else:
				res[job['method']] = {job['link']: {}}
		return [], {'why_build': res}
	if num_new_jobs:
		new_jobid_list = target_WorkSpace.allocate_jobs(num_new_jobs)
		# Insert the freshly allocated jobids into the joblist entries.
		for x, jid in zip(newjoblist, new_jobid_list):
			x['link'] = jid
		for data in newjoblist:
			# Renamed from 'method' -- the old name was shadowed by the
			# typing-collection loop below.
			method_info = Methods.db[data['method']]
			new_setup = setupfile.generate(
				caption=setup.caption,
				method=data['method'],
				params=data['params'],
				package=method_info['package'],
				python=runners[method_info.version].python,
			)
			new_setup.hash = Methods.hash[data['method']][0]
			new_setup.seed = randint(0, 2**63 - 1)
			new_setup.jobid = data['link']
			new_setup.slices = target_WorkSpace.slices
			# Collect per-method typing information (if any) for the setup.
			typing_info = {}
			for param_method in data['params']:
				m_typing = Methods.typing[param_method]
				if m_typing:
					typing_info[param_method] = m_typing
			if typing_info:
				new_setup['_typing'] = typing_info
			setupfile.save_setup(data['link'], new_setup)
	else:
		new_jobid_list = []
	# Summarise all jobs (new and reused) for the caller.
	res = {
		j['method']: {k: v for k, v in j.items() if k in ('link', 'make', 'total_time')}
		for j in joblist
	}
	return new_jobid_list, {'jobs': res}