Example #1
def version(self):
    # this is self.params.version, but without fully loading the params
    # (unless already loaded).
    if 'params' in self._cache:
        return self._cache['params'].version
    from accelerator.setupfile import load_setup
    return load_setup(self).version
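All of these examples revolve around accelerator.setupfile.load_setup, which parses a job's setup.json into a dict-like object with attribute access (as the snippets' use of setup.method, setup.exectime.total and d.get(...) shows). A minimal sketch of calling it directly, assuming a finished job exists; the job id 'dev-0' is made up:

from accelerator.setupfile import load_setup

setup = load_setup('dev-0')           # 'dev-0' is a hypothetical job id
print(setup.method, setup.version)    # fields read by the examples on this page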
Example #2
def _job_params(jobid):
    # iteritems and _apply_typing are helpers provided by the surrounding
    # accelerator code; only load_setup is imported locally.
    from accelerator.setupfile import load_setup
    d = load_setup(jobid)
    # Re-apply option typing for every method, then flatten the current
    # method's own params into the top level of the returned dict.
    for method, tl in iteritems(d.get('_typing', {})):
        _apply_typing(d.params[method].options, tl)
    d.update(d.params[d.method])
    return d
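A minimal usage sketch for the helper above, assuming it is importable from the module that defines it and that a finished job exists; the job id is made up:

params = _job_params('dev-0')   # hypothetical job id
print(params.method)            # the method this job ran
print(params.options)           # its options, with typing re-applied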
Example #3
def job_data(known, jid):
	# DotDict, fmttime and load_setup come from the surrounding accelerator code.
	if jid in known:
		data = known[jid]
	else:
		# Unknown job: read what we can from its setup file, fall back to placeholders.
		data = DotDict(method='???', totaltime=None, current=None)
		try:
			setup = load_setup(jid)
			data.method = setup.method
			if 'exectime' in setup:
				data.totaltime = setup.exectime.total
		except Exception:
			pass
	if isinstance(data.totaltime, (float, int)):
		data.totaltime = fmttime(data.totaltime)
	# Classify for display: no total time means the job never finished.
	if data.totaltime is None:
		data.klass = 'unfinished'
	elif data.current:
		data.klass = 'current'
	else:
		data.klass = 'old'
	return data
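A small usage sketch, assuming the helpers it relies on are in scope; 'known' is an ordinary cache dict and the job id is made up:

known = {}
info = job_data(known, 'dev-0')                  # hypothetical job id
print(info.method, info.totaltime, info.klass)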
Example #4
def run(jobidv, tlock):
    # setup, data, workdir, link2job, error, job_tracking, gen_cookie,
    # JLock, JobError and self all come from the enclosing scope.
    for jobid in jobidv:
        passed_cookie = None
        # This is not a race - all higher locks are locked too.
        while passed_cookie in job_tracking:
            passed_cookie = gen_cookie()
        concurrency_map = dict(data.concurrency_map)
        concurrency_map.update(setup.get('concurrency_map', ()))
        job_tracking[passed_cookie] = DotDict(
            lock=JLock(),
            last_error=None,
            last_time=0,
            workdir=workdir,
            concurrency_map=concurrency_map,
        )
        try:
            explicit_concurrency = setup.get('concurrency') or concurrency_map.get(setup.method)
            concurrency = explicit_concurrency or concurrency_map.get('-default-')
            if concurrency and setup.method == 'csvimport':
                # just to be safe, check the package too
                if load_setup(jobid).package == 'accelerator.standard_methods':
                    # ignore default concurrency, error on explicit.
                    if explicit_concurrency:
                        raise JobError(jobid, 'csvimport', {
                            'server': 'csvimport can not run with reduced concurrency',
                        })
                    concurrency = None
            self.ctrl.run_job(
                jobid,
                subjob_cookie=passed_cookie,
                parent_pid=setup.get('parent_pid', 0),
                concurrency=concurrency,
            )
            # update database since a new jobid was just created
            job = self.ctrl.add_single_jobid(jobid)
            with tlock:
                link2job[jobid]['make'] = 'DONE'
                link2job[jobid]['total_time'] = job.total
        except JobError as e:
            error.append([e.job, e.method, e.status])
            with tlock:
                link2job[jobid]['make'] = 'FAIL'
            return
        finally:
            del job_tracking[passed_cookie]
    # everything was built ok, update symlink
    try:
        dn = self.ctrl.workspaces[workdir].path
        ln = os.path.join(dn, workdir + "-LATEST_")
        try:
            os.unlink(ln)
        except OSError:
            pass
        os.symlink(jobid, ln)
        os.rename(ln, os.path.join(dn, workdir + "-LATEST"))
    except OSError:
        traceback.print_exc(file=sys.stderr)
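The symlink handling at the end is the usual atomic-replace pattern: the new link is created under a temporary name (workdir + "-LATEST_") and then renamed over the final name, so readers never observe the -LATEST link missing. A standalone sketch of the same pattern, with made-up names:

import os

def replace_symlink(target, linkname):
    tmp = linkname + '_'
    try:
        os.unlink(tmp)           # remove a leftover temporary link, if any
    except OSError:
        pass
    os.symlink(target, tmp)      # create the new link under the temporary name
    os.rename(tmp, linkname)     # atomically replace the old link on POSIX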
Example #5
def _job_params(jobid):
	from accelerator.setupfile import load_setup
	# _apply_typing is a helper from the surrounding accelerator code; here the
	# typing is applied directly to the flat options dict of the loaded setup.
	d = load_setup(jobid)
	_apply_typing(d.options, d.get('_typing', ()))
	return d