def run(jobidv, tlock):
	"""Build every job in jobidv in sequence, then repoint the current
	workspace's -LATEST symlink at the last job built.

	Shared state (job_tracking, link2job, error) belongs to the enclosing
	scope; link2job/error mutations are serialized through tlock.
	Returns early (without touching the symlink) on the first JobError.
	"""
	for jobid in jobidv:
		# Pick a cookie not already tracked. None is always a key (the
		# main job), so gen_cookie() runs at least once per job.
		# This is not a race - all higher locks are locked too.
		passed_cookie = None
		while passed_cookie in job_tracking:
			passed_cookie = gen_cookie()
		job_tracking[passed_cookie] = DotDict(
			lock=JLock(),
			last_error=None,
			last_time=0,
		)
		try:
			self.ctrl.run_job(jobid, subjob_cookie=passed_cookie, parent_pid=setup.get('parent_pid', 0))
			# update database since a new jobid was just created
			job = self.ctrl.add_single_jobid(jobid)
			with tlock:
				link2job[jobid]['make'] = 'DONE'
				link2job[jobid]['total_time'] = job.total
		except JobError as e:
			error.append([e.jobid, e.method, e.status])
			with tlock:
				link2job[jobid]['make'] = 'FAIL'
			return
		finally:
			# Always drop the tracking entry, success or failure.
			del job_tracking[passed_cookie]
	# everything was built ok, update symlink
	try:
		workspace = self.ctrl.current_workspace
		base = self.ctrl.workspaces[workspace].path
		tmp_link = os.path.join(base, workspace + "-LATEST_")
		try:
			os.unlink(tmp_link)
		except OSError:
			pass
		# symlink to a temp name, then rename over the real one, so the
		# -LATEST link is replaced atomically.
		os.symlink(jobid, tmp_link)
		os.rename(tmp_link, os.path.join(base, workspace + "-LATEST"))
	except Exception:
		pass  # meh
def run(jobidv, tlock):
	"""Build every job in jobidv in sequence, then update the workdir's
	-LATEST symlink to point at the last job built.

	Uses closure state from the enclosing scope: job_tracking, link2job,
	error, setup, data, workdir and self. link2job/error updates are
	serialized through tlock. Returns early on the first JobError,
	leaving the symlink untouched.
	"""
	for jobid in jobidv:
		# Find a cookie not already in job_tracking. None is always a
		# key (the main job), so gen_cookie() runs at least once.
		passed_cookie = None
		# This is not a race - all higher locks are locked too.
		while passed_cookie in job_tracking:
			passed_cookie = gen_cookie()
		# Per-job concurrency map: server defaults overridden by
		# whatever the setup supplies (may be absent -> empty).
		concurrency_map = dict(data.concurrency_map)
		concurrency_map.update(setup.get('concurrency_map', ()))
		job_tracking[passed_cookie] = DotDict(
			lock=JLock(),
			last_error=None,
			last_time=0,
			workdir=workdir,
			concurrency_map=concurrency_map,
		)
		try:
			# Concurrency resolution order: explicit setup value,
			# then per-method map entry, then the '-default-' entry.
			self.ctrl.run_job(jobid, subjob_cookie=passed_cookie, parent_pid=setup.get('parent_pid', 0), concurrency=setup.get('concurrency') or concurrency_map.get(setup.method) or concurrency_map.get('-default-'))
			# update database since a new jobid was just created
			job = self.ctrl.add_single_jobid(jobid)
			with tlock:
				link2job[jobid]['make'] = 'DONE'
				link2job[jobid]['total_time'] = job.total
		except JobError as e:
			error.append([e.jobid, e.method, e.status])
			with tlock:
				link2job[jobid]['make'] = 'FAIL'
			return
		finally:
			# Always drop the tracking entry, success or failure.
			del job_tracking[passed_cookie]
	# everything was built ok, update symlink
	try:
		dn = self.ctrl.workspaces[workdir].path
		ln = os.path.join(dn, workdir + "-LATEST_")
		try:
			os.unlink(ln)
		except OSError:
			pass
		# symlink to a temp name then rename over the real one, so
		# -LATEST is replaced atomically.
		os.symlink(jobid, ln)
		os.rename(ln, os.path.join(dn, workdir + "-LATEST"))
	except OSError:
		# Best effort: log the failure but don't fail the build.
		traceback.print_exc()
from compat import unicode
from extras import json_encode, json_decode, DotDict
from dispatch import JobError
from status import statmsg_sink, children, print_status_stacks, status_stacks_export

DEBUG_WRITE_JSON = False

def gen_cookie(size=16):
	"""Return a random cookie of `size` ASCII letters.

	NOTE(review): uses `random`, which is not cryptographically secure;
	fine if cookies only need uniqueness, use `secrets` otherwise.
	"""
	return ''.join(random.choice(ascii_letters) for _ in range(size))

# This contains cookie: {lock, last_error, last_time} for all jobs, main jobs have cookie None.
job_tracking = {None: DotDict(lock=JLock(), last_error=None, last_time=0)}

# This needs .ctrl to work. It is set from main()
class XtdHandler(BaseWebHandler):
	server_version = "scx/0.1"
	# DEBUG is spelled "not True" so flipping it for local debugging is
	# a one-word edit.
	DEBUG = not True

	def log_message(self, format, *args):
		# Suppress the default per-request logging.
		return

	def encode_body(self, body):
		"""Return `body` as bytes (utf-8 encoding text)."""
		if isinstance(body, bytes):
			return body
		if isinstance(body, unicode):
			return body.encode('utf-8')
def run(jobidv, tlock):
	"""Build every job in jobidv in sequence, then update the workdir's
	-LATEST symlink to point at the last job built.

	Uses closure state from the enclosing scope: job_tracking, link2job,
	error, setup, data, workdir and self. link2job/error updates are
	serialized through tlock. Returns early on the first JobError,
	leaving the symlink untouched.
	"""
	for jobid in jobidv:
		# Find a cookie not already in job_tracking. None is always a
		# key (the main job), so gen_cookie() runs at least once.
		passed_cookie = None
		# This is not a race - all higher locks are locked too.
		while passed_cookie in job_tracking:
			passed_cookie = gen_cookie()
		# Per-job concurrency map: server defaults overridden by
		# whatever the setup supplies (may be absent -> empty).
		concurrency_map = dict(data.concurrency_map)
		concurrency_map.update(setup.get('concurrency_map', ()))
		job_tracking[passed_cookie] = DotDict(
			lock=JLock(),
			last_error=None,
			last_time=0,
			workdir=workdir,
			concurrency_map=concurrency_map,
		)
		try:
			# Explicit concurrency (setup value or per-method map entry)
			# wins over the '-default-' map entry.
			explicit_concurrency = setup.get('concurrency') or concurrency_map.get(setup.method)
			concurrency = explicit_concurrency or concurrency_map.get('-default-')
			if concurrency and setup.method == 'csvimport':
				# just to be safe, check the package too
				if load_setup(jobid).package == 'accelerator.standard_methods':
					# ignore default concurrency, error on explicit.
					if explicit_concurrency:
						raise JobError(jobid, 'csvimport', {'server': 'csvimport can not run with reduced concurrency'})
					concurrency = None
			self.ctrl.run_job(jobid, subjob_cookie=passed_cookie, parent_pid=setup.get('parent_pid', 0), concurrency=concurrency)
			# update database since a new jobid was just created
			job = self.ctrl.add_single_jobid(jobid)
			with tlock:
				link2job[jobid]['make'] = 'DONE'
				link2job[jobid]['total_time'] = job.total
		except JobError as e:
			error.append([e.job, e.method, e.status])
			with tlock:
				link2job[jobid]['make'] = 'FAIL'
			return
		finally:
			# Always drop the tracking entry, success or failure.
			del job_tracking[passed_cookie]
	# everything was built ok, update symlink
	try:
		dn = self.ctrl.workspaces[workdir].path
		ln = os.path.join(dn, workdir + "-LATEST_")
		try:
			os.unlink(ln)
		except OSError:
			pass
		# symlink to a temp name then rename over the real one, so
		# -LATEST is replaced atomically.
		os.symlink(jobid, ln)
		os.rename(ln, os.path.join(dn, workdir + "-LATEST"))
	except OSError:
		# Best effort: log the failure but don't fail the build.
		traceback.print_exc(file=sys.stderr)
from accelerator.setupfile import load_setup
from accelerator.statmsg import statmsg_sink, children, print_status_stacks, status_stacks_export
from accelerator import iowrapper, board, g, __version__ as ax_version

DEBUG_WRITE_JSON = False

def gen_cookie(size=16):
	"""Return a random cookie of `size` ASCII letters.

	NOTE(review): uses `random`, which is not cryptographically secure;
	fine if cookies only need uniqueness, use `secrets` otherwise.
	"""
	return ''.join(random.choice(ascii_letters) for _ in range(size))

# This contains cookie: {lock, last_error, last_time, workdir, concurrency_map}
# for all jobs, main jobs have cookie None.
job_tracking = {None: DotDict(lock=JLock(), last_error=None, last_time=0, workdir=None, concurrency_map={})}

# This needs .ctrl to work. It is set from main()
class XtdHandler(BaseWebHandler):
	server_version = "scx/0.1"
	unicode_args = True
	# DEBUG is spelled "not True" so flipping it for local debugging is
	# a one-word edit.
	DEBUG = not True

	def log_message(self, format, *args):
		# Suppress the default per-request logging.
		return
from accelerator.extras import json_encode, json_decode, DotDict
from accelerator.build import JobError
from accelerator.status import statmsg_sink, children, print_status_stacks, status_stacks_export
from accelerator import iowrapper

DEBUG_WRITE_JSON = False

def gen_cookie(size=16):
	"""Return a random cookie of `size` ASCII letters.

	NOTE(review): uses `random`, which is not cryptographically secure;
	fine if cookies only need uniqueness, use `secrets` otherwise.
	"""
	return ''.join(random.choice(ascii_letters) for _ in range(size))

# This contains cookie: {lock, last_error, last_time, workdir}
# for all jobs, main jobs have cookie None.
job_tracking = {None: DotDict(lock=JLock(), last_error=None, last_time=0, workdir=None)}

# This needs .ctrl to work. It is set from main()
class XtdHandler(BaseWebHandler):
	server_version = "scx/0.1"
	# DEBUG is spelled "not True" so flipping it for local debugging is
	# a one-word edit.
	DEBUG = not True

	def log_message(self, format, *args):
		# Suppress the default per-request logging.
		return

	def encode_body(self, body):
		"""Return `body` as bytes (utf-8 encoding text); continues past this view."""
		if isinstance(body, bytes):
			return body
		if isinstance(body, unicode):
from accelerator.build import JobError
from accelerator.job import Job
from accelerator.statmsg import statmsg_sink, children, print_status_stacks, status_stacks_export
from accelerator import iowrapper, board, g, __version__ as ax_version

DEBUG_WRITE_JSON = False

def gen_cookie(size=16):
	"""Return a random cookie of `size` ASCII letters.

	NOTE(review): uses `random`, which is not cryptographically secure;
	fine if cookies only need uniqueness, use `secrets` otherwise.
	"""
	return ''.join(random.choice(ascii_letters) for _ in range(size))

# This contains cookie: {lock, last_error, last_time, workdir, concurrency_map}
# for all jobs, main jobs have cookie None.
job_tracking = {None: DotDict(lock=JLock(), last_error=None, last_time=0, workdir=None, concurrency_map={})}

# This needs .ctrl to work. It is set from main()
class XtdHandler(BaseWebHandler):
	server_version = "scx/0.1"
	unicode_args = True
	# DEBUG is spelled "not True" so flipping it for local debugging is
	# a one-word edit.
	DEBUG = not True

	def log_message(self, format, *args):
		# Suppress the default per-request logging.
		return

	def do_response(self, code, content_type, body):
		# Tag every response with the server version, then delegate.
		hdrs = [('Accelerator-Version', ax_version)]
		BaseWebHandler.do_response(self, code, content_type, body, hdrs)