def run_automata(options, cfg):
    """Run a build script ("automata") against the server.

    Waits for the server to be idle, locates the script module, resolves urd
    credentials from the environment, and calls the script's main(urd).
    Returns early (after waiting) when options.just_wait is set.
    """
    g.running = 'build'
    a = Automata(cfg.url, verbose=options.verbose, flags=options.flags.split(','), infoprints=True, print_full_jobpath=options.fullpath)
    try:
        a.wait(ignore_old_errors=not options.just_wait)
    except JobError:
        # An error occurred in a job we didn't start, which is not our problem.
        pass
    if options.just_wait:
        return
    module_ref = find_automata(a, options.package, options.script)
    # The script's entry point must take exactly one argument, named urd.
    assert getarglist(module_ref.main) == ['urd'], "Only urd-enabled automatas are supported"
    # Urd credentials: $URD_AUTH ("user:password") wins; otherwise fall back
    # to $USER with an empty password.
    if 'URD_AUTH' in os.environ:
        assert ':' in os.environ['URD_AUTH'], "Set $URD_AUTH to user:password"
        user, password = os.environ['URD_AUTH'].split(':', 1)
    else:
        user = os.environ.get('USER')
        if not user:
            # Neither variable set: use a placeholder name and warn on stderr.
            user = '******'
            print("No $URD_AUTH or $USER in environment, using %r" % (user,), file=sys.stderr)
        password = ''
    info = a.info()
    urd = Urd(a, info, user, password, options.horizon, options.workdir)
    # options.quick skips the full method update, only refreshing method info.
    if options.quick:
        a.update_method_info()
    else:
        a.update_methods()
    module_ref.main(urd)
    urd._show_warnings()
def build(method, options=None, datasets=None, jobs=None, name=None, caption=None, **kw):
    """Just like urd.build, but for making subjobs.

    method:   name of the method to build.
    options, datasets, jobs: passed through to the server call (default empty).
    name:     record the subjob under this name (also used in status message).
    caption:  free-text caption shown while building.
    Extra keywords are passed to call_method, except those that would collide
    with its own parameters (those raise).
    Returns the jobid of the built subjob.
    Raises ServerError/JobError (re-raised flat, without the internal
    traceback), or AssertionError when called from analysis or without a
    subjob cookie.
    """
    global _a, _bad_kws
    assert g.running != 'analysis', "Analysis is not allowed to make subjobs"
    assert g.subjob_cookie, "Can't build subjobs: out of cookies"
    # Fresh dicts per call instead of mutable defaults in the signature,
    # so downstream mutation can never poison later calls.
    options = {} if options is None else options
    datasets = {} if datasets is None else datasets
    jobs = {} if jobs is None else jobs
    if not _a:
        # Lazily set up the (module-cached) Automata used for subjob building.
        _a = Automata(g.server_url, subjob_cookie=g.subjob_cookie)
        _a.update_method_info()
        # The jobs parameter shadows the module-level jobs, hence globals().
        _a.record[None] = _a.jobs = globals()['jobs']
        _bad_kws = set(getarglist(_a.call_method))
    bad_kws = _bad_kws & set(kw)
    if bad_kws:
        raise Exception('subjobs.build does not accept these keywords: %r' % (bad_kws,))
    def run():
        return _a.call_method(method, options=options, datasets=datasets, jobs=jobs, record_as=name, caption=caption, **kw)
    try:
        if name or caption:
            msg = 'Building subjob %s' % (name or method,)
            if caption:
                msg += ' "%s"' % (caption,)
            with status(msg):
                jid = run()
        else:
            jid = run()
    except ServerError as e:
        # Re-raise flat to hide the (uninteresting) internal call stack.
        raise ServerError(e.args[0])
    except JobError as e:
        raise JobError(e.job, e.method, e.status)
    # Record which jobs were actually built (vs reused) for this session.
    for d in _a.job_retur.jobs.values():
        if d.link not in _record:
            _record[d.link] = bool(d.make)
    return jid
def run_automata(options, cfg):
    """Run a build script ("automata") against the server.

    Handles options.abort (abort running jobs and return), waits for the
    server, locates the script module, resolves urd credentials from the
    environment and calls the script's main(urd). Returns early (after
    waiting) when options.just_wait is set.
    """
    a = Automata(cfg.url, verbose=options.verbose, flags=options.flags.split(','), infoprints=True, print_full_jobpath=options.fullpath)
    if options.abort:
        a.abort()
        return
    try:
        a.wait(ignore_old_errors=not options.just_wait)
    except JobError:
        # An error occurred in a job we didn't start, which is not our problem.
        pass
    if options.just_wait:
        return
    module_ref = find_automata(a, options.package, options.script)
    assert getarglist(module_ref.main) == ['urd'], "Only urd-enabled automatas are supported"
    if 'URD_AUTH' in os.environ:
        # Fail with a clear message instead of an opaque unpack ValueError
        # when $URD_AUTH lacks the ':' separator.
        assert ':' in os.environ['URD_AUTH'], "Set $URD_AUTH to user:password"
        user, password = os.environ['URD_AUTH'].split(':', 1)
    else:
        # $USER may be unset; don't crash with KeyError, use empty user.
        user, password = os.environ.get('USER', ''), ''
    info = a.info()
    urd = Urd(a, info, user, password, options.horizon)
    # options.quick skips the full method update, only refreshing deps.
    if options.quick:
        a.update_method_deps()
    else:
        a.update_methods()
    module_ref.main(urd)
    urd._show_warnings()
def execute_process(workdir, jobid, slices, concurrency, result_directory, common_directory, input_directory, index=None, workdirs=None, server_url=None, subjob_cookie=None, parent_pid=0):
    """Run one job (method) to completion in the launch process.

    Imports the method module, then runs its prepare, analysis and synthesis
    phases (any of which may be absent), finishing datasets along the way,
    and returns (None, (profile_times, saved_files, subjob_record)).
    NOTE(review): index and common_directory are not used in this body —
    presumably kept for call-site compatibility; confirm against callers.
    """
    # Make all known workdirs resolvable, then move into this job's directory.
    WORKDIRS.update(workdirs)
    g.job = jobid
    setproctitle('launch')
    path = os.path.join(workdir, jobid)
    try:
        os.chdir(path)
    except Exception:
        print("Cannot cd to workdir", path)
        exit(1)
    g.params = params = job_params()
    method_ref = import_module(params.package + '.a_' + params.method)
    g.sliceno = -1
    g.job = CurrentJob(jobid, params, result_directory, input_directory)
    g.slices = slices
    # Expose options/datasets/jobs both on g and on the method module itself.
    g.options = params.options
    g.datasets = params.datasets
    g.jobs = params.jobs
    method_ref.options = params.options
    method_ref.datasets = params.datasets
    method_ref.jobs = params.jobs
    g.server_url = server_url
    g.running = 'launch'
    statmsg._start('%s %s' % (jobid, params.method,), parent_pid)
    # Sentinel standing in for phases the method does not define.
    def dummy():
        pass
    prepare_func = getattr(method_ref, 'prepare', dummy)
    analysis_func = getattr(method_ref, 'analysis', dummy)
    synthesis_func = getattr(method_ref, 'synthesis', dummy)
    # synthesis(analysis_res=...) means analysis results must be kept for it.
    synthesis_needs_analysis = 'analysis_res' in getarglist(synthesis_func)
    fd2pid, names, masters, slaves = iowrapper.setup(slices, prepare_func is not dummy, analysis_func is not dummy)
    # Redirect this process's stdout/stderr to the next iowrapper slave fd.
    def switch_output():
        fd = slaves.pop()
        os.dup2(fd, 1)
        os.dup2(fd, 2)
        os.close(fd)
    if analysis_func is dummy:
        q = None
    else:
        q = LockFreeQueue()
    iowrapper.run_reader(fd2pid, names, masters, slaves, q=q)
    # The reader now owns the master ends; close our copies.
    for fd in masters:
        os.close(fd)
    # A chain must be finished from the back, so sort on that.
    sortnum_cache = {}
    def dw_sortnum(name):
        # Memoized chain depth of a datasetwriter within this job:
        # -1 for already-finished, else 1 + depth of its previous (if local).
        if name not in sortnum_cache:
            dw = dataset._datasetwriters.get(name)
            if not dw: # manually .finish()ed
                num = -1
            elif dw.previous and dw.previous.startswith(jobid + '/'):
                pname = dw.previous.split('/')[1]
                num = dw_sortnum(pname) + 1
            else:
                num = 0
            sortnum_cache[name] = num
        return sortnum_cache[name]
    prof = {}
    # --- prepare phase ---
    if prepare_func is dummy:
        prof['prepare'] = 0 # truthish!
    else:
        t = monotonic()
        switch_output()
        g.running = 'prepare'
        g.subjob_cookie = subjob_cookie
        setproctitle(g.running)
        with statmsg.status(g.running):
            g.prepare_res = method_ref.prepare(**args_for(method_ref.prepare))
            # Finish any datasets prepare started, deepest chain links last.
            to_finish = [dw.name for dw in dataset._datasetwriters.values() if dw._started]
            if to_finish:
                with statmsg.status("Finishing datasets"):
                    for name in sorted(to_finish, key=dw_sortnum):
                        dataset._datasetwriters[name].finish()
        c_fflush()
        prof['prepare'] = monotonic() - t
        switch_output()
    setproctitle('launch')
    from accelerator.extras import saved_files
    # --- analysis phase (forked, one process per slice) ---
    if analysis_func is dummy:
        prof['per_slice'] = []
        prof['analysis'] = 0
    else:
        t = monotonic()
        g.running = 'analysis'
        g.subjob_cookie = None # subjobs are not allowed from analysis
        with statmsg.status('Waiting for all slices to finish analysis') as update:
            g.update_top_status = update
            prof['per_slice'], files, g.analysis_res = fork_analysis(slices, concurrency, analysis_func, args_for(analysis_func), synthesis_needs_analysis, slaves, q)
            del g.update_top_status
        prof['analysis'] = monotonic() - t
        saved_files.update(files)
    # --- synthesis phase ---
    t = monotonic()
    g.running = 'synthesis'
    g.subjob_cookie = subjob_cookie
    setproctitle(g.running)
    with statmsg.status(g.running):
        synthesis_res = synthesis_func(**args_for(synthesis_func))
        if synthesis_res is not None:
            blob.save(synthesis_res, temp=False)
        # Finish any remaining datasets, deepest chain links last.
        if dataset._datasetwriters:
            with statmsg.status("Finishing datasets"):
                for name in sorted(dataset._datasetwriters, key=dw_sortnum):
                    dataset._datasetwriters[name].finish()
    if dataset._datasets_written:
        blob.save(dataset._datasets_written, 'DS/LIST', temp=False, _hidden=True)
    c_fflush()
    t = monotonic() - t
    prof['synthesis'] = t
    # Imported late so we pick up the record accumulated while running.
    from accelerator.subjobs import _record
    return None, (prof, saved_files, _record)
def args_for(func):
    """Collect keyword arguments for *func*: each of its parameter names is
    looked up as an attribute on the global ``g`` object."""
    return {name: getattr(g, name) for name in getarglist(func)}