def run_pbs_mode(self):
    """
    Runs this module in PBS mode. At this point the command-line arguments
    have been parsed and converted. We create the PBS start script(s) and
    monitor their statuses. All jobs are inserted into the queue and
    evaluated after they finish.
    """
    pbs_module = get_pbs_module(self.arg_options.host)
    jobs = self.prepare_pbs_files(pbs_module)
    result, multijob = pbs_control.do_work(jobs, pbs_module, finish_pbs_exec)
    return result.singlify()
def run_pbs_mode(self):
    """
    Runs this module in PBS mode. At this point all configuration files have
    been loaded; all that is left is to create the PBS scripts and submit them
    to the queue (qsub). We then monitor all jobs (qstat) and, when a job
    exits, parse its result JSON file to determine the ok/error status of
    that job.
    """
    pbs_module = get_pbs_module(self.arg_options.host)
    jobs = self.prepare_pbs_files(pbs_module)
    result, multijob = pbs_control.do_work(jobs, pbs_module, finish_pbs_runtest)
    return result.singlify()
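# The docstrings above only gloss the per-job evaluation step: when a job
# exits, its result JSON file is parsed and mapped to an ok/error status.
# The sketch below illustrates that step in isolation. It is a minimal,
# hypothetical example: the function name, the result-file layout, and the
# 'returncode' key are assumptions for illustration, not the project's actual
# finish_pbs_exec / finish_pbs_runtest implementation.
import json


def finish_pbs_job_sketch(result_json_path):
    """Read a finished job's result JSON and map it to a return code (sketch)."""
    try:
        with open(result_json_path) as handle:
            result = json.load(handle)
    except (OSError, ValueError):
        # a missing or unreadable result file counts as an error
        return 2

    # assume the job wrote its exit status under a 'returncode' key
    return 0 if result.get('returncode', 1) == 0 else 1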
def run_pbs_mode(configs, debug=False):
    """
    :type debug: bool
    :type configs: scripts.config.yaml_config.ConfigPool
    """
    global arg_options, arg_others, arg_rest
    pbs_module = get_pbs_module(arg_options.host)
    Printer.dynamic_output = not arg_options.batch

    Printer.dyn('Parsing yaml files')
    jobs = list()
    """ :type: list[(str, PBSModule)] """

    # create a PBS script and a qsub command for every test case
    for yaml_file, yaml_config in configs.files.items():
        for case in yaml_config.get_one(yaml_file):
            pbs_run = pbs_module.Module(case)
            pbs_run.queue = arg_options.get('queue', True)
            pbs_run.ppn = arg_options.get('ppn', 1)

            pbs_content = create_pbs_job_content(pbs_module, case)
            IO.write(case.fs.pbs_script, pbs_content)

            qsub_command = pbs_run.get_pbs_command(case.fs.pbs_script)
            jobs.append((qsub_command, pbs_run))

    # start jobs
    Printer.dyn('Starting jobs')

    total = len(jobs)
    job_id = 0
    multijob = MultiJob(pbs_module.ModuleJob)
    for qsub_command, pbs_run in jobs:
        job_id += 1
        Printer.dyn('Starting jobs {:02d} of {:02d}', job_id, total)

        output = subprocess.check_output(qsub_command)
        job = pbs_module.ModuleJob.create(output, pbs_run.case)
        job.full_name = "Case {}".format(pbs_run.case)
        multijob.add(job)

    Printer.out()
    Printer.out('{} job/s inserted into queue', total)

    # first update to get more info about the multijob's jobs
    Printer.out()
    Printer.separator()
    Printer.dyn('Updating job status')
    multijob.update()

    # print job statuses
    Printer.out()
    if not arg_options.batch:
        multijob.print_status()

    Printer.separator()
    Printer.dyn(multijob.get_status_line())
    returncodes = dict()

    # wait for all jobs to finish
    while multijob.is_running():
        Printer.dyn('Updating job status')
        multijob.update()
        Printer.dyn(multijob.get_status_line())

        # if some jobs changed status, add a new line so the dynamic output remains visible
        jobs_changed = multijob.get_all(status=JobState.COMPLETED)
        if jobs_changed:
            Printer.out()
            Printer.separator()

        # evaluate every job whose status changed to COMPLETED
        for job in jobs_changed:
            returncodes[job] = finish_pbs_job(job, arg_options.batch)

        if jobs_changed:
            Printer.separator()
            Printer.out()

        # after printing the status update, sleep for a bit
        if multijob.is_running():
            time.sleep(5)

    Printer.out(multijob.get_status_line())
    Printer.out('All jobs finished')

    # exit with the maximum return code, or 2 if there are no return codes
    returncode = max(returncodes.values()) if returncodes else 2
    sys.exit(returncode)
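# The submission loop above passes the raw output of
# subprocess.check_output(qsub_command) to pbs_module.ModuleJob.create, which
# presumably extracts the job identifier from it. The helper below is a
# hypothetical sketch of that extraction, assuming the usual qsub behaviour of
# printing an identifier such as '12345.pbs-server'; the real parsing is PBS
# flavour specific and lives inside ModuleJob.create.
import re


def parse_qsub_output_sketch(output):
    """Pull the numeric job id out of qsub's stdout (sketch)."""
    if isinstance(output, bytes):
        output = output.decode('utf-8', errors='replace')
    match = re.search(r'(\d+)', output)
    return match.group(1) if match else None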
def run_pbs_mode(debug=False):
    pbs_module = get_pbs_module()
    jobs = prepare_pbs_files(pbs_module)
    if debug:
        return 0

    # start jobs
    Printer.dyn('Starting jobs')

    total = len(jobs)
    job_id = 0
    multijob = MultiJob(pbs_module.ModuleJob)
    for qsub_command, pbs_run in jobs:
        job_id += 1
        Printer.dyn('Starting jobs {:02d} of {:02d}', job_id, total)

        output = subprocess.check_output(qsub_command)
        job = pbs_module.ModuleJob.create(output, pbs_run.case)
        job.full_name = "Case {}".format(pbs_run.case)
        multijob.add(job)

    Printer.out()
    Printer.out('{} job/s inserted into queue', total)

    # first update to get more info about the multijob's jobs
    Printer.out()
    Printer.separator()
    Printer.dyn('Updating job status')
    multijob.update()

    # print job statuses
    Printer.out()
    if not arg_options.batch:
        multijob.print_status()

    Printer.separator()
    Printer.dyn(multijob.get_status_line())
    returncodes = dict()

    # wait for all jobs to finish
    while multijob.is_running():
        Printer.dyn('Updating job status')
        multijob.update()
        Printer.dyn(multijob.get_status_line())

        # if some jobs changed status, add a new line so the dynamic output remains visible
        jobs_changed = multijob.get_all(status=JobState.COMPLETED)
        if jobs_changed:
            Printer.out()
            Printer.separator()

        # evaluate every job whose status changed to COMPLETED
        for job in jobs_changed:
            returncodes[job] = finish_pbs_job(job, arg_options.batch)

        if jobs_changed:
            Printer.separator()
            Printer.out()

        # after printing the status update, sleep for a bit
        if multijob.is_running():
            time.sleep(5)

    Printer.out(multijob.get_status_line())
    Printer.out('All jobs finished')

    # return the maximum return code, or 2 if there are no return codes
    return max(returncodes.values()) if returncodes else 2
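# Unlike the previous variant, which calls sys.exit(returncode) itself, this
# version returns the aggregated return code and leaves termination to the
# caller. A minimal, hypothetical entry point propagating that code to the
# shell could look like this:
if __name__ == '__main__':
    import sys
    sys.exit(run_pbs_mode(debug=False))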