def prepare_pbs_files(self, pbs_module):
    """
    :type pbs_module: scripts.pbs.modules.pbs_tarkil_cesnet_cz
    :rtype: list[(str, PBSModule)]
    """
    jobs = list()

    for p in self.proc:
        case = ConfigCase(dict(
            proc=p,
            time_limit=self.time_limit,
            memory_limit=self.memory_limit,
            tmp='exec-parallel'
        ), None)

        pbs_run = pbs_module.Module(case)
        pbs_run.queue = self.arg_options.get('queue', True)
        pbs_run.ppn = self.arg_options.get('ppn', 1)

        pbs_content = self.create_pbs_job_content(pbs_module, case)
        IO.write(case.fs.pbs_script, pbs_content)

        qsub_command = pbs_run.get_pbs_command(case.fs.pbs_script)
        jobs.append((qsub_command, pbs_run))
    return jobs
def finish_pbs_exec(job, batch):
    """
    Upon PBS finish determine Job exit
    :type job: scripts.pbs.job.Job
    :rtype: scripts.serialization.PyPyResult
    """
    job.is_active = False
    try:
        result = load_pypy(job.case.fs.dump_output)
    except Exception:
        # no output file was generated, assume the job went wrong
        job.status = JobState.EXIT_ERROR
        Printer.all.err('Job {} ended (no output file found). Case: {}', job, job.full_name)
        Printer.all.out(' pbs output: ')
        Printer.all.raw(format_n_lines(IO.read(job.case.fs.pbs_output), False))
        return

    # check result
    if result.returncode == 0:
        job.status = JobState.EXIT_OK
        Printer.all.suc('Job {}({}) ended', job, job.full_name)
    else:
        job.status = JobState.EXIT_ERROR
        Printer.all.err('Job {}({}) ended', job, job.full_name)

    if result.returncode != 0 or batch:
        with Printer.all.with_level(1):
            Printer.all.raw(format_n_lines(IO.read(result.output), result.returncode == 0))
    return result
def finish_pbs_runtest(job, batch):
    """
    Upon PBS runtest finish determine Job exit
    :type job: scripts.pbs.job.Job
    :rtype: ResultParallelThreads
    """
    job.is_active = False
    try:
        runner = load_runtest(job.case.fs.dump_output)
    except Exception:
        # no output file was generated, assume the job went wrong
        job.status = JobState.EXIT_ERROR
        Printer.all.err('Job {} ended (no output file found). Case: {}', job, job.full_name)
        Printer.all.out(' pbs output: ')
        Printer.all.raw(format_n_lines(IO.read(job.case.fs.pbs_output), False))
        return

    job.status = JobState.EXIT_OK if runner.returncode == 0 else JobState.EXIT_ERROR
    for thread in runner.threads:
        StatusPrinter.print_test_result(thread)
        if thread.returncode != 0 or batch:
            with Printer.all.with_level():
                Printer.all.out('Log file {}', job.case.fs.job_output)
                Printer.all.raw(format_n_lines(IO.read(job.case.fs.job_output), thread.returncode == 0))

    # print the status line only if the pbs job contains more than one case
    if len(runner.threads) > 1:
        Printer.all.sep()
        StatusPrinter.print_runner_stat(runner)
        Printer.all.sep()
    return runner
def compare(self, reference_filepath, other_filepath, **kwargs):
    """
    Method can do anything as long as an int value is returned
    :param reference_filepath:
    :param other_filepath:
    :param kwargs:
    :return:
    """
    reference_content = IO.read(
        Paths.abspath(reference_filepath)
    )
    other_content = IO.read(
        Paths.abspath(other_filepath)
    )

    self.output.write("In case of emergency,")
    self.output.write(" you can provide details on what went wrong")
    self.output.write(" using the self.output.write method")
    self.output.write("")
    self.output.write("Error while comparing files \n{} \n{}"
                      .format(reference_filepath, other_filepath))

    # must return a return-code!
    return 1
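# A minimal sketch of a concrete comparator honoring the contract above:
# return 0 when the files match and a non-zero return-code otherwise. The
# class name ByteCompareRule is hypothetical; IO.read, Paths.abspath and
# self.output.write are assumed to be provided by the framework exactly as
# in the template above.
class ByteCompareRule(object):
    def compare(self, reference_filepath, other_filepath, **kwargs):
        reference_content = IO.read(Paths.abspath(reference_filepath))
        other_content = IO.read(Paths.abspath(other_filepath))

        # identical content means success (return-code 0)
        if reference_content == other_content:
            return 0

        self.output.write("Files differ:\n{}\n{}"
                          .format(reference_filepath, other_filepath))
        return 1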
def on_complete(self, pypy=None):
    if pypy.executor.broken:
        return

    if self.log_file:
        IO.write(self.log_file, self.content)

    enabled = False
    if self.policy == self.POLICY_ALWAYS:
        enabled = True
    elif self.policy == self.POLICY_BATCH_OR_ERROR:
        enabled = pypy.with_error() or (not Printer.batched.is_muted())
    elif self.policy == self.POLICY_ERROR_ONLY:
        enabled = pypy.with_error()

    if enabled:
        with Printer.all.with_level():
            if self.pypy.full_output:
                Printer.all.sep()
                Printer.all.out('Output from file {self.pypy.full_output}'.format(**locals()))

            if not Printer.batched.is_muted():
                Printer.batched.raw(format_n_lines(self.content, pypy.was_successful()))
            if not Printer.console.is_muted():
                Printer.console.raw(format_n_lines(self.content, pypy.was_successful()))
def test_pbs_mode(self):
    # this test can only be run on a PBS server
    do_work(parser, ['-q', '--', 'sleep', '1'], debug=True)
    mpi_dirs = [f for f in os.listdir(__dir__) if f.startswith('exec-parallel-')]
    self.assertGreaterEqual(len(mpi_dirs), 1)

    # test that the scripts were actually created and contain at least one
    # file, then remove them
    for mpi in mpi_dirs:
        self.assertGreaterEqual(len(os.listdir(os.path.join(__dir__, mpi))), 1)
        IO.delete_all(os.path.join(__dir__, mpi))
def tearDownClass(cls):
    super(UnitTest, cls).tearDownClass()
    from scripts.core.base import IO

    cls.end_state = os.listdir(cls.__dir__)
    for f in cls.end_state:
        if f not in cls.start_state:
            full_path = os.path.join(cls.__dir__, f)
            if os.path.isdir(full_path):
                IO.delete_all(full_path)
            else:
                IO.delete(full_path)
def on_complete(self, pypy=None):
    if self.log_file:
        IO.write(self.log_file, self.content)

    # finish after update
    if self.update_format and (not self.complete_format and not self.color_complete_format):
        printf.finish_rewrite()

    # print regular messages
    for fmt in ensure_iterable(self.complete_format):
        printf.out(fmt, monitor=self)

    # print messages on error
    if self.pypy.with_error():
        for fmt in ensure_iterable(self.error_complete_format):
            printf.error(fmt, monitor=self)

    # print regular color messages based on the result
    for fmt in ensure_iterable(self.color_complete_format):
        if self.pypy.returncode() == 0:
            printf.success(fmt, monitor=self)
        elif self.pypy.returncode() is None:
            printf.warning(fmt, monitor=self)
        else:
            printf.error(fmt, monitor=self)

    with printf:
        if printf.verbosity() is printf.OutputVerbosity.FULL:
            printf.sep()
            printf.out('Output from file {self.pypy.full_output}'.format(**locals()))
            printf.opt(raw=True).stream(format_n_lines(self.content, pypy.was_successful()))
        elif printf.verbosity() is printf.OutputVerbosity.SMART and pypy.with_error():
            printf.sep()
            printf.out('Last 50 lines from file {self.pypy.full_output}'.format(**locals()))
            printf.opt(raw=True).stream(format_n_lines(self.content, success=False, n_lines=-50))
        elif printf.verbosity() is printf.OutputVerbosity.MINIMAL and pypy.with_error():
            printf.sep()
            printf.out('Last 50 lines from file {self.pypy.full_output}'.format(**locals()))
            printf.opt(raw=True).stream(format_n_lines(self.content, success=False, n_lines=-50))
def on_thread_complete(self, thread):
    """
    :type thread: scripts.core.pypy.PyPy
    """
    super(ComparisonMultiThread, self).on_thread_complete(thread)

    # append ndiff to file
    content = list()
    content.append('-' * 60 + '\n')
    content.append(thread.name + '\n')
    content.append('-' * 60 + '\n')
    content.append(thread.executor.output.read())
    content.append('\n' * 3)

    IO.append(self.output, '\n'.join(content) or '')
def generate_status_file(cls, target):
    """
    Will generate status file if target has option turned on
    :type target: PyPy
    """
    if target.status_file:
        IO.write(target.status_file, json.dumps(target.status(), indent=4))
        output_dir = Paths.dirname(target.status_file)
        files = Paths.browse(
            output_dir,
            [PathFilters.filter_wildcards('*/profiler_info_*.log.json')])
        # profiler json is missing?
        if not files:
            IO.write(Paths.join(output_dir, 'profiler_info_dummy.log.json'), '{}')
def prepare_pbs_files(self, pbs_module):
    jobs = list()
    """ :type: list[(str, PBSModule)] """

    for yaml_file, yaml_config in self.configs.files.items():
        for case in yaml_config.get_one(yaml_file):
            pbs_run = pbs_module.Module(case)
            pbs_run.queue = self.arg_options.get('queue', True)
            pbs_run.ppn = self.arg_options.get('ppn', 1)

            pbs_content = self.create_pbs_job_content(pbs_module, case)
            IO.write(case.fs.pbs_script, pbs_content)

            qsub_command = pbs_run.get_pbs_command(case.fs.pbs_script)
            jobs.append((qsub_command, pbs_run))
    return jobs
def read(self):
    if self.mode is self.DUMMY:
        return self.content

    if self.filename:
        if self.content is None:
            self.content = IO.read(self.filename)
        return self.content
def print_log_file(cls, f, n_lines):
    log_file = IO.read(f)
    if log_file:
        if n_lines == 0:
            Printer.out('Full log from file {}:', f)
        else:
            Printer.out('Last {} lines from file {}:', abs(n_lines), f)
        Printer.wrn(format_n_lines(log_file.rstrip(), -n_lines, indent=Printer.indent * ' '))
def print_log_file(cls, f, n_lines):
    log_file = IO.read(f)
    if log_file:
        if n_lines == 0:
            printf.out('Full log from file {}:', f)
        else:
            printf.out('Last {} lines from file {}:', abs(n_lines), f)
        printf.stream(format_n_lines(log_file.rstrip(), -n_lines))
def close(self):
    if self.mode in {self.WRITE, self.APPEND, self.VARIABLE}:
        if self.fp is not None:
            if type(self.fp) is int:
                os.close(self.fp)
            else:
                self.fp.close()
            self.fp = None

        # remove temp file
        if self.mode in {self.VARIABLE}:
            self.content = IO.read(self.filename)
            os.unlink(self.filename)
def run_local_mode_one(proc, time_limit, memory_limit):
    if proc == 0:
        command = arg_rest[1:]
    else:
        # proc is converted to str so the command list holds strings only
        command = [arg_rest[0], '-np', str(proc)] + arg_rest[1:]

    n_lines = 0 if arg_options.batch else 10
    pypy = PyPy(BinExecutor(command))

    # set limits
    pypy.limit_monitor.time_limit = time_limit
    pypy.limit_monitor.memory_limit = memory_limit
    pypy.progress = not arg_options.batch
    pypy.info_monitor.deactivate()
    pypy.error_monitor.deactivate()

    # catch output to a variable; in batch mode we keep the log files,
    # otherwise we keep logs only on error
    log_file = Paths.temp_file('exec-parallel-{date}-{time}-{rnd}.log')
    pypy.executor.output = OutputMode.variable_output()
    pypy.full_output = log_file

    # start and wait for exit
    pypy.start()
    pypy.join()

    # add result to the global json result
    GlobalResult.add(pypy)

    # in batch mode or on error
    if not pypy.with_success() or arg_options.batch:
        content = pypy.executor.output.read()
        IO.write(log_file, content)
        Printer.close()
        Printer.out(format_n_lines(content, indent=' ', n_lines=-n_lines))
        Printer.open()
    return pypy
def finish_pbs_job(job, batch):
    """
    :type job: scripts.pbs.job.Job
    """
    # try to get more detailed job status
    job.is_active = False
    job_output = IO.read(job.case.fs.json_output)

    if job_output:
        job_json = JsonParser(json.loads(job_output), batch)
        if job_json.returncode == 0:
            job.status = JobState.EXIT_OK
            Printer.out('OK: Job {}({}) ended', job, job.full_name)
            Printer.open()
            # in batch mode print all logs
            if batch:
                Printer.open()
                for test in job_json.tests:
                    test.get_result()
                Printer.close()
            Printer.close()
        else:
            job.status = JobState.EXIT_ERROR
            Printer.out('ERROR: Job {}({}) ended', job, job.full_name)
            # in batch mode print all logs
            Printer.open()
            for test in job_json.tests:
                test.get_result()
            Printer.close()
    else:
        # no output file was generated, assume the job went wrong
        job.status = JobState.EXIT_ERROR
        Printer.out('ERROR: Job {} ended (no output file found). Case: {}', job, job.full_name)
        Printer.out(' pbs output: ')
        Printer.out(format_n_lines(IO.read(job.case.fs.pbs_output), 0))

    return 0 if job.status == JobState.EXIT_OK else 1
def close(self):
    if self.mode is self.DUMMY:
        return

    if self.mode in {self.WRITE, self.APPEND, self.VARIABLE}:
        if self.fp is not None:
            if type(self.fp) is int:
                os.close(self.fp)
            else:
                self.fp.close()
            self.fp = None

        # remove temp file
        if self.mode in {self.VARIABLE}:
            self.content = IO.read(self.filename)
            os.unlink(self.filename)
def read(self):
    if self.filename:
        if self.content is None:
            self.content = IO.read(self.filename)
        return self.content
def run_pbs_mode(configs, debug=False):
    """
    :type debug: bool
    :type configs: scripts.config.yaml_config.ConfigPool
    """
    global arg_options, arg_others, arg_rest
    pbs_module = get_pbs_module(arg_options.host)
    Printer.dynamic_output = not arg_options.batch
    Printer.dyn('Parsing yaml files')

    jobs = list()
    """ :type: list[(str, PBSModule)] """

    for yaml_file, yaml_config in configs.files.items():
        for case in yaml_config.get_one(yaml_file):
            pbs_run = pbs_module.Module(case)
            pbs_run.queue = arg_options.get('queue', True)
            pbs_run.ppn = arg_options.get('ppn', 1)

            pbs_content = create_pbs_job_content(pbs_module, case)
            IO.write(case.fs.pbs_script, pbs_content)

            qsub_command = pbs_run.get_pbs_command(case.fs.pbs_script)
            jobs.append((qsub_command, pbs_run))

    # start jobs
    Printer.dyn('Starting jobs')

    total = len(jobs)
    job_id = 0
    multijob = MultiJob(pbs_module.ModuleJob)
    for qsub_command, pbs_run in jobs:
        job_id += 1
        Printer.dyn('Starting jobs {:02d} of {:02d}', job_id, total)

        output = subprocess.check_output(qsub_command)
        job = pbs_module.ModuleJob.create(output, pbs_run.case)
        job.full_name = "Case {}".format(pbs_run.case)
        multijob.add(job)

    Printer.out()
    Printer.out('{} job/s inserted into queue', total)

    # first update to get more info about multijob jobs
    Printer.out()
    Printer.separator()
    Printer.dyn('Updating job status')
    multijob.update()

    # print job statuses
    Printer.out()
    if not arg_options.batch:
        multijob.print_status()

    Printer.separator()
    Printer.dyn(multijob.get_status_line())
    returncodes = dict()

    # wait for finish
    while multijob.is_running():
        Printer.dyn('Updating job status')
        multijob.update()
        Printer.dyn(multijob.get_status_line())

        # if some jobs changed status, add a new line so the dynamic output remains visible
        jobs_changed = multijob.get_all(status=JobState.COMPLETED)
        if jobs_changed:
            Printer.out()
            Printer.separator()

        # process all jobs whose status was updated to the COMPLETED state
        for job in jobs_changed:
            returncodes[job] = finish_pbs_job(job, arg_options.batch)

        if jobs_changed:
            Printer.separator()
            Printer.out()

        # after printing the status update, sleep for a bit
        if multijob.is_running():
            time.sleep(5)

    Printer.out(multijob.get_status_line())
    Printer.out('All jobs finished')

    # use the max return code, or 2 if there are no return codes at all
    returncode = max(returncodes.values()) if returncodes else 2
    sys.exit(returncode)
def compare(self, reference_filepath, other_filepath, **kwargs):
    """
    Method can do anything as long as an int value is returned
    :param reference_filepath:
    :param other_filepath:
    :param kwargs:
    :return:
    """
    regex = kwargs.get('regex', None)
    substr = kwargs.get('substr', None)

    if regex is None and substr is None:
        self.output.write("Invalid check rule! Specify either 'regex' or 'substr' keyword.")
        return 1

    # convert to str or compile to regex
    if substr is not None:
        substr = str(substr)
    if regex is not None:
        regex = re.compile(str(regex))

    # for regex comparison we ignore the reference file and read job_output.log
    content = IO.read(Paths.abspath(other_filepath))

    # substr find
    if substr:
        found = False
        for line in content.splitlines():
            if line.find(substr) != -1:
                found = True
                self.output.write('[OK] Found substr "{substr}" in line:'.format(**locals()))
                self.output.write(' ' + line)
                break
        if not found:
            self.output.write('[ERROR] Could not find substr "{substr}":'.format(**locals()))
            return 1

    # regex match
    if regex:
        found = False
        for line in content.splitlines():
            if regex.findall(line):
                found = True
                self.output.write('[OK] Found regex "{regex.pattern}" in line:'.format(**locals()))
                self.output.write(' ' + line)
                break
        if not found:
            self.output.write('[ERROR] Could not find regex "{regex.pattern}":'.format(**locals()))
            return 1

    # must return a return-code!
    return 0
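# Hypothetical usage sketch of the rule above: 'regex' and 'substr' arrive
# through **kwargs, so a caller might invoke the check like this. The class
# name RegexCheckRule stands in for the (unshown) class containing compare(),
# and both file paths are assumptions for illustration only.
check = RegexCheckRule()
returncode = check.compare(
    'ref_output/flow.json',      # ignored for regex/substr comparison
    'output/job_output.log',
    regex=r'Linear solver converged in \d+ iterations',
)
# a zero return-code means the pattern was found
assert returncode in (0, 1)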