    def on_complete(self, pypy=None):
        # nothing to report when the executor itself is broken
        if pypy.executor.broken:
            return

        # persist the captured output to the log file, if one was requested
        if self.log_file:
            IO.write(self.log_file, self.content)

        # decide whether to print the output, based on the printing policy
        enabled = False
        if self.policy == self.POLICY_ALWAYS:
            enabled = True
        elif self.policy == self.POLICY_BATCH_OR_ERROR:
            enabled = pypy.with_error() or (not Printer.batched.is_muted())
        elif self.policy == self.POLICY_ERROR_ONLY:
            enabled = pypy.with_error()

        if enabled:
            with Printer.all.with_level():

                if self.pypy.full_output:
                    Printer.all.sep()
                    Printer.all.out(
                        'Output from file {self.pypy.full_output}'.format(
                            **locals()))

                # send the formatted output to every printer that is not muted
                if not Printer.batched.is_muted():
                    Printer.batched.raw(
                        format_n_lines(self.content, pypy.was_successful()))

                if not Printer.console.is_muted():
                    Printer.console.raw(
                        format_n_lines(self.content, pypy.was_successful()))
    def prepare_pbs_files(self, pbs_module):
        """
        :type pbs_module: scripts.pbs.modules.pbs_tarkil_cesnet_cz
        :rtype: list[(str, PBSModule)]
        """

        jobs = list()

        # build one config case (and thus one PBS job) per requested proc count
        for p in self.proc:
            case = ConfigCase(dict(
                proc=p,
                time_limit=self.time_limit,
                memory_limit=self.memory_limit,
                tmp='exec-parallel'
            ), None)

            pbs_run = pbs_module.Module(case)
            pbs_run.queue = self.arg_options.get('queue', True)
            pbs_run.ppn = self.arg_options.get('ppn', 1)

            # render the PBS script for this case and write it to disk
            pbs_content = self.create_pbs_job_content(pbs_module, case)
            IO.write(case.fs.pbs_script, pbs_content)

            # pair the qsub command with its module instance for later submission
            qsub_command = pbs_run.get_pbs_command(case.fs.pbs_script)
            jobs.append((qsub_command, pbs_run))
        return jobs
    def on_complete(self, pypy=None):

        if self.log_file:
            IO.write(self.log_file, self.content)

        # when only an update format is configured (no complete formats),
        # just close the rewritten (dynamic) output line
        if self.update_format and (not self.complete_format
                                   and not self.color_complete_format):
            printf.finish_rewrite()

        # print regular messages
        for fmt in ensure_iterable(self.complete_format):
            printf.out(fmt, monitor=self)

        # print messages if error
        if self.pypy.with_error():
            for fmt in ensure_iterable(self.error_complete_format):
                printf.error(fmt, monitor=self)

        # print regular color messages based on result
        for fmt in ensure_iterable(self.color_complete_format):
            if self.pypy.returncode() == 0:
                printf.success(fmt, monitor=self)
            elif self.pypy.returncode() is None:
                printf.warning(fmt, monitor=self)
            else:
                printf.error(fmt, monitor=self)

        with printf:
            # FULL verbosity: always print the entire captured output
            if printf.verbosity() is printf.OutputVerbosity.FULL:
                printf.sep()
                printf.out('Output from file {self.pypy.full_output}'.format(**locals()))
                printf.opt(raw=True).stream(
                    format_n_lines(self.content, pypy.was_successful()))
            # SMART verbosity: on error, show only the last 50 lines
            elif printf.verbosity() is printf.OutputVerbosity.SMART and pypy.with_error():
                printf.sep()
                printf.out('Last 50 lines from file {self.pypy.full_output}'.format(**locals()))
                printf.opt(raw=True).stream(
                    format_n_lines(self.content, success=False, n_lines=-50))
            # MINIMAL verbosity: on error, show only the last 50 lines as well
            elif printf.verbosity() is printf.OutputVerbosity.MINIMAL and pypy.with_error():
                printf.sep()
                printf.out('Last 50 lines from file {self.pypy.full_output}'.format(**locals()))
                printf.opt(raw=True).stream(
                    format_n_lines(self.content, success=False, n_lines=-50))
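
# For reference, a minimal stdlib-only sketch of what a format_n_lines-style
# helper might do -- an assumption for illustration only; the project's real
# helper may behave differently. A negative n_lines keeps just the last
# |n_lines| lines, and every line is indented so the block stands out from
# the surrounding printer output.
def _format_n_lines_sketch(text, success=True, n_lines=0, indent='    '):
    lines = text.splitlines()
    if n_lines < 0:
        # keep only the last |n_lines| lines
        lines = lines[n_lines:]
    elif n_lines > 0:
        # keep only the first n_lines lines
        lines = lines[:n_lines]
    marker = 'OK ' if success else 'ERR'
    return '\n'.join('{}{} | {}'.format(indent, marker, line) for line in lines)
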
    def prepare_pbs_files(self, pbs_module):
        jobs = list()
        """ :type: list[(str, PBSModule)] """

        for yaml_file, yaml_config in self.configs.files.items():
            for case in yaml_config.get_one(yaml_file):
                pbs_run = pbs_module.Module(case)
                pbs_run.queue = self.arg_options.get('queue', True)
                pbs_run.ppn = self.arg_options.get('ppn', 1)

                pbs_content = self.create_pbs_job_content(pbs_module, case)
                IO.write(case.fs.pbs_script, pbs_content)

                qsub_command = pbs_run.get_pbs_command(case.fs.pbs_script)
                jobs.append((qsub_command, pbs_run))
        return jobs
    def generate_status_file(cls, target):
        """
        Generates a status file if the target has the option turned on
        :type target: PyPy
        """
        if target.status_file:
            IO.write(target.status_file, json.dumps(target.status(), indent=4))
            output_dir = Paths.dirname(target.status_file)
            files = Paths.browse(
                output_dir,
                [PathFilters.filter_wildcards('*/profiler_info_*.log.json')])
            # if no profiler json was produced, create a dummy one
            if not files:
                IO.write(
                    Paths.join(output_dir, 'profiler_info_dummy.log.json'),
                    '{}')
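
# A stdlib-only sketch of the same fallback behaviour, assuming a target
# object that exposes status_file and status() the way PyPy does above; the
# project's IO, Paths and PathFilters helpers are replaced with os/glob/json
# here, so this is an illustration rather than the project's implementation.
import glob
import json
import os


def _generate_status_file_sketch(target):
    if not target.status_file:
        return

    # dump the status dict as pretty-printed json
    with open(target.status_file, 'w') as fp:
        json.dump(target.status(), fp, indent=4)

    # if no profiler json was produced, create a dummy one
    output_dir = os.path.dirname(target.status_file)
    pattern = os.path.join(output_dir, '*', 'profiler_info_*.log.json')
    if not glob.glob(pattern):
        dummy = os.path.join(output_dir, 'profiler_info_dummy.log.json')
        with open(dummy, 'w') as fp:
            fp.write('{}')
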
def run_local_mode_one(proc, time_limit, memory_limit):
    if proc == 0:
        command = arg_rest[1:]
    else:
        command = [arg_rest[0], '-np', proc] + arg_rest[1:]

    n_lines = 0 if arg_options.batch else 10
    pypy = PyPy(BinExecutor(command))

    # set limits
    pypy.limit_monitor.time_limit = time_limit
    pypy.limit_monitor.memory_limit = memory_limit
    pypy.progress = not arg_options.batch
    pypy.info_monitor.deactivate()
    pypy.error_monitor.deactivate()

    # catch output to variable
    # in batch mode we will keep the files
    # otherwise we will keep logs only on error
    log_file = Paths.temp_file('exec-parallel-{date}-{time}-{rnd}.log')
    pypy.executor.output = OutputMode.variable_output()
    pypy.full_output = log_file

    # start and wait for exit
    pypy.start()
    pypy.join()

    # add result to global json result
    GlobalResult.add(pypy)

    # dump the captured output on error, or always when running in batch mode
    if not pypy.with_success() or arg_options.batch:
        content = pypy.executor.output.read()
        IO.write(log_file, content)
        Printer.close()
        Printer.out(format_n_lines(content, indent='    ', n_lines=-n_lines))
        Printer.open()
    return pypy
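
# A hypothetical driver for run_local_mode_one; the proc counts and limits
# below are made up for illustration, and arg_options / arg_rest are assumed
# to have been parsed already by the surrounding script.
def _run_local_sweep_sketch(proc_counts=(1, 2, 4), time_limit=30, memory_limit=400):
    # run the command once per requested process count and collect the results
    results = []
    for p in proc_counts:
        results.append(run_local_mode_one(p, time_limit, memory_limit))
    return results
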
def run_pbs_mode(configs, debug=False):
    """
    :type debug: bool
    :type configs: scripts.config.yaml_config.ConfigPool
    """
    global arg_options, arg_others, arg_rest
    pbs_module = get_pbs_module(arg_options.host)
    Printer.dynamic_output = not arg_options.batch
    Printer.dyn('Parsing yaml files')

    jobs = list()
    """ :type: list[(str, PBSModule)] """

    for yaml_file, yaml_config in configs.files.items():
        for case in yaml_config.get_one(yaml_file):
            pbs_run = pbs_module.Module(case)
            pbs_run.queue = arg_options.get('queue', True)
            pbs_run.ppn = arg_options.get('ppn', 1)

            pbs_content = create_pbs_job_content(pbs_module, case)
            IO.write(case.fs.pbs_script, pbs_content)

            qsub_command = pbs_run.get_pbs_command(case.fs.pbs_script)
            jobs.append((qsub_command, pbs_run))

    # start jobs
    Printer.dyn('Starting jobs')

    total = len(jobs)
    job_id = 0
    multijob = MultiJob(pbs_module.ModuleJob)
    for qsub_command, pbs_run in jobs:
        job_id += 1

        Printer.dyn('Starting job {:02d} of {:02d}', job_id, total)

        output = subprocess.check_output(qsub_command)
        job = pbs_module.ModuleJob.create(output, pbs_run.case)
        job.full_name = "Case {}".format(pbs_run.case)
        multijob.add(job)

    Printer.out()
    Printer.out('{} job/s inserted into queue', total)

    # first update to get more info about the multijob's jobs
    Printer.out()
    Printer.separator()
    Printer.dyn('Updating job status')
    multijob.update()

    # print jobs statuses
    Printer.out()
    if not arg_options.batch:
        multijob.print_status()

    Printer.separator()
    Printer.dyn(multijob.get_status_line())
    returncodes = dict()

    # wait for finish
    while multijob.is_running():
        Printer.dyn('Updating job status')
        multijob.update()
        Printer.dyn(multijob.get_status_line())

        # if some jobs changed status, add a new line so the dynamic
        # output remains visible
        jobs_changed = multijob.get_all(status=JobState.COMPLETED)
        if jobs_changed:
            Printer.out()
            Printer.separator()

        # process all jobs whose status was updated to the COMPLETED state
        for job in jobs_changed:
            returncodes[job] = finish_pbs_job(job, arg_options.batch)

        if jobs_changed:
            Printer.separator()
            Printer.out()

        # after printing the status update, sleep for a bit
        if multijob.is_running():
            time.sleep(5)

    Printer.out(multijob.get_status_line())
    Printer.out('All jobs finished')

    # exit with the maximum return code, or 2 if there are no return codes at all
    returncode = max(returncodes.values()) if returncodes else 2
    sys.exit(returncode)