Example No. 1
    def __init__(self, **kw):
        super(HostRunner, self).__init__(**kw)

        self._spawned_pids = set()

        if self._config.reuse:
            log.warning('Reuse not supported for HostRunner.')
Example No. 2
    def stop_running_tasks(self):
        """Stop containers started by Popper."""
        for c in self._spawned_containers:
            log.info(f"Stopping container {c}")
            _, ecode, _ = HostRunner._exec_cmd(["podman", "stop", c], logging=False)
            if ecode != 0:
                log.warning(f"Failed to stop the {c} container")
Example No. 3
    def check_for_unreachable_actions(self, skip=None):
        """Validates a workflow by checking for unreachable nodes / gaps
        in the workflow.

        Args:
            skip (list): The list of actions to skip, if applicable.
        """

        def _traverse(entrypoint, reachable, actions):
            for node in entrypoint:
                reachable.add(node)
                _traverse(actions[node].get('next', []), reachable, actions)

        reachable = set()
        skipped = set(self.props.get('skip_list', []))
        actions = set(map(lambda a: a[0], self.action.items()))

        _traverse(self.root, reachable, self.action)

        unreachable = actions - reachable
        if unreachable - skipped:
            if skip:
                log.fail('Actions {} are unreachable.'.format(
                    ', '.join(unreachable - skipped))
                )
            else:
                log.warning('Actions {} are unreachable.'.format(
                    ', '.join(unreachable))
                )

        for a in unreachable:
            self.action.pop(a)
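To make the reachability check above concrete, here is a small self-contained sketch of the same depth-first traversal over a toy action graph; the action names and structure are invented purely for illustration:

def traverse(entrypoint, reachable, actions):
    # Depth-first walk over the 'next' edges, collecting every visited node.
    for node in entrypoint:
        reachable.add(node)
        traverse(actions[node].get('next', []), reachable, actions)

actions = {
    'build': {'next': ['test']},
    'test': {'next': ['deploy']},
    'deploy': {},
    'orphan': {},  # never referenced by the root or by any 'next' list
}
reachable = set()
traverse(['build'], reachable, actions)
print(set(actions) - reachable)  # {'orphan'} -> would be reported as unreachable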
Example No. 4
    def _stop_out_stream(self):
        _out_stream_pid = list(self._out_stream_pid)[0]
        try:
            # Signal 0 only checks that the process still exists; if it does,
            # terminate it with SIGKILL.
            os.kill(_out_stream_pid, 0)
            os.kill(_out_stream_pid, signal.SIGKILL)
        except ProcessLookupError:
            log.warning('Tail process was killed by some other process.')
        self._out_stream_thread.join()
Example No. 5
    def _stop_out_stream(self):
        if len(self._out_stream_pid) != 1:
            log.fail("Cannot find PID for tail process")
        _out_stream_pid = list(self._out_stream_pid)[0]
        try:
            os.kill(_out_stream_pid, signal.SIGKILL)
        except ProcessLookupError:
            log.warning("Tail process was stopped by some other process.")
        self._out_stream_thread.join()
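The two _stop_out_stream variants differ mainly in how they detect a vanished tail process: the first probes with os.kill(pid, 0), which sends no signal but raises ProcessLookupError if the PID no longer exists, before delivering SIGKILL; the second sends SIGKILL directly and catches the same error. A standalone sketch of the probe-then-kill pattern (POSIX only, for illustration):

import os
import signal

def kill_if_alive(pid):
    # Return True if the process was terminated here, False if it had
    # already exited before we got to it.
    try:
        os.kill(pid, 0)               # probe only: signal 0 delivers nothing
        os.kill(pid, signal.SIGKILL)  # actually terminate the process
        return True
    except ProcessLookupError:
        return False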
Example No. 6
def run_workflow(**kwargs):

    kwargs['wfile'] = pu.find_default_wfile(kwargs['wfile'])
    log.info('Found and running workflow at ' + kwargs['wfile'])
    # Initialize a Workflow. During initialization all the validation
    # takes place automatically.
    wf = Workflow(kwargs['wfile'])
    wf_runner = WorkflowRunner(wf)

    # Check for injected actions
    pre_wfile = os.environ.get('POPPER_PRE_WORKFLOW_PATH')
    post_wfile = os.environ.get('POPPER_POST_WORKFLOW_PATH')

    # Saving workflow instance for signal handling
    popper.cli.interrupt_params['parallel'] = kwargs['parallel']

    if kwargs['parallel']:
        if sys.version_info[0] < 3:
            log.fail('--parallel is only supported on Python3')
        log.warning("Using --parallel may result in interleaved output. "
                    "You may use --quiet flag to avoid confusion.")

    if kwargs['with_dependencies'] and (not kwargs['action']):
        log.fail('`--with-dependencies` can only be used with an '
                 'action argument.')

    if kwargs['skip'] and kwargs['action']:
        log.fail("`--skip` can't be used when an action argument is passed.")

    on_failure = kwargs.pop('on_failure')
    wfile = kwargs.pop('wfile')

    try:
        if pre_wfile:
            pre_wf = Workflow(pre_wfile)
            pre_wf_runner = WorkflowRunner(pre_wf)
            pre_wf_runner.run(**kwargs)

        wf_runner.run(**kwargs)

        if post_wfile:
            post_wf = Workflow(post_wfile)
            post_wf_runner = WorkflowRunner(post_wf)
            post_wf_runner.run(**kwargs)

    except SystemExit as e:
        if (e.code != 0) and on_failure:
            kwargs['skip'] = list()
            kwargs['action'] = on_failure
            wf_runner.run(**kwargs)
        else:
            raise

    if kwargs['action']:
        log.info('Action "{}" finished successfully.'.format(kwargs['action']))
    else:
        log.info('Workflow "{}" finished successfully.'.format(wfile))
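The try/except SystemExit block above is what implements the on_failure fallback: a failing run aborts by raising SystemExit with a non-zero code (e.g. via log.fail), and the handler re-runs the workflow restricted to the fallback action. A minimal standalone sketch of that pattern, with made-up stand-ins for the runner and the fallback action:

import sys

def risky_workflow():
    # Stand-in for wf_runner.run(); simulates a failing workflow.
    sys.exit(1)

def on_failure_action():
    print('running the on-failure action')

try:
    risky_workflow()
except SystemExit as e:
    if e.code != 0:
        on_failure_action()  # analogous to re-running with action=on_failure
    else:
        raise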
Example No. 7
    def check_for_unreachable_steps(self, skip=None):
        """Validates a workflow by checking for unreachable nodes / gaps in the
        workflow.

        Args:
          skip(list, optional): The list of steps to skip if applicable.
                                (Default value = None)

        Returns:
            None
        """
        if not skip or self.wf_fmt == 'yml':
            # noop
            return

        def _traverse(entrypoint, reachable, steps):
            """

            Args:
              entrypoint(set): Set containing the entry points of the part
                                of the workflow being traversed.
              reachable(set): Set containing all the reachable steps of the
                                workflow.
              steps(dict): Dictionary mapping each step identifier to its
                                description.

            Returns:
                None
            """
            for node in entrypoint:
                reachable.add(node)
                _traverse(steps[node].get('next', []), reachable, steps)

        reachable = set()
        skipped = set(self.props.get('skip_list', []))
        steps = set(map(lambda a: a[0], self.steps.items()))

        _traverse(self.root, reachable, self.steps)

        unreachable = steps - reachable
        if unreachable - skipped:
            if skip:
                log.fail(
                    f'Unreachable step(s): {", ".join(unreachable-skipped)}.')
            else:
                log.warning(f'Unreachable step(s): {", ".join(unreachable)}.')

        for a in unreachable:
            self.steps.pop(a)
Example No. 8
def cli(ctx, action, wfile, skip_clone, skip_pull, skip, workspace, reuse,
        recursive, quiet, debug, dry_run, parallel, log_file,
        with_dependencies, on_failure):
    """Executes one or more pipelines and reports on their status.
    """
    popper.scm.get_git_root_folder()
    level = 'ACTION_INFO'
    if quiet:
        level = 'INFO'
    if debug:
        level = 'DEBUG'
    log.setLevel(level)
    if log_file:
        logging.add_log(log, log_file)

    if os.environ.get('CI') == 'true':
        log.info("Running in CI environment.")
        if recursive:
            log.warning('When CI variable is set, --recursive is ignored.')
        wfile_list = pu.find_recursive_wfile()
        wfile_list = workflows_from_commit_message(wfile_list)
    else:
        if recursive:
            if action:
                log.fail(
                    "An 'action' argument and the --recursive flag cannot be "
                    "both given.")
            wfile_list = pu.find_recursive_wfile()
        else:
            wfile_list = [wfile]

    if not wfile_list:
        log.fail("No workflow to execute.")

    for wfile in wfile_list:
        wfile = pu.find_default_wfile(wfile)
        log.info("Found and running workflow at " + wfile)
        run_pipeline(action, wfile, skip_clone, skip_pull, skip, workspace,
                     reuse, dry_run, parallel, with_dependencies, on_failure)
Example No. 9
    def stop_running_tasks(self):
        """Cancel SLURM jobs started by Popper."""
        for job_name in self._spawned_jobs:
            log.info(f"Cancelling job {job_name}")
            _, ecode, _ = HostRunner._exec_cmd(["scancel", "--name", job_name])
            if ecode != 0:
                log.warning(f"Failed to cancel the job {job_name}.")
Example No. 10
    def stop_running_tasks(self):
        for job_name in self._spawned_jobs:
            log.info(f'Cancelling job {job_name}')
            _, ecode, _ = HostRunner._exec_cmd(['scancel', '--name', job_name])
            if ecode != 0:
                log.warning(f'Failed to cancel the job {job_name}.')