Example #1
def init_runner(**kwargs):
    '''
    Initialize the Runner() instance

    This function will properly initialize both run() and run_async()
    functions in the same way and return an instance of Runner.

    See parameters given to :py:func:`ansible_runner.interface.run`
    '''
    dump_artifacts(kwargs)

    output.configure()

    debug = kwargs.pop('debug', None)
    if debug in (True, False):
        output.set_debug('enable' if debug is True else 'disable')

    logfile = kwargs.pop('logfile', None)
    if logfile:
        output.set_logfile(logfile)

    rc = RunnerConfig(**kwargs)
    rc.prepare()

    return Runner(rc)
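For orientation: in ansible_runner.interface, run() and run_async() are thin wrappers around this helper. A minimal sketch of that relationship, assuming the init_runner() above is in scope (a sketch, not a verbatim copy of the library code):

import threading

def run(**kwargs):
    # build the Runner and execute it synchronously
    r = init_runner(**kwargs)
    r.run()
    return r

def run_async(**kwargs):
    # build the Runner and execute it in a background thread
    r = init_runner(**kwargs)
    runner_thread = threading.Thread(target=r.run)
    runner_thread.start()
    return runner_thread, r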
Example #2
def init_runner(**kwargs):
    '''
    Initialize the Runner() instance

    This function will properly initialize both run() and run_async()
    functions in the same way and return an instance of Runner.

    See parameters given to :py:func:`ansible_runner.interface.run`
    '''
    dump_artifacts(kwargs)

    debug = kwargs.pop('debug', None)
    logfile = kwargs.pop('logfile', None)

    if not kwargs.pop("ignore_logging", True):
        output.configure()
        if debug in (True, False):
            output.set_debug('enable' if debug is True else 'disable')

        if logfile:
            output.set_logfile(logfile)

    event_callback_handler = kwargs.pop('event_handler', None)
    cancel_callback = kwargs.pop('cancel_callback', None)
    finished_callback = kwargs.pop('finished_callback', None)

    rc = RunnerConfig(**kwargs)
    rc.prepare()

    return Runner(rc,
                  event_handler=event_callback_handler,
                  cancel_callback=cancel_callback,
                  finished_callback=finished_callback)
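A caller passes the callback keyword arguments that this version pops off before building RunnerConfig. A hedged usage sketch via ansible_runner.run(); the private data dir and playbook name are placeholders:

import ansible_runner

def on_event(event):
    # each event is a dict; 'stdout' carries the rendered output for that event
    print(event.get('stdout', ''))

def on_finished(runner):
    print('run finished with status:', runner.status)

r = ansible_runner.run(
    private_data_dir='/tmp/demo',   # placeholder private data dir
    playbook='site.yml',            # placeholder playbook
    event_handler=on_event,
    finished_callback=on_finished,
)
print(r.rc, r.status)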
Example #3
def main():
    parser = argparse.ArgumentParser(description='manage ansible execution')

    parser.add_argument('--version', action='version', version=VERSION)

    parser.add_argument('command', choices=['run', 'start', 'stop', 'is-alive'])

    parser.add_argument('private_data_dir',
                        help='Base directory containing Runner metadata (project, inventory, etc.)')

    group = parser.add_mutually_exclusive_group()

    group.add_argument("-m", "--module", default=DEFAULT_RUNNER_MODULE,
                       help="Invoke an Ansible module directly without a playbook")

    group.add_argument("-p", "--playbook", default=DEFAULT_RUNNER_PLAYBOOK,
                       help="The name of the playbook to execute")

    group.add_argument("-r", "--role", default=DEFAULT_RUNNER_ROLE,
                       help="Invoke an Ansible role directly without a playbook")

    parser.add_argument("--hosts",
                        help="Define the set of hosts to execute against")

    parser.add_argument("-i", "--ident",
                        default=uuid4(),
                        help="An identifier that will be used when generating the"
                             "artifacts directory and can be used to uniquely identify a playbook run")

    parser.add_argument("--rotate-artifacts",
                        default=0,
                        type=int,
                        help="Automatically clean up old artifact directories after a given number has been created, the default is 0 which disables rotation")

    parser.add_argument("--roles-path", default=DEFAULT_ROLES_PATH,
                        help="Path to the Ansible roles directory")

    parser.add_argument("--role-vars",
                        help="Variables to pass to the role at runtime")

    parser.add_argument("--role-skip-facts", action="store_true", default=False,
                        help="Disable fact collection when executing a role directly")
    parser.add_argument("--artifact-dir",
                        help="Optional Path for the artifact root directory, by default it is located inside the private data dir")

    parser.add_argument("--inventory",
                        help="Override the default inventory location in private_data_dir")

    parser.add_argument("-j", "--json", action="store_true",
                        help="Output the json event structure to stdout instead of Ansible output")

    parser.add_argument("-v", action="count",
                        help="Increase the verbosity with multiple v's (up to 5) of the ansible-playbook output")

    parser.add_argument("-q", "--quiet", action="store_true",
                        help="Disable all output")

    parser.add_argument("--cmdline",
                        help="Command line options to pass to ansible-playbook at execution time")
    parser.add_argument("--debug", action="store_true",
                        help="Enable Runner debug output logging")

    parser.add_argument("--logfile",
                        help="Log output messages to a file")

    parser.add_argument("-a", "--args", dest='module_args',
                        help="Module arguments")

    args = parser.parse_args()

    output.configure()

    # enable or disable debug mode
    output.set_debug('enable' if args.debug else 'disable')

    # set the output logfile
    if args.logfile:
        output.set_logfile(args.logfile)

    output.debug('starting debug logging')

    # get the absolute path for start since it is a daemon
    args.private_data_dir = os.path.abspath(args.private_data_dir)

    pidfile = os.path.join(args.private_data_dir, 'pid')

    try:
        os.makedirs(args.private_data_dir)
    except OSError as exc:
        if exc.errno == errno.EEXIST and os.path.isdir(args.private_data_dir):
            pass
        else:
            raise

    if args.command != 'run':
        stderr_path = os.path.join(args.private_data_dir, 'daemon.log')
        if not os.path.exists(stderr_path):
            os.close(os.open(stderr_path, os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR))
        stderr = open(stderr_path, 'w+')

    if args.command in ('start', 'run'):

        if args.command == 'start':
            import daemon
            from daemon.pidfile import TimeoutPIDLockFile
            context = daemon.DaemonContext(
                pidfile=TimeoutPIDLockFile(pidfile),
                stderr=stderr
            )
        else:
            context = threading.Lock()

        with context:
            with role_manager(args) as args:
                run_options = dict(private_data_dir=args.private_data_dir,
                                   ident=args.ident,
                                   playbook=args.playbook,
                                   module=args.module,
                                   module_args=args.module_args,
                                   host_pattern=args.hosts,
                                   verbosity=args.v,
                                   quiet=args.quiet,
                                   rotate_artifacts=args.rotate_artifacts,
                                   ignore_logging=False,
                                   json_mode=args.json)

                if args.hosts is not None:
                    run_options.update(inventory=args.hosts)

                if args.cmdline:
                    run_options['cmdline'] = args.cmdline

                res = run(**run_options)
            sys.exit(res.rc)

    try:
        with open(pidfile, 'r') as f:
            pid = int(f.readline())
    except IOError:
        sys.exit(1)

    if args.command == 'stop':
        try:
            with open(os.path.join(args.private_data_dir, 'args'), 'r') as args:
                Runner.handle_termination(pid)
        except IOError:
            Runner.handle_termination(pid)

    elif args.command == 'is-alive':
        try:
            os.kill(pid, signal.SIG_DFL)
            sys.exit(0)
        except OSError:
            sys.exit(1)
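A note on the 'is-alive' branch above: signal.SIG_DFL evaluates to 0, and sending signal 0 only performs the existence and permission checks on the target PID without delivering anything. A small standalone sketch of the same check:

import os
import signal

def pid_is_alive(pid):
    # Returns True if signal 0 can be delivered, i.e. the process exists
    # and we are allowed to signal it; raises OSError otherwise.
    try:
        os.kill(pid, signal.SIG_DFL)  # equivalent to os.kill(pid, 0)
        return True
    except OSError:
        return False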
Example #4
def main(sys_args=None):
    """Main entry point for ansible-runner executable

    When the ```ansible-runner``` command is executed, this function
    is the main entry point that is called and executed.

    :param sys_args: List of arguments to be parsed by the parser
    :type sys_args: list

    :returns: the exit code of the command
    :rtype: int
    """

    parser = argparse.ArgumentParser(
        prog='ansible-runner',
        description=
        "Use 'ansible-runner' (with no arguments) to see basic usage")
    subparser = parser.add_subparsers(
        help="Command to invoke",
        dest='command',
        description="COMMAND PRIVATE_DATA_DIR [ARGS]")
    add_args_to_parser(parser, DEFAULT_CLI_ARGS['generic_args'])
    subparser.required = True

    # positional options
    run_subparser = subparser.add_parser(
        'run', help="Run ansible-runner in the foreground")
    add_args_to_parser(run_subparser, DEFAULT_CLI_ARGS['positional_args'])
    start_subparser = subparser.add_parser(
        'start', help="Start an ansible-runner process in the background")
    add_args_to_parser(start_subparser, DEFAULT_CLI_ARGS['positional_args'])
    stop_subparser = subparser.add_parser(
        'stop',
        help="Stop an ansible-runner process that's running in the background")
    add_args_to_parser(stop_subparser, DEFAULT_CLI_ARGS['positional_args'])
    isalive_subparser = subparser.add_parser(
        'is-alive',
        help=
        "Check if a an ansible-runner process in the background is still running."
    )
    add_args_to_parser(isalive_subparser, DEFAULT_CLI_ARGS['positional_args'])

    # streaming commands
    transmit_subparser = subparser.add_parser(
        'transmit', help="Send a job to a remote ansible-runner process")
    add_args_to_parser(transmit_subparser, DEFAULT_CLI_ARGS['positional_args'])

    worker_subparser = subparser.add_parser(
        'worker', help="Execute work streamed from a controlling instance")
    worker_subparser.add_argument(
        "--private-data-dir",
        help="base directory containing the ansible-runner metadata "
        "(project, inventory, env, etc)",
    )

    process_subparser = subparser.add_parser(
        'process',
        help=
        "Receive the output of remote ansible-runner work and distribute the results"
    )
    add_args_to_parser(process_subparser, DEFAULT_CLI_ARGS['positional_args'])

    # adhoc command exec
    adhoc_subparser = subparser.add_parser(
        'adhoc', help="Run ansible adhoc commands in an Execution Environment")
    adhoc_subparser.add_argument(
        "--private-data-dir",
        help="base directory containing the ansible-runner metadata "
        "(project, inventory, env, etc)",
    )
    add_args_to_parser(adhoc_subparser, DEFAULT_CLI_ARGS['execenv_cli_group'])

    # playbook command exec
    playbook_subparser = subparser.add_parser(
        'playbook',
        help="Run ansible-playbook commands in an Execution Environment")
    playbook_subparser.add_argument(
        "--private-data-dir",
        help="base directory containing the ansible-runner metadata "
        "(project, inventory, env, etc)",
    )
    add_args_to_parser(playbook_subparser,
                       DEFAULT_CLI_ARGS['execenv_cli_group'])

    # generic args for all subparsers
    add_args_to_parser(run_subparser, DEFAULT_CLI_ARGS['generic_args'])
    add_args_to_parser(start_subparser, DEFAULT_CLI_ARGS['generic_args'])
    add_args_to_parser(stop_subparser, DEFAULT_CLI_ARGS['generic_args'])
    add_args_to_parser(isalive_subparser, DEFAULT_CLI_ARGS['generic_args'])
    add_args_to_parser(adhoc_subparser, DEFAULT_CLI_ARGS['generic_args'])
    add_args_to_parser(playbook_subparser, DEFAULT_CLI_ARGS['generic_args'])
    add_args_to_parser(transmit_subparser, DEFAULT_CLI_ARGS['generic_args'])
    add_args_to_parser(worker_subparser, DEFAULT_CLI_ARGS['generic_args'])
    add_args_to_parser(process_subparser, DEFAULT_CLI_ARGS['generic_args'])

    # runner group
    ansible_runner_group_options = (
        "Ansible Runner Options",
        "configuration options for controlling the ansible-runner "
        "runtime environment.",
    )
    base_runner_group = parser.add_argument_group(
        *ansible_runner_group_options)
    run_runner_group = run_subparser.add_argument_group(
        *ansible_runner_group_options)
    start_runner_group = start_subparser.add_argument_group(
        *ansible_runner_group_options)
    stop_runner_group = stop_subparser.add_argument_group(
        *ansible_runner_group_options)
    isalive_runner_group = isalive_subparser.add_argument_group(
        *ansible_runner_group_options)
    transmit_runner_group = transmit_subparser.add_argument_group(
        *ansible_runner_group_options)
    add_args_to_parser(base_runner_group, DEFAULT_CLI_ARGS['runner_group'])
    add_args_to_parser(run_runner_group, DEFAULT_CLI_ARGS['runner_group'])
    add_args_to_parser(start_runner_group, DEFAULT_CLI_ARGS['runner_group'])
    add_args_to_parser(stop_runner_group, DEFAULT_CLI_ARGS['runner_group'])
    add_args_to_parser(isalive_runner_group, DEFAULT_CLI_ARGS['runner_group'])
    add_args_to_parser(transmit_runner_group, DEFAULT_CLI_ARGS['runner_group'])

    # mutually exclusive group
    run_mutually_exclusive_group = run_subparser.add_mutually_exclusive_group()
    start_mutually_exclusive_group = start_subparser.add_mutually_exclusive_group()
    stop_mutually_exclusive_group = stop_subparser.add_mutually_exclusive_group()
    isalive_mutually_exclusive_group = isalive_subparser.add_mutually_exclusive_group()
    transmit_mutually_exclusive_group = transmit_subparser.add_mutually_exclusive_group()
    add_args_to_parser(run_mutually_exclusive_group,
                       DEFAULT_CLI_ARGS['mutually_exclusive_group'])
    add_args_to_parser(start_mutually_exclusive_group,
                       DEFAULT_CLI_ARGS['mutually_exclusive_group'])
    add_args_to_parser(stop_mutually_exclusive_group,
                       DEFAULT_CLI_ARGS['mutually_exclusive_group'])
    add_args_to_parser(isalive_mutually_exclusive_group,
                       DEFAULT_CLI_ARGS['mutually_exclusive_group'])
    add_args_to_parser(transmit_mutually_exclusive_group,
                       DEFAULT_CLI_ARGS['mutually_exclusive_group'])

    # ansible options
    ansible_options = (
        "Ansible Options",
        "control the ansible[-playbook] execution environment",
    )
    run_ansible_group = run_subparser.add_argument_group(*ansible_options)
    start_ansible_group = start_subparser.add_argument_group(*ansible_options)
    stop_ansible_group = stop_subparser.add_argument_group(*ansible_options)
    isalive_ansible_group = isalive_subparser.add_argument_group(
        *ansible_options)
    transmit_ansible_group = transmit_subparser.add_argument_group(
        *ansible_options)
    add_args_to_parser(run_ansible_group, DEFAULT_CLI_ARGS['ansible_group'])
    add_args_to_parser(start_ansible_group, DEFAULT_CLI_ARGS['ansible_group'])
    add_args_to_parser(stop_ansible_group, DEFAULT_CLI_ARGS['ansible_group'])
    add_args_to_parser(isalive_ansible_group,
                       DEFAULT_CLI_ARGS['ansible_group'])
    add_args_to_parser(transmit_ansible_group,
                       DEFAULT_CLI_ARGS['ansible_group'])

    # roles group
    roles_group_options = (
        "Ansible Role Options",
        "configuration options for directly executing Ansible roles",
    )
    run_roles_group = run_subparser.add_argument_group(*roles_group_options)
    start_roles_group = start_subparser.add_argument_group(
        *roles_group_options)
    stop_roles_group = stop_subparser.add_argument_group(*roles_group_options)
    isalive_roles_group = isalive_subparser.add_argument_group(
        *roles_group_options)
    transmit_roles_group = transmit_subparser.add_argument_group(
        *roles_group_options)
    add_args_to_parser(run_roles_group, DEFAULT_CLI_ARGS['roles_group'])
    add_args_to_parser(start_roles_group, DEFAULT_CLI_ARGS['roles_group'])
    add_args_to_parser(stop_roles_group, DEFAULT_CLI_ARGS['roles_group'])
    add_args_to_parser(isalive_roles_group, DEFAULT_CLI_ARGS['roles_group'])
    add_args_to_parser(transmit_roles_group, DEFAULT_CLI_ARGS['roles_group'])

    # modules groups

    modules_group_options = (
        "Ansible Module Options",
        "configuration options for directly executing Ansible modules",
    )
    run_modules_group = run_subparser.add_argument_group(
        *modules_group_options)
    start_modules_group = start_subparser.add_argument_group(
        *modules_group_options)
    stop_modules_group = stop_subparser.add_argument_group(
        *modules_group_options)
    isalive_modules_group = isalive_subparser.add_argument_group(
        *modules_group_options)
    transmit_modules_group = transmit_subparser.add_argument_group(
        *modules_group_options)
    add_args_to_parser(run_modules_group, DEFAULT_CLI_ARGS['modules_group'])
    add_args_to_parser(start_modules_group, DEFAULT_CLI_ARGS['modules_group'])
    add_args_to_parser(stop_modules_group, DEFAULT_CLI_ARGS['modules_group'])
    add_args_to_parser(isalive_modules_group,
                       DEFAULT_CLI_ARGS['modules_group'])
    add_args_to_parser(transmit_modules_group,
                       DEFAULT_CLI_ARGS['modules_group'])

    # playbook options
    playbook_group_options = (
        "Ansible Playbook Options",
        "configuation options for executing Ansible playbooks",
    )
    run_playbook_group = run_subparser.add_argument_group(
        *playbook_group_options)
    start_playbook_group = start_subparser.add_argument_group(
        *playbook_group_options)
    stop_playbook_group = stop_subparser.add_argument_group(
        *playbook_group_options)
    isalive_playbook_group = isalive_subparser.add_argument_group(
        *playbook_group_options)
    transmit_playbook_group = transmit_subparser.add_argument_group(
        *playbook_group_options)
    add_args_to_parser(run_playbook_group, DEFAULT_CLI_ARGS['playbook_group'])
    add_args_to_parser(start_playbook_group,
                       DEFAULT_CLI_ARGS['playbook_group'])
    add_args_to_parser(stop_playbook_group, DEFAULT_CLI_ARGS['playbook_group'])
    add_args_to_parser(isalive_playbook_group,
                       DEFAULT_CLI_ARGS['playbook_group'])
    add_args_to_parser(transmit_playbook_group,
                       DEFAULT_CLI_ARGS['playbook_group'])

    # container group
    container_group_options = (
        "Ansible Container Options",
        "configuation options for executing Ansible playbooks",
    )
    run_container_group = run_subparser.add_argument_group(
        *container_group_options)
    start_container_group = start_subparser.add_argument_group(
        *container_group_options)
    stop_container_group = stop_subparser.add_argument_group(
        *container_group_options)
    isalive_container_group = isalive_subparser.add_argument_group(
        *container_group_options)
    transmit_container_group = transmit_subparser.add_argument_group(
        *container_group_options)
    adhoc_container_group = adhoc_subparser.add_argument_group(
        *container_group_options)
    playbook_container_group = playbook_subparser.add_argument_group(
        *container_group_options)
    add_args_to_parser(run_container_group,
                       DEFAULT_CLI_ARGS['container_group'])
    add_args_to_parser(start_container_group,
                       DEFAULT_CLI_ARGS['container_group'])
    add_args_to_parser(stop_container_group,
                       DEFAULT_CLI_ARGS['container_group'])
    add_args_to_parser(isalive_container_group,
                       DEFAULT_CLI_ARGS['container_group'])
    add_args_to_parser(transmit_container_group,
                       DEFAULT_CLI_ARGS['container_group'])
    add_args_to_parser(adhoc_container_group,
                       DEFAULT_CLI_ARGS['container_group'])
    add_args_to_parser(playbook_container_group,
                       DEFAULT_CLI_ARGS['container_group'])

    if len(sys.argv) == 1:
        parser.print_usage()
        print_common_usage()
        parser.exit(status=0)

    if ('playbook' in sys.argv) or ('adhoc' in sys.argv):
        args, leftover_args = parser.parse_known_args(sys_args)
    else:
        args = parser.parse_args(sys_args)

    vargs = vars(args)

    # FIXME - Probably a more elegant way to handle this.
    # set some state about CLI Exec Env
    cli_execenv_cmd = ""

    if vargs.get('command') in ('adhoc', 'playbook'):
        cli_execenv_cmd = vargs.get('command')

        if not leftover_args:
            parser.exit(
                status=1,
                message=
                "The {} subcommand requires arguments to pass to Ansible inside the container.\n"
                .format(vargs.get('command')))

    if vargs.get('command') in ('worker', 'process', 'adhoc', 'playbook'):
        if not vargs.get('private_data_dir'):
            temp_private_dir = tempfile.mkdtemp()
            vargs['private_data_dir'] = temp_private_dir
            if vargs.get('keep_files', False):
                print("ANSIBLE-RUNNER: keeping temporary data directory: {}".
                      format(temp_private_dir))
            else:

                @atexit.register
                def conditionally_clean_cli_execenv_tempdir():
                    shutil.rmtree(temp_private_dir)

    if vargs.get('command') in ('start', 'run', 'transmit'):
        if vargs.get('hosts') and not (vargs.get('module')
                                       or vargs.get('role')):
            parser.exit(
                status=1,
                message="The --hosts option can only be used with -m or -r\n")
        if not (vargs.get('module')
                or vargs.get('role')) and not vargs.get('playbook'):
            parser.exit(
                status=1,
                message=
                "The -p option must be specified when not using -m or -r\n")

    output.configure()

    # enable or disable debug mode
    output.set_debug('enable' if vargs.get('debug') else 'disable')

    # set the output logfile
    if ('logfile' in args) and vargs.get('logfile'):
        output.set_logfile(vargs.get('logfile'))

    output.debug('starting debug logging')

    # get the absolute path for start since it is a daemon
    vargs['private_data_dir'] = os.path.abspath(vargs.get('private_data_dir'))

    pidfile = os.path.join(vargs.get('private_data_dir'), 'pid')

    try:
        os.makedirs(vargs.get('private_data_dir'), mode=0o700)
    except OSError as exc:
        if exc.errno == errno.EEXIST and os.path.isdir(
                vargs.get('private_data_dir')):
            pass
        else:
            raise

    stderr_path = None
    context = None
    if vargs.get('command') not in ('run', 'transmit', 'worker', 'adhoc',
                                    'playbook'):
        stderr_path = os.path.join(vargs.get('private_data_dir'), 'daemon.log')
        if not os.path.exists(stderr_path):
            os.close(
                os.open(stderr_path, os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR))

    if vargs.get('command') in ('start', 'run', 'transmit', 'worker',
                                'process', 'adhoc', 'playbook'):

        if vargs.get('command') == 'start':
            import daemon
            from daemon.pidfile import TimeoutPIDLockFile
            context = daemon.DaemonContext(pidfile=TimeoutPIDLockFile(pidfile))
        else:
            context = threading.Lock()

        streamer = None
        if vargs.get('command') in ('transmit', 'worker', 'process'):
            streamer = vargs.get('command')

        with context:
            with role_manager(vargs) as vargs:
                run_options = dict(
                    private_data_dir=vargs.get('private_data_dir'),
                    ident=vargs.get('ident'),
                    binary=vargs.get('binary'),
                    playbook=vargs.get('playbook'),
                    module=vargs.get('module'),
                    module_args=vargs.get('module_args'),
                    host_pattern=vargs.get('hosts'),
                    verbosity=vargs.get('v'),
                    quiet=vargs.get('quiet'),
                    rotate_artifacts=vargs.get('rotate_artifacts'),
                    ignore_logging=False,
                    json_mode=vargs.get('json'),
                    omit_event_data=vargs.get('omit_event_data'),
                    only_failed_event_data=vargs.get('only_failed_event_data'),
                    inventory=vargs.get('inventory'),
                    forks=vargs.get('forks'),
                    project_dir=vargs.get('project_dir'),
                    artifact_dir=vargs.get('artifact_dir'),
                    roles_path=[vargs.get('roles_path')]
                    if vargs.get('roles_path') else None,
                    process_isolation=vargs.get('process_isolation'),
                    process_isolation_executable=vargs.get(
                        'process_isolation_executable'),
                    process_isolation_path=vargs.get('process_isolation_path'),
                    process_isolation_hide_paths=vargs.get(
                        'process_isolation_hide_paths'),
                    process_isolation_show_paths=vargs.get(
                        'process_isolation_show_paths'),
                    process_isolation_ro_paths=vargs.get(
                        'process_isolation_ro_paths'),
                    container_image=vargs.get('container_image'),
                    container_volume_mounts=vargs.get(
                        'container_volume_mounts'),
                    container_options=vargs.get('container_options'),
                    directory_isolation_base_path=vargs.get(
                        'directory_isolation_base_path'),
                    resource_profiling=vargs.get('resource_profiling'),
                    resource_profiling_base_cgroup=vargs.get(
                        'resource_profiling_base_cgroup'),
                    resource_profiling_cpu_poll_interval=vargs.get(
                        'resource_profiling_cpu_poll_interval'),
                    resource_profiling_memory_poll_interval=vargs.get(
                        'resource_profiling_memory_poll_interval'),
                    resource_profiling_pid_poll_interval=vargs.get(
                        'resource_profiling_pid_poll_interval'),
                    resource_profiling_results_dir=vargs.get(
                        'resource_profiling_results_dir'),
                    limit=vargs.get('limit'),
                    streamer=streamer,
                    cli_execenv_cmd=cli_execenv_cmd)
                if vargs.get('command') in ('adhoc', 'playbook'):
                    run_options['cmdline'] = sys.argv[sys.argv.index(leftover_args[0]):]
                    run_options['process_isolation'] = True
                    run_options['process_isolation_executable'] = vargs.get(
                        'container_runtime')

                try:
                    res = run(**run_options)
                except Exception:
                    exc = traceback.format_exc()
                    if stderr_path:
                        open(stderr_path, 'w+').write(exc)
                    else:
                        sys.stderr.write(exc)
                    return 1
            return (res.rc)

    try:
        with open(pidfile, 'r') as f:
            pid = int(f.readline())
    except IOError:
        return (1)

    if vargs.get('command') == 'stop':
        Runner.handle_termination(pid, pidfile=pidfile)
        return (0)

    elif vargs.get('command') == 'is-alive':
        try:
            os.kill(pid, signal.SIG_DFL)
            return (0)
        except OSError:
            return (1)
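Because this main() accepts sys_args, it can also be driven programmatically rather than only through the console script. A hedged sketch, assuming the function lives at its usual location (ansible_runner.__main__) and that /tmp/demo is a prepared private data dir containing project/site.yml (both placeholders):

from ansible_runner.__main__ import main

# run a playbook in the foreground and propagate the exit code
rc = main(['run', '/tmp/demo', '-p', 'site.yml'])
print('ansible-runner exited with', rc)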
Example #5
def init_runner(**kwargs):
    '''
    Initialize the Runner() instance

    This function will properly initialize both run() and run_async()
    functions in the same way and return an instance of Runner.

    See parameters given to :py:func:`ansible_runner.interface.run`
    '''
    # If running via the transmit-worker-process method, we must only extract things as read-only
    # inside of one of these commands. That could be either transmit or worker.
    if not kwargs.get('cli_execenv_cmd') and (kwargs.get('streamer')
                                              not in ('worker', 'process')):
        dump_artifacts(kwargs)

    if kwargs.get('streamer'):
        # undo any full paths that were dumped by dump_artifacts above in the streamer case
        private_data_dir = kwargs['private_data_dir']
        project_dir = os.path.join(private_data_dir, 'project')

        playbook_path = kwargs.get('playbook') or ''
        if os.path.isabs(playbook_path) and playbook_path.startswith(
                project_dir):
            kwargs['playbook'] = os.path.relpath(playbook_path, project_dir)

        inventory_path = kwargs.get('inventory') or ''
        if os.path.isabs(inventory_path) and inventory_path.startswith(
                private_data_dir):
            kwargs['inventory'] = os.path.relpath(inventory_path,
                                                  private_data_dir)

        roles_path = kwargs.get('envvars', {}).get('ANSIBLE_ROLES_PATH') or ''
        if os.path.isabs(roles_path) and roles_path.startswith(
                private_data_dir):
            kwargs['envvars']['ANSIBLE_ROLES_PATH'] = os.path.relpath(
                roles_path, private_data_dir)

    debug = kwargs.pop('debug', None)
    logfile = kwargs.pop('logfile', None)

    if not kwargs.pop("ignore_logging", True):
        output.configure()
        if debug in (True, False):
            output.set_debug('enable' if debug is True else 'disable')

        if logfile:
            output.set_logfile(logfile)

    if kwargs.get("process_isolation", False):
        pi_executable = kwargs.get("process_isolation_executable", "podman")
        if not check_isolation_executable_installed(pi_executable):
            print(
                f'Unable to find process isolation executable: {pi_executable}'
            )
            sys.exit(1)

    event_callback_handler = kwargs.pop('event_handler', None)
    status_callback_handler = kwargs.pop('status_handler', None)
    artifacts_handler = kwargs.pop('artifacts_handler', None)
    cancel_callback = kwargs.pop('cancel_callback', None)
    finished_callback = kwargs.pop('finished_callback', None)

    streamer = kwargs.pop('streamer', None)
    if streamer:
        if streamer == 'transmit':
            stream_transmitter = Transmitter(**kwargs)
            return stream_transmitter

        if streamer == 'worker':
            stream_worker = Worker(**kwargs)
            return stream_worker

        if streamer == 'process':
            stream_processor = Processor(
                event_handler=event_callback_handler,
                status_handler=status_callback_handler,
                artifacts_handler=artifacts_handler,
                cancel_callback=cancel_callback,
                finished_callback=finished_callback,
                **kwargs)
            return stream_processor

    kwargs.pop('_input', None)
    kwargs.pop('_output', None)
    rc = RunnerConfig(**kwargs)
    rc.prepare()

    return Runner(rc,
                  event_handler=event_callback_handler,
                  status_handler=status_callback_handler,
                  artifacts_handler=artifacts_handler,
                  cancel_callback=cancel_callback,
                  finished_callback=finished_callback)
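The streamer handling above corresponds to the transmit/worker/process split of ansible_runner.run(). A compressed single-process sketch using in-memory buffers (in practice the worker phase runs on a remote node; directory names are placeholders):

import io
import ansible_runner

# phase 1: serialize the job from the controller's private data dir
outgoing = io.BytesIO()
ansible_runner.run(private_data_dir='/tmp/demo', playbook='site.yml',
                   streamer='transmit', _output=outgoing)

# phase 2: execute the streamed job on the "remote" side
outgoing.seek(0)
results = io.BytesIO()
ansible_runner.run(private_data_dir='/tmp/worker', streamer='worker',
                   _input=outgoing, _output=results)

# phase 3: process the returned events/artifacts back on the controller
results.seek(0)
ansible_runner.run(private_data_dir='/tmp/demo', streamer='process',
                   _input=results)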
Example #6
def main(sys_args=None):
    """Main entry point for ansible-runner executable

    When the ```ansible-runner``` command is executed, this function
    is the main entry point that is called and executed.

    :param sys_args: List of arguments to be parsed by the parser
    :type sys_args: list

    :returns: the exit code of the command
    :rtype: int
    """
    parser = argparse.ArgumentParser(
        description=
        "Use 'ansible-runner' (with no arguments) to see basic usage")

    parser.add_argument('--version', action='version', version=VERSION)

    # positional options

    parser.add_argument(
        "command",
        choices=["run", "start", "stop", "is-alive"],
        metavar="COMMAND",
        help="command directive for controlling ansible-runner execution "
        "(one of 'run', 'start', 'stop', 'is-alive')"
        #help="command directive controlling ansible-runner execution"
    )

    parser.add_argument(
        'private_data_dir',
        help="base directory cotnaining the ansible-runner metadata "
        "(project, inventory, env, etc)")

    # mutually exclusive group

    group = parser.add_mutually_exclusive_group()

    group.add_argument(
        "-p",
        "--playbook",
        default=DEFAULT_RUNNER_PLAYBOOK,
        help="invoke an Ansible playbook from the ansible-runner project "
        "(See Ansible Playbook Options below)")

    group.add_argument(
        "-m",
        "--module",
        default=DEFAULT_RUNNER_MODULE,
        help="invoke an Ansible module directly without a playbook "
        "(See Ansible Module Options below)")

    group.add_argument(
        "-r",
        "--role",
        default=DEFAULT_RUNNER_ROLE,
        help="invoke an Ansible role directly without a playbook "
        "(See Ansible Role Options below)")

    # ansible-runner options

    runner_group = parser.add_argument_group(
        "Ansible Runner Options",
        "configuration options for controlling the ansible-runner "
        "runtime environment.")

    runner_group.add_argument(
        "--debug",
        action="store_true",
        help="enable ansible-runner debug output logging (default=False)")

    runner_group.add_argument(
        "--logfile", help="log output messages to a file (default=None)")

    runner_group.add_argument(
        "-b",
        "--binary",
        default=DEFAULT_RUNNER_BINARY,
        help="specifies the full path pointing to the Ansible binaries "
        "(default={})".format(DEFAULT_RUNNER_BINARY))

    runner_group.add_argument(
        "-i",
        "--ident",
        default=DEFAULT_UUID,
        help="an identifier that will be used when generating the artifacts "
        "directory and can be used to uniquely identify a playbook run "
        "(default={})".format(DEFAULT_UUID))

    runner_group.add_argument(
        "--rotate-artifacts",
        default=0,
        type=int,
        help="automatically clean up old artifact directories after a given "
        "number have been created (default=0, disabled)")

    runner_group.add_argument(
        "--artifact-dir",
        help="optional path for the artifact root directory "
        "(default=<private_data_dir>/artifacts)")

    runner_group.add_argument(
        "--project-dir",
        help="optional path for the location of the playbook content directory "
        "(default=<private_data_dir/project)")

    runner_group.add_argument(
        "--inventory",
        help="optional path for the location of the inventory content directory "
        "(default=<private_data_dir>/inventory)")

    runner_group.add_argument(
        "-j",
        "--json",
        action="store_true",
        help="output the JSON event structure to stdout instead of "
        "Ansible output (default=False)")

    runner_group.add_argument(
        "--omit-event-data",
        action="store_true",
        help="Omits including extra event data in the callback payloads "
        "or the Runner payload data files "
        "(status and stdout still included)")

    runner_group.add_argument(
        "--only-failed-event-data",
        action="store_true",
        help="Only adds extra event data for failed tasks in the callback "
        "payloads or the Runner payload data files "
        "(status and stdout still included for other events)")

    runner_group.add_argument(
        "-q",
        "--quiet",
        action="store_true",
        help="disable all messages sent to stdout/stderr (default=False)")

    runner_group.add_argument(
        "-v",
        action="count",
        help="increase the verbosity with multiple v's (up to 5) of the "
        "ansible-playbook output (default=None)")

    # ansible options

    ansible_group = parser.add_argument_group(
        "Ansible Options",
        "control the ansible[-playbook] execution environment")

    ansible_group.add_argument(
        "--limit",
        help="matches Ansible's ```--limit``` parameter to further constrain "
        "the inventory to be used (default=None)")

    ansible_group.add_argument(
        "--cmdline",
        help="command line options to pass to ansible-playbook at "
        "execution time (default=None)")

    ansible_group.add_argument(
        "--hosts",
        help="define the set of hosts to execute against (default=None) "
        "Note: this parameter only works with -m or -r")

    ansible_group.add_argument(
        "--forks",
        help="matches Ansible's ```--forks``` parameter to set the number "
        "of conconurent processes (default=None)")

    # roles group

    roles_group = parser.add_argument_group(
        "Ansible Role Options",
        "configuration options for directly executing Ansible roles")

    roles_group.add_argument(
        "--roles-path",
        default=DEFAULT_ROLES_PATH,
        help="path used to locate the role to be executed (default=None)")

    roles_group.add_argument(
        "--role-vars",
        help="set of variables to be passed to the role at run time in the "
        "form of 'key1=value1 key2=value2 keyN=valueN'(default=None)")

    roles_group.add_argument(
        "--role-skip-facts",
        action="store_true",
        default=False,
        help="disable fact collection when the role is executed (default=False)"
    )

    # modules groups

    modules_group = parser.add_argument_group(
        "Ansible Module Options",
        "configuration options for directly executing Ansible modules")

    modules_group.add_argument(
        "-a",
        "--args",
        dest='module_args',
        help="set of arguments to be passed to the module at run time in the "
        "form of 'key1=value1 key2=value2 keyN=valueN'(default=None)")

    # playbook options
    playbook_group = parser.add_argument_group(
        "Ansible Playbook Options",
        "configuation options for executing Ansible playbooks")

    playbook_group.add_argument(
        "--process-isolation",
        dest="process_isolation",
        action="store_true",
        help="limits what directories on the filesystem the playbook run "
        "has access to, defaults to /tmp (default=False)")

    playbook_group.add_argument(
        "--process-isolation-executable",
        dest="process_isolation_executable",
        default="bwrap",
        help="process isolation executable that will be used. (default=bwrap)")

    playbook_group.add_argument(
        "--process-isolation-path",
        dest="process_isolation_path",
        default="/tmp",
        help="path that an isolated playbook run will use for staging. "
        "(default=/tmp)")

    playbook_group.add_argument(
        "--process-isolation-hide-paths",
        dest="process_isolation_hide_paths",
        nargs='*',
        help="list of paths on the system that should be hidden from the "
        "playbook run (default=None)")

    playbook_group.add_argument(
        "--process-isolation-show-paths",
        dest="process_isolation_show_paths",
        nargs='*',
        help="list of paths on the system that should be exposed to the "
        "playbook run (default=None)")

    playbook_group.add_argument(
        "--process-isolation-ro-paths",
        dest="process_isolation_ro_paths",
        nargs='*',
        help="list of paths on the system that should be exposed to the "
        "playbook run as read-only (default=None)")

    playbook_group.add_argument(
        "--directory-isolation-base-path",
        dest="directory_isolation_base_path",
        help="copies the project directory to a location in this directory "
        "to prevent multiple simultaneous executions from conflicting "
        "(default=None)")

    playbook_group.add_argument(
        "--resource-profiling",
        dest='resource_profiling',
        action="store_true",
        help="Records resource utilization during playbook execution")

    playbook_group.add_argument(
        "--resource-profiling-base-cgroup",
        dest='resource_profiling_base_cgroup',
        default="ansible-runner",
        help=
        "Top-level cgroup used to collect information on resource utilization. Defaults to ansible-runner"
    )

    playbook_group.add_argument(
        "--resource-profiling-cpu-poll-interval",
        dest='resource_profiling_cpu_poll_interval',
        default=0.25,
        help=
        "Interval (in seconds) between CPU polling for determining CPU usage. Defaults to 0.25"
    )

    playbook_group.add_argument(
        "--resource-profiling-memory-poll-interval",
        dest='resource_profiling_memory_poll_interval',
        default=0.25,
        help=
        "Interval (in seconds) between memory polling for determining memory usage. Defaults to 0.25"
    )

    playbook_group.add_argument(
        "--resource-profiling-pid-poll-interval",
        dest='resource_profiling_pid_poll_interval',
        default=0.25,
        help=
        "Interval (in seconds) between polling PID count for determining number of processes used. Defaults to 0.25"
    )

    playbook_group.add_argument(
        "--resource-profiling-results-dir",
        dest='resource_profiling_results_dir',
        help=
        "Directory where profiling data files should be saved. Defaults to None (profiling_data folder under private data dir is used in this case)."
    )

    if len(sys.argv) == 1:
        parser.print_usage()
        print_common_usage()
        parser.exit(status=0)

    args = parser.parse_args(sys_args)

    if args.command in ('start', 'run'):
        if args.hosts and not (args.module or args.role):
            parser.exit(
                status=1,
                message="The --hosts option can only be used with -m or -r\n")
        if not (args.module or args.role) and not args.playbook:
            parser.exit(
                status=1,
                message=
                "The -p option must be specified when not using -m or -r\n")

    output.configure()

    # enable or disable debug mode
    output.set_debug('enable' if args.debug else 'disable')

    # set the output logfile
    if args.logfile:
        output.set_logfile(args.logfile)

    output.debug('starting debug logging')

    # get the absolute path for start since it is a daemon
    args.private_data_dir = os.path.abspath(args.private_data_dir)

    pidfile = os.path.join(args.private_data_dir, 'pid')

    try:
        os.makedirs(args.private_data_dir, mode=0o700)
    except OSError as exc:
        if exc.errno == errno.EEXIST and os.path.isdir(args.private_data_dir):
            pass
        else:
            raise

    stderr_path = None
    context = None
    if args.command != 'run':
        stderr_path = os.path.join(args.private_data_dir, 'daemon.log')
        if not os.path.exists(stderr_path):
            os.close(
                os.open(stderr_path, os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR))

    if args.command in ('start', 'run'):

        if args.command == 'start':
            import daemon
            from daemon.pidfile import TimeoutPIDLockFile
            context = daemon.DaemonContext(pidfile=TimeoutPIDLockFile(pidfile))
        else:
            context = threading.Lock()

        with context:
            with role_manager(args) as args:
                run_options = dict(
                    private_data_dir=args.private_data_dir,
                    ident=args.ident,
                    binary=args.binary,
                    playbook=args.playbook,
                    module=args.module,
                    module_args=args.module_args,
                    host_pattern=args.hosts,
                    verbosity=args.v,
                    quiet=args.quiet,
                    rotate_artifacts=args.rotate_artifacts,
                    ignore_logging=False,
                    json_mode=args.json,
                    omit_event_data=args.omit_event_data,
                    only_failed_event_data=args.only_failed_event_data,
                    inventory=args.inventory,
                    forks=args.forks,
                    project_dir=args.project_dir,
                    artifact_dir=args.artifact_dir,
                    roles_path=[args.roles_path] if args.roles_path else None,
                    process_isolation=args.process_isolation,
                    process_isolation_executable=args.process_isolation_executable,
                    process_isolation_path=args.process_isolation_path,
                    process_isolation_hide_paths=args.process_isolation_hide_paths,
                    process_isolation_show_paths=args.process_isolation_show_paths,
                    process_isolation_ro_paths=args.process_isolation_ro_paths,
                    directory_isolation_base_path=args.directory_isolation_base_path,
                    resource_profiling=args.resource_profiling,
                    resource_profiling_base_cgroup=args.resource_profiling_base_cgroup,
                    resource_profiling_cpu_poll_interval=args.resource_profiling_cpu_poll_interval,
                    resource_profiling_memory_poll_interval=args.resource_profiling_memory_poll_interval,
                    resource_profiling_pid_poll_interval=args.resource_profiling_pid_poll_interval,
                    resource_profiling_results_dir=args.resource_profiling_results_dir,
                    limit=args.limit)
                if args.cmdline:
                    run_options['cmdline'] = args.cmdline

                try:
                    res = run(**run_options)
                except Exception:
                    exc = traceback.format_exc()
                    if stderr_path:
                        open(stderr_path, 'w+').write(exc)
                    else:
                        sys.stderr.write(exc)
                    return 1
            return (res.rc)

    try:
        with open(pidfile, 'r') as f:
            pid = int(f.readline())
    except IOError:
        return (1)

    if args.command == 'stop':
        Runner.handle_termination(pid, pidfile=pidfile)
        return (0)

    elif args.command == 'is-alive':
        try:
            os.kill(pid, signal.SIG_DFL)
            return (0)
        except OSError:
            return (1)
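This variant also accepts sys_args, so the role path can be exercised without the console entry point. A hedged sketch, assuming the main() above is importable as ansible_runner.__main__.main and that the private data dir contains roles/testrole (the directory and role name are placeholders):

from ansible_runner.__main__ import main

rc = main(['run', '/tmp/demo',          # placeholder private data dir
           '-r', 'testrole',            # placeholder role name
           '--hosts', 'localhost',
           '--role-vars', 'greeting=hello',
           '--role-skip-facts'])
print('exit code:', rc)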
Example #7
def main():
    parser = argparse.ArgumentParser(description='manage ansible execution')

    parser.add_argument('--version', action='version', version=VERSION)

    parser.add_argument('command', choices=['run', 'start', 'stop', 'is-alive'])

    parser.add_argument('private_data_dir',
                        help='Base directory containing Runner metadata (project, inventory, etc.)')

    group = parser.add_mutually_exclusive_group()

    group.add_argument("-p", "--playbook", default=DEFAULT_RUNNER_PLAYBOOK,
                       help="The name of the playbook to execute")

    group.add_argument("-r", "--role", default=DEFAULT_RUNNER_ROLE,
                       help="Invoke an Ansible role directly without a playbook")

    parser.add_argument("--hosts",
                        help="Define the set of hosts to execute against")

    parser.add_argument("-i", "--ident",
                        default=uuid4(),
                        help="An identifier that will be used when generating the"
                             "artifacts directory and can be used to uniquely identify a playbook run")

    parser.add_argument("--roles-path", default=DEFAULT_ROLES_PATH,
                        help="Path to the Ansible roles directory")

    parser.add_argument("--role-vars",
                        help="Variables to pass to the role at runtime")

    parser.add_argument("--role-skip-facts", action="store_true", default=False,
                        help="Disable fact collection when executing a role directly")
    parser.add_argument("--artifact-dir",
                        help="Optional Path for the artifact root directory, by default it is located inside the private data dir")

    parser.add_argument("--inventory",
                        help="Override the default inventory location in private_data_dir")

    parser.add_argument("-j", "--json", action="store_true",
                        help="Output the json event structure to stdout instead of Ansible output")

    parser.add_argument("-v", action="count",
                        help="Increase the verbosity with multiple v's (up to 5) of the ansible-playbook output")

    parser.add_argument("-q", "--quiet", action="store_true",
                        help="Disable all output")

    parser.add_argument("--cmdline",
                        help="Command line options to pass to ansible-playbook at execution time")
    parser.add_argument("--debug", action="store_true",
                        help="Enable debug output logging")

    parser.add_argument("--logfile",
                        help="Log output messages to a file")

    args = parser.parse_args()

    output.configure()

    # enable or disable debug mode
    output.set_debug('enable' if args.debug else 'disable')

    # set the output logfile
    if args.logfile:
        output.set_logfile(args.logfile)

    output.debug('starting debug logging')

    pidfile = os.path.join(args.private_data_dir, 'pid')

    try:
        os.makedirs(args.private_data_dir)
    except OSError as exc:
        if exc.errno == errno.EEXIST and os.path.isdir(args.private_data_dir):
            pass
        else:
            raise

    if args.command != 'run':
        stderr_path = os.path.join(args.private_data_dir, 'daemon.log')
        if not os.path.exists(stderr_path):
            os.close(os.open(stderr_path, os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR))
        stderr = open(stderr_path, 'w+')

    if args.command in ('start', 'run'):
        if args.role:
            role = {'name': args.role}
            if args.role_vars:
                role_vars = {}
                for item in shlex.split(args.role_vars):
                    key, value = item.split('=')
                    role_vars[key] = value
                role['vars'] = role_vars

            kwargs = dict(private_data_dir=args.private_data_dir,
                          json_mode=args.json)
            if args.artifact_dir:
                kwargs['artifact_dir'] = args.artifact_dir

            project_path = os.path.abspath(os.path.join(args.private_data_dir, 'project'))
            project_exists = os.path.exists(project_path)

            env_path = os.path.join(args.private_data_dir, 'env')
            env_exists = os.path.exists(env_path)

            envvars_path = os.path.join(args.private_data_dir, 'env/envvars')
            envvars_exists = os.path.exists(envvars_path)

            if args.cmdline:
                kwargs['cmdline'] = args.cmdline

            playbook = None
            tmpvars = None

            rc = 255
            errmsg = None

            try:
                play = [{'hosts': args.hosts if args.hosts is not None else "all",
                         'gather_facts': not args.role_skip_facts,
                         'roles': [role]}]

                filename = str(uuid4().hex)

                playbook = dump_artifact(json.dumps(play), project_path, filename)
                kwargs['playbook'] = playbook
                output.debug('using playbook file %s' % playbook)

                if args.inventory:
                    inventory_file = os.path.abspath(os.path.join(args.private_data_dir, 'inventory', args.inventory))
                    if not os.path.exists(inventory_file):
                        raise AnsibleRunnerException('location specified by --inventory does not exist')
                    kwargs['inventory'] = inventory_file
                    output.debug('using inventory file %s' % inventory_file)

                roles_path = args.roles_path or os.path.join(args.private_data_dir, 'roles')
                roles_path = os.path.abspath(roles_path)
                output.debug('setting ANSIBLE_ROLES_PATH to %s' % roles_path)

                envvars = {}
                if envvars_exists:
                    with open(envvars_path, 'rb') as f:
                        tmpvars = f.read()
                        envvars = safe_load(tmpvars)

                envvars['ANSIBLE_ROLES_PATH'] = roles_path
                kwargs['envvars'] = envvars

                res = run(**kwargs)
                rc = res.rc

            except AnsibleRunnerException as exc:
                errmsg = str(exc)

            finally:
                if not project_exists and os.path.exists(project_path):
                    logger.debug('removing dynamically generated project folder')
                    shutil.rmtree(project_path)
                elif playbook and os.path.isfile(playbook):
                    logger.debug('removing dynamically generated playbook')
                    os.remove(playbook)

                # if a previous envvars existed in the private_data_dir,
                # restore the original file contents
                if tmpvars:
                    with open(envvars_path, 'wb') as f:
                        f.write(tmpvars)
                elif not envvars_exists and os.path.exists(envvars_path):
                    logger.debug('removing dynamically generated envvars folder')
                    os.remove(envvars_path)

                # since ansible-runner created the env folder, remove it
                if not env_exists and os.path.exists(env_path):
                    logger.debug('removing dynamically generated env folder')
                    shutil.rmtree(env_path)

            if errmsg:
                print('ansible-runner: ERROR: %s' % errmsg)

            sys.exit(rc)

        elif args.command == 'start':
            import daemon
            from daemon.pidfile import TimeoutPIDLockFile
            context = daemon.DaemonContext(
                pidfile=TimeoutPIDLockFile(pidfile),
                stderr=stderr
            )
        else:
            context = threading.Lock()

        with context:
            run_options = dict(private_data_dir=args.private_data_dir,
                               ident=args.ident,
                               playbook=args.playbook,
                               verbosity=args.v,
                               quiet=args.quiet,
                               json_mode=args.json)

            if args.hosts is not None:
                run_options.update(inventory=args.hosts)

            if args.cmdline:
                run_options['cmdline'] = args.cmdline

            run(**run_options)
            sys.exit(0)

    try:
        with open(pidfile, 'r') as f:
            pid = int(f.readline())
    except IOError:
        sys.exit(1)

    if args.command == 'stop':
        try:
            with open(os.path.join(args.private_data_dir, 'args'), 'r') as args:
                Runner.handle_termination(pid, json.load(args), 'bwrap')
        except IOError:
            Runner.handle_termination(pid, [], 'bwrap')

    elif args.command == 'is-alive':
        try:
            os.kill(pid, signal.SIG_DFL)
            sys.exit(0)
        except OSError:
            sys.exit(1)
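For reference, the role branch above (with --role-vars 'foo=bar' and the default host pattern) serializes a play list equivalent to the following before dump_artifact() writes it into the project directory; this is reconstructed from the code, not captured output, and the role name and vars are placeholders:

import json

role = {'name': 'testrole', 'vars': {'foo': 'bar'}}  # placeholder role name and vars
play = [{'hosts': 'all',         # args.hosts defaults to "all"
         'gather_facts': True,   # True unless --role-skip-facts is given
         'roles': [role]}]
print(json.dumps(play, indent=2))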
Example #8
def main(sys_args=None):
    parser = argparse.ArgumentParser(description='manage ansible execution')

    parser.add_argument('--version', action='version', version=VERSION)

    parser.add_argument('command',
                        choices=['run', 'start', 'stop', 'is-alive'])

    parser.add_argument(
        'private_data_dir',
        help=
        'Base directory containing Runner metadata (project, inventory, etc.)')

    group = parser.add_mutually_exclusive_group()

    group.add_argument(
        "-m",
        "--module",
        default=DEFAULT_RUNNER_MODULE,
        help="Invoke an Ansible module directly without a playbook")

    group.add_argument("-p",
                       "--playbook",
                       default=DEFAULT_RUNNER_PLAYBOOK,
                       help="The name of the playbook to execute")

    group.add_argument(
        "-r",
        "--role",
        default=DEFAULT_RUNNER_ROLE,
        help="Invoke an Ansible role directly without a playbook")

    parser.add_argument("-b",
                        "--binary",
                        default=DEFAULT_RUNNER_BINARY,
                        help="The full path to ansible[-playbook] binary")

    parser.add_argument("--hosts",
                        help="Define the set of hosts to execute against")

    parser.add_argument(
        "-i",
        "--ident",
        default=uuid4(),
        help="An identifier that will be used when generating the"
        "artifacts directory and can be used to uniquely identify a playbook run"
    )

    parser.add_argument(
        "--rotate-artifacts",
        default=0,
        type=int,
        help="Automatically clean up old artifact directories after a given number has been created; the default is 0, which disables rotation"
    )

    parser.add_argument("--roles-path",
                        default=DEFAULT_ROLES_PATH,
                        help="Path to the Ansible roles directory")

    parser.add_argument("--role-vars",
                        help="Variables to pass to the role at runtime")

    parser.add_argument(
        "--role-skip-facts",
        action="store_true",
        default=False,
        help="Disable fact collection when executing a role directly")

    parser.add_argument(
        "--artifact-dir",
        help="Optional path for the artifact root directory; by default it is located inside the private data dir"
    )

    parser.add_argument(
        "--project-dir",
        help="Optional path for the location of the playbook content directory; by default this is 'project' inside the private data dir"
    )

    parser.add_argument(
        "--inventory",
        help="Override the default inventory location in private_data_dir")

    parser.add_argument("--forks", help="Set Ansible concurrency via Forks")

    parser.add_argument(
        "-j",
        "--json",
        action="store_true",
        help=
        "Output the json event structure to stdout instead of Ansible output")

    parser.add_argument(
        "-v",
        action="count",
        help="Increase the verbosity of the ansible-playbook output (repeat up to 5 times)"
    )

    parser.add_argument("-q",
                        "--quiet",
                        action="store_true",
                        help="Disable all output")

    parser.add_argument(
        "--cmdline",
        help=
        "Command line options to pass to ansible-playbook at execution time")

    parser.add_argument("--debug",
                        action="store_true",
                        help="Enable Runner debug output logging")

    parser.add_argument("--logfile", help="Log output messages to a file")

    parser.add_argument("-a",
                        "--args",
                        dest='module_args',
                        help="Module arguments")

    parser.add_argument(
        "--process-isolation",
        dest='process_isolation',
        action="store_true",
        help="Limit which directories on the filesystem the playbook run has access to; defaults to /tmp"
    )

    parser.add_argument(
        "--process-isolation-executable",
        dest='process_isolation_executable',
        default="bwrap",
        help="Process isolation executable that will be used. Defaults to bwrap"
    )

    parser.add_argument(
        "--process-isolation-path",
        dest='process_isolation_path',
        default="/tmp",
        help=
        "Path that an isolated playbook run will use for staging. Defaults to /tmp"
    )

    parser.add_argument(
        "--process-isolation-hide-paths",
        dest='process_isolation_hide_paths',
        help=
        "List of paths on the system that should be hidden from the playbook run"
    )

    parser.add_argument(
        "--process-isolation-show-paths",
        dest='process_isolation_show_paths',
        help=
        "List of paths on the system that should be exposed to the playbook run"
    )

    parser.add_argument(
        "--process-isolation-ro-paths",
        dest='process_isolation_ro_paths',
        help=
        "List of paths on the system that should be exposed to the playbook run as read-only"
    )

    parser.add_argument(
        "--directory-isolation-base-path",
        dest='directory_isolation_base_path',
        help=
        "Copies the project directory to a location in this directory to prevent multiple simultaneous executions from conflicting"
    )

    parser.add_argument(
        "--limit",
        help=
        "Matches ansible's ``--limit`` parameter to further constrain the inventory to be used"
    )

    args = parser.parse_args(sys_args)

    output.configure()

    # enable or disable debug mode
    output.set_debug('enable' if args.debug else 'disable')

    # set the output logfile
    if args.logfile:
        output.set_logfile(args.logfile)

    output.debug('starting debug logging')

    # get the absolute path for start since it is a daemon
    args.private_data_dir = os.path.abspath(args.private_data_dir)

    pidfile = os.path.join(args.private_data_dir, 'pid')

    try:
        os.makedirs(args.private_data_dir, mode=0o700)
    except OSError as exc:
        if exc.errno == errno.EEXIST and os.path.isdir(args.private_data_dir):
            pass
        else:
            raise

    stderr_path = None
    if args.command != 'run':
        stderr_path = os.path.join(args.private_data_dir, 'daemon.log')
        if not os.path.exists(stderr_path):
            os.close(
                os.open(stderr_path, os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR))

    if args.command in ('start', 'run'):

        if args.command == 'start':
            import daemon
            from daemon.pidfile import TimeoutPIDLockFile
            context = daemon.DaemonContext(pidfile=TimeoutPIDLockFile(pidfile))
        else:
            context = threading.Lock()

        with context:
            with role_manager(args) as args:
                run_options = dict(
                    private_data_dir=args.private_data_dir,
                    ident=args.ident,
                    binary=args.binary,
                    playbook=args.playbook,
                    module=args.module,
                    module_args=args.module_args,
                    host_pattern=args.hosts,
                    verbosity=args.v,
                    quiet=args.quiet,
                    rotate_artifacts=args.rotate_artifacts,
                    ignore_logging=False,
                    json_mode=args.json,
                    inventory=args.inventory,
                    forks=args.forks,
                    project_dir=args.project_dir,
                    roles_path=[args.roles_path] if args.roles_path else None,
                    process_isolation=args.process_isolation,
                    process_isolation_executable=args.process_isolation_executable,
                    process_isolation_path=args.process_isolation_path,
                    process_isolation_hide_paths=args.process_isolation_hide_paths,
                    process_isolation_show_paths=args.process_isolation_show_paths,
                    process_isolation_ro_paths=args.process_isolation_ro_paths,
                    directory_isolation_base_path=args.directory_isolation_base_path,
                    limit=args.limit)
                if args.cmdline:
                    run_options['cmdline'] = args.cmdline

                try:
                    res = run(**run_options)
                except Exception:
                    exc = traceback.format_exc()
                    if stderr_path:
                        with open(stderr_path, 'w+') as f:
                            f.write(exc)
                    else:
                        sys.stderr.write(exc)
                    return 1
            return res.rc

    try:
        with open(pidfile, 'r') as f:
            pid = int(f.readline())
    except IOError:
        return 1

    if args.command == 'stop':
        Runner.handle_termination(pid)
        return 0

    elif args.command == 'is-alive':
        try:
            os.kill(pid, signal.SIG_DFL)
            return 0
        except OSError:
            return 1
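
Because this variant accepts sys_args, the whole CLI can be exercised in-process rather than through a shell. A sketch, assuming main() is importable (upstream it lives in ansible_runner.__main__) and that /tmp/demo is an already prepared private data directory:

# Sketch: drive the CLI entry point with an explicit argument list instead of
# the real sys.argv. Import location assumed; paths and playbook hypothetical.
import sys

from ansible_runner.__main__ import main

rc = main(['run', '/tmp/demo', '-p', 'site.yml'])
sys.exit(rc)   # 'run' returns res.rc; 'stop' and 'is-alive' return 0 or 1
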
Beispiel #9
0
def main(sys_args=None):
    """Main entry point for ansible-runner executable

    When the ``ansible-runner`` command is executed, this function
    is the entry point that gets invoked.

    :param sys_args: List of arguments to be parsed by the parser
    :type sys_args: list

    :returns: an exit status suitable for passing to ``sys.exit()``
    :rtype: int
    """

    parser = AnsibleRunnerArgumentParser(
        prog='ansible-runner',
        description=
        "Use 'ansible-runner' (with no arguments) to see basic usage")
    subparser = parser.add_subparsers(
        help="Command to invoke",
        dest='command',
        description="COMMAND PRIVATE_DATA_DIR [ARGS]")
    add_args_to_parser(parser, DEFAULT_CLI_ARGS['generic_args'])
    subparser.required = True

    # positional options
    run_subparser = subparser.add_parser(
        'run', help="Run ansible-runner in the foreground")
    add_args_to_parser(run_subparser, DEFAULT_CLI_ARGS['positional_args'])
    add_args_to_parser(run_subparser, DEFAULT_CLI_ARGS['playbook_group'])
    start_subparser = subparser.add_parser(
        'start', help="Start an ansible-runner process in the background")
    add_args_to_parser(start_subparser, DEFAULT_CLI_ARGS['positional_args'])
    add_args_to_parser(start_subparser, DEFAULT_CLI_ARGS['playbook_group'])
    stop_subparser = subparser.add_parser(
        'stop',
        help="Stop an ansible-runner process that's running in the background")
    add_args_to_parser(stop_subparser, DEFAULT_CLI_ARGS['positional_args'])
    isalive_subparser = subparser.add_parser(
        'is-alive',
        help="Check if an ansible-runner process in the background is still running."
    )
    add_args_to_parser(isalive_subparser, DEFAULT_CLI_ARGS['positional_args'])

    # streaming commands
    transmit_subparser = subparser.add_parser(
        'transmit', help="Send a job to a remote ansible-runner process")
    add_args_to_parser(transmit_subparser, DEFAULT_CLI_ARGS['positional_args'])

    worker_subparser = subparser.add_parser(
        'worker', help="Execute work streamed from a controlling instance")
    worker_subcommands = worker_subparser.add_subparsers(
        help="Sub-sub command to invoke",
        dest='worker_subcommand',
        description="ansible-runner worker [sub-sub-command]",
    )
    cleanup_command = worker_subcommands.add_parser(
        'cleanup',
        help="Clean up private_data_dir patterns from prior jobs and supporting temporary folders.",
    )
    cleanup.add_cleanup_args(cleanup_command)

    worker_subparser.add_argument(
        "--private-data-dir",
        help="base directory containing the ansible-runner metadata "
        "(project, inventory, env, etc)",
    )

    worker_subparser.add_argument(
        "--worker-info",
        dest="worker_info",
        action="store_true",
        help=
        "show the execution node's Ansible Runner version along with its memory and CPU capacities"
    )
    worker_subparser.add_argument(
        "--delete",
        dest="delete_directory",
        action="store_true",
        default=False,
        help=
        ("Delete existing folder (and everything in it) in the location specified by --private-data-dir. "
         "The directory will be re-populated when the streamed data is unpacked. "
         "Using this will also assure that the directory is deleted when the job finishes."
         ))
    process_subparser = subparser.add_parser(
        'process',
        help=
        "Receive the output of remote ansible-runner work and distribute the results"
    )
    add_args_to_parser(process_subparser, DEFAULT_CLI_ARGS['positional_args'])

    # generic args for all subparsers
    add_args_to_parser(run_subparser, DEFAULT_CLI_ARGS['generic_args'])
    add_args_to_parser(start_subparser, DEFAULT_CLI_ARGS['generic_args'])
    add_args_to_parser(stop_subparser, DEFAULT_CLI_ARGS['generic_args'])
    add_args_to_parser(isalive_subparser, DEFAULT_CLI_ARGS['generic_args'])
    add_args_to_parser(transmit_subparser, DEFAULT_CLI_ARGS['generic_args'])
    add_args_to_parser(worker_subparser, DEFAULT_CLI_ARGS['generic_args'])
    add_args_to_parser(process_subparser, DEFAULT_CLI_ARGS['generic_args'])

    # runner group
    ansible_runner_group_options = (
        "Ansible Runner Options",
        "configuration options for controlling the ansible-runner "
        "runtime environment.",
    )
    base_runner_group = parser.add_argument_group(
        *ansible_runner_group_options)
    run_runner_group = run_subparser.add_argument_group(
        *ansible_runner_group_options)
    start_runner_group = start_subparser.add_argument_group(
        *ansible_runner_group_options)
    stop_runner_group = stop_subparser.add_argument_group(
        *ansible_runner_group_options)
    isalive_runner_group = isalive_subparser.add_argument_group(
        *ansible_runner_group_options)
    transmit_runner_group = transmit_subparser.add_argument_group(
        *ansible_runner_group_options)
    add_args_to_parser(base_runner_group, DEFAULT_CLI_ARGS['runner_group'])
    add_args_to_parser(run_runner_group, DEFAULT_CLI_ARGS['runner_group'])
    add_args_to_parser(start_runner_group, DEFAULT_CLI_ARGS['runner_group'])
    add_args_to_parser(stop_runner_group, DEFAULT_CLI_ARGS['runner_group'])
    add_args_to_parser(isalive_runner_group, DEFAULT_CLI_ARGS['runner_group'])
    add_args_to_parser(transmit_runner_group, DEFAULT_CLI_ARGS['runner_group'])

    # mutually exclusive group
    run_mutually_exclusive_group = run_subparser.add_mutually_exclusive_group()
    start_mutually_exclusive_group = start_subparser.add_mutually_exclusive_group(
    )
    stop_mutually_exclusive_group = stop_subparser.add_mutually_exclusive_group(
    )
    isalive_mutually_exclusive_group = isalive_subparser.add_mutually_exclusive_group(
    )
    transmit_mutually_exclusive_group = transmit_subparser.add_mutually_exclusive_group(
    )
    add_args_to_parser(run_mutually_exclusive_group,
                       DEFAULT_CLI_ARGS['mutually_exclusive_group'])
    add_args_to_parser(start_mutually_exclusive_group,
                       DEFAULT_CLI_ARGS['mutually_exclusive_group'])
    add_args_to_parser(stop_mutually_exclusive_group,
                       DEFAULT_CLI_ARGS['mutually_exclusive_group'])
    add_args_to_parser(isalive_mutually_exclusive_group,
                       DEFAULT_CLI_ARGS['mutually_exclusive_group'])
    add_args_to_parser(transmit_mutually_exclusive_group,
                       DEFAULT_CLI_ARGS['mutually_exclusive_group'])

    # ansible options
    ansible_options = (
        "Ansible Options",
        "control the ansible[-playbook] execution environment",
    )
    run_ansible_group = run_subparser.add_argument_group(*ansible_options)
    start_ansible_group = start_subparser.add_argument_group(*ansible_options)
    stop_ansible_group = stop_subparser.add_argument_group(*ansible_options)
    isalive_ansible_group = isalive_subparser.add_argument_group(
        *ansible_options)
    transmit_ansible_group = transmit_subparser.add_argument_group(
        *ansible_options)
    add_args_to_parser(run_ansible_group, DEFAULT_CLI_ARGS['ansible_group'])
    add_args_to_parser(start_ansible_group, DEFAULT_CLI_ARGS['ansible_group'])
    add_args_to_parser(stop_ansible_group, DEFAULT_CLI_ARGS['ansible_group'])
    add_args_to_parser(isalive_ansible_group,
                       DEFAULT_CLI_ARGS['ansible_group'])
    add_args_to_parser(transmit_ansible_group,
                       DEFAULT_CLI_ARGS['ansible_group'])

    # roles group
    roles_group_options = (
        "Ansible Role Options",
        "configuration options for directly executing Ansible roles",
    )
    run_roles_group = run_subparser.add_argument_group(*roles_group_options)
    start_roles_group = start_subparser.add_argument_group(
        *roles_group_options)
    stop_roles_group = stop_subparser.add_argument_group(*roles_group_options)
    isalive_roles_group = isalive_subparser.add_argument_group(
        *roles_group_options)
    transmit_roles_group = transmit_subparser.add_argument_group(
        *roles_group_options)
    add_args_to_parser(run_roles_group, DEFAULT_CLI_ARGS['roles_group'])
    add_args_to_parser(start_roles_group, DEFAULT_CLI_ARGS['roles_group'])
    add_args_to_parser(stop_roles_group, DEFAULT_CLI_ARGS['roles_group'])
    add_args_to_parser(isalive_roles_group, DEFAULT_CLI_ARGS['roles_group'])
    add_args_to_parser(transmit_roles_group, DEFAULT_CLI_ARGS['roles_group'])

    # modules group

    modules_group_options = (
        "Ansible Module Options",
        "configuration options for directly executing Ansible modules",
    )
    run_modules_group = run_subparser.add_argument_group(
        *modules_group_options)
    start_modules_group = start_subparser.add_argument_group(
        *modules_group_options)
    stop_modules_group = stop_subparser.add_argument_group(
        *modules_group_options)
    isalive_modules_group = isalive_subparser.add_argument_group(
        *modules_group_options)
    transmit_modules_group = transmit_subparser.add_argument_group(
        *modules_group_options)
    add_args_to_parser(run_modules_group, DEFAULT_CLI_ARGS['modules_group'])
    add_args_to_parser(start_modules_group, DEFAULT_CLI_ARGS['modules_group'])
    add_args_to_parser(stop_modules_group, DEFAULT_CLI_ARGS['modules_group'])
    add_args_to_parser(isalive_modules_group,
                       DEFAULT_CLI_ARGS['modules_group'])
    add_args_to_parser(transmit_modules_group,
                       DEFAULT_CLI_ARGS['modules_group'])

    # container group
    container_group_options = (
        "Ansible Container Options",
        "configuation options for executing Ansible playbooks",
    )
    run_container_group = run_subparser.add_argument_group(
        *container_group_options)
    start_container_group = start_subparser.add_argument_group(
        *container_group_options)
    stop_container_group = stop_subparser.add_argument_group(
        *container_group_options)
    isalive_container_group = isalive_subparser.add_argument_group(
        *container_group_options)
    transmit_container_group = transmit_subparser.add_argument_group(
        *container_group_options)
    add_args_to_parser(run_container_group,
                       DEFAULT_CLI_ARGS['container_group'])
    add_args_to_parser(start_container_group,
                       DEFAULT_CLI_ARGS['container_group'])
    add_args_to_parser(stop_container_group,
                       DEFAULT_CLI_ARGS['container_group'])
    add_args_to_parser(isalive_container_group,
                       DEFAULT_CLI_ARGS['container_group'])
    add_args_to_parser(transmit_container_group,
                       DEFAULT_CLI_ARGS['container_group'])

    args = parser.parse_args(sys_args)

    vargs = vars(args)

    if vargs.get('command') == 'worker':
        if vargs.get('worker_subcommand') == 'cleanup':
            cleanup.run_cleanup(vargs)
            parser.exit(0)
        if vargs.get('worker_info'):
            cpu = get_cpu_count()
            mem = get_mem_in_bytes()
            errors = []
            uuid = ensure_uuid()
            if not isinstance(mem, int):
                errors.append(mem)
                mem = None
            if "Could not find" in uuid:
                errors.append(uuid)
                uuid = None
            info = {
                'errors': errors,
                'mem_in_bytes': mem,
                'cpu_count': cpu,
                'runner_version': VERSION,
                'uuid': uuid,
            }
            print(safe_dump(info, default_flow_style=True))
            parser.exit(0)

        private_data_dir = vargs.get('private_data_dir')
        delete_directory = vargs.get('delete_directory', False)
        if private_data_dir and delete_directory:
            shutil.rmtree(private_data_dir, ignore_errors=True)
            register_for_cleanup(private_data_dir)
        elif private_data_dir is None:
            temp_private_dir = tempfile.mkdtemp()
            vargs['private_data_dir'] = temp_private_dir
            register_for_cleanup(temp_private_dir)

    if vargs.get('command') == 'process':
        # the process command is the final destination of artifacts; the user expects private_data_dir not to be cleaned up
        if not vargs.get('private_data_dir'):
            temp_private_dir = tempfile.mkdtemp()
            vargs['private_data_dir'] = temp_private_dir

    if vargs.get('command') in ('start', 'run', 'transmit'):
        if vargs.get('hosts') and not (vargs.get('module')
                                       or vargs.get('role')):
            parser.exit(
                status=1,
                message="The --hosts option can only be used with -m or -r\n")
        if not (vargs.get('module')
                or vargs.get('role')) and not vargs.get('playbook'):
            parser.exit(
                status=1,
                message=
                "The -p option must be specified when not using -m or -r\n")

    output.configure()

    # enable or disable debug mode
    output.set_debug('enable' if vargs.get('debug') else 'disable')

    # set the output logfile
    if ('logfile' in args) and vargs.get('logfile'):
        output.set_logfile(vargs.get('logfile'))

    output.debug('starting debug logging')

    # get the absolute path for start since it is a daemon
    vargs['private_data_dir'] = os.path.abspath(vargs.get('private_data_dir'))

    pidfile = os.path.join(vargs.get('private_data_dir'), 'pid')

    try:
        os.makedirs(vargs.get('private_data_dir'), mode=0o700)
    except OSError as exc:
        if exc.errno == errno.EEXIST and os.path.isdir(
                vargs.get('private_data_dir')):
            pass
        else:
            raise

    stderr_path = None
    context = None
    if vargs.get('command') not in ('run', 'transmit', 'worker'):
        stderr_path = os.path.join(vargs.get('private_data_dir'), 'daemon.log')
        if not os.path.exists(stderr_path):
            os.close(
                os.open(stderr_path, os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR))

    if vargs.get('command') in ('start', 'run', 'transmit', 'worker',
                                'process'):
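        # 'start' detaches into a daemon process guarded by the pidfile; every
        # other command here runs in the foreground with a plain lock as a
        # stand-in context manager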

        if vargs.get('command') == 'start':
            import daemon
            from daemon.pidfile import TimeoutPIDLockFile
            context = daemon.DaemonContext(pidfile=TimeoutPIDLockFile(pidfile))
        else:
            context = threading.Lock()

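        # transmit/worker/process run in streaming mode: the sub-command name
        # is handed to run() below as the 'streamer' keyword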
        streamer = None
        if vargs.get('command') in ('transmit', 'worker', 'process'):
            streamer = vargs.get('command')

        with context:
            with role_manager(vargs) as vargs:
                run_options = dict(
                    private_data_dir=vargs.get('private_data_dir'),
                    ident=vargs.get('ident'),
                    binary=vargs.get('binary'),
                    playbook=vargs.get('playbook'),
                    module=vargs.get('module'),
                    module_args=vargs.get('module_args'),
                    host_pattern=vargs.get('hosts'),
                    verbosity=vargs.get('v'),
                    quiet=vargs.get('quiet'),
                    rotate_artifacts=vargs.get('rotate_artifacts'),
                    ignore_logging=False,
                    json_mode=vargs.get('json'),
                    omit_event_data=vargs.get('omit_event_data'),
                    only_failed_event_data=vargs.get('only_failed_event_data'),
                    inventory=vargs.get('inventory'),
                    forks=vargs.get('forks'),
                    project_dir=vargs.get('project_dir'),
                    artifact_dir=vargs.get('artifact_dir'),
                    roles_path=[vargs.get('roles_path')]
                    if vargs.get('roles_path') else None,
                    process_isolation=vargs.get('process_isolation'),
                    process_isolation_executable=vargs.get(
                        'process_isolation_executable'),
                    process_isolation_path=vargs.get('process_isolation_path'),
                    process_isolation_hide_paths=vargs.get(
                        'process_isolation_hide_paths'),
                    process_isolation_show_paths=vargs.get(
                        'process_isolation_show_paths'),
                    process_isolation_ro_paths=vargs.get(
                        'process_isolation_ro_paths'),
                    container_image=vargs.get('container_image'),
                    container_volume_mounts=vargs.get(
                        'container_volume_mounts'),
                    container_options=vargs.get('container_options'),
                    directory_isolation_base_path=vargs.get(
                        'directory_isolation_base_path'),
                    resource_profiling=vargs.get('resource_profiling'),
                    resource_profiling_base_cgroup=vargs.get(
                        'resource_profiling_base_cgroup'),
                    resource_profiling_cpu_poll_interval=vargs.get(
                        'resource_profiling_cpu_poll_interval'),
                    resource_profiling_memory_poll_interval=vargs.get(
                        'resource_profiling_memory_poll_interval'),
                    resource_profiling_pid_poll_interval=vargs.get(
                        'resource_profiling_pid_poll_interval'),
                    resource_profiling_results_dir=vargs.get(
                        'resource_profiling_results_dir'),
                    cmdline=vargs.get('cmdline'),
                    limit=vargs.get('limit'),
                    streamer=streamer)
                try:
                    res = run(**run_options)
                except Exception:
                    exc = traceback.format_exc()
                    if stderr_path:
                        with open(stderr_path, 'w+') as f:
                            f.write(exc)
                    else:
                        sys.stderr.write(exc)
                    return 1
            return res.rc

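    # 'stop' and 'is-alive' act on the PID recorded by a previous 'start';
    # os.kill with signal.SIG_DFL (numerically 0) only probes whether that
    # process still exists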
    try:
        with open(pidfile, 'r') as f:
            pid = int(f.readline())
    except IOError:
        return 1

    if vargs.get('command') == 'stop':
        Runner.handle_termination(pid, pidfile=pidfile)
        return 0

    elif vargs.get('command') == 'is-alive':
        try:
            os.kill(pid, signal.SIG_DFL)
            return 0
        except OSError:
            return 1
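
This version also wires in the streaming sub-commands (transmit, worker, process). Of these, worker --worker-info is the easiest to exercise on its own: it prints the node's capacity as YAML and exits through parser.exit(). A sketch, with the same assumed import location as above:

# Sketch: query this node's capacity through the 'worker --worker-info' branch
# shown above. parser.exit() raises SystemExit, so catch it to keep control.
from ansible_runner.__main__ import main   # assumed import location

try:
    main(['worker', '--worker-info'])      # prints errors/mem/cpu/version/uuid as YAML
except SystemExit as exc:
    print('worker-info exited with status', exc.code)   # 0 after the YAML dump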