Example #1
def main():
    """Query Junos device and interface with Ansible playbook."""
    # define arguments from Ansible
    module = AnsibleModule(argument_spec=dict(
        host=dict(required=True),
        filename=dict(required=False, default=None),
        user=dict(required=False, default=os.getenv('USER')),
        passwd=dict(required=False, default=None, no_log=True),
        max_commits=dict(required=False, type='int', default=None)))

    # copy playbook arguments into local variables
    host = module.params['host']
    filename = module.params['filename']
    username = module.params['user']
    password = module.params['passwd']
    max_commits = module.params['max_commits']

    # determine if module should generate output file
    gen_file = filename is not None

    # instantiate JunosCommits and run
    jc = JunosCommits(host, gen_file, username, password, max_commits)
    try:
        jc.run()
        if gen_file:
            module.atomic_move(jc.filespec, filename)
    except Exception as err:
        module.fail_json(msg=str(err))

    module.exit_json(changed=False, commits=jc.commits)
Example #2
def main():
    module = AnsibleModule(
        argument_spec={
            'dest': {'type': 'path'},
            'call_fs_attributes': {'type': 'bool', 'default': True},
        },
        add_file_common_args=True,
    )

    results = {}

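    # write through a scratch temp file, then atomically move it into place at dest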
    with tempfile.NamedTemporaryFile(delete=False) as tf:
        file_args = module.load_file_common_arguments(module.params)
        module.atomic_move(tf.name, module.params['dest'])

        if module.params['call_fs_attributes']:
            results['changed'] = module.set_fs_attributes_if_different(file_args, True)

    module.exit_json(**results)
Example #3
def run_module():
    module = AnsibleModule(
        argument_spec=dict(),
        supports_check_mode=True,
    )
    result = dict(
        changed=False,
        msg='',
    )
    if module.check_mode:
        module.exit_json(**result)
    else:
        try:
            docker_dir = os.path.expanduser('~/.docker')
            docker_config_file = os.path.join(docker_dir, 'config.json')
            if not os.path.exists(docker_config_file):
                docker_config = {}
            else:
                with open(docker_config_file) as fp:
                    docker_config = json.load(fp)
            if 'ecr-login' != docker_config.get('credsStore'):
                docker_config['credsStore'] = 'ecr-login'
                if not os.path.exists(docker_dir):
                    os.makedirs(docker_dir)
                # write the new config to a temp file; if anything fails before
                # the atomic move, remove the temp file instead of leaking it
                tmp_file = tempfile.NamedTemporaryFile(mode='w',
                                                       dir=docker_dir,
                                                       delete=False)
                try:
                    with tmp_file:
                        json.dump(docker_config, tmp_file, indent=4)
                        print(file=tmp_file)
                except Exception:
                    os.unlink(tmp_file.name)
                    raise
                module.atomic_move(tmp_file.name, docker_config_file)
                result['changed'] = True
                result['msg'] = 'Enabled amazon-ecr-credential-helper'
        except Exception as e:
            module.fail_json(msg="error: %s" % to_native(e))
        else:
            module.exit_json(**result)
Example #4
def main():

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(type='path'),
            original_basename=dict(type='str'),  # used to handle 'dest is a directory' via template, a slight hack
            content=dict(type='str', no_log=True),
            dest=dict(type='path', required=True),
            backup=dict(type='bool', default=False),
            force=dict(type='bool', default=True, aliases=['thirsty']),
            validate=dict(type='str'),
            directory_mode=dict(type='raw'),
            remote_src=dict(type='bool'),
            local_follow=dict(type='bool'),
            checksum=dict(),
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    src = module.params['src']
    b_src = to_bytes(src, errors='surrogate_or_strict')
    dest = module.params['dest']
    # Make sure we always have a directory component for later processing
    if os.path.sep not in dest:
        dest = '.{0}{1}'.format(os.path.sep, dest)
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    backup = module.params['backup']
    force = module.params['force']
    original_basename = module.params.get('original_basename', None)
    validate = module.params.get('validate', None)
    follow = module.params['follow']
    remote_src = module.params['remote_src']
    checksum = module.params['checksum']

    if not os.path.exists(b_src):
        module.fail_json(msg="Source %s not found" % (src))
    if not os.access(b_src, os.R_OK):
        module.fail_json(msg="Source %s not readable" % (src))
    if os.path.isdir(b_src):
        module.fail_json(msg="Remote copy does not support recursive copy of directory: %s" % (src))

    # Preserve is usually handled in the action plugin but mode + remote_src has to be done on the
    # remote host
    if module.params['mode'] == 'preserve':
        module.params['mode'] = '0%03o' % stat.S_IMODE(os.stat(b_src).st_mode)
    mode = module.params['mode']

    checksum_src = module.sha1(src)
    checksum_dest = None
    # Backwards compat only.  This will be None in FIPS mode
    try:
        md5sum_src = module.md5(src)
    except ValueError:
        md5sum_src = None

    changed = False

    if checksum and checksum_src != checksum:
        module.fail_json(
            msg='Copied file does not match the expected checksum. Transfer failed.',
            checksum=checksum_src,
            expected_checksum=checksum
        )

    # Special handling for recursive copy - create intermediate dirs
    if original_basename and dest.endswith(os.sep):
        dest = os.path.join(dest, original_basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')
        dirname = os.path.dirname(dest)
        b_dirname = to_bytes(dirname, errors='surrogate_or_strict')
        if not os.path.exists(b_dirname) and os.path.isabs(b_dirname):
            (pre_existing_dir, new_directory_list) = split_pre_existing_dir(dirname)
            os.makedirs(b_dirname)
            directory_args = module.load_file_common_arguments(module.params)
            directory_mode = module.params["directory_mode"]
            if directory_mode is not None:
                directory_args['mode'] = directory_mode
            else:
                directory_args['mode'] = None
            adjust_recursive_directory_permissions(pre_existing_dir, new_directory_list, module, directory_args, changed)

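    # dest is a directory: copy into it, keeping the source (or original) basename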
    if os.path.isdir(b_dest):
        basename = os.path.basename(src)
        if original_basename:
            basename = original_basename
        dest = os.path.join(dest, basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')

    if os.path.exists(b_dest):
        if os.path.islink(b_dest) and follow:
            b_dest = os.path.realpath(b_dest)
            dest = to_native(b_dest, errors='surrogate_or_strict')
        if not force:
            module.exit_json(msg="file already exists", src=src, dest=dest, changed=False)
        if os.access(b_dest, os.R_OK):
            checksum_dest = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(b_dest)):
            try:
                # os.path.exists() can return false in some
                # circumstances where the directory does not have
                # the execute bit for the current user set, in
                # which case the stat() call will raise an OSError
                os.stat(os.path.dirname(b_dest))
            except OSError as e:
                if "permission denied" in to_native(e).lower():
                    module.fail_json(msg="Destination directory %s is not accessible" % (os.path.dirname(dest)))
            module.fail_json(msg="Destination directory %s does not exist" % (os.path.dirname(dest)))

    if not os.access(os.path.dirname(b_dest), os.W_OK) and not module.params['unsafe_writes']:
        module.fail_json(msg="Destination %s not writable" % (os.path.dirname(dest)))

    backup_file = None
    if checksum_src != checksum_dest or os.path.islink(b_dest):
        if not module.check_mode:
            try:
                if backup:
                    if os.path.exists(b_dest):
                        backup_file = module.backup_local(dest)
                # allow for conversion from symlink.
                if os.path.islink(b_dest):
                    os.unlink(b_dest)
                    open(b_dest, 'w').close()
                if validate:
                    # if we have a mode, make sure we set it on the temporary
                    # file source as some validations may require it
                    # FIXME: should we do the same for owner/group here too?
                    if mode is not None:
                        module.set_mode_if_different(src, mode, False)
                    if "%s" not in validate:
                        module.fail_json(msg="validate must contain %%s: %s" % (validate))
                    (rc, out, err) = module.run_command(validate % src)
                    if rc != 0:
                        module.fail_json(msg="failed to validate", exit_status=rc, stdout=out, stderr=err)
                b_mysrc = b_src
                if remote_src:
                    _, b_mysrc = tempfile.mkstemp(dir=os.path.dirname(b_dest))
                    shutil.copy2(b_src, b_mysrc)
                module.atomic_move(b_mysrc, dest, unsafe_writes=module.params['unsafe_writes'])
            except IOError:
                module.fail_json(msg="failed to copy: %s to %s" % (src, dest), traceback=traceback.format_exc())
        changed = True
    else:
        changed = False

    res_args = dict(
        dest=dest, src=src, md5sum=md5sum_src, checksum=checksum_src, changed=changed
    )
    if backup_file:
        res_args['backup_file'] = backup_file

    module.params['dest'] = dest
    if not module.check_mode:
        file_args = module.load_file_common_arguments(module.params)
        res_args['changed'] = module.set_fs_attributes_if_different(file_args, res_args['changed'])

    module.exit_json(**res_args)
Example #5
def main():
    module = AnsibleModule(
        argument_spec=dict(
            src=dict(default=None, required=False, type='path'),
            dest=dict(default='/etc/network/interfaces',
                      required=False,
                      type='path'),
            iface=dict(required=False),
            method=dict(default=None, required=False),
            option=dict(required=False),
            value=dict(required=False),
            backup=dict(default=False, type='bool'),
            backupdir=dict(default=None, required=False, type='path'),
            state=dict(default='present',
                       choices=['present', 'absent', 'move', 'bridge']),
            bridge_options=dict(default=None, required=False, type='list'),
        ),
        add_file_common_args=True,
        supports_check_mode=True)

    src = module.params['src']
    dest = module.params['dest']
    iface = module.params['iface']
    method = module.params['method']
    option = module.params['option']
    value = module.params['value']
    backup = module.params['backup']
    backupdir = module.params['backupdir']
    state = module.params['state']
    bridge_options = module.params['bridge_options']

    if src is None:
        src = dest

    if option is not None and iface is None:
        module.fail_json(msg="Interface must be set if option is defined")

    if option is not None and state == "present" and value is None:
        module.fail_json(
            msg="Value must be set if option is defined and state is 'present'"
        )

    if state == "move" and (iface is None or option is not None
                            or value is not None):
        module.fail_json(
            msg="Iface must be set, and option and value must not be set, "
                "when state is 'move'")

    if state == "bridge" and (iface is None or option is not None
                              or value is not None or bridge_options is None):
        module.fail_json(
            msg="Iface and bridge_options must be set, and option and value "
                "must not be set, when state is 'bridge'")

    lines, ifaces = read_interfaces_file(module, src)

    changed = False

    if option is not None:
        changed, lines, _ = setInterfaceOption(module, lines, iface, ifaces,
                                               option, value, state)
    elif state in ('absent', 'move', 'bridge'):
        changed, lines, removed_lines = setInterfaceOption(
            module, lines, iface, ifaces, None, None, state, bridge_options,
            method)
        if state == 'bridge' and src == dest:
            lines = lines + removed_lines
            removed_lines = []
    elif (state == 'present' and option is None and value is None
          and iface is not None and iface not in ifaces):
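        # append an empty line, an 'auto' line, and a DHCP 'iface' stanza for the new interface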
        for x in [
            {'line': '\n', 'line_type': 'empty'},
            {'comment': '# Added secondary network interface\n',
             'line': 'auto ' + iface + '\n',
             'iface': iface,
             'line_type': 'auto'},
            {'params': {'address_family': 'inet',
                        'post-up': [],
                        'auto': True,
                        'up': [],
                        'method': 'dhcp',
                        'down': [],
                        'pre-up': []},
             'line': 'iface ' + iface + ' inet dhcp\n',
             'iface': iface,
             'line_type': 'iface'},
        ]:
            lines.append(x)
        changed = True

    if changed:
        _, ifaces = read_interfaces_lines(
            module, [d['line'] for d in lines if 'line' in d])

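    # persist the changes: optionally relocate the backup, then rewrite the file(s)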
    if changed and not module.check_mode:
        if backup:
            backup_dest = module.backup_local(src)
            if backupdir:
                backupdir_infos = os.stat(os.path.dirname(backup_dest))
                if not os.path.exists(backupdir):
                    os.makedirs(backupdir, backupdir_infos.st_mode)
                    os.chown(backupdir, backupdir_infos.st_uid,
                             backupdir_infos.st_gid)
                module.atomic_move(
                    backup_dest,
                    os.path.join(backupdir, os.path.basename(backup_dest)))
        if src == dest:
            write_changes(module, select_lines_and_comments(lines), dest)
        else:
            if backup and os.path.exists(dest) and os.path.isfile(dest):
                module.backup_local(dest)
            write_changes(module, select_lines_and_comments(lines), src)
            write_changes(module, select_lines_and_comments(removed_lines),
                          dest)

    module.exit_json(dest=dest, changed=changed, ifaces=ifaces)
Example #6
def main():

    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=True, type='str'),
            type=dict(required=True,
                      choices=['account', 'auth',
                               'password', 'session']),
            control=dict(required=True, type='str'),
            module_path=dict(required=True, type='str'),
            new_type=dict(required=False,
                          choices=['account', 'auth',
                                   'password', 'session']),
            new_control=dict(required=False, type='str'),
            new_module_path=dict(required=False, type='str'),
            module_arguments=dict(required=False, type='list'),
            state=dict(required=False, default="updated",
                       choices=['before', 'after', 'updated',
                                'args_absent', 'args_present', 'absent']),
            path=dict(required=False, default='/etc/pam.d', type='str'),
            backup=dict(default=False, type='bool')
        ),
        supports_check_mode=True,
        required_if=[
            ("state", "args_present", ["module_arguments"]),
            ("state", "args_absent", ["module_arguments"]),
            ("state", "before", ["new_control"]),
            ("state", "before", ["new_type"]),
            ("state", "before", ["new_module_path"]),
            ("state", "after", ["new_control"]),
            ("state", "after", ["new_type"]),
            ("state", "after", ["new_module_path"])

        ]
    )
    content = str()
    fname = os.path.join(module.params["path"], module.params["name"])
    backupdest = ""
    # Open the file and read the content or fail
    try:
        with open(fname, 'r') as service_file_obj:
            content = service_file_obj.read()
    except IOError as e:
        # If unable to read the file, fail out
        module.fail_json(msg='Unable to open/read PAM module file %s '
                             'with error %s.' % (fname, str(e)))

    # Assuming we didn't fail, create the service
    service = PamdService(content)
    # Set the action
    action = module.params['state']

    # Take action
    if action == 'updated':
        changes = service.update_rule(module.params['type'], module.params['control'], module.params['module_path'],
                                      module.params['new_type'], module.params['new_control'], module.params['new_module_path'],
                                      module.params['module_arguments'])
    elif action == 'before':
        changes = service.insert_before(module.params['type'], module.params['control'], module.params['module_path'],
                                        module.params['new_type'], module.params['new_control'], module.params['new_module_path'],
                                        module.params['module_arguments'])
    elif action == 'after':
        changes = service.insert_after(module.params['type'], module.params['control'], module.params['module_path'],
                                       module.params['new_type'], module.params['new_control'], module.params['new_module_path'],
                                       module.params['module_arguments'])
    elif action == 'args_absent':
        changes = service.remove_module_arguments(module.params['type'], module.params['control'], module.params['module_path'],
                                                  module.params['module_arguments'])
    elif action == 'args_present':
        changes = service.add_module_arguments(module.params['type'], module.params['control'], module.params['module_path'],
                                               module.params['module_arguments'])
    elif action == 'absent':
        changes = service.remove(module.params['type'], module.params['control'], module.params['module_path'])

    valid, msg = service.validate()

    # If the module is not valid (meaning one of the rules is invalid), we will fail
    if not valid:
        module.fail_json(msg=msg)

    # If not check mode and something changed, backup the original if necessary then write out the file or fail
    if not module.check_mode and changes > 0:
        pamd_file = os.path.realpath(fname)
        # First, create a backup if desired.
        if module.params['backup']:
            backupdest = module.backup_local(fname)
            # log instead of print(): stdout would corrupt the module's JSON reply
            module.log(msg="backup dest: %s" % backupdest)
        try:
            temp_file = NamedTemporaryFile(mode='w', delete=False)
            with temp_file:
                temp_file.write(str(service))
        except IOError:
            module.fail_json(msg='Unable to create temporary file %s' % temp_file.name)

        module.atomic_move(temp_file.name, pamd_file)

    facts = {}
    facts['pamd'] = {'changed': changes > 0,
                     'change_count': changes,
                     'action': action,
                     'backupdest': backupdest}

    module.exit_json(changed=changes > 0, ansible_facts=facts)
Example #7
def main():
    from ansible.module_utils.basic import AnsibleModule
    module = AnsibleModule(
        argument_spec={
            'name': {
                'required': True,
            },
            'state': {
                'required': False,
                'default': 'present',
                'choices': ['absent', 'present'],
            },
            'value': {
                'required': False,
                'default': None,
            },
            'edit_only': {
                'type': 'bool',
                'required': False,
                'default': False,
            },
            'path': {
                'required': False,
                'default': '',
            },
            'bootloader': {
                'required': False,
                'choices': list(BOOTLOADERS.keys()),
            },
            'backup': {
                'type': 'bool',
                'required': False,
                'default': False,
            },
        },
        supports_check_mode=True,
    )

    params = module.params

    path = params['path'].strip()
    bootloader = params['bootloader']
    if path and not bootloader:
        try:
            bootloader = PATHS[path]
        except KeyError:
            module.fail_json(
                msg=("If the `path` argument is given,"
                     " then `bootloader` must also be given."),
                changed=False,
            )
    if not path:
        try:
            path, bootloader = find_bootloader_config()
        except LookupError as err:
            module.fail_json(msg=str(err), changed=False)

    # seed the result dict in the object
    result = {
        'changed': False,
        'path': path,
        'bootloader': bootloader,
        'edited': False,
        'installed': False,
    }

    try:
        handler = BOOTLOADERS[bootloader](module)
    except KeyError:
        module.fail_json(
            msg="Unknown value for `bootloader` argument: {bootloader}".format(
                bootloader=bootloader),
            **result)

    # if the user is working with this module in only check mode we do not
    # want to make any changes to the environment, just return the current
    # state with no modifications
    if module.check_mode:
        module.exit_json(**result)

    # read in config file contents
    try:
        with open(path, 'r') as config_file:
            current_config = config_file.read()
    except (OSError, IOError) as err:
        module.fail_json(msg=("Cannot read file `{path}`: {err}".format(
            path=path, err=err)),
                         **result)

    # apply requested changes
    new_config = handler.edit(current_config, params['state'], params['name'],
                              params['value'])

    # exit early if no changes
    if new_config == current_config:
        module.exit_json(**result)

    # make a backup if requested
    if params['backup']:
        result['backup'] = module.backup_local(path)

    # write out changed config
    try:
        with NamedTemporaryFile(mode='w',
                                dir=dirname(path),
                                prefix=(basename(path) + '.'),
                                suffix='.tmp',
                                delete=False) as edited:
            edited.write(new_config)
        module.atomic_move(edited.name, path)
        result['changed'] = True
    except (OSError, IOError) as err:
        module.fail_json(msg=("Cannot write back file `{path}`: {err}".format(
            path=path, err=err)),
                         **result)
    finally:
        module.cleanup(edited.name)

    result['edited'] = True

    # ensure new config is used by the bootloader next time
    result['installed'] = False
    if not params['edit_only']:
        try:
            install_result = handler.install(path)
            result.update(install_result)
            result['installed'] = True
            result['changed'] = True
        except Exception as err:
            module.fail_json(
                msg=("Cannot install new config file `{path}`: {err}".format(
                    path=path, err=err)),
                **result)

    # all done
    module.exit_json(**result)
Example #8
def main():
    pam_items = [
        'core', 'data', 'fsize', 'memlock', 'nofile', 'rss', 'stack', 'cpu',
        'nproc', 'as', 'maxlogins', 'maxsyslogins', 'priority', 'locks',
        'sigpending', 'msgqueue', 'nice', 'rtprio', 'chroot'
    ]

    pam_types = ['soft', 'hard', '-']

    limits_conf = '/etc/security/limits.conf'

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            domain=dict(required=True, type='str'),
            limit_type=dict(required=True, type='str', choices=pam_types),
            limit_item=dict(required=True, type='str', choices=pam_items),
            value=dict(required=True, type='str'),
            use_max=dict(default=False, type='bool'),
            use_min=dict(default=False, type='bool'),
            backup=dict(default=False, type='bool'),
            dest=dict(default=limits_conf, type='str'),
            comment=dict(required=False, default='', type='str')))

    domain = module.params['domain']
    limit_type = module.params['limit_type']
    limit_item = module.params['limit_item']
    value = module.params['value']
    use_max = module.params['use_max']
    use_min = module.params['use_min']
    backup = module.params['backup']
    limits_conf = module.params['dest']
    new_comment = module.params['comment']

    changed = False

    if os.path.isfile(limits_conf):
        if not os.access(limits_conf, os.W_OK):
            module.fail_json(msg="%s is not writable. Use sudo" % limits_conf)
    else:
        limits_conf_dir = os.path.dirname(limits_conf)
        if os.path.isdir(limits_conf_dir) and os.access(
                limits_conf_dir, os.W_OK):
            open(limits_conf, 'a').close()
            changed = True
        else:
            module.fail_json(
                msg="directory %s is not writable (check presence, access "
                    "rights, use sudo)" % limits_conf_dir)

    if use_max and use_min:
        module.fail_json(
            msg="Cannot use use_min and use_max at the same time.")

    if not (value in ['unlimited', 'infinity', '-1'] or value.isdigit()):
        module.fail_json(
            msg="Argument 'value' can be one of 'unlimited', 'infinity', '-1' "
                "or a positive number. Refer to manual pages for more details.")

    # Backup
    if backup:
        backup_file = module.backup_local(limits_conf)

    space_pattern = re.compile(r'\s+')

    message = ''
    f = open(limits_conf, 'rb')
    # Tempfile
    nf = tempfile.NamedTemporaryFile(mode='w+', delete=False)

    found = False
    new_value = value

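    # copy the file line by line into the tempfile, rewriting the matching entry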
    for line in f:
        line = to_native(line, errors='surrogate_or_strict')
        if line.startswith('#'):
            nf.write(line)
            continue

        newline = re.sub(space_pattern, ' ', line).strip()
        if not newline:
            nf.write(line)
            continue

        # Remove comment in line
        newline = newline.split('#', 1)[0]
        try:
            old_comment = line.split('#', 1)[1]
        except Exception:
            old_comment = ''

        newline = newline.rstrip()

        if not new_comment:
            new_comment = old_comment

        line_fields = newline.split(' ')

        if len(line_fields) != 4:
            nf.write(line)
            continue

        line_domain = line_fields[0]
        line_type = line_fields[1]
        line_item = line_fields[2]
        actual_value = line_fields[3]

        if not (actual_value in ['unlimited', 'infinity', '-1']
                or actual_value.isdigit()):
            module.fail_json(
                msg="Invalid configuration of '%s'. Current value of %s is "
                    "unsupported." % (limits_conf, line_item))

        # Found the line
        if line_domain == domain and line_type == limit_type and line_item == limit_item:
            found = True
            if value == actual_value:
                message = line
                nf.write(line)
                continue

            actual_value_unlimited = actual_value in [
                'unlimited', 'infinity', '-1'
            ]
            value_unlimited = value in ['unlimited', 'infinity', '-1']

            if use_max:
                if value.isdigit() and actual_value.isdigit():
                    new_value = str(max(int(value), int(actual_value)))
                elif actual_value_unlimited:
                    new_value = actual_value
                else:
                    new_value = value

            if use_min:
                if value.isdigit() and actual_value.isdigit():
                    new_value = str(min(int(value), int(actual_value)))
                elif value_unlimited:
                    new_value = actual_value
                else:
                    new_value = value

            # Change line only if value has changed
            if new_value != actual_value:
                changed = True
                if new_comment:
                    new_comment = "\t#" + new_comment
                new_limit = domain + "\t" + limit_type + "\t" + limit_item + "\t" + new_value + new_comment + "\n"
                message = new_limit
                nf.write(new_limit)
            else:
                message = line
                nf.write(line)
        else:
            nf.write(line)

    if not found:
        changed = True
        if new_comment:
            new_comment = "\t#" + new_comment
        new_limit = domain + "\t" + limit_type + "\t" + limit_item + "\t" + new_value + new_comment + "\n"
        message = new_limit
        nf.write(new_limit)

    f.close()
    nf.flush()

    # Atomically replace the original file with the tempfile
    module.atomic_move(nf.name, limits_conf)

    nf.close()

    res_args = dict(changed=changed, msg=message)

    if backup:
        res_args['backup_file'] = backup_file

    module.exit_json(**res_args)
Example #9
def main():

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(type='path'),
            _original_basename=dict(type='str'),  # used to handle 'dest is a directory' via template, a slight hack
            content=dict(type='str', no_log=True),
            dest=dict(type='path', required=True),
            backup=dict(type='bool', default=False),
            force=dict(type='bool', default=True, aliases=['thirsty']),
            validate=dict(type='str'),
            directory_mode=dict(type='raw'),
            remote_src=dict(type='bool'),
            local_follow=dict(type='bool'),
            checksum=dict(),
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    src = module.params['src']
    b_src = to_bytes(src, errors='surrogate_or_strict')
    dest = module.params['dest']
    # Make sure we always have a directory component for later processing
    if os.path.sep not in dest:
        dest = '.{0}{1}'.format(os.path.sep, dest)
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    backup = module.params['backup']
    force = module.params['force']
    _original_basename = module.params.get('_original_basename', None)
    validate = module.params.get('validate', None)
    follow = module.params['follow']
    local_follow = module.params['local_follow']
    mode = module.params['mode']
    owner = module.params['owner']
    group = module.params['group']
    remote_src = module.params['remote_src']
    checksum = module.params['checksum']

    if not os.path.exists(b_src):
        module.fail_json(msg="Source %s not found" % (src))
    if not os.access(b_src, os.R_OK):
        module.fail_json(msg="Source %s not readable" % (src))

    # Preserve is usually handled in the action plugin but mode + remote_src has to be done on the
    # remote host
    if module.params['mode'] == 'preserve':
        module.params['mode'] = '0%03o' % stat.S_IMODE(os.stat(b_src).st_mode)
    mode = module.params['mode']

    checksum_dest = None

    if os.path.isfile(src):
        checksum_src = module.sha1(src)
    else:
        checksum_src = None

    # Backwards compat only.  This will be None in FIPS mode
    try:
        if os.path.isfile(src):
            md5sum_src = module.md5(src)
        else:
            md5sum_src = None
    except ValueError:
        md5sum_src = None

    changed = False

    if checksum and checksum_src != checksum:
        module.fail_json(
            msg='Copied file does not match the expected checksum. Transfer failed.',
            checksum=checksum_src,
            expected_checksum=checksum)

    # Special handling for recursive copy - create intermediate dirs
    if _original_basename and dest.endswith(os.sep):
        dest = os.path.join(dest, _original_basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')
        dirname = os.path.dirname(dest)
        b_dirname = to_bytes(dirname, errors='surrogate_or_strict')
        if not os.path.exists(b_dirname):
            try:
                (pre_existing_dir,
                 new_directory_list) = split_pre_existing_dir(dirname)
            except AnsibleModuleError as e:
                e.results['msg'] += ' Could not copy to {0}'.format(dest)
                module.fail_json(**e.results)

            os.makedirs(b_dirname)
            directory_args = module.load_file_common_arguments(module.params)
            directory_mode = module.params["directory_mode"]
            if directory_mode is not None:
                directory_args['mode'] = directory_mode
            else:
                directory_args['mode'] = None
            adjust_recursive_directory_permissions(pre_existing_dir,
                                                   new_directory_list, module,
                                                   directory_args, changed)

    if os.path.isdir(b_dest):
        basename = os.path.basename(src)
        if _original_basename:
            basename = _original_basename
        dest = os.path.join(dest, basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')

    if os.path.exists(b_dest):
        if os.path.islink(b_dest) and follow:
            b_dest = os.path.realpath(b_dest)
            dest = to_native(b_dest, errors='surrogate_or_strict')
        if not force:
            module.exit_json(msg="file already exists",
                             src=src,
                             dest=dest,
                             changed=False)
        if os.access(b_dest, os.R_OK) and os.path.isfile(dest):
            checksum_dest = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(b_dest)):
            try:
                # os.path.exists() can return false in some
                # circumstances where the directory does not have
                # the execute bit for the current user set, in
                # which case the stat() call will raise an OSError
                os.stat(os.path.dirname(b_dest))
            except OSError as e:
                if "permission denied" in to_native(e).lower():
                    module.fail_json(
                        msg="Destination directory %s is not accessible" %
                        (os.path.dirname(dest)))
            module.fail_json(msg="Destination directory %s does not exist" %
                             (os.path.dirname(dest)))

    if not os.access(os.path.dirname(b_dest),
                     os.W_OK) and not module.params['unsafe_writes']:
        module.fail_json(msg="Destination %s not writable" %
                         (os.path.dirname(dest)))

    backup_file = None
    if checksum_src != checksum_dest or os.path.islink(b_dest):
        if not module.check_mode:
            try:
                if backup:
                    if os.path.exists(b_dest):
                        backup_file = module.backup_local(dest)
                # allow for conversion from symlink.
                if os.path.islink(b_dest):
                    os.unlink(b_dest)
                    open(b_dest, 'w').close()
                if validate:
                    # if we have a mode, make sure we set it on the temporary
                    # file source as some validations may require it
                    if mode is not None:
                        module.set_mode_if_different(src, mode, False)
                    if owner is not None:
                        module.set_owner_if_different(src, owner, False)
                    if group is not None:
                        module.set_group_if_different(src, group, False)
                    if "%s" not in validate:
                        module.fail_json(msg="validate must contain %%s: %s" %
                                         (validate))
                    (rc, out, err) = module.run_command(validate % src)
                    if rc != 0:
                        module.fail_json(msg="failed to validate",
                                         exit_status=rc,
                                         stdout=out,
                                         stderr=err)
                b_mysrc = b_src
                if remote_src and os.path.isfile(b_src):
                    _, b_mysrc = tempfile.mkstemp(dir=os.path.dirname(b_dest))

                    shutil.copyfile(b_src, b_mysrc)
                    try:
                        shutil.copystat(b_src, b_mysrc)
                    except OSError as err:
                        if err.errno == errno.ENOSYS and mode == "preserve":
                            module.warn("Unable to copy stats {0}".format(
                                to_native(b_src)))
                        else:
                            raise
                module.atomic_move(
                    b_mysrc,
                    dest,
                    unsafe_writes=module.params['unsafe_writes'])
            except (IOError, OSError):
                module.fail_json(msg="failed to copy: %s to %s" % (src, dest),
                                 traceback=traceback.format_exc())
        changed = True
    else:
        changed = False

    if checksum_src is None and checksum_dest is None:
        if remote_src and os.path.isdir(module.params['src']):
            b_src = to_bytes(module.params['src'],
                             errors='surrogate_or_strict')
            b_dest = to_bytes(module.params['dest'],
                              errors='surrogate_or_strict')

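            # src ends with '/' and dest is an existing directory: merge contents into dest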
            if src.endswith(os.path.sep) and os.path.isdir(
                    module.params['dest']):
                diff_files_changed = copy_diff_files(b_src, b_dest, module)
                left_only_changed = copy_left_only(b_src, b_dest, module)
                common_dirs_changed = copy_common_dirs(b_src, b_dest, module)
                owner_group_changed = chown_recursive(b_dest, module)
                if diff_files_changed or left_only_changed or common_dirs_changed or owner_group_changed:
                    changed = True

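            # src ends with '/' and dest does not exist yet: copy the whole tree to dest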
            if src.endswith(
                    os.path.sep) and not os.path.exists(module.params['dest']):
                b_basename = to_bytes(os.path.basename(src),
                                      errors='surrogate_or_strict')
                b_dest = to_bytes(os.path.join(b_dest, b_basename),
                                  errors='surrogate_or_strict')
                b_src = to_bytes(os.path.join(module.params['src'], ""),
                                 errors='surrogate_or_strict')
                if not module.check_mode:
                    shutil.copytree(b_src, b_dest, symlinks=not (local_follow))
                chown_recursive(dest, module)
                changed = True

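            # src has no trailing '/' and dest is an existing directory: copy src inside dest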
            if not src.endswith(os.path.sep) and os.path.isdir(
                    module.params['dest']):
                b_basename = to_bytes(os.path.basename(src),
                                      errors='surrogate_or_strict')
                b_dest = to_bytes(os.path.join(b_dest, b_basename),
                                  errors='surrogate_or_strict')
                b_src = to_bytes(os.path.join(module.params['src'], ""),
                                 errors='surrogate_or_strict')
                if not module.check_mode and not os.path.exists(b_dest):
                    shutil.copytree(b_src, b_dest, symlinks=not (local_follow))
                    changed = True
                    chown_recursive(dest, module)
                if module.check_mode and not os.path.exists(b_dest):
                    changed = True
                if os.path.exists(b_dest):
                    diff_files_changed = copy_diff_files(b_src, b_dest, module)
                    left_only_changed = copy_left_only(b_src, b_dest, module)
                    common_dirs_changed = copy_common_dirs(
                        b_src, b_dest, module)
                    owner_group_changed = chown_recursive(b_dest, module)
                    if diff_files_changed or left_only_changed or common_dirs_changed or owner_group_changed:
                        changed = True

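            # src has no trailing '/' and dest does not exist: create dest and copy the contents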
            if not src.endswith(os.path.sep) and not os.path.exists(
                    module.params['dest']):
                b_basename = to_bytes(os.path.basename(module.params['src']),
                                      errors='surrogate_or_strict')
                b_dest = to_bytes(os.path.join(b_dest, b_basename),
                                  errors='surrogate_or_strict')
                if not module.check_mode and not os.path.exists(b_dest):
                    os.makedirs(b_dest)
                    b_src = to_bytes(os.path.join(module.params['src'], ""),
                                     errors='surrogate_or_strict')
                    diff_files_changed = copy_diff_files(b_src, b_dest, module)
                    left_only_changed = copy_left_only(b_src, b_dest, module)
                    common_dirs_changed = copy_common_dirs(
                        b_src, b_dest, module)
                    owner_group_changed = chown_recursive(b_dest, module)
                    if diff_files_changed or left_only_changed or common_dirs_changed or owner_group_changed:
                        changed = True
                if module.check_mode and not os.path.exists(b_dest):
                    changed = True

    res_args = dict(dest=dest,
                    src=src,
                    md5sum=md5sum_src,
                    checksum=checksum_src,
                    changed=changed)
    if backup_file:
        res_args['backup_file'] = backup_file

    module.params['dest'] = dest
    if not module.check_mode:
        file_args = module.load_file_common_arguments(module.params)
        res_args['changed'] = module.set_fs_attributes_if_different(
            file_args, res_args['changed'])

    module.exit_json(**res_args)
Example #10
def main():

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(required=True, type='path'),
            delimiter=dict(required=False),
            dest=dict(required=True, type='path'),
            backup=dict(default=False, type='bool'),
            remote_src=dict(default=False, type='bool'),
            regexp=dict(required=False),
            ignore_hidden=dict(default=False, type='bool'),
            validate=dict(required=False, type='str'),
        ),
        add_file_common_args=True
    )

    changed = False
    path_hash = None
    dest_hash = None
    src = module.params['src']
    dest = module.params['dest']
    backup = module.params['backup']
    delimiter = module.params['delimiter']
    regexp = module.params['regexp']
    compiled_regexp = None
    ignore_hidden = module.params['ignore_hidden']
    validate = module.params.get('validate', None)

    result = dict(src=src, dest=dest)
    if not os.path.exists(src):
        module.fail_json(msg="Source (%s) does not exist" % src)

    if not os.path.isdir(src):
        module.fail_json(msg="Source (%s) is not a directory" % src)

    if regexp is not None:
        try:
            compiled_regexp = re.compile(regexp)
        except re.error as e:
            module.fail_json(msg="Invalid Regexp (%s) in \"%s\"" % (to_native(e), regexp))

    if validate and "%s" not in validate:
        module.fail_json(msg="validate must contain %%s: %s" % validate)

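    # concatenate the fragments into a temp file, then compare its checksum with dest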
    path = assemble_from_fragments(src, delimiter, compiled_regexp, ignore_hidden)
    path_hash = module.sha1(path)
    result['checksum'] = path_hash

    # Backwards compat.  This won't return data if FIPS mode is active
    try:
        pathmd5 = module.md5(path)
    except ValueError:
        pathmd5 = None
    result['md5sum'] = pathmd5

    if os.path.exists(dest):
        dest_hash = module.sha1(dest)

    if path_hash != dest_hash:
        if validate:
            (rc, out, err) = module.run_command(validate % path)
            result['validation'] = dict(rc=rc, stdout=out, stderr=err)
            if rc != 0:
                cleanup(path)
                module.fail_json(msg="failed to validate: rc:%s error:%s" % (rc, err))
        if backup and dest_hash is not None:
            result['backup_file'] = module.backup_local(dest)

        module.atomic_move(path, dest, unsafe_writes=module.params['unsafe_writes'])
        changed = True

    cleanup(path, result)

    # handle file permissions
    file_args = module.load_file_common_arguments(module.params)
    result['changed'] = module.set_fs_attributes_if_different(file_args, changed)

    # Mission complete
    result['msg'] = "OK"
    module.exit_json(**result)
Example #11
def main():

    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=True, type='str'),
            conf_path=dict(required=False, default='/etc/aide.conf', type='path'),
            options=dict(required=False, type='list'),
            options_string=dict(required=False, type='str'),
            state=dict(required=False,
                       default="updated",
                       choices=['absent', 'present', 'updated']),
            backup=dict(default=False, type='bool'),
            add_if_not_present=dict(default=False, required=False, type='bool')),
        supports_check_mode=True,
        mutually_exclusive=[['options', 'options_string']],
        required_one_of=[['options', 'options_string']])
    # For existing rule sets
    rulesets = dict()
    content = str()
    options = []
    options_changed = []
    existing_options = []
    backupdest = ""
    fname = module.params['conf_path']
    action = module.params['state']
    add_if_not_present = module.params['add_if_not_present']
    ruleset_name = module.params['name'].strip()

    # Get the options from either the 'options' or 'options_string' module arguments
    if module.params['options']:
        options = module.params['options']
    else:
        options = module.params['options_string'].split('+')

    # Open the file and read the content or fail
    try:
        with open(fname, 'r') as aide_file_obj:
            content = aide_file_obj.read()
    except IOError as e:
        # If unable to read the file, fail out
        module.fail_json(msg='Unable to open/read AIDE configuration '
                             'file %s with error %s.' % (fname, str(e)))
    matches = RULESET_REGEX.findall(content)
    # Load existing rule set
    for match in matches:
        existing_options = match[1].split('+')
        rulesets[match[0]] = existing_options

    module.log(msg="EXISTING RULESET KEYS: " + str(rulesets.keys()))
    module.log(msg="EXISTING RULESET: " + str(rulesets))

    # Take action
    changed = False
    if action == 'absent':
        changed, rulesets[ruleset_name], options_changed = remove_options(
            rulesets[ruleset_name], options)
    elif action == 'present':
        changed, rulesets[ruleset_name], options_changed = add_options(
            rulesets[ruleset_name], options)
    elif action == 'updated':
        if ruleset_name in rulesets.keys() or add_if_not_present:
            rulesets[ruleset_name] = options
            options_changed = options
            changed = True

    # Write file
    if not module.check_mode and changed:
        module.log(msg="WRITING")
        # Update the content
        pattern = r"^" + ruleset_name + r"\s?=\s?.*$"
        module.log(msg="PATTERN: " + pattern)
        replacement = ruleset_name + " = " + "+".join(rulesets[ruleset_name])
        module.log(msg="REPLACEMENT: " + replacement)
        new_content = re.sub(pattern, replacement, content, flags=re.MULTILINE)

        module.log(msg="OLD CONTENT equals NEW CONTENT: " +
                   str(content == new_content))
        # First, create a backup if desired.
        if module.params['backup']:
            backupdest = module.backup_local(fname)
        # Write the file
        try:
            temp_file = NamedTemporaryFile(mode='w', delete=False)
            module.log(msg="TEMP FILE NAME: " + temp_file.name)
            with temp_file:
                temp_file.write(new_content)
        except IOError:
            module.fail_json(msg='Unable to create temporary file %s' % temp_file.name)

        module.atomic_move(temp_file.name, fname)

    facts = {}
    facts['aide_ruleset'] = {
        'action': action,
        'name': ruleset_name,
        'existing_options': existing_options,
        'options_changed': options_changed,
        'backupdest': backupdest
    }

    module.exit_json(changed=changed, ansible_facts=facts)
Example #12
def main():

    module_args = dict(host=dict(type='str', required=True),
                       port=dict(type='str', required=True),
                       user=dict(type='str', required=True),
                       password=dict(type='str', required=True, no_log=True),
                       src=dict(type='path', required=True),
                       dest=dict(type='path', required=True),
                       mappings=dict(type='list', required=False, default=[]),
                       cert=dict(type='path', required=False),
                       unsafe_writes=dict(type='bool',
                                          required=False,
                                          default=False))

    module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)

    result = dict(changed=False,
                  asset_count=0,
                  untranslated_assets=[],
                  untranslated_relationships=[])

    # if the user is working with this module in only check mode we do not
    # want to make any changes to the environment, just return the current
    # state with no modifications
    if module.check_mode:
        module.exit_json(**result)

    # Setup REST API connectivity via module_utils.igc_rest class
    igcrest = RestIGC(module,
                      result,
                      username=module.params['user'],
                      password=module.params['password'],
                      host=module.params['host'],
                      port=module.params['port'],
                      cert=module.params['cert'])

    mappings = module.params['mappings']
    src = module.params['src']
    dest = module.params['dest']

    if os.path.isdir(src):
        module.fail_json(rc=256, msg='Src %s is a directory!' % src)

    src_exists = os.path.exists(src)
    if not src_exists:
        module.fail_json(rc=257, msg='Src %s does not exist!' % src)

    with open(to_bytes(src), 'rb') as f:
        allAssets = json.load(f)

    # defaults so the output step below still works when the export is empty
    asset_name = ''
    aRows = []
    aHeader = []
    if len(allAssets) > 0:
        asset_type = allAssets[0]['_type']
        # Retrieve descriptive asset properties to populate headers
        asset_name, propertyMap = igcrest.getPropertyMap(asset_type)
        for asset in allAssets:

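            # one output row per asset: name, translated context, then relationships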
            aRow = {}
            if 'Name' not in aHeader:
                aHeader.append('Name')
            aRow['Name'] = asset['_name']
            # Start by translating context of the asset itself
            for ctx in asset['_context']:
                mappedCtx = get_mapped_value(ctx['_type'], 'name',
                                             ctx['_name'], mappings)
                ctxName, ctxMap = igcrest.getPropertyMap(ctx['_type'])
                if ctxName == 'Host (Engine)':
                    ctxName = 'Host'
                if ctxName not in aHeader:
                    aHeader.append(ctxName)
                aRow[ctxName] = mappedCtx
            result['asset_count'] += 1

            for reln_property in asset:
                if not reln_property.startswith('_'):
                    aRelations = []
                    relnName = propertyMap[reln_property]
                    if relnName not in aHeader:
                        aHeader.append(relnName)
                    for reln in asset[reln_property]:
                        # Translate the context of each related asset
                        sRelation = '"'
                        for relnCtx in reln['_context']:
                            mappedRelnCtx = get_mapped_value(
                                relnCtx['_type'], 'name', relnCtx['_name'],
                                mappings)
                            sRelation += mappedRelnCtx + ">>"
                            # relnCtxName, relnCtxMap = igcrest.getPropertyMap(relnCtx['_type'])
                        sRelation += reln['_name'] + '"'
                        aRelations.append(sRelation)
                    if len(aRelations) > 0:
                        aRow[relnName] = "[" + ";".join(aRelations) + "]"
                    else:
                        aRow[relnName] = ""
            if len(aRow) > 0:
                aRows.append(aRow)
            else:
                result['untranslated_assets'].append(asset)

    # Close the IGC REST API session
    igcrest.closeSession()

    # Write temporary file with the JSON output,
    # and then move to specified dest location
    try:
        tmpfd, tmpfile = tempfile.mkstemp()
        # Open in text mode with newline='' so csv manages line endings itself
        with os.fdopen(tmpfd, 'w', newline='') as f:
            f.write("+++ " + asset_name + " - begin +++\n")
            writer = csv.DictWriter(f, fieldnames=aHeader)
            writer.writeheader()
            for row in aRows:
                writer.writerow(row)
            f.write("+++ " + asset_name + " - end +++\n\n")
    except IOError:
        module.fail_json(
            msg='Unable to create temporary file to output transformed relationships',
            **result)

    # Checksumming to identify change...
    checksum_src = module.sha1(tmpfile)
    checksum_dest = None
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    if os.access(b_dest, os.R_OK):
        checksum_dest = module.sha1(dest)

    # If the file does not already exist and/or checksums are different,
    # move the new file over the old one and mark it as changed; otherwise
    # leave the original file (delete the tmpfile) and that there was no change
    if checksum_src != checksum_dest:
        module.atomic_move(tmpfile,
                           to_native(os.path.realpath(b_dest),
                                     errors='surrogate_or_strict'),
                           unsafe_writes=module.params['unsafe_writes'])
        result['changed'] = True
    else:
        os.unlink(tmpfile)

    module.exit_json(**result)
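Examples #13, #16 and #22 all gate the final atomic_move on a SHA-1 comparison so the module stays idempotent. That decision can be isolated into a helper along these lines (the function name is mine; the calls are the ones used above):

import os

from ansible.module_utils._text import to_bytes, to_native


def move_if_changed(module, tmpfile, dest, unsafe_writes=False):
    """Move tmpfile over dest only when the contents differ."""
    checksum_src = module.sha1(tmpfile)
    checksum_dest = None
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    if os.access(b_dest, os.R_OK):
        checksum_dest = module.sha1(dest)
    if checksum_src != checksum_dest:
        module.atomic_move(tmpfile,
                           to_native(os.path.realpath(b_dest),
                                     errors='surrogate_or_strict'),
                           unsafe_writes=unsafe_writes)
        return True
    os.unlink(tmpfile)  # no change: discard the temp file
    return False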
Example #14
def main():

    module = AnsibleModule(
        argument_spec=dict(
            host=dict(required=True, aliases=['hostname']),
            url_username=dict(required=True, aliases=['username', 'login']),
            url_password=dict(required=True, no_log=True,
                              aliases=['password']),
            src=dict(required=True, aliases=['name']),
            datacenter=dict(required=True),
            datastore=dict(required=True),
            dest=dict(required=True, aliases=['path'], type='path'),
            backup=dict(type='bool'),
            tmp_dest=dict(type='path'),
            timeout=dict(type='int', default=10),
            force=dict(default=False, aliases=['thirsty'], type='bool'),
            validate_certs=dict(required=False, default=True, type='bool'),
        ),
        supports_check_mode=True,
        add_file_common_args=True,
    )

    host = module.params.get('host')
    login = module.params.get('url_username')
    password = module.params.get('url_password')
    src = module.params.get('src')
    datacenter = module.params.get('datacenter')
    datastore = module.params.get('datastore')
    dest = module.params.get('dest')
    validate_certs = module.params.get('validate_certs')
    tmp_dest = module.params.get('tmp_dest')
    backup = module.params.get('backup')
    force = module.params.get('force')
    timeout = module.params.get('timeout')

    dest_is_dir = os.path.isdir(dest)
    last_mod_time = None

    remote_path = vmware_path(datastore, datacenter, src)
    url = 'https://%s%s' % (host, remote_path)

    if not dest_is_dir and os.path.exists(dest):
        if not force:
            # allow file attribute changes
            module.params['path'] = dest
            file_args = module.load_file_common_arguments(module.params)
            file_args['path'] = dest
            changed = module.set_fs_attributes_if_different(file_args, False)

            if changed:
                module.exit_json(
                    msg="file already exists but file attributes changed",
                    dest=dest,
                    url=url,
                    changed=changed)
            module.exit_json(msg="file already exists",
                             dest=dest,
                             url=url,
                             changed=changed)

        # If the file already exists, prepare the last modified time for the
        # request.
        mtime = os.path.getmtime(dest)
        last_mod_time = datetime.datetime.utcfromtimestamp(mtime)

    tmpsrc, info = vmware_get(module,
                              url=url,
                              dest=dest,
                              last_mod_time=last_mod_time,
                              force=force,
                              timeout=timeout,
                              tmp_dest=tmp_dest)

    if dest_is_dir:
        filename = extract_filename_from_headers(info)
        if not filename:
            # Fall back to extracting the filename from the URL.
            # Pluck the URL from the info, since a redirect could have changed
            # it.
            filename = url_filename(info['url'])
        dest = os.path.join(dest, filename)

    # If the remote URL exists, we're done with check mode
    if module.check_mode:
        os.remove(tmpsrc)
        res_args = dict(url=url,
                        dest=dest,
                        src=tmpsrc,
                        changed=True,
                        msg=info.get('msg', ''))
        module.exit_json(**res_args)

    # raise an error if there is no tmpsrc file
    if not os.path.exists(tmpsrc):
        module.fail_json(msg="Request failed",
                         status_code=info['status'],
                         response=info['msg'])
    if not os.access(tmpsrc, os.R_OK):
        os.remove(tmpsrc)
        module.fail_json(msg="Source %s is not readable" % (tmpsrc))
    checksum_src = module.sha1(tmpsrc)

    # check if there is no dest file
    if os.path.exists(dest):
        # raise an error if copy has no permission on dest
        if not os.access(dest, os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" % (dest))
        if not os.access(dest, os.R_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not readable" % (dest))
    else:
        if not os.path.exists(os.path.dirname(dest)):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s does not exist" %
                             (os.path.dirname(dest)))
        if not os.access(os.path.dirname(dest), os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" %
                             (os.path.dirname(dest)))

    backup_file = None
    try:
        if backup:
            if os.path.exists(dest):
                backup_file = module.backup_local(dest)
        module.atomic_move(tmpsrc, dest)
    except Exception as e:
        if os.path.exists(tmpsrc):
            os.remove(tmpsrc)
        module.fail_json(msg="failed to copy %s to %s: %s" %
                         (tmpsrc, dest, to_native(e)),
                         exception=traceback.format_exc())
    changed = True

    # allow file attribute changes
    module.params['path'] = dest
    file_args = module.load_file_common_arguments(module.params)
    file_args['path'] = dest
    changed = module.set_fs_attributes_if_different(file_args, changed)

    # Backwards compat only.  We'll return None on FIPS enabled systems
    try:
        md5sum = module.md5(dest)
    except ValueError:
        md5sum = None

    res_args = dict(url=url,
                    dest=dest,
                    src=tmpsrc,
                    md5sum=md5sum,
                    datacenter=datacenter,
                    datastore=datastore,
                    changed=changed,
                    msg=info.get('msg', ''),
                    status_code=info.get('status', ''))
    if backup_file:
        res_args['backup_file'] = backup_file

    # Mission complete
    module.exit_json(**res_args)
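Example #14 (like #17 below) finishes by handing ownership and permission handling back to the common file-argument machinery, which only works because the spec sets add_file_common_args=True. A condensed sketch of that step (helper name mine):

def apply_file_attributes(module, dest, changed):
    """Apply the owner/group/mode/selinux args shared with the file module."""
    module.params['path'] = dest
    file_args = module.load_file_common_arguments(module.params)
    file_args['path'] = dest
    # Returns True if any attribute actually had to be adjusted.
    return module.set_fs_attributes_if_different(file_args, changed)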
Example #15
def main():
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(type='str', required=True),
            rule=dict(type='str', no_log=True, aliases=['rules', 'snippet']),
            hook=dict(
                type='str',
                default='custom',
                choices=['custom', 'input', 'forward', 'internal',
                         'external']),
            prio=dict(type='int', default=DEFAULT_PRIO, aliases=['priority']),
            state=dict(type='str',
                       default='present',
                       choices=['present', 'absent']),
            backup=dict(type='bool', default=False),
            reload=dict(type='bool', default=True),
            ferm_dir=dict(type='str', default='/etc/ferm'),
        ),
        supports_check_mode=True,
        required_if=[('state', 'present', ['rule'])],
    )

    name = module.params['name']
    if re.search(r'^\d+-|[\s\\\/]|\.ferm$', name):
        module.fail_json(msg="Invalid rule name: '%s'" % name)

    prio = module.params['prio']
    if prio < 0 or prio > 99:
        module.fail_json(msg='Invalid rule prio: %d' % prio)

    hook = module.params['hook']
    hook_dir = os.path.join(module.params['ferm_dir'], '%s.d' % hook)
    if not os.path.isdir(hook_dir) or not os.access(hook_dir, os.W_OK):
        module.fail_json(msg='Directory is absent or not writable: ' +
                         hook_dir)

    new_path = os.path.join(hook_dir, '%02d-%s.ferm' % (prio, name))
    b_new_path = to_bytes(new_path, errors='surrogate_or_strict')

    path_glob = os.path.join(hook_dir, '*-%s.ferm' % name)
    regexp = os.path.join(re.escape(hook_dir),
                          r'[0-9]{2}-%s\.ferm' % re.escape(name))
    path_regex = re.compile('^%s$' % regexp)
    old_list = sorted(p for p in glob.glob(path_glob) if path_regex.match(p))

    exists = os.path.exists(b_new_path)
    if exists and not os.path.isfile(b_new_path):
        module.fail_json(msg='Destination is not a regular file: ' + new_path)
    if exists and not os.access(b_new_path, os.W_OK):
        module.fail_json(msg='Destination is not writable: ' + new_path)

    if exists:
        old_list.remove(new_path)  # must be present in the list
        old_path = new_path
    elif old_list:
        old_path = old_list.pop(0)  # first in sort order
        exists = True

    changed = False
    msg = ''
    backup = module.params['backup']
    backup_file = None
    state = module.params['state']

    if state == 'absent' and exists:
        changed = True
        if not module.check_mode:
            if backup:
                backup_file = module.backup_local(old_path)
            os.remove(to_bytes(old_path, errors='surrogate_or_strict'))
            msg = 'Rule removed: %s' % name
            if old_path != new_path:
                msg += ' (as old priority)'

    if state == 'present':
        rule = module.params['rule']
        if rule is None:
            module.fail_json(msg='Please provide the rule')
        b_rule = to_bytes(rule)

        if exists:
            with open(old_path, 'rb') as f:
                b_orig_rule = f.read()
            changed = b_rule != b_orig_rule or old_path != new_path
        else:
            changed = True

        if changed and not module.check_mode:
            tmpfd, tmpfile = tempfile.mkstemp()
            with os.fdopen(tmpfd, 'wb') as f:
                f.write(b_rule)

            if exists and backup:
                backup_file = module.backup_local(old_path)
            if exists and old_path != new_path:
                os.remove(to_bytes(old_path, errors='surrogate_or_strict'))

            module.atomic_move(tmpfile, new_path, unsafe_writes=False)
            msg = 'Rule saved: %s' % name
            if exists and old_path != new_path:
                msg += ' (as new priority)'

            module.set_mode_if_different(new_path, '0640', changed)
            module.set_owner_if_different(new_path, 'root', changed)
            module.set_group_if_different(new_path, 'root', changed)

    result = {'path': new_path}
    if backup_file:
        result['backup'] = backup_file
    if exists and old_path != new_path:
        result['old_path'] = old_path

    if old_list:
        if not module.check_mode:
            for path in old_list:
                os.remove(to_bytes(path, errors='surrogate_or_strict'))
        result['num_duplicates'] = len(old_list)
        if not msg:
            msg = 'Rule unchanged'
        msg += ', %d duplicate(s) removed' % len(old_list)
        changed = True

    if changed and module.params['reload'] and not module.check_mode:
        cmd = ['systemctl', 'reload-or-restart', 'ferm.service']
        rc, stdout, stderr = module.run_command(cmd)
        if rc:
            module.fail_json(msg='Failed to reload ferm',
                             rc=rc,
                             stdout=stdout,
                             stderr=stderr)

    module.exit_json(changed=changed, msg=msg, **result)
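Example #15 encodes the priority into the file name (NN-name.ferm) and treats any other matching file as a stale duplicate to be cleaned up. That lookup can be factored out roughly as follows (helper name mine; directory layout as in the example):

import glob
import os
import re


def find_rule_files(hook_dir, name):
    """Return every 'NN-<name>.ferm' file for a rule, sorted by priority."""
    path_glob = os.path.join(hook_dir, '*-%s.ferm' % name)
    pattern = os.path.join(re.escape(hook_dir),
                           r'[0-9]{2}-%s\.ferm' % re.escape(name))
    path_regex = re.compile('^%s$' % pattern)
    return sorted(p for p in glob.glob(path_glob) if path_regex.match(p))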
Example #16
def main():

    module_args = dict(
        host=dict(type='str', required=True),
        port=dict(type='str', required=True),
        user=dict(type='str', required=True),
        password=dict(type='str', required=True, no_log=True),
        asset_type=dict(type='str', required=True),
        relationships=dict(type='list', required=True),
        dest=dict(type='path', required=True),
        from_time=dict(type='int', required=False, default=-1),
        to_time=dict(type='int', required=False),
        conditions=dict(type='list', required=False, default=[]),
        limit=dict(type='list', required=False, default=[]),
        dev_glossary=dict(type='bool', required=False, default=False),
        batch=dict(type='int', required=False, default=100),
        cert=dict(type='path', required=False),
        unsafe_writes=dict(type='bool', required=False, default=False)
    )

    module = AnsibleModule(
        argument_spec=module_args,
        supports_check_mode=True
    )

    result = dict(
        changed=False,
        queries=[],
        asset_count=0,
        relationship_count=0
    )

    # if the user is working with this module in only check mode we do not
    # want to make any changes to the environment, just report the current
    # state with no modifications
    if module.check_mode:
        module.exit_json(**result)

    # Setup REST API connectivity via module_utils.igc_rest class
    igcrest = RestIGC(
        module,
        result,
        username=module.params['user'],
        password=module.params['password'],
        host=module.params['host'],
        port=module.params['port'],
        cert=module.params['cert']
    )

    relnprops = module.params['relationships']
    limit = module.params['limit']
    batch = module.params['batch']
    dev_glossary = module.params['dev_glossary']
    asset_type = module.params['asset_type']
    wfl_enabled = igcrest.isWorkflowEnabled()

    # Basic query
    reqJSON = {
        "properties": relnprops,
        "types": [asset_type],
        "pageSize": batch
    }

    # Extend basic query with any optional conditions
    if len(module.params['conditions']) > 0:
        reqJSON['where'] = {
            "conditions": module.params['conditions'],
            "operator": "and"
        }
    if module.params['from_time'] != -1:
        if 'where' not in reqJSON:
            reqJSON['where'] = {
                "conditions": [],
                "operator": "and"
            }
        reqJSON['where']['conditions'].append({
            "min": module.params['from_time'],
            "max": module.params['to_time'],
            "property": "modified_on",
            "operator": "between"
        })
    if dev_glossary and wfl_enabled and igcrest.isWorkflowType(asset_type):
        reqJSON['workflowMode'] = "draft"

    # Execute the search
    jsonResults = igcrest.search(reqJSON)

    # Ensure search worked before proceeding
    if jsonResults == '':
        module.fail_json(msg='Initial IGC REST API search failed', **result)

    result['asset_count'] = len(jsonResults)

    for item in jsonResults:
        minifyItem(item)
        for itmCtx in item['_context']:
            minifyItem(itmCtx)
        for relnprop in relnprops:
            # Not all relationships are lists, some are singular; but we will wrap for ease of processing below
            bSingleRelation = False
            if 'items' in item[relnprop]:
                item[relnprop] = igcrest.getAllPages(item[relnprop]['items'],
                                                     item[relnprop]['paging'],
                                                     (dev_glossary and wfl_enabled))
            elif '_id' in item[relnprop]:
                item[relnprop] = [item[relnprop]]
                bSingleRelation = True
            else:
                item[relnprop] = []
                bSingleRelation = True
            aRemoveIndices = []
            iIdx = 0
            for relation in item[relnprop]:
                # Limit included relationships to only those types of interest
                if (len(limit) > 0) and not (relation['_type'] in limit):
                    aRemoveIndices.append(iIdx)
                else:
                    relnCtx = igcrest.getContextForItem(relation, (dev_glossary and wfl_enabled), batch=batch)
                    if relnCtx == '':
                        module.fail_json(msg='Unable to retrieve context for search result', **result)
                    else:
                        minifyItem(relation)
                        for ctx in relnCtx:
                            minifyItem(ctx)
                        result['relationship_count'] += 1
                        relation['_context'] = relnCtx
                iIdx += 1
            iIdx = 0
            for removal in aRemoveIndices:
                del item[relnprop][removal - iIdx]
                iIdx += 1
            # Unbundle single relationships back out of their arrays
            if bSingleRelation:
                if len(item[relnprop]) > 0:
                    item[relnprop] = item[relnprop][0]
                else:
                    item[relnprop] = {}

    # Close the IGC REST API session
    igcrest.closeSession()

    # Write temporary file with the JSON output,
    # and then move to specified dest location
    try:
        tmpfd, tmpfile = tempfile.mkstemp()
        # json.dump writes str, so the descriptor must be opened in text mode
        with os.fdopen(tmpfd, 'w') as f:
            json.dump(jsonResults, f)
    except IOError:
        module.fail_json(msg='Unable to create temporary file to output relationship results', **result)

    # Checksumming to identify change...
    checksum_src = module.sha1(tmpfile)
    checksum_dest = None
    dest = module.params['dest']
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    if os.access(b_dest, os.R_OK):
        checksum_dest = module.sha1(dest)

    # If the file does not already exist and/or checksums are different,
    # move the new file over the old one and mark it as changed; otherwise
    # leave the original file (delete the tmpfile) and that there was no change
    if checksum_src != checksum_dest:
        module.atomic_move(tmpfile,
                           to_native(os.path.realpath(b_dest), errors='surrogate_or_strict'),
                           unsafe_writes=module.params['unsafe_writes'])
        result['changed'] = True
    else:
        os.unlink(tmpfile)

    module.exit_json(**result)
Example #17
def main():
    argument_spec = url_argument_spec()
    argument_spec.update(
        url=dict(type='str', required=True),
        dest=dict(type='path', required=True),
        backup=dict(type='bool'),
        sha256sum=dict(type='str', default=''),
        checksum=dict(type='str', default=''),
        timeout=dict(type='int', default=10),
        headers=dict(type='raw'),
        tmp_dest=dict(type='path'),
    )

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=argument_spec,
        add_file_common_args=True,
        supports_check_mode=True,
        mutually_exclusive=[['checksum', 'sha256sum']],
    )

    url = module.params['url']
    dest = module.params['dest']
    backup = module.params['backup']
    force = module.params['force']
    sha256sum = module.params['sha256sum']
    checksum = module.params['checksum']
    use_proxy = module.params['use_proxy']
    timeout = module.params['timeout']
    tmp_dest = module.params['tmp_dest']

    # Parse headers to dict
    if isinstance(module.params['headers'], dict):
        headers = module.params['headers']
    elif module.params['headers']:
        try:
            headers = dict(item.split(':', 1) for item in module.params['headers'].split(','))
            module.deprecate('Supplying `headers` as a string is deprecated. Please use dict/hash format for `headers`', version='2.10')
        except Exception:
            module.fail_json(msg="The string representation for the `headers` parameter requires a key:value,key:value syntax to be properly parsed.")
    else:
        headers = None

    dest_is_dir = os.path.isdir(dest)
    last_mod_time = None

    # workaround for usage of deprecated sha256sum parameter
    if sha256sum:
        checksum = 'sha256:%s' % (sha256sum)

    # checksum specified, parse for algorithm and checksum
    if checksum:
        try:
            algorithm, checksum = checksum.rsplit(':', 1)
            # Remove any non-alphanumeric characters, including the infamous
            # Unicode zero-width space
            checksum = re.sub(r'\W+', '', checksum).lower()
            # Ensure the checksum portion is a hexdigest
            int(checksum, 16)
        except ValueError:
            module.fail_json(msg="The checksum parameter has to be in format <algorithm>:<checksum>")

    if not dest_is_dir and os.path.exists(dest):
        checksum_mismatch = False

        # If the download is not forced and there is a checksum, allow
        # checksum match to skip the download.
        if not force and checksum != '':
            destination_checksum = module.digest_from_file(dest, algorithm)

            if checksum == destination_checksum:
                module.exit_json(msg="file already exists", dest=dest, url=url, changed=False)

            checksum_mismatch = True

        # Not forcing redownload, unless checksum does not match
        if not force and not checksum_mismatch:
            # allow file attribute changes
            module.params['path'] = dest
            file_args = module.load_file_common_arguments(module.params)
            file_args['path'] = dest
            changed = module.set_fs_attributes_if_different(file_args, False)

            if changed:
                module.exit_json(msg="file already exists but file attributes changed", dest=dest, url=url, changed=changed)
            module.exit_json(msg="file already exists", dest=dest, url=url, changed=changed)

        # If the file already exists, prepare the last modified time for the
        # request.
        mtime = os.path.getmtime(dest)
        last_mod_time = datetime.datetime.utcfromtimestamp(mtime)

        # If the checksum does not match we have to force the download
        # because last_mod_time may be newer than on remote
        if checksum_mismatch:
            force = True

    # download to tmpsrc
    tmpsrc, info = url_get(module, url, dest, use_proxy, last_mod_time, force, timeout, headers, tmp_dest)

    # Now the request has completed, we can finally generate the final
    # destination file name from the info dict.

    if dest_is_dir:
        filename = extract_filename_from_headers(info)
        if not filename:
            # Fall back to extracting the filename from the URL.
            # Pluck the URL from the info, since a redirect could have changed
            # it.
            filename = url_filename(info['url'])
        dest = os.path.join(dest, filename)

    checksum_src = None
    checksum_dest = None

    # If the remote URL exists, we're done with check mode
    if module.check_mode:
        os.remove(tmpsrc)
        res_args = dict(url=url, dest=dest, src=tmpsrc, changed=True, msg=info.get('msg', ''))
        module.exit_json(**res_args)

    # raise an error if there is no tmpsrc file
    if not os.path.exists(tmpsrc):
        module.fail_json(msg="Request failed", status_code=info['status'], response=info['msg'])
    if not os.access(tmpsrc, os.R_OK):
        os.remove(tmpsrc)
        module.fail_json(msg="Source %s is not readable" % (tmpsrc))
    checksum_src = module.sha1(tmpsrc)

    # check if there is no dest file
    if os.path.exists(dest):
        # raise an error if copy has no permission on dest
        if not os.access(dest, os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" % (dest))
        if not os.access(dest, os.R_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not readable" % (dest))
        checksum_dest = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(dest)):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s does not exist" % (os.path.dirname(dest)))
        if not os.access(os.path.dirname(dest), os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" % (os.path.dirname(dest)))

    backup_file = None
    if checksum_src != checksum_dest:
        try:
            if backup:
                if os.path.exists(dest):
                    backup_file = module.backup_local(dest)
            module.atomic_move(tmpsrc, dest)
        except Exception as e:
            if os.path.exists(tmpsrc):
                os.remove(tmpsrc)
            module.fail_json(msg="failed to copy %s to %s: %s" % (tmpsrc, dest, to_native(e)),
                             exception=traceback.format_exc())
        changed = True
    else:
        changed = False
        if os.path.exists(tmpsrc):
            os.remove(tmpsrc)

    if checksum != '':
        destination_checksum = module.digest_from_file(dest, algorithm)

        if checksum != destination_checksum:
            os.remove(dest)
            module.fail_json(msg="The checksum for %s did not match %s; it was %s." % (dest, checksum, destination_checksum))

    # allow file attribute changes
    module.params['path'] = dest
    file_args = module.load_file_common_arguments(module.params)
    file_args['path'] = dest
    changed = module.set_fs_attributes_if_different(file_args, changed)

    # Backwards compat only.  We'll return None on FIPS enabled systems
    try:
        md5sum = module.md5(dest)
    except ValueError:
        md5sum = None

    res_args = dict(
        url=url, dest=dest, src=tmpsrc, md5sum=md5sum, checksum_src=checksum_src,
        checksum_dest=checksum_dest, changed=changed, msg=info.get('msg', ''), status_code=info.get('status', '')
    )
    if backup_file:
        res_args['backup_file'] = backup_file

    # Mission complete
    module.exit_json(**res_args)
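The <algorithm>:<hexdigest> convention parsed above is easy to check in isolation, without AnsibleModule at all; a standalone sketch using only hashlib (function name mine):

import hashlib
import re


def verify_checksum(path, spec):
    """Check a file against an '<algorithm>:<hexdigest>' spec string."""
    algorithm, expected = spec.rsplit(':', 1)
    # Strip stray non-alphanumerics (e.g. zero-width spaces), as above
    expected = re.sub(r'\W+', '', expected).lower()
    int(expected, 16)  # raises ValueError if not a hexdigest
    digest = hashlib.new(algorithm)  # raises ValueError for unknown algorithms
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            digest.update(chunk)
    return digest.hexdigest() == expected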
Example #18
def main():
    pam_items = ['core', 'data', 'fsize', 'memlock', 'nofile', 'rss', 'stack', 'cpu', 'nproc', 'as', 'maxlogins', 'maxsyslogins', 'priority', 'locks',
                 'sigpending', 'msgqueue', 'nice', 'rtprio', 'chroot']

    pam_types = ['soft', 'hard', '-']

    limits_conf = '/etc/security/limits.conf'

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            domain=dict(required=True, type='str'),
            limit_type=dict(required=True, type='str', choices=pam_types),
            limit_item=dict(required=True, type='str', choices=pam_items),
            value=dict(required=True, type='str'),
            use_max=dict(default=False, type='bool'),
            use_min=dict(default=False, type='bool'),
            backup=dict(default=False, type='bool'),
            dest=dict(default=limits_conf, type='str'),
            comment=dict(required=False, default='', type='str')
        )
    )

    domain = module.params['domain']
    limit_type = module.params['limit_type']
    limit_item = module.params['limit_item']
    value = module.params['value']
    use_max = module.params['use_max']
    use_min = module.params['use_min']
    backup = module.params['backup']
    limits_conf = module.params['dest']
    new_comment = module.params['comment']

    changed = False

    if os.path.isfile(limits_conf):
        if not os.access(limits_conf, os.W_OK):
            module.fail_json(msg="%s is not writable. Use sudo" % limits_conf)
    else:
        limits_conf_dir = os.path.dirname(limits_conf)
        if os.path.isdir(limits_conf_dir) and os.access(limits_conf_dir, os.W_OK):
            open(limits_conf, 'a').close()
            changed = True
        else:
            module.fail_json(msg="directory %s is not writable (check presence, access rights, use sudo)" % limits_conf_dir)

    if use_max and use_min:
        module.fail_json(msg="Cannot use use_min and use_max at the same time.")

    if not (value in ['unlimited', 'infinity', '-1'] or value.isdigit()):
        module.fail_json(msg="Argument 'value' can be one of 'unlimited', 'infinity', '-1' or positive number. Refer to manual pages for more details.")

    # Backup
    if backup:
        backup_file = module.backup_local(limits_conf)

    space_pattern = re.compile(r'\s+')

    message = ''
    f = open(limits_conf, 'rb')
    # Tempfile; delete=False because atomic_move below takes over the file
    nf = tempfile.NamedTemporaryFile(mode='w+', delete=False)

    found = False
    new_value = value

    for line in f:
        line = to_native(line, errors='surrogate_or_strict')
        if line.startswith('#'):
            nf.write(line)
            continue

        newline = re.sub(space_pattern, ' ', line).strip()
        if not newline:
            nf.write(line)
            continue

        # Remove comment in line
        newline = newline.split('#', 1)[0]
        try:
            old_comment = line.split('#', 1)[1]
        except IndexError:
            old_comment = ''

        newline = newline.rstrip()

        if not new_comment:
            new_comment = old_comment

        line_fields = newline.split(' ')

        if len(line_fields) != 4:
            nf.write(line)
            continue

        line_domain = line_fields[0]
        line_type = line_fields[1]
        line_item = line_fields[2]
        actual_value = line_fields[3]

        if not (actual_value in ['unlimited', 'infinity', '-1'] or actual_value.isdigit()):
            module.fail_json(msg="Invalid configuration of '%s'. Current value of %s is unsupported." % (limits_conf, line_item))

        # Found the line
        if line_domain == domain and line_type == limit_type and line_item == limit_item:
            found = True
            if value == actual_value:
                message = line
                nf.write(line)
                continue

            actual_value_unlimited = actual_value in ['unlimited', 'infinity', '-1']
            value_unlimited = value in ['unlimited', 'infinity', '-1']

            if use_max:
                if value.isdigit() and actual_value.isdigit():
                    new_value = str(max(int(value), int(actual_value)))
                elif actual_value_unlimited:
                    new_value = actual_value
                else:
                    new_value = value

            if use_min:
                if value.isdigit() and actual_value.isdigit():
                    new_value = str(min(int(value), int(actual_value)))
                elif value_unlimited:
                    new_value = actual_value
                else:
                    new_value = value

            # Change line only if value has changed
            if new_value != actual_value:
                changed = True
                if new_comment:
                    new_comment = "\t#" + new_comment
                new_limit = domain + "\t" + limit_type + "\t" + limit_item + "\t" + new_value + new_comment + "\n"
                message = new_limit
                nf.write(new_limit)
            else:
                message = line
                nf.write(line)
        else:
            nf.write(line)

    if not found:
        changed = True
        if new_comment:
            new_comment = "\t#" + new_comment
        new_limit = domain + "\t" + limit_type + "\t" + limit_item + "\t" + new_value + new_comment + "\n"
        message = new_limit
        nf.write(new_limit)

    f.close()
    nf.flush()

    # Move the tempfile over the original file
    module.atomic_move(nf.name, limits_conf)
    nf.close()

    res_args = dict(
        changed=changed, msg=message
    )

    if backup:
        res_args['backup_file'] = backup_file

    module.exit_json(**res_args)
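The rewrite loop in example #18 is a general recipe for safe in-place config edits: stream the original into a temp file in the same directory, substitute (or append) the one line you own, then rename over the original. A condensed standalone sketch using Python 3's os.replace instead of atomic_move (names mine):

import os
import tempfile


def replace_line(path, matches, new_line):
    """Rewrite path, swapping (or appending) the first line matches() accepts."""
    fd, tmp = tempfile.mkstemp(dir=os.path.dirname(path) or '.')
    found = False
    with os.fdopen(fd, 'w') as out, open(path) as src:
        for line in src:
            if not found and matches(line):
                out.write(new_line)
                found = True
            else:
                out.write(line)
        if not found:
            out.write(new_line)
    os.replace(tmp, path)  # atomic: tmp lives on the same filesystem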
Example #19
def main():
    module = AnsibleModule(
        argument_spec={
            "gitlab_url": {
                "required": True,
                type: "str"
            },
            "gitlab_token": {
                "required": True,
                type: "str"
            },
            "description": {
                "required": True,
                "type": "str"
            },
            "registration_url": {
                "required": True,
                "type": "str"
            },
            "registration_token": {
                "required": True,
                "type": "str"
            },
            "config_dir": {
                "default": "/etc/gitlab-runner.d",
                "type": "str"
            },
            "executor": {
                "required": False,
                "type": "str"
            },
            "limit": {
                "required": False,
                "type": "str"
            },
            "tags": {
                "required": False,
                "type": "str"
            },
            "extra_args": {
                "required": False,
                "type": "str"
            },
            "enfore_unique_description": {
                "required": False,
                "default": True,
                "type": "bool"
            }
        })

    if not _HAS_DEPENDENCIES:
        module.fail_json(msg="A required Python module is not installed: %s" %
                         _IMPORT_ERROR)

    configuration_path = "%s/description-%s.json" % (
        module.params["config_dir"], module.params["description"])
    output_toml_path = "%s/description-%s-token-%s-url-%s.toml" % (
        module.params["config_dir"], module.params["description"],
        module.params["registration_token"],
        b64encode(module.params["registration_url"].encode()).decode())

    config = dict()
    config["description"] = module.params["description"]
    config["registration_url"] = module.params["registration_url"]
    config["registration_token"] = module.params["registration_token"]
    config["configuration_path"] = configuration_path
    config["output_toml_path"] = output_toml_path
    if "executor" in module.params:
        config["executor"] = module.params["executor"]
    if "limit" in module.params:
        config["limit"] = module.params["limit"]
    if "tags" in module.params:
        config["tags"] = ",".join(sorted(
            module.params["tags"].split(",")))  # canonicalize order of tags
    if "extra_args" in module.params:
        config["extra_args"] = module.params["extra_args"]

    changed = False

    connector = Gitlab(module.params["gitlab_url"],
                       module.params["gitlab_token"])
    try:
        runners = connector.runners.list(all=True)
        runners_tokens = {
            runner: connector.runners.get(runner.id).token
            for runner in runners
        }
    except GitlabGetError as e:
        module.fail_json(
            msg="Failed to get runners from gitlab API endpoint %s: %s" %
            (module.params["gitlab_url"], e))

    if os.path.isfile(configuration_path):
        with open(configuration_path) as file:
            existing_config = json.load(file)

        runner_registered = get_runner_token(
            config["output_toml_path"]) in runners_tokens.values()
        configuration_changed = existing_config != config

        if runner_registered:
            if not configuration_changed:
                module.exit_json(changed=False,
                                 message="Configuration unchanged")
            else:
                unregister_command = [
                    "gitlab-ci-multi-runner", "unregister", "-c",
                    existing_config["output_toml_path"], "-n",
                    existing_config["description"]
                ]
                unregister_process = subprocess.Popen(unregister_command,
                                                      shell=False,
                                                      stdout=subprocess.PIPE)
                unregister_process.communicate()  # drain stdout; avoids PIPE deadlock
                if unregister_process.returncode != 0:
                    module.exit_json(
                        failed=True,
                        changed=False,
                        message="Failed to unregister old configuration",
                        existing_config=existing_config)
        try:
            os.remove(existing_config["configuration_path"])
        except OSError:
            pass
        try:
            os.remove(existing_config["output_toml_path"])
        except OSError:
            pass
        changed = True

    register_command = [
        "gitlab-ci-multi-runner", "register", "-n", "--leave-runner", "--url",
        config["registration_url"], "--registration-token",
        config["registration_token"], "--description", config["description"]
    ]
    if "executor" in config:
        register_command.extend(["--executor", config["executor"]])
    if "limit" in config:
        register_command.extend(["--limit", config["limit"]])
    if "tags" in config:
        register_command.extend(["--tag-list", config["tags"]])
    if "extra_args" in config:
        register_command.extend(shlex.split(config["extra_args"]))
    register_command.extend(["-c", config["output_toml_path"]])
    try:
        subprocess.check_output(register_command,
                                shell=False,
                                stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        module.exit_json(
            failed=True,
            changed=changed,
            message="Failed to register configuration (call to '%s' failed with status %s): %s"
            % (e.cmd, e.returncode, e.output),
            config=config)
    changed = True

    sed_command = [
        "sed", "-ni", r"/^\[\[runners\]\]/ { p; :a; n; p; ba; }",
        config["output_toml_path"]
    ]
    sed_process = subprocess.Popen(sed_command,
                                   shell=False,
                                   stdout=subprocess.PIPE)
    sed_process.communicate()  # drain stdout; avoids PIPE deadlock
    if sed_process.returncode != 0:
        module.exit_json(
            failed=True,
            changed=changed,
            message="Failed to process updated registration config TOML through sed",
            config=config)

    try:
        # json.dump writes str, so open the tempfile in text mode
        with NamedTemporaryFile(mode='w', delete=False) as file:
            json.dump(config, file)
        module.atomic_move(file.name, configuration_path)

    except IOError as e:
        module.exit_json(failed=True,
                         changed=changed,
                         message="Failed to write config JSON to %s: %s" %
                         (configuration_path, e))

    deleted_runners = set()
    if module.params["enfore_unique_description"]:
        registered_runner_token = get_runner_token(config["output_toml_path"])
        try:
            projects = connector.projects.list(all=True)
        except GitlabGetError as e:
            module.fail_json(
                msg="Failed to get runners/projects from gitlab API endpoint %s: %s"
                % (module.params["gitlab_url"], e))
        for runner in runners:
            if runner.description == config["description"] and runners_tokens[
                    runner] != registered_runner_token:
                deleted_runners.add(runner.description)
                delete_runner(runner, projects)

    module.exit_json(
        changed=True,
        message="Gitlab runner registered successfully. Deleted %d old runner(s): %s"
        % (len(deleted_runners), deleted_runners))
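Example #19 calls get_runner_token() without showing it; presumably it extracts the registered token from the generated runner TOML, something along these lines (a guess, not the author's code):

import re


def get_runner_token(toml_path):
    """Presumed helper: pull the registered token out of a runner TOML file."""
    try:
        with open(toml_path) as f:
            for line in f:
                match = re.match(r'\s*token\s*=\s*"([^"]+)"', line)
                if match:
                    return match.group(1)
    except (IOError, OSError):
        return None
    return None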
Example #20
class TomcatResourceRun(object):
    def __init__(self):
        self.changed = False

        self.module = AnsibleModule(
            argument_spec=dict(
                state=dict(default='present', choices=['present', 'absent']),
                name=dict(required=True),
                catalina_home=dict(type='path'),
                xml_path=dict(type='path'),
                attrs=dict(type='dict', default={})
            ),
            mutually_exclusive=[['catalina_home', 'xml_path']],
            required_one_of=[['catalina_home', 'xml_path']],
            supports_check_mode=True
        )

    def go(self):
        state = self.module.params['state']
        xml_path = self.module.params['xml_path']

        self.name = self.module.params['name']
        self.attrs = self.module.params['attrs']

        if not xml_path:
            xml_path = self.module.params['catalina_home'] + '/conf/server.xml'

        # Parse the XML
        self.dom = xml.dom.minidom.parse(xml_path)
        self.root = self.dom.documentElement
        self.gnr_node = self.root.getElementsByTagName("GlobalNamingResources")

        # It's unlikely, but there may not be a GNR node. If not create one.
        if not self.gnr_node:
            self.gnr_node = self.dom.createElement("GlobalNamingResources")
            self.root.appendChild(self.gnr_node)
        else:
            self.gnr_node = self.gnr_node[0]

        self.gnr_res = [node for node in self.gnr_node.getElementsByTagName("Resource")
                        if node.getAttribute("name") == self.name]

        if state == 'present':
            self.ensure_present()
        elif state == 'absent':
            self.ensure_absent()
        else:
            self.module.fail_json(msg="Invalid state: " + state)

        # Save the XML only if it's been changed
        if self.changed and not self.module.check_mode:
            tmpfd, tmpfile = tempfile.mkstemp()
            with os.fdopen(tmpfd, XML_WRITE_MODE) as out:
                self.root.writexml(out)
            self.module.atomic_move(tmpfile, xml_path)

        self.module.exit_json(changed=self.changed)

    def ensure_absent(self):
        for node in self.gnr_res:
            node.parentNode.removeChild(node)
            self.changed = True

    def ensure_present(self):
        if len(self.gnr_res) > 0:
            link = self.gnr_res[0]
        else:
            link = self.dom.createElement("Resource")
            self.gnr_node.appendChild(link)
            link.setAttribute("name", self.name)
            self.changed = True

        # We make a copy here and also string-ify booleans
        pending_attrs = {}
        for k in self.attrs.keys():
            v = self.attrs[k]
            if isinstance(v, bool):
                if v:
                    v = "true"
                else:
                    v = "false"
            pending_attrs[k] = str(v)

        attrs_to_remove = []

        for i in range(link.attributes.length):
            attr = link.attributes.item(i)
            if attr.name == "name":
                continue
            if attr.name in pending_attrs.keys():
                if attr.value != pending_attrs[attr.name]:
                    link.setAttribute(attr.name, pending_attrs[attr.name])
                    self.changed = True
                del pending_attrs[attr.name]
            else:
                attrs_to_remove = attrs_to_remove + [attr.name]

        if len(attrs_to_remove) > 0:
            self.changed = True
            for n in attrs_to_remove:
                link.removeAttribute(n)

        for k in pending_attrs.keys():
            self.changed = True
            link.setAttribute(k, pending_attrs[k])
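Example #20 is a class with no entry point shown; presumably the module is launched with something like:

if __name__ == '__main__':
    TomcatResourceRun().go()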
Example #21
def main():

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec = dict(
            src               = dict(required=False, type='path'),
            original_basename = dict(required=False),  # used to handle 'dest is a directory' via template, a slight hack
            content           = dict(required=False, no_log=True),
            dest              = dict(required=True, type='path'),
            backup            = dict(default=False, type='bool'),
            force             = dict(default=True, aliases=['thirsty'], type='bool'),
            validate          = dict(required=False, type='str'),
            directory_mode    = dict(required=False),
            remote_src        = dict(required=False, type='bool'),
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    src = module.params['src']
    b_src = to_bytes(src, errors='surrogate_or_strict')
    dest = module.params['dest']
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    backup = module.params['backup']
    force = module.params['force']
    original_basename = module.params.get('original_basename', None)
    validate = module.params.get('validate', None)
    follow = module.params['follow']
    mode = module.params['mode']
    remote_src = module.params['remote_src']

    if not os.path.exists(b_src):
        module.fail_json(msg="Source %s not found" % (src))
    if not os.access(b_src, os.R_OK):
        module.fail_json(msg="Source %s not readable" % (src))
    if os.path.isdir(b_src):
        module.fail_json(msg="Remote copy does not support recursive copy of directory: %s" % (src))

    checksum_src = module.sha1(src)
    checksum_dest = None
    # Backwards compat only.  This will be None in FIPS mode
    try:
        md5sum_src = module.md5(src)
    except ValueError:
        md5sum_src = None

    changed = False

    # Special handling for recursive copy - create intermediate dirs
    if original_basename and dest.endswith(os.sep):
        dest = os.path.join(dest, original_basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')
        dirname = os.path.dirname(dest)
        b_dirname = to_bytes(dirname, errors='surrogate_or_strict')
        if not os.path.exists(b_dirname) and os.path.isabs(b_dirname):
            (pre_existing_dir, new_directory_list) = split_pre_existing_dir(dirname)
            os.makedirs(b_dirname)
            directory_args = module.load_file_common_arguments(module.params)
            directory_mode = module.params["directory_mode"]
            if directory_mode is not None:
                directory_args['mode'] = directory_mode
            else:
                directory_args['mode'] = None
            adjust_recursive_directory_permissions(pre_existing_dir, new_directory_list, module, directory_args, changed)

    if os.path.isdir(b_dest):
        basename = os.path.basename(src)
        if original_basename:
            basename = original_basename
        dest = os.path.join(dest, basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')

    if os.path.exists(b_dest):
        if os.path.islink(b_dest) and follow:
            b_dest = os.path.realpath(b_dest)
            dest = to_native(b_dest, errors='surrogate_or_strict')
        if not force:
            module.exit_json(msg="file already exists", src=src, dest=dest, changed=False)
        if os.access(b_dest, os.R_OK):
            checksum_dest = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(b_dest)):
            try:
                # os.path.exists() can return false in some
                # circumstances where the directory does not have
                # the execute bit for the current user set, in
                # which case the stat() call will raise an OSError
                os.stat(os.path.dirname(b_dest))
            except OSError:
                e = get_exception()
                if "permission denied" in to_native(e).lower():
                    module.fail_json(msg="Destination directory %s is not accessible" % (os.path.dirname(dest)))
            module.fail_json(msg="Destination directory %s does not exist" % (os.path.dirname(dest)))
    if not os.access(os.path.dirname(b_dest), os.W_OK):
        module.fail_json(msg="Destination %s not writable" % (os.path.dirname(dest)))

    backup_file = None
    if checksum_src != checksum_dest or os.path.islink(b_dest):
        if not module.check_mode:
            try:
                if backup:
                    if os.path.exists(b_dest):
                        backup_file = module.backup_local(dest)
                # allow for conversion from symlink.
                if os.path.islink(b_dest):
                    os.unlink(b_dest)
                    open(b_dest, 'w').close()
                if validate:
                    # if we have a mode, make sure we set it on the temporary
                    # file source as some validations may require it
                    # FIXME: should we do the same for owner/group here too?
                    if mode is not None:
                        module.set_mode_if_different(src, mode, False)
                    if "%s" not in validate:
                        module.fail_json(msg="validate must contain %%s: %s" % (validate))
                    (rc, out, err) = module.run_command(validate % src)
                    if rc != 0:
                        module.fail_json(msg="failed to validate", exit_status=rc, stdout=out, stderr=err)
                b_mysrc = b_src
                if remote_src:
                    _, b_mysrc = tempfile.mkstemp(dir=os.path.dirname(b_dest))
                    shutil.copy2(b_src, b_mysrc)
                module.atomic_move(b_mysrc, dest, unsafe_writes=module.params['unsafe_writes'])
            except IOError:
                module.fail_json(msg="failed to copy: %s to %s" % (src, dest), traceback=traceback.format_exc())
        changed = True
    else:
        changed = False

    res_args = dict(
        dest=dest, src=src, md5sum=md5sum_src, checksum=checksum_src, changed=changed
    )
    if backup_file:
        res_args['backup_file'] = backup_file

    module.params['dest'] = dest
    if not module.check_mode:
        file_args = module.load_file_common_arguments(module.params)
        res_args['changed'] = module.set_fs_attributes_if_different(file_args, res_args['changed'])

    module.exit_json(**res_args)
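The validate handling in example #21 (substituting the staged file's path into a user-supplied command before the move) is worth isolating. A sketch with the same checks (helper name mine):

def run_validation(module, validate, path):
    """Run a user-supplied validation command against path before the move."""
    if '%s' not in validate:
        module.fail_json(msg='validate must contain %%s: %s' % validate)
    rc, out, err = module.run_command(validate % path)
    if rc != 0:
        module.fail_json(msg='failed to validate', exit_status=rc,
                         stdout=out, stderr=err)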
Example #22
def main():

    module_args = dict(
        src=dict(type='list', required=True),
        dest=dict(type='path', required=True),
        mappings=dict(type='list', required=False, default=[]),
        unsafe_writes=dict(type='bool', required=False, default=False)
    )

    module = AnsibleModule(
        argument_spec=module_args,
        supports_check_mode=True
    )

    result = dict(
        changed=False,
        merged_asset_count=0,
        merged_relationship_count=0
    )

    # if the user is working with this module in only check mode we do not
    # want to make any changes to the environment, just report the current
    # state with no modifications
    if module.check_mode:
        module.exit_json(**result)

    src = module.params['src']
    dest = module.params['dest']
    mappings = module.params['mappings']

    mergedAssets = {}
    relnsForId = {}

    for filename in src:
        if os.path.isdir(filename):
            module.fail_json(rc=256, msg='Src %s is a directory!' % filename)

        if not os.path.exists(filename):
            module.fail_json(rc=257, msg='Src %s does not exist!' % filename)

        with open(to_bytes(filename), 'rb') as f:
            allAssetsFromFile = json.load(f)

        for asset in allAssetsFromFile:
            mapped_asset = get_mapped_identity(asset, mappings)
            asset_id = json.dumps(mapped_asset)
            if asset_id not in mergedAssets:
                mergedAssets[asset_id] = mapped_asset
                result['merged_asset_count'] += 1
            for prop in asset:
                bRelation = isinstance(asset[prop], list)
                if not prop.startswith('_'):
                    if bRelation:
                        if asset_id not in relnsForId:
                            relnsForId[asset_id] = {}
                        if prop not in mergedAssets[asset_id]:
                            mergedAssets[asset_id][prop] = []
                        if prop not in relnsForId[asset_id]:
                            relnsForId[asset_id][prop] = []
                        for reln in asset[prop]:
                            mapped_reln = get_mapped_identity(reln, mappings)
                            reln_id = json.dumps(mapped_reln)
                            if reln_id not in relnsForId[asset_id][prop]:
                                mergedAssets[asset_id][prop].append(mapped_reln)
                                relnsForId[asset_id][prop].append(reln_id)
                                result['merged_relationship_count'] += 1
                    else:
                        mergedAssets[asset_id][prop] = asset[prop]

    consolidatedAssets = []
    for asset_id in mergedAssets:
        asset = mergedAssets[asset_id]
        consolidatedAssets.append(asset)

    # Write temporary file with the JSON output,
    # and then move to specified dest location
    try:
        tmpfd, tmpfile = tempfile.mkstemp()
        # json.dump writes str, so the descriptor must be opened in text mode
        with os.fdopen(tmpfd, 'w') as f:
            json.dump(consolidatedAssets, f)
    except IOError:
        module.fail_json(msg='Unable to create temporary file to output merged relationships', **result)

    # Checksumming to identify change...
    checksum_src = module.sha1(tmpfile)
    checksum_dest = None
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    if os.access(b_dest, os.R_OK):
        checksum_dest = module.sha1(dest)

    # If the file does not already exist and/or checksums are different,
    # move the new file over the old one and mark it as changed; otherwise
    # leave the original file (delete the tmpfile) and that there was no change
    if checksum_src != checksum_dest:
        module.atomic_move(tmpfile,
                           to_native(os.path.realpath(b_dest), errors='surrogate_or_strict'),
                           unsafe_writes=module.params['unsafe_writes'])
        result['changed'] = True
    else:
        os.unlink(tmpfile)

    module.exit_json(**result)
Example #23
def main():
    argument_spec = url_argument_spec()

    # setup aliases
    argument_spec['url_username']['aliases'] = ['username']
    argument_spec['url_password']['aliases'] = ['password']

    argument_spec.update(
        url=dict(type='str', required=True),
        dest=dict(type='path', required=True),
        backup=dict(type='bool', default=False),
        checksum=dict(type='str', default=''),
        timeout=dict(type='int', default=10),
        headers=dict(type='dict'),
        tmp_dest=dict(type='path'),
        unredirected_headers=dict(type='list', elements='str', default=[]),
    )

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=argument_spec,
        add_file_common_args=True,
        supports_check_mode=True,
    )

    url = module.params['url']
    dest = module.params['dest']
    backup = module.params['backup']
    force = module.params['force']
    checksum = module.params['checksum']
    use_proxy = module.params['use_proxy']
    timeout = module.params['timeout']
    headers = module.params['headers']
    tmp_dest = module.params['tmp_dest']
    unredirected_headers = module.params['unredirected_headers']

    result = dict(
        changed=False,
        checksum_dest=None,
        checksum_src=None,
        dest=dest,
        elapsed=0,
        url=url,
    )

    dest_is_dir = os.path.isdir(dest)
    last_mod_time = None

    # checksum specified, parse for algorithm and checksum
    if checksum:
        try:
            algorithm, checksum = checksum.split(':', 1)
        except ValueError:
            module.fail_json(
                msg=
                "The checksum parameter has to be in format <algorithm>:<checksum>",
                **result)

        if is_url(checksum):
            checksum_url = checksum
            # download checksum file to checksum_tmpsrc
            checksum_tmpsrc, checksum_info = url_get(
                module,
                checksum_url,
                dest,
                use_proxy,
                last_mod_time,
                force,
                timeout,
                headers,
                tmp_dest,
                unredirected_headers=unredirected_headers)
            with open(checksum_tmpsrc) as f:
                lines = [line.rstrip('\n') for line in f]
            os.remove(checksum_tmpsrc)
            checksum_map = []
            filename = url_filename(url)
            if len(lines) == 1 and len(lines[0].split()) == 1:
                # Only a single line with a single string
                # treat it as a checksum only file
                checksum_map.append((lines[0], filename))
            else:
                # The assumption here is the file is in the format of
                # checksum filename
                for line in lines:
                    # Split by one whitespace to keep the leading type char ' ' (whitespace) for text and '*' for binary
                    parts = line.split(" ", 1)
                    if len(parts) == 2:
                        # Remove the leading type char: ' ' marks a text
                        # file and '*' a binary file in sha*sum output
                        if parts[1].startswith((" ", "*")):
                            parts[1] = parts[1][1:]

                        # Append checksum and path without potential leading './'
                        checksum_map.append((parts[0], parts[1].lstrip("./")))

            # Look through each line in the checksum file for a hash corresponding to
            # the filename in the url, returning the first hash that is found.
            for cksum in (s for (s, f) in checksum_map if f == filename):
                checksum = cksum
                break
            else:
                checksum = None

            if checksum is None:
                module.fail_json(
                    msg="Unable to find a checksum for file '%s' in '%s'" %
                    (filename, checksum_url))
        # Remove any non-alphanumeric characters, including the infamous
        # Unicode zero-width space
        checksum = re.sub(r'\W+', '', checksum).lower()
        # Ensure the checksum portion is a hexdigest
        try:
            int(checksum, 16)
        except ValueError:
            module.fail_json(msg='The checksum format is invalid', **result)

    if not dest_is_dir and os.path.exists(dest):
        checksum_mismatch = False

        # If the download is not forced and there is a checksum, allow
        # checksum match to skip the download.
        if not force and checksum != '':
            destination_checksum = module.digest_from_file(dest, algorithm)

            if checksum != destination_checksum:
                checksum_mismatch = True

        # Not forcing redownload and the checksum matches, so only allow
        # file attribute changes
        if not force and checksum and not checksum_mismatch:
            file_args = module.load_file_common_arguments(module.params,
                                                          path=dest)
            result['changed'] = module.set_fs_attributes_if_different(
                file_args, False)
            if result['changed']:
                module.exit_json(
                    msg="file already exists but file attributes changed",
                    **result)
            module.exit_json(msg="file already exists", **result)

        # If the file already exists, prepare the last modified time for the
        # request.
        mtime = os.path.getmtime(dest)
        last_mod_time = datetime.datetime.utcfromtimestamp(mtime)

        # If the checksum does not match we have to force the download
        # because last_mod_time may be newer than on remote
        if checksum_mismatch:
            force = True

    # download to tmpsrc
    start = datetime.datetime.utcnow()
    method = 'HEAD' if module.check_mode else 'GET'
    tmpsrc, info = url_get(module,
                           url,
                           dest,
                           use_proxy,
                           last_mod_time,
                           force,
                           timeout,
                           headers,
                           tmp_dest,
                           method,
                           unredirected_headers=unredirected_headers)
    result['elapsed'] = (datetime.datetime.utcnow() - start).seconds
    result['src'] = tmpsrc

    # Now the request has completed, we can finally generate the final
    # destination file name from the info dict.

    if dest_is_dir:
        filename = extract_filename_from_headers(info)
        if not filename:
            # Fall back to extracting the filename from the URL.
            # Pluck the URL from the info, since a redirect could have changed
            # it.
            filename = url_filename(info['url'])
        dest = os.path.join(dest, filename)
        result['dest'] = dest

    # raise an error if there is no tmpsrc file
    if not os.path.exists(tmpsrc):
        # nothing to clean up: calling os.remove() on the missing tmpsrc
        # would itself raise and mask the failure below
        module.fail_json(msg="Request failed",
                         status_code=info['status'],
                         response=info['msg'],
                         **result)
    if not os.access(tmpsrc, os.R_OK):
        os.remove(tmpsrc)
        module.fail_json(msg="Source %s is not readable" % (tmpsrc), **result)
    result['checksum_src'] = module.sha1(tmpsrc)

    # check if there is no dest file
    if os.path.exists(dest):
        # raise an error if copy has no permission on dest
        if not os.access(dest, os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" % (dest),
                             **result)
        if not os.access(dest, os.R_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not readable" % (dest),
                             **result)
        result['checksum_dest'] = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(dest)):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s does not exist" %
                             (os.path.dirname(dest)),
                             **result)
        if not os.access(os.path.dirname(dest), os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" %
                             (os.path.dirname(dest)),
                             **result)

    if module.check_mode:
        if os.path.exists(tmpsrc):
            os.remove(tmpsrc)
        result['changed'] = ('checksum_dest' not in result or
                             result['checksum_src'] != result['checksum_dest'])
        module.exit_json(msg=info.get('msg', ''), **result)

    backup_file = None
    if result['checksum_src'] != result['checksum_dest']:
        try:
            if backup:
                if os.path.exists(dest):
                    backup_file = module.backup_local(dest)
            module.atomic_move(tmpsrc,
                               dest,
                               unsafe_writes=module.params['unsafe_writes'])
        except Exception as e:
            if os.path.exists(tmpsrc):
                os.remove(tmpsrc)
            module.fail_json(msg="failed to copy %s to %s: %s" %
                             (tmpsrc, dest, to_native(e)),
                             exception=traceback.format_exc(),
                             **result)
        result['changed'] = True
    else:
        result['changed'] = False
        if os.path.exists(tmpsrc):
            os.remove(tmpsrc)

    if checksum != '':
        destination_checksum = module.digest_from_file(dest, algorithm)

        if checksum != destination_checksum:
            os.remove(dest)
            module.fail_json(
                msg="The checksum for %s did not match %s; it was %s." %
                (dest, checksum, destination_checksum),
                **result)

    # allow file attribute changes
    file_args = module.load_file_common_arguments(module.params, path=dest)
    result['changed'] = module.set_fs_attributes_if_different(
        file_args, result['changed'])

    # Backwards compat only.  We'll return None on FIPS enabled systems
    try:
        result['md5sum'] = module.md5(dest)
    except ValueError:
        result['md5sum'] = None

    if backup_file:
        result['backup_file'] = backup_file

    # Mission complete
    module.exit_json(msg=info.get('msg', ''),
                     status_code=info.get('status', ''),
                     **result)
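The checksum-file handling above accepts both a bare digest and sha*sum-style "digest filename" lines. A standalone sketch of that parsing logic (hypothetical parse_checksum_file helper, extracted here for illustration only):

def parse_checksum_file(text, filename):
    """Find the digest for `filename` in sha*sum-style output."""
    lines = text.splitlines()
    # a single bare token is treated as a digest-only file
    if len(lines) == 1 and len(lines[0].split()) == 1:
        return lines[0]
    for line in lines:
        parts = line.split(" ", 1)
        if len(parts) == 2:
            # drop the ' ' (text) / '*' (binary) type char and any './'
            if parts[1].startswith((" ", "*")):
                parts[1] = parts[1][1:]
            if parts[1].lstrip("./") == filename:
                return parts[0]
    return None

assert parse_checksum_file("abc123", "pkg.tar.gz") == "abc123"
assert parse_checksum_file(
    "deadbeef *pkg.tar.gz\ncafebabe *other.tar.gz", "pkg.tar.gz") == "deadbeef"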
Пример #24
def main():

    module_args = dict(host=dict(type='str', required=True),
                       port=dict(type='str', required=True),
                       user=dict(type='str', required=True),
                       password=dict(type='str', required=True, no_log=True),
                       bundle_name=dict(type='str', required=True),
                       dest=dict(type='path', required=True),
                       assets_to_keep=dict(type='list', required=True),
                       complete_types=dict(type='list',
                                           required=False,
                                           default=[]),
                       cert=dict(type='path', required=False),
                       unsafe_writes=dict(type='bool',
                                          required=False,
                                          default=False))

    module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)

    result = dict(changed=False, asset_count=0)

    # if the user is running this module in check mode we do not want to
    # make any changes to the environment; just report the current state
    # with no modifications (a bare `return` here would emit no JSON at all)
    if module.check_mode:
        module.exit_json(**result)

    # Setup REST API connectivity via module_utils.igc_rest class
    igcrest = RestIGC(module,
                      result,
                      username=module.params['user'],
                      password=module.params['password'],
                      host=module.params['host'],
                      port=module.params['port'],
                      cert=module.params['cert'])

    complete_types = module.params['complete_types']

    # Execute the retrieval
    xmlResults = igcrest.getOpenIGCAssets(module.params['bundle_name'])

    # Ensure search worked before proceeding
    if not xmlResults:
        module.fail_json(msg='Retrieval of OpenIGC assets failed', **result)

    # Write temporary file with the full XML output to operate against
    try:
        tmpfd_full, tmpfile_full = tempfile.mkstemp()
        with os.fdopen(tmpfd_full, 'wb') as f:
            f.write(xmlResults)
    except IOError:
        module.fail_json(
            msg='Unable to create temporary file to output OpenIGC assets',
            **result)

    assets_to_keep = module.params['assets_to_keep']
    oigc_xml = OpenIGCHandler(module, result, tmpfile_full)

    partial_assets = []
    complete_assets = []
    assets_to_drop = []

    for rid in assets_to_keep:
        e_asset = oigc_xml.getAssetById(rid)
        asset_type = oigc_xml.getType(e_asset)
        if asset_type in complete_types:
            complete_assets.append(rid)
        else:
            partial_assets.append(rid)
        a_ancestors = oigc_xml.getAncestralAssetRids(rid)
        for ancestor in a_ancestors:
            e_ancestor = oigc_xml.getAssetById(ancestor)
            ancestor_type = oigc_xml.getType(e_ancestor)
            if ancestor_type in complete_types and ancestor not in complete_assets:
                complete_assets.append(ancestor)
            elif ancestor not in partial_assets:
                partial_assets.append(ancestor)
        a_children = oigc_xml.getAssetChildrenRids(rid)
        for child in a_children:
            e_child = oigc_xml.getAssetById(child)
            child_type = oigc_xml.getType(e_child)
            if child_type in complete_types and child not in complete_assets:
                complete_assets.append(child)
            elif child not in partial_assets:
                partial_assets.append(child)

    for e_asset in oigc_xml.getAssets():
        rid = oigc_xml.getRid(e_asset)
        if rid not in partial_assets and rid not in complete_assets:
            if rid is not None:
                oigc_xml.dropAsset(e_asset)
        else:
            result['asset_count'] += 1

    oigc_xml.setImportActionPartials(partial_assets)
    oigc_xml.setImportActionCompletes(complete_assets)

    # Remove the interim temporary file
    os.unlink(tmpfile_full)

    # Write a new temporary file with the revised XML output,
    # and then move it to the specified dest location
    try:
        tmpfd, tmpfile = tempfile.mkstemp()
        # writeCustomizedXML writes via the path, so just close the unused fd
        os.close(tmpfd)
        oigc_xml.writeCustomizedXML(tmpfile)
    except IOError:
        module.fail_json(
            msg='Unable to create temporary file to output revised OpenIGC assets',
            **result)

    # Checksumming to identify change...
    checksum_src = module.sha1(tmpfile)
    checksum_dest = None
    dest = module.params['dest']
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    if os.access(b_dest, os.R_OK):
        checksum_dest = module.sha1(dest)

    # If the file does not already exist and/or the checksums differ,
    # move the new file over the old one and mark it as changed; otherwise
    # leave the original file alone (deleting the tmpfile) and report no change
    if checksum_src != checksum_dest:
        module.atomic_move(tmpfile,
                           to_native(os.path.realpath(b_dest),
                                     errors='surrogate_or_strict'),
                           unsafe_writes=module.params['unsafe_writes'])
        result['changed'] = True
    else:
        os.unlink(tmpfile)

    module.exit_json(**result)
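The IGC/IA modules in this collection all follow the same idempotency pattern: write the generated output to a temporary file, compare SHA-1 checksums against dest, and only atomic_move when they differ. A hedged sketch of that pattern as a reusable helper (hypothetical write_if_changed; assumes an AnsibleModule instance with the usual sha1/atomic_move methods):

import os
import tempfile

def write_if_changed(module, data, dest):
    """Write `data` (bytes) to `dest` only if the content would change.

    Returns True when dest was replaced. atomic_move() renames over the
    destination, so readers never observe a partially written file.
    """
    tmpfd, tmpfile = tempfile.mkstemp()
    with os.fdopen(tmpfd, 'wb') as f:
        f.write(data)
    checksum_dest = module.sha1(dest) if os.access(dest, os.R_OK) else None
    if module.sha1(tmpfile) != checksum_dest:
        module.atomic_move(tmpfile, os.path.realpath(dest),
                           unsafe_writes=module.params.get('unsafe_writes', False))
        return True
    os.unlink(tmpfile)
    return False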
Пример #25
def main():
    argument_spec = url_argument_spec()

    # setup aliases
    argument_spec['url_username']['aliases'] = ['username']
    argument_spec['url_password']['aliases'] = ['password']

    argument_spec.update(
        url=dict(type='str', required=True),
        dest=dict(type='path', required=True),
        backup=dict(type='bool'),
        sha256sum=dict(type='str', default=''),
        checksum=dict(type='str', default=''),
        timeout=dict(type='int', default=10),
        headers=dict(type='raw'),
        tmp_dest=dict(type='path'),
    )

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=argument_spec,
        add_file_common_args=True,
        supports_check_mode=True,
        mutually_exclusive=[['checksum', 'sha256sum']],
    )

    url = module.params['url']
    dest = module.params['dest']
    backup = module.params['backup']
    force = module.params['force']
    sha256sum = module.params['sha256sum']
    checksum = module.params['checksum']
    use_proxy = module.params['use_proxy']
    timeout = module.params['timeout']
    tmp_dest = module.params['tmp_dest']

    result = dict(
        changed=False,
        checksum_dest=None,
        checksum_src=None,
        dest=dest,
        elapsed=0,
        url=url,
    )

    # Parse headers to dict
    if isinstance(module.params['headers'], dict):
        headers = module.params['headers']
    elif module.params['headers']:
        try:
            headers = dict(
                item.split(':', 1)
                for item in module.params['headers'].split(','))
            module.deprecate(
                'Supplying `headers` as a string is deprecated. Please use dict/hash format for `headers`',
                version='2.10')
        except Exception:
            module.fail_json(
                msg=
                "The string representation for the `headers` parameter requires a key:value,key:value syntax to be properly parsed.",
                **result)
    else:
        headers = None

    dest_is_dir = os.path.isdir(dest)
    last_mod_time = None

    # workaround for usage of deprecated sha256sum parameter
    if sha256sum:
        checksum = 'sha256:%s' % (sha256sum)

    # checksum specified, parse for algorithm and checksum
    if checksum:
        try:
            algorithm, checksum = checksum.split(':', 1)
        except ValueError:
            module.fail_json(
                msg=
                "The checksum parameter has to be in format <algorithm>:<checksum>",
                **result)

        if checksum.startswith('http://') or checksum.startswith(
                'https://') or checksum.startswith('ftp://'):
            checksum_url = checksum
            # download checksum file to checksum_tmpsrc
            checksum_tmpsrc, checksum_info = url_get(module, checksum_url,
                                                     dest, use_proxy,
                                                     last_mod_time, force,
                                                     timeout, headers,
                                                     tmp_dest)
            with open(checksum_tmpsrc) as f:
                lines = [line.rstrip('\n') for line in f]
            os.remove(checksum_tmpsrc)
            checksum_map = {}
            for line in lines:
                parts = line.split(None, 1)
                if len(parts) == 2:
                    checksum_map[parts[0]] = parts[1]
            filename = url_filename(url)

            # Look through each line in the checksum file for a hash corresponding to
            # the filename in the url, returning the first hash that is found.
            for cksum in (s for (s, f) in checksum_map.items()
                          if f.strip('./') == filename):
                checksum = cksum
                break
            else:
                checksum = None

            if checksum is None:
                module.fail_json(
                    msg="Unable to find a checksum for file '%s' in '%s'" %
                    (filename, checksum_url))
        # Remove any non-alphanumeric characters, including the infamous
        # Unicode zero-width space
        checksum = re.sub(r'\W+', '', checksum).lower()
        # Ensure the checksum portion is a hexdigest
        try:
            int(checksum, 16)
        except ValueError:
            module.fail_json(msg='The checksum format is invalid', **result)

    if not dest_is_dir and os.path.exists(dest):
        checksum_mismatch = False

        # If the download is not forced and there is a checksum, allow
        # checksum match to skip the download.
        if not force and checksum != '':
            destination_checksum = module.digest_from_file(dest, algorithm)

            if checksum != destination_checksum:
                checksum_mismatch = True

        # Not forcing redownload and the checksum matches, so only allow
        # file attribute changes
        if not force and checksum and not checksum_mismatch:
            module.params['path'] = dest
            file_args = module.load_file_common_arguments(module.params)
            file_args['path'] = dest
            result['changed'] = module.set_fs_attributes_if_different(
                file_args, False)
            if result['changed']:
                module.exit_json(
                    msg="file already exists but file attributes changed",
                    **result)
            module.exit_json(msg="file already exists", **result)

        # If the file already exists, prepare the last modified time for the
        # request.
        mtime = os.path.getmtime(dest)
        last_mod_time = datetime.datetime.utcfromtimestamp(mtime)

        # If the checksum does not match we have to force the download
        # because last_mod_time may be newer than on remote
        if checksum_mismatch:
            force = True

    # download to tmpsrc
    start = datetime.datetime.utcnow()
    tmpsrc, info = url_get(module, url, dest, use_proxy, last_mod_time, force,
                           timeout, headers, tmp_dest)
    result['elapsed'] = (datetime.datetime.utcnow() - start).seconds
    result['src'] = tmpsrc

    # Now the request has completed, we can finally generate the final
    # destination file name from the info dict.

    if dest_is_dir:
        filename = extract_filename_from_headers(info)
        if not filename:
            # Fall back to extracting the filename from the URL.
            # Pluck the URL from the info, since a redirect could have changed
            # it.
            filename = url_filename(info['url'])
        dest = os.path.join(dest, filename)
        result['dest'] = dest

    # raise an error if there is no tmpsrc file
    if not os.path.exists(tmpsrc):
        # nothing to clean up: calling os.remove() on the missing tmpsrc
        # would itself raise and mask the failure below
        module.fail_json(msg="Request failed",
                         status_code=info['status'],
                         response=info['msg'],
                         **result)
    if not os.access(tmpsrc, os.R_OK):
        os.remove(tmpsrc)
        module.fail_json(msg="Source %s is not readable" % (tmpsrc), **result)
    result['checksum_src'] = module.sha1(tmpsrc)

    # check if there is no dest file
    if os.path.exists(dest):
        # raise an error if copy has no permission on dest
        if not os.access(dest, os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" % (dest),
                             **result)
        if not os.access(dest, os.R_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not readable" % (dest),
                             **result)
        result['checksum_dest'] = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(dest)):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s does not exist" %
                             (os.path.dirname(dest)),
                             **result)
        if not os.access(os.path.dirname(dest), os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" %
                             (os.path.dirname(dest)),
                             **result)

    if module.check_mode:
        if os.path.exists(tmpsrc):
            os.remove(tmpsrc)
        result['changed'] = ('checksum_dest' not in result or
                             result['checksum_src'] != result['checksum_dest'])
        module.exit_json(msg=info.get('msg', ''), **result)

    backup_file = None
    if result['checksum_src'] != result['checksum_dest']:
        try:
            if backup:
                if os.path.exists(dest):
                    backup_file = module.backup_local(dest)
            module.atomic_move(tmpsrc, dest)
        except Exception as e:
            if os.path.exists(tmpsrc):
                os.remove(tmpsrc)
            module.fail_json(msg="failed to copy %s to %s: %s" %
                             (tmpsrc, dest, to_native(e)),
                             exception=traceback.format_exc(),
                             **result)
        result['changed'] = True
    else:
        result['changed'] = False
        if os.path.exists(tmpsrc):
            os.remove(tmpsrc)

    if checksum != '':
        destination_checksum = module.digest_from_file(dest, algorithm)

        if checksum != destination_checksum:
            os.remove(dest)
            module.fail_json(
                msg="The checksum for %s did not match %s; it was %s." %
                (dest, checksum, destination_checksum),
                **result)

    # allow file attribute changes
    module.params['path'] = dest
    file_args = module.load_file_common_arguments(module.params)
    file_args['path'] = dest
    result['changed'] = module.set_fs_attributes_if_different(
        file_args, result['changed'])

    # Backwards compat only.  We'll return None on FIPS enabled systems
    try:
        result['md5sum'] = module.md5(dest)
    except ValueError:
        result['md5sum'] = None

    if backup_file:
        result['backup_file'] = backup_file

    # Mission complete
    module.exit_json(msg=info.get('msg', ''),
                     status_code=info.get('status', ''),
                     **result)
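This older get_url variant still accepts `headers` as a "k1:v1,k2:v2" string. A short sketch of that legacy parsing and its limitation (any header value containing a comma breaks the split, which is why the string form was deprecated in favour of a dict):

def parse_headers(raw):
    """Parse the legacy 'Key:Value,Key2:Value2' header string into a dict."""
    return dict(item.split(':', 1) for item in raw.split(','))

print(parse_headers('Accept:application/json,X-Token:abc'))
# -> {'Accept': 'application/json', 'X-Token': 'abc'}
# A value such as 'Cookie:a=1,b=2' fails: the comma split leaves 'b=2'
# with no colon, and the dict() constructor raises ValueError.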
Пример #26
def main():
    # Define the parameters the module accepts
    module = AnsibleModule(
        argument_spec=dict(src=dict(required=True, type='path'),
                           dest=dict(required=True, type='path'),
                           force=dict(default=True, type='bool'),
                           original_basename=dict(required=False),
                           remote_src=dict(required=False, type='bool')),
        supports_check_mode=True,
    )

    # Fetch the module parameters
    src = module.params['src']
    dest = module.params['dest']
    b_src = to_bytes(src, errors='surrogate_or_strict')
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    force = module.params['force']
    remote_src = module.params['remote_src']
    original_basename = module.params.get('original_basename', None)

    # Validate the parameters
    if not os.path.exists(b_src):
        module.fail_json(msg="Source %s not found" % (src))
    if not os.access(b_src, os.R_OK):
        module.fail_json(msg="Source %s not readable" % (src))
    if os.path.isdir(b_src):
        module.fail_json(
            msg="Remote copy does not support recursive copy of directory: %s"
            % (src))

    # Compute the SHA-1 of the source file
    checksum_src = module.sha1(src)
    checksum_dest = None

    changed = False

    # Determine the final dest file path
    if original_basename and dest.endswith(os.sep):
        dest = os.path.join(dest, original_basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')

    # Check whether the destination file already exists
    if os.path.exists(b_dest):
        if not force:
            module.exit_json(msg="file already exists",
                             src=src,
                             dest=dest,
                             changed=False)
        if os.access(b_dest, os.R_OK):
            checksum_dest = module.sha1(dest)
    # The destination directory does not exist; abort
    elif not os.path.exists(os.path.dirname(b_dest)):
        try:
            # os.path.exists() can return False when the directory is merely
            # inaccessible; stat() distinguishes the two cases
            os.stat(os.path.dirname(b_dest))
        except OSError as e:
            if "permission denied" in to_native(e).lower():
                module.fail_json(
                    msg="Destination directory %s is not accessible" %
                    (os.path.dirname(dest)))
        module.fail_json(msg="Destination directory %s does not exist" %
                         (os.path.dirname(dest)))

    # Copy the source over the destination when their SHA-1 values differ
    if checksum_src != checksum_dest:
        if not module.check_mode:
            try:
                if remote_src:
                    shutil.copy(b_src, b_dest)
                else:
                    module.atomic_move(b_src, b_dest)
            except IOError:
                module.fail_json(msg="failed to copy: %s to %s" % (src, dest))
            changed = True

    else:
        changed = False

    # Build the return values
    res_args = dict(dest=dest, src=src, checksum=checksum_src, changed=changed)

    module.exit_json(**res_args)
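module.sha1() above is what makes the copy idempotent: the file is only rewritten when the digests differ. A self-contained equivalent of that comparison with hashlib (illustrative helper, reading in chunks to bound memory):

import hashlib

def file_sha1(path, chunk_size=64 * 1024):
    """Hex SHA-1 digest of a file, read in fixed-size chunks."""
    digest = hashlib.sha1()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()

def files_identical(src, dest):
    return file_sha1(src) == file_sha1(dest)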
Пример #27
def main():

    global module

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(type='path'),
            _original_basename=dict(
                type='str'
            ),  # used to handle 'dest is a directory' via template, a slight hack
            content=dict(type='str', no_log=True),
            dest=dict(type='path', required=True),
            backup=dict(type='bool', default=False),
            force=dict(type='bool', default=True, aliases=['thirsty']),
            validate=dict(type='str'),
            directory_mode=dict(type='raw'),
            remote_src=dict(type='bool'),
            local_follow=dict(type='bool'),
            checksum=dict(type='str'),
            follow=dict(type='bool', default=False),
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    if module.params.get('thirsty'):
        module.deprecate(
            'The alias "thirsty" has been deprecated and will be removed, use "force" instead',
            version='2.13',
            collection_name='ansible.builtin')

    src = module.params['src']
    b_src = to_bytes(src, errors='surrogate_or_strict')
    dest = module.params['dest']
    # Make sure we always have a directory component for later processing
    if os.path.sep not in dest:
        dest = '.{0}{1}'.format(os.path.sep, dest)
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    backup = module.params['backup']
    force = module.params['force']
    _original_basename = module.params.get('_original_basename', None)
    validate = module.params.get('validate', None)
    follow = module.params['follow']
    local_follow = module.params['local_follow']
    mode = module.params['mode']
    owner = module.params['owner']
    group = module.params['group']
    remote_src = module.params['remote_src']
    checksum = module.params['checksum']

    if not os.path.exists(b_src):
        module.fail_json(msg="Source %s not found" % (src))
    if not os.access(b_src, os.R_OK):
        module.fail_json(msg="Source %s not readable" % (src))

    # Preserve is usually handled in the action plugin but mode + remote_src has to be done on the
    # remote host
    if module.params['mode'] == 'preserve':
        module.params['mode'] = '0%03o' % stat.S_IMODE(os.stat(b_src).st_mode)
    mode = module.params['mode']

    checksum_dest = None

    if os.path.isfile(src):
        checksum_src = module.sha1(src)
    else:
        checksum_src = None

    # Backwards compat only.  This will be None in FIPS mode
    try:
        if os.path.isfile(src):
            md5sum_src = module.md5(src)
        else:
            md5sum_src = None
    except ValueError:
        md5sum_src = None

    changed = False

    if checksum and checksum_src != checksum:
        module.fail_json(
            msg=
            'Copied file does not match the expected checksum. Transfer failed.',
            checksum=checksum_src,
            expected_checksum=checksum)

    # Special handling for recursive copy - create intermediate dirs
    if dest.endswith(os.sep):
        if _original_basename:
            dest = os.path.join(dest, _original_basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')
        dirname = os.path.dirname(dest)
        b_dirname = to_bytes(dirname, errors='surrogate_or_strict')
        if not os.path.exists(b_dirname):
            try:
                (pre_existing_dir,
                 new_directory_list) = split_pre_existing_dir(dirname)
            except AnsibleModuleError as e:
                e.results['msg'] += ' Could not copy to {0}'.format(dest)
                module.fail_json(**e.results)

            os.makedirs(b_dirname)
            directory_args = module.load_file_common_arguments(module.params)
            directory_mode = module.params["directory_mode"]
            if directory_mode is not None:
                directory_args['mode'] = directory_mode
            else:
                directory_args['mode'] = None
            adjust_recursive_directory_permissions(pre_existing_dir,
                                                   new_directory_list, module,
                                                   directory_args, changed)

    if os.path.isdir(b_dest):
        basename = os.path.basename(src)
        if _original_basename:
            basename = _original_basename
        dest = os.path.join(dest, basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')

    if os.path.exists(b_dest):
        if os.path.islink(b_dest) and follow:
            b_dest = os.path.realpath(b_dest)
            dest = to_native(b_dest, errors='surrogate_or_strict')
        if not force:
            module.exit_json(msg="file already exists",
                             src=src,
                             dest=dest,
                             changed=False)
        if os.access(b_dest, os.R_OK) and os.path.isfile(b_dest):
            checksum_dest = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(b_dest)):
            try:
                # os.path.exists() can return false in some
                # circumstances where the directory does not have
                # the execute bit for the current user set, in
                # which case the stat() call will raise an OSError
                os.stat(os.path.dirname(b_dest))
            except OSError as e:
                if "permission denied" in to_native(e).lower():
                    module.fail_json(
                        msg="Destination directory %s is not accessible" %
                        (os.path.dirname(dest)))
            module.fail_json(msg="Destination directory %s does not exist" %
                             (os.path.dirname(dest)))

    if not os.access(os.path.dirname(b_dest),
                     os.W_OK) and not module.params['unsafe_writes']:
        module.fail_json(msg="Destination %s not writable" %
                         (os.path.dirname(dest)))

    backup_file = None
    if checksum_src != checksum_dest or os.path.islink(b_dest):
        if not module.check_mode:
            try:
                if backup:
                    if os.path.exists(b_dest):
                        backup_file = module.backup_local(dest)
                # allow for conversion from symlink.
                if os.path.islink(b_dest):
                    os.unlink(b_dest)
                    open(b_dest, 'w').close()
                if validate:
                    # if we have a mode, make sure we set it on the temporary
                    # file source as some validations may require it
                    if mode is not None:
                        module.set_mode_if_different(src, mode, False)
                    if owner is not None:
                        module.set_owner_if_different(src, owner, False)
                    if group is not None:
                        module.set_group_if_different(src, group, False)
                    if "%s" not in validate:
                        module.fail_json(msg="validate must contain %%s: %s" %
                                         (validate))
                    (rc, out, err) = module.run_command(validate % src)
                    if rc != 0:
                        module.fail_json(msg="failed to validate",
                                         exit_status=rc,
                                         stdout=out,
                                         stderr=err)
                b_mysrc = b_src
                if remote_src and os.path.isfile(b_src):
                    _, b_mysrc = tempfile.mkstemp(dir=os.path.dirname(b_dest))

                    shutil.copyfile(b_src, b_mysrc)
                    try:
                        shutil.copystat(b_src, b_mysrc)
                    except OSError as err:
                        if err.errno == errno.ENOSYS and mode == "preserve":
                            module.warn("Unable to copy stats {0}".format(
                                to_native(b_src)))
                        else:
                            raise

                # might be needed below
                if PY3 and hasattr(os, 'listxattr'):
                    try:
                        src_has_acls = 'system.posix_acl_access' in os.listxattr(
                            src)
                    except Exception as e:
                        # assume unwanted ACLs by default
                        src_has_acls = True

                module.atomic_move(
                    b_mysrc,
                    dest,
                    unsafe_writes=module.params['unsafe_writes'])

                if PY3 and hasattr(os, 'listxattr') and platform.system(
                ) == 'Linux' and not remote_src:
                    # atomic_move used above to copy src into dest might, in some cases,
                    # use shutil.copy2 which in turn uses shutil.copystat.
                    # Since Python 3.3, shutil.copystat copies file extended attributes:
                    # https://docs.python.org/3/library/shutil.html#shutil.copystat
                    # os.listxattr (along with others) was added to handle the operation.

                    # This means that on Python 3 we are copying the extended attributes which includes
                    # the ACLs on some systems - further limited to Linux as the documentation above claims
                    # that the extended attributes are copied only on Linux. Also, os.listxattr is only
                    # available on Linux.

                    # If not remote_src, then the file was copied from the controller. In that
                    # case, any filesystem ACLs are artifacts of the copy rather than preservation
                    # of existing attributes. Get rid of them:

                    if src_has_acls:
                        # FIXME If dest has any default ACLs, there are not applied to src now because
                        # they were overridden by copystat. Should/can we do anything about this?
                        # 'system.posix_acl_default' in os.listxattr(os.path.dirname(b_dest))

                        try:
                            clear_facls(dest)
                        except ValueError as e:
                            if 'setfacl' in to_native(e):
                                # No setfacl so we're okay.  The controller couldn't have set a facl
                                # without the setfacl command
                                pass
                            else:
                                raise
                        except RuntimeError as e:
                            # setfacl failed.
                            if 'Operation not supported' in to_native(e):
                                # The file system does not support ACLs.
                                pass
                            else:
                                raise

            except (IOError, OSError):
                module.fail_json(msg="failed to copy: %s to %s" % (src, dest),
                                 traceback=traceback.format_exc())
        changed = True
    else:
        changed = False

    # If neither have checksums, both src and dest are directories.
    if checksum_src is None and checksum_dest is None:
        if remote_src and os.path.isdir(module.params['src']):
            b_src = to_bytes(module.params['src'],
                             errors='surrogate_or_strict')
            b_dest = to_bytes(module.params['dest'],
                              errors='surrogate_or_strict')

            if src.endswith(os.path.sep) and os.path.isdir(
                    module.params['dest']):
                diff_files_changed = copy_diff_files(b_src, b_dest, module)
                left_only_changed = copy_left_only(b_src, b_dest, module)
                common_dirs_changed = copy_common_dirs(b_src, b_dest, module)
                owner_group_changed = chown_recursive(b_dest, module)
                if diff_files_changed or left_only_changed or common_dirs_changed or owner_group_changed:
                    changed = True

            if src.endswith(
                    os.path.sep) and not os.path.exists(module.params['dest']):
                b_basename = to_bytes(os.path.basename(src),
                                      errors='surrogate_or_strict')
                b_dest = to_bytes(os.path.join(b_dest, b_basename),
                                  errors='surrogate_or_strict')
                b_src = to_bytes(os.path.join(module.params['src'], ""),
                                 errors='surrogate_or_strict')
                if not module.check_mode:
                    shutil.copytree(b_src, b_dest, symlinks=not (local_follow))
                chown_recursive(dest, module)
                changed = True

            if not src.endswith(os.path.sep) and os.path.isdir(
                    module.params['dest']):
                b_basename = to_bytes(os.path.basename(src),
                                      errors='surrogate_or_strict')
                b_dest = to_bytes(os.path.join(b_dest, b_basename),
                                  errors='surrogate_or_strict')
                b_src = to_bytes(os.path.join(module.params['src'], ""),
                                 errors='surrogate_or_strict')
                if not module.check_mode and not os.path.exists(b_dest):
                    shutil.copytree(b_src, b_dest, symlinks=not (local_follow))
                    changed = True
                    chown_recursive(dest, module)
                if module.check_mode and not os.path.exists(b_dest):
                    changed = True
                if os.path.exists(b_dest):
                    diff_files_changed = copy_diff_files(b_src, b_dest, module)
                    left_only_changed = copy_left_only(b_src, b_dest, module)
                    common_dirs_changed = copy_common_dirs(
                        b_src, b_dest, module)
                    owner_group_changed = chown_recursive(b_dest, module)
                    if diff_files_changed or left_only_changed or common_dirs_changed or owner_group_changed:
                        changed = True

            if not src.endswith(os.path.sep) and not os.path.exists(
                    module.params['dest']):
                b_basename = to_bytes(os.path.basename(module.params['src']),
                                      errors='surrogate_or_strict')
                b_dest = to_bytes(os.path.join(b_dest, b_basename),
                                  errors='surrogate_or_strict')
                if not module.check_mode and not os.path.exists(b_dest):
                    os.makedirs(b_dest)
                    b_src = to_bytes(os.path.join(module.params['src'], ""),
                                     errors='surrogate_or_strict')
                    diff_files_changed = copy_diff_files(b_src, b_dest, module)
                    left_only_changed = copy_left_only(b_src, b_dest, module)
                    common_dirs_changed = copy_common_dirs(
                        b_src, b_dest, module)
                    owner_group_changed = chown_recursive(b_dest, module)
                    if diff_files_changed or left_only_changed or common_dirs_changed or owner_group_changed:
                        changed = True
                if module.check_mode and not os.path.exists(b_dest):
                    changed = True

    res_args = dict(dest=dest,
                    src=src,
                    md5sum=md5sum_src,
                    checksum=checksum_src,
                    changed=changed)
    if backup_file:
        res_args['backup_file'] = backup_file

    if not module.check_mode:
        file_args = module.load_file_common_arguments(module.params, path=dest)
        res_args['changed'] = module.set_fs_attributes_if_different(
            file_args, res_args['changed'])

    module.exit_json(**res_args)
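The validate handling above substitutes the source file path into the command via %s and refuses to move the file into place on a non-zero exit. A minimal sketch of that contract outside Ansible (hypothetical run_validate; 'visudo -cf %s' is the classic use case):

import subprocess

def run_validate(validate_cmd, path):
    """Run a validation command with %s replaced by the file path."""
    if "%s" not in validate_cmd:
        raise ValueError("validate must contain %%s: %s" % validate_cmd)
    proc = subprocess.run(validate_cmd % path, shell=True,
                          capture_output=True, text=True)
    # non-zero exit means the candidate file must not replace the original
    return proc.returncode == 0, proc.stdout, proc.stderr

# ok, out, err = run_validate("visudo -cf %s", "/tmp/sudoers.new")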
Пример #28
def main():

    module_args = dict(host=dict(type='str', required=True),
                       port=dict(type='str', required=True),
                       user=dict(type='str', required=True),
                       password=dict(type='str', required=True, no_log=True),
                       project=dict(type='str', required=True),
                       dest=dict(type='path', required=True),
                       assets_to_keep=dict(type='list', required=True),
                       cert=dict(type='path', required=False),
                       unsafe_writes=dict(type='bool',
                                          required=False,
                                          default=False))

    module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)

    result = dict(changed=False, asset_count=0)

    # if the user is running this module in check mode we do not want to
    # make any changes to the environment; just report the current state
    # with no modifications (a bare `return` here would emit no JSON at all)
    if module.check_mode:
        module.exit_json(**result)

    # Setup REST API connectivity via module_utils.igc_rest class
    iarest = RestIA(module,
                    result,
                    username=module.params['user'],
                    password=module.params['password'],
                    host=module.params['host'],
                    port=module.params['port'],
                    cert=module.params['cert'])

    # Execute the retrieval
    xmlResults = iarest.getProjectDetails(module.params['project'])

    # Ensure search worked before proceeding
    if not xmlResults:
        module.fail_json(msg='Retrieval of IA project details failed',
                         **result)

    # Write temporary file with the full XML output to operate against
    try:
        tmpfd_full, tmpfile_full = tempfile.mkstemp()
        with os.fdopen(tmpfd_full, 'wb') as f:
            f.write(xmlResults)
    except IOError:
        module.fail_json(
            msg='Unable to create temporary file to output project details',
            **result)

    assets_to_keep = module.params['assets_to_keep']
    ia_xml = IAHandler(module, result, tmpfile_full)

    drd_to_keep = []
    drsd_to_keep = []
    dr_to_keep = []
    drs_to_keep = []
    m_to_keep = []

    # Group the assets to keep by their asset type
    for asset in assets_to_keep:
        if asset['type'] == 'data_rule_definition':
            drd_to_keep.append(asset['name'])
        elif asset['type'] == 'data_rule_set_definition':
            drsd_to_keep.append(asset['name'])
        elif asset['type'] == 'data_rule':
            dr_to_keep.append(asset['name'])
        elif asset['type'] == 'data_rule_set':
            drs_to_keep.append(asset['name'])
        elif asset['type'] == 'metric':
            m_to_keep.append(asset['name'])

    # Remove executables first, since they're nested in the definitions
    for e_exec in ia_xml.getDataRules():
        name = ia_xml.getName(e_exec)
        if name not in dr_to_keep and name not in drs_to_keep:
            ia_xml.dropAsset(e_exec)
        else:
            result['asset_count'] += 1
    for e_metric in ia_xml.getMetrics():
        if ia_xml.getName(e_metric) not in m_to_keep:
            ia_xml.dropAsset(e_metric)
        else:
            result['asset_count'] += 1

    # Only remove definitions that have no executables AND are not keepers
    for e_defn in ia_xml.getDataRuleDefinitions():
        e_executables = ia_xml.getExecutables(e_defn)
        if ia_xml.getName(e_defn) not in drd_to_keep and len(
                e_executables) == 0:
            ia_xml.dropAsset(e_defn)
        else:
            result['asset_count'] += 1
    for e_defn in ia_xml.getRuleSetDefinitions():
        e_executables = ia_xml.getExecutables(e_defn)
        if ia_xml.getName(e_defn) not in drsd_to_keep and len(
                e_executables) == 0:
            ia_xml.dropAsset(e_defn)
        else:
            result['asset_count'] += 1

    # Remove the interim temporary file
    os.unlink(tmpfile_full)

    # Write a new temporary file with the revised XML output,
    # and then move it to the specified dest location
    try:
        tmpfd, tmpfile = tempfile.mkstemp()
        # writeCustomizedXML writes via the path, so just close the unused fd
        os.close(tmpfd)
        ia_xml.writeCustomizedXML(tmpfile)
    except IOError:
        module.fail_json(
            msg='Unable to create temporary file to output project details',
            **result)

    # Checksumming to identify change...
    checksum_src = module.sha1(tmpfile)
    checksum_dest = None
    dest = module.params['dest']
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    if os.access(b_dest, os.R_OK):
        checksum_dest = module.sha1(dest)

    # If the file does not already exist and/or the checksums differ,
    # move the new file over the old one and mark it as changed; otherwise
    # leave the original file alone (deleting the tmpfile) and report no change
    if checksum_src != checksum_dest:
        module.atomic_move(tmpfile,
                           to_native(os.path.realpath(b_dest),
                                     errors='surrogate_or_strict'),
                           unsafe_writes=module.params['unsafe_writes'])
        result['changed'] = True
    else:
        os.unlink(tmpfile)

    module.exit_json(**result)
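The five parallel *_to_keep lists above could equally be built with a single defaultdict keyed by asset type; a sketch of that equivalent (illustrative only):

from collections import defaultdict

assets_to_keep = [
    {'type': 'data_rule', 'name': 'NullCheck'},
    {'type': 'metric', 'name': 'RowCount'},
]

names_by_type = defaultdict(list)
for asset in assets_to_keep:
    names_by_type[asset['type']].append(asset['name'])

# Unknown types simply yield empty lists, so the later membership
# tests need no special-casing.
print(dict(names_by_type))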
Пример #29
def main():
    argument_spec = url_argument_spec()
    argument_spec.update(
        url=dict(type='str', required=True),
        dest=dict(type='path', required=True),
        backup=dict(type='bool'),
        sha256sum=dict(type='str', default=''),
        checksum=dict(type='str', default=''),
        timeout=dict(type='int', default=10),
        headers=dict(type='raw'),
        tmp_dest=dict(type='path'),
    )

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=argument_spec,
        add_file_common_args=True,
        supports_check_mode=True,
        mutually_exclusive=[['checksum', 'sha256sum']],
    )

    url = module.params['url']
    dest = module.params['dest']
    backup = module.params['backup']
    force = module.params['force']
    sha256sum = module.params['sha256sum']
    checksum = module.params['checksum']
    use_proxy = module.params['use_proxy']
    timeout = module.params['timeout']
    tmp_dest = module.params['tmp_dest']

    # Parse headers to dict
    if isinstance(module.params['headers'], dict):
        headers = module.params['headers']
    elif module.params['headers']:
        try:
            headers = dict(
                item.split(':', 1)
                for item in module.params['headers'].split(','))
            module.deprecate(
                'Supplying `headers` as a string is deprecated. Please use dict/hash format for `headers`',
                version='2.10')
        except Exception:
            module.fail_json(
                msg=
                "The string representation for the `headers` parameter requires a key:value,key:value syntax to be properly parsed."
            )
    else:
        headers = None

    dest_is_dir = os.path.isdir(dest)
    last_mod_time = None

    # workaround for usage of deprecated sha256sum parameter
    if sha256sum:
        checksum = 'sha256:%s' % (sha256sum)

    # checksum specified, parse for algorithm and checksum
    if checksum:
        try:
            algorithm, checksum = checksum.rsplit(':', 1)
            # Remove any non-alphanumeric characters, including the infamous
            # Unicode zero-width space
            checksum = re.sub(r'\W+', '', checksum).lower()
            # Ensure the checksum portion is a hexdigest
            int(checksum, 16)
        except ValueError:
            module.fail_json(
                msg=
                "The checksum parameter has to be in format <algorithm>:<checksum>"
            )

    if not dest_is_dir and os.path.exists(dest):
        checksum_mismatch = False

        # If the download is not forced and there is a checksum, allow
        # checksum match to skip the download.
        if not force and checksum != '':
            destination_checksum = module.digest_from_file(dest, algorithm)

            if checksum == destination_checksum:
                module.exit_json(msg="file already exists",
                                 dest=dest,
                                 url=url,
                                 changed=False)

            checksum_mismatch = True

        # Not forcing redownload, unless checksum does not match
        if not force and not checksum_mismatch:
            # allow file attribute changes
            module.params['path'] = dest
            file_args = module.load_file_common_arguments(module.params)
            file_args['path'] = dest
            changed = module.set_fs_attributes_if_different(file_args, False)

            if changed:
                module.exit_json(
                    msg="file already exists but file attributes changed",
                    dest=dest,
                    url=url,
                    changed=changed)
            module.exit_json(msg="file already exists",
                             dest=dest,
                             url=url,
                             changed=changed)

        # If the file already exists, prepare the last modified time for the
        # request.
        mtime = os.path.getmtime(dest)
        last_mod_time = datetime.datetime.utcfromtimestamp(mtime)

        # If the checksum does not match we have to force the download
        # because last_mod_time may be newer than on remote
        if checksum_mismatch:
            force = True

    # download to tmpsrc
    tmpsrc, info = url_get(module, url, dest, use_proxy, last_mod_time, force,
                           timeout, headers, tmp_dest)

    # Now the request has completed, we can finally generate the final
    # destination file name from the info dict.

    if dest_is_dir:
        filename = extract_filename_from_headers(info)
        if not filename:
            # Fall back to extracting the filename from the URL.
            # Pluck the URL from the info, since a redirect could have changed
            # it.
            filename = url_filename(info['url'])
        dest = os.path.join(dest, filename)

    checksum_src = None
    checksum_dest = None

    # If the remote URL exists, we're done with check mode
    if module.check_mode:
        os.remove(tmpsrc)
        res_args = dict(url=url,
                        dest=dest,
                        src=tmpsrc,
                        changed=True,
                        msg=info.get('msg', ''))
        module.exit_json(**res_args)

    # raise an error if there is no tmpsrc file
    if not os.path.exists(tmpsrc):
        # nothing to clean up: calling os.remove() on the missing tmpsrc
        # would itself raise and mask the failure below
        module.fail_json(msg="Request failed",
                         status_code=info['status'],
                         response=info['msg'])
    if not os.access(tmpsrc, os.R_OK):
        os.remove(tmpsrc)
        module.fail_json(msg="Source %s is not readable" % (tmpsrc))
    checksum_src = module.sha1(tmpsrc)

    # check if there is no dest file
    if os.path.exists(dest):
        # raise an error if copy has no permission on dest
        if not os.access(dest, os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" % (dest))
        if not os.access(dest, os.R_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not readable" % (dest))
        checksum_dest = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(dest)):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s does not exist" %
                             (os.path.dirname(dest)))
        if not os.access(os.path.dirname(dest), os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" %
                             (os.path.dirname(dest)))

    backup_file = None
    if checksum_src != checksum_dest:
        try:
            if backup:
                if os.path.exists(dest):
                    backup_file = module.backup_local(dest)
            module.atomic_move(tmpsrc, dest)
        except Exception as e:
            if os.path.exists(tmpsrc):
                os.remove(tmpsrc)
            module.fail_json(msg="failed to copy %s to %s: %s" %
                             (tmpsrc, dest, to_native(e)),
                             exception=traceback.format_exc())
        changed = True
    else:
        changed = False
        if os.path.exists(tmpsrc):
            os.remove(tmpsrc)

    if checksum != '':
        destination_checksum = module.digest_from_file(dest, algorithm)

        if checksum != destination_checksum:
            # The file at dest failed verification; remove it rather than
            # leave a bad artifact in place
            os.remove(dest)
            module.fail_json(
                msg="The checksum for %s did not match %s; it was %s." %
                (dest, checksum, destination_checksum))

    # allow file attribute changes
    module.params['path'] = dest
    file_args = module.load_file_common_arguments(module.params)
    file_args['path'] = dest
    changed = module.set_fs_attributes_if_different(file_args, changed)

    # Backwards compat only.  We'll return None on FIPS enabled systems
    try:
        md5sum = module.md5(dest)
    except ValueError:
        md5sum = None

    res_args = dict(url=url,
                    dest=dest,
                    src=tmpsrc,
                    md5sum=md5sum,
                    checksum_src=checksum_src,
                    checksum_dest=checksum_dest,
                    changed=changed,
                    msg=info.get('msg', ''),
                    status_code=info.get('status', ''))
    if backup_file:
        res_args['backup_file'] = backup_file

    # Mission complete
    module.exit_json(**res_args)
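For reference, the core idiom in the example above can be distilled outside Ansible: download to a temporary file, compare checksums, and only then move the result into place atomically. The sketch below uses only the standard library; the helper names (sha1_of, replace_if_different) are illustrative, and os.replace stands in for AnsibleModule.atomic_move, which additionally handles permissions and SELinux context.

import hashlib
import os


def sha1_of(path):
    # Hash in chunks so large downloads are not read fully into memory
    digest = hashlib.sha1()
    with open(path, 'rb') as handle:
        for chunk in iter(lambda: handle.read(65536), b''):
            digest.update(chunk)
    return digest.hexdigest()


def replace_if_different(tmpsrc, dest):
    # Identical content: discard the temporary copy and report no change
    if os.path.exists(dest) and sha1_of(tmpsrc) == sha1_of(dest):
        os.remove(tmpsrc)
        return False
    # os.replace is atomic when tmpsrc and dest share a filesystem
    os.replace(tmpsrc, dest)
    return True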
Example #30
def main():

    module = AnsibleModule(
        argument_spec=dict(
            name=dict(type='str', required=True),
            type=dict(type='str', required=True, choices=VALID_TYPES),
            control=dict(type='str', required=True),
            module_path=dict(type='str', required=True),
            new_type=dict(type='str', choices=VALID_TYPES),
            new_control=dict(type='str'),
            new_module_path=dict(type='str'),
            module_arguments=dict(type='list', elements='str'),
            state=dict(type='str',
                       default='updated',
                       choices=[
                           'absent', 'after', 'args_absent', 'args_present',
                           'before', 'updated'
                       ]),
            path=dict(type='path', default='/etc/pam.d'),
            backup=dict(type='bool', default=False),
        ),
        supports_check_mode=True,
        required_if=[
            ("state", "args_present", ["module_arguments"]),
            ("state", "args_absent", ["module_arguments"]),
            ("state", "before", ["new_control", "new_type",
                                 "new_module_path"]),
            ("state", "after", ["new_control", "new_type", "new_module_path"]),
        ],
    )
    content = ''
    fname = os.path.join(module.params["path"], module.params["name"])

    # Open the file and read the content or fail
    try:
        with open(fname, 'r') as service_file_obj:
            content = service_file_obj.read()
    except IOError as e:
        # If unable to read the file, fail out
        module.fail_json(
            msg='Unable to open/read PAM module file %s with error %s.' %
            (fname, str(e)))

    # Assuming we didn't fail, create the service
    service = PamdService(content)
    # Set the action
    action = module.params['state']

    changes = 0

    # Take action
    if action == 'updated':
        changes = service.update_rule(
            module.params['type'], module.params['control'],
            module.params['module_path'], module.params['new_type'],
            module.params['new_control'], module.params['new_module_path'],
            module.params['module_arguments'])
    elif action == 'before':
        changes = service.insert_before(
            module.params['type'], module.params['control'],
            module.params['module_path'], module.params['new_type'],
            module.params['new_control'], module.params['new_module_path'],
            module.params['module_arguments'])
    elif action == 'after':
        changes = service.insert_after(
            module.params['type'], module.params['control'],
            module.params['module_path'], module.params['new_type'],
            module.params['new_control'], module.params['new_module_path'],
            module.params['module_arguments'])
    elif action == 'args_absent':
        changes = service.remove_module_arguments(
            module.params['type'], module.params['control'],
            module.params['module_path'], module.params['module_arguments'])
    elif action == 'args_present':
        bracketed_args = [arg for arg in parse_module_arguments(
            module.params['module_arguments']) if arg.startswith("[")]
        if bracketed_args:
            module.fail_json(
                msg="Unable to process bracketed '[' complex arguments with "
                    "'args_present'. Please use 'updated'.")

        changes = service.add_module_arguments(
            module.params['type'], module.params['control'],
            module.params['module_path'], module.params['module_arguments'])
    elif action == 'absent':
        changes = service.remove(module.params['type'],
                                 module.params['control'],
                                 module.params['module_path'])

    valid, msg = service.validate()

    # If the module is not valid (meaning one of the rules is invalid), we will fail
    if not valid:
        module.fail_json(msg=msg)

    result = dict(
        changed=(changes > 0),
        change_count=changes,
        backupdest='',
    )

    # If not check mode and something changed, backup the original if necessary then write out the file or fail
    if not module.check_mode and result['changed']:
        # First, create a backup if desired.
        if module.params['backup']:
            result['backupdest'] = module.backup_local(fname)
        try:
            temp_file = NamedTemporaryFile(mode='w',
                                           dir=module.tmpdir,
                                           delete=False)
            # Write through the NamedTemporaryFile handle itself instead of
            # re-opening temp_file.name, which would leak the first handle
            with temp_file as fd:
                fd.write(str(service))
        except IOError as e:
            module.fail_json(msg='Unable to create temporary file: %s' %
                             to_native(e))

        module.atomic_move(temp_file.name, os.path.realpath(fname))

    module.exit_json(**result)
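The write-then-move step above is a common pattern: render the new contents into a temporary file, then let atomic_move promote it so readers never observe a half-written /etc/pam.d entry. A minimal standard-library sketch of the same idea follows; write_atomically is a hypothetical helper, and the temp file is created next to the target so the final rename stays on one filesystem.

import os
import tempfile


def write_atomically(path, text):
    fd, tmp_path = tempfile.mkstemp(dir=os.path.dirname(path) or '.')
    try:
        with os.fdopen(fd, 'w') as handle:
            handle.write(text)
        os.replace(tmp_path, path)  # atomic rename within one filesystem
    except Exception:
        if os.path.exists(tmp_path):
            os.unlink(tmp_path)  # do not leave the temp file behind
        raise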
Example #31
def main():

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(required=True, type='path'),
            delimiter=dict(required=False),
            dest=dict(required=True, type='path'),
            backup=dict(default=False, type='bool'),
            remote_src=dict(default=False, type='bool'),
            regexp=dict(required=False),
            ignore_hidden=dict(default=False, type='bool'),
            validate=dict(required=False, type='str'),
        ),
        add_file_common_args=True,
    )

    changed = False
    path_hash = None
    dest_hash = None
    src = module.params['src']
    dest = module.params['dest']
    backup = module.params['backup']
    delimiter = module.params['delimiter']
    regexp = module.params['regexp']
    compiled_regexp = None
    ignore_hidden = module.params['ignore_hidden']
    validate = module.params.get('validate', None)

    result = dict(src=src, dest=dest)
    if not os.path.exists(src):
        module.fail_json(msg="Source (%s) does not exist" % src)

    if not os.path.isdir(src):
        module.fail_json(msg="Source (%s) is not a directory" % src)

    if regexp is not None:
        try:
            compiled_regexp = re.compile(regexp)
        except re.error as e:
            module.fail_json(msg="Invalid Regexp (%s) in \"%s\"" %
                             (to_native(e), regexp))

    if validate and "%s" not in validate:
        module.fail_json(msg="validate must contain %%s: %s" % validate)

    path = assemble_from_fragments(src, delimiter, compiled_regexp,
                                   ignore_hidden, module.tmpdir)
    path_hash = module.sha1(path)
    result['checksum'] = path_hash

    # Backwards compat.  This won't return data if FIPS mode is active
    try:
        pathmd5 = module.md5(path)
    except ValueError:
        pathmd5 = None
    result['md5sum'] = pathmd5

    if os.path.exists(dest):
        dest_hash = module.sha1(dest)

    if path_hash != dest_hash:
        if validate:
            (rc, out, err) = module.run_command(validate % path)
            result['validation'] = dict(rc=rc, stdout=out, stderr=err)
            if rc != 0:
                cleanup(path)
                module.fail_json(msg="failed to validate: rc:%s error:%s" %
                                 (rc, err))
        if backup and dest_hash is not None:
            result['backup_file'] = module.backup_local(dest)

        module.atomic_move(path,
                           dest,
                           unsafe_writes=module.params['unsafe_writes'])
        changed = True

    cleanup(path, result)

    # handle file permissions
    file_args = module.load_file_common_arguments(module.params)
    result['changed'] = module.set_fs_attributes_if_different(
        file_args, changed)

    # Mission complete
    result['msg'] = "OK"
    module.exit_json(**result)
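The validate flow above is worth noting: the assembled file is checked by an external command (with %s substituted by the temporary path) before it is allowed to replace dest, so a broken result never lands in place. A distilled sketch under the same assumptions follows; validated_move is a hypothetical name, and shlex.split mirrors how run_command tokenizes a string command without invoking a shell.

import os
import shlex
import subprocess


def validated_move(src, dest, validate_cmd=None):
    if validate_cmd:
        # The command template must contain %s for the candidate file path
        rc = subprocess.call(shlex.split(validate_cmd % src))
        if rc != 0:
            os.remove(src)
            raise RuntimeError('validation failed with rc=%d' % rc)
    os.replace(src, dest)  # promote only a file that passed validation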
Example #32
class TomcatUserRun(object):
    def __init__(self):
        self.changed = False

        self.module = AnsibleModule(
            argument_spec=dict(state=dict(default='present',
                                          choices=['present', 'absent']),
                               name=dict(required=True),
                               catalina_home=dict(type='path'),
                               xml_path=dict(type='path'),
                               roles=dict(),
                               password=dict(no_log=True),
                               append=dict(type='bool', default=False),
                               update_password=dict(
                                   default='always',
                                   choices=['always', 'on_create']),
                               obfuscate_password=dict(type='bool',
                                                       default=True)),
            mutually_exclusive=[['catalina_home', 'xml_path']],
            required_one_of=[['catalina_home', 'xml_path']],
            supports_check_mode=True)

    def go(self):
        state = self.module.params['state']
        xml_path = self.module.params['xml_path']

        self.name = self.module.params['name']
        self.roles = self.module.params['roles']
        self.password = self.module.params['password']
        self.append = self.module.params['append']
        self.update_password = self.module.params['update_password']

        # At the moment this gets messed up by the exporter
        # if self.password and self.module.params['obfuscate_password']:
        #     self.password = obfuscate(self.password);

        if not xml_path:
            xml_path = os.path.join(self.module.params['catalina_home'],
                                    'conf', 'tomcat-users.xml')

        # Parse the XML
        self.dom = xml.dom.minidom.parse(xml_path)
        self.root = self.dom.documentElement

        # Find the user entry
        self.user_res = [
            node for node in self.root.getElementsByTagName("user")
            if node.getAttribute("username") == self.name
        ]

        if state == 'present':
            self.ensure_present()
        elif state == 'absent':
            self.ensure_absent()
        else:
            self.module.fail_json(msg="Invalid state: " + state)

        # Save the XML only if it's been changed
        if self.changed and not self.module.check_mode:
            tmpfd, tmpfile = tempfile.mkstemp()
            with os.fdopen(tmpfd, XML_WRITE_MODE) as out:
                self.root.writexml(out)
            self.module.atomic_move(tmpfile, xml_path)

        self.module.exit_json(changed=self.changed)

    def ensure_absent(self):
        for node in self.user_res:
            node.parentNode.removeChild(node)
            self.changed = True

    def ensure_present(self):
        if len(self.user_res) > 0:
            self.user_node = self.user_res[0]
        else:
            self.user_node = self.dom.createElement("user")
            self.root.appendChild(self.user_node)
            self.user_node.setAttribute("username", self.name)
            if self.password:
                self.user_node.setAttribute("password", self.password)
            self.changed = True

        self.ensure_roles()
        self.ensure_password()

    def ensure_roles(self):
        has_roles = role_list(self.user_node.getAttribute("roles"))
        expect_roles = role_list(self.roles)

        if self.append:
            expect_roles = sorted(list(set(has_roles + expect_roles)))

        if expect_roles != has_roles:
            self.changed = True
            self.user_node.setAttribute("roles", ",".join(expect_roles))

    def ensure_password(self):
        # 'on_create' is handled when the user node is first created, so only
        # rewrite an existing password when update_password is 'always'
        if self.password and self.update_password == 'always':
            has_pass = self.user_node.getAttribute("password")
            if has_pass != self.password:
                self.changed = True
                self.user_node.setAttribute("password", self.password)
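This excerpt defines the class but never invokes it; a typical entry point, mirroring the main() functions in the other examples, might look like the following (hypothetical, not part of the original):

def main():
    TomcatUserRun().go()


if __name__ == '__main__':
    main()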