def main():

    module = AnsibleModule(
        argument_spec=dict(
            path=dict(type='path', required=True, aliases=['dest']),
            section=dict(type='str', required=True),
            option=dict(type='str'),
            value=dict(type='str'),
            backup=dict(type='bool', default=False),
            state=dict(type='str', default='present', choices=['absent', 'present']),
            no_extra_spaces=dict(type='bool', default=False),
            allow_no_value=dict(type='bool', default=False),
            create=dict(type='bool', default=True)
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    path = module.params['path']
    section = module.params['section']
    option = module.params['option']
    value = module.params['value']
    state = module.params['state']
    backup = module.params['backup']
    no_extra_spaces = module.params['no_extra_spaces']
    allow_no_value = module.params['allow_no_value']
    create = module.params['create']

    (changed, backup_file, diff, msg) = do_ini(module, path, section, option, value, state, backup, no_extra_spaces, create, allow_no_value)

    if not module.check_mode and os.path.exists(path):
        file_args = module.load_file_common_arguments(module.params)
        changed = module.set_fs_attributes_if_different(file_args, changed)

    results = dict(
        changed=changed,
        diff=diff,
        msg=msg,
        path=path,
    )
    if backup_file is not None:
        results['backup_file'] = backup_file

    # Mission complete
    module.exit_json(**results)
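
# do_ini() is defined elsewhere in the module. A minimal sketch of the
# present/absent semantics it implements, using only the standard library
# (an assumption for illustration -- the real helper edits the file
# textually and preserves formatting, which configparser does not):
import configparser

def ini_set(path, section, option=None, value=None, state='present'):
    parser = configparser.ConfigParser(allow_no_value=True)
    parser.read(path)
    if state == 'present':
        if not parser.has_section(section):
            parser.add_section(section)
        parser.set(section, option, value)
    elif option is None:
        parser.remove_section(section)
    elif parser.has_section(section):
        parser.remove_option(section, option)
    with open(path, 'w') as ini_fh:
        parser.write(ini_fh)
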
def main():
    argument_spec = url_argument_spec()
    argument_spec.update(
        dest=dict(type='path'),
        url_username=dict(type='str', aliases=['user']),
        url_password=dict(type='str', aliases=['password'], no_log=True),
        body=dict(type='raw'),
        body_format=dict(type='str',
                         default='raw',
                         choices=['form-urlencoded', 'json', 'raw']),
        src=dict(type='path'),
        method=dict(type='str', default='GET'),
        return_content=dict(type='bool', default=False),
        follow_redirects=dict(
            type='str',
            default='safe',
            choices=['all', 'no', 'none', 'safe', 'urllib2', 'yes']),
        creates=dict(type='path'),
        removes=dict(type='path'),
        status_code=dict(type='list', default=[200]),
        timeout=dict(type='int', default=30),
        headers=dict(type='dict', default={}),
        unix_socket=dict(type='path'),
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        add_file_common_args=True,
        mutually_exclusive=[['body', 'src']],
    )

    if module.params.get('thirsty'):
        module.deprecate(
            'The alias "thirsty" has been deprecated and will be removed, use "force" instead',
            version='2.13')

    url = module.params['url']
    body = module.params['body']
    body_format = module.params['body_format'].lower()
    method = module.params['method'].upper()
    dest = module.params['dest']
    return_content = module.params['return_content']
    creates = module.params['creates']
    removes = module.params['removes']
    status_code = [int(x) for x in list(module.params['status_code'])]
    socket_timeout = module.params['timeout']

    dict_headers = module.params['headers']

    if not re.match('^[A-Z]+$', method):
        module.fail_json(
            msg=
            "Parameter 'method' needs to be a single word in uppercase, like GET or POST."
        )

    if body_format == 'json':
        # Encode the body unless it's a string, in which case assume it is pre-formatted JSON
        if not isinstance(body, string_types):
            body = json.dumps(body)
        if 'content-type' not in [header.lower() for header in dict_headers]:
            dict_headers['Content-Type'] = 'application/json'
    elif body_format == 'form-urlencoded':
        if not isinstance(body, string_types):
            try:
                body = form_urlencoded(body)
            except ValueError as e:
                module.fail_json(
                    msg='failed to parse body as form_urlencoded: %s' %
                    to_native(e),
                    elapsed=0)
        if 'content-type' not in [header.lower() for header in dict_headers]:
            dict_headers['Content-Type'] = 'application/x-www-form-urlencoded'

    if creates is not None:
        # Do not run the request if 'creates' points at a file that
        # already exists.  This allows idempotence of uri executions.
        if os.path.exists(creates):
            module.exit_json(stdout="skipped, since '%s' exists" % creates,
                             changed=False)

    if removes is not None:
        # Do not run the request if 'removes' points at a file that
        # does not exist.  This allows idempotence of uri executions.
        if not os.path.exists(removes):
            module.exit_json(stdout="skipped, since '%s' does not exist" %
                             removes,
                             changed=False)

    # Make the request
    start = datetime.datetime.utcnow()
    resp, content, dest = uri(module, url, dest, body, body_format, method,
                              dict_headers, socket_timeout)
    resp['elapsed'] = (datetime.datetime.utcnow() - start).seconds
    resp['status'] = int(resp['status'])
    resp['changed'] = False

    # Write the file out if requested
    if dest is not None:
        if resp['status'] in status_code and resp['status'] != 304:
            write_file(module, url, dest, content, resp)
            # allow file attribute changes
            resp['changed'] = True
            module.params['path'] = dest
            file_args = module.load_file_common_arguments(module.params)
            file_args['path'] = dest
            resp['changed'] = module.set_fs_attributes_if_different(
                file_args, resp['changed'])
        resp['path'] = dest

    # Transmogrify the headers, replacing '-' with '_', since variables don't
    # work with dashes.
    # In python3, the headers are title cased.  Lowercase them to be
    # compatible with the python2 behaviour.
    uresp = {}
    for key, value in iteritems(resp):
        ukey = key.replace("-", "_").lower()
        uresp[ukey] = value

    if 'location' in uresp:
        uresp['location'] = absolute_location(url, uresp['location'])

    # Default content_encoding to try
    content_encoding = 'utf-8'
    if 'content_type' in uresp:
        # Handle multiple Content-Type headers
        charsets = []
        content_types = []
        for value in uresp['content_type'].split(','):
            ct, params = cgi.parse_header(value)
            if ct not in content_types:
                content_types.append(ct)
            if 'charset' in params:
                if params['charset'] not in charsets:
                    charsets.append(params['charset'])

        if content_types:
            content_type = content_types[0]
            if len(content_types) > 1:
                module.warn(
                    'Received multiple conflicting Content-Type values (%s), using %s'
                    % (', '.join(content_types), content_type))
        if charsets:
            content_encoding = charsets[0]
            if len(charsets) > 1:
                module.warn(
                    'Received multiple conflicting charset values (%s), using %s'
                    % (', '.join(charsets), content_encoding))

        u_content = to_text(content, encoding=content_encoding)
        if any(candidate in content_type for candidate in JSON_CANDIDATES):
            try:
                js = json.loads(u_content)
                uresp['json'] = js
            except Exception:
                if PY2:
                    # Avoid false positive traceback in fail_json() on Python 2
                    sys.exc_clear()
    else:
        u_content = to_text(content, encoding=content_encoding)

    if resp['status'] not in status_code:
        uresp['msg'] = 'Status code was %s and not %s: %s' % (
            resp['status'], status_code, uresp.get('msg', ''))
        module.fail_json(content=u_content, **uresp)
    elif return_content:
        module.exit_json(content=u_content, **uresp)
    else:
        module.exit_json(**uresp)
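
# A small standalone demonstration of the header handling above: keys
# become usable as Ansible variables ('-' replaced with '_', lowercased)
# and the charset is pulled out of the Content-Type header with
# cgi.parse_header, mirroring the content_encoding logic in main():
import cgi

headers = {'Content-Type': 'application/json; charset=ISO-8859-1'}
uresp_demo = dict((k.replace('-', '_').lower(), v) for k, v in headers.items())
ct, ct_params = cgi.parse_header(uresp_demo['content_type'])
print(ct, ct_params.get('charset', 'utf-8'))  # application/json ISO-8859-1
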
def main():
    argument_spec = dict(
        action=dict(type='str', default='export', choices=['export', 'parse']),
        other_certificates=dict(type='list', elements='path', aliases=['ca_certificates']),
        certificate_path=dict(type='path'),
        force=dict(type='bool', default=False),
        friendly_name=dict(type='str', aliases=['name']),
        iter_size=dict(type='int', default=2048),
        maciter_size=dict(type='int', default=1),
        passphrase=dict(type='str', no_log=True),
        path=dict(type='path', required=True),
        privatekey_passphrase=dict(type='str', no_log=True),
        privatekey_path=dict(type='path'),
        state=dict(type='str', default='present', choices=['absent', 'present']),
        src=dict(type='path'),
        backup=dict(type='bool', default=False),
    )

    required_if = [
        ['action', 'parse', ['src']],
    ]

    module = AnsibleModule(
        add_file_common_args=True,
        argument_spec=argument_spec,
        required_if=required_if,
        supports_check_mode=True,
    )

    if not pyopenssl_found:
        module.fail_json(msg=missing_required_lib('pyOpenSSL'), exception=PYOPENSSL_IMP_ERR)

    base_dir = os.path.dirname(module.params['path']) or '.'
    if not os.path.isdir(base_dir):
        module.fail_json(
            name=base_dir,
            msg="The directory '%s' does not exist or the path is not a directory" % base_dir
        )

    try:
        pkcs12 = Pkcs(module)
        changed = False

        if module.params['state'] == 'present':
            if module.check_mode:
                result = pkcs12.dump()
                result['changed'] = module.params['force'] or not pkcs12.check(module)
                module.exit_json(**result)

            if not pkcs12.check(module, perms_required=False) or module.params['force']:
                if module.params['action'] == 'export':
                    if not module.params['friendly_name']:
                        module.fail_json(msg='Friendly_name is required')
                    pkcs12_content = pkcs12.generate(module)
                    pkcs12.write(module, pkcs12_content, 0o600)
                    changed = True
                else:
                    pkey, cert, other_certs, friendly_name = pkcs12.parse()
                    dump_content = '%s%s%s' % (to_native(pkey), to_native(cert), to_native(b''.join(other_certs)))
                    pkcs12.write(module, to_bytes(dump_content))

            file_args = module.load_file_common_arguments(module.params)
            if module.set_fs_attributes_if_different(file_args, changed):
                changed = True
        else:
            if module.check_mode:
                result = pkcs12.dump()
                result['changed'] = os.path.exists(module.params['path'])
                module.exit_json(**result)

            if os.path.exists(module.params['path']):
                pkcs12.remove(module)
                changed = True

        result = pkcs12.dump()
        result['changed'] = changed
        if os.path.exists(module.params['path']):
            file_mode = "%04o" % stat.S_IMODE(os.stat(module.params['path']).st_mode)
            result['mode'] = file_mode

        module.exit_json(**result)
    except crypto_utils.OpenSSLObjectError as exc:
        module.fail_json(msg=to_native(exc))
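
# The 'mode' field reported above uses stat.S_IMODE, which masks off the
# file-type bits of st_mode and keeps only the permission bits. A
# standalone illustration (the path is hypothetical):
import os
import stat

def report_mode(path):
    return "%04o" % stat.S_IMODE(os.stat(path).st_mode)

# report_mode('/etc/hosts') -> '0644' on a typical system
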
def main():

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(type='path', required=True),
            delimiter=dict(type='str'),
            dest=dict(type='path', required=True),
            backup=dict(type='bool', default=False),
            remote_src=dict(type='bool', default=False),
            regexp=dict(type='str'),
            ignore_hidden=dict(type='bool', default=False),
            validate=dict(type='str'),
        ),
        add_file_common_args=True,
    )

    changed = False
    path_hash = None
    dest_hash = None
    src = module.params['src']
    dest = module.params['dest']
    backup = module.params['backup']
    delimiter = module.params['delimiter']
    regexp = module.params['regexp']
    compiled_regexp = None
    ignore_hidden = module.params['ignore_hidden']
    validate = module.params.get('validate', None)

    result = dict(src=src, dest=dest)
    if not os.path.exists(src):
        module.fail_json(msg="Source (%s) does not exist" % src)

    if not os.path.isdir(src):
        module.fail_json(msg="Source (%s) is not a directory" % src)

    if regexp is not None:
        try:
            compiled_regexp = re.compile(regexp)
        except re.error as e:
            module.fail_json(msg="Invalid Regexp (%s) in \"%s\"" %
                             (to_native(e), regexp))

    if validate and "%s" not in validate:
        module.fail_json(msg="validate must contain %%s: %s" % validate)

    path = assemble_from_fragments(src, delimiter, compiled_regexp,
                                   ignore_hidden, module.tmpdir)
    path_hash = module.sha1(path)
    result['checksum'] = path_hash

    # Backwards compat.  This won't return data if FIPS mode is active
    try:
        pathmd5 = module.md5(path)
    except ValueError:
        pathmd5 = None
    result['md5sum'] = pathmd5

    if os.path.exists(dest):
        dest_hash = module.sha1(dest)

    if path_hash != dest_hash:
        if validate:
            (rc, out, err) = module.run_command(validate % path)
            result['validation'] = dict(rc=rc, stdout=out, stderr=err)
            if rc != 0:
                cleanup(path)
                module.fail_json(msg="failed to validate: rc:%s error:%s" %
                                 (rc, err))
        if backup and dest_hash is not None:
            result['backup_file'] = module.backup_local(dest)

        module.atomic_move(path,
                           dest,
                           unsafe_writes=module.params['unsafe_writes'])
        changed = True

    cleanup(path, result)

    # handle file permissions
    file_args = module.load_file_common_arguments(module.params)
    result['changed'] = module.set_fs_attributes_if_different(
        file_args, changed)

    # Mission complete
    result['msg'] = "OK"
    module.exit_json(**result)
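
# assemble_from_fragments() is defined elsewhere in the module. Based on
# the parameters above, a rough sketch of its behaviour (an assumption,
# not the actual implementation): concatenate the fragment files in src
# in sorted order into a temp file, optionally filtering names by regexp,
# skipping hidden files, and writing a delimiter between fragments.
import os
import tempfile

def assemble_sketch(src_dir, delimiter=None, compiled_regexp=None,
                    ignore_hidden=False, tmpdir=None):
    fd, temp_path = tempfile.mkstemp(dir=tmpdir)
    first = True
    with os.fdopen(fd, 'wb') as out:
        for name in sorted(os.listdir(src_dir)):
            if ignore_hidden and name.startswith('.'):
                continue
            if compiled_regexp and not compiled_regexp.search(name):
                continue
            if delimiter is not None and not first:
                out.write(delimiter.encode() + b'\n')
            with open(os.path.join(src_dir, name), 'rb') as fragment:
                out.write(fragment.read())
            first = False
    return temp_path
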
def main():
    module = AnsibleModule(
        argument_spec=dict(
            group_id=dict(required=True),
            artifact_id=dict(required=True),
            version=dict(default=None),
            version_by_spec=dict(default=None),
            classifier=dict(default=''),
            extension=dict(default='jar'),
            repository_url=dict(default='https://repo1.maven.org/maven2'),
            username=dict(default=None, aliases=['aws_secret_key']),
            password=dict(default=None, no_log=True, aliases=['aws_secret_access_key']),
            headers=dict(type='dict'),
            force_basic_auth=dict(default=False, type='bool'),
            state=dict(default="present", choices=["present", "absent"]),  # TODO - Implement a "latest" state
            timeout=dict(default=10, type='int'),
            dest=dict(type="path", required=True),
            validate_certs=dict(required=False, default=True, type='bool'),
            keep_name=dict(required=False, default=False, type='bool'),
            verify_checksum=dict(required=False, default='download', choices=['never', 'download', 'change', 'always'])
        ),
        add_file_common_args=True,
        mutually_exclusive=([('version', 'version_by_spec')])
    )

    if not HAS_LXML_ETREE:
        module.fail_json(msg=missing_required_lib('lxml'), exception=LXML_ETREE_IMP_ERR)

    if module.params['version_by_spec'] and not HAS_SEMANTIC_VERSION:
        module.fail_json(msg=missing_required_lib('semantic_version'), exception=SEMANTIC_VERSION_IMP_ERR)

    repository_url = module.params["repository_url"]
    if not repository_url:
        repository_url = "https://repo1.maven.org/maven2"
    try:
        parsed_url = urlparse(repository_url)
    except AttributeError as e:
        module.fail_json(msg='url parsing went wrong %s' % e)

    local = parsed_url.scheme == "file"

    if parsed_url.scheme == 's3' and not HAS_BOTO:
        module.fail_json(msg=missing_required_lib('boto3', reason='when using s3:// repository URLs'),
                         exception=BOTO_IMP_ERR)

    group_id = module.params["group_id"]
    artifact_id = module.params["artifact_id"]
    version = module.params["version"]
    version_by_spec = module.params["version_by_spec"]
    classifier = module.params["classifier"]
    extension = module.params["extension"]
    headers = module.params['headers']
    state = module.params["state"]
    dest = module.params["dest"]
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    keep_name = module.params["keep_name"]
    verify_checksum = module.params["verify_checksum"]
    verify_download = verify_checksum in ['download', 'always']
    verify_change = verify_checksum in ['change', 'always']

    downloader = MavenDownloader(module, repository_url, local, headers)

    if not version_by_spec and not version:
        version = "latest"

    try:
        artifact = Artifact(group_id, artifact_id, version, version_by_spec, classifier, extension)
    except ValueError as e:
        module.fail_json(msg=e.args[0])

    changed = False
    prev_state = "absent"

    if dest.endswith(os.sep):
        b_dest = to_bytes(dest, errors='surrogate_or_strict')
        if not os.path.exists(b_dest):
            (pre_existing_dir, new_directory_list) = split_pre_existing_dir(dest)
            os.makedirs(b_dest)
            directory_args = module.load_file_common_arguments(module.params)
            directory_mode = module.params["directory_mode"]
            if directory_mode is not None:
                directory_args['mode'] = directory_mode
            else:
                directory_args['mode'] = None
            changed = adjust_recursive_directory_permissions(pre_existing_dir, new_directory_list, module, directory_args, changed)

    if os.path.isdir(b_dest):
        version_part = version
        if version == 'latest':
            version_part = downloader.find_latest_version_available(artifact)
        elif version_by_spec:
            version_part = downloader.find_version_by_spec(artifact)

        filename = "{artifact_id}{version_part}{classifier}.{extension}".format(
            artifact_id=artifact_id,
            version_part="-{0}".format(version_part) if keep_name else "",
            classifier="-{0}".format(classifier) if classifier else "",
            extension=extension
        )
        dest = posixpath.join(dest, filename)

        b_dest = to_bytes(dest, errors='surrogate_or_strict')

    if os.path.lexists(b_dest) and ((not verify_change) or not downloader.is_invalid_md5(dest, downloader.find_uri_for_artifact(artifact))):
        prev_state = "present"

    if prev_state == "absent":
        try:
            download_error = downloader.download(module.tmpdir, artifact, verify_download, b_dest)
            if download_error is None:
                changed = True
            else:
                module.fail_json(msg="Cannot retrieve the artifact to destination: " + download_error)
        except ValueError as e:
            module.fail_json(msg=e.args[0])

    module.params['dest'] = dest
    file_args = module.load_file_common_arguments(module.params)
    changed = module.set_fs_attributes_if_different(file_args, changed)
    if changed:
        module.exit_json(state=state, dest=dest, group_id=group_id, artifact_id=artifact_id, version=version, classifier=classifier,
                         extension=extension, repository_url=repository_url, changed=changed)
    else:
        module.exit_json(state=state, dest=dest, changed=changed)
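
# The destination filename logic above, extracted as a pure function for
# illustration (same format string as in main(); inputs hypothetical):
def artifact_filename(artifact_id, version_part, classifier, extension,
                      keep_name=False):
    return "{artifact_id}{version_part}{classifier}.{extension}".format(
        artifact_id=artifact_id,
        version_part="-{0}".format(version_part) if keep_name else "",
        classifier="-{0}".format(classifier) if classifier else "",
        extension=extension)

# artifact_filename('commons-io', '2.11.0', '', 'jar', keep_name=True)
# -> 'commons-io-2.11.0.jar'; with keep_name=False -> 'commons-io.jar'
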
def main():

    global module

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(type='path'),
            # used to handle 'dest is a directory' via template, a slight hack
            _original_basename=dict(type='str'),
            content=dict(type='str', no_log=True),
            dest=dict(type='path', required=True),
            backup=dict(type='bool', default=False),
            force=dict(type='bool', default=True, aliases=['thirsty']),
            validate=dict(type='str'),
            directory_mode=dict(type='raw'),
            remote_src=dict(type='bool'),
            local_follow=dict(type='bool'),
            checksum=dict(type='str'),
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    if module.params.get('thirsty'):
        module.deprecate(
            'The alias "thirsty" has been deprecated and will be removed, use "force" instead',
            version='2.13')

    src = module.params['src']
    b_src = to_bytes(src, errors='surrogate_or_strict')
    dest = module.params['dest']
    # Make sure we always have a directory component for later processing
    if os.path.sep not in dest:
        dest = '.{0}{1}'.format(os.path.sep, dest)
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    backup = module.params['backup']
    force = module.params['force']
    _original_basename = module.params.get('_original_basename', None)
    validate = module.params.get('validate', None)
    follow = module.params['follow']
    local_follow = module.params['local_follow']
    mode = module.params['mode']
    owner = module.params['owner']
    group = module.params['group']
    remote_src = module.params['remote_src']
    checksum = module.params['checksum']

    if not os.path.exists(b_src):
        module.fail_json(msg="Source %s not found" % (src))
    if not os.access(b_src, os.R_OK):
        module.fail_json(msg="Source %s not readable" % (src))

    # Preserve is usually handled in the action plugin but mode + remote_src has to be done on the
    # remote host
    if module.params['mode'] == 'preserve':
        module.params['mode'] = '0%03o' % stat.S_IMODE(os.stat(b_src).st_mode)
    mode = module.params['mode']

    checksum_dest = None

    if os.path.isfile(src):
        checksum_src = module.sha1(src)
    else:
        checksum_src = None

    # Backwards compat only.  This will be None in FIPS mode
    try:
        if os.path.isfile(src):
            md5sum_src = module.md5(src)
        else:
            md5sum_src = None
    except ValueError:
        md5sum_src = None

    changed = False

    if checksum and checksum_src != checksum:
        module.fail_json(
            msg=
            'Copied file does not match the expected checksum. Transfer failed.',
            checksum=checksum_src,
            expected_checksum=checksum)

    # Special handling for recursive copy - create intermediate dirs
    if _original_basename and dest.endswith(os.sep):
        dest = os.path.join(dest, _original_basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')
        dirname = os.path.dirname(dest)
        b_dirname = to_bytes(dirname, errors='surrogate_or_strict')
        if not os.path.exists(b_dirname):
            try:
                (pre_existing_dir,
                 new_directory_list) = split_pre_existing_dir(dirname)
            except AnsibleModuleError as e:
                e.results['msg'] += ' Could not copy to {0}'.format(dest)
                module.fail_json(**e.results)

            os.makedirs(b_dirname)
            directory_args = module.load_file_common_arguments(module.params)
            directory_mode = module.params["directory_mode"]
            if directory_mode is not None:
                directory_args['mode'] = directory_mode
            else:
                directory_args['mode'] = None
            adjust_recursive_directory_permissions(pre_existing_dir,
                                                   new_directory_list, module,
                                                   directory_args, changed)

    if os.path.isdir(b_dest):
        basename = os.path.basename(src)
        if _original_basename:
            basename = _original_basename
        dest = os.path.join(dest, basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')

    if os.path.exists(b_dest):
        if os.path.islink(b_dest) and follow:
            b_dest = os.path.realpath(b_dest)
            dest = to_native(b_dest, errors='surrogate_or_strict')
        if not force:
            module.exit_json(msg="file already exists",
                             src=src,
                             dest=dest,
                             changed=False)
        if os.access(b_dest, os.R_OK) and os.path.isfile(b_dest):
            checksum_dest = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(b_dest)):
            try:
                # os.path.exists() can return false in some
                # circumstances where the directory does not have
                # the execute bit for the current user set, in
                # which case the stat() call will raise an OSError
                os.stat(os.path.dirname(b_dest))
            except OSError as e:
                if "permission denied" in to_native(e).lower():
                    module.fail_json(
                        msg="Destination directory %s is not accessible" %
                        (os.path.dirname(dest)))
            module.fail_json(msg="Destination directory %s does not exist" %
                             (os.path.dirname(dest)))

    if not os.access(os.path.dirname(b_dest),
                     os.W_OK) and not module.params['unsafe_writes']:
        module.fail_json(msg="Destination %s not writable" %
                         (os.path.dirname(dest)))

    backup_file = None
    if checksum_src != checksum_dest or os.path.islink(b_dest):
        if not module.check_mode:
            try:
                if backup:
                    if os.path.exists(b_dest):
                        backup_file = module.backup_local(dest)
                # allow for conversion from symlink.
                if os.path.islink(b_dest):
                    os.unlink(b_dest)
                    open(b_dest, 'w').close()
                if validate:
                    # if we have a mode, make sure we set it on the temporary
                    # file source as some validations may require it
                    if mode is not None:
                        module.set_mode_if_different(src, mode, False)
                    if owner is not None:
                        module.set_owner_if_different(src, owner, False)
                    if group is not None:
                        module.set_group_if_different(src, group, False)
                    if "%s" not in validate:
                        module.fail_json(msg="validate must contain %%s: %s" %
                                         (validate))
                    (rc, out, err) = module.run_command(validate % src)
                    if rc != 0:
                        module.fail_json(msg="failed to validate",
                                         exit_status=rc,
                                         stdout=out,
                                         stderr=err)
                b_mysrc = b_src
                if remote_src and os.path.isfile(b_src):
                    _, b_mysrc = tempfile.mkstemp(dir=os.path.dirname(b_dest))

                    shutil.copyfile(b_src, b_mysrc)
                    try:
                        shutil.copystat(b_src, b_mysrc)
                    except OSError as err:
                        if err.errno == errno.ENOSYS and mode == "preserve":
                            module.warn("Unable to copy stats {0}".format(
                                to_native(b_src)))
                        else:
                            raise

                # might be needed below
                if PY3 and hasattr(os, 'listxattr'):
                    try:
                        src_has_acls = 'system.posix_acl_access' in os.listxattr(
                            src)
                    except Exception as e:
                        # assume unwanted ACLs by default
                        src_has_acls = True

                module.atomic_move(
                    b_mysrc,
                    dest,
                    unsafe_writes=module.params['unsafe_writes'])

                if PY3 and hasattr(os, 'listxattr') and platform.system(
                ) == 'Linux' and not remote_src:
                    # atomic_move used above to copy src into dest might, in some cases,
                    # use shutil.copy2 which in turn uses shutil.copystat.
                    # Since Python 3.3, shutil.copystat copies file extended attributes:
                    # https://docs.python.org/3/library/shutil.html#shutil.copystat
                    # os.listxattr (along with others) was added to handle the operation.

                    # This means that on Python 3 we are copying the extended attributes which includes
                    # the ACLs on some systems - further limited to Linux as the documentation above claims
                    # that the extended attributes are copied only on Linux. Also, os.listxattr is only
                    # available on Linux.

                    # If not remote_src, then the file was copied from the controller. In that
                    # case, any filesystem ACLs are artifacts of the copy rather than preservation
                    # of existing attributes. Get rid of them:

                    if src_has_acls:
                        # FIXME If dest has any default ACLs, there are not applied to src now because
                        # they were overridden by copystat. Should/can we do anything about this?
                        # 'system.posix_acl_default' in os.listxattr(os.path.dirname(b_dest))

                        try:
                            clear_facls(dest)
                        except ValueError as e:
                            if 'setfacl' in to_native(e):
                                # No setfacl so we're okay.  The controller couldn't have set a facl
                                # without the setfacl command
                                pass
                            else:
                                raise
                        except RuntimeError as e:
                            # setfacl failed.
                            if 'Operation not supported' in to_native(e):
                                # The file system does not support ACLs.
                                pass
                            else:
                                raise

            except (IOError, OSError):
                module.fail_json(msg="failed to copy: %s to %s" % (src, dest),
                                 traceback=traceback.format_exc())
        changed = True
    else:
        changed = False

    if checksum_src is None and checksum_dest is None:
        if remote_src and os.path.isdir(module.params['src']):
            b_src = to_bytes(module.params['src'],
                             errors='surrogate_or_strict')
            b_dest = to_bytes(module.params['dest'],
                              errors='surrogate_or_strict')

            if src.endswith(os.path.sep) and os.path.isdir(
                    module.params['dest']):
                diff_files_changed = copy_diff_files(b_src, b_dest, module)
                left_only_changed = copy_left_only(b_src, b_dest, module)
                common_dirs_changed = copy_common_dirs(b_src, b_dest, module)
                owner_group_changed = chown_recursive(b_dest, module)
                if diff_files_changed or left_only_changed or common_dirs_changed or owner_group_changed:
                    changed = True

            if src.endswith(
                    os.path.sep) and not os.path.exists(module.params['dest']):
                b_basename = to_bytes(os.path.basename(src),
                                      errors='surrogate_or_strict')
                b_dest = to_bytes(os.path.join(b_dest, b_basename),
                                  errors='surrogate_or_strict')
                b_src = to_bytes(os.path.join(module.params['src'], ""),
                                 errors='surrogate_or_strict')
                if not module.check_mode:
                    shutil.copytree(b_src, b_dest, symlinks=not (local_follow))
                chown_recursive(dest, module)
                changed = True

            if not src.endswith(os.path.sep) and os.path.isdir(
                    module.params['dest']):
                b_basename = to_bytes(os.path.basename(src),
                                      errors='surrogate_or_strict')
                b_dest = to_bytes(os.path.join(b_dest, b_basename),
                                  errors='surrogate_or_strict')
                b_src = to_bytes(os.path.join(module.params['src'], ""),
                                 errors='surrogate_or_strict')
                if not module.check_mode and not os.path.exists(b_dest):
                    shutil.copytree(b_src, b_dest, symlinks=not (local_follow))
                    changed = True
                    chown_recursive(dest, module)
                if module.check_mode and not os.path.exists(b_dest):
                    changed = True
                if os.path.exists(b_dest):
                    diff_files_changed = copy_diff_files(b_src, b_dest, module)
                    left_only_changed = copy_left_only(b_src, b_dest, module)
                    common_dirs_changed = copy_common_dirs(
                        b_src, b_dest, module)
                    owner_group_changed = chown_recursive(b_dest, module)
                    if diff_files_changed or left_only_changed or common_dirs_changed or owner_group_changed:
                        changed = True

            if not src.endswith(os.path.sep) and not os.path.exists(
                    module.params['dest']):
                b_basename = to_bytes(os.path.basename(module.params['src']),
                                      errors='surrogate_or_strict')
                b_dest = to_bytes(os.path.join(b_dest, b_basename),
                                  errors='surrogate_or_strict')
                if not module.check_mode and not os.path.exists(b_dest):
                    os.makedirs(b_dest)
                    b_src = to_bytes(os.path.join(module.params['src'], ""),
                                     errors='surrogate_or_strict')
                    diff_files_changed = copy_diff_files(b_src, b_dest, module)
                    left_only_changed = copy_left_only(b_src, b_dest, module)
                    common_dirs_changed = copy_common_dirs(
                        b_src, b_dest, module)
                    owner_group_changed = chown_recursive(b_dest, module)
                    if diff_files_changed or left_only_changed or common_dirs_changed or owner_group_changed:
                        changed = True
                if module.check_mode and not os.path.exists(b_dest):
                    changed = True

    res_args = dict(dest=dest,
                    src=src,
                    md5sum=md5sum_src,
                    checksum=checksum_src,
                    changed=changed)
    if backup_file:
        res_args['backup_file'] = backup_file

    module.params['dest'] = dest
    if not module.check_mode:
        file_args = module.load_file_common_arguments(module.params)
        res_args['changed'] = module.set_fs_attributes_if_different(
            file_args, res_args['changed'])

    module.exit_json(**res_args)
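
# The core idempotence test above compares SHA-1 digests of source and
# destination and copies only on mismatch. A minimal standalone sketch of
# that decision (module.sha1 is assumed to be a file-digest helper along
# these lines):
import hashlib
import shutil

def sha1_of(path, bufsize=65536):
    digest = hashlib.sha1()
    with open(path, 'rb') as stream:
        for chunk in iter(lambda: stream.read(bufsize), b''):
            digest.update(chunk)
    return digest.hexdigest()

def copy_if_different(src, dest):
    try:
        same = sha1_of(src) == sha1_of(dest)
    except (IOError, OSError):
        same = False  # missing/unreadable dest counts as a difference
    if not same:
        shutil.copyfile(src, dest)
    return not same  # True when a copy happened ('changed')
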
def main():
    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(type='path', required=True),
            dest=dict(type='path', required=True),
            remote_src=dict(type='bool', default=False),
            creates=dict(type='path'),
            list_files=dict(type='bool', default=False),
            keep_newer=dict(type='bool', default=False),
            exclude=dict(type='list', default=[]),
            extra_opts=dict(type='list', default=[]),
            validate_certs=dict(type='bool', default=True),
        ),
        add_file_common_args=True,
        # check-mode only works for zip files; we cover that later
        supports_check_mode=True,
    )

    src = module.params['src']
    dest = module.params['dest']
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    remote_src = module.params['remote_src']
    file_args = module.load_file_common_arguments(module.params)

    # did tar file arrive?
    if not os.path.exists(src):
        if not remote_src:
            module.fail_json(msg="Source '%s' failed to transfer" % src)
        # If remote_src=true, and src= contains ://, try and download the file to a temp directory.
        elif '://' in src:
            src = fetch_file(module, src)
        else:
            module.fail_json(msg="Source '%s' does not exist" % src)
    if not os.access(src, os.R_OK):
        module.fail_json(msg="Source '%s' not readable" % src)

    # skip working with 0 size archives
    try:
        if os.path.getsize(src) == 0:
            module.fail_json(msg="Invalid archive '%s', the file is 0 bytes" % src)
    except Exception as e:
        module.fail_json(msg="Source '%s' not readable, %s" % (src, to_native(e)))

    # is dest OK to receive tar file?
    if not os.path.isdir(b_dest):
        module.fail_json(msg="Destination '%s' is not a directory" % dest)

    handler = pick_handler(src, b_dest, file_args, module)

    res_args = dict(handler=handler.__class__.__name__, dest=dest, src=src)

    # do we need to do unpack?
    check_results = handler.is_unarchived()

    # DEBUG
    # res_args['check_results'] = check_results

    if module.check_mode:
        res_args['changed'] = not check_results['unarchived']
    elif check_results['unarchived']:
        res_args['changed'] = False
    else:
        # do the unpack
        try:
            res_args['extract_results'] = handler.unarchive()
            if res_args['extract_results']['rc'] != 0:
                module.fail_json(msg="failed to unpack %s to %s" % (src, dest), **res_args)
        except IOError:
            module.fail_json(msg="failed to unpack %s to %s" % (src, dest), **res_args)
        else:
            res_args['changed'] = True

    # Get diff if required
    if check_results.get('diff', False):
        res_args['diff'] = {'prepared': check_results['diff']}

    # Run only if we found differences (idempotence) or diff was missing
    if res_args.get('diff', True) and not module.check_mode:
        # do we need to change perms?
        for filename in handler.files_in_archive:
            file_args['path'] = os.path.join(b_dest, to_bytes(filename, errors='surrogate_or_strict'))

            try:
                res_args['changed'] = module.set_fs_attributes_if_different(file_args, res_args['changed'], expand=False)
            except (IOError, OSError) as e:
                module.fail_json(msg="Unexpected error when accessing exploded file: %s" % to_native(e), **res_args)

    if module.params['list_files']:
        res_args['files'] = handler.files_in_archive

    module.exit_json(**res_args)
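
# pick_handler() is defined elsewhere in the module. A rough sketch of the
# idea (an assumption, not the real selection logic, which also probes
# external tools such as unzip and gtar):
import tarfile
import zipfile

def pick_handler_sketch(src):
    if zipfile.is_zipfile(src):
        return 'zip'
    if tarfile.is_tarfile(src):
        return 'tar'
    raise ValueError("%s is not an archive this sketch understands" % src)
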
def main():
    # Module settings
    argument_spec = dict(
        bandwidth=dict(),
        baseurl=dict(type='list'),
        cost=dict(),
        deltarpm_metadata_percentage=dict(),
        deltarpm_percentage=dict(),
        description=dict(),
        enabled=dict(type='bool'),
        enablegroups=dict(type='bool'),
        exclude=dict(type='list'),
        failovermethod=dict(choices=['roundrobin', 'priority']),
        file=dict(),
        gpgcakey=dict(),
        gpgcheck=dict(type='bool'),
        gpgkey=dict(type='list'),
        http_caching=dict(choices=['all', 'packages', 'none']),
        include=dict(),
        includepkgs=dict(type='list'),
        ip_resolve=dict(choices=['4', '6', 'IPv4', 'IPv6', 'whatever']),
        keepalive=dict(type='bool'),
        keepcache=dict(choices=['0', '1']),
        metadata_expire=dict(),
        metadata_expire_filter=dict(choices=[
            'never', 'read-only:past', 'read-only:present', 'read-only:future'
        ]),
        metalink=dict(),
        mirrorlist=dict(),
        mirrorlist_expire=dict(),
        name=dict(required=True),
        params=dict(type='dict'),
        password=dict(no_log=True),
        priority=dict(),
        protect=dict(type='bool'),
        proxy=dict(),
        proxy_password=dict(no_log=True),
        proxy_username=dict(),
        repo_gpgcheck=dict(type='bool'),
        reposdir=dict(default='/etc/yum.repos.d', type='path'),
        retries=dict(),
        s3_enabled=dict(type='bool'),
        skip_if_unavailable=dict(type='bool'),
        sslcacert=dict(aliases=['ca_cert']),
        ssl_check_cert_permissions=dict(type='bool'),
        sslclientcert=dict(aliases=['client_cert']),
        sslclientkey=dict(aliases=['client_key']),
        sslverify=dict(type='bool', aliases=['validate_certs']),
        state=dict(choices=['present', 'absent'], default='present'),
        throttle=dict(),
        timeout=dict(),
        ui_repoid_vars=dict(),
        username=dict(),
    )

    argument_spec['async'] = dict(type='bool')

    module = AnsibleModule(
        argument_spec=argument_spec,
        add_file_common_args=True,
        supports_check_mode=True,
    )

    # Params was removed
    # https://meetbot.fedoraproject.org/ansible-meeting/2017-09-28/ansible_dev_meeting.2017-09-28-15.00.log.html
    if module.params['params']:
        module.fail_json(
            msg=
            "The params option to yum_repository was removed in Ansible 2.5 since it circumvents Ansible's option handling"
        )

    name = module.params['name']
    state = module.params['state']

    # Check if required parameters are present
    if state == 'present':
        if (module.params['baseurl'] is None
                and module.params['metalink'] is None
                and module.params['mirrorlist'] is None):
            module.fail_json(
                msg=
                "Parameter 'baseurl', 'metalink' or 'mirrorlist' is required.")
        if module.params['description'] is None:
            module.fail_json(msg="Parameter 'description' is required.")

    # Rename "name" and "description" to ensure correct key sorting
    module.params['repoid'] = module.params['name']
    module.params['name'] = module.params['description']
    del module.params['description']

    # Change list type to string for baseurl and gpgkey
    for list_param in ['baseurl', 'gpgkey']:
        if (list_param in module.params
                and module.params[list_param] is not None):
            module.params[list_param] = "\n".join(module.params[list_param])

    # Define repo file name if it doesn't exist
    if module.params['file'] is None:
        module.params['file'] = module.params['repoid']

    # Instantiate the YumRepo object
    yumrepo = YumRepo(module)

    # Get repo status before change
    diff = {
        'before_header': yumrepo.params['dest'],
        'before': yumrepo.dump(),
        'after_header': yumrepo.params['dest'],
        'after': ''
    }

    # Perform action depending on the state
    if state == 'present':
        yumrepo.add()
    elif state == 'absent':
        yumrepo.remove()

    # Get repo status after change
    diff['after'] = yumrepo.dump()

    # Compare repo states
    changed = diff['before'] != diff['after']

    # Save the file only if not in check mode and if there was a change
    if not module.check_mode and changed:
        yumrepo.save()

    # Change file attributes if needed
    if os.path.isfile(module.params['dest']):
        file_args = module.load_file_common_arguments(module.params)
        changed = module.set_fs_attributes_if_different(file_args, changed)

    # Print status of the change
    module.exit_json(changed=changed, repo=name, state=state, diff=diff)
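
# YumRepo is defined elsewhere in the module. A rough sketch of what its
# dump() output for state=present could look like, rendered with
# configparser (an assumption about formatting, not the actual code):
import configparser
import io

def render_repo(repoid, name, baseurl, gpgcheck=True):
    parser = configparser.ConfigParser()
    parser.add_section(repoid)
    parser.set(repoid, 'name', name)
    parser.set(repoid, 'baseurl', baseurl)
    parser.set(repoid, 'gpgcheck', '1' if gpgcheck else '0')
    buf = io.StringIO()
    parser.write(buf)
    return buf.getvalue()

# render_repo('epel', 'Extra Packages', 'https://example.com/epel')
# -> '[epel]\nname = Extra Packages\nbaseurl = ...\ngpgcheck = 1\n\n'
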
def main():
    module = AnsibleModule(
        argument_spec=dict(
            path=dict(type='list', required=True),
            format=dict(type='str',
                        default='gz',
                        choices=['bz2', 'gz', 'tar', 'xz', 'zip']),
            dest=dict(type='path'),
            exclude_path=dict(type='list'),
            force_archive=dict(type='bool', default=False),
            remove=dict(type='bool', default=False),
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    params = module.params
    check_mode = module.check_mode
    paths = params['path']
    dest = params['dest']
    b_dest = None if not dest else to_bytes(dest, errors='surrogate_or_strict')
    exclude_paths = params['exclude_path']
    remove = params['remove']

    b_expanded_paths = []
    b_expanded_exclude_paths = []
    fmt = params['format']
    b_fmt = to_bytes(fmt, errors='surrogate_or_strict')
    force_archive = params['force_archive']
    globby = False
    changed = False
    state = 'absent'

    # Simple or archive file compression (inapplicable with 'zip' since it's always an archive)
    archive = False
    b_successes = []

    # Fail early
    if not HAS_LZMA and fmt == 'xz':
        module.fail_json(msg=missing_required_lib(
            "lzma or backports.lzma", reason="when using xz format"),
                         exception=LZMA_IMP_ERR)

    for path in paths:
        b_path = os.path.expanduser(
            os.path.expandvars(to_bytes(path, errors='surrogate_or_strict')))

        # Expand any glob characters. If found, add the expanded glob to the
        # list of expanded_paths, which might be empty.
        if (b'*' in b_path or b'?' in b_path):
            b_expanded_paths.extend(glob.glob(b_path))
            globby = True

        # If there are no glob characters the path is added to the expanded paths
        # whether the path exists or not
        else:
            b_expanded_paths.append(b_path)

    # Only attempt to expand the exclude paths if they were given
    if exclude_paths:
        for exclude_path in exclude_paths:
            b_exclude_path = os.path.expanduser(
                os.path.expandvars(
                    to_bytes(exclude_path, errors='surrogate_or_strict')))

            # Expand any glob characters. If found, add the expanded glob to the
            # list of expanded_paths, which might be empty.
            if (b'*' in b_exclude_path or b'?' in b_exclude_path):
                b_expanded_exclude_paths.extend(glob.glob(b_exclude_path))

            else:
                # If there are no glob characters the exclude path is added to
                # the expanded exclude paths whether the path exists or not.
                b_expanded_exclude_paths.append(b_exclude_path)

    if not b_expanded_paths:
        return module.fail_json(path=', '.join(paths),
                                expanded_paths=to_native(
                                    b', '.join(b_expanded_paths),
                                    errors='surrogate_or_strict'),
                                msg='Error, no source paths were found')

    # Only try to determine whether we are working with an archive when force_archive is not set
    if not force_archive:
        # If we actually matched multiple files or TRIED to, then
        # treat this as a multi-file archive
        archive = globby or os.path.isdir(
            b_expanded_paths[0]) or len(b_expanded_paths) > 1
    else:
        archive = True

    # Default created file name (for single-file archives) to
    # <file>.<format>
    if not b_dest and not archive:
        b_dest = b'%s.%s' % (b_expanded_paths[0], b_fmt)

    # Force archives to specify 'dest'
    if archive and not b_dest:
        module.fail_json(
            dest=dest,
            path=', '.join(paths),
            msg=
            'Error, must specify "dest" when archiving multiple files or trees'
        )

    b_sep = to_bytes(os.sep, errors='surrogate_or_strict')

    b_archive_paths = []
    b_missing = []
    b_arcroot = b''

    for b_path in b_expanded_paths:
        # Use the longest common directory name among all the files
        # as the archive root path
        if b_arcroot == b'':
            b_arcroot = os.path.dirname(b_path) + b_sep
        else:
            for i in range(len(b_arcroot)):
                if b_path[i] != b_arcroot[i]:
                    break

            if i < len(b_arcroot):
                b_arcroot = os.path.dirname(b_arcroot[0:i + 1])

            b_arcroot += b_sep

        # Don't allow archives to be created anywhere within paths to be removed
        if remove and os.path.isdir(b_path):
            b_path_dir = b_path
            if not b_path.endswith(b'/'):
                b_path_dir += b'/'

            if b_dest.startswith(b_path_dir):
                module.fail_json(
                    path=', '.join(paths),
                    msg=
                    'Error, created archive can not be contained in source paths when remove=True'
                )

        if os.path.lexists(b_path) and b_path not in b_expanded_exclude_paths:
            b_archive_paths.append(b_path)
        else:
            b_missing.append(b_path)

    # No source files were found but the named archive exists: are we 'compress' or 'archive' now?
    if len(b_missing) == len(b_expanded_paths) and b_dest and os.path.exists(
            b_dest):
        # Just check the filename to know if it's an archive or simple compressed file
        if re.search(
                br'(\.tar|\.tar\.gz|\.tgz|\.tbz2|\.tar\.bz2|\.tar\.xz|\.zip)$',
                os.path.basename(b_dest), re.IGNORECASE):
            state = 'archive'
        else:
            state = 'compress'

    # Multiple files, or globbiness
    elif archive:
        if not b_archive_paths:
            # No source files were found, but the archive is there.
            if os.path.lexists(b_dest):
                state = 'archive'
        elif b_missing:
            # SOME source files were found, but not all of them
            state = 'incomplete'

        archive = None
        size = 0
        errors = []

        if os.path.lexists(b_dest):
            size = os.path.getsize(b_dest)

        if state != 'archive':
            if check_mode:
                changed = True

            else:
                try:
                    # Slightly more difficult (and less efficient!) compression using zipfile module
                    if fmt == 'zip':
                        arcfile = zipfile.ZipFile(
                            to_native(b_dest,
                                      errors='surrogate_or_strict',
                                      encoding='ascii'), 'w',
                            zipfile.ZIP_DEFLATED, True)

                    # Easier compression using tarfile module
                    elif fmt == 'gz' or fmt == 'bz2':
                        arcfile = tarfile.open(
                            to_native(b_dest,
                                      errors='surrogate_or_strict',
                                      encoding='ascii'), 'w|' + fmt)

                    # python3 tarfile module allows xz format but for python2 we have to create the tarfile
                    # in memory and then compress it with lzma.
                    elif fmt == 'xz':
                        arcfileIO = io.BytesIO()
                        arcfile = tarfile.open(fileobj=arcfileIO, mode='w')

                    # Or plain tar archiving
                    elif fmt == 'tar':
                        arcfile = tarfile.open(
                            to_native(b_dest,
                                      errors='surrogate_or_strict',
                                      encoding='ascii'), 'w')

                    b_match_root = re.compile(br'^%s' % re.escape(b_arcroot))
                    for b_path in b_archive_paths:
                        if os.path.isdir(b_path):
                            # Recurse into directories
                            for b_dirpath, b_dirnames, b_filenames in os.walk(
                                    b_path, topdown=True):
                                if not b_dirpath.endswith(b_sep):
                                    b_dirpath += b_sep

                                for b_dirname in b_dirnames:
                                    b_fullpath = b_dirpath + b_dirname
                                    n_fullpath = to_native(
                                        b_fullpath,
                                        errors='surrogate_or_strict',
                                        encoding='ascii')
                                    n_arcname = to_native(
                                        b_match_root.sub(b'', b_fullpath),
                                        errors='surrogate_or_strict')

                                    try:
                                        if fmt == 'zip':
                                            arcfile.write(
                                                n_fullpath, n_arcname)
                                        else:
                                            arcfile.add(n_fullpath,
                                                        n_arcname,
                                                        recursive=False)

                                    except Exception as e:
                                        errors.append(
                                            '%s: %s' %
                                            (n_fullpath, to_native(e)))

                                for b_filename in b_filenames:
                                    b_fullpath = b_dirpath + b_filename
                                    n_fullpath = to_native(
                                        b_fullpath,
                                        errors='surrogate_or_strict',
                                        encoding='ascii')
                                    n_arcname = to_native(
                                        b_match_root.sub(b'', b_fullpath),
                                        errors='surrogate_or_strict')

                                    try:
                                        if fmt == 'zip':
                                            arcfile.write(
                                                n_fullpath, n_arcname)
                                        else:
                                            arcfile.add(n_fullpath,
                                                        n_arcname,
                                                        recursive=False)

                                        b_successes.append(b_fullpath)
                                    except Exception as e:
                                        errors.append(
                                            'Adding %s: %s' %
                                            (to_native(b_path), to_native(e)))
                        else:
                            path = to_native(b_path,
                                             errors='surrogate_or_strict',
                                             encoding='ascii')
                            arcname = to_native(b_match_root.sub(b'', b_path),
                                                errors='surrogate_or_strict')
                            if fmt == 'zip':
                                arcfile.write(path, arcname)
                            else:
                                arcfile.add(path, arcname, recursive=False)

                            b_successes.append(b_path)

                except Exception as e:
                    expanded_fmt = 'zip' if fmt == 'zip' else ('tar.' + fmt)
                    module.fail_json(
                        msg='Error when writing %s archive at %s: %s' %
                        (expanded_fmt, dest, to_native(e)),
                        exception=format_exc())

                if arcfile:
                    arcfile.close()
                    state = 'archive'

                if fmt == 'xz':
                    with lzma.open(b_dest, 'wb') as f:
                        f.write(arcfileIO.getvalue())
                    arcfileIO.close()

                if errors:
                    module.fail_json(
                        msg='Errors when writing archive at %s: %s' %
                        (dest, '; '.join(errors)))

        if state in ['archive', 'incomplete'] and remove:
            for b_path in b_successes:
                try:
                    # Never delete sources during a check-mode run
                    if not check_mode:
                        if os.path.isdir(b_path):
                            shutil.rmtree(b_path)
                        else:
                            os.remove(b_path)
                except OSError:
                    errors.append(to_native(b_path))

            if errors:
                module.fail_json(dest=dest,
                                 msg='Error deleting some source files',
                                 files=errors)

        # Rudimentary check: If size changed then file changed. Not perfect, but easy.
        if not check_mode and os.path.getsize(b_dest) != size:
            changed = True

        if b_successes and state != 'incomplete':
            state = 'archive'

    # Simple, single-file compression
    else:
        b_path = b_expanded_paths[0]

        # No source or compressed file
        if not (os.path.exists(b_path) or os.path.lexists(b_dest)):
            state = 'absent'

        # if it already exists and the source file isn't there, consider this done
        elif not os.path.lexists(b_path) and os.path.lexists(b_dest):
            state = 'compress'

        else:
            if module.check_mode:
                if not os.path.exists(b_dest):
                    changed = True
            else:
                size = 0
                f_in = f_out = arcfile = None

                if os.path.lexists(b_dest):
                    size = os.path.getsize(b_dest)

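                # zip and tar are container formats written directly;
                # gz/bz2/xz are plain stream compressors, so the source is
                # copied through a compressing file object instead.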
                try:
                    if fmt == 'zip':
                        arcfile = zipfile.ZipFile(
                            to_native(b_dest,
                                      errors='surrogate_or_strict',
                                      encoding='ascii'), 'w',
                            zipfile.ZIP_DEFLATED, True)
                        arcfile.write(
                            to_native(b_path,
                                      errors='surrogate_or_strict',
                                      encoding='ascii'),
                            to_native(b_path[len(b_arcroot):],
                                      errors='surrogate_or_strict'))
                        arcfile.close()
                        state = 'archive'  # because all zip files are archives
                    elif fmt == 'tar':
                        arcfile = tarfile.open(
                            to_native(b_dest,
                                      errors='surrogate_or_strict',
                                      encoding='ascii'), 'w')
                        arcfile.add(
                            to_native(b_path,
                                      errors='surrogate_or_strict',
                                      encoding='ascii'))
                        arcfile.close()
                    else:
                        f_in = open(b_path, 'rb')

                        n_dest = to_native(b_dest,
                                           errors='surrogate_or_strict',
                                           encoding='ascii')
                        if fmt == 'gz':
                            f_out = gzip.open(n_dest, 'wb')
                        elif fmt == 'bz2':
                            f_out = bz2.BZ2File(n_dest, 'wb')
                        elif fmt == 'xz':
                            f_out = lzma.LZMAFile(n_dest, 'wb')
                        else:
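                            # Defensive only: fmt should already be
                            # constrained by the module's argument spec.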
                            raise OSError("Invalid format")

                        shutil.copyfileobj(f_in, f_out)

                    b_successes.append(b_path)

                except OSError as e:
                    module.fail_json(
                        path=to_native(b_path),
                        dest=dest,
                        msg='Unable to write to compressed file: %s' %
                        to_native(e),
                        exception=format_exc())

                if arcfile:
                    arcfile.close()
                if f_in:
                    f_in.close()
                if f_out:
                    f_out.close()

                # Rudimentary check: If size changed then file changed. Not perfect, but easy.
                if os.path.getsize(b_dest) != size:
                    changed = True

            state = 'compress'

        if remove and not check_mode:
            try:
                os.remove(b_path)

            except OSError as e:
                module.fail_json(path=to_native(b_path),
                                 msg='Unable to remove source file: %s' %
                                 to_native(e),
                                 exception=format_exc())

    params['path'] = b_dest
    file_args = module.load_file_common_arguments(params)

    if not check_mode:
        changed = module.set_fs_attributes_if_different(file_args, changed)

    module.exit_json(
        archived=[
            to_native(p, errors='surrogate_or_strict') for p in b_successes
        ],
        dest=dest,
        changed=changed,
        state=state,
        arcroot=to_native(b_arcroot, errors='surrogate_or_strict'),
        missing=[
            to_native(p, errors='surrogate_or_strict') for p in b_missing
        ],
        expanded_paths=[
            to_native(p, errors='surrogate_or_strict')
            for p in b_expanded_paths
        ],
        expanded_exclude_paths=[
            to_native(p, errors='surrogate_or_strict')
            for p in b_expanded_exclude_paths
        ],
    )
Example #10
def main():
    argument_spec = url_argument_spec()

    # setup aliases
    argument_spec['url_username']['aliases'] = ['username']
    argument_spec['url_password']['aliases'] = ['password']

    argument_spec.update(
        url=dict(type='str', required=True),
        dest=dict(type='path', required=True),
        backup=dict(type='bool'),
        sha256sum=dict(type='str', default=''),
        checksum=dict(type='str', default=''),
        timeout=dict(type='int', default=10),
        headers=dict(type='raw'),
        tmp_dest=dict(type='path'),
    )

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=argument_spec,
        add_file_common_args=True,
        supports_check_mode=True,
        mutually_exclusive=[['checksum', 'sha256sum']],
    )

    if module.params.get('thirsty'):
        module.deprecate(
            'The alias "thirsty" has been deprecated and will be removed, use "force" instead',
            version='2.13')

    if module.params.get('sha256sum'):
        module.deprecate(
            'The parameter "sha256sum" has been deprecated and will be removed, use "checksum" instead',
            version='2.14')

    url = module.params['url']
    dest = module.params['dest']
    backup = module.params['backup']
    force = module.params['force']
    sha256sum = module.params['sha256sum']
    checksum = module.params['checksum']
    use_proxy = module.params['use_proxy']
    timeout = module.params['timeout']
    tmp_dest = module.params['tmp_dest']

    result = dict(
        changed=False,
        checksum_dest=None,
        checksum_src=None,
        dest=dest,
        elapsed=0,
        url=url,
    )

    # Parse headers to dict
    if isinstance(module.params['headers'], dict):
        headers = module.params['headers']
    elif module.params['headers']:
        try:
            headers = dict(
                item.split(':', 1)
                for item in module.params['headers'].split(','))
            module.deprecate(
                'Supplying `headers` as a string is deprecated. Please use dict/hash format for `headers`',
                version='2.10')
        except Exception:
            module.fail_json(
                msg=
                "The string representation for the `headers` parameter requires a key:value,key:value syntax to be properly parsed.",
                **result)
    else:
        headers = None

    dest_is_dir = os.path.isdir(dest)
    last_mod_time = None

    # workaround for usage of deprecated sha256sum parameter
    if sha256sum:
        checksum = 'sha256:%s' % (sha256sum)

    # checksum specified, parse for algorithm and checksum
    if checksum:
        try:
            algorithm, checksum = checksum.split(':', 1)
        except ValueError:
            module.fail_json(
                msg=
                "The checksum parameter has to be in format <algorithm>:<checksum>",
                **result)

        if checksum.startswith('http://') or checksum.startswith(
                'https://') or checksum.startswith('ftp://'):
            checksum_url = checksum
            # download checksum file to checksum_tmpsrc
            checksum_tmpsrc, checksum_info = url_get(module, checksum_url,
                                                     dest, use_proxy,
                                                     last_mod_time, force,
                                                     timeout, headers,
                                                     tmp_dest)
            with open(checksum_tmpsrc) as f:
                lines = [line.rstrip('\n') for line in f]
            os.remove(checksum_tmpsrc)
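            # Expect the coreutils layout: one '<hash>  <filename>' pair
            # per line, as produced by sha256sum and friends.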
            checksum_map = {}
            for line in lines:
                parts = line.split(None, 1)
                if len(parts) == 2:
                    checksum_map[parts[0]] = parts[1]
            filename = url_filename(url)

            # Look through each line in the checksum file for a hash corresponding to
            # the filename in the url, returning the first hash that is found.
            for cksum in (s for (s, fname) in checksum_map.items()
                          if fname.strip('./') == filename):
                checksum = cksum
                break
            else:
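                # for/else: reached only when the loop finishes without a
                # break, i.e. no entry matched the target filename.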
                checksum = None

            if checksum is None:
                module.fail_json(
                    msg="Unable to find a checksum for file '%s' in '%s'" %
                    (filename, checksum_url))
        # Remove any non-alphanumeric characters, including the infamous
        # Unicode zero-width space
        checksum = re.sub(r'\W+', '', checksum).lower()
        # Ensure the checksum portion is a hexdigest
        try:
            int(checksum, 16)
        except ValueError:
            module.fail_json(msg='The checksum format is invalid', **result)

    if not dest_is_dir and os.path.exists(dest):
        checksum_mismatch = False

        # If the download is not forced and there is a checksum, allow
        # checksum match to skip the download.
        if not force and checksum != '':
            destination_checksum = module.digest_from_file(dest, algorithm)

            if checksum != destination_checksum:
                checksum_mismatch = True

        # Not forcing redownload, unless checksum does not match
        if not force and checksum and not checksum_mismatch:
            # allow file attribute changes
            module.params['path'] = dest
            file_args = module.load_file_common_arguments(module.params)
            file_args['path'] = dest
            result['changed'] = module.set_fs_attributes_if_different(
                file_args, False)
            if result['changed']:
                module.exit_json(
                    msg="file already exists but file attributes changed",
                    **result)
            module.exit_json(msg="file already exists", **result)

        # If the file already exists, prepare the last modified time for the
        # request.
        mtime = os.path.getmtime(dest)
        last_mod_time = datetime.datetime.utcfromtimestamp(mtime)

        # If the checksum does not match we have to force the download
        # because last_mod_time may be newer than on remote
        if checksum_mismatch:
            force = True

    # download to tmpsrc
    start = datetime.datetime.utcnow()
    tmpsrc, info = url_get(module, url, dest, use_proxy, last_mod_time, force,
                           timeout, headers, tmp_dest)
    result['elapsed'] = (datetime.datetime.utcnow() - start).seconds
    result['src'] = tmpsrc

    # Now the request has completed, we can finally generate the final
    # destination file name from the info dict.

    if dest_is_dir:
        filename = extract_filename_from_headers(info)
        if not filename:
            # Fall back to extracting the filename from the URL.
            # Pluck the URL from the info, since a redirect could have changed
            # it.
            filename = url_filename(info['url'])
        dest = os.path.join(dest, filename)
        result['dest'] = dest

    # raise an error if there is no tmpsrc file; there is nothing to clean
    # up, since removing a path that does not exist would itself raise
    if not os.path.exists(tmpsrc):
        module.fail_json(msg="Request failed",
                         status_code=info['status'],
                         response=info['msg'],
                         **result)
    if not os.access(tmpsrc, os.R_OK):
        os.remove(tmpsrc)
        module.fail_json(msg="Source %s is not readable" % (tmpsrc), **result)
    result['checksum_src'] = module.sha1(tmpsrc)

    # check if there is no dest file
    if os.path.exists(dest):
        # raise an error if copy has no permission on dest
        if not os.access(dest, os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" % (dest),
                             **result)
        if not os.access(dest, os.R_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not readable" % (dest),
                             **result)
        result['checksum_dest'] = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(dest)):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s does not exist" %
                             (os.path.dirname(dest)),
                             **result)
        if not os.access(os.path.dirname(dest), os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" %
                             (os.path.dirname(dest)),
                             **result)

    if module.check_mode:
        if os.path.exists(tmpsrc):
            os.remove(tmpsrc)
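        # Predict the change from checksums alone; dest is never modified
        # in check mode.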
        result['changed'] = ('checksum_dest' not in result or
                             result['checksum_src'] != result['checksum_dest'])
        module.exit_json(msg=info.get('msg', ''), **result)

    backup_file = None
    if result['checksum_src'] != result['checksum_dest']:
        try:
            if backup:
                if os.path.exists(dest):
                    backup_file = module.backup_local(dest)
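            # atomic_move replaces dest via rename where possible, so
            # readers never observe a partially written file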
            module.atomic_move(tmpsrc, dest)
        except Exception as e:
            if os.path.exists(tmpsrc):
                os.remove(tmpsrc)
            module.fail_json(msg="failed to copy %s to %s: %s" %
                             (tmpsrc, dest, to_native(e)),
                             exception=traceback.format_exc(),
                             **result)
        result['changed'] = True
    else:
        result['changed'] = False
        if os.path.exists(tmpsrc):
            os.remove(tmpsrc)

    if checksum != '':
        destination_checksum = module.digest_from_file(dest, algorithm)

        if checksum != destination_checksum:
            os.remove(dest)
            module.fail_json(
                msg="The checksum for %s did not match %s; it was %s." %
                (dest, checksum, destination_checksum),
                **result)

    # allow file attribute changes
    module.params['path'] = dest
    file_args = module.load_file_common_arguments(module.params)
    file_args['path'] = dest
    result['changed'] = module.set_fs_attributes_if_different(
        file_args, result['changed'])

    # Backwards compat only.  We'll return None on FIPS enabled systems
    try:
        result['md5sum'] = module.md5(dest)
    except ValueError:
        result['md5sum'] = None

    if backup_file:
        result['backup_file'] = backup_file

    # Mission complete
    module.exit_json(msg=info.get('msg', ''),
                     status_code=info.get('status', ''),
                     **result)