Example #1
    def _parse_spec_group_file(self):
        pkg_specs, grp_specs, module_specs, filenames = [], [], [], []
        already_loaded_comps = False  # Only load this if necessary, it's slow

        for name in self.names:
            if '://' in name:
                name = fetch_file(self.module, name)
                filenames.append(name)
            elif name.endswith(".rpm"):
                filenames.append(name)
            elif name.startswith("@") or ('/' in name):
                # like "dnf install /usr/bin/vi"
                if '/' in name:
                    pkg_spec = self._whatprovides(name)
                    if pkg_spec:
                        pkg_specs.append(pkg_spec)
                        continue

                if not already_loaded_comps:
                    self.base.read_comps()
                    already_loaded_comps = True

                grp_env_mdl_candidate = name[1:].strip()

                if self.with_modules:
                    mdl = self.module_base._get_modules(grp_env_mdl_candidate)
                    if mdl[0]:
                        module_specs.append(grp_env_mdl_candidate)
                    else:
                        grp_specs.append(grp_env_mdl_candidate)
                else:
                    grp_specs.append(grp_env_mdl_candidate)
            else:
                pkg_specs.append(name)
        return pkg_specs, grp_specs, module_specs, filenames
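
A note on the helper every example here leans on: fetch_file() (from ansible.module_utils.urls) downloads a URL into the module's temporary directory and returns the local path of the downloaded file. The minimal module below is a sketch of mine, not part of the example, shown only to isolate that call shape:

#!/usr/bin/python
# Sketch only: localize a '://' source with fetch_file before treating
# it as a local file.
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import fetch_file


def main():
    module = AnsibleModule(argument_spec=dict(src=dict(type='str', required=True)))
    src = module.params['src']
    if '://' in src:
        # fetch_file downloads the URL into module.tmpdir and returns
        # the path of the temp file it wrote
        src = fetch_file(module, src)
    module.exit_json(changed=False, path=src)


if __name__ == '__main__':
    main()
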
Example #2
    def upload(self, path, endpoint, wait=True, item_type="unknown"):
        if "://" in path:
            tmppath = fetch_file(self, path)
            path = ".".join(tmppath.split(".")[:-2]) + ".tar.gz"
            os.rename(tmppath, path)
            self.add_cleanup_file(path)
        ct, body = self.prepare_multipart(path)
        response = self.make_request(
            "POST",
            endpoint,
            data=body,
            headers={"Content-Type": str(ct)},
            binary=True,
            return_errors_on_404=True,
        )
        if response["status_code"] in [202]:
            self.json_output["path"] = path
            self.json_output["changed"] = True
            if wait:
                self.wait_for_complete(response["json"]["task"])
            return
        else:
            if "json" in response and "__all__" in response["json"]:
                self.fail_json(msg="Unable to create {0} from {1}: {2}".format(item_type, path, response["json"]["__all__"][0]))
            elif "json" in response and "errors" in response["json"] and "detail" in response["json"]["errors"][0]:
                self.fail_json(msg="Unable to create {0} from {1}: {2}".format(item_type, path, response["json"]["errors"][0]["detail"]))
            elif "json" in response:
                self.fail_json(msg="Unable to create {0} from {1}: {2}".format(item_type, path, response["json"]))
            else:
                self.fail_json(msg="Unable to create {0} from {1}: {2}".format(item_type, path, response["status_code"]))
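
The rename at the top of upload() deserves a note: fetch_file() builds its temp file name from the URL's base name, so the random infix lands between the ".tar" and ".gz" parts. Dropping the last two dot-separated chunks and re-appending ".tar.gz" recovers a clean artifact name. A quick illustration with a hypothetical temp path:

# Hypothetical temp name fetch_file might produce for "my-collection.tar.gz"
tmppath = "/tmp/ansible/my-collection.tarx8z1q.gz"
path = ".".join(tmppath.split(".")[:-2]) + ".tar.gz"
assert path == "/tmp/ansible/my-collection.tar.gz"
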
Example #3
def main():
    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(type='path', required=True),
            dest=dict(type='path', required=True),
            remote_src=dict(type='bool', default=False),
            creates=dict(type='path'),
            list_files=dict(type='bool', default=False),
            keep_newer=dict(type='bool', default=False),
            exclude=dict(type='list', default=[]),
            extra_opts=dict(type='list', default=[]),
            validate_certs=dict(type='bool', default=True),
        ),
        add_file_common_args=True,
        # check-mode only works for zip files, we cover that later
        supports_check_mode=True,
    )

    src = module.params['src']
    dest = module.params['dest']
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    remote_src = module.params['remote_src']
    file_args = module.load_file_common_arguments(module.params)

    # did tar file arrive?
    if not os.path.exists(src):
        if not remote_src:
            module.fail_json(msg="Source '%s' failed to transfer" % src)
        # If remote_src=true, and src= contains ://, try and download the file to a temp directory.
        elif '://' in src:
            src = fetch_file(module, src)
        else:
            module.fail_json(msg="Source '%s' does not exist" % src)
    if not os.access(src, os.R_OK):
        module.fail_json(msg="Source '%s' not readable" % src)

    # skip working with 0 size archives
    try:
        if os.path.getsize(src) == 0:
            module.fail_json(msg="Invalid archive '%s', the file is 0 bytes" %
                             src)
    except Exception as e:
        module.fail_json(msg="Source '%s' not readable, %s" %
                         (src, to_native(e)))

    # is dest OK to receive tar file?
    if not os.path.isdir(b_dest):
        module.fail_json(msg="Destination '%s' is not a directory" % dest)

    handler = pick_handler(src, b_dest, file_args, module)

    res_args = dict(handler=handler.__class__.__name__, dest=dest, src=src)

    # do we need to do unpack?
    check_results = handler.is_unarchived()

    # DEBUG
    # res_args['check_results'] = check_results

    if module.check_mode:
        res_args['changed'] = not check_results['unarchived']
    elif check_results['unarchived']:
        res_args['changed'] = False
    else:
        # do the unpack
        try:
            res_args['extract_results'] = handler.unarchive()
            if res_args['extract_results']['rc'] != 0:
                module.fail_json(msg="failed to unpack %s to %s" % (src, dest),
                                 **res_args)
        except IOError:
            module.fail_json(msg="failed to unpack %s to %s" % (src, dest),
                             **res_args)
        else:
            res_args['changed'] = True

    # Get diff if required
    if check_results.get('diff', False):
        res_args['diff'] = {'prepared': check_results['diff']}

    # Run only if we found differences (idempotence) or diff was missing
    if res_args.get('diff', True) and not module.check_mode:
        # do we need to change perms?
        for filename in handler.files_in_archive:
            file_args['path'] = os.path.join(
                b_dest, to_bytes(filename, errors='surrogate_or_strict'))

            try:
                res_args['changed'] = module.set_fs_attributes_if_different(
                    file_args, res_args['changed'], expand=False)
            except (IOError, OSError) as e:
                module.fail_json(
                    msg="Unexpected error when accessing exploded file: %s" %
                    to_native(e),
                    **res_args)

    if module.params['list_files']:
        res_args['files'] = handler.files_in_archive

    module.exit_json(**res_args)
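
The source check at the top (local file exists → remote_src set → '://' in src) is a reusable gate. A condensed sketch, with a helper name of my own invention:

import os

from ansible.module_utils.urls import fetch_file


def localize_src(module, src, remote_src):
    # Hypothetical helper condensing the checks above: return a local
    # path for src, downloading it only when it is a URL.
    if os.path.exists(src):
        return src
    if not remote_src:
        module.fail_json(msg="Source '%s' failed to transfer" % src)
    if '://' in src:
        return fetch_file(module, src)
    module.fail_json(msg="Source '%s' does not exist" % src)
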
Example #4
def main():
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(
                type='str',
                default='present',
                choices=['absent', 'build-dep', 'fixed', 'latest', 'present']),
            update_cache=dict(type='bool', aliases=['update-cache']),
            update_cache_retries=dict(type='int', default=5),
            update_cache_retry_max_delay=dict(type='int', default=12),
            cache_valid_time=dict(type='int', default=0),
            purge=dict(type='bool', default=False),
            package=dict(type='list', elements='str', aliases=['pkg', 'name']),
            deb=dict(type='path'),
            default_release=dict(type='str', aliases=['default-release']),
            install_recommends=dict(type='bool',
                                    aliases=['install-recommends']),
            force=dict(type='bool', default=False),
            upgrade=dict(type='str',
                         choices=['dist', 'full', 'no', 'safe', 'yes'],
                         default='no'),
            dpkg_options=dict(type='str', default=DPKG_OPTIONS),
            autoremove=dict(type='bool', default=False),
            autoclean=dict(type='bool', default=False),
            fail_on_autoremove=dict(type='bool', default=False),
            policy_rc_d=dict(type='int', default=None),
            only_upgrade=dict(type='bool', default=False),
            force_apt_get=dict(type='bool', default=False),
            allow_unauthenticated=dict(type='bool',
                                       default=False,
                                       aliases=['allow-unauthenticated']),
        ),
        mutually_exclusive=[['deb', 'package', 'upgrade']],
        required_one_of=[[
            'autoremove', 'deb', 'package', 'update_cache', 'upgrade'
        ]],
        supports_check_mode=True,
    )

    module.run_command_environ_update = APT_ENV_VARS

    if not HAS_PYTHON_APT:
        if module.check_mode:
            module.fail_json(
                msg="%s must be installed to use check mode. "
                "If run normally this module can auto-install it." %
                PYTHON_APT)
        try:
            # We skip the cache update when auto-installing the dependency
            # if the user explicitly declared it with update_cache=no.
            if module.params.get('update_cache') is False:
                module.warn(
                    "Auto-installing missing dependency without updating cache: %s"
                    % PYTHON_APT)
            else:
                module.warn(
                    "Updating cache and auto-installing missing dependency: %s"
                    % PYTHON_APT)
                module.run_command(['apt-get', 'update'], check_rc=True)

            module.run_command(
                ['apt-get', 'install', '--no-install-recommends', PYTHON_APT, '-y', '-q'],
                check_rc=True)
            global apt, apt_pkg
            import apt
            import apt.debfile
            import apt_pkg
        except ImportError:
            module.fail_json(
                msg="Could not import python modules: apt, apt_pkg. "
                "Please install %s package." % PYTHON_APT)

    global APTITUDE_CMD
    APTITUDE_CMD = module.get_bin_path("aptitude", False)
    global APT_GET_CMD
    APT_GET_CMD = module.get_bin_path("apt-get")

    p = module.params

    if p['upgrade'] == 'no':
        p['upgrade'] = None

    use_apt_get = p['force_apt_get']

    if not use_apt_get and not APTITUDE_CMD:
        use_apt_get = True

    updated_cache = False
    updated_cache_time = 0
    install_recommends = p['install_recommends']
    allow_unauthenticated = p['allow_unauthenticated']
    dpkg_options = expand_dpkg_options(p['dpkg_options'])
    autoremove = p['autoremove']
    fail_on_autoremove = p['fail_on_autoremove']
    autoclean = p['autoclean']

    # Get the cache object
    cache = get_cache(module)

    try:
        if p['default_release']:
            try:
                apt_pkg.config['APT::Default-Release'] = p['default_release']
            except AttributeError:
                apt_pkg.Config['APT::Default-Release'] = p['default_release']
            # reopen cache w/ modified config
            cache.open(progress=None)

        mtimestamp, updated_cache_time = get_updated_cache_time()
        # cache_valid_time defaults to 0, which forces a cache update
        #  whenever `update_cache` is set to true
        updated_cache = False
        if p['update_cache'] or p['cache_valid_time']:
            now = datetime.datetime.now()
            tdelta = datetime.timedelta(seconds=p['cache_valid_time'])
            if not mtimestamp + tdelta >= now:
                # Retry to update the cache with exponential backoff
                err = ''
                update_cache_retries = module.params.get(
                    'update_cache_retries')
                update_cache_retry_max_delay = module.params.get(
                    'update_cache_retry_max_delay')
                randomize = random.randint(0, 1000) / 1000.0

                for retry in range(update_cache_retries):
                    try:
                        cache.update()
                        break
                    except apt.cache.FetchFailedException as e:
                        err = to_native(e)

                    # Use exponential backoff plus a little bit of randomness
                    delay = 2**retry + randomize
                    if delay > update_cache_retry_max_delay:
                        delay = update_cache_retry_max_delay + randomize
                    time.sleep(delay)
                else:
                    module.fail_json(msg='Failed to update apt cache: %s' %
                                     (err if err else 'unknown reason'))

                cache.open(progress=None)
                mtimestamp, post_cache_update_time = get_updated_cache_time()
                if updated_cache_time != post_cache_update_time:
                    updated_cache = True
                updated_cache_time = post_cache_update_time

            # If there is nothing else to do exit. This will set state as
            #  changed based on if the cache was updated.
            if not p['package'] and not p['upgrade'] and not p['deb']:
                module.exit_json(changed=updated_cache,
                                 cache_updated=updated_cache,
                                 cache_update_time=updated_cache_time)

        force_yes = p['force']

        if p['upgrade']:
            upgrade(module, p['upgrade'], force_yes, p['default_release'],
                    use_apt_get, dpkg_options, autoremove, fail_on_autoremove,
                    allow_unauthenticated)

        if p['deb']:
            if p['state'] != 'present':
                module.fail_json(msg="deb only supports state=present")
            if '://' in p['deb']:
                p['deb'] = fetch_file(module, p['deb'])
            install_deb(module,
                        p['deb'],
                        cache,
                        install_recommends=install_recommends,
                        allow_unauthenticated=allow_unauthenticated,
                        force=force_yes,
                        fail_on_autoremove=fail_on_autoremove,
                        dpkg_options=p['dpkg_options'])

        unfiltered_packages = p['package'] or ()
        packages = [
            package.strip() for package in unfiltered_packages
            if package != '*'
        ]
        all_installed = '*' in unfiltered_packages
        latest = p['state'] == 'latest'

        if latest and all_installed:
            if packages:
                module.fail_json(
                    msg='unable to install additional packages when upgrading all installed packages'
                )
            upgrade(module, 'yes', force_yes, p['default_release'],
                    use_apt_get, dpkg_options, autoremove, fail_on_autoremove,
                    allow_unauthenticated)

        if packages:
            for package in packages:
                if package.count('=') > 1:
                    module.fail_json(msg="invalid package spec: %s" % package)
                if latest and '=' in package:
                    module.fail_json(
                        msg='version number inconsistent with state=latest: %s'
                        % package)

        if not packages:
            if autoclean:
                cleanup(module,
                        p['purge'],
                        force=force_yes,
                        operation='autoclean',
                        dpkg_options=dpkg_options)
            if autoremove:
                cleanup(module,
                        p['purge'],
                        force=force_yes,
                        operation='autoremove',
                        dpkg_options=dpkg_options)

        if p['state'] in ('latest', 'present', 'build-dep', 'fixed'):
            state_upgrade = False
            state_builddep = False
            state_fixed = False
            if p['state'] == 'latest':
                state_upgrade = True
            if p['state'] == 'build-dep':
                state_builddep = True
            if p['state'] == 'fixed':
                state_fixed = True

            success, retvals = install(
                module,
                packages,
                cache,
                upgrade=state_upgrade,
                default_release=p['default_release'],
                install_recommends=install_recommends,
                force=force_yes,
                dpkg_options=dpkg_options,
                build_dep=state_builddep,
                fixed=state_fixed,
                autoremove=autoremove,
                fail_on_autoremove=fail_on_autoremove,
                only_upgrade=p['only_upgrade'],
                allow_unauthenticated=allow_unauthenticated)

            # Store if the cache has been updated
            retvals['cache_updated'] = updated_cache
            # Store when the update time was last
            retvals['cache_update_time'] = updated_cache_time

            if success:
                module.exit_json(**retvals)
            else:
                module.fail_json(**retvals)
        elif p['state'] == 'absent':
            remove(module,
                   packages,
                   cache,
                   p['purge'],
                   force=force_yes,
                   dpkg_options=dpkg_options,
                   autoremove=autoremove)

    except apt.cache.LockFailedException as lockFailedException:
        module.fail_json(msg="Failed to lock apt for exclusive operation: %s" %
                         lockFailedException)
    except apt.cache.FetchFailedException as fetchFailedException:
        module.fail_json(msg="Could not fetch updated apt files: %s" %
                         fetchFailedException)
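
The cache-update loop above is a for/else retry with exponential backoff: the else clause runs only when no attempt ever reached break. A standalone sketch of the same shape (the names and the caught exception type are mine):

import random
import time


def retry_with_backoff(attempt, retries=5, max_delay=12):
    # Call attempt() up to `retries` times; sleep 2**n seconds (capped
    # at max_delay) plus a fixed random jitter between attempts.
    jitter = random.randint(0, 1000) / 1000.0
    for n in range(retries):
        try:
            attempt()
            return True
        except OSError:
            time.sleep(min(2 ** n, max_delay) + jitter)
    return False
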
Example #5
def main():
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(type='str', default='present', choices=['absent', 'build-dep', 'fixed', 'latest', 'present']),
            update_cache=dict(type='bool', aliases=['update-cache']),
            update_cache_retries=dict(type='int', default=5),
            update_cache_retry_max_delay=dict(type='int', default=12),
            cache_valid_time=dict(type='int', default=0),
            purge=dict(type='bool', default=False),
            package=dict(type='list', elements='str', aliases=['pkg', 'name']),
            deb=dict(type='path'),
            default_release=dict(type='str', aliases=['default-release']),
            install_recommends=dict(type='bool', aliases=['install-recommends']),
            force=dict(type='bool', default=False),
            upgrade=dict(type='str', choices=['dist', 'full', 'no', 'safe', 'yes'], default='no'),
            dpkg_options=dict(type='str', default=DPKG_OPTIONS),
            autoremove=dict(type='bool', default=False),
            autoclean=dict(type='bool', default=False),
            fail_on_autoremove=dict(type='bool', default=False),
            policy_rc_d=dict(type='int', default=None),
            only_upgrade=dict(type='bool', default=False),
            force_apt_get=dict(type='bool', default=False),
            allow_unauthenticated=dict(type='bool', default=False, aliases=['allow-unauthenticated']),
        ),
        mutually_exclusive=[['deb', 'package', 'upgrade']],
        required_one_of=[['autoremove', 'deb', 'package', 'update_cache', 'upgrade']],
        supports_check_mode=True,
    )

    module.run_command_environ_update = APT_ENV_VARS

    if not HAS_PYTHON_APT:
        # This interpreter can't see the apt Python library- we'll do the following to try and fix that:
        # 1) look in common locations for system-owned interpreters that can see it; if we find one, respawn under it
        # 2) finding none, try to install a matching python-apt package for the current interpreter version;
        #    we limit to the current interpreter version to try and avoid installing a whole other Python just
        #    for apt support
        # 3) if we installed a support package, try to respawn under what we think is the right interpreter (could be
        #    the current interpreter again, but we'll let it respawn anyway for simplicity)
        # 4) if still not working, return an error and give up (some corner cases not covered, but this shouldn't be
        #    made any more complex than it already is to try and cover more, eg, custom interpreters taking over
        #    system locations)

        apt_pkg_name = 'python3-apt' if PY3 else 'python-apt'

        if has_respawned():
            # this shouldn't be possible; short-circuit early if it happens...
            module.fail_json(msg="{0} must be installed and visible from {1}.".format(apt_pkg_name, sys.executable))

        interpreters = ['/usr/bin/python3', '/usr/bin/python2', '/usr/bin/python']

        interpreter = probe_interpreters_for_module(interpreters, 'apt')

        if interpreter:
            # found the Python bindings; respawn this module under the interpreter where we found them
            respawn_module(interpreter)
            # this is the end of the line for this process; it will exit here once the respawned module has completed

        # don't make changes if we're in check_mode
        if module.check_mode:
            module.fail_json(msg="%s must be installed to use check mode. "
                                 "If run normally this module can auto-install it." % apt_pkg_name)

        # We skip the cache update when auto-installing the dependency
        # if the user explicitly declared it with update_cache=no.
        if module.params.get('update_cache') is False:
            module.warn("Auto-installing missing dependency without updating cache: %s" % apt_pkg_name)
        else:
            module.warn("Updating cache and auto-installing missing dependency: %s" % apt_pkg_name)
            module.run_command(['apt-get', 'update'], check_rc=True)

        # try to install the apt Python binding
        module.run_command(['apt-get', 'install', '--no-install-recommends', apt_pkg_name, '-y', '-q'], check_rc=True)

        # try again to find the bindings in common places
        interpreter = probe_interpreters_for_module(interpreters, 'apt')

        if interpreter:
            # found the Python bindings; respawn this module under the interpreter where we found them
            # NB: respawn is somewhat wasteful if it's this interpreter, but simplifies the code
            respawn_module(interpreter)
            # this is the end of the line for this process; it will exit here once the respawned module has completed
        else:
            # we've done all we can do; just tell the user it's busted and get out
            module.fail_json(msg="{0} must be installed and visible from {1}.".format(apt_pkg_name, sys.executable))

    global APTITUDE_CMD
    APTITUDE_CMD = module.get_bin_path("aptitude", False)
    global APT_GET_CMD
    APT_GET_CMD = module.get_bin_path("apt-get")

    p = module.params

    if p['upgrade'] == 'no':
        p['upgrade'] = None

    use_apt_get = p['force_apt_get']

    if not use_apt_get and not APTITUDE_CMD:
        use_apt_get = True

    updated_cache = False
    updated_cache_time = 0
    install_recommends = p['install_recommends']
    allow_unauthenticated = p['allow_unauthenticated']
    dpkg_options = expand_dpkg_options(p['dpkg_options'])
    autoremove = p['autoremove']
    fail_on_autoremove = p['fail_on_autoremove']
    autoclean = p['autoclean']

    # Get the cache object
    cache = get_cache(module)

    try:
        if p['default_release']:
            try:
                apt_pkg.config['APT::Default-Release'] = p['default_release']
            except AttributeError:
                apt_pkg.Config['APT::Default-Release'] = p['default_release']
            # reopen cache w/ modified config
            cache.open(progress=None)

        mtimestamp, updated_cache_time = get_updated_cache_time()
        # cache_valid_time defaults to 0, which forces a cache update
        #  whenever `update_cache` is set to true
        updated_cache = False
        if p['update_cache'] or p['cache_valid_time']:
            now = datetime.datetime.now()
            tdelta = datetime.timedelta(seconds=p['cache_valid_time'])
            if not mtimestamp + tdelta >= now:
                # Retry to update the cache with exponential backoff
                err = ''
                update_cache_retries = module.params.get('update_cache_retries')
                update_cache_retry_max_delay = module.params.get('update_cache_retry_max_delay')
                randomize = random.randint(0, 1000) / 1000.0

                for retry in range(update_cache_retries):
                    try:
                        cache.update()
                        break
                    except apt.cache.FetchFailedException as e:
                        err = to_native(e)

                    # Use exponential backoff plus a little bit of randomness
                    delay = 2 ** retry + randomize
                    if delay > update_cache_retry_max_delay:
                        delay = update_cache_retry_max_delay + randomize
                    time.sleep(delay)
                else:
                    module.fail_json(msg='Failed to update apt cache: %s' % (err if err else 'unknown reason'))

                cache.open(progress=None)
                mtimestamp, post_cache_update_time = get_updated_cache_time()
                if updated_cache_time != post_cache_update_time:
                    updated_cache = True
                updated_cache_time = post_cache_update_time

            # If there is nothing else to do exit. This will set state as
            #  changed based on if the cache was updated.
            if not p['package'] and not p['upgrade'] and not p['deb']:
                module.exit_json(
                    changed=updated_cache,
                    cache_updated=updated_cache,
                    cache_update_time=updated_cache_time
                )

        force_yes = p['force']

        if p['upgrade']:
            upgrade(module, p['upgrade'], force_yes, p['default_release'], use_apt_get, dpkg_options, autoremove, fail_on_autoremove, allow_unauthenticated)

        if p['deb']:
            if p['state'] != 'present':
                module.fail_json(msg="deb only supports state=present")
            if '://' in p['deb']:
                p['deb'] = fetch_file(module, p['deb'])
            install_deb(module, p['deb'], cache,
                        install_recommends=install_recommends,
                        allow_unauthenticated=allow_unauthenticated,
                        force=force_yes, fail_on_autoremove=fail_on_autoremove, dpkg_options=p['dpkg_options'])

        unfiltered_packages = p['package'] or ()
        packages = [package.strip() for package in unfiltered_packages if package != '*']
        all_installed = '*' in unfiltered_packages
        latest = p['state'] == 'latest'

        if latest and all_installed:
            if packages:
                module.fail_json(msg='unable to install additional packages when upgrading all installed packages')
            upgrade(module, 'yes', force_yes, p['default_release'], use_apt_get, dpkg_options, autoremove, fail_on_autoremove, allow_unauthenticated)

        if packages:
            for package in packages:
                if package.count('=') > 1:
                    module.fail_json(msg="invalid package spec: %s" % package)
                if latest and '=' in package:
                    module.fail_json(msg='version number inconsistent with state=latest: %s' % package)

        if not packages:
            if autoclean:
                cleanup(module, p['purge'], force=force_yes, operation='autoclean', dpkg_options=dpkg_options)
            if autoremove:
                cleanup(module, p['purge'], force=force_yes, operation='autoremove', dpkg_options=dpkg_options)

        if p['state'] in ('latest', 'present', 'build-dep', 'fixed'):
            state_upgrade = False
            state_builddep = False
            state_fixed = False
            if p['state'] == 'latest':
                state_upgrade = True
            if p['state'] == 'build-dep':
                state_builddep = True
            if p['state'] == 'fixed':
                state_fixed = True

            success, retvals = install(
                module,
                packages,
                cache,
                upgrade=state_upgrade,
                default_release=p['default_release'],
                install_recommends=install_recommends,
                force=force_yes,
                dpkg_options=dpkg_options,
                build_dep=state_builddep,
                fixed=state_fixed,
                autoremove=autoremove,
                fail_on_autoremove=fail_on_autoremove,
                only_upgrade=p['only_upgrade'],
                allow_unauthenticated=allow_unauthenticated
            )

            # Store if the cache has been updated
            retvals['cache_updated'] = updated_cache
            # Store when the update time was last
            retvals['cache_update_time'] = updated_cache_time

            if success:
                module.exit_json(**retvals)
            else:
                module.fail_json(**retvals)
        elif p['state'] == 'absent':
            remove(module, packages, cache, p['purge'], force=force_yes, dpkg_options=dpkg_options, autoremove=autoremove)

    except apt.cache.LockFailedException as lockFailedException:
        module.fail_json(msg="Failed to lock apt for exclusive operation: %s" % lockFailedException)
    except apt.cache.FetchFailedException as fetchFailedException:
        module.fail_json(msg="Could not fetch updated apt files: %s" % fetchFailedException)
Example #6
def main():
    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(type='path', required=True),
            dest=dict(type='path', required=True),
            remote_src=dict(type='bool', default=False),
            creates=dict(type='path'),
            list_files=dict(type='bool', default=False),
            keep_newer=dict(type='bool', default=False),
            exclude=dict(type='list', elements='str', default=[]),
            include=dict(type='list', elements='str', default=[]),
            extra_opts=dict(type='list', elements='str', default=[]),
            validate_certs=dict(type='bool', default=True),
            io_buffer_size=dict(type='int', default=64 * 1024),

            # Options that are for the action plugin, but ignored by the module itself.
            # We have them here so that the sanity tests pass without ignores, which
            # reduces the likelihood of further bugs being added.
            copy=dict(type='bool', default=True),
            decrypt=dict(type='bool', default=True),
        ),
        add_file_common_args=True,
        # check-mode only works for zip files, we cover that later
        supports_check_mode=True,
        mutually_exclusive=[('include', 'exclude')],
    )

    src = module.params['src']
    dest = module.params['dest']
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    remote_src = module.params['remote_src']
    file_args = module.load_file_common_arguments(module.params)

    # did tar file arrive?
    if not os.path.exists(src):
        if not remote_src:
            module.fail_json(msg="Source '%s' failed to transfer" % src)
        # If remote_src=true, and src= contains ://, try and download the file to a temp directory.
        elif '://' in src:
            src = fetch_file(module, src)
        else:
            module.fail_json(msg="Source '%s' does not exist" % src)
    if not os.access(src, os.R_OK):
        module.fail_json(msg="Source '%s' not readable" % src)

    # skip working with 0 size archives
    try:
        if os.path.getsize(src) == 0:
            module.fail_json(msg="Invalid archive '%s', the file is 0 bytes" %
                             src)
    except Exception as e:
        module.fail_json(msg="Source '%s' not readable, %s" %
                         (src, to_native(e)))

    # is dest OK to receive tar file?
    if not os.path.isdir(b_dest):
        module.fail_json(msg="Destination '%s' is not a directory" % dest)

    handler = pick_handler(src, b_dest, file_args, module)

    res_args = dict(handler=handler.__class__.__name__, dest=dest, src=src)

    # do we need to do unpack?
    check_results = handler.is_unarchived()

    # DEBUG
    # res_args['check_results'] = check_results

    if module.check_mode:
        res_args['changed'] = not check_results['unarchived']
    elif check_results['unarchived']:
        res_args['changed'] = False
    else:
        # do the unpack
        try:
            res_args['extract_results'] = handler.unarchive()
            if res_args['extract_results']['rc'] != 0:
                module.fail_json(msg="failed to unpack %s to %s" % (src, dest),
                                 **res_args)
        except IOError:
            module.fail_json(msg="failed to unpack %s to %s" % (src, dest),
                             **res_args)
        else:
            res_args['changed'] = True

    # Get diff if required
    if check_results.get('diff', False):
        res_args['diff'] = {'prepared': check_results['diff']}

    # Run only if we found differences (idempotence) or diff was missing
    if res_args.get('diff', True) and not module.check_mode:
        # do we need to change perms?
        top_folders = []
        for filename in handler.files_in_archive:
            file_args['path'] = os.path.join(
                b_dest, to_bytes(filename, errors='surrogate_or_strict'))

            try:
                res_args['changed'] = module.set_fs_attributes_if_different(
                    file_args, res_args['changed'], expand=False)
            except (IOError, OSError) as e:
                module.fail_json(
                    msg="Unexpected error when accessing exploded file: %s" %
                    to_native(e),
                    **res_args)

            if '/' in filename:
                top_folder_path = filename.split('/')[0]
                if top_folder_path not in top_folders:
                    top_folders.append(top_folder_path)

        # make sure top folders have the right permissions
        # https://github.com/ansible/ansible/issues/35426
        if top_folders:
            for f in top_folders:
                file_args['path'] = "%s/%s" % (dest, f)
                try:
                    res_args['changed'] = module.set_fs_attributes_if_different(
                        file_args, res_args['changed'], expand=False)
                except (IOError, OSError) as e:
                    module.fail_json(
                        msg="Unexpected error when accessing exploded file: %s"
                        % to_native(e),
                        **res_args)

    if module.params['list_files']:
        res_args['files'] = handler.files_in_archive

    module.exit_json(**res_args)
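
Relative to Example #3, this version adds the top_folders pass, re-applying ownership and mode to each archive's top-level directories (see ansible/ansible#35426). The collection step reduces to taking the first path component of every nested member, once:

# Illustration only, with made-up member names.
members = ["app/bin/run", "app/etc/app.conf", "README"]
top_folders = []
for name in members:
    if '/' in name:
        top = name.split('/')[0]
        if top not in top_folders:
            top_folders.append(top)
assert top_folders == ["app"]
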
Example #7
def run_module():
    module_args = dict(
        src=dict(type="str", required=True),
        dest=dict(type="path", required=True),
        regexp=dict(type="str", required=True),
        creates=dict(type="list", required=False, default=[]),
        agree_eulas=dict(type="bool", required=False, default=False),
        force=dict(type="bool", required=False, default=False),
    )
    module_args.update(urls.url_argument_spec())
    module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)
    result = dict(changed=False)
    creates = module.params["creates"]
    if not creates or not all(glob.glob(pat) for pat in creates):
        if is_url(module.params["src"]):
            dmg_path = urls.fetch_file(module, module.params["src"])
        else:
            dmg_path = module.params["src"]
        mount_point = mount_dmg(module, dmg_path, module.params["agree_eulas"])
        to_copy = []
        regexp = re.compile(module.params["regexp"])
        regexp_path_group = regexp.groupindex.get("path")
        for dir_path, dir_names, file_names in os.walk(mount_point):
            for coll, is_dir in ((dir_names, True), (file_names, False)):
                for idx in reversed(range(len(coll))):
                    name = coll[idx]
                    abs_path = os.path.join(dir_path, name)
                    rel_path = os.path.relpath(abs_path, mount_point)
                    match = regexp.search(rel_path)
                    if match:
                        if regexp_path_group is None:
                            match_path = name
                        else:
                            match_path = match.group(regexp_path_group)
                            if os.path.isabs(match_path):
                                module.fail_json(
                                    msg=("path group in regexp is an"
                                         " absolute path, not allowed"),
                                    **result)
                        dst_path = os.path.join(module.params["dest"],
                                                match_path)
                        to_copy.append((rel_path, abs_path, dst_path, is_dir))
                        if is_dir:
                            del coll[idx]
        files_copied = []
        result["files_copied"] = files_copied
        if module.check_mode:
            for rel_path, _, dst_path, _ in to_copy:
                files_copied.append((rel_path, dst_path))
        else:
            for src_rel_path, src_abs_path, dst_path, is_dir in to_copy:
                if os.path.exists(dst_path):
                    if not module.params["force"]:
                        continue
                    if os.path.isdir(dst_path):
                        shutil.rmtree(dst_path)
                    else:
                        os.unlink(dst_path)
                if is_dir:
                    shutil.copytree(src_abs_path, dst_path)
                else:
                    shutil.copy(src_abs_path, dst_path)
                files_copied.append((src_rel_path, dst_path))
            result["changed"] = bool(files_copied)
        if not files_copied:
            module.fail_json(msg="No files found to install", **result)
    # in the event of a successful module execution, simply call
    # AnsibleModule.exit_json(), passing the key/value results
    module.exit_json(**result)
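
The regexp.groupindex.get("path") lookup is how the module detects an optional named group: groupindex maps group names to group numbers, so .get() returns None when the pattern defines no (?P<path>...) group. For example:

import re

with_group = re.compile(r"Applications/(?P<path>.+\.app)")
without_group = re.compile(r"\.app$")
assert with_group.groupindex.get("path") == 1
assert without_group.groupindex.get("path") is None
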
Example #8
def run_module():
    module_args = dict(
        url=dict(type="str", required=True),
        dest=dict(type="path", required=True),
        regexp=dict(type="str", required=False, default=".*"),
        skip_macos=dict(type="bool", required=False, default=True),
        creates=dict(type="path", required=False, default=None),
    )
    result = dict(changed=False)
    module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)
    params = module.params

    if params["creates"] and os.path.exists(params["creates"]):
        module.exit_json(msg="%s already exists" % params["creates"], changed=False)

    arch_path = fetch_file(module, params["url"])
    with tempfile.TemporaryDirectory() as temp_dir:
        for cmd in (
            ["unzip", arch_path],
            ["tar", "xf", arch_path],
            ["gtar", "xf", arch_path],
            ["7z", "x", arch_path],
            ["7za", "x", arch_path],
        ):
            rc, *_ = module.run_command(cmd, cwd=temp_dir)
            if rc == 0:
                break
        else:
            module.fail_json(msg="Don't know how to extract the archive",
                             **result)

        dest_path = pathlib.Path(params["dest"])
        regexp = re.compile(params["regexp"])
        temp_path = pathlib.Path(temp_dir)
        copied = []
        for dir_path, subdir_names, file_names in os.walk(temp_dir):
            rel_dir_path = pathlib.Path(dir_path).relative_to(temp_path)
            for idx in reversed(range(len(subdir_names))):
                subdir_name = subdir_names[idx]
                if (subdir_name == "__MACOSX" and dir_path == temp_dir
                        and params["skip_macos"]):
                    del subdir_names[idx]
                    continue
                subdir_path = rel_dir_path / subdir_name
                match = regexp.search(str(subdir_path))
                if match:
                    try:
                        match_name = match.group("dest")
                    except IndexError:
                        match_name = match.group(0)
                    if not match_name:
                        module.fail_json(msg=(
                            "regexp match on %s matched the empty string" %
                            (subdir_path, )))
                    copy_from = temp_path / subdir_path
                    copy_to = dest_path / match_name
                    if not module.check_mode:
                        shutil.copytree(copy_from, copy_to)
                    # Don't need to traverse this directory.
                    del subdir_names[idx]
                    copied.append({
                        "src": str(subdir_path),
                        "dest": str(copy_to)
                    })
            for file_name in file_names:
                file_path = rel_dir_path / file_name
                match = regexp.search(str(file_path))
                if match:
                    try:
                        match_name = match.group("dest")
                    except IndexError:
                        match_name = match.group(0)
                    if not match_name:
                        module.fail_json(msg=(
                            "regexp match on %s matched the empty string" %
                            (file_path, )))
                    copy_from = temp_path / file_path
                    copy_to = dest_path / match_name
                    if not module.check_mode:
                        shutil.copy2(copy_from, copy_to)
                    copied.append({
                        "src": str(file_path),
                        "dest": str(copy_to)
                    })
    result.update(copied=copied, changed=bool(copied))
    module.exit_json(**result)
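
The try/except IndexError around match.group("dest") leans on a re detail: asking a match object for a group name the pattern never defined raises IndexError, not KeyError. A short demonstration:

import re

m = re.search(r"(?P<dest>bin)/", "bin/tool")
assert m.group("dest") == "bin"

m = re.search(r"bin/", "bin/tool")
try:
    m.group("dest")
except IndexError:
    pass  # the pattern has no (?P<dest>...) group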