def _check_argspec(self):
    aav = AnsibleArgSpecValidator(
        data=self._task.args,
        schema=dict(argument_spec=argument_spec),
        schema_format="argspec",
        schema_conditionals=dict(
            required_if=required_if,
            required_one_of=required_one_of,
            mutually_exclusive=mutually_exclusive,
            required_together=required_together,
        ),
        name=self._task.action,
    )
    valid, errors, self._task.args = aav.validate()
    if not valid:
        raise AnsibleActionFail(errors)
def get_kubeconfig(self, kubeconfig, remote_transport, new_module_args):
    if isinstance(kubeconfig, string_types):
        # find the kubeconfig in the expected search path
        if not remote_transport:
            # kubeconfig is local; find it in the expected paths
            kubeconfig = self._find_needle('files', kubeconfig)
            # decrypt the kubeconfig that was found
            actual_file = self._loader.get_real_file(kubeconfig, decrypt=True)
            new_module_args['kubeconfig'] = actual_file
    elif isinstance(kubeconfig, dict):
        new_module_args['kubeconfig'] = kubeconfig
    else:
        raise AnsibleActionFail(
            "Error while reading kubeconfig parameter - a string or dict"
            " expected, but got %s instead" % type(kubeconfig))
def _get_args(self):
    missing = []
    args = {}
    for option, vals in self._VALID_ARGS.items():
        if 'default' not in vals:
            if self._task.args.get(option, None) is None:
                missing.append(option)
                continue
            args[option] = self._task.args.get(option)
        else:
            args[option] = self._task.args.get(option, vals['default'])
    if missing:
        raise AnsibleActionFail('Missing required parameters: {}'.format(
            ', '.join(missing)))
    return args
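# A self-contained sketch of the _VALID_ARGS contract that _get_args above
# consumes: an option with no 'default' key is required, one with a 'default'
# key is optional. The option names 'path' and 'state' are hypothetical,
# chosen only for illustration.
_DEMO_VALID_ARGS = {
    'path': {},                       # required: no 'default' key
    'state': {'default': 'present'},  # optional: falls back to the default
}

def demo_get_args(task_args):
    missing, args = [], {}
    for option, vals in _DEMO_VALID_ARGS.items():
        if 'default' not in vals:
            if task_args.get(option) is None:
                missing.append(option)
                continue
            args[option] = task_args[option]
        else:
            args[option] = task_args.get(option, vals['default'])
    if missing:
        raise ValueError('Missing required parameters: {}'.format(
            ', '.join(missing)))
    return args

# demo_get_args({'path': '/tmp/x'}) -> {'path': '/tmp/x', 'state': 'present'}
# demo_get_args({})                 -> ValueError: Missing required parameters: path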
def run_command(cmd, description='run command', stop_on_error=False, input=None):
    """Run a command and catch exceptions for Ansible."""
    display.vvv("command: " + ' '.join(cmd))
    from subprocess import CalledProcessError, check_output, STDOUT
    try:
        output = check_output(cmd, env=_dcos_path(), stderr=STDOUT)
        returncode = 0
    except CalledProcessError as e:
        output = e.output
        returncode = e.returncode
        if stop_on_error and returncode != 0:
            raise AnsibleActionFail('Failed to {}: {}'.format(description, e))
    return output
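# Hypothetical call sites for run_command; the dcos subcommands shown are
# illustrative, not taken from the surrounding code:
#
#   out = run_command(['dcos', 'package', 'list'], 'list packages')
#   run_command(['dcos', 'package', 'install', 'jenkins', '--yes'],
#               'install jenkins', stop_on_error=True)
#
# With stop_on_error=False (the default) a failing command returns its
# combined stdout/stderr and the caller inspects the output itself.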
def ensure_dcos_security():
    """Check whether the dcos[cli] security extension is installed."""
    try:
        r = subprocess.check_output(['dcos', 'security', '--version'],
                                    env=_dcos_path()).decode()
    except subprocess.CalledProcessError:
        display.vvv("dcos security: not installed")
        install_dcos_security_cli()
        r = subprocess.check_output(['dcos', 'security', '--version'],
                                    env=_dcos_path()).decode()
    v = _version(r)
    if v < (1, 2, 0):
        raise AnsibleActionFail(
            "DC/OS Security CLI 1.2.x is required, found {}".format(v))
    display.vvv("dcos security: all prerequisites seem to be in order")
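# The _version helper is not shown in this section; a plausible sketch,
# assuming it extracts the first x.y.z triple from the CLI output and
# returns it as a tuple of ints for comparison:
import re

def _version(raw):
    match = re.search(r'(\d+)\.(\d+)\.(\d+)', raw)
    if not match:
        return (0, 0, 0)
    return tuple(int(part) for part in match.groups())

# _version("dcos-security-cli version 1.2.3") -> (1, 2, 3)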
def _run(self, task_vars=None):
    """Run the artifact push batcher.

    All pushed artifacts will be deployed to the inventory target.
    """
    self.changed = False
    self.installed_artifacts = list()
    if task_vars is None:
        task_vars = dict()
    result = super(ActionModule, self).run(task_vars=task_vars)
    self.task_vars_meta = task_vars

    # parse args
    download_artifacts = self._task.args.get('artifact_urls', list())
    local_artifacts = self._task.args.get('artifact_paths', list())
    if not local_artifacts and not download_artifacts:
        raise AnsibleActionFail(
            'Neither artifact_paths nor artifact_urls has any value.'
            ' Check configuration and try again.')

    for artifact in download_artifacts:
        local_artifacts.append(self._get_url(url=artifact))

    rpms = list()
    for artifact in local_artifacts:
        filetype = self._get_filetype(filename=artifact)
        DISPLAY.vv('Artifact type: {}, file: {}'.format(filetype, artifact))
        if filetype == 'rpm':
            pushed_artifact = self.deploy_rpm(filename=artifact)
            rpms.append(pushed_artifact)
        elif filetype == 'targz':
            self.deploy_targz(filename=artifact)

    if rpms:
        self.install_rpms(rpms=rpms)

    result['changed'] = self.changed
    result['installed_artifacts'] = self.installed_artifacts
    return result
def run(self, tmp=None, task_vars=None):
    ''' handler for aws_s3 operations '''
    self._supports_async = True

    if task_vars is None:
        task_vars = dict()

    result = super(ActionModule, self).run(tmp, task_vars)
    del tmp  # tmp no longer has any effect

    source = self._task.args.get('src', None)

    try:
        new_module_args = self._task.args.copy()
        if source:
            source = os.path.expanduser(source)
            # For backward compatibility check if the file exists on the
            # remote; it should take precedence
            if not self._remote_file_exists(source):
                try:
                    source = self._loader.get_real_file(
                        self._find_needle('files', source), decrypt=False)
                    new_module_args['src'] = source
                except AnsibleFileNotFound:
                    # module handles error message for nonexistent files
                    new_module_args['src'] = source
                except AnsibleError as e:
                    raise AnsibleActionFail(to_text(e))

        wrap_async = (self._task.async_val and
                      not self._connection.has_native_async)
        # execute the aws_s3 module with the updated args
        result = merge_hash(
            result,
            self._execute_module(module_args=new_module_args,
                                 task_vars=task_vars,
                                 wrap_async=wrap_async))

        if not wrap_async:
            # remove a temporary path we created
            self._remove_tmp_path(self._connection._shell.tmpdir)
    except AnsibleAction as e:
        result.update(e.result)
    return result
def run(self, tmp=None, task_vars=None):
    result = super(ActionModule, self).run(tmp, task_vars)
    result['changed'] = False

    args = self._task.args
    wanted_state = args.get('state', 'present')
    path = args.get('path')
    if path is None:
        raise AnsibleActionFail('path cannot be empty for dcos_secret')
    store = args.get('store', 'default')
    value = args.get('value')

    current_val = secrets.get(path, store=store)
    current_state = 'present' if current_val is not None else 'absent'

    if self._play_context.check_mode:
        if current_state != wanted_state:
            result['changed'] = True
            result['msg'] = 'would change secret {} to be {}'.format(
                path, wanted_state)
        return result

    if current_state == wanted_state:
        display.vvv("Secret {} already {}".format(path, wanted_state))
        if wanted_state == 'present' and current_val != value:
            display.vvv("Updating secret {} with new value".format(path))
            secrets.update(path, value, store=store)
            result['changed'] = True
            result['msg'] = "Secret {} was updated".format(path)
    else:
        display.vvv("Secret {} not {}".format(path, wanted_state))
        if wanted_state == 'present':
            secrets.create(path, value, store=store)
            result['msg'] = "Secret {} was created".format(path)
        elif wanted_state == 'absent':
            secrets.delete(path, store=store)
            result['msg'] = "Secret {} was deleted".format(path)
        result['changed'] = True

    return result
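# Decision table for the dcos_secret run() above
# (current state vs. wanted state):
#
#   present / present, same value  -> no-op
#   present / present, new value   -> secrets.update(), changed
#   absent  / present              -> secrets.create(), changed
#   present / absent               -> secrets.delete(), changed
#   absent  / absent               -> no-op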
def _run_module(self, module_name, module_args):
    """Execute an ansible module."""
    DISPLAY.vv('Running module name: {}'.format(module_name))
    results = self._execute_module(module_name=module_name,
                                   module_args=module_args,
                                   task_vars=self.task_vars_meta)
    DISPLAY.vv('Result {}'.format(results))
    if results.get('changed', False):
        self.changed = True
    if results.get('failed', False):
        raise AnsibleActionFail('Module {} failed. Message: {}'.format(
            module_name, results.get('msg')))
    return results
def _get_resource_module(self, prefix_os_name=False):
    if "." in self._name:
        if len(self._name.split(".")) != 3:
            msg = (
                "name should be a fully qualified collection name in the"
                " format <org-name>.<collection-name>.<resource-module-name>")
            raise AnsibleActionFail(msg)
        fqcn_module_name = self._name
    else:
        if prefix_os_name:
            module_name = self._os_name.split(".")[1] + "_" + self._name
        else:
            module_name = self._name
        fqcn_module_name = ".".join(
            self._os_name.split(".")[:2] + [module_name])
    return fqcn_module_name
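# A standalone sketch of the FQCN resolution performed by
# _get_resource_module above; the platform value "cisco.ios.ios" is a
# hypothetical example, not taken from the surrounding code:
def demo_resource_module_name(os_name, name, prefix_os_name=False):
    if "." in name:
        if len(name.split(".")) != 3:
            raise ValueError("name should be a fully qualified collection name")
        return name
    module = (os_name.split(".")[1] + "_" + name) if prefix_os_name else name
    return ".".join(os_name.split(".")[:2] + [module])

# demo_resource_module_name("cisco.ios.ios", "interfaces", True)
#   -> "cisco.ios.ios_interfaces"
# demo_resource_module_name("cisco.ios.ios", "cisco.nxos.nxos_bgp_global")
#   -> "cisco.nxos.nxos_bgp_global"  (already fully qualified, passed through)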
def run(self, tmp=None, task_vars=None):
    self._supports_async = True

    if task_vars is None:
        task_vars = dict()

    result = super(ActionModule, self).run(tmp, task_vars)
    del tmp  # tmp no longer has any effect

    src = self._task.args.get('src', None)
    remote_src = boolean(self._task.args.get('remote_src', 'no'),
                         strict=False)

    try:
        if (src and remote_src) or not src:
            # everything is remote, so we just execute the module
            # without changing any of the module arguments
            raise _AnsibleActionDone(result=self._execute_module(
                task_vars=task_vars, wrap_async=self._task.async_val))

        try:
            src = self._find_needle('files', src)
        except AnsibleError as e:
            raise AnsibleActionFail(to_native(e))

        tmp_src = self._connection._shell.join_path(
            self._connection._shell.tmpdir, os.path.basename(src))
        self._transfer_file(src, tmp_src)
        self._fixup_perms2((self._connection._shell.tmpdir, tmp_src))

        new_module_args = self._task.args.copy()
        new_module_args.update(dict(src=tmp_src))

        result.update(
            self._execute_module('uri', module_args=new_module_args,
                                 task_vars=task_vars,
                                 wrap_async=self._task.async_val))
    except AnsibleAction as e:
        result.update(e.result)
    finally:
        if not self._task.async_val:
            self._remove_tmp_path(self._connection._shell.tmpdir)
    return result
def _login_ibmcloud(self, api_key):
    try:
        data = urllib.parse.urlencode({
            'apikey': api_key,
            'grant_type': 'urn:ibm:params:oauth:grant-type:apikey',
        })
        headers = {'Content-Type': 'application/x-www-form-urlencoded'}
        auth_response = open_url(url=self.api_token_endpoint,
                                 method='POST',
                                 headers=headers,
                                 data=data,
                                 timeout=self.api_timeout)
        auth = json.load(auth_response)
        access_token = auth['access_token']
        self.authorization = f'Bearer {access_token}'
    except Exception as e:
        raise AnsibleActionFail(f'Failed to log in to IBM Cloud: {e}')
def _get_filetype(self, filename):
    """Get file type information."""
    try:
        # pass args as a list to avoid shell quoting issues with filenames
        r = subprocess.run(['file', '-b', filename],
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE,
                           universal_newlines=True)
    except Exception as e:
        raise Exception('Unable to determine file type: %s' % e)
    else:
        if 'RPM' in r.stdout:
            return 'rpm'
        elif 'gzip compressed data' in r.stdout:
            return 'targz'
    raise AnsibleActionFail(
        'Filename {} is an unknown type'.format(filename))
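# An alternative sketch that avoids shelling out to file(1): check the magic
# bytes directly (an RPM lead begins with ED AB EE DB, a gzip stream with
# 1F 8B). This assumes only these two artifact types need distinguishing:
def sniff_filetype(filename):
    with open(filename, 'rb') as f:
        head = f.read(4)
    if head == b'\xed\xab\xee\xdb':
        return 'rpm'
    if head[:2] == b'\x1f\x8b':
        return 'targz'
    raise ValueError('Filename {} is an unknown type'.format(filename))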
def _get_commands(self, results):
    """Return a list of commands that were executed by container tool.

    :param results: Ansible task results.
    :returns commands: List of commands.
    """
    commands = []
    for item in results:
        try:
            if item['changed']:
                commands.extend(item['podman_actions'])
        except KeyError:
            if 'cmd' in item:
                commands.append(' '.join(item['cmd']))
            else:
                raise AnsibleActionFail('Wrong async result data, missing'
                                        ' podman_actions or cmd:'
                                        ' {}'.format(item))
    return commands
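# Fabricated examples of the two result shapes _get_commands accepts: a
# podman task result carrying 'podman_actions', and a raw command result
# carrying 'cmd':
sample_results = [
    {'changed': True,
     'podman_actions': ['podman rm -f web', 'podman run -d web']},
    {'rc': 0, 'cmd': ['systemctl', 'restart', 'tripleo_web']},
]
# _get_commands(sample_results) ->
#   ['podman rm -f web', 'podman run -d web', 'systemctl restart tripleo_web']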
def run(self, tmp=None, task_vars=None):
    results = super(ActionModule, self).run(tmp, task_vars)

    # initialize response
    results['started'] = results['finished'] = 0
    results['stdout'] = results['stderr'] = ''
    results['stdout_lines'] = results['stderr_lines'] = []

    # read params
    try:
        jid = self._task.args["jid"]
    except KeyError:
        raise AnsibleActionFail("jid is required", result=results)
    mode = self._task.args.get("mode", "status")

    results['ansible_job_id'] = jid
    async_dir = self._get_async_dir()
    log_path = self._connection._shell.join_path(async_dir, jid)

    if mode == 'cleanup':
        self._remove_tmp_path(log_path, force=True)
        results['erased'] = log_path
    else:
        results['results_file'] = log_path
        results['started'] = 1

        if getattr(self._connection._shell, '_IS_WINDOWS', False):
            # TODO: eventually fix so we can get remote user (%USERPROFILE%)
            # like we get ~/ for posix
            module_args = dict(jid=jid, mode=mode, _async_dir=async_dir)
            results = merge_hash(
                results,
                self._execute_module(
                    module_name='ansible.legacy.async_status',
                    task_vars=task_vars,
                    module_args=module_args))
        else:
            # fetch remote file and read locally
            self._update_results_with_job_file(jid, log_path, results)

    return results
def ensure_dcos_edgelb(instance_name):
    """Check whether the dcos[cli] edgelb extension is installed."""
    tries = 3
    for i in range(tries):
        try:
            subprocess.check_output(
                ['dcos', 'edgelb', '--name=' + instance_name, 'ping'],
                env=_dcos_path()).decode()
        except subprocess.CalledProcessError:
            if i < tries - 1:
                display.vvv("dcos edgelb: ping failed {} times".format(i + 1))
                install_dcos_edgelb_cli()
                time.sleep(10)
                continue
            else:
                raise AnsibleActionFail(
                    'Edge-LB: Pool cannot be configured because the API'
                    ' server is not reachable.')
        break
def deploy_targz(self, filename):
    """Run unarchive deployment."""
    DISPLAY.vv('Running archive deployment')
    package_path = os.path.join(ARTIFACTS_ANCHOR, os.path.basename(filename))
    self._run_module(module_name='file',
                     module_args=dict(path=os.path.dirname(package_path),
                                      state='directory'))
    self._transfer_files(filename=filename, destination=package_path)
    results = self._low_level_execute_command(
        "tar xvz -C / -f {}".format(package_path), executable='/bin/bash')
    DISPLAY.vv('Result {}'.format(results))
    if results['rc'] > 0:
        DISPLAY.error(msg='Failed command: {}'.format(results))
        raise AnsibleActionFail(
            'Unable to perform unarchive {}.'.format(package_path))
    self._run_module(module_name='file',
                     module_args=dict(path=package_path, state='absent'))
    self.installed_artifacts.append(os.path.basename(filename))
def run(self, tmp=None, task_vars=None):
    """ Action Plugins should implement this method to perform their
    tasks.  Everything else in this base class is a helper method for the
    action plugin to do that.

    :kwarg tmp: Temporary directory.  Sometimes an action plugin sets up
        a temporary directory and then calls another module.  This
        parameter allows us to reuse the same directory for both.

    :kwarg task_vars: The variables (host vars, group vars, config vars,
        etc) associated with this task.

    :returns: dictionary of results from the module

    Implementors of action modules may find the following variables
    especially useful:

    * Module parameters.  These are stored in self._task.args
    """
    result = {}

    if self._task.async_val and not self._supports_async:
        raise AnsibleActionFail('async is not supported for this task.')
def connect_cluster(**kwargs):
    """Connect to a DC/OS cluster by url."""
    changed = False
    url = kwargs.get('url')
    if not check_cluster(kwargs.get('name'), url):
        if url is None:
            raise AnsibleActionFail(
                'Not connected: you need to specify the cluster url')
        display.vvv('DC/OS cluster not setup, setting up')
        cli_args = parse_connect_options(**kwargs)
        display.vvv('args: {}'.format(cli_args))
        subprocess.check_call(['dcos', 'cluster', 'setup', url] + cli_args,
                              env=_dcos_path())
        changed = True
    # ensure_auth(**kwargs)
    return changed
def _pwd_entry_set(self, path, salt, ciphertext, task_vars):
    secret = b64encode(salt + ciphertext) + b"\n"
    tmpfile = self._create_content_tempfile(secret)
    tmp_src = self._connection._shell.join_path(
        self._connection._shell.tmpdir, 'source')
    self._transfer_file(tmpfile, tmp_src)
    os.remove(tmpfile)
    copy = self._execute_module(
        module_name='copy',
        module_args=dict(
            src=tmp_src,
            dest=path,
            mode=0o600,
            # _original_basename=source_rel,
            # _copy_mode="single"
        ),
        task_vars=task_vars)
    if copy.get('failed'):
        raise AnsibleActionFail(copy['msg'])
def run(self, tmp=None, task_vars=None):
    result = super(ActionModule, self).run(tmp, task_vars)
    result['changed'] = False

    if self._play_context.check_mode:
        # in --check mode, always skip this module execution
        result['skipped'] = True
        result['msg'] = 'dcos_connection does not support check mode'
        return result

    args = self._task.args
    url = args.get('url')
    if url is None:
        url = dcos.config.get_config_val('core.dcos_url')
    name = args.get('name')
    username = args.get('username')
    password = args.get('password')
    password_file = args.get('password_file')
    if not password and password_file is not None:
        with open(password_file, 'r') as f:
            password = f.read().strip()

    if not check_cluster(name, url):
        if url is None:
            raise AnsibleActionFail(
                'Not connected: you need to specify the cluster url')
        dcos.cluster.setup_cluster(url, username, password)
        result['changed'] = True
        result['msg'] = 'Cluster connection updated to {}'.format(url)

    if ensure_auth(url, username, password):
        result['changed'] = True
        # str.join takes a single iterable; also cope with msg being unset
        result['msg'] = '\n'.join(
            [result.get('msg', ''), 'refreshed auth token']).strip()

    return result
def _get_polylang_languages(self):
    """Returns: A dict of `mo_id`s keyed by language slug."""
    def get_moids_by_slug():
        get_cmd = 'pll lang list --format=json --fields=mo_id,slug'
        return dict([(lang["slug"], lang["mo_id"])
                     for lang in self._get_wp_json(get_cmd)])

    retval = get_moids_by_slug()
    # mo_ids are created lazily:
    if None in retval.values():
        # `wp pll option sync taxonomies` generates the mo_id of
        # newly-created languages, and may or may not be doing something
        # else... Oh well
        self._run_wp_cli_change("pll option sync taxonomies")
        retval = get_moids_by_slug()
    # Failing again is fatal.
    if None in retval.values():
        missing = [slug for slug, mo_id in retval.items() if mo_id is None]
        raise AnsibleActionFail(
            "Cannot find the mo_id of lang(s): {}".format(", ".join(missing)))
    return retval
def _get_run_mode(self):
    error_msg = None
    if self._config or self._running_config:
        if not self._name:
            error_msg = "'name' is required if 'config' option is set"
        if not self._state:
            error_msg = "'state' is required if 'config' option is set"
        run_mode = RunMode.RM_CONFIG
    elif self._state:
        if not self._name:
            error_msg = "'name' is required if 'state' option is set"
        run_mode = RunMode.RM_GET
    elif self._name:
        if not any([self._config, self._running_config, self._state]):
            error_msg = ("If 'name' is set at least one of 'config',"
                         " 'running_config' or 'state' is required")
        else:
            run_mode = RunMode.RM_LIST
    if error_msg:
        raise AnsibleActionFail(error_msg)
    return run_mode
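# The RunMode enum referenced above is not shown in this section; a minimal
# sketch of what it presumably looks like (member values are guesses):
from enum import Enum

class RunMode(Enum):
    RM_CONFIG = "config"  # push/compare configuration for a resource module
    RM_GET = "get"        # gather current state only
    RM_LIST = "list"      # list resource modules for the platform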
def update_permissions(gid, permissions):
    """Check for missing/surplus resource permissions.

    Returns a boolean that indicates whether changes were made.
    """
    wanted_resources = {p['rid'] for p in permissions}
    available = {r['rid'] for r in iam.list_resources()}
    missing = wanted_resources - available
    if missing:
        raise AnsibleActionFail(
            "Cannot grant permissions that do not exist: {}".format(
                ', '.join(missing)))

    # create (rid, action) tuples in a set for comparison
    wanted = {(p['rid'], p['action']) for p in permissions}
    current = set()
    for permission in iam.list_group_permissions(gid):
        rid = permission['rid']
        for action in permission['actions']:
            current.add((rid, action['name']))

    to_add = wanted - current
    to_remove = current - wanted

    for p in to_add:
        display.vvv("Granting {} permission on {} to group {}".format(
            p[1], p[0], gid))
        iam.grant_permission_to_group(gid, p[0], p[1])

    for p in to_remove:
        display.vvv("Revoking {} permission on {} from group {}".format(
            p[1], p[0], gid))
        iam.revoke_permission_from_group(gid, p[0], p[1])

    return len(to_add | to_remove) > 0
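# A self-contained illustration of the (rid, action) set arithmetic above,
# using made-up permission data:
wanted = {('dcos:adminrouter:ops:mesos', 'full'),
          ('dcos:adminrouter:service:marathon', 'full')}
current = {('dcos:adminrouter:ops:mesos', 'full'),
           ('dcos:adminrouter:ops:slave', 'full')}
to_add = wanted - current      # grant:  {('dcos:adminrouter:service:marathon', 'full')}
to_remove = current - wanted   # revoke: {('dcos:adminrouter:ops:slave', 'full')}
changed = len(to_add | to_remove) > 0  # True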
def ensure_dcos():
    """Check whether the dcos cli is installed."""
    try:
        r = subprocess.check_output(['dcos', '--version'],
                                    env=_dcos_path()).decode()
    except subprocess.CalledProcessError:
        raise AnsibleActionFail("DC/OS CLI is not installed!")

    # raw_version = ''
    # for line in r.strip().split('\n'):
    #     display.vvv(line)
    #     k, v = line.split('=')
    #     if k == 'dcoscli.version':
    #         raw_version = v
    # v = _version(raw_version)
    # if v < (0, 5, 0):
    #     raise AnsibleActionFail(
    #         "DC/OS CLI 0.5.x is required, found {}".format(v))
    # if v >= (0, 7, 0):
    #     raise AnsibleActionFail(
    #         "DC/OS CLI version > 0.7.x detected, may not work")

    display.vvv("dcos: all prerequisites seem to be in order")
def test_run_missing_target_missing_sample(self):
    mock_task = mock.MagicMock()
    mock_task.async_val = None
    mock_task.action = "tripleo_undercloud_conf"
    mock_task.args = dict(path='foo.conf',
                          values={'DEFAULT': {'undercloud_debug': True}},
                          sample_path='bar.conf',
                          use_sample=True)
    mock_connection = mock.MagicMock()
    play_context = PlayContext()
    action = tripleo_undercloud_conf.ActionModule(mock_task,
                                                  mock_connection,
                                                  play_context,
                                                  None, None, None)
    mock_exists = mock.MagicMock()
    action._file_exists = mock_exists
    mock_exists.side_effect = AnsibleActionFail('fail')
    self.assertRaises(AnsibleActionFail, action.run)
def _redress_failure(self, result, failed_when):
    """Reset failure in `result` according to `failed_when`

    Returns an error that should be raised soon (after result
    bookkeeping) if unsuccessful i.e. `failed_when` returns True.

    :param result: An Ansible result dict. Will be mutated in place to
       delete the "failed" key (if present) if `failed_when` returns falsy

    :param failed_when: Either None, or a function that takes `result` as
       the sole parameter and returns a truthy value if there is a
       failure, or a falsy value if not.
    """
    if failed_when is None:
        failed_when = lambda result: 'failed' in result

    if failed_when(result):
        return AnsibleActionFail(
            "Subaction failed: %s - Invoked with %s"
            % (result.get('msg', '(no message)'),
               result.get('invocation', '(no invocation information)')))

    # We will be returning None; scrub failure evidence out of result to
    # prevent clueless callers (such as the Ansible core) from freaking out
    if "rc" in result and str(result["rc"]) != "0":
        # This is a `command` or the like.
        # Don't just delete 'failed', lest task_executor.py:705 take it
        # upon itself to inspect ["rc"] again
        result['failed'] = False
        if 'msg' in result:
            # Not set by us, and typically misleading e.g. "non-zero
            # return code"
            del result['msg']
    else:
        if 'failed' in result:
            del result['failed']
    return None
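# Hypothetical use of _redress_failure: tolerate a non-zero rc from a
# `command` subaction when the tool signals "nothing to do" with rc == 2
# (the rc convention here is invented for illustration):
#
#   result = self._execute_module('command', module_args=module_args,
#                                 task_vars=task_vars)
#   err = self._redress_failure(
#       result, failed_when=lambda r: r.get('rc', 0) not in (0, 2))
#   # ...result bookkeeping...
#   if err:
#       raise err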
def _get_current_file_state(self, basename):
    """
    Returns state of a given plugin file/folder.

    :param basename: name of element to check (can be a file or folder)
    """
    path = self._get_symlink_path(basename)
    plugin_stat = self._subaction.query('stat', {'path': path})
    if 'failed' in plugin_stat:
        raise AnsibleActionFail("Cannot stat() {} - Error: {}".format(
            path, plugin_stat))
    file_exists = ('stat' in plugin_stat and plugin_stat['stat']['exists'])
    if not file_exists:
        return 'absent'
    if not plugin_stat['stat']:
        return 'absent'
    elif plugin_stat['stat']['islnk']:
        if (plugin_stat['stat']['lnk_target'] ==
                self._get_symlink_target(basename)):
            return 'symlinked'
        else:
            return 'symlink_damaged'
    else:
        return 'installed'
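# Summary of the states _get_current_file_state can return:
#
#   nothing at path                          -> 'absent'
#   symlink pointing at the expected target  -> 'symlinked'
#   symlink pointing elsewhere               -> 'symlink_damaged'
#   regular file or directory                -> 'installed'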
def _get_source_stanza(self, args):
    dockerfile_text = self._get_immediate_dockerfile(args)
    if 'source' in args:
        if dockerfile_text is not None and 'type' not in args['source']:
            args['source']['type'] = 'Dockerfile'
        return args['source']
    elif dockerfile_text is not None:
        return {'type': 'Dockerfile', 'dockerfile': dockerfile_text}
    elif self._get_git_repository(args):
        git = args['git']
        try:
            retval = {
                'type': 'Git',
                'git': {'uri': self._get_git_repository(args)},
            }
            for ref_alias in ['ref', 'branch', 'tag']:
                if ref_alias in git:
                    retval['git']['ref'] = git[ref_alias]
                    break
            if 'path' in git:
                retval['contextDir'] = git['path']
            return retval
        except KeyError as e:
            raise AnsibleActionFail("Missing field `%s` under `git`" % e.args[0])
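# Illustrative input/output for _get_source_stanza, with made-up values
# (assuming _get_git_repository(args) returns the repository URI):
#
#   {'git': {'uri': 'https://github.com/org/app', 'branch': 'main',
#            'path': 'svc'}}
#     -> {'type': 'Git',
#         'git': {'uri': 'https://github.com/org/app', 'ref': 'main'},
#         'contextDir': 'svc'}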
def run(self, tmp=None, task_vars=None):
    if task_vars is None:
        task_vars = dict()

    result = super(ActionModule, self).run(tmp, task_vars)

    satellite_hammer_views = self._task.args.get('satellite_hammer_views',
                                                 None)
    api_activation_keys = self._task.args.get('api_activation_keys', None)
    api_subscriptions = self._task.args.get('api_subscriptions', None)
    api_activation_keys_subscriptions = self._task.args.get(
        'api_activation_keys_subscriptions', None)

    try:
        # the original test (`is None and type(...) == list`) could never be
        # true; the intent is clearly "defined and a list"
        if (satellite_hammer_views is None
                or not isinstance(satellite_hammer_views, list)):
            raise AnsibleActionFail(
                'satellite_hammer_views must be defined and be a list!')
        if api_activation_keys is None or api_activation_keys.get(
                'json', None) is None:
            raise AnsibleActionFail(
                'api_activation_keys must be defined and have json response!')
        if api_subscriptions is None or api_subscriptions.get(
                'json', None) is None:
            raise AnsibleActionFail(
                'api_subscriptions must be defined and have json response!')
        if (api_activation_keys_subscriptions is None
                or api_activation_keys_subscriptions.get('results', None)
                is None):
            raise AnsibleActionFail(
                'api_activation_keys_subscriptions must be defined and have'
                ' results array!')

        # sum() replaces the original map/reduce pair, which needed a
        # functools import and raised TypeError on an empty results list
        chk = sum(1 for x in api_activation_keys_subscriptions['results']
                  if x.get('json', None) is None)
        if chk > 0:
            raise AnsibleActionFail(
                'api_activation_keys_subscriptions.results[] must have items'
                ' with json defined!')
        chk = sum(1 for x in api_activation_keys_subscriptions['results']
                  if x['json'].get('results', None) is None)
        if chk > 0:
            raise AnsibleActionFail(
                'api_activation_keys_subscriptions.results[].json must have'
                ' results array!')

        try:
            result.update(
                findNewActivationKeySubscriptions(
                    satellite_hammer_views,
                    api_activation_keys['json'],
                    api_subscriptions['json'],
                    api_activation_keys_subscriptions))
        except Exception as e:
            # raise AnsibleActionFail("%s: %s" % (type(e).__name__, to_text(e)))
            raise
    except AnsibleAction as e:
        result.update(e.result)

    return result