def main():
    """Ansible module entry point: wait for a Tower job to finish.

    Monitors the job identified by ``job_id`` via tower-cli and exits with
    the job's id/status/timing fields, failing the module on job failure,
    missing job, auth/connection problems, or (with a message) on timeout.
    """
    # Module options; timeout is optional, the poll interval is bounded
    # by min_interval/max_interval (seconds).
    argument_spec = dict(
        job_id=dict(type='int', required=True),
        timeout=dict(type='int'),
        min_interval=dict(type='float', default=1),
        max_interval=dict(type='float', default=30),
    )

    module = TowerModule(argument_spec, supports_check_mode=True)

    json_output = {}
    # Deferred failure payload: filled in by the except clauses below and
    # acted on only after the tower-cli runtime context has been exited.
    fail_json = None

    tower_auth = tower_auth_config(module)
    with settings.runtime_values(**tower_auth):
        tower_check_mode(module)
        job = tower_cli.get_resource('job')
        params = module.params.copy()

        # tower-cli gets very noisy when monitoring.
        # We pass in our outfile to suppress the output during our monitor call.
        if PY2:
            outfile = getwriter('utf-8')(StringIO())
        else:
            outfile = StringIO()
        params['outfile'] = outfile

        job_id = params.get('job_id')
        try:
            # Blocks until the job completes or the timeout elapses;
            # extra params (timeout/min_interval/max_interval/outfile)
            # are forwarded to tower-cli's monitor.
            result = job.monitor(job_id, **params)
        except exc.Timeout:
            # Timeout is reported as a successful module exit with
            # timeout=True rather than a failure.
            result = job.status(job_id)
            result['id'] = job_id
            json_output['msg'] = 'Timeout waiting for job to finish.'
            json_output['timeout'] = True
        except exc.NotFound as excinfo:
            fail_json = dict(msg='Unable to wait, no job_id {0} found: {1}'.format(job_id, excinfo), changed=False)
        except exc.JobFailure as excinfo:
            # Job ran but failed: include the job's final status fields
            # in the failure payload.
            fail_json = dict(msg='Job with id={} failed, error: {}'.format(job_id, excinfo))
            fail_json['success'] = False
            result = job.get(job_id)
            for k in ('id', 'status', 'elapsed', 'started', 'finished'):
                fail_json[k] = result.get(k)
        except (exc.ConnectionError, exc.BadRequest, exc.AuthError) as excinfo:
            fail_json = dict(msg='Unable to wait for job: {0}'.format(excinfo), changed=False)

    if fail_json is not None:
        module.fail_json(**fail_json)

    json_output['success'] = True
    for k in ('id', 'status', 'elapsed', 'started', 'finished'):
        json_output[k] = result.get(k)

    module.exit_json(**json_output)
def main():
    """Ansible module entry point: export assets from Tower via awxkit.

    Builds the export argument dict from the module parameters, captures
    awxkit's error log output, and fails the module if the exporter logged
    any errors even though it raised no exception.
    """
    argument_spec = dict(
        all=dict(type='bool', default=False),
    )

    # We are not going to raise an error here because the __init__ method of TowerAWXKitModule will do that for us
    if HAS_EXPORTABLE_RESOURCES:
        for resource in EXPORTABLE_RESOURCES:
            argument_spec[resource] = dict(type='str')

    module = TowerAWXKitModule(argument_spec=argument_spec)

    if not HAS_EXPORTABLE_RESOURCES:
        module.fail_json(msg="Your version of awxkit does not have import/export")

    # The export process will never change a Tower system
    module.json_output['changed'] = False

    # The exporter code currently works like the following:
    #   Empty string == all assets of that type
    #   Non-Empty string = just one asset of that type (by name or ID)
    #   Asset type not present or None = skip asset type (unless everything is None, then export all)
    # Here we are going to setup a dict of values to export
    export_args = {}
    for resource in EXPORTABLE_RESOURCES:
        if module.params.get('all') or module.params.get(resource) == 'all':
            # If we are exporting everything or we got the keyword "all" we pass in an empty string for this asset type
            export_args[resource] = ''
        else:
            # Otherwise we take either the string or None (if the parameter was not passed) to get one or no items
            export_args[resource] = module.params.get(resource)

    # Currently the export process does not return anything on error
    # It simply just logs to Python's logger
    # Set up a log gobbler to get error messages from export_assets
    log_capture_string = StringIO()
    ch = logging.StreamHandler(log_capture_string)
    for logger_name in ['awxkit.api.pages.api', 'awxkit.api.pages.page']:
        logger = logging.getLogger(logger_name)
        logger.setLevel(logging.ERROR)
        ch.setLevel(logging.ERROR)
        logger.addHandler(ch)

    log_contents = ''

    # Run the export process
    try:
        module.json_output['assets'] = module.get_api_v2_object().export_assets(**export_args)
        module.exit_json(**module.json_output)
    except Exception as e:
        module.fail_json(msg="Failed to export assets {0}".format(e))
    finally:
        # Finally, consume the logs in case there were any errors and die if there were
        log_contents = log_capture_string.getvalue()
        log_capture_string.close()
        if log_contents != '':
            module.fail_json(msg=log_contents)
def to_ini(o):
    """Serialize a mapping of sections into an INI-formatted string.

    The optional ``DEFAULT`` key becomes the INI DEFAULT section; every
    other top-level key becomes a named section whose items are written
    as options.  Raises AnsibleFilterError for non-mapping input.
    """
    if not isinstance(o, MutableMapping):
        raise AnsibleFilterError('to_ini requires a dict, got %s' % type(o))

    # Deep-copy first so popping 'DEFAULT' never mutates the caller's data.
    sections = copy.deepcopy(o)
    default_parser = configparser.RawConfigParser(sections.pop('DEFAULT', {}))

    ini = configparser.RawConfigParser()
    ini.optionxform = partial(to_text, errors='surrogate_or_strict')
    for name in sections:
        ini.add_section(name)
        for option, value in sections[name].items():
            ini.set(name, option, value)

    # Write DEFAULT first, then the named sections, into one buffer.
    buf = StringIO()
    default_parser.write(buf)
    ini.write(buf)
    return buf.getvalue().rstrip()
def collect(self, module=None, collected_facts=None):
    """Collect facts from ``*.fact`` files under the module's fact_path.

    Executable .fact files are run and their stdout parsed; other .fact
    files are read directly.  Content is parsed as JSON first, then as
    ini; unparseable files store a warning string as the fact value.
    Returns a dict of the form ``{'local': {<basename>: <fact>}}``.
    """
    local_facts = {}
    local_facts['local'] = {}

    if not module:
        return local_facts

    fact_path = module.params.get('fact_path', None)
    if not fact_path or not os.path.exists(fact_path):
        return local_facts

    local = {}
    for fn in sorted(glob.glob(fact_path + '/*.fact')):
        # where it will sit under local facts
        fact_base = os.path.basename(fn).replace('.fact', '')
        if stat.S_IXUSR & os.stat(fn)[stat.ST_MODE]:
            # run it
            # try to read it as json first
            # if that fails read it with ConfigParser
            # if that fails, skip it
            try:
                rc, out, err = module.run_command(fn)
            except UnicodeError:
                fact = 'error loading fact - output of running %s was not utf-8' % fn
                local[fact_base] = fact
                module.warn(fact)
                # BUGFIX: record the failure for this one file and keep going;
                # the old code returned here and silently dropped every
                # remaining fact file.
                continue
        else:
            out = get_file_content(fn, default='')

        # load raw json
        fact = 'loading %s' % fact_base
        try:
            fact = json.loads(out)
        except ValueError:
            # load raw ini
            cp = configparser.ConfigParser()
            try:
                # readfp() was removed in Python 3.12; fall back to it only
                # on interpreters that predate read_file().
                if hasattr(cp, 'read_file'):
                    cp.read_file(StringIO(out))
                else:
                    cp.readfp(StringIO(out))
            except configparser.Error:
                fact = "error loading fact - please check content"
                module.warn(fact)
            else:
                fact = {}
                for sect in cp.sections():
                    if sect not in fact:
                        fact[sect] = {}
                    for opt in cp.options(sect):
                        val = cp.get(sect, opt)
                        fact[sect][opt] = val

        local[fact_base] = fact

    local_facts['local'] = local
    return local_facts
def _do_it(self, action):
    """Spawn an ``ansible-connection`` helper and send *action* to it.

    Writes the pickled play context followed by a ``#END_INIT#`` marker
    line and the action, then returns (returncode, stdout, stderr) of
    the helper process.
    """
    master, slave = pty.openpty()
    p = subprocess.Popen(["ansible-connection"], stdin=slave, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # Unbuffered binary stream over the pty master.
    stdin = os.fdopen(master, 'wb', 0)
    os.close(slave)

    # BUGFIX: pickle straight to bytes instead of via a text StringIO —
    # on Python 3 pickle emits bytes and the stream above is binary, so
    # the old StringIO round-trip raised TypeError.  Protocol 0 is kept
    # (the Python 2 default the old code used); it is ASCII-based, so
    # the '\n#END_INIT#\n' line marker cannot appear inside the payload.
    stdin.write(cPickle.dumps(self._play_context.serialize(), 0))
    stdin.write(b'\n#END_INIT#\n')
    stdin.write(to_bytes(action))
    stdin.write(b'\n\n')
    stdin.close()

    (stdout, stderr) = p.communicate()
    return (p.returncode, stdout, stderr)
def to_ldif(cls, data):
    """Convert a list of (dn, attributes) entries to an LDIF string.

    See https://github.com/atterdag/ansible-filters-ldif for how to
    format the dictionary.
    """
    # Reject anything that is not a list up front.
    if not isinstance(data, list):
        raise AnsibleFilterError(
            'Input data to_ldif filter is not a list(%s)' % data)
    try:
        buf = StringIO()
        writer = LDIFWriter(buf)
        for entry in data:
            # entry[0] is the DN, entry[1] the attribute mapping.
            writer.unparse(str(entry[0]), cls.encode_values(dict(entry[1])))
        return buf.getvalue()
    except Exception:
        raise AnsibleFilterError(
            'Invalid input data for to_ldif filter (%s)' % data)
def from_ldif(cls, data):
    """Parse an LDIF string into a list of [dn, attributes] pairs."""
    try:
        records = LDIFRecordList(StringIO(data))
        records.parse()
        # Decode each record's attribute values while keeping the
        # [dn, attributes] pair shape.
        return [[dn, cls.decode_values(attrs)]
                for dn, attrs in records.all_records]
    except Exception:
        raise AnsibleFilterError(
            'Invalid LDIF data for LDIFRecordList (%s)' % data)
def receive(self, obj=None):
    """Accumulate shell output until the device prompt is matched.

    Answers any interactive prompt described by *obj* along the way,
    records the raw response in ``self._last_response`` and returns the
    stripped, sanitized output.
    """
    buf = StringIO()
    prompt_handled = False
    self._matched_prompt = None

    while True:
        buf.write(self._shell.recv(256))

        # Inspect only the trailing window of the buffer so prompt
        # matching stays cheap; never seek before the start.
        buf.seek(max(buf.tell() - 256, 0))
        window = self._strip(buf.read())

        if obj and (obj.get('prompt') and not prompt_handled):
            prompt_handled = self._handle_prompt(window, obj)

        if self._find_prompt(window):
            self._last_response = buf.getvalue()
            return self._sanitize(self._strip(self._last_response), obj)
def main():
    """Ansible module entry point: import assets into the controller via awxkit.

    Captures awxkit's error log output during the import and fails the
    module if the importer logged any errors even though it raised no
    exception.
    """
    argument_spec = dict(
        assets=dict(type='dict', required=True)
    )

    module = ControllerAWXKitModule(argument_spec=argument_spec, supports_check_mode=False)

    assets = module.params.get('assets')

    if not HAS_EXPORTABLE_RESOURCES:
        module.fail_json(msg="Your version of awxkit does not appear to have import/export")

    # Currently the import process does not return anything on error
    # It simply just logs to Python's logger
    # Set up a log gobbler to get error messages from import_assets
    logger = logging.getLogger('awxkit.api.pages.api')
    logger.setLevel(logging.ERROR)

    log_capture_string = StringIO()
    ch = logging.StreamHandler(log_capture_string)
    ch.setLevel(logging.ERROR)
    logger.addHandler(ch)

    log_contents = ''

    # Run the import process
    try:
        # import_assets returns a truthy value when something changed.
        module.json_output['changed'] = module.get_api_v2_object().import_assets(assets)
    except Exception as e:
        module.fail_json(msg="Failed to import assets {0}".format(e))
    finally:
        # Finally, consume the logs in case there were any errors and die if there were
        log_contents = log_capture_string.getvalue()
        log_capture_string.close()
        if log_contents != '':
            module.fail_json(msg=log_contents)

    module.exit_json(**module.json_output)
def from_ini(o):
    """Parse an INI-formatted string into a dict of sections.

    Returns a dict mapping each section name to a dict of its options,
    with the defaults included under the ``DEFAULT`` key.  Raises
    AnsibleFilterError for non-string input.
    """
    if not isinstance(o, string_types):
        raise AnsibleFilterError('from_ini requires a string, got %s' % type(o))

    parser = configparser.RawConfigParser()
    parser.optionxform = partial(to_text, errors='surrogate_or_strict')
    # BUGFIX: readfp() was deprecated in Python 3.2 and removed in 3.12;
    # use read_file() where the interpreter provides it.
    if hasattr(parser, 'read_file'):
        parser.read_file(StringIO(o))
    else:
        parser.readfp(StringIO(o))

    # _sections/_defaults are private, but RawConfigParser offers no public
    # way to dump all sections without knowing their names up front.
    d = dict(parser._sections)
    for k in d:
        d[k] = dict(d[k])
        # Python 2's ConfigParser stored the section name under '__name__'.
        d[k].pop('__name__', None)
    d['DEFAULT'] = dict(parser._defaults)
    return d
def get_a_ssh_config(box_name):
    """Gives back a map of all the machine's ssh configurations"""
    raw = subprocess.check_output(["vagrant", "ssh-config", box_name])
    ssh_config = SSHConfig()
    ssh_config.parse(StringIO(to_text(raw, errors='surrogate_or_strict')))
    host_config = ssh_config.lookup(box_name)

    # man 5 ssh_config:
    # > It is possible to have multiple identity files ...
    # > all these identities will be tried in sequence.
    # Collapse the identityfile list to a single existing file path.
    for identity in host_config['identityfile']:
        if os.path.isfile(identity):
            host_config['identityfile'] = identity

    return {ansible_key: host_config[ssh_key]
            for ssh_key, ansible_key in _ssh_to_ansible}
def receive(self, cmd=None):
    """Read shell output until the device prompt is found.

    Answers any interactive prompt that *cmd* declares, then returns the
    sanitized response.  A ShellError raised during prompt detection is
    annotated with the failing command and re-raised.
    """
    recv = StringIO()
    handled = False
    while True:
        data = self.shell.recv(200)
        recv.write(data)
        # BUGFIX: clamp the window offset at zero — seeking to a negative
        # position fails while fewer than 200 bytes have been received.
        # Matches the clamping done by the other receive() implementations.
        offset = recv.tell() - 200 if recv.tell() > 200 else 0
        recv.seek(offset)
        window = self.strip(recv.read())
        if hasattr(cmd, 'prompt') and not handled:
            handled = self.handle_prompt(window, cmd)
        try:
            if self.find_prompt(window):
                resp = self.strip(recv.getvalue())
                return self.sanitize(cmd, resp)
        except ShellError:
            exc = get_exception()
            exc.command = cmd
            raise
def receive(self, obj=None):
    """Handles receiving of output from command"""
    recv_buffer = StringIO()
    handled_prompt = False
    self._matched_prompt = None

    while True:
        chunk = self._shell.recv(256)
        recv_buffer.write(chunk)

        # Rewind by at most one chunk so the prompt search window covers
        # the data that just arrived, without seeking before the start.
        total = recv_buffer.tell()
        recv_buffer.seek(total - 256 if total > 256 else 0)
        window = self._strip(recv_buffer.read())

        if obj and (obj.get('prompt') and not handled_prompt):
            handled_prompt = self._handle_prompt(window, obj)

        if self._find_prompt(window):
            self._last_response = recv_buffer.getvalue()
            resp = self._strip(self._last_response)
            return self._sanitize(resp, obj)
def load_config(self, config_path):
    """Load settings from a YAML/JSON or legacy ini config file.

    Tries YAML (which also covers JSON) first; if the content is not a
    YAML mapping, falls back to parsing it as a TowerCLI-style ini file.
    Recognized settings (``self.honorred_settings`` — attribute defined
    elsewhere on this class) are set as attributes on ``self``.
    Raises ConfigFileException on any read or parse failure.
    """
    # Validate the config file is an actual file
    if not isfile(config_path):
        raise ConfigFileException('The specified config file does not exist')

    if not access(config_path, R_OK):
        raise ConfigFileException("The specified config file cannot be read")

    # Read in the file contents:
    with open(config_path, 'r') as f:
        config_string = f.read()

    # First try to yaml load the content (which will also load json)
    try:
        config_data = yaml.load(config_string, Loader=yaml.SafeLoader)
        # If this is an actual ini file, yaml will return the whole thing as a string instead of a dict
        if type(config_data) is not dict:
            raise AssertionError("The yaml config file is not properly formatted as a dict.")
    except (AttributeError, yaml.YAMLError, AssertionError):
        # TowerCLI used to support a config file with a missing [general] section by prepending it if missing
        if '[general]' not in config_string:
            config_string = '[general]{0}'.format(config_string)

        config = ConfigParser()

        try:
            placeholder_file = StringIO(config_string)
            # py2 ConfigParser has readfp, that has been deprecated in favor of read_file in py3
            # This "if" removes the deprecation warning
            if hasattr(config, 'read_file'):
                config.read_file(placeholder_file)
            else:
                config.readfp(placeholder_file)

            # If we made it here then we have values from reading the ini file, so let's pull them out into a dict
            config_data = {}
            for honorred_setting in self.honorred_settings:
                try:
                    config_data[honorred_setting] = config.get('general', honorred_setting)
                except (NoOptionError):
                    # Settings absent from the [general] section are simply skipped.
                    pass
        except Exception as e:
            raise ConfigFileException("An unknown exception occured trying to ini load config file: {0}".format(e))
    except Exception as e:
        raise ConfigFileException("An unknown exception occured trying to load config file: {0}".format(e))

    # If we made it here, we have a dict which has values in it from our config, any final settings logic can be performed here
    for honorred_setting in self.honorred_settings:
        if honorred_setting in config_data:
            # Veriffy SSL must be a boolean
            if honorred_setting == 'verify_ssl':
                if type(config_data[honorred_setting]) is str:
                    setattr(self, honorred_setting, strtobool(config_data[honorred_setting]))
                else:
                    setattr(self, honorred_setting, bool(config_data[honorred_setting]))
            else:
                setattr(self, honorred_setting, config_data[honorred_setting])
def collect(self, module=None, collected_facts=None):
    """Collect facts from ``*.fact`` files under the module's fact_path.

    Executable .fact files are run and their stdout parsed; other .fact
    files are read directly.  Content is parsed as JSON first, then as
    ini; failures are recorded as a warning string under the fact's key.
    Returns a dict of the form ``{'local': {<basename>: <fact>}}``.
    """
    local_facts = {}
    local_facts['local'] = {}

    if not module:
        return local_facts

    fact_path = module.params.get('fact_path', None)
    if not fact_path or not os.path.exists(fact_path):
        return local_facts

    local = {}
    # go over .fact files, run executables, read rest, skip bad with warning and note
    for fn in sorted(glob.glob(fact_path + '/*.fact')):
        # use filename for key where it will sit under local facts
        fact_base = os.path.basename(fn).replace('.fact', '')

        if stat.S_IXUSR & os.stat(fn)[stat.ST_MODE]:
            failed = None
            try:
                # run it
                rc, out, err = module.run_command(fn)
                if rc != 0:
                    failed = 'Failure executing fact script (%s), rc: %s, err: %s' % (fn, rc, err)
            except (IOError, OSError) as e:
                failed = 'Could not execute fact script (%s): %s' % (fn, to_text(e))

            if failed is not None:
                local[fact_base] = failed
                module.warn(failed)
                continue
        else:
            # ignores exceptions and returns empty
            out = get_file_content(fn, default='')

        try:
            # ensure we have unicode
            out = to_text(out, errors='surrogate_or_strict')
        except UnicodeError:
            fact = 'error loading fact - output of running "%s" was not utf-8' % fn
            local[fact_base] = fact
            module.warn(fact)
            continue

        # try to read it as json first
        try:
            fact = json.loads(out)
        except ValueError:
            # if that fails read it with ConfigParser
            cp = configparser.ConfigParser()
            try:
                # BUGFIX: readfp() was removed in Python 3.12; use
                # read_file() where the interpreter provides it.
                if hasattr(cp, 'read_file'):
                    cp.read_file(StringIO(out))
                else:
                    cp.readfp(StringIO(out))
            except configparser.Error:
                fact = "error loading facts as JSON or ini - please check content: %s" % fn
                module.warn(fact)
            else:
                fact = {}
                for sect in cp.sections():
                    if sect not in fact:
                        fact[sect] = {}
                    for opt in cp.options(sect):
                        val = cp.get(sect, opt)
                        fact[sect][opt] = val
        except Exception as e:
            fact = "Failed to convert (%s) to JSON: %s" % (fn, to_text(e))
            module.warn(fact)

        local[fact_base] = fact

    local_facts['local'] = local
    return local_facts
def main():
    """Ansible module entry point: manage zypper repositories.

    Adds, modifies, removes or refreshes a repository.  When a ``.repo``
    file is given (by URL or path) it is downloaded/read and parsed so
    the module stays idempotent against the repository it defines.
    """
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=False),
            repo=dict(required=False),
            state=dict(choices=['present', 'absent'], default='present'),
            runrefresh=dict(required=False, default=False, type='bool'),
            description=dict(required=False),
            disable_gpg_check=dict(required=False, default=False, type='bool'),
            autorefresh=dict(required=False, default=True, type='bool', aliases=['refresh']),
            priority=dict(required=False, type='int'),
            enabled=dict(required=False, default=True, type='bool'),
            overwrite_multiple=dict(required=False, default=False, type='bool'),
            auto_import_keys=dict(required=False, default=False, type='bool'),
        ),
        supports_check_mode=False,
        required_one_of=[['state', 'runrefresh']],
    )

    repo = module.params['repo']
    alias = module.params['name']
    state = module.params['state']
    overwrite_multiple = module.params['overwrite_multiple']
    auto_import_keys = module.params['auto_import_keys']
    runrefresh = module.params['runrefresh']

    zypper_version = get_zypper_version(module)
    warnings = []  # collect warning messages for final output

    repodata = {
        'url': repo,
        'alias': alias,
        'name': module.params['description'],
        'priority': module.params['priority'],
    }
    # rewrite bools in the language that zypper lr -x provides for easier comparison
    if module.params['enabled']:
        repodata['enabled'] = '1'
    else:
        repodata['enabled'] = '0'
    if module.params['disable_gpg_check']:
        repodata['gpgcheck'] = '0'
    else:
        repodata['gpgcheck'] = '1'
    if module.params['autorefresh']:
        repodata['autorefresh'] = '1'
    else:
        repodata['autorefresh'] = '0'

    def exit_unchanged():
        # Common "nothing to do" exit path.
        module.exit_json(changed=False, repodata=repodata, state=state)

    # Check run-time module parameters
    if repo == '*' or alias == '*':
        if runrefresh:
            runrefreshrepo(module, auto_import_keys)
            module.exit_json(changed=False, runrefresh=True)
        else:
            module.fail_json(msg='repo=* can only be used with the runrefresh option.')

    if state == 'present' and not repo:
        module.fail_json(msg='Module option state=present requires repo')
    if state == 'absent' and not repo and not alias:
        module.fail_json(msg='Alias or repo parameter required when state=absent')

    if repo and repo.endswith('.repo'):
        if alias:
            module.fail_json(msg='Incompatible option: \'name\'. Do not use name when adding .repo files')
    else:
        if not alias and state == "present":
            module.fail_json(msg='Name required when adding non-repo files.')

    # Download / Open and parse .repo file to ensure idempotency
    if repo and repo.endswith('.repo'):
        if repo.startswith(('http://', 'https://')):
            response, info = fetch_url(module=module, url=repo, force=True)
            if not response or info['status'] != 200:
                module.fail_json(msg='Error downloading .repo file from provided URL')
            repofile_text = to_text(response.read(), errors='surrogate_or_strict')
        else:
            try:
                with open(repo, encoding='utf-8') as file:
                    repofile_text = file.read()
            except IOError:
                module.fail_json(msg='Error opening .repo file from provided path')

        repofile = configparser.ConfigParser()
        try:
            # BUGFIX: readfp() was removed in Python 3.12; read_file() is
            # its direct replacement and exists on all Python 3 versions.
            repofile.read_file(StringIO(repofile_text))
        except configparser.Error:
            module.fail_json(msg='Invalid format, .repo file could not be parsed')

        # No support for .repo file with zero or more than one repository
        if len(repofile.sections()) != 1:
            err = "Invalid format, .repo file contains %s repositories, expected 1" % len(repofile.sections())
            module.fail_json(msg=err)

        section = repofile.sections()[0]
        repofile_items = dict(repofile.items(section))
        # Only proceed if at least baseurl is available
        if 'baseurl' not in repofile_items:
            module.fail_json(msg='No baseurl found in .repo file')

        # Set alias (name) and url based on values from .repo file
        alias = section
        repodata['alias'] = section
        repodata['url'] = repofile_items['baseurl']

        # If gpgkey is part of the .repo file, auto import key
        if 'gpgkey' in repofile_items:
            auto_import_keys = True

        # Map additional values, if available
        if 'name' in repofile_items:
            repodata['name'] = repofile_items['name']
        if 'enabled' in repofile_items:
            repodata['enabled'] = repofile_items['enabled']
        if 'autorefresh' in repofile_items:
            repodata['autorefresh'] = repofile_items['autorefresh']
        if 'gpgcheck' in repofile_items:
            repodata['gpgcheck'] = repofile_items['gpgcheck']

    exists, mod, old_repos = repo_exists(module, repodata, overwrite_multiple)

    if alias:
        shortname = alias
    else:
        shortname = repo

    if state == 'present':
        if exists and not mod:
            if runrefresh:
                runrefreshrepo(module, auto_import_keys, shortname)
            exit_unchanged()
        rc, stdout, stderr = addmodify_repo(module, repodata, old_repos, zypper_version, warnings)
        if rc == 0 and (runrefresh or auto_import_keys):
            runrefreshrepo(module, auto_import_keys, shortname)
    elif state == 'absent':
        if not exists:
            exit_unchanged()
        rc, stdout, stderr = remove_repo(module, shortname)

    if rc == 0:
        module.exit_json(changed=True, repodata=repodata, state=state, warnings=warnings)
    else:
        module.fail_json(msg="Zypper failed with rc %s" % rc, rc=rc, stdout=stdout, stderr=stderr, repodata=repodata, state=state, warnings=warnings)
def main():
    """Ansible module entry point: send (import) assets into Tower.

    Accepts either an ``assets`` string (dumped to a temp file) or a
    list of ``files``; runs tower-cli's Sender with stdout captured so
    its console output can be returned in the module result.
    """
    argument_spec = dict(
        assets=dict(required=False),
        files=dict(required=False, default=[], type='list'),
        prevent=dict(required=False, default=[], type='list'),
        password_management=dict(required=False, default='default', choices=['default', 'random']),
    )

    module = TowerModule(argument_spec=argument_spec, supports_check_mode=False)

    if not HAS_TOWER_CLI:
        module.fail_json(msg='ansible-tower-cli required for this module')

    if not TOWER_CLI_HAS_EXPORT:
        module.fail_json(msg='ansible-tower-cli version does not support export')

    assets = module.params.get('assets')
    prevent = module.params.get('prevent')
    password_management = module.params.get('password_management')
    files = module.params.get('files')

    result = dict(
        changed=False,
        msg='',
        output='',
    )

    if not assets and not files:
        result['msg'] = "Assets or files must be specified"
        module.fail_json(**result)

    path = None
    if assets:
        # We got assets so we need to dump this out to a temp file and append that to files
        handle, path = mkstemp(prefix='', suffix='', dir='')
        with open(path, 'w') as f:
            f.write(assets)
        files.append(path)

    tower_auth = tower_auth_config(module)
    failed = False
    with settings.runtime_values(**tower_auth):
        # BUGFIX: capture stdout *before* entering the try block so that
        # old_stdout/captured_stdout are always defined when the finally
        # clause runs — previously a TowerCLIError raised by Sender()
        # turned into a NameError in the finally block.
        old_stdout = sys.stdout
        sys.stdout = captured_stdout = StringIO()
        try:
            sender = Sender(no_color=False)
            try:
                sender.send(files, prevent, password_management)
            except TypeError:
                # Newer versions of TowerCLI require 4 parameters
                sender.send(files, prevent, [], password_management)

            if sender.error_messages > 0:
                failed = True
                result['msg'] = "Transfer Failed with %d errors" % sender.error_messages
            if sender.changed_messages > 0:
                result['changed'] = True
        except TowerCLIError as e:
            result['msg'] = e.message
            failed = True
        finally:
            if path is not None:
                os.remove(path)
            result['output'] = captured_stdout.getvalue().split("\n")
            sys.stdout = old_stdout

    # Return stdout so that module returns will work
    if failed:
        module.fail_json(**result)
    else:
        module.exit_json(**result)
def main():
    """ Called to initiate the connect to the remote device """
    rc = 0
    result = {}
    messages = list()
    socket_path = None

    # Need stdin as a byte stream
    if PY3:
        stdin = sys.stdin.buffer
    else:
        stdin = sys.stdin

    # Note: update the below log capture code after Display.display() is refactored.
    saved_stdout = sys.stdout
    sys.stdout = StringIO()

    try:
        # read the play context data via stdin, which means depickling it
        vars_data = read_stream(stdin)
        init_data = read_stream(stdin)

        if PY3:
            pc_data = cPickle.loads(init_data, encoding='bytes')
            variables = cPickle.loads(vars_data, encoding='bytes')
        else:
            pc_data = cPickle.loads(init_data)
            variables = cPickle.loads(vars_data)

        play_context = PlayContext()
        play_context.deserialize(pc_data)
        display.verbosity = play_context.verbosity
    except Exception as e:
        # Record the failure; rc != 0 skips the connection setup below.
        rc = 1
        result.update({
            'error': to_text(e),
            'exception': traceback.format_exc()
        })

    if rc == 0:
        ssh = connection_loader.get('ssh', class_only=True)
        ansible_playbook_pid = sys.argv[1]
        task_uuid = sys.argv[2]
        # The control path uniquely identifies this (host, port, user,
        # connection, playbook pid) combination.
        cp = ssh._create_control_path(play_context.remote_addr, play_context.port, play_context.remote_user, play_context.connection, ansible_playbook_pid)
        # create the persistent connection dir if need be and create the paths
        # which we will be using later
        tmp_path = unfrackpath(C.PERSISTENT_CONTROL_PATH_DIR)
        makedirs_safe(tmp_path)

        socket_path = unfrackpath(cp % dict(directory=tmp_path))
        lock_path = unfrackpath("%s/.ansible_pc_lock_%s" % os.path.split(socket_path))

        # Serialize socket creation across competing task workers.
        with file_lock(lock_path):
            if not os.path.exists(socket_path):
                messages.append(('vvvv', 'local domain socket does not exist, starting it'))
                original_path = os.getcwd()
                r, w = os.pipe()
                pid = fork_process()

                if pid == 0:
                    # Child: run the persistent connection process; it
                    # reports back to the parent over the write pipe.
                    try:
                        os.close(r)
                        wfd = os.fdopen(w, 'w')
                        process = ConnectionProcess(wfd, play_context, socket_path, original_path, task_uuid, ansible_playbook_pid)
                        process.start(variables)
                    except Exception:
                        messages.append(('error', traceback.format_exc()))
                        rc = 1

                    if rc == 0:
                        process.run()
                    else:
                        process.shutdown()

                    # Child never returns to the code below.
                    sys.exit(rc)
                else:
                    # Parent: collect the child's startup report (JSON)
                    # from the read end of the pipe.
                    os.close(w)
                    rfd = os.fdopen(r, 'r')
                    data = json.loads(rfd.read(), cls=AnsibleJSONDecoder)
                    messages.extend(data.pop('messages'))
                    result.update(data)
            else:
                messages.append(('vvvv', 'found existing local domain socket, using it!'))
                conn = Connection(socket_path)
                conn.set_options(var_options=variables)
                pc_data = to_text(init_data)
                try:
                    conn.update_play_context(pc_data)
                    conn.set_check_prompt(task_uuid)
                except Exception as exc:
                    # Only network_cli has update_play context and set_check_prompt, so missing this is
                    # not fatal e.g. netconf
                    if isinstance(exc, ConnectionError) and getattr(exc, 'code', None) == -32601:
                        pass
                    else:
                        result.update({
                            'error': to_text(exc),
                            'exception': traceback.format_exc()
                        })

    # NOTE(review): if depickling failed above, socket_path is still None
    # here and os.path.exists(None) raises TypeError — confirm intended.
    if os.path.exists(socket_path):
        messages.extend(Connection(socket_path).pop_messages())
    messages.append(('vvvv', sys.stdout.getvalue()))
    result.update({
        'messages': messages,
        'socket_path': socket_path
    })
    # Restore the real stdout before emitting the JSON result.
    sys.stdout = saved_stdout

    if 'exception' in result:
        rc = 1
        sys.stderr.write(json.dumps(result, cls=AnsibleJSONEncoder))
    else:
        rc = 0
        sys.stdout.write(json.dumps(result, cls=AnsibleJSONEncoder))
    sys.exit(rc)