def _build_exec_cmd(self, cmd):
    """Assemble the local ``kubectl exec`` invocation used to run *cmd* on remote_host."""
    exec_cmd = [self.transport_cmd]

    # Derive CLI flags from the plugin's DOCUMENTATION options.
    doc_yaml = AnsibleLoader(self.documentation).get_single_data()
    for opt in doc_yaml.get('options'):
        value = self.get_option(opt)
        if opt.endswith('verify_ssl') and value != '':
            # verify_ssl is exposed to kubectl as its negation (a skip flag),
            # rendered as a lowercase string.
            exec_cmd.append(u'{0}={1}'.format(self.connection_options[opt], str(not value).lower()))
        elif not opt.endswith('container') and value and self.connection_options.get(opt):
            exec_cmd.extend([self.connection_options[opt], value])

    extra_args = self.get_option(u'{0}_extra_args'.format(self.transport))
    if extra_args:
        exec_cmd.extend(extra_args.split(' '))

    # Fall back to the inventory address when no pod option was given.
    pod = self.get_option(u'{0}_pod'.format(self.transport)) or self._play_context.remote_addr

    # -i is needed to keep stdin open which allows pipelining to work
    exec_cmd.extend(['exec', '-i', pod])

    # if the pod has more than one container, then container is required
    container = self.get_option(u'{0}_container'.format(self.transport))
    if container:
        exec_cmd.extend(['-c', container])

    exec_cmd.append('--')
    exec_cmd.extend(cmd)
    return exec_cmd
def parse_yaml_linenumbers(data, filename):
    """Parse yaml as ansible.utils.parse_yaml but with linenumbers.

    The line numbers are stored in each node's LINE_NUMBER_KEY key.
    """
    def compose_node(parent, index):
        # the line number where the previous token has ended (plus empty lines)
        line = loader.line
        node = Composer.compose_node(loader, parent, index)
        node.__line__ = line + 1
        return node

    def construct_mapping(node, deep=False):
        # Attach line-number and filename metadata to each constructed mapping.
        mapping = AnsibleConstructor.construct_mapping(loader, node, deep=deep)
        if hasattr(node, '__line__'):
            mapping[LINE_NUMBER_KEY] = node.__line__
        else:
            mapping[LINE_NUMBER_KEY] = mapping._line_number
        mapping[FILENAME_KEY] = filename
        return mapping

    try:
        import inspect
        kwargs = {}
        # Bug fix: inspect.getargspec() was removed in Python 3.11;
        # getfullargspec() is the drop-in replacement and also exposes
        # an ``args`` list.  Fall back to getargspec on very old Pythons.
        argspec = getattr(inspect, 'getfullargspec', None) or inspect.getargspec
        if 'vault_password' in argspec(AnsibleLoader.__init__).args:
            kwargs['vault_password'] = DEFAULT_VAULT_PASSWORD
        loader = AnsibleLoader(data, **kwargs)
        loader.compose_node = compose_node
        loader.construct_mapping = construct_mapping
        data = loader.get_single_data()
    except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
        raise SystemExit("Failed to parse YAML in %s: %s" % (filename, str(e)))
    return data
def test_parse_number(self):
    """A bare integer scalar loads as a plain int."""
    source = u"""
                1
                """
    yaml_loader = AnsibleLoader(StringIO(source), 'myfile.yml')
    self.assertEqual(yaml_loader.get_single_data(), 1)
def setUp(self):
    """Load the sample play from a named stream so each test can inspect it."""
    play_source = u"""
                - hosts: localhost
                  vars:
                    number: 1
                    string: Ansible
                    utf8_string: Cafè Eñyei
                    dictionary:
                      webster: daniel
                      oed: oxford
                    list:
                      - a
                      - b
                      - 1
                      - 2
                  tasks:
                    - name: Test case
                      ping:
                        data: "{{ utf8_string }}"
                    - name: Test 2
                      ping:
                        data: "Cafè Eñyei"
                    - name: Test 3
                      command: "printf 'Cafè Eñyei\\n'"
                """
    stream = NameStringIO(play_source)
    self.play_filename = '/path/to/myplay.yml'
    stream.name = self.play_filename
    self.loader = AnsibleLoader(stream)
    self.data = self.loader.get_single_data()
def _safe_load(self, stream, file_name=None):
    """Like ``yaml.safe_load()``, but backed by our custom AnsibleLoader class."""
    yaml_loader = AnsibleLoader(stream, file_name)
    try:
        return yaml_loader.get_single_data()
    finally:
        # Always release parser resources, even when loading raises.
        yaml_loader.dispose()
def _build_exec_cmd(self, cmd):
    """Build the local kubectl exec command to run cmd on remote_host.

    Returns a tuple ``(local_cmd, censored_local_cmd)``: the second list is
    identical to the first except token/password values are replaced with
    ``"********"`` so it is safe to log.
    """
    local_cmd = [self.transport_cmd]
    censored_local_cmd = [self.transport_cmd]

    def _append(args, censored_args=None):
        # Keep the real and the log-safe command in lockstep; by default the
        # censored copy receives the very same arguments.  This removes the
        # duplicated bookkeeping the previous implementation carried.
        local_cmd.extend(args)
        censored_local_cmd.extend(censored_args if censored_args is not None else args)

    # Build command options based on doc string
    doc_yaml = AnsibleLoader(self.documentation).get_single_data()
    for key in doc_yaml.get("options"):
        if key.endswith("verify_ssl") and self.get_option(key) != "":
            # Translate verify_ssl to skip_verify_ssl, and output as string
            skip_verify_ssl = not self.get_option(key)
            _append([u"{0}={1}".format(self.connection_options[key], str(skip_verify_ssl).lower())])
        elif (
            not key.endswith("container")
            and self.get_option(key)
            and self.connection_options.get(key)
        ):
            cmd_arg = self.connection_options[key]
            if key.endswith(("_token", "_password")):
                # Redact password and token from console log
                _append([cmd_arg, self.get_option(key)], [cmd_arg, "********"])
            else:
                _append([cmd_arg, self.get_option(key)])

    extra_args_name = u"{0}_extra_args".format(self.transport)
    if self.get_option(extra_args_name):
        _append(self.get_option(extra_args_name).split(" "))

    pod = self.get_option(u"{0}_pod".format(self.transport))
    if not pod:
        pod = self._play_context.remote_addr
    # -i is needed to keep stdin open which allows pipelining to work
    _append(["exec", "-i", pod])

    # if the pod has more than one container, then container is required
    container_arg_name = u"{0}_container".format(self.transport)
    if self.get_option(container_arg_name):
        _append(["-c", self.get_option(container_arg_name)])

    _append(["--"] + cmd)
    return local_cmd, censored_local_cmd
def test_parse_string(self):
    """An ASCII scalar loads as a unicode string carrying position info."""
    source = """
                Ansible
                """
    data = AnsibleLoader(StringIO(source), 'myfile.yml').get_single_data()
    self.assertEqual(data, u'Ansible')
    self.assertIsInstance(data, unicode)
    self.assertEqual(data.ansible_pos, ('myfile.yml', 2, 17))
def test_parse_utf8_string(self):
    """A non-ASCII scalar loads as text with accurate position info."""
    source = u"""
                Cafè Eñyei
                """
    data = AnsibleLoader(StringIO(source), 'myfile.yml').get_single_data()
    self.assertEqual(data, u'Cafè Eñyei')
    self.assertIsInstance(data, text_type)
    self.assertEqual(data.ansible_pos, ('myfile.yml', 2, 17))
def _safe_load(self, stream, file_name=None):
    """Like ``yaml.safe_load()``, but using our custom vault-aware loader class."""
    yaml_loader = AnsibleLoader(stream, file_name, self._vault_password)
    try:
        return yaml_loader.get_single_data()
    finally:
        try:
            yaml_loader.dispose()
        except AttributeError:
            # older versions of yaml don't have dispose function, ignore
            pass
def _safe_load(self, stream, file_name=None):
    """Like ``yaml.safe_load()``, but using our custom loader with the bytes vault password."""
    yaml_loader = AnsibleLoader(stream, file_name, self._b_vault_password)
    try:
        return yaml_loader.get_single_data()
    finally:
        try:
            yaml_loader.dispose()
        except AttributeError:
            # older versions of yaml don't have dispose function, ignore
            pass
def add_fragments(doc, filename, fragment_loader):
    """Merge every documentation fragment named in ``doc`` into ``doc`` in place.

    :param doc: plugin DOCUMENTATION dict; its ``extends_documentation_fragment``
        entry (a string or list of "name" / "name.VAR" slugs) is consumed here.
    :param filename: source file name, used in error messages and YAML parsing.
    :param fragment_loader: plugin loader used to resolve fragment classes.
    :raises AnsibleAssertionError: if a named fragment cannot be found.
    :raises Exception: if a fragment has no ``options`` section.
    :raises AnsibleError: if a fragment section cannot be merged.
    """
    fragments = doc.pop('extends_documentation_fragment', [])
    if isinstance(fragments, string_types):
        fragments = [fragments]

    # Allow the module to specify a var other than DOCUMENTATION
    # to pull the fragment from, using dot notation as a separator
    for fragment_slug in fragments:
        fragment_slug = fragment_slug.lower()
        if '.' in fragment_slug:
            fragment_name, fragment_var = fragment_slug.split('.', 1)
            fragment_var = fragment_var.upper()
        else:
            fragment_name, fragment_var = fragment_slug, 'DOCUMENTATION'

        fragment_class = fragment_loader.get(fragment_name)
        if fragment_class is None:
            raise AnsibleAssertionError('fragment_class is None')

        # A fragment class without the requested attribute yields an empty mapping.
        fragment_yaml = getattr(fragment_class, fragment_var, '{}')
        fragment = AnsibleLoader(fragment_yaml, file_name=filename).get_single_data()

        # Notes are appended (not merged) so every fragment's notes survive.
        if 'notes' in fragment:
            notes = fragment.pop('notes')
            if notes:
                if 'notes' not in doc:
                    doc['notes'] = []
                doc['notes'].extend(notes)

        if 'options' not in fragment:
            raise Exception(
                "missing options in fragment (%s), possibly misformatted?: %s" % (fragment_name, filename))

        # ensure options themselves are directly merged
        if 'options' in doc:
            try:
                merge_fragment(doc['options'], fragment.pop('options'))
            except Exception as e:
                raise AnsibleError("%s options (%s) of unknown type: %s" % (to_native(e), fragment_name, filename))
        else:
            doc['options'] = fragment.pop('options')

        # merge rest of the sections
        try:
            merge_fragment(doc, fragment)
        except Exception as e:
            raise AnsibleError("%s (%s) of unknown type: %s" % (to_native(e), fragment_name, filename))
def add_fragments(doc, filename):
    """Merge documentation fragments referenced by ``doc`` into ``doc`` in place.

    ``doc['extends_documentation_fragment']`` may be a string or a list of
    "name" / "name.VAR" slugs; each fragment's YAML is loaded and merged into
    ``doc`` (notes appended; mappings, sets and sequences combined, with values
    already present in ``doc`` taking precedence).
    """
    fragments = doc.get('extends_documentation_fragment', [])
    if isinstance(fragments, string_types):
        fragments = [fragments]

    # Allow the module to specify a var other than DOCUMENTATION
    # to pull the fragment from, using dot notation as a separator
    for fragment_slug in fragments:
        fragment_slug = fragment_slug.lower()
        if '.' in fragment_slug:
            fragment_name, fragment_var = fragment_slug.split('.', 1)
            fragment_var = fragment_var.upper()
        else:
            fragment_name, fragment_var = fragment_slug, 'DOCUMENTATION'

        # NOTE(review): relative path — resolution depends on the current
        # working directory; confirm callers run from the expected location.
        fragment_loader.add_directory('../../plugins/modules/')
        fragment_class = fragment_loader.get(fragment_name)
        # NOTE(review): `assert` is stripped under `python -O`; an explicit
        # raise would guarantee this check always runs.
        assert fragment_class is not None

        fragment_yaml = getattr(fragment_class, fragment_var, '{}')
        fragment = AnsibleLoader(fragment_yaml, file_name=filename).get_single_data()

        # Notes are appended (not merged) so every fragment's notes survive.
        if 'notes' in fragment:
            notes = fragment.pop('notes')
            if notes:
                if 'notes' not in doc:
                    doc['notes'] = []
                doc['notes'].extend(notes)

        if 'options' not in fragment and 'logging_options' not in fragment and 'connection_options' not in fragment:
            raise Exception(
                "missing options in fragment (%s), possibly misformatted?: %s" % (fragment_name, filename))

        # Merge each fragment section; keys already in doc take precedence.
        for key, value in iteritems(fragment):
            if key in doc:
                # assumes both structures have same type
                if isinstance(doc[key], MutableMapping):
                    value.update(doc[key])
                elif isinstance(doc[key], MutableSet):
                    value.add(doc[key])
                elif isinstance(doc[key], MutableSequence):
                    value = sorted(frozenset(value + doc[key]))
                else:
                    raise Exception(
                        "Attempt to extend a documentation fragement (%s) of unknown type: %s" % (fragment_name, filename))
            doc[key] = value
def load_yaml(yaml_file, vault_secret=None):
    """
    Load a YAML file into a python dictionary.

    The YAML file can be fully encrypted by Ansible-Vault or can contain
    multiple inline Ansible-Vault encrypted values. Ansible Vault
    encryption is ideal to store passwords or encrypt the entire file
    with sensitive data if required.
    """
    vault = VaultLib()
    if vault_secret:
        # Load the vault password so both fully-encrypted files and inline
        # vault values can be decrypted.
        secret_file = get_file_vault_secret(filename=vault_secret, loader=DataLoader())
        secret_file.load()
        vault.secrets = [('default', secret_file)]
    data = None
    if os.path.isfile(yaml_file):
        with open(yaml_file, 'r', encoding='utf-8') as stream:
            # Render environment variables using jinja templates
            contents = stream.read()
            template = Template(contents)
            stream = StringIO(template.render(env_var=os.environ))
            try:
                if is_encrypted_file(stream):
                    file_data = stream.read()
                    # NOTE(review): yaml.load() without an explicit Loader can
                    # construct arbitrary objects; prefer yaml.safe_load() if the
                    # decrypted content needs no custom tags — confirm.
                    data = yaml.load(vault.decrypt(file_data, None))
                else:
                    loader = AnsibleLoader(stream, None, vault.secrets)
                    try:
                        data = loader.get_single_data()
                    except Exception as exc:
                        raise Exception(
                            f'Error when loading YAML config at {yaml_file} {exc}'
                        ) from exc
                    finally:
                        loader.dispose()
            except yaml.YAMLError as exc:
                raise Exception(
                    f'Error when loading YAML config at {yaml_file} {exc}'
                ) from exc
    else:
        LOGGER.debug('No file at %s', yaml_file)
    if isinstance(data, AnsibleMapping):
        # Downcast so callers don't depend on Ansible's mapping type.
        data = dict(data)
    return data
def test_parse_list(self):
    """A two-element scalar list loads with per-element position info."""
    source = u"""
                - a
                - b
                """
    data = AnsibleLoader(StringIO(source), 'myfile.yml').get_single_data()
    self.assertEqual(data, [u'a', u'b'])
    self.assertEqual(len(data), 2)
    self.assertIsInstance(data[0], text_type)
    self.assertEqual(data.ansible_pos, ('myfile.yml', 2, 17))
    expected_positions = [('myfile.yml', 2, 19), ('myfile.yml', 3, 19)]
    for item, pos in zip(data, expected_positions):
        self.assertEqual(item.ansible_pos, pos)
def add_fragments(doc, filename, fragment_loader):
    """Merge every documentation fragment named in ``doc`` into ``doc`` in place.

    :param doc: plugin DOCUMENTATION dict; its ``extends_documentation_fragment``
        entry (a string or list of "name" / "name.VAR" slugs) is consumed here.
    :param filename: source file name, used in error messages and YAML parsing.
    :param fragment_loader: plugin loader used to resolve fragment classes.
    :raises AnsibleAssertionError: if a named fragment cannot be found.
    :raises Exception: if a fragment has no ``options`` section.
    :raises AnsibleError: if a fragment section cannot be merged.
    """
    fragments = doc.pop('extends_documentation_fragment', [])
    if isinstance(fragments, string_types):
        fragments = [fragments]

    # Allow the module to specify a var other than DOCUMENTATION
    # to pull the fragment from, using dot notation as a separator
    for fragment_slug in fragments:
        fragment_slug = fragment_slug.lower()
        if '.' in fragment_slug:
            fragment_name, fragment_var = fragment_slug.split('.', 1)
            fragment_var = fragment_var.upper()
        else:
            fragment_name, fragment_var = fragment_slug, 'DOCUMENTATION'

        fragment_class = fragment_loader.get(fragment_name)
        if fragment_class is None:
            raise AnsibleAssertionError('fragment_class is None')

        # A fragment class without the requested attribute yields an empty mapping.
        fragment_yaml = getattr(fragment_class, fragment_var, '{}')
        fragment = AnsibleLoader(fragment_yaml, file_name=filename).get_single_data()

        # Notes are appended (not merged) so every fragment's notes survive.
        if 'notes' in fragment:
            notes = fragment.pop('notes')
            if notes:
                if 'notes' not in doc:
                    doc['notes'] = []
                doc['notes'].extend(notes)

        if 'options' not in fragment:
            raise Exception("missing options in fragment (%s), possibly misformatted?: %s" % (fragment_name, filename))

        # ensure options themselves are directly merged
        if 'options' in doc:
            try:
                merge_fragment(doc['options'], fragment.pop('options'))
            except Exception as e:
                raise AnsibleError("%s options (%s) of unknown type: %s" % (to_native(e), fragment_name, filename))
        else:
            doc['options'] = fragment.pop('options')

        # merge rest of the sections
        try:
            merge_fragment(doc, fragment)
        except Exception as e:
            raise AnsibleError("%s (%s) of unknown type: %s" % (to_native(e), fragment_name, filename))
def read_docstub(filename):
    """
    Quickly find short_description using string methods instead of node parsing.
    This does not return a full set of documentation strings and is intended for
    operations like ansible-doc -l.
    """
    capturing = False
    doc_stub = []

    # Bug fix: use a context manager so the file handle is always closed
    # (the previous implementation leaked the handle on every call).
    with open(filename, 'r') as t_module_data:
        for line in t_module_data:
            # start capturing the stub until indentation returns
            if capturing and line[0] == ' ':
                doc_stub.append(line)
            elif capturing and line[0] != ' ':
                break

            if 'short_description:' in line:
                capturing = True
                doc_stub.append(line)

    short_description = r''.join(doc_stub).strip().rstrip('.')
    data = AnsibleLoader(short_description, file_name=filename).get_single_data()

    return data
def main():
    """Re-encrypt one vaulted value in a YAML file with a new vault password.

    argv: [1] current vault password file, [2] new vault password file,
    [3] input YAML file.  Writes the result to "<vault_file>.new".
    """
    vault_file = sys.argv[1]
    new_vault_file = sys.argv[2]
    in_file = sys.argv[3]
    target_env = 'test'
    external_system_name = 'blabla'

    # Load vault password and prepare secrets for decryption
    loader = DataLoader()
    secret = vault.get_file_vault_secret(filename=vault_file, loader=loader)
    secret.load()
    vault_secrets = [('default', secret)]
    _vault = vault.VaultLib(vault_secrets)

    new_loader = DataLoader()
    new_secret = vault.get_file_vault_secret(filename=new_vault_file, loader=new_loader)
    new_secret.load()
    new_vault_secrets = [('default', new_secret)]
    _new_vault = vault.VaultLib(new_vault_secrets)

    # Load encrypted yml for processing
    with codecs.open(in_file, 'r', encoding='utf-8') as f:
        loaded_yaml = AnsibleLoader(f, vault_secrets=_vault.secrets).get_single_data()

    # Modify yml with new encrypted values
    # NOTE(review): ``external_system_password`` is not defined in this
    # function; presumably a module-level constant — confirm before running.
    new_encrypted_variable = objects.AnsibleVaultEncryptedUnicode.from_plaintext(
        external_system_password, _new_vault, new_vault_secrets[0][1])
    loaded_yaml[target_env]['credentials'][external_system_name]['password'] = new_encrypted_variable

    # Write a new encrypted yml.
    # Bug fix: the original referenced the bare name ``argv`` (NameError at
    # runtime); use ``sys.argv`` as everywhere else in this function.
    # NOTE(review): yaml.dump(..., encoding=None) emits str while the file is
    # opened in binary mode — confirm this target runs on Python 2, or open in
    # text mode instead.
    with open("%s.new" % sys.argv[1], 'wb') as fd:
        yaml.dump(loaded_yaml, fd, Dumper=AnsibleDumper, encoding=None, default_flow_style=False)

    print(loaded_yaml)
def test_parse_short_dict(self):
    """Flow- and block-style one-entry mappings both record accurate positions."""
    data = AnsibleLoader(StringIO(u"""{"foo": "bar"}"""), 'myfile.yml').get_single_data()
    self.assertEqual(data, dict(foo=u'bar'))
    self.assertEqual(data.ansible_pos, ('myfile.yml', 1, 1))
    self.assertEqual(data[u'foo'].ansible_pos, ('myfile.yml', 1, 9))

    data = AnsibleLoader(StringIO(u"""foo: bar"""), 'myfile.yml').get_single_data()
    self.assertEqual(data, dict(foo=u'bar'))
    self.assertEqual(data.ansible_pos, ('myfile.yml', 1, 1))
    self.assertEqual(data[u'foo'].ansible_pos, ('myfile.yml', 1, 6))
def test_parse_dict(self):
    """A two-key mapping loads with text keys/values and position info."""
    source = u"""
                webster: daniel
                oed: oxford
                """
    data = AnsibleLoader(StringIO(source), 'myfile.yml').get_single_data()
    self.assertEqual(data, {'webster': 'daniel', 'oed': 'oxford'})
    self.assertEqual(len(data), 2)
    self.assertIsInstance(list(data.keys())[0], text_type)
    self.assertIsInstance(list(data.values())[0], text_type)
    # The mapping's position is the beginning of its first key.
    self.assertEqual(data.ansible_pos, ('myfile.yml', 2, 17))
    self.assertEqual(data[u'webster'].ansible_pos, ('myfile.yml', 2, 26))
    self.assertEqual(data[u'oed'].ansible_pos, ('myfile.yml', 3, 22))
def get_scalars(self, raw_buffer_list, predicate):
    """Collect every scalar value satisfying *predicate*; encrypted values are decrypted."""
    secrets = self.generate_secrets()
    source_text = self._buffer_as_string(raw_buffer_list)
    yaml_loader = AnsibleLoader(source_text, "some", [("default", secrets)])
    matches = []
    root = yaml_loader.get_single_node()
    for child in root.value:
        self.recurse_mappings(child, secrets, matches, predicate)
    return matches
def add_fragments(doc, filename):
    """Merge documentation fragments referenced by ``doc`` into ``doc`` in place.

    ``doc['extends_documentation_fragment']`` may be a string or a list of
    "name" / "name.VAR" slugs; each fragment's YAML is loaded and merged into
    ``doc`` (notes appended; mappings, sets and sequences combined, with values
    already present in ``doc`` taking precedence).
    """
    fragments = doc.get('extends_documentation_fragment', [])
    if isinstance(fragments, string_types):
        fragments = [fragments]

    # Allow the module to specify a var other than DOCUMENTATION
    # to pull the fragment from, using dot notation as a separator
    for fragment_slug in fragments:
        fragment_slug = fragment_slug.lower()
        if '.' in fragment_slug:
            fragment_name, fragment_var = fragment_slug.split('.', 1)
            fragment_var = fragment_var.upper()
        else:
            fragment_name, fragment_var = fragment_slug, 'DOCUMENTATION'

        # NOTE(review): relative path — resolution depends on the current
        # working directory; confirm callers run from the expected location.
        fragment_loader.add_directory('../module_utils/')
        fragment_class = fragment_loader.get(fragment_name)
        # NOTE(review): `assert` is stripped under `python -O`; an explicit
        # raise would guarantee this check always runs.
        assert fragment_class is not None

        fragment_yaml = getattr(fragment_class, fragment_var, '{}')
        fragment = AnsibleLoader(fragment_yaml, file_name=filename).get_single_data()

        # Notes are appended (not merged) so every fragment's notes survive.
        if 'notes' in fragment:
            notes = fragment.pop('notes')
            if notes:
                if 'notes' not in doc:
                    doc['notes'] = []
                doc['notes'].extend(notes)

        if 'options' not in fragment and 'logging_options' not in fragment and 'connection_options' not in fragment:
            raise Exception("missing options in fragment (%s), possibly misformatted?: %s" % (fragment_name, filename))

        # Merge each fragment section; keys already in doc take precedence.
        for key, value in iteritems(fragment):
            if key in doc:
                # assumes both structures have same type
                if isinstance(doc[key], MutableMapping):
                    value.update(doc[key])
                elif isinstance(doc[key], MutableSet):
                    value.add(doc[key])
                elif isinstance(doc[key], MutableSequence):
                    value = sorted(frozenset(value + doc[key]))
                else:
                    raise Exception("Attempt to extend a documentation fragement (%s) of unknown type: %s" % (fragment_name, filename))
            doc[key] = value
def decrypt(secrets_file_path, decrypted_file_path):
    """Decrypt an Ansible-Vault encrypted YAML file into shell ``export`` lines.

    Reads the vault password path from $ANSIBLE_VAULT_PASSWORD_FILE, decrypts
    ``secrets_file_path`` and writes ``export KEY=VALUE`` lines to
    ``decrypted_file_path`` created with mode 0o600.  Exits with status 3 when
    the environment variable is unset.
    """
    try:
        vault_password_file = os.environ["ANSIBLE_VAULT_PASSWORD_FILE"]
    except KeyError:
        sys.exit(3)

    # NOTE(review): ``vault_password`` is never used below (the secret is
    # re-read via get_file_vault_secret); the read is kept so a missing or
    # unreadable password file still fails loudly here.
    with open(vault_password_file, 'r') as vpf:
        vault_password = vpf.read().replace('\n', '')

    # Load vault password and prepare secrets for decryption
    loader = DataLoader()
    secret = vault.get_file_vault_secret(filename=vault_password_file, loader=loader)
    secret.load()
    vault_secrets = [('default', secret)]
    _vault = vault.VaultLib(vault_secrets)

    # Load encrypted yml for processing
    with codecs.open(secrets_file_path, 'r', encoding='utf-8') as f:
        loaded_yaml = AnsibleLoader(f, vault_secrets=_vault.secrets).get_single_data()

    # Define decrypted file params
    flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY)
    mode = stat.S_IRUSR | stat.S_IWUSR  # 0o600 in octal

    # Delete and replace decrypted secrets to ensure file permissions
    try:
        os.remove(decrypted_file_path)
    except OSError:
        pass

    # Bug fix: temporarily clear the umask so the file is created with exactly
    # 0o600, then restore the saved value.  The previous double-swap restored
    # the wrong value and left the process umask permanently set to 0.
    umask_original = os.umask(0)
    try:
        decrypted_file_fd = os.open(decrypted_file_path, flags, mode)
    finally:
        os.umask(umask_original)

    # Bug fix: wrap the fd in a context manager so it is closed even when a
    # write fails (the original leaked the handle on any exception).
    with os.fdopen(decrypted_file_fd, 'w') as decrypted_file_out:
        for k, v in loaded_yaml.items():
            decrypted_file_out.write("export " + str(k) + "=" + str(v) + "\n")

    print("secrets decrypted")
def read_docstring(filename, verbose=True, ignore_errors=True):
    """
    Search for assignment of the DOCUMENTATION and EXAMPLES variables in the given file.
    Parse DOCUMENTATION from YAML and return the YAML doc or None together with EXAMPLES, as plain text.
    """

    data = {
        'doc': None,
        'plainexamples': None,
        'returndocs': None,
        'metadata': None,  # NOTE: not used anymore, kept for compat
        'seealso': None,
    }

    # Maps module-level variable names to keys of the ``data`` result dict.
    string_to_vars = {
        'DOCUMENTATION': 'doc',
        'EXAMPLES': 'plainexamples',
        'RETURN': 'returndocs',
        'ANSIBLE_METADATA': 'metadata',  # NOTE: now unused, but kept for backwards compat
    }

    try:
        with open(filename, 'rb') as b_module_data:
            M = ast.parse(b_module_data.read())

        for child in M.body:
            # Only top-level assignments can define the documentation variables.
            if isinstance(child, ast.Assign):
                for t in child.targets:
                    try:
                        theid = t.id
                    except AttributeError:
                        # skip errors can happen when trying to use the normal code
                        display.warning("Failed to assign id for %s on %s, skipping" % (t, filename))
                        continue

                    if theid in string_to_vars:
                        varkey = string_to_vars[theid]
                        if isinstance(child.value, ast.Dict):
                            # A literal dict assignment can be evaluated directly.
                            data[varkey] = ast.literal_eval(child.value)
                        else:
                            if theid in ['EXAMPLES', 'RETURN']:
                                # examples 'can' be yaml, return must be, but even if so, we dont want to parse as such here
                                # as it can create undesired 'objects' that don't display well as docs.
                                data[varkey] = to_text(child.value.s)
                            else:
                                # string should be yaml if already not a dict
                                data[varkey] = AnsibleLoader(child.value.s, file_name=filename).get_single_data()

                        display.debug('assigned: %s' % varkey)
    except Exception:
        if verbose:
            display.error("unable to parse %s" % filename)
        if not ignore_errors:
            raise

    return data
def test_front_matter(self):
    """A document-start marker and initial indentation offset positions correctly."""
    data = AnsibleLoader(StringIO(u"""---\nfoo: bar"""), 'myfile.yml').get_single_data()
    self.assertEqual(data, dict(foo=u'bar'))
    self.assertEqual(data.ansible_pos, ('myfile.yml', 2, 1))
    self.assertEqual(data[u'foo'].ansible_pos, ('myfile.yml', 2, 6))

    # Initial indent (See: #6348)
    data = AnsibleLoader(StringIO(u""" - foo: bar\n   baz: qux"""), 'myfile.yml').get_single_data()
    self.assertEqual(data, [{u'foo': u'bar', u'baz': u'qux'}])
    self.assertEqual(data.ansible_pos, ('myfile.yml', 1, 2))
    self.assertEqual(data[0].ansible_pos, ('myfile.yml', 1, 4))
    self.assertEqual(data[0][u'foo'].ansible_pos, ('myfile.yml', 1, 9))
    self.assertEqual(data[0][u'baz'].ansible_pos, ('myfile.yml', 2, 9))
def load_yaml(yaml_file, vault_secret=None):
    '''
    Load a YAML file into a python dictionary.

    The YAML file can be fully encrypted by Ansible-Vault or can contain
    multiple inline Ansible-Vault encrypted values. Ansible Vault
    encryption is ideal to store passwords or encrypt the entire file
    with sensitive data if required.
    '''
    vault = VaultLib()
    if vault_secret:
        # Load the vault password so both fully-encrypted files and inline
        # vault values can be decrypted.
        secret_file = get_file_vault_secret(filename=vault_secret, loader=DataLoader())
        secret_file.load()
        vault.secrets = [('default', secret_file)]
    data = None
    if os.path.isfile(yaml_file):
        # NOTE(review): no explicit encoding — relies on the locale default;
        # consider open(yaml_file, 'r', encoding='utf-8').
        with open(yaml_file, 'r') as stream:
            try:
                if is_encrypted_file(stream):
                    file_data = stream.read()
                    # NOTE(review): yaml.load() without an explicit Loader can
                    # construct arbitrary objects; prefer yaml.safe_load() if
                    # the decrypted content needs no custom tags — confirm.
                    data = yaml.load(vault.decrypt(file_data, None))
                else:
                    loader = AnsibleLoader(stream, None, vault.secrets)
                    try:
                        data = loader.get_single_data()
                    except Exception as exc:
                        raise Exception(
                            "Error when loading YAML config at {} {}".format(
                                yaml_file, exc))
                    finally:
                        loader.dispose()
            except yaml.YAMLError as exc:
                raise Exception(
                    "Error when loading YAML config at {} {}".format(
                        yaml_file, exc))
    else:
        logger.debug("No file at {}".format(yaml_file))
    return data
def _build_exec_cmd(self, cmd):
    """Assemble the local nsenter invocation used to run *cmd* on remote_host."""
    nsenter_cmd = [self.transport_cmd, '-t', self.target]
    executable = self.get_option('executable')

    # Translate documented options into CLI flags.
    doc_yaml = AnsibleLoader(self.documentation).get_single_data()
    for opt in doc_yaml.get('options'):
        if opt in ['nsenter_uid', 'nsenter_gid'] and self.get_option(opt) is not None:
            # uid/gid flags take a value (which may legitimately be falsy, e.g. 0).
            nsenter_cmd.extend([self.connection_options[opt], self.get_option(opt)])
        elif self.get_option(opt) and self.connection_options.get(opt):
            # All remaining options are boolean-style flags with no value.
            nsenter_cmd.append(self.connection_options[opt])

    nsenter_cmd.extend(['-F', '--'])
    nsenter_cmd.extend(cmd)
    return nsenter_cmd
def parse_yaml_linenumbers(data, filename):
    """
    Parse yaml as ansible.utils.parse_yaml but with linenumbers.

    The line numbers are stored in each node's LINE_NUMBER_KEY key.
    """
    def compose_node(parent, index):
        # the line number where the previous token has ended (plus empty lines)
        line = loader.line
        node = Composer.compose_node(loader, parent, index)
        node.__line__ = line + 1
        return node

    def construct_mapping(node, deep=False):
        try:
            mapping = AnsibleConstructor.construct_mapping(loader, node, deep=deep)
        except yaml.constructor.ConstructorError as e:
            # Wrap construction failures in the project's error type.
            raise LaterError("syntax error", e)

        # Attach line-number and filename metadata to the constructed mapping.
        if hasattr(node, "__line__"):
            mapping[LINE_NUMBER_KEY] = node.__line__
        else:
            mapping[LINE_NUMBER_KEY] = mapping._line_number
        mapping[FILENAME_KEY] = filename
        return mapping

    try:
        kwargs = {}
        loader = AnsibleLoader(data, **kwargs)
        loader.compose_node = compose_node
        loader.construct_mapping = construct_mapping
        # An empty document yields None; normalize to an empty list.
        data = loader.get_single_data() or []
    except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
        raise LaterError("syntax error", e)
    except (yaml.composer.ComposerError) as e:
        # Enrich the composer error message with its context before wrapping.
        e.problem = "{} {}".format(e.context, e.problem)
        raise LaterError("syntax error", e)
    return data
def read_docstring_from_python_module(filename, verbose=True, ignore_errors=True):
    """
    Use tokenization to search for assignment of the documentation variables in the given file.
    Parse from YAML and return the resulting python structure or None together with examples as plain text.
    """

    found = 0
    data = _init_doc_dict()

    next_string = None
    with tokenize.open(filename) as f:
        tokens = tokenize.generate_tokens(f.readline)
        for token in tokens:
            # A NAME token matching a documentation variable means the next
            # STRING token carries that variable's value.
            if token.type == tokenize.NAME:
                if token.string in string_to_vars:
                    next_string = string_to_vars[token.string]

            if next_string is not None and token.type == tokenize.STRING:
                found += 1
                value = token.string
                # Strip raw/bytes prefixes and surrounding quotes to obtain
                # the bare literal content.
                if value.startswith(('r', 'b')):
                    value = value.lstrip('rb')
                if value.startswith(("'", '"')):
                    value = value.strip("'\"")

                if next_string == 'plainexamples':
                    # keep as string
                    data[next_string] = to_text(value)
                else:
                    try:
                        data[next_string] = AnsibleLoader(value, file_name=filename).get_single_data()
                    except Exception as e:
                        msg = "Unable to parse docs '%s' in python file '%s': %s" % (_var2string(next_string), filename, to_native(e))
                        if not ignore_errors:
                            raise AnsibleParserError(msg, orig_exc=e)
                        elif verbose:
                            display.error(msg)

                next_string = None

    # if nothing else worked, fall back to old method
    if not found:
        data = read_docstring_from_python_file(filename, verbose, ignore_errors)

    return data
def read_docstring_from_yaml_file(filename, verbose=True, ignore_errors=True):
    ''' Read docs from 'sidecar' yaml file doc for a plugin '''

    data = _init_doc_dict()
    file_data = {}

    try:
        with open(filename, 'rb') as yamlfile:
            file_data = AnsibleLoader(yamlfile.read(), file_name=filename).get_single_data()
    except Exception as e:
        msg = "Unable to parse yaml file '%s': %s" % (filename, to_native(e))
        if not ignore_errors:
            raise AnsibleParserError(msg, orig_exc=e)
        elif verbose:
            display.error(msg)

    # Bug fix: an empty sidecar file parses to None; fall back to an empty
    # mapping so the .get() lookups below don't raise AttributeError.
    if not file_data:
        file_data = {}

    for key in string_to_vars:
        data[string_to_vars[key]] = file_data.get(key, None)

    return data
def read_docstring_from_python_file(filename, verbose=True, ignore_errors=True):
    """
    Use ast to search for assignment of the DOCUMENTATION and EXAMPLES variables in the given file.
    Parse DOCUMENTATION from YAML and return the YAML doc or None together with EXAMPLES, as plain text.
    """
    data = _init_doc_dict()
    # NOTE(review): string_to_vars is only read here, so the global statement
    # is not strictly required.
    global string_to_vars

    try:
        with open(filename, 'rb') as b_module_data:
            M = ast.parse(b_module_data.read())

        for child in M.body:
            # Only top-level assignments can define the documentation variables.
            if isinstance(child, ast.Assign):
                for t in child.targets:
                    try:
                        theid = t.id
                    except AttributeError:
                        # skip errors can happen when trying to use the normal code
                        display.warning("Building documentation, failed to assign id for %s on %s, skipping" % (t, filename))
                        continue

                    if theid in string_to_vars:
                        varkey = string_to_vars[theid]
                        if isinstance(child.value, ast.Dict):
                            # A literal dict assignment can be evaluated directly.
                            data[varkey] = ast.literal_eval(child.value)
                        else:
                            if theid == 'EXAMPLES':
                                # examples 'can' be yaml, but even if so, we dont want to parse as such here
                                # as it can create undesired 'objects' that don't display well as docs.
                                data[varkey] = to_text(child.value.s)
                            else:
                                # string should be yaml if already not a dict
                                data[varkey] = AnsibleLoader(child.value.s, file_name=filename).get_single_data()

                        display.debug('Documentation assigned: %s' % varkey)
    except Exception as e:
        msg = "Unable to parse documentation in python file '%s': %s" % (filename, to_native(e))
        if not ignore_errors:
            raise AnsibleParserError(msg, orig_exc=e)
        elif verbose:
            display.error(msg)

    return data
def parse_yaml_linenumbers(data, filename):
    """Parses yaml as ansible.utils.parse_yaml but with linenumbers.

    The line numbers are stored in each node's LINE_NUMBER_KEY key.
    """
    def compose_node(parent, index):
        # the line number where the previous token has ended (plus empty lines)
        line = loader.line
        node = Composer.compose_node(loader, parent, index)
        node.__line__ = line + 1
        return node

    def construct_mapping(node, deep=False):
        # Ansible 1.x lacks AnsibleConstructor; fall back to the plain one.
        if ANSIBLE_VERSION < 2:
            mapping = Constructor.construct_mapping(loader, node, deep=deep)
        else:
            mapping = AnsibleConstructor.construct_mapping(loader, node, deep=deep)
        if hasattr(node, '__line__'):
            mapping[LINE_NUMBER_KEY] = node.__line__
        else:
            mapping[LINE_NUMBER_KEY] = mapping._line_number
        mapping[FILENAME_KEY] = filename
        return mapping

    try:
        if ANSIBLE_VERSION < 2:
            loader = yaml.Loader(data)
        else:
            import inspect
            kwargs = {}
            # Bug fix: inspect.getargspec() was removed in Python 3.11;
            # getfullargspec() is the drop-in replacement and also exposes
            # an ``args`` list.  Fall back to getargspec on very old Pythons.
            argspec = getattr(inspect, 'getfullargspec', None) or inspect.getargspec
            if 'vault_password' in argspec(AnsibleLoader.__init__).args:
                kwargs['vault_password'] = DEFAULT_VAULT_PASSWORD
            loader = AnsibleLoader(data, **kwargs)
        loader.compose_node = compose_node
        loader.construct_mapping = construct_mapping
        data = loader.get_single_data()
    except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
        raise SystemExit("Failed to parse YAML in %s: %s" % (filename, str(e)))
    return data
def read_assertion_docs(filename, verbose=True, ignore_errors=True):
    """
    Search for assignment of ASSERTIONS variables in the given file.
    Parse DOCUMENTATION from YAML and return the YAML doc or None together with EXAMPLES, as plain text.
    """
    data = {
        'assertions': None,
    }

    # Maps module-level variable names to keys of the ``data`` result dict.
    string_to_vars = {
        'ASSERTIONS': 'assertions',
    }

    try:
        with open(filename, 'rb') as b_module_data:
            M = ast.parse(b_module_data.read())

        for child in M.body:
            # Only top-level assignments can define the ASSERTIONS variable.
            if isinstance(child, ast.Assign):
                for t in child.targets:
                    try:
                        theid = t.id
                    except AttributeError:
                        # skip errors can happen when trying to use the normal code
                        display.warning(
                            "Failed to assign id for %s on %s, skipping" % (t, filename))
                        continue

                    if theid in string_to_vars:
                        varkey = string_to_vars[theid]
                        if isinstance(child.value, ast.Dict):
                            # A literal dict assignment can be evaluated directly.
                            data[varkey] = ast.literal_eval(child.value)
                        else:
                            if theid == 'ASSERTIONS':
                                # string should be yaml
                                data[varkey] = AnsibleLoader(
                                    child.value.s, file_name=filename).get_single_data()
                            else:
                                # not yaml, should be a simple string
                                data[varkey] = to_text(child.value.s)
                        display.debug('assigned :%s' % varkey)
    except Exception:
        if verbose:
            display.error("unable to parse %s" % filename)
        if not ignore_errors:
            raise

    return data
def read_docstring_from_yaml_file(filename, verbose=True, ignore_errors=True):
    ''' Read docs from 'sidecar' yaml file doc for a plugin '''

    # NOTE: string_to_vars is only read here; the original declared it
    # ``global`` unnecessarily, which has been dropped.
    data = _init_doc_dict()
    file_data = {}

    try:
        with open(filename, 'rb') as yamlfile:
            file_data = AnsibleLoader(yamlfile.read(), file_name=filename).get_single_data()
    except Exception:
        if verbose:
            display.error("unable to parse %s" % filename)
        if not ignore_errors:
            raise

    # Bug fix: an empty sidecar file parses to None; fall back to an empty
    # mapping so the .get() lookups below don't raise AttributeError.
    if not file_data:
        file_data = {}

    for key in string_to_vars:
        data[string_to_vars[key]] = file_data.get(key, None)

    return data
def parse_yaml_linenumbers(lintable: Lintable) -> AnsibleBaseYAMLObject:
    """Parse yaml as ansible.utils.parse_yaml but with linenumbers.

    The line numbers are stored in each node's LINE_NUMBER_KEY key.
    """
    def compose_node(parent: yaml.nodes.Node, index: int) -> yaml.nodes.Node:
        # the line number where the previous token has ended (plus empty lines)
        line = loader.line
        node = Composer.compose_node(loader, parent, index)
        if not isinstance(node, yaml.nodes.Node):
            raise RuntimeError("Unexpected yaml data.")
        setattr(node, '__line__', line + 1)
        return node

    def construct_mapping(
            node: AnsibleBaseYAMLObject,
            deep: bool = False) -> AnsibleMapping:
        # Attach line-number and filename metadata to each constructed mapping.
        mapping = AnsibleConstructor.construct_mapping(loader, node, deep=deep)
        if hasattr(node, '__line__'):
            mapping[LINE_NUMBER_KEY] = node.__line__
        else:
            # Fall back to the constructor-tracked line number.
            mapping[LINE_NUMBER_KEY] = mapping._line_number
        mapping[FILENAME_KEY] = lintable.path
        return mapping

    try:
        kwargs = {}
        # Only pass vault_password when the installed AnsibleLoader accepts it.
        if 'vault_password' in inspect.getfullargspec(
                AnsibleLoader.__init__).args:
            kwargs['vault_password'] = DEFAULT_VAULT_PASSWORD
        loader = AnsibleLoader(lintable.content, **kwargs)
        loader.compose_node = compose_node
        loader.construct_mapping = construct_mapping
        data = loader.get_single_data()
    except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
        logging.exception(e)
        raise SystemExit("Failed to parse YAML in %s: %s" % (lintable.path, str(e)))
    return data
def _load_config_defs(self, name, module, path):
    """Read plugin docs to find configuration setting definitions and push
    them to the config manager for later use.

    :param name: plugin name used to register the definitions
    :param module: loaded plugin module whose DOCUMENTATION is inspected
    :param path: file path of the plugin, for YAML error reporting
    """
    # plugins w/o class name don't support config
    if not self.class_name:
        return

    plugin_type = get_plugin_class(self.class_name)

    # doc fragments and other non-configurable plugin types are skipped
    if plugin_type not in C.CONFIGURABLE_PLUGINS:
        return

    doc_data = AnsibleLoader(
        getattr(module, 'DOCUMENTATION', ''), file_name=path).get_single_data()
    if doc_data:
        add_fragments(doc_data, path, fragment_loader=fragment_loader)

    # only register when an actual options mapping is present
    if doc_data and 'options' in doc_data and isinstance(doc_data['options'], dict):
        C.config.initialize_plugin_configuration_definitions(
            plugin_type, name, doc_data['options'])
        display.debug('Loaded config def from plugin (%s/%s)' % (plugin_type, name))
class TestAnsibleLoaderPlay(unittest.TestCase):
    """Exercise AnsibleLoader on a small playbook: data content, text types,
    and the line/column positions (ansible_pos) attached to parsed nodes."""

    def setUp(self):
        # The indentation inside this YAML literal is significant: the
        # line/column assertions below encode these exact positions.
        stream = NameStringIO(u"""
                - hosts: localhost
                  vars:
                    number: 1
                    string: Ansible
                    utf8_string: Cafè Eñyei
                    dictionary:
                      webster: daniel
                      oed: oxford
                    list:
                      - a
                      - b
                      - 1
                      - 2
                  tasks:
                    - name: Test case
                      ping:
                        data: "{{ utf8_string }}"

                    - name: Test 2
                      ping:
                        data: "Cafè Eñyei"

                    - name: Test 3
                      command: "printf 'Cafè Eñyei\\n'"
                """)
        self.play_filename = '/path/to/myplay.yml'
        stream.name = self.play_filename
        self.loader = AnsibleLoader(stream)
        self.data = self.loader.get_single_data()

    def tearDown(self):
        pass

    def test_data_complete(self):
        self.assertEqual(len(self.data), 1)
        self.assertIsInstance(self.data, list)
        self.assertEqual(frozenset(self.data[0].keys()), frozenset((u'hosts', u'vars', u'tasks')))

        self.assertEqual(self.data[0][u'hosts'], u'localhost')

        self.assertEqual(self.data[0][u'vars'][u'number'], 1)
        self.assertEqual(self.data[0][u'vars'][u'string'], u'Ansible')
        self.assertEqual(self.data[0][u'vars'][u'utf8_string'], u'Cafè Eñyei')
        self.assertEqual(self.data[0][u'vars'][u'dictionary'],
                         {u'webster': u'daniel', u'oed': u'oxford'})
        self.assertEqual(self.data[0][u'vars'][u'list'], [u'a', u'b', 1, 2])

        # YAML double-quoted "\n" in Test 3 becomes a real newline.
        self.assertEqual(self.data[0][u'tasks'],
                         [{u'name': u'Test case', u'ping': {u'data': u'{{ utf8_string }}'}},
                          {u'name': u'Test 2', u'ping': {u'data': u'Cafè Eñyei'}},
                          {u'name': u'Test 3', u'command': u'printf \'Cafè Eñyei\n\''},
                          ])

    def walk(self, data):
        # Make sure there's no str in the data
        self.assertNotIsInstance(data, binary_type)

        # Descend into various container types
        if isinstance(data, text_type):
            # strings are a sequence so we have to be explicit here
            return
        elif isinstance(data, (Sequence, Set)):
            for element in data:
                self.walk(element)
        elif isinstance(data, Mapping):
            for k, v in data.items():
                self.walk(k)
                self.walk(v)

        # Scalars were all checked so we're good to go
        return

    def test_no_str_in_data(self):
        # Checks that no strings are str type
        self.walk(self.data)

    def check_vars(self):
        # Numbers don't have line/col information yet
        # self.assertEqual(self.data[0][u'vars'][u'number'].ansible_pos, (self.play_filename, 4, 21))

        self.assertEqual(self.data[0][u'vars'][u'string'].ansible_pos, (self.play_filename, 5, 29))
        self.assertEqual(self.data[0][u'vars'][u'utf8_string'].ansible_pos, (self.play_filename, 6, 34))

        self.assertEqual(self.data[0][u'vars'][u'dictionary'].ansible_pos, (self.play_filename, 8, 23))
        self.assertEqual(self.data[0][u'vars'][u'dictionary'][u'webster'].ansible_pos, (self.play_filename, 8, 32))
        self.assertEqual(self.data[0][u'vars'][u'dictionary'][u'oed'].ansible_pos, (self.play_filename, 9, 28))

        self.assertEqual(self.data[0][u'vars'][u'list'].ansible_pos, (self.play_filename, 11, 23))
        self.assertEqual(self.data[0][u'vars'][u'list'][0].ansible_pos, (self.play_filename, 11, 25))
        self.assertEqual(self.data[0][u'vars'][u'list'][1].ansible_pos, (self.play_filename, 12, 25))
        # Numbers don't have line/col info yet
        # self.assertEqual(self.data[0][u'vars'][u'list'][2].ansible_pos, (self.play_filename, 13, 25))
        # self.assertEqual(self.data[0][u'vars'][u'list'][3].ansible_pos, (self.play_filename, 14, 25))

    def check_tasks(self):
        #
        # First Task
        #
        self.assertEqual(self.data[0][u'tasks'][0].ansible_pos, (self.play_filename, 16, 23))
        self.assertEqual(self.data[0][u'tasks'][0][u'name'].ansible_pos, (self.play_filename, 16, 29))
        self.assertEqual(self.data[0][u'tasks'][0][u'ping'].ansible_pos, (self.play_filename, 18, 25))
        self.assertEqual(self.data[0][u'tasks'][0][u'ping'][u'data'].ansible_pos, (self.play_filename, 18, 31))

        #
        # Second Task
        #
        self.assertEqual(self.data[0][u'tasks'][1].ansible_pos, (self.play_filename, 20, 23))
        self.assertEqual(self.data[0][u'tasks'][1][u'name'].ansible_pos, (self.play_filename, 20, 29))
        self.assertEqual(self.data[0][u'tasks'][1][u'ping'].ansible_pos, (self.play_filename, 22, 25))
        self.assertEqual(self.data[0][u'tasks'][1][u'ping'][u'data'].ansible_pos, (self.play_filename, 22, 31))

        #
        # Third Task
        #
        self.assertEqual(self.data[0][u'tasks'][2].ansible_pos, (self.play_filename, 24, 23))
        self.assertEqual(self.data[0][u'tasks'][2][u'name'].ansible_pos, (self.play_filename, 24, 29))
        self.assertEqual(self.data[0][u'tasks'][2][u'command'].ansible_pos, (self.play_filename, 25, 32))

    def test_line_numbers(self):
        # Check the line/column numbers are correct
        # Note: Remember, currently dicts begin at the start of their first entry
        self.assertEqual(self.data[0].ansible_pos, (self.play_filename, 2, 19))
        self.assertEqual(self.data[0][u'hosts'].ansible_pos, (self.play_filename, 2, 26))
        self.assertEqual(self.data[0][u'vars'].ansible_pos, (self.play_filename, 4, 21))
        self.check_vars()

        self.assertEqual(self.data[0][u'tasks'].ansible_pos, (self.play_filename, 16, 21))
        self.check_tasks()
def get_docstring(filename, verbose=False):
    """
    Search for assignment of the DOCUMENTATION and EXAMPLES variables
    in the given file.
    Parse DOCUMENTATION from YAML and return the YAML doc or None
    together with EXAMPLES, as plain text.

    DOCUMENTATION can be extended using documentation fragments
    loaded by the PluginLoader from the module_docs_fragments
    directory.

    :param filename: path of the module source file to inspect
    :param verbose: when true, additionally display the failure and re-raise
    :return: tuple of (doc dict or None, examples text or None, return docs
        text or None)
    """

    doc = None
    plainexamples = None
    returndocs = None

    try:
        # Parse the source without executing it; the docs are plain string
        # assignments at module level.
        # Thank you, Habbie, for this bit of code :-)
        # FIX: the file handle was previously leaked (bare open() never closed)
        with open(filename) as source:
            M = ast.parse(source.read())
        for child in M.body:
            if isinstance(child, ast.Assign):
                for t in child.targets:
                    try:
                        theid = t.id
                    except AttributeError:
                        # skip errors can happen when trying to use the normal code
                        display.warning("Failed to assign id for %s on %s, skipping" % (t, filename))
                        continue

                    if 'DOCUMENTATION' in theid:
                        doc = AnsibleLoader(child.value.s, file_name=filename).get_single_data()
                        fragments = doc.get('extends_documentation_fragment', [])

                        if isinstance(fragments, string_types):
                            fragments = [fragments]

                        # Allow the module to specify a var other than DOCUMENTATION
                        # to pull the fragment from, using dot notation as a separator
                        for fragment_slug in fragments:
                            fragment_slug = fragment_slug.lower()
                            if '.' in fragment_slug:
                                fragment_name, fragment_var = fragment_slug.split('.', 1)
                                fragment_var = fragment_var.upper()
                            else:
                                fragment_name, fragment_var = fragment_slug, 'DOCUMENTATION'

                            fragment_class = fragment_loader.get(fragment_name)
                            # FIX: was a bare `assert`, which is stripped under -O;
                            # raise instead (still handled by the except below)
                            if fragment_class is None:
                                raise Exception("unknown documentation fragment: %s" % fragment_name)
                            fragment_yaml = getattr(fragment_class, fragment_var, '{}')
                            fragment = AnsibleLoader(fragment_yaml, file_name=filename).get_single_data()

                            # FIX: dict.has_key() was removed in Python 3; use `in`
                            if 'notes' in fragment:
                                notes = fragment.pop('notes')
                                if notes:
                                    if 'notes' not in doc:
                                        doc['notes'] = []
                                    doc['notes'].extend(notes)

                            if 'options' not in fragment:
                                raise Exception("missing options in fragment, possibly misformatted?")

                            # Merge the fragment into doc: new keys are copied,
                            # existing containers are extended by type.
                            for key, value in fragment.items():
                                if key not in doc:
                                    doc[key] = value
                                else:
                                    if isinstance(doc[key], MutableMapping):
                                        doc[key].update(value)
                                    elif isinstance(doc[key], MutableSet):
                                        doc[key].add(value)
                                    elif isinstance(doc[key], MutableSequence):
                                        doc[key] = sorted(frozenset(doc[key] + value))
                                    else:
                                        # FIX: typo "fragement" in the original message
                                        raise Exception("Attempt to extend a documentation fragment of unknown type")

                    elif 'EXAMPLES' in theid:
                        plainexamples = child.value.s[1:]  # Skip first empty line

                    elif 'RETURN' in theid:
                        returndocs = child.value.s[1:]
    # FIX: was a bare `except:` which also swallowed KeyboardInterrupt/SystemExit
    except Exception:
        display.error("unable to parse %s" % filename)
        if verbose:  # FIX: was `verbose == True`
            display.display("unable to parse %s" % filename)
            raise

    return doc, plainexamples, returndocs