def fab_settings_config(self):
    """Collated contents of fab-settings.yml files, as a FabSettingsConfig object.

    Includes environmental-defaults/fab-settings.yml as well as
    <env>/fab-settings.yml (the env file overrides the defaults).
    """
    with open(self.paths.fab_settings_yml_default, encoding='utf-8') as defaults_file:
        merged_settings = from_yaml(defaults_file)
    with open(self.paths.fab_settings_yml, encoding='utf-8') as env_file:
        # An empty env file parses to None; treat it as "no overrides".
        merged_settings.update(from_yaml(env_file) or {})
    return FabSettingsConfig.wrap(merged_settings)
def _raw_app_processes_config(self):
    """Collated contents of app-processes.yml files, as an AppProcessesConfig object.

    Includes environmental-defaults/app-processes.yml as well as
    <env>/app-processes.yml (the env file overrides the defaults).

    :returns: a checked AppProcessesConfig.
    """
    with open(self.paths.app_processes_yml_default, encoding='utf-8') as f:
        app_processes_json = from_yaml(f)
    with open(self.paths.app_processes_yml, encoding='utf-8') as f:
        # FIX: an empty <env>/app-processes.yml parses to None, and
        # dict.update(None) raises TypeError. Fall back to {} — the same
        # guard fab_settings_config already uses.
        app_processes_json.update(from_yaml(f) or {})
    raw_app_processes_config = AppProcessesConfig.wrap(app_processes_json)
    raw_app_processes_config.check()
    return raw_app_processes_config
def prometheus_config(self):
    """Contents of prometheus.yml as a PrometheusConfig, or None when the file cannot be opened."""
    try:
        with open(self.paths.prometheus_yml, encoding='utf-8') as config_file:
            parsed = from_yaml(config_file)
    except IOError:
        # Monitoring config is optional for an environment.
        return None
    return PrometheusConfig.wrap(parsed)
def public_vars(self):
    """Contents of public.yml, as a dict; {} when the file does not exist."""
    # FIX: the docstring previously sat inside the try block, where it was
    # a no-op string statement rather than this function's __doc__.
    try:
        with open(self.paths.public_yml, encoding='utf-8') as f:
            return from_yaml(f)
    except FileNotFoundError:
        return {}
def aws_config(self):
    """Contents of aws.yml wrapped as an AwsConfig; an unreadable/missing file is treated as empty."""
    config_yml = {}
    try:
        with open(self.paths.aws_yml, encoding='utf-8') as aws_file:
            config_yml = from_yaml(aws_file)
    except IOError:
        pass  # optional file — keep the empty default
    return AwsConfig.wrap(config_yml)
def load_file(self, file_name):
    """Locate, vault-decrypt, template, and YAML-parse a file from the 'files' search path.

    :param file_name: file name resolved via Ansible's _find_needle search.
    :returns: the templated content parsed with from_yaml.
    :raises AnsibleActionFail: if the file cannot be found or read.
    """
    try:
        source = self._find_needle('files', file_name)
    except AnsibleError as e:
        raise AnsibleActionFail(to_text(e))

    # Get vault decrypted tmp file
    try:
        tmp_source = self._loader.get_real_file(source)
    except AnsibleFileNotFound as e:  # pragma: no cover
        raise AnsibleActionFail("could not find src=%s, %s" % (source, to_text(e)))  # pragma: no cover
    b_tmp_source = to_bytes(tmp_source, errors='surrogate_or_strict')

    # template the source data locally & get ready to transfer
    try:
        with open(b_tmp_source, 'rb') as f:
            template_data = to_text(f.read(), errors='surrogate_or_strict')
    except AnsibleAction:  # pragma: no cover
        raise  # pragma: no cover
    except Exception as e:  # pragma: no cover
        raise AnsibleActionFail("%s: %s" % (type(e).__name__, to_text(e)))  # pragma: no cover
    finally:
        # Always remove the decrypted tmp copy, whether reading succeeded or not.
        self._loader.cleanup_tmp_file(b_tmp_source)

    templated = self._templar.do_template(template_data, preserve_trailing_newlines=True, escape_backslashes=False)
    return from_yaml(data=templated, file_name=file_name)
def inspect_playbook(playbook, write_names=False):
    """Extract task routing from a playbook file.

    Falls back to task-list inspection when the file looks like a role
    task list, and returns an empty mapping when the file cannot be
    handled or parsed.
    """
    routing = {}
    with open(playbook, 'r') as handle:
        content = handle.read()
    if not could_be_playbook(content):
        if could_be_role(content):
            print(f' inspecting as task list {playbook}')
            return inspect_task_list(playbook, write_names=write_names)
        print(f' skipping playbook {playbook}')
        return routing
    print(f' inspecting playbook {playbook}')
    try:
        plays = from_yaml(content)
    except Exception:
        print(f' could not read {playbook} as YAML')
        return routing
    for play in plays:
        play_tasks = get_tasks_from_play(play)
        if play_tasks:
            routing.update(locate_tasks(play_tasks))
    return routing
def _play_ds(self, pattern, async_val, poll):
    """Build the in-memory play datastructure for an ad-hoc module run.

    :param pattern: host pattern the generated play targets.
    :param async_val: async timeout requested on the CLI.
    :param poll: poll interval requested on the CLI.
    :returns: a play dict named "Ansible Ad-Hoc" containing one task.
    """
    check_raw = context.CLIARGS['module_name'] in C.MODULE_REQUIRE_ARGS
    module_args_raw = context.CLIARGS['module_args']
    module_args = None
    # Args that look like a JSON object ("{...}") are parsed as JSON first;
    # anything else (or a parse failure) falls through to key=value parsing.
    if module_args_raw and module_args_raw.startswith('{') and module_args_raw.endswith('}'):
        try:
            module_args = from_yaml(module_args_raw.strip(), json_only=True)
        except AnsibleParserError:
            pass
    if not module_args:
        module_args = parse_kv(module_args_raw, check_raw=check_raw)

    mytask = {'action': {'module': context.CLIARGS['module_name'], 'args': module_args},
              'timeout': context.CLIARGS['task_timeout']}

    # avoid adding to tasks that don't support it, unless set, then give user an error
    # (any(frozenset(...)) is truthy when either async_val or poll is non-zero)
    if context.CLIARGS['module_name'] not in C._ACTION_ALL_INCLUDE_ROLE_TASKS and any(frozenset((async_val, poll))):
        mytask['async_val'] = async_val
        mytask['poll'] = poll

    return dict(name="Ansible Ad-Hoc", hosts=pattern, gather_facts='no', tasks=[mytask])
def postgresql_config(self):
    """Contents of postgresql.yml as a validated PostgresqlConfig with hosts resolved."""
    with open(self.paths.postgresql_yml, encoding='utf-8') as config_file:
        parsed = from_yaml(config_file)
    config = PostgresqlConfig.wrap(parsed)
    config.replace_hosts(self)
    config.check()
    return config
def _load_plugin_filter():
    """Load the plugin-blacklist config and return {plugin type: frozenset(names)}.

    Missing, unparsable, invalid, or unrecognized-version files degrade to
    empty filters with a warning. Blacklisting 'stat' is a hard error
    because Ansible cannot function without it.
    """
    filters = defaultdict(frozenset)
    user_set = False
    if C.PLUGIN_FILTERS_CFG is None:
        filter_cfg = '/etc/ansible/plugin_filters.yml'
    else:
        filter_cfg = C.PLUGIN_FILTERS_CFG
        user_set = True  # only warn about a missing file when the user chose it
    if os.path.exists(filter_cfg):
        with open(filter_cfg, 'rb') as f:
            try:
                filter_data = from_yaml(f.read())
            except Exception as e:
                display.warning(u'The plugin filter file, {0} was not parsable.'
                                u' Skipping: {1}'.format(filter_cfg, to_text(e)))
                return filters
        try:
            version = filter_data['filter_version']
        except KeyError:
            display.warning(u'The plugin filter file, {0} was invalid.'
                            u' Skipping.'.format(filter_cfg))
            return filters

        # Try to convert for people specifying version as a float instead of string
        version = to_text(version)
        version = version.strip()

        if version == u'1.0':
            # Modules and action plugins share the same blacklist since the difference between the
            # two isn't visible to the users
            try:
                filters['ansible.modules'] = frozenset(filter_data['module_blacklist'])
            except TypeError:
                # frozenset() raises TypeError when module_blacklist is not iterable
                display.warning(u'Unable to parse the plugin filter file {0} as'
                                u' module_blacklist is not a list.'
                                u' Skipping.'.format(filter_cfg))
                return filters
            filters['ansible.plugins.action'] = filters['ansible.modules']
        else:
            display.warning(u'The plugin filter file, {0} was a version not recognized by this'
                            u' version of Ansible. Skipping.'.format(filter_cfg))
    else:
        if user_set:
            display.warning(u'The plugin filter file, {0} does not exist.'
                            u' Skipping.'.format(filter_cfg))

    # Specialcase the stat module as Ansible can run very few things if stat is blacklisted.
    if 'stat' in filters['ansible.modules']:
        raise AnsibleError('The stat module was specified in the module blacklist file, {0}, but'
                           ' Ansible will not function without the stat module. Please remove stat'
                           ' from the blacklist.'.format(to_native(filter_cfg)))
    return filters
def terraform_config(self):
    """Contents of terraform.yml as a TerraformConfig, or None when the file cannot be opened.

    The 'environment' key defaults to the meta config's env_monitoring_id
    when the file does not set one.
    """
    try:
        with open(self.paths.terraform_yml, encoding='utf-8') as f:
            config_yml = from_yaml(f)
    except IOError:
        return None
    # FIX: an empty terraform.yml parses to None; treat it like an empty
    # mapping instead of crashing on the attribute access below.
    config_yml = config_yml or {}
    config_yml.setdefault('environment', self.meta_config.env_monitoring_id)
    return TerraformConfig.wrap(config_yml)
def test_from_yaml_simple():
    """from_yaml parses a small YAML list, including non-ASCII keys/values."""
    yaml_text = u'---\n- test: 1\n test2: "2"\n- caf\xe9: "caf\xe9"'
    expected = [
        {u'test': 1, u'test2': u"2"},
        {u"caf\xe9": u"caf\xe9"},
    ]
    assert from_yaml(yaml_text) == expected
def load(self, data, file_name='<string>', show_content=True, json_only=False):
    '''Backwards compat for now'''
    vault_secrets = self._vault.secrets
    return from_yaml(data, file_name, show_content, vault_secrets, json_only=json_only)
def elasticsearch_config(self):
    """Contents of elasticsearch.yml as an ElasticsearchConfig.

    When the file does not set number_of_replicas, a default is chosen
    from the cluster size: no replicas for a single node, one otherwise.
    """
    try:
        with open(self.paths.elasticsearch_yml, encoding='utf-8') as es_file:
            elasticsearch_json = from_yaml(es_file)
    except IOError:
        # It's fine to omit this file
        elasticsearch_json = {}
    default_replicas = 1 if len(self.groups['elasticsearch']) >= 2 else 0
    config = ElasticsearchConfig.wrap(elasticsearch_json)
    if config.settings.default.number_of_replicas is None:
        config.settings.default.number_of_replicas = default_replicas
    return config
def users_config(self):
    """Merge every configured user-group YAML file into a single UsersConfig.

    Raises via check_user_group_absent_present_overlaps if a user appears
    in both the absent and present lists.
    """
    absent_users = []
    present_users = []
    for group_name in self.meta_config.users:
        with open(self.paths.get_users_yml(group_name), encoding='utf-8') as group_file:
            dev_users = from_yaml(group_file)['dev_users']
        present_users.extend(dev_users['present'])
        absent_users.extend(dev_users['absent'])
    self.check_user_group_absent_present_overlaps(absent_users, present_users)
    merged = {'dev_users': {'absent': absent_users, 'present': present_users}}
    return UsersConfig.wrap(merged)
def inspect_task_list(filename, write_names=False):
    """Extract task routing from a role task-list file.

    Returns {} when the file is not parseable as YAML or does not look
    like a role task list.
    """
    with open(filename, 'r') as f:
        content = f.read()
    try:
        data = from_yaml(content)
    except Exception:
        # FIX: the message was an f-string with no placeholder and printed
        # a literal "(unknown)"; name the file that failed to parse.
        print(f' could not read {filename} as YAML')
        return {}
    if not could_be_role(data):
        return {}
    return locate_tasks(data)
def _load_plugin_filter():
    """Load the plugin-blacklist config and return {plugin type: frozenset(names)}.

    Missing, unparsable, invalid, or unrecognized-version files degrade to
    empty filters with a warning. Blacklisting 'stat' is a hard error
    because Ansible cannot function without it.
    """
    filters = defaultdict(frozenset)
    if C.PLUGIN_FILTERS_CFG is None:
        filter_cfg = '/etc/ansible/plugin_filters.yml'
        user_set = False
    else:
        filter_cfg = C.PLUGIN_FILTERS_CFG
        user_set = True  # only warn about a missing file when the user chose it
    if os.path.exists(filter_cfg):
        with open(filter_cfg, 'rb') as f:
            try:
                filter_data = from_yaml(f.read())
            except Exception as e:
                display.warning(u'The plugin filter file, {0} was not parsable.'
                                u' Skipping: {1}'.format(filter_cfg, to_text(e)))
                return filters
        try:
            version = filter_data['filter_version']
        except KeyError:
            display.warning(u'The plugin filter file, {0} was invalid.'
                            u' Skipping.'.format(filter_cfg))
            return filters

        # Try to convert for people specifying version as a float instead of string
        version = to_text(version)
        version = version.strip()

        if version == u'1.0':
            # Modules and action plugins share the same blacklist since the difference between the
            # two isn't visible to the users
            filters['ansible.modules'] = frozenset(filter_data['module_blacklist'])
            filters['ansible.plugins.action'] = filters['ansible.modules']
        else:
            # FIX: this warning had no .format() call, so the user saw a
            # literal '{0}' instead of the config file path.
            display.warning(u'The plugin filter file, {0} was a version not recognized by this'
                            u' version of Ansible. Skipping.'.format(filter_cfg))
    else:
        if user_set:
            display.warning(u'The plugin filter file, {0} does not exist.'
                            u' Skipping.'.format(filter_cfg))

    # Specialcase the stat module as Ansible can run very few things if stat is blacklisted.
    if 'stat' in filters['ansible.modules']:
        raise AnsibleError('The stat module was specified in the module blacklist file, {0}, but'
                           ' Ansible will not function without the stat module. Please remove stat'
                           ' from the blacklist.'.format(filter_cfg))
    return filters
def test_from_yaml_simple():
    """Parsing a two-item YAML list preserves ints, strings, and unicode keys."""
    parsed = from_yaml(u'---\n- test: 1\n test2: "2"\n- caf\xe9: "caf\xe9"')
    assert parsed == [{u'test': 1, u'test2': u"2"}, {u"caf\xe9": u"caf\xe9"}]
def crawl(location, write_meta=False):
    """Inspect playbooks and role task lists under *location* and report task routing.

    :param location: a playbook file, or a directory containing playbooks
        and a roles/ subdirectory.
    :param write_meta: when True, rewrite each role's meta/main.yml with
        the collections its tasks need (removing it when empty).
    """
    if os.path.isdir(location):
        print('Inspecting playbooks')
        playbook_dir = location
        routing = {}
        for filename in list_yaml(playbook_dir):
            new_routing = inspect_playbook(filename)
            for key, value in new_routing.items():
                print(f' {key} --> {value}')
            routing.update(new_routing)
    else:
        # FIX: the f prefix was missing, so the literal text '{location}'
        # was printed instead of the path.
        print(f'Inspecting playbook {location}')
        playbook_dir = os.path.dirname(location)
        routing = inspect_playbook(location)
        for key, value in routing.items():
            print(f' {key} --> {value}')

    # roles
    print('')
    roles_dir = os.path.join(playbook_dir, 'roles')
    if not os.path.exists(roles_dir):
        print('Found no roles dir, exiting')
        return
    else:
        print('Inspecting role directories')
        for subdir in os.listdir(roles_dir):
            role_dir = os.path.join(roles_dir, subdir)
            if not os.path.isdir(role_dir):
                continue
            tasks_dir = os.path.join(role_dir, 'tasks')
            if not os.path.exists(tasks_dir):
                continue
            role_routing = {}
            for filename in os.listdir(tasks_dir):
                file_path = os.path.join(tasks_dir, filename)
                if os.path.isdir(file_path):
                    continue
                if not could_be_yaml(filename):
                    continue
                new_routing = inspect_task_list(file_path)
                for key, value in new_routing.items():
                    # FIX: the message printed a literal "(unknown)" where
                    # the task-list file name belongs.
                    print(f' roles/{subdir}/tasks/{filename}: {key} --> {value}')
                role_routing.update(new_routing)
            if write_meta:
                # Roles without a meta/ dir are skipped entirely (their
                # routing is intentionally not merged, matching the
                # original control flow).
                if not os.path.exists(os.path.join(role_dir, 'meta')):
                    continue
                meta_file = os.path.join(role_dir, 'meta', 'main.yml')
                if os.path.exists(meta_file):
                    with open(meta_file, 'r') as f:
                        existing_content = f.read()
                    existing_data = from_yaml(existing_content)
                else:
                    existing_data = {}
                need_collections = set(
                    value.rsplit('.', 1)[0] for value in role_routing.values())
                existing_data['collections'] = sorted(list(need_collections))
                if existing_data == {'collections': []}:
                    # Nothing to declare: drop a now-empty meta file.
                    if os.path.exists(meta_file):
                        os.remove(meta_file)
                else:
                    with open(meta_file, 'w') as f:
                        f.write(yaml.dump(existing_data, Dumper=AnsibleDumper))
            routing.update(role_routing)

    print('')
    print('Overall routing:')
    print(json.dumps(routing, indent=2))
    print('')
    print('Collection needs:')
    needs = set(fqcn.rsplit('.', 1)[0] for fqcn in routing.values())
    for need in needs:
        print(f' - {need}')
    print('')
def _list_keywords():
    """Return the parsed contents of ansible's bundled keyword_desc.yml."""
    raw = pkgutil.get_data('ansible', 'keyword_desc.yml')
    return from_yaml(raw)
os.path.dirname(os.path.abspath(__file__)), 'look_at.py'), abs_target, fq_import ] out = subprocess.check_output(cmd, env=subp_env) out = str(out, encoding='utf-8') doc = str(out).strip('\n') has_doc = bool(doc) doc = doc.strip('\n') is_yaml = False has_req = False if has_doc: try: doc_dict = from_yaml(doc) # doc_dict = yaml.safe_load(doc) if 'requirements' not in doc_dict and len( doc_dict) == 1: doc_dict = list(doc_dict.values())[0] is_yaml = True if 'requirements' in doc_dict: reqs = doc_dict['requirements'] has_req = True if not isinstance(reqs, list): raise Exception( f'!! got requirements as non-list !! {reqs}' ) for req in reqs: if not isinstance(req, str): raise Exception(
def test_bad_yaml():
    """Malformed YAML ('foo: bar: baz') must raise AnsibleParserError."""
    pytest.raises(AnsibleParserError, from_yaml, u'foo: bar: baz')
def load(self, data, file_name='<string>', show_content=True):
    '''Backwards compat for now'''
    vault_secrets = self._vault.secrets
    return from_yaml(data, file_name, show_content, vault_secrets)
def proxy_config(self):
    """Contents of proxy.yml as a validated ProxyConfig."""
    with open(self.paths.proxy_yml, encoding='utf-8') as config_file:
        parsed = from_yaml(config_file)
    config = ProxyConfig.wrap(parsed)
    config.check()
    return config
def _read_yaml(self, path):
    """Parse the YAML file at *path* and return the result."""
    with open(path, encoding='utf-8') as yaml_file:
        parsed = from_yaml(yaml_file)
    return parsed
def meta_config(self):
    """Contents of meta.yml wrapped as a MetaConfig."""
    with open(self.paths.meta_yml, encoding='utf-8') as meta_file:
        parsed = from_yaml(meta_file)
    return MetaConfig.wrap(parsed)
def public_vars(self):
    """contents of public.yml, as a dict"""
    with open(self.paths.public_yml, encoding='utf-8') as public_file:
        parsed = from_yaml(public_file)
    return parsed