def parse_meta(filename):
    """Extract requirements and test metadata from a conda meta.yaml file.

    Lines containing Jinja2 markup ('{%' directives or '{{' substitutions)
    are filtered out so the remainder parses as plain YAML.

    :param filename: path of the meta.yaml file to read
    :returns: tuple ``(requirements, tests)``; on a YAML parse error the
        error is printed and whatever was collected so far is returned
    """
    recipe = ''
    with open(filename, 'r') as f:
        for line in f:
            # filter all non-YAML elements
            if not line.startswith('{%') and '{{' not in line:
                recipe += line

    requirements = []
    tests = {}
    try:
        # Parse the recipe once instead of re-parsing it for every lookup.
        meta = yaml_safe_load(recipe)
        try:
            requirements += meta['requirements']['host']
        except TypeError:
            # Section present but empty (None) -- nothing to add.
            pass
        try:
            requirements += meta['requirements']['run']
        except TypeError:
            pass
        try:
            requirements += meta['test']['requires']
        except TypeError:
            pass
        tests['commands'] = meta['test']['commands']
        tests['source_files'] = meta['test']['source_files']
    except YAMLError as exc:
        print(exc)
    return requirements, tests
def open_network_environment_files(netenv_path, template_files):
    """Load a network environment file plus every NIC config it references.

    :param netenv_path: key of the network environment file in
        ``template_files``
    :param template_files: mapping of file name to file content
    :returns: tuple ``(network_data, nic_configs, errors)``
    """
    errors = []
    try:
        network_data = yaml_safe_load(template_files[netenv_path])
    except Exception as e:
        message = "Can't open network environment file '{}': {}".format(
            netenv_path, e)
        return ({}, {}, [message])

    nic_configs = []
    registry = network_data.get('resource_registry', {})
    base_dir = os.path.dirname(netenv_path)
    for resource_name, relative_path in six.iteritems(registry):
        if not resource_name.endswith("Net::SoftwareConfig"):
            continue
        config_path = os.path.normpath(os.path.join(base_dir, relative_path))
        try:
            loaded = yaml_safe_load(template_files[config_path])
        except Exception as e:
            errors.append(
                "Can't open the resource '{}' reference file '{}': {}".
                format(resource_name, config_path, e))
        else:
            nic_configs.append((resource_name, config_path, loaded))
    return (network_data, nic_configs, errors)
def main():
    """Source an overcloudrc file and expose its OS_* vars as facts."""
    module = AnsibleModule(
        argument_spec=yaml_safe_load(DOCUMENTATION)['options']
    )

    overcloudrc_path = os.path.expanduser(module.params.get('path'))

    if not os.path.isfile(overcloudrc_path):
        module.fail_json(
            msg="The overcloudrc file at {} does not exist.".format(
                overcloudrc_path))

    # Use bash to source overcloudrc and print the environment:
    command = ['bash', '-c', 'source ' + overcloudrc_path + ' && env']
    proc = subprocess.Popen(
        command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
        universal_newlines=True)

    # communicate() drains both pipes before waiting on the child;
    # calling proc.wait() first can deadlock once a pipe buffer fills.
    stdout, stderr = proc.communicate()

    if proc.returncode != 0:
        msg = "Could not source '{}'. Return code: {}.\nSTDERR:\n{}".format(
            overcloudrc_path, proc.returncode, stderr)
        module.fail_json(msg=msg)

    facts = {}
    for line in stdout.splitlines():
        (key, _, value) = line.partition("=")
        if key.startswith("OS_"):
            facts[key] = value.rstrip()

    module.exit_json(changed=False, ansible_facts={'overcloudrc': facts})
def main():
    """Warn when a drive's physical and logical block sizes differ."""
    module = AnsibleModule(
        argument_spec=yaml_safe_load(DOCUMENTATION)['options'])

    drive = module.params.get('drive')
    queue_dir = path.join('/sys/class/block', drive, 'queue')
    physical_block_size = read_int(
        module, path.join(queue_dir, 'physical_block_size'))
    logical_block_size = read_int(
        module, path.join(queue_dir, 'logical_block_size'))

    if physical_block_size != logical_block_size:
        module.exit_json(
            # NOTE(shadower): we're marking this as `changed`, to make it
            # visually stand out when running via Ansible directly instead of
            # using the API.
            #
            # The API & UI is planned to look for the `warnings` field and
            # display it differently.
            changed=True,
            warnings=[
                "Physical and logical block sizes of drive %s differ "
                "(%s vs. %s). This can mean the disk uses Advance "
                "Format." % (drive, physical_block_size, logical_block_size)
            ],
        )
    else:
        module.exit_json(
            changed=False,
            msg="The disk %s probably doesn't use Advance Format." % drive,
        )
def main():
    """Read a single value from an ini file and return it to Ansible."""
    module = AnsibleModule(
        argument_spec=yaml_safe_load(DOCUMENTATION)['options'])

    ini_file_path = module.params.get('path')
    ignore_missing = module.params.get('ignore_missing_file')

    # An empty message from check_file means the file is usable.
    msg = check_file(ini_file_path, ignore_missing)
    if msg != '':
        # Opening file failed.
        if ignore_missing:
            module.exit_json(msg=msg, changed=False, value=None)
        else:
            module.fail_json(msg=msg)
    else:
        # File is readable; try to look the key up.
        ret, msg, value = get_result(ini_file_path,
                                     module.params.get('section'),
                                     module.params.get('key'),
                                     module.params.get('default'))
        if ret == ReturnValue.OK:
            module.exit_json(msg=msg, changed=False, value=value)
        elif ret == ReturnValue.KEY_NOT_FOUND:
            module.exit_json(msg=msg, changed=False, value=None)
        elif ret == ReturnValue.INVALID_FORMAT:
            module.fail_json(msg=msg)
def main():
    """Emit a single warning message without changing anything."""
    module = AnsibleModule(
        argument_spec=yaml_safe_load(DOCUMENTATION)['options'])
    warning = module.params.get('msg')
    module.exit_json(changed=False, warnings=[warning])
def main():
    """Validate a network environment file and its node pool sizes."""
    module = AnsibleModule(
        argument_spec=yaml_safe_load(DOCUMENTATION)['options'])

    params = module.params
    netenv_path = params.get('netenv_path')
    plan_env_path = params.get('plan_env_path')
    ip_pools_path = params.get('ip_pools_path')
    template_files = {name: content[1]
                      for (name, content) in params.get('template_files')}

    errors = validate(netenv_path, template_files)
    warnings = []
    try:
        warnings = validate_node_pool_size(plan_env_path, ip_pools_path,
                                           template_files)
    except Exception as exc:
        errors.append("{}".format(exc))

    if errors:
        module.fail_json(msg="\n".join(errors))
    else:
        module.exit_json(
            msg="No errors found for the '{}' file.".format(netenv_path),
            warnings=warnings,
        )
def __init__(self, *args, **kw):
    """Initialize the schema mapping.

    Accepts either no positional argument, a single dict-like seed
    object, or a single string holding JSON (tried first) or YAML.
    An unparseable YAML string raises to the caller.
    """
    # Init the dict with nothing in it.
    dict.__init__(self, (), **kw)
    self.__iter_i = 0
    self.__iter_l = []
    self.__hash = self.__generate_hash()
    SproutSchema.__add_identity(self.__hash)

    # If we were loaded with a dict, process each name by class.
    if len(args) == 0:
        return

    # If we were seeded with a string, presume it's either YAML or JSON,
    # and parse it into an object.
    if len(args) == 1 and isinstance(args[0], str):
        try:
            a = json.loads(args[0])
        except json.JSONDecodeError:
            # If JSON didn't work, presume its YAML.  A YAMLParserError
            # simply propagates (the previous catch-and-re-raise of it
            # was a no-op).
            a = yaml_safe_load(args[0])
    else:
        a = args[0]
    self.__do_update(a)
def main():
    """Check for available package updates via the configured manager."""
    module = AnsibleModule(
        argument_spec=yaml_safe_load(DOCUMENTATION)['options'])
    params = module.params
    check_update(module,
                 packages_list=params.get('packages_list'),
                 pkg_mgr=params.get('pkg_mgr', None))
def evaluate_file(file_name):
    """Evaluates the conditionals listed in a file and returns their
    results in a dict."""

    log_info('Loading conditionals from file "{0}"'.format(file_name))

    results = {}

    if not is_file(file_name):
        log_error('File "{0}" does not exist'.format(file_name))
        return results

    try:
        from yaml import safe_load as yaml_safe_load
        # Use a context manager so the handle is closed even when reading
        # or parsing raises (the file was previously never closed).
        with open(file_name, 'r', encoding='utf-8') as f:
            data = yaml_safe_load(f.read())
    except Exception as e:
        log_error(e)
        return results

    for c in (data or []):
        if 'name' not in c:
            log_error('Ignoring a conditional without name, skipping')
            continue

        c_name = c['name']

        if c_name in results:
            log_error('Duplicate conditional "{0}", skipping'.
                      format(c_name))
            continue

        if 'method' not in c:
            log_error('Conditional "{0}" has no method defined, skipping'.
                      format(c_name))
            continue

        c_method = c['method']

        if c_method not in __METHODS:
            log_error('Conditional "{0}" has an unknown method "{1}", '
                      'skipping'.format(c_name, c_method))
            continue

        if ('params' not in c) or (c['params'] is None):
            log_error('Conditional "{0}" has no "params" block, skipping'.
                      format(c_name))
            continue

        try:
            results[c_name] = __METHODS[c_method](c_name, c['params'][0])
        except Exception as e:
            # Don't let a single conditional failure remove
            # everything in this file
            log_error(e)

    for k, v in results.items():
        log_debug('Conditional: name="{0}", result={1}'.format(k, v))

    return results
def main(args):
    """Validate a sample JSON document against the SystemProfile schema."""
    with open(args.sample_path) as sample_data, open(
            args.spec_path) as spec_data:
        sample = json_load(sample_data)
        spec = yaml_safe_load(spec_data)
        # Point the root reference at the SystemProfile definition.
        schema = dict(spec)
        schema["$ref"] = "#/$defs/SystemProfile"
        jsonschema_validate(instance=sample, schema=schema)
def __init__(self, conf):
    """Store configuration and preload canned responses.

    :param conf: mapping whose 'response_file' entry, when truthy, is fed
        to ``yaml_safe_load`` and must yield a document with a
        'responses' key.  NOTE(review): the value is passed straight to
        the YAML loader, so it appears to hold YAML content rather than
        a file path -- confirm against callers.
    """
    self.conf = conf
    self._responses = {}
    if self.conf['response_file']:
        # Any load/parse error propagates unchanged; the previous
        # ``except Exception as exc: raise exc`` wrapper was a no-op.
        self._responses = yaml_safe_load(
            self.conf['response_file'])['responses']
def main():
    """Ping a host once and report reachability."""
    module = AnsibleModule(
        argument_spec=yaml_safe_load(DOCUMENTATION)['options'])
    host = module.params.pop('host')
    rc, out, err = module.run_command('ping -c 1 {}'.format(host))
    # Prefer stdout for the message, falling back to stderr.
    msg = out if out else err
    module.exit_json(changed=False, failed=(rc != 0), msg=msg)
def main():
    """Format a validation report and hand it to the display helper."""
    module = AnsibleModule(
        argument_spec=yaml_safe_load(DOCUMENTATION)['options'])
    params = module.params
    status = params.get('report_status')
    msg = format_msg_report(status,
                            params.get('report_reason'),
                            params.get('report_recommendations'))
    display_type_report(module, status, msg)
def validate_node_pool_size(plan_env_path, ip_pools_path, template_files):
    """Warn when static IP pools are too small for assigned node counts.

    :returns: list of warning strings (empty when every role with nodes
        has sufficiently large pools)
    """
    warnings = []
    plan_env = yaml_safe_load(template_files[plan_env_path])
    ip_pools = yaml_safe_load(template_files[ip_pools_path])

    plan_params = plan_env.get('parameter_defaults')
    node_counts = {
        name.replace('Count', ''): count
        for name, count in six.iteritems(plan_params)
        if name.endswith('Count') and count > 0
    }

    # TODO(akrivoka): There are a lot of inconsistency issues with parameter
    # naming in THT :( Once those issues are fixed, this block should be
    # removed.
    if 'ObjectStorage' in node_counts:
        node_counts['SwiftStorage'] = node_counts.pop('ObjectStorage')

    pool_params = ip_pools.get('parameter_defaults')
    role_pools = {
        name.replace('IPs', ''): pool
        for name, pool in six.iteritems(pool_params)
        if name.endswith('IPs') and name.replace('IPs', '') in node_counts
    }

    for role, node_count in six.iteritems(node_counts):
        if role not in role_pools:
            warnings.append(
                "Found {} node(s) assigned to '{}' role, but no static IP "
                "pools defined.".format(node_count, role))
            continue
        for pool_name, pool_ips in six.iteritems(role_pools[role]):
            if len(pool_ips) < node_count:
                warnings.append(
                    "Insufficient number of IPs in '{}' pool for '{}' role: "
                    "{} IP(s) found in pool, but {} nodes assigned to role.".
                    format(pool_name, role, len(pool_ips), node_count))
    return warnings
def main():
    """Run an osquery query and return its JSON result set."""
    module = AnsibleModule(
        argument_spec=yaml_safe_load(DOCUMENTATION)['options'])
    query = module.params.get('query')
    try:
        output = subprocess.check_output(
            ['/usr/bin/osqueryi', '--json', query],
            universal_newlines=True)
    except subprocess.CalledProcessError as e:
        module.fail_json(msg="Failed to run osquery: {}".format(e.output))
    module.exit_json(changed=False, resultset=json.loads(output.strip()))
def main():
    """Parse the haproxy configuration and expose it as an Ansible fact."""
    module = AnsibleModule(
        argument_spec=yaml_safe_load(DOCUMENTATION)['options'])
    conf_path = module.params.get('path')
    try:
        parsed = parse_haproxy_conf(conf_path)
    except IOError:
        module.fail_json(msg="Could not open the haproxy conf file at: '%s'"
                             % conf_path)
    module.exit_json(changed=False, ansible_facts={u'haproxy_conf': parsed})
def main():
    """Validate node boot configuration and report any errors."""
    module = AnsibleModule(
        argument_spec=yaml_safe_load(DOCUMENTATION)['options']
    )
    nodes = module.params.get('nodes')
    errors = validate_boot_config(nodes)
    if errors:
        # Pass the message via the ``msg`` keyword like every other module
        # in this file, rather than relying on fail_json's positional
        # signature.  The message content is unchanged.
        module.fail_json(msg="".join(errors))
    else:
        module.exit_json()
def invoke_commands(self, file_path, raw: bool = None):
    """Build a reply listing command stats loaded from a YAML file.

    :param file_path: path relative to ``self.MINECRAFT_PATH``
    :param raw: when truthy, return the raw text form instead of embeds
    """
    with open(self.MINECRAFT_PATH / file_path, 'r') as fh:
        commands = yaml_safe_load(fh)
    properties = []
    self.message_list_builder(commands, properties)
    if raw:
        return self.make_raw("Full-Stats: Commands", properties)
    return self.make_embeds("Full-Stats: Commands", self.values, properties)
def invoke_bukkit(self, file_path, raw: bool = None):
    """Build a reply listing Bukkit stats loaded from a YAML file.

    :param file_path: path relative to ``self.MINECRAFT_PATH``
    :param raw: when truthy, return the raw text form instead of embeds
    """
    with open(self.MINECRAFT_PATH / file_path, 'r') as fh:
        bukkit = yaml_safe_load(fh)
    properties = []
    self.message_list_builder(bukkit, properties)
    if raw:
        return self.make_raw("Full-Stats: Bukkit", properties)
    return self.make_embeds("Full-Stats: Bukkit", self.values, properties)
def main():
    """Look up a hiera key and register it as an Ansible fact."""
    module = AnsibleModule(
        argument_spec=yaml_safe_load(DOCUMENTATION)['options'])
    name = module.params.get('name')
    cmd = ['/usr/bin/hiera', '-c', '/etc/puppet/hiera.yaml', name]
    try:
        result = subprocess.check_output(cmd, universal_newlines=True).rstrip()
    except subprocess.CalledProcessError:
        # A non-zero exit from hiera should surface as a module failure,
        # not an unhandled CalledProcessError traceback.
        module.fail_json(
            msg="Failed to retrieve hiera data for {}".format(name))
        return
    if result == 'nil':
        # hiera prints the literal string 'nil' when the key is unset.
        module.fail_json(
            msg="Failed to retrieve hiera data for {}".format(name))
    module.exit_json(changed=False, ansible_facts={name: result})
def main():
    """Format a validation report and exit according to its status."""
    module = AnsibleModule(
        argument_spec=yaml_safe_load(DOCUMENTATION)['options'])
    status = module.params.get('report_status')
    msg = format_msg_report(module.params.get('report_status'),
                            module.params.get('report_reason'),
                            module.params.get('report_recommendations'))
    if status == 'ERROR':
        module.fail_json(msg=msg)
    elif status == "SKIPPED":
        # ``warnings`` is a list of strings in the other modules of this
        # file (e.g. the warn and advanced-format modules); wrap the
        # single message so Ansible renders it consistently.
        module.exit_json(changed=False, warnings=[msg])
    else:
        module.exit_json(changed=False, msg=msg)
def main():
    """Check that node profiles match the available flavors."""
    module = AnsibleModule(
        argument_spec=yaml_safe_load(DOCUMENTATION)['options'])
    warnings, errors = verify_profiles(module.params.get('nodes'),
                                       module.params.get('flavors'))
    if errors:
        module.fail_json(msg="\n".join(errors))
    elif warnings:
        module.exit_json(warnings="\n".join(warnings))
    else:
        module.exit_json(msg="No profile errors detected.")
def main():
    """Entry point: run the DNF module implementation."""
    module = AnsibleModule(
        argument_spec=yaml_safe_load(DOCUMENTATION)['options'],
        supports_check_mode=False,
    )
    impl = DnfModule(module)
    try:
        impl.run()
    except dnf.exceptions.RepoError as de:
        module.fail_json(
            msg="Failed to synchronize repodata: {0}".format(to_native(de)),
            rc=1, results=[], changed=False)
def main():
    """Report whether the pacemaker status contains failed actions."""
    module = AnsibleModule(
        argument_spec=yaml_safe_load(DOCUMENTATION)['options'])
    pcs_status = parse_pcs_status(module.params.get('status'))
    failures = pcs_status['failures']
    failed = len(failures) > 0
    if failed:
        details = '\n'.join(format_failure(f) for f in failures)
        msg = ("The pacemaker status contains some failed actions:\n"
               + details)
    else:
        msg = "The pacemaker status reports no errors."
    module.exit_json(failed=failed, msg=msg)
def main():
    """Verify that every role has a usable flavor configured."""
    module = AnsibleModule(
        argument_spec=yaml_safe_load(DOCUMENTATION)['options']
    )
    flavor_result, warnings, errors = validate_roles_and_flavors(
        module.params.get('roles_info'), module.params.get('flavors'))
    if errors:
        module.fail_json(msg="\n".join(errors))
    elif warnings:
        module.exit_json(warnings="\n".join(warnings))
    else:
        module.exit_json(msg="All flavors configured on roles",
                         flavors=flavor_result)
def main():
    """Validate that an IP range is well-formed and large enough."""
    module = AnsibleModule(
        argument_spec=yaml_safe_load(DOCUMENTATION)['options'])
    start = module.params.get('start')
    end = module.params.get('end')
    min_size = module.params.get('min_size')

    # Argument sanity first; only inspect the range itself if arguments
    # are valid.
    problems = check_arguments(start, end, min_size)
    if not problems:
        problems = check_IP_range(start, end, min_size)

    if problems:
        module.fail_json(msg='\n'.join(problems))
    else:
        module.exit_json(msg='success')
def main():
    """Validate root device hints against each node's disks."""
    module = AnsibleModule(
        argument_spec=yaml_safe_load(DOCUMENTATION)['options']
    )
    params = module.params
    nodes = {node['name']: node for node in params.get('nodes')}
    flavors = params.get('flavors')
    introspection_data = dict(params.get('introspection_data'))

    errors, warnings = validate_node_disks(nodes, flavors,
                                           introspection_data)
    if errors:
        module.fail_json(msg="\n".join(errors))
    elif warnings:
        module.exit_json(warnings="\n".join(warnings))
    else:
        module.exit_json(msg="Root device hints are either set or not "
                             "necessary.")
def install_calico(self):
    """Apply the Calico manifests to the EKS cluster: CRDs first, then
    all remaining resources, with an explicit dependency between them."""
    # This produces an obnoxious diff on every subsequent run
    # Using a helm chart does not, so we should switch to that
    # However, we need to figure out how to get the helm chart
    # accessible by the CDK lambda first. Not clear how to give
    # s3 perms to it programmatically, and while ECR might be
    # an option it also doesn't seem like there's a way to push
    # the chart with existing api calls.
    # Probably need to do some custom lambda thing.
    for manifest in manifests:
        # Each entry looks like a (name, url) pair; prefer a local copy
        # of "<name>.yaml" when one exists, else fetch over HTTP.
        filename = f"{manifest[0]}.yaml"
        if isfile(filename):
            with open(filename) as f:
                manifest_text = f.read()
        else:
            manifest_text = requests_get(manifest[1]).text
        # Split the multi-document YAML stream on '---' separators and
        # parse each non-empty document.
        # NOTE(review): loaded_manifests is rebound on every iteration,
        # so only the documents from the LAST entry of `manifests`
        # survive the loop -- confirm whether `manifests` holds a single
        # entry or whether accumulation across entries was intended.
        loaded_manifests = [
            yaml_safe_load(i)
            for i in re_split("^---$", manifest_text, flags=MULTILINE)
            if i
        ]
    # CRDs must be created before the resources that instantiate them.
    crds = eks.KubernetesManifest(
        self.scope,
        "calico-crds",
        cluster=self.eks_cluster,
        manifest=[
            crd
            for crd in loaded_manifests
            if crd["kind"] == "CustomResourceDefinition"
        ],
    )
    non_crds = eks.KubernetesManifest(
        self.scope,
        "calico",
        cluster=self.eks_cluster,
        manifest=[
            notcrd
            for notcrd in loaded_manifests
            if notcrd["kind"] != "CustomResourceDefinition"
        ],
    )
    non_crds.node.add_dependency(crds)
def main():
    """Check that required VLANs are configured on attached switches."""
    module = AnsibleModule(
        argument_spec=yaml_safe_load(DOCUMENTATION)['options'])
    netenv_path = module.params.get('path')
    template_files = dict(
        (name, content[1])
        for (name, content) in module.params.get('template_files'))
    introspection_data = dict(module.params.get('introspection_data'))

    warnings, errors = validate_switch_vlans(netenv_path, template_files,
                                             introspection_data)
    if errors:
        module.fail_json(msg="\n".join(errors))
    elif warnings:
        module.exit_json(warnings="\n".join(warnings))
    else:
        module.exit_json(msg="All VLANs configured on attached switches")