def parse_salt_output(output_salt, parsed_summary):
    """
    Parse salt output from a YAML string and build a summary

    :param output_salt: string, YAML contents
    :param parsed_summary: list, appended to in place with coloured summary lines
    :return: int, the number of calls that failed
    """
    failed = 0
    changed = 0
    worked = 0
    salt_yml = yaml_ordered_load(output_salt)
    for salted_host in salt_yml:
        host_fail = 0
        host_work = 0
        host_change = 0
        parsed_summary.append(blue("\n{}:".format(salted_host), bold=True))
        for salt_event in salt_yml[salted_host]:
            event = salt_yml[salted_host][salt_event]
            for salt_event_type in event:
                if salt_event_type == "result":
                    if event[salt_event_type]:
                        worked += 1
                        host_work += 1
                    else:
                        failed += 1
                        host_fail += 1
                        parsed_summary.append(
                            red("\tFailure: {}".format(salt_event), bold=True))
                        parsed_summary.append(
                            red("\t Reason: {}".format(event['comment']), bold=False))
                        parsed_summary.append(
                            red("\t Debug: {}".format(event), bold=False))
                elif salt_event_type == "changes" and len(event[salt_event_type]):
                    changed += 1
                    host_change += 1
                    parsed_summary.append(
                        white("\tChange: {name} - Comment: {comment}".format(
                            name=event['name'], comment=event['comment']), bold=False))
        parsed_summary.append(yellow("\tSuccess: {}".format(host_work), bold=False))
        parsed_summary.append(white("\tChanged: {}".format(host_change), bold=False))
        parsed_summary.append(red("\tFailed: {}".format(host_fail), bold=False))

    parsed_summary.append(blue("\nSummary:", bold=True))
    parsed_summary.append(yellow("\tSuccess: {}".format(worked), bold=True))
    parsed_summary.append(white("\tChanged: {}".format(changed), bold=True))
    parsed_summary.append(red("\tFailed: {}".format(failed), bold=True))

    return failed

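# A minimal usage sketch for parse_salt_output, assuming yaml_ordered_load and the
# colour helpers used above are defined elsewhere in this module. The host, state id
# and YAML payload below are hypothetical.
def _example_parse_salt_output():
    sample_output = (
        "web-01:\n"
        "  cmd_|-hello_|-echo hello_|-run:\n"
        "    result: true\n"
        "    changes: {}\n"
        "    comment: Command echo hello run\n"
        "    name: echo hello\n"
    )
    summary_lines = []
    failures = parse_salt_output(sample_output, summary_lines)
    print("\n".join(summary_lines))
    return failures
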
def _load_and_merge_config_files(config_files=[]):
    """
    Walk through the discovered config files, load each one and merge them together

    :param config_files: list of config file descriptors, each a dict with a 'path'
                         and, for deprecated locations, a 'preferred' key
    :return: dict, the merged configuration
    """
    merged_config = {}
    for config_file in config_files:
        config_filename = os.path.expanduser(config_file.get('path'))
        if os.path.exists(config_filename):
            try:
                loaded_config = _load_config_file(config_filename)
                print(green("Loaded: {}".format(config_filename)))
                if config_file.get('preferred'):
                    print(red("Deprecated location for {} - Please use {}".format(
                        config_filename, config_file.get('preferred'))))
                merged_config = dict_deepmerge(loaded_config, merged_config)
            except Exception as e:
                if 'preferred' not in config_file:
                    print(yellow("Warning - error loading config: {}".format(
                        config_filename)))
                    print(yellow(e))
        else:
            # let's only print preferred locations that we skipped
            if 'preferred' not in config_file:
                print("Skipped: {}".format(config_filename))
    return merged_config

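# A minimal sketch of the structure _load_and_merge_config_files expects: a list of
# dicts with a 'path' and, for deprecated locations, a 'preferred' key pointing at the
# replacement path. The file paths below are hypothetical.
def _example_load_configs():
    candidate_configs = [
        {'path': '~/.config/myapp/config.yaml'},
        {'path': '~/.myapp.yaml', 'preferred': '~/.config/myapp/config.yaml'},
    ]
    return _load_and_merge_config_files(candidate_configs)
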
def __render_templates(files_to_render, dest_location, jinja_env):
    """ Render and save templates """
    errors = []

    from jinja2.exceptions import TemplateNotFound

    for template_file in files_to_render:
        filename = os.path.abspath(os.path.join(dest_location, template_file))
        print("Pillar template_file: {} --> {}".format(template_file, filename))
        if not os.path.isdir(os.path.dirname(filename)):
            os.makedirs(os.path.dirname(filename))

        try:
            print("Attempting to load template_file: {}".format(template_file))
            template_rendered = jinja_env.get_template(template_file).render(env=env)
            print(green("Pillar template_file rendered: {} --> {}".format(
                template_file, filename)))
            # Only write the template file if we can actually render it
            with open(os.path.join(dest_location, template_file), 'w') as f:
                f.write(template_rendered)
        except TemplateNotFound:
            errors.append(template_file)
            print(red("Pillar template_file not found: {} --> {}".format(
                template_file, filename)))

    if not len(errors):
        print(green("Pillar was successfully rendered in: {}".format(dest_location)))
    else:
        print(red("Pillar could not compile the following templates:"))
        for error in errors:
            print(red(" - {}".format(error)))

    return len(errors) == 0

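# A minimal sketch of driving __render_templates directly with a hand-built jinja
# Environment; the template directory is hypothetical, and env is the fabric env object
# assumed by the rest of this module.
def _example_render_templates():
    from jinja2 import Environment, FileSystemLoader
    jinja_env = Environment(loader=FileSystemLoader(['/tmp/example-pillar']))
    dest_location = tempfile.mkdtemp()
    rendered_ok = __render_templates(['top.sls'], dest_location, jinja_env)
    return dest_location if rendered_ok else None
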
def get_rendered_pillar_location(pillar_dir=None,
                                 projects_location=None,
                                 parse_top_sls=True,
                                 target=None):
    """
    Returns the path to the rendered pillar.

    Use this to render pillars written in jinja locally, so that unwanted data is not
    uploaded to the network. i.e. you can use constructs like:

    {% include 'opg-lpa-dev/pillar/services.sls' %}

    If you want salt to later render pillars with grain context, use constructs like:
    {% raw %} {{grains.get('roles')}} {% endraw %}
    {{" {{grains.get('roles')}} "}}

    To allow for server side templating of top.sls, you will need to set
    `parse_top_sls=False`.

    If there is no top.sls in the pillar root, it returns None.
    """
    if 'use_project_dir' not in env:
        env.use_project_dir = False

    if projects_location is None:
        projects_location = _get_projects_location()

    pillars = __load_pillar_dirs(pillar_dir, projects_location, target)
    jinja_env = _set_template_env(pillars, projects_location)
    dest_location = tempfile.mkdtemp()

    if parse_top_sls:
        files_to_render = _pillar_from_top_sls(dest_location, jinja_env)
    elif len(pillars):
        files_to_render, output_dir = _pillar_from_dirs(pillars)
    else:
        print(yellow("No template files were found to render"))
        files_to_render = []

    if __render_templates(files_to_render, dest_location, jinja_env) is False:
        print(red("Aborting due to pillar failing to render"))
        exit(-1)

    return dest_location

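# A minimal sketch of rendering a project's pillar from a fabric context before pushing
# it to a salt master. env.project and the project name below are hypothetical, and the
# project's pillar directory is assumed to exist under the configured projects location.
def _example_render_pillar():
    env.project = 'opg-lpa-dev'
    pillar_path = get_rendered_pillar_location()
    print("Rendered pillar available at: {}".format(pillar_path))
    return pillar_path
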
def commit_change_set(self):
    files_exist = []
    for change in self.change_set:
        if change in self._git_status():
            files_exist.append(change)

    if len(files_exist):
        try:
            self._stash_changes()
            self._checkout_branch()
            self._pull_branch()
            self._pop_changes()
            self._git_commit(files_exist, self.message)
        except GitCommandError as e:
            # 'No stash found.' only means there was nothing to pop, so it is not fatal
            if 'No stash found.' not in e.stderr:
                print(red("Git returned: {}".format(e.stderr)))
                exit(e.status)
    else:
        print(yellow(self.NO_CHANGES_TO_COMMIT))
        return self.NO_CHANGES_TO_COMMIT

def get_rendered_pillar_location(pillar_dir=None,
                                 projects_location=None,
                                 parse_top_sls=True):
    """
    Returns the path to the rendered pillar.

    Use this to render pillars written in jinja locally, so that unwanted data is not
    uploaded to the network. i.e. you can use constructs like:

    {% include 'opg-lpa-dev/pillar/services.sls' %}

    If you want salt to later render pillars with grain context, use constructs like:
    {% raw %} {{grains.get('roles')}} {% endraw %}
    {{" {{grains.get('roles')}} "}}

    To allow for server side templating of top.sls, you will need to set
    `parse_top_sls=False`.

    If there is no top.sls in the pillar root, it returns None.
    """
    from jinja2 import Environment
    from jinja2 import FileSystemLoader
    from jinja2.exceptions import TemplateNotFound

    if projects_location is None:
        projects_location = _get_projects_location()

    if pillar_dir is None:
        if "pillar_dir" in env:
            pillar_dir = env.pillar_dir
        else:
            assert env.project, "env.project or env.pillar_dir must be specified"
            pillar_dir = os.path.join(projects_location, env.project, 'pillar')

    jinja_env = Environment(
        loader=FileSystemLoader([pillar_dir, projects_location]))

    files_to_render = []
    dest_location = tempfile.mkdtemp()

    if parse_top_sls:
        # let's parse top.sls to only select files being referred in top.sls
        try:
            top_sls = jinja_env.get_template('top.sls').render(env=env)
        except TemplateNotFound:
            raise RuntimeError("Missing top.sls in pillar location. Skipping rendering.")

        top_content = yaml.load(top_sls)

        filename = os.path.join(dest_location, 'top.sls')
        with open(filename, 'w') as f:
            print("Pillar template_file: {} --> {}".format('top.sls', filename))
            f.write(top_sls)

        for k0, v0 in top_content.iteritems():
            for k1, v1 in v0.iteritems():
                for file_short in v1:
                    # We force this file to be relative in case jinja failed to render
                    # a variable. That would make the filename start with / and, instead
                    # of writing under dest_location, it would try to write in /
                    if isinstance(file_short, str):
                        files_to_render.append('./' + file_short.replace('.', '/') + '.sls')
    else:
        # let's select all files from pillar directory
        for root, dirs, files in os.walk(pillar_dir):
            rel_path = os.path.relpath(root, pillar_dir)
            for file_name in files:
                files_to_render.append(os.path.join(rel_path, file_name))

    # render and save templates
    for template_file in files_to_render:
        filename = os.path.abspath(os.path.join(dest_location, template_file))
        print("Pillar template_file: {} --> {}".format(template_file, filename))
        if not os.path.isdir(os.path.dirname(filename)):
            os.makedirs(os.path.dirname(filename))
        try:
            template_rendered = jinja_env.get_template(template_file).render(env=env)
        except TemplateNotFound:
            template_rendered = ''
            print(red("Pillar template_file not found: {} --> {}".format(
                template_file, filename)))

        with open(os.path.join(dest_location, template_file), 'w') as f:
            f.write(template_rendered)

    print(green("Pillar was successfully rendered in: {}".format(dest_location)))
    return dest_location

def salt(selector, args, parse_highstate=False, timeout=60):
    """
    `salt` / `salt-call` wrapper that:
    - checks if `env.saltmaster` is set to select between the `salt` and `salt-call` commands
    - checks the output of state.highstate and aborts on failure

    :param selector: i.e. '*', -G 'roles:foo'
    :param args: i.e. state.highstate
    """

    def dump_json(data):
        return json.dumps(data, indent=4)

    def stream_jsons(data):
        """
        Semi salt-specific JSON stream parser, written as a generator of JSON blobs.
        It assumes the input is a sequence of pretty-printed JSON documents.
        # TODO: work on a stream instead of one big data blob
        """
        data_buffer = []
        for line in data.splitlines():
            assert isinstance(line, basestring)
            data_buffer.append(line)
            # as salt output is pretty-printed JSON, a line starting with "}" marks
            # the end of a JSON blob
            if line.startswith("}"):
                if data_buffer:
                    yield json.loads("".join(data_buffer), object_pairs_hook=OrderedDict)
                    data_buffer = []
        assert not data_buffer

    if parse_highstate:
        remote_temp = sudo('mktemp')
        # Fabric merges stdout & stderr for sudo, so the combined output is useless.
        # Therefore we store the stdout as JSON in a separate file and parse it later.
        if 'saltmaster' in env and env.saltmaster:
            sudo("salt {} {} --out=json -t {} | tee {}".format(
                selector, args, timeout, remote_temp))
        else:
            sudo("salt-call {} --out=json | tee {}".format(args, remote_temp))
        sudo("chmod 664 {}".format(remote_temp))

        output_fd = StringIO()
        get(remote_temp, output_fd)
        output = output_fd.getvalue()

        failed = 0
        summary = defaultdict(lambda: defaultdict(lambda: 0))

        for out_parsed in stream_jsons(output):
            for server, states in out_parsed.iteritems():
                if isinstance(states, list):
                    failed += 1
                else:
                    for state, state_fields in states.iteritems():
                        summary[server]['states'] += 1
                        color = green
                        if state_fields['changes']:
                            color = yellow
                            summary[server]['changed'] += 1
                        if not state_fields['result']:
                            color = red
                            summary[server]['failed'] += 1
                            failed += 1
                            print(color("{}: ".format(state), bold=True))
                            print(color(dump_json(state_fields)))
                        else:
                            summary[server]['passed'] += 1
                            print(color("{}: ".format(state), bold=True))
                            print(color(dump_json(state_fields)))

        if failed:
            print
            print(red("Summary", bold=True))
            print(red(dump_json(summary)))
            abort('One of the states has failed')
        else:
            print
            print(green("Summary", bold=True))
            print(green(dump_json(summary)))
            # let's clean up, but only if everything was ok
            sudo('rm {}'.format(remote_temp))
    else:
        if 'saltmaster' in env and env.saltmaster:
            sudo("salt {} {} -t {}".format(selector, args, timeout))
        else:
            sudo("salt-call {}".format(args))

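# A minimal sketch of calling the salt() wrapper from a fabric task. It assumes fabric
# already has a target host configured and that env.saltmaster is set when the command
# should run on a salt master; the role targeting below is hypothetical.
def _example_highstate():
    env.saltmaster = True
    salt("-G 'roles:web'", 'state.highstate', parse_highstate=True, timeout=120)
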
def get_rendered_pillar_location():
    """
    Returns the path to the rendered pillar.

    Use this to render pillars written in jinja locally, so that unwanted data is not
    uploaded to the network. i.e. you can use constructs like:

    {% include 'opg-lpa-dev/pillar/services.sls' %}

    If there is no top.sls in the pillar root, it returns None.
    """
    from jinja2 import Environment
    from jinja2 import FileSystemLoader
    from jinja2.exceptions import TemplateNotFound

    assert env.project
    projects_location = _get_projects_location()

    jinja_env = Environment(loader=FileSystemLoader([
        os.path.join(projects_location, env.project, 'pillar'),
        projects_location
    ]))

    # let's get the rendered top.sls for the configured project
    try:
        top_sls = jinja_env.get_template('top.sls').render(env=env)
    except TemplateNotFound:
        print(red("Missing top.sls in pillar location. Skipping rendering."))
        return None

    top_content = yaml.load(top_sls)

    dest_location = tempfile.mkdtemp()
    with open(os.path.join(dest_location, 'top.sls'), 'w') as f:
        f.write(top_sls)

    # get the list of files referenced by top.sls
    files_to_render = []
    for k0, v0 in top_content.iteritems():
        for k1, v1 in v0.iteritems():
            for file_short in v1:
                # We force this file to be relative in case jinja failed to render a
                # variable. That would make the filename start with / and, instead of
                # writing under dest_location, it would try to write in /
                files_to_render.append('./' + file_short.replace('.', '/') + '.sls')

    # render and save templates
    for template_file in files_to_render:
        filename = os.path.abspath(os.path.join(dest_location, template_file))
        print(yellow("Pillar template_file: {} --> {}".format(template_file, filename)))
        if not os.path.isdir(os.path.dirname(filename)):
            os.makedirs(os.path.dirname(filename))
        try:
            template_rendered = jinja_env.get_template(template_file).render(env=env)
        except TemplateNotFound:
            template_rendered = ''
            print(yellow("Pillar template_file not found: {} --> {}".format(
                template_file, filename)))

        with open(os.path.join(dest_location, template_file), 'w') as f:
            f.write(template_rendered)

    print(green("Pillar was rendered in: {}".format(dest_location)))
    return dest_location
