class InventoryModule(BaseInventoryPlugin):
    """Dynamic inventory plugin that builds inventory from a VIRL/CML lab.

    Connects to the VIRL controller (via ``ClientLibrary``), finds the lab
    named in the config (or ``VIRL_LAB``), and adds every node of that lab
    as a host.  Nodes are placed into a configurable main group and also
    grouped by their ``node_definition``.
    """

    NAME = 'virl'

    def __init__(self):
        super(InventoryModule, self).__init__()

        # populated from config / environment in parse()
        self.username = None
        self.password = None
        self.host = None
        self.lab = None
        self.group = None
        self.display = Display()

    def verify_file(self, path):
        """Return True when *path* looks like a config file for this plugin.

        Only files whose name ends in 'virl.yml' or 'virl.yaml' are accepted.
        """
        if super(InventoryModule, self).verify_file(path):
            endings = ('virl.yaml', 'virl.yml')
            if any(path.endswith(ending) for ending in endings):
                return True
        self.display.debug("virl inventory filename must end with 'virl.yml' or 'virl.aml'".replace('aml', 'yaml'))
        return False

    def _from_env_or_option(self, env_var, option):
        # Environment variables take precedence over the plugin configuration;
        # an empty environment value falls back to the config option.
        value = os.environ.get(env_var)
        if value:
            return value
        return self.get_option(option)

    def parse(self, inventory, loader, path, cache=True):
        """Populate *inventory* with the nodes of the configured VIRL lab.

        :param inventory: inventory object to populate
        :param loader: Ansible DataLoader
        :param path: path to the inventory source file
        :param cache: passed through to the base class
        :raises AnsibleParserError: when the controller login fails or a
            group cannot be added
        """
        # call base method to ensure properties are available for use with
        # other helper methods
        super(InventoryModule, self).parse(inventory, loader, path, cache)

        # parse the 'common format' inventory source and update any options
        # declared in DOCUMENTATION as needed
        self._read_config_data(path)

        self.host = self._from_env_or_option('VIRL_HOST', 'host')
        self.display.vvv("virl2.py - VIRL_HOST: {0}".format(self.host))

        self.username = self._from_env_or_option('VIRL_USERNAME', 'username')
        self.display.vvv("virl2.py - VIRL_USERNAME: {0}".format(self.username))

        self.password = self._from_env_or_option('VIRL_PASSWORD', 'password')

        self.lab = self._from_env_or_option('VIRL_LAB', 'lab')
        self.display.vvv("virl2.py - VIRL_LAB: {0}".format(self.lab))

        if not self.lab:
            # nothing configured: an empty inventory is not an error
            self.display.vvv("No lab defined. Nothing to do.")
            return

        self.group = self.get_option('group')
        if self.group is None:
            self.group = 'virl_hosts'
        self.display.vvv("virl2.py - Group: {0}".format(self.group))

        # expose the controller coordinates to every host
        self.inventory.set_variable('all', 'virl_host', self.host)
        self.inventory.set_variable('all', 'virl_username', self.username)
        self.inventory.set_variable('all', 'virl_password', self.password)
        self.inventory.set_variable('all', 'virl_lab', self.lab)

        url = 'https://{0}'.format(self.host)
        try:
            client = ClientLibrary(url, username=self.username, password=self.password, ssl_verify=False)
        except Exception:
            # was a bare ``except:`` — keep the behaviour (any login failure
            # becomes a parser error) without trapping SystemExit et al.
            raise AnsibleParserError('Unable to log into {0}'.format(url))

        labs = client.find_labs_by_title(self.lab)
        if not labs:
            return

        try:
            self.inventory.add_group(self.group)
        except AnsibleError as e:
            # BUG FIX: the original formatted the unbound local ``group`` here,
            # which raised NameError instead of the intended message
            raise AnsibleParserError("Unable to add group %s: %s" % (self.group, to_text(e)))

        group_dict = {}
        lab = labs[0]
        lab.sync()
        for node in lab.nodes():
            self.inventory.add_host(node.label, group=self.group)
            virl = {
                'state': node.state,
                'image_definition': node.image_definition,
                'node_definition': node.node_definition,
                'cpus': node.cpus,
                'ram': node.ram,
                'config': node.config,
                'data_volume': node.data_volume,
            }
            interface_list = []
            ansible_host = None
            for interface in node.interfaces():
                # the first interface with a discovered IPv4 address becomes
                # the node's ansible_host
                if interface.discovered_ipv4 and not ansible_host:
                    ansible_host = interface.discovered_ipv4[0]
                interface_dict = {
                    'name': interface.label,
                    'state': interface.state,
                    'ipv4_addresses': interface.discovered_ipv4,
                    'ipv6_addresses': interface.discovered_ipv6,
                    'mac_address': interface.discovered_mac_address
                }
                interface_list.append(interface_dict)
            virl.update({'interfaces': interface_list})
            if ansible_host:
                self.inventory.set_variable(node.label, 'ansible_host', ansible_host)
            self.inventory.set_variable(node.label, 'virl_facts', virl)
            self.display.vvv("Adding {0}({1}) to group {2}, state: {3}, ansible_host: {4}".format(
                node.label, node.node_definition, self.group, node.state, ansible_host))

            # Group by node_definition
            if node.node_definition not in group_dict:
                try:
                    group_dict[node.node_definition] = self.inventory.add_group(node.node_definition)
                except AnsibleError as e:
                    # BUG FIX: report the group we actually tried to add, not
                    # the previously-created main group
                    raise AnsibleParserError("Unable to add group %s: %s" % (node.node_definition, to_text(e)))
            self.inventory.add_host(node.label, group=node.node_definition)
class VariableManager:
    """Central aggregator for Ansible variables.

    Collects variables from inventory, group_vars/host_vars files, fact
    caches, play/task vars and extra vars, and merges them via
    ``combine_vars`` in the precedence order documented on :meth:`get_vars`.
    Instances are picklable (``__getstate__``/``__setstate__``) minus the
    inventory reference, which is re-attached with :meth:`set_inventory`.
    """

    def __init__(self):
        # persistent facts (backed by the configured fact cache plugin)
        self._fact_cache = FactCache()
        # set_fact/register style facts, valid only for this run
        self._nonpersistent_fact_cache = defaultdict(dict)
        # per-host vars set via set_host_variable()
        self._vars_cache = defaultdict(dict)
        self._extra_vars = defaultdict(dict)
        self._host_vars_files = defaultdict(dict)
        self._group_vars_files = defaultdict(dict)
        self._inventory = None
        # random token substituted for omitted parameters; unguessable so it
        # cannot collide with a real user value
        self._omit_token = '__omit_place_holder__%s' % sha1(
            os.urandom(64)).hexdigest()

        # prefer the CLI's shared Display object when running under the
        # ansible entry point; fall back to a private one otherwise
        try:
            from __main__ import display
            self._display = display
        except ImportError:
            from ansible.utils.display import Display
            self._display = Display()

    def __getstate__(self):
        # shallow copies so the pickled state is detached from live caches;
        # note the inventory is deliberately NOT serialized
        data = dict(
            fact_cache=self._fact_cache.copy(),
            np_fact_cache=self._nonpersistent_fact_cache.copy(),
            vars_cache=self._vars_cache.copy(),
            extra_vars=self._extra_vars.copy(),
            host_vars_files=self._host_vars_files.copy(),
            group_vars_files=self._group_vars_files.copy(),
            omit_token=self._omit_token,
        )
        return data

    def __setstate__(self, data):
        self._fact_cache = data.get('fact_cache', defaultdict(dict))
        self._nonpersistent_fact_cache = data.get('np_fact_cache', defaultdict(dict))
        self._vars_cache = data.get('vars_cache', defaultdict(dict))
        self._extra_vars = data.get('extra_vars', dict())
        self._host_vars_files = data.get('host_vars_files', defaultdict(dict))
        self._group_vars_files = data.get('group_vars_files', defaultdict(dict))
        self._omit_token = data.get(
            'omit_token',
            '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest())
        # inventory must be re-attached by the unpickling side via set_inventory()
        self._inventory = None

    def _get_cache_entry(self, play=None, host=None, task=None):
        # Builds a stable string key identifying the (play, host, task)
        # context; "NONE" stands in for any missing component.
        play_id = "NONE"
        if play:
            play_id = play._uuid

        host_id = "NONE"
        if host:
            host_id = host.get_name()

        task_id = "NONE"
        if task:
            task_id = task._uuid

        return "PLAY:%s;HOST:%s;TASK:%s" % (play_id, host_id, task_id)

    @property
    def extra_vars(self):
        ''' ensures a clean copy of the extra_vars are made '''
        return self._extra_vars.copy()

    @extra_vars.setter
    def extra_vars(self, value):
        ''' ensures a clean copy of the extra_vars are used to set the value '''
        # NOTE(review): assert is stripped under ``python -O``; an explicit
        # raise would be safer input validation
        assert isinstance(value, MutableMapping)
        self._extra_vars = value.copy()

    def set_inventory(self, inventory):
        self._inventory = inventory

    def _preprocess_vars(self, a):
        '''
        Ensures that vars contained in the parameter passed in are
        returned as a list of dictionaries, to ensure for instance
        that vars loaded from a file conform to an expected state.
        '''
        # NOTE(review): get_vars() below calls the module-level
        # preprocess_vars(), not this method — confirm which is intended.

        if a is None:
            return None
        elif not isinstance(a, list):
            data = [a]
        else:
            data = a

        for item in data:
            if not isinstance(item, MutableMapping):
                raise AnsibleError(
                    "variable files must contain either a dictionary of variables, or a list of dictionaries. Got: %s (%s)"
                    % (a, type(a)))

        return data

    def get_vars(self, loader, play=None, host=None, task=None, include_hostvars=True, include_delegate_to=True, use_cache=True):
        '''
        Returns the variables, with optional "context" given via the parameters
        for the play, host, and task (which could possibly result in different
        sets of variables being returned due to the additional context).

        The order of precedence is:
        - play->roles->get_default_vars (if there is a play context)
        - group_vars_files[host] (if there is a host context)
        - host_vars_files[host] (if there is a host context)
        - host->get_vars (if there is a host context)
        - fact_cache[host] (if there is a host context)
        - play vars (if there is a play context)
        - play vars_files (if there's no host context, ignore file names that cannot be templated)
        - task->get_vars (if there is a task context)
        - vars_cache[host] (if there is a host context)
        - extra vars

        WARNING: the statement order below IS the precedence order — later
        combine_vars() calls win.  Do not reorder.
        '''

        debug("in VariableManager get_vars()")

        cache_entry = self._get_cache_entry(play=play, host=host, task=task)
        if cache_entry in VARIABLE_CACHE and use_cache:
            debug("vars are cached, returning them now")
            return VARIABLE_CACHE[cache_entry]

        all_vars = dict()
        magic_variables = self._get_magic_variables(
            loader=loader,
            play=play,
            host=host,
            task=task,
            include_hostvars=include_hostvars,
            include_delegate_to=include_delegate_to,
        )

        if play:
            # first we compile any vars specified in defaults/main.yml
            # for all roles within the specified play
            for role in play.get_roles():
                all_vars = combine_vars(all_vars, role.get_default_vars())

        # if we have a task in this context, and that task has a role, make
        # sure it sees its defaults above any other roles, as we previously
        # (v1) made sure each task had a copy of its roles default vars
        if task and task._role is not None:
            all_vars = combine_vars(all_vars, task._role.get_default_vars())

        if host:
            # next, if a host is specified, we load any vars from group_vars
            # files and then any vars from host_vars files which may apply to
            # this host or the groups it belongs to

            # we merge in vars from groups specified in the inventory (INI or script)
            all_vars = combine_vars(all_vars, host.get_group_vars())

            # then we merge in the special 'all' group_vars first, if they exist
            if 'all' in self._group_vars_files:
                data = preprocess_vars(self._group_vars_files['all'])
                for item in data:
                    all_vars = combine_vars(all_vars, item)

            for group in host.get_groups():
                if group.name in self._group_vars_files and group.name != 'all':
                    for data in self._group_vars_files[group.name]:
                        data = preprocess_vars(data)
                        for item in data:
                            all_vars = combine_vars(all_vars, item)

            # then we merge in vars from the host specified in the inventory (INI or script)
            all_vars = combine_vars(all_vars, host.get_vars())

            # then we merge in the host_vars/<hostname> file, if it exists
            host_name = host.get_name()
            if host_name in self._host_vars_files:
                for data in self._host_vars_files[host_name]:
                    data = preprocess_vars(data)
                    for item in data:
                        all_vars = combine_vars(all_vars, item)

            # finally, the facts caches for this host, if it exists
            try:
                host_facts = wrap_var(self._fact_cache.get(host.name, dict()))
                all_vars = combine_vars(all_vars, host_facts)
            except KeyError:
                pass

        if play:
            all_vars = combine_vars(all_vars, play.get_vars())

            for vars_file_item in play.get_vars_files():
                # create a set of temporary vars here, which incorporate the extra
                # and magic vars so we can properly template the vars_files entries
                temp_vars = combine_vars(all_vars, self._extra_vars)
                temp_vars = combine_vars(temp_vars, magic_variables)
                templar = Templar(loader=loader, variables=temp_vars)

                # we assume each item in the list is itself a list, as we
                # support "conditional includes" for vars_files, which mimics
                # the with_first_found mechanism.
                #vars_file_list = templar.template(vars_file_item)
                vars_file_list = vars_file_item
                if not isinstance(vars_file_list, list):
                    vars_file_list = [vars_file_list]

                # now we iterate through the (potential) files, and break out
                # as soon as we read one from the list. If none are found, we
                # raise an error, which is silently ignored at this point.
                try:
                    for vars_file in vars_file_list:
                        vars_file = templar.template(vars_file)
                        try:
                            data = preprocess_vars(
                                loader.load_from_file(vars_file))
                            if data is not None:
                                for item in data:
                                    all_vars = combine_vars(all_vars, item)
                            break
                        except AnsibleFileNotFound as e:
                            # we continue on loader failures
                            continue
                        except AnsibleParserError as e:
                            raise
                    else:
                        # for/else: no file in the list could be loaded
                        raise AnsibleFileNotFound(
                            "vars file %s was not found" % vars_file_item)
                except (UndefinedError, AnsibleUndefinedVariable):
                    # only fatal when facts were gathered (module_setup) and a
                    # task context exists — i.e. the variable really should
                    # have been defined by now
                    if host is not None and self._fact_cache.get(
                            host.name, dict()).get('module_setup') and task is not None:
                        raise AnsibleUndefinedVariable(
                            "an undefined variable was found when attempting to template the vars_files item '%s'"
                            % vars_file_item,
                            obj=vars_file_item)
                    else:
                        # we do not have a full context here, and the missing variable could be
                        # because of that, so just show a warning and continue
                        self._display.vvv(
                            "skipping vars_file '%s' due to an undefined variable"
                            % vars_file_item)
                        continue

            if not C.DEFAULT_PRIVATE_ROLE_VARS:
                for role in play.get_roles():
                    all_vars = combine_vars(
                        all_vars, role.get_vars(include_params=False))

        if task:
            if task._role:
                all_vars = combine_vars(all_vars, task._role.get_vars())
            all_vars = combine_vars(all_vars, task.get_vars())

        if host:
            all_vars = combine_vars(
                all_vars, self._vars_cache.get(host.get_name(), dict()))
            all_vars = combine_vars(
                all_vars,
                self._nonpersistent_fact_cache.get(host.name, dict()))

        # extra vars and magic variables always win
        all_vars = combine_vars(all_vars, self._extra_vars)
        all_vars = combine_vars(all_vars, magic_variables)

        # if we have a task and we're delegating to another host, figure out the
        # variables for that host now so we don't have to rely on hostvars later
        if task and task.delegate_to is not None and include_delegate_to:
            all_vars['ansible_delegated_vars'] = self._get_delegated_vars(
                loader, play, task, all_vars)

        #VARIABLE_CACHE[cache_entry] = all_vars

        debug("done with get_vars()")
        return all_vars

    def invalidate_hostvars_cache(self, play):
        # Drops the cached HostVars for this play so the next
        # _get_magic_variables() call rebuilds it.
        hostvars_cache_entry = self._get_cache_entry(play=play)
        if hostvars_cache_entry in HOSTVARS_CACHE:
            del HOSTVARS_CACHE[hostvars_cache_entry]

    def _get_magic_variables(self, loader, play, host, task, include_hostvars, include_delegate_to):
        '''
        Returns a dictionary of so-called "magic" variables in Ansible,
        which are special variables we set internally for use.
        '''

        variables = dict()
        variables['playbook_dir'] = loader.get_basedir()

        if host:
            # all groups the host belongs to, minus the implicit 'all'
            variables['group_names'] = [
                group.name for group in host.get_groups() if group.name != 'all'
            ]

            if self._inventory is not None:
                variables['groups'] = dict()
                for (group_name, group) in iteritems(self._inventory.groups):
                    variables['groups'][group_name] = [
                        h.name for h in group.get_hosts()
                    ]

                if include_hostvars:
                    # HostVars construction is expensive, so it is cached per play
                    hostvars_cache_entry = self._get_cache_entry(play=play)
                    if hostvars_cache_entry in HOSTVARS_CACHE:
                        hostvars = HOSTVARS_CACHE[hostvars_cache_entry]
                    else:
                        hostvars = HostVars(play=play,
                                            inventory=self._inventory,
                                            loader=loader,
                                            variable_manager=self)
                        HOSTVARS_CACHE[hostvars_cache_entry] = hostvars
                    variables['hostvars'] = hostvars
                    variables['vars'] = hostvars[host.get_name()]

        if play:
            variables['role_names'] = [r._role_name for r in play.roles]

        if task:
            if task._role:
                variables['role_path'] = task._role._role_path

        if self._inventory is not None:
            variables['inventory_dir'] = self._inventory.basedir()
            variables['inventory_file'] = self._inventory.src()
            if play:
                # add the list of hosts in the play, as adjusted for limit/filters
                # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_hosts,
                # however this would take work in the templating engine, so for now
                # we'll add both so we can give users something transitional to use
                host_list = [x.name for x in self._inventory.get_hosts()]
                variables['play_hosts'] = host_list
                variables['ansible_play_hosts'] = host_list

        # the 'omit' value allows params to be left out if the variable they are based on is undefined
        variables['omit'] = self._omit_token
        variables['ansible_version'] = CLI.version_info(gitinfo=False)

        return variables

    def _get_delegated_vars(self, loader, play, task, existing_variables):
        """Compute the full variable set for each host a task delegates to.

        Returns a dict mapping delegated host name -> its variables.
        """
        # we unfortunately need to template the delegate_to field here,
        # as we're fetching vars before post_validate has been called on
        # the task that has been passed in
        vars_copy = existing_variables.copy()
        templar = Templar(loader=loader, variables=vars_copy)

        items = []
        if task.loop is not None:
            if task.loop in lookup_loader:
                #TODO: remove convert_bare true and deprecate this in with_
                try:
                    loop_terms = listify_lookup_plugin_terms(
                        terms=task.loop_args,
                        templar=templar,
                        loader=loader,
                        fail_on_undefined=True,
                        convert_bare=True)
                except AnsibleUndefinedVariable as e:
                    if 'has no attribute' in str(e):
                        loop_terms = []
                        self._display.deprecated(
                            "Skipping task due to undefined attribute, in the future this will be a fatal error."
                        )
                    else:
                        raise
                items = lookup_loader.get(task.loop,
                                          loader=loader,
                                          templar=templar).run(
                                              terms=loop_terms,
                                              variables=vars_copy)
            else:
                raise AnsibleError(
                    "Unexpected failure in finding the lookup named '%s' in the available lookup plugins"
                    % task.loop)
        else:
            # no loop: evaluate delegate_to exactly once
            items = [None]

        delegated_host_vars = dict()
        for item in items:
            # update the variables with the item value for templating, in case we need it
            if item is not None:
                vars_copy['item'] = item

            templar.set_available_variables(vars_copy)
            delegated_host_name = templar.template(task.delegate_to,
                                                   fail_on_undefined=False)

            if delegated_host_name in delegated_host_vars:
                # no need to repeat ourselves, as the delegate_to value
                # does not appear to be tied to the loop item variable
                continue

            # a dictionary of variables to use if we have to create a new host below
            new_delegated_host_vars = dict(
                ansible_host=delegated_host_name,
                ansible_user=C.DEFAULT_REMOTE_USER,
                ansible_connection=C.DEFAULT_TRANSPORT,
            )

            # now try to find the delegated-to host in inventory, or failing that,
            # create a new host on the fly so we can fetch variables for it
            delegated_host = None
            if self._inventory is not None:
                delegated_host = self._inventory.get_host(delegated_host_name)
                # try looking it up based on the address field, and finally
                # fall back to creating a host on the fly to use for the var lookup
                if delegated_host is None:
                    for h in self._inventory.get_hosts(
                            ignore_limits_and_restrictions=True):
                        # check if the address matches, or if both the delegated_to host
                        # and the current host are in the list of localhost aliases
                        if h.address == delegated_host_name or h.name in C.LOCALHOST and delegated_host_name in C.LOCALHOST:
                            delegated_host = h
                            break
                    else:
                        # for/else: no inventory host matched
                        delegated_host = Host(name=delegated_host_name)
                        delegated_host.vars.update(new_delegated_host_vars)
            else:
                delegated_host = Host(name=delegated_host_name)
                delegated_host.vars.update(new_delegated_host_vars)

            # now we go fetch the vars for the delegated-to host and save them in our
            # master dictionary of variables to be used later in the TaskExecutor/PlayContext
            delegated_host_vars[delegated_host_name] = self.get_vars(
                loader=loader,
                play=play,
                host=delegated_host,
                task=task,
                include_delegate_to=False,
                include_hostvars=False,
            )

        return delegated_host_vars

    def _get_inventory_basename(self, path):
        '''
        Returns the basename minus the extension of the given path, so the
        bare filename can be matched against host/group names later
        '''

        (name, ext) = os.path.splitext(os.path.basename(path))
        # only YAML extensions are stripped; any other extension is kept
        if ext not in ('.yml', '.yaml'):
            return os.path.basename(path)
        else:
            return name

    def _load_inventory_file(self, path, loader):
        '''
        helper function, which loads the file and gets the
        basename of the file without the extension

        Returns a (name, data) tuple; data may be None when nothing loads.
        Directories are recursed into and their contents merged.
        '''

        if loader.is_directory(path):
            data = dict()

            try:
                names = loader.list_directory(path)
            except os.error as err:
                raise AnsibleError("This folder cannot be listed: %s: %s." %
                                   (path, err.strerror))

            # evaluate files in a stable order rather than whatever
            # order the filesystem lists them.
            names.sort()

            # do not parse hidden files or dirs, e.g. .svn/
            paths = [
                os.path.join(path, name) for name in names
                if not name.startswith('.')
            ]
            for p in paths:
                _found, results = self._load_inventory_file(path=p,
                                                            loader=loader)
                if results is not None:
                    data = combine_vars(data, results)

        else:
            file_name, ext = os.path.splitext(path)
            data = None
            if not ext or ext not in C.YAML_FILENAME_EXTENSIONS:
                # no (recognized) extension: probe each YAML extension in turn
                for test_ext in C.YAML_FILENAME_EXTENSIONS:
                    new_path = path + test_ext
                    if loader.path_exists(new_path):
                        data = loader.load_from_file(new_path)
                        break
            else:
                if loader.path_exists(path):
                    data = loader.load_from_file(path)

        name = self._get_inventory_basename(path)
        return (name, data)

    def add_host_vars_file(self, path, loader):
        '''
        Loads and caches a host_vars file in the _host_vars_files dict,
        where the key to that dictionary is the basename of the file, minus
        the extension, for matching against a given inventory host name
        '''

        (name, data) = self._load_inventory_file(path, loader)
        if data:
            if name not in self._host_vars_files:
                self._host_vars_files[name] = []
            self._host_vars_files[name].append(data)
            return data
        else:
            return dict()

    def add_group_vars_file(self, path, loader):
        '''
        Loads and caches a group_vars file in the _group_vars_files dict,
        where the key to that dictionary is the basename of the file, minus
        the extension, for matching against a given inventory group name
        '''

        (name, data) = self._load_inventory_file(path, loader)
        if data:
            if name not in self._group_vars_files:
                self._group_vars_files[name] = []
            self._group_vars_files[name].append(data)
            return data
        else:
            return dict()

    def set_host_facts(self, host, facts):
        '''
        Sets or updates the given facts for a host in the fact cache.
        '''

        # NOTE(review): assert is stripped under ``python -O``
        assert isinstance(facts, dict)

        if host.name not in self._fact_cache:
            self._fact_cache[host.name] = facts
        else:
            try:
                self._fact_cache.update(host.name, facts)
            except KeyError:
                self._fact_cache[host.name] = facts

    def set_nonpersistent_facts(self, host, facts):
        '''
        Sets or updates the given facts for a host in the fact cache.
        '''

        # NOTE(review): assert is stripped under ``python -O``
        assert isinstance(facts, dict)

        if host.name not in self._nonpersistent_fact_cache:
            self._nonpersistent_fact_cache[host.name] = facts
        else:
            try:
                self._nonpersistent_fact_cache[host.name].update(facts)
            except KeyError:
                self._nonpersistent_fact_cache[host.name] = facts

    def set_host_variable(self, host, varname, value):
        '''
        Sets a value in the vars_cache for a host.
        '''

        host_name = host.get_name()
        if host_name not in self._vars_cache:
            self._vars_cache[host_name] = dict()
        self._vars_cache[host_name][varname] = value
class Grapher(object):
    """
    Main class to make the graph
    """
    DEFAULT_GRAPH_ATTR = {
        "ratio": "fill",
        "rankdir": "LR",
        "concentrate": "true",
        "ordering": "in"
    }
    DEFAULT_EDGE_ATTR = {"sep": "10", "esep": "5"}

    def __init__(self, data_loader, inventory_manager, variable_manager, playbook_filename, options, graph=None):
        """
        Main grapher responsible to parse the playbook and draw graph
        :param data_loader:
        :type data_loader: ansible.parsing.dataloader.DataLoader
        :param inventory_manager:
        :type inventory_manager: ansible.inventory.manager.InventoryManager
        :param variable_manager:
        :type variable_manager: ansible.vars.manager.VariableManager
        :param options Command line options
        :type options: optparse.Values
        :param playbook_filename:
        :type playbook_filename: str
        :param graph: an existing Digraph to draw into; a new one is created
            when None
        :type graph: Digraph
        """
        self.options = options
        self.variable_manager = variable_manager
        self.inventory_manager = inventory_manager
        self.data_loader = data_loader
        self.playbook_filename = playbook_filename
        self.rendered_file_path = None
        self.display = Display(verbosity=options.verbosity)

        # default the tag filters when the CLI did not provide them
        if self.options.tags is None:
            self.options.tags = ["all"]
        if self.options.skip_tags is None:
            self.options.skip_tags = []

        self.graph_representation = GraphRepresentation()

        self.playbook = Playbook.load(self.playbook_filename,
                                      loader=self.data_loader,
                                      variable_manager=self.variable_manager)

        if graph is None:
            self.graph = CustomDigrah(edge_attr=self.DEFAULT_EDGE_ATTR,
                                      graph_attr=self.DEFAULT_GRAPH_ATTR,
                                      format="svg")
        else:
            # BUG FIX: a caller-supplied graph was previously ignored,
            # leaving self.graph unset and breaking make_graph()
            self.graph = graph

    def template(self, data, variables, fail_on_undefined=False):
        """
        Template the data using Jinja. Return data if an error occurs during
        the templating (unless fail_on_undefined is set).
        :param fail_on_undefined:
        :type fail_on_undefined: bool
        :param data:
        :type data: Union[str, ansible.parsing.yaml.objects.AnsibleUnicode]
        :param variables:
        :type variables: dict
        :return: the templated data, or the original data on a swallowed error
        """
        try:
            templar = Templar(loader=self.data_loader, variables=variables)
            return templar.template(data, fail_on_undefined=fail_on_undefined)
        except AnsibleError as ansible_error:
            # Sometimes we need to export the graph even when some variables
            # cannot be resolved outside a real run
            if fail_on_undefined:
                raise
            self.display.warning(ansible_error)
            return data

    def make_graph(self):
        """
        Loop through the playbook and make the graph.

        The graph is drawn following this order
        (https://docs.ansible.com/ansible/2.4/playbooks_reuse_roles.html#using-roles)
        for each play:
            draw pre_tasks
            draw roles
                if include_role_tasks
                    draw role_tasks
            draw tasks
            draw post_tasks
        :return:
        :rtype:
        """

        # the root node
        self.graph.node(self.playbook_filename, style="dotted", id="root_node")

        # loop through the plays
        for play_counter, play in enumerate(self.playbook.get_plays(), 1):

            # the load basedir is relative to the playbook path
            if play._included_path is not None:
                self.data_loader.set_basedir(play._included_path)
            else:
                self.data_loader.set_basedir(self.playbook._basedir)
            self.display.vvv("Loader basedir set to {}".format(
                self.data_loader.get_basedir()))

            play_vars = self.variable_manager.get_vars(play)
            play_hosts = [
                h.get_name() for h in self.inventory_manager.get_hosts(
                    self.template(play.hosts, play_vars))
            ]
            play_name = "Play #{}: {} ({})".format(play_counter,
                                                   clean_name(play.get_name()),
                                                   len(play_hosts))
            play_name = self.template(play_name, play_vars)

            self.display.banner("Graphing " + play_name)
            play_id = "play_" + str(uuid.uuid4())

            self.graph_representation.add_node(play_id)

            with self.graph.subgraph(name=play_name) as play_subgraph:
                color, play_font_color = get_play_colors(play)
                # play node
                play_subgraph.node(play_name,
                                   id=play_id,
                                   style="filled",
                                   shape="box",
                                   color=color,
                                   fontcolor=play_font_color,
                                   tooltip=" ".join(play_hosts))

                # edge from root node to plays
                play_edge_id = "edge_" + str(uuid.uuid4())
                play_subgraph.edge(self.playbook_filename,
                                   play_name,
                                   id=play_edge_id,
                                   style="bold",
                                   label=str(play_counter),
                                   color=color,
                                   fontcolor=color)

                # loop through the pre_tasks
                self.display.v("Graphing pre_tasks...")
                nb_pre_tasks = 0
                for pre_task_block in play.pre_tasks:
                    nb_pre_tasks = self._include_tasks_in_blocks(
                        current_play=play,
                        graph=play_subgraph,
                        parent_node_name=play_name,
                        parent_node_id=play_id,
                        block=pre_task_block,
                        color=color,
                        current_counter=nb_pre_tasks,
                        play_vars=play_vars,
                        node_name_prefix="[pre_task] ")

                # loop through the roles
                self.display.v("Graphing roles...")
                role_number = 0
                for role in play.get_roles():
                    # Don't insert tasks from ``import/include_role``, preventing
                    # duplicate graphing
                    if role.from_include:
                        continue

                    role_number += 1
                    role_name = "[role] " + clean_name(role.get_name())

                    # the role object doesn't inherit the tags from the play. So we add it manually
                    role.tags = role.tags + play.tags

                    role_not_tagged = ""
                    if not role.evaluate_tags(only_tags=self.options.tags,
                                              skip_tags=self.options.skip_tags,
                                              all_vars=play_vars):
                        role_not_tagged = NOT_TAGGED

                    with self.graph.subgraph(name=role_name,
                                             node_attr={}) as role_subgraph:
                        current_counter = role_number + nb_pre_tasks
                        role_id = "role_" + str(uuid.uuid4()) + role_not_tagged
                        role_subgraph.node(role_name, id=role_id)

                        edge_id = "edge_" + str(uuid.uuid4()) + role_not_tagged

                        # edge from play to role
                        role_subgraph.edge(play_name,
                                           role_name,
                                           label=str(current_counter),
                                           color=color,
                                           fontcolor=color,
                                           id=edge_id)

                        self.graph_representation.add_link(play_id, edge_id)
                        self.graph_representation.add_link(edge_id, role_id)

                        # loop through the tasks of the roles
                        if self.options.include_role_tasks:
                            role_tasks_counter = 0
                            for block in role.compile(play):
                                role_tasks_counter = self._include_tasks_in_blocks(
                                    current_play=play,
                                    graph=role_subgraph,
                                    parent_node_name=role_name,
                                    parent_node_id=role_id,
                                    block=block,
                                    color=color,
                                    play_vars=play_vars,
                                    current_counter=role_tasks_counter,
                                    node_name_prefix="[task] ")
                                role_tasks_counter += 1
                self.display.v(
                    "{} roles added to the graph".format(role_number))

                # loop through the tasks
                self.display.v("Graphing tasks...")
                nb_tasks = 0
                for task_block in play.tasks:
                    nb_tasks = self._include_tasks_in_blocks(
                        current_play=play,
                        graph=play_subgraph,
                        parent_node_name=play_name,
                        parent_node_id=play_id,
                        block=task_block,
                        color=color,
                        current_counter=role_number + nb_pre_tasks,
                        play_vars=play_vars,
                        node_name_prefix="[task] ")

                # loop through the post_tasks
                self.display.v("Graphing post_tasks...")
                for post_task_block in play.post_tasks:
                    self._include_tasks_in_blocks(
                        current_play=play,
                        graph=play_subgraph,
                        parent_node_name=play_name,
                        parent_node_id=play_id,
                        block=post_task_block,
                        color=color,
                        current_counter=nb_tasks,
                        play_vars=play_vars,
                        node_name_prefix="[post_task] ")

            self.display.banner("Done graphing {}".format(play_name))
            self.display.display("")  # just an empty line
            # moving to the next play

    def render_graph(self):
        """
        Render the graph
        :return: The rendered file path
        :rtype: str
        """
        self.rendered_file_path = self.graph.render(
            cleanup=not self.options.save_dot_file,
            filename=self.options.output_filename)

        if self.options.save_dot_file:
            # add a .dot extension: render() doesn't add an extension itself
            final_name = self.options.output_filename + ".dot"
            os.rename(self.options.output_filename, final_name)
            self.display.display(
                "Graphviz dot file has been exported to {}".format(final_name))

        return self.rendered_file_path

    def post_process_svg(self):
        """
        Post process the rendered svg
        :return: The post processed file path
        :rtype: str
        """
        post_processor = PostProcessor(svg_path=self.rendered_file_path)

        post_processor.post_process(
            graph_representation=self.graph_representation)

        post_processor.write()

        self.display.display("The graph has been exported to {}".format(
            self.rendered_file_path))

        return self.rendered_file_path

    def _include_tasks_in_blocks(self, current_play, graph, parent_node_name, parent_node_id, block, color, current_counter, play_vars=None, node_name_prefix=""):
        """
        Recursively read all the tasks of the block and add it to the graph
        FIXME: This function needs some refactoring. Thinking of a BlockGrapher to handle this
        :param current_play:
        :type current_play: ansible.playbook.play.Play
        :param graph:
        :type graph:
        :param parent_node_name:
        :type parent_node_name: str
        :param parent_node_id:
        :type parent_node_id: str
        :param block:
        :type block: Union[Block,TaskInclude]
        :param color:
        :type color: str
        :param current_counter:
        :type current_counter: int
        :param play_vars:
        :type play_vars: dict
        :param node_name_prefix:
        :type node_name_prefix: str
        :return: the updated task counter
        :rtype: int
        """
        loop_counter = current_counter
        # loop through the tasks
        for counter, task_or_block in enumerate(block.block, 1):
            if isinstance(task_or_block, Block):
                # nested block: recurse with the same parent
                loop_counter = self._include_tasks_in_blocks(
                    current_play=current_play,
                    graph=graph,
                    parent_node_name=parent_node_name,
                    parent_node_id=parent_node_id,
                    block=task_or_block,
                    color=color,
                    current_counter=loop_counter,
                    play_vars=play_vars,
                    node_name_prefix=node_name_prefix)
            elif isinstance(
                    task_or_block, TaskInclude
            ):  # include, include_tasks, include_role are dynamic
                # So we need to process it explicitly because Ansible does it
                # during the execution of the playbook
                task_vars = self.variable_manager.get_vars(play=current_play,
                                                           task=task_or_block)

                if isinstance(task_or_block, IncludeRole):
                    self.display.v(
                        "An 'include_role' found. Including tasks from '{}'".
                        format(task_or_block.args["name"]))
                    # here we have an include_role. The class IncludeRole is a subclass of TaskInclude.
                    # We do this because the management of an include_role is different.
                    # See :func:`~ansible.playbook.included_file.IncludedFile.process_include_results` from line 155
                    my_blocks, _ = task_or_block.get_block_list(
                        play=current_play,
                        loader=self.data_loader,
                        variable_manager=self.variable_manager)
                else:
                    self.display.v(
                        "An 'include_tasks' found. Including tasks from '{}'".
                        format(task_or_block.get_name()))

                    templar = Templar(loader=self.data_loader,
                                      variables=task_vars)
                    try:
                        include_file = handle_include_path(
                            original_task=task_or_block,
                            loader=self.data_loader,
                            templar=templar)
                    except AnsibleUndefinedVariable as e:
                        # TODO: mark this task with some special shape or color
                        self.display.warning(
                            "Unable to translate the include task '{}' due to an undefined variable: {}. "
                            "Some variables are available only during the real execution."
                            .format(task_or_block.get_name(), str(e)))
                        loop_counter += 1
                        self._include_task(task_or_block, loop_counter,
                                           task_vars, graph, node_name_prefix,
                                           color, parent_node_id,
                                           parent_node_name)
                        continue

                    data = self.data_loader.load_from_file(include_file)
                    if data is None:
                        self.display.warning(
                            "file %s is empty and had no tasks to include" %
                            include_file)
                        continue
                    elif not isinstance(data, list):
                        raise AnsibleParserError(
                            "included task files must contain a list of tasks",
                            obj=data)

                    # get the blocks from the include_tasks
                    my_blocks = load_list_of_blocks(
                        data,
                        play=current_play,
                        variable_manager=self.variable_manager,
                        role=task_or_block._role,
                        loader=self.data_loader,
                        parent_block=task_or_block)

                # loop through the blocks inside the included tasks or role
                for b in my_blocks:
                    loop_counter = self._include_tasks_in_blocks(
                        current_play=current_play,
                        graph=graph,
                        parent_node_name=parent_node_name,
                        parent_node_id=parent_node_id,
                        block=b,
                        color=color,
                        current_counter=loop_counter,
                        play_vars=task_vars,
                        node_name_prefix=node_name_prefix)
            else:
                # check if this task comes from a role and we dont want to include role's task
                if has_role_parent(
                        task_or_block) and not self.options.include_role_tasks:
                    # skip role's task
                    self.display.vv(
                        "The task '{}' has a role as parent and include_role_tasks is false. "
                        "It will be skipped.".format(task_or_block.get_name()))
                    continue

                self._include_task(task_or_block=task_or_block,
                                   loop_counter=loop_counter + 1,
                                   play_vars=play_vars,
                                   graph=graph,
                                   node_name_prefix=node_name_prefix,
                                   color=color,
                                   parent_node_id=parent_node_id,
                                   parent_node_name=parent_node_name)
                loop_counter += 1

        return loop_counter

    def _include_task(self, task_or_block, loop_counter, play_vars, graph, node_name_prefix, color, parent_node_id, parent_node_name):
        """
        Include the task in the graph
        :return:
        :rtype:
        """
        self.display.vv("Adding the task '{}' to the graph".format(
            task_or_block.get_name()))
        # check if the task should be included
        tagged = ''
        if not task_or_block.evaluate_tags(only_tags=self.options.tags,
                                           skip_tags=self.options.skip_tags,
                                           all_vars=play_vars):
            self.display.vv(
                "The task '{}' should not be executed. It will be marked as NOT_TAGGED"
                .format(task_or_block.get_name()))
            tagged = NOT_TAGGED

        task_edge_label = str(loop_counter)
        if len(task_or_block.when) > 0:
            when = "".join(map(str, task_or_block.when))
            task_edge_label += " [when: " + when + "]"

        task_name = clean_name(node_name_prefix +
                               self.template(task_or_block.get_name(),
                                             play_vars))
        # get prefix id from node_name
        id_prefix = node_name_prefix.replace("[", "").replace("]", "").replace(" ", "_")
        task_id = id_prefix + str(uuid.uuid4()) + tagged
        edge_id = "edge_" + str(uuid.uuid4()) + tagged

        graph.node(task_name, shape="octagon", id=task_id)
        graph.edge(parent_node_name,
                   task_name,
                   label=task_edge_label,
                   color=color,
                   fontcolor=color,
                   style="bold",
                   id=edge_id)
        self.graph_representation.add_link(parent_node_id, edge_id)
        self.graph_representation.add_link(edge_id, task_id)