def _load_playbook_data(self, file_name, variable_manager):
    """Load a playbook file and parse its entries into self._entries.

    Plays are parsed via Play.load(); 'include' entries via
    PlaybookInclude.load(), whose child entries are flattened in.

    :param file_name: path (absolute or relative) to the playbook file
    :param variable_manager: passed through to Play/PlaybookInclude loaders
    :raises AnsibleParserError: if the playbook is not a list of dicts
    """
    # Resolve the playbook's directory; relative paths are joined onto
    # the current basedir so nested includes resolve correctly.
    if os.path.isabs(file_name):
        self._basedir = os.path.dirname(file_name)
    else:
        self._basedir = os.path.normpath(os.path.join(self._basedir, os.path.dirname(file_name)))

    # set the loaders basedir
    self._loader.set_basedir(self._basedir)

    # also add the basedir to the list of module directories
    push_basedir(self._basedir)

    ds = self._loader.load_from_file(os.path.basename(file_name))
    if not isinstance(ds, list):
        raise AnsibleParserError("playbooks must be a list of plays", obj=ds)

    # Parse the playbook entries. For plays, we simply parse them
    # using the Play() object, and includes are parsed using the
    # PlaybookInclude() object
    for entry in ds:
        if not isinstance(entry, dict):
            raise AnsibleParserError("playbook entries must be either a valid play or an include statement", obj=entry)

        if 'include' in entry:
            # Included playbooks contribute their already-parsed entries.
            pb = PlaybookInclude.load(entry, basedir=self._basedir, variable_manager=variable_manager, loader=self._loader)
            self._entries.extend(pb._entries)
        else:
            entry_obj = Play.load(entry, variable_manager=variable_manager, loader=self._loader)
            self._entries.append(entry_obj)
def test_play_with_post_tasks(self):
    """A play that defines post_tasks should load without error."""
    play_ds = {
        'name': "test play",
        'hosts': ['foo'],
        'gather_facts': False,
        'post_tasks': [{'action': 'shell echo "hello world"'}],
    }
    p = Play.load(play_ds)
def _load_playbook_data(self, file_name):
    """Load a playbook file and parse its entries into self._entries.

    Unlike sibling variants, this one takes no variable_manager and
    passes full file_name (not just the basename) to the loader.

    :param file_name: path to the playbook file
    :raises AnsibleParserError: if the playbook is not a list of dicts
    """
    # add the base directory of the file to the data loader,
    # so that it knows where to find relatively pathed files
    basedir = os.path.dirname(file_name)
    self._loader.set_basedir(basedir)

    # also add the basedir to the list of module directories
    push_basedir(basedir)

    ds = self._loader.load_from_file(file_name)
    if not isinstance(ds, list):
        raise AnsibleParserError("playbooks must be a list of plays", obj=ds)

    # Parse the playbook entries. For plays, we simply parse them
    # using the Play() object, and includes are parsed using the
    # PlaybookInclude() object
    for entry in ds:
        if not isinstance(entry, dict):
            raise AnsibleParserError("playbook entries must be either a valid play or an include statement", obj=entry)

        if 'include' in entry:
            entry_obj = PlaybookInclude.load(entry, loader=self._loader)
        else:
            entry_obj = Play.load(entry, loader=self._loader)

        # NOTE(review): here the PlaybookInclude object itself is appended
        # (not flattened) — confirm callers expect that, as other variants
        # extend with pb._entries instead.
        self._entries.append(entry_obj)
def test_play_with_post_tasks(self):
    """Loading a play containing post_tasks must not raise."""
    source = {
        'name': "test play",
        'hosts': ['foo'],
        'gather_facts': False,
        'post_tasks': [{'action': 'shell echo "hello world"'}],
    }
    p = Play.load(source)
def test_play_with_user_conflict(self):
    """The legacy 'user' keyword should end up in remote_user."""
    source = {
        'name': "test play",
        'hosts': ['foo'],
        'user': "******",
        'gather_facts': False,
    }
    p = Play.load(source)
    self.assertEqual(p.remote_user, "testing")
def test_play_with_remote_user():
    """The legacy 'user' keyword should end up in remote_user."""
    source = {
        'name': "test play",
        'hosts': ['foo'],
        'user': "******",
        'gather_facts': False,
    }
    p = Play.load(source)
    assert p.remote_user == "testing"
def _load_playbook_data(self, file_name, variable_manager, vars=None):
    """Load a playbook file and parse its entries into self._entries.

    Saves and restores the loader's basedir around parsing so a caught
    AnsibleParserError leaves the loader in its previous state.

    :param file_name: path (absolute or relative) to the playbook file
    :param variable_manager: passed through to Play/PlaybookInclude loaders
    :param vars: extra vars forwarded to Play.load()
    :raises AnsibleParserError: on empty/invalid playbooks or encoding errors
    """
    if os.path.isabs(file_name):
        self._basedir = os.path.dirname(file_name)
    else:
        self._basedir = os.path.normpath(os.path.join(self._basedir, os.path.dirname(file_name)))

    # set the loaders basedir
    cur_basedir = self._loader.get_basedir()
    self._loader.set_basedir(self._basedir)

    self._file_name = file_name

    # dynamically load any plugins from the playbook directory
    for name, obj in get_all_plugin_loaders():
        if obj.subdir:
            plugin_path = os.path.join(self._basedir, obj.subdir)
            if os.path.isdir(plugin_path):
                obj.add_directory(plugin_path)

    try:
        ds = self._loader.load_from_file(os.path.basename(file_name))
    except UnicodeDecodeError as e:
        raise AnsibleParserError("Could not read playbook (%s) due to encoding issues: %s" % (file_name, to_native(e)))

    # check for errors and restore the basedir in case this error is caught and handled
    if not ds:
        self._loader.set_basedir(cur_basedir)
        raise AnsibleParserError("Empty playbook, nothing to do", obj=ds)
    elif not isinstance(ds, list):
        self._loader.set_basedir(cur_basedir)
        raise AnsibleParserError("A playbook must be a list of plays, got a %s instead" % type(ds), obj=ds)

    # Parse the playbook entries. For plays, we simply parse them
    # using the Play() object, and includes are parsed using the
    # PlaybookInclude() object
    for entry in ds:
        if not isinstance(entry, dict):
            # restore the basedir in case this error is caught and handled
            self._loader.set_basedir(cur_basedir)
            raise AnsibleParserError("playbook entries must be either a valid play or an include statement", obj=entry)

        if any(action in entry for action in ('import_playbook', 'include')):
            if 'include' in entry:
                display.deprecated("'include' for playbook includes. You should use 'import_playbook' instead", version="2.12")
            # PlaybookInclude.load() returns None when the include's
            # conditional failed; in that case the include is skipped.
            pb = PlaybookInclude.load(entry, basedir=self._basedir, variable_manager=variable_manager, loader=self._loader)
            if pb is not None:
                self._entries.extend(pb._entries)
            else:
                which = entry.get('import_playbook', entry.get('include', entry))
                display.display("skipping playbook '%s' due to conditional test failure" % which, color=C.COLOR_SKIP)
        else:
            entry_obj = Play.load(entry, variable_manager=variable_manager, loader=self._loader, vars=vars)
            self._entries.append(entry_obj)

    # we're done, so restore the old basedir in the loader
    self._loader.set_basedir(cur_basedir)
def _load_playbook_data(self, file_name, variable_manager, vars=None):
    """Load a playbook file and parse its entries into self._entries.

    Modern variant: distinguishes a None document from an empty list,
    resolves import_playbook aliases via C._ACTION_IMPORT_PLAYBOOK, and
    restores the loader basedir on every error path and on success.

    :param file_name: path (absolute or relative) to the playbook file
    :param variable_manager: passed through to Play/PlaybookInclude loaders
    :param vars: extra vars forwarded to Play.load()
    :raises AnsibleParserError: on empty/invalid playbooks or encoding errors
    """
    if os.path.isabs(file_name):
        self._basedir = os.path.dirname(file_name)
    else:
        self._basedir = os.path.normpath(os.path.join(self._basedir, os.path.dirname(file_name)))

    # set the loaders basedir
    cur_basedir = self._loader.get_basedir()
    self._loader.set_basedir(self._basedir)

    add_all_plugin_dirs(self._basedir)

    self._file_name = file_name

    try:
        ds = self._loader.load_from_file(os.path.basename(file_name))
    except UnicodeDecodeError as e:
        raise AnsibleParserError("Could not read playbook (%s) due to encoding issues: %s" % (file_name, to_native(e)))

    # check for errors and restore the basedir in case this error is caught and handled
    if ds is None:
        self._loader.set_basedir(cur_basedir)
        raise AnsibleParserError("Empty playbook, nothing to do", obj=ds)
    elif not isinstance(ds, list):
        self._loader.set_basedir(cur_basedir)
        raise AnsibleParserError("A playbook must be a list of plays, got a %s instead" % type(ds), obj=ds)
    elif not ds:
        self._loader.set_basedir(cur_basedir)
        raise AnsibleParserError("A playbook must contain at least one play")

    # Parse the playbook entries. For plays, we simply parse them
    # using the Play() object, and includes are parsed using the
    # PlaybookInclude() object
    for entry in ds:
        if not isinstance(entry, dict):
            # restore the basedir in case this error is caught and handled
            self._loader.set_basedir(cur_basedir)
            raise AnsibleParserError("playbook entries must be either valid plays or 'import_playbook' statements", obj=entry)

        if any(action in entry for action in C._ACTION_IMPORT_PLAYBOOK):
            # PlaybookInclude.load() returns None when the import's
            # conditional failed; report which playbook was skipped.
            pb = PlaybookInclude.load(entry, basedir=self._basedir, variable_manager=variable_manager, loader=self._loader)
            if pb is not None:
                self._entries.extend(pb._entries)
            else:
                which = entry
                for k in C._ACTION_IMPORT_PLAYBOOK:
                    if k in entry:
                        which = entry[k]
                        break
                display.display("skipping playbook '%s' due to conditional test failure" % which, color=C.COLOR_SKIP)
        else:
            entry_obj = Play.load(entry, variable_manager=variable_manager, loader=self._loader, vars=vars)
            self._entries.append(entry_obj)

    # we're done, so restore the old basedir in the loader
    self._loader.set_basedir(cur_basedir)
def test_play_with_vars_files(value, expected):
    """vars_files is stored verbatim; get_vars_files() normalises it."""
    source = dict(
        name='Play with vars_files',
        hosts=['testhost1'],
        vars_files=value,
    )
    play = Play.load(source)
    assert play.vars_files == value
    assert play.get_vars_files() == expected
def _load_playbook_data(self, file_name, variable_manager):
    """Load a playbook file and parse its entries into self._entries.

    Accepts both 'include' (deprecated) and 'import_playbook' entries;
    restores the loader basedir on error paths and on success.

    :param file_name: path (absolute or relative) to the playbook file
    :param variable_manager: passed through to Play/PlaybookInclude loaders
    :raises AnsibleParserError: if the playbook is not a list of dicts
    """
    if os.path.isabs(file_name):
        self._basedir = os.path.dirname(file_name)
    else:
        self._basedir = os.path.normpath(
            os.path.join(self._basedir, os.path.dirname(file_name)))

    # set the loaders basedir
    cur_basedir = self._loader.get_basedir()
    self._loader.set_basedir(self._basedir)

    self._file_name = file_name

    # dynamically load any plugins from the playbook directory
    for name, obj in get_all_plugin_loaders():
        if obj.subdir:
            plugin_path = os.path.join(self._basedir, obj.subdir)
            if os.path.isdir(plugin_path):
                obj.add_directory(plugin_path)

    ds = self._loader.load_from_file(os.path.basename(file_name))
    if not isinstance(ds, list):
        # restore the basedir in case this error is caught and handled
        self._loader.set_basedir(cur_basedir)
        raise AnsibleParserError("playbooks must be a list of plays", obj=ds)

    # Parse the playbook entries. For plays, we simply parse them
    # using the Play() object, and includes are parsed using the
    # PlaybookInclude() object
    for entry in ds:
        if not isinstance(entry, dict):
            # restore the basedir in case this error is caught and handled
            self._loader.set_basedir(cur_basedir)
            raise AnsibleParserError(
                "playbook entries must be either a valid play or an 'import_playbook' statement", obj=entry)

        if 'include' in entry or 'import_playbook' in entry:
            if 'include' in entry:
                display.deprecated(
                    "You should use 'import_playbook' instead of 'include' for playbook includes"
                )
            pb = PlaybookInclude.load(entry, basedir=self._basedir, variable_manager=variable_manager, loader=self._loader)
            self._entries.extend(pb._entries)
        else:
            entry_obj = Play.load(entry, variable_manager=variable_manager, loader=self._loader)
            self._entries.append(entry_obj)

    # we're done, so restore the old basedir in the loader
    self._loader.set_basedir(cur_basedir)
def test_basic_play(self):
    """A play with the common top-level directives should load."""
    source = {
        'name': "test play",
        'hosts': ['foo'],
        'gather_facts': False,
        'connection': 'local',
        'remote_user': "******",
        'become': True,
        'become_user': "******",
    }
    p = Play.load(source)
def __init__(self):
    """Build inventory, variables and a Templar from the project environment.

    Reads PROJECT_ENVIRONMENT_FILES_PATH from the environment, loads the
    inventory sources configured in /etc/ansible/ansible.cfg, and caches a
    fully-resolved variable dict plus a template renderer on the instance.

    NOTE(review): temporarily chdir()s into the project directory — not
    safe if other threads depend on the process CWD.
    """
    initial_dir = os.getcwd()
    # NOTE(review): os.environ.get() returns None if the variable is unset,
    # which would make os.path.join raise — assumes the env var is set.
    ansible_basedir = os.path.join(
        os.environ.get("PROJECT_ENVIRONMENT_FILES_PATH"), "ansible")

    # Move to project directory
    os.chdir(os.environ.get("PROJECT_ENVIRONMENT_FILES_PATH"))

    # Load list of inventories from config
    config = ConfigManager('/etc/ansible/ansible.cfg')
    sources = config.data.get_setting('DEFAULT_HOST_LIST').value

    loader = CustomLoader()
    loader.set_basedir(ansible_basedir)

    # load the inventory, set the basic playbook directory
    self._inventory = CustomInventoryManager(loader=loader, sources=sources)
    var_manager = VariableManager(loader=loader, inventory=self._inventory)
    # A synthetic all-hosts play is needed so get_vars() can resolve
    # play-scoped variables.
    play = Play.load(dict(hosts=['all']), loader=loader, variable_manager=var_manager)

    # Move back to directory of origin
    os.chdir(initial_dir)

    # Pick the first host of the 'control' group (if any) as the context
    # host for variable resolution.
    control_host = None
    if 'control' in self._inventory.groups:
        control_group = self._inventory.groups['control']
        if len(control_group.get_hosts()) > 0:
            control_host = control_group.get_hosts()[0]

    # Hostvars
    hostvars = {}
    for host in self._inventory.get_hosts():
        hostvars[host.name] = host.vars

    # make sure we load all magic variables on top of the global variables
    self._vars = combine_vars(
        var_manager.get_vars(play=play, task=Task(), host=control_host),
        {
            'hostvars': hostvars,
            'env': os.environ
        })

    # create the template renderer
    self._templar = Templar(loader=loader, variables=self._vars)

    # setup some easy variables that we use a lot
    self._vars['control_ip'] = self.get_var(
        "hostvars[groups['control'][0]]['ansible_host']")
    self._vars['edge_ip'] = self.get_var(
        "hostvars[groups['edge'][0]]['ansible_host']")
    self._vars['monitor_ip'] = self.get_var(
        "hostvars[groups['monitor'][0]]['ansible_host']")
def test_play_compile(self):
    """Compiling a one-task play should yield exactly one Task."""
    source = {
        'name': "test play",
        'hosts': ['foo'],
        'gather_facts': False,
        'tasks': [{'action': 'shell echo "hello world"'}],
    }
    p = Play.load(source)
    compiled = p.compile()
    self.assertEqual(len(compiled), 1)
    self.assertIsInstance(compiled[0], Task)
def test_play_with_tasks():
    """Play tasks are wrapped in a single Block that contains tasks."""
    source = {
        'name': "test play",
        'hosts': ['foo'],
        'gather_facts': False,
        'tasks': [{'action': 'shell echo "hello world"'}],
    }
    p = Play.load(source)
    assert len(p.tasks) == 1
    assert isinstance(p.tasks[0], Block)
    assert p.tasks[0].has_tasks() is True
def _load_playbook_data(self, file_name, variable_manager, vars=None):
    """Load a playbook file and parse its entries into self._entries.

    2.8-era variant: deprecates 'include' in favour of 'import_playbook'
    and restores the loader basedir on error paths and on success.

    :param file_name: path (absolute or relative) to the playbook file
    :param variable_manager: passed through to Play/PlaybookInclude loaders
    :param vars: extra vars forwarded to Play.load()
    :raises AnsibleParserError: on invalid playbooks or encoding errors
    """
    if os.path.isabs(file_name):
        self._basedir = os.path.dirname(file_name)
    else:
        self._basedir = os.path.normpath(os.path.join(self._basedir, os.path.dirname(file_name)))

    # set the loaders basedir
    cur_basedir = self._loader.get_basedir()
    self._loader.set_basedir(self._basedir)

    self._file_name = file_name

    # dynamically load any plugins from the playbook directory
    for name, obj in get_all_plugin_loaders():
        if obj.subdir:
            plugin_path = os.path.join(self._basedir, obj.subdir)
            if os.path.isdir(plugin_path):
                obj.add_directory(plugin_path)

    try:
        ds = self._loader.load_from_file(os.path.basename(file_name))
    except UnicodeDecodeError as e:
        raise AnsibleParserError("Could not read playbook (%s) due to encoding issues: %s" % (file_name, to_native(e)))

    if not isinstance(ds, list):
        # restore the basedir in case this error is caught and handled
        self._loader.set_basedir(cur_basedir)
        raise AnsibleParserError("playbooks must be a list of plays", obj=ds)

    # Parse the playbook entries. For plays, we simply parse them
    # using the Play() object, and includes are parsed using the
    # PlaybookInclude() object
    for entry in ds:
        if not isinstance(entry, dict):
            # restore the basedir in case this error is caught and handled
            self._loader.set_basedir(cur_basedir)
            raise AnsibleParserError("playbook entries must be either a valid play or an include statement", obj=entry)

        if any(action in entry for action in ('import_playbook', 'include')):
            if 'include' in entry:
                display.deprecated("'include' for playbook includes. You should use 'import_playbook' instead", version="2.8")
            # PlaybookInclude.load() returns None when the include's
            # conditional failed; in that case the include is skipped.
            pb = PlaybookInclude.load(entry, basedir=self._basedir, variable_manager=variable_manager, loader=self._loader)
            if pb is not None:
                self._entries.extend(pb._entries)
            else:
                which = entry.get('import_playbook', entry.get('include', entry))
                display.display("skipping playbook '%s' due to conditional test failure" % which, color=C.COLOR_SKIP)
        else:
            entry_obj = Play.load(entry, variable_manager=variable_manager, loader=self._loader, vars=vars)
            self._entries.append(entry_obj)

    # we're done, so restore the old basedir in the loader
    self._loader.set_basedir(cur_basedir)
def test_variable_manager_role_vars_dependencies(self):
    ''' Tests vars from role dependencies with duplicate dependencies. '''
    mock_inventory = MagicMock()

    fake_loader = DictDataLoader({
        # role common-role
        '/etc/ansible/roles/common-role/tasks/main.yml': """
        - debug: msg="{{role_var}}"
        """,
        # We do not need allow_duplicates: yes for this role
        # because eliminating duplicates is done by the execution
        # strategy, which we do not test here.

        # role role1
        '/etc/ansible/roles/role1/vars/main.yml': """
        role_var: "role_var_from_role1"
        """,
        '/etc/ansible/roles/role1/meta/main.yml': """
        dependencies:
          - { role: common-role }
        """,

        # role role2
        '/etc/ansible/roles/role2/vars/main.yml': """
        role_var: "role_var_from_role2"
        """,
        '/etc/ansible/roles/role2/meta/main.yml': """
        dependencies:
          - { role: common-role }
        """,
    })

    v = VariableManager(loader=fake_loader, inventory=mock_inventory)
    # Pre-seed an empty fact cache so get_vars() does not hit real facts.
    v._fact_cache = defaultdict(dict)

    play1 = Play.load(dict(
        hosts=['all'],
        roles=['role1', 'role2'],
    ), loader=fake_loader, variable_manager=v)

    # The task defined by common-role exists twice because role1
    # and role2 depend on common-role. Check that the tasks see
    # different values of role_var.
    blocks = play1.compile()
    task = blocks[1].block[0]
    res = v.get_vars(play=play1, task=task)
    self.assertEqual(res['role_var'], 'role_var_from_role1')

    task = blocks[2].block[0]
    res = v.get_vars(play=play1, task=task)
    self.assertEqual(res['role_var'], 'role_var_from_role2')
def test_play_compile(self):
    """Compiling a one-task play should yield exactly one Block."""
    source = {
        'name': "test play",
        'hosts': ['foo'],
        'gather_facts': False,
        'tasks': [{'action': 'shell echo "hello world"'}],
    }
    p = Play.load(source)
    compiled = p.compile()
    self.assertEqual(len(compiled), 1)
    self.assertIsInstance(compiled[0], Block)
def test_play_context(mocker, parser, reset_cli_args):
    """Exercise PlayContext defaults, play overrides and task overrides.

    Covers three layers: CLI-derived defaults, values inherited from a
    play, and per-task overrides via set_task_and_variable_override().
    """
    (options, args) = parser.parse_args(['-vv', '--check'])
    options.args = args
    context._init_global_context(options)
    play = Play.load({})
    play_context = PlayContext(play=play)

    # CLI-derived defaults: -vv sets verbosity, --check enables check mode.
    assert play_context.remote_addr is None
    assert play_context.remote_user is None
    assert play_context.password == ''
    assert play_context.private_key_file == C.DEFAULT_PRIVATE_KEY_FILE
    assert play_context.timeout == C.DEFAULT_TIMEOUT
    assert play_context.verbosity == 2
    assert play_context.check_mode is True

    # Values inherited from the play object.
    mock_play = mocker.MagicMock()
    mock_play.force_handlers = True

    play_context = PlayContext(play=mock_play)
    assert play_context.force_handlers is True

    # Per-task override: task settings and inventory variables win.
    mock_task = mocker.MagicMock()
    mock_task.connection = 'mocktask'
    mock_task.remote_user = '******'
    mock_task.port = 1234
    mock_task.no_log = True
    mock_task.become = True
    mock_task.become_method = 'mocktask'
    mock_task.become_user = '******'
    mock_task.become_pass = '******'
    mock_task._local_action = False
    mock_task.delegate_to = None

    all_vars = dict(
        ansible_connection='mock_inventory',
        ansible_ssh_port=4321,
    )

    mock_templar = mocker.MagicMock()

    play_context = PlayContext()
    play_context = play_context.set_task_and_variable_override(
        task=mock_task, variables=all_vars, templar=mock_templar)
    assert play_context.connection == 'mock_inventory'
    assert play_context.remote_user == 'mocktask'
    assert play_context.no_log is True

    # Flipping the task's no_log must propagate on a fresh override.
    mock_task.no_log = False
    play_context = play_context.set_task_and_variable_override(
        task=mock_task, variables=all_vars, templar=mock_templar)
    assert play_context.no_log is False
def test_play_compile(self):
    """One explicit block plus three implicit meta blocks compile to four."""
    source = {
        'name': "test play",
        'hosts': ['foo'],
        'gather_facts': False,
        'tasks': [{'action': 'shell echo "hello world"'}],
    }
    p = Play.load(source)
    compiled = p.compile()
    # with a single block, there will still be three
    # implicit meta flush_handler blocks inserted
    self.assertEqual(len(compiled), 4)
def _load_playbook_data(self, file_name, variable_manager):
    """Load a playbook file and parse its entries into self._entries.

    Variant that skips an include when its conditional test fails
    (PlaybookInclude.load() returns None) and reports the skip.

    :param file_name: path (absolute or relative) to the playbook file
    :param variable_manager: passed through to Play/PlaybookInclude loaders
    :raises AnsibleParserError: if the playbook is not a list of dicts
    """
    if os.path.isabs(file_name):
        self._basedir = os.path.dirname(file_name)
    else:
        self._basedir = os.path.normpath(
            os.path.join(self._basedir, os.path.dirname(file_name)))

    # set the loaders basedir
    self._loader.set_basedir(self._basedir)

    self._file_name = file_name

    # dynamically load any plugins from the playbook directory
    for name, obj in get_all_plugin_loaders():
        if obj.subdir:
            plugin_path = os.path.join(self._basedir, obj.subdir)
            if os.path.isdir(plugin_path):
                obj.add_directory(plugin_path)

    ds = self._loader.load_from_file(os.path.basename(file_name))
    if not isinstance(ds, list):
        raise AnsibleParserError("playbooks must be a list of plays", obj=ds)

    # Parse the playbook entries. For plays, we simply parse them
    # using the Play() object, and includes are parsed using the
    # PlaybookInclude() object
    for entry in ds:
        if not isinstance(entry, dict):
            raise AnsibleParserError(
                "playbook entries must be either a valid play or an include statement", obj=entry)

        if 'include' in entry:
            pb = PlaybookInclude.load(entry, basedir=self._basedir, variable_manager=variable_manager, loader=self._loader)
            if pb is not None:
                self._entries.extend(pb._entries)
            else:
                display.display(
                    "skipping playbook include '%s' due to conditional test failure" % entry.get('include', entry),
                    color=C.COLOR_SKIP)
        else:
            entry_obj = Play.load(entry, variable_manager=variable_manager, loader=self._loader)
            self._entries.append(entry_obj)
def play_from_code(self, code):
    """Support one task, list of tasks, or whole play without hosts."""
    raw = yaml.safe_load(code)
    data = raw
    # A mapping without 'tasks' is treated as a single task; promote it
    # to a one-element list so the next step wraps it into a play.
    if isinstance(data, dict) and 'tasks' not in data:
        data = [data]
    # A list of tasks becomes the 'tasks' section of a synthetic play.
    if isinstance(data, list):
        data = {'tasks': data}
    if not isinstance(data, dict):
        raise UnknownInput(
            "Expected task, list of tasks, or play, got {}".format(
                type(raw)))
    data.setdefault('hosts', 'localhost')
    return Play.load(data)
def shell_cli(hosts: list, **kwargs):
    """Run a shell/module command on the given hosts via the Ansible API.

    :param hosts: inventory sources (passed to InventoryManager) and the
        play's target hosts
    :param kwargs: expects 'remote_user', 'module' and 'shell_cmd'
    :return: the TaskQueueManager run result code

    NOTE(review): the local Ansible tmpdir is removed only on success;
    on an exception during tqm.run() it is left behind — confirm intent.
    """
    Options = namedtuple('Options', [
        'connection', 'module_path', 'forks', 'become', 'become_method',
        'become_user', 'check', 'diff'
    ])
    options = Options(connection='ssh', module_path=[], forks=10,
                      become=None, become_method=None, become_user=None,
                      check=False, diff=False)

    loader = DataLoader()
    passwords = dict(vault_pass='******')
    results_callback = ResultCallback()

    inventory = InventoryManager(loader=loader, sources=hosts)
    variable_manager = VariableManager(loader=loader, inventory=inventory)

    play_source = dict(name="Shell Exce",
                       hosts=hosts,
                       remote_user=kwargs.get('remote_user'),
                       gather_facts='no',
                       tasks=[
                           dict(action=dict(module=kwargs.get('module'),
                                            args=kwargs.get('shell_cmd'))),
                       ])

    # FIX: Play.load is a class-level factory; there is no need to build a
    # throwaway Play() instance first and call load() on it.
    play = Play.load(play_source,
                     variable_manager=variable_manager,
                     loader=loader)

    tqm = None
    try:
        tqm = TaskQueueManager(
            inventory=inventory,
            variable_manager=variable_manager,
            loader=loader,
            options=options,
            passwords=passwords,
            stdout_callback=results_callback,
        )
        rv = tqm.run(play)
    finally:
        if tqm is not None:
            tqm.cleanup()

    # Remove ansible tmpdir
    tmp_dir = C.DEFAULT_LOCAL_TMP
    shutil.rmtree(tmp_dir)
    return rv
def test_play_with_roles(self):
    """A play referencing an on-disk role should load and compile."""
    fake_loader = DictDataLoader({
        '/etc/ansible/roles/foo/tasks.yml': """
        - name: role task
          shell: echo "hello world"
        """,
    })

    source = {
        'name': "test play",
        'hosts': ['foo'],
        'gather_facts': False,
        'roles': ['foo'],
    }
    p = Play.load(source, loader=fake_loader)
    tasks = p.compile()
def _load_playbook_data(self, file_name, variable_manager):
    """Load a playbook file and parse its entries into self._entries.

    Saves/restores the loader basedir around parsing, skips includes whose
    conditional failed, and loads plugins from the playbook directory.

    :param file_name: path (absolute or relative) to the playbook file
    :param variable_manager: passed through to Play/PlaybookInclude loaders
    :raises AnsibleParserError: if the playbook is not a list of dicts
    """
    if os.path.isabs(file_name):
        self._basedir = os.path.dirname(file_name)
    else:
        self._basedir = os.path.normpath(os.path.join(self._basedir, os.path.dirname(file_name)))

    # set the loaders basedir
    cur_basedir = self._loader.get_basedir()
    self._loader.set_basedir(self._basedir)

    self._file_name = file_name

    # dynamically load any plugins from the playbook directory
    for name, obj in get_all_plugin_loaders():
        if obj.subdir:
            plugin_path = os.path.join(self._basedir, obj.subdir)
            if os.path.isdir(plugin_path):
                obj.add_directory(plugin_path)

    ds = self._loader.load_from_file(os.path.basename(file_name))
    if not isinstance(ds, list):
        # restore the basedir in case this error is caught and handled
        self._loader.set_basedir(cur_basedir)
        raise AnsibleParserError("playbooks must be a list of plays", obj=ds)

    # Parse the playbook entries. For plays, we simply parse them
    # using the Play() object, and includes are parsed using the
    # PlaybookInclude() object
    for entry in ds:
        if not isinstance(entry, dict):
            # restore the basedir in case this error is caught and handled
            self._loader.set_basedir(cur_basedir)
            raise AnsibleParserError("playbook entries must be either a valid play or an include statement", obj=entry)

        if 'include' in entry:
            # PlaybookInclude.load() returns None when the include's
            # conditional failed; in that case the include is skipped.
            pb = PlaybookInclude.load(entry, basedir=self._basedir, variable_manager=variable_manager, loader=self._loader)
            if pb is not None:
                self._entries.extend(pb._entries)
            else:
                display.display("skipping playbook include '%s' due to conditional test failure" % entry.get('include', entry), color=C.COLOR_SKIP)
        else:
            entry_obj = Play.load(entry, variable_manager=variable_manager, loader=self._loader)
            self._entries.append(entry_obj)

    # we're done, so restore the old basedir in the loader
    self._loader.set_basedir(cur_basedir)
def create_action_module(name, args=None, task_vars=None):
    """Construct a LintActionModule wired to a minimal synthetic play.

    :param name: action name, loaded as a local_action task
    :param args: optional args dict for the task
    :param task_vars: accepted but unused here — NOTE(review): confirm
        whether callers expect it to be forwarded to the module
    :return: the configured LintActionModule with display silenced
    """
    # Minimal empty play provides the loader and block context.
    play = Play.load(dict())
    play_context = PlayContext(play=play)
    module = LintActionModule(
        task=Task.load(data=dict(local_action=name, args=args), block=Block(play=play)),
        connection=Connection(play_context, new_stdin=False),
        play_context=play_context,
        loader=play._loader,
        templar=Templar(play._loader),
        shared_loader_obj=None)
    # Silence output during linting.
    module.use_display(NullDisplay())
    return module
def test_collection_static_warning(capsys):
    """A templated collections value is kept verbatim and warned about.

    Collection names are not templatable; the loader should store the raw
    string and emit a warning on stderr.
    """
    collection_name = "foo.{{bar}}"
    p = Play.load({
        'name': "test play",
        'hosts': ['foo'],
        'gather_facts': False,
        'connection': 'local',
        'collections': collection_name,
    })
    assert collection_name in p.collections

    captured_out, captured_err = capsys.readouterr()
    assert '[WARNING]: "collections" is not templatable, but we found: %s' % collection_name in captured_err
    assert '' == captured_out
def test_play_with_roles(self):
    """Compiling a play with a role should produce blocks."""
    fake_loader = DictDataLoader({
        '/etc/ansible/roles/foo/tasks.yml': """
        - name: role task
          shell: echo "hello world"
        """,
    })

    mock_var_manager = MagicMock()
    mock_var_manager.get_vars.return_value = {}

    source = {
        'name': "test play",
        'hosts': ['foo'],
        'gather_facts': False,
        'roles': ['foo'],
    }
    p = Play.load(source, loader=fake_loader, variable_manager=mock_var_manager)
    blocks = p.compile()
def _run_sub_task(
        self, task_data: Dict[str, Any] = None, hostname: str = None,
        action_name: str = 'normal'
) -> Dict[str, Any]:
    """Run a synthetic sub-task against a single host and return its result.

    Builds task vars and a task from task_data, resolves the connection
    plugin for the host, then executes the named action plugin.

    :param task_data: raw task definition dict for the sub-task
    :param hostname: inventory hostname to target
    :param action_name: action plugin to run (default: 'normal')
    :return: the action plugin's result dict
    """
    task_vars, task, host = self._make_module_task_vars(task_data=task_data, hostname=hostname)
    try:
        ansible_host = host.address  # get_name()#task_vars['ansible_host']#:'localhost'
        connection_name = task_vars.get('ansible_connection')  # :'local'
        # Fall back to the 'local' connection for localhost-like hosts
        # that do not declare ansible_connection.
        if not connection_name and is_localhost(host):
            self._display.warning(f"supposing ansible_collection=local for {host}")
            connection_name = "local"
        # TODO What about become and username
        play: Play = Play.load({
            "hosts": hostname},
            variable_manager=task.get_variable_manager(),
            loader=task.get_loader())
        play_context = PlayContext(play=play)
        if not play_context.remote_addr:
            play_context.remote_addr = ansible_host
        # cmd_task_vars['ansible_delegated_vars'][spire_server_host][]
        # ...'ansible_host': 'localhost'
        # ...'inventory_hostname':'spire_server'
        # ...'inventory_hostname_short':'spire_server'
        connection: ConnectionBase = self._shared_loader_obj.connection_loader.get(connection_name, play_context, os.devnull)
        normal_action = self._shared_loader_obj.action_loader.get(action_name,
                                                                  task=task,
                                                                  connection=connection,
                                                                  play_context=play_context,
                                                                  loader=task.get_loader(),
                                                                  templar=self._templar,
                                                                  shared_loader_obj=self._shared_loader_obj)
        sub_task_ret: Dict[str, Any] = normal_action.run(task_vars=task_vars)
    finally:
        # self._task = original_task
        pass
    return sub_task_ret
def run_play(self, name: str, hosts: str, tasks=None, roles=None, gather_facts: bool = None, serial=None, **kwargs):
    """Run a play on hosts and return the collected results.

    :param name: play name
    :param hosts: host pattern to target
    :param tasks: optional list of task dicts (defaults to empty)
    :param roles: optional list of role names/dicts (defaults to empty)
    :param gather_facts: force fact gathering; facts are also gathered
        whenever roles are supplied
    :param serial: optional 'serial' play keyword value
    :param kwargs: extra play keywords merged into the play definition
    :return: self.results after the TaskQueueManager run
    """
    # FIX: avoid mutable default arguments ([]), which are shared across
    # calls and would accumulate state.
    tasks = [] if tasks is None else tasks
    roles = [] if roles is None else roles

    play_dict = dict(name=name, hosts=hosts, tasks=tasks, roles=roles)
    # Roles frequently rely on facts, so gather when roles are present
    # or when explicitly requested.
    if roles or gather_facts:
        play_dict['gather_facts'] = 'yes'
    else:
        play_dict['gather_facts'] = 'no'
    if serial:
        play_dict['serial'] = serial
    play_dict.update(kwargs)

    play = Play.load(play_dict, variable_manager=self.variable_manager, loader=self.loader)
    self.tqm.run(play)
    return self.results
def _load_playbook_data(self, file_name, variable_manager):
    """Load a playbook file and parse its entries into self._entries.

    Includes are appended as PlaybookInclude objects rather than being
    flattened into their child entries.

    :param file_name: path (absolute or relative) to the playbook file
    :param variable_manager: passed through to Play/PlaybookInclude loaders
    :raises AnsibleParserError: if the playbook is not a list of dicts
    """
    if os.path.isabs(file_name):
        self._basedir = os.path.dirname(file_name)
    else:
        self._basedir = os.path.normpath(
            os.path.join(self._basedir, os.path.dirname(file_name)))

    # set the loaders basedir
    self._loader.set_basedir(self._basedir)

    # also add the basedir to the list of module directories
    push_basedir(self._basedir)

    ds = self._loader.load_from_file(os.path.basename(file_name))
    if not isinstance(ds, list):
        raise AnsibleParserError("playbooks must be a list of plays", obj=ds)

    # Parse the playbook entries. For plays, we simply parse them
    # using the Play() object, and includes are parsed using the
    # PlaybookInclude() object
    for entry in ds:
        if not isinstance(entry, dict):
            raise AnsibleParserError(
                "playbook entries must be either a valid play or an include statement", obj=entry)

        if 'include' in entry:
            entry_obj = PlaybookInclude.load(
                entry, variable_manager=variable_manager, loader=self._loader)
        else:
            entry_obj = Play.load(entry, variable_manager=variable_manager, loader=self._loader)

        self._entries.append(entry_obj)
def _make_module_task_vars(
        self, task_data: Dict[str, Any], hostname: str = None
) -> Tuple[Dict[str, Any], Task, Host]:
    """Build task vars, a Task and a Host for a synthetic one-task play.

    Reuses the variable manager and loader from the currently executing
    task so the sub-task resolves variables in the same context.

    :param task_data: raw task definition dict
    :param hostname: inventory hostname to target, or None for no host
    :return: (task_vars, task, host) tuple
    """
    original_task: module_task.Task = self._task
    variable_manager: VariableManager = original_task.get_variable_manager()
    data_loader = original_task.get_loader()
    inventory: InventoryManager = variable_manager._inventory
    host = None if not hostname else inventory.get_host(hostname)
    # Wrap the single task in a play so Play.load() parses it into a
    # proper Task object with block/play context.
    play_data = {
        "hosts": hostname,
        "tasks": [task_data]
    }
    play: Play = Play.load(play_data,
                           variable_manager=variable_manager,
                           loader=data_loader,
                           vars=None)
    # task: Task = module_task.Task.load(data=task_data,
    #                                    variable_manager=variable_manager,
    #                                    loader=data_loader)
    # get_tasks() returns a list of task lists; take the first task of
    # the first block.
    task = play.get_tasks()[0][0]
    task_vars = variable_manager.get_vars(play=play, task=task, host=host)
    return task_vars, task, host
def createplay(self, filename, hosts, taskid, username, password, sshLoginType, systemType, command):
    """Build inventory, variables and a two-task command play for hosts.

    :param hosts: host spec; also used as the inventory file name
    :param username/password: SSH credentials (set as host vars for the
        ssh_key/linux path)
    :param sshLoginType: login type, e.g. 'ssh_key'
    :param systemType: target OS type, e.g. 'linux'
    :param command: command to run on the targets
    :return: (play, variable_manager, inventory) tuple

    NOTE(review): filename and taskid are accepted but unused — confirm
    whether callers rely on them.
    """
    # action
    try:
        with open('/etc/ansible/{0}'.format(hosts), 'w') as f:
            f.writelines(hosts)
    except Exception:
        # logger.error("[error] failed to write ansible config file -- mission.py line68")
        pass
    self.inventory = InventoryManager(loader=self.loader, sources='/etc/ansible/{0}'.format(hosts))
    self.variable_manager = VariableManager(loader=self.loader, inventory=self.inventory)

    # FIX: remote_command was previously only assigned inside the branch
    # below, causing a NameError for any other sshLoginType/systemType.
    remote_command = command

    if sshLoginType == 'ssh_key' and systemType == 'linux':
        command = "iptables-save"  # command to persist/restart the firewall rules
        remote_command = command
        self.variable_manager.set_host_variable(Host(hosts, '22'), 'ansible_ssh_user', username)
        self.variable_manager.set_host_variable(Host(hosts, '22'), 'ansible_ssh_pass', password)

    play_source = dict(
        name="Ansible Play",
        hosts='all',
        gather_facts='no',
        tasks=[
            dict(action=dict(module="command", args=remote_command)),
            dict(action=dict(module="command", args=command)),
        ])
    self.play = Play.load(play_source, variable_manager=self.variable_manager, loader=self.loader)
    return self.play, self.variable_manager, self.inventory
def test_play_with_roles(mocker):
    """Role tasks compile into Blocks and roles resolve to Role objects."""
    mocker.patch('ansible.playbook.role.definition.RoleDefinition._load_role_path',
                 return_value=('foo', '/etc/ansible/roles/foo'))
    fake_loader = DictDataLoader({
        '/etc/ansible/roles/foo/tasks.yml': """
        - name: role task
          shell: echo "hello world"
        """,
    })

    mock_var_manager = mocker.MagicMock()
    mock_var_manager.get_vars.return_value = {}

    source = {
        'name': "test play",
        'hosts': ['foo'],
        'gather_facts': False,
        'roles': ['foo'],
    }
    p = Play.load(source, loader=fake_loader, variable_manager=mock_var_manager)

    compiled = p.compile()
    assert len(compiled) > 1
    assert all(isinstance(block, Block) for block in compiled)
    assert isinstance(p.get_roles()[0], Role)
def run(self, hosts, tasks):
    """Run an ad-hoc play of the given tasks on hosts; return results."""
    play_source = {
        'name': "Ansible ADHoc Task",
        'hosts': hosts,
        'gather_facts': "no",
        'tasks': tasks,
    }
    callback = MyCallbackResult()
    queue_manager = TaskQueueManager(inventory=self.inventory,
                                     variable_manager=self.variable_manager,
                                     loader=self.loader,
                                     passwords={},
                                     stdout_callback=callback)
    play = Play.load(play_source,
                     variable_manager=self.variable_manager,
                     loader=self.loader)
    try:
        queue_manager.run(play)
    except Exception as e:
        logging.error(str(e))
    finally:
        # Always release workers and remove ansible's local tmpdir.
        queue_manager.cleanup()
        shutil.rmtree(C.DEFAULT_LOCAL_TMP)
    return callback.result
def test_pire_server(spire_runners: SpireRunners, test_case: str, initial_state: StateOfServer, initial_version: str, expected_state: StateOfServer, expected_outcome_part: Dict[str, Any], expected_local_probing: Dict[str, Any]) -> None:
    """Drive the spire_server action module end-to-end over a local
    connection: bring the server to ``initial_state``, run the module
    toward ``expected_state``, assert the returned outcome and the locally
    probed health/service status, then re-run in check mode expecting
    ``{"changed": False, "diff": []}``.

    NOTE(review): function name looks like a typo for test_spire_server --
    renaming would change what pytest collects, so it is only flagged here.
    """
    server_runner: ServerRunner = spire_runners.server
    file_modes = FileModes(mode_dir="u=xrw,g=xrw,o=", mode_file_not_exe="u=rw,g=rw,o=", mode_file_exe="u=xrw,g=xr,o=xr")
    spire_runners.to_server_initial_state(initial_state, initial_version, file_modes)
    # TODCHECK is expected_version being None an issue.
    # ? use a separate parameter, because this is not the same as the task parameter
    # ! but the task parameter should be allow to be None if expected state is absent
    expected_version: str = expected_outcome_part['actual_spire_server_version']
    inventory, data_loader, var_manager = give_inventory_and_data_loader()
    play: Play = Play.load({"hosts": "spire_server"}, variable_manager=var_manager, loader=data_loader)
    play_context = PlayContext(play=play)
    # ansible.plugins.connection.local.Connection
    connection: Connection = connection_loader.get('local', play_context, os.devnull)
    # No download URL when no version is expected (e.g. state=absent).
    spire_download_url = None if strings.is_blank(expected_version) \
        else server_runner.url_dist_tar_gz(expected_version)
    # Task data mirroring a playbook entry for the spire_server module.
    data: Dict[str, Any] = {
        "name": "spire-server-local1",
        "io_patricecongo.spire.spire_server": {
            "state": expected_state.state.name,
            "substate_service_installation": expected_state.substate_service_installation_name(),
            "substate_service_status": expected_state.substate_service_status_name(),
            # NOTE(review): "spire_server_install_dir" and
            # "spire_server_registration_uds_path" each appear twice in
            # this literal; Python keeps the last occurrence. The values
            # are identical, but the duplicates should be removed.
            "spire_server_install_dir": server_runner.dir_install,
            "spire_server_registration_uds_path": server_runner.registration_uds_path,
            "spire_server_address": server_runner.bind_address,
            "spire_server_port": server_runner.bind_port,
            "spire_server_config_dir": server_runner.dir_config,
            "spire_server_data_dir": server_runner.dir_data,
            "spire_server_install_dir": server_runner.dir_install,
            "spire_server_service_dir": server_runner.service.install_dir,
            "spire_server_log_dir": server_runner.dir_log,
            "spire_server_install_file_owner": None,
            "spire_server_install_dir_mode": file_modes.mode_dir,
            "spire_server_install_file_mode": file_modes.mode_file_not_exe,  # "u=xrw,g=xrw",
            "spire_server_install_file_mode_exe": file_modes.mode_file_exe,  # "u=xrw,g=xr,o=xr",
            "spire_server_version": expected_version,
            "spire_server_log_level": "DEBUG",
            "spire_server_log_format": "text",
            "spire_server_trust_domain": server_runner.trust_domain,
            "spire_server_service_name": server_runner.service.service_name,
            "spire_server_service_scope": "user",
            "spire_server_healthiness_probe_timeout_seconds": 5,
            "spire_download_url": spire_download_url,
            "spire_server_ca_key_type": "ec-p256",
            "spire_server_ca_ttl": "168h",
            # NOTE(review): "commom_name" is misspelled, but presumably
            # matches the module's argument spec -- confirm before fixing.
            "spire_server_ca_subject_commom_name": "unit-test-ca",
            "spire_server_ca_subject_country": "de",
            "spire_server_ca_subject_organization": "dev",
            "spire_server_jwt_issuer": "unit-test-ca",
            "spire_server_registration_uds_path": server_runner.registration_uds_path,
            "spire_server_plugins": [{
                "type": "DataStore",
                "name": "sql",
                "data": {
                    "database_type": "sqlite3",
                    "connection_string": f"{server_runner.dir_data}/datastore.sqlite3"
                },
            }, {
                "type": "KeyManager",
                "name": "disk",
                "data": {
                    "keys_path": f"{server_runner.dir_data}/keys.json"
                }
            }, {
                "type": "NodeAttestor",
                "name": "join_token",
                "data": {}
            }]
        }
    }
    task: Task = Task.load(data=data, variable_manager=var_manager, loader=data_loader)
    all_vars = var_manager.get_vars(play=play, task=task, host=inventory.get_host("localhost"))
    templar = Templar(loader=data_loader, shared_loader_obj=plugins_loader, variables=None)
    action = spire_server.ActionModule(task=task, connection=connection, play_context=play_context, loader=data_loader, templar=templar, shared_loader_obj=plugins_loader)
    server_runner.register_extra_cleanup_task(action.cleanup)
    # Wrap the low-level command execution (e.g. to intercept/record it).
    action._low_level_execute_command = functools.partial(
        _low_level_execute_command, action._low_level_execute_command)
    ###
    ret = action.run(task_vars=all_vars)
    ###
    keys_to_check = [
        'changed', 'actual_state', 'actual_substate_service_installation',
        'actual_substate_service_status', 'actual_spire_server_version', "failed"
    ]
    # Only the selected keys of the module's return value are compared.
    assert {
        **expected_state.to_ansible_return_data(), **expected_outcome_part
    } == {key: ret.get(key) for key in keys_to_check}
    server_health_res = server_runner.is_healthy()
    server_srv_running_res = server_runner.service.is_active()
    server_srv_enabled_res = server_runner.service.is_enabled()
    # Second assert argument is a diagnostic payload shown on failure.
    assert expected_local_probing == {
        "healthy": server_health_res.res,
        "enabled": server_srv_enabled_res.res,
        "running": server_srv_running_res.res
    }, \
    {
        "ret": ret,
        "health": server_health_res,
        "enabled": server_srv_enabled_res,
        "running": server_srv_running_res,
        "expected_local_probing": expected_local_probing
    }
    ### check-mode diff
    # Re-run the same task in check mode: since the system is already in
    # the expected state, no change and an empty diff are expected.
    data["check_mode"] = True
    data["diff"] = True
    task = Task.load(data=data, variable_manager=var_manager, loader=data_loader)
    all_vars = var_manager.get_vars(play=play, task=task, host=inventory.get_host("localhost"))
    action = spire_server.ActionModule(task=task, connection=connection, play_context=play_context, loader=data_loader, templar=templar, shared_loader_obj=plugins_loader)
    server_runner.register_extra_cleanup_task(action.cleanup)
    action._low_level_execute_command = functools.partial(
        _low_level_execute_command, action._low_level_execute_command)
    ret = action.run(task_vars={**all_vars})
    ###
    assert ret == {"changed":False, "diff": []}, \
        {
            "actual": ret,
            "expected":{"changed":False, "diff": []}
        }
def test_empty_play(self):
    """An empty play stringifies to 'PLAY: ' when no name is given."""
    play = Play.load({})
    self.assertEqual(str(play), "PLAY: ")
def execute(self, module_name, ansible_patt, ansible_args=None):
    """Run a single Ansible module ad-hoc against the configured hosts.

    Parameters:
        module_name: name of the Ansible module to run (e.g. 'shell').
        ansible_patt: host pattern the play targets.
        ansible_args: optional module arguments; omitted from the task
            when falsy (preserves the original behavior for None/'').

    Returns:
        Whatever ``self.evaluate_results(callback)`` produces for the
        collected run results.
    """
    loader = DataLoader()
    variable_manager = VariableManager()
    inventory = Inventory(
        loader=loader,
        variable_manager=variable_manager,
        host_list=self.ansible_host_list
    )
    variable_manager.set_inventory(inventory)

    # Build the task action once; the original duplicated the entire
    # play_source dict in two branches differing only in the 'args' key.
    action = {'module': module_name}
    if ansible_args:
        action['args'] = ansible_args

    play_source = {
        'name': "AnsiApi Play",
        'hosts': ansible_patt,
        'gather_facts': 'no',
        'tasks': [{'action': action}]
    }

    play = Play.load(
        play_source,
        variable_manager=variable_manager,
        loader=loader
    )

    task_queue_manager = None
    callback = AnsiCallBack()
    try:
        task_queue_manager = TaskQueueManager(
            inventory=inventory,
            variable_manager=variable_manager,
            loader=loader,
            options=self.options,
            passwords=self.passwords,
            stdout_callback=callback
        )
        task_queue_manager.run(play)
    finally:
        # Clean up worker processes / temp state even when the run fails.
        if task_queue_manager is not None:
            task_queue_manager.cleanup()
    return self.evaluate_results(callback)
def test_empty_play(self):
    """An empty play stringifies with the '<no name specified>' placeholder."""
    play = Play.load({})
    self.assertEqual(str(play), "PLAY: <no name specified>")
def execute(self, *args, **kwargs):
    """ Puts args and kwargs in a way ansible can understand. Calls ansible
    and interprets the result.

    Builds an in-memory inventory from ``self.api.hosts_with_ports``, runs
    ``self.module_name`` with the derived module args on all those hosts,
    and returns ``self.evaluate_results(callback)``.
    """
    assert self.is_hooked_up, "the module should be hooked up to the api"

    # legacy key=value pairs shorthand approach
    if args:
        self.module_args = module_args = self.get_module_args(args, kwargs)
    else:
        self.module_args = module_args = kwargs

    loader = DataLoader()
    inventory_manager = SourcelessInventoryManager(loader=loader)

    # materialize once: iterated twice below (add_host and the hosts list)
    hosts_with_ports = tuple(self.api.hosts_with_ports)

    for host, port in hosts_with_ports:
        inventory_manager._inventory.add_host(host, group='all', port=port)

    for key, value in self.api.options.extra_vars.items():
        inventory_manager._inventory.set_variable('all', key, value)

    variable_manager = VariableManager(
        loader=loader, inventory=inventory_manager)

    play_source = {
        'name': "Suitable Play",
        'hosts': [h for h, _ in hosts_with_ports],  # *must* be a list
        'gather_facts': 'no',
        'tasks': [{
            'action': {
                'module': self.module_name,
                'args': module_args,
            },
            'environment': self.api.environment
        }]
    }

    play = Play.load(
        play_source,
        variable_manager=variable_manager,
        loader=loader,
    )

    if self.api.strategy:
        play.strategy = self.api.strategy

    log.info(u'running {}'.format(u'- {module_name}: {module_args}'.format(
        module_name=self.module_name,
        module_args=module_args
    )))

    start = datetime.utcnow()
    task_queue_manager = None
    callback = SilentCallbackModule()

    # ansible uses various levels of verbosity (from -v to -vvvvvv)
    # offering various amounts of debug information
    #
    # we keep it a bit simpler by activating all of it during debug, and
    # falling back to the default of 0 otherwise
    #
    # (was the `cond and 6 or 0` truthiness hack; a conditional
    # expression is the idiomatic and safer way to express this)
    verbosity = 6 if self.api.options.verbosity == logging.DEBUG else 0

    try:
        with ansible_verbosity(verbosity):
            task_queue_manager = TaskQueueManager(
                inventory=inventory_manager,
                variable_manager=variable_manager,
                loader=loader,
                options=self.api.options,
                passwords=getattr(self.api.options, 'passwords', {}),
                stdout_callback=callback
            )
            task_queue_manager.run(play)
    finally:
        # always release forks/temp state, even on failure
        if task_queue_manager is not None:
            task_queue_manager.cleanup()

    log.debug(u'took {} to complete'.format(datetime.utcnow() - start))
    return self.evaluate_results(callback)
def test_variable_manager_precedence(self, mock_basedir):
    '''
    Tests complex variations and combinations of get_vars() with different
    objects to modify the context under which variables are merged.
    '''
    # NOTE(review): the assertions below depend on exact call order
    # (set_inventory, add_*_vars_file, fact cache) -- keep sequential.
    v = VariableManager()
    # reach into the private fact cache so facts can be injected directly
    v._fact_cache = defaultdict(dict)

    fake_loader = DictDataLoader({
        # inventory1
        '/etc/ansible/inventory1': """
        [group2:children]
        group1

        [group1]
        host1 host_var=host_var_from_inventory_host1

        [group1:vars]
        group_var = group_var_from_inventory_group1

        [group2:vars]
        group_var = group_var_from_inventory_group2
        """,
        # role defaults_only1
        '/etc/ansible/roles/defaults_only1/defaults/main.yml': """
        default_var: "default_var_from_defaults_only1"
        host_var: "host_var_from_defaults_only1"
        group_var: "group_var_from_defaults_only1"
        group_var_all: "group_var_all_from_defaults_only1"
        extra_var: "extra_var_from_defaults_only1"
        """,
        '/etc/ansible/roles/defaults_only1/tasks/main.yml': """
        - debug: msg="here i am"
        """,
        # role defaults_only2
        '/etc/ansible/roles/defaults_only2/defaults/main.yml': """
        default_var: "default_var_from_defaults_only2"
        host_var: "host_var_from_defaults_only2"
        group_var: "group_var_from_defaults_only2"
        group_var_all: "group_var_all_from_defaults_only2"
        extra_var: "extra_var_from_defaults_only2"
        """,
    })

    mock_basedir.return_value = './'
    inv1 = Inventory(loader=fake_loader, variable_manager=v, host_list='/etc/ansible/inventory1')
    inv1.set_playbook_basedir('./')

    play1 = Play.load(dict(
        hosts=['all'],
        roles=['defaults_only1', 'defaults_only2'],
    ), loader=fake_loader, variable_manager=v)

    # first we assert that the defaults as viewed as a whole are the merged results
    # of the defaults from each role, with the last role defined "winning" when
    # there is a variable naming conflict
    res = v.get_vars(loader=fake_loader, play=play1)
    self.assertEqual(res['default_var'], 'default_var_from_defaults_only2')

    # next, we assert that when vars are viewed from the context of a task within a
    # role, that task will see its own role defaults before any other role's
    blocks = play1.compile()
    # blocks[1].block[0] is the first task of the first role (defaults_only1)
    task = blocks[1].block[0]
    res = v.get_vars(loader=fake_loader, play=play1, task=task)
    self.assertEqual(res['default_var'], 'default_var_from_defaults_only1')

    # next we assert the precendence of inventory variables
    v.set_inventory(inv1)
    h1 = inv1.get_host('host1')

    res = v.get_vars(loader=fake_loader, play=play1, host=h1)
    self.assertEqual(res['group_var'], 'group_var_from_inventory_group1')
    self.assertEqual(res['host_var'], 'host_var_from_inventory_host1')

    # next we test with group_vars/ files loaded
    fake_loader.push("/etc/ansible/group_vars/all", """
    group_var_all: group_var_all_from_group_vars_all
    """)
    fake_loader.push("/etc/ansible/group_vars/group1", """
    group_var: group_var_from_group_vars_group1
    """)
    fake_loader.push("/etc/ansible/group_vars/group3", """
    # this is a dummy, which should not be used anywhere
    group_var: group_var_from_group_vars_group3
    """)
    fake_loader.push("/etc/ansible/host_vars/host1", """
    host_var: host_var_from_host_vars_host1
    """)
    fake_loader.push("group_vars/group1", """
    playbook_group_var: playbook_group_var
    """)
    fake_loader.push("host_vars/host1", """
    playbook_host_var: playbook_host_var
    """)

    # group2's file is registered even though no fake file exists for it;
    # group3's file exists but is deliberately never registered
    v.add_group_vars_file("/etc/ansible/group_vars/all", loader=fake_loader)
    v.add_group_vars_file("/etc/ansible/group_vars/group1", loader=fake_loader)
    v.add_group_vars_file("/etc/ansible/group_vars/group2", loader=fake_loader)
    v.add_group_vars_file("group_vars/group1", loader=fake_loader)
    v.add_host_vars_file("/etc/ansible/host_vars/host1", loader=fake_loader)
    v.add_host_vars_file("host_vars/host1", loader=fake_loader)

    res = v.get_vars(loader=fake_loader, play=play1, host=h1)
    self.assertEqual(res['group_var'], 'group_var_from_group_vars_group1')
    self.assertEqual(res['group_var_all'], 'group_var_all_from_group_vars_all')
    self.assertEqual(res['playbook_group_var'], 'playbook_group_var')
    self.assertEqual(res['host_var'], 'host_var_from_host_vars_host1')
    self.assertEqual(res['playbook_host_var'], 'playbook_host_var')

    # add in the fact cache
    v._fact_cache['host1'] = dict(fact_cache_var="fact_cache_var_from_fact_cache")

    res = v.get_vars(loader=fake_loader, play=play1, host=h1)
    self.assertEqual(res['fact_cache_var'], 'fact_cache_var_from_fact_cache')
def test_empty_play(self):
    """An empty play stringifies to the empty string."""
    play = Play.load({})
    self.assertEqual(str(play), '')
def execute(self, *args, **kwargs):
    """ Puts args and kwargs in a way ansible can understand. Calls ansible
    and interprets the result.
    """
    assert self.is_hooked_up, "the module should be hooked up to the api"

    self.module_args = module_args = self.get_module_args(args, kwargs)

    data_loader = DataLoader()
    var_manager = VariableManager()
    host_inventory = UncachedInventory(
        loader=data_loader,
        variable_manager=var_manager,
        host_list=self.api.servers
    )
    var_manager.set_inventory(host_inventory)

    # single ad-hoc task running the configured module with its args
    task = {
        'action': {
            'module': self.module_name,
            'args': module_args
        }
    }
    play_source = {
        'name': "Suitable Play",
        'hosts': self.api.servers,
        'gather_facts': 'no',
        'tasks': [task]
    }

    play = Play.load(
        play_source,
        variable_manager=var_manager,
        loader=data_loader
    )

    log.info(u'running {}'.format(u'- {module_name}: {module_args}'.format(
        module_name=self.module_name,
        module_args=module_args
    )))
    started_at = datetime.utcnow()

    queue_manager = None
    silent_callback = SilentCallbackModule()

    try:
        queue_manager = TaskQueueManager(
            inventory=host_inventory,
            variable_manager=var_manager,
            loader=data_loader,
            options=self.api.options,
            passwords=getattr(self.api.options, 'passwords', {}),
            stdout_callback=silent_callback
        )
        queue_manager.run(play)
    finally:
        # release workers/temp state whether or not the run succeeded
        if queue_manager is not None:
            queue_manager.cleanup()

    log.info(u'took {} to complete'.format(datetime.utcnow() - started_at))
    return self.evaluate_results(silent_callback)
def execute(self, *args, **kwargs):
    """ Puts args and kwargs in a way ansible can understand. Calls ansible
    and interprets the result.

    Builds an in-memory inventory from ``self.api.hosts_with_ports``, runs
    ``self.module_name`` on all hosts and returns
    ``self.evaluate_results(callback)``. Handles both pre- and post-2.8
    Ansible option passing (``set_global_context``).
    """
    assert self.is_hooked_up, "the module should be hooked up to the api"

    if set_global_context:
        set_global_context(self.api.options)

    # legacy key=value pairs shorthand approach
    if args:
        self.module_args = module_args = self.get_module_args(args, kwargs)
    else:
        self.module_args = module_args = kwargs

    loader = DataLoader()
    inventory_manager = SourcelessInventoryManager(loader=loader)

    for host, port in self.api.hosts_with_ports:
        inventory_manager._inventory.add_host(host, group='all', port=port)

    for key, value in self.api.options.extra_vars.items():
        inventory_manager._inventory.set_variable('all', key, value)

    variable_manager = VariableManager(
        loader=loader, inventory=inventory_manager)

    play_source = {
        'name': "Suitable Play",
        'hosts': 'all',
        'gather_facts': 'no',
        'tasks': [{
            'action': {
                'module': self.module_name,
                'args': module_args,
            },
            'environment': self.api.environment,
        }]
    }

    # BUGFIX: initialised *before* the try block so the finally clause can
    # always reference it -- previously a failure inside the try (e.g. in
    # Play.load) raised a NameError from the cleanup code, masking the
    # original exception.
    task_queue_manager = None
    callback = SilentCallbackModule()

    try:
        play = Play.load(
            play_source,
            variable_manager=variable_manager,
            loader=loader,
        )

        if self.api.strategy:
            play.strategy = self.api.strategy

        log.info(
            u'running {}'.format(u'- {module_name}: {module_args}'.format(
                module_name=self.module_name,
                module_args=module_args
            ))
        )

        start = datetime.utcnow()

        # ansible uses various levels of verbosity (from -v to -vvvvvv)
        # offering various amounts of debug information
        #
        # we keep it a bit simpler by activating all of it during debug,
        # and falling back to the default of 0 otherwise
        # (conditional expression replaces the old `and 6 or 0` hack)
        verbosity = 6 if self.api.options.verbosity == logging.DEBUG else 0

        with ansible_verbosity(verbosity):
            # host_key_checking is special, since not each connection
            # plugin handles it the same way, we need to apply both
            # environment variable and Ansible constant when running a
            # command in the runner to be successful
            with host_key_checking(self.api.host_key_checking):
                kwargs = dict(
                    inventory=inventory_manager,
                    variable_manager=variable_manager,
                    loader=loader,
                    options=self.api.options,
                    passwords=getattr(self.api.options, 'passwords', {}),
                    stdout_callback=callback
                )

                if set_global_context:
                    # Ansible 2.8+ reads options from the global context,
                    # not from a TaskQueueManager argument
                    del kwargs['options']

                task_queue_manager = TaskQueueManager(**kwargs)

                try:
                    task_queue_manager.run(play)
                except SystemExit:
                    # Mitogen forks our process and exits it in one
                    # instance before returning
                    #
                    # This is fine, but it does lead to a very messy exit
                    # by py.test which will essentially return with a test
                    # that is first successful and then failed as each
                    # forked process dies.
                    #
                    # To avoid this we commit suicide if we are run inside
                    # a pytest session. Normally this would just result
                    # in a exit code of zero, which is good.
                    if 'pytest' in sys.modules:
                        try:
                            atexit._run_exitfuncs()
                        except Exception:
                            pass
                        os.kill(os.getpid(), signal.SIGKILL)
                    raise
    finally:
        if task_queue_manager is not None:
            task_queue_manager.cleanup()

        if set_global_context:
            # Ansible 2.8 introduces a global context which persists
            # during the lifetime of the process - for Suitable this
            # singleton/cache needs to be cleared after each call
            # to make sure that API calls do not carry over state.
            #
            # The docs hint at a future inclusion of local contexts, which
            # would of course be preferable.
            from ansible.utils.context_objects import GlobalCLIArgs
            GlobalCLIArgs._Singleton__instance = None

    log.debug(u'took {} to complete'.format(datetime.utcnow() - start))
    return self.evaluate_results(callback)