def test_collection_invalid_data_play():
    """Test that collection as a dict at the play level fails with parser error"""
    collection_name = {'name': 'foo'}
    with pytest.raises(AssibleParserError):
        Play.load(dict(
            name="test play",
            hosts=['foo'],
            gather_facts=False,
            connection='local',
            collections=collection_name,
        ))
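# For contrast with the failure case above, a minimal sketch of the accepted
# list form of `collections`. This test is not part of the original suite; it
# assumes the same Play.load() behavior exercised by the surrounding tests.
def test_collection_list_data_play():
    """Sketch: a list-valued 'collections' keyword loads without error."""
    collection_names = ['foo.bar']
    p = Play.load(dict(
        name="test play",
        hosts=['foo'],
        gather_facts=False,
        connection='local',
        collections=collection_names,
    ))
    # the requested collection should appear in the play's collection search list
    assert 'foo.bar' in p.collections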
def test_play_with_post_tasks(self):
    p = Play.load(dict(
        name="test play",
        hosts=['foo'],
        gather_facts=False,
        post_tasks=[dict(action='shell echo "hello world"')],
    ))
def test_play_with_user_conflict(self):
    p = Play.load(dict(
        name="test play",
        hosts=['foo'],
        user="testing",  # must match the remote_user assertion below
        gather_facts=False,
    ))
    self.assertEqual(p.remote_user, "testing")
def test_basic_play(self):
    p = Play.load(dict(
        name="test play",
        hosts=['foo'],
        gather_facts=False,
        connection='local',
        remote_user="******",
        become=True,
        become_user="******",
    ))
def test_variable_manager_role_vars_dependencies(self):
    '''
    Tests vars from role dependencies with duplicate dependencies.
    '''
    mock_inventory = MagicMock()

    fake_loader = DictDataLoader({
        # role common-role
        '/etc/assible/roles/common-role/tasks/main.yml': """
        - debug: msg="{{role_var}}"
        """,
        # We do not need allow_duplicates: yes for this role
        # because eliminating duplicates is done by the execution
        # strategy, which we do not test here.

        # role role1
        '/etc/assible/roles/role1/vars/main.yml': """
        role_var: "role_var_from_role1"
        """,
        '/etc/assible/roles/role1/meta/main.yml': """
        dependencies:
          - { role: common-role }
        """,

        # role role2
        '/etc/assible/roles/role2/vars/main.yml': """
        role_var: "role_var_from_role2"
        """,
        '/etc/assible/roles/role2/meta/main.yml': """
        dependencies:
          - { role: common-role }
        """,
    })

    v = VariableManager(loader=fake_loader, inventory=mock_inventory)

    play1 = Play.load(dict(
        hosts=['all'],
        roles=['role1', 'role2'],
    ), loader=fake_loader, variable_manager=v)

    # The task defined by common-role exists twice because role1
    # and role2 depend on common-role. Check that the tasks see
    # different values of role_var.
    blocks = play1.compile()

    task = blocks[1].block[0]
    res = v.get_vars(play=play1, task=task)
    self.assertEqual(res['role_var'], 'role_var_from_role1')

    task = blocks[2].block[0]
    res = v.get_vars(play=play1, task=task)
    self.assertEqual(res['role_var'], 'role_var_from_role2')
def test_play_context(mocker, parser, reset_cli_args):
    options = parser.parse_args(['-vv', '--check'])
    context._init_global_context(options)
    play = Play.load({})
    play_context = PlayContext(play=play)

    assert play_context.remote_addr is None
    assert play_context.remote_user is None
    assert play_context.password == ''
    assert play_context.private_key_file == C.DEFAULT_PRIVATE_KEY_FILE
    assert play_context.timeout == C.DEFAULT_TIMEOUT
    assert play_context.verbosity == 2
    assert play_context.check_mode is True

    mock_play = mocker.MagicMock()
    mock_play.force_handlers = True

    play_context = PlayContext(play=mock_play)
    assert play_context.force_handlers is True

    mock_task = mocker.MagicMock()
    mock_task.connection = 'mocktask'
    mock_task.remote_user = 'mocktask'  # must match the remote_user assertion below
    mock_task.port = 1234
    mock_task.no_log = True
    mock_task.become = True
    mock_task.become_method = 'mocktask'
    mock_task.become_user = '******'
    mock_task.become_pass = '******'
    mock_task._local_action = False
    mock_task.delegate_to = None

    all_vars = dict(
        assible_connection='mock_inventory',
        assible_ssh_port=4321,
    )

    mock_templar = mocker.MagicMock()

    play_context = PlayContext()
    play_context = play_context.set_task_and_variable_override(task=mock_task, variables=all_vars, templar=mock_templar)
    assert play_context.connection == 'mock_inventory'
    assert play_context.remote_user == 'mocktask'
    assert play_context.no_log is True

    mock_task.no_log = False
    play_context = play_context.set_task_and_variable_override(task=mock_task, variables=all_vars, templar=mock_templar)
    assert play_context.no_log is False
def test_play_compile(self):
    p = Play.load(dict(
        name="test play",
        hosts=['foo'],
        gather_facts=False,
        tasks=[dict(action='shell echo "hello world"')],
    ))

    blocks = p.compile()

    # with a single block, there will still be three
    # implicit meta flush_handler blocks inserted
    self.assertEqual(len(blocks), 4)
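# Companion sketch (not from the original suite): inspect the compiled block
# structure that test_play_compile() asserts on. Attribute names (block.block,
# task.action) follow the Block/Task objects used elsewhere on this page and
# should be treated as assumptions.
def test_play_compile_block_shape(self):
    p = Play.load(dict(
        name="test play",
        hosts=['foo'],
        gather_facts=False,
        tasks=[dict(action='shell echo "hello world"')],
    ))
    blocks = p.compile()

    # the three implicit flush_handlers blocks each carry a single 'meta' task;
    # the remaining block carries the user-defined shell task
    actions = [task.action for block in blocks for task in block.block]
    self.assertEqual(actions.count('meta'), 3)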
def test_collection_static_warning(capsys):
    """Test that collection name is not templated.

    Also, make sure that users see the warning message for the referenced name.
    """
    collection_name = "foo.{{bar}}"
    p = Play.load(dict(
        name="test play",
        hosts=['foo'],
        gather_facts=False,
        connection='local',
        collections=collection_name,
    ))
    assert collection_name in p.collections

    std_out, std_err = capsys.readouterr()
    assert '[WARNING]: "collections" is not templatable, but we found: %s' % collection_name in std_err
    assert '' == std_out
def test_play_with_roles(self):
    fake_loader = DictDataLoader({
        '/etc/assible/roles/foo/tasks.yml': """
        - name: role task
          shell: echo "hello world"
        """,
    })

    mock_var_manager = MagicMock()
    mock_var_manager.get_vars.return_value = dict()

    p = Play.load(dict(
        name="test play",
        hosts=['foo'],
        gather_facts=False,
        roles=['foo'],
    ), loader=fake_loader, variable_manager=mock_var_manager)

    blocks = p.compile()
def run(self):
    ''' create and execute the single task playbook '''

    super(AdHocCLI, self).run()

    # only thing left should be host pattern
    pattern = to_text(context.CLIARGS['args'], errors='surrogate_or_strict')

    sshpass = None
    becomepass = None

    (sshpass, becomepass) = self.ask_passwords()
    passwords = {'conn_pass': sshpass, 'become_pass': becomepass}

    # get basic objects
    loader, inventory, variable_manager = self._play_prereqs()

    try:
        hosts = self.get_host_list(inventory, context.CLIARGS['subset'], pattern)
    except AssibleError:
        if context.CLIARGS['subset']:
            raise
        else:
            hosts = []
            display.warning("No hosts matched, nothing to do")

    if context.CLIARGS['listhosts']:
        display.display('  hosts (%d):' % len(hosts))
        for host in hosts:
            display.display('    %s' % host)
        return 0

    if context.CLIARGS['module_name'] in C.MODULE_REQUIRE_ARGS and not context.CLIARGS['module_args']:
        err = "No argument passed to %s module" % context.CLIARGS['module_name']
        if pattern.endswith(".yml"):
            err = err + ' (did you mean to run assible-playbook?)'
        raise AssibleOptionsError(err)

    # Avoid modules that don't work with ad-hoc
    if context.CLIARGS['module_name'] in ('import_playbook',):
        raise AssibleOptionsError("'%s' is not a valid action for ad-hoc commands" % context.CLIARGS['module_name'])

    play_ds = self._play_ds(pattern, context.CLIARGS['seconds'], context.CLIARGS['poll_interval'])
    play = Play().load(play_ds, variable_manager=variable_manager, loader=loader)

    # used in start callback
    playbook = Playbook(loader)
    playbook._entries.append(play)
    playbook._file_name = '__adhoc_playbook__'

    if self.callback:
        cb = self.callback
    elif context.CLIARGS['one_line']:
        cb = 'oneline'
    # Respect custom 'stdout_callback' only with enabled 'bin_assible_callbacks'
    elif C.DEFAULT_LOAD_CALLBACK_PLUGINS and C.DEFAULT_STDOUT_CALLBACK != 'default':
        cb = C.DEFAULT_STDOUT_CALLBACK
    else:
        cb = 'minimal'

    run_tree = False
    if context.CLIARGS['tree']:
        C.DEFAULT_CALLBACK_WHITELIST.append('tree')
        C.TREE_DIR = context.CLIARGS['tree']
        run_tree = True

    # now create a task queue manager to execute the play
    self._tqm = None
    try:
        self._tqm = TaskQueueManager(
            inventory=inventory,
            variable_manager=variable_manager,
            loader=loader,
            passwords=passwords,
            stdout_callback=cb,
            run_additional_callbacks=C.DEFAULT_LOAD_CALLBACK_PLUGINS,
            run_tree=run_tree,
            forks=context.CLIARGS['forks'],
        )

        self._tqm.load_callbacks()
        self._tqm.send_callback('v2_playbook_on_start', playbook)

        result = self._tqm.run(play)

        self._tqm.send_callback('v2_playbook_on_stats', self._tqm._stats)
    finally:
        if self._tqm:
            self._tqm.cleanup()
        if loader:
            loader.cleanup_all_tmp_files()

    return result
def main():
    host_list = ['localhost', 'www.example.com', 'www.google.com']
    # since the API is constructed for CLI it expects certain options to always be set in the context object
    context.CLIARGS = ImmutableDict(connection='smart', module_path=['/to/mymodules', '/usr/share/assible'], forks=10, become=None,
                                    become_method=None, become_user=None, check=False, diff=False)
    # required for
    # https://github.com/assible/assible/blob/devel/lib/assible/inventory/manager.py#L204
    sources = ','.join(host_list)
    if len(host_list) == 1:
        sources += ','

    # initialize needed objects
    loader = DataLoader()  # Takes care of finding and reading yaml, json and ini files
    passwords = dict(vault_pass='******')

    # Instantiate our ResultsCollectorJSONCallback for handling results as they come in.
    # Assible expects this to be one of its main display outlets.
    results_callback = ResultsCollectorJSONCallback()

    # create inventory, use path to host config file as source or hosts in a comma separated string
    inventory = InventoryManager(loader=loader, sources=sources)

    # variable manager takes care of merging all the different sources to give you a unified view of variables available in each context
    variable_manager = VariableManager(loader=loader, inventory=inventory)

    # instantiate task queue manager, which takes care of forking and setting up all objects to iterate over host list and tasks
    # IMPORTANT: This also adds library dirs paths to the module loader
    # IMPORTANT: and so it must be initialized before calling `Play.load()`.
    tqm = TaskQueueManager(
        inventory=inventory,
        variable_manager=variable_manager,
        loader=loader,
        passwords=passwords,
        stdout_callback=results_callback,  # Use our custom callback instead of the ``default`` callback plugin, which prints to stdout
    )

    # create data structure that represents our play, including tasks, this is basically what our YAML loader does internally.
    play_source = dict(
        name="Assible Play",
        hosts=host_list,
        gather_facts='no',
        tasks=[
            dict(action=dict(module='shell', args='ls'), register='shell_out'),
            dict(action=dict(module='debug', args=dict(msg='{{shell_out.stdout}}'))),
            dict(action=dict(module='command', args=dict(cmd='/usr/bin/uptime'))),
        ],
    )

    # Create play object, playbook objects use .load instead of init or new methods,
    # this will also automatically create the task objects from the info provided in play_source
    play = Play().load(play_source, variable_manager=variable_manager, loader=loader)

    # Actually run it
    try:
        result = tqm.run(play)  # most interesting data for a play is actually sent to the callback's methods
    finally:
        # we always need to cleanup child procs and the structures we use to communicate with them
        tqm.cleanup()
        if loader:
            loader.cleanup_all_tmp_files()

    # Remove assible tmpdir
    shutil.rmtree(C.DEFAULT_LOCAL_TMP, True)

    print("UP ***********")
    for host, result in results_callback.host_ok.items():
        print('{0} >>> {1}'.format(host, result._result['stdout']))

    print("FAILED *******")
    for host, result in results_callback.host_failed.items():
        print('{0} >>> {1}'.format(host, result._result['msg']))

    print("DOWN *********")
    for host, result in results_callback.host_unreachable.items():
        print('{0} >>> {1}'.format(host, result._result['msg']))
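# Scaffolding sketch for running main() as a standalone script (imports would
# normally sit at the top of the file). The import paths mirror the upstream
# project's module layout under the renamed `assible` namespace and are
# assumptions, not taken from this page; the ResultsCollectorJSONCallback class
# used above is defined elsewhere in the original example and is not shown here.
import shutil

from assible import constants as C
from assible import context
from assible.executor.task_queue_manager import TaskQueueManager
from assible.inventory.manager import InventoryManager
from assible.module_utils.common.collections import ImmutableDict
from assible.parsing.dataloader import DataLoader
from assible.playbook.play import Play
from assible.vars.manager import VariableManager

if __name__ == '__main__':
    main()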
def test_empty_play(self):
    p = Play.load(dict())
    self.assertEqual(str(p), '')
def default(self, arg, forceshell=False):
    """ actually runs modules """
    if arg.startswith("#"):
        return False

    if not self.cwd:
        display.error("No host found")
        return False

    if arg.split()[0] in self.modules:
        module = arg.split()[0]
        module_args = ' '.join(arg.split()[1:])
    else:
        module = 'shell'
        module_args = arg

    if forceshell is True:
        module = 'shell'
        module_args = arg

    result = None
    try:
        check_raw = module in ('command', 'shell', 'script', 'raw')
        play_ds = dict(
            name="Assible Shell",
            hosts=self.cwd,
            gather_facts='no',
            tasks=[dict(action=dict(module=module, args=parse_kv(module_args, check_raw=check_raw)))],
            remote_user=self.remote_user,
            become=self.become,
            become_user=self.become_user,
            become_method=self.become_method,
            check_mode=self.check_mode,
            diff=self.diff,
        )
        play = Play().load(play_ds, variable_manager=self.variable_manager, loader=self.loader)
    except Exception as e:
        display.error(u"Unable to build command: %s" % to_text(e))
        return False

    try:
        cb = 'minimal'  # FIXME: make callbacks configurable

        # now create a task queue manager to execute the play
        self._tqm = None
        try:
            self._tqm = TaskQueueManager(
                inventory=self.inventory,
                variable_manager=self.variable_manager,
                loader=self.loader,
                passwords=self.passwords,
                stdout_callback=cb,
                run_additional_callbacks=C.DEFAULT_LOAD_CALLBACK_PLUGINS,
                run_tree=False,
                forks=self.forks,
            )

            result = self._tqm.run(play)
        finally:
            if self._tqm:
                self._tqm.cleanup()
            if self.loader:
                self.loader.cleanup_all_tmp_files()

        if result is None:
            display.error("No hosts found")
            return False
    except KeyboardInterrupt:
        display.error('User interrupted execution')
        return False
    except Exception as e:
        display.error(to_text(e))
        # FIXME: add traceback in very very verbose mode
        return False
def _load_playbook_data(self, file_name, variable_manager, vars=None):

    if os.path.isabs(file_name):
        self._basedir = os.path.dirname(file_name)
    else:
        self._basedir = os.path.normpath(os.path.join(self._basedir, os.path.dirname(file_name)))

    # set the loaders basedir
    cur_basedir = self._loader.get_basedir()
    self._loader.set_basedir(self._basedir)

    add_all_plugin_dirs(self._basedir)

    self._file_name = file_name

    try:
        ds = self._loader.load_from_file(os.path.basename(file_name))
    except UnicodeDecodeError as e:
        raise AssibleParserError("Could not read playbook (%s) due to encoding issues: %s" % (file_name, to_native(e)))

    # check for errors and restore the basedir in case this error is caught and handled
    if ds is None:
        self._loader.set_basedir(cur_basedir)
        raise AssibleParserError("Empty playbook, nothing to do", obj=ds)
    elif not isinstance(ds, list):
        self._loader.set_basedir(cur_basedir)
        raise AssibleParserError("A playbook must be a list of plays, got a %s instead" % type(ds), obj=ds)
    elif not ds:
        display.deprecated("Empty plays will currently be skipped, in the future they will cause a syntax error",
                           version='2.12', collection_name='assible.builtin')

    # Parse the playbook entries. For plays, we simply parse them
    # using the Play() object, and includes are parsed using the
    # PlaybookInclude() object
    for entry in ds:
        if not isinstance(entry, dict):
            # restore the basedir in case this error is caught and handled
            self._loader.set_basedir(cur_basedir)
            raise AssibleParserError("playbook entries must be either a valid play or an include statement", obj=entry)

        if any(action in entry for action in ('import_playbook', 'include')):
            if 'include' in entry:
                display.deprecated("'include' for playbook includes. You should use 'import_playbook' instead",
                                   version="2.12", collection_name='assible.builtin')
            pb = PlaybookInclude.load(entry, basedir=self._basedir, variable_manager=variable_manager, loader=self._loader)
            if pb is not None:
                self._entries.extend(pb._entries)
            else:
                which = entry.get('import_playbook', entry.get('include', entry))
                display.display("skipping playbook '%s' due to conditional test failure" % which, color=C.COLOR_SKIP)
        else:
            entry_obj = Play.load(entry, variable_manager=variable_manager, loader=self._loader, vars=vars)
            self._entries.append(entry_obj)

    # we're done, so restore the old basedir in the loader
    self._loader.set_basedir(cur_basedir)
def test_variable_manager_precedence(self):
    # FIXME: this needs to be redone as dataloader is not the automatic source of data anymore
    return

    # pylint: disable=unreachable
    '''
    Tests complex variations and combinations of get_vars() with different
    objects to modify the context under which variables are merged.
    '''
    # FIXME: BCS makethiswork
    # return True

    mock_inventory = MagicMock()

    inventory1_filedata = """
        [group2:children]
        group1

        [group1]
        host1 host_var=host_var_from_inventory_host1

        [group1:vars]
        group_var = group_var_from_inventory_group1

        [group2:vars]
        group_var = group_var_from_inventory_group2
    """

    fake_loader = DictDataLoader({
        # inventory1
        '/etc/assible/inventory1': inventory1_filedata,
        # role defaults_only1
        '/etc/assible/roles/defaults_only1/defaults/main.yml': """
        default_var: "default_var_from_defaults_only1"
        host_var: "host_var_from_defaults_only1"
        group_var: "group_var_from_defaults_only1"
        group_var_all: "group_var_all_from_defaults_only1"
        extra_var: "extra_var_from_defaults_only1"
        """,
        '/etc/assible/roles/defaults_only1/tasks/main.yml': """
        - debug: msg="here i am"
        """,
        # role defaults_only2
        '/etc/assible/roles/defaults_only2/defaults/main.yml': """
        default_var: "default_var_from_defaults_only2"
        host_var: "host_var_from_defaults_only2"
        group_var: "group_var_from_defaults_only2"
        group_var_all: "group_var_all_from_defaults_only2"
        extra_var: "extra_var_from_defaults_only2"
        """,
    })

    inv1 = InventoryManager(loader=fake_loader, sources=['/etc/assible/inventory1'])
    v = VariableManager(inventory=mock_inventory, loader=fake_loader)

    play1 = Play.load(dict(
        hosts=['all'],
        roles=['defaults_only1', 'defaults_only2'],
    ), loader=fake_loader, variable_manager=v)

    # first we assert that the defaults as viewed as a whole are the merged results
    # of the defaults from each role, with the last role defined "winning" when
    # there is a variable naming conflict
    res = v.get_vars(play=play1)
    self.assertEqual(res['default_var'], 'default_var_from_defaults_only2')

    # next, we assert that when vars are viewed from the context of a task within a
    # role, that task will see its own role defaults before any other role's
    blocks = play1.compile()
    task = blocks[1].block[0]
    res = v.get_vars(play=play1, task=task)
    self.assertEqual(res['default_var'], 'default_var_from_defaults_only1')

    # next we assert the precedence of inventory variables
    v.set_inventory(inv1)
    h1 = inv1.get_host('host1')

    res = v.get_vars(play=play1, host=h1)
    self.assertEqual(res['group_var'], 'group_var_from_inventory_group1')
    self.assertEqual(res['host_var'], 'host_var_from_inventory_host1')

    # next we test with group_vars/ files loaded
    fake_loader.push("/etc/assible/group_vars/all", """
    group_var_all: group_var_all_from_group_vars_all
    """)
    fake_loader.push("/etc/assible/group_vars/group1", """
    group_var: group_var_from_group_vars_group1
    """)
    fake_loader.push("/etc/assible/group_vars/group3", """
    # this is a dummy, which should not be used anywhere
    group_var: group_var_from_group_vars_group3
    """)
    fake_loader.push("/etc/assible/host_vars/host1", """
    host_var: host_var_from_host_vars_host1
    """)
    fake_loader.push("group_vars/group1", """
    playbook_group_var: playbook_group_var
    """)
    fake_loader.push("host_vars/host1", """
    playbook_host_var: playbook_host_var
    """)

    res = v.get_vars(play=play1, host=h1)
    # self.assertEqual(res['group_var'], 'group_var_from_group_vars_group1')
    # self.assertEqual(res['group_var_all'], 'group_var_all_from_group_vars_all')
    # self.assertEqual(res['playbook_group_var'], 'playbook_group_var')
    # self.assertEqual(res['host_var'], 'host_var_from_host_vars_host1')
    # self.assertEqual(res['playbook_host_var'], 'playbook_host_var')

    # add in the fact cache
    v._fact_cache['host1'] = dict(fact_cache_var="fact_cache_var_from_fact_cache")

    res = v.get_vars(play=play1, host=h1)
    self.assertEqual(res['fact_cache_var'], 'fact_cache_var_from_fact_cache')