class TestPathDwimRelativeDataLoader(unittest.TestCase):

    def setUp(self):
        self._loader = DataLoader()

    def test_all_slash(self):
        self.assertEqual(self._loader.path_dwim_relative('/', '/', '/'), '/')

    def test_path_endswith_role(self):
        self.assertEqual(self._loader.path_dwim_relative(path='foo/bar/tasks/', dirname='/', source='/'), '/')

    def test_path_endswith_role_main_yml(self):
        self.assertIn('main.yml', self._loader.path_dwim_relative(path='foo/bar/tasks/', dirname='/', source='main.yml'))

    def test_path_endswith_role_source_tilde(self):
        self.assertEqual(self._loader.path_dwim_relative(path='foo/bar/tasks/', dirname='/', source='~/'), os.path.expanduser('~'))
class TestPathDwimRelativeStackDataLoader(unittest.TestCase):

    def setUp(self):
        self._loader = DataLoader()

    def test_none(self):
        self.assertRaisesRegexp(AssibleFileNotFound, 'on the Assible Controller', self._loader.path_dwim_relative_stack, None, None, None)

    def test_empty_strings(self):
        self.assertEqual(self._loader.path_dwim_relative_stack('', '', ''), './')

    def test_empty_lists(self):
        self.assertEqual(self._loader.path_dwim_relative_stack([], '', '~/'), os.path.expanduser('~'))

    def test_all_slash(self):
        self.assertEqual(self._loader.path_dwim_relative_stack('/', '/', '/'), '/')

    def test_path_endswith_role(self):
        self.assertEqual(self._loader.path_dwim_relative_stack(paths=['foo/bar/tasks/'], dirname='/', source='/'), '/')

    def test_path_endswith_role_source_tilde(self):
        self.assertEqual(self._loader.path_dwim_relative_stack(paths=['foo/bar/tasks/'], dirname='/', source='~/'), os.path.expanduser('~'))

    def test_path_endswith_role_source_main_yml(self):
        self.assertRaises(AssibleFileNotFound, self._loader.path_dwim_relative_stack, ['foo/bar/tasks/'], '/', 'main.yml')

    def test_path_endswith_role_source_main_yml_source_in_dirname(self):
        self.assertRaises(AssibleFileNotFound, self._loader.path_dwim_relative_stack, 'foo/bar/tasks/', 'tasks', 'tasks/main.yml')
class TestDataLoaderWithVault(unittest.TestCase):

    def setUp(self):
        self._loader = DataLoader()
        vault_secrets = [('default', TextVaultSecret('assible'))]
        self._loader.set_vault_secrets(vault_secrets)
        self.test_vault_data_path = os.path.join(os.path.dirname(__file__), 'fixtures', 'vault.yml')

    def tearDown(self):
        pass

    def test_get_real_file_vault(self):
        real_file_path = self._loader.get_real_file(self.test_vault_data_path)
        self.assertTrue(os.path.exists(real_file_path))

    def test_get_real_file_vault_no_vault(self):
        self._loader.set_vault_secrets(None)
        self.assertRaises(AssibleParserError, self._loader.get_real_file, self.test_vault_data_path)

    def test_get_real_file_vault_wrong_password(self):
        wrong_vault = [('default', TextVaultSecret('wrong_password'))]
        self._loader.set_vault_secrets(wrong_vault)
        self.assertRaises(AssibleVaultError, self._loader.get_real_file, self.test_vault_data_path)

    def test_get_real_file_not_a_path(self):
        self.assertRaisesRegexp(AssibleParserError, 'Invalid filename', self._loader.get_real_file, None)

    @patch.multiple(DataLoader, path_exists=lambda s, x: True, is_file=lambda s, x: True)
    def test_parse_from_vault_1_1_file(self):
        vaulted_data = """$ASSIBLE_VAULT;1.1;AES256
33343734386261666161626433386662623039356366656637303939306563376130623138626165
6436333766346533353463636566313332623130383662340a393835656134633665333861393331
37666233346464636263636530626332623035633135363732623332313534306438393366323966
3135306561356164310a343937653834643433343734653137383339323330626437313562306630
3035
"""
        if PY3:
            builtins_name = 'builtins'
        else:
            builtins_name = '__builtin__'
        with patch(builtins_name + '.open', mock_open(read_data=vaulted_data.encode('utf-8'))):
            output = self._loader.load_from_file('dummy_vault.txt')
            self.assertEqual(output, dict(foo='bar'))
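# Hedged sketch (not part of the suite) of how a fixture like
# fixtures/vault.yml could be produced: encrypt a payload with VaultLib
# under the same 'assible' password the tests above expect. The plaintext
# and output path are illustrative assumptions, not the real fixture data.
from assible.parsing.vault import VaultLib

def make_vault_fixture(path='fixtures/vault.yml'):
    vault = VaultLib(secrets=[('default', TextVaultSecret('assible'))])
    b_ciphertext = vault.encrypt(b'foo: bar')  # illustrative plaintext
    with open(path, 'wb') as f:
        f.write(b_ciphertext)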
def load_data(self, ds, variable_manager=None, loader=None):
    ''' walk the input datastructure and assign any values '''

    if ds is None:
        raise AssibleAssertionError('ds (%s) should not be None but it is.' % ds)

    # cache the datastructure internally
    setattr(self, '_ds', ds)

    # the variable manager class is used to manage and merge variables
    # down to a single dictionary for reference in templating, etc.
    self._variable_manager = variable_manager

    # the data loader class is used to parse data from strings and files
    if loader is not None:
        self._loader = loader
    else:
        self._loader = DataLoader()

    # call the preprocess_data() function to massage the data into
    # something we can more easily parse, and then call the validation
    # function on it to ensure there are no incorrect key values
    ds = self.preprocess_data(ds)
    self._validate_attributes(ds)

    # Walk all attributes in the class. We sort them based on their priority
    # so that certain fields can be loaded before others, if they are dependent.
    for name, attr in sorted(iteritems(self._valid_attrs), key=operator.itemgetter(1)):
        # copy the value over unless a _load_field method is defined
        target_name = name
        if name in self._alias_attrs:
            target_name = self._alias_attrs[name]
        if name in ds:
            method = getattr(self, '_load_%s' % name, None)
            if method:
                self._attributes[target_name] = method(name, ds[name])
            else:
                self._attributes[target_name] = ds[name]

    # run early, non-critical validation
    self.validate()

    # return the constructed object
    return self
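# Self-contained sketch of the priority-sorted attribute walk above.
# FakeAttr is a hypothetical stand-in for the real field-attribute class;
# the only behavior it mimics is being orderable, so that
# sorted(..., key=operator.itemgetter(1)) can rank (name, attr) pairs.
import operator

class FakeAttr:
    def __init__(self, priority=0):
        self.priority = priority

    def __lt__(self, other):
        # assumption: higher-priority attributes should load first
        return self.priority > other.priority

valid_attrs = {'vars': FakeAttr(priority=100), 'name': FakeAttr(priority=0)}
ds = {'name': 'demo', 'vars': {'x': 1}}
attributes = {}
for name, attr in sorted(valid_attrs.items(), key=operator.itemgetter(1)):
    if name in ds:
        attributes[name] = ds[name]  # no _load_<name> hook in this sketch
print(list(attributes))  # ['vars', 'name'] -- higher priority loaded first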
def _play_prereqs():
    options = context.CLIARGS

    # all needs loader
    loader = DataLoader()

    basedir = options.get('basedir', False)
    if basedir:
        loader.set_basedir(basedir)
        add_all_plugin_dirs(basedir)
        AssibleCollectionConfig.playbook_paths = basedir
        default_collection = _get_collection_name_from_path(basedir)
        if default_collection:
            display.warning(u'running with default collection {0}'.format(default_collection))
            AssibleCollectionConfig.default_collection = default_collection

    vault_ids = list(options['vault_ids'])
    default_vault_ids = C.DEFAULT_VAULT_IDENTITY_LIST
    vault_ids = default_vault_ids + vault_ids

    vault_secrets = CLI.setup_vault_secrets(loader,
                                            vault_ids=vault_ids,
                                            vault_password_files=list(options['vault_password_files']),
                                            ask_vault_pass=options['ask_vault_pass'],
                                            auto_prompt=False)
    loader.set_vault_secrets(vault_secrets)

    # create the inventory, and filter it based on the subset specified (if any)
    inventory = InventoryManager(loader=loader, sources=options['inventory'])

    # create the variable manager, which will be shared throughout
    # the code, ensuring a consistent view of global variables
    variable_manager = VariableManager(loader=loader, inventory=inventory,
                                       version_info=CLI.version_info(gitinfo=False))

    return loader, inventory, variable_manager
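# Hedged usage sketch: _play_prereqs() reads everything from
# context.CLIARGS, so a caller must populate it first, as the CLI
# normally does. The keys below mirror the ones the function reads;
# the values are illustrative.
from assible import context
from assible.module_utils.common.collections import ImmutableDict

context.CLIARGS = ImmutableDict(basedir=False,
                                vault_ids=[],
                                vault_password_files=[],
                                ask_vault_pass=False,
                                inventory=['localhost,'])
loader, inventory, variable_manager = _play_prereqs()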
def main():
    host_list = ['localhost', 'www.example.com', 'www.google.com']
    # since the API is constructed for CLI it expects certain options to always be set in the context object
    context.CLIARGS = ImmutableDict(connection='smart', module_path=['/to/mymodules', '/usr/share/assible'], forks=10, become=None,
                                    become_method=None, become_user=None, check=False, diff=False)
    # required for
    # https://github.com/assible/assible/blob/devel/lib/assible/inventory/manager.py#L204
    sources = ','.join(host_list)
    if len(host_list) == 1:
        sources += ','

    # initialize needed objects
    loader = DataLoader()  # Takes care of finding and reading yaml, json and ini files
    passwords = dict(vault_pass='******')

    # Instantiate our ResultsCollectorJSONCallback for handling results as they come in.
    # Assible expects this to be one of its main display outlets.
    results_callback = ResultsCollectorJSONCallback()

    # create inventory, use path to host config file as source or hosts in a comma separated string
    inventory = InventoryManager(loader=loader, sources=sources)

    # variable manager takes care of merging all the different sources to give you a
    # unified view of variables available in each context
    variable_manager = VariableManager(loader=loader, inventory=inventory)

    # instantiate task queue manager, which takes care of forking and setting up all
    # objects to iterate over host list and tasks
    # IMPORTANT: This also adds library dirs paths to the module loader
    # IMPORTANT: and so it must be initialized before calling `Play.load()`.
    tqm = TaskQueueManager(
        inventory=inventory,
        variable_manager=variable_manager,
        loader=loader,
        passwords=passwords,
        stdout_callback=results_callback,  # Use our custom callback instead of the ``default`` callback plugin, which prints to stdout
    )

    # create data structure that represents our play, including tasks,
    # this is basically what our YAML loader does internally.
    play_source = dict(
        name="Assible Play",
        hosts=host_list,
        gather_facts='no',
        tasks=[
            dict(action=dict(module='shell', args='ls'), register='shell_out'),
            dict(action=dict(module='debug', args=dict(msg='{{shell_out.stdout}}'))),
            dict(action=dict(module='command', args=dict(cmd='/usr/bin/uptime'))),
        ])

    # Create play object, playbook objects use .load instead of init or new methods,
    # this will also automatically create the task objects from the info provided in play_source
    play = Play().load(play_source, variable_manager=variable_manager, loader=loader)

    # Actually run it
    try:
        result = tqm.run(play)  # most interesting data for a play is actually sent to the callback's methods
    finally:
        # we always need to cleanup child procs and the structures we use to communicate with them
        tqm.cleanup()
        if loader:
            loader.cleanup_all_tmp_files()

    # Remove assible tmpdir
    shutil.rmtree(C.DEFAULT_LOCAL_TMP, True)

    print("UP ***********")
    for host, result in results_callback.host_ok.items():
        print('{0} >>> {1}'.format(host, result._result['stdout']))

    print("FAILED *******")
    for host, result in results_callback.host_failed.items():
        print('{0} >>> {1}'.format(host, result._result['msg']))

    print("DOWN *********")
    for host, result in results_callback.host_unreachable.items():
        print('{0} >>> {1}'.format(host, result._result['msg']))
class TaskResult:
    '''
    This class is responsible for interpreting the resulting data
    from an executed task, and provides helper methods for determining
    the result of a given task.
    '''

    def __init__(self, host, task, return_data, task_fields=None):
        self._host = host
        self._task = task

        if isinstance(return_data, dict):
            self._result = return_data.copy()
        else:
            self._result = DataLoader().load(return_data)

        if task_fields is None:
            self._task_fields = dict()
        else:
            self._task_fields = task_fields

    @property
    def task_name(self):
        return self._task_fields.get('name', None) or self._task.get_name()

    def is_changed(self):
        return self._check_key('changed')

    def is_skipped(self):
        # loop results
        if 'results' in self._result:
            results = self._result['results']
            # Loop tasks are only considered skipped if all items were skipped.
            # some squashed results (eg, yum) are not dicts and can't be skipped individually
            if results and all(isinstance(res, dict) and res.get('skipped', False) for res in results):
                return True

        # regular tasks and squashed non-dict results
        return self._result.get('skipped', False)

    def is_failed(self):
        if 'failed_when_result' in self._result or \
           'results' in self._result and True in [True for x in self._result['results'] if 'failed_when_result' in x]:
            return self._check_key('failed_when_result')
        else:
            return self._check_key('failed')

    def is_unreachable(self):
        return self._check_key('unreachable')

    def needs_debugger(self, globally_enabled=False):
        _debugger = self._task_fields.get('debugger')
        _ignore_errors = C.TASK_DEBUGGER_IGNORE_ERRORS and self._task_fields.get('ignore_errors')

        ret = False
        if globally_enabled and ((self.is_failed() and not _ignore_errors) or self.is_unreachable()):
            ret = True

        if _debugger in ('always',):
            ret = True
        elif _debugger in ('never',):
            ret = False
        elif _debugger in ('on_failed',) and self.is_failed() and not _ignore_errors:
            ret = True
        elif _debugger in ('on_unreachable',) and self.is_unreachable():
            ret = True
        elif _debugger in ('on_skipped',) and self.is_skipped():
            ret = True

        return ret

    def _check_key(self, key):
        '''get a specific key from the result or its items'''

        if isinstance(self._result, dict) and key in self._result:
            return self._result.get(key, False)
        else:
            flag = False
            for res in self._result.get('results', []):
                if isinstance(res, dict):
                    flag |= res.get(key, False)
            return flag

    def clean_copy(self):
        ''' returns 'clean' taskresult object '''

        # FIXME: clean task_fields, _task and _host copies
        result = TaskResult(self._host, self._task, {}, self._task_fields)

        # statuses are already reflected on the event type
        if result._task and result._task.action in ['debug']:
            # debug is verbose by default to display vars, no need to add invocation
            ignore = _IGNORE + ('invocation',)
        else:
            ignore = _IGNORE

        subset = {}
        # preserve subset for later
        for sub in _SUB_PRESERVE:
            if sub in self._result:
                subset[sub] = {}
                for key in _SUB_PRESERVE[sub]:
                    if key in self._result[sub]:
                        subset[sub][key] = self._result[sub][key]

        if isinstance(self._task.no_log, bool) and self._task.no_log or self._result.get('_assible_no_log', False):
            x = {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result"}

            # preserve full
            for preserve in _PRESERVE:
                if preserve in self._result:
                    x[preserve] = self._result[preserve]

            result._result = x
        elif self._result:
            result._result = module_response_deepcopy(self._result)

            # actually remove
            for remove_key in ignore:
                if remove_key in result._result:
                    del result._result[remove_key]

            # remove almost ALL internal keys, keep ones relevant to callback
            strip_internal_keys(result._result, exceptions=CLEAN_EXCEPTIONS)

        # keep subset
        result._result.update(subset)

        return result
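# Hedged usage sketch: host and task below are SimpleNamespace stand-ins,
# not real Host/Task objects; they carry only the attributes TaskResult
# touches in these calls. The result payload is illustrative.
from types import SimpleNamespace

host = SimpleNamespace(name='localhost')
task = SimpleNamespace(action='command', no_log=False, get_name=lambda: 'demo task')
tr = TaskResult(host, task, {'changed': True, 'stdout': 'ok'})
assert tr.is_changed()
assert not tr.is_failed() and not tr.is_skipped()
print(tr.task_name)  # 'demo task' -- falls back to task.get_name()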
class TestDataLoader(unittest.TestCase):

    def setUp(self):
        self._loader = DataLoader()

    @patch('os.path.exists')
    def test__is_role(self, p_exists):
        p_exists.side_effect = lambda p: p == b'test_path/tasks/main.yml'
        self.assertTrue(self._loader._is_role('test_path/tasks'))
        self.assertTrue(self._loader._is_role('test_path/'))

    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_json_from_file(self, mock_def):
        mock_def.return_value = (b"""{"a": 1, "b": 2, "c": 3}""", True)
        output = self._loader.load_from_file('dummy_json.txt')
        self.assertEqual(output, dict(a=1, b=2, c=3))

    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_yaml_from_file(self, mock_def):
        mock_def.return_value = (b"""
        a: 1
        b: 2
        c: 3
        """, True)
        output = self._loader.load_from_file('dummy_yaml.txt')
        self.assertEqual(output, dict(a=1, b=2, c=3))

    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_fail_from_file(self, mock_def):
        mock_def.return_value = (b"""
        TEXT:
            ***
               NOT VALID
        """, True)
        self.assertRaises(AssibleParserError, self._loader.load_from_file, 'dummy_yaml_bad.txt')

    @patch('assible.errors.AssibleError._get_error_lines_from_file')
    @patch.object(DataLoader, '_get_file_contents')
    def test_tab_error(self, mock_def, mock_get_error_lines):
        mock_def.return_value = (u"""---\nhosts: localhost\nvars:\n foo: bar\n\tblip: baz""", True)
        mock_get_error_lines.return_value = ('''\tblip: baz''', '''..foo: bar''')
        with self.assertRaises(AssibleParserError) as cm:
            self._loader.load_from_file('dummy_yaml_text.txt')
        self.assertIn(yaml_strings.YAML_COMMON_LEADING_TAB_ERROR, str(cm.exception))
        self.assertIn('foo: bar', str(cm.exception))

    @patch('assible.parsing.dataloader.unfrackpath', mock_unfrackpath_noop)
    @patch.object(DataLoader, '_is_role')
    def test_path_dwim_relative(self, mock_is_role):
        """
        simulate a nested dynamic include:

        playbook.yml:
        - hosts: localhost
          roles:
            - { role: 'testrole' }

        testrole/tasks/main.yml:
        - include: "include1.yml"
          static: no

        testrole/tasks/include1.yml:
        - include: include2.yml
          static: no

        testrole/tasks/include2.yml:
        - debug: msg="blah"
        """
        mock_is_role.return_value = False

        with patch('os.path.exists') as mock_os_path_exists:
            mock_os_path_exists.return_value = False
            self._loader.path_dwim_relative('/tmp/roles/testrole/tasks', 'tasks', 'included2.yml')

            # Fetch first args for every call
            # mock_os_path_exists.assert_any_call isn't used because os.path.normpath must be used in order to compare paths
            called_args = [os.path.normpath(to_text(call[0][0])) for call in mock_os_path_exists.call_args_list]

            # 'path_dwim_relative' docstrings say 'with or without explicitly named dirname subdirs':
            self.assertIn('/tmp/roles/testrole/tasks/included2.yml', called_args)
            self.assertIn('/tmp/roles/testrole/tasks/tasks/included2.yml', called_args)

            # relative directories below are taken into account too:
            self.assertIn('tasks/included2.yml', called_args)
            self.assertIn('included2.yml', called_args)

    def test_path_dwim_root(self):
        self.assertEqual(self._loader.path_dwim('/'), '/')

    def test_path_dwim_home(self):
        self.assertEqual(self._loader.path_dwim('~'), os.path.expanduser('~'))

    def test_path_dwim_tilde_slash(self):
        self.assertEqual(self._loader.path_dwim('~/'), os.path.expanduser('~'))

    def test_get_real_file(self):
        self.assertEqual(self._loader.get_real_file(__file__), __file__)

    def test_is_file(self):
        self.assertTrue(self._loader.is_file(__file__))

    def test_is_directory_positive(self):
        self.assertTrue(self._loader.is_directory(os.path.dirname(__file__)))

    def test_get_file_contents_none_path(self):
        self.assertRaisesRegexp(AssibleParserError, 'Invalid filename',
                                self._loader._get_file_contents, None)

    def test_get_file_contents_non_existent_path(self):
        self.assertRaises(AssibleFileNotFound, self._loader._get_file_contents, '/non_existent_file')
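# Hedged sketch of the unmocked DataLoader calls the tests above exercise:
# load() parses a YAML/JSON string in memory, while load_from_file() reads
# and parses a file on disk. The inline string is illustrative data.
loader = DataLoader()
parsed = loader.load('{"a": 1, "b": [2, 3]}')
assert parsed == {'a': 1, 'b': [2, 3]}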
def run(self):
    super(VaultCLI, self).run()
    loader = DataLoader()

    # set default restrictive umask
    old_umask = os.umask(0o077)

    vault_ids = list(context.CLIARGS['vault_ids'])

    # there are 3 types of actions, those that just 'read' (decrypt, view) and only
    # need to ask for a password once, and those that 'write' (create, encrypt) that
    # ask for a new password and confirm it, and 'read/write' (rekey) that asks for the
    # old password, then asks for a new one and confirms it.

    default_vault_ids = C.DEFAULT_VAULT_IDENTITY_LIST
    vault_ids = default_vault_ids + vault_ids

    action = context.CLIARGS['action']

    # TODO: instead of prompting for these before, we could let VaultEditor
    #       call a callback when it needs it.
    if action in ['decrypt', 'view', 'rekey', 'edit']:
        vault_secrets = self.setup_vault_secrets(loader,
                                                 vault_ids=vault_ids,
                                                 vault_password_files=list(context.CLIARGS['vault_password_files']),
                                                 ask_vault_pass=context.CLIARGS['ask_vault_pass'])
        if not vault_secrets:
            raise AssibleOptionsError("A vault password is required to use Assible's Vault")

    if action in ['encrypt', 'encrypt_string', 'create']:
        encrypt_vault_id = None
        # no --encrypt-vault-id context.CLIARGS['encrypt_vault_id'] for 'edit'
        if action not in ['edit']:
            encrypt_vault_id = context.CLIARGS['encrypt_vault_id'] or C.DEFAULT_VAULT_ENCRYPT_IDENTITY

        vault_secrets = None
        vault_secrets = \
            self.setup_vault_secrets(loader,
                                     vault_ids=vault_ids,
                                     vault_password_files=list(context.CLIARGS['vault_password_files']),
                                     ask_vault_pass=context.CLIARGS['ask_vault_pass'],
                                     create_new_password=True)

        if len(vault_secrets) > 1 and not encrypt_vault_id:
            raise AssibleOptionsError("The vault-ids %s are available to encrypt. Specify the vault-id to encrypt with --encrypt-vault-id" %
                                      ','.join([x[0] for x in vault_secrets]))

        if not vault_secrets:
            raise AssibleOptionsError("A vault password is required to use Assible's Vault")

        encrypt_secret = match_encrypt_secret(vault_secrets,
                                              encrypt_vault_id=encrypt_vault_id)

        # only one secret for encrypt for now, use the first vault_id and use its first secret
        # TODO: exception if more than one?
        self.encrypt_vault_id = encrypt_secret[0]
        self.encrypt_secret = encrypt_secret[1]

    if action in ['rekey']:
        encrypt_vault_id = context.CLIARGS['encrypt_vault_id'] or C.DEFAULT_VAULT_ENCRYPT_IDENTITY
        # print('encrypt_vault_id: %s' % encrypt_vault_id)
        # print('default_encrypt_vault_id: %s' % default_encrypt_vault_id)

        # new_vault_ids should only ever be one item, from
        # load the default vault ids if we are using encrypt-vault-id
        new_vault_ids = []
        if encrypt_vault_id:
            new_vault_ids = default_vault_ids
        if context.CLIARGS['new_vault_id']:
            new_vault_ids.append(context.CLIARGS['new_vault_id'])

        new_vault_password_files = []
        if context.CLIARGS['new_vault_password_file']:
            new_vault_password_files.append(context.CLIARGS['new_vault_password_file'])

        new_vault_secrets = \
            self.setup_vault_secrets(loader,
                                     vault_ids=new_vault_ids,
                                     vault_password_files=new_vault_password_files,
                                     ask_vault_pass=context.CLIARGS['ask_vault_pass'],
                                     create_new_password=True)

        if not new_vault_secrets:
            raise AssibleOptionsError("A new vault password is required to use Assible's Vault rekey")

        # There is only one new_vault_id currently and one new_vault_secret, or we
        # use the id specified in --encrypt-vault-id
        new_encrypt_secret = match_encrypt_secret(new_vault_secrets,
                                                  encrypt_vault_id=encrypt_vault_id)

        self.new_encrypt_vault_id = new_encrypt_secret[0]
        self.new_encrypt_secret = new_encrypt_secret[1]

    loader.set_vault_secrets(vault_secrets)

    # FIXME: do we need to create VaultEditor here? it's not reused
    vault = VaultLib(vault_secrets)
    self.editor = VaultEditor(vault)

    context.CLIARGS['func']()

    # and restore umask
    os.umask(old_umask)
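# Hedged sketch of the --encrypt-vault-id selection performed above: given
# several candidate secrets, match_encrypt_secret() returns the (vault_id,
# secret) pair to encrypt with. TextVaultSecret mirrors the test helper used
# earlier on this page; the ids and passwords are illustrative.
secrets = [('dev', TextVaultSecret('devpass')),
           ('prod', TextVaultSecret('prodpass'))]
vault_id, secret = match_encrypt_secret(secrets, encrypt_vault_id='prod')
assert vault_id == 'prod'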