def _play_prereqs(options):
    """Build the (loader, inventory, variable_manager) trio shared by CLI commands."""
    loader = DataLoader()

    basedir = getattr(options, 'basedir', False)
    if basedir:
        loader.set_basedir(basedir)

    # Configured default vault identities come first, then the CLI-supplied ones.
    vault_ids = C.DEFAULT_VAULT_IDENTITY_LIST + options.vault_ids
    vault_secrets = CLI.setup_vault_secrets(
        loader,
        vault_ids=vault_ids,
        vault_password_files=options.vault_password_files,
        ask_vault_pass=options.ask_vault_pass,
        auto_prompt=False,
    )
    loader.set_vault_secrets(vault_secrets)

    # create the inventory, and filter it based on the subset specified (if any)
    inventory = InventoryManager(loader=loader, sources=options.inventory)

    # One VariableManager is shared throughout the run for a consistent
    # view of global variables.
    variable_manager = VariableManager(loader=loader, inventory=inventory)
    variable_manager.extra_vars = load_extra_vars(loader=loader, options=options)
    variable_manager.options_vars = load_options_vars(options, CLI.version_info(gitinfo=False))

    return loader, inventory, variable_manager
def test_variable_manager_task_vars(self):
    # FIXME: BCS make this work
    return

    # pylint: disable=unreachable
    # Everything below is intentionally dead until the FIXME above is resolved.
    fake_loader = DictDataLoader({})

    mock_task = MagicMock()
    mock_task._role = None
    mock_task.loop = None
    mock_task.get_vars.return_value = dict(foo="bar")
    mock_task.get_include_params.return_value = dict()

    mock_all = MagicMock()
    mock_all.get_vars.return_value = {}
    mock_all.get_file_vars.return_value = {}

    mock_host = MagicMock()
    mock_host.get.name.return_value = 'test01'
    mock_host.get_vars.return_value = {}
    mock_host.get_host_vars.return_value = {}

    mock_inventory = MagicMock()
    mock_inventory.hosts.get.return_value = mock_host
    mock_inventory.hosts.get.name.return_value = 'test01'
    mock_inventory.get_host.return_value = mock_host
    mock_inventory.groups.__getitem__.return_value = mock_all

    v = VariableManager(loader=fake_loader, inventory=mock_inventory)
    self.assertEqual(v.get_vars(task=mock_task, use_cache=False).get("foo"), "bar")
class Inventory24(Inventory):
    # Adapter exposing the pre-2.4 Inventory interface on top of the 2.4+
    # InventoryManager / VariableManager split.

    def __init__(self, inventory, ask_vault_pass, vault_password_files, vault_ids):
        from ansible.cli import CLI
        super(Inventory24, self).__init__()
        loader = DataLoader()
        # Only bother resolving vault secrets when some vault option was given.
        if vault_ids or vault_password_files or ask_vault_pass:
            CLI.setup_vault_secrets(loader, vault_ids, vault_password_files, ask_vault_pass)
        self.inventory = ansible.inventory.manager.InventoryManager(loader=loader, sources=inventory)
        self.variable_manager = VariableManager(loader=loader)
        self.variable_manager.set_inventory(self.inventory)

    # internal functions that actually do the work
    # adapted almost entirely from lib/ansible/vars/manager.py
    def _plugins_inventory(self, entities):
        import os
        from ansible.plugins.loader import vars_loader
        from ansible.utils.vars import combine_vars
        ''' merges all entities by inventory source '''
        data = {}
        for inventory_dir in self.variable_manager._inventory._sources:
            if ',' in inventory_dir:  # skip host lists
                continue
            elif not os.path.isdir(inventory_dir):  # always pass 'inventory directory'
                inventory_dir = os.path.dirname(inventory_dir)
            for plugin in vars_loader.all():
                data = combine_vars(data, self._get_plugin_vars(plugin, inventory_dir, entities))
        return data

    def _get_plugin_vars(self, plugin, path, entities):
        from ansible.inventory.host import Host
        data = {}
        try:
            data = plugin.get_vars(self.variable_manager._loader, path, entities)
        except AttributeError:
            # Older vars plugins predate get_vars(); fall back to the
            # per-host / per-group hooks instead.
            for entity in entities:
                if isinstance(entity, Host):
                    data.update(plugin.get_host_vars(entity.name))
                else:
                    data.update(plugin.get_group_vars(entity.name))
        return data

    def get_group_vars(self, group):
        # Group vars come straight from the vars plugins.
        return self._plugins_inventory([group])

    def get_host_vars(self, host):
        try:
            all_vars = self.variable_manager.get_vars(host=host, include_hostvars=True)
        except ansible.errors.AnsibleParserError:
            raise NoVaultSecretFound
        # play, host, task, include_hostvars, include_delegate_to
        # Strip the synthetic "magic" variables so callers only see real host vars.
        magic_vars = ['ansible_playbook_python', 'groups', 'group_names', 'inventory_dir', 'inventory_file',
                      'inventory_hostname', 'inventory_hostname_short', 'omit', 'playbook_dir']
        return {k: v for (k, v) in all_vars.items() if k not in magic_vars}

    def get_group(self, group_name):
        return self.inventory.groups[group_name]
def get_ansible_host_ip():
    """Return the space-joined 'ansible_host' values of every host in ./hosts."""
    loader = DataLoader()
    inventory = InventoryManager(loader=loader, sources='hosts')
    variable_manager = VariableManager(loader=loader, inventory=inventory)

    # One vars dict per inventory host.
    host_vars = [variable_manager.get_vars(host=h) for h in inventory.get_hosts()]
    return ' '.join(str(v['ansible_host']) for v in host_vars)
def main():
    """Run 'uptime' on a fixed host list via the TaskQueueManager API and print results."""
    host_list = ['localhost', 'www.example.com', 'www.google.com']
    Options = namedtuple('Options',
                         ['connection', 'module_path', 'forks', 'remote_user',
                          'private_key_file', 'ssh_common_args', 'ssh_extra_args',
                          'sftp_extra_args', 'scp_extra_args', 'become',
                          'become_method', 'become_user', 'verbosity', 'check'])

    # initialize needed objects
    variable_manager = VariableManager()
    loader = DataLoader()
    options = Options(connection='smart', module_path='/usr/share/ansible', forks=100,
                      remote_user=None, private_key_file=None, ssh_common_args=None,
                      ssh_extra_args=None, sftp_extra_args=None, scp_extra_args=None,
                      become=None, become_method=None, become_user=None,
                      verbosity=None, check=False)
    passwords = dict()

    # create inventory and pass to var manager
    inventory = Inventory(loader=loader, variable_manager=variable_manager, host_list=host_list)
    variable_manager.set_inventory(inventory)

    # One play, one task: run uptime on every host.
    play_source = dict(
        name="Ansible Play",
        hosts=host_list,
        gather_facts='no',
        tasks=[dict(action=dict(module='command', args=dict(cmd='/usr/bin/uptime')))]
    )
    play = Play().load(play_source, variable_manager=variable_manager, loader=loader)

    # actually run it
    tqm = None
    callback = ResultsCollector()
    try:
        tqm = TaskQueueManager(
            inventory=inventory,
            variable_manager=variable_manager,
            loader=loader,
            options=options,
            passwords=passwords,
        )
        tqm._stdout_callback = callback
        result = tqm.run(play)
    finally:
        if tqm is not None:
            tqm.cleanup()

    print("UP ***********")
    for host, result in callback.host_ok.items():
        print('{} >>> {}'.format(host, result._result['stdout']))

    print("FAILED *******")
    for host, result in callback.host_failed.items():
        print('{} >>> {}'.format(host, result._result['msg']))

    print("DOWN *********")
    for host, result in callback.host_unreachable.items():
        print('{} >>> {}'.format(host, result._result['msg']))
def run(self, playbook, hosts, extra_vars=None, log='', with_output=False, use_root=False):
    """Execute *playbook* against *hosts* and return the accumulated result dict.

    Fixes:
    - `extra_vars` previously defaulted to a shared mutable `{}`, so entries
      such as 'ansible_hosts' leaked between calls; it now defaults to None.
    - The missing-playbook branch fell through to code that referenced
      `new_display`, `host_file` and `result['summary']`, none of which exist
      on that path (NameError/KeyError); it now returns early.
    """
    if extra_vars is None:
        extra_vars = {}
    self.result['playbook'] = playbook

    if not os.path.exists(playbook):
        result = {
            'errno': -3,
            'msg': 'not exists playbook: ' + playbook
        }
        self.result.update(result)
        if self.result['errno'] != 0:
            self.result['msg'] = '无法配置完成,请联系管理员!'
        return self.result

    # Render the host template and expose the host list to the playbook.
    AT = Ansi_Template()
    hosts, host_file = AT.make_host_template(hosts, use_root)
    extra_vars['ansible_hosts'] = ':'.join(hosts)

    inventory = InventoryManager(loader=self.loader, sources=[host_file])
    variable_manager = VariableManager(loader=self.loader, inventory=inventory)
    variable_manager.extra_vars = extra_vars
    pbex = PlaybookExecutor(playbooks=[playbook], inventory=inventory,
                            variable_manager=variable_manager, loader=self.loader,
                            options=self.options, passwords={})
    new_display = New_Display(log_file=log, debug=self.debug)
    results_callback = ResultCallback(new_display=new_display)
    pbex._tqm._stdout_callback = results_callback
    try:
        errno = pbex.run()
        result = results_callback.tasks
        result['errno'] = errno
    except AnsibleParserError as e:
        # errno -2: the playbook itself failed to parse.
        msg = 'syntax problems: {0}'.format(str(e))
        result = {
            'errno': -2,
            'msg': msg
        }
        self.write_log(log, msg)
        print('syntax problems: {0}'.format(str(e)))

    if with_output:
        result['output'] = '\n'.join(new_display.log_add)
    # errno -1: the play parsed but produced no per-host summary at all.
    if result['errno'] != -2 and not result['summary']:
        msg = 'no host executed'
        result = {
            'errno': -1,
            'msg': msg
        }
        self.write_log(log, msg)

    os.unlink(host_file)
    self.result.update(result)
    if self.result['errno'] != 0:
        self.result['msg'] = '无法配置完成,请联系管理员!'
    return self.result
def test_basic_manager(self):
    """A fresh VariableManager must expose the expected baseline variables."""
    fake_loader = DictDataLoader({})
    mock_inventory = MagicMock()
    v = VariableManager(loader=fake_loader, inventory=mock_inventory)
    variables = v.get_vars(use_cache=False)

    # Check var manager expected values, never check: ['omit', 'vars']
    # FIXME: add the following ['ansible_version', 'ansible_playbook_python', 'groups']
    expected = (('playbook_dir', os.path.abspath('.')), )
    for name, value in expected:
        self.assertEqual(variables[name], value)
def test_variable_manager_play_vars(self):
    """Vars declared on a play must surface through get_vars(play=...)."""
    fake_loader = DictDataLoader({})

    mock_play = MagicMock()
    mock_play.get_vars.return_value = dict(foo="bar")
    mock_play.get_roles.return_value = []
    mock_play.get_vars_files.return_value = []

    mock_inventory = MagicMock()
    v = VariableManager(loader=fake_loader, inventory=mock_inventory)

    play_vars = v.get_vars(play=mock_play, use_cache=False)
    self.assertEqual(play_vars.get("foo"), "bar")
def test_variable_manager_role_vars_dependencies(self):
    '''
    Tests vars from role dependencies with duplicate dependencies.
    '''
    mock_inventory = MagicMock()
    fake_loader = DictDataLoader({
        # role common-role
        '/etc/ansible/roles/common-role/tasks/main.yml': """
        - debug: msg="{{role_var}}"
        """,
        # We do not need allow_duplicates: yes for this role
        # because eliminating duplicates is done by the execution
        # strategy, which we do not test here.

        # role role1
        '/etc/ansible/roles/role1/vars/main.yml': """
        role_var: "role_var_from_role1"
        """,
        '/etc/ansible/roles/role1/meta/main.yml': """
        dependencies:
          - { role: common-role }
        """,

        # role role2
        '/etc/ansible/roles/role2/vars/main.yml': """
        role_var: "role_var_from_role2"
        """,
        '/etc/ansible/roles/role2/meta/main.yml': """
        dependencies:
          - { role: common-role }
        """,
    })

    v = VariableManager(loader=fake_loader, inventory=mock_inventory)
    # Pre-seed an empty fact cache so get_vars() does not need real hosts.
    v._fact_cache = defaultdict(dict)

    play1 = Play.load(dict(
        hosts=['all'],
        roles=['role1', 'role2'],
    ), loader=fake_loader, variable_manager=v)

    # The task defined by common-role exists twice because role1
    # and role2 depend on common-role.  Check that the tasks see
    # different values of role_var.
    blocks = play1.compile()

    task = blocks[1].block[0]
    res = v.get_vars(play=play1, task=task)
    self.assertEqual(res['role_var'], 'role_var_from_role1')

    task = blocks[2].block[0]
    res = v.get_vars(play=play1, task=task)
    self.assertEqual(res['role_var'], 'role_var_from_role2')
def _create_pbex_args(playbooks, extra_vars):
    """Assemble the keyword arguments needed to build a PlaybookExecutor."""
    loader = DataLoader()
    inventory = InventoryManager(loader=loader)
    var_mgr = VariableManager(loader=loader, inventory=inventory)
    var_mgr.extra_vars = extra_vars
    return dict(
        playbooks=playbooks,
        inventory=inventory,
        variable_manager=var_mgr,
        loader=loader,
        options=_PBEX_OPTIONS,
        passwords={},
    )
def test_flush_cache(self):
    """--flush-cache must drop previously cached host facts."""
    cli = PlaybookCLI(args=["ansible-playbook", "--flush-cache", "foobar.yml"])
    cli.parse()
    self.assertTrue(cli.options.flush_cache)

    variable_manager = VariableManager()
    fake_loader = DictDataLoader({'foobar.yml': ""})
    inventory = InventoryManager(loader=fake_loader, sources='testhost,')

    # Seed a fact so there is something to flush.
    variable_manager.set_host_facts(inventory.get_host('testhost'), {'canary': True})
    self.assertIn('testhost', variable_manager._fact_cache)

    cli._flush_cache(inventory, variable_manager)
    self.assertNotIn('testhost', variable_manager._fact_cache)
def test_variable_manager_extra_vars(self):
    """Extra vars must round-trip through get_vars() and be stored as a copy."""
    fake_loader = DictDataLoader({})
    extra_vars = dict(a=1, b=2, c=3)
    mock_inventory = MagicMock()
    v = VariableManager(loader=fake_loader, inventory=mock_inventory)
    v.extra_vars = extra_vars

    all_vars = v.get_vars(use_cache=False)
    for key, val in iteritems(extra_vars):
        self.assertEqual(all_vars.get(key), val)
    # The manager must hold its own copy, not the caller's dict.
    self.assertIsNot(v.extra_vars, extra_vars)
def __init__(self, inventory, ask_vault_pass, vault_password_files, vault_ids):
    """Wire up the 2.4+ inventory/variable-manager pair behind the old interface."""
    from ansible.cli import CLI
    super(Inventory24, self).__init__()
    loader = DataLoader()
    wants_vault = vault_ids or vault_password_files or ask_vault_pass
    if wants_vault:
        CLI.setup_vault_secrets(loader, vault_ids, vault_password_files, ask_vault_pass)
    self.inventory = ansible.inventory.manager.InventoryManager(loader=loader, sources=inventory)
    self.variable_manager = VariableManager(loader=loader)
    self.variable_manager.set_inventory(self.inventory)
def read_inventory_file(filename):
    """
    filename is a path to an ansible inventory file

    returns a mapping of group names ("webworker", "proxy", etc.)
    to lists of hostnames as listed in the inventory file.
    ("Hostnames" can also be IP addresses.)
    If the hostname in the file includes :<port>, that will be included here as well.
    """
    data_loader = DataLoader()
    inventory = get_inventory(filename, data_loader=data_loader)
    var_manager = VariableManager(data_loader, inventory)

    # Per-host 'ansible_port' (None when the host does not set one).
    ports = {}
    for host in inventory.get_hosts():
        ports[host.name] = var_manager.get_vars(host=host).get('ansible_port')

    def _with_port(host):
        # Append ':<port>' only when a port is configured for the host.
        if ports[host] is not None:
            return '{}:{}'.format(host, ports[host])
        return host

    return {group: [_with_port(host) for host in hosts]
            for group, hosts in get_inventory(filename).get_groups_dict().items()}
def ansible_runner_24x(playbook_path, extra_vars, options=None, inventory_src='localhost', console=True):
    """Build a PlaybookExecutor using the Ansible 2.4-era API shape."""
    loader = DataLoader()
    var_mgr = VariableManager(loader=loader)
    var_mgr.extra_vars = extra_vars
    inv = Inventory(loader=loader, sources=[inventory_src])
    var_mgr.set_inventory(inv)
    return PlaybookExecutor([playbook_path], inv, var_mgr, loader, options, {})
def ansible_runner_2x(playbook_path, extra_vars, options=None, inventory_src='localhost', console=True):
    """Build a PlaybookExecutor using the pre-2.4 Ansible API shape."""
    var_mgr = VariableManager()
    loader = DataLoader()
    var_mgr.extra_vars = extra_vars
    inv = Inventory(loader=loader, variable_manager=var_mgr, host_list=inventory_src)
    return PlaybookExecutor([playbook_path], inv, var_mgr, loader, options, {})
class Inventory20(Inventory):
    """Adapter exposing the common Inventory interface for Ansible <= 2.3."""

    def __init__(self, inventory, ask_vault_pass, vault_password_files, vault_ids):
        # The pre-2.4 API only supports a single vault password.
        if vault_ids or len(vault_password_files) > 1:
            raise NotImplementedError
        from ansible.cli import CLI
        super(Inventory20, self).__init__()
        loader = DataLoader()
        # BUG FIX: vault_pass was previously left unassigned when neither
        # ask_vault_pass nor a password file was given, so the
        # 'is not None' check below raised AttributeError.
        self.vault_pass = None
        if ask_vault_pass:
            self.vault_pass = CLI.ask_vault_passwords()
        elif vault_password_files:
            self.vault_pass = CLI.read_vault_password_file(vault_password_files[0], loader)
        if self.vault_pass is not None:
            loader.set_vault_password(self.vault_pass)
        self.variable_manager = VariableManager()
        try:
            self.inventory = ansible.inventory.Inventory(loader=loader,
                                                         variable_manager=self.variable_manager,
                                                         host_list=inventory)
        except ansible.errors.AnsibleError:
            raise NoVaultSecretFound
        self.variable_manager.set_inventory(self.inventory)

    def _handle_missing_return_result(self, fn, member):
        import inspect
        # Some Inventory versions require return_results=True to actually
        # return anything; detect that from the signature.
        # http://stackoverflow.com/a/197053
        argspec = inspect.getargspec(fn)  # renamed from 'vars' (shadowed builtin)
        if 'return_results' in argspec[0]:
            return fn(member, return_results=True)
        else:
            return fn(member)

    def get_group_vars(self, group):
        return self._handle_missing_return_result(self.inventory.get_group_vars, group)

    def get_host_vars(self, host):
        return self._handle_missing_return_result(self.inventory.get_host_vars, host)

    def get_group(self, group_name):
        return self.inventory.get_group(group_name)
def execute_playbook(self, playbook_name, playbook_input):
    """Run *playbook_name* on localhost with *playbook_input* as an extra var.

    BUG FIX: the original 'except Exception: pass' silently discarded every
    failure — including its own deliberate 'playbook returned with error'
    raise, making that raise a no-op.  Failures are still contained
    (best-effort contract preserved) but are now logged.
    """
    try:
        loader = DataLoader()
        inventory = InventoryManager(loader=loader, sources=['localhost'])
        variable_manager = VariableManager(loader=loader, inventory=inventory)
        Options = namedtuple('Options',
                             ['listtags', 'listtasks', 'listhosts', 'syntax', 'connection',
                              'module_path', 'forks', 'remote_user', 'private_key_file',
                              'ssh_common_args', 'ssh_extra_args', 'sftp_extra_args',
                              'scp_extra_args', 'become', 'become_method', 'become_user',
                              'verbosity', 'check', 'diff'])
        options = Options(listtags=False, listtasks=False, listhosts=False, syntax=False,
                          connection='ssh', module_path=None, forks=100, remote_user=None,
                          private_key_file=None, ssh_common_args=None, ssh_extra_args=None,
                          sftp_extra_args=None, scp_extra_args=None, become=None,
                          become_method=None, become_user=None, verbosity=None,
                          check=False, diff=False)
        variable_manager.extra_vars = {"playbook_input": playbook_input}
        pbex = PlaybookExecutor(playbooks=[playbook_name], inventory=inventory,
                                variable_manager=variable_manager, loader=loader,
                                options=options, passwords=None)
        ret_val = pbex.run()
        if ret_val != 0:
            msg = "playbook returned with error"
            self._logger.error(msg)
            raise Exception(msg)
    except Exception as exp:
        # Record the failure instead of discarding it with a bare 'pass'.
        self._logger.error("playbook %s failed: %s", playbook_name, exp)
def setUp(self):
    # Fixture: role tree l1 -> l2 -> l3.  Each role ships 'main' and 'alt'
    # variants of its tasks and defaults so tests can exercise the
    # tasks_from / defaults_from parameters of include_role.  Every defaults
    # file sets the shared 'test_variable' plus one role-local variable.
    self.loader = DictDataLoader({
        '/etc/ansible/roles/l1/tasks/main.yml': """
            - shell: echo 'hello world from l1'
            - include_role: name=l2
        """,
        '/etc/ansible/roles/l1/tasks/alt.yml': """
            - shell: echo 'hello world from l1 alt'
            - include_role: name=l2 tasks_from=alt defaults_from=alt
        """,
        '/etc/ansible/roles/l1/defaults/main.yml': """
            test_variable: l1-main
            l1_variable: l1-main
        """,
        '/etc/ansible/roles/l1/defaults/alt.yml': """
            test_variable: l1-alt
            l1_variable: l1-alt
        """,
        '/etc/ansible/roles/l2/tasks/main.yml': """
            - shell: echo 'hello world from l2'
            - include_role: name=l3
        """,
        '/etc/ansible/roles/l2/tasks/alt.yml': """
            - shell: echo 'hello world from l2 alt'
            - include_role: name=l3 tasks_from=alt defaults_from=alt
        """,
        '/etc/ansible/roles/l2/defaults/main.yml': """
            test_variable: l2-main
            l2_variable: l2-main
        """,
        '/etc/ansible/roles/l2/defaults/alt.yml': """
            test_variable: l2-alt
            l2_variable: l2-alt
        """,
        '/etc/ansible/roles/l3/tasks/main.yml': """
            - shell: echo 'hello world from l3'
        """,
        '/etc/ansible/roles/l3/tasks/alt.yml': """
            - shell: echo 'hello world from l3 alt'
        """,
        '/etc/ansible/roles/l3/defaults/main.yml': """
            test_variable: l3-main
            l3_variable: l3-main
        """,
        '/etc/ansible/roles/l3/defaults/alt.yml': """
            test_variable: l3-alt
            l3_variable: l3-alt
        """
    })
    self.var_manager = VariableManager(loader=self.loader)
def __init__(self, inventory, ask_vault_pass, vault_password_files, vault_ids):
    """Build the pre-2.4 Inventory/VariableManager pair, resolving a vault password.

    BUG FIX: self.vault_pass is now initialised to None; previously it was
    only assigned inside the if/elif branches, so the 'is not None' check
    raised AttributeError when no vault option was supplied.
    """
    # The pre-2.4 API only supports a single vault password.
    if vault_ids or len(vault_password_files) > 1:
        raise NotImplementedError
    from ansible.cli import CLI
    super(Inventory20, self).__init__()
    loader = DataLoader()
    self.vault_pass = None
    if ask_vault_pass:
        self.vault_pass = CLI.ask_vault_passwords()
    elif vault_password_files:
        self.vault_pass = CLI.read_vault_password_file(vault_password_files[0], loader)
    if self.vault_pass is not None:
        loader.set_vault_password(self.vault_pass)
    self.variable_manager = VariableManager()
    try:
        self.inventory = ansible.inventory.Inventory(loader=loader,
                                                     variable_manager=self.variable_manager,
                                                     host_list=inventory)
    except ansible.errors.AnsibleError:
        raise NoVaultSecretFound
    self.variable_manager.set_inventory(self.inventory)
def __init__(self, inventory, options=None):
    """Bind *inventory* to a fresh loader and variable manager."""
    self.loader = DataLoader()
    self.options = self.update_options(options)
    self.inventory = inventory
    self.variable_manager = VariableManager(loader=self.loader, inventory=self.inventory)
def execute(self, *args, **kwargs):
    """ Puts args and kwargs in a way ansible can understand. Calls ansible
    and interprets the result.
    """
    assert self.is_hooked_up, "the module should be hooked up to the api"

    self.module_args = module_args = self.get_module_args(args, kwargs)

    loader = DataLoader()
    # Register every known server in the implicit 'all' group, plus any
    # extra vars configured on the API options.
    inventory_manager = SourcelessInventoryManager(loader=loader)
    for host in self.api.servers:
        inventory_manager._inventory.add_host(host, group='all')
    for key, value in self.api.options.extra_vars.items():
        inventory_manager._inventory.set_variable('all', key, value)
    variable_manager = VariableManager(loader=loader, inventory=inventory_manager)

    # A single ad-hoc play wrapping the requested module invocation.
    play_source = {
        'name': "Suitable Play",
        'hosts': self.api.servers,
        'gather_facts': 'no',
        'tasks': [{
            'action': {
                'module': self.module_name,
                'args': module_args
            }
        }]
    }
    play = Play.load(play_source, variable_manager=variable_manager, loader=loader)

    log.info(u'running {}'.format(u'- {module_name}: {module_args}'.format(
        module_name=self.module_name,
        module_args=module_args)))
    start = datetime.utcnow()

    task_queue_manager = None
    callback = SilentCallbackModule()
    try:
        task_queue_manager = TaskQueueManager(
            inventory=inventory_manager,
            variable_manager=variable_manager,
            loader=loader,
            options=self.api.options,
            passwords=getattr(self.api.options, 'passwords', {}),
            stdout_callback=callback)
        task_queue_manager.run(play)
    finally:
        # Always release the worker processes, even if the run raised.
        if task_queue_manager is not None:
            task_queue_manager.cleanup()

    log.info(u'took {} to complete'.format(datetime.utcnow() - start))

    return self.evaluate_results(callback)
# Loader with a static vault secret so vaulted files decrypt with '123456'.
loader = DataLoader()
loader.set_vault_secrets([('default', VaultSecret(_bytes=to_bytes('123456')))])
# Ansible 2.8+ reads CLI options from the global context singleton instead
# of an options object passed to PlaybookExecutor.
context.CLIARGS = ImmutableDict(tags={}, listtags=False, listtasks=False, listhosts=False,
                                syntax=False, module_path=None, forks=100,
                                private_key_file=None, start_at_task=None)
inventory = InventoryManager(loader=loader, sources=[code_path + '/ansible/inventory'])
variable_manager = VariableManager(loader=loader, inventory=inventory,
                                   version_info=CLI.version_info(gitinfo=False))
# NOTE(review): assigns the private _extra_vars attribute directly —
# presumably because the public setter is unavailable here; confirm.
variable_manager._extra_vars = {'firstvar': False}
pbex = PlaybookExecutor(playbooks=[code_path + '/ansible/variables.yml'],
                        inventory=inventory, variable_manager=variable_manager,
                        loader=loader, passwords={})
results = pbex.run()
print(results)
class TestIncludeRole(unittest.TestCase):
    # Exercises include_role with a role tree l1 -> l2 -> l3, where each role
    # ships 'main' and 'alt' variants of its tasks and defaults files.

    def setUp(self):
        self.loader = DictDataLoader({
            '/etc/ansible/roles/l1/tasks/main.yml': """
                - shell: echo 'hello world from l1'
                - include_role: name=l2
            """,
            '/etc/ansible/roles/l1/tasks/alt.yml': """
                - shell: echo 'hello world from l1 alt'
                - include_role: name=l2 tasks_from=alt defaults_from=alt
            """,
            '/etc/ansible/roles/l1/defaults/main.yml': """
                test_variable: l1-main
                l1_variable: l1-main
            """,
            '/etc/ansible/roles/l1/defaults/alt.yml': """
                test_variable: l1-alt
                l1_variable: l1-alt
            """,
            '/etc/ansible/roles/l2/tasks/main.yml': """
                - shell: echo 'hello world from l2'
                - include_role: name=l3
            """,
            '/etc/ansible/roles/l2/tasks/alt.yml': """
                - shell: echo 'hello world from l2 alt'
                - include_role: name=l3 tasks_from=alt defaults_from=alt
            """,
            '/etc/ansible/roles/l2/defaults/main.yml': """
                test_variable: l2-main
                l2_variable: l2-main
            """,
            '/etc/ansible/roles/l2/defaults/alt.yml': """
                test_variable: l2-alt
                l2_variable: l2-alt
            """,
            '/etc/ansible/roles/l3/tasks/main.yml': """
                - shell: echo 'hello world from l3'
            """,
            '/etc/ansible/roles/l3/tasks/alt.yml': """
                - shell: echo 'hello world from l3 alt'
            """,
            '/etc/ansible/roles/l3/defaults/main.yml': """
                test_variable: l3-main
                l3_variable: l3-main
            """,
            '/etc/ansible/roles/l3/defaults/alt.yml': """
                test_variable: l3-alt
                l3_variable: l3-alt
            """
        })
        self.var_manager = VariableManager(loader=self.loader)

    def tearDown(self):
        pass

    def flatten_tasks(self, tasks):
        # Depth-first expansion: IncludeRole entries are resolved into their
        # role's blocks, plain Tasks are yielded as-is, and anything else
        # (a Block) is recursed into.
        for task in tasks:
            if isinstance(task, IncludeRole):
                blocks, handlers = task.get_block_list(loader=self.loader)
                for block in blocks:
                    for t in self.flatten_tasks(block.block):
                        yield t
            elif isinstance(task, Task):
                yield task
            else:
                for t in self.flatten_tasks(task.block):
                    yield t

    def get_tasks_vars(self, play, tasks):
        # Yields (role_name, task_vars) for every role-owned task in the play.
        for task in self.flatten_tasks(tasks):
            if task.implicit:
                # skip meta: role_complete
                continue
            role = task._role
            if not role:
                continue
            yield (role.get_name(),
                   self.var_manager.get_vars(play=play, task=task))
@patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop)
def test_simple(self):
    """Test one-level include with default tasks and variables"""
    play = Play.load(dict(
        name="test play",
        hosts=['foo'],
        gather_facts=False,
        tasks=[
            {'include_role': 'name=l3'}
        ]
    ), loader=self.loader, variable_manager=self.var_manager)
    tasks = play.compile()
    # 'tested' guards against the loop silently yielding nothing.
    tested = False
    for role, task_vars in self.get_tasks_vars(play, tasks):
        tested = True
        self.assertEqual(task_vars.get('l3_variable'), 'l3-main')
        self.assertEqual(task_vars.get('test_variable'), 'l3-main')
    self.assertTrue(tested)

@patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop)
def test_simple_alt_files(self):
    """Test one-level include with alternative tasks and variables"""
    play = Play.load(dict(
        name="test play",
        hosts=['foo'],
        gather_facts=False,
        tasks=[{'include_role': 'name=l3 tasks_from=alt defaults_from=alt'}]),
        loader=self.loader, variable_manager=self.var_manager)
    tasks = play.compile()
    tested = False
    for role, task_vars in self.get_tasks_vars(play, tasks):
        tested = True
        self.assertEqual(task_vars.get('l3_variable'), 'l3-alt')
        self.assertEqual(task_vars.get('test_variable'), 'l3-alt')
    self.assertTrue(tested)

@patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop)
def test_nested(self):
    """
    Test nested includes with default tasks and variables.

    Variables from outer roles should be inherited, but overridden in inner
    roles.
    """
    play = Play.load(dict(
        name="test play",
        hosts=['foo'],
        gather_facts=False,
        tasks=[
            {'include_role': 'name=l1'}
        ]
    ), loader=self.loader, variable_manager=self.var_manager)
    tasks = play.compile()
    # Every role must appear exactly once; removal fails on duplicates.
    expected_roles = ['l1', 'l2', 'l3']
    for role, task_vars in self.get_tasks_vars(play, tasks):
        expected_roles.remove(role)
        # Outer-most role must not have variables from inner roles yet
        if role == 'l1':
            self.assertEqual(task_vars.get('l1_variable'), 'l1-main')
            self.assertEqual(task_vars.get('l2_variable'), None)
            self.assertEqual(task_vars.get('l3_variable'), None)
            self.assertEqual(task_vars.get('test_variable'), 'l1-main')
        # Middle role must have variables from outer role, but not inner
        elif role == 'l2':
            self.assertEqual(task_vars.get('l1_variable'), 'l1-main')
            self.assertEqual(task_vars.get('l2_variable'), 'l2-main')
            self.assertEqual(task_vars.get('l3_variable'), None)
            self.assertEqual(task_vars.get('test_variable'), 'l2-main')
        # Inner role must have variables from both outer roles
        elif role == 'l3':
            self.assertEqual(task_vars.get('l1_variable'), 'l1-main')
            self.assertEqual(task_vars.get('l2_variable'), 'l2-main')
            self.assertEqual(task_vars.get('l3_variable'), 'l3-main')
            self.assertEqual(task_vars.get('test_variable'), 'l3-main')
        else:
            self.fail()
    self.assertFalse(expected_roles)

@patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop)
def test_nested_alt_files(self):
    """
    Test nested includes with alternative tasks and variables.

    Variables from outer roles should be inherited, but overridden in inner
    roles.
    """
    play = Play.load(dict(
        name="test play",
        hosts=['foo'],
        gather_facts=False,
        tasks=[
            {'include_role': 'name=l1 tasks_from=alt defaults_from=alt'}
        ]
    ), loader=self.loader, variable_manager=self.var_manager)
    tasks = play.compile()
    expected_roles = ['l1', 'l2', 'l3']
    for role, task_vars in self.get_tasks_vars(play, tasks):
        expected_roles.remove(role)
        # Outer-most role must not have variables from inner roles yet
        if role == 'l1':
            self.assertEqual(task_vars.get('l1_variable'), 'l1-alt')
            self.assertEqual(task_vars.get('l2_variable'), None)
            self.assertEqual(task_vars.get('l3_variable'), None)
            self.assertEqual(task_vars.get('test_variable'), 'l1-alt')
        # Middle role must have variables from outer role, but not inner
        elif role == 'l2':
            self.assertEqual(task_vars.get('l1_variable'), 'l1-alt')
            self.assertEqual(task_vars.get('l2_variable'), 'l2-alt')
            self.assertEqual(task_vars.get('l3_variable'), None)
            self.assertEqual(task_vars.get('test_variable'), 'l2-alt')
        # Inner role must have variables from both outer roles
        elif role == 'l3':
            self.assertEqual(task_vars.get('l1_variable'), 'l1-alt')
            self.assertEqual(task_vars.get('l2_variable'), 'l2-alt')
            self.assertEqual(task_vars.get('l3_variable'), 'l3-alt')
            self.assertEqual(task_vars.get('test_variable'), 'l3-alt')
        else:
            self.fail()
    self.assertFalse(expected_roles)
['connection', 'module_path', 'forks', 'become', 'become_method', 'become_user', 'check', 'diff']) # 用来加载解析yaml文件或JSON内容,并且支持vault的解密 loader = DataLoader() print(loader) options = Options(connection='local', module_path=None, forks=2, become=None, become_method=None, become_user=None, check=False, diff=False) # connection参数,如果执行本地节点用'local', 远端节点用'smart' # passwords = dict(vault_pass='******') #密钥方式取消这步 results_callback = ResultCallback() # 根据inventory加载对应变量,此处host_list参数可以有两种格式: # 1: hosts文件(需要), # 2: 可以是IP列表,此处使用IP列表 # variable_manager = VariableManager() inventory = InventoryManager variable_manager = VariableManager(loader=loader, inventory=inventory) variable_manager.set_inventory(inventory) play_source = dict( name="Ansible Play", hosts='*', gather_facts='no', tasks=[ dict(action=dict(module='shell', args='df -Th'), register='shell_out'), # dict(action=dict(module='debug', args=dict(msg='{{shell_out.stdout}}'))) ] ) play = Play().load(play_source, variable_manager=variable_manager, loader=loader) tqm = None try:
def exec_playbook(host_list, username, password, group_name):
    """Push Greengrass config/certs to *host_list* and restart the core service.

    Fixes:
    - 'Copy Greengrass Core Certs' globbed '{{ groups_name }}' (undefined);
      the fact set in 'Set Playbook Facts' is 'group_name'.
    - 'Start Greengrass Core' used module='greengrass'; its args
      (name/enabled/state) match the systemd module used to stop the
      service earlier in the play.
    """
    loader = DataLoader()
    passwords = dict()
    inventory_manager = InventoryManager(loader=loader, sources=','.join(host_list))
    variable_manager = VariableManager(loader=loader, inventory=inventory_manager)
    variable_manager.extra_vars = {
        'ansible_ssh_user': username,
        'ansible_ssh_pass': password
    }
    play_source = dict(
        name='Greengrass Group Playbook',
        hosts='all',
        remote_user=username,
        gather_facts='yes',
        tasks=[
            dict(name='Set Playbook Facts',
                 action=dict(module='set_fact', args=dict(group_name=group_name))),
            dict(name='Stop Greengrass Core', become=True,
                 action=dict(module='systemd', args=dict(name='greengrass', state='stopped'))),
            dict(name='Copy Greengrass Config', become=True,
                 action=dict(module='copy', args=dict(src='.gg/config/', dest='/greengrass/config/'))),
            dict(name='Copy Greengrass Root Certs', become=True,
                 action=dict(module='copy', args=dict(src='{{ item }}', dest='/greengrass/certs/')),
                 with_fileglob=['.gg/certs/root.*']),
            dict(name='Copy Greengrass Core Certs', become=True,
                 action=dict(module='copy', args=dict(src='{{ item }}', dest='/greengrass/certs')),
                 # BUG FIX: was '{{ groups_name }}'
                 with_fileglob=['.gg/certs/{{ group_name }}*']),
            dict(name='Start Greengrass Core', become=True,
                 # BUG FIX: was module='greengrass'
                 action=dict(module='systemd', args=dict(name='greengrass', enabled=True, state='started')))
        ])
    play = Play().load(play_source, variable_manager=variable_manager, loader=loader)
    tqm = None
    try:
        tqm = TaskQueueManager(
            inventory=inventory_manager,
            variable_manager=variable_manager,
            loader=loader,
            options=options,  # NOTE(review): 'options' comes from enclosing scope — confirm
            passwords=passwords,
        )
        result = tqm.run(play)
    finally:
        if tqm is not None:
            tqm.cleanup()
        # Remove ansible's local temp directory; ignore errors.
        shutil.rmtree(C.DEFAULT_LOCAL_TMP, True)
# diff=False) passwords = dict(vault_pass='******', become_pass='******') # Instantiate our ResultCallback for handling results as they come in results_callback = ResultCallback() # create inventory and pass to var manager inventory = InventoryManager(loader=loader, sources=None) #variable_manager = VariableManager(loader=loader, inventory=inventory) hostname = 'test1' hostport = 22 hostip = '127.0.0.1' password = '******' username = '******' variable_manager = VariableManager(loader=loader, inventory=inventory) my_host = Host(name=hostname, port=hostport) variable_manager.set_host_variable(host=my_host, varname='ansible_ssh_host', value=hostip) variable_manager.set_host_variable(host=my_host, varname='ansible_ssh_pass', value=password) variable_manager.set_host_variable(host=my_host, varname='ansible_ssh_port', value=hostport) variable_manager.set_host_variable(host=my_host, varname='ansible_ssh_user', value=username)
class MyInventory():
    """ this is IOPS ansible inventory object. """

    def __init__(self, resource, loader, variable_manager):
        # NOTE(review): the loader/variable_manager parameters are accepted
        # for API compatibility but fresh instances are built internally.
        self.resource = resource
        self.loader = DataLoader()
        self.inventory = InventoryManager(loader=self.loader,
                                          sources=['/etc/ansible/hosts'])
        # self.variable_manager.set_inventory(self.inventory)
        self.variable_manager = VariableManager(loader=self.loader,
                                                inventory=self.inventory)
        self.dynamic_inventory()

    def add_dynamic_group(self, hosts, groupname, groupvars=None):
        """ add hosts to a group """
        self.inventory.add_group(groupname)
        # BUG FIX: group variables used to be set on a detached Group()
        # instance that was never attached to the inventory, so they were
        # silently lost. Use the group registered in the inventory instead.
        my_group = self.inventory.groups[groupname]

        # if group variables exists, add them to group
        if groupvars:
            # BUG FIX: dict.iteritems() does not exist on Python 3.
            for key, value in groupvars.items():
                my_group.set_variable(key, value)

        # add hosts to group
        for host in hosts:
            # set connection variables
            hostname = host.get("hostname")
            hostip = host.get('ip', hostname)
            hostport = host.get("port")
            username = host.get("username")
            password = host.get("password")
            ssh_key = host.get("ssh_key")
            my_host = Host(name=hostname, port=hostport)
            self.variable_manager.set_host_variable(host=my_host,
                                                    varname='ansible_ssh_host',
                                                    value=hostip)
            self.variable_manager.set_host_variable(host=my_host,
                                                    varname='ansible_ssh_pass',
                                                    value=password)
            self.variable_manager.set_host_variable(host=my_host,
                                                    varname='ansible_ssh_port',
                                                    value=hostport)
            self.variable_manager.set_host_variable(host=my_host,
                                                    varname='ansible_ssh_user',
                                                    value=username)
            self.variable_manager.set_host_variable(
                host=my_host,
                varname='ansible_ssh_private_key_file',
                value=ssh_key)

            # my_host.set_variable('ansible_ssh_pass', password)
            # my_host.set_variable('ansible_ssh_private_key_file', ssh_key)

            # set other variables
            # BUG FIX: iteritems() -> items() (Python 3).
            for key, value in host.items():
                if key not in ["hostname", "port", "username", "password"]:
                    self.variable_manager.set_host_variable(host=my_host,
                                                            varname=key,
                                                            value=value)
            # add to group
            self.inventory.add_host(host=hostname, group=groupname,
                                    port=hostport)

    # NOTE(review): apparently unused class attribute -- confirm before
    # removing (kept for interface compatibility).
    ghost = Host(name="192.168.8.119")

    def dynamic_inventory(self):
        """ add hosts to inventory. """
        if isinstance(self.resource, list):
            self.add_dynamic_group(self.resource, 'default_group')
        elif isinstance(self.resource, dict):
            # BUG FIX: iteritems() -> items() (Python 3).
            for groupname, hosts_and_vars in self.resource.items():
                self.add_dynamic_group(hosts_and_vars.get("hosts"),
                                       groupname,
                                       hosts_and_vars.get("vars"))
# Tail of recursive_dump -- its 'def' line lies before this chunk.
# Recursively converts nested lists/dicts, leaving leaves untouched.
if isinstance(o, list):
    return [recursive_dump(oi) for oi in o]
if isinstance(o, dict):
    return {k: recursive_dump(v) for k, v in o.items()}
return o

# Script body: resolve the role directory given on the command line and
# load its RoleInclude definition with an empty dummy play.
role_dir = Path(sys.argv[1])
role_name = role_dir.name
role_base_dir = role_dir.parent

dummy_play = Play()
role_def = RoleInclude.load(
    role_name,
    dummy_play,
    current_role_path=str(role_base_dir),
    variable_manager=VariableManager(loader=DataLoader()))
print(role_def.get_name())
print(role_def.get_role_path())


def dump_block(b: Block) -> Dict[str, Any]:
    """Render one Block as {label: dump of its child tasks}.

    The label combines the block's class name, its id() and, when the
    block has a parent, the parent's class name (plus the include args
    when the parent is a TaskInclude).
    """
    name = b.__class__.__name__ + ':' + str(id(b))
    if b._parent is not None:
        name += ' (parent: ' + b._parent.__class__.__name__
        if isinstance(b._parent, TaskInclude):
            name += str(b._parent.args)
        name += ')'
    return {name: dump_block_tasks(b.block)}


# Definition continues past the end of this chunk.
def dump_blocks(bl: Sequence[Block]) -> Dict[str, Any]:
def run_playbook(self, playbook_info):
    """Execute the Ansible playbook described by *playbook_info*.

    playbook_info -- dict with 'uri' (playbook path) and 'extra_vars',
    whose 'playbook_input' sub-dict carries 'device_id', 'input' and
    'params'.

    Returns the plugin output collected from the executor.
    Raises JobException when the playbook file is missing or any error
    occurs during execution.
    """
    try:
        # create job log to capture the start of the playbook
        device_id = playbook_info['extra_vars']['playbook_input']['device_id']
        if device_id is None:
            device_id = ""
        msg = "Starting to execute the playbook %s for device " \
              "%s with input %s and params %s " % \
              (playbook_info['uri'], device_id,
               json.dumps(playbook_info['extra_vars']['playbook_input']['input']),
               json.dumps(playbook_info['extra_vars']['playbook_input']['params']))
        self._logger.debug(msg)

        if not os.path.exists(playbook_info['uri']):
            raise JobException(
                "Playbook %s does not "
                "exist" % playbook_info['uri'],
                self._execution_id)

        loader = DataLoader()
        # NOTE(review): sources=['localhost'] is treated as an inventory
        # *source path*, not a host name -- confirm a 'localhost' file is
        # expected here (a plain 'localhost,' string targets the host).
        inventory = InventoryManager(loader=loader, sources=['localhost'])
        variable_manager = VariableManager(loader=loader,
                                           inventory=inventory)

        # Pre-2.8 ansible expects an options namedtuple.
        Options = namedtuple('Options', [
            'listtags', 'listtasks', 'listhosts', 'syntax', 'connection',
            'module_path', 'forks', 'remote_user', 'private_key_file',
            'ssh_common_args', 'ssh_extra_args', 'sftp_extra_args',
            'scp_extra_args', 'become', 'become_method', 'become_user',
            'verbosity', 'check', 'diff'
        ])
        options = Options(listtags=False, listtasks=False, listhosts=False,
                          syntax=False, connection='ssh', module_path=None,
                          forks=100, remote_user='******',
                          private_key_file=None, ssh_common_args=None,
                          ssh_extra_args=None, sftp_extra_args=None,
                          scp_extra_args=None, become=None,
                          become_method=None, become_user=None,
                          verbosity=None, check=False, diff=False)
        variable_manager.extra_vars = playbook_info['extra_vars']
        passwords = dict(vault_pass='******')

        pbex = PlaybookExecutor(playbooks=[playbook_info['uri']],
                                inventory=inventory,
                                variable_manager=variable_manager,
                                loader=loader, options=options,
                                passwords=passwords)
        pbex.run()
        self._logger.debug("Completed executing the playbook %s. "
                           "Collecting output." % playbook_info['uri'])

        output = self.get_plugin_output(pbex)
        self._logger.debug("Output for playbook %s : "
                           "%s" % (playbook_info['uri'],
                                   json.dumps(output)))

        # create job log to capture completion of the playbook execution
        msg = "Completed to execute the playbook %s for" \
              " device %s" % (playbook_info['uri'],
                              playbook_info['extra_vars']['playbook_input']['device_id'])
        self._logger.debug(msg)
        return output
    except Exception as e:
        # Any failure (including the JobExceptions raised above) is
        # re-wrapped with the execution id for the job machinery.
        msg = "Error while executing the playbook %s : %s" % \
              (playbook_info['uri'], repr(e))
        raise JobException(msg, self._execution_id)
def run_playbook(hosts, playbook, tags=None, private_key_file=private_key_file):
    """Run *playbook* against *hosts* over SSH with sudo escalation.

    hosts -- iterable of host addresses; written to a throwaway [servers]
             inventory with a per-host 'num' hostvar
    playbook -- path of the playbook to execute
    tags -- optional list of tags restricting the run
    private_key_file -- SSH key used for the connection

    Returns (exit_code, human_readable_message).
    """
    # BUG FIX: the default used to be a shared mutable list (tags=[]),
    # which persists and can accumulate state across calls.
    if tags is None:
        tags = []

    # initialize needed objects
    loader = DataLoader()
    options = Options(connection='ssh',
                      private_key_file=private_key_file,
                      module_path='',
                      forks=100,
                      become=True,
                      become_method='sudo',
                      become_user='******',
                      check=False,
                      tags=tags)
    passwords = dict(vault_pass='')
    results_callback = ResultCallback()

    # Build a temporary INI inventory: one [servers] group, one line per
    # host carrying its index as the 'num' hostvar.
    host_file = NamedTemporaryFile(delete=False)
    host_file.write(b'[servers]\n')
    for i, h in enumerate(hosts):
        print(i, " : ", h)
        host_file.write(bytes('{0} num={1}\n'.format(h, i), encoding='utf8'))
    host_file.close()

    try:
        # set inventory
        inventory = InventoryManager(loader=loader, sources=host_file.name)
        variable_manager = VariableManager(loader=loader,
                                           inventory=inventory)

        # setup playbook executor, before the run
        pbex = PlaybookExecutor(playbooks=[playbook],
                                inventory=inventory,
                                variable_manager=variable_manager,
                                loader=loader,
                                options=options,
                                passwords=passwords)
        pbex._tqm._stdout_callback = results_callback

        # run playbook and get stats
        result = pbex.run()
        stats = pbex._tqm._stats
    finally:
        # BUG FIX: the temporary inventory file leaked whenever
        # pbex.run() raised; always remove it.
        os.remove(host_file.name)

    outputs = {
        0: 'deployment successful',
        1: 'error occurred during deployment',
        2: 'one or more hosts failed',
        4: 'one or more hosts unreachable',
        255: 'unknown error occurred during deployment'
    }

    # NOTE(review): run_success is computed but never used -- confirm
    # before removing.
    run_success = True
    hosts = sorted(stats.processed.keys())
    for h in hosts:
        t = stats.summarize(h)
        if t['unreachable'] > 0 or t['failures'] > 0:
            run_success = False

    try:
        out = outputs[result]
    except KeyError:
        out = 'unrecognised error code'
    return result, out
print('- ' * 80)
inventory.add_host(host='foreman.example.com', port=22, group='foreman')
print(inventory.get_groups_dict())
print(inventory.get_hosts(pattern='*'))
print('- ' * 80, end='\n\n')
"""
VariableManager 实例化需要两个参数:
1. 参数一为读取yml文件的信息,需要实例化DataLoader。
2. 参数二为资产管理配置变量。
"""
loader = DataLoader()
# BUG FIX: loader was passed as '' (an empty string); InventoryManager
# needs the DataLoader instantiated on the previous line.
inventory = InventoryManager(loader=loader, sources=ansible_host_sources)
variable = VariableManager(loader=loader, inventory=inventory)

# Inspect all variables known to the manager.
print(variable.get_vars())

host = inventory.get_host(hostname='django2.example.com')
host_vars = variable.get_vars(host=host)
print(host_vars)
print('- ' * 80)

# Set a host variable; the host passed in must be the object returned by
# inventory.get_host.
host = inventory.get_host(hostname='django2.example.com')
variable.set_host_variable(host=host, varname='ansible_ssh_pass',
                           value='12345')
host_vars = variable.get_vars(host=host)
def run_ansible(module_name, module_args, host_list, ansible_user="******"):
    """ansible api

    Run *module_name* with *module_args* on every host in *host_list*
    over SSH and return a JSON report grouped by outcome
    (success/failed/unreachable/skipped).
    """
    # Finds and reads yaml, json and ini files.
    loader = DataLoader()

    # Option bundle expected by TaskQueueManager (pre-2.8 API).
    Options = namedtuple('Options', [
        'connection', 'module_path', 'forks', 'become', 'become_method',
        'private_key_file', 'become_user', 'remote_user', 'check', 'diff'
    ])
    options = Options(connection='ssh', module_path=None, forks=5,
                      become=True, become_method='sudo',
                      private_key_file="/root/.ssh/id_rsa",
                      become_user='******', remote_user=ansible_user,
                      check=False, diff=False)
    passwords = dict(vault_pass='******')

    # Callback instance that accumulates per-host results.
    callback = ResultsCollector()

    # Build an in-memory inventory and hand it to the VariableManager.
    inventory = InventoryManager(loader=loader, sources='')
    for ip in host_list:
        inventory.add_host(host=ip, port=22)
    variable_manager = VariableManager(loader=loader, inventory=inventory)
    for ip in host_list:
        host = inventory.get_host(hostname=ip)
        variable_manager.set_host_variable(host=host,
                                           varname='ansible_ssh_pass',
                                           value='lzx@2019')

    # One-task ad-hoc play targeting all requested hosts.
    host = ",".join(host_list)
    play_source = dict(name="Ansible Play",
                       hosts=host,
                       gather_facts='no',
                       tasks=[
                           dict(action=dict(module=module_name,
                                            args=module_args),
                                register='shell_out'),
                       ])
    play = Play().load(play_source,
                       variable_manager=variable_manager,
                       loader=loader)

    # Execute the play.
    tqm = None
    tqm = TaskQueueManager(
        inventory=inventory,
        variable_manager=variable_manager,
        loader=loader,
        options=options,
        passwords=passwords,
        stdout_callback=callback,
    )
    try:
        result = tqm.run(play)
    finally:
        # ROBUSTNESS: always release forked workers and IPC structures.
        tqm.cleanup()

    # BUG FIX: 'skipped' was missing from this dict although skipped hosts
    # are recorded below, raising KeyError whenever any host was skipped.
    result_raw = {'success': {}, 'failed': {}, 'unreachable': {},
                  'skipped': {}}
    for host, result in callback.host_ok.items():
        result_raw['success'][host] = result._result
    for host, result in callback.host_failed.items():
        result_raw['failed'][host] = result._result
    for host, result in callback.host_unreachable.items():
        result_raw['unreachable'][host] = result._result
    for host, result in callback.host_skipped.items():
        result_raw['skipped'][host] = result._result
    return json.dumps(result_raw, indent=4, ensure_ascii=False)
class MyInventory:
    """
    Dynamic Creates and manages inventory
    By default, there are six group names
    """

    def __init__(self, resources):
        self.resource = resources
        self.loader = DataLoader()
        self.inventory = InventoryManager(
            loader=self.loader,
            sources=['%s/conf/inventorys' % settings.BASE_DIR])
        self.variable_manager = VariableManager(loader=self.loader,
                                                inventory=self.inventory)
        self.dynamic_inventory()

    def add_dynamic_group(self, hosts, groupname, groupvars=None):
        """ add hosts to a group """
        self.inventory.add_group(groupname)
        # BUG FIX: group variables used to be set on a detached Group()
        # object that was discarded; set them on the group registered in
        # the inventory so they actually take effect.
        my_group = self.inventory.groups[groupname]

        # if group variables exists, add them to group
        if groupvars:
            for key, value in groupvars.items():
                my_group.set_variable(key, value)

        # add hosts to group
        for host in hosts:
            # set connection variables
            hostname = host.get("hostname", None)
            hostip = host.get('ip', None)
            if hostip is None:
                print("IP地址为空,跳过该元素。")
                continue
            hostport = host.get("port", 22)
            user = host.get("user", 'root')
            password = host.get("password", None)
            ssh_key = host.get("ssh_key", None)
            if hostname is None:
                hostname = hostip
            my_host = Host(name=hostname, port=hostport)
            self.variable_manager.set_host_variable(host=my_host,
                                                    varname='ansible_ssh_port',
                                                    value=hostport)
            self.variable_manager.set_host_variable(host=my_host,
                                                    varname='ansible_ssh_host',
                                                    value=hostip)
            self.variable_manager.set_host_variable(host=my_host,
                                                    varname='ansible_ssh_user',
                                                    value=user)
            if password:
                self.variable_manager.set_host_variable(
                    host=my_host, varname='ansible_ssh_pass', value=password)
            if ssh_key:
                self.variable_manager.set_host_variable(
                    host=my_host,
                    varname='ansible_ssh_private_key_file',
                    value=ssh_key)
            # set other variables
            for key, value in host.items():
                if key not in ["hostname", "port", "user", "password"]:
                    self.variable_manager.set_host_variable(host=my_host,
                                                            varname=key,
                                                            value=value)
            # add to group
            self.inventory.add_host(host=hostname, group=groupname,
                                    port=hostport)

    def dynamic_inventory(self):
        """ add hosts to inventory. """
        if isinstance(self.resource, list):
            self.add_dynamic_group(self.resource, 'default_group')
        elif isinstance(self.resource, dict):
            for groupname, hosts_and_vars in self.resource.items():
                self.add_dynamic_group(hosts_and_vars.get("hosts"),
                                       groupname,
                                       hosts_and_vars.get("vars"))
        elif isinstance(self.resource, str):
            return

    @property
    def INVENTORY(self):
        """Return the inventory instance."""
        return self.inventory

    @property
    def VARIABLE_MANAGER(self):
        """Return the variable-manager instance."""
        return self.variable_manager
def createFromInventory(cls, inventory_file):
    """Build an instance from an Ansible inventory file.

    Uses ansible's own variable-merging machinery to obtain the fully
    merged vars for each host, converts them into Machine objects, and
    initialises the cluster from the first 'cluster' host var seen.

    inventory_file -- a single path or comma separated inventory paths
    """
    retval = cls()
    # Load Ansible inventory, use ansible's mechanism to merge variable
    # from diffent levels
    loader = DataLoader()
    # Sources can be a single path or comma separated paths
    inventory = InventoryManager(loader=loader, sources=inventory_file)
    variable_manager = VariableManager(loader=loader, inventory=inventory)

    for host in inventory.hosts:
        h = inventory.get_host(host)
        # Get host's vars (fully merged view).
        # BUG FIX: dropped a redundant h.get_vars() call whose result was
        # never used -- it only duplicated work per host.
        host_vars = variable_manager.get_vars(host=h,
                                              task=None,
                                              include_hostvars=True,
                                              include_delegate_to=True,
                                              use_cache=True)
        print(json.dumps(host_vars, indent=4))
        # These variables are expected by VM machine definition
        # "name" - aka hostname
        # "template" - a VM template to clone from
        # "ram"
        # "cpu"
        # "scsi"
        #   - adapter
        # "disks"
        # "vmware"
        #   - vm_folder
        #   - dataloader
        # "network"
        #   - adapter
        # "addresses"
        name = host_vars['inventory_hostname_short']
        defaultMachine = Machine(name)
        mdict = dict({'addresses': {}})
        if "template" in host_vars:
            mdict['template'] = host_vars['template']
        if "ram" in host_vars:
            mdict['ram'] = host_vars['ram']
        if "cpu" in host_vars:
            mdict['cpu'] = host_vars['cpu']
        if "scsi" in host_vars:
            mdict['scsi'] = host_vars['scsi']
        if "disks" in host_vars:
            mdict['disks'] = host_vars['disks']
        if "vmware" in host_vars:
            mdict['vmware'] = host_vars['vmware']
        if "network" in host_vars and host_vars['network']:
            mdict['network'] = host_vars['network']
        # Note: machine has key address (singular) while cluster has key
        # addresses (plural)
        if "address" in host_vars and host_vars['address'] and isinstance(
                host_vars['address'], str):
            ip = host_vars['address']
            mdict['addresses'][name] = ip
        machine = defaultMachine.updateFromDict(mdict)
        retval.machines.append(machine)
        logging.debug(
            'Machine loaded: {:<20} CPU: {:<2} RAM: {:<4} Storage: {}'.format(
                machine.name, machine.cpu, machine.ram, str(machine.disks)))
        # Only the first host carrying a 'cluster' var defines the cluster.
        if 'cluster' in host_vars and not retval.cluster:
            logging.debug(host_vars['cluster'])
            cluster = host_vars['cluster']
            retval.cluster = Cluster.createFromDict(cluster)
        if retval.cluster:
            retval.cluster.nodes.append(name)
    return retval
def run_ansible(module_name, module_args, host_list, option_dict):
    """Run one ad-hoc Ansible task on *host_list* and return a JSON
    summary keyed by outcome.

    module_name -- Ansible module to execute
    module_args -- argument payload for the module
    host_list   -- hosts the play targets (joined into the play pattern)
    option_dict -- dict providing 'become' and 'remote_user'
    """
    # Locates and parses yaml/json/ini sources.
    data_loader = DataLoader()

    # Runtime option bundle expected by TaskQueueManager (pre-2.8 API).
    OptionSpec = namedtuple('Options', [
        'connection', 'module_path', 'forks', 'become', 'become_method',
        'private_key_file', 'become_user', 'remote_user', 'check', 'diff'
    ])
    run_options = OptionSpec(
        connection='ssh',
        module_path=None,
        forks=5,
        become=option_dict['become'],
        become_method='sudo',
        private_key_file="/root/.ssh/id_rsa",
        become_user='******',
        remote_user=option_dict['remote_user'],
        check=False,
        diff=False)
    secret_store = dict(vault_pass='******')

    # Accumulates per-host results as the play runs.
    collector = ResultsCollector()

    # Inventory backed by the system hosts file, shared with the
    # variable manager.
    inv = InventoryManager(loader=data_loader,
                           sources=['/etc/ansible/hosts'])
    var_mgr = VariableManager(loader=data_loader, inventory=inv)

    # Single-task play targeting the requested hosts.
    play_spec = dict(
        name="Ansible Play",
        hosts=",".join(host_list),
        gather_facts='no',
        tasks=[dict(action=dict(module=module_name, args=module_args),
                    register='shell_out')])
    play_obj = Play().load(play_spec,
                           variable_manager=var_mgr,
                           loader=data_loader)

    # Execute the play.
    queue_mgr = None
    queue_mgr = TaskQueueManager(
        inventory=inv,
        variable_manager=var_mgr,
        loader=data_loader,
        options=run_options,
        passwords=secret_store,
        stdout_callback=collector,
    )
    queue_mgr.run(play_obj)

    # Collate the callback's buckets into a serialisable report.
    summary = {'success': {}, 'failed': {}, 'unreachable': {}}
    for name, res in collector.host_ok.items():
        summary['success'][name] = res._result['stdout_lines']
    for name, res in collector.host_failed.items():
        summary['failed'][name] = res._result['stderr_lines']
    for name, res in collector.host_unreachable.items():
        summary['unreachable'][name] = res._result["msg"]
    return json.dumps(summary, indent=4)
from ansible.parsing.dataloader import DataLoader
from ansible.inventory.manager import InventoryManager
from ansible.vars.manager import VariableManager

loader = DataLoader()
inventory = InventoryManager(loader=loader, sources='hosts')
variable_manager = VariableManager(loader=loader, inventory=inventory)

host = inventory.get_host('192.168.10.150')

# get_vars()
# Inspect the variables of the given host.
print(variable_manager.get_vars(host=host))

# set_host_variable()
# Modify one variable of the given host, then show the effect.
variable_manager.set_host_variable(host=host, varname="ansible_ssh_pass", value="1111111")
print(variable_manager.get_vars(host=host))
print(variable_manager.__dict__)

# _extra_vars={}
# Add extra vars on the manager itself; these apply to every host.
# NOTE(review): _extra_vars is a private attribute -- the public route is
# the 'extra_vars' property; confirm against the pinned ansible version.
variable_manager._extra_vars = {'mysite': "ys.blog.com"}
print(variable_manager.get_vars(host=host))
def main():
    """Discover CDP neighbors for every host in an Ansible inventory and
    write the deduplicated link list to discovery_output.json.

    Options: -d enables debug logging, -i <file> names the inventory.
    Exits with status 255 on any usage, inventory or I/O error.
    """
    # Set default
    inventory_file = None
    ansible_loader = DataLoader()
    discovered_devices = {}
    output_file = 'discovery_output.json'
    links = []

    # Reading options
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'di:')
    except getopt.GetoptError as err:
        logger.error(err)
        sys.exit(255)
    for opt, arg in opts:
        if opt == '-d':
            logging.basicConfig(level=logging.DEBUG)
        elif opt == '-i':
            inventory_file = arg
        else:
            logging.error('unhandled option ({})'.format(opt))
            sys.exit(255)

    # Checking options and environment
    if not inventory_file:
        logging.error('inventory file not given (use -i)')
        sys.exit(255)
    if not os.path.isfile(inventory_file):
        logging.error(
            'inventory file does not exist ({})'.format(inventory_file))
        sys.exit(255)
    if os.path.exists(output_file):
        try:
            os.remove(output_file)
        # BUG FIX: a bare 'except:' also swallows SystemExit and
        # KeyboardInterrupt; OSError covers all removal failures.
        except OSError:
            logging.error(
                'output file cannot be deleted ({})'.format(output_file))
            sys.exit(255)

    # Reading the inventory file
    try:
        ansible_inventory = InventoryManager(loader=ansible_loader,
                                             sources=inventory_file)
    # BUG FIX: narrowed from a bare 'except:'.
    except Exception:
        logging.error('cannot read inventory file ({})'.format(inventory_file))
        sys.exit(255)
    variable_manager = VariableManager(loader=ansible_loader,
                                       inventory=ansible_inventory)

    # Discover each host
    for host in ansible_inventory.get_hosts():
        driver = napalm.get_network_driver(host.vars['napalm_driver'])
        device = driver(hostname=host.vars['ansible_host'],
                        username=host.vars['ansible_username'],
                        password=host.vars['ansible_password'],
                        optional_args={'port': 22})
        device.open()
        facts = device.get_facts()
        neighbors = device.get_cdp_neighbors_detail()
        device.close()
        # Saving data
        discovered_devices.setdefault(facts['fqdn'], {})
        discovered_devices[facts['fqdn']] = {
            'facts': facts,
            'neighbors': neighbors
        }

    # For each device
    for device_name, device in discovered_devices.items():
        if device['neighbors']:
            # For each interface where a neighbor exists
            for device_if_name, neighbors in device['neighbors'].items():
                # For each neighbor
                for neighbor in neighbors:
                    remote_device_name = neighbor['remote_system_name']
                    remote_if_name = neighbor['remote_port']
                    # Order the endpoints so each link appears only once
                    # regardless of which side reported it.
                    if device_name > remote_device_name:
                        source = remote_device_name
                        source_if = remote_if_name
                        destination = device_name
                        destination_if = device_if_name
                    else:
                        source = device_name
                        source_if = device_if_name
                        destination = remote_device_name
                        destination_if = remote_if_name
                    link = {
                        'source': source,
                        'source_if': source_if,
                        'destination': destination,
                        'destination_if': destination_if
                    }
                    if link not in links:
                        links.append(link)

    try:
        # ROBUSTNESS: use a context manager so the file handle is closed
        # even when the write fails.
        with open(output_file, 'w+') as discovery_output:
            discovery_output.write(json.dumps(links))
    # BUG FIX: narrowed from a bare 'except:'.
    except OSError:
        logging.error('output file is not writable ({})'.format(output_file))
        sys.exit(255)
def run_ansible(ansible_playbook_path, hypervisor_file_path=None):
    """Execute an Ansible playbook, supporting both the 2.8+ context API
    and the older options-namedtuple API.

    ansible_playbook_path -- path of the playbook to run
    hypervisor_file_path  -- optional inventory source file; when omitted
                             an empty source list is used
    """
    loader = DataLoader()
    src = [hypervisor_file_path] if hypervisor_file_path else []
    inventory = InventoryManager(loader=loader, sources=src)
    variable_manager = VariableManager(loader=loader, inventory=inventory)
    passwords = {}
    # Since ansible has deprecated the options and introduced
    # new library context starting from 2.8
    try:
        from ansible import context
        from ansible.module_utils.common.collections import ImmutableDict
        context.CLIARGS = ImmutableDict(listtags=False,
                                        listtasks=False,
                                        listhosts=False,
                                        syntax=False,
                                        connection='smart',
                                        module_path=None,
                                        forks=100,
                                        remote_user='******',
                                        timeout=10,
                                        become=False,
                                        become_ask_pass=False,
                                        ask_pass=False,
                                        become_method='sudo',
                                        become_user='******',
                                        verbosity=1,
                                        check=False,
                                        diff=False,
                                        step=False,
                                        start_at_task=None)
        playbook = PlaybookExecutor(playbooks=[ansible_playbook_path],
                                    inventory=inventory,
                                    variable_manager=variable_manager,
                                    loader=loader,
                                    passwords=passwords)
    except (ImportError, ValueError):
        # Pre-2.8 fallback: build the options namedtuple and pass it in
        # explicitly.
        from collections import namedtuple
        Options = namedtuple('Options', [
            'listtags', 'listtasks', 'listhosts', 'syntax', 'connection',
            'module_path', 'forks', 'remote_user', 'private_key_file',
            'ssh_common_args', 'ssh_extra_args', 'sftp_extra_args',
            'scp_extra_args', 'become', 'become_method', 'become_user',
            'verbosity', 'check', 'diff'
        ])
        options = Options(listtags=False,
                          listtasks=False,
                          listhosts=False,
                          syntax=False,
                          connection='ssh',
                          module_path=None,
                          forks=100,
                          remote_user='******',
                          private_key_file=None,
                          ssh_common_args=None,
                          ssh_extra_args=None,
                          sftp_extra_args=None,
                          scp_extra_args=None,
                          become=False,
                          become_method='sudo',
                          become_user='******',
                          verbosity=1,
                          check=False,
                          diff=False)
        playbook = PlaybookExecutor(playbooks=[ansible_playbook_path],
                                    inventory=inventory,
                                    variable_manager=variable_manager,
                                    loader=loader,
                                    options=options,
                                    passwords=passwords)
    # Run and report; rc == 0 means the playbook completed cleanly.
    rc = playbook.run()
    if rc == 0:
        sys.stdout.write("\n Running Ansible Completed !! \n")
    else:
        sys.stdout.write("\n Running Ansible seems to have some error !! \n")
# Skip the whole module when the AWS SDKs are not installed.
boto3 = pytest.importorskip("boto3")
botocore = pytest.importorskip("botocore")

# Option bundle expected by the pre-2.8 ansible APIs.
Options = (
    namedtuple(
        'Options', [
            'connection', 'module_path', 'forks', 'become', 'become_method',
            'become_user', 'remote_user', 'private_key_file',
            'ssh_common_args', 'sftp_extra_args', 'scp_extra_args',
            'ssh_extra_args', 'verbosity', 'check', 'diff'
        ]
    )
)

# initialize needed objects
loader = DataLoader()
# NOTE(review): VariableManager without an inventory argument only works
# on older ansible releases -- confirm the pinned version.
variable_manager = VariableManager(loader=loader)
# Local connection in check mode: nothing is changed on AWS.
options = (
    Options(
        connection='local',
        module_path='cloud/amazon',
        forks=1,
        become=None,
        become_method=None,
        become_user=None,
        check=True,
        remote_user=None,
        private_key_file=None,
        ssh_common_args=None,
        sftp_extra_args=None,
        scp_extra_args=None,
        ssh_extra_args=None,
        verbosity=3,
        diff=False
    )
)
passwords = dict(vault_pass='')
aws_region = 'us-west-2'

# create inventory and pass to var manager
def run_ansible(inventory_filename, become=None, hosts="all", forks=10):
    """Run ansible with the provided inventory file and host group."""
    # The CLI-oriented API requires these options to be present in the
    # global context object.
    context.CLIARGS = ImmutableDict(
        connection="ssh",
        module_path=[],
        forks=forks,
        become=become,
        become_method="sudo",
        become_user=None,
        check=False,
        diff=False,
        verbosity=0,
    )

    # Reader for yaml, json and ini files.
    file_loader = DataLoader()
    secrets = dict(vault_pass="******")  # nosec

    # Callback object ansible reports results through.
    collector = ResultCallback()

    # Inventory from the given file path (or comma separated host string).
    logging.debug("Reading inventory from: %s", inventory_filename)
    inv = InventoryManager(loader=file_loader, sources=inventory_filename)

    # Unified view over every variable source.
    var_mgr = VariableManager(loader=file_loader, inventory=inv)

    # The play stats three well-known files on every target host.
    watched_files = (
        LAST_SCAN_LOG_FILENAME,
        LAST_DETECTION_FILENAME,
        CLAMAV_DB_FILENAME,
    )
    play_spec = dict(
        name="Ansible Play",
        hosts=hosts,
        gather_facts="yes",
        tasks=[
            dict(action=dict(module="stat", get_checksum=False, path=target))
            for target in watched_files
        ],
    )

    # Play.load() also builds the task objects from play_spec.
    play_obj = Play().load(play_spec, variable_manager=var_mgr,
                           loader=file_loader)

    # Task queue manager forks workers and drives the host/task loop.
    queue = None
    try:
        queue = TaskQueueManager(
            inventory=inv,
            variable_manager=var_mgr,
            loader=file_loader,
            passwords=secrets,
            stdout_callback=collector,  # Use our custom callback.
        )
        logging.debug("Starting task queue manager with forks=%d.", forks)
        queue.run(play_obj)
    finally:
        # Child procs and their IPC structures always need cleaning up.
        if queue is not None:
            logging.debug("Cleaning up task queue manager.")
            queue.cleanup()
        # Remove ansible temporary directory
        logging.debug("Cleaning up temporary file in %s",
                      ANSIBLE_CONST.DEFAULT_LOCAL_TMP)
        shutil.rmtree(ANSIBLE_CONST.DEFAULT_LOCAL_TMP, True)
    return collector.results
class TestIncludeRole(unittest.TestCase):
    # Tests include_role variable scoping across nested roles
    # l1 -> l2 -> l3, in both default and alternative tasks/defaults files.

    def setUp(self):
        # Fake role tree: each role includes the next and defines
        # test_variable plus its own lN_variable in defaults.
        self.loader = DictDataLoader({
            '/etc/ansible/roles/l1/tasks/main.yml': """
                - shell: echo 'hello world from l1'
                - include_role: name=l2
            """,
            '/etc/ansible/roles/l1/tasks/alt.yml': """
                - shell: echo 'hello world from l1 alt'
                - include_role: name=l2 tasks_from=alt defaults_from=alt
            """,
            '/etc/ansible/roles/l1/defaults/main.yml': """
                test_variable: l1-main
                l1_variable: l1-main
            """,
            '/etc/ansible/roles/l1/defaults/alt.yml': """
                test_variable: l1-alt
                l1_variable: l1-alt
            """,
            '/etc/ansible/roles/l2/tasks/main.yml': """
                - shell: echo 'hello world from l2'
                - include_role: name=l3
            """,
            '/etc/ansible/roles/l2/tasks/alt.yml': """
                - shell: echo 'hello world from l2 alt'
                - include_role: name=l3 tasks_from=alt defaults_from=alt
            """,
            '/etc/ansible/roles/l2/defaults/main.yml': """
                test_variable: l2-main
                l2_variable: l2-main
            """,
            '/etc/ansible/roles/l2/defaults/alt.yml': """
                test_variable: l2-alt
                l2_variable: l2-alt
            """,
            '/etc/ansible/roles/l3/tasks/main.yml': """
                - shell: echo 'hello world from l3'
            """,
            '/etc/ansible/roles/l3/tasks/alt.yml': """
                - shell: echo 'hello world from l3 alt'
            """,
            '/etc/ansible/roles/l3/defaults/main.yml': """
                test_variable: l3-main
                l3_variable: l3-main
            """,
            '/etc/ansible/roles/l3/defaults/alt.yml': """
                test_variable: l3-alt
                l3_variable: l3-alt
            """
        })

        self.var_manager = VariableManager(loader=self.loader)

    def tearDown(self):
        pass

    def get_tasks_vars(self, play, tasks):
        # Yield (role name, merged task vars) for every role-bound task.
        for task in flatten_tasks(tasks):
            role = task._role
            if not role:
                continue
            yield (role.get_name(),
                   self.var_manager.get_vars(play=play, task=task))

    @patch('ansible.playbook.role.definition.unfrackpath',
           mock_unfrackpath_noop)
    def test_simple(self):
        """Test one-level include with default tasks and variables"""
        play = Play.load(dict(
            name="test play",
            hosts=['foo'],
            gather_facts=False,
            tasks=[
                {'include_role': 'name=l3'}
            ]
        ), loader=self.loader, variable_manager=self.var_manager)

        tasks = play.compile()
        for role, task_vars in self.get_tasks_vars(play, tasks):
            self.assertEqual(task_vars.get('l3_variable'), 'l3-main')
            self.assertEqual(task_vars.get('test_variable'), 'l3-main')

    @patch('ansible.playbook.role.definition.unfrackpath',
           mock_unfrackpath_noop)
    def test_simple_alt_files(self):
        """Test one-level include with alternative tasks and variables"""
        play = Play.load(dict(
            name="test play",
            hosts=['foo'],
            gather_facts=False,
            tasks=[{'include_role': 'name=l3 tasks_from=alt defaults_from=alt'}]),
            loader=self.loader, variable_manager=self.var_manager)

        tasks = play.compile()
        for role, task_vars in self.get_tasks_vars(play, tasks):
            self.assertEqual(task_vars.get('l3_variable'), 'l3-alt')
            self.assertEqual(task_vars.get('test_variable'), 'l3-alt')

    @patch('ansible.playbook.role.definition.unfrackpath',
           mock_unfrackpath_noop)
    def test_nested(self):
        """
        Test nested includes with default tasks and variables.

        Variables from outer roles should be inherited, but overridden
        in inner roles.
        """
        play = Play.load(dict(
            name="test play",
            hosts=['foo'],
            gather_facts=False,
            tasks=[
                {'include_role': 'name=l1'}
            ]
        ), loader=self.loader, variable_manager=self.var_manager)

        tasks = play.compile()
        for role, task_vars in self.get_tasks_vars(play, tasks):
            # Outer-most role must not have variables from inner roles yet
            if role == 'l1':
                self.assertEqual(task_vars.get('l1_variable'), 'l1-main')
                self.assertEqual(task_vars.get('l2_variable'), None)
                self.assertEqual(task_vars.get('l3_variable'), None)
                self.assertEqual(task_vars.get('test_variable'), 'l1-main')
            # Middle role must have variables from outer role, but not inner
            elif role == 'l2':
                self.assertEqual(task_vars.get('l1_variable'), 'l1-main')
                self.assertEqual(task_vars.get('l2_variable'), 'l2-main')
                self.assertEqual(task_vars.get('l3_variable'), None)
                self.assertEqual(task_vars.get('test_variable'), 'l2-main')
            # Inner role must have variables from both outer roles
            elif role == 'l3':
                self.assertEqual(task_vars.get('l1_variable'), 'l1-main')
                self.assertEqual(task_vars.get('l2_variable'), 'l2-main')
                self.assertEqual(task_vars.get('l3_variable'), 'l3-main')
                self.assertEqual(task_vars.get('test_variable'), 'l3-main')

    @patch('ansible.playbook.role.definition.unfrackpath',
           mock_unfrackpath_noop)
    def test_nested_alt_files(self):
        """
        Test nested includes with alternative tasks and variables.

        Variables from outer roles should be inherited, but overridden
        in inner roles.
        """
        play = Play.load(dict(
            name="test play",
            hosts=['foo'],
            gather_facts=False,
            tasks=[
                {'include_role': 'name=l1 tasks_from=alt defaults_from=alt'}
            ]
        ), loader=self.loader, variable_manager=self.var_manager)

        tasks = play.compile()
        for role, task_vars in self.get_tasks_vars(play, tasks):
            # Outer-most role must not have variables from inner roles yet
            if role == 'l1':
                self.assertEqual(task_vars.get('l1_variable'), 'l1-alt')
                self.assertEqual(task_vars.get('l2_variable'), None)
                self.assertEqual(task_vars.get('l3_variable'), None)
                self.assertEqual(task_vars.get('test_variable'), 'l1-alt')
            # Middle role must have variables from outer role, but not inner
            elif role == 'l2':
                self.assertEqual(task_vars.get('l1_variable'), 'l1-alt')
                self.assertEqual(task_vars.get('l2_variable'), 'l2-alt')
                self.assertEqual(task_vars.get('l3_variable'), None)
                self.assertEqual(task_vars.get('test_variable'), 'l2-alt')
            # Inner role must have variables from both outer roles
            elif role == 'l3':
                self.assertEqual(task_vars.get('l1_variable'), 'l1-alt')
                self.assertEqual(task_vars.get('l2_variable'), 'l2-alt')
                self.assertEqual(task_vars.get('l3_variable'), 'l3-alt')
                self.assertEqual(task_vars.get('test_variable'), 'l3-alt')
def execute_playbook(play_book, host_list=None):
    """Run the playbook *play_book* against *host_list* and collect results.

    :param play_book: path to the playbook file to execute
    :param host_list: list of host names/addresses; defaults to an empty list
    :returns: dict with keys 'success', 'failed' and 'unreachable', each
        mapping host name -> the raw task result dict from the callback
    """
    # BUG FIX: the old signature used a mutable default (host_list=[]), which
    # is a single list object shared across every call; use a None sentinel.
    # (The old body also had a no-op "host_list = host_list" line.)
    host_list = list(host_list) if host_list else []

    # since the API is constructed for CLI it expects certain options to
    # always be set in the context object
    context.CLIARGS = ImmutableDict(connection='smart', module_path=None,
                                    become=None, become_method=None,
                                    become_user=None, check=False, forks=4)

    sources = ','.join(host_list)
    if len(host_list) == 1:
        # a trailing comma tells Ansible this is a literal host list,
        # not a path to an inventory file
        sources += ','

    # initialize needed objects
    loader = DataLoader()  # Takes care of finding and reading yaml, json and ini files
    passwords = dict(vault_pass='******')

    # Instantiate our ResultsCollectorJSONCallback for handling results as they
    # come in. Ansible expects this to be one of its main display outlets.
    results_callback = ResultsCollectorJSONCallback()

    inventory = InventoryManager(loader=loader, sources=sources)
    variable_manager = VariableManager(loader=loader, inventory=inventory)

    tqm = TaskQueueManager(
        inventory=inventory,
        variable_manager=variable_manager,
        loader=loader,
        passwords=passwords,
        # custom callback instead of the ``default`` callback plugin,
        # which prints to stdout
        stdout_callback=results_callback,
    )

    pbex = PlaybookExecutor(playbooks=[play_book], inventory=inventory,
                            variable_manager=variable_manager, loader=loader,
                            passwords=passwords)
    playbook = Playbook.load(pbex._playbooks[0],
                             variable_manager=variable_manager, loader=loader)
    # NOTE(review): only the FIRST play of the playbook is executed — confirm
    # that multi-play playbooks are not expected here.
    play = playbook.get_plays()[0]

    # Actually run it
    try:
        # most interesting data for a play is actually sent to the
        # callback's methods
        result = tqm.run(play)
    finally:
        # we always need to cleanup child procs and the structures we use to
        # communicate with them
        tqm.cleanup()
        if loader:
            loader.cleanup_all_tmp_files()
        # Remove ansible tmpdir
        shutil.rmtree(C.DEFAULT_LOCAL_TMP, True)

    # Fold the three callback buckets into one status-keyed dict.
    results_raw = {'success': {}, 'failed': {}, 'unreachable': {}}
    buckets = (('success', results_callback.host_ok),
               ('failed', results_callback.host_failed),
               ('unreachable', results_callback.host_unreachable))
    for status, host_results in buckets:
        for host, task_result in host_results.items():
            results_raw[status][host] = task_result._result
    return results_raw
def test_variable_manager_precedence(self):
    # FIXME: this needs to be redone as dataloader is not the automatic
    # source of data anymore
    return
    # everything below is intentionally unreachable, kept for the redo
    '''
    Tests complex variations and combinations of get_vars() with different
    objects to modify the context under which variables are merged.
    '''
    # FIXME: BCS make this work
    # return True

    mock_inventory = MagicMock()

    # INI inventory: host1 is in group1; group1 is a child of group2, so the
    # group1 group var should shadow the group2 one for host1.
    inventory1_filedata = """
        [group2:children]
        group1

        [group1]
        host1 host_var=host_var_from_inventory_host1

        [group1:vars]
        group_var = group_var_from_inventory_group1

        [group2:vars]
        group_var = group_var_from_inventory_group2
    """

    fake_loader = DictDataLoader({
        # inventory1
        '/etc/ansible/inventory1': inventory1_filedata,
        # role defaults_only1
        '/etc/ansible/roles/defaults_only1/defaults/main.yml': """
        default_var: "default_var_from_defaults_only1"
        host_var: "host_var_from_defaults_only1"
        group_var: "group_var_from_defaults_only1"
        group_var_all: "group_var_all_from_defaults_only1"
        extra_var: "extra_var_from_defaults_only1"
        """,
        '/etc/ansible/roles/defaults_only1/tasks/main.yml': """
        - debug: msg="here i am"
        """,
        # role defaults_only2
        '/etc/ansible/roles/defaults_only2/defaults/main.yml': """
        default_var: "default_var_from_defaults_only2"
        host_var: "host_var_from_defaults_only2"
        group_var: "group_var_from_defaults_only2"
        group_var_all: "group_var_all_from_defaults_only2"
        extra_var: "extra_var_from_defaults_only2"
        """,
    })

    inv1 = InventoryManager(loader=fake_loader, sources=['/etc/ansible/inventory1'])

    v = VariableManager(inventory=mock_inventory, loader=fake_loader)
    v._fact_cache = defaultdict(dict)

    play1 = Play.load(dict(
        hosts=['all'],
        roles=['defaults_only1', 'defaults_only2'],
    ), loader=fake_loader, variable_manager=v)

    # first we assert that the defaults as viewed as a whole are the merged results
    # of the defaults from each role, with the last role defined "winning" when
    # there is a variable naming conflict
    res = v.get_vars(play=play1)
    self.assertEqual(res['default_var'], 'default_var_from_defaults_only2')

    # next, we assert that when vars are viewed from the context of a task within a
    # role, that task will see its own role defaults before any other role's
    blocks = play1.compile()
    task = blocks[1].block[0]
    res = v.get_vars(play=play1, task=task)
    self.assertEqual(res['default_var'], 'default_var_from_defaults_only1')

    # next we assert the precedence of inventory variables
    v.set_inventory(inv1)
    h1 = inv1.get_host('host1')

    res = v.get_vars(play=play1, host=h1)
    self.assertEqual(res['group_var'], 'group_var_from_inventory_group1')
    self.assertEqual(res['host_var'], 'host_var_from_inventory_host1')

    # next we test with group_vars/ files loaded
    fake_loader.push("/etc/ansible/group_vars/all", """
    group_var_all: group_var_all_from_group_vars_all
    """)
    fake_loader.push("/etc/ansible/group_vars/group1", """
    group_var: group_var_from_group_vars_group1
    """)
    fake_loader.push("/etc/ansible/group_vars/group3", """
    # this is a dummy, which should not be used anywhere
    group_var: group_var_from_group_vars_group3
    """)
    fake_loader.push("/etc/ansible/host_vars/host1", """
    host_var: host_var_from_host_vars_host1
    """)
    fake_loader.push("group_vars/group1", """
    playbook_group_var: playbook_group_var
    """)
    fake_loader.push("host_vars/host1", """
    playbook_host_var: playbook_host_var
    """)

    res = v.get_vars(play=play1, host=h1)
    # these assertions are disabled pending the redo noted above
    #self.assertEqual(res['group_var'], 'group_var_from_group_vars_group1')
    #self.assertEqual(res['group_var_all'], 'group_var_all_from_group_vars_all')
    #self.assertEqual(res['playbook_group_var'], 'playbook_group_var')
    #self.assertEqual(res['host_var'], 'host_var_from_host_vars_host1')
    #self.assertEqual(res['playbook_host_var'], 'playbook_host_var')

    # add in the fact cache
    v._fact_cache['host1'] = dict(fact_cache_var="fact_cache_var_from_fact_cache")

    res = v.get_vars(play=play1, host=h1)
    self.assertEqual(res['fact_cache_var'], 'fact_cache_var_from_fact_cache')
#!/usr/bin/env python # coding=utf-8 from collections import namedtuple from ansible.executor import playbook_executor from ansible.inventory.manager import InventoryManager from ansible.parsing.dataloader import DataLoader from ansible.vars.manager import VariableManager loader = DataLoader() inventory = InventoryManager(loader=loader, sources='localhost,') extra_vars = {'name': 'create'} variable_manager = VariableManager(loader=loader, inventory=inventory) variable_manager.extra_vars = extra_vars Options = namedtuple('Options', ['connection', 'module_path', 'forks', 'become', 'become_method', 'become_user', 'check', 'diff', 'listhosts', 'listtasks', 'listtags', 'syntax']) options = Options(connection='local', module_path=['/to/mymodules'], forks=10, become=None, become_method=None, become_user=None, check=False, diff=False, listhosts=False, listtasks=False, listtags=False, syntax=False) passwords = dict(vault_pass='******') pb_executor = playbook_executor.PlaybookExecutor( playbooks=['test.yaml'], inventory=inventory,
class AnsibleAPI(object):
    """
    This is a General object for parallel execute modules.
    """

    def __init__(self, resource, *args, **kwargs):
        # *resource* is passed straight through to InventoryManager as its
        # ``sources`` argument (inventory path or host list).
        self.resource = resource
        self.inventory = None
        self.variable_manager = None
        self.loader = None
        self.options = None
        self.passwords = None
        self.callback = None
        self.__initializeData()
        self.results_raw = {}

    def __initializeData(self):
        """
        Initialize the Ansible runtime objects (loader, inventory, options).
        """
        # CLI-style options tuple expected by the (pre-2.8) TaskQueueManager API.
        Options = namedtuple('Options', [
            'connection', 'module_path', 'forks', 'timeout', 'remote_user',
            'ask_pass', 'private_key_file', 'ssh_common_args', 'ssh_extra_args',
            'sftp_extra_args', 'scp_extra_args', 'become', 'become_method',
            'become_user', 'ask_value_pass', 'verbosity', 'check', 'listhosts',
            'listtasks', 'listtags', 'syntax'
        ])
        # initialize needed objects
        # NOTE(review): the no-argument VariableManager() plus set_inventory()
        # is the pre-2.4 API — confirm the pinned Ansible version supports it.
        self.variable_manager = VariableManager()
        self.loader = DataLoader()
        self.options = Options(connection='smart', module_path='/usr/local/bin/ansible',
                               forks=100, timeout=10, remote_user='******',
                               ask_pass=False, private_key_file=None,
                               ssh_common_args=None, ssh_extra_args=None,
                               sftp_extra_args=None, scp_extra_args=None,
                               become=None, become_method=None,
                               become_user='******', ask_value_pass=False,
                               verbosity=None, check=False, listhosts=False,
                               listtasks=False, listtags=False, syntax=False)
        self.passwords = dict(sshpass=None, becomepass=None)
        self.inventory = InventoryManager(
            loader=self.loader, sources=self.resource
        )  # MyInventory(self.resource, self.loader, self.variable_manager).inventory
        self.variable_manager.set_inventory(self.inventory)

    def run(self, host_list, module_name, module_args):
        """
        Run a module via ansible ad-hoc.

        module_name: ansible module name
        module_args: ansible module args
        """
        # create play with tasks
        play_source = dict(
            name="Ansible Play",
            hosts=host_list,
            gather_facts='no',
            tasks=[dict(action=dict(module=module_name, args=module_args))])
        play = Play().load(play_source,
                           variable_manager=self.variable_manager,
                           loader=self.loader)
        # actually run it
        tqm = None
        self.callback = AnsibleTaskResultCallback()
        try:
            tqm = TaskQueueManager(
                inventory=self.inventory,
                variable_manager=self.variable_manager,
                loader=self.loader,
                options=self.options,
                passwords=self.passwords,
            )
            # poking the private attribute mirrors the original behaviour;
            # stdout_callback= on the constructor is the supported route
            tqm._stdout_callback = self.callback
            tqm.run(play)
        finally:
            if tqm is not None:
                tqm.cleanup()

    def run_playbook(self, host_list, role_name, role_uuid, temp_param):
        """
        Run an ansible playbook.
        """
        try:
            self.callback = AnsibleTaskResultCallback()
            # path to the playbook
            filenames = ['' + '/handlers/ansible/v1_0/sudoers.yml']
            template_file = ''  # path to the template file
            if not os.path.exists(template_file):
                sys.exit()
            # extra parameters for sudoers.yml and the template; the equivalent
            # of: ansible-playbook test.yml --extra-vars "host='aa' name='cc' "
            extra_vars = {}
            host_list_str = ','.join([item for item in host_list])
            extra_vars['host_list'] = host_list_str
            extra_vars['username'] = role_name
            extra_vars['template_dir'] = template_file
            extra_vars['command_list'] = temp_param.get('cmdList')
            extra_vars['role_uuid'] = 'role-%s' % role_uuid
            self.variable_manager.extra_vars = extra_vars
            # actually run it
            executor = PlaybookExecutor(
                playbooks=filenames,
                inventory=self.inventory,
                variable_manager=self.variable_manager,
                loader=self.loader,
                options=self.options,
                passwords=self.passwords,
            )
            executor._tqm._stdout_callback = self.callback
            executor.run()
        except Exception as e:
            # NOTE(review): Python 3 exceptions have no .message attribute, and
            # logger.error treats the second positional arg as a %-format value
            # for a format string with no placeholder — confirm/fix upstream.
            logger.error("error:", e.message)

    def get_result(self):
        # Fold the callback buckets into a status-keyed dict:
        # success -> full result; failed -> 'msg' if present, else full result;
        # unreachable -> 'msg' only.
        self.results_raw = {'success': {}, 'failed': {}, 'unreachable': {}}
        for host, result in self.callback.host_ok.items():
            self.results_raw['success'][host] = result._result
        for host, result in self.callback.host_failed.items():
            self.results_raw['failed'][host] = result._result.get('msg') or result._result
        for host, result in self.callback.host_unreachable.items():
            self.results_raw['unreachable'][host] = result._result['msg']
        return self.results_raw
def test_variable_manager_precedence(self):
    # FIXME: this needs to be redone as dataloader is not the automatic source of data anymore
    return
    # pylint: disable=unreachable

    '''
    Tests complex variations and combinations of get_vars() with different
    objects to modify the context under which variables are merged.
    '''
    # FIXME: BCS make this work
    # return True

    mock_inventory = MagicMock()

    # INI inventory: host1 is in group1; group1 is a child of group2, so
    # group1's group var should shadow group2's for host1.
    inventory1_filedata = """
        [group2:children]
        group1

        [group1]
        host1 host_var=host_var_from_inventory_host1

        [group1:vars]
        group_var = group_var_from_inventory_group1

        [group2:vars]
        group_var = group_var_from_inventory_group2
    """

    fake_loader = DictDataLoader({
        # inventory1
        '/etc/ansible/inventory1': inventory1_filedata,
        # role defaults_only1
        '/etc/ansible/roles/defaults_only1/defaults/main.yml': """
        default_var: "default_var_from_defaults_only1"
        host_var: "host_var_from_defaults_only1"
        group_var: "group_var_from_defaults_only1"
        group_var_all: "group_var_all_from_defaults_only1"
        extra_var: "extra_var_from_defaults_only1"
        """,
        '/etc/ansible/roles/defaults_only1/tasks/main.yml': """
        - debug: msg="here i am"
        """,
        # role defaults_only2
        '/etc/ansible/roles/defaults_only2/defaults/main.yml': """
        default_var: "default_var_from_defaults_only2"
        host_var: "host_var_from_defaults_only2"
        group_var: "group_var_from_defaults_only2"
        group_var_all: "group_var_all_from_defaults_only2"
        extra_var: "extra_var_from_defaults_only2"
        """,
    })

    inv1 = InventoryManager(loader=fake_loader, sources=['/etc/ansible/inventory1'])

    v = VariableManager(inventory=mock_inventory, loader=fake_loader)
    v._fact_cache = defaultdict(dict)

    play1 = Play.load(dict(
        hosts=['all'],
        roles=['defaults_only1', 'defaults_only2'],
    ), loader=fake_loader, variable_manager=v)

    # first we assert that the defaults as viewed as a whole are the merged results
    # of the defaults from each role, with the last role defined "winning" when
    # there is a variable naming conflict
    res = v.get_vars(play=play1)
    self.assertEqual(res['default_var'], 'default_var_from_defaults_only2')

    # next, we assert that when vars are viewed from the context of a task within a
    # role, that task will see its own role defaults before any other role's
    blocks = play1.compile()
    task = blocks[1].block[0]
    res = v.get_vars(play=play1, task=task)
    self.assertEqual(res['default_var'], 'default_var_from_defaults_only1')

    # next we assert the precedence of inventory variables
    v.set_inventory(inv1)
    h1 = inv1.get_host('host1')

    res = v.get_vars(play=play1, host=h1)
    self.assertEqual(res['group_var'], 'group_var_from_inventory_group1')
    self.assertEqual(res['host_var'], 'host_var_from_inventory_host1')

    # next we test with group_vars/ files loaded
    fake_loader.push("/etc/ansible/group_vars/all", """
    group_var_all: group_var_all_from_group_vars_all
    """)
    fake_loader.push("/etc/ansible/group_vars/group1", """
    group_var: group_var_from_group_vars_group1
    """)
    fake_loader.push("/etc/ansible/group_vars/group3", """
    # this is a dummy, which should not be used anywhere
    group_var: group_var_from_group_vars_group3
    """)
    fake_loader.push("/etc/ansible/host_vars/host1", """
    host_var: host_var_from_host_vars_host1
    """)
    fake_loader.push("group_vars/group1", """
    playbook_group_var: playbook_group_var
    """)
    fake_loader.push("host_vars/host1", """
    playbook_host_var: playbook_host_var
    """)

    res = v.get_vars(play=play1, host=h1)
    # these assertions are disabled pending the redo noted above
    # self.assertEqual(res['group_var'], 'group_var_from_group_vars_group1')
    # self.assertEqual(res['group_var_all'], 'group_var_all_from_group_vars_all')
    # self.assertEqual(res['playbook_group_var'], 'playbook_group_var')
    # self.assertEqual(res['host_var'], 'host_var_from_host_vars_host1')
    # self.assertEqual(res['playbook_host_var'], 'playbook_host_var')

    # add in the fact cache
    v._fact_cache['host1'] = dict(fact_cache_var="fact_cache_var_from_fact_cache")

    res = v.get_vars(play=play1, host=h1)
    self.assertEqual(res['fact_cache_var'], 'fact_cache_var_from_fact_cache')
# BUG FIX: added the missing os/sys/namedtuple imports this script uses, and
# converted the Python-2 print statement to the print() function — the old form
# is a SyntaxError on Python 3, which the modern ansible.vars.manager import
# layout used here requires.
import os
import sys
from collections import namedtuple

from ansible.parsing.dataloader import DataLoader
from ansible.vars.manager import VariableManager
from ansible.inventory.manager import InventoryManager
from ansible.executor.playbook_executor import PlaybookExecutor
from ansible.playbook import Playbook
from ansible.playbook.task import Task
from ansible.utils.unicode import to_text

try:
    from __main__ import display
except ImportError:
    from ansible.utils.display import Display
    display = Display()

variable_manager = VariableManager()
loader = DataLoader()
#inventory = InventoryManager(loader=loader, variable_manager=variable_manager, sources='./sample_hosts')
inventory = InventoryManager(loader=loader, sources='./sample_hosts')

# Bail out early if the playbook file is missing.
playbook_path = 'sample.yml'
if not os.path.exists(playbook_path):
    print('[INFO] The playbook does not exist')
    sys.exit()

# CLI-style options object expected by the (pre-2.8) PlaybookExecutor API.
Options = namedtuple('Options', ['listtags', 'listtasks', 'listhosts', 'syntax',
                                 'connection', 'module_path', 'forks',
                                 'remote_user', 'private_key_file',
                                 'ssh_common_args', 'ssh_extra_args',
                                 'sftp_extra_args', 'scp_extra_args', 'become',
                                 'become_method', 'become_user', 'verbosity',
                                 'check'])
options = Options(listtags=False, listtasks=False, listhosts=False, syntax=False,
                  connection='ssh', module_path=None, forks=100,
                  remote_user='******', private_key_file=None,
                  ssh_common_args=None, ssh_extra_args=None,
                  sftp_extra_args=None, scp_extra_args=None, become=True,
                  become_method=None, become_user='******', verbosity=None,
                  check=False)

# This can accommodate various other command line arguments.
variable_manager.extra_vars = {'hosts': 'mywebserver'}
def get_variable_manager(data_loader, inventory_manager):
    """Build a VariableManager wired to *data_loader* and *inventory_manager*.

    Uses explicit keyword arguments — matching every other VariableManager
    construction in this codebase — so the call stays correct regardless of
    the constructor's positional parameter order.
    """
    return VariableManager(loader=data_loader, inventory=inventory_manager)
# initialize needed objects loader = DataLoader( ) # Takes care of finding and reading yaml, json and ini files passwords = dict(vault_pass='******') # Instantiate our ResultCallback for handling results as they come in. Ansible expects this to be one of its main display outlets results_callback = ResultCallback() # create inventory, use path to host config file as source or hosts in a comma separated string inventory = InventoryManager( loader=loader, sources= '/Users/macpro/Desktop/ansible-flow-tutorial/inventories/env1/inventory') # variable manager takes care of merging all the different sources to give you a unifed view of variables available in each context variable_manager = VariableManager(loader=loader, inventory=inventory) # create datastructure that represents our play, including tasks, this is basically what our YAML loader does internally. play_source = dict( name="Ansible Play", hosts='localhost, dpdk', gather_facts='no', tasks=[ dict(action=dict(module='shell', args='hostname'), register='shell_out'), # dict(action=dict(module='shell', args='ls'), register='shell_out'), # dict(action=dict(module='debug', args=dict(msg='{{shell_out.stdout}}'))) ]) # Create play object, playbook objects use .load instead of init or new methods,
def __init__(
        self,
        hosts=C.DEFAULT_HOST_LIST,
        module_name=C.DEFAULT_MODULE_NAME,  # * command module
        module_args=C.DEFAULT_MODULE_ARGS,  # * 'cmd args'
        forks=C.DEFAULT_FORKS,              # 5
        timeout=C.DEFAULT_TIMEOUT,          # SSH timeout = 10s
        pattern="all",                      # all
        remote_user=C.DEFAULT_REMOTE_USER,  # root
        module_path=None,                   # dirs of custome modules
        connection_type="smart",
        become=None,
        become_method=None,
        become_user=None,
        check=False,
        passwords=None,
        extra_vars=None,
        private_key_file=None
):
    """Prepare (but do not execute) a single ad-hoc module run.

    Builds the loader, inventory, variable manager, the one-task play and the
    TaskQueueManager; execution presumably happens via ``self.runner`` later.
    """
    # storage & defaults
    self.pattern = pattern
    self.loader = DataLoader()
    self.module_name = module_name
    self.module_args = module_args
    # validates the module arguments before anything is built
    self.check_module_args()
    self.gather_facts = 'no'
    self.resultcallback = ResultCallback()
    # CLI-style options object expected by the (pre-2.8) TaskQueueManager API.
    # extra_vars defaults to [] because load_extra_vars iterates
    # options.extra_vars.
    self.options = Options(
        connection=connection_type,
        timeout=timeout,
        module_path=module_path,
        forks=forks,
        become=become,
        become_method=become_method,
        become_user=become_user,
        check=check,
        remote_user=remote_user,
        extra_vars=extra_vars or [],
        private_key_file=private_key_file,
        diff=False
    )
    self.inventory = MyInventory(host_list=hosts)
    # positional args: (loader, inventory)
    self.variable_manager = VariableManager(self.loader, self.inventory)
    self.variable_manager.extra_vars = load_extra_vars(loader=self.loader,
                                                       options=self.options)
    self.variable_manager.options_vars = load_options_vars(self.options, "")
    self.passwords = passwords or {}
    # one-task play wrapping the requested module/args
    self.play_source = dict(
        name="Ansible Ad-hoc",
        hosts=self.pattern,
        gather_facts=self.gather_facts,
        tasks=[dict(action=dict(
            module=self.module_name, args=self.module_args))]
    )
    self.play = Play().load(
        self.play_source,
        variable_manager=self.variable_manager,
        loader=self.loader)
    self.runner = TaskQueueManager(
        inventory=self.inventory,
        variable_manager=self.variable_manager,
        loader=self.loader,
        options=self.options,
        passwords=self.passwords,
        stdout_callback=self.resultcallback
    )
def execute_playbook(self, playbook_info):
    """Execute the playbook described by *playbook_info* and return its output.

    :param playbook_info: dict with at least 'uri' (playbook path) and
        'extra_vars' (containing 'playbook_input' with 'job_execution_id'
        and 'unique_pb_id')
    :returns: the plugin output dict when the run succeeds
    :raises SystemExit: on any failure, after logging and writing the END
        marker to the job file
    """
    output = None
    try:
        loader = DataLoader()
        inventory = InventoryManager(loader=loader, sources=['localhost'])
        variable_manager = VariableManager(loader=loader,
                                           inventory=inventory)
        # CLI-style options object expected by the (pre-2.8) executor API.
        Options = namedtuple('Options',
                             ['listtags', 'listtasks', 'listhosts',
                              'syntax', 'connection', 'module_path',
                              'forks', 'remote_user', 'private_key_file',
                              'ssh_common_args', 'ssh_extra_args',
                              'sftp_extra_args', 'scp_extra_args', 'become',
                              'become_method', 'become_user', 'verbosity',
                              'check', 'diff'])
        options = Options(listtags=False, listtasks=False, listhosts=False,
                          syntax=False, connection='ssh', module_path=None,
                          forks=100, remote_user=None, private_key_file=None,
                          ssh_common_args=None, ssh_extra_args=None,
                          sftp_extra_args=None, scp_extra_args=None,
                          become=None, become_method=None, become_user=None,
                          verbosity=None, check=False, diff=False)
        variable_manager.extra_vars = playbook_info['extra_vars']
        pbex = PlaybookExecutor(playbooks=[playbook_info['uri']],
                                inventory=inventory,
                                variable_manager=variable_manager,
                                loader=loader, options=options,
                                passwords=None)
        ret_val = pbex.run()
        output = self.get_plugin_output(pbex)

        if ret_val != 0:
            msg = MsgBundle.getMessage(MsgBundle.
                                       PLAYBOOK_RETURN_WITH_ERROR)
            raise Exception(msg)

        if output is None or output.get('status') is None:
            msg = MsgBundle.getMessage(MsgBundle.
                                       PLAYBOOK_OUTPUT_MISSING)
            raise Exception(msg)

        if output.get('status').lower() == "failure":
            msg = MsgBundle.getMessage(MsgBundle.
                                       PLAYBOOK_STATUS_FAILED)
            raise Exception(msg)

        return output
    except Exception as exp:
        msg = MsgBundle.getMessage(MsgBundle.PLAYBOOK_EXECUTE_ERROR,
                                   playbook_uri=playbook_info['uri'],
                                   execution_id=playbook_info['extra_vars']
                                   ['playbook_input']['job_execution_id'],
                                   exc_msg=repr(exp))
        # BUG FIX: Python 3 exceptions have no .message attribute, so the
        # old `if exp.message:` raised AttributeError inside this handler,
        # masking the real error. getattr() keeps Python 2 behaviour intact.
        exp_message = getattr(exp, 'message', None)
        if exp_message:
            msg = msg + "\n" + exp_message
        JM_LOGGER.error(msg)

        # after handling exception, write an END
        # to stop listening to the file if created
        unique_pb_id = playbook_info['extra_vars'][
            'playbook_input']['unique_pb_id']
        exec_id = playbook_info['extra_vars']['playbook_input'][
            'job_execution_id']
        self._job_file_write.write_to_file(
            exec_id, unique_pb_id, JobFileWrite.PLAYBOOK_OUTPUT,
            json.dumps(output)
        )
        with open("/tmp/" + exec_id, "a") as f:
            f.write(unique_pb_id + 'END' + PLAYBOOK_EOL_PATTERN)
        sys.exit(msg)
class PackageInstallerTask():
    #: Name of the task.
    name = 'PackageInstallerTask'

    def __init__(self, host_dict):
        # Per-run settings; all populated later by run().
        self.package = None
        self.connection_type = None
        self.connection_port = None
        self.connection_user = None
        self.ansible_password = None
        self.loader = self.get_loader()
        self.variable_manager = VariableManager(loader=self.loader)
        # NOTE(review): self.ansible_password is still None here, so
        # create_inventory receives no usable password — confirm intended.
        self.inventory = create_inventory(host_dict, self.variable_manager,
                                          self.loader, self.ansible_password)

    def get_loader(self, base_dir='../playbooks'):
        """Return a DataLoader rooted at *base_dir*."""
        # Takes care of finding and reading yml, json and ini files
        loader = DataLoader()
        loader.set_basedir(base_dir)
        return loader

    def variable_manager_initialization(self, ip_address):
        """Seed per-host connection variables for the 'ubuntu' and 'windows'
        inventory groups."""
        for host in self.inventory._inventory.hosts.values():
            for group in host.groups:
                if group.name == 'ubuntu':
                    self.variable_manager.set_host_variable(
                        host, 'ansible_connection', self.connection_type)
                    self.variable_manager.set_host_variable(
                        host, 'ansible_host', ip_address)
                    self.variable_manager.set_host_variable(
                        host, 'ansible_port', self.connection_port)
                    self.variable_manager.set_host_variable(
                        host, 'ansible_user', self.connection_user)
                    self.variable_manager.set_host_variable(
                        host, 'ansible_ssh_private_key_file',
                        '/home/ansible/.ssh/id_rsa')
                    self.variable_manager.set_host_variable(
                        host, 'ansible_become_user', self.connection_user)
                    self.variable_manager.set_host_variable(
                        host, 'ansible_become', 'yes')
                    # SECURITY NOTE(review): hardcoded become password —
                    # move to a vault/secret store.
                    self.variable_manager.set_host_variable(
                        host, 'ansible_become_password', 'Mp3b27')
                if group.name == 'windows':
                    self.variable_manager.set_host_variable(
                        host, 'ansible_user', self.connection_user)
                    self.variable_manager.set_host_variable(
                        host, 'ansible_password', self.ansible_password)
                    self.variable_manager.set_host_variable(
                        host, 'ansible_port', self.connection_port)
                    self.variable_manager.set_host_variable(
                        host, 'ansible_username', self.connection_user)
                    self.variable_manager.set_host_variable(
                        host, 'ansible_connection', 'winrm')
                    self.variable_manager.set_host_variable(
                        host, 'ansible_winrm_transport', 'ntlm')
                    self.variable_manager.set_host_variable(
                        host, 'ansible_winrm_server_cert_validation', 'ignore')

    def _install(self, ip_address):
        """Run a play that applies self.package as a role on *ip_address*."""
        # initialize needed objects
        # self.variable_manager_initialization(ip_address)
        host_list = [ip_address]
        hosts = ','.join(host_list)
        if len(host_list) == 1:
            # trailing comma marks a literal host list, not an inventory path
            hosts += ','
        package = [self.package]
        # create data structure that represents our play
        # including tasks, this is basically what our YAML loader does internally.
        print(f'hosts : {ip_address} \n roles : {package}')
        play_source = {
            'hosts': hosts,
            'gather_facts': True,
            'become': True,
            'become_user': '******',
            'become_method': 'sudo',
            'roles': package
        }
        passwords = dict()
        results_callback = ResultsCollectorJSONCallback()
        tqm = TaskQueueManager(
            inventory=self.inventory,
            variable_manager=self.variable_manager,
            loader=self.loader,
            passwords=passwords,
            stdout_callback=results_callback,
        )
        # Create play object, playbook objects use .load instead of init or new methods,
        # this will also automatically create the task objects from the info provided in play_source
        play = Play().load(play_source,
                           variable_manager=self.variable_manager,
                           loader=self.loader,
                           vars={'ansible_become_pass': '******'})
        # Actually run it
        try:
            # most interesting data for a play is actually sent to the
            # callback's methods
            result = tqm.run(play)
        finally:
            # we always need to cleanup child procs and the structures we use
            # to communicate with them
            tqm.cleanup()
            if self.loader:
                self.loader.cleanup_all_tmp_files()
            shutil.rmtree(C.DEFAULT_LOCAL_TMP, True)
        print("UP ***********")
        for host, result in results_callback.host_ok.items():
            print('{0} >>> {1}'.format(host, result._result))
        print("FAILED *******")
        for host, result in results_callback.host_failed.items():
            print('{0} >>> {1}'.format(host, result._result['msg']))
        print("DOWN *********")
        for host, result in results_callback.host_unreachable.items():
            print('{0} >>> {1}'.format(host, result._result['msg']))

    def run(self, ip_address, package, connection_type,
            connection_user, connection_port, ansible_password):
        """Store the connection parameters and install *package* on the host."""
        print(f'ip: {ip_address}, connection_user: {connection_user}')
        self.package = package
        self.connection_type = connection_type
        self.connection_port = connection_port
        self.connection_user = connection_user
        self.ansible_password = ansible_password
        self._install(ip_address)
def _run_ansible(self, args):
    """Build a one-task Ansible play around the requested zbx module, run it,
    and return the raw result dict.

    Raises ResultsException when the run returns non-zero, the host is
    unreachable, or the module reports failure.
    """
    module_name = args.pop('zbx_class')

    # The leadup to the TaskQueueManager() call below is
    # copy pasted from Ansible's example:
    # https://docs.ansible.com/ansible/developing_api.html#python-api-2-0
    # pylint: disable=invalid-name
    Options = namedtuple('Options',
                         ['connection', 'module_path', 'forks', 'become',
                          'become_method', 'become_user', 'check'])

    data_loader = DataLoader()
    run_opts = Options(connection='local', module_path=None, forks=1,
                       become=None, become_method=None, become_user=None,
                       check=False)
    secrets = dict(vault_pass='******')
    callback = ResultsCallback()

    host_inventory = InventoryManager(loader=data_loader)
    var_mgr = VariableManager(loader=data_loader, inventory=host_inventory)

    play_src = {
        'name': "Ansible Play",
        'hosts': self.pattern,
        'gather_facts': 'no',
        'tasks': [dict(action=dict(module=module_name, args=args))],
    }
    play = Play().load(play_src, variable_manager=var_mgr, loader=data_loader)

    task_mgr = None
    try:
        task_mgr = TaskQueueManager(inventory=host_inventory,
                                    variable_manager=var_mgr,
                                    loader=data_loader,
                                    options=run_opts,
                                    passwords=secrets,
                                    stdout_callback=callback)
        rc = task_mgr.run(play)
    finally:
        if task_mgr is not None:
            task_mgr.cleanup()

    if rc != 0:
        raise ResultsException("Ansible module run failed, no results given.")

    if callback.result.is_unreachable():
        message = "Ansible module run failed: module output:\n%s" % \
                  json.dumps(callback.raw_result, indent=4)
        raise ResultsException(message)

    if callback.result.is_failed():
        raise ResultsException(callback.raw_result)

    return callback.raw_result
from ansible.parsing.dataloader import DataLoader
from ansible.inventory.manager import InventoryManager
from ansible.vars.manager import VariableManager

from pprint import pprint

if __name__ == '__main__':
    # Inspect an inventory file: dump its group mapping, host list and the
    # merged variable view.
    inventory_file_name = './hosts.yml'
    data_loader = DataLoader()
    inventory = InventoryManager(loader=data_loader,
                                 sources=[inventory_file_name])
    variable_manager = VariableManager(loader=data_loader,
                                       inventory=inventory)

    # PERF/CLARITY FIX: fetch the group mapping once instead of calling
    # inventory.get_groups_dict() twice; printed output is unchanged.
    data = inventory.get_groups_dict()
    pprint(data['all'])
    pprint(inventory.list_hosts())
    pprint(variable_manager.get_vars())
    pprint(data)