def test_vars_files_for_host(self):
    # host != None
    # vars in filename2
    # no vars in filename3

    # make a vars file
    fd, temp_path = mkstemp()
    f = open(temp_path, "wb")
    f.write("foo: bar\n")
    f.close()

    # build play attributes
    playbook = FakePlayBook()
    ds = {"hosts": "localhost",
          "vars_files": ["{{ temp_path }}"]}
    basedir = "."
    playbook.VARS_CACHE['localhost']['temp_path'] = temp_path

    # create play and do first run
    play = Play(playbook, ds, basedir)

    # the second run is started by calling update_vars_files
    play.update_vars_files(['localhost'])
    os.remove(temp_path)

    assert 'foo' in play.playbook.VARS_CACHE['localhost'], \
        "vars_file vars were not loaded into vars_cache"
    assert play.playbook.VARS_CACHE['localhost']['foo'] == 'bar', \
        "foo does not equal bar"
def _load_playbook_data(self, file_name, variable_manager):

    if os.path.isabs(file_name):
        self._basedir = os.path.dirname(file_name)
    else:
        self._basedir = os.path.normpath(os.path.join(self._basedir, os.path.dirname(file_name)))

    # set the loaders basedir
    self._loader.set_basedir(self._basedir)

    # also add the basedir to the list of module directories
    push_basedir(self._basedir)

    ds = self._loader.load_from_file(os.path.basename(file_name))
    if not isinstance(ds, list):
        raise AnsibleParserError("playbooks must be a list of plays", obj=ds)

    # Parse the playbook entries. For plays, we simply parse them
    # using the Play() object, and includes are parsed using the
    # PlaybookInclude() object
    for entry in ds:
        if not isinstance(entry, dict):
            raise AnsibleParserError("playbook entries must be either a valid play or an include statement", obj=entry)

        if 'include' in entry:
            pb = PlaybookInclude.load(entry, basedir=self._basedir, variable_manager=variable_manager, loader=self._loader)
            self._entries.extend(pb._entries)
        else:
            entry_obj = Play.load(entry, variable_manager=variable_manager, loader=self._loader)
            self._entries.append(entry_obj)
def _load_playbook_data(self, file_name):

    # add the base directory of the file to the data loader,
    # so that it knows where to find relatively pathed files
    basedir = os.path.dirname(file_name)
    self._loader.set_basedir(basedir)

    # also add the basedir to the list of module directories
    push_basedir(basedir)

    ds = self._loader.load_from_file(file_name)
    if not isinstance(ds, list):
        raise AnsibleParserError("playbooks must be a list of plays", obj=ds)

    # Parse the playbook entries. For plays, we simply parse them
    # using the Play() object, and includes are parsed using the
    # PlaybookInclude() object
    for entry in ds:
        if not isinstance(entry, dict):
            raise AnsibleParserError("playbook entries must be either a valid play or an include statement", obj=entry)

        if 'include' in entry:
            entry_obj = PlaybookInclude.load(entry, loader=self._loader)
        else:
            entry_obj = Play.load(entry, loader=self._loader)

        self._entries.append(entry_obj)
def test_play_with_post_tasks(self):
    p = Play.load(dict(
        name="test play",
        hosts=['foo'],
        gather_facts=False,
        post_tasks=[dict(action='shell echo "hello world"')],
    ))
def test_play_with_user_conflict(self):
    p = Play.load(dict(
        name="test play",
        hosts=['foo'],
        user="******",
        gather_facts=False,
    ))
    self.assertEqual(p.remote_user, "testing")
def test_basic_play(self):
    p = Play.load(dict(
        name="test play",
        hosts=['foo'],
        gather_facts=False,
        connection='local',
        remote_user="******",
        become=True,
        become_user="******",
    ))
def test_vars_files_two_vars_different_scope(self):
    #
    # Use a play var and an inventory var to create the filename
    #
    # self.playbook.inventory.get_variables(host)
    # {'group_names': ['ungrouped'], 'inventory_hostname': 'localhost',
    #  'ansible_ssh_user': '******', 'inventory_hostname_short': 'localhost'}

    # make a temp dir
    temp_dir = mkdtemp()

    # make a temp file
    fd, temp_file = mkstemp(dir=temp_dir)
    f = open(temp_file, "wb")
    f.write("foo: bar\n")
    f.close()

    # build play attributes
    playbook = FakePlayBook()
    playbook.inventory.hosts['localhost'] = {'inventory_hostname': os.path.basename(temp_file)}
    ds = {"hosts": "localhost",
          "vars": {"temp_dir": os.path.dirname(temp_file)},
          "vars_files": ["{{ temp_dir + '/' + inventory_hostname }}"]}
    basedir = "."

    # create play and do first run
    play = Play(playbook, ds, basedir)

    # do the host run
    play.update_vars_files(['localhost'])

    # cleanup
    shutil.rmtree(temp_dir)

    assert 'foo' not in play.vars, \
        "mixed scope vars_file loaded into play vars"
    assert 'foo' in play.playbook.VARS_CACHE['localhost'], \
        "differently scoped templated vars_files filename not loaded"
    assert play.playbook.VARS_CACHE['localhost']['foo'] == 'bar', \
        "foo is not bar"
def _load_playbook_data(self, file_name, variable_manager, vars=None):

    if os.path.isabs(file_name):
        self._basedir = os.path.dirname(file_name)
    else:
        self._basedir = os.path.normpath(os.path.join(self._basedir, os.path.dirname(file_name)))

    # set the loaders basedir
    cur_basedir = self._loader.get_basedir()
    self._loader.set_basedir(self._basedir)

    self._file_name = file_name

    # dynamically load any plugins from the playbook directory
    for name, obj in get_all_plugin_loaders():
        if obj.subdir:
            plugin_path = os.path.join(self._basedir, obj.subdir)
            if os.path.isdir(plugin_path):
                obj.add_directory(plugin_path)

    try:
        ds = self._loader.load_from_file(os.path.basename(file_name))
    except UnicodeDecodeError as e:
        raise AnsibleParserError("Could not read playbook (%s) due to encoding issues: %s" % (file_name, to_native(e)))

    if not isinstance(ds, list):
        # restore the basedir in case this error is caught and handled
        self._loader.set_basedir(cur_basedir)
        raise AnsibleParserError("playbooks must be a list of plays", obj=ds)

    # Parse the playbook entries. For plays, we simply parse them
    # using the Play() object, and includes are parsed using the
    # PlaybookInclude() object
    for entry in ds:
        if not isinstance(entry, dict):
            # restore the basedir in case this error is caught and handled
            self._loader.set_basedir(cur_basedir)
            raise AnsibleParserError("playbook entries must be either a valid play or an include statement", obj=entry)

        if any(action in entry for action in ('import_playbook', 'include')):
            if 'include' in entry:
                display.deprecated("'include' for playbook includes. You should use 'import_playbook' instead", version="2.8")
            pb = PlaybookInclude.load(entry, basedir=self._basedir, variable_manager=variable_manager, loader=self._loader)
            if pb is not None:
                self._entries.extend(pb._entries)
            else:
                which = entry.get('import_playbook', entry.get('include', entry))
                display.display("skipping playbook '%s' due to conditional test failure" % which, color=C.COLOR_SKIP)
        else:
            entry_obj = Play.load(entry, variable_manager=variable_manager, loader=self._loader, vars=vars)
            self._entries.append(entry_obj)

    # we're done, so restore the old basedir in the loader
    self._loader.set_basedir(cur_basedir)
def test_play_compile(self):
    p = Play.load(dict(
        name="test play",
        hosts=['foo'],
        gather_facts=False,
        tasks=[dict(action='shell echo "hello world"')],
    ))

    tasks = p.compile()
    self.assertEqual(len(tasks), 1)
    self.assertIsInstance(tasks[0], Task)
def test_variable_manager_role_vars_dependencies(self):
    '''
    Tests vars from role dependencies with duplicate dependencies.
    '''
    mock_inventory = MagicMock()

    fake_loader = DictDataLoader({
        # role common-role
        '/etc/ansible/roles/common-role/tasks/main.yml': """
        - debug: msg="{{role_var}}"
        """,
        # We do not need allow_duplicates: yes for this role
        # because eliminating duplicates is done by the execution
        # strategy, which we do not test here.

        # role role1
        '/etc/ansible/roles/role1/vars/main.yml': """
        role_var: "role_var_from_role1"
        """,
        '/etc/ansible/roles/role1/meta/main.yml': """
        dependencies:
          - { role: common-role }
        """,

        # role role2
        '/etc/ansible/roles/role2/vars/main.yml': """
        role_var: "role_var_from_role2"
        """,
        '/etc/ansible/roles/role2/meta/main.yml': """
        dependencies:
          - { role: common-role }
        """,
    })

    v = VariableManager(loader=fake_loader, inventory=mock_inventory)
    v._fact_cache = defaultdict(dict)

    play1 = Play.load(dict(
        hosts=['all'],
        roles=['role1', 'role2'],
    ), loader=fake_loader, variable_manager=v)

    # The task defined by common-role exists twice because role1
    # and role2 depend on common-role. Check that the tasks see
    # different values of role_var.
    blocks = play1.compile()
    task = blocks[1].block[0]
    res = v.get_vars(play=play1, task=task)
    self.assertEqual(res['role_var'], 'role_var_from_role1')

    task = blocks[2].block[0]
    res = v.get_vars(play=play1, task=task)
    self.assertEqual(res['role_var'], 'role_var_from_role2')
def test_play_compile(self):
    p = Play.load(dict(
        name="test play",
        hosts=['foo'],
        gather_facts=False,
        tasks=[dict(action='shell echo "hello world"')],
    ))

    blocks = p.compile()

    # with a single block, there will still be three
    # implicit meta flush_handler blocks inserted
    self.assertEqual(len(blocks), 4)
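# A sketch (assuming Ansible 2.5+, where Play.compile() returns Block objects)
# of what the four blocks asserted above actually contain: the user's task is
# wrapped by implicit 'meta: flush_handlers' blocks that run before tasks,
# after tasks, and at the end of the play. The helper name is illustrative,
# not part of the test suite above.
def show_compiled_blocks(play):
    for block in play.compile():
        for task in block.block:
            print(task.action, task.args)

# Typical output for the play above resembles:
#   meta  {'_raw_params': 'flush_handlers'}
#   shell {'_raw_params': 'echo "hello world"'}
#   meta  {'_raw_params': 'flush_handlers'}
#   meta  {'_raw_params': 'flush_handlers'}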
def test_play_with_roles(self):
    fake_loader = DictDataLoader({
        '/etc/ansible/roles/foo/tasks.yml': """
        - name: role task
          shell: echo "hello world"
        """,
    })

    p = Play.load(dict(
        name="test play",
        hosts=['foo'],
        gather_facts=False,
        roles=['foo'],
    ), loader=fake_loader)

    tasks = p.compile()
def _load_playbook_data(self, file_name, variable_manager):

    if os.path.isabs(file_name):
        self._basedir = os.path.dirname(file_name)
    else:
        self._basedir = os.path.normpath(os.path.join(self._basedir, os.path.dirname(file_name)))

    # set the loaders basedir
    cur_basedir = self._loader.get_basedir()
    self._loader.set_basedir(self._basedir)

    self._file_name = file_name

    # dynamically load any plugins from the playbook directory
    for name, obj in get_all_plugin_loaders():
        if obj.subdir:
            plugin_path = os.path.join(self._basedir, obj.subdir)
            if os.path.isdir(plugin_path):
                obj.add_directory(plugin_path)

    ds = self._loader.load_from_file(os.path.basename(file_name))
    if not isinstance(ds, list):
        # restore the basedir in case this error is caught and handled
        self._loader.set_basedir(cur_basedir)
        raise AnsibleParserError("playbooks must be a list of plays", obj=ds)

    # Parse the playbook entries. For plays, we simply parse them
    # using the Play() object, and includes are parsed using the
    # PlaybookInclude() object
    for entry in ds:
        if not isinstance(entry, dict):
            # restore the basedir in case this error is caught and handled
            self._loader.set_basedir(cur_basedir)
            raise AnsibleParserError("playbook entries must be either a valid play or an include statement", obj=entry)

        if 'include' in entry:
            pb = PlaybookInclude.load(entry, basedir=self._basedir, variable_manager=variable_manager, loader=self._loader)
            if pb is not None:
                self._entries.extend(pb._entries)
            else:
                display.display("skipping playbook include '%s' due to conditional test failure" % entry.get('include', entry), color=C.COLOR_SKIP)
        else:
            entry_obj = Play.load(entry, variable_manager=variable_manager, loader=self._loader)
            self._entries.append(entry_obj)

    # we're done, so restore the old basedir in the loader
    self._loader.set_basedir(cur_basedir)
def test_play_with_roles(self):
    fake_loader = DictDataLoader({
        '/etc/ansible/roles/foo/tasks.yml': """
        - name: role task
          shell: echo "hello world"
        """,
    })

    mock_var_manager = MagicMock()
    mock_var_manager.get_vars.return_value = dict()

    p = Play.load(dict(
        name="test play",
        hosts=['foo'],
        gather_facts=False,
        roles=['foo'],
    ), loader=fake_loader, variable_manager=mock_var_manager)

    blocks = p.compile()
def run_ansible(module_name, module_args, host_list, option_dict):
    # initialize the needed objects
    Options = namedtuple('Options', [
        'connection', 'module_path', 'forks', 'become', 'become_method',
        'private_key_file', 'become_user', 'remote_user', 'check', 'diff'
    ])

    # responsible for finding and reading yaml, json and ini files
    loader = DataLoader()
    options = Options(connection='ssh',
                      module_path=None,
                      forks=5,
                      become=option_dict['become'],
                      become_method='sudo',
                      private_key_file="/root/.ssh/id_rsa",
                      become_user='******',
                      remote_user=option_dict['remote_user'],
                      check=False,
                      diff=False)
    passwords = dict(vault_pass='******')

    # instantiate a ResultsCollector callback to handle results as they come in
    callback = ResultsCollector()

    # create the inventory and pass it to the VariableManager
    inventory = InventoryManager(loader=loader, sources=['/etc/ansible/hosts'])
    variable_manager = VariableManager(loader=loader, inventory=inventory)

    # create the task
    host = ",".join(host_list)
    play_source = dict(name="Ansible Play",
                       hosts=host,
                       gather_facts='no',
                       tasks=[
                           dict(action=dict(module=module_name, args=module_args), register='shell_out'),
                       ])
    play = Play().load(play_source, variable_manager=variable_manager, loader=loader)

    # run it
    tqm = None
    tqm = TaskQueueManager(
        inventory=inventory,
        variable_manager=variable_manager,
        loader=loader,
        options=options,
        passwords=passwords,
        stdout_callback=callback,
    )
    result = tqm.run(play)

    result_raw = {'success': {}, 'failed': {}, 'unreachable': {}}
    for host, result in callback.host_ok.items():
        result_raw['success'][host] = result._result['stdout_lines']
    for host, result in callback.host_failed.items():
        result_raw['failed'][host] = result._result['stderr_lines']
    for host, result in callback.host_unreachable.items():
        result_raw['unreachable'][host] = result._result["msg"]

    return json.dumps(result_raw, indent=4)
variable_manager.set_inventory(inventory)

# create play with tasks
"""
play_source = dict(
    name = "Ansible Play THIS COULD BE YOUR AD!",
    hosts = '192.168.56.212',
    gather_facts = 'no',
    tasks = [
        dict(
            #action=dict(module='debug', args=dict(msg='Hello Galaxy!'))
            action=dict(module='sysctl', args=dict(name='net.ipv4.ip_forward', value=1, state='present'))
        )]
)
"""
#play = Play().load(play_source, variable_manager=variable_manager, loader=loader)
play = Play()
play.name = "custom play"
play.variable_manager = variable_manager
play.loader = loader
play.hosts = ['192.168.56.212']
play.gather_facts = 'no'
play.tasks = Task().load("asdasd")

# actually run it
tqm = None
try:
    tqm = TaskQueueManager(
        inventory=inventory,
        variable_manager=variable_manager,
        loader=loader,
        options=options,
def execute(self, *args, **kwargs):
    """ Puts args and kwargs in a way ansible can understand. Calls ansible
    and interprets the result.
    """
    assert self.is_hooked_up, "the module should be hooked up to the api"

    # legacy key=value pairs shorthand approach
    if args:
        self.module_args = module_args = self.get_module_args(args, kwargs)
    else:
        self.module_args = module_args = kwargs

    loader = DataLoader()
    inventory_manager = SourcelessInventoryManager(loader=loader)

    hosts_with_ports = tuple(self.api.hosts_with_ports)

    for host, port in hosts_with_ports:
        inventory_manager._inventory.add_host(host, group='all', port=port)
    for key, value in self.api.options.extra_vars.items():
        inventory_manager._inventory.set_variable('all', key, value)

    variable_manager = VariableManager(loader=loader, inventory=inventory_manager)

    play_source = {
        'name': "Suitable Play",
        'hosts': [h for h, p in hosts_with_ports],  # *must* be a list
        'gather_facts': 'no',
        'tasks': [{
            'action': {
                'module': self.module_name,
                'args': module_args,
            },
            'environment': self.api.environment
        }]
    }

    play = Play.load(
        play_source,
        variable_manager=variable_manager,
        loader=loader,
    )

    if self.api.strategy:
        play.strategy = self.api.strategy

    log.info(u'running {}'.format(u'- {module_name}: {module_args}'.format(
        module_name=self.module_name,
        module_args=module_args
    )))

    start = datetime.utcnow()
    task_queue_manager = None
    callback = SilentCallbackModule()

    # ansible uses various levels of verbosity (from -v to -vvvvvv)
    # offering various amounts of debug information
    #
    # we keep it a bit simpler by activating all of it during debug, and
    # falling back to the default of 0 otherwise
    verbosity = self.api.options.verbosity == logging.DEBUG and 6 or 0

    try:
        with ansible_verbosity(verbosity):
            task_queue_manager = TaskQueueManager(
                inventory=inventory_manager,
                variable_manager=variable_manager,
                loader=loader,
                options=self.api.options,
                passwords=getattr(self.api.options, 'passwords', {}),
                stdout_callback=callback
            )
            task_queue_manager.run(play)
    finally:
        if task_queue_manager is not None:
            task_queue_manager.cleanup()

    log.debug(u'took {} to complete'.format(datetime.utcnow() - start))

    return self.evaluate_results(callback)
        {
            u'shell': u'ls -lh',
            u'name': u'li'
        },
        {
            u'shell': u'touch ~/{{app_name}}',
            u'name': u'zi'
        }
    ],
    u'hosts': u'localhost'
}

variable_manager.extra_vars = {'app_name': 'test'}
play1 = Play().load(play_source, variable_manager=variable_manager, loader=loader)
play2 = Play().load(play_source, variable_manager=variable_manager, loader=loader)

tqm = None
try:
    tqm = TaskQueueManager(
        inventory=inventory,
        variable_manager=variable_manager,
        loader=loader,
        options=options,
        passwords=passwords,
        stdout_callback=results_callback,  # Use our custom callback instead of the ``default`` callback plugin
def main():
    host_list = ['localhost', 'www.example.com', 'www.google.com']

    Options = namedtuple('Options', [
        'connection', 'module_path', 'forks', 'remote_user', 'private_key_file',
        'ssh_common_args', 'ssh_extra_args', 'sftp_extra_args', 'scp_extra_args',
        'become', 'become_method', 'become_user', 'verbosity', 'check'
    ])

    # initialize needed objects
    variable_manager = VariableManager()
    loader = DataLoader()
    options = Options(connection='smart',
                      module_path='/usr/share/ansible',
                      forks=100,
                      remote_user=None,
                      private_key_file=None,
                      ssh_common_args=None,
                      ssh_extra_args=None,
                      sftp_extra_args=None,
                      scp_extra_args=None,
                      become=None,
                      become_method=None,
                      become_user=None,
                      verbosity=None,
                      check=False)
    passwords = dict()

    # create inventory and pass to var manager
    inventory = Inventory(loader=loader, variable_manager=variable_manager, host_list=host_list)
    variable_manager.set_inventory(inventory)

    # create play with tasks
    play_source = dict(
        name="Ansible Play",
        hosts=host_list,
        gather_facts='no',
        tasks=[
            dict(action=dict(module='command', args=dict(cmd='/usr/bin/uptime')))
        ])
    play = Play().load(play_source, variable_manager=variable_manager, loader=loader)

    # actually run it
    tqm = None
    callback = ResultsCollector()
    try:
        tqm = TaskQueueManager(
            inventory=inventory,
            variable_manager=variable_manager,
            loader=loader,
            options=options,
            passwords=passwords,
        )
        tqm._stdout_callback = callback
        result = tqm.run(play)
    finally:
        if tqm is not None:
            tqm.cleanup()

    print "UP ***********"
    for host, result in callback.host_ok.items():
        print '{} >>> {}'.format(host, result._result['stdout'])

    print "FAILED *******"
    for host, result in callback.host_failed.items():
        print '{} >>> {}'.format(host, result._result['msg'])

    print "DOWN *********"
    for host, result in callback.host_unreachable.items():
        print '{} >>> {}'.format(host, result._result['msg'])
def recursive_dump(o: Any) -> Any:
    if isinstance(o, Base):
        return o.__class__.__name__, recursive_dump(o.dump_attrs())
    if isinstance(o, list):
        return [recursive_dump(oi) for oi in o]
    if isinstance(o, dict):
        return {k: recursive_dump(v) for k, v in o.items()}
    return o


role_dir = Path(sys.argv[1])
role_name = role_dir.name
role_base_dir = role_dir.parent

dummy_play = Play()

role_def = RoleInclude.load(
    role_name,
    dummy_play,
    current_role_path=str(role_base_dir),
    variable_manager=VariableManager(loader=DataLoader()))

print(role_def.get_name())
print(role_def.get_role_path())


def dump_block(b: Block) -> Dict[str, Any]:
    name = b.__class__.__name__ + ':' + str(id(b))
    if b._parent is not None:
        name += ' (parent: ' + b._parent.__class__.__name__
        if isinstance(b._parent, TaskInclude):
            name += str(b._parent.args)
def run(self):
    ''' use Runner lib to do SSH things '''

    super(AdHocCLI, self).run()

    # only thing left should be host pattern
    pattern = self.args[0]

    # ignore connection password cause we are local
    if self.options.connection == "local":
        self.options.ask_pass = False

    sshpass = None
    becomepass = None
    vault_pass = None

    self.normalize_become_options()
    (sshpass, becomepass) = self.ask_passwords()
    passwords = {'conn_pass': sshpass, 'become_pass': becomepass}

    if self.options.vault_password_file:
        # read vault_pass from a file
        vault_pass = read_vault_file(self.options.vault_password_file)
    elif self.options.ask_vault_pass:
        vault_pass = self.ask_vault_passwords(ask_vault_pass=True, ask_new_vault_pass=False, confirm_new=False)[0]

    loader = DataLoader(vault_password=vault_pass)
    variable_manager = VariableManager()

    inventory = Inventory(loader=loader, variable_manager=variable_manager, host_list=self.options.inventory)
    variable_manager.set_inventory(inventory)

    hosts = inventory.list_hosts(pattern)
    if len(hosts) == 0:
        self.display.warning("provided hosts list is empty, only localhost is available")

    if self.options.listhosts:
        for host in hosts:
            self.display.display(' %s' % host)
        return 0

    if self.options.module_name in C.MODULE_REQUIRE_ARGS and not self.options.module_args:
        raise AnsibleOptionsError("No argument passed to %s module" % self.options.module_name)

    #TODO: implement async support
    #if self.options.seconds:
    #    callbacks.display("background launch...\n\n", color='cyan')
    #    results, poller = runner.run_async(self.options.seconds)
    #    results = self.poll_while_needed(poller)
    #else:
    #    results = runner.run()

    # create a pseudo-play to execute the specified module via a single task
    play_ds = self._play_ds(pattern)
    play = Play().load(play_ds, variable_manager=variable_manager, loader=loader)

    # now create a task queue manager to execute the play
    self._tqm = None
    try:
        self._tqm = TaskQueueManager(
            inventory=inventory,
            variable_manager=variable_manager,
            loader=loader,
            display=self.display,
            options=self.options,
            passwords=passwords,
            stdout_callback='minimal',
        )
        result = self._tqm.run(play)
    finally:
        if self._tqm:
            self._tqm.cleanup()

    return result
def Order_Run(host, module_name, module_args):
    class ResultCallback(CallbackBase):
        def __init__(self, *args, **kwargs):
            self.host_ok = {}
            self.host_unreachable = {}
            self.host_failed = {}

        def v2_runner_on_unreachable(self, result):
            self.host_unreachable[result._host.get_name()] = result

        def v2_runner_on_ok(self, result, *args, **kwargs):
            self.host_ok[result._host.get_name()] = result

        def v2_runner_on_failed(self, result, *args, **kwargs):
            self.host_failed[result._host.get_name()] = result

    variable_manager = VariableManager()
    loader = DataLoader()
    inventory = Inventory(loader=loader, variable_manager=variable_manager, host_list=host)

    Options = namedtuple('Options', [
        'listtags', 'listtasks', 'listhosts', 'syntax', 'connection', 'module_path',
        'forks', 'remote_user', 'private_key_file', 'ssh_common_args', 'ssh_extra_args',
        'sftp_extra_args', 'scp_extra_args', 'become', 'become_method', 'become_user',
        'verbosity', 'check'
    ])
    options = Options(listtags=False,
                      listtasks=False,
                      listhosts=False,
                      syntax=False,
                      connection='ssh',
                      module_path=None,
                      forks=100,
                      remote_user='******',
                      private_key_file=None,
                      ssh_common_args=None,
                      ssh_extra_args=None,
                      sftp_extra_args=None,
                      scp_extra_args=None,
                      become=True,
                      become_method=None,
                      become_user='******',
                      verbosity=None,
                      check=False)
    passwords = {}

    play_source = dict(
        name='Ansible Play',
        hosts=host,
        gather_facts='no',
        tasks=[dict(action=dict(module=module_name, args=module_args))])
    play = Play().load(play_source, variable_manager=variable_manager, loader=loader)

    tqm = None
    callback = ResultCallback()
    try:
        tqm = TaskQueueManager(
            inventory=inventory,
            variable_manager=variable_manager,
            loader=loader,
            options=options,
            passwords=passwords,
            stdout_callback=callback,
            run_additional_callbacks=C.DEFAULT_LOAD_CALLBACK_PLUGINS,
            run_tree=False,
        )
        tqm.run(play)
    finally:
        if tqm is not None:
            tqm.cleanup()

    results_raw = {}
    results_raw['success'] = {}
    results_raw['failed'] = {}
    results_raw['unreachable'] = {}

    for host, result in callback.host_ok.items():
        if result._result.get('stdout_lines'):
            if result._result['stdout_lines']:
                results_raw['success'][host] = json.dumps(result._result['stdout_lines'])
        else:
            # on success with no output, return the string "success" instead
            result._result['stdout_lines'] = 'success'.split()
            results_raw['success'][host] = json.dumps(result._result['stdout_lines'])

    for host, result in callback.host_failed.items():
        if result._result.get('stderr'):
            results_raw['failed'][host] = result._result['stderr']
        elif result._result.get('msg'):
            # error message returned by modules such as copy
            results_raw['failed'][host] = result._result['msg']

    for host, result in callback.host_unreachable.items():
        results_raw['unreachable'][host] = result._result['msg']

    return results_raw
def _load_playbook_data(self, file_name, variable_manager, vars=None):

    if os.path.isabs(file_name):
        self._basedir = os.path.dirname(file_name)
    else:
        self._basedir = os.path.normpath(os.path.join(self._basedir, os.path.dirname(file_name)))

    # set the loaders basedir
    cur_basedir = self._loader.get_basedir()
    self._loader.set_basedir(self._basedir)

    self._file_name = file_name

    try:
        ds = self._loader.load_from_file(os.path.basename(file_name))
    except UnicodeDecodeError as e:
        raise AnsibleParserError("Could not read playbook (%s) due to encoding issues: %s" % (file_name, to_native(e)))

    # check for errors and restore the basedir in case this error is caught and handled
    if not ds:
        self._loader.set_basedir(cur_basedir)
        raise AnsibleParserError("Empty playbook, nothing to do", obj=ds)
    elif not isinstance(ds, list):
        self._loader.set_basedir(cur_basedir)
        raise AnsibleParserError("A playbook must be a list of plays, got a %s instead" % type(ds), obj=ds)

    # Parse the playbook entries. For plays, we simply parse them
    # using the Play() object, and includes are parsed using the
    # PlaybookInclude() object
    for entry in ds:
        if not isinstance(entry, dict):
            # restore the basedir in case this error is caught and handled
            self._loader.set_basedir(cur_basedir)
            raise AnsibleParserError("playbook entries must be either a valid play or an include statement", obj=entry)

        if any(action in entry for action in ('import_playbook', 'include')):
            if 'include' in entry:
                display.deprecated("'include' for playbook includes. You should use 'import_playbook' instead", version="2.12")
            pb = PlaybookInclude.load(entry, basedir=self._basedir, variable_manager=variable_manager, loader=self._loader)
            if pb is not None:
                self._entries.extend(pb._entries)
            else:
                which = entry.get('import_playbook', entry.get('include', entry))
                display.display("skipping playbook '%s' due to conditional test failure" % which, color=C.COLOR_SKIP)
        else:
            entry_obj = Play.load(entry, variable_manager=variable_manager, loader=self._loader, vars=vars)
            self._entries.append(entry_obj)

    # we're done, so restore the old basedir in the loader
    self._loader.set_basedir(cur_basedir)
#create play with tasks
play_src = dict(
    name="router list",
    hosts="192.168.10.238",
    gather_facts="no",
    become="true",
    tasks=[
        dict(name="show facts about all routers",
             action=dict(module="os_router_facts",
                         auth=dict(auth_url="http://192.168.10.238:5000/v2.0",
                                   username="******",
                                   password="******",
                                   project_name="<enter>"))),
        dict(name="Show openstack routers",
             action=dict(module="debug", var="openstack_routers"))
    ]
)
play = Play().load(play_src, variable_manager=variable_manager, loader=loader)

#actually run it
tqm = None
try:
    tqm = TaskQueueManager(
        inventory=inventory,
        variable_manager=variable_manager,
        loader=loader,
        options=options,
        passwords=passwords,
        stdout_callback="default",
    )
    result = tqm.run(play)
finally:
    if tqm is not None:
def test_empty_play(self):
    p = Play.load(dict())
    self.assertEqual(str(p), '')
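# A minimal, self-contained sketch (assuming Ansible 2.x is installed) of the
# Play.load()/compile() pair exercised by the tests above; the play content
# and host name are illustrative only.
from ansible.playbook.play import Play

sketch_play = Play.load(dict(
    name="sketch play",
    hosts=['testhost'],
    gather_facts=False,
    tasks=[dict(action='debug msg="hello"')],
))
print(sketch_play.get_name())       # -> "sketch play"
print(len(sketch_play.compile()))   # blocks, including the implicit meta blocks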
def run_ansible(inventory_filename, become=None, hosts="all", forks=10):
    """Run ansible with the provided inventory file and host group."""
    # Since the API is constructed for CLI it expects certain options to
    # always be set in the context object.
    context.CLIARGS = ImmutableDict(
        connection="ssh",
        module_path=[],
        forks=forks,
        become=become,
        become_method="sudo",
        become_user=None,
        check=False,
        diff=False,
        verbosity=0,
    )

    # Initialize required objects.
    # Takes care of finding and reading yaml, json and ini files.
    loader = DataLoader()
    passwords = dict(vault_pass="******")  # nosec

    # Instantiate our ResultCallback for handling results as they come in.
    # Ansible expects this to be one of its main display outlets.
    results_callback = ResultCallback()

    # Create inventory, use path to host config file as source or
    # hosts in a comma separated string.
    logging.debug("Reading inventory from: %s", inventory_filename)
    inventory = InventoryManager(loader=loader, sources=inventory_filename)

    # Variable manager takes care of merging all the different sources to
    # give you a unified view of variables available in each context.
    variable_manager = VariableManager(loader=loader, inventory=inventory)

    # Create data structure that represents our play, including tasks,
    # this is basically what our YAML loader does internally.
    play_source = dict(
        name="Ansible Play",
        hosts=hosts,
        gather_facts="yes",
        tasks=[
            dict(action=dict(module="stat", get_checksum=False, path=LAST_SCAN_LOG_FILENAME)),
            dict(action=dict(module="stat", get_checksum=False, path=LAST_DETECTION_FILENAME)),
            dict(action=dict(module="stat", get_checksum=False, path=CLAMAV_DB_FILENAME)),
        ],
    )

    # Create play object, playbook objects use .load instead of init or new methods,
    # this will also automatically create the task objects from the
    # info provided in play_source.
    play = Play().load(play_source, variable_manager=variable_manager, loader=loader)

    # Run it - instantiate task queue manager, which takes care of forking
    # and setting up all objects to iterate over host list and tasks.
    tqm = None
    try:
        tqm = TaskQueueManager(
            inventory=inventory,
            variable_manager=variable_manager,
            loader=loader,
            passwords=passwords,
            stdout_callback=results_callback,  # Use our custom callback.
        )
        logging.debug("Starting task queue manager with forks=%d.", forks)
        tqm.run(play)
    finally:
        # We always need to cleanup child procs and
        # the structures we use to communicate with them.
        if tqm is not None:
            logging.debug("Cleaning up task queue manager.")
            tqm.cleanup()

        # Remove ansible temporary directory
        logging.debug("Cleaning up temporary file in %s", ANSIBLE_CONST.DEFAULT_LOCAL_TMP)
        shutil.rmtree(ANSIBLE_CONST.DEFAULT_LOCAL_TMP, True)

    return results_callback.results
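# Hypothetical invocation of run_ansible() above; the inventory path is an
# example, and 'results' has whatever shape the custom ResultCallback collects.
if __name__ == "__main__":
    results = run_ansible("inventory/hosts.ini", become=True, hosts="all", forks=10)
    print(results)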
def start(self, json_data):
    configMysql = ConfigParser.ConfigParser()
    configMysql.read(config.get('DEFAULT', 'DB_CONNECTION_FILE'))
    db = mysql.connector.connect(host=configMysql.get('INVENTORY', 'host'),
                                 user=configMysql.get('INVENTORY', 'user'),
                                 passwd=configMysql.get('INVENTORY', 'pass'),
                                 db=configMysql.get('INVENTORY', 'dbname'),
                                 charset='utf8mb4')
    cursor = db.cursor()
    get_hosts = "select name from hosts"
    get_ips = "select ip from hosts"

    cursor.execute(get_hosts)
    hosts = [item[0] for item in cursor.fetchall()]

    cursor.execute(get_ips)
    ips = [item[0] for item in cursor.fetchall()]

    data = {}
    data["hosts"] = hosts + ips

    # create play with tasks
    play_source = json.loads(json_data)

    Options = namedtuple('Options', [
        'module_path', 'forks', 'become', 'become_method', 'become_user', 'check'
    ])

    # initialize needed objects
    variable_manager = VariableManager()
    loader = DataLoader()
    options = Options(module_path='/path/to/mymodules',
                      forks=100,
                      become=None,
                      become_method=None,
                      become_user=None,
                      check=False)
    passwords = dict(vault_pass='******')

    # Instantiate our ResultCallback for handling results as they come in
    results_callback = ResultCallback()

    # create inventory and pass to var manager
    inventory = Inventory(loader=loader, variable_manager=variable_manager, host_list=data["hosts"])

    play = Play().load(play_source, variable_manager=variable_manager, loader=loader)

    # actually run it
    tqm = None
    try:
        tqm = TaskQueueManager(
            inventory=inventory,
            options=options,
            passwords=passwords,
            loader=loader,
            variable_manager=variable_manager,
            stdout_callback=results_callback,
        )
        tqm.run(play)
    finally:
        if tqm is not None:
            tqm.cleanup()

    return results_callback.data
def exec_playbook(host_list, username, password, group_name):
    loader = DataLoader()
    passwords = dict()

    inventory_manager = InventoryManager(loader=loader, sources=','.join(host_list))
    variable_manager = VariableManager(loader=loader, inventory=inventory_manager)
    variable_manager.extra_vars = {
        'ansible_ssh_user': username,
        'ansible_ssh_pass': password
    }

    play_source = dict(
        name='Greengrass Group Playbook',
        hosts='all',
        remote_user=username,
        gather_facts='yes',
        tasks=[
            dict(name='Set Playbook Facts',
                 action=dict(module='set_fact', args=dict(group_name=group_name))),
            dict(name='Stop Greengrass Core',
                 become=True,
                 action=dict(module='systemd', args=dict(name='greengrass', state='stopped'))),
            dict(name='Copy Greengrass Config',
                 become=True,
                 action=dict(module='copy', args=dict(src='.gg/config/', dest='/greengrass/config/'))),
            dict(name='Copy Greengrass Root Certs',
                 become=True,
                 action=dict(module='copy', args=dict(src='{{ item }}', dest='/greengrass/certs/')),
                 with_fileglob=['.gg/certs/root.*']),
            dict(name='Copy Greengrass Core Certs',
                 become=True,
                 action=dict(module='copy', args=dict(src='{{ item }}', dest='/greengrass/certs')),
                 # the original glob referenced the undefined variable 'groups_name';
                 # it should match the 'group_name' fact set above
                 with_fileglob=['.gg/certs/{{ group_name }}*']),
            dict(name='Start Greengrass Core',
                 become=True,
                 action=dict(module='greengrass', args=dict(name='greengrass', enabled=True, state='started')))
        ])

    play = Play().load(play_source, variable_manager=variable_manager, loader=loader)

    tqm = None
    try:
        tqm = TaskQueueManager(
            inventory=inventory_manager,
            variable_manager=variable_manager,
            loader=loader,
            options=options,
            passwords=passwords,
        )
        result = tqm.run(play)
    finally:
        if tqm is not None:
            tqm.cleanup()
        shutil.rmtree(C.DEFAULT_LOCAL_TMP, True)
def chaosansible_run(
    host_list: list = ("localhost"),
    configuration: Configuration = None,
    facts: bool = False,
    become: bool = False,
    run_once: bool = False,
    ansible: dict = {},
    num_target: str = "all",
    secrets: Secrets = None,
):
    """
    Run a task through ansible and eventually gather facts from host
    """

    # Check for correct inputs
    if ansible:
        if ansible.get("module") is None:
            raise InvalidActivity("No ansible module defined")

        if ansible.get("args") is None:
            raise InvalidActivity("No ansible module args defined")

    configuration = configuration or {}

    # Ansible configuration elements
    module_path = configuration.get("ansible_module_path")
    become_user = configuration.get("ansible_become_user")
    ssh_key_path = configuration.get("ansible_ssh_private_key")
    ansible_user = configuration.get("ansible_user")
    become_ask_pass = configuration.get("become_ask_pass")
    ssh_extra_args = configuration.get("ansible_ssh_extra_args")

    context.CLIARGS = ImmutableDict(
        connection="smart",
        verbosity=0,
        module_path=module_path,
        forks=10,
        become=become,
        become_method="sudo",
        become_user=become_user,
        check=False,
        diff=False,
        private_key_file=ssh_key_path,
        remote_user=ansible_user,
        ssh_extra_args=ssh_extra_args,
    )

    # Update host_list according to the number of desired targets.
    # A new host list is generated here because it is modified and reused later.
    if num_target != "all":
        new_host_list = random_host(host_list, int(num_target))
    else:
        new_host_list = host_list[:]

    # Create an inventory
    sources = ",".join(new_host_list)
    if len(new_host_list) == 1:
        sources += ","

    loader = DataLoader()
    inventory = InventoryManager(loader=loader, sources=sources)

    # Instantiate callback for storing results
    results_callback = ResultsCollectorJSONCallback()

    variable_manager = VariableManager(loader=loader, inventory=inventory)

    if become_ask_pass:
        passwords = dict(become_pass=become_ask_pass)
    else:
        passwords = None

    # Ansible taskmanager
    tqm = TaskQueueManager(
        inventory=inventory,
        variable_manager=variable_manager,
        loader=loader,
        passwords=passwords,
        stdout_callback=results_callback,
        run_additional_callbacks=False,
    )

    # Ansible playbook
    play_source = dict(
        name="Ansible Play",
        hosts=new_host_list,
        gather_facts=facts,
        tasks=[
            dict(
                name="facts",
                action=dict(module="debug", args=dict(var="ansible_facts")),
            ),
        ],
    )

    # In case we only want to gather facts
    if ansible:
        module = ansible.get("module")
        args = ansible.get("args")
        play_source["tasks"].append(
            dict(
                name="task",
                run_once=run_once,
                action=dict(module=module, args=args),
                register="shell_out",
            )
        )

    # Create an ansible playbook
    play = Play().load(play_source, variable_manager=variable_manager, loader=loader)

    # Run it
    try:
        result = tqm.run(play)
    finally:
        tqm.cleanup()
        if loader:
            loader.cleanup_all_tmp_files()

    # Remove ansible tmpdir
    shutil.rmtree(C.DEFAULT_LOCAL_TMP, True)

    if len(results_callback.host_failed) > 0:
        print("Ansible error(s): ")
        for error in results_callback.host_failed:
            print(results_callback.host_failed[error].__dict__)

        raise FailedActivity("Failed to run ansible task")

    elif len(results_callback.host_unreachable) > 0:
        print("Unreachable host(s): ")
        for error in results_callback.host_unreachable:
            print(error)

        raise FailedActivity("At least one target is down")

    else:
        results = {}
        for host, result in results_callback.host_ok.items():
            results[host] = result

        return json.dumps(results)
def run_ansible(module_name, module_args, host_list, ansible_user="******"):
    """ansible api"""
    # responsible for finding and reading yaml, json and ini files
    loader = DataLoader()

    # initialize the needed objects
    Options = namedtuple('Options', [
        'connection', 'module_path', 'forks', 'become', 'become_method',
        'private_key_file', 'become_user', 'remote_user', 'check', 'diff'
    ])
    options = Options(connection='ssh',
                      module_path=None,
                      forks=5,
                      become=True,
                      become_method='sudo',
                      private_key_file="/root/.ssh/id_rsa",
                      become_user='******',
                      remote_user=ansible_user,
                      check=False,
                      diff=False)
    passwords = dict(vault_pass='******')

    # instantiate a ResultsCollector callback to handle results
    callback = ResultsCollector()

    # create the inventory and pass it to the VariableManager
    inventory = InventoryManager(loader=loader, sources='')
    for ip in host_list:
        inventory.add_host(host=ip, port=22)

    # the VariableManager handles host, group, extra and other variables
    variable_manager = VariableManager(loader=loader, inventory=inventory)
    for ip in host_list:
        host = inventory.get_host(hostname=ip)
        variable_manager.set_host_variable(host=host, varname='ansible_ssh_pass', value='lzx@2019')

    # create the task
    host = ",".join(host_list)
    play_source = dict(name="Ansible Play",
                       hosts=host,
                       gather_facts='no',
                       tasks=[
                           dict(action=dict(module=module_name, args=module_args), register='shell_out'),
                       ])
    play = Play().load(play_source, variable_manager=variable_manager, loader=loader)

    # run it
    tqm = None
    tqm = TaskQueueManager(
        inventory=inventory,
        variable_manager=variable_manager,
        loader=loader,
        options=options,
        passwords=passwords,
        stdout_callback=callback,
    )
    result = tqm.run(play)

    # 'skipped' was missing from the original dict and would raise a KeyError below
    result_raw = {'success': {}, 'failed': {}, 'unreachable': {}, 'skipped': {}}
    for host, result in callback.host_ok.items():
        result_raw['success'][host] = result._result
    for host, result in callback.host_failed.items():
        result_raw['failed'][host] = result._result
    for host, result in callback.host_unreachable.items():
        result_raw['unreachable'][host] = result._result
    for host, result in callback.host_skipped.items():
        result_raw['skipped'][host] = result._result

    return json.dumps(result_raw, indent=4, ensure_ascii=False)
def exec_ansible(host, tasks, remote_user='******', become=False, become_user=None):
    # initialize needed objects
    loader = DataLoader()
    Options = namedtuple('Options', [
        'connection', 'module_path', 'forks', 'become', 'become_method',
        'become_user', 'check', 'diff', 'remote_user'])
    options = Options(
        connection='smart',
        module_path=MODULE_PATH,
        forks=200,
        become=become,
        become_method='sudo',
        become_user=become_user,
        remote_user=remote_user,
        check=False,
        diff=False)
    passwords = dict(vault_pass='******')

    # Instantiate our ResultCallback for handling results as they come in
    results_callback = ResultCallback()

    # create inventory and pass to var manager
    inventory = InventoryManager(loader=loader)
    inventory.add_host(host)
    variable_manager = VariableManager(loader=loader, inventory=inventory)

    # create play with tasks
    play_source = dict(
        name="Ansible Play",
        hosts=[host],
        gather_facts='no',
        tasks=tasks)
    play = Play().load(
        play_source, variable_manager=variable_manager, loader=loader)

    # actually run it
    tqm = None
    try:
        display.warning(tasks)
        tqm = TaskQueueManager(
            inventory=inventory,
            variable_manager=variable_manager,
            loader=loader,
            options=options,
            passwords=passwords,
            stdout_callback=results_callback
        )
        tqm.run(play)
    except Exception as e:
        display.warning('error')
        display.warning(e)
    finally:
        if tqm is not None:
            tqm.cleanup()

    def get_result():
        results_raw = {'success': {}, 'failed': {}, 'unreachable': {}}
        for _hosts, result in results_callback.host_ok.items():
            results_raw['success'][_hosts] = result
        for _hosts, result in results_callback.host_failed.items():
            results_raw['failed'][_hosts] = result
        for _hosts, result in results_callback.host_unreachable.items():
            results_raw['unreachable'][_hosts] = result
        return results_raw

    _get_result = get_result()
    return _get_result
def run(self):
    # insert node
    for ip in self.hosts:
        self._node_map[ip] = Service.new_node(self.task_id, ip)

    variable_manager = VariableManager()
    Logger.debug("start write ssh_key for task: {} global_id : {}".format(self.task_id, self.global_id))
    key_files = []
    group = Group(self.task_id)
    for h in self.hosts:
        # get ssh_key content
        key_content = _get_ssh_key(h)
        Logger.debug("read ssh_key for host: {} global_id: {}".format(h, self.global_id))

        # write ssh private key
        key_path = _write_ssh_key(h, key_content)
        #key_path="./tmp/97"
        Logger.debug("write ssh_key for host: {} global_id: {}".format(h, self.global_id))
        host_vars = dict(ansible_port=22,
                         ansible_user=self.user,
                         ansible_ssh_private_key_file="./" + key_path)
        Logger.debug("key_path: {} global_id: {}".format(key_path, self.global_id))
        key_files.append(key_path)
        host = Host(h)
        host.vars = host_vars
        group.add_host(host)

    # add params to each host
    if self.params is not None and isinstance(self.params, dict):
        for h in group.hosts:
            for key in self.params.keys():
                variable_manager.set_host_variable(h, key, self.params[key])

    Logger.debug("success write ssh_key for task: {} global_id: {}".format(self.task_id, self.global_id))

    # other options
    ssh_args = '-oControlMaster=auto -oControlPersist=60s -oStrictHostKeyChecking=no'
    options = _Options(connection='ssh', module_path='./ansible/library', forks=self.forks, timeout=10,
                       remote_user=None, private_key_file=None, ssh_common_args=ssh_args,
                       ssh_extra_args=None, sftp_extra_args=None, scp_extra_args=None,
                       become=None, become_method=None, become_user=None,
                       verbosity=None, check=False)

    if self.tasktype == "ansible_task":
        Logger.debug("ansible tasks set******************* global_id: {}".format(self.global_id))
        play_source = dict(
            name=self.task_id,
            hosts=self.task_id,
            gather_facts='yes',
            tasks=self.tasks
        )
    else:
        Logger.debug("ansible role set******************* global_id: {}".format(self.global_id))
        play_source = dict(
            name=self.task_id,
            hosts=self.task_id,
            gather_facts='yes',
            roles=self.tasks
        )

    Logger.debug("start load play for task: {} global_id: {}".format(self.task_id, self.global_id))

    # make playbook
    playbook = Play().load(play_source, variable_manager=variable_manager, loader=_Loader)
    inventory = Inventory(loader=_Loader, variable_manager=variable_manager)
    inventory.add_group(group)
    call_back = SyncCallbackModule(debug=True,
                                   step_callback=self._step_callback,
                                   global_id=self.global_id,
                                   source=self.source,
                                   tag_hosts=self.hosts)
    Logger.debug("success load play for task: {} global_id: {}".format(self.task_id, self.global_id))

    # task queue
    tqm = TaskQueueManager(
        inventory=inventory,
        variable_manager=variable_manager,
        loader=_Loader,
        options=options,
        passwords=None,
        stdout_callback=call_back
    )
    try:
        back = tqm.run(playbook)
        Logger.info("back: {} global_id : {}".format(str(back), self.global_id))
        if back != 0:
            raise Exception("playbook run failed")
        return back
    finally:
        if tqm is not None:
            tqm.cleanup()
            _rm_tmp_key(key_files)
def run(self, inventory_content, pattern='all'):
    ''' run an ad-hoc command '''
    self.pattern = pattern
    self.inventory_content = inventory_content

    if not self.options.module_name:
        self.logger.error(self.log_prefix + 'preparation failed: the module to execute must not be empty')
        return (False, 'The module to execute is empty, please provide a module name')
    else:
        if self.options.module_name in C.MODULE_REQUIRE_ARGS and not self.options.module_args:
            self.logger.error(self.log_prefix + 'preparation failed: the module arguments are empty')
            return (False, 'The module arguments are empty, please provide module arguments')

    for name, obj in get_all_plugin_loaders():
        name = name
        if obj.subdir:
            plugin_path = os.path.join('.', obj.subdir)
            if os.path.isdir(plugin_path):
                obj.add_directory(plugin_path)

    self._gen_tasks()
    play = Play().load(self.tasks_dict, variable_manager=self.variable_manager, loader=self.loader)

    try:
        self.host_list = self.inventory.list_hosts(self.pattern)
    except:
        self.host_list = []

    if len(self.host_list) == 0:
        self.logger.error(self.log_prefix + 'preparation failed: no hosts matched')
        return (False, 'Execution failed, no hosts matched')

    self._loading_callback()

    self._tqm = None
    try:
        self._tqm = TaskQueueManager(
            inventory=self.inventory,
            variable_manager=self.variable_manager,
            loader=self.loader,
            options=self.options,
            passwords=self.passwords,
            stdout_callback=self.callback,
            # run_additional_callbacks=C.DEFAULT_LOAD_CALLBACK_PLUGINS,
            # run_tree=False,
        )
        self._tqm.run(play)
    finally:
        if self._tqm:
            self._tqm.cleanup()
        if self.loader:
            self.loader.cleanup_all_tmp_files()

    self.logger.info(self.log_prefix + 'dispatched successfully')
    return True
def default(self, arg, forceshell=False):
    """ actually runs modules """
    if arg.startswith("#"):
        return False

    if not self.cwd:
        display.error("No host found")
        return False

    if arg.split()[0] in self.modules:
        module = arg.split()[0]
        module_args = ' '.join(arg.split()[1:])
    else:
        module = 'shell'
        module_args = arg

    if forceshell is True:
        module = 'shell'
        module_args = arg

    result = None
    try:
        check_raw = module in ('command', 'shell', 'script', 'raw')
        play_ds = dict(
            name="Ansible Shell",
            hosts=self.cwd,
            gather_facts='no',
            tasks=[dict(action=dict(module=module, args=parse_kv(module_args, check_raw=check_raw)))],
            remote_user=self.remote_user,
            become=self.become,
            become_user=self.become_user,
            become_method=self.become_method,
            check_mode=self.check_mode,
            diff=self.diff,
        )
        play = Play().load(play_ds, variable_manager=self.variable_manager, loader=self.loader)
    except Exception as e:
        display.error(u"Unable to build command: %s" % to_text(e))
        return False

    try:
        cb = 'minimal'  # FIXME: make callbacks configurable

        # now create a task queue manager to execute the play
        self._tqm = None
        try:
            self._tqm = TaskQueueManager(
                inventory=self.inventory,
                variable_manager=self.variable_manager,
                loader=self.loader,
                passwords=self.passwords,
                stdout_callback=cb,
                run_additional_callbacks=C.DEFAULT_LOAD_CALLBACK_PLUGINS,
                run_tree=False,
                forks=self.forks,
            )

            result = self._tqm.run(play)
        finally:
            if self._tqm:
                self._tqm.cleanup()
            if self.loader:
                self.loader.cleanup_all_tmp_files()

        if result is None:
            display.error("No hosts found")
            return False
    except KeyboardInterrupt:
        display.error('User interrupted execution')
        return False
    except Exception as e:
        display.error(to_text(e))
        # FIXME: add traceback in very very verbose mode
        return False
def test_variable_manager_precedence(self):
    # FIXME: this needs to be redone as dataloader is not the automatic source of data anymore
    return

    '''
    Tests complex variations and combinations of get_vars() with different
    objects to modify the context under which variables are merged.
    '''
    # FIXME: BCS makethiswork
    # return True

    mock_inventory = MagicMock()

    inventory1_filedata = """
    [group2:children]
    group1

    [group1]
    host1 host_var=host_var_from_inventory_host1

    [group1:vars]
    group_var = group_var_from_inventory_group1

    [group2:vars]
    group_var = group_var_from_inventory_group2
    """

    fake_loader = DictDataLoader({
        # inventory1
        '/etc/ansible/inventory1': inventory1_filedata,
        # role defaults_only1
        '/etc/ansible/roles/defaults_only1/defaults/main.yml': """
        default_var: "default_var_from_defaults_only1"
        host_var: "host_var_from_defaults_only1"
        group_var: "group_var_from_defaults_only1"
        group_var_all: "group_var_all_from_defaults_only1"
        extra_var: "extra_var_from_defaults_only1"
        """,
        '/etc/ansible/roles/defaults_only1/tasks/main.yml': """
        - debug: msg="here i am"
        """,
        # role defaults_only2
        '/etc/ansible/roles/defaults_only2/defaults/main.yml': """
        default_var: "default_var_from_defaults_only2"
        host_var: "host_var_from_defaults_only2"
        group_var: "group_var_from_defaults_only2"
        group_var_all: "group_var_all_from_defaults_only2"
        extra_var: "extra_var_from_defaults_only2"
        """,
    })

    inv1 = InventoryManager(loader=fake_loader, sources=['/etc/ansible/inventory1'])

    v = VariableManager(inventory=mock_inventory, loader=fake_loader)
    v._fact_cache = defaultdict(dict)

    play1 = Play.load(dict(
        hosts=['all'],
        roles=['defaults_only1', 'defaults_only2'],
    ), loader=fake_loader, variable_manager=v)

    # first we assert that the defaults as viewed as a whole are the merged results
    # of the defaults from each role, with the last role defined "winning" when
    # there is a variable naming conflict
    res = v.get_vars(play=play1)
    self.assertEqual(res['default_var'], 'default_var_from_defaults_only2')

    # next, we assert that when vars are viewed from the context of a task within a
    # role, that task will see its own role defaults before any other role's
    blocks = play1.compile()
    task = blocks[1].block[0]
    res = v.get_vars(play=play1, task=task)
    self.assertEqual(res['default_var'], 'default_var_from_defaults_only1')

    # next we assert the precedence of inventory variables
    v.set_inventory(inv1)
    h1 = inv1.get_host('host1')

    res = v.get_vars(play=play1, host=h1)
    self.assertEqual(res['group_var'], 'group_var_from_inventory_group1')
    self.assertEqual(res['host_var'], 'host_var_from_inventory_host1')

    # next we test with group_vars/ files loaded
    fake_loader.push("/etc/ansible/group_vars/all", """
    group_var_all: group_var_all_from_group_vars_all
    """)
    fake_loader.push("/etc/ansible/group_vars/group1", """
    group_var: group_var_from_group_vars_group1
    """)
    fake_loader.push("/etc/ansible/group_vars/group3", """
    # this is a dummy, which should not be used anywhere
    group_var: group_var_from_group_vars_group3
    """)
    fake_loader.push("/etc/ansible/host_vars/host1", """
    host_var: host_var_from_host_vars_host1
    """)
    fake_loader.push("group_vars/group1", """
    playbook_group_var: playbook_group_var
    """)
    fake_loader.push("host_vars/host1", """
    playbook_host_var: playbook_host_var
    """)

    res = v.get_vars(play=play1, host=h1)
    # self.assertEqual(res['group_var'], 'group_var_from_group_vars_group1')
    # self.assertEqual(res['group_var_all'], 'group_var_all_from_group_vars_all')
    # self.assertEqual(res['playbook_group_var'], 'playbook_group_var')
    # self.assertEqual(res['host_var'], 'host_var_from_host_vars_host1')
    # self.assertEqual(res['playbook_host_var'], 'playbook_host_var')

    # add in the fact cache
    v._fact_cache['host1'] = dict(fact_cache_var="fact_cache_var_from_fact_cache")

    res = v.get_vars(play=play1, host=h1)
    self.assertEqual(res['fact_cache_var'], 'fact_cache_var_from_fact_cache')
def run(self):
    ''' create and execute the single task playbook '''

    super(AdHocCLI, self).run()

    # only thing left should be host pattern
    pattern = to_text(context.CLIARGS['args'], errors='surrogate_or_strict')

    sshpass = None
    becomepass = None

    (sshpass, becomepass) = self.ask_passwords()
    passwords = {'conn_pass': sshpass, 'become_pass': becomepass}

    # get basic objects
    loader, inventory, variable_manager = self._play_prereqs()

    try:
        hosts = self.get_host_list(inventory, context.CLIARGS['subset'], pattern)
    except AnsibleError:
        if context.CLIARGS['subset']:
            raise
        else:
            hosts = []
            display.warning("No hosts matched, nothing to do")

    if context.CLIARGS['listhosts']:
        display.display(' hosts (%d):' % len(hosts))
        for host in hosts:
            display.display(' %s' % host)
        return 0

    if context.CLIARGS['module_name'] in C.MODULE_REQUIRE_ARGS and not context.CLIARGS['module_args']:
        err = "No argument passed to %s module" % context.CLIARGS['module_name']
        if pattern.endswith(".yml"):
            err = err + ' (did you mean to run ansible-playbook?)'
        raise AnsibleOptionsError(err)

    # Avoid modules that don't work with ad-hoc
    if context.CLIARGS['module_name'] in ('import_playbook',):
        raise AnsibleOptionsError("'%s' is not a valid action for ad-hoc commands" % context.CLIARGS['module_name'])

    play_ds = self._play_ds(pattern, context.CLIARGS['seconds'], context.CLIARGS['poll_interval'])
    play = Play().load(play_ds, variable_manager=variable_manager, loader=loader)

    # used in start callback
    playbook = Playbook(loader)
    playbook._entries.append(play)
    playbook._file_name = '__adhoc_playbook__'

    if self.callback:
        cb = self.callback
    elif context.CLIARGS['one_line']:
        cb = 'oneline'
    # Respect custom 'stdout_callback' only with enabled 'bin_ansible_callbacks'
    elif C.DEFAULT_LOAD_CALLBACK_PLUGINS and C.DEFAULT_STDOUT_CALLBACK != 'default':
        cb = C.DEFAULT_STDOUT_CALLBACK
    else:
        cb = 'minimal'

    run_tree = False
    if context.CLIARGS['tree']:
        C.DEFAULT_CALLBACK_WHITELIST.append('tree')
        C.TREE_DIR = context.CLIARGS['tree']
        run_tree = True

    # now create a task queue manager to execute the play
    self._tqm = None
    try:
        self._tqm = TaskQueueManager(
            inventory=inventory,
            variable_manager=variable_manager,
            loader=loader,
            passwords=passwords,
            stdout_callback=cb,
            run_additional_callbacks=C.DEFAULT_LOAD_CALLBACK_PLUGINS,
            run_tree=run_tree,
            forks=context.CLIARGS['forks'],
        )

        self._tqm.send_callback('v2_playbook_on_start', playbook)

        result = self._tqm.run(play)

        self._tqm.send_callback('v2_playbook_on_stats', self._tqm._stats)
    finally:
        if self._tqm:
            self._tqm.cleanup()
        if loader:
            loader.cleanup_all_tmp_files()

    return result
def run(self):
    host_list = []
    [host_list.append(i.get("host", '0.0.0.0')) for i in self.hostinfo]

    Options = namedtuple('Options', [
        'connection', 'module_path', 'forks', 'remote_user', 'private_key_file',
        'ssh_common_args', 'ssh_extra_args', 'sftp_extra_args', 'scp_extra_args',
        'become', 'become_method', 'become_user', 'verbosity', 'check', 'diff'
    ])

    # required for
    # https://github.com/ansible/ansible/blob/devel/lib/ansible/inventory/manager.py#L204
    sources = ','.join(host_list)
    if len(host_list) == 1:
        sources += ','

    # initialize needed objects
    loader = DataLoader()
    options = Options(
        connection='smart',
        module_path=['/usr/share/ansible'],
        forks=100,
        remote_user=None,
        private_key_file=None,
        ssh_common_args=None,
        ssh_extra_args=None,
        sftp_extra_args=None,
        scp_extra_args=None,
        become=None,
        become_method=None,
        become_user=None,
        verbosity=None,
        check=False,
        diff=False,
    )
    passwords = dict(sshpass=None, becomepass=None)

    # create inventory and pass to var manager
    inventory = InventoryManager(loader=loader, sources=sources)
    inventory.add_group('default')
    for host in self.hostinfo:
        inventory.add_host(host=host.get('host'), port=host.get('port'))
        hostname = inventory.get_host(hostname=host.get('host'))
        hostname.set_variable('ansible_ssh_host', hostname)
        hostname.set_variable('ansible_ssh_port', host.get('port'))
        hostname.set_variable('ansible_ssh_user', host.get('user'))
        hostname.set_variable('ansible_ssh_pass', host.get('password'))
        hostname.set_variable('ansible_sudo_pass', host.get('password'))

    variable_manager = VariableManager(loader=loader, inventory=inventory)

    # for host in self.hostinfo:
    #     inventory.add_host(host=host.get('host'), port=host.get('port'))
    #     hostname = inventory.get_host(hostname=host.get('host'))
    #     variable_manager.set_host_variable(host=hostname, varname='ansible_ssh_pass', value=host.get('password'))
    #     variable_manager.set_host_variable(host=hostname, varname='ansible_ssh_user', value=host.get('user'))
    #     variable_manager.set_host_variable(host=hostname, varname='ansible_ssh_port', value=host.get('port'))
    #     print("setting global managed variables for %s" % host.get('host'))

    # print(inventory.__dict__)
    # for i in inventory.__dict__:
    #     if i == '_inventory':
    #         print(inventory.__dict__[i].__dict__)
    print(inventory.get_host(hostname='172.18.108.96'))

    # create play with tasks
    tasks = []
    # print(self.taskinfo)
    for task in self.taskinfo:
        # tasks.append(dict(action=dict(module=task.get("module"), args=task.get("args"))))
        play_source = dict(
            name="Ansible API Play",
            hosts=host_list,
            gather_facts='no',
            # tasks=[dict(action=dict(module='command', args=dict(cmd='/usr/bin/uptime')))]
            # tasks=[dict(action=dict(module='shell', args='/usr/sbin/ip a'))]
            tasks=[dict(action=dict(module=task.get("module"), args=task.get("args")))]
            # dict(action=dict(module='setup', args='')),
            # dict(action=dict(module='setup', args=''), register='shell_out'),
            # dict(action=dict(module='debug', args=dict(msg='{{shell_out.stdout}}')))
        )

        play = Play().load(play_source, variable_manager=variable_manager, loader=loader)

        # actually run it
        tqm = None
        callback = ResultsCollector()
        try:
            tqm = TaskQueueManager(
                inventory=inventory,
                variable_manager=variable_manager,
                loader=loader,
                options=options,
                passwords=passwords,
            )
            tqm._stdout_callback = callback
            result = tqm.run(play)
            print(result)
        finally:
            if tqm is not None:
                tqm.cleanup()

        # print("UP ***********")
        # print(callback.host_ok.items())
        for host, result in callback.host_ok.items():
            try:
                # print('{0} >>> {1}'.format(host, result._result['stdout']))
                self.resultinfo.append({host: {"message": result._result['stdout'], "code": 0}})
            except:
                # print('{0} >>> {1}'.format(host, result._result['ansible_facts']))
                self.resultinfo.append({host: {"message": result._result['ansible_facts'], "code": 0}})

        # print("FAILED *******")
        for host, result in callback.host_failed.items():
            # print('{0} >>> {1}'.format(host, result._result['msg']))
            self.resultinfo.append({host: {"message": result._result['msg'], "code": 1}})

        # print("DOWN *********")
        for host, result in callback.host_unreachable.items():
            # print('{0} >>> {1}'.format(host, result._result['msg']))
            self.resultinfo.append({host: {"message": result._result['msg'], "code": -1}})

    return self.resultinfo
def _get_play_(self, playbook):
    return Play().load(playbook,
                       variable_manager=self._variable_manager,
                       loader=self._loader)
def execute(self, *args, **kwargs):
    """ Puts args and kwargs in a way ansible can understand. Calls ansible
    and interprets the result.

    """
    assert self.is_hooked_up, "the module should be hooked up to the api"

    self.module_args = module_args = self.get_module_args(args, kwargs)

    loader = DataLoader()
    variable_manager = VariableManager()

    inventory = UncachedInventory(
        loader=loader,
        variable_manager=variable_manager,
        host_list=self.api.servers
    )

    variable_manager.set_inventory(inventory)

    play_source = {
        'name': "Suitable Play",
        'hosts': self.api.servers,
        'gather_facts': 'no',
        'tasks': [{
            'action': {
                'module': self.module_name,
                'args': module_args
            }
        }]
    }

    play = Play.load(
        play_source,
        variable_manager=variable_manager,
        loader=loader
    )

    log.info(u'running {}'.format(u'- {module_name}: {module_args}'.format(
        module_name=self.module_name,
        module_args=module_args
    )))

    start = datetime.utcnow()
    task_queue_manager = None
    callback = SilentCallbackModule()

    try:
        task_queue_manager = TaskQueueManager(
            inventory=inventory,
            variable_manager=variable_manager,
            loader=loader,
            options=self.api.options,
            passwords=getattr(self.api.options, 'passwords', {}),
            stdout_callback=callback
        )
        task_queue_manager.run(play)
    finally:
        if task_queue_manager is not None:
            task_queue_manager.cleanup()

    log.info(u'took {} to complete'.format(datetime.utcnow() - start))

    return self.evaluate_results(callback)
def execute_play(play_source, with_metadata=False):
    loader = DataLoader()
    inventory = InventoryManager(loader=loader, sources=["localhost"])
    variable_manager = VariableManager(loader=loader, inventory=inventory)

    Options = namedtuple('Options', [
        'listtags', 'listtasks', 'listhosts', 'syntax', 'connection',
        'module_path', 'forks', 'remote_user', 'private_key_file',
        'ssh_common_args', 'ssh_extra_args', 'sftp_extra_args',
        'scp_extra_args', 'become', 'become_method', 'become_user',
        'verbosity', 'check', 'diff'
    ])
    options = Options(listtags=False, listtasks=False, listhosts=False,
                      syntax=False, connection='local', module_path=None,
                      forks=100, remote_user='******', private_key_file=None,
                      ssh_common_args=None, ssh_extra_args=None,
                      sftp_extra_args=None, scp_extra_args=None, become=False,
                      become_method=None, become_user='******', verbosity=None,
                      check=False, diff=False)

    variable_manager.extra_vars = {'hosts': 'localhost'}
    passwords = {}

    # Instantiate our ResultCallback for handling results as they come in
    if with_metadata:
        results_callback = ResultCallback()
    else:
        results_callback = None

    play = Play().load(play_source,
                       variable_manager=variable_manager,
                       loader=loader)

    # actually run it
    tqm = None
    try:
        tqm = TaskQueueManager(
            inventory=inventory,
            variable_manager=variable_manager,
            loader=loader,
            options=options,
            passwords=passwords,
            # Use our custom callback instead of the ``default`` callback plugin
            stdout_callback=results_callback,
        )
        result = tqm.run(play)
        if with_metadata:
            while not results_callback.done:
                time.sleep(1)
                logging.debug("continue")
            result = results_callback.batch_result
        return result
    finally:
        if tqm is not None:
            tqm.cleanup()
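# execute_play() above polls results_callback.done and then reads
# results_callback.batch_result, but the ResultCallback class itself is not
# shown here. A plausible minimal sketch, assuming results are aggregated per
# host and the done flag is raised once play stats are reported:
from ansible.plugins.callback import CallbackBase


class ResultCallback(CallbackBase):

    def __init__(self, *args, **kwargs):
        super(ResultCallback, self).__init__(*args, **kwargs)
        self.done = False
        self.batch_result = {'ok': {}, 'failed': {}, 'unreachable': {}}

    def v2_runner_on_ok(self, result, **kwargs):
        self.batch_result['ok'][result._host.get_name()] = result._result

    def v2_runner_on_failed(self, result, **kwargs):
        self.batch_result['failed'][result._host.get_name()] = result._result

    def v2_runner_on_unreachable(self, result, **kwargs):
        self.batch_result['unreachable'][result._host.get_name()] = result._result

    def v2_playbook_on_stats(self, stats):
        # stats arrive once the whole play has finished; unblock the poll loop
        self.done = True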
def handle(self, *args, **options):
    AnsibleOptions = namedtuple('AnsibleOptions', [
        'connection', 'module_path', 'forks', 'become', 'become_method',
        'become_user', 'check', 'diff'
    ])
    ansible_options = AnsibleOptions(connection='smart', module_path=[],
                                     forks=10, become=None,
                                     become_method=None, become_user=None,
                                     check=False, diff=False)
    loader = DataLoader()
    passwords = {}
    results_callback = ResultCallback()
    inventory = InventoryManager(
        loader=loader,
        sources=os.path.join(settings.BASE_DIR, 'etc', 'hosts'))
    variable_manager = VariableManager(loader=loader, inventory=inventory)

    path_resource = '/tmp/resource.cmdb.py'
    play_source = {
        'name': "cmdb",
        'hosts': 'all',
        'gather_facts': 'no',
        'tasks': [{
            'name': 'collect_host',
            'setup': '',
        }, {
            'name': 'copyfile',
            'copy': 'src={0} dest={1}'.format(
                os.path.join(settings.BASE_DIR, 'etc', 'resource.py'),
                path_resource)
        }, {
            'name': 'collect_resource',
            'command': 'python {0}'.format(path_resource)
        }]
    }

    play = Play().load(play_source,
                       variable_manager=variable_manager,
                       loader=loader)

    tqm = None
    try:
        tqm = TaskQueueManager(
            inventory=inventory,
            variable_manager=variable_manager,
            loader=loader,
            options=ansible_options,
            passwords=passwords,
            stdout_callback=results_callback,
        )
        result = tqm.run(play)
    finally:
        if tqm is not None:
            tqm.cleanup()
        shutil.rmtree(C.DEFAULT_LOCAL_TMP, True)
def test_variable_manager_precedence(self, mock_basedir):
    '''
    Tests complex variations and combinations of get_vars() with different
    objects to modify the context under which variables are merged.
    '''

    v = VariableManager()
    v._fact_cache = defaultdict(dict)

    fake_loader = DictDataLoader({
        # inventory1
        '/etc/ansible/inventory1': """
        [group2:children]
        group1

        [group1]
        host1 host_var=host_var_from_inventory_host1

        [group1:vars]
        group_var = group_var_from_inventory_group1

        [group2:vars]
        group_var = group_var_from_inventory_group2
        """,
        # role defaults_only1
        '/etc/ansible/roles/defaults_only1/defaults/main.yml': """
        default_var: "default_var_from_defaults_only1"
        host_var: "host_var_from_defaults_only1"
        group_var: "group_var_from_defaults_only1"
        group_var_all: "group_var_all_from_defaults_only1"
        extra_var: "extra_var_from_defaults_only1"
        """,
        '/etc/ansible/roles/defaults_only1/tasks/main.yml': """
        - debug: msg="here i am"
        """,
        # role defaults_only2
        '/etc/ansible/roles/defaults_only2/defaults/main.yml': """
        default_var: "default_var_from_defaults_only2"
        host_var: "host_var_from_defaults_only2"
        group_var: "group_var_from_defaults_only2"
        group_var_all: "group_var_all_from_defaults_only2"
        extra_var: "extra_var_from_defaults_only2"
        """,
    })

    mock_basedir.return_value = './'
    inv1 = Inventory(loader=fake_loader, variable_manager=v, host_list='/etc/ansible/inventory1')
    inv1.set_playbook_basedir('./')

    play1 = Play.load(dict(
        hosts=['all'],
        roles=['defaults_only1', 'defaults_only2'],
    ), loader=fake_loader, variable_manager=v)

    # first we assert that the defaults as viewed as a whole are the merged results
    # of the defaults from each role, with the last role defined "winning" when
    # there is a variable naming conflict
    res = v.get_vars(loader=fake_loader, play=play1)
    self.assertEqual(res['default_var'], 'default_var_from_defaults_only2')

    # next, we assert that when vars are viewed from the context of a task within a
    # role, that task will see its own role defaults before any other role's
    blocks = play1.compile()
    task = blocks[1].block[0]
    res = v.get_vars(loader=fake_loader, play=play1, task=task)
    self.assertEqual(res['default_var'], 'default_var_from_defaults_only1')

    # next we assert the precedence of inventory variables
    v.set_inventory(inv1)
    h1 = inv1.get_host('host1')

    res = v.get_vars(loader=fake_loader, play=play1, host=h1)
    self.assertEqual(res['group_var'], 'group_var_from_inventory_group1')
    self.assertEqual(res['host_var'], 'host_var_from_inventory_host1')

    # next we test with group_vars/ files loaded
    fake_loader.push("/etc/ansible/group_vars/all", """
    group_var_all: group_var_all_from_group_vars_all
    """)
    fake_loader.push("/etc/ansible/group_vars/group1", """
    group_var: group_var_from_group_vars_group1
    """)
    fake_loader.push("/etc/ansible/group_vars/group3", """
    # this is a dummy, which should not be used anywhere
    group_var: group_var_from_group_vars_group3
    """)
    fake_loader.push("/etc/ansible/host_vars/host1", """
    host_var: host_var_from_host_vars_host1
    """)
    fake_loader.push("group_vars/group1", """
    playbook_group_var: playbook_group_var
    """)
    fake_loader.push("host_vars/host1", """
    playbook_host_var: playbook_host_var
    """)

    v.add_group_vars_file("/etc/ansible/group_vars/all", loader=fake_loader)
    v.add_group_vars_file("/etc/ansible/group_vars/group1", loader=fake_loader)
    v.add_group_vars_file("/etc/ansible/group_vars/group2", loader=fake_loader)
    v.add_group_vars_file("group_vars/group1", loader=fake_loader)
    v.add_host_vars_file("/etc/ansible/host_vars/host1", loader=fake_loader)
    v.add_host_vars_file("host_vars/host1", loader=fake_loader)

    res = v.get_vars(loader=fake_loader, play=play1, host=h1)
    self.assertEqual(res['group_var'], 'group_var_from_group_vars_group1')
    self.assertEqual(res['group_var_all'], 'group_var_all_from_group_vars_all')
    self.assertEqual(res['playbook_group_var'], 'playbook_group_var')
    self.assertEqual(res['host_var'], 'host_var_from_host_vars_host1')
    self.assertEqual(res['playbook_host_var'], 'playbook_host_var')

    # add in the fact cache
    v._fact_cache['host1'] = dict(fact_cache_var="fact_cache_var_from_fact_cache")

    res = v.get_vars(loader=fake_loader, play=play1, host=h1)
    self.assertEqual(res['fact_cache_var'], 'fact_cache_var_from_fact_cache')
def _run_ansible(self, args):
    """Actually build and run an ansible play and return the results"""
    zclass = args.pop('zbx_class')

    # The leadup to the TaskQueueManager() call below is
    # copy pasted from Ansible's example:
    # https://docs.ansible.com/ansible/developing_api.html#python-api-2-0
    # pylint: disable=invalid-name
    Options = namedtuple('Options', ['connection', 'module_path', 'forks',
                                     'become', 'become_method', 'become_user',
                                     'check'])

    loader = DataLoader()
    options = Options(connection='local', module_path=None, forks=1,
                      become=None, become_method=None, become_user=None,
                      check=False)
    passwords = dict(vault_pass='******')

    results_callback = ResultsCallback()

    inventory = InventoryManager(loader=loader)
    variable_manager = VariableManager(loader=loader, inventory=inventory)

    play_source = dict(
        name="Ansible Play",
        hosts=self.pattern,
        gather_facts='no',
        tasks=[
            dict(action=dict(module=zclass, args=args)),
        ]
    )

    play = Play().load(play_source,
                       variable_manager=variable_manager,
                       loader=loader)

    tqm = None
    try:
        tqm = TaskQueueManager(
            inventory=inventory,
            variable_manager=variable_manager,
            loader=loader,
            options=options,
            passwords=passwords,
            stdout_callback=results_callback
        )
        return_code = tqm.run(play)
    finally:
        if tqm is not None:
            tqm.cleanup()

    if return_code != 0:
        raise ResultsException("Ansible module run failed, no results given.")

    if results_callback.result.is_unreachable():
        message = "Ansible module run failed: module output:\n%s" % \
                  json.dumps(results_callback.raw_result, indent=4)
        raise ResultsException(message)

    if results_callback.result.is_failed():
        raise ResultsException(results_callback.raw_result)

    return results_callback.raw_result
def execute(self, *args, **kwargs):
    """ Puts args and kwargs in a way ansible can understand. Calls ansible
    and interprets the result.

    """
    assert self.is_hooked_up, "the module should be hooked up to the api"

    if set_global_context:
        set_global_context(self.api.options)

    # legacy key=value pairs shorthand approach
    if args:
        self.module_args = module_args = self.get_module_args(args, kwargs)
    else:
        self.module_args = module_args = kwargs

    loader = DataLoader()
    inventory_manager = SourcelessInventoryManager(loader=loader)

    for host, port in self.api.hosts_with_ports:
        inventory_manager._inventory.add_host(host, group='all', port=port)

    for key, value in self.api.options.extra_vars.items():
        inventory_manager._inventory.set_variable('all', key, value)

    variable_manager = VariableManager(
        loader=loader, inventory=inventory_manager)

    play_source = {
        'name': "Suitable Play",
        'hosts': 'all',
        'gather_facts': 'no',
        'tasks': [{
            'action': {
                'module': self.module_name,
                'args': module_args,
            },
            'environment': self.api.environment,
        }]
    }

    try:
        play = Play.load(
            play_source,
            variable_manager=variable_manager,
            loader=loader,
        )

        if self.api.strategy:
            play.strategy = self.api.strategy

        log.info(
            u'running {}'.format(u'- {module_name}: {module_args}'.format(
                module_name=self.module_name,
                module_args=module_args
            ))
        )

        start = datetime.utcnow()
        task_queue_manager = None
        callback = SilentCallbackModule()

        # ansible uses various levels of verbosity (from -v to -vvvvvv)
        # offering various amounts of debug information
        #
        # we keep it a bit simpler by activating all of it during debug,
        # and falling back to the default of 0 otherwise
        verbosity = 6 if self.api.options.verbosity == logging.DEBUG else 0

        with ansible_verbosity(verbosity):
            # host_key_checking is special, since not each connection
            # plugin handles it the same way, we need to apply both
            # environment variable and Ansible constant when running a
            # command in the runner to be successful
            with host_key_checking(self.api.host_key_checking):
                kwargs = dict(
                    inventory=inventory_manager,
                    variable_manager=variable_manager,
                    loader=loader,
                    options=self.api.options,
                    passwords=getattr(self.api.options, 'passwords', {}),
                    stdout_callback=callback
                )

                if set_global_context:
                    del kwargs['options']

                task_queue_manager = TaskQueueManager(**kwargs)

                try:
                    task_queue_manager.run(play)
                except SystemExit:
                    # Mitogen forks our process and exits it in one
                    # instance before returning
                    #
                    # This is fine, but it does lead to a very messy exit
                    # by py.test which will essentially return with a test
                    # that is first successful and then failed as each
                    # forked process dies.
                    #
                    # To avoid this we commit suicide if we are run inside
                    # a pytest session. Normally this would just result
                    # in an exit code of zero, which is good.
                    if 'pytest' in sys.modules:
                        try:
                            atexit._run_exitfuncs()
                        except Exception:
                            pass
                        os.kill(os.getpid(), signal.SIGKILL)
                    raise
    finally:
        if task_queue_manager is not None:
            task_queue_manager.cleanup()

        if set_global_context:
            # Ansible 2.8 introduces a global context which persists
            # during the lifetime of the process - for Suitable this
            # singleton/cache needs to be cleared after each call
            # to make sure that API calls do not carry over state.
            #
            # The docs hint at a future inclusion of local contexts, which
            # would of course be preferable.
            from ansible.utils.context_objects import GlobalCLIArgs
            GlobalCLIArgs._Singleton__instance = None

    log.debug(u'took {} to complete'.format(datetime.utcnow() - start))

    return self.evaluate_results(callback)
from collections import namedtuple
from ansible.parsing.dataloader import DataLoader
from ansible.vars import VariableManager
from ansible.inventory import Inventory
from ansible.playbook.play import Play
from ansible.executor.task_queue_manager import TaskQueueManager
import jinja2
import os

# previously the inventory was rendered to a temporary file:
# hosts = NamedTemporaryFile(delete=False)
# hosts.write(rendered_inventory)
# hosts.close()

# NOTE: this does not work against the Ansible 2.x API -- Play() accepts no
# playbook/host_list/runner_callbacks/private_key_file arguments and has no
# run() method; those belong to the old 1.x PlayBook/Runner interface.
pb = Play(
    playbook='test.yml',
    host_list="ops",  # Our hosts, the rendered inventory file
    runner_callbacks="runner_cb",
    private_key_file='id_rsa.pub'
)
results = pb.run()
print(results)
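# A hedged sketch of what the snippet above appears to intend -- running the
# 'test.yml' playbook against the 'ops' inventory -- using PlaybookExecutor,
# which is the 2.x entry point for playbook files. This assumes an Ansible
# 2.4-2.7 style API; the Options fields simply mirror the execute_play()
# namedtuple used elsewhere in this file and may need adjusting per version.
from collections import namedtuple

from ansible.executor.playbook_executor import PlaybookExecutor
from ansible.inventory.manager import InventoryManager
from ansible.parsing.dataloader import DataLoader
from ansible.vars.manager import VariableManager

Options = namedtuple('Options', [
    'listtags', 'listtasks', 'listhosts', 'syntax', 'connection',
    'module_path', 'forks', 'remote_user', 'private_key_file',
    'ssh_common_args', 'ssh_extra_args', 'sftp_extra_args', 'scp_extra_args',
    'become', 'become_method', 'become_user', 'verbosity', 'check', 'diff'
])

loader = DataLoader()
inventory = InventoryManager(loader=loader, sources='ops')  # inventory file from above
variable_manager = VariableManager(loader=loader, inventory=inventory)

pbex = PlaybookExecutor(
    playbooks=['test.yml'],
    inventory=inventory,
    variable_manager=variable_manager,
    loader=loader,
    options=Options(listtags=False, listtasks=False, listhosts=False,
                    syntax=False, connection='smart', module_path=None,
                    forks=5, remote_user=None,
                    private_key_file='id_rsa.pub', ssh_common_args=None,
                    ssh_extra_args=None, sftp_extra_args=None,
                    scp_extra_args=None, become=None, become_method=None,
                    become_user=None, verbosity=0, check=False, diff=False),
    passwords={},
)
print(pbex.run())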
def execute(self, *args, **kwargs):
    """ Puts args and kwargs in a way ansible can understand. Calls ansible
    and interprets the result.

    """
    assert self.is_hooked_up, "the module should be hooked up to the api"

    self.module_args = module_args = self.get_module_args(args, kwargs)

    loader = DataLoader()
    inventory_manager = SourcelessInventoryManager(loader=loader)

    for host in self.api.servers:
        inventory_manager._inventory.add_host(host, group='all')

    for key, value in self.api.options.extra_vars.items():
        inventory_manager._inventory.set_variable('all', key, value)

    variable_manager = VariableManager(
        loader=loader, inventory=inventory_manager)

    play_source = {
        'name': "Suitable Play",
        'hosts': self.api.servers,
        'gather_facts': 'no',
        'tasks': [{
            'action': {
                'module': self.module_name,
                'args': module_args
            }
        }]
    }

    play = Play.load(play_source,
                     variable_manager=variable_manager,
                     loader=loader)

    log.info(u'running {}'.format(u'- {module_name}: {module_args}'.format(
        module_name=self.module_name,
        module_args=module_args)))

    start = datetime.utcnow()
    task_queue_manager = None
    callback = SilentCallbackModule()

    try:
        task_queue_manager = TaskQueueManager(
            inventory=inventory_manager,
            variable_manager=variable_manager,
            loader=loader,
            options=self.api.options,
            passwords=getattr(self.api.options, 'passwords', {}),
            stdout_callback=callback)
        task_queue_manager.run(play)
    finally:
        if task_queue_manager is not None:
            task_queue_manager.cleanup()

    log.info(u'took {} to complete'.format(datetime.utcnow() - start))

    return self.evaluate_results(callback)
def test_empty_play(self):
    p = Play.load(dict())
    self.assertEqual(str(p), "PLAY: <no name specified>")
def __init__(
        self,
        hosts=C.DEFAULT_HOST_LIST,
        module_name=C.DEFAULT_MODULE_NAME,    # * command
        module_args=C.DEFAULT_MODULE_ARGS,    # * 'cmd args'
        forks=C.DEFAULT_FORKS,                # 5
        timeout=C.DEFAULT_TIMEOUT,            # SSH timeout = 10s
        pattern="all",                        # all
        remote_user=C.DEFAULT_REMOTE_USER,    # root
        module_path=None,                     # dirs of custom modules
        connection_type="smart",
        become=None,
        become_method=None,
        become_user=None,
        check=False,
        passwords=None,
        extra_vars=None,
        private_key_file=None):

    # storage & defaults
    self.pattern = pattern
    self.loader = DataLoader()
    self.module_name = module_name
    self.module_args = module_args
    self.check_module_args()
    self.gather_facts = 'no'
    self.resultcallback = ResultCallback()
    self.options = Options(
        connection=connection_type,
        timeout=timeout,
        module_path=module_path,
        forks=forks,
        become=become,
        become_method=become_method,
        become_user=become_user,
        check=check,
        remote_user=remote_user,
        extra_vars=extra_vars or [],
        private_key_file=private_key_file,
        diff=False
    )
    self.inventory = MyInventory(host_list=hosts)
    self.variable_manager = VariableManager(self.loader, self.inventory)
    self.variable_manager.extra_vars = load_extra_vars(loader=self.loader,
                                                       options=self.options)
    self.variable_manager.options_vars = load_options_vars(self.options, "")
    self.passwords = passwords or {}

    self.play_source = dict(
        name="Ansible Ad-hoc",
        hosts=self.pattern,
        gather_facts=self.gather_facts,
        tasks=[dict(action=dict(
            module=self.module_name, args=self.module_args))]
    )

    self.play = Play().load(
        self.play_source,
        variable_manager=self.variable_manager,
        loader=self.loader)

    self.runner = TaskQueueManager(
        inventory=self.inventory,
        variable_manager=self.variable_manager,
        loader=self.loader,
        options=self.options,
        passwords=self.passwords,
        stdout_callback=self.resultcallback
    )
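# The __init__ above only builds the play and the TaskQueueManager; the class's
# run method is not shown in this file. A minimal sketch of how such a runner
# is typically driven, mirroring the cleanup pattern of the other snippets:
def run(self):
    try:
        result_code = self.runner.run(self.play)
    finally:
        self.runner.cleanup()
        if self.loader:
            self.loader.cleanup_all_tmp_files()
    return result_code, self.resultcallback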
def execute(self, module_name, ansible_patt, ansible_args=None):
    loader = DataLoader()
    variable_manager = VariableManager()

    inventory = Inventory(
        loader=loader,
        variable_manager=variable_manager,
        host_list=self.ansible_host_list
    )
    variable_manager.set_inventory(inventory)

    if ansible_args:
        play_source = {
            'name': "AnsiApi Play",
            'hosts': ansible_patt,
            'gather_facts': 'no',
            'tasks': [{
                'action': {
                    'module': module_name,
                    'args': ansible_args
                }
            }]
        }
    else:
        play_source = {
            'name': "AnsiApi Play",
            'hosts': ansible_patt,
            'gather_facts': 'no',
            'tasks': [{
                'action': {
                    'module': module_name
                }
            }]
        }

    play = Play.load(
        play_source,
        variable_manager=variable_manager,
        loader=loader
    )

    task_queue_manager = None
    callback = AnsiCallBack()

    try:
        task_queue_manager = TaskQueueManager(
            inventory=inventory,
            variable_manager=variable_manager,
            loader=loader,
            options=self.options,
            passwords=self.passwords,
            stdout_callback=callback
        )
        task_queue_manager.run(play)
    finally:
        if task_queue_manager is not None:
            task_queue_manager.cleanup()

    return self.evaluate_results(callback)
def _run(self, *module_args, **complex_args):
    """Execute an ansible adhoc command returning the result in an AdhocResult object."""
    # Assemble module argument string
    if module_args:
        complex_args.update(dict(_raw_params=' '.join(module_args)))

    # Assert hosts matching the provided pattern exist
    hosts = self.options['inventory_manager'].list_hosts()
    no_hosts = False
    if len(hosts) == 0:
        no_hosts = True
        warnings.warn("provided hosts list is empty, only localhost is available")

    self.options['inventory_manager'].subset(self.options.get('subset'))
    hosts = self.options['inventory_manager'].list_hosts(self.options['host_pattern'])
    if len(hosts) == 0 and not no_hosts:
        raise ansible.errors.AnsibleError("Specified hosts and/or --limit does not match any hosts")

    parser = CLI.base_parser(
        runas_opts=True,
        inventory_opts=True,
        async_opts=True,
        output_opts=True,
        connect_opts=True,
        check_opts=True,
        runtask_opts=True,
        vault_opts=True,
        fork_opts=True,
        module_opts=True,
    )
    (options, args) = parser.parse_args([])

    # Pass along cli options
    options.verbosity = 5
    options.connection = self.options.get('connection')
    options.remote_user = self.options.get('user')
    options.become = self.options.get('become')
    options.become_method = self.options.get('become_method')
    options.become_user = self.options.get('become_user')
    options.module_path = self.options.get('module_path')

    # Initialize callback to capture module JSON responses
    cb = ResultAccumulator()

    kwargs = dict(
        inventory=self.options['inventory_manager'],
        variable_manager=self.options['variable_manager'],
        loader=self.options['loader'],
        options=options,
        stdout_callback=cb,
        passwords=dict(conn_pass=None, become_pass=None),
    )

    # create a pseudo-play to execute the specified module via a single task
    play_ds = dict(
        name="pytest-ansible",
        hosts=self.options['host_pattern'],
        gather_facts='no',
        tasks=[
            dict(action=dict(module=self.options['module_name'], args=complex_args)),
        ]
    )
    play = Play().load(play_ds,
                       variable_manager=self.options['variable_manager'],
                       loader=self.options['loader'])

    # now create a task queue manager to execute the play
    tqm = None
    try:
        tqm = TaskQueueManager(**kwargs)
        tqm.run(play)
    finally:
        if tqm:
            tqm.cleanup()

    # Raise exception if host(s) unreachable
    # FIXME - if multiple hosts were involved, should an exception be raised?
    if cb.unreachable:
        raise AnsibleConnectionFailure("Host unreachable",
                                       dark=cb.unreachable,
                                       contacted=cb.contacted)

    # Success!
    return AdHocResult(contacted=cb.contacted)
def main():
    host_list = ['localhost', 'www.example.com', 'www.google.com']

    # since the API is constructed for CLI it expects certain options to
    # always be set in the context object
    context.CLIARGS = ImmutableDict(connection='smart',
                                    module_path=['/usr/share/ansible'],
                                    forks=10, become=None, become_method=None,
                                    become_user=None, check=False, diff=False)

    # required for
    # https://github.com/ansible/ansible/blob/devel/lib/ansible/inventory/manager.py#L204
    sources = ','.join(host_list)
    if len(host_list) == 1:
        sources += ','

    # initialize needed objects
    loader = DataLoader()
    passwords = dict()

    # create inventory and pass to var manager
    inventory = InventoryManager(loader=loader, sources=sources)
    variable_manager = VariableManager(loader=loader, inventory=inventory)

    # create play with tasks
    play_source = dict(
        name="Ansible Play",
        hosts=host_list,
        gather_facts='no',
        tasks=[
            dict(action=dict(module='command', args=dict(cmd='/usr/bin/uptime')))
        ])
    play = Play().load(play_source,
                       variable_manager=variable_manager,
                       loader=loader)

    # actually run it
    tqm = None
    callback = ResultCallback1()
    try:
        tqm = TaskQueueManager(
            inventory=inventory,
            variable_manager=variable_manager,
            loader=loader,
            passwords=passwords,
            stdout_callback=callback,
        )
        result = tqm.run(play)
    finally:
        if tqm is not None:
            tqm.cleanup()
        if loader:
            loader.cleanup_all_tmp_files()

    print("UP ***********", callback)
    for host, result in callback.host_ok.items():
        print('{0} >>> {1}'.format(host, result._result['stdout']))

    print("FAILED *******")
    for host, result in callback.host_failed.items():
        print('{0} >>> {1}'.format(host, result._result['msg']))

    print("DOWN *********")
    for host, result in callback.host_unreachable.items():
        print('{0} >>> {1}'.format(host, result._result['msg']))
def handle(self, *args, **options):
    Options = namedtuple('Options', [
        'connection', 'module_path', 'forks', 'become', 'become_method',
        'become_user', 'check', 'diff'
    ])
    options = Options(connection='smart', module_path=[], forks=10,
                      become=None, become_method=None, become_user=None,
                      check=False, diff=False)
    loader = DataLoader()
    passwords = {}
    results_callback = ResultCallback()
    # location of the hosts inventory file
    inventory = InventoryManager(
        loader=loader,
        sources=os.path.join(settings.BASE_DIR, 'etc', 'hosts'))
    variable_manager = VariableManager(loader=loader, inventory=inventory)

    dest_path = '/tmp/resources.py'

    # equivalent to: ansible all -i etc/hosts -m setup
    play_source = {
        'name': "CMDB Collect",
        'hosts': 'all',            # which hosts to run on
        'gather_facts': 'no',
        'tasks': [                 # list of tasks to execute
            {
                'name': 'collect_server_info',   # task name
                'setup': ''                      # module to run
            },
            {
                'name': 'copy_file',
                'copy': 'src=' + os.path.join(settings.BASE_DIR, 'etc', 'resources.py') +
                        ' dest=' + dest_path
            },
            {
                'name': 'collect_resource',
                'command': 'python3 ' + dest_path
            }
        ]
    }

    play = Play().load(play_source,
                       variable_manager=variable_manager,
                       loader=loader)

    tqm = None
    try:
        tqm = TaskQueueManager(
            inventory=inventory,
            variable_manager=variable_manager,
            loader=loader,
            options=options,
            passwords=passwords,
            stdout_callback=results_callback,
        )
        result = tqm.run(play)
    finally:
        if tqm is not None:
            tqm.cleanup()
        shutil.rmtree(C.DEFAULT_LOCAL_TMP, True)