def spec_handler(self, parser, args):
    """Execute plugin's main playbook.

    if "--generate-answers-file": only generate answers file
    if "--dry-run": only generate vars dict
    else: run Ansible with vars dict as input
    if "-o": write vars dict to file

    :param parser: argparse object
    :param args: dict, input arguments as parsed by the parser.
    :return:
        * Ansible exit code if ansible is executed.
        * None if "--generate-answers-file" or "--dry-run" answers file
          is generated
    """
    workspace_manager = CoreServices.workspace_manager()

    active_workspace = workspace_manager.get_active_workspace()
    if not active_workspace:
        # Bootstrap: create and activate a workspace so the playbook has
        # an inventory to run against.
        active_workspace = workspace_manager.create()
        workspace_manager.activate(active_workspace.name)
        # Logger.warn() is a deprecated alias since Python 2.7/3.2 —
        # use warning() (matches the rest of this file).
        LOG.warning("There are no workspaces. New workspace added: %s",
                    active_workspace.name)

    # TODO(yfried): when accepting inventory from CLI, need to update:
    # workspace.inventory = CLI[inventory]

    if self.specification is None:
        # FIXME(yfried): Create a proper exception type
        raise Exception("Unable to create specification "
                        "for '{}' plugin. Check plugin "
                        "config and settings folders".format(self.name))

    parsed_args = self.specification.parse_args(parser, args)
    if parsed_args is None:
        return None

    # unpack parsed arguments
    nested_args, control_args = parsed_args

    if control_args.get('debug', None):
        logger.LOG.setLevel(logging.DEBUG)

    vars_dict = VarsDictManager.generate_settings(
        # TODO(yfried): consider whether to use type (for legacy) or name
        self.plugin.config["plugin_type"],
        nested_args,
    )

    VarsDictManager.merge_extra_vars(vars_dict,
                                     control_args.get('extra-vars'))

    LOG.debug("Dumping vars dict...")
    vars_yaml = yaml.safe_dump(vars_dict, default_flow_style=False)

    output_filename = control_args.get("output")
    if output_filename:
        LOG.debug("Output file: {}".format(output_filename))
        with open(output_filename, 'w') as output_file:
            output_file.write(vars_yaml)
    else:
        # No output file requested: dump the answers to stdout.
        print(vars_yaml)

    if control_args.get("dry-run"):
        return None

    result = execute.ansible_playbook(
        inventory=active_workspace.inventory,
        playbook_path=self.plugin.playbook,
        verbose=control_args.get('verbose', None),
        extra_vars=vars_dict,
        ansible_args=control_args.get('ansible-args', None))
    return result
def spec_handler(self, parser, args):
    """Run the plugin's main playbook for the parsed CLI input.

    * "--generate-answers-file": only generate answers file
    * "--dry-run": only generate vars dict
    * otherwise: run Ansible with vars dict as input
    * "-o": write vars dict to file

    :param parser: argparse object
    :param args: dict, input arguments as parsed by the parser.
    :return: Ansible exit code when a playbook was executed, or None
        when only an answers file / vars dict was generated.
    """
    ws_manager = CoreServices.workspace_manager()

    workspace = ws_manager.get_active_workspace()
    if not workspace:
        # Bootstrap a workspace when none exists yet.
        workspace = ws_manager.create()
        ws_manager.activate(workspace.name)
        LOG.warning("There are no workspaces. New workspace added: %s",
                    workspace.name)

    # TODO(yfried): when accepting inventory from CLI, need to update:
    # workspace.inventory = CLI[inventory]

    if self.specification is None:
        # FIXME(yfried): Create a proper exception type
        raise Exception("Unable to create specification "
                        "for '{}' plugin. Check plugin "
                        "config and settings folders".format(self.name))

    parsed = self.specification.parse_args(parser, args)
    if parsed is None:
        return None

    # unpack parsed arguments
    nested, controls, custom = parsed

    if controls.get('debug', None):
        logger.LOG.setLevel(logging.DEBUG)

    vars_dict = VarsDictManager.generate_settings(
        # TODO(yfried): consider whether to use type (for legacy) or name
        self.plugin.type,
        nested,
    )

    # Update vars_dict with custom ansible variables (if needed)
    vars_dict.update(custom)

    VarsDictManager.merge_extra_vars(vars_dict, controls.get('extra-vars'))

    LOG.debug("Dumping vars dict...")
    dumped = yaml.safe_dump(vars_dict, default_flow_style=False)

    out_path = controls.get("output")
    if out_path:
        LOG.debug("Output file: {}".format(out_path))
        with open(out_path, 'w') as out_file:
            out_file.write(dumped)
    else:
        # No output file requested: dump the answers to stdout.
        print(dumped)

    if controls.get("dry-run"):
        return None

    return execute.ansible_playbook(
        inventory=workspace.inventory,
        playbook_path=self.plugin.playbook,
        verbose=controls.get('verbose', None),
        extra_vars=vars_dict,
        ansible_args=controls.get('ansible-args', None))
def spec_handler(self, parser, args):
    """Execute plugin's main playbook.

    if "--generate-answers-file": only generate answers file
    if "--dry-run": only generate vars dict
    else: run Ansible with vars dict as input
    if "-o": write vars dict to file

    :param parser: argparse object
    :param args: dict, input arguments as parsed by the parser.
    :return:
        * Ansible exit code if ansible is executed.
        * None if "--generate-answers-file" or "--dry-run" answers file
          is generated
    """
    workspace_manager = CoreServices.workspace_manager()

    active_workspace = workspace_manager.get_active_workspace()
    if not active_workspace:
        # Bootstrap: create and activate a workspace so the playbook has
        # an inventory to run against.
        active_workspace = workspace_manager.create()
        workspace_manager.activate(active_workspace.name)
        LOG.warning("There are no workspaces. New workspace added: %s",
                    active_workspace.name)

    # TODO(yfried): when accepting inventory from CLI, need to update:
    # workspace.inventory = CLI[inventory]

    if self.specification is None:
        # FIXME(yfried): Create a proper exception type
        raise Exception("Unable to create specification "
                        "for '{}' plugin. Check plugin "
                        "config and settings folders".format(self.name))

    parsed_args = self.specification.parse_args(parser, args)
    if parsed_args is None:
        return None

    # unpack parsed arguments
    nested_args, control_args, custom_args = parsed_args

    if control_args.get('debug', None):
        logger.LOG.setLevel(logging.DEBUG)

    vars_dict = VarsDictManager.generate_settings(
        # TODO(yfried): consider whether to use type (for legacy) or name
        self.plugin.type,
        nested_args,
    )

    # Update vars_dict with custom ansible variables (if needed)
    vars_dict.update(custom_args)

    VarsDictManager.merge_extra_vars(vars_dict,
                                     control_args.get('extra-vars'))

    LOG.debug("Dumping vars dict...")
    vars_yaml = yaml.safe_dump(vars_dict, default_flow_style=False)

    output_filename = control_args.get("output")
    if output_filename:
        LOG.debug("Output file: {}".format(output_filename))
        with open(output_filename, 'w') as output_file:
            output_file.write(vars_yaml)
    else:
        # No output file requested: dump the answers to stdout.
        print(vars_yaml)

    if control_args.get("dry-run"):
        return None

    # register plugins_dir path otherwise roles introduced by the plugin
    # are not found during the plugin execution
    # save the current ANSIBLE_ROLES_PATH so that it can be restored later
    ansible_roles_path = os.environ.get('ANSIBLE_ROLES_PATH', '')
    try:
        if self.plugin.roles_path:
            # check whether the path defined by user exists
            role_path = os.path.join(self.plugin.path,
                                     self.plugin.roles_path)
            if not os.path.exists(role_path):
                LOG.warning("Plugin's config.role_path: %s, doesn't exist",
                            role_path)

            # roles path points to the dir which contains installed plugins
            roles_path = os.path.join(role_path, '../')
            if ansible_roles_path:
                new_path = ':'.join([ansible_roles_path, roles_path])
            else:
                new_path = roles_path
            os.environ['ANSIBLE_ROLES_PATH'] = new_path

        result = execute.ansible_playbook(
            ir_workspace=active_workspace,
            ir_plugin=self.plugin,
            playbook_path=self.plugin.playbook,
            verbose=control_args.get('verbose', None),
            extra_vars=vars_dict,
            ansible_args=control_args.get('ansible-args', None))
    finally:
        # BUGFIX: restore the original ANSIBLE_ROLES_PATH even when the
        # playbook call raises; previously a failure left the modified
        # path in os.environ for the rest of the process lifetime.
        os.environ['ANSIBLE_ROLES_PATH'] = ansible_roles_path

    return result