def node_list(**kwargs):
    """Return the list of node names for the active workspace."""
    manager = CoreServices.workspace_manager()
    active = manager.get_active_workspace()
    names = []
    # Each entry from node_list() is a tuple whose first item is the name.
    for entry in manager.node_list(workspace_name=active.name):
        names.append(entry[0])
    return names
def node_list(**kwargs):
    """Return node names list for the active workspace."""
    wm = CoreServices.workspace_manager()
    active_name = wm.get_active_workspace().name
    # node_list() yields tuples; the node name is the first element.
    return [record[0] for record in wm.node_list(workspace_name=active_name)]
def group_list(**kwargs):
    """Return node group names list for a workspace.

    Uses the workspace named on the CLI when given, otherwise falls back
    to the active workspace.
    """
    wm = CoreServices.workspace_manager()
    requested = kwargs["parsed_args"].name
    if not requested:
        requested = wm.get_active_workspace().name
    groups = []
    # group_list() yields tuples; the group name is the first element.
    for row in wm.group_list(workspace_name=requested):
        groups.append(row[0])
    return groups
def ssh_to_host(hostname, remote_command=None):
    """Compose an ssh command string for a workspace host and execute it.

    Uses Ansible to parse the inventory file and extract the ssh
    connection options (user, port, address, key, extra args).

    :param hostname: str. Hostname from inventory.
    :param remote_command: optional str. When given, run this command on
        the remote host instead of opening an interactive shell.
    :return: int. Exit status of the ssh process.
    :raises exceptions.IRNoActiveWorkspaceFound: if no workspace is active.
    :raises exceptions.IRSshException: if the host is missing from the
        inventory or uses a non-ssh (local) connection.
    """
    workspace_manager = CoreServices.workspace_manager()
    workspace = workspace_manager.get_active_workspace()
    if workspace is None:
        raise exceptions.IRNoActiveWorkspaceFound()
    inventory_file = workspace.inventory
    # Imported lazily: ansible is heavy and only needed by this command.
    from ansible.parsing.dataloader import DataLoader
    from ansible.inventory.manager import InventoryManager
    invent = InventoryManager(DataLoader(), sources=inventory_file)
    host = invent.get_host(hostname)
    if host is None:
        raise exceptions.IRSshException(
            "Host {} is not in inventory {}".format(hostname, inventory_file))
    if _get_magic_var(host, "connection") == "local":
        raise exceptions.IRSshException("Only ssh transport acceptable.")

    cmd = " ".join(["ssh {priv_key} {comm_args}",
                    "{extra_args} -p {port} -t {user}@{host}"])
    cmd_fields = {}
    cmd_fields["user"] = _get_magic_var(host, "remote_user", default="root")
    cmd_fields["port"] = _get_magic_var(host, "port", default=22)
    cmd_fields["host"] = _get_magic_var(host, "remote_addr")

    priv_key = _get_magic_var(host, "private_key_file")
    # NOTE(yfried):
    # ssh client needs key to be in the directory you're running one from
    # ('ssh -i id_rsa ...') or to be provided by absolute path.
    # assume paths are relative to inventory file.
    if priv_key:
        # FIX: only resolve the path when a key is actually configured;
        # os.path.join(..., None) raises TypeError.
        abspath = os.path.join(
            os.path.abspath(os.path.dirname(inventory_file)), priv_key)
        priv_key = abspath if os.path.exists(abspath) else priv_key
    # FIX: omit the flag entirely when there is no key. A dangling "-i "
    # would make ssh consume the next token as the identity file path.
    cmd_fields["priv_key"] = "-i {}".format(priv_key) if priv_key else ""
    cmd_fields["comm_args"] = _get_magic_var(host, "ssh_common_args")
    cmd_fields["extra_args"] = _get_magic_var(host, "ssh_extra_args")

    LOG.debug("Establishing ssh connection to {}".format(cmd_fields["host"]))
    compiled_cmd = cmd.format(**cmd_fields)
    if remote_command is not None:
        compiled_cmd = " ".join(
            [compiled_cmd, '"{}"'.format(remote_command)])
    result = os.WEXITSTATUS(os.system(compiled_cmd))
    LOG.debug("Connection to {} closed".format(cmd_fields["host"]))
    return result
def ssh_to_host(hostname, remote_command=None):
    """Compose an ssh command string for a workspace host and execute it.

    Uses Ansible to parse the inventory file and extract the ssh
    connection options (user, port, address, key, extra args).

    :param hostname: str. Hostname from inventory.
    :param remote_command: optional str. When given, run this command on
        the remote host instead of opening an interactive shell.
    :return: int. Exit status of the ssh process.
    :raises exceptions.IRNoActiveWorkspaceFound: if no workspace is active.
    :raises exceptions.IRSshException: if the host is missing from the
        inventory or uses a non-ssh (local) connection.
    """
    workspace_manager = CoreServices.workspace_manager()
    workspace = workspace_manager.get_active_workspace()
    if workspace is None:
        raise exceptions.IRNoActiveWorkspaceFound()
    inventory_file = workspace.inventory
    invent = inventory.Inventory(DataLoader(), VariableManager(),
                                 host_list=inventory_file)
    host = invent.get_host(hostname)
    if host is None:
        raise exceptions.IRSshException(
            "Host {} is not in inventory {}".format(hostname, inventory_file))
    if _get_magic_var(host, "connection") == "local":
        raise exceptions.IRSshException("Only ssh transport acceptable.")

    cmd = " ".join(["ssh {priv_key} {comm_args}",
                    "{extra_args} -p {port} -t {user}@{host}"])
    cmd_fields = {}
    cmd_fields["user"] = _get_magic_var(host, "remote_user", default="root")
    cmd_fields["port"] = _get_magic_var(host, "port", default=22)
    cmd_fields["host"] = _get_magic_var(host, "remote_addr")

    priv_key = _get_magic_var(host, "private_key_file")
    # NOTE(yfried):
    # ssh client needs key to be in the directory you're running one from
    # ('ssh -i id_rsa ...') or to be provided by absolute path.
    # assume paths are relative to inventory file.
    if priv_key:
        # FIX: only resolve the path when a key is actually configured;
        # os.path.join(..., None) raises TypeError.
        abspath = os.path.join(
            os.path.abspath(os.path.dirname(inventory_file)), priv_key)
        priv_key = abspath if os.path.exists(abspath) else priv_key
    # FIX: omit the flag entirely when there is no key. A dangling "-i "
    # would make ssh consume the next token as the identity file path.
    cmd_fields["priv_key"] = "-i {}".format(priv_key) if priv_key else ""
    cmd_fields["comm_args"] = _get_magic_var(host, "ssh_common_args")
    cmd_fields["extra_args"] = _get_magic_var(host, "ssh_extra_args")

    LOG.debug("Establishing ssh connection to {}".format(cmd_fields["host"]))
    compiled_cmd = cmd.format(**cmd_fields)
    if remote_command is not None:
        compiled_cmd = " ".join([compiled_cmd, '"{}"'.format(remote_command)])
    result = os.WEXITSTATUS(os.system(compiled_cmd))
    LOG.debug("Connection to {} closed".format(cmd_fields["host"]))
    return result
def group_list(**kwargs):
    """Return node group names list for a workspace.

    Falls back to the active workspace when no name was parsed from CLI.
    """
    manager = CoreServices.workspace_manager()
    name = (kwargs["parsed_args"].name
            or manager.get_active_workspace().name)
    # group_list() yields tuples; the group name is the first element.
    return [item[0] for item in manager.group_list(workspace_name=name)]
def resolve(self, value):
    """Set the active workspace's inventory.

    Assumes an active workspace exists, as ComplexType objects are
    resolved after the workspace is verified. Calls the
    workspace.inventory setter; see its source for more information.

    :param value: path to inventory file.
    """
    manager = CoreServices.workspace_manager()
    active_workspace = manager.get_active_workspace()
    active_workspace.inventory = value
def parse_cli_input(cls, arg_parser, args=None):
    """Parse CLI input.

    Also dumps every ``subcommand*`` argument to a per-subcommand text
    file inside the active workspace directory, then reshapes the flat
    argparse namespace into nested per-subcommand dicts.

    :param arg_parser: argparse object
    :param args: replace sys.argv[1:]
    :return: dict. Parsed CLI input
    :raises exceptions.IRUnrecognizedOptionsException: on unknown options.
    """
    parse_args, unknown_args = arg_parser.parse_known_args(args)
    # todo(obaranov) Pass all the unknown arguments to the ansible
    # For now just raise exception
    if unknown_args:
        raise exceptions.IRUnrecognizedOptionsException(unknown_args)
    parse_args = parse_args.__dict__
    # Save all command line arguments to a file
    # NOTE(review): the file lands in the active workspace dir, named
    # '<subcommand>_all_argument_file.txt'.
    all_argument_file = CoreServices.workspace_manager().get_active_workspace().path + '/' \
        + parse_args['subcommand'] + '_all_argument_file.txt'
    # NOTE(review): 'file' shadows the 'file' builtin name.
    with open(all_argument_file, 'w') as file:
        for arg in parse_args:
            # NOTE(review): substring match, not a prefix check - any arg
            # containing 'subcommand' is written out.
            if 'subcommand' in arg:
                # len('subcommand') == 10: strip the 'subcommand' prefix to
                # recover the original argument name.
                arg_name = arg[10:]
                if arg_name == '':
                    # The bare 'subcommand' key holds the plugin name itself.
                    data = 'plugin' + ': ' + str(parse_args[arg])
                else:
                    data = arg_name + ': ' + str(parse_args[arg])
                file.write(data)
                file.write('\n')
    # move sub commands to the nested dicts
    result = collections.defaultdict(dict)
    # Matches 'subcommand' optionally followed by the nested argument name.
    # NOTE(review): the trailing '$$' is a doubled end anchor and the '+'
    # repetition is redundant - harmless, but likely typos.
    expr = '^(?P<subcmd_name>subcommand)+(?P<arg_name>.*)$$'
    for arg, value in parse_args.items():
        if value is None:
            continue
        match = re.search(expr, arg)
        if match and match.groupdict().get('subcmd_name', None) \
                and not match.groupdict().get('arg_name', None):
            # create empty nested dict for subcommands
            if value not in result:
                result[value] = {}
        if match and match.groupdict().get('arg_name', None):
            # we have subcommand. put it as nested dict
            arg_name = match.group('arg_name')
            cmd_name = match.group('subcmd_name')
            sub_name = parse_args[cmd_name]
            result[sub_name][arg_name] = value
    return result
def ssh_to_host(hostname, remote_command=None):
    """Compose an ssh command string for a workspace host and execute it.

    Uses Ansible to parse the inventory file and extract the ssh
    connection options (user, port, address, key, extra args).

    :param hostname: str. Hostname from inventory.
    :param remote_command: optional str. When given, run this command on
        the remote host instead of opening an interactive shell.
    :return: int. Exit status of the ssh process.
    :raises exceptions.IRNoActiveWorkspaceFound: if no workspace is active.
    :raises exceptions.IRSshException: if the host is missing from the
        inventory or uses a non-ssh (local) connection.
    """
    workspace_manager = CoreServices.workspace_manager()
    workspace = workspace_manager.get_active_workspace()
    if workspace is None:
        raise exceptions.IRNoActiveWorkspaceFound()
    inventory_file = workspace.inventory
    invent = inventory.Inventory(DataLoader(), VariableManager(),
                                 host_list=inventory_file)
    host = invent.get_host(hostname)
    if host is None:
        raise exceptions.IRSshException(
            "Host {} is not in inventory {}".format(hostname, inventory_file))
    if _get_magic_var(host, "connection") == "local":
        raise exceptions.IRSshException("Only ssh transport acceptable.")

    cmd = " ".join(["ssh {priv_key} {comm_args}",
                    "{extra_args} -p {port} -t {user}@{host}"])
    cmd_fields = {}
    cmd_fields["user"] = _get_magic_var(host, "remote_user", default="root")
    cmd_fields["port"] = _get_magic_var(host, "port", default=22)
    cmd_fields["host"] = _get_magic_var(host, "remote_addr")
    priv_key = _get_magic_var(host, "private_key_file")
    # FIX: omit the flag entirely when there is no key. A dangling "-i "
    # would make ssh consume the next token as the identity file path.
    cmd_fields["priv_key"] = "-i {}".format(priv_key) if priv_key else ""
    cmd_fields["comm_args"] = _get_magic_var(host, "ssh_common_args")
    cmd_fields["extra_args"] = _get_magic_var(host, "ssh_extra_args")

    LOG.debug("Establishing ssh connection to {}".format(cmd_fields["host"]))
    compiled_cmd = cmd.format(**cmd_fields)
    if remote_command is not None:
        compiled_cmd = " ".join([compiled_cmd, '"{}"'.format(remote_command)])
    # FIX: surface the ssh exit status to the caller (the sibling variants
    # of this function already do) instead of discarding it.
    result = os.WEXITSTATUS(os.system(compiled_cmd))
    LOG.debug("Connection to {} closed".format(cmd_fields["host"]))
    return result
def ssh_to_host(hostname, remote_command=None):
    """Compose an ssh command string for a workspace host and execute it.

    Uses Ansible to parse the inventory file and extract the ssh
    connection options (user, port, address, key, extra args).

    :param hostname: str. Hostname from inventory.
    :param remote_command: optional str. When given, run this command on
        the remote host instead of opening an interactive shell.
    :return: int. Exit status of the ssh process.
    :raises exceptions.IRNoActiveWorkspaceFound: if no workspace is active.
    :raises exceptions.IRSshException: if the host is missing from the
        inventory or uses a non-ssh (local) connection.
    """
    workspace_manager = CoreServices.workspace_manager()
    workspace = workspace_manager.get_active_workspace()
    if workspace is None:
        raise exceptions.IRNoActiveWorkspaceFound()
    inventory_file = workspace.inventory
    invent = inventory.Inventory(DataLoader(), VariableManager(),
                                 host_list=inventory_file)
    host = invent.get_host(hostname)
    if host is None:
        raise exceptions.IRSshException(
            "Host {} is not in inventory {}".format(hostname, inventory_file))
    if _get_magic_var(host, "connection") == "local":
        raise exceptions.IRSshException("Only ssh transport acceptable.")

    cmd = " ".join(["ssh {priv_key} {comm_args}",
                    "{extra_args} -p {port} -t {user}@{host}"])
    cmd_fields = {}
    cmd_fields["user"] = _get_magic_var(host, "remote_user", default="root")
    cmd_fields["port"] = _get_magic_var(host, "port", default=22)
    cmd_fields["host"] = _get_magic_var(host, "remote_addr")
    priv_key = _get_magic_var(host, "private_key_file")
    # FIX: omit the flag entirely when there is no key. A dangling "-i "
    # would make ssh consume the next token as the identity file path.
    cmd_fields["priv_key"] = "-i {}".format(priv_key) if priv_key else ""
    cmd_fields["comm_args"] = _get_magic_var(host, "ssh_common_args")
    cmd_fields["extra_args"] = _get_magic_var(host, "ssh_extra_args")

    LOG.debug("Establishing ssh connection to {}".format(cmd_fields["host"]))
    compiled_cmd = cmd.format(**cmd_fields)
    if remote_command is not None:
        compiled_cmd = " ".join(
            [compiled_cmd, '"{}"'.format(remote_command)])
    # FIX: surface the ssh exit status to the caller (the sibling variants
    # of this function already do) instead of discarding it.
    result = os.WEXITSTATUS(os.system(compiled_cmd))
    LOG.debug("Connection to {} closed".format(cmd_fields["host"]))
    return result
def test_infrared_home_dir(infrared_home):
    """Verify CoreServices.setup() creates the expected home layout."""
    settings = CoreSettings()
    settings.install_plugin_at_start = False
    CoreServices.setup(settings)

    home = infrared_home
    assert os.path.isdir(home)
    for subdir in ('.workspaces', '.library', 'plugins'):
        assert os.path.isdir(os.path.join(home, subdir))
    assert os.path.isfile(os.path.join(home, '.plugins.ini'))

    # The managers must point inside the same home directory.
    assert (CoreServices.workspace_manager().workspace_dir
            == os.path.join(home, '.workspaces'))
    plugins_manager = CoreServices.plugins_manager()
    assert plugins_manager.config_file == os.path.join(home, '.plugins.ini')
    assert plugins_manager.plugins_dir == os.path.join(home, 'plugins')
def test_infrared_home_dir(infrared_home):
    """Verify CoreServices.setup() builds the home dir and ansible.cfg."""
    os.environ['ANSIBLE_CONFIG'] = os.path.join(infrared_home, 'ansible.cfg')
    settings = CoreSettings()
    settings.install_plugin_at_start = False
    CoreServices.setup(settings)

    home = infrared_home
    assert os.path.isdir(home)
    assert os.path.isdir(os.path.join(home, '.workspaces'))
    assert os.path.isfile(os.path.join(home, '.plugins.ini'))
    assert os.path.isdir(os.path.join(home, 'plugins'))
    assert os.path.isfile(os.path.join(home, 'ansible.cfg'))

    # The managers must point inside the same home directory.
    wm = CoreServices.workspace_manager()
    pm = CoreServices.plugins_manager()
    acm = CoreServices.ansible_config_manager()
    assert wm.workspace_dir == os.path.join(home, '.workspaces')
    assert pm.config_file == os.path.join(home, '.plugins.ini')
    assert pm.plugins_dir == os.path.join(home, 'plugins')
    assert acm.ansible_config_path == os.path.join(home, 'ansible.cfg')
def test_infrared_home_dir(infrared_home):
    """Verify CoreServices.setup() builds the home dir and ansible.cfg."""
    os.environ['ANSIBLE_CONFIG'] = os.path.join(infrared_home, 'ansible.cfg')
    core_settings = CoreSettings()
    core_settings.install_plugin_at_start = False
    CoreServices.setup(core_settings)

    # Expected locations, all rooted in the infrared home directory.
    workspaces = os.path.join(infrared_home, '.workspaces')
    plugins_ini = os.path.join(infrared_home, '.plugins.ini')
    plugins_dir = os.path.join(infrared_home, 'plugins')
    ansible_cfg = os.path.join(infrared_home, 'ansible.cfg')

    assert os.path.isdir(infrared_home)
    assert os.path.isdir(workspaces)
    assert os.path.isfile(plugins_ini)
    assert os.path.isdir(plugins_dir)
    assert os.path.isfile(ansible_cfg)

    assert CoreServices.workspace_manager().workspace_dir == workspaces
    assert CoreServices.plugins_manager().config_file == plugins_ini
    assert CoreServices.plugins_manager().plugins_dir == plugins_dir
    assert (CoreServices.ansible_config_manager().ansible_config_path
            == ansible_cfg)
def spec_handler(self, parser, args):
    """Execute plugin's main playbook.

    if "--generate-answers-file": only generate answers file
    if "--dry-run": only generate vars dict
    else: run Ansible with vars dict as input
    if "-o": write vars dict to file

    :param parser: argparse object
    :param args: dict, input arguments as parsed by the parser.
    :return:
        * Ansible exit code if ansible is executed.
        * None if "--generate-answers-file" or "--dry-run" answers file is
          generated
    """
    workspace_manager = CoreServices.workspace_manager()
    active_workspace = workspace_manager.get_active_workspace()
    if not active_workspace:
        # No workspace yet: create one and make it active so the playbook
        # has an inventory to run against.
        active_workspace = workspace_manager.create()
        workspace_manager.activate(active_workspace.name)
        LOG.warning("There are no workspaces. New workspace added: %s",
                    active_workspace.name)
    # TODO(yfried): when accepting inventory from CLI, need to update:
    # workspace.inventory = CLI[inventory]
    if self.specification is None:
        # FIXME(yfried): Create a proper exception type
        raise Exception("Unable to create specification "
                        "for '{}' plugin. Check plugin "
                        "config and settings folders".format(self.name))
    parsed_args = self.specification.parse_args(parser, args)
    if parsed_args is None:
        # Answers file was generated; nothing to execute.
        return None
    # unpack parsed arguments
    nested_args, control_args, custom_args = parsed_args
    if control_args.get('debug', None):
        logger.LOG.setLevel(logging.DEBUG)
    vars_dict = VarsDictManager.generate_settings(
        # TODO(yfried): consider whether to use type (for legacy) or name
        self.plugin.type,
        nested_args,
    )
    # Update vars_dict with custom ansible variables (if needed)
    vars_dict.update(custom_args)
    VarsDictManager.merge_extra_vars(vars_dict,
                                     control_args.get('extra-vars'))
    LOG.debug("Dumping vars dict...")
    vars_yaml = yaml.safe_dump(vars_dict, default_flow_style=False)
    output_filename = control_args.get("output")
    if output_filename:
        # "-o": write the vars dict to the requested file ...
        LOG.debug("Output file: {}".format(output_filename))
        with open(output_filename, 'w') as output_file:
            output_file.write(vars_yaml)
    else:
        # ... otherwise echo it to stdout.
        print(vars_yaml)
    if control_args.get("dry-run"):
        return None
    result = execute.ansible_playbook(
        inventory=active_workspace.inventory,
        playbook_path=self.plugin.playbook,
        verbose=control_args.get('verbose', None),
        extra_vars=vars_dict,
        ansible_args=control_args.get('ansible-args', None))
    return result
def workspace_list(**kwargs):
    """Return the names of all known workspaces."""
    manager = CoreServices.workspace_manager()
    names = []
    for workspace in manager.list():
        names.append(workspace.name)
    return names
def __init__(self, name, *args, **kwargs):
    """Initialize the spec and cache the global workspace manager.

    NOTE(review): extra positional *args are accepted but deliberately
    not forwarded to the parent, matching the existing behavior.
    """
    super(WorkspaceManagerSpec, self).__init__(name, **kwargs)
    self.workspace_manager = CoreServices.workspace_manager()
def spec_handler(self, parser, args):
    """Execute plugin's main playbook.

    if "--generate-answers-file": only generate answers file
    if "--dry-run": only generate vars dict
    else: run Ansible with vars dict as input
    if "-o": write vars dict to file

    :param parser: argparse object
    :param args: dict, input arguments as parsed by the parser.
    :return:
        * Ansible exit code if ansible is executed.
        * None if "--generate-answers-file" or "--dry-run" answers file is
          generated
    """
    workspace_manager = CoreServices.workspace_manager()
    active_workspace = workspace_manager.get_active_workspace()
    if not active_workspace:
        active_workspace = workspace_manager.create()
        workspace_manager.activate(active_workspace.name)
        # FIX: Logger.warn is a deprecated alias of Logger.warning
        # (removed in Python 3.13); the sibling handlers already use it.
        LOG.warning("There are no workspaces. New workspace added: %s",
                    active_workspace.name)
    # TODO(yfried): when accepting inventory from CLI, need to update:
    # workspace.inventory = CLI[inventory]
    if self.specification is None:
        # FIXME(yfried): Create a proper exception type
        raise Exception("Unable to create specification "
                        "for '{}' plugin. Check plugin "
                        "config and settings folders".format(self.name))
    parsed_args = self.specification.parse_args(parser, args)
    if parsed_args is None:
        # Answers file was generated; nothing to execute.
        return None
    # unpack parsed arguments
    nested_args, control_args = parsed_args
    if control_args.get('debug', None):
        logger.LOG.setLevel(logging.DEBUG)
    vars_dict = VarsDictManager.generate_settings(
        # TODO(yfried): consider whether to use type (for legacy) or name
        self.plugin.config["plugin_type"],
        nested_args,
    )
    VarsDictManager.merge_extra_vars(vars_dict,
                                     control_args.get('extra-vars'))
    LOG.debug("Dumping vars dict...")
    vars_yaml = yaml.safe_dump(vars_dict, default_flow_style=False)
    output_filename = control_args.get("output")
    if output_filename:
        # "-o": write the vars dict to the requested file ...
        LOG.debug("Output file: {}".format(output_filename))
        with open(output_filename, 'w') as output_file:
            output_file.write(vars_yaml)
    else:
        # ... otherwise echo it to stdout.
        print(vars_yaml)
    if control_args.get("dry-run"):
        return None
    result = execute.ansible_playbook(
        inventory=active_workspace.inventory,
        playbook_path=self.plugin.playbook,
        verbose=control_args.get('verbose', None),
        extra_vars=vars_dict,
        ansible_args=control_args.get('ansible-args', None))
    return result
def spec_handler(self, parser, args):
    """Execute plugin's main playbook.

    if "--generate-answers-file": only generate answers file
    if "--dry-run": only generate vars dict
    else: run Ansible with vars dict as input
    if "-o": write vars dict to file

    :param parser: argparse object
    :param args: dict, input arguments as parsed by the parser.
    :return:
        * Ansible exit code if ansible is executed.
        * None if "--generate-answers-file" or "--dry-run" answers file is
          generated
    """
    workspace_manager = CoreServices.workspace_manager()
    active_workspace = workspace_manager.get_active_workspace()
    if not active_workspace:
        active_workspace = workspace_manager.create()
        workspace_manager.activate(active_workspace.name)
        LOG.warning("There are no workspaces. New workspace added: %s",
                    active_workspace.name)
    # TODO(yfried): when accepting inventory from CLI, need to update:
    # workspace.inventory = CLI[inventory]
    if self.specification is None:
        # FIXME(yfried): Create a proper exception type
        raise Exception("Unable to create specification "
                        "for '{}' plugin. Check plugin "
                        "config and settings folders".format(self.name))
    parsed_args = self.specification.parse_args(parser, args)
    if parsed_args is None:
        # Answers file was generated; nothing to execute.
        return None
    # unpack parsed arguments
    nested_args, control_args, custom_args = parsed_args
    if control_args.get('debug', None):
        logger.LOG.setLevel(logging.DEBUG)
    vars_dict = VarsDictManager.generate_settings(
        # TODO(yfried): consider whether to use type (for legacy) or name
        self.plugin.type,
        nested_args,
    )
    # Update vars_dict with custom ansible variables (if needed)
    vars_dict.update(custom_args)
    VarsDictManager.merge_extra_vars(vars_dict,
                                     control_args.get('extra-vars'))
    LOG.debug("Dumping vars dict...")
    vars_yaml = yaml.safe_dump(vars_dict, default_flow_style=False)
    output_filename = control_args.get("output")
    if output_filename:
        LOG.debug("Output file: {}".format(output_filename))
        with open(output_filename, 'w') as output_file:
            output_file.write(vars_yaml)
    else:
        print(vars_yaml)
    if control_args.get("dry-run"):
        return None
    # register plugins_dir path otherwise roles introduced by the plugin
    # are not found during the plugin execution
    # save the current ANSIBLE_ROLES_PATH so that it can be restored later
    ansible_roles_path = os.environ.get('ANSIBLE_ROLES_PATH', '')
    if self.plugin.roles_path:
        # check whether the path defined by user exists
        role_path = os.path.join(self.plugin.path, self.plugin.roles_path)
        if not os.path.exists(role_path):
            LOG.warning("Plugin's config.role_path: %s, doesn't exist",
                        role_path)
        # roles path points to the dir which contains installed plugins
        roles_path = os.path.join(role_path, '../')
        if ansible_roles_path:
            new_path = ':'.join([ansible_roles_path, roles_path])
        else:
            new_path = roles_path
        os.environ['ANSIBLE_ROLES_PATH'] = new_path
    try:
        result = execute.ansible_playbook(
            ir_workspace=active_workspace,
            ir_plugin=self.plugin,
            playbook_path=self.plugin.playbook,
            verbose=control_args.get('verbose', None),
            extra_vars=vars_dict,
            ansible_args=control_args.get('ansible-args', None))
    finally:
        # FIX: restore the original ANSIBLE_ROLES_PATH even if the
        # playbook run raises; otherwise the modified value leaks into
        # the rest of the process.
        # NOTE(review): an originally-unset variable is restored as '' -
        # this preserves the previous behavior, but verify callers don't
        # distinguish unset from empty.
        os.environ['ANSIBLE_ROLES_PATH'] = ansible_roles_path
    return result