Example #1
def main(args=None):
    # configure core services
    CoreServices.setup('infrared.cfg')

    # Init Managers
    plugin_manager = CoreServices.plugins_manager()

    specs_manager = api.SpecManager()

    specs_manager.register_spec(
        WorkspaceManagerSpec('workspace',
                             description="Workspace manager. "
                                         "Allows to create and use an "
                                         "isolated environment for plugins "
                                         "execution."))
    specs_manager.register_spec(
        PluginManagerSpec('plugin',
                          plugin_manager=plugin_manager,
                          description="Plugin management"))

    specs_manager.register_spec(
        SSHSpec(
            'ssh',
            description="Interactive ssh session to node from inventory."))

    # register all plugins
    for plugin in plugin_manager.PLUGINS_DICT.values():
        specs_manager.register_spec(api.InfraredPluginsSpec(plugin))

    return specs_manager.run_specs(args) or 0
Example #2
def main(args=None):
    CoreServices.setup()

    # inject existing libraries.
    # Because of this, all Ansible modules must be imported after this point.
    CoreServices.dependency_manager().inject_libraries()

    specs_manager = api.SpecManager()

    # Init Managers
    specs_manager.register_spec(
        WorkspaceManagerSpec('workspace',
                             description="Workspace manager. "
                                         "Allows to create and use an "
                                         "isolated environment for plugins "
                                         "execution."))
    specs_manager.register_spec(
        PluginManagerSpec('plugin',
                          description="Plugin management"))

    specs_manager.register_spec(
        SSHSpec(
            'ssh',
            description="Interactive ssh session to node from inventory."))

    # register all plugins
    for plugin in CoreServices.plugins_manager().PLUGINS_DICT.values():
        specs_manager.register_spec(api.InfraredPluginsSpec(plugin))

    argcomplete.autocomplete(specs_manager.parser)
    return specs_manager.run_specs(args) or 0
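Both main() entry points above return an exit status (run_specs(args) or 0). A minimal console wrapper, sketched here on the assumption that one of these main() definitions is importable from the executing module, simply hands that status to sys.exit:

import sys

if __name__ == '__main__':
    # Propagate the exit status returned by main() to the shell.
    sys.exit(main())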
Example #3
def workspace_manager_fixture(tmpdir_factory):
    """Sets the default workspace direcotry to the temporary one. """

    temp_workspace_dir = tmpdir_factory.mktemp('pmtest')
    workspace_manager = workspaces.WorkspaceManager(str(temp_workspace_dir))
    from infrared.core.services import CoreServices
    CoreServices.register_service("workspace_manager", workspace_manager)
    yield workspace_manager
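A hypothetical test that consumes the fixture above, assuming workspace_manager_fixture is registered with @pytest.fixture in a conftest.py (the decorator is not shown in the excerpt):

def test_no_active_workspace_by_default(workspace_manager_fixture):
    # Assumption: a freshly created temporary workspace directory starts
    # with no active workspace, so the manager returns None.
    assert workspace_manager_fixture.get_active_workspace() is None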
Example #4
def node_list(**kwargs):
    """Return node names list for active workspace"""
    workspace_manager = CoreServices.workspace_manager()
    ws = workspace_manager.get_active_workspace()
    nodes = [node[0] for node in workspace_manager.node_list(
        workspace_name=ws.name)]
    return nodes
Example #5
def test_install_dependencies_already_exist(plugin_manager_fixture):
    """
    Test that existing plugins are skipped and not installed again
    :param plugin_manager_fixture: Fixture object which yields
    InfraredPluginManager object
    """

    plugin_manager = plugin_manager_fixture()
    plugin_dir = "plugin_with_dependencies"
    plugin_dict = get_plugin_spec_flatten_dict(
        os.path.join(SAMPLE_PLUGINS_DIR, plugin_dir))

    # set the expected dictionary of installed plugins
    expected_installed_plugins = {
        plugin_dict["name"]: {
            "src": plugin_dict["dir"]
        }
    }

    # validates that the plugin is not in the configuration file at the beginning
    validate_plugins_presence_in_conf(
        plugin_manager, expected_installed_plugins, present=False)

    # add the plugin with its dependencies
    plugin_manager.add_plugin(plugin_source=plugin_dict["dir"])

    # add the same dependency one more time
    assert CoreServices.dependency_manager()._install_local_dependency(
        PluginDependency(plugin_dict['dependencies'][0])) is False
Example #6
def test_infrared_home_dir(infrared_home):
    test_settings = CoreSettings()
    test_settings.install_plugin_at_start = False
    CoreServices.setup(test_settings)

    assert os.path.isdir(infrared_home)
    assert os.path.isdir(os.path.join(infrared_home, '.workspaces'))
    assert os.path.isdir(os.path.join(infrared_home, '.library'))
    assert os.path.isfile(os.path.join(infrared_home, '.plugins.ini'))
    assert os.path.isdir(os.path.join(infrared_home, 'plugins'))
    assert CoreServices.workspace_manager().workspace_dir == os.path.join(
        infrared_home, '.workspaces')
    assert CoreServices.plugins_manager().config_file == os.path.join(
        infrared_home, '.plugins.ini')
    assert CoreServices.plugins_manager().plugins_dir == os.path.join(
        infrared_home, 'plugins')
Example #7
def plugin_list(**kwargs):
    """Return plugins names list"""
    plugin_manager = CoreServices.plugins_manager()
    installed_plugins = plugin_manager.get_installed_plugins()
    completions = []
    for ptype, plugin in installed_plugins.items():
        completions.extend(plugin.keys())
    return completions
Example #8
def group_list(**kwargs):
    """Return node groups names list for workspace"""

    workspace_manager = CoreServices.workspace_manager()
    ws_name = (kwargs["parsed_args"].name or
               workspace_manager.get_active_workspace().name)
    return [group[0] for group in workspace_manager.group_list(
        workspace_name=ws_name)]
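Helpers such as group_list receive keyword arguments like parsed_args, which matches the calling convention of argcomplete completers (argcomplete.autocomplete appears in Example #2). A hedged sketch of how such a completer might be wired to an argument; the 'name' and '--group' arguments here are hypothetical:

import argparse
import argcomplete

parser = argparse.ArgumentParser()
parser.add_argument('name', nargs='?')  # workspace name read via parsed_args.name
# argcomplete invokes the completer with prefix, action, parser and parsed_args.
parser.add_argument('--group').completer = group_list
argcomplete.autocomplete(parser)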
Example #9
def ssh_to_host(hostname, remote_command=None):
    """ Compose cmd string of ssh and execute

        Uses Ansible to parse inventory file, gets ssh connection options
        :param hostname: str. Hostname from inventory
    """

    workspace_manager = CoreServices.workspace_manager()
    workspace = workspace_manager.get_active_workspace()
    if workspace is None:
        raise exceptions.IRNoActiveWorkspaceFound()
    inventory_file = workspace.inventory

    from ansible.parsing.dataloader import DataLoader
    from ansible.inventory.manager import InventoryManager
    invent = InventoryManager(DataLoader(), sources=inventory_file)

    host = invent.get_host(hostname)
    if host is None:
        raise exceptions.IRSshException(
            "Host {} is not in inventory {}".format(hostname, inventory_file))

    if _get_magic_var(host, "connection") == "local":
        raise exceptions.IRSshException("Only ssh transport acceptable.")

    cmd = " ".join(["ssh {priv_key} {comm_args}",
                    "{extra_args} -p {port} -t {user}@{host}"])

    cmd_fields = {}
    cmd_fields["user"] = _get_magic_var(host, "remote_user", default="root")
    cmd_fields["port"] = _get_magic_var(host, "port", default=22)
    cmd_fields["host"] = _get_magic_var(host, "remote_addr")

    priv_key = _get_magic_var(host, "private_key_file")
    # NOTE(yfried):
    # ssh client needs the key to be in the directory you're running from
    # ('ssh -i id_rsa ...') or to be provided by an absolute path.
    # Assume paths are relative to the inventory file.
    abspath = os.path.join(os.path.abspath(os.path.dirname(inventory_file)),
                           priv_key)
    priv_key = abspath if os.path.exists(abspath) else priv_key

    cmd_fields["priv_key"] = "-i {}".format(priv_key if priv_key else "")

    cmd_fields["comm_args"] = _get_magic_var(host, "ssh_common_args")
    cmd_fields["extra_args"] = _get_magic_var(host, "ssh_extra_args")

    LOG.debug("Establishing ssh connection to {}".format(cmd_fields["host"]))

    compiled_cmd = cmd.format(**cmd_fields)
    if remote_command is not None:
        compiled_cmd = " ".join(
            [compiled_cmd, '"{}"'.format(remote_command)])

    result = os.WEXITSTATUS(os.system(compiled_cmd))
    LOG.debug("Connection to {} closed".format(cmd_fields["host"]))
    return result
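ssh_to_host runs the composed command with os.system(), which on POSIX returns a wait status rather than the plain exit code; os.WEXITSTATUS() extracts the exit code from that status. A small standalone illustration:

import os

# os.system() returns an encoded wait status on POSIX systems;
# WEXITSTATUS() recovers the shell command's actual exit code.
status = os.system('exit 3')
print(os.WEXITSTATUS(status))  # prints 3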
Example #10
    def plugin_manager_helper(plugins_conf_dict=None):

        if plugins_conf_dict is None:
            plugins_conf_dict = {}

        plugins_conf_dict.update(SUPPORTED_TYPES_DICT)

        with lp_file.open(mode='w') as fp:
            config = ConfigParser.ConfigParser()
            for section, section_data in plugins_conf_dict.items():
                config.add_section(section)
                for option, value in section_data.items():
                    config.set(section, option, value)
            config.write(fp)

        # replace core service with our test service
        CoreServices.register_service(ServiceName.PLUGINS_MANAGER,
                                      InfraredPluginManager(lp_file.strpath))
        return CoreServices.plugins_manager()
Example #11
    def plugin_manager_helper(plugins_conf_dict=None):

        if plugins_conf_dict is None:
            plugins_conf_dict = {}

        plugins_conf_dict.update(SUPPORTED_TYPES_DICT)

        with lp_file.open(mode='w') as fp:
            config = configparser.ConfigParser()
            for section, section_data in plugins_conf_dict.items():
                config.add_section(section)
                for option, value in section_data.items():
                    config.set(section, option, value)
            config.write(fp)

        CoreServices.register_service(
            ServiceName.PLUGINS_MANAGER,
            InfraredPluginManager(lp_file.strpath,
                                  os.path.join(lp_file.dirname, "plugins")))
        return CoreServices.plugins_manager()
Example #12
    def run_specs(self, args=None):
        spec_args = vars(self.parser.parse_args(args))
        subcommand = spec_args.get('subcommand', '')
        if not spec_args.get('no_log_commands'):
            if self.execution_logger is None:
                self.execution_logger = CoreServices.execution_logger_manager()
            self.execution_logger.command()

        if subcommand in self.spec_objects:
            return self.spec_objects[subcommand].spec_handler(
                self.parser, args=args)
Example #13
    def resolve(self, value):
        """Set active workspace's inventory

        Assumes active workspace exists, as ComplexType objects are resolved
        after workspace is verified.

        Calls workspace.inventory setter. See source for more information.

        :param value: path to inventory file.
        """
        CoreServices.workspace_manager()\
            .get_active_workspace().inventory = value
Example #14
    def resolve(self, value):
        """Set active workspace's inventory

        Assumes active workspace exists, as ComplexType objects are resolved
        after workspace is verified.

        Calls workspace.inventory setter. See source for more information.

        :param value: path to inventory file.
        """
        CoreServices.workspace_manager()\
            .get_active_workspace().inventory = value
Example #15
    def plugin_manager_helper(plugins_conf_dict=None):

        if plugins_conf_dict is None:
            plugins_conf_dict = {}

        plugins_conf_dict.update(SUPPORTED_TYPES_DICT)

        with lp_file.open(mode='w') as fp:
            config = ConfigParser.ConfigParser()
            for section, section_data in plugins_conf_dict.items():
                config.add_section(section)
                for option, value in section_data.items():
                    config.set(section, option, value)
            config.write(fp)

        # replace core service with our test service
        # dependency manager will live in the temp folder
        # so we can keep it unmocked.
        CoreServices.register_service(
            ServiceName.DEPENDENCY_MANAGER, PluginDependencyManager(
                os.path.join(lp_file.dirname, ".library")))
        CoreServices.register_service(
            ServiceName.PLUGINS_MANAGER, InfraredPluginManager(
                lp_file.strpath, CoreServices.dependency_manager(),
                os.path.join(lp_file.dirname, "plugins")))
        return CoreServices.plugins_manager()
Example #16
    def plugin_manager_helper(plugins_conf_dict=None):

        if plugins_conf_dict is None:
            plugins_conf_dict = {}

        plugins_conf_dict.update(SUPPORTED_TYPES_DICT)

        with lp_file.open(mode='w') as fp:
            config = ConfigParser.ConfigParser()
            for section, section_data in plugins_conf_dict.items():
                config.add_section(section)
                for option, value in section_data.items():
                    config.set(section, option, value)
            config.write(fp)

        # replace core service with our test service
        # dependency manager will live in the temp folder
        # so we can keep it unmocked.
        CoreServices.register_service(
            ServiceName.DEPENDENCY_MANAGER,
            PluginDependencyManager(os.path.join(lp_file.dirname, ".library")))
        CoreServices.register_service(
            ServiceName.PLUGINS_MANAGER,
            InfraredPluginManager(lp_file.strpath,
                                  CoreServices.dependency_manager(),
                                  os.path.join(lp_file.dirname, "plugins")))
        return CoreServices.plugins_manager()
Example #17
    def parse_cli_input(cls, arg_parser, args=None):
        """Parse CLI input.

        :param arg_parser: argparse object
        :param args: replace sys.argv[1:]
        :return: dict. Parsed CLI input
        """

        parse_args, unknown_args = arg_parser.parse_known_args(args)
        # todo(obaranov) Pass all the unknown arguments to the ansible
        # For now just raise exception
        if unknown_args:
            raise exceptions.IRUnrecognizedOptionsException(unknown_args)

        parse_args = parse_args.__dict__

        # Save all command line arguments to a file
        all_argument_file = CoreServices.workspace_manager().get_active_workspace().path + '/' \
            + parse_args['subcommand'] + '_all_argument_file.txt'
        with open(all_argument_file, 'w') as file:
            for arg in parse_args:
                if 'subcommand' in arg:
                    arg_name = arg[10:]
                    if arg_name == '':
                        data = 'plugin' + ': ' + str(parse_args[arg])
                    else:
                        data = arg_name + ': ' + str(parse_args[arg])
                    file.write(data)
                    file.write('\n')

        # move sub commands to the nested dicts
        result = collections.defaultdict(dict)
        expr = '^(?P<subcmd_name>subcommand)+(?P<arg_name>.*)$$'
        for arg, value in parse_args.items():
            if value is None:
                continue
            match = re.search(expr, arg)
            if match and match.groupdict().get('subcmd_name', None) \
                    and not match.groupdict().get('arg_name', None):
                # create empty nested dict for subcommands
                if value not in result:
                    result[value] = {}
            if match and match.groupdict().get('arg_name', None):
                # we have subcommand. put it as nested dict
                arg_name = match.group('arg_name')
                cmd_name = match.group('subcmd_name')
                sub_name = parse_args[cmd_name]
                result[sub_name][arg_name] = value

        return result
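The regular expression above separates the literal 'subcommand' prefix from the remainder of each flattened argument name, so arguments can be regrouped under their sub-command. A quick standalone check with hypothetical keys:

import re

expr = '^(?P<subcmd_name>subcommand)+(?P<arg_name>.*)$$'
for key in ('subcommand', 'subcommand_host'):
    match = re.search(expr, key)
    # 'subcommand' alone yields an empty arg_name; anything after the
    # prefix (here '_host') is captured as the argument name.
    print(match.group('subcmd_name'), repr(match.group('arg_name')))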
Example #18
def ssh_to_host(hostname, remote_command=None):
    """ Compose cmd string of ssh and execute

        Uses Ansible to parse inventory file, gets ssh connection options
        :param hostname: str. Hostname from inventory
    """

    workspace_manager = CoreServices.workspace_manager()
    workspace = workspace_manager.get_active_workspace()
    if workspace is None:
        raise exceptions.IRNoActiveWorkspaceFound()
    inventory_file = workspace.inventory

    invent = inventory.Inventory(DataLoader(),
                                 VariableManager(),
                                 host_list=inventory_file)

    host = invent.get_host(hostname)
    if host is None:
        raise exceptions.IRSshException(
            "Host {} is not in inventory {}".format(hostname, inventory_file))

    if _get_magic_var(host, "connection") == "local":
        raise exceptions.IRSshException("Only ssh transport acceptable.")

    cmd = " ".join([
        "ssh {priv_key} {comm_args}", "{extra_args} -p {port} -t {user}@{host}"
    ])

    cmd_fields = {}
    cmd_fields["user"] = _get_magic_var(host, "remote_user", default="root")
    cmd_fields["port"] = _get_magic_var(host, "port", default=22)
    cmd_fields["host"] = _get_magic_var(host, "remote_addr")

    priv_key = _get_magic_var(host, "private_key_file")
    cmd_fields["priv_key"] = "-i {}".format(priv_key if priv_key else "")

    cmd_fields["comm_args"] = _get_magic_var(host, "ssh_common_args")
    cmd_fields["extra_args"] = _get_magic_var(host, "ssh_extra_args")

    LOG.debug("Establishing ssh connection to {}".format(cmd_fields["host"]))

    compiled_cmd = cmd.format(**cmd_fields)
    if remote_command is not None:
        compiled_cmd = " ".join([compiled_cmd, '"{}"'.format(remote_command)])

    os.system(compiled_cmd)

    LOG.debug("Connection to {} closed".format(cmd_fields["host"]))
Example #19
def ssh_to_host(hostname, remote_command=None):
    """ Compose cmd string of ssh and execute

        Uses Ansible to parse inventory file, gets ssh connection options
        :param hostname: str. Hostname from inventory
    """

    workspace_manager = CoreServices.workspace_manager()
    workspace = workspace_manager.get_active_workspace()
    if workspace is None:
        raise exceptions.IRNoActiveWorkspaceFound()
    inventory_file = workspace.inventory

    invent = inventory.Inventory(DataLoader(), VariableManager(),
                                 host_list=inventory_file)

    host = invent.get_host(hostname)
    if host is None:
        raise exceptions.IRSshException(
            "Host {} is not in inventory {}".format(hostname, inventory_file))

    if _get_magic_var(host, "connection") == "local":
        raise exceptions.IRSshException("Only ssh transport acceptable.")

    cmd = " ".join(["ssh {priv_key} {comm_args}",
                    "{extra_args} -p {port} -t {user}@{host}"])

    cmd_fields = {}
    cmd_fields["user"] = _get_magic_var(host, "remote_user", default="root")
    cmd_fields["port"] = _get_magic_var(host, "port", default=22)
    cmd_fields["host"] = _get_magic_var(host, "remote_addr")

    priv_key = _get_magic_var(host, "private_key_file")
    cmd_fields["priv_key"] = "-i {}".format(priv_key if priv_key else "")

    cmd_fields["comm_args"] = _get_magic_var(host, "ssh_common_args")
    cmd_fields["extra_args"] = _get_magic_var(host, "ssh_extra_args")

    LOG.debug("Establishing ssh connection to {}".format(cmd_fields["host"]))

    compiled_cmd = cmd.format(**cmd_fields)
    if remote_command is not None:
        compiled_cmd = " ".join(
            [compiled_cmd, '"{}"'.format(remote_command)])

    os.system(compiled_cmd)

    LOG.debug("Connection to {} closed".format(cmd_fields["host"]))
Example #20
def test_install_dependencies(plugin_manager_fixture):
    """
    Test installing plugin dependencies
    Validate that the plugin's dependencies were installed
    :param plugin_manager_fixture: Fixture object which yields
    InfraredPluginManager object
    """
    plugin_manager = plugin_manager_fixture()
    plugin_dir = "plugin_with_dependencies"

    plugin_dict = get_plugin_spec_flatten_dict(
        os.path.join(SAMPLE_PLUGINS_DIR, plugin_dir))

    # set the expected dictionary of installed plugins
    expected_installed_plugins = {
        plugin_dict["name"]: {
            "src": plugin_dict["dir"]
        }
    }

    # validates that the plugin is not in the configuration file at the beginning
    validate_plugins_presence_in_conf(plugin_manager,
                                      expected_installed_plugins,
                                      present=False)

    # add the plugin with its dependencies
    plugin_manager.add_plugin(plugin_source=plugin_dict["dir"])

    # validates that the plugin is in the config file after adding the plugin
    validate_plugins_presence_in_conf(plugin_manager,
                                      expected_installed_plugins,
                                      present=True)

    # check that copytree tried to copy the dependency to the library folder
    expected_dependency_dir = os.path.join(
        CoreServices.dependency_manager().library_root_dir,
        os.path.basename(plugin_dict["dependencies"][0]["source"]))
    assert os.path.isdir(expected_dependency_dir)
    assert os.path.isdir(
        os.path.join(expected_dependency_dir, 'callback_plugins'))
    assert os.path.isdir(
        os.path.join(expected_dependency_dir, 'filter_plugins'))
    assert os.path.isdir(os.path.join(expected_dependency_dir, 'library'))
    assert os.path.isdir(os.path.join(expected_dependency_dir, 'roles'))
Example #21
def test_install_dependencies(plugin_manager_fixture):
    """
    Test installing plugin dependencies
    Validate that the plugin's dependencies were installed
    :param plugin_manager_fixture: Fixture object which yields
    InfraredPluginManager object
    """
    plugin_manager = plugin_manager_fixture()
    plugin_dir = "plugin_with_dependencies"

    plugin_dict = get_plugin_spec_flatten_dict(
        os.path.join(SAMPLE_PLUGINS_DIR, plugin_dir))

    # set the expected dictionary of installed plugins
    expected_installed_plugins = {
        plugin_dict["name"]: {
            "src": plugin_dict["dir"]
        }
    }

    # validates that the plugin is not in the configuration file at the beginning
    validate_plugins_presence_in_conf(
        plugin_manager, expected_installed_plugins, present=False)

    # add the plugin with its dependencies
    plugin_manager.add_plugin(plugin_source=plugin_dict["dir"])

    # validates that the plugin is in the config file after adding the plugin
    validate_plugins_presence_in_conf(
        plugin_manager, expected_installed_plugins, present=True)

    # check that copytree tried to copy the dependency to the library folder
    expected_dependency_dir = os.path.join(
        CoreServices.dependency_manager().library_root_dir,
        os.path.basename(plugin_dict["dependencies"][0]["source"]))
    assert os.path.isdir(expected_dependency_dir)
    assert os.path.isdir(os.path.join(
        expected_dependency_dir, 'callback_plugins'))
    assert os.path.isdir(os.path.join(
        expected_dependency_dir, 'filter_plugins'))
    assert os.path.isdir(os.path.join(
        expected_dependency_dir, 'library'))
    assert os.path.isdir(os.path.join(
        expected_dependency_dir, 'roles'))
Example #22
def test_infrared_home_dir(infrared_home):
    os.environ['ANSIBLE_CONFIG'] = os.path.join(infrared_home, 'ansible.cfg')
    test_settings = CoreSettings()
    test_settings.install_plugin_at_start = False
    CoreServices.setup(test_settings)

    assert os.path.isdir(infrared_home)
    assert os.path.isdir(os.path.join(infrared_home, '.workspaces'))
    assert os.path.isfile(os.path.join(infrared_home, '.plugins.ini'))
    assert os.path.isdir(os.path.join(infrared_home, 'plugins'))
    assert os.path.isfile(os.path.join(infrared_home, 'ansible.cfg'))
    assert CoreServices.workspace_manager().workspace_dir == os.path.join(
        infrared_home, '.workspaces')
    assert CoreServices.plugins_manager().config_file == os.path.join(
        infrared_home, '.plugins.ini')
    assert CoreServices.plugins_manager().plugins_dir == os.path.join(
        infrared_home, 'plugins')
    assert CoreServices.ansible_config_manager().ansible_config_path == os.path.join(
        infrared_home, 'ansible.cfg')
Example #23
def test_infrared_home_dir(infrared_home):
    os.environ['ANSIBLE_CONFIG'] = os.path.join(infrared_home, 'ansible.cfg')
    test_settings = CoreSettings()
    test_settings.install_plugin_at_start = False
    CoreServices.setup(test_settings)

    assert os.path.isdir(infrared_home)
    assert os.path.isdir(os.path.join(infrared_home, '.workspaces'))
    assert os.path.isfile(os.path.join(infrared_home, '.plugins.ini'))
    assert os.path.isdir(os.path.join(infrared_home, 'plugins'))
    assert os.path.isfile(os.path.join(infrared_home, 'ansible.cfg'))
    assert CoreServices.workspace_manager().workspace_dir == os.path.join(
        infrared_home, '.workspaces')
    assert CoreServices.plugins_manager().config_file == os.path.join(
        infrared_home, '.plugins.ini')
    assert CoreServices.plugins_manager().plugins_dir == os.path.join(
        infrared_home, 'plugins')
    assert CoreServices.ansible_config_manager().ansible_config_path == os.path.join(
        infrared_home, 'ansible.cfg')
Example #24
def workspace_list(**kwargs):
    """Return workspace names list"""
    workspace_manager = CoreServices.workspace_manager()
    return [ws.name for ws in workspace_manager.list()]
Example #25
    def spec_handler(self, parser, args):
        """Execute plugin's main playbook.

        if "--generate-answers-file":
            only generate answers file
        if "--dry-run":
            only generate vars dict
        else:
            run Ansible with vars dict as input
        if "-o":
            write vars dict to file

        :param parser: argparse object
        :param args: dict, input arguments as parsed by the parser.
        :return:
            * Ansible exit code if ansible is executed.
            * None if "--generate-answers-file" or "--dry-run" answers file is
              generated
        """
        workspace_manager = CoreServices.workspace_manager()

        active_workspace = workspace_manager.get_active_workspace()
        if not active_workspace:
            active_workspace = workspace_manager.create()
            workspace_manager.activate(active_workspace.name)
            LOG.warning("There are no workspaces. New workspace added: %s",
                        active_workspace.name)

        # TODO(yfried): when accepting inventory from CLI, need to update:
        # workspace.inventory = CLI[inventory]

        if self.specification is None:
            # FIXME(yfried): Create a proper exception type
            raise Exception("Unable to create specification "
                            "for '{}' plugin. Check plugin "
                            "config and settings folders".format(self.name))
        parsed_args = self.specification.parse_args(parser, args)
        if parsed_args is None:
            return None

        # unpack parsed arguments
        nested_args, control_args, custom_args = parsed_args

        if control_args.get('debug', None):
            logger.LOG.setLevel(logging.DEBUG)

        vars_dict = VarsDictManager.generate_settings(
            # TODO(yfried): consider whether to use type (for legacy) or name
            self.plugin.type,
            nested_args,
        )

        # Update vars_dict with custom ansible variables (if needed)
        vars_dict.update(custom_args)

        VarsDictManager.merge_extra_vars(vars_dict,
                                         control_args.get('extra-vars'))

        LOG.debug("Dumping vars dict...")
        vars_yaml = yaml.safe_dump(vars_dict,
                                   default_flow_style=False)
        output_filename = control_args.get("output")
        if output_filename:
            LOG.debug("Output file: {}".format(output_filename))
            with open(output_filename, 'w') as output_file:
                output_file.write(vars_yaml)
        else:
            print(vars_yaml)
        if control_args.get("dry-run"):
            return None

        result = execute.ansible_playbook(
            inventory=active_workspace.inventory,
            playbook_path=self.plugin.playbook,
            verbose=control_args.get('verbose', None),
            extra_vars=vars_dict,
            ansible_args=control_args.get('ansible-args', None))
        return result
Example #26
def workspace_list(**kwargs):
    """Return workspace names list"""
    workspace_manager = CoreServices.workspace_manager()
    return [ws.name for ws in workspace_manager.list()]
Example #27
    def __init__(self, name, *args, **kwargs):
        super(WorkspaceManagerSpec, self).__init__(name, **kwargs)
        self.workspace_manager = CoreServices.workspace_manager()
Example #28
    def __init__(self, name, *args, **kwargs):
        super(PluginManagerSpec, self).__init__(name, *args, **kwargs)
        self.plugin_manager = CoreServices.plugins_manager()
Example #29
    def spec_handler(self, parser, args):
        """Execute plugin's main playbook.

        if "--generate-answers-file":
            only generate answers file
        if "--dry-run":
            only generate vars dict
        else:
            run Ansible with vars dict as input
        if "-o":
            write vars dict to file

        :param parser: argparse object
        :param args: dict, input arguments as parsed by the parser.
        :return:
            * Ansible exit code if ansible is executed.
            * None if "--generate-answers-file" or "--dry-run" answers file is
              generated
        """
        workspace_manager = CoreServices.workspace_manager()

        active_workspace = workspace_manager.get_active_workspace()
        if not active_workspace:
            active_workspace = workspace_manager.create()
            workspace_manager.activate(active_workspace.name)
            LOG.warn("There are no workspaces. New workspace added: %s",
                     active_workspace.name)

        # TODO(yfried): when accepting inventory from CLI, need to update:
        # workspace.inventory = CLI[inventory]

        if self.specification is None:
            # FIXME(yfried): Create a proper exception type
            raise Exception("Unable to create specification "
                            "for '{}' plugin. Check plugin "
                            "config and settings folders".format(self.name))
        parsed_args = self.specification.parse_args(parser, args)
        if parsed_args is None:
            return None

        # unpack parsed arguments
        nested_args, control_args = parsed_args

        if control_args.get('debug', None):
            logger.LOG.setLevel(logging.DEBUG)

        vars_dict = VarsDictManager.generate_settings(
            # TODO(yfried): consider whether to use type (for legacy) or name
            self.plugin.config["plugin_type"],
            nested_args,
        )

        VarsDictManager.merge_extra_vars(vars_dict,
                                         control_args.get('extra-vars'))

        LOG.debug("Dumping vars dict...")
        vars_yaml = yaml.safe_dump(vars_dict,
                                   default_flow_style=False)
        output_filename = control_args.get("output")
        if output_filename:
            LOG.debug("Output file: {}".format(output_filename))
            with open(output_filename, 'w') as output_file:
                output_file.write(vars_yaml)
        else:
            print(vars_yaml)
        if control_args.get("dry-run"):
            return None

        result = execute.ansible_playbook(
            inventory=active_workspace.inventory,
            playbook_path=self.plugin.playbook,
            verbose=control_args.get('verbose', None),
            extra_vars=vars_dict,
            ansible_args=control_args.get('ansible-args', None))
        return result
Example #30
    def spec_handler(self, parser, args):
        """Execute plugin's main playbook.

        if "--generate-answers-file":
            only generate answers file
        if "--dry-run":
            only generate vars dict
        else:
            run Ansible with vars dict as input
        if "-o":
            write vars dict to file

        :param parser: argparse object
        :param args: dict, input arguments as parsed by the parser.
        :return:
            * Ansible exit code if ansible is executed.
            * None if "--generate-answers-file" or "--dry-run" answers file is
              generated
        """
        workspace_manager = CoreServices.workspace_manager()

        active_workspace = workspace_manager.get_active_workspace()
        if not active_workspace:
            active_workspace = workspace_manager.create()
            workspace_manager.activate(active_workspace.name)
            LOG.warning("There are no workspaces. New workspace added: %s",
                        active_workspace.name)

        # TODO(yfried): when accepting inventory from CLI, need to update:
        # workspace.inventory = CLI[inventory]

        if self.specification is None:
            # FIXME(yfried): Create a proper exception type
            raise Exception("Unable to create specification "
                            "for '{}' plugin. Check plugin "
                            "config and settings folders".format(self.name))
        parsed_args = self.specification.parse_args(parser, args)
        if parsed_args is None:
            return None

        # unpack parsed arguments
        nested_args, control_args, custom_args = parsed_args

        if control_args.get('debug', None):
            logger.LOG.setLevel(logging.DEBUG)

        vars_dict = VarsDictManager.generate_settings(
            # TODO(yfried): consider whether to use type (for legacy) or name
            self.plugin.type,
            nested_args,
        )

        # Update vars_dict with custom ansible variables (if needed)
        vars_dict.update(custom_args)

        VarsDictManager.merge_extra_vars(vars_dict,
                                         control_args.get('extra-vars'))

        LOG.debug("Dumping vars dict...")
        vars_yaml = yaml.safe_dump(vars_dict, default_flow_style=False)
        output_filename = control_args.get("output")
        if output_filename:
            LOG.debug("Output file: {}".format(output_filename))
            with open(output_filename, 'w') as output_file:
                output_file.write(vars_yaml)
        else:
            print(vars_yaml)
        if control_args.get("dry-run"):
            return None

        # register plugins_dir path otherwise roles introduced by the plugin
        # are not found during the plugin execution
        # save the current ANSIBLE_ROLES_PATH so that it can be restored later
        ansible_roles_path = os.environ.get('ANSIBLE_ROLES_PATH', '')
        if self.plugin.roles_path:
            # check whether the path defined by user exists
            role_path = os.path.join(self.plugin.path, self.plugin.roles_path)
            if not os.path.exists(role_path):
                LOG.warning("Plugin's config.role_path: %s, doesn't exist",
                            role_path)
            # roles path points to the dir which contains installed plugins
            roles_path = os.path.join(role_path, '../')
            if ansible_roles_path:
                new_path = ':'.join([ansible_roles_path, roles_path])
            else:
                new_path = roles_path
            os.environ['ANSIBLE_ROLES_PATH'] = new_path

        result = execute.ansible_playbook(
            ir_workspace=active_workspace,
            ir_plugin=self.plugin,
            playbook_path=self.plugin.playbook,
            verbose=control_args.get('verbose', None),
            extra_vars=vars_dict,
            ansible_args=control_args.get('ansible-args', None))

        # restore original ANSIBLE_ROLES_PATH
        os.environ['ANSIBLE_ROLES_PATH'] = ansible_roles_path
        return result
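This variant extends ANSIBLE_ROLES_PATH with the plugin's roles directory before running the playbook and restores the previous value afterwards. A minimal sketch of that pattern with a hypothetical roles directory, using try/finally so the variable is restored even if the playbook call raises (the code above restores it only after a normal return):

import os

original_roles_path = os.environ.get('ANSIBLE_ROLES_PATH', '')
plugin_roles_path = '/path/to/plugin/roles/..'  # hypothetical plugin roles dir
try:
    os.environ['ANSIBLE_ROLES_PATH'] = ':'.join(
        p for p in (original_roles_path, plugin_roles_path) if p)
    # ... invoke the playbook here ...
finally:
    os.environ['ANSIBLE_ROLES_PATH'] = original_roles_path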
Example #31
def ssh_to_host(hostname, remote_command=None):
    """Compose cmd string of ssh and execute

    Uses Ansible to parse inventory file, gets ssh connection options
    :param hostname: str. Hostname from inventory
    """

    workspace_manager = CoreServices.workspace_manager()
    workspace = workspace_manager.get_active_workspace()
    if workspace is None:
        raise exceptions.IRNoActiveWorkspaceFound()
    inventory_file = workspace.inventory

    from ansible.inventory.manager import InventoryManager
    from ansible.parsing.dataloader import DataLoader
    invent = InventoryManager(DataLoader(), sources=inventory_file)

    host = invent.get_host(hostname)
    if host is None:
        raise exceptions.IRSshException(
            "Host {} is not in inventory {}".format(hostname, inventory_file))

    if _get_magic_var(host, "connection") == "local":
        raise exceptions.IRSshException("Only ssh transport acceptable.")

    cmd = " ".join(["ssh {priv_key} {comm_args}",
                    "{extra_args} -p {port} -t {user}@{host}"])

    cmd_fields = {}
    cmd_fields["user"] = _get_magic_var(host, "remote_user", default="root")
    cmd_fields["port"] = _get_magic_var(host, "port", default=22)
    cmd_fields["host"] = _get_magic_var(host, "remote_addr")

    priv_key = _get_magic_var(host, "private_key_file")
    # NOTE(yfried):
    # ssh client needs the key to be in the directory you're running from
    # ('ssh -i id_rsa ...') or to be provided by an absolute path.
    # Assume paths are relative to the inventory file.
    priv_key = os.path.join(os.path.abspath(os.path.dirname(inventory_file)),
                            priv_key)
    if not os.access(priv_key, os.R_OK):
        raise exceptions.IRSshException("Private key file mentioned does not "
                                        "exist: {}".format(priv_key))

    if priv_key:
        cmd_fields["priv_key"] = "-i {}".format(priv_key)
    else:
        cmd_fields["priv_key"] = ""

    cmd_fields["comm_args"] = _get_magic_var(host, "ssh_common_args")
    cmd_fields["extra_args"] = _get_magic_var(host, "ssh_extra_args")

    compiled_cmd = cmd.format(**cmd_fields)
    LOG.debug("Establishing ssh connection to {} using: {}".format(cmd_fields["host"], compiled_cmd))
    if remote_command is not None:
        compiled_cmd = " ".join(
            [compiled_cmd, '"{}"'.format(remote_command)])

    result = os.WEXITSTATUS(os.system(compiled_cmd))
    LOG.debug("Connection to {} closed".format(cmd_fields["host"]))
    return result
Example #32
    def __init__(self, name, *args, **kwargs):
        super(WorkspaceManagerSpec, self).__init__(name, **kwargs)
        self.workspace_manager = CoreServices.workspace_manager()
Example #33
    def __init__(self, name, *args, **kwargs):
        super(PluginManagerSpec, self).__init__(name, *args, **kwargs)
        self.plugin_manager = CoreServices.plugins_manager()