Example #1
class DummyProvisionerPlugin:
    """Dummy provisioner class"""
    def __init__(
        self,
        environment: Environment,
        instance_id: str,
        fixtures: Dict[str, Dict[str, Any]] = None,
    ):
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self.fixtures = Fixtures()
        """This plugin keeps fixtures."""
        if fixtures is not None:
            for child_instance_id, child_instance_dict in fixtures.items():
                child = environment.add_fixture_from_dict(
                    instance_id=child_instance_id,
                    plugin_dict=child_instance_dict)
                self.fixtures.add(child)

    def apply(self):
        """pretend to bring a cluster up"""
        logger.info("%s:execute: apply()", self._instance_id)

    def prepare(self):
        """pretend to prepare the cluster"""
        logger.info("%s:execute: prepare()", self._instance_id)

    def destroy(self):
        """pretend to brind a cluster down"""
        logger.info("%s:execute: apply()", self._instance_id)
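A minimal usage sketch for the dummy provisioner above. The environment object and the shape of the child fixture definition dict are assumptions here (anything environment.add_fixture_from_dict() can consume); only the lifecycle calls and the fixtures attribute come from the class itself.

# Hypothetical usage; `environment` is assumed to be a metta Environment built
# elsewhere, and the child fixture dict shape is illustrative only.
provisioner = DummyProvisionerPlugin(
    environment=environment,
    instance_id="my-dummy-provisioner",
    fixtures={"my-dummy-client": {"plugin_id": "dummy_client"}},
)

# Every lifecycle call just logs, so the whole provisioner flow is safe to run.
provisioner.prepare()
provisioner.apply()
provisioner.destroy()

# Child fixtures registered in the constructor stay retrievable on the plugin.
child = provisioner.fixtures.get_plugin(instance_id="my-dummy-client")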
Example #2
class DummyWorkloadPlugin:
    """Dummy workload class."""
    def __init__(
        self,
        environment: Environment,
        instance_id: str,
        fixtures: Dict[str, Dict[str, Any]] = None,
    ):
        """Set class properties.

        Parameters:
        -----------
        environment (Environment) : Environment in which this plugin exists.

        instance_id (str) : unique identifier for this plugin instance.

        fixtures (dict) : optional fixture definitions which this class will
            turn into child fixtures and make retrievable.

        """
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self.fixtures = Fixtures()
        """This plugin keeps fixtures."""
        if fixtures is not None:
            for child_instance_id, child_instance_dict in fixtures.items():
                child = environment.add_fixture_from_dict(
                    instance_id=child_instance_id,
                    plugin_dict=child_instance_dict)
                self.fixtures.add(child)
Example #3
class DummyClientPlugin:
    """Dummy client class

    As with all dummies, this is a failsafe plugin that should never throw any
    exceptions if used according to metta standards.

    It can be used as a placeholder during development, or it can be used to
    log client events and output for greater development and debugging.

    The client will log any method call, including unknown methods, and so it
    can be used in place of any client if you don't need the methods to return
    anything.
    """
    def __init__(
        self,
        environment: Environment,
        instance_id: str,
        fixtures: Dict[str, Dict[str, Any]] = None,
    ):
        """Sset class properties

        Arguments:
        ----------

        environment (Environment) : Environment in which this plugin exists.

        instance_id (str) : unique identifier for this plugin instance.

        fixtures (dict) : You can pass in some fixture definitions which this
            class will turn into fixtures and make retrievable.  This is a big
            part of the dummy.

        """
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self.fixtures = Fixtures()
        """This plugin keeps fixtures."""
        if fixtures is not None:
            for child_instance_id, child_instance_dict in fixtures.items():
                child = environment.add_fixture_from_dict(
                    instance_id=child_instance_id,
                    plugin_dict=child_instance_dict)
                self.fixtures.add(child)
Example #4
    def _filter(
        self,
        plugin_id: str = "",
        instance_id: str = "",
        interfaces: List[str] = None,
        labels: List[str] = None,
        skip_cli_plugins: bool = True,
    ):
        """Filter fixtures centrally."""
        matches = self._environment.fixtures().filter(plugin_id=plugin_id,
                                                      instance_id=instance_id,
                                                      interfaces=interfaces,
                                                      labels=labels)

        if not skip_cli_plugins:
            return matches

        # filter out cli plugins
        fixtures = Fixtures()
        for fixture in matches:
            if METTA_PLUGIN_INTERFACE_ROLE_CLI not in fixture.interfaces:
                fixtures.add(fixture)
        return fixtures
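A brief sketch of how this helper might be called from the owning class; the interface and instance names used below are placeholders rather than values from the source.

# Hypothetical calls, assuming `self` is the object defining _filter() above.
# Default behaviour: CLI plugins are stripped from the matches.
provisioners = self._filter(interfaces=["provisioner"])

# Pass skip_cli_plugins=False to get the raw filter results, CLI plugins included.
everything = self._filter(instance_id="my-instance", skip_cli_plugins=False)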
Example #5
class TerraformProvisionerPlugin:
    """Terraform provisioner plugin.

    Provisioner plugin that allows control of and interaction with a terraform
    cluster.

    ## Requirements

    1. this plugin uses subprocess to call a terraform binary, so you have to
        install terraform in the environment

    ## Usage

    ### Plan

    The plan must exist somewhere on disk and be accessible.

    You must specify the path and related configuration in config, which are
    read in the .prepare() execution.

    ### Vars/State

    This plugin reads TF vars from config and writes them to a vars file.  We
    could run without relying on a vars file, but having a vars file allows cli
    interaction with the cluster if this plugin messes up.

    You can override where Terraform vars/state files are written to allow
    sharing of a plan across test suites.

    Parameters:
    -----------
    environment (Environment) : All metta plugins receive the environment
        object in which they were created.
    instance_id (str) : all metta plugins receive their own string identity.

    label (str) : Configerus load label for finding plugin config.
    base (str) : Configerus get base key in which the plugin should look for
        all config.

    """
    def __init__(
        self,
        environment: Environment,
        instance_id: str,
        label: str = TERRAFORM_PROVISIONER_CONFIG_LABEL,
        base: Any = LOADED_KEY_ROOT,
    ):
        """Run the super constructor but also set class properties.

        Interpret provided config and configure the object with all of the
        needed pieces for executing terraform commands

        """
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self._config_label = label
        """ configerus load label that should contain all of the config """
        self._config_base = base
        """ configerus get key that should contain all tf config """

        self.fixtures: Fixtures = Fixtures()
        """Children fixtures, typically just the client plugin."""

        # Make the client fixture in the constructor.  The TF client fixture is
        # quite state safe, and should only need to be created once, unlike
        # other provisioner clients which may be vulnerable to state change.
        self.make_fixtures()

    # deep argument is an info() standard across plugins
    # pylint: disable=unused-argument
    def info(self, deep: bool = False):
        """Get info about the provisioner plugin.

        Returns:
        --------
        Dict of keyed introspective information about the plugin.

        """
        terraform_config: Loaded = self._environment.config().load(
            self._config_label)

        info = {
            "config": {
                "label":
                self._config_label,
                "base":
                self._config_base,
                "tfvars":
                terraform_config.get(
                    [
                        self._config_base,
                        TERRAFORM_PROVISIONER_CONFIG_TFVARS_KEY
                    ],
                    default="NONE",
                ),
                "chart_path":
                terraform_config.get(
                    [
                        self._config_base,
                        TERRAFORM_PROVISIONER_CONFIG_CHART_PATH_KEY
                    ],
                    default="MISSING",
                ),
                "state_path":
                terraform_config.get(
                    [
                        self._config_base,
                        TERRAFORM_PROVISIONER_CONFIG_STATE_PATH_KEY
                    ],
                    default="MISSING",
                ),
                "tfvars_path":
                terraform_config.get(
                    [
                        self._config_base,
                        TERRAFORM_PROVISIONER_CONFIG_TFVARS_PATH_KEY
                    ],
                    default="MISSING",
                ),
            },
            "client": {
                "instance_id": self.client_instance_id(),
            },
        }

        return info

    def prepare(self):
        """Run terraform init."""
        logger.info("Running Terraform INIT")
        self._get_client_plugin().init()

    def apply(self, lock: bool = True):
        """Create all terraform resources described in the plan."""
        logger.info("Running Terraform APPLY")
        self._get_client_plugin().apply(lock=lock)

    def destroy(self, lock: bool = True):
        """Remove all terraform resources in state."""
        logger.info("Running Terraform DESTROY")
        self._get_client_plugin().destroy(lock=lock)
        # accessing parent property for clearing out existing output fixtures
        # pylint: disable=attribute-defined-outside-init
        self.fixtures = Fixtures()

    def make_fixtures(self):
        """Make the client plugin for terraform interaction."""
        try:
            terraform_config = self._environment.config().load(
                self._config_label,
                force_reload=True,
                validator=TERRAFORM_VALIDATE_TARGET)
            """ get a configerus LoadedConfig for the label """
        except ValidationError as err:
            raise ValueError("Terraform config failed validation") from err

        try:
            chart_path = terraform_config.get([
                self._config_base, TERRAFORM_PROVISIONER_CONFIG_CHART_PATH_KEY
            ])
            """ subprocess commands for terraform will be run in this path """
        except Exception as err:
            raise ValueError(
                "Plugin config did not give us a working/plan path:"
                f" {terraform_config.get()}") from err

        state_path = terraform_config.get(
            [self._config_base, TERRAFORM_PROVISIONER_CONFIG_STATE_PATH_KEY],
            default=os.path.join(chart_path,
                                 TERRAFORM_PROVISIONER_DEFAULT_STATE_SUBPATH),
        )
        """ terraform state path """

        tfvars = terraform_config.get(
            [self._config_base, TERRAFORM_PROVISIONER_CONFIG_TFVARS_KEY],
            default={},
        )
        """ List of vars to pass to terraform.  Will be written to a file """

        tfvars_path = terraform_config.get(
            [self._config_base, TERRAFORM_PROVISIONER_CONFIG_TFVARS_PATH_KEY],
            default=os.path.join(chart_path,
                                 TERRAFORM_PROVISIONER_DEFAULT_TFVARS_FILE),
        )
        """ vars file which will be written before running terraform """

        logger.debug("Creating Terraform client")

        fixture = self._environment.new_fixture(
            plugin_id=METTA_TERRAFORM_CLIENT_PLUGIN_ID,
            instance_id=self.client_instance_id(),
            priority=70,
            arguments={
                "chart_path": chart_path,
                "state_path": state_path,
                "tfvars": tfvars,
                "tfvars_path": tfvars_path,
            },
            labels={
                "parent_plugin_id": METTA_TERRAFORM_PROVISIONER_PLUGIN_ID,
                "parent_instance_id": self._instance_id,
            },
            replace_existing=True,
        )
        # keep this fixture attached to the provisioner to make it retrievable.
        self.fixtures.add(fixture, replace_existing=True)

    def client_instance_id(self) -> str:
        """Construct an instanceid for the child client plugin."""
        return f"{self._instance_id}-{METTA_TERRAFORM_CLIENT_PLUGIN_ID}"

    def _get_client_plugin(self) -> TerraformClientPlugin:
        """Retrieve the client plugin if we can."""
        return self.fixtures.get_plugin(
            plugin_id=METTA_TERRAFORM_CLIENT_PLUGIN_ID)
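A hedged lifecycle sketch for the terraform provisioner above. It assumes an environment whose config already provides the plan/vars settings under the plugin's config label; the calls themselves mirror the APIs used inside the class.

# Hypothetical workflow; `environment` and its loaded terraform config are assumed.
provisioner = TerraformProvisionerPlugin(
    environment=environment,
    instance_id="my-terraform",
)

# prepare() runs `terraform init` through the child client fixture that the
# constructor created via make_fixtures().
provisioner.prepare()

# apply() runs `terraform apply`; the client writes the tfvars file first.
provisioner.apply()

# The terraform client is kept as a child fixture and can be retrieved directly.
tf_client = provisioner.fixtures.get_plugin(
    plugin_id=METTA_TERRAFORM_CLIENT_PLUGIN_ID)

# destroy() tears the resources down and resets the provisioner's fixtures.
provisioner.destroy()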
Example #6
class TestkitProvisionerPlugin:
    """Testkit provisioner plugin.

    Provisioner plugin that allows control of and interaction with a testkit
    cluster.

    ## Requirements

    1. this plugin uses subprocess to call a testkit binary, so you have to
       install testkit in the environment

    ## Usage

    @TODO

    """
    def __init__(
        self,
        environment,
        instance_id,
        label: str = TESTKIT_PROVISIONER_CONFIG_LABEL,
        base: Any = TESTKIT_PROVISIONER_CONFIG_BASE,
    ):
        """Initialize Testkit provisioner.

        Parameters:
        -----------
        environment (Environment) : metta environment object that this plugin
            is attached to.
        instance_id (str) : label for this plugin instance.

        label (str) : config load label for plugin configuration.
        base (str) : config base for loaded config for plugin configuration.

        """
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self._config_label = label
        """ configerus load label that should contain all of the config """
        self._config_base = base

        self.fixtures = Fixtures()
        """This object makes and keeps track of fixtures for MKE/MSR clients."""

        try:
            self._write_config_file()
            self.make_fixtures()
            # pylint: disable= broad-except
        except Exception as err:
            # there are many reasons this can fail, and we just want to
            # see if we can get fixtures early.
            # No need to ask forgiveness for this one.
            logger.debug("Could not make initial fixtures: %s", err)

    # the deep argument is a standard for the info hook
    # pylint: disable=unused-argument
    def info(self, deep: bool = False) -> Dict[str, Any]:
        """Get info about a provisioner plugin."""
        testkit_config = self._environment.config().load(self._config_label)

        return {
            "plugin": {
                "config": {
                    "config_label": self._config_label,
                    "config_base": self._config_base,
                },
                "config_file":
                testkit_config.get(
                    [self._config_base, TESTKIT_CONFIG_KEY_CONFIGFILE],
                    default="MISSING",
                ),
                "working_dir":
                testkit_config.get(
                    [self._config_base, TESTKIT_CONFIG_KEY_SYSTEMNAME],
                    default="MISSING"),
                "systems":
                testkit_config.get(
                    [self._config_base, TESTKIT_CONFIG_KEY_SYSTEMS],
                    default="MISSING",
                ),
            },
            "client": self._get_client_plugin().info(deep=deep),
        }

    def prepare(self):
        """Prepare any needed resources.

        We re-write the testkit config file and recreate fixtures here so that
        they are based on the most up to date config sources, allowing dynamic
        config sources to be resolved as late as possible.

        """
        # Make sure that we are running on up to date config
        self._write_config_file()
        self.make_fixtures()

    def apply(self):
        """Create the testkit yaml file and run testkit to create a cluster."""
        # Make sure that we are running on up to date config
        self._write_config_file()
        self.make_fixtures()

        testkit_config = self._environment.config().load(self._config_label,
                                                         force_reload=True)
        """ load the plugin configuration so we can retrieve options """
        opts = testkit_config.get(
            [self._config_base, TESTKIT_CONFIG_KEY_CREATE_OPTIONS], default={})
        """ retrieve testkit client options from config """
        opt_list = []
        for key, value in opts.items():
            if isinstance(value, str):
                opt_list.append(f'--{key}="{value}"')
            else:
                opt_list.append(f"--{key}={value}")

        # run the testkit client command to provision the cluster
        self._get_client_plugin().create(opts=opt_list)

    def destroy(self):
        """Destroy any created resources."""
        # run the testkit client command to destroy the cluster
        self._get_client_plugin().destroy()
        self._rm_config_file()

    def _write_config_file(self):
        """Write the config file for testkit."""
        try:
            # load all of the testkit configuration, force a reload to get up to date contents
            testkit_config = self._environment.config().load(
                self._config_label, force_reload=True)
            config = testkit_config.get(
                [self._config_base, TESTKIT_CONFIG_KEY_CONFIG],
                validator=TESTKIT_CONFIG_VALIDATE_TARGET,
            )
            """ config source of launchpad yaml """
        except KeyError as err:
            raise ValueError(
                "Could not find launchpad configuration from config.") from err
        except ValidationError as err:
            raise ValueError("Launchpad config failed validation") from err

        config_file = testkit_config.get(
            [self._config_base, TESTKIT_CONFIG_KEY_CONFIGFILE],
            default=TESTKIT_CONFIG_DEFAULT_CONFIGFILE,
        )
        """ config_file value from plugin configuration """

        # write the config to our yaml file target (creating the path)
        os.makedirs(os.path.dirname(os.path.realpath(config_file)),
                    exist_ok=True)
        with open(os.path.realpath(config_file), "w", encoding="utf8") as file:
            yaml.dump(config, file)

    def _rm_config_file(self):
        """Remove the written config file."""
        testkit_config = self._environment.config().load(self._config_label)
        config_file = testkit_config.get(
            [self._config_base, TESTKIT_CONFIG_KEY_CONFIGFILE],
            default=TESTKIT_CONFIG_DEFAULT_CONFIGFILE,
        )
        if os.path.isfile(config_file):
            os.remove(config_file)

    def make_fixtures(self):
        """Make related fixtures from a testkit installation.

        Creates:
        --------

        Testkit client : a client for interaction with the testkit cli

        """
        try:
            testkit_config = self._environment.config().load(
                self._config_label, force_reload=True)
            """ load the plugin configuration so we can retrieve options """
        except KeyError as err:
            raise ValueError(
                "Testkit plugin configuration did not have any config"
            ) from err

        system_name = testkit_config.get(
            [self._config_base, TESTKIT_CONFIG_KEY_SYSTEMNAME])
        """ hat will testkit call the system """

        # instances = testkit_config.get([self._config_base, TESTKIT_CONFIG_KEY_INSTANCES])
        # """ what instances to create """

        config_file = testkit_config.get(
            [self._config_base, TESTKIT_CONFIG_KEY_CONFIGFILE],
            default=TESTKIT_CONFIG_DEFAULT_CONFIGFILE,
        )
        """ config_file value from plugin configuration """

        systems = testkit_config.get(
            [self._config_base, TESTKIT_CONFIG_KEY_SYSTEMS],
            default={},
        )

        fixture = self._environment.new_fixture(
            plugin_id=METTA_TESTKIT_CLIENT_PLUGIN_ID,
            instance_id=self.client_instance_id(),
            priority=70,
            arguments={
                "config_file": config_file,
                "system_name": system_name,
                "systems": systems,
            },
            labels={
                "parent_plugin_id": METTA_TESTKIT_PROVISIONER_PLUGIN_ID,
                "parent_instance_id": self._instance_id,
            },
            replace_existing=True,
        )
        # keep this fixture attached to the provisioner to make it retrievable.
        self.fixtures.add(fixture, replace_existing=True)

    def client_instance_id(self) -> str:
        """Construct an instanceid for the child client plugin."""
        return f"{self._instance_id}-{METTA_TESTKIT_CLIENT_PLUGIN_ID}"

    def _get_client_plugin(self) -> TestkitClientPlugin:
        """Retrieve the client plugin if we can."""
        try:
            return self.fixtures.get_plugin(
                instance_id=self.client_instance_id())
        except KeyError as err:
            raise RuntimeError(
                "Testkit provisioner cannot find its client plugin, and "
                "cannot process any client actions.  Was a client created?"
            ) from err
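The option handling in apply() above flattens a config dict into testkit CLI flags, quoting only string values. A standalone sketch of that conversion; the option names and values below are invented for illustration.

opts = {"cluster-name": "metta-test", "managers": 3}

opt_list = []
for key, value in opts.items():
    if isinstance(value, str):
        opt_list.append(f'--{key}="{value}"')
    else:
        opt_list.append(f"--{key}={value}")

# opt_list is now: ['--cluster-name="metta-test"', '--managers=3']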
Example #7
class TestkitClientPlugin:
    """Testkit client plugin.

    Client plugin that allows control of and interaction with a testkit
    cluster.

    ## Requirements

    1. this plugin uses subprocess to call a testkit binary, so you have to
       install testkit in the environment

    ## Usage

    @TODO

    """

    # pylint: disable=too-many-arguments
    def __init__(
        self,
        environment,
        instance_id,
        system_name: str,
        config_file: str,
        systems: Dict[str, Dict[str, str]] = None,
    ):
        """Initialize Testkit provisioner.

        Parameters:
        -----------
        environment (Environment) : metta environment object that this plugin
            is attached to.
        instance_id (str) : label for this plugin instance.
        system_name (str) : name which testkit uses for the system; used in
            client operations.
        config_file (str) : string path to the testkit config file.
        systems (Dict[str, Dict[str, str]]) : A dictionary of systems which this
            client is expected to provide using testkit.

            This is something which should be determinable using the config/file
            client directly, but sits outside of information encapsulated in
            the tool/conf.

            What we are talking about here is information to answer questions:

                Did testkit install MKE? if so, what accesspoint and U/P can I
                use to build an MKE client to access it.

            This is not an ideal approach but rather a necessity.

        """
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self._system_name: str = system_name
        """ What will testkit call the system, used client ops """

        self._testkit = TestkitClient(config_file=config_file)
        """ testkit client object """

        self._systems = systems
        """What systems will testkit install, so what fixtures are needed"""

        self.fixtures = Fixtures()
        """This object makes and keeps track of fixtures for MKE/MSR clients."""
        try:
            self.make_fixtures()
            # pylint: disable= broad-except
        except Exception as err:
            # there are many reasons this can fail, and we just want to
            # see if we can get fixtures early.
            # No need to ask forgiveness for this one.
            logger.debug("Could not make initial fixtures: %s", err)

    # the deep argument is a standard for the info hook
    # pylint: disable=unused-argument
    def info(self, deep: bool = False) -> Dict[str, Any]:
        """Get info about a provisioner plugin."""
        info = {
            "plugin": {
                "system_name": self._system_name,
            },
            "client": self._testkit.info(deep=deep),
        }
        if deep:
            try:
                info["hosts"] = self.hosts()
            # pylint: disable=broad-except
            except Exception:
                pass

        return info

    def version(self):
        """Return testkit client version."""
        return self._testkit.version()

    def create(self, opts: List[str]):
        """Run the testkit create command."""
        self._testkit.create(opts=opts)
        self.make_fixtures()

        mke_plugin = self._get_mke_client_plugin()
        mke_plugin.api_get_bundle(force=True)
        mke_plugin.make_fixtures()

    def destroy(self):
        """Remove a system from testkit."""
        return self._testkit.system_rm(system_name=self._system_name)

    def hosts(self):
        """List testkit system machines."""
        return self._testkit.machine_ls(system_name=self._system_name)

    def exec(self, host: str, cmd: str):
        """List testkit system machines."""
        return self._testkit.machine_ssh(machine=host, cmd=cmd)

    def system_ls(self):
        """List all of the systems testkit can see using our config."""
        return self._testkit.system_ls()

    # pylint: disable=too-many-branches
    def make_fixtures(self):
        """Make related fixtures from a testkit installation.

        Creates:
        --------

        MKE client : if we have manager nodes, then we create an MKE client
            which will then create docker and kubernetes clients if they are
            appropriate.

        MSR Client : if we have an MSR node, then the related client is
            created.

        """
        if self._systems is None:
            return

        testkit_hosts = self._testkit.machine_ls(system_name=self._system_name)
        """ list of all of the testkit hosts. """

        manager_hosts = []
        worker_hosts = []
        mke_hosts = []
        msr_hosts = []
        for host in testkit_hosts:
            host["address"] = host["public_ip"]
            if host["swarm_manager"] == "yes":
                manager_hosts.append(host)
            else:
                worker_hosts.append(host)

            if host["ucp_controller"] == "yes":
                mke_hosts.append(host)

        if len(msr_hosts) == 0 and len(worker_hosts) > 0:
            # Testkit installs MSR on the first worker node, but the api is
            # accessible using port 444 in order to not conflict.
            first_worker = worker_hosts[0]
            first_worker_ip = first_worker["public_ip"]
            first_worker["msr_accesspoint"] = f"{first_worker_ip}:444"
            msr_hosts.append(first_worker)

        if (len(mke_hosts) > 0
                and METTA_MIRANTIS_CLIENT_MKE_PLUGIN_ID in self._systems):
            instance_id = f"{self._instance_id}-{METTA_MIRANTIS_CLIENT_MKE_PLUGIN_ID}"
            arguments = self._systems[METTA_MIRANTIS_CLIENT_MKE_PLUGIN_ID]
            arguments["hosts"] = mke_hosts

            if "accesspoint" in arguments and arguments["accesspoint"]:
                arguments["accesspoint"] = clean_accesspoint(
                    arguments["accesspoint"])

            logger.debug(
                "Launchpad client is creating an MKE client plugin: %s",
                instance_id)
            fixture = self._environment.new_fixture(
                plugin_id=METTA_MIRANTIS_CLIENT_MKE_PLUGIN_ID,
                instance_id=instance_id,
                priority=70,
                arguments=arguments,
                labels={
                    "parent_plugin_id": METTA_TESTKIT_CLIENT_PLUGIN_ID,
                    "parent_instance_id": self._instance_id,
                },
                replace_existing=True,
            )
            self.fixtures.add(fixture, replace_existing=True)

            # We got an MKE client, so let's activate it.

        else:
            logger.debug(
                "No MKE master hosts found, not creating an MKE client.")

        if (len(msr_hosts) > 0
                and METTA_MIRANTIS_CLIENT_MSR_PLUGIN_ID in self._systems):
            instance_id = f"{self._instance_id}-{METTA_MIRANTIS_CLIENT_MSR_PLUGIN_ID}"
            arguments = self._systems[METTA_MIRANTIS_CLIENT_MSR_PLUGIN_ID]
            arguments["hosts"] = msr_hosts

            if "accesspoint" in arguments and arguments["accesspoint"]:
                arguments["accesspoint"] = clean_accesspoint(
                    arguments["accesspoint"])

            logger.debug(
                "Launchpad client is creating an MSR client plugin: %s",
                instance_id)
            fixture = self._environment.new_fixture(
                plugin_id=METTA_MIRANTIS_CLIENT_MSR_PLUGIN_ID,
                instance_id=instance_id,
                priority=70,
                arguments=arguments,
                labels={
                    "parent_plugin_id": METTA_TESTKIT_CLIENT_PLUGIN_ID,
                    "parent_instance_id": self._instance_id,
                },
                replace_existing=True,
            )
            self.fixtures.add(fixture, replace_existing=True)

        else:
            logger.debug(
                "No MSR master hosts found, not creating an MSR client.")

    def _get_mke_client_plugin(self) -> MKEAPIClientPlugin:
        """Retrieve the MKE client plugin if we can."""
        try:
            return self.fixtures.get_plugin(
                plugin_id=METTA_MIRANTIS_CLIENT_MKE_PLUGIN_ID)
        except KeyError as err:
            raise RuntimeError(
                "Launchpad client cannot find its MKE client plugin, and "
                "cannot process any client actions.  Was a client created?"
            ) from err
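A small sketch of how make_fixtures() above partitions the testkit machine list. The host dicts are invented, but the keys the method reads ('public_ip', 'swarm_manager', 'ucp_controller') match the code.

# Hypothetical machine_ls() style data; the values are invented.
testkit_hosts = [
    {"public_ip": "10.0.0.10", "swarm_manager": "yes", "ucp_controller": "yes"},
    {"public_ip": "10.0.0.11", "swarm_manager": "no", "ucp_controller": "no"},
]

manager_hosts, worker_hosts, mke_hosts, msr_hosts = [], [], [], []
for host in testkit_hosts:
    host["address"] = host["public_ip"]
    if host["swarm_manager"] == "yes":
        manager_hosts.append(host)
    else:
        worker_hosts.append(host)
    if host["ucp_controller"] == "yes":
        mke_hosts.append(host)

# No explicit MSR hosts: the first worker is reused, with the MSR API on :444.
if len(msr_hosts) == 0 and len(worker_hosts) > 0:
    first_worker = worker_hosts[0]
    first_worker["msr_accesspoint"] = f"{first_worker['public_ip']}:444"
    msr_hosts.append(first_worker)

# Result: mke_hosts holds 10.0.0.10; msr_hosts holds 10.0.0.11 with the ':444'
# accesspoint attached.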
Example #8
class TerraformClientPlugin:
    """Metta terraform client."""

    # pylint: disable=too-many-arguments
    def __init__(
        self,
        environment: Environment,
        instance_id: str,
        chart_path: str,
        state_path: str,
        tfvars: Dict[str, Any],
        tfvars_path: str,
    ):
        """Initial client configuration."""
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self.tfvars: Dict[str, Any] = tfvars
        """Terraform vars to pass to TF as a tfvars file."""
        self._tfvars_path: str = os.path.realpath(tfvars_path)
        """Path to write for the tfvars file."""

        self._tf_handler = TerraformClient(
            working_dir=os.path.realpath(chart_path),
            state_path=os.path.realpath(state_path),
            tfvars_path=os.path.realpath(tfvars_path),
        )
        """Terraform handler which actually runs terraform commands."""

        self.fixtures = Fixtures()
        """All fixtures added to this plugin, which are primarily TF output plugins."""

        # if the cluster is already provisioned then we can get outputs from it
        try:
            self.make_fixtures()
        # pylint: disable=broad-except
        except Exception:
            pass

    # deep argument is an info() standard across plugins
    # pylint: disable=unused-argument
    def info(self, deep: bool = False):
        """Get info about the client plugin.

        Returns:
        --------
        Dict of keyed introspective information about the plugin.

        """
        info = {
            "config": {
                "tfvars": self.tfvars,
                "tfvars_path": self._tfvars_path,
            },
            "client": self._tf_handler.info(deep=deep),
        }

        return info

    def state(self):
        """Return the terraform state contents."""
        return self._tf_handler.state()

    def init(self, upgrade: bool = False):
        """Run terraform init."""
        self._tf_handler.init(upgrade=upgrade)

    def apply(self, lock: bool = True):
        """Apply a terraform plan."""
        self._make_tfvars_file()
        self._tf_handler.apply(lock=lock)
        self.make_fixtures()

    def destroy(self, lock: bool = True):
        """Apply a terraform plan."""
        self._tf_handler.destroy(lock=lock)
        self._rm_tfvars_file()

    def test(self):
        """Apply a terraform plan."""
        self._make_tfvars_file()
        return self._tf_handler.test()

    def plan(self):
        """Check a terraform plan."""
        self._make_tfvars_file()
        return self._tf_handler.plan()

    def providers_schema(self):
        """Retrieve terraform providers schema.

        Returns:
        --------
        json Schema

        """
        return self._tf_handler.providers_schema()

    def graph(self, type: str = "plan"):
        """Retrieve terraform graph.

        Returns:
        --------
        Terraform graph

        """
        return self._tf_handler.graph(type=type)

    def output(self, name: str = ""):
        """Retrieve terraform outputs.

        Run the terraform output command, to retrieve outputs.
        Outputs are always returned as json, as that is the only way to
        machine-parse outputs properly.

        Returns:
        --------
        If you provided a name, then a single output is returned, otherwise a
        dict of outputs is returned.

        """
        return self._tf_handler.output(name=name)

    def _make_tfvars_file(self):
        """Write the vars file."""
        os.makedirs(os.path.dirname(self._tfvars_path), exist_ok=True)
        with open(self._tfvars_path, "w", encoding="utf8") as var_file:
            json.dump(self.tfvars, var_file, sort_keys=True, indent=4)

    def _rm_tfvars_file(self):
        """Remove any created vars file."""
        tfvars_path = self._tfvars_path
        if os.path.isfile(tfvars_path):
            os.remove(tfvars_path)

    def make_fixtures(self):
        """Retrieve an output from terraform.

        For other METTA plugins we can just load configuration and create
        output plugin instances from various values in config.

        We do that here, but we also want to check for any outputs exported by
        the terraform root module, which we get using the tf client.

        If we find a root module output without a matching config output
        definition then we make some assumptions about plugin type and add it
        to the list. We make a simple investigation into output plugin types
        and pick either the contrib.common.dict or contrib.common.text plugins.

        If we find a root module output that matches an output that was
        declared in config then we use that.  This allows config to define a
        plugin_id which will then be populated automatically.  If you know what
        type of data you are expecting from a particular tf output then you can
        prepare config for it to do things like setting default values.

        Priorities can be used in the config.

        """
        # Now we ask TF what outputs it knows about and merge those in as new
        # output plugins.
        # self.output() produces a dict keyed by output name, where each value
        # carries 'sensitive' (bool), 'type' ([type_name, type_spec]) and
        # 'value' entries.
        for output_key, output_struct in self.output().items():
            # Here is the kind of info we can get out of terraform
            # output_sensitive = bool(output_struct['sensitive'])
            # """ Whether or not the output contains sensitive data """
            output_type = output_struct["type"][0]
            # output_spec = output_struct['type'][1]
            # """ A structured spec for the type """
            output_value = output_struct["value"]

            # see if we already have an output plugin for this name
            fixture = self.fixtures.get(
                interfaces=[METTA_PLUGIN_INTERFACE_ROLE_OUTPUT],
                instance_id=output_key,
                exception_if_missing=False,
            )
            if fixture is not None:
                if hasattr(fixture.plugin, "set_data"):
                    fixture.plugin.set_data(output_value)
                elif hasattr(fixture.plugin, "set_text"):
                    fixture.plugin.set_text(str(output_value))

            else:
                # we only know how to create 2 kinds of outputs
                if output_type == "object":
                    fixture = self._environment.new_fixture(
                        plugin_id=METTA_PLUGIN_ID_OUTPUT_DICT,
                        instance_id=output_key,
                        priority=self._environment.plugin_priority(delta=5),
                        arguments={"data": output_value},
                        labels={
                            "parent_plugin_id":
                            METTA_TERRAFORM_CLIENT_PLUGIN_ID,
                            "parent_instance_id": self._instance_id,
                        },
                    )
                else:
                    fixture = self._environment.new_fixture(
                        plugin_id=METTA_PLUGIN_ID_OUTPUT_TEXT,
                        instance_id=output_key,
                        priority=self._environment.plugin_priority(delta=5),
                        arguments={"text": str(output_value)},
                        labels={
                            "parent_plugin_id":
                            METTA_TERRAFORM_CLIENT_PLUGIN_ID,
                            "parent_instance_id": self._instance_id,
                        },
                    )

                self.fixtures.add(fixture)
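make_fixtures() above keys its behaviour off the structure returned by self.output(). A sketch of that structure and the resulting plugin choice; the output names and values are invented, and the type lists are simplified.

# Hypothetical shape of the data that make_fixtures() above consumes.
outputs = {
    "cluster_hosts": {
        "sensitive": False,
        "type": ["object", {"managers": ["list", "string"]}],
        "value": {"managers": ["10.0.0.10"]},
    },
    "cluster_name": {
        "sensitive": False,
        "type": ["string"],
        "value": "metta-test",
    },
}

for output_key, output_struct in outputs.items():
    output_type = output_struct["type"][0]
    # "object" outputs become dict output plugins (METTA_PLUGIN_ID_OUTPUT_DICT);
    # anything else becomes a text output plugin (METTA_PLUGIN_ID_OUTPUT_TEXT).
    print(output_key, "->", "dict" if output_type == "object" else "text")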
Example #9
class ComboProvisionerPlugin:
    """Combo Provisioner plugin class.

    This provisioner plugin is configured with a list of backends, which it
    will iterate across for every provisioner method call.  The backends have a
    priority which define the order of their call and every provisioner method
    will follow that order (or reverse it.)

    """
    def __init__(
        self,
        environment: Environment,
        instance_id: str,
        label: str = COMBO_PROVISIONER_CONFIG_LABEL,
        base: Any = LOADED_KEY_ROOT,
    ):
        """Run the super constructor but also set class properties.

        Interpret provided config and configure the object with all of the
        needed pieces for executing terraform commands

        Parameters:
        -----------
        environment (Environment) : All metta plugins receive the environment
            object in which they were created.
        instance_id (str) : all metta plugins receive their own string identity.

        label (str) : Configerus load label for finding plugin config.
        base (str) : Configerus get base key in which the plugin should look for
            all config.

        """
        self._environment: Environment = environment
        """Environemnt in which this plugin exists."""
        self._instance_id: str = instance_id
        """Unique id for this plugin instance."""

        try:
            combo_config = self._environment.config().load(label)
        except KeyError as err:
            raise ValueError(
                "Combo provisioner could not find any configurations") from err

        # Run configerus validation on the config using our above defined jsonschema
        try:
            combo_config.get(base, validator=COMBO_PROVISIONER_VALIDATE_TARGET)
        except ValidationError as err:
            raise ValueError(
                "Combo provisioner config failed validation") from err

        try:
            backends_list = combo_config.get(
                [base, COMBO_PROVISIONER_CONFIG_BACKENDS_KEY])
            if not isinstance(backends_list, list):
                raise ValueError(
                    "Combo provisioner could not understand the backend list."
                    " A list was expected.")
        except KeyError as err:
            raise ValueError(
                "Combo provisioner received no backend list from config."
            ) from err

        # For each of the string instance_ids we add the backend in order, by
        # finding it in the environment and adding it to our
        # UCCTFixturesPlugin fixtures list.
        self.backends = Fixtures()
        for backend in backends_list:
            backend_instance_id = backend[METTA_PLUGIN_CONFIG_KEY_INSTANCEID]
            try:
                fixture = self._environment.fixtures().get(
                    interfaces=[METTA_PLUGIN_INTERFACE_ROLE_PROVISIONER],
                    instance_id=backend_instance_id,
                )
            except KeyError as err:
                raise ValueError(
                    "Combo provisioner was given a backend provisioner key that it could not "
                    f"correlate with a existing fixture: {backend_instance_id}"
                ) from err

            if METTA_FIXTURE_CONFIG_KEY_PRIORITY in backend:
                fixture.priority = backend[METTA_FIXTURE_CONFIG_KEY_PRIORITY]

            self.backends.add(fixture)

    def _get_backend_iter(self, low_to_high: bool = False):
        """Get the sorted backend fixtures.

        Parameters:
        -----------
        low_to_high (bool) : ask for the fixtures in a lowest to highest
            (reverse) order.

        Returns:
        --------
        Iterator which is either the backends fixtures object, or the backends
            reversed()

        """
        if low_to_high:
            return reversed(self.backends)
        return self.backends

    def info(self, deep: bool = False):
        """Return structured data about self."""
        backends_info = []
        # List backends in high->low priority as this shows the order of apply
        for backend in self.backends:
            backends_info.append(backend.info(deep=deep))

        return {"backends": backends_info}

    def prepare(self):
        """Prepare the provisioner to apply resources."""
        for backend_fixture in self._get_backend_iter():
            logger.info(
                "--> running backend prepare: [High->Low] %s",
                backend_fixture.instance_id,
            )
            backend_fixture.plugin.prepare()

    def apply(self):
        """Bring a cluster to the configured state."""
        for backend_fixture in self._get_backend_iter():
            logger.info("--> running backend apply: [High->Low] %s",
                        backend_fixture.instance_id)
            backend_fixture.plugin.apply()

    def destroy(self):
        """Remove all resources created for the cluster."""
        for backend_fixture in self._get_backend_iter(low_to_high=True):
            logger.info(
                "--> running backend destroy: [Low->High] %s",
                backend_fixture.instance_id,
            )
            backend_fixture.plugin.destroy()

    # --- Fixture management
    #
    # We duplicate the UCCTFixturesPlugin methods, without using it as a parent,
    # so that we can identify as that object, because we need to allow all
    # backends to participate in fixture definition in order of priority.
    #
    # We of course have to override any method which depends on our ordered
    # backend retrieval of get_fixtures() so that it doesn't run the parent
    # get_fixtures.

    def get_fixtures(self,
                     instance_id: str = "",
                     plugin_id: str = "",
                     interfaces: List[str] = None) -> Fixtures:
        """Retrieve any matching fixtures from any of the backends."""
        matches = Fixtures()
        for backend_fixture in self._get_backend_iter():
            plugin = backend_fixture.plugin
            if hasattr(plugin, "fixtures"):
                matches.merge(
                    plugin.fixtures.filter(
                        plugin_id=plugin_id,
                        interfaces=interfaces,
                        instance_id=instance_id,
                    ))
        return matches

    def get_fixture(
        self,
        plugin_id: str = "",
        interfaces: List[str] = None,
        instance_id: str = "",
        exception_if_missing: bool = True,
    ) -> Fixture:
        """Retrieve the first matching fixture from ordered backends."""
        matches = self.get_fixtures(
            plugin_id=plugin_id,
            instance_id=instance_id,
            interfaces=interfaces,
        )

        if len(matches) > 0:
            return matches.get()

        if exception_if_missing:
            raise KeyError("No matching fixture was found")
        return None

    def get_plugin(
        self,
        plugin_id: str = "",
        interfaces: List[str] = None,
        instance_id: str = "",
        exception_if_missing: bool = True,
    ) -> object:
        """Retrieve one of the backend fixtures."""
        fixture = self.get_fixture(
            plugin_id=plugin_id,
            interfaces=interfaces,
            instance_id=instance_id,
            exception_if_missing=exception_if_missing,
        )

        if fixture is not None:
            return fixture.plugin

        # this if is not needed, as get_fixture() handles exception_if_missing
        if exception_if_missing:
            raise KeyError("No matching plugin was found")
        return None
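The constructor above expects a list of backend references in config, each naming an existing provisioner fixture and optionally overriding its priority. A sketch of what that config fragment might look like; the key names are assumptions based on the constants used in the code, not values confirmed by the source.

# Hypothetical backend list consumed by the combo provisioner constructor.
combo_provisioner_backends = [
    {"instance_id": "terraform-provisioner", "priority": 80},
    {"instance_id": "launchpad-provisioner", "priority": 70},
]

# Each instance_id must match a provisioner fixture already registered in the
# environment.  Priority decides call order: prepare()/apply() run high->low,
# destroy() runs low->high.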
Example #10
class LaunchpadProvisionerPlugin:
    """Launchpad provisioner class.

    Use this to provision a system using Mirantis launchpad

    """
    def __init__(
        self,
        environment: Environment,
        instance_id: str,
        label: str = METTA_LAUNCHPAD_CONFIG_LABEL,
        base: Any = LOADED_KEY_ROOT,
    ):
        """Configure a new Launchpad provisioner plugin instance."""
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self._config_label = label
        """ configerus load label that should contain all of the config """
        self._config_base = base
        """ configerus get key that should contain all tf config """

        self.fixtures = Fixtures()
        """ keep a collection of fixtures that this provisioner creates """

        # attempt to be declarative and make the client plugin in case the
        # backend cluster has already been provisioned.
        try:
            # Make the child client plugin.
            self.make_fixtures()

        # don't block the construction on an exception
        # pylint: disable=broad-except
        except Exception:
            pass

    def info(self, deep: bool = False) -> Dict[str, Any]:
        """Get info about the plugin.

        Returns:
        --------
        Dict of introspective information about this plugin_info

        """
        # Loaded plugin configuration
        launchpad_config_loaded = self._environment.config().load(
            self._config_label)

        return {
            "plugin": {
                "config_label": self._config_label,
                "config_base": self._config_base,
            },
            "config": {
                "working_dir":
                launchpad_config_loaded.get(
                    [self._config_base, METTA_LAUNCHPAD_CLI_WORKING_DIR_KEY],
                    default="MISSING"),
                "root_path":
                launchpad_config_loaded.get(
                    [self._config_base, METTA_LAUNCHPAD_CONFIG_ROOT_PATH_KEY],
                    default="NONE"),
                "config_file":
                launchpad_config_loaded.get(
                    [self._config_base, METTA_LAUNCHPAD_CLI_CONFIG_FILE_KEY],
                    default="MISSING"),
                "cli_options":
                launchpad_config_loaded.get(
                    [self._config_base, METTA_LAUNCHPAD_CLI_OPTIONS_KEY],
                    default="NONE"),
            },
            "client": self._get_client_plugin().info(deep=deep),
        }

    # pylint: disable=no-self-use
    def prepare(self):
        """Prepare the provisioning cluster for install.

        We ignore this.

        """
        logger.info(
            "Running Launchpad Prepare().  Launchpad has no prepare stage.")

    def apply(self, debug: bool = False):
        """Bring a cluster up.

        Note that we re-write the yaml file, as it may depend on config which
        was not available when this object was first constructed.

        Raises:
        -------
        ValueError if the object has been configured (prepare) with config that
            doesn't work, or if the backend doesn't give valid yml

        Exception if launchpad fails.

        """
        logger.info("Using launchpad to install products onto backend cluster")
        self._write_launchpad_yml()
        # we wouldn't need this if we could update the systems
        self.make_fixtures()
        self._get_client_plugin().apply(debug=debug)

    def destroy(self):
        """Ask the client to remove installed resources."""
        if self._has_launchpad_yml():
            logger.info(
                "Using launchpad to remove installed products from the backend cluster"
            )
            self._get_client_plugin().reset()
            self._rm_launchpad_yml()

    # ----- CLUSTER INTERACTION -----

    def _has_launchpad_yml(self) -> bool:
        """Check if the launchpad yml file exists."""
        # Loaded configerus config for the plugin. Ready for .get().
        plugin_config = self._environment.config().load(self._config_label)

        config_file: str = plugin_config.get(
            [self._config_base, METTA_LAUNCHPAD_CLI_CONFIG_FILE_KEY],
            default=METTA_LAUNCHPAD_CLI_CONFIG_FILE_DEFAULT,
        )
        return bool(config_file) and os.path.exists(config_file)

    def _write_launchpad_yml(self):
        """Write config contents to a yaml file for launchpad."""
        self._rm_launchpad_yml()

        # load and validate all of the launchpad configuration.
        launchpad_loaded = self._environment.config().load(
            self._config_label,
            validator=METTA_LAUNCHPAD_PROVISIONER_VALIDATE_TARGET,
            force_reload=True,
        )

        # load all of the launchpad configuration, force a reload to get up to date contents
        config_contents: Dict[str, Any] = launchpad_loaded.get(
            [self._config_base, METTA_LAUNCHPAD_CONFIG_KEY],
            validator=METTA_LAUNCHPAD_CONFIG_VALIDATE_TARGET,
        )

        # decide on a path for the runtime launchpad.yml file
        config_path: str = os.path.realpath(
            launchpad_loaded.get(
                [self._config_base, METTA_LAUNCHPAD_CLI_CONFIG_FILE_KEY],
                default=METTA_LAUNCHPAD_CLI_CONFIG_FILE_DEFAULT,
            ))

        # Our launchpad config differs slightly from the schema that launchpad
        # consumes, so we need a small conversion
        config_contents = self._convert_launchpad_config_to_file_format(
            config_contents)

        # write the launchpad output to our yaml file target (after creating the path)
        logger.debug(
            "Updating launchpad yaml file: %s =>/n%s",
            config_path,
            yaml.dump(config_contents),
        )
        with open(config_path, "w", encoding="utf8") as config_file_object:
            yaml.dump(config_contents, config_file_object)

    def _rm_launchpad_yml(self):
        """Update config and write the cfg and inventory files."""
        # Loaded configerus config for the plugin. Ready for .get().
        plugin_config = self._environment.config().load(self._config_label)

        config_file: str = plugin_config.get(
            [self._config_base, METTA_LAUNCHPAD_CLI_CONFIG_FILE_KEY],
            default=METTA_LAUNCHPAD_CLI_CONFIG_FILE_DEFAULT,
        )
        if config_file and os.path.exists(config_file):
            logger.debug("Launchpad provisioner removing created files.")
            os.remove(config_file)

    def make_fixtures(self):
        """Make the client plugin for terraform interaction."""
        try:
            # load and validate all of the launchpad configuration.
            launchpad_config_loaded = self._environment.config().load(
                self._config_label,
                validator=METTA_LAUNCHPAD_PROVISIONER_VALIDATE_TARGET,
                force_reload=True,
            )
        except ValidationError as err:
            raise ValueError("Launchpad config failed validation.") from err

        # if launchpad needs to be run in a certain path, set it with this config
        working_dir: str = launchpad_config_loaded.get(
            [self._config_base, METTA_LAUNCHPAD_CLI_WORKING_DIR_KEY],
            default=METTA_LAUNCHPADCLIENT_WORKING_DIR_DEFAULT,
        )

        # decide on a path for the runtime launchpad.yml file
        config_file: str = launchpad_config_loaded.get(
            [self._config_base, METTA_LAUNCHPAD_CLI_CONFIG_FILE_KEY],
            default=METTA_LAUNCHPAD_CLI_CONFIG_FILE_DEFAULT,
        )
        # List of launchpad cli options to pass to the client for all operations.
        cli_options: Dict[str, Any] = launchpad_config_loaded.get(
            [self._config_base, METTA_LAUNCHPAD_CLI_OPTIONS_KEY], default={})
        # List of systems that the client should configure for children plugins.
        systems: Dict[str, Dict[str, Any]] = launchpad_config_loaded.get(
            [self._config_base, METTA_LAUNCHPAD_CLIENT_SYSTEMS_KEY],
            default={})

        fixture = self._environment.new_fixture(
            plugin_id=METTA_LAUNCHPAD_CLIENT_PLUGIN_ID,
            instance_id=self.client_instance_id(),
            priority=70,
            arguments={
                "config_file": config_file,
                "working_dir": working_dir,
                "cli_options": cli_options,
                "systems": systems,
            },
            labels={
                "parent_plugin_id": METTA_LAUNCHPAD_PROVISIONER_PLUGIN_ID,
                "parent_instance_id": self._instance_id,
            },
            replace_existing=True,
        )
        # keep this fixture attached to the provisioner to make it retrievable.
        self.fixtures.add(fixture, replace_existing=True)

    def client_instance_id(self) -> str:
        """Construct an instanceid for the child client plugin."""
        return f"{self._instance_id}-{METTA_LAUNCHPAD_CLIENT_PLUGIN_ID}"

    def _get_client_plugin(self) -> LaunchpadClientPlugin:
        """Retrieve the client plugin if we can."""
        try:
            return self.fixtures.get_plugin(
                plugin_id=METTA_LAUNCHPAD_CLIENT_PLUGIN_ID)
        except KeyError as err:
            raise RuntimeError(
                "Launchpad provisioner cannot find its client plugin, and "
                "cannot process any client actions.  Was a client created?"
            ) from err

    def _convert_launchpad_config_to_file_format(self, config):
        """Convert our launchpad config to the schema that launchpad uses."""
        # 1. discover the host counts
        hosts = []
        managers = []
        workers = []
        msrs = []
        for host in config["spec"]["hosts"]:
            hosts.append(host)
            if host["role"] == "manager":
                managers.append(host)
            if host["role"] == "worker":
                workers.append(host)
            if host["role"] == "msr":
                msrs.append(host)

        # convert install flags and update flags to lists from dicts
        def dtol(dic):
            """Convert dict flags to lists."""
            items: List[str] = []
            for key, value in dic.items():
                if value is True:
                    items.append(f"--{key}")
                else:
                    items.append(f"--{key}={value}")
            return items

        try:
            config["spec"]["mke"]["installFlags"] = dtol(
                config["spec"]["mke"]["installFlags"])
        except KeyError:
            pass
        try:
            config["spec"]["mke"]["upgradeFlags"] = dtol(
                config["spec"]["mke"]["upgradeFlags"])
        except KeyError:
            pass
        try:
            config["spec"]["msr"]["installFlags"] = dtol(
                config["spec"]["msr"]["installFlags"])
        except KeyError:
            pass
        try:
            config["spec"]["msr"]["upgradeFlags"] = dtol(
                config["spec"]["msr"]["upgradeFlags"])
        except KeyError:
            pass

        # If no msrs, then drop the msr block and force the type.
        if len(msrs) == 0:
            config["kind"] = "mke"
            config["spec"].pop("msr")

        return config
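The dtol() helper above flattens launchpad installFlags/upgradeFlags dicts into CLI-style flag lists. A small worked example; the flag names and values are invented for illustration.

def dtol(dic):
    # Convert dict flags to a list of CLI-style flags.
    items = []
    for key, value in dic.items():
        if value is True:
            items.append(f"--{key}")
        else:
            items.append(f"--{key}={value}")
    return items

print(dtol({"force-recert": True, "san": "mke.example.com"}))
# ['--force-recert', '--san=mke.example.com']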