Example #1
    def _some_fixtures(self):
        """Create some fixtures to test against."""
        fixtures = Fixtures()

        fixtures.new(
            plugin_id="one",
            instance_id="1",
            priority=50,
            interfaces=["A", "B"],
            plugin=None,
        )
        fixtures.new(plugin_id="one",
                     instance_id="2",
                     priority=74,
                     interfaces=["A"],
                     plugin=None)
        fixtures.new(
            plugin_id="two",
            instance_id="2",
            priority=60,
            interfaces=["A", "B"],
            plugin=None,
        )
        fixtures.new(
            plugin_id="two",
            instance_id="3",
            priority=10,
            interfaces=["A", "B", "C"],
            plugin=None,
        )

        return fixtures
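A minimal sketch of how the fixtures built above could be queried, assuming the Fixtures filter()/get() behaviour used in the later examples (matching by plugin_id, instance_id or interfaces); the exact selection rules are an assumption here.

    # Hypothetical usage, inside a test method of the same class.
    fixtures = self._some_fixtures()

    # Only plugin "two" / instance "3" declares interface "C" above.
    c_matches = fixtures.filter(interfaces=["C"])

    # Both instances of plugin "one" match by plugin_id.
    one_matches = fixtures.filter(plugin_id="one")

    # get() is assumed to return the best matching fixture, with priority
    # deciding ties -- here plugin "one" / instance "2" (priority 74).
    best = fixtures.get(plugin_id="one")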
Example #2
    def __init__(
        self,
        environment: Environment,
        instance_id: str,
        fixtures: Dict[str, Dict[str, Any]] = None,
    ):
        """Sset class properties

        Arguments:
        ----------

        environment (Environment) : Environment in which this plugin exists.

        instance_id (str) : unique identifier for this plugin instance.

        fixtures (dict) : You can pass in some fixture definitions which this
            class will turn into fixtures and make retrievable.  This is a big
            part of the dummy.

        """
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self.fixtures = Fixtures()
        """This plugin keeps fixtures."""
        if fixtures is not None:
            for child_instance_id, child_instance_dict in fixtures.items():
                child = environment.add_fixture_from_dict(
                    instance_id=child_instance_id,
                    plugin_dict=child_instance_dict)
                self.fixtures.add(child)
Example #3
    def __init__(
        self,
        environment: Environment,
        instance_id: str,
        fixtures: Dict[str, Dict[str, Any]] = None,
    ):
        """Set class properties.

        Parameters:
        -----------
        environment (Environment) : Environment in which this plugin exists.

        instance_id (str) : unique identifier for this plugin instance.

        fixtures (Dict[str, Dict[str, Any]]) : fixture definitions which this
            class will turn into fixtures and make retrievable.

        """
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self.fixtures = Fixtures()
        """This plugin keeps fixtures."""
        if fixtures is not None:
            for child_instance_id, child_instance_dict in fixtures.items():
                child = environment.add_fixture_from_dict(
                    instance_id=child_instance_id,
                    plugin_dict=child_instance_dict)
                self.fixtures.add(child)
Example #4
    def destroy(self, lock: bool = True):
        """Remove all terraform resources in state."""
        logger.info("Running Terraform DESTROY")
        self._get_client_plugin().destroy(lock=lock)
        # accessing parent property for clearing out existing output fixtures
        # pylint: disable=attribute-defined-outside-init
        self.fixtures = Fixtures()
Example #5
class DummyProvisionerPlugin:
    """Dummy provisioner class"""
    def __init__(
        self,
        environment: Environment,
        instance_id: str,
        fixtures: Dict[str, Dict[str, Any]] = None,
    ):
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self.fixtures = Fixtures()
        """This plugin keeps fixtures."""
        if fixtures is not None:
            for child_instance_id, child_instance_dict in fixtures.items():
                child = environment.add_fixture_from_dict(
                    instance_id=child_instance_id,
                    plugin_dict=child_instance_dict)
                self.fixtures.add(child)

    def apply(self):
        """pretend to bring a cluster up"""
        logger.info("%s:execute: apply()", self._instance_id)

    def prepare(self):
        """pretend to prepare the cluster"""
        logger.info("%s:execute: apply()", self._instance_id)

    def destroy(self):
        """pretend to brind a cluster down"""
        logger.info("%s:execute: apply()", self._instance_id)
Example #6
    def __init__(
        self,
        environment: Environment,
        instance_id: str,
        label: str = METTA_LAUNCHPAD_CONFIG_LABEL,
        base: Any = LOADED_KEY_ROOT,
    ):
        """Configure a new Launchpad provisioner plugin instance."""
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self._config_label = label
        """ configerus load label that should contain all of the config """
        self._config_base = base
        """ configerus get key that should contain all tf config """

        self.fixtures = Fixtures()
        """ keep a collection of fixtures that this provisioner creates """

        # attempt to be declarative and make the client plugin in case
        # launchpad has already been run.
        try:
            # Make the child client plugin.
            self.make_fixtures()

        # don't block the construction on an exception
        # pylint: disable=broad-except
        except Exception:
            pass
Example #7
    def __init__(
        self,
        environment,
        instance_id,
        system_name: str,
        config_file: str,
        systems: Dict[str, Dict[str, str]] = None,
    ):
        """Initialize Testkit provisioner.

        Parameters:
        -----------
        environment (Environment) : metta environment object to which this
            plugin is attached.
        instance_id (str) : label for this plugin instance.
        config_file (str) : string path to the testkit config file.
        systems (Dict[str, Dict[str, str]]) : A dictionary of systems which this
            client is expected to provide using testkit.

            This is something which should be determinable using the config/file
            client directly, but sits outside of information encapsulated in
            the tool/conf.

            What we are talking about here is information to answer questions:

                Did testkit install MKE? if so, what accesspoint and U/P can I
                use to build an MKE client to access it.

            This is not an ideal approach but rather a necessity.

        """
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self._system_name: str = system_name
        """ What will testkit call the system, used client ops """

        self._testkit = TestkitClient(config_file=config_file)
        """ testkit client object """

        self._systems = systems
        """What systems will testkit install, so what fixtures are needed"""

        self.fixtures = Fixtures()
        """This object makes and keeps track of fixtures for MKE/MSR clients."""
        try:
            self.make_fixtures()
        # pylint: disable=broad-except
        except Exception as err:
            # there are many reasons this can fail, and we just want to
            # see if we can get fixtures early.
            # No need to ask forgiveness for this one.
            logger.debug("Could not make initial fixtures: %s", err)
Example #8
    def __init__(
        self,
        environment: Environment,
        instance_id: str,
        label: str = METTA_FIXTURES_CONFIG_STATES_KEY,
        base: Any = LOADED_KEY_ROOT,
    ):
        """Run the super constructor but also set class properties.

        Interpret provided config and configure the object with all of the
        needed pieces for executing terraform commands.

        This repeats a lot of the Environment functionality as an override,
        mainly so that we can change some small details about plugin generation.

        """
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self._config: Config = environment._config
        """Config object, overridden in activate()."""

        self._config_label = label
        """ configerus load label that should contain all of the config """
        self._config_base = base
        """ configerus get key that should contain all tf config """

        self._fixtures: Fixtures = Fixtures()
        """Children fixtures, typically just the client plugin."""
Example #9
    def prepare(self, fixtures: Fixtures = None):
        """Find the dependent fixtures."""
        if fixtures is None:
            fixtures = self._environment.fixtures()

        self._ansible_client = fixtures.get_plugin(
            plugin_id=METTA_ANSIBLE_ANSIBLECLI_CORECLIENT_PLUGIN_ID)
Example #10
    def __init__(
        self,
        environment: Environment,
        instance_id: str,
        label: str = TERRAFORM_PROVISIONER_CONFIG_LABEL,
        base: Any = LOADED_KEY_ROOT,
    ):
        """Run the super constructor but also set class properties.

        Interpret provided config and configure the object with all of the
        needed pieces for executing terraform commands

        """
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self._config_label = label
        """ configerus load label that should contain all of the config """
        self._config_base = base
        """ configerus get key that should contain all tf config """

        self.fixtures: Fixtures = Fixtures()
        """Children fixtures, typically just the client plugin."""

        # Make the client fixture in the constructor.  The TF client fixture is
        # quite state safe, and should only need to be created once, unlike
        # other provisioner clients which may be vulnerable to state change.
        self.make_fixtures()
Example #11
    def get_fixtures(self,
                     instance_id: str = "",
                     plugin_id: str = "",
                     interfaces: List[str] = None) -> Fixtures:
        """Retrieve any matching fixtures from any of the backends."""
        matches = Fixtures()
        for backend_fixture in self._get_backend_iter():
            plugin = backend_fixture.plugin
            if hasattr(plugin, "fixtures"):
                matches.merge(
                    plugin.fixtures.filter(
                        plugin_id=plugin_id,
                        interfaces=interfaces,
                        instance_id=instance_id,
                    ))
        return matches
Example #12
class DummyClientPlugin:
    """Dummy client class

    As with all dummies, this is a failsafe plugin that should never throw any
    exceptions if used according to metta standards.

    It can be used as a placeholder during development, or it can be used to
    log client events and output for greater development and debugging.

    The client will log any method call, including unknown methods, and so it
    can be used in place of any client if you don't need the methods to return
    anything.
    """
    def __init__(
        self,
        environment: Environment,
        instance_id: str,
        fixtures: Dict[str, Dict[str, Any]] = None,
    ):
        """Sset class properties

        Arguments:
        ----------

        environment (Environment) : Environment in which this plugin exists.

        instance_id (str) : unique identifier for this plugin instance.

        fixtures (dict) : You can pass in some fixture definitions which this
            class will turn into fixtures and make retrievable.  This is a big
            part of the dummy.

        """
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self.fixtures = Fixtures()
        """This plugin keeps fixtures."""
        if fixtures is not None:
            for child_instance_id, child_instance_dict in fixtures.items():
                child = environment.add_fixture_from_dict(
                    instance_id=child_instance_id,
                    plugin_dict=child_instance_dict)
                self.fixtures.add(child)
Example #13
    def __init__(
        self,
        environment: Environment,
        instance_id: str,
        fixtures: Dict[str, Dict[str, Any]] = None,
    ):
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self.fixtures = Fixtures()
        """This plugin keeps fixtures."""
        if fixtures is not None:
            for child_instance_id, child_instance_dict in fixtures.items():
                child = environment.add_fixture_from_dict(
                    instance_id=child_instance_id,
                    plugin_dict=child_instance_dict)
                self.fixtures.add(child)
Example #14
    def __init__(
        self,
        environment,
        instance_id,
        label: str = TESTKIT_PROVISIONER_CONFIG_LABEL,
        base: Any = TESTKIT_PROVISIONER_CONFIG_BASE,
    ):
        """Initialize Testkit provisioner.

        Parameters:
        -----------
        environment (Environment) : metta environment object to which this
            plugin is attached.
        instance_id (str) : label for this plugin instance.

        label (str) : config load label for plugin configuration.
        base (str) : config base for loaded config for plugin configuration.

        """
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self._config_label = label
        """ configerus load label that should contain all of the config """
        self._config_base = base

        self.fixtures = Fixtures()
        """This object makes and keeps track of fixtures for MKE/MSR clients."""

        try:
            self._write_config_file()
            self.make_fixtures()
        # pylint: disable=broad-except
        except Exception as err:
            # there are many reasons this can fail, and we just want to
            # see if we can get fixtures early.
            # No need to ask forgiveness for this one.
            logger.debug("Could not make initial fixtures: %s", err)
Example #15
    def prepare(self, fixtures: Fixtures = None):
        """Create a workload instance from a set of fixtures.

        Parameters:
        -----------
        fixtures (Fixtures) : a set of fixtures that this workload will use to
            retrieve a docker client plugin.

        """
        if fixtures is None:
            fixtures = self._environment.fixtures()

        self._docker_client = fixtures.get_plugin(
            interfaces=[METTA_PLUGIN_ID_DOCKER_CLIENT])
Example #16
    def _filter(
        self,
        plugin_id: str = "",
        instance_id: str = "",
        interfaces: List[str] = None,
        labels: List[str] = None,
        skip_cli_plugins: bool = True,
    ):
        """Filter fixtures centrally."""
        matches = self._environment.fixtures().filter(plugin_id=plugin_id,
                                                      instance_id=instance_id,
                                                      interfaces=interfaces,
                                                      labels=labels)

        if not skip_cli_plugins:
            return matches

        # filter out cli plugins
        fixtures = Fixtures()
        for fixture in matches:
            if METTA_PLUGIN_INTERFACE_ROLE_CLI not in fixture.interfaces:
                fixtures.add(fixture)
        return fixtures
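A short illustration of calling _filter() above from within the same plugin; the "provisioner" interface name is hypothetical.

    # Matching fixtures with CLI plugins filtered out (the default behaviour).
    non_cli = self._filter(interfaces=["provisioner"])

    # skip_cli_plugins=False returns the raw matches, CLI plugins included.
    everything = self._filter(interfaces=["provisioner"], skip_cli_plugins=False)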
Example #17
    def __init__(
        self,
        environment: Environment,
        instance_id: str,
        chart_path: str,
        state_path: str,
        tfvars: Dict[str, Any],
        tfvars_path: str,
    ):
        """Initial client configuration."""
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self.tfvars: Dict[str, Any] = tfvars
        """Terraform vars to pass to TF as a tfvars file."""
        self._tfvars_path: str = os.path.realpath(tfvars_path)
        """Path to write for the tfvars file."""

        self._tf_handler = TerraformClient(
            working_dir=os.path.realpath(chart_path),
            state_path=os.path.realpath(state_path),
            tfvars_path=os.path.realpath(tfvars_path),
        )
        """Terraform handler which actually runs terraform commands."""

        self.fixtures = Fixtures()
        """All fixtures added to this plugin, which are primarily TF output plugins."""

        # if the cluster is already provisioned then we can get outputs from it
        try:
            self.make_fixtures()
        # pylint: disable=broad-except
        except Exception:
            pass
Example #18
    def prepare(self, fixtures: Fixtures = None):
        """Find the dependent fixtures."""
        if fixtures is None:
            fixtures = self._environment.fixtures()

        self._ansibleplaybook_client = fixtures.get_plugin(
            plugin_id=METTA_ANSIBLE_ANSIBLECLI_PLAYBOOKCLIENT_PLUGIN_ID)

        # Loaded configerus config for the plugin. Ready for .get().
        plugin_config = self._environment.config().load(self._config_label)

        playbook_contents: str = plugin_config.get(
            [self._config_base, ANSIBLE_WORKLOAD_CONFIG_PLAYBOOK_KEY],
            default={})
        playbook_path: str = plugin_config.get(
            [self._config_base, ANSIBLE_WORKLOAD_CONFIG_PLAYBOOK_PATH_KEY],
            default="",
        )
        vars_values: str = plugin_config.get(
            [self._config_base, ANSIBLE_WORKLOAD_CONFIG_PLAYBOOK_VARS_KEY],
            default={})
        vars_path: str = plugin_config.get(
            [
                self._config_base,
                ANSIBLE_WORKLOAD_CONFIG_PLAYBOOK_VARS_PATH_KEY
            ],
            default="",
        )

        if playbook_contents:
            os.makedirs(os.path.dirname(os.path.realpath(playbook_path)),
                        exist_ok=True)
            with open(playbook_path, "w",
                      encoding="utf8") as playbook_fileobject:
                yaml.safe_dump(playbook_contents, playbook_fileobject)
        else:
            if playbook_path and os.path.exists(playbook_path):
                os.remove(playbook_path)
        if vars_values:
            os.makedirs(os.path.dirname(os.path.realpath(vars_path)),
                        exist_ok=True)
            with open(vars_path, "w", encoding="utf8") as vars_fileobject:
                yaml.safe_dump(vars_values, vars_fileobject)
        else:
            if vars_path and os.path.exists(vars_path):
                os.remove(vars_path)
Example #19
    def __init__(
        self,
        environment: Environment,
        instance_id: str,
        label: str = ANSIBLE_PROVISIONER_CONFIG_LABEL,
        base: Any = LOADED_KEY_ROOT,
    ):
        """Keep metta info and plugin config info.

        Parameters:
        -----------
        environment (Environment) : All metta plugins receive the environment
            object in which they were created.
        instance_id (str) : all metta plugins receive their own string identity.

        label (str) : Configerus load label for finding plugin config.
        base (str) : Configerus get base key in which the plugin should look for
            all config.

        """
        self._environment: Environment = environment
        """Environemnt in which this plugin exists."""
        self._instance_id: str = instance_id
        """Unique id for this plugin instance."""

        self._config_label: str = label
        """ configerus load label that should contain all of the config """
        self._config_base: str = base
        """ configerus get key that should contain all plugin config """

        self.fixtures: Fixtures = Fixtures()
        """Fixtures created by this plugin - typically various clients."""

        # In order to allow declarative interaction, try to make an ansible
        # client for this plugin, but allow it to fail.
        try:
            self._update_config()
            self._make_clients()

        # no exception in init should block building the object
        # pylint: disable=broad-except
        except Exception:
            logger.debug("Inital ansible plugin build failed : %s", self._instance_id)
Example #20
    def prepare(self, fixtures: Fixtures = None):
        """Get the kubeapi client from a set of fixtures.

        Parameters:
        -----------
        fixtures (Fixtures) : a set of fixtures that this workload will use to
            retrieve a kubernetes client plugin.

        """
        if fixtures is None:
            fixtures = self._environment.fixtures()

        try:
            self.client = fixtures.get_plugin(
                plugin_id=METTA_PLUGIN_ID_KUBERNETES_CLIENT)
        except KeyError as err:
            raise NotImplementedError(
                "Workload could not find the needed client: "
                f"{METTA_PLUGIN_ID_KUBERNETES_CLIENT}") from err
Example #21
    def __init__(
        self,
        environment: Environment,
        instance_id: str,
        label: str = SONOBUOY_WORKLOAD_CONFIG_LABEL,
        base: Any = SONOBUOY_WORKLOAD_CONFIG_BASE,
    ):
        """Initialize workload plugin.

        Parameters:
        -----------
        label (str) : Configerus label for loading config
        base (Any) : configerus base key which should contain all of the config

        """
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        logger.info("Preparing sonobuoy settings")

        self._config_label: str = label
        """ configerus load label that should contain all of the config """
        self._config_base: str = base
        """ configerus get key that should contain all tf config """

        self.fixtures: Fixtures = Fixtures()
        """This plugin creates fixtures, so they are tracked here."""

        # go for early declarative testing to the plugin.
        try:
            self.prepare()
        # pylint: disable=broad-except
        except Exception:
            logger.debug("not able to early prepare sonobuoy.")
Example #22
class ComboProvisionerPlugin:
    """Combo Provisioner plugin class.

    This provisioner plugin is configured with a list of backends, which it
    will iterate across for every provisioner method call.  The backends have a
    priority which defines the order of their calls, and every provisioner
    method will follow that order (or reverse it).

    """
    def __init__(
        self,
        environment: Environment,
        instance_id: str,
        label: str = COMBO_PROVISIONER_CONFIG_LABEL,
        base: Any = LOADED_KEY_ROOT,
    ):
        """Run the super constructor but also set class properties.

        Interpret provided config and configure the object with all of the
        needed pieces for executing terraform commands

        Parameters:
        -----------
        environment (Environment) : All metta plugins receive the environment
            object in which they were created.
        instance_id (str) : all metta plugins receive their own string identity.

        label (str) : Configerus load label for finding plugin config.
        base (str) : Configerus get base key in which the plugin should look for
            all config.

        """
        self._environment: Environment = environment
        """Environemnt in which this plugin exists."""
        self._instance_id: str = instance_id
        """Unique id for this plugin instance."""

        try:
            combo_config = self._environment.config().load(label)
        except KeyError as err:
            raise ValueError(
                "Combo provisioner could not find any configurations") from err

        # Run configerus validation on the config using our above defined jsonschema
        try:
            combo_config.get(base, validator=COMBO_PROVISIONER_VALIDATE_TARGET)
        except ValidationError as err:
            raise ValueError(
                "Combo provisioner config failed validation") from err

        try:
            backends_list = combo_config.get(
                [base, COMBO_PROVISIONER_CONFIG_BACKENDS_KEY])
            if not isinstance(backends_list, list):
                raise ValueError(
                    "Combo provisioner could not understand the backend list."
                    " A list was expected.")
        except KeyError as err:
            raise ValueError(
                "Combo provisioner received no backend list from config."
            ) from err

        # for each of our string instance_ids we add the backend in order by finding it from the
        # environment and adding it to our UCCTFixturesPlugin fixtures list.
        self.backends = Fixtures()
        for backend in backends_list:
            backend_instance_id = backend[METTA_PLUGIN_CONFIG_KEY_INSTANCEID]
            try:
                fixture = self._environment.fixtures().get(
                    interfaces=[METTA_PLUGIN_INTERFACE_ROLE_PROVISIONER],
                    instance_id=backend_instance_id,
                )
            except KeyError as err:
                raise ValueError(
                    "Combo provisioner was given a backend provisioner key that it could not "
                    f"correlate with a existing fixture: {backend_instance_id}"
                ) from err

            if METTA_FIXTURE_CONFIG_KEY_PRIORITY in backend:
                fixture.priority = backend[METTA_FIXTURE_CONFIG_KEY_PRIORITY]

            self.backends.add(fixture)

    def _get_backend_iter(self, low_to_high: bool = False):
        """Get the sorted backend fixtures.

        Parameters:
        -----------
        low_to_high (bool) : ask for the fixtures in a lowest to highest
            (reverse) order.

        Returns:
        --------
        Iterator which is either the backends fixtures object, or the backends
            reversed()

        """
        if low_to_high:
            return reversed(self.backends)
        return self.backends

    def info(self, deep: bool = False):
        """Return structured data about self."""
        backends_info = []
        # List backends in high->low priority as this shows the order of apply
        for backend in self.backends:
            backends_info.append(backend.info(deep=deep))

        return {"backends": backends_info}

    def prepare(self):
        """Prepare the provisioner to apply resources."""
        for backend_fixture in self._get_backend_iter():
            logger.info(
                "--> running backend prepare: [High->Low] %s",
                backend_fixture.instance_id,
            )
            backend_fixture.plugin.prepare()

    def apply(self):
        """Bring a cluster to the configured state."""
        for backend_fixture in self._get_backend_iter():
            logger.info("--> running backend apply: [High->Low] %s",
                        backend_fixture.instance_id)
            backend_fixture.plugin.apply()

    def destroy(self):
        """Remove all resources created for the cluster."""
        for backend_fixture in self._get_backend_iter(low_to_high=True):
            logger.info(
                "--> running backend destroy: [Low->High] %s",
                backend_fixture.instance_id,
            )
            backend_fixture.plugin.destroy()

    # --- Fixture management
    #
    # We duplicate the UCCTFixturesPlugin methods, despite using it as a parent,
    # so that we can identify as that object, but because we need to allow all
    # backends to participate in fixture definition in order of priority.
    #
    # We of course have to override any method which depends on our ordered
    # backend retrieval in get_fixtures() so that it doesn't run the parent
    # get_fixtures.

    def get_fixtures(self,
                     instance_id: str = "",
                     plugin_id: str = "",
                     interfaces: List[str] = None) -> Fixtures:
        """Retrieve any matching fixtures from any of the backends."""
        matches = Fixtures()
        for backend_fixture in self._get_backend_iter():
            plugin = backend_fixture.plugin
            if hasattr(plugin, "fixtures"):
                matches.merge(
                    plugin.fixtures.filter(
                        plugin_id=plugin_id,
                        interfaces=interfaces,
                        instance_id=instance_id,
                    ))
        return matches

    def get_fixture(
        self,
        plugin_id: str = "",
        interfaces: List[str] = None,
        instance_id: str = "",
        exception_if_missing: bool = True,
    ) -> Fixture:
        """Retrieve the first matching fixture from ordered backends."""
        matches = self.get_fixtures(
            plugin_id=plugin_id,
            instance_id=instance_id,
            interfaces=interfaces,
        )

        if len(matches) > 0:
            return matches.get()

        if exception_if_missing:
            raise KeyError("No matching fixture was found")
        return None

    def get_plugin(
        self,
        plugin_id: str = "",
        interfaces: List[str] = None,
        instance_id: str = "",
        exception_if_missing: bool = True,
    ) -> object:
        """Retrieve one of the backend fixtures."""
        fixture = self.get_fixture(
            plugin_id=plugin_id,
            interfaces=interfaces,
            instance_id=instance_id,
            exception_if_missing=exception_if_missing,
        )

        if fixture is not None:
            return fixture.plugin

        # this if is not needed, as get_fixture() handles exception_if_missing
        if exception_if_missing:
            raise KeyError("No matching plugin was found")
        return None
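A hedged sketch of the backends config this plugin reads; it assumes that COMBO_PROVISIONER_CONFIG_BACKENDS_KEY, METTA_PLUGIN_CONFIG_KEY_INSTANCEID and METTA_FIXTURE_CONFIG_KEY_PRIORITY resolve to "backends", "instance_id" and "priority" (those string values do not appear in this listing).

# Illustrative config data, expressed as the structure configerus would
# hand back for the backends key.
combo_config_data = {
    "backends": [
        # each entry names an existing provisioner fixture by instance_id,
        # with an optional priority override
        {"instance_id": "terraform-prov", "priority": 80},
        {"instance_id": "launchpad-prov", "priority": 70},
    ],
}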
Example #23
    def __init__(
        self,
        environment: Environment,
        instance_id: str,
        config_file: str = METTA_LAUNCHPAD_CLI_CONFIG_FILE_DEFAULT,
        working_dir: str = METTA_LAUNCHPADCLIENT_WORKING_DIR_DEFAULT,
        cli_options: Dict[str, bool] = None,
        systems: Dict[str, Dict[str, str]] = None,
    ):
        """Collect enough data to create a LaunchpadClient object.

        Parameters:
        -----------
        config_file (str) : Path to the launchpad yml config file.

        working_dir (str) : Path CWD to use for subprocess with launchpad.  This may be needed
            if PEMs in the yaml are relative paths.

        cli_options (Dict[str, Any]) : Additional -- flags which should be passed to all
            launchpad commands.

        systems (Dict[str, Any]) : Dictionary to provide client generation for infrastructure
            created by Launchpad.  The two primary examples are MKE and MSR.

            Each key of the Dict is the identifier of the system, and its values are arguments
            to the constructor of the system, except the host list.

            An example for MKE:
                "mke": {
                    "accesspoint": "192.168.172.11",
                    "username": "******"",
                    "password": "******",
                }

                A host list will be added to the arguments.

        """
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self.config_file: str = config_file
        """Path to the launchpad yml file."""

        self.systems: Dict[str,
                           Dict[str,
                                str]] = systems if systems is not None else {}
        """Access endpoint & U/P for systems created by launchpad, such as the MKE client."""

        self._fixtures: Fixtures = Fixtures()
        """This plugin makes fixtures, and keeps track of them here."""

        logger.debug("Creating Launchpad client handler")
        self.launchpad: LaunchpadClient = LaunchpadClient(
            config_file=config_file,
            working_dir=working_dir,
            cli_options=cli_options,
        )

        # If we can, it makes sense to build the MKE and MSR client fixtures now.
        # This will only be possible in cases where we have an installed cluster.
        # We try that here, even though it is verbose and ugly, so that we have
        # the clients available for introspection, for all consumers.
        # We probably shouldn't, but it allows some flexibility.
        # attempt to be declarative and make the client plugin in case the
        # terraform chart has already been run.
        # self.make_fixtures()
        try:
            self.make_fixtures()

        # don't block the construction on an exception
        # pylint: disable=broad-except
        except Exception:
            pass
Example #24
    def __init__(
        self,
        environment: Environment,
        instance_id: str,
        label: str = COMBO_PROVISIONER_CONFIG_LABEL,
        base: Any = LOADED_KEY_ROOT,
    ):
        """Run the super constructor but also set class properties.

        Interpret provided config and configure the object with all of the
        needed pieces for executing terraform commands

        Parameters:
        -----------
        environment (Environment) : All metta plugins receive the environment
            object in which they were created.
        instance_id (str) : all metta plugins receive their own string identity.

        label (str) : Configerus load label for finding plugin config.
        base (str) : Configerus get base key in which the plugin should look for
            all config.

        """
        self._environment: Environment = environment
        """Environemnt in which this plugin exists."""
        self._instance_id: str = instance_id
        """Unique id for this plugin instance."""

        try:
            combo_config = self._environment.config().load(label)
        except KeyError as err:
            raise ValueError(
                "Combo provisioner could not find any configurations") from err

        # Run configerus validation on the config using our above defined jsonschema
        try:
            combo_config.get(base, validator=COMBO_PROVISIONER_VALIDATE_TARGET)
        except ValidationError as err:
            raise ValueError(
                "Combo provisioner config failed validation") from err

        try:
            backends_list = combo_config.get(
                [base, COMBO_PROVISIONER_CONFIG_BACKENDS_KEY])
            if not isinstance(backends_list, list):
                raise ValueError(
                    "Combo provisioner could not understand the backend list."
                    " A list was expected.")
        except KeyError as err:
            raise ValueError(
                "Combo provisioner received no backend list from config."
            ) from err

        # for each of our string instance_ids we add the backend in order by finding it from the
        # environment and adding it to our UCCTFixturesPlugin fixtures list.
        self.backends = Fixtures()
        for backend in backends_list:
            backend_instance_id = backend[METTA_PLUGIN_CONFIG_KEY_INSTANCEID]
            try:
                fixture = self._environment.fixtures().get(
                    interfaces=[METTA_PLUGIN_INTERFACE_ROLE_PROVISIONER],
                    instance_id=backend_instance_id,
                )
            except KeyError as err:
                raise ValueError(
                    "Combo provisioner was given a backend provisioner key that it could not "
                    f"correlate with a existing fixture: {backend_instance_id}"
                ) from err

            if METTA_FIXTURE_CONFIG_KEY_PRIORITY in backend:
                fixture.priority = backend[METTA_FIXTURE_CONFIG_KEY_PRIORITY]

            self.backends.add(fixture)
Example #25
    def prepare(self, fixtures: Fixtures = None):
        """Create a workload instance from a set of fixtures.

        Parameters:
        -----------
        fixtures (Fixtures) : a set of fixtures that this workload will use to
            retrieve a kubernetes api client plugin.

        """
        if fixtures is None:
            fixtures = self._environment.fixtures()

        # Retrieve and Validate the config overall using jsonschema
        try:
            # get a configerus LoadedConfig for the sonobuoy label
            loaded = self._environment.config().load(
                self._config_label, validator=SONOBUOY_VALIDATE_TARGET)
        except ValidationError as err:
            raise ValueError("Invalid sonobuoy config received") from err

        # We need to discover all of the plugins to run.
        #
        # For plugins with inline definitions, we need to create file
        # definitions to pass to sonobuoy.
        resources_path: str = loaded.get([self._config_base, "resources.path"],
                                         default="./")
        resources_prefix: str = loaded.get(
            [self._config_base, "resources.prefix"], default="sonobuoy-")
        resources_plugin_prefix: str = f"{resources_prefix}plugin"

        # If we have config then write it to a file
        config_path: str = ""
        sonobuoy_config: Dict[str,
                              Any] = loaded.get([self._config_base, "config"],
                                                default=[])
        if sonobuoy_config:
            config_path = resources_path + resources_prefix + "config.json"
            with open(config_path, "w", encoding="utf-8") as config_file:
                json.dump(sonobuoy_config, config_file)

        # if we have plugins then prepare them
        plugins: List[Plugin] = []
        for plugin_id in loaded.get(
            [self._config_base, SONOBUOY_CONFIG_KEY_PLUGINS],
                default={}).keys():

            plugin_envs = loaded.get(
                [
                    self._config_base,
                    SONOBUOY_CONFIG_KEY_PLUGINS,
                    plugin_id,
                    SONOBUOY_CONFIG_KEY_PLUGINENVS,
                ],
                default=plugin_id,
            )

            # plugin_def gives us a plugin definition which defines how we pass
            # to sonobuoy using the -p flag.
            #
            # If a plugin def is missing then plugin_id is used.
            #
            # It can be one of three types:
            # 1. a core plugin id like 'e2e'
            # 2. a path to a plugin yml file which defines a plugin.
            # 3. an object which defines the plugin conf which will be written
            #    to a yaml file.
            plugin_def = loaded.get(
                [
                    self._config_base,
                    SONOBUOY_CONFIG_KEY_PLUGINS,
                    plugin_id,
                    SONOBUOY_CONFIG_KEY_PLUGINDEF,
                ],
                default="",
            )
            plugin_path = loaded.get(
                [
                    self._config_base,
                    SONOBUOY_CONFIG_KEY_PLUGINS,
                    plugin_id,
                    SONOBUOY_CONFIG_KEY_PLUGINPATH,
                ],
                default="",
            )

            if plugin_def:
                # here we received a plugin definition which we must write to
                # a file.
                if not plugin_path:
                    plugin_path = (resources_path + resources_plugin_prefix +
                                   "-" + plugin_id + ".yml")

                with open(plugin_path, "w", encoding="utf-8") as plugin_file:
                    yaml.dump(plugin_def, plugin_file, encoding="utf-8")
                plugin_def = plugin_path

                plugins.append(
                    Plugin(plugin_id=plugin_id,
                           plugin_def=plugin_path,
                           envs=plugin_envs))
                continue

            if plugin_path:
                plugins.append(
                    Plugin(plugin_id=plugin_id,
                           plugin_def=plugin_path,
                           envs=plugin_envs))
                continue

            plugins.append(
                Plugin(plugin_id=plugin_id,
                       plugin_def=plugin_id,
                       envs=plugin_envs))

        # String path to where to keep the results.
        # maybe get this from config?
        results_path: str = loaded.get(
            [self._config_base, SONOBUOY_CONFIG_KEY_RESULTSPATH],
            default=SONOBUOY_DEFAULT_RESULTS_PATH,
        )

        kubeclient: KubernetesApiClientPlugin = fixtures.get_plugin(
            plugin_id=METTA_PLUGIN_ID_KUBERNETES_CLIENT, )

        client_fixture = self._environment.new_fixture(
            plugin_id=METTA_SONOBUOY_CLIENT_PLUGIN_ID,
            instance_id=self.client_instance_id(),
            priority=70,
            arguments={
                "kubeclient": kubeclient,
                "plugins": plugins,
                "config_path": config_path,
                "results_path": results_path,
            },
            labels={
                "container": "plugin",
                "environment": self._environment.instance_id(),
                "parent_plugin_id": METTA_SONOBUOY_WORKLOAD_PLUGIN_ID,
                "parent_instance_id": self._instance_id,
            },
            replace_existing=True,
        )
        # keep this fixture attached to the workload to make it retrievable.
        self.fixtures.add(client_fixture, replace_existing=True)
Example #26
class TestkitProvisionerPlugin:
    """Testkit provisioner plugin.

    Provisioner plugin that allows control of and interaction with a testkit
    cluster.

    ## Requirements

    1. this plugin uses subprocess to call a testkit binary, so you have to
       install testkit in the environment

    ## Usage

    @TODO

    """
    def __init__(
        self,
        environment,
        instance_id,
        label: str = TESTKIT_PROVISIONER_CONFIG_LABEL,
        base: Any = TESTKIT_PROVISIONER_CONFIG_BASE,
    ):
        """Initialize Testkit provisioner.

        Parameters:
        -----------
        environment (Environment) : metta environment object to which this
            plugin is attached.
        instance_id (str) : label for this plugin instance.

        label (str) : config load label for plugin configuration.
        base (str) : config base for loaded config for plugin configuration.

        """
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self._config_label = label
        """ configerus load label that should contain all of the config """
        self._config_base = base

        self.fixtures = Fixtures()
        """This object makes and keeps track of fixtures for MKE/MSR clients."""

        try:
            self._write_config_file()
            self.make_fixtures()
        # pylint: disable=broad-except
        except Exception as err:
            # there are many reasons this can fail, and we just want to
            # see if we can get fixtures early.
            # No need to ask forgiveness for this one.
            logger.debug("Could not make initial fixtures: %s", err)

    # the deep argument is a standard for the info hook
    # pylint: disable=unused-argument
    def info(self, deep: bool = False) -> Dict[str, Any]:
        """Get info about a provisioner plugin."""
        testkit_config = self._environment.config().load(self._config_label)

        return {
            "plugin": {
                "config": {
                    "config_label": self._config_label,
                    "config_base": self._config_base,
                },
                "config_file":
                testkit_config.get(
                    [self._config_base, TESTKIT_CONFIG_KEY_CONFIGFILE],
                    default="MISSING",
                ),
                "working_dir":
                testkit_config.get(
                    [self._config_base, TESTKIT_CONFIG_KEY_SYSTEMNAME],
                    default="MISSING"),
                "systems":
                testkit_config.get(
                    [self._config_base, TESTKIT_CONFIG_KEY_SYSTEMNAME],
                    default="MISSING",
                ),
            },
            "client": self._get_client_plugin().info(deep=deep),
        }

    def prepare(self):
        """Prepare any needed resources.

        We don't create the testkit file here so that it is created as late as
        possible.  This allows more options for dynamic config sources in the
        testkit config.

        """
        # Make sure that we are running on up to date config
        self._write_config_file()
        self.make_fixtures()

    def apply(self):
        """Create the testkit yaml file and run testkit to create a cluster."""
        # Make sure that we are running on up to date config
        self._write_config_file()
        self.make_fixtures()

        testkit_config = self._environment.config().load(self._config_label,
                                                         force_reload=True)
        """ load the plugin configuration so we can retrieve options """
        opts = testkit_config.get(
            [self._config_base, TESTKIT_CONFIG_KEY_CREATE_OPTIONS], default={})
        """ retrieve testkit client options from config """
        opt_list = []
        for key, value in opts.items():
            if isinstance(value, str):
                opt_list.append(f'--{key}="{value}"')
            else:
                opt_list.append(f"--{key}={value}")

        # run the testkit client command to provision the cluster
        self._get_client_plugin().create(opts=opt_list)

    def destroy(self):
        """Destroy any created resources."""
        # run the testkit client command to provision the cluster
        self._get_client_plugin().destroy()
        self._rm_config_file()

    def _write_config_file(self):
        """Write the config file for testkit."""
        try:
            # load all of the testkit configuration, force a reload to get up to date contents
            testkit_config = self._environment.config().load(
                self._config_label, force_reload=True)
            config = testkit_config.get(
                [self._config_base, TESTKIT_CONFIG_KEY_CONFIG],
                validator=TESTKIT_CONFIG_VALIDATE_TARGET,
            )
            """ config source of launchpad yaml """
        except KeyError as err:
            raise ValueError(
                "Could not find launchpad configuration from config.") from err
        except ValidationError as err:
            raise ValueError("Launchpad config failed validation") from err

        config_file = testkit_config.get(
            [self._config_base, TESTKIT_CONFIG_KEY_CONFIGFILE],
            default=TESTKIT_CONFIG_DEFAULT_CONFIGFILE,
        )
        """ config_file value from plugin configuration """

        # write the config to our yaml file target (creating the path)
        os.makedirs(os.path.dirname(os.path.realpath(config_file)),
                    exist_ok=True)
        with open(os.path.realpath(config_file), "w", encoding="utf8") as file:
            yaml.dump(config, file)

    def _rm_config_file(self):
        """Remove the written config file."""
        testkit_config = self._environment.config().load(self._config_label)
        config_file = testkit_config.get(
            [self._config_base, TESTKIT_CONFIG_KEY_CONFIGFILE],
            default=TESTKIT_CONFIG_DEFAULT_CONFIGFILE,
        )
        if os.path.isfile(config_file):
            os.remove(config_file)

    def make_fixtures(self):
        """Make related fixtures from a testkit installation.

        Creates:
        --------

        Testkit client : a client for interaction with the testkit cli

        """
        testkit_config = self._environment.config().load(self._config_label,
                                                         force_reload=True)
        """ load the plugin configuration so we can retrieve options """

        try:
            testkit_config = self._environment.config().load(
                self._config_label, force_reload=True)
            """ loaded plugin configuration label """
        except KeyError as err:
            raise ValueError(
                "Testkit plugin configuration did not have any config"
            ) from err

        system_name = testkit_config.get(
            [self._config_base, TESTKIT_CONFIG_KEY_SYSTEMNAME])
        """ hat will testkit call the system """

        # instances = testkit_config.get([self._config_base, TESTKIT_CONFIG_KEY_INSTANCES])
        # """ what instances to create """

        config_file = testkit_config.get(
            [self._config_base, TESTKIT_CONFIG_KEY_CONFIGFILE],
            default=TESTKIT_CONFIG_DEFAULT_CONFIGFILE,
        )
        """ config_file value from plugin configuration """

        systems = testkit_config.get(
            [self._config_base, TESTKIT_CONFIG_KEY_SYSTEMS],
            default={},
        )

        fixture = self._environment.new_fixture(
            plugin_id=METTA_TESTKIT_CLIENT_PLUGIN_ID,
            instance_id=self.client_instance_id(),
            priority=70,
            arguments={
                "config_file": config_file,
                "system_name": system_name,
                "systems": systems,
            },
            labels={
                "parent_plugin_id": METTA_TESTKIT_PROVISIONER_PLUGIN_ID,
                "parent_instance_id": self._instance_id,
            },
            replace_existing=True,
        )
        # keep this fixture attached to the provisioner to make it retrievable.
        self.fixtures.add(fixture, replace_existing=True)

    def client_instance_id(self) -> str:
        """Construct an instanceid for the child client plugin."""
        return f"{self._instance_id}-{METTA_TESTKIT_CLIENT_PLUGIN_ID}"

    def _get_client_plugin(self) -> TestkitClientPlugin:
        """Retrieve the client plugin if we can."""
        try:
            return self.fixtures.get_plugin(
                instance_id=self.client_instance_id())
        except KeyError as err:
            raise RuntimeError(
                "Testkit provisioner cannot find its client plugin, and "
                "cannot process any client actions.  Was a client created?"
            ) from err
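The apply() method above turns the create-options dict into CLI flags, quoting only string values. A small self-contained sketch of that conversion with hypothetical options:

# Mirrors the option loop in apply(): strings are quoted, everything else is not.
opts = {"driver": "aws", "instances": 3, "debug": True}

opt_list = []
for key, value in opts.items():
    if isinstance(value, str):
        opt_list.append(f'--{key}="{value}"')
    else:
        opt_list.append(f"--{key}={value}")

# opt_list == ['--driver="aws"', '--instances=3', '--debug=True']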
Example #27
class TerraformClientPlugin:
    """Metta terraform client."""

    # pylint: disable=too-many-arguments
    def __init__(
        self,
        environment: Environment,
        instance_id: str,
        chart_path: str,
        state_path: str,
        tfvars: Dict[str, Any],
        tfvars_path: str,
    ):
        """Initial client configuration."""
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self.tfvars: Dict[str, Any] = tfvars
        """Terraform vars to pass to TF as a tfvars file."""
        self._tfvars_path: str = os.path.realpath(tfvars_path)
        """Path to write for the tfvars file."""

        self._tf_handler = TerraformClient(
            working_dir=os.path.realpath(chart_path),
            state_path=os.path.realpath(state_path),
            tfvars_path=os.path.realpath(tfvars_path),
        )
        """Terraform handler which actually runs terraform commands."""

        self.fixtures = Fixtures()
        """All fixtures added to this plugin, which are primarily TF output plugins."""

        # if the cluster is already provisioned then we can get outputs from it
        try:
            self.make_fixtures()
        # pylint: disable=broad-except
        except Exception:
            pass

    # deep argument is an info() standard across plugins
    # pylint: disable=unused-argument
    def info(self, deep: bool = False):
        """Get info about the client plugin.

        Returns:
        --------
        Dict of keyed introspective information about the plugin.

        """
        info = {
            "config": {
                "tfvars": self.tfvars,
                "tfvars_path": self._tfvars_path,
            },
            "client": self._tf_handler.info(deep=deep),
        }

        return info

    def state(self):
        """Return the terraform state contents."""
        return self._tf_handler.state()

    def init(self, upgrade: bool = False):
        """Run terraform init."""
        self._tf_handler.init(upgrade=upgrade)

    def apply(self, lock: bool = True):
        """Apply a terraform plan."""
        self._make_tfvars_file()
        self._tf_handler.apply(lock=lock)
        self.make_fixtures()

    def destroy(self, lock: bool = True):
        """Apply a terraform plan."""
        self._tf_handler.destroy(lock=lock)
        self._rm_tfvars_file()

    def test(self):
        """Apply a terraform plan."""
        self._make_tfvars_file()
        return self._tf_handler.test()

    def plan(self):
        """Check a terraform plan."""
        self._make_tfvars_file()
        return self._tf_handler.plan()

    def providers_schema(self):
        """Retrieve terraform providers schema.

        Returns:
        --------
        json Schema

        """
        return self._tf_handler.providers_schema()

    def graph(self, type: str = "plan"):
        """Retrieve terraform graph.

        Returns:
        --------
        Terraform graph

        """
        return self._tf_handler.graph(type=type)

    def output(self, name: str = ""):
        """Retrieve terraform outputs.

        Run the terraform output command to retrieve outputs.
        Outputs are always returned as json, as it is the only way to machine
        parse outputs properly.

        Returns:
        --------
        If you provided a name, then a single output is returned, otherwise a
        dict of outputs is returned.

        """
        return self._tf_handler.output(name=name)

    def _make_tfvars_file(self):
        """Write the vars file."""
        os.makedirs(os.path.dirname(self._tfvars_path), exist_ok=True)
        with open(self._tfvars_path, "w", encoding="utf8") as var_file:
            json.dump(self.tfvars, var_file, sort_keys=True, indent=4)

    def _rm_tfvars_file(self):
        """Remove any created vars file."""
        tfvars_path = self._tfvars_path
        if os.path.isfile(tfvars_path):
            os.remove(tfvars_path)

    def make_fixtures(self):
        """Retrieve an output from terraform.

        For other METTA plugins we can just load configuration and create
        output plugin instances from various values in config.

        We do that here, but we also want to check for any outputs exported by
        the terraform root module, which we get using the tf client.

        If we find a root module output without a matching config output
        definition then we make some assumptions about plugin type and add it
        to the list. We make some simple investigation into output plugin types
        and pick either the contrib.common.dict or contrib.common.text plugins.

        If we find a root module output that matches an output that was
        declared in config then we use that.  This allows config to define a
        plugin_id which will then be populated automatically.  If you know what
        type of data you are expecting from a particular tf output then you can
        prepare and config for it to do things like setting default values.

        Priorities can be used in the config.

        """
        # Now we ask TF what outputs it knows about and merge those in as new
        # output plugins.
        # self.output() returns a dict keyed by output name; each value has
        # 'sensitive' (bool), 'type' ([str, spec]) and 'value' keys.
        for output_key, output_struct in self.output().items():
            # Here is the kind of info we can get out of terraform
            # output_sensitive = bool(output_struct['sensitive'])
            # """ Whether or not the output contains sensitive data """
            output_type = output_struct["type"][0]
            # output_spec = output_struct['type'][1]
            # """ A structured spec for the type """
            output_value = output_struct["value"]

            # see if we already have an output plugin for this name
            fixture = self.fixtures.get(
                interfaces=[METTA_PLUGIN_INTERFACE_ROLE_OUTPUT],
                instance_id=output_key,
                exception_if_missing=False,
            )
            if fixture is not None:
                if hasattr(fixture.plugin, "set_data"):
                    fixture.plugin.set_data(output_value)
                elif hasattr(fixture.plugin, "set_text"):
                    fixture.plugin.set_text(str(output_value))

            else:
                # we only know how to create 2 kinds of outputs
                if output_type == "object":
                    fixture = self._environment.new_fixture(
                        plugin_id=METTA_PLUGIN_ID_OUTPUT_DICT,
                        instance_id=output_key,
                        priority=self._environment.plugin_priority(delta=5),
                        arguments={"data": output_value},
                        labels={
                            "parent_plugin_id":
                            METTA_TERRAFORM_CLIENT_PLUGIN_ID,
                            "parent_instance_id": self._instance_id,
                        },
                    )
                else:
                    fixture = self._environment.new_fixture(
                        plugin_id=METTA_PLUGIN_ID_OUTPUT_TEXT,
                        instance_id=output_key,
                        priority=self._environment.plugin_priority(delta=5),
                        arguments={"text": str(output_value)},
                        labels={
                            "parent_plugin_id":
                            METTA_TERRAFORM_CLIENT_PLUGIN_ID,
                            "parent_instance_id": self._instance_id,
                        },
                    )

                self.fixtures.add(fixture)
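
# A minimal, hedged usage sketch for the terraform client plugin above.  The
# helper name and the "app_url" output name are illustrative only; the calls
# used (init/apply/destroy and the fixtures API) are the ones defined in this
# example.
def example_terraform_client_usage(client):
    """Drive the terraform lifecycle through the client plugin."""
    client.init(upgrade=False)  # terraform init
    client.apply(lock=True)     # writes the tfvars file, applies, makes fixtures

    # After apply(), each root-module output is retrievable as a fixture.
    fixture = client.fixtures.get(
        instance_id="app_url",  # hypothetical output name
        exception_if_missing=False,
    )
    if fixture is not None:
        print(fixture.plugin)   # dict/text output plugin holding the value

    client.destroy(lock=True)   # destroys resources and removes the tfvars file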
Exemple #28
0
class TestkitClientPlugin:
    """Testkit client plugin.

    Client plugin that allows control of and interaction with a testkit
    cluster.

    ## Requirements

    1. this plugin uses subprocess to call a testkit binary, so you have to
       install testkit in the environment

    ## Usage

    @TODO

    """

    # pylint: disable=too-many-arguments
    def __init__(
        self,
        environment,
        instance_id,
        system_name: str,
        config_file: str,
        systems: Dict[str, Dict[str, str]] = None,
    ):
        """Initialize Testkit provisioner.

        Parameters:
        -----------
        environment (Environment) : metta environment object to which this
            plugin is attached.
        instance_id (str) : label for this plugin instance.
        config_file (str) : string path to the testkit config file.
        systems (Dict[str, Dict[str, str]]) : A dictionary of systems which this
            client is expected to provide using testkit.

            This is information which ideally could be determined using the
            config/file client directly, but it sits outside of what the
            tool's conf encapsulates.

            What we are talking about here is information to answer questions
            such as:

                Did testkit install MKE? If so, what accesspoint and
                username/password can I use to build an MKE client to access
                it?

            This is not an ideal approach but rather a necessity.

        """
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self._system_name: str = system_name
        """ What will testkit call the system, used client ops """

        self._testkit = TestkitClient(config_file=config_file)
        """ testkit client object """

        self._systems = systems
        """What systems will testkit install, so what fixtures are needed"""

        self.fixtures = Fixtures()
        """This object makes and keeps track of fixtures for MKE/MSR clients."""
        try:
            self.make_fixtures()
        # pylint: disable=broad-except
        except Exception as err:
            # There are many reasons this can fail; we only want to build
            # fixtures early if we can, so log and carry on.
            logger.debug("Could not make initial fixtures: %s", err)

    # the deep argument is a standard for the info hook
    # pylint: disable=unused-argument
    def info(self, deep: bool = False) -> Dict[str, Any]:
        """Get info about a provisioner plugin."""
        info = {
            "plugin": {
                "system_name": self._system_name,
            },
            "client": self._testkit.info(deep=deep),
        }
        if deep:
            try:
                info["hosts"] = self.hosts()
            # pylint: disable=broad-except
            except Exception:
                pass

        return info

    def version(self):
        """Return testkit client version."""
        return self._testkit.version()

    def create(self, opts: List[str]):
        """Run the testkit create command."""
        self._testkit.create(opts=opts)
        self.make_fixtures()

        mke_plugin = self._get_mke_client_plugin()
        mke_plugin.api_get_bundle(force=True)
        mke_plugin.make_fixtures()

    def destroy(self):
        """Remove a system from testkit."""
        return self._testkit.system_rm(system_name=self._system_name)

    def hosts(self):
        """List testkit system machines."""
        return self._testkit.machine_ls(system_name=self._system_name)

    def exec(self, host: str, cmd: str):
        """List testkit system machines."""
        return self._testkit.machine_ssh(machine=host, cmd=cmd)

    def system_ls(self):
        """List all of the systems testkit can see using our config."""
        return self._testkit.system_ls()

    # pylint: disable=too-many-branches
    def make_fixtures(self):
        """Make related fixtures from a testkit installation.

        Creates:
        --------

        MKE client : if we have manager nodes, then we create an MKE client
            which will then create docker and kubernetes clients if they are
            appropriate.

        MSR Client : if we have an MSR node, then the related client is
            created.

        """
        if self._systems is None:
            return

        testkit_hosts = self._testkit.machine_ls(system_name=self._system_name)
        """ list of all of the testkit hosts. """

        manager_hosts = []
        worker_hosts = []
        mke_hosts = []
        msr_hosts = []
        for host in testkit_hosts:
            host["address"] = host["public_ip"]
            if host["swarm_manager"] == "yes":
                manager_hosts.append(host)
            else:
                worker_hosts.append(host)

            if host["ucp_controller"] == "yes":
                mke_hosts.append(host)

        if len(msr_hosts) == 0 and len(worker_hosts) > 0:
            # Testkit installs MSR on the first worker node, but the api is
            # accessible using port 444 in order to not conflict.
            first_worker = worker_hosts[0]
            first_worker_ip = first_worker["public_ip"]
            first_worker["msr_accesspoint"] = f"{first_worker_ip}:444"
            msr_hosts.append(first_worker)

        if (len(mke_hosts) > 0
                and METTA_MIRANTIS_CLIENT_MKE_PLUGIN_ID in self._systems):
            instance_id = f"{self._instance_id}-{METTA_MIRANTIS_CLIENT_MKE_PLUGIN_ID}"
            arguments = self._systems[METTA_MIRANTIS_CLIENT_MKE_PLUGIN_ID]
            arguments["hosts"] = mke_hosts

            if "accesspoint" in arguments and arguments["accesspoint"]:
                arguments["accesspoint"] = clean_accesspoint(
                    arguments["accesspoint"])

            logger.debug(
                "Launchpad client is creating an MKE client plugin: %s",
                instance_id)
            fixture = self._environment.new_fixture(
                plugin_id=METTA_MIRANTIS_CLIENT_MKE_PLUGIN_ID,
                instance_id=instance_id,
                priority=70,
                arguments=arguments,
                labels={
                    "parent_plugin_id": METTA_TESTKIT_CLIENT_PLUGIN_ID,
                    "parent_instance_id": self._instance_id,
                },
                replace_existing=True,
            )
            self.fixtures.add(fixture, replace_existing=True)

            # We got an MKE client, so let's activate it.

        else:
            logger.debug(
                "No MKE master hosts found, not creating an MKE client.")

        if (len(msr_hosts) > 0
                and METTA_MIRANTIS_CLIENT_MSR_PLUGIN_ID in self._systems):
            instance_id = f"{self._instance_id}-{METTA_MIRANTIS_CLIENT_MSR_PLUGIN_ID}"
            arguments = self._systems[METTA_MIRANTIS_CLIENT_MSR_PLUGIN_ID]
            arguments["hosts"] = msr_hosts

            if "accesspoint" in arguments and arguments["accesspoint"]:
                arguments["accesspoint"] = clean_accesspoint(
                    arguments["accesspoint"])

            logger.debug(
                "Launchpad client is creating an MSR client plugin: %s",
                instance_id)
            fixture = self._environment.new_fixture(
                plugin_id=METTA_MIRANTIS_CLIENT_MSR_PLUGIN_ID,
                instance_id=instance_id,
                priority=70,
                arguments=arguments,
                labels={
                    "parent_plugin_id": METTA_TESTKIT_CLIENT_PLUGIN_ID,
                    "parent_instance_id": self._instance_id,
                },
                replace_existing=True,
            )
            self.fixtures.add(fixture, replace_existing=True)

        else:
            logger.debug(
                "No MSR master hosts found, not creating an MSR client.")

    def _get_mke_client_plugin(self) -> MKEAPIClientPlugin:
        """Retrieve the MKE client plugin if we can."""
        try:
            return self.fixtures.get_plugin(
                plugin_id=METTA_MIRANTIS_CLIENT_MKE_PLUGIN_ID)
        except KeyError as err:
            raise RuntimeError(
                "Launchpad client cannot find its MKE client plugin, and "
                "cannot process any client actions.  Was a client created?"
            ) from err
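
# A minimal, hedged usage sketch for the testkit client plugin above.  The
# helper name, the create option and the machine name are illustrative only;
# the methods used are the ones defined in this example.
def example_testkit_client_usage(testkit_client):
    """Create a testkit system, inspect it, then remove it."""
    testkit_client.create(opts=["--debug"])  # hypothetical testkit CLI option

    for host in testkit_client.hosts():      # machines in the testkit system
        print(host.get("public_ip"))

    testkit_client.exec(host="manager-0",    # hypothetical machine name
                        cmd="docker info")

    testkit_client.destroy()                 # remove the testkit system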
Exemple #29
0
class TerraformProvisionerPlugin:
    """Terraform provisioner plugin.

    Provisioner plugin that allows control of and interaction with a terraform
    cluster.

    ## Requirements

    1. this plugin uses subprocess to call a terraform binary, so you have to
        install terraform in the environment

    ## Usage

    ### Plan

    The plan must exist somewhere on disk and be accessible.

    You must specify the path and related configuration in config, which are
    read in the .prepare() execution.

    ### Vars/State

    This plugin reads TF vars from config and writes them to a vars file.  We
    could run without relying on a vars file, but having one allows cli
    interaction with the cluster if this plugin messes up.

    You can override where Terraform vars/state files are written to allow
    sharing of a plan across test suites.

    Parameters:
    -----------
    environment (Environment) : All metta plugins receive the environment
        object in which they were created.
    instance_id (str) : all metta plugins receive their own string identity.

    label (str) : Configerus load label for finding plugin config.
    base (str) : Configerus get base key in which the plugin should look for
        all config.

    """
    def __init__(
        self,
        environment: Environment,
        instance_id: str,
        label: str = TERRAFORM_PROVISIONER_CONFIG_LABEL,
        base: Any = LOADED_KEY_ROOT,
    ):
        """Run the super constructor but also set class properties.

        Interpret provided config and configure the object with all of the
        needed pieces for executing terraform commands

        """
        self._environment: Environment = environment
        """ Environemnt in which this plugin exists """
        self._instance_id: str = instance_id
        """ Unique id for this plugin instance """

        self._config_label = label
        """ configerus load label that should contain all of the config """
        self._config_base = base
        """ configerus get key that should contain all tf config """

        self.fixtures: Fixtures = Fixtures()
        """Children fixtures, typically just the client plugin."""

        # Make the client fixture in the constructor.  The TF client fixture is
        # quite state safe, and should only need to be created once, unlike
        # other provisioner clients which may be vulnerable to state change.
        self.make_fixtures()

    # deep argument is an info() standard across plugins
    # pylint: disable=unused-argument
    def info(self, deep: bool = False):
        """Get info about the provisioner plugin.

        Returns:
        --------
        Dict of keyed introspective information about the plugin.

        """
        terraform_config: Loaded = self._environment.config().load(
            self._config_label)

        info = {
            "config": {
                "label":
                self._config_label,
                "base":
                self._config_base,
                "tfvars":
                terraform_config.get(
                    [
                        self._config_base,
                        TERRAFORM_PROVISIONER_CONFIG_TFVARS_KEY
                    ],
                    default="NONE",
                ),
                "chart_path":
                terraform_config.get(
                    [
                        self._config_base,
                        TERRAFORM_PROVISIONER_CONFIG_CHART_PATH_KEY
                    ],
                    default="MISSING",
                ),
                "state_path":
                terraform_config.get(
                    [
                        self._config_base,
                        TERRAFORM_PROVISIONER_CONFIG_STATE_PATH_KEY
                    ],
                    default="MISSING",
                ),
                "tfvars_path":
                terraform_config.get(
                    [
                        self._config_base,
                        TERRAFORM_PROVISIONER_CONFIG_TFVARS_PATH_KEY
                    ],
                    default="MISSING",
                ),
            },
            "client": {
                "instance_id": self.client_instance_id(),
            },
        }

        return info

    def prepare(self):
        """Run terraform init."""
        logger.info("Running Terraform INIT")
        self._get_client_plugin().init()

    def apply(self, lock: bool = True):
        """Create all terraform resources described in the plan."""
        logger.info("Running Terraform APPLY")
        self._get_client_plugin().apply(lock=lock)

    def destroy(self, lock: bool = True):
        """Remove all terraform resources in state."""
        logger.info("Running Terraform DESTROY")
        self._get_client_plugin().destroy(lock=lock)
        # accessing parent property for clearing out existing output fixtures
        # pylint: disable=attribute-defined-outside-init
        self.fixtures = Fixtures()

    def make_fixtures(self):
        """Make the client plugin for terraform interaction."""
        try:
            terraform_config = self._environment.config().load(
                self._config_label,
                force_reload=True,
                validator=TERRAFORM_VALIDATE_TARGET)
            """ get a configerus LoadedConfig for the label """
        except ValidationError as err:
            raise ValueError("Terraform config failed validation") from err

        try:
            chart_path = terraform_config.get([
                self._config_base, TERRAFORM_PROVISIONER_CONFIG_CHART_PATH_KEY
            ])
            """ subprocess commands for terraform will be run in this path """
        except Exception as err:
            raise ValueError(
                "Plugin config did not give us a working/plan path:"
                f" {terraform_config.get()}") from err

        state_path = terraform_config.get(
            [self._config_base, TERRAFORM_PROVISIONER_CONFIG_STATE_PATH_KEY],
            default=os.path.join(chart_path,
                                 TERRAFORM_PROVISIONER_DEFAULT_STATE_SUBPATH),
        )
        """ terraform state path """

        tfvars = terraform_config.get(
            [self._config_base, TERRAFORM_PROVISIONER_CONFIG_TFVARS_KEY],
            default={},
        )
        """ List of vars to pass to terraform.  Will be written to a file """

        tfvars_path = terraform_config.get(
            [self._config_base, TERRAFORM_PROVISIONER_CONFIG_TFVARS_PATH_KEY],
            default=os.path.join(chart_path,
                                 TERRAFORM_PROVISIONER_DEFAULT_TFVARS_FILE),
        )
        """ vars file which will be written before running terraform """

        logger.debug("Creating Terraform client")

        fixture = self._environment.new_fixture(
            plugin_id=METTA_TERRAFORM_CLIENT_PLUGIN_ID,
            instance_id=self.client_instance_id(),
            priority=70,
            arguments={
                "chart_path": chart_path,
                "state_path": state_path,
                "tfvars": tfvars,
                "tfvars_path": tfvars_path,
            },
            labels={
                "parent_plugin_id": METTA_TERRAFORM_PROVISIONER_PLUGIN_ID,
                "parent_instance_id": self._instance_id,
            },
            replace_existing=True,
        )
        # keep this fixture attached to the workload to make it retrievable.
        self.fixtures.add(fixture, replace_existing=True)

    def client_instance_id(self) -> str:
        """Construct an instanceid for the child client plugin."""
        return f"{self._instance_id}-{METTA_TERRAFORM_CLIENT_PLUGIN_ID}"

    def _get_client_plugin(self) -> TerraformClientPlugin:
        """Retrieve the client plugin if we can."""
        return self.fixtures.get_plugin(
            plugin_id=METTA_TERRAFORM_CLIENT_PLUGIN_ID)
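
# A minimal, hedged usage sketch for the provisioner plugin above.  The helper
# name is illustrative; prepare/apply/info/destroy are the methods defined in
# this example.
def example_terraform_provisioner_usage(provisioner):
    """Run the typical provisioner lifecycle: init, apply, tear down."""
    provisioner.prepare()                # terraform init via the client plugin
    provisioner.apply(lock=True)         # terraform apply; client makes output fixtures
    print(provisioner.info()["client"])  # {"instance_id": "<instance_id>-<client plugin id>"}
    provisioner.destroy(lock=True)       # terraform destroy; output fixtures cleared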
Exemple #30
0
"""

Global mutables.

Keep all package global variables in this module.

"""
from configerus.config import Config
from configerus.contrib.dict import PLUGIN_ID_SOURCE_DICT as CONFIGERUS_SOURCE_DICT

from mirantis.testing.metta.fixture import Fixtures

# A global config object, which metta will create if needed.  This is not
# absolutely required, but it makes sense in scenarios where Metta is asked
# to bootstrap itself and discover environments.
global_config: Config = Config()
global_config.add_source(plugin_id=CONFIGERUS_SOURCE_DICT,
                         instance_id="metta-global",
                         priority=10).set_data(
                             {"metta": {
                                 "instance_id": "metta"
                             }})

# The global fixtures set allows us to manage session/global based fixtures
# which allows us to keep components such as environments as plugins, and
# to use them in a global scope.
global_fixtures: Fixtures = Fixtures()
""" Keep a set fixtures which are kept in global scope """