Esempio n. 1
0
    def abort(self, soft=False):
        """Abort the task by moving it to an (SOFT_)ABORTING status.

        :param soft: if True, request a soft abort (status SOFT_ABORTING);
            otherwise request an immediate abort (status ABORTING)
        :raises exceptions.RallyException: if abort is not implemented for
            the task's current stage, or the task has already finished
        """
        current_status = self.get_status(self.task["uuid"])

        if current_status in self.NOT_IMPLEMENTED_STAGES_FOR_ABORT:
            # NOTE: fixed grammar of the user-facing message
            # ("It doesn't implemented" -> "It is not implemented").
            raise exceptions.RallyException(
                _LE("Failed to abort task '%(uuid)s'. It is not implemented "
                    "for '%(stages)s' stages. Current task status is "
                    "'%(status)s'.") %
                {
                    "uuid": self.task["uuid"],
                    "status": current_status,
                    "stages": ", ".join(self.NOT_IMPLEMENTED_STAGES_FOR_ABORT)
                })
        elif current_status in [
                consts.TaskStatus.FINISHED, consts.TaskStatus.CRASHED,
                consts.TaskStatus.ABORTED
        ]:
            raise exceptions.RallyException(
                _LE("Failed to abort task '%s', since it already "
                    "finished.") % self.task["uuid"])

        new_status = (consts.TaskStatus.SOFT_ABORTING
                      if soft else consts.TaskStatus.ABORTING)
        # Only a running (or already soft-aborting) task may be switched.
        self.update_status(new_status,
                           allowed_statuses=(consts.TaskStatus.RUNNING,
                                             consts.TaskStatus.SOFT_ABORTING))
Esempio n. 2
0
    def check_db_revision(self):
        """Ensure the database exists and is migrated to the latest revision.

        :raises exceptions.RallyException: if the database is missing or is
            not at the current head revision
        """
        revision_info = rally_version.database_revision()

        # A None revision means the database was never created.
        if revision_info["revision"] is None:
            raise exceptions.RallyException(
                _LE("Database is missing. Create database by command "
                    "`rally-manage db create'"))

        # An out-of-date database must be upgraded before Rally can run.
        if revision_info["revision"] != revision_info["current_head"]:
            raise exceptions.RallyException(
                _LE("Database seems to be outdated. Run upgrade from "
                    "revision %(revision)s to %(current_head)s by command "
                    "`rally-manage db upgrade'") % revision_info)
Esempio n. 3
0
    def wrapper(cls):
        """Mark *cls* as a plugin base class and return it unchanged."""
        if not issubclass(cls, Plugin):
            raise exceptions.RallyException(_LE(
                "Plugin's Base can be only a subclass of Plugin class."))

        # A class may only become a base if nothing above it already is one.
        base = cls._get_base()
        if base != Plugin:
            raise exceptions.RallyException(_LE(
                "'%(plugin_cls)s' can not be marked as plugin base, since it "
                "inherits from '%(parent)s' which is also plugin base.") % {
                "plugin_cls": cls.__name__,
                "parent": base.__name__})

        # Record the class as its own base reference.
        cls.base_ref = cls
        return cls
Esempio n. 4
0
    def install_extension(self, source, version=None, extra_settings=None):
        """Install a Tempest plugin from a git repository.

        :param source: path or URL of the repository to install from
        :param version: branch, tag or commit ID to install (default "master")
        :param extra_settings: not supported by this verifier
        :raises NotImplementedError: if extra_settings is passed
        """
        if extra_settings:
            raise NotImplementedError(
                _LE("'%s' verifiers don't support extra installation settings "
                    "for extensions.") % self.get_name())
        version = version or "master"
        # NOTE: raw string fixes the invalid "\." escape sequence (a
        # DeprecationWarning / SyntaxWarning on modern Python).
        egg = re.sub(r"\.git$", "", os.path.basename(source.strip("/")))
        full_source = "git+{0}@{1}#egg={2}".format(source, version, egg)
        # NOTE(ylobankov): Use 'develop mode' installation to provide an
        #                  ability to advanced users to change tests or
        #                  develop new ones in verifier repo on the fly.
        cmd = ["pip", "install",
               "--src", os.path.join(self.base_dir, "extensions"),
               "-e", full_source]
        if self.verifier.system_wide:
            # In system-wide mode, dependencies must already be installed.
            cmd.insert(2, "--no-deps")
        utils.check_output(cmd, cwd=self.base_dir, env=self.environ)

        # Very often Tempest plugins are inside projects and requirements
        # for plugins are listed in the test-requirements.txt file.
        test_reqs_path = os.path.join(self.base_dir, "extensions",
                                      egg, "test-requirements.txt")
        if os.path.exists(test_reqs_path):
            if not self.verifier.system_wide:
                utils.check_output(["pip", "install", "-r", test_reqs_path],
                                   cwd=self.base_dir, env=self.environ)
            else:
                self.check_system_wide(reqs_file_path=test_reqs_path)
Esempio n. 5
0
    def setup(self):
        """Upload the configured Murano package into every tenant.

        The package may be given either as a zip archive or as a directory
        (which is packed into a zip by ``fileutils.pack_dir`` first).

        :raises exceptions.ContextSetupFailure: if the configured path is
            neither a zip archive nor a directory
        """
        is_config_app_dir = False
        pckg_path = os.path.expanduser(self.config["app_package"])
        if zipfile.is_zipfile(pckg_path):
            zip_name = pckg_path
        elif os.path.isdir(pckg_path):
            is_config_app_dir = True
            zip_name = fileutils.pack_dir(pckg_path)
        else:
            msg = (_LE("There is no zip archive or directory by this path:"
                       " %s") % pckg_path)
            raise exceptions.ContextSetupFailure(msg=msg,
                                                 ctx_name=self.get_name())

        for user, tenant_id in utils.iterate_per_tenants(
                self.context["users"]):
            clients = osclients.Clients(user["endpoint"])
            self.context["tenants"][tenant_id]["packages"] = []
            if is_config_app_dir:
                self.context["tenants"][tenant_id]["murano_ctx"] = zip_name
            # NOTE: open the zip in binary mode and close the handle after
            # the upload instead of leaking an open text-mode descriptor.
            with open(zip_name, "rb") as package_file:
                package = clients.murano().packages.create(
                    {
                        "categories": ["Web"],
                        "tags": ["tag"]
                    }, {"file": package_file})

            self.context["tenants"][tenant_id]["packages"].append(package)
Esempio n. 6
0
def get_interpreter(python_version):
    """Discovers PATH to find proper python interpreter

    :param python_version: (major, minor) version numbers
    :type python_version: tuple
    :returns: path to a matching interpreter, or None when nothing suitable
        is found
    :raises exceptions.InvalidArgumentsException: if python_version is not
        a tuple
    """

    if not isinstance(python_version, tuple):
        msg = (_LE("given format of python version `%s` is invalid") %
               python_version)
        raise exceptions.InvalidArgumentsException(msg)

    interpreter_name = "python%s.%s" % python_version
    interpreter = spawn.find_executable(interpreter_name)
    if interpreter:
        return interpreter

    # Fall back to probing every PATH entry for the executable and asking
    # each candidate for its actual version.
    interpreters = filter(
        os.path.isfile, [os.path.join(p, interpreter_name)
                         for p in os.environ.get("PATH", "").split(":")])
    cmd = "%s -c 'import sys; print(sys.version_info[:2])'"
    for interpreter in interpreters:
        try:
            out = sp_check_output(cmd % interpreter, shell=True)
        except subprocess.CalledProcessError:
            # A broken candidate is skipped; it is not a fatal error.
            pass
        else:
            # NOTE: check_output returns bytes on Python 3; decode before
            # comparing with the str representation of the version tuple,
            # otherwise the comparison can never succeed.
            if not isinstance(out, str):
                out = out.decode("utf-8")
            if out.strip() == str(python_version):
                return interpreter
    # Be explicit about the "nothing found" result.
    return None
Esempio n. 7
0
    def create(cls, config, name):
        """Create a deployment.

        :param config: a dict with deployment configuration
        :param name: a str represents a name of the deployment
        :returns: Deployment object
        :raises exceptions.DeploymentNameExists: if the name is taken
        :raises jsonschema.ValidationError: if the config fails validation
        """

        try:
            deployment = objects.Deployment(name=name, config=config)
        except exceptions.DeploymentNameExists as e:
            # A duplicate name is re-raised as-is; log the traceback only
            # when debug logging is enabled.
            if logging.is_debug():
                LOG.exception(e)
            raise

        deployer = deploy_engine.Engine.get_engine(
            deployment["config"]["type"], deployment)
        try:
            deployer.validate()
        except jsonschema.ValidationError:
            # NOTE: pass the uuid as a lazy logging argument instead of
            # eagerly interpolating it with "%".
            LOG.error(_LE("Deployment %s: Schema validation error."),
                      deployment["uuid"])
            # Mark the deployment as failed before propagating the error.
            deployment.update_status(consts.DeployStatus.DEPLOY_FAILED)
            raise

        with deployer:
            credentials = deployer.make_deploy()
            deployment.update_credentials(credentials)
            return deployment
Esempio n. 8
0
    def _migrate_up(self, engine, version, with_data=False):
        """Migrate up to a new version of the db.

        We allow for data insertion and post checks at every
        migration version with special _pre_upgrade_### and
        _check_### functions in the main test.
        """
        # NOTE(sdague): try block is here because it's impossible to debug
        # where a failed data migration happens otherwise
        check_version = version
        try:
            if with_data:
                data = None
                # Optional per-version hook that seeds data before the
                # upgrade runs.
                pre_upgrade = getattr(self, "_pre_upgrade_%s" % check_version,
                                      None)
                if pre_upgrade:
                    data = pre_upgrade(engine)
            self._migrate(engine, version, "upgrade")
            # The db must report exactly the version we migrated to.
            self.assertEqual(version, self._get_version_from_db(engine))
            if with_data:
                # Optional per-version hook that verifies the migrated data.
                check = getattr(self, "_check_%s" % check_version, None)
                if check:
                    check(engine, data)
        except Exception:
            LOG.error(
                _LE("Failed to migrate to version {ver} on engine {eng}").
                format(ver=version, eng=engine))
            raise
Esempio n. 9
0
    def create(cls, config, name):
        """Create a deployment.

        :param config: a dict with deployment configuration
        :param name: a str represents a name of the deployment
        :returns: Deployment object
        """

        try:
            deployment = objects.Deployment(name=name, config=config)
        except exceptions.DeploymentNameExists as e:
            # A duplicate name is re-raised as-is; the traceback is logged
            # only when debug logging is enabled.
            if logging.is_debug():
                LOG.exception(e)
            raise

        deployer = deploy_engine.Engine.get_engine(
            deployment["config"]["type"], deployment)
        try:
            deployer.validate()
        except jsonschema.ValidationError:
            # Mark the deployment as failed before propagating the
            # schema-validation error to the caller.
            LOG.error(_LE("Deployment %s: Schema validation error.") %
                      deployment["uuid"])
            deployment.update_status(consts.DeployStatus.DEPLOY_FAILED)
            raise

        with deployer:
            credentials = deployer.make_deploy()
            deployment.update_credentials(credentials)
            return deployment
Esempio n. 10
0
    def _migrate_up(self, engine, version, with_data=False):
        """Migrate up to a new version of the db.

        We allow for data insertion and post checks at every
        migration version with special _pre_upgrade_### and
        _check_### functions in the main test.
        """
        # NOTE(sdague): try block is here because it's impossible to debug
        # where a failed data migration happens otherwise
        check_version = version
        try:
            if with_data:
                data = None
                # Optional per-version hook that seeds data before upgrading.
                pre_upgrade = getattr(
                    self, "_pre_upgrade_%s" % check_version, None)
                if pre_upgrade:
                    data = pre_upgrade(engine)
            self._migrate(engine, version, "upgrade")
            # The db must report exactly the version we migrated to.
            self.assertEqual(version, self._get_version_from_db(engine))
            if with_data:
                # Optional per-version hook that verifies the migrated data.
                check = getattr(self, "_check_%s" % check_version, None)
                if check:
                    check(engine, data)
        except Exception:
            LOG.error(_LE("Failed to migrate to version {ver} on engine {eng}")
                      .format(ver=version, eng=engine))
            raise
Esempio n. 11
0
    def setup(self):
        """Upload the configured Murano package into every tenant.

        The package may be given either as a zip archive or as a directory
        (which is packed into a zip by ``fileutils.pack_dir`` first).

        :raises exceptions.ContextSetupFailure: if the configured path is
            neither a zip archive nor a directory
        """
        is_config_app_dir = False
        pckg_path = os.path.expanduser(self.config["app_package"])
        if zipfile.is_zipfile(pckg_path):
            zip_name = pckg_path
        elif os.path.isdir(pckg_path):
            is_config_app_dir = True
            zip_name = fileutils.pack_dir(pckg_path)
        else:
            msg = (_LE("There is no zip archive or directory by this path:"
                       " %s") % pckg_path)
            raise exceptions.ContextSetupFailure(msg=msg,
                                                 ctx_name=self.get_name())

        for user, tenant_id in utils.iterate_per_tenants(
                self.context["users"]):
            clients = osclients.Clients(user["credential"])
            self.context["tenants"][tenant_id]["packages"] = []
            if is_config_app_dir:
                self.context["tenants"][tenant_id]["murano_ctx"] = zip_name
            # NOTE: open the zip in binary mode and close the handle after
            # the upload instead of leaking an open text-mode descriptor.
            with open(zip_name, "rb") as package_file:
                package = clients.murano().packages.create(
                    {"categories": ["Web"], "tags": ["tag"]},
                    {"file": package_file})

            self.context["tenants"][tenant_id]["packages"].append(package)
Esempio n. 12
0
    def recreate(cls, deployment, config=None):
        """Performs a cleanup and then makes a deployment again.

        :param deployment: UUID or name of the deployment
        :param config: an optional dict with deployment config to update before
                       redeploy
        :raises exceptions.RallyException: if the new config changes the
            deployment type
        :raises jsonschema.ValidationError: if the new config is invalid
        """
        deployment = objects.Deployment.get(deployment)
        deployer = deploy_engine.Engine.get_engine(
            deployment["config"]["type"], deployment)

        if config:
            if deployment["config"]["type"] != config["type"]:
                # NOTE: message wrapped in _LE for consistency with the
                # other user-facing errors in this module.
                raise exceptions.RallyException(
                    _LE("Can't change deployment type."))
            try:
                deployer.validate(config)
            except jsonschema.ValidationError:
                LOG.error(_LE("Config schema validation error."))
                raise

        with deployer:
            # Tear the existing deployment down before re-deploying.
            deployer.make_cleanup()

            if config:
                deployment.update_config(config)

            credentials = deployer.make_deploy()
            deployment.update_credentials(credentials)
Esempio n. 13
0
    def get(cls, name, namespace=None):
        """Return plugin by its name from specified namespace.

        This method iterates over all subclasses of cls and returns plugin
        by name from specified namespace.

        If namespace is not specified it will return first found plugin from
        any of namespaces.

        :param name: Plugin's name
        :param namespace: Namespace where to search for plugins
        :raises exceptions.MultipleMatchesFound: if the name is ambiguous
        :raises exceptions.PluginNotFound: if no plugin matches
        """
        matches = [p for p in cls.get_all(namespace=namespace)
                   if p.get_name() == name]

        if len(matches) == 1:
            return matches[0]

        if matches:
            # More than one plugin shares the requested name.
            hint = _LE("Try to choose the correct Plugin base or namespace to "
                       "search in.")
            needle = ("%s at %s namespace" % (name, namespace)
                      if namespace
                      else "%s at any of namespaces" % name)
            raise exceptions.MultipleMatchesFound(
                needle=needle,
                haystack=", ".join(p.get_name() for p in matches),
                hint=hint)

        raise exceptions.PluginNotFound(
            name=name, namespace=namespace or "any of")
Esempio n. 14
0
def get_interpreter(python_version):
    """Discovers PATH to find proper python interpreter

    :param python_version: (major, minor) version numbers
    :type python_version: tuple
    :returns: path to a matching interpreter, or None if none is found
    """

    if not isinstance(python_version, tuple):
        msg = (_LE("given format of python version `%s` is invalid") %
               python_version)
        raise exceptions.InvalidArgumentsException(msg)

    interpreter_name = "python%s.%s" % python_version
    interpreter = spawn.find_executable(interpreter_name)
    if interpreter:
        return interpreter
    else:
        # Fall back to probing every PATH entry for the executable and
        # asking each candidate for its actual version.
        interpreters = filter(os.path.isfile, [
            os.path.join(p, interpreter_name)
            for p in os.environ.get("PATH", "").split(":")
        ])
        cmd = "%s -c 'import sys; print(sys.version_info[:2])'"
        for interpreter in interpreters:
            try:
                out = sp_check_output(cmd % interpreter, shell=True)
            except subprocess.CalledProcessError:
                # A broken candidate is skipped; it is not a fatal error.
                pass
            else:
                # NOTE(review): if sp_check_output returns bytes (Python 3
                # default), this str comparison can never match — confirm
                # sp_check_output's return type.
                if out.strip() == str(python_version):
                    return interpreter
Esempio n. 15
0
    def uninstall_extension(self, name):
        """Uninstall a verifier extension.

        :param name: Name of extension to uninstall
        :raises NotImplementedError: This feature is verifier-specific, so you
            should override this method in your plugin if it supports
            extensions
        """
        message = _LE("'%s' verifiers don't support extensions.")
        raise NotImplementedError(message % self.get_name())
Esempio n. 16
0
    def extend_configuration(self, extra_options):
        """Extend verifier configuration with new options.

        :param extra_options: Options to be used for extending configuration
        :raises NotImplementedError: This feature is verifier-specific, so you
            should override this method in your plugin if it supports
            configuration
        """
        message = _LE("'%s' verifiers don't support configuration at all.")
        raise NotImplementedError(message % self.get_name())
Esempio n. 17
0
    def validate(cls, config, non_hidden=False):
        """Validate the per-client API version settings in *config*."""
        super(OpenStackAPIVersions, cls).validate(config, non_hidden=non_hidden)
        for client in config:
            client_cls = osclients.OSClient.get(client)
            options = config[client]
            has_type = "service_type" in options
            has_name = "service_name" in options
            # 'service_type' and 'service_name' are mutually exclusive.
            if has_type and has_name:
                raise exceptions.ValidationError(
                    _LE("Setting both 'service_type' and 'service_name' "
                        "properties is restricted."))
            try:
                if has_type or has_name:
                    client_cls.is_service_type_configurable()

                if "version" in options:
                    client_cls.validate_version(options["version"])

            except exceptions.RallyException as e:
                # Re-raise client-specific problems as validation errors.
                raise exceptions.ValidationError(
                    _LE("Invalid settings for '%(client)s': %(error)s")
                    % {"client": client, "error": e.format_message()})
Esempio n. 18
0
 def get_engine(name, deployment):
     """Returns instance of a deploy engine with corresponding name.

     :param name: name of the deploy engine plugin
     :param deployment: deployment object the engine operates on
     :raises exceptions.PluginNotFound: if no engine with that name exists;
         the deployment is marked DEPLOY_FAILED first
     """
     try:
         engine_cls = Engine.get(name)
         return engine_cls(deployment)
     except exceptions.PluginNotFound as e:
         # NOTE: pass substitutions as lazy logging arguments instead of
         # eagerly formatting the message with "%".
         LOG.error(_LE("Deployment %(uuid)s: Deploy engine for %(name)s "
                       "does not exist."),
                   {"uuid": deployment["uuid"], "name": name})
         deployment.update_status(consts.DeployStatus.DEPLOY_FAILED)
         # Re-raise a fresh PluginNotFound preserving the namespace hint.
         raise exceptions.PluginNotFound(
             namespace=e.kwargs.get("namespace"), name=name)
Esempio n. 19
0
 def get_engine(name, deployment):
     """Returns instance of a deploy engine with corresponding name."""
     try:
         engine_cls = Engine.get(name)
         return engine_cls(deployment)
     except exceptions.PluginNotFound as e:
         # NOTE(review): the message is eagerly %-formatted here; lazy
         # logging arguments would be preferable — confirm project style.
         LOG.error(_LE("Deployment %(uuid)s: Deploy engine for %(name)s "
                   "does not exist.") %
                   {"uuid": deployment["uuid"], "name": name})
         # Mark the deployment failed before re-raising.
         deployment.update_status(consts.DeployStatus.DEPLOY_FAILED)
         # Re-raise a fresh PluginNotFound preserving the namespace hint.
         raise exceptions.PluginNotFound(
             namespace=e.kwargs.get("namespace"), name=name)
Esempio n. 20
0
    def override_configuration(self, new_configuration):
        """Override verifier configuration.

        :param new_configuration: Content which should be used while overriding
            existing configuration
        :raises NotImplementedError: This feature is verifier-specific, so you
            should override this method in your plugin if it supports
            configuration
        """
        message = _LE("'%s' verifiers don't support configuration at all.")
        raise NotImplementedError(message % self.get_name())
Esempio n. 21
0
    def abort(self, soft=False):
        """Abort the task by moving it to an (SOFT_)ABORTING status.

        :param soft: if True, request a soft abort; otherwise abort now
        :raises exceptions.RallyException: if abort is not implemented for
            the current stage or the task has already finished
        """
        current_status = self.get_status(self.task["uuid"])

        if current_status in self.NOT_IMPLEMENTED_STAGES_FOR_ABORT:
            raise exceptions.RallyException(
                _LE("Failed to abort task '%(uuid)s'. It doesn't implemented "
                    "for '%(stages)s' stages. Current task status is "
                    "'%(status)s'.") %
                {"uuid": self.task["uuid"], "status": current_status,
                 "stages": ", ".join(self.NOT_IMPLEMENTED_STAGES_FOR_ABORT)})
        elif current_status in [consts.TaskStatus.FINISHED,
                                consts.TaskStatus.FAILED,
                                consts.TaskStatus.ABORTED]:
            # NOTE: self.task is accessed as a mapping everywhere else in
            # this method; "self.task.uuid" would fail with AttributeError.
            raise exceptions.RallyException(
                _LE("Failed to abort task '%s', since it already "
                    "finished.") % self.task["uuid"])

        new_status = (consts.TaskStatus.SOFT_ABORTING
                      if soft else consts.TaskStatus.ABORTING)
        # Only a running (or already soft-aborting) task may be switched.
        self.update_status(new_status, allowed_statuses=(
            consts.TaskStatus.RUNNING, consts.TaskStatus.SOFT_ABORTING))
Esempio n. 22
0
    def _validate_result_schema(self):
        """Validate that the hook result matches HOOK_RESULT_SCHEMA."""
        try:
            jsonschema.validate(self._result, objects.task.HOOK_RESULT_SCHEMA)
        except jsonschema.ValidationError as err:
            LOG.error(_LE("Hook %s returned result "
                          "in wrong format.") % self.get_name())
            LOG.exception(err)

            # Replace the malformed result with a VALIDATION_FAILED record.
            self._result = self._format_result(
                status=consts.HookStatus.VALIDATION_FAILED,
                error=utils.format_exc(err))
Esempio n. 23
0
    def validate(cls, config):
        """Validate the per-client API version settings in *config*."""
        super(OpenStackAPIVersions, cls).validate(config)
        for client in config:
            client_cls = osclients.OSClient.get(client)
            # 'service_type' and 'service_name' are mutually exclusive.
            if ("service_type" in config[client] and
                    "service_name" in config[client]):
                raise exceptions.ValidationError(_LE(
                    "Setting both 'service_type' and 'service_name' properties"
                    " is restricted."))
            try:
                if ("service_type" in config[client] or
                        "service_name" in config[client]):
                    client_cls.is_service_type_configurable()

                if "version" in config[client]:
                    client_cls.validate_version(config[client]["version"])

            except exceptions.RallyException as e:
                # Re-raise client-specific problems as validation errors.
                raise exceptions.ValidationError(
                    _LE("Invalid settings for '%(client)s': %(error)s") % {
                        "client": client,
                        "error": e.format_message()})
Esempio n. 24
0
    def run_sync(self):
        """Run hook synchronously."""
        try:
            with rutils.Timer() as run_timer:
                self.run()
        except Exception as err:
            # A failing hook must not break the runner: record the error
            # on the hook itself and continue.
            LOG.error(_LE("Hook %s failed during run."), self.get_name())
            LOG.exception(err)
            self.set_error(*utils.format_exc(err))

        # Timestamps are recorded whether or not the hook failed.
        self._started_at = run_timer.timestamp()
        self._result["started_at"] = self._started_at
        self._finished_at = run_timer.finish_timestamp()
        self._result["finished_at"] = self._finished_at
Esempio n. 25
0
    def install_extension(self, source, version=None, extra_settings=None):
        """Install a verifier extension.

        :param source: Path or URL to the repo to clone verifier extension from
        :param version: Branch, tag or commit ID to checkout before verifier
            extension installation
        :param extra_settings: Extra installation settings for verifier
            extension
        :raises NotImplementedError: This feature is verifier-specific, so you
            should override this method in your plugin if it supports
            extensions
        """
        message = _LE("'%s' verifiers don't support extensions.")
        raise NotImplementedError(message % self.get_name())
Esempio n. 26
0
    def run_sync(self):
        """Run hook synchronously."""
        try:
            with rutils.Timer() as timer:
                self.run()
        except Exception as exc:
            # NOTE: pass the hook name as a lazy logging argument instead
            # of eagerly interpolating with "%".
            LOG.error(_LE("Hook %s failed during run."), self.get_name())
            LOG.exception(exc)
            self.set_error(*utils.format_exc(exc))

        # Timestamps are recorded whether or not the hook failed.
        self._started_at = timer.timestamp()
        self._result["started_at"] = self._started_at
        self._finished_at = timer.finish_timestamp()
        self._result["finished_at"] = self._finished_at
Esempio n. 27
0
 def backup(self, original_path):
     """Back up *original_path* into a random path inside the tempdir.

     :param original_path: directory tree to copy
     :raises exceptions.RallyException: if the path was already backed up
     """
     if original_path in self._stored_data:
         raise exceptions.RallyException(
             _LE("Failed to back up %s since it was already stored.") %
             original_path)
     backup_path = generate_random_path(self._tempdir)
     # NOTE: lazy logging arguments must be passed individually, not as a
     # single tuple — otherwise "%s ... %s" has only one argument and the
     # log record fails to format.
     LOG.debug("Creating backup of %s in %s", original_path, backup_path)
     try:
         shutil.copytree(original_path, backup_path, symlinks=True)
     except Exception:
         # Ooops. something went wrong
         self.rollback()
         raise
     self._stored_data[original_path] = backup_path
Esempio n. 28
0
    def get(cls,
            name,
            namespace=None,
            allow_hidden=False,
            fallback_to_default=True):
        """Return plugin by its name from specified namespace.

        This method iterates over all subclasses of cls and returns plugin
        by name from specified namespace.

        If namespace is not specified, it will return first found plugin from
        any of namespaces.

        :param name: Plugin's name
        :param namespace: Namespace where to search for plugins
        :param allow_hidden: if False and found plugin is hidden then
            PluginNotFound will be raised
        :param fallback_to_default: if True, then it tries to find
            plugin within "default" namespace
        """
        # Search including hidden plugins; visibility is checked below.
        potential_result = cls.get_all(name=name,
                                       namespace=namespace,
                                       allow_hidden=True)

        if fallback_to_default and len(potential_result) == 0:
            # try to find in default namespace
            potential_result = cls.get_all(name=name,
                                           namespace="default",
                                           allow_hidden=True)

        if len(potential_result) == 1:
            plugin = potential_result[0]
            if allow_hidden or not plugin.is_hidden():
                return plugin
            # A single hidden plugin with allow_hidden=False falls through
            # to PluginNotFound below.

        elif potential_result:
            # More than one plugin shares the requested name.
            hint = _LE("Try to choose the correct Plugin base or namespace to "
                       "search in.")
            if namespace:
                needle = "%s at %s namespace" % (name, namespace)
            else:
                needle = "%s at any of namespaces" % name
            raise exceptions.MultipleMatchesFound(
                needle=needle,
                haystack=", ".join(p.get_name() for p in potential_result),
                hint=hint)

        raise exceptions.PluginNotFound(name=name,
                                        namespace=namespace or "any of")
Esempio n. 29
0
 def _init_testr(self):
     """Initialize testr."""
     repository_dir = os.path.join(self.base_dir, ".testrepository")
     # NOTE(andreykurilin): Is there any possibility that .testrepository
     #   presents in clear repo?!
     if os.path.isdir(repository_dir):
         # Already initialized; nothing to do.
         return
     LOG.debug("Initializing testr.")
     try:
         utils.check_output(["testr", "init"], cwd=self.repo_dir,
                            env=self.environ)
     except (subprocess.CalledProcessError, OSError):
         # Remove a half-created repository so a retry can start clean.
         if os.path.exists(repository_dir):
             shutil.rmtree(repository_dir)
         raise exceptions.RallyException(
             _LE("Failed to initialize testr."))
Esempio n. 30
0
    def setup(self):
        """Upload the configured Murano package zip into every tenant.

        :raises OSError: if the configured path is not a zip archive
        """
        if not zipfile.is_zipfile(self.config["app_package"]):
            msg = (_LE("There is no zip archive by this path: %s")
                   % self.config["app_package"])
            raise OSError(msg)

        for user, tenant_id in utils.iterate_per_tenants(
                self.context["users"]):
            clients = osclients.Clients(user["endpoint"])
            self.context["tenants"][tenant_id]["packages"] = []
            # NOTE: open the zip in binary mode and close the handle after
            # the upload instead of leaking an open text-mode descriptor.
            with open(self.config["app_package"], "rb") as package_file:
                package = clients.murano().packages.create(
                    {"categories": ["Web"], "tags": ["tag"]},
                    {"file": package_file})

            self.context["tenants"][tenant_id]["packages"].append(package)
Esempio n. 31
0
    def setup(self):
        """Upload the configured Murano package zip into every tenant."""
        # Validate that the configured path points to a real zip archive.
        if not zipfile.is_zipfile(self.config["app_package"]):
            msg = (_LE("There is no zip archive by this path: %s")
                   % self.config["app_package"])
            raise OSError(msg)

        for user, tenant_id in utils.iterate_per_tenants(
                self.context["users"]):
            clients = osclients.Clients(user["endpoint"])
            self.context["tenants"][tenant_id]["packages"] = []
            # NOTE(review): the file handle is never closed and the zip is
            # opened in text mode — looks like a descriptor leak; confirm
            # before changing.
            package = clients.murano().packages.create(
                {"categories": ["Web"], "tags": ["tag"]},
                {"file": open(self.config["app_package"])})

            self.context["tenants"][tenant_id]["packages"].append(package)
Esempio n. 32
0
    def _thread_method(self):
        """Execute the hook and then validate the format of its result."""
        # Run hook synchronously
        self.run_sync()

        try:
            self.validate_result_schema()
        except jsonschema.ValidationError as err:
            LOG.error(
                _LE("Hook %s returned result "
                    "in wrong format.") % self.get_name())
            LOG.exception(err)

            # Replace the ill-formed result with a VALIDATION_FAILED record.
            self._result = self._format_result(
                status=consts.HookStatus.VALIDATION_FAILED,
                error=utils.format_exc(err))
Esempio n. 33
0
 def __exit__(self, exc_type, exc_value, exc_traceback):
     """Log any error and move the deployment into a matching failure status."""
     if exc_type is not None:
         exc_info = None
         # InvalidArgumentsException is an expected error; skip its traceback.
         if not issubclass(exc_type, exceptions.InvalidArgumentsException):
             exc_info = (exc_type, exc_value, exc_traceback)
         LOG.error(_LE("Deployment %(uuid)s: Error has occurred into "
                       "context of the deployment"),
                   {"uuid": self.deployment["uuid"]},
                   exc_info=exc_info)
         # Map the phase the deployment was in to a failure status.
         status = self.deployment["status"]
         if status in (consts.DeployStatus.DEPLOY_INIT,
                       consts.DeployStatus.DEPLOY_STARTED):
             self.deployment.update_status(
                 consts.DeployStatus.DEPLOY_FAILED)
         elif status == consts.DeployStatus.DEPLOY_FINISHED:
             self.deployment.update_status(
                 consts.DeployStatus.DEPLOY_INCONSISTENT)
         elif status == consts.DeployStatus.CLEANUP_STARTED:
             self.deployment.update_status(
                 consts.DeployStatus.CLEANUP_FAILED)
Esempio n. 34
0
 def __exit__(self, exc_type, exc_value, exc_traceback):
     """Log any error and move the deployment into a matching failure status."""
     if exc_type is not None:
         exc_info = None
         # InvalidArgumentsException is an expected error; skip its traceback.
         if not issubclass(exc_type, exceptions.InvalidArgumentsException):
             exc_info = (exc_type, exc_value, exc_traceback)
         LOG.error(_LE("Deployment %(uuid)s: Error has occurred into "
                   "context of the deployment"),
                   {"uuid": self.deployment["uuid"]},
                   exc_info=exc_info)
         # Map the phase the deployment was in to a failure status.
         status = self.deployment["status"]
         if status in (consts.DeployStatus.DEPLOY_INIT,
                       consts.DeployStatus.DEPLOY_STARTED):
             self.deployment.update_status(
                 consts.DeployStatus.DEPLOY_FAILED)
         elif status == consts.DeployStatus.DEPLOY_FINISHED:
             self.deployment.update_status(
                 consts.DeployStatus.DEPLOY_INCONSISTENT)
         elif status == consts.DeployStatus.CLEANUP_STARTED:
             self.deployment.update_status(
                 consts.DeployStatus.CLEANUP_FAILED)
Esempio n. 35
0
    def __init__(self,
                 config_file=None,
                 config_args=None,
                 rally_endpoint=None,
                 plugin_paths=None,
                 skip_db_check=False):
        """Initialize Rally API instance

        :param config_file: Path to rally configuration file. If None, default
                            path will be selected
        :type config_file: str
        :param config_args: Arguments for initialization current configuration
        :type config_args: list
        :param rally_endpoint: [Restricted]Rally endpoint connection string.
        :type rally_endpoint: str
        :param plugin_paths: Additional custom plugin locations
        :type plugin_paths: list
        :param skip_db_check: Allows to skip db revision check
        :type skip_db_check: bool
        :raises NotImplementedError: if rally_endpoint is provided
        :raises exceptions.RallyException: if the configuration file(s)
            cannot be read
        """
        if rally_endpoint:
            raise NotImplementedError(
                _LE("Sorry, but Rally-as-a-Service is "
                    "not ready yet."))
        try:
            # Use the explicit config file when given, else the default one.
            config_files = ([config_file]
                            if config_file else self._default_config_file())
            CONF(config_args or [],
                 project="rally",
                 version=rally_version.version_string(),
                 default_config_files=config_files)
            logging.setup("rally")
            if not CONF.get("log_config_append"):
                # The below two lines are to disable noise from request module.
                # The standard way should be we make such lots of settings on
                # the root rally. However current oslo codes doesn't support
                # such interface. So I choose to use a 'hacking' way to avoid
                # INFO logs from request module where user didn't give specific
                # log configuration. And we could remove this hacking after
                # oslo.log has such interface.
                LOG.debug(
                    "INFO logs from urllib3 and requests module are hide.")
                requests_log = logging.getLogger("requests").logger
                requests_log.setLevel(logging.WARNING)
                urllib3_log = logging.getLogger("urllib3").logger
                urllib3_log.setLevel(logging.WARNING)

                LOG.debug("urllib3 insecure warnings are hidden.")
                for warning in ("InsecurePlatformWarning", "SNIMissingWarning",
                                "InsecureRequestWarning"):
                    # Warning classes vary between urllib3 versions, so look
                    # them up defensively.
                    warning_cls = getattr(urllib3.exceptions, warning, None)
                    if warning_cls is not None:
                        urllib3.disable_warnings(warning_cls)

            # NOTE(wtakase): This is for suppressing boto error logging.
            LOG.debug("ERROR log from boto module is hide.")
            boto_log = logging.getLogger("boto").logger
            boto_log.setLevel(logging.CRITICAL)

            # Set alembic log level to ERROR
            alembic_log = logging.getLogger("alembic").logger
            alembic_log.setLevel(logging.ERROR)

        except cfg.ConfigFilesNotFoundError as e:
            cfg_files = e.config_files
            raise exceptions.RallyException(
                _LE("Failed to read configuration file(s): %s") % cfg_files)

        # Check that db is upgraded to the latest revision
        if not skip_db_check:
            self.check_db_revision()

        # Load plugins
        plugin_paths = plugin_paths or []
        if "plugin_paths" in CONF:
            plugin_paths.extend(CONF.get("plugin_paths") or [])
        for path in plugin_paths:
            discover.load_plugins(path)

        # NOTE(andreykurilin): There is no reason to auto-discover API's. We
        # have only 4 classes, so let's do it in good old way - hardcode them:)
        self._deployment = _Deployment
        self._task = _Task
        self._verifier = _Verifier
        self._verification = _Verification