Beispiel #1
0
def print_help_on_errors():
    """Print a short guide telling the user where to find further help after an error."""
    heading = "Getting further help:"
    console.println(console.format.bold(heading))
    console.println(console.format.underline_for(heading))
    console.println("* Check the log files in {} for errors.".format(paths.logs()))
    console.println("* Read the documentation at {}".format(console.format.link(doc_link())))
    forum = console.format.link("https://discuss.elastic.co/c/elasticsearch/rally")
    console.println("* Ask a question on the forum at {}".format(forum))
    issues = console.format.link("https://github.com/elastic/rally/issues")
    console.println("* Raise an issue at {} and include the log files in {}.".format(issues, paths.logs()))
Beispiel #2
0
def install_default_log_config():
    """
    Ensure a log configuration file and the default log directory exist on this machine.

    If no configuration is present yet, one is created from the template in
    resources/logging.json, substituting ${LOG_PATH} with the default log path.
    The default log directory is created in any case so log files can be opened there.
    """
    config_file = log_config_path()
    if not io.exists(config_file):
        io.ensure_dir(io.dirname(config_file))
        template_path = io.normalize_path(
            os.path.join(os.path.dirname(__file__), "resources", "logging.json"))
        # keep the original open order: the target is created/truncated before the template is read
        with open(config_file, "w", encoding="UTF-8") as target, \
                open(template_path, "r", encoding="UTF-8") as src:
            target.write(src.read().replace("${LOG_PATH}", paths.logs()))
    io.ensure_dir(paths.logs())
Beispiel #3
0
def install_default_log_config():
    """
    Ensure a log configuration file and the default log directory exist on this machine.

    If no configuration is present yet, one is created from the template in
    resources/logging.json, substituting ${LOG_PATH} with the default log path.
    The default log directory is created in any case so log files can be opened there.
    """
    config_file = log_config_path()
    if not io.exists(config_file):
        io.ensure_dir(io.dirname(config_file))
        template_path = io.normalize_path(
            os.path.join(os.path.dirname(__file__), "resources", "logging.json"))
        with open(config_file, "w", encoding="UTF-8") as target, \
                open(template_path, "r", encoding="UTF-8") as src:
            # The template places only a file name after ${LOG_PATH}, so append a trailing
            # path separator; escape backslashes so Windows paths stay valid in the config.
            substitution = io.escape_path(os.path.join(paths.logs(), ""))
            target.write(src.read().replace("${LOG_PATH}", substitution))
    io.ensure_dir(paths.logs())
Beispiel #4
0
def remove_obsolete_default_log_config():
    """
    Move an unmodified Rally 1.0.0 logging configuration out of the way.

    Log rotation is problematic because Rally uses multiple processes and there is a lurking
    race condition when rolling log files, so Rally does not rotate logs itself and leaves
    that to established tools such as logrotate. If the user's current configuration is
    byte-identical to the rotating out-of-the-box configuration shipped with Rally 1.0.0,
    it is renamed to "<config>.bak" so a later step can install a replacement.
    """
    log_config = log_config_path()
    if not io.exists(log_config):
        return
    reference_path = io.normalize_path(
        os.path.join(os.path.dirname(__file__), "resources", "logging_1_0_0.json"))
    with open(reference_path, "r", encoding="UTF-8") as src:
        reference = src.read().replace("${LOG_PATH}", paths.logs())
    reference_hash = hashlib.sha512(reference.encode()).hexdigest()
    with open(log_config, "r", encoding="UTF-8") as target:
        current_hash = hashlib.sha512(target.read().encode()).hexdigest()
    # only touch the file if it is exactly the known-bad default; a customized config is kept
    if reference_hash == current_hash:
        os.rename(log_config, "{}.bak".format(log_config))
Beispiel #5
0
def create(cfg, sources, distribution, build, car, plugins=None):
    """
    Assemble a CompositeSupplier for Elasticsearch and all requested plugins.

    :param cfg: Config object; "mechanic", "source", "node" and "distributions" sections are read.
    :param sources: Passed to _supply_requirements to decide whether components come from source.
    :param distribution: Passed to _supply_requirements (distribution-based supply).
    :param build: Passed to _supply_requirements (whether a build is requested).
    :param car: Provides the Java home (via _java_home) and default distribution variables.
    :param plugins: Optional list of plugins to supply alongside Elasticsearch (default: none).
    :return: A CompositeSupplier wrapping one supplier per component.
    :raises exceptions.RallyError: If a source plugin is neither a core nor an external plugin.
    """
    logger = logging.getLogger(__name__)
    if plugins is None:
        plugins = []
    revisions = _extract_revisions(cfg.opts("mechanic", "source.revision"))
    distribution_version = cfg.opts("mechanic",
                                    "distribution.version",
                                    mandatory=False)
    # maps component name -> (supplier_type, version, build_flag)
    supply_requirements = _supply_requirements(sources, distribution, build,
                                               plugins, revisions,
                                               distribution_version)
    # a single Builder instance is shared by all components that need to be built
    build_needed = any([build for _, _, build in supply_requirements.values()])
    src_config = cfg.all_opts("source")
    suppliers = []

    if build_needed:
        java_home = _java_home(car)
        es_src_dir = os.path.join(
            _src_dir(cfg), _config_value(src_config,
                                         "elasticsearch.src.subdir"))
        builder = Builder(es_src_dir, java_home, paths.logs())
    else:
        builder = None

    es_supplier_type, es_version, es_build = supply_requirements[
        "elasticsearch"]
    if es_supplier_type == "source":
        es_src_dir = os.path.join(
            _src_dir(cfg), _config_value(src_config,
                                         "elasticsearch.src.subdir"))
        suppliers.append(
            ElasticsearchSourceSupplier(es_version,
                                        es_src_dir,
                                        remote_url=cfg.opts(
                                            "source", "remote.repo.url"),
                                        car=car,
                                        builder=builder))
        # no distribution repository when building from source; plugin distribution suppliers
        # below assert on this to reject mixed source/distribution setups
        repo = None
    else:
        # es_src_dir stays None so core plugin source suppliers can detect the mismatch below
        es_src_dir = None
        distributions_root = os.path.join(
            cfg.opts("node", "root.dir"), cfg.opts("source",
                                                   "distribution.dir"))

        dist_cfg = {}
        # car / plugin defines defaults...
        dist_cfg.update(car.variables)
        for plugin in plugins:
            for k, v in plugin.variables.items():
                dist_cfg["plugin_{}_{}".format(plugin.name, k)] = v
        # ... but the user can override it in rally.ini
        dist_cfg.update(cfg.all_opts("distributions"))
        repo = DistributionRepository(name=cfg.opts("mechanic",
                                                    "distribution.repository"),
                                      distribution_config=dist_cfg,
                                      version=es_version)
        suppliers.append(
            ElasticsearchDistributionSupplier(repo, distributions_root))

    for plugin in plugins:
        supplier_type, plugin_version, build_plugin = supply_requirements[
            plugin.name]

        if supplier_type == "source":
            if CorePluginSourceSupplier.can_handle(plugin):
                logger.info("Adding core plugin source supplier for [%s].",
                            plugin.name)
                # core plugins are built as part of the Elasticsearch source tree
                assert es_src_dir is not None, "Cannot build core plugin %s when Elasticsearch is not built from source." % plugin.name
                suppliers.append(
                    CorePluginSourceSupplier(plugin, es_src_dir, builder))
            elif ExternalPluginSourceSupplier.can_handle(plugin):
                logger.info("Adding external plugin source supplier for [%s].",
                            plugin.name)
                suppliers.append(
                    ExternalPluginSourceSupplier(
                        plugin, plugin_version, _src_dir(cfg, mandatory=False),
                        src_config, builder))
            else:
                raise exceptions.RallyError(
                    "Plugin %s can neither be treated as core nor as external plugin. Requirements: %s"
                    % (plugin.name, supply_requirements[plugin.name]))
        else:
            logger.info("Adding plugin distribution supplier for [%s].",
                        plugin.name)
            # distribution plugins need the repository created in the distribution branch above
            assert repo is not None, "Cannot benchmark plugin %s from a distribution version but Elasticsearch from sources" % plugin.name
            suppliers.append(PluginDistributionSupplier(repo, plugin))

    return CompositeSupplier(suppliers)
Beispiel #6
0
def create(cfg, sources, distribution, car, plugins=None):
    """
    Assemble a CompositeSupplier for Elasticsearch and all requested plugins.

    Compared to a plain supplier chain, source suppliers may additionally be wrapped in
    CachedSourceSupplier instances when source artifact caching is enabled.

    :param cfg: Config object; "mechanic", "source", "node" and "distributions" sections are read.
    :param sources: Passed to _supply_requirements; also makes "source.revision" mandatory.
    :param distribution: Passed to _supply_requirements (distribution-based supply).
    :param car: Provides the build JDK ("build.jdk" variable) and default distribution variables.
    :param plugins: Optional list of plugins to supply alongside Elasticsearch (default: none).
    :return: A CompositeSupplier wrapping one supplier per component.
    :raises exceptions.SystemSetupError: If "build.jdk" is not an int or "cache.days" is not positive.
    :raises exceptions.RallyError: If a source plugin is neither a core nor an external plugin.
    """
    logger = logging.getLogger(__name__)
    if plugins is None:
        plugins = []
    # source artifact caching is on by default and can be switched off via the "source" section
    caching_enabled = cfg.opts("source",
                               "cache",
                               mandatory=False,
                               default_value=True)
    revisions = _extract_revisions(
        cfg.opts("mechanic", "source.revision", mandatory=sources))
    distribution_version = cfg.opts("mechanic",
                                    "distribution.version",
                                    mandatory=False)
    # maps component name -> (supplier_type, version, build_flag)
    supply_requirements = _supply_requirements(sources, distribution, plugins,
                                               revisions, distribution_version)
    build_needed = any([build for _, _, build in supply_requirements.values()])
    es_supplier_type, es_version, _ = supply_requirements["elasticsearch"]
    src_config = cfg.all_opts("source")
    suppliers = []

    # target OS / arch are optional overrides used when rendering distribution URLs / file names
    target_os = cfg.opts("mechanic", "target.os", mandatory=False)
    target_arch = cfg.opts("mechanic", "target.arch", mandatory=False)
    template_renderer = TemplateRenderer(version=es_version,
                                         os_name=target_os,
                                         arch=target_arch)

    if build_needed:
        raw_build_jdk = car.mandatory_var("build.jdk")
        try:
            build_jdk = int(raw_build_jdk)
        except ValueError:
            raise exceptions.SystemSetupError(
                f"Car config key [build.jdk] is invalid: [{raw_build_jdk}] (must be int)"
            )

        es_src_dir = os.path.join(
            _src_dir(cfg), _config_value(src_config,
                                         "elasticsearch.src.subdir"))
        # a single Builder instance is shared by all components that need to be built
        builder = Builder(es_src_dir, build_jdk, paths.logs())
    else:
        builder = None

    distributions_root = os.path.join(cfg.opts("node", "root.dir"),
                                      cfg.opts("source", "distribution.dir"))
    dist_cfg = {}
    # car / plugin defines defaults...
    dist_cfg.update(car.variables)
    for plugin in plugins:
        for k, v in plugin.variables.items():
            dist_cfg["plugin_{}_{}".format(plugin.name, k)] = v
    # ... but the user can override it in rally.ini
    dist_cfg.update(cfg.all_opts("distributions"))

    if caching_enabled:
        logger.info("Enabling source artifact caching.")
        # cached source artifacts older than "cache.days" (default 7) are pruned
        max_age_days = int(
            cfg.opts("source", "cache.days", mandatory=False, default_value=7))
        if max_age_days <= 0:
            raise exceptions.SystemSetupError(
                f"cache.days must be a positive number but is {max_age_days}")

        source_distributions_root = os.path.join(distributions_root, "src")
        _prune(source_distributions_root, max_age_days)
    else:
        logger.info("Disabling source artifact caching.")
        source_distributions_root = None

    if es_supplier_type == "source":
        es_src_dir = os.path.join(
            _src_dir(cfg), _config_value(src_config,
                                         "elasticsearch.src.subdir"))

        source_supplier = ElasticsearchSourceSupplier(
            es_version,
            es_src_dir,
            remote_url=cfg.opts("source", "remote.repo.url"),
            car=car,
            builder=builder,
            template_renderer=template_renderer)

        if caching_enabled:
            # wrap the source supplier so previously built artifacts can be reused
            es_file_resolver = ElasticsearchFileNameResolver(
                dist_cfg, template_renderer)
            source_supplier = CachedSourceSupplier(source_distributions_root,
                                                   source_supplier,
                                                   es_file_resolver)

        suppliers.append(source_supplier)
        # no distribution repository when building from source; plugin distribution suppliers
        # below assert on this to reject mixed source/distribution setups
        repo = None
    else:
        # es_src_dir stays None so core plugin source suppliers can detect the mismatch below
        es_src_dir = None
        repo = DistributionRepository(name=cfg.opts("mechanic",
                                                    "distribution.repository"),
                                      distribution_config=dist_cfg,
                                      template_renderer=template_renderer)
        suppliers.append(
            ElasticsearchDistributionSupplier(repo, es_version,
                                              distributions_root))

    for plugin in plugins:
        supplier_type, plugin_version, _ = supply_requirements[plugin.name]

        if supplier_type == "source":
            if CorePluginSourceSupplier.can_handle(plugin):
                logger.info("Adding core plugin source supplier for [%s].",
                            plugin.name)
                # core plugins are built as part of the Elasticsearch source tree
                assert es_src_dir is not None, f"Cannot build core plugin {plugin.name} when Elasticsearch is not built from source."
                plugin_supplier = CorePluginSourceSupplier(
                    plugin, es_src_dir, builder)
            elif ExternalPluginSourceSupplier.can_handle(plugin):
                logger.info("Adding external plugin source supplier for [%s].",
                            plugin.name)
                plugin_supplier = ExternalPluginSourceSupplier(
                    plugin, plugin_version, _src_dir(cfg, mandatory=False),
                    src_config, builder)
            else:
                raise exceptions.RallyError(
                    "Plugin %s can neither be treated as core nor as external plugin. Requirements: %s"
                    % (plugin.name, supply_requirements[plugin.name]))

            if caching_enabled:
                # plugin source suppliers participate in the same artifact cache
                plugin_file_resolver = PluginFileNameResolver(
                    plugin.name, plugin_version)
                plugin_supplier = CachedSourceSupplier(
                    source_distributions_root, plugin_supplier,
                    plugin_file_resolver)
            suppliers.append(plugin_supplier)
        else:
            logger.info("Adding plugin distribution supplier for [%s].",
                        plugin.name)
            # distribution plugins need the repository created in the distribution branch above
            assert repo is not None, "Cannot benchmark plugin %s from a distribution version but Elasticsearch from sources" % plugin.name
            suppliers.append(PluginDistributionSupplier(repo, plugin))

    return CompositeSupplier(suppliers)