    def test_migrate_from_11_to_12_with_custom_src_config(
            self, path_rename, path_exists):
        path_exists.return_value = False

        config_file = InMemoryConfigStore("test")
        sample_config = {
            "meta": {
                "config.version": 11
            },
            "node": {
                "root.dir": io.normalize_path("~/.rally/benchmarks")
            },
            "source": {
                "local.src.dir":
                io.normalize_path("~/Projects/elasticsearch/master/es")
            }
        }
        config_file.store(sample_config)
        config.migrate(config_file, 11, 12, out=null_output)

        self.assertTrue(config_file.backup_created)
        self.assertEqual("12", config_file.config["meta"]["config.version"])
        self.assertEqual(io.normalize_path("~/Projects/elasticsearch/master"),
                         config_file.config["node"]["src.root.dir"])
        self.assertEqual(
            "es", config_file.config["source"]["elasticsearch.src.subdir"])
        # did all the migrations but nothing moved
        path_rename.assert_not_called()
Example #2
    def test_normalize_path(self):
        self.assertEqual("/already/a/normalized/path",
                         io.normalize_path("/already/a/normalized/path"))
        self.assertEqual("/not/normalized",
                         io.normalize_path("/not/normalized/path/../"))
        self.assertEqual(os.getenv("HOME"),
                         io.normalize_path("~/Documents/.."))
Example #3
    def test_normalize_path(self):
        self.assertEqual("/already/a/normalized/path",
                         io.normalize_path("/already/a/normalized/path"))
        self.assertEqual("/not/normalized",
                         io.normalize_path("/not/normalized/path/../"))
        self.assertEqual(os.path.expanduser("~"),
                         io.normalize_path("~/Documents/.."))
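The two tests above pin down the behaviour exercised by io.normalize_path: tilde expansion plus removal of "." and ".." segments. As a minimal sketch only (the real implementation in esrally.utils.io may differ; the cwd handling here is an assumption based on calls like rio.normalize_path(report_file, cwd) further below):

import os

def normalize_path(path, cwd="."):
    # expand "~" and collapse "." / ".." segments
    normalized = os.path.normpath(os.path.expanduser(path))
    # assumption: relative paths are resolved against the provided working directory
    if not os.path.isabs(normalized):
        normalized = os.path.abspath(os.path.join(cwd, normalized))
    return normalized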
Example #4
def write_single_report(report_file,
                        report_format,
                        cwd,
                        headers,
                        data_plain,
                        data_rich,
                        write_header=True):
    if report_format == "markdown":
        formatter = format_as_markdown
    elif report_format == "csv":
        formatter = format_as_csv
    else:
        raise exceptions.SystemSetupError("Unknown report format '%s'" %
                                          report_format)

    print_internal(formatter(headers, data_rich))
    if len(report_file) > 0:
        normalized_report_file = rio.normalize_path(report_file, cwd)
        logger.info(
            "Writing report to [%s] (user specified: [%s]) in format [%s]" %
            (normalized_report_file, report_file, report_format))
        # ensure that the parent folder already exists when we try to write the file...
        rio.ensure_dir(rio.dirname(normalized_report_file))
        with open(normalized_report_file, mode="a+", encoding="utf-8") as f:
            f.writelines(formatter(headers, data_plain, write_header))
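For orientation, a hypothetical call to write_single_report as defined above; all argument values are invented for illustration and not taken from Rally:

import os

write_single_report(
    report_file="report.md",      # appended to because of mode="a+"
    report_format="markdown",
    cwd=os.getcwd(),
    headers=["Metric", "Task", "Value"],
    data_plain=[["Indexing time", "index-append", "12.3"]],
    data_rich=[["Indexing time", "index-append", "12.3 min"]],
)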
Example #5
def load_race_configs(cfg):
    chart_spec_path = cfg.opts("generator", "chart.spec.path", mandatory=False)
    if chart_spec_path:
        import json
        race_configs = []
        with open(io.normalize_path(chart_spec_path),
                  mode="rt",
                  encoding="utf-8") as f:
            for item in json.load(f):
                t = load_track(cfg, item["track"])
                race_configs_per_track = []
                for configuration in item["configurations"]:
                    race_configs_per_track.append(
                        RaceConfig(track=t, cfg=configuration))
                if race_configs_per_track:
                    race_configs.append(race_configs_per_track)
    else:
        car_names = cfg.opts("mechanic", "car.names")
        if len(car_names) > 1:
            raise exceptions.SystemSetupError(
                "Chart generator supports only a single car but got %s" %
                car_names)
        else:
            car_name = car_names[0]
        race_configs = [[
            RaceConfig(track=load_track(cfg),
                       challenge=cfg.opts("track", "challenge.name"),
                       car=car_name,
                       node_count=cfg.opts("generator", "node.count"),
                       charts=["indexing", "query", "gc", "io"])
        ]]
    return race_configs
Example #6
def create_track(cfg):
    logger = logging.getLogger(__name__)

    track_name = cfg.opts("track", "track.name")
    indices = cfg.opts("generator", "indices")
    root_path = cfg.opts("generator", "output.path")
    target_hosts = cfg.opts("client", "hosts")
    client_options = cfg.opts("client", "options")

    logger.info("Creating track [%s] matching indices [%s]", track_name, indices)

    client = EsClientFactory(
        hosts=target_hosts.all_hosts[opts.TargetHosts.DEFAULT], client_options=client_options.all_client_options[opts.TargetHosts.DEFAULT]
    ).create()

    info = client.info()
    console.info(f"Connected to Elasticsearch cluster [{info['name']}] version [{info['version']['number']}].\n", logger=logger)

    output_path = os.path.abspath(os.path.join(io.normalize_path(root_path), track_name))
    io.ensure_dir(output_path)

    indices, corpora = extract_mappings_and_corpora(client, output_path, indices)
    if len(indices) == 0:
        raise RuntimeError("Failed to extract any indices for track!")

    template_vars = {"track_name": track_name, "indices": indices, "corpora": corpora}

    track_path = os.path.join(output_path, "track.json")
    templates_path = os.path.join(cfg.opts("node", "rally.root"), "resources")
    process_template(templates_path, "track.json.j2", template_vars, track_path)

    console.println("")
    console.info(f"Track {track_name} has been created. Run it with: {PROGRAM_NAME} --track-path={output_path}")
Example #7
def load_race_configs(cfg):
    chart_spec_path = cfg.opts("generator", "chart.spec.path", mandatory=False)
    if chart_spec_path:
        import json
        race_configs = []
        with open(io.normalize_path(chart_spec_path), mode="rt", encoding="utf-8") as f:
            for item in json.load(f):
                # load track based on its name and replace it
                item["track"] = load_track(cfg, item["track"])
                race_configs.append(item)
    else:
        t = load_track(cfg)

        car_names = cfg.opts("mechanic", "car.names")
        if len(car_names) > 1:
            raise exceptions.SystemSetupError("Chart generator supports only a single car but got %s" % car_names)
        else:
            car_name = car_names[0]

        race_configs = [
            {
                "track": t,
                "combinations": [
                    {
                        "challenge": cfg.opts("track", "challenge.name"),
                        "car": car_name,
                        "node-count": cfg.opts("generator", "node.count")
                    }
                ]
            }
        ]
    return race_configs
Example #8
def generate(cfg):
    if cfg.opts("generator", "chart.type") == "time-series":
        chart_type = TimeSeriesCharts
    else:
        chart_type = BarCharts

    console.info("Loading track data...", flush=True)
    race_configs = load_race_configs(cfg)
    env = cfg.opts("system", "env.name")

    structures = []
    console.info("Generating charts...", flush=True)
    for race_config in race_configs:

        charts = generate_index_ops(chart_type, race_config, env) + \
                 generate_io(chart_type, race_config, env) + \
                 generate_gc(chart_type, race_config, env) + \
                 generate_queries(chart_type, race_config, env)

        dashboard = generate_dashboard(env, race_config["track"], charts)

        structures.extend(charts)
        structures.append(dashboard)

    output_path = cfg.opts("generator", "output.path")
    if output_path:
        with open(io.normalize_path(output_path), mode="wt", encoding="utf-8") as f:
            print(json.dumps(structures, indent=4), file=f)
    else:
        print(json.dumps(structures, indent=4))
Example #9
    def write_report(self, metrics_table):
        headers = ["Metric", "Value"]
        report_format = self._config.opts("report", "reportformat")
        report_file = self._config.opts("report", "reportfile")

        if report_format == "markdown":
            report = tabulate.tabulate(metrics_table, headers=headers, tablefmt="pipe", numalign="right", stralign="right")
        elif report_format == "csv":
            with io.StringIO() as out:
                writer = csv.writer(out)
                writer.writerow(headers)
                for metric_record in metrics_table:
                    writer.writerow(metric_record)
                report = out.getvalue()
        else:
            raise exceptions.SystemSetupError("Unknown report format '%s'" % report_format)

        print_internal(report)
        if len(report_file) > 0:
            normalized_report_file = rio.normalize_path(report_file)
            logger.info("Writing report to [%s] (user specified: [%s]) in format [%s]" %
                        (normalized_report_file, report_file, report_format))
            print("\nWriting report also to '%s'" % normalized_report_file)
            # ensure that the parent folder already exists when we try to write the file...
            rio.ensure_dir(rio.dirname(normalized_report_file))
            with open(normalized_report_file, mode="w", encoding="UTF-8") as f:
                f.writelines(report)
Example #10
    def write_single_report(self, report_format, report_file, headers, data, force_cmd_line_output=True):
        if report_format == "markdown":
            report = tabulate.tabulate(data, headers=headers, tablefmt="pipe", numalign="right", stralign="right")
        elif report_format == "csv":
            with io.StringIO() as out:
                writer = csv.writer(out)
                writer.writerow(headers)
                for metric_record in data:
                    writer.writerow(metric_record)
                report = out.getvalue()
        else:
            raise exceptions.SystemSetupError("Unknown report format '%s'" % report_format)

        if force_cmd_line_output:
            print_internal(report)
        if len(report_file) > 0:
            normalized_report_file = rio.normalize_path(report_file)
            logger.info("Writing report to [%s] (user specified: [%s]) in format [%s]" %
                        (normalized_report_file, report_file, report_format))
            if force_cmd_line_output:
                print("\nWriting report also to '%s'" % normalized_report_file)
            # ensure that the parent folder already exists when we try to write the file...
            rio.ensure_dir(rio.dirname(normalized_report_file))
            with open(normalized_report_file, mode="w", encoding="UTF-8") as f:
                f.writelines(report)
Example #11
    def write_single_report(self,
                            report_file,
                            headers,
                            data,
                            write_header=True,
                            show_also_in_console=True):
        report_format = self._config.opts("report", "reportformat")
        if report_format == "markdown":
            formatter = self.format_as_markdown
        elif report_format == "csv":
            formatter = self.format_as_csv
        else:
            raise exceptions.SystemSetupError("Unknown report format '%s'" %
                                              report_format)

        if show_also_in_console:
            print_internal(formatter(headers, data))
        if len(report_file) > 0:
            normalized_report_file = rio.normalize_path(report_file)
            logger.info(
                "Writing report to [%s] (user specified: [%s]) in format [%s]"
                % (normalized_report_file, report_file, report_format))
            # if show_also_in_console:
            #     print("\nWriting report also to '%s'" % normalized_report_file)
            # ensure that the parent folder already exists when we try to write the file...
            rio.ensure_dir(rio.dirname(normalized_report_file))
            with open(normalized_report_file, mode="a+",
                      encoding="UTF-8") as f:
                f.writelines(formatter(headers, data, write_header))
Example #12
def to_dict(arg):
    if io.has_extension(arg, ".json"):
        with open(io.normalize_path(arg), mode="rt", encoding="utf-8") as f:
            return json.load(f)
    elif arg.startswith("{"):
        return json.loads(arg)
    else:
        return kv_to_map(csv_to_list(arg))
Example #13
def to_dict(arg, default_parser=kv_to_map):
    if io.has_extension(arg, ".json"):
        with open(io.normalize_path(arg), mode="rt", encoding="utf-8") as f:
            return json.load(f)
    try:
        return json.loads(arg)
    except json.decoder.JSONDecodeError:
        return default_parser(csv_to_list(arg))
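The branches above accept three input forms. A hypothetical usage sketch (assuming the default kv_to_map parser splits colon-separated key:value pairs, as Rally's --track-params option does):

to_dict("params.json")               # path with a .json extension: parsed via json.load
to_dict('{"bulk_size": 5000}')       # inline JSON object: parsed via json.loads
to_dict("bulk_size:5000,clients:8")  # anything else: csv_to_list plus the default parser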
Example #14
    def test_migrate_from_11_to_12_with_default_src_config_repo_checked_out(self, path_rename, path_exists):
        path_exists.return_value = True

        config_file = InMemoryConfigStore("test")
        sample_config = {
            "meta": {
                "config.version": 11
            },
            "node": {
                "root.dir": io.normalize_path("~/.rally/benchmarks")
            },
            "source": {
                "local.src.dir": io.normalize_path("~/.rally/benchmarks/src")
            }
        }
        config_file.store(sample_config)
        config.migrate(config_file, 11, 12, out=null_output)

        self.assertTrue(config_file.backup_created)
        self.assertEqual("12", config_file.config["meta"]["config.version"])
        self.assertEqual(io.normalize_path("~/.rally/benchmarks/src"), config_file.config["node"]["src.root.dir"])
        self.assertEqual("elasticsearch", config_file.config["source"]["elasticsearch.src.subdir"])

        path_rename.assert_has_calls(
            [
                mock.call(io.normalize_path("~/.rally/benchmarks/src"), io.normalize_path("~/.rally/benchmarks/tmp_src_mig")),
                mock.call(io.normalize_path("~/.rally/benchmarks/tmp_src_mig"),
                          io.normalize_path("~/.rally/benchmarks/src/elasticsearch")),
             ]
        )
Example #15
    def test_migrate_from_11_to_12_with_default_src_config_repo_checked_out(
            self, path_rename, path_exists):
        path_exists.return_value = True

        config_file = InMemoryConfigStore("test")
        sample_config = {
            "meta": {
                "config.version": 11
            },
            "node": {
                "root.dir": io.normalize_path("~/.rally/benchmarks")
            },
            "source": {
                "local.src.dir": io.normalize_path("~/.rally/benchmarks/src")
            }
        }
        config_file.store(sample_config)
        config.migrate(config_file, 11, 12, out=null_output)

        self.assertTrue(config_file.backup_created)
        self.assertEqual("12", config_file.config["meta"]["config.version"])
        self.assertEqual(io.normalize_path("~/.rally/benchmarks/src"),
                         config_file.config["node"]["src.root.dir"])
        self.assertEqual(
            "elasticsearch",
            config_file.config["source"]["elasticsearch.src.subdir"])

        path_rename.assert_has_calls([
            mock.call(io.normalize_path("~/.rally/benchmarks/src"),
                      io.normalize_path("~/.rally/benchmarks/tmp_src_mig")),
            mock.call(
                io.normalize_path("~/.rally/benchmarks/tmp_src_mig"),
                io.normalize_path("~/.rally/benchmarks/src/elasticsearch")),
        ])
Example #16
def version():
    release = __version__
    try:
        if git.is_working_copy(io.normalize_path("%s/.." % rally_root_path())):
            revision = git.head_revision(rally_root_path())
            return "%s (git revision: %s)" % (release, revision.strip())
    except BaseException:
        pass
    # cannot determine head revision so user has probably installed Rally via pip instead of git clone
    return release
Example #18
    def test_create_simple_config(self, guess_install_location, working_copy):
        guess_install_location.side_effect = ["/tests/usr/bin/git"]
        # Rally checks in the parent and sibling directories whether there is an ES working copy. We don't want this detection logic
        # to succeed spuriously (e.g. on developer machines).
        working_copy.return_value = False
        mock_input = MockInput([""])

        f = config.ConfigFactory(i=mock_input, sec_i=mock_input, o=null_output)

        config_store = InMemoryConfigStore("test")
        f.create_config(config_store)
        self.assertIsNotNone(config_store.config)

        for section, _ in config_store.config.items():
            for k, v in config_store.config[section].items():
                print("%s::%s: %s" % (section, k, v))

        root_dir = io.normalize_path(os.path.abspath("./in-memory/benchmarks"))
        self.assertTrue("meta" in config_store.config)
        self.assertEqual(str(config.Config.CURRENT_CONFIG_VERSION), config_store.config["meta"]["config.version"])

        self.assertTrue("system" in config_store.config)
        self.assertEqual("local", config_store.config["system"]["env.name"])

        self.assertTrue("node" in config_store.config)

        self.assertEqual(root_dir, config_store.config["node"]["root.dir"])
        self.assertEqual(os.path.join(root_dir, "src"), config_store.config["node"]["src.root.dir"])

        self.assertTrue("source" in config_store.config)
        self.assertEqual("https://github.com/elastic/elasticsearch.git", config_store.config["source"]["remote.repo.url"])
        self.assertEqual("elasticsearch", config_store.config["source"]["elasticsearch.src.subdir"])

        self.assertTrue("benchmarks" in config_store.config)
        self.assertEqual(os.path.join(root_dir, "data"), config_store.config["benchmarks"]["local.dataset.cache"])

        self.assertTrue("reporting" in config_store.config)
        self.assertEqual("in-memory", config_store.config["reporting"]["datastore.type"])
        self.assertEqual("", config_store.config["reporting"]["datastore.host"])
        self.assertEqual("", config_store.config["reporting"]["datastore.port"])
        self.assertEqual("", config_store.config["reporting"]["datastore.secure"])
        self.assertEqual("", config_store.config["reporting"]["datastore.user"])
        self.assertEqual("", config_store.config["reporting"]["datastore.password"])

        self.assertTrue("tracks" in config_store.config)
        self.assertEqual("https://github.com/elastic/rally-tracks", config_store.config["tracks"]["default.url"])

        self.assertTrue("teams" in config_store.config)
        self.assertEqual("https://github.com/elastic/rally-teams", config_store.config["teams"]["default.url"])

        self.assertTrue("defaults" in config_store.config)
        self.assertEqual("False", config_store.config["defaults"]["preserve_benchmark_candidate"])

        self.assertTrue("distributions" in config_store.config)
        self.assertEqual("true", config_store.config["distributions"]["release.cache"])
Example #19
    def __init__(
        self,
        host="localhost",
        port=None,
        http_auth=None,
        use_ssl=False,
        ssl_assert_fingerprint=None,
        headers=None,
        ssl_context=None,
        http_compress=None,
        cloud_id=None,
        api_key=None,
        opaque_id=None,
        loop=None,
        trace_config=None,
        **kwargs,
    ):
        super().__init__(
            host=host,
            port=port,
            http_auth=http_auth,
            use_ssl=use_ssl,
            ssl_assert_fingerprint=ssl_assert_fingerprint,
            # provided to the base class via `maxsize` to keep base class state consistent despite Rally
            # calling the attribute differently.
            maxsize=kwargs.get("max_connections", 0),
            headers=headers,
            ssl_context=ssl_context,
            http_compress=http_compress,
            cloud_id=cloud_id,
            api_key=api_key,
            opaque_id=opaque_id,
            loop=loop,
            **kwargs,
        )

        self._trace_configs = [trace_config] if trace_config else None
        self._enable_cleanup_closed = kwargs.get("enable_cleanup_closed",
                                                 False)

        static_responses = kwargs.get("static_responses")
        self.use_static_responses = static_responses is not None

        if self.use_static_responses:
            # read static responses once and reuse them
            if not StaticRequest.RESPONSES:
                with open(io.normalize_path(static_responses)) as f:
                    StaticRequest.RESPONSES = ResponseMatcher(json.load(f))

            self._request_class = StaticRequest
            self._response_class = StaticResponse
        else:
            self._request_class = aiohttp.ClientRequest
            self._response_class = RawClientResponse
Example #20
def revision():
    """
    :return: The current git revision if Rally is installed in development mode or ``None``.
    """
    # noinspection PyBroadException
    try:
        if git.is_working_copy(io.normalize_path("%s/.." % paths.rally_root())):
            raw_revision = git.head_revision(paths.rally_root())
            return raw_revision.strip()
    except BaseException:
        pass
    return None
Example #21
    def store_default_config(self, template_path=None):
        io.ensure_dir(self.config_dir)
        if template_path:
            source_path = template_path
        else:
            source_path = io.normalize_path(
                os.path.join(os.path.dirname(__file__), "resources",
                             "rally.ini"))
        with open(self.location, "wt", encoding="utf-8") as target:
            with open(source_path, "rt", encoding="utf-8") as src:
                contents = src.read()
                target.write(
                    Template(contents).substitute(CONFIG_DIR=self.config_dir))
Example #22
def write_single_report(report_file, report_format, cwd, numbers_align, headers, data_plain, data_rich):
    if report_format == "markdown":
        formatter = partial(format_as_markdown, numbers_align=numbers_align)
    elif report_format == "csv":
        formatter = format_as_csv
    else:
        raise exceptions.SystemSetupError("Unknown report format '%s'" % report_format)
    print_internal(formatter(headers, data_rich))
    if len(report_file) > 0:
        normalized_report_file = rio.normalize_path(report_file, cwd)
        # ensure that the parent folder already exists when we try to write the file...
        rio.ensure_dir(rio.dirname(normalized_report_file))
        with open(normalized_report_file, mode="a+", encoding="utf-8") as f:
            f.writelines(formatter(headers, data_plain))
Example #23
    def test_migrate_from_11_to_12_with_custom_src_config(self, path_rename, path_exists):
        path_exists.return_value = False

        config_file = InMemoryConfigStore("test")
        sample_config = {
            "meta": {
                "config.version": 11
            },
            "node": {
                "root.dir": io.normalize_path("~/.rally/benchmarks")
            },
            "source": {
                "local.src.dir": io.normalize_path("~/Projects/elasticsearch/master/es")
            }
        }
        config_file.store(sample_config)
        config.migrate(config_file, 11, 12, out=null_output)

        self.assertTrue(config_file.backup_created)
        self.assertEqual("12", config_file.config["meta"]["config.version"])
        self.assertEqual(io.normalize_path("~/Projects/elasticsearch/master"), config_file.config["node"]["src.root.dir"])
        self.assertEqual("es", config_file.config["source"]["elasticsearch.src.subdir"])
        # did all the migrations but nothing moved
        path_rename.assert_not_called()
Example #24
def csv_to_list(csv):
    if csv is None:
        return None
    if io.has_extension(csv, ".json"):
        with open(io.normalize_path(csv), mode="rt", encoding="utf-8") as f:
            content = f.read()
            if not RE_JSON_ARRAY_START.match(content):
                raise ValueError(f"csv args only support arrays in json but you supplied [{csv}]")
            return json.loads(content)
    elif RE_JSON_ARRAY_START.match(csv):
        return json.loads(csv)
    elif len(csv.strip()) == 0:
        return []
    else:
        return [e.strip() for e in csv.split(",")]
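Hypothetical inputs and the results implied by the branches above (for illustration only):

csv_to_list(None)           # -> None
csv_to_list("")             # -> []
csv_to_list("a, b ,c")      # -> ["a", "b", "c"]
csv_to_list('["a", "b"]')   # -> ["a", "b"]  (inline JSON array)
csv_to_list("values.json")  # -> contents of the file, which must contain a JSON array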
Example #25
def version():
    """
    :return: The release version string and an optional suffix for the current git revision if Rally is installed in development mode.
    """
    release = __version__
    # noinspection PyBroadException
    try:
        if git.is_working_copy(io.normalize_path("%s/.." %
                                                 paths.rally_root())):
            revision = git.head_revision(paths.rally_root())
            return "%s (git revision: %s)" % (release, revision.strip())
    except BaseException:
        pass
    # cannot determine head revision so user has probably installed Rally via pip instead of git clone
    return release
Example #26
def install_default_log_config():
    """
    Ensures a log configuration file is present on this machine. The default
    log configuration is based on the template in resources/logging.json.

    It also ensures that the default log path has been created so log files
    can be successfully opened in that directory.
    """
    log_config = log_config_path()
    if not io.exists(log_config):
        io.ensure_dir(io.dirname(log_config))
        source_path = io.normalize_path(os.path.join(os.path.dirname(__file__), "resources", "logging.json"))
        with open(log_config, "w", encoding="UTF-8") as target:
            with open(source_path, "r", encoding="UTF-8") as src:
                contents = src.read().replace("${LOG_PATH}", default_log_path())
                target.write(contents)
    io.ensure_dir(default_log_path())
Example #27
def remove_obsolete_default_log_config():
    """
    Log rotation is problematic because Rally uses multiple processes and there is a lurking race condition when
    rolling log files. Hence, we do not rotate logs from within Rally and leverage established tools like logrotate for that.

    Checks whether the user has a problematic out-of-the-box logging configuration delivered with Rally 1.0.0 which
    used log rotation and removes it so it can be replaced by a new one in a later step.
    """
    log_config = log_config_path()
    if io.exists(log_config):
        source_path = io.normalize_path(os.path.join(os.path.dirname(__file__), "resources", "logging_1_0_0.json"))
        with open(source_path, "r", encoding="UTF-8") as src:
            contents = src.read().replace("${LOG_PATH}", default_log_path())
            source_hash = hashlib.sha512(contents.encode()).hexdigest()
        with open(log_config, "r", encoding="UTF-8") as target:
            target_hash = hashlib.sha512(target.read().encode()).hexdigest()
        if source_hash == target_hash:
            os.rename(log_config, "{}.bak".format(log_config))
Example #28
def write_single_report(report_file, report_format, cwd, headers, data_plain, data_rich, write_header=True, show_also_in_console=True):
    if report_format == "markdown":
        formatter = format_as_markdown
    elif report_format == "csv":
        formatter = format_as_csv
    else:
        raise exceptions.SystemSetupError("Unknown report format '%s'" % report_format)

    if show_also_in_console:
        print_internal(formatter(headers, data_rich))
    if len(report_file) > 0:
        normalized_report_file = rio.normalize_path(report_file, cwd)
        logger.info("Writing report to [%s] (user specified: [%s]) in format [%s]" %
                    (normalized_report_file, report_file, report_format))
        # ensure that the parent folder already exists when we try to write the file...
        rio.ensure_dir(rio.dirname(normalized_report_file))
        with open(normalized_report_file, mode="a+", encoding="UTF-8") as f:
            f.writelines(formatter(headers, data_plain, write_header))
Example #29
def generate(cfg):
    if cfg.opts("generator", "chart.type") == "time-series":
        chart_type = TimeSeriesCharts
    else:
        chart_type = BarCharts

    race_configs = load_race_configs(cfg)
    env = cfg.opts("system", "env.name")

    structures = generate_index_ops(chart_type, race_configs, env) + \
                 generate_queries(chart_type, race_configs, env) + \
                 generate_io(chart_type, race_configs, env) + \
                 generate_gc(chart_type, race_configs, env)

    output_path = cfg.opts("generator", "output.path")
    if output_path:
        with open(io.normalize_path(output_path), mode="wt",
                  encoding="utf-8") as f:
            print(json.dumps(structures, indent=4), file=f)
    else:
        print(json.dumps(structures, indent=4))
Example #30
def install_default_log_config():
    """
    Ensures a log configuration file is present on this machine. The default
    log configuration is based on the template in resources/logging.json.

    It also ensures that the default log path has been created so log files
    can be successfully opened in that directory.
    """
    log_config = log_config_path()
    if not io.exists(log_config):
        io.ensure_dir(io.dirname(log_config))
        source_path = io.normalize_path(
            os.path.join(os.path.dirname(__file__), "resources",
                         "logging.json"))
        with open(log_config, "w", encoding="UTF-8") as target:
            with open(source_path, "r", encoding="UTF-8") as src:
                # Ensure we have a trailing path separator as after LOG_PATH there will only be the file name
                log_path = os.path.join(paths.logs(), "")
                # the logging path might contain backslashes that we need to escape
                log_path = io.escape_path(log_path)
                contents = src.read().replace("${LOG_PATH}", log_path)
                target.write(contents)
    io.ensure_dir(paths.logs())
Example #31
def configure_track_params(arg_parser, args, cfg, command_requires_track=True):
    cfg.add(config.Scope.applicationOverride, "track", "repository.revision",
            args.track_revision)
    # We can assume here that if a track-path is given, the user did not specify a repository either (although argparse sets it to
    # its default value)
    if args.track_path:
        cfg.add(config.Scope.applicationOverride, "track", "track.path",
                os.path.abspath(io.normalize_path(args.track_path)))
        cfg.add(config.Scope.applicationOverride, "track", "repository.name",
                None)
        if args.track_revision:
            # stay as close as possible to argparse errors although we have a custom validation.
            arg_parser.error(
                "argument --track-revision not allowed with argument --track-path"
            )
        if command_requires_track and args.track:
            # stay as close as possible to argparse errors although we have a custom validation.
            arg_parser.error(
                "argument --track not allowed with argument --track-path")
    else:
        cfg.add(config.Scope.applicationOverride, "track", "repository.name",
                args.track_repository)
        if command_requires_track:
            if not args.track:
                raise arg_parser.error("argument --track is required")
            cfg.add(config.Scope.applicationOverride, "track", "track.name",
                    args.track)

    if command_requires_track:
        cfg.add(config.Scope.applicationOverride, "track", "params",
                opts.to_dict(args.track_params))
        cfg.add(config.Scope.applicationOverride, "track", "challenge.name",
                args.challenge)
        cfg.add(config.Scope.applicationOverride, "track", "include.tasks",
                opts.csv_to_list(args.include_tasks))
        cfg.add(config.Scope.applicationOverride, "track", "exclude.tasks",
                opts.csv_to_list(args.exclude_tasks))
Example #32
def configure_mechanic_params(args, cfg, command_requires_car=True):
    if args.team_path:
        cfg.add(config.Scope.applicationOverride, "mechanic", "team.path",
                os.path.abspath(io.normalize_path(args.team_path)))
        cfg.add(config.Scope.applicationOverride, "mechanic",
                "repository.name", None)
        cfg.add(config.Scope.applicationOverride, "mechanic",
                "repository.revision", None)
    else:
        cfg.add(config.Scope.applicationOverride, "mechanic",
                "repository.name", args.team_repository)
        cfg.add(config.Scope.applicationOverride, "mechanic",
                "repository.revision", args.team_revision)

    if command_requires_car:
        if args.distribution_version:
            cfg.add(config.Scope.applicationOverride, "mechanic",
                    "distribution.version", args.distribution_version)
        cfg.add(config.Scope.applicationOverride, "mechanic",
                "distribution.repository", args.distribution_repository)
        cfg.add(config.Scope.applicationOverride, "mechanic", "car.names",
                opts.csv_to_list(args.car))
        cfg.add(config.Scope.applicationOverride, "mechanic", "car.params",
                opts.to_dict(args.car_params))
Example #33
def main():
    check_python_version()
    log.install_default_log_config()
    log.configure_logging()
    logger = logging.getLogger(__name__)
    start = time.time()

    # Early init of console output so we start to show everything consistently.
    console.init(quiet=False)

    arg_parser = create_arg_parser()
    args = arg_parser.parse_args()

    console.init(quiet=args.quiet)
    console.println(BANNER)

    cfg = config.Config(config_name=args.configuration_name)
    sub_command = derive_sub_command(args, cfg)
    ensure_configuration_present(cfg, args, sub_command)

    if args.effective_start_date:
        cfg.add(config.Scope.application, "system", "time.start",
                args.effective_start_date)
        cfg.add(config.Scope.application, "system", "time.start.user_provided",
                True)
    else:
        cfg.add(config.Scope.application, "system", "time.start",
                datetime.datetime.utcnow())
        cfg.add(config.Scope.application, "system", "time.start.user_provided",
                False)

    cfg.add(config.Scope.applicationOverride, "system", "trial.id",
            str(uuid.uuid4()))
    cfg.add(config.Scope.applicationOverride, "system", "quiet.mode",
            args.quiet)
    cfg.add(config.Scope.applicationOverride, "system", "offline.mode",
            args.offline)

    # Local config per node
    cfg.add(config.Scope.application, "node", "rally.root", paths.rally_root())
    cfg.add(config.Scope.application, "node", "rally.cwd", os.getcwd())

    cfg.add(config.Scope.applicationOverride, "mechanic", "source.revision",
            args.revision)
    if args.distribution_version:
        cfg.add(config.Scope.applicationOverride, "mechanic",
                "distribution.version", args.distribution_version)
    cfg.add(config.Scope.applicationOverride, "mechanic",
            "distribution.repository", args.distribution_repository)
    cfg.add(config.Scope.applicationOverride, "mechanic", "car.names",
            opts.csv_to_list(args.car))
    if args.team_path:
        cfg.add(config.Scope.applicationOverride, "mechanic", "team.path",
                os.path.abspath(io.normalize_path(args.team_path)))
        cfg.add(config.Scope.applicationOverride, "mechanic",
                "repository.name", None)
    else:
        cfg.add(config.Scope.applicationOverride, "mechanic",
                "repository.name", args.team_repository)
    cfg.add(config.Scope.applicationOverride, "mechanic", "car.plugins",
            opts.csv_to_list(args.elasticsearch_plugins))
    cfg.add(config.Scope.applicationOverride, "mechanic", "car.params",
            opts.to_dict(args.car_params))
    cfg.add(config.Scope.applicationOverride, "mechanic", "plugin.params",
            opts.to_dict(args.plugin_params))
    if args.keep_cluster_running:
        cfg.add(config.Scope.applicationOverride, "mechanic", "keep.running",
                True)
        # force-preserve the cluster nodes.
        cfg.add(config.Scope.applicationOverride, "mechanic",
                "preserve.install", True)
    else:
        cfg.add(config.Scope.applicationOverride, "mechanic", "keep.running",
                False)
        cfg.add(config.Scope.applicationOverride, "mechanic",
                "preserve.install", convert.to_bool(args.preserve_install))
    cfg.add(config.Scope.applicationOverride, "mechanic", "runtime.jdk",
            args.runtime_jdk)
    cfg.add(config.Scope.applicationOverride, "mechanic", "telemetry.devices",
            opts.csv_to_list(args.telemetry))
    cfg.add(config.Scope.applicationOverride, "mechanic", "telemetry.params",
            opts.to_dict(args.telemetry_params))

    cfg.add(config.Scope.applicationOverride, "race", "pipeline",
            args.pipeline)
    cfg.add(config.Scope.applicationOverride, "race", "laps", args.laps)
    cfg.add(config.Scope.applicationOverride, "race", "user.tag",
            args.user_tag)

    # We can assume here that if a track-path is given, the user did not specify a repository either (although argparse sets it to
    # its default value)
    if args.track_path:
        cfg.add(config.Scope.applicationOverride, "track", "track.path",
                os.path.abspath(io.normalize_path(args.track_path)))
        cfg.add(config.Scope.applicationOverride, "track", "repository.name",
                None)
        if args.track:
            # stay as close as possible to argparse errors although we have a custom validation.
            arg_parser.error(
                "argument --track not allowed with argument --track-path")
        # cfg.add(config.Scope.applicationOverride, "track", "track.name", None)
    else:
        # cfg.add(config.Scope.applicationOverride, "track", "track.path", None)
        cfg.add(config.Scope.applicationOverride, "track", "repository.name",
                args.track_repository)
        # set the default programmatically because we need to determine whether the user has provided a value
        chosen_track = args.track if args.track else "geonames"
        cfg.add(config.Scope.applicationOverride, "track", "track.name",
                chosen_track)

    cfg.add(config.Scope.applicationOverride, "track", "params",
            opts.to_dict(args.track_params))
    cfg.add(config.Scope.applicationOverride, "track", "challenge.name",
            args.challenge)
    cfg.add(config.Scope.applicationOverride, "track", "include.tasks",
            opts.csv_to_list(args.include_tasks))
    cfg.add(config.Scope.applicationOverride, "track", "test.mode.enabled",
            args.test_mode)

    cfg.add(config.Scope.applicationOverride, "reporting", "format",
            args.report_format)
    cfg.add(config.Scope.applicationOverride, "reporting", "values",
            args.show_in_report)
    cfg.add(config.Scope.applicationOverride, "reporting", "output.path",
            args.report_file)
    if sub_command == "compare":
        cfg.add(config.Scope.applicationOverride, "reporting",
                "baseline.timestamp", args.baseline)
        cfg.add(config.Scope.applicationOverride, "reporting",
                "contender.timestamp", args.contender)
    if sub_command == "generate":
        cfg.add(config.Scope.applicationOverride, "generator", "chart.type",
                args.chart_type)
        cfg.add(config.Scope.applicationOverride, "generator", "output.path",
                args.output_path)

        if args.chart_spec_path and (args.track or args.challenge or args.car
                                     or args.node_count):
            console.println(
                "You need to specify either --chart-spec-path or --track, --challenge, --car and "
                "--node-count but not both.")
            exit(1)
        if args.chart_spec_path:
            cfg.add(config.Scope.applicationOverride, "generator",
                    "chart.spec.path", args.chart_spec_path)
        else:
            # other options are stored elsewhere already
            cfg.add(config.Scope.applicationOverride, "generator",
                    "node.count", args.node_count)

    cfg.add(config.Scope.applicationOverride, "driver", "profiling",
            args.enable_driver_profiling)
    cfg.add(config.Scope.applicationOverride, "driver", "on.error",
            args.on_error)
    cfg.add(config.Scope.applicationOverride, "driver", "load_driver_hosts",
            opts.csv_to_list(args.load_driver_hosts))
    if sub_command != "list":
        # Also needed by mechanic (-> telemetry) - duplicate by module?
        target_hosts = opts.TargetHosts(args.target_hosts)
        cfg.add(config.Scope.applicationOverride, "client", "hosts",
                target_hosts)
        client_options = opts.ClientOptions(args.client_options,
                                            target_hosts=target_hosts)
        cfg.add(config.Scope.applicationOverride, "client", "options",
                client_options)
        if "timeout" not in client_options.default:
            console.info(
                "You did not provide an explicit timeout in the client options. Assuming default of 10 seconds."
            )
        if list(target_hosts.all_hosts) != list(
                client_options.all_client_options):
            console.println(
                "--target-hosts and --client-options must define the same keys for multi cluster setups."
            )
            exit(1)
    # split by component?
    if sub_command == "list":
        cfg.add(config.Scope.applicationOverride, "system",
                "list.config.option", args.configuration)
        cfg.add(config.Scope.applicationOverride, "system",
                "list.races.max_results", args.limit)

    logger.info("OS [%s]", str(os.uname()))
    logger.info("Python [%s]", str(sys.implementation))
    logger.info("Rally version [%s]", version.version())
    logger.info("Command line arguments: %s", args)
    # Configure networking
    net.init()
    if not args.offline:
        if not net.has_internet_connection():
            console.warn(
                "No Internet connection detected. Automatic download of track data sets etc. is disabled.",
                logger=logger)
            cfg.add(config.Scope.applicationOverride, "system", "offline.mode",
                    True)
        else:
            logger.info("Detected a working Internet connection.")

    success = dispatch_sub_command(cfg, sub_command)

    end = time.time()
    if success:
        console.println("")
        console.info("SUCCESS (took %d seconds)" % (end - start),
                     overline="-",
                     underline="-")
    else:
        console.println("")
        console.info("FAILURE (took %d seconds)" % (end - start),
                     overline="-",
                     underline="-")
        sys.exit(64)
Example #34
def migrate(config_file, current_version, target_version, out=print, i=input):
    prompter = Prompter(i=i, o=out, assume_defaults=False)
    logger.info("Upgrading configuration from version [%s] to [%s]." %
                (current_version, target_version))
    # Something is really fishy. We don't want to downgrade the configuration.
    if current_version >= target_version:
        raise ConfigError(
            "The existing config file is available in a later version already. Expected version <= [%s] but found [%s]"
            % (target_version, current_version))
    # but first a backup...
    config_file.backup()
    config = config_file.load(interpolation=None)

    if current_version == 0 and target_version > current_version:
        logger.info("Migrating config from version [0] to [1]")
        current_version = 1
        config["meta"] = {}
        config["meta"]["config.version"] = str(current_version)
        # in version 1 we changed some directories from being absolute to being relative
        config["system"]["log.root.dir"] = "logs"
        config["provisioning"]["local.install.dir"] = "install"
        config["reporting"]["report.base.dir"] = "reports"
    if current_version == 1 and target_version > current_version:
        logger.info("Migrating config from version [1] to [2]")
        current_version = 2
        config["meta"]["config.version"] = str(current_version)
        # no need to ask the user now if we are about to upgrade to version 4
        config["reporting"]["datastore.type"] = "in-memory"
        config["reporting"]["datastore.host"] = ""
        config["reporting"]["datastore.port"] = ""
        config["reporting"]["datastore.secure"] = ""
        config["reporting"]["datastore.user"] = ""
        config["reporting"]["datastore.password"] = ""
        config["system"]["env.name"] = "local"
    if current_version == 2 and target_version > current_version:
        logger.info("Migrating config from version [2] to [3]")
        current_version = 3
        config["meta"]["config.version"] = str(current_version)
        # Remove obsolete settings
        config["reporting"].pop("report.base.dir")
        config["reporting"].pop("output.html.report.filename")
    if current_version == 3 and target_version > current_version:
        root_dir = config["system"]["root.dir"]
        out("""
            *****************************************************************************************

            You have an old configuration of Rally. Rally now has a much simpler setup
            routine which will autodetect lots of settings for you and it no longer
            requires you to set up a metrics store.

            Rally will now migrate your configuration but if you don't need advanced features
            like a metrics store, then you should delete the configuration directory:

              rm -rf {0}

            and then rerun Rally's configuration routine:

              {1} configure

            Please also note you have {2:.1f} GB of data in your current benchmark directory at

              {3}

            You might want to clean up this directory also.

            For more details please see {4}

            *****************************************************************************************

            Pausing for 10 seconds to let you consider this message.
            """.format(
            config_file.config_dir, PROGRAM_NAME,
            convert.bytes_to_gb(io.get_size(root_dir)), root_dir,
            console.format.link(
                "https://github.com/elastic/rally/blob/master/CHANGELOG.md#030"
            )))
        time.sleep(10)
        logger.info("Migrating config from version [3] to [4]")
        current_version = 4
        config["meta"]["config.version"] = str(current_version)
        if len(config["reporting"]["datastore.host"]) > 0:
            config["reporting"]["datastore.type"] = "elasticsearch"
        else:
            config["reporting"]["datastore.type"] = "in-memory"
        # Remove obsolete settings
        config["build"].pop("maven.bin")
        config["benchmarks"].pop("metrics.stats.disk.device")

    if current_version == 4 and target_version > current_version:
        config["tracks"] = {}
        config["tracks"][
            "default.url"] = "https://github.com/elastic/rally-tracks"
        current_version = 5
        config["meta"]["config.version"] = str(current_version)

    if current_version == 5 and target_version > current_version:
        config["defaults"] = {}
        config["defaults"]["preserve_benchmark_candidate"] = str(False)
        current_version = 6
        config["meta"]["config.version"] = str(current_version)

    if current_version == 6 and target_version > current_version:
        # Remove obsolete settings
        config.pop("provisioning")
        config["system"].pop("log.root.dir")
        current_version = 7
        config["meta"]["config.version"] = str(current_version)

    if current_version == 7 and target_version > current_version:
        # move [system][root.dir] to [node][root.dir]
        if "node" not in config:
            config["node"] = {}
        config["node"]["root.dir"] = config["system"].pop("root.dir")
        # also move all references!
        for section in config:
            for k, v in config[section].items():
                config[section][k] = v.replace("${system:root.dir}",
                                               "${node:root.dir}")
        current_version = 8
        config["meta"]["config.version"] = str(current_version)
    if current_version == 8 and target_version > current_version:
        config["teams"] = {}
        config["teams"][
            "default.url"] = "https://github.com/elastic/rally-teams"
        current_version = 9
        config["meta"]["config.version"] = str(current_version)
    if current_version == 9 and target_version > current_version:
        config["distributions"] = {}
        config["distributions"]["release.1.url"] = "https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-" \
                                                   "{{VERSION}}.tar.gz"
        config["distributions"]["release.2.url"] = "https://download.elasticsearch.org/elasticsearch/release/org/elasticsearch/" \
                                                   "distribution/tar/elasticsearch/{{VERSION}}/elasticsearch-{{VERSION}}.tar.gz"
        config["distributions"][
            "release.url"] = "https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-{{VERSION}}.tar.gz"
        config["distributions"]["release.cache"] = "true"
        current_version = 10
        config["meta"]["config.version"] = str(current_version)
    if current_version == 10 and target_version > current_version:
        config["runtime"]["java.home"] = config["runtime"].pop("java8.home")
        current_version = 11
        config["meta"]["config.version"] = str(current_version)
    if current_version == 11 and target_version > current_version:
        # As this is a rather complex migration, we log more than usual to understand potential migration problems better.
        if "source" in config:
            if "local.src.dir" in config["source"]:
                previous_root = config["source"].pop("local.src.dir")
                logger.info("Set [source][local.src.dir] to [%s]." %
                            previous_root)
                # if this directory was Rally's default location, then move it on the file system to allow for checkouts of plugins
                # in the sibling directory.
                if previous_root == os.path.join(config["node"]["root.dir"],
                                                 "src"):
                    new_root_dir_all_sources = previous_root
                    new_es_sub_dir = "elasticsearch"
                    new_root = os.path.join(new_root_dir_all_sources,
                                            new_es_sub_dir)
                    # only attempt to move if the directory exists. It may be possible that users never ran a source benchmark although they
                    # have configured it. In that case the source directory will not yet exist.
                    if io.exists(previous_root):
                        logger.info(
                            "Previous source directory was at Rally's default location [%s]. Moving to [%s]."
                            % (previous_root, new_root))
                        try:
                            # we need to do this in two steps as we need to move the sources to a subdirectory
                            tmp_path = io.normalize_path(
                                os.path.join(new_root_dir_all_sources,
                                             os.pardir, "tmp_src_mig"))
                            os.rename(previous_root, tmp_path)
                            io.ensure_dir(new_root)
                            os.rename(tmp_path, new_root)
                        except OSError:
                            logger.exception(
                                "Could not move source directory from [%s] to [%s]."
                                % (previous_root, new_root))
                            # A warning is sufficient as Rally should just do a fresh checkout if moving did not work.
                            console.warn(
                                "Elasticsearch source directory could not be moved from [%s] to [%s]. Please check the logs."
                                % (previous_root, new_root))
                    else:
                        logger.info(
                            "Source directory is configured at Rally's default location [%s] but does not exist yet."
                            % previous_root)
                else:
                    logger.info(
                        "Previous source directory was the custom directory [%s]."
                        % previous_root)
                    new_root_dir_all_sources = io.normalize_path(
                        os.path.join(previous_root, os.path.pardir))
                    # name of the elasticsearch project directory.
                    new_es_sub_dir = io.basename(previous_root)

                logger.info("Setting [node][src.root.dir] to [%s]." %
                            new_root_dir_all_sources)
                config["node"]["src.root.dir"] = new_root_dir_all_sources
                logger.info(
                    "Setting [source][elasticsearch.src.subdir] to [%s]" %
                    new_es_sub_dir)
                config["source"]["elasticsearch.src.subdir"] = new_es_sub_dir
            else:
                logger.info(
                    "Key [local.src.dir] not found. Advancing without changes."
                )
        else:
            logger.info(
                "No section named [source] found in config. Advancing without changes."
            )
        current_version = 12
        config["meta"]["config.version"] = str(current_version)

    if current_version == 12 and target_version > current_version:
        # the current configuration allows to benchmark from sources
        if "build" in config and "gradle.bin" in config["build"]:
            java_9_home = io.guess_java_home(major_version=9)
            from esrally.utils import jvm
            if java_9_home and not jvm.is_early_access_release(java_9_home):
                logger.debug("Autodetected a JDK 9 installation at [%s]" %
                             java_9_home)
                if "runtime" not in config:
                    config["runtime"] = {}
                config["runtime"]["java9.home"] = java_9_home
            else:
                logger.debug(
                    "Could not autodetect a JDK 9 installation. Checking [java.home] already points to a JDK 9."
                )
                detected = False
                if "runtime" in config:
                    java_home = config["runtime"]["java.home"]
                    if jvm.major_version(
                            java_home
                    ) == 9 and not jvm.is_early_access_release(java_home):
                        config["runtime"]["java9.home"] = java_home
                        detected = True

                if not detected:
                    logger.debug(
                        "Could not autodetect a JDK 9 installation. Asking user."
                    )
                    raw_java_9_home = prompter.ask_property(
                        "Enter the JDK 9 root directory",
                        check_path_exists=True,
                        mandatory=False)
                    if raw_java_9_home and jvm.major_version(raw_java_9_home) == 9 \
                            and not jvm.is_early_access_release(raw_java_9_home):
                        java_9_home = io.normalize_path(raw_java_9_home)
                        config["runtime"]["java9.home"] = java_9_home
                    else:
                        out("********************************************************************************"
                            )
                        out("You don't have a valid JDK 9 installation and cannot benchmark source builds."
                            )
                        out("")
                        out("You can still benchmark binary distributions with e.g.:"
                            )
                        out("")
                        out("  %s --distribution-version=6.0.0" % PROGRAM_NAME)
                        out("********************************************************************************"
                            )
                        out("")

        current_version = 13
        config["meta"]["config.version"] = str(current_version)

    # all migrations done
    config_file.store(config)
    logger.info("Successfully self-upgraded configuration to version [%s]" %
                target_version)
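
The migration code above advances one configuration version at a time: each guarded block applies its changes, then bumps current_version and [meta][config.version] so that the next block's guard can fire. A minimal sketch of that idiom, with a placeholder function name and the actual per-version work reduced to comments:

def migrate_sketch(config, current_version, target_version):
    # Each step mutates the config dict in place, then advances the version marker
    # so that the following step's guard can take over on the same invocation.
    if current_version == 11 and target_version > current_version:
        # ... apply the changes required for config version 12 ...
        current_version = 12
        config["meta"]["config.version"] = str(current_version)
    if current_version == 12 and target_version > current_version:
        # ... apply the changes required for config version 13 ...
        current_version = 13
        config["meta"]["config.version"] = str(current_version)
    return config
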
Example #35
    def create_config(self,
                      config_file,
                      advanced_config=False,
                      assume_defaults=False,
                      use_gradle_wrapper=False,
                      java_home=None,
                      runtime_java_home=None):
        """
        Either creates a new configuration file or overwrites an existing one. Will ask the user for input on configurable properties
        and writes them to the configuration file in ~/.rally/rally.ini.

        :param config_file:
        :param advanced_config: Whether to ask for properties that are not necessary for everyday use (on a dev machine). Default: False.
        :param assume_defaults: If True, assume the user accepted all values for which defaults are provided. Mainly intended for automatic
        configuration in CI run. Default: False.
        :param use_gradle_wrapper: If True, use the Gradle wrapper, otherwise use the system's Gradle version. Default: False.
        """
        self.prompter = Prompter(self.i, self.sec_i, self.o, assume_defaults)
        if advanced_config:
            self.o("Running advanced configuration. You can get additional help at:")
            self.o("")
            self.o("  %s" % console.format.link("%sconfiguration.html" % DOC_LINK))
            self.o("")
        else:
            self.o("Running simple configuration. Run the advanced configuration with:")
            self.o("")
            self.o("  %s configure --advanced-config" % PROGRAM_NAME)
            self.o("")

        if config_file.present:
            self.o("\nWARNING: Will overwrite existing config file at [%s]\n" %
                   config_file.location)
            logger.debug("Detected an existing configuration file at [%s]" %
                         config_file.location)
        else:
            logger.debug(
                "Did not detect a configuration file at [%s]. Running initial configuration routine."
                % config_file.location)

        # Autodetect settings
        self.o("* Autodetecting available third-party software")
        git_path = io.guess_install_location("git")
        gradle_bin = "./gradlew" if use_gradle_wrapper else io.guess_install_location(
            "gradle")

        java_8_home = runtime_java_home if runtime_java_home else io.guess_java_home(
            major_version=8)
        java_9_home = java_home if java_home else io.guess_java_home(
            major_version=9)
        from esrally.utils import jvm
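        # Preference order: a detected JDK 8 first, then a GA (non-EA) JDK 9; if neither is found,
        # auto_detected_java_home stays None and the user is prompted for a JDK further below.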
        if java_8_home:
            auto_detected_java_home = java_8_home
        # Don't auto-detect an EA release and bring trouble to the user later on. They can still configure it manually if they want to.
        elif java_9_home and not jvm.is_early_access_release(java_9_home):
            auto_detected_java_home = java_9_home
        else:
            auto_detected_java_home = None

        self.print_detection_result("git    ", git_path)
        self.print_detection_result("gradle ", gradle_bin)
        self.print_detection_result("JDK    ", auto_detected_java_home, warn_if_missing=True,
                                    additional_message="You cannot benchmark Elasticsearch on this machine without a JDK.")
        self.o("")

        # users that don't have Gradle available cannot benchmark from sources
        benchmark_from_sources = gradle_bin
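        # Note: this holds the gradle binary path (or None) and is only used as a truthy flag below.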

        if not benchmark_from_sources:
            self.o("********************************************************************************")
            self.o("You don't have the required software to benchmark Elasticsearch source builds.")
            self.o("")
            self.o("You can still benchmark binary distributions with e.g.:")
            self.o("")
            self.o("  %s --distribution-version=6.0.0" % PROGRAM_NAME)
            self.o("********************************************************************************")
            self.o("")

        root_dir = io.normalize_path(
            os.path.abspath(os.path.join(config_file.config_dir,
                                         "benchmarks")))
        if advanced_config:
            root_dir = io.normalize_path(
                self._ask_property("Enter the benchmark data directory",
                                   default_value=root_dir))
        else:
            self.o("* Setting up benchmark data directory in %s" % root_dir)

        if benchmark_from_sources:
            if not java_9_home or jvm.is_early_access_release(java_9_home):
                raw_java_9_home = self._ask_property(
                    "Enter the JDK 9 root directory",
                    check_path_exists=True,
                    mandatory=False)
                if raw_java_9_home and jvm.major_version(raw_java_9_home) == 9 \
                        and not jvm.is_early_access_release(raw_java_9_home):
                    java_9_home = io.normalize_path(raw_java_9_home)
                else:
                    benchmark_from_sources = False
                    self.o("********************************************************************************")
                    self.o("You don't have a valid JDK 9 installation and cannot benchmark source builds.")
                    self.o("")
                    self.o("You can still benchmark binary distributions with e.g.:")
                    self.o("")
                    self.o("  %s --distribution-version=6.0.0" % PROGRAM_NAME)
                    self.o("********************************************************************************")
                    self.o("")

        if benchmark_from_sources:
            # We try to autodetect an existing ES source directory
            guess = self._guess_es_src_dir()
            if guess:
                source_dir = guess
                logger.debug(
                    "Autodetected Elasticsearch project directory at [%s]." %
                    source_dir)
            else:
                default_src_dir = os.path.join(root_dir, "src",
                                               "elasticsearch")
                logger.debug(
                    "Could not autodetect Elasticsearch project directory. Providing [%s] as default."
                    % default_src_dir)
                source_dir = default_src_dir

            if advanced_config:
                source_dir = io.normalize_path(
                    self._ask_property(
                        "Enter your Elasticsearch project directory:",
                        default_value=source_dir))
            if not advanced_config:
                self.o("* Setting up benchmark source directory in %s" %
                       source_dir)
                self.o("")

            # Not everybody might have SSH access. Play safe with the default. It may be slower but this will work for everybody.
            repo_url = "https://github.com/elastic/elasticsearch.git"

        if auto_detected_java_home:
            java_home = auto_detected_java_home
            local_benchmarks = True
        else:
            raw_java_home = self._ask_property(
                "Enter the JDK root directory (version 8 or later)",
                check_path_exists=True,
                mandatory=False)
            java_home = io.normalize_path(
                raw_java_home) if raw_java_home else None
            if not java_home:
                local_benchmarks = False
                self.o("")
                self.o(
                    "********************************************************************************"
                )
                self.o(
                    "You don't have a JDK installed but Elasticsearch requires one to run. This means"
                )
                self.o(
                    "that you cannot benchmark Elasticsearch on this machine.")
                self.o("")
                self.o("You can still benchmark against remote machines e.g.:")
                self.o("")
                self.o(
                    "  %s --pipeline=benchmark-only --target-host=\"NODE_IP:9200\""
                    % PROGRAM_NAME)
                self.o("")
                self.o("See %s for further info." %
                       console.format.link("%srecipes.html" % DOC_LINK))
                self.o(
                    "********************************************************************************"
                )
                self.o("")
            else:
                local_benchmarks = True

        if advanced_config:
            data_store_choice = self._ask_property(
                "Where should metrics be kept?"
                "\n\n"
                "(1) In memory (simpler but less options for analysis)\n"
                "(2) Elasticsearch (requires a separate ES instance, keeps all raw samples for analysis)"
                "\n\n",
                default_value="1",
                choices=["1", "2"])
            if data_store_choice == "1":
                env_name = "local"
                data_store_type = "in-memory"
                data_store_host, data_store_port, data_store_secure, data_store_user, data_store_password = "", "", "", "", ""
            else:
                data_store_type = "elasticsearch"
                data_store_host, data_store_port, data_store_secure, data_store_user, data_store_password = self._ask_data_store()

                env_name = self._ask_env_name()

            preserve_install = convert.to_bool(
                self._ask_property(
                    "Do you want Rally to keep the Elasticsearch benchmark candidate "
                    "installation including the index (will use several GB per trial run)?",
                    default_value=False))
        else:
            # Does not matter for an in-memory store
            env_name = "local"
            data_store_type = "in-memory"
            data_store_host, data_store_port, data_store_secure, data_store_user, data_store_password = "", "", "", "", ""
            preserve_install = False

        config = configparser.ConfigParser()
        config["meta"] = {}
        config["meta"]["config.version"] = str(Config.CURRENT_CONFIG_VERSION)

        config["system"] = {}
        config["system"]["env.name"] = env_name

        config["node"] = {}
        config["node"]["root.dir"] = root_dir

        if benchmark_from_sources:
            # user has provided the Elasticsearch directory but the root for Elasticsearch and related plugins will be one level above
            final_source_dir = io.normalize_path(
                os.path.abspath(os.path.join(source_dir, os.pardir)))
            config["node"]["src.root.dir"] = final_source_dir

            config["source"] = {}
            config["source"]["remote.repo.url"] = repo_url
            # the Elasticsearch directory is just the last path component (relative to the source root directory)
            config["source"]["elasticsearch.src.subdir"] = io.basename(
                source_dir)
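            # For illustration (hypothetical path): source_dir=/home/user/elasticsearch-src/elasticsearch
            # results in src.root.dir=/home/user/elasticsearch-src and elasticsearch.src.subdir=elasticsearch.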

        if gradle_bin:
            config["build"] = {}
            config["build"]["gradle.bin"] = gradle_bin

        config["runtime"] = {}
        if java_home:
            config["runtime"]["java.home"] = java_home
        if java_9_home:
            config["runtime"]["java9.home"] = java_9_home

        config["benchmarks"] = {}
        config["benchmarks"]["local.dataset.cache"] = "${node:root.dir}/data"

        config["reporting"] = {}
        config["reporting"]["datastore.type"] = data_store_type
        config["reporting"]["datastore.host"] = data_store_host
        config["reporting"]["datastore.port"] = data_store_port
        config["reporting"]["datastore.secure"] = data_store_secure
        config["reporting"]["datastore.user"] = data_store_user
        config["reporting"]["datastore.password"] = data_store_password

        config["tracks"] = {}
        config["tracks"][
            "default.url"] = "https://github.com/elastic/rally-tracks"

        config["teams"] = {}
        config["teams"][
            "default.url"] = "https://github.com/elastic/rally-teams"

        config["defaults"] = {}
        config["defaults"]["preserve_benchmark_candidate"] = str(
            preserve_install)

        config["distributions"] = {}
        config["distributions"]["release.1.url"] = "https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-" \
                                                   "{{VERSION}}.tar.gz"
        config["distributions"]["release.2.url"] = "https://download.elasticsearch.org/elasticsearch/release/org/elasticsearch/" \
                                                   "distribution/tar/elasticsearch/{{VERSION}}/elasticsearch-{{VERSION}}.tar.gz"
        config["distributions"][
            "release.url"] = "https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-{{VERSION}}.tar.gz"
        config["distributions"]["release.cache"] = "true"

        config_file.store(config)

        self.o("Configuration successfully written to %s. Happy benchmarking!" % config_file.location)
        self.o("")
        if local_benchmarks and benchmark_from_sources:
            self.o("To benchmark Elasticsearch with the default benchmark, run:")
            self.o("")
            self.o("  %s" % PROGRAM_NAME)
            self.o("")
        elif local_benchmarks:
            self.o("To benchmark Elasticsearch 6.0.0 with the default benchmark, run:")
            self.o("")
            self.o("  %s --distribution-version=6.0.0" % PROGRAM_NAME)
            self.o("")
        else:
            # we've already printed an info for the user. No need to repeat that.
            pass

        self.o("More info about Rally:")
        self.o("")
        self.o("* Type %s --help" % PROGRAM_NAME)
        self.o("* Read the documentation at %s" %
               console.format.link(DOC_LINK))
        self.o("* Ask a question on the forum at %s" % console.format.link(
            "https://discuss.elastic.co/c/elasticsearch/rally"))
Example #36
 def test_normalize_path(self):
     self.assertEqual("/already/a/normalized/path", io.normalize_path("/already/a/normalized/path"))
     self.assertEqual("/not/normalized", io.normalize_path("/not/normalized/path/../"))
     self.assertEqual(os.getenv("HOME"), io.normalize_path("~/Documents/.."))
Example #37
 def test_normalize_path(self):
     self.assertEqual("/already/a/normalized/path", io.normalize_path("/already/a/normalized/path"))
     self.assertEqual("/not/normalized", io.normalize_path("/not/normalized/path/../"))
     self.assertEqual(os.path.expanduser("~"), io.normalize_path("~/Documents/.."))
Example #38
    def create_config(self, config_file, advanced_config=False, assume_defaults=False):
        """
        Either creates a new configuration file or overwrites an existing one. Will ask the user for input on configurable properties
        and write them to the configuration file in ~/.rally/rally.ini.

        :param config_file:
        :param advanced_config: Whether to ask for properties that are not necessary for everyday use (on a dev machine). Default: False.
        :param assume_defaults: If True, assume the user accepted all values for which defaults are provided. Mainly intended for automatic
        configuration in CI runs. Default: False.
        """
        self.assume_defaults = assume_defaults
        if advanced_config:
            self.o("Running advanced configuration. You can get additional help at:")
            self.o("")
            self.o("  %s" % console.format.link("%sconfiguration.html" % DOC_LINK))
            self.o("")

            logger.info("Running advanced configuration routine.")
            self.o("")
        else:
            self.o("Running simple configuration. Run the advanced configuration with:")
            self.o("")
            self.o("  %s configure --advanced-config" % PROGRAM_NAME)
            self.o("")
            logger.info("Running simple configuration routine.")

        if config_file.present:
            self.o("\nWARNING: Will overwrite existing config file at [%s]\n" % config_file.location)
            logger.debug("Detected an existing configuration file at [%s]" % config_file.location)
        else:
            logger.debug("Did not detect a configuration file at [%s]. Running initial configuration routine." % config_file.location)

        # Autodetect settings
        self.o("[✓] Autodetecting available third-party software")
        git_path = io.guess_install_location("git")
        gradle_bin = io.guess_install_location("gradle")
        # default_jdk_7 = io.guess_java_home(major_version=7)
        default_jdk_8 = io.guess_java_home(major_version=8)

        self.print_detection_result("git    ", git_path)
        self.print_detection_result("gradle ", gradle_bin)
        self.print_detection_result("JDK 8  ", default_jdk_8,
                                    warn_if_missing=True,
                                    additional_message="You cannot benchmark Elasticsearch 5.x without a JDK 8 installation")
        # self.print_detection_result("JDK 9 ", default_jdk_9, warn_if_missing=True)
        self.o("")

        # users that don't have Gradle available cannot benchmark from sources
        benchmark_from_sources = gradle_bin

        if not benchmark_from_sources:
            self.o("**********************************************************************************")
            self.o("You don't have the necessary software to benchmark source builds of Elasticsearch.")
            self.o("")
            self.o("You can still benchmark binary distributions with e.g.:")
            self.o("")
            self.o("  %s --distribution-version=5.0.0" % PROGRAM_NAME)
            self.o("**********************************************************************************")
            self.o("")

        root_dir = "%s/benchmarks" % config_file.config_dir
        self.o("[✓] Setting up benchmark data directory in [%s] (needs several GB)." % root_dir)

        if benchmark_from_sources:
            # We try to autodetect an existing ES source directory
            guess = self._guess_es_src_dir()
            if guess:
                source_dir = guess
                self.o("[✓] Autodetected Elasticsearch project directory at [%s]." % source_dir)
                logger.debug("Autodetected Elasticsearch project directory at [%s]." % source_dir)
            else:
                default_src_dir = "%s/src" % root_dir
                logger.debug("Could not autodetect Elasticsearch project directory. Providing [%s] as default." % default_src_dir)
                source_dir = io.normalize_path(self._ask_property("Enter your Elasticsearch project directory:",
                                                                  default_value=default_src_dir))
            # Not everybody might have SSH access. Play safe with the default. It may be slower but this will work for everybody.
            repo_url = "https://github.com/elastic/elasticsearch.git"

        if default_jdk_8:
            jdk8_home = default_jdk_8
        else:
            self.o("")
            jdk8_home = io.normalize_path(self._ask_property("Enter the JDK 8 root directory:", check_path_exists=True))

        if advanced_config:
            env_name = self._ask_env_name()
            data_store_type = "elasticsearch"
            data_store_host, data_store_port, data_store_secure, data_store_user, data_store_password = self._ask_data_store()

            preserve_install = convert.to_bool(self._ask_property("Do you want Rally to keep the Elasticsearch benchmark candidate "
                                                                  "installation including the index (will use lots of disk space)?",
                                                                  default_value=False))
        else:
            # Does not matter too much for an in-memory store
            env_name = "local"
            data_store_type = "in-memory"
            data_store_host, data_store_port, data_store_secure, data_store_user, data_store_password = "", "", "", "", ""
            preserve_install = False

        config = configparser.ConfigParser()
        config["meta"] = {}
        config["meta"]["config.version"] = str(Config.CURRENT_CONFIG_VERSION)

        config["system"] = {}
        config["system"]["root.dir"] = root_dir
        config["system"]["log.root.dir"] = "logs"
        config["system"]["env.name"] = env_name

        if benchmark_from_sources:
            config["source"] = {}
            config["source"]["local.src.dir"] = source_dir
            config["source"]["remote.repo.url"] = repo_url

            config["build"] = {}
            config["build"]["gradle.bin"] = gradle_bin

        config["provisioning"] = {}
        config["provisioning"]["local.install.dir"] = "install"

        config["runtime"] = {}
        config["runtime"]["java8.home"] = jdk8_home

        config["benchmarks"] = {}
        config["benchmarks"]["local.dataset.cache"] = "${system:root.dir}/data"

        config["reporting"] = {}
        config["reporting"]["datastore.type"] = data_store_type
        config["reporting"]["datastore.host"] = data_store_host
        config["reporting"]["datastore.port"] = data_store_port
        config["reporting"]["datastore.secure"] = data_store_secure
        config["reporting"]["datastore.user"] = data_store_user
        config["reporting"]["datastore.password"] = data_store_password

        config["tracks"] = {}
        config["tracks"]["default.url"] = "https://github.com/elastic/rally-tracks"

        config["defaults"] = {}
        config["defaults"]["preserve_benchmark_candidate"] = str(preserve_install)

        config_file.store(config)

        self.o("[✓] Configuration successfully written to [%s]. Happy benchmarking!" % config_file.location)
        self.o("")
        if benchmark_from_sources:
            self.o("To benchmark Elasticsearch with the default benchmark run:")
            self.o("")
            self.o("  %s" % PROGRAM_NAME)
        else:
            self.o("To benchmark Elasticsearch 5.0.0 with the default benchmark run:")
            self.o("")
            self.o("  %s --distribution-version=5.0.0" % PROGRAM_NAME)

        self.o("")
        self.o("For help, type %s --help or see the user documentation at %s"
               % (PROGRAM_NAME, console.format.link(DOC_LINK)))