def main():
    pre_configure_logging()
    args = parse_args()
    print(BANNER)

    cfg = config.Config(config_name=args.configuration_name)
    sub_command = derive_sub_command(args, cfg)
    ensure_configuration_present(cfg, args, sub_command)
    # Add global meta info derived by rally itself
    cfg.add(config.Scope.application, "meta", "time.start", args.effective_start_date)
    cfg.add(config.Scope.application, "system", "rally.root", rally_root_path())
    cfg.add(config.Scope.application, "system", "invocation.root.dir", paths.Paths(cfg).invocation_root())
    # Add command line config
    cfg.add(config.Scope.applicationOverride, "source", "revision", args.revision)
    cfg.add(config.Scope.applicationOverride, "source", "distribution.version", args.distribution_version)
    cfg.add(config.Scope.applicationOverride, "source", "distribution.repository", args.distribution_repository)
    cfg.add(config.Scope.applicationOverride, "system", "pipeline", args.pipeline)
    cfg.add(config.Scope.applicationOverride, "system", "track.repository", args.track_repository)
    cfg.add(config.Scope.applicationOverride, "system", "track", args.track)
    cfg.add(config.Scope.applicationOverride, "system", "quiet.mode", args.quiet)
    cfg.add(config.Scope.applicationOverride, "system", "offline.mode", args.offline)
    cfg.add(config.Scope.applicationOverride, "system", "user.tag", args.user_tag)
    cfg.add(config.Scope.applicationOverride, "telemetry", "devices", csv_to_list(args.telemetry))
    cfg.add(config.Scope.applicationOverride, "benchmarks", "challenge", args.challenge)
    cfg.add(config.Scope.applicationOverride, "benchmarks", "car", args.car)
    cfg.add(config.Scope.applicationOverride, "benchmarks", "rounds", args.rounds)
    cfg.add(config.Scope.applicationOverride, "provisioning", "datapaths", csv_to_list(args.data_paths))
    cfg.add(config.Scope.applicationOverride, "provisioning", "install.preserve", convert.to_bool(args.preserve_install))
    cfg.add(config.Scope.applicationOverride, "launcher", "external.target.hosts", csv_to_list(args.target_hosts))
    cfg.add(config.Scope.applicationOverride, "launcher", "client.options", kv_to_map(csv_to_list(args.client_options)))
    cfg.add(config.Scope.applicationOverride, "report", "reportformat", args.report_format)
    cfg.add(config.Scope.applicationOverride, "report", "reportfile", args.report_file)
    if sub_command == "list":
        cfg.add(config.Scope.applicationOverride, "system", "list.config.option", args.configuration)
        cfg.add(config.Scope.applicationOverride, "system", "list.races.max_results", args.limit)
    if sub_command == "compare":
        cfg.add(config.Scope.applicationOverride, "report", "comparison.baseline.timestamp", args.baseline)
        cfg.add(config.Scope.applicationOverride, "report", "comparison.contender.timestamp", args.contender)

    configure_logging(cfg)
    logger.info("Rally version [%s]" % version())
    logger.info("Command line arguments: %s" % args)

    success = dispatch_sub_command(cfg, sub_command)
    if not success:
        sys.exit(1)
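
# main() funnels several raw command-line strings through csv_to_list and
# kv_to_map before storing them in the config. A minimal sketch of what such
# helpers could look like, assuming the comma-separated and "key:value" formats
# implied by the calls above; this is an illustration, not Rally's actual code,
# hence the _sketch suffix:

def csv_to_list_sketch(csv):
    # split a comma-separated string into a list of trimmed values
    if csv is None or csv.strip() == "":
        return []
    return [e.strip() for e in csv.split(",")]


def kv_to_map_sketch(kvs):
    # turn entries like "timeout:60" into {"timeout": "60"}
    result = {}
    for kv in kvs:
        key, value = kv.split(":", 1)
        result[key.strip()] = value.strip()
    return result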
def test_create_suppliers_for_es_distribution_plugin_source_skip(self):
    cfg = config.Config()
    cfg.add(config.Scope.application, "mechanic", "distribution.version", "6.0.0")  # default value from command line
    cfg.add(config.Scope.application, "mechanic", "source.revision", "community-plugin:current")
    cfg.add(config.Scope.application, "mechanic", "distribution.repository", "release")
    cfg.add(config.Scope.application, "distributions", "release.url",
            "https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-{{VERSION}}.tar.gz")
    cfg.add(config.Scope.application, "distributions", "release.cache", True)
    cfg.add(config.Scope.application, "node", "root.dir", "/opt/rally")
    cfg.add(config.Scope.application, "source", "plugin.community-plugin.src.dir", "/home/user/Projects/community-plugin")

    car = team.Car("default", root_path=None, config_paths=[])
    core_plugin = team.PluginDescriptor("analysis-icu", core_plugin=True)
    external_plugin = team.PluginDescriptor("community-plugin", core_plugin=False, variables={"enabled": True})

    # --pipeline=from-sources-skip-build
    composite_supplier = supplier.create(cfg, sources=True, distribution=False, build=False, challenge_root_path="/",
                                         car=car, plugins=[core_plugin, external_plugin])

    self.assertEqual(3, len(composite_supplier.suppliers))
    self.assertIsInstance(composite_supplier.suppliers[0], supplier.ElasticsearchDistributionSupplier)
    self.assertIsInstance(composite_supplier.suppliers[1], supplier.PluginDistributionSupplier)
    self.assertEqual(core_plugin, composite_supplier.suppliers[1].plugin)
    self.assertIsInstance(composite_supplier.suppliers[2], supplier.ExternalPluginSourceSupplier)
    self.assertEqual(external_plugin, composite_supplier.suppliers[2].plugin)
    self.assertIsNone(composite_supplier.suppliers[2].builder)
def test_env_options_order(self, sleep):
    cfg = config.Config()
    cfg.add(config.Scope.application, "system", "env.name", "test")
    proc_launcher = launcher.ProcessLauncher(cfg)

    node_telemetry = [
        telemetry.FlightRecorder(telemetry_params={}, log_root="/tmp/telemetry", java_major_version=8)
    ]
    t = telemetry.Telemetry(["jfr"], devices=node_telemetry)
    env = proc_launcher._prepare_env(node_name="node0", java_home="/java_home", t=t)

    assert env["PATH"] == "/java_home/bin" + os.pathsep + os.environ["PATH"]
    assert env["ES_JAVA_OPTS"] == (
        "-XX:+ExitOnOutOfMemoryError -XX:+UnlockDiagnosticVMOptions -XX:+DebugNonSafepoints "
        "-XX:+UnlockCommercialFeatures -XX:+FlightRecorder "
        "-XX:FlightRecorderOptions=disk=true,maxage=0s,maxsize=0,dumponexit=true,dumponexitpath=/tmp/telemetry/profile.jfr "  # pylint: disable=line-too-long
        "-XX:StartFlightRecording=defaultrecording=true"
    )
def test_setup_external_cluster_single_node(self):
    cfg = config.Config()
    cfg.add(config.Scope.application, "telemetry", "devices", [])
    cfg.add(config.Scope.application, "launcher", "external.target.hosts", ["localhost:9200"])
    cfg.add(config.Scope.application, "launcher", "client.options", {})

    m = launcher.ExternalLauncher(cfg, cluster_factory_class=MockClusterFactory)
    cluster = m.start(MockMetricsStore())

    self.assertEqual(cluster.hosts, [{"host": "localhost", "port": "9200"}])
    # automatically determined by launcher on attach
    self.assertEqual(cfg.opts("source", "distribution.version"), "5.0.0")
def test_start_stop_nodes(self, cleanup):
    supplier = lambda: "/home/user/src/elasticsearch/es.tar.gz"
    provisioners = [mock.Mock(), mock.Mock()]
    launcher = MechanicTests.TestLauncher()
    cfg = config.Config()
    cfg.add(config.Scope.application, "system", "race.id", "17")
    cfg.add(config.Scope.application, "mechanic", "preserve.install", False)
    metrics_store = mock.Mock()
    m = MechanicTests.TestMechanic(cfg, metrics_store, supplier, provisioners, launcher)

    m.start_engine()
    self.assertTrue(launcher.started)
    for p in provisioners:
        self.assertTrue(p.prepare.called)

    m.stop_engine()
    self.assertFalse(launcher.started)
    self.assertEqual(cleanup.call_count, 2)
def test_stops_container_when_no_metrics_store_is_provided(self, run_subprocess_with_logging, add_metadata_for_node):
    cfg = config.Config()
    metrics_store = None
    docker = launcher.DockerLauncher(cfg)

    nodes = [cluster.Node(0, "/bin", "127.0.0.1", "testnode", telemetry.Telemetry())]
    docker.stop(nodes, metrics_store=metrics_store)

    self.assertEqual(0, add_metadata_for_node.call_count)
    run_subprocess_with_logging.assert_called_once_with("docker-compose -f /bin/docker-compose.yml down")
def test_pass_java_opts(self):
    cfg = config.Config()
    cfg.add(config.Scope.application, "mechanic", "keep.running", False)
    cfg.add(config.Scope.application, "system", "env.name", "test")
    cfg.add(config.Scope.application, "system", "passenv", "ES_JAVA_OPTS")
    os.environ["ES_JAVA_OPTS"] = "-XX:-someJunk"
    proc_launcher = launcher.ProcessLauncher(cfg)

    t = telemetry.Telemetry()
    # no JAVA_HOME -> use the bundled JDK
    env = proc_launcher._prepare_env(node_name="node0", java_home=None, t=t)

    # unmodified
    self.assertEqual(os.environ["ES_JAVA_OPTS"], env["ES_JAVA_OPTS"])
def test_bundled_jdk_not_in_path(self):
    cfg = config.Config()
    cfg.add(config.Scope.application, "mechanic", "keep.running", False)
    cfg.add(config.Scope.application, "system", "env.name", "test")
    os.environ["JAVA_HOME"] = "/path/to/java"
    proc_launcher = launcher.ProcessLauncher(cfg)

    t = telemetry.Telemetry()
    # no JAVA_HOME -> use the bundled JDK
    env = proc_launcher._prepare_env(node_name="node0", java_home=None, t=t)

    # unmodified
    self.assertEqual(os.environ["PATH"], env["PATH"])
    self.assertIsNone(env.get("JAVA_HOME"))
def test_pass_env_vars(self):
    cfg = config.Config()
    cfg.add(config.Scope.application, "system", "env.name", "test")
    cfg.add(config.Scope.application, "system", "passenv", "JAVA_HOME,FOO1")
    os.environ["JAVA_HOME"] = "/path/to/java"
    os.environ["FOO1"] = "BAR1"
    proc_launcher = launcher.ProcessLauncher(cfg)

    t = telemetry.Telemetry()
    # no JAVA_HOME -> use the bundled JDK
    env = proc_launcher._prepare_env(node_name="node0", java_home=None, t=t)

    # unmodified
    assert env["JAVA_HOME"] == os.environ["JAVA_HOME"]
    assert env["FOO1"] == os.environ["FOO1"]
    assert env["ES_JAVA_OPTS"] == "-XX:+ExitOnOutOfMemoryError"
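
# The _prepare_env tests above pin down the pass-through semantics: only
# variables named in system/passenv are copied from the caller's environment,
# ES_JAVA_OPTS gets a default when absent, and with the bundled JDK
# (java_home=None) neither PATH nor JAVA_HOME is touched. A sketch of logic
# consistent with those assertions; Rally's real implementation, the
# cfg.opts() signature used here, and the telemetry-supplied JVM options
# (ignored for brevity) are all assumptions:
import os


def prepare_env_sketch(cfg, java_home):
    env = {}
    # copy only the allow-listed variables; PATH is assumed to be the default
    passenv = cfg.opts("system", "passenv", default_value="PATH", mandatory=False)
    for var in passenv.split(","):
        if var in os.environ:
            env[var] = os.environ[var]
    if "ES_JAVA_OPTS" not in env:
        env["ES_JAVA_OPTS"] = "-XX:+ExitOnOutOfMemoryError"
    # an explicit java_home is prepended to PATH; the bundled JDK is resolved
    # by Elasticsearch itself, so nothing is added in that case
    if java_home:
        env["JAVA_HOME"] = java_home
        env["PATH"] = os.path.join(java_home, "bin") + os.pathsep + env.get("PATH", "")
    return env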
def test_error_on_cluster_launch(self, sleep):
    cfg = config.Config()
    cfg.add(config.Scope.application, "client", "hosts", self.test_host)
    # Simulate that the client will raise an error upon startup
    cfg.add(config.Scope.application, "client", "options", opts.ClientOptions("raise-error-on-info:true"))
    cfg.add(config.Scope.application, "mechanic", "telemetry.devices", [])
    cfg.add(config.Scope.application, "mechanic", "telemetry.params", {})
    cfg.add(config.Scope.application, "mechanic", "preserve.install", False)
    cfg.add(config.Scope.application, "mechanic", "skip.rest.api.check", False)
    cfg.add(config.Scope.application, "system", "env.name", "test")

    ms = get_metrics_store(cfg)
    cluster_launcher = launcher.ClusterLauncher(cfg, ms, client_factory_class=MockClientFactory)
    with self.assertRaisesRegex(exceptions.LaunchError,
                                "Elasticsearch REST API layer is not available. Forcefully terminated cluster."):
        cluster_launcher.start()
def test_create_suppliers_for_es_only_config(self):
    cfg = config.Config()
    cfg.add(config.Scope.application, "mechanic", "distribution.version", "6.0.0")  # default value from command line
    cfg.add(config.Scope.application, "mechanic", "source.revision", "current")
    cfg.add(config.Scope.application, "mechanic", "distribution.repository", "release")
    cfg.add(config.Scope.application, "distributions", "release.url",
            "https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-{{VERSION}}.tar.gz")
    cfg.add(config.Scope.application, "distributions", "release.cache", True)
    cfg.add(config.Scope.application, "node", "root.dir", "/opt/rally")

    car = team.Car("default", root_path=None, config_paths=[])

    composite_supplier = supplier.create(cfg, sources=False, distribution=True, build=False, challenge_root_path="/", car=car)

    self.assertEqual(1, len(composite_supplier.suppliers))
    self.assertIsInstance(composite_supplier.suppliers[0], supplier.ElasticsearchDistributionSupplier)
def test_checkout_revision(self, mock_is_working_copy, mock_pull_revision, mock_head_revision):
    cfg = config.Config()
    cfg.add(config.Scope.application, "source", "local.src.dir", "/src")
    cfg.add(config.Scope.application, "source", "remote.repo.url", "some-github-url")
    cfg.add(config.Scope.application, "source", "revision", "67c2f42")
    mock_is_working_copy.return_value = True
    mock_head_revision.return_value = "HEAD"

    s = supplier.SourceRepository(cfg)
    s.fetch()

    mock_is_working_copy.assert_called_with("/src")
    mock_pull_revision.assert_called_with("/src", "67c2f42")
    mock_head_revision.assert_called_with("/src")
def test_cleanup_nothing(self, mock_path_exists, mock_rm):
    mock_path_exists.return_value = False

    cfg = config.Config()
    cfg.add(config.Scope.application, "system", "challenge.root.dir", "/rally-root/track/challenge")
    cfg.add(config.Scope.application, "provisioning", "local.install.dir", "es-bin")
    cfg.add(config.Scope.application, "provisioning", "install.preserve", False)

    p = provisioner.Provisioner(cfg)
    p.cleanup()

    mock_path_exists.assert_called_once_with("/rally-root/track/challenge/es-bin")
    mock_rm.assert_not_called()
def test_launches_cluster(self):
    cfg = config.Config()
    cfg.add(config.Scope.application, "client", "hosts", self.test_host)
    cfg.add(config.Scope.application, "client", "options", self.client_options)
    cfg.add(config.Scope.application, "mechanic", "telemetry.devices", [])
    cfg.add(config.Scope.application, "mechanic", "telemetry.params", {})
    cfg.add(config.Scope.application, "mechanic", "preserve.install", False)
    cfg.add(config.Scope.application, "mechanic", "skip.rest.api.check", False)
    cfg.add(config.Scope.application, "system", "env.name", "test")

    ms = get_metrics_store(cfg)
    cluster_launcher = launcher.ClusterLauncher(cfg, ms, client_factory_class=MockClientFactory)
    cluster = cluster_launcher.start()

    self.assertEqual([{"host": "10.0.0.10", "port": 9200}, {"host": "10.0.0.11", "port": 9200}], cluster.hosts)
    self.assertIsNotNone(cluster.telemetry)
def setUp(self):
    self.cfg = config.Config()
    self.cfg.add(config.Scope.application, "system", "env.name", "unittest")
    self.cfg.add(config.Scope.application, "system", "time.start",
                 datetime(year=2017, month=8, day=20, hour=1, minute=0, second=0))
    self.cfg.add(config.Scope.application, "track", "challenge.name", "default")
    self.cfg.add(config.Scope.application, "track", "test.mode.enabled", True)
    self.cfg.add(config.Scope.application, "mechanic", "car.names", ["default"])
    self.cfg.add(config.Scope.application, "client", "hosts", ["localhost:9200"])
    self.cfg.add(config.Scope.application, "client", "options", {})
    self.cfg.add(config.Scope.application, "driver", "cluster.health", "green")
    self.cfg.add(config.Scope.application, "driver", "load_driver_hosts", ["localhost"])

    default_challenge = track.Challenge("default", description="default challenge", default=True, schedule=[
        track.Task(operation=track.Operation("index", operation_type=track.OperationType.Index), clients=4)
    ])
    another_challenge = track.Challenge("other", description="non-default challenge", default=False)
    self.track = track.Track(name="unittest", short_description="unittest track",
                             challenges=[another_challenge, default_challenge])
def test_setup_external_cluster_multiple_nodes(self):
    cfg = config.Config()
    cfg.add(config.Scope.application, "telemetry", "devices", [])
    cfg.add(config.Scope.application, "launcher", "external.target.hosts",
            ["search.host-a.internal:9200", "search.host-b.internal:9200"])

    m = launcher.ExternalLauncher(cfg, cluster_factory_class=MockClusterFactory)
    cluster = m.start(None, MockTrackSetup(), MockMetricsStore())
    self.assertEqual(cluster.hosts, [
        {"host": "search.host-a.internal", "port": "9200"},
        {"host": "search.host-b.internal", "port": "9200"}
    ])
def test_can_migrate_outdated_config(self):
    base_cfg = config.Config(config_name="unittest", config_file_class=InMemoryConfigStore)
    base_cfg.add(config.Scope.application, "meta", "config.version", config.Config.CURRENT_CONFIG_VERSION)
    base_cfg.add(config.Scope.application, "benchmarks", "local.dataset.cache", "/base-config/data-set-cache")
    base_cfg.add(config.Scope.application, "unit-test", "sample.property", "let me copy you")

    cfg = config.auto_load_local_config(
        base_cfg,
        additional_sections=["unit-test"],
        config_file_class=InMemoryConfigStore,
        present=True,
        config={
            "distributions": {
                "release.url": "https://acme.com/releases",
                "release.cache": "true",
            },
            "system": {
                "env.name": "existing-unit-test-config"
            },
            # outdated
            "meta": {
                # ensure we don't attempt to migrate if that version is unsupported
                "config.version": max(config.Config.CURRENT_CONFIG_VERSION - 1, config.Config.EARLIEST_SUPPORTED_VERSION)
            },
            "benchmarks": {
                "local.dataset.cache": "/tmp/rally/data"
            },
            "runtime": {
                "java8.home": "/opt/jdk8"
            },
        },
    )
    assert cfg.config_file.present
    # did not just copy base config
    assert base_cfg.opts("benchmarks", "local.dataset.cache") != cfg.opts("benchmarks", "local.dataset.cache")
    # migrated existing config
    assert int(cfg.opts("meta", "config.version")) == config.Config.CURRENT_CONFIG_VERSION
def test_launches_cluster_with_telemetry_client_timeout_enabled(self):
    cfg = config.Config()
    cfg.add(config.Scope.application, "client", "hosts", self.test_host)
    cfg.add(config.Scope.application, "client", "options", self.client_options)
    cfg.add(config.Scope.application, "mechanic", "telemetry.devices", [])
    cfg.add(config.Scope.application, "mechanic", "telemetry.params", {})
    cfg.add(config.Scope.application, "mechanic", "preserve.install", False)

    cluster_launcher = launcher.ClusterLauncher(cfg, MockMetricsStore(), client_factory_class=MockClientFactory)
    cluster = cluster_launcher.start()

    for telemetry_device in cluster.telemetry.devices:
        if hasattr(telemetry_device, "clients"):
            # check all client options for multi-cluster-aware telemetry devices, like CcrStats
            for _, client in telemetry_device.clients.items():
                self.assertDictEqual({"retry-on-timeout": True, "timeout": 60}, client.client_options)
        else:
            self.assertDictEqual({"retry-on-timeout": True, "timeout": 60}, telemetry_device.client.client_options)
def test_cleanup_nothing_on_preserve(self, mock_path_exists, mock_rm):
    mock_path_exists.return_value = False

    cfg = config.Config()
    cfg.add(config.Scope.application, "system", "challenge.root.dir", "/rally-root/track/challenge")
    cfg.add(config.Scope.application, "provisioning", "local.install.dir", "es-bin")
    cfg.add(config.Scope.application, "provisioning", "install.preserve", True)
    cfg.add(config.Scope.application, "provisioning", "datapaths", ["/tmp/some/data-path-dir"])

    p = provisioner.Provisioner(cfg)
    p.cleanup()

    mock_path_exists.assert_not_called()
    mock_rm.assert_not_called()
def test_checkout_ts(self, mock_is_working_copy, mock_pull_ts, mock_head_revision):
    cfg = config.Config()
    cfg.add(config.Scope.application, "source", "local.src.dir", "/src")
    cfg.add(config.Scope.application, "source", "remote.repo.url", "some-github-url")
    cfg.add(config.Scope.application, "source", "revision", "@2015-01-01-01:00:00")
    mock_is_working_copy.return_value = True
    mock_head_revision.return_value = "HEAD"

    s = supplier.SourceRepository(cfg)
    s.fetch()

    mock_is_working_copy.assert_called_with("/src")
    mock_pull_ts.assert_called_with("/src", "2015-01-01-01:00:00")
    mock_head_revision.assert_called_with("/src")
def test_formats_table(self):
    cfg = config.Config()
    r = reporter.ComparisonReporter(cfg)

    formatted = r.format_as_table([])
    # 1 header line + 1 separation line + 0 data lines
    self.assertEqual(1 + 1 + 0, len(formatted.splitlines()))

    # ["Metric", "Operation", "Baseline", "Contender", "Diff", "Unit"]
    metrics_table = [
        ["Min Throughput", "index", "17300", "18000", "700", "ops/s"],
        ["Median Throughput", "index", "17500", "18500", "1000", "ops/s"],
        ["Max Throughput", "index", "17700", "19000", "1300", "ops/s"],
    ]
    formatted = r.format_as_table(metrics_table)
    # 1 header line + 1 separation line + 3 data lines
    self.assertEqual(1 + 1 + 3, len(formatted.splitlines()))
def test_runs_a_known_pipeline(self):
    mock_pipeline = mock.Mock()
    p = racecontrol.Pipeline("unit-test-pipeline", "Pipeline intended for unit-testing", mock_pipeline)

    cfg = config.Config()
    cfg.add(config.Scope.benchmark, "race", "pipeline", "unit-test-pipeline")
    cfg.add(config.Scope.benchmark, "mechanic", "distribution.version", "")

    racecontrol.run(cfg)
    mock_pipeline.assert_called_once_with(cfg)

    # ensure we remove it again from the list of registered pipelines to avoid unwanted side effects
    del p
def test_initial_checkout_latest(self, mock_is_working_copy, mock_clone, mock_pull, mock_head_revision):
    cfg = config.Config()
    cfg.add(config.Scope.application, "source", "local.src.dir", "/src")
    cfg.add(config.Scope.application, "source", "remote.repo.url", "some-github-url")
    cfg.add(config.Scope.application, "source", "revision", "latest")
    mock_is_working_copy.return_value = False
    mock_head_revision.return_value = "HEAD"

    s = supplier.SourceRepository(cfg)
    s.fetch()

    mock_is_working_copy.assert_called_with("/src")
    mock_clone.assert_called_with("/src", "some-github-url")
    mock_pull.assert_called_with("/src")
    mock_head_revision.assert_called_with("/src")
def test_setup_external_cluster_multiple_nodes(self):
    cfg = config.Config()
    cfg.add(config.Scope.application, "mechanic", "telemetry.devices", [])
    cfg.add(config.Scope.application, "client", "hosts", self.test_host)
    cfg.add(config.Scope.application, "client", "options", self.client_options)
    cfg.add(config.Scope.application, "mechanic", "distribution.version", "2.3.3")
    cfg.add(config.Scope.application, "system", "env.name", "test")

    ms = get_metrics_store(cfg)
    m = launcher.ExternalLauncher(cfg, ms, client_factory_class=MockClientFactory)
    m.start()

    # did not change user defined value
    self.assertEqual(cfg.opts("mechanic", "distribution.version"), "2.3.3")
def test_setup_external_cluster_single_node(self):
    cfg = config.Config()
    cfg.add(config.Scope.application, "mechanic", "telemetry.devices", [])
    cfg.add(config.Scope.application, "client", "hosts", self.test_host)
    cfg.add(config.Scope.application, "client", "options", self.client_options)
    cfg.add(config.Scope.application, "system", "env.name", "test")

    ms = get_metrics_store(cfg)
    m = launcher.ExternalLauncher(cfg, ms, client_factory_class=MockClientFactory)
    m.start()

    # automatically determined by launcher on attach
    self.assertEqual(cfg.opts("mechanic", "distribution.version"), "5.0.0")
def test_setup_external_cluster_cannot_determine_version(self):
    client_options = opts.ClientOptions("timeout:60,raise-error-on-info:true")
    cfg = config.Config()
    cfg.add(config.Scope.application, "mechanic", "telemetry.devices", [])
    cfg.add(config.Scope.application, "client", "hosts", self.test_host)
    cfg.add(config.Scope.application, "client", "options", client_options)
    cfg.add(config.Scope.application, "system", "env.name", "test")

    ms = get_metrics_store(cfg)
    m = launcher.ExternalLauncher(cfg, ms, client_factory_class=MockClientFactory)
    m.start()

    # the version cannot be determined on attach, so the launcher leaves it unset
    self.assertIsNone(cfg.opts("mechanic", "distribution.version"))
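
# The ExternalLauncher tests above share a MockClientFactory test double whose
# client reports version "5.0.0" from info() unless "raise-error-on-info" is
# set. A sketch consistent with those assertions; treating the client options
# as a plain mapping here is an assumption, as is the _sketch naming:

class MockClientSketch:
    def __init__(self, client_options):
        self.client_options = client_options

    def info(self):
        # mirror the "raise-error-on-info:true" option used by the tests
        if self.client_options.get("raise-error-on-info", False):
            raise Exception("transport error on info")
        return {"version": {"number": "5.0.0"}}


class MockClientFactorySketch:
    def __init__(self, hosts, client_options):
        self.hosts = hosts
        self.client_options = client_options

    def create(self):
        return MockClientSketch(self.client_options)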
def test_fails_without_benchmark_only_pipeline_in_docker(self):
    mock_pipeline = mock.Mock()
    test_pipeline_name = "unit-test-pipeline"
    racecontrol.Pipeline(test_pipeline_name, "Pipeline intended for unit-testing", mock_pipeline)
    cfg = config.Config()
    cfg.add(config.Scope.benchmark, "race", "pipeline", test_pipeline_name)

    with self.assertRaises(exceptions.SystemSetupError) as ctx:
        racecontrol.run(cfg)
    self.assertEqual(
        "Only the [benchmark-only] pipeline is supported by the Rally Docker image.\n"
        "Add --pipeline=benchmark-only in your Rally arguments and try again.\n"
        "For more details read the docs for the benchmark-only pipeline in "
        "https://esrally.readthedocs.io/en/latest/pipelines.html#benchmark-only\n",
        ctx.exception.args[0])

    del racecontrol.pipelines[test_pipeline_name]
def test_start_stop_nodes(self, cleanup):
    supplier = lambda: "/home/user/src/elasticsearch/es.tar.gz"
    provisioners = [mock.Mock(), mock.Mock()]
    launcher = self.MockLauncher()
    cfg = config.Config()
    cfg.add(config.Scope.application, "system", "race.id", "17")
    cfg.add(config.Scope.application, "mechanic", "preserve.install", False)
    metrics_store = mock.Mock()
    m = self.MockMechanic(cfg, metrics_store, supplier, provisioners, launcher)

    m.start_engine()
    assert launcher.started
    for p in provisioners:
        assert p.prepare.called

    m.stop_engine()
    assert not launcher.started
    assert cleanup.call_count == 2
def setUp(self):
    self.cfg = config.Config()
    self.cfg.add(config.Scope.application, "system", "env.name", "unittest")
    self.cfg.add(config.Scope.application, "system", "time.start",
                 datetime(year=2017, month=8, day=20, hour=1, minute=0, second=0))
    self.cfg.add(config.Scope.application, "system", "trial.id", "6ebc6e53-ee20-4b0c-99b4-09697987e9f4")
    self.cfg.add(config.Scope.application, "track", "challenge.name", "default")
    self.cfg.add(config.Scope.application, "track", "params", {})
    self.cfg.add(config.Scope.application, "track", "test.mode.enabled", True)
    self.cfg.add(config.Scope.application, "mechanic", "car.names", ["default"])
    self.cfg.add(config.Scope.application, "client", "hosts", ["localhost:9200"])
    self.cfg.add(config.Scope.application, "client", "options", {})
    self.cfg.add(config.Scope.application, "driver", "load_driver_hosts", ["localhost"])
    self.cfg.add(config.Scope.application, "reporting", "datastore.type", "in-memory")

    default_challenge = track.Challenge("default", default=True, schedule=[
        track.Task(name="index", operation=track.Operation("index", operation_type=track.OperationType.Bulk), clients=4)
    ])
    another_challenge = track.Challenge("other", default=False)
    self.track = track.Track(name="unittest", description="unittest track",
                             challenges=[another_challenge, default_challenge])
def test_can_create_non_existing_config(self):
    base_cfg = config.Config(config_name="unittest", config_file_class=InMemoryConfigStore)
    base_cfg.add(config.Scope.application, "meta", "config.version", config.Config.CURRENT_CONFIG_VERSION)
    base_cfg.add(config.Scope.application, "benchmarks", "local.dataset.cache", "/base-config/data-set-cache")
    base_cfg.add(config.Scope.application, "reporting", "datastore.type", "elasticsearch")
    base_cfg.add(config.Scope.application, "tracks", "metrics.url", "http://github.com/org/metrics")
    base_cfg.add(config.Scope.application, "teams", "private.url", "http://github.com/org/teams")
    base_cfg.add(config.Scope.application, "distributions", "release.cache", False)
    base_cfg.add(config.Scope.application, "defaults", "preserve_benchmark_candidate", True)

    cfg = config.auto_load_local_config(base_cfg, config_file_class=InMemoryConfigStore)
    self.assertTrue(cfg.config_file.present)
    # did not just copy base config
    self.assertNotEqual(base_cfg.opts("benchmarks", "local.dataset.cache"), cfg.opts("benchmarks", "local.dataset.cache"))
    # copied sections from base config
    self.assert_equals_base_config(base_cfg, cfg, "reporting", "datastore.type")
    self.assert_equals_base_config(base_cfg, cfg, "tracks", "metrics.url")
    self.assert_equals_base_config(base_cfg, cfg, "teams", "private.url")
    self.assert_equals_base_config(base_cfg, cfg, "distributions", "release.cache")
    self.assert_equals_base_config(base_cfg, cfg, "defaults", "preserve_benchmark_candidate")
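
# Both config tests construct Config with config_file_class=InMemoryConfigStore
# so nothing touches the file system. A minimal sketch of such a store, covering
# just the present/store/load surface the tests imply; the project's actual test
# double may expose more, so this is an assumption, not its real definition:

class InMemoryConfigStoreSketch:
    def __init__(self, config_name, **kwargs):
        self.config_name = config_name
        self.location = "in-memory"
        self.config = None

    @property
    def present(self):
        # a config "exists" once something has been stored
        return self.config is not None

    def store(self, config):
        # copy section dicts so later mutations don't leak back into the store
        self.config = {section: dict(values) for section, values in config.items()}

    def load(self):
        return self.config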