Example #1
    def setUp(self):
        params.register_param_source_for_name("driver-test-param-source",
                                              DriverTestParamSource)
        params.register_param_source_for_name(
            "driver-test-param-source-with-progress",
            DriverTestParamSourceWithProgress)
        self.test_track = track.Track(name="unittest")
Example #2
    def test_execute_schedule_throughput_throttled(self, es):
        es.bulk.return_value = {
            "errors": False
        }

        params.register_param_source_for_name("driver-test-param-source", DriverTestParamSource)
        test_track = track.Track(name="unittest", short_description="unittest track", description="unittest track",
                                 source_root_url="http://example.org",
                                 indices=None,
                                 challenges=None)

        # in one second (0.5 warmup + 0.5 measurement) we should get 1000 [ops/s] / 4 [clients] = 250 samples
        for target_throughput, bounds in {10: [2, 4], 100: [24, 26], 1000: [245, 255]}.items():
            task = track.Task(track.Operation("time-based", track.OperationType.Index.name, params={
                "body": ["action_metadata_line", "index_line"],
                "action_metadata_present": True
            },
                                              param_source="driver-test-param-source"),
                              warmup_time_period=0.5, time_period=0.5, clients=4, target_throughput=target_throughput)
            schedule = driver.schedule_for(test_track, task, 0)
            sampler = driver.Sampler(client_id=0, operation=task.operation, start_timestamp=0)

            driver.execute_schedule(schedule, es, sampler)

            samples = sampler.samples

            sample_size = len(samples)
            lower_bound = bounds[0]
            upper_bound = bounds[1]
            self.assertTrue(lower_bound <= sample_size <= upper_bound,
                            msg="Expected sample size to be between %d and %d but was %d" % (lower_bound, upper_bound, sample_size))
Example #3
    def test_cancel_execute_schedule(self, es):
        es.bulk.return_value = {
            "errors": False
        }

        params.register_param_source_for_name("driver-test-param-source", DriverTestParamSource)
        test_track = track.Track(name="unittest", short_description="unittest track",
                                 source_root_url="http://example.org",
                                 indices=None,
                                 challenges=None)

        # in one second (0.5 warmup + 0.5 measurement) we should get 1000 [ops/s] / 4 [clients] = 250 samples
        for target_throughput, bounds in {10: [2, 4], 100: [24, 26], 1000: [245, 255]}.items():
            task = track.Task(track.Operation("time-based", track.OperationType.Index.name, params={
                "body": ["action_metadata_line", "index_line"],
                "action_metadata_present": True,
                "bulk-size": 1
            },
                                              param_source="driver-test-param-source"),
                              warmup_time_period=0.5, time_period=0.5, clients=4,
                              params={"target-throughput": target_throughput, "clients": 4})
            schedule = driver.schedule_for(test_track, task, 0)
            sampler = driver.Sampler(client_id=0, task=task, start_timestamp=0)

            cancel = threading.Event()
            complete = threading.Event()
            execute_schedule = driver.Executor(task, schedule, es, sampler, cancel, complete)

            cancel.set()
            execute_schedule()

            samples = sampler.samples

            sample_size = len(samples)
            self.assertEqual(0, sample_size)
Example #4
    def test_execute_schedule_throughput_throttled(self, es):
        es.bulk.return_value = {"errors": False}

        params.register_param_source_for_name("driver-test-param-source",
                                              DriverTestParamSource)
        test_track = track.Track(name="unittest",
                                 description="unittest track",
                                 indices=None,
                                 challenges=None)

        # in one second (0.5 warmup + 0.5 measurement) we should get 1000 [ops/s] / 4 [clients] = 250 samples
        for target_throughput, bounds in {
                10: [2, 4],
                100: [24, 26],
                1000: [245, 255]
        }.items():
            task = track.Task("time-based",
                              track.Operation(
                                  "time-based",
                                  track.OperationType.Bulk.name,
                                   params={
                                       "body": ["action_metadata_line", "index_line"],
                                       "action_metadata_present": True,
                                       "bulk-size": 1
                                   },
                                  param_source="driver-test-param-source"),
                              warmup_time_period=0.5,
                              time_period=0.5,
                              clients=4,
                              params={
                                  "target-throughput": target_throughput,
                                  "clients": 4
                              },
                              completes_parent=True)
            schedule = driver.schedule_for(test_track, task, 0)
            sampler = driver.Sampler(client_id=0, task=task, start_timestamp=0)

            cancel = threading.Event()
            complete = threading.Event()

            execute_schedule = driver.Executor(task, schedule, es, sampler,
                                               cancel, complete)
            execute_schedule()

            samples = sampler.samples

            sample_size = len(samples)
            lower_bound = bounds[0]
            upper_bound = bounds[1]
            self.assertTrue(
                lower_bound <= sample_size <= upper_bound,
                msg="Expected sample size to be between %d and %d but was %d" %
                (lower_bound, upper_bound, sample_size))
            self.assertTrue(
                complete.is_set(),
                "Executor should auto-complete a task that terminates its parent"
            )
Example #5
    def test_can_register_class_as_param_source(self):
        source_name = "params-test-class-param-source"

        params.register_param_source_for_name(source_name, ParamsRegistrationTests.ParamSourceClass)
        source = params.param_source_for_name(source_name, None, {"parameter": 42})
        self.assertEqual({"class-key": 42}, source.params())

        params._unregister_param_source_for_name(source_name)
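
Examples #5 and #6 imply the contract a registered param source must fulfil: params.param_source_for_name(name, track, params) either instantiates a registered class with the track and the params dict, or wraps a registered function, and the resulting object exposes params(). Below is a minimal sketch that would satisfy the assertions above; these are hypothetical implementations for illustration, as the real test fixtures are not part of this listing.

    class ParamSourceClass:
        def __init__(self, track=None, params=None, **kwargs):
            # the registry passes the current track and the params dict
            self._params = params or {}

        def partition(self, partition_index, total_partitions):
            # trivial partitioning: every client sees the same source
            return self

        def size(self):
            return 1

        def params(self):
            # matches the expected {"class-key": 42} in the assertion above
            return {"class-key": self._params["parameter"]}


    def param_source_function(track=None, params=None, **kwargs):
        # a registered function is wrapped by the registry; its return
        # value becomes the result of source.params()
        return {"key": params["parameter"]}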
Example #6
    def test_can_register_function_as_param_source(self):
        source_name = "params-test-function-param-source"

        params.register_param_source_for_name(source_name, ParamsRegistrationTests.param_source_function)
        source = params.param_source_for_name(source_name, None, {"parameter": 42})
        self.assertEqual({"key": 42}, source.params())

        params._unregister_param_source_for_name(source_name)
Example #9
    def setUp(self):
        params.register_param_source_for_name("driver-test-param-source",
                                              DriverTestParamSource)
        self.test_track = track.Track(name="unittest",
                                      description="unittest track",
                                      source_root_url="http://example.org",
                                      indices=None,
                                      challenges=None)
Example #10
    def test_execute_schedule_in_throughput_mode(self, es):
        es.bulk.return_value = {"errors": False}

        params.register_param_source_for_name("driver-test-param-source",
                                              DriverTestParamSource)
        test_track = track.Track(name="unittest",
                                 description="unittest track",
                                 indices=None,
                                 challenges=None)

        task = track.Task(
            "time-based",
            track.Operation(
                "time-based",
                track.OperationType.Bulk.name,
                params={
                    "body": ["action_metadata_line", "index_line"],
                    "action-metadata-present": True,
                    "bulk-size": 1,
                    # we need this because DriverTestParamSource does not know that we only have one bulk and hence size() returns incorrect results
                    "size": 1
                },
                param_source="driver-test-param-source"),
            warmup_time_period=0,
            clients=4)
        schedule = driver.schedule_for(test_track, task, 0)

        sampler = driver.Sampler(client_id=2,
                                 task=task,
                                 start_timestamp=time.perf_counter())
        cancel = threading.Event()
        complete = threading.Event()

        execute_schedule = driver.Executor(task, schedule, es, sampler, cancel,
                                           complete)
        execute_schedule()

        samples = sampler.samples

        self.assertTrue(len(samples) > 0)
        self.assertFalse(complete.is_set(),
                         "Executor should not auto-complete a normal task")
        previous_absolute_time = -1.0
        previous_relative_time = -1.0
        for sample in samples:
            self.assertEqual(2, sample.client_id)
            self.assertEqual(task, sample.task)
            self.assertLess(previous_absolute_time, sample.absolute_time)
            previous_absolute_time = sample.absolute_time
            self.assertLess(previous_relative_time, sample.relative_time)
            previous_relative_time = sample.relative_time
            # we don't have any warmup time period
            self.assertEqual(metrics.SampleType.Normal, sample.sample_type)
            # latency equals service time in throughput mode
            self.assertEqual(sample.latency_ms, sample.service_time_ms)
            self.assertEqual(1, sample.total_ops)
            self.assertEqual("docs", sample.total_ops_unit)
            self.assertEqual(1, sample.request_meta_data["bulk-size"])
Example #11
    def test_execute_schedule_in_throughput_mode(self, es):
        es.bulk.return_value = {"errors": False}

        params.register_param_source_for_name("driver-test-param-source",
                                              DriverTestParamSource)
        test_track = track.Track(name="unittest",
                                 short_description="unittest track",
                                 description="unittest track",
                                 source_root_url="http://example.org",
                                 indices=None,
                                 challenges=None)

        task = track.Task(track.Operation(
            "time-based",
            track.OperationType.Index.name,
            params={
                "body": ["action_metadata_line", "index_line"],
                "action_metadata_present": True
            },
            param_source="driver-test-param-source"),
                          warmup_time_period=0,
                          clients=4,
                          target_throughput=None)
        schedule = driver.schedule_for(test_track, task, 0)

        sampler = driver.Sampler(client_id=2, task=task, start_timestamp=100)
        cancel = threading.Event()
        driver.execute_schedule(cancel, 0, task.operation, schedule, es,
                                sampler)

        samples = sampler.samples

        self.assertTrue(len(samples) > 0)
        previous_absolute_time = -1.0
        previous_relative_time = -1.0
        for sample in samples:
            self.assertEqual(2, sample.client_id)
            self.assertEqual(task, sample.task)
            self.assertTrue(previous_absolute_time < sample.absolute_time)
            previous_absolute_time = sample.absolute_time
            self.assertTrue(previous_relative_time < sample.relative_time)
            previous_relative_time = sample.relative_time
            # we don't have any warmup time period
            self.assertEqual(metrics.SampleType.Normal, sample.sample_type)
            # latency equals service time in throughput mode
            self.assertEqual(sample.latency_ms, sample.service_time_ms)
            self.assertEqual(1, sample.total_ops)
            self.assertEqual("docs", sample.total_ops_unit)
            self.assertEqual(1, sample.request_meta_data["bulk-size"])
Example #12
    def test_execute_schedule_in_throughput_mode(self, es):
        es.bulk.return_value = {
            "errors": False
        }

        params.register_param_source_for_name("driver-test-param-source", DriverTestParamSource)
        test_track = track.Track(name="unittest", short_description="unittest track",
                                 source_root_url="http://example.org",
                                 indices=None,
                                 challenges=None)

        task = track.Task(track.Operation("time-based", track.OperationType.Index.name, params={
            "body": ["action_metadata_line", "index_line"],
            "action_metadata_present": True,
            "bulk-size": 1,
            # we need this because DriverTestParamSource does not know that we only have one bulk and hence size() returns incorrect results
            "size": 1
        },
                                          param_source="driver-test-param-source"),
                          warmup_time_period=0, clients=4)
        schedule = driver.schedule_for(test_track, task, 0)

        sampler = driver.Sampler(client_id=2, task=task, start_timestamp=100)
        cancel = threading.Event()
        complete = threading.Event()

        execute_schedule = driver.Executor(task, schedule, es, sampler, cancel, complete)
        execute_schedule()

        samples = sampler.samples

        self.assertTrue(len(samples) > 0)
        self.assertFalse(complete.is_set(), "Executor should not auto-complete a normal task")
        previous_absolute_time = -1.0
        previous_relative_time = -1.0
        for sample in samples:
            self.assertEqual(2, sample.client_id)
            self.assertEqual(task, sample.task)
            self.assertTrue(previous_absolute_time < sample.absolute_time)
            previous_absolute_time = sample.absolute_time
            self.assertTrue(previous_relative_time < sample.relative_time)
            previous_relative_time = sample.relative_time
            # we don't have any warmup time period
            self.assertEqual(metrics.SampleType.Normal, sample.sample_type)
            # latency equals service time in throughput mode
            self.assertEqual(sample.latency_ms, sample.service_time_ms)
            self.assertEqual(1, sample.total_ops)
            self.assertEqual("docs", sample.total_ops_unit)
            self.assertEqual(1, sample.request_meta_data["bulk-size"])
Example #13
    def test_execute_schedule_with_progress_determined_by_runner(self, es):
        es.bulk.return_value = {
            "errors": False
        }

        params.register_param_source_for_name("driver-test-param-source", DriverTestParamSource)
        test_track = track.Track(name="unittest", description="unittest track",
                                 indices=None,
                                 challenges=None)

        task = track.Task("time-based", track.Operation("time-based", operation_type="unit-test-recovery", params={
            "indices-to-restore": "*",
            # The runner will determine progress
            "size": None
        }, param_source="driver-test-param-source"), warmup_time_period=0, clients=4)
        schedule = driver.schedule_for(test_track, task, 0)

        sampler = driver.Sampler(client_id=2, task=task, start_timestamp=time.perf_counter())
        cancel = threading.Event()
        complete = threading.Event()

        execute_schedule = driver.Executor(task, schedule, es, sampler, cancel, complete)
        execute_schedule()

        samples = sampler.samples

        self.assertEqual(5, len(samples))
        self.assertTrue(self.runner_with_progress.completed)
        self.assertEqual(1.0, self.runner_with_progress.percent_completed)
        self.assertFalse(complete.is_set(), "Executor should not auto-complete a normal task")
        previous_absolute_time = -1.0
        previous_relative_time = -1.0
        for sample in samples:
            self.assertEqual(2, sample.client_id)
            self.assertEqual(task, sample.task)
            self.assertLess(previous_absolute_time, sample.absolute_time)
            previous_absolute_time = sample.absolute_time
            self.assertLess(previous_relative_time, sample.relative_time)
            previous_relative_time = sample.relative_time
            # we don't have any warmup time period
            self.assertEqual(metrics.SampleType.Normal, sample.sample_type)
            # latency equals service time in throughput mode
            self.assertEqual(sample.latency_ms, sample.service_time_ms)
            self.assertEqual(1, sample.total_ops)
            self.assertEqual("ops", sample.total_ops_unit)
Example #14
    def test_execute_schedule_in_throughput_mode(self, es):
        es.bulk.return_value = {
            "errors": False
        }

        params.register_param_source_for_name("driver-test-param-source", DriverTestParamSource)
        test_track = track.Track(name="unittest", short_description="unittest track", description="unittest track",
                                 source_root_url="http://example.org",
                                 indices=None,
                                 challenges=None)

        task = track.Task(track.Operation("time-based", track.OperationType.Index.name, params={
            "body": ["action_metadata_line", "index_line"],
            "action_metadata_present": True
        },
                                          param_source="driver-test-param-source"),
                          warmup_time_period=0, clients=4, target_throughput=None)
        schedule = driver.schedule_for(test_track, task, 0)

        sampler = driver.Sampler(client_id=2, operation=task.operation, start_timestamp=100)

        driver.execute_schedule(schedule, es, sampler)

        samples = sampler.samples

        self.assertTrue(len(samples) > 0)
        previous_absolute_time = -1.0
        previous_relative_time = -1.0
        for sample in samples:
            self.assertEqual(2, sample.client_id)
            self.assertEqual(task.operation, sample.operation)
            self.assertTrue(previous_absolute_time < sample.absolute_time)
            previous_absolute_time = sample.absolute_time
            self.assertTrue(previous_relative_time < sample.relative_time)
            previous_relative_time = sample.relative_time
            # we don't have any warmup time period
            self.assertEqual(metrics.SampleType.Normal, sample.sample_type)
            # latency equals service time in throughput mode
            self.assertEqual(sample.latency_ms, sample.service_time_ms)
            self.assertEqual(1, sample.total_ops)
            self.assertEqual("docs", sample.total_ops_unit)
            self.assertEqual(1, sample.request_meta_data["bulk-size"])
Example #15
    def setUp(self):
        self.test_track = track.Track(name="unittest")
        self.runner_with_progress = SchedulerTests.RunnerWithProgress()
        params.register_param_source_for_name("driver-test-param-source", DriverTestParamSource)
        runner.register_default_runners()
        runner.register_runner("driver-test-runner-with-completion", self.runner_with_progress)
Example #16
    def test_run_benchmark(self):
        cfg = config.Config()

        cfg.add(config.Scope.application, "system", "env.name", "unittest")
        cfg.add(
            config.Scope.application, "system", "time.start",
            datetime(year=2017, month=8, day=20, hour=1, minute=0, second=0))
        cfg.add(config.Scope.application, "system", "race.id",
                "6ebc6e53-ee20-4b0c-99b4-09697987e9f4")
        cfg.add(config.Scope.application, "system", "offline.mode", False)
        cfg.add(config.Scope.application, "driver", "on.error", "abort")
        cfg.add(config.Scope.application, "driver", "profiling", False)
        cfg.add(config.Scope.application, "reporting", "datastore.type",
                "in-memory")
        cfg.add(config.Scope.application, "track", "params", {})
        cfg.add(config.Scope.application, "track", "test.mode.enabled", True)
        cfg.add(config.Scope.application, "telemetry", "devices", [])
        cfg.add(config.Scope.application, "telemetry", "params", {})
        cfg.add(config.Scope.application, "mechanic", "car.names",
                ["external"])
        cfg.add(config.Scope.application, "mechanic", "skip.rest.api.check",
                True)
        cfg.add(
            config.Scope.application, "client", "hosts",
            AsyncDriverTests.Holder(all_hosts={"default": ["localhost:9200"]}))
        cfg.add(config.Scope.application, "client", "options",
                AsyncDriverTests.Holder(all_client_options={"default": {}}))

        params.register_param_source_for_name("bulk-param-source",
                                              AsyncDriverTestParamSource)

        task = track.Task(
            name="bulk-index",
            operation=track.Operation(
                "bulk-index",
                track.OperationType.Bulk.name,
                params={
                    "body": ["action_metadata_line", "index_line"],
                    "action-metadata-present": True,
                    "bulk-size": 1,
                    # we need this because the parameter source does not know that we only have one
                    # bulk and hence size() returns incorrect results
                    "size": 1
                },
                param_source="bulk-param-source"),
            warmup_iterations=0,
            iterations=1,
            clients=1)

        current_challenge = track.Challenge(name="default",
                                            default=True,
                                            schedule=[task])
        current_track = track.Track(name="unit-test",
                                    challenges=[current_challenge])

        driver = async_driver.AsyncDriver(
            cfg,
            current_track,
            current_challenge,
            es_client_factory_class=StaticClientFactory)

        distribution_flavor, distribution_version, revision = driver.setup()
        self.assertEqual("oss", distribution_flavor)
        self.assertEqual("7.3.0", distribution_version)
        self.assertEqual("de777fa", revision)

        metrics_store_representation = driver.run()

        metric_store = metrics.metrics_store(cfg,
                                             read_only=True,
                                             track=current_track,
                                             challenge=current_challenge)
        metric_store.bulk_add(metrics_store_representation)

        self.assertIsNotNone(
            metric_store.get_one(name="latency",
                                 task="bulk-index",
                                 sample_type=metrics.SampleType.Normal))
        self.assertIsNotNone(
            metric_store.get_one(name="service_time",
                                 task="bulk-index",
                                 sample_type=metrics.SampleType.Normal))
        self.assertIsNotNone(
            metric_store.get_one(name="processing_time",
                                 task="bulk-index",
                                 sample_type=metrics.SampleType.Normal))
        self.assertIsNotNone(
            metric_store.get_one(name="throughput",
                                 task="bulk-index",
                                 sample_type=metrics.SampleType.Normal))
        self.assertIsNotNone(
            metric_store.get_one(name="node_total_young_gen_gc_time",
                                 sample_type=metrics.SampleType.Normal))
        self.assertIsNotNone(
            metric_store.get_one(name="node_total_old_gen_gc_time",
                                 sample_type=metrics.SampleType.Normal))
        # ensure that there are not more documents than we expect
        self.assertEqual(6,
                         len(metric_store.docs),
                         msg=json.dumps(metric_store.docs, indent=2))
Example #17
    def setUp(self):
        params.register_param_source_for_name("driver-test-param-source",
                                              DriverTestParamSource)
Example #18
    def register_param_source(self, name, param_source):
        params.register_param_source_for_name(name, param_source)
Example #19
    def setUp(self):
        params.register_param_source_for_name("driver-test-param-source", DriverTestParamSource)
        runner.register_default_runners()
        self.test_track = track.Track(name="unittest")
Example #20
    def register_param_source(self, name, param_source):
        params.register_param_source_for_name(name, param_source)
Example #21
    def setUp(self):
        params.register_param_source_for_name("driver-test-param-source", DriverTestParamSource)
        self.test_track = track.Track(name="unittest", short_description="unittest track",
                                      source_root_url="http://example.org",
                                      indices=None,
                                      challenges=None)
Example #22
    def setUp(self):
        params.register_param_source_for_name("driver-test-param-source", DriverTestParamSource)
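
All of the driver examples above rely on a DriverTestParamSource registered under "driver-test-param-source"; its definition is not part of this listing. A minimal sketch consistent with the parameters these tests pass in ("body", "bulk-size", "size", ...) could look like the following; this is an assumption for illustration, not Rally's actual test helper.

    class DriverTestParamSource:
        def __init__(self, track=None, params=None, **kwargs):
            self._params = params if params is not None else {}

        def partition(self, partition_index, total_partitions):
            # every simulated client gets the same source in this sketch
            return self

        def size(self):
            # the tests pass "size" explicitly when the schedule should run
            # a fixed number of iterations
            return self._params.get("size")

        def params(self):
            # hand the configured request parameters straight to the runner
            return self._params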