Example #1
    async def measure(
        self,
        *,
        metrics: List[str] = None,
        control: servo.Control = servo.Control()
    ) -> servo.Measurement:
        warmup_until = datetime.datetime.now() + control.warmup
        self.config._duration = control.duration

        number_of_urls = (1 if self.config.target else
                          _number_of_lines_in_file(self.config.targets))
        summary = f"Loading {number_of_urls} URL(s) for {self.config._duration} (delay of {control.delay}, warmup of {control.warmup}) at a rate of {self.config.rate} (reporting every {self.config.reporting_interval})"
        self.logger.info(summary)

        # Run the load generator, publishing metrics for interested subscribers
        async with self.publish("loadgen.vegeta") as publisher:
            _, vegeta_reports = await _run_vegeta(config=self.config,
                                                  warmup_until=warmup_until,
                                                  publisher=publisher)

        self.logger.info(
            f"Producing time series readings from {len(vegeta_reports)} Vegeta reports"
        )
        readings = (_time_series_readings_from_vegeta_reports(
            metrics, vegeta_reports) if vegeta_reports else [])
        measurement = servo.Measurement(
            readings=readings,
            annotations={
                "load_profile": summary,
            },
        )
        self.logger.trace(
            f"Reporting time series metrics {devtools.pformat(measurement)}")

        return measurement
Example #2
async def test_control_sent_on_adjust(
    servo_runner: servo.runner.ServoRunner,
    fakeapi_url: str,
    fastapi_app: "tests.OpsaniAPI",
    mocker: pytest_mock.MockFixture,
) -> None:
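    # Arrange a fake optimizer that recommends an empty adjustment set with a 10s settlement control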
    sequenced_optimizer = tests.fake.SequencedOptimizer(
        id="dev.opsani.com/big-in-japan", token="31337"
    )
    control = servo.Control(settlement="10s")
    await sequenced_optimizer.recommend_adjustments(adjustments=[], control=control)
    sequenced_optimizer.sequence(sequenced_optimizer.done())
    fastapi_app.optimizer = sequenced_optimizer
    servo_runner.servo.optimizer.base_url = fakeapi_url

    adjust_connector = servo_runner.servo.get_connector("adjust")
    event_handler = adjust_connector.get_event_handlers(
        "adjust", servo.events.Preposition.on
    )[0]
    spy = mocker.spy(event_handler, "handler")

    async def wait_for_optimizer_done():
        while fastapi_app.optimizer.state.name != "done":
            await asyncio.sleep(0.01)

    await servo_runner.run()
    await asyncio.wait_for(wait_for_optimizer_done(), timeout=2)
    await servo_runner.shutdown()

    spy.assert_called_once_with(adjust_connector, [], control)
Example #3
async def recommend_adjustments(
        self,
        adjustments: List[servo.types.Adjustment],
        control: servo.Control = servo.Control(),
) -> None:
    """Recommend Adjustments to the Servo."""
    servo.logging.logger.info(
        f"Recommending Adjustments ({adjustments}, {control})")
Example #4
async def _enter_awaiting_measurement(
    self,
    metrics: List[servo.Metric] = [],
    control: servo.Control = servo.Control()
) -> None:
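    # Build a `measure` command response carrying the requested metrics and control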
    self.command_response = servo.api.CommandResponse(
        cmd=servo.api.Commands.measure,
        param=servo.api.MeasureParams(metrics=metrics, control=control),
    )
Example #5
    def describe(
        self, control: servo.Control = servo.Control()) -> servo.Description:
        """Describes the current state of Metrics measured by querying Prometheus.

        Returns:
            Description: An object describing the current state of metrics
                queried from Prometheus.
        """
        return servo.Description(metrics=self.config.metrics)
Example #6
    async def _enter_awaiting_adjustment(
            self,
            adjustments: List[servo.types.Adjustment] = [],
            control: servo.Control = servo.Control(),
    ) -> None:
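        # Serialize the adjustments into a descriptor and attach the control settings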
        descriptor = servo.api.adjustments_to_descriptor(adjustments)
        descriptor["control"] = control.dict(exclude_unset=True)

        self.command_response = servo.api.CommandResponse(
            cmd=servo.api.Commands.adjust, param=descriptor)
Example #7
async def test_adjustment_rejected(
    mocker, servo_runner: servo.runner.ServoRunner
) -> None:
    connector = servo_runner.servo.get_connector("adjust")
    with servo.utilities.pydantic.extra(connector):
        on_handler = connector.get_event_handlers(
            "adjust", servo.events.Preposition.on
        )[0]
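        # Replace the adjust handler with a mock that rejects the adjustment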
        mock = mocker.patch.object(on_handler, "handler")
        mock.side_effect = servo.errors.AdjustmentRejectedError()
        await servo_runner.servo.startup()
        with pytest.raises(servo.errors.AdjustmentRejectedError):
            await servo_runner.adjust([], servo.Control())
Example #8
    async def test_subscribe_via_exchange_context_manager(self, connector) -> None:
        connector.pubsub_exchange.start()
        reports = []

        async def _subscribe_to_vegeta() -> None:
            async with connector.subscribe("loadgen.vegeta") as subscriber:
                async for message, channel in subscriber:
                    debug("Vegeta Reported: ", message.json())
                    reports.append(message.json())

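        # Run the subscriber concurrently while the measurement publishes Vegeta reports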
        task = asyncio.create_task(_subscribe_to_vegeta())
        await connector.measure(control=servo.Control(duration="3s"))
        task.cancel()
        assert len(reports) > 5
Example #9
    async def test_subscribe_via_exchange_subscriber_object(self, connector) -> None:
        reports = []

        async def _callback(message, channel) -> None:
            debug("Vegeta Reported: ", message.json())
            reports.append(message.json())

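        # Register a callback-based subscriber on the exchange instead of using the context manager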
        subscriber = connector.pubsub_exchange.create_subscriber(
            "loadgen.vegeta", callback=_callback
        )
        connector.pubsub_exchange.start()
        measurement = await asyncio.wait_for(
            connector.measure(control=servo.Control(duration="5s")),
            timeout=7,  # NOTE: Always make timeout exceed control duration
        )
        assert len(reports) > 5
Example #10
async def test_state_machine_lifecyle(measurement: servo.Measurement) -> None:
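    # Walk the fake optimizer through its full lifecycle: hello, describe, measure, adjust, goodbye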
    static_optimizer = tests.fake.StaticOptimizer(
        id="dev.opsani.com/big-in-japan", token="31337")
    await static_optimizer.say_hello()

    await static_optimizer.request_description()
    await static_optimizer.submit_description(_random_description())

    metric = servo.Metric(
        name="Some Metric",
        unit=servo.Unit.requests_per_minute,
    )
    await static_optimizer.request_measurement(metrics=[metric],
                                               control=servo.Control())
    await static_optimizer.submit_measurement(measurement)

    adjustment = servo.Adjustment(component_name="web",
                                  setting_name="cpu",
                                  value=1.25)
    await static_optimizer.recommend_adjustments([adjustment])
    await static_optimizer.complete_adjustments(_random_description())

    await static_optimizer.say_goodbye()
Example #11
def describe(
    self, control: servo.Control = servo.Control()) -> servo.Description:
    """
    Describes the metrics and components exported by the connector.
    """
    return servo.Description(metrics=METRICS, components=[])
Example #12
    async def measure(
        self,
        *,
        metrics: List[str] = None,
        control: servo.Control = servo.Control()
    ) -> servo.Measurement:
        """Queries Prometheus for metrics as time series values and returns a
        Measurement object that aggregates the readings for processing by the
        optimizer.

        Args:
            metrics (List[str], optional): A list of the metric names to measure.
                When None, all configured metrics are measured. Defaults to None.
            control (Control, optional): A control descriptor that describes how
                the measurement is to be captured. Defaults to Control().

        Returns:
            Measurement: An object that aggregates the state of the metrics
            queried from Prometheus.
        """
        if metrics:
            metrics__ = list(
                filter(lambda m: m.name in metrics, self.metrics()))
        else:
            metrics__ = self.metrics()
        measuring_names = list(map(lambda m: m.name, metrics__))

        # TODO: Rationalize these given the streaming metrics support
        start = datetime.datetime.now() + control.warmup
        end = start + control.duration
        measurement_duration = servo.Duration(control.warmup +
                                              control.duration)
        self.logger.info(
            f"Measuring {len(metrics__)} metrics for {measurement_duration}: {servo.utilities.join_to_series(measuring_names)}"
        )

        progress = servo.EventProgress(timeout=measurement_duration,
                                       settlement=None)

        # Handle fast fail metrics
        if (self.config.fast_fail.disabled == 0 and control.userdata
                and control.userdata.slo):
            self.logger.info(
                "Fast Fail enabled, the following SLO Conditions will be monitored during measurement: "
                f"{', '.join(map(str, control.userdata.slo.conditions))}")
            fast_fail_observer = servo.fast_fail.FastFailObserver(
                config=self.config.fast_fail,
                input=control.userdata.slo,
                metrics_getter=functools.partial(self._query_slo_metrics,
                                                 metrics=metrics__),
            )
            fast_fail_progress = servo.EventProgress(
                timeout=measurement_duration)
            gather_tasks = [
                asyncio.create_task(progress.watch(self.observe)),
                asyncio.create_task(
                    fast_fail_progress.watch(
                        fast_fail_observer.observe,
                        every=self.config.fast_fail.period)),
            ]
            try:
                await asyncio.gather(*gather_tasks)
            except:
                [task.cancel() for task in gather_tasks]
                await asyncio.gather(*gather_tasks, return_exceptions=True)
                raise
        else:
            await progress.watch(self.observe)

        # Capture the measurements
        self.logger.info(
            f"Querying Prometheus for {len(metrics__)} metrics...")
        readings = await asyncio.gather(*list(
            map(lambda m: self._query_prometheus(m, start, end), metrics__)))
        all_readings = (functools.reduce(lambda x, y: x + y, readings)
                        if readings else [])
        measurement = servo.Measurement(readings=all_readings)
        return measurement