def test_throughput_calculator_manual_set() -> None:

    with local_persistant_storage("media_throughput") as c:
        c[experiment] = str(1.0)

    with local_persistant_storage("alt_media_throughput") as c:
        c[experiment] = str(1.5)

    with DosingController(
            "turbidostat",
            target_od=1.0,
            duration=5 / 60,
            volume=1.0,
            unit=unit,
            experiment=experiment,
    ) as algo:

        pause()
        assert algo.automation_job.media_throughput == 1.0
        assert algo.automation_job.alt_media_throughput == 1.5

        pubsub.publish(
            f"pioreactor/{unit}/{experiment}/dosing_automation/alt_media_throughput/set",
            0,
        )
        pubsub.publish(
            f"pioreactor/{unit}/{experiment}/dosing_automation/media_throughput/set",
            0,
        )
        pause()
        pause()
        assert algo.automation_job.media_throughput == 0
        assert algo.automation_job.alt_media_throughput == 0
    def test_od_blank_being_non_zero(self) -> None:
        unit = get_unit_name()
        experiment = get_latest_experiment_name()
        with local_persistant_storage("od_blank") as cache:
            cache[experiment] = json.dumps({"1": 0.25, "2": 0.4})

        with local_persistant_storage("od_normalization_mean") as cache:
            cache[experiment] = json.dumps({"1": 0.5, "2": 0.8})

        with local_persistant_storage("od_normalization_variance") as cache:
            cache[experiment] = json.dumps({"1": 1e-6, "2": 1e-4})

        calc = GrowthRateCalculator(unit=unit, experiment=experiment)

        pause()
        pause()

        publish(
            f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
            create_od_raw_batched_json(
                ["1", "2"], [0.50, 0.80], ["90", "135"], timestamp="2010-01-01 12:02:00"
            ),
            retain=True,
        )

        pause()
        pause()

        assert calc.od_normalization_factors == {"2": 0.8, "1": 0.5}
        assert calc.od_blank == {"2": 0.4, "1": 0.25}
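        # scaled value = (raw - blank) / (mean - blank):
        # channel "2": (1.0 - 0.4) / (0.8 - 0.4) = 1.5
        # channel "1": (0.6 - 0.25) / (0.5 - 0.25) = 1.4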
        results = calc.scale_raw_observations({"2": 1.0, "1": 0.6})
        assert abs(results["2"] - 1.5) < 0.00001
        assert abs(results["1"] - 1.4) < 0.00001
        calc.set_state(calc.DISCONNECTED)
    def setup_class(cls) -> None:
        # clear the caches and MQTT
        experiment = get_latest_experiment_name()

        config["od_config.photodiode_channel"]["1"] = "90"
        config["od_config.photodiode_channel"]["2"] = None

        with local_persistant_storage("od_blank") as cache:
            if experiment in cache:
                del cache[experiment]

        with local_persistant_storage("od_normalization_mean") as cache:
            if experiment in cache:
                del cache[experiment]

        with local_persistant_storage("od_normalization_variance") as cache:
            if experiment in cache:
                del cache[experiment]

        with local_persistant_storage("growth_rate") as cache:
            if experiment in cache:
                del cache[experiment]

        with local_persistant_storage("od_filtered") as cache:
            if experiment in cache:
                del cache[experiment]
    def test_single_observation(self) -> None:
        unit = get_unit_name()
        experiment = get_latest_experiment_name()

        with local_persistant_storage("od_normalization_mean") as cache:
            cache[experiment] = json.dumps({"1": 1})

        with local_persistant_storage("od_normalization_variance") as cache:
            cache[experiment] = json.dumps({"1": 1})

        publish(
            f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
            create_od_raw_batched_json(
                ["1"], [1.153], ["90"], timestamp="2010-01-01 12:00:30"
            ),
            retain=True,
        )

        calc = GrowthRateCalculator(unit=unit, experiment=experiment)

        publish(
            f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
            create_od_raw_batched_json(
                ["1"], [1.155], ["90"], timestamp="2010-01-01 12:00:35"
            ),
        )
        pause()

        # smoke test: reaching this point without an exception is the pass condition
        assert True
        calc.set_state(calc.DISCONNECTED)
def test_execute_io_action() -> None:
    with local_persistant_storage("media_throughput") as c:
        c[experiment] = "0.0"

    with local_persistant_storage("alt_media_throughput") as c:
        c[experiment] = "0.0"

    with DosingController("silent", unit=unit, experiment=experiment) as ca:
        ca.automation_job.execute_io_action(media_ml=0.65,
                                            alt_media_ml=0.35,
                                            waste_ml=0.65 + 0.35)
        pause()
        assert ca.automation_job.media_throughput == 0.65
        assert ca.automation_job.alt_media_throughput == 0.35

        ca.automation_job.execute_io_action(media_ml=0.15,
                                            alt_media_ml=0.15,
                                            waste_ml=0.3)
        pause()
        assert ca.automation_job.media_throughput == 0.80
        assert ca.automation_job.alt_media_throughput == 0.50

        ca.automation_job.execute_io_action(media_ml=1.0,
                                            alt_media_ml=0,
                                            waste_ml=1)
        pause()
        assert ca.automation_job.media_throughput == 1.80
        assert ca.automation_job.alt_media_throughput == 0.50

        ca.automation_job.execute_io_action(media_ml=0.0,
                                            alt_media_ml=1.0,
                                            waste_ml=1)
        pause()
        assert ca.automation_job.media_throughput == 1.80
        assert ca.automation_job.alt_media_throughput == 1.50
def test_execute_io_action_outputs_will_be_null_if_calibration_is_not_defined() -> None:
    # regression test
    with local_persistant_storage("media_throughput") as c:
        c[experiment] = "0.0"

    with local_persistant_storage("alt_media_throughput") as c:
        c[experiment] = "0.0"

    with local_persistant_storage("alt_media_fraction") as c:
        c[experiment] = "0.0"

    with local_persistant_storage("pump_calibration") as cache:
        del cache["media_ml_calibration"]
        del cache["alt_media_ml_calibration"]

    ca = DosingAutomation(unit=unit,
                          experiment=experiment,
                          skip_first_run=True)
    result = ca.execute_io_action(media_ml=1.0, alt_media_ml=1.0, waste_ml=2.0)
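    # media and alt-media calibrations were deleted above, so those outputs are 0;
    # the waste calibration is still present, so waste reports the full 2.0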
    assert result[0] == 0
    assert result[1] == 0.0
    assert result[2] == 2.0
    ca.set_state(ca.DISCONNECTED)

    # add back to cache
    with local_persistant_storage("pump_calibration") as cache:
        cache["media_ml_calibration"] = '{"duration_" : 1.0}'
        cache["alt_media_ml_calibration"] = '{"duration_" : 1.0}'
Example 7
    def _update_throughput(self, message) -> None:
        payload = json.loads(message.payload)
        (
            self.media_throughput,
            self.alt_media_throughput,
        ) = self._volume_throughput_calculator.update(
            payload, self.media_throughput, self.alt_media_throughput)

        # add to cache
        with local_persistant_storage("alt_media_throughput") as cache:
            cache[self.experiment] = str(self.alt_media_throughput)

        with local_persistant_storage("media_throughput") as cache:
            cache[self.experiment] = str(self.media_throughput)
def test_changing_algo_over_mqtt_will_not_produce_two_dosing_jobs() -> None:
    with local_persistant_storage("media_throughput") as c:
        c[experiment] = "0.0"

    with local_persistant_storage("alt_media_throughput") as c:
        c[experiment] = "0.0"

    with local_persistant_storage("alt_media_fraction") as c:
        c[experiment] = "0.0"

    algo = DosingController(
        "pid_turbidostat",
        volume=1.0,
        target_od=0.4,
        duration=60,
        unit=unit,
        experiment=experiment,
    )
    assert algo.automation["automation_name"] == "pid_turbidostat"
    pause()
    pubsub.publish(
        f"pioreactor/{unit}/{experiment}/dosing_control/automation/set",
        '{"automation_name": "turbidostat", "duration": 60, "target_od": 1.0, "volume": 1.0, "skip_first_run": 1}',
    )
    time.sleep(10)  # need to wait for all jobs to disconnect correctly and threads to join.
    assert isinstance(algo.automation_job, Turbidostat)

    pubsub.publish(
        f"pioreactor/{unit}/{experiment}/growth_rate_calculating/growth_rate",
        '{"growth_rate": 1.0}',
    )
    pubsub.publish(
        f"pioreactor/{unit}/{experiment}/growth_rate_calculating/od_filtered",
        '{"od_filtered": 1.0}',
    )
    pause()

    # note that we manually run, as we have skipped the first run in the json
    algo.automation_job.run()
    time.sleep(5)
    assert algo.automation_job.media_throughput == 1.0

    pubsub.publish(
        f"pioreactor/{unit}/{experiment}/dosing_automation/target_od/set", 1.5)
    pause()
    pause()
    assert algo.automation_job.target_od == 1.5
    algo.set_state(algo.DISCONNECTED)
    def test_zero_blank_and_zero_od_coming_in(self) -> None:
        unit = get_unit_name()
        experiment = get_latest_experiment_name()
        with local_persistant_storage("od_normalization_mean") as cache:
            cache[experiment] = json.dumps({"1": 0})

        with local_persistant_storage("od_normalization_variance") as cache:
            cache[experiment] = json.dumps({"1": 0})

        with local_persistant_storage("od_blank") as cache:
            cache[experiment] = json.dumps({"1": 0})

        with pytest.raises(ZeroDivisionError):
            with GrowthRateCalculator(unit=unit, experiment=experiment):
                ...
    def create_obs_noise_covariance(self):  # type: ignore
        """
        Our sensor measurements have initial variance V, but in our KF, we scale them their
        initial mean, M. Hence the observed variance of the _normalized_ measurements is

        var(measurement / M) = V / M^2

        (there's also a blank to consider)


        However, we offer the variable ods_std to tweak this a bit.

        """
        import numpy as np

        try:
            scaling_obs_variances = np.array(
                [
                    self.od_variances[channel]
                    / (self.od_normalization_factors[channel] - self.od_blank[channel])
                    ** 2
                    for channel in self.od_normalization_factors
                ]
            )

            obs_variances = config.getfloat(
                "growth_rate_kalman", "obs_std"
            ) ** 2 * np.diag(scaling_obs_variances)
            return obs_variances
        except ZeroDivisionError:
            self.logger.debug(
                "Is there an OD Reading that is 0? Maybe there's a loose photodiode connection?",
                exc_info=True,
            )
            self.logger.error(
                "Is there an OD Reading that is 0? Maybe there's a loose photodiode connection?"
            )

            # clear the cached stats so they are recomputed on the next startup

            with local_persistant_storage("od_normalization_mean") as cache:
                del cache[self.experiment]

            with local_persistant_storage("od_normalization_variance") as cache:
                del cache[self.experiment]

            raise
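    # A minimal numeric check of the scaling identity above (hypothetical values,
    # not part of the job): for mean M and raw variance V, var(X / M) == V / M**2.
    #
    #   import numpy as np
    #   rng = np.random.default_rng(0)
    #   M, V = 0.5, 1e-4
    #   x = rng.normal(loc=M, scale=V ** 0.5, size=100_000)
    #   assert abs(np.var(x / M) - V / M ** 2) / (V / M ** 2) < 0.05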
Example 11
    def __init__(self, target_od: float, **kwargs) -> None:
        super(PIDTurbidostat, self).__init__(**kwargs)
        assert target_od is not None, "`target_od` must be set"

        with local_persistant_storage("pump_calibration") as cache:
            if "media_ml_calibration" not in cache:
                raise RuntimeError("Media pump calibration must be performed first.")
            elif "waste_ml_calibration" not in cache:
                raise RuntimeError("Waste pump calibration must be performed first.")

        self.set_target_od(target_od)
        self.volume_to_cycle = None

        # see: PID controller turbidostat.ipynb
        Kp = config.getfloat("dosing_automation.pid_turbidostat", "Kp")
        Ki = config.getfloat("dosing_automation.pid_turbidostat", "Ki")
        Kd = config.getfloat("dosing_automation.pid_turbidostat", "Kd")

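        # The gains are negated (presumably) so that dosing increases when OD
        # rises above the setpoint: error = setpoint - OD is negative there,
        # and negative gains flip it to a positive control output.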
        self.pid = PID(
            -Kp,
            -Ki,
            -Kd,
            setpoint=self.target_od,
            sample_time=None,
            unit=self.unit,
            experiment=self.experiment,
            job_name=self.job_name,
            target_name="od",
        )
Example 12
def test_changing_algo_over_mqtt_with_wrong_type_is_okay() -> None:
    with local_persistant_storage("media_throughput") as c:
        c[experiment] = "0.0"

    algo = DosingController(
        "pid_turbidostat",
        volume=1.0,
        target_od=0.4,
        duration=2 / 60,
        unit=unit,
        experiment=experiment,
    )
    assert algo.automation["automation_name"] == "pid_turbidostat"
    assert algo.automation_name == "pid_turbidostat"
    pause()
    pubsub.publish(
        f"pioreactor/{unit}/{experiment}/dosing_control/automation/set",
        '{"automation_name": "pid_turbidostat", "duration": "60", "target_od": "1.0", "volume": "1.0"}',
    )
    time.sleep(7)  # need to wait for all jobs to disconnect correctly and threads to join.
    assert isinstance(algo.automation_job, PIDTurbidostat)
    assert algo.automation_job.target_od == 1.0
    algo.set_state(algo.DISCONNECTED)
Example 13
def test_stirring_with_lookup_linear_v1() -> None:
    class FakeRpmCalculator:
        def __call__(self, *args):
            return 475

        def cleanup(self):
            pass

    with local_persistant_storage("stirring_calibration") as cache:
        cache["linear_v1"] = json.dumps({"rpm_coef": 0.1, "intercept": 20})

    target_rpm = 500
    current_dc = Stirrer.duty_cycle
    with Stirrer(target_rpm, unit, exp,
                 rpm_calculator=FakeRpmCalculator()) as st:  # type: ignore
        st.start_stirring()

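        # with the linear_v1 calibration above, the predicted DC for 500 RPM is
        # 0.1 * 500 + 20 = 70; the job keeps 10% of the current DC and moves 90%
        # of the way to that prediction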
        assert st.duty_cycle == current_dc - 0.9 * (current_dc -
                                                    (0.1 * target_rpm + 20))

        pause()
        pause()

        current_dc = st.duty_cycle
        target_rpm = 600
        publish(f"pioreactor/{unit}/{exp}/stirring/target_rpm/set", target_rpm)
        pause()
        pause()

        assert st.duty_cycle == current_dc - 0.9 * (current_dc -
                                                    (0.1 * target_rpm + 20))
Example 14
def setup_function() -> None:
    with local_persistant_storage("pump_calibration") as cache:
        cache["media_ml_calibration"] = json.dumps({
            "duration_": 1.0,
            "bias_": 0,
            "dc": 60,
            "hz": 100,
            "timestamp": "2010-01-01"
        })
        cache["alt_media_ml_calibration"] = json.dumps({
            "duration_":
            1.0,
            "bias_":
            0,
            "dc":
            60,
            "hz":
            100,
            "timestamp":
            "2010-01-01"
        })
        cache["waste_ml_calibration"] = json.dumps({
            "duration_": 1.0,
            "bias_": 0,
            "dc": 60,
            "hz": 100,
            "timestamp": "2010-01-01"
        })
    def get_od_blank_from_cache(self) -> dict[pt.PdChannel, float]:
        with local_persistant_storage("od_blank") as cache:
            result = cache.get(self.experiment, None)

        if result:
            return json.loads(result)
        else:
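            # no blank recorded for this experiment: treat every channel's blank as 0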
            return defaultdict(lambda: 0)
    def test_od_blank_being_higher_than_observations(self) -> None:
        unit = get_unit_name()
        experiment = get_latest_experiment_name()
        with local_persistant_storage("od_blank") as cache:
            cache[experiment] = json.dumps({"1": 0.25, "2": 0.4})

        with local_persistant_storage("od_normalization_mean") as cache:
            cache[experiment] = json.dumps({"1": 0.5, "2": 0.8})

        with local_persistant_storage("od_normalization_variance") as cache:
            cache[experiment] = json.dumps({"1": 1e-6, "2": 1e-4})

        calc = GrowthRateCalculator(unit=unit, experiment=experiment)
        pause()
        pause()
        publish(
            f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
            create_od_raw_batched_json(
                ["1", "2"], [0.50, 0.80], ["90", "135"], timestamp="2010-01-01 12:02:00"
            ),
            retain=True,
        )
        pause()
        pause()
        publish(
            f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
            create_od_raw_batched_json(
                ["1", "2"], [0.1, 0.1], ["90", "135"], timestamp="2010-01-01 12:02:05"
            ),
            retain=True,
        )
        pause()
        pause()
        pause()
        publish(
            f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
            create_od_raw_batched_json(
                ["1", "2"], [0.1, 0.1], ["90", "135"], timestamp="2010-01-01 12:02:10"
            ),
            retain=True,
        )
        pause()
        pause()
        calc.set_state(calc.DISCONNECTED)
    def test_90_angle(self) -> None:
        import json
        import numpy as np
        from pioreactor.utils.timing import RepeatedTimer

        unit = get_unit_name()
        experiment = get_latest_experiment_name()
        samples_per_second = 0.2
        config["od_config"]["samples_per_second"] = str(samples_per_second)
        config["od_config.photodiode_channel"]["1"] = "90"
        config["od_config.photodiode_channel"]["2"] = None

        with local_persistant_storage("od_normalization_mean") as cache:
            cache[experiment] = json.dumps({"1": 0.1})

        with local_persistant_storage("od_normalization_variance") as cache:
            cache[experiment] = json.dumps({"1": 8.2e-02})

        class MockODReadings:

            growth_rate = 0.1
            od_reading = 1.0

            def __call__(self):
                self.od_reading *= np.exp(self.growth_rate / 60 / 60 / samples_per_second)

                voltage = 0.1 * self.od_reading
                payload = {
                    "od_raw": {"1": {"voltage": voltage, "angle": "90"}},
                    "timestamp": "2021-06-06T15:08:12.081153",
                }

                publish(
                    f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
                    json.dumps(payload),
                )

        thread = RepeatedTimer(0.025, MockODReadings()).start()

        with GrowthRateCalculator(unit=unit, experiment=experiment) as calc:
            time.sleep(35)

            assert calc.ekf.state_[1] > 0

        thread.cancel()
Example 18
def test_execute_io_action2() -> None:
    with local_persistant_storage("media_throughput") as c:
        c[experiment] = "0.0"

    with local_persistant_storage("alt_media_throughput") as c:
        c[experiment] = "0.0"

    with local_persistant_storage("alt_media_fraction") as c:
        c[experiment] = "0.0"

    with DosingController("silent", unit=unit, experiment=experiment) as ca:
        ca.automation_job.execute_io_action(media_ml=1.25,
                                            alt_media_ml=0.01,
                                            waste_ml=1.26)
        pause()
        assert ca.automation_job.media_throughput == 1.25
        assert ca.automation_job.alt_media_throughput == 0.01
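        # 0.01 mL of alt media in a vial of (presumably) 14 mL:
        # 0.01 / 14 ≈ 0.0007142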
        assert abs(ca.automation_job.alt_media_fraction - 0.0007142) < 0.000001
Example 19
    def _init_alt_media_fraction_calculator(self) -> AltMediaCalculator:
        self.published_settings["alt_media_fraction"] = {
            "datatype": "float",
            "settable": True,
        }

        with local_persistant_storage("alt_media_fraction") as cache:
            self.alt_media_fraction = float(cache.get(self.experiment, 0.0))
            return AltMediaCalculator()
Example 20
def test_execute_io_action_outputs_will_shortcut_if_disconnected() -> None:
    # regression test
    with local_persistant_storage("media_throughput") as c:
        c[experiment] = "0.0"

    with local_persistant_storage("alt_media_throughput") as c:
        c[experiment] = "0.0"

    with local_persistant_storage("alt_media_fraction") as c:
        c[experiment] = "0.0"

    ca = DosingAutomation(unit=unit, experiment=experiment)
    ca.set_state(ca.DISCONNECTED)
    result = ca.execute_io_action(media_ml=1.25,
                                  alt_media_ml=0.01,
                                  waste_ml=1.26)
    assert result[0] == 0.0
    assert result[1] == 0.0
    assert result[2] == 0.0
Example 21
def test_throughput_calculator_restart() -> None:

    with local_persistant_storage("media_throughput") as c:
        c[experiment] = str(1.0)

    with local_persistant_storage("alt_media_throughput") as c:
        c[experiment] = str(1.5)

    with DosingController(
            "turbidostat",
            target_od=1.0,
            duration=5 / 60,
            volume=1.0,
            unit=unit,
            experiment=experiment,
    ) as algo:
        pause()
        assert algo.automation_job.media_throughput == 1.0
        assert algo.automation_job.alt_media_throughput == 1.5
Example 22
    def __init__(self, target_od: float, volume: float, **kwargs) -> None:
        super(Turbidostat, self).__init__(**kwargs)

        with local_persistant_storage("pump_calibration") as cache:
            if "media_ml_calibration" not in cache:
                raise RuntimeError("Media pump calibration must be performed first.")
            elif "waste_ml_calibration" not in cache:
                raise RuntimeError("Waste pump calibration must be performed first.")

        self.target_od = float(target_od)
        self.volume = float(volume)
Example 23
    def check_for_last_backup(self) -> None:

        with local_persistant_storage("database_backups") as cache:
            if cache.get("latest_backup_timestamp"):
                latest_backup_at = datetime.strptime(
                    cache["latest_backup_timestamp"].decode("utf-8"),
                    "%Y-%m-%dT%H:%M:%S.%fZ",
                )

                if (datetime.utcnow() - latest_backup_at).days > 30:
                    self.logger.warning(
                        "Database hasn't been backed up in over 30 days.")
Example 24
    def _init_volume_throughput_calculator(self) -> ThroughputCalculator:
        self.published_settings["alt_media_throughput"] = {
            "datatype": "float",
            "settable": True,
            "unit": "mL",
            "persist": True,
        }
        self.published_settings["media_throughput"] = {
            "datatype": "float",
            "settable": True,
            "unit": "mL",
            "persist": True,
        }

        with local_persistant_storage("alt_media_throughput") as cache:
            self.alt_media_throughput = float(cache.get(self.experiment, 0.0))

        with local_persistant_storage("media_throughput") as cache:
            self.media_throughput = float(cache.get(self.experiment, 0.0))

        return ThroughputCalculator()
    def test_observation_order_is_preserved_in_job(self) -> None:
        unit = get_unit_name()
        experiment = get_latest_experiment_name()
        with local_persistant_storage("od_normalization_mean") as cache:
            cache[experiment] = json.dumps({"1": 2, "2": 1})

        with local_persistant_storage("od_normalization_variance") as cache:
            cache[experiment] = json.dumps({"1": 1, "2": 1})

        with local_persistant_storage("growth_rate") as cache:
            cache[experiment] = str(1.0)

        publish(
            f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
            create_od_raw_batched_json(
                ["2", "1"], [0.9, 1.1], ["135", "90"], timestamp="2010-01-01 12:00:00"
            ),
            retain=True,
        )

        calc = GrowthRateCalculator(unit=unit, experiment=experiment)

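        # no od_blank is set, so scaling is raw / mean: 2 / 1 = 2.0 and 0.5 / 2 = 0.25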
        assert calc.scale_raw_observations({"2": 2, "1": 0.5}) == {"2": 2.0, "1": 0.25}
        calc.set_state(calc.DISCONNECTED)
    def test_scaling_works(self) -> None:
        unit = get_unit_name()
        experiment = get_latest_experiment_name()

        with local_persistant_storage("od_normalization_mean") as cache:
            cache[experiment] = json.dumps({"1": 0.5, "2": 0.8})

        with local_persistant_storage("od_normalization_variance") as cache:
            cache[experiment] = json.dumps({"1": 1e-6, "2": 1e-4})

        publish(
            f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
            create_od_raw_batched_json(
                ["1", "2"], [0.5, 0.8], ["90", "135"], timestamp="2010-01-01 12:00:35"
            ),
            retain=True,
        )

        calc = GrowthRateCalculator(unit=unit, experiment=experiment)

        pause()
        assert calc.od_normalization_factors == {"2": 0.8, "1": 0.5}

        calc.set_state(calc.DISCONNECTED)
    def get_od_normalization_from_cache(self) -> dict[pt.PdChannel, float]:
        # check if a previous od_normalization run left mean stats in the local cache
        with local_persistant_storage("od_normalization_mean") as cache:
            result = cache.get(self.experiment, None)

        if result is not None:
            return json.loads(result)
        else:
            self.logger.debug("od_normalization/mean not found in cache.")
            self.logger.info(
                "Calculating OD normalization metrics. This may take a few minutes"
            )
            means, _ = od_normalization(unit=self.unit, experiment=self.experiment)
            self.logger.info("Finished calculating OD normalization metrics.")
            return means
Example 28
    def __init__(self, target_growth_rate=None, target_od=None, volume=None, **kwargs):
        super(PIDMorbidostat, self).__init__(**kwargs)
        assert target_od is not None, "`target_od` must be set"
        assert target_growth_rate is not None, "`target_growth_rate` must be set"

        with local_persistant_storage("pump_calibration") as cache:
            if "media_ml_calibration" not in cache:
                raise RuntimeError(
                    "Media pump calibration must be performed first.")
            elif "waste_ml_calibration" not in cache:
                raise RuntimeError(
                    "Waste pump calibration must be performed first.")
            elif "alt_media_ml_calibration" not in cache:
                raise RuntimeError(
                    "Alt-Media pump calibration must be performed first.")

        self.set_target_growth_rate(target_growth_rate)
        self.target_od = float(target_od)

        Kp = config.getfloat("dosing_automation.pid_morbidostat", "Kp")
        Ki = config.getfloat("dosing_automation.pid_morbidostat", "Ki")
        Kd = config.getfloat("dosing_automation.pid_morbidostat", "Kd")

        self.pid = PID(
            -Kp,
            -Ki,
            -Kd,
            setpoint=self.target_growth_rate,
            output_limits=(0, 1),
            sample_time=None,
            unit=self.unit,
            experiment=self.experiment,
            job_name=self.job_name,
            target_name="growth_rate",
        )

        if volume is not None:
            self.logger.info(
                "Ignoring volume parameter; volume set by target growth rate and duration."
            )

        assert isinstance(self.duration, float)
        self.volume = round(
            self.target_growth_rate * VIAL_VOLUME * (self.duration / 60), 4)
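    # A worked example of the volume formula (assuming VIAL_VOLUME is 14 mL;
    # check the constant in your install): with target_growth_rate = 0.1 / h
    # and duration = 60 min, volume = round(0.1 * 14 * (60 / 60), 4) = 1.4 mL
    # exchanged per cycle.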
Example 29
    def initialize_rpm_to_dc_lookup(self) -> Callable:
        if self.rpm_calculator is None:
            # if we can't track RPM, no point in adjusting DC
            return lambda rpm: self.duty_cycle

        with local_persistant_storage("stirring_calibration") as cache:

            if "linear_v1" in cache:
                parameters = json.loads(cache["linear_v1"])
                coef = parameters["rpm_coef"]
                intercept = parameters["intercept"]
                # we scale this by 90% to make sure the PID + prediction doesn't overshoot,
                # better to be conservative here.
                # equivalent to a weighted average: 0.1 * current + 0.9 * predicted
                return lambda rpm: self.duty_cycle - 0.90 * (
                    self.duty_cycle - (coef * rpm + intercept))
            else:
                return lambda rpm: self.duty_cycle
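    # The 0.90 scaling above is algebraically a weighted average; a minimal
    # numeric check (hypothetical values, not part of the job):
    #
    #   current, predicted = 50.0, 70.0
    #   blended = current - 0.90 * (current - predicted)
    #   assert abs(blended - (0.1 * current + 0.9 * predicted)) < 1e-9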
Example 30
def test_positive_correlation_between_rpm_and_stirring(
    logger: Logger, unit: str, experiment: str
) -> None:

    with local_persistant_storage("stirring_calibration") as cache:

        if "linear_v1" in cache:
            parameters = loads(cache["linear_v1"])
            coef = parameters["rpm_coef"]
            intercept = parameters["intercept"]

            initial_dc = coef * 700 + intercept

        else:
            initial_dc = config.getfloat("stirring", "initial_duty_cycle")

    dcs = []
    measured_rpms = []
    n_samples = 8
    start = initial_dc
    end = initial_dc * 0.66

    with stirring.Stirrer(
        target_rpm=0, unit=unit, experiment=experiment, rpm_calculator=None
    ) as st, stirring.RpmFromFrequency() as rpm_calc:

        st.duty_cycle = initial_dc
        st.start_stirring()
        sleep(1)

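        # sweep the duty cycle linearly from `start` down to `end` over n_samples steps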
        for i in range(n_samples):
            dc = start * (1 - i / n_samples) + (i / n_samples) * end

            st.set_duty_cycle(dc)
            sleep(1)
            measured_rpms.append(rpm_calc(4))
            dcs.append(dc)

        measured_correlation = round(correlation(dcs, measured_rpms), 2)
        logger.debug(
            f"Correlation between stirring RPM and duty cycle: {measured_correlation}"
        )
        logger.debug(f"{dcs=}, {measured_rpms=}")
        assert measured_correlation > 0.9, (dcs, measured_rpms)