def test_end_to_end(self) -> None:
    config["od_config.photodiode_channel"]["1"] = "90"
    config["od_config.photodiode_channel"]["2"] = "135"

    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    interval = 0.1
    config["od_config"]["samples_per_second"] = "0.2"

    od = start_od_reading(
        "135",
        "90",
        sampling_rate=interval,
        unit=unit,
        experiment=experiment,
        fake_data=True,
    )

    st = start_stirring(target_rpm=500, unit=unit, experiment=experiment)

    calc = GrowthRateCalculator(unit=unit, experiment=experiment)

    time.sleep(35)
    assert calc.ekf.state_[-2] != 1.0

    calc.set_state(calc.DISCONNECTED)
    st.set_state(st.DISCONNECTED)
    od.set_state(od.DISCONNECTED)

def test_od_blank_being_non_zero(self) -> None:
    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    with local_persistant_storage("od_blank") as cache:
        cache[experiment] = json.dumps({"1": 0.25, "2": 0.4})

    with local_persistant_storage("od_normalization_mean") as cache:
        cache[experiment] = json.dumps({"1": 0.5, "2": 0.8})

    with local_persistant_storage("od_normalization_variance") as cache:
        cache[experiment] = json.dumps({"1": 1e-6, "2": 1e-4})

    calc = GrowthRateCalculator(unit=unit, experiment=experiment)
    pause()
    pause()

    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["1", "2"], [0.50, 0.80], ["90", "135"], timestamp="2010-01-01 12:02:00"
        ),
        retain=True,
    )
    pause()
    pause()

    assert calc.od_normalization_factors == {"2": 0.8, "1": 0.5}
    assert calc.od_blank == {"2": 0.4, "1": 0.25}

    results = calc.scale_raw_observations({"2": 1.0, "1": 0.6})
    assert abs(results["2"] - 1.5) < 0.00001
    assert abs(results["1"] - 1.4) < 0.00001

    calc.set_state(calc.DISCONNECTED)

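# Worked example for the scaled values asserted above (a sketch, assuming the calculator
# scales as (observation - blank) / (normalization_mean - blank), which is consistent with
# the numbers in this test):
#   channel "2": (1.0 - 0.4) / (0.8 - 0.4) = 1.5
#   channel "1": (0.6 - 0.25) / (0.5 - 0.25) = 1.4
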
def test_single_observation(self) -> None:
    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    with local_persistant_storage("od_normalization_mean") as cache:
        cache[experiment] = json.dumps({1: 1})

    with local_persistant_storage("od_normalization_variance") as cache:
        cache[experiment] = json.dumps({1: 1})

    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(["1"], [1.153], ["90"], timestamp="2010-01-01 12:00:30"),
        retain=True,
    )

    calc = GrowthRateCalculator(unit=unit, experiment=experiment)

    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(["1"], [1.155], ["90"], timestamp="2010-01-01 12:00:35"),
    )
    pause()

    assert True
    calc.set_state(calc.DISCONNECTED)

def test_restart():
    publish(
        f"pioreactor/{unit}/{experiment}/od_normalization/median",
        '{"135/0": 1, "135/1": 1, "90/0": 1}',
        retain=True,
    )
    publish(
        f"pioreactor/{unit}/{experiment}/od_normalization/variance",
        '{"135/0": 1, "135/1": 1, "90/0": 1}',
        retain=True,
    )
    publish(
        f"pioreactor/{unit}/{experiment}/od_raw_batched",
        '{"135/0": 0.778586260567034, "135/1": 0.20944389172032837, "90/0": 0.1}',
        retain=True,
    )
    publish(
        f"pioreactor/{unit}/{experiment}/od_normalization/median",
        '{"135/0": 0.778586260567034, "135/1": 0.20944389172032837, "90/0": 0.1}',
        retain=True,
    )
    publish(
        f"pioreactor/{unit}/{experiment}/od_normalization/variance",
        '{"135/0": 1, "135/1": 1, "90/0": 1}',
        retain=True,
    )
    publish(f"pioreactor/{unit}/{experiment}/growth_rate", None, retain=True)

    pause()
    calc1 = GrowthRateCalculator(unit=unit, experiment=experiment)

    publish(
        f"pioreactor/{unit}/{experiment}/od_raw_batched",
        '{"135/0": 1.808586260567034, "135/1": 1.21944389172032837, "90/0": 1.2}',
    )
    publish(
        f"pioreactor/{unit}/{experiment}/od_raw_batched",
        '{"135/0": 2.808586260567034, "135/1": 2.21944389172032837, "90/0": 2.2}',
    )
    publish(
        f"pioreactor/{unit}/{experiment}/od_raw_batched",
        '{"135/0": 3.808586260567034, "135/1": 3.21944389172032837, "90/0": 3.2}',
    )
    publish(
        f"pioreactor/{unit}/{experiment}/od_raw_batched",
        '{"135/0": 4.808586260567034, "135/1": 4.21944389172032837, "90/0": 4.2}',
    )
    pause()

    assert calc1.state_[-1] != 0

    calc2 = GrowthRateCalculator(unit=unit, experiment=experiment)
    pause()
    assert calc2.initial_growth_rate != 0

def test_mis_shapen_data(monkeypatch):
    publish(
        f"pioreactor/{unit}/{experiment}/od_normalization/median",
        '{"135/0": 1, "90/0": 1}',
        retain=True,
    )
    publish(
        f"pioreactor/{unit}/{experiment}/od_normalization/variance",
        '{"135/0": 1, "90/0": 1}',
        retain=True,
    )
    publish(
        f"pioreactor/{unit}/{experiment}/od_raw_batched",
        '{"135/0": 0.778586260567034, "90/0": 0.1}',
        retain=True,
    )

    GrowthRateCalculator(unit=unit, experiment=experiment)

    publish(
        f"pioreactor/{unit}/{experiment}/od_raw_batched",
        '{"135/0": 0.778586260567034, "90/0": 0.1}',
    )
    pause()

    publish(f"pioreactor/{unit}/{experiment}/od_raw_batched", '{"135/0": 0.808586260567034}')
    pause()

def test_same_angles(monkeypatch):
    publish(
        f"pioreactor/{unit}/{experiment}/od_normalization/median",
        '{"135/0": 1, "135/1": 1, "90/0": 1}',
        retain=True,
    )
    publish(
        f"pioreactor/{unit}/{experiment}/od_normalization/variance",
        '{"135/0": 1, "135/1": 1, "90/0": 1}',
        retain=True,
    )
    publish(f"pioreactor/{unit}/{experiment}/growth_rate", None, retain=True)
    publish(
        f"pioreactor/{unit}/{experiment}/od_raw_batched",
        '{"135/0": 0.778586260567034, "135/1": 0.20944389172032837, "90/0": 0.1}',
        retain=True,
    )

    GrowthRateCalculator(unit=unit, experiment=experiment)

    publish(
        f"pioreactor/{unit}/{experiment}/od_raw_batched",
        '{"135/0": 0.778586260567034, "135/1": 0.20944389172032837, "90/0": 0.1}',
    )
    publish(
        f"pioreactor/{unit}/{experiment}/od_raw_batched",
        '{"135/0": 0.808586260567034, "135/1": 0.21944389172032837, "90/0": 0.2}',
    )
    publish(
        f"pioreactor/{unit}/{experiment}/od_raw_batched",
        '{"135/0": 0.808586260567034, "135/1": 0.21944389172032837, "90/0": 0.2}',
    )

def test_single_observation():
    publish(
        f"pioreactor/{unit}/{experiment}/od_normalization/median",
        '{"135/0": 1}',
        retain=True,
    )
    publish(
        f"pioreactor/{unit}/{experiment}/od_normalization/variance",
        '{"135/0": 1}',
        retain=True,
    )
    publish(f"pioreactor/{unit}/{experiment}/growth_rate", None, retain=True)
    publish(
        f"pioreactor/{unit}/{experiment}/od_raw_batched",
        '{"135/0": 0.20944389172032837}',
        retain=True,
    )

    GrowthRateCalculator(unit=unit, experiment=experiment)

    publish(f"pioreactor/{unit}/{experiment}/od_raw_batched", '{"135/0": 0.20944389172032837}')
    pause()

    assert True

def test_skip_180():
    publish(
        f"pioreactor/{unit}/{experiment}/od_normalization/median",
        '{"135/0": 1, "180/2": 1, "90/1": 1}',
        retain=True,
    )
    publish(
        f"pioreactor/{unit}/{experiment}/od_normalization/variance",
        '{"135/0": 1, "180/2": 1, "90/1": 1}',
        retain=True,
    )
    publish(f"pioreactor/{unit}/{experiment}/growth_rate", None, retain=True)
    publish(
        f"pioreactor/{unit}/{experiment}/od_raw_batched",
        '{"180/2": 0.778586260567034, "135/0": 0.20944389172032837, "90/1": 0.1}',
        retain=True,
    )

    calc = GrowthRateCalculator(unit=unit, experiment=experiment)

    publish(
        f"pioreactor/{unit}/{experiment}/od_raw_batched",
        '{"180/2": 0.778586260567034, "135/0": 0.20944389172032837, "90/1": 0.1}',
    )
    pause()

    assert "180/2" not in calc.angles

def test_od_blank_being_higher_than_observations(self) -> None:
    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    with local_persistant_storage("od_blank") as cache:
        cache[experiment] = json.dumps({"1": 0.25, "2": 0.4})

    with local_persistant_storage("od_normalization_mean") as cache:
        cache[experiment] = json.dumps({"1": 0.5, "2": 0.8})

    with local_persistant_storage("od_normalization_variance") as cache:
        cache[experiment] = json.dumps({"1": 1e-6, "2": 1e-4})

    calc = GrowthRateCalculator(unit=unit, experiment=experiment)
    pause()
    pause()

    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["1", "2"], [0.50, 0.80], ["90", "135"], timestamp="2010-01-01 12:02:00"
        ),
        retain=True,
    )
    pause()
    pause()

    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["1", "2"], [0.1, 0.1], ["90", "135"], timestamp="2010-01-01 12:02:05"
        ),
        retain=True,
    )
    pause()
    pause()
    pause()

    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["1", "2"], [0.1, 0.1], ["90", "135"], timestamp="2010-01-01 12:02:10"
        ),
        retain=True,
    )
    pause()
    pause()

    calc.set_state(calc.DISCONNECTED)

def test_subscribing(monkeypatch):
    publish(
        f"pioreactor/{unit}/{experiment}/od_normalization/median",
        '{"135/0": 1, "90/0": 1}',
        retain=True,
    )
    publish(
        f"pioreactor/{unit}/{experiment}/od_normalization/variance",
        '{"135/0": 1, "90/0": 1}',
        retain=True,
    )
    publish(f"pioreactor/{unit}/{experiment}/growth_rate", None, retain=True)
    publish(
        f"pioreactor/{unit}/{experiment}/od_raw_batched",
        '{"135/0": 0.778586260567034, "90/0": 0.20944389172032837}',
        retain=True,
    )
    publish(f"pioreactor/{unit}/{experiment}/growth_rate", 1.0, retain=True)

    calc = GrowthRateCalculator(unit=unit, experiment=experiment)
    pause()

    assert calc.initial_growth_rate == 1.0

    publish(
        f"pioreactor/{unit}/{experiment}/od_raw_batched",
        '{"135/0": 0.778586260567034, "90/0": 0.20944389172032837}',
    )
    pause()

    assert calc.ekf is not None

    publish(
        f"pioreactor/{unit}/{experiment}/od_raw_batched",
        '{"135/0": 0.778586260567034, "90/0": 0.20944389172032837}',
    )
    publish(
        f"pioreactor/{unit}/{experiment}/dosing_events",
        '{"volume_change": "1.5", "event": "add_media"}',
    )
    publish(
        f"pioreactor/{unit}/{experiment}/od_raw_batched",
        '{"135/0": 1.778586260567034, "90/0": 1.20944389172032837}',
    )
    publish(
        f"pioreactor/{unit}/{experiment}/od_raw_batched",
        '{"135/0": 1.778586260567034, "90/0": 1.20944389172032837}',
    )
    pause()

    assert calc.state_ is not None

def test_scaling_works(self) -> None:
    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    with local_persistant_storage("od_normalization_mean") as cache:
        cache[experiment] = json.dumps({"1": 0.5, "2": 0.8})

    with local_persistant_storage("od_normalization_variance") as cache:
        cache[experiment] = json.dumps({"1": 1e-6, "2": 1e-4})

    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["1", "2"], [0.5, 0.8], ["90", "135"], timestamp="2010-01-01 12:00:35"
        ),
        retain=True,
    )

    calc = GrowthRateCalculator(unit=unit, experiment=experiment)
    pause()

    assert calc.od_normalization_factors == {"2": 0.8, "1": 0.5}
    calc.set_state(calc.DISCONNECTED)

def test_zero_blank_and_zero_od_coming_in(self) -> None:
    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    with local_persistant_storage("od_normalization_mean") as cache:
        cache[experiment] = json.dumps({"1": 0})

    with local_persistant_storage("od_normalization_variance") as cache:
        cache[experiment] = json.dumps({"1": 0})

    with local_persistant_storage("od_blank") as cache:
        cache[experiment] = json.dumps({"1": 0})

    with pytest.raises(ZeroDivisionError):
        with GrowthRateCalculator(unit=unit, experiment=experiment):
            ...

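# Why ZeroDivisionError is expected above (assuming the same (observation - blank) /
# (mean - blank) scaling as in the blank tests): with both the cached blank and the
# cached normalization mean equal to 0, the scaling denominator is 0 - 0 = 0, so the job
# divides by zero while starting up inside the context manager.
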
def test_observation_order_is_preserved_in_job(self) -> None:
    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    with local_persistant_storage("od_normalization_mean") as cache:
        cache[experiment] = json.dumps({"1": 2, "2": 1})

    with local_persistant_storage("od_normalization_variance") as cache:
        cache[experiment] = json.dumps({"1": 1, "2": 1})

    with local_persistant_storage("growth_rate") as cache:
        cache[experiment] = str(1.0)

    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["2", "1"], [0.9, 1.1], ["135", "90"], timestamp="2010-01-01 12:00:00"
        ),
        retain=True,
    )

    calc = GrowthRateCalculator(unit=unit, experiment=experiment)
    assert calc.scale_raw_observations({"2": 2, "1": 0.5}) == {"2": 2.0, "1": 0.25}
    calc.set_state(calc.DISCONNECTED)

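# Worked example for the assertion above (assuming no od_blank is cached, so scaling
# reduces to observation / normalization_mean with means {"1": 2, "2": 1}):
#   channel "2": 2 / 1 = 2.0
#   channel "1": 0.5 / 2 = 0.25
# and the returned dict keeps the same key order ("2" first) as the input.
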
def test_90_angle(self) -> None:
    import json
    import numpy as np
    from pioreactor.utils.timing import RepeatedTimer

    unit = get_unit_name()
    experiment = get_latest_experiment_name()
    samples_per_second = 0.2
    config["od_config"]["samples_per_second"] = str(samples_per_second)
    config["od_config.photodiode_channel"]["1"] = "90"
    config["od_config.photodiode_channel"]["2"] = None

    with local_persistant_storage("od_normalization_mean") as cache:
        cache[experiment] = json.dumps({"1": 0.1})

    with local_persistant_storage("od_normalization_variance") as cache:
        cache[experiment] = json.dumps({"1": 8.2e-02})

    class Mock180ODReadings:
        growth_rate = 0.1
        od_reading = 1.0

        def __call__(self):
            self.od_reading *= np.exp(self.growth_rate / 60 / 60 / samples_per_second)

            voltage = 0.1 * self.od_reading
            payload = {
                "od_raw": {"1": {"voltage": voltage, "angle": "90"}},
                "timestamp": "2021-06-06T15:08:12.081153",
            }

            publish(
                f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
                json.dumps(payload),
            )

    thread = RepeatedTimer(0.025, Mock180ODReadings()).start()

    with GrowthRateCalculator(unit=unit, experiment=experiment) as calc:
        time.sleep(35)
        assert calc.ekf.state_[1] > 0

    thread.cancel()

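# Note on Mock180ODReadings above: each call advances the simulated OD by one sampling
# interval (1 / samples_per_second = 5 s) of exponential growth at 0.1 / h, since
# exp(growth_rate / 60 / 60 / samples_per_second) = exp(growth_rate * 5 / 3600), so the
# EKF's growth-rate state (state_[1]) should settle on a positive value.
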
def test_scaling_works():
    publish(
        f"pioreactor/{unit}/{experiment}/od_normalization/median",
        json.dumps({"135/0": 0.5, "90/1": 0.8}),
        retain=True,
    )
    publish(
        f"pioreactor/{unit}/{experiment}/od_normalization/variance",
        json.dumps({"135/0": 1e-6, "90/1": 1e-4}),
        retain=True,
    )
    publish(
        f"pioreactor/{unit}/{experiment}/od_raw_batched",
        '{"135/0": 0.5, "90/1": 0.8}',
        retain=True,
    )
    publish(f"pioreactor/{unit}/{experiment}/growth_rate", "", retain=True)

    calc = GrowthRateCalculator(unit=unit, experiment=experiment)

    publish(f"pioreactor/{unit}/{experiment}/od_raw_batched", '{"135/0": 0.51, "90/1": 0.82}')
    publish(f"pioreactor/{unit}/{experiment}/od_raw_batched", '{"135/0": 0.51, "90/1": 0.83}')
    publish(f"pioreactor/{unit}/{experiment}/od_raw_batched", '{"135/0": 0.51, "90/1": 0.84}')
    pause()

    assert calc.od_normalization_factors == {"90/1": 0.8, "135/0": 0.5}
    assert (
        (
            calc.ekf.observation_noise_covariance
            - 30 * np.array([[1e-4 / 0.8**2, 0], [0, 1e-6 / 0.5**2]])
        )
        < 1e-7
    ).all()

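# The covariance check above is consistent with scaling each channel's published
# variance by the square of its normalization factor (an assumption inferred from the
# expected matrix, not from the implementation):
#   diag(1e-4 / 0.8**2, 1e-6 / 0.5**2), multiplied by the factor of 30 used in the test.
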
def test_restart(self) -> None:
    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    config["od_config.photodiode_channel"]["1"] = "90"
    config["od_config.photodiode_channel"]["2"] = "135"
    config["od_config.photodiode_channel"]["3"] = "90"

    # channels 1, 2, and 3 each need a cached mean and variance
    with local_persistant_storage("od_normalization_mean") as cache:
        cache[experiment] = json.dumps({1: 1, 2: 1, 3: 1})

    with local_persistant_storage("od_normalization_variance") as cache:
        cache[experiment] = json.dumps({1: 1, 2: 1, 3: 1})

    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["1", "2", "3"],
            [1.15, 0.93, 1.0],
            ["90", "135", "90"],
            timestamp="2010-01-01 12:00:15",
        ),
        retain=True,
    )

    with local_persistant_storage("od_normalization_mean") as cache:
        cache[experiment] = '{"1": 1.15, "2": 0.93, "3": 1.0}'

    with local_persistant_storage("od_normalization_variance") as cache:
        cache[experiment] = '{"1": 1, "2": 1, "3": 1}'

    pause()

    calc1 = GrowthRateCalculator(unit=unit, experiment=experiment)

    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["1", "2", "3"],
            [1.151, 0.931, 1.1],
            ["90", "135", "90"],
            timestamp="2010-01-01 12:00:20",
        ),
    )
    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["1", "2", "3"],
            [1.152, 0.932, 1.2],
            ["90", "135", "90"],
            timestamp="2010-01-01 12:00:25",
        ),
    )
    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["1", "2", "3"],
            [1.153, 0.933, 1.3],
            ["90", "135", "90"],
            timestamp="2010-01-01 12:00:30",
        ),
    )
    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["1", "2", "3"],
            [1.154, 0.934, 1.4],
            ["90", "135", "90"],
            timestamp="2010-01-01 12:00:35",
        ),
    )
    pause()

    assert calc1.ekf.state_[-1] != 0
    calc1.set_state(calc1.DISCONNECTED)

    calc2 = GrowthRateCalculator(unit=unit, experiment=experiment)
    pause()
    assert calc2.initial_growth_rate != 0
    calc2.set_state(calc2.DISCONNECTED)

def test_kalman_filter_entries() -> None:
    config["storage"]["database"] = "test.sqlite"
    config["od_config"]["samples_per_second"] = "0.2"

    unit = "unit"
    exp = "exp"

    def parse_kalman_filter_outputs(topic, payload) -> dict:
        metadata, _ = m2db.produce_metadata(topic)
        payload = json.loads(payload)
        return {
            "experiment": metadata.experiment,
            "pioreactor_unit": metadata.pioreactor_unit,
            "timestamp": current_utc_time(),
            "state": json.dumps(payload["state"]),
            "covariance_matrix": json.dumps(payload["covariance_matrix"]),
        }

    # init the database
    connection = sqlite3.connect(config["storage"]["database"])
    cursor = connection.cursor()
    cursor.executescript(
        """
        CREATE TABLE IF NOT EXISTS kalman_filter_outputs (
            timestamp          TEXT NOT NULL,
            pioreactor_unit    TEXT NOT NULL,
            experiment         TEXT NOT NULL,
            state              TEXT NOT NULL,
            covariance_matrix  TEXT NOT NULL
        );
        """
    )
    connection.commit()

    # turn on data collection
    interval = 0.5
    od = start_od_reading(
        od_angle_channel1="135",
        od_angle_channel2="90",
        sampling_rate=interval,
        fake_data=True,
        unit=unit,
        experiment=exp,
    )

    with local_persistant_storage("od_normalization_mean") as cache:
        cache[exp] = json.dumps({"1": 0.5, "2": 0.8})

    with local_persistant_storage("od_normalization_variance") as cache:
        cache[exp] = json.dumps({"1": 1e-6, "2": 1e-4})

    gr = GrowthRateCalculator(unit=unit, experiment=exp)

    # turn on our mqtt to db
    parsers = [
        m2db.TopicToParserToTable(
            "pioreactor/+/+/growth_rate_calculating/kalman_filter_outputs",
            parse_kalman_filter_outputs,
            "kalman_filter_outputs",
        )
    ]
    m = m2db.MqttToDBStreamer(parsers, unit=unit, experiment=exp)

    # let data collect
    time.sleep(10)

    cursor.execute("SELECT * FROM kalman_filter_outputs WHERE experiment = ?", (exp,))
    results = cursor.fetchall()
    assert len(results) > 0

    cursor.execute(
        'SELECT json_array_length("state"), json_array_length("covariance_matrix"), json("covariance_matrix") FROM kalman_filter_outputs WHERE experiment = ? ORDER BY timestamp DESC LIMIT 1',
        (exp,),
    )
    results = cursor.fetchone()
    assert (
        results[0] == 3
    )  # why 3? growth rate, od filtered, and acceleration are the three hidden states
    assert results[1] == 3
    assert np.array(json.loads(results[2])).shape == (3, 3)

    od.set_state(od.DISCONNECTED)
    gr.set_state(gr.DISCONNECTED)
    m.set_state(m.DISCONNECTED)

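# Schema note for the query above (which uses SQLite's JSON1 functions): "state" is
# stored as a JSON array of the 3 hidden states (growth rate, filtered OD, acceleration)
# and "covariance_matrix" as the corresponding 3x3 matrix, so json_array_length()
# returns 3 for both columns and the decoded matrix has shape (3, 3).
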
def test_shock_from_dosing_works(self) -> None:
    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    with local_persistant_storage("od_normalization_mean") as cache:
        cache[experiment] = json.dumps({"1": 0.5, "2": 0.8})

    with local_persistant_storage("od_normalization_variance") as cache:
        cache[experiment] = json.dumps({"1": 8.2e-07, "2": 8.2e-07})

    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["1", "2"],
            [0.5, 0.8],
            ["90,90", "135,45"],
            timestamp="2010-01-01 12:00:35",
        ),
        retain=True,
    )

    calc = GrowthRateCalculator(unit=unit, experiment=experiment)

    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["1", "2"],
            [0.51, 0.82],
            ["90,90", "135,45"],
            timestamp="2010-01-01 12:00:40",
        ),
    )
    pause()
    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["1", "2"],
            [0.51, 0.82],
            ["90,90", "135,45"],
            timestamp="2010-01-01 12:00:45",
        ),
    )
    pause()

    previous_covariance_matrix = calc.ekf.covariance_.copy()

    # trigger dosing events, which change the "regime"
    publish(
        f"pioreactor/{unit}/{experiment}/dosing_events",
        json.dumps({"source_of_event": "algo", "event": "add_media", "volume_change": 1.0}),
    )
    pause()
    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["1", "2"],
            [0.49, 0.80],
            ["90,90", "135,45"],
            timestamp="2010-01-01 12:00:50",
        ),
    )
    pause()
    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["1", "2"],
            [0.48, 0.80],
            ["90,90", "135,45"],
            timestamp="2010-01-01 12:00:55",
        ),
    )
    pause()

    assert not np.array_equal(previous_covariance_matrix, calc.ekf.covariance_)

    publish(
        f"pioreactor/{unit}/{experiment}/dosing_events",
        json.dumps({"source_of_event": "algo", "event": "add_media", "volume_change": 1.0}),
    )
    pause()
    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["1", "2"],
            [0.40, 0.70],
            ["90,90", "135,45"],
            timestamp="2010-01-01 12:02:00",
        ),
    )
    pause()

    time.sleep(8)
    assert calc.ekf._currently_scaling_covariance
    assert not np.array_equal(previous_covariance_matrix, calc.ekf.covariance_)

    time.sleep(10)
    pause()

    # should revert back
    assert not calc.ekf._currently_scaling_covariance
    assert_array_equal(calc.ekf.covariance_, previous_covariance_matrix)

    calc.set_state(calc.DISCONNECTED)

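# What the test above exercises: a dosing event changes the OD "regime", so the EKF
# temporarily scales its covariance (_currently_scaling_covariance is True shortly after
# the event, and covariance_ differs from the pre-event copy) and then, once the scaling
# window passes, reverts to the pre-event covariance per the final assert_array_equal.
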
def test_subscribing(self) -> None:
    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    with local_persistant_storage("od_normalization_mean") as cache:
        cache[experiment] = json.dumps({1: 1, 2: 1})

    with local_persistant_storage("od_normalization_variance") as cache:
        cache[experiment] = json.dumps({1: 1, 2: 1})

    with local_persistant_storage("growth_rate") as cache:
        cache[experiment] = str(1.0)

    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["2", "1"], [0.9, 1.1], ["135", "90"], timestamp="2010-01-01 12:00:00"
        ),
        retain=True,
    )

    calc = GrowthRateCalculator(unit=unit, experiment=experiment)
    pause()

    assert calc.initial_growth_rate == 1.0

    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["1", "2"], [1.12, 0.88], ["90", "135"], timestamp="2010-01-01 12:00:05"
        ),
    )
    pause()

    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["2", "1"], [0.87, 1.14], ["135", "90"], timestamp="2010-01-01 12:00:05"
        ),
    )
    pause()

    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["2", "1"], [0.85, 1.16], ["135", "90"], timestamp="2010-01-01 12:00:05"
        ),
    )
    pause()

    assert calc.ekf is not None

    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["1", "2"], [1.14, 0.92], ["90", "135"], timestamp="2010-01-01 12:00:10"
        ),
    )
    publish(
        f"pioreactor/{unit}/{experiment}/dosing_events",
        '{"volume_change": "1.5", "event": "add_media", "source_of_event": "test"}',
    )
    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["1", "2"], [1.15, 0.93], ["90", "135"], timestamp="2010-01-01 12:00:15"
        ),
    )
    pause()

    assert calc.ekf.state_ is not None
    calc.set_state(calc.DISCONNECTED)

        None,
        retain=True,
    )
    publish(f"pioreactor/{unit}/{exp}/od_normalization/mean", None, retain=True)
    publish(f"pioreactor/{unit}/{exp}/od_normalization/variance", None, retain=True)

    start_time = time.time()

    od = ODReader(
        channel_label_map={"A0": "90/0", "A1": "90/1"},
        sampling_rate=interval_for_testing,
        unit=unit,
        experiment=exp,
        fake_data=True,
        stop_IR_led_between_ADC_readings=False,
    )
    calc = GrowthRateCalculator(unit=unit, experiment=exp)

    actual_grs = []
    estimated_grs = []

    def append_actual_growth_rates(msg):
        actual_grs.append(float(msg.payload))

    def append_estimated_growth_rates(msg):
        actual_grs.append(od.adc_reader.analog_in[0][1].gr)
        estimated_grs.append(float(msg.payload))

    c1 = subscribe_and_callback(append_actual_growth_rates, "pioreactor/mock/0/actual_gr")
    c2 = subscribe_and_callback(