Example #1
def run(automation=None,
        duration=None,
        sensor="135/0",
        skip_first_run=False,
        **kwargs):
    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    try:

        kwargs["duration"] = duration
        kwargs["unit"] = unit
        kwargs["experiment"] = experiment
        kwargs["sensor"] = sensor
        kwargs["skip_first_run"] = skip_first_run

        controller = DosingController(automation, **kwargs)  # noqa: F841

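        # keep the main thread alive: signal.pause() only returns when a signal
        # (e.g. SIGTERM) arrives, while the controller does its work in background callbacks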
        while True:
            signal.pause()

    except Exception as e:
        logging.getLogger("dosing_automation").debug(f"{str(e)}",
                                                     exc_info=True)
        logging.getLogger("dosing_automation").error(f"{str(e)}")
        raise e
Example #2
def test_error_in_subscribe_and_callback_is_logged() -> None:
    class TestJob(BackgroundJob):
        def __init__(self, *args, **kwargs) -> None:
            super(TestJob, self).__init__(*args, **kwargs)
            self.start_passive_listeners()

        def start_passive_listeners(self) -> None:
            self.subscribe_and_callback(self.callback,
                                        "pioreactor/testing/subscription")

        def callback(self, msg: MQTTMessage) -> None:
            print(1 / 0)

    error_logs = []

    def collect_error_logs(msg: MQTTMessage) -> None:
        if "ERROR" in msg.payload.decode():
            error_logs.append(msg)

    subscribe_and_callback(
        collect_error_logs,
        f"pioreactor/{get_unit_name()}/{get_latest_experiment_name()}/logs/app",
    )

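    # the ZeroDivisionError raised inside TestJob.callback should surface as an ERROR log on MQTT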
    with TestJob(job_name="job",
                 unit=get_unit_name(),
                 experiment=get_latest_experiment_name()):
        publish("pioreactor/testing/subscription", "test")
        pause()
        pause()
        assert len(error_logs) > 0
        assert "division by zero" in error_logs[0].payload.decode()
Example #3
def test_custom_class_will_register_and_run() -> None:
    class NaiveTurbidostat(DosingAutomation):

        automation_name = "naive_turbidostat"
        published_settings = {
            "target_od": {
                "datatype": "float",
                "settable": True,
                "unit": "AU"
            },
            "duration": {
                "datatype": "float",
                "settable": True,
                "unit": "min"
            },
        }

        def __init__(self, target_od: float, **kwargs: Any) -> None:
            super(NaiveTurbidostat, self).__init__(**kwargs)
            self.target_od = target_od

        def execute(self) -> None:
            if self.latest_od > self.target_od:
                self.execute_io_action(media_ml=1.0, waste_ml=1.0)

    algo = DosingController(
        "naive_turbidostat",
        target_od=2.0,
        duration=10,
        unit=get_unit_name(),
        experiment=get_latest_experiment_name(),
    )
    algo.set_state(algo.DISCONNECTED)
Example #4
def od_reading(
    od_angle_channel,
    sampling_rate=1 / float(config["od_config.od_sampling"]["samples_per_second"]),
    fake_data=False,
):

    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    channel_label_map = {}
    for input_ in od_angle_channel:
        angle, channel = input_.split(",")

        # We split input of the form ["135,0", "135,1", "90,3"] into the form
        # "135/0", "135/1", "90/3"
        angle_label = f"{angle}/{channel}"
        channel_label_map[int(channel)] = angle_label

    ODReader(
        channel_label_map,
        sampling_rate=sampling_rate,
        unit=unit,
        experiment=experiment,
        fake_data=fake_data,
    )

    signal.pause()
Example #5
def click_od_normalization(n_samples):
    """
    Compute statistics about the OD time series
    """
    unit = get_unit_name()
    experiment = get_latest_experiment_name()
    click.echo(od_normalization(unit, experiment, n_samples=n_samples))
Example #6
def click_od_normalization(od_angle_channel):
    """
    Compute statistics about the OD time series
    """
    unit = get_unit_name()
    experiment = get_latest_experiment_name()
    od_normalization(od_angle_channel, unit, experiment)
Example #7
def test_jobs_connecting_and_disconnecting_will_still_log_to_mqtt() -> None:
    # see note in base.py about create_logger

    unit: str = get_unit_name()
    exp: str = get_latest_experiment_name()

    results = []

    def cb(msg: MQTTMessage) -> None:
        if "WARNING" in msg.payload.decode():
            results.append([msg.payload])

    subscribe_and_callback(cb, f"pioreactor/{unit}/{exp}/logs/app")

    bj = BackgroundJob(job_name="job", unit=unit, experiment=exp)
    bj.logger.warning("test1")

    # disconnect, which should clear logger handlers (but may not...)
    bj.set_state(bj.DISCONNECTED)

    bj = BackgroundJob(job_name="job", unit=unit, experiment=exp)
    bj.logger.warning("test2")

    pause()
    pause()
    assert len(results) == 2
    bj.set_state(bj.DISCONNECTED)
Example #8
def test_editing_readonly_attr_via_mqtt() -> None:
    class TestJob(BackgroundJob):

        published_settings = {
            "readonly_attr": {
                "datatype": "float",
                "settable": False,
            },
        }

    warning_logs = []

    def collect_logs(msg: MQTTMessage) -> None:
        if "readonly" in msg.payload.decode():
            warning_logs.append(msg)

    subscribe_and_callback(
        collect_logs,
        f"pioreactor/{get_unit_name()}/{get_latest_experiment_name()}/logs/app",
    )

    with TestJob(job_name="job",
                 unit=get_unit_name(),
                 experiment=get_latest_experiment_name()):
        publish(
            f"pioreactor/{get_unit_name()}/{get_latest_experiment_name()}/job/readonly_attr/set",
            1.0,
        )
        pause()

    assert len(warning_logs) > 0
Example #9
    def test_od_blank_being_non_zero(self) -> None:
        unit = get_unit_name()
        experiment = get_latest_experiment_name()
        with local_persistant_storage("od_blank") as cache:
            cache[experiment] = json.dumps({"1": 0.25, "2": 0.4})

        with local_persistant_storage("od_normalization_mean") as cache:
            cache[experiment] = json.dumps({"1": 0.5, "2": 0.8})

        with local_persistant_storage("od_normalization_variance") as cache:
            cache[experiment] = json.dumps({"1": 1e-6, "2": 1e-4})

        calc = GrowthRateCalculator(unit=unit, experiment=experiment)

        pause()
        pause()

        publish(
            f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
            create_od_raw_batched_json(
                ["1", "2"], [0.50, 0.80], ["90", "135"], timestamp="2010-01-01 12:02:00"
            ),
            retain=True,
        )

        pause()
        pause()

        assert calc.od_normalization_factors == {"2": 0.8, "1": 0.5}
        assert calc.od_blank == {"2": 0.4, "1": 0.25}
        results = calc.scale_raw_observations({"2": 1.0, "1": 0.6})
        print(results)
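        # expected values assuming scaling of the form (od - blank) / (mean - blank):
        # "2": (1.0 - 0.4) / (0.8 - 0.4) = 1.5,  "1": (0.6 - 0.25) / (0.5 - 0.25) = 1.4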
        assert abs(results["2"] - 1.5) < 0.00001
        assert abs(results["1"] - 1.4) < 0.00001
        calc.set_state(calc.DISCONNECTED)
Example #10
    def setup_class(cls) -> None:
        # clear the caches and MQTT
        experiment = get_latest_experiment_name()

        config["od_config.photodiode_channel"]["1"] = "90"
        config["od_config.photodiode_channel"]["2"] = None

        with local_persistant_storage("od_blank") as cache:
            if experiment in cache:
                del cache[experiment]

        with local_persistant_storage("od_normalization_mean") as cache:
            if experiment in cache:
                del cache[experiment]

        with local_persistant_storage("od_normalization_variance") as cache:
            if experiment in cache:
                del cache[experiment]

        with local_persistant_storage("growth_rate") as cache:
            if experiment in cache:
                del cache[experiment]

        with local_persistant_storage("od_filtered") as cache:
            if experiment in cache:
                del cache[experiment]
Example #11
def update_settings(ctx, job: str, units: tuple[str, ...]) -> None:
    """
    pios update-settings stirring --duty_cycle 10

    """

    exp = get_latest_experiment_name()
    extra_args = {
        ctx.args[i][2:]: ctx.args[i + 1]
        for i in range(0, len(ctx.args), 2)
    }
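    # e.g. `pios update-settings stirring --duty_cycle 10` gives extra_args == {"duty_cycle": "10"}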

    if "unit" in extra_args:
        click.echo("Did you mean to use 'units' instead of 'unit'? Exiting.",
                   err=True)
        sys.exit(1)

    assert len(extra_args) > 0

    from pioreactor.pubsub import publish

    def _thread_function(unit: str) -> bool:
        for (setting, value) in extra_args.items():
            publish(f"pioreactor/{unit}/{exp}/{job}/{setting}/set", value)
        return True

    units = universal_identifier_to_all_active_workers(units)
    with ThreadPoolExecutor(max_workers=len(units)) as executor:
        results = executor.map(_thread_function, units)

    if not all(results):
        sys.exit(1)
Example #12
    def test_end_to_end(self) -> None:

        config["od_config.photodiode_channel"]["1"] = "90"
        config["od_config.photodiode_channel"]["2"] = "135"

        unit = get_unit_name()
        experiment = get_latest_experiment_name()

        interval = 0.1
        config["od_config"]["samples_per_second"] = "0.2"

        od = start_od_reading(
            "135",
            "90",
            sampling_rate=interval,
            unit=unit,
            experiment=experiment,
            fake_data=True,
        )

        st = start_stirring(target_rpm=500, unit=unit, experiment=experiment)

        calc = GrowthRateCalculator(unit=unit, experiment=experiment)

        time.sleep(35)
        assert calc.ekf.state_[-2] != 1.0
        calc.set_state(calc.DISCONNECTED)
        st.set_state(st.DISCONNECTED)
        od.set_state(od.DISCONNECTED)
Example #13
    def test_single_observation(self) -> None:
        unit = get_unit_name()
        experiment = get_latest_experiment_name()

        with local_persistant_storage("od_normalization_mean") as cache:
            cache[experiment] = json.dumps({1: 1})

        with local_persistant_storage("od_normalization_variance") as cache:
            cache[experiment] = json.dumps({1: 1})

        publish(
            f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
            create_od_raw_batched_json(
                ["1"], [1.153], ["90"], timestamp="2010-01-01 12:00:30"
            ),
            retain=True,
        )

        calc = GrowthRateCalculator(unit=unit, experiment=experiment)

        publish(
            f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
            create_od_raw_batched_json(
                ["1"], [1.155], ["90"], timestamp="2010-01-01 12:00:35"
            ),
        )
        pause()

        assert True
        calc.set_state(calc.DISCONNECTED)
Example #14
def test_bad_key_in_published_settings() -> None:
    class TestJob(BackgroundJob):

        published_settings = {
            "some_key": {
                "datatype": "float",
                "units": "%",  # type: ignore
                "settable": True,
            },  # units is wrong, should be unit.
        }

        def __init__(self, *args, **kwargs) -> None:
            super(TestJob, self).__init__(*args, **kwargs)

    warning_logs = []

    def collect_warning_logs(msg: MQTTMessage) -> None:
        if "WARNING" in msg.payload.decode():
            warning_logs.append(msg)

    subscribe_and_callback(
        collect_warning_logs,
        f"pioreactor/{get_unit_name()}/{get_latest_experiment_name()}/logs/app",
    )

    with TestJob(job_name="job",
                 unit=get_unit_name(),
                 experiment=get_latest_experiment_name()):
        pause()
        pause()
        assert len(warning_logs) > 0
        assert "Found extra property" in warning_logs[0].payload.decode()
Example #15
def click_led_intensity(channel, intensity, source_of_event):
    """
    Modify the intensity of an LED
    """
    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    return led_intensity(channel, intensity, source_of_event, unit, experiment)
Example #16
def start_temperature_control(automation_name: str,
                              **kwargs) -> TemperatureController:
    return TemperatureController(
        automation_name=automation_name,
        unit=get_unit_name(),
        experiment=get_latest_experiment_name(),
        **kwargs,
    )
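# usage sketch (automation name and kwargs are illustrative):
#   start_temperature_control("thermostat", target_temperature=30.0)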
Example #17
def kill(job: str, units: tuple[str, ...], all_jobs: bool, y: bool) -> None:
    """
    Send a SIGTERM signal to JOB. JOB can be any Pioreactor job name, like "stirring".
    Example:

    > pios kill stirring


    multiple jobs accepted:

    > pios kill stirring dosing_control


    Kill all worker jobs (i.e. this excludes leader jobs like watchdog). Ignores `job` argument.

    > pios kill --all


    """
    from sh import ssh  # type: ignore

    if not y:
        confirm = input(
            f"Confirm killing {str(job) if (not all_jobs) else 'all jobs'} on {units}? Y/n: "
        ).strip()
        if confirm != "Y":
            return

    command = f"pio kill {' '.join(job)}"
    command += "--all-jobs" if all_jobs else ""

    logger = create_logger("CLI",
                           unit=get_unit_name(),
                           experiment=get_latest_experiment_name())

    def _thread_function(unit: str):
        logger.debug(f"Executing `{command}` on {unit}.")
        try:
            ssh(unit, command)
            if all_jobs:  # tech debt
                ssh(
                    unit,
                    "pio run led_intensity --intensity 0 --channel A --channel B --channel C --channel D --no-log",
                )
            return True

        except Exception as e:
            logger.debug(e, exc_info=True)
            logger.error(f"Unable to connect to unit {unit}.")
            return False

    units = universal_identifier_to_all_active_workers(units)
    with ThreadPoolExecutor(max_workers=len(units)) as executor:
        results = executor.map(_thread_function, units)

    if not all(results):
        sys.exit(1)
Example #18
def click_remove_waste(ml, duration, duty_cycle, source_of_event):
    """
    Remove waste/media from unit
    """
    unit = get_unit_name()
    experiment = get_latest_experiment_name()
    signal.signal(signal.SIGTERM, clean_up_gpio)

    return remove_waste(ml, duration, duty_cycle, source_of_event, unit,
                        experiment)
Example #19
def click_add_media(ml, duration, duty_cycle, source_of_event):
    """
    Add media to unit
    """
    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    signal.signal(signal.SIGTERM, clean_up_gpio)

    return add_media(ml, duration, duty_cycle, source_of_event, unit,
                     experiment)
Example #20
def _thread_function(unit: str) -> bool:
    click.echo(f"Executing `{core_command}` on {unit}.")
    try:
        ssh(unit, command)
        return True
    except Exception as e:
        logger = create_logger("CLI",
                               unit=get_unit_name(),
                               experiment=get_latest_experiment_name())
        logger.debug(e, exc_info=True)
        logger.error(f"Unable to connect to unit {unit}.")
        return False
Example #21
def test_state_transition_callbacks() -> None:
    class TestJob(BackgroundJob):
        called_on_init = False
        called_on_ready = False
        called_on_sleeping = False
        called_on_ready_to_sleeping = False
        called_on_sleeping_to_ready = False
        called_on_init_to_ready = False

        def __init__(self, unit: str, experiment: str) -> None:
            super(TestJob, self).__init__(job_name="testjob",
                                          unit=unit,
                                          experiment=experiment)

        def on_init(self) -> None:
            self.called_on_init = True

        def on_ready(self) -> None:
            self.called_on_ready = True

        def on_sleeping(self) -> None:
            self.called_on_sleeping = True

        def on_ready_to_sleeping(self) -> None:
            self.called_on_ready_to_sleeping = True

        def on_sleeping_to_ready(self) -> None:
            self.called_on_sleeping_to_ready = True

        def on_init_to_ready(self) -> None:
            self.called_on_init_to_ready = True

    unit, exp = get_unit_name(), get_latest_experiment_name()
    with TestJob(unit, exp) as tj:
        assert tj.called_on_init
        assert tj.called_on_init_to_ready
        assert tj.called_on_ready
        publish(f"pioreactor/{unit}/{exp}/{tj.job_name}/$state/set",
                tj.SLEEPING)
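        # the job should transition READY -> SLEEPING, firing on_ready_to_sleeping and on_sleeping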
        pause()
        pause()
        pause()
        pause()
        assert tj.called_on_ready_to_sleeping
        assert tj.called_on_sleeping

        publish(f"pioreactor/{unit}/{exp}/{tj.job_name}/$state/set", tj.READY)
        pause()
        pause()
        pause()
        pause()
        assert tj.called_on_sleeping_to_ready
Example #22
def growth_rate_calculating(ignore_cache):
    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    try:
        calculator = GrowthRateCalculator(  # noqa: F841
            ignore_cache=ignore_cache,
            unit=unit,
            experiment=experiment)
        while True:
            signal.pause()
    except Exception as e:
        logging.getLogger(JOB_NAME).error(f"{str(e)}")
        raise e
Example #23
def test_local_cache_is_updated() -> None:

    channel: LedChannel = "B"

    unit = get_unit_name()
    exp = get_latest_experiment_name()

    assert led_intensity(channels=channel,
                         intensities=20,
                         unit=unit,
                         experiment=exp)

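    # a successful call should persist the new intensity in the local "leds" cache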
    with local_intermittent_storage("leds") as cache:
        assert float(cache["B"]) == 20
Example #24
def click_growth_rate_calculating(ignore_cache):
    """
    Start calculating growth rate
    """
    import os

    os.nice(1)

    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    calculator = GrowthRateCalculator(  # noqa: F841
        ignore_cache=ignore_cache, unit=unit, experiment=experiment
    )
    calculator.block_until_disconnected()
Example #25
    def test_zero_blank_and_zero_od_coming_in(self) -> None:
        unit = get_unit_name()
        experiment = get_latest_experiment_name()
        with local_persistant_storage("od_normalization_mean") as cache:
            cache[experiment] = json.dumps({"1": 0})

        with local_persistant_storage("od_normalization_variance") as cache:
            cache[experiment] = json.dumps({"1": 0})

        with local_persistant_storage("od_blank") as cache:
            cache[experiment] = json.dumps({"1": 0})

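        # with mean and blank both 0, normalizing the first observation divides by zero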
        with pytest.raises(ZeroDivisionError):
            with GrowthRateCalculator(unit=unit, experiment=experiment):
                ...
Example #26
def test_bad_setting_name_in_published_settings() -> None:
    class TestJob(BackgroundJob):

        published_settings = {
            "some--!4key": {
                "datatype": "float",
                "settable": True,
            },
        }

        def __init__(self, *args, **kwargs) -> None:
            super(TestJob, self).__init__(*args, **kwargs)

    with pytest.raises(ValueError):
        TestJob(job_name="job",
                unit=get_unit_name(),
                experiment=get_latest_experiment_name())
Example #27
    def test_90_angle(self) -> None:
        import json
        import numpy as np
        from pioreactor.utils.timing import RepeatedTimer

        unit = get_unit_name()
        experiment = get_latest_experiment_name()
        samples_per_second = 0.2
        config["od_config"]["samples_per_second"] = str(samples_per_second)
        config["od_config.photodiode_channel"]["1"] = "90"
        config["od_config.photodiode_channel"]["2"] = None

        with local_persistant_storage("od_normalization_mean") as cache:
            cache[experiment] = json.dumps({"1": 0.1})

        with local_persistant_storage("od_normalization_variance") as cache:
            cache[experiment] = json.dumps({"1": 8.2e-02})

        class Mock180ODReadings:

            growth_rate = 0.1
            od_reading = 1.0

            def __call__(self):
                self.od_reading *= np.exp(self.growth_rate / 60 / 60 / samples_per_second)

                voltage = 0.1 * self.od_reading
                payload = {
                    "od_raw": {"1": {"voltage": voltage, "angle": "90"}},
                    "timestamp": "2021-06-06T15:08:12.081153",
                }

                publish(
                    f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
                    json.dumps(payload),
                )

        thread = RepeatedTimer(0.025, Mock180ODReadings()).start()
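        # publish synthetic, exponentially growing OD readings every 25 ms in the background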

        with GrowthRateCalculator(unit=unit, experiment=experiment) as calc:
            time.sleep(35)

            assert calc.ekf.state_[1] > 0

        thread.cancel()
Example #28
    def test_od_blank_being_higher_than_observations(self) -> None:
        unit = get_unit_name()
        experiment = get_latest_experiment_name()
        with local_persistant_storage("od_blank") as cache:
            cache[experiment] = json.dumps({"1": 0.25, "2": 0.4})

        with local_persistant_storage("od_normalization_mean") as cache:
            cache[experiment] = json.dumps({"1": 0.5, "2": 0.8})

        with local_persistant_storage("od_normalization_variance") as cache:
            cache[experiment] = json.dumps({"1": 1e-6, "2": 1e-4})

        calc = GrowthRateCalculator(unit=unit, experiment=experiment)
        pause()

        pause()
        publish(
            f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
            create_od_raw_batched_json(
                ["1", "2"], [0.50, 0.80], ["90", "135"], timestamp="2010-01-01 12:02:00"
            ),
            retain=True,
        )
        pause()
        pause()
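        # now publish raw ODs (0.1) that fall below the stored blank values (0.25 and 0.4)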
        publish(
            f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
            create_od_raw_batched_json(
                ["1", "2"], [0.1, 0.1], ["90", "135"], timestamp="2010-01-01 12:02:05"
            ),
            retain=True,
        )
        pause()
        pause()
        pause()
        publish(
            f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
            create_od_raw_batched_json(
                ["1", "2"], [0.1, 0.1], ["90", "135"], timestamp="2010-01-01 12:02:10"
            ),
            retain=True,
        )
        pause()
        pause()
        calc.set_state(calc.DISCONNECTED)
Example #29
    def check_on_max(self, value: float) -> None:

        if value > 3.2:
            self.logger.error(
                f"An ADC channel is recording a very high voltage, {round(value, 2)}V. We are shutting down components and jobs to keep the ADC safe."
            )

            unit, exp = get_unit_name(), get_latest_experiment_name()

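            # clear any LED locks first so the intensity change below isn't blocked by another job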
            with local_intermittent_storage("led_locks") as cache:
                for c in ALL_LED_CHANNELS:
                    cache[c] = LED_UNLOCKED

            # turn off all LEDs that might be causing problems
            # however, ODReader may turn on the IR LED again.
            change_led_intensity(
                channels=ALL_LED_CHANNELS,
                intensities=[0] * len(ALL_LED_CHANNELS),
                source_of_event="ADCReader",
                unit=unit,
                experiment=exp,
                verbose=True,
            )

            publish(
                f"pioreactor/{unit}/{exp}/monitor/flicker_led_with_error_code",
                error_codes.ADC_INPUT_TOO_HIGH,
            )
            # kill ourselves - this will hopefully kill ODReader.
            # we have to send a signal since this is often called in a thread (RepeatedTimer)
            import os
            import signal

            os.kill(os.getpid(), signal.SIGTERM)
            return

        elif value > 3.1:
            self.logger.warning(
                f"An ADC channel is recording a very high voltage, {round(value, 2)}V. It's recommended to keep it less than 3.3V."
            )
            publish(
                f"pioreactor/{get_unit_name()}/{get_latest_experiment_name()}/monitor/flicker_led_with_error_code",
                error_codes.ADC_INPUT_TOO_HIGH,
            )
            return
Example #30
def stirring(duty_cycle=0, duration=None):
    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    try:
        stirrer = Stirrer(duty_cycle, unit=unit, experiment=experiment)
        stirrer.start_stirring()

        if duration is None:
            signal.pause()
        else:
            time.sleep(duration)

    except Exception as e:
        GPIO.cleanup()
        logger.error(f"failed with {str(e)}")
        raise e

    return