Example #1
    def check_heater_pcb_temperature(self) -> None:
        """
        Originally from #220
        """
        if is_testing_env():
            from pioreactor.utils.mock import MockTMP1075 as TMP1075
        else:
            from TMP1075 import TMP1075  # type: ignore

        try:
            tmp_driver = TMP1075()
        except ValueError:
            # No PCB detected using i2c - fine to exit.
            return

        observed_tmp = tmp_driver.get_temperature()

        if observed_tmp >= 62:
            # something is wrong - temperature_control should have detected this, but didn't, so it must have failed / incorrectly cleaned up.
            # we're going to just shutdown to be safe.
            from subprocess import call

            self.logger.error(
                f"Detected an extremely high temperature, {observed_tmp} ℃ on the heating PCB - shutting down for safety."
            )

            call("sudo shutdown --poweroff", shell=True)
        self.logger.debug(f"Heating PCB temperature at {observed_tmp} ℃.")
Example #2
def publish_to_pioreactor_cloud(endpoint: str, data=None, json=None):
    """
    Parameters
    ------------
    endpoint: the API endpoint to send the data to
    data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body.
    json: (optional) json data to send in the body.

    """
    from pioreactor.mureq import post
    from pioreactor.whoami import get_uuid, is_testing_env
    from pioreactor.utils.timing import current_utc_time

    if is_testing_env():
        return

    if json is not None:
        json["rpi_uuid"] = get_uuid()
        json["timestamp"] = current_utc_time()

    headers = {"Content-type": "application/json", "Accept": "text/plain"}
    try:
        post(
            f"https://cloud.pioreactor.com/{endpoint}",
            data=data,
            json=json,
            headers=headers,
        )
    except Exception:
        # best-effort publish: never let cloud reporting interrupt the caller
        pass
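A minimal usage sketch (the payload values are hypothetical; `rpi_uuid` and `timestamp` are appended automatically, per the code above):

# Hypothetical: share per-channel blank OD means with the cloud endpoint used below in Example #17.
publish_to_pioreactor_cloud("od_blank_mean", json={"1": 0.25, "2": 0.31})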
Example #3
    def publish_self_statistics(self) -> None:
        import psutil  # type: ignore

        if is_testing_env():
            return

        disk_usage_percent = round(psutil.disk_usage("/").percent)
        cpu_usage_percent = round(
            (psutil.cpu_percent() + psutil.cpu_percent() +
             psutil.cpu_percent()) / 3
        )  # this is a noisy process, and we average it over a small window.
        available_memory_percent = round(100 *
                                         psutil.virtual_memory().available /
                                         psutil.virtual_memory().total)

        with open("/sys/class/thermal/thermal_zone0/temp", "r") as f:
            cpu_temperature_celcius = round(int(f.read().strip()) / 1000)

        if disk_usage_percent <= 80:
            self.logger.debug(f"Disk space at {disk_usage_percent}%.")
        else:
            # TODO: add documentation to clear disk space.
            self.logger.warning(f"Disk space at {disk_usage_percent}%.")
            self.flicker_led_with_error_code(
                error_codes.DISK_IS_ALMOST_FULL_ERROR_CODE)

        if cpu_usage_percent <= 75:
            self.logger.debug(f"CPU usage at {cpu_usage_percent}%.")
        else:
            # TODO: add documentation
            self.logger.warning(f"CPU usage at {cpu_usage_percent}%.")

        if available_memory_percent >= 20:
            self.logger.debug(
                f"Available memory at {available_memory_percent}%.")
        else:
            # TODO: add documentation
            self.logger.warning(
                f"Available memory at {available_memory_percent}%.")

        if cpu_temperature_celcius <= 70:
            self.logger.debug(
                f"CPU temperature at {cpu_temperature_celcius} ℃.")
        else:
            # TODO: add documentation
            self.logger.warning(
                f"CPU temperature at {cpu_temperature_celcius} ℃.")

        self.publish(
            f"pioreactor/{self.unit}/{self.experiment}/{self.job_name}/computer_statistics",
            dumps({
                "disk_usage_percent": disk_usage_percent,
                "cpu_usage_percent": cpu_usage_percent,
                "available_memory_percent": available_memory_percent,
                "cpu_temperature_celcius": cpu_temperature_celcius,
            }),
        )
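A minimal consumer sketch for the statistics published above (MQTT wildcards stand in for the unit, experiment, and job name; `pubsub.subscribe` with a timeout is used the same way elsewhere in this collection):

import json

from pioreactor import pubsub

# Hypothetical: read one computer_statistics message, waiting up to 5 seconds.
msg = pubsub.subscribe("pioreactor/+/+/+/computer_statistics", timeout=5.0)
if msg is not None:
    stats = json.loads(msg.payload)
    # key spelling matches the publisher above
    print(stats["cpu_temperature_celcius"], "℃")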
Example #4
def click_od_reading(
    od_angle_channel1: pt.PdAngle, od_angle_channel2: pt.PdAngle, fake_data: bool
):
    """
    Start the optical density reading job
    """
    od = start_od_reading(
        od_angle_channel1,
        od_angle_channel2,
        fake_data=fake_data or is_testing_env(),
    )
    od.block_until_disconnected()
Example #5
    def update_ekf_variance_after_event(self, minutes: float, factor: float) -> None:
        if is_testing_env():
            # tests run on an accelerated clock: readings arrive every `interval`
            # seconds (published by adc_reader), and 12 readings stand in for one
            # simulated minute, hence minutes * 12 * interval seconds.
            msg = subscribe(
                f"pioreactor/{self.unit}/{self.experiment}/adc_reader/interval",
                timeout=1.0,
            )
            interval = float(msg.payload) if msg else 1.0
            self.ekf.scale_OD_variance_for_next_n_seconds(
                factor, minutes * (12 * interval)
            )
        else:
            # in production, convert minutes to seconds
            self.ekf.scale_OD_variance_for_next_n_seconds(factor, minutes * 60)
Example #6
    def check_for_power_problems(self) -> None:
        """
        Note: the `get_throttled` feature isn't available on the Raspberry Pi Zero.

        Sourced from https://github.com/raspberrypi/linux/pull/2397
         and https://github.com/N2Github/Proje
        """
        def status_to_human_readable(status) -> str:
            hr_status = []

            # if status & 0x40000:
            #     hr_status.append("Throttling has occurred.")
            # if status & 0x20000:
            #     hr_status.append("ARM frequency capping has occurred.")
            # if status & 0x10000:
            #     hr_status.append("Undervoltage has occurred.")
            if status & 0x4:
                hr_status.append("Active throttling")
            if status & 0x2:
                hr_status.append("Active ARM frequency capped")
            if status & 0x1:
                hr_status.append("Active undervoltage")

            hr_status.append(
                "Suggestion: use a larger external power supply. See docs at: https://pioreactor.com/pages/using-an-external-power-supply"
            )
            return ". ".join(hr_status)

        def currently_throttling(status: int) -> int:
            return (status & 0x2) or (status & 0x1) or (status & 0x4)

        def non_ignorable_status(status: int) -> int:
            return (status & 0x1) or (status & 0x4)

        if is_testing_env():
            return

        with open("/sys/devices/platform/soc/soc:firmware/get_throttled"
                  ) as file:
            status = int(file.read(), 16)

        if not currently_throttling(status):
            self.logger.debug("Power status okay.")
        else:
            self.logger.debug(
                f"Power status: {status_to_human_readable(status)}")
Example #7
from contextlib import contextmanager


@contextmanager
def local_persistant_storage(cache_name: str) -> Generator[pt.DbmMapping, None, None]:
    """
    Values stored in this storage will stay around between RPi restarts, and until overwritten
    or deleted.

    Examples
    ---------
    > with local_persistant_storage('od_blank') as cache:
    >     assert '1' in cache
    >     cache['1'] = str(0.5)

    """
    from pioreactor.whoami import is_testing_env

    # open the cache outside the try block: if open() itself fails, there is
    # no `cache` to close in the `finally` clause
    if is_testing_env():
        cache = ndbm.open(f".pioreactor/storage/{cache_name}", "c")
    else:
        cache = ndbm.open(f"/home/pi/.pioreactor/storage/{cache_name}", "c")

    try:
        yield cache  # type: ignore
    finally:
        cache.close()
Example #8
def test_ambient_light_interference(logger: Logger, unit: str, experiment: str) -> None:
    # test ambient light IR interference. With all LEDs off, and the Pioreactor not in a sunny room, we should see near 0 light.

    adc_reader = ADCReader(
        channels=ALL_PD_CHANNELS,
        dynamic_gain=False,
        initial_gain=16,
        fake_data=is_testing_env(),
    )

    adc_reader.setup_adc()

    led_intensity(
        ALL_LED_CHANNELS,
        intensities=[0] * len(ALL_LED_CHANNELS),
        unit=unit,
        source_of_event="self_test",
        experiment=experiment,
        verbose=False,
    )

    readings = adc_reader.take_reading()

    assert all(readings[pd_channel] < 0.005 for pd_channel in ALL_PD_CHANNELS), readings
Example #9
    def update_state_from_observation(self, message) -> None:

        if self.state != self.READY:
            return

        payload = json.loads(message.payload)
        observations = self.batched_raw_od_readings_to_dict(payload["od_raw"])
        scaled_observations = self.scale_raw_observations(observations)

        if is_testing_env():
            # when running a mock script, we run at an accelerated rate, but want to mimic
            # production.
            dt = self.expected_dt
        else:
            # TODO this should use the internal timestamp reference

            time_of_current_observation = datetime.strptime(
                payload["timestamp"], "%Y-%m-%dT%H:%M:%S.%fZ"
            )
            dt = (
                time_of_current_observation - self.time_of_previous_observation
            ).total_seconds() / 3600  # delta time, in hours
            self.time_of_previous_observation = time_of_current_observation

        try:
            updated_state = self.ekf.update(list(scaled_observations.values()), dt)
        except Exception as e:
            self.logger.debug(e, exc_info=True)
            self.logger.error(f"Updating Kalman Filter failed with {str(e)}")
            return
        else:

            # TODO: EKF values can be nans...

            latest_od_filtered, latest_growth_rate = updated_state[0], updated_state[1]

            self.growth_rate = {
                "growth_rate": latest_growth_rate,
                "timestamp": payload["timestamp"],
            }

            self.od_filtered = {
                "od_filtered": latest_od_filtered,
                "timestamp": payload["timestamp"],
            }

            with local_persistant_storage("growth_rate") as cache:
                cache[self.experiment] = str(latest_growth_rate)

            with local_persistant_storage("od_filtered") as cache:
                cache[self.experiment] = str(latest_od_filtered)

            self.publish(
                f"pioreactor/{self.unit}/{self.experiment}/{self.job_name}/kalman_filter_outputs",
                json.dumps(  # serialize: MQTT payloads must be str/bytes, not dicts
                    {
                        "state": self.format_list(updated_state.tolist()),
                        "covariance_matrix": [
                            self.format_list(x) for x in self.ekf.covariance_.tolist()
                        ],
                        "timestamp": payload["timestamp"],
                    }
                ),
                qos=QOS.EXACTLY_ONCE,
            )
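Downstream consumers can read the cached values back. A minimal sketch (the experiment name is a placeholder; dbm returns values as bytes, which float() accepts):

with local_persistant_storage("growth_rate") as cache:
    latest_growth_rate = float(cache["my_experiment"])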
Example #10
def brief_pause() -> None:
    if is_testing_env():
        return
    time.sleep(3)
Example #11
def od_normalization(
    unit: str,
    experiment: str,
    n_samples: int = 35
) -> tuple[dict[PdChannel, float], dict[PdChannel, float]]:
    from statistics import mean, variance

    action_name = "od_normalization"
    logger = create_logger(action_name)
    logger.debug("Starting OD normalization.")

    with publish_ready_to_disconnected_state(unit, experiment, action_name):

        # OD Reading must already be running (ignored in test mode).
        if not is_pio_job_running("od_reading") and not is_testing_env():
            logger.error(
                "OD Reading should be running. Run OD Reading first. Exiting."
            )
            raise exc.JobRequiredError(
                "OD Reading should be running. Run OD Reading first. Exiting."
            )

        # TODO: write tests for this
        def yield_from_mqtt() -> Generator[dict, None, None]:
            while True:
                msg = pubsub.subscribe(
                    f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
                    allow_retained=False,
                )
                if msg is None:
                    continue

                yield json.loads(msg.payload)

        signal = yield_from_mqtt()
        readings = defaultdict(list)

        for count, batched_reading in enumerate(signal, start=1):
            for (sensor, reading) in batched_reading["od_raw"].items():
                readings[sensor].append(reading["voltage"])

            pubsub.publish(
                f"pioreactor/{unit}/{experiment}/{action_name}/percent_progress",
                round(count / n_samples * 100),  # floor division here would report 0% until the last sample
            )
            logger.debug(f"Progress: {count/n_samples:.0%}")
            if count == n_samples:
                break

        variances = {}
        means = {}
        autocorrelations = {}  # lag 1

        for sensor, od_reading_series in readings.items():
            # take the variance of the residuals from a simple linear regression
            # against time, which removes any linear drift in the series - see issue #206
            variances[sensor] = variance(
                residuals_of_simple_linear_regression(
                    list(range(n_samples)), od_reading_series
                )
            )
            means[sensor] = mean(od_reading_series)
            autocorrelations[sensor] = correlation(od_reading_series[:-1],
                                                   od_reading_series[1:])

        with local_persistant_storage("od_normalization_mean") as cache:
            cache[experiment] = json.dumps(means)

        with local_persistant_storage("od_normalization_variance") as cache:
            cache[experiment] = json.dumps(variances)

        logger.debug(f"measured mean: {means}")
        logger.debug(f"measured variances: {variances}")
        logger.debug(f"measured autocorrelations: {autocorrelations}")
        logger.debug("OD normalization finished.")

        if config.getboolean(
                "data_sharing_with_pioreactor",
                "send_od_statistics_to_Pioreactor",
                fallback=False,
        ):

            add_on = {
                "ir_intensity": config["od_config"]["ir_intensity"],
            }

            pubsub.publish_to_pioreactor_cloud(
                "od_normalization_variance",
                json={
                    **variances,
                    **add_on,
                },  # TODO: this syntax changed in a recent python version...
            )
            pubsub.publish_to_pioreactor_cloud(
                "od_normalization_mean",
                json={
                    **means,
                    **add_on
                },
            )

        return means, variances
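A minimal read-back sketch for the cached statistics (the experiment name is a placeholder; json.loads accepts the bytes that dbm returns):

import json

with local_persistant_storage("od_normalization_mean") as cache:
    means = json.loads(cache["my_experiment"])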
Example #12
def get_plugins() -> dict[str, Plugin]:
    """
    This function is really time consuming...
    """

    # get entry point plugins
    # Users can use Python's entry point system to create rich plugins, see
    # example here: https://github.com/Pioreactor/pioreactor-air-bubbler
    eps = entry_points()
    pioreactor_plugins: tuple = eps.get("pioreactor.plugins", tuple())
    plugins: dict[str, Plugin] = {}
    for plugin in pioreactor_plugins:
        try:
            md = metadata(plugin.name)
            plugins[md["Name"]] = Plugin(
                plugin.load(),
                md["Summary"],
                md["Version"],
                md["Home-page"],
                md["Author"],
                "entry_points",
            )
        except Exception as e:
            print(f"{plugin.name} plugin load error: {e}")

    # get file-based plugins.
    # Users can put .py files into the MODULE_DIR folder below.
    # The below code will load it into Python, and treat it like any other plugin.
    # The authors can add metadata to their file with the following variables at the
    # highest level in the file:
    # __plugin_name__
    # __plugin_author__
    # __plugin_summary__
    # __plugin_version__
    # __plugin_homepage__
    BLANK = "Unknown"

    # The directory containing your modules needs to be on the search path.
    if is_testing_env():
        MODULE_DIR = "plugins_dev"
    else:
        MODULE_DIR = "/home/pi/.pioreactor/plugins"

    sys.path.append(MODULE_DIR)

    # Get the stem names (file name, without directory and '.py') of any
    # python files in your directory, load each module by name and run
    # the required function.
    py_files = glob.glob(os.path.join(MODULE_DIR, "*.py"))

    for py_file in py_files:
        module_name = pathlib.Path(py_file).stem
        module = importlib.import_module(module_name)
        plugins[getattr(module, "__plugin_name__", module_name)] = Plugin(
            module,
            getattr(module, "__plugin_summary__", BLANK),
            getattr(module, "__plugin_version__", BLANK),
            getattr(module, "__plugin_homepage__", BLANK),
            getattr(module, "__plugin_author__", BLANK),
            "plugins_folder",
        )

    return plugins
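A minimal file-based plugin sketch (a hypothetical file placed in the plugins folder described above, using the metadata variables get_plugins() looks for):

# /home/pi/.pioreactor/plugins/my_plugin.py - hypothetical example
__plugin_name__ = "my_plugin"
__plugin_author__ = "Jane Doe"
__plugin_summary__ = "An example plugin that does nothing."
__plugin_version__ = "0.0.1"
__plugin_homepage__ = "https://example.com"

# any top-level code below runs on import, i.e. when get_plugins() loads this file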
Example #13
    def power_down(*args) -> None:
        pass


class MockTMP1075:
    def __init__(*args) -> None:
        pass

    def get_temperature(self) -> float:
        import math
        import random
        import time

        # a slow sine wave centred near 25 ℃, plus a little noise
        return 3 * math.sin(0.1 * time.time() / 60) + 25 + 0.2 * random.random()


if am_I_active_worker() or is_testing_env():

    from rpi_hardware_pwm import HardwarePWM

    class MockHardwarePWM(HardwarePWM):
        def __init__(self, pwm_channel: int, hz: float) -> None:
            self.pwm_channel = pwm_channel
            self._hz = hz
            self.pwm_dir = ""

        def is_overlay_loaded(self) -> bool:
            return True

        def is_export_writable(self) -> bool:
            return True
Example #14
def led_intensity(
    channels: LedChannel | list[LedChannel],
    intensities: float | list[float],
    unit: str,
    experiment: str,
    verbose: bool = True,
    source_of_event: Optional[str] = None,
    pubsub_client: Optional[Client] = None,
) -> bool:
    """

    Parameters
    ------------
    channels: an LED channel or list of channels
    intensities: float or list of floats
        values between 0 and 100 to set the LED channel(s) to.
    verbose: bool
        if True, log the change, and send event to led_event table & mqtt. This is False
        in the od_reading job, so as to not create spam.
    pubsub_client:
        provide an MQTT paho client to use for publishing.

    Returns
    --------
    bool representing whether all LED channel intensities were successfully changed


    State is also updated in

    pioreactor/<unit>/<experiment>/led/<channel>/intensity   <intensity>

    and

    pioreactor/<unit>/<experiment>/leds/intensity    {'A': intensityA, 'B': intensityB, ...}

    """
    logger = create_logger("led_intensity", experiment=experiment, unit=unit)
    updated_successfully = True
    if not is_testing_env():
        from DAC43608 import DAC43608
    else:
        logger.debug("DAC43608 not available; using MockDAC43608")
        from pioreactor.utils.mock import MockDAC43608 as DAC43608  # type: ignore

    if pubsub_client is None:
        pubsub_client = create_client()

    channels, intensities = _list(channels), _list(intensities)

    if len(channels) != len(intensities):
        raise ValueError("channels must be the same length as intensities")

    # any locked channels?
    for channel in channels:
        if is_led_channel_locked(channel):
            updated_successfully = False
            logger.warning(
                f"Unable to update channel {channel} due to a lock on it. Please try again."
            )

    # remove locked channels:
    try:
        channels, intensities = zip(  # type: ignore
            *[
                (c, i)
                for c, i in zip(channels, intensities)
                if not is_led_channel_locked(c)
            ]
        )
    except ValueError:
        # every requested channel was locked, so the list above is empty and
        # zip(*[]) raises "ValueError: not enough values to unpack (expected 2, got 0)"
        return updated_successfully

    for channel, intensity in zip(channels, intensities):
        try:
            assert (
                0.0 <= intensity <= 100.0
            ), "intensity should be between 0 and 100, inclusive"
            assert (
                channel in ALL_LED_CHANNELS
            ), f"saw incorrect channel {channel}, not in {ALL_LED_CHANNELS}"
            intensity = float(intensity)

            dac = DAC43608()
            dac.power_up(getattr(dac, channel))
            dac.set_intensity_to(getattr(dac, channel), intensity / 100.0)

            if intensity == 0:
                # setting to 0 doesn't fully remove the current, there is some residual current. We turn off
                # the channel to guarantee no output.
                dac.power_down(getattr(dac, channel))

            pubsub_client.publish(
                f"pioreactor/{unit}/{experiment}/led/{channel}/intensity",
                intensity,
                qos=QOS.AT_MOST_ONCE,
                retain=True,
            )

        except ValueError as e:
            logger.debug(e, exc_info=True)
            logger.error(
                "Unable to find I²C for LED driver. Is the Pioreactor HAT attached to the Raspberry Pi? Is I²C enabled on the Raspberry Pi?"
            )
            updated_successfully = False
            return updated_successfully

    new_state, old_state = _update_current_state(channels, intensities)

    pubsub_client.publish(
        f"pioreactor/{unit}/{experiment}/leds/intensity",
        dumps(new_state),
        qos=QOS.AT_MOST_ONCE,
        retain=True,
    )

    if verbose:
        for channel, intensity in zip(channels, intensities):
            event = {
                "channel": channel,
                "intensity": intensity,
                "source_of_event": source_of_event,
                "timestamp": current_utc_time(),
            }

            pubsub_client.publish(
                f"pioreactor/{unit}/{experiment}/led_events",
                dumps(event),
                qos=QOS.AT_MOST_ONCE,
                retain=False,
            )

            logger.info(
                f"Updated LED {channel} from {old_state[channel]:0.3g}% to {new_state[channel]:0.3g}%."
            )

    return updated_successfully
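A minimal usage sketch (unit and experiment names are placeholders; channels "A" and "B" follow the docstring above):

# Hypothetical call: set channel A to 20% and turn channel B off.
led_intensity(
    channels=["A", "B"],
    intensities=[20.0, 0.0],
    unit="pioreactor1",
    experiment="my_experiment",
    source_of_event="example",
)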
Example #15
def get_config():
    """
    This function initializes the configuration logic for the Pioreactor cluster.

    Locally, `config.ini` configurations can be overwritten by `unit_config.ini` (hence the very
    specific order we use in `config.read`)

    We also insert some **dynamic** config sections: PWM_reverse and leds_reverse. Ex: `PWM` is
    designed for users to edit:

        [PWM]
        0=stirring
        1=heating
        2=alt_media
        3=waste
        4=media


    and `PWM_reverse` is easier for computers to access (Note this is not in the config.ini file, but only in memory)

        [PWM_reverse]
        stirring=0
        heating=1
        alt_media=2
        waste=3
        media=4


    """
    config = ConfigParserMod()

    if is_testing_env():
        global_config_path = "./config.dev.ini"
        local_config_path = ""
    else:
        global_config_path = "/home/pi/.pioreactor/config.ini"
        local_config_path = "/home/pi/.pioreactor/unit_config.ini"
        if not os.path.isfile(global_config_path):
            raise FileNotFoundError(
                "/home/pi/.pioreactor/config.ini is missing from this Pioreactor. Has it completed initializing? Does it need to connect to a leader?"
            )

    config_files = [global_config_path, local_config_path]

    try:
        config.read(config_files)
    except configparser.MissingSectionHeaderError as e:
        # This can happen when unit_config.ini on the leader (running as a worker) is
        # malformed: even after the leader's config.ini is fixed in the UI, `pios sync`
        # still reads the malformed unit_config.ini, so the fixed config can never be
        # deployed to replace it.
        print(
            "Bad config state. Check /home/pi/.pioreactor/unit_config.ini on leader for malformed configuration?"
        )
        raise e
    except configparser.DuplicateSectionError as e:
        print(e)
        raise e

    # some helpful additions - see docs above
    if "leds" in config:
        config["leds_reverse"] = config.invert_section("leds")
    if "PWM" in config:
        config["PWM_reverse"] = config.invert_section("PWM")

    return config
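A minimal sketch of the dynamic reverse sections described in the docstring (assuming the [PWM] section shown there):

config = get_config()

# forward lookup, as users write it in config.ini:
assert config["PWM"]["0"] == "stirring"

# reverse lookup, generated in memory for programs:
assert config["PWM_reverse"]["stirring"] == "0"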
Example #16
def test_all_positive_correlations_between_pds_and_leds(
    logger: Logger, unit: str, experiment: str
) -> None:
    """
    This tests that there is a positive correlation between the IR LED channel, and the photodiodes
    as defined in the config.ini.
    """
    from pprint import pformat

    INTENSITIES = list(
        range(10, 50, 5)
    )  # better to err on the side of MORE samples than less - it's only a few extra seconds...
    current_experiment_name = get_latest_experiment_name()
    results: dict[tuple[LedChannel, PdChannel], float] = {}

    adc_reader = ADCReader(
        channels=ALL_PD_CHANNELS,
        dynamic_gain=False,
        initial_gain=16,  # I think a small gain is okay, since we're only varying the lower end of LED intensity
        fake_data=is_testing_env(),
    ).setup_adc()

    # set all to 0, but use original experiment name, since we indeed are setting them to 0.
    led_intensity(
        ALL_LED_CHANNELS,
        intensities=[0] * len(ALL_LED_CHANNELS),
        unit=unit,
        source_of_event="self_test",
        experiment=current_experiment_name,
        verbose=False,
    )

    for led_channel in ALL_LED_CHANNELS:
        varying_intensity_results: dict[PdChannel, list[float]] = {
            pd_channel: [] for pd_channel in ALL_PD_CHANNELS
        }
        for intensity in INTENSITIES:
            # turn on the LED to set intensity
            led_intensity(
                led_channel,
                intensities=intensity,
                unit=unit,
                experiment=current_experiment_name,
                verbose=False,
                source_of_event="self_test",
            )

            # record from ADC, we'll average them
            readings1 = adc_reader.take_reading()
            readings2 = adc_reader.take_reading()

            # Add to accumulating list
            for pd_channel in ALL_PD_CHANNELS:
                varying_intensity_results[pd_channel].append(
                    0.5 * (readings1[pd_channel] + readings2[pd_channel])
                )

        # compute the linear correlation between the intensities and observed PD measurements
        for pd_channel in ALL_PD_CHANNELS:
            results[(led_channel, pd_channel)] = round(
                correlation(INTENSITIES, varying_intensity_results[pd_channel]), 2
            )

        # set back to 0
        led_intensity(
            led_channel,
            intensities=0,
            unit=unit,
            experiment=current_experiment_name,
            verbose=False,
            source_of_event="self_test",
        )

    logger.debug(f"Correlations between LEDs and PD:\n{pformat(results)}")
    detected_relationships = []
    for (led_channel, pd_channel), measured_correlation in results.items():
        if measured_correlation > 0.925:
            detected_relationships.append(
                (
                    config["leds"].get(led_channel, fallback=led_channel),
                    config["od_config.photodiode_channel"].get(
                        pd_channel, fallback=pd_channel
                    ),
                )
            )

    publish(
        f"pioreactor/{unit}/{experiment}/self_test/correlations_between_pds_and_leds",
        dumps(detected_relationships),
        retain=True,
    )

    # we require that the IR photodiodes defined in the config have a
    # correlation with the IR led
    pd_channels_to_test: list[PdChannel] = []
    for (channel, angle_or_ref) in config["od_config.photodiode_channel"].items():
        if angle_or_ref != "":
            channel = cast(PdChannel, channel)
            pd_channels_to_test.append(channel)

    ir_led_channel = config["leds_reverse"][IR_keyword]

    for ir_pd_channel in pd_channels_to_test:
        assert (
            results[(ir_led_channel, ir_pd_channel)] > 0.925
        ), f"missing {ir_led_channel} ⇝ {ir_pd_channel}"
Example #17
def od_blank(
    od_angle_channel1,
    od_angle_channel2,
    n_samples: int = 30,
):
    """
    Compute the sample average of the photodiodes attached.

    Note that because of the sensitivity of the growth rate (and normalized OD) to the starting values,
    we need a very accurate estimate of these statistics.

    """
    from statistics import mean, variance

    action_name = "od_blank"
    logger = create_logger(action_name)
    unit = get_unit_name()
    experiment = get_latest_experiment_name()
    testing_experiment = get_latest_testing_experiment_name()
    logger.info("Starting reading of blank OD. This will take a few minutes.")

    with publish_ready_to_disconnected_state(unit, experiment, action_name):

        # running this will mess with OD Reading - best to just not let it happen
        # (ignored in test mode).
        if is_pio_job_running("od_reading") and not is_testing_env():
            logger.error(
                "od_reading should not be running. Stop od_reading first. Exiting."
            )
            return

        # turn on stirring if not already on
        if not is_pio_job_running("stirring"):
            # start stirring
            st = start_stirring(
                target_rpm=config.getint("stirring", "target_rpm"),
                unit=unit,
                experiment=testing_experiment,
            )
            st.block_until_rpm_is_close_to_target()
        else:
            # TODO: it could be paused, we should make sure it's running
            ...

        sampling_rate = 1 / config.getfloat("od_config", "samples_per_second")

        # start od_reading
        start_od_reading(
            od_angle_channel1,
            od_angle_channel2,
            sampling_rate=sampling_rate,
            unit=unit,
            experiment=testing_experiment,
            fake_data=is_testing_env(),
        )

        def yield_from_mqtt():
            while True:
                msg = pubsub.subscribe(
                    f"pioreactor/{unit}/{testing_experiment}/od_reading/od_raw_batched"
                )
                yield json.loads(msg.payload)

        signal = yield_from_mqtt()
        readings = defaultdict(list)

        for count, batched_reading in enumerate(signal, start=1):
            for (channel, reading) in batched_reading["od_raw"].items():
                readings[channel].append(reading["voltage"])

            pubsub.publish(
                f"pioreactor/{unit}/{experiment}/{action_name}/percent_progress",
                round(count / n_samples * 100),  # floor division here would report 0% until the last sample
            )
            logger.debug(f"Progress: {count/n_samples:.0%}")
            if count == n_samples:
                break

        means = {}
        variances = {}
        autocorrelations = {}  # lag 1

        for channel, od_reading_series in readings.items():
            # measure the mean and publish. The mean will be used to normalize the readings in downstream jobs
            means[channel] = mean(od_reading_series)
            variances[channel] = variance(od_reading_series)
            autocorrelations[channel] = correlation(od_reading_series[:-1],
                                                    od_reading_series[1:])

            # warn users that a blank is 0 - maybe this should be an error instead? TODO: link this to a docs page.
            if means[channel] == 0.0:
                logger.warning(
                    f"OD reading for PD Channel {channel} is 0.0 - that shouldn't be. Is there a loose connection, or an extra channel in the configuration's [od_config.photodiode_channel] section?"
                )

            pubsub.publish(
                f"pioreactor/{unit}/{experiment}/od_blank/{channel}",
                json.dumps({
                    "timestamp": current_utc_time(),
                    "od_reading_v": means[channel]
                }),
            )

        # store locally as the source of truth.
        with local_persistant_storage(action_name) as cache:
            cache[experiment] = json.dumps(means)

        # publish to UI and database
        pubsub.publish(
            f"pioreactor/{unit}/{experiment}/{action_name}/mean",
            json.dumps(means),
            qos=pubsub.QOS.AT_LEAST_ONCE,
            retain=True,
        )

        if config.getboolean(
                "data_sharing_with_pioreactor",
                "send_od_statistics_to_Pioreactor",
                fallback=False,
        ):
            to_share = {"mean": means, "variance": variances}
            to_share["ir_intensity"] = config["od_config"]["ir_intensity"]
            to_share["od_angle_channel1"] = od_angle_channel1
            to_share["od_angle_channel2"] = od_angle_channel2
            pubsub.publish_to_pioreactor_cloud("od_blank_mean", json=to_share)

        logger.debug(f"measured mean: {means}")
        logger.debug(f"measured variances: {variances}")
        logger.debug(f"measured autocorrelations: {autocorrelations}")
        logger.debug("OD normalization finished.")

        return means
Example #18
# -*- coding: utf-8 -*-
from __future__ import annotations

from pioreactor.whoami import is_testing_env

if is_testing_env():
    from pioreactor.utils.mock import MockI2C as I2C
else:
    from busio import I2C  # type: ignore

from adafruit_bus_device.i2c_device import I2CDevice  # type: ignore

from pioreactor.types import GpioPin, PwmChannel
from pioreactor.version import hardware_version_info

# All GPIO pins below are BCM numbered

PWM_TO_PIN: dict[PwmChannel, GpioPin] = {
    # map between PCB labels and GPIO pins
    "1": 6 if hardware_version_info == (0, 1) else 17,
    "2": 13,  # hardware PWM1 available
    "3": 16,
    "4": 12,  # hardware PWM0 available
    "5": 18,  # dedicated to heater
}

# led and button GPIO pins
PCB_LED_PIN: GpioPin = 23
PCB_BUTTON_PIN: GpioPin = 24

# hall sensor
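A minimal lookup sketch (the module path is an assumption inferred from this file's contents, not shown in the excerpt):

from pioreactor.hardware import PWM_TO_PIN  # assumed module path

# Which GPIO (BCM) pin drives PWM channel "2" on this board?
pin = PWM_TO_PIN["2"]  # 13 - hardware PWM1 is available on this pin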
Example #19
    def __init__(
        self,
        automation_name: str,
        unit: str,
        experiment: str,
        eval_and_publish_immediately: bool = True,
        **kwargs,
    ) -> None:
        super().__init__(job_name="temperature_control",
                         unit=unit,
                         experiment=experiment)

        if not is_HAT_present():
            self.logger.error("Pioreactor HAT must be present.")
            self.set_state(self.DISCONNECTED)
            raise exc.HardwareNotFoundError("Pioreactor HAT must be present.")

        if not is_heating_pcb_present():
            self.logger.error("Heating PCB must be attached to Pioreactor HAT")
            self.set_state(self.DISCONNECTED)
            raise exc.HardwareNotFoundError(
                "Heating PCB must be attached to Pioreactor HAT")

        if is_testing_env():
            self.logger.debug("TMP1075 not available; using MockTMP1075")
            from pioreactor.utils.mock import MockTMP1075 as TMP1075
        else:
            from TMP1075 import TMP1075  # type: ignore

        self.pwm = self.setup_pwm()
        self.update_heater(0)

        self.tmp_driver = TMP1075()
        self.read_external_temperature_timer = RepeatedTimer(
            45, self.read_external_temperature, run_immediately=False)
        self.read_external_temperature_timer.start()

        self.publish_temperature_timer = RepeatedTimer(
            4 * 60,
            self.evaluate_and_publish_temperature,
            run_immediately=eval_and_publish_immediately,
            run_after=60,
        )
        self.publish_temperature_timer.start()

        self.automation = AutomationDict(automation_name=automation_name,
                                         **kwargs)

        try:
            automation_class = self.automations[
                self.automation["automation_name"]]
        except KeyError:
            raise KeyError(
                f"Unable to find automation {self.automation['automation_name']}. Available automations are {list(self.automations.keys())}"
            )

        self.logger.info(f"Starting {self.automation}.")
        try:
            self.automation_job = automation_class(unit=self.unit,
                                                   experiment=self.experiment,
                                                   parent=self,
                                                   **kwargs)
        except Exception as e:
            self.logger.error(e)
            self.logger.debug(e, exc_info=True)
            self.set_state(self.DISCONNECTED)
            raise e
        self.automation_name = self.automation["automation_name"]

        self.temperature = {
            "temperature": self.read_external_temperature(),
            "timestamp": current_utc_time(),
        }
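A minimal start-up sketch (the class name `TemperatureController`, the automation name, and its parameter are assumptions not shown in this excerpt; extra kwargs are forwarded to the automation, per the __init__ above):

# Hypothetical: start temperature control with a thermostat-style automation.
tc = TemperatureController(
    automation_name="thermostat",
    unit="pioreactor1",
    experiment="my_experiment",
    target_temperature=32.0,
)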