def click_od_normalization(od_angle_channel):
    """
    Compute statistics about the OD time series
    """
    unit = get_unit_name()
    experiment = get_latest_experiment_name()
    od_normalization(od_angle_channel, unit, experiment)

def test_error_in_subscribe_and_callback_is_logged() -> None:
    class TestJob(BackgroundJob):
        def __init__(self, *args, **kwargs) -> None:
            super(TestJob, self).__init__(*args, **kwargs)
            self.start_passive_listeners()

        def start_passive_listeners(self) -> None:
            self.subscribe_and_callback(self.callback, "pioreactor/testing/subscription")

        def callback(self, msg: MQTTMessage) -> None:
            print(1 / 0)

    error_logs = []

    def collect_error_logs(msg: MQTTMessage) -> None:
        if "ERROR" in msg.payload.decode():
            error_logs.append(msg)

    subscribe_and_callback(
        collect_error_logs,
        f"pioreactor/{get_unit_name()}/{get_latest_experiment_name()}/logs/app",
    )

    with TestJob(job_name="job", unit=get_unit_name(), experiment=get_latest_experiment_name()):
        publish("pioreactor/testing/subscription", "test")
        pause()
        pause()
        assert len(error_logs) > 0
        assert "division by zero" in error_logs[0].payload.decode()

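# Not from the source: a minimal sketch of the kind of guard that would produce the
# behavior tested above, i.e. an exception raised inside an MQTT callback is logged
# (and hence republished to the logs topic) instead of being silently swallowed.
# `subscribe_and_callback`'s real implementation may differ; names here are illustrative.
import logging
from functools import wraps
from typing import Callable


def log_exceptions(callback: Callable, logger: logging.Logger) -> Callable:
    @wraps(callback)
    def wrapped(*args, **kwargs):
        try:
            return callback(*args, **kwargs)
        except Exception as e:
            # e.g. ZeroDivisionError("division by zero") from the test's callback
            logger.error(str(e), exc_info=True)

    return wrapped
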
def test_custom_class_will_register_and_run() -> None:
    class NaiveTurbidostat(DosingAutomation):
        automation_name = "naive_turbidostat"
        published_settings = {
            "target_od": {"datatype": "float", "settable": True, "unit": "AU"},
            "duration": {"datatype": "float", "settable": True, "unit": "min"},
        }

        def __init__(self, target_od: float, **kwargs: Any) -> None:
            super(NaiveTurbidostat, self).__init__(**kwargs)
            self.target_od = target_od

        def execute(self) -> None:
            if self.latest_od > self.target_od:
                self.execute_io_action(media_ml=1.0, waste_ml=1.0)

    algo = DosingController(
        "naive_turbidostat",
        target_od=2.0,
        duration=10,
        unit=get_unit_name(),
        experiment=get_latest_experiment_name(),
    )
    algo.set_state(algo.DISCONNECTED)

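# Not from the source: one common pattern for the "register" behavior exercised above is a
# subclass hook that files each automation under its `automation_name`, letting a controller
# look the class up by string. Pioreactor's actual mechanism may differ; this is only a sketch.
class AutomationBase:
    automation_name: str
    _registry: dict[str, type] = {}

    def __init_subclass__(cls, **kwargs) -> None:
        super().__init_subclass__(**kwargs)
        # Every subclass that declares an automation_name becomes discoverable by that name.
        if getattr(cls, "automation_name", None):
            AutomationBase._registry[cls.automation_name] = cls
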
def run(automation=None, duration=None, sensor="135/0", skip_first_run=False, **kwargs):
    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    try:
        kwargs["duration"] = duration
        kwargs["unit"] = unit
        kwargs["experiment"] = experiment
        kwargs["sensor"] = sensor
        kwargs["skip_first_run"] = skip_first_run

        controller = DosingController(automation, **kwargs)  # noqa: F841

        while True:
            signal.pause()

    except Exception as e:
        logging.getLogger("dosing_automation").debug(f"{str(e)}", exc_info=True)
        logging.getLogger("dosing_automation").error(f"{str(e)}")
        raise e

def test_end_to_end(self) -> None:
    config["od_config.photodiode_channel"]["1"] = "90"
    config["od_config.photodiode_channel"]["2"] = "135"

    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    interval = 0.1
    config["od_config"]["samples_per_second"] = "0.2"

    od = start_od_reading(
        "135",
        "90",
        sampling_rate=interval,
        unit=unit,
        experiment=experiment,
        fake_data=True,
    )
    st = start_stirring(target_rpm=500, unit=unit, experiment=experiment)
    calc = GrowthRateCalculator(unit=unit, experiment=experiment)

    time.sleep(35)
    assert calc.ekf.state_[-2] != 1.0

    calc.set_state(calc.DISCONNECTED)
    st.set_state(st.DISCONNECTED)
    od.set_state(od.DISCONNECTED)

def test_jobs_connecting_and_disconnecting_will_still_log_to_mqtt() -> None:
    # see note in base.py about create_logger
    unit: str = get_unit_name()
    exp: str = get_latest_experiment_name()

    results = []

    def cb(msg: MQTTMessage) -> None:
        if "WARNING" in msg.payload.decode():
            results.append([msg.payload])

    subscribe_and_callback(cb, f"pioreactor/{unit}/{exp}/logs/app")

    bj = BackgroundJob(job_name="job", unit=unit, experiment=exp)
    bj.logger.warning("test1")

    # disconnect, which should clear logger handlers (but may not...)
    bj.set_state(bj.DISCONNECTED)

    bj = BackgroundJob(job_name="job", unit=unit, experiment=exp)
    bj.logger.warning("test2")

    pause()
    pause()
    assert len(results) == 2
    bj.set_state(bj.DISCONNECTED)

def test_bad_key_in_published_settings() -> None:
    class TestJob(BackgroundJob):
        published_settings = {
            "some_key": {
                "datatype": "float",
                "units": "%",  # type: ignore  # "units" is wrong; the key should be "unit".
                "settable": True,
            },
        }

        def __init__(self, *args, **kwargs) -> None:
            super(TestJob, self).__init__(*args, **kwargs)

    warning_logs = []

    def collect_warning_logs(msg: MQTTMessage) -> None:
        if "WARNING" in msg.payload.decode():
            warning_logs.append(msg)

    subscribe_and_callback(
        collect_warning_logs,
        f"pioreactor/{get_unit_name()}/{get_latest_experiment_name()}/logs/app",
    )

    with TestJob(job_name="job", unit=get_unit_name(), experiment=get_latest_experiment_name()):
        pause()
        pause()
        assert len(warning_logs) > 0
        assert "Found extra property" in warning_logs[0].payload.decode()

def test_editing_readonly_attr_via_mqtt() -> None:
    class TestJob(BackgroundJob):
        published_settings = {
            "readonly_attr": {
                "datatype": "float",
                "settable": False,
            },
        }

    warning_logs = []

    def collect_logs(msg: MQTTMessage) -> None:
        if "readonly" in msg.payload.decode():
            warning_logs.append(msg)

    subscribe_and_callback(
        collect_logs,
        f"pioreactor/{get_unit_name()}/{get_latest_experiment_name()}/logs/app",
    )

    with TestJob(job_name="job", unit=get_unit_name(), experiment=get_latest_experiment_name()):
        publish(
            f"pioreactor/{get_unit_name()}/{get_latest_experiment_name()}/job/readonly_attr/set",
            1.0,
        )
        pause()

    assert len(warning_logs) > 0

def test_single_observation(self) -> None:
    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    with local_persistant_storage("od_normalization_mean") as cache:
        cache[experiment] = json.dumps({1: 1})

    with local_persistant_storage("od_normalization_variance") as cache:
        cache[experiment] = json.dumps({1: 1})

    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(["1"], [1.153], ["90"], timestamp="2010-01-01 12:00:30"),
        retain=True,
    )

    calc = GrowthRateCalculator(unit=unit, experiment=experiment)

    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(["1"], [1.155], ["90"], timestamp="2010-01-01 12:00:35"),
    )
    pause()

    assert True  # smoke test: reaching here without an exception is the pass condition

    calc.set_state(calc.DISCONNECTED)

def click_od_normalization(n_samples):
    """
    Compute statistics about the OD time series
    """
    unit = get_unit_name()
    experiment = get_latest_experiment_name()
    click.echo(od_normalization(unit, experiment, n_samples=n_samples))

def test_od_blank_being_non_zero(self) -> None:
    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    with local_persistant_storage("od_blank") as cache:
        cache[experiment] = json.dumps({"1": 0.25, "2": 0.4})

    with local_persistant_storage("od_normalization_mean") as cache:
        cache[experiment] = json.dumps({"1": 0.5, "2": 0.8})

    with local_persistant_storage("od_normalization_variance") as cache:
        cache[experiment] = json.dumps({"1": 1e-6, "2": 1e-4})

    calc = GrowthRateCalculator(unit=unit, experiment=experiment)
    pause()
    pause()

    publish(
        f"pioreactor/{unit}/{experiment}/od_reading/od_raw_batched",
        create_od_raw_batched_json(
            ["1", "2"], [0.50, 0.80], ["90", "135"], timestamp="2010-01-01 12:02:00"
        ),
        retain=True,
    )
    pause()
    pause()

    assert calc.od_normalization_factors == {"2": 0.8, "1": 0.5}
    assert calc.od_blank == {"2": 0.4, "1": 0.25}

    results = calc.scale_raw_observations({"2": 1.0, "1": 0.6})
    print(results)
    # Expected values are consistent with scaled = (raw - blank) / (mean - blank):
    # channel 2: (1.0 - 0.4) / (0.8 - 0.4) = 1.5; channel 1: (0.6 - 0.25) / (0.5 - 0.25) = 1.4
    assert abs(results["2"] - 1.5) < 1e-5
    assert abs(results["1"] - 1.4) < 1e-5

    calc.set_state(calc.DISCONNECTED)

def click_monitor():
    """
    Monitor and report metadata on the unit.
    """
    heidi = Monitor(unit=get_unit_name(), experiment=UNIVERSAL_EXPERIMENT)  # noqa: F841

    signal.pause()

def od_reading(
    od_angle_channel,
    sampling_rate=1 / float(config["od_config.od_sampling"]["samples_per_second"]),
    fake_data=False,
):
    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    channel_label_map = {}
    for input_ in od_angle_channel:
        # We split input of the form ["135,0", "135,1", "90,3"] into
        # labels "135/0", "135/1", "90/3", keyed by channel number.
        angle, channel = input_.split(",")
        angle_label = f"{angle}/{channel}"
        channel_label_map[int(channel)] = angle_label

    ODReader(
        channel_label_map,
        sampling_rate=sampling_rate,
        unit=unit,
        experiment=experiment,
        fake_data=fake_data,
    )

    signal.pause()

def start_temperature_control(automation_name: str, **kwargs) -> TemperatureController:
    return TemperatureController(
        automation_name=automation_name,
        unit=get_unit_name(),
        experiment=get_latest_experiment_name(),
        **kwargs,
    )

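# Hypothetical usage sketch; the automation name and keyword argument below are assumptions,
# not confirmed by the source:
#
#   tc = start_temperature_control("thermostat", target_temperature=30.0)
#   tc.block_until_disconnected()
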
def click_watchdog():
    """
    Start the watchdog on the leader
    """
    heidi = WatchDog(unit=get_unit_name(), experiment=UNIVERSAL_EXPERIMENT)  # noqa: F841

    signal.pause()

def click_led_intensity(channel, intensity, source_of_event):
    """
    Modify the intensity of an LED
    """
    unit = get_unit_name()
    experiment = get_latest_experiment_name()
    return led_intensity(channel, intensity, source_of_event, unit, experiment)

def update(ui: bool, app: bool, dev: bool) -> None:
    import subprocess
    from json import loads

    from pioreactor.mureq import get

    logger = create_logger("update", unit=get_unit_name(), experiment=UNIVERSAL_EXPERIMENT)

    if (not app) and (not ui):
        click.echo("Nothing to do. Specify either --app or --ui.")
        return

    if app:
        if not dev:
            latest_release_metadata = loads(
                get("https://api.github.com/repos/pioreactor/pioreactor/releases/latest").body
            )
            latest_release_version = latest_release_metadata["name"]
            url_to_get_whl = f"https://github.com/Pioreactor/pioreactor/releases/download/{latest_release_version}/pioreactor-{latest_release_version}-py3-none-any.whl"
            command = f'sudo pip3 install "pioreactor @ {url_to_get_whl}"'
        else:
            latest_release_version = "master"
            command = "sudo pip3 install -U --force-reinstall https://github.com/pioreactor/pioreactor/archive/master.zip"

        p = subprocess.run(
            command,
            shell=True,
            universal_newlines=True,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.PIPE,
        )
        if p.returncode == 0:
            logger.info(f"Updated Pioreactor to version {latest_release_version}.")
        else:
            logger.error(p.stderr)

    if ui and am_I_leader():
        cd = "cd ~/pioreactorui/backend"
        gitp = "git pull origin master"
        npm_install = "npm install"
        setup = "pm2 restart ui"
        # TODO: can this be more specific than `.`? This blocks edits to the contrib folder from sticking around.
        unedit_edited_files = "git checkout ."
        # Install dependencies before restarting the UI process.
        command = " && ".join([cd, gitp, npm_install, setup, unedit_edited_files])
        p = subprocess.run(
            command,
            shell=True,
            universal_newlines=True,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.PIPE,
        )
        if p.returncode == 0:
            logger.info("Updated PioreactorUI to latest version.")
        else:
            logger.error(p.stderr)

def kill(job: tuple[str, ...], units: tuple[str, ...], all_jobs: bool, y: bool) -> None:
    """
    Send a SIGTERM signal to JOB. JOB can be any Pioreactor job name, like "stirring".

    Example:

    > pios kill stirring

    Multiple jobs are accepted:

    > pios kill stirring dosing_control

    Kill all worker jobs (i.e. this excludes leader jobs like watchdog). Ignores the `job` argument.

    > pios kill --all
    """
    from sh import ssh  # type: ignore

    if not y:
        confirm = input(
            f"Confirm killing {' '.join(job) if (not all_jobs) else 'all jobs'} on {units}? Y/n: "
        ).strip()
        if confirm != "Y":
            return

    command = f"pio kill {' '.join(job)}"
    command += " --all-jobs" if all_jobs else ""

    logger = create_logger("CLI", unit=get_unit_name(), experiment=get_latest_experiment_name())

    def _thread_function(unit: str) -> bool:
        logger.debug(f"Executing `{command}` on {unit}.")
        try:
            ssh(unit, command)
            if all_jobs:  # tech debt
                ssh(
                    unit,
                    "pio run led_intensity --intensity 0 --channel A --channel B --channel C --channel D --no-log",
                )
            return True
        except Exception as e:
            logger.debug(e, exc_info=True)
            logger.error(f"Unable to connect to unit {unit}.")
            return False

    units = universal_identifier_to_all_active_workers(units)
    with ThreadPoolExecutor(max_workers=len(units)) as executor:
        results = executor.map(_thread_function, units)

    if not all(results):
        sys.exit(1)

def run_tests(
    execute_pump: Callable,
    hz: float,
    dc: float,
    min_duration: float,
    max_duration: float,
) -> tuple[list[float], list[float]]:
    click.clear()
    click.echo()
    click.echo("Beginning tests.")

    results = []
    durations_to_test = [
        min_duration,
        min_duration * 1.1,
        min_duration * 1.2,
        min_duration * 1.3,
    ] + [
        max_duration * 0.85,
        max_duration * 0.90,
        max_duration * 0.95,
        max_duration,
    ]

    for i, duration in enumerate(durations_to_test):
        if i > 0:
            click.echo("Remove the water from the measuring container.")

        click.echo(
            "We will run the pump for a set amount of time, and you will measure how much liquid is expelled."
        )
        click.echo(
            "You can either use a container on top of an accurate weighing scale, or a graduated cylinder (recall that 1 g = 1 ml of water)."
        )
        click.echo("Place the outflow tube into the container (or graduated cylinder).")

        while not click.confirm(click.style(f"Ready to test {duration:.2f}s?", fg="green")):
            pass

        execute_pump(
            duration=duration,
            source_of_event="pump_calibration",
            unit=get_unit_name(),
            experiment=get_latest_testing_experiment_name(),
            calibration={"duration_": 1.0, "hz": hz, "dc": dc, "bias_": 0},
        )

        r = click.prompt(
            click.style("Enter amount of water expelled", fg="green"),
            type=click.FLOAT,
            confirmation_prompt=click.style("Repeat for confirmation", fg="green"),
        )
        results.append(r)
        click.clear()
        click.echo()

    return durations_to_test, results

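# Not from the source: the calibration dict passed to execute_pump suggests a linear model
# ml ~= duration_ * seconds + bias_. A minimal sketch (not the project's actual fitting code)
# of turning the (duration, volume) pairs collected above into that model:
import numpy as np


def fit_calibration(durations: list[float], volumes: list[float]) -> tuple[float, float]:
    # Least-squares line through the measurements: slope ~ ml/s, intercept ~ startup bias.
    slope, intercept = np.polyfit(durations, volumes, deg=1)
    return float(slope), float(intercept)
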
def click_remove_waste(ml, duration, duty_cycle, source_of_event):
    """
    Remove waste/media from unit
    """
    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    signal.signal(signal.SIGTERM, clean_up_gpio)
    return remove_waste(ml, duration, duty_cycle, source_of_event, unit, experiment)

def click_monitor() -> None:
    """
    Monitor and report metadata on the unit.
    """
    import os

    os.nice(1)

    job = Monitor(unit=get_unit_name(), experiment=UNIVERSAL_EXPERIMENT)
    job.block_until_disconnected()

def click_watchdog():
    """
    Start the watchdog on the leader
    """
    import os

    os.nice(1)

    wd = WatchDog(unit=get_unit_name(), experiment=UNIVERSAL_EXPERIMENT)
    wd.block_until_disconnected()

def click_add_media(ml, duration, duty_cycle, source_of_event):
    """
    Add media to unit
    """
    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    signal.signal(signal.SIGTERM, clean_up_gpio)
    return add_media(ml, duration, duty_cycle, source_of_event, unit, experiment)

def _thread_function(unit: str) -> bool:
    click.echo(f"Executing `{core_command}` on {unit}.")
    try:
        ssh(unit, command)
        return True
    except Exception as e:
        logger = create_logger(
            "CLI", unit=get_unit_name(), experiment=get_latest_experiment_name()
        )
        logger.debug(e, exc_info=True)
        logger.error(f"Unable to connect to unit {unit}.")
        return False

def test_state_transition_callbacks() -> None:
    class TestJob(BackgroundJob):
        called_on_init = False
        called_on_ready = False
        called_on_sleeping = False
        called_on_ready_to_sleeping = False
        called_on_sleeping_to_ready = False
        called_on_init_to_ready = False

        def __init__(self, unit: str, experiment: str) -> None:
            super(TestJob, self).__init__(job_name="testjob", unit=unit, experiment=experiment)

        def on_init(self) -> None:
            self.called_on_init = True

        def on_ready(self) -> None:
            self.called_on_ready = True

        def on_sleeping(self) -> None:
            self.called_on_sleeping = True

        def on_ready_to_sleeping(self) -> None:
            self.called_on_ready_to_sleeping = True

        def on_sleeping_to_ready(self) -> None:
            self.called_on_sleeping_to_ready = True

        def on_init_to_ready(self) -> None:
            self.called_on_init_to_ready = True

    unit, exp = get_unit_name(), get_latest_experiment_name()

    with TestJob(unit, exp) as tj:
        assert tj.called_on_init
        assert tj.called_on_init_to_ready
        assert tj.called_on_ready

        publish(f"pioreactor/{unit}/{exp}/{tj.job_name}/$state/set", tj.SLEEPING)
        pause()
        pause()
        pause()
        pause()
        assert tj.called_on_ready_to_sleeping
        assert tj.called_on_sleeping

        publish(f"pioreactor/{unit}/{exp}/{tj.job_name}/$state/set", tj.READY)
        pause()
        pause()
        pause()
        pause()
        assert tj.called_on_sleeping_to_ready

def click_log_aggregating(output):
    """
    (leader only) Aggregate logs for the UI
    """
    logs = LogAggregation(  # noqa: F841
        ["pioreactor/+/+/app_logs_for_ui"],
        output,
        experiment=UNIVERSAL_EXPERIMENT,
        unit=get_unit_name(),
    )

    while True:
        signal.pause()

def backup_database(output):
    """
    This action creates a backup of the SQLite3 database into the specified output.
    It will then try to scp the backup to any available worker Pioreactors as a further backup.

    A cronjob is set up as well to run this action every 12 hours.
    """
    import sqlite3

    from sh import ErrorReturnCode, scp

    def progress(status, remaining, total):
        logger.debug(f"Copied {total - remaining} of {total} pages.")

    logger.debug(f"Starting backup of database to {output}")

    con = sqlite3.connect(config.get("storage", "database"))
    bck = sqlite3.connect(output)

    with bck:
        con.backup(bck, pages=-1, progress=progress)

    bck.close()
    con.close()
    logger.debug(f"Completed backup of database to {output}. Attempting distributed backup...")

    n_backups = 2
    backups_complete = 0
    available_workers = get_active_workers_in_inventory()

    while (backups_complete < n_backups) and (len(available_workers) > 0):
        backup_unit = available_workers.pop()
        if backup_unit == get_unit_name():
            continue

        try:
            scp(output, f"{backup_unit}:{output}")
        except ErrorReturnCode:
            logger.debug(
                f"Unable to backup database to {backup_unit}. Is it online?",
                exc_info=True,
            )
            logger.warning(f"Unable to backup database to {backup_unit}.")
        else:
            logger.debug(f"Backed up database to {backup_unit}:{output}.")
            backups_complete += 1

    return

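# Not part of the source: a small companion sketch for sanity-checking a backup file
# (e.g. before or after scp-ing it to a worker) using SQLite's built-in integrity check.
# The function name is hypothetical.
import sqlite3


def backup_is_ok(path: str) -> bool:
    con = sqlite3.connect(path)
    try:
        (result,) = con.execute("PRAGMA integrity_check").fetchone()
        return result == "ok"
    finally:
        con.close()
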
def test_local_cache_is_updated() -> None:
    channel: LedChannel = "B"

    unit = get_unit_name()
    exp = get_latest_experiment_name()

    assert led_intensity(channels=channel, intensities=20, unit=unit, experiment=exp)

    with local_intermittent_storage("leds") as cache:
        assert float(cache["B"]) == 20

def growth_rate_calculating(ignore_cache):
    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    try:
        calculator = GrowthRateCalculator(  # noqa: F841
            ignore_cache=ignore_cache, unit=unit, experiment=experiment
        )
        while True:
            signal.pause()
    except Exception as e:
        logging.getLogger(JOB_NAME).error(f"{str(e)}")
        raise e

def click_growth_rate_calculating(ignore_cache):
    """
    Start calculating growth rate
    """
    import os

    os.nice(1)

    unit = get_unit_name()
    experiment = get_latest_experiment_name()

    calculator = GrowthRateCalculator(  # noqa: F841
        ignore_cache=ignore_cache, unit=unit, experiment=experiment
    )
    calculator.block_until_disconnected()
