def test_output_limits(self):
    """Verify that controller output always stays within the configured limits."""
    regulator = PID(100, 20, 40, setpoint=10, output_limits=(0, 100))
    for measurement in (0, -100):
        assert 0 <= regulator(measurement) <= 100
    # Tighten the limits after construction; subsequent outputs must respect them.
    regulator.output_limits = (0, 50)
    for measurement in (0, -100):
        assert 0 <= regulator(measurement) <= 50
def test_tunings(self):
    """Check tunings property and setter."""
    regulator = PID(1, 2, 3, output_limits=(0, 10))
    # Constructor arguments are reflected by the properties.
    assert regulator.tunings == (1, 2, 3)
    assert regulator.output_limits == (0, 10)
    # Both properties can be reassigned after construction.
    regulator.tunings = (4, 5, 6)
    assert regulator.tunings == (4, 5, 6)
    regulator.output_limits = (20, 30)
    assert regulator.output_limits == (20, 30)
    # A lower limit above the upper limit must be rejected.
    with pytest.raises(ValueError):
        regulator.output_limits = (30, 0)
def test_D(self):
    """Derivative-only controller reacts solely to changes in the input."""
    regulator = PID(0, 0, 1, setpoint=10)
    # No previous input yet, so no derivative can be computed
    # (the first measurement must not be assumed to be 0).
    assert regulator(0) == 0
    # A constant input yields a zero derivative.
    assert regulator(0) == 0
    assert regulator(0) == 0
    # Output is minus the change in input between consecutive calls.
    assert regulator(5) == -5
    assert regulator(20) == -15
def test_desired_state(self):
    """No correction is applied once the measurement equals the setpoint."""
    regulator = PID(10, 5, 2, setpoint=10)
    assert regulator(10) == 0
def test_I_negative_setpoint(self):
    """Integral-only controller accumulates negative error over successive calls."""
    regulator = PID(0, 10, 0, setpoint=-10)
    # Rounded comparison: we only need to be close to the expected value.
    assert round(regulator(0)) == -100.0
    assert round(regulator(0)) == -200.0
def test_I(self):
    """Integral-only controller accumulates positive error over successive calls."""
    regulator = PID(0, 10, 0, setpoint=10)
    # Rounded comparison: we only need to be close to the expected value.
    assert round(regulator(0)) == 100.0
    assert round(regulator(0)) == 200.0
def test_P_negative_setpoint(self):
    """Proportional-only controller with a negative setpoint."""
    regulator = PID(1, 0, 0, setpoint=-10)
    # Output equals Kp * (setpoint - input) with Kp == 1.
    assert regulator(0) == -10
    assert regulator(5) == -15
    assert regulator(-5) == -5
    assert regulator(-15) == 5
def test_P(self):
    """Proportional-only controller: output equals Kp * (setpoint - input)."""
    regulator = PID(1, 0, 0, setpoint=10)
    assert regulator(0) == 10
    assert regulator(5) == 5
    assert regulator(-5) == 15
def test_zero(self):
    """Zero error yields zero output even with non-zero gains on all terms."""
    regulator = PID(1, 1, 1, setpoint=0)
    assert regulator(0) == 0
def test_version(self):
    """Check if version is defined."""
    # Accessing .version raises AttributeError (failing the test) if the
    # attribute is missing; the value itself is only logged.
    logging.debug("package version: %s", PID().version)
def _store_search(self, id_taxo_group, territorial_unit_ids=None, short_version="1"):
    """Download from VN by API search and store json to file.

    Calls biolovision_api to get observations, convert to json and store.
    If id_taxo_group is defined, downloads only this taxo_group
    Else if id_taxo_group is None, downloads all database
    Moves back in date range, starting from now
    Date range is adapted to regulate flow

    Parameters
    ----------
    id_taxo_group : str or None
        If not None, taxo_group to be downloaded.
    territorial_unit_ids : list
        List of territorial_units to include in storage.
    short_version : str
        '0' for long JSON and '1' for short_version.
    """
    # GET from API
    logger.debug(
        _("Getting observations from controler %s, using API search"),
        self._api_instance.controler,
    )
    # Determine the taxo_groups to process: either the whole list from the
    # API, or just the single group requested by the caller (marked "full"
    # so the access check below passes).
    if id_taxo_group is None:
        taxo_groups = TaxoGroupsAPI(self._config).api_list()["data"]
    else:
        taxo_groups = [{"id": id_taxo_group, "access_mode": "full"}]
    for taxo in taxo_groups:
        # Skip groups the account has no access to.
        if taxo["access_mode"] != "none":
            id_taxo_group = taxo["id"]
            logger.debug(
                _("Getting observations from taxo_group %s"),
                id_taxo_group,
            )
            # Record end of download interval
            if self._config.end_date is None:
                end_date = datetime.now()
            else:
                end_date = self._config.end_date
            # Record the increment timestamp for later incremental updates;
            # fall back to end_date when no previous increment is stored.
            since = self._backend.increment_get(self._config.site, id_taxo_group)
            if since is None:
                since = end_date
            self._backend.increment_log(self._config.site, id_taxo_group, since)
            # When to start download interval
            start_date = end_date
            min_date = (datetime(1900, 1, 1)
                        if self._config.start_date is None
                        else self._config.start_date)
            seq = 1  # sequence number appended to each stored file name
            # PID controller regulating the size (in days) of each download
            # window so that the number of observations per request stays
            # close to the configured setpoint.
            pid = PID(
                kp=self._config.tuning_pid_kp,
                ki=self._config.tuning_pid_ki,
                kd=self._config.tuning_pid_kd,
                setpoint=self._config.tuning_pid_setpoint,
                output_limits=(
                    self._config.tuning_pid_limit_min,
                    self._config.tuning_pid_limit_max,
                ),
            )
            delta_days = self._config.tuning_pid_delta_days
            # Walk backwards in time, one window per iteration, until the
            # window start crosses min_date.
            while start_date > min_date:
                nb_obs = 0
                start_date = end_date - timedelta(days=delta_days)
                q_param = {
                    "period_choice": "range",
                    "date_from": start_date.strftime("%d.%m.%Y"),
                    "date_to": end_date.strftime("%d.%m.%Y"),
                    "species_choice": "all",
                    "taxonomic_group": taxo["id"],
                }
                # Select filtering on entry date ("1") or sighting date ("0").
                if self._config._type_date is not None:
                    if self._config._type_date == "entry":
                        q_param["entry_date"] = "1"
                    else:
                        q_param["entry_date"] = "0"
                # Restrict to the requested territorial units, if any;
                # otherwise query every known unit.
                if territorial_unit_ids is None or len(
                        territorial_unit_ids) == 0:
                    t_us = self._t_units
                else:
                    t_us = [
                        u for u in self._t_units
                        if u[0]["short_name"] in territorial_unit_ids
                    ]
                for t_u in t_us:
                    logger.debug(
                        _("Getting observations from territorial_unit %s, using API search"
                          ),
                        t_u[0]["name"],
                    )
                    q_param["location_choice"] = "territorial_unit"
                    q_param["territorial_unit_ids"] = [
                        t_u[0]["id_country"] + t_u[0]["short_name"]
                    ]
                    # Time the API call (microseconds).
                    timing = perf_counter_ns()
                    items_dict = self._api_instance.api_search(
                        q_param, short_version=short_version)
                    timing = (perf_counter_ns() - timing) / 1000
                    # Call backend to store results
                    nb_o = self._backend.store(
                        self._api_instance.controler,
                        str(id_taxo_group) + "_" + str(seq),
                        items_dict,
                    )
                    # Throttle on max size downloaded during each interval
                    nb_obs = max(nb_o, nb_obs)
                    log_msg = _(
                        "{} => Iter: {}, {} obs, taxo_group: {}, territorial_unit: {}, date: {}, interval: {}"
                    ).format(
                        self._config.site,
                        seq,
                        nb_o,
                        id_taxo_group,
                        t_u[0]["short_name"],
                        start_date.strftime("%d/%m/%Y"),
                        str(delta_days),
                    )
                    # Call backend to store log
                    self._backend.log(
                        self._config.site,
                        self._api_instance.controler,
                        self._api_instance.transfer_errors,
                        self._api_instance.http_status,
                        log_msg,
                        total_size(items_dict),
                        timing,
                    )
                    logger.info(log_msg)
                seq += 1
                # Slide the window back and let the PID adapt its width to
                # the largest per-unit observation count of this interval.
                end_date = start_date
                delta_days = int(pid(nb_obs))
    return None