def _lookup_extrema(sensor_series):
    """
    Returns min/max timestamps and min/max temperature for the given
    temperature series.
    """
    tmin, tmax = None, None
    smin, smax = None, None
    for sname in sensor_series:
        # Iterate all (potential) sub-series of each sensor:
        for idx in range(len(sensor_series[sname].sec)):
            # Update min/max timestamp (number of seconds since dt_from)
            csmin = min(sensor_series[sname].sec[idx])
            csmax = max(sensor_series[sname].sec[idx])
            if smin is None or csmin < smin:
                smin = csmin
            if smax is None or csmax > smax:
                smax = csmax
            # Update min/max temperature
            ctmin = min(sensor_series[sname].val[idx])
            ctmax = max(sensor_series[sname].val[idx])
            if tmin is None or ctmin < tmin:
                tmin = ctmin
            if tmax is None or ctmax > tmax:
                tmax = ctmax
    return AttributeDict({
        'sec': AttributeDict({'min': smin, 'max': smax}),
        'val': AttributeDict({'min': tmin, 'max': tmax})
    })
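# Usage sketch for _lookup_extrema (illustration only): the sensor name and
# readings below are made up, but mirror the nested per-sensor sub-series
# structure this helper expects (as produced by _extract_temperature_series).
_example_series = {
    'living_room': AttributeDict({
        'sec': [[0, 300, 600], [5400, 5700]],        # seconds since dt_from, one list per sub-series
        'val': [[21.0, 21.5, 21.3], [20.8, 20.9]],   # temperatures, one list per sub-series
        'alias': 'Living room',
        'abbreviation': 'LR'
    })
}
_extrema = _lookup_extrema(_example_series)
# _extrema.sec.min == 0, _extrema.sec.max == 5700
# _extrema.val.min == 20.8, _extrema.val.max == 21.5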
def __init__(self):
    self.paths = AttributeDict({})
    self.debug = AttributeDict({})
    self.parameters = AttributeDict({})
    for group in _CONFIG_PROPS:
        for value in _CONFIG_PROPS[group]:
            self[group.lower()][value] = get_property(f"{group}.{value}")
def get_inlet_control_options(self, as_int=True):
    """
    Returns the subset of "CMI buttons" which we support to "click"
    programmatically.

    As there is no actual API, but only a simplistic web interface, we have
    to replay an HTTP request which simulates a button click in order to
    adjust the inlet temperature.
    """
    return [
        AttributeDict({'label': '55\u200a°', 'value': DistrictHeatingRequest.MEDIUM}),
        AttributeDict({'label': '60\u200a°', 'value': DistrictHeatingRequest.HIGH}),
        AttributeDict({'label': '65\u200a°', 'value': DistrictHeatingRequest.VERY_HIGH})
    ]
def _ssd_evaluate_prepare_paths(args: argparse.Namespace,
                                conf_obj: conf.Config) -> AttributeDict:
    import os

    output_path = f"{conf_obj.paths.output}/{args.network}/test/{args.iteration}"
    evaluation_path = f"{conf_obj.paths.evaluation}/{args.network}"
    result_file = f"{evaluation_path}/results-{args.iteration}"
    label_file = f"{output_path}/labels.bin"
    filenames_file = f"{output_path}/filenames.bin"
    predictions_file = f"{output_path}/predictions"
    predictions_per_class_file = f"{output_path}/predictions_class"
    predictions_glob_string = f"{output_path}/*ssd_predictions_transformed*"
    label_glob_string = f"{output_path}/*ssd_label*"

    os.makedirs(evaluation_path, exist_ok=True)

    return AttributeDict({
        "output_path": output_path,
        "evaluation_path": evaluation_path,
        "result_file": result_file,
        "label_file": label_file,
        "filenames_file": filenames_file,
        "predictions_file": predictions_file,
        "predictions_per_class_file": predictions_per_class_file,
        "predictions_glob_string": predictions_glob_string,
        "label_glob_string": label_glob_string
    })
def test_cest_switch():
    # Test summertime switch
    cfg = AttributeDict({'type': 'dummy_task', 'interval': 1, 'unit': 'hours'})

    # CET --> CEST
    dt = datetime.datetime(year=2021, month=3, day=28, hour=1, minute=55,
                           second=3, tzinfo=tz.tzlocal())
    with freeze_time(dt) as frozen_datetime:
        job = NonHeatingJob.from_attrdict(cfg)
        assert job.next_run == (dt + datetime.timedelta(hours=2))

    # CEST --> CET
    dt = datetime.datetime(year=2020, month=10, day=25, hour=3, minute=55,
                           second=3, tzinfo=tz.tzlocal())
    with freeze_time(dt) as frozen_datetime:
        job = NonHeatingJob.from_attrdict(cfg)
        assert job.next_run == datetime.datetime(year=2020, month=10, day=25,
                                                 hour=4, minute=55, second=3,
                                                 tzinfo=tz.tzlocal())
def make_response(success, **kwargs):
    """
    Most API hooks will return a JSON response, containing a boolean success
    flag and arbitrary payload (e.g. a message, data/image in case of querying
    the temperature plot, etc.)
    """
    return AttributeDict({'success': success, **kwargs})
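# Illustration only: how the service hooks in this collection build their
# payloads via make_response (the message text is a made-up example).
_rsp = make_response(True, message='Reloaded configuration.')
# _rsp.success -> True, _rsp.message -> 'Reloaded configuration.'
# The calling view presumably serializes this dict-like object to JSON
# (e.g. via flask.jsonify) before returning it to the client.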
def _ssd_train_get_generators(args: argparse.Namespace,
                              conf_obj: conf.Config,
                              load_data: callable,
                              gt: AttributeDict,
                              predictor_sizes: Sequence[Sequence[int]]) -> AttributeDict:
    nr_trajectories = conf_obj.parameters.nr_trajectories \
        if conf_obj.parameters.nr_trajectories != -1 else None

    train_generator, train_length, train_debug_generator = \
        load_data(gt.file_names_train, gt.instances_train, conf_obj.paths.coco,
                  predictor_sizes=predictor_sizes,
                  batch_size=conf_obj.parameters.batch_size,
                  image_size=conf_obj.parameters.ssd_image_size,
                  training=True, evaluation=False, augment=False,
                  debug=args.debug, nr_trajectories=nr_trajectories)
    val_generator, val_length, val_debug_generator = \
        load_data(gt.file_names_val, gt.instances_val, conf_obj.paths.coco,
                  predictor_sizes=predictor_sizes,
                  batch_size=conf_obj.parameters.batch_size,
                  image_size=conf_obj.parameters.ssd_image_size,
                  training=False, evaluation=False, augment=False,
                  debug=args.debug, nr_trajectories=nr_trajectories)

    return AttributeDict({
        "train_generator": train_generator,
        "train_length": train_length,
        "train_debug_generator": train_debug_generator,
        "val_generator": val_generator,
        "val_length": val_length,
        "val_debug_generator": val_debug_generator
    })
def reload_config(self):
    cfg = config.load_configuration(config.HELHEIMR_CONFIG_FILE_CONTROL)
    self._cfg = cfg.raspbee

    self._thermometer_config = dict()
    for config_label in self._cfg.thermometers.deconz_names:
        deconz_name = self._cfg.thermometers.deconz_names[config_label]
        alias = self._cfg.thermometers.aliases[config_label]
        abbrev = self._cfg.thermometers.abbreviations[config_label]
        self._thermometer_config[deconz_name] = AttributeDict({
            'config_label': config_label,
            'deconz_name': deconz_name,
            'alias': alias,
            'abbreviation': abbrev
        })

    self._thermometer_deconz_mapping = dict()
    thread = threading.Thread(
        target=lambda: self._make_thermometer_name2id_mapping())
    thread.daemon = True
    thread.start()

    return common.make_response(
        True,
        message=_('Reloaded thermometer configuration. Remapping ZigBee sensors in the background (this can take up to %(sec)d seconds).',
                  sec=self._cfg.sensor_mapping.max_retries * self._cfg.sensor_mapping.sleep_between_retries))
def detect_files(config_path):
    config_files = []

    for config_file_name in listdir(config_path):
        config_file_path = path.join(config_path, config_file_name)
        config_file_name_parts = dict(enumerate(config_file_name.split('.')))
        config_file_basename = config_file_name_parts.get(0, None)
        config_file_extension = config_file_name_parts.get(1, None)

        is_valid_config_file_extension = (
            config_file_extension in DEFAULT_CONFIG_FORMAT_EXTENSIONS.keys())

        if is_valid_config_file_extension:
            config_files.append(AttributeDict({
                'basename': config_file_basename,
                'path': config_file_path,
                'name': config_file_name,
                'extension': config_file_extension,
                'format': DEFAULT_CONFIG_FORMAT_EXTENSIONS.get(config_file_extension, None)
            }))

    return config_files
def system_status():
    """Returns the current operating system/PC status."""
    return AttributeDict({
        'cpu': cpu_info(),
        'process': proc_info(),
        'uptime': uptime(),
        'storage': disk_info(),
        'database_size': filesize_str(config.database_size())
    })
def get(self, key=None, default=None):
    if key is None:
        result = self.__dict__
    else:
        result = self.__dict__.get(key, default)

    result = AttributeDict.dict(result)

    return result
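# Minimal usage sketch of the AttributeDict semantics these snippets rely on
# (values are illustrative; assumes nested dicts are converted recursively,
# which the cfg.paths.output-style access used elsewhere suggests):
_cfg = AttributeDict({'paths': {'output': '/tmp/out'}, 'debug': False})
assert _cfg.debug is False                     # attribute access
assert _cfg['paths']['output'] == '/tmp/out'   # item access still works
assert _cfg.paths.output == '/tmp/out'         # nested attribute access
_plain = AttributeDict.dict(_cfg)              # back to a plain dict, as in get() above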
def _extract_heating_series(log_entries, dt_from, dt_to):
    # Handle the special case where we don't have log entries for the
    # requested duration:
    offset_to = _naive_time_offset(dt_to, dt_from)
    if len(log_entries) == 0:
        return dict()
    elif len(log_entries) == 1 and _naive_time_offset(log_entries[0].timestamp, dt_from) < 0:
        tca = log_entries[0].task_curr_active
        series = AttributeDict({'sec': [0, offset_to], 'val': [tca, tca]})
    else:
        # Double list comprehension to get the step curve (i.e. add two
        # datapoints for each log entry)
        def _pa(e):
            # Returns the entry's 'previously active' flag
            return e.task_prev_active

        def _ca(e):
            # Returns the entry's 'currently active' flag
            return e.task_curr_active

        def _o(e):
            # Returns the (timezone-agnostic) time difference (offset)
            return _naive_time_offset(e.timestamp, dt_from)

        seconds = [f(e) for e in log_entries for f in (_o, _o)]
        values = [f(e) for e in log_entries for f in (_pa, _ca)]

        # As we log only heating state changes, we must prepend (and append)
        # a datapoint at the beginning (end) of the plotted period
        if seconds[0] > 0:
            seconds = [0, *seconds]
            values = [values[0], *values]
        if seconds[-1] < offset_to:
            seconds = [*seconds, offset_to]
            values = [*values, values[-1]]

        series = AttributeDict({'sec': seconds, 'val': values})
    return series
def convertLog(log):
    log = AttributeDict(log)
    log.blockHash = HexBytes(int(log.blockHash, 16))
    log.transactionHash = HexBytes(int(log.transactionHash, 16))
    encodedTopics = []
    for topic in log.topics:
        encodedTopics.append(HexBytes(int(topic, 16)))
    log.topics = encodedTopics
    log.blockNumber = int(log.blockNumber, 16)
    log.logIndex = int(log.logIndex, 16)
    log.transactionIndex = int(log.transactionIndex, 16)
    return log
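# Sketch of the kind of raw log convertLog expects: every numeric field arrives
# as a hex string, e.g. from a raw eth_getLogs-style JSON-RPC response. All
# values below are made up for illustration.
_raw_log = {
    'blockHash': '0x1b4',
    'transactionHash': '0x2c8',
    'topics': ['0xddf252ad'],
    'blockNumber': '0x10d4f',
    'logIndex': '0x0',
    'transactionIndex': '0x1',
}
_log = convertLog(_raw_log)
# _log.blockNumber is now the int 68943; _log.blockHash and _log.topics hold
# HexBytes objects instead of hex strings.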
def _ssd_test_load_gt(conf_obj: conf.Config) -> AttributeDict:
    import pickle

    with open(f"{conf_obj.paths.scenenet_gt_test}/photo_paths.bin", "rb") as file:
        file_names = pickle.load(file)
    with open(f"{conf_obj.paths.scenenet_gt_test}/instances.bin", "rb") as file:
        instances = pickle.load(file)

    return AttributeDict({"file_names": file_names, "instances": instances})
def cast_dicts(value):
    if isinstance(value, tuple):
        return tuple(cast_dicts(list(value)))
    elif isinstance(value, list):
        return list(map(cast_dicts, value))
    elif isinstance(value, dict):
        return dict(AttributeDict.dict(value))
    return value
def job_list(self, language, use_code_tags=False):
    heating_jobs = [
        j.summary(language, use_code_tags) for j in self.jobs
        if isinstance(j, PeriodicHeatingJob)
    ]
    non_heating_jobs = [
        j.summary(language, use_code_tags) for j in self.jobs
        if isinstance(j, NonHeatingJob)
    ]
    return AttributeDict({
        'heating_jobs': heating_jobs,
        'non_heating_jobs': non_heating_jobs
    })
def _ssd_train_prepare_paths(args: argparse.Namespace,
                             conf_obj: conf.Config) -> AttributeDict:
    import os

    summary_path = f"{conf_obj.paths.summary}/{args.network}/train/{args.iteration}"
    pre_trained_weights_file = f"{conf_obj.paths.weights}/{args.network}/VGG_coco_SSD_300x300_iter_400000.h5"
    weights_path = f"{conf_obj.paths.weights}/{args.network}/train/"

    os.makedirs(summary_path, exist_ok=True)
    os.makedirs(weights_path, exist_ok=True)

    return AttributeDict({
        "summary_path": summary_path,
        "weights_path": weights_path,
        "pre_trained_weights_file": pre_trained_weights_file
    })
def summary(self, language=None, use_code_tags=False):
    return AttributeDict({
        'unique_id': self.unique_id,
        'created_by': self.created_by,
        'interval_string': self._intervalstr(language),
        'at_string': self._atstr(language, use_code_tags),
        'heating_string': self._heatingstr(language, use_code_tags),
        'next_run': time_utils.format(time_utils.dt_as_local(self.next_run),
                                      fmt='%Y-%m-%d %H:%M:%S')
    })
def summary(self, language=None, use_code_tags=False):
    dt_fmt = '{:s}%Y-%m-%d %H:%M:%S{:s}'.format(
        '<code>' if use_code_tags else '',
        '</code>' if use_code_tags else '')
    return AttributeDict({
        'unique_id': self.unique_id,
        'description': self.job_description,
        'interval_string': self._intervalstr(language),
        'at_string': self._atstr(use_code_tags=use_code_tags),
        'next_run': time_utils.format(time_utils.dt_as_local(self.next_run),
                                      fmt=dt_fmt)
    })
def _ssd_test_prepare_paths(args: argparse.Namespace,
                            conf_obj: conf.Config) -> AttributeDict:
    import os

    output_path = f"{conf_obj.paths.output}/{args.network}/test/{args.iteration}/"
    checkpoint_path = f"{conf_obj.paths.weights}/{args.network}/train/{args.train_iteration}"
    if conf_obj.parameters.ssd_test_pretrained:
        weights_file = f"{conf_obj.paths.weights}/ssd/VGG_coco_SSD_300x300_iter_400000_subsampled.h5"
    else:
        weights_file = f"{checkpoint_path}/ssd300_weights.h5"

    os.makedirs(output_path, exist_ok=True)

    return AttributeDict({
        "output_path": output_path,
        "weights_file": weights_file
    })
def _cvt(bc):
    return AttributeDict({
        'id': bc[0],
        'timestamp': time_utils.dt_as_local(
            time_utils.dt_fromstr(bc[1], fmt="%Y-%m-%d %H:%M:%S")),
        'msg_type': bc[2],
        'receiver': bc[3],
        'message': bc[4],
        'source': bc[5],
        'extra': bc[6]
    })
def req_scheduler_add():
    _logger.info('Serving NON-HEATING job ADD')
    if flask.request.method == 'POST':
        # POSTs from the _form_add_non_heating_job will post empty strings
        # for 'at' and 'description', thus replace those by actual None
        at = flask.request.values.get('at', type=str, default=None)
        if at is not None and len(at.strip()) == 0:
            at = None
        description = flask.request.values.get('description', type=str, default=None)
        if description is not None and len(description.strip()) == 0:
            description = None
        start_day = flask.request.values.get('start_day', type=str, default=None)
        if start_day is not None and len(start_day.strip()) == 0:
            start_day = None

        cfg = AttributeDict({
            'type': flask.request.values.get('type', type=str),
            'description': description,
            'interval': flask.request.values.get('interval', type=int, default=1),
            'unit': flask.request.values.get('unit', type=str, default=None),
            'start_day': start_day,
            'at': at
        })
        try:
            job = scheduling.NonHeatingJob.from_attrdict(cfg)
            rsp = scheduling.scheduler.schedule_job(job, get_locale())
        except scheduling.ScheduleError as e:
            return _return_response(common.make_response(
                False, message=_('Exception:') + f' {e}'))
        if not rsp.success:
            return _return_response(rsp)

        if scheduling.scheduler.serialize_jobs():
            return _return_response(common.make_response(
                True, message=_('Permanently added new non-heating job.')))
        else:
            return _return_response(common.make_response(
                False,
                message=_('Added new non-heating job temporarily, because the schedule could not be saved to disk.')))
    else:
        return flask.redirect(svc_urls.scheduler_ctrl)
def _extract_temperature_series(log_entries, dt_from):
    series = {
        sname: AttributeDict({
            'sec': list(),
            'val': list(),
            'alias': None,
            'abbreviation': None
        })
        for sname in set(
            [k for entry in log_entries for k in entry if k != 'timestamp'])
    }
    # First, collect all readings for each sensor:
    for entry in log_entries:
        sec_offset = _naive_time_offset(entry['timestamp'], dt_from)
        for sname in entry:
            if sname == 'timestamp':
                continue
            series[sname]['alias'] = entry[sname].alias
            series[sname]['abbreviation'] = entry[sname].abbreviation
            series[sname]['sec'].append(sec_offset)
            series[sname]['val'].append(entry[sname].temperature)

    # Now split each sensor's readings into multiple curves if they are
    # temporally disrupted:
    for sname in series:
        split_indices = [0]
        num_readings = len(series[sname].sec)
        for idx in range(1, num_readings):
            delta_sec = series[sname].sec[idx] - series[sname].sec[idx - 1]
            if delta_sec >= 1800:
                # If the sensor readings are more than (or eq. to) 30 mins
                # apart, start a new curve
                split_indices.append(idx)
        split_indices.append(num_readings)

        # Split the readings into the separate curves:
        series[sname]['sec'] = [
            series[sname].sec[split_indices[sidx - 1]:split_indices[sidx]]
            for sidx in range(1, len(split_indices))
        ]
        series[sname]['val'] = [
            series[sname].val[split_indices[sidx - 1]:split_indices[sidx]]
            for sidx in range(1, len(split_indices))
        ]
    return series
def query_zigbee_status(self):
    """Retrieves the status information for the ZigBee network/deCONZ API."""
    rsp = networking.http_get_request(self.api_url)
    if rsp is None:  # pragma: no cover
        return common.make_response(
            False,
            message=_("Network error while querying the deCONZ API for the gateway status."))
    state = json.loads(rsp.content)
    deconz_state = AttributeDict({
        'api_version': state['config']['apiversion'],
        'sw_version': state['config']['swversion'],
        'channel': state['config']['zigbeechannel']
    })
    return common.make_response(True, zigbee_state=deconz_state)
def _parse_payment(
        self, payment_data: Dict, channel: models.Channel
) -> Optional[AttributeDict]:
    event_name = payment_data.pop("event")
    payment_data.pop("token_address", None)

    if event_name == "EventPaymentReceivedSuccess":
        payment_data["sender_address"] = payment_data.pop("initiator")
        payment_data["receiver_address"] = self.raiden.address
    elif event_name == "EventPaymentSentSuccess":
        payment_data["sender_address"] = self.raiden.address
        payment_data["receiver_address"] = payment_data.pop("target")
    else:
        return None

    iso_time = payment_data.pop("log_time")
    payment_data["amount"] = channel.token.from_wei(payment_data.pop("amount")).amount
    payment_data["timestamp"] = make_aware(datetime.fromisoformat(iso_time))
    return AttributeDict(payment_data)
def _ssd_test_get_generators(args: argparse.Namespace,
                             conf_obj: conf.Config,
                             load_data_coco: callable,
                             load_data_scenenet: callable,
                             gt: AttributeDict,
                             predictor_sizes: Sequence[Sequence[int]]) -> AttributeDict:
    from twomartens.masterthesis import data

    nr_trajectories = conf_obj.parameters.nr_trajectories \
        if conf_obj.parameters.nr_trajectories != -1 else None

    if conf_obj.parameters.ssd_use_coco:
        generator, length, debug_generator = load_data_coco(
            data.clean_dataset, data.group_bboxes_to_images,
            conf_obj.paths.coco,
            conf_obj.parameters.batch_size,
            conf_obj.parameters.ssd_image_size,
            training=False, evaluation=True, augment=False,
            debug=args.debug,
            predictor_sizes=predictor_sizes)
    else:
        generator, length, debug_generator = load_data_scenenet(
            gt.file_names, gt.instances, conf_obj.paths.coco,
            predictor_sizes=predictor_sizes,
            batch_size=conf_obj.parameters.batch_size,
            image_size=conf_obj.parameters.ssd_image_size,
            training=False, evaluation=True, augment=False,
            debug=args.debug,
            nr_trajectories=nr_trajectories)

    return AttributeDict({
        "generator": generator,
        "length": length,
        "debug_generator": debug_generator
    })
def req_heating_jobs_add():
    _logger.info('Serving heating job ADD')
    if flask.request.method == 'POST':
        # Temperature and hysteresis can't be set via the web interface.
        # This is on purpose (we never used it in home automation v1)
        start_hour = flask.request.values.get('start_hour', type=int)
        start_min = flask.request.values.get('start_minute', type=int)
        d_hours = flask.request.values.get('duration_hours', type=int)
        d_mins = flask.request.values.get('duration_minutes', type=int)
        creator = flask.request.values.get('creator', type=str, default='')
        if len(creator.strip()) == 0:
            creator = 'webinterface'
        temperature = flask.request.values.get('temperature', type=float, default=None)

        cfg = AttributeDict({
            'day_interval': 1,
            'at': f'{start_hour:02d}:{start_min:02d}',
            'duration': f'{d_hours:02d}:{d_mins:02d}',
            'temperature': temperature,
            # 'hysteresis': None,
            'created_by': creator
        })
        try:
            job = scheduling.PeriodicHeatingJob.from_attrdict(cfg)
            rsp = scheduling.scheduler.schedule_job(job, get_locale())
        except scheduling.ScheduleError as e:
            return _return_response(common.make_response(
                False, message=_('Exception:') + f' {e}'))
        if not rsp.success:
            return _return_response(rsp)

        if scheduling.scheduler.serialize_jobs():
            return _return_response(common.make_response(
                True, message=_('Permanently added new heating job.')))
        else:
            return _return_response(common.make_response(
                False,
                message=_('Added new heating job temporarily, because the new schedule could not be saved to disk.')))
    else:
        return flask.redirect(svc_urls.heating_ctrl)
def _get_json(endpoint: str,
              params: dict = None,
              language: str = None,
              timeout: float = 2.0) -> object:
    if params is None:
        params = dict()
    params['json'] = True
    if language is not None:
        params['lang'] = language

    _logger.info(f'GET request: {url(endpoint)} params: {params}')
    rsp = networking.safe_http_get(url(endpoint), params=params, timeout=timeout)

    if rsp is None:
        _logger.error(f'Received no response from {url(endpoint)}.')
        return None
    elif rsp.status_code == 200:
        return AttributeDict(json.loads(rsp.content))
    else:
        _logger.error(
            f'Error querying {url(endpoint)}. Status code: {rsp.status_code}.')
        return None
def _ssd_train_load_gt(conf_obj: conf.Config) -> AttributeDict:
    import pickle

    with open(f"{conf_obj.paths.scenenet_gt_train}/photo_paths.bin", "rb") as file:
        file_names_train = pickle.load(file)
    with open(f"{conf_obj.paths.scenenet_gt_train}/instances.bin", "rb") as file:
        instances_train = pickle.load(file)
    with open(f"{conf_obj.paths.scenenet_gt_val}/photo_paths.bin", "rb") as file:
        file_names_val = pickle.load(file)
    with open(f"{conf_obj.paths.scenenet_gt_val}/instances.bin", "rb") as file:
        instances_val = pickle.load(file)

    return AttributeDict({
        "file_names_train": file_names_train,
        "instances_train": instances_train,
        "file_names_val": file_names_val,
        "instances_val": instances_val
    })
def report(self):
    """Return the current weather report and forecast."""
    try:
        one_call = self._mgr.one_call(lat=self._latitude, lon=self._longitude,
                                      exclude='minutely,daily,alerts',
                                      units='metric')
        # PyOWM v3 uses the new "one call" API:
        # https://openweathermap.org/api/one-call-api
        # Documentation of PyOWM-internal representation:
        # https://pyowm.readthedocs.io/en/latest/_modules/pyowm/weatherapi25/weather.html
        return AttributeDict({
            'current': Weather.fromOwmWeather(one_call.current, self._language),
            'forecast': [
                Weather.fromOwmWeather(w, self._language)
                for w in one_call.forecast_hourly
            ]
        })
    except PyOWMError as e:  # pragma: no cover
        _logger.error(f'Error querying OpenWeatherMap current weather: {e}')
        return None