def _unpickle_stats(self, stats, type_name=None, averagedata_name=None): if averagedata_name is None: averagedata_name = "bucket_average_data" if type_name is None: type_name = "bucket_type" if isinstance(stats, list): for s in stats: if s[type_name] == "average": s[averagedata_name] = data_unpickle(s[averagedata_name]) else: stats[averagedata_name] = data_unpickle(stats[averagedata_name])
def _get_using_ipify_ipstack():
    """
    The last resort is to ask someone for our external IP address, and then
    feed this to IPStack to get more details.

    :return: Normalized location dict, or None when the detail lookup fails.
    """
    # Step 1: learn our external IP address. There is no try/except around this
    # first request, so a network failure here propagates to the caller.
    response = yield treq.get(IPIFY_API)
    content = yield treq.content(response)
    ip = content.decode().strip()
    # NOTE(review): the docstring says IPStack, but the URL is built from
    # MAXMIND_API, while the fields read below ("query", "zip",
    # "connection.isp") match the ipstack/ip-api response shape -- confirm the
    # intended API constant.
    url = MAXMIND_API.format(ip)
    try:
        response = yield treq.get(url, timeout=5)
    except Exception:
        return None
    content = yield treq.content(response)
    raw_info = data_unpickle(content, "json")
    return {
        "source": "ipify_ipstack",
        "ip": raw_info.get("query"),
        "country_code": raw_info.get("country_code"),
        "country_name": raw_info.get("country_name"),
        "region_code": raw_info.get("region_code"),
        "region_name": raw_info.get("region_name"),
        "city": raw_info.get("city"),
        "zip_code": raw_info.get("zip"),
        "time_zone": raw_info.get("time_zone", {}).get("id"),
        # NOTE(review): float() raises TypeError when latitude/longitude are
        # absent from the response -- confirm the caller tolerates that.
        "latitude": float(raw_info.get("latitude")),
        "longitude": float(raw_info.get("longitude")),
        "isp": raw_info.get("connection", {}).get("isp"),
    }
def process_get_results(self, records, pickled_columns=None, encoding=None):
    """
    Normalize the results of a find() or all() call.

    A ``None`` result becomes an empty list. When ``pickled_columns`` is a
    non-empty list, each named attribute on every record is run through
    data_unpickle(); attributes holding ``None`` are left untouched.

    :param records: Row objects from find()/all(), or None.
    :param pickled_columns: Optional list of attribute names to unpickle.
    :param encoding: Unused; kept for interface compatibility.
    :return: The (possibly mutated) record list, or [] when records is None.
    """
    if records is None:
        return []
    if not (isinstance(pickled_columns, list) and len(pickled_columns) > 0):
        return records
    for row in records:
        for column in pickled_columns:
            raw = getattr(row, column)
            if raw is not None:
                setattr(row, column, data_unpickle(raw))
    return records
def _load_authkeys_from_config(self):
    """
    Load authkeys from the configuration file.

    Reads every entry stored under the ``rbac_authkeys`` section, unpickles
    it, and registers it with add_authkey().
    """
    stored_keys = self._Configs.get("rbac_authkeys", "*", {}, False, ignore_case=True)
    for key_id, raw_value in stored_keys.items():
        self.add_authkey(data_unpickle(raw_value, encoder="msgpack_base64"))
def get_expired_storage(self):
    """
    Fetch up to 50 storage rows whose expiration time has already passed,
    unpickling each row's variables before returning them.

    :return: List of expired storage records.
    """
    cutoff = int(time())
    records = yield Storage.find(where=["expires > 0 and expires < ?", cutoff], limit=50)
    for row in records:
        row.variables = data_unpickle(row.variables)
    return records
def get_storage(self, storage_id):
    """
    Returns a db object representing the storage item.

    :param storage_id: Primary key of the storage row to fetch.
    :return: The storage record with its variables unpickled.
    :raises KeyError: If no row matches storage_id.
    """
    record = yield Storage.find(storage_id)
    if record is None:
        # Bug fix: exception constructors accept no keyword arguments, so the
        # previous ``KeyError(..., errorno=8657)`` raised TypeError instead of
        # the intended KeyError. (Former error number: 8657.)
        raise KeyError(f"Storage not found in database: {storage_id}")
    record.variables = data_unpickle(record.variables)
    return record
def __init__(self, parent, incoming, source=None, flush_cache=None):
    """
    Setup a new user instance.

    :param parent: A reference to the users library.
    :param incoming: Dict of user fields: id, user_id, email, name,
        access_code_digits, access_code_string.
    :param source: Unused here; accepted for interface compatibility.
    :param flush_cache: When None or True, the user/role caches are flushed
        after roles are attached.
    """
    self._Entity_type = "User"
    self._Entity_label_attribute = "display"
    super().__init__(parent)
    self.auth_type = AUTH_TYPE_USER
    # Auth specific attributes

    # Local attributes
    self._row_id: str = incoming["id"]
    self._user_id: str = incoming["user_id"]
    self.email: str = incoming["email"]
    self.name: str = incoming["name"]
    self.access_code_digits: int = incoming["access_code_digits"]
    self.access_code_string: str = incoming["access_code_string"]

    # Load roles and item permissions.
    # Role/permission data is stored pickled in the configs, keyed by user id.
    rbac_raw = self._Parent._Configs.get("rbac_user_roles", self.user_id, None, False,
                                         ignore_case=True)
    if rbac_raw is None:
        rbac = {}
    else:
        rbac = data_unpickle(rbac_raw, encoder="msgpack_base64")

    if "roles" in rbac:
        roles = rbac["roles"]
        if len(roles) > 0:
            for role in roles:
                try:
                    # Attach without saving/flushing per role; flushing is done once below.
                    self.attach_role(role, save=False, flush_cache=False)
                except KeyError:
                    logger.warn(
                        "Cannot find role for user, removing from user: {role}",
                        role=role)
                    # Don't have to actually do anything, it won't be added,
                    # so it can't be saved. :-)
    if flush_cache in (None, True):
        self._Parent._Cache.flush(tags=("user", "role"))
    if "item_permissions" in rbac:
        self.item_permissions = rbac["item_permissions"]
    self.save()
def decode_message(self, topic, raw_message):
    """
    Validate and decode a message from another gateway.

    The topic is expected to look like: ybo_req/src_gwid/dest_gwid

    :param topic: The MQTT topic the message arrived on.
    :param raw_message: msgpack_base85-pickled message envelope.
    :return: The decoded message dict; message["body"] is also decoded and
        stamped with time_received.
    :raises YomboWarning: When required fields are missing, the body hash
        doesn't verify, or the topic disagrees with the body's gateway ids.
    """
    # ybo_req/src_gwid/dest_gwid
    topic_parts = topic.split("/", 3)
    message = data_unpickle(raw_message, encoder="msgpack_base85")

    if not all(key in message for key in ("body", "hash")):
        raise YomboWarning("MQTT Gateway is dropping message, missing a required message field.")
    if message["hash"] != sha256_compact(message["body"]):
        raise YomboWarning("Invalid incoming check hash.")

    message["body"] = data_unpickle(message["body"], encoder="msgpack_base85")
    message["body"]["time_received"] = time()

    body_keys = ("payload", "time_sent", "source_id", "destination_id", "message_id",
                 "component_type", "component_name", "reply_to", "protocol_version")
    if not all(key in message["body"] for key in body_keys):
        raise YomboWarning("MQTT Gateway is dropping message, missing a required body field.")

    body = message["body"]
    if body["source_id"] != topic_parts[1]:
        raise YomboWarning("Gateway source_id doesn't match topic source_id")
    if body["destination_id"] != topic_parts[2]:
        raise YomboWarning("Gateway destination_id doesn't match topic destination_id")
    return message
def get_tasks(self, section):
    """
    Get all tasks for a given section.

    :param section: Value matched against the run_section column.
    :return: List of task dicts with task_arguments unpickled.
    """
    records = yield Tasks.find(where=["run_section = ?", section])
    results = []
    for record in records:
        # We need a dictionary, not an object.
        task = record.__dict__
        task["task_arguments"] = data_unpickle(task["task_arguments"], "msgpack_base85_zip")
        results.append(task)
    return results
def parse_record(data):
    """
    Convert a token database row into a plain dictionary, unpickling the
    auth_data column and coercing the enabled flag to a bool.

    :param data: Row object exposing the token columns as attributes.
    :return: Dict with the row's fields.
    """
    unpacked_auth = data_unpickle(data.auth_data)
    return {
        "id": data.id,
        "enabled": coerce_value(data.enabled, "bool"),
        "user_id": data.user_id,
        "auth_data": unpacked_auth,
        "refresh_token": data.refresh_token,
        "access_token": data.access_token,
        "refresh_token_expires_at": data.refresh_token_expires_at,
        "access_token_expires_at": data.access_token_expires_at,
        "created_at": data.created_at,
        "last_access_at": data.last_access_at,
        "updated_at": data.updated_at,
    }
def get_devices(self, status=None):
    """
    Fetch device rows, decoding each row's energy_map in place.

    :param status: True (or 1 -- ``==`` comparison is deliberate) returns every
        device; None returns devices with status 0 or 1; any other value is
        matched against the status column exactly.
    :return: List of device records ([] when nothing matches). Each record's
        __dict__ has energy_map decoded, defaulting to {"0.0": 0, "1.0": 0}.
    """
    if status == True:  # noqa: E712 -- intentionally also matches status == 1
        records = yield Device.find(orderby="label ASC")
    elif status is None:
        records = yield Device.find(where=["status = ? OR status = ?", 1, 0],
                                    orderby="label ASC")
    else:
        records = yield Device.find(where=["status = ? ", status], orderby="label ASC")
    if records is None:
        return []
    for record in records:
        # Mutating record.__dict__ updates the row object in place.
        data = record.__dict__
        if data["energy_map"] is None:
            data["energy_map"] = {"0.0": 0, "1.0": 0}
        else:
            try:
                data["energy_map"] = data_unpickle(data["energy_map"], encoder="json")
            except Exception:
                # Bug fix: was a bare "except:", which also swallowed
                # SystemExit/KeyboardInterrupt. Fall back to the default map
                # only on ordinary decode failures.
                data["energy_map"] = {"0.0": 0, "1.0": 0}
    return records
def _get_maxmind(location: str = None) -> Optional[Dict[str, Any]]:
    """Query maxmind for location data.

    :param location: Address to look up; defaults to "me" (our own address).
    :return: Normalized location dict, or None when the HTTP request fails.
        Fields missing from the response come back as None.
    """
    if location is None:
        location = "me"
    url = MAXMIND_API.format(location)
    try:
        response = yield treq.get(url, timeout=5, params={"demo": 1})
    except Exception:
        return None
    content = yield treq.content(response)
    raw_info = data_unpickle(content, "json")

    # Bug fix: "traits" and "location" were previously fetched without a
    # default, raising AttributeError when absent (inconsistent with the
    # country/city lookups, which already defaulted to {}); a present-but-empty
    # "subdivisions" list raised IndexError; and float(None) raised TypeError
    # when coordinates were missing. All lookups now degrade to None.
    traits = raw_info.get("traits", {})
    geo = raw_info.get("location", {})
    subdivision = (raw_info.get("subdivisions") or [{}])[0]
    latitude = geo.get("latitude")
    longitude = geo.get("longitude")
    return {
        "source": "maxmind",
        "ip": traits.get("ip_address"),
        "country_code": raw_info.get("country", {}).get("iso_code"),
        "country_name": raw_info.get("country", {}).get("names", {}).get("en"),
        "region_code": subdivision.get("iso_code"),
        "region_name": subdivision.get("names", {}).get("en"),
        "city": raw_info.get("city", {}).get("names", {}).get("en"),
        "zip_code": raw_info.get("postal", {}).get("code"),
        "time_zone": geo.get("time_zone"),
        "latitude": float(latitude) if latitude is not None else None,
        "longitude": float(longitude) if longitude is not None else None,
        "isp": traits.get("isp"),
    }
def _get_ip_api() -> Optional[Dict[str, Any]]:
    """Query ip-api.com for location data.

    :return: Normalized location dict, or None when the request fails.
    """
    try:
        response = yield treq.get(IPAPI_API, timeout=5)
    except Exception:
        return None
    content = yield treq.content(response)
    info = data_unpickle(content, "json")
    # Coordinates arrive as numbers/strings; normalize them to floats.
    latitude = float(info.get("lat"))
    longitude = float(info.get("lon"))
    return {
        "source": "ip_api",
        "ip": info.get("query"),
        "country_code": info.get("countryCode"),
        "country_name": info.get("country"),
        "region_code": info.get("region"),
        "region_name": info.get("regionName"),
        "city": info.get("city"),
        "zip_code": info.get("zip"),
        "time_zone": info.get("timezone"),
        "latitude": latitude,
        "longitude": longitude,
        "isp": info.get("isp"),
    }
def elevation(latitude, longitude):
    """Return elevation for given latitude and longitude.

    Falls back to 800 when the service is unreachable, responds with a
    non-200 status, or the response can't be parsed.
    """
    fallback = 800
    request_params = {
        "locations": "{},{}".format(latitude, longitude),
        "sensor": "false",
    }
    try:
        response = yield treq.get(ELEVATION_URL, params=request_params, timeout=10)
    except Exception:
        return fallback
    if response.code != 200:
        return fallback
    content = yield treq.content(response)
    parsed = data_unpickle(content, "json")
    try:
        return int(float(parsed["results"][0]["elevation"]))
    except (ValueError, KeyError, IndexError):
        return fallback
def _init_(self, **kwargs):
    """
    Open the yombo.ini file for reading. Import the configuration items into
    the database, also prime the configs for reading.

    Runs in phases:

    1. Validate (or restore from backup) the on-disk yombo.ini file.
    2. Import every section/option from settings.yombo_ini, decrypting
       GPG-protected values where possible.
    3. Merge per-option metadata from etc/yombo.ini.info.
    4. Seed core defaults, refresh detected external location info and local
       IP address details.
    5. Start the periodic save loop and seed MQTT/misc defaults.
    """
    self.exit_config_file = None  # Holds a complete configuration file to save when exiting.
    self.cache_dirty = False
    self.configs = {}  # Holds actual config data
    self.cfg_loaded = False
    self.yombo_ini_last_modified = 0
    self.working_dir = settings.arguments["working_dir"]
    ini_norestore = settings.arguments["norestoreini"]
    self.yombo_ini_path = f"{self.working_dir}/yombo.ini"

    # Phase 1: make sure yombo.ini is a usable file.
    # NOTE(review): the flag name "norestoreini" suggests restore should be
    # SKIPPED when set, yet every branch below restores only when the flag is
    # truthy -- these conditions look inverted; confirm intent.
    if os.path.exists(self.yombo_ini_path):
        if os.path.isfile(self.yombo_ini_path) is False:
            # Path exists but isn't a regular file: remove it or give up.
            try:
                os.remove(self.yombo_ini_path)
            except Exception as e:
                logger.error("'yombo.ini' file exists, but it's not a file and it can't be deleted!")
                reactor.stop()
                return
            if ini_norestore:
                self.restore_backup_yombi_ini()
        else:
            if os.path.getsize(self.yombo_ini_path) < 2:
                # A nearly empty file is treated as corrupt.
                logger.warn("yombo.ini appears corrupt, attempting to restore from backup.")
                if ini_norestore:
                    self.restore_backup_yombi_ini()
    else:
        if ini_norestore:
            self.restore_backup_yombi_ini()

    self.loading_yombo_ini = True
    if settings.yombo_ini is False:
        # No parsed ini at all: treat this as a brand new installation.
        self._Loader.operating_mode = "first_run"
    else:
        # Phase 2: copy every option into the configs, decrypting where possible.
        for section, options in settings.yombo_ini.items():
            for option, value in options.items():
                try:
                    value = yield self._GPG.decrypt(value)
                except:
                    # NOTE(review): bare except silently keeps the raw value on
                    # any decrypt failure -- confirm this best-effort behavior
                    # is intended.
                    pass
                self.set(section, option, value, ignore_case=True)
    self.get("core", "rand_seed", random_string(length=128))
    logger.debug("done parsing yombo.ini. Now about to parse yombo.ini.info.")

    # Phase 3: merge stored option metadata (base64-wrapped msgpack) into the
    # options just loaded; unknown sections/options are skipped.
    try:
        config_parser = configparser.ConfigParser()
        config_parser.read(f"{self.working_dir}/etc/yombo.ini.info")
        logger.debug("yombo.ini.info file read into memory.")
        for section in config_parser.sections():
            if section not in self.configs:
                continue
            for option in config_parser.options(section):
                if option not in self.configs[section]:
                    continue
                values = msgpack.loads(
                    b64decode(config_parser.get(section, option)))
                self.configs[section][option] = dict_merge(
                    self.configs[section][option], values)
    except IOError as e:
        logger.warn(
            "CAUGHT IOError!!!!!!!!!!!!!!!!!! In reading meta: {error}",
            error=e)
    except configparser.NoSectionError:
        logger.warn("CAUGHT ConfigParser.NoSectionError!!!! IN saving. ")
    logger.debug("done parsing yombo.ini.info")

    # Setup some defaults if we are new....
    self.get("core", "gwid", "local")
    self.get("core", "gwuuid", None)
    self.get("core", "is_master", 1)
    self.get("core", "master_gateway_id", "local")

    # Perform DB cleanup activities based on local section.
    if self.get("local", "deletedelayedmessages", False, False) is True:
        self._LocalDB.delete("sqldict", ["module = ?", "yombo.lib.messages"])
        self.set("local", "deletedelayedmessages", False)
    if self.get("local", "deletedevicehistory", False, False) is True:
        self._LocalDB.truncate("devicestatus")
        self.set("local", "deletedevicehistory", False)

    current_time = int(time())
    # Phase 4: ask external services what they know about us.
    # detected_location states are based off this and is set in the locations library.
    # times uses this
    self.detected_location_info = self.get("core", "locationinfo", None, False)
    if self.detected_location_info is None or \
            self.get("core", "locationinfotime", 0, False) < current_time - 3600:
        # Cached location info is missing or older than an hour: re-detect it.
        self.detected_location_info = yield detect_location_info()
        self.set(
            "core", "locationinfo",
            data_pickle(self.detected_location_info,
                        encoder="msgpack_base64",
                        local=True))
        self.set("core", "locationinfotime", current_time)
    else:
        self.detected_location_info = data_unpickle(
            self.detected_location_info, encoder="msgpack_base64")
    self.set("core", "externalipaddress_v4", self.detected_location_info["ip"])

    if self.get("core", "localipaddress_v4", False, False) is False or \
            self.get("core", "localipaddresstime", False, False) is False:
        # No local address info recorded yet; detect and store everything.
        address_info = get_local_network_info()
        self.set("core", "localipaddress_v4", address_info["ipv4"]["address"])
        self.set("core", "localipaddress_netmask_v4", address_info["ipv4"]["netmask"])
        self.set("core", "localipaddress_cidr_v4", address_info["ipv4"]["cidr"])
        self.set("core", "localipaddress_network_v4", address_info["ipv4"]["network"])
        self.set("core", "localipaddress_v6", address_info["ipv6"]["address"])
        self.set("core", "localipaddress_netmask_v6", address_info["ipv6"]["netmask"])
        # self.set("core", "localipaddress_cidr_v6", address_info["ipv6"]["cidr"])
        # self.set("core", "localipaddress_network_v6", address_info["ipv6"]["network"])
        self.set("core", "localipaddresstime", int(time()))
    else:
        # Refresh the stored local address info when older than 3 minutes.
        if int(self.configs["core"]["localipaddresstime"]["value"]) < (
                int(time()) - 180):
            address_info = get_local_network_info()
            self.set("core", "localipaddress_v4", address_info["ipv4"]["address"])
            self.set("core", "localipaddress_netmask_v4", address_info["ipv4"]["netmask"])
            self.set("core", "localipaddress_cidr_v4", address_info["ipv4"]["cidr"])
            self.set("core", "localipaddress_network_v4", address_info["ipv4"]["network"])
            self.set("core", "localipaddress_v6", address_info["ipv6"]["address"])
            self.set("core", "localipaddress_netmask_v6",
                     address_info["ipv6"]["netmask"])
            # self.set("core", "localipaddress_cidr_v6", address_info["ipv6"]["cidr"])
            # self.set("core", "localipaddress_network_v6", address_info["ipv6"]["network"])
            self.set("core", "localipaddresstime", int(time()))

    # Phase 5: periodically persist configs to disk.
    self.save_loop = LoopingCall(self.save)
    self.save_loop.start(randint(12600, 14400), False)  # every 3.5-4 hours

    if self.get("core", "first_run", None, False) is None:
        self.set("core", "first_run", True)
    self.loading_yombo_ini = False

    # Set system defaults. Reasons: 1) All in one place. 2) Some values are
    # needed before respective libraries are loaded.
    self._Configs.get("mqtt", "client_enabled", True)
    self._Configs.get("mqtt", "server_enabled", True)
    self._Configs.get("mqtt", "server_max_connections", 1000)
    self._Configs.get("mqtt", "server_timeout_disconnect_delay", 2)
    self._Configs.get("mqtt", "server_listen_ip", "*")
    self._Configs.get("mqtt", "server_listen_port", 1883)
    self._Configs.get("mqtt", "server_listen_port_ss_ssl", 1884)
    self._Configs.get("mqtt", "server_listen_port_le_ssl", 1885)
    self._Configs.get("mqtt", "server_listen_port_websockets", 8081)
    self._Configs.get("mqtt", "server_listen_port_websockets_ss_ssl", 8444)
    self._Configs.get("mqtt", "server_listen_port_websockets_le_ssl", 8445)
    self._Configs.get("mqtt", "server_allow_anonymous", False)
    self._Configs.get("misc", "temperature_display", "f")
    self._Configs.get("misc", "length_display", "imperial")  # will we ever get to metric?
    self.cfg_loaded = True