def encode_message(self, destination_id=None, component_type=None, component_name=None, payload=None, reply_to=None):
    """
    Build the outgoing representation of a message.

    Assembles the message body, serializes it with msgpack_base85, and
    attaches a compact SHA-256 hash of the serialized body. Returns both
    the local (unpickled) message and the fully pickled outgoing form.

    :param destination_id: Target gateway id; defaults to "all" when None.
    :param component_type: Component type to address within the destination.
    :param component_name: Component name to address within the destination.
    :param payload: Dictionary to send.
    :param reply_to: Message id this message replies to, if any.
    :return: Tuple of (message_id, local message dict, pickled outgoing message).
    """
    target = "all" if destination_id is None else destination_id
    message_id = random_string(length=20)
    body = dict(
        payload=payload,
        time_sent=time(),
        source_id=self.gateway_id,
        destination_id=target,
        message_id=message_id,
        component_type=component_type,
        component_name=component_name,
        reply_to=reply_to,
        protocol_version=2,
    )
    pickled_body = data_pickle(body, "msgpack_base85")
    body_hash = sha256_compact(pickled_body)
    message_out = {"body": pickled_body, "hash": body_hash}
    message = {"body": body, "hash": body_hash}
    return message_id, message, data_pickle(message_out, "msgpack_base85")
def save_to_db(self, forced=None):
    """
    Queue this device command for persistence in the local database.

    Commands owned by a remote gateway are skipped unless this gateway is
    the master. A write happens only when the instance is dirty or when
    explicitly forced.

    :param forced: When True, save even if the instance is not dirty.
    """
    parent = self._Parent
    if self.device.gateway_id != parent.gateway_id and parent.is_master is not True:
        self._dirty = False
        return
    if not (self._dirty or forced is True):
        return
    data = self.asdict()
    del data['started']  # runtime-only field; never persisted
    for field in ('history', 'inputs', 'requested_by'):
        data[field] = data_pickle(getattr(self, field))
    operation = 'update' if self._in_db is True else 'insert'
    parent._LocalDB.add_bulk_queue('device_commands', operation, data, 'request_id')
    self._dirty = False
    self._in_db = True
def save_to_db(self, forced=None):
    """
    Queue this device status record for persistence in the local database.

    Fake data is never written. Records owned by a remote gateway are only
    written when this gateway is the master. A write happens only when the
    instance is dirty or when explicitly forced.

    :param forced: When True, save even if the instance is not dirty.
    """
    if self.fake_data is True:
        self._dirty = False  # fake data is intentionally never persisted
        return
    parent = self._Parent
    if self.device.gateway_id != parent.gateway_id and parent.is_master is not True:
        self._dirty = False
        return
    if not (self._dirty or forced is True):
        return
    data = self.asdict()
    data['machine_status_extra'] = data_pickle(self.machine_status_extra)
    data['requested_by'] = data_pickle(self.requested_by)
    operation = 'update' if self._in_db is True else 'insert'
    parent._LocalDB.add_bulk_queue('device_status', operation, data, 'status_id')
    self._dirty = False
    self._in_db = True
def generic_item_save(self, name, data):
    """
    Create or update a generic item row in the local database.

    Looks up the existing row by the item's primary key (per the
    GENERIC_ATTRIBUTES entry for ``name``); creates a new record when none
    exists, copies all table columns from ``data`` onto the record —
    pickling any column listed in the entry's ``pickled_columns`` — and
    then saves it.

    :param name: Key into GENERIC_ATTRIBUTES describing the table/class.
    :param data: Source object whose attributes are copied into the row.
    :return: The saved database item.
    """
    attrs = GENERIC_ATTRIBUTES[name]
    primary_id = getattr(data, attrs["primary_column_name"])
    db_item = yield attrs["class"].find(primary_id)
    if db_item is None:  # No existing row - create a fresh one.
        db_item = attrs["class"]()
        db_item.id = primary_id
    pickled_columns = attrs["pickled_columns"]
    # Bug fix: the copy loop was previously guarded by
    # len(pickled_columns) > 0, so tables with no pickled columns had NO
    # fields copied at all before save(). Fields are now always copied.
    for field in self.get_table_columns(name):
        value = getattr(data, field)
        if field in pickled_columns:
            value = data_pickle(value)
        setattr(db_item, field, value)
    yield db_item.save()
    return db_item
def save_statistic(self, bucket, finished=None):
    """
    Persist a single statistics bucket to the database.

    Buckets previously restored from the database (carrying
    ``restored_db_id``) are updated in place; new buckets are inserted
    with "OR IGNORE" so duplicates are skipped.

    :param bucket: Bucket dictionary (value, anon, type, time, name, ...).
    :param finished: Whether the bucket is closed; defaults to False.
    :return: Database result from the update/insert.
    """
    args = {
        "bucket_value": bucket["value"],
        "updated_at": int(time()),
        "anon": bucket["anon"],
        # Dead-code fix: the original normalized None->False and then
        # re-checked "is not None" (always true), leaving an unreachable
        # else branch that assigned 0. This single expression is equivalent.
        "finished": finished if finished is not None else False,
    }
    if bucket["type"] == "average":
        args["bucket_average_data"] = data_pickle(bucket["average_data"], separators=(",", ":"))
    if "restored_db_id" in bucket:
        results = yield self.dbconfig.update(
            "statistics", args, where=["id = ?", bucket["restored_db_id"]])
    else:
        args["bucket_time"] = bucket["time"]
        args["bucket_type"] = bucket["type"]
        args["bucket_name"] = bucket["bucket_name"]
        results = yield self.dbconfig.insert("statistics", args, None, "OR IGNORE")
    return results
def save_device_states(self, data):
    """
    Attempts to find the provided device state in the database. If it's found,
    update it. Otherwise, a new one is created.

    All table columns are copied from ``data``; columns listed in
    PICKLED_COLUMNS are pickled before assignment.

    :param data: A device state instance.
    :return:
    """
    # NOTE(review): the lookup keys on data.command_id while a new row's id
    # is set from data.state_id - confirm this asymmetry is intentional.
    device_state = yield DeviceState.find(data.command_id)
    if device_state is None:  # If none is found, create a new one.
        device_state = DeviceState()
        device_state.id = data.state_id
    # Leftover debug print() statements removed from this hot path.
    for field in self.get_table_columns("device_states"):
        value = getattr(data, field)
        if field in PICKLED_COLUMNS:
            value = data_pickle(value)
        setattr(device_state, field, value)
    yield device_state.save()
def save_storage(self, storage):
    """
    Insert a storage record into the database.

    Most columns are copied straight from the ``storage`` mapping; the
    ``public`` flag is coerced to a bool and ``variables`` is pickled.

    :param storage: Mapping with the storage row's column values.
    """
    args = {column: storage[column] for column in (
        "id", "scheme", "username", "password", "netloc", "port",
        "path", "params", "query", "fragment", "mangle_id", "expires")}
    args["public"] = coerce_value(storage["public"], "bool")
    args.update({column: storage[column] for column in (
        "internal_url", "external_url", "internal_thumb_url", "external_thumb_url",
        "content_type", "charset", "size", "file_path", "file_path_thumb")})
    args["variables"] = data_pickle(storage['variables'])
    args["created_at"] = storage["created_at"]
    yield self.dbconfig.insert("storage", args)
def edit_node(self, node_id, node_data, source=None, authorization=None, **kwargs):
    """
    This shouldn't be used by outside calls; instead, to update the node, simply
    edit the node attributes directly. That will cause the node to update the
    database and Yombo API.

    This is used by other internal libraries to update a node's data in bulk.

    :param node_id: Node ID to bulk update.
    :param node_data: Dictionary of items to update; must contain a "data" key.
    :param source: Should be: local or remote. Default is local.
    :param authorization: Authorization header passed through to the API call.
    :param kwargs: Unused.
    :return: The in-memory node after the update.
    :raises YomboWarning: If node_data is not a dict or lacks a "data" key.
    """
    gateway_id = self.gateway_id
    if source is None:
        source = "local"
    # Validate the incoming payload before notifying any hooks.
    if isinstance(node_data, dict) is False:
        raise YomboWarning("edit_node() only accepts dictionaries for 'node_data' argument.")
    if "data" not in node_data:
        raise YomboWarning("Cannot edit node, 'data' not found")
    # Let other components react (or veto) before the update happens.
    global_invoke_all("_node_before_update_",
                      called_by=self,
                      node_id=node_id,
                      node=node_data,
                      in_memory=node_id in self.nodes,
                      )
    if source == "local":
        # Pickle the payload for transport; send only non-empty values to
        # the Yombo API, then adopt the API's authoritative copy.
        api_data = deepcopy(node_data)
        node_data["data"] = data_pickle(api_data["data"], api_data["data_content_type"])
        api_to_send = {k: v for k, v in bytes_to_unicode(api_data).items() if v}
        response = yield self.patch_node(node_id=node_id, api_data=api_to_send,
                                         authorization=authorization)
        node_data = response.content["data"]['attributes']
    # Now we have the final data; update the local copy and persist it.
    node_id = node_data["id"]
    if node_id in self.nodes:
        self.nodes[node_id].update_attributes(node_data)  # Update existing node data.
        self.nodes[node_id].save_to_db()
    global_invoke_all("_node_updated_",
                      called_by=self,
                      node_id=node_id,
                      node=self.nodes[node_id],
                      )
    return self.nodes[node_id]
def add_task(self, data):
    """
    Insert a new task row into the tasks table.

    The ``task_arguments`` value is pickled (msgpack_base85_zip) before the
    insert; duplicate rows are ignored via "OR IGNORE".

    Note: the previous docstring ("Get all tasks for a given section") was
    copied from a read method and did not describe this insert.

    :param data: Task dictionary to insert; mutated in place (task_arguments
        is replaced with its pickled form).
    :return: Database insert results.
    """
    data["task_arguments"] = data_pickle(data["task_arguments"], "msgpack_base85_zip")
    results = yield self.dbconfig.insert("tasks", data, None, "OR IGNORE")
    return results
def pickle_records(self, records, pickled_columns, encoding=None):
    """
    Pickle selected attributes on one or more records, in place.

    Each attribute named in ``pickled_columns`` is replaced with its
    pickled form via ``data_pickle``. (Despite any historical naming,
    this pickles - it does not unpickle.)

    :param records: A single record or a list of records; mutated in place.
    :param pickled_columns: Attribute names to pickle on each record.
    :param encoding: Unused; kept for interface compatibility.
    """
    if not isinstance(records, list):
        records = [records]
    for item in records:
        for column in pickled_columns:
            setattr(item, column, data_pickle(getattr(item, column)))
def save(self):
    """
    Persist this user's roles and item permissions to the config store.

    The data is pickled with msgpack_base64 and stored locally under the
    "rbac_user_roles" section, keyed by the user id.
    """
    payload = data_pickle(
        {"roles": list(self.roles), "item_permissions": self.item_permissions},
        encoder="msgpack_base64",
        local=True,
    )
    self._Parent._Configs.set("rbac_user_roles", self.user_id, payload, ignore_case=True)
def update_web_session(self, session):
    """
    Update an existing web session row with the session's current state.

    Auth details (data, type, timestamp) are pickled into a single column;
    token and timestamp columns are copied directly from the session.

    :param session: The web session instance to persist.
    """
    logger.debug("update_web_session: session.auth_id: {auth_id}", auth_id=session._auth_id)
    pickled_auth = data_pickle({
        "auth_data": session.auth_data,
        "auth_type": session.auth_type,
        "auth_at": session.auth_at,
    })
    columns = {
        "enabled": coerce_value(session.enabled, "int"),
        "auth_data": pickled_auth,
        "refresh_token": session._refresh_token,
        "access_token": session._access_token,
        "refresh_token_expires_at": session.refresh_token_expires_at,
        "access_token_expires_at": session.access_token_expires_at,
        "user_id": session.user_id,
        "last_access_at": session.last_access_at,
        "updated_at": session.updated_at,
    }
    yield self.dbconfig.update("webinterface_sessions", columns,
                               where=["id = ?", session._auth_id])
def save(self):
    """
    Persist this role to the config store.

    System-sourced roles are never saved. The role's fields and item
    permissions are pickled with msgpack_base64 and stored locally under
    the "rbac_roles" section, keyed by the role id.
    """
    if self.source == "system":
        return  # Built-in roles are not persisted.
    payload = {
        "role_id": self.role_id,
        "label": self.label,
        "machine_label": self.machine_label,
        "description": self.description,
        "saved_permissions": self.item_permissions,
    }
    self._Parent._Configs.set("rbac_roles", self.role_id,
                              data_pickle(payload, encoder="msgpack_base64", local=True),
                              ignore_case=True)
def save_device_commands(self, data):
    """
    Create or update a device command row in the database.

    Looks up an existing command by ``data.command_id``; when none exists,
    a new record is created. All table columns are then copied from
    ``data``, pickling any column listed in PICKLED_COLUMNS.

    :param data: A command instance.
    """
    record = yield DeviceCommand.find(data.command_id)
    if record is None:  # No match found - start a fresh record.
        record = DeviceCommand()
        record.request_id = data.request_id
    for column in self.get_table_columns("device_commands"):
        value = getattr(data, column)
        if column in PICKLED_COLUMNS:
            value = data_pickle(value)
        setattr(record, column, value)
    yield record.save()
def save_web_session(self, session):
    """
    Insert a new web session row for the given session.

    Auth details (data, type, timestamp) are pickled into a single column;
    the remaining columns are taken directly from the session. Duplicate
    ids are ignored via "OR IGNORE".

    :param session: The web session instance to persist.
    """
    logger.debug("save_web_session: session.auth_id: {auth_id}", auth_id=session._auth_id)
    logger.debug("save_web_session: session.auth_data: {auth_data}", auth_data=session.auth_data)
    pickled_auth = data_pickle({
        "auth_data": session.auth_data,
        "auth_type": session.auth_type,
        "auth_at": session.auth_at,
    })
    columns = {
        "id": session._auth_id,
        "enabled": coerce_value(session.enabled, "int"),
        "auth_data": pickled_auth,
        "refresh_token": session._refresh_token,
        "access_token": session._access_token,
        "refresh_token_expires_at": session.refresh_token_expires_at,
        "access_token_expires_at": session.access_token_expires_at,
        "user_id": session.user_id,
        "created_at": session.created_at,
        "last_access_at": session.last_access_at,
        "updated_at": session.updated_at,
    }
    yield self.dbconfig.insert("webinterface_sessions", columns, None, "OR IGNORE")
def _do_sync_config(self):
    """
    Persist this auth key's state to the config store.

    All fields are pickled with msgpack_base64 and stored locally under
    the "rbac_authkeys" section, keyed by a compact SHA-256 of the auth id.
    """
    # Leftover debug print() removed from this method.
    tosave = {
        "source": self.source,
        "label": self.label,
        "description": self.description,
        "enabled": self.enabled,
        "roles": list(self.roles),
        "auth_id": self.auth_id,
        "created_by": self.created_by,
        "created_by_type": self.created_by_type,
        "last_access_at": self.last_access_at,
        "created_at": self.created_at,
        "updated_at": self.updated_at,
        # NOTE(review): both keys store self.item_permissions - confirm
        # "saved_permissions" shouldn't come from a different attribute.
        "item_permissions": self.item_permissions,
        "saved_permissions": self.item_permissions,
    }
    self._Parent._Configs.set("rbac_authkeys", sha256_compact(self.auth_id),
                              data_pickle(tosave, encoder="msgpack_base64", local=True),
                              ignore_case=True)
def add_node(self, node_data, source=None, authorization=None, **kwargs):
    """
    Used to create new nodes. Node data should be a dictionary. This will:

    1) Send the node information to Yombo cloud for persistence.
    2) Save the node information to local database.
    3) Load the node into memory for usage.

    This adds the node at Yombo, adds to the local DB store if the gateway_id
    matches ours, and loads it into memory if the gateway is ours and
    destination is 'gw' or 'always_load' is 1.

    Required:
    node_type
    weight (defaults to 0 if not set)
    always_load (defaults to 1 - true if not set)
    data
    data_content_type - Usually msgpack_base85 or json.
    status (defaults to 1 - enabled)

    Optional:
    gateway_id - Will not save to localdb or load into memory if not set to this gateway.
    machine_label
    label
    destination

    :param node_data: Dictionary describing the node to create; mutated in place.
    :param source: "local" (default) sends the node to the Yombo API.
    :param authorization: Authorization header passed to the API request.
    :param kwargs: Unused.
    :return: The new node's id.
    :raises YomboWarning: If node_data has no usable "data" value.
    """
    print("nodes:: new_new 1")
    if source is None:
        source = "local"
    gateway_id = self.gateway_id
    # A node without a payload is meaningless - reject it outright.
    if "data" not in node_data or node_data["data"] is None:
        raise YomboWarning("Node must have data!")
    # Infer a content type from the payload's Python type when not supplied.
    if "data_content_type" not in node_data or node_data["data_content_type"] is None:
        if isinstance(node_data["data"], dict) or isinstance(node_data["data"], list):
            node_data["data_content_type"] = "json"
        elif isinstance(node_data["data"], bool):
            node_data["data_content_type"] = "bool"
        else:
            node_data["data_content_type"] = "string"
    # Fill in defaults for optional fields the caller omitted.
    if "parent_id" not in node_data:
        node_data["parent_id"] = None
    if "gateway_id" not in node_data:
        node_data["gateway_id"] = gateway_id
    if "destination" in node_data and node_data["destination"] == "gw" and \
            ("gateway_id" not in node_data or node_data["gateway_id"] is None):
        node_data["gateway_id"] = gateway_id
    if "always_load" not in node_data or node_data["always_load"] is None:
        node_data["always_load"] = 1
    if "weight" not in node_data or node_data["weight"] is None:
        node_data["weight"] = 0
    if "status" not in node_data or node_data["status"] is None:
        node_data["status"] = 1
    if source == "local":
        # api_data = deepcopy(node_data)
        # Pickle the payload for transport; send only non-empty values to
        # the Yombo API, then adopt the API's authoritative copy.
        node_data["data"] = data_pickle(node_data["data"], node_data["data_content_type"])
        api_data = {k: v for k, v in bytes_to_unicode(node_data).items() if v}
        print("nodes:: new_new 10")
        response = yield self._YomboAPI.request("POST", "/v1/node", api_data,
                                                authorization_header=authorization)
        node_data = response.content["data"]['attributes']
        print(f"new node data: {node_data}")
    node_id = node_data["id"]
    # NOTE(review): add_to_db() is reached through self.nodes[node_id] BEFORE
    # the node is loaded into memory below; if the destination/gateway checks
    # differ this may raise KeyError. Confirm the ordering is intentional.
    if "gateway_id" in node_data and node_data["gateway_id"] == gateway_id:
        self.nodes[node_id].add_to_db()
    if "destination" in node_data and node_data["destination"] == "gw" and \
            "gateway_id" in node_data and node_data["gateway_id"] == gateway_id:
        print("Loading new node data into memory...")
        self._load_node_into_memory(node_data)
    global_invoke_all("_node_added_",
                      called_by=self,
                      node_id=node_id,
                      node=self.nodes[node_id],
                      )
    return node_id
def save_file(self, source_file, destination, delete_source=None, expires=None, public=None,
              mangle_name=None, content_type=None, charset=None, extra=None):
    """
    Saves a file. Usually used when a file is on disk and needs to be uploaded.

    Destinations:
    file://path/file.
    s3://bucketname/path/file
    dropbox://path/file

    :param source_file: Path of the file to store; must exist on disk.
    :param destination: Destination URI; its scheme selects the storage backend.
    :param delete_source: Remove the source file after storing; defaults to True.
    :param expires: Days until expiry; defaults to 30. Zero or less never expires.
    :param public: Whether the stored file is public; defaults to True.
    :param mangle_name: Name-mangling mode passed to check_destination; defaults to 1.
    :param content_type: Content type; detected from the file when omitted/empty.
    :param charset: Charset; detected from the file when omitted/empty.
    :param extra: Backend-specific extra data handed to the save callback.
    :raises YomboWarning: If source_file does not exist.
    """
    if path.isfile(source_file) is False:
        raise YomboWarning(f"File doesn't exist: {source_file}")
    # Apply defaults for any omitted options.
    if delete_source is None:
        delete_source = True
    if expires is None:
        expires = 30
    if public is None:
        public = True
    if mangle_name is None:
        mangle_name = 1
    file_id = random_string(length=15)
    dest_parts, dest_parts_thumb, mangle_id = self.check_destination(destination, file_id, mangle_name)
    size = yield file_size(source_file)
    # Delegate the actual storage to the backend registered for the scheme;
    # maybeDeferred accepts both sync and async backend callbacks.
    results = yield maybeDeferred(self.storage[dest_parts.scheme]["save_file_callback"],
                                  source_file, dest_parts, dest_parts_thumb, delete_source,
                                  file_id, mangle_id, expires, public, extra)
    """
    new_path = "file://somehost/somepath/somefile_{index-id}.jpg
    """
    # Detect content type/charset from the file only when not provided.
    if content_type is None or charset is None:
        content_info = yield mime_type_from_file(source_file)
        if content_type in (None, ""):
            content_type = content_info["content_type"]
        if charset in (None, ""):
            charset = content_info["charset"]
    # Build and persist the database record for this stored file.
    new = StorageDB()
    new.id = file_id
    new.scheme = dest_parts.scheme
    new.username = dest_parts.username
    new.password = dest_parts.password
    new.netloc = dest_parts.netloc
    new.port = dest_parts.port
    new.path = dest_parts.path
    new.params = dest_parts.params
    new.query = dest_parts.query
    new.fragment = dest_parts.fragment
    new.mangle_id = mangle_id
    if expires > 0:
        new.expires = time() + (expires*86400)  # days -> absolute epoch seconds
    else:
        new.expires = 0  # zero means "never expires"
    new.public = public
    new.internal_url = results["internal_url"]
    new.external_url = results["external_url"]
    new.internal_thumb_url = results.get("internal_thumb_url", None)
    new.external_thumb_url = results.get("external_thumb_url", None)
    new.content_type = content_type
    new.charset = charset
    new.size = size
    new.created_at = round(time(), 3)
    new.file_path = results.get("file_path", None)
    new.file_path_thumb = results.get("file_path_thumb", None)
    new.variables = data_pickle(results.get("variables", {}))  # used by the various storage backends for their own use.
    yield new.save()
def save_data(self, source_data, destination, expires=None, public=None, mangle_name=None,
              content_type=None, charset=None, extra=None):
    """
    Uploads data (an in-memory buffer rather than a file on disk).

    :param source_data: The raw data to store.
    :param destination: Destination URI; its scheme selects the storage backend.
    :param expires: Days until expiry; defaults to 30. Zero or less never expires.
    :param public: Whether the stored data is public; defaults to True.
    :param mangle_name: Name-mangling mode passed to check_destination; defaults to 1.
    :param content_type: Content type; detected from the buffer when omitted/empty.
    :param charset: Charset; detected from the buffer when omitted/empty.
    :param extra: Backend-specific extra data handed to the save callback.
    """
    # Apply defaults for any omitted options.
    if expires is None:
        expires = 30
    if public is None:
        public = True
    if mangle_name is None:
        mangle_name = 1
    file_id = random_string(length=15)
    dest_parts, dest_parts_thumb, mangle_id = self.check_destination(destination, file_id, mangle_name)
    # Delegate the actual storage to the backend registered for the scheme;
    # maybeDeferred accepts both sync and async backend callbacks.
    results = yield maybeDeferred(self.storage[dest_parts.scheme]["save_data_callback"],
                                  source_data, dest_parts, dest_parts_thumb, file_id,
                                  mangle_id, expires, public, extra)
    # Detect content type/charset from the buffer only when not provided.
    if content_type is None or charset is None:
        content_info = yield mime_type_from_buffer(source_data)
        if content_type in (None, ""):
            content_type = content_info["content_type"]
        if charset in (None, ""):
            charset = content_info["charset"]
    # Build and persist the database record for this stored data.
    new = StorageDB()
    new.id = file_id
    new.scheme = dest_parts.scheme
    new.username = dest_parts.username
    new.password = dest_parts.password
    new.netloc = dest_parts.netloc
    new.port = dest_parts.port
    new.path = dest_parts.path
    new.params = dest_parts.params
    new.query = dest_parts.query
    new.fragment = dest_parts.fragment
    new.mangle_id = mangle_id
    if expires > 0:
        new.expires = time() + (expires*86400)  # days -> absolute epoch seconds
    else:
        new.expires = 0  # zero means "never expires"
    new.public = public
    new.internal_url = results["internal_url"]
    new.external_url = results["external_url"]
    new.internal_thumb_url = results.get("internal_thumb_url", None)
    new.external_thumb_url = results.get("external_thumb_url", None)
    new.content_type = content_type
    new.charset = charset
    new.size = len(source_data)
    new.created_at = round(time(), 3)
    new.file_path = results.get("file_path", None)
    new.file_path_thumb = results.get("file_path_thumb", None)
    new.variables = data_pickle(results.get("variables", {}))  # used by the various storage backends for their own use.
    yield new.save()
def _init_(self, **kwargs):
    """
    Open the yombo.ini file for reading.

    Import the configuration items into the database, also prime the configs
    for reading. Also reads yombo.ini.info metadata, detects location and
    network information, starts the periodic save loop, and seeds system
    default configuration values.
    """
    self.exit_config_file = None  # Holds a complete configuration file to save when exiting.
    self.cache_dirty = False
    self.configs = {}  # Holds actual config data
    self.cfg_loaded = False
    self.yombo_ini_last_modified = 0
    self.working_dir = settings.arguments["working_dir"]
    ini_norestore = settings.arguments["norestoreini"]
    self.yombo_ini_path = f"{self.working_dir}/yombo.ini"
    # Sanity-check yombo.ini: remove it if it is not a regular file, and
    # consider a restore from backup if it is missing or looks corrupt.
    # NOTE(review): "norestoreini" reads like "do NOT restore", yet a truthy
    # value triggers restore_backup_yombi_ini() below - confirm the flag's
    # polarity is intentional.
    if os.path.exists(self.yombo_ini_path):
        if os.path.isfile(self.yombo_ini_path) is False:
            try:
                os.remove(self.yombo_ini_path)
            except Exception as e:
                logger.error("'yombo.ini' file exists, but it's not a file and it can't be deleted!")
                reactor.stop()
                return
            if ini_norestore:
                self.restore_backup_yombi_ini()
        else:
            if os.path.getsize(self.yombo_ini_path) < 2:
                logger.warn("yombo.ini appears corrupt, attempting to restore from backup.")
                if ini_norestore:
                    self.restore_backup_yombi_ini()
    else:
        if ini_norestore:
            self.restore_backup_yombi_ini()
    self.loading_yombo_ini = True
    # No yombo.ini content at all means this is a brand-new installation.
    if settings.yombo_ini is False:
        self._Loader.operating_mode = "first_run"
    else:
        # Import every section/option; values may be GPG-encrypted, so try
        # to decrypt and fall back to the raw value on failure.
        for section, options in settings.yombo_ini.items():
            for option, value in options.items():
                try:
                    value = yield self._GPG.decrypt(value)
                except:
                    pass
                self.set(section, option, value, ignore_case=True)
    # get() with a default also seeds the value when it's missing.
    self.get("core", "rand_seed", random_string(length=128))
    logger.debug("done parsing yombo.ini. Now about to parse yombo.ini.info.")
    # Merge per-option metadata from yombo.ini.info (base64+msgpack encoded)
    # into the already-loaded config entries. Unknown sections/options are
    # skipped; read errors are logged but non-fatal.
    try:
        config_parser = configparser.ConfigParser()
        config_parser.read(f"{self.working_dir}/etc/yombo.ini.info")
        logger.debug("yombo.ini.info file read into memory.")
        for section in config_parser.sections():
            if section not in self.configs:
                continue
            for option in config_parser.options(section):
                if option not in self.configs[section]:
                    continue
                values = msgpack.loads(b64decode(config_parser.get(section, option)))
                self.configs[section][option] = dict_merge(self.configs[section][option], values)
    except IOError as e:
        logger.warn("CAUGHT IOError!!!!!!!!!!!!!!!!!! In reading meta: {error}", error=e)
    except configparser.NoSectionError:
        logger.warn("CAUGHT ConfigParser.NoSectionError!!!! IN saving. ")
    logger.debug("done parsing yombo.ini.info")
    #setup some defaults if we are new....
    self.get("core", "gwid", "local")
    self.get("core", "gwuuid", None)
    self.get("core", "is_master", 1)
    self.get("core", "master_gateway_id", "local")
    # Perform DB cleanup activites based on local section.
    if self.get("local", "deletedelayedmessages", False, False) is True:
        self._LocalDB.delete("sqldict", ["module = ?", "yombo.lib.messages"])
        self.set("local", "deletedelayedmessages", False)
    if self.get("local", "deletedevicehistory", False, False) is True:
        self._LocalDB.truncate("devicestatus")
        self.set("local", "deletedevicehistory", False)
    current_time = int(time())
    # Ask external services what they know about us.
    # detected_location states are based off this and is set in the locations library.
    # times uses this
    self.detected_location_info = self.get("core", "locationinfo", None, False)
    # Refresh detected location info when missing or older than one hour;
    # otherwise unpickle the cached copy.
    if self.detected_location_info is None or \
            self.get("core", "locationinfotime", 0, False) < current_time - 3600:
        self.detected_location_info = yield detect_location_info()
        self.set("core", "locationinfo",
                 data_pickle(self.detected_location_info, encoder="msgpack_base64", local=True))
        self.set("core", "locationinfotime", current_time)
    else:
        self.detected_location_info = data_unpickle(self.detected_location_info,
                                                    encoder="msgpack_base64")
    self.set("core", "externalipaddress_v4", self.detected_location_info["ip"])
    # Record local network addresses; redetect when unset, or (else branch)
    # when the stored values are older than 180 seconds.
    if self.get("core", "localipaddress_v4", False, False) is False or \
            self.get("core", "localipaddresstime", False, False) is False:
        address_info = get_local_network_info()
        self.set("core", "localipaddress_v4", address_info["ipv4"]["address"])
        self.set("core", "localipaddress_netmask_v4", address_info["ipv4"]["netmask"])
        self.set("core", "localipaddress_cidr_v4", address_info["ipv4"]["cidr"])
        self.set("core", "localipaddress_network_v4", address_info["ipv4"]["network"])
        self.set("core", "localipaddress_v6", address_info["ipv6"]["address"])
        self.set("core", "localipaddress_netmask_v6", address_info["ipv6"]["netmask"])
        # self.set("core", "localipaddress_cidr_v6", address_info["ipv6"]["cidr"])
        # self.set("core", "localipaddress_network_v6", address_info["ipv6"]["network"])
        self.set("core", "localipaddresstime", int(time()))
    else:
        if int(self.configs["core"]["localipaddresstime"]["value"]) < (int(time()) - 180):
            address_info = get_local_network_info()
            self.set("core", "localipaddress_v4", address_info["ipv4"]["address"])
            self.set("core", "localipaddress_netmask_v4", address_info["ipv4"]["netmask"])
            self.set("core", "localipaddress_cidr_v4", address_info["ipv4"]["cidr"])
            self.set("core", "localipaddress_network_v4", address_info["ipv4"]["network"])
            self.set("core", "localipaddress_v6", address_info["ipv6"]["address"])
            self.set("core", "localipaddress_netmask_v6", address_info["ipv6"]["netmask"])
            # self.set("core", "localipaddress_cidr_v6", address_info["ipv6"]["cidr"])
            # self.set("core", "localipaddress_network_v6", address_info["ipv6"]["network"])
            self.set("core", "localipaddresstime", int(time()))
    # Periodically flush configs to disk.
    self.save_loop = LoopingCall(self.save)
    self.save_loop.start(randint(12600, 14400), False)  # every 3.5-4 hours
    if self.get("core", "first_run", None, False) is None:
        self.set("core", "first_run", True)
    self.loading_yombo_ini = False
    # set system defaults. Reasons: 1) All in one place. 2) Somes values are needed before respective libraries
    # are loaded.
    self._Configs.get("mqtt", "client_enabled", True)
    self._Configs.get("mqtt", "server_enabled", True)
    self._Configs.get("mqtt", "server_max_connections", 1000)
    self._Configs.get("mqtt", "server_timeout_disconnect_delay", 2)
    self._Configs.get("mqtt", "server_listen_ip", "*")
    self._Configs.get("mqtt", "server_listen_port", 1883)
    self._Configs.get("mqtt", "server_listen_port_ss_ssl", 1884)
    self._Configs.get("mqtt", "server_listen_port_le_ssl", 1885)
    self._Configs.get("mqtt", "server_listen_port_websockets", 8081)
    self._Configs.get("mqtt", "server_listen_port_websockets_ss_ssl", 8444)
    self._Configs.get("mqtt", "server_listen_port_websockets_le_ssl", 8445)
    self._Configs.get("mqtt", "server_allow_anonymous", False)
    self._Configs.get("misc", "temperature_display", "f")
    self._Configs.get("misc", "length_display", "imperial")  # will we ever get to metric?
    self.cfg_loaded = True