def update_caches(self, force=None, quick=False):
    """
    Iterates through all libraries to update the cache, then tells the modules to do the same.

    :param force: If True while an update is already running, signal that run to abort.
    :param quick: Accepted for interface compatibility; not used by the visible logic.
    :return:
    """
    if self.cache_updater_running is True and force is True:
        # Bug fix: the original used "==" (a no-op comparison) instead of "=",
        # so a forced call never actually signaled the running updater to stop.
        # None is the sentinel the loops below check for to abort early.
        self.cache_updater_running = None
        return
    if self._Loader.operating_mode != 'run' or self.cache_updater_running is not False or \
            self.system_stopping is True:
        return
    logger.info("Starting cache updates.")
    # Copied from Modules library, but with sleeps and checks.
    for module_id, module in self._Modules.modules.items():
        if self.cache_updater_running is None or self.system_stopping is True:
            # Aborted (forced) or system shutting down; reset and bail out.
            self.cache_updater_running = False
            self.update_caches_loop.reset()
            return
        yield self._Modules.do_update_module_cache(module)
        yield sleep(1)
    for device_id, device in self._Devices.devices.items():
        if self.cache_updater_running is None or self.system_stopping is True:
            self.cache_updater_running = False
            self.update_caches_loop.reset()
            return
        yield device.device_variables()
        yield sleep(0.5)
def connected(self) -> None:
    """
    Called by AMQPProtocol when the connection is ready to use. We'll take this time to
    deliver any queued items.
    """
    # Registrations happen in two phases, with a short pause after each phase.
    registration_phases = (
        (("queue", "exchange"), 0.07),
        (("binding", "subscribe"), 0.03),
    )
    for registration_names, pause in registration_phases:
        for registration_name in registration_names:
            self.check_registrations(registration_name, call_check_delivery=False)
        yield sleep(pause)
    yield self.do_check_delivery_queue()  # Send any queued items to the server.
    self.AMQPClient.connected()  # Let the factory know we are online!
def wait_for_command_to_finish(self, device_command_id: str, timeout: int = 1) -> bool:
    """
    Simply waits for a command to finish by monitoring the device command request status.
    Typically used to wait for one device command to finish before sending a new one.

    :param device_command_id: Id of the device command to watch.
    :param timeout: Max number of seconds to wait before returning false.
    :return: True if command completed successfully within the timeout, otherwise false.
    """
    try:
        command = self.device_commands[device_command_id]
    except KeyError:
        # Unknown command id - nothing to wait on.
        return True
    elapsed = 0
    while True:
        current_status = command.status_id
        if current_status == 100:   # finished successfully
            return True
        if current_status > 100:    # finished, but not successfully
            return False
        yield sleep(0.05)
        elapsed += 0.05
        if elapsed > timeout:
            return False
def stream_http_mjpeg_video(self, image_callback, framerate=None, quality=None, **kwargs):
    """
    Collects images from the remote device as individual images, and returns them to the
    image_callback function.

    :param image_callback: The callback to send images to.
    :param framerate: How many frames per second to try to return. Defaults to 5 fps.
    :param quality: The quality of the jpg, ranging from 1 to 32, 1 being best. Suggested: 2-5.
        (Accepted for interface compatibility; not used by the visible logic.)
    :param kwargs:
    :return:
    """
    # Generalization: honor the requested framerate instead of always using the
    # hard-coded 5 fps. The previous fixed 0.200s interval remains the default.
    if framerate:
        interval = 1.0 / framerate
    else:
        interval = 0.200
    # Bug fix: removed leftover per-frame debug print() calls.
    while True:
        start_time = time()
        image = yield self.get_camera_image()
        duration = float(time() - start_time)
        image_callback(image)
        if duration < interval:
            # Sleep off the remainder of the frame interval so frames arrive at
            # (roughly) the requested rate.
            yield sleep(interval - round(duration, 3))
def purge_expired(self):
    """
    Iterate through all files and purge any expired ones. This call can take a while to complete.

    :return:
    """
    if self.purge_expired_running:
        logger.info("purge_expired already running, skipping.")
        return
    self.purge_expired_running = True
    while True:
        records = yield self._LocalDB.get_expired_storage()
        if len(records) == 0:
            break
        purged_count = 0
        for record in records:
            if record.scheme not in self.storage:
                logger.warn("Unknown storage scheme type: {scheme}", scheme=record.scheme)
                continue
            # The scheme's delete handler may return a plain value or a Deferred.
            results = yield maybeDeferred(self.storage[record.scheme]["delete"],
                                          clean_dict(record.__dict__))
            yield record.delete()
            purged_count += 1
            yield sleep(0.02)
        if purged_count == 0:
            # Bug fix: if a batch contains only unknown-scheme records, nothing is
            # ever deleted, so get_expired_storage() would return the same rows
            # forever and this loop would spin indefinitely. Stop instead.
            break
    self.purge_expired_running = False
def _stop_(self, **kwargs): """Tell other gateways we are going offline.""" if self.enabled is False or hasattr(self, "_Loader") is False or self._Loader.operating_mode != "run": return if hasattr(self, "mqtt") and self.mqtt is not None: self.publish_yombo_gw(topic="system/offline", payload=None, destination="global", publish=True) yield sleep(0.100)
def apiv1_device_command_get_post(webinterface, request, session, device_id, command_id):
    """
    Send a command to a device on behalf of the web API caller, optionally waiting a short
    time for the command to complete so current/previous device status can be returned.

    :param webinterface: The web interface library instance.
    :param request: The incoming HTTP request.
    :param session: The caller's session (used for requested_by).
    :param device_id: Device to control.
    :param command_id: Command to send to the device.
    """
    try:
        wait_time = float(request.args.get('_wait')[0])
    except (TypeError, ValueError, IndexError, KeyError):
        # Bug fix: was a bare "except:", which also swallowed unrelated errors
        # (including SystemExit/KeyboardInterrupt). Only argument-parsing
        # problems should fall back to the default wait time.
        wait_time = 2
    arguments = args_to_dict(request.args)
    if 'inputs' in arguments:
        inputs = arguments['inputs']
    else:
        inputs = None
    if device_id in webinterface._Devices:
        device = webinterface._Devices[device_id]
    else:
        return return_not_found(request, 'device not found')
    try:
        request_id = device.command(
            cmd=command_id,
            requested_by={
                'user_id': session.user_id,
                'component': 'yombo.gateway.lib.webinterface.routes.api_v1.devices.device_command',
                'gateway': webinterface.gateway_id()
            },
            inputs=inputs,
        )
    except KeyError as e:
        return return_not_found(request, 'Error with command: %s' % e)
    DC = webinterface._Devices.device_commands[request_id]
    if wait_time > 0:
        exit_while = False
        start_time = time()
        # Poll until the command reports a terminal status (>= 100) or the wait
        # window expires.
        while (start_time > (time() - wait_time) and exit_while is False):
            yield sleep(.075)
            if DC.status_id >= 100:
                exit_while = True
    if len(device.status_history) > 0:
        status_current = device.status_history[0].asdict()
    else:
        status_current = None
    if len(device.status_history) > 1:
        status_previous = device.status_history[1].asdict()
    else:
        status_previous = None
    return return_good(request, payload={
        'device_command_id': request_id,
        'device_command': DC.asdict(),
        'status_current': status_current,
        'status_previous': status_previous,
    })
def patch_node(self, node_id, api_data, authorization=None):
    """
    Sends the data within the node to the server.

    NOTE(review): the body only sleeps for one second and never contacts the
    server - this looks like a stub/placeholder implementation. Confirm the
    intended behavior before relying on it.

    :param node_id: A string containing the node id.
    :param api_data: A dictionary of data to send.
    :param authorization: An optional header to use for authorization
    :return:
    """
    yield sleep(1)
def _stop_(self, **kwargs): """ Tell other gateways we are going offline. """ if hasattr(self, "_Loader") is False or self._Loader.operating_mode != "run": return if hasattr(self, "mqtt"): if self.mqtt is not None: self.publish_data(destination_id="all", component_type="lib", component_name="system_state", payload="offline") yield sleep(0.2)
def reload_mqtt_broker(self):
    """
    Ask systemd to send SIGHUP to mosquitto so it reloads its configuration,
    then report whether the broker is still running.
    """
    logger.warn("Reloading mosquitto service.")
    reload_args = ['systemctl', 'kill', '-s', 'HUP', 'mosquitto.service']
    try:
        yield getProcessOutput("sudo", reload_args)
    except Exception as reload_error:
        logger.warn("Error while trying to reload mosquitto (mqtt) service configs: {e}", e=reload_error)
    # Give the broker a moment to process the signal before checking on it.
    yield sleep(0.5)
    return (yield self.check_mqtt_broker_running())
def stop_mqtt_broker(self):
    """
    Stop the mqtt broker via systemd.

    Note: this sleeps for half a second after issuing the stop, then re-checks
    whether the broker is still running. (The previous docstring claimed a 2
    second sleep; the code sleeps 0.5s.)

    :return: The result of check_mqtt_broker_running().
    """
    logger.warn("stopping mosquitto service.")
    try:
        yield getProcessOutput("sudo", ["systemctl", "stop", "mosquitto.service"])
    except Exception as e:
        # Best-effort: log and continue so the running-state check still happens.
        logger.warn("Error while trying to stop mosquitto (mqtt) service: {e}", e=e)
    yield sleep(0.5)
    running = yield self.check_mqtt_broker_running()
    return running
def _unload_(self, **kwargs):
    """
    Disconnect all MQTT client connections during shutdown.

    Errors from individual clients are ignored so one misbehaving client
    cannot block shutdown of the rest.
    """
    logger.debug("shutting down mqtt clients...")
    for client_id, client in self.client_connections.items():
        logger.debug("in loop to try to stop mqtt client: {client_id}", client_id=client_id)
        try:
            logger.debug(f"telling client to say goodbye... {client_id}")
            # Tell the reconnecting factory not to attempt reconnecting after disconnect.
            client.factory.stopTrying()
            client.factory.protocol.disconnect()
            client.factory.protocol.close()
        except Exception:
            # Bug fix: was a bare "except:", which also swallows SystemExit and
            # KeyboardInterrupt. Best-effort shutdown should only ignore
            # ordinary errors.
            pass
    yield sleep(0.1)
def get_my_gpg_key_from_keyserver(self):
    """
    Get my gpg key from the keyserver.

    Sometimes Yombo servers will sign known good GPG keys for gateways and other clients
    to help others know which keys are valid. This function allows those signatures to be
    updated locally.

    :return:
    """
    logger.info("Asking GPG key servers for any updates.")
    # Bug fix: the original iterated "self.gpg_keys.items" (the bound method
    # object itself - a TypeError at runtime) instead of calling ".items()".
    for fingerprint, key in self.gpg_keys.items():
        yield threads.deferToThread(self._get_my_gpg_key_from_keyserver,
                                    self.sks_pools[0], fingerprint)
        yield sleep(0.5)
    self._Configs.set("gpg.last_received_keyserver", int(time()))
def clean_sessions(self, close_deferred=None):
    """
    Called by loopingcall.

    Cleanup the stored sessions: drop invalid api-auth entries, flush dirty or
    idle sessions to the database, and expire very old ones from memory.

    :param close_deferred: When a Deferred is passed (not None/True/False), it is
        fired after a short pause once cleanup completes (shutdown flush).
    """
    # Pass 1: remove api-auth entries that are no longer valid, both from
    # memory and from the local database.
    for auth_id in list(self.active_api_auth.keys()):
        if self.active_api_auth[auth_id].check_valid() is False or self.active_api_auth[auth_id].is_valid is False:
            logger.debug("Removing invalid api auth: %s" % auth_id)
            try:
                # The label index may already be out of sync; ignore misses.
                del self.active_api_auth_by_label[self.active_api_auth[auth_id].label]
            except Exception:
                pass
            del self.active_api_auth[auth_id]
            yield self._LocalDB.delete_api_auth(auth_id)

    # Pass 2: persist sessions that are dirty (is_dirty >= 200 - presumably a
    # change-counter threshold; confirm), idle for 5+ minutes, or when a
    # shutdown flush was requested via close_deferred.
    for auth_id in list(self.active_api_auth):
        session = self.active_api_auth[auth_id]
        if session.is_dirty >= 200 or close_deferred is not None or session.last_access < int(time() - (60 * 5)):
            if session.in_db:
                logger.debug("updating old db api auth record: {auth_id}", auth_id=auth_id)
                yield self._LocalDB.update_api_auth(session)
            else:
                logger.debug("creating new db api auth record: {auth_id}", auth_id=auth_id)
                yield self._LocalDB.save_api_auth(session)
                session.in_db = True
            session.is_dirty = 0
            if session.last_access < int(time() - (60 * 60 * 3)):  # delete session from memory after 3 hours
                logger.debug("Deleting session from memory: {auth_id}", auth_id=auth_id)
                try:
                    del self.active_api_auth_by_label[self.active_api_auth[auth_id].label]
                except Exception:
                    pass
                del self.active_api_auth[auth_id]

    # Fire the caller-supplied deferred once everything is flushed.
    if close_deferred is not None and close_deferred is not True and close_deferred is not False:
        yield sleep(0.1)
        close_deferred.callback(1)
def start_mqtt_broker(self):
    """
    Start the mqtt broker via systemd.

    Note: this sleeps for half a second after issuing the start, then checks
    whether the broker came up. (The previous docstring claimed a 2 second
    sleep; the code sleeps 0.5s.)

    :return: The result of check_mqtt_broker_running().
    """
    logger.warn("starting mosquitto service.")
    try:
        yield getProcessOutput("sudo", ['systemctl', 'start', 'mosquitto.service'])
    except Exception as e:
        # Best-effort: log and continue so the running-state check still happens.
        logger.warn("Error while trying to start mosquitto (mqtt) service: {e}", e=e)
    yield sleep(0.5)
    running = yield self.check_mqtt_broker_running()
    return running
def ping_gateways(self):
    """
    Pings all the known gateways.

    :return:
    """
    local_id = self.gateway_id
    skip_ids = ("local", "all", "cluster", local_id)
    for remote_id, remote_gateway in self._Gateways.gateways.items():
        # Skip pseudo-gateways, ourselves, and ids too short to be real.
        if remote_id in skip_ids or len(remote_id) < 13:
            continue
        current_time = time()
        request_id = self.publish_data(destination_id=remote_id,
                                       component_type="lib",
                                       component_name="system_ping",
                                       payload=time(),
                                       message_type="req")
        remote_gateway.ping_request_id = request_id
        remote_gateway.ping_request_at = current_time
        yield sleep(0.1)
def clean_sessions(self, close_deferred=None): """ Called by loopingcall. Cleanup the stored sessions """ # logger.debug("clean_sessions()") count = 0 # session_delete_at = int(time()) - self.config.max_session # idle_delete_at = int(time()) - self.config.max_idle # max_session_no_auth_at = int(time()) - self.config.max_session_no_auth for session_id in list(self.active_sessions.keys()): if self.active_sessions[session_id].check_valid( ) is False or self.active_sessions[session_id].is_valid is False: del self.active_sessions[session_id] yield self._LocalDB.delete_web_session(session_id) count += 1 # logger.debug("Deleted {count} sessions from the session store.", count=count) for session_id in list(self.active_sessions): session = self.active_sessions[session_id] if session.is_dirty >= 200 or close_deferred is not None or session.last_access < int( time() - (60 * 5)): if session.in_db: # session.in_db = True logger.debug("updating old db session record: {id}", id=session_id) yield self._LocalDB.update_web_session(session) else: logger.debug("creating new db session record: {id}", id=session_id) yield self._LocalDB.save_web_session(session) session.in_db = True session.is_dirty = 0 if session.last_access < int( time() - (60 * 60 * 3)): # delete session from memory after 3 hours logger.debug("Deleting session from memory: {session_id}", session_id=session_id) del self.active_sessions[session_id] if close_deferred is not None and close_deferred is not True and close_deferred is not False: yield sleep(0.1) close_deferred.callback(1)
def argon2_find_cost(self, slow=None):
    """
    Benchmark argon2 cost parameters in a worker thread and store the results,
    both on the instance and in the "hash" configuration section. A second,
    faster parameter set is benchmarked with half the max time budget.

    :param slow: If True, pause 16 seconds between the two benchmark passes.
    """
    # The attribute names double as the configuration keys, so one loop
    # handles both the instance assignment and the config write.
    results = yield threads.deferToThread(self.argon2_find_cost_calculator)
    for index, name in enumerate(("argon2_rounds", "argon2_memory", "argon2_duration")):
        setattr(self, name, results[index])
        self._Configs.set("hash", name, results[index])
    if slow is True:
        yield sleep(16)
    results = yield threads.deferToThread(self.argon2_find_cost_calculator, max_time=MAX_DURATION/2)
    for index, name in enumerate(("argon2_rounds_fast", "argon2_memory_fast", "argon2_duration_fast")):
        setattr(self, name, results[index])
        self._Configs.set("hash", name, results[index])
def _unload_(self, **kwargs):
    """
    Disconnect all MQTT client connections, then shut down the embedded MQTT
    server when running in "run" mode.

    Errors from individual clients are ignored so one misbehaving client
    cannot block shutdown of the rest.
    """
    logger.debug("shutting down mqtt clients...")
    if hasattr(self, "client_connections"):
        for client_id, client in self.client_connections.items():
            logger.debug("in loop to try to stop mqtt client: {client_id}", client_id=client_id)
            try:
                logger.debug(f"telling client to say goodbye... {client_id}")
                # Tell the reconnecting factory not to attempt reconnecting after disconnect.
                client.factory.stopTrying()
                client.factory.protocol.disconnect()
                client.factory.protocol.close()
            except Exception:
                # Bug fix: was a bare "except:", which also swallows SystemExit
                # and KeyboardInterrupt. Only ignore ordinary errors.
                pass
        yield sleep(0.1)
    if hasattr(self, "_States"):
        if self._Loader.operating_mode == "run" and self.mqtt_server is not None:
            self.mqtt_server.shutdown()
def amqp_connected(self):
    """
    Called by AMQP when connected; this function was set up while configuring self.ampq.

    Marks the connection live, notifies registered "connected" callbacks, pushes
    our local information to the Yombo cloud, and starts the periodic refresh loop.

    :return:
    """
    self.connected = True
    for notify in self.amqpyombo_options["connected"]:
        notify()
    if self.send_local_information_loop is None:
        self.send_local_information_loop = LoopingCall(self.send_local_information)
    # Sends various information, helps Yombo cloud know we are alive and where to find us.
    self.send_local_information(full=True)
    yield sleep(1)
    self.init_deferred.callback(1)
    refresh_loop = self.send_local_information_loop
    if refresh_loop.running is False:
        refresh_loop.start(random_int(60 * 60 * 4, .2), False)
def wait_for_command_to_finish(self, request_id, timeout=1):
    """
    Simply waits for a command to finish by monitoring the device command request status.

    :param request_id: Id of the device command request to watch.
    :param timeout: Max seconds to wait before giving up.
    :return: True when finished ok (or unknown id); False on failure or timeout.
    """
    try:
        command = self.device_commands[request_id]
    except KeyError:
        # Unknown request id - nothing to wait on.
        return True
    elapsed = 0
    while True:
        current_status = command.status_id
        if current_status == 100:   # finished successfully
            return True
        if current_status > 100:    # finished, but not successfully
            return False
        yield sleep(0.05)
        elapsed += 0.05
        if elapsed > timeout:
            return False
def db_cleanup(self, *args, section=None, **kwargs):
    """
    Cleans out old data and optimizes the database.

    :param section: Which cleanup section to run; one of "device_commands",
        "device_states", "events", "notifications", "states", or None/"all" for
        everything (plus the backup/post-process hooks).
    :return:
    """
    logger.info("db cleanup starting, section : {section}...", section=section)
    if self.db_cleanup_running is True:
        logger.info("Cleanup database already running.")
        # Bug fix: the original fell through here and started a second,
        # concurrent cleanup anyway. Bail out instead.
        return
    self.db_cleanup_running = True
    if section is None:
        section = "all"
    timer = 0

    # Delete old device commands
    if section in ("device_commands", "all"):
        yield sleep(5)
        start_time = time()
        for device_command_id in list(self._DeviceCommands.device_commands.keys()):
            device_command = self._DeviceCommands.device_commands[device_command_id]
            if device_command.finished_at is not None:
                # NOTE(review): the comment says "keep 60 minutes worth", yet
                # this prunes commands that finished *within* the last hour when
                # no device still references them - confirm the comparison
                # direction is intended.
                if device_command.finished_at > start_time - 3600:  # keep 60 minutes worth.
                    found_dc = False
                    for device_id, device in self._Devices.devices.items():
                        if device_command_id in device.device_commands:
                            found_dc = True
                            break
                    if found_dc is False:
                        yield device_command.save_to_db()
                        del self._DeviceCommands.device_commands[device_command_id]
        yield self.db_delete(
            "device_commands",
            where=[f"created_at < {self.variable_placeholder}",
                   time() - (86400 * 45)])
        timer += time() - start_time

    # Lets delete any device status after 90 days. Long term data should be in the statistics.
    if section in ("device_states", "all"):
        yield sleep(5)
        start_time = time()
        yield self.db_delete(
            "device_states",
            where=[f"created_at < {self.variable_placeholder}",
                   time() - (86400 * 90)])
        timer += time() - start_time

    # Cleanup events.
    if section in ("events", "all"):
        yield sleep(5)
        for event_type, event_data in self._Events.event_types.items():
            for event_subtype, event_subdata in event_data.items():
                if event_subdata["expires"] == 0:  # allow data collection for forever.
                    continue
                yield sleep(1)  # Pause between delete queries to keep the DB responsive.
                start_time = time()
                results = yield self.db_delete(
                    "events",
                    where=[f"event_type = {self.variable_placeholder} AND event_subtype = {self.variable_placeholder} AND "
                           f"created_at < {self.variable_placeholder}",
                           event_type,
                           event_subtype,
                           time() - (86400 * event_subdata["expires"])])
                timer += time() - start_time

    # Clean notifications
    if section in ("notifications", "all"):
        yield sleep(5)
        start_time = time()
        for id in list(self._Notifications.notifications.keys()):
            if self._Notifications.notifications[id].expire_at == "Never":
                continue
            if self._Notifications.notifications[id].expire_at is not None and \
                    start_time > self._Notifications.notifications[id].expire_at:
                del self._Notifications.notifications[id]
        yield self.db_delete(
            "notifications",
            where=[f"expire_at < {self.variable_placeholder}", time()])
        timer += time() - start_time

    # Clean states
    if section in ("states", "all"):
        yield sleep(5)
        # Delete unused states older than 1 year
        sql = f"DELETE FROM states WHERE updated_at < {int(time() - 31104000)}"
        start_time = time()
        yield self.db_pool.runQuery(sql)
        timer += time() - start_time

    self._Events.new(event_type="localdb", event_subtype="cleaning",
                     attributes=(section, timer),
                     request_by="localdb", request_by_type="library")

    if section == "all":
        yield sleep(5)
        if hasattr(self, "make_backup"):
            yield maybeDeferred(self.make_backup)
        yield sleep(10)
        if hasattr(self, "db_cleanup_post_process"):
            yield maybeDeferred(self.db_cleanup_post_process)
    self.db_cleanup_running = False
def apiv1_devices_command_get_post(webinterface, request, session, device_id, command_id):
    """
    API v1: send a command to a device, optionally waiting briefly for it to finish so the
    current and previous device state can be included in the response.

    :param webinterface: Web interface library instance.
    :param request: Incoming HTTP request.
    :param session: Authenticated session; must be allowed to control the device.
    :param device_id: Device to control.
    :param command_id: Command to send.
    """
    webinterface._Validate.id_string(device_id)
    webinterface._Validate.id_string(command_id)
    session.is_allowed(AUTH_PLATFORM_DEVICE, "control", device_id)
    try:
        wait_time = float(request.args.get("_wait")[0])
    except (TypeError, ValueError, IndexError, KeyError):
        # Bug fix: was a bare "except:". Only argument-parsing problems should
        # fall back to the default wait time.
        wait_time = 2

    # Bug fix: removed leftover debug prints, including one that called
    # request.content.read() - draining the body stream before it was parsed.
    if request.processed_body is not None:
        arguments = request.processed_body
    else:
        arguments = request_args(request)
    pin_code = arguments.get("pin_code", None)
    delay = arguments.get("delay", None)
    max_delay = arguments.get("max_delay", None)
    not_before = arguments.get("not_before", None)
    not_after = arguments.get("not_after", None)
    inputs = arguments.get("inputs", None)

    if device_id in webinterface._Devices:
        device = webinterface._Devices[device_id]
    else:
        return return_not_found(request, "Device id not found")
    if command_id in webinterface._Commands:
        command = webinterface._Commands[command_id]
    else:
        return return_not_found(request, "Command id not found")

    try:
        device_command_id = yield device.command(
            command=command,
            authentication=session,
            pin=pin_code,
            delay=delay,
            max_delay=max_delay,
            not_before=not_before,
            not_after=not_after,
            inputs=inputs,
            request_context=f"api/v1:{request.getClientIP()}"
            # idempotence=request.idempotence,
        )
    except KeyError as e:
        return return_not_found(request, f"Error with command, it is not found: {e}")
    except YomboWarning as e:
        return return_error(request, f"Error with command: {e}")

    DC = webinterface._DeviceCommands.device_commands[device_command_id]
    if wait_time > 0:
        exit_while = False
        start_time = time()
        # Poll until the command reaches a terminal status (>= 100) or the wait
        # window expires.
        while (start_time > (time() - wait_time) and exit_while is False):
            yield sleep(.075)
            if DC.status_id >= 100:
                exit_while = True

    if len(device.state_history) > 0:
        status_current = device.state_history[0].to_dict(include_meta=False)
    else:
        status_current = None
    if len(device.state_history) > 1:
        status_previous = device.state_history[1].to_dict(include_meta=False)
    else:
        status_previous = None

    # NOTE(review): "type" echoing the command id (rather than a resource type
    # string) is preserved from the original - confirm against the JSONApi schema.
    return webinterface.render_api(
        request,
        data=JSONApi(data={
            "type": device_command_id,
            "id": device_command_id,
            "attributes": {
                "id": device_command_id,
                "device_id": device.device_id,
                "command_id": DC.command_id,
                "device_command_id": device_command_id,
                "device_command": DC.to_dict(include_meta=False),
                "status_current": status_current,
                "status_previous": status_previous,
            }
        }),
        data_type="device_commands",
    )
def db_save_bulk_queue(self, slow: Optional[bool] = None):
    """
    Saves the bulk data to the database.

    :param slow: If true, sleeps 1 second between tables to give the system breathing room.
    :return:
    """
    if len(self.db_bulk_queue) == 0:
        return
    if self.db_save_bulk_queue_running is True:
        # Another run is in flight; ask it to run once more when it finishes.
        self.db_save_bulk_queue_run_again = True
        return
    self.db_save_bulk_queue_running = True

    for table in list(self.db_bulk_queue):
        # Snapshot and detach this table's queue so new items can accumulate
        # while we write the current batch.
        table_data = deepcopy(self.db_bulk_queue[table])
        del self.db_bulk_queue[table]
        for queue_type in table_data.keys():
            if len(table_data[queue_type]) == 0:
                continue
            data_values = table_data[queue_type].copy()
            table_data[queue_type].clear()
            if queue_type == "insert":
                save_data = [value for value in data_values.values()]
                try:
                    yield self.db_insert(table, save_data)
                    yield sleep(0.01)
                except IntegrityError as e:
                    logger.warn("Error trying to insert in bulk save: {e}", e=e)
                    logger.warn("Table: {table}, data: {save_data}", table=table, save_data=save_data)
            elif queue_type == "insert_blind":
                save_data = [data for data in data_values]
                try:
                    yield self.db_insert(table, save_data)
                    yield sleep(0.01)
                except IntegrityError as e:
                    logger.warn("Error trying to insert_blind in bulk save: {e}", e=e)
                    logger.warn("Table: {table}, data: {save_data}", table=table, save_data=save_data)
            elif queue_type == "update":
                save_data = [value for value in data_values.values()]
                try:
                    yield self.db_update_many(table, save_data, self.db_bulk_queue_id_cols[table])
                    yield sleep(0.01)
                except IntegrityError as e:
                    logger.warn("Error trying to update_many in bulk save: {e}", e=e)
                    logger.warn("Table: {table}, data: {save_data}", table=table, save_data=save_data)
            elif queue_type == "delete":
                try:
                    yield self.db_delete_many(table, data_values)
                    yield sleep(0.01)
                except IntegrityError as e:
                    logger.warn("Error trying to delete_many in bulk save: {e}", e=e)
                    # Bug fix: the original logged "save_data" here - a name not
                    # defined in this branch (NameError when the warning fired).
                    # The delete queue's payload is data_values.
                    logger.warn("Table: {table}, data: {save_data}", table=table, save_data=data_values)

    self.db_save_bulk_queue_running = False
    if self.db_save_bulk_queue_run_again is True:
        self.db_save_bulk_queue_run_again = False
        yield self.db_save_bulk_queue()
def _load_(self, **kwargs):
    """
    Configure and (re)start the embedded mosquitto MQTT broker.

    Flow: bail out when the embedded server is disabled; when this gateway is
    not the cluster master, disable the system mosquitto service instead;
    otherwise build a mosquitto configuration (plain/TLS MQTT and websocket
    listeners plus the HTTP auth plugin), write it out, enable the service,
    and verify the broker is running.
    """
    if self.server_enabled is False:
        logger.info("Embedded MQTT Disabled.")
        return
    if self.is_master is not True:
        logger.info("Not managing MQTT broker, we are not the master!")
        if self.mosquitto_enabled is True:
            logger.info("Disabling mosquitto MQTT broker.")
            try:
                yield getProcessOutput(
                    "sudo", ["systemctl", "disable", "mosquitto.service"])
            except Exception as e:
                logger.warn(
                    "Error while trying to disable mosquitto (mqtt) service: {e}",
                    e=e)
            # NOTE(review): this calls start_mqtt_broker() while disabling the
            # broker - looks like it should be stop_mqtt_broker(); confirm.
            yield self.start_mqtt_broker()
            logger.info("Sleeping for 2 seconds while MQTT broker stops.")
            self._Configs.set("mqtt", "mosquitto_enabled", False)
            self.mosquitto_enabled = False
            yield sleep(2)
        return

    # Certificates: a self-signed pair and (possibly) a Let's Encrypt signed
    # pair managed by the web interface library.
    ssl_self_signed = self._SSLCerts.get("selfsigned")
    ssl_lib_webinterface = self._SSLCerts.get("lib_webinterface")

    # Base broker settings; listener sections are appended below.
    mosquitto_config = [
        "allow_anonymous false",
        "user mosquitto",
        "persistent_client_expiration 4h",
        "max_connections 512",
        "",
    ]
    if self.server_listen_port > 0:
        mosquitto_config.extend([
            "#", "# Insecure listen MQTT port", "#",
            f"port {self.server_listen_port}",
            ""
        ])
    if self.server_listen_port_ss_ssl > 0:
        mosquitto_config.extend([
            "#", "# Self-signed cert for mqtt", "#",
            f"listener {self.server_listen_port_ss_ssl}",
            f"certfile {ssl_self_signed['cert_file']}",
            f"keyfile {ssl_self_signed['key_file']}",
            "ciphers ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:ECDHE-ECDSA-DES-CBC3-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:DES-CBC3-SHA:!DSS",
            "tls_version tlsv1.2",
            "protocol mqtt",
            "",
        ])
    if self.server_listen_port_le_ssl > 0 and ssl_lib_webinterface["self_signed"] is False:
        mosquitto_config.extend([
            "#", "# Lets encrypt signed cert for mqtt", "#",
            f"listener {self.server_listen_port_le_ssl}",
            f"cafile {ssl_lib_webinterface['chain_file']}",
            f"certfile {ssl_lib_webinterface['cert_file']}",
            f"keyfile {ssl_lib_webinterface['key_file']}",
            "ciphers ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:ECDHE-ECDSA-DES-CBC3-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:DES-CBC3-SHA:!DSS",
            "tls_version tlsv1.2",
            "protocol mqtt",
            "",
        ])
    if self.server_listen_port_websockets > 0:
        mosquitto_config.extend([
            "#", "# Unecrypted websockets", "#",
            f"listener {self.server_listen_port_websockets}",
            "protocol websockets",
            "max_connections 512",
            "",
        ])
    if self.server_listen_port_websockets_ss_ssl > 0:
        mosquitto_config.extend([
            "#", "# Self-signed cert for websockets", "#",
            f"listener {self.server_listen_port_websockets_ss_ssl}",
            f"certfile {ssl_self_signed['cert_file']}",
            f"keyfile {ssl_self_signed['key_file']}",
            "ciphers ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:ECDHE-ECDSA-DES-CBC3-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:DES-CBC3-SHA:!DSS",
            "tls_version tlsv1.2",
            "protocol websockets",
            "",
        ])
    if self.server_listen_port_websockets_le_ssl > 0 and ssl_lib_webinterface["self_signed"] is False:
        mosquitto_config.extend([
            "#", "# Lets encrypt signed cert for websockets", "#",
            f"listener {self.server_listen_port_websockets_le_ssl}",
            f"cafile {ssl_lib_webinterface['chain_file']}",
            f"certfile {ssl_lib_webinterface['cert_file']}",
            f"keyfile {ssl_lib_webinterface['key_file']}",
            "ciphers ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:ECDHE-ECDSA-DES-CBC3-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:DES-CBC3-SHA:!DSS",
            "tls_version tlsv1.2",
            "protocol websockets",
            "",
        ])
    # NOTE(review): this overwrites the Let's-Encrypt websocket port with the
    # self-signed one whenever a real cert exists - the direction of this
    # assignment looks suspicious; confirm intent.
    if ssl_lib_webinterface["self_signed"] is False:
        self.server_listen_port_websockets_le_ssl = self.server_listen_port_websockets_ss_ssl

    logger.debug("Writting mosquitto_config_file to: {mosquitto_config_file}",
                 mosquitto_config_file=self.mosquitto_config_file)
    config_file = FileWriter(filename=self.mosquitto_config_file, mode="w")
    config_file.write("# File automatically generated by Yombo Gateway. Edits will be lost.\n")
    config_file.write(f"# Created {f'{datetime.now():%Y-%m-%d %H%M%S}'}\n\n")
    # The auth plugin delegates user/superuser/ACL checks to the local web
    # interface's HTTP API.
    config_file.write("# HTTP Auth plugin...\n")
    config_file.write("auth_plugin /usr/local/src/yombo/mosquitto-auth-plug/auth-plug.so\n")
    config_file.write("auth_opt_backends http\n")
    # config_file.write("auth_opt_acl_log_quiet true\n")
    config_file.write("auth_opt_acl_cacheseconds 600\n")
    config_file.write("auth_opt_auth_cacheseconds 30\n")
    config_file.write("auth_opt_http_ip 127.0.0.1\n")
    webinterface_port = self._Configs.get("webinterface", "nonsecure_port", fallback=8080)
    config_file.write(f"auth_opt_http_port {webinterface_port}\n")
    config_file.write("auth_opt_http_getuser_uri /api/v1/mqtt/auth/user\n")
    config_file.write("auth_opt_http_superuser_uri /api/v1/mqtt/auth/superuser\n")
    config_file.write("auth_opt_http_aclcheck_uri /api/v1/mqtt/auth/acl\n")
    config_file.write("# Base configs\n\n")
    for line_out in mosquitto_config:
        config_file.write(f"{line_out}\n")
    yield config_file.close_while_waiting()

    if self.mosquitto_enabled is False:
        logger.info("Enabling mosquitto MQTT broker.")
        try:
            yield getProcessOutput(
                "sudo", ["systemctl", "enable", "mosquitto.service"])
        except Exception as e:
            logger.warn(
                "Error while trying to enable mosquitto (mqtt) service: {e}",
                e=e)
        self._Configs.set("mqtt", "mosquitto_enabled", True)
        self.mosquitto_enabled = True

    # Verify the broker is actually up; try one start if it is not.
    yield self.check_mqtt_broker_running()
    if self.mosquitto_running is False:
        yield self.start_mqtt_broker()
        logger.info("Sleeping for 3 seconds while MQTT broker starts up.")
        yield sleep(3)
    if self.mosquitto_running is False:
        logger.error("Cannot connect to MQTT broker.")
        raise YomboCritical("MQTT failed to connect and/or start, shutting down.")
def apiv1_device_command_get_post(webinterface, request, session, device_id, command_id):
    """
    API v1 handler: sends a command to a device and, optionally, waits up to
    "_wait" seconds for the resulting device command to reach a final status
    before responding.

    :param webinterface: The web interface library reference.
    :param request: The incoming HTTP request.
    :param session: Caller's auth session; must grant "control" on the device.
    :param device_id: ID of the device to control (string, max 200 chars).
    :param command_id: ID of the command to send (string, max 200 chars).
    :return: JSON payload with the device command id, the device command
             details, and the device's current/previous status entries.
    """
    session.has_access("device", device_id, "control", raise_error=True)
    # Check the type BEFORE calling len(): len() on a non-string (or an
    # unsized value) raises TypeError instead of producing a clean 400.
    if isinstance(device_id, str) is False or len(device_id) > 200:
        return return_error(request, "invalid device_id format", 400)
    if isinstance(command_id, str) is False or len(command_id) > 200:
        return return_error(request, "invalid command_id format", 400)
    try:
        wait_time = float(request.args.get("_wait")[0])
    except (TypeError, ValueError, KeyError, IndexError):
        # "_wait" missing (args.get -> None), empty, or not a number;
        # fall back to a 2 second wait. Narrow catch instead of bare except.
        wait_time = 2
    arguments = args_to_dict(request.args)
    pin_code = arguments.get("pin_code", None)
    delay = arguments.get("delay", None)
    max_delay = arguments.get("max_delay", None)
    not_before = arguments.get("not_before", None)
    not_after = arguments.get("not_after", None)
    inputs = arguments.get("inputs", None)
    if device_id in webinterface._Devices:
        device = webinterface._Devices[device_id]
    else:
        return return_not_found(request, "Device not found")
    try:
        device_command_id = device.command(
            cmd=command_id,
            auth=session,
            pin=pin_code,
            delay=delay,
            max_delay=max_delay,
            not_before=not_before,
            not_after=not_after,
            inputs=inputs,
            idempotence=request.idempotence,
        )
    except KeyError as e:
        print(f"error with apiv1_device_command_get_post keyerror: {e}")
        return return_not_found(
            request, f"Error with command, it is not found: {e}")
    except YomboWarning as e:
        print(f"error with apiv1_device_command_get_post warning: {e}")
        return return_error(request, f"Error with command: {e}")
    device_command = webinterface._DeviceCommands.device_commands[device_command_id]
    if wait_time > 0:
        start_time = time()
        # Poll until the device command reaches a final status (>= 100)
        # or the wait window expires.
        while start_time > (time() - wait_time):
            yield sleep(.075)
            if device_command.status_id >= 100:
                break
    if len(device.status_history) > 0:
        status_current = device.status_history[0].asdict()
    else:
        status_current = None
    if len(device.status_history) > 1:
        status_previous = device.status_history[1].asdict()
    else:
        status_previous = None
    return return_good(
        request,
        payload={
            "device_command_id": device_command_id,
            "device_command": device_command.asdict(),
            "status_current": status_current,
            "status_previous": status_previous,
        })
def do_start(self, **kwargs):
    """
    Starts the scene. Performs the actual trigger. It's wrapped here to handle
    any requested delays.

    Iterates the scene's configured actions in order and dispatches each one
    by its "action_type" (device / pause / scene / template / plugin-provided).
    Setting ``self.run_state`` to anything other than "running" (from another
    context) aborts the scene at the next check point.

    :param kwargs: Extra arguments forwarded to device commands.
    :return: False if the scene was killed mid-run; otherwise falls through
             after marking the run state "stopped".
    """
    logger.debug("Scene '{label}' is now running.", label=self.label)
    actions = self.data["actions"]
    # Let the Scenes library know this scene fired (for trigger monitoring).
    self._Scenes.trigger_monitor("scene",
                                 scene=self,
                                 name=self.machine_label,
                                 action="start")
    # Give all modules/libraries a chance to react before actions run.
    yield global_invoke_all("_scene_starting_",
                            called_by=self,
                            arguments={
                                "scene_id": self.node_id,
                                "scene": self,
                            })
    logger.info("Scene is starting: {label}", label=self.label)
    for action_id, action in actions.items():
        action_type = action["action_type"]
        if action_type == "device":
            # Send a command to a single device, forwarding any kwargs.
            device = self._Devices[action["device_machine_label"]]
            logger.info("Scene is firing {label}, device: {device}",
                        label=self.label, device=device.label)
            command = self._Commands[action["command_machine_label"]]
            device.command(command=command,
                           auth_id=self._Users.system_user,
                           control_method="scene",
                           inputs=action["inputs"],
                           **kwargs)
        elif action_type == "pause":
            final_duration = 0
            loops = 0
            sleep_duration = action["duration"]
            # Long pauses are split into ~5 second chunks so the scene can
            # still be killed (via run_state) between chunks.
            if sleep_duration < 6:
                final_duration = sleep_duration
                loops = 1
            else:
                loops = int(round(sleep_duration / 5))
                final_duration = sleep_duration / loops
            for current_loop in range(loops):
                yield sleep(final_duration)
                if self.run_state != "running":  # a way to kill this trigger
                    self.run_state = "stopped"
                    return False
        elif action_type == "scene":
            # Control another scene: enable/disable/start/stop it.
            local_scene = self._Scenes.get(action["scene_machine_label"])
            scene_action = action["scene_action"]
            if scene_action == "enable":
                self.enable(local_scene.scene_id)
            elif scene_action == "disable":
                self.disable(local_scene.scene_id)
            elif scene_action == "start":
                try:
                    self.start(local_scene.scene_id)
                except Exception:  # Gobble everything up..
                    pass
            elif scene_action == "stop":
                try:
                    self.stop(local_scene.scene_id)
                except Exception:  # Gobble everything up..
                    pass
        elif action_type == "template":
            # Render the pre-compiled template for this action; template
            # errors are logged verbosely but never abort the scene.
            try:
                yield self.scene_templates[action_id].render(
                    {"current_scene": self})
            except Exception as e:
                logger.warn(
                    "-==(Warning: Scenes library had trouble with template==-"
                )
                logger.warn("Input template:")
                logger.warn("{template}", template=action["template"])
                logger.warn(
                    "---------------==(Traceback)==--------------------------"
                )
                logger.warn("{trace}", trace=traceback.format_exc())
                logger.warn(
                    "--------------------------------------------------------"
                )
                logger.warn(
                    "Scene had trouble running template: {message}", message=e)
        elif action_type in self._Scenes.additional_scene_actions:
            # Action types registered by other modules; delegate to their
            # registered callback.
            self._Scenes.additional_scene_actions[action_type][
                "handle_trigger_callback"](self, action)
        if self.run_state != "running":  # a way to kill this trigger
            self.run_state = "stopped"
            return False
    self.run_state = "stopped"
def _unload_(self):
    """
    Unload hook: flushes any pending sync data, then pauses briefly so the
    flush has a moment to settle before teardown continues.
    """
    yield self.flush_sync()
    yield sleep(0.1)
def queue_worker2(self, arguments):
    """
    Slow queue worker used by the tests: waits 5 seconds, verifies the queued
    arguments arrived intact, then returns a sentinel result string.

    :param arguments: The value handed to the queue when the work was added.
    :return: A fixed marker string the caller can assert against.
    """
    yield sleep(5)
    # Fix: "assertIsEqual" is not part of the unittest / Twisted trial
    # TestCase API and would raise AttributeError; assertEqual is the
    # standard assertion with the same (first, second, msg) signature.
    self.assertEqual(arguments, 'letsdoit',
                     "queue_worker() arguments should be the same.")
    return "someresults"
def _load_(self, **kwargs):
    """
    Library load hook: provisions and starts the embedded mosquitto MQTT
    broker when this gateway is the master.

    Flow: bail out if the embedded server is disabled; if this gateway is
    not the master, make sure mosquitto's systemd unit is disabled and
    return; otherwise build the mosquitto configuration (plain, self-signed
    TLS, Lets Encrypt TLS, and websocket listeners), write the config and
    password files, enable the systemd unit if needed, then start or reload
    the broker.

    :raises YomboCritical: If the broker cannot be started.
    """
    if self.server_enabled is False:
        logger.info("Embedded MQTT Disabled.")
        return
    if self.is_master is not True:
        logger.info("Not managing MQTT broker, we are not the master!")
        if self.mosquitto_enabled is True:
            logger.info("Disabling mosquitto MQTT broker.")
            try:
                yield getProcessOutput(
                    "sudo", ['systemctl', 'disable', 'mosquitto.service'])
            except Exception as e:
                logger.warn(
                    "Error while trying to disable mosquitto (mqtt) service: {e}",
                    e=e)
            # NOTE(review): the log below says the broker "stops", yet
            # start_mqtt_broker() is called here — looks like it should be a
            # stop call; confirm intent before changing.
            yield self.start_mqtt_broker()
            logger.info("Sleeping for 2 seconds while MQTT broker stops.")
            self._Configs.set('mqtt', 'mosquitto_enabled', False)
            self.mosquitto_enabled = False
            yield sleep(2)
        return
    # Certificates: a self-signed pair plus the web interface's (possibly
    # Lets Encrypt signed) cert set.
    ssl_self_signed = self._SSLCerts.get('selfsigned')
    ssl_lib_webinterface = self._SSLCerts.get('lib_webinterface')
    # Base mosquitto settings; listener sections are appended below.
    mosquitto_config = [
        'allow_anonymous false',
        'password_file /etc/mosquitto/yombo/passwd',
        'user mosquitto',
        'persistent_client_expiration 4h',
        'max_connections 512',
        '',
    ]
    # Plain (unencrypted) MQTT listener.
    if self.server_listen_port > 0:
        mosquitto_config.append("port %s" % self.server_listen_port)
    # MQTT over TLS with the self-signed cert.
    if self.server_listen_port_ss_ssl > 0:
        mosquitto_config.extend([
            '#',
            '# Self-signed cert for mqtt',
            '#',
            'listener %s' % self.server_listen_port_ss_ssl,
            'certfile %s' % ssl_self_signed['cert_file'],
            'keyfile %s' % ssl_self_signed['key_file'],
            'ciphers ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:ECDHE-ECDSA-DES-CBC3-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:DES-CBC3-SHA:!DSS',
            'tls_version tlsv1.2',
            'protocol mqtt',
            '',
        ])
    # MQTT over TLS with the Lets Encrypt cert (only when one exists).
    if self.server_listen_port_le_ssl > 0 and ssl_lib_webinterface[
            'self_signed'] is False:
        mosquitto_config.extend([
            '#',
            '# Lets encrypt signed cert for mqtt',
            '#',
            'listener %s' % self.server_listen_port_le_ssl,
            'cafile %s' % ssl_lib_webinterface['chain_file'],
            'certfile %s' % ssl_lib_webinterface['cert_file'],
            'keyfile %s' % ssl_lib_webinterface['key_file'],
            'ciphers ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:ECDHE-ECDSA-DES-CBC3-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:DES-CBC3-SHA:!DSS',
            'tls_version tlsv1.2',
            'protocol mqtt',
            '',
        ])
    # Plain websockets listener.
    if self.server_listen_port_websockets > 0:
        mosquitto_config.extend([
            '#',
            '# Unecrypted websockets',
            '#',
            'listener %s' % self.server_listen_port_websockets,
            'protocol websockets',
            'max_connections 512',
            '',
        ])
    # Websockets over TLS with the self-signed cert.
    if self.server_listen_port_websockets_ss_ssl > 0:
        mosquitto_config.extend([
            '#',
            '# Self-signed cert for websockets',
            '#',
            'listener %s' % self.server_listen_port_websockets_ss_ssl,
            'certfile %s' % ssl_self_signed['cert_file'],
            'keyfile %s' % ssl_self_signed['key_file'],
            'ciphers ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:ECDHE-ECDSA-DES-CBC3-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:DES-CBC3-SHA:!DSS',
            'tls_version tlsv1.2',
            'protocol websockets',
            '',
        ])
    # Websockets over TLS with the Lets Encrypt cert (only when one exists).
    if self.server_listen_port_websockets_le_ssl > 0 and ssl_lib_webinterface[
            'self_signed'] is False:
        mosquitto_config.extend([
            '#',
            '# Lets encrypt signed cert for websockets',
            '#',
            'listener %s' % self.server_listen_port_websockets_le_ssl,
            'cafile %s' % ssl_lib_webinterface['chain_file'],
            'certfile %s' % ssl_lib_webinterface['cert_file'],
            'keyfile %s' % ssl_lib_webinterface['key_file'],
            'ciphers ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:ECDHE-ECDSA-DES-CBC3-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:DES-CBC3-SHA:!DSS',
            'tls_version tlsv1.2',
            'protocol websockets',
            '',
        ])
    # NOTE(review): when a real (non-self-signed) cert exists, the LE
    # websocket port is overwritten with the self-signed port — looks
    # inverted; confirm which port should win here.
    if ssl_lib_webinterface['self_signed'] is False:
        self.server_listen_port_websockets_le_ssl = self.server_listen_port_websockets_ss_ssl
    # Publish the ports so other components (e.g. the frontend) can connect.
    self.mqtt_available_ports = {
        'ws': self.server_listen_port_websockets,
        'wss': self.server_listen_port_websockets_le_ssl,
        'wss-ss': self.server_listen_port_websockets_ss_ssl,
    }
    # Write the generated mosquitto configuration file.
    mosquitto_config_filepointer = open(self.mosquitto_config_file, 'w')
    print(
        "# File automatically generated by Yombo Gateway. Edits will be lost.",
        file=mosquitto_config_filepointer)
    print("# Created %s" % f"{datetime.now():%Y-%m-%d %H%M%S}",
          file=mosquitto_config_filepointer)
    print("#", file=mosquitto_config_filepointer)
    print("# Base configs", file=mosquitto_config_filepointer)
    print("#", file=mosquitto_config_filepointer)
    for line_out in mosquitto_config:
        print(line_out, file=mosquitto_config_filepointer)
    mosquitto_config_filepointer.close()
    # Write the mosquitto password file: users from yombo.ini plus one
    # entry per known gateway.
    password_file = open(self.mosquitto_pass_file, 'w')
    print(
        "# File automatically generated by Yombo Gateway. Edits will be lost.",
        file=password_file)
    print("# Created %s" % f"{datetime.now():%Y-%m-%d %H%M%S}",
          file=password_file)
    # presumably a mapping of username -> password from the config section;
    # verify what the '*' fallback returns.
    cfg_users = self._Configs.get('mqtt_users', '*')
    if cfg_users is not None:
        print("# ", file=password_file)
        print("# Users from yombo.ini", file=password_file)
        print("# ", file=password_file)
        for username, password in cfg_users.items():
            print("%s:%s" % (username, sha512_crypt_mosquitto(password)),
                  file=password_file)
    gateway_passwords = self._Gateways.get_mqtt_passwords()
    for gateway, passwords in gateway_passwords.items():
        print("# ", file=password_file)
        print("# Gateways", file=password_file)
        print("# ", file=password_file)
        print("yombogw_%s:%s" % (gateway,
                                 sha512_crypt_mosquitto(passwords['current'])),
              file=password_file)
    password_file.close()
    # Make sure the systemd unit is enabled so the broker survives reboots.
    if self.mosquitto_enabled is False:
        logger.info("Enabling mosquitto MQTT broker.")
        try:
            yield getProcessOutput(
                "sudo", ['systemctl', 'enable', 'mosquitto.service'])
        except Exception as e:
            logger.warn(
                "Error while trying to enable mosquitto (mqtt) service: {e}",
                e=e)
        self._Configs.set('mqtt', 'mosquitto_enabled', True)
        self.mosquitto_enabled = True
    # Start the broker if it isn't running; otherwise reload the new config.
    yield self.check_mqtt_broker_running()
    if self.mosquitto_running is False:
        yield self.start_mqtt_broker()
        logger.info("Sleeping for 2 seconds while MQTT broker starts up.")
        yield sleep(2)
        if self.mosquitto_running is False:
            logger.error("MQTT failed to start!")
            raise YomboCritical("MQTT failed to start, shutting down.")
    else:
        yield self.reload_mqtt_broker()  #reload the configs