def check_time(self): """ Checks the time to see if it is currently sunrise or sunset """ # Get state object from manager state = self.mudpi.states.get(self.source) if state is not None: _state = state.state else: _state = None if _state: try: _value = self._parse_data(_state) _now = datetime.datetime.now().replace(microsecond=0) if _value: _value = datetime.datetime.strptime(_value, "%Y-%m-%d %I:%M:%S %p").replace(microsecond=0) + self.offset if _now == _value: self.active = True if self._previous_state != self.active: # Trigger is reset, Fire self.trigger(_value.strftime('%Y-%m-%d %I:%M:%S %p')) else: # Trigger not reset check if its multi fire if self.frequency == 'many': self.trigger(_value.strftime('%Y-%m-%d %I:%M:%S %p')) else: self.active = False except Exception as error: Logger.log(LOG_LEVEL["error"], f'Error evaluating thresholds for trigger {self.id}') Logger.log(LOG_LEVEL["debug"], error) self._previous_state = self.active
def call(self, action_call, action_data=None):
    """ Call an action from the registry.

    Format: {namespace}.{action} or {namespace}.{component}.{action}

    Args:
        action_call: Dotted action identifier to look up in the registry.
        action_data: Optional payload passed to the action's validator.
    """
    # BUG FIX: mutable default argument replaced with None sentinel
    action_data = {} if action_data is None else action_data
    command = self.parse_call(action_call)
    action = self._registry.get(command['namespace'], {}).get(command['action'])
    if not action:
        # raise MudPiError("Call to action that doesn't exists!")
        Logger.log(
            LOG_LEVEL["error"],
            f'{FONT_YELLOW}Call to action {action_call} that doesn\'t exists!.{FONT_RESET}'
        )
        # BUG FIX: must abort here — continuing called .validate() on None
        return
    validated_data = action.validate(action_data)
    if not validated_data and action_data:
        # raise MudPiError("Action data was not valid!")
        Logger.log(
            LOG_LEVEL["error"],
            f'{FONT_YELLOW}Action data was not valid for {action_call}{FONT_RESET}'
        )
        # BUG FIX: invalid data was logged but the action still ran; abort instead
        return
    self.mudpi.events.publish(
        'core', {
            'event': 'ActionCall',
            'action': action_call,
            'data': action_data,
            'namespace': command['namespace']
        })
    action(data=validated_data)
def handle_event(self, event):
    """ Handle the event data from the event system """
    _event_data = decode_event_data(event)
    if _event_data == self._last_event:
        # Event already handled
        return
    self._last_event = _event_data
    if _event_data.get('event'):
        try:
            # Only react to state changes of the component we watch
            if _event_data['event'] == 'StateUpdated':
                if _event_data['component_id'] == self.source:
                    # assumes new_state payload has a nested "state" key — TODO confirm schema
                    sensor_value = self._parse_data(
                        _event_data["new_state"]["state"])
                    if self.evaluate_thresholds(sensor_value):
                        self.active = True
                        if self._previous_state != self.active:
                            # Trigger is reset, Fire
                            self.trigger(_event_data)
                        else:
                            # Trigger not reset check if its multi fire
                            if self.frequency == 'many':
                                self.trigger(_event_data)
                    else:
                        self.active = False
        except Exception as error:
            Logger.log(
                LOG_LEVEL["error"],
                f'Error evaluating thresholds for trigger {self.id}')
            Logger.log(LOG_LEVEL["debug"], error)
    # Track state so the next event can detect a reset-then-fire transition
    self._previous_state = self.active
def update(self):
    """ Get data from DHT device"""
    humidity = None
    temperature_c = None
    if self.check_dht():
        try:
            humidity, temperature_c = Adafruit_DHT.read_retry(
                self._sensor, self.pin)
        except Exception as error:
            # Errors happen fairly often, DHT's are hard to read
            Logger.log(LOG_LEVEL["debug"], error)
    # Guard clause: bail out on a failed or partial read
    if humidity is None or temperature_c is None:
        Logger.log(LOG_LEVEL["debug"], f'DHT Reading was Invalid (Legacy).')
        time.sleep(2.1)
        return None
    # Convert to Fahrenheit unless the system is configured as metric
    if self.mudpi.unit_system == METRIC_SYSTEM:
        _temperature = temperature_c
    else:
        _temperature = temperature_c * 1.8 + 32
    readings = {
        'temperature': round(_temperature, 2),
        'humidity': round(humidity, 2)
    }
    self._state = readings
    return readings
def update(self):
    """ Get data from T9602 device.

    Retries the I2C read up to 5 times; returns the readings dict on
    success or None when every attempt fails.
    """
    # BUG FIX: `data` was unbound (NameError) when all 5 reads failed;
    # initialize it and bail out explicitly on total failure.
    data = None
    for trynb in range(5):  # 5 tries
        try:
            data = self.bus.read_i2c_block_data(self.config['address'], 0, 4)
            break
        except OSError:
            Logger.log(
                LOG_LEVEL["info"],
                "Single reading error [t9602]. It happens, let's try again..."
            )
            time.sleep(2)
    if data is None:
        # Every retry failed — the old code only reached this message via a
        # dead else branch and crashed before it on the unbound `data`.
        Logger.log(
            LOG_LEVEL["error"],
            'Failed to get reading [t9602]. Try again!'
        )
        return None
    # Decode per the T9602 datasheet: 14-bit humidity and temperature words
    humidity = (((data[0] & 0x3F) << 8) + data[1]) / 16384.0 * 100.0
    temperature_c = ((data[2] * 64) + (data[3] >> 2)) / 16384.0 * 165.0 - 40.0
    humidity = round(humidity, 2)
    temperature_c = round(temperature_c, 2)
    # Convert to Fahrenheit unless the system is configured as metric
    _temperature = temperature_c if self.mudpi.unit_system == METRIC_SYSTEM else (temperature_c * 1.8 + 32)
    readings = {
        'temperature': _temperature,
        'humidity': humidity
    }
    self._state = readings
    return readings
def create(cls, mudpi, extension_name, extensions_module):
    """ Static method to load extension.

    Searches each path of the extensions package for an
    `extension.json`, parses it, and returns a new importer instance.
    Returns None when no config is found.
    """
    for path in extensions_module.__path__:
        config_path = os.path.join(path, extension_name, "extension.json")
        if not os.path.isfile(config_path):
            continue
        try:
            with open(config_path) as f:
                # json.load reads directly from the file object
                config = json.load(f)
        except FileNotFoundError:
            Logger.log(
                LOG_LEVEL["error"],
                f'{FONT_RED}No extension.json found at {config_path}.{FONT_RESET}'
            )
            continue
        except Exception as e:
            # BUG FIX: the log referenced undefined name `error`, raising a
            # NameError inside the handler — use the bound exception `e`.
            Logger.log(
                LOG_LEVEL["error"],
                f'{FONT_RED}Error loading extension.json at {config_path} {e}.{FONT_RESET}'
            )
            continue
        return cls(mudpi, config,
                   f"{extensions_module.__name__}.{extension_name}",
                   os.path.split(config_path)[0])
    return None
def handle_event(self, event):
    """ Handle events from event system """
    decoded = None
    try:
        decoded = decode_event_data(event)
    except Exception:
        # Some transports wrap the payload; retry on the nested data key
        decoded = decode_event_data(event['data'])
    if decoded == self._last_event:
        # Duplicate delivery — already handled
        return
    self._last_event = decoded
    if decoded is None:
        return
    try:
        kind = decoded['event']
        if kind == 'Message':
            if decoded.get('data', None):
                self.add_message(decoded['data'])
        elif kind == 'Clear':
            self.clear()
        elif kind == 'ClearQueue':
            self.clear_queue()
    except Exception:
        Logger.log(LOG_LEVEL["error"], f'Error handling event for {self.id}')
def get_extension_importer(mudpi, extension, install_requirements=False):
    """ Find or create an extension importer, Loads it if not loaded,
        Checks cache first. Set install_requirements to True to also
        have all requirements checked through pip.

    Raises:
        MudPiError: when the extension namespace has been disabled.
        ExtensionNotFound: when no importer exists for the extension.
    """
    # First we check if the namespace is disabled. Could be due to errors or configs
    disabled_cache = mudpi.cache.setdefault("disabled_namespaces", {})
    if extension in disabled_cache:
        # BUG FIX: message read "Extension is X is disabled."
        raise MudPiError(f"Extension {extension} is disabled.")

    if install_requirements:
        extension_importer = _extension_with_requirements_installed(
            mudpi, extension)
        if extension_importer is not None:
            return extension_importer

    importer_cache = mudpi.cache.setdefault("extension_importers", {})
    # dict.get never raises; the old try/except around this was dead code
    extension_importer = importer_cache.get(extension)
    if extension_importer is not None:
        return extension_importer

    # Look in user-provided custom extensions first
    extension_importer = _get_custom_extensions(mudpi).get(extension)
    if extension_importer is not None:
        Logger.log(
            LOG_LEVEL["warning"],
            f'{FONT_YELLOW}You are using {extension} which is not provided by MudPi.{FONT_RESET}\nIf you experience errors, remove it.'
        )
        return extension_importer

    # Component not found look in internal extensions
    from mudpi import extensions
    extension_importer = ExtensionImporter.create(mudpi, extension, extensions)
    if extension_importer is not None:
        importer_cache[extension] = extension_importer
        Logger.log_formatted(
            LOG_LEVEL["info"],
            f'{extension_importer.namespace.title()} Ready for Import', 'Ready',
            'success')
    else:
        # BUG FIX: "Preperations" typo corrected
        Logger.log_formatted(LOG_LEVEL["debug"],
                             f'Import Preparations for {extension.title()}',
                             'error', 'error')
        Logger.log(
            LOG_LEVEL["debug"],
            f'{FONT_YELLOW}`{extension.title()}` was not found.{FONT_RESET}')
        # Disable the namespace so we don't retry a missing extension
        disabled_cache[extension] = 'Not Found'
        raise ExtensionNotFound(extension)
    return extension_importer
def stop(self, data=None):
    """ Stop the timer """
    # No-op when the timer isn't running
    if self.active:
        # presumably `active` is a property backed by `_active` — confirm on the class
        self._active = False
        self.reset()
        Logger.log(
            LOG_LEVEL["debug"],
            f'Timer Sensor {FONT_MAGENTA}{self.name}{FONT_RESET} Stopped')
def update(self):
    """ Main run loop for sequence to check time past and if it should fire actions """
    if self.mudpi.is_prepared:
        try:
            if self.active:
                if not self._step_complete:
                    # Phase 1: wait out the optional pre-step delay
                    if not self._delay_complete:
                        if self.step_delay is not None:
                            if self.duration > self.step_delay:
                                self._delay_complete = True
                                self._delay_actual = self.duration
                                self.reset_duration()
                            else:
                                # Waiting break early
                                return
                        else:
                            # No delay configured — proceed immediately
                            self._delay_complete = True
                            self.reset_duration()
                    # Phase 2: fire actions once, then wait out step duration
                    if self._delay_complete:
                        if not self._step_triggered:
                            if self.evaluate_thresholds():
                                self.trigger()
                            else:
                                if self.current_step.get('thresholds') is not None:
                                    # Thresholds failed skip step without trigger
                                    self._step_triggered = True
                                    self._step_complete = True
                        if self.step_duration is not None and not self._step_complete:
                            if self.duration > self.step_duration:
                                self._step_complete = True
                                self._duration_actual = self.duration
                                self.reset_duration()
                            else:
                                # Waiting break early
                                return
                        else:
                            # No duration set meaning step only advances
                            # manualy by calling actions and events. RTM
                            pass
                # Phase 3: announce step end and advance
                if self._step_complete:
                    self.fire({"event": "SequenceStepEnded"})
                    # Logger.log(
                    #     LOG_LEVEL["debug"],
                    #     f'Sequence {FONT_CYAN}{self.name}{FONT_RESET} Step {self._current_step+1} Debug\n' \
                    #     f'Delay: {self.step_delay} Actual: {self._delay_actual} Duration: {self.step_duration} Actual: {self._duration_actual}'
                    # )
                    return self.next_step()
            else:
                # Sequence is not active.
                self.reset_duration()
        except Exception as e:
            Logger.log_formatted(LOG_LEVEL["error"],
                                 f'Sequence {self.id}', 'Unexpected Error', 'error')
            Logger.log(LOG_LEVEL["critical"], e)
def handle_event(self, data={}):
    """ Handle event from mqtt broker.

    Stores the received payload as the sensor state. Decode errors are
    logged and swallowed so a bad message cannot kill the listener.
    """
    if data is not None:
        try:
            self._state = data
        # BUG FIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt;
        # narrow to Exception.
        except Exception:
            Logger.log(LOG_LEVEL["info"],
                       f"Error Decoding Event for MQTT Sensor {self.id}")
def restore_states(self):
    """ Restore previous components states on first boot """
    component_ids = self.components.ids()
    for state_id in self.states.ids():
        if state_id not in component_ids:
            # Orphaned state with no matching component — drop it
            self.states.remove(state_id)
            continue
        component = self.components.get(state_id)
        component.restore_state(self.states.get(state_id))
        Logger.log(LOG_LEVEL["debug"], f"Restored State for {state_id}")
def connect(self, connection):
    """ Connect the sensor to redis.

    Builds the OpenWeatherMap API URL for the configured sensor type
    ('current'/'forecast' use onecall, 'historical' uses timemachine).
    """
    self.conn = connection
    if self.type in ("current", "forecast"):
        self.sensor = "https://api.openweathermap.org/data/2.5/onecall?exclude=minutely&%s" % (
            str(self.conn))
    # BUG FIX: `in ("historical")` is `in` against a *string* (substring
    # test — e.g. type "his" matched); a one-element tuple restores the
    # intended membership/equality check.
    elif self.type in ("historical",):
        self.sensor = "https://api.openweathermap.org/data/2.5/onecall/timemachine?%s&dt=" % (
            str(self.conn))
    Logger.log(LOG_LEVEL["debug"], 'OwmapiSensor: apicall: ' + str(self.sensor))
def import_config_dir(self):
    """ Add config dir to sys path so we can import extensions """
    config_path = self.mudpi.config_path
    if config_path is None:
        Logger.log(
            LOG_LEVEL["error"],
            f'{RED_BACK}Could not import config_path - No path was set.{FONT_RESET}'
        )
        return False
    # Prepend so config-dir extensions shadow any same-named installed ones
    if config_path not in sys.path:
        sys.path.insert(0, config_path)
    return True
def handle_event(self, event={}):
    """ Handle event from redis pubsub.

    Decodes the message payload and stores it as the sensor state.
    Errors while storing are logged and swallowed.
    """
    data = decode_event_data(event['data'])
    if data is not None:
        try:
            self._state = data
        # BUG FIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt;
        # narrow to Exception.
        except Exception:
            Logger.log(
                LOG_LEVEL["info"],
                f"Error Decoding Event for Redis Sensor {self.id}"
            )
def check_dht(self):
    """ Check if the DHT device is setup """
    # Already initialized — nothing to do
    if self._sensor is not None:
        return True
    try:
        self._sensor = self._dht_device
    except Exception as error:
        Logger.log(
            LOG_LEVEL["error"],
            'Sensor Initialize Error: DHT (Legacy) Failed to Init')
        self._sensor = None
        Logger.log(LOG_LEVEL["debug"], error)
        return False
    return True
def validate(self, config):
    """ Validate the trigger config """
    # Normalize a single config dict into a list
    configs = config if isinstance(config, list) else [config]
    for entry in configs:
        if not entry.get('schedule'):
            # Schedule is optional; only note the fallback
            Logger.log(
                LOG_LEVEL["debug"],
                'Trigger: No `schedule`, defaulting to every 5 mins')
    return configs
def init(self):
    """ Setup the socket.

    Creates the TCP server socket and binds it to the configured
    host/port. Returns True on success, False if the bind fails.
    """
    self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    self.client_threads = []
    self._server_ready = threading.Event()
    self._server_ready.set()
    self._server_running = False
    try:
        self.sock.bind((self.host, self.port))
    except socket.error as msg:
        # BUG FIX: Python 3 exceptions are not subscriptable — msg[0]/msg[1]
        # raised TypeError inside the handler. Use errno/strerror.
        Logger.log(
            LOG_LEVEL['error'],
            f'Failed to create socket. Error Code: {msg.errno} Error Message: {msg.strerror}')
        return False
    return True
def check(self):
    """ Check trigger schedule thresholds """
    if not self.mudpi.is_running:
        return
    try:
        due = pycron.is_now(self.schedule)
        if due:
            self.trigger()
            if not self.active:
                self.active = True
        elif self.active:
            # Schedule window passed — deactivate
            self.active = False
    except Exception:
        Logger.log(LOG_LEVEL["error"],
                   "Error evaluating time trigger schedule.")
    return
def start(self, data=None):
    """ Start the timer """
    # Restarting an already-active timer is a no-op
    if not self.active:
        self.reset_duration()
        # presumably `active` is a property backed by `_active` — confirm on the class
        self._active = True
        # A zero pause offset means a fresh start; nonzero means resume
        if self._pause_offset == 0:
            Logger.log(
                LOG_LEVEL["debug"],
                f'Timer Sensor {FONT_MAGENTA}{self.name}{FONT_RESET} Started'
            )
        else:
            Logger.log(
                LOG_LEVEL["debug"],
                f'Timer Sensor {FONT_MAGENTA}{self.name}{FONT_RESET} Resumed'
            )
def validate(self, config):
    """ Validate the dht config """
    # Normalize a single config dict into a list
    configs = config if isinstance(config, list) else [config]
    for entry in configs:
        if not entry.get('pin'):
            raise ConfigError('Missing `pin` in DHT config.')
        # Unknown model falls back to the DHT11
        if str(entry.get('model')) not in DHTSensor.models:
            entry['model'] = '11'
            Logger.log(LOG_LEVEL["warning"],
                       'Sensor Model Error: Defaulting to DHT11')
    return configs
def trigger(self, value=None):
    """ Trigger all the actions for the current step.

    Guards against double-firing via the _step_triggered flag; errors
    are logged and the step is still marked triggered.
    """
    if self._step_triggered:
        return
    try:
        for action in self.current_step.get('actions', []):
            if self.mudpi.actions.exists(action):
                _data = value or {}
                self.mudpi.actions.call(action, action_data=_data)
    except Exception as e:
        # BUG FIX: Logger.log takes (level, message) everywhere else in this
        # file; the exception was passed as a stray third argument and never
        # reached the log. Interpolate it into the message instead.
        Logger.log(
            LOG_LEVEL["error"],
            f"Error triggering sequence action {self.id} {e}")
    self._step_triggered = True
    return
def server(self):
    """ Socket server main loop """
    self.sock.listen(0)  # number of clients to listen for.
    Logger.log_formatted(LOG_LEVEL['info'], 'MudPi Server', 'Online', 'success')
    # Accept loop runs until the ready event is cleared by shutdown
    while self._server_ready.is_set():
        try:
            client, address = self.sock.accept()
            # Drop idle clients after 10 minutes
            client.settimeout(600)
            ip, port = client.getpeername()
            Logger.log(LOG_LEVEL['info'],
                       f'Socket Client {port} from {ip} Connected')
            # One listener thread per client; threads are tracked but
            # NOTE(review): never joined or pruned — list grows unbounded
            t = threading.Thread(target = self.listenToClient, args = (client, address, ip))
            self.client_threads.append(t)
            t.start()
        except Exception as e:
            Logger.log(LOG_LEVEL['error'], e)
            # Back off briefly so a persistent accept error can't spin the CPU
            time.sleep(1)
    self.sock.close()
def set(self, component_id, new_state, metadata=None):
    """ Store a component state, publish a StateUpdated event and
    persist it to redis.

    Returns the published event data dict, or None when the state is
    None or unchanged.
    """
    if new_state is None:
        return
    component_id = component_id.lower()
    metadata = metadata or {}
    # BUG FIX: manual acquire/release leaked the lock if State() or the dict
    # update raised; `with` guarantees release. The redundant
    # `if new_state is not None` re-check was also dropped (handled above).
    with self._lock:
        previous_state = self.states.get(component_id)
        state_exists = previous_state is not None
        state_is_same = (state_exists
                         and previous_state.state == new_state)
        metadata_is_same = (state_exists
                            and previous_state.metadata == metadata)
        if state_is_same and metadata_is_same:
            # Nothing changed — skip the publish entirely
            return
        # Keep the original timestamp when only metadata changed
        updated_at = previous_state.updated_at if state_is_same else None
        state = State(component_id, new_state, metadata, updated_at)
        self.states[component_id] = state
    # Publishing and redis writes happen outside the lock, as before
    if previous_state:
        previous_state = previous_state.to_dict()
    event_data = {
        'event': 'StateUpdated',
        'component_id': component_id,
        'previous_state': previous_state,
        'new_state': state.to_dict()
    }
    self.mudpi.events.publish('state', event_data)
    self.redis.set(f'{component_id}.state', json.dumps(state.to_dict()))
    self.redis.set('state_keys', json.dumps(self.ids()))
    Logger.log(
        LOG_LEVEL["debug"],
        f"State Changed: {FONT_YELLOW}{component_id}{FONT_RESET} - {state.state} @ {state.updated_at}"
    )
    return event_data
def run(self):
    """ Spawn the node worker thread if the node connected """
    if not self.node_ready:
        Logger.log(
            LOG_LEVEL["warning"],
            "Node Connection...\t\t\t\033[1;31m Failed\033[0;0m"
        )
        return None
    worker = threading.Thread(target=self.work, args=())
    worker.start()
    sensor_count = len(self.config['sensors'])
    Logger.log(
        LOG_LEVEL["info"],
        str(self.config['name']) + ' Node Worker [' + str(sensor_count)
        + ' Sensors]...\t\033[1;32m Online\033[0;0m'
    )
    return worker
def __init__(self, mudpi, redis_conf=None):
    # Core app reference and in-memory state store
    self.mudpi = mudpi
    self.states = {}
    # RLock: state writes may re-enter via event handlers — TODO confirm
    self._lock = threading.RLock()
    # Default connection used when no redis config is provided
    host = '127.0.0.1'
    port = 6379
    try:
        if redis_conf:
            host = redis_conf.get('host', '127.0.0.1')
            port = redis_conf.get('port', 6379)
        self.redis = redis.Redis(host=host, port=port)
    except Exception as error:
        # NOTE(review): on failure self.redis stays unset, so later
        # state persistence will raise AttributeError — confirm intended
        Logger.log(LOG_LEVEL["error"],
                   f"State Manager Error Connecting to Redis")
    self.restore_states()
    Logger.log_formatted(LOG_LEVEL["info"],
                         f"Preparing State Manager ", "Complete", "success")
def load_extensions(self, extensions, config):
    """ Initialize a list of extensions with provided config """
    disabled_cache = self.mudpi.cache.setdefault("disabled_namespaces", {})
    # Import and setup each extension that isn't already disabled
    for namespace in extensions:
        if namespace in disabled_cache:
            continue
        try:
            ext_importer = importer.get_extension_importer(self.mudpi, namespace)
            if not ext_importer.import_extension(config):
                disabled_cache[namespace] = 'Failed Import'
        except Exception as error:
            # Deliberately swallow errors — one bad extension
            # must not halt the rest of the boot
            Logger.log(
                LOG_LEVEL["debug"],
                error
            )
            continue
    return True
def _install_extension_requirements(mudpi, extension):
    """ Installs all the extension requirements.

    Resolves and installs dependency extensions' requirements first,
    then pip-installs this extension's own requirements. Returns the
    cached extension on success or False when an install fails.
    """
    cache = mudpi.cache.setdefault('extensions_requirements_installed', {})
    if cache.get(extension.namespace) is not None:
        # Already processed and installed
        return cache[extension.namespace]

    # Handle all the dependencies requirements
    if extension.has_dependencies:
        if extension.import_dependencies():
            for dependency in extension.loaded_dependencies:
                try:
                    dependency_extension = get_extension_importer(
                        mudpi, dependency)
                except Exception as error:
                    Logger.log(
                        LOG_LEVEL["error"],
                        f'Error getting extension <{extension}> dependency: {FONT_YELLOW}{dependency}{FONT_RESET}'
                    )
                    # BUG FIX: without this continue, dependency_extension is
                    # unbound on the next line after a failed lookup (NameError)
                    continue
                if not dependency_extension.install_requirements():
                    Logger.log(
                        LOG_LEVEL["error"],
                        f'Error with extension <{extension}> dependency: {FONT_YELLOW}{dependency}{FONT_RESET} requirements.'
                    )

    if not extension.has_requirements:
        cache[extension.namespace] = extension
        return cache[extension.namespace]

    requirement_cache = mudpi.cache.setdefault('requirement_installed', {})
    for requirement in extension.requirements:
        if requirement not in requirement_cache:
            if not utils.is_package_installed(requirement):
                Logger.log_formatted(
                    LOG_LEVEL["info"],
                    f'{FONT_YELLOW}{extension.namespace.title()}{FONT_RESET} requirements',
                    'Installing', 'notice')
                Logger.log(
                    LOG_LEVEL["debug"],
                    f'Installing package {FONT_YELLOW}{requirement}{FONT_RESET}',
                )
                if not utils.install_package(requirement):
                    # BUG FIX: `extension.title()` — extension is an importer
                    # object, not a str; use namespace as everywhere else.
                    Logger.log(
                        LOG_LEVEL["error"],
                        f'Error installing <{extension.namespace.title()}> requirement: {FONT_YELLOW}{requirement}{FONT_RESET}'
                    )
                    return False
            requirement_cache[requirement] = True
    # extension.requirements_installed = True
    cache[extension.namespace] = extension
    return cache[extension.namespace]
def add_component(self, component):
    """ Add a component for the interface.

    Registers the component with the worker and the global component
    registry. Returns True on success, False when the component has no
    id or the id is already registered.

    Raises:
        MudPiError: when a non-component object is passed in.
    """
    if not _is_component(component):
        # BUG FIX: `self.namesapce` typo raised AttributeError instead of
        # the intended MudPiError message.
        raise MudPiError(
            f"Passed non-component to add_component for {self.namespace}.")

    # Worker is loaded here to prevent empty workers without components
    if self.worker is None:
        self.worker = self.load_worker()
    try:
        if component.id is None:
            Logger.log(
                LOG_LEVEL["debug"],
                f"Interface {self.namespace}:{self.type} component did not define `id`."
            )
            return False

        if component.id in self.worker.components:
            Logger.log(
                LOG_LEVEL["debug"],
                f"Interface {self.namespace}:{self.type} component id ({component.id}) already registered."
            )
            return False

        component.namespace = self.namespace
        component.interface = self.type
        self.worker.components[
            component.id] = self.mudpi.components.register(
                component.id, component, self.namespace)
        component.component_registered(mudpi=self.mudpi, interface=self)
        return True
    except Exception as error:
        Logger.log(
            LOG_LEVEL["debug"],
            f"Interface {self.namespace}:{self.type} unknown error adding component.\n{error}"
        )
def work(self):
    # Poll sensors and publish readings until the main thread shuts down
    while self.main_thread_running.is_set():
        if self.system_ready.is_set() and self.node_ready:
            message = {'event': 'SensorUpdate'}
            readings = {}
            for sensor in self.sensors:
                result = sensor.read()
                readings[sensor.key] = result
                # r.set(sensor.get('key', sensor.get('type')), value)
            Logger.log(LOG_LEVEL["info"], readings)
            message['data'] = readings
            self.r.publish('sensors', json.dumps(message))
            # NOTE(review): sleep only happens on the ready path — if the
            # system is not ready this loop spins; confirm intended
            time.sleep(15)
    # This is only ran after the main thread is shut down
    Logger.log(
        LOG_LEVEL["info"],
        "{name} Node Worker Shutting Down...\t\t\033[1;32m Complete\033[0;0m".format(
            **self.config)
    )