def linreg_calculator(in_q, out_q, out_topic, buffer_size):
    logger = Logger(out_q)
    try:
        logger.log(__name__ + ' started')
        # Pre-fill the sliding window with the first buffer_size samples.
        X = np.zeros(shape=(buffer_size, 1), dtype=np.double)
        y = np.zeros(shape=(buffer_size, 1), dtype=np.double)
        linregr = LinearRegression()
        for i in range(buffer_size):
            in_msg = in_q.get()
            X[i, 0] = in_msg.timestamp
            y[i, 0] = in_msg.data
        logger.log(__name__ + ' entering main loop')
        while True:
            # Shift the window one slot left and append the newest sample.
            X = np.roll(X, -1, axis=0)
            y = np.roll(y, -1, axis=0)
            in_msg = in_q.get()
            X[buffer_size - 1, 0] = in_msg.timestamp
            y[buffer_size - 1, 0] = in_msg.data
            # Refit and publish the slope (rate of change over the window).
            linregr.fit(X, y)
            out_msg = Message(out_topic, round(linregr.coef_[0, 0], 2))
            out_q.put(out_msg)
    except Exception:
        logger.log(format_current_exception(__name__))
def derivative_calculator(in_q, out_q, out_topic, nskip):
    logger = Logger(out_q)
    try:
        logger.log(__name__ + ' started')
        result = 0.0
        in_msg = in_q.get()
        last_value, last_t = in_msg.data, time()
        logger.log(__name__ + ' entering main loop')
        while True:
            in_msg = in_q.get()
            new_value, new_t = in_msg.data, time()
            # Finite-difference approximation of the derivative.
            result = (new_value - last_value) / (new_t - last_t)
            last_value, last_t = new_value, new_t
            out_msg = Message(out_topic, round(result, 2))
            out_q.put(out_msg)
            # Discard the next nskip inputs, re-publishing the last result
            # for each so the output rate stays in step with the input rate.
            for n in range(nskip):
                in_q.get()
                out_q.put(out_msg)
    except Exception:
        logger.log(format_current_exception(__name__))
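# A minimal wiring sketch, not part of the original source: it assumes the
# workers above run as separate processes connected by multiprocessing
# queues, with every worker publishing into the pubsub manager's input
# queue. The queue names, topic names, and parameter values are
# illustrative assumptions only.
from multiprocessing import Process, Queue

def example_pipeline(topics):
    manager_q = Queue()
    # The manager logs through its own input queue in this sketch.
    Process(target=pubsub_manager,
            args=(manager_q, topics, manager_q), daemon=True).start()
    Process(target=MPL3115A2_reader,
            args=(100.0, manager_q, 'altitude', 0.5), daemon=True).start()
    # Assumes 'altitude' is registered in topics with at least one
    # subscriber queue for the regression worker to consume.
    altitude_sub_q = topics['altitude'].subscriptions[0]
    Process(target=linreg_calculator,
            args=(altitude_sub_q, manager_q, 'slope', 10), daemon=True).start()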
async def receive(self, *args: Type[Message],
                  status: Union[MessageStatus, List[MessageStatus]] = None) -> AsyncGenerator:
    # Normalize the status filter: None means "accept every status".
    if not status:
        statuses = list(MessageStatus)
    elif isinstance(status, list):
        statuses = status
    else:
        statuses = [status]
    channels = [n.channel.__name__ for n in args]
    self.consumer = AIOKafkaConsumer(*channels, loop=self.loop,
                                     bootstrap_servers=self.host)
    await self.consumer.start()
    async for msg in self.consumer:
        msg = Message.deserialize(
            json.loads(msg.value, object_hook=self.converter.decode))
        if msg.status in statuses:
            yield msg
def file_reader(out_q, out_topic, filename, sep, field, interval):
    logger = Logger(out_q)
    try:
        logger.log(__name__ + ' started')
        with open(filename, 'r') as f:
            logger.log(__name__ + ' entering main loop')
            while True:
                for line in f:
                    # Publish one field from each delimited line, then pause.
                    tokens = line.split(sep)
                    out_msg = Message(out_topic, float(tokens[field]))
                    out_q.put(out_msg)
                    sleep(interval)
                # Rewind so the file is replayed; without this the outer
                # loop would spin forever once the file is exhausted.
                f.seek(0)
    except Exception:
        logger.log(format_current_exception(__name__))
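# Illustrative input (an assumption, not a file from the original source):
# with sep=',' and field=1, a line such as
#   2021-06-01 12:00:00,101.3
# publishes the float 101.3 on out_topic.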
def MPL3115A2_reader(initial_altitude, out_q, out_topic, interval):
    logger = Logger(out_q)
    try:
        logger.log(__name__ + ' started')
        sensor = MPL3115A2()
        # Calibrate the barometric sensor against a known starting altitude.
        sensor.set_altitude(initial_altitude)
        logger.log(__name__ + ' entering main loop')
        while True:
            altitude = sensor.read_altitude()
            msg = Message(out_topic, altitude)
            out_q.put(msg)
            sleep(interval)
    except Exception:
        logger.log(format_current_exception(__name__))
def pubsub_manager(in_q, topics, out_q):
    logger = Logger(out_q)
    try:
        logger.log(__name__ + ' started')
        # Per-topic message counter, used to build unique tags.
        indexes = {t: 0 for t in topics}
        logger.log(__name__ + ' entering main loop')
        while True:
            in_msg = in_q.get()
            t, data, timestamp = in_msg.topic, in_msg.data, in_msg.timestamp
            if t in topics:
                tag = '{0}.{1}'.format(topics[t].tag, indexes[t])
                out_msg = Message(t, data, tag, timestamp)
                indexes[t] += 1
                # Fan the tagged message out to every subscriber queue.
                for q in topics[t].subscriptions:
                    q.put(out_msg)
    except Exception:
        logger.log(format_current_exception(__name__))
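# The manager above expects each value in `topics` to expose a `tag` and a
# list of subscriber queues. A minimal sketch of that record, assuming a
# simple namedtuple (the actual definition is not shown in this section):
from collections import namedtuple

Topic = namedtuple('Topic', ['tag', 'subscriptions'])

# Usage sketch: fan 'altitude' messages out to two subscriber queues.
#   topics = {'altitude': Topic(tag='ALT', subscriptions=[q1, q2])}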
def log(self, s):
    self.__q.put(Message(LOGS_TOPIC, s))
async def send(self, message: Message) -> None:
    # Lazily create and start the producer on first use.
    if not self.producer:
        self.producer = AIOKafkaProducer(loop=self.loop,
                                         bootstrap_servers=self.host)
        await self.producer.start()
    await self.producer.send(
        message.channel.__name__,
        json.dumps(message.serialize(), default=self.converter.encode).encode())
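# An illustrative driver for the asynchronous transport above. The name
# `transport` and the assumption that `type(message)` carries the `channel`
# attribute read by receive() are placeholders for this sketch, not part of
# the original source.
import asyncio

async def example_roundtrip(transport, message):
    # Publish one message, then consume from its channel until it arrives.
    await transport.send(message)
    async for msg in transport.receive(type(message)):
        print('received', msg)
        break

# asyncio.get_event_loop().run_until_complete(example_roundtrip(t, m))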