class QueryOne:
    """Query 1: consume raw readings from the Kafka ``Input`` topic, derive
    electric power features, run Barsim event detection, and record Kafka
    latency metrics for each emitted result.
    """

    def __init__(self):
        self.et = ElectricTransformation()
        self.dt = DataTransformation()
        # q_size: sliding-window length (in samples) used by the detector.
        self.bf = BarsimAlgorithm(q_size=100)
        self.bm = LatencyBenchmark()
        # Hold the consumer on the instance (same pattern as QueryTwo) instead
        # of constructing a throwaway KafkaConsumer inside the subscribe lambda.
        self.kc = KafkaConsumer()

    def run(self):
        """Build and subscribe the reactive pipeline for query 1.

        Side effects only: subscribes to Kafka and prints errors/completion.
        """
        rx.create(
            lambda o, s: self.kc.create_subscription(
                topics=["Input"],
                observer=o,
                scheduler=s,
            )
        ).pipe(
            split_payload(),
            # self.bm.inject_time("start_time"),  # latency probe (disabled)
            self.dt.transform_to_pandas(),
            self.et.active_power(),
            self.et.apparent_power(),
            self.et.reactive_power(),
            self.dt.remove_payload(keys=["records"]),
            self.bf.build_window(),
            self.bf.predict(),
            self.bf.check_event_model_constraints(0.8),
            self.bf.compute_loss(),
            self.bf.process_detected_event(),
            self.bf.prepare_result(),
            # self.bm.get_latency("start_time"),  # latency probe (disabled)
            self.bm.append_kafka_metrics(),
        ).subscribe(
            on_next=lambda x: self.bm.measure_kafka(x),
            # self.bm.calc_mean_latency(x),  # alternative sink (disabled)
            on_error=lambda error: print(error),
            on_completed=lambda: print("Query 1 done!"),
        )
class QueryOneWOKafka:
    """Query 1 variant that bypasses Kafka for input: readings are pulled
    from an HTTP endpoint, run through the same feature/detection pipeline,
    and results are submitted back over HTTP.
    """

    def __init__(self):
        self.et = ElectricTransformation()
        self.dt = DataTransformation()
        # q_size: sliding-window length (in samples) used by the detector.
        self.bf = BarsimAlgorithm(q_size=100)
        self.bm = LatencyBenchmark()
        # Producer is only used to report benchmark results on completion.
        self.kp = KafkaProducer()
        self.hc = HttpConnector(
            protocol="HTTP",
            host="localhost",
            port=8000,
            path="data/1/",
        )

    def run(self):
        """Build and subscribe the HTTP-fed reactive pipeline for query 1.

        Side effects only: connects over HTTP, submits results, and reports
        benchmark timings when the stream completes.
        """
        rx.create(
            lambda o, s: self.hc.connect(observer=o, scheduler=s)
        ).pipe(
            self.hc.transform_http_request(),
            self.dt.transform_to_pandas(),
            self.et.active_power(),
            self.et.apparent_power(),
            self.et.reactive_power(),
            self.dt.remove_payload(keys=["records"]),
            self.bf.build_window(),
            self.bf.predict(),
            self.bf.check_event_model_constraints(0.8),
            self.bf.compute_loss(),
            self.bf.process_detected_event(),
            self.bf.prepare_result(),
            # self.bm.append_kafka_metrics(),  # Kafka metrics (disabled here)
        ).subscribe(
            on_next=lambda x: self.hc.submit(x),
            # self.bm.calc_mean_latency(x),  # alternative sink (disabled)
            on_error=lambda error: print(error),
            on_completed=lambda: self.kp.show_benchmark_results(
                start_time_dict=self.hc.start_time_dict
            ),
        )
class QueryTwo:
    """Query 2: consume readings from the Kafka ``Input_q2`` topic, buffer
    and sort them to restore ordering, then run the same feature extraction
    and Barsim event detection as query 1, sinking results to a benchmark
    Kafka producer.
    """

    def __init__(self):
        self.et = ElectricTransformation()
        self.dt = DataTransformation()
        # q_size: sliding-window length (in samples) used by the detector.
        self.bf = BarsimAlgorithm(q_size=100)
        # Buffer used to reorder out-of-order payloads before processing.
        self.pq = CustomBuffer()
        self.kc = KafkaConsumer()
        self.kp = KafkaProducer()

    def run(self):
        """Build and subscribe the reactive pipeline for query 2.

        Side effects only: subscribes to Kafka, forwards results to the
        benchmark sink, and reports benchmark timings on completion.
        """
        rx.create(
            lambda o, s: self.kc.create_subscription(
                topics=["Input_q2"],
                observer=o,
                scheduler=s,
            )
        ).pipe(
            split_payload(),
            self.pq.buffer_and_manage(),
            self.pq.sort_buffer(),
            # self.pq.check_order(),  # ordering sanity check (disabled)
            self.dt.transform_to_pandas(),
            self.et.active_power(),
            self.et.apparent_power(),
            self.et.reactive_power(),
            self.dt.remove_payload(keys=["records"]),
            self.bf.build_window(),
            self.bf.predict(),
            self.bf.check_event_model_constraints(0.8),
            self.bf.compute_loss(),
            self.bf.process_detected_event(),
            self.bf.prepare_result(),
        ).subscribe(
            on_next=lambda x: self.kp.benchmark_sink(x),
            on_error=lambda error: print(error),
            on_completed=lambda: self.kp.show_benchmark_results(
                start_time_dict=self.kc.start_time_dict
            ),
        )