def _run(conf: config.MaukaConfig):
    """
    This is the target function that will run as its own process.
    :param conf: OPQ Mauka config file
    """
    import logging
    import signal
    import os
    import zmq

    _logger = logging.getLogger("app")
    logging.basicConfig(
        format="[%(levelname)s][%(asctime)s][{} %(filename)s:%(lineno)s - %(funcName)s() ] %(message)s"
        .format(os.getpid()))
    signal.signal(signal.SIGINT, signal.SIG_IGN)

    _logger.info("Starting makai event bridge...")
    zmq_context = zmq.Context()
    zmq_sub_event_socket = zmq_context.socket(zmq.SUB)
    zmq_sub_event_socket.setsockopt(zmq.SUBSCRIBE, b"")
    zmq_pub_socket = zmq_context.socket(zmq.PUB)
    zmq_sub_event_socket.connect(conf.get("zmq.event.interface"))
    zmq_pub_socket.connect(conf.get("zmq.mauka.plugin.pub.interface"))

    while True:
        event_msg = zmq_sub_event_socket.recv_multipart()
        if conf.get("debug", False):
            _logger.debug("recv event msg: %s", str(event_msg))
        event_id = int(event_msg[1])
        makai_event = protobuf.pb_util.build_makai_event("makai_event_bridge", event_id)
        mauka_message_bytes = protobuf.pb_util.serialize_message(makai_event)
        zmq_pub_socket.send_multipart((Routes.makai_event.encode(), mauka_message_bytes))
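# A minimal consumer sketch for the topic envelope published above: a SUB socket
# that filters on the Routes.makai_event prefix. The config key
# "zmq.mauka.plugin.sub.interface" is an assumption mirroring the pub key used by
# the bridge, and the makai_event.event_id field access is inferred from
# build_makai_event above; neither is confirmed by this excerpt.
def consume_makai_events(conf: config.MaukaConfig):
    """Receives MakaiEvent messages republished by the makai event bridge."""
    import zmq

    zmq_context = zmq.Context()
    sub_socket = zmq_context.socket(zmq.SUB)
    sub_socket.connect(conf.get("zmq.mauka.plugin.sub.interface"))
    # Subscribe only to the makai_event topic prefix.
    sub_socket.setsockopt(zmq.SUBSCRIBE, Routes.makai_event.encode())
    while True:
        topic, mauka_message_bytes = sub_socket.recv_multipart()
        mauka_message = protobuf.pb_util.deserialize_mauka_message(mauka_message_bytes)
        print(topic, mauka_message.makai_event.event_id)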
def __init__(self, conf: config.MaukaConfig, exit_event: multiprocessing.Event):
    """
    Initializes this plugin.
    :param conf: Configuration dictionary
    :param exit_event: Exit event
    """
    super().__init__(conf, SUBSCRIBED_TOPICS, BoxOptimizationPlugin.NAME, exit_event)

    # Logging
    self.box_optimization_logger = BoxOptimizationPluginLogger(self)

    # ZMQ
    zmq_context = zmq.Context()
    self.makai_send_interface: str = conf.get("zmq.trigger.interface")
    self.makai_recv_interface: str = conf.get("zmq.data.interface")
    self.makai_send_socket = zmq_context.socket(zmq.PUSH)
    self.makai_send_socket.connect(self.makai_send_interface)

    # Box optimization records
    self.box_optimization_records = BoxOptimizationRecords(self.box_optimization_logger)

    # Start up the subscription thread
    self.makai_optimization_results_subscriber = MakaiOptimizationResultSubscriber(
        self.makai_recv_interface,
        self.box_optimization_records,
        self.box_optimization_logger,
        self)
    self.makai_optimization_results_subscriber.start()
def __init__(self, conf: config.MaukaConfig, exit_event: multiprocessing.Event):
    """
    Initializes this plugin.
    :param conf: Configuration dictionary
    :param exit_event: Exit event
    """
    super().__init__(conf,
                     [Routes.heartbeat, Routes.gc_stat, Routes.box_measurement_rate_response],
                     SystemStatsPlugin.NAME,
                     exit_event)
    self.interval_s = conf.get("plugins.SystemStatsPlugin.intervalS")
    self.system_stats_interval_s = conf.get("plugins.SystemStatsPlugin.systemStatsIntervalS")
    self.plugin_stats: typing.Dict[str, typing.Dict[str, int]] = {}
    self.gc_stats: typing.Dict[protobuf.mauka_pb2.GcDomain, int] = {
        protobuf.mauka_pb2.SAMPLES: 0,
        protobuf.mauka_pb2.MEASUREMENTS: 0,
        protobuf.mauka_pb2.TRENDS: 0,
        protobuf.mauka_pb2.EVENTS: 0,
        protobuf.mauka_pb2.INCIDENTS: 0,
        protobuf.mauka_pb2.PHENOMENA: 0
    }
    self.system_stats = collections.defaultdict(DescriptiveStatistic)
    self.system_stats["cpu_load_percent"].update(self.cpu_load_percent())
    self.system_stats["memory_use_bytes"].update(self.memory_use_bytes())
    self.system_stats["disk_use_bytes"].update(self.disk_use_bytes())
    self.box_measurement_rates: typing.Dict[str, int] = {}

    # Start stats collection
    system_stats_timer = threading.Timer(self.system_stats_interval_s,
                                         self.update_system_stats,
                                         args=[self.system_stats_interval_s])
    system_stats_timer.start()
    timer = threading.Timer(self.interval_s, self.collect_stats, args=[self.interval_s])
    timer.start()
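# DescriptiveStatistic is not shown in this excerpt. The following is a minimal
# sketch of a running-statistics accumulator compatible with the
# defaultdict/update() usage above; the field names are assumptions, not the
# repo's actual implementation.
class DescriptiveStatistic:
    """Tracks count, min, max, and mean of observed values incrementally."""

    def __init__(self):
        self.count = 0
        self.min = float("inf")
        self.max = float("-inf")
        self.mean = 0.0

    def update(self, value: float):
        """Folds a new observation into the running statistics."""
        self.count += 1
        self.min = min(self.min, value)
        self.max = max(self.max, value)
        # Incremental mean avoids storing every observation.
        self.mean += (value - self.mean) / self.count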
def from_config(conf: config.MaukaConfig) -> OpqMongoClient:
    """
    Returns an OpqMongoClient given a MaukaConfig.
    :param conf: MaukaConfig.
    :return: An OpqMongoClient.
    """
    mongo_host = conf.get("mongo.host")
    mongo_port = conf.get("mongo.port")
    mongo_db = conf.get("mongo.db")
    return OpqMongoClient(mongo_host, mongo_port, mongo_db)
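# A minimal usage sketch, following the load_config/MaukaConfig pattern used by
# the profilers below; the config file name here is an example.
config_dict = load_config("mauka.config.json")
conf = MaukaConfig(config_dict)
mongo_client = from_config(conf)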
def _run(conf: config.MaukaConfig):
    """
    This is the target function that will run as its own process.
    :param conf: OPQ Mauka config file
    """
    import logging
    import signal
    import os
    import zmq

    _logger = logging.getLogger("app")
    logging.basicConfig(
        format="[%(levelname)s][%(asctime)s][{} %(filename)s:%(lineno)s - %(funcName)s() ] %(message)s"
        .format(os.getpid()))
    signal.signal(signal.SIGINT, signal.SIG_IGN)

    zmq_pub_interface = conf.get("zmq.mauka.broker.pub.interface")
    zmq_sub_interface = conf.get("zmq.mauka.broker.sub.interface")
    zmq_context = zmq.Context()
    zmq_pub_socket = zmq_context.socket(zmq.PUB)
    zmq_sub_socket = zmq_context.socket(zmq.SUB)
    zmq_pub_socket.bind(zmq_pub_interface)
    zmq_sub_socket.bind(zmq_sub_interface)
    zmq_sub_socket.setsockopt(zmq.SUBSCRIBE, b"")

    _logger.info("Starting Mauka pub/sub broker")
    # zmq.proxy blocks until the ZMQ context is terminated.
    zmq.proxy(zmq_sub_socket, zmq_pub_socket)
    _logger.info("Exiting Mauka pub/sub broker")
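# Each _run target in this module is meant to be wrapped in its own process. A
# minimal sketch of that wrapper, which returns the process object; the helper
# name is an assumption, not necessarily the repo's actual API.
import multiprocessing

def start_mauka_pub_sub_broker(conf: config.MaukaConfig) -> multiprocessing.Process:
    """Runs the pub/sub broker in its own process and returns the process object."""
    broker_process = multiprocessing.Process(target=_run, args=(conf,))
    broker_process.start()
    return broker_process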
def _run(conf: config.MaukaConfig):
    """
    This is the target function that will run as its own process.
    :param conf: OPQ Mauka config file
    """
    import logging
    import signal
    import os
    import mongo
    import zmq

    _logger = logging.getLogger("app")
    logging.basicConfig(
        format="[%(levelname)s][%(asctime)s][{} %(filename)s:%(lineno)s - %(funcName)s() ] %(message)s"
        .format(os.getpid()))
    signal.signal(signal.SIGINT, signal.SIG_IGN)

    _logger.info("Starting incident id service...")
    mongo_client = mongo.OpqMongoClient(conf.get("mongo.host"),
                                        int(conf.get("mongo.port")),
                                        conf.get("mongo.db"))
    next_available_incident_id = mongo.next_available_incident_id(mongo_client)
    incident_id_service = services.incident_id_provider.IncidentIdProvider(next_available_incident_id)

    zmq_context: zmq.Context = zmq.Context()
    zmq_req_socket: zmq.Socket = zmq_context.socket(zmq.REP)
    zmq_req_socket.bind(conf.get("zmq.incident_id_provider.rep.interface"))

    while True:
        req: bytes = zmq_req_socket.recv()
        mauka_message: protobuf.pb_util.mauka_pb2.MaukaMessage = \
            protobuf.pb_util.deserialize_mauka_message(req)
        if protobuf.pb_util.is_incident_id_req(mauka_message):
            resp = protobuf.pb_util.build_incident_id_resp(
                "incident_id_service",
                mauka_message.incident_id_req.req_id,
                incident_id_service.get_and_inc())
            zmq_req_socket.send(protobuf.pb_util.serialize_message(resp))
        else:
            _logger.error("Did not receive valid IncidentIdReq")
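# A minimal client sketch for the REQ/REP protocol above. It assumes
# protobuf.pb_util provides a build_incident_id_req(source, req_id) counterpart
# to the build_incident_id_resp used by the service, that responses carry an
# incident_id_resp.incident_id field, and that a matching req-side config key
# exists; none of these names are confirmed by this excerpt.
def request_incident_id(conf: config.MaukaConfig, req_id: int) -> int:
    """Requests the next available incident id from the incident id service."""
    import zmq

    zmq_context = zmq.Context()
    req_socket = zmq_context.socket(zmq.REQ)
    req_socket.connect(conf.get("zmq.incident_id_provider.req.interface"))
    req = protobuf.pb_util.build_incident_id_req("example_client", req_id)
    req_socket.send(protobuf.pb_util.serialize_message(req))
    resp = protobuf.pb_util.deserialize_mauka_message(req_socket.recv())
    return resp.incident_id_resp.incident_id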
def __init__(self, conf: config.MaukaConfig, exit_event: multiprocessing.Event):
    """
    Initializes this plugin.
    :param conf: Configuration dictionary
    :param exit_event: Exit event
    """
    super().__init__(conf, [Routes.heartbeat], StatusPlugin.NAME, exit_event)
    health_port = int(conf.get("plugins.StatusPlugin.port"))
    self.httpd_thread = threading.Thread(target=start_health_sate_httpd_server,
                                         args=(health_port,))
    self.httpd_thread.start()
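# start_health_sate_httpd_server is not shown in this excerpt. A minimal sketch
# of a blocking health endpoint it could start; the handler and the response
# body here are illustrative assumptions, not the repo's actual implementation.
def start_health_sate_httpd_server(port: int):
    """Serves a trivial health check endpoint on the given port."""
    import http.server

    class HealthHandler(http.server.BaseHTTPRequestHandler):
        def do_GET(self):
            body = b'{"ok": true}'
            self.send_response(200)
            self.send_header("Content-Type", "application/json")
            self.send_header("Content-Length", str(len(body)))
            self.end_headers()
            self.wfile.write(body)

    httpd = http.server.HTTPServer(("", port), HealthHandler)
    httpd.serve_forever()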
def profile_makai_event_plugin(data_file: str):
    """
    Profiles the waveform smoothing and frequency calculations used by the MakaiEventPlugin.
    :param data_file: Path to a file containing raw waveform samples.
    """
    if not os.path.exists(data_file):
        print('Data File provided: {} Not Found'.format(data_file))
        return

    # read file
    df = pd.read_table(data_file, header=None)

    # load configurations
    config_dict = load_config("mauka.config.json")
    config = MaukaConfig(config_dict)

    # create profiler
    pr = cProfile.Profile()

    # get entire waveform and single window of waveform
    waveform = df.values.flatten()

    # smooth waveform and profile
    pr.enable()
    smoothed_waveform = smooth_waveform(waveform)
    pr.disable()

    # open and write profile to output file
    file_name = data_file.split('/')[-1]
    output_file = "profilers/profile_results/profile_makai_event_plugin_{}".format(file_name)
    out_file = open(output_file, 'w')
    write_profile('Smoothing', out_file, pr)

    # profile single frequency calculation for a window
    # first obtain frequency window
    window_size = int(config.get("plugins.MakaiEventPlugin.frequencyWindowCycles") *
                      constants.SAMPLES_PER_CYCLE)
    downsample_factor = int(config.get("plugins.MakaiEventPlugin.frequencyDownSampleRate"))
    waveform_window = smoothed_waveform[:window_size]

    # frequency on a single window
    pr.enable()
    frequency(waveform_window, downsample_factor)
    pr.disable()

    # write profile to output file
    write_profile('Single Window Frequency Calculation', out_file, pr)

    # profile frequency calculation for entire waveform
    # first obtain configuration
    filter_order = int(config.get("plugins.MakaiEventPlugin.filterOrder"))
    cutoff_frequency = float(config.get("plugins.MakaiEventPlugin.cutoffFrequency"))

    pr.enable()
    frequencies = frequency_waveform(waveform, window_size, filter_order, cutoff_frequency,
                                     down_sample_factor=downsample_factor)
    pr.disable()

    # write profile to output file
    write_profile('Waveform Frequency Calculation', out_file, pr)
    out_file.close()
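# write_profile is not shown in this excerpt. A minimal sketch of what its usage
# suggests, built on the standard pstats module; the sort order and header
# format are assumptions.
def write_profile(title: str, out_file, pr: cProfile.Profile):
    """Writes the named profiler's statistics to the given open file."""
    import pstats

    out_file.write('{}\n'.format(title))
    # Stats accepts a Profile instance directly and writes to the given stream.
    stats = pstats.Stats(pr, stream=out_file)
    stats.sort_stats('cumulative')
    stats.print_stats()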
def bootstrap_db(conf: config.MaukaConfig):
    """
    Performs bootstrapping of the database.
    :param conf: Configuration.
    """
    mongo_client: mongo.OpqMongoClient = mongo.from_config(conf)

    # Check to make sure a laha config exists
    if mongo_client.get_laha_config() is None:
        logger.info("laha_config DNE, inserting default from config...")
        mongo_client.laha_config_collection.insert_one(conf.get("laha.config.default"))

    # Indexes
    mongo_client.measurements_collection.create_index("expire_at")
    mongo_client.trends_collection.create_index("expire_at")
    mongo_client.events_collection.create_index("expire_at")
    mongo_client.incidents_collection.create_index("expire_at")
def run_cli(cli_config: config.MaukaConfig):
    """Starts the REPL and sends commands to the plugin manager over TCP using ZMQ.
    :param cli_config: Configuration dictionary
    """
    zmq_context = zmq.Context()
    # noinspection PyUnresolvedReferences
    # pylint: disable=E1101
    zmq_request_socket = zmq_context.socket(zmq.REQ)
    zmq_request_socket.connect(cli_config.get("zmq.mauka.plugin.management.req.interface"))
    prompt = "opq-mauka> "
    try:
        zmq_request_socket.send_string("completions")
        completions = zmq_request_socket.recv_string()
        vocabulary = set(completions.split(","))
        readline.parse_and_bind("tab: complete")
        readline.set_completer(make_completer(vocabulary))
        while True:
            cmd = input(prompt).strip()
            if cmd == "exit":
                logger.info("Exiting mauka-cli")
                sys.exit(0)
            if cmd == "completions":
                zmq_request_socket.send_string("completions")
                completions = zmq_request_socket.recv_string()
                vocabulary = set(completions.split(","))
                readline.set_completer(make_completer(vocabulary))
                logger.debug(ok("Completions updated"))
                continue
            zmq_request_socket.send_string(cmd.strip())
            logger.debug(zmq_request_socket.recv_string())
    except (EOFError, KeyboardInterrupt):
        logger.info("Exiting mauka-cli")
        sys.exit(0)
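# make_completer is not shown in this excerpt. A minimal sketch of the readline
# completer factory its usage implies; this is an assumption, not the repo's
# actual implementation.
def make_completer(vocabulary: set):
    """Returns a readline completer over the given command vocabulary."""
    def completer(text: str, state: int):
        # readline calls this with state = 0, 1, 2, ... until None is returned.
        matches = sorted(word for word in vocabulary if word.startswith(text))
        return matches[state] if state < len(matches) else None
    return completer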
def profile_frequency_variation_plugin(data_file: str):
    """
    Profiler for the frequency variation plugin.
    :param data_file: Path to a file containing raw waveform samples.
    """
    if not os.path.exists(data_file):
        print('Data File provided: {} Not Found'.format(data_file))
        return

    # read file
    df = pd.read_table(data_file, header=None)

    # load configurations
    config_dict = load_config("config.json")
    config = MaukaConfig(config_dict)
    window_size = int(int(config.get("plugins.MakaiEventPlugin.frequencyWindowCycles")) *
                      constants.SAMPLES_PER_CYCLE)
    filter_order = int(config.get("plugins.MakaiEventPlugin.filterOrder"))
    cutoff_frequency = float(config.get("plugins.MakaiEventPlugin.cutoffFrequency"))
    freq_ref = float(constants.CYCLES_PER_SECOND)
    freq_var_low = float(config.get("plugins.FrequencyVariationPlugin.frequency.variation.threshold.low"))
    freq_var_high = float(config.get("plugins.FrequencyVariationPlugin.frequency.variation.threshold.high"))
    freq_interruption = float(config.get("plugins.FrequencyVariationPlugin.frequency.interruption"))
    max_lull = int(config.get("plugins.FrequencyVariationPlugin.max.lull.windows"))
    downsample_factor = int(config.get("plugins.MakaiEventPlugin.frequencyDownSampleRate"))

    # create profiler
    pr = cProfile.Profile()

    # get entire waveform
    waveform = df.values.flatten()

    # obtain frequencies
    frequencies = frequency_waveform(waveform, window_size, filter_order, cutoff_frequency,
                                     downsample_factor=downsample_factor)

    # profile frequency_incident_classifier
    pr.enable()
    incidents = frequency_incident_classifier(0, "", frequencies, 0, freq_ref, freq_var_high,
                                              freq_var_low, freq_interruption, window_size, max_lull)
    pr.disable()

    # open and write profile to output file
    file_name = data_file.split('/')[-1]
    output_file = "profilers/profile_results/profile_frequency_variation_plugin_{}".format(file_name)
    out_file = open(output_file, 'w')
    write_profile('Waveform Frequency Calculation', out_file, pr)

    # add incident count and frequency extrema to profile
    out_file.write('Incident count: {} \n \n'.format(len(incidents)))
    out_file.write('Frequency Min: {} \n \n'.format(frequencies.min()))
    out_file.write('Frequency Max: {} \n \n'.format(frequencies.max()))

    # Simulate an incident: 100 nominal cycles, 10 cycles at 60.2 Hz, then 100 nominal cycles
    waveform_1 = simulate_waveform(num_samples=int(100 * constants.SAMPLES_PER_CYCLE))
    waveform_2 = simulate_waveform(freq=60.2, num_samples=int(10 * constants.SAMPLE_RATE_HZ / 60.2))
    waveform_3 = simulate_waveform(num_samples=int(100 * constants.SAMPLES_PER_CYCLE))
    waveform = numpy.concatenate((waveform_1, waveform_2, waveform_3))

    # obtain frequencies
    frequencies = frequency_waveform(waveform, window_size, filter_order, cutoff_frequency,
                                     downsample_factor=downsample_factor)

    # profile frequency_incident_classifier on simulated event
    pr.enable()
    incidents = frequency_incident_classifier(0, "", frequencies, 0, freq_ref, freq_var_high,
                                              freq_var_low, freq_interruption, window_size, max_lull)
    pr.disable()

    # write profile to output file
    write_profile('Simulated Waveform Frequency Calculation: 100 cycles then 60.2 Hz for 10 cycles '
                  'then 100 cycles', out_file, pr)

    # add incident count, classifications, and start/end indices to profile
    out_file.write('Incident count: {} \n \n'.format(len(incidents)))
    out_file.write('Incident Classifications: {} \n \n'.format(
        str([i['incident_classifications'] for i in incidents])))
    window_duration = (window_size / constants.SAMPLE_RATE_HZ) * 1000
    start_indices = [i['incident_start_ts'] / window_duration for i in incidents]
    end_indices = [i['incident_end_ts'] / window_duration for i in incidents]
    out_file.write('Incident Start : End indices: {} : {} \n \n'.format(
        str(start_indices), str(end_indices)))
    out_file.write("Frequencies: {}".format(str(frequencies)))
    out_file.close()
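# simulate_waveform is not shown in this excerpt. A minimal sketch of a
# sine-wave generator consistent with its usage above; the amplitude and the
# default arguments are assumptions.
def simulate_waveform(freq: float = constants.CYCLES_PER_SECOND,
                      amplitude: float = 120.0 * numpy.sqrt(2),
                      num_samples: int = int(constants.SAMPLE_RATE_HZ)) -> numpy.ndarray:
    """Generates num_samples of a sine wave at freq Hz sampled at SAMPLE_RATE_HZ."""
    times = numpy.arange(num_samples) / constants.SAMPLE_RATE_HZ
    return amplitude * numpy.sin(2 * numpy.pi * freq * times)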