class TelemetryClient:
    def __init__(self, pubsub_endpoint: str, topic: str):
        self.kafka_mgr = PubSubManagerKafka(
            PubSubConnectionKafka(pubsub_endpoint))
        self.producer = self.kafka_mgr.create_producer(topic, "testapp_sig_gen")

    # example input: '2019-08-17 00:39:00 0.6 0.3'
    def getFeatures(self, s: str):
        params = s.split(" ")
        date = params[0].split("-")
        day = datetime.date(int(date[0]), int(date[1]), int(date[2])).weekday()
        hour = int(params[1].split(":")[0])
        # feature layout: [weekday, hour, value1, value2]
        return np.array(
            [day, hour, float(params[2]), float(params[3])]).astype(np.float32)

    def send_telemetry(self, inputstr: str):
        features = self.getFeatures(inputstr)
        msg = InferenceMessage(
            AbstractMessage.create_new_correlation_id(),
            data_descriptor_name="update app message",
            feature_data=[
                InferenceDataFeature("float_input", features)
            ])
        self.producer.publish(msg)
        print(f"Message sent! {msg}")
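
# A minimal usage sketch; the endpoint and topic ("localhost:9092",
# "telemetry_in") are illustrative placeholders. For the example input
# '2019-08-17 00:39:00 0.6 0.3' the published features are
# [5., 0., 0.6, 0.3]: 2019-08-17 was a Saturday (weekday 5) at hour 0.
if __name__ == "__main__":
    client = TelemetryClient("localhost:9092", "telemetry_in")
    client.send_telemetry("2019-08-17 00:39:00 0.6 0.3")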
class TelemetryClient:
    def __init__(self, pubsub_endpoint: str, topic: str):
        self.kafka_mgr = PubSubManagerKafka(
            PubSubConnectionKafka(pubsub_endpoint))
        self.producer = self.kafka_mgr.create_producer(topic, "testapp_sig_gen")

    @staticmethod
    def svm_features(s):
        valid_x = {s}
        # load the fitted TF-IDF vectorizer and vectorize the input text;
        # the with-block ensures the pickle file handle is closed
        with open('../thebox_testapp/keyStrokes/tfidfvocabulary.pkl', 'rb') as fi:
            tfidf_vect = pickle.load(fi)
        xvalid_tfidf = tfidf_vect.transform(valid_x).toarray()
        return xvalid_tfidf.astype(np.float32)

    def send_telemetry(self, text: str):
        key_stroke_feature = np.squeeze(self.svm_features(text))
        msg = InferenceMessage(
            AbstractMessage.create_new_correlation_id(),
            data_descriptor_name="demo app message",
            feature_data=[
                InferenceDataFeature("float_input", key_stroke_feature)
            ])
        self.producer.publish(msg)
class TelemetryClient:
    def __init__(self, pubsub_endpoint: str, topic: str):
        self.kafka_mgr = PubSubManagerKafka(
            PubSubConnectionKafka(pubsub_endpoint))
        self.producer = self.kafka_mgr.create_producer(topic, "testapp_sig_gen")

    # example input: '09/13/2019 11:30 PM'
    def getDayTime(self, s: str):
        parts = s.split()
        date = parts[0].split("/")
        day = datetime.date(int(date[2]), int(date[0]), int(date[1])).weekday()
        # convert the 12-hour clock to 24-hour (12 AM -> 0, 12 PM -> 12)
        hour = int(parts[1].split(":")[0]) % 12
        if parts[2] == 'PM':
            hour += 12
        return np.array([day, hour]).astype(np.float32)

    def send_telemetry(self, dateTime: str):
        dateTimeFeature = self.getDayTime(dateTime)
        msg = InferenceMessage(
            AbstractMessage.create_new_correlation_id(),
            data_descriptor_name="demo app message",
            feature_data=[
                InferenceDataFeature("float_input", dateTimeFeature)
            ])
        self.producer.publish(msg)
        print(f"Message sent! {msg}")
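
# A minimal usage sketch; the endpoint and topic are placeholders. For the
# example input '09/13/2019 11:30 PM', getDayTime yields [4., 23.]:
# 2019-09-13 was a Friday (weekday 4) and 11 PM is hour 23.
if __name__ == "__main__":
    client = TelemetryClient("localhost:9092", "telemetry_in")
    client.send_telemetry("09/13/2019 11:30 PM")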
class TheBoxSignalClient:
    def __init__(self, pubsub_endpoint: str, topic: str):
        self.kafka_mgr = PubSubManagerKafka(
            PubSubConnectionKafka(pubsub_endpoint))
        self.producer = self.kafka_mgr.create_producer(topic, "testapp_sig_gen")

    def send(self, image: np.ndarray):
        print(f"image size: {image.size}")
        msg = InferenceMessage(
            AbstractMessage.create_new_correlation_id(),
            data_descriptor_name="camera_feed_pos_demo",
            feature_data=[
                InferenceDataFeature("image_tensor", image),
            ])
        self.producer.publish(msg)
        print(f"Message sent! {msg}")
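
# A minimal usage sketch; the endpoint, topic, and image shape are
# placeholders. A synthetic HxWx3 uint8 frame is published as "image_tensor".
if __name__ == "__main__":
    client = TheBoxSignalClient("localhost:9092", "camera_feed")
    frame = np.zeros((480, 640, 3), dtype=np.uint8)  # stand-in camera frame
    client.send(frame)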
class TelemetryClient:
    def __init__(self, pubsub_endpoint: str, topic: str):
        self.appProc = AppProcessor()
        self.urlProc = UrlProcessor()
        self.kafka_mgr = PubSubManagerKafka(
            PubSubConnectionKafka(pubsub_endpoint))
        self.producer = self.kafka_mgr.create_producer(topic, "testapp_sig_gen")

    def send_telemetry(self, url: str):
        appFeature = self.appProc.process_app("chrome.exe").astype(np.float32)
        urlFeature = self.urlProc.process_url(url).astype(np.float32)
        msg = InferenceMessage(
            AbstractMessage.create_new_correlation_id(),
            data_descriptor_name="demo app message",
            feature_data=[
                InferenceDataFeature("app", appFeature),
                InferenceDataFeature("url", urlFeature)
            ])
        self.producer.publish(msg)
        print(f"Message sent! {msg}")
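
# A minimal usage sketch; the endpoint, topic, and URL are placeholders. The
# published message carries two named features, "app" and "url".
if __name__ == "__main__":
    client = TelemetryClient("localhost:9092", "browse_telemetry")
    client.send_telemetry("https://example.com")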
class TelemetryClient:
    def __init__(self, pubsub_endpoint: str, topic: str):
        self.kafka_mgr = PubSubManagerKafka(
            PubSubConnectionKafka(pubsub_endpoint))
        self.producer = self.kafka_mgr.create_producer(topic, "testapp_sig_gen")

    def SVM_features(self, s):
        valid_x = {s}
        # load the fitted TF-IDF vectorizer and vectorize the input text;
        # the with-block ensures the pickle file handle is closed
        with open('tfidfvocabulary.pkl', 'rb') as fi:
            tfidf_vect = pickle.load(fi)
        xvalid_tfidf = tfidf_vect.transform(valid_x).toarray()
        return xvalid_tfidf.astype(np.float32)

    def send_telemetry(self, text: str):
        keyStrokeFeature = np.squeeze(self.SVM_features(text))
        msg = InferenceMessage(
            AbstractMessage.create_new_correlation_id(),
            data_descriptor_name="demo app message",
            feature_data=[
                InferenceDataFeature("float_input", keyStrokeFeature)
            ])
        self.producer.publish(msg)
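
# SVM_features assumes 'tfidfvocabulary.pkl' holds an already-fitted
# vectorizer. A sketch of how such a pickle could be produced, assuming
# scikit-learn's TfidfVectorizer and a hypothetical training corpus:
import pickle
from sklearn.feature_extraction.text import TfidfVectorizer

def build_tfidf_vocabulary(corpus, path='tfidfvocabulary.pkl'):
    tfidf_vect = TfidfVectorizer()
    tfidf_vect.fit(corpus)  # learn the vocabulary and idf weights
    with open(path, 'wb') as fo:
        pickle.dump(tfidf_vect, fo)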
class TelemetryClient:
    def __init__(self, pubsub_endpoint: str, topic: str):
        self.data_proc = DataProcessor()
        self.kafka_mgr = PubSubManagerKafka(
            PubSubConnectionKafka(pubsub_endpoint))
        self.producer = self.kafka_mgr.create_producer(topic, "testapp_sig_gen")

    def send_telemetry(self):
        X_test = self.data_proc.preprocess_data()
        num_data = X_test.shape[0]
        for idx in range(num_data):
            msg = InferenceMessage(
                AbstractMessage.create_new_correlation_id(),
                data_descriptor_name="demo app message",
                feature_data=[
                    InferenceDataFeature("input_sequence", X_test[idx, :])
                ])
            self.producer.publish(msg)
            print(f"Message sent! {msg}")
class KafkaMessageSender:
    def __init__(self, pubsub_endpoint: str, topic: str):
        self.kafka_mgr = PubSubManagerKafka(
            PubSubConnectionKafka(pubsub_endpoint))
        # uncomment the following lines to reset the kafka db
        # self.kafka_mgr.reset()
        # time.sleep(1000)
        self.producer = self.kafka_mgr.create_producer(topic, "testapp_sig_gen")
        # rolling window: the last 64 per-second aggregates for 10 sensors
        self.data = np.zeros([1, 64, 10])
        self.data_delta = self._empty_delta()
        self.interval_begin = datetime.datetime.now()

    @staticmethod
    def _empty_delta():
        return {
            "mhhd_s1": 0.0, "mhhd_s2": 0.0, "mhhd_s3": 0.0,
            "sensor3": 0.0, "sensor4": 0.0, "sensor5": 0.0,
            "sensor6": 0.0, "sensor7": 0.0, "sensor8": 0.0, "sensor9": 0.0
        }

    def send_signal(self):
        msg = InferenceMessage(
            AbstractMessage.create_new_correlation_id(),
            data_descriptor_name="demo app message",
            feature_data=[
                InferenceDataFeature("input_sequence", self.data[0, :, :])
            ])
        self.producer.publish(msg)
        self.displayData()
        print(f"Message sent! {msg}")

    # The callback for when the client receives a CONNACK response from the server.
    def on_connect(self, client, userdata, flags, rc):
        print("Connected with result code " + str(rc))

    # The callback for when a PUBLISH message is received from the server.
    def on_message(self, client, userdata, msg):
        print(msg.topic + " " + str(msg.payload))
        # the payload ends in "-<count>"; strip the trailing quote that
        # str() leaves on a bytes payload
        count = str(msg.payload).split("-")[-1]
        if count[-1] == '\'':
            count = count[:-1]
        self.data_delta[msg.topic] += float(count)
        time_delta = datetime.datetime.now() - self.interval_begin
        if time_delta.seconds >= 1:
            # once per second, shift the aggregated deltas into the window,
            # dropping the oldest column, then publish and reset
            data_delta_array = np.array(list(self.data_delta.values()))
            self.data = np.append([[data_delta_array]], self.data[:, :63, :],
                                  axis=1)
            self.interval_begin = datetime.datetime.now()
            self.send_signal()
            self.data_delta = self._empty_delta()

    def MQTTMessaging(self):
        client = mqtt.Client()
        # register the callbacks before connecting so the CONNACK and early
        # messages are not missed
        client.on_connect = self.on_connect
        client.on_message = self.on_message
        client.connect("localhost", 1834, 60)
        client.subscribe("mhhd_s1")
        client.subscribe("mhhd_s2")
        client.subscribe("mhhd_s3")
        # Blocking call that processes network traffic, dispatches callbacks
        # and handles reconnecting. Other loop*() functions give a threaded
        # or a manual interface.
        client.loop_forever()

    def displayData(self):
        plt.clf()
        # plot the 64-sample window for the first three sensors
        x = np.linspace(0.0, 64, 64)
        y = self.data[0, :, :]
        labels = ["Sensor1", "Sensor2", "Sensor3"]
        for i in range(3):
            plt.plot(x, y[:, i], label=labels[i])
        plt.xlim(left=0, right=65)
        plt.ylim(bottom=0, top=100)
        plt.xlabel("Time")
        plt.ylabel("Sensor Reading")
        plt.title("Readings From Sensor")
        plt.legend()
        plt.show(block=False)
        plt.pause(0.1)
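
# A minimal driver sketch; the Kafka endpoint and topic are placeholders.
# MQTTMessaging() blocks forever, aggregating readings per second and
# forwarding each 64-step window to Kafka; the MQTT payloads are expected
# to end in "-<count>", which on_message parses out after the last '-'.
if __name__ == "__main__":
    sender = KafkaMessageSender("localhost:9092", "sensor_sequence")
    sender.MQTTMessaging()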
class TelemetryClient:
    def __init__(self, pubsub_endpoint: str, topic: str):
        self.kafka_mgr = PubSubManagerKafka(
            PubSubConnectionKafka(pubsub_endpoint))
        self.producer = self.kafka_mgr.create_producer(topic, "testapp_sig_gen")
        self.params = {
            'dim': 1,
            'sequence_length': 37,
            'noise_scale': 0.05,
            'mixed_sample_size': 1,
            'anomaly_magnitude': 0.5,
            'anomaly_window_size': 13
        }
        self.sequence = np.zeros(
            (self.params['dim'], self.params['sequence_length']))
        self.abnormal_peak = np.zeros(
            (self.params['dim'],
             self.params['sequence_length'] + self.params['anomaly_window_size']))
        self.time = 0

    def get_normal_reading(self, dim, sequence_length, noise_scale, time_t,
                           **kwargs):
        """
        Generate one normal reading and push it onto the normal part of the
        sequence, dropping the oldest reading.
        """
        w = np.linspace(2.6, 5.8, dim)  # frequency
        s = np.linspace(1.2, 9.4, dim)  # phase
        reading = []
        for i in range(dim):
            sine = np.sin(w[i] * time_t * 0.02 + s[i])
            noise = np.random.normal(0, noise_scale)
            reading.append(sine + noise)
        reading = [np.array(reading)]
        self.sequence = np.append(
            reading, self.sequence[:, :sequence_length - 1], axis=1)

    def get_abnormal_reading(self):
        """
        Push a zero onto the abnormal part of the sequence, dropping the
        oldest value.
        """
        abnormal_sequence_length = (self.params['anomaly_window_size'] +
                                    self.params['sequence_length'])
        self.abnormal_peak = np.append(
            np.zeros((self.params['dim'], 1)),
            self.abnormal_peak[:, :abnormal_sequence_length - 1],
            axis=1)

    def add_abnormal_peak_to_bank(self):
        """
        Write a Gaussian-shaped anomaly into the head of the abnormal part of
        the sequence.
        """
        anomaly_magnitude = 1
        window_size = self.params['anomaly_window_size']
        window_std = 2  # anomaly standard deviation
        anomaly = anomaly_magnitude * signal.gaussian(window_size, std=window_std)
        self.abnormal_peak[:, 0:window_size] = anomaly

    def send_signal(self, usrInput: str):
        if usrInput == "Y":
            self.add_abnormal_peak_to_bank()
        self.get_normal_reading(time_t=self.time, **self.params)
        self.get_abnormal_reading()
        self.time = self.time + 1
        # combine the normal and abnormal parts of the sequence
        combined_sequence = (
            self.abnormal_peak[:, self.params['anomaly_window_size']:] +
            self.sequence)
        # the model expects one-dimensional input, so flatten the data here
        sequence_flatten = np.reshape(combined_sequence,
                                      np.prod(combined_sequence.shape))
        msg = InferenceMessage(
            AbstractMessage.create_new_correlation_id(),
            data_descriptor_name="demo app message",
            feature_data=[
                InferenceDataFeature("input_sequence", sequence_flatten)
            ])
        self.producer.publish(msg)
        displayData(sequence_flatten)
        print(f"Message sent! {msg}")
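
# A minimal driver sketch; the endpoint and topic are placeholders. Each
# iteration emits one reading of the sine-plus-noise sequence; answering "Y"
# injects a Gaussian-shaped anomaly into the abnormal bank.
if __name__ == "__main__":
    client = TelemetryClient("localhost:9092", "anomaly_demo")
    while True:
        usr_input = input("Inject anomaly? [Y/N]: ")
        client.send_signal(usr_input)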
class OrchestrationService(BaseService):
    """This is the master service that is responsible for:
    - taking administrative input from the external environment via the exposed API
    - managing the communication channels (pubsub + message pipeline)
    - using orchestration messages to direct the other micro-services
    """
    ORCHESTRATION_SERVICE_STORE_NAME_DEFAULT = "table_orchestration_service"

    def __init__(self, config: Config):
        orch_infer_topic = config.try_get(
            'servicesettings.orchestration-inference-channel-in',
            default.ORCHESTRATION_INFER_TOPIC_DEFAULT)
        orch_notif_topic = config.try_get(
            'servicesettings.orchestration-notification-channel-in',
            default.ORCHESTRATION_NOTIF_TOPIC_DEFAULT)
        self.__scenario_store_name = config.try_get(
            'servicesettings.orchestration-service-store-name',
            OrchestrationService.ORCHESTRATION_SERVICE_STORE_NAME_DEFAULT)
        self.__client_name = config.try_get(
            'servicesettings.service-client-name', None)

        # load configurations
        self.__db_conn = CouchDbConnection(config['store.couchdb.connection'],
                                           config['store.couchdb.username'],
                                           config['store.couchdb.usertoken'])
        self.__pubsub_conn = PubSubConnectionKafka(
            config['eventqueue.kafka.server'])

        # initialize db readers
        self.__scn_repo = ScenarioRepository(self.__db_conn,
                                             self.__scenario_store_name)

        # initialize message bus manager and producers
        self.__pubsub_mgr = PubSubManagerKafka(self.__pubsub_conn,
                                               acquire_service_logger())
        self.__pubsub_mgr.create_topic_if_not_exist(orch_infer_topic)
        self.__pubsub_mgr.create_topic_if_not_exist(orch_notif_topic)
        self.__producers = {
            'infer': self.__pubsub_mgr.create_producer(orch_infer_topic,
                                                       self.__client_name),
            'notif': self.__pubsub_mgr.create_producer(orch_notif_topic,
                                                       self.__client_name),
        }

        super().__init__()

    def __validate_scenario(self, scenario: Scenario):
        """
        Validate the scenario input and raise an exception with a list of
        validation errors, if any
        """
        errors = []
        if (scenario.scenario_definition is None or
                type(scenario.scenario_definition) is not ScenarioDefinition):
            errors.append(("scenario.scenario_definition", "is not valid"))
        else:
            scn_def = scenario.scenario_definition
            if (scn_def.inference_definition is None or
                    type(scn_def.inference_definition)
                    is not InferenceDefinition):
                errors.append(
                    ("scenario.scenario_definition.inference_definition",
                     "is not valid"))
            if (scn_def.notification_definition is None or
                    type(scn_def.notification_definition)
                    is not NotificationDefinition):
                errors.append(
                    ("scenario.scenario_definition.notification_definition",
                     "is not valid"))
            # check topic coherence; guarded so an invalid definition
            # reported above is not dereferenced
            if not errors:
                if (scn_def.notification_definition.in_topic !=
                        scn_def.inference_definition.out_topic):
                    errors.append((
                        "scenario.scenario_definition.notification_definition.in_topic",
                        "does not equal inference_definition.out_topic"))
        # TODO:
        #   validate the additional fields in the inference/notification
        #   definitions; decide how that validation should be done: by the
        #   orchestration service itself, or by each worker service, which
        #   would then post back the result
        if len(errors) > 0:
            raise ServiceParameterError(errors)

    # Public APIs
    def create_scenario(self, scenario: Scenario):
        self.__validate_scenario(scenario)
        scn_def = scenario.scenario_definition

        # create the topics required by this scenario
        self.log.debug("Creating scenario pubsub topics ...")
        scn_topics = [
            scn_def.inference_definition.in_topic,
            scn_def.inference_definition.out_topic,
            scn_def.notification_definition.out_topic
        ]
        for t in scn_topics:
            self.__pubsub_mgr.create_topic_if_not_exist(t)

        # post messages and verify that they are successfully created
        corr_id = AbstractMessage.create_new_correlation_id()
        self.log.debug(
            f"Notify worker services for scenario creation: {scenario} "
            f"with corr-id = {corr_id} ...")
        self.__producers['infer'].publish(
            OrchestrationMessage(corr_id, scn_def.inference_definition))
        self.__producers['notif'].publish(
            OrchestrationMessage(corr_id, scn_def.notification_definition))

        # TODO:
        #   wait for confirmation of the creation message on the post-back
        #   channel

        # save the scenario
        self.log.debug(f"Writing scenario: {scenario} ...")
        self.__scn_repo.save(scenario)

        # return the creation result
        self.log.debug("Scenario successfully created!")
        return scenario

    def get_scenarios(self) -> List[Scenario]:
        return self.__scn_repo.load_all()

    def delete_scenarios(self, scenario_id: str):
        # TODO:
        #   validate that the scenario exists
        #   post messages to worker services to delete their respective
        #   definitions
        #   delete the scenario
        raise NotImplementedError("delete_scenario")
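
# A usage sketch from a hypothetical caller. Construction of the Scenario
# object depends on the model classes and is not shown; this also assumes
# ServiceParameterError keeps the (field, reason) list as its first argument.
def handle_create_scenario(service: OrchestrationService, scenario: Scenario):
    try:
        return service.create_scenario(scenario)
    except ServiceParameterError as err:
        # each entry is a (field, reason) tuple from __validate_scenario
        for field, reason in err.args[0]:
            print(f"invalid scenario: {field} {reason}")
        raise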