def connect(self):
    """Initialize publisher and subscribers"""
    # One routing key per metric, all scoped to this user and device type.
    self.routing_keys = {
        metric: _ROUTING_KEY % (self.user_id, self.device_type, metric)
        for metric in (self.input_metric, self.output_metric, self.tag_metric)
    }

    credentials = (self.rmq_address, self.rmq_user, self.rmq_pwd)
    self.tag_subscriber = PikaSubscriber(*credentials)
    self.classification_publisher = PikaPublisher(*credentials)
    self.input_metric_subscriber = PikaSubscriber(*credentials)
    self.output_metric_publisher = PikaPublisher(*credentials)

    # Open all connections first, then bind each endpoint to its key.
    for endpoint in (self.tag_subscriber,
                     self.classification_publisher,
                     self.input_metric_subscriber,
                     self.output_metric_publisher):
        endpoint.connect()

    # NOTE(review): classification_publisher is registered on the *tag*
    # routing key in the original code; preserved as-is — confirm intended.
    for endpoint, metric in ((self.tag_subscriber, self.tag_metric),
                             (self.classification_publisher, self.tag_metric),
                             (self.input_metric_subscriber, self.input_metric),
                             (self.output_metric_publisher, self.output_metric)):
        endpoint.register(self.routing_keys[metric])
def initialize(self):
    """
    Initialize classifier, publisher (classification), and subscribers
    (mu and tag)
    """
    # One HTM classifier per hemisphere, built from the same network config,
    # training data and category set.
    self.classifiers["left"] = HTMClassifier(network_config, _TRAINING_DATA,
                                             _CATEGORIES)
    self.classifiers["right"] = HTMClassifier(network_config, _TRAINING_DATA,
                                              _CATEGORIES)

    for classifier in self.classifiers.values():
        classifier.initialize()
        if _PRE_TRAIN:
            # Optionally pre-train on the bundled training set before going
            # live. `partitions` is defined elsewhere in the module.
            classifier.train(_TRAIN_SET_SIZE, partitions)

    # AMQP endpoints: consume tags and mu values; emit tags and
    # classification results.
    self.tag_subscriber = PikaSubscriber(self.rmq_address, self.rmq_user,
                                         self.rmq_pwd)
    self.tag_publisher = PikaPublisher(self.rmq_address, self.rmq_user,
                                       self.rmq_pwd)
    self.mu_subscriber = PikaSubscriber(self.rmq_address, self.rmq_user,
                                        self.rmq_pwd)
    self.classification_publisher = PikaPublisher(self.rmq_address,
                                                  self.rmq_user, self.rmq_pwd)

    # Connect everything before binding routing keys.
    self.tag_subscriber.connect()
    self.tag_publisher.connect()
    self.mu_subscriber.connect()
    self.classification_publisher.connect()

    self.tag_subscriber.subscribe(self.routing_keys["tag"])
    self.tag_publisher.register(self.routing_keys["tag"])
    self.mu_subscriber.subscribe(self.routing_keys["mu"])
    self.classification_publisher.register(self.routing_keys["classification"])
def connect(self):
    """ Initialize routing keys, publisher, and subscriber """
    self._validate_metrics()

    if self._input_metrics is not None:  # Sources have no input metrics
        # Build every input routing key first, then one subscriber per key.
        self.routing_keys.update({
            key: _ROUTING_KEY % (self.user_id, self.device_type, name)
            for key, name in self._input_metrics.items()
        })
        for key in self._input_metrics:
            subscriber = PikaSubscriber(self.rmq_address, self.rmq_user,
                                        self.rmq_pwd)
            subscriber.connect()
            subscriber.register(self.routing_keys[key])
            self.subscribers[key] = subscriber

    if self._output_metrics is not None:  # Sinks have no output metrics
        # Same pattern for the output side with publishers.
        self.routing_keys.update({
            key: _ROUTING_KEY % (self.user_id, self.device_type, name)
            for key, name in self._output_metrics.items()
        })
        for key in self._output_metrics:
            publisher = PikaPublisher(self.rmq_address, self.rmq_user,
                                      self.rmq_pwd)
            publisher.connect()
            publisher.register(self.routing_keys[key])
            self.publishers[key] = publisher
def connect(self):
    """ Initialize EEG preprocessor, publisher, and subscriber """
    # Everything below must have been set via configure() first; the error
    # text is identical to the original per-field checks.
    for label, value in (("Step size", self.step_size),
                         ("Electrode placement", self.electrodes_placement),
                         ("Input metric", self.input_metric),
                         ("Output metric", self.output_metric)):
        if value is None:
            raise ValueError("%s can't be none. "
                             "Use configure() to set it." % label)

    self.routing_keys = {
        metric: _ROUTING_KEY % (self.user_id, self.device_type, metric)
        for metric in (self.input_metric, self.output_metric)
    }

    self.mu_publisher = PikaPublisher(self.rmq_address, self.rmq_user,
                                      self.rmq_pwd)
    self.eeg_subscriber = PikaSubscriber(self.rmq_address, self.rmq_user,
                                         self.rmq_pwd)

    self.eeg_subscriber.connect()
    self.mu_publisher.connect()

    self.mu_publisher.register(self.routing_keys[self.output_metric])
    self.eeg_subscriber.register(self.routing_keys[self.input_metric])
def initialize(self):
    """ Initialize EEG preprocessor, publisher, and subscriber """
    credentials = (self.rmq_address, self.rmq_user, self.rmq_pwd)
    self.mu_publisher = PikaPublisher(*credentials)
    self.eeg_subscriber = PikaSubscriber(*credentials)

    # Connect both endpoints, then bind: publish mu, consume raw EEG.
    self.eeg_subscriber.connect()
    self.mu_publisher.connect()
    self.mu_publisher.register(self.routing_keys[_MU])
    self.eeg_subscriber.subscribe(self.routing_keys[_EEG])
class MockPublisher(object):
    """
    Publishes random multi-channel EEG-like samples to RabbitMQ, batched
    into fixed-size buffers. Intended for testing consumers end-to-end.
    """

    def __init__(self, user, device, metric):
        """
        @param user: (string) user id, first segment of the routing key.
        @param device: (string) device name, second segment.
        @param metric: (string) metric name, third segment.
        """
        self.routing_key = "%s:%s:%s" % (user, device, metric)
        self.num_channels = 2  # get_num_channels(device, metric)
        self.pub = None  # set in connect()

    def connect(self, host, username, pwd):
        """Open the publisher connection and register the routing key."""
        self.pub = PikaPublisher(host, username, pwd)
        self.pub.connect()
        self.pub.register(self.routing_key)
        print("Connected to: {}".format(host))

    def publish(self, buffer_size):
        """
        Generate random samples forever, flushing to RabbitMQ every
        `buffer_size` samples. Never returns.
        """
        print("Publishing on queue: {}".format(self.routing_key))
        data_buffer = []
        while 1:
            data = {"timestamp": int(time.time() * 1000)}
            for i in xrange(self.num_channels):
                data["channel_%s" % i] = random.random() * 100
            time.sleep(0.01)
            # BUG FIX: the original appended only while the buffer was below
            # capacity and discarded the sample generated on each flush
            # iteration. Append first, then flush when the batch is full.
            data_buffer.append(data)
            if len(data_buffer) >= buffer_size:
                self.pub.publish(self.routing_key, data_buffer)
                print(data_buffer)
                data_buffer = []
class MockPublisher(object):
    """
    Publishes random binary tag values (0 or 1) to RabbitMQ, batched into
    fixed-size buffers. Intended for testing tag consumers.
    """

    def __init__(self, user, device, metric):
        """
        @param user: (string) user id, first segment of the routing key.
        @param device: (string) device name, second segment.
        @param metric: (string) metric name, third segment.
        """
        self.routing_key = "%s:%s:%s" % (user, device, metric)
        self.pub = None  # set in connect()

    def connect(self, host, username, pwd):
        """Open the publisher connection and register the routing key."""
        self.pub = PikaPublisher(host, username, pwd)
        self.pub.connect()
        self.pub.register(self.routing_key)
        print("Connected to: {}".format(host))

    def publish(self, buffer_size):
        """
        Generate a random 0/1 tag every 0.1s forever, flushing to RabbitMQ
        every `buffer_size` samples. Never returns.
        """
        print("Publishing on queue: {}".format(self.routing_key))
        data_buffer = []
        while 1:
            time.sleep(0.1)
            random_value = random.random()
            if random_value > .5:
                value = 1
            else:
                value = 0
            data = {"timestamp": int(time.time() * 1000),
                    "value": value}
            # BUG FIX: the original appended only while the buffer was below
            # capacity and discarded the sample generated on each flush
            # iteration. Append first, then flush when the batch is full.
            data_buffer.append(data)
            if len(data_buffer) >= buffer_size:
                self.pub.publish(self.routing_key, data_buffer)
                print(data_buffer)
                data_buffer = []
#!/usr/bin/env python from brainsquared.publishers.PikaPublisher import PikaPublisher USER_ID = "brainsquared" MODULE_IDS = ["module0", "module1", "module2", "module3"] DEVICE = "openbci" pub = PikaPublisher("rabbitmq.cloudbrain.rocks", "cloudbrain", "cloudbrain") pub.connect() for module_id in MODULE_IDS: TAG_KEY = '%s:%s:tag' % (USER_ID, module_id) pub.register(TAG_KEY) pub.publish(TAG_KEY, {"timestamp": 1, "value": "middle"}) #pub.publish(TAG_KEY, {"timestamp": 1, "value": "left"}) #pub.publish(TAG_KEY, {"timestamp": 1, "value": "right"}) MU_KEY = '%s:%s:mu' % (USER_ID, DEVICE) pub.register(MU_KEY) pub.publish(MU_KEY, {"timestamp": 1, "left": 1, "right": 3})
from brainsquared.publishers.PikaPublisher import PikaPublisher

# RabbitMQ and web server settings for the tagging API.
_RMQ_ADDRESS = "rabbitmq.cloudbrain.rocks"
_RMQ_USER = "******"
_RMQ_PWD = "cloudbrain"
_WEBSERVER_PORT = 8080
_API_VERSION = "v0.1"
_VALID_MODULES = ["motor_imagery"]

modules = {}

app = Flask(__name__)
CORS(app)
app.config['PROPAGATE_EXCEPTIONS'] = True

# Single shared publisher; pre-registered on module0's tag queue.
tag_publisher = PikaPublisher(_RMQ_ADDRESS, _RMQ_USER, _RMQ_PWD)
tag_publisher.connect()
tag_publisher.register("brainsquared:module0:tag")


@app.route('/api/%s/users/<string:user_id>/modules/<string:module_id>/tag'
           % _API_VERSION, methods=['POST'])
def create_tag(user_id, module_id):
    """Create new module tag"""
    timestamp = request.json["timestamp"]
    value = request.json['value']
    data = {"timestamp": timestamp, "value": value}
    routing_key = "%s:%s:%s" % (user_id, module_id, "tag")
    tag_publisher.publish(routing_key, data)
    # NOTE(review): this view returns None, which Flask rejects at request
    # time — it should return a response body and status (e.g. the payload
    # with a 201). Left unchanged because the Flask imports are outside this
    # view of the file.
class PreprocessingModule(object):
    """
    Consumes raw EEG from RabbitMQ, removes eye blinks via ICA (fit in a
    background thread), extracts left/right mu values via STFT, and
    republishes them on the mu routing key.
    """

    def __init__(self, user_id, module_id, device_type, rmq_address,
                 rmq_user, rmq_pwd, step_size):
        self.user_id = user_id
        self.module_id = module_id
        self.device_type = device_type
        self.rmq_address = rmq_address
        self.rmq_user = rmq_user
        self.rmq_pwd = rmq_pwd
        self.eeg_subscriber = None  # created in initialize()
        self.mu_publisher = None  # created in initialize()
        self.routing_keys = {
            _EEG: _ROUTING_KEY % (user_id, device_type, _EEG),
            _MU: _ROUTING_KEY % (user_id, device_type, _MU),
        }
        self.preprocessor = None
        # Accumulated raw samples; assumes 8 EEG channels — TODO confirm.
        self.eeg_data = np.zeros((0, 8))
        self.count = 0  # samples seen so far (advanced by step_size per message)
        self.eyeblinks_remover = EyeBlinksRemover()
        self.step_size = step_size
        self.started_fit = False  # True once the first ICA fit was started

    def initialize(self):
        """ Initialize EEG preprocessor, publisher, and subscriber """
        self.mu_publisher = PikaPublisher(self.rmq_address, self.rmq_user,
                                          self.rmq_pwd)
        self.eeg_subscriber = PikaSubscriber(self.rmq_address, self.rmq_user,
                                             self.rmq_pwd)
        self.eeg_subscriber.connect()
        self.mu_publisher.connect()
        self.mu_publisher.register(self.routing_keys[_MU])
        self.eeg_subscriber.subscribe(self.routing_keys[_EEG])

    def start(self):
        # Blocks consuming EEG messages; each message goes through
        # _preprocess().
        _LOGGER.info("[Module %s] Starting Preprocessing. Routing "
                     "keys: %s" % (self.module_id, self.routing_keys))
        self.eeg_subscriber.consume_messages(self.routing_keys[_EEG],
                                             self._preprocess)

    def refit_ica(self):
        # Fit ICA in a background thread so message consumption is not
        # blocked. The first 1000 samples are skipped — presumably to drop
        # startup noise; TODO confirm.
        t = Thread(target=self.eyeblinks_remover.fit,
                   args=(self.eeg_data[1000:],))
        t.start()
        # self.eyeblinks_remover.fit(self.eeg_data[1000:])

    def _preprocess(self, ch, method, properties, body):
        # AMQP consume callback: accumulate raw data, periodically refit
        # ICA, then publish STFT-derived mu values once ICA is fitted.
        eeg = json.loads(body)
        self.eeg_data = np.vstack([self.eeg_data, get_raw(eeg)])
        # self.count += len(eeg)
        self.count += self.step_size
        print(self.count)
        # First fit at >= 5000 samples, then refit every 10000 samples.
        if (self.count >= 5000 and not self.started_fit) \
                or self.count % 10000 == 0:
            _LOGGER.info("refitting...")
            self.started_fit = True
            self.refit_ica()
        timestamp = eeg[-1]["timestamp"]
        eeg = from_raw(self.eyeblinks_remover.transform(get_raw(eeg)))
        process = preprocess_stft(eeg, _METADATA)
        # Keep only the most recent STFT value per hemisphere.
        mu_left = process["left"][-1]
        mu_right = process["right"][-1]
        data = {"timestamp": timestamp, "left": mu_left, "right": mu_right}
        _LOGGER.debug("--> mu: %s" % data)
        # Publish only once the ICA model has actually been fitted.
        if self.eyeblinks_remover.fitted:
            self.mu_publisher.publish(self.routing_keys[_MU], data)
class HTMMotorImageryModule(object):
    """
    Motor imagery module: consumes mu values and tags from RabbitMQ, runs
    one HTM classifier per hemisphere, reconciles the two results, and
    publishes the classification. Learning is disabled after 1000 records.
    """

    def __init__(self, user_id, module_id, device_type, rmq_address,
                 rmq_user, rmq_pwd):
        """
        @param user_id: (string) ID of the user using the device.
        @param module_id: (string) ID of this module instance.
        @param device_type: (string) type of the publishing device.
        @param rmq_address: (string) address of the RabbitMQ server.
        @param rmq_user: (string) RabbitMQ login.
        @param rmq_pwd: (string) RabbitMQ password.
        """
        # Per-hemisphere min/max of observed mu values (see _update_stats).
        self.stats = {
            "left": {"min": None, "max": None},
            "right": {"min": None, "max": None}
        }
        self.module_id = module_id
        self.user_id = user_id
        self.device_type = device_type
        self.rmq_address = rmq_address
        self.rmq_user = rmq_user
        self.rmq_pwd = rmq_pwd
        self.classification_publisher = None  # created in initialize()
        self.mu_subscriber = None
        self.tag_subscriber = None
        self.tag_publisher = None
        # mu is keyed by device; tag and classification by module.
        self.routing_keys = {
            "mu": _ROUTING_KEY % (user_id, device_type, _MU),
            "tag": _ROUTING_KEY % (user_id, module_id, _TAG),
            "classification": _ROUTING_KEY % (user_id, module_id,
                                              _CLASSIFICATION)
        }
        self.start_time = int(time.time() * 1000)  # in ms
        self.last_tag = {"timestamp": self.start_time,
                         "value": _CATEGORIES[1]}
        self.classifiers = {"left": None, "right": None}
        self.numRecords = 0
        self.learning_mode = True  # switched off after 1000 records

    def initialize(self):
        """
        Initialize classifier, publisher (classification), and subscribers
        (mu and tag)
        """
        self.classifiers["left"] = HTMClassifier(network_config,
                                                 _TRAINING_DATA, _CATEGORIES)
        self.classifiers["right"] = HTMClassifier(network_config,
                                                  _TRAINING_DATA, _CATEGORIES)
        for classifier in self.classifiers.values():
            classifier.initialize()
            if _PRE_TRAIN:
                classifier.train(_TRAIN_SET_SIZE, partitions)

        self.tag_subscriber = PikaSubscriber(self.rmq_address, self.rmq_user,
                                             self.rmq_pwd)
        self.tag_publisher = PikaPublisher(self.rmq_address, self.rmq_user,
                                           self.rmq_pwd)
        self.mu_subscriber = PikaSubscriber(self.rmq_address, self.rmq_user,
                                            self.rmq_pwd)
        self.classification_publisher = PikaPublisher(self.rmq_address,
                                                      self.rmq_user,
                                                      self.rmq_pwd)

        self.tag_subscriber.connect()
        self.tag_publisher.connect()
        self.mu_subscriber.connect()
        self.classification_publisher.connect()

        self.tag_subscriber.subscribe(self.routing_keys["tag"])
        self.tag_publisher.register(self.routing_keys["tag"])
        self.mu_subscriber.subscribe(self.routing_keys["mu"])
        self.classification_publisher.register(
            self.routing_keys["classification"])

    def start(self):
        """Consume mu messages forever; each one is tagged and classified."""
        _LOGGER.info("[Module %s] Starting Motor Imagery module. "
                     "Routing keys: %s" % (self.module_id, self.routing_keys))
        self.mu_subscriber.consume_messages(self.routing_keys["mu"],
                                            self._tag_and_classify)

    def _update_last_tag(self, last_tag):
        """Consume all tags in the queue and keep the last one (i.e. the
        most up to date)"""
        while 1:
            (meth_frame, header_frame,
             body) = self.tag_subscriber.get_one_message(
                self.routing_keys["tag"])
            if body:
                last_tag = json.loads(body)
            else:
                # Queue drained: return the freshest tag seen.
                return last_tag

    def _tag_and_classify(self, ch, method, properties, body):
        """Tag data and runs it through the classifier"""
        self.numRecords += 1
        print(self.numRecords)
        if self.numRecords > 1000:
            self.learning_mode = False
            print("=======LEARNING DISABLED!!!=========")
        self.last_tag = self._update_last_tag(self.last_tag)
        _LOGGER.debug("[Module %s] mu: %s | last_tag: %s"
                      % (self.module_id, body, self.last_tag))
        mu = json.loads(body)
        mu_timestamp = mu["timestamp"]
        tag_timestamp = self.last_tag["timestamp"]
        results = {}
        for (hemisphere, classifier) in self.classifiers.items():
            mu_value = mu[hemisphere]
            tag_value = self.last_tag["value"]
            # Clip outliers so the encoder operates in a known range.
            mu_clipped = np.clip(mu_value, _MU_MIN, _MU_MAX)
            results[hemisphere] = classifier.classify(
                input_data=mu_clipped,
                target=tag_value,
                learning_is_on=self.learning_mode)
            self._update_stats(hemisphere, mu_value)
            #_LOGGER.debug(self.stats)
        _LOGGER.debug("Raw results: %s" % results)
        classification_result = _reconcile_results(results['left'],
                                                   results['right'])
        payload = [{"timestamp": mu_timestamp,
                    "value": classification_result}]
        self.classification_publisher.publish(
            self.routing_keys["classification"], payload)

    def _update_stats(self, hemisphere, mu_value):
        """
        Track the min and max mu value observed for one hemisphere in:

          self.stats = {
            "left": {"min": None, "max": None},
            "right": {"min": None, "max": None}
          }
        """
        min_val = self.stats[hemisphere]["min"]
        max_val = self.stats[hemisphere]["max"]
        # BUG FIX: the original used `if not min_val`, which also treated a
        # legitimate value of 0 as "unset", and then fell through to compare
        # the new value against None (a TypeError on Python 3).
        if min_val is None or mu_value < min_val:
            self.stats[hemisphere]["min"] = mu_value
        if max_val is None or mu_value > max_val:
            self.stats[hemisphere]["max"] = mu_value
def __init__(self, log,
             device_address=THINKGEAR_DEVICE_SERIAL_PORT,
             emulate_headset_data=THINKGEAR_ENABLE_SIMULATE_HEADSET_DATA,
             server_host=RABBITMQ_HOST,
             server_username=RABBITMQ_USERNAME,
             server_password=RABBITMQ_PASSWORD,
             publisher_user=PUBLISHER_USERNAME,
             publisher_device=PUBLISHER_DEVICE,
             publisher_metric=PUBLISHER_METRIC,
             DEBUG=DEBUG,
             parent=None):
    """
    Set up the headset reader thread and all RabbitMQ publishers: one
    combined packet stream, one stream per individual metric, and an FFT
    stream. Finishes by opening the headset connection.
    """
    threading.Thread.__init__(self, parent)
    self.protocol = None  # set in configureEEG()
    self.serial_device = None  # set in configureEEG() for real hardware
    self.log = log
    self.DEBUG = DEBUG
    self.parent = parent
    self.device_address = device_address
    self.emulate_headset_data = emulate_headset_data
    # Attention at/above this threshold is labeled 1.
    self.attention_threshold = 70
    # Most recent value of each ThinkGear metric. poorSignalLevel starts
    # at 200 — presumably the "no contact" worst case; TODO confirm.
    self.data = {'poorSignalLevel': 200,
                 'attention': 0,
                 'meditation': 0,
                 'delta': 0,
                 'theta': 0,
                 'lowAlpha': 0,
                 'highAlpha': 0,
                 'lowBeta': 0,
                 'highBeta': 0,
                 'lowGamma': 0,
                 'highGamma': 0,
                 'label': 0}
    self.host = server_host
    self.username = server_username
    self.pwd = server_password
    self.user = publisher_user
    self.device = publisher_device
    self.metric = publisher_metric

    # Send data efficiently in one packet
    self.buffer_size = 10
    self.data_buffer = []
    self.routing_key = "%s:%s:%s" % (self.user, self.device, self.metric)
    self.pub = PikaPublisher(self.host, self.username, self.pwd)
    self.pub.connect()
    self.pub.register(self.routing_key)

    # Also send each metric individually in cloudbrain format
    self.metrics = ['timestamp', 'eeg', 'poorSignalLevel', 'attention',
                    'meditation', 'delta', 'theta', 'lowAlpha', 'highAlpha',
                    'lowBeta', 'highBeta', 'lowGamma', 'highGamma']
    self.publishers = {}
    self.routing_keys = {}
    # One dedicated publisher per metric, each on its own routing key.
    for metric in self.metrics:
        self.routing_keys[metric] = "%s:neurosky:%s" % (self.user, metric)
        self.publishers[metric] = PikaPublisher(self.host, self.username,
                                                self.pwd)
        self.publishers[metric].connect()
        self.publishers[metric].register(self.routing_keys[metric])

    # Send FFT
    self.fft_routing_key = "%s:%s:%s" % (self.user, self.device, "fft")
    self.fft_pub = PikaPublisher(self.host, self.username, self.pwd)
    self.fft_pub.connect()
    self.fft_pub.register(self.fft_routing_key)

    # Final setup
    self.configureEEG()
    displayCSVHeader()
class NeuroskySource(threading.Thread):
    """
    Thread that reads packets from a NeuroSky ThinkGear headset (or an
    emulator) and republishes them to RabbitMQ in three layouts: a combined
    packet stream, one stream per metric, and an FFT stream.
    """

    def __init__(self, log,
                 device_address=THINKGEAR_DEVICE_SERIAL_PORT,
                 emulate_headset_data=THINKGEAR_ENABLE_SIMULATE_HEADSET_DATA,
                 server_host=RABBITMQ_HOST,
                 server_username=RABBITMQ_USERNAME,
                 server_password=RABBITMQ_PASSWORD,
                 publisher_user=PUBLISHER_USERNAME,
                 publisher_device=PUBLISHER_DEVICE,
                 publisher_metric=PUBLISHER_METRIC,
                 DEBUG=DEBUG,
                 parent=None):
        threading.Thread.__init__(self, parent)
        self.protocol = None  # set in configureEEG()
        self.serial_device = None  # set in configureEEG() for real hardware
        self.log = log
        self.DEBUG = DEBUG
        self.parent = parent
        self.device_address = device_address
        self.emulate_headset_data = emulate_headset_data
        # Attention at/above this threshold is labeled 1.
        self.attention_threshold = 70
        # Most recent value of each ThinkGear metric. poorSignalLevel
        # starts at 200 — presumably the "no contact" worst case; TODO
        # confirm.
        self.data = {'poorSignalLevel': 200,
                     'attention': 0,
                     'meditation': 0,
                     'delta': 0,
                     'theta': 0,
                     'lowAlpha': 0,
                     'highAlpha': 0,
                     'lowBeta': 0,
                     'highBeta': 0,
                     'lowGamma': 0,
                     'highGamma': 0,
                     'label': 0}
        self.host = server_host
        self.username = server_username
        self.pwd = server_password
        self.user = publisher_user
        self.device = publisher_device
        self.metric = publisher_metric

        # Send data efficiently in one packet
        self.buffer_size = 10
        self.data_buffer = []
        self.routing_key = "%s:%s:%s" % (self.user, self.device, self.metric)
        self.pub = PikaPublisher(self.host, self.username, self.pwd)
        self.pub.connect()
        self.pub.register(self.routing_key)

        # Also send each metric individually in cloudbrain format
        self.metrics = ['timestamp', 'eeg', 'poorSignalLevel', 'attention',
                        'meditation', 'delta', 'theta', 'lowAlpha',
                        'highAlpha', 'lowBeta', 'highBeta', 'lowGamma',
                        'highGamma']
        self.publishers = {}
        self.routing_keys = {}
        for metric in self.metrics:
            self.routing_keys[metric] = "%s:neurosky:%s" % (self.user, metric)
            self.publishers[metric] = PikaPublisher(self.host, self.username,
                                                    self.pwd)
            self.publishers[metric].connect()
            self.publishers[metric].register(self.routing_keys[metric])

        # Send FFT
        self.fft_routing_key = "%s:%s:%s" % (self.user, self.device, "fft")
        self.fft_pub = PikaPublisher(self.host, self.username, self.pwd)
        self.fft_pub.connect()
        self.fft_pub.register(self.fft_routing_key)

        # Final setup
        self.configureEEG()
        displayCSVHeader()

    # --- thin delegations to the parent (if any) -------------------------

    def setPacketCount(self, value):
        if self.parent is not None:
            self.parent.setPacketCount(value)

    def setBadPackets(self, value):
        if self.parent is not None:
            self.parent.setBadPackets(value)

    def incrementPacketCount(self):
        if self.parent is not None:
            self.parent.incrementPacketCount()

    def incrementBadPackets(self):
        if self.parent is not None:
            self.parent.incrementBadPackets()

    def resetSessionStartTime(self):
        if self.parent is not None:
            self.parent.resetSessionStartTime()

    def configureEEG(self):
        """Open the serial device (real hardware) or none (emulation), then
        start the ThinkGear protocol layer on top of it."""
        if not self.emulate_headset_data:
            self.serial_device = NeuroskyConnector.SerialDevice(
                self.log,
                device_address=self.device_address,
                DEBUG=0,
                parent=self)
            self.serial_device.start()
        else:
            self.serial_device = None
        self.protocol = NeuroskyConnector.puzzlebox_synapse_protocol_thinkgear(
            self.log,
            self.serial_device,
            device_model='NeuroSky MindWave',
            DEBUG=0,
            parent=self)
        self.protocol.start()

    def processPacketThinkGear(self, packet):
        """
        Handle one ThinkGear packet. Raw-EEG packets are enriched with the
        latest metric values and buffered; when the buffer is full, the
        per-metric streams and the FFT stream are published. Non-raw
        packets just refresh the cached metric values in self.data.
        """
        if self.DEBUG > 2:
            print packet
        if 'rawEeg' in packet.keys():
            # Rename the raw sample and attach the latest cached metrics.
            # packet['channel_0'] = packet.pop('rawEeg')
            packet['eeg'] = packet.pop('rawEeg')
            packet['poorSignalLevel'] = self.data['poorSignalLevel']
            packet['attention'] = self.data['attention']
            packet['meditation'] = self.data['meditation']
            packet['delta'] = self.data['delta']
            packet['theta'] = self.data['theta']
            packet['lowAlpha'] = self.data['lowAlpha']
            packet['highAlpha'] = self.data['highAlpha']
            packet['lowBeta'] = self.data['lowBeta']
            packet['highBeta'] = self.data['highBeta']
            packet['lowGamma'] = self.data['lowGamma']
            packet['highGamma'] = self.data['highGamma']
            packet['label'] = self.data['label']
            if self.DEBUG > 1:
                print packet
            else:
                displayCSV(packet)
            if len(self.data_buffer) > self.buffer_size:
                # Publish efficiently in one packet
                #self.pub.publish(self.routing_key, self.data_buffer)
                # Also send each metric individually in cloudbrain format
                for metric in self.metrics:
                    buffer_out = []
                    for packet in self.data_buffer:
                        metric_data = {"timestamp": packet["timestamp"],
                                       "channel_0": packet[metric]}
                        buffer_out.append(metric_data)
                    self.publishers[metric].publish(self.routing_keys[metric],
                                                    buffer_out)
                # Also send fft
                buffer_out = []
                for packet in self.data_buffer:
                    # NOTE(review): channel_7 reads self.data['theta'] while
                    # every other channel reads the buffered packet —
                    # probably intended to be packet['theta']; confirm.
                    metric_data = {"timestamp": packet["timestamp"],
                                   "channel_0": packet['lowAlpha'],
                                   "channel_1": packet['highAlpha'],
                                   "channel_2": packet['lowBeta'],
                                   "channel_3": packet['highBeta'],
                                   "channel_4": packet['lowGamma'],
                                   "channel_5": packet['highGamma'],
                                   "channel_6": packet['delta'],
                                   "channel_7": self.data['theta'],
                                   }
                    buffer_out.append(metric_data)
                self.fft_pub.publish(self.fft_routing_key, buffer_out)
                if self.DEBUG > 1:
                    print self.data_buffer
                self.data_buffer = []
                if self.DEBUG > 1:
                    print "Publishing:",
                    print self.routing_key
            else:
                self.data_buffer.append(packet)
        else:
            # Status packet: refresh cached metric values.
            if 'poorSignalLevel' in packet.keys():
                self.data['poorSignalLevel'] = packet['poorSignalLevel']
            if 'eegPower' in packet.keys():
                self.data['delta'] = packet['eegPower']['delta']
                self.data['theta'] = packet['eegPower']['theta']
                self.data['lowAlpha'] = packet['eegPower']['lowAlpha']
                self.data['highAlpha'] = packet['eegPower']['highAlpha']
                self.data['lowBeta'] = packet['eegPower']['lowBeta']
                self.data['highBeta'] = packet['eegPower']['highBeta']
                self.data['lowGamma'] = packet['eegPower']['lowGamma']
                self.data['highGamma'] = packet['eegPower']['highGamma']
            if 'eSense' in packet.keys():
                if 'attention' in packet['eSense'].keys():
                    self.data['attention'] = packet['eSense']['attention']
                    # Binary label derived from the attention threshold.
                    if self.data['attention'] >= self.attention_threshold:
                        self.data['label'] = 1
                    else:
                        self.data['label'] = 0
                if 'meditation' in packet['eSense'].keys():
                    self.data['meditation'] = packet['eSense']['meditation']

    def resetDevice(self):
        """Tear down the serial device and protocol, then reconnect."""
        if self.serial_device is not None:
            self.serial_device.exitThread()
        if self.protocol is not None:
            self.protocol.exitThread()
        self.configureEEG()

    def exitThread(self, callThreadQuit=True):
        # Call disconnect block in protocol first due to above error
        self.protocol.disconnectHardware()
        if self.serial_device is not None:
            self.serial_device.exitThread()
        if self.protocol is not None:
            self.protocol.exitThread()
        if callThreadQuit:
            # NOTE(review): nesting reconstructed from mangled whitespace —
            # the original tokens show two join() calls; confirm intent.
            if self.DEBUG:
                self.join()
            self.join()
        if self.parent is None:
            sys.exit()
class HTMClassifier(object):
    def __init__(self, user_id, device_type, rmq_address, rmq_user, rmq_pwd,
                 input_metrics, output_metrics):
        """
        Motor Imagery Module.

        Metrics conventions:
        - Data to classify: {"timestamp": <int>, "channel_0": <float>}
        - Data label: {"timestamp": <int>, "channel_0": <int>}
        - Classification result: {"timestamp": <int>, "channel_0": <int>}

        @param user_id: (string) ID of the user using the device.
        @param device_type: (string) type of the device publishing to this
            module.
        @param rmq_address: (string) address of the RabbitMQ server.
        @param rmq_user: (string) login for RabbitMQ connection.
        @param rmq_pwd: (string) password for RabbitMQ connection.
        @param input_metrics: (list) name of the input metric.
        @param output_metrics (list) name of the output metric.
        """
        self.module_id = HTMClassifier.__name__
        self.user_id = user_id
        self.device_type = device_type
        self.rmq_address = rmq_address
        self.rmq_user = rmq_user
        self.rmq_pwd = rmq_pwd
        self.input_metrics = input_metrics
        self.output_metrics = output_metrics
        # Resolved by _validate_metrics() from the metric dicts above.
        self.input_metric = None
        self.output_metric = None
        self.tag_metric = None
        self.last_tag = {"timestamp": None, "channel_0": None}
        self.output_metric_publisher = None  # created in connect()
        self.input_metric_subscriber = None
        self.tag_subscriber = None
        self.routing_keys = None

        # Set when configure() is called.
        self.categories = None
        self.network_config = None
        self.trained_network_path = None
        self.minval = None
        self.maxval = None

        # Module specific
        self._network = None

    def _validate_metrics(self):
        """
        Validate input and output metrics and initialize them accordingly.

        This module must have the following signature for input and output
        metrics:

          input_metrics = {"metric_to_classify": <string>,
                           "label_metric": <string>}
          output_metrics = {"result_metric": <string>}
        """
        if "label_metric" in self.input_metrics:
            self.tag_metric = self.input_metrics["label_metric"]
        else:
            raise KeyError("The input metric 'label_metric' is not set!")
        if "metric_to_classify" in self.input_metrics:
            self.input_metric = self.input_metrics["metric_to_classify"]
        else:
            raise KeyError("The input metric 'metric_to_classify' is not set!")
        if "result_metric" in self.output_metrics:
            self.output_metric = self.output_metrics["result_metric"]
        else:
            raise KeyError("The output metric 'result_metric' is not set!")

    def configure(self, categories, network_config, trained_network_path,
                  minval, maxval):
        """Configure the module"""
        self._validate_metrics()
        self.categories = categories
        self.network_config = network_config
        self.trained_network_path = trained_network_path
        self.minval = minval
        self.maxval = maxval
        # NOTE(review): only `minval` is written into the scalar encoder
        # config; `maxval` is stored but never pushed — confirm intended.
        self.network_config["sensorRegionConfig"]["encoders"]["scalarEncoder"][
            "minval"] = minval
        # Init tag with first category
        self.last_tag["channel_0"] = self.categories[0]
        self.last_tag["timestamp"] = int(time.time() * 1000)

    def connect(self):
        """Initialize publisher and subscribers"""
        self.routing_keys = {
            self.input_metric: _ROUTING_KEY % (self.user_id, self.device_type,
                                               self.input_metric),
            self.output_metric: _ROUTING_KEY % (self.user_id,
                                                self.device_type,
                                                self.output_metric),
            self.tag_metric: _ROUTING_KEY % (self.user_id, self.device_type,
                                             self.tag_metric),
        }
        self.tag_subscriber = PikaSubscriber(self.rmq_address, self.rmq_user,
                                             self.rmq_pwd)
        self.classification_publisher = PikaPublisher(self.rmq_address,
                                                      self.rmq_user,
                                                      self.rmq_pwd)
        self.input_metric_subscriber = PikaSubscriber(self.rmq_address,
                                                      self.rmq_user,
                                                      self.rmq_pwd)
        self.output_metric_publisher = PikaPublisher(self.rmq_address,
                                                     self.rmq_user,
                                                     self.rmq_pwd)
        self.tag_subscriber.connect()
        self.classification_publisher.connect()
        self.input_metric_subscriber.connect()
        self.output_metric_publisher.connect()
        self.tag_subscriber.register(self.routing_keys[self.tag_metric])
        # NOTE(review): classification_publisher is registered on the *tag*
        # routing key and is otherwise unused in this class — confirm
        # intended.
        self.classification_publisher.register(
            self.routing_keys[self.tag_metric])
        self.input_metric_subscriber.register(
            self.routing_keys[self.input_metric])
        self.output_metric_publisher.register(
            self.routing_keys[self.output_metric])

    def train(self, training_file, num_records):
        """Create a network and training it on a CSV data source"""
        dataSource = FileRecordStream(streamID=training_file)
        # Rewind so the stream can be iterated past its end repeatedly.
        dataSource.setAutoRewind(True)
        self._network = configureNetwork(dataSource, self.network_config)
        for i in xrange(num_records):  # Equivalent to: network.run(num_records)
            self._network.run(1)
        self._network.save(self.trained_network_path)

    def start(self):
        """Get data from rabbitMQ and classify input data"""
        # Lazily load the trained network if train() was not called in this
        # process; then freeze learning for inference.
        if self._network is None:
            self._network = Network(self.trained_network_path)
        regionNames = self._get_all_regions_names()
        setNetworkLearningMode(self._network, regionNames, False)
        _LOGGER.info("[Module %s] Starting Motor Imagery module. "
                     "Routing keys: %s" % (self.module_id, self.routing_keys))
        # Blocks consuming input-metric messages.
        self.input_metric_subscriber.subscribe(
            self.routing_keys[self.input_metric], self._tag_and_classify)

    def _get_all_regions_names(self):
        """Return the regionName of every region in the network config."""
        region_names = []
        for region_config_key, region_config in self.network_config.items():
            region_names.append(region_config["regionName"])
        return region_names

    def _tag_and_classify(self, ch, method, properties, body):
        """Tag data and runs it through the classifier"""
        self._update_last_tag()
        input_data = simplejson.loads(body)
        timestamp = input_data["timestamp"]
        # Clip only when both bounds were configured.
        if self.maxval is not None and self.minval is not None:
            value = np.clip(input_data["channel_0"], self.minval, self.maxval)
        else:
            value = input_data["channel_0"]
        classificationResults = classifyNextRecord(self._network,
                                                   self.network_config,
                                                   timestamp, value,
                                                   self.last_tag["channel_0"])
        inferredCategory = classificationResults["bestInference"]
        _LOGGER.debug("Raw results: %s" % classificationResults)
        buffer = [{"timestamp": timestamp, "channel_0": inferredCategory}]
        self.output_metric_publisher.publish(
            self.routing_keys[self.output_metric], buffer)

    def _update_last_tag(self):
        """
        Consume all tags in the queue and keep the last one (i.e. the most
        up to date)

        A tag is a dict with the following format:
          tag = {"timestamp": <int>, "channel_0": <float>}
        """
        while 1:
            (meth_frame, header_frame,
             body) = self.tag_subscriber.get_one_message(
                self.routing_keys[self.tag_metric])
            if body:
                self.last_tag = simplejson.loads(body)
            else:
                # Queue drained: keep the freshest tag seen.
                _LOGGER.info("Last tag: {}".format(self.last_tag))
                return
class PreprocessingModule(object):
    """
    Consumes raw EEG from RabbitMQ, optionally removes eye blinks via ICA,
    extracts per-electrode values via STFT, and republishes the result on
    the configured output metric.
    """

    def __init__(self, user_id, device_type, rmq_address, rmq_user, rmq_pwd,
                 input_metrics, output_metrics):
        """
        @param user_id: (string) ID of the user using the device.
        @param device_type: (string) type of the publishing device.
        @param rmq_address: (string) address of the RabbitMQ server.
        @param rmq_user: (string) RabbitMQ login.
        @param rmq_pwd: (string) RabbitMQ password.
        @param input_metrics: input metric names for this module.
        @param output_metrics: output metric names for this module.
        """
        self.module_id = str(uuid.uuid4())
        self.user_id = user_id
        self.device_type = device_type
        self.rmq_address = rmq_address
        self.rmq_user = rmq_user
        self.rmq_pwd = rmq_pwd
        # BUG FIX: the original assigned None here, silently discarding the
        # constructor arguments.
        self.input_metrics = input_metrics
        self.output_metrics = output_metrics
        self.eeg_subscriber = None  # created in connect()
        self.mu_publisher = None  # created in connect()
        self.routing_keys = None
        self.preprocessor = None
        self.num_channels = get_num_channels(self.device_type, "eeg")
        self.eeg_data = np.zeros((0, self.num_channels))
        self.count = 0  # samples seen so far (advanced by step_size)
        self.eyeblinks_remover = EyeBlinksFilter()
        # Set via configure().
        self.step_size = None
        self.electrodes_placement = None
        self.enable_ica = False
        self.started_fit = False
        # Declared here so connect() raises its intended ValueError (not an
        # AttributeError) when configure() was never called.
        self.input_metric = None
        self.output_metric = None

    def configure(self, step_size, electrodes_placement, enable_ica=False,
                  input_metric=None, output_metric=None):
        """
        Module specific params.

        @param step_size: (int) STFT step size
        @param electrodes_placement: (dict) dict with the electrode
            placement for optional Laplacian filtering. E.g:
              {
                "channel_2": {"main": "channel_2",
                              "artifact": ["channel_0", "channel_3",
                                           "channel_5"]},
                "channel_4": {"main": "channel_4",
                              "artifact": ["channel_1", "channel_3",
                                           "channel_6"]},
              }
            If you don't want any Laplacian filtering then set "artifact"
            to [] for each channel.
            More about Laplacian filtering: http://sccn.ucsd.edu/wiki/Flt_laplace
        @param enable_ica: (boolean) if 1, enable ICA pre-processing. This
            will remove eye blinks.
        @param input_metric: (string) name of the input metric.
        @param output_metric: (string) name of the output metric.
        """
        self.step_size = step_size
        self.electrodes_placement = electrodes_placement
        # BUG FIX: the original read undefined names `input_metric` /
        # `output_metric` (documented but missing from the signature),
        # raising NameError on every call. They are now real parameters,
        # added after `enable_ica` to keep positional callers working.
        self.input_metric = input_metric
        self.output_metric = output_metric
        self.enable_ica = enable_ica

    def connect(self):
        """ Initialize EEG preprocessor, publisher, and subscriber """
        if self.step_size is None:
            raise ValueError("Step size can't be none. "
                             "Use configure() to set it.")
        if self.electrodes_placement is None:
            raise ValueError("Electrode placement can't be none. "
                             "Use configure() to set it.")
        if self.input_metric is None:
            raise ValueError("Input metric can't be none. "
                             "Use configure() to set it.")
        if self.output_metric is None:
            raise ValueError("Output metric can't be none. "
                             "Use configure() to set it.")

        self.routing_keys = {
            self.input_metric: _ROUTING_KEY % (self.user_id, self.device_type,
                                               self.input_metric),
            self.output_metric: _ROUTING_KEY % (self.user_id,
                                                self.device_type,
                                                self.output_metric)
        }
        self.mu_publisher = PikaPublisher(self.rmq_address, self.rmq_user,
                                          self.rmq_pwd)
        self.eeg_subscriber = PikaSubscriber(self.rmq_address, self.rmq_user,
                                             self.rmq_pwd)
        self.eeg_subscriber.connect()
        self.mu_publisher.connect()
        self.mu_publisher.register(self.routing_keys[self.output_metric])
        self.eeg_subscriber.register(self.routing_keys[self.input_metric])

    def start(self):
        """Consume input-metric messages forever via _preprocess()."""
        _LOGGER.info("[Module %s] Starting Preprocessing. Routing "
                     "keys: %s" % (self.module_id, self.routing_keys))
        self.eeg_subscriber.subscribe(self.routing_keys[self.input_metric],
                                      self._preprocess)

    def refit_ica(self):
        """Fit the eye-blink ICA filter in a background thread so message
        consumption is not blocked."""
        t = Thread(target=self.eyeblinks_remover.fit, args=(self.eeg_data,))
        t.start()

    def _preprocess(self, ch, method, properties, body):
        """AMQP callback: accumulate raw data, optionally apply ICA (with
        periodic refits), run STFT, and publish the latest value per key."""
        eeg = json.loads(body)
        self.eeg_data = np.vstack([self.eeg_data,
                                   get_raw(eeg, self.num_channels)])
        self.count += self.step_size
        timestamp = eeg[-1]["timestamp"]
        if self.enable_ica:
            eeg = from_raw(self.eyeblinks_remover.transform(
                get_raw(eeg, self.num_channels)), self.num_channels)
            # First fit at >= 5000 samples, then refit every 10000.
            if ((self.count >= 5000 and not self.started_fit)
                    or self.count % 10000 == 0):
                _LOGGER.info('refitting...')
                self.started_fit = True
                self.refit_ica()
        processed_data = preprocess_stft(eeg, self.electrodes_placement)
        # Keep only the most recent STFT value per output key.
        data = {"timestamp": timestamp}
        for key, values in processed_data.items():
            data[key] = values[-1]
        _LOGGER.debug("--> output: %s" % data)
        self.mu_publisher.publish(self.routing_keys[self.output_metric], data)
# Keyboard-driven tagger: publishes hand-position tags while reading single
# keypresses from a terminal in non-canonical mode.
import termios, fcntl, sys, os
from brainsquared.publishers.PikaPublisher import PikaPublisher

host = "localhost"
username = "******"
pwd = "guest"

user = "******"
device = "openbci"
metric = "tag"

# Routing key format: <user>:<device>:<metric>
routing_key = "%s:%s:%s" % (user, device, metric)

pub = PikaPublisher(host, username, pwd)
pub.connect()
pub.register(routing_key)

# NOTE(review): original line breaks of this banner were lost in this view
# of the file; content preserved.
info = """Listening for keyboard input. \n
* Press 0 to tag neutral position.
* Press 1 to tag left hand.
* Press 2 to tag right hand.
"""
print info

# Switch stdin to non-canonical, no-echo mode so single keypresses are
# delivered immediately.
fd = sys.stdin.fileno()
oldterm = termios.tcgetattr(fd)
newattr = termios.tcgetattr(fd)
newattr[3] = newattr[3] & ~termios.ICANON & ~termios.ECHO
termios.tcsetattr(fd, termios.TCSANOW, newattr)
if __name__ == "__main__":
    # Publish NeuroSky attention values to RabbitMQ, buffered in batches.
    attention_threshold = 70
    host = "localhost"
    username = "******"
    pwd = "guest"
    user = "******"
    device = "neurosky"
    metric = "attention"
    # Routing key format: <user>:<device>:<metric>
    routing_key = "%s:%s:%s" % (user, device, metric)
    buffer_size = 128
    data_buffer = []

    pub = PikaPublisher(host, username, pwd)
    pub.connect()
    pub.register(routing_key)

    con = Consider()
    print "Ready to publish data to '%s' on queue '%s'" % (host,
                                                           str(routing_key))
    print "Waiting for BCI headset signal ..."
    for p in con.packet_generator():
        print "==> Signal quality: {}".format(p.poor_signal)
        # poor_signal == 0 means a clean headset contact.
        if p.poor_signal == 0:
            print "Got good signal!"
            data = get_attention(p)
            # NOTE(review): the remainder of this loop body is truncated in
            # this view of the file.
            if len(data_buffer) > buffer_size:
def connect(self, host, username, pwd): self.pub = PikaPublisher(host, username, pwd) self.pub.connect() self.pub.register(self.routing_key) print "Connected to: {}".format(host)
#!/usr/bin/env python from brainsquared.publishers.PikaPublisher import PikaPublisher import time import random import json USER_ID = "brainsquared" MODULE_IDS = ["module0", "module1", "module2", "module3"] DEVICE = "openbci" pub = PikaPublisher("rabbitmq.cloudbrain.rocks", "cloudbrain", "cloudbrain") pub.connect() for module_id in MODULE_IDS: TAG_KEY = '%s:%s:tag' % (USER_ID, module_id) pub.register(TAG_KEY) pub.publish(TAG_KEY, {"timestamp": 1, "value": "middle"}) #pub.publish(TAG_KEY, {"timestamp": 1, "value": "left"}) #pub.publish(TAG_KEY, {"timestamp": 1, "value": "right"}) MU_KEY = '%s:%s:mu' % (USER_ID, DEVICE) pub.register(MU_KEY) #pub.publish(MU_KEY, {"timestamp": 1, "left": 1, "right": 3}) CLASSIFICATION_KEY = '%s:%s:classification' % (USER_ID, MODULE_IDS[0]) pub.register(CLASSIFICATION_KEY) while 1: random_class = random.sample([-2,-1,0,1,2], 1)[0]