def run(device_name='muse',
        mock_data_enabled=True,
        device_id=MOCK_DEVICE_ID,
        cloudbrain_address=RABBITMQ_ADDRESS,
        buffer_size=10,
        device_port='/dev/tty.OpenBCI-DN0094CZ'):
  if device_name == 'muse':
    from cloudbrain.connectors.MuseConnector import MuseConnector as Connector
  elif device_name == 'openbci':
    from cloudbrain.connectors.OpenBCIConnector import OpenBCIConnector as Connector
  else:
    raise Exception("Device type '%s' not supported. Supported devices are:%s" % (device_name, _SUPPORTED_DEVICES))

  if mock_data_enabled:
    from cloudbrain.connectors.MockConnector import MockConnector as Connector
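    # Note: when mock data is enabled, this import deliberately rebinds Connector,
    # so the MockConnector replaces the device-specific connector chosen above.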

  metrics = get_metrics_names(device_name)
  publishers = {metric: Publisher(device_name, device_id, cloudbrain_address, metric) for metric in metrics}
  for publisher in publishers.values():
    publisher.connect()
  if device_name == 'openbci':
    connector = Connector(publishers, buffer_size, device_name, device_port)
  else:
    connector = Connector(publishers, buffer_size, device_name)
  connector.connect_device()

  if mock_data_enabled:
    print "INFO: Mock data enabled."
  print ("SUCCESS: device '%s' connected with ID '%s'\n"
         "Sending data to cloudbrain at address '%s' ...") % (device_name,
                                                              device_id,
                                                              cloudbrain_address)
  connector.start()
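A minimal sketch of how run() might be driven from the command line, assuming it is importable from the surrounding module; the flag names below are illustrative and not part of the original code.

# Illustrative entry point (hedged sketch, not from the original source).
if __name__ == "__main__":
  import argparse

  parser = argparse.ArgumentParser(description="Stream device data to cloudbrain.")
  parser.add_argument("--device", default="muse", choices=["muse", "openbci"])
  parser.add_argument("--mock", action="store_true",
                      help="Publish generated mock data instead of reading a real device.")
  opts = parser.parse_args()

  # run() blocks inside connector.start(), so this is the final call.
  run(device_name=opts.device, mock_data_enabled=opts.mock)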
def run(device_name="muse",
        mock_data_enabled=True,
        device_id=MOCK_DEVICE_ID,
        cloudbrain_address=RABBITMQ_ADDRESS,
        buffer_size=10,
        step_size=10,
        device_port=None,
        pipe_name=None,
        publisher_type="pika",
        device_mac=None):

    if device_name == "muse" and not mock_data_enabled:
        from cloudbrain.connectors.MuseConnector import MuseConnector as Connector
        if not device_port:
            device_port = 9090
    elif device_name == "openbci" and not mock_data_enabled:
        from cloudbrain.connectors.OpenBCIConnector import OpenBCIConnector as Connector
    elif mock_data_enabled:
        from cloudbrain.connectors.MockConnector import MockConnector as Connector
    else:
        raise ValueError("Device type '%s' not supported. "
                         "Supported devices are:%s" %
                         (device_name, _SUPPORTED_DEVICES))

    metrics = get_metrics_names(device_name)

    if publisher_type == "pika":
        publishers = {
            metric: PikaPublisher(device_name, device_id, cloudbrain_address,
                                  metric)
            for metric in metrics
        }
    elif publisher_type == "pipe":
        publishers = {
            metric: PipePublisher(device_name, device_id, metric, pipe_name)
            for metric in metrics
        }
    else:
        raise ValueError("'%s' is not a valid publisher type. "
                         "Valid types are %s." %
                         (publisher_type, "pika, pipe"))

    for publisher in publishers.values():
        publisher.connect()
    connector = Connector(publishers, buffer_size, step_size, device_name,
                          device_port, device_mac)
    connector.connect_device()

    if mock_data_enabled and (publisher_type != 'pipe'):
        print "INFO: Mock data enabled."

    if publisher_type == 'pika':
        print(
            "SUCCESS: device '%s' connected with ID '%s'\n"
            "Sending data to cloudbrain at address '%s' ...") % (
                device_name, device_id, cloudbrain_address)
    connector.start()
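For the pipe publisher path in this version, a call might look like the sketch below; the pipe name is purely illustrative, and how PipePublisher consumes it is not shown in this excerpt.

# Hedged sketch: stream mock Muse data through the in-process pipe publisher
# instead of RabbitMQ. "analysis_pipe" is an illustrative name only.
run(device_name="muse",
    mock_data_enabled=True,
    publisher_type="pipe",
    pipe_name="analysis_pipe")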
    def __init__(self, publishers, buffer_size, device_name, device_port):

      self.metrics = get_metrics_names(device_name)
      self.device = None
      self.device_port = device_port
      self.device_name = device_name

      self.buffers = {metric: ConnectorBuffer(buffer_size, publishers[metric].publish) for metric in self.metrics}
      self.publishers = publishers
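ConnectorBuffer is not defined in this excerpt; judging only from how it is constructed above (a size plus a publish callback), it presumably batches samples and hands each full batch to the callback. A hypothetical stand-in:

# Hypothetical sketch, not the original ConnectorBuffer: collects samples and
# flushes them to the supplied callback once the buffer is full.
class SimpleBuffer(object):
  def __init__(self, size, flush_callback):
    self.size = size
    self.flush_callback = flush_callback
    self.samples = []

  def write(self, sample):
    self.samples.append(sample)
    if len(self.samples) >= self.size:
      self.flush_callback(self.samples)
      self.samples = []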
Example #4
def get_tag_aggregate(user_id, tag_id):
    """Retrieve all aggregates for a specific tag and user"""

    device_type = request.args.get('device_type', None)
    metrics = request.args.getlist('metrics')

    if device_type is None and len(metrics) == 0:
        device_types = get_supported_devices()
        for device_type in device_types:
            metrics.extend(get_metrics_names(device_type))
    elif len(metrics) == 0 and device_type is not None:
        metrics = get_metrics_names(device_type)
    elif len(metrics) > 0 and device_type is None:
        return "parameter 'device_type' is required to filter on `metrics`", 500

    #aggregates = _generate_mock_tag_aggregates(user_id, tag_id, device_type, metrics)
    aggregates = dao.get_aggregates(user_id, tag_id, device_type, metrics)

    return json.dumps(aggregates), 200
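Because metrics is read with request.args.getlist(), clients pass it by repeating the query parameter. A hedged example request follows; the route path and port are hypothetical, only the query-string shape comes from the handler above.

# Hedged sketch of a client call; the URL below is illustrative, not the real route.
import requests

resp = requests.get(
    "http://localhost:5000/users/user-123/tags/tag-456/aggregates",
    params=[("device_type", "muse"), ("metrics", "eeg"), ("metrics", "acc")])
print(resp.status_code)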
Example #5
def run(device_name='muse',
        mock_data_enabled=True,
        device_id=MOCK_DEVICE_ID,
        cloudbrain_address=RABBITMQ_ADDRESS,
        buffer_size=10,
        device_port='/dev/tty.OpenBCI-DN0094CZ',
        pipe_name=None,
        publisher_type="pika",
        device_mac=None):

    if device_name == 'muse':
        from cloudbrain.connectors.MuseConnector import MuseConnector as Connector
    elif device_name == 'openbci':
        from cloudbrain.connectors.OpenBCIConnector import OpenBCIConnector as Connector
    else:
        raise ValueError("Device type '%s' not supported. "
                         "Supported devices are:%s" % (device_name, _SUPPORTED_DEVICES))

    if mock_data_enabled:
        from cloudbrain.connectors.MockConnector import MockConnector as Connector

    metrics = get_metrics_names(device_name)

    if publisher_type == 'pika':
        publishers = {metric: PikaPublisher(device_name,
                                            device_id,
                                            cloudbrain_address,
                                            metric) for metric in metrics}
    elif publisher_type == 'pipe':
        publishers = {metric: PipePublisher(device_name,
                                            device_id,
                                            metric,
                                            pipe_name) for metric in metrics}
    else:
        raise ValueError("'%s' is not a valid publisher type. "
                         "Valid types are %s." % (publisher_type, "pika, pipe"))

    for publisher in publishers.values():
        publisher.connect()
    if device_name == 'openbci':
        connector = Connector(publishers, buffer_size, device_name, device_port, device_mac)
    else:
        connector = Connector(publishers, buffer_size, device_name, 9090, device_mac)
    connector.connect_device()

    if mock_data_enabled and (publisher_type != 'pipe'):
        print "INFO: Mock data enabled."

    if publisher_type == 'pika':
        print ("SUCCESS: device '%s' connected with ID '%s'\n"
               "Sending data to cloudbrain at address '%s' ...") % (device_name,
                                                                    device_id,
                                                                    cloudbrain_address)
    connector.start()
    def __init__(self, publishers, buffer_size, device_name, device_port):

        self.metrics = get_metrics_names(device_name)
        self.device = None
        self.device_port = device_port
        self.device_name = device_name

        self.buffers = {
            metric: ConnectorBuffer(buffer_size, publishers[metric].publish)
            for metric in self.metrics
        }
        self.publishers = publishers
Example #7
def get_tag_aggregate(user_id, tag_id):
    """Retrieve all aggregates for a specific tag and user"""

    device_type = request.args.get('device_type', None)
    metrics = request.args.getlist('metrics')

    if device_type is None and len(metrics) == 0:
        device_types = get_supported_devices()
        for device_type in device_types:
            metrics.extend(get_metrics_names(device_type))
    elif len(metrics) == 0 and device_type is not None:
        metrics = get_metrics_names(device_type)
    elif len(metrics) > 0 and device_type is None:
        return "parameter 'device_type' is required to filter on `metrics`", 500

    if mock_data_enabled:
        aggregates = _generate_mock_tag_aggregates(user_id, tag_id, device_type, metrics)
    else:
        aggregates = dao.get_aggregates(user_id, tag_id, device_type, metrics)

    return json.dumps(aggregates), 200
    def start(self):
        self.cassandra_dao.connect()
        metrics = get_metrics_names(self.device_type)
        for metric in metrics:
            self.subscribers[metric] = PikaSubscriber(
                device_name=self.device_type,
                device_id=self.device_id,
                rabbitmq_address=self.rabbitmq_address,
                metric_name=metric)
            self.subscribers[metric].connect()

            t = threading.Thread(target=self.subscribers[metric].consume_messages,
                                 args=(self.write_to_cassandra_factory(metric),))
            self.threads.append(t)
            t.start()
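write_to_cassandra_factory() is not part of this excerpt. Since its return value is passed straight to consume_messages(), it presumably builds a callback (with the same signature as _print_message further down this page) bound to one metric. A hypothetical sketch follows, for illustration only; store_data() is an invented DAO method name and `import json` is assumed.

    # Hypothetical sketch, not the original method body: return a callback
    # bound to one metric so each subscriber thread writes to the right place.
    def write_to_cassandra_factory(self, metric):
        def callback(ch, method, properties, body):
            records = json.loads(body)  # body is the JSON-encoded buffer of records
            self.cassandra_dao.store_data(records, self.device_id, metric)  # illustrative DAO call
        return callback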
Example #9
def run(device_name='muse',
        mock_data_enabled=True,
        device_id=MOCK_DEVICE_ID,
        cloudbrain_address=RABBITMQ_ADDRESS,
        buffer_size=10,
        device_port='/dev/tty.OpenBCI-DN0094CZ'):
    if device_name == 'muse':
        from cloudbrain.connectors.MuseConnector import MuseConnector as Connector
    elif device_name == 'openbci':
        from cloudbrain.connectors.OpenBCIConnector import OpenBCIConnector as Connector
    else:
        raise ValueError(
            "Device type '%s' not supported. Supported devices are: %s" %
            (device_name, _SUPPORTED_DEVICES))

    if mock_data_enabled:
        from cloudbrain.connectors.MockConnector import MockConnector as Connector

    metrics = get_metrics_names(device_name)
    publishers = {
        metric: Publisher(device_name, device_id, cloudbrain_address, metric)
        for metric in metrics
    }
    for publisher in publishers.values():
        publisher.connect()
    if device_name == 'openbci':
        connector = Connector(publishers, buffer_size, device_name,
                              device_port)
    else:
        connector = Connector(publishers, buffer_size, device_name)
    connector.connect_device()

    if mock_data_enabled:
        print "INFO: Mock data enabled."
    print(
        "SUCCESS: device '%s' connected with ID '%s'\n"
        "Sending data to cloudbrain at address '%s' ...") % (
            device_name, device_id, cloudbrain_address)
    connector.start()
Example #10
    def get_one_message(self):
        for method, properties, body in self.channel.consume(self.queue_name,
                                                             exclusive=True,
                                                             no_ack=True):
            return body


def _print_message(ch, method, properties, body):
    print body


if __name__ == "__main__":

    device_id = "test"
    device_name = "muse"
    host = RABBITMQ_ADDRESS
    buffer_size = 100

    metric_names = get_metrics_names(device_name)

    while 1:
        for metric in metric_names:
            print metric
            subscriber = PikaSubscriber(device_name, device_id, host, metric)
            subscriber.connect()
            #subscriber.consume_messages(_print_message)
            buffer = json.loads(subscriber.get_one_message())
            for record in buffer:
                print record
Example #11
  def get_one_message(self):
    for method, properties, body in self.channel.consume(self.queue_name, exclusive=True, no_ack=True):
      return body

def _print_message(ch, method, properties, body):
  print body

    
if __name__ == "__main__":

  device_id = "test"
  device_name = "muse"
  host = RABBITMQ_ADDRESS
  buffer_size = 100

  metric_names = get_metrics_names(device_name)

  #while 1:
  #for metric in metric_names:
  #  print metric
  subscriber = PikaSubscriber(device_name, device_id, host, "eeg")
  subscriber.connect()
  while 1:
    subscriber.consume_messages(_print_message)
    # buffer = json.loads(subscriber.get_one_message())
    # for record in buffer:
    #   print record
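Based on those commented-out lines, a variant callback that decodes the buffered JSON payload before printing might look like this sketch; it assumes `import json` and the same PikaSubscriber setup as the example above.

# Hedged sketch of a record-decoding callback, following the commented-out
# lines above.
def _print_records(ch, method, properties, body):
  buffer = json.loads(body)
  for record in buffer:
    print(record)

# usage: subscriber.consume_messages(_print_records)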