Example no. 1
    def get_data(self, device_name, device_id, metric_name, start):
        """
        Get data from Cassandra.
        :param device_name: name of the device type (e.g. Muse or OpenBCI)
        :param device_id: identifier of the specific device
        :param metric_name: metric to query (e.g. "eeg")
        :param start: start of the query window, as a Unix timestamp
        :return: list of records, each keyed by device_id, timestamp and channel_<i>
        """
        start_date = datetime.datetime.fromtimestamp(start).strftime(DATE_FORMAT)
        table_name = "%s_%s" % (device_name, metric_name)
        cql_select = "SELECT * FROM %s WHERE device_id='%s' AND timestamp>'%s';" % (table_name, device_id, start_date)

        rows = self.session.execute(cql_select)
        data = []
        for row in rows:
            device_id = row[0]
            timestamp = int(time.mktime(row[1].timetuple()) * 1000)

            record = {"device_id": device_id, "timestamp": timestamp}

            num_channels = get_num_channels(device_name, metric_name)
            for i in xrange(num_channels):
                channel_name = "channel_%s" % i
                record[channel_name] = row[2 + i]

            data.append(record)
        return data
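
Every example in this listing calls a shared helper, get_num_channels(device_name, metric_name), which maps a device/metric pair to its channel count. The helper itself is not part of the listing; a minimal sketch of such a lookup, using purely illustrative device names and channel counts, might look like this:

# Hypothetical sketch only: the real mapping lives elsewhere in the project,
# and the device names and channel counts below are assumptions.
_NUM_CHANNELS = {
    ("muse", "eeg"): 4,
    ("openbci", "eeg"): 8,
}

def get_num_channels(device_name, metric_name):
    """Return the number of channels recorded for a device/metric pair."""
    return _NUM_CHANNELS[(device_name, metric_name)]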
Example no. 2
  def __init__(self, publishers, buffer_size, device_name, device_port='mock_port', device_mac=None):
    """

    :return:
    """
    super(MockConnector, self).__init__(publishers, buffer_size, device_name, device_port, device_mac)
    self.data_generators = [
        self.data_generator_factory(metric, get_num_channels(self.device_name, metric))
        for metric in self.metrics]
Example no. 3
    def get_data(self, device_name, device_id, metric_name, start):
        """
        Get data from Cassandra.
        :param device_name: name of the device type (e.g. Muse or OpenBCI)
        :param device_id: identifier of the specific device
        :param metric_name: metric to query (e.g. "eeg")
        :param start: start of the query window, as a Unix timestamp
        :return: list of records, each keyed by device_id, timestamp and channel_<i>
        """
        start_date = datetime.datetime.fromtimestamp(start).strftime(
            DATE_FORMAT)
        table_name = "%s_%s" % (device_name, metric_name)
        cql_select = ("SELECT * FROM %s WHERE device_id='%s' "
                      "AND timestamp>'%s';" %
                      (table_name, device_id, start_date))

        rows = self.sensor_data_session.execute(cql_select)
        data = []
        for row in rows:
            device_id = row[0]
            timestamp = int(time.mktime(row[1].timetuple()) * 1000)

            record = {'device_id': device_id, 'timestamp': timestamp}

            num_channels = get_num_channels(device_name, metric_name)
            for i in xrange(num_channels):
                channel_name = "channel_%s" % i
                record[channel_name] = row[2 + i]

            data.append(record)
        return data
Example no. 4
  def start(self):

    # callback functions to handle the sample for that metric (each metric has a specific number of channels)
    cb_functions = {metric: self.callback_factory(metric, get_num_channels(self.device_name, metric))
                    for metric in self.metrics}

    self.device.start(cb_functions)
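
Examples 4, 7, 11 and 14 build their callback map through a callback_factory(metric, num_channels) method that is not shown in this listing. A hedged sketch of what such a factory could look like, assuming each incoming sample is a flat sequence of channel values and that the connector keeps a per-metric buffer (both assumptions), is:

# Illustrative sketch only: callback_factory and the buffers attribute are
# not part of this listing; the record layout mirrors the channel_<i> keys
# used by the other examples.
class ConnectorSketch(object):
    def __init__(self):
        self.buffers = {}  # hypothetical: one list of records per metric

    def callback_factory(self, metric, num_channels):
        def handle_sample(timestamp, sample):
            record = {"timestamp": timestamp}
            for i in xrange(num_channels):
                record["channel_%s" % i] = sample[i]
            self.buffers.setdefault(metric, []).append(record)
        return handle_sample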
Example no. 5
  def __init__(self, publishers, buffer_size, device_name, device_port='mock_port'):
    """
    Mock connector: set up one data generator per metric.
    """
    super(MockConnector, self).__init__(publishers, buffer_size, device_name, device_port)
    self.data_generators = [
        self.data_generator_factory(metric, get_num_channels(self.device_name, metric))
        for metric in self.metrics]
Example no. 6
    def setup(self):
        """
        Generic setup for any analysis module, can be overriden by implementing in any child class
        This sets up subscriber and publisher based on input and output feature names
        """
        # usually this module is used with incoming EEG,
        # so we want to know the number of channels; the header list is built for convenience
        # the hard-coded "eeg" could be a problem if the device's metric name for raw data is not "eeg"
        # currently "eeg" is a known good metric name for both OpenBCI and Muse
        if "num_channels" in self.module_settings:
            # module specific setting
            self.num_channels = self.module_settings["num_channels"]
        elif "num_channels" in self.global_settings:
            # global setting
            self.num_channels = self.global_settings["num_channels"]
        else:
            # default for the device type
            self.num_channels = get_num_channels(self.device_name, "eeg")

        self.headers = ['timestamp'] + ['channel_%s' % i for i in xrange(self.num_channels)]

        # if input, instantiate subscriber
        if len(self.inputs):
            # there is only one subscriber to handle all inputs
            self.subscriber = PikaSubscriber(
                device_name=self.device_name,
                device_id=self.device_id,
                rabbitmq_address=self.rabbitmq_address,
                metrics=self.inputs)

        # if output, instantiate publishers
        if len(self.outputs):

            for output_key, output in self.outputs.iteritems():
                # each output has a specific key, assign a placeholder for it in publishers collection
                self.publishers[output_key] = {}
                self.output_buffers[output_key] = {}

                # each output has a parameter called "message_queues"
                # this can be a single value or a list, i.e. "foo" or ["foo1","foo2"]
                # most of the time, this will be a single string
                # an example of where an output might use more than one message_queue might be:
                # one output goes to visualization, while a second copy continues down the processing chain

                if 'message_queues' in output:
                    # for convenience, convert the "message_queues" parameter to list if it isn't already
                    if type(output['message_queues']) != list:
                        output['message_queues'] = [output['message_queues']]

                    # there is one publisher per output
                    for message_queue_name in output['message_queues']:
                        self.publishers[output_key][message_queue_name] = PikaPublisher(
                            device_name=self.device_name,
                            device_id=self.device_id,
                            rabbitmq_address=self.rabbitmq_address,
                            metric_name=message_queue_name)

                        # also instantiate an output buffer for each publisher
                        self.output_buffers[output_key][message_queue_name] = []
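
For reference, the loop above expects each entry of self.outputs to carry a 'message_queues' value that is either a single queue name or a list of names. A hypothetical settings fragment that this setup would accept (the output keys and queue names are made up) could be:

# Hypothetical shape of self.outputs; the keys and queue names are illustrative.
outputs = {
    "filtered_eeg": {
        # one copy goes to visualization, another continues down the processing chain
        "message_queues": ["filtered_eeg_viz", "filtered_eeg_next"],
    },
    "alpha_power": {
        "message_queues": "alpha_power",  # a single string is also accepted
    },
}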
Example no. 7
  def connect_device(self):

    connect_muse(port=self.device_port)

    # callback functions to handle the sample for that metric (each metric has a specific number of channels)
    cb_functions = {metric: self.callback_factory(metric, get_num_channels(self.device_name, metric))
                    for metric in self.metrics}

    self.device = MuseOSC(self.device_port, cb_functions)
Example no. 8
  def get_headers(self):
    """
    Generate the CSV headers for that metric.
    :return: CSV headers
    """

    num_channels = get_num_channels(self.device_name, self.metric)
    headers = ['timestamp'] + ['channel_%s' % i for i in xrange(num_channels)]
    return headers
Example no. 9
    def setup(self):
        super(ModuleDownsample, self).setup()
        self.factor = 1
        self.counter = 0

        # usually this module is used with incoming EEG,
        # so we want to know the number of channels; the header list is built for convenience
        self.num_channels = get_num_channels(self.device_name, self.input_feature)
        self.headers = ['timestamp'] + ['channel_%s' % i for i in xrange(self.num_channels)]
Example no. 10
  def get_headers(self):
    """
    Generate the CSV headers for that metric.
    :return: CSV headers
    """

    num_channels = get_num_channels(self.device_name, self.metric)
    headers = ['timestamp'] + ['channel_%s' % i for i in xrange(num_channels)]
    return headers
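
As a quick illustration, with four channels (an arbitrary example count) the list built above comes out as:

headers = ['timestamp'] + ['channel_%s' % i for i in xrange(4)]
assert headers == ['timestamp', 'channel_0', 'channel_1',
                   'channel_2', 'channel_3']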
Example no. 11
    def start(self):

        # callback functions to handle the sample for that metric (each metric has a specific number of channels)
        cb_functions = {
            metric:
            self.callback_factory(metric,
                                  get_num_channels(self.device_name, metric))
            for metric in self.metrics
        }

        self.device.start(cb_functions)
Example no. 12
    def __init__(self, device_name, device_id, rabbitmq_address, metric):
        self.subscriber = PikaSubscriber(device_name=device_name,
                                         device_id=device_id,
                                         rabbitmq_address=rabbitmq_address,
                                         metric_name=metric)
        self.metric = metric
        self.device_name = device_name
        self.device_id = device_id
        self.num_channels = get_num_channels(device_name, metric)

        self.cassandra_dao = CassandraDAL()
Example no. 13
  def __init__(self, device_name, device_id, rabbitmq_address, metric):
    self.subscriber = PikaSubscriber(device_name=device_name,
                                     device_id=device_id,
                                     rabbitmq_address=rabbitmq_address,
                                     metric_name=metric)
    self.metric = metric
    self.device_name = device_name
    self.device_id = device_id
    self.num_channels = get_num_channels(device_name, metric)

    self.cassandra_dao = CassandraDAO()
Example no. 14
    def connect_device(self):

        connect_muse(port=self.device_port)

        # callback functions to handle the sample for that metric (each metric has a specific number of channels)
        cb_functions = {
            metric:
            self.callback_factory(metric,
                                  get_num_channels(self.device_name, metric))
            for metric in self.metrics
        }

        self.device = MuseOSC(self.device_port, cb_functions)
    def write_to_cassandra_factory(self, metric):

        num_channels = get_num_channels(self.device_type, metric)


        def write_to_cassandra(ch, method, properties, body):
            buffer_content = json.loads(body)
            for record in buffer_content:
                timestamp = record["timestamp"]
                channel_data = [record["channel_%s" % i] for i in
                                xrange(num_channels)]

                self.cassandra_dao.store_data(timestamp,
                                              self.device_id,
                                              self.device_type,
                                              metric,
                                              channel_data)


        return write_to_cassandra
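
The write_to_cassandra callback above json-decodes the message body into a list of records and reads the 'timestamp' and 'channel_<i>' keys back out of each one. A hedged sketch of a body that would round-trip through it, assuming two channels and made-up values, is:

import json

# Illustrative message body only: the values are invented, but the keys match
# what write_to_cassandra reads out of each record.
body = json.dumps([
    {"timestamp": 1437853200000, "channel_0": 1.25, "channel_1": -0.75},
    {"timestamp": 1437853200004, "channel_0": 1.30, "channel_1": -0.70},
])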