class LslToOscStreamer:

    def __init__(self, host, port, stream_channels):
        self.client = udp_client.SimpleUDPClient(host, port)
        self.inlet = None
        self.stream_channels = stream_channels
        self.is_streaming = False

    def connect(self, prop='type', value='INSERT_MAC_ADDRESS'):
        streams = resolve_byprop(prop, value, timeout=5)
        self.inlet = StreamInlet(streams[0], max_chunklen=12)
        return self.inlet is not None

    def stream_data(self):
        if self.inlet is None:
            raise Exception("LSL stream is not connected")
        self.is_streaming = True
        streaming_thread = Thread(target=self._stream_handler)
        streaming_thread.daemon = True
        streaming_thread.start()

    def _stream_handler(self):
        while self.is_streaming:
            eeg_sample, _ = self.inlet.pull_sample()
            for channel_idx, channel in enumerate(self.stream_channels):
                self.client.send_message(channel, eeg_sample[channel_idx])

    def close_stream(self):
        self.is_streaming = False
        self.inlet.close_stream()
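
A minimal usage sketch for the class above (not part of the original listing), assuming the same pylsl, python-osc and threading imports plus time; the host, port, OSC paths and stream type are placeholders.

# Hypothetical usage; adjust host, port, OSC paths and the stream property to your setup.
streamer = LslToOscStreamer("127.0.0.1", 9000, ["/eeg/ch1", "/eeg/ch2"])
streamer.connect(prop='type', value='EEG')   # resolves the first matching LSL stream
streamer.stream_data()                       # starts the daemon streaming thread
time.sleep(10)                               # forward samples for ~10 seconds
streamer.close_stream()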
Example #2
class MuseLslToOscStreamer:
    def __init__(self, host, port):
        self.client = udp_client.SimpleUDPClient(host, port)
        self.inlet = None
        self.stream_channels = [
            "/muse/tp9", "/muse/af7", "/muse/af8", "/muse/tp10", "/muse/aux"
        ]
        self.is_streaming = False

    def connect(self):
        streams = resolve_byprop('type', 'EEG', timeout=5)
        self.inlet = StreamInlet(streams[0], max_chunklen=12)
        return self.inlet is not None

    def stream_data(self):
        if self.inlet is None:
            raise Exception("Muse lsl stream is not connected")
        self.is_streaming = True
        streaming_thread = Thread(target=self._stream_handler)
        streaming_thread.daemon = True
        streaming_thread.start()

    def _stream_handler(self):
        while self.is_streaming:
            eeg_sample, _ = self.inlet.pull_sample()
            for channel_idx, channel in enumerate(self.stream_channels):
                self.client.send_message(channel, eeg_sample[channel_idx])

    def close_stream(self):
        self.is_streaming = False
        self.inlet.close_stream()
Example #3
class AIYVoiceInterface:
    def __init__(self, lsl_data_type,
                 num_channels):  # default board_id 2 for Cyton
        self.lsl_data_type = lsl_data_type
        self.lsl_num_channels = num_channels

        self.streams = resolve_byprop('name', self.lsl_data_type, timeout=1)
        if len(self.streams) < 1:
            raise AttributeError(
                'Unable to find LSL Stream with given type {0}'.format(
                    lsl_data_type))
        self.inlet = StreamInlet(self.streams[0])
        # TO-DO: fix this, we need to re-stream this since sometimes unity doesn't pick up AIY data for some reason
        info = StreamInfo('VoiceBox', 'Voice', num_channels, 0.0, 'string',
                          'voice')
        self.outlet = StreamOutlet(info)
        pass

    def start_sensor(self):
        # connect to the sensor
        self.streams = resolve_byprop('name', self.lsl_data_type, timeout=1)
        if len(self.streams) < 1:
            raise AttributeError(
                'Unable to find LSL Stream with given type {0}'.format(
                    self.lsl_data_type))
        self.inlet = StreamInlet(self.streams[0])
        self.inlet.open_stream()
        print(
            'LSLInletInterface: resolved, created and opened inlet for lsl stream with type '
            + self.lsl_data_type)

        # read the channel names if there are any
        # tell the sensor to start sending frames

    def process_frames(self):
        # return one or more frames of the sensor
        try:
            frames, timestamps = self.inlet.pull_chunk()
            if len(frames) > 0:
                self.outlet.push_sample(frames[0])  # TO-DO: see above
        except LostError:
            frames, timestamps = [], []
            pass  # TODO handle stream lost
        return np.transpose(frames), timestamps

    def stop_sensor(self):
        if self.inlet:
            self.inlet.close_stream()
        print('LSLInletInterface: inlet stream closed.')

    def info(self):
        return self.inlet.info()

    def get_num_chan(self):
        return self.lsl_num_channels

    def get_nominal_srate(self):
        return self.streams[0].nominal_srate()
Example #4
class LSLInletInterface:
    def __init__(self, lsl_data_type):
        self.streams = resolve_byprop('name', lsl_data_type, timeout=0.1)
        if len(self.streams) < 1:
            raise AttributeError(
                'Unable to find LSL Stream with given type {0}'.format(
                    lsl_data_type))
        self.inlet = StreamInlet(self.streams[0])
        self.lsl_data_type = lsl_data_type
        self.lsl_num_channels = self.inlet.channel_count
        pass

    def start_sensor(self):
        # connect to the sensor
        self.streams = resolve_byprop('name', self.lsl_data_type, timeout=0.1)
        if len(self.streams) < 1:
            raise AttributeError(
                'Unable to find LSL Stream with given type {0}'.format(
                    self.lsl_data_type))
        if not self.inlet:
            self.inlet = StreamInlet(self.streams[0])
        self.inlet.open_stream()
        print(
            'LSLInletInterface: resolved, created and opened inlet for lsl stream with type '
            + self.lsl_data_type)

        # read the channel names if there are any
        # tell the sensor to start sending frames

    def process_frames(self):
        # return one or more frames of the sensor
        try:
            frames, timestamps = self.inlet.pull_chunk()
        except LostError:
            frames, timestamps = [], []
            pass  # TODO handle stream lost
        return np.transpose(frames), timestamps

    def stop_sensor(self):
        if self.inlet:
            self.inlet.close_stream()
        print('LSLInletInterface: inlet stream closed.')

    def info(self):
        return self.inlet.info()

    def get_num_chan(self):
        return self.lsl_num_channels

    def get_nominal_srate(self):
        return self.streams[0].nominal_srate()
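
A minimal polling sketch for LSLInletInterface (hypothetical, not part of the original listing), assuming time is imported and that a stream named 'OpenBCI_EEG' is already being published; the stream name, iteration count and 0.1 s poll interval are placeholders.

# Hypothetical usage of LSLInletInterface.
interface = LSLInletInterface('OpenBCI_EEG')
interface.start_sensor()
try:
    for _ in range(100):
        frames, timestamps = interface.process_frames()   # CHANNELS x N array, possibly empty
        if frames.size:
            print(frames.shape, timestamps[-1])
        time.sleep(0.1)
finally:
    interface.stop_sensor()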
Example #5
def testing():
    dummy_streamer = ble2lsl.Dummy(muse2016)  #

    streams = resolve_byprop(
        "type", "EEG", timeout=5
    )  #type: EEG, minimum return streams = 1, timeout after 5 seconds

    streamIn = StreamInlet(
        streams[0], max_chunklen=12, recover=True
    )  #Grab first stream from streams, MUSE chunk length 12, recover a lost stream automatically
    print(streamIn)
    print(streamIn.info().channel_count())
    streamIn.open_stream(
    )  #This actually isn't required: pull_sample() and pull_chunk() implicitly open the stream.
    #But it's good to be explicit because it makes the code clearer
    print("Pull Sample")
    print(streamIn.pull_sample()
          )  #Returns a tuple with the actual values we want.
    #The first element is the list of channel values, the second element is a timestamp. This is a snapshot of our stream
    #at a certain point in time.
    print("Pull Chunk")
    ts = time.time()
    while True:  # NOTE: this loop runs until interrupted, so the cleanup code below is never reached
        x = streamIn.pull_chunk()
        if all(x):
            #if not np.shape(x) == (2, 0):
            print(np.shape(x))
            print(np.shape(x[1]))
            t = [t - ts for t in x[1]]
            print(t)
            print(t[-1] - t[0])

        # for y in x:
        #     for z in y:
        #         print(z)
        #print("\n")

    plt.style.use('ggplot')

    # data first then time stamps, sick

    pprint(streamIn.info().as_xml())  #what
    timeC = streamIn.time_correction()
    print(timeC)

    #Clean up time

    streams.clear()
    streamIn.close_stream()  #calls lsl_close_stream
    streamIn.__del__()  #Not throwing errors
    dummy_streamer.stop()
Example #6
class OpenVibeLSLClient:
    # first resolve an EEG stream on the lab network
    #print("looking for an EEG stream...")

    def __init__(self):
        self.streams=None
        self.inlet=None


    def resolveStream(self,type="OpenVibe"):
        # first resolve an EEG stream on the lab network
        print("looking for an EEG stream...")
        self.streams = resolve_stream('type', type)
        # create a new inlet to read from the stream
        self.inlet = StreamInlet(self.streams[0])
        if self.inlet is not None and self.streams is not None:
            print("Success! Stream Resolved")
            return True
        print("Could not resolve LSL stream...")
        return False

    def getControlValue(self,printValue=False):
        if self.streams is None or self.inlet is None:
            print("No stream and/or inlet initialized!")
            return None
        chunk, timestamps = self.inlet.pull_sample()

        return chunk[0]
        # get a new sample (you can also omit the timestamp part if you're not
        # interested in it)
        #while(1):
        #    chunk, timestamps = self.inlet.pull_chunk()
        #    if timestamps:
        #        value= chunk[0][0]
        #        print "Received control value: "+str(value)
        #        return value
        #        break;

    def disconnect(self):
        if self.inlet!=None:
            self.inlet.close_stream()
            return True

        return False

    def closeStream(self):
        self.inlet.close_stream()
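
A minimal usage sketch (hypothetical, not part of the original listing); the stream type 'OpenVibe' must match whatever the LSL export in the OpenViBE scenario advertises, and the read count of 50 is a placeholder.

# Hypothetical usage of OpenVibeLSLClient.
client = OpenVibeLSLClient()
if client.resolveStream(type="OpenVibe"):
    for _ in range(50):
        value = client.getControlValue()
        print("control value: " + str(value))
    client.disconnect()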
Example #7
    def __init__(self, stream_name, block_size=128, bad_channels=None, store_first_timestamp_to=None, name='lsl_socket'):
        """
        Initialize an LSL socket to receive data sent over the Lab Streaming Layer interface

        :param stream_name: Name of the stream to connect to
        :param name: Name of the Node in the Closed-Loop System for debug printing
        """
        super(LSL_Socket, self).__init__(has_inputs=False, name=name)
        self.block_size = block_size
        self.stream_name = stream_name
        self.store_first_timestamp_to = store_first_timestamp_to

        stream = self.find_given_stream()
        stream_inlet = StreamInlet(stream)

        self.mask = np.ones(stream_inlet.channel_count, bool)
        stream_inlet.close_stream()

        if bad_channels is not None and len(bad_channels) > 0:
            self.mask[bad_channels] = False

        self.feeder_process = None
        self.timestamp_stored = False
        logger.info('Connected to stream [{}].'.format(stream_name))
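
The fragment above calls self.find_given_stream(), which is not included in the listing. A minimal sketch of what such a helper could look like, assuming pylsl's resolve_byprop is available and that the node should fail loudly when the named stream is absent; the 5-second timeout is a placeholder.

    def find_given_stream(self, timeout=5.0):
        # Hypothetical helper (not in the original listing): resolve the LSL
        # stream whose name was passed to __init__ and return its StreamInfo.
        streams = resolve_byprop('name', self.stream_name, timeout=timeout)
        if not streams:
            raise RuntimeError('LSL stream "{}" not found.'.format(self.stream_name))
        return streams[0]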
Example #8
class PupilTracker(object):
  def __init__(self):
    pupil_queue = Queue()
    self.pupil_proc = Process(target=pupil_capture.alternate_launch,
                              args=((pupil_queue), ))
    self.pupil_proc.start()

    while True:
      pupil_msg = pupil_queue.get()
      print(pupil_msg)
      if 'tcp' in pupil_msg:
        self.ipc_sub_url = pupil_msg
      if 'EYE_READY' in pupil_msg:
        break

    context = zmq.Context()
    self.socket = zmq.Socket(context, zmq.SUB)
    monitor = self.socket.get_monitor_socket()

    self.socket.connect(self.ipc_sub_url)
    while True:
      status = recv_monitor_message(monitor)
      if status['event'] == zmq.EVENT_CONNECTED:
        break
      elif status['event'] == zmq.EVENT_CONNECT_DELAYED:
        pass
    print('Capturing from pupil on url %s.' % self.ipc_sub_url)
    self.socket.subscribe('pupil')

    # setup LSL
    streams = resolve_byprop('name', LSL_STREAM_NAME, timeout=2.5)
    try:
      self.inlet = StreamInlet(streams[0])
    except IndexError:
      raise ValueError('Make sure stream name="%s", is opened first.'
          % LSL_STREAM_NAME)

    self.running = True
    self.samples = []

  # LSL and pupil samples are synchronized to local_clock(), which is the
  # runtime on this slave, not the host
  def _record_lsl(self):
    while self.running:
      sample, timestamp = self.inlet.pull_sample(timeout=5)

      # time correction to sync to local_clock()
      try:
        if timestamp is not None and sample is not None:
          timestamp = timestamp + self.inlet.time_correction(timeout=5) 

          samples_lock.acquire()
          self.samples.append(('STIM', timestamp, sample))
          samples_lock.release()

      except TimeoutError:
        pass

    print('closing lsl on the pupil side')
    self.inlet.close_stream()

  def _record_pupil(self):
    while self.running:
      topic = self.socket.recv_string()
      payload = serializer.loads(self.socket.recv(), encoding='utf-8')

      samples_lock.acquire()
      self.samples.append(('pupil', local_clock(), payload['diameter']))
      samples_lock.release()

    print('Terminating pupil tracker recording.')
      
  def capture(self):
    self.pupil_thread = threading.Thread(target=self._record_pupil)
    self.lsl_thread = threading.Thread(target=self._record_lsl)
    self.pupil_thread.start()
    self.lsl_thread.start()

  def export_data(self):
    self.running = False

    self.pupil_thread.join(5)
    self.lsl_thread.join(5)
    print('Joined threads, now outputting pupil data.')

    i = 0
    while os.path.exists("data/pupil/data-%s.csv" % i):
      i += 1

    # csv writer with stim_type, msg, and timestamp, then data
    with open('data/pupil/data-%s.csv' % i, 'w+') as f:
      writer = csv.writer(f)
      writer.writerow(('Signal Type', 'Msg', 'Time', 'Channel 1', 'Channel 2', 'Channel 3', 'Channel 4', 'Channel 5', 'Channel 6', 'Channel 7', 'Channel 8' ))
      for sample in self.samples:
        signal_type, timestamp, datas = sample
        out = (signal_type, 'msg', timestamp)
        for data in datas:
          out = out + (data,)
        writer.writerow(out)

  def __str__(self):
    return 'Pupil tracker listening to %s' % self.ipc_sub_url

  def __del__(self):
    try:
      self.inlet.close_stream()
    except AttributeError:
      raise AttributeError('self.inlet does not exist. Most likely the LSL stimuli stream was not opened yet.')

    self.pupil_proc.terminate()
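
A minimal recording sketch for PupilTracker (hypothetical, not part of the original listing), assuming time is imported, the Pupil capture process can launch, and the LSL stimulus stream named by LSL_STREAM_NAME is already open; the 30-second window is a placeholder.

# Hypothetical usage of PupilTracker.
tracker = PupilTracker()
tracker.capture()        # start the pupil and LSL recording threads
time.sleep(30)           # record for ~30 seconds
tracker.export_data()    # stop recording and write data/pupil/data-<i>.csv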
Example #9
class HandRecorder:

    def __init__(self, subject="test", nChannels=31, frequency=512, streamer_type="Brainvision Recorder", channels=None):
        global screen, total_ch, freq, select_ch
        freq = int(frequency)
        total_ch = int(nChannels)
        select_ch = range(1, total_ch + 1)
        channel_names=channels

        self.streamer = str(streamer_type)
        self.name = self.__class__.__name__
        self.running = True
        self.server_running = True
        self.server_connected = False
        self.sock = None
        self.global_time = time.perf_counter()  # time.clock() was removed in Python 3.8
        self.EEGdata = np.zeros((freq * EEGdata_size, total_ch + 1))
        self.label = ""
        self.samples=0
        self.print_label=0
        self.edx=0

        print("{}: Looking for an EEG stream...".format(self.name))

        if (self.streamer == "OpenVibe"):
            print("{}: Looking for an EEG stream...".format(self.name))
            streams = resolve_stream('type', 'signal')
            self.inlet = StreamInlet(streams[0])

        i = 1
        fname = "{}/{}_{}_{}t{}c{}s{}ch_{}".format(folder, date, data_type, num_trials, num_classes, window_sec, len(select_ch), subject)
        self.filename = "{}_{}.txt".format(fname, i)
        while os.path.exists(self.filename):
            i += 1
            self.filename = "{}_{}.txt".format(fname, i)
        self.file = open(self.filename, "w")
        print("{}: Writing to {}".format(self.name, self.filename))


        self.output_data = []
        self.output_label = []
        self.class_count = [0] * num_classes

    def get_label(self):
        return self.print_label

    def collect_data(self):
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        server_address = (ip, port)
        self.sock.bind(server_address)
        self.sock.listen(100)
        connection, client_address = self.sock.accept()

        label=[0 for i in range(num_trials*num_iteration)]
        sleep(20)
        for i in range(num_trials*num_iteration):
            if(class_random==False):
                label[i] = class_order[i % num_iteration]
            elif(class_random==True):
                if(i%num_iteration==0):
                    class_flags=[0]*num_iteration
                while True:
                    temp=random.randint(1,num_iteration) - 1
                    if(class_flags[temp]==0):
                        class_flags[temp]=1
                        label[i] = class_order[temp]
                        break
        print("{}: Collection starting".format(self.name))
        #write_to_progress("Collection Started")
        for i in range(num_trials * num_iteration):
            self.edx=0
            self.samples=0
            flag=1
            start_time=time.time()
            prev_time=0
            while True:
                current_time=time.time()-start_time
                if current_time >= 1 and flag == 1:  # cross
                    self.label = 'w'
                    connection.sendall(self.label.encode())
                    self.print_label=0
                    flag = 2
                elif current_time >= 2 and flag == 2 and label[i] == 0:  # rest
                    self.label = 'w'
                    connection.sendall(self.label.encode())
                    self.print_label=0
                    flag = 3
                elif current_time >= 2 and flag == 2 and label[i] == 1:  # right
                    self.label = 'd'
                    connection.sendall(self.label.encode())
                    self.print_label=0
                    flag = 3
                elif current_time >= 2 and flag == 2 and label[i] == 2:  # left
                    self.label = 'a'
                    connection.sendall(self.label.encode())
                    self.print_label=0
                    flag = 3
                elif current_time >= 2 and flag == 2 and label[i] == 3:  # both
                    self.label = 's'
                    connection.sendall(self.label.encode())
                    self.print_label=0
                    flag = 3
                elif current_time >= 4 and flag == 3:  # cross
                    self.label = 'w'
                    connection.sendall(self.label.encode())
                    self.print_label=0
                    flag = 4
                elif current_time >= 5 and flag == 4:  # cross
                    print("start recording")
                    if label[i] == 0:
                        print("rest")
                        #write_to_progress("rest")
                        self.label = 'i'
                        connection.sendall(self.label.encode())
                        self.print_label=1
                    if label[i] == 1:
                        print("right")
                        #write_to_progress("right")
                        self.label = 'l'
                        connection.sendall(self.label.encode())
                        self.print_label=2
                    if label[i] == 2:
                        print("left")
                        #write_to_progress("left")
                        self.label = 'j'
                        connection.sendall(self.label.encode())
                        self.print_label=3
                    if label[i] == 3:
                        print("both")
                        #write_to_progress("both")
                        self.label = 'k'
                        connection.sendall(self.label.encode())
                        self.print_label=4
                    flag = 5
                elif (current_time >= (5 + wait_time+ window_sec) and current_time >= prev_time + window_sec and flag == 5):
                    prev_time = current_time
                    selected_eeg = self.EEGdata[(self.edx - (freq * window_sec)): self.edx, select_ch]
                    self.output_data.append(selected_eeg.T)
                    self.output_label.append(self.print_label-1)
                    self.file.write(str(np.ndarray.tolist(selected_eeg)) + '\n')
                    self.file.write(str(self.print_label-1) + '\n')
                    self.class_count[self.print_label-1] += 1
                elif current_time >= (5 + wait_time + session_sec) and flag==5:  # 3 second data
                    self.label = 'i'
                    connection.sendall(self.label.encode())
                    self.print_label=0
                    flag=6
                elif current_time > (6 + wait_time + session_sec):
                    self.label = 'i'
                    connection.sendall(self.label.encode())
                    print("end recording")
                    break

        sleep(10)
        self.file.close()
        connection.close()
        print("{}: Collection finished".format(self.name))
        #write_to_progress("Collection Finished")

    def retrieve_eeg(self):
        if (self.streamer == "Brainvision Recorder"):
            parent_conn, child_conn = Pipe()
            eeg_process = multiprocessing.Process(target=retrieve_eeg_BREC, args=(child_conn,))
            eeg_process.start()

        while self.running:
            if (self.streamer == "Brainvision Recorder"):
                (sample, timestamp) = parent_conn.recv()
            elif (self.streamer == "OpenVibe"):
                sample, timestamp = self.inlet.pull_sample()

            current_time = time.perf_counter() - self.global_time
            self.EEGdata[self.edx % (freq * EEGdata_size), 0] = current_time
            self.EEGdata[self.edx % (freq * EEGdata_size), 1:total_ch + 1] = sample
            self.edx = self.edx + 1
            if self.edx >= freq * EEGdata_size:
                self.edx = 0

    def close_recorder(self):
        self.file.close()
        self.running = False
        if(self.streamer == "OpenVibe"):
            self.inlet.close_stream()

    def send_hand(self, label):
        try:
            command = ['i', 'l', 'j', 'k'][int(label)]
        except ValueError:
            return
        self.label = command

    def start(self):
        eeg_thrd = threading.Thread(target=self.retrieve_eeg)
        eeg_thrd.daemon = True
        eeg_thrd.start()
        print("{}: Waiting for Unity...".format(self.name))
        #write_to_progress("Waiting for Unity...")
        self.collect_data()
        self.close_recorder()

        return self.filename
Example #10
class MyOVBox(OVBox):
  def __init__(self):
    OVBox.__init__(self)

    
  # the initialize method reads settings and outputs the first header
  def initialize(self):
    self.initLabel = 0
    self.debug=self.setting['debug'] == "true"
    print "Debug: ", self.debug
    self.stream_type=self.setting['Stream type']
    self.stream_name=self.setting['Stream name'] 
    # total channels for all streams
    self.channelCount = 0
    #self.stream_name=self.setting['Stream name'] # in case !all_streams
    print "Looking for streams of type: " + self.stream_type
    streams = resolve_stream('type',self.stream_type)
    print "Nb streams: " + str( len(streams))
    self.nb_streams = len(streams)
    if self.nb_streams == 0:
      raise Exception("Error: no stream found.")
    self.inlet = StreamInlet(streams[0], max_buflen=1)
    self.info = self.inlet.info()
    self.channelCount = self.info.channel_count()
    print "Stream name: " + self.info.name()
    stream_freq = self.info.nominal_srate()
    if stream_freq != 0:
	  raise Exception("Error: no irregular stream found.")
    # we append to the box output a stimulation header. This is just a header, dates are 0.
    self.output[0].append(OVStimulationHeader(0., 0.))
    self.init = False
  # The process method will be called by openvibe on every clock tick
  def process(self):
    # A stimulation set is a chunk which starts at current time and end time is the time step between two calls
    # init here and filled within triger()
    self.stimSet = OVStimulationSet(self.getCurrentTime(), self.getCurrentTime()+1./self.getClock())
    if not self.init:
      local_time = local_clock()
      initSecond = int(local_time)
      initMillis = int((local_time - initSecond) * 1000)
      self.stimSet.append(OVStimulation(self.initLabel, self.getCurrentTime(), 0.))
      self.stimSet.append(OVStimulation(initSecond, self.getCurrentTime(), 0.))
      self.stimSet.append(OVStimulation(initMillis, self.getCurrentTime(), 0.))
      self.init = True
    # read all available samples from the stream
    samples = []
    sample, timestamp = self.inlet.pull_sample(0)
    while sample is not None:
      samples += sample
      sample, timestamp = self.inlet.pull_sample(0)
    # every value will be converted to an openvibe code and a stim will be created
    for label in samples:
      label = str(label)
      if self.debug:
        print "Got label: ", label
      self.stimSet.append(OVStimulation(float(label), self.getCurrentTime(), 0.))
    # even if it's empty we have to send stim list to keep the rest in sync
    self.output[0].append(self.stimSet)

  def uninitialize(self):
    # we send a stream end.
    end = self.getCurrentTime()
    self.output[0].append(OVStimulationEnd(end, end))
    self.inlet.close_stream()
Example #11
def plotFreqDomain(stream_info, chunkwidth, channels=0, size=(1500, 1500), title=None):
    """Plot Real-Time in the frequency domain using a static x-axis and changing y axis values.

    Accepts a pylsl StreamInlet Object and plots chunks in real-time as they are recieved
    using a pyqtgraph plot. Can plot multiple channels.

    Args:
        stream_info (pylsl StreamInfo Object): The stream info object for the stream to be plotted
        chunkwidth (int): The number of samples in each chunk when pulling chunks from the stream
        fs (int): The sampling frequency of the device. If zero function will attempt to determine 
            sampling frequency automatically
        size (array): Array of type (width, height) of the figure
        title (string): Title of the plot figure
    
    Returns:
        bool: True if window was closed and no errors were encountered. False if an error was encountered within
            the function
    """
    #################################
    ## Stream Inlet Creation
    #################################
    inlet = StreamInlet(stream_info, max_chunklen=chunkwidth, recover=True)
    inlet.open_stream() # The stream is opened implicitly on the first call of pull_chunk(), but open it now for clarity

    #################################
    ## Variable Initialization
    #################################

    if(channels == 0):
        channels = stream_info.channel_count() # Get number of channels

    ##################################
    ## Figure and Plot Set Up
    ##################################

    ## Initialize QT
    app = QtGui.QApplication([])

    ## Define a top-level widget to hold everything
    fig = QtGui.QWidget()
    fig.resize(size[0], size[1]) # Resize window
    if (title != None): 
        fig.setWindowTitle(title) # Set window title
    layout = QtGui.QGridLayout()
    fig.setLayout(layout)

    # Set up initial plot conditions
    (x_vec, step) = np.linspace(0,chunkwidth,chunkwidth, retstep=True) # vector used to plot y values
    y_vec = np.zeros((channels,len(x_vec))) # Initialize y_values as zero

    # Set Up subplots and lines
    plots = []
    curves = []
    colors = ['c', 'm', 'g', 'r', 'y', 'b'] # Color options for various channels
    for i in range(0, channels):
        # Create plot widget and append to list
        plot = pg.PlotWidget(labels={'left': 'Power (dB)'}, title='Channel ' + (str)(i + 1)) # Create Plot Widget
        plot.plotItem.setMouseEnabled(x=False, y=False) # Disable panning for widget
        plot.plotItem.showGrid(x=True) # Enable vertical gridlines
        plots.append(plot)
        # Plot data and save curve. Append curve to list
        curve = plot.plot(x_vec, y_vec[i], pen=pg.mkPen(colors[i%len(colors)], width=0.5)) # Set thickness and color of lines
        curves.append(curve)
        # Add plot to main widget
        layout.addWidget(plot, i // 2, i % 2)

    # Display figure as a new window
    fig.show()

    ###################################
    # Real-Time Plotting Loop
    ###################################

    firstUpdate = True
    buffer = []
    while(True):
        chunk = inlet.pull_chunk()
        #print(np.shape(chunk[0]))
        #print(chunk[0][0:129])
        #print(np.shape(chunk[0][0:129]))

        if not (np.size(chunk[0]) == 0): # Check for available chunk
            chunkdata = np.transpose(chunk[0]) # Get chunk data and transpose to be CHANNELS x CHUNKLENGTH
            if np.size(buffer) == 0:
                buffer = chunkdata
            else:
                buffer = np.append(buffer, chunkdata, axis=1)
        
        while np.size(buffer, 1) > 129:  # NOTE: x_vec has length chunkwidth, so this assumes chunkwidth == 129
            data = buffer[:,0:129]
            buffer = buffer[:,129:]
            #if np.size(buffer,1) < 129:
                #data = np.zeros((5,129))
            # Update plotted data
            for i in range(0,channels):
                curves[i].setData(x_vec, data[i]) # Update data
            
            # Update QT Widget to reflect the changes we made
            pg.QtGui.QApplication.processEvents()

        # Check to see if widget if has been closed, if so exit loop
        if not fig.isVisible():
            break
    
    # Close the stream inlet
    inlet.close_stream()
    
    return True
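
The function above labels its y-axis "Power (dB)" but plots the buffered samples directly. A hedged sketch of how each 129-sample segment could instead be converted to a dB power spectrum before updating the curves, assuming numpy's FFT helpers and a valid nominal sampling rate from the stream (an illustration only, not part of the original function).

# Hypothetical replacement for the curve update inside the buffering loop above.
fs = stream_info.nominal_srate()                      # sampling rate of the stream
freqs = np.fft.rfftfreq(129, d=1.0 / fs)              # frequency bins for a 129-sample window
for i in range(0, channels):
    spectrum = np.abs(np.fft.rfft(data[i])) ** 2      # periodogram of the channel segment
    power_db = 10 * np.log10(spectrum + 1e-12)        # convert to dB, guard against log(0)
    curves[i].setData(freqs, power_db)                # x-axis becomes frequency in Hz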
Example #12
def plotTimeDomain(stream_info, chunkwidth=0, fs=0, channels=0, timewin=50, tickfactor=5, size=(1500, 800), title=None):
    """Plot Real-Time domain in the time domain using a scrolling plot.

    Accepts a pylsl StreamInlet Object and plots chunks in real-time as they are recieved
    using a scrolling pyqtgraph plot. Can plot multiple channels.

    Args:
        stream_info (pylsl StreamInfo Object): The stream info object for the stream to be plotted
        chunkwidth (int): The number of samples in each chunk when pulling chunks from the stream
        fs (int): The sampling frequency of the device. If zero function will attempt to determine 
            sampling frequency automatically
        channels (int): The number of channels in the stream (Eg. Number of EEG Electrodes). If
            zero the function will attempt determine automatically
        timewin (int): The number seconds to show at any given time in the plot. This affects the speed 
            with which the plot will scroll accross the screen. Can not be a prime number.
        tickfactor (int): The number of seconds between x-axis labels. Must be a factor of timewin
        size (array): Array of type (width, height) of the figure
        title (string): Title of the plot figure
    
    Returns:
        bool: True if window was closed and no errors were encountered. False if an error was encountered within
            the function
    """
    #################################
    ## Stream Inlet Creation
    #################################
    #stream = resolve_byprop("name",stream_info.name(),timeout= 10)
    inlet = StreamInlet(stream_info, max_chunklen=chunkwidth, recover=True)
    inlet.open_stream() # The stream is opened implicitly on the first call of pull_chunk(), but open it now for clarity

    #################################
    ## Variable Initialization
    #################################

    ## Get/Check Default Params
    if(timewin%tickfactor != 0):
        print('''ERROR: The tickfactor should be a factor of timewin. The default tickfactor
        is 5 seconds. If you changed the default timewin, make sure that 5 is a factor, or
        change the tickfactor so that it is a factor of timewin''')
        return False

    if(fs == 0):
        fs = stream_info.nominal_srate() # Get sampling rate

    if(channels == 0):
        channels = stream_info.channel_count() # Get number of channels

    ## Initialize Constants
    XWIN = int(timewin*fs) # Width of X-Axis in samples (cast to int so it can be used as a sample count)
    XTICKS = (int)((timewin + 1)/tickfactor) # Number of labels to have on X-Axis
    #CHUNKPERIOD = chunkwidth*(1/fs) # The length of each chunk in seconds

    ##################################
    ## Figure and Plot Set Up
    ##################################

    ## Initialize QT
    app = QtGui.QApplication([])

    ## Define a top-level widget to hold everything
    fig = QtGui.QWidget()
    fig.resize(size[0], size[1]) # Resize window
    if (title != None): 
        fig.setWindowTitle(title) # Set window title
    layout = QtGui.QGridLayout()
    fig.setLayout(layout)

    # Set up initial plot conditions
    (x_vec, step) = np.linspace(0,timewin,XWIN+1, retstep=True) # vector used to plot y values
    xlabels = np.zeros(XTICKS).tolist() # Vector to hold labels of ticks on x-axis
    xticks = [ x * tickfactor for x in list(range(0, XTICKS))] # Initialize locations of x-labels
    y_vec = np.zeros((channels,len(x_vec))) # Initialize y_values as zero

    # Set Up subplots and lines
    plots = []
    curves = []
    colors = ['c', 'm', 'g', 'r', 'y', 'b'] # Color options for various channels
    for i in range(0, channels):
        # Create axis item and set tick locations and labels
        axis = pg.AxisItem(orientation='bottom')
        axis.setTicks([[(xticks[i],str(xlabels[i])) for i in range(len(xticks))]]) # Initialize all labels as zero
        # Create plot widget and append to list
        plot = pg.PlotWidget(axisItems={'bottom': axis}, labels={'left': 'Volts (mV)'}, title='Channel ' + (str)(i + 1)) # Create Plot Widget
        plot.plotItem.setMouseEnabled(x=False, y=False) # Disable panning for widget
        plot.plotItem.showGrid(x=True) # Enable vertical gridlines
        plots.append(plot)
        # Plot data and save curve. Append curve to list
        curve = plot.plot(x_vec, y_vec[i], pen=pg.mkPen(colors[i%len(colors)], width=0.5)) # Set thickness and color of lines
        curves.append(curve)
        # Add plot to main widget
        layout.addWidget(plot, i, 0)

    # Display figure as a new window
    fig.show()

    ###################################
    # Real-Time Plotting Loop
    ###################################

    firstUpdate = True
    while(True):
        chunk = inlet.pull_chunk()

        # (something is weird with dummy chunks, get chunks of diff sizes, data comes in too fast)
        if chunk and np.shape(chunk)[1] > 0: # Check for available chunk 
            print(np.shape(chunk))
            chunkdata = np.transpose(chunk[0]) # Get chunk data and transpose to be CHANNELS x CHUNKLENGTH
            chunkperiod = len(chunkdata[0])*(1/fs)
            xticks = [x - chunkperiod for x in xticks] # Update location of x-labels

            # Update x-axis locations and labels
            if(xticks[0] < 0): # Check if a label has crossed to the negative side of the y-axis

                # Delete label on left of x-axis and add a new one on the right side
                xticks.pop(0)
                xticks.append(xticks[-1] + tickfactor)

                # Adjust time labels accordingly
                if (firstUpdate == False): # Check to see if it's the first update, if so skip so that time starts at zero
                    xlabels.append(xlabels[-1] + tickfactor)
                    xlabels.pop(0)
                else:
                    firstUpdate = False
            
            # Update plotted data
            for i in range(0,channels):
                y_vec[i] = np.append(y_vec[i], chunkdata[i], axis=0)[len(chunkdata[i]):] # Append chunk to the end of y_data (currently only doing 1 channel)
                curves[i].setData(x_vec, y_vec[i]) # Update data

                # Update x-axis labels
                axis = plots[i].getAxis(name='bottom')
                axis.setTicks([[(xticks[i],str(xlabels[i])) for i in range(len(xticks))]])
               
        # Update QT Widget to reflect the changes we made
        pg.QtGui.QApplication.processEvents()

        # Check to see if widget if has been closed, if so exit loop
        if not fig.isVisible():
            break
    
    # Close the stream inlet
    inlet.close_stream()
    
    return True
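
A minimal usage sketch for plotTimeDomain (hypothetical, not part of the original listing), assuming resolve_byprop is imported from pylsl; the stream type 'EEG' and the chunkwidth of 12 are placeholders.

# Hypothetical usage: resolve a stream, then hand its StreamInfo to the plot function.
streams = resolve_byprop('type', 'EEG', timeout=5)
if streams:
    plotTimeDomain(streams[0], chunkwidth=12, timewin=50, tickfactor=5, title='Live EEG')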
Example #13
print("Start aquiring data")
# eeg
inlet = StreamInlet(streams[0], max_chunklen=12)
eeg_time_correction = inlet.time_correction()
info = inlet.info()
description = info.desc()
freq = info.nominal_srate()
Nchan = info.channel_count()

ch = description.child('channels').first_child()
ch_names = [ch.child_value('label')]
for i in range(1, Nchan):
    ch = ch.next_sibling()
    ch_names.append(ch.child_value('label'))

inlet.close_stream()

# gyro
#inlet_gyro = StreamInlet(streams_gyro[0], max_chunklen=12)

currentWord = 1
currentTerm = 1
# main loop
while True:
    try:

        print("Waiting for gyro trigger ......")
        gyro_data = []
        timestamp = []
        inlet_gyro = StreamInlet(streams_gyro[0], max_chunklen=12)
        while True:
Example #14
class Model(object):
    '''
    Contains the data and the functionality of the application; it is
    responsible for updating, searching, querying, and processing the data.
    '''

    def __init__(self, name_db, name_collection, data=None):
        '''
        Receives: the database name, the name of the collection within the
        database, and the data for the plots.
        Function: makes the connection to the Mongo database, designs the
        filters by calling filtDesign(), and defines the storage locations
        of the files.
        '''

        MONGO_URI = "mongodb://localhost:27017/"
        self.__client = MongoClient(MONGO_URI)
        self.__db = self.__client[name_db]
        self.__collection = self.__db[name_collection]
        self.__fs = 250
        self.filtDesign()
        self.data = data
        print("se diseño el filtro")
        self.path_app = r'C:\Users\veroh\OneDrive - Universidad de Antioquia\Proyecto Banco de la republica\Trabajo de grado\Herramienta\HVA\GITLAB\interface\ViAT'
        self.cwd = self.path_app+'/'+'Records'
        self.processing = self.path_app+'/'+'Processing'
        if data is not None:
            self.assign_data(data)
        else:
            self.__data = np.asarray([])

    def newLocation(self, location):
        '''
        Receive: The new location
        Function: Select the location of the new records to acquire
        '''

        if location != "":
            self.path_app = location
            print(self.path_app)
            self.cwd = location+'/'+'Records'
            self.processing = location+'/'+'Processing'
            if not os.path.isdir(self.cwd):
                os.mkdir(self.cwd)
                print(self.cwd)
            if not os.path.isdir(self.processing):
                os.mkdir(self.processing)
                print(self.processing)
        else:
            pass

    def location(self):
        '''
        Redefine the location of the new records to acquire
        '''

        return self.cwd, self.processing

    def startDevice(self):
        '''
        Start sending the data, use the subprocess.Popen function to start a
        new process without stopping the existing one.
        '''

        # servidor = Server()
        # servidor.port()
        # data = RandData()
        # data.sample()
        try:
            self.__process = subprocess.Popen('start python randData.py',
                                              shell=True, stdin=subprocess.PIPE,
                                              stdout=subprocess.PIPE,)
            output = self.__process.communicate()[0].decode('utf-8')
            print(output)
            # if (Rand):
            # msg = QMessageBox(self.ventana_principal)
            # msg.setIcon(QMessageBox.Information)
            # msg.setText("El dispositivo ha sido detectado")
            # msg.setWindowTitle("Información")
            # msg.show()
            # else:
            # msg = QMessageBox(self.ventana_principal)
            # msg.setIcon(QMessageBox.Warning)
            # msg.setText("El dispositivo no ha sido detectado o no se encuentra conectado")
            # msg.setWindowTitle("Alerta!")
            # msg.show()
            # self.boton_iniciar.setEnabled(False)
        except KeyboardInterrupt:
            os.popen(
                r'TASKKILL /F /FI "WINDOWTITLE eq C:\Users\veroh\Anaconda3\python.exe"')

#        self.__channels = 8
#        self.__data = np.zeros((self.__channels, 2500))
#        self.streams_EEG = resolve_stream('type', 'EEG')

    def stopDevice(self):
        '''
        Stop sending the data and close the process.
        '''

        os.popen(
            r'TASKKILL /F /FI "WINDOWTITLE eq C:\Users\veroh\Anaconda3\python.exe"')

    def startData(self):
        '''
        Defines a matrix of zeros for the data with the number of channels
        and the number of samples.
        Uses the resolve_stream function of the pylsl library to find the
        data sent by the Server.py server or the randData.py data simulator,
        creates a StreamInlet object to receive the stream data (and metadata)
        from the lab network, and finally does a pull_chunk() that extracts a
        chunk of samples from the inlet.
        '''

        self.__channels = 8
        self.__data = np.zeros((self.__channels, 2500))
        self.streams_EEG = resolve_stream('type', 'EEG')
        self.__inlet = StreamInlet(self.streams_EEG[0], max_buflen=250)
        self.__inlet.pull_chunk()

    def stopData(self):
        '''
        Stops the action of receiving the data by closing the stream with 
        close_stream () and calls the dataprocessing.py and plot_stft.py
        processing modules
        '''

        if not os.path.isfile(self.cwd + '/' + str(self.p[0])+'_'+str(self.p[1])+'/'+self.date[0]+'/'+'Mark_'+self.p[0]+'_'+self.p[1]+'.csv'):
            pass
        else:
            maxV = Processing(self.p[0], self.p[1],
                              self.date[0], self.cwd, self.processing)
            maxV.run()
            TimeFre = TimeFrequency(
                self.p[0], self.p[1], self.date[0], self.cwd, self.processing)
            TimeFre.plot_stft()

        self.__inlet.close_stream()
        print('Stop Data Modelo')

    def startStimulus(self):
        '''
        call the stimulation module Stimulation_Acuity.py and start it.
        '''

        s = Stimulus(
            self.p[0], self.p[1], self.cwd + '/' + str(self.p[0])+'_'+str(self.p[1]))
        s.start_stimulus()

    def stopStimulus(self):
        '''
        Stops stimulation by closing the pygame.
        '''

        pygame.quit()

    def startZ(self):
        '''
        Start receiving data with StreamInlet to later find the impedance 
        of the data.
        '''

        self.__inlet = StreamInlet(self.streams_EEG[0], max_buflen=250)

    def stopZ(self):
        '''
        Close the reception of the data with close_stream at 
        the end of the reading of the impedance.
        '''

        self.__inlet.close_stream()
        print('Stop impedance')

    def readZ(self):
        '''
        Do a pull_sample() to take one value and apply Ohm's law:
        V = received RMS voltage, i = device current = 6 nA,
        Z = (V * √2) / i.
        The value of Z is divided by 1000 (kΩ) before being presented in the view.
        '''

        sample, timestamp = self.__inlet.pull_sample()
        self.Z = []
        for i in range(0, 8):
            Z_i = ((sample[i])*np.sqrt(2))/(6*pow(10, -9))
            self.Z.append(Z_i/1000)

    def readData(self):
        '''
        Do a pull_chunk() to take a number of values and compute the difference
        between the reference channel and each of the channels of interest.
        Create a DataFrame of the result and store it in a .csv file.
        '''

        samples, timestamp = self.__inlet.pull_chunk()
        samples = np.transpose(np.asanyarray(samples))
        try:
            self.sh = samples.shape[1]
            self.s = samples
            self.__data = np.roll(self.__data, self.sh)
            self.__data[0, 0:self.sh] = samples[0, :]  # FCz
            self.__data[1, 0:self.sh] = samples[1, :] - \
                samples[0, :]  # Oz - FCz
            self.__data[2, 0:self.sh] = samples[2, :] - \
                samples[0, :]  # O1 - FCz
            self.__data[3, 0:self.sh] = samples[3, :] - \
                samples[0, :]  # PO7 - FCz
            self.__data[4, 0:self.sh] = samples[4, :] - \
                samples[0, :]  # O2  - FCz
            self.__data[5, 0:self.sh] = samples[5, :] - \
                samples[0, :]  # PO8 - FCz
            self.__data[6, 0:self.sh] = samples[6, :] - \
                samples[0, :]  # PO3 - FCz
            self.__data[7, 0:self.sh] = samples[7, :] - \
                samples[0, :]  # PO4 - FCz

        except:
            return

        self.__dataT = {'C1': samples[0, :],
                        'C2': self.__data[1, 0:self.sh],
                        'C3': self.__data[2, 0:self.sh],
                        'C4': self.__data[3, 0:self.sh],
                        'C5': self.__data[4, 0:self.sh],
                        'C6': self.__data[5, 0:self.sh],
                        'C7': self.__data[6, 0:self.sh],
                        'C8': self.__data[7, 0:self.sh]}
        now = datetime.now()
        self.date = (now.strftime("%m-%d-%Y"), now.strftime("%H-%M-%S"))
        # cwd = os.getcwd()
        loc = self.cwd + '/' + \
            str(self.p[0])+'_'+str(self.p[1]) + '/'+self.date[0]
        name = '/' + 'Record_'+str(self.p[0])+'_'+str(self.p[1])+'.csv'
        if not os.path.isdir(loc):
            os.mkdir(loc)
            header = True
        else:
            if os.path.isfile(loc + name):
                header = False
            else:
                header = True
        if not np.all(self.__data == 0):
            r = pd.DataFrame(self.__dataT, columns=[
                             'C1', 'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'C8'])
            d = str(self.date[1])
            r['H'] = pd.Series([d])
            r = r.fillna(0)
            r.to_csv(loc + name, mode='a', header=header, index=True, sep=';')

    def filtDesign(self):
        '''
        Design a low pass filter and a high pass filter
        '''

        order, self.lowpass = filter_design(
            self.__fs, locutoff=0, hicutoff=50, revfilt=0)
        order, self.highpass = filter_design(
            self.__fs, locutoff=5, hicutoff=0, revfilt=1)

    def filtData(self):
        '''
        Call the readData() function and use the filtfilt function of
        scipy.signal to apply the filters to the received data in real time.
        '''

        self.readData()

        self.senal_filtrada_pasaaltas = signal.filtfilt(
            self.highpass, 1, self.__data)
        self.senal_filtrada_pasaaltas = hampelFilter(
            self.senal_filtrada_pasaaltas, 6)
        self.senal_filtrada_pasabandas = signal.filtfilt(
            self.lowpass, 1, self.senal_filtrada_pasaaltas)

        self.laplace_filtrada_pasaaltas = signal.filtfilt(
            self.highpass, 1, self.__laplace)
        self.laplace_filtrada_pasaaltas = hampelFilter(
            self.laplace_filtrada_pasaaltas, 6)
        self.laplace_filtrada_pasabandas = signal.filtfilt(
            self.lowpass, 1, self.laplace_filtrada_pasaaltas)

    def Pot(self):
        '''
        Apply the Welch method to the channel or user-defined configuration
        in the interface
        '''

        self.filtData()
        nblock = 250
        noverlap = nblock/2

        self.f, self.Pxx = signal.welch(self.senal_filtrada_pasabandas, self.__fs,
                                        nperseg=self.__fs*2, noverlap=noverlap)

        self.ftg, self.Pxxtg = signal.welch(self.__laplace,
                                            self.__fs, nperseg=self.__fs*2, noverlap=noverlap)

    def laplace(self, laplace1, laplace2, laplace3):
        '''
        Receives: the channel indices of interest taken from the drop-down
        menus of the graphical interface.
        Function: creates an array and performs the Laplacian operation with
        the delivered values.
        '''

        self.readData()

        # Map each drop-down selection to the corresponding channel index.
        one = {0: 1, 1: 0, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6}.get(laplace1, 7)
        two = {0: 2, 1: 0, 2: 1, 3: 3, 4: 4, 5: 5, 6: 6}.get(laplace2, 7)
        three = {0: 4, 1: 0, 2: 1, 3: 3, 4: 2, 5: 5, 6: 6}.get(laplace3, 7)
        self.__laplace = np.zeros((1, 2500))
        self.__laplace = np.roll(self.__laplace, self.sh)
        self.__laplace[0, 0:self.sh] = (
            self.s[one, :]*2)-self.s[two, :]-self.s[three, :]

    def returnLastData(self):
        '''
        Run Pot () and return the values to the view to graph them.
        '''

        self.Pot()
        return (self.senal_filtrada_pasabandas, self.Pxx, self.f,
                self.laplace_filtrada_pasabandas, self.Pxxtg, self.ftg)  # [0:6,:]

    def returnLastZ(self):
        '''
        Run readZ () and return the values of the impedance.
        '''

        self.readZ()
        return self.Z

    def returnLastStimulus(self):
        '''
        Run readData ()
        '''

        self.readData()
        return

    def add_into_collection_one(self, data):
        '''
        Add a subject to the database
        '''

        self.__collection.insert_one(data)
        self.p = data['d'], data['cc']
        loc = self.cwd + '/' + str(self.p[0])+'_'+str(self.p[1])
        if not os.path.isdir(loc):
            os.mkdir(loc)
        else:
            pass
        return True

    def search_one(self, consult, proj):
        '''
        Search for a person from the database
        '''

        result = self.__collection.find_one(consult, proj)
        try:
            info_result = [result.get("d", None), result.get("nombre", None), result.get("apellidos", None),
                           result.get("cc", None), result.get(
                               "sexo", None), result.get("dominante", None),
                           result.get("gafas", None), result.get(
                               "snellen", None), result.get("corregida", None),
                           result.get("estimulo", None), result.get(
                               "edad", None), result.get("tiempo", None),
                           result.get("rp", None), result.get("ubicacion")]
            self.p = info_result[0], info_result[3]
            loc = self.cwd + '/' + str(self.p[0])+'_'+str(self.p[1])
            if not os.path.isdir(loc):
                os.mkdir(loc)
            else:
                pass
            return info_result
        except:
            return False

    def search_many(self, consult, proj, view=False):
        '''
        Returns the list of members of the database
        '''

        # materialize the cursor so it can be iterated more than once below
        results = list(self.__collection.find(consult, proj))
        if view:
            for result in results:
                print(result)
        info_integrantes = list()
        for result in results:
            info = [result.get("d", None), result.get("nombre", None), result.get("apellidos", None),
                    result.get("cc", None), result.get(
                        "sexo", None), result.get("dominante", None),
                    result.get("gafas", None), result.get(
                        "snellen", None), result.get("corregida", None),
                    result.get("estimulo", None), result.get(
                        "edad", None), result.get("tiempo", None),
                    result.get("rp", None), result.get("ubicacion")]
            info_integrantes.append(info)
        return info_integrantes

    def delete_data(self, data):
        '''
        Delete a subject from the database
        '''

        self.__collection.delete_one(data)

    def assign_data(self, data):
        '''
        Deliver the data to be graphed from a .csv file
        '''
        self.__data = data

    def return_segment(self, x_min, x_max):
        '''
        Allow signal advance in time
        '''

        if x_min >= x_max:
            return None
        return self.__data[:, x_min:x_max]

    def signal_scale(self, x_min, x_max, escala):
        '''
        Allow to expand or decrease the signal
        '''

        copy_data = self.__data[:, x_min:x_max].copy()
        return copy_data*escala

    def file_location(self, i, cc):
        '''
        It allows to find a file of a certain subject.
        '''

        path_subject = self.cwd + '/' + str(i)+'_'+str(cc)
        return path_subject
Example #15
class Graph(object):
  def __init__(self, size=(600,350)):
    streams = resolve_byprop('name', 'bci', timeout=2.5)
    try:
      self.inlet = StreamInlet(streams[0])
    except IndexError:
      raise ValueError('Make sure stream name=bci is opened first.')
    
    self.running = True
    
    self.frequency = 250.0
    self.sampleinterval = (1/self.frequency)
    self.timewindow = 10
    self._bufsize = int(self.timewindow/self.sampleinterval)
    self.dataBuffer = collections.deque([0.0] * self._bufsize, self._bufsize)
    self.timeBuffer = collections.deque([0.0] * self._bufsize, self._bufsize)
    self.x = np.empty(self._bufsize,dtype='float64')
    self.y = np.empty(self._bufsize,dtype='float64')
    self.app = QtGui.QApplication([])
    self.plt = pg.plot(title='EEG data from OpenBCI')
    self.plt.resize(*size)
    self.plt.showGrid(x=True,y=True)
    self.plt.setLabel('left','Amplitude','V')
    self.plt.setLabel('bottom','Time','s')
    self.curve = self.plt.plot(self.x,self.y,pen=(255,0,0))
    self.sample = np.zeros(8)
    self.timestamp = 0.0

    #QTimer
    self.timer = QtCore.QTimer()
    self.timer.timeout.connect(self.update)
    self.timer.start(int(self.sampleinterval * 1000))  # QTimer.start() expects milliseconds

  def _graph_lsl(self):
    while self.running:
      # initial run
      self.sample, self.timestamp = self.inlet.pull_sample(timeout=5)
      if self.timeBuffer[0] == 0.0:
        self.timeBuffer = collections.deque([self.timestamp] * self._bufsize, self._bufsize)

      # time correction to sync to local_clock()
      try:
        if self.timestamp is not None and self.sample is not None:
          self.timestamp = self.timestamp + self.inlet.time_correction(timeout=5) 

      except TimeoutError:
        pass

    print('closing graphing utility')
    self.inlet.close_stream()

  def update(self):
    self.dataBuffer.append(self.sample[3])
    self.y[:] = self.dataBuffer
    self.timeBuffer.append(self.timestamp)
    self.x[:] = self.timeBuffer

    if len(self.x):
      print(self.x[0])
    else:
      print('no data yet')

    self.curve.setData(self.x,self.y)
    self.app.processEvents()

  def start(self):
    self.lsl_thread = threading.Thread(target=self._graph_lsl)
    self.lsl_thread.start()
  
  def stop(self):
    self.running = False
    self.lsl_thread.join(5)
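
A minimal launch sketch for the Graph class above (hypothetical, not part of the original listing), assuming a stream named 'bci' is already published and that QtGui comes from pyqtgraph's Qt bindings; exec_() keeps the QTimer-driven update() calls running until the plot window is closed.

# Hypothetical usage of the Graph class above.
graph = Graph()
graph.start()          # background thread pulling samples from the 'bci' stream
graph.app.exec_()      # Qt event loop; update() fires on each QTimer timeout
graph.stop()           # stop the pull loop and join the thread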
Example #16
class Graph(object):
    def __init__(self, size=(600, 350)):
        streams = resolve_byprop('name', 'bci', timeout=2.5)
        try:
            self.inlet = StreamInlet(streams[0])
        except IndexError:
            raise ValueError('Make sure stream name=bci is opened first.')

        self.running = True

        self.ProcessedSig = []
        self.SecondTimes = []
        self.count = -1
        self.sampleCount = self.count
        self.maximum = 0
        self.minimum = 0

        plt.ion()
        plt.hold(False)
        self.lineHandle = plt.plot(self.SecondTimes, self.ProcessedSig)
        plt.title("Live Stream EEG Data")
        plt.xlabel('Time (s)')
        plt.ylabel('mV')
        #plt.autoscale(True, 'y', tight = True)
        plt.show()
        #while(1):
        #secondTimes.append(serialData[0])                         #add time stamps to array 'timeValSeconds'
        #floatSecondTimes.append(float(serialData[0])/1000000)     # makes all second times into float from string

        #processedSig.append(serialData[6])                           #add processed signal values to 'processedSig'
        #floatProcessedSig.append(float(serialData[6]))

    def _graph_lsl(self):
        while self.running:
            # initial run
            self.sample, self.timestamp = self.inlet.pull_sample(timeout=5)
            #if self.timeBuffer[0] == 0.0:
            # self.timeBuffer = collections.deque([self.timestamp] * self._bufsize, self._bufsize)
            # time correction to sync to local_clock()
            try:
                if self.timestamp is not None and self.sample is not None:
                    self.timestamp = self.timestamp + self.inlet.time_correction(
                        timeout=5)

            except TimeoutError:
                pass
            self.SecondTimes.append(
                self.timestamp)  #add time stamps to array 'timeValSeconds'
            #print(abs(self.sample[3])/1000)
            self.ProcessedSig.append(
                abs(self.sample[3]) /
                1000)  #add processed signal values to 'processedSig'
            if (abs(self.sample[3] / 1000) > self.maximum):
                self.maximum = abs(self.sample[3] / 1000)
            if (abs(self.sample[3] / 1000) < self.minimum):
                self.minimum = abs(self.sample[3] / 1000)

            self.sampleCount = self.sampleCount + 1
            self.count = self.count + 1
            #plt.show()
            # every 20 samples (~0.08 s at the 256 Hz rate noted below) the plot redraws; change 20 to adjust the refresh rate
            if (self.count % 20 == 0) and (self.count != 0):
                #if(self.count == 20):
                self.count = -1
                self.lineHandle[0].set_ydata(self.ProcessedSig)
                self.lineHandle[0].set_xdata(self.SecondTimes)
                #plt.xlim(0, 5)
                plt.xlim(self.SecondTimes[0], self.SecondTimes[-1])

                plt.ylim(self.minimum - 0.75, self.maximum + 0.75)
                #plt.ylim(0, 20)
                #plt.ylim(0, 10)
                #self.ax.set_autoscaley_on(True)
                #plt.autoscale(enable=True, axis='y', tight=True)
                plt.pause(0.01)

            # keep ~2 s of data on screen (512 samples at the BCI's 256 Hz sampling rate)
            if self.sampleCount >= 511:
                self.ProcessedSig.pop(0)
                self.SecondTimes.pop(0)

        plt.pause(0.01)
        print('closing graphing utility')
        self.inlet.close_stream()

    def start(self):
        #self.lsl_thread = threading.Thread(target=self._graph_lsl)
        #self.lsl_thread.start()
        self._graph_lsl()

    def stop(self):
        self.running = False
        # start() runs _graph_lsl() inline, so there may be no thread to join
        if hasattr(self, 'lsl_thread'):
            self.lsl_thread.join(5)
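# --- Hedged sketch (added): the pull_sample + time_correction pattern in isolation ---
# Both Graph examples map the sender's timestamps into this machine's local_clock()
# domain. This standalone version shows just that step; the stream name 'bci' and the
# 5 s timeouts are assumptions copied from the examples, not requirements of pylsl.
from pylsl import StreamInlet, resolve_byprop, local_clock

def print_local_timestamps(n_samples=10):
    streams = resolve_byprop('name', 'bci', timeout=2.5)
    if not streams:
        raise RuntimeError('No LSL stream named "bci" found.')
    inlet = StreamInlet(streams[0])
    for _ in range(n_samples):
        sample, timestamp = inlet.pull_sample(timeout=5)
        if sample is None:
            continue  # pull timed out, nothing to convert
        offset = inlet.time_correction(timeout=5)  # sender clock -> local_clock() offset
        local_time = timestamp + offset
        print('local time:', local_time, 'age:', local_clock() - local_time)
    inlet.close_stream()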
Exemple #17
0
class Board(object):
    LSL_STREAM_NAME = 'psychopy'

    LSL_BCI_STREAM_NAME = 'bci'
    LSL_BCI_NUM_CHANNELS = 8
    LSL_BCI_SAMPLE_RATE = 0  # 0 declares an irregular rate (pylsl IRREGULAR_RATE)

    def __init__(self):
        # check device manager for correct COM port.
        self.board = bci.OpenBCIBoard(port='COM3',
                                      filter_data=True,
                                      daisy=False)

        # setup LSL
        streams = resolve_byprop('name', self.LSL_STREAM_NAME, timeout=2.5)
        try:
            self.inlet = StreamInlet(streams[0])
        except IndexError:
            raise ValueError('Make sure stream name="%s" is opened first.' %
                             self.LSL_STREAM_NAME)

        self.running = True
        self.samples = []

        info = StreamInfo(self.LSL_BCI_STREAM_NAME, 'eeg',
                          self.LSL_BCI_NUM_CHANNELS, self.LSL_BCI_SAMPLE_RATE,
                          'float32', 'uid2')
        self.outlet = StreamOutlet(info)

    # LSL and BCI samples are synchronized to local_clock(), which is the
    # runtime on this slave, not the host
    def _record_lsl(self):
        while self.running:
            sample, timestamp = self.inlet.pull_sample(timeout=5)

            # time correction to sync to local_clock()
            try:
                if timestamp is not None and sample is not None:
                    timestamp = timestamp + self.inlet.time_correction(
                        timeout=5)

                    samples_lock.acquire()
                    self.samples.append(('STIM', timestamp, sample))
                    samples_lock.release()

            except TimeoutError:
                pass

        print('closing lsl')
        self.inlet.close_stream()

    def _bci_sample(self, sample):
        NUM_CHANNELS = 8
        data = sample.channel_data[0:NUM_CHANNELS]

        samples_lock.acquire()
        self.samples.append(('BCI', local_clock(), data))
        samples_lock.release()

        self.outlet.push_sample(data)

    def _record_bci(self):
        try:
            self.board.start_streaming(self._bci_sample)
        except:
            print(
                'Got a serial exception. Expected behavior if experiment ending.'
            )

    def capture(self):
        self.bci_thread = threading.Thread(target=self._record_bci)
        self.lsl_thread = threading.Thread(target=self._record_lsl)
        self.bci_thread.start()
        self.lsl_thread.start()

    def export_data(self):
        self.board.stop()
        self.board.disconnect()
        self.running = False
        self.bci_thread.join(5)
        self.lsl_thread.join(5)
        print('Joined threads, now outputting BCI data.')

        i = 0
        # recorded_data/BCI is created next to experiment.py (the current directory) if it doesn't exist
        folder_path = os.path.join(os.getcwd(), 'recorded_data', 'BCI')
        if not os.path.exists(folder_path):
            os.makedirs(folder_path)

        file_path = os.path.join(folder_path, 'data-%s.csv')

        while os.path.exists(file_path % i):
            i += 1

        # csv writer with stim_type, msg, and timestamp, then data
        with open(file_path % i, 'w+', newline='') as f:  # newline='' avoids blank rows from csv on Windows
            writer = csv.writer(f)
            writer.writerow(
                ('Signal Type', 'Msg', 'Time', 'Channel 1', 'Channel 2',
                 'Channel 3', 'Channel 4', 'Channel 5', 'Channel 6',
                 'Channel 7', 'Channel 8'))
            for sample in self.samples:
                signal_type, timestamp, datas = sample
                out = (signal_type, 'msg', timestamp)
                for data in datas:
                    out = out + (data, )
                writer.writerow(out)

    def __str__(self):
        return '%s EEG channels' % self.board.getNbEEGChannels()

    def __del__(self):
        self.board.disconnect()
        self.inlet.close_stream()
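# --- Hedged sketch (added): a minimal marker outlet for the Board class above ---
# Board resolves an inlet by name 'psychopy', so the experiment script presumably
# publishes an outlet with that name. This is only an illustration of what such an
# outlet could look like; the channel count, rate and source_id are assumptions.
from pylsl import StreamInfo, StreamOutlet

def make_stim_outlet():
    info = StreamInfo('psychopy', 'Markers', 1, 0, 'int32', 'psychopy_stim_sketch')
    return StreamOutlet(info)

# outlet = make_stim_outlet()
# outlet.push_sample([1])  # e.g. push an event code at stimulus onset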
Exemple #18
0
class InferenceInterface:
    def __init__(self,
                 lsl_data_name=config.INFERENCE_LSL_NAME,
                 lsl_data_type=config.INFERENCE_LSL_TYPE
                 ):  # default board_id 2 for Cyton
        self.lsl_data_type = lsl_data_type
        self.lsl_data_name = lsl_data_name

        # TODO need to change the channel count when adding eeg
        info = StreamInfo(lsl_data_name,
                          lsl_data_type,
                          channel_count=config.EYE_TOTAL_POINTS_PER_INFERENCE,
                          channel_format='float32',
                          source_id='myuid2424')
        info.desc().append_child_value("apocalyvec", "RealityNavigation")

        # chns = info.desc().append_child("eeg_channels")
        # channel_names = ["C3", "C4", "Cz", "FPz", "POz", "CPz", "O1", "O2", '1','2','3','4','5','6','7','8']
        # for label in channel_names:
        #     ch = chns.append_child("channel")
        #     ch.append_child_value("label", label)
        #     ch.append_child_value("unit", "microvolts")
        #     ch.append_child_value("type", "EEG")

        chns = info.desc().append_child("eye")
        channel_names = [
            'left_pupil_diameter_sample', 'right_pupil_diameter_sample'
        ]
        for label in channel_names:
            ch = chns.append_child("channel")
            ch.append_child_value("label", label)
            ch.append_child_value("unit", "mm")
            ch.append_child_value("type", "eye")

        self.outlet = StreamOutlet(info, max_buffered=360)
        self.start_time = local_clock()

        self.inlet = None
        self.connect_inference_result_stream()

    def connect_inference_result_stream(self):
        streams = resolve_byprop('type',
                                 config.INFERENCE_LSL_RESULTS_TYPE,
                                 timeout=1)

        if len(streams) == 0:
            print('No inference stream open.')
        else:  # TODO handle external inference stream lost
            self.inlet = StreamInlet(streams[0])
            self.inlet.open_stream()

    def disconnect_inference_result_stream(self):
        self.inlet.close_stream()

    def send_samples_receive_inference(self, samples_dict):
        """
        receive frames
        :param frames:
        """
        # TODO add EEG
        sample = np.reshape(samples_dict['eye'],
                            newshape=(-1, ))  # flatten out
        sample = sample.tolist()  # have to convert to list for LSL

        # chunk[0][0] = 42.0
        # chunk[0][1] = 24.0

        self.outlet.push_sample(sample)

        if self.inlet:
            inference_results_moving_averaged, timestamps = self.inlet.pull_chunk(
            )
            return inference_results_moving_averaged
        else:
            return sim_inference()
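# --- Hedged sketch (added): the worker side of the request/response pattern above ---
# InferenceInterface pushes flattened samples on one outlet and pulls results back from
# a second stream resolved by type. A matching worker could look like this; the stream
# names/types and the "inference" itself are placeholders, not the values in `config`.
import numpy as np
from pylsl import StreamInfo, StreamOutlet, StreamInlet, resolve_byprop

def run_inference_worker(sample_stream_name='InferenceSamples',
                         result_stream_type='InferenceResults',
                         n_requests=100):
    streams = resolve_byprop('name', sample_stream_name, timeout=5)
    if not streams:
        raise RuntimeError('No sample stream named %s found.' % sample_stream_name)
    inlet = StreamInlet(streams[0])
    out_info = StreamInfo('InferenceWorker', result_stream_type, 1, 0,
                          'float32', 'inference_worker_sketch')
    outlet = StreamOutlet(out_info)
    for _ in range(n_requests):
        sample, _ = inlet.pull_sample(timeout=5)
        if sample is None:
            continue
        result = float(np.mean(sample))  # dummy "inference": mean of the eye samples
        outlet.push_sample([result])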
Exemple #19
0
brain_inlet = StreamInlet(brain_stream[0])
video_inlet = StreamInlet(video_stream[0])
brain_inlet.open_stream()
video_inlet.open_stream()

saver = Saver()
video_info = None
print("While entered")

try:
    timestamp = None
    while True:
        brain_info, timestamp = brain_inlet.pull_sample()
        if video_inlet.samples_available():
            video_info, _ = video_inlet.pull_sample()
            video_info = video_info[0]
        saver.check_reading(video_info)
        saver.save_chunk(video_info)
        if saver.status:
            saver.add_data(timestamp, brain_info, video_info)
        saver.save_data(video_info)
        saver.check_reading_final(video_info)
        video_info = None
except KeyboardInterrupt:
    pass
except Exception as e:
    print(e)
finally:
    # close both inlets on any exit path instead of duplicating the calls per handler
    brain_inlet.close_stream()
    video_inlet.close_stream()
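# --- Hedged sketch (added): the same two-stream merge with a non-blocking pull ---
# Instead of samples_available(), pull_sample(timeout=0.0) returns (None, None) when
# nothing is queued, which gives the same "take the video sample only if one is waiting"
# behaviour. brain_inlet and video_inlet are assumed to be StreamInlet objects as above.
def pull_pair(brain_inlet, video_inlet):
    brain_info, timestamp = brain_inlet.pull_sample()       # blocking pull paces the loop
    video_sample, _ = video_inlet.pull_sample(timeout=0.0)  # non-blocking pull
    video_info = video_sample[0] if video_sample is not None else None
    return brain_info, timestamp, video_info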
Exemple #20
0
class PubSubInterface:
    def __init__(self, lsl_data_type,
                 num_channels):  # default board_id 2 for Cyton
        self.lsl_data_type = lsl_data_type
        self.lsl_num_channels = num_channels
        self._time_dilation = 1

        self._sfreq = 1
        self.subscriber = pubsub_v1.SubscriberClient()
        # The `subscription_path` method creates a fully qualified identifier
        # in the form `projects/{project_id}/subscriptions/{subscription_id}`
        self.subscription_path = self.subscriber.subscription_path(
            'vae-cloud-model', 'test_topic_out-sub')
        self.streaming_pull_future = None

        info = StreamInfo(self.lsl_data_type, 'Pubsub', num_channels, 0.0,
                          'string', 'gcppubsub')

        # next make an outlet
        self.outlet = StreamOutlet(info)
        self.streams = resolve_byprop('name', self.lsl_data_type, timeout=1)
        if len(self.streams) < 1:
            raise AttributeError(
                'Unable to find LSL Stream with given type {0}'.format(
                    lsl_data_type))
        self.inlet = StreamInlet(self.streams[0])
        pass

    def callback(self, message):
        self.outlet.push_sample([message.data.decode()])
        print(f"Received {message}.")
        message.ack()

    def start_sensor(self):
        # connect to the sensor
        self.streams = resolve_byprop('name', self.lsl_data_type, timeout=1)
        if len(self.streams) < 1:
            raise AttributeError(
                'Unable to find LSL Stream with given type {0}'.format(
                    self.lsl_data_type))
        self.inlet = StreamInlet(self.streams[0])
        self.streaming_pull_future = self.subscriber.subscribe(
            self.subscription_path, callback=self.callback)
        self.inlet.open_stream()
        print(
            'LSLInletInterface: resolved, created and opened inlet for lsl stream with type '
            + self.lsl_data_type)

        # read the channel names if there are any
        # tell the sensor to start sending frames

    def process_frames(self):
        # return one or more frames of the sensor
        try:
            frames, timestamps = self.inlet.pull_chunk()
        except LostError:
            frames, timestamps = [], []
            pass  # TODO handle stream lost
        return np.transpose(frames), timestamps

    def stop_sensor(self):
        if self.inlet:
            self.inlet.close_stream()
        print('LSLInletInterface: inlet stream closed.')

    def info(self):
        return self.inlet.info()

    def get_num_chan(self):
        return self.lsl_num_channels

    def get_nominal_srate(self):
        return self.streams[0].nominal_srate()
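# --- Hedged sketch (added): managing the streaming pull that PubSubInterface leaves open ---
# subscriber.subscribe() returns a StreamingPullFuture; the usual pattern is to block on
# result() and cancel it on shutdown. The project and subscription IDs below are the
# placeholders used in __init__ above, not values verified here.
from concurrent.futures import TimeoutError
from google.cloud import pubsub_v1

def run_streaming_pull(project_id='vae-cloud-model',
                       subscription_id='test_topic_out-sub',
                       run_for_s=60.0):
    subscriber = pubsub_v1.SubscriberClient()
    subscription_path = subscriber.subscription_path(project_id, subscription_id)

    def callback(message):
        print('Received', message.data)
        message.ack()

    future = subscriber.subscribe(subscription_path, callback=callback)
    with subscriber:                  # closes the underlying channel on exit
        try:
            future.result(timeout=run_for_s)
        except TimeoutError:
            future.cancel()           # stop the background pull
            future.result()           # wait for the shutdown to finish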
Exemple #21
0
class BallRecorderSix:
    def __init__(self,
                 subject="test",
                 nChannels=31,
                 frequency=512,
                 streamer_type="OpenVibe",
                 channels=None):
        global screen, total_ch, freq, select_ch
        freq = int(frequency)
        total_ch = int(nChannels)
        select_ch = range(1, total_ch + 1)
        channel_names = channels

        self.streamer = str(streamer_type)
        self.name = self.__class__.__name__
        self.running = True
        self.edx = 0
        self.global_time = time.clock()
        self.EEGdata = np.zeros((freq * EEGdata_size, total_ch + 1))

        if (self.streamer == "OpenVibe"):
            print("{}: Looking for an EEG stream...".format(self.name))
            streams = resolve_stream('type', 'signal')
            self.inlet = StreamInlet(streams[0])

        i = 1
        fname = "{}/{}_{}_{}t{}c{}s{}ch_{}".format(folder, date, data_type,
                                                   num_trials,
                                                   num_classes, window_sec,
                                                   len(select_ch), subject)
        self.filename = "{}_{}.txt".format(fname, i)
        while os.path.exists(self.filename):
            i += 1
            self.filename = "{}_{}.txt".format(fname, i)
        self.file = open(self.filename, "w")
        print("{}: Writing to {}".format(self.name, self.filename))

        pygame.init()
        pygame.font.init()
        screen = pygame.display.set_mode([XSCREEN, YSCREEN], pygame.FULLSCREEN)
        always_on_top(False)
        self.output_data = []
        self.output_label = []
        self.class_count = [0] * num_classes

        self.model = FBCSP(augment=False,
                           nChannels=total_ch,
                           frequency=freq,
                           chnames=channel_names)

    @staticmethod
    def draw_train(flag):
        screen.fill((0, 0, 0))
        if flag == 0:
            pygame.draw.rect(screen, (255, 255, 255),
                             (XSCREEN / 2 -
                              (plus_size / 2), YSCREEN / 2 - 3, plus_size, 6))
            pygame.draw.rect(screen, (255, 255, 255),
                             (XSCREEN / 2 - 3, YSCREEN / 2 -
                              (plus_size / 2), 6, plus_size))
        elif flag == 1:
            pygame.draw.polygon(screen, (0, 255, 0),
                                ((XSCREEN / 2 - (plus_size / 2), YSCREEN / 2 -
                                  (plus_size / 2)),
                                 (XSCREEN / 2 + (plus_size / 2), YSCREEN / 2),
                                 (XSCREEN / 2 - (plus_size / 2), YSCREEN / 2 +
                                  (plus_size / 2))))
        elif flag == 2:
            pygame.draw.polygon(screen, (0, 255, 0),
                                ((XSCREEN / 2 - (plus_size / 2), YSCREEN / 2),
                                 (XSCREEN / 2 + (plus_size / 2), YSCREEN / 2 -
                                  (plus_size / 2)),
                                 (XSCREEN / 2 + (plus_size / 2), YSCREEN / 2 +
                                  (plus_size / 2))))
        elif flag == 3:
            pygame.draw.polygon(screen, (0, 255, 0),
                                ((XSCREEN / 2, YSCREEN / 2 - (plus_size / 2)),
                                 (XSCREEN / 2 + (plus_size / 2), YSCREEN / 2 +
                                  (plus_size / 2)),
                                 (XSCREEN / 2 - (plus_size / 2), YSCREEN / 2 +
                                  (plus_size / 2))))

        elif flag == 4:
            pygame.draw.polygon(screen, (255, 0, 0),
                                ((XSCREEN / 2, YSCREEN / 2 - (plus_size / 2)),
                                 (XSCREEN / 2 + (plus_size / 2), YSCREEN / 2 +
                                  (plus_size / 2)),
                                 (XSCREEN / 2 - (plus_size / 2), YSCREEN / 2 +
                                  (plus_size / 2))))

        elif flag == 5:
            pygame.draw.polygon(screen, (255, 255, 255),
                                ((XSCREEN / 2 + (plus_size / 2), YSCREEN / 2 -
                                  (plus_size / 2)),
                                 (XSCREEN / 2, YSCREEN / 2 + (plus_size / 2)),
                                 (XSCREEN / 2 - (plus_size / 2), YSCREEN / 2 -
                                  (plus_size / 2))))
        else:
            assert False
        pygame.display.update()

    @staticmethod
    def draw_test(flag):
        screen.fill((0, 0, 0))
        if flag == 0:
            pygame.draw.rect(screen, (255, 255, 255),
                             (int(XSCREEN / 2) - (plus_size / 2),
                              int(YSCREEN / 2) - 3, plus_size, 6))
            pygame.draw.rect(screen, (255, 255, 255),
                             (int(XSCREEN / 2) - 3, int(YSCREEN / 2) -
                              (plus_size / 2), 6, plus_size))
        elif flag == 1:
            pygame.draw.rect(
                screen, (0, 255, 0),
                (XSCREEN - circle_radius, 0, circle_radius, YSCREEN))
            pygame.draw.circle(screen, (0, 0, 255),
                               (int(XSCREEN / 2), YSCREEN - circle_radius * 2),
                               circle_radius)
        elif flag == 2:
            pygame.draw.rect(screen, (0, 255, 0),
                             (0, 0, circle_radius, YSCREEN))
            pygame.draw.circle(screen, (0, 0, 255),
                               (int(XSCREEN / 2), YSCREEN - circle_radius * 2),
                               circle_radius)
        elif flag == 3:
            pygame.draw.rect(screen, (0, 255, 0),
                             (0, 0, XSCREEN, circle_radius))
            pygame.draw.circle(screen, (0, 0, 255),
                               (int(XSCREEN / 2), YSCREEN - circle_radius * 2),
                               circle_radius)
        elif flag == 4:
            pygame.draw.rect(screen, (255, 0, 0),
                             (0, 0, XSCREEN, circle_radius))
            pygame.draw.circle(screen, (0, 0, 255),
                               (int(XSCREEN / 2), YSCREEN - circle_radius * 2),
                               circle_radius)
        elif flag == 5:
            pygame.draw.rect(screen, (255, 255, 255),
                             (0, 0, XSCREEN, circle_radius))
            pygame.draw.circle(screen, (0, 0, 255),
                               (int(XSCREEN / 2), YSCREEN - circle_radius * 2),
                               circle_radius)
        else:
            assert False
        pygame.display.update()

    @staticmethod
    def update_circle(x, y, z, v_x, v_y, v_z):
        if (v_z == -1):
            pygame.draw.circle(screen, (0, 0, 255), (x, y), circle_radius)
            x += v_x
            y += v_y
            z += v_z
            if (z < 0):
                z = 0

            pygame.draw.polygon(screen, (0, 0, (z // 10)),
                                ((x + (plus_size / 4), y - (plus_size / 4)),
                                 (x, y + (plus_size / 4)),
                                 (x - (plus_size / 4), y - (plus_size / 4))))

            #pygame.draw.circle(screen, (0, 0, (z//10)), (x, y), circle_radius)
        elif (v_z == 1):
            pygame.draw.circle(screen, (0, 0, 255), (x, y), circle_radius)
            x += v_x
            y += v_y
            z += v_z
            if (z > 2550):
                z = 2550

            pygame.draw.polygon(screen, (0, 0, (z // 10)),
                                ((x, y - (plus_size / 4)),
                                 (x + (plus_size / 4), y + (plus_size / 4)),
                                 (x - (plus_size / 4), y + (plus_size / 4))))

            #pygame.draw.circle(screen, (0, 0, (z//10)), (x, y), circle_radius)
        else:
            pygame.draw.circle(screen, (0, 0, 0), (x, y), circle_radius)
            x += v_x
            y += v_y
            z += v_z
            pygame.draw.circle(screen, (0, 0, 255), (x, y), circle_radius)
        pygame.display.update()
        return x, y, z

    def collect_data(self):
        print("{}: Collection starting".format(self.name))
        for i in range(num_trials * num_iteration):
            print(i / num_iteration)
            self.edx = 0
            time.sleep(5)
            flag = class_order[i % num_iteration]
            self.draw_train(flag)
            start_time = time.clock()
            prev_time = 0
            while True:
                current_time = int(math.floor(time.clock() - start_time))
                if current_time >= session_sec:
                    break
                if current_time >= wait_time + window_sec and current_time >= prev_time + stride_sec:
                    prev_time = current_time
                    edx = self.edx
                    selected_eeg = self.EEGdata[(edx -
                                                 (freq * window_sec)):edx,
                                                select_ch]
                    self.output_data.append(selected_eeg.T)
                    self.output_label.append(flag)
                    self.file.write(
                        str(np.ndarray.tolist(selected_eeg)) + '\n')
                    self.file.write(str(flag) + '\n')
                    self.class_count[flag] += 1
            screen.fill((0, 0, 0))
            pygame.display.update()
        print("{}: Collection finished".format(self.name))

    def test_model(self):
        print("{}: Started model testing".format(self.name))
        for t in range(num_tests * 3):
            # collecting direction data
            size_rest, size_right, size_left, size_front, size_up, size_down = self.class_count
            circle_x = int(XSCREEN / 2)
            circle_y = YSCREEN - circle_radius * 2
            circle_z = 1280
            v_x = 0
            v_y = 0
            v_z = 0
            '''
            if size_right <= size_left and size_right <= size_front:
                flag = 1
            elif size_left <= size_right and size_left <= size_front:
                flag = 2
            else:
                flag = 3
            '''
            class_determine = [
                100000000, size_right, size_left, size_front, size_up,
                size_down
            ]
            flag = class_determine.index(min(class_determine))
            self.draw_test(flag)
            self.edx = 0
            start_time = time.clock()
            prev_time = 0
            print("Flag is: {}, class_count is: {}".format(
                flag, self.class_count))
            while circle_radius < circle_x < XSCREEN - circle_radius and circle_y > circle_radius and 0 < circle_z < 2550:
                current_time = int(math.floor(time.clock() - start_time))
                if keyboard.is_pressed('m'):
                    time.sleep(2)
                    break
                if keyboard.is_pressed('p'):
                    self.file.close()
                    time.sleep(3)
                    print("{}: Forced close model testing".format(self.name))
                    return
                if current_time < 3:
                    continue

                circle_x, circle_y, circle_z = self.update_circle(
                    circle_x, circle_y, circle_z, v_x, v_y, v_z)
                time.sleep(0.01)

                if current_time >= prev_time + window_sec:
                    prev_time = current_time
                    edx = self.edx
                    selected_eeg = get_eeg(self.EEGdata,
                                           edx - (freq * window_sec), edx)
                    transformed_eeg = np.asarray(
                        np.transpose(np.asmatrix(selected_eeg)))
                    transformed_eeg = np.asarray([transformed_eeg])
                    print(transformed_eeg)
                    if np.shape(transformed_eeg) != (1, len(select_ch),
                                                     window_sec * freq):
                        print(np.shape(transformed_eeg))
                        assert False

                    predicted_label = self.model.predict(transformed_eeg)
                    print(predicted_label)
                    if predicted_label[0] == 0:
                        v_x = 0
                        v_y = 0
                        v_z = 0
                    elif predicted_label[0] == 1:
                        v_x = 1
                        v_y = 0
                        v_z = 0
                    elif predicted_label[0] == 2:
                        v_x = -1
                        v_y = 0
                        v_z = 0
                    elif predicted_label[0] == 3:
                        v_x = 0
                        v_y = -1
                        v_z = 0
                    elif predicted_label[0] == 4:
                        v_x = 0
                        v_y = 0
                        v_z = +1
                    elif predicted_label[0] == 5:
                        v_x = 0
                        v_y = 0
                        v_z = -1
                    self.output_data.append(selected_eeg.T)
                    self.output_label.append(flag)
                    self.file.write(
                        str(np.ndarray.tolist(selected_eeg)) + '\n')
                    self.file.write(str(flag) + '\n')
                    self.class_count[flag] += 1

            # collecting rest data
            flag = 0
            self.draw_test(flag)
            self.edx = 0
            start_time = time.clock()
            prev_time = 0
            size_rest, size_right, size_left, size_front, size_up, size_down = self.class_count
            while size_rest < min(size_left, size_right, size_front, size_up,
                                  size_down):
                current_time = int(math.floor(time.clock() - start_time))
                if keyboard.is_pressed('m'):
                    time.sleep(2)
                    break
                if keyboard.is_pressed('p'):
                    self.file.close()
                    time.sleep(3)
                    print("{}: Forced close model testing".format(self.name))
                    return

                if current_time >= 3 and current_time >= prev_time + window_sec:
                    prev_time = current_time
                    edx = self.edx
                    selected_eeg = get_eeg(self.EEGdata,
                                           edx - (freq * window_sec), edx)
                    self.output_data.append(selected_eeg.T)
                    self.output_label.append(flag)
                    self.file.write(
                        str(np.ndarray.tolist(selected_eeg)) + '\n')
                    self.file.write(str(flag) + '\n')
                    self.class_count[flag] += 1
                    size_rest, size_right, size_left, size_front, size_up, size_down = self.class_count

            min_data = []
            min_label = []
            min_count = min(self.class_count)
            count = [0] * num_classes
            print("len_output_data: {}, min_count: {}".format(
                len(self.output_data), min_count))
            for j in range(len(self.output_data)):
                if count[self.output_label[j]] >= min_count:
                    continue
                min_data.append(self.output_data[j])
                min_label.append(self.output_label[j])
                count[self.output_label[j]] += 1

            self.model.build_model(min_data, min_label)

    def close_recorder(self):
        self.file.close()
        self.running = False
        if (self.streamer == "OpenVibe"):
            self.inlet.close_stream()
        pygame.display.quit()

    def retrieve_eeg(self):
        if (self.streamer == "Brainvision Recorder"):
            parent_conn, child_conn = Pipe()
            eeg_process = multiprocessing.Process(target=retrieve_eeg_BREC,
                                                  args=(child_conn, ))
            eeg_process.start()

        while self.running:
            if (self.streamer == "Brainvision Recorder"):
                (sample, timestamp) = parent_conn.recv()
            elif (self.streamer == "OpenVibe"):
                sample, timestamp = self.inlet.pull_sample()

            current_time = time.clock() - self.global_time
            self.EEGdata[self.edx % (freq * EEGdata_size), 0] = current_time
            self.EEGdata[self.edx % (freq * EEGdata_size),
                         1:total_ch + 1] = sample
            self.edx = self.edx + 1
            if self.edx >= freq * EEGdata_size:
                self.edx = 0

    def start(self):
        eeg_thrd = threading.Thread(target=self.retrieve_eeg)
        eeg_thrd.daemon = True
        eeg_thrd.start()

        self.collect_data()
        self.model.build_model(self.output_data, self.output_label)
        self.test_model()
        self.close_recorder()

        return self.filename
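# --- Hedged sketch (added): reading a window out of the circular EEG buffer ---
# BallRecorderSix writes samples at index edx % (freq * EEGdata_size), so the plain
# slice EEGdata[edx - n:edx] returns the wrong (or an empty) window right after the
# write index wraps. One way to take the latest n samples with the wrap handled:
import numpy as np

def latest_window(eeg_buffer, write_idx, n_samples):
    """Return the last n_samples rows written into a circular buffer."""
    buf_len = eeg_buffer.shape[0]
    start = (write_idx - n_samples) % buf_len
    if start < write_idx:            # contiguous case
        return eeg_buffer[start:write_idx]
    # wrapped case: stitch the tail of the buffer to its head
    return np.vstack((eeg_buffer[start:], eeg_buffer[:write_idx]))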
Exemple #22
0
            #==========================================================#
            #                 Prep for the Next Trial                  #
            #==========================================================#

            # Reset counter
            sequences_complete = 0
            del epochs
            first_epoch_time = -1
            last_epoch_time = -1
            del mne_events
            del filtered_data
            del mne_raw_data
            events = []

            if args.training_mode:
                # Increment the column
                p300_col = (p300_col + 1) % 6
                if p300_col == 0:
                    # Increment the row when we get to the end of a row
                    p300_row = (p300_row + 1) % 6
                # Send these over the pipe to the GUI
                main_conn.send(["train", (p300_row, p300_col)])
                
    #==========================================================#
    #                 Disconnect LSL Stream                    #
    #==========================================================#

    if args.live_mode or args.training_mode:
        inlet.close_stream()
        if args.verbose:
            print "Closed data stream."
Exemple #23
0
class FilterbankInterface:
    def __init__(self,
                 filenames,
                 nChannels=31,
                 frequency=512,
                 streamer_type="OpenVibe",
                 channels=None,
                 subject=None):
        global total_ch, freq, select_ch
        total_ch = int(nChannels)
        freq = int(frequency)
        select_ch = range(1, total_ch + 1)
        self.name = self.__class__.__name__
        self.running = True
        self.edx = 0
        self.global_time = time.clock()
        channel_names = channels

        self.streamer = str(streamer_type)

        if (self.streamer == "OpenVibe"):
            print("{}: Looking for an EEG stream...".format(self.name))
            streams = resolve_stream('type', 'signal')
            self.inlet = StreamInlet(streams[0])

        i = 1
        fname = "{}/{}_{}_{}s{}ch_{}".format(folder, date,
                                             data_type, window_sec,
                                             len(select_ch), subject)
        self.filename = "{}_{}.txt".format(fname, i)
        while os.path.exists(self.filename):
            i += 1
            self.filename = "{}_{}.txt".format(fname, i)
        self.file = open(self.filename, "w")
        print("{}: Writing to {}".format(self.name, self.filename))

        self.filenames = filenames
        file = open(filenames[0], "r")
        lines = file.readlines()
        self.total_ch = len(lines[0].split('],')[0].split(','))
        self.window_size = len(lines[0].split('],'))
        self.window_sec = int(self.window_size / freq)
        file.close()

        self.EEGdata = np.zeros((freq * EEGdata_size, self.total_ch + 1))
        self.model = FBCSP(nChannels=self.total_ch,
                           frequency=freq,
                           chnames=channel_names)

        self.buffer = Queue.Queue()

    def make_model(self):
        print("{}: Making model".format(self.name))

        output_data = []
        output_label = []
        for fname in self.filenames:
            file = open(fname, 'r')
            lines = file.readlines()
            np_arr = []
            for line in lines:
                tokens = line.split(',')
                if len(tokens) > 1:
                    tokens = [float(i.strip(" [],\n")) for i in tokens]
                    np_arr = np.array(tokens).reshape((-1, self.total_ch))
                else:
                    trial_num = int(tokens[0])
                    if trial_num == -2:
                        continue
                    output_data.append(np_arr)
                    output_label.append(trial_num)
            file.close()

        output_data = np.transpose(output_data, [0, 2, 1])
        output_label = np.array(output_label)
        self.model.build_model(output_data, output_label)

    def receive_commands(self):
        print("{}: Receiving commands".format(self.name))
        self.edx = 0
        prev_time = 0
        start_time = time.clock()
        predicted_before = [1000]
        predicted_label = [1000]
        while self.running:
            current_time = int(math.floor(time.clock() - start_time))
            if current_time <= wait_time + action_time:
                continue
            time.sleep(0.01)

            if (current_time >= prev_time + window_sec):
                prev_time = current_time
                edx = self.edx
                selected_eeg = get_eeg(self.EEGdata,
                                       edx - (freq * self.window_sec), edx)
                self.file.write(str(np.ndarray.tolist(selected_eeg.T)) + '\n')

                selected_eeg = np.asarray(
                    np.transpose(np.asmatrix(selected_eeg)))
                transformed_eeg = np.asarray([selected_eeg])
                predicted_before = predicted_label
                predicted_label = self.model.predict(transformed_eeg)
                print(predicted_label)
                # self.buffer.put(predicted_label[0])
                if (predicted_label[0] == 0 and predicted_before[0] == 0):
                    self.buffer.put('r')
                if (predicted_label[0] == 1 and predicted_before[0] == 1):
                    self.buffer.put('l')
                if (predicted_label[0] == 2 and predicted_before[0] == 2):
                    self.buffer.put('j')
                if (predicted_label[0] == 3 and predicted_before[0] == 3):
                    self.buffer.put('i')

                if (predicted_label[0] == 4 and predicted_before[0] == 4):
                    self.buffer.put('w')
                if (predicted_label[0] == 5 and predicted_before[0] == 5):
                    self.buffer.put('s')

    def retrieve_eeg(self):
        global total_ch
        if (self.streamer == "Brainvision Recorder"):
            parent_conn, child_conn = Pipe()
            eeg_process = multiprocessing.Process(target=retrieve_eeg_BREC,
                                                  args=(child_conn, ))
            eeg_process.start()
        sample = None
        while self.running:
            if (self.streamer == "Brainvision Recorder"):
                (sample, timestamp) = parent_conn.recv()
            elif (self.streamer == "OpenVibe"):
                sample, timestamp = self.inlet.pull_sample()
            if sample is None or len(sample) != total_ch:
                continue
            current_time = time.clock() - self.global_time
            self.EEGdata[self.edx % (freq * EEGdata_size), 0] = current_time
            self.EEGdata[self.edx % (freq * EEGdata_size),
                         1:total_ch + 1] = sample
            self.edx = self.edx + 1
            if self.edx >= freq * EEGdata_size:
                self.edx = 0

    def has_command(self):
        return not self.buffer.empty()

    def get_command(self):
        command = self.buffer.get()
        return command

    def end_operation(self):
        self.running = False
        self.inlet.close_stream()

    def start(self):

        eeg_thrd = threading.Thread(target=self.retrieve_eeg)
        eeg_thrd.daemon = True
        eeg_thrd.start()

        self.make_model()

        input_thrd = threading.Thread(target=self.receive_commands)
        input_thrd.daemon = True
        input_thrd.start()
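# --- Hedged sketch (added): the "two identical predictions in a row" debounce, isolated ---
# receive_commands() only queues a key when the current and previous predictions agree.
# The same idea as a small reusable helper (the label-to-key mapping is copied from above):
LABEL_TO_KEY = {0: 'r', 1: 'l', 2: 'j', 3: 'i', 4: 'w', 5: 's'}

def debounced_command(prev_label, new_label):
    """Return (key_or_None, new_label) so the caller can carry the state forward."""
    key = LABEL_TO_KEY.get(new_label) if new_label == prev_label else None
    return key, new_label

# prev = None
# key, prev = debounced_command(prev, predicted_label[0])
# if key is not None:
#     command_buffer.put(key)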
Exemple #24
0
class Lsl():
    def recv_data_unspecified_OS_stream(self):
        # Resolve an available OpenSignals stream
        print("# Looking for an available OpenSignals stream...")
        self.os_stream = resolve_stream("name", "OpenSignals")

        # Create an inlet to receive signal samples from the stream
        self.inlet = StreamInlet(self.os_stream[0])

        try:
            while True:
                # Receive samples
                sample, timestamp = self.inlet.pull_sample()
                print(timestamp, sample)
        except KeyboardInterrupt:
            self.inlet.close_stream()

    def recv_data_PLUX_device(self, mac_address):
        # Resolve stream
        print(
            "# Looking for an available OpenSignals stream from the specified device..."
        )
        self.os_stream = resolve_stream("type", mac_address)

        # Create an inlet to receive signal samples from the stream
        self.inlet = StreamInlet(self.os_stream[0])

        try:
            while True:
                # Receive samples
                samples, timestamp = self.inlet.pull_sample()
                print(timestamp, samples)
        except KeyboardInterrupt:
            self.inlet.close_stream()

    def recv_data_host(self, hostname):
        # Resolve stream
        print(
            "# Looking for an available OpenSignals stream from the specified host..."
        )
        self.os_stream = resolve_stream("hostname", hostname)

        # Create an inlet to receive signal samples from the stream
        self.inlet = StreamInlet(self.os_stream[0])

        try:
            while True:
                # Receive samples
                samples, timestamp = self.inlet.pull_sample()
                print(timestamp, samples)
        except KeyboardInterrupt:
            self.inlet.close_stream()

    def recv_stream_metadata(self):
        # Get information about the stream
        self.stream_info = self.inlet.info()

        # Get individual attributes
        stream_name = self.stream_info.name()
        stream_mac = self.stream_info.type()
        stream_host = self.stream_info.hostname()
        stream_n_channels = self.stream_info.channel_count()

        # Store sensor channel info & units in the dictionary
        stream_channels = dict()
        channels = self.stream_info.desc().child("channels").child("channel")

        # Loop through all available channels
        for i in range(stream_n_channels - 1):

            # Get the channel number (e.g. 1)
            channel = i + 1

            # Get the channel type (e.g. ECG)
            sensor = channels.child_value("sensor")

            # Get the channel unit (e.g. mV)
            unit = channels.child_value("unit")

            # Store the information in the stream_channels dictionary
            stream_channels.update({channel: [sensor, unit]})
            channels = channels.next_sibling()
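# --- Hedged sketch (added): the outlet-side metadata that recv_stream_metadata() reads ---
# The reader above walks desc/channels/channel and pulls "sensor" and "unit" values, so
# an OpenSignals-like sender would attach per-channel metadata roughly like this. The
# stream name/type, rate and sensor list are illustrative assumptions only.
from pylsl import StreamInfo, StreamOutlet

def make_annotated_outlet():
    info = StreamInfo('OpenSignals', '00:00:00:00:00:00', 2, 1000, 'float32', 'plux_sketch')
    chans = info.desc().append_child('channels')
    for sensor, unit in (('ECG', 'mV'), ('EDA', 'uS')):
        ch = chans.append_child('channel')
        ch.append_child_value('sensor', sensor)
        ch.append_child_value('unit', unit)
    return StreamOutlet(info)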
Exemple #25
0
class SimulationInterface:
    def __init__(self, lsl_data_type, num_channels,
                 sampling_rate):  # default board_id 2 for Cyton
        self.lsl_data_type = lsl_data_type
        self.lsl_num_channels = num_channels
        self.sampling_rate = sampling_rate
        with open('data/s01.dat', "rb") as f:
            deap_data = pickle.load(f, encoding="latin1")
        deap_data = np.array(deap_data['data'])
        # flatten so we have a continuous stream
        self.deap_data = deap_data.reshape(
            deap_data.shape[1], deap_data.shape[0] * deap_data.shape[2])
        self.dreader = None
        self.stream_process = None
        info = StreamInfo('DEAP Simulation', 'EEG', num_channels,
                          self.sampling_rate, 'float32', 'deapcontinuous')
        self.outlet = StreamOutlet(info, 32, 360)
        self.streams = resolve_byprop('name', self.lsl_data_type, timeout=1)
        if len(self.streams) < 1:
            raise AttributeError(
                'Unable to find LSL Stream with given type {0}'.format(
                    lsl_data_type))
        self.inlet = StreamInlet(self.streams[0])
        pass

    def start_sensor(self):
        # connect to the sensor
        self.dreader = DEAPReader(self.sampling_rate)
        self.stream_process = threading.Thread(target=self.dreader.run,
                                               args=(self.deap_data,
                                                     self.outlet))

        self.stream_process.start()
        self.streams = resolve_byprop('name', self.lsl_data_type, timeout=1)
        if len(self.streams) < 1:
            raise AttributeError(
                'Unable to find LSL Stream with given type {0}'.format(
                    self.lsl_data_type))
        self.inlet = StreamInlet(self.streams[0])
        self.inlet.open_stream()
        print(
            'LSLInletInterface: resolved, created and opened inlet for lsl stream with type '
            + self.lsl_data_type)

        # read the channel names if there are any
        # tell the sensor to start sending frames

    def process_frames(self):
        # return one or more frames of the sensor
        try:
            frames, timestamps = self.inlet.pull_chunk()
        except LostError:
            frames, timestamps = [], []
            pass  # TODO handle stream lost
        return np.transpose(frames), timestamps

    def stop_sensor(self):
        self.dreader.terminate()
        if self.inlet:
            self.inlet.close_stream()
        print('LSLInletInterface: inlet stream closed.')

    def info(self):
        return self.inlet.info()

    def get_num_chan(self):
        return self.lsl_num_channels

    def get_nominal_srate(self):
        return self.streams[0].nominal_srate()
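# --- Hedged sketch (added): what the DEAPReader used above might look like ---
# SimulationInterface hands DEAPReader the flattened DEAP array and the outlet, so a
# plausible run() pushes one multi-channel column per tick at the nominal rate. This is
# an assumption about DEAPReader's behaviour, not its actual implementation.
import time

class DEAPReaderSketch:
    def __init__(self, sampling_rate):
        self.sampling_rate = sampling_rate
        self.running = True

    def run(self, deap_data, outlet):
        period = 1.0 / self.sampling_rate
        col = 0
        while self.running and col < deap_data.shape[1]:
            outlet.push_sample(deap_data[:, col].tolist())  # one sample across all channels
            col += 1
            time.sleep(period)

    def terminate(self):
        self.running = False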
Exemple #26
0
class Graph(object):
    def __init__(self, size=(600, 350)):
        self.running = True
        self.ProcessedSig = []
        self.SecondTimes = []
        self.count = -1

        plt.ion()
        # plt.hold() was removed in Matplotlib 2.x/3.0; the line is updated via set_data below instead
        self.lineHandle = plt.plot(self.SecondTimes, self.ProcessedSig)
        plt.title("Streaming Live EMG Data")
        plt.xlabel('Time (s)')
        plt.ylabel('Volts')
        plt.show()

    def _graph_lsl(self):
        print('checking if stream has been initialized')
        self.streams = resolve_byprop('name', 'bci', timeout=2.5)
        try:
            self.inlet = StreamInlet(self.streams[0])
        except IndexError:
            raise ValueError('Make sure stream name=bci is opened first.')
        while self.running:
            # initial run
            self.sample, self.timestamp = self.inlet.pull_sample(timeout=5)
            # time correction to sync to local_clock()
            try:
                if self.timestamp is not None and self.sample is not None:
                    self.timestamp = self.timestamp + self.inlet.time_correction(
                        timeout=5)

            except TimeoutError:
                pass
            self.SecondTimes.append(
                self.sample[1])  #add time stamps to array 'timeValSeconds'
            self.ProcessedSig.append(
                self.sample[0])  #add processed signal values to 'processedSig'

            self.count = self.count + 1

            # every 20 samples (roughly every 0.10 s) the plot redraws
            if (self.count % 20 == 0) and (self.count != 0):
                self.lineHandle[0].set_ydata(self.ProcessedSig)
                self.lineHandle[0].set_xdata(self.SecondTimes)
                #plt.xlim(0, 5)
                plt.xlim(self.SecondTimes[0], self.SecondTimes[-1])
                plt.ylim(0, 10)
                plt.pause(0.01)

            if (self.count >= 399):
                self.ProcessedSig.pop(0)
                self.SecondTimes.pop(0)

        plt.pause(0.01)
        print('closing graphing utility')
        self.inlet.close_stream()

    def start(self):
        self.lsl_data = threading.Thread(target=random_lsl.start)
        #self.lsl_thread = threading.Thread(target=self._graph_lsl)
        self.lsl_data.start()
        print('lsl data stream has started')
        time.sleep(6)
        #self.lsl_thread.start()
        print('graphing will begin')
        self._graph_lsl()

    def stop(self):
        self.running = False
        # start() runs _graph_lsl() inline, so there may be no graphing thread to join
        if hasattr(self, 'lsl_thread'):
            self.lsl_thread.join(5)