def testing():
    """Smoke-test an LSL EEG stream served by a ble2lsl dummy device.

    Starts a dummy Muse 2016 streamer, resolves the resulting EEG stream
    on the local network, prints inlet metadata and one sample, then
    prints chunk shapes/relative timestamps until interrupted (Ctrl-C),
    and finally tears everything down.

    NOTE(review): relies on module-level imports not visible in this
    chunk (ble2lsl, muse2016, resolve_byprop, StreamInlet, time, np,
    plt, pprint) -- confirm they exist at the top of the file.
    """
    dummy_streamer = ble2lsl.Dummy(muse2016)
    try:
        # type: EEG, minimum return streams = 1, timeout after 5 seconds.
        # This resolve was commented out in the original, which left
        # `streams` undefined and crashed the StreamInlet call below.
        streams = resolve_byprop("type", "EEG", timeout=5)

        # Grab first stream from streams, MUSE chunk 12, drop lost stream.
        streamIn = StreamInlet(streams[0], max_chunklen=12, recover=True)
        print(streamIn)
        print(streamIn.info().channel_count())

        # This actually isn't required: pull_sample() and pull_chunk()
        # implicitly open the stream.  But it's good to be explicit
        # because it makes the code clearer.
        streamIn.open_stream()

        print("Pull Sample")
        # pull_sample() returns a tuple: ([channel values], timestamp) --
        # a snapshot of our stream at a certain point in time.
        print(streamIn.pull_sample())

        print("Pull Chunk")
        plt.style.use('ggplot')
        ts = time.time()
        try:
            while True:
                # data first then time stamps
                x = streamIn.pull_chunk()
                if all(x):  # skip empty ([], []) pulls
                    print(np.shape(x))
                    print(np.shape(x[1]))
                    # Timestamps relative to the start of the chunk loop.
                    t = [stamp - ts for stamp in x[1]]
                    print(t)
                    print(t[-1] - t[0])  # time span covered by this chunk
        except KeyboardInterrupt:
            # Ctrl-C ends the otherwise-endless chunk loop so the
            # cleanup below is actually reachable (it wasn't originally).
            pass

        pprint(streamIn.info().as_xml())
        timeC = streamIn.time_correction()
        print(timeC)

        # Clean up.
        streams.clear()
        streamIn.close_stream()  # calls lsl_close_stream
        del streamIn  # drop our reference instead of calling __del__() directly
    finally:
        dummy_streamer.stop()
class LslStream(object):
    """ This class creates the basic connection between the computer and a
    Lab Streaming Layer data stream. With it, connecting is made simpler and
    pulling and processing information directly is made trivial.

    METHODS:
        __init__(**stream_info): Initiates a connection when the class is called
        connect(max_buflen, **stream_info): Connects to a data stream in the
            network defined by the keyword args
        pull(**kwargs): Pulls a sample from the connected data stream
        init_pull(**kwargs): Sacrificial first pull to get the stream going
        chunk(**kwargs): Pulls a chunk of samples from the data stream
        mark_check(**kwargs): Prints how many samples are waiting in the inlet
        inlet_del(**kwargs): Forcibly destroys the underlying inlet

    ATTRIBUTES:
        streams: List of found LSL streams in the network
        inlet: Stream inlet used to pull data from the stream
        metainfo: Metadata from the stream
    """

    def __init__(self, **stream_info):
        # Connection happens eagerly at construction time.
        self.connect(**stream_info)

    def connect(self, max_buflen=938, **stream_info):
        """ Connect to an LSL data stream on the network.

        Accepts keyword arguments that define the data stream we are
        searching for. Normally this would be (use keywords given between
        quotes as key for the argument) 'name' (e.g. 'Cognionics Quick-20'),
        'type' (e.g. 'EEG'), 'channels' (e.g. 8), 'freq' (from frequency,
        e.g. 500), 'dtype' (type of data, e.g. 'float32'),
        'serialn' (e.g. 'quick_20').

        The stream is resolved on the network and an inlet is opened on the
        first match (be as specific as possible if many LSL instances are
        running in the lab). Metadata is kept so the user can check the
        connection is the right one.

        INPUT:
            max_buflen: Maximum inlet buffer size passed to StreamInlet.
                Kept at the previously hard-coded value 938 as the default.
                (NOTE(review): pylsl interprets this in seconds when the
                stream has a nominal rate -- confirm 938 is intentional.)
            **stream_info: Keyword arguments defining the data stream

        RELATED ATTRIBUTES: streams, inlet, metainfo
        """
        # Flatten the known properties into [key1, val1, key2, val2, ...]
        # as expected by resolve_stream. Knowing as much as possible helps
        # if more than one kit is running LSL at the same time.
        stream_info_list = []
        for key, val in stream_info.items():
            stream_info_list.extend((key, val))
        # Resolve the stream from the lab network
        self.streams = resolve_stream(*stream_info_list)
        # Create a new inlet to read from the (first matching) stream
        self.inlet = StreamInlet(self.streams[0], max_buflen=max_buflen)
        # Get stream information (including custom meta-data)
        self.metainfo = self.inlet.info()

    def pull(self, **kwargs):
        """ Pull a single sample from the connected stream.

        INPUT:
            kwargs: Extra specifications for the pull (e.g. timeout)
        OUTPUT:
            Tuple of ([channel values], timestamp) from the stream
        """
        return self.inlet.pull_sample(**kwargs)

    def init_pull(self, **kwargs):
        """ Sacrificial initialization pull to get the streaming going.

        Pulls one sample and prints it rather than returning it.

        INPUT:
            kwargs: Extra specifications for the pull (e.g. timeout)
        """
        # Typo fixed in the printed message ("Initializarion").
        print('Marker Stream Initialization Pull: ',
              self.inlet.pull_sample(**kwargs))

    def chunk(self, **kwargs):
        """ Pull a chunk of samples. Uses the same formatting as .pull.

        OUTPUT:
            Tuple of ([[channel values], ...], [timestamps]) from the stream
        """
        return self.inlet.pull_chunk(**kwargs)

    def mark_check(self, **kwargs):
        """ Print the number of samples currently available in the inlet. """
        ava_markers = self.inlet.samples_available()
        print('Available Markers: ', ava_markers)

    def inlet_del(self, **kwargs):
        """ Forcibly destroy the underlying inlet.

        NOTE(review): this calls __del__ directly (original behavior,
        preserved); the attribute still exists afterwards, so the inlet
        must not be used again.
        """
        self.inlet.__del__()
""" """ Pull chunk or recording: """ if online: # Online: try: RRi, Ti = RRInlet.pull_chunk(timeout=1.5) # lists of lists # of format: # RRi = [[rr,], [rr,], [rr,], [rr,],...] # Ti = [t, t, t, t,...] # RRi, Ti == [], None if timeout reached with no sample found if RRi == []: print "No more samples to pull..." RRInlet.__del__() break except WindowsError: # skip iteration and try again # print "Windows access exception on loop", i i += 1 continue if RRi != []: for n in xrange(len(RRi)): # each received sample in the chunk RRi[n] = (RRi[n][0] + 2**15 ) #/ 1000.0 # Compensate for Faros silliness # RR = RR.append(pd.Series(RRi, index=Ti)) else: # skip iteration and try again