def __init__(self, device, processor=None, buffer_name='buffer.db',
             clock=None, delete_archive=True):
    """Initialize the acquisition client.

    Parameters
    ----------
    device : Device instance
        Device-specific implementation used for connecting and reading packets.
    processor : Processor, optional
        Consumer of the streaming data; defaults to a FileWriter targeting
        'rawdata.csv'.
    buffer_name : str, optional
        Name of the sqlite archive; default 'buffer.db'.
    clock : Clock, optional
        Clock used to timestamp each record; defaults to a CountClock.
    delete_archive : bool, optional
        If True (default), delete the sqlite archive on cleanup.
    """
    # NOTE: processor/clock previously used call-expression defaults
    # (FileWriter(...), CountClock()), which are evaluated once at function
    # definition and shared by every instance. Using None sentinels gives
    # each client its own fresh processor/clock.
    self._device = device
    self._processor = processor if processor is not None \
        else FileWriter(filename='rawdata.csv')
    self._buffer_name = buffer_name
    self._clock = clock if clock is not None else CountClock()
    # boolean; set to false to retain the sqlite db.
    self.delete_archive = delete_archive

    self._device_info = None  # set on start.
    self._is_streaming = False

    # Offset in seconds from the start of acquisition to calibration
    # trigger. Calculated once, then cached.
    self._cached_offset = None
    self._record_at_calib = None
    self._max_wait = 0.1  # for process loop

    # Max number of records in queue before it blocks for processing.
    maxsize = 500
    self._process_queue = multiprocessing.JoinableQueue(maxsize=maxsize)
    self.marker_writer = NullMarkerWriter()
    self._acq_process = None
    self._data_processor = None
    self._buf = None
def __init__(self, connector, buffer_name='raw_data.db',
             raw_data_file_name='raw_data.csv', clock=None,
             delete_archive=True):
    """Initialize the acquisition client.

    Parameters
    ----------
    connector : Connector instance
        Device-specific implementation for connecting, initializing, and
        reading a packet.
    buffer_name : str, optional
        Name of the sqlite archive; default 'raw_data.db'.
    raw_data_file_name : str, optional
        Name of the raw data csv output file; if falsy, raw data is not
        written on stop.
    clock : Clock, optional
        Clock used to timestamp each record; defaults to a CountClock.
    delete_archive : bool, optional
        If True (default), delete the sqlite archive on cleanup.
    """
    # NOTE: clock previously defaulted to CountClock(), which is evaluated
    # once at function definition and shared by every instance; a None
    # sentinel gives each client its own clock.
    self._connector = connector
    self._buffer_name = buffer_name
    self._raw_data_file_name = raw_data_file_name
    self._clock = clock if clock is not None else CountClock()
    # boolean; set to false to retain the sqlite db.
    self.delete_archive = delete_archive

    self._device_info = None  # set on start.
    self._is_streaming = False

    # Offset in seconds from the start of acquisition to calibration
    # trigger. Calculated once, then cached.
    self._cached_offset = None
    self._record_at_calib = None
    self._max_wait = 0.1  # for process loop

    self.marker_writer = NullMarkerWriter()
    # column in the acquisition data used to calculate offset.
    self.trigger_column = TRIGGER_COLUMN_NAME
    self._acq_process = None
    self._buf = None
def stop_acquisition(self):
    """Stop acquiring data; perform cleanup.

    Stops and joins the acquisition process (if running), resets the
    marker writer, and, when configured, dumps the buffered data to the
    raw data csv file. Safe to call before start_acquisition or more
    than once.
    """
    log.debug("Stopping Acquisition Process")
    self._is_streaming = False
    # Guard: _acq_process is None until start_acquisition has run;
    # stopping an un-started client should be a no-op, not a crash.
    if self._acq_process:
        self._acq_process.stop()
        self._acq_process.join()
        self._acq_process = None
    self.marker_writer.cleanup()
    self.marker_writer = NullMarkerWriter()
    # Write the accumulated records out before the buffer is discarded.
    if self._raw_data_file_name and self._buf:
        buffer_server.dump_data(self._buf, self._raw_data_file_name,
                                self.device_info.name, self.device_info.fs)
def stop_acquisition(self):
    """Stop acquiring data; perform cleanup.

    Stops and joins the acquisition process (if running), drains the
    processing queue, stops the data processor, and resets the marker
    writer. Safe to call before start_acquisition or more than once.
    """
    log.debug("Stopping Acquisition Process")
    self._is_streaming = False
    # Guard: _acq_process is None until start_acquisition has run;
    # stopping an un-started client should be a no-op, not a crash.
    if self._acq_process:
        self._acq_process.stop()
        self._acq_process.join()
        self._acq_process = None

    log.debug("Stopping Processing Queue")
    # Blocks until all data in the queue is consumed.
    self._process_queue.join()
    if self._data_processor:
        self._data_processor.stop()
        self._data_processor = None

    self.marker_writer.cleanup()
    self.marker_writer = NullMarkerWriter()
def start_acquisition(self):
    """Run the initialization code and start the loop to acquire data
    from the server.

    We use multiprocessing to achieve best performance during our sessions.

    Sequencing matters here: the marker writer is created before the
    acquisition process starts (so the device can listen for it), the
    clock is reset in the main thread, and the buffer/processor are only
    created after the device has reported its device_info.

    Some references:
        Stopping processes and other great multiprocessing examples:
        https://pymotw.com/2/multiprocessing/communication.html
        Windows vs. Unix Process Differences:
        https://docs.python.org/2.7/library/multiprocessing.html#windows
    """
    if not self._is_streaming:
        log.debug("Starting Acquisition")
        # Bidirectional channel used for the startup handshake.
        msg_queue = multiprocessing.Queue()

        # Initialize the marker streams before the device connection so the
        # device can start listening.
        # TODO: Should this be a property of the device?
        if self._device.name == 'LSL':
            self.marker_writer = LslMarkerWriter()

        # Clock is copied, so reset should happen in the main thread.
        self._clock.reset()

        self._acq_process = AcquisitionProcess(self._device, self._clock,
                                               self._process_queue,
                                               msg_queue)
        self._acq_process.start()

        # Block thread until device connects and returns device_info.
        msg_type, msg = msg_queue.get()
        if msg_type == MSG_DEVICE_INFO:
            self._device_info = msg
        elif msg_type == MSG_ERROR:
            raise Exception("Error connecting to device")
        else:
            raise Exception("Message not understood: " + str(msg))

        # Initialize the buffer and processor; this occurs after the
        # device initialization to ensure that any device parameters have
        # been updated as needed.
        self._processor.set_device_info(self._device_info)
        self._buf = buffer_server.start(self._device_info.channels,
                                        self._buffer_name)
        self._data_processor = DataProcessor(data_queue=self._process_queue,
                                             msg_queue=msg_queue,
                                             processor=self._processor,
                                             buf=self._buf,
                                             wait=self._max_wait)
        self._data_processor.start()

        # Block until processor has initialized.
        msg_type, msg = msg_queue.get()
        self._is_streaming = True
def start_acquisition(self):
    """Run the initialization code and start the loop to acquire data
    from the server.

    We use multiprocessing to achieve best performance during our sessions.

    Sequencing matters here: the marker writer is created before the
    acquisition process starts (so the device can listen for it), the
    clock is reset in the main thread, and the database server is only
    started after the device has reported its device_info (which
    determines the channels).

    Some references:
        Stopping processes and other great multiprocessing examples:
        https://pymotw.com/2/multiprocessing/communication.html
        Windows vs. Unix Process Differences:
        https://docs.python.org/2.7/library/multiprocessing.html#windows
    """
    if not self._is_streaming:
        log.debug("Starting Acquisition")
        # Channel used for the startup handshake with the acquisition
        # process.
        msg_queue = Queue()

        # Initialize the marker streams before the device connection so the
        # device can start listening.
        # TODO: Should this be a property of the device?
        if self._device.name == 'LSL':
            self.marker_writer = LslMarkerWriter()

        # Clock is copied, so reset should happen in the main thread.
        self._clock.reset()

        # Used to communicate with the database from both the main thread
        # as well as the acquisition thread.
        self._buf = buffer_server.new_mailbox()

        self._acq_process = AcquisitionProcess(device=self._device,
                                               clock=self._clock,
                                               buf=self._buf,
                                               msg_queue=msg_queue)
        self._acq_process.start()

        # Block thread until device connects and returns device_info.
        msg_type, msg = msg_queue.get()
        if msg_type == MSG_DEVICE_INFO:
            self._device_info = msg
            log.info("Connected to device")
            log.info(msg)
        elif msg_type == MSG_ERROR:
            raise Exception("Error connecting to device")
        else:
            raise Exception("Message not understood: " + str(msg))

        # Start up the database server
        buffer_server.start_server(self._buf, self._device_info.channels,
                                   self._buffer_name)
        # Inform acquisition process that database server is ready
        msg_queue.put(True)
        msg_queue = None
        self._is_streaming = True
def configure_connector(self) -> None:
    """Prepare the device connector before acquisition begins.

    When the connector supports LSL and streams EEG content, enable its
    marker streams, rename any device channel that collides with the
    trigger column, and create an LSL marker writer named after that
    column.
    """
    connector = self._connector
    lsl_supported = connector.__class__.supports(connector.device_spec,
                                                 ConnectionMethod.LSL)
    # Only LSL-capable EEG connectors need marker-stream setup.
    if not (lsl_supported and connector.device_spec.content_type == 'EEG'):
        return

    log.debug("Initializing LSL Marker Writer.")
    connector.include_marker_streams = True
    # If there are any device channels with the same name as the
    # offset_column ('TRG'), rename these to avoid conflicts.
    connector.rename_rules[
        self.trigger_column] = f"{self.trigger_column}_device_stream"
    # Initialize the marker streams before the device connection
    # (name it 'TRG').
    self.marker_writer = LslMarkerWriter(stream_name=self.trigger_column)
def __init__(self, window: visual.Window, static_clock,
             experiment_clock: core.Clock,
             marker_writer: Optional[MarkerWriter] = None,
             task_color: List[str] = ['white'],
             task_font: str = 'Times',
             task_pos: Tuple[float, float] = (-.8, .9),
             task_height: float = 0.2,
             task_text: str = '1/100',
             info_color: List[str] = ['white'],
             info_text: List[str] = ['Information Text'],
             info_font: List[str] = ['Times'],
             info_pos=[(.8, .9)],
             info_height=[0.2],
             stim_font='Times',
             stim_pos=(-.8, .9),
             stim_height=0.2,
             stim_sequence: List[str] = ['a'] * 10,
             stim_colors: List[str] = ['white'] * 10,
             stim_timing: List[float] = [1] * 10,
             is_txt_stim: bool = True,
             static_time: float = .05,
             trigger_type: str = 'image',
             space_char: SPACE_CHAR = SPACE_CHAR):
    """Initialize RSVP window parameters and objects.

    PARAMETERS:
    ----------
    # Experiment
    window(visual.Window): PsychoPy Window
    static_clock: stored as self.staticPeriod; presumably a
        core.StaticPeriod used for timed static intervals — TODO confirm
        against callers.
    experiment_clock(core.Clock): Clock used to timestamp experiment
    marker_writer(MarkerWriter): object used to write triggers to
        the daq stream; a NullMarkerWriter is substituted when None.

    # Task
    task_color(list[string]): Color of the task string. Shares the
        length of the task_text. If of length 1 the entire task bar
        shares the same color.
    task_font(string): Font of task string
    task_pos(tuple): position of task string
    task_height(float): height for task string
    task_text(string): text of the task bar

    # Info
    info_text(list[string]): Text list for information texts
    info_color(list[string]): Color of the information text string
    info_font(list[string]): Font of the information text string
    info_pos(list[tuple]): Position of the information text string
    info_height(list[float]): Height of the information text string

    # Stimuli
    stim_height(float): height of the stimuli object
    stim_pos(tuple): position of stimuli
    stim_font(string): font of the stimuli
    stim_sequence(list[string]): list of elements to flash
    stim_colors(list[string]): list of colors for stimuli
    stim_timing(list[float]): timing for each letter flash
    is_txt_stim(bool): if True a TextStim is used for stimuli,
        otherwise an ImageStim
    static_time(float): duration for the static period
    trigger_type(string): type of trigger to use; default 'image'
    space_char: character used to represent a space
    """
    self.window = window
    # Measured refresh rate of the window (frames per second).
    self.refresh_rate = window.getActualFrameRate()

    self.logger = logging.getLogger(__name__)

    # Stimuli parameters, these are set on display in order to allow
    # easy updating after defintion
    self.stimuli_sequence = stim_sequence
    self.stimuli_colors = stim_colors
    self.stimuli_timing = stim_timing
    self.is_txt_stim = is_txt_stim

    self.staticPeriod = static_clock
    self.static_time = static_time
    self.experiment_clock = experiment_clock
    self.timing_clock = core.Clock()

    # Used to handle writing the marker stimulus
    self.marker_writer = marker_writer or NullMarkerWriter()

    # Length of the stimuli (number of flashes)
    self.stim_length = len(stim_sequence)

    # Informational Parameters
    self.info_text = info_text

    # Stim parameters
    self.stimuli_font = stim_font
    self.stimuli_height = stim_height
    self.stimuli_pos = stim_pos

    # Trigger Items
    self.first_run = True
    self.trigger_type = trigger_type
    self.trigger_callback = TriggerCallback()
    # Callback used on presentation of first stimulus.
    self.first_stim_callback = lambda _sti: None
    self.size_list_sti = []
    self.space_char = space_char

    # Task bar text object.
    self.task = visual.TextStim(win=self.window, color=task_color[0],
                                height=task_height,
                                text=task_text,
                                font=task_font, pos=task_pos,
                                wrapWidth=None, colorSpace='rgb',
                                opacity=1, depth=-6.0)

    # Create multiple text objects based on input
    self.text = []
    for idx in range(len(self.info_text)):
        self.text.append(
            visual.TextStim(win=self.window, color=info_color[idx],
                            height=info_height[idx],
                            text=self.info_text[idx],
                            font=info_font[idx], pos=info_pos[idx],
                            wrapWidth=None, colorSpace='rgb',
                            opacity=1, depth=-6.0))

    # Create Stimuli Object
    if self.is_txt_stim:
        self.sti = visual.TextStim(win=self.window, color='white',
                                   height=self.stimuli_height, text='+',
                                   font=self.stimuli_font,
                                   pos=self.stimuli_pos,
                                   wrapWidth=None, colorSpace='rgb',
                                   opacity=1, depth=-6.0)
    else:
        self.sti = visual.ImageStim(win=self.window, image=None,
                                    mask=None, pos=self.stimuli_pos,
                                    ori=0.0)
blendMode='avg', waitBlanking=True, winType='pyglet') win.recordFrameIntervals = True frameRate = win.getActualFrameRate() print(frameRate) # Initialize Clock clock = core.StaticPeriod(screenHz=frameRate) experiment_clock = core.MonotonicClock(start_time=None) rsvp = CopyPhraseDisplay(win, clock, experiment_clock, marker_writer=NullMarkerWriter(), static_task_text=text_task, static_task_color=color_task, info_text=text_text, info_color=color_text, info_pos=pos_text, info_height=txt_height, info_font=font_text, task_color=['white'], task_font=font_task, task_text='COPY_PH', task_height=height_task, stim_font=font_sti, stim_pos=pos_sti, stim_height=sti_height, stim_sequence=['a'] * 10,
class DataAcquisitionClient:
    """Data Acquisition client. The client sets up a separate thread for
    acquisition, writes incoming data to a queue, and processes the data from
    the queue.

    Parameters
    ----------
        connector: Connector instance
            Object with device-specific implementations for connecting,
            initializing, and reading a packet.
        buffer_name : str, optional
            Name of the sql database archive; default is raw_data.db.
        raw_data_file_name: str,
            Name of the raw data csv file to output; if not present raw data
            is not written.
        clock : Clock, optional
            Clock instance used to timestamp each acquisition record
        delete_archive: boolean, optional
            Flag indicating whether to delete the database archive on exit.
            Default is True.
    """

    def __init__(self,
                 connector,
                 buffer_name='raw_data.db',
                 raw_data_file_name='raw_data.csv',
                 clock=CountClock(),
                 delete_archive=True):
        self._connector = connector
        self._buffer_name = buffer_name
        self._raw_data_file_name = raw_data_file_name
        self._clock = clock
        # boolean; set to false to retain the sqlite db.
        self.delete_archive = delete_archive

        self._device_info = None  # set on start.
        self._is_streaming = False

        # Offset in seconds from the start of acquisition to calibration
        # trigger. Calculated once, then cached.
        self._cached_offset = None
        self._record_at_calib = None
        self._max_wait = 0.1  # for process loop

        self.marker_writer = NullMarkerWriter()
        # column in the acquisition data used to calculate offset.
        self.trigger_column = TRIGGER_COLUMN_NAME
        self._acq_process = None
        self._buf = None

    # @override ; context manager
    def __enter__(self):
        self.start_acquisition()
        return self

    # @override ; context manager
    def __exit__(self, _exc_type, _exc_value, _traceback):
        self.stop_acquisition()

    def start_acquisition(self):
        """Run the initialization code and start the loop to acquire data
        from the server.

        We use multiprocessing to achieve best performance during our
        sessions.

        Some references:
            Stopping processes and other great multiprocessing examples:
            https://pymotw.com/2/multiprocessing/communication.html
            Windows vs. Unix Process Differences:
            https://docs.python.org/2.7/library/multiprocessing.html#windows
        """
        if not self._is_streaming:
            log.debug("Starting Acquisition")
            # Channel used for the startup handshake with the acquisition
            # process.
            msg_queue = Queue()

            self.configure_connector()

            # Clock is copied, so reset should happen in the main thread.
            self._clock.reset()

            # Used to communicate with the database from both the main thread
            # as well as the acquisition thread.
            self._buf = buffer_server.new_mailbox()

            self._acq_process = AcquisitionProcess(connector=self._connector,
                                                   clock=self._clock,
                                                   buf=self._buf,
                                                   msg_queue=msg_queue)
            self._acq_process.start()

            # Block thread until device connects and returns device_info.
            msg_type, msg = msg_queue.get()
            if msg_type == MSG_DEVICE_INFO:
                self._device_info = msg
                log.info("Connected to device")
                log.info(msg)
            elif msg_type == MSG_ERROR:
                raise Exception("Error connecting to device")
            else:
                raise Exception("Message not understood: " + str(msg))

            # Start up the database server
            buffer_server.start_server(self._buf,
                                       self._device_info.channels,
                                       self._buffer_name)
            # Inform acquisition process that database server is ready
            msg_queue.put(True)
            msg_queue = None
            self._is_streaming = True

    def configure_connector(self) -> None:
        """Steps to configure the device connector before starting
        acquisition. For LSL-capable EEG connectors, enables marker streams,
        renames conflicting device channels, and creates an LSL marker
        writer."""
        if self._connector.__class__.supports(
                self._connector.device_spec, ConnectionMethod.LSL
        ) and self._connector.device_spec.content_type == 'EEG':
            log.debug("Initializing LSL Marker Writer.")
            self._connector.include_marker_streams = True
            # If there are any device channels with the same name as the
            # offset_column ('TRG'), rename these to avoid conflicts.
            self._connector.rename_rules[
                self.trigger_column] = f"{self.trigger_column}_device_stream"
            # Initialize the marker streams before the device connection
            # (name it 'TRG').
            self.marker_writer = LslMarkerWriter(
                stream_name=self.trigger_column)

    def stop_acquisition(self):
        """Stop acquiring data; perform cleanup. When a raw data file name
        is configured, the buffered data is dumped to that csv file."""
        log.debug("Stopping Acquisition Process")
        self._is_streaming = False
        self._acq_process.stop()
        self._acq_process.join()
        self.marker_writer.cleanup()
        self.marker_writer = NullMarkerWriter()
        if self._raw_data_file_name and self._buf:
            buffer_server.dump_data(self._buf, self._raw_data_file_name,
                                    self.device_info.name,
                                    self.device_info.fs)

    def get_data(self, start=None, end=None, field='_rowid_'):
        """Queries the buffer by field.

        Parameters
        ----------
            start : number, optional
                start of time slice; units are those of the acquisition clock.
            end : float, optional
                end of time slice; units are those of the acquisition clock.
            field: str, optional
                field on which to query; default value is the row id.
        Returns
        -------
            list of Records; empty if the buffer has not been initialized.
        """
        if self._buf is None:
            return []
        return buffer_server.get_data(self._buf, start, end, field)

    def get_data_for_clock(self, calib_time: float, start_time: float,
                           end_time: float):
        """Queries the database, using start and end values relative to a
        clock different than the acquisition clock.

        Parameters
        ----------
            calib_time: float
                experiment_clock time (in seconds) at calibration.
            start_time : float, optional
                start of time slice; units are those of the experiment clock.
            end_time : float, optional
                end of time slice; units are those of the experiment clock.
        Returns
        -------
            list of Records
        """
        sample_rate = self._device_info.fs

        if self._record_at_calib is None:
            rownum_at_calib = 1
        else:
            rownum_at_calib = self._record_at_calib.rownum

        # Calculate number of samples since experiment_clock calibration;
        # multiplying by the fs converts from seconds to samples.
        start_offset = (start_time - calib_time) * sample_rate
        start = rownum_at_calib + start_offset

        end = None
        if end_time:
            end_offset = (end_time - calib_time) * sample_rate
            end = rownum_at_calib + end_offset

        return self.get_data(start=start, end=end, field='_rowid_')

    def get_data_len(self):
        """Efficient way to calculate the amount of data cached."""
        if self._buf is None:
            return 0
        return buffer_server.count(self._buf)

    @property
    def device_info(self):
        """Get the latest device_info."""
        return self._device_info

    @property
    def is_calibrated(self):
        """Returns boolean indicating whether or not acquisition has been
        calibrated (an offset calculated based on a trigger)."""
        return self.offset is not None

    @is_calibrated.setter
    def is_calibrated(self, bool_val):
        """Setter for the is_calibrated property that allows the user to
        override the calculated value and use a 0 offset.

        Parameters
        ----------
            bool_val: boolean
                if True, uses a 0 offset; if False forces the calculation.
        """
        self._cached_offset = 0.0 if bool_val else None

    @property
    def offset(self):
        """Offset in seconds from the start of acquisition to calibration
        trigger.

        Returns
        -------
            float or None if values in offset_column are all 0.
        """
        # cached value if previously queried; only needs to be computed once.
        if self._cached_offset is not None:
            return self._cached_offset

        if self._buf is None or self._device_info is None:
            log.debug("Buffer or device has not been initialized")
            return None

        log.debug("Querying database for offset")
        # Assumes that the offset_column is present and used for calibration,
        # and that non-calibration values are all 0.
        rows = buffer_server.query(self._buf,
                                   filters=[(self.trigger_column, ">", 0)],
                                   ordering=("timestamp", "asc"),
                                   max_results=1)
        if not rows:
            log.debug(f"No rows have a {self.trigger_column} value.")
            return None

        log.debug(rows[0])
        # Calculate offset from the number of samples recorded by the time
        # of calibration.
        self._record_at_calib = rows[0]
        self._cached_offset = rows[0].rownum / self._device_info.fs
        log.debug("Cached offset: %s", str(self._cached_offset))

        return self._cached_offset

    @property
    def record_at_calib(self):
        """Data record at the calibration trigger"""
        return self._record_at_calib

    def cleanup(self):
        """Performs cleanup tasks, such as deleting the buffer archive. Note
        that data will be unavailable after calling this method."""
        if self._buf:
            buffer_server.stop(self._buf, delete_archive=self.delete_archive)
            self._buf = None
class DataAcquisitionClient:
    """Data Acquisition client. The client sets up a separate thread for
    acquisition, writes incoming data to a queue, and processes the data from
    the queue.

    Parameters
    ----------
        device: Device instance
            Object with device-specific implementations for connecting,
            initializing, and reading a packet.
        processor : Processor; optional
            A data Processor that does something with the streaming data
            (ex. writes to a file.)
        buffer_name : str, optional
            Name of the sql database archive; default is buffer.db.
        clock : Clock, optional
            Clock instance used to timestamp each acquisition record
        delete_archive: boolean, optional
            Flag indicating whether to delete the database archive on exit.
            Default is True.
    """

    def __init__(self,
                 device,
                 processor=FileWriter(filename='rawdata.csv'),
                 buffer_name='buffer.db',
                 clock=CountClock(),
                 delete_archive=True):
        # NOTE(review): the processor/clock defaults are evaluated once at
        # definition time and shared by all instances — confirm callers
        # always pass their own.
        self._device = device
        self._processor = processor
        self._buffer_name = buffer_name
        self._clock = clock
        # boolean; set to false to retain the sqlite db.
        self.delete_archive = delete_archive

        self._device_info = None  # set on start.
        self._is_streaming = False

        # Offset in seconds from the start of acquisition to calibration
        # trigger. Calculated once, then cached.
        self._cached_offset = None
        self._record_at_calib = None
        self._max_wait = 0.1  # for process loop

        # Max number of records in queue before it blocks for processing.
        maxsize = 500
        self._process_queue = multiprocessing.JoinableQueue(maxsize=maxsize)
        self.marker_writer = NullMarkerWriter()
        self._acq_process = None
        self._data_processor = None
        self._buf = None

    # @override ; context manager
    def __enter__(self):
        self.start_acquisition()
        return self

    # @override ; context manager
    def __exit__(self, _exc_type, _exc_value, _traceback):
        self.stop_acquisition()

    def start_acquisition(self):
        """Run the initialization code and start the loop to acquire data
        from the server.

        We use multiprocessing to achieve best performance during our
        sessions.

        Some references:
            Stopping processes and other great multiprocessing examples:
            https://pymotw.com/2/multiprocessing/communication.html
            Windows vs. Unix Process Differences:
            https://docs.python.org/2.7/library/multiprocessing.html#windows
        """
        if not self._is_streaming:
            log.debug("Starting Acquisition")
            # Bidirectional channel used for the startup handshake.
            msg_queue = multiprocessing.Queue()

            # Initialize the marker streams before the device connection so
            # the device can start listening.
            # TODO: Should this be a property of the device?
            if self._device.name == 'LSL':
                self.marker_writer = LslMarkerWriter()

            # Clock is copied, so reset should happen in the main thread.
            self._clock.reset()

            self._acq_process = AcquisitionProcess(self._device, self._clock,
                                                   self._process_queue,
                                                   msg_queue)
            self._acq_process.start()

            # Block thread until device connects and returns device_info.
            msg_type, msg = msg_queue.get()
            if msg_type == MSG_DEVICE_INFO:
                self._device_info = msg
            elif msg_type == MSG_ERROR:
                raise Exception("Error connecting to device")
            else:
                raise Exception("Message not understood: " + str(msg))

            # Initialize the buffer and processor; this occurs after the
            # device initialization to ensure that any device parameters have
            # been updated as needed.
            self._processor.set_device_info(self._device_info)
            self._buf = buffer_server.start(self._device_info.channels,
                                            self._buffer_name)
            self._data_processor = DataProcessor(
                data_queue=self._process_queue,
                msg_queue=msg_queue,
                processor=self._processor,
                buf=self._buf,
                wait=self._max_wait)
            self._data_processor.start()

            # Block until processor has initialized.
            msg_type, msg = msg_queue.get()
            self._is_streaming = True

    def stop_acquisition(self):
        """Stop acquiring data; perform cleanup. Joins the acquisition
        process, drains the processing queue, and resets the marker
        writer."""
        log.debug("Stopping Acquisition Process")
        self._is_streaming = False
        self._acq_process.stop()
        self._acq_process.join()

        log.debug("Stopping Processing Queue")
        # Blocks until all data in the queue is consumed.
        self._process_queue.join()
        self._data_processor.stop()

        self.marker_writer.cleanup()
        self.marker_writer = NullMarkerWriter()

    def get_data(self, start=None, end=None, field='_rowid_', win=None):
        """Queries the buffer by field.

        Parameters
        ----------
            start : number, optional
                start of time slice; units are those of the acquisition clock.
            end : float, optional
                end of time slice; units are those of the acquisition clock.
            field: str, optional
                field on which to query; default value is the row id.
            win : Window
                window to pass to server for reloading
        Returns
        -------
            list of Records; empty if the buffer has not been initialized.
        """
        if self._buf is None:
            return []
        return buffer_server.get_data(self._buf, start, end, field, win)

    def get_data_for_clock(self, calib_time: float, start_time: float,
                           end_time: float):
        """Queries the database, using start and end values relative to a
        clock different than the acquisition clock.

        Parameters
        ----------
            calib_time: float
                experiment_clock time (in seconds) at calibration.
            start_time : float, optional
                start of time slice; units are those of the experiment clock.
            end_time : float, optional
                end of time slice; units are those of the experiment clock.
        Returns
        -------
            list of Records
        """
        sample_rate = self._device_info.fs

        if self._record_at_calib is None:
            rownum_at_calib = 1
        else:
            rownum_at_calib = self._record_at_calib.rownum

        # Calculate number of samples since experiment_clock calibration;
        # multiplying by the fs converts from seconds to samples.
        start_offset = (start_time - calib_time) * sample_rate
        start = rownum_at_calib + start_offset

        end = None
        if end_time:
            end_offset = (end_time - calib_time) * sample_rate
            end = rownum_at_calib + end_offset

        return self.get_data(start=start, end=end, field='_rowid_')

    def get_data_len(self):
        """Efficient way to calculate the amount of data cached."""
        if self._buf is None:
            return 0
        return buffer_server.count(self._buf)

    @property
    def device_info(self):
        """Get the latest device_info."""
        return self._device_info

    @property
    def is_calibrated(self):
        """Returns boolean indicating whether or not acquisition has been
        calibrated (an offset calculated based on a trigger)."""
        return self.offset is not None

    @is_calibrated.setter
    def is_calibrated(self, bool_val):
        """Setter for the is_calibrated property that allows the user to
        override the calculated value and use a 0 offset.

        Parameters
        ----------
            bool_val: boolean
                if True, uses a 0 offset; if False forces the calculation.
        """
        self._cached_offset = 0.0 if bool_val else None

    @property
    def offset(self):
        """Offset in seconds from the start of acquisition to calibration
        trigger.

        Returns
        -------
            float or None if TRG channel is all 0.
        TODO: Consider setting the trigger channel name in the device_info.
        """
        # cached value if previously queried; only needs to be computed once.
        if self._cached_offset is not None:
            return self._cached_offset

        if self._buf is None or self._device_info is None:
            log.debug("Buffer or device has not been initialized")
            return None

        log.debug("Querying database for offset")
        # Assumes that the TRG column is present and used for calibration,
        # and that non-calibration values are all 0.
        rows = buffer_server.query(self._buf,
                                   filters=[("TRG", ">", 0)],
                                   ordering=("timestamp", "asc"),
                                   max_results=1)
        if not rows:
            log.debug("No rows have a TRG value.")
            return None

        log.debug(rows[0])
        # Calculate offset from the number of samples recorded by the time
        # of calibration.
        self._record_at_calib = rows[0]
        self._cached_offset = rows[0].rownum / self._device_info.fs
        log.debug("Cached offset: %s", str(self._cached_offset))

        return self._cached_offset

    @property
    def record_at_calib(self):
        """Data record at the calibration trigger"""
        return self._record_at_calib

    def cleanup(self):
        """Performs cleanup tasks, such as deleting the buffer archive. Note
        that data will be unavailable after calling this method."""
        if self._buf:
            buffer_server.stop(self._buf, delete_archive=self.delete_archive)
            self._buf = None
def __init__(self, window, static_period, experiment_clock,
             marker_writer=None,
             color_task=['white'],
             font_task='Times',
             pos_task=(-.8, .9),
             task_height=0.2,
             text_task='1/100',
             color_text=['white'],
             text_text=['Information Text'],
             font_text=['Times'],
             pos_text=[(.8, .9)],
             height_text=[0.2],
             font_sti='Times',
             pos_sti=(-.8, .9),
             sti_height=0.2,
             stim_sequence=['a'] * 10,
             color_list_sti=['white'] * 10,
             time_list_sti=[1] * 10,
             tr_pos_bg=(.5, .5),
             bl_pos_bg=(-.5, -.5),
             size_domain_bg=7,
             color_bg_txt='red',
             font_bg_txt='Times',
             color_bar_bg='green',
             bg_step_num=20,
             is_txt_sti=True,
             static_period_time=.05,
             trigger_type='image',
             bg=False):
    """Initialize RSVP window parameters and objects.

    Args:
        window(visual_window): Window in computer
        static_period: stored as self.staticPeriod; used with
            static_period_time for the static interval.
        experiment_clock: Clock used to timestamp the experiment.
        marker_writer(MarkerWriter): object used to write triggers to
            the daq stream; a NullMarkerWriter is substituted when None.
        color_task(list[string]): Color of the task string. Shares the
            length of the text_task. If of length 1 the entire task bar
            shares the same color.
        font_task(string): Font of task string
        pos_task(tuple): position of task string
        task_height(float): height for task string
        text_task(string): text of the task bar
        text_text(list[string]): text list for information texts
        color_text(list[string]): Color of the text string
        font_text(list[string]): Font of text string
        pos_text(list[tuple]): position of text string
        height_text(list[float]): height for each information text
        sti_height(float): height of the stimuli object
        pos_sti(tuple): position of stimuli
        font_sti(string): font of the stimuli
        stim_sequence(list[string]): list of elements to flash
        color_list_sti(list[string]): list of colors for stimuli
        time_list_sti(list[float]): timing for each letter flash
        tr_pos_bg(tuple): top right corner location of bar graph
        bl_pos_bg(tuple): bottom left corner location of bar graph
        size_domain_bg(int): number of elements in bar graph
        color_bg_txt(string): color of bar graph text
        font_bg_txt(string): font of bar graph text
        color_bar_bg(string): color of bar graph bars
        bg_step_num(int): number of animation iterations for bars
        is_txt_sti(bool): if True a TextStim is used for stimuli,
            otherwise an ImageStim
        static_period_time(float): duration for the static period
        trigger_type(string): type of trigger to use; default 'image'
        bg(bool): if True, a BarGraph object is created
    """
    self.win = window
    # Measured refresh rate of the window (frames per second).
    self.refresh_rate = window.getActualFrameRate()

    self.stim_sequence = stim_sequence
    self.color_list_sti = color_list_sti
    self.time_list_sti = time_list_sti
    self.is_txt_sti = is_txt_sti

    self.staticPeriod = static_period
    self.static_period_time = static_period_time
    self.experiment_clock = experiment_clock
    self.timing_clock = core.Clock()

    # Used to handle writing the marker stimulus
    self.marker_writer = marker_writer or NullMarkerWriter()

    # Length of the stimuli (number of flashes)
    self.len_sti = len(stim_sequence)

    # Stim parameters
    self.font_stim = font_sti
    self.height_stim = sti_height
    self.pos_sti = pos_sti

    self.first_run = True
    self.trigger_type = trigger_type

    # Check if task text is multicolored
    if len(color_task) == 1:
        self.task = visual.TextStim(win=window, color=color_task[0],
                                    height=task_height,
                                    text=text_task,
                                    font=font_task, pos=pos_task,
                                    wrapWidth=None, colorSpace='rgb',
                                    opacity=1, depth=-6.0)
    else:
        self.task = MultiColorText(win=window, list_color=color_task,
                                   height=task_height,
                                   text=text_task,
                                   font=font_task, pos=pos_task,
                                   opacity=1, depth=-6.0)

    # Create multiple text objects based on input
    self.text = []
    for idx in range(len(text_text)):
        self.text.append(visual.TextStim(win=window, color=color_text[idx],
                                         height=height_text[idx],
                                         text=text_text[idx],
                                         font=font_text[idx],
                                         pos=pos_text[idx],
                                         wrapWidth=None, colorSpace='rgb',
                                         opacity=1, depth=-6.0))

    # Create Stimuli Object
    if self.is_txt_sti:
        self.sti = visual.TextStim(win=window, color='white',
                                   height=sti_height, text='+',
                                   font=font_sti, pos=pos_sti,
                                   wrapWidth=None, colorSpace='rgb',
                                   opacity=1, depth=-6.0)
    else:
        self.sti = visual.ImageStim(win=window, image=None, mask=None,
                                    units='', pos=pos_sti,
                                    size=(sti_height, sti_height),
                                    ori=0.0)

    if bg:
        # Create Bar Graph
        self.bg = BarGraph(win=window, tr_pos_bg=tr_pos_bg,
                           bl_pos_bg=bl_pos_bg,
                           size_domain=size_domain_bg,
                           color_txt=color_bg_txt, font_bg=font_bg_txt,
                           color_bar_bg=color_bar_bg,
                           max_num_step=bg_step_num)
def __init__(self, window, static_period, experiment_clock,
             marker_writer=None,
             color_task=['white'],
             font_task='Times',
             pos_task=(-.8, .9),
             task_height=0.2,
             text_task='1/100',
             color_text=['white'],
             text_text=['Information Text'],
             font_text=['Times'],
             pos_text=[(.8, .9)],
             height_text=[0.2],
             font_sti='Times',
             pos_sti=(-.8, .9),
             sti_height=0.2,
             stim_sequence=['a'] * 10,
             color_list_sti=['white'] * 10,
             time_list_sti=[1] * 10,
             is_txt_sti=True,
             static_period_time=.05,
             trigger_type='image',
             space_char=SPACE_CHAR):
    """Initialize RSVP window parameters and objects.

    Args:
        window(visual_window): Window in computer
        static_period: stored as self.staticPeriod; used with
            static_period_time for the static interval.
        experiment_clock: Clock used to timestamp the experiment.
        marker_writer(MarkerWriter): object used to write triggers to
            the daq stream; a NullMarkerWriter is substituted when None.
        color_task(list[string]): Color of the task string. Shares the
            length of the text_task. If of length 1 the entire task bar
            shares the same color.
        font_task(string): Font of task string
        pos_task(tuple): position of task string
        task_height(float): height for task string
        text_task(string): text of the task bar
        text_text(list[string]): text list for information texts
        color_text(list[string]): Color of the text string
        font_text(list[string]): Font of text string
        pos_text(list[tuple]): position of text string
        height_text(list[float]): height for each information text
        sti_height(float): height of the stimuli object
        pos_sti(tuple): position of stimuli
        font_sti(string): font of the stimuli
        stim_sequence(list[string]): list of elements to flash
        color_list_sti(list[string]): list of colors for stimuli
        time_list_sti(list[float]): timing for each letter flash
        is_txt_sti(bool): if True a TextStim is used for stimuli,
            otherwise an ImageStim
        static_period_time(float): duration for the static period
        trigger_type(string): type of trigger to use; default 'image'
        space_char: character used to represent a space
    """
    self.win = window
    # Measured refresh rate of the window (frames per second).
    self.refresh_rate = window.getActualFrameRate()

    self.logger = logging.getLogger(__name__)

    self.stim_sequence = stim_sequence
    self.color_list_sti = color_list_sti
    self.time_list_sti = time_list_sti
    self.is_txt_sti = is_txt_sti

    self.staticPeriod = static_period
    self.static_period_time = static_period_time
    self.experiment_clock = experiment_clock
    self.timing_clock = core.Clock()

    # Used to handle writing the marker stimulus
    self.marker_writer = marker_writer or NullMarkerWriter()

    # Length of the stimuli (number of flashes)
    self.len_sti = len(stim_sequence)

    # Stim parameters
    self.font_stim = font_sti
    self.height_stim = sti_height
    self.pos_sti = pos_sti

    self.first_run = True
    self.trigger_type = trigger_type
    self.trigger_callback = TriggerCallback()
    # Callback used on presentation of first stimulus.
    self.first_stim_callback = lambda _sti: None
    self.size_list_sti = []
    self.space_char = space_char

    # Check if task text is multicolored
    if len(color_task) == 1:
        self.task = visual.TextStim(win=window, color=color_task[0],
                                    height=task_height,
                                    text=text_task,
                                    font=font_task, pos=pos_task,
                                    wrapWidth=None, colorSpace='rgb',
                                    opacity=1, depth=-6.0)
    else:
        self.task = MultiColorText(win=window, list_color=color_task,
                                   height=task_height,
                                   text=text_task,
                                   font=font_task, pos=pos_task,
                                   opacity=1, depth=-6.0)

    # Create multiple text objects based on input
    self.text = []
    for idx in range(len(text_text)):
        self.text.append(
            visual.TextStim(win=window, color=color_text[idx],
                            height=height_text[idx],
                            text=text_text[idx],
                            font=font_text[idx], pos=pos_text[idx],
                            wrapWidth=None, colorSpace='rgb',
                            opacity=1, depth=-6.0))

    # Create Stimuli Object
    if self.is_txt_sti:
        self.sti = visual.TextStim(win=window, color='white',
                                   height=sti_height, text='+',
                                   font=font_sti, pos=pos_sti,
                                   wrapWidth=None, colorSpace='rgb',
                                   opacity=1, depth=-6.0)
    else:
        self.sti = visual.ImageStim(win=window, image=None, mask=None,
                                    pos=pos_sti, ori=0.0)