Ejemplo n.º 1
0
def printExperimentMetaDataDemo():
    """Print the experiment-level metadata stored in an ioHub DataStore file.

    Opens ../hdf5_files/events.hdf5, reads the metadata for the first
    (and currently only) experiment saved in the file, prints it via
    printExperimentMetaData(), and closes the file.
    """
    # Create an instance of the ExperimentDataAccessUtility class
    # for the selected DataStore file. This allows us to access data
    # in the file based on Device Event names and attributes.
    # Forward-slash path: avoids the deprecated '\h' string escape and
    # works on both Windows and POSIX systems.
    experiment_data = ExperimentDataAccessUtility('../hdf5_files', 'events.hdf5')

    # Only one experiment's data can be saved in each hdf5 file, but
    # multiple sessions / runs of that experiment are all saved in the
    # same file, so index [0] is always the experiment of interest.
    exp_md = experiment_data.getExperimentMetaData()[0]

    printExperimentMetaData(exp_md)

    # Close the HDF5 file.
    experiment_data.close()
Ejemplo n.º 2
0
def printEventTypesWithDataDemo():
    """Print a summary line for each event type that has saved events.

    For every event table containing >= 1 event, prints the event constant
    name, the number of rows in the table, and the event iterator object.
    """
    # Create an instance of the ExperimentDataAccessUtility class
    # for the selected DataStore file. This allows us to access data
    # in the file based on Device Event names and attributes.
    # Forward-slash path: avoids the deprecated '\h' string escape and
    # works on both Windows and POSIX systems.
    experiment_data = ExperimentDataAccessUtility('../hdf5_files', 'events.hdf5')

    # Get any event tables that have >= 1 event saved in them.
    events_by_type = experiment_data.getEventsByType()

    # Print out info on each table.
    # .items() / print() replace the Python-2-only .iteritems() and the
    # print statement, matching the Python 3 sections of this file.
    for event_id, event_gen in events_by_type.items():
        event_constant = EventConstants.getName(event_id)
        print("{0} ({1}): {2}".format(event_constant, event_gen.table.nrows, event_gen))

    # Close the HDF5 file.
    experiment_data.close()
def printQueriedEventsDemo():
    """Print the Message events sent during each trial of the experiment.

    For each trial, prints the trial's condition variable values, the query
    string used to select the events, and the retrieved event field arrays.
    """
    # Create an instance of the ExperimentDataAccessUtility class
    # for the selected DataStore file. This allows us to access data
    # in the file based on Device Event names and attributes.
    # Forward-slash path: avoids the deprecated '\h' string escape and
    # works on both Windows and POSIX systems.
    experiment_data = ExperimentDataAccessUtility('../hdf5_files', 'events.hdf5')

    # Retrieve the listed attributes from the Message Event table, keeping
    # only events whose time is between the associated trial's TRIAL_START
    # and TRIAL_END condition variable values — i.e. only message events
    # sent during each trial, not those sent between trials.
    event_results = experiment_data.getEventAttributeValues(
        EventConstants.MESSAGE,
        ['time', 'device_time', 'event_id', 'delay', 'category', 'text'],
        conditionVariablesFilter=None,
        startConditions={'time': ('>=', '@TRIAL_START@')},
        endConditions={'time': ('<=', '@TRIAL_END@')})

    for trial_events in event_results:
        print('==== TRIAL DATA START =======')
        print("Trial Condition Values:")
        # .items() / print() replace the Python-2-only .iteritems() and
        # print statements used elsewhere in this function originally.
        for ck, cv in trial_events.condition_set._asdict().items():
            print("\t{ck} : {cv}".format(ck=ck, cv=cv))
        print()

        # (A no-op bare `trial_events.query_string` expression statement
        # that preceded this print has been removed.)
        print("Trial Query String:\t")
        print(trial_events.query_string)
        print()

        # Every namedtuple field except the bookkeeping attributes holds a
        # per-trial array of event attribute values.
        event_value_arrays = [
            (cv_name, cv_value)
            for cv_name, cv_value in trial_events._asdict().items()
            if cv_name not in ('query_string', 'condition_set')
        ]
        print("Trial Event Field Data:")
        for field_name, field_data in event_value_arrays:
            print("\t" + field_name + ': ' + str(field_data))
            print()
        print('===== TRIAL DATA END ========')

    # Close the HDF5 file.
    experiment_data.close()
Ejemplo n.º 4
0
def printExperimentConditionVariableDemo():
    """Print the condition variable values saved for each trial.

    A list is returned by getConditionVariables(), with one element per
    trial of the experiment, in the order the trials were run for the
    given session; each element is pretty-printed as a dict.
    """
    # Create an instance of the ExperimentDataAccessUtility class
    # for the selected DataStore file. This allows us to access data
    # in the file based on Device Event names and attributes.
    # Forward-slash path: avoids the deprecated '\h' string escape and
    # works on both Windows and POSIX systems.
    experiment_data = ExperimentDataAccessUtility('../hdf5_files', 'events.hdf5')

    # Access the condition values saved for this session.
    condition_variables = experiment_data.getConditionVariables()

    # print() calls replace the Python-2-only print statements, matching
    # the Python 3 sections of this file.
    print("Experiment Condition Variable values:")
    print()

    for variable_set in condition_variables:
        pprint(dict(variable_set._asdict()))
        print()

    # Close the HDF5 file.
    experiment_data.close()
Ejemplo n.º 5
0
    def convertToText(self, dir, name, localtime):
        """Export BinocularEyeSampleEvent data to a tab-delimited text file.

        Reads <dir>/events.hdf5 and writes one row per eye sample event,
        with session metadata columns prepended, to
        <dir>/Exp Results/<name>_EyeSample<localtime>.txt.

        NOTE(review): `dir` shadows the builtin of the same name; kept
        unchanged to preserve the method's public signature.
        """
        print(' this is dir')
        print(dir)
        # os.path.join avoids hard-coding a Windows '\' separator (the
        # original '\events.hdf5' literal also triggered a deprecated
        # string-escape warning for '\e').
        data_file_path = os.path.join(dir, 'events.hdf5')
        dpath, dfile = os.path.split(data_file_path)
        print('dpath')
        print(dpath)
        print('dfile')
        print(dfile)

        # Time how long processing takes.
        start_time = getTime()

        # Create an instance of the ExperimentDataAccessUtility class
        # for the selected DataStore file. This allows us to access data
        # in the file based on Device Event names and attributes, as well
        # as the experiment session metadata saved with each session run.
        dataAccessUtil = ExperimentDataAccessUtility(dpath,
                                                     dfile,
                                                     experimentCode=None,
                                                     sessionCodes=[])
        print('this is dataaccessutil:')
        print(dataAccessUtil)

        # Dict of all event types -> DataStore table info for this file.
        eventTableMappings = dataAccessUtil.getEventMappingInformation()

        # Event tables that actually contain data.
        events_with_data = dataAccessUtil.getEventsByType()

        duration = getTime() - start_time

        # Event table to output; the interactive selection dialog used by
        # the original example is bypassed with a hard-coded class name.
        event_class_selection = 'BinocularEyeSampleEvent'
        print('event_class_selection')
        print(event_class_selection)
        if event_class_selection is None:  # `is None`, not `== None`
            print("Event table Selection Cancelled, exiting...")
            dataAccessUtil.close()
            sys.exit(0)

        # Restart processing-time calculation.
        start_time = getTime()

        # Look up the correct event iterator given the selected class name.
        event_iterator_for_output = []
        for event_id, mapping_info in eventTableMappings.items():
            if mapping_info.class_name == event_class_selection:
                event_iterator_for_output = events_with_data[event_id]
                break

        # Read the session metadata table for all sessions saved to the file.
        session_metadata = dataAccessUtil.getSessionMetaData()

        print('this is dataaccessutil getsession metadata')
        print(session_metadata)

        # session_id -> session metadata mapping used during file writing.
        sesion_meta_data_dict = dict()
        session_metadata_columns = []
        # Initialized up front so writeDataRow below never sees an unbound
        # name (the original only assigned it inside the if-branch).
        session_uservar_columns = []
        if len(session_metadata):
            session_metadata_columns = list(session_metadata[0]._fields[:-1])
            session_uservar_columns = list(
                session_metadata[0].user_variables.keys())
            for s in session_metadata:
                sesion_meta_data_dict[s.session_id] = s

        # Open a file to save the tab-delimited output to.
        log_file_name = name + '_EyeSample' + localtime + '.txt'
        out_path = os.path.join(dir, 'Exp Results', log_file_name)
        with open(out_path, 'w') as output_file:

            # Write the column header: session metadata columns followed by
            # the event table columns (skipping the first 3 bookkeeping cols).
            writeOutputFileHeader(
                output_file, session_metadata_columns,
                dataAccessUtil.getEventTable(
                    event_class_selection).cols._v_colnames[3:])

            print('Writing Data to %s:\n' % (dir + log_file_name))
            i = 0
            for i, event in enumerate(event_iterator_for_output):
                # Write out each row of the event data with session
                # data as prepended columns.
                writeDataRow(output_file,
                             sesion_meta_data_dict[event['session_id']],
                             session_uservar_columns, event[:][3:])

                # Progress dot every 100 events, without a newline.
                if i % 100 == 0:
                    print('.', end='')

        duration = duration + (getTime() - start_time)
        # Guard against ZeroDivisionError when processing finished in ~0s.
        events_per_second = i / duration if duration > 0 else 0.0
        print(
            '\nOutput Complete. %d Events Saved to %s in %.3f seconds (%.2f events/seconds).\n'
            % (i, log_file_name, duration, events_per_second))
        print('%s will be in the same directory as the selected .hdf5 file' %
              (log_file_name))
Ejemplo n.º 6
0
    # NOTE(review): this chunk is the interior of a function whose definition
    # lies outside this view; dataAccessUtil, start_time, getTime, and
    # displayEventTableSelectionDialog are presumably defined above — confirm
    # against the full file.

    # Get a dict of all event types -> DataStore table info
    #   for the selected DataStore file.
    eventTableMappings=dataAccessUtil.getEventMappingInformation()

    # Get event tables that have data...
    #
    events_with_data=dataAccessUtil.getEventsByType()

    # Time spent loading table information so far.
    duration=getTime()-start_time
    # Select which event table to output by displaying a list of
    #   Event Class Names that have data available to the user...
    #   (class_name is decoded from bytes for display.)
    event_class_selection=displayEventTableSelectionDialog("Select Event Type to Save", "Event Type:",
                [eventTableMappings[event_id].class_name.decode('utf-8') for event_id in list(events_with_data.keys())])
    if event_class_selection is None:
        print("Event table Selection Cancelled, exiting...")
        dataAccessUtil.close()
        sys.exit(0)

    # Restart the processing-time measurement now that the user has chosen.
    start_time=getTime()

    # Lookup the correct event iterator given the event class name selected.
    #
    event_iterator_for_output=None
    for event_id, mapping_info in eventTableMappings.items():
        if mapping_info.class_name.decode('utf-8') == event_class_selection:
            event_iterator_for_output=events_with_data[event_id]
            break
    # Read the session metadata table for all sessions saved to the file.
    #
    session_metadata=dataAccessUtil.getSessionMetaData()
Ejemplo n.º 7
0
#
# NOTE(review): this chunk appears to be two fused example fragments — the
# file-writing section below references events_by_trial, dfile, and datafile,
# none of which are defined in this view; confirm against the full file.
event_type = EventConstants.BINOCULAR_EYE_SAMPLE
retrieve_attributes = ('time', 'left_gaze_x', 'left_gaze_y',
                       'left_pupil_measure1', 'right_gaze_x', 'right_gaze_y',
                       'right_pupil_measure1', 'status')
# Retrieve the listed attributes for events whose time falls between each
# trial's TRIAL_START and TRIAL_END condition variable values.
trial_event_data = dataAccessUtil.getEventAttributeValues(
    event_type,
    retrieve_attributes,
    conditionVariablesFilter=None,
    startConditions={'time': ('>=', '@TRIAL_START@')},
    endConditions={'time': ('<=', '@TRIAL_END@')},
)

# No need to keep the hdf5 file open anymore...
#
dataAccessUtil.close()

# Process and plot the sample data for each trial in the data file.
#
for trial_index, trial_samples in enumerate(trial_event_data):
    ##### STEP B. #####
    # Find all samples that have missing eye position data and filter the eye position
    # and pupil size streams so that the eye track plot is more useful. In this case that
    # means setting position fields to NaN and pupil size to 0.
    #
    # left eye manufacturer specific missing data indicator
    left_eye_invalid_data_masks = trial_samples.status // 10 >= 2
    # Right eye manufacturer specific missing data indicator
    right_eye_invalid_data_masks = trial_samples.status % 10 >= 2
    # Get the needed left eye sample arrays
    # NOTE(review): the two masks above are never used below in this view —
    # presumably consumed by code that is missing here.
    #
    ecount = 0

    # Open a file to save the tab delimited output to.
    # (dfile is not defined in this view.)
    #
    output_file_name = "%s.txt" % (dfile[:-5])
    with open(output_file_name, 'w') as output_file:
        print('Writing Data to %s:\n' % (output_file_name))
        # Header: trial condition variable names (skipping the first two
        # bookkeeping fields) followed by the event field names.
        column_names = events_by_trial[0].condition_set._fields[
            2:] + events_by_trial[0]._fields[:-2]
        output_file.write('\t'.join(column_names))
        output_file.write('\n')

        for trial_data in events_by_trial:
            cv_fields = [str(cv) for cv in trial_data.condition_set[2:]]
            # Convert trial_data namedtuple to list of arrays.
            # len(trial_data) == len(SAVE_EVENT_FIELDS)
            trial_data = trial_data[:-2]
            for eix in range(len(trial_data[0])):
                # Step through each event, saving condition variable and event fields
                ecount += 1
                event_data = [str(c[eix]) for c in trial_data]
                output_file.write('\t'.join(cv_fields + event_data))
                output_file.write('\n')

                # Progress dot every 100 events.
                if eix % 100 == 0:
                    sys.stdout.write('.')

    print("\n\nWrote %d events." % ecount)
    # NOTE(review): `datafile` is not defined in this view, and
    # dataAccessUtil was already closed above — likely a leftover from a
    # different example; confirm.
    datafile.close()
from psychopy.iohub.datastore.util import ExperimentDataAccessUtility

# Create an instance of the ExperimentDataAccessUtility class
# for the selected DataStore file. This allows us to access data
# in the file based on Device Event names and attributes.
# Forward-slash path: avoids the deprecated '\h' string escape and
# works on both Windows and POSIX systems.
experiment_data = ExperimentDataAccessUtility('./hdf5_files', 'events.hdf5')

# Print the HDF5 structure for the given ioDataStore file.
experiment_data.printHubFileStructure()

# Close the HDF5 file.
experiment_data.close()
Ejemplo n.º 10
0
def createTrialDataStreams():
    """Load right-eye sample data for each trial and derive processed traces.

    Returns a list with one dict per trial containing the time stamps,
    pixel position traces (track loss cleared to NaN, and linearly
    interpolated), visual-angle traces (raw and median filtered), pupil
    size, velocity traces (raw and low-pass filtered), the valid-data
    periods, and the missing-data mask.
    """
    trial_data_streams = []

    # Get the filtered event data.
    # We will use right eye data only for the testing.
    dataAccessUtil = ExperimentDataAccessUtility(
        "../hdf5_files", "remote_data.hdf5", experimentCode=None, sessionCodes=[]
    )

    event_type = EventConstants.BINOCULAR_EYE_SAMPLE
    retrieve_attributes = ("time", "right_gaze_x", "right_gaze_y", "right_pupil_measure1", "status")
    # Keep only samples whose time lies between each trial's TRIAL_START
    # and TRIAL_END condition variable values.
    trial_event_data = dataAccessUtil.getEventAttributeValues(
        event_type,
        retrieve_attributes,
        conditionVariablesFilter=None,
        startConditions={"time": (">=", "@TRIAL_START@")},
        endConditions={"time": ("<=", "@TRIAL_END@")},
    )

    # No need to keep the hdf5 file open once the data is loaded.
    dataAccessUtil.close()

    for t, trial_data in enumerate(trial_event_data):
        # Mask marking periods of missing data in a data trace
        # (eye tracker dependent status encoding).
        invalid_data_mask = trial_data.status % 10 >= 2

        time = trial_data.time
        pupil = trial_data.right_pupil_measure1
        # x, y eye position traces (in pixels) with track-loss sample
        # positions set to NaN.
        xpix_cleared = trial_data.right_gaze_x.copy()
        ypix_cleared = trial_data.right_gaze_y.copy()
        processSampleEventGaps(xpix_cleared, ypix_cleared, pupil, invalid_data_mask, "clear")

        # x, y eye position traces (in pixels) with track-loss sample
        # positions linearly interpolated using missing_sample_start-1 and
        # missing_sample_end+1 as the points to interpolate between.
        xpix_linear = trial_data.right_gaze_x.copy()
        ypix_linear = trial_data.right_gaze_y.copy()

        # valid_data_periods is a list of array slice objects giving the
        # start,end index of each non-missing period in the data stream.
        valid_data_periods = processSampleEventGaps(xpix_linear, ypix_linear, pupil, invalid_data_mask, "linear")

        # Convert from pixels to visual angle coordinates.
        calibration_area_info = dict(
            display_size_mm=(340, 280.0), display_res_pix=(1280.0, 1024.0), eye_distance_mm=590.0
        )
        vac = VisualAngleCalc(**calibration_area_info)
        xdeg, ydeg = vac.pix2deg(xpix_linear, ypix_linear)

        # Median-filtered versions of the x and y degree data traces.
        xdeg_filtered = scipy.signal.medfilt(xdeg, SPATIAL_FILTER_WINDOW_SIZE)
        ydeg_filtered = scipy.signal.medfilt(ydeg, SPATIAL_FILTER_WINDOW_SIZE)

        # Velocity streams derived from the filtered position traces.
        xvel = calculateVelocity(time, xdeg_filtered)
        yvel = calculateVelocity(time, ydeg_filtered)

        # Low-pass Butterworth filter applied with filtfilt (forward and
        # backward) so the filtered velocity has no phase delay.
        FILTER_ORDER = 2
        Wn = 0.3
        b, a = scipy.signal.butter(FILTER_ORDER, Wn, "low")
        ffunc = scipy.signal.filtfilt
        xvel_filtered = ffunc(b, a, xvel)
        yvel_filtered = ffunc(b, a, yvel)

        velocity = np.sqrt(xvel * xvel + yvel * yvel)
        velocity_filtered = np.sqrt(xvel_filtered * xvel_filtered + yvel_filtered * yvel_filtered)

        # Create a data trace dictionary for all the different types
        # of data traces created for the trial.
        trial_data = {}
        trial_data["time"] = time
        trial_data["xpix_cleared"] = xpix_cleared
        trial_data["ypix_cleared"] = ypix_cleared
        trial_data["xpix_linear"] = xpix_linear
        # BUG FIX: the original assigned "xpix_linear" twice and never
        # stored the y trace; ypix_linear is now saved under its own key.
        trial_data["ypix_linear"] = ypix_linear
        trial_data["xdeg"] = xdeg
        trial_data["ydeg"] = ydeg
        trial_data["xdeg_filtered"] = xdeg_filtered
        trial_data["ydeg_filtered"] = ydeg_filtered
        trial_data["pupil"] = pupil
        trial_data["velocity"] = velocity
        trial_data["velocity_filtered"] = velocity_filtered
        trial_data["valid_data_periods"] = valid_data_periods
        trial_data["missing_data_mask"] = invalid_data_mask
        # Add the data trace dictionary to the result list.
        trial_data_streams.append(trial_data)
    return trial_data_streams
def createTrialDataStreams():
    """Load right-eye sample data for each trial and derive processed traces.

    Returns a list with one dict per trial containing the time stamps,
    pixel position traces (track loss cleared to NaN, and linearly
    interpolated), visual-angle traces (raw and median filtered), pupil
    size, velocity traces (raw and low-pass filtered), the valid-data
    periods, and the missing-data mask.
    """
    trial_data_streams = []

    # Get the filtered event data.
    # We will use right eye data only for the testing.
    dataAccessUtil = ExperimentDataAccessUtility('../hdf5_files',
                                                 'remote_data.hdf5',
                                                 experimentCode=None,
                                                 sessionCodes=[])

    event_type = EventConstants.BINOCULAR_EYE_SAMPLE
    retrieve_attributes = ('time', 'right_gaze_x', 'right_gaze_y',
                           'right_pupil_measure1', 'status')
    # Keep only samples whose time lies between each trial's TRIAL_START
    # and TRIAL_END condition variable values.
    trial_event_data = dataAccessUtil.getEventAttributeValues(
        event_type,
        retrieve_attributes,
        conditionVariablesFilter=None,
        startConditions={'time': ('>=', '@TRIAL_START@')},
        endConditions={'time': ('<=', '@TRIAL_END@')},
    )

    # No need to keep the hdf5 file open once the data is loaded.
    dataAccessUtil.close()

    for t, trial_data in enumerate(trial_event_data):
        # Mask marking periods of missing data in a data trace
        # (eye tracker dependent status encoding).
        invalid_data_mask = trial_data.status % 10 >= 2

        time = trial_data.time
        pupil = trial_data.right_pupil_measure1
        # x, y eye position traces (in pixels) with track-loss sample
        # positions set to NaN.
        xpix_cleared = trial_data.right_gaze_x.copy()
        ypix_cleared = trial_data.right_gaze_y.copy()
        processSampleEventGaps(xpix_cleared, ypix_cleared, pupil,
                               invalid_data_mask, 'clear')

        # x, y eye position traces (in pixels) with track-loss sample
        # positions linearly interpolated using missing_sample_start-1 and
        # missing_sample_end+1 as the points to interpolate between.
        xpix_linear = trial_data.right_gaze_x.copy()
        ypix_linear = trial_data.right_gaze_y.copy()

        # valid_data_periods is a list of array slice objects giving the
        # start,end index of each non-missing period in the data stream.
        valid_data_periods = processSampleEventGaps(xpix_linear, ypix_linear,
                                                    pupil, invalid_data_mask,
                                                    'linear')

        # Convert from pixels to visual angle coordinates.
        calibration_area_info = dict(display_size_mm=(340, 280.0),
                                     display_res_pix=(1280.0, 1024.0),
                                     eye_distance_mm=590.0)
        vac = VisualAngleCalc(**calibration_area_info)
        xdeg, ydeg = vac.pix2deg(xpix_linear, ypix_linear)

        # Median-filtered versions of the x and y degree data traces.
        xdeg_filtered = scipy.signal.medfilt(xdeg, SPATIAL_FILTER_WINDOW_SIZE)
        ydeg_filtered = scipy.signal.medfilt(ydeg, SPATIAL_FILTER_WINDOW_SIZE)

        # Velocity streams derived from the filtered position traces.
        xvel = calculateVelocity(time, xdeg_filtered)
        yvel = calculateVelocity(time, ydeg_filtered)

        # Low-pass Butterworth filter applied with filtfilt (forward and
        # backward) so the filtered velocity has no phase delay.
        FILTER_ORDER = 2
        Wn = 0.3
        b, a = scipy.signal.butter(FILTER_ORDER, Wn, 'low')
        ffunc = scipy.signal.filtfilt
        xvel_filtered = ffunc(b, a, xvel)
        yvel_filtered = ffunc(b, a, yvel)

        velocity = np.sqrt(xvel * xvel + yvel * yvel)
        velocity_filtered = np.sqrt(xvel_filtered * xvel_filtered +
                                    yvel_filtered * yvel_filtered)

        # Create a data trace dictionary for all the different types
        # of data traces created for the trial.
        trial_data = {}
        trial_data['time'] = time
        trial_data['xpix_cleared'] = xpix_cleared
        trial_data['ypix_cleared'] = ypix_cleared
        trial_data['xpix_linear'] = xpix_linear
        # BUG FIX: the original assigned 'xpix_linear' twice and never
        # stored the y trace; ypix_linear is now saved under its own key.
        trial_data['ypix_linear'] = ypix_linear
        trial_data['xdeg'] = xdeg
        trial_data['ydeg'] = ydeg
        trial_data['xdeg_filtered'] = xdeg_filtered
        trial_data['ydeg_filtered'] = ydeg_filtered
        trial_data['pupil'] = pupil
        trial_data['velocity'] = velocity
        trial_data['velocity_filtered'] = velocity_filtered
        trial_data['valid_data_periods'] = valid_data_periods
        trial_data['missing_data_mask'] = invalid_data_mask
        # Add the data trace dictionary to the result list.
        trial_data_streams.append(trial_data)
    return trial_data_streams
from psychopy.iohub.datastore.util import ExperimentDataAccessUtility

# Create an instance of the ExperimentDataAccessUtility class
# for the selected DataStore file. This allows us to access data
# in the file based on Device Event names and attributes.
# Forward-slash path: avoids the deprecated '\h' string escape and
# works on both Windows and POSIX systems.
experiment_data = ExperimentDataAccessUtility('./hdf5_files', 'events.hdf5')

# Print the HDF5 structure for the given ioDataStore file.
experiment_data.printHubFileStructure()

# Close the HDF5 file.
experiment_data.close()