Example #1
def rm_baseline(ndvar, tstart=None, tend=0, name='{name}'):
    """
    returns an ndvar object with baseline correction applied.

    ndvar : ndvar
        the source data
    tstart : scalar | None
        the beginning of the baseline period (None -> the start of the epoch)
    tend : scalar | None
        the end of the baseline  period (None -> the end of the epoch)
    name : str
        name for the new ndvar

    """
    # average the data over the baseline window
    subdata = ndvar.subdata(time=(tstart, tend))
    baseline = subdata.summary('time')

    # re-insert a length-1 time axis so the baseline broadcasts against the
    # full data
    t_ax = subdata.get_axis('time')
    index = (slice(None),) * t_ax + (None,)
    bl_data = baseline.x[index]

    # subtract the baseline and wrap the result in a new ndvar
    dims = ndvar.dims
    data = ndvar.x - bl_data
    name = name.format(name=ndvar.name)
    return _data.ndvar(data, dims=dims, properties=ndvar.properties, name=name)
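A minimal usage sketch; the ndvar ``meg`` and its 'time' dimension are assumed to exist (loaded elsewhere), and the 100 ms window is arbitrary:

# hypothetical input: ``meg`` is an ndvar with a 'time' dimension
meg_bl = rm_baseline(meg, tstart=-0.1, tend=0, name='{name}_bl')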
Example #2
    def get_load(self, i):
        # per-epoch time course (loading) of component ``i``
        time = self.source.get_dim('time')
        dims = ('case', time,)
        shape = (len(self.source), len(time))
        data = self._proj[:, i].reshape(shape)
        name = 'comp_%i_load' % i
        ndvar = _data.ndvar(data, dims=dims, name=name)
        return ndvar
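A hypothetical usage sketch, assuming ``pca`` is an instance of the class this method belongs to and has already been trained on the source data:

load_0 = pca.get_load(0)  # ndvar: per-epoch time course of component 0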
Example #3
def rm_pca(ds, rm=(), source='MEG', target='MEG'):
    """
    Perform PCA on the source data and remove the specified components. Use
    gui.pca to identify the components to remove; this function uses the same
    algorithm as the gui.

    """
    if not rm:
        raise ValueError("No components selected")
    if isinstance(source, basestring):
        source = ds[source]
    
    rm = sorted(rm)
    n_comp = max(rm) + 1
    
    # do the pca
    pca = _mdp.nodes.PCANode(output_dim=n_comp)
    for epoch in source.data:
        pca.train(epoch)
    pca.stop_training()
    
    # remove the components
    n_epochs, n_t, n_sensors = source.data.shape
    data = source.data.copy() # output data
    
    # take serialized data views for working with the PCANode
    new_data = data.view()
    old_data = source.data.view()
    
    # reshape the views
    new_data.shape = (n_epochs * n_t, n_sensors)
    old_data.shape = (n_epochs * n_t, n_sensors)
    
    # project the components and remove
    proj = pca.execute(old_data)
    for i in xrange(proj.shape[1]):
        if i not in rm:
            proj[:, i] = 0
    rm_comp_data = pca.inverse(proj)
    new_data -= rm_comp_data
    
    # create the output ndvar
    dims = source.dims
    properties = source.properties
    ds[target] = _data.ndvar(dims, data, properties, name=target)
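A usage sketch under the assumption that ``ds`` is a dataset containing an 'MEG' ndvar and that components 0 and 3 were identified for removal (e.g., with gui.pca); the target name is made up:

rm_pca(ds, rm=[0, 3], source='MEG', target='MEG_clean')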
Example #4
def add_fiff_to_events(path, dataset, i_start='i_start', 
                       tstart=-.2, tstop=.6, properties=None, 
                       name="MEG", sensorsname='fiff-sensors'):
    """
    Adds MEG data form a new file to a dataset containing events. 
    
    """
    # build the mne events array from the stored event sample indices
    events = np.empty((dataset.N, 3), dtype=np.uint32)
    events[:,0] = dataset[i_start].x
    events[:,1] = 0
    events[:,2] = 1
    
    raw = mne.fiff.Raw(path)
    
    # parse sensor net
    sensor_list = []
    for ch in raw.info['chs']:
        ch_name = ch['ch_name']
        if ch_name.startswith('MEG'):
            x, y, z = ch['loc'][:3]
            sensor_list.append([x, y, z, ch_name])
    sensor_net = sensors.sensor_net(sensor_list, name=sensorsname)
    
    picks = mne.fiff.pick_types(raw.info, meg=True, eeg=False, stim=False, 
                                eog=False, include=[], exclude=[])
    
    # read the data
    epochs = mne.Epochs(raw, events, 1, tstart, tstop, picks=picks)
    data = np.array([e.T for e in epochs.get_data()])
    
    props = {'samplingrate': epochs.info['sfreq'][0]}
    props.update(_default_fiff_properties)
    if properties:
        props.update(properties)
    
    T = epochs.times
    timevar = _data.var(T, 'time')
    dims = (timevar, sensor_net)
    
    dataset.add(_data.ndvar(dims, data, properties=props, name=name))
    dataset.default_DV = name
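A hypothetical call, assuming an event dataset ``ds`` with an 'i_start' variable; the file path is a placeholder:

add_fiff_to_events('/path/to/raw.fif', ds, tstart=-0.1, tstop=0.5, name='MEG')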
Example #5
    def subtract(self, components, baseline=(None, 0), name='{name}'):
        """
        returns a copy of the source ndvar with the principal
        components specified in ``components`` removed.

        Arguments:

        components : list of ints
            list of components to remove
        baseline : True | False | (int|None, int|None)
            Baseline correction after subtracting the components. True -> use the
            settings stored in the ndvar.properties; False -> do not apply any
            baseline correction; a new baseline can be specified with a tuple of
            two time values or None (use all values until the end of the epoch).

        """
        data = self._source_data
        proj = self._proj.copy()  # copy so the stored projection is left intact

        # flatten retained components
        for i in xrange(proj.shape[1]):
            if i not in components:
                proj[:, i] = 0

        # remove the components
        rm_comp_data = self.node.inverse(proj)
        new_data = data - rm_comp_data.reshape(data.shape)
        new_data = new_data.astype(self._source_dtype)

        # create the output ndvar
        dims = ('case',) + self.source.get_dims(('time', 'sensor'))
        properties = self.source.properties
        name = name.format(name=self.source.name)
        out = _data.ndvar(new_data, dims=dims, properties=properties, name=name)
        if baseline:
            tstart, tend = baseline
            out = rm_baseline(out, tstart, tend)
        return out
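A hypothetical usage sketch, with ``pca`` an instance of the surrounding class and the component indices made up:

clean = pca.subtract([0, 2], baseline=(None, 0), name='{name}_clean')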
Example #6
    def get_component(self, i):
        # sensor topography (weights) of component ``i``
        dims = self.source.get_dims(('sensor',))
        data = self.node.v.T[i]
        name = 'comp_%i' % i
        ndvar = _data.ndvar(data, dims, name=name)
        return ndvar
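A hypothetical usage sketch (``pca`` is again an instance of the surrounding class):

comp_0 = pca.get_component(0)  # ndvar: sensor weights of component 0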
Example #7
def fiff_epochs(dataset, i_start='i_start', 
                tstart=-.2, tstop=.6, baseline=(None,  0),
                properties=None, name="MEG", sensorsname='fiff-sensors'):
    """
    Uses the events in ``dataset[i_start]`` to extract epochs from the raw 
    file
    
    i_start : str
        name of the variable containing the index of the events to be
        imported
         
    """
    events = np.empty((dataset.N, 3), dtype=np.int32)
    events[:,0] = dataset[i_start].x
    events[:,1] = 0
    events[:,2] = 1
    
    source_path = dataset.info['source']
    raw = mne.fiff.Raw(source_path)
    
    # parse sensor net
    sensor_list = []
    for ch in raw.info['chs']:
        ch_name = ch['ch_name']
        if ch_name.startswith('MEG'):
            x, y, z = ch['loc'][:3]
            sensor_list.append([x, y, z, ch_name])
    sensor_net = sensors.sensor_net(sensor_list, name=sensorsname)
    
    picks = mne.fiff.pick_types(raw.info, meg=True, eeg=False, stim=False, 
                                eog=False, include=[], exclude=[])
    
    epochs = mne.Epochs(raw, events, 1, tstart, tstop, picks=picks, 
                        baseline=baseline)
    
    # determine data container properties
    epoch_shape = (len(picks), len(epochs.times))
    data_shape = (len(events), len(epochs.times), len(picks))
    # mne.fiff.raw.read_raw_segment uses float32
    data = np.empty(data_shape, dtype='float32') 

    # read the data
    for i, epoch in enumerate(epochs):
        if epoch.shape == epoch_shape:
            data[i] = epoch.T
        else:
            msg = ("Epoch %i shape mismatch: does your epoch definition "
                   "result in an epoch that overlaps the end of your data "
                   "file?" % i)
            raise IOError(msg)
    
    # read data properties
    props = {'samplingrate': epochs.info['sfreq'][0]}
    props.update(_default_fiff_properties)
    if properties:
        props.update(properties)
    
    T = epochs.times
    timevar = _data.var(T, 'time')
    dims = (timevar, sensor_net)
    
    dataset.add(_data.ndvar(dims, data, properties=props, name=name))
    dataset.default_DV = name
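A hypothetical call, assuming ``ds`` holds the events and ``ds.info['source']`` points at the raw fiff file:

fiff_epochs(ds, tstart=-0.1, tstop=0.5, baseline=(None, 0), name='MEG')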
Example #8
def fiff(raw, events, conditions, varname='condition', dataname='MEG',
         tstart=-.2, tstop=.6, properties=None, name=None, c_colors={},
         sensorsname='fiff-sensors'):
    """
    Loads data directly when two files (raw and events) are provided 
    separately.
    
    conditions : dict
        ID->name dictionary of conditions that should be imported
    event : str
        path to the event file
    properties : dict
        set properties in addition to the defaults
    raw : str
        path to the raw file
    varname : str
        variable name that will contain the condition value 
    
    """
    if name is None:
        name = os.path.basename(raw)
    
    raw = mne.fiff.Raw(raw)
    
    # parse sensor net
    sensor_list = []
    for ch in raw.info['chs']:
        ch_name = ch['ch_name']
        if ch_name.startswith('MEG'):
            x, y, z = ch['loc'][:3]
            sensor_list.append([x, y, z, ch_name])
    sensor_net = sensors.sensor_net(sensor_list, name=sensorsname)
    
    events = mne.read_events(events)
    picks = mne.fiff.pick_types(raw.info, meg=True, eeg=False, stim=False, 
                                eog=False, include=[], exclude=[])
    
    data = []
    c_x = []
    
    # read the data
    for ID in conditions:
        epochs = mne.Epochs(raw, events, ID, tstart, tstop, picks=picks)
        samplingrate = epochs.info['sfreq'][0]
        
        # data
        c_data = epochs.get_data()        # n_ep, n_ch, n_t 
        
        for epoch in c_data:
            data.append(epoch.T)

        T = epochs.times
        
        # conditions variable
        n_ep = len(c_data)
        c_x.extend([ID] * n_ep)
    
    # construct the dataset
    c_factor = _data.factor(c_x, name=varname, labels=conditions, 
                            colors=c_colors, retain_label_codes=True)
    
    props = {'samplingrate': samplingrate}
    props.update(_default_fiff_properties)
    if properties is not None:
        props.update(properties)
    
    data = np.array(data)
    
    timevar = _data.var(T, 'time')
    dims = (timevar, sensor_net)
    
    Y = _data.ndvar(dims, data, properties=props, name=dataname)
    
    dataset = _data.dataset(Y, c_factor, name=name, default_DV=dataname)
    return dataset
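A hypothetical usage sketch; the file paths, event IDs and condition labels are made up:

conditions = {1: 'standard', 2: 'deviant'}
ds = fiff('/path/to/raw.fif', '/path/to/raw-eve.fif', conditions,
          varname='condition', dataname='MEG', tstart=-0.1, tstop=0.5)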