Пример #1
0
def fiff_events(source_path=None, name=None):
    """
    Load the events of a raw fiff file into a dataset. Use
    :func:`fiff_epochs` to load MEG data corresponding to those events.

    source_path : str (path)
        the location of the raw file (if ``None``, a file dialog will be
        displayed).

    name : str
        A name for the dataset.
    """
    # ask the user for a file when no path was supplied
    if source_path is None:
        source_path = ui.ask_file("Pick a Fiff File", "Pick a Fiff File",
                                  ext=[('fif', 'Fiff')])

    if name is None:
        name = os.path.basename(source_path)

    raw = mne.fiff.Raw(source_path)
    events = mne.find_events(raw)
    # Nonzero values in the second column occurred in raw-eve files (which
    # contained all event offsets) but not in raw files created by kit2fiff.
    # For handling see :func:`fiff_event_file`.
    if (events[:, 1] != 0).any():
        raise NotImplementedError("Events starting with ID other than 0")

    i_start = _data.var(events[:, 0], name='i_start')
    event_id = _data.var(events[:, 2], name='eventID')
    return _data.dataset(event_id, i_start, name=name,
                         info={'source': source_path})
Пример #2
0
def fiff_event_file(path, labels=None):
    """
    Load events from a fiff event file into a dataset.

    path : str (path)
        Location of the event file.
    labels : dict | None
        Optional ``eventID -> label`` mapping; when provided, a factor
        named 'event' with these labels is added to the dataset.
        (Fixed: the original used a mutable default ``labels={}``, which
        is shared across calls; ``None`` is the safe sentinel and keeps
        the same truthiness behavior.)
    """
    events = mne.read_events(path).reshape((-1, 6))
    name = os.path.basename(path)
    # sanity checks: columns 5 and 4 are expected to mirror columns 1 and 2
    assert all(events[:, 1] == events[:, 5])
    assert all(events[:, 2] == events[:, 4])
    istart = _data.var(events[:, 0], name='i_start')
    istop = _data.var(events[:, 3], name='i_stop')
    event = _data.var(events[:, 2], name='eventID')
    dataset = _data.dataset(event, istart, istop, name=name)
    if labels:
        dataset.add(_data.factor(events[:, 2], name='event', labels=labels))
    return dataset
Пример #3
0
def mark_by_threshold(dataset, DV='MEG', threshold=2e-12, above=False, below=None,
                      target='accept'):
    """
    Marks epochs based on a threshold criterion (any sensor exceeding the
    threshold at any time)

    DV : ndvar | var | str
        The data on which the criterion is evaluated (or its name in
        ``dataset``).

    above: True, False, None
        How to mark segments that exceed the threshold: True->good;
        False->bad; None->don't change

    below:
        Same as ``above`` but for segments that do not exceed the threshold

    threshold : float
        The threshold value.
        1.25e-11: detect saturated channels
        2e-12: conservative final rejection

    target : factor or str
        Factor (or its name) in which the result is stored. If ``target``
        is a string and the dataset does not contain that factor, it is
        created.

    """
    if isinstance(DV, basestring):
        DV = dataset[DV]

    # get the factor on which to store results
    if _data.isfactor(target) or _data.isvar(target):
        assert len(target) == dataset.N
    elif isinstance(target, basestring):
        if target in dataset:
            target = dataset[target]
        else:
            # create a new boolean var; all cases start out accepted
            x = _np.ones(dataset.N, dtype=bool)
            target = _data.var(x, name=target)
            dataset.add(target)
    else:
        raise ValueError("target needs to be a factor")

    # do the thresholding. The ndvar check is hoisted out of the loop and
    # the marking logic is shared (the original duplicated the whole loop
    # body for the ndvar and non-ndvar cases).
    is_ndvar = _data.isndvar(DV)
    for ID in xrange(dataset.N):
        if is_ndvar:
            # peak absolute value across all sensors and time points
            v = _np.max(_np.abs(DV[ID].x))
        else:
            v = DV[ID]

        mark = above if v > threshold else below
        if mark is not None:
            target[ID] = mark
Пример #4
0
def get_permutated_dataset(variables, count='caseID', randomize=False):
    """
    Build a dataset containing the full permutation of ``variables``.

    variables : sequence
        Design variables; each is expected to provide ``is_rand``,
        ``Ndraw``, ``N``, ``urn``, ``cells``, ``name``, ``ID`` and a
        ``_set_list_ID(i)`` method (project-specific type -- TODO confirm
        the exact contract against the variable class definition).
    count : str | None
        If a string, a var of this name enumerating the cases is added.
    randomize : bool
        Shuffle rows within each block of non-randomized combinations.
    """
    # split variables into randomized and non-randomized ones
    perm_rand = []    # permutated and randomized
    perm_nonrand = [] # permutated and not randomized
    for v in variables:
        if v.is_rand:
            perm_rand.append(v)
        else:
            perm_nonrand.append(v)
#    variables = perm_rand + perm_nonrand
    
    # store each variable's column index on the variable itself
    for i,v in enumerate(variables):
        v._set_list_ID(i)
    
    # the number of trials is the product of all variables' draw counts
    perm_n = [v.Ndraw for v in variables]
    n_trials = np.prod(perm_n)
    n_properties = len(variables)
    out = np.empty((n_trials, n_properties), dtype=np.uint8)
    
    # permutate variables: fill each column so that every combination of
    # variable levels occurs exactly once
    for i,v in enumerate(variables):
        t = np.prod(perm_n[:i])   # number of full cycles of this variable
        r = np.prod(perm_n[i+1:]) # consecutive repeats of each level
        if len(v.urn) == 0:
            out[:,i] = np.tile(np.arange(v.N), t).repeat(r)
        else:
            # urn variable: levels are drawn relative to the variables in
            # v.urn (excluding the value already taken by each of them)
            base = np.arange(v.N)
            for v0 in variables[:i]:
                if v0 in v.urn:
                    base = np.ravel([base[base!=j] for j in xrange(v.N)])
                else:
                    base = np.tile(base, v.Ndraw)
            
            out[:,i] = np.repeat(base, r)
    
    if randomize:
        # shuffle those perm factors that should be shuffled; whole rows
        # are shuffled within each bin of non-randomized combinations
        n_rand_bins = np.prod([v.Ndraw for v in perm_nonrand])
        rand_bin_len = int(n_trials / n_rand_bins)
        for i in xrange(0, n_trials, rand_bin_len):
            np.random.shuffle(out[i:i+rand_bin_len])
    
    # create dataset: one labeled factor per variable
    ds = _data.dataset(name='Design')
    for v in variables:
        x = out[:,v.ID]
        f = _data.factor(x, v.name, labels=v.cells)
        ds.add(f)
    
    if count:
        ds.add(_data.var(np.arange(ds.N), count)) 
    
    return ds
Пример #5
0
def add_fiff_to_events(path, dataset, i_start='i_start',
                       tstart=-.2, tstop=.6, properties=None,
                       name="MEG", sensorsname='fiff-sensors'):
    """
    Adds MEG data from a new file to a dataset containing events.

    path : str (path)
        location of the raw fiff file
    dataset : dataset
        dataset containing the event index variable ``i_start``
    """
    # event matrix in mne's layout (sample index, 0, event ID); every
    # event gets ID 1 since selection already happened in the dataset
    events = np.empty((dataset.N, 3), dtype=np.uint32)
    events[:, 0] = dataset[i_start].x
    events[:, 1] = 0
    events[:, 2] = 1

    raw = mne.fiff.Raw(path)

    # parse sensor net: location and name of every MEG channel
    sensor_list = [list(ch['loc'][:3]) + [ch['ch_name']]
                   for ch in raw.info['chs']
                   if ch['ch_name'].startswith('MEG')]
    sensor_net = sensors.sensor_net(sensor_list, name=sensorsname)

    picks = mne.fiff.pick_types(raw.info, meg=True, eeg=False, stim=False,
                                eog=False, include=[], exclude=[])

    # read the data; each epoch is transposed to (time, sensor)
    epochs = mne.Epochs(raw, events, 1, tstart, tstop, picks=picks)
    data = np.array([epoch.T for epoch in epochs.get_data()])

    props = {'samplingrate': epochs.info['sfreq'][0]}
    props.update(_default_fiff_properties)
    if properties:
        props.update(properties)

    timevar = _data.var(epochs.times, 'time')
    dims = (timevar, sensor_net)

    dataset.add(_data.ndvar(dims, data, properties=props, name=name))
    dataset.default_DV = name
Пример #6
0
import datetime
import data

# Take user input to set parameters.
print(
    'Welcome to hy_pynance. For instructions and valid inputs, see the readme.'
)
# Retry until data.get_old_data accepts the entered stock symbol.
while True:
    comp = input('Enter stock abreviation: ')
    metric = input('Enter metric: ')
    try:
        df = data.get_old_data(comp)
        break
    except Exception as err:
        # Fixed: the original silently swallowed the exception
        # (``except Exception as e: pass``); surface the cause so the
        # user can tell what went wrong.
        print('Wrong input, see readme for instructions (%s)' % err)

df = data.var(df, metric)

# will store data if 'y', does nothing if anything else
get_data = input('Enter "y" if you would like to store data in sql table: ')
if get_data == 'y':
    # NOTE(review): ``viz`` is never imported in this script, so this
    # branch raises NameError -- confirm whether an ``import viz`` is
    # missing or the engine should come from ``data``.
    engine = viz.get_engine()
    df.to_sql(comp, engine)

# plots
# valid colors = {'b', 'g', 'r', 'c', 'm', 'y', 'k', 'w'}
data._plot(df, comp, metric, 'var')
Пример #7
0
def fiff_epochs(dataset, i_start='i_start', 
                tstart=-.2, tstop=.6, baseline=(None,  0),
                properties=None, name="MEG", sensorsname='fiff-sensors'):
    """
    Uses the events in ``dataset[i_start]`` to extract epochs from the raw 
    file referenced in ``dataset.info['source']`` and adds them to the
    dataset as an ndvar named ``name``.
    
    i_start : str
        name of the variable containing the index of the events to be
        imported
    tstart, tstop : scalar
        epoch start and stop relative to the event (passed to mne.Epochs)
    baseline : tuple | None
        baseline correction interval, passed to mne.Epochs
    properties : dict | None
        properties set on the ndvar in addition to the defaults
    name : str
        name for the ndvar holding the MEG data
    sensorsname : str
        name for the sensor net
    """
    # event matrix in mne's layout (sample index, 0, event ID); every
    # event gets ID 1 since selection already happened in the dataset
    events = np.empty((dataset.N, 3), dtype=np.int32)
    events[:,0] = dataset[i_start].x
    events[:,1] = 0
    events[:,2] = 1
    
    # the raw file location was stored under 'source' by the function
    # that created the dataset (see fiff_events)
    source_path = dataset.info['source']
    raw = mne.fiff.Raw(source_path)
    
    # parse sensor net: location and name of every MEG channel
    sensor_list = []
    for ch in raw.info['chs']:
        ch_name = ch['ch_name']
        if ch_name.startswith('MEG'):
            x, y, z = ch['loc'][:3]
            sensor_list.append([x, y, z, ch_name])
    sensor_net = sensors.sensor_net(sensor_list, name=sensorsname)
    
    picks = mne.fiff.pick_types(raw.info, meg=True, eeg=False, stim=False, 
                                eog=False, include=[], exclude=[])
    
    epochs = mne.Epochs(raw, events, 1, tstart, tstop, picks=picks, 
                        baseline=baseline)
    
    # determine data container properties
    epoch_shape = (len(picks), len(epochs.times))
    data_shape = (len(events), len(epochs.times), len(picks))
    # mne.fiff.raw.read_raw_segment uses float32
    data = np.empty(data_shape, dtype='float32') 

    # read the data epoch by epoch, transposing each to (time, sensor)
    for i, epoch in enumerate(epochs):
        if epoch.shape == epoch_shape:
            data[i] = epoch.T
        else:
            # an epoch with an unexpected shape (e.g. truncated at the
            # end of the recording) must not be silently kept
            msg = ("Epoch %i shape mismatch: does your epoch definition "
                   "result in an epoch that overlaps the end of your data "
                   "file?" % i)
            raise IOError(msg)
    
    # read data properties
    # NOTE(review): the update order lets _default_fiff_properties
    # override the 'samplingrate' entry set here -- confirm intended
    props = {'samplingrate': epochs.info['sfreq'][0]}
    props.update(_default_fiff_properties)
    if properties:
        props.update(properties)
    
    T = epochs.times
    timevar = _data.var(T, 'time')
    dims = (timevar, sensor_net)
    
    dataset.add(_data.ndvar(dims, data, properties=props, name=name))
    dataset.default_DV = name
Пример #8
0
def fiff(raw, events, conditions, varname='condition', dataname='MEG',
         tstart=-.2, tstop=.6, properties=None, name=None, c_colors={},
         sensorsname='fiff-sensors'):
    """
    Loads data directly when two files (raw and events) are provided 
    separately.
    
    conditions : dict
        ID->name dictionary of conditions that should be imported
    events : str
        path to the event file
    properties : dict
        set properties in addition to the defaults
    raw : str
        path to the raw file
    varname : str
        variable name that will contain the condition value 
    dataname : str
        name for the ndvar holding the MEG data
    name : str
        name for the dataset (default: basename of the raw file)
    c_colors : dict
        colors passed to the condition factor

    NOTE(review): if ``conditions`` is empty, ``samplingrate`` and ``T``
    are never assigned and the code below raises NameError -- confirm
    callers always pass at least one condition.
    """
    if name is None:
        name = os.path.basename(raw)
    
    raw = mne.fiff.Raw(raw)
    
    # parse sensor net: location and name of every MEG channel
    sensor_list = []
    for ch in raw.info['chs']:
        ch_name = ch['ch_name']
        if ch_name.startswith('MEG'):
            x, y, z = ch['loc'][:3]
            sensor_list.append([x, y, z, ch_name])
    sensor_net = sensors.sensor_net(sensor_list, name=sensorsname)
    
    events = mne.read_events(events)
    picks = mne.fiff.pick_types(raw.info, meg=True, eeg=False, stim=False, 
                                eog=False, include=[], exclude=[])
    
    data = []  # epoch data arrays, transposed to (time, sensor)
    c_x = []   # condition ID for each epoch
    
    # read the data, one condition at a time
    for ID in conditions:
        epochs = mne.Epochs(raw, events, ID, tstart, tstop, picks=picks)
        samplingrate = epochs.info['sfreq'][0]
        
        # data
        c_data = epochs.get_data()        # n_ep, n_ch, n_t 
        
        for epoch in c_data:
            data.append(epoch.T)
#        data.append(c_data.T)

        # presumably identical across conditions (same tstart/tstop);
        # only the last iteration's value is kept
        T = epochs.times
        
        # conditions variable
        n_ep = len(c_data)
        c_x.extend([ID] * n_ep)
    
    # construct the dataset
    c_factor = _data.factor(c_x, name=varname, labels=conditions, 
                            colors=c_colors, retain_label_codes=True)
    
    # NOTE(review): the update order lets _default_fiff_properties
    # override the 'samplingrate' entry set here -- confirm intended
    props = {'samplingrate': samplingrate}
    props.update(_default_fiff_properties)
    if properties is not None:
        props.update(properties)
    
    data = np.array(data)
#    data = np.concatenate(data, axis=0)
    
    timevar = _data.var(T, 'time')
    dims = (timevar, sensor_net)
    
    Y = _data.ndvar(dims, data, properties=props, name=dataname)
    
    dataset = _data.dataset(Y, c_factor, name=name, default_DV=dataname)
    return dataset