Code example #1
File: findAbort.py  Project: hyperbolicTom/pyctf
        printerror(msg)
        printusage()
        sys.exit(1)
    return optlist, args

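# No command-line options are defined (empty option string); the only
# positional argument expected is the dataset name.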
optlist, args = parseargs("")

for opt, arg in optlist:
    pass

if len(args) != 1:
    printusage()
    sys.exit(1)

dsname = args[0]
ds = pyctf.dsopen(dsname)

T = ds.getNumberOfTrials()
if T > 1:
    printerror("This dataset has more than one trial.")
    sys.exit(1)

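# SCLK01 is the CTF system clock channel; read its raw samples from trial 0.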
ch = ds.getChannelIndex("SCLK01")
d = ds.getDsRawData(0, ch)

if d[-1] != 0.:
    printerror("This dataset does not appear to end with zero.")
    sys.exit(1)

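# Find the first zero sample on the system clock, i.e. where the (aborted)
# acquisition stopped.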
l = d.tolist()
samp = l.index(0.)
Code example #2
File: fixit2.py  Project: hyperbolicTom/pyctf
import sys, os
from pyctf import dsopen
from pyctf import ctf

if len(sys.argv) < 2:
    print "usage: %s [-f] dataset" % sys.argv[0]
    exit(1)

n = 1
fixit = False
if sys.argv[1][0:2] == '-f':
    fixit = True
    n = 2
dsname = sys.argv[n]
ds = dsopen(dsname)

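# Basic acquisition parameters.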
srate = ds.getSampleRate()
ntrial = ds.getNumberOfTrials()
nsamp = ds.getNumberOfSamples()
nch = ds.getNumberOfChannels()

res4name = ds.getDsFileNameExt(".res4")
r = ctf.read_res4_structs(res4name)

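# Print the name and sensor resource record of every analog (UADC) channel.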
for ch in range(nch):
    sr = list(r.sensRes[ch][0])
    if sr[ctf.sr_type] == ctf.TYPE_UADC:
        print(ds.getChannelName(ch), sr)

if not fixit:
Code example #3
import numpy
import pyctf

ds_fname = '/mnt/neuro/MEG_data/raw/20110429/CXIFPSQC_rest_20110429_01.ds/'

ds = pyctf.dsopen(ds_fname)

# Continuous head localization channels.

fids = ['Na', 'Le', 'Re']
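# HLC00c1/2/3 are the x, y, z position channels of head coil c
# (nasion, left ear, right ear), following the order of fids above.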
chans = {
    fids[0]: ['HLC0011', 'HLC0012', 'HLC0013'],
    fids[1]: ['HLC0021', 'HLC0022', 'HLC0023'],
    fids[2]: ['HLC0031', 'HLC0032', 'HLC0033']
}

all_ds = []
for chan in fids:
    i = fids.index(chan)
    o = ds.head[i]
    c = chans[chan]
    ch_map = ds.channel

    # make an nx3; one channel per column
    trial = 0
    l = []
    for ch in c:
        x = ds.getDsData(trial, ch_map[ch])
        x.shape = (x.shape[0], 1)
        l.append(x)
    d = numpy.hstack(l) * 100.  # m -> cm
Code example #4
File: markers.py  Project: linneasf/MNE_project
#! /usr/bin/env python

import os
import numpy as np
import pyctf
import mne
from mne.io import read_raw_ctf

tmin, tmax = -0.1, 0.5

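# The dataset path comes from the 'ds' environment variable; the data are
# read with MNE, while pyctf is used for the CTF marker and trial information.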
raw_path = os.getenv('ds')
raw = read_raw_ctf(raw_path, clean_names=True, preload=True)
ds = pyctf.dsopen(raw_path)

# pick MEG channels
picks = mne.pick_types(raw.info, meg='mag', eeg=False, stim=True)

# Compute epochs
events = mne.find_events(raw, stim_channel='UPPT001')

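# Build MNE-style events from the CTF marker 'pic1' defined in the dataset.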
m = 'pic1'

srate = raw.info['sfreq']
tlen = ds.getNumberOfSamples()
ev = []
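# Each mark is a (trial, time in seconds) pair; convert it to an absolute
# sample index across concatenated trials.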
for tr, t in ds.marks[m]:
    s = tr * tlen + int(t * srate + .5)
    ev.append([s, 0, 1])

ev = np.array(ev)  # MNE expects events as an (n_events, 3) integer array
epochs = mne.Epochs(raw, ev, {'pic1': 1}, tmin, tmax, picks=picks,
                    baseline=(None, 0), reject=None, preload=False)