Example #1
import numpy as np
# GammatoneFilterbank and DataConverter come from the surrounding package (imports not shown).

def convert_data(block):
    """Return stimulus and response matrices."""

    print("---------- Converting data set ----------")

    fs_stim = 16000.
    f_cutoff = (500., 8000.)

    win_len = 0.04
    fs_spec = 400.  # bin width: 2.5 ms
    filt_per_erb = 1.
    n_samples = np.inf

    fb = GammatoneFilterbank(samplerate=fs_stim, f_cutoff=f_cutoff,
                             filt_per_erb=filt_per_erb,
                             spectype='magnitude', bw_factor=1.)

    converter = DataConverter(fb, win_len=win_len, samplerate=fs_spec,
                              verbose=True, n_samples=n_samples,
                              scaling='dB', dynamic_range=60.,
                              center=True)
    X, Y, rfsize, axes, stim_pos = converter.process(block)

    print "Stimulus matrix: %d temp. steps x %d features" % X.shape
    print "Spike    matrix: %d temp. steps x %d trials" % Y.shape
    print "%d spikes (%0.3f spikes per sample)" % (np.sum(Y), np.mean(Y))

    return X, Y, rfsize, axes, fs_spec
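
With win_len = 0.04 s and fs_spec = 400 Hz, each row of X covers 16 time lags per frequency channel. A minimal sketch of that bookkeeping follows; the channel-count estimate uses the Glasberg & Moore ERB-rate scale and is only an approximation of what GammatoneFilterbank actually builds for filt_per_erb = 1:

import numpy as np

def erb_number(f_hz):
    # Glasberg & Moore ERB-rate (Cam) scale
    return 21.4 * np.log10(4.37 * f_hz / 1000. + 1.)

win_len = 0.04            # analysis window (s)
fs_spec = 400.            # spectrogram sample rate (Hz)
f_cutoff = (500., 8000.)  # filterbank cutoffs (Hz)
filt_per_erb = 1.

n_lags = int(np.round(win_len * fs_spec))  # 16 time lags
n_channels = int(np.round(filt_per_erb *
                          (erb_number(f_cutoff[1]) - erb_number(f_cutoff[0]))))  # ~23 channels
print("approx. features per time step:", n_lags * n_channels)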
Example #2
import numpy as np
# create_postfilt_features is assumed to be provided alongside DataConverter.

def convert_data(block, t_spikefilt=0.015):
    """Return stimulus and response matrices."""

    print("---------- Converting data set ----------")

    fs_stim = 16000.
    f_cutoff = (500., 8000.)

    win_len = 0.04
    fs_spec = 500.  # bin width: 2 ms
    filt_per_erb = 1.
    n_samples = np.inf

    fb = GammatoneFilterbank(samplerate=fs_stim, f_cutoff=f_cutoff,
                             filt_per_erb=filt_per_erb,
                             spectype='magnitude', bw_factor=1.)

    converter = DataConverter(fb, win_len=win_len, samplerate=fs_spec,
                              verbose=True, n_samples=n_samples,
                              scaling='dB', dynamic_range=60.,
                              center=True)
    X, Y, rfsize, axes, stim_pos = converter.process(block)

    # Add post-spike filter features
    n_spikefilt = int(np.ceil(t_spikefilt * fs_spec))

    Y = Y[:, 0]  # works only for single trials
    Z = create_postfilt_features(Y, n_spikefilt)
    X = np.hstack((X, Z))  # append to feature matrix

    print "Stimulus matrix: %d temp. steps x %d features" % X.shape
    print "%d spikes (%0.3f spikes per sample)" % (np.sum(Y), np.mean(Y))
    print "Post-spike filter length: %d samples" % n_spikefilt

    return X, Y, rfsize, axes, fs_spec, n_spikefilt
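
create_postfilt_features is not listed here. Its role is to append spike-history ("post-spike filter") regressors to the stimulus matrix, the standard way to let a GLM capture refractoriness and bursting. A minimal sketch of such a helper, assuming it returns lagged copies of the binary spike train with zero padding at the start (the package's actual implementation may order or pad the lags differently):

import numpy as np

def create_postfilt_features(y, n_lags):
    # column k holds the spike train delayed by k+1 bins
    y = np.asarray(y, dtype=float)
    Z = np.zeros((y.shape[0], n_lags))
    for k in range(n_lags):
        Z[k + 1:, k] = y[:-(k + 1)]
    return Z

With t_spikefilt = 0.015 s and fs_spec = 500 Hz, n_spikefilt = ceil(0.015 * 500) = 8, so eight history columns are appended to X.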
Example #3
import numpy as np

def convert_data(block):
    """Return stimulus and response matrices."""

    print("---------- Converting data set ----------")

    fs_stim = 16000.
    f_cutoff = (500., 8000.)

    win_len = 0.04
    fs_spec = 400.  # bin width: 2.5 ms
    filt_per_erb = 1.
    n_samples = np.inf

    fb = GammatoneFilterbank(samplerate=fs_stim,
                             f_cutoff=f_cutoff,
                             filt_per_erb=filt_per_erb,
                             spectype='magnitude',
                             bw_factor=1.)

    converter = DataConverter(fb,
                              win_len=win_len,
                              samplerate=fs_spec,
                              verbose=True,
                              n_samples=n_samples,
                              scaling='dB',
                              dynamic_range=60.,
                              center=True)
    X, Y, rfsize, axes, stim_pos = converter.process(block)

    print "Stimulus matrix: %d temp. steps x %d features" % X.shape
    print "Spike    matrix: %d temp. steps x %d trials" % Y.shape
    print "%d spikes (%0.3f spikes per sample)" % (np.sum(Y), np.mean(Y))

    return X, Y, rfsize, axes, fs_spec
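
All three examples pass scaling='dB' and dynamic_range=60. to DataConverter, which suggests the gammatone magnitudes are converted to decibels and floored 60 dB below the peak before centering. A hedged sketch of that convention (the converter's exact formula is an assumption, not taken from the package):

import numpy as np

def to_db(spec, dynamic_range=60.):
    # magnitude -> dB, clipped to `dynamic_range` below the maximum
    eps = np.finfo(float).tiny
    db = 20. * np.log10(np.maximum(spec, eps))
    return np.maximum(db, db.max() - dynamic_range)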