Example #1
def train(filename):
    cnt = io.load_bcicomp3_ds2(filename)

    fs_n = cnt.fs / 2

    b, a = proc.signal.butter(5, [38 / fs_n], btype='low')
    cnt = proc.lfilter(cnt, b, a)

    b, a = proc.signal.butter(5, [.1 / fs_n], btype='high')
    cnt = proc.lfilter(cnt, b, a)

    cnt = proc.subsample(cnt, 60)

    epo = proc.segment_dat(cnt, MARKER_DEF_TRAIN, SEG_IVAL)

    # from wyrm import plot
    # logger.debug('Plotting channels...')
    # plot.plot_spatio_temporal_r2_values(proc.sort_channels(epo))
    # print JUMPING_MEANS_IVALS
    # plot.plt.show()

    fv = proc.jumping_means(epo, JUMPING_MEANS_IVALS)
    fv = proc.create_feature_vectors(fv)

    cfy = proc.lda_train(fv)
    return cfy
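
All of these snippets rely on a shared module header that the listing does not show: the wyrm io and processing modules plus constants such as MARKER_DEF_TRAIN, SEG_IVAL and JUMPING_MEANS_IVALS. A minimal sketch of what that header might look like; the file path, marker definitions and interval values below are illustrative assumptions, not the original project's settings:

# Hypothetical module header assumed by the examples (values are illustrative).
from wyrm import io
from wyrm import processing as proc

TRAIN_DATA = 'data/Subject_A_Train.mat'  # assumed path to the BCI Competition III, dataset II file
MARKER_DEF_TRAIN = {'target': ['target'], 'nontarget': ['nontarget']}
SEG_IVAL = [0, 700]                      # epoch window in ms relative to each stimulus onset
JUMPING_MEANS_IVALS = [[150, 220], [200, 260], [310, 360], [550, 660]]  # ms, example intervals
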
Example #2
def train(filename):
    dat = io.load_bcicomp3_ds2(filename)

    fs_n = dat.fs / 2

    b, a = proc.signal.butter(16, [30 / fs_n], btype='low')
    dat = proc.lfilter(dat, b, a)

    b, a = proc.signal.butter(5, [.4 / fs_n], btype='high')
    dat = proc.lfilter(dat, b, a)

    dat = proc.subsample(dat, 60)

    epo = proc.segment_dat(dat, MARKER_DEF_TRAIN, SEG_IVAL)

    #from wyrm import plot
    #plot.plot_spatio_temporal_r2_values(proc.sort_channels(epo))
    #print JUMPING_MEANS_IVALS
    #plot.plt.show()

    fv = proc.jumping_means(epo, JUMPING_MEANS_IVALS)
    fv = proc.create_feature_vectors(fv)

    clf = proc.lda_train(fv)
    return clf
Example #3
def train(filename):
    cnt = io.load_bcicomp3_ds2(filename)

    fs_n = cnt.fs / 2

    b, a = proc.signal.butter(5, [30 / fs_n], btype='low')
    cnt = proc.lfilter(cnt, b, a)

    b, a = proc.signal.butter(5, [.4 / fs_n], btype='high')
    cnt = proc.lfilter(cnt, b, a)

    cnt = proc.subsample(cnt, 60)

    epo = proc.segment_dat(cnt, MARKER_DEF_TRAIN, SEG_IVAL)

    #from wyrm import plot
    #plot.plot_spatio_temporal_r2_values(proc.sort_channels(epo))
    #print JUMPING_MEANS_IVALS
    #plot.plt.show()

    fv = proc.jumping_means(epo, JUMPING_MEANS_IVALS)
    fv = proc.create_feature_vectors(fv)

    cfy = proc.lda_train(fv)
    return cfy
Example #4
def train(filename_):
    cnt = io.load_bcicomp3_ds2(filename_)

    fs_n = cnt.fs / 2

    b, a = proc.signal.butter(5, [HIGH_CUT / fs_n], btype='low')
    cnt = proc.lfilter(cnt, b, a)

    b, a = proc.signal.butter(5, [LOWER_CUT / fs_n], btype='high')
    cnt = proc.lfilter(cnt, b, a)
    print("Filtragem aplicada em [{} Hz ~ {} Hz]".format(LOWER_CUT, HIGH_CUT))

    cnt = proc.subsample(cnt, SUBSAMPLING)
    print("Sub-amostragem em {} Hz".format(SUBSAMPLING))

    epo = proc.segment_dat(cnt, MARKER_DEF_TRAIN, SEG_IVAL)
    print("Dados segmentados em intervalos de [{} ~ {}]".format(
        SEG_IVAL[0], SEG_IVAL[1]))

    fv = proc.jumping_means(epo, JUMPING_MEANS_INTERVALS)
    fv = proc.create_feature_vectors(fv)

    print("Iniciando treinamento da LDA...")
    cfy = proc.lda_train(fv)
    print("Treinamento concluido!")
    return cfy
Example #5
def offline_experiment(filename_, cfy_, true_labels_):
    print("\n")
    cnt = io.load_bcicomp3_ds2(filename_)

    fs_n = cnt.fs / 2

    b, a = proc.signal.butter(5, [HIGH_CUT / fs_n], btype='low')
    cnt = proc.filtfilt(cnt, b, a)

    b, a = proc.signal.butter(5, [LOWER_CUT / fs_n], btype='high')
    cnt = proc.filtfilt(cnt, b, a)

    cnt = proc.subsample(cnt, SUBSAMPLING)

    epo = proc.segment_dat(cnt, MARKER_DEF_TEST, SEG_IVAL)

    fv = proc.jumping_means(epo, JUMPING_MEANS_INTERVALS)
    fv = proc.create_feature_vectors(fv)

    lda_out = proc.lda_apply(fv, cfy_)
    markers = [fv.class_names[cls_idx] for cls_idx in fv.axes[0]]
    result = zip(markers, lda_out)
    endresult = []
    markers_processed = 0
    letter_prob = {i: 0 for i in 'abcdefghijklmnopqrstuvwxyz123456789_'}
    for s, score in result:
        if markers_processed == 180:
            endresult.append(
                sorted(letter_prob.items(), key=lambda x: x[1])[-1][0])
            letter_prob = {
                i: 0
                for i in 'abcdefghijklmnopqrstuvwxyz123456789_'
            }
            markers_processed = 0
        for letter in s:
            letter_prob[letter] += score
        markers_processed += 1

    print('Predicted letters : %s' % "".join(endresult))
    print('True letters      : %s' % true_labels_)
    acc = np.count_nonzero(
        np.array(endresult) == np.array(
            list(true_labels_.lower()[:len(endresult)]))) / len(endresult)
    print("Acertividade Final : %d" % (acc * 100))
Example #6
    #from wyrm import plot
    #plot.plot_spatio_temporal_r2_values(proc.sort_channels(epo))
    #print JUMPING_MEANS_IVALS
    #plot.plt.show()

    fv = proc.jumping_means(epo, JUMPING_MEANS_IVALS)
    fv = proc.create_feature_vectors(fv)

    clf = proc.lda_train(fv)
    return clf


if __name__ == '__main__':
    logger.debug('Training...')
    clf = train(TRAIN_DATA)

    logger.debug('Starting Online experiment...')
    cnt = io.load_bcicomp3_ds2(TEST_DATA)
    amp = libmushu.get_amp('replayamp')
    # fast (non-realtime)
    amp.configure(data=cnt.data,
                  marker=cnt.markers,
                  channels=cnt.axes[-1],
                  fs=cnt.fs,
                  realtime=False,
                  samples=1000)
    # slow (realtime)
    #amp.configure(data=cnt.data, marker=cnt.markers, channels=cnt.axes[-1], fs=cnt.fs)
    online_experiment(amp, clf)
Example #7
acc = 0
for subject in range(2):
    if subject == 0:
        training_set = TRAIN_A
        testing_set = TEST_A
        labels = TRUE_LABELS_A
        jumping_means_ivals = JUMPING_MEANS_IVALS_A
    else:
        training_set = TRAIN_B
        testing_set = TEST_B
        labels = TRUE_LABELS_B
        jumping_means_ivals = JUMPING_MEANS_IVALS_B
    
    # load the training set
    print "before loading"
    dat = load_bcicomp3_ds2(training_set)
    print "after loading "
    fv_train, epo[subject] = preprocessing(dat, MARKER_DEF_TRAIN, jumping_means_ivals)
    
    # train the lda
    print "before training"
    cfy = proc.lda_train(fv_train)

    print "after training"
    
    # load the testing set
    dat = load_bcicomp3_ds2(testing_set)
    fv_test, _ = preprocessing(dat, MARKER_DEF_TEST, jumping_means_ivals)
    
    # predict
    lda_out_prob = proc.lda_apply(fv_test, cfy)
Example #8
    cnt = proc.subsample(cnt, 60)

    epo = proc.segment_dat(cnt, MARKER_DEF_TRAIN, SEG_IVAL)

    #from wyrm import plot
    #plot.plot_spatio_temporal_r2_values(proc.sort_channels(epo))
    #print JUMPING_MEANS_IVALS
    #plot.plt.show()

    fv = proc.jumping_means(epo, JUMPING_MEANS_IVALS)
    fv = proc.create_feature_vectors(fv)

    cfy = proc.lda_train(fv)
    return cfy


if __name__ == '__main__':
    logger.debug('Training...')
    cfy = train(TRAIN_DATA)

    logger.debug('Starting Online experiment...')
    cnt = io.load_bcicomp3_ds2(TEST_DATA)
    amp = libmushu.get_amp('replayamp')
    if REALTIME:
        amp.configure(data=cnt.data, marker=cnt.markers, channels=cnt.axes[-1], fs=cnt.fs, blocksize_samples=4)
    else:
        amp.configure(data=cnt.data, marker=cnt.markers, channels=cnt.axes[-1], fs=cnt.fs, realtime=False, blocksize_samples=40)
    online_experiment(amp, cfy)

Example #9
acc = 0
for subject in range(2):
    if subject == 0:
        training_set = TRAIN_A
        testing_set = TEST_A
        labels = TRUE_LABELS_A
        jumping_means_ivals = JUMPING_MEANS_IVALS_A
    else:
        training_set = TRAIN_B
        testing_set = TEST_B
        labels = TRUE_LABELS_B
        jumping_means_ivals = JUMPING_MEANS_IVALS_B

    # load the training set
    print "before loading"
    dat = load_bcicomp3_ds2(training_set)
    print "after loading "
    fv_train, epo[subject] = preprocessing(dat, MARKER_DEF_TRAIN,
                                           jumping_means_ivals)
    labels = fv_train.axes[0]
    y_as_categorical = to_categorical(labels)
    lstm_model.fit(epo[subject].data,
                   y_as_categorical,
                   verbose=1,
                   show_accuracy=1,
                   validation_split=0.1,
                   nb_epoch=20,
                   class_weight={
                       0: 1,
                       1: 50
                   })
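
Example #9 swaps the LDA for a Keras LSTM; the lstm_model and the to_categorical helper are assumed to be set up elsewhere in the script. A rough sketch of what that setup could look like with the legacy Keras 1.x API the snippet uses (layer sizes and dimensions are illustrative):

from keras.models import Sequential
from keras.layers import LSTM, Dense
from keras.utils.np_utils import to_categorical   # Keras 1.x location of to_categorical

# wyrm epochs are shaped (trials, time, channels), which matches the LSTM input layout.
n_timesteps, n_channels = 38, 64                  # illustrative dimensions
lstm_model = Sequential()
lstm_model.add(LSTM(32, input_shape=(n_timesteps, n_channels)))
lstm_model.add(Dense(2, activation='softmax'))    # target vs. non-target
lstm_model.compile(loss='categorical_crossentropy', optimizer='adam')
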