Code Example #1
File: test_transformer.py  Project: Hugo-W/mne-python

# Imports assumed for running this snippet on its own (not shown on the original page):
import numpy as np
from numpy.testing import assert_array_equal, assert_equal, assert_raises
from mne.decoding import Vectorizer

def test_vectorizer():
    """Test Vectorizer."""
    data = np.random.rand(150, 18, 6)
    vect = Vectorizer()
    result = vect.fit_transform(data)
    assert_equal(result.ndim, 2)

    # check inverse_transform
    orig_data = vect.inverse_transform(result)
    assert_equal(orig_data.ndim, 3)
    assert_array_equal(orig_data, data)
    assert_array_equal(vect.inverse_transform(result[1:]), data[1:])

    # check with different shape
    assert_equal(vect.fit_transform(np.random.rand(150, 18, 6, 3)).shape,
                 (150, 324))
    assert_equal(vect.fit_transform(data[1:]).shape, (149, 108))

    # check that errors are raised correctly
    vect.fit(np.random.rand(105, 12, 3))
    assert_raises(ValueError, vect.transform, np.random.rand(105, 12, 3, 1))
    assert_raises(ValueError, vect.inverse_transform,
                  np.random.rand(102, 12, 12))
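The shape checks above follow from what mne.decoding.Vectorizer does: fit_transform collapses every dimension after the first into a single feature axis, and inverse_transform restores the trailing dimensions seen during fit. A minimal sketch, with array shapes chosen purely for illustration:

import numpy as np
from mne.decoding import Vectorizer

X = np.random.rand(150, 18, 6)      # e.g. (samples, channels, times)
vect = Vectorizer()
X2d = vect.fit_transform(X)         # -> (150, 108): 18 * 6 features per sample
X3d = vect.inverse_transform(X2d)   # -> (150, 18, 6), matching X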
Code Example #2
# Imports assumed for this excerpt; labels (a list of per-file label arrays)
# and working_dir are defined earlier in the original script.
import os
from glob import glob

import numpy as np
import mne
from mne.decoding import Vectorizer
from sklearn import utils
from sklearn.preprocessing import MinMaxScaler
from sklearn.model_selection import StratifiedShuffleSplit

labels = np.concatenate(labels)

# get the data
# scale the data to (0, 1)
data = []
for tf in glob(os.path.join(working_dir, '*-tfr.h5')):
    tfcs = mne.time_frequency.read_tfrs(tf)[0]
    data_ = tfcs.data
    # define a (0,1) scaler
    scaler = MinMaxScaler(feature_range=(0, 1))
    # define a vectorizer so we can transform the data from 3D to 2D
    vectorizer = Vectorizer()
    data_vec = vectorizer.fit_transform(data_)
    data_scaled = scaler.fit_transform(data_vec)
    # after we scale the data to (0,1), we transform the data from 2D back to 3D
    data_scaled = vectorizer.inverse_transform(data_scaled)
    del tfcs
    del data_, data_vec
    data.append(data_scaled)
    del data_scaled
data = np.concatenate(data, axis=0)
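# Note on the pattern above: scikit-learn scalers such as MinMaxScaler only
# accept 2D (n_samples, n_features) input, which is why each TFR array is
# vectorized first, scaled feature-by-feature to (0, 1) along the first axis,
# and then mapped back to its original multi-dimensional shape with
# inverse_transform.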

# shuffle the order of the feature matrix and the labels

for _ in range(10):
    data, labels = utils.shuffle(data, labels)

# customize the temporal decoding process
# define stratified shuffle-split cross-validation with 10 splits
cv = StratifiedShuffleSplit(n_splits=10, random_state=12345)
coefs = []
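The excerpt ends right after the cross-validation object is created. Purely as a sketch of one common way to use cv and coefs (an assumption, not the original script's continuation), a linear classifier could be fit per split and its coefficients collected:

# Hypothetical continuation (assumed, not from the original script):
from sklearn.linear_model import LogisticRegression

data_2d = data.reshape(len(data), -1)   # flatten each epoch for the classifier
for train_idx, test_idx in cv.split(data_2d, labels):
    clf = LogisticRegression(max_iter=1000)
    clf.fit(data_2d[train_idx], labels[train_idx])
    coefs.append(clf.coef_)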