Example #1
def test_collecting_feature():
    """Test computation of spectral markers"""
    epochs = _get_data()[:2]
    psds_params = dict(n_fft=4096, n_overlap=100, n_jobs='auto', nperseg=128)
    estimator = PowerSpectralDensityEstimator(tmin=None,
                                              tmax=None,
                                              fmin=1.,
                                              fmax=45.,
                                              psd_method='welch',
                                              psd_params=psds_params,
                                              comment='default')

    wpli = WeightedPhaseLagIndex()
    markers_list = [
        PowerSpectralDensity(estimator=estimator, fmin=1, fmax=4),
        ContingentNegativeVariation(), wpli
    ]

    markers = Markers(markers_list)
    # check states and names
    for name, marker in markers.items():
        assert_true(not any(k.endswith('_') for k in vars(marker)))
        assert_equal(name, marker._get_title())

    # check order
    assert_equal(list(markers.values()), markers_list)

    # check fit
    markers.fit(epochs)
    for t_marker in markers_list:
        assert_true(any(k.endswith('_') for k in vars(t_marker)))

    tmp = _TempDir()
    tmp_fname = tmp + '/test-smarkers.hdf5'
    markers.save(tmp_fname)
    markers2 = read_markers(tmp_fname)
    for ((k1, v1), (k2, v2)) in zip(markers.items(), markers2.items()):
        assert_equal(k1, k2)
        assert_equal(
            {
                k: v
                for k, v in vars(v1).items()
                if not k.endswith('_') and not k == 'estimator'
            }, {
                k: v
                for k, v in vars(v2).items()
                if not k.endswith('_') and not k == 'estimator'
            })
    pe = PermutationEntropy().fit(epochs)
    markers._add_marker(pe)

    tmp = _TempDir()
    tmp_fname = tmp + '/test-markers.hdf5'
    markers.save(tmp_fname)
    markers3 = read_markers(tmp_fname)
    assert_true(pe._get_title() in markers3)

    assert_true(wpli._get_title() in markers3)
Example #2
sns.set_color_codes()


def trim_mean80(a, axis=0):  # noqa
    # Trimmed mean over the central 80% of values (10% cut from each tail).
    return trim_mean(a, proportiontocut=.1, axis=axis)


def entropy(a, axis=0):  # noqa
    # Shannon entropy along `axis`, normalized by the log of the number of
    # elements so that a uniform distribution yields 1.
    return -np.nansum(a * np.log(a), axis=axis) / np.log(a.shape[axis])


fname = 'data/JSXXX-markers.hdf5'
if not op.exists(fname):
    raise ValueError('Please run compute_doc_forest_markers.py example first')

fc = read_markers(fname)


##############################################################################
# Set regions of interest
#
# For some markers we do not want to use all channels, so we supply the
# channel selections (regions of interest) used for them below; a short usage
# sketch follows the definitions.

scalp_roi = np.arange(224)
non_scalp = np.arange(224, 256)
cnv_roi = np.array([5,  6, 13, 14, 15, 21, 22])
mmn_roi = np.array([5,   6,   8,  13,  14,  15,  21,  22,  44,  80, 131, 185])
p3b_roi = np.array([8,  44,  80,  99, 100, 109, 118, 127, 128, 131, 185])
p3a_roi = np.array([5,   6,   8,  13,  14,  15,  21,  22,  44,  80, 131, 185])
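
##############################################################################
# The block below is a small usage sketch, not part of the original script: it
# assumes a hypothetical per-channel array ``marker_topo`` (one value per
# channel, e.g. a marker topography extracted from ``fc``) and shows how the
# ROI index arrays combine with the reduction functions defined above.

marker_topo = np.random.RandomState(42).rand(256)  # hypothetical values

scalp_value = trim_mean80(marker_topo[scalp_roi])  # robust mean over scalp
cnv_value = trim_mean80(marker_topo[cnv_roi])      # robust mean over the CNV ROI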
Example #3
    for k, v in attrs.items():
        if k.endswith('_'):
            setattr(out, k, v)
    return out


# Register the marker with NICE
register_marker_class(MyCustomMarker)

# Now you can create a collection mixing built-in NICE markers and the custom marker

markers_list = [
    PermutationEntropy(),
    ContingentNegativeVariation(),
    MyCustomMarker()
]

markers = Markers(markers_list)

# Fit on test data
epochs = _get_data()[:2]
markers.fit(epochs)

# Save to a file
tmp = _TempDir()
tmp_fname = tmp + '/test-markers.hdf5'
markers.save(tmp_fname)

# Read from file
markers2 = read_markers(tmp_fname)
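
# A possible follow-up check (not part of the original snippet): the reloaded
# collection should expose the same marker names, in the same order, as the
# collection that was saved.
assert list(markers2.keys()) == list(markers.keys())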
Example #4
df_data['condition'] = []

for k in markers.keys():
    df_data[k] = []

o_path = op.join(db_path, 'results', 'sources')
o_path = op.join(o_path, subject)

with open('../data/rois.json', 'r') as f:
    rois = json.load(f)

for condition in conditions:
    print('Using condition: {}'.format(condition))
    t_path = op.join(o_path, condition)

    fc = nice.read_markers(op.join(t_path, 'results-markers.hdf5'))
    _samp = fc['nice/marker/SymbolicMutualInformation/theta_weighted']
    labels = _samp.ch_info_['ch_names']
    # Map each ROI name to the indices of its channels in this recording
    rois_idx = OrderedDict()
    for roi, ch_names in rois.items():
        t_elems = np.array([labels.index(x) for x in ch_names])
        rois_idx[roi] = t_elems
    n_rois = len(rois_idx)
    rois_names = list(rois_idx.keys())

    for i in range(n_rois):
        i_roi = rois_names[i]
        i_idx = rois_idx[i_roi]
        for j in range(i, n_rois):
            j_roi = rois_names[j]
            j_idx = rois_idx[j_roi]