Example #1
        con.save('{dir}NEMO_{c}_{f}-average-connectivity.h5'.format(
            dir=meg_dir, c=cond, f=freq))

    # Compute contrast between conditions
    contrast = ga_con[exp_conds[0]] - ga_con[exp_conds[1]]
    contrast.save(
        '{dir}NEMO_pos_vs_ton_contrast_{f}-avg-connectivity.h5'.format(
            dir=meg_dir, f=freq))

    # Perform a permutation test to only retain connections that are part of a significant bundle.
    stats = conpy.cluster_permutation_test(cons['pos'],
                                           cons['ton'],
                                           cluster_threshold=5,
                                           src=fs_src,
                                           n_permutations=1000,
                                           verbose=True,
                                           alpha=0.05,
                                           n_jobs=2,
                                           seed=10,
                                           return_details=True,
                                           max_spread=0.01)
    connection_indices, bundles, bundle_ts, bundle_ps, H0 = stats
    con_clust = contrast[connection_indices]

    # Save some details about the permutation stats to disk
    write_hdf5('{dir}NEMO_pos_vs_ton_contrast_{f}-stats.h5'.format(dir=meg_dir,
                                                                   f=freq),
               dict(connection_indices=connection_indices,
                    bundles=bundles,
                    bundle_ts=bundle_ts,
                    bundle_ps=bundle_ps,
                    H0=H0))
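
The statistics written above can be loaded back later for inspection or plotting. Below is a minimal read-back sketch, assuming that the write_hdf5 used here comes from the h5io package (or mne.externals.h5io), which provides a matching read_hdf5, and that meg_dir and freq are defined as in the snippet above:

# Sketch: read the permutation statistics back from disk (assumes h5io
# provides read_hdf5 matching the write_hdf5 call above).
from h5io import read_hdf5

stats_dict = read_hdf5('{dir}NEMO_pos_vs_ton_contrast_{f}-stats.h5'.format(
    dir=meg_dir, f=freq))
print(sorted(stats_dict.keys()))  # connection_indices, bundles, bundle_ts, ...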
Example #2
    ga_con[cond] = con
    con.save(fname.ga_con(condition=cond))

# Compute contrast between faces and scrambled pictures
contrast = ga_con[conditions[0]] - ga_con[conditions[1]]
contrast.save(fname.ga_con(condition='contrast'))

# Perform a permutation test to only retain connections that are part of a
# significant bundle.
stats = conpy.cluster_permutation_test(
    cons['face'],
    cons['scrambled'],
    cluster_threshold=5,
    src=fsaverage,
    n_permutations=1024,
    verbose=True,
    alpha=0.05,
    n_jobs=2,
    seed=10,
    return_details=True,
    max_spread=0.01,
)
connection_indices, bundles, bundle_ts, bundle_ps, H0 = stats
con_clust = contrast[connection_indices]

# Save some details about the permutation stats to disk
write_hdf5(fname.stats,
           dict(connection_indices=connection_indices,
                bundles=bundles,
                bundle_ts=bundle_ts,
                bundle_ps=bundle_ps,
                H0=H0))
Example #3
]

# Collect the per-subject connectivity estimates and per-subject contrasts
face = []
scrambled = []
contrast = []
for subject in subjects:
    con_face = conpy.read_connectivity('%s-face-con.h5' % subject)
    con_face = con_face.to_original_src(fsaverage)
    con_scrambled = conpy.read_connectivity('%s-scrambled-con.h5' % subject)
    con_scrambled = con_scrambled.to_original_src(fsaverage)
    face.append(con_face)
    scrambled.append(con_scrambled)
    contrast.append(con_face - con_scrambled)  # Create contrast

# Compute the grand-average contrast
contrast = reduce(operator.add, contrast) / len(subjects)

# Perform a permutation test to only retain connections that are part of a
# significant bundle.
connection_indices = conpy.cluster_permutation_test(
    face,
    scrambled,  # The two conditions
    cluster_threshold=5,  # The initial t-value threshold to form bundles
    max_spread=0.01,  # Maximum distance (in m) between connections
    #   that are assigned to the same bundle.
    src=fsaverage,  # The source space for distance computations
    n_permutations=1000,  # The number of permutations for estimating
    #   the distribution of t-values.
    alpha=0.05  # The p-value at which to reject the null-hypothesis
)

# Prune the contrast connectivity to only contain connections that are part of
# significant bundles.
contrast = contrast[connection_indices]
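
Since indexing a connectivity object returns another conpy connectivity object, the pruned contrast can be saved and summarized in the same way as the grand averages in the examples above. A small follow-up sketch; the output filename is an assumption, and only calls already shown above are used:

# Report how many connections survived the permutation test and save the
# pruned contrast (hypothetical filename).
print('Connections in significant bundles: %d' % len(connection_indices))
contrast.save('fsaverage-contrast-pruned-con.h5')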