Example #1
import numpy as np


def calc_subdists(subjects_data, voxel_range):
    # subjects_data: (subjects, voxels, TRs); `correlation` is a module-level
    # helper (a sketch follows this example).
    subjects, voxels, _ = subjects_data.shape
    D = np.zeros((len(voxel_range), subjects, subjects))

    for i, v in enumerate(voxel_range):
        # Correlation profile of voxel v against every voxel, per subject.
        profiles = np.zeros((subjects, voxels))
        for si in range(subjects):
            profiles[si] = correlation(subjects_data[si, v], subjects_data[si])
        # Clip away |r| == 1 so the Fisher z-transform stays finite, then drop
        # the self-correlation column before transforming.
        profiles = np.clip(np.nan_to_num(profiles), -0.9999, 0.9999)
        profiles = np.arctanh(np.delete(profiles, v, 1))
        D[i] = correlation(profiles, profiles)

    # Convert inter-subject similarity to correlation distance.
    D = np.sqrt(2.0 * (1.0 - D))
    return D
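All of these examples call a correlation helper defined elsewhere in the module. A minimal stand-in covering the call patterns used here (1-D or 2-D inputs, match_rows for row-by-row correlation, symmetric for a symmetrized matrix) might look like this; it is an illustrative sketch under those assumptions, not the project's actual implementation:

import numpy as np

def correlation(a, b, match_rows=False, symmetric=False):
    # Pearson correlation between rows of `a` and rows of `b`; rows are
    # observations over time, and 1-D inputs count as a single row.
    a = np.asarray(a, dtype=float)
    squeeze = a.ndim == 1
    a = np.atleast_2d(a)
    b = np.atleast_2d(np.asarray(b, dtype=float))

    def _standardize(x):
        # Zero-mean, unit-norm rows (guarding against zero variance).
        x = x - x.mean(axis=1, keepdims=True)
        norms = np.linalg.norm(x, axis=1, keepdims=True)
        norms[norms == 0] = 1.0
        return x / norms

    a, b = _standardize(a), _standardize(b)

    if match_rows:
        # Correlate row i of `a` with row i of `b` only.
        return np.einsum('ij,ij->i', a, b)

    r = a @ b.T
    if symmetric:
        # Symmetrize; the leave-one-out estimate is not exactly symmetric.
        r = (r + r.T) / 2.0
    return r[0] if squeeze else r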
Example #2
import numpy as np


def isfc(D, std=None, collapse_subj=True):
    # Inter-subject functional correlation; D has shape (n_vox, n_trs, n_subj).
    assert D.ndim == 3

    n_vox, _, n_subj = D.shape
    n_subj_loo = n_subj - 1

    # Sum once so each leave-one-out group mean is a cheap subtraction.
    group_sum = np.add.reduce(D, axis=2)
    masked = None
    masked = None

    if collapse_subj:
        ISFC = np.zeros((n_vox, n_vox))
        for loo_subj in range(n_subj):
            loo_subj_ts = D[:, :, loo_subj]
            ISFC += correlation(loo_subj_ts,
                                (group_sum - loo_subj_ts) / n_subj_loo,
                                symmetric=True)
        ISFC /= n_subj

        if std:
            # Keep voxel pairs whose ISFC lies within `std` standard
            # deviations of the mean (same masking as in isc() below).
            ISFC_avg = ISFC.mean()
            ISFC_std = ISFC.std()
            masked = (ISFC <= ISFC_avg + ISFC_std * std) & \
                     (ISFC >= ISFC_avg - ISFC_std * std)

    else:
        ISFC = np.zeros((n_vox, n_vox, n_subj))
        for loo_subj in range(n_subj):
            loo_subj_ts = D[:, :, loo_subj]
            ISFC[:, :, loo_subj] = correlation(
                loo_subj_ts, (group_sum - loo_subj_ts) / n_subj_loo,
                symmetric=True)

    if masked is not None:
        # Require every pairwise ISFC for a voxel to pass the threshold.
        masked = np.all(masked, axis=1)
    else:
        masked = np.ones(n_vox, dtype=bool)

    return ISFC, masked
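A quick usage sketch on synthetic data (shapes and the `std` cutoff are illustrative):

rng = np.random.default_rng(0)
D = rng.standard_normal((10, 50, 4))      # 10 voxels, 50 TRs, 4 subjects
ISFC, mask = isfc(D, std=1.0, collapse_subj=True)
print(ISFC.shape, mask.shape)             # (10, 10) (10,)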
Example #3
import numpy as np


def isc(D, std=None, collapse_subj=True):
    # Inter-subject correlation; D has shape (n_vox, n_trs, n_subj).
    assert D.ndim == 3

    n_vox, _, n_subj = D.shape
    n_subj_loo = n_subj - 1

    group_sum = np.add.reduce(D, axis=2)

    if collapse_subj:
        ISC = np.zeros(n_vox)
        for loo_subj in range(n_subj):
            loo_subj_ts = D[:, :, loo_subj]
            ISC += correlation(loo_subj_ts,
                               (group_sum - loo_subj_ts) / n_subj_loo,
                               match_rows=True)
        ISC /= n_subj

        if std:
            ISC_avg = ISC.mean()
            ISC_std = ISC.std()
            masked = (ISC <= ISC_avg + ISC_std * std) & \
                     (ISC >= ISC_avg - ISC_std * std)
        else:
            masked = np.ones(n_vox, dtype=bool)

    else:
        ISC = np.zeros((n_subj, n_vox))
        for loo_subj in range(n_subj):
            loo_subj_ts = D[:, :, loo_subj]
            ISC[loo_subj] = correlation(loo_subj_ts,
                                        (group_sum - loo_subj_ts) / n_subj_loo,
                                        match_rows=True)

        masked = np.ones(n_vox, dtype=bool)

    return ISC, masked
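Both isc() and isfc() build each subject's leave-one-out group mean by subtracting that subject from a single precomputed sum instead of re-averaging the remaining subjects on every pass; a quick check of the identity on synthetic data:

rng = np.random.default_rng(0)
D = rng.standard_normal((5, 20, 3))
group_sum = np.add.reduce(D, axis=2)
loo_mean = (group_sum - D[:, :, 0]) / (D.shape[2] - 1)
assert np.allclose(loo_mean, D[:, :, 1:].mean(axis=2))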
Example #4
import numpy as np


def isfc_permutation(permutation,
                     D,
                     masked,
                     collapse_subj=True,
                     random_state=0):
    # One null-distribution sample for ISFC; `phase_randomize` is a module
    # helper (a sketch follows this example).
    print("Permutation", permutation)

    # Correlations lie in [-1, 1], so these extremes are always overwritten.
    min_null = 1
    max_null = -1

    # Keep only the voxels that survived masking in isfc().
    D = D[masked]

    n_vox, _, n_subj = D.shape
    n_subj_loo = n_subj - 1

    D = phase_randomize(D, random_state)

    if collapse_subj:
        ISFC_null = np.zeros((n_vox, n_vox))

    group_sum = np.add.reduce(D, axis=2)

    for loo_subj in range(n_subj):
        loo_subj_ts = D[:, :, loo_subj]
        ISFC_subj = correlation(
            loo_subj_ts,
            (group_sum - loo_subj_ts) / n_subj_loo,
            symmetric=True)

        if collapse_subj:
            ISFC_null += ISFC_subj
        else:
            max_null = max(np.max(ISFC_subj), max_null)
            min_null = min(np.min(ISFC_subj), min_null)

    if collapse_subj:
        ISFC_null /= n_subj
        max_null = np.max(ISFC_null)
        min_null = np.min(ISFC_null)

    return permutation, min_null, max_null
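isfc_permutation assumes a phase_randomize helper from the same module. A hypothetical stand-in (not the project's actual implementation) that draws new Fourier phases along the time axis while preserving each amplitude spectrum, so the null keeps every voxel's autocorrelation structure:

import numpy as np

def phase_randomize(D, random_state=0):
    # D: (n_vox, n_trs, n_subj). Keep amplitudes, draw new uniform phases.
    rng = np.random.RandomState(random_state)
    F = np.fft.rfft(D, axis=1)
    phases = rng.uniform(0, 2 * np.pi, size=F.shape)
    phases[:, 0] = 0.0  # leave the DC component untouched
    F = np.abs(F) * np.exp(1j * phases)
    return np.fft.irfft(F, n=D.shape[1], axis=1)

Collecting the extremes over many permutations then gives a null distribution for thresholding the observed ISFC, for example:

mask = np.ones(D.shape[0], dtype=bool)    # keep all voxels
nulls = [isfc_permutation(p, D, mask, random_state=p) for p in range(100)]
max_nulls = np.array([mx for _, _, mx in nulls])
threshold = np.percentile(max_nulls, 95)  # e.g. a 5% familywise cutoff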
Example #5
import numpy as np
from scipy.signal import find_peaks
from sklearn.utils import check_random_state


def detect_qpp(data,
               num_scans,
               window_length,
               permutations,
               correlation_threshold,
               iterations,
               convergence_iterations=1,
               random_state=None):
    """
    Detect a quasi-periodic pattern (QPP) in `data` (voxels x TRs).

    This code is adapted from the paper "Quasi-periodic patterns (QPP):
    large-scale dynamics in resting state fMRI that correlate with local
    infraslow electrical activity", Shella Keilholz et al., NeuroImage, 2014.

    `flattened_segment`, `normalize_segment`, `smooth` and `correlation` are
    helpers from the surrounding module.
    """

    random_state = check_random_state(random_state)

    voxels, trs = data.shape

    iterations = int(max(1, iterations))
    convergence_iterations = int(max(1, convergence_iterations))

    if callable(correlation_threshold):
        correlation_thresholds = [
            correlation_threshold(i) for i in range(iterations)
        ]
    else:
        correlation_thresholds = [
            correlation_threshold for _ in range(iterations)
        ]

    trs_per_scan = trs // num_scans
    # TRs whose sliding window fits entirely inside a single scan.
    inspectable_trs = np.arange(trs) % trs_per_scan
    inspectable_trs = np.where(
        inspectable_trs < trs_per_scan - window_length + 1)[0]

    df = voxels * window_length

    initial_trs = random_state.choice(inspectable_trs, permutations)

    permutation_result = [{} for _ in range(permutations)]
    for perm in range(permutations):

        template_holder = np.zeros(trs)
        random_initial_window = normalize_segment(
            flattened_segment(data, window_length, initial_trs[perm]), df)
        for tr in inspectable_trs:
            scan_window = normalize_segment(
                flattened_segment(data, window_length, tr), df)
            template_holder[tr] = np.dot(random_initial_window, scan_window)

        template_holder_convergence = np.zeros((convergence_iterations, trs))

        for iteration in range(iterations):

            peak_threshold = correlation_thresholds[iteration]

            peaks, _ = find_peaks(template_holder,
                                  height=peak_threshold,
                                  distance=window_length)
            peaks = np.delete(peaks,
                              np.where(~np.isin(peaks, inspectable_trs))[0])

            template_holder = smooth(template_holder)

            found_peaks = np.size(peaks)
            if found_peaks < 1:
                break

            peaks_segments = flattened_segment(data, window_length, peaks[0])
            for peak in peaks[1:]:
                peaks_segments = peaks_segments + flattened_segment(
                    data, window_length, peak)

            peaks_segments = peaks_segments / found_peaks
            peaks_segments = normalize_segment(peaks_segments, df)

            for tr in inspectable_trs:
                scan_window = normalize_segment(
                    flattened_segment(data, window_length, tr), df)
                template_holder[tr] = np.dot(peaks_segments, scan_window)

            # Stop early once the template matches its recent history.
            if np.all(correlation(template_holder,
                                  template_holder_convergence) > 0.9999):
                break

            # Shift the convergence history and record the current template.
            if convergence_iterations > 1:
                template_holder_convergence[1:] = \
                    template_holder_convergence[:-1]
            template_holder_convergence[0] = template_holder

        if found_peaks > 1:
            permutation_result[perm] = {
                'template': template_holder,
                'peaks': peaks,
                'final_iteration': iteration,
                'correlation_score': np.sum(template_holder[peaks]),
            }

    # Retrieve max correlation of template from permutations
    correlation_scores = np.array(
        [r['correlation_score'] if r else 0.0 for r in permutation_result])
    if not np.any(correlation_scores):
        raise Exception(
            "C-PAC could not find QPP in your data. "
            "Please lower your correlation threshold and try again.")

    max_correlation = np.argsort(correlation_scores)[-1]
    best_template = permutation_result[max_correlation]['template']
    best_selected_peaks = permutation_result[max_correlation]['peaks']

    best_template_metrics = [
        np.median(best_template[best_selected_peaks]),
        np.median(np.diff(best_selected_peaks)),
        len(best_selected_peaks),
    ]

    best_template_segment = np.zeros((voxels, window_length))

    for best_peak in best_selected_peaks:
        start_tr = int(best_peak - np.ceil(window_length / 2.))
        end_tr = int(best_peak + np.floor(window_length / 2.))

        start_segment = np.zeros((voxels, 0))
        if start_tr <= 0:
            start_segment = np.zeros((voxels, abs(start_tr)))
            start_tr = 0

        end_segment = np.zeros((voxels, 0))
        if end_tr > trs:
            end_segment = np.zeros((voxels, end_tr - trs))
            end_tr = trs

        data_segment = data[:, start_tr:end_tr]

        best_template_segment += np.concatenate(
            [start_segment, data_segment, end_segment], axis=1)

    best_template_segment /= len(best_selected_peaks)

    return best_template_segment, best_selected_peaks, best_template_metrics
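A usage sketch (shapes and thresholds are illustrative; on pure noise the search may fail and raise the exception above, so real data with an actual QPP is expected):

rng = np.random.default_rng(0)
data = rng.standard_normal((200, 300))    # 200 voxels, 2 scans x 150 TRs
segment, peaks, metrics = detect_qpp(
    data, num_scans=2, window_length=30, permutations=10,
    correlation_threshold=0.2, iterations=15, random_state=0)
print(segment.shape)                      # (200, 30)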