def slice_wise_fft(in_file, ftmask=None, spike_thres=3.0, out_prefix=None):
    """Search for spikes in slices using the 2D FFT."""
    import os.path as op

    import nibabel as nb
    import numpy as np
    from scipy.ndimage import binary_erosion, generate_binary_structure, median_filter
    from statsmodels.robust.scale import mad

    from mriqc.workflows.utils import spectrum_mask

    # Derive the output prefix from the input filename if none was given
    if out_prefix is None:
        fname, ext = op.splitext(op.basename(in_file))
        if ext == ".gz":
            fname, _ = op.splitext(fname)
        out_prefix = op.abspath(fname)

    func_data = nb.load(in_file).get_fdata()

    if ftmask is None:
        ftmask = spectrum_mask(tuple(func_data.shape[:2]))

    # Compute a median-filtered, masked 2D FFT for every slice of every timepoint
    fft_data = []
    for t in range(func_data.shape[-1]):
        func_frame = func_data[..., t]
        fft_slices = []
        for z in range(func_frame.shape[2]):
            sl = func_frame[..., z]
            fftsl = (
                median_filter(
                    np.real(np.fft.fft2(sl)).astype(np.float32),
                    size=(5, 5),
                    mode="constant",
                )
                * ftmask
            )
            fft_slices.append(fftsl)
        fft_data.append(np.stack(fft_slices, axis=-1))

    # Recompose the 4D FFT timeseries
    fft_data = np.stack(fft_data, -1)

    # Z-score across t, using robust statistics (median and MAD)
    mu = np.median(fft_data, axis=3)
    sigma = np.stack([mad(fft_data, axis=3)] * fft_data.shape[-1], -1)
    idxs = np.where(np.abs(sigma) > 1e-4)
    fft_zscored = fft_data - mu[..., np.newaxis]
    fft_zscored[idxs] /= sigma[idxs]

    # Save the z-scored FFT timeseries
    out_fft = op.abspath(out_prefix + "_zsfft.nii.gz")
    nii = nb.Nifti1Image(fft_zscored.astype(np.float32), np.eye(4), None)
    nii.to_filename(out_fft)

    # Find peaks
    spikes_list = []
    for t in range(fft_zscored.shape[-1]):
        fft_frame = fft_zscored[..., t]

        for z in range(fft_frame.shape[-1]):
            sl = fft_frame[..., z]
            if np.all(sl < spike_thres):
                continue

            # Any z-score above spike_thres is flagged as a spike
            sl[sl <= spike_thres] = 0
            sl[sl > 0] = 1

            # Erode the binarized peaks and count how many voxels survive
            struc = generate_binary_structure(2, 2)
            sl = binary_erosion(sl.astype(np.uint8), structure=struc).astype(np.uint8)

            if sl.sum() > 10:
                spikes_list.append((t, z))

    out_spikes = op.abspath(out_prefix + "_spikes.tsv")
    np.savetxt(out_spikes, spikes_list, fmt="%d", delimiter="\t", header="TR\tZ")

    return len(spikes_list), out_spikes, out_fft
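

# Minimal usage sketch (not part of the original module). The input filename
# below is hypothetical; any 4D BOLD NIfTI works, provided mriqc, nibabel,
# scipy, and statsmodels are installed. The function writes
# <prefix>_zsfft.nii.gz and <prefix>_spikes.tsv next to the derived prefix
# and returns the spike count together with both output paths.
if __name__ == "__main__":
    n_spikes, spikes_tsv, zsfft_nii = slice_wise_fft(
        "sub-01_task-rest_bold.nii.gz",  # hypothetical 4D BOLD run
        spike_thres=4.0,  # stricter than the 3.0 default
    )
    print(f"Found {n_spikes} spiked slices; report written to {spikes_tsv}")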