Example #1
def test_anisotropic_sphere_extraction():
    data = np.random.RandomState(42).random_sample((3, 3, 3, 5))
    affine = np.eye(4)
    affine[0, 0] = 2
    affine[2, 2] = 2
    img = nibabel.Nifti1Image(data, affine)
    masker = NiftiSpheresMasker([(2, 1, 2)], radius=1)
    # Test the fit
    masker.fit()
    # Test the transform
    s = masker.transform(img)
    mask = np.zeros((3, 3, 3), dtype=bool)
    mask[1, :, 1] = True
    assert_array_equal(s[:, 0], np.mean(data[mask], axis=0))
    # Now with a mask
    mask_img = np.zeros((3, 2, 3))
    mask_img[1, 0, 1] = 1
    affine_2 = affine.copy()
    affine_2[0, 0] = 4
    mask_img = nibabel.Nifti1Image(mask_img, affine=affine_2)
    masker = NiftiSpheresMasker([(2, 1, 2)], radius=1, mask_img=mask_img)

    masker.fit()
    s = masker.transform(img)
    assert_array_equal(s[:, 0], data[1, 0, 1])
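A minimal sketch (not part of the test above, pure numpy) of why only the [1, :, 1] column of voxels falls inside the 1 mm sphere: with 2 mm spacing along x and z, the world-space seed (2, 1, 2) maps back to voxel (1, 1, 1), and only its y-neighbours are within 1 mm.

import numpy as np

# Hypothetical illustration using the same anisotropic affine as the test above.
affine = np.eye(4)
affine[0, 0] = 2  # 2 mm voxel spacing along x
affine[2, 2] = 2  # 2 mm voxel spacing along z

seed_world = np.array([2, 1, 2, 1])                 # homogeneous world coordinates
seed_voxel = np.linalg.inv(affine).dot(seed_world)  # -> [1., 1., 1., 1.]

# Neighbouring voxels along y are 1 mm away (inside radius=1); along x and z
# they are 2 mm away, so they fall outside the sphere.
print(seed_voxel[:3])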
def test_small_radius():
    affine = np.eye(4)
    shape = (3, 3, 3)

    data = np.random.random(shape)
    mask = np.zeros(shape)
    mask[1, 1, 1] = 1
    mask[2, 2, 2] = 1
    affine = np.eye(4) * 1.2
    seed = (1.4, 1.4, 1.4)

    masker = NiftiSpheresMasker([seed],
                                radius=0.1,
                                mask_img=nibabel.Nifti1Image(mask, affine))
    masker.fit_transform(nibabel.Nifti1Image(data, affine))

    # Test if masking is taken into account
    mask[1, 1, 1] = 0
    mask[1, 1, 0] = 1

    masker = NiftiSpheresMasker([seed],
                                radius=0.1,
                                mask_img=nibabel.Nifti1Image(mask, affine))
    assert_raises_regex(ValueError,
                        'Sphere around seed #0 is empty', masker.fit_transform,
                        nibabel.Nifti1Image(data, affine))

    masker = NiftiSpheresMasker([seed],
                                radius=1.6,
                                mask_img=nibabel.Nifti1Image(mask, affine))
    masker.fit_transform(nibabel.Nifti1Image(data, affine))
Example #3
def test_gaussian_coord_smoothing():
    coords = [(0.0, 0.0, 0.0), (10.0, -10.0, 30.0)]
    computed_img = img_utils.gaussian_coord_smoothing(coords)
    masker = NiftiSpheresMasker(coords + [(-10.0, 10.0, -30)]).fit()
    values = masker.transform(computed_img)[0]
    assert (values[:2] > computed_img.get_data().max() / 2.0).all()
    assert values[-1] == pytest.approx(0.0)
def test_seed_extraction():
    data = np.random.random((3, 3, 3, 5))
    img = nibabel.Nifti1Image(data, np.eye(4))
    masker = NiftiSpheresMasker([(1, 1, 1)])
    # Test the fit
    masker.fit()
    # Test the transform
    s = masker.transform(img)
    assert_array_equal(s[:, 0], data[1, 1, 1])
Example #6
def extract_timeseries_coords(filename, raw_coords, confounds=None):
    """Because the power parcellation is given in coordinates and not labels,
    we dedicate an exclusive function to deal with it.
    """
    coords = np.vstack(
        (raw_coords.rois['x'], raw_coords.rois['y'], raw_coords.rois['z'])).T

    spheres_masker = NiftiSpheresMasker(seeds=coords,
                                        radius=5.,
                                        standardize=True)

    time_series = spheres_masker.fit_transform(filename, confounds=confounds)
    return time_series
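A hypothetical usage sketch for the helper above, assuming the coordinates come from nilearn's fetch_coords_power_2011 fetcher (whose .rois table exposes 'x', 'y', 'z' columns, matching the access pattern inside the function); the functional filename is a placeholder.

from nilearn import datasets

power = datasets.fetch_coords_power_2011()            # 264 seed coordinates
ts = extract_timeseries_coords('sub-01_bold.nii.gz',  # placeholder 4D image
                               power)
print(ts.shape)                                       # (n_timepoints, 264)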
Example #7
def signal_extract(data,atlas,t_r=2.2,masker_type='Spheres',saveas='file'):
    
    """
    Extracts BOLD time-series from regions of interest
    
    Parameters
    ----------
    data: Filenames of subjects. 
    
    atlas: regions or coordinates to extract signals from.
    
    masker_type : Type of masker used to extract BOLD signals . types are : 'Spheres','Maps','Labels'
    
    saveas : Destination to save and load output (.npz)
    
    Returns
    ---------
    subjects_ts : list of 2-D arrays, each (n_timepoints, n_regions)
                  BOLD time series for each subject
    """
    subjects_ts=[]
    
    if os.path.exists(saveas):
        
        subjects_ts=np.load(saveas)['arr_0']
        
    else:
        
        if masker_type== 'Spheres':
            masker = NiftiSpheresMasker(
                            seeds=atlas, smoothing_fwhm=6, radius=4 ,mask_img=brainmask,
                            detrend=False, standardize=True, low_pass=0.1, high_pass=0.01, t_r=t_r)
        elif masker_type == 'Maps':
            masker = NiftiMapsMasker(maps_img=atlas,mask_img=brainmask,standardize=True,
                                 high_pass=0.01,low_pass=0.1,detrend=False,t_r=t_r,
                                 memory_level=2,smoothing_fwhm=5,resampling_target='data',
                                 memory=mem,verbose=5)
        elif masker_type == 'Labels':
            masker = NiftiLabelsMasker(labels_img=atlas,mask_img=brainmask,standardize=True,
                                 high_pass=0.01,low_pass=0.1,detrend=False,t_r=t_r,
                                 memory_level=2,smoothing_fwhm=5,resampling_target='data',
                                 memory=mem,verbose=5)
        else:
            raise ValueError("Please provide masker type")
            
        for func_file in data:
            time_series = masker.fit_transform(func_file)
            subjects_ts.append(time_series)
            np.savez(saveas,subjects_ts)
            
    return subjects_ts
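A hypothetical call for the function above. Note that brainmask and mem are referenced inside signal_extract but never passed in, so they are assumed to be defined at module level; all file names and seed coordinates below are placeholders.

func_files = ['sub-01_task-rest_bold.nii.gz',
              'sub-02_task-rest_bold.nii.gz']        # placeholder file names
dmn_seeds = [(0, -52, 18), (-46, -68, 32),
             (46, -68, 32), (1, 50, -5)]             # example seed coordinates
subjects_ts = signal_extract(func_files, dmn_seeds, t_r=2.2,
                             masker_type='Spheres',
                             saveas='dmn_sphere_timeseries.npz')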
Example #8
    def fit(self):
        # Create mask
        print("Fit the SphereMasker...")

        self.n_seeds = int(self.seeds_img.get_data().sum())

        # Pass our xform_fn for a callback on each seed.
        self.sphere_masker = NiftiSpheresMasker(
            seeds=self.seeds_img,
            mask_img=self.seeds_img,
            radius=self.radius,
            xform_fn=self.rsa_on_ball_axis_1,
            standardize=False)  # no mem
        self.sphere_masker.fit()
Example #9
def _fmri_roi_extract_image(data,  atlas_path, atlas_type, radius, overlap_ok,mask = None):
    if 'label' in atlas_type:
        logging.debug('Labels Extract')
        label_masker = NiftiLabelsMasker(atlas_path, mask_img=mask)
        timeseries = label_masker.fit_transform(data)
    if 'sphere' in atlas_type:
        atlas_path = np.loadtxt(atlas_path)
        logging.debug('Sphere Extract')
        spheres_masker = NiftiSpheresMasker(atlas_path, float(radius),mask_img=mask, allow_overlap = overlap_ok)
        timeseries = spheres_masker.fit_transform(data)
    if 'maps' in atlas_type:
        logging.debug('Maps Extract')
        maps_masker = NiftiMapsMasker(atlas_path,mask_img=mask, allow_overlap = overlap_ok)
        timeseries = maps_masker.fit_transform(data)
    timeseries[timeseries == 0.0] = np.nan

    return timeseries
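A hypothetical call illustrating the sphere branch above: the atlas path is expected to be a plain-text file with one "x y z" coordinate per row, which np.loadtxt turns into an (n_seeds, 3) array; all paths are placeholders.

timeseries = _fmri_roi_extract_image('sub-01_bold.nii.gz',      # placeholder 4D image
                                     'seed_coords.txt',         # placeholder coordinate file
                                     atlas_type='sphere',
                                     radius=5,
                                     overlap_ok=True,
                                     mask='brain_mask.nii.gz')  # placeholder mask image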
def test_small_radius():
    affine = np.eye(4)
    shape = (3, 3, 3)

    data = np.random.random(shape)
    mask = np.zeros(shape)
    mask[1, 1, 1] = 1
    mask[2, 2, 2] = 1
    affine = np.eye(4) * 1.2
    seed = (1.4, 1.4, 1.4)

    masker = NiftiSpheresMasker([seed], radius=0.1,
                                mask_img=nibabel.Nifti1Image(mask, affine))
    masker.fit_transform(nibabel.Nifti1Image(data, affine))

    # Test if masking is taken into account
    mask[1, 1, 1] = 0
    mask[1, 1, 0] = 1

    masker = NiftiSpheresMasker([seed], radius=0.1,
                                mask_img=nibabel.Nifti1Image(mask, affine))
    assert_raises_regex(ValueError, 'Sphere around seed #0 is empty',
                        masker.fit_transform,
                        nibabel.Nifti1Image(data, affine))

    masker = NiftiSpheresMasker([seed], radius=1.6,
                                mask_img=nibabel.Nifti1Image(mask, affine))
    masker.fit_transform(nibabel.Nifti1Image(data, affine))
Example #11
def extract_parcellation_time_series(in_data, parcellation_name,
                                     parcellations_dict, bp_freqs, tr):
    '''
    Depending on parcellations_dict[parcellation_name]['is_probabilistic'], this function chooses
    NiftiMapsMasker (probabilistic atlas), NiftiSpheresMasker ('sphere') or NiftiLabelsMasker (0/1 labels)
    to extract the time series of each parcel.
    If bp_freqs is given, the data are band-pass filtered at (hp, lp): (None, None) means no filter,
    (None, .1) applies only the low-pass, etc.
    tr is in ms (e.g. from freesurfer ImageInfo()).
    Returns an np.array with the parcellation time series (which is also saved to
    parcellation_time_series_file) and the path to the pickled masker object.
    '''
    from nilearn.input_data import NiftiLabelsMasker, NiftiMapsMasker, NiftiSpheresMasker
    import os, pickle
    import numpy as np

    if parcellations_dict[parcellation_name][
            'is_probabilistic'] == True:  # use probab. nilearn
        masker = NiftiMapsMasker(
            maps_img=parcellations_dict[parcellation_name]['nii_path'],
            standardize=True)

    elif parcellations_dict[parcellation_name]['is_probabilistic'] == 'sphere':
        atlas = pickle.load(
            open(parcellations_dict[parcellation_name]['nii_path'], 'rb'))
        coords = atlas.rois
        masker = NiftiSpheresMasker(coords,
                                    radius=5,
                                    allow_overlap=True,
                                    standardize=True)

    else:  # 0/1 labels
        masker = NiftiLabelsMasker(
            labels_img=parcellations_dict[parcellation_name]['nii_path'],
            standardize=True)

    # add band-pass filter (only applied if the corresponding frequency is not None)
    hp, lp = bp_freqs
    masker.low_pass = lp
    masker.high_pass = hp
    if tr is not None:
        masker.t_r = tr
    else:
        masker.t_r = None

    masker.standardize = True

    masker_file = os.path.join(os.getcwd(), 'masker.pkl')
    with open(masker_file, 'wb') as f:
        pickle.dump(masker, f)

    parcellation_time_series = masker.fit_transform(in_data)

    parcellation_time_series_file = os.path.join(
        os.getcwd(), 'parcellation_time_series.npy')
    np.save(parcellation_time_series_file, parcellation_time_series)

    return parcellation_time_series, parcellation_time_series_file, masker_file
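A hypothetical call for the function above, assuming a parcellations_dict of the expected shape; the band-pass tuple requests only a 0.1 Hz low-pass, and tr follows the docstring's millisecond convention (paths are placeholders).

parcellations_dict = {
    'craddock_200': {                       # hypothetical parcellation entry
        'is_probabilistic': False,
        'nii_path': 'craddock_200.nii.gz',  # placeholder atlas path
    },
}

ts, ts_file, masker_file = extract_parcellation_time_series(
    'sub-01_bold.nii.gz',                   # placeholder 4D image
    'craddock_200',
    parcellations_dict,
    bp_freqs=(None, 0.1),                   # low-pass only
    tr=2200)                                # in ms, as documented above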
def build_series(brain4d):
    coords = [
        (30, -45, -10),
        (0, -85, 10),
        (0, 47, 30),
        (-35, -20, 50),
    ]
    masker = NiftiSpheresMasker(
        coords,
        radius=5,
        detrend=True,
        standardize=True,
        low_pass=0.1,
        high_pass=0.01,
        t_r=2,
    )

    series = masker.fit_transform(brain4d)
    return series
def report_flm_adhd_dmn():  # pragma: no cover
    t_r = 2.
    slice_time_ref = 0.
    n_scans = 176
    pcc_coords = (0, -53, 26)
    adhd_dataset = nilearn.datasets.fetch_adhd(n_subjects=1)
    seed_masker = NiftiSpheresMasker([pcc_coords],
                                     radius=10,
                                     detrend=True,
                                     standardize=True,
                                     low_pass=0.1,
                                     high_pass=0.01,
                                     t_r=2.,
                                     memory='nilearn_cache',
                                     memory_level=1,
                                     verbose=0)
    seed_time_series = seed_masker.fit_transform(adhd_dataset.func[0])
    frametimes = np.linspace(0, (n_scans - 1) * t_r, n_scans)
    design_matrix = make_first_level_design_matrix(frametimes,
                                                   hrf_model='spm',
                                                   add_regs=seed_time_series,
                                                   add_reg_names=["pcc_seed"])
    dmn_contrast = np.array([1] + [0] * (design_matrix.shape[1] - 1))
    contrasts = {'seed_based_glm': dmn_contrast}

    first_level_model = FirstLevelModel(t_r=t_r, slice_time_ref=slice_time_ref)
    first_level_model = first_level_model.fit(run_imgs=adhd_dataset.func[0],
                                              design_matrices=design_matrix)

    report = make_glm_report(
        first_level_model,
        contrasts=contrasts,
        title='ADHD DMN Report',
        cluster_threshold=15,
        height_control='bonferroni',
        min_distance=8.,
        plot_type='glass',
        report_dims=(1200, 'a'),
    )
    output_filename = 'generated_report_flm_adhd_dmn.html'
    output_filepath = os.path.join(REPORTS_DIR, output_filename)
    report.save_as_html(output_filepath)
    report.get_iframe()
Example #14
def _set_volume_masker(roi_file, as_voxels=False, **kwargs):
    """Check and see if multiple ROIs exist in atlas file"""

    if not isinstance(roi_file, str):
        raise ValueError('roi_file must be a file name string')

    if roi_file.endswith('.csv') or roi_file.endswith('.tsv'):
        roi = _read_coords(roi_file)
        n_rois = len(roi)
        print('  {} region(s) detected from coordinates'.format(n_rois))

        if kwargs.get('radius') is None:
            warnings.warn('No radius specified for coordinates; setting '
                          'to nilearn.input_data.NiftiSpheresMasker default '
                          'of extracting from a single voxel')
        masker = NiftiSpheresMasker(roi, **kwargs)
    
    elif roi_file.endswith('.nii.gz'):
        # remove NiftiSpheresMasker-specific args before building other maskers
        if 'radius' in kwargs:
            kwargs.pop('radius')
        if 'allow_overlap' in kwargs:
            kwargs.pop('allow_overlap')
    
        roi_img = image.load_img(roi_file)
        if len(roi_img.shape) == 4:
            n_rois = roi_img.shape[-1]
            print('  {} region(s) detected from {}'.format(n_rois,
                                                        roi_img.get_filename()))
            masker = NiftiMapsMasker(roi_img, allow_overlap=True,**kwargs)
        else:
            n_rois = len(np.unique(roi_img.get_fdata())) - 1
            print('  {} region(s) detected from {}'.format(n_rois,
                                                        roi_img.get_filename()))
            if n_rois > 1:
                masker = NiftiLabelsMasker(roi_img, **kwargs)
            elif n_rois == 1:
                # binary mask for single ROI 
                if as_voxels:
                    if 'mask_img' in kwargs:
                        kwargs.pop('mask_img')
                    masker = NiftiMasker(roi_img, **kwargs)
                else:
                    # more computationally efficient if only wanting the mean
                    masker = NiftiLabelsMasker(roi_img, **kwargs)
            else:
                raise ValueError('No ROI detected; check ROI file')
    
    else:
        raise ValueError('Invalid file type for roi_file. Must be one of: '
                         '.nii.gz, .csv, .tsv')
    
    return masker, n_rois
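Two hypothetical calls showing the dispatch above: a coordinate table yields a NiftiSpheresMasker, while an integer-labelled NIfTI atlas yields a NiftiLabelsMasker (file names are placeholders).

# Coordinates in a .tsv -> spheres masker
sphere_masker, n_rois = _set_volume_masker('seed_coords.tsv',
                                           radius=6, allow_overlap=True)

# Labelled atlas -> labels masker (sphere-only kwargs are dropped internally)
label_masker, n_rois = _set_volume_masker('atlas_labels.nii.gz',
                                          standardize=True)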
def test_is_nifti_spheres_masker_give_nans():
    affine = np.eye(4)

    data_with_nans = np.zeros((10, 10, 10), dtype=np.float32)
    data_with_nans[:, :, :] = np.nan

    data_without_nans = np.random.random((9, 9, 9))
    indices = np.nonzero(data_without_nans)

    # Leaving nans outside of some data
    data_with_nans[indices] = data_without_nans[indices]
    img = nibabel.Nifti1Image(data_with_nans, affine)
    seed = [(7, 7, 7)]

    # Interaction of seed with nans
    masker = NiftiSpheresMasker(seeds=seed, radius=2.)
    assert_false(np.isnan(np.sum(masker.fit_transform(img))))

    mask = np.ones((9, 9, 9))
    mask_img = nibabel.Nifti1Image(mask, affine)
    # With mask_img provided, the sphere is restricted to the brain, so no NaNs
    masker = NiftiSpheresMasker(seeds=seed, radius=2., mask_img=mask_img)
    assert_false(np.isnan(np.sum(masker.fit_transform(img))))
def test_sphere_extraction():
    data = np.random.random((3, 3, 3, 5))
    img = nibabel.Nifti1Image(data, np.eye(4))
    masker = NiftiSpheresMasker([(1, 1, 1)], radius=1)
    # Test the fit
    masker.fit()
    # Test the transform
    s = masker.transform(img)
    mask = np.zeros((3, 3, 3), dtype=bool)
    mask[:, 1, 1] = True
    mask[1, :, 1] = True
    mask[1, 1, :] = True
    assert_array_equal(s[:, 0], np.mean(data[mask], axis=0))
    # Now with a mask
    mask_img = np.zeros((3, 3, 3))
    mask_img[1, :, :] = 1
    mask_img = nibabel.Nifti1Image(mask_img, np.eye(4))
    masker = NiftiSpheresMasker([(1, 1, 1)], radius=1, mask_img=mask_img)
    masker.fit()
    s = masker.transform(img)
    assert_array_equal(
        s[:, 0],
        np.mean(data[np.logical_and(mask, mask_img.get_data())], axis=0))
def test_nifti_spheres_masker_overlap():
    # Test overlap handling between spheres in NiftiSpheresMasker
    affine = np.eye(4)
    shape = (5, 5, 5)

    data = np.random.random(shape + (5,))
    fmri_img = nibabel.Nifti1Image(data, affine)

    seeds = [(0, 0, 0), (2, 2, 2)]

    overlapping_masker = NiftiSpheresMasker(seeds, radius=1,
                                            allow_overlap=True)
    overlapping_masker.fit_transform(fmri_img)
    overlapping_masker = NiftiSpheresMasker(seeds, radius=2,
                                            allow_overlap=True)
    overlapping_masker.fit_transform(fmri_img)

    noverlapping_masker = NiftiSpheresMasker(seeds, radius=1,
                                             allow_overlap=False)
    noverlapping_masker.fit_transform(fmri_img)
    noverlapping_masker = NiftiSpheresMasker(seeds, radius=2,
                                             allow_overlap=False)
    assert_raises_regex(ValueError, 'Overlap detected',
                        noverlapping_masker.fit_transform, fmri_img)
Example #18
def _set_masker(roi_file, as_voxels=False, **kwargs):
    """Check and see if multiple ROIs exist in atlas file"""

    if isinstance(roi_file, str) and roi_file.endswith('.tsv'):
        roi = _read_coords(roi_file)
        n_rois = len(roi)
        is_coords = True
        print('  {} region(s) detected from coordinates'.format(n_rois))
    else:
        roi = load_img(roi_file)
        n_rois = len(np.unique(roi.get_data())) - 1

        is_coords = False
        print('  {} region(s) detected from {}'.format(n_rois,
                                                       roi.get_filename()))
    
    if is_coords:
        if kwargs.get('radius') is None:
            warnings.warn('No radius specified for coordinates; setting '
                            'to nilearn.input_data.NiftiSpheresMasker default '
                            'of extracting from a single voxel')
        masker = NiftiSpheresMasker(roi, **kwargs)
    else:

        if 'radius' in kwargs:
            kwargs.pop('radius')
        
        if 'allow_overlap' in kwargs:
            kwargs.pop('allow_overlap')
        
        if n_rois > 1:
            masker = NiftiLabelsMasker(roi, **kwargs)
        elif n_rois == 1:
            # single binary ROI mask 
            if as_voxels:
                if 'mask_img' in kwargs:
                    kwargs.pop('mask_img')
                masker = NiftiMasker(roi, **kwargs)
            else:
                # more computationally efficient if only wanting the mean of ROI
                masker = NiftiLabelsMasker(roi, **kwargs)
        else:
            raise ValueError('No ROI detected; check ROI file')
    
    return masker
def test_sphere_extraction():
    data = np.random.random((3, 3, 3, 5))
    img = nibabel.Nifti1Image(data, np.eye(4))
    masker = NiftiSpheresMasker([(1, 1, 1)], radius=1)
    # Test the fit
    masker.fit()
    # Test the transform
    s = masker.transform(img)
    mask = np.zeros((3, 3, 3), dtype=bool)
    mask[:, 1, 1] = True
    mask[1, :, 1] = True
    mask[1, 1, :] = True
    assert_array_equal(s[:, 0], np.mean(data[mask], axis=0))
    # Now with a mask
    mask_img = np.zeros((3, 3, 3))
    mask_img[1, :, :] = 1
    mask_img = nibabel.Nifti1Image(mask_img, np.eye(4))
    masker = NiftiSpheresMasker([(1, 1, 1)], radius=1, mask_img=mask_img)
    masker.fit()
    s = masker.transform(img)
    assert_array_equal(s[:, 0],
                       np.mean(data[np.logical_and(mask, mask_img.get_data())],
                               axis=0))
def test_anisotropic_sphere_extraction():
    data = np.random.random((3, 3, 3, 5))
    affine = np.eye(4)
    affine[0, 0] = 2
    affine[2, 2] = 2
    img = nibabel.Nifti1Image(data, affine)
    masker = NiftiSpheresMasker([(2, 1, 2)], radius=1)
    # Test the fit
    masker.fit()
    # Test the transform
    s = masker.transform(img)
    mask = np.zeros((3, 3, 3), dtype=bool)
    mask[1, :, 1] = True
    assert_array_equal(s[:, 0], np.mean(data[mask], axis=0))
    # Now with a mask
    mask_img = np.zeros((3, 2, 3))
    mask_img[1, 0, 1] = 1
    affine_2 = affine.copy()
    affine_2[0, 0] = 4
    mask_img = nibabel.Nifti1Image(mask_img, affine=affine_2)
    masker = NiftiSpheresMasker([(2, 1, 2)], radius=1, mask_img=mask_img)
    masker.fit()
    s = masker.transform(img)
    assert_array_equal(s[:, 0], data[1, 0, 1])
Example #21
def test_standardization():
    data = np.random.RandomState(42).random_sample((3, 3, 3, 5))
    img = nibabel.Nifti1Image(data, np.eye(4))

    # test zscore
    masker = NiftiSpheresMasker([(1, 1, 1)], standardize='zscore')
    # Test the fit
    s = masker.fit_transform(img)

    np.testing.assert_almost_equal(s.mean(), 0)
    np.testing.assert_almost_equal(s.std(), 1)

    # test psc
    masker = NiftiSpheresMasker([(1, 1, 1)], standardize='psc')
    # Test the fit
    s = masker.fit_transform(img)

    np.testing.assert_almost_equal(s.mean(), 0)
    np.testing.assert_almost_equal(
        s.ravel(),
        data[1, 1, 1] / data[1, 1, 1].mean() * 100 - 100,
    )
Example #22
def test_is_nifti_spheres_masker_give_nans():
    affine = np.eye(4)

    data_with_nans = np.zeros((10, 10, 10), dtype=np.float32)
    data_with_nans[:, :, :] = np.nan

    data_without_nans = np.random.RandomState(42).random_sample((9, 9, 9))
    indices = np.nonzero(data_without_nans)

    # Leaving nans outside of some data
    data_with_nans[indices] = data_without_nans[indices]
    img = nibabel.Nifti1Image(data_with_nans, affine)
    seed = [(7, 7, 7)]

    # Interaction of seed with nans
    masker = NiftiSpheresMasker(seeds=seed, radius=2.)
    assert not np.isnan(np.sum(masker.fit_transform(img)))

    mask = np.ones((9, 9, 9))
    mask_img = nibabel.Nifti1Image(mask, affine)
    # With mask_img provided, the sphere is restricted to the brain, so no NaNs
    masker = NiftiSpheresMasker(seeds=seed, radius=2., mask_img=mask_img)
    assert not np.isnan(np.sum(masker.fit_transform(img)))
Example #23
class RsaSearchlight(object):
    def __init__(self,
                 mask_img,
                 seeds_img,
                 radius=10.,
                 distance_method='correlation',
                 memory_params=None):
        # Defs
        self.memory_params = memory_params or dict()
        self.seeds_img = seeds_img
        self.mask_img = mask_img
        self.radius = radius
        self.distance_method = distance_method

    def rsa_on_ball_axis_1(self, sphere_data):
        """
        Data: axis=1: [nvoxels, nslices]
        """

        # sphere_data could be a single voxel; in this case, we'll get
        # nan
        similarity_comparisons = pdist(sphere_data.T, self.distance_method)
        self.similarity_comparisons[self.si, :] = similarity_comparisons
        self.n_voxels[self.si] = sphere_data.shape[0]
        self.si += 1

        if self.memory_params.get('verbose', 0) > 1 and self.si % 100 == 99:
            print('Processed %s of %s...' % (self.si + 1, self.n_seeds))
        return similarity_comparisons.std()  # output value for all slices

    def fit(self):
        # Create mask
        print("Fit the SphereMasker...")

        self.n_seeds = int(self.seeds_img.get_data().sum())

        # Pass our xform_fn for a callback on each seed.
        self.sphere_masker = NiftiSpheresMasker(
            seeds=self.seeds_img,
            mask_img=self.seeds_img,
            radius=self.radius,
            xform_fn=self.rsa_on_ball_axis_1,
            standardize=False)  # no mem
        self.sphere_masker.fit()

    def transform(self, func_img):
        print("Transforming the image...")

        n_images = func_img.shape[3]
        n_compares = n_images * (n_images - 1) // 2  # integer, used as an array shape below

        # These are computed within the callback.
        self.si = 0
        self.n_voxels = np.empty((self.n_seeds))
        self.similarity_comparisons = np.empty((self.n_seeds, n_compares))

        similarity_std = self.sphere_masker.transform(func_img)

        # Pull the values off of self, set locally.
        n_voxels = self.n_voxels
        similarity_comparisons = self.similarity_comparisons
        delattr(self, 'si')
        delattr(self, 'n_voxels')
        delattr(self, 'similarity_comparisons')

        # Clean up
        good_seeds = np.logical_not(
            np.isnan(similarity_comparisons.mean(axis=1)))
        n_voxels = n_voxels[good_seeds]
        similarity_comparisons = similarity_comparisons[good_seeds]
        similarity_std = similarity_std[:, good_seeds]  # slices x seeds

        return similarity_comparisons, similarity_std, n_voxels

    def visualize(self,
                  similarity_comparisons,
                  similarity_std=None,
                  anat_img=None,
                  labels=None):
        print("Plotting the results...")

        self.visualize_seeds(anat_img=anat_img)
        self.visualize_mask(anat_img=anat_img)
        self.visualize_comparisons(
            similarity_comparisons=similarity_comparisons,
            labels=labels,
            anat_img=anat_img)
        self.visualize_comparisons_std(similarity_std=similarity_std,
                                       anat_img=anat_img)

    def visualize_seeds(self, anat_img=None):
        plot_roi(self.sphere_masker.seeds_img_,
                 bg_img=anat_img,
                 title='seed img')

    def visualize_mask(self, anat_img=None):
        plot_roi(self.sphere_masker.mask_img_,
                 bg_img=anat_img,
                 title='mask img')

    def visualize_comparisons(self,
                              similarity_comparisons,
                              labels=None,
                              anat_img=None):
        # Plot (up to) twenty comparisons.
        plotted_similarity = similarity_comparisons[:, 0]
        plotted_img = self.sphere_masker.inverse_transform(
            plotted_similarity.T)
        plot_stat_map(plotted_img,
                      bg_img=anat_img,
                      title='RSA comparison %s vs. %s' % tuple(labels[:2]))

        # Plot mosaic of up to 20

        # Choose the comparisons
        idx = np.linspace(0, similarity_comparisons.shape[1] - 1, 20)
        idx = np.unique(np.round(idx).astype(int))  # if there's less than 20

        # Make (and filter) titles
        if labels is None:
            titles = None
        else:
            titles = []
            for ai, label1 in enumerate(labels):
                for bi, label2 in enumerate(labels[(ai + 1):]):
                    titles.append('%s vs. %s' % (label1, label2))
            titles = np.asarray(titles)[idx]

        # Create the image
        plotted_similarity = similarity_comparisons[:, idx]
        plotted_img = self.sphere_masker.inverse_transform(
            plotted_similarity.T)

        fh = plt.figure(figsize=(18, 10))
        plot_mosaic_stat_map(plotted_img,
                             colorbar=False,
                             display_mode='z',
                             bg_img=anat_img,
                             cut_coords=1,
                             figure=fh,
                             title=titles)

    def visualize_comparisons_std(self, similarity_std, anat_img=None):
        if similarity_std is not None:
            RSA_std_img = self.sphere_masker.inverse_transform(
                similarity_std[0])
            plot_stat_map(RSA_std_img, bg_img=anat_img, title='RSA std')
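A hypothetical driver for the class above. Note that xform_fn is not a parameter of the stock nilearn NiftiSpheresMasker, so this class appears to rely on a patched or forked masker; the sketch only shows the intended call order, with seeds_img, mask_img, func_img and condition_labels as placeholder objects.

searchlight = RsaSearchlight(mask_img=mask_img,      # placeholder brain mask image
                             seeds_img=seeds_img,    # placeholder seed image (1s at seed voxels)
                             radius=10.,
                             distance_method='correlation')
searchlight.fit()
rsa_vals, rsa_std, n_voxels = searchlight.transform(func_img)       # placeholder 4D image
searchlight.visualize(rsa_vals, rsa_std, labels=condition_labels)   # placeholder labels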
Example #24
def make_parcellation(data_path, parcellation, parc_type=None, parc_params=None):
    """
    Performs a parcellation which reduces voxel space to regions of interest (brain data).

    Parameters
    ----------

    data_path : str
        Path to .nii image.
    parcellation : str
        Specify which parcellation you would like to use. For MNI: 'gordon2014_333', 'power2012_264'. For TAL: 'shen2013_278'.
        It is possible to add the OH subcortical atlas on top of a cortical atlas (e.g. gordon) by appending:
            '+OH' (for the Harvard-Oxford subcortical atlas) and '+SUIT' (for the SUIT cerebellar atlas),
            e.g.: 'gordon2014_333+OH+SUIT'
    parc_type : str
        Can be 'sphere' or 'region'. If nothing is specified, the default for that parcellation will be used.
    parc_params : dict
        **kwargs for nilearn functions

    Returns
    -------

    data : array
        Data after the parcellation.

    NOTE
    ----
    These functions make use of nilearn. Please cite nilearn if used in a publication.
    """

    if isinstance(parcellation, str):
        parcin = ''
        if '+' in parcellation:
            parcin = parcellation
            parcellation = parcellation.split('+')[0]
        if '+OH' in parcin:
            subcortical = True
        else:
            subcortical = None
        if '+SUIT' in parcin:
            cerebellar = True
        else:
            cerebellar = None

        if not parc_type or not parc_params:
            path = tenetopath[0] + '/data/parcellation_defaults/defaults.json'
            with open(path) as data_file:
                defaults = json.load(data_file)
        if not parc_type:
            parc_type = defaults[parcellation]['type']
            print('Using default parcellation type')
        if not parc_params:
            parc_params = defaults[parcellation]['params']
            print('Using default parameters')

    if parc_type == 'sphere':
        parcellation = load_parcellation_coords(parcellation)
        seed = NiftiSpheresMasker(np.array(parcellation), **parc_params)
        data = seed.fit_transform(data_path)
    elif parc_type == 'region':
        path = tenetopath[0] + '/data/parcellation/' + parcellation + '.nii.gz'
        region = NiftiLabelsMasker(path, **parc_params)
        data = region.fit_transform(data_path)
    else:
        raise ValueError('Unknown parc_type specified')

    if subcortical:
        subatlas = fetch_atlas_harvard_oxford('sub-maxprob-thr0-2mm')['maps']
        region = NiftiLabelsMasker(subatlas, **parc_params)
        data_sub = region.fit_transform(data_path)
        data = np.hstack([data, data_sub])

    if cerebellar:
        path = tenetopath[0] + '/data/parcellation/Cerebellum-SUIT_space-MNI152NLin2009cAsym.nii.gz'
        region = NiftiLabelsMasker(path, **parc_params)
        data_cerebellar = region.fit_transform(data_path)
        data = np.hstack([data, data_cerebellar])

    return data
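A hypothetical call for the function above: the Gordon cortical parcellation combined with the Harvard-Oxford subcortical atlas, relying on the packaged defaults for parcellation type and nilearn parameters (the functional path is a placeholder).

data = make_parcellation('sub-01_task-rest_bold.nii.gz',   # placeholder 4D image
                         'gordon2014_333+OH')
print(data.shape)   # (n_timepoints, n_cortical + n_subcortical regions)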
Example #25
def test_nifti_spheres_masker_overlap():
    # Test overlap handling between spheres in NiftiSpheresMasker
    affine = np.eye(4)
    shape = (5, 5, 5)

    data = np.random.RandomState(42).random_sample(shape + (5, ))
    fmri_img = nibabel.Nifti1Image(data, affine)

    seeds = [(0, 0, 0), (2, 2, 2)]

    overlapping_masker = NiftiSpheresMasker(seeds,
                                            radius=1,
                                            allow_overlap=True)
    overlapping_masker.fit_transform(fmri_img)
    overlapping_masker = NiftiSpheresMasker(seeds,
                                            radius=2,
                                            allow_overlap=True)
    overlapping_masker.fit_transform(fmri_img)

    noverlapping_masker = NiftiSpheresMasker(seeds,
                                             radius=1,
                                             allow_overlap=False)
    noverlapping_masker.fit_transform(fmri_img)
    noverlapping_masker = NiftiSpheresMasker(seeds,
                                             radius=2,
                                             allow_overlap=False)
    with pytest.raises(ValueError, match='Overlap detected'):
        noverlapping_masker.fit_transform(fmri_img)
Example #26
# Find the coordinates of the peak

from nibabel.affines import apply_affine
values = z_map.get_data()
# argsort is ascending, so take the last entry to get the voxel with the largest z value
coord_peaks = np.dstack(
    np.unravel_index(np.argsort(values.ravel()), values.shape))[0, -1, :]
coord_mm = apply_affine(z_map.affine, coord_peaks)

###############################################################################
# We create a masker for the voxel (allowing us to detrend the signal)
# and extract the time course

from nilearn.input_data import NiftiSpheresMasker
mask = NiftiSpheresMasker([coord_mm],
                          radius=3,
                          detrend=True,
                          standardize=True,
                          high_pass=None,
                          low_pass=None,
                          t_r=7.)
sig = mask.fit_transform(fmri_img)

##########################################################
# Let's plot the signal and the theoretical response

plt.plot(frame_times, sig, label='voxel %d %d %d' % tuple(coord_mm))
plt.plot(design_matrix['active'], color='red', label='model')
plt.xlabel('scan')
plt.legend()
plt.show()
Example #27
def make_parcellation(data_path,
                      parcellation,
                      parc_type=None,
                      parc_params=None):
    """
    Performs a parcellation which reduces voxel space to regions of interest (brain data).

    Parameters
    ----------

    data_path : str
        Path to .nii image.
    parcellation : str
        Specify which parcellation you would like to use. For MNI: 'gordon2014_333', 'power2012_264'. For TAL: 'shen2013_278'.
        It is possible to add the OH subcortical atlas on top of a cortical atlas (e.g. gordon) by appending:
            '+sub-maxprob-thr0-1mm', '+sub-maxprob-thr0-2mm', '+sub-maxprob-thr25-1mm', '+sub-maxprob-thr25-2mm',
            '+sub-maxprob-thr50-1mm', '+sub-maxprob-thr50-2mm',
            e.g.: 'gordon2014_333+sub-maxprob-thr0-2mm'
    parc_type : str
        Can be 'sphere' or 'region'. If nothing is specified, the default for that parcellation will be used.
    parc_params : dict
        **kwargs for nilearn functions

    Returns
    -------

    data : array
        Data after the parcellation.

    NOTE
    ----
    These functions make use of nilearn. Please cite nilearn if used in a publication.
    """

    if isinstance(parcellation, str):

        if '+' in parcellation:
            parcin = parcellation.split('+')
            parcellation = parcin[0]
            subcortical = parcin[1]
        else:
            subcortical = None

        if not parc_type or not parc_params:
            path = teneto.__path__[
                0] + '/data/parcellation_defaults/defaults.json'
            with open(path) as data_file:
                defaults = json.load(data_file)
        if not parc_type:
            parc_type = defaults[parcellation]['type']
            print('Using default parcellation type')
        if not parc_params:
            parc_params = defaults[parcellation]['params']
            print('Using default parameters')

    if parc_type == 'sphere':
        parcellation = teneto.utils.load_parcellation_coords(parcellation)
        seed = NiftiSpheresMasker(np.array(parcellation), **parc_params)
        data = seed.fit_transform(data_path)
    elif parc_type == 'region':
        path = teneto.__path__[
            0] + '/data/parcellation/' + parcellation + '.nii'
        region = NiftiLabelsMasker(path, **parc_params)
        data = region.fit_transform(data_path)
    else:
        raise ValueError('Unknown parc_type specified')

    if subcortical:
        subatlas = fetch_atlas_harvard_oxford('sub-maxprob-thr0-2mm')['maps']
        region = NiftiLabelsMasker(subatlas, **parc_params)
        data_sub = region.fit_transform(data_path)
        data = np.hstack([data, data_sub])

    return data
def calc_sc_sphere(in_file, coords, coords_labels, MNI_brain_mask, radius,
                   FWHM):

    from nilearn.input_data import NiftiMasker, NiftiSpheresMasker
    import numpy as np
    import os

    # MNI mask
    MNI_brain_mask = MNI_brain_mask

    # seed coordinates & corresponding labels
    coords = coords
    coords_labels = coords_labels

    # extract time series from coords
    seed_masker = NiftiSpheresMasker(coords,
                                     radius=radius,
                                     standardize=True,
                                     memory='nilearn_cache',
                                     memory_level=5,
                                     verbose=5)

    seed_time_series_array = seed_masker.fit_transform(in_file)

    # extract time series brain-wide
    brain_masker = NiftiMasker(mask_img=MNI_brain_mask,
                               smoothing_fwhm=FWHM,
                               standardize=True,
                               memory='nilearn_cache',
                               memory_level=5,
                               verbose=2)

    brain_time_series = brain_masker.fit_transform(in_file)

    # check that the number of labels in coords_labels equals the number of seed
    # time series (which depends on the number of coordinate sets given in "coords");
    # bail out if they do not match
    if len(coords_labels) == seed_time_series_array.shape[1]:

        icoord = 0
        corr_maps_dict = dict.fromkeys(coords_labels)
        for seed in coords_labels:
            print("##################################")
            print(seed)
            print("##################################")

            # take the seed time series at column icoord and transpose it into a
            # column vector of shape (n_timepoints, 1)
            seed_time_series = np.matrix(seed_time_series_array[:, icoord]).T

            # compute the correlation of every brain voxel with the seed; the dot
            # product divided by n equals Pearson r because both series are standardized
            seed_based_correlations = np.dot(brain_time_series.T, seed_time_series) / \
                                      seed_time_series.shape[0]

            # increment icoord to move to the next column of seed_time_series_array
            icoord = icoord + 1

            # Fisher-z transform the data to achieve a normal distribution
            seed_based_correlations_fisher_z = np.arctanh(
                seed_based_correlations)

            # transform the 2-D voxel array back into a 3-D image
            seed_based_correlation_img = brain_masker.inverse_transform(
                seed_based_correlations_fisher_z.T)
            print("##################################")

            # write the correlation image to disk (nipype interfaces expect file paths)
            out_file = os.path.abspath('corr_map_' + seed + '_rad5.nii.gz')
            seed_based_correlation_img.to_filename(out_file)

            corr_maps_dict[seed] = out_file

        return corr_maps_dict

    else:
        print("#" * 85)
        print("Number of labels in coords_labels does not match the number of "
              "seed time series extracted from coords!")
        print("#" * 85)
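The dot-product step in the function above yields Pearson correlations only because both maskers standardize their outputs to zero mean and unit variance; a small self-contained check of that identity (independent of the function above):

import numpy as np

rng = np.random.RandomState(0)
n = 200
a = rng.randn(n)
b = 0.5 * a + rng.randn(n)

# z-score both series, as standardize=True does inside the maskers
az = (a - a.mean()) / a.std()
bz = (b - b.mean()) / b.std()

corr_via_dot = np.dot(az, bz) / n          # the formula used in calc_sc_sphere
corr_via_numpy = np.corrcoef(a, b)[0, 1]   # reference Pearson correlation
assert np.isclose(corr_via_dot, corr_via_numpy)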
Example #29
# Prepare timing
t_r = 2.
slice_time_ref = 0.
n_scans = 176

# Prepare seed
pcc_coords = (0, -53, 26)

#########################################################################
# Estimate contrasts
# ------------------
# Specify the contrasts
seed_masker = NiftiSpheresMasker([pcc_coords], radius=10, detrend=True,
                                 standardize=True, low_pass=0.1,
                                 high_pass=0.01, t_r=2.,
                                 memory='nilearn_cache',
                                 memory_level=1, verbose=0)
seed_time_series = seed_masker.fit_transform(adhd_dataset.func[0])
frametimes = np.linspace(0, (n_scans - 1) * t_r, n_scans)
design_matrix = make_first_level_design_matrix(frametimes, hrf_model='spm',
                                               add_regs=seed_time_series,
                                               add_reg_names=["pcc_seed"])
dmn_contrast = np.array([1] + [0]*(design_matrix.shape[1]-1))
contrasts = {'seed_based_glm': dmn_contrast}

#########################################################################
# Perform first level analysis
# ----------------------------
# Setup and fit GLM
first_level_model = FirstLevelModel(t_r=t_r, slice_time_ref=slice_time_ref)
Example #30
def test_errors():
    masker = NiftiSpheresMasker(([1, 2]), radius=.2)
    with pytest.raises(ValueError, match='Seeds must be a list .+'):
        masker.fit()
Example #31
def atlas_masker_spheres(coords, radius):
    masker = NiftiSpheresMasker(coords, radius)
    assert isinstance(masker, object)
    return masker
def test_errors():
    masker = NiftiSpheresMasker(([1, 2]), radius=.2)
    assert_raises_regex(ValueError, 'Seeds must be a list .+', masker.fit)
Example #34
def test_nifti_spheres_masker_inverse_overlap():
    rng = np.random.RandomState(42)

    # Test overlapping data in inverse_transform
    affine = np.eye(4)
    shape = (5, 5, 5)

    data = rng.random_sample(shape + (5, ))
    fmri_img = nibabel.Nifti1Image(data, affine)

    # Apply mask image - to allow inversion
    mask_img = new_img_like(fmri_img, np.ones(shape))
    seeds = [(0, 0, 0), (2, 2, 2)]
    # Inverse data
    inv_data = rng.random_sample(len(seeds))

    overlapping_masker = NiftiSpheresMasker(seeds,
                                            radius=1,
                                            allow_overlap=True,
                                            mask_img=mask_img).fit()
    overlapping_masker.inverse_transform(inv_data)

    overlapping_masker = NiftiSpheresMasker(seeds,
                                            radius=2,
                                            allow_overlap=True,
                                            mask_img=mask_img).fit()

    overlap = overlapping_masker.inverse_transform(inv_data)

    # Test whether overlapping data is averaged
    assert_array_almost_equal(get_data(overlap)[1, 1, 1], np.mean(inv_data))

    noverlapping_masker = NiftiSpheresMasker(seeds,
                                             radius=1,
                                             allow_overlap=False,
                                             mask_img=mask_img).fit()

    noverlapping_masker.inverse_transform(inv_data)
    noverlapping_masker = NiftiSpheresMasker(seeds,
                                             radius=2,
                                             allow_overlap=False,
                                             mask_img=mask_img).fit()

    with pytest.raises(ValueError, match='Overlap detected'):
        noverlapping_masker.inverse_transform(inv_data)
def test_nifti_spheres_masker_overlap():
    # Test overlap handling between spheres in NiftiSpheresMasker
    affine = np.eye(4)
    shape = (5, 5, 5)

    data = np.random.random(shape + (5, ))
    fmri_img = nibabel.Nifti1Image(data, affine)

    seeds = [(0, 0, 0), (2, 2, 2)]

    overlapping_masker = NiftiSpheresMasker(seeds,
                                            radius=1,
                                            allow_overlap=True)
    overlapping_masker.fit_transform(fmri_img)
    overlapping_masker = NiftiSpheresMasker(seeds,
                                            radius=2,
                                            allow_overlap=True)
    overlapping_masker.fit_transform(fmri_img)

    noverlapping_masker = NiftiSpheresMasker(seeds,
                                             radius=1,
                                             allow_overlap=False)
    noverlapping_masker.fit_transform(fmri_img)
    noverlapping_masker = NiftiSpheresMasker(seeds,
                                             radius=2,
                                             allow_overlap=False)
    assert_raises_regex(ValueError, 'Overlap detected',
                        noverlapping_masker.fit_transform, fmri_img)
Example #36
def test_small_radius_inverse():
    affine = np.eye(4)
    shape = (3, 3, 3)

    data = np.random.RandomState(42).random_sample(shape)
    mask = np.zeros(shape)
    mask[1, 1, 1] = 1
    mask[2, 2, 2] = 1
    affine = np.eye(4) * 1.2
    seed = (1.4, 1.4, 1.4)

    masker = NiftiSpheresMasker([seed],
                                radius=0.1,
                                mask_img=nibabel.Nifti1Image(mask, affine))
    spheres_data = masker.fit_transform(nibabel.Nifti1Image(data, affine))
    masker.inverse_transform(spheres_data)
    # Test if masking is taken into account
    mask[1, 1, 1] = 0
    mask[1, 1, 0] = 1

    masker = NiftiSpheresMasker([seed],
                                radius=0.1,
                                mask_img=nibabel.Nifti1Image(mask, affine))
    masker.fit(nibabel.Nifti1Image(data, affine))

    with pytest.raises(ValueError, match='These spheres are empty'):
        masker.inverse_transform(spheres_data)

    masker = NiftiSpheresMasker([seed],
                                radius=1.6,
                                mask_img=nibabel.Nifti1Image(mask, affine))
    masker.fit(nibabel.Nifti1Image(data, affine))
    masker.inverse_transform(spheres_data)
Example #37
def test_nifti_spheres_masker_inverse_transform():
    # Applying the sphere_extraction example from above backwards
    data = np.random.RandomState(42).random_sample((3, 3, 3, 5))
    img = nibabel.Nifti1Image(data, np.eye(4))
    masker = NiftiSpheresMasker([(1, 1, 1)], radius=1)
    # Test the fit
    masker.fit()
    # Transform data
    with pytest.raises(ValueError, match='Please provide mask_img'):
        masker.inverse_transform(data[0, 0, 0, :])

    # Mask describes the extent of the masker's sphere
    mask = np.zeros((3, 3, 3), dtype=bool)
    mask[:, 1, 1] = True
    mask[1, :, 1] = True
    mask[1, 1, :] = True

    # Now with a mask
    mask_img = np.zeros((3, 3, 3))
    mask_img[1, :, :] = 1
    mask_img = nibabel.Nifti1Image(mask_img, np.eye(4))
    masker = NiftiSpheresMasker([(1, 1, 1)], radius=1, mask_img=mask_img)
    masker.fit()
    s = masker.transform(img)
    # Create an array mask
    array_mask = np.logical_and(mask, get_data(mask_img))

    inverse_map = masker.inverse_transform(s)

    # Testing whether mask is applied to inverse transform
    assert_array_equal(
        np.mean(get_data(inverse_map), axis=-1) != 0, array_mask)
    # Test whether values are preserved
    assert_array_equal(get_data(inverse_map)[array_mask].mean(0), s[:, 0])

    # Test whether the mask's shape is applied
    assert_array_equal(inverse_map.shape[:3], mask_img.shape)
Example #38
def signal_extract(func_data=None,confounds=None,atlas_img=None,masker_type='Spheres',smoothing_fwhm=6,high_pass=0.01,low_pass=0.1,t_r=2.2,detrend=False,saveas='file'):
    
    """
    Extracts BOLD time-series from regions of interest
    
    Parameters
    ----------
    func_data: functional images ( Default= None ) 
    
    confounds: Confounds file used to clean signals ( Default= None )
    
    atlas_img: regions or coordinates to extract signals from ( Default= None )
    
    masker_type : Type of masker used to extract BOLD signals . types are : 'Spheres','Maps','Labels'
    
    smoothing_fwhm : Smoothing width applied to signals in mm ( Default= 6 mm )
    
    high_pass, low_pass: Bandpass-Filtering ( Default= 0.01-0.1 Hz )
    
    detrend: Detrending signals ( Default= False )
    
    saveas : Destination to save and load output (.npz)
    
    Returns
    ---------
    subjects_ts : list of 2-D arrays, each (n_timepoints, n_regions)
                  BOLD time series for each subject
    """
    subjects_ts=[]
    
    if os.path.exists(saveas):
        
        subjects_ts=np.load(saveas)['arr_0']
        
    else:
        
        if masker_type == 'Spheres':
            masker = NiftiSpheresMasker(
                            seeds=atlas_img, smoothing_fwhm=smoothing_fwhm, radius=4 ,mask_img=brainmask,
                            detrend=False, standardize=True, low_pass=low_pass, high_pass=high_pass, t_r=t_r
            )
        elif masker_type == 'Maps':
            masker = NiftiMapsMasker(
                                    maps_img=atlas_img,mask_img=brainmask,standardize=True,
                                    low_pass=low_pass, high_pass=high_pass, t_r=t_r,
                                    memory_level=2,smoothing_fwhm=smoothing_fwhm,resampling_target='data',
                                    memory=mem,verbose=5
            )
        elif masker_type == 'Labels':
            masker = NiftiLabelsMasker(
                                 labels_img=atlas_img,mask_img=brainmask,standardize=True,
                                 high_pass=high_pass,low_pass=low_pass,detrend=False,t_r=t_r,
                                 memory_level=2,smoothing_fwhm=smoothing_fwhm,resampling_target='data',
                                 memory=mem,verbose=5
            )
            
        else:
            raise ValueError("Please provide masker type")
        
        if confounds is not None:    
            for func_file, confound_file in zip(func_data,confounds):
                time_series = masker.fit_transform(func_file,confounds=confound_file)
                subjects_ts.append(time_series)
                np.savez(saveas,subjects_ts)
        else:
            for func_file in func_data:
                time_series = masker.fit_transform(func_file)
                subjects_ts.append(time_series)
                np.savez(saveas, subjects_ts)
            
    return subjects_ts
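A hypothetical call for this variant, pairing each run with its confounds file; as in the earlier version, brainmask and mem are assumed to be defined at module level, and all paths are placeholders.

func_files = ['sub-01_bold.nii.gz', 'sub-02_bold.nii.gz']           # placeholders
confound_files = ['sub-01_confounds.tsv', 'sub-02_confounds.tsv']   # placeholders

subjects_ts = signal_extract(func_data=func_files,
                             confounds=confound_files,
                             atlas_img='atlas_labels.nii.gz',        # placeholder atlas
                             masker_type='Labels',
                             t_r=2.2,
                             saveas='labels_timeseries.npz')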