def transform_single_imgs(self, imgs, confounds=None, copy=True, shelve=False):
    """Apply mask, spatial and temporal preprocessing to one image.

    Parameters
    ----------
    imgs: 3D/4D Niimg-like object
        See http://nilearn.github.io/manipulating_visualizing/manipulating_images.html#niimg.
        Images to process. It must boil down to a 4D image with
        scans number as last dimension.

    confounds: CSV file or array-like, optional
        This parameter is passed to signal.clean. Please see the related
        documentation for details.
        shape: (number of scans, number of confounds)

    copy: boolean, optional
        If True, the returned data is guaranteed not to share memory with
        the input data.

    shelve: boolean, optional
        Forwarded to shelved_filter_and_mask (controls deferred loading of
        the result).

    Returns
    -------
    region_signals: 2D numpy.ndarray
        Signal for each voxel inside the mask.
        shape: (number of scans, number of voxels)
    """
    # Exclude the mask-computing parameters from the cache key: they do
    # not affect the transform and would needlessly invalidate the cache.
    # target_shape and target_affine are conveyed implicitly in mask_img.
    cleaning_params = get_params(self.__class__, self,
                                 ignore=['mask_img', 'mask_args',
                                         'mask_strategy'])
    signals, _ = shelved_filter_and_mask(self, imgs, cleaning_params,
                                         confounds=confounds,
                                         copy=copy,
                                         shelve=shelve)
    return signals
def params_masker(instance, estimator):
    """Get params of NiftiMasker or MultiNiftiMasker instance

    Parameters
    ----------
    instance : NiftiMasker or MultiNiftiMasker
        called object or instance

    estimator : class
        Class whose constructor signature selects which parameters are
        read off ``instance`` (forwarded to ``get_params``).
    """
    # Extract the parameter values of `instance` restricted to the
    # parameters declared by `estimator`.
    params = get_params(estimator, instance)
    # NOTE(review): `multi_masker` is assigned but never used, and the
    # function falls off the end returning None — this looks like an
    # unfinished fragment. Presumably `params` (and possibly the
    # multi-masker flag) should be returned; confirm against callers.
    if isinstance(instance, MultiNiftiMasker):
        multi_masker = True
def test_filter_and_mask():
    """A 4D mask image must be rejected with a DimensionError."""
    affine = np.eye(4)
    vol_img = nibabel.Nifti1Image(np.zeros((20, 30, 40, 5)), affine)
    # Build an (invalid) 4D mask with a single active voxel.
    bad_mask = np.zeros((20, 30, 40, 2))
    bad_mask[10, 15, 20, :] = 1
    bad_mask_img = nibabel.Nifti1Image(bad_mask, affine)
    nifti_masker = NiftiMasker()
    masker_params = get_params(NiftiMasker, nifti_masker)
    assert_raises_regex(DimensionError, "Data must be a 3D",
                        filter_and_mask,
                        vol_img, bad_mask_img, masker_params)
def test_filter_and_mask():
    """filter_and_mask flattens a fully-masked 4D image to (scans, voxels)."""
    affine = np.eye(4)
    data_img = nibabel.Nifti1Image(np.zeros((20, 30, 40, 5)), affine)
    # Full mask: every one of the 20*30*40 = 24000 voxels is kept.
    mask_img = nibabel.Nifti1Image(np.ones((20, 30, 40)), affine)
    params = get_params(NiftiMasker, NiftiMasker())
    # Test return_affine = False
    signals = filter_and_mask(data_img, mask_img, params)
    assert_equal(signals.shape, (5, 24000))
def test_filter_and_mask_error():
    """A 4D mask must raise DimensionError with the detailed message."""
    affine = np.eye(4)
    img = nibabel.Nifti1Image(np.zeros((20, 30, 40, 5)), affine)
    invalid_mask = np.zeros((20, 30, 40, 2))
    invalid_mask[10, 15, 20, :] = 1
    invalid_mask_img = nibabel.Nifti1Image(invalid_mask, affine)
    params = get_params(NiftiMasker, NiftiMasker())
    expected_msg = ("Input data has incompatible dimensionality: "
                    "Expected dimension is 3D and you provided "
                    "a 4D image.")
    assert_raises_regex(DimensionError, expected_msg,
                        filter_and_mask,
                        img, invalid_mask_img, params)
def test_filter_and_mask_error():
    """Passing a 4D mask to filter_and_mask raises DimensionError."""
    identity_affine = np.eye(4)
    four_d_mask = np.zeros((20, 30, 40, 2))
    four_d_mask[10, 15, 20, :] = 1
    data_img = nibabel.Nifti1Image(np.zeros((20, 30, 40, 5)),
                                   identity_affine)
    mask_img = nibabel.Nifti1Image(four_d_mask, identity_affine)
    masker = NiftiMasker()
    masker_params = get_params(NiftiMasker, masker)
    assert_raises_regex(
        DimensionError,
        "Input data has incompatible dimensionality: "
        "Expected dimension is 3D and you provided "
        "a 4D image.",
        filter_and_mask,
        data_img, mask_img, masker_params)
def transform_single_imgs(self, imgs, confounds=None):
    """Extract signals from a single 4D niimg.

    Parameters
    ----------
    imgs: 3D/4D Niimg-like object
        See http://nilearn.github.io/manipulating_images/input_output.html
        Images to process. It must boil down to a 4D image with
        scans number as last dimension.

    confounds: CSV file or array-like, optional
        This parameter is passed to signal.clean. Please see the related
        documentation for details.
        shape: (number of scans, number of confounds)

    Returns
    -------
    region_signals: 2D numpy.ndarray
        Signal for each sphere.
        shape: (number of scans, number of spheres)
    """
    self._check_fitted()

    cleaning_params = get_params(NiftiSpheresMasker, self)
    # Build the per-sphere extraction functor once.
    extraction = _ExtractionFunctor(self.seeds_, self.radius, self.mask_img,
                                    self.allow_overlap, self.dtype)
    # Caching-only arguments must not take part in the cache key.
    cached_extract = self._cache(
        filter_and_extract,
        ignore=['verbose', 'memory', 'memory_level'])

    sphere_signals, _ = cached_extract(
        imgs,
        extraction,
        cleaning_params,
        confounds=confounds,
        dtype=self.dtype,
        memory=self.memory,
        memory_level=self.memory_level,
        verbose=self.verbose)
    return sphere_signals
def our_transform_single_imgs(self, imgs, confounds=None, copy=True):
    """Apply mask, spatial and temporal preprocessing.

    Parameters
    ----------
    imgs: 3D/4D Niimg-like object
        See http://nilearn.github.io/manipulating_images/input_output.html.
        Images to process. It must boil down to a 4D image with
        scans number as last dimension.

    confounds: CSV file or array-like, optional
        This parameter is passed to signal.clean. Please see the related
        documentation for details.
        shape: (number of scans, number of confounds)

    copy: boolean, optional
        If True, the output array shares no memory with the input.

    Returns
    -------
    region_signals: 2D numpy.ndarray
        Signal for each voxel inside the mask.
        shape: (number of scans, number of voxels)
    """
    imgs = check_niimg(imgs)

    # Mask-computing params are irrelevant once mask_img_ is fixed and
    # would only invalidate the cache for no good reason.
    # target_shape and target_affine are conveyed implicitly in mask_img.
    cleaning_params = get_params(self.__class__, self,
                                 ignore=['mask_img', 'mask_args',
                                         'mask_strategy'])
    cached_filter_and_mask = self._cache(
        filter_and_mask,
        ignore=['verbose', 'memory', 'memory_level', 'copy'],
        shelve=self._shelving)

    return cached_filter_and_mask(imgs, self.mask_img_, cleaning_params,
                                  memory_level=self.memory_level,
                                  memory=self.memory,
                                  verbose=self.verbose,
                                  confounds=confounds,
                                  copy=copy)
def test_get_params():
    """get_params restricts to A's params and honours the ignore list."""
    instance = B()
    assert class_inspect.get_params(A, instance) == {'a': 1}
    assert class_inspect.get_params(A, instance, ignore=['a']) == {}
def check_embedded_nifti_masker(estimator):
    """Base function for using a masker within a BaseEstimator class

    This creates a masker from instance parameters :
    - If instance contains a mask image in mask parameter,
    we use this image as new masker mask_img, forwarding instance parameters to
    new masker : smoothing_fwhm, standardize, detrend, low_pass= high_pass,
    t_r, target_affine, target_shape, mask_strategy, mask_args,
    - If instance contains a masker in mask parameter, we use a copy of
    this masker, overriding all instance masker related parameters.
    In all case, we forward system parameters of instance to new masker :
    memory, memory_level, verbose, n_jobs

    Parameters
    ----------
    estimator: object, instance of BaseEstimator
        The object that gives us the values of the parameters

    Returns
    -------
    masker: MultiNiftiMasker or NiftiMasker
        New masker
    """
    # Parameters of the estimator, restricted to MultiNiftiMasker's params.
    estimator_params = get_params(MultiNiftiMasker, estimator)
    mask = getattr(estimator, 'mask', None)
    if mask is not None and hasattr(mask, 'mask_img'):
        # Creating (Multi)NiftiMasker from provided masker
        masker_class = mask.__class__
        masker_params = get_params(MultiNiftiMasker, mask)
        new_masker_params = masker_params
    else:
        # Creating (Multi)NiftiMasker
        # with parameters extracted from estimator
        masker_class = MultiNiftiMasker
        new_masker_params = estimator_params
        new_masker_params['mask_img'] = mask
    # Forwarding technical params
    new_masker_params['n_jobs'] = estimator.n_jobs
    new_masker_params['memory'] = estimator.memory
    # The masker caches one level below the estimator, hence the -1
    # (floored at 0).
    new_masker_params['memory_level'] = max(0, estimator.memory_level - 1)
    new_masker_params['verbose'] = estimator.verbose
    # Raising warning if masker override parameters
    conflict_string = ""
    for param_key in sorted(estimator_params):
        # np.any handles array-valued params (e.g. target_affine).
        if np.any(new_masker_params[param_key] !=
                  estimator_params[param_key]):
            conflict_string += ("Parameter {0} :\n"
                                "    Masker parameter {1}"
                                " - overriding estimator parameter "
                                "{2}\n").format(param_key,
                                                new_masker_params[param_key],
                                                estimator_params[param_key])
    if conflict_string != "":
        warn_str = ("Overriding provided-default estimator parameters with"
                    " provided masker parameters :\n"
                    "{0:s}").format(conflict_string)
        warnings.warn(warn_str)
    masker = masker_class(**new_masker_params)
    # Forwarding potential attribute of provided masker
    if hasattr(mask, 'mask_img_'):
        # Allow free fit of returned mask
        masker.mask_img = mask.mask_img_
    return masker
def transform_imgs(self, imgs_list, confounds=None, copy=True, n_jobs=1,
                   shelve=False):
    """Prepare multi subject data in parallel.

    Parameters
    ----------
    imgs_list: list of Niimg-like objects
        See http://nilearn.github.io/manipulating_visualizing/manipulating_images.html#niimg.
        List of imgs file to prepare. One item per subject.

    confounds: list of confounds, optional
        List of confounds (2D arrays or filenames pointing to CSV
        files). Must be of same length than imgs_list.

    copy: boolean, optional
        If True, guarantees that output array has no memory in common
        with input array.

    n_jobs: integer, optional
        The number of cpus to use to do the computation. -1 means
        'all cpus'.

    shelve: boolean, optional
        Forwarded to shelved_filter_and_mask.

    Returns
    -------
    region_signals: list of 2D numpy.ndarray
        List of signal for each element per subject.
        shape: list of (number of scans, number of elements)
    """
    if not hasattr(self, 'mask_img_'):
        raise ValueError(
            'It seems that %s has not been fitted. '
            'You must call fit() before calling transform().'
            % self.__class__.__name__)

    # Without an explicit target_affine, resample every image onto the
    # field of view of the first one.
    target_fov = 'first' if self.target_affine is None else None
    niimg_iter = _iter_check_niimg(imgs_list, ensure_ndim=None,
                                   atleast_4d=False,
                                   target_fov=target_fov,
                                   memory=self.memory,
                                   memory_level=self.memory_level,
                                   verbose=self.verbose)

    if confounds is None:
        confounds = itertools.repeat(None, len(imgs_list))

    # Ignore the mask-computing params: they are not useful and will
    # just invalidate the cache for no good reason.
    # target_shape and target_affine are conveyed implicitly in mask_img.
    cleaning_params = get_params(
        self.__class__, self,
        ignore=['mask_img', 'mask_args', 'mask_strategy', 'copy'])

    return Parallel(n_jobs=n_jobs)(
        delayed(shelved_filter_and_mask)(self, imgs, cleaning_params,
                                         confounds=cfs,
                                         copy=copy,
                                         shelve=shelve,
                                         squeeze=True)
        for imgs, cfs in izip(niimg_iter, confounds))
def test_get_params():
    """get_params restricts to A's params and honours the ignore list."""
    obj = B()
    assert_equal(class_inspect.get_params(A, obj), {'a': 1})
    assert_equal(class_inspect.get_params(A, obj, ignore=['a']), {})
def fit(self, niimgs=None, y=None, confounds=None, connectivity=None):
    """Compute the mask and the components

    Parameters
    ----------
    niimgs: list of filenames or NiImages
        Data on which the PCA must be calculated. If this is a list,
        the affine is considered the same for all.

    y : ignored
        Present for scikit-learn API compatibility.

    confounds : list of confounds, optional
        Forwarded to subject_covariance for signal cleaning.

    connectivity : optional
        Forwarded to subject_covariance.
        # NOTE(review): semantics not visible from this code — confirm.
    """
    # Hack to support single-subject data:
    if isinstance(niimgs, (basestring, nibabel.Nifti1Image)):
        niimgs = [niimgs]
        # This is a very incomplete hack, as it won't work right for
        # single-subject list of 3D filenames
    # First, learn the mask
    if not isinstance(self.mask, MultiNiftiMasker):
        # Build a fresh masker from this estimator's own parameters.
        self.masker_ = MultiNiftiMasker(mask=self.mask,
                                        smoothing_fwhm=self.smoothing_fwhm,
                                        target_affine=self.target_affine,
                                        target_shape=self.target_shape,
                                        low_pass=self.low_pass,
                                        high_pass=self.high_pass,
                                        t_r=self.t_r,
                                        memory=self.memory,
                                        memory_level=self.memory_level)
    else:
        try:
            self.masker_ = clone(self.mask)
        except TypeError as e:
            # Workaround for a joblib bug: in joblib 0.6, a Memory object
            # with cachedir = None cannot be cloned.
            masker_memory = self.mask.memory
            if masker_memory.cachedir is None:
                # Temporarily drop the Memory object, clone, then restore
                # it on the original and give the clone an equivalent one.
                self.mask.memory = None
                self.masker_ = clone(self.mask)
                self.mask.memory = masker_memory
                self.masker_.memory = Memory(cachedir=None)
            else:
                # The error was raised for another reason
                raise e
    # Estimator-level parameters always win over the (cloned) masker's:
    # warn when a non-None masker value is about to be overridden.
    for param_name in ['target_affine', 'target_shape',
                       'smoothing_fwhm', 'low_pass', 'high_pass',
                       't_r', 'memory', 'memory_level']:
        if getattr(self.masker_, param_name) is not None:
            warnings.warn('Parameter %s of the masker overriden'
                          % param_name)
        setattr(self.masker_, param_name, getattr(self, param_name))
    # Fit the mask on the data only when none was provided.
    if self.masker_.mask is None:
        self.masker_.fit(niimgs)
    else:
        self.masker_.fit()
    self.mask_img_ = self.masker_.mask_img_
    parameters = get_params(MultiNiftiMasker, self)
    # Now compute the covariances
    self.covariances_ = Parallel(n_jobs=self.n_jobs, verbose=self.verbose)(
        delayed(subject_covariance)(
            self.estimator, niimg,
            self.masker_.mask_img_,
            parameters,
            memory=self.memory,
            ref_memory_level=self.memory_level,
            confounds=confounds,
            connectivity=connectivity,
            verbose=self.verbose)
        for niimg in niimgs)
    return self
def transform_imgs(self, imgs_list, confounds=None, copy=True, n_jobs=1,
                   shelve=False):
    """Prepare multi subject data in parallel.

    Parameters
    ----------
    imgs_list: list of Niimg-like objects
        See http://nilearn.github.io/manipulating_visualizing/manipulating_images.html#niimg.
        List of imgs file to prepare. One item per subject.

    confounds: list of confounds, optional
        List of confounds (2D arrays or filenames pointing to CSV
        files). Must be of same length than imgs_list.

    copy: boolean, optional
        If True, guarantees that output array has no memory in common
        with input array.

    n_jobs: integer, optional
        The number of cpus to use to do the computation. -1 means
        'all cpus'.

    shelve: boolean, optional
        Forwarded to shelved_filter_and_mask.

    Returns
    -------
    region_signals: list of 2D numpy.ndarray
        List of signal for each element per subject.
        shape: list of (number of scans, number of elements)
    """
    if not hasattr(self, 'mask_img_'):
        raise ValueError('It seems that %s has not been fitted. '
                         'You must call fit() before calling transform().'
                         % self.__class__.__name__)

    if self.target_affine is None:
        # Force resampling on first image
        fov = 'first'
    else:
        fov = None
    checked_niimgs = _iter_check_niimg(imgs_list, ensure_ndim=None,
                                       atleast_4d=False,
                                       target_fov=fov,
                                       memory=self.memory,
                                       memory_level=self.memory_level,
                                       verbose=self.verbose)

    # One confound entry per subject; default to "no confounds".
    subject_confounds = (itertools.repeat(None, len(imgs_list))
                         if confounds is None else confounds)

    # Ignore the mask-computing params: they are not useful and will
    # just invalidate the cache for no good reason.
    # target_shape and target_affine are conveyed implicitly in mask_img.
    masking_params = get_params(self.__class__, self,
                                ignore=['mask_img', 'mask_args',
                                        'mask_strategy', 'copy'])

    subject_signals = Parallel(n_jobs=n_jobs)(
        delayed(shelved_filter_and_mask)(self, imgs, masking_params,
                                         confounds=cfs,
                                         copy=copy,
                                         shelve=shelve,
                                         squeeze=True)
        for imgs, cfs in izip(checked_niimgs, subject_confounds))
    return subject_signals