def _check_memory(memory, verbose=0):
    """Ensure that the given object is a joblib.Memory instance.

    Parameters
    ----------
    memory: None or instance of joblib.Memory or str
        Used to cache the masking process.
        If a str is given, it is the path to the caching directory.

    verbose : int, optional (default 0)
        Verbosity level.

    Returns
    -------
    memory: instance of joblib.Memory.
    """
    if memory is None:
        memory = Memory(cachedir=None, verbose=verbose)
    if isinstance(memory, _basestring):
        cache_dir = memory
        if nilearn.EXPAND_PATH_WILDCARDS:
            cache_dir = os.path.expanduser(cache_dir)

        # Perform some verifications on the given path.
        split_cache_dir = os.path.split(cache_dir)
        if (len(split_cache_dir) > 1 and
                (not os.path.exists(split_cache_dir[0]) and
                 split_cache_dir[0] != '')):
            if (not nilearn.EXPAND_PATH_WILDCARDS and
                    cache_dir.startswith("~")):
                # Maybe the user wants to enable expanded user paths.
                error_msg = ("Given cache path parent directory doesn't "
                             "exist, you gave '{0}'. Enabling "
                             "nilearn.EXPAND_PATH_WILDCARDS could solve "
                             "this issue.".format(split_cache_dir[0]))
            elif memory.startswith("~"):
                # Path built on top of expanded user path doesn't exist.
                error_msg = ("Given cache path parent directory doesn't "
                             "exist, you gave '{0}' which was expanded "
                             "as '{1}' but doesn't exist either. Use "
                             "nilearn.EXPAND_PATH_WILDCARDS to deactivate "
                             "auto expand user path (~) behavior."
                             .format(split_cache_dir[0],
                                     os.path.dirname(memory)))
            else:
                # The given cache base path doesn't exist.
                error_msg = ("Given cache path parent directory doesn't "
                             "exist, you gave '{0}'."
                             .format(split_cache_dir[0]))
            raise ValueError(error_msg)

        memory = Memory(cachedir=cache_dir, verbose=verbose)
    return memory
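A brief usage sketch, not part of the source: the three accepted inputs and what they normalize to. It assumes this helper is importable from nilearn._utils.cache_mixin (as the tests below suggest) and a joblib version where Memory still accepts the cachedir keyword, matching the code above.

# Hedged usage sketch for _check_memory; import path and cache directory
# are illustrative assumptions.
from joblib import Memory
from nilearn._utils import cache_mixin

mem = cache_mixin._check_memory(None)                    # -> Memory(cachedir=None), a no-op cache
mem = cache_mixin._check_memory('/tmp/nilearn_cache')    # str -> Memory caching under that path
mem = cache_mixin._check_memory(Memory(cachedir='/tmp/nilearn_cache'))  # passed through unchanged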
def test_cache_memory_level():
    with tempfile.TemporaryDirectory() as temp_dir:
        joblib_dir = Path(temp_dir, 'joblib', 'nilearn', 'tests',
                          'test_cache_mixin', 'f')
        mem = Memory(cachedir=temp_dir, verbose=0)
        cache_mixin.cache(f, mem, func_memory_level=2, memory_level=1)(2)
        assert_equal(len(_get_subdirs(joblib_dir)), 0)
        cache_mixin.cache(f, Memory(cachedir=None))(2)
        assert_equal(len(_get_subdirs(joblib_dir)), 0)
        cache_mixin.cache(f, mem, func_memory_level=2, memory_level=3)(2)
        assert_equal(len(_get_subdirs(joblib_dir)), 1)
        cache_mixin.cache(f, mem)(3)
        assert_equal(len(_get_subdirs(joblib_dir)), 2)
def test_cache_memory_level():
    temp_dir = tempfile.mkdtemp()
    job_glob = os.path.join(temp_dir, 'joblib', 'nilearn', 'tests',
                            'test_cache_mixin', 'f', '*')
    mem = Memory(cachedir=temp_dir, verbose=0)
    cache_mixin.cache(f, mem, func_memory_level=2, memory_level=1)(2)
    assert_equal(len(glob.glob(job_glob)), 0)
    cache_mixin.cache(f, Memory(cachedir=None))(2)
    assert_equal(len(glob.glob(job_glob)), 0)
    cache_mixin.cache(f, mem, func_memory_level=2, memory_level=3)(2)
    assert_equal(len(glob.glob(job_glob)), 2)
    cache_mixin.cache(f, mem)(3)
    assert_equal(len(glob.glob(job_glob)), 3)
def test_shelving():
    mask_img = Nifti1Image(np.ones((2, 2, 2), dtype=np.int8),
                           affine=np.diag((4, 4, 4, 1)))
    epi_img1 = Nifti1Image(np.ones((2, 2, 2)),
                           affine=np.diag((4, 4, 4, 1)))
    epi_img2 = Nifti1Image(np.ones((2, 2, 2)),
                           affine=np.diag((2, 2, 2, 1)))
    cachedir = mkdtemp()
    try:
        masker_shelved = MultiNiftiMasker(mask_img=mask_img,
                                          memory=Memory(cachedir=cachedir,
                                                        mmap_mode='r',
                                                        verbose=0))
        masker_shelved._shelving = True
        masker = MultiNiftiMasker(mask_img=mask_img)
        epis_shelved = masker_shelved.fit_transform([epi_img1, epi_img2])
        epis = masker.fit_transform([epi_img1, epi_img2])
        for epi_shelved, epi in zip(epis_shelved, epis):
            epi_shelved = epi_shelved.get()
            assert_array_equal(epi_shelved, epi)

        epi = masker.fit_transform(epi_img1)
        epi_shelved = masker_shelved.fit_transform(epi_img1)
        epi_shelved = epi_shelved.get()
        assert_array_equal(epi_shelved, epi)
    finally:
        # Allow deletion of "filename" on Windows
        del masker
        shutil.rmtree(cachedir, ignore_errors=True)
def __init__(self, n_components=20, mask=None, smoothing_fwhm=None,
             do_cca=True, random_state=None, standardize=False,
             detrend=False, low_pass=None, high_pass=None, t_r=None,
             target_affine=None, target_shape=None, mask_strategy='epi',
             mask_args=None, memory=Memory(cachedir=None), memory_level=0,
             n_jobs=1, verbose=0):
    self.n_components = n_components
    self.do_cca = do_cca
    BaseDecomposition.__init__(self, n_components=n_components,
                               random_state=random_state,
                               mask=mask,
                               smoothing_fwhm=smoothing_fwhm,
                               standardize=standardize,
                               detrend=detrend,
                               low_pass=low_pass,
                               high_pass=high_pass,
                               t_r=t_r,
                               target_affine=target_affine,
                               target_shape=target_shape,
                               mask_strategy=mask_strategy,
                               mask_args=mask_args,
                               memory=memory,
                               memory_level=memory_level,
                               n_jobs=n_jobs,
                               verbose=verbose)
def __init__(self, labels_img, background_label=0, mask_img=None,
             smoothing_fwhm=None, standardize=False, detrend=False,
             low_pass=None, high_pass=None, t_r=None, dtype=None,
             resampling_target="data",
             memory=Memory(cachedir=None, verbose=0), memory_level=1,
             verbose=0):
    self.labels_img = labels_img
    self.background_label = background_label
    self.mask_img = mask_img

    # Parameters for _smooth_array
    self.smoothing_fwhm = smoothing_fwhm

    # Parameters for clean()
    self.standardize = standardize
    self.detrend = detrend
    self.low_pass = low_pass
    self.high_pass = high_pass
    self.t_r = t_r
    self.dtype = dtype

    # Parameters for resampling
    self.resampling_target = resampling_target

    # Parameters for joblib
    self.memory = memory
    self.memory_level = memory_level
    self.verbose = verbose

    if resampling_target not in ("labels", "data", None):
        raise ValueError("invalid value for 'resampling_target' "
                         "parameter: " + str(resampling_target))
def test_joblib_cache():
    from nilearn._utils.compat import hash, Memory
    mask = np.zeros((40, 40, 40))
    mask[20, 20, 20] = 1
    mask_img = Nifti1Image(mask, np.eye(4))

    with testing.write_tmp_imgs(mask_img, create_files=True) as filename:
        masker = NiftiMasker(mask_img=filename)
        masker.fit()
        mask_hash = hash(masker.mask_img_)
        get_data(masker.mask_img_)
        assert_true(mask_hash == hash(masker.mask_img_))

        # Test a tricky issue with memmapped joblib.memory that makes
        # imgs returned by inverse_transform impossible to save
        cachedir = mkdtemp()
        try:
            masker.memory = Memory(cachedir=cachedir, mmap_mode='r',
                                   verbose=0)
            X = masker.transform(mask_img)
            # inverse_transform a first time, so that the result is cached
            out_img = masker.inverse_transform(X)
            out_img = masker.inverse_transform(X)
            out_img.to_filename(os.path.join(cachedir, 'test.nii'))
        finally:
            # Allow deletion of "filename" on Windows
            del masker
            shutil.rmtree(cachedir, ignore_errors=True)
def __init__(self, model, mask=None, n_cluster=40, n_components=20,
             group=True, sub_num=1, smoothing_fwhm=6, do_cca=True,
             threshold='auto', n_init=10, random_state=None,
             standardize=True, detrend=True, low_pass=None, high_pass=None,
             t_r=None, target_affine=None, target_shape=None,
             mask_strategy='epi', mask_args=None,
             memory=Memory(cachedir=None), memory_level=0, n_jobs=1,
             verbose=0):
    super(ClusterProcess, self).__init__(
        n_components=n_components,
        do_cca=do_cca,
        random_state=random_state,
        # feature_compression=feature_compression,
        mask=mask,
        smoothing_fwhm=smoothing_fwhm,
        standardize=standardize,
        detrend=detrend,
        low_pass=low_pass,
        high_pass=high_pass,
        t_r=t_r,
        target_affine=target_affine,
        target_shape=target_shape,
        mask_strategy=mask_strategy,
        mask_args=mask_args,
        memory=memory,
        memory_level=memory_level,
        n_jobs=n_jobs,
        verbose=verbose)

    self.n_cluster = n_cluster
    self.model_ = model
    self.group = group
    self.sub_num = sub_num
    self.train_data = None
    self.model = None
def __init__(self, maps_img, mask_img=None, min_region_size=1350,
             threshold=1., thresholding_strategy='ratio_n_voxels',
             extractor='local_regions', smoothing_fwhm=6,
             standardize=False, detrend=False,
             low_pass=None, high_pass=None, t_r=None,
             memory=Memory(cachedir=None), memory_level=0, verbose=0):
    super(RegionExtractor, self).__init__(maps_img=maps_img,
                                          mask_img=mask_img,
                                          smoothing_fwhm=smoothing_fwhm,
                                          standardize=standardize,
                                          detrend=detrend,
                                          low_pass=low_pass,
                                          high_pass=high_pass,
                                          t_r=t_r,
                                          memory=memory,
                                          memory_level=memory_level,
                                          verbose=verbose)
    self.maps_img = maps_img
    self.min_region_size = min_region_size
    self.thresholding_strategy = thresholding_strategy
    self.threshold = threshold
    self.extractor = extractor
    self.smoothing_fwhm = smoothing_fwhm
def __init__(self, mask=None, smoothing_fwhm=None,
             standardize=False, detrend=False,
             low_pass=None, high_pass=None, t_r=None,
             target_affine=None, target_shape=None,
             mask_strategy='background', mask_args=None,
             memory=Memory(cachedir=None), memory_level=0,
             n_jobs=1, verbose=0, dummy=None):
    self.mask = mask
    self.smoothing_fwhm = smoothing_fwhm
    self.standardize = standardize
    self.detrend = detrend
    self.low_pass = low_pass
    self.high_pass = high_pass
    self.t_r = t_r
    self.target_affine = target_affine
    self.target_shape = target_shape
    self.mask_strategy = mask_strategy
    self.mask_args = mask_args
    self.memory = memory
    self.memory_level = memory_level
    self.n_jobs = n_jobs
    self.verbose = verbose
    self.dummy = dummy
def filter_and_mask(imgs, mask_img_, parameters,
                    memory_level=0, memory=Memory(cachedir=None),
                    verbose=0, confounds=None, copy=True, dtype=None):
    imgs = _utils.check_niimg(imgs, atleast_4d=True, ensure_ndim=4)

    # Check whether resampling is truly necessary. If so, crop mask
    # as small as possible in order to speed up the process
    if not _check_same_fov(imgs, mask_img_):
        parameters = copy_object(parameters)
        # now we can crop
        mask_img_ = image.crop_img(mask_img_, copy=False)
        parameters['target_shape'] = mask_img_.shape
        parameters['target_affine'] = mask_img_.affine

    data, affine = filter_and_extract(imgs, _ExtractionFunctor(mask_img_),
                                      parameters,
                                      memory_level=memory_level,
                                      memory=memory,
                                      verbose=verbose,
                                      confounds=confounds,
                                      copy=copy,
                                      dtype=dtype)

    # For _later_: missing value removal or imputing of missing data
    # (i.e. we want to get rid of NaNs, if smoothing must be done
    # earlier)
    # Optionally: 'doctor_nan', remove voxels with NaNs, other option
    # for later: some form of imputation
    return data
def test__safe_cache_flush():
    # Test the _safe_cache function that is supposed to flush the
    # cache if the nibabel version changes
    with tempfile.TemporaryDirectory() as temp_dir:
        mem = Memory(cachedir=temp_dir)
        version_file = os.path.join(temp_dir, 'joblib',
                                    'module_versions.json')
        # Create a mock version_file with old module versions
        with open(version_file, 'w') as f:
            json.dump({"nibabel": [0, 0]}, f)
        # Create some store structure
        nibabel_dir = os.path.join(temp_dir, 'joblib', 'nibabel_')
        os.makedirs(nibabel_dir)

        # First turn off version checking
        nilearn.CHECK_CACHE_VERSION = False
        cache_mixin._safe_cache(mem, f)
        assert_true(os.path.exists(nibabel_dir))

        # Second turn on version checking
        nilearn.CHECK_CACHE_VERSION = True
        # Make sure that the check will run again
        cache_mixin.__CACHE_CHECKED = {}
        with open(version_file, 'w') as f:
            json.dump({"nibabel": [0, 0]}, f)
        cache_mixin._safe_cache(mem, f)
        assert_true(os.path.exists(version_file))
        assert_false(os.path.exists(nibabel_dir))
def test_check_memory():
    # Test if _check_memory returns a memory object with the cachedir
    # equal to the input path
    with tempfile.TemporaryDirectory() as temp_dir:
        mem_none = Memory(cachedir=None)
        mem_temp = Memory(cachedir=temp_dir)

        for mem in [None, mem_none]:
            memory = cache_mixin._check_memory(mem, verbose=False)
            assert_true(memory, Memory)
            assert_equal(memory.cachedir, mem_none.cachedir)

        for mem in [temp_dir, mem_temp]:
            memory = cache_mixin._check_memory(mem, verbose=False)
            assert_equal(memory.cachedir, mem_temp.cachedir)
            assert_true(memory, Memory)
def __init__(self, alpha=0.1, tol=1e-3, max_iter=10, verbose=0,
             memory=Memory(cachedir=None), memory_level=0):
    self.alpha = alpha
    self.tol = tol
    self.max_iter = max_iter
    self.memory = memory
    self.memory_level = memory_level
    self.verbose = verbose
def test_cache_shelving():
    with tempfile.TemporaryDirectory() as temp_dir:
        joblib_dir = Path(temp_dir, 'joblib', 'nilearn', 'tests',
                          'test_cache_mixin', 'f')
        mem = Memory(cachedir=temp_dir, verbose=0)
        res = cache_mixin.cache(f, mem, shelve=True)(2)
        assert_equal(res.get(), 2)
        assert_equal(len(_get_subdirs(joblib_dir)), 1)
        res = cache_mixin.cache(f, mem, shelve=True)(2)
        assert_equal(res.get(), 2)
        assert_equal(len(_get_subdirs(joblib_dir)), 1)
def __init__(self, labels_img, background_label=0, mask_img=None, smoothing_fwhm=None, standardize=False, detrend=False, low_pass=None, high_pass=None, t_r=None, dtype=None, resampling_target="data", memory=Memory(cachedir=None, verbose=0), memory_level=1, verbose=0, strategy="mean"): self.labels_img = labels_img self.background_label = background_label self.mask_img = mask_img # Parameters for _smooth_array self.smoothing_fwhm = smoothing_fwhm # Parameters for clean() self.standardize = standardize self.detrend = detrend self.low_pass = low_pass self.high_pass = high_pass self.t_r = t_r self.dtype = dtype # Parameters for resampling self.resampling_target = resampling_target # Parameters for joblib self.memory = memory self.memory_level = memory_level self.verbose = verbose available_reduction_strategies = { 'mean', 'median', 'sum', 'minimum', 'maximum', 'standard_deviation', 'variance' } if strategy not in available_reduction_strategies: raise ValueError( str.format("Invalid strategy '{}'. Valid strategies are {}.", strategy, available_reduction_strategies)) self.strategy = strategy if resampling_target not in ("labels", "data", None): raise ValueError("invalid value for 'resampling_target' " "parameter: " + str(resampling_target))
def test_check_memory():
    # Test if _check_memory returns a memory object with the cachedir
    # equal to the input path
    try:
        temp_dir = tempfile.mkdtemp()
        mem_none = Memory(cachedir=None)
        mem_temp = Memory(cachedir=temp_dir)

        for mem in [None, mem_none]:
            memory = cache_mixin._check_memory(mem, verbose=False)
            assert_true(memory, Memory)
            assert_equal(memory.cachedir, mem_none.cachedir)

        for mem in [temp_dir, mem_temp]:
            memory = cache_mixin._check_memory(mem, verbose=False)
            assert_equal(memory.cachedir, mem_temp.cachedir)
            assert_true(memory, Memory)
    finally:
        if os.path.exists(temp_dir):
            shutil.rmtree(temp_dir)
def __init__(self, mask=None, n_components=20, smoothing_fwhm=6,
             do_cca=True, threshold='auto', n_init=10,
             random_state=None, standardize=True, detrend=True,
             low_pass=None, high_pass=None, t_r=None,
             target_affine=None, target_shape=None,
             mask_strategy='epi', mask_args=None,
             memory=Memory(cachedir=None), memory_level=0,
             n_jobs=1, verbose=0):
    super(CanICA, self).__init__(
        n_components=n_components,
        do_cca=do_cca,
        random_state=random_state,
        # feature_compression=feature_compression,
        mask=mask,
        smoothing_fwhm=smoothing_fwhm,
        standardize=standardize,
        detrend=detrend,
        low_pass=low_pass,
        high_pass=high_pass,
        t_r=t_r,
        target_affine=target_affine,
        target_shape=target_shape,
        mask_strategy=mask_strategy,
        mask_args=mask_args,
        memory=memory,
        memory_level=memory_level,
        n_jobs=n_jobs,
        verbose=verbose)

    if isinstance(threshold, float) and threshold > n_components:
        raise ValueError("Threshold must not be higher than number "
                         "of maps. "
                         "Number of maps is %s and you provided "
                         "threshold=%s" %
                         (str(n_components), str(threshold)))
    self.threshold = threshold
    self.n_init = n_init
def __init__(self, mask_img=None, sessions=None, smoothing_fwhm=None,
             standardize=False, detrend=False, low_pass=None,
             high_pass=None, t_r=None, target_affine=None,
             target_shape=None, mask_strategy='background',
             mask_args=None, sample_mask=None, dtype=None,
             memory_level=1, memory=Memory(cachedir=None),
             verbose=0, reports=True):
    # Mask is provided or computed
    self.mask_img = mask_img
    self.sessions = sessions
    self.smoothing_fwhm = smoothing_fwhm
    self.standardize = standardize
    self.detrend = detrend
    self.low_pass = low_pass
    self.high_pass = high_pass
    self.t_r = t_r
    self.target_affine = target_affine
    self.target_shape = target_shape
    self.mask_strategy = mask_strategy
    self.mask_args = mask_args
    self.sample_mask = sample_mask
    self.dtype = dtype
    self.memory = memory
    self.memory_level = memory_level
    self.verbose = verbose
    self.reports = reports
    self._report_description = ('This report shows the input Nifti '
                                'image overlaid with the outlines of the '
                                'mask (in green). We recommend to inspect '
                                'the report for the overlap between the '
                                'mask and its input image. ')
    self._overlay_text = ('\n To see the input Nifti image before '
                          'resampling, hover over the displayed image.')
    self._shelving = False
def _cache(self, func, func_memory_level=1, shelve=False, **kwargs):
    """Return a cached-or-plain callable wrapping `func`.

    The memory_level determines the level above which the wrapped
    function output is cached. By specifying a numeric value for this
    level, the user can control the amount of cache memory used.
    This function will cache the function call or not depending on
    the cache level.

    Parameters
    ----------
    func: function
        The function the output of which is to be cached.

    func_memory_level: int
        The memory_level from which caching must be enabled for the
        wrapped function.

    shelve: bool
        Whether to return a joblib MemorizedResult, callable by a .get()
        method, instead of the return value of func.

    Returns
    -------
    mem: joblib.MemorizedFunc, wrapped in _ShelvedFunc if shelving
        Object that wraps the function func to cache its further calls.
        This object may be a no-op if the requested level is lower than
        the value given to _cache(). For consistency, a callable object
        is always returned.
    """
    verbose = getattr(self, 'verbose', 0)

    # Create attributes if they don't exist.
    # This is to make creating them in __init__() optional.
    if not hasattr(self, "memory_level"):
        self.memory_level = 0
    if not hasattr(self, "memory"):
        self.memory = Memory(cachedir=None, verbose=verbose)
    self.memory = _check_memory(self.memory, verbose=verbose)

    # If cache level is 0 but a memory object has been provided, set
    # memory_level to 1 with a warning.
    if self.memory_level == 0 and self.memory.cachedir is not None:
        warnings.warn("memory_level is currently set to 0 but "
                      "a Memory object has been provided. "
                      "Setting memory_level to 1.")
        self.memory_level = 1

    return cache(func, self.memory, func_memory_level=func_memory_level,
                 memory_level=self.memory_level, shelve=shelve, **kwargs)
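A hedged sketch, not from the source, of how an estimator built on this mixin might route heavy work through _cache. The class name CacheMixin, its import path, and the _expensive_step helper are assumptions for illustration only; the cachedir keyword mirrors the joblib usage shown above.

# Illustrative sketch: gating an expensive step behind user memory settings.
from joblib import Memory
from nilearn._utils.cache_mixin import CacheMixin  # assumed import path


def _expensive_step(imgs):
    # Placeholder for a costly computation worth caching.
    return imgs


class MyMasker(CacheMixin):
    def __init__(self, memory=Memory(cachedir=None), memory_level=1,
                 verbose=0):
        self.memory = memory
        self.memory_level = memory_level
        self.verbose = verbose

    def fit(self, imgs):
        # The call is cached only when self.memory_level >= 2 and a
        # caching directory has been provided.
        self.result_ = self._cache(_expensive_step,
                                   func_memory_level=2)(imgs)
        return self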
def test__safe_cache_dir_creation():
    # Test the _safe_cache function that is supposed to flush the
    # cache if the nibabel version changes
    with tempfile.TemporaryDirectory() as temp_dir:
        mem = Memory(cachedir=temp_dir)
        version_file = os.path.join(temp_dir, 'joblib',
                                    'module_versions.json')
        assert_false(os.path.exists(version_file))
        # First test that a version file gets created
        cache_mixin._safe_cache(mem, f)
        assert_true(os.path.exists(version_file))
        # Test that it does not get recreated during the same session
        os.unlink(version_file)
        cache_mixin._safe_cache(mem, f)
        assert_false(os.path.exists(version_file))
def __init__(self, maps_img, mask_img=None, allow_overlap=True,
             smoothing_fwhm=None, standardize=False, detrend=False,
             low_pass=None, high_pass=None, t_r=None, dtype=None,
             resampling_target="data",
             memory=Memory(cachedir=None, verbose=0), memory_level=0,
             verbose=0):
    self.maps_img = maps_img
    self.mask_img = mask_img

    # Maps Masker parameter
    self.allow_overlap = allow_overlap

    # Parameters for image.smooth
    self.smoothing_fwhm = smoothing_fwhm

    # Parameters for clean()
    self.standardize = standardize
    self.detrend = detrend
    self.low_pass = low_pass
    self.high_pass = high_pass
    self.t_r = t_r
    self.dtype = dtype

    # Parameters for resampling
    self.resampling_target = resampling_target

    # Parameters for joblib
    self.memory = memory
    self.memory_level = memory_level
    self.verbose = verbose

    if resampling_target not in ("mask", "maps", "data", None):
        raise ValueError("invalid value for 'resampling_target'"
                         " parameter: " + str(resampling_target))

    if self.mask_img is None and resampling_target == "mask":
        raise ValueError(
            "resampling_target has been set to 'mask' but no mask "
            "has been provided.\nSet resampling_target to something else"
            " or provide a mask.")
def test_cache_shelving():
    try:
        temp_dir = tempfile.mkdtemp()
        job_glob = os.path.join(temp_dir, 'joblib', 'nilearn', 'tests',
                                'test_cache_mixin', 'f', '*')
        mem = Memory(cachedir=temp_dir, verbose=0)
        res = cache_mixin.cache(f, mem, shelve=True)(2)
        assert_equal(res.get(), 2)
        assert_equal(len(glob.glob(job_glob)), 1)
        res = cache_mixin.cache(f, mem, shelve=True)(2)
        assert_equal(res.get(), 2)
        assert_equal(len(glob.glob(job_glob)), 1)
    finally:
        del mem
        shutil.rmtree(temp_dir, ignore_errors=True)
def __init__(self, n_components=20, n_epochs=1, alpha=10,
             reduction_ratio='auto', dict_init=None, random_state=None,
             batch_size=20, method="cd", mask=None, smoothing_fwhm=4,
             standardize=True, detrend=True, low_pass=None, high_pass=None,
             t_r=None, target_affine=None, target_shape=None,
             mask_strategy='epi', mask_args=None, n_jobs=1, verbose=0,
             memory=Memory(cachedir=None), memory_level=0):
    BaseDecomposition.__init__(self, n_components=n_components,
                               random_state=random_state,
                               mask=mask,
                               smoothing_fwhm=smoothing_fwhm,
                               standardize=standardize,
                               detrend=detrend,
                               low_pass=low_pass,
                               high_pass=high_pass,
                               t_r=t_r,
                               target_affine=target_affine,
                               target_shape=target_shape,
                               mask_strategy=mask_strategy,
                               mask_args=mask_args,
                               memory=memory,
                               memory_level=memory_level,
                               n_jobs=n_jobs,
                               verbose=verbose)
    self.n_epochs = n_epochs
    self.batch_size = batch_size
    self.method = method
    self.alpha = alpha
    self.reduction_ratio = reduction_ratio
    self.dict_init = dict_init
def __init__(self, method, n_parcels=50, random_state=0, mask=None,
             smoothing_fwhm=4., standardize=False, detrend=False,
             low_pass=None, high_pass=None, t_r=None,
             target_affine=None, target_shape=None,
             mask_strategy='epi', mask_args=None, scaling=False,
             n_iter=10, memory=Memory(cachedir=None), memory_level=0,
             n_jobs=1, verbose=1):
    self.method = method
    self.n_parcels = n_parcels
    self.scaling = scaling
    self.n_iter = n_iter

    MultiPCA.__init__(self, n_components=200, random_state=random_state,
                      mask=mask, memory=memory,
                      smoothing_fwhm=smoothing_fwhm,
                      standardize=standardize, detrend=detrend,
                      low_pass=low_pass, high_pass=high_pass, t_r=t_r,
                      target_affine=target_affine,
                      target_shape=target_shape,
                      mask_strategy=mask_strategy, mask_args=mask_args,
                      memory_level=memory_level, n_jobs=n_jobs,
                      verbose=verbose)
def __init__(self, mask_img=None, sessions=None, smoothing_fwhm=None,
             standardize=False, detrend=False, low_pass=None,
             high_pass=None, t_r=None, target_affine=None,
             target_shape=None, mask_strategy='background',
             mask_args=None, sample_mask=None, dtype=None,
             memory_level=1, memory=Memory(cachedir=None),
             verbose=0):
    # Mask is provided or computed
    self.mask_img = mask_img
    self.sessions = sessions
    self.smoothing_fwhm = smoothing_fwhm
    self.standardize = standardize
    self.detrend = detrend
    self.low_pass = low_pass
    self.high_pass = high_pass
    self.t_r = t_r
    self.target_affine = target_affine
    self.target_shape = target_shape
    self.mask_strategy = mask_strategy
    self.mask_args = mask_args
    self.sample_mask = sample_mask
    self.dtype = dtype
    self.memory = memory
    self.memory_level = memory_level
    self.verbose = verbose
    self._shelving = False
def test_rena_clustering():
    data_img, mask_img = generate_fake_fmri(shape=(10, 11, 12), length=5)

    data = get_data(data_img)
    mask = get_data(mask_img)

    X = np.empty((data.shape[3], int(mask.sum())))
    for i in range(data.shape[3]):
        X[i, :] = np.copy(data[:, :, :, i])[get_data(mask_img) != 0]

    nifti_masker = NiftiMasker(mask_img=mask_img).fit()
    n_voxels = nifti_masker.transform(data_img).shape[1]

    rena = ReNA(mask_img, n_clusters=10)

    X_red = rena.fit_transform(X)
    X_compress = rena.inverse_transform(X_red)

    assert 10 == rena.n_clusters_
    assert X.shape == X_compress.shape

    memory = Memory(cachedir=None)
    rena = ReNA(mask_img, n_clusters=-2, memory=memory)
    pytest.raises(ValueError, rena.fit, X)

    rena = ReNA(mask_img, n_clusters=10, scaling=True)
    X_red = rena.fit_transform(X)
    X_compress = rena.inverse_transform(X_red)

    for n_iter in [-2, 0]:
        rena = ReNA(mask_img, n_iter=n_iter, memory=memory)
        pytest.raises(ValueError, rena.fit, X)

    for n_clusters in [1, 2, 4, 8]:
        rena = ReNA(mask_img, n_clusters=n_clusters, n_iter=1,
                    memory=memory).fit(X)
        assert n_clusters != rena.n_clusters_

    del n_voxels, X_red, X_compress
def __init__(self, n_components=20, random_state=None, mask=None,
             smoothing_fwhm=None, standardize=True, detrend=True,
             low_pass=None, high_pass=None, t_r=None,
             target_affine=None, target_shape=None,
             mask_strategy='epi', mask_args=None,
             memory=Memory(cachedir=None), memory_level=0,
             n_jobs=1, verbose=0):
    self.n_components = n_components
    self.random_state = random_state
    self.mask = mask
    self.smoothing_fwhm = smoothing_fwhm
    self.standardize = standardize
    self.detrend = detrend
    self.low_pass = low_pass
    self.high_pass = high_pass
    self.t_r = t_r
    self.target_affine = target_affine
    self.target_shape = target_shape
    self.mask_strategy = mask_strategy
    self.mask_args = mask_args
    self.memory = memory
    self.memory_level = memory_level
    self.n_jobs = n_jobs
    self.verbose = verbose
def __init__(self, seeds, radius=None, mask_img=None,
             allow_overlap=False, smoothing_fwhm=None,
             standardize=False, detrend=False, low_pass=None,
             high_pass=None, t_r=None, dtype=None,
             memory=Memory(cachedir=None, verbose=0), memory_level=1,
             verbose=0):
    self.seeds = seeds
    self.mask_img = mask_img
    self.radius = radius
    self.allow_overlap = allow_overlap

    # Parameters for _smooth_array
    self.smoothing_fwhm = smoothing_fwhm

    # Parameters for clean()
    self.standardize = standardize
    self.detrend = detrend
    self.low_pass = low_pass
    self.high_pass = high_pass
    self.t_r = t_r
    self.dtype = dtype

    # Parameters for joblib
    self.memory = memory
    self.memory_level = memory_level
    self.verbose = verbose
def _iter_check_niimg(niimgs, ensure_ndim=None, atleast_4d=False,
                      target_fov=None, dtype=None,
                      memory=Memory(cachedir=None),
                      memory_level=0, verbose=0):
    """Iterate over a list of niimgs and do sanity checks and resampling.

    Parameters
    ----------
    niimgs: list of niimg or glob pattern
        Images to iterate over.

    ensure_ndim: integer, optional
        If specified, an error is raised if the data does not have the
        required dimension.

    atleast_4d: boolean, optional
        If True, any 3D image is converted to a 4D single scan.

    target_fov: tuple of affine and shape
        If specified, images are resampled to this field of view.

    dtype: {dtype, "auto"}
        Data type toward which the data should be converted. If "auto",
        the data will be converted to int32 if dtype is discrete and
        float32 if it is continuous.

    See also
    --------
    check_niimg, check_niimg_3d, check_niimg_4d
    """
    # If niimgs is a string, use glob to expand it to the matching filenames.
    niimgs = _resolve_globbing(niimgs)

    ref_fov = None
    resample_to_first_img = False
    ndim_minus_one = ensure_ndim - 1 if ensure_ndim is not None else None
    if target_fov is not None and target_fov != "first":
        ref_fov = target_fov
    i = -1
    for i, niimg in enumerate(niimgs):
        try:
            niimg = check_niimg(
                niimg, ensure_ndim=ndim_minus_one, atleast_4d=atleast_4d,
                dtype=dtype)
            if i == 0:
                ndim_minus_one = len(niimg.shape)
                if ref_fov is None:
                    ref_fov = (niimg.affine, niimg.shape[:3])
                    resample_to_first_img = True

            if not _check_fov(niimg, ref_fov[0], ref_fov[1]):
                if target_fov is not None:
                    from nilearn import image  # we avoid a circular import
                    if resample_to_first_img:
                        warnings.warn('Affine is different across subjects.'
                                      ' Realignment on first subject '
                                      'affine forced')
                    niimg = cache(
                        image.resample_img, memory,
                        func_memory_level=2,
                        memory_level=memory_level)(
                            niimg, target_affine=ref_fov[0],
                            target_shape=ref_fov[1])
                else:
                    raise ValueError(
                        "Field of view of image #%d is different from "
                        "reference FOV.\n"
                        "Reference affine:\n%r\nImage affine:\n%r\n"
                        "Reference shape:\n%r\nImage shape:\n%r\n"
                        % (i, ref_fov[0], niimg.affine, ref_fov[1],
                           niimg.shape))
            yield niimg
        except DimensionError as exc:
            # Keep track of the additional dimension in the error
            exc.increment_stack_counter()
            raise
        except TypeError as exc:
            img_name = ''
            if isinstance(niimg, _basestring):
                img_name = " (%s) " % niimg

            exc.args = (('Error encountered while loading image #%d%s'
                         % (i, img_name),) + exc.args)
            raise

    # Raise an error if the input generator is empty.
    if i == -1:
        raise ValueError("Input niimgs list is empty.")
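A hedged usage sketch, not from the source, of iterating over a list of 3D images with promotion to 4D and realignment onto the first image's field of view. The import path nilearn._utils.niimg_conversions is an assumption.

# Illustrative sketch: validate and iterate a list of identical toy images.
import numpy as np
from nibabel import Nifti1Image
from nilearn._utils.niimg_conversions import _iter_check_niimg  # assumed path

imgs = [Nifti1Image(np.zeros((4, 4, 4)), np.eye(4)) for _ in range(3)]
for niimg in _iter_check_niimg(imgs, atleast_4d=True, target_fov="first"):
    # Each 3D input is promoted to a single-scan 4D image; images whose
    # field of view differs from the first would be resampled onto it.
    print(niimg.shape)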