def test_unlock_raises(path, path2, path3):
    """Check that ``unlock`` raises/warns correctly for invalid invocations."""
    # Remember where we started so the working directory can be restored;
    # we chdir into ``path`` to guarantee we are not inside a dataset.
    original_cwd = getpwd()
    chpwd(path)

    # Neither a dataset nor a path supplied -> hard failure.
    assert_raises(InsufficientArgumentsError, unlock, dataset=None, path=None)

    # A path that belongs to no dataset is only warned about, not fatal.
    with swallow_logs(new_level=logging.WARNING) as log_capture:
        unlock(dataset=None, path=path2)
        assert_in(
            "ignored paths that do not belong to any dataset: ['{0}'".format(
                path2),
            log_capture.out)

    create(path=path, no_annex=True)
    ds = Dataset(path)
    # A plain (no-annex) dataset unlocks without any complaints.
    ds.unlock()

    # Turn the dataset into an annex repo, then unlock a missing file:
    # this should merely warn about the non-existing path.
    AnnexRepo(path, create=True)
    with swallow_logs(new_level=logging.WARNING) as log_capture:
        ds.unlock(path="notexistent.txt")
        assert_in("ignored non-existing paths", log_capture.out)

    chpwd(original_cwd)
def test_unlock_raises(path, path2, path3):
    """Verify error and warning behavior of ``unlock`` for bad arguments."""
    # Save the current working directory and move into ``path`` so that
    # we are guaranteed not to be inside any dataset.
    saved_cwd = getpwd()
    chpwd(path)

    # No dataset and no path at all must raise.
    assert_raises(InsufficientArgumentsError, unlock, dataset=None, path=None)

    # No dataset, and a path outside any dataset: expect a warning only.
    with swallow_logs(new_level=logging.WARNING) as captured:
        unlock(dataset=None, path=path2)
        expected_msg = (
            "ignored paths that do not belong to any dataset: "
            "['{0}'".format(path2))
        assert_in(expected_msg, captured.out)

    create(path=path, no_annex=True)
    ds = Dataset(path)
    # Unlocking a no-annex dataset should not complain.
    ds.unlock()

    # Now make it an annex and unlock a path that does not exist.
    AnnexRepo(path, create=True)
    with swallow_logs(new_level=logging.WARNING) as captured:
        ds.unlock(path="notexistent.txt")
        assert_in("ignored non-existing paths", captured.out)

    chpwd(saved_cwd)
def test_unlock_raises(path, path2, path3):
    """Check ``unlock`` failure result records for invalid inputs."""
    # Keep a handle on the starting directory; chdir into ``path`` so the
    # test does not accidentally run from within a dataset.
    start_dir = getpwd()
    chpwd(path)

    # Neither dataset nor path given -> must raise outright.
    assert_raises(InsufficientArgumentsError, unlock, dataset=None, path=None)

    # A path not associated with any dataset yields a failure result record
    # (errors are downgraded to records via on_failure='ignore').
    outcome = unlock(dataset=None, path=path2,
                     result_xfm=None,
                     on_failure='ignore',
                     return_type='item-or-list')
    eq_(outcome['message'], "path not associated with any dataset")
    eq_(outcome['path'], path2)

    create(path=path, no_annex=True)
    ds = Dataset(path)
    # A no-annex dataset unlocks cleanly.
    ds.unlock()

    # Convert to an annex repo, then request unlock of a missing file:
    # expect a "does not exist" failure record rather than an exception.
    AnnexRepo(path, create=True)
    outcome = ds.unlock(path="notexistent.txt",
                        result_xfm=None,
                        on_failure='ignore',
                        return_type='item-or-list')
    eq_(outcome['message'], "path does not exist")

    chpwd(start_dir)
def read_bids_data(bids_root, subject, datatype="meg", task="memento", suffix="meg"):
    """Read raw data for one subject from a BIDS directory.

    :param bids_root: str, path to the root of a BIDS directory to read from
    :param subject: str, subject identifier, takes the form '001'
    :param datatype: str, descriptor of the data type, e.g., 'meg'
    :param task: str, descriptor of the task name ('memento')
    :param suffix: str, BIDS suffix for the data, e.g., 'meg'
    :return: tuple of (raw, bids_path) - the loaded raw data and the
        BIDSPath object used to locate it
    """
    bids_path = BIDSPath(root=bids_root, datatype=datatype,
                         subject=subject, task=task, suffix=suffix)
    try:
        # Only now (Apr. 2021) MNE python gained the ability to read in split
        # annexed data. Until this is released and established, we're making
        # sure files are read fully, and if not, we attempt to unlock them first
        raw = read_raw_bids(
            bids_path=bids_path,
            extra_params=dict(on_split_missing="raise"),
        )
    except ValueError:
        # Use logging.warning: logging.warn is a long-deprecated alias.
        logging.warning(
            "Ooops! I can't load all splits of the data. This may be because "
            "you run a version of MNE-python that does not read in annexed "
            "data automatically. I will try to datalad-unlock them for you.")
        # Local import keeps datalad an optional dependency for the happy path.
        import datalad.api as dl
        dl.unlock(bids_path.directory)
        raw = read_raw_bids(
            bids_path=bids_path,
            extra_params=dict(on_split_missing="raise"),
        )
    # Return the raw data together with the bids path, as callers need it
    # to derive sibling files (events, sidecars) later on.
    return raw, bids_path