def test_find_matching_sidecar(return_bids_test_dir):
    """Test finding a sidecar file from a BIDS dir."""
    bids_root = return_bids_test_dir
    bids_fpath = bids_path.copy().update(root=bids_root)

    # Now find a sidecar
    sidecar_fname = _find_matching_sidecar(bids_fpath,
                                           suffix='coordsystem',
                                           extension='.json')
    expected_file = op.join('sub-01', 'ses-01', 'meg',
                            'sub-01_ses-01_coordsystem.json')
    assert sidecar_fname.endswith(expected_file)

    # Find multiple sidecars, tied in score, triggering an error
    with pytest.raises(RuntimeError, match='Expected to find a single'):
        # Create a second, equally well-matching sidecar.
        # FIX: use a context manager instead of open(...).close() so the
        # handle cannot leak if an exception fires between the two calls.
        extra_fname = sidecar_fname.replace('coordsystem.json',
                                            '2coordsystem.json')
        with open(extra_fname, 'w'):
            pass
        print_dir_tree(bids_root)
        _find_matching_sidecar(bids_fpath,
                               suffix='coordsystem', extension='.json')

    # Find nothing and raise.
    # FIX: dropped the dead `fname = ...` binding -- the call must raise,
    # so the assignment could never be observed.
    with pytest.raises(RuntimeError, match='Did not find any'):
        _find_matching_sidecar(bids_fpath, suffix='foo', extension='.bogus')

    # Find nothing and receive None and a warning.
    on_error = 'warn'
    with pytest.warns(RuntimeWarning, match='Did not find any'):
        fname = _find_matching_sidecar(bids_fpath, suffix='foo',
                                       extension='.bogus', on_error=on_error)
    assert fname is None

    # Find nothing and receive None.
    on_error = 'ignore'
    fname = _find_matching_sidecar(bids_fpath, suffix='foo',
                                   extension='.bogus', on_error=on_error)
    assert fname is None

    # Invalid on_error.
    on_error = 'hello'
    with pytest.raises(ValueError,
                       match='Acceptable values for on_error are'):
        _find_matching_sidecar(bids_fpath, suffix='coordsystem',
                               extension='.json', on_error=on_error)
"447", "448", "449", "451", ] event_id = { "lexical/low": 11, "lexical/high": 21, "nonlex/low": 31, "nonlex/high": 41, "target/low": 16, "target/high": 26, } print_dir_tree(bids_root, max_depth=3) datatype = "meg" bids_path = BIDSPath(root=bids_root, datatype=datatype) task = "lexicaldecision" suffix = "meg" for subject in subjects: bids_path = BIDSPath( subject=subject, task=task, suffix=suffix, datatype=datatype, root=bids_root, )
# %% # Download the data # ----------------- # # First, we need some data to work with. We will use the # `Finger Tapping Dataset <https://github.com/rob-luke/BIDS-NIRS-Tapping>`_ # available on GitHub. # We will use the MNE-NIRS package which includes convenient functions to # download openly available datasets. data_dir = mne_nirs.datasets.fnirs_motor_group.data_path() # Let's see whether the data has been downloaded using a quick visualization # of the directory tree. print_dir_tree(data_dir) # %% # The data are already in BIDS format. However, we will just use one of the # SNIRF files and demonstrate how this could be used to generate a new BIDS # compliant dataset from this single file. # Specify file to use as input to BIDS generation process file_path = data_dir / "sub-01" / "nirs" / "sub-01_task-tapping_nirs.snirf" # %% # Convert to BIDS # --------------- # # Let's start with loading the data and updating the annotations. # We are reading the data using MNE-Python's ``io`` module and the
# BIDS entities identifying the recording we want to load.
subject_id = '01'
task = 'somato'
datatype = 'meg'

# For raw MEG recordings the BIDS suffix equals the datatype ('meg').
bids_path = BIDSPath(subject=subject_id, task=task, datatype=datatype,
                     suffix=datatype, root=bids_root)

# bids basename is nicely formatted
print(bids_path)

###############################################################################
# Print the directory tree
print_dir_tree(bids_root)

###############################################################################
# Step 2: Read a BIDS dataset
# ---------------------------
#
# Let's read in the dataset and show off a few features of the
# loading function `read_raw_bids`. Note, this is just one line of code.
raw = read_raw_bids(bids_path=bids_path, verbose=True)

###############################################################################
# `raw.info` has the basic subject metadata
print(raw.info['subject_info'])

# `raw.info` has the PowerLineFrequency loaded in, which should be 50 Hz here
print(raw.info['line_freq'])
def test_match(return_bids_test_dir):
    """Test retrieval of matching basenames."""
    bids_root = Path(return_bids_test_dir)

    bids_path_01 = BIDSPath(root=bids_root)
    paths = bids_path_01.match()
    assert len(paths) == 9
    assert all('sub-01_ses-01' in p.basename for p in paths)
    # FIX: use a generator expression instead of building a throwaway list
    # inside all() -- consistent with the assertion just above.
    assert all(p.root == bids_root for p in paths)

    bids_path_01 = BIDSPath(root=bids_root, run='01')
    paths = bids_path_01.match()
    assert len(paths) == 3
    assert paths[0].basename == ('sub-01_ses-01_task-testing_run-01_'
                                 'channels.tsv')

    bids_path_01 = BIDSPath(root=bids_root, subject='unknown')
    paths = bids_path_01.match()
    assert len(paths) == 0

    # Matching without a root is an error.
    bids_path_01 = bids_path.copy().update(root=None)
    with pytest.raises(RuntimeError, match='Cannot match'):
        bids_path_01.match()

    bids_path_01.update(datatype='meg', root=bids_root)
    same_paths = bids_path_01.match()
    assert len(same_paths) == 3

    # Check handling of `extension`, part 1: no extension specified.
    bids_path_01 = BIDSPath(root=bids_root, run='01')
    paths = bids_path_01.match()
    assert [p.extension for p in paths] == ['.tsv', '.tsv', '.fif']

    # Check handling of `extension`, part 2: extension specified.
    bids_path_01 = BIDSPath(root=bids_root, run='01', extension='.fif',
                            datatype='meg')
    paths = bids_path_01.match()
    assert len(paths) == 1
    assert paths[0].extension == '.fif'

    # Check handling of `extension` and `suffix`, part 1: no suffix
    bids_path_01 = BIDSPath(root=bids_root, run='01', extension='.tsv',
                            datatype='meg')
    paths = bids_path_01.match()
    assert len(paths) == 2
    assert paths[0].extension == '.tsv'

    # Check handling of `extension` and `suffix`, part 2: suffix passed
    # (FIX: this section was mislabeled "part 1" in the original).
    bids_path_01 = BIDSPath(root=bids_root, run='01', suffix='channels',
                            extension='.tsv', datatype='meg')
    paths = bids_path_01.match()
    assert len(paths) == 1
    assert paths[0].extension == '.tsv'
    assert paths[0].suffix == 'channels'

    # Check handling of `datatype` when explicitly passed in
    print_dir_tree(bids_root)
    bids_path_01 = BIDSPath(root=bids_root, run='01', suffix='channels',
                            extension='.tsv', datatype='meg')
    paths = bids_path_01.match()
    print(paths)
    assert len(paths) == 1
    assert paths[0].extension == '.tsv'
    assert paths[0].suffix == 'channels'
    assert Path(paths[0]).parent.name == 'meg'

    # Check handling of `datatype`, no datatype passed in
    # should be exactly the same if there is only one datatype
    # present in the dataset
    bids_path_01 = BIDSPath(root=bids_root, run='01', suffix='channels',
                            extension='.tsv')
    paths = bids_path_01.match()
    assert len(paths) == 1
    assert paths[0].extension == '.tsv'
    assert paths[0].suffix == 'channels'
    assert Path(paths[0]).parent.name == 'meg'
def test_print_dir_tree(capsys):
    """Test printing a dir tree."""
    # A non-existent path must be rejected up front.
    with pytest.raises(ValueError, match='Directory does not exist'):
        print_dir_tree('i_dont_exist')

    # Use the directory containing this test module as the tree root.
    test_dir = op.dirname(__file__)

    # max_depth must be a non-negative integer.
    with pytest.raises(ValueError, match='must be a positive integer'):
        print_dir_tree(test_dir, max_depth=-1)
    with pytest.raises(ValueError, match='must be a positive integer'):
        print_dir_tree(test_dir, max_depth='bad')

    # Unlimited depth: this file is listed and we descend into __pycache__.
    print_dir_tree(test_dir)
    output = capsys.readouterr().out
    lines = output.split('\n')
    assert '|--- test_utils.py' in lines
    assert '|--- __pycache__{}'.format(os.sep) in lines
    assert '.pyc' in output

    # Depth 1: the cache directory is listed, but we do not descend into it.
    print_dir_tree(test_dir, max_depth=1)
    output = capsys.readouterr().out
    lines = output.split('\n')
    assert '|--- test_utils.py' in lines
    assert '|--- __pycache__{}'.format(os.sep) in lines
    assert '.pyc' not in output

    # Depth 0: only the root directory itself is printed.
    print_dir_tree(test_dir, max_depth=0)
    output = capsys.readouterr().out
    assert output == '|tests{}\n'.format(os.sep)

    # A pathlib.Path input is accepted as well as a string.
    print_dir_tree(Path(test_dir))
# Load the raw recording; power-line frequency must be set for BIDS export.
raw = mne.io.read_raw_fif(raw_fname)
raw.info['line_freq'] = 60  # specify power line frequency as required by BIDS

# BIDS entities for the output path.
sub = '01'
ses = '01'
task = 'audiovisual'
run = '01'
bids_path = BIDSPath(subject=sub, session=ses, task=task, run=run,
                     root=output_path)

# Write the raw data (plus events) into the BIDS tree, replacing any
# previous copy.  NOTE(review): `events_data` and `event_id` are defined
# outside this view -- presumably an events array and a label->code map.
write_raw_bids(raw, bids_path, events_data=events_data,
               event_id=event_id, overwrite=True)

###############################################################################
# Print the directory tree
print_dir_tree(output_path)

###############################################################################
# Now let's assume that we have also collected some T1 weighted MRI data for
# our subject. And furthermore, that we have already aligned our coordinate
# frames (using e.g., the `coregistration GUI`_) and obtained a transformation
# matrix :code:`trans`.

# Get the path to our MRI scan
t1_mgh_fname = op.join(data_path, 'subjects', 'sample', 'mri', 'T1.mgz')

# Load the transformation matrix and show what it looks like
trans_fname = op.join(data_path, 'MEG', 'sample',
                      'sample_audvis_raw-trans.fif')
trans = mne.read_trans(trans_fname)
print(trans)
def test_find_matching_sidecar(return_bids_test_dir, tmp_path):
    """Test finding a sidecar file from a BIDS dir."""
    bids_root = return_bids_test_dir
    bids_path = _bids_path.copy().update(root=bids_root)

    # Now find a sidecar
    sidecar_fname = _find_matching_sidecar(bids_path,
                                           suffix='coordsystem',
                                           extension='.json')
    expected_file = op.join('sub-01', 'ses-01', 'meg',
                            'sub-01_ses-01_coordsystem.json')
    assert sidecar_fname.endswith(expected_file)

    # Find multiple sidecars, tied in score, triggering an error
    with pytest.raises(RuntimeError, match='Expected to find a single'):
        # Create a second, equally well-matching sidecar.
        # FIX: use a context manager instead of open(...).close() so the
        # handle cannot leak if an exception fires between the two calls.
        extra_fname = sidecar_fname.replace('coordsystem.json',
                                            '2coordsystem.json')
        with open(extra_fname, 'w'):
            pass
        print_dir_tree(bids_root)
        _find_matching_sidecar(bids_path,
                               suffix='coordsystem', extension='.json')

    # Find nothing and raise.
    # FIX: dropped the dead `fname = ...` binding -- the call must raise,
    # so the assignment could never be observed.
    with pytest.raises(RuntimeError, match='Did not find any'):
        _find_matching_sidecar(bids_path, suffix='foo', extension='.bogus')

    # Find nothing and receive None and a warning.
    on_error = 'warn'
    with pytest.warns(RuntimeWarning, match='Did not find any'):
        fname = _find_matching_sidecar(bids_path, suffix='foo',
                                       extension='.bogus', on_error=on_error)
    assert fname is None

    # Find nothing and receive None.
    on_error = 'ignore'
    fname = _find_matching_sidecar(bids_path, suffix='foo',
                                   extension='.bogus', on_error=on_error)
    assert fname is None

    # Invalid on_error.
    on_error = 'hello'
    with pytest.raises(ValueError,
                       match='Acceptable values for on_error are'):
        _find_matching_sidecar(bids_path, suffix='coordsystem',
                               extension='.json', on_error=on_error)

    # Test behavior of suffix and extension params when suffix and extension
    # are also (not) present in the passed BIDSPath
    bids_path = BIDSPath(
        subject='test', task='task', datatype='eeg', root=tmp_path
    )
    bids_path.mkdir()

    # Create a small set of candidate files to match against.
    for suffix, extension in zip(
        ['eeg', 'eeg', 'events', 'events'],
        ['.fif', '.json', '.tsv', '.json']
    ):
        bids_path.suffix = suffix
        bids_path.extension = extension
        bids_path.fpath.touch()

    # suffix parameter should always override BIDSPath.suffix
    bids_path.extension = '.json'
    for bp_suffix in (None, 'eeg'):
        bids_path.suffix = bp_suffix
        s = _find_matching_sidecar(bids_path=bids_path, suffix='events')
        assert Path(s).name == 'sub-test_task-task_events.json'

    # extension parameter should always override BIDSPath.extension
    bids_path.suffix = 'events'
    for bp_extension in (None, '.json'):
        bids_path.extension = bp_extension
        s = _find_matching_sidecar(bids_path=bids_path, extension='.tsv')
        assert Path(s).name == 'sub-test_task-task_events.tsv'

    # If suffix and extension parameters are not passed, use BIDSPath
    # attributes
    bids_path.suffix = 'events'
    bids_path.extension = '.tsv'
    s = _find_matching_sidecar(bids_path=bids_path)
    assert Path(s).name == 'sub-test_task-task_events.tsv'
# preproced meg and mri
# Build zero-padded subject IDs '01'..'11'.
sublist = ['{0:0>2d}'.format(i) for i in np.arange(1, 12)]

for sub in sublist:
    bids_preproc_ses_dir = os.path.join(bids_preproc_dir,
                                        'sub-{0}'.format(sub), 'ses-movie')
    # FIX: replaced `if os.path.exists(...) is False: os.makedirs(...)`
    # with exist_ok=True -- avoids the non-idiomatic `is False` comparison
    # and the check-then-act race between exists() and makedirs().
    os.makedirs(bids_preproc_ses_dir, exist_ok=True)

    # Move the anat and meg session folders into the combined BIDS tree.
    # NOTE(review): shell=True with interpolated paths is fragile (spaces /
    # shell metacharacters in paths would break or be interpreted); left
    # as-is because the paths here are internal, but consider
    # subprocess.run([...], shell=False) or shutil.move.
    subprocess.call('mv {0}/sub-{1}/ses-movie/anat {2}/anat'.format(
        preproc_mri, sub, bids_preproc_ses_dir), shell=True)
    subprocess.call('mv {0}/sub-{1}/ses-movie/meg {2}/meg'.format(
        preproc_meg, sub, bids_preproc_ses_dir), shell=True)

    # Move the first session-associated file found for this subject.
    ses_ass_f = glob.glob(
        os.path.join(preproc_meg, 'sub-{0}'.format(sub), 'ses-movie',
                     ses_ass_files[0]))[0]
    subprocess.call('mv {0} {1}'.format(ses_ass_f, bids_preproc_ses_dir),
                    shell=True)

# copy study associated files
for f in mri_study_ass_files:
    subprocess.call('cp {0}/{1} {2}'.format(preproc_mri, f,
                                            bids_preproc_dir), shell=True)

# %%
# Dump the resulting directory tree to a text file for later inspection.
tree = mne_bids.print_dir_tree(bids_dir, return_str=True)
with open('/nfs/s2/userhome/liuxingyu/workingdir/temp/gump_bids', 'w') as f:
    f.write(tree)
write_anat(image=t1w_path, bids_path=bids_path, landmarks=mri_landmarks, verbose=False) # %% # Basic anonymization # ------------------- # Now we're ready to anonymize the dataset! anonymize_dataset(bids_root_in=bids_root, bids_root_out=bids_root_anon) # %% # That's it! Let's have a look at directory structure of the anonymized # dataset. print_dir_tree(bids_root_anon) # %% # You can see that the subject ID was changed to a number (in this case, the # digit ``1```), and the recording dates have been shifted backward in time (as # indicated by the ``emptyroom`` session name). Anonymized IDs are zero-padded # numbers ranging from 1 to :math:`N`, where :math:`N` is the total number of # participants (excluding the ``emptyroom`` pseudo-subject). # # Limiting to specific data types # ------------------------------- # By default, :func:`mne_bids.anonymize_dataset` will anonymize # electrophysiological data and anatomical MR scans (T1-weighted and FLASH). # You can limit which data types to convert using the ``datatypes`` keyword # argument. The parameter can be a string (e.g., ``'meg'``, ``'eeg'``, # ``'anat'``) or a list of such strings.
# First, we need some data to work with. We will use the test dataset # available with the repository under ``data/`` directory. # # root of BIDs dataset root = Path('../data/') # BIDS entities subject = '01' session = 'interictalsleep' run = '01' datatype = 'ieeg' ############################################################################### # show the contents of the BIDS dataset print_dir_tree(root) # Let's summarize the dataset. print(make_report(root, verbose=False)) ############################################################################### # Load the dataset. bids_path = BIDSPath(subject=subject, session=session, run=run, datatype=datatype, root=root, suffix='ieeg', extension='.vhdr') raw = read_raw_bids(bids_path)