Example #1
def convert_img_to_bids(image_input, bids_root, bids_fname, verbose=True):
    """Run Bids Conversion script to be updated.

    Performs BIDS conversion for Ct/T1/DTI/fMRI data.

    TODO: demo for DTI/FMRI
    """
    if verbose:
        print(f"bids_root is {bids_root}")
        print(f"Reading in image files from: {image_input}")

    # create temporary filepath to store the nifti file
    with tempfile.TemporaryDirectory() as tmpdir:
        output_fpath = Path(tmpdir, "tmp.nii").as_posix()

        if any(Path(image_input).glob("*.dcm")):
            print("Converting DICOM -> NIfTI...")
            # run mrconvert and reorient to `LAS` direction
            image_input = _convert_dicom_to_nifti(image_input, output_fpath)
        else:
            print(
                "Passed a NIfTI image, so skipping DICOM -> NIfTI conversion..."
            )
            image_input = str(image_input)

        print(image_input)
        # determine the BIDS identifiers
        params = _parse_bids_filename(bids_fname, verbose=True)
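        # illustrative (hypothetical) output, assuming a name such as
        # "sub-01_ses-01_CT.nii.gz": {"sub": "01", "ses": "01", ...}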
        subject = params["sub"]
        session = params["ses"]

        print("\n\nWriting now to BIDS...")
        # write to BIDS
        anat_dir = write_anat(
            bids_root,
            subject,
            t1w=image_input,
            session=session,
            overwrite=True,
            verbose=True,
        )
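
# A minimal usage sketch for the function above (not from the original source;
# the paths are hypothetical, and `tempfile`, `pathlib.Path`, `write_anat` and
# the private helpers must be importable in the surrounding module):
convert_img_to_bids(
    image_input="/data/dicom/ct_scan",      # folder containing .dcm files
    bids_root="/data/bids_dataset",         # target BIDS root
    bids_fname="sub-01_ses-01_CT.nii.gz",   # BIDS-style output filename
)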
Example #2
def test_write_anat(_bids_validate):
    """Test writing anatomical data."""
    # Get the MNE testing sample data
    import nibabel as nib
    output_path = _TempDir()
    data_path = testing.data_path()
    raw_fname = op.join(data_path, 'MEG', 'sample',
                        'sample_audvis_trunc_raw.fif')

    event_id = {
        'Auditory/Left': 1,
        'Auditory/Right': 2,
        'Visual/Left': 3,
        'Visual/Right': 4,
        'Smiley': 5,
        'Button': 32
    }
    events_fname = op.join(data_path, 'MEG', 'sample',
                           'sample_audvis_trunc_raw-eve.fif')

    raw = mne.io.read_raw_fif(raw_fname)
    write_raw_bids(raw,
                   bids_basename,
                   output_path,
                   events_data=events_fname,
                   event_id=event_id,
                   overwrite=False)

    # Write some MRI data and supply a `trans`
    trans_fname = raw_fname.replace('_raw.fif', '-trans.fif')
    trans = mne.read_trans(trans_fname)

    # Get the T1 weighted MRI data file
    # It needs to be converted to NIfTI because our test data only has MGH
    t1w_mgh = op.join(data_path, 'subjects', 'sample', 'mri', 'T1.mgz')

    anat_dir = write_anat(output_path,
                          subject_id,
                          t1w_mgh,
                          session_id,
                          acq,
                          raw=raw,
                          trans=trans,
                          deface=True,
                          verbose=True,
                          overwrite=True)
    _bids_validate(output_path)

    # Validate that files are as expected
    t1w_json_path = op.join(anat_dir, 'sub-01_ses-01_acq-01_T1w.json')
    assert op.exists(t1w_json_path)
    assert op.exists(op.join(anat_dir, 'sub-01_ses-01_acq-01_T1w.nii.gz'))
    with open(t1w_json_path, 'r') as f:
        t1w_json = json.load(f)
    print(t1w_json)
    # We should only have AnatomicalLandmarkCoordinates as a key
    np.testing.assert_array_equal(list(t1w_json.keys()),
                                  ['AnatomicalLandmarkCoordinates'])
    # And within AnatomicalLandmarkCoordinates only LPA, NAS, RPA in that order
    anat_dict = t1w_json['AnatomicalLandmarkCoordinates']
    point_list = ['LPA', 'NAS', 'RPA']
    np.testing.assert_array_equal(list(anat_dict.keys()), point_list)
    # test the actual voxel values (integers, no floating point)
    for i, point in enumerate([(66, 51, 46), (41, 32, 74), (17, 53, 47)]):
        coords = anat_dict[point_list[i]]
        np.testing.assert_array_equal(np.asarray(coords, dtype=int), point)

        # BONUS: test also that we can find the matching sidecar
        side_fname = _find_matching_sidecar('sub-01_ses-01_acq-01_T1w.nii.gz',
                                            output_path, 'T1w.json')
        assert op.split(side_fname)[-1] == 'sub-01_ses-01_acq-01_T1w.json'

    # Now try some anat writing that will fail
    # We already have some MRI data there
    with pytest.raises(IOError, match='`overwrite` is set to False'):
        write_anat(output_path,
                   subject_id,
                   t1w_mgh,
                   session_id,
                   acq,
                   raw=raw,
                   trans=trans,
                   verbose=True,
                   deface=False,
                   overwrite=False)

    # pass some invalid type as T1 MRI
    with pytest.raises(ValueError, match='must be a path to a T1 weighted'):
        write_anat(output_path,
                   subject_id,
                   9999999999999,
                   session_id,
                   raw=raw,
                   trans=trans,
                   verbose=True,
                   deface=False,
                   overwrite=True)

    # Return without writing sidecar
    sh.rmtree(anat_dir)
    write_anat(output_path, subject_id, t1w_mgh, session_id)
    # Assert that we truly cannot find a sidecar
    with pytest.raises(RuntimeError, match='Did not find any'):
        _find_matching_sidecar('sub-01_ses-01_acq-01_T1w.nii.gz', output_path,
                               'T1w.json')

    # trans has a wrong type
    wrong_type = 1
    match = 'transform type {} not known, must be'.format(type(wrong_type))
    with pytest.raises(ValueError, match=match):
        write_anat(output_path,
                   subject_id,
                   t1w_mgh,
                   session_id,
                   raw=raw,
                   trans=wrong_type,
                   verbose=True,
                   deface=False,
                   overwrite=True)

    # trans is a str, but file does not exist
    wrong_fname = 'not_a_trans'
    match = 'trans file "{}" not found'.format(wrong_fname)
    with pytest.raises(IOError, match=match):
        write_anat(output_path,
                   subject_id,
                   t1w_mgh,
                   session_id,
                   raw=raw,
                   trans=wrong_fname,
                   verbose=True,
                   overwrite=True)

    # However, passing trans as a string pointing to a trans file is fine
    write_anat(output_path,
               subject_id,
               t1w_mgh,
               session_id,
               raw=raw,
               trans=trans_fname,
               verbose=True,
               deface=False,
               overwrite=True)

    # Writing without a session does NOT yield "ses-None" anywhere
    anat_dir2 = write_anat(output_path, subject_id, t1w_mgh, None)
    assert 'ses-None' not in anat_dir2
    assert op.exists(op.join(anat_dir2, 'sub-01_T1w.nii.gz'))

    # specify trans but not raw
    with pytest.raises(ValueError, match='must be specified if `trans`'):
        write_anat(output_path,
                   subject_id,
                   t1w_mgh,
                   session_id,
                   raw=None,
                   trans=trans,
                   verbose=True,
                   deface=False,
                   overwrite=True)

    # test deface
    anat_dir = write_anat(output_path,
                          subject_id,
                          t1w_mgh,
                          session_id,
                          raw=raw,
                          trans=trans_fname,
                          verbose=True,
                          deface=True,
                          overwrite=True)
    t1w = nib.load(op.join(anat_dir, 'sub-01_ses-01_T1w.nii.gz'))
    vox_sum = t1w.get_data().sum()

    anat_dir2 = write_anat(output_path,
                           subject_id,
                           t1w_mgh,
                           session_id,
                           raw=raw,
                           trans=trans_fname,
                           verbose=True,
                           deface=dict(inset=25.),
                           overwrite=True)
    t1w2 = nib.load(op.join(anat_dir2, 'sub-01_ses-01_T1w.nii.gz'))
    vox_sum2 = t1w2.get_data().sum()

    assert vox_sum > vox_sum2

    anat_dir3 = write_anat(output_path,
                           subject_id,
                           t1w_mgh,
                           session_id,
                           raw=raw,
                           trans=trans_fname,
                           verbose=True,
                           deface=dict(theta=25),
                           overwrite=True)
    t1w3 = nib.load(op.join(anat_dir3, 'sub-01_ses-01_T1w.nii.gz'))
    vox_sum3 = t1w3.get_data().sum()

    assert vox_sum > vox_sum3

    with pytest.raises(ValueError,
                       match='The raw object, trans and raw or the landmarks'):
        write_anat(output_path,
                   subject_id,
                   t1w_mgh,
                   session_id,
                   raw=raw,
                   trans=None,
                   verbose=True,
                   deface=True,
                   overwrite=True)

    with pytest.raises(ValueError, match='inset must be numeric'):
        write_anat(output_path,
                   subject_id,
                   t1w_mgh,
                   session_id,
                   raw=raw,
                   trans=trans,
                   verbose=True,
                   deface=dict(inset='small'),
                   overwrite=True)

    with pytest.raises(ValueError, match='inset should be positive'):
        write_anat(output_path,
                   subject_id,
                   t1w_mgh,
                   session_id,
                   raw=raw,
                   trans=trans,
                   verbose=True,
                   deface=dict(inset=-2.),
                   overwrite=True)

    with pytest.raises(ValueError, match='theta must be numeric'):
        write_anat(output_path,
                   subject_id,
                   t1w_mgh,
                   session_id,
                   raw=raw,
                   trans=trans,
                   verbose=True,
                   deface=dict(theta='big'),
                   overwrite=True)

    with pytest.raises(ValueError,
                       match='theta should be between 0 and 90 degrees'):
        write_anat(output_path,
                   subject_id,
                   t1w_mgh,
                   session_id,
                   raw=raw,
                   trans=trans,
                   verbose=True,
                   deface=dict(theta=100),
                   overwrite=True)

    # Write some MRI data and supply `landmarks`
    mri_voxel_landmarks = mne.channels.make_dig_montage(
        lpa=[66.08580, 51.33362, 46.52982],
        nasion=[41.87363, 32.24694, 74.55314],
        rpa=[17.23812, 53.08294, 47.01789],
        coord_frame='mri_voxel')

    mri_landmarks = mne.channels.make_dig_montage(
        lpa=[-0.07629625, -0.00062556, -0.00776012],
        nasion=[0.00267222, 0.09362256, 0.03224791],
        rpa=[0.07635873, -0.00258065, -0.01212903],
        coord_frame='mri')

    meg_landmarks = mne.channels.make_dig_montage(
        lpa=[-7.13766068e-02, 0.00000000e+00, 5.12227416e-09],
        nasion=[3.72529030e-09, 1.02605611e-01, 4.19095159e-09],
        rpa=[7.52676800e-02, 0.00000000e+00, 5.58793545e-09],
        coord_frame='head')

    # test mri voxel landmarks
    anat_dir = write_anat(output_path,
                          subject_id,
                          t1w_mgh,
                          session_id,
                          acq,
                          deface=True,
                          landmarks=mri_voxel_landmarks,
                          verbose=True,
                          overwrite=True)
    _bids_validate(output_path)

    t1w1 = nib.load(op.join(anat_dir, 'sub-01_ses-01_acq-01_T1w.nii.gz'))
    vox1 = t1w1.get_data()

    # test mri landmarks
    anat_dir = write_anat(output_path,
                          subject_id,
                          t1w_mgh,
                          session_id,
                          acq,
                          deface=True,
                          landmarks=mri_landmarks,
                          verbose=True,
                          overwrite=True)
    _bids_validate(output_path)

    t1w2 = nib.load(op.join(anat_dir, 'sub-01_ses-01_acq-01_T1w.nii.gz'))
    vox2 = t1w2.get_data()

    # Because of significant rounding errors, the voxels differ noticeably,
    # but defacing works in all three cases and has been checked
    assert abs(vox1 - vox2).sum() / abs(vox1).sum() < 0.2

    # crash for raw also
    with pytest.raises(ValueError, match='Please use either `landmarks`'):
        anat_dir = write_anat(output_path,
                              subject_id,
                              t1w_mgh,
                              session_id,
                              acq,
                              raw=raw,
                              trans=trans,
                              deface=True,
                              landmarks=mri_landmarks,
                              verbose=True,
                              overwrite=True)

    # crash for trans also
    with pytest.raises(ValueError, match='`trans` was provided'):
        anat_dir = write_anat(output_path,
                              subject_id,
                              t1w_mgh,
                              session_id,
                              acq,
                              trans=trans,
                              deface=True,
                              landmarks=mri_landmarks,
                              verbose=True,
                              overwrite=True)

    # test meg landmarks
    tmp_dir = _TempDir()
    meg_landmarks.save(op.join(tmp_dir, 'meg_landmarks.fif'))
    anat_dir = write_anat(output_path,
                          subject_id,
                          t1w_mgh,
                          session_id,
                          acq,
                          deface=True,
                          trans=trans,
                          landmarks=op.join(tmp_dir, 'meg_landmarks.fif'),
                          verbose=True,
                          overwrite=True)
    _bids_validate(output_path)

    t1w3 = nib.load(op.join(anat_dir, 'sub-01_ses-01_acq-01_T1w.nii.gz'))
    vox3 = t1w3.get_data()

    assert abs(vox1 - vox3).sum() / abs(vox1).sum() < 0.2

    # test raise error on meg_landmarks with no trans
    with pytest.raises(ValueError, match='Head space landmarks provided'):
        anat_dir = write_anat(output_path,
                              subject_id,
                              t1w_mgh,
                              session_id,
                              acq,
                              deface=True,
                              landmarks=meg_landmarks,
                              verbose=True,
                              overwrite=True)

    # test an unsupported coord_frame (anything other than head and mri)
    fail_landmarks = meg_landmarks.copy()
    fail_landmarks.dig[0]['coord_frame'] = 3
    fail_landmarks.dig[1]['coord_frame'] = 3
    fail_landmarks.dig[2]['coord_frame'] = 3

    with pytest.raises(ValueError, match='Coordinate frame not recognized'):
        anat_dir = write_anat(output_path,
                              subject_id,
                              t1w_mgh,
                              session_id,
                              acq,
                              deface=True,
                              landmarks=fail_landmarks,
                              verbose=True,
                              overwrite=True)
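
# For reference, a minimal successful call with this legacy (pre-BIDSPath)
# signature would be a sketch along these lines, with hypothetical paths:
#
#     anat_dir = write_anat(bids_root='/data/bids', subject='01',
#                           t1w='/path/to/T1.mgz', session='01',
#                           raw=raw, trans=trans, overwrite=True)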
Example #3
    # MRI scan
    t1_fname = op.join(subjects_dir, f"sub-{subject}", "mri", "T1.mgz")
    # transformation matrix
    trans = mne.read_trans(op.join(bids_root, f"sub-{subject}-trans.fif"))
    t1w_bids_path = BIDSPath(subject=subject, root=bids_root, suffix="T1w")
    landmarks = get_anat_landmarks(
        t1_fname,
        info=raw.info,
        trans=trans,
        fs_subject=f"sub-{subject}",
        fs_subjects_dir=subjects_dir,
    )
    t1w_bids_path = write_anat(
        image=t1_fname,
        bids_path=t1w_bids_path,
        landmarks=landmarks,
        verbose=True,
    )
    anat_dir = t1w_bids_path.directory
    # ERM
    erm_fname = op.join(bids_root, f"sub-{subject}_erm_raw.fif")
    erm = mne.io.read_raw_fif(erm_fname, allow_maxshield="yes")
    erm.info["line_freq"] = 60
    er_date = erm.info["meas_date"].strftime("%Y%m%d")
    er_bids_path = BIDSPath(subject="emptyroom",
                            session=er_date,
                            task="noise",
                            root=bids_root)
    write_raw_bids(erm, er_bids_path, overwrite=True)
    print(bids_path)
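
# The empty-room recording written here could later be attached to a subject's
# recording via the `empty_room` argument (see Example #10 below); a sketch:
#
#     write_raw_bids(raw, bids_path, empty_room=er_bids_path, overwrite=True)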
Example #4
print(trans)

###############################################################################
# We can save the MRI to our existing BIDS directory and, at the same time,
# create a JSON sidecar file containing metadata that we will later use to
# retrieve our transformation matrix :code:`trans`.

# First create the BIDSPath object.
t1w_bids_path = \
    BIDSPath(subject=sub, session=ses, root=output_path, suffix='T1w')

# We use the write_anat function
t1w_bids_path = write_anat(
    image=t1_mgh_fname,  # path to the MRI scan
    bids_path=t1w_bids_path,
    raw=raw,  # the raw MEG data file connected to the MRI
    trans=trans,  # our transformation matrix
    verbose=True  # this will print out the sidecar file
)
anat_dir = t1w_bids_path.directory

###############################################################################
# Let's have another look at our BIDS directory
print_dir_tree(output_path)

###############################################################################
# Our BIDS dataset is now ready to be shared. We can easily estimate the
# transformation matrix using ``MNE-BIDS`` and the BIDS dataset.
estim_trans = get_head_mri_trans(bids_path=bids_path)

###############################################################################
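###############################################################################
# As a sanity check (a sketch, not part of the original tutorial), the
# estimated transform can be compared with the one we read from disk; both are
# ``mne.transforms.Transform`` objects whose 4x4 matrix sits under 'trans'.
import numpy as np  # assumed to be available in this tutorial

# the tolerance is illustrative
np.testing.assert_allclose(estim_trans['trans'], trans['trans'], atol=1e-6)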
Example #5
def test_write_anat(_bids_validate):
    """Test writing anatomical data."""
    # Get the MNE testing sample data
    import nibabel as nib
    output_path = _TempDir()
    data_path = testing.data_path()
    raw_fname = op.join(data_path, 'MEG', 'sample',
                        'sample_audvis_trunc_raw.fif')

    event_id = {
        'Auditory/Left': 1,
        'Auditory/Right': 2,
        'Visual/Left': 3,
        'Visual/Right': 4,
        'Smiley': 5,
        'Button': 32
    }
    events_fname = op.join(data_path, 'MEG', 'sample',
                           'sample_audvis_trunc_raw-eve.fif')

    raw = mne.io.read_raw_fif(raw_fname)
    write_raw_bids(raw,
                   bids_basename,
                   output_path,
                   events_data=events_fname,
                   event_id=event_id,
                   overwrite=False)

    # Write some MRI data and supply a `trans`
    trans_fname = raw_fname.replace('_raw.fif', '-trans.fif')
    trans = mne.read_trans(trans_fname)

    # Get the T1 weighted MRI data file
    # It needs to be converted to NIfTI because our test data only has MGH
    t1w_mgh = op.join(data_path, 'subjects', 'sample', 'mri', 'T1.mgz')

    anat_dir = write_anat(output_path,
                          subject_id,
                          t1w_mgh,
                          session_id,
                          acq,
                          raw=raw,
                          trans=trans,
                          deface=True,
                          verbose=True,
                          overwrite=True)
    _bids_validate(output_path)

    # Validate that files are as expected
    t1w_json_path = op.join(anat_dir, 'sub-01_ses-01_acq-01_T1w.json')
    assert op.exists(t1w_json_path)
    assert op.exists(op.join(anat_dir, 'sub-01_ses-01_acq-01_T1w.nii.gz'))
    with open(t1w_json_path, 'r') as f:
        t1w_json = json.load(f)
    print(t1w_json)
    # We should only have AnatomicalLandmarkCoordinates as a key
    np.testing.assert_array_equal(list(t1w_json.keys()),
                                  ['AnatomicalLandmarkCoordinates'])
    # And within AnatomicalLandmarkCoordinates only LPA, NAS, RPA in that order
    anat_dict = t1w_json['AnatomicalLandmarkCoordinates']
    point_list = ['LPA', 'NAS', 'RPA']
    np.testing.assert_array_equal(list(anat_dict.keys()), point_list)
    # test the actual voxel values (integers, no floating point)
    for i, point in enumerate([(66, 51, 46), (41, 32, 74), (17, 53, 47)]):
        coords = anat_dict[point_list[i]]
        np.testing.assert_array_equal(np.asarray(coords, dtype=int), point)

        # BONUS: test also that we can find the matching sidecar
        side_fname = _find_matching_sidecar('sub-01_ses-01_acq-01_T1w.nii.gz',
                                            output_path, 'T1w.json')
        assert op.split(side_fname)[-1] == 'sub-01_ses-01_acq-01_T1w.json'

    # Now try some anat writing that will fail
    # We already have some MRI data there
    with pytest.raises(IOError, match='`overwrite` is set to False'):
        write_anat(output_path,
                   subject_id,
                   t1w_mgh,
                   session_id,
                   acq,
                   raw=raw,
                   trans=trans,
                   verbose=True,
                   deface=False,
                   overwrite=False)

    # pass some invalid type as T1 MRI
    with pytest.raises(ValueError, match='must be a path to a T1 weighted'):
        write_anat(output_path,
                   subject_id,
                   9999999999999,
                   session_id,
                   raw=raw,
                   trans=trans,
                   verbose=True,
                   deface=False,
                   overwrite=True)

    # Return without writing sidecar
    sh.rmtree(anat_dir)
    write_anat(output_path, subject_id, t1w_mgh, session_id)
    # Assert that we truly cannot find a sidecar
    with pytest.raises(RuntimeError, match='Did not find any'):
        _find_matching_sidecar('sub-01_ses-01_acq-01_T1w.nii.gz', output_path,
                               'T1w.json')

    # trans has a wrong type
    wrong_type = 1
    match = 'transform type {} not known, must be'.format(type(wrong_type))
    with pytest.raises(ValueError, match=match):
        write_anat(output_path,
                   subject_id,
                   t1w_mgh,
                   session_id,
                   raw=raw,
                   trans=wrong_type,
                   verbose=True,
                   deface=False,
                   overwrite=True)

    # trans is a str, but file does not exist
    wrong_fname = 'not_a_trans'
    match = 'trans file "{}" not found'.format(wrong_fname)
    with pytest.raises(IOError, match=match):
        write_anat(output_path,
                   subject_id,
                   t1w_mgh,
                   session_id,
                   raw=raw,
                   trans=wrong_fname,
                   verbose=True,
                   overwrite=True)

    # However, passing trans as a string pointing to a trans file is fine
    write_anat(output_path,
               subject_id,
               t1w_mgh,
               session_id,
               raw=raw,
               trans=trans_fname,
               verbose=True,
               deface=False,
               overwrite=True)

    # Writing without a session does NOT yield "ses-None" anywhere
    anat_dir2 = write_anat(output_path, subject_id, t1w_mgh, None)
    assert 'ses-None' not in anat_dir2
    assert op.exists(op.join(anat_dir2, 'sub-01_T1w.nii.gz'))

    # specify trans but not raw
    with pytest.raises(ValueError, match='must be specified if `trans`'):
        write_anat(output_path,
                   subject_id,
                   t1w_mgh,
                   session_id,
                   raw=None,
                   trans=trans,
                   verbose=True,
                   deface=False,
                   overwrite=True)

    # test deface
    anat_dir = write_anat(output_path,
                          subject_id,
                          t1w_mgh,
                          session_id,
                          raw=raw,
                          trans=trans_fname,
                          verbose=True,
                          deface=True,
                          overwrite=True)
    t1w = nib.load(op.join(anat_dir, 'sub-01_ses-01_T1w.nii.gz'))
    vox_sum = t1w.get_data().sum()

    anat_dir2 = write_anat(output_path,
                           subject_id,
                           t1w_mgh,
                           session_id,
                           raw=raw,
                           trans=trans_fname,
                           verbose=True,
                           deface=dict(inset=25.),
                           overwrite=True)
    t1w2 = nib.load(op.join(anat_dir2, 'sub-01_ses-01_T1w.nii.gz'))
    vox_sum2 = t1w2.get_data().sum()

    assert vox_sum > vox_sum2

    anat_dir3 = write_anat(output_path,
                           subject_id,
                           t1w_mgh,
                           session_id,
                           raw=raw,
                           trans=trans_fname,
                           verbose=True,
                           deface=dict(theta=25),
                           overwrite=True)
    t1w3 = nib.load(op.join(anat_dir3, 'sub-01_ses-01_T1w.nii.gz'))
    vox_sum3 = t1w3.get_data().sum()

    assert vox_sum > vox_sum3

    with pytest.raises(ValueError,
                       match='The raw object, trans and raw must be provided'):
        write_anat(output_path,
                   subject_id,
                   t1w_mgh,
                   session_id,
                   raw=raw,
                   trans=None,
                   verbose=True,
                   deface=True,
                   overwrite=True)

    with pytest.raises(ValueError, match='inset must be numeric'):
        write_anat(output_path,
                   subject_id,
                   t1w_mgh,
                   session_id,
                   raw=raw,
                   trans=trans,
                   verbose=True,
                   deface=dict(inset='small'),
                   overwrite=True)

    with pytest.raises(ValueError, match='inset should be positive'):
        write_anat(output_path,
                   subject_id,
                   t1w_mgh,
                   session_id,
                   raw=raw,
                   trans=trans,
                   verbose=True,
                   deface=dict(inset=-2.),
                   overwrite=True)

    with pytest.raises(ValueError, match='theta must be numeric'):
        write_anat(output_path,
                   subject_id,
                   t1w_mgh,
                   session_id,
                   raw=raw,
                   trans=trans,
                   verbose=True,
                   deface=dict(theta='big'),
                   overwrite=True)

    with pytest.raises(ValueError,
                       match='theta should be between 0 and 90 degrees'):
        write_anat(output_path,
                   subject_id,
                   t1w_mgh,
                   session_id,
                   raw=raw,
                   trans=trans,
                   verbose=True,
                   deface=dict(theta=100),
                   overwrite=True)
Example #6
def test_update_anat_landmarks(tmp_path):
    """Test updating the anatomical landmarks of an MRI scan."""
    data_path = Path(testing.data_path())
    raw_path = data_path / 'MEG' / 'sample' / 'sample_audvis_trunc_raw.fif'
    trans_path = Path(str(raw_path).replace('_raw.fif', '-trans.fif'))
    t1_path = data_path / 'subjects' / 'sample' / 'mri' / 'T1.mgz'
    fs_subject = 'sample'
    fs_subjects_dir = data_path / 'subjects'
    bids_root = tmp_path
    bids_path_mri = BIDSPath(subject=subject_id,
                             session=session_id,
                             acquisition=acq,
                             root=bids_root,
                             datatype='anat',
                             suffix='T1w')

    # First, write the MRI scan to BIDS, including the anatomical landmarks
    info = mne.io.read_info(raw_path)
    trans = mne.read_trans(trans_path)
    landmarks = get_anat_landmarks(image=t1_path,
                                   info=info,
                                   trans=trans,
                                   fs_subject=fs_subject,
                                   fs_subjects_dir=fs_subjects_dir)
    bids_path_mri = write_anat(image=t1_path,
                               bids_path=bids_path_mri,
                               landmarks=landmarks,
                               deface=False)
    bids_path_mri_json = bids_path_mri.copy().update(extension='.json')

    # Modify the landmarks
    # Move the nasion a bit
    landmarks_new = landmarks.copy()
    landmarks_new.dig[1]['r'] *= 0.9
    update_anat_landmarks(bids_path=bids_path_mri, landmarks=landmarks_new)

    with bids_path_mri_json.fpath.open(encoding='utf-8') as f:
        mri_json = json.load(f)

    assert np.allclose(landmarks_new.dig[1]['r'],
                       mri_json['AnatomicalLandmarkCoordinates']['NAS'])

    # Remove JSON sidecar; updating the anatomical landmarks should re-create
    # the file unless `on_missing` is `'raise'`
    bids_path_mri_json.fpath.unlink()
    with pytest.raises(KeyError,
                       match='No AnatomicalLandmarkCoordinates section found'):
        update_anat_landmarks(bids_path=bids_path_mri, landmarks=landmarks_new)

    update_anat_landmarks(bids_path=bids_path_mri,
                          landmarks=landmarks_new,
                          on_missing='ignore')

    with pytest.raises(KeyError, match='landmark not found'):
        update_anat_landmarks(bids_path=bids_path_mri,
                              landmarks=landmarks_new,
                              kind='ses-1')
    update_anat_landmarks(bids_path=bids_path_mri,
                          landmarks=landmarks_new,
                          kind='ses-1',
                          on_missing='ignore')

    mri_json = json.loads(bids_path_mri_json.fpath.read_text(encoding='utf-8'))
    assert 'NAS' in mri_json['AnatomicalLandmarkCoordinates']
    assert 'NAS_ses-1' in mri_json['AnatomicalLandmarkCoordinates']

    assert np.allclose(landmarks_new.dig[1]['r'],
                       mri_json['AnatomicalLandmarkCoordinates']['NAS'])

    # Check without extension provided
    bids_path_mri_no_ext = bids_path_mri.copy().update(extension=None)
    update_anat_landmarks(bids_path=bids_path_mri_no_ext,
                          landmarks=landmarks_new)

    # Check without datatype provided
    bids_path_mri_no_datatype = bids_path_mri.copy().update(datatype=None)
    update_anat_landmarks(bids_path=bids_path_mri_no_datatype,
                          landmarks=landmarks)

    # Check handling of invalid input
    bids_path_invalid = bids_path_mri.copy().update(datatype='meg')
    with pytest.raises(ValueError, match='Can only operate on "anat"'):
        update_anat_landmarks(bids_path=bids_path_invalid, landmarks=landmarks)

    bids_path_invalid = bids_path_mri.copy().update(suffix=None)
    with pytest.raises(ValueError, match='lease specify the "suffix"'):
        update_anat_landmarks(bids_path=bids_path_invalid, landmarks=landmarks)

    bids_path_invalid = bids_path_mri.copy().update(suffix='meg')
    with pytest.raises(ValueError,
                       match='Can only operate on "T1w" and "FLASH"'):
        update_anat_landmarks(bids_path=bids_path_invalid, landmarks=landmarks)

    bids_path_invalid = bids_path_mri.copy().update(subject='invalid')
    with pytest.raises(ValueError, match='Could not find an MRI scan'):
        update_anat_landmarks(bids_path=bids_path_invalid, landmarks=landmarks)

    # Unsupported coordinate frame
    landmarks_invalid = landmarks.copy()
    for digpoint in landmarks_invalid.dig:
        digpoint['coord_frame'] = FIFF.FIFFV_MNE_COORD_RAS

    with pytest.raises(ValueError, match='must be specified in MRI voxel'):
        update_anat_landmarks(bids_path=bids_path_mri,
                              landmarks=landmarks_invalid)

    # Missing cardinal point
    landmarks_invalid = landmarks.copy()
    del landmarks_invalid.dig[0]
    with pytest.raises(ValueError,
                       match='did not contain all required cardinal points'):
        update_anat_landmarks(bids_path=bids_path_mri,
                              landmarks=landmarks_invalid)

    # Test with path-like landmarks
    fiducials_path = (data_path / 'subjects' / 'sample' / 'bem' /
                      'sample-fiducials.fif')

    update_anat_landmarks(bids_path=bids_path_mri,
                          landmarks=fiducials_path,
                          fs_subject='sample',
                          fs_subjects_dir=data_path / 'subjects')
    expected_coords_in_voxels = np.array([
        [68.38202, 45.24057, 43.439808],  # noqa: E241
        [42.27006, 30.758774, 74.09837],  # noqa: E202, E241
        [17.044853, 46.586075, 42.618504]
    ])
    mri_json = json.loads(bids_path_mri_json.fpath.read_text(encoding='utf-8'))
    for landmark, expected_coords in zip(('LPA', 'NAS', 'RPA'),
                                         expected_coords_in_voxels):
        assert np.allclose(mri_json['AnatomicalLandmarkCoordinates'][landmark],
                           expected_coords)
Example #7
# note that the origin is centered on the anterior commissure (AC)
# with the y-axis passing through the posterior commissure (PC)
T1_fname = op.join(subjects_dir, 'sample_seeg', 'mri', 'T1.mgz')
fig = plot_anat(T1_fname, cut_coords=(0, 0, 0))
fig.axes['x'].ax.annotate('AC', (2., -2.), (30., -40.),
                          color='w',
                          arrowprops=dict(facecolor='w', alpha=0.5))
fig.axes['x'].ax.annotate('PC', (-31., -2.), (-80., -40.),
                          color='w',
                          arrowprops=dict(facecolor='w', alpha=0.5))

# write ACPC-aligned T1
landmarks = get_anat_landmarks(T1_fname, raw.info, trans, 'sample_seeg',
                               subjects_dir)
T1_bids_path = write_anat(T1_fname,
                          bids_path,
                          deface=True,
                          landmarks=landmarks)

# write `raw` to BIDS and anonymize it (converts to BrainVision format)
#
# we need to pass the `montage` argument for coordinate frames other than
# "head" which is what MNE uses internally in the `raw` object
#
# `acpc_aligned=True` affirms that our MRI is aligned to ACPC
# if this is not true, convert to `fsaverage` (see below)!
write_raw_bids(raw,
               bids_path,
               anonymize=dict(daysback=40000),
               montage=montage,
               acpc_aligned=True,
               overwrite=True)
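
# A possible follow-up (a sketch, not in the original snippet): read the
# anonymized recording back from the BIDS dataset to verify the round trip.
from mne_bids import read_raw_bids  # assumed import

raw_read = read_raw_bids(bids_path=bids_path)
print(raw_read.info)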
Example #8
def test_update_anat_landmarks(tmpdir):
    """Test updating the anatomical landmarks of an MRI scan."""
    data_path = Path(testing.data_path())
    raw_path = data_path / 'MEG' / 'sample' / 'sample_audvis_trunc_raw.fif'
    trans_path = Path(str(raw_path).replace('_raw.fif', '-trans.fif'))
    t1_path = data_path / 'subjects' / 'sample' / 'mri' / 'T1.mgz'
    fs_subject = 'sample'
    fs_subjects_dir = data_path / 'subjects'
    bids_root = Path(tmpdir)
    bids_path_mri = BIDSPath(subject=subject_id, session=session_id,
                             acquisition=acq, root=bids_root, datatype='anat',
                             suffix='T1w')

    # First, write the MRI scan to BIDS, including the anatomical landmarks
    info = mne.io.read_info(raw_path)
    trans = mne.read_trans(trans_path)
    landmarks = get_anat_landmarks(
        image=t1_path, info=info, trans=trans, fs_subject=fs_subject,
        fs_subjects_dir=fs_subjects_dir
    )
    bids_path_mri = write_anat(image=t1_path, bids_path=bids_path_mri,
                               landmarks=landmarks, deface=False)
    bids_path_mri_json = bids_path_mri.copy().update(extension='.json')

    # Modify the landmarks
    # Move the nasion a bit
    landmarks_new = landmarks.copy()
    landmarks_new.dig[1]['r'] *= 0.9
    update_anat_landmarks(bids_path=bids_path_mri, landmarks=landmarks_new)

    with bids_path_mri_json.fpath.open(encoding='utf-8') as f:
        mri_json = json.load(f)

    assert np.allclose(
        landmarks_new.dig[1]['r'],
        mri_json['AnatomicalLandmarkCoordinates']['NAS']
    )

    # Remove JSON sidecar; updating the anatomical landmarks should re-create
    # the file
    bids_path_mri_json.fpath.unlink()
    update_anat_landmarks(bids_path=bids_path_mri, landmarks=landmarks_new)

    with bids_path_mri_json.fpath.open(encoding='utf-8') as f:
        mri_json = json.load(f)

    assert np.allclose(
        landmarks_new.dig[1]['r'],
        mri_json['AnatomicalLandmarkCoordinates']['NAS']
    )

    # Check without extension provided
    bids_path_mri_no_ext = bids_path_mri.copy().update(extension=None)
    update_anat_landmarks(bids_path=bids_path_mri_no_ext,
                          landmarks=landmarks_new)

    # Check without datatype provided
    bids_path_mri_no_datatype = bids_path_mri.copy().update(datatype=None)
    update_anat_landmarks(bids_path=bids_path_mri_no_datatype,
                          landmarks=landmarks)

    # Check handling of invalid input
    bids_path_invalid = bids_path_mri.copy().update(datatype='meg')
    with pytest.raises(ValueError, match='Can only operate on "anat"'):
        update_anat_landmarks(bids_path=bids_path_invalid, landmarks=landmarks)

    bids_path_invalid = bids_path_mri.copy().update(suffix=None)
    with pytest.raises(ValueError, match='lease specify the "suffix"'):
        update_anat_landmarks(bids_path=bids_path_invalid, landmarks=landmarks)

    bids_path_invalid = bids_path_mri.copy().update(suffix='meg')
    with pytest.raises(ValueError,
                       match='Can only operate on "T1w" and "FLASH"'):
        update_anat_landmarks(bids_path=bids_path_invalid, landmarks=landmarks)

    bids_path_invalid = bids_path_mri.copy().update(subject='invalid')
    with pytest.raises(ValueError, match='Could not find an MRI scan'):
        update_anat_landmarks(bids_path=bids_path_invalid, landmarks=landmarks)

    # Unsupported coordinate frame
    landmarks_invalid = landmarks.copy()
    for digpoint in landmarks_invalid.dig:
        digpoint['coord_frame'] = FIFF.FIFFV_MNE_COORD_RAS

    with pytest.raises(ValueError, match='must be specified in MRI voxel'):
        update_anat_landmarks(bids_path=bids_path_mri,
                              landmarks=landmarks_invalid)

    # Missing cardinal point
    landmarks_invalid = landmarks.copy()
    del landmarks_invalid.dig[0]
    with pytest.raises(ValueError,
                       match='did not contain all required cardinal points'):
        update_anat_landmarks(bids_path=bids_path_mri,
                              landmarks=landmarks_invalid)
Example #9
trans_fname = op.join(data_path, 'MEG', 'sample',
                      'sample_audvis_raw-trans.fif')
trans = mne.read_trans(trans_fname)
print(trans)

###############################################################################
# We can save the MRI to our existing BIDS directory and, at the same time,
# create a JSON sidecar file containing metadata that we will later use to
# retrieve our transformation matrix :code:`trans`.

# We use the write_anat function
anat_dir = write_anat(
    bids_root=output_path,  # the BIDS dir we wrote earlier
    subject=sub,
    t1w=t1_mgh_fname,  # path to the MRI scan
    session=ses,
    raw=raw,  # the raw MEG data file connected to the MRI
    trans=trans,  # our transformation matrix
    verbose=True  # this will print out the sidecar file
)

###############################################################################
# Let's have another look at our BIDS directory
print_dir_tree(output_path)

###############################################################################
# Our BIDS dataset is now ready to be shared. We can easily estimate the
# transformation matrix using ``MNE-BIDS`` and the BIDS dataset.
estim_trans = get_head_mri_trans(
    bids_basename=bids_basename,
    bids_root=output_path  # root of our BIDS dir
)
Example #10
               empty_room=bids_path_er,
               verbose=False)
write_meg_calibration(cal_path, bids_path=bids_path, verbose=False)
write_meg_crosstalk(ct_path, bids_path=bids_path, verbose=False)

# Write anatomical scan
# We pass the MRI landmark coordinates, which will later be required for
# automated defacing
mri_landmarks = mne.channels.make_dig_montage(
    lpa=[66.08580, 51.33362, 46.52982],
    nasion=[41.87363, 32.24694, 74.55314],
    rpa=[17.23812, 53.08294, 47.01789],
    coord_frame='mri_voxel')
bids_path.datatype = 'anat'
write_anat(image=t1w_path,
           bids_path=bids_path,
           landmarks=mri_landmarks,
           verbose=False)

# %%
# Basic anonymization
# -------------------
# Now we're ready to anonymize the dataset!

anonymize_dataset(bids_root_in=bids_root, bids_root_out=bids_root_anon)

# %%
# That's it! Let's have a look at directory structure of the anonymized
# dataset.
print_dir_tree(bids_root_anon)

# %%
Example #11
                                     min_duration=config.min_event_duration,
                                     shortest_event=config.shortest_event)

            write_raw_bids(raw,
                           bids_basename,
                           output_path=bids_root,
                           events_data=events,
                           event_id=config.event_id,
                           overwrite=True)

        if task != 'empty':
            # Take care of anatomy
            anat_dir = write_anat(bids_root,
                                  subject,
                                  t1w,
                                  acquisition="t1w",
                                  trans=trans,
                                  raw=raw,
                                  overwrite=True)

            # %% plot to check landmarks
            if plot:

                import numpy as np
                import matplotlib.pyplot as plt

                from nilearn.plotting import plot_anat
                from mne.source_space import head_to_mri
                from mne_bids import get_head_mri_trans

                # Get Landmarks from MEG file, 0, 1, and 2 correspond to LPA, NAS, RPA
Example #12
                                          duration=durations,
                                          description=descriptions,
                                          orig_time=raw.info['meas_date'])
            raw.set_annotations(annotations)
            del annotations, onsets, descriptions, durations

        bids_basename = make_bids_basename(subject=participant, task=exp)
        write_raw_bids(raw,
                       bids_basename,
                       bids_root=bids_root,
                       events_data=events,
                       event_id=event_name_to_id_mapping,
                       overwrite=True,
                       verbose=False)

        write_anat(bids_root=bids_root,
                   subject=participant,
                   t1w=t1w_fname,
                   acquisition='t1w',
                   trans=trans_fname,
                   raw=raw,
                   overwrite=True,
                   verbose=False)

        del bids_basename, participant, trans_fname, raw_fname, raw, events

print('Finished conversion.')
t_end = datetime.now()

print(f'Process took {t_end - t_start}.')
Example #13
                                 datatype='meg',
                                 root=output_dir)
            write_raw_bids(raw,
                           bids_path=bids_path,
                           events_data=events,
                           event_id=event_name_to_id_mapping,
                           overwrite=True,
                           verbose=False)

            write_meg_calibration(mf_cal_fname, bids_path)
            write_meg_crosstalk(mf_ctc_fname, bids_path)

            t1w_bids_path = BIDSPath(subject=participant,
                                     root=output_dir,
                                     acquisition='t1w')
            write_anat(t1w=t1w_fname,
                       bids_path=t1w_bids_path,
                       trans=trans_fname,
                       raw=raw,
                       overwrite=True,
                       verbose=False)

            del bids_path, trans_fname, raw_fname, raw, events

        progress_bar.update()

print('Finished conversion.')
t_end = datetime.now()

print(f'Process took {t_end - t_start}.')
Example #14
def main():
    #splitter = "\\" if platform.system().lower().startswith("win") else "/"

    parser = argparse.ArgumentParser()
    parser.add_argument("--subject",
                        action="store",
                        type=str,
                        required=False,
                        help="Name of the Patient/ Subject to process")
    parser.add_argument("--bidsroot",
                        action="store",
                        type=str,
                        required=False,
                        help="Specify a different BIDS root directory to use")
    parser.add_argument("--inputfolder",
                        action="store",
                        type=str,
                        required=False,
                        help="Specify a different data input folder")
    parser.add_argument(
        "--fsonly",
        action="store",
        type=str,
        required=False,
        help="Use --fsonly true if you only want to do a freesurfer segmentation"
    )  # do only freesurfer segmentation
    parser.add_argument("--openmp",
                        action="store",
                        type=str,
                        required=False,
                        help="Specify how many jobs/ processor cores to use")
    parser.add_argument("--srcspacing",
                        action="store",
                        type=str,
                        required=False,
                        help="Source spacing: \
                            -defaults to ico4 --> 2562 Source points \
                            || other options: \
                            oct5 --> 1026 Source points \
                            || oct6 --> 4098 Source points \
                            || ico5 --> 10242 Source points")
    parser.add_argument("--extras",
                        action="store",
                        type=str,
                        required=False,
                        help="Specify directory containing extras for report")

    args = parser.parse_args()

    # additional arguments
    if args.bidsroot:
        bids_root = args.bidsroot
    else:
        bids_root = os.environ.get("BIDS_ROOT")

    if args.openmp:
        n_jobs = openmp = int(args.openmp)
    else:
        # guard against an unset OPENMP variable (int(None) would raise
        # before the old `== None` check could ever run)
        openmp_env = os.environ.get("OPENMP")
        n_jobs = openmp = int(openmp_env) if openmp_env else 1

    if args.inputfolder:
        input_folder = args.inputfolder
    else:
        input_folder = os.environ.get("INPUT_FOLDER")

    if args.extras:
        extras_directory = args.extras
    else:
        extras_directory = os.environ.get("EXTRAS_DIRECTORY")

# define subject
    subject = args.subject
    if not subject:
        poss = os.listdir(input_folder)
        print(
            f"No subject specified, maybe you want to choose from these:\n {poss}"
        )
        subject = input()

    if not subject.startswith("sub-"):
        ject = str(subject)
        subject = "sub-" + subject
    else:
        ject = subject.split("sub-")[-1]

# create folder structure and copy
    dfc = Folderer.DerivativesFoldersCreator(BIDS_root=bids_root,
                                             extras_directory=extras_directory,
                                             subject=subject)
    dfc.make_derivatives_folders()

    # logging
    logfile = opj(dfc.freport, "SourceLocPipeline.log")
    logging.basicConfig(filename=logfile,
                        filemode="w",
                        format="\n%(levelname)s --> %(message)s")
    rootlog = logging.getLogger()
    rootlog.setLevel(logging.INFO)
    rootlog.info("Now running SourceLoc pipeline...")

    # log parameters
    rootlog.info(f"*" * 20)
    rootlog.info("Parameters")
    rootlog.info(f"*" * 20)
    rootlog.info(f"Subject name = {ject}")
    rootlog.info(f"Input folder is set to: {input_folder}.")
    rootlog.info(f"BIDS root is set to: {bids_root}.")
    rootlog.info(f"Extras directory is set to: {extras_directory}.")
    rootlog.info(f"Using {openmp} processor cores/ jobs.")
    rootlog.info("Folder structure has been created.")

    # check if freesurfer subjects_dir exists
    # note: wrapping the lookup in str() would turn None into "None"
    # and hide the error, so compare before any conversion
    FS_SUBJECTS_DIR = os.environ.get("SUBJECTS_DIR")
    if FS_SUBJECTS_DIR is None:
        print("It seems freesurfer is not properly set up on your computer")
        rootlog.warning(
            "No working freesurfer environment found - SUBJECTS_DIR is not set"
        )

# check if source spacing was set + is valid
    if not args.srcspacing:
        spacing = str(os.environ.get("SRCSPACING"))
        if not spacing:
            spacing = "oct6"
    else:
        spacing = args.srcspacing
    if spacing not in ["ico4", "oct5", "oct6", "ico5"]:
        spacing = "oct6"
        print('The desired spacing isn\'t allowed, typo?\n \
                        Options are: "ico4", "oct5", "oct6", "ico5"\n \
                        --> spacing was automatically set to "oct6".')
        rootlog.warning(
            "Spacing was set to \"oct6\", as input given was invalid.")
    rootlog.info(f"Final source spacing is {spacing}.")

    # MRI to nii.gz, then freesurfer, then hippocampal subfields
    # Naturally, this only works with a freesurfer environment
    # and this will take some time...
    anafolder = opj(input_folder, ject)
    if os.path.isdir(anafolder):
        rootlog.setLevel(logging.ERROR)
        rap = Anatomist.RawAnatomyProcessor(anafolder,
                                            FS_SUBJECTS_DIR,
                                            n_jobs=n_jobs)
        try:
            rap.run_anatomy_pipeline()
        except Exception as e:
            rootlog.error(
                f"Something went wrong while processing anatomy: {e}")
        rootlog.setLevel(logging.INFO)

# Check if only freesurfer segmentation was desired and comply, if true
    if args.fsonly and args.fsonly.lower() == "true":
        rootlog.info(
            "Only freesurfer segmentation was desired - finished without errors."
        )
        exit()

# copy freesurfer files to local subjects_dir
    try:
        segmentation = opj(FS_SUBJECTS_DIR, subject)
        target = opj(dfc.fanat, subject)
        if not os.path.isdir(target):
            os.mkdir(target)
        rootlog.info(
            f"Copying freesurfer segmentation {segmentation} to {target}")
        dfc._recursive_overwrite(segmentation, target)
    except Exception as e:
        rootlog.error(f"Couldn't copy freesurfer segmentation\n--> {e}.")

# create source models
    sourcerer = Anatomist.SourceModeler(subjects_dir=dfc.fanat,
                                        subject=subject,
                                        spacing=spacing,
                                        n_jobs=n_jobs)
    sourcerer.calculate_source_models()

    # process raw fifs
    raws = glob.glob(input_folder + "/*.fif")
    raws = [f for f in raws if ject in f]
    epo_filename = opj(dfc.spikes, str(subject) + "-epo.fif")
    concatname = opj(os.path.dirname(raws[0]), str(subject) + "_concat.fif")

    def raw_processing_already_done():
        r = os.path.isfile(concatname)
        c = os.path.isfile(epo_filename)
        return r and c

    if not raw_processing_already_done():
        # parse list of appropriate raws
        rootlog.info(
            f"The following raw files were found for preprocessing:\n{raws}")
        prepper = u.RawPreprocessor()
        for run, rawfile in enumerate(raws):
            if "tsss" in rawfile and ject in rawfile and not "-epo" in rawfile:
                # --> search for matching eventfile and combine
                rawname = rawfile.strip(".fif") + "_prep.fif"
                if not "_prep" in rawfile:
                    # epochs
                    epochs = prepper.raw_to_epoch(rawfile)
                    if epochs is not None:
                        epochs = epochs.load_data().filter(
                            l_freq=l_freq,
                            fir_design=fir_design,
                            h_freq=h_freq,
                            n_jobs=n_jobs)
                        epo_filename = rawfile[:-len(".fif")] + "-epo.fif"
                        epochs.save(epo_filename, overwrite=True)
                    # preprocessing
                    raw = mne.io.read_raw(rawfile,
                                          preload=False,
                                          on_split_missing="ignore")
                    raw = prepper.filter_raw(raw,
                                             l_freq=l_freq,
                                             fir_design=fir_design,
                                             h_freq=h_freq,
                                             n_jobs=n_jobs)
                    raw = prepper.resample_raw(raw,
                                               s_freq=s_freq,
                                               n_jobs=n_jobs)
                    #
                    # Artifacts
                    # ECG artifacts
                    # It's smarter to supervise this step (--> look at the topomaps!)
                    raw.load_data()
                    try:
                        ecg_projs, _ = mne.preprocessing.compute_proj_ecg(
                            raw,
                            n_grad=n_grad,
                            n_mag=n_mag,
                            n_eeg=n_eeg,
                            reject=None)
                        raw.add_proj(ecg_projs, remove_existing=False)
                        fig = mne.viz.plot_projs_topomap(ecg_projs,
                                                         info=raw.info,
                                                         show=False)
                        savename = os.path.join(dfc.fprep,
                                                "ECG_projs_Topomap.png")
                        fig.savefig(savename)
                    except Exception as e:
                        rootlog.error(
                            f"ECG - Artifact correction failed --> {e}")
                    # EOG artifacts
                    # It's a bad idea to do this in an automated step
                    try:
                        eog_evoked = mne.preprocessing.create_eog_epochs(
                            raw).average()
                        #eog_evoked.apply_baseline((None, None))
                        eog_projs, _ = mne.preprocessing.compute_proj_eog(
                            raw,
                            n_grad=n_grad,
                            n_mag=n_mag,
                            n_eeg=n_eeg,
                            n_jobs=n_jobs,
                            reject=None)
                        raw.add_proj(
                            eog_projs, remove_existing=False
                        )  # --> don't do this in the early stages - see documentation
                        figs = eog_evoked.plot_joint(show=False)
                        for idx, fig in enumerate(figs):
                            savename = os.path.join(
                                dfc.fprep, "EOG Topomap_" + str(idx) + ".png")
                            fig.savefig(savename)
                    except Exception as e:
                        rootlog.error(
                            f"EOG - Artifact correction failed --> {e}")
                    # save raw, store projs
                    all_projs = raw.info["projs"]
                    raw.save(rawname, overwrite=True)
                    del raw

        # concatenate epochs
        epo_filename = opj(dfc.spikes, str(subject) + "-epo.fif")
        if not os.path.isfile(epo_filename):
            epoch_files = glob.glob(input_folder + "/*-epo.fif")
            epoch_files = [f for f in epoch_files if ject in f]
            all_epochs = dict()
            rootlog.info("Concatenating epochs now...")
            for f in epoch_files:
                all_epochs[f] = mne.read_epochs(f)
            concat_epochs = mne.concatenate_epochs(
                [all_epochs[f] for f in epoch_files])
            concat_epochs.add_proj(all_projs, remove_existing=True)
            concat_epochs.apply_proj()
            rootlog.info(f"Saving concatenated epoch file as {epo_filename}")
            concat_epochs.save(epo_filename)

        # concatenate filtered and resampled files
        raws = glob.glob(input_folder + "/*.fif")
        raws = [f for f in raws if ject in f]
        raws = [f for f in raws if "_prep" in f]
        all_raws = dict()
        concatname = opj(os.path.dirname(raws[0]),
                         str(subject) + "_concat.fif")
        if not os.path.isfile(concatname):
            for r in raws:
                all_raws[r] = mne.io.read_raw(r, preload=False)
                all_raws[r].del_proj()
            rootlog.info(
                f"Concatenating the following (filtered and resampled) raw files: {raws}"
            )
            try:
                raw = mne.concatenate_raws(
                    [all_raws[r] for r in all_raws.keys()])
                rootlog.info(
                    "Raw files have been concatenated successfully.")
            except Exception as e:
                rootlog.error(
                    f"Failed trying to concatenate raw files --> {e}")
                # fall back so ``raw`` is defined for the steps below
                rootlog.info("Loading only the first raw file!")
                raw = mne.io.read_raw(raws[0])
            rootlog.info("Applying SSP projectors on concatenated file...")
            raw.add_proj(all_projs, remove_existing=True)
            raw.apply_proj()
            rootlog.info(f"Saving concatenated rawfile as {concatname}")
            raw.save(concatname)

        # Save in BIDS format
        derivatives_root = opj(bids_root, "derivatives")
        # meg
        bids_path = BIDSPath(subject=ject,
                             session="resting",
                             task="resting",
                             root=derivatives_root,
                             processing="concat")
        raw = mne.io.read_raw(concatname, preload=False)
        write_raw_bids(raw, bids_path, overwrite=True)
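        # (A sketch: the freshly written derivatives tree could be inspected
        # with mne_bids.print_dir_tree(derivatives_root), assuming mne_bids
        # is imported at module level.)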

    # anatomy
    rootlog.info("Running dicom2nifti on MRI...")
    derivatives_root = opj(bids_root, "derivatives")
    # anat
    bids_path = BIDSPath(subject=ject,
                         session="resting",
                         task="resting",
                         root=bids_root,
                         processing="concat")
    nii = glob.glob(opj(input_folder, ject, "*.nii*"))

    try:
        for n in nii:
            write_anat(n, bids_path=bids_path, overwrite=True)
    except Exception as e:
        rootlog.error(f"Conversion of MRI to nift failed --> {e}")

    # Create Dataset
    the_roots = [bids_root, derivatives_root]
    rootlog.info("Creating BIDS dataset...")
    for r in the_roots:
        make_dataset_description(r,
                                 name="CDK Epilepsy Dataset",
                                 data_license="closed",
                                 authors="Rudi Kreidenhuber",
                                 overwrite=True)

# Coregistration --> this doesn't work with WSLg; from here on,
# run on Windows if you are on a Windows machine
    transfile = opj(dfc.ftrans, subject + "-trans.fif")
    rootlog.info("Starting coregistration...")
    if os.path.isfile(transfile):
        rootlog.info(
            f"Skipping coregistration, because a transfile ({transfile}) already exists"
        )
    else:
        print(f"\n\n\n--> Transfile should be called: {transfile}\n\n\n")
        try:
            mne.gui.coregistration(
                subject=subject,
                subjects_dir=dfc.fanat,
                inst=bids_path,
                advanced_rendering=False)  # BIDS: inst=raw.filenames[0])
        except Exception:
            print("Coregistration failed with the BIDS derivatives folder")
            rawfile = opj(dfc.fbase, "ses-resting", "meg", "*concat_meg.fif")
            rawfile = glob.glob(rawfile)[0]
            rootlog.info(
                f"Coregistration with BIDS-file failed, Rawfile used was: {rawfile}"
            )
            mne.gui.coregistration(subject=subject,
                                   subjects_dir=dfc.fanat,
                                   inst=rawfile,
                                   advanced_rendering=False)
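        # (Optional, a sketch: once a trans file exists, the result can be
        # sanity-checked with mne.viz.plot_alignment, e.g.
        # mne.viz.plot_alignment(raw.info, trans=transfile, subject=subject,
        #                        subjects_dir=dfc.fanat, dig=True))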

# frequency spectrum
    if do_frequencies:
        rootlog.info("Calculating frequency spectrum...")
        bem_sol = opj(dfc.fsrc, subject + "-3-layer-BEM-sol.fif")
        if not os.path.isfile(bem_sol) and use_single_shell_model:
            rootlog.warning("Working with a single shell head model")
            bem_sol = opj(dfc.fsrc, subject + "-single-shell-BEM-sol.fif")
        fwd_name = opj(dfc.fsrc, subject + "-fwd.fif")
        srcfilename = opj(dfc.fsrc, subject + "-" + spacing + "-src.fif")
        filebase = str(subject) + "_Freqs"
        all_stcs_filename = (filebase + '-stc-psd-MNE.pkl')
        all_stcs_filename = opj(dfc.freq, all_stcs_filename)
        sensor_psd_filename = (filebase + '-sensor-psd-MNE.pkl')
        sensor_psd_filename = opj(dfc.freq, sensor_psd_filename)
        if not os.path.isfile(all_stcs_filename) or not os.path.isfile(
                sensor_psd_filename
        ):  # so this should run only on the first file..
            # load again in case preprocessing didn't happen before
            concatname = opj(input_folder, str(subject) + "_concat.fif")
            raw = mne.io.read_raw(concatname, preload=True)
            if os.path.isfile(fwd_name):
                fwd = mne.read_forward_solution(fwd_name)
            else:
                fwd = mne.make_forward_solution(raw.info,
                                                src=srcfilename,
                                                bem=bem_sol,
                                                trans=transfile,
                                                meg=True,
                                                eeg=False,
                                                mindist=0.2,
                                                ignore_ref=False,
                                                n_jobs=n_jobs,
                                                verbose=True)
                mne.write_forward_solution(fwd_name, fwd)
            noise_cov = mne.compute_raw_covariance(raw,
                                                   method="empirical",
                                                   n_jobs=n_jobs)
            inv = mne.minimum_norm.make_inverse_operator(raw.info,
                                                         forward=fwd,
                                                         noise_cov=noise_cov,
                                                         loose="auto",
                                                         depth=0.8)
            snr = 3.
            lambda2 = 1. / snr ** 2  # conventional MNE regularization derived from the assumed SNR
            stc_psd, sensor_psd = mne.minimum_norm.compute_source_psd(
                raw,
                inv,
                lambda2=lambda2,
                method='MNE',
                fmin=1,
                fmax=45,
                n_fft=2048,
                n_jobs=n_jobs,
                return_sensor=True,
                verbose=True)
            with open(all_stcs_filename, "wb") as f:
                pickle.dump(stc_psd, f)
            with open(sensor_psd_filename, "wb") as f:
                pickle.dump(sensor_psd, f)
        else:
            with open(all_stcs_filename, "rb") as f:
                stc_psd = pickle.load(f)
            with open(sensor_psd_filename, "rb") as f:
                sensor_psd = pickle.load(f)
        # Visualization
        topos = dict()
        stcs = dict()
        topo_norm = sensor_psd.data.sum(axis=1, keepdims=True)
        stc_norm = stc_psd.sum()
        for band, limits in freq_bands.items():  # normalize...
            data = sensor_psd.copy().crop(*limits).data.sum(axis=1,
                                                            keepdims=True)
            topos[band] = mne.EvokedArray(100 * data / topo_norm,
                                          sensor_psd.info)
            stcs[band] = 100 * stc_psd.copy().crop(
                *limits).sum() / stc_norm.data
        brain = dict()
        x_hemi_freq = dict()
        mne.viz.set_3d_backend('pyvista')
        for band in freq_bands.keys():
            brain[band] = u.plot_freq_band_dors(stcs[band],
                                                band=band,
                                                subject=subject,
                                                subjects_dir=dfc.fanat,
                                                filebase=filebase)
            freqfilename3d = (filebase + '_' + band +
                              '_freq_topomap_3d_dors.png')
            freqfilename3d = os.path.join(dfc.freq, freqfilename3d)
            image = brain[band].save_image(freqfilename3d)
            brain_lh, brain_rh = u.plot_freq_band_lat(stcs[band],
                                                      band=band,
                                                      subject=subject,
                                                      subjects_dir=dfc.fanat,
                                                      filebase=filebase)
            freqfilename3d = (filebase + '_' + band +
                              '_freq_topomap_3d_lat_lh.png')
            freqfilename3d = os.path.join(dfc.freq, freqfilename3d)
            image = brain_lh.save_image(freqfilename3d)
            freqfilename3d = (filebase + '_' + band +
                              '_freq_topomap_3d_lat_rh.png')
            freqfilename3d = os.path.join(dfc.freq, freqfilename3d)
            image = brain_rh.save_image(freqfilename3d)
            brain_lh, brain_rh = u.plot_freq_band_med(stcs[band],
                                                      band=band,
                                                      subject=subject,
                                                      subjects_dir=dfc.fanat,
                                                      filebase=filebase)
            freqfilename3d = (filebase + '_' + band +
                              '_freq_topomap_3d_med_lh.png')
            freqfilename3d = os.path.join(dfc.freq, freqfilename3d)
            image = brain_lh.save_image(freqfilename3d)
            freqfilename3d = (filebase + '_' + band +
                              '_freq_topomap_3d_med_rh.png')
            freqfilename3d = os.path.join(dfc.freq, freqfilename3d)
            image = brain_rh.save_image(freqfilename3d)
            # Cross hemisphere comparison
            # make sure fsaverage_sym exists in local subjects dir:
            rootlog.info(
                f"Calculating cross hemisphere comparison for {band}.")
            target = os.path.join(dfc.fanat, "fsaverage_sym")
            if not os.path.isdir(target):
                # try to find it in $SUBJECTS_DIR and copy
                os_subj_dir = os.environ.get("SUBJECTS_DIR")
                fs_avg_sym_dir = os.path.join(os_subj_dir, "fsaverage_sym")
                u.recursive_overwrite(fs_avg_sym_dir, target)
            if not os.path.isdir(target):
                rootlog.error("fsaverage_sym not found - aborting")
                raise FileNotFoundError("fsaverage_sym not found")
            mstc = stcs[band].copy()
            mstc = mne.compute_source_morph(mstc,
                                            subject,
                                            'fsaverage_sym',
                                            smooth=5,
                                            warn=False,
                                            subjects_dir=dfc.fanat).apply(mstc)
            morph = mne.compute_source_morph(mstc,
                                             'fsaverage_sym',
                                             'fsaverage_sym',
                                             spacing=mstc.vertices,
                                             warn=False,
                                             subjects_dir=dfc.fanat,
                                             xhemi=True,
                                             verbose='error')
            stc_xhemi = morph.apply(mstc)
            diff = mstc - stc_xhemi
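            # ``diff`` holds left-minus-right band power mapped onto the left
            # hemisphere: positive values mean stronger power on the left,
            # negative (blue) values stronger power on the right - matching
            # the plot title below.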
            title = ('blue = RH; ' + subject + ' -Freq-x_hemi- ' + band)
            x_hemi_freq[band] = diff.plot(
                hemi='lh',
                subjects_dir=dfc.fanat,
                size=(1200, 800),
                time_label=title,
                add_data_kwargs=dict(time_label_size=10))
            freqfilename3d = (filebase + '_x_hemi_' + band + '.png')
            freqfilename3d = os.path.join(dfc.freq, freqfilename3d)
            image = x_hemi_freq[band].save_image(freqfilename3d)


# Source localization
    rootlog.info("Now starting source localization...")
    epo_filename = opj(dfc.spikes, str(subject) + "-epo.fif")
    concat_epochs = mne.read_epochs(epo_filename)
    noise_cov_file = opj(dfc.spikes, "Spikes_noise_covariance.pkl")
    srcfilename = opj(dfc.fsrc, subject + "-" + spacing + "-src.fif")
    if not os.path.isfile(noise_cov_file):
        noise_cov = mne.compute_covariance(concat_epochs,
                                           tmax=-1.,
                                           method='auto',
                                           n_jobs=n_jobs,
                                           rank="full")
        with open(noise_cov_file, "wb") as f:
            pickle.dump(noise_cov, f)
    else:
        with open(noise_cov_file, 'rb') as f:
            noise_cov = pickle.load(f)

    rootlog.info(
        f"The following events have been found: \n{list(concat_epochs.event_id.keys())}"
    )
    for event in concat_epochs.event_id.keys():
        eventname = str(event)
        if eventname in ("ignore_me", "AAA") or eventname.startswith("."):
            rootlog.info(f"Omitting event {event}")
        else:
            try:
                rootlog.info(f"Now localizing event: {event}")
                e = concat_epochs[eventname].load_data().crop(
                    tmin=-0.5, tmax=0.5).average()
                e_folder = os.path.join(dfc.spikes, eventname)
                evoked_filename = opj(e_folder,
                                      ject + "_" + eventname + "-ave.fif")
                cp_folder = os.path.join(dfc.spikes, eventname, "custom_pics")
                cts_folder = os.path.join(dfc.spikes, eventname,
                                          "custom_time_series")
                gp_folder = os.path.join(dfc.spikes, eventname, "generic_pics")
                folders = [e_folder, cp_folder, cts_folder, gp_folder]
                if not os.path.isdir(e_folder):
                    for f in folders:
                        os.mkdir(f)
                e.save(evoked_filename)
                src = mne.read_source_spaces(srcfilename)
                bem_sol = opj(dfc.fsrc, subject + "-3-layer-BEM-sol.fif")
                if not os.path.isfile(bem_sol) and use_single_shell_model:
                    bem_sol = opj(dfc.fsrc,
                                  subject + "-single-shell-BEM-sol.fif")

                fwd_name = opj(dfc.fsrc, subject + "-fwd.fif")
                if os.path.isfile(fwd_name):
                    fwd = mne.read_forward_solution(fwd_name)
                else:
                    fwd = mne.make_forward_solution(e.info,
                                                    src=src,
                                                    bem=bem_sol,
                                                    trans=transfile,
                                                    meg=True,
                                                    eeg=False,
                                                    mindist=0.2,
                                                    ignore_ref=False,
                                                    n_jobs=n_jobs,
                                                    verbose=True)
                # inv for cortical surface
                inv = mne.minimum_norm.make_inverse_operator(
                    e.info,
                    forward=fwd,
                    noise_cov=noise_cov,
                    loose=0.2,
                    depth=0.8)
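                # (loose=0.2 keeps dipoles mostly normal to the cortical
                # surface; the volume inverse below uses loose=1 for free
                # orientations, as required for volume source spaces.)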
                # inv with volume source space
                vol_srcfilename = opj(dfc.fsrc, subject + "-vol-src.fif")
                src_vol = mne.read_source_spaces(vol_srcfilename)
                fwd_vol = mne.make_forward_solution(e.info,
                                                    src=src_vol,
                                                    bem=bem_sol,
                                                    trans=transfile,
                                                    meg=True,
                                                    eeg=False,
                                                    mindist=0.2,
                                                    ignore_ref=False,
                                                    n_jobs=n_jobs,
                                                    verbose=True)
                inv_vol = mne.minimum_norm.make_inverse_operator(
                    e.info,
                    forward=fwd_vol,
                    noise_cov=noise_cov,
                    loose=1,
                    depth=0.8)

                # Distributed source models
                for m in source_loc_methods:
                    stc_name = 'stc_' + m
                    if m == 'dSPM':
                        # calculate vector solution in volume source space
                        try:
                            rootlog.info(
                                "Now calculating dSPM vector solution...")
                            stc_name = mne.minimum_norm.apply_inverse(
                                e,
                                inv_vol,
                                lambda2,
                                method='dSPM',
                                pick_ori='vector')
                            surfer_kwargs = dict(
                                subjects_dir=dfc.fanat,  # hemi='split', 
                                clim=dict(kind='percent', lims=[90, 96,
                                                                99.85]),
                                views=['lat', 'med'],
                                colorbar=True,
                                initial_time=0,
                                time_unit='ms',
                                size=(1000, 800),
                                smoothing_steps=10)
                            brain = stc_name.plot(**surfer_kwargs)
                            label = str(ject + " - " + eventname +
                                        " - Vector solution")
                            brain.add_text(0.1,
                                           0.9,
                                           label,
                                           'title',
                                           font_size=10)
                            img_f_name = ('img_stc_' + ject + '_' + eventname +
                                          '_' + m + '.png')
                            img_f_name = os.path.join(gp_folder, img_f_name)
                            brain.save_image(img_f_name)
                            stc_f_name = ("stc_" + ject + '_' + eventname +
                                          '_' + m + ".h5")
                            stc_f_name = os.path.join(e_folder, stc_f_name)
                            stc_name = stc_name.crop(tmin=stc_tmin,
                                                     tmax=stc_tmax)
                            rootlog.info("Saving dSPM vector solution.")
                            stc_name.save(stc_f_name)
                        except Exception as ex:
                            rootlog.error(f"dSPM failed --> {ex}")
                    else:
                        stc_name = mne.minimum_norm.apply_inverse(
                            e, inv, lambda2, method=m, pick_ori=None)
                        surfer_kwargs = dict(hemi='split',
                                             subjects_dir=dfc.fanat,
                                             clim=dict(kind='percent',
                                                       lims=[90, 96, 99.85]),
                                             views=['lat', 'med'],
                                             colorbar=True,
                                             initial_time=0,
                                             time_unit='ms',
                                             size=(1000, 800),
                                             smoothing_steps=10)
                        brain = stc_name.plot(**surfer_kwargs)
                        label = str(ject + " - " + eventname + " - " + m)
                        brain.add_text(0.1, 0.9, label, 'title', font_size=10)
                        img_f_name = ('img_stc_' + ject + '_' + eventname +
                                      '_' + m + '.png')
                        img_f_name = os.path.join(gp_folder, img_f_name)
                        brain.save_image(img_f_name)
                        stc_f_name = ('stc_' + ject + '_' + eventname + '_' +
                                      m)
                        stc_f_name = os.path.join(e_folder, stc_f_name)
                        stc_name = stc_name.crop(tmin=stc_tmin, tmax=stc_tmax)
                        rootlog.info("Saving eLORETA.")
                        stc_name.save(stc_f_name)
                        if m == "eLORETA":
                            try:
                                rootlog.info(
                                    "Now calculating eLORETA with peaks...")
                                rh_peaks = u.get_peak_points(
                                    stc_name,
                                    hemi='rh',
                                    tmin=peaks_tmin,
                                    tmax=peaks_tmax,
                                    nr_points=peaks_nr_of_points,
                                    mode=peaks_mode)
                                lh_peaks = u.get_peak_points(
                                    stc_name,
                                    hemi='lh',
                                    tmin=peaks_tmin,
                                    tmax=peaks_tmax,
                                    nr_points=peaks_nr_of_points,
                                    mode=peaks_mode)
                                label = str(ject + " - " + eventname + " - " +
                                            m + " - max. activation points")
                                brain.add_text(0.1, 0.9, label, font_size=10)
                                for p in rh_peaks:
                                    brain.add_foci(p,
                                                   color='green',
                                                   coords_as_verts=True,
                                                   hemi='rh',
                                                   scale_factor=0.6,
                                                   alpha=0.9)
                                for p in lh_peaks:
                                    brain.add_foci(p,
                                                   color='green',
                                                   coords_as_verts=True,
                                                   hemi='lh',
                                                   scale_factor=0.6,
                                                   alpha=0.9)
                                stc_f_name = ('stc_' + ject + '_' + eventname +
                                              '_' + m + "_with_peaks-ave")
                                stc_f_name = os.path.join(e_folder, stc_f_name)
                                stc_name.save(stc_f_name)
                                img_f_name = ('img_stc_' + ject + '_' +
                                              eventname + '_' + m +
                                              '_with_peaks.png')
                                img_f_name = os.path.join(
                                    gp_folder, img_f_name)
                                brain.save_image(img_f_name)
                            except Exception as ex:
                                rootlog.error(
                                    f"eLORETA with peaks failed --> {ex}")
                # Dipoles
                rootlog.info("Now calculating ECD.")
                try:
                    for start, stop in dip_times.values():
                        dip_epoch = e.copy().crop(start, stop).pick('meg')
                        ecd = mne.fit_dipole(dip_epoch,
                                             noise_cov,
                                             bem_sol,
                                             trans=transfile)[0]
                        best_idx = np.argmax(ecd.gof)
                        best_time = ecd.times[best_idx]
                        trans = mne.read_trans(transfile)
                        mri_pos = mne.head_to_mri(ecd.pos,
                                                  mri_head_t=trans,
                                                  subject=subject,
                                                  subjects_dir=dfc.fanat)
                        t1_file_name = os.path.join(dfc.fanat, subject, 'mri',
                                                    'T1.mgz')
                        stoptime = str(abs(int(stop * int(e.info["sfreq"]))))
                        if stoptime == "5":
                            stoptime = "05"
                        title = str(eventname + ' - ECD @ minus ' + stoptime +
                                    ' ms')
                        t1_fig = plot_anat(t1_file_name,
                                           cut_coords=mri_pos[0],
                                           title=title)
                        t1_f_name_pic = ('img_ecd_' + eventname + '_Dipol_' +
                                         stoptime + '.png')
                        t1_f_name_pic = os.path.join(e_folder, "generic_pics",
                                                     t1_f_name_pic)
                        t1_fig.savefig(t1_f_name_pic)
                        fig_3d = ecd.plot_locations(trans,
                                                    subject,
                                                    dfc.fanat,
                                                    mode="orthoview")
                        fig_3d_pic = ('img_3d_ecd_' + eventname + '_Dipol_' +
                                      stoptime + '.png')
                        fig_3d_pic = os.path.join(e_folder, "generic_pics",
                                                  fig_3d_pic)
                        fig_3d.savefig(fig_3d_pic)
                        plt.close("all")
                except Exception as ex:
                    rootlog.error(f"ECD calculation failed --> {ex}")
            except Exception as ex:
                rootlog.error(f"Source localization failed because of:\n {ex}")

    # Create report
    if do_report:
        reporter = Reporter.EpilepsyReportBuilder(
            derivatives_root=derivatives_root,
            subject=subject,
            extras_dir=extras_directory)
        reporter.create_report()

    # Last words
    logging.info("Finished SourceLocPipeline.")
    print("SourceLocPipeline completed!")
Ejemplo n.º 15
0
# (the start of this snippet is truncated; it presumably builds a BIDSPath for
# the T1 image, e.g., assuming ``sub``, ``ses`` and ``output_path`` are
# defined earlier:)
t1w_bids_path = BIDSPath(subject=sub, session=ses, root=output_path,
                         suffix='T1w')

# use ``trans`` to transform landmarks from the ``raw`` file to
# the voxel space of the image
landmarks = get_anat_landmarks(
    t1_fname,  # path to the MRI scan
    info=raw.info,  # the MEG data file info from the same subject as the MRI
    trans=trans,  # our transformation matrix
    fs_subject='sample',  # FreeSurfer subject
    fs_subjects_dir=fs_subjects_dir,  # FreeSurfer subjects directory
)

# We use the write_anat function
t1w_bids_path = write_anat(
    image=t1_fname,  # path to the MRI scan
    bids_path=t1w_bids_path,
    landmarks=landmarks,  # the landmarks in MRI voxel space
    verbose=True  # this will print out the sidecar file
)
anat_dir = t1w_bids_path.directory

# %%
# Let's have another look at our BIDS directory
print_dir_tree(output_path)

# %%
# Our BIDS dataset is now ready to be shared. We can easily estimate the
# transformation matrix using ``MNE-BIDS`` and the BIDS dataset.
# This function converts the anatomical landmarks stored in the T1 sidecar
# file into FreeSurfer surface RAS space, and aligns the landmarks in the
# electrophysiology data with them. This way your electrophysiology channel
# locations can be transformed to surface RAS space using the ``trans`` that
# is returned.
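
# %%
# A minimal sketch of that estimation (assuming ``bids_path`` points at the
# MEG recording of the same subject in this dataset; the FreeSurfer names
# follow the variables used above):
from mne_bids import get_head_mri_trans

estim_trans = get_head_mri_trans(bids_path=bids_path,
                                 fs_subject='sample',
                                 fs_subjects_dir=fs_subjects_dir)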
Ejemplo n.º 16
0
# overwrite the json, updating it
with open(dataset_description_json, 'w') as fout:
    json.dump(jsondict, fout, indent=2, sort_keys=True)

# Write anatomical data to BIDS
# we take the original T1 from the FreeSurfer directory
t1w = op.join(somato_path, 'subjects', 'somato', 'mri', 'T1.mgz')

# we also take the trans file, and use it to write the coordinates of
# anatomical landmarks to a T1w.json file
trans = op.join(somato_path, 'MEG', 'somato', 'sef_raw_sss-trans.fif')

# Copy over the MRI, convert it to NIfTI format, and write the anatomical
# landmarks in voxel coordinates
anat_dir = write_anat(somato_path_bids, subject='01', t1w=t1w, trans=trans,
                      raw=raw, overwrite=True, verbose=True)
t1w_nii = op.join(anat_dir, 'sub-01_T1w.nii.gz')

# Add derivatives
# not all of this is defined in BIDS as of yet
# General derivatives and FreeSurfer directory names
derivatives_dir = op.join(somato_path_bids, 'derivatives')
subjects_dir_bids = op.join(derivatives_dir, 'freesurfer', 'subjects')

# Export the FreeSurfer SUBJECTS_DIR as environment variable, available to
# this process and all subprocesses
os.environ['SUBJECTS_DIR'] = subjects_dir_bids

# Run recon-all from FreeSurfer
run_subprocess(['recon-all', '-i', t1w_nii, '-s', '01', '-all'])
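
# recon-all runs for several hours; afterwards a quick sanity check could look
# like this (a sketch - the path layout follows the derivatives defined above):
t1_out = op.join(subjects_dir_bids, '01', 'mri', 'T1.mgz')
print('recon-all output present:', op.isfile(t1_out))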