Example #1
def _nyu_rest_factory(session=1):
    from pypreprocess.nipype_preproc_spm_utils import SubjectData

    nyu_data = fetch_nyu_rest(sessions=[session], n_subjects=7)

    session_func = [x for x in nyu_data.func if "session%i" % session in x]
    session_anat = [
        x for x in nyu_data.anat_skull if "session%i" % session in x]

    for subject_id in set([os.path.basename(
            os.path.dirname(os.path.dirname(x))) for x in session_func]):
        # instantiate subject_data object
        subject_data = SubjectData()
        subject_data.subject_id = subject_id
        subject_data.session_id = session

        # set func
        subject_data.func = [x for x in session_func if subject_id in x]
        assert len(subject_data.func) == 1
        subject_data.func = subject_data.func[0]

        # set anat
        subject_data.anat = [x for x in session_anat if subject_id in x]
        assert len(subject_data.anat) == 1
        subject_data.anat = subject_data.anat[0]

        # set subject output directory
        subject_data.output_dir = "/tmp/%s" % subject_id

        subject_data.sanitize(deleteorient=True, niigz2nii=False)

        yield (subject_data.subject_id, subject_data.func[0],
               subject_data.anat)
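
# Usage sketch (added here for illustration; not part of the original example):
# the factory is a plain generator, so a caller simply iterates over the
# yielded (subject_id, func, anat) tuples.
if __name__ == "__main__":
    for subject_id, func, anat in _nyu_rest_factory(session=1):
        print("subject %s: func=%s, anat=%s" % (subject_id, func, anat))
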
def subject_factory():
    """
    Producer for HAXBY subject data.

    """

    for subject_id in set(
        [os.path.basename(os.path.dirname(x)) for x in haxby_data.func]):

        # instantiate subject_data object
        subject_data = SubjectData()
        subject_data.subject_id = subject_id
        subject_data.session_id = "haxby2001"

        # set func
        subject_data.func = [x for x in haxby_data.func if subject_id in x]

        assert len(subject_data.func) == 1
        subject_data.func = subject_data.func[0]

        # set anat
        subject_data.anat = [x for x in haxby_data.anat if subject_id in x]
        assert len(subject_data.anat) == 1
        subject_data.anat = subject_data.anat[0]

        # set subject output directory
        subject_data.output_dir = os.path.join(OUTPUT_DIR,
                                               subject_data.subject_id)

        yield subject_data
def _nyu_rest_factory(session=1):
    from pypreprocess.nipype_preproc_spm_utils import SubjectData

    nyu_data = fetch_nyu_rest(sessions=[session], n_subjects=7)

    session_func = [x for x in nyu_data.func if "session%i" % session in x]
    session_anat = [
        x for x in nyu_data.anat_skull if "session%i" % session in x]

    for subject_id in set([os.path.basename(
            os.path.dirname(os.path.dirname(x))) for x in session_func]):
        # instantiate subject_data object
        subject_data = SubjectData()
        subject_data.subject_id = subject_id
        subject_data.session_id = session

        # set func
        subject_data.func = [x for x in session_func if subject_id in x]
        assert len(subject_data.func) == 1
        subject_data.func = subject_data.func[0]

        # set anat
        subject_data.anat = [x for x in session_anat if subject_id in x]
        assert len(subject_data.anat) == 1
        subject_data.anat = subject_data.anat[0]

        # set subject output directory
        subject_data.output_dir = "/tmp/%s" % subject_id

        subject_data.sanitize(deleteorient=True, niigz2nii=False)

        yield (subject_data.subject_id, subject_data.func[0],
               subject_data.anat)
def subject_factory():
    """
    Producer for HAXBY subject data.

    """

    for subject_id in set(
        [os.path.basename(os.path.dirname(x)) for x in haxby_data.func]):

        # instantiate subject_data object
        subject_data = SubjectData()
        subject_data.subject_id = subject_id
        subject_data.session_id = "haxby2001"

        # set func
        subject_data.func = [x for x in haxby_data.func if subject_id in x]

        assert len(subject_data.func) == 1
        subject_data.func = subject_data.func[0]

        # set anat
        subject_data.anat = [x for x in haxby_data.anat if subject_id in x]
        assert len(subject_data.anat) == 1
        subject_data.anat = subject_data.anat[0]

        # set subject output directory
        subject_data.output_dir = os.path.join(OUTPUT_DIR,
                                               subject_data.subject_id)

        yield subject_data
    def subject_factory():
        for subject_id in subject_ids:
            if subject_id in exclusions:
                continue

            # construct subject data structure
            subject_data = SubjectData()
            subject_data.session_id = session_ids
            subject_data.subject_id = subject_id
            subject_data.func = []

            # glob for bold data
            has_bad_sessions = False
            for session_id in subject_data.session_id:
                bold_dir = os.path.join(
                    data_dir,
                    "%s/BOLD/%s" % (subject_id, session_id))

                # # extract .nii.gz to .nii
                # unzip_nii_gz(bold_dir)

                # glob bold data for this session
                func = glob.glob(os.path.join(bold_dir, "bold.nii.gz"))

                # check that this session is OK (has bold data, etc.)
                if not func:
                    has_bad_sessions = True
                    break

                subject_data.func.append(func[0])

            # exclude subject if necessary
            if has_bad_sessions:
                continue

            # glob for anatomical data
            # anat_dir = os.path.join(
            #     data_dir,
            #     "%s/anatomy" % subject_id)

            # # extract .nii.gz to .nii
            # unzip_nii_gz(anat_dir)

            # glob anatomical data proper
            subject_data.anat = glob.glob(
                os.path.join(
                    data_dir,
                    "%s/anatomy/highres001_brain.nii.gz" % subject_id))[0]

            # set subject output dir (all calculations for
            # this subject go here)
            subject_data.output_dir = os.path.join(
                    output_dir,
                    subject_id)

            yield subject_data
Example #6
def to_T2space(t2_img, t1_img, output_dir):
    """
    Wrapper for pypreprocess's coregister function,
    used here to coregister the T1 image to the T2 image.
    """
    data = SubjectData()
    data.anat = t1_img
    data.func = [t2_img]
    data.output_dir = output_dir
    coreged = _do_subject_coregister(data,
                                     caching=False,
                                     hardlink_output=False,
                                     coreg_anat_to_func=True)
    return coreged
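
# Usage sketch (added; the paths below are hypothetical placeholders): bring a
# T1 image into the space of a T2 image, writing intermediate files under the
# given output directory.
coregistered = to_T2space("/tmp/t2.nii", "/tmp/t1.nii", "/tmp/coreg_output")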
def _abide_factory(institute="KKI"):
    for scans in sorted(glob.glob(
            "/home/elvis/CODE/datasets/ABIDE/%s_*/%s_*/scans" % (
                institute, institute))):
        subject_data = SubjectData()
        subject_data.subject_id = os.path.basename(os.path.dirname(
                os.path.dirname(scans)))
        subject_data.func = os.path.join(scans,
                                         "rest/resources/NIfTI/files/rest.nii")
        subject_data.anat = os.path.join(
            scans, "anat/resources/NIfTI/files/mprage.nii")
        subject_data.output_dir = os.path.join(ABIDE_OUTPUT_DIR,
                                               subject_data.subject_id)

        yield subject_data
def _abide_factory(institute="KKI"):
    for scans in sorted(
            glob.glob("/home/elvis/CODE/datasets/ABIDE/%s_*/%s_*/scans" %
                      (institute, institute))):
        subject_data = SubjectData()
        subject_data.subject_id = os.path.basename(
            os.path.dirname(os.path.dirname(scans)))
        subject_data.func = os.path.join(
            scans, "rest/resources/NIfTI/files/rest.nii")
        subject_data.anat = os.path.join(
            scans, "anat/resources/NIfTI/files/mprage.nii")
        subject_data.output_dir = os.path.join(ABIDE_OUTPUT_DIR,
                                               subject_data.subject_id)

        yield subject_data
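
# Usage sketch (added, mirroring how other examples on this page feed
# SubjectData objects into do_subjects_preproc; ABIDE_OUTPUT_DIR is assumed to
# be defined as above):
results = do_subjects_preproc(list(_abide_factory("KKI")),
                              output_dir=ABIDE_OUTPUT_DIR,
                              dataset_id="ABIDE KKI")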
Example #9
    def subject_factory():
        for subject_id in subject_ids:
            subject_data = SubjectData()
            subject_data.subject_id = subject_id

            try:
                subject_data.func = glob.glob(
                    os.path.join(
                        abide_data_dir,
                        "%s/%s/scans/rest*/resources/NIfTI/files/rest.nii" % (
                            subject_id, subject_id)))[0]
            except IndexError:
                ignored_because = "no rest data found"
                print("Ignoring subject %s (%s)" % (subject_id,)
                                                    ignored_because)
                ignored_subject_ids.append((subject_id, ignored_because))
                continue

            try:
                subject_data.anat = glob.glob(
                    os.path.join(
                        abide_data_dir,
                        "%s/%s/scans/anat/resources/NIfTI/files/mprage.nii" % (
                            subject_id, subject_id)))[0]
            except IndexError:
                if do_dartel:
                    # can't do DARTEL under such conditions
                    continue

                try:
                    subject_data.hires = glob.glob(
                        os.path.join(
                            abide_data_dir,
                            ("%s/%s/scans/hires/resources/NIfTI/"
                             "files/hires.nii") % (subject_id, subject_id)))[0]
                except IndexError:
                    ignored_because = "no anat/hires data found"
                    print("Ignoring subject %s (%s)" % (subject_id,)
                                                        ignored_because)
                    ignored_subject_ids.append((subject_id, ignored_because))
                    continue

            subject_data.output_dir = os.path.join(
                institute_output_dir, subject_id)

            # yield data for this subject
            yield subject_data
def subject_factory():
    anats = glob.glob(
        os.path.join(data_dir, 'sub*', 'ses-*', 'anat',
                     'sub-*_ses-*_acq-highres_T1w.nii'))
    subject_sessions = [(anat.split('/')[-4], anat.split('/')[-3])
                        for anat in anats]
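    # NOTE: the hard-coded pair below overrides the globbed subject/session
    # list and restricts processing to a single session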
    subject_sessions = [('sub-01', 'ses-12')]
    for subject_session in subject_sessions:
        subject, session = subject_session
        subject_data = SubjectData(isdicom=False,
                                   scratch=scratch,
                                   session_output_dirs=[],
                                   n_sessions=0)
        subject_data.subject_id = subject
        subject_data.anat = os.path.join(
            data_dir, subject, session, 'anat',
            '%s_%s_acq-highres_T1w.nii' % (subject, session))
        subject_data.func = []
        subject_data.output_dir = os.path.join(data_dir, subject, session,
                                               'anat', 'dartel')
        # yield data for this subject
        yield subject_data
                    # do_report=False,
                    # do_dartel=True
                    )

if 0x0:
    for (with_anat, do_segment, do_normalize,
         fwhm, hard_link_output) in itertools.product(
        [False, True], [False, True], [False, True], [0, 8, [8, 8, 8]],
        [False, True]):
        # load spm auditory data

        sd = fetch_spm_auditory_data(os.path.join(
                os.environ['HOME'], 'CODE/datasets/spm_auditory'))
        subject_data1 = SubjectData(func=[sd.func],
                                    anat=sd.anat if with_anat else None)
        subject_data1.output_dir = "/tmp/kimbo/sub001/"

        # load spm multimodal fmri data
        sd = fetch_spm_multimodal_fmri_data(os.path.join(
                os.environ['HOME'], 'CODE/datasets/spm_multimodal_fmri'))
        subject_data2 = SubjectData(func=[sd.func1, sd.func2],
                                    anat=sd.anat if with_anat else None,
                                   session_id=['Session 1', "Session 2"])
        subject_data2.output_dir = "/tmp/kiki/sub001/"

        do_subjects_preproc([subject_data1, subject_data2],
                            do_dartel=True,
                            do_segment=do_segment,
                            do_normalize=do_normalize,
                            fwhm=fwhm,
                            hard_link_output=hard_link_output)
                                   paradigm=paradigm,
                                   hrf_model=hrf_model,
                                   drift_model=drift_model,
                                   period_cut=hfcut)

"""fetch input data"""
_subject_data = fetch_fsl_feeds()
subject_data = SubjectData()
subject_data.subject_id = "sub001"
subject_data.func = _subject_data.func
subject_data.anat = _subject_data.anat

output_dir = os.path.join(_subject_data.data_dir, "pypreprocess_output")
if not os.path.exists(output_dir):
    os.makedirs(output_dir)
subject_data.output_dir = os.path.join(
    output_dir, subject_data.subject_id)



"""preprocess the data"""
results = do_subjects_preproc(
    [subject_data],
    output_dir=output_dir,
    dataset_id="FSL FEEDS single-subject",
    dataset_description=DATASET_DESCRIPTION,
    do_shutdown_reloaders=False,
    )

"""collect preprocessed data"""
fmri_files = results[0]['func']
anat_file = results[0]['anat']
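
# Sanity-check sketch (added; not part of the original example): load the
# collected outputs with nibabel to confirm the preprocessed images are
# readable.
import nibabel as nib
print(nib.load(anat_file).shape)
for f in (fmri_files if isinstance(fmri_files, list) else [fmri_files]):
    print(nib.load(f).shape)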
    subject_data = SubjectData()
    subject_data.subject_id = subject_id
    subject_data.session_id = "haxby2001"

    # set func
    subject_data.func = [x for x in haxby_data.func if subject_id in x]

    assert len(subject_data.func) == 1
    subject_data.func = subject_data.func[0]

    # set anat
    subject_data.anat = [x for x in haxby_data.anat if subject_id in x]
    assert len(subject_data.anat) == 1
    subject_data.anat = subject_data.anat[0]

    # set subject output directory
    subject_data.output_dir = os.path.join(OUTPUT_DIR, subject_data.subject_id)

    # add this subject to list
    subjects.append(subject_data)

# do preprocessing proper
results = do_subjects_preproc(subjects,
                              output_dir=OUTPUT_DIR,
                              dataset_id="HAXBY 2001",
                              realign=False,
                              coregister=False,
                              dartel=DARTEL,
                              tsdiffana=False,
                              dataset_description=DATASET_DESCRIPTION)
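
# Follow-up sketch (added): do_subjects_preproc returns one result dict per
# subject (see the FSL FEEDS example above), so the preprocessed files can be
# collected the same way, e.g.:
preprocessed_funcs = [res['func'] for res in results]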
Example #14
                                   paradigm=paradigm,
                                   hrf_model=hrf_model,
                                   drift_model=drift_model,
                                   period_cut=hfcut)

"""fetch input data"""
_subject_data = fetch_fsl_feeds()
subject_data = SubjectData()
subject_data.subject_id = "sub001"
subject_data.func = _subject_data.func
subject_data.anat = _subject_data.anat

output_dir = os.path.join(_subject_data.data_dir, "pypreprocess_output")
if not os.path.exists(output_dir):
    os.makedirs(output_dir)
subject_data.output_dir = os.path.join(
    output_dir, subject_data.subject_id)



"""preprocess the data"""
results = do_subjects_preproc(
    [subject_data],
    output_dir=output_dir,
    dataset_id="FSL FEEDS single-subject",
    dataset_description=DATASET_DESCRIPTION,
    do_shutdown_reloaders=False,
    )

"""collect preprocessed data"""
fmri_files = results[0]['func']
anat_file = results[0]['anat']
                       for x in haxby_data.func]):
    # instantiate subject_data object
    subject_data = SubjectData()
    subject_data.subject_id = subject_id
    subject_data.session_id = "haxby2001"

    # set func
    subject_data.func = [x for x in haxby_data.func if subject_id in x]

    assert len(subject_data.func) == 1
    subject_data.func = subject_data.func[0]

    # set anat
    subject_data.anat = [x for x in haxby_data.anat if subject_id in x]
    assert len(subject_data.anat) == 1
    subject_data.anat = subject_data.anat[0]

    # set subject output directory
    subject_data.output_dir = os.path.join(OUTPUT_DIR,
                                           subject_data.subject_id)

    # add this subject to list
    subjects.append(subject_data)

# do preprocessing proper
results = do_subjects_preproc(subjects, output_dir=OUTPUT_DIR,
                              dataset_id="HAXBY 2001", realign=False,
                              coregister=False, dartel=DARTEL,
                              tsdiffana=False,
                              dataset_description=DATASET_DESCRIPTION)
    # do_dartel=True
)

if 0x0:
    for (with_anat, do_segment, do_normalize, fwhm,
         hard_link_output) in itertools.product([False, True], [False, True],
                                                [False, True],
                                                [0, 8, [8, 8, 8]],
                                                [False, True]):
        # load spm auditory data

        sd = fetch_spm_auditory_data(
            os.path.join(os.environ['HOME'], 'CODE/datasets/spm_auditory'))
        subject_data1 = SubjectData(func=[sd.func],
                                    anat=sd.anat if with_anat else None)
        subject_data1.output_dir = "/tmp/kimbo/sub001/"

        # load spm multimodal fmri data
        sd = fetch_spm_multimodal_fmri_data(
            os.path.join(os.environ['HOME'],
                         'CODE/datasets/spm_multimodal_fmri'))
        subject_data2 = SubjectData(func=[sd.func1, sd.func2],
                                    anat=sd.anat if with_anat else None,
                                    session_id=['Session 1', "Session 2"])
        subject_data2.output_dir = "/tmp/kiki/sub001/"

        do_subjects_preproc([subject_data1, subject_data2],
                            do_dartel=True,
                            do_segment=do_segment,
                            do_normalize=do_normalize,
                            fwhm=fwhm,