Example #1
def s3_1_start(params, volumes, inputs=[]):
    from subscripts.utilities import run,smart_remove,smart_mkdir,write,record_start,add_binary_vol,sub_binary_vol
    from os.path import exists,join
    from shutil import copyfile
    sdir = params['sdir']
    use_gpu = params['use_gpu']
    stdout = params['stdout']
    record_start(params)
    if use_gpu:
        write(stdout, "Running Probtrackx with GPU")
    else:
        write(stdout, "Running Probtrackx without GPU")
    outdir = join(sdir, 'fast_outdir')
    smart_remove(outdir)
    smart_mkdir(outdir)

    EDI_allvols = join(sdir,"EDI","allvols")
    allvoxelscortsubcort = join(sdir,"allvoxelscortsubcort.nii.gz")
    terminationmask = join(sdir,"terminationmask.nii.gz")
    bs = join(sdir,"bs.nii.gz")
    exclusion = join(outdir, "exclusion.nii.gz")
    termination = join(outdir, "termination.nii.gz")
    copyfile(allvoxelscortsubcort, exclusion)
    copyfile(terminationmask, termination)
    for vol in volumes:
        vol_file = join(EDI_allvols, vol + "_s2fa.nii.gz")
        if not exists(vol_file):
            raise Exception('Failed to find volume {}'.format(vol_file))
        sub_binary_vol(vol_file, exclusion, params)
        add_binary_vol(vol_file, termination, params)
    run("fslmaths {} -add {} {}".format(exclusion, bs, exclusion), params)
Example #2
def s2a_bedpostx(params, inputs=[]):
    import time
    from subscripts.utilities import run,smart_mkdir,smart_remove,write,record_start,record_apptime,record_finish,update_permissions,validate
    from os.path import exists,join,split
    from shutil import copyfile,rmtree
    sdir = params['sdir']
    stdout = params['stdout']
    container = params['container']
    cores_per_task = params['cores_per_task']
    use_gpu = params['use_gpu']
    group = params['group']
    record_start(params)
    start_time = time.time()
    bedpostx = join(sdir,"bedpostx_b1000")
    bedpostxResults = join(sdir,"bedpostx_b1000.bedpostX")
    th1 = join(bedpostxResults, "merged_th1samples")
    ph1 = join(bedpostxResults, "merged_ph1samples")
    th2 = join(bedpostxResults, "merged_th2samples")
    ph2 = join(bedpostxResults, "merged_ph2samples")
    dyads1 = join(bedpostxResults, "dyads1")
    dyads2 = join(bedpostxResults, "dyads2")
    brain_mask = join(bedpostxResults, "nodif_brain_mask")
    if exists(bedpostxResults):
        rmtree(bedpostxResults)
    smart_mkdir(bedpostx)
    smart_mkdir(bedpostxResults)
    copyfile(join(sdir,"data_eddy.nii.gz"),join(bedpostx,"data.nii.gz"))
    copyfile(join(sdir,"data_bet_mask.nii.gz"),join(bedpostx,"nodif_brain_mask.nii.gz"))
    copyfile(join(sdir,"bvals"),join(bedpostx,"bvals"))
    copyfile(join(sdir,"bvecs"),join(bedpostx,"bvecs"))

    if use_gpu:
        write(stdout, "Running Bedpostx with GPU")
        bedpostx_sh = join(sdir, "bedpostx.sh")
        smart_remove(bedpostx_sh)
        odir = split(sdir)[0]
        write(bedpostx_sh, "export CUDA_LIB_DIR=$CUDA_8_LIB_DIR\n" +
                           "export LD_LIBRARY_PATH=$CUDA_LIB_DIR:$LD_LIBRARY_PATH")
        if container:
            write(bedpostx_sh, "bedpostx_gpu {} -NJOBS 4".format(bedpostx.replace(odir, "/share")))
        else:
            write(bedpostx_sh, "bedpostx_gpu {} -NJOBS 4".format(bedpostx))
        run("sh " + bedpostx_sh, params)
        # Validate the GPU run: the bedpostx_gpu log should report all four parts processed
        with open(stdout) as f:
            log_content = f.read()
            for i in range(1, 5):
                assert "{:d} parts processed out of 4".format(i) in log_content, \
                    "bedpostx_gpu did not report part {} finished".format(i)
    else:
        write(stdout, "Running Bedpostx without GPU")
        run("bedpostx {}".format(bedpostx), params)
    run("make_dyadic_vectors {} {} {} {}".format(th1,ph1,brain_mask,dyads1), params)
    run("make_dyadic_vectors {} {} {} {}".format(th2,ph2,brain_mask,dyads2), params)
    validate(th1, params)
    validate(ph1, params)
    validate(dyads1, params)
    update_permissions(params)
    record_apptime(params, start_time, 1)
    record_finish(params)
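
Note the container branch above: before the generated bedpostx.sh is executed, host paths under the output directory are rewritten to the container's /share bind mount (bedpostx.replace(odir, "/share")). A minimal sketch of that translation as a reusable helper (map_to_container is a hypothetical name; the pipeline does this inline):

def map_to_container(path, odir, bind_point="/share"):
    # Rewrite a host path under odir to its container-side location
    return path.replace(odir, bind_point)

# e.g. map_to_container("/data/subj01/bedpostx_b1000", "/data")
# returns "/share/subj01/bedpostx_b1000"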
Example #3
def s2b_1_recon_all(params, inputs=[]):
    import time
    from copy import deepcopy
    from subscripts.utilities import run, smart_mkdir, smart_remove, write, record_apptime, record_start, copy_dir
    from os import environ
    from os.path import exists, join, split
    # work_sdir = params['work_sdir']
    # if work_sdir:
    #     old_sdir = params['sdir']
    #     copy_dir(old_sdir, work_sdir)
    #     params = deepcopy(params) # don't modify original param dict
    #     params['sdir'] = work_sdir
    sdir = params['sdir']
    stdout = params['stdout']
    container = params['container']
    cores_per_task = params['cores_per_task']
    use_gpu = params['use_gpu']
    group = params['group']
    subject = split(sdir)[1]
    record_start(params)
    start_time = time.time()
    T1 = join(sdir, "T1.nii.gz")
    if not exists(T1):
        raise Exception('Missing T1 file at {}'.format(T1))
    mri_out = join(sdir, "mri", "orig", "001.mgz")
    smart_mkdir(join(sdir, "mri"))
    smart_mkdir(join(sdir, "mri", "orig"))
    run("mri_convert {} {}".format(T1, mri_out), params)

    if not container:
        environ['SUBJECTS_DIR'] = split(sdir)[0]
    else:
        pass  # SUBJECTS_DIR already set to /share in recipe at [REPO]/container/Singularity

    if use_gpu:
        write(
            stdout,
            "Running Freesurfer with GPU and {} cores".format(cores_per_task))
        freesurfer_sh = join(sdir, "freesurfer.sh")
        smart_remove(freesurfer_sh)
        write(
            freesurfer_sh, "export CUDA_LIB_DIR=$CUDA_5_LIB_DIR\n" +
            "export LD_LIBRARY_PATH=$CUDA_LIB_DIR:$LD_LIBRARY_PATH\n" +
            "recon-all -s {} -all -notal-check -no-isrunning -use-gpu -parallel -openmp {}"
            .format(subject, cores_per_task))
        run("sh " + freesurfer_sh, params)
    elif cores_per_task > 1:
        write(stdout,
              "Running Freesurfer with {} cores".format(cores_per_task))
        run(
            "recon-all -s {} -all -notal-check -no-isrunning -parallel -openmp {}"
            .format(subject, cores_per_task), params)
    else:
        write(stdout, "Running Freesurfer with a single core")
        run("recon-all -s {} -all -notal-check -no-isrunning".format(subject),
            params)
    record_apptime(params, start_time, 1)
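
The params/inputs=[] signatures follow the Parsl app convention: each step is wrapped as a python_app, imports its dependencies inside the function body so it can be serialized to a worker, and is ordered through the futures passed in via inputs. A minimal wiring sketch under that assumption (the app names and config are illustrative, not the pipeline's actual driver code):

import parsl
from parsl import python_app

@python_app
def s2b_1_recon_all_app(params, inputs=[]):
    # Body imports happen here so Parsl can ship the function to a worker
    from subscripts.utilities import record_start
    record_start(params)

# parsl.load(config)                              # cluster-specific config
# f1 = s2a_bedpostx_app(params)                   # runs first
# f2 = s2b_1_recon_all_app(params, inputs=[f1])   # waits on f1's future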
Example #4
def s3_1_start(params, inputs=[]):
    from subscripts.utilities import record_start, write
    use_gpu = params['use_gpu']
    stdout = params['stdout']
    record_start(params)
    if use_gpu:
        write(stdout, "Initializing Probtrackx with GPU")
    else:
        write(stdout, "Initializing Probtrackx without GPU")
Example #5
def s_1_debug(params, inputs=[]):
    import time
    from subscripts.utilities import run, record_start, record_apptime, write
    record_start(params)
    start_time = time.time()
    sdir = params['sdir']
    container = params['container']
    if container:
        run(
            "echo 'Testing Singularity on compute node\nShare dir is {}'".
            format(sdir), params)
    time.sleep(10)
    record_apptime(params, start_time, 1)
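
run is the shell wrapper used throughout these steps. From its call sites it must execute a command, mirror the output into the step's stdout log, and return that output (the fslmeants call in Example #7 parses the return value). A minimal sketch under those assumptions, omitting whatever container handling the real helper has:

import subprocess

def run(command, params):
    # Assumed behavior: run via the shell, tee output into the log, return it
    result = subprocess.run(command, shell=True, check=True,
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    output = result.stdout.decode()
    with open(params['stdout'], 'a') as f:
        f.write(output)
    return output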
Example #6
def s4_1_start(params, inputs=[]):
    from subscripts.utilities import record_start
    record_start(params)
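
record_start opens every step in this listing; a plausible minimal sketch is a timestamp written to the step's log (the format and destination are assumptions):

import time

def record_start(params):
    # Assumed behavior: note the step's wall-clock start time in its log
    with open(params['stdout'], 'a') as f:
        f.write("Step started at {}\n".format(time.ctime()))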
Example #7
def s1_1_dicom_preproc(params, inputs=[]):
    import time,tarfile
    from subscripts.utilities import run,record_apptime,record_start,smart_remove,smart_copy, \
                                     smart_mkdir,write
    from os.path import join,split,exists,basename
    from shutil import copyfile
    from glob import glob
    import numpy as np
    sdir = params['sdir']
    stdout = params['stdout']
    T1_dicom_dir = params['T1_dicom_dir']
    DTI_dicom_dir = params['DTI_dicom_dir']
    extra_b0_dirs = params['extra_b0_dirs']
    src_nifti_dir = params['src_nifti_dir']

    sourcedata_dir = params['sourcedata_dir']
    rawdata_dir = params['rawdata_dir']
    derivatives_dir = params['derivatives_dir']
    bids_dicom_dir = params['bids_dicom_dir']
    bids_nifti_dir = params['bids_nifti_dir']
    subject_name = params['subject_name']
    session_name = params['session_name']

    container = params['container']
    dicom_tmp_dir = join(sdir, 'tmp_dicom')

    smart_remove(dicom_tmp_dir)
    smart_mkdir(dicom_tmp_dir)
    
    smart_mkdir(join(bids_nifti_dir, "dwi"))
    smart_mkdir(join(bids_nifti_dir, "anat"))
    DTI_dicom_tmp_dir = join(dicom_tmp_dir, 'DTI')
    T1_dicom_tmp_dir = join(dicom_tmp_dir, 'T1')
    extra_b0_tmp_dirs = [join(dicom_tmp_dir, basename(dirname)) for dirname in extra_b0_dirs]

    hardi_file = join(bids_nifti_dir, "dwi", "{}_{}_dwi.nii.gz".format(subject_name, session_name))
    T1_file = join(bids_nifti_dir, "anat", "{}_{}_T1w.nii.gz".format(subject_name, session_name))
    bvals_file = join(bids_nifti_dir, "dwi", "{}_{}_dwi.bval".format(subject_name, session_name))
    bvecs_file = join(bids_nifti_dir, "dwi", "{}_{}_dwi.bvec".format(subject_name, session_name))

    start_time = time.time()
    record_start(params)

    if src_nifti_dir:
        smart_copy(join(src_nifti_dir, "hardi.nii.gz"), hardi_file)
        smart_copy(join(src_nifti_dir, "anat.nii.gz"), T1_file)
        smart_copy(join(src_nifti_dir, "bvals"), bvals_file)
        smart_copy(join(src_nifti_dir, "bvecs"), bvecs_file)
    elif T1_dicom_dir and DTI_dicom_dir:
        smart_remove(DTI_dicom_tmp_dir)
        smart_remove(T1_dicom_tmp_dir)

        # Copy everything from the DICOM dirs except old NIfTI outputs
        smart_copy(T1_dicom_dir, T1_dicom_tmp_dir, ['*.nii', '*.nii.gz', '*.bval', '*.bvec'])
        write(stdout, 'Copied {} to {}'.format(T1_dicom_dir, T1_dicom_tmp_dir))
        smart_copy(DTI_dicom_dir, DTI_dicom_tmp_dir, ['*.nii', '*.nii.gz', '*.bval', '*.bvec'])
        write(stdout, 'Copied {} to {}'.format(DTI_dicom_dir, DTI_dicom_tmp_dir))
        for (extra_b0_dir, extra_b0_tmp_dir) in zip(extra_b0_dirs, extra_b0_tmp_dirs):
            smart_remove(extra_b0_tmp_dir)
            smart_copy(extra_b0_dir, extra_b0_tmp_dir, ['*.nii', '*.nii.gz', '*.bval', '*.bvec'])
            write(stdout, 'Copied {} to {}'.format(extra_b0_dir, extra_b0_tmp_dir))

        # Run dcm2nii in script to ensure Singularity container finds the right paths
        dicom_sh = join(sdir, "dicom.sh")
        smart_remove(dicom_sh)

        # Convert DTI DICOM slices to individual NIfTI files
        dicom_sh_contents = "dcm2nii -4 N"
        for file in glob(join(DTI_dicom_tmp_dir, '*.dcm')):
            dicom_sh_contents += " " + file

        for extra_b0_tmp_dir in extra_b0_tmp_dirs:
            dicom_sh_contents += "\ndcm2nii -4 N"
            for file in glob(join(extra_b0_tmp_dir, '*.dcm')):
                dicom_sh_contents += " " + file

        dicom_sh_contents += "\ndcm2nii -4 N"
        for file in glob(join(T1_dicom_tmp_dir, '*.dcm')):
            dicom_sh_contents += " " + file

        if container:
            odir = split(sdir)[0]
            write(dicom_sh, dicom_sh_contents.replace(odir, "/share"))
        else:
            write(dicom_sh, dicom_sh_contents)
        write(stdout, 'Running dcm2nii with script {}'.format(dicom_sh))
        run("sh " + dicom_sh, params)

        b0_slices = {}
        normal_slices = []
        all_slices = {}

        # Check that dcm2nii outputs exist
        found_bvals = glob(join(DTI_dicom_tmp_dir, '*.bval'))
        found_bvecs = glob(join(DTI_dicom_tmp_dir, '*.bvec'))
        found_T1 = glob(join(T1_dicom_tmp_dir, 'co*.nii.gz'))

        if len(found_bvals) != 1:
            raise Exception('Did not find exactly one bvals output in {}'.format(DTI_dicom_tmp_dir))
        else:
            copyfile(found_bvals[0], bvals_file)

        if len(found_bvecs) != 1:
            raise Exception('Did not find exactly one bvecs output in {}'.format(DTI_dicom_tmp_dir))
        else:
            copyfile(found_bvecs[0], bvecs_file)

        # If we don't find the usual T1 file name, fall back to any NIfTI file in the T1 directory
        if len(found_T1) == 0:
            found_T1 = glob(join(T1_dicom_tmp_dir, '*.nii.gz'))
        if len(found_T1) == 0:
            raise Exception('Did not find T1 output in {}'.format(T1_dicom_tmp_dir))
        elif len(found_T1) > 1:
            write(stdout, 'Warning: Found more than one T1 output in {}'.format(T1_dicom_tmp_dir))
        found_T1.sort()
        copyfile(found_T1[0], T1_file)

        # Copy extra b0 values to DTI temp dir
        for extra_b0_tmp_dir in extra_b0_tmp_dirs:
            for file in glob(join(extra_b0_tmp_dir, "*.nii.gz")):
                copyfile(file, join(DTI_dicom_tmp_dir, "extra_b0_" + basename(file)))
            write(stdout, 'Copied NIfTI outputs from {} to {}'.format(extra_b0_tmp_dir, DTI_dicom_tmp_dir))

        # Sort slices into DTI and b0
        for file in glob(join(DTI_dicom_tmp_dir, '*.nii.gz')):
            slice_val = run("fslmeants -i {} | head -n 1".format(file), params) # based on getconnectome script
            all_slices[file] = float(slice_val)
        normal_median = np.median(list(all_slices.values()))
        for file in list(all_slices.keys()):
            slice_val = all_slices[file]
            # mark as b0 if more than 20% from normal slice median
            if abs(slice_val - normal_median) > 0.2 * normal_median:
                b0_slices[file] = slice_val
            else:
                normal_slices.append(file)
        if not b0_slices:
            raise Exception('Failed to find b0 values in {}'.format(DTI_dicom_dir))
        write(stdout, 'Found {} normal DTI slices'.format(len(normal_slices)))

        # Remove outliers from b0 values
        max_outliers = 1
        if len(b0_slices) > max_outliers:
            num_outliers = 0
            b0_median = np.median(list(b0_slices.values()))
            for file in list(b0_slices.keys()):
                slice_val = b0_slices[file]
                # remove outlier if more than 20% from b0 median
                if abs(slice_val - b0_median) > 0.2 * b0_median:
                    b0_slices.pop(file)
                    num_outliers += 1
            if num_outliers > max_outliers:
                raise Exception('Found more than {} outliers in b0 values. This probably means that this script has incorrectly identified b0 slices.'.format(max_outliers))
        write(stdout, 'Found {} b0 slices'.format(len(b0_slices)))

        # Average b0 slices into a single image
        avg_b0 = join(DTI_dicom_tmp_dir, 'avg_b0.nii.gz')
        smart_remove(avg_b0)
        for file in list(b0_slices.keys()):
            if not exists(avg_b0):
                copyfile(file, avg_b0)
            else:
                run("fslmaths {0} -add {1} {1}".format(file, avg_b0), params)
        run("fslmaths {0} -div {1} {0}".format(avg_b0, len(b0_slices)), params)

        # Concatenate average b0 and DTI slices into a single hardi.nii.gz
        normal_slices.sort()
        tmp_hardi = join(dicom_tmp_dir, "hardi.nii.gz")
        run("fslmerge -t {} {}".format(tmp_hardi, " ".join([avg_b0] + normal_slices)), params)
        copyfile(tmp_hardi, hardi_file)
        write(stdout, 'Concatenated b0 and DTI slices into {}'.format(hardi_file))

        # Clean extra zeroes from bvals and bvecs files
        num_slices = len(normal_slices)
        with open(bvals_file, 'r+') as f:
            entries = [x.strip() for x in f.read().split() if x]
            extra_zero = entries.pop(0) # strip leading zero
            if extra_zero != "0":
                raise Exception("{} should begin with zero, as a placeholder for the averaged b0 slice".format(bvals_file))

            # remove zero sequences
            min_sequence_length = 5
            if all(x == "0" for x in entries[0:min_sequence_length]):
                write(stdout, "Stripped leading zero sequence from {}".format(bvals_file))
                while len(entries) > num_slices:
                    extra_zero = entries.pop(0)
                    if extra_zero != "0":
                        raise Exception("Failed to clean extra zeros from {}".format(bvals_file))
            elif all(x == "0" for x in entries[-1:-min_sequence_length-1:-1]):
                write(stdout, "Stripped trailing zero sequence from {}".format(bvals_file))
                while len(entries) > num_slices:
                    extra_zero = entries.pop(-1)
                    if extra_zero != "0":
                        raise Exception("Failed to clean extra zeros from {}".format(bvals_file))

            if len(entries) > num_slices:
                raise Exception('Failed to clean bvals file {}. Since {} has {} slices, bvals must have {} columns'.
                    format(bvals_file, hardi_file, num_slices, num_slices))
            text = "0 " + " ".join(entries) + "\n" # restore leading zero
            f.seek(0)
            f.write(text)
            f.truncate()
            write(stdout, 'Generated bvals file with values:\n{}'.format(text))
        with open(bvecs_file, 'r+') as f:
            text = ""
            for line in f.readlines():
                if not line:
                    continue
                entries = [x.strip() for x in line.split() if x]
                extra_zero = entries.pop(0) # strip leading zero
                if extra_zero != "0":
                    raise Exception("Each line in {} should begin with zero, as a placeholder for the averaged b0 slice".format(bvecs_file))

                # remove zero sequences
                min_sequence_length = 5
                if all(x == "0" for x in entries[0:min_sequence_length]):
                    write(stdout, "Stripped leading zero sequence from {}".format(bvecs_file))
                    while len(entries) > num_slices:
                        extra_zero = entries.pop(0)
                        if extra_zero != "0":
                            raise Exception("Failed to clean extra zeros from {}".format(bvecs_file))
                elif all(x == "0" for x in entries[-1:-min_sequence_length-1:-1]):
                    write(stdout, "Stripped trailing zero sequence from {}".format(bvecs_file))
                    while len(entries) > num_slices:
                        extra_zero = entries.pop(-1)
                        if extra_zero != "0":
                            raise Exception("Failed to clean extra zeros from {}".format(bvecs_file))

                if len(entries) > num_slices:
                    raise Exception('Failed to clean bvecs file {}. Since {} has {} slices, bvecs must have {} columns'.
                        format(bvecs_file, hardi_file, num_slices, num_slices))
                text += "0 " + " ".join(entries) + "\n" # restore leading zero
            f.seek(0)
            f.write(text)
            f.truncate()
            write(stdout, 'Generated bvecs file with values:\n{}'.format(text))

        # Compress DICOM inputs
        dicom_tmp_archive = join(bids_dicom_dir, 'sourcedata.tar.gz')
        smart_remove(dicom_tmp_archive)
        with tarfile.open(dicom_tmp_archive, mode='w:gz') as archive:
            archive.add(dicom_tmp_dir, recursive=True, arcname=basename(dicom_tmp_dir))
        smart_remove(dicom_tmp_dir)
        write(stdout, 'Compressed temporary DICOM files to {}'.format(dicom_tmp_archive))

    smart_copy(hardi_file, join(sdir, "hardi.nii.gz"))
    smart_copy(T1_file, join(sdir,"T1.nii.gz"))
    smart_copy(bvecs_file, join(sdir,"bvecs"))
    smart_copy(bvals_file, join(sdir,"bvals"))
    record_apptime(params, start_time, 1)
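
The b0 detection in Example #7 rests on one heuristic: a volume whose mean intensity deviates from the median across all volumes by more than 20% is treated as b0. Factored out as a standalone function for clarity (split_b0_slices is a hypothetical name; the pipeline performs this inline):

import numpy as np

def split_b0_slices(mean_intensities, tolerance=0.2):
    # mean_intensities: dict of file path -> mean intensity
    # (e.g. the first value reported by fslmeants)
    median = np.median(list(mean_intensities.values()))
    b0, normal = {}, []
    for path, val in mean_intensities.items():
        if abs(val - median) > tolerance * median:
            b0[path] = val
        else:
            normal.append(path)
    return b0, normal

# e.g. split_b0_slices({'a.nii.gz': 900.0, 'b.nii.gz': 310.0, 'c.nii.gz': 300.0})
# returns ({'a.nii.gz': 900.0}, ['b.nii.gz', 'c.nii.gz'])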