Example No. 1
def hcp_dki(subject, aws_access_key, aws_secret_key, hcp_aws_access_key,
            hcp_aws_secret_key, outbucket):

    fs = s3fs.S3FileSystem(key=aws_access_key, secret=aws_secret_key)
    remote_wm_path = f"{outbucket}/derivatives/wm_mask"

    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger(__name__)  # noqa
    rpath = op.join(remote_wm_path, f'sub-{subject}', 'ses-01', 'dwi',
                    f'sub-{subject}_dwi_wm_mask.nii.gz')
    lpath = "./wm_mask.nii.gz"

    if not fs.exists(rpath):
        log.info(f"Getting data for subject {subject}")
        t1 = time.time()
        # get HCP data for the given subject / session
        _, hcp_bids = fetch_hcp([subject],
                                profile_name=False,
                                aws_access_key_id=hcp_aws_access_key,
                                aws_secret_access_key=hcp_aws_secret_key)

        dwi_path = op.join(afd.afq_home, 'HCP_1200', 'derivatives', 'dmriprep',
                           f'sub-{subject}', 'ses-01', 'dwi')
        dwi_img = nib.load(op.join(dwi_path, f'sub-{subject}_dwi.nii.gz'))
        dwi_data = dwi_img.get_fdata()
        log.info(f"That took {time.time() - t1} seconds")

        log.info("Extracting and resampling the white matter mask")
        t1 = time.time()
        anat_path = op.join(afd.afq_home, 'HCP_1200', 'derivatives',
                            'dmriprep', f'sub-{subject}', 'ses-01', 'anat')

        wm_labels = [250, 251, 252, 253, 254, 255, 41, 2, 16, 77]
        seg_img = nib.load(
            op.join(anat_path, f'sub-{subject}_aparc+aseg_seg.nii.gz'))

        seg_data_orig = seg_img.get_fdata()
        # Extract all voxels whose segmentation value matches any of the
        # white-matter labels:
        wm_mask = np.sum(
            np.concatenate([(seg_data_orig == ll)[..., None]
                            for ll in wm_labels], -1), -1)
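        # (equivalently: wm_mask = np.isin(seg_data_orig, wm_labels))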

        # Resample to DWI data:
        wm_mask = np.round(
            resample(wm_mask, dwi_data[..., 0], seg_img.affine,
                     dwi_img.affine).get_fdata())
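        # (resampling interpolates, so the mask comes back with fractional
        # values; rounding re-binarizes it)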
        # Save locally
        nib.save(nib.Nifti1Image(wm_mask.astype(int), dwi_img.affine), lpath)
        log.info(f"That took {time.time() - t1} seconds")
        log.info(f"Uplodading to {rpath}")
        fs.upload(lpath, rpath)
    else:
        log.info(f"Looks like I've already done subject {subject}. Moving on")
Example No. 2
def afq_hcp(subject, aws_access_key, aws_secret_key, hcp_aws_access_key,
            hcp_aws_secret_key, outbucket):

    fs = s3fs.S3FileSystem(key=aws_access_key, secret=aws_secret_key)

    # Configuration:

    # session = "1200"
    session = "Retest"

    seg_algo = "reco80"
    reuse_tractography = False
    bundle_info = None
    shell = "multi"
    my_hcp_key = f"{outbucket}/hcp_reliability"
    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger(__name__) # noqa
    log.info(f"Subject: {subject}")
    remote_export_path = (
        f"{my_hcp_key}/{shell}_shell/"
        f"hcp_{session.lower()}_{seg_algo}_csd_det_azure")


    # get HCP data for the given subject / session
    _, hcp_bids = fetch_hcp(
        [subject],
        profile_name=False,
        aws_access_key_id=hcp_aws_access_key,
        aws_secret_access_key=hcp_aws_secret_key)

    tracking_params = {
        'seed_mask': afm.ScalarMask('dki_fa'),
        'stop_mask': afm.ScalarMask('dki_fa'),
        "odf_model": "CSD",
        "directions": "det"}
    kwargs = {
        "scalars": ["dki_fa", "dki_md"]
    }

    # Whether to reuse a previous tractography that has already been
    # uploaded to s3 by another run of this function. Useful if you want to
    # try new parameters that do not change the tractography.
    if reuse_tractography:
        rpath = (
            f"{my_hcp_key}/{shell}_shell/"
            f"hcp_{session.lower()}_afq/sub-{subject}/ses-01/"
            f"sub-{subject}_dwi_space-RASMM_model-"
            f"{tracking_params['odf_model']}_desc-prob_tractography.trk")
        lpath = (
            f"derivatives/dmriprep/sub-{subject}/"
            f"ses-01/sub-{subject}_customtrk.trk")
        if fs.exists(rpath):
            log.info(f"Gettng {rpath}")
            fs.get(
                rpath,
                op.join(hcp_bids, lpath))
        else:
            raise ValueError(f"Could not find {rpath}")

        custom_tractography_bids_filters = {
            "suffix": "customtrk", "scope": "dmriprep"}
    else:
        custom_tractography_bids_filters = None

    # Initialize the AFQ object with all of the parameters we have set so
    # far. Also use the brain mask provided by HCP. Setting
    # viz_backend='plotly' makes GIFs in addition to the default html
    # visualizations (this adds ~45 minutes).
    myafq = api.AFQ(
        hcp_bids,
        brain_mask=afm.LabelledMaskFile(
                    'seg', {'scope': 'dmriprep'}, exclusive_labels=[0]),
        custom_tractography_bids_filters=custom_tractography_bids_filters,
        tracking_params=tracking_params,
        bundle_info=bundle_info,
        segmentation_params={"seg_algo": seg_algo, "reg_algo": "syn"},
        viz_backend='plotly',
        **kwargs)
    # run the AFQ objects
    myafq.export_all()
    myafq.upload_to_s3(fs, remote_export_path)
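
For reference, a driver for this function can be a plain serial loop over subjects; a minimal sketch, where the subject IDs, credential variables, and output bucket are placeholders supplied by the caller:

# Hypothetical driver; AWS_KEY, AWS_SECRET, HCP_AWS_KEY, HCP_AWS_SECRET and
# the bucket name must be defined elsewhere.
for subject in ["103818", "105923"]:
    afq_hcp(subject, AWS_KEY, AWS_SECRET,
            HCP_AWS_KEY, HCP_AWS_SECRET, "my-output-bucket")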
Example No. 3
def afq_process_subject(subject, seed_mask, n_seeds,
                        aws_access_key, aws_secret_key):
    # define a function that each job will run
    # In this case, each process does a single subject
    import logging
    import s3fs
    # all imports must be at the top of the function
    # cloudknot installs the appropriate packages from pip
    from AFQ.data import fetch_hcp
    import AFQ.api as api
    import AFQ.definitions.mask as afm

    import numpy as np
    import os.path as op

    # set logging level to your choice
    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger(__name__)

    # Download the given subject to the AWS Batch machine from s3
    _, hcp_bids = fetch_hcp(
        [subject],
        profile_name=False,
        study=f"HCP_1200",
        aws_access_key_id=aws_access_key,
        aws_secret_access_key=aws_secret_key)

    # We make a new seed mask for each process based off of the
    # seed_mask argument, which is a string.
    # This is to avoid any complications with pickling the masks.
    if seed_mask == "roi":
        seed_mask_obj = afm.RoiMask()
    elif seed_mask == "fa":
        seed_mask_obj = afm.ScalarMask("dti_fa")
    else:
        seed_mask_obj = afm.FullMask()

    # Determine whether n_seeds is the total number of random seeds
    # or the per-voxel seeding density
    random_seeds = n_seeds > 3

    # set the tracking_params based off our inputs
    tracking_params = {
        "seed_mask": seed_mask_obj,
        "n_seeds": n_seeds,
        "random_seeds": random_seeds}

    # use segmentation file from HCP to get a brain mask,
    # where everything not labelled 0 is considered a part of the brain
    brain_mask = afm.LabelledMaskFile(
        'seg', {'scope': 'dmriprep'}, exclusive_labels=[0])

    # define the api AFQ object
    myafq = api.AFQ(
        hcp_bids,
        brain_mask=brain_mask,
        tracking_params=tracking_params)

    # export_all runs the entire pipeline and creates many useful derivatives
    myafq.export_all()

    # upload the results to some location on s3
    myafq.upload_to_s3(
        s3fs.S3FileSystem(),
        (f"my_study_bucket/my_study_prefix_{seed_mask}_{n_seeds}"
        f"/derivatives/afq"))
Example No. 4
def afq_hcp(subject, aws_access_key, aws_secret_key, hcp_aws_access_key,
            hcp_aws_secret_key, outbucket):

    fs = s3fs.S3FileSystem(key=aws_access_key, secret=aws_secret_key)
    remote_export_path = f"{outbucket}/derivatives/afq_dki_det"

    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger(__name__)  # noqa

    # get HCP data for the given subject / session
    _, hcp_bids = fetch_hcp([subject],
                            profile_name=False,
                            aws_access_key_id=hcp_aws_access_key,
                            aws_secret_access_key=hcp_aws_secret_key)

    or_rois = afd.read_or_templates()
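
    # In the bundle definitions below, "rules" marks each ROI as inclusion
    # (True) or exclusion (False), in the same order as the "ROIs" list.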

    bundles = {
        "L_OR": {
            "ROIs": [
                or_rois["left_OR_1"], or_rois["left_OR_2"],
                or_rois["left_OP_MNI"], or_rois["left_TP_MNI"],
                or_rois["left_pos_thal_MNI"]
            ],
            "rules": [True, True, False, False, False],
            "cross_midline":
            False,
            "uid":
            1
        },
        "R_OR": {
            "ROIs": [
                or_rois["right_OR_1"], or_rois["right_OR_2"],
                or_rois["right_OP_MNI"], or_rois["right_TP_MNI"],
                or_rois["right_pos_thal_MNI"]
            ],
            "rules": [True, True, False, False, False],
            "cross_midline":
            False,
            "uid":
            2
        }
    }

    brain_mask = LabelledMaskFile("seg", {"scope": "dmriprep"},
                                  exclusive_labels=[0])

    tractography_afq = api.AFQ(bids_path=op.join(afd.afq_home, 'HCP_1200'),
                               brain_mask=brain_mask,
                               tracking_params={
                                   "n_seeds": 3,
                                   "directions": "det",
                                   "odf_model": "DKI",
                                   "seed_mask": RoiMask()
                               },
                               bundle_info=bundles)

    # Use this one just to define the streamlines, oversampling around OR:
    tractography_afq.get_streamlines()
    tractography_afq.upload_to_s3(fs, remote_export_path)

    rb_afq = api.AFQ(bids_path=op.join(afd.afq_home, 'HCP_1200'),
                     brain_mask=brain_mask,
                     viz_backend='plotly_no_gif',
                     bundle_info=["OR"],
                     tracking_params={
                         "n_seeds": 3,
                         "directions": "det",
                         "odf_model": "DKI",
                         "seed_mask": RoiMask()
                     },
                     segmentation_params=dict(seg_algo="reco80"),
                     scalars=["dki_fa", "dki_md", "dki_mk", "dki_awf"])

    # Use this one to segment with Recobundles
    rb_afq.export_all()

    rb_afq.upload_to_s3(fs, remote_export_path)
Example No. 5
def afq_hcp(subject, aws_access_key, aws_secret_key, hcp_aws_access_key,
            hcp_aws_secret_key, outbucket):

    fs = s3fs.S3FileSystem(key=aws_access_key, secret=aws_secret_key)
    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger(__name__)  # noqa
    log.info(f"Subject: {subject}")

    # Only do it if the output file doesn't already exist:
    if not fs.exists(f"hcp.afq/derivatives/afq/sub-{subject}/"
                     f"ses-01/sub-{subject}"
                     "_dwi_space-RASMM_model-CSD_desc-prob-"
                     "afq_profiles.csv"):
        # Configuration:
        # session = "Retest"
        session = "1200"
        seg_algo = "afq"
        reuse_tractography = True
        bundle_info = api.BUNDLES + api.CALLOSUM_BUNDLES
        shell = "multi"
        my_hcp_key = f"{outbucket}/derivatives/afq/"
        remote_export_path = my_hcp_key

        # get HCP data for the given subject / session
        _, hcp_bids = fetch_hcp([subject],
                                profile_name=False,
                                aws_access_key_id=hcp_aws_access_key,
                                aws_secret_access_key=hcp_aws_secret_key)

        tracking_params = {
            'seed_mask': afm.ScalarMask('dki_fa'),
            'stop_mask': afm.ScalarMask('dki_fa'),
            "odf_model": "CSD",
            "directions": "prob"
        }
        kwargs = {"scalars": ["dki_fa", "dki_md", "dki_mk", "dki_awf"]}

        # Whether to reuse a previous tractography that has already been
        # uploaded to s3 by another run of this function. Useful if you want to
        # try new parameters that do not change the tractography.
        custom_tractography_bids_filters = None

        if reuse_tractography:
            rpath = (f"profile-hcp-west/hcp_reliability/multi_shell/"
                     f"hcp_{session.lower()}_reco80_csd_azure/sub-{subject}"
                     f"/ses-01/sub-{subject}_dwi_space-RASMM"
                     f"_model-CSD_desc-prob_tractography.trk")
            #  rpath=(
            #      f"{my_hcp_key}/{shell}_shell/"
            #      f"hcp_{session.lower()}_afq/sub-{subject}/ses-01/"
            #      f"sub-{subject}_dwi_space-RASMM_model-"
            #      f"{tracking_params['odf_model']}_desc-prob_tractography.trk")
            lpath = (f"derivatives/dmriprep/sub-{subject}/"
                     f"ses-01/sub-{subject}_customtrk.trk")
            if fs.exists(rpath):
                log.info(f"Gettng {rpath}")
                fs.get(rpath, op.join(hcp_bids, lpath))
                custom_tractography_bids_filters = {
                    "suffix": "customtrk",
                    "scope": "dmriprep"
                }

        # Initialize the AFQ object with all of the parameters we have set so
        # far. Also use the brain mask provided by HCP. Setting
        # viz_backend='plotly' makes GIFs in addition to the default html
        # visualizations (this adds ~45 minutes).
        myafq = api.AFQ(
            hcp_bids,
            brain_mask=afm.LabelledMaskFile('seg', {'scope': 'dmriprep'},
                                            exclusive_labels=[0]),
            custom_tractography_bids_filters=custom_tractography_bids_filters,
            tracking_params=tracking_params,
            bundle_info=bundle_info,
            segmentation_params={
                "seg_algo": seg_algo,
                "reg_algo": "syn"
            },
            viz_backend='plotly',
            **kwargs)
        # run the AFQ objects
        log.info("Running the pyAFQ pipeline")
        myafq.export_all(afqbrowser=False, xforms=False)
        log.info(f"Uploading to {remote_export_path}")
        myafq.upload_to_s3(fs, remote_export_path)
    else:
        log.info(f"Already completed analysis for this subject")
Example No. 6
def hcp_dki(subject, aws_access_key, aws_secret_key, hcp_aws_access_key,
            hcp_aws_secret_key, outbucket):

    fs = s3fs.S3FileSystem(key=aws_access_key, secret=aws_secret_key)
    remote_p2s_path = f"{outbucket}/derivatives/patch2self"
    remote_dti_path = f"{outbucket}/derivatives/dti"
    remote_dti1000_path = f"{outbucket}/derivatives/dti1000"
    remote_dki_path = f"{outbucket}/derivatives/dki"
    remote_sst_path = f"{outbucket}/derivatives/sst"

    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger(__name__)  # noqa

    log.info(f"Getting data for subject {subject}")
    # get HCP data for the given subject / session
    _, hcp_bids = fetch_hcp([subject],
                            profile_name=False,
                            aws_access_key_id=hcp_aws_access_key,
                            aws_secret_access_key=hcp_aws_secret_key)

    dwi_path = op.join(afd.afq_home, 'HCP_1200', 'derivatives', 'dmriprep',
                       f'sub-{subject}', 'ses-01', 'dwi')

    dwi_img = nib.load(op.join(dwi_path, f'sub-{subject}_dwi.nii.gz'))
    dwi_data = dwi_img.get_fdata()
    b0_threshold = 50
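    # Volumes with b-value <= b0_threshold are flagged as b0 volumes in
    # gtab.b0s_mask below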

    gtab = gradient_table(op.join(dwi_path, f'sub-{subject}_dwi.bval'),
                          op.join(dwi_path, f'sub-{subject}_dwi.bvec'),
                          b0_threshold=b0_threshold)

    rpath = op.join(remote_p2s_path, f'sub-{subject}', 'ses-01', 'dwi',
                    f'sub-{subject}_desc-denoised_dwi.nii.gz')
    lpath = "./denoised_data.nii.gz"

    if not fs.exists(rpath):
        log.info("Denoising with patch2self")
        t1 = time.time()
        den_data = patch2self(dwi_data,
                              gtab.bvals,
                              b0_threshold=b0_threshold,
                              clip_negative_vals=False,
                              shift_intensity=True)
        log.info(f"That took {time.time() - t1} seconds")
        den_img = nib.Nifti1Image(den_data, dwi_img.affine)
        nib.save(den_img, lpath)
        fs.upload(lpath, rpath)
    else:
        log.info("Looks like I've already denoised this subject")
        log.info("Downloading data from S3")
        fs.download(rpath, lpath)
        den_data = nib.load(lpath).get_fdata()

    log.info("Calculating SST")
    data_dwi = den_data[..., ~gtab.b0s_mask]
    mean_dwi = np.mean(data_dwi, -1)
    sst = np.sum((data_dwi - mean_dwi[..., None])**2, -1)
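    # SST is the total sum of squares around the mean DWI signal; the
    # per-model SSE maps computed below can be compared against it
    # (e.g., as R^2 = 1 - SSE / SST)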
    lpath = "data_sst.nii.gz"
    nib.save(nib.Nifti1Image(sst, dwi_img.affine), lpath)
    rpath = op.join(remote_sst_path, f'sub-{subject}', 'ses-01', 'dwi',
                    f'sub-{subject}_dwi_desc-sst.nii.gz')
    fs.upload(lpath, rpath)

    lpath = "dti_params.nii.gz"
    rpath = op.join(remote_dti_path, f'sub-{subject}', 'ses-01', 'dwi',
                    f'sub-{subject}_dwi_model-DTI_diffmodel.nii.gz')

    if not fs.exists(rpath):
        log.info("Fitting DTI")
        t1 = time.time()
        dtim = dti.TensorModel(gtab)
        dtif = dtim.fit(den_data, mask=np.ones(den_data.shape[:3]))
        nib.save(nib.Nifti1Image(dtif.model_params, dwi_img.affine), lpath)
        fs.upload(lpath, rpath)
        log.info(f"That took {time.time() - t1} seconds")
    else:
        log.info("Looks like I've already fit DTI")
        log.info("Downloading DTI params from S3")
        fs.download(rpath, lpath)
        dtim = dti.TensorModel(gtab)

    dti_params = nib.load("dti_params.nii.gz")
    S0 = np.mean(den_data[..., gtab.b0s_mask], -1)
    pred = dtim.predict(dti_params.get_fdata(), S0=S0)

    lpath = "dti_pred.nii.gz"
    rpath = op.join(remote_dti_path, f'sub-{subject}', 'ses-01', 'dwi',
                    f'sub-{subject}_dwi_prediction-DTI_diffmodel.nii.gz')

    nib.save(nib.Nifti1Image(pred, dwi_img.affine), lpath)
    fs.upload(lpath, rpath)

    # We calculate SSE only over diffusion-weighted volumes
    sse = np.sum(
        (pred[..., ~gtab.b0s_mask] - den_data[..., ~gtab.b0s_mask])**2, -1)
    lpath = "dti_sse.nii.gz"
    rpath = op.join(remote_dti_path, f'sub-{subject}', 'ses-01', 'dwi',
                    f'sub-{subject}_dwi_SSE-DTI_diffmodel.nii.gz')
    nib.save(nib.Nifti1Image(sse, dwi_img.affine), lpath)
    fs.upload(lpath, rpath)

    ### DTI 1000
    lpath = "dti1000_params.nii.gz"
    rpath = op.join(remote_dti1000_path, f'sub-{subject}', 'ses-01', 'dwi',
                    f'sub-{subject}_dwi_model-DTI_diffmodel.nii.gz')

    dwi1000 = den_data[..., gtab.bvals < 1100]
    gtab1000 = gradient_table(gtab.bvals[gtab.bvals < 1100],
                              gtab.bvecs[gtab.bvals < 1100])
    if not fs.exists(rpath):
        log.info("Fitting DTI")
        t1 = time.time()
        dtim = dti.TensorModel(gtab1000)
        dtif = dtim.fit(dwi1000, mask=np.ones(den_data.shape[:3]))
        nib.save(nib.Nifti1Image(dtif.model_params, dwi_img.affine), lpath)
        fs.upload(lpath, rpath)
        log.info(f"That took {time.time() - t1} seconds")
    else:
        log.info("Looks like I've already fit DTI with b=1000")
        log.info("Downloading DTI params from S3")
        fs.download(rpath, lpath)
        dtim = dti.TensorModel(gtab1000)

    dti_params = nib.load("dti_params.nii.gz")
    S0 = np.mean(dwi1000[..., gtab1000.b0s_mask], -1)
    pred = dtim.predict(dti_params.get_fdata(), S0=S0)

    lpath = "dti1000_pred.nii.gz"
    rpath = op.join(remote_dti1000_path, f'sub-{subject}', 'ses-01', 'dwi',
                    f'sub-{subject}_dwi_prediction-DTI_diffmodel.nii.gz')

    nib.save(nib.Nifti1Image(pred, dwi_img.affine), lpath)
    fs.upload(lpath, rpath)

    # We calculate SSE only over diffusion-weighted volumes
    sse = np.sum(
        (pred[..., ~gtab1000.b0s_mask] - dwi1000[..., ~gtab1000.b0s_mask])**2,
        -1)
    lpath = "dti1000_sse.nii.gz"
    rpath = op.join(remote_dti1000_path, f'sub-{subject}', 'ses-01', 'dwi',
                    f'sub-{subject}_dwi_SSE-DTI_diffmodel.nii.gz')
    nib.save(nib.Nifti1Image(sse, dwi_img.affine), lpath)
    fs.upload(lpath, rpath)

    ### DKI
    lpath = "dki_params.nii.gz"
    rpath = op.join(remote_dki_path, f'sub-{subject}', 'ses-01', 'dwi',
                    f'sub-{subject}_dwi_model-DKI_diffmodel.nii.gz')

    if not fs.exists(rpath):
        log.info("Fitting DKI")
        t1 = time.time()
        dkim = dki.DiffusionKurtosisModel(gtab)
        dkif = dkim.fit(den_data)
        log.info(f"That took {time.time() - t1} seconds")
        nib.save(nib.Nifti1Image(dkif.model_params, dwi_img.affine), lpath)
        fs.upload(lpath, rpath)
    else:
        log.info("Looks like I've already fit DKI")
        log.info("Downloading DKI params from S3")
        fs.download(rpath, lpath)
        dkim = dki.DiffusionKurtosisModel(gtab)

    dki_params = nib.load("dki_params.nii.gz")
    pred = dkim.predict(dki_params.get_fdata(), S0=S0)
    lpath = "dki_pred.nii.gz"
    rpath = op.join(remote_dki_path, f'sub-{subject}', 'ses-01', 'dwi',
                    f'sub-{subject}_dwi_prediction-DKI_diffmodel.nii.gz')

    nib.save(nib.Nifti1Image(pred, dwi_img.affine), lpath)
    fs.upload(lpath, rpath)

    # We calculate SSE only over diffusion-weighted volumes
    sse = np.sum(
        (pred[..., ~gtab.b0s_mask] - den_data[..., ~gtab.b0s_mask])**2, -1)
    lpath = "dki_sse.nii.gz"
    rpath = op.join(remote_dki_path, f'sub-{subject}', 'ses-01', 'dwi',
                    f'sub-{subject}_dwi_SSE-DKI_diffmodel.nii.gz')

    nib.save(nib.Nifti1Image(sse, dwi_img.affine), lpath)
    fs.upload(lpath, rpath)
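

# A hypothetical downstream helper (an assumption, not part of the original
# pipeline): the SSE and SST maps saved above can be combined into a
# per-voxel coefficient of determination, R^2 = 1 - SSE / SST.
def r_squared_from_maps(sse_path, sst_path):
    sse = nib.load(sse_path).get_fdata()
    sst = nib.load(sst_path).get_fdata()
    return 1 - sse / np.maximum(sst, 1e-12)  # guard against SST == 0
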
def hcp_dki(subject, aws_access_key, aws_secret_key, hcp_aws_access_key,
            hcp_aws_secret_key, outbucket):

    fs = s3fs.S3FileSystem(key=aws_access_key, secret=aws_secret_key)

    remote_dti1000_path = f"{outbucket}/derivatives/dti1000"
    remote_dti1000_2000_path = f"{outbucket}/derivatives/dti1000_2000"
    remote_dki1000_2000_path = f"{outbucket}/derivatives/dki1000_2000"
    remote_dki2000_3000_path = f"{outbucket}/derivatives/dki2000_3000"
    remote_dki1000_3000_path = f"{outbucket}/derivatives/dki1000_3000"


    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger(__name__) # noqa

    log.info(f"Getting data for subject {subject}")
    # get HCP data for the given subject / session
    _, hcp_bids = fetch_hcp(
        [subject],
        profile_name=False,
        aws_access_key_id=hcp_aws_access_key,
        aws_secret_access_key=hcp_aws_secret_key)

    dwi_path = op.join(afd.afq_home, 'HCP_1200', 'derivatives', 'dmriprep',
                       f'sub-{subject}', 'ses-01', 'dwi')

    dwi_img = nib.load(op.join(dwi_path, f'sub-{subject}_dwi.nii.gz'))
    dwi_data = dwi_img.get_fdata()
    b0_threshold = 50

    gtab = gradient_table(
        op.join(dwi_path, f'sub-{subject}_dwi.bval'),
        op.join(dwi_path, f'sub-{subject}_dwi.bvec'),
        b0_threshold=b0_threshold)

    ### DTI 1000
    last_result = op.join(
        remote_dti1000_path, f'sub-{subject}', 'ses-01', 'dwi',
        f'sub-{subject}_dwi_model-DTI_MD.nii.gz')
    if not fs.exists(last_result):
        lpath = "dti1000_params.nii.gz"
        rpath = op.join(remote_dti1000_path, f'sub-{subject}', 'ses-01', 'dwi',
                        f'sub-{subject}_dwi_model-DTI_diffmodel.nii.gz')

        dwi1000 = dwi_data[..., gtab.bvals < 1100]
        gtab1000 = gradient_table(gtab.bvals[gtab.bvals < 1100],
                                  gtab.bvecs[gtab.bvals < 1100])
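        # Note: cutting at b < 1100 keeps both the b=1000 shell and the b0
        # volumes, which the tensor fit needs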
        if not fs.exists(rpath):
            log.info("Fitting DTI")
            t1 = time.time()
            dtim = dti.TensorModel(gtab1000)
            dtif = dtim.fit(dwi1000, mask=np.ones(dwi_data.shape[:3]))
            nib.save(nib.Nifti1Image(dtif.model_params, dwi_img.affine), lpath)
            fs.upload(lpath, rpath)
            log.info(f"That took {time.time() - t1} seconds")
        else:
            log.info("Looks like I've already fit DTI with b=1000")
            log.info("Downloading DTI params from S3")
            fs.download(rpath, lpath)
            dtim = dti.TensorModel(gtab1000)
            dti_params = nib.load(lpath).get_fdata()
            dtif = dti.TensorFit(dtim, dti_params)

        lpath = "dti1000_fa.nii.gz"
        nib.save(nib.Nifti1Image(dtif.fa, dwi_img.affine), lpath)
        rpath = op.join(remote_dti1000_path, f'sub-{subject}', 'ses-01', 'dwi',
                        f'sub-{subject}_dwi_model-DTI_FA.nii.gz')
        fs.upload(lpath, rpath)

        lpath = "dti1000_md.nii.gz"
        nib.save(nib.Nifti1Image(dtif.md, dwi_img.affine), lpath)
        rpath = op.join(remote_dti1000_path, f'sub-{subject}', 'ses-01', 'dwi',
                        f'sub-{subject}_dwi_model-DTI_MD.nii.gz')
        fs.upload(lpath, rpath)


    ### DTI 1000 + 2000
    last_result = op.join(
        remote_dti1000_2000_path, f'sub-{subject}', 'ses-01', 'dwi',
        f'sub-{subject}_dwi_model-DTI_MD.nii.gz')
    if not fs.exists(last_result):
        lpath = "dti1000_2000_params.nii.gz"
        rpath = op.join(
            remote_dti1000_2000_path, f'sub-{subject}', 'ses-01', 'dwi',
            f'sub-{subject}_dwi_model-DTI_diffmodel.nii.gz')

        dwi1000_2000 = dwi_data[..., gtab.bvals < 2100]
        gtab1000_2000 = gradient_table(
            gtab.bvals[gtab.bvals < 2100],
            gtab.bvecs[gtab.bvals < 2100])
        if not fs.exists(rpath):
            log.info("Fitting DTI with b=1000 and 2000")
            t1 = time.time()
            dtim = dti.TensorModel(gtab1000_2000)
            dtif = dtim.fit(dwi1000_2000, mask=np.ones(dwi_data.shape[:3]))
            nib.save(nib.Nifti1Image(dtif.model_params, dwi_img.affine), lpath)
            fs.upload(lpath, rpath)
            log.info(f"That took {time.time() - t1} seconds")
        else:
            log.info("Looks like I've already fit DTI with b=1000 and b=2000")
            log.info("Downloading DTI params from S3")
            fs.download(rpath, lpath)
            dtim = dti.TensorModel(gtab1000_2000)
            dti_params = nib.load(lpath).get_fdata()
            dtif = dti.TensorFit(dtim, dti_params)

        lpath = "dti1000_2000_fa.nii.gz"
        nib.save(nib.Nifti1Image(dtif.fa, dwi_img.affine), lpath)
        rpath = op.join(
            remote_dti1000_2000_path, f'sub-{subject}', 'ses-01', 'dwi',
            f'sub-{subject}_dwi_model-DTI_FA.nii.gz')
        fs.upload(lpath, rpath)

        lpath = "dti1000_2000_md.nii.gz"
        nib.save(nib.Nifti1Image(dtif.md, dwi_img.affine), lpath)
        rpath = op.join(
            remote_dti1000_2000_path, f'sub-{subject}', 'ses-01', 'dwi',
            f'sub-{subject}_dwi_model-DTI_MD.nii.gz')
        fs.upload(lpath, rpath)


    ### DKI 1000 + 2000
    last_result = op.join(
        remote_dki1000_2000_path, f'sub-{subject}', 'ses-01', 'dwi',
        f'sub-{subject}_dwi_model-DKI_MD.nii.gz')

    if not fs.exists(last_result):
        lpath = "dki1000_2000_params.nii.gz"
        rpath = op.join(
            remote_dki1000_2000_path, f'sub-{subject}', 'ses-01', 'dwi',
            f'sub-{subject}_dwi_model-DKI_diffmodel.nii.gz')

        dwi1000_2000 = dwi_data[..., gtab.bvals < 2100]
        gtab1000_2000 = gradient_table(gtab.bvals[gtab.bvals < 2100],
                                       gtab.bvecs[gtab.bvals < 2100])
        if not fs.exists(rpath):
            log.info("Fitting DKI with b=1000 + 2000")
            t1 = time.time()
            dkim = dki.DiffusionKurtosisModel(gtab1000_2000)
            dkif = dkim.fit(dwi1000_2000, mask=np.ones(dwi_data.shape[:3]))
            nib.save(nib.Nifti1Image(dkif.model_params, dwi_img.affine), lpath)
            fs.upload(lpath, rpath)
            log.info(f"That took {time.time() - t1} seconds")
        else:
            log.info("Looks like I've already fit DKI with b=1000 and b=2000")
            log.info("Downloading DKI params from S3")
            fs.download(rpath, lpath)
            dkim = dki.DiffusionKurtosisModel(gtab1000_2000)
            dki_params = nib.load(lpath).get_fdata()
            dkif = dki.DiffusionKurtosisFit(dkim, dki_params)

        lpath = "dki1000_2000_fa.nii.gz"
        nib.save(nib.Nifti1Image(dkif.fa, dwi_img.affine), lpath)
        rpath = op.join(
            remote_dki1000_2000_path, f'sub-{subject}', 'ses-01', 'dwi',
            f'sub-{subject}_dwi_model-DKI_FA.nii.gz')
        fs.upload(lpath, rpath)

        lpath = "dki1000_2000_md.nii.gz"
        nib.save(nib.Nifti1Image(dkif.md, dwi_img.affine), lpath)
        rpath = op.join(
            remote_dki1000_2000_path, f'sub-{subject}', 'ses-01', 'dwi',
            f'sub-{subject}_dwi_model-DKI_MD.nii.gz')
        fs.upload(lpath, rpath)

    ### DKI 2000 + 3000
    last_result = op.join(
        remote_dki2000_3000_path, f'sub-{subject}', 'ses-01', 'dwi',
        f'sub-{subject}_dwi_model-DKI_MD.nii.gz')
    if not fs.exists(last_result):
        lpath = "dki2000_3000_params.nii.gz"
        rpath = op.join(
            remote_dki2000_3000_path, f'sub-{subject}', 'ses-01', 'dwi',
            f'sub-{subject}_dwi_model-DKI_diffmodel.nii.gz')

        dwi2000_3000 = dwi_data[..., (gtab.bvals > 1985) | (gtab.bvals < 50)]
        gtab2000_3000 = gradient_table(
            gtab.bvals[(gtab.bvals > 1985) | (gtab.bvals < 50)],
            gtab.bvecs[(gtab.bvals > 1985) | (gtab.bvals < 50)])
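        # This selection keeps the b=2000 and b=3000 shells plus the b0
        # volumes (bvals < 50), which the kurtosis fit requires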

        if not fs.exists(rpath):
            log.info("Fitting DKI with b=2000 + 3000")
            t1 = time.time()
            dkim = dki.DiffusionKurtosisModel(gtab2000_3000)
            dkif = dkim.fit(dwi2000_3000, mask=np.ones(dwi_data.shape[:3]))
            nib.save(nib.Nifti1Image(dkif.model_params, dwi_img.affine), lpath)
            fs.upload(lpath, rpath)
            log.info(f"That took {time.time() - t1} seconds")
        else:
            log.info("Looks like I've already fit DKI with b=2000 and b=3000")
            log.info("Downloading DKI params from S3")
            fs.download(rpath, lpath)
            dkim = dki.DiffusionKurtosisModel(gtab2000_3000)
            dki_params = nib.load(lpath).get_fdata()
            dkif = dki.DiffusionKurtosisFit(dkim, dki_params)

        lpath = "dki2000_3000_fa.nii.gz"
        nib.save(nib.Nifti1Image(dkif.fa, dwi_img.affine), lpath)
        rpath = op.join(
            remote_dki2000_3000_path, f'sub-{subject}', 'ses-01', 'dwi',
            f'sub-{subject}_dwi_model-DKI_FA.nii.gz')
        fs.upload(lpath, rpath)

        lpath = "dki2000_3000_md.nii.gz"
        nib.save(nib.Nifti1Image(dkif.md, dwi_img.affine), lpath)
        rpath = op.join(
            remote_dki2000_3000_path, f'sub-{subject}', 'ses-01', 'dwi',
            f'sub-{subject}_dwi_model-DKI_MD.nii.gz')
        fs.upload(lpath, rpath)

    ### DKI 1000 + 3000
    last_result = op.join(
        remote_dki1000_3000_path, f'sub-{subject}', 'ses-01', 'dwi',
        f'sub-{subject}_dwi_model-DKI_MD.nii.gz')
    if not fs.exists(last_result):
        lpath = "dki1000_3000_params.nii.gz"
        rpath = op.join(
            remote_dki1000_3000_path, f'sub-{subject}', 'ses-01', 'dwi',
            f'sub-{subject}_dwi_model-DKI_diffmodel.nii.gz')

        dwi1000_3000 = dwi_data[..., (gtab.bvals > 2500) | (gtab.bvals < 1500)]
        gtab1000_3000 = gradient_table(
            gtab.bvals[(gtab.bvals > 2500) | (gtab.bvals < 1500)],
            gtab.bvecs[(gtab.bvals > 2500) | (gtab.bvals < 1500)])

        if not fs.exists(rpath):
            log.info("Fitting DKI with b=1000 + 3000")
            t1 = time.time()
            dkim = dki.DiffusionKurtosisModel(gtab1000_3000)
            dkif = dkim.fit(dwi1000_3000, mask=np.ones(dwi_data.shape[:3]))
            nib.save(nib.Nifti1Image(dkif.model_params, dwi_img.affine), lpath)
            fs.upload(lpath, rpath)
            log.info(f"That took {time.time() - t1} seconds")
        else:
            log.info("Looks like I've already fit DKI with b=1000 and b=3000")
            log.info("Downloading DKI params from S3")
            fs.download(rpath, lpath)
            dkim = dki.DiffusionKurtosisModel(gtab1000_3000)
            dki_params = nib.load(lpath).get_fdata()
            dkif = dki.DiffusionKurtosisFit(dkim, dki_params)

        lpath = "dki1000_3000_fa.nii.gz"
        nib.save(nib.Nifti1Image(dkif.fa, dwi_img.affine), lpath)
        rpath = op.join(
            remote_dki1000_3000_path, f'sub-{subject}', 'ses-01', 'dwi',
            f'sub-{subject}_dwi_model-DKI_FA.nii.gz')
        fs.upload(lpath, rpath)

        lpath = "dki1000_3000_md.nii.gz"
        nib.save(nib.Nifti1Image(dkif.md, dwi_img.affine), lpath)
        rpath = op.join(
            remote_dki1000_3000_path, f'sub-{subject}', 'ses-01', 'dwi',
            f'sub-{subject}_dwi_model-DKI_MD.nii.gz')
        fs.upload(lpath, rpath)