Example #1
import datetime
import logging
import time

import utils  # project-local helper module (run_and_log, get_fsl_version, ...), not shown here


def compute_dti(subject):
    # Initialize a per-subject logger
    logger = logging.getLogger(subject['id'] + subject['t'])
    log_file = subject['path'].joinpath(
        "2_preprocess_{}_{}.log".format(subject['id'], subject['t']))
    logger.addHandler(logging.FileHandler(log_file, mode='w'))
    logger.setLevel(logging.INFO)
    logger.info("Preprocessing subject {} at {}, step 2...".format(
        subject['id'], subject['t']))
    logger.info("{}".format(datetime.datetime.today()))
    logger.info("FSL version {}".format(utils.get_fsl_version()))
    logger.info("")

    start = time.perf_counter()

    dtifit_output_root = subject['path'].joinpath(subject['id'] + "_" +
                                                  subject['t'] + "_dtifit")
    mask_file = subject['path'].joinpath(subject['id'] + "_" + subject['t'] +
                                         "_mask.nii.gz")
    data_file = subject['path'].joinpath(subject['id'] + "_" + subject['t'] +
                                         "_eddy.nii.gz")

    # Fit the diffusion tensor on the eddy-corrected data.
    success = utils.run_and_log([
        "dtifit",
        "--data={}".format(data_file),
        "--out={}".format(dtifit_output_root),
        "--mask={}".format(mask_file),
        "--bvecs={}".format(subject['bvec']),
        "--bvals={}".format(subject['bval']),
        "-V",
    ], logger)

    logger.info(
        "2_preprocess done. Elapsed time={}".format(time.perf_counter() -
                                                    start))

    return success
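All of these snippets call a project-local utils.run_and_log helper whose source is not shown on this page. Judging from the call sites (the example above passes an argument list plus a logger; examples below pass a plain command string with no logger and compare the result to 0), a minimal sketch might look like the following. The signature and return convention here are assumptions, not the project's actual code:

import shlex
import subprocess


def run_and_log(cmd, logger=None):
    # Accept either a pre-split argument list or a single shell-style string.
    if isinstance(cmd, str):
        cmd = shlex.split(cmd)
    # Arguments may be pathlib.Path objects, so coerce everything to str.
    process = subprocess.Popen([str(arg) for arg in cmd],
                               stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT,
                               text=True)
    # Stream the command's output into the logger as it arrives.
    for line in process.stdout:
        if logger is not None:
            logger.info(line.rstrip())
    # Return the exit status (0 on success), matching the `!= 0` checks below.
    return process.wait()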
Example #2
def clone_repo(git_url, repo_dir, git_tag=None):
    '''Clone a repo to the given location.'''
    if git_tag is None:
        clone_cmd = "git clone " + git_url + " " + repo_dir
    else:
        clone_cmd = "git clone -b " + git_tag + " --single-branch " + git_url + " " + repo_dir
    if utils.run_and_log(clone_cmd) != 0:
        raise Exception("Unable to clone repository: {}".format(git_url))
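For illustration, a hypothetical call pinning a tag (URL and path invented):

clone_repo("https://github.com/example/project.git", "/tmp/project", git_tag="v1.2.0")

Since the command is built by plain string concatenation, a URL, tag, or path containing spaces would need quoting before being passed in.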
Example #3
def apply_eddy(subject):
    print("Processing subject {} at {}.".format(subject['id'], subject['t']))

    # Initialize logger
    logger = logging.getLogger(subject['id'] + subject['t'])
    log_file = subject['path'].joinpath(
        "1_preprocess_{}_{}.log".format(subject['id'], subject['t']))
    logger.addHandler(logging.FileHandler(log_file, mode='w'))
    logger.setLevel(logging.INFO)
    logger.info("Preprocessing subject {} at {}, step 1...".format(subject['id'], subject['t']))
    logger.info("{}".format(datetime.datetime.today()))
    logger.info("FSL version {}".format(utils.get_fsl_version()))
    logger.info("")

    start = time.perf_counter()

    index_file = subject['path'].joinpath(subject['id'] + "_" + subject['t'] + "_index.txt")

    # First generate an index file with one entry per volume in the bval file.
    with open(subject['bval'], 'r') as f:
        count = len(f.read().split())

    with open(index_file, 'w') as f:
        f.write("1 "*count)

    logger.info("Found {} diffusion directions.".format(count))

    eddy_output_root = subject['path'].joinpath(subject['id'] + "_" + subject['t'] + "_eddy")
    fieldmap_file = subject['path'].joinpath(subject['id'] + "_" + subject['t'] + "_fieldmap")
    mask_file = subject['path'].joinpath(subject['id'] + "_" + subject['t'] + "_mask.nii.gz")
    acqp_file = pathlib.Path("acqparams.txt")

    success = utils.run_and_log(["eddy_cuda9.1",
                                 "--imain=" + str(subject['dti']),
                                 "--mask=" + str(mask_file),
                                 "--acqp=" + str(acqp_file),
                                 "--index=" + str(index_file),
                                 "--bvecs=" + str(subject['bvec']),
                                 "--bvals=" + str(subject['bval']),
                                 "--out=" + str(eddy_output_root),
                                 "--field=" + str(fieldmap_file),
                                 "--repol",
                                 "--slspec=slspec.txt",
                                 # "--json=DTI_spec.json",
                                 "--niter=6",
                                 "--fwhm=10,5,1,0,0,0",
                                 "--mporder=8",
                                 "--s2v_niter=8",
                                 "--ol_type=both",
                                 "--estimate_move_by_susceptibility",
                                 "--very_verbose"], logger)

    logger.info("1_preprocess done. Elapsed time={}".format(time.perf_counter() - start))

    return success
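A note on the index file generated above: eddy's --index argument assigns each volume of --imain to a row of the --acqp file, so writing "1 " once per bval entry maps every volume to the first (and, here, only) row of acqparams.txt. With a hypothetical bval file containing

0 0 1000 1000 2000

the code above would write

1 1 1 1 1

to the index file.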
Example #4
def main(arg_strings=None):
    '''
    Entry function.
    '''
    parser = make_parser()
    args = parser.parse_args(arg_strings)
    variants = utils.make_variants(args.python_versions, args.build_types,
                                   args.mpi_types, args.cuda_versions)

    pr_branch = utils.get_output("git log -1 --format='%H'")
    utils.run_and_log("git remote set-head origin -a")
    default_branch = utils.get_output(
        "git symbolic-ref refs/remotes/origin/HEAD | sed 's@^refs/remotes/origin/@@'"
    )

    variant_build_results = dict()
    for variant in variants:
        utils.run_and_log("git checkout {}".format(default_branch))
        master_build_config_data, master_config = _get_configs(variant)
        master_build_numbers = _get_build_numbers(master_build_config_data,
                                                  master_config, variant)

        utils.run_and_log("git checkout {}".format(pr_branch))
        pr_build_config_data, pr_config = _get_configs(variant)
        current_pr_build_numbers = _get_build_numbers(pr_build_config_data,
                                                      pr_config, variant)

        print("Build Info for Variant:   {}".format(variant))
        print("Current PR Build Info:    {}".format(current_pr_build_numbers))
        print("Master Branch Build Info: {}".format(master_build_numbers))

        # No build numbers can go backwards without a version change.
        for package in master_build_numbers:
            if (package in current_pr_build_numbers
                    and current_pr_build_numbers[package]["version"]
                        == master_build_numbers[package]["version"]):
                assert (int(current_pr_build_numbers[package]["number"])
                        >= int(master_build_numbers[package]["number"])), \
                    "If the version doesn't change, the build number can't be reduced."

        # If packages are added or removed, don't require a version change.
        if set(master_build_numbers) != set(current_pr_build_numbers):
            return

        # At least one package needs to increase the build number or change the version.
        checks = [
            (current_pr_build_numbers[package]["version"]
             != master_build_numbers[package]["version"])
            or (int(current_pr_build_numbers[package]["number"])
                > int(master_build_numbers[package]["number"]))
            for package in master_build_numbers
        ]
        variant_key = utils.variant_string(variant["python"], variant["build_type"],
                                           variant["mpi_type"], variant["cudatoolkit"])
        variant_build_results[variant_key] = any(checks)
    assert any(variant_build_results.values()), \
        "At least one package needs to increase the build number or change the version in at least one variant."
Example #5
def validate_config(arg_strings=None):
    '''
    Entry function.
    '''
    args = make_parser().parse_args(arg_strings)
    variants = utils.make_variants(args.python_versions, args.build_types, args.mpi_types)
    for variant in variants:
        print('Validating {} for {}'.format(args.conda_build_config, variant))
        for env_file in args.env_config_file:
            print('Validating {} for {} : {}'.format(args.conda_build_config, env_file, variant))
            try:
                recipes = build_tree.BuildTree([env_file],
                                               variant['python'],
                                               variant['build_type'],
                                               variant['mpi_type'],
                                               repository_folder=args.repository_folder,
                                               conda_build_config=args.conda_build_config)
            except OpenCEError as err:
                print(err.msg)
                print('Error while validating {} for {} : {}'.format(args.conda_build_config, env_file, variant))
                return 1

            packages = [package for recipe in recipes for package in recipe.packages]
            channels = {channel for recipe in recipes for channel in recipe.channels}
            deps = {dep for recipe in recipes for dep in recipe.run_dependencies}
            deps.update(recipes.get_external_dependencies(variant))

            pkg_args = " ".join(["\"{}\"".format(generalize_version(dep))
                                 for dep in deps
                                 if utils.remove_version(dep) not in packages])

            channel_args = " ".join({"-c \"{}\"".format(channel) for channel in channels})

            cli = "conda create --dry-run -n test_conda_dependencies {} {}".format(channel_args, pkg_args)

            retval = utils.run_and_log(cli)

            if retval != 0:
                print('Error while validating {} for {} : {}'.format(args.conda_build_config, env_file, variant))
                return 1

            print('Successfully validated {} for {} : {}'.format(args.conda_build_config, env_file, variant))

        print('Successfully validated {} for {}'.format(args.conda_build_config, variant))

    print("{} Successfully validated!".format(args.conda_build_config))
    return 0
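For a concrete sense of the dry-run check, with a hypothetical channel and two unsatisfied dependencies the assembled cli string would look roughly like:

conda create --dry-run -n test_conda_dependencies -c "conda-forge" "numpy" "scipy"

(channel and package names invented; the real strings depend on generalize_version and the recipes' metadata). A zero exit status from the dry run means conda could solve the environment, which is what counts as a successful validation here.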
Example #6
def eddy_qc(subject):
    # Initialize logger
    logger = logging.getLogger(subject['id'] + subject['t'])
    log_file = subject['path'].joinpath(
        "1_qc_{}_{}.log".format(subject['id'], subject['t']))
    logger.addHandler(logging.FileHandler(log_file, mode='w'))
    logger.setLevel(logging.INFO)
    logger.info("Eddy output quality control for subject {} at {}...".format(
        subject['id'], subject['t']))
    logger.info("{}".format(datetime.datetime.today()))
    logger.info("FSL version {}".format(utils.get_fsl_version()))
    logger.info("")

    start = time.perf_counter()

    eddy_qc_folder = subject['path'].joinpath(subject['id'] + "_" +
                                              subject['t'] + "_eddy.qc")
    # Remove any previous QC output so eddy_quad can recreate the folder.
    if eddy_qc_folder.exists():
        for f in eddy_qc_folder.glob("*"):
            f.unlink()
        eddy_qc_folder.rmdir()

    success = utils.run_and_log([
        "eddy_quad",
        subject['path'].joinpath(subject['id'] + "_" + subject['t'] + "_eddy"),
        "-idx", subject['path'].joinpath(subject['id'] + "_" + subject['t'] + "_index.txt"),
        "-par", "acqparams.txt",
        "-m", subject['path'].joinpath(subject['id'] + "_" + subject['t'] + "_mask.nii.gz"),
        "-b", subject['bval'],
        "--field={}".format(subject['path'].joinpath(
            subject['id'] + "_" + subject['t'] + "_fieldmap.nii.gz")),
        "-g", subject['bvec'],
    ], logger)

    logger.info("1_qc done. Elapsed time={}".format(time.perf_counter() -
                                                    start))

    return success
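As a side note, the glob/unlink/rmdir sequence above only clears a flat directory. If eddy_quad ever produced nested output, the standard-library equivalent below would handle it; behavior is otherwise the same:

import shutil

# Recursively delete the QC folder and everything under it.
if eddy_qc_folder.exists():
    shutil.rmtree(eddy_qc_folder)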
def preprocess(subject):
    # Initialize logger
    logger = logging.getLogger(subject['id'] + subject['t'])
    log_file = subject['path'].joinpath(
        "0_preprocess_{}_{}.log".format(subject['id'], subject['t']))
    logger.addHandler(logging.FileHandler(log_file, mode='w'))
    logger.setLevel(logging.INFO)
    logger.info("Preprocessing subject {} at {}, step 0...".format(subject['id'], subject['t']))
    logger.info("{}".format(datetime.datetime.today()))
    logger.info("FSL version {}".format(utils.get_fsl_version()))
    logger.info("")
    start = time.perf_counter()

    # Load subject params for time t; fall back to defaults if the row is
    # missing or contains non-numeric values.
    params = {}
    try:
        params = subjects_params.loc[(subject['id'], subject['t'])].to_dict()
        params = {key: value for key, value in params.items()
                  if not math.isnan(float(value))}
    except (KeyError, TypeError, ValueError):
        pass

    # Compute fieldmap for b0 image unwrapping and eddy
    # First split the magnitude file
    mag_name = subject['mag'].name.split(".")[0]
    utils.run_and_log(["fslsplit", subject['mag'], subject['path'].joinpath(mag_name)], logger)
    mag_0_file = subject['path'].joinpath(mag_name + "0000.nii.gz")
    mag_1_file = subject['path'].joinpath(mag_name + "0001.nii.gz")

    # Resample mag and phase files to the DTI data resolution
    mag_file = subject['path'].joinpath(subject['t'] + "_mag.nii.gz")
    phase_file = subject['path'].joinpath(subject['t'] + "_phase.nii.gz")

    # mag_file = subject['path'].joinpath(mag_name + "0000.nii.gz")
    # phase_file = subject['phase']

    # utils.run_and_log(["flirt", "-in", mag_0_file, "-ref", subject['dti'], "-out", mag_file, "-interp", "nearestneighbour", "-applyxfm", "-usesqform", "-datatype", "short"], logger)
    # utils.run_and_log(["flirt", "-in", subject['phase'], "-ref", subject['dti'], "-out", phase_file, "-interp", "nearestneighbour", "-applyxfm", "-usesqform", "-datatype", "short"], logger)

    f_mag_mask = params.get('f_mag_mask', 0.6)
    g_mag_mask = params.get('g_mag_mask', -0.01)

    # # Mask from magnitude
    # logger.info("Masking mag image with f={} and g={}".format(f_mag_mask, g_mag_mask))
    # mag_mask_file = subject['path'].joinpath(subject['t'] + "_mag_mask.nii.gz")
    # utils.run_and_log(["bet", mag_file, subject['path'].joinpath(subject['t'] + "_mag"), "-m", "-f", str(f_mag_mask), "-g", str(g_mag_mask), "-n", "-v", "-R"], logger)

    # # Some erode/dilation pass to smoothen and "regularize" the mask
    # utils.run_and_log(["fslmaths", mag_mask_file, "-fillh", mag_mask_file], logger)
    # utils.run_and_log(["fslmaths", mag_mask_file, "-kernel", "gauss", "3", "-fmean", "-thr", "0.55", "-bin", mag_mask_file], logger)
    # #utils.run_and_log(["fslmaths", mag_mask_file, "-kernel", "sphere", "5", "-ero", mag_mask_file], logger)
    # # Mask magnitude
    # utils.run_and_log(["fslmaths", mag_file, "-mas", mag_mask_file, mag_file], logger)
    # # utils.run_and_log(["fslmaths", mag_file, "-mas", mask_file, mag_file], logger)

    # # Compute fieldmap
    # logger.info("Computing fieldmap...")
    fieldmap_file = subject['path'].joinpath(subject['id'] + "_" + subject['t'] + "_fieldmap.nii.gz")
    # utils.run_and_log(["fsl_prepare_fieldmap", "SIEMENS", phase_file, mag_file, fieldmap_file, "2.46"], logger)

    # Smooth fieldmap
    # utils.run_and_log(["fugue", "--loadfmap={}".format(fieldmap_file),
    #                    "--smooth3=4",
    #                    "--savefmap={}".format(fieldmap_file)], logger)

    utils.run_and_log(["flirt", "-in", fieldmap_file, "-ref", subject['dti'], "-out", fieldmap_file, "-interp", "trilinear", "-applyxfm", "-usesqform"], logger)

    # Convert Hz into rad.s^-1 for fugue (multiply by 2*pi ~= 6.283)
    utils.run_and_log(["fslmaths", fieldmap_file, "-mul", "6.2830", fieldmap_file], logger)

    # Generate a brain mask from the unwarped mean of the first 4 b0 volumes
    b0_file = subject['path'].joinpath(subject['t'] + "_b0.nii.gz")
    b0_mean_file = subject['path'].joinpath(subject['id'] + "_" + subject['t'] + "_b0_mean.nii.gz")
    b0_mean_unwarped_file = subject['path'].joinpath(subject['id'] + "_" + subject['t'] + "_b0_unwarped.nii.gz")
    mask_file = subject['path'].joinpath(subject['id'] + "_" + subject['t'] + "_mask.nii.gz")

    f_mask = params.get('f_mask', 0.4)
    g_mask = params.get('g_mask', -0.1)

    logger.info("Masking mean unwarped b0 image with f={} and g={}".format(f_mask, g_mask))
    utils.run_and_log(["fslroi", subject['dti'], b0_file, "0", "4"], logger)
    utils.run_and_log(["fslmaths", b0_file, "-Tmean", b0_mean_file], logger)
    utils.run_and_log(["fugue", "-i", b0_mean_file, "--dwell=0.000289996", "--loadfmap={}".format(fieldmap_file), "--unwarpdir=y", "-u", b0_mean_unwarped_file], logger)
    utils.run_and_log(["bet", b0_mean_unwarped_file, subject['path'].joinpath(subject['id'] + "_" + subject['t']), "-m", "-f", str(f_mask), "-g", str(g_mask), "-S"], logger)
    utils.run_and_log(["fslmaths", mask_file, "-kernel", "gauss", "3", "-fmean", "-thr", "0.55", "-bin", mask_file], logger)
    utils.run_and_log(["fslmaths", mask_file, "-fillh", mask_file], logger)


    #b0_file.unlink()
    #b0_mean_file.unlink()


    # Convert fieldmap back into Hz (divide by 2*pi)
    utils.run_and_log(["fslmaths", fieldmap_file, "-div", "6.2830", fieldmap_file], logger)

    # mag_0_file.unlink()
    # mag_1_file.unlink()

    logger.info("")
    logger.info("0_preprocess done. Elapsed time={}".format(time.perf_counter() - start))
def main(arg_strings=None):
    '''
    Entry function.
    '''
    parser = make_parser()
    args = parser.parse_args(arg_strings)
    variants = utils.make_variants(args.python_versions, args.build_types,
                                   args.mpi_types)

    build_config_data, _ = build_feedstock.load_package_config()

    pr_branch = utils.get_output("git log -1 --format='%H'")
    utils.run_and_log("git remote set-head origin -a")
    default_branch = utils.get_output(
        "git symbolic-ref refs/remotes/origin/HEAD | sed 's@^refs/remotes/origin/@@'"
    )

    config = get_or_merge_config(None)
    config.variant_config_files = [utils.DEFAULT_CONDA_BUILD_CONFIG]
    recipe_conda_build_config = os.path.join(os.getcwd(), "config",
                                             "conda_build_config.yaml")
    if os.path.exists(recipe_conda_build_config):
        config.variant_config_files += [recipe_conda_build_config]
    config.verbose = False

    utils.run_and_log("git checkout {}".format(default_branch))
    master_build_numbers = dict()
    for recipe in build_config_data["recipes"]:
        metas = conda_build.api.render(recipe['path'],
                                       config=config,
                                       variants=variants[0],
                                       bypass_env_check=True,
                                       finalize=False)
        for meta, _, _ in metas:
            master_build_numbers[meta.meta['package']['name']] = {
                "version": meta.meta['package']['version'],
                "number": meta.meta['build']['number']
            }

    utils.run_and_log("git checkout {}".format(pr_branch))
    current_pr_build_numbers = dict()
    for recipe in build_config_data["recipes"]:
        metas = conda_build.api.render(recipe['path'],
                                       config=config,
                                       variants=variants[0],
                                       bypass_env_check=True,
                                       finalize=False)
        for meta, _, _ in metas:
            current_pr_build_numbers[meta.meta['package']['name']] = {
                "version": meta.meta['package']['version'],
                "number": meta.meta['build']['number']
            }

    print("Current PR Build Info:    ", current_pr_build_numbers)
    print("Master Branch Build Info: ", master_build_numbers)

    # No build numbers can go backwards without a version change.
    for package in master_build_numbers:
        if (package in current_pr_build_numbers
                and current_pr_build_numbers[package]["version"]
                    == master_build_numbers[package]["version"]):
            assert (int(current_pr_build_numbers[package]["number"])
                    >= int(master_build_numbers[package]["number"])), \
                "If the version doesn't change, the build number can't be reduced."

    # If packages are added or removed, don't require a version change.
    if set(master_build_numbers) != set(current_pr_build_numbers):
        return

    # At least one package needs to increase the build number or change the version.
    checks = [
        (current_pr_build_numbers[package]["version"]
         != master_build_numbers[package]["version"])
        or (int(current_pr_build_numbers[package]["number"])
            > int(master_build_numbers[package]["number"]))
        for package in master_build_numbers
    ]
    assert any(checks), \
        "At least one package needs to increase the build number or change the version."