Exemplo n.º 1
0
def skullstrip(sequences, skullstrip_base_key):
    """Perform skullstripping and mask sequences accordingly.

    Parameters
    ----------
    sequences : dict
        Mapping of sequence identifier to image file path.
    skullstrip_base_key : str
        Key of the sequence used to compute the brain mask; must be
        present in ``sequences``.

    Returns
    -------
    tuple(dict, str)
        Skullstripped images (same keys as ``sequences``) and the path to
        the computed brain mask file.

    Raises
    ------
    ValueError
        If ``skullstrip_base_key`` is not available in ``sequences``.
    """
    log.info('Skullstripping...')
    if skullstrip_base_key not in sequences:
        raise ValueError('The configured skullstripping base sequence {} is'
                         ' not available in the current case: {}'
                         .format(skullstrip_base_key, sequences.keys()))
    try:
        _skullstrip = mem.PipeFunc(nipype.interfaces.fsl.BET,
                                   config.get().cache_dir)
    except IOError as e:
        # BaseException.message was removed in Python 3; formatting the
        # exception itself works on both Python 2 and 3.
        log.error('IOError: {}\n\tTry sourcing /etc/fsl/5.0/fsl.h in your'
                  ' .bashrc file (see "man fsl").'.format(e))
        sys.exit(1)
    _apply_mask = mem.PipeFunc(albo.interfaces.utility.ApplyMask,
                               config.get().cache_dir)

    skullstripped = dict()
    # Compute the brain mask once from the configured base sequence.
    result = _skullstrip(in_file=sequences[skullstrip_base_key], mask=True,
                         robust=True, output_type='NIFTI_GZ')
    mask = result.outputs.mask_file

    # Apply the same mask to every sequence, including the base one.
    for key in sequences:
        result = _apply_mask(in_file=sequences[key], mask_file=mask)
        skullstripped[key] = result.outputs.out_file
    return skullstripped, mask
Exemplo n.º 2
0
def standardize_intensityrange(sequences, mask, intensity_models):
    """Standardize intensity range for given sequences.

    Parameters
    ----------
    sequences : dict
        Mapping of sequence identifier to image file path.
    mask : str
        Path to the brain mask file restricting the standardization.
    intensity_models : dict
        Mapping of sequence identifier to a trained intensity model; a
        model must be present for every key in ``sequences``.

    Returns
    -------
    dict
        Mapping of sequence identifier to the standardized image file.

    Raises
    ------
    KeyError
        If an intensity model is missing for any sequence.
    """
    log.info('Intensityrange standardization...')
    for key in sequences:
        if key not in intensity_models:
            raise KeyError(
                'No intensity model for sequence {} present!'.format(key))
    _irs = mem.PipeFunc(
        albo.interfaces.medpy.MedpyIntensityRangeStandardization,
        config.get().cache_dir)
    _condense_outliers = mem.PipeFunc(
        albo.interfaces.utility.CondenseOutliers,
        config.get().cache_dir)
    result = dict()
    for key in sequences:
        # The interface may change the working directory; restore it in
        # any case.
        previouscwd = os.getcwd()
        try:
            result_irs = _irs(in_file=sequences[key], out_dir='.',
                              mask_file=mask, lmodel=intensity_models[key])
        # NOTE: the exception variables were renamed from `re`/`re2`,
        # which shadowed the `re` module name; `.message` was replaced
        # with str() for Python 3 compatibility.
        except RuntimeError as err:
            message = str(err)
            if "InformationLossException" in message:
                try:
                    # Retry with ignore=True to force the transformation
                    # despite possible information loss.
                    result_irs = _irs(in_file=sequences[key], out_dir='.',
                                      ignore=True,
                                      mask_file=mask,
                                      lmodel=intensity_models[key])
                    log.warn("Loss of information may have occured when "
                             "transforming image {} to learned standard "
                             "intensity space. Re-train model to avoid this."
                             .format(sequences[key]))
                except RuntimeError as err2:
                    if "unrecognized arguments: --ignore" in str(err2):
                        log.error(
                            "Image {} can not be transformed to the learned "
                            "standard intensity space without loss of "
                            "information. Please re-train intensity models."
                            .format(sequences[key]))
                        sys.exit(1)
                    else:
                        # Bare raise preserves the original traceback.
                        raise
            elif "SingleIntensityAccumulationError" in message:
                log.error("An error occured while transforming the image {}"
                          " to learned standard intensity space: {}"
                          .format(sequences[key], message))
                sys.exit(1)
            else:
                raise
        finally:
            os.chdir(previouscwd)
        result_co = _condense_outliers(
            in_file=result_irs.outputs.out_file)
        result[key] = result_co.outputs.out_file
    return result
Exemplo n.º 3
0
def main(args):
    """Update atlas overlaps."""
    out_dir = config.get().output_dir
    if not os.path.isdir(out_dir):
        log.error('Output directory {} does not exist!'.format(out_dir))
        sys.exit(1)
    log.info('Output directory: {}'.format(out_dir))
    # Visit every case subdirectory of the output directory.
    for entry in os.listdir(out_dir):
        case_dir = os.path.join(out_dir, entry)
        if not os.path.isdir(case_dir):
            continue
        log.info('Updating case {}'.format(case_dir))
        config.get().case_output_dir = case_dir
        segmentation = os.path.join(case_dir, 'standard_segmentation.nii')
        if os.path.isfile(segmentation):
            atl.calculate_atlas_overlaps(segmentation)
    log.info('Done.')
Exemplo n.º 4
0
def correct_biasfield(sequences, mask, metadata_corrections=None):
    """Correct biasfield in given sequences.

    Parameters
    ----------
    sequences : dict
        Mapping of sequence identifier to image file path.
    mask : str
        Path to the brain mask file used by the bias field correction.
    metadata_corrections : list, optional
        Metadata modification tasks passed to NiftiModifyMetadata;
        defaults to no corrections.

    Returns
    -------
    dict
        Mapping of sequence identifier to the corrected image file.
    """
    # A mutable default argument ([]) would be shared across calls; use
    # None as sentinel instead.
    if metadata_corrections is None:
        metadata_corrections = []
    # -- Biasfield correction
    log.info('Biasfield correction...')
    _bfc = mem.PipeFunc(albo.interfaces.cmtk.MRBias,
                        config.get().cache_dir)
    _mod_metadata = mem.PipeFunc(
        albo.interfaces.utility.NiftiModifyMetadata,
        config.get().cache_dir)

    bfced = dict()
    for key in sequences:
        result_bfc = _bfc(in_file=sequences[key], mask_file=mask)
        result_mmd = _mod_metadata(in_file=result_bfc.outputs.out_file,
                                   tasks=metadata_corrections)
        bfced[key] = result_mmd.outputs.out_file
    return bfced
Exemplo n.º 5
0
def resample(sequences, pixel_spacing, fixed_image_key):
    """Resample and coregister the given set of sequences.

    Parameters
    ----------
    sequences : dict
        Mapping of sequence identifier to image file path.
    pixel_spacing : iterable
        Exactly three numbers (or numeric strings) giving the target
        voxel spacing.
    fixed_image_key : str
        Key of the sequence all other sequences are registered to; must
        be present in ``sequences``.

    Returns
    -------
    tuple(dict, dict)
        The resampled/registered images and the transformation matrix
        files, both keyed by sequence identifier.

    Raises
    ------
    ValueError
        If ``fixed_image_key`` is missing or ``pixel_spacing`` is
        malformed.
    """
    log.info('Resampling...')
    if fixed_image_key not in sequences:
        raise ValueError('The configured registration base sequence {} is not'
                         ' available in the current case: {}'
                         .format(fixed_image_key, sequences.keys()))
    # check pixelspacing format
    try:
        # list(...) is required so len() works on Python 3, where map()
        # returns a lazy iterator.
        spacing = list(map(float, pixel_spacing))
        if len(spacing) != 3:
            raise ValueError
    except ValueError:
        # Report pixel_spacing, not `spacing` -- the latter is unbound
        # when the float conversion itself raised.
        raise ValueError('The configured pixel spacing {} is invalid; must'
                         ' be exactly 3 comma-separated numbers with a dot'
                         ' as decimal mark!'.format(pixel_spacing))
    spacing_string = ','.join(map(str, spacing))

    resampled = dict()
    transforms = dict()

    _resample = mem.PipeFunc(albo.interfaces.medpy.MedpyResample,
                             config.get().cache_dir)
    _register = mem.PipeFunc(nipype.interfaces.fsl.FLIRT,
                             config.get().cache_dir)

    # The fixed image is only resampled; everything else is additionally
    # registered to it.
    result = _resample(in_file=sequences[fixed_image_key],
                       spacing=spacing_string)
    fixed_image = result.outputs.out_file
    resampled[fixed_image_key] = fixed_image

    # set() works on Python 2 and 3, unlike dict.viewkeys().
    for key in (set(sequences) - {fixed_image_key}):
        try:
            result = _register(in_file=sequences[key],
                               reference=fixed_image,
                               cost='mutualinfo',
                               cost_func='mutualinfo',
                               terminal_output='none')
        except IOError as e:
            log.error('IOError: {}\n\tTry sourcing /etc/fsl/5.0/fsl.h in your'
                      ' .bashrc file (see "man fsl").'.format(e))
            sys.exit(1)
        resampled[key] = result.outputs.out_file
        transforms[key] = result.outputs.out_matrix_file
    return resampled, transforms
Exemplo n.º 6
0
def main(args):
    """Update atlas overlaps."""
    output_dir = config.get().output_dir
    if not os.path.isdir(output_dir):
        log.error('Output directory {} does not exist!'.format(output_dir))
        sys.exit(1)
    log.info('Output directory: {}'.format(output_dir))
    # Lazily enumerate entries; only existing directories are cases.
    candidates = (os.path.join(output_dir, name)
                  for name in os.listdir(output_dir))
    for case_dir in candidates:
        if not os.path.isdir(case_dir):
            continue
        log.info('Updating case {}'.format(case_dir))
        config.get().case_output_dir = case_dir
        seg_file = os.path.join(case_dir, 'standard_segmentation.nii')
        if os.path.isfile(seg_file):
            atl.calculate_atlas_overlaps(seg_file)
    log.info('Done.')
Exemplo n.º 7
0
def _get_region_name_map(atlas_name):
    """Map region label values to region names for the given atlas.

    Reads ``<atlas_dir>/<atlas_name>.csv`` where each row is
    ``<int label>,<name>,...``. Rows whose first column is not an integer
    or that have fewer than two columns are skipped. Returns a
    ``defaultdict`` mapping label -> name that yields '' for unknown
    labels (and is empty when no CSV file exists).
    """
    mapping = collections.defaultdict(str)
    atlas_dir = config.get().atlas_dir
    csv_path = os.path.join(atlas_dir, atlas_name + '.csv')
    if not os.path.isfile(csv_path):
        return mapping
    with open(csv_path, 'r') as f:
        for row in csv.reader(f):
            try:
                mapping[int(row[0])] = row[1]
            except (ValueError, IndexError):
                # Skip header rows and malformed lines.
                pass
    return mapping
Exemplo n.º 8
0
def main(args):
    """List classifiers.

    Prints the configured classifier directory and, for every classifier
    found there, its name, the sequences it requires and any detected
    consistency issues.
    """
    classifier_dir = config.get().classifier_dir
    # Single-argument print() calls behave identically under Python 2's
    # print statement and are valid Python 3.
    print("Directory: {}".format(classifier_dir))
    classifiers = clf.load_classifiers_from(classifier_dir)
    if not classifiers:
        print("No classifiers found.")
        sys.exit(0)
    names = [c.name for c in classifiers]
    seqs = [", ".join(c.sequences) for c in classifiers]
    issues = [', '.join(clf.check_consistency(c)) for c in classifiers]
    # Pad names to the longest one so the sequence column lines up.
    width = max(len(name) for name in names)
    # list(...) keeps the concatenation working on Python 3, where zip()
    # returns a lazy iterator.
    rows = [("Classifier:", "Sequences:", "")] + list(zip(names, seqs, issues))
    for name, seq, issue in rows:
        print("{} {}".format(name.ljust(width), seq))
        if issue:
            print("\tDetected issues: " + issue)
Exemplo n.º 9
0
def segment(sequences, mask, features, classifier_file):
    """Segment the lesions in the given images.

    Parameters
    ----------
    sequences : dict
        Mapping of sequence identifier to image file path.
    mask : str
        Path to the brain mask file.
    features : iterable
        Tuples of (sequence key, feature function, kwargs,
        pass_voxelspacing flag) describing the features to extract.
    classifier_file : str
        Path to the trained RDF classifier.

    Returns
    -------
    tuple(str, str)
        Paths to the segmentation file and the probability file.
    """
    log.info('Extracting features...')
    tasks = [dict(in_file=sequences[key], mask_file=mask,
                  function=function, kwargs=kwargs, pass_voxelspacing=vs)
             for key, function, kwargs, vs in features]
    # Extract features in parallel. Close and join the pool explicitly so
    # worker processes do not linger (Pool is not a context manager on
    # Python 2). A separate name avoids rebinding the `features` input.
    pool = mp.Pool()
    try:
        feature_files = pool.map(_extract_feature, tasks)
    finally:
        pool.close()
        pool.join()

    log.info('Applying classifier...')
    _apply_rdf = mem.PipeFunc(
        albo.interfaces.classification.RDFClassifier,
        config.get().cache_dir)

    result = _apply_rdf(classifier_file=classifier_file,
                        feature_files=feature_files, mask_file=mask)
    return result.outputs.segmentation_file, result.outputs.probability_file
Exemplo n.º 10
0
def output(filepath, save_as=None, prefix='', postfix=''):
    """Copy given file to output folder.

    If save_as is given, the file is saved with that name, otherwise the
    original filename is kept. Prefix and postfix are added in any case, where
    the postfix will be added between filename and file extension.
    """
    if save_as is None:
        filename = os.path.basename(filepath)
    else:
        filename = save_as

    # Insert the postfix before the first '.' so multi-part extensions
    # (e.g. '.nii.gz') stay intact, then prepend the prefix.
    stem, sep, extension = filename.partition('.')
    filename = prefix + stem + postfix + sep + extension

    case_dir = config.get().case_output_dir
    if not os.path.isdir(case_dir):
        os.makedirs(case_dir)
    out_path = os.path.join(case_dir, filename)
    # Replace any existing file of the same name.
    if os.path.isfile(out_path):
        os.remove(out_path)
    shutil.copy2(filepath, out_path)
Exemplo n.º 11
0
def calculate_atlas_overlaps(mask):
    """Given an image mask, calculate overlap with all available atlases.

    For every atlas whose pixel spacing matches the mask's, writes
    ``<case_output_dir>/<atlas_name>.csv`` listing, per overlapped atlas
    region: label value, region name, overlap in voxels, overlap in mL
    and the overlapped fraction of the region.

    Parameters
    ----------
    mask : str
        Path to the (binary) mask image, e.g. a lesion segmentation.
    """
    atlas_files = _get_atlas_files()
    mask, mask_header = mio.load(mask)

    mask_spacing = mio.get_pixel_spacing(mask_header)
    pixel_volume = mask_spacing[0] * mask_spacing[1] * mask_spacing[2]

    for atlas_file in atlas_files:
        atlas, atlas_header = mio.load(atlas_file)
        # Skip atlases whose pixel spacing does not match the mask's;
        # voxel-wise overlap would be meaningless otherwise.
        atlas_spacing = mio.get_pixel_spacing(atlas_header)
        if mask_spacing != atlas_spacing:
            log.warning('Atlas {} will be skipped due to mismatching pixel'
                        ' spacing (atlas: {}, segmentation: {})'
                        .format(os.path.basename(atlas_file), atlas_spacing,
                                mask_spacing))
            continue
        # numpy.bool was removed in NumPy 1.24; the builtin bool is the
        # supported equivalent.
        overlap = atlas[mask.astype(bool)]

        region_sizes = numpy.bincount(atlas.ravel())
        overlap_region_sizes = numpy.bincount(overlap.ravel())

        atlas_name = os.path.basename(atlas_file).split('.')[0]
        region_names = _get_region_name_map(atlas_name)
        out_csv_path = os.path.join(config.get().case_output_dir,
                                    atlas_name + '.csv')
        # Use a context manager so the CSV file is flushed and closed
        # (the original handle was never closed).
        with open(out_csv_path, 'w') as csv_file:
            w = csv.writer(csv_file)
            w.writerow(['value', 'id', 'voxel overlap', 'mL overlap',
                        'percent overlap'])
            for index, number in enumerate(overlap_region_sizes):
                if number != 0:
                    w.writerow([index,
                                region_names[index],
                                number,
                                (number * pixel_volume) / 1000,
                                float(number) / region_sizes[index]])
Exemplo n.º 12
0
def _get_atlas_files():
    """Return the paths of all NIfTI files in the configured atlas dir."""
    atlas_dir = config.get().atlas_dir
    paths = []
    for name in os.listdir(atlas_dir):
        path = os.path.join(atlas_dir, name)
        # Only regular files that look like NIfTI images qualify.
        if os.path.isfile(path) and '.nii' in name:
            paths.append(path)
    return paths
Exemplo n.º 13
0
def register_to_standardbrain(
        segmentation_mask, standardbrain, auxilliary_image,
        standardbrain_mask=None, auxilliary_transform=None,
        auxilliary_original_spacing=None):
    """Register the given segmentation to a standard brain."""
    log.info('Standardbrain registration...')

    # 1. transform lesion mask to original t1/t2 space
    if auxilliary_transform is not None:
        invert = mem.PipeFunc(
            albo.interfaces.utility.InvertTransformation,
            config.get().cache_dir)
        apply_xfm = mem.PipeFunc(
            nipype.interfaces.fsl.ApplyXfm,
            config.get().cache_dir)
        inverted = invert(in_file=auxilliary_transform)
        transformed = apply_xfm(
            in_file=segmentation_mask,
            in_matrix_file=inverted.outputs.out_file,
            reference=auxilliary_image,
            interp="nearestneighbour"
        )
        segmentation_mask = transformed.outputs.out_file
    elif auxilliary_original_spacing is not None:
        resample_medpy = mem.PipeFunc(albo.interfaces.medpy.MedpyResample,
                                      config.get().cache_dir)
        resampled = resample_medpy(
            in_file=segmentation_mask, spacing=auxilliary_original_spacing)
        segmentation_mask = resampled.outputs.out_file

    register_affine = mem.PipeFunc(albo.interfaces.niftyreg.Aladin,
                                   config.get().cache_dir)
    register_freeform = mem.PipeFunc(albo.interfaces.niftyreg.F3D,
                                     config.get().cache_dir)
    warp = mem.PipeFunc(albo.interfaces.niftyreg.Resample,
                        config.get().cache_dir)

    # 2. register t1/t2/flair to standardbrain
    # The optional standardbrain mask is threaded through both
    # registrations via a shared kwargs dict.
    mask_kwargs = dict()
    if standardbrain_mask is not None:
        mask_kwargs['rmask_file'] = standardbrain_mask
    affine_result = register_affine(
        flo_image=auxilliary_image, ref_image=standardbrain,
        **mask_kwargs
    )
    freeform_result = register_freeform(
        flo_image=auxilliary_image, ref_image=standardbrain,
        in_affine=affine_result.outputs.affine,
        **mask_kwargs
    )

    # 3. warp lesion mask to standardbrain
    warped = warp(
        flo_image=segmentation_mask,
        ref_image=standardbrain, in_cpp=freeform_result.outputs.cpp_file,
        interpolation_order='0'
    )
    return warped.outputs.result_file
Exemplo n.º 14
0
def _extract_feature(kwargs):
    """Run the cached feature-extraction interface with the given kwargs."""
    extract = mem.PipeFunc(
        albo.interfaces.classification.ExtractFeature,
        config.get().cache_dir)
    return extract(**kwargs).outputs.out_file