Example #1
File: run_lama.py Project: antshp-vc/LAMA
def invert_volumes(config: LamaConfig):
    """
    Invert volumes, such as masks and label maps, from population-average space to input-volume space using
    pre-calculated elastix inverse transform parameter files

    Returns
    -------
    Status of inversions for masks and labels

    """

    invert_config = config['inverted_transforms'] / INVERT_CONFIG

    if config['stats_mask']:
        mask_inversion_dir = config.mkdir('inverted_stats_masks')
        InvertLabelMap(invert_config,
                       config['stats_mask'],
                       mask_inversion_dir,
                       threads=config['threads']).run()

    if config['label_map']:
        labels_inverion_dir = config.mkdir('inverted_labels')
        InvertLabelMap(invert_config,
                       config['label_map'],
                       labels_inverion_dir,
                       threads=config['threads']).run()
Example #2
File: normalise.py Project: antshp-vc/LAMA
    def memorymap_data(self, lama_root_dir: Path) -> Dict[str, np.memmap]:
        """
        Iterate over output folder getting each ...........
        Parameters
        ----------
        lama_root_dir

        Returns
        -------

        """

        imgs = OrderedDict()

        for line_dir, spec_dir in specimen_iterator(lama_root_dir):
            # Get the LAMA config from the specimen directory (the original snippet omits the
            # directory argument; spec_dir appears to be the intended one)
            config_file = common.getfile_endswith(spec_dir, '.toml')
            config = LamaConfig(config_file)
            reg_dir = config['root_reg_dir']
            # NOTE: imgpath is not defined in the original snippet; it should point to the
            # specimen's registered image under reg_dir
            basename = os.path.basename(imgpath)
            loader = common.LoadImage(imgpath)

            if not loader:
                logging.error("Problem normalising image: {}".format(
                    loader.error_msg))
                sys.exit()
            arr = loader.array
            t = tempfile.TemporaryFile()
            m = np.memmap(t, dtype=arr.dtype, mode='w+', shape=arr.shape)
            m[:] = arr
            imgs[basename] = m
        return imgs
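
The memory-mapping pattern at the end of memorymap_data can be isolated into a small helper: the array is copied into an np.memmap backed by an anonymous temporary file so large volumes do not have to stay in RAM. The sketch below is illustrative only; the random array stands in for a loaded LAMA volume.

import tempfile

import numpy as np


def memmap_array(arr: np.ndarray) -> np.memmap:
    """Copy an in-memory array into a memmap backed by a temporary file."""
    t = tempfile.TemporaryFile()
    m = np.memmap(t, dtype=arr.dtype, mode='w+', shape=arr.shape)
    m[:] = arr  # write the data through to the backing file
    return m


if __name__ == '__main__':
    img = np.random.rand(64, 64, 64).astype(np.float32)  # stand-in for a loaded volume
    mapped = memmap_array(img)
    print(mapped.shape, mapped.dtype)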
Example #3
File: run_lama.py Project: antshp-vc/LAMA
def create_glcms(config: LamaConfig, final_reg_dir):
    """
    Create grey-level co-occurrence matrices (GLCMs). This is done in the main registration pipeline so that we don't
    have to create GLCMs for the wildtypes multiple times when doing phenotype detection
    """
    if not config['glcm']:
        return
    logging.info("Creating GLCMS")
    glcm_dir = config.mkdir('glcm_dir')

    mask_path = config['fixed_mask']
    if mask_path:
        mask = common.img_path_to_array(mask_path)
    else:
        logging.warning("Cannot make GLCMs without a mask")
        return

    glcm3d.pyradiomics_glcm(final_reg_dir, glcm_dir, mask)
    logging.info("Finished creating GLCMs")
Example #4
def reverse_registration(config: Union[str, LamaConfig]):
    common.test_installation('elastix')

    if isinstance(config, (Path, str)):
        config = LamaConfig(Path(config))

    # threads = str(config['threads'])

    inv_outdir = config.mkdir('inverted_transforms')

    # Set the fixed volume to be the rigidly-aligned volume from the forward registration
    paths = LamaSpecimenData(config.config_dir).setup()
    fixed_vols_dir = paths.reg_dirs[0]
    # Get the fixed vols
    fixed_vol_paths = common.get_images_ignore_elx_itermediates(fixed_vols_dir)

    # Get the fixed and moving images. They are flipped compared to the forward registration
    moving_vol = config['fixed_volume']

    # Run the registrations, one per fixed image (usually just one image if using the jobrunner script)
    for fixed_vol in fixed_vol_paths:
        run_registration_schedule(config, fixed_vol, moving_vol, inv_outdir)
Example #5
def make_deformations_at_different_scales(config: Union[LamaConfig, dict]) -> Union[None, np.array]:
    """
    Generate jacobian determinants and, optionally, deformation vectors

    Parameters
    ----------
    config:
        LamaConfig object if running from other lama module
        Path to config file if running this module independently

    Notes
    -----
    How the jacobian determinants are generated is defined by the config['generate_deformation_fields'] entry.

    toml representation from the LAMA config:

    [generate_deformation_fields]
    192_to_10 = [ "deformable_192_to_10",]

    This will create a set of jacobian determinants and optional deformation fields called 192_to_10, using the
    named registration stages in the list.

    Multiple sets of key/value pairs are allowed so that different jacobian determinants can be made. For example,
    you may want to include the affine transformation in the jacobians, which would look like this:

    affine_192_to_10 = [ "affine", "deformable_192_to_10"]

    Returns
    -------
    jacobian array if there are any negative values
    """

    if isinstance(config, (str, Path)):
        config = LamaConfig(Path(config))

    if not config['generate_deformation_fields']:
        return

    deformation_dir = config.mkdir('deformations')
    jacobians_dir = config.mkdir('jacobians')
    log_jacobians_dir = config.mkdir('log_jacobians')

    write_vectors = config['write_deformation_vectors']
    write_raw_jacobians = config['write_raw_jacobians']
    write_log_jacobians = config['write_log_jacobians']

    for deformation_id, stage_info in config['generate_deformation_fields'].items():
        reg_stage_dirs: List[Path] = []

        resolutions: List[int] = []  # Specify the resolutions from a stage to generate defs and jacs from

        if len(stage_info) > 1:

            try:
                int(stage_info[1])
            except ValueError:
                # 1. We are specifying multiple LAMA-defined stages: use the TransformParameters.O.txt from each stage
                for stage_id in stage_info:
                    reg_stage_dirs.append(config['root_reg_dir'] / stage_id)

            else:
                # 2. It is one stage, but we are using the elastix-defined resolutions, which are specified as ints.
                #    Use the TransformParameters.O.R<x>.txt files, where <x> is one of the specified numbers
                reg_stage_dirs.append(config['root_reg_dir'] / stage_info[0])
                resolutions = stage_info[1:]
        else:
            # Just one stage is defined so use the TransformParameters.O.txt from that stage
            reg_stage_dirs.append(config['root_reg_dir'] / stage_info[0])

        deformation_id = str(deformation_id)

        deformation_scale_dir = deformation_dir / deformation_id
        deformation_scale_dir.mkdir()
        jacobians_scale_dir = jacobians_dir / deformation_id
        jacobians_scale_dir.mkdir()
        log_jacobians_scale_dir = log_jacobians_dir / deformation_id
        log_jacobians_scale_dir.mkdir()

        neg_jac_arr = _generate_deformation_fields(reg_stage_dirs, resolutions, deformation_scale_dir, jacobians_scale_dir,
                                                   log_jacobians_scale_dir, write_vectors, write_raw_jacobians, write_log_jacobians,
                                                   threads=config['threads'], filetype=config['filetype'])

    # Return after the loop so that every generate_deformation_fields entry is processed
    return neg_jac_arr
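
The branching on stage_info above decides whether a config entry names several LAMA registration stages or a single stage followed by elastix resolution numbers. Below is a small stand-alone sketch of that interpretation; the stage names and resolution numbers are hypothetical.

from pathlib import Path
from typing import List, Tuple


def split_stage_info(stage_info: list, root_reg_dir: Path) -> Tuple[List[Path], List[int]]:
    """Return (registration stage dirs, elastix resolution numbers) for one config entry."""
    if len(stage_info) > 1:
        try:
            int(stage_info[1])
        except ValueError:
            # Several named LAMA stages: use the transform parameters from each stage
            return [root_reg_dir / s for s in stage_info], []
        else:
            # One stage followed by elastix resolution numbers
            return [root_reg_dir / stage_info[0]], [int(r) for r in stage_info[1:]]
    # Just one stage named
    return [root_reg_dir / stage_info[0]], []


print(split_stage_info(['deformable_192_to_10'], Path('registrations')))
print(split_stage_info(['affine', 'deformable_192_to_10'], Path('registrations')))
print(split_stage_info(['deformable_192_to_10', 2, 3], Path('registrations')))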
Example #6
def make_deformations_at_different_scales(config: Union[LamaConfig, dict]):
    """
    Generate jacobian determinants and, optionally, deformation vectors

    Parameters
    ----------
    config:
        LamaConfig object if running from other lama module
        Path to config file if running this module independently
    """
    if isinstance(config, Path):
        config = LamaConfig(config)

    if not config['generate_deformation_fields']:
        return

    deformation_dir = config.mkdir('deformations')
    jacobians_dir = config.mkdir('jacobians')
    log_jacobians_dir = config.mkdir('log_jacobians')

    make_vectors = not config['skip_deformation_fields']

    for deformation_id, stage_info in config[
            'generate_deformation_fields'].items():
        reg_stage_dirs: List[Path] = []

        resolutions: List[int] = []  # Specify the resolutions from a stage to generate defs and jacs from

        if len(stage_info) > 1:

            try:
                int(stage_info[1])
            except ValueError:
                # 1. We are specifying multiple LAMA-defined stages: use the TransformParameters.O.txt from each stage
                for stage_id in stage_info:
                    reg_stage_dirs.append(config['root_reg_dir'] / stage_id)

            else:
                # 2. It is one stage, but we are using the elastix-defined resolutions, which are specified as ints.
                #    Use the TransformParameters.O.R<x>.txt files, where <x> is one of the specified numbers
                reg_stage_dirs.append(config['root_reg_dir'] / stage_info[0])
                resolutions = stage_info[1:]
        else:
            # Just one stage is defined so use the TransformParameters.O.txt from that stage
            reg_stage_dirs.append(config['root_reg_dir'] / stage_info[0])

        deformation_id = str(deformation_id)

        deformation_scale_dir = deformation_dir / deformation_id
        deformation_scale_dir.mkdir()
        jacobians_scale_dir = jacobians_dir / deformation_id
        jacobians_scale_dir.mkdir()
        log_jacobians_scale_dir = log_jacobians_dir / deformation_id
        log_jacobians_scale_dir.mkdir()

        generate_deformation_fields(reg_stage_dirs,
                                    resolutions,
                                    deformation_scale_dir,
                                    jacobians_scale_dir,
                                    log_jacobians_scale_dir,
                                    make_vectors,
                                    threads=config['threads'],
                                    filetype=config['filetype'])
Example #7
def job_runner(config_path: Path) -> Path:
    """
    Run the registrations specified in the config file

    Returns
    -------
    The path to the final registered images
    """

    # Load the lama config
    config = LamaConfig(config_path)

    mean_transforms = config_path.parent / 'mean_transforms'
    mean_transforms.mkdir(exist_ok=True)

    avg_dir = config_path.parent / 'averages'
    avg_dir.mkdir(exist_ok=True)

    # Folder in which to create the status control files
    status_dir = config_path.parent / 'status'
    status_dir.mkdir(exist_ok=True)

    # Get list of specimens
    inputs_dir = config.options['inputs']
    pairs = get_pairs(inputs_dir)

    previous_mean_dir = inputs_dir
    first = True

    for i, reg_stage in enumerate(config['registration_stage_params']):

        stage_id = reg_stage['stage_id']
        avg_out = avg_dir / f'{stage_id}.nrrd'
        logging.info(stage_id)
        reg_stage_dir = Path(config.stage_dirs[stage_id])

        # Make stage dir if not made by another instance of the script
        reg_stage_dir.mkdir(exist_ok=True, parents=True)

        elastix_stage_parameters = generate_elx_parameters(config, do_pairwise=True)[stage_id]
        # Make the elastix parameter file for this stage
        elxparam_path = reg_stage_dir / f'{ELX_PARAM_PREFIX}{stage_id}.txt'

        if not elxparam_path.is_file():
            with open(elxparam_path, 'w') as fh:
                fh.write(elastix_stage_parameters)

        stage_mean_dir = mean_transforms / stage_id
        stage_mean_dir.mkdir(exist_ok=True, parents=True)

        stage_status_dir = status_dir / stage_id
        stage_status_started = stage_status_dir / 'started'
        stage_status_failed = stage_status_dir / 'failed'
        stage_status_finished = stage_status_dir / 'finished'

        stage_status_started.mkdir(exist_ok=True, parents=True)
        stage_status_failed.mkdir(exist_ok=True, parents=True)
        stage_status_finished.mkdir(exist_ok=True, parents=True)

        stage_tform_started = stage_status_dir / 'mean_started'
        stage_tform_finished = stage_status_dir / 'mean_finished'
        stage_tform_started.mkdir(exist_ok=True)
        stage_tform_finished.mkdir(exist_ok=True)

        do_stage_reg(pairs, stage_status_dir, reg_stage_dir, previous_mean_dir,
                     elxparam_path, config, first)

        do_mean_transforms(pairs, stage_status_dir, reg_stage_dir, stage_mean_dir, previous_mean_dir, avg_out)

        first = False
        previous_mean_dir = stage_mean_dir
Example #8
def batch_invert_transform_parameters(config: Union[Path, LamaConfig],
                                      clobber=True, new_log: bool = False):
    """
    Create new elastix TransformParameter files that can then be used by transformix to invert labelmaps, stats etc

    Parameters
    ----------
    config
        path to original reg pipeline config file

    clobber
        if True, overwrite any inverted transform parameters already present

    new_log
        Whether to create a new log file. If called from another module, logging may happen there
    """
    common.test_installation('elastix')

    if isinstance(config, (Path, str)):
        config = LamaConfig(config)

    threads = str(config['threads'])

    if new_log:
        common.init_logging(config / 'invert_transforms.log')

    reg_dirs = get_reg_dirs(config)

    # Get the image basenames from the first stage registration folder (usually rigid)
    # ignore images in non-relevant folders that may be present
    volume_names = [x.stem for x in common.get_file_paths(reg_dirs[0], ignore_folders=[RESOLUTION_IMGS_DIR, IMG_PYRAMID_DIR])]

    inv_outdir = config.mkdir('inverted_transforms')

    stages_to_invert = defaultdict(list)

    jobs: List[Dict] = []

    reg_stage_dir: Path

    for i, vol_id in enumerate(volume_names):

        for reg_stage_dir in reg_dirs:

            if not reg_stage_dir.is_dir():
                logging.error('cannot find {}'.format(reg_stage_dir))
                raise FileNotFoundError(f'Cannot find registration dir {reg_stage_dir}')

            inv_stage_dir = inv_outdir / reg_stage_dir.name

            specimen_stage_reg_dir = reg_stage_dir / vol_id
            specimen_stage_inversion_dir = inv_stage_dir / vol_id

            transform_file = common.getfile_startswith(specimen_stage_reg_dir, ELX_TRANSFORM_NAME)
            parameter_file = common.getfile_startswith(reg_stage_dir, ELX_PARAM_PREFIX)

            # Create the folder to put the specimen inversion parameter files in.
            inv_stage_dir.mkdir(exist_ok=True)

            # Add the stage to the inversion order config (in reverse order), if not already.
            if reg_stage_dir.name not in stages_to_invert['label_propagation_order']:
                stages_to_invert['label_propagation_order'].insert(0, reg_stage_dir.name)

            if clobber:
                common.mkdir_force(specimen_stage_inversion_dir)  # Overwrite any inversion files that exist for a single specimen

            # Each registration directory contains a metadata file, which contains the relative path to the fixed volume
            reg_metadata = cfg_load(specimen_stage_reg_dir / common.INDV_REG_METADATA)
            fixed_volume = (specimen_stage_reg_dir / reg_metadata['fixed_vol']).resolve()

            # Invert the Transform parameters with options for normal image inversion

            job = {
                'specimen_stage_inversion_dir': specimen_stage_inversion_dir,
                'parameter_file': abspath(parameter_file),
                'transform_file': transform_file,
                'fixed_volume': fixed_volume,
                'param_file_output_name': 'inversion_parameters.txt',
                'image_replacements': IMAGE_REPLACEMENTS,
                'label_replacements': LABEL_REPLACEMENTS,
                'image_transform_file': PROPAGATE_IMAGE_TRANSFORM,
                'label_transform_file': PROPAGATE_LABEL_TRANFORM,
                'clobber': clobber,
                'threads': threads
            }

            jobs.append(job)

    # By running each inversion job (a single job per registration stage) in its own process we can speed things up a bit
    # If we can get multithreaded inversion in elastix we can remove this python multithreading
    pool = Pool(8)
    try:
        pool.map(_invert_transform_parameters, jobs)

    except KeyboardInterrupt:
        print('terminating inversion')
        pool.terminate()
        pool.join()

    # TODO: Should we replace the need for this invert.yaml?
    reg_dir = Path(os.path.relpath(reg_stage_dir, inv_outdir))
    stages_to_invert['registration_directory'] = str(reg_dir)  # Doc why we need this
    # Create a yaml config file so that inversions can be run separately
    invert_config = config['inverted_transforms'] / PROPAGATE_CONFIG

    with open(invert_config, 'w') as yf:
        yf.write(yaml.dump(dict(stages_to_invert), default_flow_style=False))
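
For reference, here is a rough sketch of what the propagation config written at the end contains: the registration stage names collected in reverse order plus a relative path back to the registration directory. The stage names below are hypothetical; only the two keys are taken from the code above.

from collections import defaultdict

import yaml

stages_to_invert = defaultdict(list)
for stage_name in ['rigid', 'affine', 'deformable']:  # forward registration order
    stages_to_invert['label_propagation_order'].insert(0, stage_name)  # reversed for propagation
stages_to_invert['registration_directory'] = '../registrations'

print(yaml.dump(dict(stages_to_invert), default_flow_style=False))
# label_propagation_order:
# - deformable
# - affine
# - rigid
# registration_directory: ../registrations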
Example #9
def batch_invert_transform_parameters(config: Union[str, LamaConfig],
                                      clobber=True,
                                      new_log: bool = False):
    """
    Create new elastix TransformParameter files that can then be used by transformix to invert labelmaps, stats etc

    Parameters
    ----------
    config
        path to original reg pipeline config file

    clobber
        if True, overwrite any inverted transform parameters already present

    new_log
        Whether to create a new log file. If called from another module, logging may happen there
    """
    common.test_installation('elastix')

    if isinstance(config, (Path, str)):
        config = LamaConfig(config)

    threads = str(config['threads'])

    if new_log:
        common.init_logging(config / 'invert_transforms.log')

    reg_dirs = get_reg_dirs(config)

    # Get the image basenames from the first stage registration folder (usually rigid)
    # ignore images in non-relevant folders that may be present
    volume_names = [
        x.stem for x in common.get_file_paths(reg_dirs[0],
                                              ignore_folder=IGNORE_FOLDER)
    ]

    inv_outdir = config.mkdir('inverted_transforms')

    stages_to_invert = defaultdict(list)

    jobs: List[Dict] = []

    reg_stage_dir: Path

    for i, vol_id in enumerate(volume_names):

        label_replacements = {
            'FinalBSplineInterpolationOrder': '0',
            'FixedInternalImagePixelType': 'short',
            'MovingInternalImagePixelType': 'short',
            'ResultImagePixelType': 'unsigned char',
            'WriteTransformParametersEachResolution': 'false',
            'WriteResultImageAfterEachResolution': 'false'
        }

        image_replacements = {
            'FinalBSplineInterpolationOrder': '3',
            'FixedInternalImagePixelType': 'float',
            'MovingInternalImagePixelType': 'float',
            'ResultImagePixelType': 'float',
            'WriteTransformParametersEachResolution': 'false',
            'WriteResultImageAfterEachResolution': 'false'
        }

        for reg_stage_dir in reg_dirs:

            if not reg_stage_dir.is_dir():
                logging.error('cannot find {}'.format(reg_stage_dir))
                raise FileNotFoundError(
                    f'Cannot find registration dir {reg_stage_dir}')

            inv_stage_dir = inv_outdir / reg_stage_dir.name

            specimen_stage_reg_dir = reg_stage_dir / vol_id
            specimen_stage_inversion_dir = inv_stage_dir / vol_id

            transform_file = common.getfile_startswith(specimen_stage_reg_dir,
                                                       ELX_TRANSFORM_PREFIX)
            parameter_file = common.getfile_startswith(reg_stage_dir,
                                                       ELX_PARAM_PREFIX)

            # Create the folder to put the specimen inversion parameter files in.
            inv_stage_dir.mkdir(exist_ok=True)

            # Add the stage to the inversion order config (in reverse order), if not already.
            if reg_stage_dir.name not in stages_to_invert['inversion_order']:
                stages_to_invert['inversion_order'].insert(
                    0, reg_stage_dir.name)

            if clobber:
                common.mkdir_force(
                    specimen_stage_inversion_dir
                )  # Overwrite any inversion files that exist for a single specimen

            # Each registration directory contains a metadata file, which contains the relative path to the fixed volume
            reg_metadata = cfg_load(specimen_stage_reg_dir /
                                    common.INDV_REG_METADATA)
            fixed_volume = (specimen_stage_reg_dir /
                            reg_metadata['fixed_vol']).resolve()

            # Invert the Transform parameters with options for normal image inversion

            job = {
                'specimen_stage_inversion_dir': specimen_stage_inversion_dir,
                'parameter_file': abspath(parameter_file),
                'transform_file': transform_file,
                'fixed_volume': fixed_volume,
                'param_file_output_name': 'inversion_parameters.txt',
                'image_replacements': image_replacements,
                'label_replacements': label_replacements,
                'image_transform_file': IMAGE_INVERTED_TRANSFORM,
                'label_transform_file': LABEL_INVERTED_TRANFORM,
                'clobber': clobber,
                'threads': threads
            }

            jobs.append(job)

    # Run the inversion jobs. Currently using only one thread, as it seems that elastix now uses multiple threads for
    # the inversions

    logging.info('inverting with {} threads: '.format(threads))
    # 17/09/18 If we can get multithreaded inversion in elastix 4.9 we can remove the python multithreading
    pool = Pool(1)
    try:
        pool.map(_invert_transform_parameters, jobs)

    except KeyboardInterrupt:
        print('terminating inversion')
        pool.terminate()
        pool.join()

    # TODO: Should we replace the need for this invert.yaml?
    reg_dir = Path(os.path.relpath(reg_stage_dir, inv_outdir))
    stages_to_invert['registration_directory'] = str(
        reg_dir)  # Doc why we need this
    # Create a yaml config file so that inversions can be run separately
    invert_config = config['inverted_transforms'] / INVERT_CONFIG

    with open(invert_config, 'w') as yf:
        yf.write(yaml.dump(dict(stages_to_invert), default_flow_style=False))
Example #10
def job_runner(config_path: Path) -> Path:
    """
    Run the registrations specified in the config file

    Returns
    -------
    The path to the final registered images
    """

    config = LamaConfig(config_path)
    print(common.git_log())

    avg_dir = config.options['average_folder']
    avg_dir.mkdir(exist_ok=True, parents=True)

    elastix_stage_parameters = generate_elx_parameters(
        config, do_pairwise=config['pairwise_registration'])

    # Set the fixed volume up for the first stage. This will change each stage if doing a population average
    fixed_vol = config['fixed_volume']

    # Get list of specimens
    inputs_dir = config.options['inputs']
    spec_ids = [Path(x).stem for x in common.get_file_paths(inputs_dir)]

    for i, reg_stage in enumerate(config['registration_stage_params']):

        stage_id = reg_stage['stage_id']
        logging.info(stage_id)
        stage_dir = Path(config.stage_dirs[stage_id])

        # Make stage dir if not made by another instance of the script
        stage_dir.mkdir(exist_ok=True, parents=True)

        starting_avg = stage_dir / 'avg_started'
        average_done = stage_dir / "avg_done"

        while True:  # Pick up unstarted specimens. Only break when registration and average are complete

            # Check if any specimens left (It's possible the avg is being made but all specimens are registered)
            spec_stage_dirs = [
                x.name for x in stage_dir.iterdir() if x.is_dir()
            ]
            not_started = set(spec_ids).difference(spec_stage_dirs)

            next_stage = False  # No breaking out yet

            if len(not_started) > 0:
                next_spec_id = list(not_started)[0]  # Some specimens left. Pick up a spec_id and process it

            else:  # All specimens are being processed
                next_stage = True

                #  This block controls what happens if we have all specimens registered
                while True:
                    if not check_stage_done(stage_dir):
                        print('waiting for stage to finish')
                        time.sleep(5)
                        continue

                    print('stage finished')

                    if average_done.is_file():
                        print('found average done file')
                        break  # Next stage
                    else:
                        if starting_avg.is_file():
                            print('found starting average file')
                            time.sleep(5)
                            continue
                        else:
                            try:
                                open(starting_avg, 'x')
                            except FileExistsError:
                                time.sleep(5)
                                print('cannot write avg starting file')
                                continue
                            else:
                                average_path = avg_dir / f'{stage_id}.nrrd'
                                make_avg(stage_dir, average_path,
                                         avg_dir / f'{stage_id}.log')
                                open(average_done, 'x').close()
                                print('making average')
                                break

            if next_stage:
                print('breaking stage')
                break

            # Get the input for this specimen
            if i == 0:  # The first stage
                moving = inputs_dir / f'{next_spec_id}.nrrd'
            else:
                moving = list(config.stage_dirs.values())[
                    i - 1] / next_spec_id / f'{next_spec_id}.nrrd'
                fixed_vol = avg_dir / f'{list(config.stage_dirs.keys())[i-1]}.nrrd'
            reg_method = TargetBasedRegistration

            # Make the elastix parameter file for this stage
            elxparam = elastix_stage_parameters[stage_id]
            elxparam_path = stage_dir / f'{ELX_PARAM_PREFIX}{stage_id}.txt'

            if not elxparam_path.is_file():
                with open(elxparam_path, 'w') as fh:
                    if elxparam:
                        fh.write(elxparam)

            fixed_mask = None

            logging.info(moving)

            # Do the registrations
            registrator = reg_method(elxparam_path, moving, stage_dir,
                                     config['filetype'], config['threads'],
                                     fixed_mask)

            registrator.set_target(fixed_vol)

            try:
                registrator.run()  # Do the registrations for a single stage
            except FileExistsError as e:
                # 040620: Bodge as some specimens are picked up twice.
                # Need a better way to make sure each specimen is picked up only once
                continue

            spec_done = stage_dir / next_spec_id / 'spec_done'  # The directory gets created in .run()
            open(spec_done, 'x').close()
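
The started/done flag files above let several concurrently launched copies of the script share one stage directory: open(path, 'x') creates a file only if it does not already exist, so exactly one process "wins" a piece of work such as making the stage average. A minimal sketch of that claim pattern, with made-up paths, is below.

from pathlib import Path


def try_claim(flag: Path) -> bool:
    """Atomically claim a task by creating a flag file; return True if this process got it."""
    try:
        open(flag, 'x').close()
    except FileExistsError:
        return False  # another process claimed it first
    return True


stage_dir = Path('output/registrations/rigid')
stage_dir.mkdir(parents=True, exist_ok=True)

if try_claim(stage_dir / 'avg_started'):
    print('this process makes the average')
else:
    print('another process is already making the average')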
Example #11
File: run_lama.py Project: antshp-vc/LAMA
def run(configfile: Path):
    """
        This is the main function Lama script for generating data from registering volumes
        It reads in the config file, creates directories, and initialises the registration process.

        Looks for paths to inputs relative the directory containing the config file

        Parameters
        ----------
        param config
            A toml config file
        """
    try:
        config = LamaConfig(configfile)
    except OSError as e:
        logging.error(f'Cannot open LAMA config file: {str(configfile)}\n{e}')
        raise
    except Exception as e:
        raise (LamaConfigError(e))

    config.mkdir('output_dir')
    qc_dir = config.mkdir('qc_dir')
    config.mkdir('average_folder')
    config.mkdir('root_reg_dir')

    # TODO find the histogram batch code
    # if not config['no_qc']:
    #     input_histogram_dir = config.mkdir('input_image_histograms')
    #     make_histograms(config['inputs'], input_histogram_dir)

    logpath = config.config_path.parent / LOG_FILE  # Make log in same directory as config file
    common.init_logging(logpath)

    if not common.test_installation('elastix'):
        raise OSError('Make sure elastix is installed')

    # Catch ctrl-c signals so we can write that to the logs
    # signal.signal(signal.SIGTERM, common.service_shutdown)
    signal.signal(signal.SIGINT, common.service_shutdown)

    mem_monitor = MonitorMemory(Path(config['output_dir']).absolute())

    # Disable QC output?
    no_qc: bool = config['no_qc']

    logging.info(common.git_log())  # If running from a git repo, log the branch and commit

    logging.info("Registration started")

    final_registration_dir = run_registration_schedule(config)

    make_deformations_at_different_scales(config)

    create_glcms(config, final_registration_dir)

    if config['skip_transform_inversion']:
        logging.info('Skipping inversion of transforms')
    else:
        logging.info('inverting transforms')
        batch_invert_transform_parameters(config)

        logging.info('inverting volumes')
        invert_volumes(config)

        if config['label_map']:

            generate_organ_volumes(config)

    if not generate_staging_data(config):
        logging.warning('No staging data generated')

    # Write out the names of the registration dirs in the order they were run
    with open(config['root_reg_dir'] / REG_DIR_ORDER, 'w') as fh:
        for reg_stage in config['registration_stage_params']:
            fh.write(f'{reg_stage["stage_id"]}\n')

    if not no_qc:
        if config['skip_transform_inversion']:
            inverted_label_overlay_dir = None
        else:
            inverted_label_overlay_dir = config.mkdir(
                'inverted_label_overlay_dir')

        # registered_midslice_dir = config.mkdir('registered_midslice_dir')

        make_qc_images(config.config_dir, config['fixed_volume'], qc_dir)

    mem_monitor.stop()

    return True
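
Hypothetical usage of run(): point it at a LAMA toml config and the pipeline output directories are created alongside it. The path below is made up for illustration and assumes run() from the example above is importable.

from pathlib import Path

run(Path('lama_runs/baseline/lama_config.toml'))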
Example #12
def run(configfile: Path):
    """
        This is the main function Lama script for generating data from registering volumes
        It reads in the config file, creates directories, and initialises the registration process.

        Looks for paths to inputs relative the directory containing the config file

        Parameters
        ----------
        param config
            A toml config file
        """
    try:
        config = LamaConfig(configfile)
    except OSError as e:
        logging.error(f'Cannot open LAMA config file: {str(configfile)}\n{e}')
        raise
    except Exception as e:
        raise (LamaConfigError(e))

    config.mkdir('output_dir')
    qc_dir = config.mkdir('qc_dir')
    config.mkdir('average_folder')
    config.mkdir('root_reg_dir')

    # TODO find the histogram batch code
    # if not config['no_qc']:
    #     input_histogram_dir = config.mkdir('input_image_histograms')
    #     make_histograms(config['inputs'], input_histogram_dir)

    logpath = config.config_path.parent / LOG_FILE  # Make log in same directory as config file
    common.init_logging(logpath)

    if not common.test_installation('elastix'):
        raise OSError('Make sure elastix is installed')

    # Catch ctrl-c signals so we can write that to the logs
    # signal.signal(signal.SIGTERM, common.service_shutdown)
    signal.signal(signal.SIGINT, common.service_shutdown)

    mem_monitor = MonitorMemory(Path(config['output_dir']).absolute())

    # Disable QC output?
    no_qc: bool = config['no_qc']

    logging.info(common.git_log())  # If running from a git repo, log the branch and commit

    logging.info("Registration started")

    first_stage_only = config['skip_forward_registration']
    # If we only want the reverse label propagation, we just need the initial rigid registration to act as the
    # fixed image for the moving population average
    final_registration_dir = run_registration_schedule(
        config, first_stage_only=first_stage_only)

    if not first_stage_only:
        neg_jac = make_deformations_at_different_scales(config)
        folding_report(neg_jac,
                       config['output_dir'],
                       config['label_info'],
                       outdir=config['output_dir'])

        create_glcms(config, final_registration_dir)

    # Write out the names of the registration dirs in the order they were run
    with open(config['root_reg_dir'] / REG_DIR_ORDER_CFG, 'w') as fh:
        for reg_stage in config['registration_stage_params']:
            fh.write(f'{reg_stage["stage_id"]}\n')
            if first_stage_only:
                break

    if config['skip_transform_inversion']:
        logging.info('Skipping inversion of transforms')
    else:
        logging.info('inverting transforms')

        if config['label_propagation'] == 'reverse_registration':
            reverse_registration(config)
        else:  # invert_transform method is the default
            batch_invert_transform_parameters(config)

        logging.info('propagating volumes')
        invert_volumes(config)

        # Now that labels have been inverted, should we delete the transform files?
        if config['delete_inverted_transforms']:
            shutil.rmtree(config['output_dir'] / 'inverted_transforms')

        if config['label_map']:

            generate_organ_volumes(config)

            if config['seg_plugin_dir']:
                plugin_interface.secondary_segmentation(config)

    if not generate_staging_data(config):
        logging.warning('No staging data generated')

    if not no_qc:

        rev_reg = config['label_propagation'] == 'reverse_registration'
        make_qc_images(config.config_dir,
                       config['fixed_volume'],
                       qc_dir,
                       mask=None,
                       reverse_reg_propagation=rev_reg)

    mem_monitor.stop()

    return True
Example #13
def secondary_segmentation(config: LamaConfig):
    """
    Use user-added scripts to segment/clean up organs

    Parameters
    ----------
    config

    Returns
    -------

    """

    plugin_dir = config.config_dir / config['seg_plugin_dir']

    if not plugin_dir.is_dir():
        logging.error(f'Cannot find plugin directory: {plugin_dir}')
        return

    # Find the directories containing the segmentations
    # Get the final inversion stage
    invert_config = config['inverted_transforms'] / PROPAGATE_CONFIG
    segmentation_dir = cfg_load(invert_config)['label_propagation_order'][
        -1]  # rename to segmentation stage
    inverted_label_dir = config['inverted_labels'] / segmentation_dir
    initial_segmentation_path = next(inverted_label_dir.glob('**/*.nrrd'))

    first_reg_dir = config['root_reg_dir'] / config[
        'registration_stage_params'][0]['stage_id']  # usually rigid
    image_to_segment = next(first_reg_dir.glob('**/*.nrrd'))

    segmentations = []

    for plugin_src in [
            x for x in plugin_dir.iterdir()
            if str(x).endswith('.py') and x.name != 'plugin_interface.py'
    ]:

        # Catch all exceptions, as we don't want a plugin crashing the pipeline
        try:
            spec = importlib.util.spec_from_file_location(
                plugin_src.stem, str(plugin_src))
            plugin = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(plugin)

            new_segmetation = plugin.run(image_to_segment,
                                         initial_segmentation_path)

        except Exception as e:
            logging.error(f'Plugin {plugin_src} failed\n{e}')
        else:
            segmentations.append(new_segmetation)

    if not segmentations:
        logging.error(f'No segmentations returned from {plugin_src.name}')

    # Merge all the segmentations into a single label map. If there are any overlaps, the plugin called last will have
    # priority

    seg = None

    for s in segmentations:
        if seg is None:  # the first segmentation becomes the base label map
            seg = s
            continue
        seg[s != 0] = s[s != 0]

    additional_seg_dir = config.mkdir('additional_seg_dir')
    write_array(seg, additional_seg_dir /
                f'{config.config_dir.name}_additonal_seg.nrrd'
                )  # TODO include specimen name