Esempio n. 1
0
import long_range_compare  # Add missing package-paths
from long_range_compare.data_paths import get_trendytukan_drive_path, get_hci_home_path

import vigra
import numpy as np
import os

import sys
# Make the locally checked-out cremi_tools package importable.
sys.path += [
    os.path.join(get_hci_home_path(), "python_libraries/cremi_tools"),
]
"""
Prepare segmentation for submission
"""

# Padding that was applied to each axis of the original CREMI test volume,
# as (before, after) pairs per axis (z, y, x).
original_pad = ((37, 38), (911, 911), (911, 911))

# Slices that cut the padding back off a padded volume (inverse of original_pad).
slice_original_pad = (slice(37, -38), slice(911, -911), slice(911, -911))
# Full shape of the padded volume (z, y, x).
padded_shape = (200, 3072, 3072)

# CREMI test sample to process ("A+", "B+", or "C+" — only "A+" used here).
sample = "A+"

# raw_file = os.path.join(get_trendytukan_drive_path(), "datasets/CREMI/official_test_samples/sample_{}+_padded_20160601.hdf".format(sample))
#
# import h5py
# GT_mask_file = os.path.join(get_trendytukan_drive_path(), "datasets/CREMI/alignment_experiments/sample_{}+_GT_mask.hdf".format(sample))
# mask_inner_path = "volumes/labels/mask"
# GT_box = np.zeros(padded_shape, dtype="uint8")
# GT_box[slice_original_pad] = 1
#
# with h5py.File(GT_mask_file, 'w') as f:
from long_range_compare import cremi_utils as cremi_utils
from long_range_compare import cremi_experiments as cremi_experiments

from segmfriends.utils.various import starmap_with_kwargs



if __name__ == '__main__':
    # Parse CLI options, build the fixed kwargs shared by all runs, and run
    # the submission-preparation step of the selected CREMI experiment.
    import argparse  # stdlib; was used below but never imported in this script

    parser = argparse.ArgumentParser()
    parser.add_argument('--exp_name', type=str, default="FullTestSamples")  #DebugExp
    parser.add_argument('--project_directory', default="projects/agglo_cluster_compare/quadxeon5_results/",  type=str)
    # TODO: option to pass some other fixed kwargs and overwrite it...?

    args = parser.parse_args()

    exp_name = args.exp_name
    project_dir = os.path.join(get_trendytukan_drive_path(), args.project_directory)

    # Kwargs every experiment run needs: experiment name plus project/config paths.
    fixed_kargs = {
        "experiment_name": exp_name,
        "project_directory": project_dir,
        "configs_dir_path": os.path.join(get_hci_home_path(), "pyCharm_projects/longRangeAgglo/experiments/cremi/configs")
    }

    # Select the experiment by name and prepare the submission files:
    experiment = cremi_experiments.get_experiment_by_name(exp_name)(fixed_kwargs=fixed_kargs)
    experiment.prepare_submission(project_dir)


                        default="projects/agglo_cluster_compare",
                        type=str)
    # TODO: option to pass some other fixed kwargs and overwrite it...?

    # NOTE(review): this fragment appears truncated by the example scraper —
    # `parser` is built by argument-parsing code that is not visible here.
    args = parser.parse_args()

    exp_name = args.exp_name

    # Kwargs shared by every experiment run (experiment name, project and
    # config directories):
    fixed_kargs = {
        "experiment_name":
        exp_name,
        "project_directory":
        os.path.join(get_trendytukan_drive_path(), args.project_directory),
        "configs_dir_path":
        os.path.join(
            get_hci_home_path(),
            "pyCharm_projects/longRangeAgglo/experiments/cremi/configs")
    }

    # Select experiment and load kwargs:
    experiment = cremi_experiments.get_experiment_by_name(exp_name)(
        fixed_kwargs=fixed_kargs)
    kwargs_iter, nb_threads_pool = experiment.get_data()
    print("Agglomarations to run: ", len(kwargs_iter))

    # # Start pool:
    # pool = ThreadPool(processes=nb_threads_pool)
    # starmap_with_kwargs(pool, cremi_utils.run_clustering, args_iter=repeat([]),
    #                     kwargs_iter=kwargs_iter)

    from long_range_compare.two_pass_utls import two_pass_agglomeration, GUACA_agglomerator
Esempio n. 4
0
import yaml
import json
import os

import segmfriends.vis as vis_utils
from segmfriends.utils.config_utils import adapt_configs_to_model, recursive_dict_update, return_recursive_key_in_dict

from long_range_compare.load_datasets import CREMI_crop_slices, CREMI_sub_crops_slices


import matplotlib.animation as manimation




# Directory holding the agglomeration results (UCM HDF5 files + JSON configs).
root_path = os.path.join(get_hci_home_path(), "GEC_comparison_longRangeGraph")

# Experiment file names, encoded as "<ID>_<sample>_<agglo_type>_<flag>":
# file_names = ["9054435_B_sum_False", "9575382_B_max_False"]
file_names = ["998353761_B_sum_False", "229028997_B_max_False", "839260953_B_mean_False"]

# Which offset channel / z-slice to visualize, and how many animation frames.
SELECTED_OFFSET = 1
SELECTED_SLICE = 0
NB_FRAMES = 100

# NOTE(review): the loop body looks truncated by the example scraper — the
# ucm_path/config_path built here are not yet used in the visible lines.
for filename in file_names:
    ucm_path = os.path.join(root_path, "UCM", filename + ".h5")
    config_path = os.path.join(root_path, filename + ".json")

    # File names must follow the 4-part "<ID>_<sample>_<agglo_type>_<flag>" scheme:
    assert len(filename.split("_")) == 4

    ID, sample, agglo_type, _ = filename.split("_")
Esempio n. 5
0
def get_segmentation(affinities,
                     GT,
                     json_filename,
                     dataset,
                     run_conn_comp=False):
    """Load a predicted segmentation, grow it with a watershed step and score it.

    Reads the segmentation from the HDF5 file matching ``json_filename``,
    optionally relabels it via connected components, grows segments below the
    configured size threshold with a watershed, scores the result against
    ``GT`` with the CREMI metric, appends the score to the JSON result file
    and stores the grown segmentation back into the HDF5 file.

    NOTE(review): relies on module-level names defined elsewhere in the
    project (``get_dataset_offsets``, ``yaml2dict``,
    ``adapt_configs_to_model``, ``SizeThreshAndGrowWithWS``, ``cremi_score``,
    ``vigra``, ``time``) — confirm they are imported in the full file.
    """
    offsets = get_dataset_offsets(dataset)

    # Build the post-processing configuration from the YAML config files:
    cfg = adapt_configs_to_model(
        [],
        debug=True,
        models=yaml2dict('./experiments/models_config.yml'),
        postproc=yaml2dict('./experiments/post_proc_config.yml'))
    pp_cfg = cfg['postproc']

    export_dir = os.path.join(get_hci_home_path(),
                              'GEC_comparison_longRangeGraph')
    segm_file = os.path.join(export_dir,
                             json_filename.replace('.json', '.h5'))
    print(segm_file)
    segm = vigra.readHDF5(segm_file, 'segm').astype('uint64')

    # Optional connected components (needed for the "max" affogato variant):
    if run_conn_comp:
        print("Run connected comp...")
        t0 = time.time()
        segm = vigra.analysis.labelVolume(segm.astype('uint32'))
        print("Done in {} s".format(time.time() - t0))

    print("WS growing:")
    t0 = time.time()
    grower = SizeThreshAndGrowWithWS(
        pp_cfg['thresh_segm_size'],
        offsets,
        hmap_kwargs=pp_cfg['prob_map_kwargs'],
        apply_WS_growing=True,
        size_of_2d_slices=True)
    segm_ws = grower(affinities, segm)
    print("Done in {} s".format(time.time() - t0))

    # Score the grown segmentation against the ground truth:
    ws_scores = cremi_score(GT,
                            segm_ws,
                            border_threshold=None,
                            return_all_scores=True)
    print("Scores achieved : ", ws_scores)

    # Merge the new score into the existing JSON result file:
    scores_path = os.path.join(export_dir, json_filename)
    with open(scores_path, 'r') as f:
        results = json.load(f)

    results['score_WS'] = ws_scores

    with open(scores_path, 'w') as f:
        json.dump(results, f, indent=4, sort_keys=True)
        # yaml.dump(result_dict, f)

    # Save the grown segmentation next to the original one:
    vigra.writeHDF5(segm_ws.astype('uint32'), segm_file, 'segm_WS')
Esempio n. 6
0
def get_segmentation(image_path,
                     input_model_keys,
                     agglo,
                     local_attraction,
                     save_UCM,
                     from_superpixels=False,
                     use_multicut=False):
    """Run one GMIS agglomeration experiment and store its outputs in HDF5.

    Loads (finetuned) affinities from ``image_path``, builds a segmentation
    pipeline from the YAML configs adapted to ``model_keys``, runs it
    block-wise, optionally grows small segments with a watershed step, and
    writes the segmentation, per-instance confidence scores and the multicut
    energy into files derived from ``image_path``
    (``*.output.h5`` / ``*.inst.confidence.h5``).

    NOTE(review): depends on module-level names not visible in this chunk
    (``h5py``, ``np``, ``GMIS_utils``, ``yaml2dict``,
    ``adapt_configs_to_model``, ``get_segmentation_pipeline``,
    ``GreedyEdgeContractionAgglomeraterFromSuperpixels``, ``BlockWise``,
    ``SizeThreshAndGrowWithWS``, ``vigra``, ``time``, ``pbar``) — confirm
    they are imported/defined in the full file.
    """
    edge_prob = 1.  # used below as 'offsets_probabilities' (keep every offset)
    # NOTE(review): THRESH is unused in the active code path; only the
    # commented-out variants below reference it.
    THRESH = input_model_keys[0]

    # Output files are derived from the input file name:
    inst_out_file = image_path.replace('.input.h5', '.output.h5')
    inst_out_conf_file = image_path.replace('.input.h5', '.inst.confidence.h5')

    # TODO: 1
    # NAME_AGGLO = "orig_affs"
    # THRESH = 'thresh030'
    # NAME_AGGLO = "finetuned_affs"
    # THRESH = 'thresh050'
    NAME_AGGLO = "finetuned_affs_avg"
    # THRESH = 'thresh050'

    # HDF5 inner path encoding agglo type, affinity variant and model keys:
    # inner_path = agglo + "_avg_retrained_bal_affs_thresh050"
    partial_path = ""
    for key in input_model_keys:
        partial_path += "_{}".format(key)
    inner_path = "{}_{}{}".format(agglo, NAME_AGGLO, partial_path)
    # inner_path = "{}_orig_affs_thresh030".format(agglo)
    # print(inner_path)

    # Config keys used to adapt the YAML model/post-processing configs:
    model_keys = [agglo] if not local_attraction else [
        agglo, "impose_local_attraction"
    ]
    model_keys += input_model_keys

    # Check whether both output files already contain this inner path.
    # NOTE(review): the result is currently unused — the early return below
    # is commented out, so the experiment is always re-run.
    already_exists = True
    with h5py.File(inst_out_file, 'r') as f:
        if inner_path not in f:
            already_exists = False
    with h5py.File(inst_out_conf_file, 'r') as f:
        if inner_path not in f:
            already_exists = False

    # TODO: already exist, writing, load affs

    # if already_exists:
    #     pbar.update(1)
    #     return
    # print(image_path)

    # print("Processing {}...".format(image_path))
    # Load data:
    with h5py.File(image_path, 'r') as f:
        # TODO: 2
        affinities_orig = f['instance_affinities'][:]
        affinities_noAvg = f['finetuned_affs_noAvg'][:]
        affinities = f['finetuned_affs'][:]

        # shape = f['shape'][:]
        # strides = f['offset_ranges'][:]
        # affs_prob = f['instance_affinities'][:]
        # affs_balanced = f['balanced_affs'][:]
        # class_prob = f['semantic_affinities'][:]
        # class_mask = f['semantic_argmax'][:]

    # Offsets are derived from the fixed GMIS strides:
    strides = np.array([1, 2, 4, 8, 16, 32], dtype=np.int32)
    offsets = GMIS_utils.get_offsets(strides)

    #
    # # -----------------------------------
    # # Pre-process affinities:
    # # -----------------------------------
    #
    # TODO: 3
    # affinities_orig, foreground_mask_affs = GMIS_utils.combine_affs_and_mask(affinities_orig, class_prob, class_mask, offsets)

    # Load model and post-processing configs and adapt them to `model_keys`:
    config_path = os.path.join(
        get_hci_home_path(),
        "pyCharm_projects/longRangeAgglo/experiments/cityscapes/configs")
    configs = {
        'models': yaml2dict(os.path.join(config_path, 'models_config.yml')),
        'postproc': yaml2dict(os.path.join(config_path,
                                           'post_proc_config.yml'))
    }

    if from_superpixels:
        if use_multicut:
            model_keys = ["use_fragmenter", 'multicut_exact']
        else:
            model_keys += ["gen_HC_DTWS"]
    configs = adapt_configs_to_model(model_keys, debug=False, **configs)
    post_proc_config = configs['postproc']
    # Inject per-run options into the agglomeration kwargs:
    post_proc_config['generalized_HC_kwargs']['agglomeration_kwargs'][
        'offsets_probabilities'] = edge_prob
    post_proc_config['generalized_HC_kwargs']['agglomeration_kwargs'][
        'return_UCM'] = save_UCM

    # # Add longRange weights:
    # offset_weights = np.ones_like(offsets[:,0])
    # offset_weights[:16] = 35
    # offset_weights[16:32] = 20
    # offset_weights[32:] = 1
    #

    # affs_balanced = GMIS_utils.combine_affs_with_class(affs_balanced, class_prob, refine_bike=True, class_mask=class_mask)
    # affs_balanced = np.expand_dims(affs_balanced.reshape(affs_balanced.shape[0], affs_balanced.shape[1], -1), axis=0)
    # affs_balanced = np.rollaxis(affs_balanced, axis=-1, start=0)
    # affs_balanced *= foreground_mask_affs
    # post_proc_config['generalized_HC_kwargs']['agglomeration_kwargs']['offsets_weights'] = affs_balanced
    # post_proc_config['generalized_HC_kwargs']['agglomeration_kwargs']['extra_aggl_kwargs']['threshold'] = 0.25

    # These keys are consumed here and must not reach the pipeline kwargs:
    n_threads = post_proc_config.pop('nb_threads')
    invert_affinities = post_proc_config.pop('invert_affinities', False)
    segm_pipeline_type = post_proc_config.pop('segm_pipeline_type', 'gen_HC')

    segmentation_pipeline = get_segmentation_pipeline(
        segm_pipeline_type,
        offsets,
        nb_threads=n_threads,
        invert_affinities=invert_affinities,
        return_fragments=False,
        **post_proc_config)

    # Optional second agglomeration applied on top of the blockwise result:
    if post_proc_config.get('use_final_agglomerater', False):
        final_agglomerater = GreedyEdgeContractionAgglomeraterFromSuperpixels(
            offsets,
            n_threads=n_threads,
            invert_affinities=invert_affinities,
            **post_proc_config['generalized_HC_kwargs']
            ['final_agglomeration_kwargs'])
    else:
        final_agglomerater = None

    post_proc_solver = BlockWise(
        segmentation_pipeline=segmentation_pipeline,
        offsets=offsets,
        final_agglomerater=final_agglomerater,
        blockwise=post_proc_config.get('blockwise', False),
        invert_affinities=invert_affinities,
        nb_threads=n_threads,
        return_fragments=False,
        blockwise_config=post_proc_config.get('blockwise_kwargs', {}))

    # Run the block-wise segmentation pipeline on the affinities:
    # print("Starting prediction...")
    tick = time.time()
    outs = post_proc_solver(affinities)
    # The solver returns extra outputs when the UCM is requested:
    if save_UCM:
        pred_segm, MC_energy, UCM, mergeTimes = outs
    else:
        pred_segm, MC_energy = outs
        # pred_segm = outs
        # MC_energy = 0
    comp_time = time.time() - tick
    # print("Post-processing took {} s".format(comp_time))

    # pred_segm *= foreground_mask

    # Grow segments below the configured size threshold with a watershed step
    # (skipped when starting from superpixels):
    # if post_proc_config.get('thresh_segm_size', 0) != 0:
    if from_superpixels:
        pred_segm_WS = pred_segm
    else:
        grow = SizeThreshAndGrowWithWS(
            post_proc_config['thresh_segm_size'],
            offsets,
            hmap_kwargs=post_proc_config['prob_map_kwargs'],
            apply_WS_growing=False,
            debug=False)
        # pred_segm_WS = vigra.analysis.labelVolumeWithBackground(grow(affinities, pred_segm).astype(np.uint32), neighborhood='indirect')
        pred_segm_WS = grow(affinities, pred_segm)
        pred_segm_WS, _, _ = vigra.analysis.relabelConsecutive(pred_segm_WS)

    # TODO: allow all kinds of merges (not only local); skip connected components on the seeds
    # pred_segm_WS *= foreground_mask
    confidence_scores = GMIS_utils.get_confidence_scores(
        pred_segm_WS, affinities, offsets)

    # Save results; only element 0 along the first axis of pred_segm_WS is
    # stored (presumably a singleton batch/z axis — TODO confirm).
    # inner_path = "MEAN_bk_fixed"
    vigra.writeHDF5(pred_segm_WS[0].astype('uint16'), inst_out_file,
                    inner_path)
    vigra.writeHDF5(confidence_scores, inst_out_conf_file, inner_path)
    vigra.writeHDF5(np.array([MC_energy['MC_energy']]), inst_out_conf_file,
                    "MC_energy/" + inner_path)

    # # # -------------------------------------------------
    # # # PLOTTING:
    # #
    from segmfriends import vis as vis

    # fig, axes = plt.subplots(ncols=1, nrows=3, figsize=(7, 7))
    # for a in fig.get_axes():
    #     a.axis('off')
    #
    # # affs_repr = np.linalg.norm(affs_repr, axis=-1)
    # # ax.imshow(affs_repr, interpolation="none")
    #
    #
    #
    # vis.plot_segm(ax, pred_segm_WS, z_slice=0)
    #
    # # fig.savefig(pdf_path)
    # pdf_path = "./segm.pdf"
    # vis.save_plot(fig, os.path.dirname(pdf_path), os.path.basename(pdf_path))

    # for off_stride in [0,8,16,24,32,40]:
    #     # affs_repr = GMIS_utils.get_affinities_representation(affinities[:off_stride+8], offsets[:off_stride+8])
    #     # # affs_repr = GMIS_utils.get_affinities_representation(affinities[16:32], offsets[16:32])
    #     # affs_repr = np.rollaxis(affs_repr, axis=0, start=4)[0]
    #     # if affs_repr.min() < 0:
    #     #     affs_repr += np.abs(affs_repr.min())
    #     # affs_repr /= affs_repr.max()
    #
    #
    #     fig, ax = plt.subplots(ncols=1, nrows=3, figsize=(7, 7))
    #     for a in fig.get_axes():
    #         a.axis('off')
    #
    #
    #     # affs_repr = np.linalg.norm(affs_repr, axis=-1)
    #     # ax.imshow(affs_repr, interpolation="none")
    #
    #     vis.plot_output_affin(ax[0], affinities, nb_offset=off_stride+3, z_slice=0)
    #     vis.plot_output_affin(ax[1], affinities_orig, nb_offset=off_stride + 3, z_slice=0)
    #     vis.plot_output_affin(ax[2], affinities_noAvg, nb_offset=off_stride + 3, z_slice=0)
    #
    #     pdf_path = image_path.replace(
    #         '.input.h5', '.affs_{}.pdf'.format(off_stride))
    #     # fig.savefig(pdf_path)
    #     pdf_path = "./comparison_affs_{}_noAvg.pdf".format(off_stride)
    #     vis.save_plot(fig, os.path.dirname(pdf_path), os.path.basename(pdf_path))
    #     print(off_stride)

    # print("Waiting...")
    # time.sleep(1000)

    # Advance the (module-level) progress bar for this image:
    pbar.update(1)
Esempio n. 7
0
def get_segmentation(image_path,
                     input_model_keys,
                     agglo,
                     local_attraction,
                     save_UCM,
                     from_superpixels=False,
                     use_multicut=False):
    """Refit affinities with a trained regressor, agglomerate and save results.

    Unlike the previous variant, this one computes fresh affinities via
    ``trained_log_regr.infer`` and overwrites the ``finetuned_affs`` dataset
    inside ``image_path`` before running the segmentation pipeline. It then
    writes the segmentation, per-instance confidence scores and the multicut
    energy into files derived from ``image_path``
    (``*.output.h5`` / ``*.inst.confidence.h5``).

    NOTE(review): depends on module-level names not visible in this chunk
    (``trained_log_regr``, ``h5py``, ``np``, ``GMIS_utils``, ``yaml2dict``,
    ``adapt_configs_to_model``, ``get_segmentation_pipeline``,
    ``GreedyEdgeContractionAgglomeraterFromSuperpixels``, ``BlockWise``,
    ``SizeThreshAndGrowWithWS``, ``vigra``, ``time``, ``pbar``) — confirm
    they are imported/defined in the full file.
    """
    edge_prob = 1.  # used below as 'offsets_probabilities' (keep every offset)
    # NOTE(review): THRESH is unused in the active code path; only the
    # commented-out variants below reference it.
    THRESH = input_model_keys[0]

    # Output files are derived from the input file name:
    inst_out_file = image_path.replace('.input.h5', '.output.h5')
    inst_out_conf_file = image_path.replace('.input.h5', '.inst.confidence.h5')

    # TODO: 1
    # NAME_AGGLO = "orig_affs"
    # THRESH = 'thresh030'
    # NAME_AGGLO = "finetuned_affs"
    # THRESH = 'thresh050'
    NAME_AGGLO = "finetuned_affs_avg"
    # THRESH = 'thresh050'

    # HDF5 inner path encoding agglo type, affinity variant and model keys:
    # inner_path = agglo + "_avg_retrained_bal_affs_thresh050"
    partial_path = ""
    for key in input_model_keys:
        partial_path += "_{}".format(key)
    inner_path = "{}_{}{}".format(agglo, NAME_AGGLO, partial_path)
    # inner_path = "{}_orig_affs_thresh030".format(agglo)
    # print(inner_path)

    # Config keys used to adapt the YAML model/post-processing configs:
    model_keys = [agglo] if not local_attraction else [
        agglo, "impose_local_attraction"
    ]
    model_keys += input_model_keys

    # print("Processing {}...".format(image_path))
    # Infer fresh (finetuned) affinities for this image:
    affinities = trained_log_regr.infer(image_path)

    # Cache the freshly inferred affinities back into the input file,
    # replacing any previous 'finetuned_affs' dataset:
    with h5py.File(image_path, 'r+') as f:
        # TODO: 2
        # affinities_orig = f['instance_affinities'][:]
        # affinities_noAvg = f['finetuned_affs_noAvg'][:]
        if 'finetuned_affs' in f:
            del f['finetuned_affs']
        f['finetuned_affs'] = affinities

        # shape = f['shape'][:]
        # strides = f['offset_ranges'][:]
        # affs_prob = f['instance_affinities'][:]
        # affs_balanced = f['balanced_affs'][:]
        # class_prob = f['semantic_affinities'][:]
        # class_mask = f['semantic_argmax'][:]

    # Offsets are derived from the fixed GMIS strides:
    strides = np.array([1, 2, 4, 8, 16, 32], dtype=np.int32)
    offsets = GMIS_utils.get_offsets(strides)

    #
    # # -----------------------------------
    # # Pre-process affinities:
    # # -----------------------------------
    #
    # TODO: 3
    # affinities_orig, foreground_mask_affs = GMIS_utils.combine_affs_and_mask(affinities_orig, class_prob, class_mask, offsets)

    # Load model and post-processing configs and adapt them to `model_keys`:
    config_path = os.path.join(
        get_hci_home_path(),
        "pyCharm_projects/longRangeAgglo/experiments/cityscapes/configs")
    configs = {
        'models': yaml2dict(os.path.join(config_path, 'models_config.yml')),
        'postproc': yaml2dict(os.path.join(config_path,
                                           'post_proc_config.yml'))
    }

    if from_superpixels:
        if use_multicut:
            model_keys = ["use_fragmenter", 'multicut_exact']
        else:
            model_keys += ["gen_HC_DTWS"]
    configs = adapt_configs_to_model(model_keys, debug=False, **configs)
    post_proc_config = configs['postproc']
    # Inject per-run options into the agglomeration kwargs:
    post_proc_config['generalized_HC_kwargs']['agglomeration_kwargs'][
        'offsets_probabilities'] = edge_prob
    post_proc_config['generalized_HC_kwargs']['agglomeration_kwargs'][
        'return_UCM'] = save_UCM

    # # Add longRange weights:
    # offset_weights = np.ones_like(offsets[:,0])
    # offset_weights[:16] = 35
    # offset_weights[16:32] = 20
    # offset_weights[32:] = 1
    #

    # affs_balanced = GMIS_utils.combine_affs_with_class(affs_balanced, class_prob, refine_bike=True, class_mask=class_mask)
    # affs_balanced = np.expand_dims(affs_balanced.reshape(affs_balanced.shape[0], affs_balanced.shape[1], -1), axis=0)
    # affs_balanced = np.rollaxis(affs_balanced, axis=-1, start=0)
    # affs_balanced *= foreground_mask_affs
    # post_proc_config['generalized_HC_kwargs']['agglomeration_kwargs']['offsets_weights'] = affs_balanced
    # post_proc_config['generalized_HC_kwargs']['agglomeration_kwargs']['extra_aggl_kwargs']['threshold'] = 0.25

    # These keys are consumed here and must not reach the pipeline kwargs:
    n_threads = post_proc_config.pop('nb_threads')
    invert_affinities = post_proc_config.pop('invert_affinities', False)
    segm_pipeline_type = post_proc_config.pop('segm_pipeline_type', 'gen_HC')

    segmentation_pipeline = get_segmentation_pipeline(
        segm_pipeline_type,
        offsets,
        nb_threads=n_threads,
        invert_affinities=invert_affinities,
        return_fragments=False,
        **post_proc_config)

    # Optional second agglomeration applied on top of the blockwise result:
    if post_proc_config.get('use_final_agglomerater', False):
        final_agglomerater = GreedyEdgeContractionAgglomeraterFromSuperpixels(
            offsets,
            n_threads=n_threads,
            invert_affinities=invert_affinities,
            **post_proc_config['generalized_HC_kwargs']
            ['final_agglomeration_kwargs'])
    else:
        final_agglomerater = None

    post_proc_solver = BlockWise(
        segmentation_pipeline=segmentation_pipeline,
        offsets=offsets,
        final_agglomerater=final_agglomerater,
        blockwise=post_proc_config.get('blockwise', False),
        invert_affinities=invert_affinities,
        nb_threads=n_threads,
        return_fragments=False,
        blockwise_config=post_proc_config.get('blockwise_kwargs', {}))

    # Run the block-wise segmentation pipeline on the affinities:
    # print("Starting prediction...")
    tick = time.time()
    outs = post_proc_solver(affinities)
    # The solver returns extra outputs when the UCM is requested:
    if save_UCM:
        pred_segm, MC_energy, UCM, mergeTimes = outs
    else:
        pred_segm, MC_energy = outs
        # pred_segm = outs
        # MC_energy = 0
    comp_time = time.time() - tick
    # print("Post-processing took {} s".format(comp_time))

    # pred_segm *= foreground_mask

    # Grow segments below the configured size threshold with a watershed step
    # (skipped when starting from superpixels):
    # if post_proc_config.get('thresh_segm_size', 0) != 0:
    if from_superpixels:
        pred_segm_WS = pred_segm
    else:
        grow = SizeThreshAndGrowWithWS(
            post_proc_config['thresh_segm_size'],
            offsets,
            hmap_kwargs=post_proc_config['prob_map_kwargs'],
            apply_WS_growing=False,
            debug=False)
        # pred_segm_WS = vigra.analysis.labelVolumeWithBackground(grow(affinities, pred_segm).astype(np.uint32), neighborhood='indirect')
        pred_segm_WS = grow(affinities, pred_segm)
        pred_segm_WS, _, _ = vigra.analysis.relabelConsecutive(pred_segm_WS)

    confidence_scores = GMIS_utils.get_confidence_scores(
        pred_segm_WS, affinities, offsets)

    # Save results; only element 0 along the first axis of pred_segm_WS is
    # stored (presumably a singleton batch/z axis — TODO confirm).
    # inner_path = "MEAN_bk_fixed"
    vigra.writeHDF5(pred_segm_WS[0].astype('uint16'), inst_out_file,
                    inner_path)
    vigra.writeHDF5(confidence_scores, inst_out_conf_file, inner_path)
    vigra.writeHDF5(np.array([MC_energy['MC_energy']]), inst_out_conf_file,
                    "MC_energy/" + inner_path)
    #

    # Advance the (module-level) progress bar for this image:
    pbar.update(1)