Пример #1
0
    def __init__(self, experiment_directory=None, config=None):
        """
        Set up a post-processing experiment.

        :param experiment_directory: root folder of the experiment; forwarded
            to the base-class constructor.
        :param config: optional path to a config file; when given it is read
            before auto-setup runs.
        """
        super(PostProcessingExperiment, self).__init__(experiment_directory)
        # These attributes cannot be serialized, so keep them out of any dump:
        self._meta_config['exclude_attrs_from_save'] = [
            'data_loader', '_device'
        ]
        if config is not None:
            self.read_config_file(config)

        self.auto_setup()

        # Root folder where segmentations are saved (one sub-folder per
        # experiment); the config value wins over the default path.
        default_proj_dir = os.path.join(get_trendytukan_drive_path(),
                                        "projects/pixel_embeddings")
        self.set("proj_dir_path", self.get('proj_dir_path', default_proj_dir))

        # Load the file collecting all the post-processing presets:
        default_presets_path = os.path.join(
            get_abailoni_hci_home_path(),
            "pyCharm_projects/uppsala_hackathon/experiments/cremi/"
            "postproc_configs/postproc_presets.yml")
        presets_path = self.get('postproc_presets_path', default_presets_path)
        self.set("postproc_presets", segm_utils.yaml2dict(presets_path))

        self.build_experiment()
        self.build_offsets()
Пример #2
0
 def save_infer_output(self, output):
     """
     Write the inference result to an HDF5 file on the trendyTukan drive.

     :param output: array-like prediction volume; stored as float16 with
         gzip compression under the dataset name 'data'.
     """
     import h5py
     import numpy as np
     print("Saving....")
     from segmfriends.utils.various import check_dir_and_create

     # One sub-folder per experiment; create it if it does not exist yet.
     out_dir = os.path.join(
         get_trendytukan_drive_path(), "projects/pixel_embeddings",
         self.get("name_experiment", default="generic_experiment"))
     check_dir_and_create(out_dir)

     # File name is derived from the inference loader name in the config.
     out_file = os.path.join(
         out_dir,
         "predictions_sample_{}.h5".format(self.get("loaders/infer/name")))
     with h5py.File(out_file, 'w') as f:
         f.create_dataset('data',
                          data=output.astype(np.float16),
                          compression='gzip')
Пример #3
0
    def save_infer_output(self, output):
        """
        Write the inference result to HDF5 and dump the experiment
        configuration next to it.

        :param output: array-like prediction volume; stored as float16 under
            the inner path given by the 'inner_path_output' config key
            (default 'data').
        """
        import h5py
        import numpy as np
        print("Saving....")
        from segmfriends.utils.various import check_dir_and_create

        # One sub-folder per experiment; create it if it does not exist yet.
        export_dir = os.path.join(
            get_trendytukan_drive_path(), "projects/pixel_embeddings",
            self.get("name_experiment", default="generic_experiment"))
        check_dir_and_create(export_dir)

        # File name is derived from the inference loader name in the config.
        out_file = os.path.join(
            export_dir,
            "predictions_sample_{}.h5".format(self.get("loaders/infer/name")))
        print("Writing to ", self.get("inner_path_output", 'data'))
        from segmfriends.utils.various import writeHDF5
        writeHDF5(output.astype(np.float16), out_file,
                  self.get("inner_path_output", 'data'))
        print("Saved to ", out_file)

        # Keep a copy of the configuration next to the predictions:
        self.dump_configuration(os.path.join(export_dir,
                                             "prediction_config.yml"))
This could create an effect with thin processes, but apparently it doesn't
"""

# -----------
# LOAD data
# -----------
from vaeAffs.utils.path_utils import get_abailoni_hci_home_path, get_trendytukan_drive_path
from segmfriends.utils.various import parse_data_slice, readHDF5, writeHDF5, readHDF5_from_volume_config
import os
import json
import h5py

from scipy.ndimage import zoom
import nifty.graph.rag as nrag

# Root folder on the trendyTukan drive holding the pixel-embedding projects.
project_dir = os.path.join(get_trendytukan_drive_path(),
                           "projects/pixel_embeddings")

# Label ids to be ignored (presumably background/boundary labels in the
# GT convention — TODO confirm against the dataset).
IGNORE_LABELS = [0, 1]
# Number of threads used by the processing below.
nb_threads = 6

EXP_NAMES = [
    # TEST:
    "v4_addSparseAffs_fullGT_eff",
    "v4_addSparseAffs_fullGT_avgDirectVar",

    # VAL:
    # "v4_addSparseAffs_eff",
    # "v4_onlySparseAffs_eff",
    # "v4_main_avgDirectVar",
    # "v4_addSparseAffs_avgDirectVar",
Пример #5
0
}

# NOTE(review): `slices` and `sample` are defined earlier in the file
# (outside this chunk); `parsed_slice` is a tuple of slice objects.
parsed_slice = parse_data_slice(slices[sample])
data_path = os.path.join(get_abailoni_hci_home_path(),
                         "datasets/new_cremi/sample{}.h5".format(sample))
# data_path = os.path.join(get_trendytukan_drive_path(), "datasets/new_cremi/fib25/sample{}.h5".format(sample))
# Load raw data and ground-truth labels, cropped to the configured slice.
# The first entry of `parsed_slice` is skipped — presumably it indexes the
# channel axis of the affinities, which raw/GT do not have (TODO confirm).
with h5py.File(data_path, 'r') as f:
    print([atr for atr in f['volumes/labels']])
    #     glia = f['volumes/labels/glia'][:]
    raw = f['volumes/raw'][parsed_slice[1:]]
    GT = f['volumes/labels/neuron_ids_fixed'][parsed_slice[1:]]

# Load affs:
from segmfriends.utils.various import writeHDF5, readHDF5
# Affinities from the "dice"-trained model; inverted (1 - affs), so higher
# values presumably mean "boundary" here — TODO confirm the convention.
affs_path = os.path.join(
    get_trendytukan_drive_path(),
    "projects/pixel_embeddings/{}/predictions_sample_{}.h5".format(
        "v4_addSparseAffs_eff", sample))
affs_dice = 1. - readHDF5(
    affs_path, "data", crop_slice="3:5,70:-6,25:-25,25:-25")
# Affinities from the average-direct-variance model (same crop, not inverted).
affs_path = os.path.join(
    get_trendytukan_drive_path(),
    "projects/pixel_embeddings/{}/predictions_sample_{}.h5".format(
        "v4_addSparseAffs_avgDirectVar", sample))
affs_avg = readHDF5(affs_path, "data", crop_slice="3:5,70:-6,25:-25,25:-25")

# Load segm:
folder_path = os.path.join(get_trendytukan_drive_path(),
                           "projects/pixel_embeddings/")
segm_path = os.path.join(
    folder_path, "{}/out_segms/{}.h5".format(
Пример #6
0
sample = "C"
# slices = {'A': ":,:,:,:", 'B': ":, :, 90:, 580: 1900", 'C': ":, :, 70:1450, 95:1425"}
slices = {
    'A': ":,:,:,:",
    'B': ":,:,:,:",
    'C': ":,70:-6,50:-50,50:-50",
    "0": ":,:,:,:",
    "1": ":,:,:,:",
    "2": ":,:,:,:",
    "3": ":,:,:,:",
}

parsed_slice = parse_data_slice(slices[sample])
conf_folder_path = os.path.join(
    get_trendytukan_drive_path(),
    "projects/pixel_embeddings/v4_addSparseAffs_avgDirectVar/scores")

prefix_math = [
    "C__MEAN___",
    # "C__MutexWatershed___",
    "C__MWS__stride10___"
]

labels = {
    "C__MEAN___": "Gasp Average",
    "C__MWS__stride10___": "Mutex Watershed"
}

keys = [
    "nb_nodes",
Пример #7
0
# Per-sample lists of indices — presumably z-slices whose content should be
# copied from the previous slice (defective sections?) — TODO confirm
# against where this dict is consumed.
copy_from_previous = {
    "A": [],
    "B": [],
    "C": [22, 82],
    "0": [122],
    "1": [122],
    "2": [],
}

# for sample in ["A", "B", "C"]:
for sample in ["C", "0", "1", "2"]:
    print("Sample", sample)
    raise DeprecationWarning("Fix bug of glia and boundary label")

    data_path = os.path.join(get_trendytukan_drive_path(), "datasets/new_cremi/sample{}.h5".format(sample))

    with h5py.File(data_path, 'r') as f:
        print([atr for atr in f['volumes/labels']])
        #     glia = f['volumes/labels/glia'][:]
        # raw = f['volumes/raw'][:]
        GT = f['volumes/labels/neuron_ids'][:]
        glia = f['volumes/labels/glia'][:]

    from affogato.affinities import compute_affinities

    offsets = [
        [0, 1, 0],
        [0, 0, 1],
    ]
    print(GT.max())
Пример #8
0
import os
from copy import deepcopy
from vaeAffs.utils.path_utils import get_trendytukan_drive_path, get_abailoni_hci_home_path


from segmfriends.utils.config_utils import assign_color_to_table_value, return_recursive_key_in_dict
import json
import numpy as np
from segmfriends.utils.various import yaml2dict
# -----------------------
# Script options:
# -----------------------

# Root folder on the trendyTukan drive holding the pixel-embedding projects.
project_dir = os.path.join(get_trendytukan_drive_path(),"projects/pixel_embeddings")

# Names of the experiment sub-folders to collect results from.
EXP_NAMES = [
    "v4_addSparseAffs_eff",
    "v4_onlySparseAffs_eff",
    "v4_main_avgDirectVar",
    "v4_main_eff",
    "v4_addSparseAffs_avgDirectVar",
]

# Substrings that result names must contain to be included
# (currently empty, i.e. no filtering).
REQUIRED_STRINGS = [
    # "_mergedGlia",
    # "affs_withLR_z"
]

EXCLUDE_STRINGS = [
    "_mergedGlia",
    "multicut_kerLin",