Exemplo n.º 1
0
    def build_offsets(self):
        """Populate the ``"offsets"`` config entry.

        Two sources are supported: an explicit JSON file (used when the
        ``"offsets_file_name"`` option is set), or the prediction config
        stored next to the affinities, navigated via the slash-separated
        key path in ``"offset_path_in_infer_config"``.
        """
        file_name = self.get("offsets_file_name")
        if file_name is None:
            print("Loading offsets from inference config...")
            # Slash-separated key path into the nested prediction config,
            # e.g. "model/offsets":
            key_path = self.get(
                "offset_path_in_infer_config", ensure_exists=True)
            affs_dir = self.get("affinities_dir_path", ensure_exists=True)
            infer_config = segm_utils.yaml2dict(
                os.path.join(affs_dir, "prediction_config.yml"))

            # Walk down the nested dict one key at a time:
            node = infer_config
            for key in key_path.split("/"):
                assert key in node
                node = node.get(key)
            assert isinstance(node, list)
            self.set("offsets", node)
        else:
            print("Loading offsets from file...")
            default_dir = os.path.join(
                get_abailoni_hci_home_path(),
                "pyCharm_projects/uppsala_hackathon/experiments/cremi/offsets"
            )
            offsets_file = os.path.join(
                self.get("offsets_dir_path", default_dir), file_name)
            assert os.path.exists(offsets_file)
            with open(offsets_file, 'r') as fh:
                self.set("offsets", json.load(fh))
Exemplo n.º 2
0
    def __init__(self, experiment_directory=None, config=None):
        """Set up a post-processing experiment.

        :param experiment_directory: root folder of the experiment
            (forwarded to the base-class constructor).
        :param config: optional path of a configuration file to read.
        """
        super(PostProcessingExperiment, self).__init__(experiment_directory)
        # Privates
        self._meta_config['exclude_attrs_from_save'] = [
            'data_loader', '_device'
        ]
        if config is not None:
            self.read_config_file(config)

        self.auto_setup()

        # Where the segmentation will be saved: (each experiment is in a sub-folder)
        default_proj_dir = os.path.join(get_trendytukan_drive_path(),
                                        "projects/pixel_embeddings")
        self.set("proj_dir_path", self.get('proj_dir_path', default_proj_dir))

        # Load the file with all the presets of postprocessing:
        default_presets_path = os.path.join(
            get_abailoni_hci_home_path(),
            "pyCharm_projects/uppsala_hackathon/experiments/cremi/"
            "postproc_configs/postproc_presets.yml")
        presets_file = self.get('postproc_presets_path', default_presets_path)
        self.set("postproc_presets", segm_utils.yaml2dict(presets_file))

        self.build_experiment()
        self.build_offsets()
def load_offsets(offsets_file_name=None):
    """Load a list of affinity offsets from a JSON file.

    Bug fix: the original body referenced ``offsets_file_name`` as a free
    variable that is not defined anywhere in this snippet, so every call
    raised ``NameError``. The name is now a proper parameter; for backward
    compatibility, omitting it falls back to a module-level global of the
    same name (the original, implicit behavior).

    :param offsets_file_name: file name of the offsets JSON inside the
        default CREMI offsets directory.
    :return: the deserialized JSON content (a list of offset vectors).
    :raises FileNotFoundError: if the offsets file does not exist.
    """
    if offsets_file_name is None:
        # Preserve the original implicit dependency on a module-level
        # ``offsets_file_name`` global (raises KeyError if absent).
        offsets_file_name = globals()["offsets_file_name"]
    offsets_path = os.path.join(
        get_abailoni_hci_home_path(),
        "pyCharm_projects/uppsala_hackathon/experiments/cremi/offsets",
        offsets_file_name)
    # Fail loudly instead of using assert, which is stripped under `python -O`:
    if not os.path.exists(offsets_path):
        raise FileNotFoundError(offsets_path)
    with open(offsets_path, 'r') as f:
        return json.load(f)
Exemplo n.º 4
0
        self.dump_configuration(os.path.join(dir_path,
                                             "prediction_config.yml"))


if __name__ == '__main__':
    # First CLI argument: name of the experiment/run to launch.
    print(sys.argv[1])

    # Paths relative to this script: config templates and experiment runs.
    source_path = os.path.dirname(os.path.realpath(__file__))
    config_path = os.path.join(source_path, 'configs')
    experiments_path = os.path.join(source_path, 'runs')

    # Update HCI_HOME paths:
    # Replace the "HCI__HOME/" placeholder in any CLI argument with the
    # actual home directory (the trailing slash belongs to the placeholder,
    # so the replacement path must supply its own separator).
    for i, key in enumerate(sys.argv):
        if "HCI__HOME" in sys.argv[i]:
            sys.argv[i] = sys.argv[i].replace("HCI__HOME/",
                                              get_abailoni_hci_home_path())

    # Update RUNS paths:
    # Same placeholder substitution for the local "runs" directory.
    for i, key in enumerate(sys.argv):
        if "RUNS__HOME" in sys.argv[i]:
            sys.argv[i] = sys.argv[i].replace("RUNS__HOME", experiments_path)

    # Turn the experiment name into an absolute path inside "runs/".
    sys.argv[1] = os.path.join(experiments_path, sys.argv[1])
    # "--inherit" may point either to a YAML config file (resolved against
    # "configs/" and passed through change_paths_config_file — presumably a
    # path-rewriting helper defined elsewhere; confirm) or to another
    # experiment directory under "runs/":
    if '--inherit' in sys.argv:
        i = sys.argv.index('--inherit') + 1
        if sys.argv[i].endswith(('.yml', '.yaml')):
            sys.argv[i] = change_paths_config_file(
                os.path.join(config_path, sys.argv[i]))
        else:
            sys.argv[i] = os.path.join(experiments_path, sys.argv[i])
    if '--update' in sys.argv:
Exemplo n.º 5
0
from matplotlib import pyplot as plt

# CREMI sample to inspect.
sample = "C"
# slices = {'A': ":,:,:,:", 'B': ":, :, 90:, 580: 1900", 'C': ":, :, 70:1450, 95:1425"}
# Per-sample crop strings; sample C trims the volume borders. The first
# axis of the 4D slice is dropped below when indexing raw/GT — presumably
# a channel/affinity axis; confirm against the dataset layout.
slices = {
    'A': ":,:,:,:",
    'B': ":,:,:,:",
    'C': ":,70:-6,50:-50,50:-50",
    "0": ":,:,:,:",
    "1": ":,:,:,:",
    "2": ":,:,:,:",
    "3": ":,:,:,:",
}

# Convert the crop string into a tuple of Python slice objects.
parsed_slice = parse_data_slice(slices[sample])
data_path = os.path.join(get_abailoni_hci_home_path(),
                         "datasets/new_cremi/sample{}.h5".format(sample))
# data_path = os.path.join(get_trendytukan_drive_path(), "datasets/new_cremi/fib25/sample{}.h5".format(sample))
with h5py.File(data_path, 'r') as f:
    # List the available label volumes in the file (debug aid).
    print([atr for atr in f['volumes/labels']])
    #     glia = f['volumes/labels/glia'][:]
    # Load raw data and ground-truth labels, cropped to the 3D sub-slice:
    raw = f['volumes/raw'][parsed_slice[1:]]
    GT = f['volumes/labels/neuron_ids_fixed'][parsed_slice[1:]]

# Load affs:
from segmfriends.utils.various import writeHDF5, readHDF5
# Predicted affinities for this sample, produced by the named model run.
affs_path = os.path.join(
    get_trendytukan_drive_path(),
    "projects/pixel_embeddings/{}/predictions_sample_{}.h5".format(
        "v4_addSparseAffs_eff", sample))
affs_dice = 1. - readHDF5(
Exemplo n.º 6
0
        active_nodes = np.empty(self.shape, dtype='int64')
        for n in range(self.nb_nodes):
            out_segm[tuple(self.from_label_to_coord(
                n))] = self.c_graph.findRepresentativeNode(n)
            active_nodes[tuple(
                self.from_label_to_coord(n))] = self.is_node_active[n]
        active_nodes[tuple(self.starting_coordinate)] = 2
        return out_segm, active_nodes


import os
from vaeAffs.utils.path_utils import get_abailoni_hci_home_path
import h5py
# CREMI training sample to load.
sample = "A"
data_path = os.path.join(
    get_abailoni_hci_home_path(),
    "../ialgpu1_local_home/datasets/cremi/SOA_affinities/sample{}_train.h5".
    format(sample))
# Small 4D crop (affinity-channel, z, y, x): one z-slice, 100x100 in-plane.
crop_slice = ":,20:21,300:400,300:400"
from segmfriends.utils.various import parse_data_slice

# Convert the crop string into a tuple of Python slice objects.
crop_slice = parse_data_slice(crop_slice)
with h5py.File(data_path, 'r') as f:
    # Affinities keep all 4 axes; raw drops the leading channel axis.
    affs = f['predictions']['full_affs'][crop_slice]
    raw = f['raw'][crop_slice[1:]]

# Affinity offsets (z, y, x) matching the channels of `affs`: 3 direct
# neighbors plus longer-range offsets along each axis — TODO confirm the
# channel order against the prediction config.
offsets = [[-1, 0, 0], [0, -1, 0], [0, 0, -1], [-2, 0, 0], [0, -3, 0],
           [0, 0, -3], [-3, 0, 0], [0, -9, 0], [0, 0, -9], [-4, 0, 0],
           [0, -27, 0], [0, 0, -27]]

# Fake duplicate affinities:
Exemplo n.º 7
0
import vaeAffs

import nifty
import numpy as np
import vigra

import os
from vaeAffs.utils.path_utils import get_abailoni_hci_home_path
import h5py

# CREMI training sample to load.
sample = "A"
data_path = os.path.join(
    get_abailoni_hci_home_path(),
    "../ialgpu1_local_home/datasets/cremi/SOA_affinities/sample{}_train.h5".
    format(sample))
# 4D crop (affinity-channel, z, y, x): 10 z-slices, 300x300 in-plane.
crop_slice = ":,20:30,300:600,300:600"
from segmfriends.utils.various import parse_data_slice

# Convert the crop string into a tuple of Python slice objects.
crop_slice = parse_data_slice(crop_slice)
with h5py.File(data_path, 'r') as f:
    # Affinities keep all 4 axes; raw drops the leading channel axis.
    affs = f['predictions']['full_affs'][crop_slice]
    raw = f['raw'][crop_slice[1:]]

# Affinity offsets (z, y, x) matching the channels of `affs`: 3 direct
# neighbors plus longer-range offsets along each axis — TODO confirm the
# channel order against the prediction config.
offsets = [[-1, 0, 0], [0, -1, 0], [0, 0, -1], [-2, 0, 0], [0, -3, 0],
           [0, 0, -3], [-3, 0, 0], [0, -9, 0], [0, 0, -9], [-4, 0, 0],
           [0, -27, 0], [0, 0, -27]]

from affogato.affinities import compute_multiscale_affinities, compute_affinities
from affogato.segmentation import compute_mws_segmentation

# Compute affinities of an all-zero label volume; only the returned
# validity mask is kept (marks offsets that stay inside the volume).
_, mask = compute_affinities(np.zeros_like(raw, dtype='int64'), offsets)