Example #1
def reflect_and_swap_dataset(raw_dataset, reflectx, reflecty, reflectz, swapxy):
    new_dataset = dict()
    new_dataset['name'] = raw_dataset['name'] \
                          + '_x' + str(reflectx) \
                          + '_y' + str(reflecty) \
                          + '_z' + str(reflectz) \
                          + '_xy' + str(swapxy)
    new_dataset['reflectz'] = reflectz
    new_dataset['reflecty'] = reflecty
    new_dataset['reflectx'] = reflectx
    new_dataset['swapxy'] = swapxy
    for array_key in ["data", "components", "mask"]:
        if array_key in raw_dataset:
            array_copy = raw_dataset[array_key][:]
            array_copy = array_copy.reshape(array_copy.shape[-3:])
            reflection_slices = [slice(None)] * 3
            if reflectz:
                reflection_slices[0] = slice(None, None, -1)
            if reflecty:
                reflection_slices[1] = slice(None, None, -1)
            if reflectx:
                reflection_slices[2] = slice(None, None, -1)
            reflection_slices = tuple(reflection_slices)
            array_copy = array_copy[reflection_slices]
            if swapxy:
                array_copy = array_copy.transpose((0, 2, 1))
            new_dataset[array_key] = array_copy
    new_dataset['nhood'] = raw_dataset['nhood']
    new_dataset['label'] = malis.seg_to_affgraph(new_dataset['components'], new_dataset['nhood'])
    for key in raw_dataset:
        if key not in new_dataset:
            # add any other attributes we're not aware of
            new_dataset[key] = raw_dataset[key]
    return new_dataset
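A minimal usage sketch for the function above (hedged: the toy dataset dict, shapes, and labels are illustrative; it assumes numpy and the malis package used throughout these examples):

import numpy as np
import malis

# Toy dataset: a 4x4x4 volume with two z-slabs of labels and the standard 3D neighborhood.
raw = {
    'name': 'toy',
    'data': np.random.rand(4, 4, 4).astype(np.float32),
    'components': np.repeat(np.array([1, 1, 2, 2], dtype=np.int32), 16).reshape(4, 4, 4),
    'nhood': malis.mknhood3d(1),
}
flipped = reflect_and_swap_dataset(raw, reflectx=True, reflecty=False, reflectz=False, swapxy=True)
print(flipped['name'])         # toy_xTrue_yFalse_zFalse_xyTrue
print(flipped['label'].shape)  # (3, 4, 4, 4): one affinity channel per nhood edge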
Example #2
    def get_data(self):
        ndropout = 3
        nblur = 3
        nblackout = 0
        max_range_slide = 18 * 2
        drop_1st = 1
        if self.set_type == 'train' or self.set_type == 'valid':
            drop_1st = 1
        for i in range(self.iter_per_epoch):
            if self.set_type == 'train' or self.set_type == 'valid':
                volume_patch, gt_seg_patch = self.get_random_block(
                    size=(16 + ndropout + drop_1st, 256 + max_range_slide,
                          256 + max_range_slide))
                # volume_patch, gt_seg_patch = self.get_random_block (size=(16+ndropout+drop_1st, 256, 256))
            else:
                volume_patch, gt_seg_patch = self.get_random_block(size=(16,
                                                                         256,
                                                                         256))

            if self.set_type == 'train' or self.set_type == 'valid':

                volume_patch, gt_seg_patch = self.sliding(
                    [volume_patch, gt_seg_patch],
                    17,
                    17, (256, 256),
                    max_range=17)

                seed = time_seed()
                volume_patch, gt_seg_patch = apply_aug(volume_patch,
                                                       gt_seg_patch,
                                                       func=random_flip,
                                                       seed=seed)
                volume_patch, gt_seg_patch = apply_aug(volume_patch,
                                                       gt_seg_patch,
                                                       func=random_reverse,
                                                       seed=seed)
                volume_patch, gt_seg_patch = apply_aug(
                    volume_patch,
                    gt_seg_patch,
                    func=random_square_rotate,
                    seed=seed)
                volume_patch, gt_seg_patch = apply_aug(volume_patch,
                                                       gt_seg_patch,
                                                       func=random_elastic,
                                                       seed=seed)
                volume_patch, gt_seg_patch = self.data_dropout(
                    volume_patch, gt_seg_patch, ndropout=ndropout)
                if self.set_type == 'train' or self.set_type == 'valid':
                    # volume_patch = random_blackout (volume_patch, nblackout, self.data_rand)
                    volume_patch = random_gaussian_blur(volume_patch, nblur)

            gt_affs_patch = malis.seg_to_affgraph(gt_seg_patch, nhood)
            if self.set_type == 'train' or self.set_type == 'valid':
                yield [
                    volume_patch[drop_1st:], gt_seg_patch[drop_1st:],
                    gt_affs_patch[:, drop_1st:]
                ]
            else:
                yield [volume_patch, gt_seg_patch, gt_affs_patch]
Example #3
def np_seg_to_aff(seg, nhood=malis.mknhood3d(1)):
    # return lambda seg, nhood: malis.seg_to_affgraph (seg, nhood).astype(np.float32)
    seg = np.squeeze(seg)
    seg = seg.astype(np.int32)
    ret = malis.seg_to_affgraph(seg, nhood)  # seg zyx
    ret = ret.astype(np.float32)
    ret = np.squeeze(ret)  # ret 3zyx
    ret = np.transpose(ret, [1, 2, 3, 0])  # ret zyx3
    return ret
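A quick check of the converter above (a sketch; assumes numpy and malis as in the snippet):

import numpy as np
import malis

# Two labels split along y; the leading singleton axis is squeezed away inside.
seg = np.zeros((1, 8, 8, 8), dtype=np.int32)
seg[0, :, :4, :] = 1
seg[0, :, 4:, :] = 2
aff = np_seg_to_aff(seg)
print(aff.shape, aff.dtype)  # (8, 8, 8, 3) float32: affinity channels moved to the last axis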
Example #4
def create_augmented_dataset(dname, reflectz, reflecty, reflectx, swapxy, angle):
    dataset = dict()
    new_dname = '{dname}_z{z}_y{y}_x{x}_xy{swapxy}_angle{angle:05.1f}'.format(
        dname=dname, z=reflectz, y=reflecty, x=reflectx, swapxy=swapxy,
        angle=angle
    )
    dataset['name'] = new_dname
    dataset['already_saved'] = False
    new_dname_folder = os.path.join(data_folder_target, dataset['name'])
    # if os.path.exists(new_dname_folder):
    #     dataset['already_saved'] = True
    #     return dataset
    # load and transform original image and components
    for key in ['data', 'components']:
        filename = data_filenames[key]
        source_filename = source_data_filenames[key]
        source_filepath = os.path.join(data_folder_source, dname,
                                       source_filename)
        rotation_order = rotation_orders[key]
        if key in scaling_factors:
            scaling_factor = scaling_factors[key]
        else:
            scaling_factor = None
        with h5py.File(source_filepath, 'r') as h5_file:
            input_array = np.array(h5_file['main'])
        print("original", key, "from", source_filepath, "had dtype & shape", 
              input_array.dtype, input_array.shape)
        new_array = create_transformed_array(input_array, reflectz, reflecty, reflectx, swapxy, angle,
                                             rotation_order, scaling_factor)
        if new_array.dtype != dtypes[key]:
            print("converting {k} from {old} to {new}".format(
                k=key, old=new_array.dtype, new=dtypes[key]
            ))
            print(new_array.max())
            new_array = new_array.astype(dtypes[key])
        print('transformed {0}: '.format(filename),
              new_array.dtype,
              new_array.shape)
        dataset[key] = new_array
        if key == 'components':
            # make mask array
            original_shape = input_array.shape
            mask_input_array = np.ones(shape=original_shape, dtype=dtypes['mask'])
            mask_array = create_transformed_array(mask_input_array, reflectz, reflecty, reflectx, swapxy, angle,
                                                  rotation_order, scaling_factor=None)
            dataset['mask'] = mask_array.astype(dtypes['mask'])
            dataset['mask_sum'] = np.sum(dataset['mask'])
        # if new_array.dtype != array_original.dtype:
        #     print('dtype mismatch: new_array.dtype = {0}, array_original.dtype = {1}'
        #           .format(new_array.dtype, array_original.dtype
        #                   ))
            # raise ValueError
    # make affinities from transformed component values
    dataset['label'] = malis.seg_to_affgraph(
        dataset['components'], neighborhood_3d
    ).astype(dtypes['label'])
    return dataset
Example #5
def _make_affinities(labels, nhood=None, size_thresh=1):
    """
    Construct an affinity graph from a segmentation (IDs).

    Segments with ID 0 are regarded as disconnected.
    The spatial shape of the affinity graph is the same as that of seg_gt.
    This means that some edges are undefined and therefore treated as disconnected.
    If the offsets in nhood are positive, the edges with the largest spatial index are undefined.

    Connected components is run on the affgraph to relabel the IDs locally.

    Parameters
    ----------

    labels: 4d np.ndarray, int (any precision)
        Volumes of segmentation IDs (bs, z, x, y)
    nhood: 2d np.ndarray, int
        Neighbourhood pattern specifying the edges in the affinity graph
        Shape: (#edges, ndim)
        nhood[i] contains the displacement coordinates of edge i
        The number and order of edges are arbitrary
    size_thresh: int
        Size filter for connected components; smaller objects are mapped to BG

    Returns
    -------

    aff: 5d np.ndarray int16
        Affinity graph of shape (bs, #edges, z, x, y)
        1: connected, 0: disconnected
    seg_gt: 4d np.ndarray int16
        Relabelled segmentation of shape (bs, z, x, y)
    """

    if not malis_avail:
        raise RuntimeError("Please install malis to use affinities")

    if nhood is None:
        nhood = np.eye(3, dtype=np.int32)

    aff_sh = [
        labels.shape[0],
        nhood.shape[0],
    ] + list(labels.shape[1:])
    out_aff = np.zeros(aff_sh, dtype=np.int16)
    out_seg = np.zeros(labels.shape, dtype=np.int16)
    for i, l in enumerate(labels):
        out_aff[i] = malis.seg_to_affgraph(l, nhood)
        # we throw away the seg sizes
        out_seg[i], _ = malis.affgraph_to_seg(out_aff[i], nhood, size_thresh)
    return out_aff, out_seg
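A hedged usage sketch for _make_affinities (assumes the defining module has malis installed, i.e. malis_avail is True; the batch and volume sizes are illustrative):

import numpy as np

# Batch of two 4x4x4 label volumes; ID 0 is background.
labels = np.zeros((2, 4, 4, 4), dtype=np.int32)
labels[:, :2] = 1
labels[:, 2:] = 2
aff, seg = _make_affinities(labels)  # default nhood = np.eye(3)
print(aff.shape)  # (2, 3, 4, 4, 4)
print(seg.shape)  # (2, 4, 4, 4), relabelled by connected components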
Example #6
def get_datasets(dataset, origin):
    # output_shape = (40, 30, 80)
    # input_shape = (100, 110, 120)
    output_shape = (2, 3, 4)
    input_shape = tuple([x + 2 for x in output_shape])
    # integer division: the borders are used below to build slice objects
    borders = tuple([(in_ - out_) // 2
                     for (in_, out_) in zip(input_shape, output_shape)])
    input_slices = tuple(
        [slice(x, x + l) for x, l in zip(origin, input_shape)])
    output_slices = tuple([
        slice(x + b, x + b + l)
        for x, b, l in zip(origin, borders, output_shape)
    ])
    expected_dataset = dict()
    data_slices = [slice(0, l) for l in dataset['data'].shape]
    data_slices[-3:] = input_slices
    data_slices = tuple(data_slices)
    expected_data_array = np.array(dataset['data'][data_slices],
                                   dtype=np.float32)
    expected_data_array = expected_data_array.reshape((1, ) + input_shape)
    expected_data_array /= (2.0**8)
    expected_dataset['data'] = expected_data_array
    components_slices = [slice(0, l) for l in dataset['components'].shape]
    components_slices[-3:] = output_slices
    components_slices = tuple(components_slices)
    expected_components_array = np.array(
        dataset['components'][components_slices]).reshape((1, ) +
                                                          output_shape)
    if type(dataset['components']) is DVIDDataInstance:
        print("Is DVIDDataInstance...")
        print("uniques before:", np.unique(expected_components_array))
        dvid_uuid = dataset['components'].uuid
        body_names_to_exclude = dataset.get('body_names_to_exclude')
        good_bodies = get_good_components(dvid_uuid, body_names_to_exclude)
        expected_components_array = \
            replace_array_except_whitelist(expected_components_array, 0, good_bodies)
        print("uniques after:", np.unique(expected_components_array))
    expected_dataset['components'] = expected_components_array
    components_for_affinity_generation = expected_components_array.reshape(
        output_shape)
    expected_label = malis.seg_to_affgraph(
        components_for_affinity_generation, malis.mknhood3d())
    expected_dataset['label'] = expected_label
    if type(dataset['components']) is DVIDDataInstance:
        expected_mask = np.array(expected_components_array > 0).astype(
            np.uint8)
    else:
        expected_mask = np.ones(shape=(1, ) + output_shape, dtype=np.uint8)
    expected_dataset['mask'] = expected_mask
    numpy_dataset = get_numpy_dataset(dataset, input_slices, output_slices,
                                      True)
    return numpy_dataset, expected_dataset
Example #7
def condense_and_split_components(components, output_shape, malis_neighborhood):
    '''
    :param components: numpy array with component values
    :param output_shape: tuple with spatial dimensions of components
    :param malis_neighborhood: array definition of malis neighborhood
    :return: numpy array of same shape as components, with new component values
    '''
    original_shape = components.shape
    components_for_malis = components.reshape(output_shape)
    affinities = malis.seg_to_affgraph(components_for_malis, malis_neighborhood)
    recomputed_components, _ = malis.connected_components_affgraph(affinities.astype(np.int32), malis_neighborhood)
    recomputed_components = recomputed_components.reshape(original_shape)
    return recomputed_components
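For example (a sketch assuming numpy and malis), a volume in which one ID covers two spatially disconnected slabs gets split into distinct IDs:

import numpy as np
import malis

components = np.zeros((1, 6, 6, 6), dtype=np.int32)
components[0, :, :, 0] = 7  # two disconnected slabs...
components[0, :, :, 5] = 7  # ...sharing the same ID
relabelled = condense_and_split_components(components, (6, 6, 6), malis.mknhood3d(1))
print(np.unique(relabelled))  # background plus two distinct new IDs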
Example #8
def seg_to_aff_op(seg, nhood=tf.constant(malis.mknhood3d(1)), name='SegToAff'):
	# Squeeze the segmentation to 3D
	seg = tf.squeeze(seg, axis=-1)
	# Define the numpy function to transform segmentation to affinity graph
	np_func = lambda seg, nhood: malis.seg_to_affgraph (seg.astype(np.int32), nhood).astype(np.float32)
	# Convert the numpy function to tensorflow function
	tf_func = tf.py_func(np_func, [tf.cast(seg, tf.int32), nhood], [tf.float32], name=name)
	# Reshape the result; note that the layout from malis is (3, dimz, dimy, dimx)
	ret = tf.reshape(tf_func[0], [3, seg.shape[0], seg.shape[1], seg.shape[2]])
	# Transpose the result so that the affinity dimension (3) moves to the last axis
	ret = tf.transpose(ret, [1, 2, 3, 0])
	# print ret.get_shape().as_list()
	return ret
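A usage sketch for the op above (hedged: tf.py_func exists only in TensorFlow 1.x, so a TF1 graph/session and a statically shaped (z, y, x, 1) tensor are assumed):

import numpy as np
import tensorflow as tf  # TensorFlow 1.x

seg_np = np.random.randint(0, 3, size=(8, 64, 64, 1)).astype(np.float32)
seg_t = tf.constant(seg_np)
aff_t = seg_to_aff_op(seg_t)  # shape (8, 64, 64, 3)
with tf.Session() as sess:
    print(sess.run(aff_t).shape)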
Example #9
def _make_affinities(labels, nhood=None, size_thresh=1):
    """
    Construct an affinity graph from a segmentation (IDs).

    Segments with ID 0 are regarded as disconnected.
    The spatial shape of the affinity graph is the same as that of seg_gt.
    This means that some edges are undefined and therefore treated as disconnected.
    If the offsets in nhood are positive, the edges with the largest spatial index are undefined.

    Connected components is run on the affgraph to relabel the IDs locally.

    Parameters
    ----------

    labels: 4d np.ndarray, int (any precision)
        Volumes of segmentation IDs (bs, z, x, y)
    nhood: 2d np.ndarray, int
        Neighbourhood pattern specifying the edges in the affinity graph
        Shape: (#edges, ndim)
        nhood[i] contains the displacement coordinates of edge i
        The number and order of edges are arbitrary
    size_thresh: int
        Size filter for connected components; smaller objects are mapped to BG

    Returns
    -------

    aff: 5d np.ndarray int16
        Affinity graph of shape (bs, #edges, z, x, y)
        1: connected, 0: disconnected
    seg_gt: 4d np.ndarray int16
        Relabelled segmentation of shape (bs, z, x, y)
    """

    if not malis_avail:
        raise RuntimeError("Please install malis to use affinities")

    if nhood is None:
        nhood = np.eye(3, dtype=np.int32)

    aff_sh = [labels.shape[0], nhood.shape[0], ] + list(labels.shape[1:])
    out_aff = np.zeros(aff_sh, dtype=np.int16)
    out_seg = np.zeros(labels.shape, dtype=np.int16)
    for i, l in enumerate(labels):
        out_aff[i] = malis.seg_to_affgraph(l, nhood)
        # we throw away the seg sizes
        out_seg[i], _ = malis.affgraph_to_seg(out_aff[i], nhood, size_thresh)
    return out_aff, out_seg
Example #10
def condense_and_split_components(components, output_shape,
                                  malis_neighborhood):
    '''
    :param components: numpy array with component values
    :param output_shape: tuple with spatial dimensions of components
    :param malis_neighborhood: array definition of malis neighborhood
    :return: numpy array of same shape as components, with new component values
    '''
    original_shape = components.shape
    components_for_malis = components.reshape(output_shape)
    affinities = malis.seg_to_affgraph(components_for_malis,
                                       malis_neighborhood)
    recomputed_components, _ = malis.connected_components_affgraph(
        affinities.astype(np.int32), malis_neighborhood)
    recomputed_components = recomputed_components.reshape(original_shape)
    return recomputed_components
Example #11
def augment_data_elastic(dataset, ncopy_per_dset):
    dsetout = []
    nset = len(dataset)
    for iset in range(nset):
        for icopy in range(ncopy_per_dset):
            reflectz = np.random.rand() > .5
            reflecty = np.random.rand() > .5
            reflectx = np.random.rand() > .5
            swapxy = np.random.rand() > .5

            dataset.append({})
            dataset[-1]['reflectz'] = reflectz
            dataset[-1]['reflecty'] = reflecty
            dataset[-1]['reflectx'] = reflectx
            dataset[-1]['swapxy'] = swapxy

            dataset[-1]['name'] = dataset[iset]['name']
            dataset[-1]['nhood'] = dataset[iset]['nhood']
            dataset[-1]['data'] = dataset[iset]['data'][:]
            dataset[-1]['components'] = dataset[iset]['components'][:]

            if reflectz:
                dataset[-1]['data'] = dataset[-1]['data'][::-1, :, :]
                dataset[-1]['components'] = dataset[-1][
                    'components'][::-1, :, :]

            if reflecty:
                dataset[-1]['data'] = dataset[-1]['data'][:, ::-1, :]
                dataset[-1]['components'] = dataset[-1][
                    'components'][:, ::-1, :]

            if reflectx:
                dataset[-1]['data'] = dataset[-1]['data'][:, :, ::-1]
                dataset[-1]['components'] = dataset[-1][
                    'components'][:, :, ::-1]

            if swapxy:
                dataset[-1]['data'] = dataset[-1]['data'].transpose((0, 2, 1))
                dataset[-1]['components'] = dataset[-1][
                    'components'].transpose((0, 2, 1))

            # elastic deformations

            dataset[-1]['label'] = malis.seg_to_affgraph(
                dataset[-1]['components'], dataset[-1]['nhood'])

    return dataset
Example #12
def augment_data_simple(dataset):
    nset = len(dataset)
    for iset in range(nset):
        for reflectz in range(2):
            for reflecty in range(2):
                for reflectx in range(2):
                    for swapxy in range(2):

                        if reflectz==0 and reflecty==0 and reflectx==0 and swapxy==0:
                            continue

                        dataset.append({})
                        dataset[-1]['name'] = dataset[iset]['name']+'_x'+str(reflectx)+'_y'+str(reflecty)+'_z'+str(reflectz)+'_xy'+str(swapxy)



                        dataset[-1]['nhood'] = dataset[iset]['nhood']
                        dataset[-1]['data'] = dataset[iset]['data'][:]
                        dataset[-1]['components'] = dataset[iset]['components'][:]

                        if reflectz:
                            dataset[-1]['data']         = dataset[-1]['data'][::-1,:,:]
                            dataset[-1]['components']   = dataset[-1]['components'][::-1,:,:]

                        if reflecty:
                            dataset[-1]['data']         = dataset[-1]['data'][:,::-1,:]
                            dataset[-1]['components']   = dataset[-1]['components'][:,::-1,:]

                        if reflectx:
                            dataset[-1]['data']         = dataset[-1]['data'][:,:,::-1]
                            dataset[-1]['components']   = dataset[-1]['components'][:,:,::-1]

                        if swapxy:
                            dataset[-1]['data']         = dataset[-1]['data'].transpose((0,2,1))
                            dataset[-1]['components']   = dataset[-1]['components'].transpose((0,2,1))

                        dataset[-1]['label'] = malis.seg_to_affgraph(dataset[-1]['components'],dataset[-1]['nhood'])

                        dataset[-1]['reflectz']=reflectz
                        dataset[-1]['reflecty']=reflecty
                        dataset[-1]['reflectx']=reflectx
                        dataset[-1]['swapxy']=swapxy
    return dataset
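The loop above emits the 15 non-identity combinations of the three reflections and the x/y swap per input volume. A usage sketch (toy arrays are illustrative; assumes numpy and malis):

import numpy as np
import malis

base = [{
    'name': 'cube',
    'nhood': malis.mknhood3d(1),
    'data': np.random.rand(4, 4, 4).astype(np.float32),
    'components': np.ones((4, 4, 4), dtype=np.int32),
}]
augmented = augment_data_simple(base)
print(len(augmented))  # 16: the original plus 15 transformed copies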
Example #13
def augment_data_elastic(dataset,ncopy_per_dset):
    dsetout = []
    nset = len(dataset)
    for iset in range(nset):
        for icopy in range(ncopy_per_dset):
            reflectz = np.random.rand()>.5
            reflecty = np.random.rand()>.5
            reflectx = np.random.rand()>.5
            swapxy = np.random.rand()>.5

            dataset.append({})
            dataset[-1]['reflectz']=reflectz
            dataset[-1]['reflecty']=reflecty
            dataset[-1]['reflectx']=reflectx
            dataset[-1]['swapxy']=swapxy

            dataset[-1]['name'] = dataset[iset]['name']
            dataset[-1]['nhood'] = dataset[iset]['nhood']
            dataset[-1]['data'] = dataset[iset]['data'][:]
            dataset[-1]['components'] = dataset[iset]['components'][:]

            if reflectz:
                dataset[-1]['data']         = dataset[-1]['data'][::-1,:,:]
                dataset[-1]['components']   = dataset[-1]['components'][::-1,:,:]

            if reflecty:
                dataset[-1]['data']         = dataset[-1]['data'][:,::-1,:]
                dataset[-1]['components']   = dataset[-1]['components'][:,::-1,:]

            if reflectx:
                dataset[-1]['data']         = dataset[-1]['data'][:,:,::-1]
                dataset[-1]['components']   = dataset[-1]['components'][:,:,::-1]

            if swapxy:
                dataset[-1]['data']         = dataset[-1]['data'].transpose((0,2,1))
                dataset[-1]['components']   = dataset[-1]['components'].transpose((0,2,1))

            # elastic deformations

            dataset[-1]['label'] = malis.seg_to_affgraph(dataset[-1]['components'],dataset[-1]['nhood'])

    return dataset
Example #14
def get_datasets(dataset, origin):
    # output_shape = (40, 30, 80)
    # input_shape = (100, 110, 120)
    output_shape = (2, 3, 4)
    input_shape = tuple([x + 2 for x in output_shape])
    # integer division: the borders feed slice() below and must be ints
    borders = tuple([(in_ - out_) // 2 for (in_, out_) in zip(input_shape, output_shape)])
    input_slices = tuple([slice(x, x + l) for x, l in zip(origin, input_shape)])
    output_slices = tuple([slice(x + b, x + b + l) for x, b, l in zip(origin, borders, output_shape)])
    expected_dataset = dict()
    data_slices = [slice(0, l) for l in dataset['data'].shape]
    data_slices[-3:] = input_slices
    data_slices = tuple(data_slices)
    expected_data_array = np.array(dataset['data'][data_slices], dtype=np.float32)
    expected_data_array = expected_data_array.reshape((1,) + input_shape)
    expected_data_array /= (2.0 ** 8)
    expected_dataset['data'] = expected_data_array
    components_slices = [slice(0, l) for l in dataset['components'].shape]
    components_slices[-3:] = output_slices
    components_slices = tuple(components_slices)
    expected_components_array = np.array(dataset['components'][components_slices]).reshape((1,) + output_shape)
    if type(dataset['components']) is DVIDDataInstance:
        print("Is DVIDDataInstance...")
        print("uniques before:", np.unique(expected_components_array))
        dvid_uuid = dataset['components'].uuid
        body_names_to_exclude = dataset.get('body_names_to_exclude')
        good_bodies = get_good_components(dvid_uuid, body_names_to_exclude)
        expected_components_array = \
            replace_array_except_whitelist(expected_components_array, 0, good_bodies)
        print("uniques after:", np.unique(expected_components_array))
    expected_dataset['components'] = expected_components_array
    components_for_affinity_generation = expected_components_array.reshape(output_shape)
    expected_label = malis.seg_to_affgraph(components_for_affinity_generation, malis.mknhood3d())
    expected_dataset['label'] = expected_label
    if type(dataset['components']) is DVIDDataInstance:
        expected_mask = np.array(expected_components_array > 0).astype(np.uint8)
    else:
        expected_mask = np.ones(shape=(1,) + output_shape, dtype=np.uint8)
    expected_dataset['mask'] = expected_mask
    numpy_dataset = get_numpy_dataset(dataset, input_slices, output_slices, True)
    return numpy_dataset, expected_dataset
Example #15
def ignore_disconnected_components(cells):

    global ignore_label
    if ignore_label is None:
        ignore_label = int(cells.max() + 1)

    print("Relabelling connected components...")
    simple_neighborhood = malis.mknhood3d()
    affinities = malis.seg_to_affgraph(cells, simple_neighborhood)
    relabelled, _ = malis.connected_components_affgraph(
        affinities, simple_neighborhood)

    print("Creating overlay...")
    overlay = np.array([cells.flatten(), relabelled.flatten()])
    print("Finding unique pairs...")
    matches = np.unique(overlay, axis=1)

    print("Finding disconnected labels...")
    orig_to_new = {}
    disconnected = set()
    for orig_id, new_id in zip(matches[0], matches[1]):
        if orig_id == 0 or new_id == 0:
            continue
        if orig_id not in orig_to_new:
            orig_to_new[orig_id] = [new_id]
        else:
            orig_to_new[orig_id].append(new_id)
            disconnected.add(orig_id)

    print("Masking %d disconnected labels..." % len(disconnected))
    ignore_mask = replace(cells, np.array([l for l in disconnected]),
                          np.array([ignore_label], dtype=np.uint64))
    ignore_mask = (ignore_mask == ignore_label).astype(np.uint8)
    print("done.")

    return ignore_mask
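The detection trick above, sketched standalone (a hedged illustration using only numpy and malis; the replace() helper and the ignore_label global from the original module are not needed here):

import numpy as np
import malis

cells = np.zeros((4, 4, 4), dtype=np.int32)
cells[0] = 1  # one slab...
cells[3] = 1  # ...and a second, disconnected slab with the same ID
nhood = malis.mknhood3d()
aff = malis.seg_to_affgraph(cells, nhood)
relabelled, _ = malis.connected_components_affgraph(aff, nhood)
# Original ID 1 now overlays two new IDs, which is how it gets flagged as disconnected.
pairs = np.unique(np.stack([cells.reshape(-1), relabelled.reshape(-1)]), axis=1)
print(pairs)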
Example #16
def train(solver, test_net, data_arrays, train_data_arrays, options):

    global data_slices, label_slices, data_offsets
    caffe.select_device(options.train_device, False)
    
    net = solver.net
    
    test_eval = None
    if (options.test_net != None):
        test_eval = TestNetEvaluator(test_net, net, train_data_arrays, options)
    
    input_dims, output_dims, input_padding = get_spatial_io_dims(net)
    fmaps_in, fmaps_out = get_fmap_io_dims(net)

    dims = len(output_dims)
    losses = []
    
    shapes = []
    # Raw data slice input         (n = 1, f = 1, spatial dims)
    shapes += [[1,fmaps_in] + input_dims]
    # Label data slice input    (n = 1, f = #edges, spatial dims)
    shapes += [[1,fmaps_out] + output_dims]
    
    if (options.loss_function == 'malis'):
        # Connected components input   (n = 1, f = 1, spatial dims)
        shapes += [[1,1] + output_dims]
    if (options.loss_function == 'euclid'):
        # Error scale input   (n = 1, f = #edges, spatial dims)
        shapes += [[1,fmaps_out] + output_dims]
    # Nhood specifications         (n = #edges, f = 3)
    if (('nhood' in data_arrays[0]) and (options.loss_function == 'malis')):
        shapes += [[1,1] + list(np.shape(data_arrays[0]['nhood']))]

    net_io = NetInputWrapper(net, shapes)
    
    data_sizes = [fmaps_in]+[output_dims[di] + input_padding[di] for di in range(0, dims)]
    label_sizes = [fmaps_out] + output_dims

    # Begin the generation of the training set
    training_set = TrainingSetGenerator(data_arrays, options, data_sizes, label_sizes, input_padding)
    training_set.generate_training()

    # Loop from current iteration to last iteration
    for i in range(solver.iter, solver.max_iter):
        
        if (options.test_net != None and i % options.test_interval == 0):
            test_eval.evaluate(i)
        
        # First pick the dataset to train with
        dataset = randint(0, len(data_arrays) - 1)

        offsets = []
        for j in range(0, dims):
            offsets.append(randint(0, data_arrays[dataset]['data'].shape[1+j] - (output_dims[j] + input_padding[j])))
                
        # These are the raw data elements
        data_slice_old = slice_data(data_arrays[dataset]['data'], [0]+offsets, data_sizes)
        data_slice = data_slices.get()

        # print "Compare sizes of data_slices: {0} and {1}".format(data_slice_old.shape, data_slice.shape)

        label_slice = None
        components_slice = None

        if (options.training_method == 'affinity'):
            if ('label' in data_arrays[dataset]):
                label_slice_old = slice_data(data_arrays[dataset]['label'], [0] + [offsets[di] + int(math.ceil(input_padding[di] / float(2))) for di in range(0, dims)], label_sizes)
                label_slice = label_slices.get()

                # print "Compare sizes of label_slices: {0} and {1}".format(label_slice_old.shape, label_slice.shape)
            
            # TODO: Not sure about what to do for components_slice
            if ('components' in data_arrays[dataset]):
                data_offset = data_offsets.get()
                components_slice = slice_data(data_arrays[dataset]['components'][0,:], [data_offset[di] + int(math.ceil(input_padding[di] / float(2))) for di in range(0, dims)], output_dims)
                if (label_slice is None or options.recompute_affinity):
                    label_slice = malis.seg_to_affgraph(components_slice, data_arrays[dataset]['nhood']).astype(float32)
            
            if (components_slice is None or options.recompute_affinity):
                components_slice,ccSizes = malis.connected_components_affgraph(label_slice.astype(int32), data_arrays[dataset]['nhood'])

        else:
            label_slice_old = slice_data(data_arrays[dataset]['label'], [0] + [offsets[di] + int(math.ceil(input_padding[di] / float(2))) for di in range(0, dims)], [fmaps_out] + output_dims)
            label_slice = label_slices.get()


        if options.loss_function == 'malis':
            # Also recomputing the corresponding labels (connected components)
            net_io.setInputs([data_slice, label_slice, components_slice, data_arrays[0]['nhood']])
            
        if options.loss_function == 'euclid':
            if(options.scale_error == True):
                frac_pos = np.clip(label_slice.mean(),0.05,0.95)
                w_pos = 1.0/(2.0*frac_pos)
                w_neg = 1.0/(2.0*(1.0-frac_pos))
            else:
                w_pos = 1
                w_neg = 1
                      
            net_io.setInputs([data_slice, label_slice, error_scale(label_slice,w_neg,w_pos)])

        if options.loss_function == 'softmax':
            # These are the affinity edge values
            net_io.setInputs([data_slice, label_slice])
        
        # Single step
        loss = solver.step(1)
        # sanity_check_net_blobs(net)
        
        while gc.collect():
            pass


        if options.loss_function == 'euclid' or options.loss_function == 'euclid_aniso':
            print("[Iter %i] Loss: %f, frac_pos=%f, w_pos=%f" % (i,loss,frac_pos,w_pos))
        else:
            print("[Iter %i] Loss: %f" % (i,loss))
        # TODO: Store losses to file
        losses += [loss]
Example #17
def get_outputs(original_dataset, output_slice):
    output_shape = tuple(
        [slice_.stop - slice_.start for slice_ in output_slice])
    n_spatial_dimensions = len(output_slice)
    components_shape = (1, ) + output_shape
    mask_shape = (1, ) + output_shape
    affinities_shape = (n_spatial_dimensions, ) + output_shape
    component_slices = [
        slice(0, l) for l in original_dataset['components'].shape
    ]
    component_slices[-n_spatial_dimensions:] = output_slice
    logger.debug("component_slices: {}".format(component_slices))
    components_array = get_zero_padded_array_slice(
        original_dataset['components'], component_slices)
    source_class = type(original_dataset['components'])
    components_are_from_dvid = source_class in dvid_classes
    exclude_strings = original_dataset.get('body_names_to_exclude', [])
    if exclude_strings and components_are_from_dvid:
        dvid_uuid = original_dataset['components'].uuid
        components_to_keep = get_good_components(dvid_uuid, exclude_strings)
        logger.debug("components before: {}".format(
            list(np.unique(components_array))))
        components_array = replace_array_except_whitelist(
            components_array, 0, components_to_keep)
        logger.debug("components after: {}".format(
            list(np.unique(components_array))))
    minimum_component_size = original_dataset.get('minimum_component_size', 0)
    if minimum_component_size > 0:
        components_array = replace_infrequent_values(components_array,
                                                     minimum_component_size, 0)
    component_erosion_steps = original_dataset.get('component_erosion_steps',
                                                   0)
    if component_erosion_steps > 0:
        components_array = erode_value_blobs(components_array,
                                             steps=component_erosion_steps,
                                             values_to_ignore=(0, ))
    components_for_malis = components_array.reshape(output_shape)
    affinities_from_components = malis.seg_to_affgraph(
        components_for_malis, original_dataset['nhood'])
    components_array, _ = malis.connected_components_affgraph(
        affinities_from_components, original_dataset['nhood'])
    components_array = shift_up_component_values(components_array)
    components_array = components_array.reshape(components_shape)
    if 'label' in original_dataset:
        label_shape = original_dataset['label'].shape
        label_slices = [slice(0, l) for l in label_shape]
        label_slices[-n_spatial_dimensions:] = output_slice
        affinities_array = get_zero_padded_array_slice(
            original_dataset['label'], label_slices)
    else:
        # compute affinities from components
        logger.debug(
            "Computing affinity labels from components because 'label' wasn't provided in data source."
        )
        affinities_array = affinities_from_components
    assert affinities_array.shape == affinities_shape, \
        "affinities_array.shape is {actual} but should be {desired}".format(
            actual=str(affinities_array.shape), desired=str(affinities_shape))
    if 'mask' in original_dataset:
        mask_array = get_zero_padded_array_slice(original_dataset['mask'],
                                                 output_slice)
    else:
        if components_are_from_dvid:
            # infer mask values: 1 if component is nonzero, 0 otherwise
            mask_array = np.not_equal(components_array, 0)
            logger.debug(
                "No mask provided. Setting to 1 where components != 0.")
        else:
            # assume no masking
            mask_array = np.ones_like(components_array, dtype=np.uint8)
            logger.debug("No mask provided. Setting to 1 where outputs exist.")
    mask_dilation_steps = original_dataset.get('mask_dilation_steps', 0)
    if mask_dilation_steps > 0:
        mask_array = ndimage.binary_dilation(mask_array,
                                             iterations=mask_dilation_steps)
    mask_array = mask_array.astype(np.uint8)
    mask_array = mask_array.reshape(mask_shape)
    return components_array, affinities_array, mask_array
Example #18
              metrics=[
                  metr_pos_count, metr_neg_count, metr_max_pos_count,
                  metr_max_neg_count, metr_pos_cost, metr_neg_cost
              ])
hist = model.evaluate(data[[0]], gt[[0]])
pdb.set_trace()
training_hist = model.fit(data, gt, batch_size=3, nb_epoch=n_epochs, verbose=0)
plt.figure()
plt.plot(training_hist.history['loss'])
plt.xlabel("epochs")
plt.ylabel("training loss")

# predict an affinity graph and compare it with the affinity graph
# created by the true segmentation
plot_sample = 1
from malis import mknhood3d, seg_to_affgraph
pred_aff = model.predict(data)[plot_sample]
aff = seg_to_affgraph(gt[plot_sample, 0], mknhood3d())
plt.figure()
plt.subplot(131)
plt.pcolor(data[plot_sample, 0, 1], cmap="gray")
plt.title("data")
plt.subplot(132)
plt.pcolor(aff[1, 1], cmap="gray")
plt.title("aff from gt")
plt.subplot(133)
plt.pcolor(pred_aff[1, 1], cmap="gray")
plt.title("predicted aff")

plt.show()
Example #19
training_hist = model.fit(data,
                          gt,
                          batch_size=3,
                          nb_epoch=n_epochs,
                          verbose=0)
plt.figure()
plt.plot(training_hist.history['loss'])
plt.xlabel("epochs")
plt.ylabel("training loss")



# predict an affinity graph and compare it with the affinity graph
# created by the true segmentation
plot_sample = 1
from malis import mknhood3d, seg_to_affgraph
pred_aff = model.predict(data)[plot_sample]
aff = seg_to_affgraph(gt[plot_sample,0], mknhood3d())
plt.figure()
plt.subplot(131)
plt.pcolor(data[plot_sample,0,1], cmap="gray")
plt.title("data")
plt.subplot(132)
plt.pcolor(aff[1,1], cmap="gray")
plt.title("aff from gt")
plt.subplot(133)
plt.pcolor(pred_aff[1,1], cmap="gray")
plt.title("predicted aff")

plt.show()
Example #20
def get_outputs(original_dataset, output_slice):
    output_shape = tuple([slice_.stop - slice_.start for slice_ in output_slice])
    n_spatial_dimensions = len(output_slice)
    components_shape = (1,) + output_shape
    mask_shape = (1,) + output_shape
    affinities_shape = (n_spatial_dimensions,) + output_shape
    component_slices = [slice(0, l) for l in original_dataset['components'].shape]
    component_slices[-n_spatial_dimensions:] = output_slice
    logger.debug("component_slices: {}".format(component_slices))
    components_array = get_zero_padded_array_slice(original_dataset['components'], component_slices)
    source_class = type(original_dataset['components'])
    components_are_from_dvid = source_class in dvid_classes
    exclude_strings = original_dataset.get('body_names_to_exclude', [])
    if exclude_strings and components_are_from_dvid:
        dvid_uuid = original_dataset['components'].uuid
        components_to_keep = get_good_components(dvid_uuid, exclude_strings)
        logger.debug("components before: {}".format(list(np.unique(components_array))))
        components_array = replace_array_except_whitelist(components_array, 0, components_to_keep)
        logger.debug("components after: {}".format(list(np.unique(components_array))))
    minimum_component_size = original_dataset.get('minimum_component_size', 0)
    if minimum_component_size > 0:
        components_array = replace_infrequent_values(components_array, minimum_component_size, 0)
    component_erosion_steps = original_dataset.get('component_erosion_steps', 0)
    if component_erosion_steps > 0:
        components_array = erode_value_blobs(
            components_array,
            steps=component_erosion_steps,
            values_to_ignore=(0,))
    components_for_malis = components_array.reshape(output_shape)
    affinities_from_components = malis.seg_to_affgraph(
        components_for_malis,
        original_dataset['nhood'])
    components_array, _ = malis.connected_components_affgraph(
        affinities_from_components,
        original_dataset['nhood'])
    components_array = shift_up_component_values(components_array)
    components_array = components_array.reshape(components_shape)
    if 'label' in original_dataset:
        label_shape = original_dataset['label'].shape
        label_slices = [slice(0, l) for l in label_shape]
        label_slices[-n_spatial_dimensions:] = output_slice
        affinities_array = get_zero_padded_array_slice(original_dataset['label'], label_slices)
    else:
        # compute affinities from components
        logger.debug("Computing affinity labels from components because 'label' wasn't provided in data source.")
        affinities_array = affinities_from_components
    assert affinities_array.shape == affinities_shape, \
        "affinities_array.shape is {actual} but should be {desired}".format(
            actual=str(affinities_array.shape), desired=str(affinities_shape))
    if 'mask' in original_dataset:
        mask_array = get_zero_padded_array_slice(original_dataset['mask'], output_slice)
    else:
        if components_are_from_dvid:
            # infer mask values: 1 if component is nonzero, 0 otherwise
            mask_array = np.not_equal(components_array, 0)
            logger.debug("No mask provided. Setting to 1 where components != 0.")
        else:
            # assume no masking
            mask_array = np.ones_like(components_array, dtype=np.uint8)
            logger.debug("No mask provided. Setting to 1 where outputs exist.")
    mask_dilation_steps = original_dataset.get('mask_dilation_steps', 0)
    if mask_dilation_steps > 0:
        mask_array = ndimage.binary_dilation(mask_array, iterations=mask_dilation_steps)
    mask_array = mask_array.astype(np.uint8)
    mask_array = mask_array.reshape(mask_shape)
    return components_array, affinities_array, mask_array
Example #21
def old_augment_data_simple(dataset, trn_method='affinity'):
    nset = len(dataset)
    for iset in range(nset):
        for reflectz in range(2):
            for reflecty in range(2):
                for reflectx in range(2):
                    for swapxy in range(2):

                        if reflectz == 0 and reflecty == 0 and reflectx == 0 and swapxy == 0:
                            continue

                        dataset.append({})
                        if trn_method == 'affinity':
                            dataset[-1]['name'] = dataset[iset]['name']
                            dataset[-1]['nhood'] = dataset[iset]['nhood']
                            dataset[-1]['data'] = dataset[iset]['data'][:]
                            dataset[-1]['components'] = dataset[iset][
                                'components'][:]

                            if reflectz:
                                dataset[-1]['data'] = dataset[-1][
                                    'data'][::-1, :, :]
                                dataset[-1]['components'] = dataset[-1][
                                    'components'][::-1, :, :]

                            if reflecty:
                                dataset[-1]['data'] = dataset[-1][
                                    'data'][:, ::-1, :]
                                dataset[-1]['components'] = dataset[-1][
                                    'components'][:, ::-1, :]

                            if reflectx:
                                dataset[-1]['data'] = dataset[-1][
                                    'data'][:, :, ::-1]
                                dataset[-1]['components'] = dataset[-1][
                                    'components'][:, :, ::-1]

                            if swapxy:
                                dataset[-1]['data'] = dataset[-1][
                                    'data'].transpose((0, 2, 1))
                                dataset[-1]['components'] = dataset[-1][
                                    'components'].transpose((0, 2, 1))

                            dataset[-1]['label'] = malis.seg_to_affgraph(
                                dataset[-1]['components'],
                                dataset[-1]['nhood'])

                        elif trn_method == 'pixel':
                            dataset[-1]['name'] = dataset[iset]['name']
                            dataset[-1]['nhood'] = dataset[iset]['nhood']
                            dataset[-1]['data'] = dataset[iset]['data'][:]
                            dataset[-1]['label'] = dataset[iset]['label'][:]
                            #dataset[-1]['components'] = dataset[iset]['components'][:]

                            if reflectz:
                                dataset[-1]['data'] = dataset[-1][
                                    'data'][::-1, :, :]
                                if len(dataset[-1]['label'].shape) == 3:
                                    dataset[-1]['label'] = dataset[-1][
                                        'label'][::-1, :, :]
                                elif len(dataset[-1]['label'].shape) == 4:
                                    dataset[-1]['label'] = dataset[-1][
                                        'label'][:, ::-1, :, :]

                            if reflecty:
                                dataset[-1]['data'] = dataset[-1][
                                    'data'][:, ::-1, :]
                                if len(dataset[-1]['label'].shape) == 3:
                                    dataset[-1]['label'] = dataset[-1][
                                        'label'][:, ::-1, :]
                                elif len(dataset[-1]['label'].shape) == 4:
                                    dataset[-1]['label'] = dataset[-1][
                                        'label'][:, :, ::-1, :]

                            if reflectx:
                                dataset[-1]['data'] = dataset[-1][
                                    'data'][:, :, ::-1]
                                if len(dataset[-1]['label'].shape) == 3:
                                    dataset[-1]['label'] = dataset[-1][
                                        'label'][:, :, ::-1]
                                elif len(dataset[-1]['label'].shape) == 4:
                                    dataset[-1]['label'] = dataset[-1][
                                        'label'][:, :, :, ::-1]

                            if swapxy:
                                dataset[-1]['data'] = dataset[-1][
                                    'data'].transpose((0, 2, 1))
                                if len(dataset[-1]['label'].shape) == 3:
                                    dataset[-1]['label'] = dataset[-1][
                                        'label'].transpose((0, 2, 1))
                                elif len(dataset[-1]['label'].shape) == 4:
                                    dataset[-1]['label'] = dataset[-1][
                                        'label'].transpose((0, 1, 3, 2))

                            #dataset[-1]['label'] = malis.seg_to_affgraph(dataset[-1]['components'],dataset[-1]['nhood'])

                        #### dataset[-1]['transform'] = dataset[iset]['transform']

                        dataset[-1]['reflectz'] = reflectz
                        dataset[-1]['reflecty'] = reflecty
                        dataset[-1]['reflectx'] = reflectx
                        dataset[-1]['swapxy'] = swapxy

    #pdb.set_trace()
    return dataset
Example #22
def train(solver, test_net, data_arrays, train_data_arrays, options):

    global data_slices, label_slices, data_offsets
    caffe.select_device(options.train_device, False)

    net = solver.net

    test_eval = None
    if (options.test_net != None):
        test_eval = TestNetEvaluator(test_net, net, train_data_arrays, options)

    input_dims, output_dims, input_padding = get_spatial_io_dims(net)
    fmaps_in, fmaps_out = get_fmap_io_dims(net)

    dims = len(output_dims)
    losses = []

    shapes = []
    # Raw data slice input         (n = 1, f = 1, spatial dims)
    shapes += [[1, fmaps_in] + input_dims]
    # Label data slice input    (n = 1, f = #edges, spatial dims)
    shapes += [[1, fmaps_out] + output_dims]

    if (options.loss_function == 'malis'):
        # Connected components input   (n = 1, f = 1, spatial dims)
        shapes += [[1, 1] + output_dims]
    if (options.loss_function == 'euclid'):
        # Error scale input   (n = 1, f = #edges, spatial dims)
        shapes += [[1, fmaps_out] + output_dims]
    # Nhood specifications         (n = #edges, f = 3)
    if (('nhood' in data_arrays[0]) and (options.loss_function == 'malis')):
        shapes += [[1, 1] + list(np.shape(data_arrays[0]['nhood']))]

    net_io = NetInputWrapper(net, shapes)

    data_sizes = [fmaps_in] + [
        output_dims[di] + input_padding[di] for di in range(0, dims)
    ]
    label_sizes = [fmaps_out] + output_dims

    # Begin the generation of the training set
    training_set = TrainingSetGenerator(data_arrays, options, data_sizes,
                                        label_sizes, input_padding)
    training_set.generate_training()

    # Loop from current iteration to last iteration
    for i in range(solver.iter, solver.max_iter):

        if (options.test_net != None and i % options.test_interval == 0):
            test_eval.evaluate(i)

        # First pick the dataset to train with
        dataset = randint(0, len(data_arrays) - 1)

        offsets = []
        for j in range(0, dims):
            offsets.append(
                randint(
                    0, data_arrays[dataset]['data'].shape[1 + j] -
                    (output_dims[j] + input_padding[j])))

        # These are the raw data elements
        data_slice_old = slice_data(data_arrays[dataset]['data'],
                                    [0] + offsets, data_sizes)
        data_slice = data_slices.get()

        # print "Compare sizes of data_slices: {0} and {1}".format(data_slice_old.shape, data_slice.shape)

        label_slice = None
        components_slice = None

        if (options.training_method == 'affinity'):
            if ('label' in data_arrays[dataset]):
                label_slice_old = slice_data(
                    data_arrays[dataset]['label'], [0] + [
                        offsets[di] +
                        int(math.ceil(input_padding[di] / float(2)))
                        for di in range(0, dims)
                    ], label_sizes)
                label_slice = label_slices.get()

                # print "Compare sizes of label_slices: {0} and {1}".format(label_slice_old.shape, label_slice.shape)

            # TODO: Not sure about what to do for components_slice
            if ('components' in data_arrays[dataset]):
                data_offset = data_offsets.get()
                components_slice = slice_data(
                    data_arrays[dataset]['components'][0, :], [
                        data_offset[di] +
                        int(math.ceil(input_padding[di] / float(2)))
                        for di in range(0, dims)
                    ], output_dims)
                if (label_slice is None or options.recompute_affinity):
                    label_slice = malis.seg_to_affgraph(
                        components_slice,
                        data_arrays[dataset]['nhood']).astype(float32)

            if (components_slice is None or options.recompute_affinity):
                components_slice, ccSizes = malis.connected_components_affgraph(
                    label_slice.astype(int32), data_arrays[dataset]['nhood'])

        else:
            label_slice_old = slice_data(data_arrays[dataset]['label'], [0] + [
                offsets[di] + int(math.ceil(input_padding[di] / float(2)))
                for di in range(0, dims)
            ], [fmaps_out] + output_dims)
            label_slice = label_slices.get()

        if options.loss_function == 'malis':
            # Also recomputing the corresponding labels (connected components)
            net_io.setInputs([
                data_slice, label_slice, components_slice,
                data_arrays[0]['nhood']
            ])

        if options.loss_function == 'euclid':
            if (options.scale_error == True):
                frac_pos = np.clip(label_slice.mean(), 0.05, 0.95)
                w_pos = 1.0 / (2.0 * frac_pos)
                w_neg = 1.0 / (2.0 * (1.0 - frac_pos))
            else:
                w_pos = 1
                w_neg = 1

            net_io.setInputs([
                data_slice, label_slice,
                error_scale(label_slice, w_neg, w_pos)
            ])

        if options.loss_function == 'softmax':
            # These are the affinity edge values
            net_io.setInputs([data_slice, label_slice])

        # Single step
        loss = solver.step(1)
        # sanity_check_net_blobs(net)

        while gc.collect():
            pass

        if options.loss_function == 'euclid' or options.loss_function == 'euclid_aniso':
            print("[Iter %i] Loss: %f, frac_pos=%f, w_pos=%f" %
                  (i, loss, frac_pos, w_pos))
        else:
            print("[Iter %i] Loss: %f" % (i, loss))
        # TODO: Store losses to file
        losses += [loss]
Example #23
def generate_aff_graph(gt, save_location=None):

    # seg_to_affgraph requires a neighbourhood; the standard 3D one is assumed here
    aff = malis.seg_to_affgraph(gt, malis.mknhood3d())
Example #24
print  "[" +str(datetime.datetime.now())+"]" + "Reading test volume from " + datadir
# hdf5_raw_file = datadir + 'img_normalized.h5'
hdf5_gt_file = datadir + 'groundtruth_seg.h5'
# hdf5_aff_file = datadir + 'groundtruth_aff.h5'

#hdf5_raw_file = 'zebrafish_friedrich/raw.hdf5'
#hdf5_gt_file = 'zebrafish_friedrich/labels_2.hdf5'


# hdf5_raw = h5py.File(hdf5_raw_file, 'r')
h5seg = h5py.File(hdf5_gt_file, 'r')
# hdf5_aff = h5py.File(hdf5_aff_file, 'r')

seg = np.asarray(h5seg['main']).astype(np.int32)
print "[" +str(datetime.datetime.now())+"]" + "Making affinity graph..."
aff = m.seg_to_affgraph(seg,nhood)


print "[" +str(datetime.datetime.now())+"]" + "Affinity shape:" + str(aff.shape)
print "[" +str(datetime.datetime.now())+"]" + "Computing connected components..."
cc,ccSizes = m.connected_components_affgraph(aff,nhood)
print "[" +str(datetime.datetime.now())+"]" + "Making affinity graph again..."
aff2 = m.seg_to_affgraph(cc,nhood)
print "[" +str(datetime.datetime.now())+"]" + "Computing connected components..."
cc2,ccSizes2 = m.connected_components_affgraph(aff2,nhood)

print "[" +str(datetime.datetime.now())+"]" + "Comparing 'seg' and 'cc':"
# ri,fscore,prec,rec = m.rand_index(seg,cc)
# print "\tRand index: %f, fscore: %f, prec: %f, rec: %f" % (ri,fscore,prec,rec)
V_rand,V_rand_split,V_rand_merge = m.compute_V_rand_N2(seg,cc)
print "[" +str(datetime.datetime.now())+"]" + "\tV_rand: %f, V_rand_split: %f, V_rand_merge: %f" % (V_rand,V_rand_split,V_rand_merge)
Example #25
def train(solver, data_arrays, label_arrays, mode='malis'):
    losses = []
    
    net = solver.net
    if mode == 'malis':
        nhood = malis.mknhood3d()
    if mode == 'euclid':
        nhood = malis.mknhood3d()
    if mode == 'malis_aniso':
        nhood = malis.mknhood3d_aniso()
    if mode == 'euclid_aniso':
        nhood = malis.mknhood3d_aniso()
        
    data_slice_cont = np.zeros((1,1,132,132,132), dtype=float32)
    label_slice_cont  = np.zeros((1,1,44,44,44), dtype=float32)
    aff_slice_cont = np.zeros((1,3,44,44,44), dtype=float32)
    nhood_cont = np.zeros((1,1,3,3), dtype=float32)
    error_scale_cont = np.zeros((1,1,44,44,44), dtype=float32)
    
    dummy_slice = np.ascontiguousarray([0]).astype(float32)
    
    # Loop from current iteration to last iteration
    for i in range(solver.iter, solver.max_iter):
        
        # First pick the dataset to train with
        dataset = randint(0, len(data_arrays) - 1)
        data_array = data_arrays[dataset]
        label_array = label_arrays[dataset]
        # affinity_array = affinity_arrays[dataset]
        
        offsets = []
        for j in range(0, dims):
            offsets.append(randint(0, data_array.shape[j] - (config.output_dims[j] + config.input_padding[j])))
        
        
        # These are the raw data elements
        data_slice = slice_data(data_array, offsets, [config.output_dims[di] + config.input_padding[di] for di in range(0, dims)])
        
        # These are the labels (connected components)
        label_slice = slice_data(label_array, [offsets[di] + int(math.ceil(config.input_padding[di] / float(2))) for di in range(0, dims)], config.output_dims)
        
        # These are the affinity edge values
        # Also recomputing the corresponding labels (connected components)
        aff_slice = malis.seg_to_affgraph(label_slice,nhood)
        label_slice,ccSizes = malis.connected_components_affgraph(aff_slice,nhood)

        print((data_slice[None, None, :]).shape)
        print((label_slice[None, None, :]).shape)
        print((aff_slice[None, :]).shape)
        print(nhood.shape)
        
        if mode == 'malis':
            np.copyto(data_slice_cont, np.ascontiguousarray(data_slice[None, None, :]).astype(float32))
            np.copyto(label_slice_cont, np.ascontiguousarray(label_slice[None, None, :]).astype(float32))
            np.copyto(aff_slice_cont, np.ascontiguousarray(aff_slice[None, :]).astype(float32))
            np.copyto(nhood_cont, np.ascontiguousarray(nhood[None, None, :]).astype(float32))
            
            net.set_input_arrays(0, data_slice_cont, dummy_slice)
            net.set_input_arrays(1, label_slice_cont, dummy_slice)
            net.set_input_arrays(2, aff_slice_cont, dummy_slice)
            net.set_input_arrays(3, nhood_cont, dummy_slice)
            
        # We pass the raw and affinity array only
        if mode == 'euclid':
            net.set_input_arrays(0, np.ascontiguousarray(data_slice[None, None, :]).astype(np.float32), np.ascontiguousarray(dummy_slice).astype(np.float32))
            net.set_input_arrays(1, np.ascontiguousarray(aff_slice[None, :]).astype(np.float32), np.ascontiguousarray(dummy_slice).astype(np.float32))
            net.set_input_arrays(2, np.ascontiguousarray(error_scale(aff_slice[None, :], 1.0, 0.045)).astype(np.float32), np.ascontiguousarray(dummy_slice).astype(np.float32))

        if mode == 'softmax':
            net.set_input_arrays(0, np.ascontiguousarray(data_slice[None, None, :]).astype(np.float32), np.ascontiguousarray(dummy_slice).astype(np.float32))
            net.set_input_arrays(1, np.ascontiguousarray(label_slice[None, None, :]).astype(np.float32), np.ascontiguousarray(dummy_slice).astype(np.float32))
        
        # Single step
        loss = solver.step(1)

        # Memory clean up and report (ru_maxrss is KiB on Linux, so /1024 gives MiB)
        print("Memory usage (before GC): %d MiB" % ((resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) / (1024)))
        
        while gc.collect():
            pass

        print("Memory usage (after GC): %d MiB" % ((resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) / (1024)))


        # m = volume_slicer.VolumeSlicer(data=np.squeeze((net.blobs['Convolution18'].data[0])[0,:,:]))
        # m.configure_traits()

        print("Loss: %s" % loss)
        losses += [loss]
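
A hedged driver sketch for train() above (the solver prototxt, volume shape, and label range are invented; train() also depends on module-level config, dims, slice_data, and error_scale defined elsewhere in its project):

import numpy as np
import caffe

# Assumed solver definition whose net exposes one MemoryData input per
# set_input_arrays() index used inside train().
solver = caffe.get_solver('solver.prototxt')

# One toy volume pair: raw intensities plus integer component labels,
# large enough to contain a 132^3 input patch.
data_arrays = [np.random.rand(200, 200, 200).astype(np.float32)]
label_arrays = [np.random.randint(0, 5, size=(200, 200, 200)).astype(np.int32)]

train(solver, data_arrays, label_arrays, mode='malis')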
Example #26
0
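# This fragment assumes the preamble common to the neighboring examples:
# import datetime, h5py, and numpy as np; malis imported as m; datadir
# pointing at the dataset folder; and a neighborhood such as nhood = m.mknhood3d()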
print("[" + str(datetime.datetime.now()) + "]" + "Reading test volume from " +
      datadir)
# hdf5_raw_file = datadir + 'img_normalized.h5'
hdf5_gt_file = datadir + 'groundtruth_seg.h5'
# hdf5_aff_file = datadir + 'groundtruth_aff.h5'

#hdf5_raw_file = 'zebrafish_friedrich/raw.hdf5'
#hdf5_gt_file = 'zebrafish_friedrich/labels_2.hdf5'

# hdf5_raw = h5py.File(hdf5_raw_file, 'r')
h5seg = h5py.File(hdf5_gt_file, 'r')
# hdf5_aff = h5py.File(hdf5_aff_file, 'r')

seg = np.asarray(h5seg['main']).astype(np.int32)
print("[" + str(datetime.datetime.now()) + "]" + "Making affinity graph...")
aff = m.seg_to_affgraph(seg, nhood)

print("[" + str(datetime.datetime.now()) + "]" + "Affinity shape:" +
      str(aff.shape))
print("[" + str(datetime.datetime.now()) + "]" +
      "Computing connected components...")
cc, ccSizes = m.connected_components_affgraph(aff, nhood)
print("[" + str(datetime.datetime.now()) + "]" +
      "Making affinity graph again...")
aff2 = m.seg_to_affgraph(cc, nhood)
print("[" + str(datetime.datetime.now()) + "]" +
      "Computing connected components...")
cc2, ccSizes2 = m.connected_components_affgraph(aff2, nhood)
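# Hedged sanity check (not in the original snippet): rebuilding the affinity
# graph from cc and re-running connected components is a fixed point, so cc2
# should contain the same number of components as cc.
assert np.unique(cc).size == np.unique(cc2).size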

print("[" + str(datetime.datetime.now()) + "]" + "Comparing 'seg' and 'cc':")
# ri,fscore,prec,rec = m.rand_index(seg,cc)
Example #27
0
def train(solver, data_arrays, label_arrays, mode='malis'):
    losses = []

    net = solver.net
    if mode == 'malis':
        nhood = malis.mknhood3d()
    elif mode == 'euclid':
        nhood = malis.mknhood3d()
    elif mode == 'malis_aniso':
        nhood = malis.mknhood3d_aniso()
    elif mode == 'euclid_aniso':
        nhood = malis.mknhood3d_aniso()

    data_slice_cont = np.zeros((1, 1, 132, 132, 132), dtype=np.float32)
    label_slice_cont = np.zeros((1, 1, 44, 44, 44), dtype=np.float32)
    aff_slice_cont = np.zeros((1, 3, 44, 44, 44), dtype=np.float32)
    nhood_cont = np.zeros((1, 1, 3, 3), dtype=np.float32)
    error_scale_cont = np.zeros((1, 1, 44, 44, 44), dtype=np.float32)

    dummy_slice = np.ascontiguousarray([0]).astype(np.float32)

    # Loop from current iteration to last iteration
    for i in range(solver.iter, solver.max_iter):

        # First pick the dataset to train with
        dataset = randint(0, len(data_arrays) - 1)
        data_array = data_arrays[dataset]
        label_array = label_arrays[dataset]
        # affinity_array = affinity_arrays[dataset]

        offsets = []
        for j in range(0, dims):
            offsets.append(
                randint(
                    0, data_array.shape[j] -
                    (config.output_dims[j] + config.input_padding[j])))

        # These are the raw data elements
        data_slice = slice_data(data_array, offsets, [
            config.output_dims[di] + config.input_padding[di]
            for di in range(0, dims)
        ])

        # These are the labels (connected components)
        label_slice = slice_data(label_array, [
            offsets[di] + int(math.ceil(config.input_padding[di] / float(2)))
            for di in range(0, dims)
        ], config.output_dims)

        # These are the affinity edge values
        # Also recomputing the corresponding labels (connected components)
        aff_slice = malis.seg_to_affgraph(label_slice, nhood)
        label_slice, ccSizes = malis.connected_components_affgraph(
            aff_slice, nhood)

        print(data_slice[None, None, :].shape)
        print(label_slice[None, None, :].shape)
        print(aff_slice[None, :].shape)
        print(nhood.shape)

        if mode == 'malis':
            np.copyto(data_slice_cont,
                      np.ascontiguousarray(data_slice[None, None, :]).astype(np.float32))
            np.copyto(label_slice_cont,
                      np.ascontiguousarray(label_slice[None, None, :]).astype(np.float32))
            np.copyto(aff_slice_cont,
                      np.ascontiguousarray(aff_slice[None, :]).astype(np.float32))
            np.copyto(nhood_cont,
                      np.ascontiguousarray(nhood[None, None, :]).astype(np.float32))

            net.set_input_arrays(0, data_slice_cont, dummy_slice)
            net.set_input_arrays(1, label_slice_cont, dummy_slice)
            net.set_input_arrays(2, aff_slice_cont, dummy_slice)
            net.set_input_arrays(3, nhood_cont, dummy_slice)

        # We pass the raw and affinity array only
        if mode == 'euclid':
            net.set_input_arrays(
                0,
                np.ascontiguousarray(data_slice[None, None, :]).astype(np.float32),
                np.ascontiguousarray(dummy_slice).astype(np.float32))
            net.set_input_arrays(
                1,
                np.ascontiguousarray(aff_slice[None, :]).astype(np.float32),
                np.ascontiguousarray(dummy_slice).astype(np.float32))
            net.set_input_arrays(
                2,
                np.ascontiguousarray(error_scale(aff_slice[None, :], 1.0, 0.045)).astype(np.float32),
                np.ascontiguousarray(dummy_slice).astype(np.float32))

        if mode == 'softmax':
            net.set_input_arrays(
                0,
                np.ascontiguousarray(data_slice[None, None, :]).astype(np.float32),
                np.ascontiguousarray(dummy_slice).astype(np.float32))
            net.set_input_arrays(
                1,
                np.ascontiguousarray(label_slice[None, None, :]).astype(np.float32),
                np.ascontiguousarray(dummy_slice).astype(np.float32))

        # Single step
        loss = solver.step(1)

        # Memory clean up and report
        print("Memory usage (before GC): %d MiB" %
              ((resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) / (1024)))

        while gc.collect():
            pass

        print("Memory usage (after GC): %d MiB" %
              ((resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) / (1024)))

        # m = volume_slicer.VolumeSlicer(data=np.squeeze((net.blobs['Convolution18'].data[0])[0,:,:]))
        # m.configure_traits()

        print("Loss: %s" % loss)
        losses += [loss]
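
Both train() variants above weight the Euclidean loss through an error_scale helper defined elsewhere in their projects. A hypothetical reconstruction consistent with the call error_scale(aff_slice[None, :], 1.0, 0.045), reading the two numbers as positive- and negative-edge weights:

import numpy as np

def error_scale(data, w_pos, w_neg):
    # Hypothetical sketch: weight positive affinity edges (within an object)
    # by w_pos and everything else by w_neg, countering class imbalance.
    scale = np.full(data.shape, w_neg, dtype=np.float32)
    scale[data > 0] = w_pos
    return scale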
Example #28
0
def create_augmented_dataset(dname, reflectz, reflecty, reflectx, swapxy,
                             angle):
    dataset = dict()
    new_dname = '{dname}_z{z}_y{y}_x{x}_xy{swapxy}_angle{angle:05.1f}'.format(
        dname=dname,
        z=reflectz,
        y=reflecty,
        x=reflectx,
        swapxy=swapxy,
        angle=angle)
    dataset['name'] = new_dname
    dataset['already_saved'] = False
    new_dname_folder = os.path.join(data_folder_target, dataset['name'])
    # if os.path.exists(new_dname_folder):
    #     dataset['already_saved'] = True
    #     return dataset
    # load and transform original image and components
    for key in ['data', 'components']:
        filename = data_filenames[key]
        source_filename = source_data_filenames[key]
        source_filepath = os.path.join(data_folder_source, dname,
                                       source_filename)
        rotation_order = rotation_orders[key]
        if key in scaling_factors:
            scaling_factor = scaling_factors[key]
        else:
            scaling_factor = None
        with h5py.File(source_filepath, 'r') as h5_file:
            input_array = np.array(h5_file['main'])
        print("original", key, "from", source_filepath, "had dtype & shape",
              input_array.dtype, input_array.shape)
        new_array = create_transformed_array(input_array, reflectz, reflecty,
                                             reflectx, swapxy, angle,
                                             rotation_order, scaling_factor)
        if new_array.dtype != dtypes[key]:
            print("converting {k} from {old} to {new}".format(
                k=key, old=new_array.dtype, new=dtypes[key]))
            print(new_array.max())
            new_array = new_array.astype(dtypes[key])
        print('transformed {0}: '.format(filename), new_array.dtype,
              new_array.shape)
        dataset[key] = new_array
        if key == 'components':
            # make mask array
            original_shape = input_array.shape
            mask_input_array = np.ones(shape=original_shape,
                                       dtype=dtypes['mask'])
            mask_array = create_transformed_array(mask_input_array,
                                                  reflectz,
                                                  reflecty,
                                                  reflectx,
                                                  swapxy,
                                                  angle,
                                                  rotation_order,
                                                  scaling_factor=None)
            dataset['mask'] = mask_array.astype(dtypes['mask'])
            dataset['mask_sum'] = np.sum(dataset['mask'])
        # if new_array.dtype != array_original.dtype:
        #     print('dtype mismatch: new_array.dtype = {0}, array_original.dtype = {1}'
        #           .format(new_array.dtype, array_original.dtype
        #                   ))
        # raise ValueError
    # make affinities from transformed component values
    dataset['label'] = malis.seg_to_affgraph(
        dataset['components'], neighborhood_3d).astype(dtypes['label'])
    return dataset
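
A hedged usage sketch for create_augmented_dataset(): enumerate every reflection/transpose combination at a fixed angle. The dataset name 'sample_A' is invented, and the module-level globals the function reads (data_folder_source, data_folder_target, data_filenames, source_data_filenames, rotation_orders, scaling_factors, dtypes, neighborhood_3d) are assumed to be configured:

import itertools

augmented = []
for reflectz, reflecty, reflectx, swapxy in itertools.product((0, 1), repeat=4):
    augmented.append(
        create_augmented_dataset('sample_A', reflectz, reflecty, reflectx,
                                 swapxy, angle=0.0))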
Example #29
0
import numpy as np
import affinities as af
import waterz as wz
import malis as mal

gt = np.load("data/spir_gt.npy")

sample = gt[0:100, 0:400, 0:400]

nhood = mal.mknhood3d(1)

aff = mal.seg_to_affgraph(sample, nhood)

num_act = np.shape(np.unique(sample))[0] - 1  # number of objects, minus one for the background label

aff = np.asarray(aff, dtype=np.float32)

# wz.agglomerate yields one segmentation per requested threshold; with a
# single threshold the loop below simply unwraps the only result.
seg = wz.agglomerate(aff, thresholds=[1])

for segmentation in seg:
    seg = segmentation

num_calc = np.shape(np.unique(seg))[0] - 1  # same count for the waterz output

print("Calculated: %i" % num_calc)
print("Actual: %i" % num_act)

#print(np.unique(seg))
#print(np.unique(sample))
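
# Hedged note: the exact-equality test below is strict, because waterz assigns
# its own label ids; a perfect segmentation can still differ from `sample` by a
# pure relabeling. A relabeling-invariant check instead tests that the mapping
# between ground-truth and predicted labels is one-to-one:
pairs = np.unique(np.stack([sample.ravel(), seg.ravel()]), axis=1)
one_to_one = (np.unique(pairs[0]).size == pairs.shape[1] == np.unique(pairs[1]).size)
print("gt <-> prediction labels one-to-one: %s" % one_to_one)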

if np.equal(seg, sample).all():