import numpy as np
import h5py
from gala import imio


def write_saalfeld(fn, raw, labels, res=np.array([12., 1, 1])):
    """Write raw and label volumes to one HDF5 file with resolution metadata."""
    imio.write_h5_stack(raw, fn, group='raw')
    imio.write_h5_stack(labels, fn, group='labels')
    # reopen in append mode to attach the voxel resolution to each group
    f = h5py.File(fn, 'a')
    f['/raw'].attrs['resolution'] = res
    f['/labels'].attrs['resolution'] = res
    f.close()
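# Usage sketch (not from the original script; the file name, array shapes and
# dtypes below are illustrative assumptions):
#
#   raw = np.zeros((10, 100, 100), dtype=np.uint8)
#   labels = np.zeros((10, 100, 100), dtype=np.uint32)
#   write_saalfeld('example.h5', raw, labels, res=np.array([12., 1., 1.]))
#   # each group can then be read back independently, e.g.
#   # imio.read_h5_stack('example.h5', group='labels')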
def write_h5(arr, path, group="stack", print_top_image=True, dry=False): arr += 1 print "Writing stack with shape %s to %s" % (str(arr.shape), path) ensure_file(path) if not dry: imio.write_h5_stack(arr, path, group=group, compression="lzf") if not print_top_image: return top_im = arr[0,:,:][np.newaxis,:,:] colors = h5topng.build_color_map(top_im,"random") h5topng.output_pngs(top_im, colors, path+"-top-")
import numpy as np
from gala import agglo, classify, imio

# g_train, gt_train, fc, ws_train, ws_test, pr_test, p4_train and p4_test are
# assumed to have been defined earlier in the session (watersheds, ground
# truth, probability maps and the feature manager for the train/test volumes).

# train and apply a classifier on the single-channel probability map
np.random.seed(0)  # seed the global RNG so learn_agglomerate and fit are reproducible
(X, y, w, merges) = map(np.copy, map(np.ascontiguousarray,
                                     g_train.learn_agglomerate(gt_train, fc)[0]))
print(X.shape)
np.savez('example-data/train-set.npz', X=X, y=y)
y = y[:, 0]
rf = classify.DefaultRandomForest()
np.random.seed(0)
rf = rf.fit(X, y)
classify.save_classifier(rf, 'example-data/rf-1.joblib')
learned_policy = agglo.classifier_probability(fc, rf)
g_test = agglo.Rag(ws_test, pr_test, learned_policy, feature_manager=fc)
g_test.agglomerate(0.5)
seg_test1 = g_test.get_segmentation()
imio.write_h5_stack(seg_test1, 'example-data/test-seg1.lzf.h5', compression='lzf')

# repeat the same procedure for the 4-channel probability map
g_train4 = agglo.Rag(ws_train, p4_train, feature_manager=fc)
np.random.seed(0)
(X4, y4, w4, merges4) = map(np.copy, map(np.ascontiguousarray,
                                         g_train4.learn_agglomerate(gt_train, fc)[0]))
print(X4.shape)
np.savez('example-data/train-set4.npz', X=X4, y=y4)
y4 = y4[:, 0]
rf4 = classify.DefaultRandomForest()
np.random.seed(0)
rf4 = rf4.fit(X4, y4)
classify.save_classifier(rf4, 'example-data/rf-4.joblib')
learned_policy4 = agglo.classifier_probability(fc, rf4)
g_test4 = agglo.Rag(ws_test, p4_test, learned_policy4, feature_manager=fc)
g_test4.agglomerate(0.5)
seg_test4 = g_test4.get_segmentation()
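# Evaluation sketch (assumption, not in the original log): if a test ground
# truth volume gt_test were loaded the same way as gt_train, the two
# segmentations could be scored with split variation of information:
#
#   from gala import evaluate as ev
#   print(ev.split_vi(seg_test1, gt_test))
#   print(ev.split_vi(seg_test4, gt_test))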
from gala import morpho
from gala import imio
import numpy as np
from skimage.measure import label

# normalize the membrane probability maps to [0, 1] and invert them
pr = imio.read_image_stack('membrane/*.tiff')
pr = 1 - pr / np.max(pr)

# run a per-plane watershed and save the oversegmentation
ws = morpho.watershed_sequence(pr, axis=0, n_jobs=4, connectivity=2,
                               smooth_thresh=0.04, minimum_seed_size=0)
imio.write_h5_stack(ws, 'watershed.lzf.h5', compression='lzf')

# split the volume into four 625x625 quadrants (full depth in z)
slices = [(slice(None), slice(None, 625), slice(None, 625)),
          (slice(None), slice(None, 625), slice(625, None)),
          (slice(None), slice(625, None), slice(None, 625)),
          (slice(None), slice(625, None), slice(625, None))]
wss = [ws[s] for s in slices]

# relabel each tile so its fragment ids are contiguous, then save it
for i, vol in enumerate(wss):
    fn = 'watershed-%i.lzf.h5' % i
    vol_relabel = label(vol)
    print(np.max(vol_relabel))
    imio.write_h5_stack(vol_relabel, fn, compression='lzf')
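# Sanity-check sketch (assumption, not part of the original script): each tile
# can be read back from the default 'stack' group; skimage.measure.label treats
# 0 as background, so the relabelled ids should start at 1.
#
#   ws0 = imio.read_h5_stack('watershed-0.lzf.h5')
#   print(ws0.shape, ws0.min(), ws0.max())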
# IPython log file

from gala import imio
import numpy as np
from skimage.measure import label

# the same four quadrants used to tile the watershed volume
slices = [(slice(None), slice(None, 625), slice(None, 625)),
          (slice(None), slice(None, 625), slice(625, None)),
          (slice(None), slice(625, None), slice(None, 625)),
          (slice(None), slice(625, None), slice(625, None))]

# split the ground truth bodies into tiles, relabel each, and save
gt = imio.read_h5_stack('ground-truth.h5', group='bodies')
gts = [gt[s] for s in slices]
for i, vol in enumerate(gts):
    fn = 'ground-truth-%i.lzf.h5' % i
    vol_relabel = label(vol)
    print(np.max(vol_relabel))
    imio.write_h5_stack(vol_relabel.astype(np.uint16), fn, compression='lzf')

# split the membrane probability maps into the same tiles and save
pr = imio.read_image_stack('membrane/*.tiff')
prs = [pr[s] for s in slices]
for i, vol in enumerate(prs):
    fn = 'probabilities-%i.lzf.h5' % i
    imio.write_h5_stack(vol.astype(np.uint8), fn, compression='lzf')
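# Sanity-check sketch (assumption, not in the original log): corresponding
# ground-truth and probability tiles should have the same shape as each other
# and as the watershed tiles written earlier.
#
#   for i in range(4):
#       gt_i = imio.read_h5_stack('ground-truth-%i.lzf.h5' % i)
#       pr_i = imio.read_h5_stack('probabilities-%i.lzf.h5' % i)
#       assert gt_i.shape == pr_i.shape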
from gala import imio
# ensure_file is a helper defined elsewhere in this project


def write_h5(arr, path, group):
    print("Writing stack with shape %s to %s" % (str(arr.shape), path))
    ensure_file(path)
    imio.write_h5_stack(arr, path, group=group, compression="lzf")