def auto(session_location, options, master_logger):
    master_logger.info("Reading gt_stack")
    gt_stack = imio.read_image_stack(options.gt_stack)
    master_logger.info("Reading test_stack")
    test_stack = imio.read_image_stack(options.test_stack)
    master_logger.info("Finished reading stacks")

    master_logger.info("Loading graph json")
    pairprob_list = load_graph_json(options.ragprob_file)
    master_logger.info("Finished loading graph json")

    master_logger.info("Matching bodies to GT")
    body2gtbody = find_gt_bodies(gt_stack, test_stack)
    master_logger.info("Finished matching bodies to GT")

    # start with every body mapping to itself
    body2body = {}
    for (node1, node2, dummy) in pairprob_list:
        body2body[node1] = node1
        body2body[node2] = node2

    for (node1, node2, dummy) in pairprob_list:
        if body2gtbody[node1] == body2gtbody[node2]:
            print "merge: ", node1, node2
            # resolve both bodies to their current representatives
            node2 = body2body[node2]
            node1 = body2body[node1]
            body2body[node1] = node2
            # repoint every body currently mapped to node1 at node2
            remap_list = []
            for b1, b2 in body2body.items():
                if b2 == node1:
                    remap_list.append(b1)
            for b1 in remap_list:
                body2body[b1] = node2
        else:
            print "split: ", node1, node2

    # save the test segmentation along with the (body, merged-into-body) pairs
    f1 = h5py.File('proofread.h5', 'w')
    f1.create_dataset('stack', data=test_stack)
    arr = numpy.array(body2body.items())
    f1.create_dataset('transforms', data=arr)
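# Illustrative sketch (not part of the original pipeline): the body2body map built
# in auto() behaves like a flat union-find; when two bodies share a groundtruth
# body, every label whose representative is node1's gets repointed at node2's.
# The helper below reproduces that merge step on plain dicts with hypothetical labels.
def _merge_bodies_sketch(body2body, node1, node2):
    rep1 = body2body[node1]
    rep2 = body2body[node2]
    body2body[rep1] = rep2
    # repoint every body whose representative was rep1 at rep2
    for body, rep in list(body2body.items()):
        if rep == rep1:
            body2body[body] = rep2
    return body2body

# Example with hypothetical labels: starting from {1: 1, 2: 2, 3: 1},
# _merge_bodies_sketch(d, 3, 2) yields {1: 2, 2: 2, 3: 2}.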
def valprob(session_location, options, master_logger):
    master_logger.info("Reading gt_stack")
    gt_stack = imio.read_image_stack(options.gt_stack)
    master_logger.info("Reading test_stack")
    test_stack = imio.read_image_stack(options.test_stack)
    master_logger.info("Finished reading stacks")

    master_logger.info("Loading graph json")
    pairprob_list = load_graph_json(options.ragprob_file)
    master_logger.info("Finished loading graph json")

    master_logger.info("Matching bodies to GT")
    body2gtbody = find_gt_bodies(gt_stack, test_stack)
    master_logger.info("Finished matching bodies to GT")

    # one bin per percentage point of predicted merge probability (0-100)
    nomerge_hist = []
    tot_hist = []
    for iter1 in range(0, 101):
        nomerge_hist.append(0)
        tot_hist.append(0)

    for (node1, node2, prob) in pairprob_list:
        tot_hist[int(prob * 100)] += 1
        if body2gtbody[node1] != body2gtbody[node2]:
            nomerge_hist[int(prob * 100)] += 1

    master_logger.info("Probability Agreement with Groundtruth")
    for iter1 in range(0, 101):
        if tot_hist[iter1] == 0:
            per = 0
        else:
            per = (float(nomerge_hist[iter1]) / float(tot_hist[iter1]) * 100)
        print iter1, ", ", per, ", ", tot_hist[iter1]

    auto_proofread(body2gtbody, options.ragprob_file, options.size_threshold,
                   master_logger, options.test_stack, session_location)
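# Illustrative sketch (not part of the original module): valprob() buckets each
# edge by its predicted merge probability and counts how often the two bodies
# actually belong to different groundtruth bodies. The helper below computes the
# same "percent disagreeing per bin" curve from hypothetical inputs.
def _probability_agreement_sketch(pairprob_list, body2gtbody):
    nomerge_hist = [0] * 101
    tot_hist = [0] * 101
    for node1, node2, prob in pairprob_list:
        bin_index = int(prob * 100)
        tot_hist[bin_index] += 1
        if body2gtbody[node1] != body2gtbody[node2]:
            nomerge_hist[bin_index] += 1
    # percent of edges in each bin whose endpoints map to different GT bodies
    return [100.0 * n / t if t else 0 for n, t in zip(nomerge_hist, tot_hist)]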
parser.add_argument("-T", "--thickness", type=int, default=250, help="How thick each substack should be.") parser.add_argument( "-m", "--median-filter", action="store_true", default=False, help="Run a median filter on the input image." ) parser.add_argument( "-g", "--gaussian-filter", type=float, metavar="SIGMA", help="Apply a gaussian filter before watershed." ) parser.add_argument( "-P", "--show-progress", action="store_true", default=True, help="Show a progress bar for the agglomeration." ) parser.add_argument( "-v", "--verbose", action="store_true", default=False, help="Print runtime information about execution." ) args = parser.parse_args() probs = read_image_stack(*args.fin) if args.invert_image: probs = probs.max() - probs if args.median_filter: probs = median_filter(probs, 3) elif args.gaussian_filter is not None: probs = gaussian_filter(probs, args.gaussian_filter) if args.watershed is None: args.watershed = watershed(probs, show_progress=args.show_progress) ws = args.watershed thickness = args.thickness zcrop1 = [0, thickness] overlaps = [2 ** i + 1 for i in range(1, 8)] results_table = zeros([len(args.thresholds), len(range(1, 8))], dtype=bool)
    )
parser.add_argument('fout', help='output filename (.h5)')
parser.add_argument('-I', '--invert-image', action='store_true',
    help='invert the image before applying watershed')
parser.add_argument('-m', '--median-filter', action='store_true',
    help='Apply a median filter before watershed.')
parser.add_argument('-g', '--gaussian-filter', type=float, metavar='SIGMA',
    help='Apply a gaussian filter before watershed.')
parser.add_argument('-P', '--show-progress', action='store_true',
    help='Show a progress bar for the watershed transform.')
args = parser.parse_args()

v = imio.read_image_stack(*args.fin)
if args.invert_image:
    v = v.max() - v
if args.median_filter:
    v = filters.median_filter(v, 3)
if args.gaussian_filter is not None:
    v = filters.gaussian_filter(v, args.gaussian_filter)
if args.seed is not None:
    args.seed, _ = label(args.seed == 0, diamondse(3, args.seed.ndim))
ws = watershed(v, seeds=args.seed, dams=args.build_dams,
               show_progress=args.show_progress)
if os.access(args.fout, os.F_OK):
    os.remove(args.fout)
imio.write_h5_stack(ws, args.fout)
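# Illustrative sketch (not part of the original scripts): reading the saved volumes
# back for inspection. The file name 'watershed.h5' is hypothetical, and the dataset
# names ('stack' for the volume written by imio.write_h5_stack, 'stack'/'transforms'
# in proofread.h5) are assumptions based on the keys used in this module.
def _load_outputs_sketch(ws_path='watershed.h5', proofread_path='proofread.h5'):
    import h5py
    with h5py.File(ws_path, 'r') as f:
        ws = f['stack'][...]               # watershed label volume (assumed key)
    with h5py.File(proofread_path, 'r') as f:
        stack = f['stack'][...]            # test segmentation saved by auto()
        transforms = f['transforms'][...]  # (body, merged-into-body) pairs
    return ws, stack, transforms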