def merge_copy(
    ctx,
    src_layer_spec,
    dst_folder,
    spec_path,
    chunk_xy,
    start_coord,
    end_coord,
    coord_mip,
    mip,
    suffix,
):
    """Merge-copy sections from several source stacks into one destination stack.

    The JSON file at ``spec_path`` maps destination z indices (as strings) to a
    dict with ``cv_path`` (the source image layer path, used to select the
    source stack) and ``z_list`` (the source sections to merge into that z).
    Sections without an entry in the spec are skipped.
    """
    scheduler = ctx.obj["scheduler"]
    # Normalize the layer-name suffix: empty when unset, "_<suffix>" otherwise.
    if suffix is None:
        suffix = ""
    else:
        suffix = f"_{suffix}"

    corgie_logger.debug("Setting up layers...")
    # src_layer_spec arrives as a flat sequence of spec pairs; each pair
    # describes one source stack. Key each stack by its image layer's path so
    # the JSON spec's "cv_path" can select it.
    assert len(src_layer_spec) % 2 == 0
    src_stacks = {}
    for k in range(len(src_layer_spec) // 2):
        src_stack = create_stack_from_spec(
            src_layer_spec[2 * k : 2 * k + 2], name="src", readonly=True
        )
        name = src_stack.get_layers_of_type("img")[0].path
        src_stacks[name] = src_stack

    with open(spec_path, "r") as f:
        spec = json.load(f)

    # Destination mirrors an arbitrary source stack's layout (img layer only).
    dst_stack = stack.create_stack_from_reference(
        reference_stack=list(src_stacks.values())[0],
        folder=dst_folder,
        name="dst",
        types=["img"],
        readonly=False,
        suffix=suffix,
        force_chunk_xy=None,
        force_chunk_z=None,
        overwrite=True,
    )

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)
    for z in range(*bcube.z_range()):
        spec_z = str(z)
        if spec_z in spec.keys():
            src_dict = spec[spec_z]
            # One single-section job per destination z.
            job_bcube = bcube.reset_coords(zs=z, ze=z + 1, in_place=False)
            src_stack = src_stacks[src_dict["cv_path"]]
            z_list = src_dict["z_list"]
            copy_job = MergeCopyJob(
                src_stack=src_stack,
                dst_stack=dst_stack,
                mip=mip,
                bcube=job_bcube,
                chunk_xy=chunk_xy,
                z_list=z_list,
            )
            scheduler.register_job(
                copy_job, job_name="MergeCopy {}".format(job_bcube)
            )
    scheduler.execute_until_completion()
def align_block(
    ctx,
    src_layer_spec,
    dst_folder,
    vote_dist,
    render_pad,
    render_chunk_xy,
    processor_spec,
    pad,
    crop,
    processor_mip,
    chunk_xy,
    start_coord,
    end_coord,
    coord_mip,
    blend_xy,
    force_chunk_xy,
    suffix,
    copy_start,
    use_starters,
    seethrough_spec,
    seethrough_limit,
    seethrough_spec_mip,
    mode,
    chunk_z=1,
):
    """Align a block of sections, optionally splitting it into a backward and
    a forward pass from the block's midpoint (``mode == "bidirectional"``).

    Builds partially-specified Render / Seethrough / ComputeField job factories
    and hands them to one or two AlignBlockJob instances on the scheduler.
    """
    scheduler = ctx.obj["scheduler"]
    # Default suffix "_aligned"; user-supplied suffixes are prefixed with "_".
    if suffix is None:
        suffix = "_aligned"
    else:
        suffix = f"_{suffix}"
    # With no explicit crop, crop away exactly the processing pad.
    if crop is None:
        crop = pad
    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec, name="src", readonly=True)
    src_stack.folder = dst_folder
    # NOTE(review): force_chunk_xy is treated as a boolean flag here — when
    # truthy the destination is rechunked to chunk_xy, otherwise left as-is.
    force_chunk_xy = chunk_xy if force_chunk_xy else None
    dst_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=dst_folder,
        name="dst",
        types=["img", "mask"],
        readonly=False,
        suffix=suffix,
        force_chunk_xy=force_chunk_xy,
        overwrite=True,
    )
    # Partial specifications: job factories with everything but the
    # per-section arguments bound.
    render_method = helpers.PartialSpecification(
        f=RenderJob,
        pad=render_pad,
        chunk_xy=render_chunk_xy,
        chunk_z=1,
        render_masks=False,
    )
    # Seethrough is only configured when a seethrough processor was given.
    if seethrough_spec != tuple():
        assert seethrough_spec_mip is not None
        seethrough_method = helpers.PartialSpecification(
            f=SeethroughCompareJob,
            mip=seethrough_spec_mip,
            processor_spec=seethrough_spec,
            chunk_xy=chunk_xy,
            pad=pad,
            crop=pad,
            seethrough_limit=seethrough_limit,
        )
    else:
        seethrough_method = None
    cf_method = helpers.PartialSpecification(
        f=ComputeFieldJob,
        pad=pad,
        crop=crop,
        processor_mip=processor_mip,
        processor_spec=processor_spec,
        chunk_xy=chunk_xy,
        blend_xy=blend_xy,
        chunk_z=1,
    )
    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)
    if mode == "bidirectional":
        # Split the z range at its midpoint: align [zs, mid) backward and
        # [mid, ze) forward.
        z_mid = (bcube.z_range()[1] + bcube.z_range()[0]) // 2
        bcube_back = bcube.reset_coords(ze=z_mid, in_place=False)
        bcube_forv = bcube.reset_coords(zs=z_mid, in_place=False)
        align_block_job_back = AlignBlockJob(
            src_stack=src_stack,
            dst_stack=dst_stack,
            bcube=bcube_back,
            render_method=render_method,
            cf_method=cf_method,
            seethrough_method=seethrough_method,
            suffix=suffix,
            copy_start=copy_start,
            backward=True,
            vote_dist=vote_dist,
            use_starters=use_starters,
        )
        scheduler.register_job(
            align_block_job_back,
            job_name="Backward Align Block {}".format(bcube),
        )
        # The forward job gets a deepcopy of dst_stack so the two passes do
        # not share mutable layer state; it always copies its starting section.
        align_block_job_forv = AlignBlockJob(
            src_stack=src_stack,
            dst_stack=deepcopy(dst_stack),
            bcube=bcube_forv,
            render_method=render_method,
            cf_method=cf_method,
            seethrough_method=seethrough_method,
            suffix=suffix,
            copy_start=True,
            backward=False,
            vote_dist=vote_dist,
            use_starters=use_starters,
        )
        scheduler.register_job(
            align_block_job_forv,
            job_name="Forward Align Block {}".format(bcube),
        )
    else:
        # Single-direction alignment over the whole bcube.
        align_block_job = AlignBlockJob(
            src_stack=src_stack,
            dst_stack=dst_stack,
            bcube=bcube,
            render_method=render_method,
            cf_method=cf_method,
            seethrough_method=seethrough_method,
            suffix=suffix,
            copy_start=copy_start,
            backward=mode == "backward",
            vote_dist=vote_dist,
            use_starters=use_starters,
        )
        # create scheduler and execute the job
        scheduler.register_job(
            align_block_job, job_name="Align Block {}".format(bcube)
        )
    scheduler.execute_until_completion()
    result_report = (
        f"Aligned layers {[str(l) for l in src_stack.get_layers_of_type('img')]}. "
        f"Results in {[str(l) for l in dst_stack.get_layers_of_type('img')]}"
    )
    corgie_logger.info(result_report)
def render(
    ctx,
    src_layer_spec,
    dst_folder,
    pad,
    render_masks,
    blackout_masks,
    seethrough,
    chunk_xy,
    chunk_z,
    start_coord,
    end_coord,
    mips,
    coord_mip,
    force_chunk_xy,
    force_chunk_z,
    suffix,
):
    """Render the source stack into a new destination stack at the given MIPs.

    Creates a destination stack mirroring the source layers (img + mask) and
    schedules a single RenderJob covering the requested bounding cube.
    """
    scheduler = ctx.obj["scheduler"]

    # Default suffix is "_rendered"; user-supplied suffixes get a "_" prefix.
    suffix = "_rendered" if suffix is None else f"_{suffix}"

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec, name="src", readonly=True)

    # Fall back to the processing chunk sizes when no explicit destination
    # chunking was requested.
    force_chunk_xy = force_chunk_xy or chunk_xy
    force_chunk_z = force_chunk_z or chunk_z

    dst_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=dst_folder,
        name="dst",
        types=["img", "mask"],
        force_chunk_xy=force_chunk_xy,
        force_chunk_z=force_chunk_z,
        suffix=suffix,
        overwrite=True,
    )

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)
    job = RenderJob(
        src_stack=src_stack,
        dst_stack=dst_stack,
        mips=mips,
        pad=pad,
        bcube=bcube,
        chunk_xy=chunk_xy,
        chunk_z=chunk_z,
        render_masks=render_masks,
        blackout_masks=blackout_masks,
    )
    scheduler.register_job(job, job_name="Render {}".format(bcube))
    scheduler.execute_until_completion()
def align_block(ctx, src_layer_spec, tgt_layer_spec, dst_folder, render_pad,
                render_chunk_xy, processor_spec, pad, crop, processor_mip,
                chunk_xy, start_coord, end_coord, coord_mip, suffix,
                copy_start, mode, chunk_z=1):
    """Align a block of sections against a target stack.

    In ``bidirectional`` mode the z range is split at its midpoint and aligned
    backward (first half) and forward (second half); otherwise a single
    AlignBlockJob runs over the whole range in the direction given by ``mode``.
    """
    scheduler = ctx.obj['scheduler']
    # Default suffix "_aligned"; user-supplied suffixes are prefixed with "_".
    if suffix is None:
        suffix = '_aligned'
    else:
        suffix = f"_{suffix}"
    # With no explicit crop, crop away exactly the processing pad.
    if crop is None:
        crop = pad
    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec, name='src',
                                       readonly=True)
    # The target stack is resolved against the source stack's layout.
    tgt_stack = create_stack_from_spec(tgt_layer_spec, name='tgt',
                                       readonly=True, reference=src_stack)
    dst_stack = stack.create_stack_from_reference(reference_stack=src_stack,
                                                  folder=dst_folder,
                                                  name="dst",
                                                  types=["img", "mask"],
                                                  readonly=False,
                                                  suffix=suffix)
    # Job factories with all per-block-invariant arguments bound.
    # Rendering here keeps masks and runs at the finest processor MIP.
    render_method = helpers.PartialSpecification(
        f=RenderJob,
        pad=render_pad,
        chunk_xy=render_chunk_xy,
        chunk_z=1,
        blackout_masks=False,
        render_masks=True,
        mip=min(processor_mip)
    )
    cf_method = helpers.PartialSpecification(
        f=ComputeFieldJob,
        pad=pad,
        crop=crop,
        processor_mip=processor_mip,
        processor_spec=processor_spec,
        chunk_xy=chunk_xy,
        chunk_z=1
    )
    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)
    if mode == 'bidirectional':
        # Split the z range at its midpoint and align each half away from it.
        z_mid = (bcube.z_range()[1] + bcube.z_range()[0]) // 2
        bcube_back = bcube.reset_coords(ze=z_mid, in_place=False)
        bcube_forv = bcube.reset_coords(zs=z_mid, in_place=False)
        align_block_job_back = AlignBlockJob(src_stack=src_stack,
                                             tgt_stack=tgt_stack,
                                             dst_stack=dst_stack,
                                             bcube=bcube_back,
                                             render_method=render_method,
                                             cf_method=cf_method,
                                             suffix=suffix,
                                             copy_start=copy_start,
                                             backward=True)
        scheduler.register_job(align_block_job_back,
                               job_name="Backward Align Block {}".format(bcube))
        # Forward pass uses a deepcopy of dst_stack so the two concurrent jobs
        # do not share mutable layer state; it always copies its start section.
        align_block_job_forv = AlignBlockJob(src_stack=src_stack,
                                             tgt_stack=tgt_stack,
                                             dst_stack=deepcopy(dst_stack),
                                             bcube=bcube_forv,
                                             render_method=render_method,
                                             cf_method=cf_method,
                                             suffix=suffix,
                                             copy_start=True,
                                             backward=False)
        scheduler.register_job(align_block_job_forv,
                               job_name="Forward Align Block {}".format(bcube))
    else:
        # Single-direction alignment over the whole bcube.
        align_block_job = AlignBlockJob(src_stack=src_stack,
                                        tgt_stack=tgt_stack,
                                        dst_stack=dst_stack,
                                        bcube=bcube,
                                        render_method=render_method,
                                        cf_method=cf_method,
                                        suffix=suffix,
                                        copy_start=copy_start,
                                        backward=mode=='backward')
        # create scheduler and execute the job
        scheduler.register_job(align_block_job,
                               job_name="Align Block {}".format(bcube))
    scheduler.execute_until_completion()
    result_report = f"Aligned layers {[str(l) for l in src_stack.get_layers_of_type('img')]}. " \
                    f"Results in {[str(l) for l in dst_stack.get_layers_of_type('img')]}"
    corgie_logger.info(result_report)
def seethrough_block(
    ctx,
    src_layer_spec,
    dst_folder,
    chunk_xy,
    start_coord,
    end_coord,
    coord_mip,
    suffix,
    seethrough_spec,
    seethrough_limit,
    seethrough_spec_mip,
    force_chunk_z=1,
):
    """Run a seethrough pass over a block: render each section, filling
    low-quality regions (per the seethrough processor) from neighbors.
    """
    scheduler = ctx.obj["scheduler"]

    # Default suffix "_seethrough"; user-supplied suffixes get a "_" prefix.
    suffix = "_seethrough" if suffix is None else f"_{suffix}"

    # Seethrough operates on unpadded, uncropped chunks.
    pad = 0
    crop = 0

    corgie_logger.debug("Setting up layers...")
    source = create_stack_from_spec(src_layer_spec, name="src", readonly=True)
    source.folder = dst_folder
    destination = stack.create_stack_from_reference(
        reference_stack=source,
        folder=dst_folder,
        name="dst",
        types=["img", "mask"],
        readonly=False,
        suffix=suffix,
        overwrite=True,
        force_chunk_z=force_chunk_z,
    )

    # Job factories with the per-section arguments left unbound.
    renderer = helpers.PartialSpecification(
        f=RenderJob,
        pad=pad,
        chunk_xy=chunk_xy,
        chunk_z=1,
        render_masks=False,
    )
    comparer = helpers.PartialSpecification(
        f=SeethroughCompareJob,
        mip=seethrough_spec_mip,
        processor_spec=seethrough_spec,
        chunk_xy=chunk_xy,
        pad=pad,
        crop=pad,
        seethrough_limit=seethrough_limit,
    )

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)
    job = SeethroughBlockJob(
        src_stack=source,
        dst_stack=destination,
        bcube=bcube,
        render_method=renderer,
        seethrough_method=comparer,
        suffix=suffix,
    )
    scheduler.register_job(job, job_name="Seethrough Block {}".format(bcube))
    scheduler.execute_until_completion()

    result_report = (
        f"Rendered layers {[str(l) for l in source.get_layers_of_type('img')]}. "
        f"Results in {[str(l) for l in destination.get_layers_of_type('img')]}"
    )
    corgie_logger.info(result_report)
def align(ctx, src_layer_spec, tgt_layer_spec, dst_folder, render_pad,
          render_chunk_xy, processor_spec, pad, crop, processor_mip, chunk_xy,
          start_coord, end_coord, coord_mip, bad_starter_path, block_size,
          block_overlap, blend_xy, force_chunk_xy, suffix, copy_start,
          seethrough_spec, seethrough_spec_mip):
    """Align a full z range by partitioning it into overlapping blocks,
    aligning blocks independently (alternating even/odd destination stacks),
    and — eventually — stitching them back together (stitching is TODO).
    """
    scheduler = ctx.obj['scheduler']
    # Default suffix "_aligned"; user-supplied suffixes are prefixed with "_".
    if suffix is None:
        suffix = '_aligned'
    else:
        suffix = f"_{suffix}"
    # With no explicit crop, crop away exactly the processing pad.
    if crop is None:
        crop = pad
    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec, name='src',
                                       readonly=True)
    src_stack.folder = dst_folder
    tgt_stack = create_stack_from_spec(tgt_layer_spec, name='tgt',
                                       readonly=True, reference=src_stack)
    if force_chunk_xy is None:
        force_chunk_xy = chunk_xy
    # Final destination plus two scratch stacks: adjacent blocks alternate
    # between "even" and "odd" so their overlaps don't overwrite each other.
    dst_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=dst_folder,
        name="dst",
        types=["img", "mask"],
        readonly=False,
        suffix=suffix,
        force_chunk_xy=force_chunk_xy,
        overwrite=True)
    even_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=os.path.join(dst_folder, 'even'),
        name="even",
        types=["img", "mask"],
        readonly=False,
        suffix=suffix,
        force_chunk_xy=force_chunk_xy,
        overwrite=True)
    odd_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=os.path.join(dst_folder, 'odd'),
        name="odd",
        types=["img", "mask"],
        readonly=False,
        suffix=suffix,
        force_chunk_xy=force_chunk_xy,
        overwrite=True)
    corgie_logger.debug("Done!")
    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)
    corgie_logger.debug("Calculating blocks...")
    # TODO: read in bad starter sections
    # (bad_starter_path is accepted but not parsed yet, so this stays empty)
    bad_starter_sections = []
    blocks = []
    z = bcube.z_range()[0]
    # Partition [z_start, z_end) into blocks of block_size plus block_overlap
    # extra sections; a block's end is pushed past any bad starter section so
    # the next block never begins on one.
    while z < bcube.z_range()[-1]:
        block_start = z
        block_end = z + block_size
        while block_end + block_overlap in bad_starter_sections and \
                block_end + block_overlap < bcube.z_range()[-1]:
            block_end += 1
        block = Block(block_start, block_end + block_overlap)
        blocks.append(block)
        z = block_end
    corgie_logger.debug("Done!")
    # Job factories with per-block-invariant arguments bound.
    render_method = helpers.PartialSpecification(
        f=RenderJob,
        pad=render_pad,
        chunk_xy=render_chunk_xy,
        chunk_z=1,
        render_masks=False,
    )
    cf_method = helpers.PartialSpecification(f=ComputeFieldJob,
                                             pad=pad,
                                             crop=crop,
                                             processor_mip=processor_mip,
                                             processor_spec=processor_spec,
                                             chunk_xy=chunk_xy,
                                             blend_xy=blend_xy,
                                             chunk_z=1)
    # Seethrough is only configured when a seethrough processor was given.
    if seethrough_spec is not None:
        assert seethrough_spec_mip is not None
        seethrough_method = helpers.PartialSpecification(
            f=CompareSectionsJob,
            mip=seethrough_spec_mip,
            processor_spec=seethrough_spec,
            chunk_xy=chunk_xy,
            pad=pad,
            crop=pad,
        )
    else:
        seethrough_method = None
    corgie_logger.debug("Aligning blocks...")
    for i in range(len(blocks)):
        block = blocks[i]
        block_bcube = bcube.copy()
        block_bcube.reset_coords(zs=block.z_start, ze=block.z_end)
        # Alternate destinations so overlapping neighbor blocks don't clobber
        # each other's output.
        if i % 2 == 0:
            block_dst_stack = even_stack
        else:
            block_dst_stack = odd_stack
        align_block_job_forv = AlignBlockJob(
            src_stack=src_stack,
            tgt_stack=tgt_stack,
            dst_stack=block_dst_stack,
            bcube=block_bcube,
            render_method=render_method,
            cf_method=cf_method,
            seethrough_method=seethrough_method,
            suffix=suffix,
            copy_start=copy_start,
            backward=False)
        scheduler.register_job(align_block_job_forv,
                               job_name=f"Forward Align {block} {block_bcube}")
    scheduler.execute_until_completion()
    corgie_logger.debug("Done!")
    corgie_logger.debug("Stitching blocks...")
    #TODO
    #stitch_blocks_job = StitchBlockJob(
    #    blocks=blocks,
    #    src_stack=src_stack,
    #    dst_stack=dst_stack,
    #    bcube=bcube,
    #    suffix=suffix,
    #    render_method=render_method,
    #    cf_method=cf_method
    #)
    #scheduler.register_job(stitch_blocks_job, job_name=f"Stitch blocks {bcube}")
    #scheduler.execute_until_completion()
    corgie_logger.debug("Done!")
    result_report = f"Aligned layers {[str(l) for l in src_stack.get_layers_of_type('img')]}. " \
                    f"Results in {[str(l) for l in dst_stack.get_layers_of_type('img')]}"
    corgie_logger.info(result_report)
def normalize(ctx, src_layer_spec, dst_folder, stats_mip, mip_start, mip_end,
              chunk_xy, chunk_z, start_coord, end_coord, coord_mip, suffix,
              recompute_stats, mask_value):
    """Normalize image layers using per-section mean/variance statistics.

    For each image layer in the source stack, (optionally) compute section
    statistics at ``stats_mip``, then render a normalized float32 copy of the
    layer for every MIP in ``[mip_start, mip_end]``.
    """
    if chunk_z != 1:
        # Bug fix: the original raised `NotImplemented`, which is a sentinel
        # constant, not an exception class -- raising it is itself a TypeError.
        raise NotImplementedError(
            "Compute Statistics command currently only "
            "supports per-section statistics."
        )
    result_report = ""
    scheduler = ctx.obj['scheduler']
    # Default suffix "_norm"; user-supplied suffixes are prefixed with "_".
    if suffix is None:
        suffix = '_norm'
    else:
        suffix = f"_{suffix}"
    # By default, compute statistics at the coarsest normalized MIP.
    if stats_mip is None:
        stats_mip = mip_end

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec, name='src',
                                       readonly=True)
    dst_stack = stack.create_stack_from_reference(reference_stack=src_stack,
                                                  folder=dst_folder,
                                                  name="dst",
                                                  types=["img"],
                                                  readonly=False,
                                                  suffix=suffix,
                                                  overwrite=True)
    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    img_layers = src_stack.get_layers_of_type("img")
    mask_layers = src_stack.get_layers_of_type("mask")
    field_layers = src_stack.get_layers_of_type("field")
    # Normalization of field layers is not supported.
    assert len(field_layers) == 0

    # Bug fix: removed a leftover `import pdb; pdb.set_trace()` debugging
    # breakpoint that halted every run at the top of this loop.
    for l in img_layers:
        # Per-section statistics live in "section_value" sublayers next to
        # the destination images.
        mean_layer = l.get_sublayer(
            name=f"mean_{l.name}{suffix}",
            path=os.path.join(dst_folder, f"mean_{l.name}{suffix}"),
            layer_type="section_value",
        )
        var_layer = l.get_sublayer(
            name=f"var_{l.name}{suffix}",
            path=os.path.join(dst_folder, f"var_{l.name}{suffix}"),
            layer_type="section_value",
        )
        if recompute_stats:
            compute_stats_job = ComputeStatsJob(src_layer=l,
                                                mask_layers=mask_layers,
                                                mean_layer=mean_layer,
                                                var_layer=var_layer,
                                                bcube=bcube,
                                                mip=stats_mip,
                                                chunk_xy=chunk_xy,
                                                chunk_z=chunk_z)
            # Statistics must exist before normalization can start.
            scheduler.register_job(
                compute_stats_job,
                job_name=f"Compute Stats. Layer: {l}, Bcube: {bcube}")
            scheduler.execute_until_completion()

        # Normalized output is float32 regardless of the source dtype.
        dst_layer = l.get_sublayer(
            name=f"{l.name}{suffix}",
            path=os.path.join(dst_folder, "img", f"{l.name}{suffix}"),
            layer_type=l.get_layer_type(),
            dtype='float32',
            overwrite=True)
        result_report += f"Normalized {l} -> {dst_layer}\n"
        for mip in range(mip_start, mip_end + 1):
            normalize_job = NormalizeJob(src_layer=l,
                                         mask_layers=mask_layers,
                                         dst_layer=deepcopy(dst_layer),
                                         mean_layer=mean_layer,
                                         var_layer=var_layer,
                                         stats_mip=stats_mip,
                                         mip=mip,
                                         bcube=bcube,
                                         chunk_xy=chunk_xy,
                                         chunk_z=chunk_z,
                                         mask_value=mask_value)
            scheduler.register_job(normalize_job,
                                   job_name=f"Normalize {bcube}, MIP {mip}")
            scheduler.execute_until_completion()
    corgie_logger.info(result_report)
def create_segmentation_masks(
    ctx,
    src_layer_spec,
    dst_folder,
    processor_spec,
    pad,
    crop,
    processor_mip,
    dst_mip,
    chunk_xy,
    chunk_z,
    force_chunk_xy,
    start_coord,
    end_coord,
    coord_mip,
    suffix,
    similarity_threshold,
    compute_similarities,
    compute_slip_mask,
    compute_step_mask,
    compute_affinity_mask,
):
    """Build misalignment masks for segmentation from section similarities.

    Optionally computes similarity images at z offsets -1..-3, binarizes them
    into masks on the source stack, then derives slip, step, and affinity
    masks as requested by the boolean flags.
    """
    scheduler = ctx.obj["scheduler"]
    # Default suffix "_aligned"; user-supplied suffixes are prefixed with "_".
    if suffix is None:
        suffix = "_aligned"
    else:
        suffix = f"_{suffix}"
    # With no explicit crop, crop away exactly the processing pad.
    if crop is None:
        crop = pad
    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec, name="src", readonly=True)
    src_stack.folder = dst_folder
    if force_chunk_xy is None:
        force_chunk_xy = chunk_xy
    dst_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=dst_folder,
        name="dst",
        types=["img", "mask"],
        readonly=False,
        suffix=suffix,
        force_chunk_xy=force_chunk_xy,
        overwrite=True,
    )
    corgie_logger.debug("Done!")
    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)
    if compute_similarities:
        # Compare each section against its three preceding neighbors.
        z_offsets = range(-1, -4, -1)
        for tgt_z_offset in z_offsets:
            # Only compute similarities that weren't supplied in the src spec.
            if tgt_z_offset not in src_stack:
                dst_layer = dst_stack.create_sublayer(
                    name=tgt_z_offset,
                    layer_type="img",
                    overwrite=True,
                    layer_args={"dtype": "uint8"},
                )
                # processor_spec may be a dict keyed by z offset, or a single
                # spec applied to every offset.
                proc_spec = json.loads(processor_spec)
                if isinstance(proc_spec, dict):
                    assert str(tgt_z_offset) in proc_spec
                    proc_spec = json.dumps(proc_spec[str(tgt_z_offset)])
                else:
                    proc_spec = processor_spec
                compare_job = CompareSectionsJob(
                    src_stack=src_stack,
                    tgt_stack=src_stack,
                    dst_layer=dst_layer,
                    chunk_xy=chunk_xy,
                    processor_spec=proc_spec,
                    pad=pad,
                    bcube=bcube,
                    tgt_z_offset=tgt_z_offset,
                    suffix=suffix,
                    mip=processor_mip,
                    dst_mip=dst_mip,
                )
                scheduler.register_job(
                    compare_job,
                    job_name="Compare Sections Job {}, tgt z offset {}".format(
                        bcube, tgt_z_offset
                    ),
                )
        scheduler.execute_until_completion()
        result_report = f"Similarity results in {[str(l) for l in dst_stack.get_layers_of_type('img')]}"
        corgie_logger.info(result_report)
        # If similarities were just computed, add them to the src_stack as masks.
        # Otherwise, they need to be included with src_stack_specs as masks.
        # See arg help above for similarity mask and misalignment mask definitions.
        for layer_name in z_offsets:
            img_layer = dst_stack[layer_name]
            # Similarity below threshold => misaligned => mask on.
            binarizer = {
                "binarization": ["gt", similarity_threshold],
                # "cv_params": {"cache": True},
            }
            layer_dict = {
                "path": img_layer.path,
                "name": img_layer.name,
                "type": "mask",
                "args": binarizer,
            }
            mask_layer = create_layer_from_dict(layer_dict, reference=dst_stack)
            src_stack.add_layer(mask_layer)
    corgie_logger.info("Computing slip & step masks")
    if compute_slip_mask:
        slip_layer = dst_stack.create_sublayer(
            name="slip", layer_type="mask", overwrite=True
        )
        # Slip detection needs one neighbor on each side; shrink z by 1.
        slip_bcube = bcube.reset_coords(
            zs=bcube.z[0] + 1, ze=bcube.z[1] - 1, in_place=False
        )
        slip_misalignments_job = DetectSlipMisalignmentsJob(
            src_stack=src_stack,
            dst_layer=slip_layer,
            mip=dst_mip,
            bcube=slip_bcube,
            pad=pad,
            chunk_xy=chunk_xy,
            chunk_z=chunk_z,
        )
        scheduler.register_job(
            slip_misalignments_job,
            job_name="Detect Slip Misalignments {}".format(bcube),
        )
    if compute_step_mask:
        step_layer = dst_stack.create_sublayer(
            name="step", layer_type="mask", overwrite=True
        )
        # Step detection needs two neighbors on each side; shrink z by 2.
        step_bcube = bcube.reset_coords(
            zs=bcube.z[0] + 2, ze=bcube.z[1] - 2, in_place=False
        )
        step_misalignments_job = DetectStepMisalignmentsJob(
            src_stack=src_stack,
            dst_layer=step_layer,
            mip=dst_mip,
            bcube=step_bcube,
            pad=pad,
            chunk_xy=chunk_xy,
            chunk_z=chunk_z,
        )
        scheduler.register_job(
            step_misalignments_job,
            job_name="Detect Step Misalignments {}".format(bcube),
        )
    if compute_slip_mask or compute_step_mask:
        # Execute slip & step masks at the same time
        scheduler.execute_until_completion()
        # Bug fix: the original report referenced both layers unconditionally
        # (NameError when only one mask type was requested) and read "in in".
        computed_masks = []
        if compute_slip_mask:
            computed_masks.append(str(slip_layer))
        if compute_step_mask:
            computed_masks.append(str(step_layer))
        result_report = f"Slip & step masks in {tuple(computed_masks)}"
        corgie_logger.info(result_report)
    if compute_affinity_mask:
        corgie_logger.info("Creating affinity masks")
        affinity_layer = dst_stack.create_sublayer(
            name="affinity", layer_type="mask", overwrite=True
        )
        # Affinity mask: 3+ consecutive slip sections, or any step section.
        three_consecutive_exp = DetectConsecutiveMasksJob.get_exp(n=3, key="slip")
        exp = {
            "inputs": [
                three_consecutive_exp,
                {"weight": 1, "key": "step", "offset": 0},
            ],
            "threshold": 0,
        }
        affinity_masks_job = CombineMasksJob(
            src_stack=dst_stack,
            exp=exp,
            dst_layer=affinity_layer,
            mip=dst_mip,
            bcube=bcube,
            pad=pad,
            chunk_xy=chunk_xy,
            chunk_z=chunk_z,
        )
        scheduler.register_job(
            affinity_masks_job, job_name="Affinity Masks {}".format(bcube)
        )
        scheduler.execute_until_completion()
        result_report = f"Results in {str(affinity_layer)}"
        corgie_logger.info(result_report)