Example #1
def create_skeletons(
    ctx,
    seg_layer_spec,
    dst_folder,
    timestamp,
    mip,
    teasar_scale,
    teasar_const,
    ids,
    ids_filepath,
    tick_threshold,
    chunk_xy,
    chunk_z,
    single_merge_mode,
    start_coord,
    end_coord,
    coord_mip,
):
    scheduler = ctx.obj["scheduler"]

    corgie_logger.debug("Setting up layers...")
    seg_stack = create_stack_from_spec(seg_layer_spec,
                                       name="src",
                                       readonly=True)
    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    object_ids = ids
    if ids_filepath is not None:
        object_ids = []
        with open(ids_filepath, "r") as f:
            line = f.readline()
            while line:
                object_ids.append(int(line))
                line = f.readline()
    if object_ids is None or len(object_ids) == 0:
        raise ValueError("Must specify ids to skeletonize")
    object_ids = list(object_ids)
    teasar_params = {"scale": teasar_scale, "const": teasar_const}

    seg_layer = seg_stack.get_layers_of_type("segmentation")[0]
    skeleton_job = SkeletonJob(
        seg_layer=seg_layer,
        dst_path=dst_folder,
        timestamp=timestamp,
        bcube=bcube,
        chunk_xy=chunk_xy,
        chunk_z=chunk_z,
        mip=mip,
        teasar_params=teasar_params,
        object_ids=object_ids,
        tick_threshold=tick_threshold,
        single_merge_mode=single_merge_mode,
    )

    scheduler.register_job(skeleton_job,
                           job_name="Skeletonize {}".format(bcube))

    scheduler.execute_until_completion()
    result_report = f"Skeletonized {str(seg_layer)}. "
    corgie_logger.info(result_report)
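
The ids file above is assumed to hold one integer segment id per line. A more compact way to load it, shown as a sketch (load_object_ids is a hypothetical helper; unlike the loop above, it also skips blank lines):

def load_object_ids(path):
    with open(path, "r") as f:
        return [int(line) for line in f if line.strip()]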
Example #2
def downsample(ctx, src_layer_spec, dst_layer_spec, mip_start,
        mip_end, chunk_xy, chunk_z, mips_per_task, start_coord,
        end_coord, coord_mip):
    scheduler = ctx.obj['scheduler']
    corgie_logger.debug("Setting up Source and Destination layers...")

    src_layer = create_layer_from_spec(src_layer_spec,
            caller_name='src layer',
            readonly=True)

    if dst_layer_spec is None:
        corgie_logger.info("Destination layer not specified. Using Source layer "
                "as Destination.")
        dst_layer = src_layer
        dst_layer.readonly = False
    else:
        dst_layer = create_layer_from_spec(dst_layer_spec,
            caller_name='dst_layer layer',
            readonly=False,
            reference=src_layer, chunk_z=chunk_z, overwrite=True)
    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)
    downsample_job = DownsampleJob(src_layer=src_layer,
                                   dst_layer=dst_layer,
                                   mip_start=mip_start,
                                   mip_end=mip_end,
                                   bcube=bcube,
                                   chunk_xy=chunk_xy,
                                   chunk_z=chunk_z,
                                   mips_per_task=mips_per_task)

    # create scheduler and execute the job
    scheduler.register_job(downsample_job, job_name="downsample")
    scheduler.execute_until_completion()
    result_report = f"Downsampled {src_layer} from {mip_start} to {mip_end}. Result in {dst_layer}"
    corgie_logger.info(result_report)
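
mips_per_task controls how many downsampling levels each task performs before handing off. A plain-arithmetic sketch of how a MIP range could be split into per-task spans under that assumption (illustrative only, not corgie's actual scheduling code):

def split_mip_range(mip_start, mip_end, mips_per_task):
    # e.g. split_mip_range(2, 8, 2) -> [(2, 4), (4, 6), (6, 8)]
    spans, m = [], mip_start
    while m < mip_end:
        spans.append((m, min(m + mips_per_task, mip_end)))
        m = min(m + mips_per_task, mip_end)
    return spans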
Example #3
def render(ctx, src_layer_spec, dst_folder, pad, render_masks, blackout_masks,
           mip, chunk_xy, chunk_z, start_coord, end_coord, coord_mip, suffix):
    scheduler = ctx.obj['scheduler']

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name='src',
                                       readonly=True)

    dst_stack = stack.create_stack_from_reference(reference_stack=src_stack,
                                                  folder=dst_folder,
                                                  name="dst",
                                                  types=["img", "mask"])

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    render_job = RenderJob(src_stack=src_stack,
                           dst_stack=dst_stack,
                           mip=mip,
                           pad=pad,
                           bcube=bcube,
                           chunk_xy=chunk_xy,
                           chunk_z=chunk_z,
                           render_masks=render_masks,
                           blackout_masks=blackout_masks)

    # create scheduler and execute the job
    scheduler.register_job(render_job, job_name="Render {}".format(bcube))
    scheduler.execute_until_completion()
Example #4
def multi_section_compare(
    ctx,
    src_layer_spec,
    dst_folder,
    chunk_xy,
    pad,
    crop,
    force_chunk_xy,
    z_offsets,
    processor_spec,
    processor_mip,
    start_coord,
    end_coord,
    coord_mip,
):

    scheduler = ctx.obj["scheduler"]

    if crop is None:
        crop = pad

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)
    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name="src",
                                       readonly=True)

    if force_chunk_xy is None:
        force_chunk_xy = chunk_xy

    dst_stack = stack.Stack(name="dst", folder=dst_folder)
    dst_stack.reference_layer = src_stack.get_layers()[0]

    for z_offset in z_offsets:
        dst_stack.create_sublayer(
            name=z_offset,
            layer_type="img",
            dtype="float32",
            force_chunk_xy=force_chunk_xy,
            overwrite=True,
        )

    multi_section_compare_job = MultiSectionCompareJob(
        src_stack=src_stack,
        dst_stack=dst_stack,
        chunk_xy=chunk_xy,
        bcube=bcube,
        pad=pad,
        processor_spec=processor_spec,
        mip=processor_mip[0],
        dst_mip=processor_mip[0],
    )
    scheduler.register_job(
        multi_section_compare_job,
        job_name=f"MultiSectionCompare {bcube}",
    )

    scheduler.execute_until_completion()
    corgie_logger.debug("Done!")
Example #5
def merge_render(
    ctx,
    spec_path,
    dst_folder,
    chunk_xy,
    pad,
    start_coord,
    end_coord,
    coord_mip,
    force_chunk_xy,
    mip,
    suffix,
):

    scheduler = ctx.obj["scheduler"]
    if suffix is None:
        suffix = ""
    else:
        suffix = f"_{suffix}"

    corgie_logger.debug("Setting up layers...")
    # create layers
    # collect image paths
    # collect mask paths

    if not force_chunk_xy:
        force_chunk_xy = chunk_xy

    with open(spec_path, "r") as f:
        spec = json.load(f)

    src_layers = spec_to_layer_dict_readonly(spec["src"])
    reference_layer = src_layers[list(src_layers.keys())[0]]
    dst_layer = create_layer_from_dict(
        {"path": dst_folder, "type": "img"},
        reference=reference_layer,
        force_chunk_xy=force_chunk_xy,
        overwrite=True
    )

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    for z in range(*bcube.z_range()):
        tgt_z = str(z)
        if tgt_z in spec["job_specs"].keys():
            job_bcube = bcube.reset_coords(zs=z, ze=z + 1, in_place=False)
            render_job = MergeRenderJob(
                src_layers=src_layers,
                src_specs=spec["job_specs"][tgt_z],
                dst_layer=dst_layer,
                mip=mip,
                pad=pad,
                bcube=job_bcube,
                chunk_xy=chunk_xy,
            )
            scheduler.register_job(
                render_job, job_name="MergeRender {}".format(job_bcube)
            )
    scheduler.execute_until_completion()
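
From the loop above, the spec file is a JSON object with a "src" layer mapping and per-section "job_specs" keyed by z as a string. A hypothetical minimal spec, written as the equivalent Python dict (the placeholder values are illustrative; the real schema is whatever spec_to_layer_dict_readonly and MergeRenderJob expect):

spec = {
    "src": {...},           # layer specs consumed by spec_to_layer_dict_readonly
    "job_specs": {
        "1000": [...],      # per-z render instructions passed to MergeRenderJob
    },
}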
Example #6
def compute_field(ctx, src_layer_spec, tgt_layer_spec, dst_layer_spec, suffix,
                  processor_spec, pad, crop, chunk_xy, start_coord,
                  processor_mip, end_coord, coord_mip, blend_xy, tgt_z_offset,
                  chunk_z, reference_key, clear_nontissue_field):
    if suffix is None:
        suffix = ''
    else:
        suffix = f"_{suffix}"

    scheduler = ctx.obj['scheduler']

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name='src',
                                       readonly=True)

    tgt_stack = create_stack_from_spec(tgt_layer_spec,
                                       name='tgt',
                                       readonly=True,
                                       reference=src_stack)

    reference_layer = None
    if reference_key in src_stack.layers:
        reference_layer = src_stack.layers[reference_key]
    dst_layer = create_layer_from_spec(dst_layer_spec,
                                       allowed_types=['field'],
                                       default_type='field',
                                       readonly=False,
                                       caller_name='dst_layer',
                                       reference=reference_layer,
                                       overwrite=True)

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    if crop is None:
        crop = pad

    compute_field_job = ComputeFieldJob(
        src_stack=src_stack,
        tgt_stack=tgt_stack,
        dst_layer=dst_layer,
        chunk_xy=chunk_xy,
        chunk_z=chunk_z,
        blend_xy=blend_xy,
        processor_spec=processor_spec,
        pad=pad,
        crop=crop,
        bcube=bcube,
        tgt_z_offset=tgt_z_offset,
        suffix=suffix,
        processor_mip=processor_mip,
        clear_nontissue_field=clear_nontissue_field)

    # create scheduler and execute the job
    scheduler.register_job(compute_field_job,
                           job_name="Compute field {}, tgt z offset {}".format(
                               bcube, tgt_z_offset))
    scheduler.execute_until_completion()
Example #7
def fill_nearest(
    ctx,
    src_layer_spec,
    dst_folder,
    chunk_xy,
    start_coord,
    end_coord,
    coord_mip,
    suffix,
    mip,
    radius,
    force_chunk_z=1,
):
    scheduler = ctx.obj["scheduler"]

    if suffix is None:
        suffix = "_seethrough"
    else:
        suffix = f"_{suffix}"

    crop, pad = 0, 0
    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name="src",
                                       readonly=True)
    src_stack.folder = dst_folder
    dst_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=dst_folder,
        name="dst",
        types=["img", "mask"],
        readonly=False,
        suffix=suffix,
        overwrite=True,
        force_chunk_z=force_chunk_z,
    )
    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    fill_nearest_job = FillNearestJob(
        src_stack=src_stack,
        dst_stack=dst_stack,
        bcube=bcube,
        radius=radius,
        mip=mip,
        chunk_xy=chunk_xy,
    )
    # create scheduler and execute the job
    scheduler.register_job(fill_nearest_job,
                           job_name="Fill Nearest Block {}".format(bcube))

    scheduler.execute_until_completion()
    result_report = (
        f"Rendered layers {[str(l) for l in src_stack.get_layers_of_type('img')]}. "
        f"Results in {[str(l) for l in dst_stack.get_layers_of_type('img')]}")
    corgie_logger.info(result_report)
Example #8
def combine_masks(
    ctx,
    src_layer_spec,
    dst_layer_spec,
    exp,
    chunk_xy,
    chunk_z,
    force_chunk_xy,
    force_chunk_z,
    start_coord,
    end_coord,
    coord_mip,
    mip,
    pad,
):
    scheduler = ctx.obj["scheduler"]

    if not force_chunk_xy:
        force_chunk_xy = chunk_xy

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec, name="src", readonly=True)
    reference_layer = src_stack.reference_layer

    dst_layer = create_layer_from_spec(
        dst_layer_spec,
        allowed_types=["mask"],
        default_type="mask",
        readonly=False,
        caller_name="dst_layer",
        reference=reference_layer,
        force_chunk_xy=force_chunk_xy,
        force_chunk_z=force_chunk_z,
        overwrite=True,
    )
    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    combine_masks_job = CombineMasksJob(
        src_stack=src_stack,
        exp=json.loads(exp),
        dst_layer=dst_layer,
        mip=mip,
        bcube=bcube,
        pad=pad,
        chunk_xy=chunk_xy,
        chunk_z=chunk_z,
    )
    # create scheduler and execute the job
    scheduler.register_job(combine_masks_job, job_name="Combine Masks {}".format(bcube))

    scheduler.execute_until_completion()
    result_report = f"Results in {str(dst_layer)}"
    corgie_logger.info(result_report)
Example #9
def render(ctx, src_layer_spec, dst_folder, pad, render_masks, blackout_masks,
           seethrough, chunk_xy, chunk_z, start_coord, end_coord, mips,
           coord_mip, force_chunk_xy, force_chunk_z, suffix):
    scheduler = ctx.obj['scheduler']

    if suffix is None:
        suffix = '_rendered'
    else:
        suffix = f"_{suffix}"

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name='src',
                                       readonly=True)

    if force_chunk_xy:
        force_chunk_xy = chunk_xy
    else:
        force_chunk_xy = None

    if force_chunk_z:
        force_chunk_z = chunk_z
    else:
        force_chunk_z = None

    dst_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=dst_folder,
        name="dst",
        types=["img", "mask"],
        force_chunk_xy=force_chunk_xy,
        force_chunk_z=force_chunk_z,
        suffix=suffix,
        overwrite=True)

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    render_job = RenderJob(src_stack=src_stack,
                           dst_stack=dst_stack,
                           mips=mips,
                           pad=pad,
                           bcube=bcube,
                           chunk_xy=chunk_xy,
                           chunk_z=chunk_z,
                           render_masks=render_masks,
                           blackout_masks=blackout_masks,
                           seethrough=seethrough)

    # create scheduler and execute the job
    scheduler.register_job(render_job, job_name="Render {}".format(bcube))
    scheduler.execute_until_completion()
Example #10
def apply_processor_by_spec(ctx, src_layer_spec, spec_path, dst_layer_spec,
                            processor_spec, pad, crop, chunk_xy, start_coord,
                            processor_mip, end_coord, coord_mip, blend_xy,
                            chunk_z, reference_key):
    scheduler = ctx.obj['scheduler']

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name='src',
                                       readonly=True)

    with open(spec_path, 'r') as f:
        spec = set(json.load(f))

    reference_layer = None
    if reference_key in src_stack.layers:
        reference_layer = src_stack.layers[reference_key]

    dst_layer = create_layer_from_spec(dst_layer_spec,
                                       allowed_types=['img', 'mask'],
                                       default_type='img',
                                       readonly=False,
                                       caller_name='dst_layer',
                                       reference=reference_layer,
                                       overwrite=True)

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    if crop is None:
        crop = pad

    for z in range(*bcube.z_range()):
        if z in spec:
            job_bcube = bcube.reset_coords(zs=z, ze=z + 1, in_place=False)
            apply_processor_job = ApplyProcessorJob(
                src_stack=src_stack,
                dst_layer=dst_layer,
                chunk_xy=chunk_xy,
                chunk_z=chunk_z,
                blend_xy=blend_xy,
                processor_spec=processor_spec,
                pad=pad,
                crop=crop,
                bcube=job_bcube,
                processor_mip=processor_mip)

            # create scheduler and execute the job
            scheduler.register_job(
                apply_processor_job,
                job_name="Apply Processor {}".format(job_bcube))
    scheduler.execute_until_completion()
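
Here the spec file is simply a JSON array of section indices; it is loaded into a set so the z loop can do fast membership tests. A hypothetical example:

import json

spec = set(json.loads("[1000, 1001, 1005]"))  # hypothetical section list
assert 1000 in spec and 1002 not in spec      # only listed sections are processed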
Example #11
def copy(ctx, src_layer_spec, dst_folder, copy_masks, blackout_masks, chunk_xy,
         chunk_z, start_coord, end_coord, coord_mip, mip, suffix,
         force_chunk_xy, force_chunk_z):

    scheduler = ctx.obj['scheduler']
    if suffix is None:
        suffix = ''
    else:
        suffix = f"_{suffix}"

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name='src',
                                       readonly=True)

    if force_chunk_xy:
        force_chunk_xy = chunk_xy
    else:
        force_chunk_xy = None

    if force_chunk_z:
        force_chunk_z = chunk_z
    else:
        force_chunk_z = None

    dst_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=dst_folder,
        name="dst",
        types=["img", "mask"],
        readonly=False,
        suffix=suffix,
        force_chunk_xy=force_chunk_xy,
        force_chunk_z=force_chunk_z,
        overwrite=True)

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    copy_job = CopyJob(src_stack=src_stack,
                       dst_stack=dst_stack,
                       mip=mip,
                       bcube=bcube,
                       chunk_xy=chunk_xy,
                       chunk_z=chunk_z,
                       copy_masks=copy_masks,
                       blackout_masks=blackout_masks)

    # create scheduler and execute the job
    scheduler.register_job(copy_job, job_name="Copy {}".format(bcube))
    scheduler.execute_until_completion()
Example #12
def downsample_by_spec(ctx, src_layer_spec, spec_path, dst_layer_spec,
                       mip_start, mip_end, chunk_xy, chunk_z, mips_per_task,
                       start_coord, end_coord, coord_mip):
    scheduler = ctx.obj['scheduler']
    corgie_logger.debug("Setting up Source and Destination layers...")

    src_layer = create_layer_from_spec(src_layer_spec,
                                       caller_name='src layer',
                                       readonly=True)

    with open(spec_path, 'r') as f:
        spec = set(json.load(f))

    if dst_layer_spec is None:
        corgie_logger.info(
            "Destination layer not specified. Using Source layer "
            "as Destination.")
        dst_layer = src_layer
        dst_layer.readonly = False
    else:
        dst_layer = create_layer_from_spec(dst_layer_spec,
                                           caller_name='dst_layer layer',
                                           readonly=False,
                                           reference=src_layer,
                                           chunk_z=chunk_z,
                                           overwrite=True)
    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)
    for z in range(*bcube.z_range()):
        if z in spec:
            job_bcube = bcube.reset_coords(zs=z, ze=z + 1, in_place=False)
            downsample_job = DownsampleJob(src_layer=src_layer,
                                           dst_layer=dst_layer,
                                           mip_start=mip_start,
                                           mip_end=mip_end,
                                           bcube=job_bcube,
                                           chunk_xy=chunk_xy,
                                           chunk_z=chunk_z,
                                           mips_per_task=mips_per_task)

            # create scheduler and execute the job
            scheduler.register_job(downsample_job,
                                   job_name=f"Downsample {job_bcube}")
    scheduler.execute_until_completion()
    result_report = f"Downsampled {src_layer} from {mip_start} to {mip_end}. Result in {dst_layer}"
    corgie_logger.info(result_report)
Example #13
def invert_field(ctx, src_layer_spec, dst_layer_spec, pad, crop, chunk_xy,
                 start_coord, mip, end_coord, coord_mip, blend_xy, chunk_z,
                 force_chunk_xy):
    scheduler = ctx.obj['scheduler']

    if force_chunk_xy:
        force_chunk_xy = chunk_xy
    else:
        force_chunk_xy = None

    corgie_logger.debug("Setting up layers...")
    src_layer = create_layer_from_spec(src_layer_spec,
                                       allowed_types=['field'],
                                       default_type='field',
                                       readonly=True,
                                       caller_name='src_layer')

    dst_layer = create_layer_from_spec(dst_layer_spec,
                                       allowed_types=['field'],
                                       default_type='field',
                                       readonly=False,
                                       caller_name='dst_layer',
                                       reference=src_layer,
                                       overwrite=True,
                                       force_chunk_xy=force_chunk_xy)

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    if crop is None:
        crop = pad

    invert_field_job = InvertFieldJob(src_layer=src_layer,
                                      dst_layer=dst_layer,
                                      chunk_xy=chunk_xy,
                                      chunk_z=chunk_z,
                                      blend_xy=blend_xy,
                                      pad=pad,
                                      mip=mip,
                                      crop=crop,
                                      bcube=bcube)

    # create scheduler and execute the job
    scheduler.register_job(invert_field_job,
                           job_name="Invert Field {}".format(bcube))
    scheduler.execute_until_completion()
Example #14
def apply_processor(ctx, src_layer_spec, dst_layer_spec, processor_spec, pad,
                    crop, chunk_xy, start_coord, processor_mip, end_coord,
                    coord_mip, blend_xy, chunk_z, reference_key):
    scheduler = ctx.obj['scheduler']

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name='src',
                                       readonly=True)

    reference_layer = None
    if reference_key in src_stack.layers:
        reference_layer = src_stack.layers[reference_key]

    dst_layer = create_layer_from_spec(dst_layer_spec,
                                       allowed_types=['img'],
                                       default_type='img',
                                       readonly=False,
                                       caller_name='dst_layer',
                                       reference=reference_layer,
                                       overwrite=True)

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    if crop is None:
        crop = pad

    apply_processor_job = ApplyProcessorJob(src_stack=src_stack,
                                            dst_layer=dst_layer,
                                            chunk_xy=chunk_xy,
                                            chunk_z=chunk_z,
                                            blend_xy=blend_xy,
                                            processor_spec=processor_spec,
                                            pad=pad,
                                            crop=crop,
                                            bcube=bcube,
                                            processor_mip=processor_mip)

    # create scheduler and execute the job
    scheduler.register_job(apply_processor_job,
                           job_name="Apply Processor {}".format(bcube))
    scheduler.execute_until_completion()
Example #15
def compare_sections(
    ctx,
    src_layer_spec,
    tgt_layer_spec,
    dst_layer_spec,
    suffix,
    processor_spec,
    pad,
    chunk_xy,
    force_chunk_xy,
    start_coord,
    mip,
    dst_mip,
    end_coord,
    coord_mip,
    tgt_z_offset,
    reference_key,
):
    if suffix is None:
        suffix = ""
    else:
        suffix = f"_{suffix}"

    if not force_chunk_xy:
        force_chunk_xy = chunk_xy

    if not dst_mip:
        dst_mip = mip

    scheduler = ctx.obj["scheduler"]

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name="src",
                                       readonly=True)

    tgt_stack = create_stack_from_spec(tgt_layer_spec,
                                       name="tgt",
                                       readonly=True,
                                       reference=src_stack)

    reference_layer = None
    if reference_key in src_stack.layers:
        reference_layer = src_stack.layers[reference_key]

    dst_layer = create_layer_from_spec(
        dst_layer_spec,
        allowed_types=["img", "mask"],
        default_type="img",
        readonly=False,
        caller_name="dst_layer",
        reference=reference_layer,
        force_chunk_xy=force_chunk_xy,
        overwrite=True,
    )

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    compare_job = CompareSectionsJob(
        src_stack=src_stack,
        tgt_stack=tgt_stack,
        dst_layer=dst_layer,
        chunk_xy=chunk_xy,
        processor_spec=processor_spec,
        pad=pad,
        bcube=bcube,
        tgt_z_offset=tgt_z_offset,
        suffix=suffix,
        mip=mip,
        dst_mip=dst_mip,
    )

    # create scheduler and execute the job
    scheduler.register_job(
        compare_job,
        job_name="Compare Job {}, tgt z offset {}".format(bcube, tgt_z_offset),
    )
    scheduler.execute_until_completion()
Example #16
def normalize_by_spec(ctx, src_layer_spec, spec_path, dst_folder, stats_mip,
                      mip_start, mip_end, chunk_xy, chunk_z, start_coord,
                      end_coord, coord_mip, suffix, recompute_stats,
                      mask_value):
    if chunk_z != 1:
        raise NotImplementedError("Compute Statistics command currently only "
                                  "supports per-section statistics.")
    result_report = ""
    scheduler = ctx.obj['scheduler']

    if suffix is None:
        suffix = '_norm'
    else:
        suffix = f"_{suffix}"

    if stats_mip is None:
        stats_mip = mip_end

    with open(spec_path, 'r') as f:
        spec = set(json.load(f))

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name='src',
                                       readonly=True)

    # dst_stack = stack.create_stack_from_reference(reference_stack=src_stack,
    #         folder=dst_folder, name="dst", types=["img"], readonly=False,
    #         suffix=suffix, overwrite=True)

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    img_layers = src_stack.get_layers_of_type("img")
    mask_layers = src_stack.get_layers_of_type("mask")
    field_layers = src_stack.get_layers_of_type("field")
    assert len(field_layers) == 0

    for l in img_layers:
        mean_layer = l.get_sublayer(
            name=f"mean{suffix}",
            path=os.path.join(dst_folder, f"mean{suffix}"),
            layer_type="section_value",
        )

        var_layer = l.get_sublayer(name=f"var{suffix}",
                                   path=os.path.join(dst_folder,
                                                     f"var{suffix}"),
                                   layer_type="section_value")

        if recompute_stats:
            for z in range(*bcube.z_range()):
                if z in spec:
                    job_bcube = bcube.reset_coords(zs=z,
                                                   ze=z + 1,
                                                   in_place=False)
                    compute_stats_job = ComputeStatsJob(
                        src_layer=l,
                        mask_layers=mask_layers,
                        mean_layer=mean_layer,
                        var_layer=var_layer,
                        bcube=job_bcube,
                        mip=stats_mip,
                        chunk_xy=chunk_xy,
                        chunk_z=chunk_z)

                    # create scheduler and execute the job
                    scheduler.register_job(
                        compute_stats_job,
                        job_name=f"Compute Stats. Layer: {l}, Bcube: {job_bcube}")
            scheduler.execute_until_completion()

        dst_layer = l.get_sublayer(name=f"{l.name}{suffix}",
                                   path=os.path.join(dst_folder, "img",
                                                     f"{l.name}{suffix}"),
                                   layer_type=l.get_layer_type(),
                                   dtype='float32',
                                   overwrite=True)

        for z in range(*bcube.z_range()):
            if z in spec:
                job_bcube = bcube.reset_coords(zs=z, ze=z + 1, in_place=False)
                result_report += f"Normalized {l} -> {dst_layer}\n"
                for mip in range(mip_start, mip_end + 1):
                    normalize_job = NormalizeJob(src_layer=l,
                                                 mask_layers=mask_layers,
                                                 dst_layer=deepcopy(dst_layer),
                                                 mean_layer=mean_layer,
                                                 var_layer=var_layer,
                                                 stats_mip=stats_mip,
                                                 mip=mip,
                                                 bcube=job_bcube,
                                                 chunk_xy=chunk_xy,
                                                 chunk_z=chunk_z,
                                                 mask_value=mask_value)

                    # create scheduler and execute the job
                    scheduler.register_job(
                        normalize_job,
                        job_name=f"Normalize {job_bcube}, MIP {mip}")
    scheduler.execute_until_completion()
    corgie_logger.info(result_report)
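
The mean and var section_value layers drive the final normalization. A minimal sketch of the presumed per-section semantics (assumed to be standard z-scoring with a fill value for masked pixels; NormalizeJob's actual implementation may differ):

import numpy as np

def normalize_section(img, mean, var, mask=None, mask_value=0):
    out = (img.astype(np.float32) - mean) / np.sqrt(var)  # z-score with precomputed stats
    if mask is not None:
        out[mask] = mask_value                            # blank out masked pixels
    return out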
Example #17
def apply_processor(
    ctx,
    src_layer_spec,
    dst_layer_spec,
    spec_path,
    processor_spec,
    pad,
    crop,
    chunk_xy,
    start_coord,
    force_chunk_xy,
    processor_mip,
    end_coord,
    coord_mip,
    blend_xy,
    chunk_z,
    reference_key,
):
    scheduler = ctx.obj["scheduler"]

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec, name="src", readonly=True)

    reference_layer = None
    if reference_key in src_stack.layers:
        reference_layer = src_stack.layers[reference_key]

    if force_chunk_xy is None:
        force_chunk_xy = chunk_xy

    dst_layer = create_layer_from_spec(
        dst_layer_spec,
        allowed_types=["img", "mask", "section_value", "field"],
        default_type="img",
        readonly=False,
        caller_name="dst_layer",
        force_chunk_xy=force_chunk_xy,
        reference=reference_layer,
        overwrite=True,
    )

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    if crop is None:
        crop = pad

    if spec_path:
        with open(spec_path, "r") as f:
            spec = json.load(f)

        for z in spec:
            job_bcube = bcube.reset_coords(zs=z, ze=z + 1, in_place=False)
            apply_processor_job = ApplyProcessorJob(
                src_stack=src_stack,
                dst_layer=dst_layer,
                chunk_xy=chunk_xy,
                chunk_z=chunk_z,
                blend_xy=blend_xy,
                processor_spec=processor_spec,
                pad=pad,
                crop=crop,
                bcube=job_bcube,
                processor_mip=processor_mip,
            )

            # create scheduler and execute the job
            scheduler.register_job(
                apply_processor_job, job_name="Apply Processor {}".format(job_bcube)
            )
    else:
        apply_processor_job = ApplyProcessorJob(
            src_stack=src_stack,
            dst_layer=dst_layer,
            chunk_xy=chunk_xy,
            chunk_z=chunk_z,
            blend_xy=blend_xy,
            processor_spec=processor_spec,
            pad=pad,
            crop=crop,
            bcube=bcube,
            processor_mip=processor_mip,
        )

        # create scheduler and execute the job
        scheduler.register_job(
            apply_processor_job, job_name="Apply Processor {}".format(bcube)
        )
    scheduler.execute_until_completion()
Example #18
def compute_stats_fn(ctx, src_layer_spec, dst_folder, suffix, mip, chunk_xy,
                     chunk_z, start_coord, end_coord, coord_mip):

    if chunk_z != 1:
        raise NotImplementedError("Compute Statistics command currently only "
                                  "supports per-section statistics.")

    scheduler = ctx.obj['scheduler']

    src_layer = create_layer_from_spec(src_layer_spec,
                                       caller_name='src layer',
                                       readonly=True)

    if suffix is None:
        suffix = ''
    else:
        suffix = '_' + suffix

    src_stack = create_stack_from_spec(src_layer_spec,
                                       name='src',
                                       readonly=True)

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    mask_layers = src_stack.get_layers_of_type(["mask"])
    non_mask_layers = src_stack.get_layers_of_type(["img", "field"])

    for l in non_mask_layers:
        mean_layer = l.get_sublayer(
            name=f"mean{suffix}",
            path=os.path.join(dst_folder, f"mean{suffix}"),
            layer_type="section_value",
        )

        var_layer = l.get_sublayer(
            name=f"var{suffix}",
            path=os.path.join(dst_folder, f"var{suffix}"),
            layer_type="section_value",
        )

        compute_stats_job = ComputeStatsJob(src_layer=l,
                                            mask_layers=mask_layers,
                                            mean_layer=mean_layer,
                                            var_layer=var_layer,
                                            bcube=bcube,
                                            mip=mip,
                                            chunk_xy=chunk_xy,
                                            chunk_z=chunk_z)

        # create scheduler and execute the job
        scheduler.register_job(
            compute_stats_job,
            job_name=f"Compute Stats. Layer: {l}, Bcube: {bcube}")
    scheduler.execute_until_completion()
Example #19
def merge_copy(ctx, src_layer_spec, dst_folder, spec_path, chunk_xy,
               start_coord, end_coord, coord_mip, mip, suffix):

    scheduler = ctx.obj['scheduler']
    if suffix is None:
        suffix = ''
    else:
        suffix = f"_{suffix}"

    corgie_logger.debug("Setting up layers...")
    assert len(src_layer_spec) % 2 == 0
    src_stacks = {}
    for k in range(len(src_layer_spec) // 2):
        src_stack = create_stack_from_spec(src_layer_spec[2 * k:2 * k + 2],
                                           name='src',
                                           readonly=True)
        name = src_stack.get_layers_of_type('img')[0].path
        src_stacks[name] = src_stack

    with open(spec_path, 'r') as f:
        spec = json.load(f)

    # if force_chunk_xy:
    #     force_chunk_xy = chunk_xy
    # else:
    #     force_chunk_xy = None

    # if force_chunk_z:
    #     force_chunk_z = chunk_z
    # else:
    #     force_chunk_z = None

    dst_stack = stack.create_stack_from_reference(
        reference_stack=list(src_stacks.values())[0],
        folder=dst_folder,
        name="dst",
        types=["img"],
        readonly=False,
        suffix=suffix,
        force_chunk_xy=None,
        force_chunk_z=None,
        overwrite=True)

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    for z in range(*bcube.z_range()):
        spec_z = str(z)
        if spec_z in spec.keys():
            src_dict = spec[str(z)]
            job_bcube = bcube.reset_coords(zs=z, ze=z+1, in_place=False)
            src_stack = src_stacks[src_dict['cv_path']]
            z_list = src_dict['z_list']
            copy_job = MergeCopyJob(src_stack=src_stack,
                                    dst_stack=dst_stack,
                                    mip=mip,
                                    bcube=job_bcube,
                                    chunk_xy=chunk_xy,
                                    z_list=z_list)
            # create scheduler and execute the job
            scheduler.register_job(copy_job, job_name="MergeCopy {}".format(job_bcube))
    scheduler.execute_until_completion()
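
The loop above implies a spec keyed by destination section index (as a string), where each entry names the source stack by its image layer path and lists the candidate source sections. A hypothetical entry, as a Python dict:

spec = {
    "1000": {
        "cv_path": "gs://bucket/dataset/img",  # must match an img layer path in src_layer_spec
        "z_list": [999, 1000, 1001],           # source sections handed to MergeCopyJob
    },
}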
Example #20
def align(ctx, src_layer_spec, tgt_layer_spec, dst_folder, render_pad,
          render_chunk_xy, processor_spec, pad, crop, processor_mip, chunk_xy,
          start_coord, end_coord, coord_mip, bad_starter_path, block_size,
          block_overlap, blend_xy, force_chunk_xy, suffix, copy_start,
          seethrough_spec, seethrough_spec_mip):

    scheduler = ctx.obj['scheduler']

    if suffix is None:
        suffix = '_aligned'
    else:
        suffix = f"_{suffix}"

    if crop is None:
        crop = pad

    corgie_logger.debug("Setting up layers...")

    src_stack = create_stack_from_spec(src_layer_spec,
                                       name='src',
                                       readonly=True)
    src_stack.folder = dst_folder

    tgt_stack = create_stack_from_spec(tgt_layer_spec,
                                       name='tgt',
                                       readonly=True,
                                       reference=src_stack)

    if force_chunk_xy is None:
        force_chunk_xy = chunk_xy

    dst_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=dst_folder,
        name="dst",
        types=["img", "mask"],
        readonly=False,
        suffix=suffix,
        force_chunk_xy=force_chunk_xy,
        overwrite=True)

    even_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=os.path.join(dst_folder, 'even'),
        name="even",
        types=["img", "mask"],
        readonly=False,
        suffix=suffix,
        force_chunk_xy=force_chunk_xy,
        overwrite=True)

    odd_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=os.path.join(dst_folder, 'odd'),
        name="odd",
        types=["img", "mask"],
        readonly=False,
        suffix=suffix,
        force_chunk_xy=force_chunk_xy,
        overwrite=True)

    corgie_logger.debug("Done!")

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    corgie_logger.debug("Calculating blocks...")
    # TODO: read in bad starter sections
    bad_starter_sections = []

    blocks = []
    z = bcube.z_range()[0]
    while z < bcube.z_range()[-1]:
        block_start = z
        block_end = z + block_size
        while block_end + block_overlap in bad_starter_sections and \
                block_end + block_overlap < bcube.z_range()[-1]:
            block_end += 1

        block = Block(block_start, block_end + block_overlap)
        blocks.append(block)
        z = block_end
    corgie_logger.debug("Done!")

    render_method = helpers.PartialSpecification(
        f=RenderJob,
        pad=render_pad,
        chunk_xy=render_chunk_xy,
        chunk_z=1,
        render_masks=False,
    )

    cf_method = helpers.PartialSpecification(f=ComputeFieldJob,
                                             pad=pad,
                                             crop=crop,
                                             processor_mip=processor_mip,
                                             processor_spec=processor_spec,
                                             chunk_xy=chunk_xy,
                                             blend_xy=blend_xy,
                                             chunk_z=1)
    if seethrough_spec is not None:
        assert seethrough_spec_mip is not None

        seethrough_method = helpers.PartialSpecification(
            f=CompareSectionsJob,
            mip=seethrough_spec_mip,
            processor_spec=seethrough_spec,
            chunk_xy=chunk_xy,
            pad=pad,
            crop=pad,
        )
    else:
        seethrough_method = None

    corgie_logger.debug("Aligning blocks...")
    for i in range(len(blocks)):
        block = blocks[i]

        block_bcube = bcube.copy()
        block_bcube.reset_coords(zs=block.z_start, ze=block.z_end)

        if i % 2 == 0:
            block_dst_stack = even_stack
        else:
            block_dst_stack = odd_stack

        align_block_job_forv = AlignBlockJob(
            src_stack=src_stack,
            tgt_stack=tgt_stack,
            dst_stack=block_dst_stack,
            bcube=block_bcube,
            render_method=render_method,
            cf_method=cf_method,
            seethrough_method=seethrough_method,
            suffix=suffix,
            copy_start=copy_start,
            backward=False)
        scheduler.register_job(align_block_job_forv,
                               job_name=f"Forward Align {block} {block_bcube}")

    scheduler.execute_until_completion()
    corgie_logger.debug("Done!")

    corgie_logger.debug("Stitching blocks...")
    #TODO
    #stitch_blocks_job = StitchBlockJob(
    #    blocks=blocks,
    #    src_stack=src_stack,
    #    dst_stack=dst_stack,
    #    bcube=bcube,
    #    suffix=suffix,
    #    render_method=render_method,
    #    cf_method=cf_method
    #)

    #scheduler.register_job(stitch_blocks_job, job_name=f"Stitch blocks {bcube}")
    #scheduler.execute_until_completion()

    corgie_logger.debug("Done!")

    result_report = f"Aligned layers {[str(l) for l in src_stack.get_layers_of_type('img')]}. " \
            f"Results in {[str(l) for l in dst_stack.get_layers_of_type('img')]}"
    corgie_logger.info(result_report)
Example #21
def align(
    ctx,
    src_layer_spec,
    dst_folder,
    render_pad,
    render_chunk_xy,
    processor_spec,
    pad,
    crop,
    processor_mip,
    chunk_xy,
    start_coord,
    end_coord,
    coord_mip,
    bad_starter_path,
    block_size,
    stitch_size,
    vote_dist,
    consensus_threshold,
    blur_sigma,
    kernel_size,
    blend_xy,
    force_chunk_xy,
    suffix,
    seethrough_spec,
    seethrough_limit,
    seethrough_spec_mip,
    decay_dist,
    blur_rate,
    restart_stage,
    restart_suffix,
):

    scheduler = ctx.obj["scheduler"]

    if suffix is None:
        suffix = "_aligned"
    else:
        suffix = f"_{suffix}"
    if (restart_suffix is None) or (restart_stage == 0):
        restart_suffix = suffix

    if crop is None:
        crop = pad

    corgie_logger.debug("Setting up layers...")
    # TODO: store stitching images in layer other than even & odd
    if vote_dist + stitch_size - 2 >= block_size:
        raise exceptions.CorgieException(
            "block_size too small for stitching + voting requirements (stitch_size + vote_dist)"
        )

    src_stack = create_stack_from_spec(src_layer_spec,
                                       name="src",
                                       readonly=True)
    src_stack.folder = dst_folder

    if force_chunk_xy is None:
        force_chunk_xy = chunk_xy

    dst_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=dst_folder,
        name="dst",
        types=["img", "mask"],
        readonly=False,
        suffix=restart_suffix,
        force_chunk_xy=force_chunk_xy,
        overwrite=True,
    )

    even_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=os.path.join(dst_folder, "even"),
        name="even",
        types=["img", "mask"],
        readonly=False,
        suffix=suffix,
        force_chunk_xy=force_chunk_xy,
        overwrite=True,
    )

    odd_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=os.path.join(dst_folder, "odd"),
        name="odd",
        types=["img", "mask"],
        readonly=False,
        suffix=suffix,
        force_chunk_xy=force_chunk_xy,
        overwrite=True,
    )

    corgie_logger.debug("Done!")

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    corgie_logger.debug("Calculating blocks...")
    skip_list = []
    if bad_starter_path is not None:
        with open(bad_starter_path) as f:
            line = f.readline()
            while line:
                skip_list.append(int(line))
                line = f.readline()
    blocks = get_blocks(
        start=bcube.z_range()[0],
        stop=bcube.z_range()[1],
        block_size=block_size,
        block_overlap=1,
        skip_list=skip_list,
        src_stack=src_stack,
        even_stack=even_stack,
        odd_stack=odd_stack,
    )
    stitch_blocks = [b.overlap(stitch_size) for b in blocks[1:]]
    corgie_logger.debug("All Blocks")
    for block, stitch_block in zip(blocks, [None] + stitch_blocks):
        corgie_logger.debug(block)
        corgie_logger.debug(f"Stitch {stitch_block}")
        corgie_logger.debug("\n")

    max_blur_mip = (math.ceil(math.log(decay_dist * blur_rate + 1, 2)) +
                    processor_mip[-1])
    corgie_logger.debug(f"Max blur mip for stitching field: {max_blur_mip}")

    # Set all field names, adjusting for restart suffix
    block_field_name = f"field{suffix}"
    stitch_estimated_suffix = f"_stitch_estimated{suffix}"
    stitch_estimated_name = f"field{stitch_estimated_suffix}"
    stitch_corrected_name = f"stitch_corrected{suffix}"
    stitch_corrected_field = None
    composed_name = f"composed{suffix}"
    if restart_stage <= 2:
        stitch_estimated_suffix = f"_stitch_estimated{restart_suffix}"
        stitch_estimated_name = f"field{stitch_estimated_suffix}"
        stitch_corrected_name = f"stitch_corrected{restart_suffix}"
    if restart_stage <= 3:
        composed_name = f"composed{restart_suffix}"

    render_method = helpers.PartialSpecification(
        f=RenderJob,
        pad=render_pad,
        chunk_xy=render_chunk_xy,
        chunk_z=1,
        render_masks=False,
    )

    cf_method = helpers.PartialSpecification(
        f=ComputeFieldJob,
        pad=pad,
        crop=crop,
        processor_mip=processor_mip,
        processor_spec=processor_spec,
        chunk_xy=chunk_xy,
        blend_xy=blend_xy,
        chunk_z=1,
    )
    if seethrough_spec != tuple():
        assert seethrough_spec_mip is not None
        seethrough_method = helpers.PartialSpecification(
            f=SeethroughCompareJob,
            mip=seethrough_spec_mip,
            processor_spec=seethrough_spec,
            chunk_xy=chunk_xy,
            pad=pad,
            crop=pad,
            seethrough_limit=seethrough_limit,
        )
    else:
        seethrough_method = None

    if restart_stage == 0:
        corgie_logger.debug("Aligning blocks...")
        for block in blocks:
            block_bcube = block.get_bcube(bcube)
            # Use copies of src & dst so that aligning the stitching blocks
            # is not affected by these block fields.
            # Copying also allows local compute to not modify objects for other tasks
            align_block_job_forv = AlignBlockJob(
                src_stack=deepcopy(block.src_stack),
                dst_stack=deepcopy(block.dst_stack),
                bcube=block_bcube,
                render_method=render_method,
                cf_method=cf_method,
                vote_dist=vote_dist,
                seethrough_method=seethrough_method,
                suffix=suffix,
                copy_start=True,
                use_starters=True,
                backward=False,
                consensus_threshold=consensus_threshold,
                blur_sigma=blur_sigma,
                kernel_size=kernel_size,
            )
            scheduler.register_job(
                align_block_job_forv,
                job_name=f"Forward Align {block} {block_bcube}",
            )

        scheduler.execute_until_completion()
        corgie_logger.debug("Done!")

    if restart_stage <= 1:
        corgie_logger.debug("Aligning stitching blocks...")
        for stitch_block in stitch_blocks:
            block_bcube = stitch_block.get_bcube(bcube)
            # These blocks will have block-aligned images, but not
            # the block_fields that warped them.
            align_block_job_forv = AlignBlockJob(
                src_stack=deepcopy(stitch_block.src_stack),
                dst_stack=deepcopy(stitch_block.dst_stack),
                bcube=block_bcube,
                render_method=render_method,
                cf_method=cf_method,
                vote_dist=vote_dist,
                seethrough_method=seethrough_method,
                suffix=stitch_estimated_suffix,
                copy_start=False,
                use_starters=False,
                backward=False,
                consensus_threshold=consensus_threshold,
                blur_sigma=blur_sigma,
                kernel_size=kernel_size,
            )
            scheduler.register_job(
                align_block_job_forv,
                job_name=f"Stitch Align {stitch_block} {block_bcube}",
            )

        scheduler.execute_until_completion()
        corgie_logger.debug("Done!")

    # Add in the stitch_estimated fields that were just created above
    even_stack.create_sublayer(
        stitch_estimated_name,
        layer_type="field",
        overwrite=False,
    )
    odd_stack.create_sublayer(
        stitch_estimated_name,
        layer_type="field",
        overwrite=False,
    )
    if restart_stage <= 2:
        if stitch_size > 1:
            corgie_logger.debug("Voting over stitching blocks")
            stitch_corrected_field = dst_stack.create_sublayer(
                stitch_corrected_name, layer_type="field", overwrite=True)
            for stitch_block in stitch_blocks:
                stitch_estimated_field = stitch_block.dst_stack[
                    stitch_estimated_name]
                block_bcube = bcube.reset_coords(
                    zs=stitch_block.start,
                    ze=stitch_block.start + 1,
                    in_place=False,
                )
                z_offsets = [
                    z - block_bcube.z_range()[0]
                    for z in range(stitch_block.start, stitch_block.stop)
                ]
                vote_stitch_job = VoteJob(
                    input_fields=[stitch_estimated_field],
                    output_field=stitch_corrected_field,
                    chunk_xy=chunk_xy,
                    bcube=block_bcube,
                    z_offsets=z_offsets,
                    mip=processor_mip[-1],
                    consensus_threshold=consensus_threshold,
                    blur_sigma=blur_sigma,
                    kernel_size=kernel_size,
                )
                scheduler.register_job(
                    vote_stitch_job,
                    job_name=f"Stitching Vote {stitch_block} {block_bcube}",
                )

            scheduler.execute_until_completion()
            corgie_logger.debug("Done!")

        for stitch_block in stitch_blocks:
            block_bcube = bcube.reset_coords(zs=stitch_block.start,
                                             ze=stitch_block.start + 1,
                                             in_place=False)
            field_to_downsample = stitch_block.dst_stack[stitch_estimated_name]
            if stitch_corrected_field is not None:
                field_to_downsample = stitch_corrected_field
            # Hack for fafb
            field_info = field_to_downsample.get_info()
            for scale in field_info['scales']:
                scale['chunk_sizes'][-1][-1] = 1
                scale['encoding'] = 'raw'
            field_to_downsample.cv.store_info(field_info)
            field_to_downsample.cv.fetch_info()
            downsample_field_job = DownsampleJob(
                src_layer=field_to_downsample,
                mip_start=processor_mip[-1],
                mip_end=max_blur_mip,
                bcube=block_bcube,
                chunk_xy=chunk_xy,  # TODO: This probably needs to be modified at highest mips
                chunk_z=1,
                mips_per_task=2,
            )
            scheduler.register_job(
                downsample_field_job,
                job_name=f"Downsample stitching field {block_bcube}",
            )
        scheduler.execute_until_completion()
        corgie_logger.debug("Done!")

    # Add in the block-align fields
    even_stack.create_sublayer(
        block_field_name,
        layer_type="field",
        overwrite=False,
    )
    odd_stack.create_sublayer(
        block_field_name,
        layer_type="field",
        overwrite=False,
    )
    composed_field = dst_stack.create_sublayer(composed_name,
                                               layer_type="field",
                                               overwrite=True)
    if (restart_stage > 2) and (stitch_size > 1):
        stitch_corrected_field = dst_stack.create_sublayer(
            stitch_corrected_name, layer_type="field", overwrite=False)
    if restart_stage <= 3:
        corgie_logger.debug("Stitching blocks...")
        for block, stitch_block in zip(blocks[1:], stitch_blocks):
            block_bcube = block.broadcastable().get_bcube(bcube)
            block_list = block.get_neighbors(dist=decay_dist)
            corgie_logger.debug(f"src_block: {block}")
            corgie_logger.debug(f"influencing blocks: {block_list}")
            z_list = [b.stop for b in block_list]
            # stitch_corrected_field used if there is multi-section block overlap,
            # which requires voting to produce a corrected field.
            # If there is only single-section block overlap, then use
            # stitch_estimated_fields from each stitch_block
            if stitch_corrected_field is not None:
                stitching_fields = [stitch_corrected_field]
            else:
                # Order with furthest block first (convention of FieldSet).
                stitching_fields = [
                    stitch_block.dst_stack[stitch_estimated_name],
                    stitch_block.src_stack[stitch_estimated_name],
                ]

            broadcast_job = BroadcastJob(
                block_field=block.dst_stack[block_field_name],
                stitching_fields=stitching_fields,
                output_field=composed_field,
                chunk_xy=chunk_xy,
                bcube=block_bcube,
                pad=pad,
                z_list=z_list,
                mip=processor_mip[-1],
                decay_dist=decay_dist,
                blur_rate=blur_rate,
            )
            scheduler.register_job(broadcast_job,
                                   job_name=f"Broadcast {block} {block_bcube}")

        scheduler.execute_until_completion()
        corgie_logger.debug("Done!")

        if len(blocks) > 1:
            block_bcube = blocks[0].get_bcube(bcube)
            copy_job = CopyLayerJob(
                src_layer=even_stack[block_field_name],
                dst_layer=composed_field,
                mip=processor_mip[-1],
                bcube=block_bcube,
                chunk_xy=chunk_xy,
                chunk_z=1,
            )
            scheduler.register_job(
                copy_job,
                job_name=f"Copy first block_field to composed_field location")
            scheduler.execute_until_completion()
            corgie_logger.debug("Done!")

    if restart_stage <= 4:
        if len(blocks) == 1:
            block_bcube = blocks[0].get_bcube(bcube)
            render_job = RenderJob(
                src_stack=src_stack,
                dst_stack=dst_stack,
                mips=processor_mip[-1],
                pad=pad,
                bcube=block_bcube,
                chunk_xy=chunk_xy,
                chunk_z=1,
                render_masks=True,
                blackout_masks=False,
                additional_fields=[even_stack[block_field_name]],
            )
            scheduler.register_job(
                render_job, job_name=f"Render first block {block_bcube}")
        else:
            block_bcube = bcube.reset_coords(zs=blocks[0].start,
                                             ze=blocks[-1].stop,
                                             in_place=False)
            render_job = RenderJob(
                src_stack=src_stack,
                dst_stack=dst_stack,
                mips=processor_mip[-1],
                pad=pad,
                bcube=block_bcube,
                chunk_xy=chunk_xy,
                chunk_z=1,
                render_masks=True,
                blackout_masks=True,
                additional_fields=[composed_field],
            )
            scheduler.register_job(render_job,
                                   job_name=f"Render all blocks {block_bcube}")
        scheduler.execute_until_completion()
        corgie_logger.debug("Done!")

    result_report = (
        f"Aligned layers {[str(l) for l in src_stack.get_layers_of_type('img')]}. "
        f"Results in {[str(l) for l in dst_stack.get_layers_of_type('img')]}")
    corgie_logger.info(result_report)
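
The BroadcastJob above extends the stitching fields of the neighboring blocks (z_list, all within decay_dist of the seam) into the current block, attenuating each contribution as distance from its seam grows. A minimal sketch of one plausible decay weighting; the helper below is illustrative, not corgie's actual kernel:

import math

def seam_weight(z, seam_z, decay_dist):
    # Hypothetical falloff: full weight at the seam, ~0 beyond decay_dist.
    dist = abs(z - seam_z)
    if dist >= decay_dist:
        return 0.0
    return math.exp(-4.0 * dist / decay_dist)

# Weights applied to a seam at z=100 across the next ten sections:
weights = [seam_weight(z, seam_z=100, decay_dist=10) for z in range(100, 110)]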
Example No. 22
0
def align_block(
    ctx,
    src_layer_spec,
    dst_folder,
    vote_dist,
    render_pad,
    render_chunk_xy,
    processor_spec,
    pad,
    crop,
    processor_mip,
    chunk_xy,
    start_coord,
    end_coord,
    coord_mip,
    blend_xy,
    force_chunk_xy,
    suffix,
    copy_start,
    use_starters,
    seethrough_spec,
    seethrough_limit,
    seethrough_spec_mip,
    mode,
    chunk_z=1,
):
    scheduler = ctx.obj["scheduler"]

    if suffix is None:
        suffix = "_aligned"
    else:
        suffix = f"_{suffix}"

    if crop is None:
        crop = pad
    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec, name="src", readonly=True)
    src_stack.folder = dst_folder

    force_chunk_xy = chunk_xy if force_chunk_xy else None
    dst_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=dst_folder,
        name="dst",
        types=["img", "mask"],
        readonly=False,
        suffix=suffix,
        force_chunk_xy=force_chunk_xy,
        overwrite=True,
    )

    render_method = helpers.PartialSpecification(
        f=RenderJob,
        pad=render_pad,
        chunk_xy=render_chunk_xy,
        chunk_z=1,
        render_masks=False,
    )

    if seethrough_spec != tuple():
        assert seethrough_spec_mip is not None

        seethrough_method = helpers.PartialSpecification(
            f=SeethroughCompareJob,
            mip=seethrough_spec_mip,
            processor_spec=seethrough_spec,
            chunk_xy=chunk_xy,
            pad=pad,
            crop=pad,
            seethrough_limit=seethrough_limit,
        )
    else:
        seethrough_method = None

    cf_method = helpers.PartialSpecification(
        f=ComputeFieldJob,
        pad=pad,
        crop=crop,
        processor_mip=processor_mip,
        processor_spec=processor_spec,
        chunk_xy=chunk_xy,
        blend_xy=blend_xy,
        chunk_z=1,
    )

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    if mode == "bidirectional":
        z_mid = (bcube.z_range()[1] + bcube.z_range()[0]) // 2
        bcube_back = bcube.reset_coords(ze=z_mid, in_place=False)
        bcube_forv = bcube.reset_coords(zs=z_mid, in_place=False)

        align_block_job_back = AlignBlockJob(
            src_stack=src_stack,
            dst_stack=dst_stack,
            bcube=bcube_back,
            render_method=render_method,
            cf_method=cf_method,
            seethrough_method=seethrough_method,
            suffix=suffix,
            copy_start=copy_start,
            backward=True,
            vote_dist=vote_dist,
            use_starters=use_starters,
        )
        scheduler.register_job(
            align_block_job_back,
            job_name="Backward Align Block {}".format(bcube),
        )

        align_block_job_forv = AlignBlockJob(
            src_stack=src_stack,
            dst_stack=deepcopy(dst_stack),
            bcube=bcube_forv,
            render_method=render_method,
            cf_method=cf_method,
            seethrough_method=seethrough_method,
            suffix=suffix,
            copy_start=True,
            backward=False,
            vote_dist=vote_dist,
            use_starters=use_starters,
        )
        scheduler.register_job(
            align_block_job_forv,
            job_name="Forward Align Block {}".format(bcube),
        )
    else:
        align_block_job = AlignBlockJob(
            src_stack=src_stack,
            dst_stack=dst_stack,
            bcube=bcube,
            render_method=render_method,
            cf_method=cf_method,
            seethrough_method=seethrough_method,
            suffix=suffix,
            copy_start=copy_start,
            backward=mode == "backward",
            vote_dist=vote_dist,
            use_starters=use_starters,
        )

        # create scheduler and execute the job
        scheduler.register_job(align_block_job, job_name="Align Block {}".format(bcube))

    scheduler.execute_until_completion()
    result_report = (
        f"Aligned layers {[str(l) for l in src_stack.get_layers_of_type('img')]}. "
        f"Results in {[str(l) for l in dst_stack.get_layers_of_type('img')]}"
    )
    corgie_logger.info(result_report)
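
render_method, cf_method, and seethrough_method are deferred constructors: helpers.PartialSpecification pins a job class and its fixed keyword arguments, and AlignBlockJob supplies the per-block remainder (bcube, mip, and so on) when it instantiates them. A minimal sketch of that pattern, assuming functools.partial-style semantics rather than corgie's exact implementation:

class PartialSpecification:
    """Sketch of helpers.PartialSpecification (assumed behavior):
    bind `f` and its fixed kwargs now, fill in the rest at call time."""

    def __init__(self, f, **kwargs):
        self.f = f
        self.constr_kwargs = kwargs

    def __call__(self, **kwargs):
        return self.f(**self.constr_kwargs, **kwargs)

# Fix padding and chunking once...
# render_method = PartialSpecification(f=RenderJob, pad=render_pad, chunk_z=1)
# ...then build per-block jobs later:
# job = render_method(bcube=block_bcube, mips=processor_mip[-1])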
Example No. 23
0
def normalize(
    ctx,
    src_layer_spec,
    dst_folder,
    stats_mip,
    mip_start,
    mip_end,
    chunk_xy,
    chunk_z,
    force_chunk_xy,
    start_coord,
    end_coord,
    coord_mip,
    suffix,
    recompute_stats,
    mask_value,
):
    if chunk_z != 1:
        raise NotImplementedError(
            "Compute Statistics command currently only supports per-section statistics."
        )
    result_report = ""
    scheduler = ctx.obj["scheduler"]

    if suffix is None:
        suffix = "_norm"
    else:
        suffix = f"_{suffix}"

    if stats_mip is None:
        stats_mip = mip_end

    if not force_chunk_xy:
        force_chunk_xy = chunk_xy

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name="src",
                                       readonly=True)

    dst_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=dst_folder,
        name="dst",
        types=["img"],
        readonly=False,
        suffix=suffix,
        overwrite=True,
    )

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    img_layers = src_stack.get_layers_of_type("img")
    mask_layers = src_stack.get_layers_of_type("mask")
    field_layers = src_stack.get_layers_of_type("field")
    assert len(field_layers) == 0, "normalize does not operate on field layers"

    for l in img_layers:
        mean_layer = l.get_sublayer(
            name=f"mean_{l.name}{suffix}",
            path=os.path.join(dst_folder, f"mean_{l.name}{suffix}"),
            layer_type="section_value",
        )

        var_layer = l.get_sublayer(
            name=f"var_{l.name}{suffix}",
            path=os.path.join(dst_folder, f"var_{l.name}{suffix}"),
            layer_type="section_value",
        )

        if recompute_stats:
            compute_stats_job = ComputeStatsJob(
                src_layer=l,
                mask_layers=mask_layers,
                mean_layer=mean_layer,
                var_layer=var_layer,
                bcube=bcube,
                mip=stats_mip,
                chunk_xy=chunk_xy,
                chunk_z=chunk_z,
            )

            # create scheduler and execute the job
            scheduler.register_job(
                compute_stats_job,
                job_name=f"Compute Stats. Layer: {l}, Bcube: {bcube}")
            scheduler.execute_until_completion()

        dst_layer = l.get_sublayer(
            name=f"{l.name}{suffix}",
            path=os.path.join(dst_folder, "img", f"{l.name}{suffix}"),
            layer_type=l.get_layer_type(),
            dtype="float32",
            force_chunk_xy=force_chunk_xy,
            overwrite=True,
        )

        result_report += f"Normalized {l} -> {dst_layer}\n"
        for mip in range(mip_start, mip_end + 1):
            normalize_job = NormalizeJob(
                src_layer=l,
                mask_layers=mask_layers,
                dst_layer=dst_layer,
                mean_layer=mean_layer,
                var_layer=var_layer,
                stats_mip=stats_mip,
                mip=mip,
                bcube=bcube,
                chunk_xy=chunk_xy,
                chunk_z=chunk_z,
                mask_value=mask_value,
            )

            # create scheduler and execute the job
            scheduler.register_job(normalize_job,
                                   job_name=f"Normalize {bcube}, MIP {mip}")
    scheduler.execute_until_completion()
    corgie_logger.info(result_report)
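
Conceptually, NormalizeJob standardizes each section with the per-section statistics gathered at stats_mip: every pixel becomes (x - mean) / sqrt(var), and masked pixels are overwritten with mask_value. A minimal numpy sketch of that transform (the real job operates chunk-by-chunk on layers, not whole arrays):

import numpy as np

def normalize_section(img, mean, var, mask=None, mask_value=0.0):
    # Standardize a single section with precomputed per-section stats.
    out = (img.astype(np.float32) - mean) / np.sqrt(var)
    if mask is not None:
        out[mask] = mask_value  # blank out masked pixels
    return out

section = np.random.randint(0, 255, (1024, 1024), dtype=np.uint8)
normed = normalize_section(section, mean=section.mean(), var=section.var())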
Example No. 24
0
def compute_field_by_spec(ctx, spec_path, chunk_xy, blend_xy, pad, crop,
                          processor_spec, processor_mip, clear_nontissue_field,
                          start_coord, end_coord, coord_mip, suffix):

    scheduler = ctx.obj['scheduler']
    if suffix is None:
        suffix = ''
    else:
        suffix = f"_{suffix}"

    with open(spec_path, 'r') as f:
        spec = json.load(f)

    src_layers = spec_to_layer_dict_readonly(spec['src'])
    tgt_layers = spec_to_layer_dict_readonly(spec['tgt'])

    # if force_chunk_xy:
    #     force_chunk_xy = chunk_xy
    # else:
    #     force_chunk_xy = None

    # if force_chunk_z:
    #     force_chunk_z = chunk_z
    # else:
    #     force_chunk_z = None
    if crop is None:
        crop = pad

    reference_layer = next(iter(src_layers.values()))
    dst_layers = spec_to_layer_dict_overwrite(spec['dst'],
                                              reference_layer=reference_layer,
                                              default_type='field')

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    for dst_z in range(*bcube.z_range()):
        spec_z = str(dst_z)
        if spec_z in spec['job_specs'].keys():
            for job_spec in spec['job_specs'][spec_z]:
                src_stack = spec_to_stack(job_spec, 'src', src_layers)
                tgt_stack = spec_to_stack(job_spec, 'tgt', tgt_layers)
                dst_layer = dst_layers[str(job_spec['dst_img'])]
                ps = json.loads(processor_spec[0])
                ps["ApplyModel"]["params"]["val"] = job_spec["mask_id"]
                ps["ApplyModel"]["params"]["scale"] = job_spec["scale"]
                # Keep the original template intact: build a per-job spec rather
                # than overwriting processor_spec between iterations.
                job_processor_spec = (json.dumps(ps), )
                job_bcube = bcube.reset_coords(zs=job_spec['src_z'],
                                               ze=job_spec['src_z'] + 1,
                                               in_place=False)
                tgt_z_offset = job_spec['tgt_z'] - job_spec['src_z']
                compute_field_job = ComputeFieldJob(
                    src_stack=src_stack,
                    tgt_stack=tgt_stack,
                    dst_layer=dst_layer,
                    chunk_xy=chunk_xy,
                    chunk_z=1,
                    blend_xy=blend_xy,
                    processor_spec=job_processor_spec,
                    pad=pad,
                    crop=crop,
                    bcube=job_bcube,
                    tgt_z_offset=tgt_z_offset,
                    suffix=suffix,
                    processor_mip=processor_mip,
                    clear_nontissue_field=clear_nontissue_field)
                scheduler.register_job(compute_field_job,
                                       job_name="ComputeField {},{}".format(
                                           job_bcube, job_spec['mask_id']))
    scheduler.execute_until_completion()
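
Everything this command does is driven by the JSON file at spec_path. From the keys read above, a hypothetical spec skeleton might look like the following (the "src"/"tgt"/"dst" layer-spec schema is whatever spec_to_layer_dict_readonly/spec_to_layer_dict_overwrite expect, abbreviated here):

# Hypothetical spec_path contents, shown as a Python literal.
spec = {
    "src": {},  # layer specs, parsed by spec_to_layer_dict_readonly
    "tgt": {},
    "dst": {},  # field layers, parsed by spec_to_layer_dict_overwrite
    "job_specs": {
        # keyed by destination z as a string; each entry is a list of jobs
        "1001": [
            {
                "src_z": 1001,
                "tgt_z": 1000,      # tgt_z_offset = tgt_z - src_z = -1
                "dst_img": "field_a",
                "mask_id": 7,
                "scale": 1.0,
            }
        ]
    },
}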
Example No. 25
0
def vote(
    ctx,
    src_layer_spec,
    dst_folder,
    chunk_xy,
    mip,
    z_offsets,
    force_chunk_xy,
    consensus_threshold,
    blur_sigma,
    kernel_size,
    start_coord,
    end_coord,
    coord_mip,
    suffix,
):

    scheduler = ctx.obj["scheduler"]

    if suffix is None:
        suffix = "_voted"
    else:
        suffix = f"_{suffix}"

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)
    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec, name="src", readonly=True)

    if force_chunk_xy is None:
        force_chunk_xy = chunk_xy

    dst_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=dst_folder,
        name="dst",
        types=["field", "float_tensor"],
        readonly=False,
        suffix=suffix,
        force_chunk_xy=force_chunk_xy,
        overwrite=True,
    )

    vote_weights = dst_stack.create_sublayer(
        name="vote_weights",
        layer_type="float_tensor",
        overwrite=True,
        num_channels=max(len(src_stack), len(z_offsets)),
    )
    voted_field = dst_stack.create_sublayer(
        name="voted_field", layer_type="field", overwrite=True
    )

    vote_stitch_job = VoteJob(
        input_fields=src_stack.get_layers(),
        output_field=voted_field,
        chunk_xy=chunk_xy,
        bcube=bcube,
        z_offsets=z_offsets,
        mip=mip,
        consensus_threshold=consensus_threshold,
        blur_sigma=blur_sigma,
        kernel_size=kernel_size,
        weights_layer=vote_weights,
    )
    scheduler.register_job(
        vote_stitch_job, job_name=f"Vote {bcube}",
    )

    scheduler.execute_until_completion()
    corgie_logger.debug("Done!")
Example No. 26
0
def seethrough_block(
    ctx,
    src_layer_spec,
    dst_folder,
    chunk_xy,
    start_coord,
    end_coord,
    coord_mip,
    suffix,
    seethrough_spec,
    seethrough_limit,
    seethrough_spec_mip,
    force_chunk_z=1,
):
    scheduler = ctx.obj["scheduler"]

    if suffix is None:
        suffix = "_seethrough"
    else:
        suffix = f"_{suffix}"

    crop, pad = 0, 0
    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name="src",
                                       readonly=True)
    src_stack.folder = dst_folder
    dst_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=dst_folder,
        name="dst",
        types=["img", "mask"],
        readonly=False,
        suffix=suffix,
        overwrite=True,
        force_chunk_z=force_chunk_z,
    )
    render_method = helpers.PartialSpecification(
        f=RenderJob,
        pad=pad,
        chunk_xy=chunk_xy,
        chunk_z=1,
        render_masks=False,
    )
    seethrough_method = helpers.PartialSpecification(
        f=SeethroughCompareJob,
        mip=seethrough_spec_mip,
        processor_spec=seethrough_spec,
        chunk_xy=chunk_xy,
        pad=pad,
        crop=pad,
        seethrough_limit=seethrough_limit,
    )
    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    seethrough_block_job = SeethroughBlockJob(
        src_stack=src_stack,
        dst_stack=dst_stack,
        bcube=bcube,
        render_method=render_method,
        seethrough_method=seethrough_method,
        suffix=suffix,
    )
    # create scheduler and execute the job
    scheduler.register_job(seethrough_block_job,
                           job_name="Seethrough Block {}".format(bcube))

    scheduler.execute_until_completion()
    result_report = (
        f"Rendered layers {[str(l) for l in src_stack.get_layers_of_type('img')]}. "
        f"Results in {[str(l) for l in dst_stack.get_layers_of_type('img')]}")
    corgie_logger.info(result_report)
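
The seethrough pass flags defective pixels by comparing each section against a neighbor (SeethroughCompareJob at seethrough_spec_mip) and then renders those pixels from the nearest usable preceding section, looking back at most seethrough_limit sections. A toy sketch of that fill rule, under the stated assumptions:

import numpy as np

def seethrough_fill(sections, bad_masks, limit):
    """Toy seethrough: where section z is flagged bad, copy pixels from
    the nearest preceding good section, at most `limit` sections back."""
    out = [s.copy() for s in sections]
    for z in range(1, len(sections)):
        to_fill = bad_masks[z].copy()
        for back in range(1, limit + 1):
            if z - back < 0 or not to_fill.any():
                break
            take = to_fill & ~bad_masks[z - back]
            out[z][take] = out[z - back][take]
            to_fill &= ~take
    return out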
Example No. 27
0
def align_block(ctx, src_layer_spec, tgt_layer_spec, dst_folder, render_pad, render_chunk_xy,
        processor_spec, pad, crop, processor_mip, chunk_xy, start_coord, end_coord, coord_mip,
        suffix, copy_start, mode, chunk_z=1):
    scheduler = ctx.obj['scheduler']

    if suffix is None:
        suffix = '_aligned'
    else:
        suffix = f"_{suffix}"

    if crop is None:
        crop = pad
    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
            name='src', readonly=True)

    tgt_stack = create_stack_from_spec(tgt_layer_spec,
            name='tgt', readonly=True, reference=src_stack)

    dst_stack = stack.create_stack_from_reference(reference_stack=src_stack,
            folder=dst_folder, name="dst", types=["img", "mask"], readonly=False,
            suffix=suffix)

    render_method = helpers.PartialSpecification(
            f=RenderJob,
            pad=render_pad,
            chunk_xy=render_chunk_xy,
            chunk_z=1,
            blackout_masks=False,
            render_masks=True,
            mip=min(processor_mip)
            )

    cf_method = helpers.PartialSpecification(
            f=ComputeFieldJob,
            pad=pad,
            crop=crop,
            processor_mip=processor_mip,
            processor_spec=processor_spec,
            chunk_xy=chunk_xy,
            chunk_z=1
            )

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    if mode == 'bidirectional':
        z_mid = (bcube.z_range()[1] + bcube.z_range()[0]) // 2
        bcube_back = bcube.reset_coords(ze=z_mid, in_place=False)
        bcube_forv = bcube.reset_coords(zs=z_mid, in_place=False)

        align_block_job_back = AlignBlockJob(src_stack=src_stack,
                                    tgt_stack=tgt_stack,
                                    dst_stack=dst_stack,
                                    bcube=bcube_back,
                                    render_method=render_method,
                                    cf_method=cf_method,
                                    suffix=suffix,
                                    copy_start=copy_start,
                                    backward=True)
        scheduler.register_job(align_block_job_back, job_name="Backward Align Block {}".format(bcube))

        align_block_job_forv = AlignBlockJob(src_stack=src_stack,
                                    tgt_stack=tgt_stack,
                                    dst_stack=deepcopy(dst_stack),
                                    bcube=bcube_forv,
                                    render_method=render_method,
                                    cf_method=cf_method,
                                    suffix=suffix,
                                    copy_start=True,
                                    backward=False)
        scheduler.register_job(align_block_job_forv, job_name="Forward Align Block {}".format(bcube))
    else:
        align_block_job = AlignBlockJob(src_stack=src_stack,
                                        tgt_stack=tgt_stack,
                                        dst_stack=dst_stack,
                                        bcube=bcube,
                                        render_method=render_method,
                                        cf_method=cf_method,
                                        suffix=suffix,
                                        copy_start=copy_start,
                                        backward=mode=='backward')

        # create scheduler and execute the job
        scheduler.register_job(align_block_job, job_name="Align Block {}".format(bcube))

    scheduler.execute_until_completion()
    result_report = f"Aligned layers {[str(l) for l in src_stack.get_layers_of_type('img')]}. " \
            f"Results in {[str(l) for l in dst_stack.get_layers_of_type('img')]}"
    corgie_logger.info(result_report)
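
In bidirectional mode the z range is split at its midpoint: one job aligns the lower half backward while the other aligns the upper half forward. A worked example of the split arithmetic:

# Worked example of the bidirectional split:
zs, ze = 1000, 2000        # hypothetical bcube z range
z_mid = (ze + zs) // 2     # 1500
# backward job aligns z in [1000, 1500); forward job aligns [1500, 2000)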
Example No. 28
0
def compute_field(
    ctx,
    src_layer_spec,
    tgt_layer_spec,
    dst_layer_spec,
    suffix,
    processor_spec,
    pad,
    crop,
    chunk_xy,
    start_coord,
    mip,
    end_coord,
    coord_mip,
    tgt_z_offset,
    chunk_z,
    reference_key,
):

    scheduler = ctx.obj["scheduler"]

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec, name="src", readonly=True)

    tgt_stack = create_stack_from_spec(
        tgt_layer_spec, name="tgt", readonly=True, reference=src_stack
    )

    reference_layer = None
    if reference_key in src_stack.layers:
        reference_layer = src_stack.layers[reference_key]
    dst_layer = create_layer_from_spec(
        dst_layer_spec,
        allowed_types=["field"],
        default_type="field",
        readonly=False,
        caller_name="dst_layer",
        reference=reference_layer,
    )

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    if crop is None:
        crop = pad

    compute_field_job = ComputeFieldJob(
        src_stack=src_stack,
        tgt_stack=tgt_stack,
        dst_layer=dst_layer,
        chunk_xy=chunk_xy,
        chunk_z=chunk_z,
        processor_spec=processor_spec,
        pad=pad,
        crop=crop,
        bcube=bcube,
        tgt_z_offset=tgt_z_offset,
        suffix=suffix,
        mip=mip,
    )

    # create scheduler and execute the job
    scheduler.register_job(
        compute_field_job,
        job_name="Compute field {}, tgt z offset {}".format(bcube, tgt_z_offset),
    )
    scheduler.execute_until_completion()
Example No. 29
0
def copy(
    ctx,
    src_layer_spec,
    dst_folder,
    copy_masks,
    blackout_masks,
    chunk_xy,
    chunk_z,
    start_coord,
    end_coord,
    coord_mip,
    mip,
    suffix,
    force_chunk_xy,
    force_chunk_z,
):

    scheduler = ctx.obj["scheduler"]
    if suffix is None:
        suffix = ""
    else:
        suffix = f"_{suffix}"

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name="src",
                                       readonly=True)

    if not force_chunk_xy:
        force_chunk_xy = chunk_xy

    if not force_chunk_z:
        force_chunk_z = chunk_z

    dst_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=dst_folder,
        name="dst",
        types=["img", "mask"],
        readonly=False,
        suffix=suffix,
        force_chunk_xy=force_chunk_xy,
        force_chunk_z=force_chunk_z,
        overwrite=True,
    )

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    copy_job = CopyJob(
        src_stack=src_stack,
        dst_stack=dst_stack,
        mip=mip,
        bcube=bcube,
        chunk_xy=chunk_xy,
        chunk_z=chunk_z,
        copy_masks=copy_masks,
        blackout_masks=blackout_masks,
    )
    # create scheduler and execute the job
    scheduler.register_job(copy_job, job_name="Copy {}".format(bcube))
    scheduler.execute_until_completion()

    result_report = (
        f"Copied layers {[str(l) for l in src_stack.get_layers_of_type('img')]}. "
        f"to {[str(l) for l in dst_stack.get_layers_of_type('img')]}")
    corgie_logger.info(result_report)
Example No. 30
0
def create_segmentation_masks(
    ctx,
    src_layer_spec,
    dst_folder,
    processor_spec,
    pad,
    crop,
    processor_mip,
    dst_mip,
    chunk_xy,
    chunk_z,
    force_chunk_xy,
    start_coord,
    end_coord,
    coord_mip,
    suffix,
    similarity_threshold,
    compute_similarities,
    compute_slip_mask,
    compute_step_mask,
    compute_affinity_mask,
):

    scheduler = ctx.obj["scheduler"]

    if suffix is None:
        suffix = "_aligned"
    else:
        suffix = f"_{suffix}"

    if crop is None:
        crop = pad

    corgie_logger.debug("Setting up layers...")

    src_stack = create_stack_from_spec(src_layer_spec, name="src", readonly=True)
    src_stack.folder = dst_folder

    if force_chunk_xy is None:
        force_chunk_xy = chunk_xy

    dst_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=dst_folder,
        name="dst",
        types=["img", "mask"],
        readonly=False,
        suffix=suffix,
        force_chunk_xy=force_chunk_xy,
        overwrite=True,
    )

    corgie_logger.debug("Done!")

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    if compute_similarities:
        z_offsets = range(-1, -4, -1)
        for tgt_z_offset in z_offsets:
            if tgt_z_offset not in src_stack:
                dst_layer = dst_stack.create_sublayer(
                    name=tgt_z_offset,
                    layer_type="img",
                    overwrite=True,
                    layer_args={"dtype": "uint8"},
                )

                proc_spec = json.loads(processor_spec)
                if isinstance(proc_spec, dict):
                    assert str(tgt_z_offset) in proc_spec
                    proc_spec = json.dumps(proc_spec[str(tgt_z_offset)])
                else:
                    proc_spec = processor_spec

                compare_job = CompareSectionsJob(
                    src_stack=src_stack,
                    tgt_stack=src_stack,
                    dst_layer=dst_layer,
                    chunk_xy=chunk_xy,
                    processor_spec=proc_spec,
                    pad=pad,
                    bcube=bcube,
                    tgt_z_offset=tgt_z_offset,
                    suffix=suffix,
                    mip=processor_mip,
                    dst_mip=dst_mip,
                )
                scheduler.register_job(
                    compare_job,
                    job_name="Compare Sections Job {}, tgt z offset {}".format(
                        bcube, tgt_z_offset
                    ),
                )
        scheduler.execute_until_completion()
        result_report = f"Similarity results in {[str(l) for l in dst_stack.get_layers_of_type('img')]}"
        corgie_logger.info(result_report)

        # If similarities were just computed, add them to the src_stack as masks
        # Otherwise, they need to be included with src_stack_specs as masks
        # See arg help above for similarity mask and misalignment mask definitions.
        for layer_name in z_offsets:
            img_layer = dst_stack[layer_name]
            binarizer = {
                "binarization": ["gt", similarity_threshold],
                # "cv_params": {"cache": True},
            }
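            # Assumed semantics of ["gt", similarity_threshold]: when this
            # layer is later read as a mask, a pixel is masked wherever its
            # similarity value is greater than similarity_threshold.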
            layer_dict = {
                "path": img_layer.path,
                "name": img_layer.name,
                "type": "mask",
                "args": binarizer,
            }
            mask_layer = create_layer_from_dict(layer_dict, reference=dst_stack)
            src_stack.add_layer(mask_layer)

    corgie_logger.info("Computing slip & step masks")
    if compute_slip_mask:
        slip_layer = dst_stack.create_sublayer(
            name="slip", layer_type="mask", overwrite=True
        )
        slip_bcube = bcube.reset_coords(
            zs=bcube.z[0] + 1, ze=bcube.z[1] - 1, in_place=False
        )
        slip_misalignments_job = DetectSlipMisalignmentsJob(
            src_stack=src_stack,
            dst_layer=slip_layer,
            mip=dst_mip,
            bcube=slip_bcube,
            pad=pad,
            chunk_xy=chunk_xy,
            chunk_z=chunk_z,
        )
        scheduler.register_job(
            slip_misalignments_job,
            job_name="Detect Slip Misalignments {}".format(bcube),
        )
    if compute_step_mask:
        step_layer = dst_stack.create_sublayer(
            name="step", layer_type="mask", overwrite=True
        )
        step_bcube = bcube.reset_coords(
            zs=bcube.z[0] + 2, ze=bcube.z[1] - 2, in_place=False
        )
        step_misalignments_job = DetectStepMisalignmentsJob(
            src_stack=src_stack,
            dst_layer=step_layer,
            mip=dst_mip,
            bcube=step_bcube,
            pad=pad,
            chunk_xy=chunk_xy,
            chunk_z=chunk_z,
        )
        scheduler.register_job(
            step_misalignments_job,
            job_name="Detect Step Misalignments {}".format(bcube),
        )
    if compute_slip_mask or compute_step_mask:
        # Execute slip & step masks at the same time
        scheduler.execute_until_completion()
        result_report = f"Slip & step masks in in {str(slip_layer), str(step_layer)}"
        corgie_logger.info(result_report)

    if compute_affinity_mask:
        corgie_logger.info("Creating affinity masks")
        affinity_layer = dst_stack.create_sublayer(
            name="affinity", layer_type="mask", overwrite=True
        )
        three_consecutive_exp = DetectConsecutiveMasksJob.get_exp(n=3, key="slip")
        exp = {
            "inputs": [
                three_consecutive_exp,
                {"weight": 1, "key": "step", "offset": 0},
            ],
            "threshold": 0,
        }
        affinity_masks_job = CombineMasksJob(
            src_stack=dst_stack,
            exp=exp,
            dst_layer=affinity_layer,
            mip=dst_mip,
            bcube=bcube,
            pad=pad,
            chunk_xy=chunk_xy,
            chunk_z=chunk_z,
        )
        scheduler.register_job(
            affinity_masks_job, job_name="Affinity Masks {}".format(bcube)
        )
        scheduler.execute_until_completion()
        result_report = f"Results in {str(affinity_layer)}"
        corgie_logger.info(result_report)
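
Read as an expression tree, the exp above sums weight * mask(key, z + offset) over its inputs, recursing into nested expressions such as the three-consecutive-slip term, and thresholds the total; with threshold 0 this behaves like an OR of its parts. A toy evaluator under that assumed reading:

import numpy as np

def combine_masks(masks, exp, z):
    """Toy evaluation of an assumed CombineMasksJob expression.
    `masks` maps key -> {z: 2D bool array}."""
    total = 0
    for inp in exp["inputs"]:
        if "inputs" in inp:  # nested sub-expression (e.g. 3 consecutive slips)
            total = total + combine_masks(masks, inp, z)
        else:
            total = total + inp["weight"] * masks[inp["key"]][z + inp["offset"]]
    return total > exp["threshold"]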