Example no. 1
    def create_sublayer(self,
                        name,
                        layer_type,
                        suffix='',
                        reference=None,
                        **kwargs):
        if self.folder is None:
            raise exceptions.CorgieException(
                "Stack must have 'folder' field set "
                "before sublayers can be created")

        if self.reference_layer is None and reference is None:
            raise exceptions.CorgieException(
                "Stack must either have at least one layer "
                "or reference layer must be provided for sublayer creation")

        if reference is None:
            reference = self.reference_layer
        path = os.path.join(self.folder, layer_type, f"{name}{suffix}")
        layer = reference.backend.create_layer(
            path=path,
            layer_type=layer_type,
            name=name,
            reference=reference,
            **kwargs,
        )
        self.add_layer(layer)
        return layer
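A minimal usage sketch of create_sublayer, assuming dst_stack is a stack that already has its folder set and holds at least one layer to act as the reference; the layer name and suffix below are hypothetical:

# Minimal sketch; 'dst_stack' is assumed to have 'folder' set and a reference layer.
field_layer = dst_stack.create_sublayer(
    "block_field",          # stored under <folder>/field/block_field_v0
    layer_type="field",
    suffix="_v0",           # optional; appended to the name in the layer path
)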
Example no. 2
    def create_layer(self, path, layer_type=None, reference=None, layer_args=None, **kwargs):
        if layer_args is None:
            layer_args = {}
        if layer_type not in self.layer_constr_dict:
            raise exceptions.CorgieException(
                "Layer type {} is not defined".format(layer_type))
        if self.layer_constr_dict[layer_type] is None:
            raise exceptions.CorgieException(
                "Layer type {} is not implemented for {} backend".format(
                    layer_type, type(self)))

        corgie_logger.debug("Creating layer '{}' on device '{}' with reference '{}'...".format(
                path, self.device, reference
                ))
        layer = self.layer_constr_dict[layer_type](path=path, device=self.device,
                reference=reference,
                backend=self,
                **layer_args,
                **kwargs
                )
        corgie_logger.debug("Done")
        return layer
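The dispatch in create_layer relies on layer_constr_dict, which maps layer-type names to constructor callables, with None meaning the type is recognized but not implemented for that backend. A self-contained sketch of such a registry; the class names here are illustrative, not corgie's real ones:

# Hypothetical registry sketch; ToyImgLayer stands in for a real layer class.
class ToyImgLayer:
    def __init__(self, path, device, reference, backend, **kwargs):
        self.path = path
        self.device = device

class ToyBackend:
    device = "cpu"
    layer_constr_dict = {
        "img": ToyImgLayer,   # implemented for this backend
        "mask": None,         # recognized type, but not implemented here
    }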
Example no. 3
def create_layer_from_dict(param_dict,
                           reference=None,
                           caller_name=None,
                           allowed_types=None,
                           default_type=None,
                           **kwargs):
    if default_type is None:
        default_type = DEFAULT_LAYER_TYPE
    default_param_dict = {
        "path": None,
        "name": None,
        "type": default_type,
        "data_backend": DEFAULT_DATA_BACKEND,
        "args": {},
        "readonly": False
    }
    for k in param_dict.keys():
        if k not in default_param_dict:
            raise exceptions.CorgieException(f"Unkown layer parameter '{k}'")
    params = {**default_param_dict, **param_dict}

    if params["path"] is None:
        arg_spec = '"path" key in layer specification'
        if caller_name is not None:
            arg_spec += ' of {}'.format(caller_name)
        raise exceptions.ArgumentError(arg_spec, 'not given')

    layer_path = params["path"]
    layer_type = params["type"]
    layer_args = params["args"]
    data_backend = params["data_backend"]
    corgie_logger.debug("Parsing layer path: {}".format(layer_path))

    if allowed_types is not None and layer_type not in allowed_types:
        raise exceptions.ArgumentError("layer_type",
                                       f'must be of type in {allowed_types}')

    backend = str_to_backend(data_backend)()

    layer = backend.create_layer(path=layer_path,
                                 layer_type=layer_type,
                                 reference=reference,
                                 layer_args=layer_args,
                                 **kwargs)

    name = params["name"]
    if name is None:
        name = layer.get_layer_type()
    layer.name = name

    return layer
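A sketch of the kind of specification dictionary create_layer_from_dict accepts; only the keys listed in default_param_dict above are allowed, and the path, backend defaults, and caller name below are hypothetical:

# Hypothetical layer spec; unknown keys would raise CorgieException.
layer_spec = {
    "path": "gs://my-bucket/dataset/raw_img",  # required; omitting it raises ArgumentError
    "name": "raw_img",                         # optional; defaults to the layer's type
    "type": "img",                             # optional; defaults to DEFAULT_LAYER_TYPE
    "args": {},                                # forwarded to the backend as layer_args
    "readonly": True,                          # accepted key; defaults to False
}
layer = create_layer_from_dict(layer_spec, caller_name="my_command")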
Example no. 4
    def __init__(
        self,
        src_stack,
        tgt_stack,
        dst_layer,
        chunk_xy,
        chunk_z,
        processor_spec,
        processor_mip,
        pad,
        crop,
        bcube,
        tgt_z_offset,
        processor_vv=[],
        clear_nontissue_field=True,
        blend_xy=0,
        suffix="",
    ):
        self.src_stack = src_stack
        self.tgt_stack = tgt_stack
        self.dst_layer = dst_layer
        self.chunk_xy = chunk_xy
        self.chunk_z = chunk_z
        self.blend_xy = blend_xy
        self.pad = pad
        self.crop = crop
        self.bcube = bcube
        self.tgt_z_offset = tgt_z_offset
        self.suffix = suffix  # in case this job wants to make more layers
        self.clear_nontissue_field = clear_nontissue_field

        self.processor_spec = processor_spec
        self.processor_mip = processor_mip
        self.processor_vv = processor_vv
        if isinstance(self.processor_spec, str):
            self.processor_spec = [self.processor_spec]
        if isinstance(self.processor_mip, int):
            self.processor_mip = [self.processor_mip]

        if len(self.processor_vv) == 0:
            self.processor_vv = [1 for _ in self.processor_spec]

        if len(self.processor_mip) != len(self.processor_spec) or len(
                self.processor_mip) != len(self.processor_vv):
            raise exceptions.CorgieException(
                "The number of processors doesn't "
                "match the number of specified processor MIPs or VV values")

        super().__init__()
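The constructor above broadcasts scalar processor arguments into parallel lists and checks that their lengths agree; a standalone sketch of that normalization (the function name here is illustrative):

# Illustrative sketch of the normalization done in __init__ above.
def normalize_processor_args(processor_spec, processor_mip, processor_vv=()):
    if isinstance(processor_spec, str):
        processor_spec = [processor_spec]
    if isinstance(processor_mip, int):
        processor_mip = [processor_mip]
    processor_vv = list(processor_vv) or [1 for _ in processor_spec]
    if not (len(processor_spec) == len(processor_mip) == len(processor_vv)):
        raise ValueError("number of processors must match the number of "
                         "processor MIPs and VV values")
    return processor_spec, processor_mip, processor_vv

# A single spec/MIP pair broadcasts to one-element lists, with VV defaulting to 1:
# normalize_processor_args("align_net", 4) -> (["align_net"], [4], [1])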
Example no. 5
def align(
    ctx,
    src_layer_spec,
    dst_folder,
    render_pad,
    render_chunk_xy,
    processor_spec,
    pad,
    crop,
    processor_mip,
    chunk_xy,
    start_coord,
    end_coord,
    coord_mip,
    bad_starter_path,
    block_size,
    stitch_size,
    vote_dist,
    consensus_threshold,
    blur_sigma,
    kernel_size,
    blend_xy,
    force_chunk_xy,
    suffix,
    seethrough_spec,
    seethrough_limit,
    seethrough_spec_mip,
    decay_dist,
    blur_rate,
    restart_stage,
    restart_suffix,
):

    scheduler = ctx.obj["scheduler"]

    if suffix is None:
        suffix = "_aligned"
    else:
        suffix = f"_{suffix}"
    if (restart_suffix is None) or (restart_stage == 0):
        restart_suffix = suffix

    if crop is None:
        crop = pad

    corgie_logger.debug("Setting up layers...")
    # TODO: store stitching images in layer other than even & odd
    if vote_dist + stitch_size - 2 >= block_size:
        raise exceptions.CorgieException(
            "block_size too small for stitching + voting requirements (stitch_size + vote_dist)"
        )

    corgie_logger.debug("Setting up layers...")

    src_stack = create_stack_from_spec(src_layer_spec,
                                       name="src",
                                       readonly=True)
    src_stack.folder = dst_folder

    if force_chunk_xy is None:
        force_chunk_xy = chunk_xy

    dst_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=dst_folder,
        name="dst",
        types=["img", "mask"],
        readonly=False,
        suffix=restart_suffix,
        force_chunk_xy=force_chunk_xy,
        overwrite=True,
    )

    even_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=os.path.join(dst_folder, "even"),
        name="even",
        types=["img", "mask"],
        readonly=False,
        suffix=suffix,
        force_chunk_xy=force_chunk_xy,
        overwrite=True,
    )

    odd_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=os.path.join(dst_folder, "odd"),
        name="odd",
        types=["img", "mask"],
        readonly=False,
        suffix=suffix,
        force_chunk_xy=force_chunk_xy,
        overwrite=True,
    )

    corgie_logger.debug("Done!")

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    corgie_logger.debug("Calculating blocks...")
    skip_list = []
    if bad_starter_path is not None:
        with open(bad_starter_path) as f:
            line = f.readline()
            while line:
                skip_list.append(int(line))
                line = f.readline()
    blocks = get_blocks(
        start=bcube.z_range()[0],
        stop=bcube.z_range()[1],
        block_size=block_size,
        block_overlap=1,
        skip_list=skip_list,
        src_stack=src_stack,
        even_stack=even_stack,
        odd_stack=odd_stack,
    )
    stitch_blocks = [b.overlap(stitch_size) for b in blocks[1:]]
    corgie_logger.debug("All Blocks")
    for block, stitch_block in zip(blocks, [None] + stitch_blocks):
        corgie_logger.debug(block)
        corgie_logger.debug(f"Stitch {stitch_block}")
        corgie_logger.debug("\n")

    max_blur_mip = (math.ceil(math.log(decay_dist * blur_rate + 1, 2)) +
                    processor_mip[-1])
    corgie_logger.debug(f"Max blur mip for stitching field: {max_blur_mip}")

    # Set all field names, adjusting for restart suffix
    block_field_name = f"field{suffix}"
    stitch_estimated_suffix = f"_stitch_estimated{suffix}"
    stitch_estimated_name = f"field{stitch_estimated_suffix}"
    stitch_corrected_name = f"stitch_corrected{suffix}"
    stitch_corrected_field = None
    composed_name = f"composed{suffix}"
    if restart_stage <= 2:
        stitch_estimated_suffix = f"_stitch_estimated{restart_suffix}"
        stitch_estimated_name = f"field{stitch_estimated_suffix}"
        stitch_corrected_name = f"stitch_corrected{restart_suffix}"
    if restart_stage <= 3:
        composed_name = f"composed{restart_suffix}"

    render_method = helpers.PartialSpecification(
        f=RenderJob,
        pad=render_pad,
        chunk_xy=render_chunk_xy,
        chunk_z=1,
        render_masks=False,
    )

    cf_method = helpers.PartialSpecification(
        f=ComputeFieldJob,
        pad=pad,
        crop=crop,
        processor_mip=processor_mip,
        processor_spec=processor_spec,
        chunk_xy=chunk_xy,
        blend_xy=blend_xy,
        chunk_z=1,
    )
    if seethrough_spec != tuple():
        assert seethrough_spec_mip is not None
        seethrough_method = helpers.PartialSpecification(
            f=SeethroughCompareJob,
            mip=seethrough_spec_mip,
            processor_spec=seethrough_spec,
            chunk_xy=chunk_xy,
            pad=pad,
            crop=pad,
            seethrough_limit=seethrough_limit,
        )
    else:
        seethrough_method = None

    if restart_stage == 0:
        corgie_logger.debug("Aligning blocks...")
        for block in blocks:
            block_bcube = block.get_bcube(bcube)
            # Use copies of src & dst so that aligning the stitching blocks
            # is not affected by these block fields.
            # Copying also allows local compute to not modify objects for other tasks
            align_block_job_forv = AlignBlockJob(
                src_stack=deepcopy(block.src_stack),
                dst_stack=deepcopy(block.dst_stack),
                bcube=block_bcube,
                render_method=render_method,
                cf_method=cf_method,
                vote_dist=vote_dist,
                seethrough_method=seethrough_method,
                suffix=suffix,
                copy_start=True,
                use_starters=True,
                backward=False,
                consensus_threshold=consensus_threshold,
                blur_sigma=blur_sigma,
                kernel_size=kernel_size,
            )
            scheduler.register_job(
                align_block_job_forv,
                job_name=f"Forward Align {block} {block_bcube}",
            )

        scheduler.execute_until_completion()
        corgie_logger.debug("Done!")

    if restart_stage <= 1:
        corgie_logger.debug("Aligning stitching blocks...")
        for stitch_block in stitch_blocks:
            block_bcube = stitch_block.get_bcube(bcube)
            # These blocks will have block-aligned images, but not
            # the block_fields that warped them.
            align_block_job_forv = AlignBlockJob(
                src_stack=deepcopy(stitch_block.src_stack),
                dst_stack=deepcopy(stitch_block.dst_stack),
                bcube=block_bcube,
                render_method=render_method,
                cf_method=cf_method,
                vote_dist=vote_dist,
                seethrough_method=seethrough_method,
                suffix=stitch_estimated_suffix,
                copy_start=False,
                use_starters=False,
                backward=False,
                consensus_threshold=consensus_threshold,
                blur_sigma=blur_sigma,
                kernel_size=kernel_size,
            )
            scheduler.register_job(
                align_block_job_forv,
                job_name=f"Stitch Align {stitch_block} {block_bcube}",
            )

        scheduler.execute_until_completion()
        corgie_logger.debug("Done!")

    # Add in the stitch_estimated fields that were just created above
    even_stack.create_sublayer(
        stitch_estimated_name,
        layer_type="field",
        overwrite=False,
    )
    odd_stack.create_sublayer(
        stitch_estimated_name,
        layer_type="field",
        overwrite=False,
    )
    if restart_stage <= 2:
        if stitch_size > 1:
            corgie_logger.debug("Voting over stitching blocks")
            stitch_corrected_field = dst_stack.create_sublayer(
                stitch_corrected_name, layer_type="field", overwrite=True)
            for stitch_block in stitch_blocks:
                stitch_estimated_field = stitch_block.dst_stack[
                    stitch_estimated_name]
                block_bcube = bcube.reset_coords(
                    zs=stitch_block.start,
                    ze=stitch_block.start + 1,
                    in_place=False,
                )
                z_offsets = [
                    z - block_bcube.z_range()[0]
                    for z in range(stitch_block.start, stitch_block.stop)
                ]
                vote_stitch_job = VoteJob(
                    input_fields=[stitch_estimated_field],
                    output_field=stitch_corrected_field,
                    chunk_xy=chunk_xy,
                    bcube=block_bcube,
                    z_offsets=z_offsets,
                    mip=processor_mip[-1],
                    consensus_threshold=consensus_threshold,
                    blur_sigma=blur_sigma,
                    kernel_size=kernel_size,
                )
                scheduler.register_job(
                    vote_stitch_job,
                    job_name=f"Stitching Vote {stitch_block} {block_bcube}",
                )

            scheduler.execute_until_completion()
            corgie_logger.debug("Done!")

        for stitch_block in stitch_blocks:
            block_bcube = bcube.reset_coords(zs=stitch_block.start,
                                             ze=stitch_block.start + 1,
                                             in_place=False)
            field_to_downsample = stitch_block.dst_stack[stitch_estimated_name]
            if stitch_corrected_field is not None:
                field_to_downsample = stitch_corrected_field
            # Hack for fafb
            field_info = field_to_downsample.get_info()
            for scale in field_info['scales']:
                scale['chunk_sizes'][-1][-1] = 1
                scale['encoding'] = 'raw'
            field_to_downsample.cv.store_info(field_info)
            field_to_downsample.cv.fetch_info()
            downsample_field_job = DownsampleJob(
                src_layer=field_to_downsample,
                mip_start=processor_mip[-1],
                mip_end=max_blur_mip,
                bcube=block_bcube,
                chunk_xy=chunk_xy,  # TODO: This probably needs to be modified at highest mips
                chunk_z=1,
                mips_per_task=2,
            )
            scheduler.register_job(
                downsample_field_job,
                job_name=f"Downsample stitching field {block_bcube}",
            )
        scheduler.execute_until_completion()
        corgie_logger.debug("Done!")

    # Add in the block-align fields
    even_stack.create_sublayer(
        block_field_name,
        layer_type="field",
        overwrite=False,
    )
    odd_stack.create_sublayer(
        block_field_name,
        layer_type="field",
        overwrite=False,
    )
    composed_field = dst_stack.create_sublayer(composed_name,
                                               layer_type="field",
                                               overwrite=True)
    if (restart_stage > 2) and (stitch_size > 1):
        stitch_corrected_field = dst_stack.create_sublayer(
            stitch_corrected_name, layer_type="field", overwrite=False)
    if restart_stage <= 3:
        corgie_logger.debug("Stitching blocks...")
        for block, stitch_block in zip(blocks[1:], stitch_blocks):
            block_bcube = block.broadcastable().get_bcube(bcube)
            block_list = block.get_neighbors(dist=decay_dist)
            corgie_logger.debug(f"src_block: {block}")
            corgie_logger.debug(f"influencing blocks: {block_list}")
            z_list = [b.stop for b in block_list]
            # stitch_corrected_field used if there is multi-section block overlap,
            # which requires voting to produce a corrected field.
            # If there is only single-section block overlap, then use
            # stitch_estimated_fields from each stitch_block
            if stitch_corrected_field is not None:
                stitching_fields = [stitch_corrected_field]
            else:
                # Order with furthest block first (convention of FieldSet).
                stitching_fields = [
                    stitch_block.dst_stack[stitch_estimated_name],
                    stitch_block.src_stack[stitch_estimated_name],
                ]

            broadcast_job = BroadcastJob(
                block_field=block.dst_stack[block_field_name],
                stitching_fields=stitching_fields,
                output_field=composed_field,
                chunk_xy=chunk_xy,
                bcube=block_bcube,
                pad=pad,
                z_list=z_list,
                mip=processor_mip[-1],
                decay_dist=decay_dist,
                blur_rate=blur_rate,
            )
            scheduler.register_job(broadcast_job,
                                   job_name=f"Broadcast {block} {block_bcube}")

        scheduler.execute_until_completion()
        corgie_logger.debug("Done!")

        if len(blocks) > 1:
            block_bcube = blocks[0].get_bcube(bcube)
            copy_job = CopyLayerJob(
                src_layer=even_stack[block_field_name],
                dst_layer=composed_field,
                mip=processor_mip[-1],
                bcube=block_bcube,
                chunk_xy=chunk_xy,
                chunk_z=1,
            )
            scheduler.register_job(
                copy_job,
                job_name=f"Copy first block_field to composed_field location")
            scheduler.execute_until_completion()
            corgie_logger.debug("Done!")

    if restart_stage <= 4:
        if len(blocks) == 1:
            block_bcube = blocks[0].get_bcube(bcube)
            render_job = RenderJob(
                src_stack=src_stack,
                dst_stack=dst_stack,
                mips=processor_mip[-1],
                pad=pad,
                bcube=block_bcube,
                chunk_xy=chunk_xy,
                chunk_z=1,
                render_masks=True,
                blackout_masks=False,
                additional_fields=[even_stack[block_field_name]],
            )
            scheduler.register_job(
                render_job, job_name=f"Render first block {block_bcube}")
        else:
            block_bcube = bcube.reset_coords(zs=blocks[0].start,
                                             ze=blocks[-1].stop,
                                             in_place=False)
            render_job = RenderJob(
                src_stack=src_stack,
                dst_stack=dst_stack,
                mips=processor_mip[-1],
                pad=pad,
                bcube=block_bcube,
                chunk_xy=chunk_xy,
                chunk_z=1,
                render_masks=True,
                blackout_masks=True,
                additional_fields=[composed_field],
            )
            scheduler.register_job(render_job,
                                   job_name=f"Render all blocks {block_bcube}")
        scheduler.execute_until_completion()
        corgie_logger.debug("Done!")

    result_report = (
        f"Aligned layers {[str(l) for l in src_stack.get_layers_of_type('img')]}. "
        f"Results in {[str(l) for l in dst_stack.get_layers_of_type('img')]}")
    corgie_logger.info(result_report)
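One worked detail from the align command above: max_blur_mip bounds how far the stitching field is downsampled, growing with the broadcast decay distance and blur rate on top of the last processor's MIP (processor_mip[-1]). A small numeric sketch with hypothetical values:

import math

# Hypothetical values: decay over 100 sections, blur growing by 0.3 px per section,
# and a last processor MIP of 4.
decay_dist, blur_rate, last_mip = 100, 0.3, 4
max_blur_mip = math.ceil(math.log(decay_dist * blur_rate + 1, 2)) + last_mip
# log2(31) is about 4.95, so the ceiling gives 5 and max_blur_mip == 9
print(max_blur_mip)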