def downsample(ctx, src_layer_spec, dst_layer_spec, mip_start,
        mip_end, chunk_xy, chunk_z, mips_per_task, start_coord,
        end_coord, coord_mip):
    scheduler = ctx.obj['scheduler']
    corgie_logger.debug("Setting up Source and Destination layers...")

    src_layer = create_layer_from_spec(src_layer_spec,
            caller_name='src layer',
            readonly=True)

    if dst_layer_spec is None:
        corgie_logger.info("Destination layer not specified. Using Source layer "
                "as Destination.")
        dst_layer = src_layer
        dst_layer.readonly = False
    else:
        dst_layer = create_layer_from_spec(dst_layer_spec,
            caller_name='dst layer',
            readonly=False,
            reference=src_layer, chunk_z=chunk_z, overwrite=True)
    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)
    downsample_job = DownsampleJob(src_layer=src_layer,
                                   dst_layer=dst_layer,
                                   mip_start=mip_start,
                                   mip_end=mip_end,
                                   bcube=bcube,
                                   chunk_xy=chunk_xy,
                                   chunk_z=chunk_z,
                                   mips_per_task=mips_per_task)

    # register the job and execute until completion
    scheduler.register_job(downsample_job, job_name="downsample")
    scheduler.execute_until_completion()
    result_report = f"Downsampled {src_layer} from {mip_start} to {mip_end}. Result in {dst_layer}"
    corgie_logger.info(result_report)
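
# A minimal sketch of where ctx.obj['scheduler'] comes from. These functions
# are written as click command callbacks, so a parent click group is assumed
# to construct the scheduler and stash it on the context object before any
# subcommand runs. The group below is illustrative only, not corgie's actual
# entry point, and `Scheduler` is a stand-in name for whatever scheduler
# class the real CLI builds from its options.
#
# import click
#
# @click.group()
# @click.pass_context
# def cli(ctx):
#     ctx.obj = {'scheduler': Scheduler()}
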
def compute_field(ctx, src_layer_spec, tgt_layer_spec, dst_layer_spec, suffix,
                  processor_spec, pad, crop, chunk_xy, start_coord,
                  processor_mip, end_coord, coord_mip, blend_xy, tgt_z_offset,
                  chunk_z, reference_key, clear_nontissue_field):
    if suffix is None:
        suffix = ''
    else:
        suffix = f"_{suffix}"

    scheduler = ctx.obj['scheduler']

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name='src',
                                       readonly=True)

    tgt_stack = create_stack_from_spec(tgt_layer_spec,
                                       name='tgt',
                                       readonly=True,
                                       reference=src_stack)

    reference_layer = None
    if reference_key in src_stack.layers:
        reference_layer = src_stack.layers[reference_key]
    dst_layer = create_layer_from_spec(dst_layer_spec,
                                       allowed_types=['field'],
                                       default_type='field',
                                       readonly=False,
                                       caller_name='dst_layer',
                                       reference=reference_layer,
                                       overwrite=True)

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    if crop is None:
        crop = pad

    compute_field_job = ComputeFieldJob(
        src_stack=src_stack,
        tgt_stack=tgt_stack,
        dst_layer=dst_layer,
        chunk_xy=chunk_xy,
        chunk_z=chunk_z,
        blend_xy=blend_xy,
        processor_spec=processor_spec,
        pad=pad,
        crop=crop,
        bcube=bcube,
        tgt_z_offset=tgt_z_offset,
        suffix=suffix,
        processor_mip=processor_mip,
        clear_nontissue_field=clear_nontissue_field)

    # register the job and execute until completion
    scheduler.register_job(compute_field_job,
                           job_name="Compute field {}, tgt z offset {}".format(
                               bcube, tgt_z_offset))
    scheduler.execute_until_completion()
def invert_field(ctx, src_layer_spec, dst_layer_spec, pad, crop, chunk_xy,
                 start_coord, mip, end_coord, coord_mip, blend_xy, chunk_z,
                 force_chunk_xy):
    scheduler = ctx.obj['scheduler']

    if force_chunk_xy:
        force_chunk_xy = chunk_xy
    else:
        force_chunk_xy = None

    corgie_logger.debug("Setting up layers...")
    src_layer = create_layer_from_spec(src_layer_spec,
                                       allowed_types=['field'],
                                       default_type='field',
                                       readonly=True,
                                       caller_name='src_layer')

    dst_layer = create_layer_from_spec(dst_layer_spec,
                                       allowed_types=['field'],
                                       default_type='field',
                                       readonly=False,
                                       caller_name='dst_layer',
                                       reference=src_layer,
                                       overwrite=True,
                                       force_chunk_xy=force_chunk_xy)

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    if crop is None:
        crop = pad

    invert_field_job = InvertFieldJob(src_layer=src_layer,
                                      dst_layer=dst_layer,
                                      chunk_xy=chunk_xy,
                                      chunk_z=chunk_z,
                                      blend_xy=blend_xy,
                                      pad=pad,
                                      mip=mip,
                                      crop=crop,
                                      bcube=bcube)

    # register the job and execute until completion
    scheduler.register_job(invert_field_job,
                           job_name="Invert Field {}".format(bcube))
    scheduler.execute_until_completion()
def downsample_by_spec(ctx, src_layer_spec, spec_path, dst_layer_spec,
                       mip_start, mip_end, chunk_xy, chunk_z, mips_per_task,
                       start_coord, end_coord, coord_mip):
    scheduler = ctx.obj['scheduler']
    corgie_logger.debug("Setting up Source and Destination layers...")

    src_layer = create_layer_from_spec(src_layer_spec,
                                       caller_name='src layer',
                                       readonly=True)

    with open(spec_path, 'r') as f:
        spec = set(json.load(f))

    if dst_layer_spec is None:
        corgie_logger.info(
            "Destination layer not specified. Using Source layer "
            "as Destination.")
        dst_layer = src_layer
        dst_layer.readonly = False
    else:
        dst_layer = create_layer_from_spec(dst_layer_spec,
                                           caller_name='dst layer',
                                           readonly=False,
                                           reference=src_layer,
                                           chunk_z=chunk_z,
                                           overwrite=True)
    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)
    for z in range(*bcube.z_range()):
        if z in spec:
            job_bcube = bcube.reset_coords(zs=z, ze=z + 1, in_place=False)
            downsample_job = DownsampleJob(src_layer=src_layer,
                                           dst_layer=dst_layer,
                                           mip_start=mip_start,
                                           mip_end=mip_end,
                                           bcube=job_bcube,
                                           chunk_xy=chunk_xy,
                                           chunk_z=chunk_z,
                                           mips_per_task=mips_per_task)

            # register a single-section job; execution happens after the loop
            scheduler.register_job(downsample_job,
                                   job_name=f"Downsample {job_bcube}")
    scheduler.execute_until_completion()
    result_report = f"Downsampled {src_layer} from {mip_start} to {mip_end}. Result in {dst_layer}"
    corgie_logger.info(result_report)
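
# The spec file loaded above is assumed to be a JSON array of z indices: only
# sections whose z lies both in the requested bcube z range and in this set
# get a downsample job. A hypothetical spec_path payload:
#
# [100, 101, 105, 210]
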
def combine_masks(
    ctx,
    src_layer_spec,
    dst_layer_spec,
    exp,
    chunk_xy,
    chunk_z,
    force_chunk_xy,
    force_chunk_z,
    start_coord,
    end_coord,
    coord_mip,
    mip,
    pad,
):
    scheduler = ctx.obj["scheduler"]

    if not force_chunk_xy:
        force_chunk_xy = chunk_xy

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec, name="src", readonly=True)
    reference_layer = src_stack.reference_layer

    dst_layer = create_layer_from_spec(
        dst_layer_spec,
        allowed_types=["mask"],
        default_type="mask",
        readonly=False,
        caller_name="dst_layer",
        reference=reference_layer,
        force_chunk_xy=force_chunk_xy,
        force_chunk_z=force_chunk_z,
        overwrite=True,
    )
    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    combine_masks_job = CombineMasksJob(
        src_stack=src_stack,
        exp=json.loads(exp),
        dst_layer=dst_layer,
        mip=mip,
        bcube=bcube,
        pad=pad,
        chunk_xy=chunk_xy,
        chunk_z=chunk_z,
    )
    # register the job and execute until completion
    scheduler.register_job(combine_masks_job, job_name="Combine Masks {}".format(bcube))

    scheduler.execute_until_completion()
    result_report = f"Results in {str(dst_layer)}"
    corgie_logger.info(result_report)
def apply_processor_by_spec(ctx, src_layer_spec, spec_path, dst_layer_spec,
                            processor_spec, pad, crop, chunk_xy, start_coord,
                            processor_mip, end_coord, coord_mip, blend_xy,
                            chunk_z, reference_key):
    scheduler = ctx.obj['scheduler']

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name='src',
                                       readonly=True)

    with open(spec_path, 'r') as f:
        spec = set(json.load(f))

    reference_layer = None
    if reference_key in src_stack.layers:
        reference_layer = src_stack.layers[reference_key]

    dst_layer = create_layer_from_spec(dst_layer_spec,
                                       allowed_types=['img', 'mask'],
                                       default_type='img',
                                       readonly=False,
                                       caller_name='dst_layer',
                                       reference=reference_layer,
                                       overwrite=True)

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    if crop is None:
        crop = pad

    for z in range(*bcube.z_range()):
        if z in spec:
            job_bcube = bcube.reset_coords(zs=z, ze=z + 1, in_place=False)
            apply_processor_job = ApplyProcessorJob(
                src_stack=src_stack,
                dst_layer=dst_layer,
                chunk_xy=chunk_xy,
                chunk_z=chunk_z,
                blend_xy=blend_xy,
                processor_spec=processor_spec,
                pad=pad,
                crop=crop,
                bcube=job_bcube,
                processor_mip=processor_mip)

            # register a single-section job; execution happens after the loop
            scheduler.register_job(
                apply_processor_job,
                job_name="Apply Processor {}".format(job_bcube))
    scheduler.execute_until_completion()
def spec_to_layer_dict_overwrite(layer_specs, reference_layer, default_type):
    """Create dict of layers from a corgie spec indexed by unique id

    These layers will be of type overwrite

    Args:
        layer_specs (dict): layer specs indexed by unique id
        reference_layer (layer)
        default_type (str): e.g. img, field
    """
    layers = {}
    for k, s in layer_specs.items():
        layers[k] = create_layer_from_spec(json.dumps(s),
                                           default_type=default_type,
                                           readonly=False,
                                           reference=reference_layer,
                                           overwrite=True)
    return layers
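
# Usage sketch for spec_to_layer_dict_overwrite, with hypothetical spec
# contents; the exact keys a spec accepts are defined by
# create_layer_from_spec, and the reference layer is whatever layer the
# caller already holds (e.g. a stack's reference layer).
#
# layer_specs = {
#     "aligned_img": {"path": "gs://my-bucket/aligned/img", "type": "img"},
#     "aligned_field": {"path": "gs://my-bucket/aligned/field"},  # gets default_type
# }
# layers = spec_to_layer_dict_overwrite(layer_specs,
#                                       src_stack.reference_layer,
#                                       default_type="field")
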
def apply_processor(ctx, src_layer_spec, dst_layer_spec, processor_spec, pad,
                    crop, chunk_xy, start_coord, processor_mip, end_coord,
                    coord_mip, blend_xy, chunk_z, reference_key):
    scheduler = ctx.obj['scheduler']

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name='src',
                                       readonly=True)

    reference_layer = None
    if reference_key in src_stack.layers:
        reference_layer = src_stack.layers[reference_key]

    dst_layer = create_layer_from_spec(dst_layer_spec,
                                       allowed_types=['img'],
                                       default_type='img',
                                       readonly=False,
                                       caller_name='dst_layer',
                                       reference=reference_layer,
                                       overwrite=True)

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    if crop is None:
        crop = pad

    apply_processor_job = ApplyProcessorJob(src_stack=src_stack,
                                            dst_layer=dst_layer,
                                            chunk_xy=chunk_xy,
                                            chunk_z=chunk_z,
                                            blend_xy=blend_xy,
                                            processor_spec=processor_spec,
                                            pad=pad,
                                            crop=crop,
                                            bcube=bcube,
                                            processor_mip=processor_mip)

    # register the job and execute until completion
    scheduler.register_job(apply_processor_job,
                           job_name="Apply Processor {}".format(bcube))
    scheduler.execute_until_completion()
def compare_sections(
    ctx,
    src_layer_spec,
    tgt_layer_spec,
    dst_layer_spec,
    suffix,
    processor_spec,
    pad,
    chunk_xy,
    force_chunk_xy,
    start_coord,
    mip,
    dst_mip,
    end_coord,
    coord_mip,
    tgt_z_offset,
    reference_key,
):
    if suffix is None:
        suffix = ""
    else:
        suffix = f"_{suffix}"

    if not force_chunk_xy:
        force_chunk_xy = chunk_xy

    if dst_mip is None:
        dst_mip = mip

    scheduler = ctx.obj["scheduler"]

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name="src",
                                       readonly=True)

    tgt_stack = create_stack_from_spec(tgt_layer_spec,
                                       name="tgt",
                                       readonly=True,
                                       reference=src_stack)

    reference_layer = None
    if reference_key in src_stack.layers:
        reference_layer = src_stack.layers[reference_key]

    dst_layer = create_layer_from_spec(
        dst_layer_spec,
        allowed_types=["img", "mask"],
        default_type="field",
        readonly=False,
        caller_name="dst_layer",
        reference=reference_layer,
        force_chunk_xy=force_chunk_xy,
        overwrite=True,
    )

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    compare_job = CompareSectionsJob(
        src_stack=src_stack,
        tgt_stack=tgt_stack,
        dst_layer=dst_layer,
        chunk_xy=chunk_xy,
        processor_spec=processor_spec,
        pad=pad,
        bcube=bcube,
        tgt_z_offset=tgt_z_offset,
        suffix=suffix,
        mip=mip,
        dst_mip=dst_mip,
    )

    # register the job and execute until completion
    scheduler.register_job(
        compare_job,
        job_name="Compare Job {}, tgt z offset {}".format(bcube, tgt_z_offset),
    )
    scheduler.execute_until_completion()
def compute_stats_fn(ctx, src_layer_spec, dst_folder, suffix, mip, chunk_xy,
                     chunk_z, start_coord, end_coord, coord_mip):

    if chunk_z != 1:
        raise NotImplementedError("Compute Statistics command currently only "
                                  "supports per-section statistics.")

    scheduler = ctx.obj['scheduler']

    src_layer = create_layer_from_spec(src_layer_spec,
                                       caller_name='src layer',
                                       readonly=True)

    if suffix is None:
        suffix = ''
    else:
        suffix = '_' + suffix

    src_stack = create_stack_from_spec(src_layer_spec,
                                       name='src',
                                       readonly=True)

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    mask_layers = src_stack.get_layers_of_type(["mask"])
    non_mask_layers = src_stack.get_layers_of_type(["img", "field"])

    for l in non_mask_layers:
        mean_layer = src_layer.get_sublayer(
            name=f"mean{suffix}",
            path=os.path.join(dst_folder, f"mean{suffix}"),
            layer_type="section_value",
        )

        var_layer = src_layer.get_sublayer(
            name=f"var{suffix}",
            path=os.path.join(dst_folder, f"var{suffix}"),
            layer_type="section_value",
        )

        compute_stats_job = ComputeStatsJob(src_layer=l,
                                            mask_layers=mask_layers,
                                            mean_layer=mean_layer,
                                            var_layer=var_layer,
                                            bcube=bcube,
                                            mip=mip,
                                            chunk_xy=chunk_xy,
                                            chunk_z=chunk_z)

        # register a per-layer job; execution happens after the loop
        scheduler.register_job(
            compute_stats_job,
            job_name=f"Compute Stats. Layer: {l}, Bcube: {bcube}")
    scheduler.execute_until_completion()
def compute_field(
    ctx,
    src_layer_spec,
    tgt_layer_spec,
    dst_layer_spec,
    suffix,
    processor_spec,
    pad,
    crop,
    chunk_xy,
    start_coord,
    mip,
    end_coord,
    coord_mip,
    tgt_z_offset,
    chunk_z,
    reference_key,
):

    scheduler = ctx.obj["scheduler"]

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec, name="src", readonly=True)

    tgt_stack = create_stack_from_spec(
        tgt_layer_spec, name="tgt", readonly=True, reference=src_stack
    )

    reference_layer = None
    if reference_key in src_stack.layers:
        reference_layer = src_stack.layers[reference_key]
    dst_layer = create_layer_from_spec(
        dst_layer_spec,
        allowed_types=["field"],
        default_type="field",
        readonly=False,
        caller_name="dst_layer",
        reference=reference_layer,
    )

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    if crop is None:
        crop = pad

    compute_field_job = ComputeFieldJob(
        src_stack=src_stack,
        tgt_stack=tgt_stack,
        dst_layer=dst_layer,
        chunk_xy=chunk_xy,
        chunk_z=chunk_z,
        processor_spec=processor_spec,
        pad=pad,
        crop=crop,
        bcube=bcube,
        tgt_z_offset=tgt_z_offset,
        suffix=suffix,
        mip=mip,
    )

    # register the job and execute until completion
    scheduler.register_job(
        compute_field_job,
        job_name="Compute field {}, tgt z offset {}".format(bcube, tgt_z_offset),
    )
    scheduler.execute_until_completion()
def apply_processor(
    ctx,
    src_layer_spec,
    dst_layer_spec,
    spec_path,
    processor_spec,
    pad,
    crop,
    chunk_xy,
    start_coord,
    force_chunk_xy,
    processor_mip,
    end_coord,
    coord_mip,
    blend_xy,
    chunk_z,
    reference_key,
):
    scheduler = ctx.obj["scheduler"]

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec, name="src", readonly=True)

    reference_layer = None
    if reference_key in src_stack.layers:
        reference_layer = src_stack.layers[reference_key]

    if force_chunk_xy is None:
        force_chunk_xy = chunk_xy

    dst_layer = create_layer_from_spec(
        dst_layer_spec,
        allowed_types=["img", "mask", "section_value", "field"],
        default_type="img",
        readonly=False,
        caller_name="dst_layer",
        force_chunk_xy=force_chunk_xy,
        reference=reference_layer,
        overwrite=True,
    )

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    if crop is None:
        crop = pad

    if spec_path:
        with open(spec_path, "r") as f:
            spec = json.load(f)

        for z in spec:
            job_bcube = bcube.reset_coords(zs=z, ze=z + 1, in_place=False)
            apply_processor_job = ApplyProcessorJob(
                src_stack=src_stack,
                dst_layer=dst_layer,
                chunk_xy=chunk_xy,
                chunk_z=chunk_z,
                blend_xy=blend_xy,
                processor_spec=processor_spec,
                pad=pad,
                crop=crop,
                bcube=job_bcube,
                processor_mip=processor_mip,
            )

            # register a single-section job; execution happens after the loop
            scheduler.register_job(
                apply_processor_job, job_name="Apply Processor {}".format(job_bcube)
            )
    else:
        apply_processor_job = ApplyProcessorJob(
            src_stack=src_stack,
            dst_layer=dst_layer,
            chunk_xy=chunk_xy,
            chunk_z=chunk_z,
            blend_xy=blend_xy,
            processor_spec=processor_spec,
            pad=pad,
            crop=crop,
            bcube=bcube,
            processor_mip=processor_mip,
        )

        # register the job and execute until completion
        scheduler.register_job(
            apply_processor_job, job_name="Apply Processor {}".format(bcube)
        )
    scheduler.execute_until_completion()
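
# Note the contrast with downsample_by_spec above: there the spec file acts
# as a filter over the bcube z range, while here every z listed in the file
# gets its own single-section job regardless of the bcube's z extent. A
# hypothetical spec_path payload (a JSON array of z indices):
#
# [17, 42, 43]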