def downsample(ctx, src_layer_spec, dst_layer_spec, mip_start,
        mip_end, chunk_xy, chunk_z, mips_per_task, start_coord,
        end_coord, coord_mip):
    scheduler = ctx.obj['scheduler']
    corgie_logger.debug("Setting up Source and Destination layers...")

    src_layer = create_layer_from_spec(src_layer_spec,
            caller_name='src layer',
            readonly=True)

    if dst_layer_spec is None:
        corgie_logger.info("Destination layer not specified. Using Source layer "
                "as Destination.")
        dst_layer = src_layer
        dst_layer.readonly = False
    else:
        dst_layer = create_layer_from_spec(dst_layer_spec,
            caller_name='dst layer',
            readonly=False,
            reference=src_layer, chunk_z=chunk_z, overwrite=True)
    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)
    downsample_job = DownsampleJob(src_layer=src_layer,
                                   dst_layer=dst_layer,
                                   mip_start=mip_start,
                                   mip_end=mip_end,
                                   bcube=bcube,
                                   chunk_xy=chunk_xy,
                                   chunk_z=chunk_z,
                                   mips_per_task=mips_per_task)

    # create scheduler and execute the job
    scheduler.register_job(downsample_job, job_name="downsample")
    scheduler.execute_until_completion()
    result_report = f"Downsampled {src_layer} from {mip_start} to {mip_end}. Result in {dst_layer}"
    corgie_logger.info(result_report)
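For reference, a minimal sketch of the kind of layer spec consumed above. The keys mirror default_param_dict in create_layer_from_dict (shown later in this listing); the path is hypothetical, and depending on CLI wiring the spec may arrive as a JSON string rather than a dict.

src_layer_spec = {
    "path": "gs://example-bucket/img",  # hypothetical path
    "type": "img",
    "name": "img",
}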
Example #2
def create_skeletons(
    ctx,
    seg_layer_spec,
    dst_folder,
    timestamp,
    mip,
    teasar_scale,
    teasar_const,
    ids,
    ids_filepath,
    tick_threshold,
    chunk_xy,
    chunk_z,
    single_merge_mode,
    start_coord,
    end_coord,
    coord_mip,
):
    scheduler = ctx.obj["scheduler"]

    corgie_logger.debug("Setting up layers...")
    seg_stack = create_stack_from_spec(seg_layer_spec,
                                       name="src",
                                       readonly=True)
    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    object_ids = ids
    if ids_filepath is not None:
        object_ids = []
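        # assumed file format: one integer segment id per line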
        with open(ids_filepath, "r") as f:
            for line in f:
                object_ids.append(int(line))
    if object_ids is None or len(object_ids) == 0:
        raise ValueError("Must specify ids to skeletonize")
    object_ids = list(object_ids)
    teasar_params = {"scale": teasar_scale, "const": teasar_const}

    seg_layer = seg_stack.get_layers_of_type("segmentation")[0]
    skeleton_job = SkeletonJob(
        seg_layer=seg_layer,
        dst_path=dst_folder,
        timestamp=timestamp,
        bcube=bcube,
        chunk_xy=chunk_xy,
        chunk_z=chunk_z,
        mip=mip,
        teasar_params=teasar_params,
        object_ids=object_ids,
        tick_threshold=tick_threshold,
        single_merge_mode=single_merge_mode,
    )

    scheduler.register_job(skeleton_job,
                           job_name="Skeletonize {}".format(bcube))

    scheduler.execute_until_completion()
    result_report = f"Skeletonized {str(seg_layer)}. "
    corgie_logger.info(result_report)
Example #3
    def get_field(self, layer, bcube, mip, dist, **kwargs):
        """Get field, adjusted by distance

        Args:
            layer (Layer)
            bcube (BoundingCube)
            mip (int)
            dist (float)

        Returns:
            TorchField adjusted (blurred & attenuated) by distance
        """
        c = min(max(1.0 - (dist / self.decay_dist), 0.0), 1.0)
        sigma_mip = math.log(dist * self.blur_rate + 1, 2)
        lower_mip = math.floor(sigma_mip)
        upper_mip = math.ceil(sigma_mip)
        corgie_logger.debug(
            f"get_field, c={c:.3f}, mip={sigma_mip+mip:.3f}, lower_mip={lower_mip+mip}, upper_mip={upper_mip+mip}"
        )
        if lower_mip == upper_mip:
            f = layer.read(bcube=bcube, mip=lower_mip + mip, **kwargs).field_()
        else:
            lf = layer.read(bcube=bcube, mip=lower_mip + mip, **kwargs).field_()
            uf = layer.read(bcube=bcube, mip=upper_mip + mip, **kwargs).field_()
            uf = uf.up(mips=1) * 2
            # interpolate linearly between the two mip levels: alpha is the
            # weight of the coarser (upper-mip) field, so that alpha -> 0
            # reduces to lf and matches the lower_mip == upper_mip branch
            alpha = sigma_mip - lower_mip
            f = (1 - alpha) * lf + alpha * uf
        f = f.up(mips=lower_mip) * (2 ** lower_mip)
        return f * c
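To make the mip arithmetic above concrete, a standalone sketch with assumed values (decay_dist=200, blur_rate=0.1, dist=50):

import math

decay_dist, blur_rate, dist = 200.0, 0.1, 50.0     # assumed values
c = min(max(1.0 - (dist / decay_dist), 0.0), 1.0)  # 0.75: attenuation factor
sigma_mip = math.log(dist * blur_rate + 1, 2)      # log2(6) ~= 2.585
lower_mip = math.floor(sigma_mip)                  # 2
upper_mip = math.ceil(sigma_mip)                   # 3
alpha = sigma_mip - lower_mip                      # ~0.585: weight of the coarser field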
    def create(self, mip):
        corgie_logger.debug('Creating CloudVolume for {0} at MIP{1}'.format(
            self.path, mip))
        self.extend_info_to_mip(mip)

        self.cvs[mip] = self.obj(self.path, mip=mip, **self.cv_params)
Example #5
def transform_skeletons(
    ctx,
    vector_field_spec,
    src_folder,
    dst_folder,
    field_mip,
    ids,
    ids_filepath,
    task_vertex_size,
    calculate_skeleton_lengths,
    mip0_field,
):
    scheduler = ctx.obj["scheduler"]

    corgie_logger.debug("Setting up layers...")
    vf_stack = create_stack_from_spec(vector_field_spec,
                                      name="src",
                                      readonly=True)

    skeleton_ids = ids
    if ids_filepath is not None:
        skeleton_ids = []
        with open(ids_filepath, "r") as f:
            for line in f:
                skeleton_ids.append(int(line))
    if skeleton_ids is None or len(skeleton_ids) == 0:
        skeleton_ids = None
    else:
        skeleton_ids = list(skeleton_ids)

    skeleton_length_file = None
    if calculate_skeleton_lengths:
        import time

        if not os.path.exists("skeleton_lengths"):
            os.makedirs("skeleton_lengths")
        skeleton_length_file = f"skeleton_lengths/skeleton_lengths_{int(time.time())}"

    vf_layer = vf_stack.get_layers_of_type("field")[0]
    transform_skeletons_job = TransformSkeletonsJob(
        vector_field_layer=vf_layer,
        src_path=src_folder,
        dst_path=dst_folder,
        field_mip=field_mip,
        skeleton_ids=skeleton_ids,
        task_vertex_size=task_vertex_size,
        skeleton_length_file=skeleton_length_file,
        mip0_field=mip0_field,
    )

    scheduler.register_job(
        transform_skeletons_job,
        job_name="Transforming skeletons in {}".format(src_folder),
    )

    scheduler.execute_until_completion()
    result_report = f"Transformed skeletons stored at {dst_folder}. "
    corgie_logger.info(result_report)
Example #6
    def read_backend(self, bcube, mip):
        x_range = bcube.x_range(mip)
        y_range = bcube.y_range(mip)
        z_range = bcube.z_range()

        this_cv = self.cv[mip]
        x_off, y_off, z_off = this_cv.voxel_offset
        '''if x_range[0] < x_off:
            corgie_logger.debug(f"READ from {str(self)}: \n"
                    f"   reducing xs from {x_range[0]} to {x_off} MIP: {mip}")
            x_range[0] = x_off
        if y_range[0] < y_off:
            corgie_logger.debug(f"READ from {str(self)}: \n"
                    f"   reducing ys from {y_range[0]} to {y_off} MIP: {mip}")
            y_range[0] = y_off
        if z_range[0] < z_off:
            corgie_logger.debug(f"READ from {str(self)}: \n"
                    f"   reducing zs from {z_range[0]} to {z_off} MIP: {mip}")
            z_range[0] = z_off'''

        corgie_logger.debug(
            "READ from {}: \n   x: {}, y: {}, z: {}, MIP: {}".format(
                str(self), x_range, y_range, z_range, mip))
        data = self.cv[mip][x_range[0]:x_range[1], y_range[0]:y_range[1],
                            z_range[0]:z_range[1]]
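        # reorder CloudVolume's (x, y, z, channel) layout to (z, channel, x, y)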
        data = np.transpose(data, (2, 3, 0, 1))
        return data
Example #7
    def execute(self):
        corgie_logger.debug(f"ComputeFieldTask")
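        # pad the requested bcube so the processor sees surrounding context;
        # the predicted field is cropped back to size before the write below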
        src_bcube = self.bcube.uncrop(self.pad, self.mip)
        tgt_bcube = src_bcube.translate(z_offset=self.tgt_z_offset)

        processor = procspec.parse_proc(spec_str=self.processor_spec)

        corgie_logger.debug(f"Read target")
        tgt_translation, tgt_data_dict = self.tgt_stack.read_data_dict(
            tgt_bcube, mip=self.mip, stack_name="tgt")

        # Compensate if target was moved to one side a lot
        # tgt_drift = helpers.percentile_trans_adjuster(
        #     tgt_data_dict["tgt_agg_field"], unaligned_img=tgt_data_dict["tgt_img"]
        # )
        tgt_drift = helpers.Translation(0, 0)
        corgie_logger.debug(f"Read source")
        src_translation, src_data_dict = self.src_stack.read_data_dict(
            src_bcube, mip=self.mip, stack_name="src", translation=tgt_drift)
        # translation_adjuster=helpers.percentile_trans_adjuster)
        # )
        processor_input = {**src_data_dict, **tgt_data_dict}

        corgie_logger.debug(f"Compute field")
        predicted_field = processor(processor_input, output_key="src_cf_field")

        predicted_field.x += tgt_drift.x
        predicted_field.y += tgt_drift.y

        cropped_field = helpers.crop(predicted_field, self.crop)
        corgie_logger.debug(f"Write field")
        self.dst_layer.write(cropped_field, bcube=self.bcube, mip=self.mip)
Example #8
def render(ctx, src_layer_spec, dst_folder, pad, render_masks, blackout_masks,
           mip, chunk_xy, chunk_z, start_coord, end_coord, coord_mip, suffix):
    scheduler = ctx.obj['scheduler']

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name='src',
                                       readonly=True)

    dst_stack = stack.create_stack_from_reference(reference_stack=src_stack,
                                                  folder=dst_folder,
                                                  name="dst",
                                                  types=["img", "mask"])

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    render_job = RenderJob(src_stack=src_stack,
                           dst_stack=dst_stack,
                           mip=mip,
                           pad=pad,
                           bcube=bcube,
                           chunk_xy=chunk_xy,
                           chunk_z=chunk_z,
                           render_masks=render_masks,
                           blackout_masks=blackout_masks)

    # create scheduler and execute the job
    scheduler.register_job(render_job, job_name="Render {}".format(bcube))
    scheduler.execute_until_completion()
Example #9
    def execute(self):
        radii = [i for r in range(1, self.radius + 1) for i in [r, -r]]
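        # e.g., radius=2 gives [1, -1, 2, -2]: nearest sections first,
        # alternating between the sections ahead of and behind this one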
        z = self.bcube.z_range()[0]
        _, src_data_dict = self.src_stack.read_data_dict(
            bcube=self.bcube,
            mip=self.mip,
            add_prefix=False,
            translation_adjuster=None,
        )
        layer = self.dst_stack.get_layers_of_type("img")[0]
        agg_img = src_data_dict[f"{layer.name}"]
        agg_mask = self.get_masks(data_dict=src_data_dict, bcube=self.bcube)
        mask_count = agg_mask.sum()
        k = 0
        while (mask_count > self.count_threshold) and (k < len(radii)):
            corgie_logger.debug(f"mask_count={mask_count}")
            corgie_logger.debug(f"radius={radii[k]}")
            bcube = self.bcube.reset_coords(zs=z + radii[k],
                                            ze=z + radii[k] + 1,
                                            in_place=False)
            _, src_data_dict = self.src_stack.read_data_dict(
                bcube=bcube,
                mip=self.mip,
                add_prefix=False,
                translation_adjuster=None,
            )
            img = src_data_dict[f"{layer.name}"]
            agg_img[agg_mask] = img[agg_mask]
            mask = self.get_masks(data_dict=src_data_dict, bcube=bcube)
            agg_mask = (agg_mask == 1) * (mask == 1)
            mask_count = agg_mask.sum()
            k += 1
        layer.write(agg_img, bcube=self.bcube, mip=self.mip)
Example #10
def merge_render(
    ctx,
    spec_path,
    dst_folder,
    chunk_xy,
    pad,
    start_coord,
    end_coord,
    coord_mip,
    force_chunk_xy,
    mip,
    suffix,
):

    scheduler = ctx.obj["scheduler"]
    if suffix is None:
        suffix = ""
    else:
        suffix = f"_{suffix}"

    corgie_logger.debug("Setting up layers...")
    # create layers
    # collect image paths
    # collect mask paths

    if not force_chunk_xy:
        force_chunk_xy = chunk_xy

    with open(spec_path, "r") as f:
        spec = json.load(f)

    src_layers = spec_to_layer_dict_readonly(spec["src"])
    reference_layer = src_layers[list(src_layers.keys())[0]]
    dst_layer = create_layer_from_dict(
        {"path": dst_folder, "type": "img"},
        reference=reference_layer,
        force_chunk_xy=force_chunk_xy,
        overwrite=True
    )

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    for z in range(*bcube.z_range()):
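        # job_specs is assumed to be keyed by z index serialized as a string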
        tgt_z = str(z)
        if tgt_z in spec["job_specs"]:
            job_bcube = bcube.reset_coords(zs=z, ze=z + 1, in_place=False)
            render_job = MergeRenderJob(
                src_layers=src_layers,
                src_specs=spec["job_specs"][tgt_z],
                dst_layer=dst_layer,
                mip=mip,
                pad=pad,
                bcube=job_bcube,
                chunk_xy=chunk_xy,
            )
            scheduler.register_job(
                render_job, job_name="MergeRender {}".format(job_bcube)
            )
    scheduler.execute_until_completion()
    def task_generator(self):
        self.dst_layer.declare_write_region(self.bcube,
                    mips=self.processor_mip, chunk_xy=self.chunk_xy,
                    chunk_z=self.chunk_z)

        all_layers = self.src_stack.get_layers() + self.tgt_stack.get_layers()

        for i in range(len(self.processor_spec)):
            this_proc = self.processor_spec[i]
            this_proc_mip = self.processor_mip[i]
            is_last_proc = i == len(self.processor_spec) - 1

            chunks = self.dst_layer.break_bcube_into_chunks(
                        bcube=self.bcube,
                        chunk_xy=self.chunk_xy,
                        chunk_z=self.chunk_z,
                        mip=this_proc_mip)

            tasks = [ComputeFieldTask(src_stack=self.src_stack,
                                      tgt_stack=self.tgt_stack,
                                      dst_layer=self.dst_layer,
                                      processor_spec=this_proc,
                                      mip=this_proc_mip,
                                      pad=self.pad,
                                      crop=self.crop,
                                      tgt_z_offset=self.tgt_z_offset,
                                      bcube=chunk) for chunk in chunks]

            corgie_logger.debug("Yielding CF tasks for bcube: {}, MIP: {}".format(
                self.bcube, this_proc_mip))
            yield tasks

            if not is_last_proc:
                yield scheduling.wait_until_done
                next_proc_mip = self.processor_mip[i + 1]
                if this_proc_mip > next_proc_mip:
                    downsample_job = DownsampleJob(
                                src_layer=self.dst_layer,
                                chunk_xy=self.chunk_xy,
                                chunk_z=self.chunk_z,
                                mip_start=this_proc_mip,
                                mip_end=next_proc_mip,
                                bcube=self.bcube
                                )
                    yield from downsample_job.task_generator
                    yield scheduling.wait_until_done


        if self.processor_mip[0] > self.processor_mip[-1]:
            # good manners
            # prepare the ground for the next you
            downsample_job = DownsampleJob(
                            src_layer=self.dst_layer,
                            chunk_xy=self.chunk_xy,
                            chunk_z=self.chunk_z,
                            mip_start=self.processor_mip[-1],
                            mip_end=self.processor_mip[0],
                            bcube=self.bcube
                            )
            yield from downsample_job.task_generator
Example #12
def multi_section_compare(
    ctx,
    src_layer_spec,
    dst_folder,
    chunk_xy,
    pad,
    crop,
    force_chunk_xy,
    z_offsets,
    processor_spec,
    processor_mip,
    start_coord,
    end_coord,
    coord_mip,
):

    scheduler = ctx.obj["scheduler"]

    if crop is None:
        crop = pad

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)
    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name="src",
                                       readonly=True)

    if force_chunk_xy is None:
        force_chunk_xy = chunk_xy

    dst_stack = stack.Stack(name="dst", folder=dst_folder)
    dst_stack.reference_layer = src_stack.get_layers()[0]

    for z_offset in z_offsets:
        dst_stack.create_sublayer(
            name=z_offset,
            layer_type="img",
            dtype="float32",
            force_chunk_xy=force_chunk_xy,
            overwrite=True,
        )

    multi_section_compare_job = MultiSectionCompareJob(
        src_stack=src_stack,
        dst_stack=dst_stack,
        chunk_xy=chunk_xy,
        bcube=bcube,
        pad=pad,
        processor_spec=processor_spec,
        mip=processor_mip[0],
        dst_mip=processor_mip[0],
    )
    scheduler.register_job(
        multi_section_compare_job,
        job_name=f"MultiSectionCompare {bcube}",
    )

    scheduler.execute_until_completion()
    corgie_logger.debug("Done!")
Example #13
    def read_backend(self, bcube, **kwargs):
        z_range = bcube.z_range()
        corgie_logger.debug(f'Read from {str(self)}, z: {z_range}')
        data = []
        for z in range(*z_range):
            f = self.cf.get_json(self.get_filename(z))
            data.append(f)
        return data
Example #14
def compute_field(ctx, src_layer_spec, tgt_layer_spec, dst_layer_spec, suffix,
                  processor_spec, pad, crop, chunk_xy, start_coord,
                  processor_mip, end_coord, coord_mip, blend_xy, tgt_z_offset,
                  chunk_z, reference_key, clear_nontissue_field):
    if suffix is None:
        suffix = ''
    else:
        suffix = f"_{suffix}"

    scheduler = ctx.obj['scheduler']

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name='src',
                                       readonly=True)

    tgt_stack = create_stack_from_spec(tgt_layer_spec,
                                       name='tgt',
                                       readonly=True,
                                       reference=src_stack)

    reference_layer = None
    if reference_key in src_stack.layers:
        reference_layer = src_stack.layers[reference_key]
    dst_layer = create_layer_from_spec(dst_layer_spec,
                                       allowed_types=['field'],
                                       default_type='field',
                                       readonly=False,
                                       caller_name='dst_layer',
                                       reference=reference_layer,
                                       overwrite=True)

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    if crop is None:
        crop = pad

    compute_field_job = ComputeFieldJob(
        src_stack=src_stack,
        tgt_stack=tgt_stack,
        dst_layer=dst_layer,
        chunk_xy=chunk_xy,
        chunk_z=chunk_z,
        blend_xy=blend_xy,
        processor_spec=processor_spec,
        pad=pad,
        crop=crop,
        bcube=bcube,
        tgt_z_offset=tgt_z_offset,
        suffix=suffix,
        processor_mip=processor_mip,
        clear_nontissue_field=clear_nontissue_field)

    # create scheduler and execute the job
    scheduler.register_job(compute_field_job,
                           job_name="Compute field {}, tgt z offset {}".format(
                               bcube, tgt_z_offset))
    scheduler.execute_until_completion()
Example #15
def fill_nearest(
    ctx,
    src_layer_spec,
    dst_folder,
    chunk_xy,
    start_coord,
    end_coord,
    coord_mip,
    suffix,
    mip,
    radius,
    force_chunk_z=1,
):
    scheduler = ctx.obj["scheduler"]

    if suffix is None:
        suffix = "_seethrough"
    else:
        suffix = f"_{suffix}"

    crop, pad = 0, 0
    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name="src",
                                       readonly=True)
    src_stack.folder = dst_folder
    dst_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=dst_folder,
        name="dst",
        types=["img", "mask"],
        readonly=False,
        suffix=suffix,
        overwrite=True,
        force_chunk_z=force_chunk_z,
    )
    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    fill_nearest_job = FillNearestJob(
        src_stack=src_stack,
        dst_stack=dst_stack,
        bcube=bcube,
        radius=radius,
        mip=mip,
        chunk_xy=chunk_xy,
    )
    # create scheduler and execute the job
    scheduler.register_job(fill_nearest_job,
                           job_name="Fill Nearest Block {}".format(bcube))

    scheduler.execute_until_completion()
    result_report = (
        f"Rendered layers {[str(l) for l in src_stack.get_layers_of_type('img')]}. "
        f"Results in {[str(l) for l in dst_stack.get_layers_of_type('img')]}")
    corgie_logger.info(result_report)
Example #16
def cli(ctx, queue_name, device, verbose):
    # This little hack lets us make group options look like
    # child command options, and at the same time only execute
    # the setup once
    if ctx.invoked_subcommand is None:
        configure_logger(verbose)
        ctx.obj = {}
        DataBackendBase.default_device = device
        corgie_logger.debug("Creting scheduler...")
        ctx.obj['scheduler'] = create_scheduler(queue_name=queue_name)
        corgie_logger.debug("Scheduler created.")
Example #17
def combine_masks(
    ctx,
    src_layer_spec,
    dst_layer_spec,
    exp,
    chunk_xy,
    chunk_z,
    force_chunk_xy,
    force_chunk_z,
    start_coord,
    end_coord,
    coord_mip,
    mip,
    pad,
):
    scheduler = ctx.obj["scheduler"]

    if not force_chunk_xy:
        force_chunk_xy = chunk_xy

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec, name="src", readonly=True)
    reference_layer = src_stack.reference_layer

    dst_layer = create_layer_from_spec(
        dst_layer_spec,
        allowed_types=["mask"],
        default_type="mask",
        readonly=False,
        caller_name="dst_layer",
        reference=reference_layer,
        force_chunk_xy=force_chunk_xy,
        force_chunk_z=force_chunk_z,
        overwrite=True,
    )
    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    combine_masks_job = CombineMasksJob(
        src_stack=src_stack,
        exp=json.loads(exp),
        dst_layer=dst_layer,
        mip=mip,
        bcube=bcube,
        pad=pad,
        chunk_xy=chunk_xy,
        chunk_z=chunk_z,
    )
    # create scheduler and execute the job
    scheduler.register_job(combine_masks_job, job_name="Combine Masks {}".format(bcube))

    scheduler.execute_until_completion()
    result_report = f"Results in {str(dst_layer)}"
    corgie_logger.info(result_report)
Example #18
    def write_backend(self, data, bcube, mip):
        x_range = bcube.x_range(mip)
        y_range = bcube.y_range(mip)
        z_range = bcube.z_range()

        data = np.transpose(data, (2, 3, 0, 1))
        corgie_logger.debug(
            "Write to {}: \n x: {}, y: {}, z: {}, MIP: {}".format(
                str(self), x_range, y_range, z_range, mip))
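        # autocrop makes CloudVolume silently clip writes that extend
        # past the volume bounds instead of raising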
        self.cv[mip].autocrop = True
        self.cv[mip][x_range[0]:x_range[1], y_range[0]:y_range[1],
                     z_range[0]:z_range[1]] = data
        self.cv[mip].autocrop = False
Example #19
def create_layer_from_dict(param_dict,
                           reference=None,
                           caller_name=None,
                           allowed_types=None,
                           default_type=None,
                           **kwargs):
    if default_type is None:
        default_type = DEFAULT_LAYER_TYPE
    default_param_dict = {
        "path": None,
        "name": None,
        "type": default_type,
        "data_backend": DEFAULT_DATA_BACKEND,
        "args": {},
        "readonly": False
    }
    for k in param_dict.keys():
        if k not in default_param_dict:
            raise exceptions.CorgieException(f"Unknown layer parameter '{k}'")
    params = {**default_param_dict, **param_dict}

    if params["path"] is None:
        arg_spec = '"path" key in layer specification'
        if caller_name is not None:
            arg_spec += ' of {}'.format(caller_name)
        raise exceptions.ArgumentError(arg_spec, 'not given')

    layer_path = params["path"]
    layer_type = params["type"]
    layer_args = params["args"]
    data_backend = params["data_backend"]
    corgie_logger.debug("Parsing layer path: {}".format(layer_path))

    if allowed_types is not None and layer_type not in allowed_types:
        raise exceptions.ArgumentError("layer_type",
                                       f'must be of type in {allowed_types}')

    backend = str_to_backend(data_backend)()

    layer = backend.create_layer(path=layer_path,
                                 layer_type=layer_type,
                                 reference=reference,
                                 layer_args=layer_args,
                                 **kwargs)

    name = params["name"]
    if name is None:
        name = layer.get_layer_type()
    layer.name = name

    return layer
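A usage sketch for the function above, using only keys from default_param_dict; the path and name values are hypothetical.

layer = create_layer_from_dict(
    {
        "path": "gs://example-bucket/defect_mask",  # hypothetical path
        "name": "defect_mask",
        "type": "mask",
    },
    caller_name="example",
)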
def apply_processor_by_spec(ctx, src_layer_spec, spec_path, dst_layer_spec,
                            processor_spec, pad, crop, chunk_xy, start_coord,
                            processor_mip, end_coord, coord_mip, blend_xy,
                            chunk_z, reference_key):
    scheduler = ctx.obj['scheduler']

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name='src',
                                       readonly=True)

    with open(spec_path, 'r') as f:
        spec = set(json.load(f))

    reference_layer = None
    if reference_key in src_stack.layers:
        reference_layer = src_stack.layers[reference_key]

    dst_layer = create_layer_from_spec(dst_layer_spec,
                                       allowed_types=['img', 'mask'],
                                       default_type='img',
                                       readonly=False,
                                       caller_name='dst_layer',
                                       reference=reference_layer,
                                       overwrite=True)

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    if crop is None:
        crop = pad

    for z in range(*bcube.z_range()):
        if z in spec:
            job_bcube = bcube.reset_coords(zs=z, ze=z + 1, in_place=False)
            apply_processor_job = ApplyProcessorJob(
                src_stack=src_stack,
                dst_layer=dst_layer,
                chunk_xy=chunk_xy,
                chunk_z=chunk_z,
                blend_xy=blend_xy,
                processor_spec=processor_spec,
                pad=pad,
                crop=crop,
                bcube=job_bcube,
                processor_mip=processor_mip)

            # create scheduler and execute the job
            scheduler.register_job(
                apply_processor_job,
                job_name="Apply Processor {}".format(job_bcube))
    scheduler.execute_until_completion()
Example #21
def render(ctx, src_layer_spec, dst_folder, pad, render_masks, blackout_masks,
           seethrough, chunk_xy, chunk_z, start_coord, end_coord, mips,
           coord_mip, force_chunk_xy, force_chunk_z, suffix):
    scheduler = ctx.obj['scheduler']

    if suffix is None:
        suffix = '_rendered'
    else:
        suffix = f"_{suffix}"

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name='src',
                                       readonly=True)

    if force_chunk_xy:
        force_chunk_xy = chunk_xy
    else:
        force_chunk_xy = None

    if force_chunk_z:
        force_chunk_z = chunk_z
    else:
        force_chunk_z = None

    dst_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=dst_folder,
        name="dst",
        types=["img", "mask"],
        force_chunk_xy=force_chunk_xy,
        force_chunk_z=force_chunk_z,
        suffix=suffix,
        overwrite=True)

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    render_job = RenderJob(src_stack=src_stack,
                           dst_stack=dst_stack,
                           mips=mips,
                           pad=pad,
                           bcube=bcube,
                           chunk_xy=chunk_xy,
                           chunk_z=chunk_z,
                           render_masks=render_masks,
                           blackout_masks=blackout_masks,
                           seethrough=seethrough)

    # create scheduler and execute the job
    scheduler.register_job(render_job, job_name="Render {}".format(bcube))
    scheduler.execute_until_completion()
Example #22
def copy(ctx, src_layer_spec, dst_folder, copy_masks, blackout_masks, chunk_xy,
         chunk_z, start_coord, end_coord, coord_mip, mip, suffix,
         force_chunk_xy, force_chunk_z):

    scheduler = ctx.obj['scheduler']
    if suffix is None:
        suffix = ''
    else:
        suffix = f"_{suffix}"

    corgie_logger.debug("Setting up layers...")
    src_stack = create_stack_from_spec(src_layer_spec,
                                       name='src',
                                       readonly=True)

    if force_chunk_xy:
        force_chunk_xy = chunk_xy
    else:
        force_chunk_xy = None

    if force_chunk_z:
        force_chunk_z = chunk_z
    else:
        force_chunk_z = None

    dst_stack = stack.create_stack_from_reference(
        reference_stack=src_stack,
        folder=dst_folder,
        name="dst",
        types=["img", "mask"],
        readonly=False,
        suffix=suffix,
        force_chunk_xy=force_chunk_xy,
        force_chunk_z=force_chunk_z,
        overwrite=True)

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    copy_job = CopyJob(src_stack=src_stack,
                       dst_stack=dst_stack,
                       mip=mip,
                       bcube=bcube,
                       chunk_xy=chunk_xy,
                       chunk_z=chunk_z,
                       copy_masks=copy_masks,
                       blackout_masks=blackout_masks)

    # create scheduler and execute the job
    scheduler.register_job(copy_job, job_name="Copy {}".format(bcube))
    scheduler.execute_until_completion()
Example #23
def cli(ctx, device, verbose, **kwargs):
    # This little hack lets us make group options look like
    # child command options, and at the same time only execute
    # the setup once
    if ctx.invoked_subcommand is None:
        configure_logger(verbose)
        ctx.obj = {}
        DataBackendBase.default_device = device
        corgie_logger.debug("Creating scheduler...")
        kwargs['command_name'] = ctx.command.name
        ctx.obj['scheduler'] = scheduling.parse_scheduler_from_kwargs(
                kwargs)

        corgie_logger.debug("Scheduler created.")
Example #24
    def task_generator(self):
        seethrough_offset = -1
        seethrough_mask_layer = self.dst_stack.create_sublayer(
            f"seethrough_mask{self.suffix}",
            layer_type="mask",
            overwrite=True,
            force_chunk_z=1,
        )
        z_start, z_stop = self.bcube.z_range()
        for z in range(z_start, z_stop):
            bcube = self.bcube.reset_coords(zs=z, ze=z + 1, in_place=False)
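            # the first section has no previously rendered neighbor to see
            # through to, so it is copied over directly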
            if z == z_start:
                corgie_logger.debug(f"Copy section {z}")
                render_job = self.render_method(
                    src_stack=self.src_stack,
                    dst_stack=self.dst_stack,
                    bcube=bcube,
                    mips=self.seethrough_method.mip,
                    blackout_masks=True,
                )

                yield from render_job.task_generator
                yield scheduling.wait_until_done
            else:
                # Now, we'll apply misalignment detection to produce a mask
                # this mask will be used in the final render step
                seethrough_mask_job = self.seethrough_method(
                    src_stack=self.src_stack,
                    tgt_stack=self.dst_stack,
                    bcube=bcube,
                    tgt_z_offset=seethrough_offset,
                    suffix=self.suffix,
                    dst_layer=seethrough_mask_layer,
                )

                yield from seethrough_mask_job.task_generator
                yield scheduling.wait_until_done
                corgie_logger.debug(f"Render {z}")
                render_job = self.render_method(
                    src_stack=self.src_stack,
                    dst_stack=self.dst_stack,
                    bcube=bcube,
                    blackout_masks=False,
                    seethrough_mask_layer=seethrough_mask_layer,
                    seethrough_offset=seethrough_offset,
                    mips=self.seethrough_method.mip,
                )
                yield from render_job.task_generator
                yield scheduling.wait_until_done
    def noblend_task_generator(self):
        chunks = self.dst_layer.break_bcube_into_chunks(bcube=self.bcube,
                                                        chunk_xy=self.chunk_xy,
                                                        chunk_z=self.chunk_z,
                                                        mip=self.mip)

        tasks = [
            self.task_class(dst_layer=self.dst_layer,
                            mip=self.mip,
                            bcube=chunk) for chunk in chunks
        ]

        corgie_logger.debug(f"Yielding {type(tasks[0])} tasks "\
                f"for bcube: {self.bcube}, MIP: {self.mip}")
        yield tasks
Example #26
    def execute(self):
        device = "cuda" if torch.cuda.is_available() else "cpu"
        padded_bcube = self.bcube.uncrop(self.pad, self.mip)
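        # specs are walked in reverse so that, where masks overlap, earlier
        # (presumably higher-priority) specs are written last and win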
        for k, specs in enumerate(self.src_specs[::-1]):
            src_z = specs["src_z"]
            dst_z = self.bcube.z_range()[0]

            corgie_logger.info(f"Load fields for {padded_bcube}")
            # backwards compatible
            if not isinstance(specs["src_field"], list):
                specs["src_field"] = [specs["src_field"]]
            mask_layer = self.src_layers[str(specs["src_mask"])]

            field_ids = list(map(str, specs["src_field"]))
            corgie_logger.info(f"field ids={field_ids}")
            z_list = specs.get("src_field_z", [src_z] * len(field_ids))
            fields = FieldSet([self.src_layers[n] for n in field_ids])
            field = fields.read(
                bcube=padded_bcube, z_list=z_list, mip=self.mip, device=device
            )
            bcube = padded_bcube.reset_coords(zs=src_z, ze=src_z + 1, in_place=False)

            mask_trans = helpers.percentile_trans_adjuster(field)
            mask_trans = mask_trans.round_to_mip(self.mip, mask_layer.data_mip)
            corgie_logger.debug(f"mask_trans: {mask_trans}")

            mask_bcube = bcube.translate(
                x_offset=mask_trans.y, y_offset=mask_trans.x, mip=self.mip
            )

            corgie_logger.info(f"Load masks for {mask_bcube}")
            mask_id = specs["mask_id"]
            mask_layer.binarizer = helpers.Binarizer(["eq", mask_id])
            mask = mask_layer.read(bcube=mask_bcube, mip=self.mip, device=device)
            mask = residuals.res_warp_img(
                mask.float(), field - mask_trans.to_tensor(device=field.device)
            ).tensor()
            mask = (mask > 0.4).bool()
            cropped_mask = helpers.crop(mask, self.pad)

            relabel_id = torch.as_tensor(specs.get("relabel_id", k + 1), dtype=torch.uint8)
            if k == 0:
                dst_img = cropped_mask * relabel_id
                dst_img[~cropped_mask] = 0
            else:
                dst_img[cropped_mask] = cropped_mask[cropped_mask] * relabel_id

        self.dst_layer.write(dst_img.cpu(), bcube=self.bcube, mip=self.mip)
Example #27
    def read_backend(self, bcube, mip, transpose=True, timestamp=None, **kwargs):
        x_range = bcube.x_range(mip)
        y_range = bcube.y_range(mip)
        z_range = bcube.z_range()

        this_cv = self.cv[mip]
        x_off, y_off, z_off = this_cv.voxel_offset
        """if x_range[0] < x_off:
            corgie_logger.debug(f"READ from {str(self)}: \n"
                    f"   reducing xs from {x_range[0]} to {x_off} MIP: {mip}")
            x_range[0] = x_off
        if y_range[0] < y_off:
            corgie_logger.debug(f"READ from {str(self)}: \n"
                    f"   reducing ys from {y_range[0]} to {y_off} MIP: {mip}")
            y_range[0] = y_off
        if z_range[0] < z_off:
            corgie_logger.debug(f"READ from {str(self)}: \n"
                    f"   reducing zs from {z_range[0]} to {z_off} MIP: {mip}")
            z_range[0] = z_off"""

        corgie_logger.debug(
            "READ from {}: \n   x: {}, y: {}, z: {}, MIP: {}".format(
                str(self), x_range, y_range, z_range, mip
            )
        )
        if timestamp is not None:
            data = self.cv[mip].download(
                bbox=(
                    slice(x_range[0], x_range[1], None),
                    slice(y_range[0], y_range[1], None),
                    slice(z_range[0], z_range[1], None),
                ),
                mip=mip,
                preserve_zeros=True,
                parallel=self.cv[mip].parallel,
                agglomerate=True,
                timestamp=timestamp,
            )
        else:
            data = self.cv[mip][
                x_range[0] : x_range[1],
                y_range[0] : y_range[1],
                z_range[0] : z_range[1],
            ]
        if transpose:
            data = np.transpose(data, (2, 3, 0, 1))
        return data
def filter_skeletons(
    ctx,
    src_folder,
    dst_folder,
    ids,
    bad_sections,
    ids_filepath,
    z_start,
    z_end,
):
    scheduler = ctx.obj["scheduler"]

    corgie_logger.debug("Setting up layers...")

    skeleton_ids = ids
    if ids_filepath is not None:
        skeleton_ids = []
        with open(ids_filepath, "r") as f:
            for line in f:
                skeleton_ids.append(int(line))

    if skeleton_ids is None or len(skeleton_ids) == 0:
        skeleton_ids = None
    else:
        skeleton_ids = list(skeleton_ids)

    transform_skeletons_job = FilterSkeletonsJob(
        src_path=src_folder,
        dst_path=dst_folder,
        skeleton_ids=skeleton_ids,
        bad_sections=bad_sections,
        z_start=z_start,
        z_end=z_end
    )

    scheduler.register_job(
        transform_skeletons_job,
        job_name="Filtering skeletons in {}".format(src_folder),
    )

    scheduler.execute_until_completion()
    result_report = f"Filtered skeletons stored at {dst_folder}. "
    corgie_logger.info(result_report)
def invert_field(ctx, src_layer_spec, dst_layer_spec, pad, crop, chunk_xy,
                 start_coord, mip, end_coord, coord_mip, blend_xy, chunk_z,
                 force_chunk_xy):
    scheduler = ctx.obj['scheduler']

    if force_chunk_xy:
        force_chunk_xy = chunk_xy
    else:
        force_chunk_xy = None

    corgie_logger.debug("Setting up layers...")
    src_layer = create_layer_from_spec(src_layer_spec,
                                       allowed_types=['field'],
                                       default_type='field',
                                       readonly=True,
                                       caller_name='src_layer')

    dst_layer = create_layer_from_spec(dst_layer_spec,
                                       allowed_types=['field'],
                                       default_type='field',
                                       readonly=False,
                                       caller_name='dst_layer',
                                       reference=src_layer,
                                       overwrite=True,
                                       force_chunk_xy=force_chunk_xy)

    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)

    if crop is None:
        crop = pad

    invert_field_job = InvertFieldJob(src_layer=src_layer,
                                      dst_layer=dst_layer,
                                      chunk_xy=chunk_xy,
                                      chunk_z=chunk_z,
                                      blend_xy=blend_xy,
                                      pad=pad,
                                      mip=mip,
                                      crop=crop,
                                      bcube=bcube)

    # create scheduler and execute the job
    scheduler.register_job(invert_field_job,
                           job_name="Invert Field {}".format(bcube))
    scheduler.execute_until_completion()
Example #30
def downsample_by_spec(ctx, src_layer_spec, spec_path, dst_layer_spec,
                       mip_start, mip_end, chunk_xy, chunk_z, mips_per_task,
                       start_coord, end_coord, coord_mip):
    scheduler = ctx.obj['scheduler']
    corgie_logger.debug("Setting up Source and Destination layers...")

    src_layer = create_layer_from_spec(src_layer_spec,
                                       caller_name='src layer',
                                       readonly=True)

    with open(spec_path, 'r') as f:
        spec = set(json.load(f))

    if dst_layer_spec is None:
        corgie_logger.info(
            "Destination layer not specified. Using Source layer "
            "as Destination.")
        dst_layer = src_layer
        dst_layer.readonly = False
    else:
        dst_layer = create_layer_from_spec(dst_layer_spec,
                                           caller_name='dst layer',
                                           readonly=False,
                                           reference=src_layer,
                                           chunk_z=chunk_z,
                                           overwrite=True)
    bcube = get_bcube_from_coords(start_coord, end_coord, coord_mip)
    for z in range(*bcube.z_range()):
        if z in spec:
            job_bcube = bcube.reset_coords(zs=z, ze=z + 1, in_place=False)
            downsample_job = DownsampleJob(src_layer=src_layer,
                                           dst_layer=dst_layer,
                                           mip_start=mip_start,
                                           mip_end=mip_end,
                                           bcube=job_bcube,
                                           chunk_xy=chunk_xy,
                                           chunk_z=chunk_z,
                                           mips_per_task=mips_per_task)

            # create scheduler and execute the job
            scheduler.register_job(downsample_job,
                                   job_name=f"Downsample {job_bcube}")
    scheduler.execute_until_completion()
    result_report = f"Downsampled {src_layer} from {mip_start} to {mip_end}. Result in {dst_layer}"
    corgie_logger.info(result_report)