Example #1
def frangi(
    src: DataURI,
    dst: DataURI,
    scale_min: Float = 1.0,
    scale_max: Float = 4.0,
    scale_step: Float = 1.0,
    alpha: Float = 0.5,
    beta: Float = 0.5,
    gamma: Float = 15,
) -> "BLOB":
    from ..server.filtering.blob import compute_frangi

    map_blocks(
        compute_frangi,
        src,
        out=dst,
        scale_range=(scale_min, scale_max),
        scale_step=scale_step,
        alpha=alpha,
        beta=beta,
        gamma=gamma,
        dark_response=True,
        normalize=True,
        pad=max(4, int((scale_max * 2))),
    )
Example #2
def wavelet(
    src: DataURI,
    dst: DataURI,
    threshold: Float = 64.0,
    level: Int = 1,
    wavelet: String = "sym3",
    hard: SmartBoolean = True,
) -> "WAVELET":
    from ..server.filtering import wavelet as wavelet_fn

    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        src_dataset_arr = DM.sources[0][:]

    result = wavelet_fn(src_dataset_arr,
                        level=level,
                        wavelet=str(wavelet),
                        threshold=threshold,
                        hard=hard)
    # map_blocks(
    #     wavelet,
    #     src,
    #     level=level,
    #     out=dst,
    #     normalize=True,
    #     wavelet="sym3",
    #     threshold=threshold,
    #     hard=hard,
    #     pad=max(4, int(level * 2)),
    # )

    map_blocks(pass_through, result, out=dst, normalize=False)
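The call above relies on a `pass_through` helper that is not defined in this example; the `supervoxels` and `annotate_regions` examples below define it locally as an identity function. A minimal sketch of the module-level helper this code assumes:

def pass_through(x):
    # Identity filter: lets map_blocks write an already-computed in-memory result
    # to the destination dataset unchanged.
    return x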
Example #3
def label_postprocess(
    level_over: DataURI,
    level_base: DataURI,
    selected_label: Int,
    offset: Int,
    dst: DataURI,
):
    if level_over != 'None':
        src1 = DataModel.g.dataset_uri(level_over, group="annotations")
        with DatasetManager(src1, out=None, dtype="uint16", fillvalue=0) as DM:
            src1_dataset = DM.sources[0]
            anno1_level = src1_dataset[:] & 15
            logger.info(f"Obtained over annotation level with labels {np.unique(anno1_level)}")

    src_base = DataModel.g.dataset_uri(level_base, group="annotations")
    with DatasetManager(src_base, out=None, dtype="uint16", fillvalue=0) as DM:
        src_base_dataset = DM.sources[0]
        anno_base_level = src_base_dataset[:] & 15
        logger.info(f"Obtained base annotation level with labels {np.unique(anno_base_level)}")

    print(f"Selected label {selected_label}")
    
    #if int(selected_label) != -1:
    #    anno_base_level = (anno_base_level == int(selected_label)) * 1.0

    result = anno_base_level

    if level_over != 'None':
        # Zero out the base annotation wherever the over layer is labelled,
        # then shift the selected over label by offset and merge the over layer in.
        result = anno_base_level * (1.0 - ((anno1_level > 0) * 1.0))
        anno1_level[anno1_level == selected_label] += offset
        result += anno1_level
        
    map_blocks(pass_through, result, out=dst, normalize=False)
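A small NumPy sketch of the merge above, with made-up arrays and label values: `& 15` keeps only the low four bits of each voxel (the label value), the base layer is zeroed wherever the over layer has a label, and the selected over label is shifted by `offset` before being added back in.

import numpy as np

# Hypothetical 1D stand-ins for the base and over annotation volumes.
anno_base_level = np.array([1, 1, 2, 2, 0], dtype="uint16") & 15
anno1_level = np.array([0, 3, 3, 0, 0], dtype="uint16") & 15
selected_label, offset = 3, 4

result = anno_base_level * (1.0 - ((anno1_level > 0) * 1.0))
anno1_level[anno1_level == selected_label] += offset
result += anno1_level
print(result)  # [1. 7. 7. 2. 0.]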
Example #4
def find_connected_components(src: DataURI, dst: DataURI,
                              pipelines_id: DataURI, label_index: Int,
                              workspace: String) -> "SEGMENTATION":
    logger.debug(
        f"Finding connected components on segmentation: {pipelines_id}")
    print(f"{DataModel.g.current_workspace}")
    src = DataModel.g.dataset_uri(pipelines_id, group="pipelines")
    print(src)
    with DatasetManager(src, out=None, dtype="int32", fillvalue=0) as DM:
        src_dataset_arr = DM.sources[0][:]
        logger.debug(f"src_dataset shape {src_dataset_arr[:].shape}")

    single_label_level = (src_dataset_arr == label_index) * 1.0

    bbs_tables, selected_entities = detect_blobs(single_label_level)
    print(bbs_tables)
    print(selected_entities)

    result_list = []
    for i in range(len(bbs_tables[0])):
        result_list.append([
            bbs_tables[0].iloc[i]["area"],
            bbs_tables[0].iloc[i]["z"],
            bbs_tables[0].iloc[i]["y"],
            bbs_tables[0].iloc[i]["x"],
        ])

    map_blocks(pass_through, single_label_level, out=dst, normalize=False)

    print(result_list)
    return result_list
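Assuming `bbs_tables[0]` is a pandas DataFrame with `area`, `z`, `y` and `x` columns (as the `.iloc` access above suggests), the same list can be built more compactly:

result_list = bbs_tables[0][["area", "z", "y", "x"]].values.tolist()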
Example #5
def supervoxels(
    src: DataURI,
    dst: DataURI,
    n_segments: Int = 10,
    compactness: Float = 20,
    spacing: FloatList = [1, 1, 1],
    multichannel: SmartBoolean = False,
    enforce_connectivity: SmartBoolean = False,
    out_dtype="int",
):
    with DatasetManager(src, out=None, dtype=out_dtype, fillvalue=0) as DM:
        src_data_arr = DM.sources[0][:]

    supervoxel_image = slic(
        src_data_arr,
        n_segments=n_segments,
        spacing=spacing,
        compactness=compactness,
        multichannel=multichannel,
        enforce_connectivity=enforce_connectivity,
    )
    print(supervoxel_image)

    def pass_through(x):
        return x

    map_blocks(pass_through, supervoxel_image, out=dst, normalize=False)
Example #6
def merge_regions(src: DataURI, labels: DataURI, dst: DataURI, min_size: Float):
    """
    API wrapper for `survos2.improc.regions.merge_small`.
    """
    from ..improc.regions.ccl import merge_small

    map_blocks(merge_small, src, labels, out=dst, min_size=min_size)
Example #7
def connected_components(src: DataURI, dst: DataURI, remap: SmartBoolean):
    """
    API wrapper for `survos2.improc.regions.ccl3d`.
    """
    from ..improc.regions.ccl import ccl3d

    map_blocks(ccl3d, src, out=dst, remap=remap)
Example #8
def threshold(src: DataURI, dst: DataURI, thresh: Float = 0.5) -> "BASE":
    from ..server.filtering import threshold as threshold_fn

    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        src_dataset_arr = DM.sources[0][:]
        filtered = threshold_fn(src_dataset_arr, thresh=thresh)

    map_blocks(pass_through, filtered, out=dst, normalize=False)
Example #9
def gaussian(src: DataURI,
             dst: DataURI,
             sigma: FloatOrVector = 1) -> 'Denoising':
    """
    API wrapper around `survos2.improc.features.gauss.gaussian`.
    """
    from ..improc.features.gauss import gaussian
    map_blocks(gaussian, src, out=dst, sigma=sigma, normalize=True)
Example #10
def supervoxels(src: DataURIList, dst: DataURI, shape: IntList = [10, 10, 10],
                compactness: Float = 30, spacing: FloatList = [1, 1, 1]):
    """
    API wrapper for `survos2.improc.regions.slic3d`.
    """
    from ..improc.regions.slic import slic3d
    map_blocks(slic3d, *src, out=dst, sp_shape=shape, spacing=spacing,
               compactness=compactness, stack=True)
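A usage sketch with hypothetical dataset ids and group names (`DataModel.g.dataset_uri` is used the same way in the other examples here):

# src is a DataURIList, so the source feature URIs are passed as a list and unpacked by the wrapper.
feat_uri = DataModel.g.dataset_uri("001_gaussian_blur", group="features")
dst_uri = DataModel.g.dataset_uri("002_supervoxels", group="regions")
supervoxels([feat_uri], dst_uri, shape=[10, 10, 10], compactness=30, spacing=[1, 1, 1])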
Example #11
def rescale(src: DataURI, dst: DataURI) -> "BASE":

    logger.debug(f"Rescaling src {src}")
    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        src_dataset = DM.sources[0][:]

        filtered = rescale_denan(src_dataset)

    map_blocks(pass_through, filtered, out=dst, normalize=False)
Example #12
def distance_transform_edt(src: DataURI, dst: DataURI) -> "MORPHOLOGY":
    from ..server.filtering import distance_transform_edt

    logger.debug(f"Calculating distance transform")
    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        src_dataset_arr = DM.sources[0][:]
        filtered = distance_transform_edt(src_dataset_arr)

    map_blocks(pass_through, filtered, out=dst, normalize=False)
Example #13
def skeletonize(src: DataURI, dst: DataURI) -> "MORPHOLOGY":
    from ..server.filtering import skeletonize

    logger.debug(f"Calculating medial axis")
    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        src_dataset_arr = DM.sources[0][:]
        filtered = skeletonize(src_dataset_arr)

    map_blocks(pass_through, filtered, out=dst, normalize=False)
Example #14
def spatial_gradient_3d(src: DataURI, dst: DataURI, dim: Int = 0) -> "EDGES":
    from ..server.filtering import spatial_gradient_3d

    map_blocks(
        spatial_gradient_3d,
        src,
        out=dst,
        dim=dim,
        normalize=True,
    )
Example #15
def dilation(src: DataURI, dst: DataURI, num_iter: Int = 1) -> "MORPHOLOGY":
    from ..server.filtering import dilate

    map_blocks(
        dilate,
        src,
        num_iter=num_iter,
        out=dst,
        normalize=True,
        pad=max(4, int(num_iter * 2)),
    )
Example #16
def closing(src: DataURI, dst: DataURI, num_iter: Int = 1) -> "MORPHOLOGY":
    from ..server.filtering import closing

    map_blocks(
        closing,
        src,
        num_iter=num_iter,
        out=dst,
        normalize=True,
        pad=max(4, int(num_iter * 2)),
    )
Example #17
def structure_tensor_determinant(src: DataURI,
                                 dst: DataURI,
                                 sigma: FloatOrVector = 1) -> "BLOB":
    from ..server.filtering.blob import compute_structure_tensor_determinant

    map_blocks(
        compute_structure_tensor_determinant,
        src,
        out=dst,
        sigma=sigma,
        pad=max(4, int((max(sigma) * 2))),
        normalize=True,
    )
Example #18
def hessian_eigenvalues(src: DataURI,
                        dst: DataURI,
                        sigma: FloatOrVector = 1) -> "BLOB":
    from ..server.filtering.blob import hessian_eigvals_image

    map_blocks(
        hessian_eigvals_image,
        src,
        out=dst,
        pad=max(4, int((max(sigma) * 2))),
        sigma=sigma,
        normalize=True,
    )
Example #19
def gaussian_norm(src: DataURI,
                  dst: DataURI,
                  sigma: FloatOrVector = 1) -> "NEIGHBORHOOD":
    from ..server.filtering.blur import gaussian_norm

    map_blocks(
        gaussian_norm,
        src,
        out=dst,
        sigma=sigma,
        pad=max(4, int((max(sigma) * 2))),
        normalize=True,
    )
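Several of the wrappers above compute `pad` with `max(sigma)`, which raises a `TypeError` if the default scalar `1` reaches them un-coerced; the `gaussian_blur` example further down guards against this explicitly. A small sketch of that guard, assuming `FloatOrVector` may deliver either a number or a sequence:

import numbers

def as_sigma_tuple(sigma):
    # Promote a scalar sigma to an isotropic 3-vector so that max(sigma)
    # and per-axis filtering both work (same idea as in gaussian_blur).
    if isinstance(sigma, numbers.Number):
        return (sigma, sigma, sigma)
    return tuple(sigma)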
Example #20
def annotate_regions(
    workspace: String,
    level: String,
    region: DataURI,
    r: IntList,
    label: Int,
    full: SmartBoolean,
    parent_level: String,
    parent_label_idx: Int,
    bb: IntList,
    viewer_order: tuple,
):
    from survos2.api.annotate import annotate_regions

    ds = get_level(workspace, level, full)
    region = dataset_from_uri(region, mode="r")

    from survos2.frontend.frontend import get_level_from_server

    if parent_level != "-1" and parent_level != -1:
        parent_arr, parent_annotations_dataset = get_level_from_server(
            {"level_id": parent_level}, retrieval_mode="volume"
        )
        parent_arr = parent_arr & 15
        # print(f"Using parent dataset for masking {parent_annotations_dataset}")
        parent_mask = parent_arr == parent_label_idx
    else:
        # print("Not masking using parent level")
        parent_arr = None
        parent_mask = None

    logger.debug(f"BB in annotate_regions {bb}")
    anno = annotate_regions(
        ds,
        region,
        r=r,
        label=label,
        parent_mask=parent_mask,
        bb=bb,
        viewer_order=viewer_order,
    )

    def pass_through(x):
        return x

    dst = DataModel.g.dataset_uri(level, group="annotations")
    map_blocks(pass_through, anno, out=dst, normalize=False)
    modified_ds = dataset_from_uri(dst, mode="r")
    modified = [1]
    modified_ds.set_attr("modified", modified)
Example #21
def laplacian(src: DataURI,
              dst: DataURI,
              kernel_size: FloatOrVector = 1) -> "EDGES":
    from ..server.filtering import ndimage_laplacian

    map_blocks(
        ndimage_laplacian,
        src,
        out=dst,
        kernel_size=kernel_size,
        pad=max(4, int(max(kernel_size)) * 3),
        normalize=False,
    )
Example #22
def total_variation(src: DataURI,
                    dst: DataURI,
                    lamda: Float = 10,
                    max_iter: Int = 100) -> 'Denoising':
    """
    API wrapper around `survos2.improc.features.tv.tvdenoising3d`.
    """
    from ..improc.features.tv import tvdenoising3d
    map_blocks(tvdenoising3d,
               src,
               out=dst,
               lamda=lamda,
               max_iter=max_iter,
               normalize=True)
Example #23
def median(src: DataURI,
           dst: DataURI,
           median_size: Int = 1,
           num_iter: Int = 1) -> "DENOISING":
    from ..server.filtering import median

    map_blocks(
        median,
        src,
        median_size=median_size,
        num_iter=num_iter,
        out=dst,
        pad=max(4, int((median_size * 2))),
        normalize=False,
    )
Example #24
def gaussian_blur(src: DataURI,
                  dst: DataURI,
                  sigma: FloatOrVector = 1) -> "DENOISING":
    from ..server.filtering import gaussian_blur_kornia

    if isinstance(sigma, numbers.Number):
        sigma = (sigma, sigma, sigma)
    map_blocks(
        gaussian_blur_kornia,
        src,
        out=dst,
        sigma=sigma,
        pad=max(4, int(max(sigma))),
        normalize=False,
    )
Example #25
def difference_of_gaussians(src: DataURI,
                            dst: DataURI,
                            sigma: FloatOrVector = 1,
                            sigma_ratio: Float = 2) -> "EDGES":
    from ..server.filtering.edge import compute_difference_gaussians

    map_blocks(
        compute_difference_gaussians,
        src,
        out=dst,
        sigma=sigma,
        sigma_ratio=sigma_ratio,
        pad=max(4, int((max(sigma) * 3))),
        normalize=False,
    )
Example #26
def predict_2d_unet(
    src: DataURI,
    dst: DataURI,
    workspace: String,
    anno_id: DataURI,
    feature_id: DataURI,
    model_path: str,
    no_of_planes: int
):
    logger.debug(
        f"Predict_2d_unet with feature {feature_id} in {no_of_planes} planes"
    )

    src = DataModel.g.dataset_uri(anno_id, group="annotations")
    with DatasetManager(src, out=None, dtype="uint16", fillvalue=0) as DM:
        src_dataset = DM.sources[0]
        anno_level = src_dataset[:] & 15
    logger.debug(f"Obtained annotation level with labels {np.unique(anno_level)}")

    src = DataModel.g.dataset_uri(feature_id, group="features")
    logger.debug(f"Getting features {src}")
    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        src_dataset = DM.sources[0]
        logger.debug(f"Adding feature of shape {src_dataset.shape}")
        feature = src_dataset[:]

    logger.info(
        f"Predict_2d_unet with feature shape {feature.shape} using model {model_path}"
    )
    from survos2.server.unet2d.unet2d import Unet2dPredictor
    from survos2.server.unet2d.data_utils import PredictionHDF5DataSlicer
    ws_object = ws.get(workspace)
    root_path = Path(ws_object.path, "unet2d")
    root_path.mkdir(exist_ok=True, parents=True)
    predictor = Unet2dPredictor(root_path)
    predictor.create_model_from_zip(Path(model_path))
    now = datetime.now()
    dt_string = now.strftime("%d%m%Y_%H_%M_%S")
    slicer = PredictionHDF5DataSlicer(predictor, feature, clip_data=True)
    if no_of_planes == 1:
        segmentation = slicer.predict_1_way(root_path, output_prefix=dt_string)
    elif no_of_planes == 3:
        segmentation = slicer.predict_3_ways(root_path, output_prefix=dt_string)
    else:
        raise ValueError(f"no_of_planes must be 1 or 3, got {no_of_planes}")
    # Shift every predicted label up by one before writing out the segmentation.
    segmentation += np.ones_like(segmentation)
    def pass_through(x):
        return x

    map_blocks(pass_through, segmentation, out=dst, normalize=False)
Example #27
def generate_supervoxels(
    dataset_feats: List[np.ndarray],
    filtered_stack: np.ndarray,
    dataset_feats_idx: int,
    slic_params: dict,
):
    """Generate a supervoxel volume image

    Arguments:
        dataset_feats {list of filtered volumes} -- list of filters of original input image volume
        filtered_stack {volume that is a stack of dataset_feats} -- reshaped version of dataset feats for rmeans
        dataset_feats_idx {int} -- index of the filter to use for supervoxel calculation
        slic_params {dict} -- Supervoxel generation parameters

    Returns: dataclass with all the information required for prediction
    """
    from cuda_slic import slic

    logger.debug(f"Using feature idx {dataset_feats_idx} for supervoxels.")
    logger.debug(
        f"SRFeatures for supervoxels have shape {dataset_feats[dataset_feats_idx].shape}"
    )
    logger.debug(f"Generating supervoxels with params: {slic_params}")

    block_z, block_x, block_y = dataset_feats[0].shape

    # Make a copy of the dictionary without the 'shape' parameter
    slic_params_copy = copy.deepcopy(slic_params)
    slic_params_copy.pop("shape", None)
    # map_blocks through Dask
    supervoxel_vol = map_blocks(
        slic,
        dataset_feats[dataset_feats_idx].astype(np.float32),
        **slic_params_copy,
        timeit=False,
    )
    supervoxel_vol = supervoxel_vol.astype(np.uint32, copy=True)
    logger.debug(
        f"Finished slic with supervoxel vol of shape {supervoxel_vol.shape}")

    supervoxel_vol = supervoxel_vol[...]
    supervoxel_vol = np.asarray(supervoxel_vol)
    supervoxel_vol = np.nan_to_num(supervoxel_vol)
    logger.debug(
        f"Calling rmeans with filtered_stack { len(filtered_stack)} and supervoxel_vol {supervoxel_vol.shape}"
    )
    supervoxel_features = rmeans(filtered_stack, supervoxel_vol)

    logger.debug(
        f"Finished rmeans with supervoxel_features of shape {supervoxel_features.shape}"
    )

    supervoxel_rag = create_rag(np.array(supervoxel_vol), connectivity=6)

    logger.debug("MaxMin SV Feat: {} {}".format(np.max(supervoxel_vol),
                                                np.min(supervoxel_vol)))

    superregions = SRData(supervoxel_vol, supervoxel_features, supervoxel_rag)

    return superregions
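A call sketch with made-up inputs; the keys placed in `slic_params` (other than `shape`, which the function pops before calling `slic`) are assumptions about what `cuda_slic.slic` accepts:

# feats: list of filtered volumes; stack: the same features reshaped for rmeans.
slic_params = {
    "shape": feats[0].shape,   # removed by generate_supervoxels before the slic call
    "n_segments": 10000,
    "compactness": 20,
    "spacing": [1, 1, 1],
}
superregions = generate_supervoxels(feats, stack, dataset_feats_idx=0, slic_params=slic_params)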
Example #28
def tvdenoise(
    src: DataURI,
    dst: DataURI,
    regularization_amount: Float = 0.001,
    pad: Int = 8,
    max_iter: Int = 100,
) -> "DENOISING":

    from ..server.filtering.blur import tvdenoise_kornia

    map_blocks(
        tvdenoise_kornia,
        src,
        out=dst,
        regularization_amount=regularization_amount,
        max_iter=max_iter,
        pad=pad,
        normalize=True,
    )
Example #29
def supervoxels_chunked(
    src: DataURIList,
    dst: DataURI,
    n_segments: Int = 10,
    compactness: Float = 20,
    spacing: FloatList = [1, 1, 1],
    multichannel: SmartBoolean = False,
    enforce_connectivity: SmartBoolean = False,
    out_dtype="int",
):

    map_blocks(
        slic,
        *src,
        out=dst,
        n_segments=n_segments,
        spacing=spacing,
        compactness=compactness,
        multichannel=False,
        enforce_connectivity=True,
        stack=False,
        timeit=True,
        uses_gpu=False,
        out_dtype=out_dtype,
        relabel=True,
    )

    with DatasetManager(dst, out=None, dtype=out_dtype, fillvalue=0) as DM:
        dst_dataset = DM.sources[0]
        supervoxel_image = dst_dataset[:]
        print(supervoxel_image.dtype)

        num_sv = len(np.unique(supervoxel_image))
        print(f"Number of supervoxels created: {num_sv}")

        # Record the supervoxel count while the destination dataset is still open.
        dst_dataset.set_attr("num_supervoxels", num_sv)
Example #30
def gaussian_center(src: DataURI, dst: DataURI, sigma: FloatOrVector = 1):
    """
    API wrapper around `survos2.improc.features.gauss.gaussian_center`.
    """
    from ..improc.features.gauss import gaussian_center
    map_blocks(gaussian_center, src, out=dst, sigma=sigma, normalize=True)