Code example #1
def predict_segmentation_fcn(
    feature_id: DataURI,
    model_fullname: String,
    dst: DataURI,
    patch_size: IntOrVector = 64,
    patch_overlap: IntOrVector = 8,
    threshold: Float = 0.5,
    model_type: String = "unet3d",
):
    from survos2.entity.pipeline_ops import make_proposal

    src = DataModel.g.dataset_uri(feature_id, group="features")

    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        src_dataset = DM.sources[0]
        logger.debug(f"Adding feature of shape {src_dataset.shape}")
        
    proposal = make_proposal(
        src_dataset,
        model_fullname,
        model_type=model_type,
        patch_size=patch_size,
        patch_overlap=patch_overlap,
    )

    proposal -= np.min(proposal)
    proposal = proposal / np.max(proposal)
    proposal = ((proposal < threshold) * 1) + 1

    # store resulting segmentation in dst
    dst = DataModel.g.dataset_uri(dst, group="pipelines")
    with DatasetManager(dst, out=dst, dtype="float32", fillvalue=0) as DM:
        DM.out[:] = proposal
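A minimal usage sketch (the feature name, model path, and destination name below are hypothetical; dst can be a plain name because the function resolves it into the pipelines group itself):

predict_segmentation_fcn(
    feature_id="001_raw",
    model_fullname="/experiments/unet3d_model.pt",
    dst="002_unet_prediction",
    patch_size=(64, 64, 64),
    patch_overlap=(8, 8, 8),
    threshold=0.5,
    model_type="unet3d",
)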
Code example #2
def cleaning(
    # object_id : DataURI,
    feature_id: DataURI,
    dst: DataURI,
    min_component_size: Int = 100,
):
    from survos2.entity.saliency import (
        single_component_cleaning,
        filter_small_components,
    )

    # src = DataModel.g.dataset_uri(ntpath.basename(object_id), group="objects")
    # logger.debug(f"Getting objects {src}")
    # with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
    #     ds_objects = DM.sources[0]
    # entities_fullname = ds_objects.get_metadata("fullname")
    # tabledata, entities_df = setup_entity_table(entities_fullname)
    # selected_entities = np.array(entities_df)

    logger.debug(f"Calculating stats on feature: {feature_id}")
    src = DataModel.g.dataset_uri(ntpath.basename(feature_id), group="features")
    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        feature_dataset_arr = DM.sources[0][:]

    seg_cleaned, tables, labeled_images = filter_small_components(
        [feature_dataset_arr], min_component_size=min_component_size
    )
    # seg_cleaned = single_component_cleaning(selected_entities, feature_dataset_arr, bvol_dim=(42,42,42))

    # map_blocks(pass_through, (seg_cleaned > 0) * 1.0, out=dst, normalize=False)

    with DatasetManager(dst, out=dst, dtype="uint32", fillvalue=0) as DM:
        DM.out[:] = (seg_cleaned > 0) * 1.0
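A usage sketch; note that here dst must already be a full dataset URI, since cleaning() writes to it directly (the names are hypothetical):

dst = DataModel.g.dataset_uri("002_cleaned", group="pipelines")
cleaning(feature_id="001_total_mask", dst=dst, min_component_size=100)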
Code example #3
    def load_as_float(self):
        logger.debug(f"Loading prediction {self.pipeline_id} as float image.")

        # get pipeline output
        src = DataModel.g.dataset_uri(self.pipeline_id, group="pipelines")
        with DatasetManager(src, out=None, dtype="uint32", fillvalue=0) as DM:
            src_arr = DM.sources[0][:]
        # create new float image
        params = dict(feature_type="raw", workspace=True)
        result = Launcher.g.run("features", "create", **params)

        if result:
            fid = result["id"]
            ftype = result["kind"]
            fname = result["name"]
            logger.debug(
                f"Created new object in workspace {fid}, {ftype}, {fname}")

            dst = DataModel.g.dataset_uri(fid, group="features")
            with DatasetManager(dst, out=dst, dtype="float32",
                                fillvalue=0) as DM:
                DM.out[:] = src_arr

            cfg.ppw.clientEvent.emit({
                "source": "workspace_gui",
                "data": "refresh",
                "value": None
            })
Code example #4
def label_postprocess(
    level_over: DataURI,
    level_base: DataURI,
    selected_label: Int,
    offset: Int,
    dst: DataURI,
):
    if level_over != 'None':
        src1 = DataModel.g.dataset_uri(level_over, group="annotations")
        with DatasetManager(src1, out=None, dtype="uint16", fillvalue=0) as DM:
            src1_dataset = DM.sources[0]
            anno1_level = src1_dataset[:] & 15
            logger.info(f"Obtained over annotation level with labels {np.unique(anno1_level)}")

    src_base = DataModel.g.dataset_uri(level_base, group="annotations")
    with DatasetManager(src_base, out=None, dtype="uint16", fillvalue=0) as DM:
        src_base_dataset = DM.sources[0]
        anno_base_level = src_base_dataset[:] & 15
        logger.info(f"Obtained base annotation level with labels {np.unique(anno_base_level)}")

    print(f"Selected label {selected_label}")
    
    #if int(selected_label) != -1:
    #    anno_base_level = (anno_base_level == int(selected_label)) * 1.0

    result = anno_base_level
    

    if level_over != 'None':
        result = anno_base_level * (1.0 - ((anno1_level > 0) * 1.0))
        anno1_level[anno1_level == selected_label] += offset

        result += anno1_level
        
    map_blocks(pass_through, result, out=dst, normalize=False)
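The overlay arithmetic zeroes the base level wherever the over level is non-zero, offsets the selected label, and adds the over level back in. A toy sketch of just that arithmetic, independent of the workspace machinery:

import numpy as np

anno_base_level = np.array([1, 1, 2, 2])
anno1_level = np.array([0, 3, 0, 3])
selected_label, offset = 3, 10

result = anno_base_level * (1.0 - ((anno1_level > 0) * 1.0))  # [1., 0., 2., 0.]
anno1_level[anno1_level == selected_label] += offset          # [0, 13, 0, 13]
result += anno1_level
# result -> [1., 13., 2., 13.]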
Code example #5
File: objects.py Project: DiamondLightSource/SuRVoS2
def patches(
    dst: DataURI,
    fullname: String,
    scale: float,
    offset: FloatOrVector,
    crop_start: FloatOrVector,
    crop_end: FloatOrVector,
) -> "GEOMETRY":
    src = DataModel.g.dataset_uri("__data__")
    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        src_dataset = DM.sources[0]
        img_volume = src_dataset[:]
        logger.info(f"Got __data__ volume of size {img_volume.shape}")
    # store in dst
    logger.info(f"Storing in dataset {dst}")

    with DatasetManager(dst, out=dst, dtype="float32", fillvalue=0) as DM:
        DM.out[:] = np.zeros_like(img_volume)
        dst_dataset = DM.sources[0]
        dst_dataset.set_attr("scale", scale)
        dst_dataset.set_attr("offset", offset)
        dst_dataset.set_attr("crop_start", crop_start)
        dst_dataset.set_attr("crop_end", crop_end)

        csv_saved_fullname = dst_dataset.save_file(fullname)
        logger.info(f"Saving {fullname} to {csv_saved_fullname}")
        dst_dataset.set_attr("fullname", csv_saved_fullname)
Code example #6
def object_stats(
    src: DataURI,
    dst: DataURI,
    object_id: DataURI,
    feature_ids: DataURIList,
    stat_name: String,
) -> "OBJECTS":
    logger.debug(f"Calculating stats on objects: {object_id}")
    logger.debug(f"With features: {feature_ids}")

    src = DataModel.g.dataset_uri(ntpath.basename(feature_ids[0]),
                                  group="features")
    logger.debug(f"Getting features {src}")
    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        ds_feature = DM.sources[0][:]
        # logger.debug(f"summary_stats {src_dataset[:]}")

    src = DataModel.g.dataset_uri(ntpath.basename(object_id), group="objects")
    logger.debug(f"Getting objects {src}")
    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        ds_objects = DM.sources[0]
    scale = ds_objects.get_metadata("scale")
    print(f"Scaling objects by: {scale}")

    entities_fullname = ds_objects.get_metadata("fullname")
    tabledata, entities_df = setup_entity_table(entities_fullname)
    sel_start, sel_end = 0, len(entities_df)
    logger.info(
        f"Viewing entities {entities_fullname} from {sel_start} to {sel_end}")

    centers = np.array([[
        np.int32(np.float32(entities_df.iloc[i]["z"]) * scale),
        np.int32(np.float32(entities_df.iloc[i]["x"]) * scale),
        np.int32(np.float32(entities_df.iloc[i]["y"]) * scale),
    ] for i in range(sel_start, sel_end)])
    box_size = 4

    print(f"Calculating statistic {stat_name} with box size of {box_size}")
    if stat_name == "0":
        stat_op = np.mean
        title = "Mean"
    elif stat_name == "1":
        stat_op = np.std
        title = "Standard Deviation"
    elif stat_name == "2":
        stat_op = np.var
        title = "Variance"
    point_features = [
        stat_op(ds_feature[c[0] - box_size:c[0] + box_size,
                           c[1] - box_size:c[1] + box_size,
                           c[2] - box_size:c[2] + box_size, ]) for c in centers
    ]

    plot_image = plot_to_image(point_features, title=title)

    return (point_features, encode_numpy(plot_image))
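Stripped of the workspace plumbing, the statistic is just a NumPy reduction over a small cube around each scaled centre; a self-contained sketch of that pattern:

import numpy as np

volume = np.random.rand(32, 32, 32)   # stand-in for ds_feature
centers = [(16, 16, 16), (8, 8, 8)]   # hypothetical (z, x, y) centres
box_size = 4

point_features = [
    np.mean(volume[z - box_size:z + box_size,
                   x - box_size:x + box_size,
                   y - box_size:y + box_size])
    for z, x, y in centers
]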
Code example #7
File: objects.py Project: DiamondLightSource/SuRVoS2
    def make_patches(self):
        src = DataModel.g.dataset_uri(self.feature_source.value(),
                                      group="features")
        with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
            src_array = DM.sources[0][:]

        objects_scale = 1.0
        entity_meta = {
            "0": {
                "name": "class1",
                "size": np.array((15, 15, 15)) * objects_scale,
                "core_radius": np.array((7, 7, 7)) * objects_scale,
            },
        }

        entity_arr = np.array(self.entities_df)

        combined_clustered_pts, classwise_entities = organize_entities(
            src_array, entity_arr, entity_meta, plot_all=False)

        wparams = {}
        wparams["entities_offset"] = (0, 0, 0)
        wparams["entity_meta"] = entity_meta
        wparams["workflow_name"] = "Make_Patches"
        wparams["proj"] = DataModel.g.current_workspace
        wf = PatchWorkflow([src_array], combined_clustered_pts,
                           classwise_entities, src_array, wparams,
                           combined_clustered_pts)

        src = DataModel.g.dataset_uri(self.annotations_source.value().rsplit(
            "/", 1)[-1],
                                      group="annotations")
        with DatasetManager(src, out=None, dtype="uint16", fillvalue=0) as DM:
            src_dataset = DM.sources[0]
            anno_level = src_dataset[:] & 15

        logger.debug(
            f"Obtained annotation level with labels {np.unique(anno_level)}")

        logger.debug(f"Making patches in path {src_dataset._path}")
        train_v_density = make_patches(wf,
                                       entity_arr,
                                       src_dataset._path,
                                       proposal_vol=(anno_level > 0) * 1.0,
                                       padding=(32, 32, 32),
                                       num_augs=0,
                                       max_vols=-1)

        self.patches = train_v_density

        cfg.ppw.clientEvent.emit({
            "source": "panel_gui",
            "data": "view_patches",
            "patches_fullname": train_v_density
        })
Code example #8
def remove_masked_objects(
    src: DataURI,
    dst: DataURI,
    feature_id: DataURI,
    object_id: DataURI,
) -> "OBJECTS":
    src = DataModel.g.dataset_uri(ntpath.basename(object_id), group="objects")
    logger.debug(f"Getting objects {src}")

    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        ds_objects = DM.sources[0]
    scale = ds_objects.get_metadata("scale")
    print(f"Scaling objects by: {scale}")

    objects_fullname = ds_objects.get_metadata("fullname")
    objects_scale = ds_objects.get_metadata("scale")
    objects_offset = ds_objects.get_metadata("offset")
    objects_crop_start = ds_objects.get_metadata("crop_start")
    objects_crop_end = ds_objects.get_metadata("crop_end")

    logger.debug(f"Getting objects from {src} and file {objects_fullname}")
    from survos2.frontend.components.entity import make_entity_df, setup_entity_table

    tabledata, entities_df = setup_entity_table(
        objects_fullname,
        scale=objects_scale,
        offset=objects_offset,
        crop_start=objects_crop_start,
        crop_end=objects_crop_end,
    )

    entities = np.array(make_entity_df(np.array(entities_df), flipxy=False))

    logger.debug(f"Removing entities using feature as mask: {feature_id}")
    src = DataModel.g.dataset_uri(ntpath.basename(feature_id),
                                  group="features")
    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        mask = DM.sources[0][:]

    logger.debug(f"Initial number of objects: {len(entities_df)}")
    refined_entity_df = make_entity_df(
        remove_masked_entities((mask == 0) * 1.0, np.array(entities_df)))

    logger.debug(f"Removing entities using mask with shape {mask.shape}")
    result_list = []
    for i in range(len(refined_entity_df)):
        result_list.append([
            refined_entity_df.iloc[i]["class_code"],
            refined_entity_df.iloc[i]["z"],
            refined_entity_df.iloc[i]["y"],
            refined_entity_df.iloc[i]["x"],
        ])

    return result_list
Code example #9
    def test_sr_predict_shape(self, datamodel):
        DataModel = datamodel
        src = DataModel.g.dataset_uri("__data__", None)
        dst = DataModel.g.dataset_uri("001_gaussian_blur", group="features")
        result = survos.run_command(
            "features", "gaussian_blur", uri=None, src=src, dst=dst
        )

        with DatasetManager(src, out=dst, dtype="float32", fillvalue=0) as DM:
            src_dataset = DM.sources[0]
            dst_dataset = DM.out
            src_arr = src_dataset[:]
            gblur_arr = dst_dataset[:]

        result = survos.run_command("superregions", "create", uri=None)
        features_src = DataModel.g.dataset_uri("001_gaussian_blur", group="features")
        dst = DataModel.g.dataset_uri("001_superregions", group="superregions")

        result = supervoxels(
            features_src,
            dst,
            n_segments=8,
            compactness=0.5,
            spacing=[1, 1, 1],
            multichannel=False,
            enforce_connectivity=False,
        )
        with DatasetManager(src, out=dst, dtype="float32", fillvalue=0) as DM:
            src_dataset = DM.sources[0]
            dst_dataset = DM.out
            src_arr = src_dataset[:]
            dst_arr = dst_dataset[:]

        superseg_cfg = cfg.pipeline
        superseg_cfg["type"] = "rf"
        superseg_cfg["predict_params"]["clf"] = "Ensemble"

        refine = False
        lam = 1.0

        anno_arr = np.ones_like(dst_arr)
        anno_arr[2:4, 2:4, 2:4] = 2
        feature_arr = view_dataset("001_gaussian_blur", "features", 3)
        segmentation = sr_predict(
            dst_arr,
            anno_arr,
            [feature_arr, gblur_arr],
            None,
            superseg_cfg,
            refine,
            lam,
        )
Code example #10
File: objects.py Project: DiamondLightSource/SuRVoS2
    def train_fpn(self):
        from survos2.entity.train import train_seg
        from survos2.entity.pipeline_ops import make_proposal

        wf_params = {}
        wf_params["torch_models_fullpath"] = "/experiments"
        model_file = train_seg(self.patches, wf_params, num_epochs=1)

        patch_size = (64, 64, 64)
        patch_overlap = (16, 16, 16)
        overlap_mode = "crop"
        model_type = "fpn3d"

        threshold_devs = 1.5
        invert = True

        src = DataModel.g.dataset_uri(self.feature_source.value(),
                                      group="features")
        with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
            src_array = DM.sources[0][:]

        proposal = make_proposal(
            src_array,
            os.path.join(wf_params["torch_models_fullpath"], model_file),
            model_type=model_type,
            patch_size=patch_size,
            patch_overlap=patch_overlap,
            overlap_mode=overlap_mode,
        )

        # create new float image
        params = dict(feature_type="raw", workspace=True)
        result = Launcher.g.run("features", "create", **params)

        if result:
            fid = result["id"]
            ftype = result["kind"]
            fname = result["name"]
            logger.debug(
                f"Created new object in workspace {fid}, {ftype}, {fname}")

            dst = DataModel.g.dataset_uri(fid, group="features")
            with DatasetManager(dst, out=dst, dtype="float32",
                                fillvalue=0) as DM:
                DM.out[:] = proposal

            cfg.ppw.clientEvent.emit({
                "source": "workspace_gui",
                "data": "refresh",
                "value": None
            })
Code example #11
def predict_2d_unet(
    src: DataURI,
    dst: DataURI,
    workspace: String,
    anno_id: DataURI,
    feature_id: DataURI,
    model_path: str,
    no_of_planes: int
):
    logger.debug(
        f"Predict_2d_unet with feature {feature_id} in {no_of_planes} planes"
    )

    src = DataModel.g.dataset_uri(anno_id, group="annotations")
    with DatasetManager(src, out=None, dtype="uint16", fillvalue=0) as DM:
        src_dataset = DM.sources[0]
        anno_level = src_dataset[:] & 15
    logger.debug(f"Obtained annotation level with labels {np.unique(anno_level)}")

    src = DataModel.g.dataset_uri(feature_id, group="features")
    logger.debug(f"Getting features {src}")
    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        src_dataset = DM.sources[0]
        logger.debug(f"Adding feature of shape {src_dataset.shape}")
        feature = src_dataset[:]

    logger.info(
        f"Predict_2d_unet with feature shape {feature.shape} using model {model_path}"
    )
    from survos2.server.unet2d.unet2d import Unet2dPredictor
    from survos2.server.unet2d.data_utils import PredictionHDF5DataSlicer
    ws_object = ws.get(workspace)
    root_path = Path(ws_object.path, "unet2d")
    root_path.mkdir(exist_ok=True, parents=True)
    predictor = Unet2dPredictor(root_path)
    predictor.create_model_from_zip(Path(model_path))
    now = datetime.now()
    dt_string = now.strftime("%d%m%Y_%H_%M_%S")
    slicer = PredictionHDF5DataSlicer(predictor, feature, clip_data=True)
    if no_of_planes == 1:
        segmentation = slicer.predict_1_way(root_path, output_prefix=dt_string)
    elif no_of_planes == 3:
        segmentation = slicer.predict_3_ways(root_path, output_prefix=dt_string)
    else:
        raise ValueError(f"no_of_planes must be 1 or 3, got {no_of_planes}")
    segmentation += np.ones_like(segmentation)

    def pass_through(x):
        return x

    map_blocks(pass_through, segmentation, out=dst, normalize=False)
Code example #12
def wavelet(
    src: DataURI,
    dst: DataURI,
    threshold: Float = 64.0,
    level: Int = 1,
    wavelet: String = "sym3",
    hard: SmartBoolean = True,
) -> "WAVELET":
    from ..server.filtering import wavelet as wavelet_fn

    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        src_dataset_arr = DM.sources[0][:]

    result = wavelet_fn(src_dataset_arr,
                        level=level,
                        wavelet=str(wavelet),
                        threshold=threshold,
                        hard=hard)
    # map_blocks(
    #     wavelet,
    #     src,
    #     level=level,
    #     out=dst,
    #     normalize=True,
    #     wavelet="sym3",
    #     threshold=threshold,
    #     hard=hard,
    #     pad=max(4, int(level * 2)),
    # )

    map_blocks(pass_through, result, out=dst, normalize=False)
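A usage sketch with hypothetical feature URIs; both src and dst are full URIs here because the function reads from and map_blocks writes to them directly:

src = DataModel.g.dataset_uri("001_raw", group="features")
dst = DataModel.g.dataset_uri("002_wavelet", group="features")
wavelet(src, dst, threshold=64.0, level=1, wavelet="sym3", hard=True)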
Code example #13
def get_level_from_server(msg, retrieval_mode="volume"):
    if retrieval_mode == "slice":  # get a slice over http
        src_annotations_dataset = DataModel.g.dataset_uri(
            msg["level_id"], group="annotations"
        )
        params = dict(
            workspace=True,
            src=src_annotations_dataset,
            slice_idx=cfg.current_slice,
            order=cfg.order,
        )
        result = Launcher.g.run("annotations", "get_slice", **params)
        if result:
            src_arr = decode_numpy(result)
    elif retrieval_mode == "volume_http":  # get a slice over http
        src_annotations_dataset = DataModel.g.dataset_uri(
            msg["level_id"], group="annotations"
        )
        params = dict(workspace=True, src=src_annotations_dataset)
        result = Launcher.g.run("annotations", "get_volume", **params)
        if result:
            src_arr = decode_numpy(result)
    elif retrieval_mode == "volume":  # get entire volume
        src = DataModel.g.dataset_uri(msg["level_id"], group="annotations")
        with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
            src_annotations_dataset = DM.sources[0][:]
            src_arr = get_array_from_dataset(src_annotations_dataset)

    return src_arr, src_annotations_dataset
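A usage sketch (the level id is hypothetical):

src_arr, src_dataset = get_level_from_server({"level_id": "001_level"}, retrieval_mode="volume")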
Code example #14
def binary_image_stats(src: DataURI,
                       dst: DataURI,
                       feature_id: DataURI,
                       threshold: Float = 0.5) -> "IMAGE":
    logger.debug(f"Calculating stats on feature: {feature_id}")
    src = DataModel.g.dataset_uri(ntpath.basename(feature_id),
                                  group="features")
    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        src_dataset_arr = DM.sources[0][:]

    src_thresh = (src_dataset_arr > threshold) * 1.0
    bbs_tables, selected_entities = detect_blobs(src_thresh)
    print(bbs_tables)
    print(selected_entities)

    result_list = []
    for i in range(len(bbs_tables[0])):
        result_list.append([
            bbs_tables[0].iloc[i]["area"],
            bbs_tables[0].iloc[i]["z"],
            bbs_tables[0].iloc[i]["x"],
            bbs_tables[0].iloc[i]["y"],
        ])

    return result_list
Code example #15
def find_connected_components(src: DataURI, dst: DataURI,
                              pipelines_id: DataURI, label_index: Int,
                              workspace: String) -> "SEGMENTATION":
    logger.debug(
        f"Finding connected components on segmentation: {pipelines_id}")
    print(f"{DataModel.g.current_workspace}")
    src = DataModel.g.dataset_uri(pipelines_id, group="pipelines")
    print(src)
    with DatasetManager(src, out=None, dtype="int32", fillvalue=0) as DM:
        src_dataset_arr = DM.sources[0][:]
        logger.debug(f"src_dataset shape {src_dataset_arr[:].shape}")

    single_label_level = (src_dataset_arr == label_index) * 1.0

    bbs_tables, selected_entities = detect_blobs(single_label_level)
    print(bbs_tables)
    print(selected_entities)

    result_list = []
    for i in range(len(bbs_tables[0])):
        result_list.append([
            bbs_tables[0].iloc[i]["area"],
            bbs_tables[0].iloc[i]["z"],
            bbs_tables[0].iloc[i]["y"],
            bbs_tables[0].iloc[i]["x"],
        ])

    map_blocks(pass_through, single_label_level, out=dst, normalize=False)

    print(result_list)
    return result_list
Code example #16
def upload(body, request, response):
    print(f"Request: {request}")
    print(f"Response: {response}")
    
    encoded_array = body['file']
    array_shape = body['shape']
    anno_id = body['name']
    print(f"shape {array_shape} name {anno_id}")
    
    level_arr = np.frombuffer(encoded_array, dtype="uint32")
    
    print(f"level_arr: {level_arr.shape}")
    from ast import literal_eval 
    level_arr.shape = literal_eval(array_shape)
    print(f"Uploaded feature of shape {level_arr.shape}")

    dst = DataModel.g.dataset_uri(anno_id, group="annotations")

    with DatasetManager(dst, out=dst, dtype="uint32", fillvalue=0) as DM:
        DM.out[:] = level_arr


    modified_ds = dataset_from_uri(dst, mode="r")    
    modified = [1]
    modified_ds.set_attr("modified", modified)
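The handler expects raw uint32 bytes plus a shape string it can literal_eval, so a client payload can be assembled like this (a sketch; the actual transport and endpoint are outside this snippet):

import numpy as np

level_arr = np.zeros((4, 8, 8), dtype="uint32")  # hypothetical annotation volume
body = {
    "file": level_arr.tobytes(),    # decoded with np.frombuffer(..., dtype="uint32")
    "shape": str(level_arr.shape),  # parsed with ast.literal_eval
    "name": "001_level",            # hypothetical annotation id
}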
Code example #17
def supervoxels(
    src: DataURI,
    dst: DataURI,
    n_segments: Int = 10,
    compactness: Float = 20,
    spacing: FloatList = [1, 1, 1],
    multichannel: SmartBoolean = False,
    enforce_connectivity: SmartBoolean = False,
    out_dtype="int",
):
    with DatasetManager(src, out=None, dtype=out_dtype, fillvalue=0) as DM:
        src_data_arr = DM.sources[0][:]

    supervoxel_image = slic(
        src_data_arr,
        n_segments=n_segments,
        spacing=spacing,
        compactness=compactness,
        multichannel=multichannel,
    )
    print(supervoxel_image)

    def pass_through(x):
        return x

    map_blocks(pass_through, supervoxel_image, out=dst, normalize=False)
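A usage sketch mirroring the call in code example #9:

features_src = DataModel.g.dataset_uri("001_gaussian_blur", group="features")
dst = DataModel.g.dataset_uri("001_superregions", group="superregions")
supervoxels(features_src, dst, n_segments=8, compactness=0.5,
            spacing=[1, 1, 1], multichannel=False, enforce_connectivity=False)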
Code example #18
def threshold(src: DataURI, dst: DataURI, thresh: Float = 0.5) -> "BASE":
    from ..server.filtering import threshold as threshold_fn

    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        src_dataset_arr = DM.sources[0][:]
        filtered = threshold_fn(src_dataset_arr, thresh=thresh)

    map_blocks(pass_through, filtered, out=dst, normalize=False)
Code example #19
def rescale(src: DataURI, dst: DataURI) -> "BASE":
    logger.debug(f"Rescaling src {src}")
    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        src_dataset = DM.sources[0][:]

        filtered = rescale_denan(src_dataset)

    map_blocks(pass_through, filtered, out=dst, normalize=False)
Code example #20
def distance_transform_edt(src: DataURI, dst: DataURI) -> "MORPHOLOGY":
    from ..server.filtering import distance_transform_edt

    logger.debug(f"Calculating distance transform")
    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        src_dataset_arr = DM.sources[0][:]
        filtered = distance_transform_edt(src_dataset_arr)

    map_blocks(pass_through, filtered, out=dst, normalize=False)
Code example #21
def skeletonize(src: DataURI, dst: DataURI) -> "MORPHOLOGY":
    from ..server.filtering import skeletonize

    logger.debug(f"Calculating medial axis")
    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        src_dataset_arr = DM.sources[0][:]
        filtered = skeletonize(src_dataset_arr)

    map_blocks(pass_through, filtered, out=dst, normalize=False)
Code example #22
File: roi.py Project: DiamondLightSource/SuRVoS2
def remove(workspace: String, roi_fname: String):
    src = DataModel.g.dataset_uri("__data__")
    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        src_dataset = DM.sources[0]
        roi_fnames = src_dataset.get_metadata("roi_fnames")
        selected = None
        for k, v in roi_fnames.items():
            if v == roi_fname:
                selected = k
        if selected is not None:
            del roi_fnames[selected]
        src_dataset.set_metadata("roi_fnames", roi_fnames)
Code example #23
File: roi.py Project: DiamondLightSource/SuRVoS2
def existing():
    src = DataModel.g.dataset_uri("__data__")
    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        src_dataset = DM.sources[0]
        ds_metadata = src_dataset.get_metadata()
        if not "roi_fnames" in ds_metadata:
            src_dataset.set_metadata("roi_fnames", {})
            return {}
        roi_fnames = ds_metadata["roi_fnames"]
        return roi_fnames
Code example #24
def spatial_clustering(
    src: DataURI,
    feature_id: DataURI,
    object_id: DataURI,
    workspace: String,
    params: dict,
) -> "OBJECTS":
    src = DataModel.g.dataset_uri(ntpath.basename(object_id), group="objects")
    logger.debug(f"Getting objects {src}")

    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        ds_objects = DM.sources[0]
    scale = ds_objects.get_metadata("scale")
    print(f"Scaling objects by: {scale}")

    entities_fullname = ds_objects.get_metadata("fullname")
    tabledata, entities_df = setup_entity_table(entities_fullname)

    logger.debug(
        f"Spatial clustering using feature as reference image: {feature_id}")
    src = DataModel.g.dataset_uri(ntpath.basename(feature_id),
                                  group="features")
    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        src_dataset_arr = DM.sources[0][:]

    from survos2.entity.anno.crowd import aggregate

    refined_entity_df = aggregate(entities_df,
                                  src_dataset_arr.shape,
                                  params=params)
    print(refined_entity_df)

    result_list = []
    for i in range(len(refined_entity_df)):
        result_list.append([
            refined_entity_df.iloc[i]["class_code"],
            refined_entity_df.iloc[i]["z"],
            refined_entity_df.iloc[i]["y"],
            refined_entity_df.iloc[i]["x"],
        ])

    return result_list
Code example #25
def update_annotations(msg):
    logger.debug(f"update_annotation {msg}")

    if cfg.local_sv:
        update_annotation_layer_in_viewer(msg["level_id"], cfg.anno_data)
    else:
        src = DataModel.g.dataset_uri(msg["level_id"], group="annotations")
        with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
            src_annotations_dataset = DM.sources[0][:]
            src_arr = get_array_from_dataset(src_annotations_dataset)
        update_annotation_layer_in_viewer(msg["level_id"], src_arr)
Code example #26
def watershed(src: DataURI, anno_id: DataURI, dst: DataURI):
    from ..server.filtering import watershed

    # get marker anno
    anno_uri = DataModel.g.dataset_uri(anno_id, group="annotations")
    with DatasetManager(anno_uri, out=None, dtype="uint16", fillvalue=0) as DM:
        src_dataset = DM.sources[0]
        anno_level = src_dataset[:] & 15
        logger.debug(f"Obtained annotation level with labels {np.unique(anno_level)}")

    logger.debug(f"Calculating watershed")
    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        src_dataset_arr = DM.sources[0][:]

    filtered = watershed(src_dataset_arr, anno_level)

    dst = DataModel.g.dataset_uri(dst, group="pipelines")

    with DatasetManager(dst, out=dst, dtype="uint32", fillvalue=0) as DM:
        DM.out[:] = filtered
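A usage sketch (names hypothetical); dst is a plain name because watershed() resolves it into the pipelines group itself, while src is a full URI:

src = DataModel.g.dataset_uri("001_raw", group="features")
watershed(src, anno_id="001_level", dst="002_watershed")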
Code example #27
def view_dataset(dataset_name, group, z=None):
    src = DataModel.g.dataset_uri(dataset_name, group=group)

    with DatasetManager(src, out=None, dtype="float32") as DM:
        src_dataset = DM.sources[0]
        src_arr = src_dataset[:]
    if z:
        plt.figure()
        plt.imshow(src_arr[z, :])

    return src_arr
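For example, to fetch a feature volume and preview slice z=3 (the feature name is hypothetical):

src_arr = view_dataset("001_gaussian_blur", "features", z=3)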
Code example #28
File: objects.py Project: DiamondLightSource/SuRVoS2
    def make_entity_mask(self):
        src = DataModel.g.dataset_uri(self.feature_source.value(),
                                      group="features")
        with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
            src_array = DM.sources[0][:]

        entity_arr = np.array(self.entities_df)

        bvol_dim = self.entity_mask_bvol_size.value()
        entity_arr[:, 0] -= bvol_dim[0]
        entity_arr[:, 1] -= bvol_dim[1]
        entity_arr[:, 2] -= bvol_dim[2]

        from survos2.entity.entities import make_entity_mask

        gold_mask = make_entity_mask(src_array,
                                     entity_arr,
                                     flipxy=True,
                                     bvol_dim=bvol_dim)[0]

        # create new raw feature
        params = dict(feature_type="raw", workspace=True)
        result = Launcher.g.run("features", "create", **params)

        if result:
            fid = result["id"]
            ftype = result["kind"]
            fname = result["name"]
            logger.debug(
                f"Created new object in workspace {fid}, {ftype}, {fname}")

            dst = DataModel.g.dataset_uri(fid, group="features")
            with DatasetManager(dst, out=dst, dtype="float32",
                                fillvalue=0) as DM:
                DM.out[:] = gold_mask

            cfg.ppw.clientEvent.emit({
                "source": "objects_plugin",
                "data": "refresh",
                "value": None
            })
Code example #29
    def test_feature_generation(self, datamodel):
        DataModel = datamodel

        src = DataModel.g.dataset_uri("__data__", None)
        with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
            src_dataset = DM.sources[0]
            raw_arr = src_dataset[:]

        random_blobs = binary_blobs(length=max(raw_arr.shape), n_dim=3)
        random_blobs_anno = np.zeros_like(raw_arr)
        random_blobs_anno[
            0 : raw_arr.shape[0], 0 : raw_arr.shape[1], 0 : raw_arr.shape[2]
        ] = random_blobs[
            0 : raw_arr.shape[0], 0 : raw_arr.shape[1], 0 : raw_arr.shape[2]
        ]
        result = survos.run_command(
            "annotations",
            "add_level",
            uri=None,
            workspace=DataModel.g.current_workspace,
        )
        assert "id" in result[0]

        level_id = result[0]["id"]
        label_values = np.unique(random_blobs_anno)

        for v in label_values:
            params = dict(
                level=level_id,
                idx=int(v),
                name=str(v),
                color="#11FF11",
                workspace=DataModel.g.current_workspace,
            )
            label_result = survos.run_command("annotations", "add_label", **params)

        dst = DataModel.g.dataset_uri(level_id, group="annotations")
        with DatasetManager(dst, out=dst, dtype="uint32", fillvalue=0) as DM:
            DM.out[:] = random_blobs_anno
Code example #30
def _transfer_features(selected_layer):
    logger.debug("Transferring Image layer to Features.")
    params = dict(feature_type="raw", workspace=True)
    result = Launcher.g.run("features", "create", **params)
    fid = result["id"]
    ftype = result["kind"]
    fname = result["name"]
    logger.debug(f"Created new object in workspace {fid}, {ftype}, {fname}")
    result = DataModel.g.dataset_uri(fid, group="features")
    with DatasetManager(result, out=result, dtype="float32",
                        fillvalue=0) as DM:
        DM.out[:] = selected_layer.data
    return result
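Any object exposing a .data array satisfies the interface; a sketch with a stand-in layer (in practice the caller is likely a napari Image layer, but that is an assumption here):

import numpy as np

class StubLayer:
    def __init__(self, data):
        self.data = data  # array copied into the new feature

feature_uri = _transfer_features(StubLayer(np.zeros((8, 64, 64), dtype=np.float32)))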