Ejemplo n.º 1
0
    def wrapper(src, dst, *args, **kwargs):
        """Run ``func`` and persist call metadata on the destination dataset.

        After computing ``func(src, dst, ...)``, reopens ``dst`` and — when
        the dataset supports metadata — records the wrapped function's name
        (``fname``, from the enclosing decorator scope) under the ``kind`` and
        ``name`` attributes (only if unset), every keyword argument, and the
        id(s) of the source dataset(s) under ``source``.

        Returns the dataset repr of ``dst``, not the raw result of ``func``.
        """
        # BUG FIX: leftover debug output (pprint/print) routed through the
        # module logger instead of stdout.
        logger.debug(
            f"save_metadata wrapper: src {src}, dst {dst}, args {args}, kwargs {kwargs}"
        )
        result = func(src, dst, *args, **kwargs)
        ds = dataset_from_uri(dst, mode="r+")
        if ds.supports_metadata():
            for param in ["kind", "name"]:
                if not ds.has_attr(param):
                    logger.debug(f"Setting param {param} {fname}")
                    ds.set_attr(param, fname)
            for k, v in kwargs.items():
                logger.debug(f"Setting key value {k}, {v}")
                ds.set_attr(k, v)
            # Record provenance: id(s) of the dataset(s) this one was built from.
            if isinstance(src, list):
                src_id = [dataset_from_uri(s, mode="r").id for s in src]
            else:
                src_id = dataset_from_uri(src, mode="r").id
            ds.set_attr("source", src_id)
        else:
            logger.debug("Dataset doesn't support metadata.")
        # NOTE: the return value of ``func`` is deliberately replaced by the
        # dataset repr, matching the original behavior.
        result = dataset_repr(ds)
        ds.close()
        logger.info("+ Computed: {}".format(fname))
        return result
Ejemplo n.º 2
0
def annotate_regions(
    workspace: String,
    level: String,
    region: DataURI,
    r: IntList,
    label: Int,
    full: SmartBoolean,
    parent_level: String,
    parent_label_idx: Int,
    bb: IntList,
    viewer_order: tuple,
):
    """Label the supervoxel regions ``r`` with ``label`` on an annotation level.

    When a parent level is given (anything other than "-1"/-1), the edit is
    restricted to voxels whose parent-level label equals ``parent_label_idx``.
    The annotated result is written back to the level's dataset, which is then
    flagged as modified.
    """
    from survos2.api.annotate import annotate_regions

    ds = get_level(workspace, level, full)
    region = dataset_from_uri(region, mode="r")

    from survos2.frontend.frontend import get_level_from_server

    parent_arr = None
    parent_mask = None
    if parent_level != "-1" and parent_level != -1:
        parent_arr, parent_annotations_dataset = get_level_from_server(
            {"level_id": parent_level}, retrieval_mode="volume"
        )
        # Compare against the low 4 bits only before building the mask.
        parent_arr = parent_arr & 15
        parent_mask = parent_arr == parent_label_idx

    logger.debug(f"BB in annotate_regions {bb}")
    anno = annotate_regions(
        ds,
        region,
        r=r,
        label=label,
        parent_mask=parent_mask,
        bb=bb,
        viewer_order=viewer_order,
    )

    # Identity map: used only to stream ``anno`` block-wise into the target.
    def pass_through(x):
        return x

    dst = DataModel.g.dataset_uri(level, group="annotations")
    map_blocks(pass_through, anno, out=dst, normalize=False)
    modified_ds = dataset_from_uri(dst, mode="r")
    modified_ds.set_attr("modified", [1])
Ejemplo n.º 3
0
def upload(body, request, response):
    """Receive an encoded uint32 annotation volume and store it in the workspace.

    ``body`` carries the raw bytes ('file'), the target shape as a literal
    string ('shape'), and the destination annotation name ('name').  The
    stored dataset is flagged as modified afterwards.
    """
    print(f"Request: {request}")
    print(f"Response: {response}")

    raw_bytes = body["file"]
    shape_str = body["shape"]
    anno_id = body["name"]
    print(f"shape {shape_str} name {anno_id}")

    level_arr = np.frombuffer(raw_bytes, dtype="uint32")
    print(f"level_arr: {level_arr.shape}")

    # Shape arrives as a string such as "(64, 64, 64)"; literal_eval parses it
    # safely (no arbitrary code execution).
    from ast import literal_eval

    level_arr.shape = literal_eval(shape_str)
    print(f"Uploaded feature of shape {level_arr.shape}")

    dst = DataModel.g.dataset_uri(anno_id, group="annotations")
    with DatasetManager(dst, out=dst, dtype="uint32", fillvalue=0) as DM:
        DM.out[:] = level_arr

    modified_ds = dataset_from_uri(dst, mode="r")
    modified_ds.set_attr("modified", [1])
Ejemplo n.º 4
0
def prepare_features(features, roi_crop, resample_amt):
    """Calculate filters on image volume to generate features for survos segmentation

    Arguments:
        features {list of string} -- list of feature uri
        roi_crop {tuple of int} -- tuple defining a bounding box for cropping the image volume
        resample_amt {float} -- amount to scale the input volume

    Returns:
        features -- dataclass containing the processed image layers, and a stack made from them
    """
    filtered_layers = []

    for i, feature in enumerate(features):
        logger.info(f"Loading feature number {i}: {os.path.basename(feature)}")

        # Load, crop to the ROI bounding box, then resample.
        layer = dataset_from_uri(feature, mode="r")
        layer = layer[
            roi_crop[0]:roi_crop[1],
            roi_crop[2]:roi_crop[3],
            roi_crop[4]:roi_crop[5],
        ]
        layer = scipy.ndimage.zoom(layer, resample_amt, order=1)

        logger.info(f"Cropped and resampled feature shape: {layer.shape}")
        filtered_layers.append(layer)

    dataset_feats, features_stack = prepare_prediction_features(filtered_layers)
    return SRFeatures(filtered_layers, dataset_feats, features_stack)
Ejemplo n.º 5
0
def test_data(data_func):
    """Generated test volumes have the declared shape, are float32, and are
    normalised to the [0, 1] range."""
    dataset = dataset_from_uri(data_func())
    try:
        data = asnparray(dataset)
        assert data.shape == data_func.__shape__
        assert data.dtype == np.float32
        assert data.max() == 1.0
        assert data.min() == 0.0
    finally:
        # BUG FIX: close the dataset even when an assertion fails, so a
        # failing test doesn't leak an open handle into later tests.
        dataset.close()
Ejemplo n.º 6
0
 def wrapper(src, dst, *args, **kwargs):
     """Run ``func`` then persist call metadata on the output dataset.

     ``func`` and ``fname`` come from the enclosing decorator's scope.
     Sets ``kind``/``name`` (if unset), every keyword argument, and the
     source dataset id(s) as attributes on ``dst`` when the dataset
     supports metadata.  Returns the dataset repr of ``dst`` rather than
     the raw result of ``func``.
     """
     result = func(src, dst, *args, **kwargs)
     ds = dataset_from_uri(dst, mode='r+')
     if ds.supports_metadata():
         for param in ['kind', 'name']:
             if not ds.has_attr(param):
                 ds.set_attr(param, fname)
         for k, v in kwargs.items():
             ds.set_attr(k, v)
         # Record provenance: id(s) of the dataset(s) this one was built from.
         if type(src) == list:
             src_id = [dataset_from_uri(s, mode='r').id for s in src]
         else:
             src_id = dataset_from_uri(src, mode='r').id
         ds.set_attr('source', src_id)
     # NOTE: the result of ``func`` is intentionally discarded in favour of
     # the dataset repr.
     result = dataset_repr(ds)
     ds.close()
     logger.info('+ Computed: {}'.format(fname))
     return result
Ejemplo n.º 7
0
def load_metadata(source: "Dataset URI to load in HDF5, MRC or SuRVoS format"):
    """Log the metadata of a dataset, if its backing format supports any."""
    from survos2.io import dataset_from_uri, supports_metadata

    # Guard clause: formats without metadata support just get a notice.
    if not supports_metadata(source):
        logger.info("Dataset `{}` has no metadata.".format(source))
        return

    ds = dataset_from_uri(source)
    logger.info(format_yaml(ds.metadata()))
    ds.close()
Ejemplo n.º 8
0
def annotate_regions(workspace: String,
                     level: String,
                     region: DataURI,
                     r: IntList,
                     label: Int,
                     full: SmartBoolean = False):
    """Label the supervoxel regions ``r`` with ``label`` on an annotation level."""
    from survos2.api.annotations.annotate import annotate_regions as annotate

    target_ds = get_level(workspace, level, full)
    region_ds = dataset_from_uri(region, mode='r')
    annotate(target_ds, region_ds, r=r, label=label)
Ejemplo n.º 9
0
def add_data(workspace: String, dataset: String):
    """Normalise a source volume to the [0, 1] range and add it to a workspace."""
    import dask.array as da
    from survos2.improc.utils import optimal_chunksize

    ws = get(workspace)
    with dataset_from_uri(dataset, mode='r') as data:
        chunks = optimal_chunksize(data, Config['computing.chunk_size'])
        volume = da.from_array(data, chunks=chunks)
        # Min-max normalisation, done lazily on the dask graph.
        volume = volume - da.min(volume)
        volume = volume / da.max(volume)
        ds = ws.add_data(volume)
    logger.info(type(ds))
    return ds
Ejemplo n.º 10
0
def view_data(
    source: "Dataset URI to load in HDF5, MRC or SuRVoS format",
    boundaries: "Boundaries to show on top of the `source`" = None,
    overlay: "Overlay dataset to show on top of the `source`" = None,
    bcolor: "Color of the overlaid boundaries" = "#000099",
    balpha: "Alpha of the overlaid boundaries" = 0.7,
    oalpha: "Overlay alpha." = 0.5,
):
    """
    Visualizes a 3D volume with sliders.

    Optionally overlays a segmentation (or any other image) on top of the
    source, plus boundary lines obtained from supervoxels or another
    boundary-extraction method.
    """
    from survos2.io import dataset_from_uri
    from survos2.volume_utils import view

    logger.info(f"Loading source volume {source}")
    source = dataset_from_uri(source)
    if boundaries:
        logger.info("Loading boundaries")
        boundaries = dataset_from_uri(boundaries)
    if overlay:
        logger.info("Loading overlay")
        overlay = dataset_from_uri(overlay)

    view(
        source,
        boundaries=boundaries,
        overlay=overlay,
        bcolor=bcolor,
        balpha=balpha,
        oalpha=oalpha,
    )

    # Release all opened datasets; boundaries/overlay only if they were loaded.
    source.close()
    for extra in (boundaries, overlay):
        if extra:
            extra.close()
Ejemplo n.º 11
0
def pull_anno(roi_fname: String):
    """Copy the '001_level' annotation of an ROI workspace back into the
    matching region of the main '001_level' annotation dataset.

    The ROI bounds are encoded in the last six underscore-separated fields
    of ``roi_fname`` as z_min, z_max, x_min, x_max, y_min, y_max.
    """
    roi_ws = ws.get(roi_fname)
    ds = ws.get_dataset(roi_fname, '001_level', group="annotations")

    z_min, z_max, x_min, x_max, y_min, y_max = (
        int(part) for part in roi_fname.split("_")[-6:]
    )

    dst = DataModel.g.dataset_uri('001_level', group="annotations")
    main_anno = dataset_from_uri(dst, mode="rw")
    main_anno[z_min:z_max, x_min:x_max, y_min:y_max] = ds[:]
Ejemplo n.º 12
0
def annotate_voxels(
    workspace: String,
    level: String,
    slice_idx: Int,
    yy: IntList,
    xx: IntList,
    label: Int,
    full: SmartBoolean,
    parent_level: String,
    parent_label_idx: Int,
    viewer_order: tuple,
    three_dim: SmartBoolean,
    brush_size: Int,
    centre_point: tuple
):
    """Paint the voxels (yy, xx) on slice ``slice_idx`` with ``label``.

    When a parent level is given (anything other than "-1"/-1), the edit is
    restricted to voxels whose parent-level label equals
    ``parent_label_idx``.  The level dataset is flagged as modified
    afterwards.
    """
    from survos2.api.annotate import annotate_voxels

    ds = get_level(workspace, level, full)

    from survos2.frontend.frontend import get_level_from_server

    has_parent = parent_level != "-1" and parent_level != -1
    if has_parent:
        parent_arr, parent_annotations_dataset = get_level_from_server(
            {"level_id": parent_level}, retrieval_mode="volume"
        )
        # Compare against the low 4 bits only before building the mask.
        parent_arr = parent_arr & 15
        parent_mask = parent_arr == parent_label_idx
    else:
        parent_arr = None
        parent_mask = None

    logger.info(f"slice_idx {slice_idx} Viewer order: {viewer_order}")
    annotate_voxels(
        ds,
        slice_idx=slice_idx,
        yy=yy,
        xx=xx,
        label=label,
        parent_mask=parent_mask,
        viewer_order=viewer_order,
        three_dim=three_dim,
        brush_size=brush_size,
        centre_point=centre_point,
    )

    dst = DataModel.g.dataset_uri(level, group="annotations")
    modified_ds = dataset_from_uri(dst, mode="r")
    modified_ds.set_attr("modified", [1])
Ejemplo n.º 13
0
def prepare_supervoxels(
    supervoxels: List[str],
    filtered_stack: np.ndarray,
    roi_crop: np.ndarray,
    resample_amt: float,
) -> SRData:
    """Load supervoxels from file, crop/resample them, then bundle as SRData.

    Args:
        supervoxels (List[str]): list of supervoxel URIs (only the first is used)
        filtered_stack (np.ndarray): stack of filters (currently unused; see note)
        roi_crop (np.ndarray): roi to crop to
        resample_amt (float): zoom level

    Returns:
        [SRData]: superregions dataclass object with empty features/rag lists
    """
    logger.debug(f"Loading supervoxel file {supervoxels[0]}")
    logger.debug(f"Roi crop {roi_crop}")

    supervoxel_vol = dataset_from_uri(supervoxels[0], mode="r")
    supervoxel_vol = np.array(supervoxel_vol).astype(np.uint32, copy=False)
    supervoxel_vol = np.nan_to_num(supervoxel_vol)

    supervoxel_proc = supervoxel_vol[
        roi_crop[0]:roi_crop[1],
        roi_crop[2]:roi_crop[3],
        roi_crop[4]:roi_crop[5],
    ].astype(np.uint32, copy=False)

    supervoxel_proc = scipy.ndimage.zoom(supervoxel_proc, resample_amt, order=1)

    logger.debug(
        f"Loading Supervoxel {os.path.basename(supervoxels[0])} with shape {supervoxel_proc.shape}"
    )

    # BUG FIX: the original computed rmeans(filtered_stack, supervoxel_proc)
    # and create_rag(..., connectivity=6) here and then immediately overwrote
    # both results with empty lists, discarding the expensive work.  The dead
    # computations are removed; SRData still receives empty features/rag
    # exactly as before.
    supervoxel_features = []
    supervoxel_rag = []

    superregions = SRData(supervoxel_proc, supervoxel_features, supervoxel_rag)

    return superregions
Ejemplo n.º 14
0
def get_entities_metadata(src: DataURI):
    """Return the stored entity metadata attributes of the dataset at ``src``.

    Returns a dict with the 'fullname', 'scale', 'offset', 'crop_start' and
    'crop_end' metadata entries.
    """
    # BUG FIX: the original read the entire volume (dataset_from_uri(src)[:])
    # into an unused local, wasting time and memory — only the metadata is
    # needed here.
    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        ds_objects = DM.sources[0]
        logger.debug(f"Using dataset {ds_objects}")

    keys = ("fullname", "scale", "offset", "crop_start", "crop_end")
    return {key: ds_objects.get_metadata(key) for key in keys}
Ejemplo n.º 15
0
def add_data(workspace: String, data_fname: String):
    """Load a volume from ``data_fname``, scale it into [0, 1] and store it
    in the given workspace."""
    import dask.array as da

    from survos2.improc.utils import optimal_chunksize

    ws = get(workspace)
    logger.info(f"Adding data to workspace {ws}")

    with dataset_from_uri(data_fname, mode="r") as data:
        chunk_size = optimal_chunksize(data, Config["computing.chunk_size"])
        logger.debug(
            f'Calculating optimal chunk size using chunk_size {Config["computing.chunk_size"]}: {chunk_size}'
        )

        # Min-max normalisation, done lazily on the dask graph.
        volume = da.from_array(data, chunks=chunk_size)
        volume = volume - da.min(volume)
        volume = volume / da.max(volume)
        ds = ws.add_data(volume)

    return ds
Ejemplo n.º 16
0
def test_workspace():
    """End-to-end workspace lifecycle: create, add data/dataset/session, delete."""
    ws = Workspace(".")
    workspace_fpath = "./newws1"
    ws = ws.create(workspace_fpath)
    data_fname = "./tmp/testvol_4x4x4b.h5"

    with dataset_from_uri(data_fname, mode="r") as data:
        chunk_size = optimal_chunksize(data, Config["computing.chunk_size"])
        volume = da.from_array(data, chunks=chunk_size)
        # Min-max normalise into [0, 1] before storing.
        volume = volume - da.min(volume)
        volume = volume / da.max(volume)
        ds = ws.add_data(volume)

    ws.add_dataset("testds", "float32")
    assert ws.exists(workspace_fpath)
    assert ws.has_data()
    assert ws.available_datasets() == ['testds']
    ws.add_session('newsesh')
    assert ws.has_session('newsesh')

    ws.delete()
Ejemplo n.º 17
0
def get_slice(src:DataURI, slice_idx:Int):
    """Return the 2D slice ``slice_idx`` of the dataset at ``src``, encoded
    for transport."""
    dataset = dataset_from_uri(src, mode='r')
    return encode_numpy(dataset[slice_idx])
Ejemplo n.º 18
0
def get_crop(src: DataURI, roi: IntList):
    """Return an encoded 3D crop of the annotation dataset at ``src``.

    ``roi`` holds three (start, stop) index pairs — presumably (z, x, y)
    order; TODO confirm against callers.
    """
    logger.debug("Getting anno crop")
    dataset = dataset_from_uri(src, mode="r")
    crop = dataset[roi[0]:roi[1], roi[2]:roi[3], roi[4]:roi[5]]
    return encode_numpy(crop)
Ejemplo n.º 19
0
def get_slice(src: DataURI, slice_idx: Int, order: tuple):
    """Return slice ``slice_idx`` of the volume at ``src`` after transposing
    its axes into ``order``, encoded for transport."""
    volume = dataset_from_uri(src, mode="r")[:]
    volume = np.transpose(volume, order)
    return encode_numpy(volume[slice_idx])
Ejemplo n.º 20
0
def get_volume(src: DataURI):
    """Return the full annotation volume at ``src``, encoded for transport."""
    logger.debug("Getting annotation volume")
    dataset = dataset_from_uri(src, mode="r")
    return encode_numpy(dataset[:])
Ejemplo n.º 21
0
def get_slice(src: DataURI, slice_idx: Int, order: tuple):
    """Return slice ``slice_idx`` of the float32 volume at ``src`` after
    transposing its axes into ``order``, encoded for transport."""
    order = np.array(order)
    volume = dataset_from_uri(src, mode="r")[:]
    # BUG FIX: cast to float32 once here; the original repeated the astype
    # on the already-float32 slice just before encoding, a redundant copy.
    volume = np.transpose(volume, order).astype(np.float32)
    return encode_numpy_slice(volume[slice_idx])