def object_stats(
    src: DataURI,
    dst: DataURI,
    object_id: DataURI,
    feature_ids: DataURIList,
    stat_name: String,
) -> "OBJECTS":
    """Compute a per-object statistic of a feature volume around each entity.

    For every entity in the objects dataset, a small cubic window of the
    feature volume centred on the (scaled) entity location is reduced with
    the selected statistic.

    Parameters
    ----------
    src, dst : DataURI
        Pipeline source/destination URIs (``src`` is rebound internally to
        the feature and object datasets; ``dst`` is unused here).
    object_id : DataURI
        URI of the objects dataset whose entities define the sample points.
    feature_ids : DataURIList
        Feature dataset URIs; only the first entry is used.
    stat_name : String
        "0" → mean, "1" → standard deviation, "2" → variance.

    Returns
    -------
    tuple
        (list of per-entity statistic values, encoded plot image).

    Raises
    ------
    ValueError
        If ``stat_name`` is not one of "0", "1", "2".
    """
    logger.debug(f"Calculating stats on objects: {object_id}")
    logger.debug(f"With features: {feature_ids}")

    # Resolve the statistic up front so an unknown selector fails fast,
    # instead of raising NameError after the volumes have been loaded.
    stat_ops = {
        "0": (np.mean, "Mean"),
        "1": (np.std, "Standard Deviation"),
        "2": (np.var, "Variance"),
    }
    if stat_name not in stat_ops:
        raise ValueError(
            f"Unknown stat_name {stat_name!r}; expected one of {sorted(stat_ops)}"
        )
    stat_op, title = stat_ops[stat_name]

    src = DataModel.g.dataset_uri(ntpath.basename(feature_ids[0]), group="features")
    logger.debug(f"Getting features {src}")
    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        ds_feature = DM.sources[0][:]

    src = DataModel.g.dataset_uri(ntpath.basename(object_id), group="objects")
    logger.debug(f"Getting objects {src}")
    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        ds_objects = DM.sources[0]
        scale = ds_objects.get_metadata("scale")
        entities_fullname = ds_objects.get_metadata("fullname")
    logger.debug(f"Scaling objects by: {scale}")

    _, entities_df = setup_entity_table(entities_fullname)
    sel_start, sel_end = 0, len(entities_df)
    logger.info(
        f"Viewing entities {entities_fullname} from {sel_start} to {sel_end}")

    # Entity coordinates are scaled and stored in (z, x, y) order —
    # presumably matching the feature volume's axis order; TODO confirm.
    centers = np.array([[
        np.int32(np.float32(entities_df.iloc[i]["z"]) * scale),
        np.int32(np.float32(entities_df.iloc[i]["x"]) * scale),
        np.int32(np.float32(entities_df.iloc[i]["y"]) * scale),
    ] for i in range(sel_start, sel_end)])

    box_size = 4
    logger.debug(f"Calculating statistic {stat_name} with box size of {box_size}")

    # Clamp the lower bound at 0: a negative slice start would wrap to the
    # far end of the array in numpy and compute the statistic over an
    # empty/incorrect window for entities near the volume border.
    point_features = [
        stat_op(ds_feature[
            max(0, c[0] - box_size):c[0] + box_size,
            max(0, c[1] - box_size):c[1] + box_size,
            max(0, c[2] - box_size):c[2] + box_size,
        ])
        for c in centers
    ]

    plot_image = plot_to_image(point_features, title=title)
    return (point_features, encode_numpy(plot_image))
def get_entities(src: DataURI):
    """Rebuild the entity table for the objects dataset at ``src``.

    Reads the dataset's stored metadata (source filename, scale, offset and
    crop bounds), reconstructs the entity table via ``setup_entity_table``,
    and returns the resulting DataFrame as an encoded numpy array.
    """
    with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
        dataset = DM.sources[0]
        logger.debug(f"Using dataset {dataset}")
        # Metadata written when the objects dataset was created.
        fullname = dataset.get_metadata("fullname")
        scale = dataset.get_metadata("scale")
        offset = dataset.get_metadata("offset")
        crop_start = dataset.get_metadata("crop_start")
        crop_end = dataset.get_metadata("crop_end")

    logger.info(f"Setting up entities {fullname}")
    _, entities_df = setup_entity_table(
        fullname,
        entities_df=None,
        scale=scale,
        offset=offset,
        crop_start=crop_start,
        crop_end=crop_end,
        flipxy=False,
    )
    return encode_numpy(np.array(entities_df))
def render_workspace(request, slice_idx: Int, workspace: String,
                     max_size: IntListOrNone = None, binning: Int = 1,
                     clim: FloatList = None, png: SmartBoolean = False,
                     **layers):
    """Render one slice of a workspace into a composited image.

    A per-session ``_Renderer`` is cached in the request's session database
    and reused across calls. Each known layer present in ``layers`` is
    fetched from the workspace, sliced at ``slice_idx``, optionally
    transformed, and stacked in order; an optional "annotations" entry adds
    label layers on top. Returns the rendered image (or its PNG encoding
    when ``png`` is true) via ``encode_numpy``.

    Parameters
    ----------
    request
        Incoming request; its session context stores the cached renderer.
    slice_idx : Int
        Index of the slice to render.
    workspace : String
        Workspace identifier passed to ``ws.get_data``/``ws.get_dataset``.
    max_size : IntListOrNone
        Optional output size cap, forwarded to the renderer.
    binning : Int
        Downsampling factor, forwarded to the renderer.
    clim : FloatList
        Contrast limits; defaults to [0, 1]. (None sentinel avoids a
        mutable default argument.)
    png : SmartBoolean
        When true, return the PNG-encoded image instead of the raw one.
    **layers
        Layer-name → LayerParams-compatible tuples.
    """
    # Replace the None sentinel with the documented default; behaviour is
    # identical to the old mutable-default signature.
    if clim is None:
        clim = [0, 1]

    logger.info("render_workspace")
    database = request.context["session"]

    # Reuse the session's renderer if one exists; building it is expensive.
    if "workspace_renderer" in database:
        renderer = database["workspace_renderer"]
    else:
        renderer = _Renderer()
        database["workspace_renderer"] = renderer
    renderer.clear()

    for i, (layer, data_tr, cmap_tr, clim_tr) in enumerate(KNOWN_LAYERS):
        if layer not in layers:
            continue
        logger.info("Rendering layer {}: {}".format(layer, slice_idx))
        dsname, cmap, alpha, visible = LayerParams(layers.pop(layer))
        # "__data__" is a sentinel meaning the workspace's main data volume.
        if dsname == "__data__":
            data = ws.get_data(workspace)
        else:
            data = ws.get_dataset(workspace, dsname)
        data = data[slice_idx]
        renderer.data_size = data.shape
        if data_tr:
            data = data_tr(data)
        if cmap and cmap_tr:
            cmap = cmap_tr(cmap)
        if clim and clim_tr:
            # clim_tr is either a fixed (lo, hi) pair or a callable that
            # derives limits from the data.
            if isinstance(clim_tr, (tuple, list)):
                clim = clim_tr
            else:
                clim = clim_tr(data, clim)
        if cmap is None:
            cmap = "gray"
        renderer.update_layer(
            layer,
            dsname,
            data,
            cmap=cmap,
            clim=clim,
            visible=visible,
            alpha=alpha,
            interp="nearest",
            order=i + 1,
        )

    if "annotations" in layers:
        n = len(KNOWN_LAYERS)
        levels, _, alpha, visible = layers.pop("annotations")
        for i, level in enumerate(levels):
            level, labels = level
            data = ws.get_dataset(workspace, level)
            # Low 4 bits hold the label id — presumably the upper bits are
            # flags; TODO confirm against the annotation format.
            data = data[slice_idx] & 15
            all_labels = get_labels(workspace, level, full=True)
            cmap = _label_cmap(all_labels, labels)
            clim = _Renderer.label_clim()
            renderer.update_layer(
                level,
                level,
                data,
                cmap=cmap,
                clim=clim,
                visible=visible,
                alpha=alpha,
                interp="nearest",
                order=i + n + 1,  # annotation layers stack above all known layers
            )

    renderer.render_workspace(max_size=max_size, binning=binning)
    if png:
        image = renderer.png.copy()
    else:
        image = renderer.image.copy()
    return encode_numpy(image)
def get_crop(src: DataURI, roi: IntList):
    """Extract a 3D crop from the dataset at ``src`` and return it encoded.

    ``roi`` holds three (start, stop) pairs, one per axis, in the order
    [a0, a1, b0, b1, c0, c1] — presumably (z, x, y); verify against callers.
    """
    logger.debug("Getting anno crop")
    volume = dataset_from_uri(src, mode="r")
    # Build one slice per axis from consecutive roi pairs.
    window = tuple(slice(roi[axis], roi[axis + 1]) for axis in (0, 2, 4))
    return encode_numpy(volume[window])
def get_slice(src: DataURI, slice_idx: Int, order: tuple):
    """Return one 2D slice of the volume at ``src`` after axis reordering.

    The whole volume is loaded, its axes permuted by ``order``, and the
    slice at ``slice_idx`` along the new first axis is returned encoded.
    """
    volume = dataset_from_uri(src, mode="r")[:]
    reordered = np.transpose(volume, order)
    return encode_numpy(reordered[slice_idx])
def get_volume(src: DataURI):
    """Load the full annotation volume at ``src`` and return it encoded."""
    logger.debug("Getting annotation volume")
    volume = dataset_from_uri(src, mode="r")
    return encode_numpy(volume[:])
def get_slice(src: DataURI, slice_idx: Int):
    """Return slice ``slice_idx`` of the dataset at ``src``, encoded."""
    dataset = dataset_from_uri(src, mode='r')
    return encode_numpy(dataset[slice_idx])