Example #1
def norm_minmse(gt, x, normalize_gt=True):
    """
    normalizes and affinely scales an image pair such that the MSE is minimized

    Parameters
    ----------
    gt: ndarray
        the ground truth image
    x: ndarray
        the image that will be affinely scaled
    normalize_gt: bool
        set to True if the gt image should be normalized (default)

    Returns
    -------
    gt_scaled, x_scaled

    """
    if normalize_gt:
        gt = normalize(gt, 0.1, 99.9, clip=False).astype(np.float32,
                                                         copy=False)
    x = x.astype(np.float32, copy=False) - np.mean(x)
    gt = gt.astype(np.float32, copy=False) - np.mean(gt)
    scale = np.cov(x.flatten(), gt.flatten())[0, 1] / np.var(x.flatten())
    return gt, scale * x
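A minimal usage sketch for the helper above, assuming numpy is imported as np and that gt and pred are placeholder arrays of the same shape (the MSE computation is illustrative and not part of the original snippet):

# Hedged sketch: scale a restored image onto the ground truth, then compute the MSE.
# `gt` and `pred` are hypothetical same-shaped image arrays.
gt_scaled, pred_scaled = norm_minmse(gt, pred)
mse = np.mean((gt_scaled - pred_scaled) ** 2)  # MSE after the optimal affine scaling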
Example #2
def render_label_pred_example():
    model_path = path_model2d()
    model = StarDist2D(None,
                       name=model_path.name,
                       basedir=str(model_path.parent))
    img, y_gt = real_image2d()
    x = normalize(img, 1, 99.8)
    y, _ = model.predict_instances(x)

    im = render_label_pred(y_gt, y, img=x)
    import matplotlib.pyplot as plt
    plt.figure(1, figsize=(12, 4))
    plt.subplot(1, 4, 1)
    plt.imshow(x)
    plt.title("img")
    plt.subplot(1, 4, 2)
    plt.imshow(render_label(y_gt, img=x))
    plt.title("gt")
    plt.subplot(1, 4, 3)
    plt.imshow(render_label(y, img=x))
    plt.title("pred")
    plt.subplot(1, 4, 4)
    plt.imshow(im)
    plt.title("tp (green) fp (red) fn(blue)")
    plt.tight_layout()
    plt.show()
    return im
Example #3
def test_speed(model2d):
    from time import time

    model = model2d
    img, mask = real_image2d()
    x = normalize(img, 1, 99.8)
    x = np.tile(x, (6, 6))
    print(x.shape)

    stats = []

    for mode, n_tiles, sparse in product(("normal", "big"), (None, (2, 2)),
                                         (True, False)):
        t = time()
        if mode == "normal":
            labels, res = model.predict_instances(x,
                                                  n_tiles=n_tiles,
                                                  sparse=sparse)
        else:
            labels, res = model.predict_instances_big(x,
                                                      axes="YX",
                                                      block_size=1024 + 256,
                                                      context=64,
                                                      min_overlap=64,
                                                      n_tiles=n_tiles,
                                                      sparse=sparse)

        t = time() - t
        s = f"mode={mode}\ttiles={n_tiles}\tsparse={sparse}\t{t:.2f}s"
        print(s)
        stats.append(s)

    for s in stats:
        print(s)
Example #4
def test_predict3D(model3d):
    model = model3d
    img = real_image3d()[0]
    img = normalize(img, 1, 99.8)
    img = repeat(img, 2)

    ref_labels, ref_polys = model.predict_instances(img)
    res_labels, res_polys = model.predict_instances_big(img,
                                                        axes='ZYX',
                                                        block_size=(55, 105,
                                                                    105),
                                                        min_overlap=(13, 25,
                                                                     25),
                                                        context=(17, 30, 30))

    m = matching(ref_labels, res_labels)
    assert (1.0, 1.0) == (m.accuracy, m.mean_true_score)

    # sort them lexicographically first
    ref_inds = np.lexsort(ref_polys["points"].T)
    res_inds = np.lexsort(res_polys["points"].T)

    assert np.allclose(ref_polys["dist"][ref_inds],
                       res_polys["dist"][res_inds],
                       atol=1e-2)
    assert np.allclose(ref_polys["points"][ref_inds],
                       res_polys["points"][res_inds],
                       atol=1e-2)
    assert np.allclose(ref_polys["prob"][ref_inds],
                       res_polys["prob"][res_inds],
                       atol=1e-2)

    return ref_polys, res_polys
Example #5
def test_predict2D(model2d, use_channel):
    model = model2d
    img = real_image2d()[0]
    img = normalize(img, 1, 99.8)
    img = repeat(img, 2)
    axes = 'YX'

    if use_channel:
        img = img[..., np.newaxis]
        axes += 'C'

    ref_labels, ref_polys = model.predict_instances(img, axes=axes)
    res_labels, res_polys = model.predict_instances_big(img, axes=axes, block_size=288, min_overlap=32, context=96)

    m = matching(ref_labels, res_labels)
    assert (1.0, 1.0) == (m.accuracy, m.mean_true_score)

    m = matching(render_polygons(ref_polys, shape=img.shape),
                 render_polygons(res_polys, shape=img.shape))
    assert (1.0, 1.0) == (m.accuracy, m.mean_true_score)

    # sort them lexicographically first
    ref_inds = np.lexsort(ref_polys["points"].T)
    res_inds = np.lexsort(res_polys["points"].T)

    assert np.allclose(ref_polys["coord"][ref_inds],
                       res_polys["coord"][res_inds],atol=1e-2)
    assert np.allclose(ref_polys["points"][ref_inds],
                       res_polys["points"][res_inds],atol=1e-2)
    assert np.allclose(ref_polys["prob"][ref_inds],
                       res_polys["prob"][res_inds],atol=1e-2)

    return ref_polys, res_polys
Example #6
    def Optimize(self):
        self.Y = [label(y) for y in tqdm(self.Y)]
        self.X = [normalize(x, 1, 99.8, axis=(0, 1)) for x in tqdm(self.X)]
        
        print('Images to apply prediction on', len(self.X))
        Yhat_val = [self.Starmodel.predict(x) for x in self.X]
        
        opt_prob_thresh, opt_measure, opt_nms_thresh = None, -np.inf, None
        for _opt_nms_thresh in self.nms_threshs:
            _opt_prob_thresh, _opt_measure = self.optimize_threshold(self.Y, Yhat_val, model=self.Starmodel, nms_thresh=_opt_nms_thresh)
            if _opt_measure > opt_measure:
                opt_prob_thresh, opt_measure, opt_nms_thresh = _opt_prob_thresh, _opt_measure, _opt_nms_thresh
        opt_threshs = dict(prob=opt_prob_thresh, nms=opt_nms_thresh)

        self.thresholds = opt_threshs
        print("Using optimized values: prob_thresh={prob:g}, nms_thresh={nms:g}.", opt_threshs)
        if self.basedir is not None:
            print("Saving to 'thresholds.json'.")
            save_json(opt_threshs, str(self.basedir +  '/' + 'thresholds.json'))
        return opt_threshs
Example #7
def norm_minmse(y, x):
    y = normalize(y, 0.1, 99.9)
    x = x - x.mean()
    y = y - y.mean()
    scale = np.cov(x.flatten(), y.flatten())[0, 1] / np.var(x.flatten())
    x = scale * x
    return y, x
Example #8
def Run_StarDist2D(img, mdls, which_model):
    # runs a StarDist 2D model
    axis_norm = (0, 1)  # normalize channels independently
    im = normalize(img, 1, 99.8, axis=axis_norm)
    model = mdls[which_model]
    labels, details = model.predict_instances(im)
    return labels
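A hypothetical call of the wrapper above, assuming stardist is installed and that img is a placeholder 2D image; the dictionary key is made up for illustration:

from stardist.models import StarDist2D

# Hypothetical usage: fill the model dictionary with a pretrained 2D model and run the wrapper.
mdls = {"versatile_fluo": StarDist2D.from_pretrained("2D_versatile_fluo")}
labels = Run_StarDist2D(img, mdls, which_model="versatile_fluo")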
Example #9
def load_training_data(root, image_folder, labels_folder, ext):

    # get the image and label mask paths and validate them
    image_pattern = os.path.join(root, image_folder, f'*{ext}')
    print("Looking for images with the pattern", image_pattern)
    train_images = glob(image_pattern)
    assert len(train_images) > 0, "Did not find any images"
    train_images.sort()

    label_pattern = os.path.join(root, labels_folder, f'*{ext}')
    print("Looking for labels with the pattern", image_pattern)
    train_labels = glob(label_pattern)
    assert len(train_labels) > 0, "Did not find any labels"
    train_labels.sort()

    check_training_data(train_images, train_labels)

    # normalization parameters: lower and upper percentile used for image normalization
    # maybe these should be exposed
    lower_percentile = 1
    upper_percentile = 99.8
    ax_norm = (0, 1, 2)

    train_images = [imageio.volread(im) for im in train_images]
    train_labels = [imageio.volread(im) for im in train_labels]
    check_training_images(train_images, train_labels)
    train_images = [
        normalize(im, lower_percentile, upper_percentile, axis=ax_norm)
        for im in train_images
    ]
    train_labels = [fill_label_holes(im) for im in train_labels]

    return train_images, train_labels
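Because the loader above reads volumes (imageio.volread) and normalizes over axis (0, 1, 2), its output would typically feed a StarDist3D training run. A hedged sketch under that assumption; the folder names, validation split, and config values are illustrative only:

from stardist.models import Config3D, StarDist3D

# Illustrative only: load image/label pairs, hold out ~10% for validation, train briefly.
train_images, train_labels = load_training_data("data", "images", "labels", ".tif")
n_val = max(1, len(train_images) // 10)
X_val, Y_val = train_images[:n_val], train_labels[:n_val]
X_trn, Y_trn = train_images[n_val:], train_labels[n_val:]
conf = Config3D(rays=96, grid=(1, 2, 2), n_channel_in=1)
model = StarDist3D(conf, name="stardist3d_demo", basedir="models")
model.train(X_trn, Y_trn, validation_data=(X_val, Y_val), epochs=2)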
Example #10
    def calculation_run(
            self, report_fun: Callable[[str, int],
                                       None]) -> SegmentationResult:
        self.check_limitations()
        denoised = noise_filtering_dict[
            self.parameters["noise_filtering"]["name"]].noise_filter(
                self.image.get_channel(self.parameters["channel"]),
                self.image.spacing,
                self.parameters["noise_filtering"]["values"],
            )
        model = self.get_model()
        normalized = normalize(denoised, 1, 99.8)
        segmentation, details = model.predict_instances(
            normalized, axes=self.image.return_order.replace("C", ""))
        return SegmentationResult(
            segmentation,
            self.get_segmentation_profile(),
            additional_layers={
                "description":
                AdditionalLayerDescription(details, layer_type="image"),
                "segmentation":
                AdditionalLayerDescription(segmentation, layer_type="labels")
            })
Example #11
def test_mesh_export(model3d):
    model = model3d
    img, mask = real_image3d()
    x = normalize(img, 1, 99.8)
    labels, polys = model.predict_instances(x, nms_thresh=.5, overlap_label=-3)

    s = export_to_obj_file3D(polys, "mesh.obj", scale=(.2, .1, .1))
    return s
Example #12
def predictions(model_dist, i):
    img = normalize(X[i], 1, 99.8, axis=axis_norm)
    input = torch.tensor(img)
    input = input.unsqueeze(0).unsqueeze(0)  # unsqueeze twice: add batch and channel dims
    dist, prob = model_dist(input)
    dist_numpy = dist.detach().cpu().numpy().squeeze()
    prob_numpy = prob.detach().cpu().numpy().squeeze()
    return dist_numpy, prob_numpy
Example #13
def load_images(img_dir, img_ext):
    # Read images, normalize images
    print('Loading input images...')
    img_names = sorted(glob(f'{img_dir}/*.{img_ext}'))
    if len(img_names) == 0:
        print(f'No images were able to be loaded from: {img_dir}.')
        return
    imgs = [normalize(img, 1, 99.8, axis=(0, 1)) for img in tqdm(list(map(io.imread, img_names)))]
    return img_names, imgs
Example #14
def make_target_image(tiff_image_metadata, labels):
	"""
	Loads the tiff images whose paths are given in tiff_image_metadata.
	Then it puts them in a list in the same order as labels and stacks them into an [X, Y, C] numpy array.
	Then images are normalized by scaling from 0 to 1
	"""
	filenames = [tiff_image_metadata[label] for label in labels]
	image = numpy.dstack(list(map(tifffile.imread, filenames))) #YXC
	image = normalize(image, 0, 100, axis = (0, 1))
	return numpy.moveaxis(image, 0, 1) #XYC
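A hypothetical call of the function above; the channel names and file paths are placeholders, and tifffile, numpy, and normalize are assumed to be imported as in the snippet:

# Hypothetical usage: map channel labels to tiff paths, then stack them in a fixed order.
tiff_image_metadata = {"DAPI": "dapi.tif", "GFP": "gfp.tif"}
target = make_target_image(tiff_image_metadata, labels=["DAPI", "GFP"])  # X-Y-C array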
Example #15
def get_Yest_list(Y_est,
                  tag='instance_0',
                  ix_fg_list=[0],
                  pnorm=True,
                  tipo=0,
                  pmin=0.01,
                  pmax=99.9):
    Y_est_list = []

    if tipo == 0:

        for i in np.arange(len(Y_est)):

            start = True
            for ix_fg in ix_fg_list:
                if start:
                    Y_est_i = Y_est[i][tag][:, :, :, ix_fg]
                else:
                    Y_est_i += Y_est[i][tag][:, :, :, ix_fg]
                start = False

            if (Y_est[i][tag].shape[0] == 1):
                Y_est_i = Y_est[i][tag][0, ...]

            if pnorm:
                Y_est_i = normalize(Y_est_i, pmin=pmin, pmax=pmax, clip=True)
            Y_est_list.append(Y_est_i)
    else:
        Y_est_aux = Y_est[tag]
        for i in range(Y_est_aux.shape[0]):
            start = True
            for ix_fg in ix_fg_list:
                if start:
                    Y_est_i = Y_est_aux[i, :, :, ix_fg]
                else:
                    Y_est_i += Y_est_aux[i, :, :, ix_fg]
                start = False

            if pnorm:
                Y_est_i = normalize(Y_est_i, pmin=pmin, pmax=pmax, clip=True)
            Y_est_list.append(Y_est_i)

    return Y_est_list
Example #16
def display_img(filename, norm_min_percent, norm_max_percent):
    img = tiff.imread(filename)
    img = img.astype(np.float32)
    t, x, y = img.shape[0:3]
    img = img[int(t / 2), :, :]

    img = normalize(img, norm_min_percent, norm_max_percent)
    img = img * 255

    return img
Example #17
def test_load_and_predict_big():
    model_path = path_model2d()
    model = StarDist2D(None,
                       name=model_path.name,
                       basedir=str(model_path.parent))
    img, _ = real_image2d()
    x = normalize(img, 1, 99.8)
    x = np.tile(x, (8, 8))
    labels, polygons = model.predict_instances(x)
    return labels
Example #18
def test_load_and_predict_with_overlap(model3d):
    model = model3d
    img, mask = real_image3d()
    x = normalize(img, 1, 99.8)
    prob, dist = model.predict(x, n_tiles=(1, 2, 2))
    assert prob.shape == dist.shape[:3]
    assert model.config.n_rays == dist.shape[-1]
    labels, _ = model.predict_instances(x, nms_thresh=.5, overlap_label=-3)
    assert np.min(labels) == -3
    return model, labels
Example #19
def segment_nuclei_stardist(image2d,
                            sdmodel,
                            prob_thresh=0.5,
                            overlap_thresh=0.3,
                            norm=True,
                            norm_pmin=1.0,
                            norm_pmax=99.8,
                            norm_clip=False):
    """[summary]

    :param image2d: 2d image to be segmented
    :type image2d: NumPy.Array
    :param sdmodel: StarDist 2D model
    :type sdmodel: StarDist Model
    :param prob_thresh: probability threshold, defaults to 0.5
    :type prob_thresh: float, optional
    :param overlap_thresh: overlap threshold, defaults to 0.3
    :type overlap_thresh: float, optional
    :param norm: switch on image normalization, defaults to True
    :type norm: bool, optional
    :param norm_pmin: minimum percentile for normalization, defaults to 1.0
    :type norm_pmin: float, optional
    :param norm_pmax: maximum percentile for normalization, defaults to 99.8
    :type norm_pmax: float, optional
    :param norm_clip: clipping normalization, defaults to False
    :type norm_clip: bool, optional
    :return: mask - binary mask
    :rtype: NumPy.Array
    """

    # workaround explained here to avoid errors
    # https://github.com/openai/spinningup/issues/16
    # os.environ['KMP_DUPLICATE_LIB_OK'] = 'True'

    # normalize image (only if requested)
    if norm:
        image2d = normalize(image2d,
                            pmin=norm_pmin,
                            pmax=norm_pmax,
                            axis=None,
                            clip=norm_clip,
                            eps=1e-20,
                            dtype=np.float32)

    # predict the instances of the single nuclei using the given thresholds
    mask2d, details = sdmodel.predict_instances(image2d,
                                                axes=None,
                                                normalizer=None,
                                                prob_thresh=prob_thresh,
                                                nms_thresh=overlap_thresh,
                                                n_tiles=None,
                                                show_tile_progress=True,
                                                overlap_label=None,
                                                verbose=False)

    return mask2d
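A hedged usage sketch for the function above, assuming a pretrained model from the StarDist model zoo and a placeholder single-channel image image2d:

from stardist.models import StarDist2D

# Illustrative only: load a pretrained 2D model and segment one image.
sdmodel = StarDist2D.from_pretrained("2D_versatile_fluo")
mask = segment_nuclei_stardist(image2d, sdmodel, prob_thresh=0.5, overlap_thresh=0.3)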
Example #20
def show_surface():
    model = _model3d()
    img, mask = test_image_nuclei_3d(return_mask=True)
    x = normalize(img, 1, 99.8)
    labels, polys = model.predict_instances(x)
    surface = surface_from_polys(polys)
    # add the surface
    viewer = napari.view_image(img)
    viewer.add_surface(surface)

    return viewer
Example #21
def test_load_and_predict_with_overlap():
    model_path = path_model3d()
    model = StarDist3D(None, name=model_path.name, basedir=str(model_path.parent))
    img, mask = real_image3d()
    x = normalize(img, 1, 99.8)
    prob, dist = model.predict(x, n_tiles=(1, 2, 2))
    assert prob.shape == dist.shape[:3]
    assert model.config.n_rays == dist.shape[-1]
    labels, _ = model.predict_instances(x, nms_thresh=.5,
                                        overlap_label=-3)
    assert np.min(labels) == -3
    return model, labels
Example #22
def render_label_example(model2d):
    model = model2d
    img, y_gt = real_image2d()
    x = normalize(img, 1, 99.8)
    y, _ = model.predict_instances(x)
    # im = render_label(y, img=x, alpha=0.3, alpha_boundary=1, cmap=(.3, .4, 0))
    im = render_label(y, img=x, alpha=0.3, alpha_boundary=1)
    import matplotlib.pyplot as plt
    plt.figure(1)
    plt.imshow(im)
    plt.show()
    return im
Example #23
    def _predict_stardist(self, model, file, T, channel, prob_thresh,
                          nms_thresh, out_dir):

        axes = 'TCYX'
        # if T.ndim==3:
        #     timelapse = T
        if T.ndim == 4:
            timelapse = T[:, channel]
        else:
            raise ValueError(
                'Data has unexpected number of dimensions. Weird.')

        # normalise
        print('Normalizing each frame to run StarDist', flush=True)
        timelapse = np.stack(
            [normalize(frame, 1, 99.8) for frame in timelapse])
        print(
            f"Timelapse has axes {axes.replace('C','')} with shape {timelapse.shape}"
        )

        polygons = [
            model.predict_instances(frame,
                                    nms_thresh=nms_thresh,
                                    prob_thresh=prob_thresh)[1]
            for frame in tqdm(timelapse)
        ]

        if prob_thresh is None:
            prob_string = 'default'
        else:
            prob_string = f'{prob_thresh:.2f}'

        if nms_thresh is None:
            nms_string = 'default'
        else:
            nms_string = f'{nms_thresh:.2f}'

        roi_path = out_dir / f"{file.stem}_prob={prob_string}_nms={nms_string}"
        roi_path.parent.mkdir(parents=True, exist_ok=True)
        rois_python = Path(str(roi_path) + '.npz')
        rois_imagej = Path(str(roi_path) + '.zip')

        print(f'Saving ImageJ ROIs to {rois_imagej}')
        export_imagej_rois(str(rois_imagej),
                           [poly['coord'] for poly in polygons])

        print(f'Saving Python rois to {rois_python}')
        np.savez(
            str(rois_python),
            coord=[p['coord'] for p in polygons],
            points=[p['points'] for p in polygons],
            prob=[p['prob'] for p in polygons],
        )
Example #24
def test_polygon_order_2D(model2d):
    model = model2d
    img = real_image2d()[0]
    img = normalize(img, 1, 99.8)
    labels, polys = model.predict_instances(img, nms_thresh=0)

    for i, coord in enumerate(polys['coord'], start=1):
        # polygon representing object with id i
        p = Polygon(coord, shape_max=labels.shape)
        # mask of object with id i in label image (not occluded since nms_thresh=0)
        mask_i = labels[p.slice] == i
        assert np.all(p.mask == mask_i)
Example #25
def STARPrediction3D(image,
                     model,
                     n_tiles,
                     MaskImage=None,
                     smartcorrection=None,
                     UseProbability=True):

    copymodel = model
    image = normalize(image, 1, 99.8, axis=(0, 1, 2))
    shape = [image.shape[1], image.shape[2]]
    image = zero_pad_time(image, 64, 64)
    grid = copymodel.config.grid

    MidImage, details = model.predict_instances(image, n_tiles=n_tiles)
    SmallProbability, SmallDistance = model.predict(image, n_tiles=n_tiles)

    StarImage = MidImage[:image.shape[0], :shape[0], :shape[1]]
    SmallDistance = MaxProjectDist(SmallDistance, axis=-1)
    Probability = np.zeros([
        SmallProbability.shape[0] * grid[0],
        SmallProbability.shape[1] * grid[1],
        SmallProbability.shape[2] * grid[2]
    ])
    Distance = np.zeros([
        SmallDistance.shape[0] * grid[0], SmallDistance.shape[1] * grid[1],
        SmallDistance.shape[2] * grid[2]
    ])
    # We only allow the grid parameter to be 1 along the Z axis
    for i in range(0, SmallProbability.shape[0]):
        Probability[i, :] = cv2.resize(
            SmallProbability[i, :],
            dsize=(SmallProbability.shape[2] * grid[2],
                   SmallProbability.shape[1] * grid[1]))
        Distance[i, :] = cv2.resize(SmallDistance[i, :],
                                    dsize=(SmallDistance.shape[2] * grid[2],
                                           SmallDistance.shape[1] * grid[1]))

    if UseProbability:

        Probability[Probability < globalthreshold] = 0

        MaxProjectDistance = Probability[:image.shape[0], :shape[0], :shape[1]]

    else:

        MaxProjectDistance = Distance[:image.shape[0], :shape[0], :shape[1]]

    Watershed, Markers = WatershedwithMask3D(
        MaxProjectDistance.astype('uint16'), StarImage.astype('uint16'),
        MaskImage.astype('uint16'), grid)
    Watershed = fill_label_holes(Watershed.astype('uint16'))

    return Watershed, MaxProjectDistance, StarImage
Example #26
def test_predict_dense_sparse(model2d):
    model = model2d
    img, mask = real_image2d()
    x = normalize(img, 1, 99.8)
    labels1, res1 = model.predict_instances(x, n_tiles=(2, 2), sparse=False)
    labels2, res2 = model.predict_instances(x, n_tiles=(2, 2), sparse=True)
    assert np.allclose(labels1, labels2)
    assert all(
        np.allclose(res1[k], res2[k])
        for k in set(res1.keys()).union(set(res2.keys()))
        if isinstance(res1[k], np.ndarray))
    return labels1, res1, labels2, res2
Example #27
        def __getitem__(self, i):

            # Read raw images
            if self.Normalize == True:
                x = ReadFloat(self.files[i])
                x = normalize(x, 1, 99.8, axis=self.axis_norm)
            if self.labelMe == True:
                # Read label images
                x = ReadInt(self.files[i])
            return x
Example #28
def test_load_and_predict(model3d):
    model = model3d
    img, mask = real_image3d()
    x = normalize(img, 1, 99.8)
    prob, dist = model.predict(x, n_tiles=(1, 2, 2))
    assert prob.shape == dist.shape[:3]
    assert model.config.n_rays == dist.shape[-1]
    labels, _ = model.predict_instances(x)
    assert labels.shape == img.shape[:3]
    stats = matching(mask, labels, thresh=0.5)
    assert (stats.fp, stats.tp, stats.fn) == (0, 30, 21)
    return model, labels
Example #29
def test_pretrained_integration():
    from stardist.models import StarDist2D
    img = normalize(real_image2d()[0])

    model = StarDist2D.from_pretrained("2D_versatile_fluo")
    prob, dist = model.predict(img)

    y1, res1 = model._instances_from_prediction(img.shape,
                                                prob,
                                                dist,
                                                nms_thresh=.3)
    return y1, res1
Example #30
def test_optimize_thresholds(model2d):
    model = model2d
    img, mask = real_image2d()
    x = normalize(img, 1, 99.8)

    res = model.optimize_thresholds([x], [mask],
                                    nms_threshs=[.3, .5],
                                    iou_threshs=[.3, .5],
                                    optimize_kwargs=dict(tol=1e-1),
                                    save_to_json=False)

    np.testing.assert_almost_equal(res["prob"], 0.454617141955, decimal=3)
    np.testing.assert_almost_equal(res["nms"], 0.3, decimal=3)