Example #1
def test_NRMSE_no_int_overflow():
    camf = cam.astype(np.float32)
    cam_noisyf = cam_noisy.astype(np.float32)
    assert_almost_equal(compare_mse(cam, cam_noisy),
                        compare_mse(camf, cam_noisyf))
    assert_almost_equal(compare_nrmse(cam, cam_noisy),
                        compare_nrmse(camf, cam_noisyf))
Example #2
def get_errors(W, v, proj):
    """
    Calculate the errors between projections and reprojections.
    =============
    --VARIABLES--
    """
    projSize = proj.shape[0] * proj.shape[-1]
    reproj = W.FP(v)

    # Swap axes for easier computation
    proj = np.swapaxes(proj, 0, 1)
    reproj = np.swapaxes(reproj, 0, 1)

    # Calculate error in the entire domain
    spray_range = np.max(
        [proj.max(axis=(1, 2)), reproj.max(axis=(1, 2))], axis=0)
    NRMSE = np.array([compare_nrmse(x, y) for (x, y) in zip(proj, reproj)])
    mass = np.array(
        [compare_nrmse(x.sum(), y.sum()) for (x, y) in zip(proj, reproj)])
    PSNR = np.array([
        compare_psnr(x, y, data_range=sr)
        for (x, y, sr) in zip(proj, reproj, spray_range)
    ])
    SSIM = np.array([
        compare_ssim(x, y, data_range=sr)
        for (x, y, sr) in zip(proj, reproj, spray_range)
    ])

    return {'NRMSE': NRMSE, 'Mass': mass, 'PSNR': PSNR, 'SSIM': SSIM}
def test_NRMSE_errors():
    x = np.ones(4)
    # shape mismatch
    with testing.raises(ValueError):
        compare_nrmse(x[:-1], x)
    # invalid normalization name
    with testing.raises(ValueError):
        compare_nrmse(x, x, 'foo')
def test_NRMSE_no_int_overflow():
    camf = cam.astype(np.float32)
    cam_noisyf = cam_noisy.astype(np.float32)
    with expected_warnings(['DEPRECATED']):
        assert_almost_equal(compare_mse(cam, cam_noisy),
                            compare_mse(camf, cam_noisyf))
        assert_almost_equal(compare_nrmse(cam, cam_noisy),
                            compare_nrmse(camf, cam_noisyf))
def test_NRMSE_errors():
    x = np.ones(4)
    with expected_warnings(['DEPRECATED']):
        # shape mismatch
        with testing.raises(ValueError):
            compare_nrmse(x[:-1], x)
        # invalid normalization name
        with testing.raises(ValueError):
            compare_nrmse(x, x, norm_type='foo')
def test_NRMSE():
    x = np.ones(4)
    y = np.asarray([0., 2., 2., 2.])
    assert_equal(compare_nrmse(y, x, 'mean'), 1/np.mean(y))
    assert_equal(compare_nrmse(y, x, 'Euclidean'), 1/np.sqrt(3))
    assert_equal(compare_nrmse(y, x, 'min-max'), 1/(y.max()-y.min()))

    # mixed precision inputs are allowed
    assert_almost_equal(compare_nrmse(y, np.float32(x), 'min-max'),
                        1 / (y.max() - y.min()))
Example #9
def get_distance_fn(dist_metric, a, b):
    if dist_metric == 'mse':
        return np.sum((a - b)**2) / float(a.size)
    if dist_metric == 'ssim':
        return compare_ssim(a, b, multichannel=True)
    if dist_metric == 'nrmse_euc':
        return compare_nrmse(a, b, norm_type='Euclidean')
    if dist_metric == 'nrmse_minmax':
        return compare_nrmse(a, b, norm_type='min-max')
    if dist_metric == 'nrmse_mean':
        return compare_nrmse(a, b, norm_type='mean')
    if dist_metric == 'psnr':
        return -1 * compare_psnr(a, b)
Example #10
def analyze(directory, pfile):
    images = sorted([
        join(directory, f) for f in listdir(directory)
        if isfile(join(directory, f)) and ".png" in f
    ])
    n = len(images)
    data = Data(n)
    image_i = imload(images.pop(0))
    t = 0
    for path in images:
        image_f = imload(path)

        data.ssim[t] = measure.compare_ssim(image_f,
                                            image_i,
                                            multichannel=True)
        data.nrmse[t] = measure.compare_nrmse(image_f, image_i)
        data.manhattan[t] = np.sum(np.abs(image_f - image_i))
        data.path[t] = basename(path)

        image_i = image_f
        t += 1

    data.path[t] = ""
    data.ssim_fft = np.fft.fft(data.ssim)
    data.nrmse_fft = np.fft.fft(data.nrmse)
    data.manhattan_fft = np.fft.fft(data.manhattan)

    pickle.dump(data, open(pfile, 'wb'))
Example #11
def run_nrmse(lead_frame: np.ndarray, following_frame: np.ndarray,
              lead_frame_number: int,
              following_frame_number: int) -> Tuple[int, int, float]:
    score: float = compare_nrmse(lead_frame,
                                 following_frame,
                                 norm_type="min-max")
    return lead_frame_number, following_frame_number, score
Example #12
def run_metrics(image_file_name1, image_file_name2):
    image_name1 = io.imread(image_file_name1)
    image_name2 = io.imread(image_file_name2)
    peak_signal_to_noise_ratio = measure.compare_psnr(image_name1, image_name2)
    print("PSNR Peak signal to noise ratio is %s" % peak_signal_to_noise_ratio)
    mse = measure.compare_mse(image_name1, image_name2)
    print("MSE Mean square error between the images is %s" % mse)
    rmse = measure.compare_nrmse(image_name1, image_name2)
    print("RMSE Normalised root mean square error between the images is %s" %
          rmse)
    ssim = measure.compare_ssim(image_name1, image_name2, multichannel=True)
    print("SSIM Structural Similarity Index is %s" % ssim)
    #[M3,M4] = minkowski_distance(image_name1,image_name2)
    #print ("Minkowski distance is %s %s"%(M3,M4))
    #AD = average_difference(image_name1,image_name2)
    #print ("AD Average difference is %s"%AD)
    #SC = structural_content(image_name1,image_name2)
    #print ("SC Structural Content is %s"%SC)
    #NK = normalised_cross_correlation(image_name1,image_name2)
    #print ("NK normalised cross correlation is %s"%NK)
    #MD = maximum_difference(image_name1,image_name2)
    #print ("Maximum difference is %s"%MD)
    return {
        'peaktonoise': peak_signal_to_noise_ratio,
        'mse': mse,
        'rmse': rmse,
        'ssim': ssim,
        'score': peak_signal_to_noise_ratio
    }
Example #13
    def test_shepp_logan(self):
        '''The much-abused Shepp-Logan.'''

        ph = shepp_logan(self.N)
        ph /= np.max(ph.flatten())
        phs = ph[..., None] * self.mps

        kspace = np.fft.ifftshift(np.fft.fft2(np.fft.fftshift(phs,
                                                              axes=(0, 1)),
                                              axes=(0, 1)),
                                  axes=(0, 1))
        kspace_u = np.array(np.zeros_like(kspace))  # wrap for pylint
        kspace_u[:, ::2, :] = kspace[:, ::2, :]
        ctr = int(self.N / 2)
        pd = 5
        calib = kspace[:, ctr - pd:ctr + pd, :].copy()

        recon = grappa(kspace_u, calib, (5, 5), coil_axis=-1)
        recon = np.fft.fftshift(np.fft.ifft2(np.fft.ifftshift(recon,
                                                              axes=(0, 1)),
                                             axes=(0, 1)),
                                axes=(0, 1))
        recon = np.abs(np.sqrt(np.sum(recon * np.conj(recon), axis=-1)))
        recon /= np.max(recon.flatten())

        # Make sure less than 4% NRMSE
        # print(compare_nrmse(ph, recon))
        self.assertTrue(compare_nrmse(ph, recon) < .04)
Example #14
def getErrorMetrics(im_pred, im_gt, mask=None):
    # flatten array
    im_pred = np.array(im_pred).astype(float).flatten()
    im_gt = np.array(im_gt).astype(float).flatten()
    if mask is not None:
        mask = np.array(mask).astype(float).flatten()
        im_pred = im_pred[mask > 0]
        im_gt = im_gt[mask > 0]
    mask = np.abs(im_gt.flatten()) > 0

    # check dimension
    assert (im_pred.flatten().shape == im_gt.flatten().shape)

    # NRMSE
    rmse_pred = compare_nrmse(im_gt, im_pred)

    # PSNR
    try:
        psnr_pred = compare_psnr(im_gt, im_pred)
    except:
        psnr_pred = psnr(im_gt, im_pred)
        print('use psnr')

    # ssim
    ssim_pred = compare_ssim(im_gt, im_pred)
    score_ismrm = sum((np.abs(im_gt.flatten() - im_pred.flatten()) < 0.1) *
                      mask) / (sum(mask) + 0.0) * 10000

    return {
        'rmse': rmse_pred,
        'psnr': psnr_pred,
        'ssim': ssim_pred,
        'score_ismrm': score_ismrm
    }
Example #15
def compare_all(image_a, image_b):
    # peak signal to noise ratio (see https://en.wikipedia.org/wiki/Peak_signal-to-noise_ratio)
    psnr_score = compare_psnr(image_a, image_b)
    print("PSNR: {}".format(str(psnr_score)), end=" dB\n\n")

    # mean squared error (see https://en.wikipedia.org/wiki/Mean_squared_error)
    mse_score = compare_mse(image_a, image_b)
    print("MSE: {}".format(str(mse_score)))
    print("Range [0, +INF) where 0 is identical", end="\n\n")

    # normalized root mean squared error (see https://en.wikipedia.org/wiki/Root-mean-square_deviation)
    nrmse_score = compare_nrmse(image_a, image_b, norm_type='Euclidean')
    print("NRMSE: {}".format(str(nrmse_score)), end="\n\n")

    # structural similarity measure (see https://en.wikipedia.org/wiki/Structural_similarity)
    ssim_score = compare_ssim(image_a, image_b, full=False, multichannel=True)
    print("SSIM: {}".format(str(ssim_score)))
    print("Range [-1, +1] where +1 is identical", end="\n\n")

    pae_score = pae(image_a, image_b)
    print("PAE: {}".format(str(pae_score)))
    print("Range [0, +INF) where 0 is identical", end="\n\n")

    mae_score = mae(image_a, image_b)
    print("MAE: {}".format(str(mae_score)))
    print("Range [0, +INF) where 0 is identical", end="\n\n")

    return {
        'psnr_score': psnr_score,
        'mse_score': mse_score,
        'nrmse_score': nrmse_score,
        'ssim_score': ssim_score,
        'mae_score': mae_score,
    }
Example #16
def plot_image_diff(noisy, reference, plot_title):
    """Helper function to display denoising"""
    difference = noisy - reference
    mse = compare_mse(reference, noisy)
    nrmse = compare_nrmse(reference, noisy)
    psnr = compare_psnr(reference, noisy)
    subtitle = 'norm: %(norm).4f\nMSE: %(MSE).4f\nNRMSE: %(NRMSE).4f\nPSNR: %(PSNR).4fdB' % {
        'norm': np.sqrt(np.sum(difference**2)),
        'MSE': mse,
        'NRMSE': nrmse,
        'PSNR': psnr
    }
    print(
        plot_title +
        ': norm: %(norm).4f\tMSE: %(MSE).4f\tNRMSE: %(NRMSE).4f\tPSNR: %(PSNR).4fdB'
        % {
            'norm': np.sqrt(np.sum(difference**2)),
            'MSE': mse,
            'NRMSE': nrmse,
            'PSNR': psnr
        })
    plt.gray()
    plt.subplot(1, 2, 1)
    plt.title('Noisy')
    plt.imshow(noisy)
    plt.xticks(())
    plt.yticks(())
    plt.subplot(1, 2, 2)
    plt.title(subtitle)
    plt.imshow(reference)
    plt.xticks(())
    plt.yticks(())
def compare_stat(im_pred, im_gt, max_value):
    # im_pred = np.array(im_pred).astype(np.float).flatten()
    # im_gt = np.array(im_gt).astype(np.float).flatten()
    # mask=np.abs(im_gt.flatten())>0

    # check dimension
    assert(im_pred.flatten().shape==im_gt.flatten().shape)

    data_range = np.max(im_gt)
    if data_range < 1:   # for data in the 0~1 range
        data_range = 1
    # NRMSE
    try:
        rmse_pred = compare_nrmse(im_gt, im_pred)
    except:
        rmse_pred = float('nan')

    # PSNR
    try:
        psnr_pred = compare_psnr(im_gt, im_pred, data_range=data_range)
        # pdb.set_trace()
    except:
        psnr_pred = float('nan')

    # ssim
    try:
        ssim_pred = compare_ssim(im_gt, im_pred, data_range=data_range)
        # score_ismrm = sum((np.abs(im_gt.flatten()-im_pred.flatten())<0.1)*mask)/(sum(mask)+0.0)*10000
    except:
        ssim_pred = float('nan')
        score_ismrm = float('nan')

    return {'rmse': rmse_pred, 'psnr': psnr_pred, 'ssim': ssim_pred}  # ,'score_ismrm': score_ismrm}
Example #18
def get_nrmse(real, fake):
    nrmse = 0
    length = len(real)
    for i in range(length):
        nrmse += compare_nrmse(real[i][0].cpu().detach().numpy(),
                               fake[i][0].cpu().detach().numpy())
    return np.array([nrmse / length])
Example #19
def getErrorMetrics(im_pred, im_gt, mask=None):
    im_pred = np.array(im_pred).astype(float)
    im_gt = np.array(im_gt).astype(float)
    # sanity check
    assert (im_pred.flatten().shape == im_gt.flatten().shape)
    # RMSE
    rmse_pred = compare_nrmse(im_true=im_gt, im_test=im_pred)
    # PSNR
    psnr_pred = compare_psnr(im_true=im_gt, im_test=im_pred)
    # SSIM
    ssim_pred = compare_ssim(X=im_gt, Y=im_pred)
    # MSE
    mse_pred = mean_squared_error(y_true=im_gt.flatten(),
                                  y_pred=im_pred.flatten())
    # MAE
    mae_pred = mean_absolute_error(y_true=im_gt.flatten(),
                                   y_pred=im_pred.flatten())
    print("Compare prediction with groundtruth CT:")
    print(
        'mae: {mae_pred:.4f} | mse: {mse_pred:.4f} | rmse: {rmse_pred:.4f} | psnr: {psnr_pred:.4f} | ssim: {ssim_pred:.4f}'
        .format(mae_pred=mae_pred,
                mse_pred=mse_pred,
                rmse_pred=rmse_pred,
                psnr_pred=psnr_pred,
                ssim_pred=ssim_pred))
    return mae_pred, mse_pred, rmse_pred, psnr_pred, ssim_pred
Example #20
def NRMSE(srcpath, dstpath, mse_type='Euclidean', scale=256):
    scr = io.imread(srcpath)
    dst = io.imread(dstpath)
    scr = transform.resize(scr, (scale, scale))
    dst = transform.resize(dst, (scale, scale))
    nrmse = measure.compare_nrmse(scr, dst, norm_type=mse_type)
    return nrmse
Example #21
def img_comp(gt, pr, mses=None, nrmses=None, psnrs=None, ssims=None):
    if ssims is None:
        ssims = []
    if psnrs is None:
        psnrs = []
    if nrmses is None:
        nrmses = []
    if mses is None:
        mses = []
    gt, pr = np.squeeze(gt), np.squeeze(pr)
    gt = gt.astype(np.float32)
    if gt.ndim == 2:
        n = 1
        gt = np.reshape(gt, (1, gt.shape[0], gt.shape[1]))
        pr = np.reshape(pr, (1, pr.shape[0], pr.shape[1]))
    else:
        n = np.size(gt, 0)

    for i in range(n):
        mses.append(
            compare_mse(prctile_norm(np.squeeze(gt[i])),
                        prctile_norm(np.squeeze(pr[i]))))
        nrmses.append(
            compare_nrmse(prctile_norm(np.squeeze(gt[i])),
                          prctile_norm(np.squeeze(pr[i]))))
        psnrs.append(
            compare_psnr(prctile_norm(np.squeeze(gt[i])),
                         prctile_norm(np.squeeze(pr[i])), 1))
        ssims.append(
            compare_ssim(prctile_norm(np.squeeze(gt[i])),
                         prctile_norm(np.squeeze(pr[i]))))
    return mses, nrmses, psnrs, ssims
    def compare_memories(self, stored_memory, cur_memory):
        # mse = np.sum((stored_memory.astype("float") - cur_memory.astype("float")) ** 2)
        # mse /= float(stored_memory.shape[0] * stored_memory.shape[1])
        mse = compare_nrmse(stored_memory, cur_memory, norm_type="euclidean")
        psnr = compare_psnr(stored_memory, cur_memory)
        (score, diff) = compare_ssim(stored_memory, cur_memory, full=True)
        print(str(score) + " " + str(mse) + " " + str(psnr))
        return score, mse, psnr
Example #23
    def _run_single_scan(self, idx):
        in_file_1_path = self._in_data_folder.get_file_path(idx)
        in_file_2_path = self._in_data_folder_2.get_file_path(idx)

        in_img_1 = ScanWrapper(in_file_1_path).get_data()
        in_img_2 = ScanWrapper(in_file_2_path).get_data()

        nrmse = compare_nrmse(np.abs(in_img_1), np.abs(in_img_2))
        self._nrmse_diff.append(nrmse)
def get_features_array(imgs_true, imgs_pred):
    return np.array(
        list(
            map(
                lambda x, y:
                (compare_ssim(x.reshape(256, 256), y.reshape(256, 256)),
                 compare_mse(x.reshape(256, 256), y.reshape(256, 256)),
                 compare_nrmse(x.reshape(256, 256), y.reshape(256, 256)),
                 compare_psnr(x.reshape(256, 256), y.reshape(256, 256))),
                imgs_true, imgs_pred)))
Example #25
def print_vector_comparison(X, Y, comparison_name=None):
    difference = X - Y
    norm = np.sqrt(np.sum(difference ** 2))
    mse = compare_mse(X, Y)
    nrmse = compare_nrmse(X, Y)
    cos = cosine_similarity(X, Y)
    if comparison_name is not None:
        printing_text = comparison_name + ':Norm:%(NORM).4f\tMSE:%(MSE).4f\tNRMSE:%(NRMSE).4f\tCos:%(COS).4f' % {
            'NORM': norm, 'MSE': mse, 'NRMSE': nrmse, 'COS': cos}
        print(printing_text)
    return norm, mse, nrmse, cos
def NRMSE(srcpath, dstpath, gray2rgb=False, scale=(256, 256), mse_type='Euclidean'):
    scr = io.imread(srcpath)
    dst = io.imread(dstpath)
    if gray2rgb:
        dst = np.expand_dims(dst, axis = 2)
        dst = np.concatenate((dst, dst, dst), axis = 2)
    if scale != (0, 0):
        scr = cv2.resize(scr, scale)
        dst = cv2.resize(dst, scale)
    nrmse = measure.compare_nrmse(scr, dst, norm_type = mse_type)
    return nrmse
Example #27
def test(model, data, mask, originalData, meta, contours, args):
    dices = []
    minSize = 15
    startF = 0
    endF = 1
    totalDice = [0 for i in range(startF, endF)]

    for testId in range(len(data)):
        if args.cuda:
            gd = torch.FloatTensor(np.reshape(data[testId], [1, 1, 400, 400]))
            contour = np.reshape(mask[testId], [1, 1, 400, 400])
            print("mask", mask.min(), mask.max())
            pred_ = generatePrediction(model, gd)

            for i in range(startF, endF):
                pred = pred_.copy()
                filter = 0.65  # + 0.010 * i
                pred[pred < filter] = 0
                pred[pred > 0] = 1

                labels = filterPrediction(pred, minSize)
                start = timer()
                pred2, conts = computeContours(originalData[testId], labels)
                end = timer()
                print("finding isocontours too ", end - start, "seconds")
                print("fitler", filter, len(pred[pred > 0]))

                dice, jaccard = computeOverlap(mask[testId], pred2)
                hausdorffs = hausdorff(mask[testId], pred2)
                f1, acc = compareMasks(mask[testId], pred2)
                print("AVG hausdorff: {}".format(
                    sum(hausdorffs) / len(hausdorffs)))
                print("Acc: {}".format(acc))
                print("F1: {}".format(f1))
                print("Dice: {}".format(dice))
                print("Jaccard: {}".format(jaccard))
                print("PSNR: {}".format(
                    compare_psnr(mask[testId], pred2, data_range=1)))
                print("SSIM: {}".format(
                    compare_ssim(mask[testId], pred2, data_range=1)))
                print("MSE: {}".format(compare_mse(mask[testId], pred2)))
                print("NRMSE: {}".format(compare_nrmse(mask[testId], pred2)))
                dices.append(dice)
                totalDice[i - startF] += dice

    dices = np.array(dices)
    print(np.average(dices), np.min(dices), np.max(dices))
    print(totalDice)
    for i, d in enumerate(totalDice):
        print("filter", 0.6 + (i + startF) / 100, "dice", d / len(data))

    print("end")
Example #28
def printImageDiff(img1Path, img2Path):
    imA = cv2.imread(img1Path)
    imB = cv2.imread(img2Path)

    grayA = cv2.cvtColor(imA, cv2.COLOR_BGR2GRAY)
    grayB = cv2.cvtColor(imB, cv2.COLOR_BGR2GRAY)

    (score, grad) = compare_ssim(grayA, grayB, full=True)
    #grad = (grad * 255).astype("uint8")

    nrmse = compare_nrmse(grayA, grayB)

    return (score, nrmse)
Example #29
def print_comparison(image, reference, method_name):
    difference = image - reference
    mse = compare_mse(reference, image)
    nrmse = compare_nrmse(reference, image)
    psnr = compare_psnr(reference, image)
    text = method_name + ': norm: %(norm).4f\tMSE: %(MSE).4f\tNRMSE: %(NRMSE).4f\tPSNR: %(PSNR).4f' % {
        'norm': np.sqrt(np.sum(difference**2)),
        'MSE': mse,
        'NRMSE': nrmse,
        'PSNR': psnr
    }
    print(text, file=results)
    print(text)
Example #30
def compute_motion_score(prev_img, now_img):
    """Calculate the motion between images"""
    rmse = compare_nrmse(prev_img, now_img)
    ssim = compare_ssim(prev_img, now_img, multichannel=True)

    # Experimentally derived equation:
    # ~100 should be a natural threshold
    score = (rmse + (1 - ssim) * 1.5) / .35 * 100

    # TODO Remove
    logging.info('MOTION {} {} {}'.format(rmse, ssim, score))

    return score
def volume_nrmse(data, truth):

    values = np.zeros(data.shape[2])

    jj = 0
    for ii in range(data.shape[2]):
        # if np.sum(data[:,:,ii] - truth[:,:,ii]) == 0:
        #     jj += 1
        #     continue

        values[ii] = measure.compare_nrmse(data[:, :, ii], truth[:, :, ii])

    # return np.mean(values[:-1-jj]), np.std(values[:-1-jj])
    return values[:-1 - jj]
Example #32
def test_NRMSE_errors():
    x = np.ones(4)
    with pytest.raises(ValueError):
        compare_nrmse(x.astype(np.uint8), x.astype(np.float32))
    with pytest.raises(ValueError):
        compare_nrmse(x[:-1], x)
    # invalid normalization name
    with pytest.raises(ValueError):
        compare_nrmse(x, x, 'foo')
Example #33
def get_denoise_metrics(input, output, report):
    image_file_name1 = input
    image_file_name2 = output

    image_name1 = io.imread(image_file_name1)
    image_name2 = io.imread(image_file_name2)

    # estimate the standard deviation of the images

    std_1 = numpy.std(numpy.array(image_name1))
    std_2 = numpy.std(numpy.array(image_name2))

    print("std is %2.10f" % std_1)

    # print ("Standard deviation of the images are"%(std_1,std_2))

    # estimate the peak signal to noise ratio (PSNR) between the image

    peak_signal_to_noise_ratio = measure.compare_psnr(image_name1, image_name2)

    print("Peak signal to noise ratio is %s" % peak_signal_to_noise_ratio)

    # estimate the mean square error between the images

    mse = measure.compare_mse(image_name1, image_name2)

    print("Mean square error between the images is %s" % mse)

    # estimate the normalised root mean square error between the images

    rmse = measure.compare_nrmse(image_name1, image_name2)

    print("Normalised root mean square error between the images is %s" % rmse)

    resp = open(report, 'w')
    resp.write("std1 is %2.10f \n" % std_1)
    resp.write("std2 is %2.10f \n" % std_2)
    resp.write(
        "Peak signal to noise ratio is %s \n" %
        peak_signal_to_noise_ratio)
    resp.write("Mean square error between the images is %s \n" % mse)
    resp.write(
        "Normalised root mean squre error between the images is %s \n" %
        rmse)
    resp.close()
Example #34
def run_metrics(image_file_name1, image_file_name2):
    image_name1 = io.imread(image_file_name1)
    image_name2 = io.imread(image_file_name2)
    peak_signal_to_noise_ratio = measure.compare_psnr(image_name1, image_name2)
    print("PSNR Peak signal to noise ratio is %s" % peak_signal_to_noise_ratio)
    mse = measure.compare_mse(image_name1, image_name2)
    print("MSE Mean square error between the images is %s" % mse)
    rmse = measure.compare_nrmse(image_name1, image_name2)
    print("RMSE Normalised root mean square error between the images is %s" % rmse)
    ssim = measure.compare_ssim(image_name1, image_name2, multichannel=True)
    print("SSIM Structural Similarity Index is %s" % ssim)
    #[M3,M4] = minkowski_distance(image_name1,image_name2)
    #print ("Minkowski distance is %s %s"%(M3,M4))
    #AD = average_difference(image_name1,image_name2)
    #print ("AD Average difference is %s"%AD)
    #SC = structural_content(image_name1,image_name2)
    #print ("SC Structural Content is %s"%SC)
    #NK = normalised_cross_correlation(image_name1,image_name2)
    #print ("NK normalised cross correlation is %s"%NK)
    #MD = maximum_difference(image_name1,image_name2)
    #print ("Maximum difference is %s"%MD)
    return {'peaktonoise': peak_signal_to_noise_ratio, 'mse': mse, 'rmse': rmse, 'ssim': ssim, 'score': peak_signal_to_noise_ratio}
Example #35
print(image_name2.shape)

# estimate the standard deviation of the images

std_1 = numpy.std(numpy.array(image_name1))
std_2 = numpy.std(numpy.array(image_name2))

print("std is %2.10f" % std_1)

#print ("Standard deviation of the images are"%(std_1,std_2))

# estimate the peak signal to noise ratio (PSNR) between the images

peak_signal_to_noise_ratio = measure.compare_psnr(image_name1, image_name2)

print("Peak signal to noise ratio is %s" % peak_signal_to_noise_ratio)

# estimate the mean square error between the images

mse = measure.compare_mse(image_name1, image_name2)

print("Mean square error between the images is %s" % mse)

# estimate the normalised root mean square error between the images

rmse = measure.compare_nrmse(image_name1, image_name2)
ssim = measure.compare_ssim(image_name1, image_name2)

print("Normalised root mean square error between the images is %s" % rmse)
print("SSIM is %s" % ssim)