def check_equal(self, old_frame, frame):
    """Return the SSIM-based similarity verdict for two consecutive frames.

    True when the structural similarity between *old_frame* and *frame*
    exceeds the module-level SIMILARITY_THRES constant.
    """
    score = ssim(
        old_frame,
        frame,
        data_range=frame.max() - frame.min(),
        multichannel=True,
    )
    return score > SIMILARITY_THRES
def SSIM(patch, table=QUANTIZATIONTABLE):
    """Search for the smallest per-block coefficient count that keeps SSIM high.

    Compresses *patch* (8x8 JPEG-style pipeline) while keeping an increasing
    number ``i`` of zig-zag coefficients, and returns the first ``i`` whose
    reconstruction reaches an SSIM quality target.

    Parameters
    ----------
    patch : ndarray
        Grayscale image patch; both dimensions must be multiples of 8.
    table : ndarray
        Quantization table; recursively softened (divided by 1.1 / 1.2)
        when quality stalls or is too low.

    Returns
    -------
    (int, float, ndarray)
        Chosen coefficient count, achieved SSIM metric, and the (possibly
        adjusted) quantization table.
    """
    assert patch.shape[0] % 8 == 0 and patch.shape[1] % 8 == 0, \
        "Invalid sampling area make sure sample area is equally divisible by 8"
    # BUGFIX: the original wrote into the default argument in place
    # (table[0][0] = 8), silently corrupting the shared QUANTIZATIONTABLE
    # for every later call. Work on a private copy instead.
    table = np.array(table, copy=True)
    last_metric, rep = 0, 0
    for i in range(2, 64):
        compressed_data = array.array('b', [])
        partitions = []
        # Shift to signed range, split into 8x8 blocks, and keep only the
        # first ``i`` zig-zag DCT coefficients of each block.
        list_of_patches = split((patch.copy() - 128).astype(np.int8), 8, 8)
        for block in list_of_patches:
            compressed_data.extend(
                capture(zig_zag(quantize(dct_2d(block), table=table)), values=i))
        # Rebuild every block from its ``i`` retained coefficients.
        compressed_split = [compressed_data[z:z + i]
                            for z in range(0, len(compressed_data), i)]
        for chunk in compressed_split:
            partitions.append(
                idct_2d(undo_quantize(zig_zag_reverse(rebuild(chunk)),
                                      table=table)) + 128)
        index = merge_blocks(partitions, int(1), int(1)).astype(np.uint8)
        metric = ssim(patch, index, data_range=index.max() - index.min())
        if metric > 0.96:
            # Quality target hit: clamp the DC quantizer and return an even i.
            if table[0][0] < 8:
                table[0][0] = 8
            if i % 2 != 0:
                i += 1
            return i, metric, table
        if abs(last_metric - metric) < 0.0000000001:
            # Metric has stalled; accept if already good, otherwise soften
            # the quantization table and retry.
            if metric > 0.94:
                if table[0][0] < 8:
                    table[0][0] = 8
                if i % 2 != 0:
                    i += 1
                return i - rep, metric, table
            return SSIM(patch, table=np.round(table / 1.1))
        rep += 1
        if rep == 4:
            # Refresh the stall reference every 4 iterations.
            last_metric = metric
            rep = 0
        if metric < 0.92:
            # Quality far too low: soften the table more aggressively.
            return SSIM(patch, table=np.round(table / 1.2))
    # Exhausted all 64 coefficients without hitting the target.
    if table[0][0] < 8:
        table[0][0] = 8
    return 64, metric, table
def get_set_ssim(originalSet, noisySet, img_height=64, img_width=64):
    """Average SSIM between two paired image sets.

    Both sets are reshaped to (N, img_height, img_width, 1) before the
    per-image SSIM values are averaged.
    """
    count = originalSet.shape[0]
    originals = originalSet.reshape(count, img_height, img_width, 1)
    noisies = noisySet.reshape(noisySet.shape[0], img_height, img_width, 1)
    total = 0
    for idx in range(count):
        # NOTE(review): data_range mixes original.max() with noisy.min() —
        # confirm this is intentional rather than original.max()-original.min().
        total += ssim(
            originals[idx],
            noisies[idx],
            data_range=originals[idx].max() - noisies[idx].min(),
            multichannel=True,
        )
    return 1.0 * total / count
def SSIM_between_classifiers():
    """Compare CNN and simple-segmentation outputs against ground truth.

    Walks the top level of experiment_dir_cnn once (the ``break`` stops after
    the first os.walk entry), accumulates per-folder SSIM scores into the
    module-level general_cnn_ssim / general_ss_ssim lists, and returns the
    mean score of each classifier.
    """
    for root, folders, _ in os.walk(experiment_dir_cnn):
        cont = 0
        for folder in folders:
            cnn_img = io.imread(
                os.path.join(experiment_dir_cnn, folder + "/final_rec.png"))
            gt_img = io.imread(
                os.path.join(GT_dir_cnn, folder + "/target.png"))
            general_cnn_ssim.append(ssim(cnn_img, gt_img))
            # NOTE(review): the suffix "png" has no dot or separator — this
            # looks like it should be ".png"; confirm against the on-disk
            # layout of experiment_dir_simple_seg before changing it.
            ss_img = io.imread(
                os.path.join(experiment_dir_simple_seg, folder + "png"))
            general_ss_ssim.append(ssim(ss_img, gt_img))
            print("Done {}".format(100 * cont / len(folders)))
            cont += 1
        media_cnn = sum(general_cnn_ssim) / cont
        media_ss = sum(general_ss_ssim) / cont
        break
    return {'cnn': media_cnn, 'ss': media_ss}
def get_image_similarity(img1, img2, algorithm='SIFT'):
    """Return a similarity score in [0, 1] between two image files.

    Parameters
    ----------
    img1, img2 : str
        Paths to the two images; both are loaded as grayscale and resized
        to SIM_IMAGE_SIZE (except CW-SSIM, which uses the raw paths).
    algorithm : str
        'SIFT' (default), 'CW-SSIM', 'SSIM', or anything else for an
        MSE-based fallback.
    """
    # Converting to grayscale and resizing
    i1 = cv2.resize(cv2.imread(img1, cv2.IMREAD_GRAYSCALE), SIM_IMAGE_SIZE)
    i2 = cv2.resize(cv2.imread(img2, cv2.IMREAD_GRAYSCALE), SIM_IMAGE_SIZE)
    similarity = 0.0
    if algorithm == 'SIFT':
        # Using OpenCV for feature detection and matching
        sift = cv2.xfeatures2d.SIFT_create()
        k1, d1 = sift.detectAndCompute(i1, None)
        k2, d2 = sift.detectAndCompute(i2, None)
        bf = cv2.BFMatcher()
        # NOTE(review): knnMatch can yield entries with fewer than 2
        # neighbors for tiny descriptor sets; the unpack below assumes 2.
        matches = bf.knnMatch(d1, d2, k=2)
        for m, n in matches:
            # Lowe's ratio test: count only confident matches.
            if m.distance < SIFT_RATIO * n.distance:
                similarity += 1.0
        # Custom normalization for better variance in the similarity matrix.
        # BUGFIX: previously, zero matches satisfied
        # ``similarity == len(matches)`` (0 == 0) and reported two images
        # with no shared features as perfectly similar (1.0).
        if not matches:
            similarity = 0.0
        elif similarity == len(matches):
            similarity = 1.0
        elif similarity > 1.0:
            similarity = 1.0 - 1.0 / similarity
        elif similarity == 1.0:
            similarity = 0.1
        else:
            similarity = 0.0
    elif algorithm == 'CW-SSIM':
        # FOR EXPERIMENTS ONLY!
        # Very slow algorithm - up to 50x times slower than SIFT or SSIM.
        # Optimization using CUDA or Cython code should be explored in the future.
        similarity = pyssim.SSIM(img1).cw_ssim_value(img2)
    elif algorithm == 'SSIM':
        # Default SSIM implementation of Scikit-Image
        similarity = ssim(i1, i2)
    else:
        # Using MSE algorithm with custom normalization.
        # i1 and i2 share SIM_IMAGE_SIZE, so mixing i1.shape[0] with
        # i2.shape[1] is equivalent to using one image's dimensions.
        err = np.sum((i1.astype("float") - i2.astype("float")) ** 2)
        err /= float(i1.shape[0] * i2.shape[1])
        if err > 0.0:
            similarity = MSE_NUMERATOR / err
        else:
            similarity = 1.0
    return similarity
def SSIM():
    """Mean SSIM of simple-segmentation outputs against ground-truth images.

    Walks every directory under experiment_dir_simple_seg, appends each
    per-file SSIM score to the module-level general_ssim list, and returns
    the last computed running mean.
    """
    media = 0
    for root, _, files in os.walk(experiment_dir_simple_seg):
        cont = 0
        for fname in files:
            experiment_img = io.imread(
                os.path.join(experiment_dir_simple_seg, fname))
            gt_img = io.imread(os.path.join(GT_dir, fname))
            general_ssim.append(ssim(experiment_img, gt_img))
            cont += 1
        # NOTE(review): a walked directory with zero files would make this a
        # ZeroDivisionError; confirm the tree always has files at every level.
        media = sum(general_ssim) / cont
    return media
def SSIM_neural_output():
    """Mean SSIM of CNN reconstructions against their ground-truth targets.

    Inspects only the top level of experiment_dir_cnn (the ``break`` stops
    after the first os.walk entry); each folder is expected to hold a
    final_rec.png, paired with target.png under GT_dir_cnn. Scores
    accumulate in the module-level general_ssim list.
    """
    media = 0
    for root, folders, _ in os.walk(experiment_dir_cnn):
        cont = 0
        for folder in folders:
            rec_img = io.imread(
                os.path.join(experiment_dir_cnn, folder + "/final_rec.png"))
            gt_img = io.imread(
                os.path.join(GT_dir_cnn, folder + "/target.png"))
            general_ssim.append(ssim(rec_img, gt_img))
            cont += 1
        media = sum(general_ssim) / cont
        break
    return media
def get_image_ssim(original_img, noisy_img):
    """Single-channel SSIM between two images after scaling both to [0, 255]."""
    scaled_original = original_img * 255.0
    scaled_noisy = noisy_img * 255.0
    # NOTE(review): data_range mixes original.max() with noisy.min() (on the
    # unscaled inputs) — confirm this is intended rather than a plain
    # max-minus-min of one image.
    value_range = original_img.max() - noisy_img.min()
    return ssim(
        scaled_original,
        scaled_noisy,
        data_range=value_range,
        multichannel=False,
    )
"""Compute the SSIM metric between an original image and a result image.

args:
    pass in the path of the original image in 'original_sample_url'
    pass in the path of the resulting image in 'index_url'
"""
# BUGFIX: the original imported from skimage.measure._structural_similarity,
# a private module path that was removed from scikit-image; the public API
# lives in skimage.metrics since 0.16. Fall back to the old public name for
# older installations.
try:
    from skimage.metrics import structural_similarity as ssim
except ImportError:  # scikit-image < 0.16
    from skimage.measure import compare_ssim as ssim
import imageio

original_sample_url = ''
index_url = ''
original_sample = imageio.imread(original_sample_url)
index = imageio.imread(index_url)
metric = ssim(original_sample, index,
              data_range=index.max() - index.min(),
              multichannel=True)
print('Result SSIM Metric: ', metric)