-
Notifications
You must be signed in to change notification settings - Fork 0
/
metrics_numba.py
265 lines (206 loc) · 8.76 KB
/
metrics_numba.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
import cv2
import pywt
import skimage.measure
import imquality.brisque as brisque
import webcolors as wc
from skimage.filters import sobel
from pybdm import BDM
import pylab as pl
import numpy as np
from numba import jit
import sys
# The cpbd module tries to import scipy.ndimage.imread, which does not exist. This line below is a fix for this issue.
# noinspection PyTypeChecker
sys.modules['scipy.ndimage.imread'] = cv2.imread
import cpbd
# TODO: check for RGB/BGR inconsistencies
# TODO: validate metrics (is the implementation correct?)
# TODO: check correlation between k_complexity_bw() and k_complexity_lightness() (better yet, check all correlations)
# TODO: wrt to the k_complexity functions, is the metric of 2D array sufficiently correlated with its 1D version?
# TODO: https://onlinelibrary.wiley.com/doi/abs/10.1111/j.1467-8659.2011.01900.x?casa_token=VU-te9nefM8AAAAA%3AqcRGedXRra5yodIDCXAUdS3m1mHCIyxaui2IsbWiB2Iq2S3xNnRs1xb6GMf7um2foQNzBIxFrg8rdTM
# TODO: https://ieeexplore.ieee.org/document/5995467
# Utility vars & functions
# Flags forwarded to the @jit decorators below.
numba_parallel = True
numba_cache = True
# Box-counting scales for the fractal dimension: 10 log-spaced values in [2**0.01, 2).
scales = np.logspace(0.01, 1, num=10, endpoint=False, base=2)
# Bin edges 0, 28, ..., 224 (9 edges) used to quantize rescaled grayscale values into 9 symbols.
bins_0_252 = list(range(0, 252, 28))
# Bin edges 0.0, 0.1, ..., 0.8 (9 edges) used to quantize [0, 1)-scaled Lab channels into 9 symbols.
bins_0_0_9 = list(np.arange(0, 0.9, 0.1))
@jit(nopython=True, cache=numba_cache, parallel=numba_parallel)
def nb_digitize(x, bins):
    """Digitize `x` into `bins` and shift to 0-based symbol indices.

    Bug fix: the previous version called np.digitize(x) without the required
    `bins` argument and the signature had no `bins` parameter, so every call
    site in this module (nb_digitize(image, bins=...)) raised a TypeError.

    :param x: 1-D array of values to quantize.
    :param bins: monotonically increasing array/list of bin edges.
    :return: array of bin indices starting at 0 (np.digitize is 1-based).
    """
    return np.digitize(x, bins) - 1
@jit(nopython=True, cache=numba_cache, parallel=numba_parallel)
def nb_colorfulness_helper(R, G, B):
    """Hasler-Susstrunk colourfulness from float R, G, B channel planes."""
    # Opponent colour components: red-green and yellow-blue.
    red_green = np.absolute(R - G)
    yellow_blue = np.absolute(0.5 * (R + G) - B)
    # Combine the standard deviations and means of both opponent axes.
    combined_std = np.sqrt(np.std(red_green) ** 2 + np.std(yellow_blue) ** 2)
    combined_mean = np.sqrt(np.mean(red_green) ** 2 + np.mean(yellow_blue) ** 2)
    return combined_std + 0.3 * combined_mean
# @jit(nopython=True, cache=numba_cache, parallel=numba_parallel)
@jit(cache=numba_cache)
def nb_fractal_dimension_helper(image):
    """Estimate the box-counting fractal dimension of a grayscale image.

    Collects every non-zero pixel as an (i, j) coordinate, counts the number
    of occupied boxes at each scale in the module-level `scales` array, and
    returns the negated slope of the log-log fit.

    Note: compiled without nopython=True (object mode) because it calls
    np.histogramdd through the plain-Python helper below.
    """
    # collect the coordinates of all non-zero pixels
    pixels = []
    for i in range(image.shape[0]):
        for j in range(image.shape[1]):
            if image[i, j] > 0:
                pixels.append((i, j))
    # NOTE(review): lx is the width (shape[1]) and ly the height (shape[0]),
    # but the helper bins the row coordinate with lx and the column with ly —
    # for non-square images this pairing looks swapped; confirm against the
    # box-counting reference cited in fractal_dimension().
    lx = image.shape[1]
    ly = image.shape[0]
    pixels = np.array(pixels)
    # computing the fractal dimension
    # considering only scales in a logarithmic list
    ns = []
    # looping over several scales
    for scale in scales:
        # computing the histogram
        # h, edges = np.histogramdd(pixels, bins=(np.arange(0, lx, scale), np.arange(0, ly, scale)))
        h = nb_fractal_dimension_helper_helper(pixels, lx, ly, scale)
        ns.append(np.sum(h > 0))  # number of boxes containing at least one pixel
    # linear fit, polynomial of degree 1
    coeffs = np.polyfit(np.log(scales), np.log(ns), 1)
    return -coeffs[0]  # the fractal dimension is the OPPOSITE of the fitting coefficient
def nb_fractal_dimension_helper_helper(pixels, lx, ly, scale):
    """2D box-occupancy histogram of pixel coordinates at the given box size.

    :param pixels: (N, 2) array of pixel coordinates.
    :param lx: upper bound for the first binning axis.
    :param ly: upper bound for the second binning axis.
    :param scale: box edge length.
    :return: 2D array of per-box pixel counts.
    """
    edges_first = np.arange(0, lx, scale)
    edges_second = np.arange(0, ly, scale)
    counts, _ = np.histogramdd(pixels, bins=(edges_first, edges_second))
    return counts
def contrast_rms(image):
    """RMS contrast: the standard deviation of the grayscale intensities."""
    grayscale = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    return np.std(grayscale)
# noinspection PyTypeChecker
def contrast_tenengrad(image):
    """Tenengrad contrast: Sobel gradient-magnitude energy of the image."""
    grayscale = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    gradient_energy = np.sum(sobel(grayscale) ** 2)
    # Normalize by pixel count; x10000 keeps the value in a readable range.
    return np.sqrt(gradient_energy) / grayscale.size * 10000
def fractal_dimension(image):
    """Box-counting (Minkowski-Bouligand) fractal dimension of the image.

    Adapted from https://francescoturci.net/2016/03/31/box-counting-in-numpy/
    The image is converted to grayscale and every non-zero pixel is treated
    as part of the point set whose dimension is estimated.

    Improvement: removed ~25 lines of commented-out dead code that duplicated
    the body of nb_fractal_dimension_helper (kept nowhere else, it only
    obscured the delegation below).
    """
    grayscale = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    # The box counting itself runs in the numba-compiled helper, which uses
    # the module-level `scales` array.
    return nb_fractal_dimension_helper(grayscale)
def sharpness(image):
    """Perceptual sharpness via the CPBD metric.

    https://pypi.org/project/cpbd/
    """
    grayscale = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    return cpbd.compute(grayscale)
def sharpness_laplacian(image):
    """Sharpness as the variance of the Laplacian of the grayscale image."""
    grayscale = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    laplacian = cv2.Laplacian(grayscale, cv2.CV_64F)
    return laplacian.var()
def brisque_score(image):
    # No-reference image quality score; delegates to imquality.brisque.
    return brisque.score(image)
def color_dominant(image):
    """Return the CSS3 colour name of the image's dominant colour.

    Finds the single dominant BGR colour with a 1-cluster k-means over all
    pixels, converts it to a hex string, and maps it to the exact CSS3 name
    or, failing that, the nearest CSS3 colour by squared RGB distance.
    """
    def get_approx_color(hex_color):
        # Nearest CSS3 colour by sum of squared per-channel differences.
        orig = wc.hex_to_rgb(hex_color)
        similarity = {}
        for hex_code, color_name in wc.CSS3_HEX_TO_NAMES.items():
            approx = wc.hex_to_rgb(hex_code)
            similarity[color_name] = sum(np.subtract(orig, approx) ** 2)
        return min(similarity, key=similarity.get)
    def get_color_name(hex_color):
        # Exact CSS3 name if one exists, otherwise fall back to the nearest.
        try:
            return wc.hex_to_name(hex_color)
        except ValueError:
            return get_approx_color(hex_color)
    # https://stackoverflow.com/questions/50899692/most-dominant-color-in-rgb-image-opencv-numpy-python
    # https://stackoverflow.com/questions/44354437/classify-users-by-colors
    # Flatten to an (n_pixels, 3) float32 matrix as required by cv2.kmeans.
    data = np.reshape(image, (-1, 3))
    data = np.float32(data)
    criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 10, 1.0)
    flags = cv2.KMEANS_RANDOM_CENTERS
    # K=1: the single cluster centre is the dominant colour.
    compactness, labels, centers = cv2.kmeans(data, 1, None, criteria, 10, flags)
    bgr_ = centers[0].astype(np.int32)
    # rgb_ aliases bgr_ (same array); the tuple swap below still works because
    # the right-hand side is evaluated before either element is assigned.
    rgb_ = bgr_
    rgb_[0], rgb_[2] = bgr_[2], bgr_[0]  # convert BGR to RGB
    hex_ = wc.rgb_to_hex(tuple(rgb_))
    return get_color_name(hex_)
def colorfulness(image):
    """Hasler-Susstrunk colourfulness metric of a BGR image.

    https://www.pyimagesearch.com/2017/06/05/computing-image-colorfulness-with-opencv-and-python/
    "Measuring colourfulness in natural images", David Hasler and Sabine Susstrunk.

    Improvement: removed the commented-out dead code that duplicated the body
    of nb_colorfulness_helper.
    """
    # split the image into its respective RGB components (OpenCV stores BGR)
    (B, G, R) = cv2.split(image.astype('float'))
    # The actual computation lives in the numba-compiled helper.
    return nb_colorfulness_helper(R, G, B)
# First order
def pixel_intensity_mean(image):
    """Mean pixel intensity over all pixels and channels."""
    return np.mean(image)
def hue_mean(image):
    """Mean hue of a BGR image.

    Bug fix: the previous version averaged over the whole HSV array (hue,
    saturation and value together), not the hue channel the name promises.
    Now indexes channel 0, mirroring saturation_mean() which indexes channel 1.
    """
    img_hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
    return img_hsv[:, :, 0].mean()
def saturation_mean(image):
    """Mean saturation (HSV channel 1) of a BGR image."""
    hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
    return np.mean(hsv[:, :, 1])
# Second order
# Higher order
def entropy_shannon(image):
    # Shannon entropy of the image, delegated to skimage.measure.
    return skimage.measure.shannon_entropy(image)
def k_complexity_bw(image):
    """Kolmogorov complexity (BDM) of the grayscale image, 9-symbol alphabet."""
    grayscale = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    flat = grayscale.reshape(-1)
    # Rescale 0-255 intensities into [0, 252) so they fit the 9 bin edges.
    flat = flat * (252 / 256)
    symbols = nb_digitize(flat, bins=bins_0_252)
    estimator = BDM(ndim=1, nsymbols=9, warn_if_missing_ctm=False)
    return estimator.bdm(symbols)
def k_complexity_lab_l(image):
    """Kolmogorov complexity (BDM) of the Lab lightness (L) channel."""
    lab = cv2.cvtColor(image, cv2.COLOR_BGR2Lab)
    lab = lab.astype('float32') / 255  # transformation to fix Euclidian distances in Lab space
    channel = lab[:, :, 0].reshape(-1)
    symbols = nb_digitize(channel, bins=bins_0_0_9)
    estimator = BDM(ndim=1, nsymbols=9, warn_if_missing_ctm=False)
    return estimator.bdm(symbols)
def k_complexity_lab_a(image):
    """Kolmogorov complexity (BDM) of the Lab green-red (a) channel."""
    lab = cv2.cvtColor(image, cv2.COLOR_BGR2Lab)
    lab = lab.astype('float32') / 255  # transformation to fix Euclidian distances in Lab space
    channel = lab[:, :, 1].reshape(-1)
    symbols = nb_digitize(channel, bins=bins_0_0_9)
    estimator = BDM(ndim=1, nsymbols=9, warn_if_missing_ctm=False)
    return estimator.bdm(symbols)
def k_complexity_lab_b(image):
    """Kolmogorov complexity (BDM) of the Lab blue-yellow (b) channel."""
    lab = cv2.cvtColor(image, cv2.COLOR_BGR2Lab)
    lab = lab.astype('float32') / 255  # transformation to fix Euclidian distances in Lab space
    channel = lab[:, :, 2].reshape(-1)
    symbols = nb_digitize(channel, bins=bins_0_0_9)
    estimator = BDM(ndim=1, nsymbols=9, warn_if_missing_ctm=False)
    return estimator.bdm(symbols)
def haar_wavelet(image):
    """Single-level 2D Haar DWT of the image, subsampled by 2 first.

    Returns (cA, (cH, cV, cD)) as produced by pywt.dwt2.

    Improvements: the wavelet name is now the canonical lowercase 'haar'
    listed by pywt.wavelist(); the comment now says "subsample" — the slice
    takes every second row/column rather than interpolating a resize.
    """
    subsampled = image[::2, ::2]  # subsample by factor 2 (no interpolation)
    return pywt.dwt2(subsampled, 'haar')