def test_tracenorm(self):
    """Trace-normalization of a Gram matrix built from self.X.

    Checks that (a) the trace of X·Xᵀ is the expected 25.75, (b) undoing
    the normalization (multiplying by trace/n) recovers the original
    matrix, and (c) trace() agrees on the numpy view of the tensor.
    """
    gram = self.X @ self.X.T
    tr = metrics.trace(gram)
    self.assertEqual(tr, 25.75, gram)
    normalized = preprocessing.tracenorm(gram)
    # tracenorm divides by trace/n, so scaling back must reproduce gram
    self.assertTrue(matNear(normalized * tr / gram.shape[0], gram))
    # trace must be backend-agnostic: same result on the numpy array
    self.assertEqual(tr, metrics.trace(gram.numpy()))
def kernel_evaluation(K):
    """Compute a set of quality metrics for the kernel matrix *K*.

    The kernel is first normalized (a no-op for HPKs computed on
    normalized data), then scored against the module-level labels Ytr.

    Returns a dict with keys 'score_margin', 'score_radius',
    'score_ratio', 'score_froben' and 'score_trace'.
    """
    Kn = kernel_normalization(K)
    return {
        # distance between the positive and negative classes in kernel space
        'score_margin': margin(Kn, Ytr),
        # radius of the Minimum Enclosing Ball of the data in kernel space
        'score_radius': radius(Kn),
        # radius/margin ratio, i.e. (radius**2/margin**2)/n_examples
        'score_ratio': ratio(Kn, Ytr),
        # Frobenius norm of the kernel matrix
        'score_froben': frobenius(Kn),
        # trace of the kernel matrix
        'score_trace': trace(Kn),
    }
def tracenorm(K):
    """Divide a squared kernel matrix by its average diagonal value.

    Parameters
    ----------
    K : (n,n) ndarray,
        the squared kernel matrix.

    Returns
    -------
    Kt : ndarray,
        the trace-normalized version of *K*.

    Notes
    -----
    Trace-normalization divides the kernel by trace(K)/n, i.e. the mean
    of its diagonal entries.
    """
    checked = check_squared(K)
    avg_diag = trace(checked) / checked.shape[0]
    return checked / avg_diag
def tracenorm(K):
    """Trace-normalize a squared kernel matrix.

    Parameters
    ----------
    K : (n,n) ndarray,
        the squared kernel matrix.

    Returns
    -------
    Kt : ndarray,
        the trace-normalized version of *K*.

    Notes
    -----
    The result is K divided by the average of its diagonal
    (trace(K)/n), so the normalized matrix has trace equal to n.
    """
    validated = check_squared(K)
    mean_diagonal = trace(validated) / validated.shape[0]
    return validated / mean_diagonal
# Test kernels: HPKs of degree 0..10 between test and training examples.
KLte = [pairwise.homogeneous_polynomial_kernel(Xte, Xtr, degree=d) for d in range(11)]
print('done')

# Evaluate kernels in terms of margin, radius etc...
print('evaluating metrics...', end='')
from MKLpy.metrics import margin, radius, ratio, trace, frobenius
from MKLpy.preprocessing import kernel_normalization
deg = 5
K = KLtr[deg]                   # the HPK with degree 5
# Normalize the kernel K (useless in the case of HPK computed on normalized data).
K = kernel_normalization(K)
score_margin = margin(K, Ytr)   # distance between the positive and negative classes in kernel space
score_radius = radius(K)        # radius of the Minimum Enclosing Ball containing data in kernel space
score_ratio = ratio(K, Ytr)     # radius/margin ratio defined as (radius**2/margin**2)/n_examples
# The ratio can be also computed as score_radius**2/score_margin**2/len(Ytr).
score_trace = trace(K)          # trace of the kernel matrix
score_froben = frobenius(K)     # Frobenius norm of the kernel matrix
print('done')
print('results of the %d-degree HP kernel:' % deg)
# Fixed typo in the report label: 'radiu-margin' -> 'radius-margin'.
print('margin: %.4f, radius: %.4f, radius-margin ratio: %.4f,' % (score_margin, score_radius, score_ratio))
print('trace: %.4f, frobenius norm: %.4f' % (score_trace, score_froben))

# Evaluate the empirical complexity of the kernel matrix, i.e. the Spectral Ratio.
# Michele Donini, Fabio Aiolli: "Learning deep kernels in the space of dot-product polynomials". Machine Learning (2017)
# Ivano Lauriola, Mirko Polato, Fabio Aiolli: "The Minimum Effort Maximum Output principle applied to Multiple Kernel Learning". ESANN (2018)
print('computing Spectral Ratio...', end='')
from MKLpy.metrics import spectral_ratio
SR = spectral_ratio(K, norm=True)
print('%.4f' % SR)
def test_trace(self):
    """trace() returns the diagonal sum and rejects non-squared input."""
    self.assertEqual(metrics.trace(self.K1), 3)
    # a rectangular matrix must be refused
    with self.assertRaises(SquaredKernelError):
        metrics.trace(self.X)
def test_tracenorm(self):
    """Trace-normalization is the inverse of scaling by trace/n."""
    gram = self.X.dot(self.X.T)
    tr = metrics.trace(gram)
    self.assertEqual(tr, 25, gram)
    scaled = preprocessing.tracenorm(gram)
    # multiplying back by trace/n must recover the original Gram matrix
    self.assertTrue(matNear(scaled * tr / gram.shape[0], gram))
def tracenorm(K):
    """Trace-normalize a squared kernel matrix.

    Parameters
    ----------
    K : (n,n) ndarray,
        the squared kernel matrix.

    Returns
    -------
    Kt : ndarray,
        *K* divided by the average of its diagonal (trace(K)/n), so
        that the result has trace equal to n.

    Notes
    -----
    This variant performs no input validation: it assumes *K* is
    squared and has a non-zero trace.
    """
    # Removed dead commented-out code that referenced an undefined
    # lowercase `k` and would have raised NameError if restored.
    trn = trace(K) / K.shape[0]
    return K / trn