def test_knn_memory():
    if not have_flann:
        raise SkipTest("No flann, so skipping knn tests.")

    dim = 3
    n = 20
    np.random.seed(47)
    bags = Features([np.random.randn(np.random.randint(30, 100), dim)
                     for _ in range(n)])

    tdir = tempfile.mkdtemp()
    div_funcs = ('kl', 'js', 'renyi:.9', 'l2', 'tsallis:.8')
    Ks = (3, 4)
    est = KNNDivergenceEstimator(div_funcs=div_funcs, Ks=Ks, memory=tdir)
    res1 = est.fit_transform(bags)

    # a second transform() on the same bags should reuse cached results
    # and log nothing
    with LogCapture('skl_groups.divergences.knn', level=logging.INFO) as l:
        res2 = est.transform(bags)
    assert len(l.records) == 0
    assert np.all(res1 == res2)

    # refitting on the same bags should also skip recomputing divergences
    with LogCapture('skl_groups.divergences.knn', level=logging.INFO) as l:
        res3 = est.fit_transform(bags)
    for r in l.records:
        assert not r.message.startswith("Getting divergences")
    assert np.all(res1 == res3)

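# A sketch of the module-level setup the test snippets in this section appear
# to assume; this is not the original test file's header. The exact import
# paths for SkipTest / assert_raises and the way `have_flann` is set are
# assumptions, the rest follows from the names used in the tests.
import logging
import tempfile
from functools import partial
from unittest import SkipTest

import numpy as np
from numpy.testing import assert_raises
from testfixtures import LogCapture

from skl_groups.features import Features
from skl_groups.divergences import KNNDivergenceEstimator

try:
    import cyflann  # noqa: F401  (one plausible FLANN binding)
    have_flann = True
except ImportError:
    have_flann = False
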
def test_knn_kl():
    if not have_flann:
        raise SkipTest("No flann, so skipping knn tests.")

    # verified by hand
    # \hat{D}(P\|Q) = \log \frac{m}{n-1}
    #                 + \frac{d}{n} \sum_{i=1}^n \log \frac{\nu_k(i)}{\rho_k(i)}
    x = np.reshape([0., 1, 3], (3, 1))
    y = np.reshape([.2, 1.2, 3.2, 7.2], (4, 1))

    n = x.shape[0]
    m = y.shape[0]
    x_to_y = np.log(m / (n - 1)) + 1 / n * (
        np.log(1.2 / 3) + np.log(.8 / 2) + np.log(1.8 / 3))
    y_to_x = np.log(n / (m - 1)) + 1 / m * (
        np.log(.8 / 3) + np.log(1.2 / 2) + np.log(2.2 / 3) + np.log(6.2 / 6))

    msg = "got {}, expected {}"
    est = KNNDivergenceEstimator(div_funcs=['kl'], Ks=[2], clamp=False)
    res = est.fit_transform([x, y]).squeeze()
    assert res[0, 0] == 0
    assert res[1, 1] == 0
    assert np.allclose(res[0, 1], x_to_y), msg.format(res[0, 1], x_to_y)
    assert np.allclose(res[1, 0], y_to_x), msg.format(res[1, 0], y_to_x)

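# A minimal sketch (not part of the original test) reproducing the hand
# computation above for k = 2, d = 1: nu_2(i) is the distance from x_i to its
# 2nd-nearest point in y, rho_2(i) the distance to its 2nd-nearest other point
# in x, and the ratios recover the terms 1.2/3, .8/2 and 1.8/3 used above.
import numpy as np

x = np.array([0., 1, 3])
y = np.array([.2, 1.2, 3.2, 7.2])
n, m, d, k = len(x), len(y), 1, 2

nu = np.array([np.sort(np.abs(y - xi))[k - 1] for xi in x])   # [1.2, 0.8, 1.8]
rho = np.array([np.sort(np.abs(x - xi))[k] for xi in x])      # [3., 2., 3.]; index k skips the zero self-distance
x_to_y = np.log(m / (n - 1)) + d / n * np.sum(np.log(nu / rho))
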
def divergence_gen(gen, gt_db, batch=1000, metric='kl', normalize=False,
                   n_bins=100, whitening=True, classes=None, **kwargs):
    """
    Given a generator and the gt function (the one the generator tries to
    approximate), measure the discrepancy of the generated signals from the
    gt signals.
    """
    # # generate as many samples as the ground-truth database holds.
    batch = gt_db.shape[0]
    if classes is None:
        gen_samples = gen_images(gen, n=batch, batchsize=batch)
    else:
        # # conditional case: sample per class, then trim to the gt size.
        gen_csamples, n_ms = [], int(batch // len(classes) + 10)
        for cl in classes:
            x = gen_images_with_condition(gen, n=n_ms, c=cl, batchsize=n_ms)
            gen_csamples.append(x)
        gen_csamples = np.concatenate(gen_csamples, 0)
        gen_samples = gen_csamples[:gt_db.shape[0]]

    if len(gt_db.shape) != 2:
        gt_db = gt_db.reshape((batch, -1))
    if len(gen_samples.shape) != 2:
        gen_samples = gen_samples.reshape((batch, -1))
    if gen_samples.dtype == np.uint8:
        gen_samples = gen_samples.astype(np.float32)

    if normalize:
        # # Given that gen_images have a range [0, 255], normalize
        # # the images to the [-1, 1] range for the KNN.
        gen_samples1 = gen_samples / 127.5 - 1
    else:
        gen_samples1 = gen_samples

    if metric == 'ndb':
        global ndb
        if ndb is None:
            ndb = NDB(training_data=gt_db, number_of_bins=n_bins,
                      whitening=whitening)
        metric_val = ndb.evaluate(gen_samples)
        chainer.reporter.report({'ndb': metric_val['NDB']})
        chainer.reporter.report({'JS': metric_val['JS']})
        diver = metric_val['NDB']
    else:
        # # define an estimator (e.g. KL divergence).
        est = KNNDivergenceEstimator(div_funcs=[metric], Ks=[3], clamp=False)
        # # fit on the (optionally normalized) samples and return the result.
        res_diver = est.fit_transform([gt_db, gen_samples1])
        try:
            diver = res_diver[0, 1]
        except Exception:
            diver = res_diver[0][0][0, 1]
        chainer.reporter.report({'kl': diver})
    return diver

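# A minimal sketch (not part of the original code) clarifying the try/except
# indexing above: fit_transform on B bags returns an array of shape
# (len(div_funcs), len(Ks), B, B), as asserted in test_knn_sanity_slow below,
# so with one div_func and one K the divergence from the first bag to the
# second sits at [0, 0, 0, 1]. Bag shapes here are invented for illustration.
import numpy as np
from skl_groups.divergences import KNNDivergenceEstimator

bag_a = np.random.randn(200, 8)
bag_b = np.random.randn(250, 8)
est = KNNDivergenceEstimator(div_funcs=['kl'], Ks=[3], clamp=False)
res = est.fit_transform([bag_a, bag_b])   # shape (1, 1, 2, 2)
kl_a_to_b = res[0, 0, 0, 1]
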
def kNNdiv_Kernel(X_white, kernel, Knn=3, div_func='renyi:.5', Nref=None,
                  compwise=True, njobs=1, W_ica_inv=None):
    ''' `div_func` kNN divergence estimate between some data X_white and a
    distribution specified by kernel.
    '''
    if isinstance(Knn, int):
        Knns = [Knn]
    elif isinstance(Knn, list):
        Knns = Knn

    # if component-wise, there should be one kernel per component,
    # i.e. X_white.shape[1] kernels
    if compwise:
        if X_white.shape[1] != len(kernel):
            raise ValueError

    # construct the reference "bag" by sampling from the kernel(s)
    if compwise:
        ref_dist = np.zeros((Nref, X_white.shape[1]))
        for icomp in range(X_white.shape[1]):
            samp = kernel[icomp].sample(Nref)
            if isinstance(samp, tuple):
                ref_dist[:, icomp] = samp[0].flatten()
            else:
                ref_dist[:, icomp] = samp.flatten()
    else:
        samp = kernel.sample(Nref)
        if isinstance(samp, tuple):
            ref_dist = samp[0]
        else:
            ref_dist = samp
    if W_ica_inv is not None:
        ref_dist = np.dot(ref_dist, W_ica_inv.T)

    # estimate the divergence
    kNN = KNNDivergenceEstimator(div_funcs=[div_func], Ks=Knns,
                                 version='slow', clamp=False, n_jobs=njobs)
    feat = Features([X_white, ref_dist])
    div_knn = kNN.fit_transform(feat)
    if len(Knns) == 1:
        return div_knn[0][0][0][1]
    div_knns = np.zeros(len(Knns))
    for i in range(len(Knns)):
        div_knns[i] = div_knn[0][i][0][1]
    return div_knns

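# Hypothetical usage sketch (names, bandwidth and sizes invented for
# illustration): kNNdiv_Kernel only needs `kernel` objects exposing .sample(N).
# With compwise=True, one sklearn KernelDensity per component fits that
# interface; a GaussianMixture also works, since its .sample() returns a
# (samples, labels) tuple, which the isinstance(samp, tuple) branch handles.
import numpy as np
from sklearn.neighbors import KernelDensity

X_white = np.random.randn(500, 3)
kernels = [KernelDensity(bandwidth=0.3).fit(X_white[:, i:i + 1])
           for i in range(X_white.shape[1])]
div = kNNdiv_Kernel(X_white, kernels, Knn=3, div_func='renyi:.5',
                    Nref=500, compwise=True)
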
def test_knn_sanity_slow():
    if not have_flann:
        raise SkipTest("No flann, so skipping knn tests.")

    dim = 3
    n = 20
    np.random.seed(47)
    bags = Features([np.random.randn(np.random.randint(30, 100), dim)
                     for _ in range(n)])

    # just make sure it runs
    div_funcs = ('kl', 'js', 'renyi:.9', 'l2', 'tsallis:.8')
    Ks = (3, 4)
    est = KNNDivergenceEstimator(div_funcs=div_funcs, Ks=Ks)
    res = est.fit_transform(bags)
    assert res.shape == (len(div_funcs), len(Ks), n, n)
    assert np.all(np.isfinite(res))

    # test that JS blows up when there's a huge difference in bag sizes
    # (so that K is too low)
    assert_raises(
        ValueError,
        partial(est.fit_transform, bags + [np.random.randn(1000, dim)]))

    # test fit() and then transform() with JS, with different-sized test bags
    est = KNNDivergenceEstimator(div_funcs=('js',), Ks=(5,))
    est.fit(bags, get_rhos=True)
    with LogCapture('skl_groups.divergences.knn', level=logging.WARNING) as l:
        res = est.transform([np.random.randn(300, dim)])
    assert res.shape == (1, 1, 1, len(bags))
    assert len(l.records) == 1
    assert l.records[0].message.startswith('Y_rhos had a lower max_K')

    # test that passing a div func more than once raises
    def blah(df):
        est = KNNDivergenceEstimator(div_funcs=[df, df])
        return est.fit(bags)
    assert_raises(ValueError, lambda: blah('kl'))
    assert_raises(ValueError, lambda: blah('renyi:.8'))
    assert_raises(ValueError, lambda: blah('l2'))

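# A sketch (not part of the original tests, sizes illustrative) of the
# fit/transform split exercised above: the test asserts shape
# (1, 1, 1, len(bags)) for one transformed bag, so transforming several unseen
# bags is assumed here to follow the same (len(div_funcs), len(Ks), n_test,
# n_train) layout.
import numpy as np
from skl_groups.features import Features
from skl_groups.divergences import KNNDivergenceEstimator

train_bags = Features([np.random.randn(50, 3) for _ in range(10)])
test_bags = [np.random.randn(60, 3), np.random.randn(70, 3)]

est = KNNDivergenceEstimator(div_funcs=('js',), Ks=(5,))
est.fit(train_bags, get_rhos=True)   # get_rhos=True, as in the test above
divs = est.transform(test_bags)      # expected shape (1, 1, 2, 10)
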
def kNNdiv_gauss(X_white, cov_X, Knn=3, div_func='renyi:.5', gauss=None,
                 Nref=None, njobs=1):
    ''' `div_func` kNN divergence estimate between X_white and a reference
    Gaussian with covariance matrix cov_X.
    '''
    if gauss is None:
        if Nref is None:
            raise ValueError
        # Gaussian reference distribution
        gauss = np.random.multivariate_normal(
            np.zeros(X_white.shape[1]), cov_X, size=Nref)
    if gauss.shape[1] != X_white.shape[1]:
        raise ValueError(
            'dimensions of X_white and the Gaussian reference distribution '
            'do not match')

    if isinstance(Knn, int):
        Knns = [Knn]
    elif isinstance(Knn, list):
        Knns = Knn

    kNN = KNNDivergenceEstimator(div_funcs=[div_func], Ks=Knns,
                                 version='slow', clamp=False, n_jobs=njobs)
    feat = Features([X_white, gauss])
    div_knn = kNN.fit_transform(feat)
    if len(Knns) == 1:
        return div_knn[0][0][0][1]
    div_knns = np.zeros(len(Knns))
    for i in range(len(Knns)):
        div_knns[i] = div_knn[0][i][0][1]
    return div_knns

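# Hypothetical usage sketch (array sizes invented for illustration): compare
# whitened data against a Gaussian reference drawn with the data's own
# covariance, for two choices of k.
import numpy as np

X_white = np.random.randn(1000, 5)
cov_X = np.cov(X_white, rowvar=False)
divs = kNNdiv_gauss(X_white, cov_X, Knn=[3, 5], div_func='kl', Nref=2000)
# divs has length 2: one divergence estimate per k in Knn
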