def test_plot_sufficient_summary_02(self):
    """Fit a 3-dimensional kernel active subspace and verify that
    plot_sufficient_summary(10, 10) raises ValueError."""
    np.random.seed(42)
    grads = np.random.uniform(-1, 1, 200).reshape(50, 1, 4)
    samples = np.random.uniform(-1, 1, 200).reshape(50, 4)
    uniform_weights = np.full((50, 1), 1.0 / 50)
    subspaces = KernelActiveSubspaces(dim=3, n_features=8, n_boot=5)
    subspaces.fit(inputs=samples, gradients=grads, weights=uniform_weights)
    with self.assertRaises(ValueError):
        subspaces.plot_sufficient_summary(10, 10)
def test_plot_sufficient_summary_03(self):
    """Fit a 2-dimensional kernel active subspace and verify that
    plot_sufficient_summary adds a new matplotlib figure."""
    np.random.seed(42)
    grads = np.random.uniform(-1, 1, 200).reshape(50, 1, 4)
    samples = np.random.uniform(-1, 1, 200).reshape(50, 4)
    uniform_weights = np.full((50, 1), 1.0 / 50)
    subspaces = KernelActiveSubspaces(dim=2,
                                      n_features=8,
                                      method='exact',
                                      n_boot=5)
    subspaces.fit(inputs=samples, gradients=grads, weights=uniform_weights)
    with assert_plot_figures_added():
        plot_inputs = np.random.uniform(-1, 1, 100).reshape(25, 4)
        plot_outputs = np.random.uniform(-1, 1, 25).reshape(-1, 1)
        subspaces.plot_sufficient_summary(plot_inputs, plot_outputs)
def test_plot_sufficient_summary_02(self):
    """Via the compute/partition API, build a 3-dimensional kernel
    active subspace and verify that plot_sufficient_summary(10, 10)
    raises ValueError.

    NOTE(review): this method shares its name with another
    test_plot_sufficient_summary_02 visible earlier in this file; if
    both live in the same TestCase the later definition shadows the
    earlier one — confirm intent.
    """
    np.random.seed(42)
    grads = np.random.uniform(-1, 1, 200).reshape(50, 1, 4)
    samples = np.random.uniform(-1, 1, 200).reshape(50, 4)
    uniform_weights = np.full((50, 1), 1.0 / 50)
    subspaces = KernelActiveSubspaces()
    subspaces.compute(inputs=samples,
                      gradients=grads,
                      weights=uniform_weights,
                      method='exact',
                      nboot=49,
                      n_features=8,
                      feature_map=None)
    subspaces.partition(3)
    with self.assertRaises(ValueError):
        subspaces.plot_sufficient_summary(10, 10)
def test_plot_sufficient_summary_03(self):
    """Via the compute/partition API, build a 2-dimensional kernel
    active subspace and verify that plot_sufficient_summary adds a
    new matplotlib figure.

    NOTE(review): this method shares its name with another
    test_plot_sufficient_summary_03 visible earlier in this file; if
    both live in the same TestCase the later definition shadows the
    earlier one — confirm intent.
    """
    np.random.seed(42)
    grads = np.random.uniform(-1, 1, 200).reshape(50, 1, 4)
    samples = np.random.uniform(-1, 1, 200).reshape(50, 4)
    uniform_weights = np.full((50, 1), 1.0 / 50)
    subspaces = KernelActiveSubspaces()
    subspaces.compute(inputs=samples,
                      gradients=grads,
                      weights=uniform_weights,
                      method='exact',
                      nboot=49,
                      n_features=8,
                      feature_map=None)
    subspaces.partition(2)
    with assert_plot_figures_added():
        plot_inputs = np.random.uniform(-1, 1, 100).reshape(25, 4)
        plot_outputs = np.random.uniform(-1, 1, 25).reshape(-1, 1)
        subspaces.plot_sufficient_summary(plot_inputs, plot_outputs)
# NOTE(review): this chunk begins mid-expression — the opening of the call
# whose options dict ends here (presumably a feature-map tuning routine) is
# outside this view; confirm against the full script.
'verbose': verbose }, method='bso', maxiter=10, save_file=False)
# `best[0]` presumably holds the lowest relative RMSE found by the tuning
# run above — verify against the tuner's return contract.
print('The lowest rrmse is {}%'.format(best[0]))
# Load the projection matrix and bias written to disk by the tuning step
# and install them on the feature map `fm` (defined earlier, outside view).
W = np.load('opt_pr_matrix.npy')
b = np.load('bias.npy')
fm._pr_matrix = W
fm.bias = b
# Fit the kernel active subspace on the sampled gradients; `dt`, `N`,
# `input_dim`, `t` and `y` come from earlier in the script — assumes
# dt holds N gradient samples of an input of size input_dim; TODO confirm.
kss.fit(gradients=dt.reshape(N, 1, input_dim), outputs=t, inputs=y)
kss.plot_eigenvalues(n_evals=5, figsize=(6, 4))
kss.plot_sufficient_summary(xx, f, figsize=(6, 4))
# comparison with Nonlinear Level-set Learning explained in detail in tutorial 07
from athena.nll import NonlinearLevelSet
import torch
# create NonlinearLevelSet object, optionally passing an optimizer of choice
nll = NonlinearLevelSet(n_layers=10, active_dim=1, lr=0.01, epochs=1000, dh=0.25, optimizer=torch.optim.Adam)
# convert the input samples to pytorch tensors
x_torch = torch.as_tensor(xx, dtype=torch.double)
def test_plot_sufficient_summary_01(self):
    """plot_sufficient_summary on a KernelActiveSubspaces instance
    that has not been fitted must raise TypeError."""
    unfitted = KernelActiveSubspaces(dim=2)
    with self.assertRaises(TypeError):
        unfitted.plot_sufficient_summary(10, 10)