Example #1
def test_transform_01(self):
    # Fit a 2-dimensional active subspace with the 'local' gradient
    # approximation and check the active components of new inputs
    # against reference values.
    np.random.seed(42)
    inputs = np.random.uniform(-1, 1, 60).reshape(15, 4)
    outputs = np.random.uniform(0, 5, 15)
    ss = ActiveSubspaces(dim=2, method='local', n_boot=250)
    ss.fit(inputs=inputs, outputs=outputs)
    active = ss.transform(np.random.uniform(-1, 1, 8).reshape(2, 4))[0]
    true_active = np.array([[-0.004748, 0.331107], [-0.949099, 0.347534]])
    np.testing.assert_array_almost_equal(true_active, active)
Example #2
def test_transform_05(self):
    # Same setup as above, but check the inactive components (second
    # element returned by transform) against reference values.
    np.random.seed(42)
    inputs = np.random.uniform(-1, 1, 60).reshape(15, 4)
    outputs = np.random.uniform(0, 5, 15)
    ss = ActiveSubspaces(dim=2, method='local', n_boot=250)
    ss.fit(inputs=inputs, outputs=outputs)
    inactive = ss.transform(np.random.uniform(-1, 1, 8).reshape(2, 4))[1]
    true_inactive = np.array([[-1.03574242, -0.04662904],
                              [-0.49850367, -0.37146678]])
    np.testing.assert_array_almost_equal(true_inactive, inactive)
Example #3
def test_transform_04(self):
    # Vector-valued outputs (15 samples, 3 components): check the active
    # components of new inputs against reference values.
    np.random.seed(42)
    inputs = np.random.uniform(-1, 1, 60).reshape(15, 4)
    outputs = np.random.uniform(0, 5, 45).reshape(15, 3)
    ss = ActiveSubspaces(dim=2, method='local', n_boot=250)
    ss.fit(inputs=inputs, outputs=outputs)
    active = ss.transform(np.random.uniform(-1, 1, 8).reshape(2, 4))[0]
    true_active = np.array([[0.15284753, 0.67109407],
                            [0.69006622, -0.4165206]])
    np.testing.assert_array_almost_equal(true_active, active)
Example #4
def test_transform_03(self):
    # Vector-valued outputs with an identity metric: the active and
    # inactive components together must reconstruct the original inputs.
    np.random.seed(42)
    inputs = np.random.uniform(-1, 1, 60).reshape(15, 4)
    outputs = np.random.uniform(0, 5, 45).reshape(15, 3)
    ss = ActiveSubspaces(dim=2, method='local', n_boot=250)
    ss.fit(inputs=inputs, outputs=outputs, metric=np.diag(np.ones(3)))
    new_inputs = np.random.uniform(-1, 1, 8).reshape(2, 4)
    active, inactive = ss.transform(new_inputs)
    reconstructed_inputs = active.dot(ss.W1.T) + inactive.dot(ss.W2.T)
    np.testing.assert_array_almost_equal(new_inputs, reconstructed_inputs)
Example #5
def test_inverse_transform_01(self):
    # Map reduced (active) points back to the full input space and check
    # that projecting the recovered points onto W1 returns the active
    # values, replicated n_points times for each reduced point.
    np.random.seed(42)
    inputs = np.random.uniform(-1, 1, 60).reshape(15, 4)
    outputs = np.random.uniform(0, 5, 15)
    ss = ActiveSubspaces(dim=1, method='local', n_boot=250)
    ss.fit(inputs=inputs, outputs=outputs)
    new_inputs = np.random.uniform(-1, 1, 8).reshape(2, 4)
    active = ss.transform(new_inputs)[0]
    new_inputs = ss.inverse_transform(reduced_inputs=active, n_points=5)[0]
    np.testing.assert_array_almost_equal(np.kron(active, np.ones((5, 1))),
                                         new_inputs.dot(ss.W1))
Example #6
def test_hit_and_run_inactive_01(self):
    # Sample inactive variables with the hit-and-run scheme and verify
    # that rotating back to the full space preserves the active values.
    np.random.seed(42)
    inputs = np.random.uniform(-1, 1, 60).reshape(15, 4)
    outputs = np.random.uniform(0, 5, 15)
    ss = ActiveSubspaces(dim=1, method='local', n_boot=250)
    ss.fit(inputs=inputs, outputs=outputs)
    new_inputs = np.random.uniform(-1, 1, 8).reshape(2, 4)
    active = ss.transform(new_inputs)[0]
    inactive_swap = np.array([
        ss._hit_and_run_inactive(reduced_input=red_inp, n_points=1)
        for red_inp in active
    ])
    inactive_inputs = np.swapaxes(inactive_swap, 1, 2)
    new_inputs = ss._rotate_x(reduced_inputs=active,
                              inactive_inputs=inactive_inputs)[0]
    np.testing.assert_array_almost_equal(active, new_inputs.dot(ss.W1))
Example #7
# Define the output of interest and compute the gradients
func = partial(output, normalizer=nor, r=generatrix)
f = func(x)
df = egrad(func)(x)

# Compute the active subspace
asub = ActiveSubspaces(dim=1)
asub.fit(gradients=df)

# Build a test set, map it onto the active variable, and evaluate the
# optimal profile on the forward-mapped points
M_test = 50
X_test = inputs_uniform(M_test, input_dim, lb, ub)
nor = Normalizer(lb, ub)
x_test = nor.fit_transform(X_test)
f_test = func(x_test)
x_forward = asub.transform(x_test)[0]
g = profile(x_forward, asub, func)

plt.figure(figsize=(6, 4))
plt.title('Comparison between profile choices')
plt.scatter(x_forward, g, c='r', label='Optimal profile')
plt.scatter(x_forward, f_test, c='b', label='Unchanged profile')
plt.legend()
plt.grid(linestyle='dotted')
plt.show()

# Fit Gaussian process surrogates of both profiles on the active variable
gp_g = GPy.models.GPRegression(x_forward, g.reshape(-1, 1))
gp_f = GPy.models.GPRegression(x_forward, f_test.reshape(-1, 1))

gp_g.constrain_positive('')
gp_g.optimize_restarts(5, verbose=False)
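The two surrogates fitted above can then be compared on a grid of active-variable values. The following is a minimal sketch, not part of the original example, assuming gp_g, gp_f, and x_forward from the snippet above; the grid size of 200 points is an arbitrary choice.

# Sketch: compare the GP surrogates of the two profiles on a dense grid
# of active-variable values (gp_g, gp_f, x_forward come from above).
import numpy as np
import matplotlib.pyplot as plt

x_grid = np.linspace(x_forward.min(), x_forward.max(), 200).reshape(-1, 1)
mean_g, _ = gp_g.predict(x_grid)  # posterior mean, optimal profile
mean_f, _ = gp_f.predict(x_grid)  # posterior mean, unchanged profile

plt.figure(figsize=(6, 4))
plt.plot(x_grid, mean_g, 'r', label='GP on optimal profile')
plt.plot(x_grid, mean_f, 'b', label='GP on unchanged profile')
plt.legend()
plt.grid(linestyle='dotted')
plt.show()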
Example #8
# Compute the gradients of the radial output with PyTorch autograd
x.requires_grad = True
f = radial(x + torch.ones(input_dim), generatrix=lambda x: x)
f.backward(gradient=torch.ones([n_samples]))
df = x.grad

# Search for an active subspace
ss = ActiveSubspaces(dim=1)
ss.fit(gradients=df.cpu().detach().numpy())
ss.plot_eigenvalues(figsize=(6, 4))
ss.plot_sufficient_summary(x.detach().numpy(),
                           f.detach().numpy(),
                           figsize=(6, 4))

# Fit a GP surrogate of the output on the one-dimensional active variable
kernel = GPy.kern.RBF(input_dim=1, ARD=True)
gp = GPy.models.GPRegression(
    ss.transform(x.detach().numpy())[0],
    f.reshape(-1, 1).detach().numpy(), kernel)
gp.optimize_restarts(5, verbose=False)

# Use the No-U-Turn Sampler (NUTS), a Hamiltonian Monte Carlo variant,
# to sample from the posterior of the original (full-dimensional) model
num_chains = 1
num_samples = 100
kernel = NUTS(model)
mcmc = MCMC(kernel,
            num_samples=num_samples,
            warmup_steps=100,
            num_chains=num_chains)
mcmc.run(f)
mcmc.summary()
mcmc_samples = mcmc.get_samples(group_by_chain=True)
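get_samples(group_by_chain=True) returns a dictionary that maps each sample site of model to a tensor whose leading dimensions are (num_chains, num_samples). A minimal sketch of inspecting it, assuming only that behaviour; the actual site names depend on model, which is not shown here.

# Sketch: inspect the grouped posterior samples; site names depend on `model`.
for site, samples in mcmc_samples.items():
    # each tensor has shape (num_chains, num_samples, ...)
    print(site, tuple(samples.shape), samples.mean(dim=(0, 1)))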