# Imports assumed by the functions below: autograd's numpy wrapper and linear solver
# (the snippets rely on automatic differentiation elsewhere, e.g. the anp/grad usage).
import autograd.numpy as np
from autograd.numpy.linalg import solve


def predict(params, x, y, xstar):
    """Returns the predictive mean and covariance at locations xstar,
       of the latent function value f (without observation noise)."""
    mean, cov_params, noise_scale = unpack_params(params)
    cov_f_f = cov_func(cov_params, xstar, xstar)            # prior covariance at test points
    cov_y_f = cov_func(cov_params, x, xstar)                # cross-covariance train/test
    cov_y_y = cov_func(cov_params, x, x) + noise_scale * np.eye(len(y))  # noisy train covariance
    # Standard Gaussian-process posterior: condition f(xstar) on the observations y.
    pred_mean = mean + np.dot(solve(cov_y_y, cov_y_f).T, y - mean)
    pred_cov = cov_f_f - np.dot(solve(cov_y_y, cov_y_f).T, cov_y_f)
    return pred_mean, pred_cov
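
# Hedged usage sketch for predict(): unpack_params and cov_func below are guesses at
# what the surrounding GP script defines (an RBF kernel and a flat parameter vector
# [mean, log-noise, log-output-scale, log-lengthscale]); they are illustrative, not
# the original helpers.
def unpack_params(params):
    mean, noise_scale, cov_params = params[0], np.exp(params[1]) + 1e-4, params[2:]
    return mean, cov_params, noise_scale

def cov_func(cov_params, x, xp):
    # RBF kernel between input sets x (n, d) and xp (m, d)
    output_scale, lengthscales = np.exp(cov_params[0]), np.exp(cov_params[1:])
    diffs = np.expand_dims(x / lengthscales, 1) - np.expand_dims(xp / lengthscales, 0)
    return output_scale * np.exp(-0.5 * np.sum(diffs**2, axis=2))

x_train = np.linspace(0.0, 5.0, 6)[:, None]
y_train = np.sin(x_train).ravel()
xstar = np.linspace(0.0, 5.0, 50)[:, None]
gp_params = 0.1 * np.random.randn(4)
pred_mean, pred_cov = predict(gp_params, x_train, y_train, xstar)
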
def plot_gmm(params, ax, num_points=100):
    # Draw each mixture component as an ellipse: map points on a circle of radius 2
    # through that component's Cholesky-style factor and shift by its mean.
    angles = np.expand_dims(np.linspace(0, 2*np.pi, num_points), 1)
    xs, ys = np.cos(angles), np.sin(angles)
    circle_pts = np.concatenate([xs, ys], axis=1) * 2.0
    for log_proportion, mean, chol in zip(*unpack_params(params)):
        cur_pts = mean + np.dot(circle_pts, chol)
        ax.plot(cur_pts[:, 0], cur_pts[:, 1], '-')
def log_marginal_likelihood(params, data):
    # Log-likelihood of the data under the mixture: logsumexp over the per-cluster
    # (log weight + Gaussian log-density) terms, summed over data points.
    cluster_lls = []
    for log_proportion, mean, chol in zip(*unpack_params(params)):
        cov = np.dot(chol.T, chol) + 1e-6 * np.eye(D)   # small jitter keeps cov positive definite
        cluster_log_likelihood = log_proportion + mvn.logpdf(data, mean, cov)
        cluster_lls.append(np.expand_dims(cluster_log_likelihood, axis=0))
    cluster_lls = np.concatenate(cluster_lls, axis=0)
    return np.sum(logsumexp(cluster_lls, axis=0))
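
# Hedged sketch of the pieces plot_gmm() and log_marginal_likelihood() rely on:
# D, K, and this unpack_params are assumptions about the surrounding GMM script,
# shown so the objective can be set up and differentiated end to end.
from autograd import grad
from autograd.scipy.special import logsumexp
from autograd.scipy.stats import multivariate_normal as mvn

D, K = 2, 3                                      # data dimension, number of mixture components

def unpack_params(params):
    log_proportions = params[:K] - logsumexp(params[:K])   # normalized log mixing weights
    means = params[K:K + K * D].reshape(K, D)
    chols = params[K + K * D:].reshape(K, D, D)              # one covariance factor per component
    return log_proportions, means, chols

num_gmm_params = K + K * D + K * D * D
init_params = 0.1 * np.random.randn(num_gmm_params)
data = np.random.randn(100, D)
objective = lambda p: -log_marginal_likelihood(p, data)
objective_grad = grad(objective)   # objective(p) is a scalar; objective_grad(p) matches p's shape
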
def solve_triangular_grad(g):
    # Closure from a solve_triangular VJP: a, ans, trans, lower and the helpers
    # _flip, tri, transpose are bound in the enclosing gradient definition.
    al2d = lambda x: x if x.ndim > 1 else x[..., None]   # promote vectors to column matrices
    v = al2d(solve_triangular(a, g, trans=_flip(a, trans), lower=lower))
    return -transpose(tri(anp.dot(v, al2d(ans).T)))
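
# Hedged check of the identity solve_triangular_grad() implements: for
# x = solve_triangular(A, b) with a scalar downstream loss and output gradient g, the
# adjoint w.r.t. A is -(A^{-T} g) x^T restricted to A's triangle (lower, trans=0 case
# shown). The check below assumes the autograd wrapper autograd.scipy.linalg.solve_triangular;
# variable names are illustrative.
from autograd import grad
from autograd.scipy.linalg import solve_triangular

A = np.tril(np.random.randn(4, 4)) + 4.0 * np.eye(4)    # well-conditioned lower-triangular matrix
b = np.random.randn(4)
scalar_loss = lambda A: np.sum(solve_triangular(A, b, lower=True))   # scalar loss, so g is all ones
g = np.ones(4)
x = solve_triangular(A, b, lower=True)
v = solve_triangular(A, g, trans='T', lower=True)        # v = A^{-T} g
manual_grad = -np.tril(np.outer(v, x))
assert np.allclose(grad(scalar_loss)(A), manual_grad)
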
def logistic_predictions(weights, inputs):
    # Outputs probability of a label being true according to logistic model.
    return sigmoid(np.dot(inputs, weights))
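
# Hedged sketch around logistic_predictions(): sigmoid, the toy data, and the training
# loop below are illustrative assumptions, showing the usual pattern of differentiating
# a Bernoulli log-likelihood with autograd.
from autograd import grad

def sigmoid(z):
    return 0.5 * (np.tanh(z / 2.0) + 1.0)     # logistic function, written via tanh

inputs = np.array([[0.52,  1.12,  0.77],
                   [0.88, -1.08,  0.15],
                   [0.52,  0.06, -1.30],
                   [0.74, -2.49,  1.39]])
targets = np.array([True, True, False, True])

def training_loss(weights):
    preds = logistic_predictions(weights, inputs)
    label_probabilities = preds * targets + (1 - preds) * (1 - targets)
    return -np.sum(np.log(label_probabilities))   # negative Bernoulli log-likelihood

training_gradient = grad(training_loss)
weights = np.zeros(3)
for _ in range(100):
    weights = weights - 0.1 * training_gradient(weights)   # plain gradient descent
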
def vector_dot_grad(*args, **kwargs):
    # fun_grad is a closure variable (the gradient of the wrapped function); the
    # trailing positional argument is the vector to contract it with.
    args, vector = args[:-1], args[-1]
    return np.dot(vector, fun_grad(*args, **kwargs))
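
# Hedged reconstruction of the context for vector_dot_grad(): it only makes sense as a
# closure over fun_grad = grad(fun), and taking grad of  vector . grad(fun)(x)  with
# respect to x yields a Hessian-vector product. The wrapper name below is illustrative.
from autograd import grad

def make_hessian_vector_product(fun, argnum=0):
    fun_grad = grad(fun, argnum)
    def vector_dot_grad(*args, **kwargs):
        args, vector = args[:-1], args[-1]
        return np.dot(vector, fun_grad(*args, **kwargs))
    return grad(vector_dot_grad, argnum)

# Example: for f(x) = sum(x**3) the Hessian is diag(6 * x), so H @ [1, 0] = [6, 0].
f = lambda x: np.sum(x**3)
hvp = make_hessian_vector_product(f)
print(hvp(np.array([1.0, 2.0]), np.array([1.0, 0.0])))   # approximately [6., 0.]
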
def loss(W_vect, X, T):
    # Negative log-posterior (up to a constant): L2_reg is a global weight penalty,
    # predictions returns log class probabilities, and T holds one-hot targets.
    log_prior = -L2_reg * np.dot(W_vect, W_vect)
    log_lik = np.sum(predictions(W_vect, X) * T)
    return -log_prior - log_lik
def forward_pass(self, inputs, param_vector):
    # Method of a fully-connected layer class: self.parser slices this layer's
    # weights and biases out of the flat param_vector.
    params = self.parser.get(param_vector, 'params')
    biases = self.parser.get(param_vector, 'biases')
    if inputs.ndim > 2:
        inputs = inputs.reshape((inputs.shape[0], np.prod(inputs.shape[1:])))  # flatten trailing dims
    return self.nonlinearity(np.dot(inputs, params) + biases)
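
# Hedged sketch of the parser this method leans on: a minimal WeightsParser-style helper
# that slices named blocks out of one flat parameter vector. The class and shapes below
# are illustrative, not the original implementation.
class WeightsParser(object):
    def __init__(self):
        self.idxs_and_shapes = {}
        self.num_weights = 0

    def add_weights(self, name, shape):
        size = int(np.prod(shape))
        self.idxs_and_shapes[name] = (slice(self.num_weights, self.num_weights + size), shape)
        self.num_weights += size

    def get(self, vect, name):
        idxs, shape = self.idxs_and_shapes[name]
        return np.reshape(vect[idxs], shape)

# e.g. a layer with a 784 x 10 weight matrix and a length-10 bias vector:
parser = WeightsParser()
parser.add_weights('params', (784, 10))
parser.add_weights('biases', (10,))
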
def activations(weights, *args):
    # Concatenate all state arrays plus a column of ones, so the bias lives inside weights.
    cat_state = np.concatenate(args + (np.ones((args[0].shape[0], 1)),), axis=1)
    return np.dot(cat_state, weights)
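
# Hedged usage sketch for activations(): the trailing column of ones folds the bias into
# the weight matrix, so weights needs one extra input row. Shapes below are illustrative
# (e.g. a vanilla RNN update combining inputs and hidden state).
batch, n_input, n_hidden = 8, 5, 10
rnn_inputs = np.random.randn(batch, n_input)
hiddens = np.random.randn(batch, n_hidden)
W_change = 0.1 * np.random.randn(n_input + n_hidden + 1, n_hidden)   # +1 row for the bias
new_hiddens = np.tanh(activations(W_change, rnn_inputs, hiddens))
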
def predictions(weights, inputs):
    # Plain multilayer perceptron; returns the final layer's pre-nonlinearity outputs.
    # unpack_layers and nonlinearity are module-level helpers.
    for W, b in unpack_layers(weights):
        outputs = np.dot(inputs, W) + b
        inputs = nonlinearity(outputs)
    return outputs
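
# Hedged sketch of the helpers this predictions() assumes: layer_sizes, nonlinearity, and
# unpack_layers are guesses at the surrounding script (a flat parameter vector split into
# per-layer W, b), shown so the forward pass can be run end to end.
layer_sizes = [1, 20, 20, 1]
nonlinearity = np.tanh

def unpack_layers(weights):
    for m, n in zip(layer_sizes[:-1], layer_sizes[1:]):
        W = weights[:m * n].reshape(m, n)
        b = weights[m * n:(m + 1) * n]
        yield W, b
        weights = weights[(m + 1) * n:]

num_weights = sum((m + 1) * n for m, n in zip(layer_sizes[:-1], layer_sizes[1:]))
mlp_weights = 0.1 * np.random.randn(num_weights)
outputs = predictions(mlp_weights, np.linspace(-1.0, 1.0, 30)[:, None])   # shape (30, 1)
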
def predictions(W_vect, inputs):
    # Classifier variant: subtracting the row-wise logsumexp normalizes the final
    # outputs into log class probabilities.
    for W, b in unpack_layers(W_vect):
        outputs = np.dot(inputs, W) + b
        inputs = np.tanh(outputs)
    return outputs - logsumexp(outputs, axis=1, keepdims=True)
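
# Hedged note on the final line above: subtracting the row-wise logsumexp turns the last
# layer's outputs into normalized log class probabilities (a log-softmax), which is what
# loss() earlier multiplies against one-hot targets T. Quick check with illustrative logits:
from autograd.scipy.special import logsumexp

logits = np.random.randn(4, 3)
log_probs = logits - logsumexp(logits, axis=1, keepdims=True)
assert np.allclose(np.sum(np.exp(log_probs), axis=1), 1.0)   # each row is a proper distribution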