Example #1
import numpy as np
import scipy.special as scispec
# `fn` refers to the project's own helper module (its import is not shown in this snippet).


def log_poisson_likelihood_opt(param, *args):
    """
    Considers only the log-likelihood of the Poisson distribution in front of the Gaussian
    process when optimizing the latent values - note that there are no hyper-parameters to
    consider here. The log-likelihood can be optimized in place of the likelihood because
    the natural log is monotonically increasing:

        log P(D|v) = sum_i [ k_i * v_i - exp(v_i) - log(k_i!) ]    with lambda_i = exp(v_i)

    :param param: v_array containing the latent log-intensities
    :param args: k_array, which is the data set
    :return: negated log of the combined Poisson distributions (negated so that a
             minimizer maximizes the likelihood)
    """
    # Define parameters and arguments
    v_array = param
    k_array = args[0]

    # Generate Objective Function: log(P(D|v))
    exp_term = -1 * np.sum(np.exp(v_array))
    product_term = np.matmul(v_array, np.transpose(k_array))

    factorial_k = scispec.gamma(k_array + np.ones_like(k_array))
    # factorial_term = - np.sum(np.log(factorial_k))  # summation of logs = log of product
    factorial_term = -np.sum(
        fn.log_special(factorial_k))  # summation of logs = log of product

    log_p_likelihood = exp_term + product_term + factorial_term
    # Negate so that minimizing this objective maximizes the likelihood
    log_p_likelihood_convex = -1 * log_p_likelihood
    return log_p_likelihood_convex
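
# --- Added note (a minimal sketch, not part of the original code): since
# lambda = exp(v), this objective is just the negation of log_poisson_likelihood
# below, assuming fn.log_special agrees with np.log on strictly positive inputs:
#
#     v = np.array([0.2, -0.1])
#     k = np.array([1.0, 2.0])
#     log_poisson_likelihood_opt(v, k) == -log_poisson_likelihood(np.exp(v), k)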


def log_poisson_likelihood(lambda_array, k_array):
    """
    Takes in the intensity array (lambda) and the observations array.
    :param lambda_array: the assumed actual intensity array
    :param k_array: the observations array
    :return: log of the combined Poisson distributions
    """

    exp_term = -1 * np.sum(lambda_array)
    product_term_array = fn.log_special(lambda_array) * k_array
    product_term = np.sum(product_term_array)

    factorial_k = scispec.gamma(k_array +
                                np.ones_like(k_array))  # Gamma(k+1) = k!
    factorial_term = -np.sum(
        np.log(factorial_k))  # summation of logs = log of product

    log_p_likelihood = exp_term + product_term + factorial_term

    # Note this will be a negative value due to the very small likelihood
    return log_p_likelihood
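
# --- Added illustrative check (a minimal sketch, not part of the original
# code): verify the hand-rolled Poisson log-likelihood against
# scipy.stats.poisson.logpmf on made-up example values; np.log stands in
# for fn.log_special, which is assumed to agree with it on positive inputs.
import numpy as np
import scipy.special as scispec
from scipy.stats import poisson

lam = np.array([1.5, 2.0, 0.5])  # hypothetical intensities
k = np.array([1.0, 3.0, 0.0])    # hypothetical observed counts

manual = (-np.sum(lam)
          + np.sum(np.log(lam) * k)
          - np.sum(np.log(scispec.gamma(k + np.ones_like(k)))))
reference = np.sum(poisson.logpmf(k, lam))
assert np.isclose(manual, reference)  # both yield the same log-likelihood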
Example #3
x_vox = fn.row_create(x_mesh)
y_vox = fn.row_create(y_mesh)
t_vox = fn.row_create(t_mesh)
k_vox = fn.row_create(k_mesh)
print('k_vox shape is', k_vox.shape)
print("Initial Data Points are ", k_vox)

# Initialise arguments and parameters for optimization
# Arbitrary vector used by the Newton-CG optimization algorithm
initial_p_array = np.ones_like(k_vox)

# Choose an appropriate starting point for the optimization:
# initialise an array of candidate scalar starting values
initial_v_scalar = np.arange(0, 10, 1)

initial_v_array = fn.log_special(k_vox)
# initial_v_array = np.ones_like(k_vox) * 1

# print('The initial_v_array is', initial_v_array)
"""If the initial v_array is too far out, the optimization cannot be completed successfully. It is reasonable to
assume that a good starting point would be the log of the initial data values"""

# Tuple containing all arguments to be passed into the objective function,
# jacobian and hessian; we can specify which arguments each function uses
arguments_v = (k_vox, initial_p_array)

# -------------------------------------------------------------------- END OF TESTING FOR THE OBJECTIVE FUNCTION
start_poisson_opt = time.perf_counter()  # time.clock() was removed in Python 3.8

# Start Optimization Algorithm for latent intensities - note this does not take into account the intensity locations
# ChangeParam
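
# --- Added illustrative sketch (not part of the original code): a plausible
# optimizer call, assuming log_poisson_likelihood_opt from Example #1 is in
# scope. Newton-CG needs an analytical jacobian, which this snippet does not
# show, so this self-contained sketch falls back to a quasi-Newton method.
import scipy.optimize as scopt

result = scopt.minimize(fun=log_poisson_likelihood_opt,
                        x0=initial_v_array,
                        args=arguments_v,
                        method='L-BFGS-B')
latent_v_array = result.x  # optimized latent log-intensities
print('Poisson optimization took', time.perf_counter() - start_poisson_opt, 'seconds')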
Example #4
# ------------------------------------------End of SELECTION FOR EXCLUSION OF ZERO POINTS

# ------------------------------------------ Tabulate Poisson Likelihood

# Use k_quad and the latent intensity v array to obtain the likelihood.
# k_quad now contains data from year 2014, whereas latent_v_array was
# optimized on the earlier data set.


# Generate Objective Function: log(P(D|v))
exp_term = -1 * np.sum(np.exp(latent_v_array))
product_term = np.matmul(latent_v_array, np.transpose(k_quad))

factorial_k = scispec.gamma(k_quad + np.ones_like(k_quad))
# factorial_term = - np.sum(np.log(factorial_k))  # summation of logs = log of product
factorial_term = - np.sum(fn.log_special(factorial_k))  # summation of logs = log of product

log_p_likelihood = exp_term + product_term + factorial_term

print('The Log Poisson Likelihood is', log_p_likelihood)

# ------------------------------------------------------- Tabulate the GP Log Marginal Likelihood

# The covariance matrix and prior mean are created here as components of the objective function
prior_mean = mean_func_scalar(mean_optimal, xy_quad)

# Select Kernel and Construct Covariance Matrix
if kernel == 'matern3':
    c_auto = fast_matern_2d(sigma_optimal, length_optimal, xy_quad, xy_quad)
elif kernel == 'matern1':
    c_auto = fast_matern_1_2d(sigma_optimal, length_optimal, xy_quad, xy_quad)
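
# --- Added illustrative sketch (not part of the original code): how the GP
# log marginal likelihood could be tabulated from the pieces above, using a
# Cholesky factorisation of the covariance matrix. latent_v_array stands in
# for the GP function values, and the small jitter term is an assumption made
# purely for numerical stability.
n = c_auto.shape[0]
diff = latent_v_array - prior_mean
chol = np.linalg.cholesky(c_auto + 1e-10 * np.eye(n))
alpha = np.linalg.solve(chol.T, np.linalg.solve(chol, diff))  # alpha = C^{-1} (v - mu)
log_det = 2 * np.sum(np.log(np.diag(chol)))  # log|C| from the Cholesky factor
log_gp_marginal = -0.5 * np.matmul(diff, alpha) - 0.5 * log_det - 0.5 * n * np.log(2 * np.pi)
print('The GP Log Marginal Likelihood is', log_gp_marginal)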