def discrete_uniform(x, lower=0, upper=1):
    r""" Discrete Uniform distribution log-likelihood.

        :param x: *int, np.array[int].*
        :param lower: (optional) *int, float.* Lower bound, default is 0.
        :param upper: (optional) *int, float.* Upper bound, default is 1.

        :raises ValueError: if ``x`` is a non-integer array.

        .. math ::
            \log{P(x; a, b)} = -n\log(b-a)
    """
    # Outside the (inclusive) bounds the likelihood is zero.
    if fails_constraints(x >= lower, x <= upper):
        return -np.inf

    if isinstance(x, np.ndarray):
        # Accept any integer dtype (int8 .. int64, unsigned too); the old
        # test `x.dtype != np.int_` rejected valid arrays whose width
        # differed from the platform default (e.g. int32 on 64-bit Linux).
        if not np.issubdtype(x.dtype, np.integer):
            raise ValueError(
                'x must be integers, function received {}'.format(x))
        return -np.size(x) * np.log(upper - lower)
    elif isinstance(x, numbers.Integral):
        # numbers.Integral covers Python ints and NumPy integer scalars.
        return -np.log(upper - lower)
    else:
        # Non-integer scalar: zero probability mass.
        return -np.inf
def poisson(x, rate=1):
    r""" Poisson distribution log-likelihood.

        :param x: *int, float, np.array.* Event count.
        :param rate: (optional) *int, float, np.array.* Rate parameter,
            :math:`\lambda > 0`. Defaults to 1.

        :raises ValueError: if ``rate`` is a vector whose length differs
            from ``x``.

        .. math ::
            \log{P(x; \lambda)} \propto x \log{\lambda} - \lambda
    """
    if fails_constraints(rate > 0):
        return -np.inf

    if np.size(rate) != 1 and len(x) != len(rate):
        raise ValueError('If rate is a vector, x must be the same size as rate.'
                         ' We got x={}, rate={}'.format(x, rate))

    # Fold rate into the elementwise sum so a vector rate contributes
    # -sum(rate_i) and the result is always a scalar. The previous form,
    # np.sum(x*np.log(rate)) - np.size(x)*rate, returned an *array* (with
    # the wrong value n*rate_i) whenever rate was a vector.
    return np.sum(x * np.log(rate) - rate)
def poisson(x, rate=1):
    r""" Poisson distribution log-likelihood.

        :param x: *int, float, np.array.* Event count.
        :param rate: (optional) *int, float, np.array.* Rate parameter,
            :math:`\lambda > 0`. Defaults to 1.

        :raises ValueError: if ``rate`` is a vector whose length differs
            from ``x``.

        .. math ::
            \log{P(x; \lambda)} \propto x \log{\lambda} - \lambda
    """
    if outofbounds(rate > 0):
        return -np.inf

    if np.size(rate) != 1 and len(x) != len(rate):
        raise ValueError('If rate is a vector, x must be the same size as rate.'
                         ' We got x={}, rate={}'.format(x, rate))

    # Summing elementwise keeps the result scalar for vector rates: each
    # observation contributes x_i*log(rate_i) - rate_i. The previous
    # np.size(x)*rate term returned an array when rate was a vector.
    return np.sum(x * np.log(rate) - rate)
def uniform(x, lower=0, upper=1):
    r""" Uniform distribution log-likelihood. Bounds are inclusive.

        :param x: *int, float, np.array.*
        :param lower: (optional) *int, float.* Lower bound, default is 0.
        :param upper: (optional) *int, float.* Upper bound, default is 1.

        .. math ::
            \log{P(x; a, b)} = -n\log(b-a)
    """
    # Any observation outside [lower, upper] has zero density.
    if fails_constraints(x >= lower, x <= upper):
        return -np.inf

    # Each of the n observations contributes -log(b - a).
    n_obs = np.size(x)
    width = upper - lower
    return -n_obs * np.log(width)
def uniform(x, lower=0, upper=1):
    r""" Uniform distribution log-likelihood. Bounds are inclusive.

        :param x: *int, float, np.array.*
        :param lower: (optional) *int, float.* Lower bound, default is 0.
        :param upper: (optional) *int, float.* Upper bound, default is 1.

        .. math ::
            \log{P(x; a, b)} = -n\log(b-a)
    """
    if outofbounds(x >= lower, x <= upper):
        # Zero density outside the support.
        return -np.inf

    log_width = np.log(upper - lower)
    return np.size(x) * -log_width
def exponential(x, rate=1):
    r""" Log likelihood of the exponential distribution.

        :param x: *int, float, np.array.* Support is :math:`x > 0`.
        :param rate: (optional) *int, float, np.array.* Rate parameter,
            :math:`\lambda > 0`. Defaults to 1.

        :raises ValueError: if ``rate`` is a vector whose length differs
            from ``x``.

        .. math ::
            \log{P(x; \lambda)} \propto \log{\lambda} - \lambda x
    """
    # The exponential distribution is supported only on x > 0; the x > 0
    # constraint was missing here, so negative observations silently got a
    # finite (wrong) log-likelihood instead of -inf.
    if outofbounds(x > 0, rate > 0):
        return -np.inf

    if np.size(rate) != 1 and len(x) != len(rate):
        raise ValueError('If rate is a vector, x must be the same size as rate.'
                         ' We got x={}, rate={}'.format(x, rate))

    return np.sum(np.log(rate) - rate*x)
def exponential(x, rate=1):
    r""" Log likelihood of the exponential distribution.

        :param x: *int, float, np.array.* Support is :math:`x > 0`.
        :param rate: (optional) *int, float, np.array.* Rate parameter,
            :math:`\lambda > 0`. Defaults to 1.

        .. math ::
            \log{P(x; \lambda)} \propto \log{\lambda} - \lambda x
    """
    # Both the observations and the rate must be positive.
    if fails_constraints(x > 0, rate > 0):
        return -np.inf

    rate_is_vector = np.size(rate) != 1
    if rate_is_vector and len(x) != len(rate):
        raise ValueError('If rate is a vector, x must be the same size as rate.'
                         ' We got x={}, rate={}'.format(x, rate))

    per_obs = np.log(rate) - rate * x
    return np.sum(per_obs)
def normal(x, mu=0, sig=1):
    r""" Normal distribution log-likelihood.

        :param x: *int, float, np.array.*
        :param mu: (optional) *int, float, np.array.* Location parameter of
            the normal distribution. Defaults to 0.
        :param sig: (optional) *int, float.* Standard deviation of the normal
            distribution, :math:`\sigma > 0`. Defaults to 1.

        :raises ValueError: if ``mu`` is a vector whose length differs
            from ``x``.

        .. math::
            \log{P(x; \mu, \sigma)} \propto -\log{\sigma}
                - \frac{(x - \mu)^2}{2 \sigma^2}
    """
    # The docstring requires sigma > 0, but nothing enforced it: a
    # non-positive sig made np.log(sig) return NaN (with a warning) rather
    # than rejecting the parameter. Treat it like the other distributions
    # and report zero likelihood.
    if np.any(sig <= 0):
        return -np.inf

    if np.size(mu) != 1 and len(x) != len(mu):
        raise ValueError('If mu is a vector, x must be the same size as mu.'
                         ' We got x={}, mu={}'.format(x, mu))

    return np.sum(-np.log(sig) - (x - mu)**2/(2*sig**2))
def normal(x, mu=0, sig=1):
    r""" Normal distribution log-likelihood.

        :param x: *int, float, np.array.*
        :param mu: (optional) *int, float, np.array.* Location parameter of
            the normal distribution. Defaults to 0.
        :param sig: (optional) *int, float.* Standard deviation of the normal
            distribution, :math:`\sigma > 0`. Defaults to 1.

        .. math::
            \log{P(x; \mu, \sigma)} \propto -\log{\sigma}
                - \frac{(x - \mu)^2}{2 \sigma^2}
    """
    mu_is_vector = np.size(mu) != 1
    if mu_is_vector and len(x) != len(mu):
        raise ValueError('If mu is a vector, x must be the same size as mu.'
                         ' We got x={}, mu={}'.format(x, mu))

    # Unnormalized Gaussian log-density, summed over all observations.
    residual = x - mu
    log_kernel = -np.log(sig) - residual**2 / (2 * sig**2)
    return np.sum(log_kernel)
def discrete_uniform(x, lower=0, upper=1):
    r""" Discrete Uniform distribution log-likelihood.

        :param x: *int, np.array[int].*
        :param lower: (optional) *int, float.* Lower bound, default is 0.
        :param upper: (optional) *int, float.* Upper bound, default is 1.

        :raises ValueError: if ``x`` is a non-integer array.

        .. math ::
            \log{P(x; a, b)} = -n\log(b-a)
    """
    if outofbounds(x >= lower, x <= upper):
        return -np.inf

    if isinstance(x, np.ndarray):
        # Accept every integer dtype; comparing dtype against np.int_
        # rejected valid arrays of non-default width (e.g. int32 on a
        # 64-bit platform).
        if not np.issubdtype(x.dtype, np.integer):
            raise ValueError('x must be integers, function received {}'.format(x))
        return -np.size(x) * np.log(upper - lower)
    elif isinstance(x, (int, np.integer)):
        # np.integer covers all NumPy integer scalar types, not just the
        # platform-default np.int_ the original checked for.
        return -np.log(upper - lower)
    else:
        # Non-integer scalar: zero probability mass.
        return -np.inf
def size(self):
    """Return a State mapping each variable name to its number of elements."""
    element_counts = [(name, np.size(self[name])) for name in self]
    return State(element_counts)