def points_mode(rv: RandomVariable, samples=100, n=10):
    """Estimate the mode of ``rv`` from a batch of drawn points.

    :param rv: random variable to draw from
    :param samples: number of points to draw
    :param n: forwarded to ``mode_from_points``
    :return: the first mode found by ``mode_from_points``
    """
    drawn = rv.sample(size=samples)
    densities = rv.p(drawn)
    # mode_from_points expects 2-d input; lift 1-d draws to column vectors.
    if drawn.ndim == 1:
        drawn = drawn.reshape(-1, 1)
    return mode_from_points(drawn, densities, n=n)[0]
def metropolis(size: int, pdf: F[[np.ndarray], np.ndarray], proposal: RandomVariable, M: float) -> np.ndarray:
    """Draw ``size`` samples from ``pdf`` by rejection sampling.

    Candidates are drawn from ``proposal`` and accepted with probability
    ``pdf(x) / (M * proposal.p(x))``; the envelope ``M * proposal.p`` must
    dominate ``pdf`` everywhere or sampling is invalid.

    :param size: number of samples
    :param pdf: pdf to sample from
    :param proposal: proposal distribution
    :param M: scale factor making ``M * proposal.p`` dominate ``pdf``
    :return: array of samples
    :raises ValueError: if M is too small (an acceptance ratio exceeds 1.0)
    """
    samples = []
    while len(samples) < size:
        remainder = size - len(samples)
        candidate = proposal.sample(size=remainder)
        accept_rate = pdf(candidate) / (M * proposal.p(candidate))
        max_rate = accept_rate.max()
        if max_rate > 1.0:
            # Envelope no longer dominates pdf; accepted draws would be biased.
            # (Original message read "M to small ... m: " with a dangling tail.)
            raise ValueError("M too small, accept rate %s > 1.0" % max_rate)
        rejection_probability = np.random.rand(remainder)
        accept_mask = accept_rate > rejection_probability
        samples.extend(candidate[accept_mask])
    return np.array(samples)
def metropolis_hastings(size: int, pdf: F[[np.ndarray], np.ndarray], proposal: RandomVariable, initial: np.ndarray = None) -> List[np.ndarray]:
    """Draw ``size`` samples from ``pdf`` via the Metropolis-Hastings algorithm.

    :param size: number of samples
    :param pdf: (possibly unnormalized) pdf to sample from
    :param proposal: conditional proposal distribution
    :param initial: starting point; drawn uniformly at random if omitted
    :return: array of samples
    """
    if initial is None:
        p = np.random.rand(*proposal.shape)
    else:
        p = initial
    samples = []
    while len(samples) < size:
        sample = proposal.sample(p)
        accept_rate = min([(pdf(sample) * proposal.p(p, sample))
                           / (pdf(p) * proposal.p(sample, p)), 1])
        if np.random.rand() < accept_rate:
            p = sample
        # Fix: the chain must emit its current state on EVERY step. Recording
        # only accepted proposals (as before) biases the sample toward
        # high-acceptance regions and breaks the stationary distribution.
        samples.append(p)
    return np.array(samples)
def med(cls, n: int = None, probability: np.float32 = None) -> RandomVariable:
    """Build a Binomial RandomVariable, binding any supplied parameters.

    Parameters left as None become arguments of the returned closures.

    :param n: number of observations
    :param probability: probability of positive observation
    :return: RandomVariable
    """
    missing_n = n is None
    missing_prob = probability is None
    if missing_n and missing_prob:
        _sample = Binomial.sample
        _p = Binomial.p
    elif missing_n:
        def _sample(n: np.int, size: int = 1):
            return Binomial.sample(n, probability, size)

        def _p(x: np.ndarray, n: np.int):
            return Binomial.p(x, n, probability)
    elif missing_prob:
        def _sample(probability: np.float, size: int = 1):
            return Binomial.sample(n, probability, size)

        def _p(x: np.ndarray, probability: np.float):
            return Binomial.p(x, n, probability)
    else:
        def _sample(size: int = 1):
            return Binomial.sample(n, probability, size)

        def _p(x: np.ndarray):
            return Binomial.p(x, n, probability)
    parameters = {
        Binomial.n: Parameter(shape=(), value=n),
        Binomial.probability: Parameter(shape=(), value=probability),
    }
    return RandomVariable(_sample, _p, shape=(), parameters=parameters, cls=cls)
def med(cls, probabilities: np.ndarray = None, categories: int = None) -> RandomVariable:
    """Build a Categorical RandomVariable.

    :param probabilities: probability of each category; left free if None
    :param categories: number of categories (used when probabilities is None)
    :return: RandomVariable
    """
    if probabilities is not None:
        def _sample(size: int = 1):
            return Categorical.sample(probabilities, size)

        def _p(x):
            return Categorical.p(x, probabilities)

        shape = probabilities.size
    else:
        _sample = Categorical.sample
        _p = Categorical.p
        shape = categories
    parameters = {
        Categorical.probabilities: Parameter(shape=shape, value=probabilities)
    }
    return RandomVariable(_sample, _p, shape=(), parameters=parameters, cls=cls)
def med(cls, points: np.ndarray = None, variance: float = 2.0, error: float = 1e-1, verbose: bool = False, density_estimator: Density = RCKD) -> RandomVariable:
    """Build a RandomVariable backed by a density estimate over ``points``.

    Sampling resamples the supplied points uniformly at random; probabilities
    come from the fitted density estimator.

    :param points: points to estimate density from
    :param variance: variance of kernel
    :param error: acceptable error of partition function
    :param verbose: print estimation of partition function
    :param density_estimator: density estimator class
    :return: RandomVariable
    """
    estimator = density_estimator(variance=variance, error=error, verbose=verbose)
    estimator.fit(points)

    def _sample(size: int = 1):
        indices = np.random.randint(low=0, high=points.shape[0], size=size)
        return points[indices]

    def _p(x: np.ndarray):
        return estimator.p(x)

    return RandomVariable(_sample, _p, shape=None, parameters={}, cls=cls)
def med(cls, alpha: np.ndarray = None, categories: int = None) -> RandomVariable:
    """Build a Dirichlet RandomVariable.

    :param alpha: probability weights; left free if None
    :param categories: number of categories (used when alpha is None)
    :return: RandomVariable
    """
    if alpha is not None:
        def _sample(size: int = 1):
            return Dirichlet.sample(alpha, size)

        def _p(x):
            return Dirichlet.p(x, alpha)

        shape = alpha.size
    else:
        _sample = Dirichlet.sample
        _p = Dirichlet.p
        shape = categories
    parameters = {Dirichlet.alpha: Parameter(shape=shape, value=alpha)}
    return RandomVariable(_sample, _p, shape, parameters=parameters, cls=cls)
def med(cls, probability: np.float = None) -> RandomVariable:
    """Build a Geometric RandomVariable.

    :param probability: probability of success; left free if None
    :return: RandomVariable
    """
    if probability is not None:
        def _sample(size: int = 1):
            return Geometric.sample(probability, size)

        def _p(x: np.ndarray):
            return Geometric.p(x, probability)
    else:
        _sample = Geometric.sample
        _p = Geometric.p
    parameters = {
        Geometric.probability: Parameter(shape=(), value=probability)
    }
    return RandomVariable(_sample, _p, shape=(), parameters=parameters, cls=cls)
def med(cls, sampling=None, probability=None, fast_p=None) -> RandomVariable:
    """Build a Generic RandomVariable from user-supplied callables.

    Callables left as None are replaced by stubs raising NotImplementedError.

    :param sampling: sampling function
    :param probability: probability function
    :param fast_p: numba jitted probability function
    :return: RandomVariable
    """
    def _stub(message):
        # Factory for placeholder callables that always raise.
        def _raiser(*args, **kwargs):
            raise NotImplementedError(message)
        return _raiser

    if sampling is None:
        sampling = _stub("Sampling not implemented in this Generic")
    if probability is None:
        probability = _stub("Probability not implemented in this Generic")
    parameters = {
        Generic.sampling_function: Parameter(None, sampling),
        Generic.probability_function: Parameter(None, probability),
        Generic.fast_probability_function: Parameter(None, fast_p),
    }
    return RandomVariable(sampling, probability, shape=None, parameters=parameters, cls=cls)
def med(cls, N: np.int = None, K: np.int = None, n: np.int = None) -> RandomVariable:
    """Build a Hypergeometric RandomVariable with any subset of parameters
    fixed; unset ones become leading arguments of the returned closures.

    :param N: population size
    :param K: success states in population
    :param n: number of draws
    :return: RandomVariable
    """
    fixed = [N, K, n]
    open_slots = [i for i, value in enumerate(fixed) if value is None]
    closed_slots = [i for i, value in enumerate(fixed) if value is not None]

    def _merge(supplied):
        # Overlay caller-supplied values on the open slots, keep fixed ones.
        merged = [None] * 3
        for i, value in enumerate(supplied):
            merged[open_slots[i]] = value
        for i in closed_slots:
            merged[i] = fixed[i]
        return merged

    def _p(x, *args):
        return Hypergeometric.p(x, *_merge(args))

    def _sample(*args, size: int = 1):
        call_args = _merge(args[:len(open_slots)])
        # An extra trailing positional beyond the free slots is ``size``.
        if len(args) > len(open_slots):
            size = args[-1]
        return Hypergeometric.sample(*call_args, size=size)

    parameters = {
        Hypergeometric.N: Parameter((), N),
        Hypergeometric.K: Parameter((), K),
        Hypergeometric.n: Parameter((), n),
    }
    return RandomVariable(_sample, _p, shape=(), parameters=parameters, cls=cls)
def med(cls, density, lower_bound: np.ndarray, upper_bound: np.ndarray, points: int = 1000, variance: float = 2.0, error: float = 1e-1, batch: int = 25, verbose: bool = False) -> RandomVariable:
    """Build a RandomVariable from an arbitrary density on a bounded box.

    Metropolis-Hastings is run against ``density`` to collect points, then a
    kernel density (RCKD) is fitted to those points for probability queries.
    Sampling resamples the collected points uniformly.

    :param density: function to act as density
    :param lower_bound: lower bound of density support
    :param upper_bound: upper bound of density support
    :param points: number of mcmc points kept for the density estimate
    :param variance: variance of kernel
    :param error: tolerance of normalization constant error
    :param batch: particles in each mcmc step
    :param verbose: print error while estimating partition function
    :return: RandomVariable
    """
    lower_bound = np.array(lower_bound)
    upper_bound = np.array(upper_bound)
    initial = multivariate_uniform.sample(lower_bound, upper_bound, size=batch)
    # Run at least 10000 mcmc steps, keep only the trailing ``points`` draws.
    chain = fast_metropolis_hastings(np.maximum(points, 10000), density,
                                     initial=initial)
    samples = chain[-points:]
    estimator = RCKD(variance=variance, sampling_sz=100, error=error,
                     verbose=verbose)
    estimator.fit(samples)

    def _sample(size: int = 1):
        indices = np.random.randint(low=0, high=samples.shape[0], size=size)
        return samples[indices]

    def _p(x: np.ndarray):
        return estimator.p(x)

    return RandomVariable(_sample, _p, shape=None, parameters={}, cls=cls)
def med(cls, a: np.ndarray = None, b: np.ndarray = None, dimension: Tuple = None) -> RandomVariable:
    """Build a multivariate uniform RandomVariable on the box [a, b].

    Whichever bound is omitted becomes an argument of the returned closures.

    :param a: lower bound
    :param b: upper bound
    :param dimension: dimension of the r.v. when both bounds are free
    :return: RandomVariable
    """
    if a is not None and b is not None:
        def _sample(size: int = 1):
            return MultiVariateUniform.sample(a, b, size)

        def _p(x: np.ndarray):
            return MultiVariateUniform.p(x, a, b)

        shape = a.size
    elif a is not None:  # upper bound free
        def _sample(b: np.ndarray, size: int = 1):
            return MultiVariateUniform.sample(a, b, size)

        def _p(x: np.ndarray, b: np.ndarray):
            return MultiVariateUniform.p(x, a, b)

        shape = a.size
    elif b is not None:  # lower bound free
        def _sample(a: np.ndarray, size: int = 1):
            return MultiVariateUniform.sample(a, b, size)

        def _p(x: np.ndarray, a: np.ndarray):
            return MultiVariateUniform.p(x, a, b)

        shape = b.size
    else:  # both bounds free
        _sample = MultiVariateUniform.sample
        _p = MultiVariateUniform.p
        shape = dimension
    parameters = {
        MultiVariateUniform.a: Parameter(shape, a),
        MultiVariateUniform.b: Parameter(shape, b),
    }
    return RandomVariable(_sample, _p, shape=shape, parameters=parameters, cls=cls)
def med(cls, mu: np.float = None, lam: np.float = None, a: np.float = None, b: np.float = None) -> RandomVariable:
    """Build a Normal-inverse-gamma RandomVariable; unset parameters become
    leading arguments of the returned closures.

    :param mu: mean
    :param lam: precision
    :param a: shape
    :param b: rate
    :return: RandomVariable
    """
    fixed = [mu, lam, a, b]
    open_slots = [i for i, value in enumerate(fixed) if value is None]
    closed_slots = [i for i, value in enumerate(fixed) if value is not None]

    def _merge(supplied):
        # Overlay caller-supplied values on the open slots, keep fixed ones.
        merged = [None] * 4
        for i, value in enumerate(supplied):
            merged[open_slots[i]] = value
        for i in closed_slots:
            merged[i] = fixed[i]
        return merged

    def _p(x, *args):
        return NormalInverseGamma.p(x, *_merge(args))

    def _sample(*args, size: int = 1):
        call_args = _merge(args[:len(open_slots)])
        # An extra trailing positional beyond the free slots is ``size``.
        if len(args) > len(open_slots):
            size = args[-1]
        return NormalInverseGamma.sample(*call_args, size=size)

    parameters = {
        NormalInverseGamma.mu: Parameter((), mu),
        NormalInverseGamma.lam: Parameter((), lam),
        NormalInverseGamma.a: Parameter((), a),
        NormalInverseGamma.b: Parameter((), b),
    }
    return RandomVariable(_sample, _p, shape=(), parameters=parameters, cls=cls)
def med(cls, probability: np.float32 = None) -> RandomVariable:
    """Build a Bernoulli RandomVariable.

    :param probability: probability of positive outcome; left free if None
    :return: RandomVariable
    """
    if probability is not None:
        def _sample(size: int = 1):
            return Bernoulli.sample(probability, size)

        def _p(x):
            return Bernoulli.p(x, probability)
    else:
        _sample = Bernoulli.sample
        _p = Bernoulli.p
    parameters = {
        Bernoulli.probability: Parameter(shape=(), value=probability)
    }
    return RandomVariable(_sample, _p, shape=(), parameters=parameters, cls=cls)
def _search_posterior(data: Tuple[np.ndarray], likelihood: Union[RandomVariable, Callable[[Tuple[np.ndarray]], np.ndarray]], prior: RandomVariable, samples: int, energy: float, batch: int, volume: float):
    """Search the parameter posterior using jitted log-probabilities.

    :param data: observed data
    :param likelihood: likelihood r.v. or callable
    :param prior: prior RandomVariable
    :param samples: number of posterior samples to produce
    :param energy: energy setting forwarded to the search
    :param batch: number of starting particles drawn from the prior
    :param volume: volume setting forwarded to the search
    :return: result of ``search_posterior_estimation``
    """
    jit_ll = jitted_likelihood(likelihood)
    jit_prior_p = jitted_prior(prior)
    log_likelihood, log_prior = jit_log_probabilities(data, jit_ll, jit_prior_p)
    return search_posterior_estimation(
        size=samples,
        log_likelihood=log_likelihood,
        log_prior=log_prior,
        initial=prior.sample(size=batch),
        energy=energy,
        volume=volume)
def med(cls, x: np.ndarray = None, variables: np.ndarray = None, sigma: np.float = None) -> RandomVariable:
    """Build a univariate linear RandomVariable; unset parameters become
    leading arguments of the returned closures.

    :param x: input
    :param variables: weights
    :param sigma: variance of estimates
    :return: RandomVariable
    """
    fixed = [x, variables, sigma]
    open_slots = [i for i, value in enumerate(fixed) if value is None]
    closed_slots = [i for i, value in enumerate(fixed) if value is not None]

    def _merge(supplied):
        # Overlay caller-supplied values on the open slots, keep fixed ones.
        merged = [None] * 3
        for i, value in enumerate(supplied):
            merged[open_slots[i]] = value
        for i in closed_slots:
            merged[i] = fixed[i]
        return merged

    def _p(x, *args):
        return UniLinear.p(x, *_merge(args))

    def _sample(*args, size: int = 1):
        return UniLinear.sample(*_merge(args), size=size)

    parameters = {
        UniLinear.x: Parameter((), x),
        UniLinear.variables: Parameter((), variables),
        UniLinear.sigma: Parameter((), sigma),
    }
    return RandomVariable(_sample, _p, shape=(), parameters=parameters, cls=cls)
def med(cls, n: int = None, probabilities: np.ndarray = None, outcomes: int = None) -> RandomVariable:
    """Build a Multinomial RandomVariable, binding any supplied parameters.

    :param n: number of observations
    :param probabilities: probability for each outcome
    :param outcomes: number of outcomes (used when probabilities is None)
    :return: RandomVariable
    """
    missing_n = n is None
    missing_prob = probabilities is None
    if missing_n and missing_prob:
        _sample = Multinomial.sample
        _p = Multinomial.p
        shape = outcomes
    elif missing_n:
        def _sample(n: np.ndarray, size: int = 1):
            return Multinomial.sample(n, probabilities, size)

        def _p(x: np.ndarray, n: np.ndarray):
            return Multinomial.p(x, n, probabilities)

        shape = probabilities.size
    elif missing_prob:
        def _sample(probabilities: np.ndarray, size: int = 1):
            return Multinomial.sample(n, probabilities, size)

        def _p(x: np.ndarray, probabilities: np.ndarray):
            return Multinomial.p(x, n, probabilities)

        shape = None
    else:
        def _sample(size: int = 1):
            return Multinomial.sample(n, probabilities, size)

        def _p(x: np.ndarray):
            return Multinomial.p(x, n, probabilities)

        shape = probabilities.size
    parameters = {
        Multinomial.n: Parameter(shape=(), value=n),
        Multinomial.probabilities: Parameter(shape=shape, value=probabilities),
    }
    return RandomVariable(_sample, _p, shape=shape, parameters=parameters, cls=cls)
def med(cls, x: np.ndarray = None, mu: Callable[[np.ndarray], np.float] = None, sigma: Callable[[np.ndarray, np.ndarray], np.float] = None, X: np.ndarray = None, Y: np.ndarray = None) -> RandomVariable:
    """Build a Gaussian-process RandomVariable; unset parameters become
    leading arguments of the returned closures.

    :param x: non-observed samples
    :param mu: mean function
    :param sigma: variance function
    :param X: observed samples
    :param Y: observed values
    :return: RandomVariable
    """
    fixed = [x, mu, sigma, X, Y]
    open_slots = [i for i, value in enumerate(fixed) if value is None]
    closed_slots = [i for i, value in enumerate(fixed) if value is not None]

    def _merge(supplied):
        # Overlay caller-supplied values on the open slots, keep fixed ones.
        merged = [None] * 5
        for i, value in enumerate(supplied):
            merged[open_slots[i]] = value
        for i in closed_slots:
            merged[i] = fixed[i]
        return merged

    def _p(x, *args):
        return GaussianProcess.p(x, *_merge(args))

    def _sample(*args, size: int = 1):
        return GaussianProcess.sample(*_merge(args), size=size)

    parameters = {
        GaussianProcess.x: Parameter(None, x),
        GaussianProcess.mu: Parameter(None, mu),
        GaussianProcess.sigma: Parameter(None, sigma),
        GaussianProcess.X: Parameter(None, X),
        GaussianProcess.Y: Parameter(None, Y),
    }
    return RandomVariable(_sample, _p, shape=(), parameters=parameters, cls=cls)
def med(cls, a: np.float = None, b: np.float = None) -> RandomVariable:
    """Build a Gamma RandomVariable, binding any supplied parameters.

    :param a: shape
    :param b: rate
    :return: RandomVariable
    """
    if a is not None and b is not None:
        def _sample(size: int = 1):
            return Gamma.sample(a, b, size)

        def _p(x):
            return Gamma.p(x, a, b)
    elif a is not None:  # rate free
        def _sample(b: np.float, size: int = 1):
            return Gamma.sample(a, b, size)

        def _p(x: np.ndarray, b: np.float):
            return Gamma.p(x, a, b)
    elif b is not None:  # shape free
        def _sample(a: np.float, size: int = 1):
            return Gamma.sample(a, b, size)

        def _p(x: np.ndarray, a: np.float):
            return Gamma.p(x, a, b)
    else:  # both free
        _sample = Gamma.sample
        _p = Gamma.p
    parameters = {
        Gamma.a: Parameter(shape=(), value=a),
        Gamma.b: Parameter(shape=(), value=b),
    }
    return RandomVariable(_sample, _p, shape=(), parameters=parameters, cls=cls)
def _sample_posterior(data: Tuple[np.ndarray], likelihood: Union[RandomVariable, Callable[[Tuple[np.ndarray]], np.ndarray]], prior: RandomVariable, size: int, energy: float, batch: int):
    """Sample the parameter posterior via log-space Metropolis-Hastings.

    :param data: observed data
    :param likelihood: likelihood r.v. or callable
    :param prior: prior RandomVariable
    :param size: number of posterior samples
    :param energy: energy setting forwarded to the sampler
    :param batch: number of starting particles drawn from the prior
    :return: posterior samples
    """
    jit_ll = jitted_likelihood(likelihood)
    jit_prior_p = jitted_prior(prior)
    log_likelihood, log_prior = jit_log_probabilities(data, jit_ll, jit_prior_p)
    return fast_metropolis_hastings_log_space_parameter_posterior_estimation(
        size=size,
        log_likelihood=log_likelihood,
        log_prior=log_prior,
        initial=prior.sample(size=batch),
        energy=energy)
def med(cls, a: np.float = None, b: np.float = None) -> RandomVariable:
    """Build a Uniform RandomVariable on [a, b], binding supplied bounds.

    :param a: lower bound
    :param b: upper bound
    :return: RandomVariable
    """
    if a is not None and b is not None:
        def _sample(size: int = 1):
            return Uniform.sample(a, b, size)

        def _p(x: np.ndarray):
            return Uniform.p(x, a, b)
    elif a is not None:  # upper bound free
        def _sample(b: np.ndarray, size: int = 1):
            return Uniform.sample(a, b, size)

        def _p(x: np.ndarray, b: np.ndarray):
            return Uniform.p(x, a, b)
    elif b is not None:  # lower bound free
        def _sample(a: np.ndarray, size: int = 1):
            return Uniform.sample(a, b, size)

        def _p(x: np.ndarray, a: np.ndarray):
            return Uniform.p(x, a, b)
    else:  # both free
        _sample = Uniform.sample
        _p = Uniform.p
    parameters = {Uniform.a: Parameter((), a), Uniform.b: Parameter((), b)}
    return RandomVariable(_sample, _p, shape=(), parameters=parameters, cls=cls)
def med(cls, lam: np.float32 = None) -> RandomVariable:
    """Build a Poisson RandomVariable.

    :param lam: rate; left free if None
    :return: RandomVariable
    """
    if lam is not None:
        def _sample(size: int = 1):
            return Poisson.sample(lam, size)

        def _p(x):
            return Poisson.p(x, lam)
    else:
        _sample = Poisson.sample
        _p = Poisson.p
    parameters = {Poisson.lam: Parameter(shape=(), value=lam)}
    return RandomVariable(_sample, _p, shape=(), parameters=parameters, cls=cls)
def med(cls, lam: np.float = None) -> RandomVariable:
    """Build an Exponential RandomVariable.

    :param lam: lambda, rate parameter; left free if None
    :return: RandomVariable
    """
    if lam is not None:
        def _sample(size: int = 1):
            return Exponential.sample(lam, size)

        def _p(x):
            return Exponential.p(x, lam)
    else:
        _sample = Exponential.sample
        _p = Exponential.p
    parameters = {Exponential.lam: Parameter(shape=(), value=lam)}
    return RandomVariable(_sample, _p, shape=(), parameters=parameters, cls=cls)