def __init__(self, outputs, inputs, hypers=None, params=None, distargs=None, rng=None):
    """Initialize a truncated-normal model on the interval [l, h].

    ``distargs`` must supply the truncation bounds ``'l'`` (low) and
    ``'h'`` (high).  The (alpha, beta) hyperparameters are fixed; the
    uncollapsed ``mu``/``sigma`` parameters are taken from ``params`` or
    sampled from the prior via ``NormalTrunc.sample_parameters``.
    """
    DistributionGpm.__init__(self, outputs, inputs, hypers, params, distargs, rng)
    # Distargs: truncation bounds (required keys; KeyError if absent).
    self.l = distargs['l']
    self.h = distargs['h']
    # Sufficient statistics.
    self.N = 0
    self.sum_x = 0
    self.sum_x_sq = 0
    # Hyperparameters (fixed).
    self.alpha = 2.
    self.beta = 2.
    # Uncollapsed mean and precision parameters.
    if params is None:
        params = {}
    self.mu = params.get('mu', None)
    self.sigma = params.get('sigma', 1)
    # BUG FIX: the previous test (`not self.mu`) treated a caller-supplied
    # mu of exactly 0 as "missing" and resampled both parameters.  Test mu
    # against None explicitly so mu=0 is respected.  sigma must be strictly
    # positive, so a falsy sigma (0 or None) still triggers resampling.
    if self.mu is None or not self.sigma:
        self.mu, self.sigma = NormalTrunc.sample_parameters(
            self.alpha, self.beta, self.l, self.h, self.rng)
def __init__(self, outputs, inputs, hypers=None, params=None, distargs=None, rng=None):
    """Initialize a partition-style model tracking per-value data and counts.

    Maintains ordered per-item data and per-value counts; the single
    concentration hyperparameter ``alpha`` defaults to 1.
    """
    DistributionGpm.__init__(self, outputs, inputs, hypers, params, distargs, rng)
    # Sufficient statistics: insertion-ordered mapping of observations and
    # of per-value occupancy counts.
    self.N = 0
    self.data = OrderedDict()
    self.counts = OrderedDict()
    # Hyperparameters.
    if hypers is None:
        hypers = {}
    self.alpha = hypers.get('alpha', 1.)
    # CONSISTENCY: sibling models validate their hyperparameters on
    # construction; the concentration must be strictly positive.
    assert self.alpha > 0
def __init__(self, outputs, inputs, hypers=None, params=None, distargs=None, rng=None):
    """Initialize the model with empty sufficient statistics.

    Reads the strictly-positive ``alpha``/``beta`` hyperparameters from
    ``hypers`` (both default to 1).
    """
    DistributionGpm.__init__(self, outputs, inputs, hypers, params, distargs, rng)
    # Sufficient statistics start from zero observations.
    self.N = 0
    self.x_sum = 0
    # Hyperparameters: fall back to an empty dict when none are given.
    hypers = hypers or {}
    self.alpha = hypers.get('alpha', 1.)
    self.beta = hypers.get('beta', 1.)
    assert self.alpha > 0
    assert self.beta > 0
def __init__(self, outputs, inputs, hypers=None, params=None, distargs=None, rng=None):
    """Initialize the model with empty sufficient statistics.

    Reads the strictly-positive ``a``/``b`` hyperparameters from
    ``hypers`` (both default to 1).
    """
    DistributionGpm.__init__(self, outputs, inputs, hypers, params, distargs, rng)
    # Sufficient statistics start from zero observations.
    self.N = 0
    self.sum_x = 0
    # Hyperparameters: fall back to an empty dict when none are given.
    hypers = hypers or {}
    self.a = hypers.get('a', 1)
    self.b = hypers.get('b', 1)
    assert self.a > 0
    assert self.b > 0
def __init__(self, outputs, inputs, hypers=None, params=None, distargs=None, rng=None):
    """Initialize a categorical model over ``k`` outcomes.

    ``distargs`` must supply ``'k'``, the number of categories; raises
    ``ValueError`` when it is absent.  The symmetric concentration
    ``alpha`` defaults to 1.
    """
    DistributionGpm.__init__(self, outputs, inputs, hypers, params, distargs, rng)
    # Distargs.
    # ROBUSTNESS: previously `distargs=None` (the default) crashed with
    # AttributeError on `.get`; normalize to an empty dict so the
    # missing-`k` case raises the intended ValueError instead.
    if distargs is None:
        distargs = {}
    k = distargs.get('k', None)
    if k is None:
        raise ValueError('Categorical requires distarg `k`.')
    self.k = int(k)
    # Sufficient statistics: per-category observation counts.
    self.N = 0
    self.counts = np.zeros(self.k)
    # Hyperparameters.
    if hypers is None:
        hypers = {}
    self.alpha = hypers.get('alpha', 1.)
def __init__(self, outputs, inputs, hypers=None, params=None, distargs=None, rng=None):
    """Initialize a directional model with circular sufficient statistics.

    Hyperparameters: ``a`` (prior concentration of the mean, > 0),
    ``b`` (prior mean of the mean, in [0, 2*pi]), and ``k`` (kappa, > 0).
    """
    DistributionGpm.__init__(self, outputs, inputs, hypers, params, distargs, rng)
    # Sufficient statistics accumulate the sine and cosine of observations.
    self.N = 0
    self.sum_sin_x = 0
    self.sum_cos_x = 0
    # Hyperparameters: fall back to an empty dict when none are given.
    hypers = {} if hypers is None else hypers
    self.a = hypers.get('a', 1.)
    self.b = hypers.get('b', pi)
    self.k = hypers.get('k', 1.5)
    # Validate: positive concentration and kappa; mean on the circle.
    assert self.a > 0
    assert 0 <= self.b <= 2 * pi
    assert self.k > 0
def __init__(self, outputs, inputs, hypers=None, params=None, distargs=None, rng=None):
    """Initialize the model with fixed hyperparameters and uncollapsed params.

    ``strength``/``balance`` are taken from ``params``; whenever either is
    missing or falsy they are both resampled from the prior via
    ``Beta.sample_parameters``.
    """
    DistributionGpm.__init__(self, outputs, inputs, hypers, params, distargs, rng)
    # Sufficient statistics over log(x) and log(1-x)-style accumulators.
    self.N = 0
    self.sum_log_x = 0
    self.sum_minus_log_x = 0
    # Hyperparameters (fixed).
    self.mu = 5.
    self.alpha = 1.
    self.beta = 1.
    # Uncollapsed parameters.
    params = {} if params is None else params
    self.strength = params.get('strength', None)
    self.balance = params.get('balance', 1)
    # Resample unless both parameters are present and truthy (equivalent,
    # by De Morgan, to the original `not a or not b` test).
    if not (self.strength and self.balance):
        self.strength, self.balance = Beta.sample_parameters(
            self.mu, self.alpha, self.beta, self.rng)
    assert self.mu > 0
    assert self.alpha > 0
    assert self.beta > 0
def __init__(self, outputs, inputs, hypers=None, params=None, distargs=None, rng=None):
    """Initialize the model with empty sufficient statistics.

    Hyperparameters ``m`` (default 0) and strictly-positive ``r``, ``s``,
    ``nu`` (each default 1) are read from ``hypers``.
    """
    DistributionGpm.__init__(self, outputs, inputs, hypers, params, distargs, rng)
    # Sufficient statistics: count, sum, and sum of squares.
    self.N = 0
    self.sum_x = 0
    self.sum_x_sq = 0
    # Hyperparameters: fall back to an empty dict when none are given.
    hypers = hypers or {}
    self.m = hypers.get('m', 0.)
    self.r = hypers.get('r', 1.)
    self.s = hypers.get('s', 1.)
    self.nu = hypers.get('nu', 1.)
    # Validate the strictly-positive hyperparameters.
    assert self.s > 0.
    assert self.r > 0.
    assert self.nu > 0.