def model():
    """Sample site "x" from a Normal or StudentT, chosen by ``dist_type``.

    ``dist_type``, ``loc``, ``scale`` and ``shape`` are taken from the
    enclosing scope.
    """
    with pyro.plate_stack("plates", shape):
        with pyro.plate("particles", 200000):
            # BUG FIX: the original tested the string literals
            # "dist_type" == "Normal", which is always False, so the
            # Normal branch was unreachable. Compare the variable.
            if dist_type == "Normal":
                pyro.sample("x", dist.Normal(loc, scale))
            else:
                pyro.sample("x", dist.StudentT(10.0, loc, scale))
def model():
    """Sample site "x" from Normal, StudentT, or AsymmetricLaplace.

    The branch is selected by ``dist_type`` from the enclosing scope;
    ``loc``, ``scale`` and ``shape`` are also free variables.
    """
    with pyro.plate_stack("plates", shape):
        # BUG FIX: the original compared the string literal "dist_type"
        # against "Normal"/"StudentT" — both comparisons are constant
        # False, so only the AsymmetricLaplace branch could ever run.
        if dist_type == "Normal":
            return pyro.sample("x", dist.Normal(loc, scale))
        elif dist_type == "StudentT":
            return pyro.sample("x", dist.StudentT(10.0, loc, scale))
        else:
            return pyro.sample("x", dist.AsymmetricLaplace(loc, scale, 1.5))
def model():
    """Sample "x" from a log-normal-style transformed standard Normal.

    The base Normal is pushed through an affine map followed by exp;
    ``event_shape`` (free variable) controls how many rightmost dims
    are treated as event dims.
    """
    base = dist.Normal(torch.zeros_like(loc), torch.ones_like(scale))
    transforms = [AffineTransform(loc, scale), ExpTransform()]
    site_dist = dist.TransformedDistribution(base, transforms)
    if event_shape:
        site_dist = site_dist.to_event(len(event_shape))
    with pyro.plate_stack("plates", batch_shape), pyro.plate("particles", 200000):
        return pyro.sample("x", site_dist)
def model():
    """Sample "x" from an affine-then-exp transformed standard Normal."""
    base = dist.Normal(torch.zeros_like(loc), torch.ones_like(scale))
    transforms = [AffineTransform(loc, scale), ExpTransform()]
    site_dist = dist.TransformedDistribution(base, transforms)
    with pyro.plate_stack("plates", shape), pyro.plate("particles", 200000):
        return pyro.sample("x", site_dist)
def model(data):
    """Draw latent "z" from a LinearHMM and observe ``data`` as Normal(z, 1).

    HMM parameters (``init_dist``, ``trans_mat``, ``trans_dist``,
    ``obs_mat``, ``obs_dist``, ``num_steps``) come from the enclosing scope.
    """
    latent_dist = dist.LinearHMM(
        init_dist, trans_mat, trans_dist, obs_mat, obs_dist, duration=num_steps
    )
    with pyro.plate_stack("plates", batch_shape):
        z = pyro.sample("z", latent_dist)
        likelihood = dist.Normal(z, 1).to_event(2)
        pyro.sample("x", likelihood, obs=data)
def model(self, zero_data, covariates):
    """Forecasting model: Laplace noise around a cumulative random walk."""
    batch_dims = zero_data.shape[:-2]
    with pyro.plate_stack("batch", batch_dims, rightmost_dim=-2):
        loc = zero_data[..., :1, :]
        scale = pyro.sample("scale", dist.LogNormal(loc, 1).to_event(1))
        # One increment per time step, drawn inside the time plate.
        with self.time_plate:
            steps = pyro.sample("jumps", dist.Normal(0, scale).to_event(1))
        # Accumulate increments along the time axis to form the trend.
        prediction = steps.cumsum(-2)
        self.predict(dist.Laplace(0, 1), prediction)
def model(l):
    """Logistic-regression response model for a repeated experiment.

    Dimension -1 of ``l`` indexes the rounds of the experiment; all
    leading dimensions are batch dimensions, declared via a plate stack.
    """
    with pyro.plate_stack("plate", l.shape[:-1]):
        # A single latent ability per participant, shared across rounds
        # (this models repeatedly testing the same participant).
        theta = pyro.sample("theta", dist.Normal(prior_mean, prior_sd))
        theta = theta.unsqueeze(-1)
        # Logistic regression: response probability depends on theta - l.
        logits = sensitivity * (theta - l)
        # The event dim groups the rounds of one participant together.
        y = pyro.sample("y", dist.Bernoulli(logits=logits).to_event(1))
        return y
def model(self, zero_data, covariates):
    """Forecasting model: GaussianHMM noise around a cumulative random walk."""
    batch_dims = zero_data.shape[:-2]
    with pyro.plate_stack("batch", batch_dims, rightmost_dim=-2):
        loc = zero_data[..., :1, :]
        scale = pyro.sample("scale", dist.LogNormal(loc, 1).to_event(1))
        with self.time_plate:
            steps = pyro.sample("jumps", dist.Normal(0, scale).to_event(1))
        prediction = steps.cumsum(-2)
        duration, obs_dim = zero_data.shape[-2:]
        # Gaussian HMM residual: standard-normal init/trans/obs noise
        # with identity transition and observation matrices.
        unit_normal = dist.Normal(0, 1).expand([obs_dim]).to_event(1)
        hmm_noise = dist.GaussianHMM(
            unit_normal,
            torch.eye(obs_dim),
            unit_normal,
            torch.eye(obs_dim),
            unit_normal,
            duration=duration,
        )
        self.predict(hmm_noise, prediction)
def model(self, zero_data, covariates):
    """Forecasting model with heavy-tailed (Stable) LinearHMM residual noise.

    The Stable noise has no closed-form likelihood, so the residual is
    reparameterized via StableReparam inside LinearHMMReparam.
    """
    batch_dims = zero_data.shape[:-2]
    with pyro.plate_stack("batch", batch_dims, rightmost_dim=-2):
        loc = zero_data[..., :1, :]
        scale = pyro.sample("scale", dist.LogNormal(loc, 1).to_event(1))
        with self.time_plate:
            steps = pyro.sample("jumps", dist.Normal(0, scale).to_event(1))
        prediction = steps.cumsum(-2)
        duration, obs_dim = zero_data.shape[-2:]
        # Symmetric stable noise (stability 1.9, skew 0) for init,
        # transition and observation, with identity dynamics.
        stable_noise = dist.Stable(1.9, 0).expand([obs_dim]).to_event(1)
        residual_noise = dist.LinearHMM(
            stable_noise,
            torch.eye(obs_dim),
            stable_noise,
            torch.eye(obs_dim),
            stable_noise,
            duration=duration,
        )
        stable_rep = StableReparam()
        reparam_config = {
            "residual": LinearHMMReparam(stable_rep, stable_rep, stable_rep)
        }
        with poutine.reparam(config=reparam_config):
            self.predict(residual_noise, prediction)
def model():
    """Sample "x" from a Stable distribution under batch and particle plates."""
    site_dist = dist.Stable(stability, 0, scale, loc)
    with pyro.plate_stack("plates", shape), pyro.plate("particles", 200000):
        return pyro.sample("x", site_dist)
def model():
    """Sample "x" as a Normal whose rightmost ``-dim`` dims are event dims."""
    site_dist = dist.Normal(loc, scale).to_event(-dim)
    with pyro.plate_stack("plates", shape[:dim]):
        return pyro.sample("x", site_dist)
def model(data=None):
    """Sample (or condition on ``data``) site "x" from the hmm."""
    plates = pyro.plate_stack("plates", batch_shape)
    with plates:
        return pyro.sample("x", hmm, obs=data)
def model():
    """Sample "x" from a RelaxedOneHotCategorical under batch/particle plates."""
    site_dist = dist.RelaxedOneHotCategorical(temperature, logits=logits)
    with pyro.plate_stack("plates", shape), pyro.plate("particles", 10000):
        pyro.sample("x", site_dist)
def model():
    """Sample "x" as a Normal with ``event_shape`` treated as event dims."""
    site_dist = dist.Normal(loc, scale).to_event(len(event_shape))
    with pyro.plate_stack("plates", batch_shape):
        return pyro.sample("x", site_dist)
def guide(data):
    """Variational guide: sample "z" from the precomputed posterior."""
    plates = pyro.plate_stack("plates", batch_shape)
    with plates:
        pyro.sample("z", posterior)
def model():
    """Sample site "x" from the hmm under a plate stack."""
    plates = pyro.plate_stack("plates", batch_shape)
    with plates:
        return pyro.sample("x", hmm)
def model():
    """Sample "x" from a StudentT distribution under a plate stack."""
    site_dist = dist.StudentT(df, loc, scale)
    with pyro.plate_stack("plates", shape):
        return pyro.sample("x", site_dist)
def guide():
    """Guide with a learnable location; "x" sampled at configurable depth.

    ``depth`` and ``has_rsample`` come from the enclosing scope;
    ``has_rsample_`` toggles reparameterized sampling on the Normal.
    """
    loc = pyro.param("loc", torch.tensor(0.))
    site_dist = dist.Normal(loc, 1).has_rsample_(has_rsample)
    plate_sizes = (2,) * depth
    with pyro.plate_stack("plates", plate_sizes):
        return pyro.sample("x", site_dist)
def model():
    """Sample "x" from a Uniform(0, 1) expanded to ``shape``.

    The rightmost ``-dim`` dims become event dims; the rest are plates.
    """
    site_dist = dist.Uniform(0, 1).expand(shape).to_event(-dim)
    with pyro.plate_stack("plates", shape[:dim]), pyro.plate("particles", 10000):
        pyro.sample("x", site_dist)
def model():
    """Sample "x" from a Normal expanded to ``shape``.

    The rightmost ``-dim`` dims become event dims; the rest are plates.
    """
    site_dist = dist.Normal(loc, scale).expand(shape).to_event(-dim)
    with pyro.plate_stack("plates", shape[:dim]), pyro.plate("particles", 10000):
        pyro.sample("x", site_dist)
def model():
    """Sample "x" from a ProjectedNormal under a plate stack."""
    site_dist = dist.ProjectedNormal(concentration)
    with pyro.plate_stack("plates", shape):
        return pyro.sample("x", site_dist)
def model(data):
    """Draw latent "z" from the prior and observe ``data`` as Normal(z, 1)."""
    with pyro.plate_stack("plates", batch_shape):
        z = pyro.sample("z", prior)
        likelihood = dist.Normal(z, 1).to_event(2)
        pyro.sample("x", likelihood, obs=data)
def plate(self) -> pyro.plate:
    """Return the plate stack, wrapping raw shape data when necessary.

    If ``self.plate_stack`` is already a context manager it is returned
    as-is; otherwise it is treated as a shape and wrapped in
    ``pyro.plate_stack``.
    """
    if isinstance(self.plate_stack, tp.ContextManager):
        return self.plate_stack
    return pyro.plate_stack('plate', self.plate_stack)
def model():
    """Sample "x" from a ProjectedNormal under batch and particle plates."""
    site_dist = dist.ProjectedNormal(concentration)
    with pyro.plate_stack("plates", shape), pyro.plate("particles", 10000):
        pyro.sample("x", site_dist)