def test_nested_parameterized():
    """Priors set on Parameterized modules nested inside another module should
    appear in the trace under hierarchical, de-duplicated site names."""

    class Linear(Parameterized):
        def __init__(self, a):
            super(Linear, self).__init__()
            self.a = Parameter(a)

        def forward(self, x):
            return self.a * x

    class Quadratic(Parameterized):
        def __init__(self, linear1, linear2, a):
            super(Quadratic, self).__init__()
            self.linear1 = linear1
            self.linear2 = linear2
            self.a = Parameter(a)

        def forward(self, x):
            return self.linear1(x) * x + self.linear2(self.a)

    lin_a = Linear(torch.tensor(1.))
    lin_a.set_prior("a", dist.Normal(0, 1))
    lin_b = Linear(torch.tensor(1.))
    lin_b.set_prior("a", dist.Normal(0, 1))
    quad = Quadratic(lin_a, lin_b, torch.tensor(2.))
    quad.set_prior("a", dist.Cauchy(0, 1))

    def model(x):
        quad.set_mode("model")
        return quad(x)

    tr = pyro.poutine.trace(model).get_trace(torch.tensor(5.))
    # The second Linear instance is disambiguated with a "__1" suffix.
    for site in ("Quadratic/a", "Linear/a", "Linear__1/a"):
        assert site in tr.nodes
def test_nested_parameterized():
    """PyroSample attributes on nested Parameterized modules should appear in
    the trace under dotted, module-qualified site names."""

    class Linear(Parameterized):
        def __init__(self, a):
            super().__init__()
            self.a = Parameter(a)

        def forward(self, x):
            return self.a * x

    class Quadratic(Parameterized):
        def __init__(self, linear1, linear2, a):
            super().__init__()
            # Root of the dotted site-name hierarchy.
            self._pyro_name = "Quadratic"
            self.linear1 = linear1
            self.linear2 = linear2
            self.a = Parameter(a)

        def forward(self, x):
            return self.linear1(x) * x + self.linear2(self.a)

    lin_a = Linear(torch.tensor(1.0))
    lin_a.a = PyroSample(dist.Normal(0, 1))
    lin_b = Linear(torch.tensor(1.0))
    lin_b.a = PyroSample(dist.Normal(0, 1))
    quad = Quadratic(lin_a, lin_b, torch.tensor(2.0))
    quad.a = PyroSample(dist.Cauchy(0, 1))

    def model(x):
        quad.set_mode("model")
        return quad(x)

    tr = pyro.poutine.trace(model).get_trace(torch.tensor(5.0))
    for site in ("Quadratic.a", "Quadratic.linear1.a", "Quadratic.linear2.a"):
        assert site in tr.nodes
def model(M=None, N=None, x=None, y=None):
    """Bayesian logistic regression with Cauchy priors on the coefficients.

    Translator-generated model: an improper-uniform draw for ``beta`` whose
    elements are then scored against Cauchy priors, and Bernoulli likelihoods
    for each observation ``y[n]``.
    """
    # Shape bookkeeping emitted by the Stan-to-Pyro translator (not read below).
    ___shape = {
        'N': (),
        'M': (),
        'y': N,
        'x': (N, M),
        'beta': M,
    }
    beta = sample('beta', ImproperUniform(M))
    # Score each coefficient against its Cauchy(0, 2.5) prior.
    for m in range(M):
        sample('beta' + '__{}'.format(m) + '__1',
               dist.Cauchy(0.0, 2.5), obs=beta[m])
    # Bernoulli likelihood per observation via the logistic link.
    for n in range(N):
        sample('y' + '__{}'.format(n) + '__2',
               dist.Bernoulli(inv_logit(x[n] * beta)), obs=y[n])
def model(data, params):
    """Hierarchical Bernoulli model with group-level scales.

    NOTE(review): jj/kk look like person/item indices of an IRT-style model —
    confirm against the data-generating code.
    """
    # Unpack data; Stan-style 1-based indices are shifted to 0-based.
    J = data["J"]
    K = data["K"]
    N = data["N"]
    jj = data["jj"].long() - 1
    kk = data["kk"].long() - 1
    y = data["y"]

    # Half-Cauchy hyperpriors on the three group-level scales
    # (sampled in this exact order to preserve the trace).
    sigma_alpha, sigma_beta, sigma_gamma = (
        pyro.sample(name, dist.HalfCauchy(5.))
        for name in ("sigma_alpha", "sigma_beta", "sigma_gamma")
    )

    with pyro.plate('alpha_', J):
        alpha = pyro.sample("alpha", dist.Normal(0., sigma_alpha))
    with pyro.plate('beta_', K):
        beta = pyro.sample("beta", dist.Normal(0., sigma_beta))
        # log_gamma is indexed by kk below, so it shares the K-sized plate.
        log_gamma = pyro.sample("log_gamma", dist.Normal(0., sigma_gamma))
    delta = pyro.sample("delta", dist.Cauchy(0., 5))

    with pyro.plate('data', N):
        y = pyro.sample(
            'y',
            dist.Bernoulli(
                logits=log_gamma[kk].exp() * (alpha[jj] - beta[kk] + delta)),
            obs=y)
def model(data, params):
    """Stochastic-volatility model: latent log-volatility ``h`` follows an
    AR(1) process; observations are Normal with scale ``exp(h / 2)``.

    BUG FIX: the observation site name was passed as the tensor ``y`` instead
    of the string ``"y"``; ``pyro.sample`` requires a string site name, so the
    original line raised at runtime.
    """
    # Data.
    T = data["T"]
    y = data["y"]
    # Externally supplied parameter (AR(1) persistence).
    phi = params["phi"]
    # Transformed-parameter placeholder emitted by the translator.
    # NOTE(review): dims=(T) is just T, not a tuple — confirm init_vector's
    # expected argument shape.
    h = init_vector("h", dims=(T))  # vector

    # Priors.
    sigma = pyro.sample("sigma", dist.HalfCauchy(5.))
    mu = pyro.sample("mu", dist.Cauchy(0., 10.))
    h_std = pyro.sample("h_std", dist.Normal(0., 1.).expand([T]))

    # Non-centered parameterization: scale and shift the standardized
    # innovations, with the stationary variance correction at t = 0.
    # NOTE(review): torch.no_grad() blocks gradients through h — kept as in
    # the original translation; verify it is intended for the inference used.
    with torch.no_grad():
        h = h_std * sigma
        h[0] = h[0] / torch.sqrt(1. - phi * phi)
        h = h + mu
        for t in range(1, T):
            h[t] = h[t] + phi * (h[t - 1] - mu)

    # Likelihood: the site name must be the string "y" (was the tensor y).
    y = pyro.sample("y", dist.Normal(0., (h / 2.).exp()), obs=y)
def Cauchy(_name, loc, scale):
    """Draw one sample from Cauchy(loc, scale) at site ``_name``.

    Returns a dict holding the draw under key ``'x'``.
    """
    draw = pyro.sample(_name, dist.Cauchy(loc, scale))
    return {'x': draw}
def model(self, zero_data, covariates):
    """Bivariate seasonal forecasting model.

    Combines an exactly repeated weekly seasonal pattern with slow seasonal
    drift, then registers a GaussianHMM noise model over the residuals via
    ``self.predict``.  Sampling order below is significant — do not reorder
    the ``pyro.sample`` sites.
    """
    period = 24 * 7  # one week of hourly steps
    duration, dim = zero_data.shape[-2:]
    assert dim == 2  # Data is bivariate: (arrivals, departures).

    # --- Sample global parameters. ---
    noise_scale = pyro.sample(
        "noise_scale",
        dist.LogNormal(torch.full((dim, ), -3.), 1.).to_event(1))
    assert noise_scale.shape[-1:] == (dim, )
    trans_timescale = pyro.sample(
        "trans_timescale",
        dist.LogNormal(torch.zeros(dim), 1).to_event(1))
    assert trans_timescale.shape[-1:] == (dim, )

    # A single drift location, expanded to one value per output dimension.
    trans_loc = pyro.sample("trans_loc", dist.Cauchy(0, 1 / period))
    trans_loc = trans_loc.unsqueeze(-1).expand(trans_loc.shape + (dim, ))
    assert trans_loc.shape[-1:] == (dim, )

    # Correlated transition noise: per-dim scales times an LKJ Cholesky factor.
    trans_scale = pyro.sample(
        "trans_scale",
        dist.LogNormal(torch.zeros(dim), 0.1).to_event(1))
    trans_corr = pyro.sample("trans_corr",
                             dist.LKJCorrCholesky(dim, torch.ones(())))
    trans_scale_tril = trans_scale.unsqueeze(-1) * trans_corr
    assert trans_scale_tril.shape[-2:] == (dim, dim)

    # Correlated observation noise, built the same way.
    obs_scale = pyro.sample(
        "obs_scale",
        dist.LogNormal(torch.zeros(dim), 0.1).to_event(1))
    obs_corr = pyro.sample("obs_corr",
                           dist.LKJCorrCholesky(dim, torch.ones(())))
    obs_scale_tril = obs_scale.unsqueeze(-1) * obs_corr
    assert obs_scale_tril.shape[-2:] == (dim, dim)

    # Note the initial seasonality should be sampled in a plate with the
    # same dim as the time_plate, dim=-1. That way we can repeat the dim
    # below using periodic_repeat().
    with pyro.plate("season_plate", period, dim=-1):
        season_init = pyro.sample(
            "season_init", dist.Normal(torch.zeros(dim), 1).to_event(1))
        assert season_init.shape[-2:] == (period, dim)

    # Sample independent noise at each time step.
    with self.time_plate:
        season_noise = pyro.sample("season_noise",
                                   dist.Normal(0, noise_scale).to_event(1))
        assert season_noise.shape[-2:] == (duration, dim)

    # Construct a prediction. This prediction has an exactly repeated
    # seasonal part plus slow seasonal drift. We use two deterministic,
    # linear functions to transform our diagonal Normal noise to nontrivial
    # samples from a Gaussian process.
    prediction = (periodic_repeat(season_init, duration, dim=-2) +
                  periodic_cumsum(season_noise, period, dim=-2))
    assert prediction.shape[-2:] == (duration, dim)

    # Construct a joint noise model. This model is a GaussianHMM, whose
    # .rsample() and .log_prob() methods are parallelized over time; this
    # this entire model is parallelized over time.
    init_dist = dist.Normal(torch.zeros(dim), 100).to_event(1)
    trans_mat = trans_timescale.neg().exp().diag_embed()
    trans_dist = dist.MultivariateNormal(trans_loc,
                                         scale_tril=trans_scale_tril)
    obs_mat = torch.eye(dim)
    obs_dist = dist.MultivariateNormal(torch.zeros(dim),
                                       scale_tril=obs_scale_tril)
    noise_model = dist.GaussianHMM(init_dist, trans_mat, trans_dist,
                                   obs_mat, obs_dist, duration=duration)
    assert noise_model.event_shape == (duration, dim)

    # The final statement registers our noise model and prediction.
    self.predict(noise_model, prediction)
def forward(self, data):
    """Observe ``data`` under a Cauchy likelihood whose location and
    log-scale are the last-dim components of ``self.z``."""
    center, log_scale = self.z.unbind(-1)
    scale = log_scale.exp()
    with pyro.plate("data"):
        pyro.sample("obs", dist.Cauchy(center, scale), obs=data)