def to_dist_fn(h):
    """Build a hyperbolic wrapped normal from a ``(loc, scale)`` pair.

    The base distribution is an Independent Normal centred at zero with
    the given ``scale``; it is wrapped onto the hyperboloid at the point
    obtained by pseudo-polar projection of ``loc``.
    """
    loc, scale = h
    xp = cuda.get_array_module(loc, scale)
    zero_mean = xp.zeros(shape=scale.shape, dtype=scale.dtype)
    base = distributions.Independent(
        D.Normal(loc=zero_mean, scale=scale))
    return distributions.HyperbolicWrapped(
        base, functions.pseudo_polar_projection(loc))
def to_dist_fn(h):
    """Build a hyperbolic wrapped normal from a concatenated tensor.

    The last axis of ``h`` holds ``loc`` in the first ``n_latent`` slots
    and the raw (pre-softplus) scale in the remaining slots.  ``n_latent``
    is taken from the enclosing scope.
    """
    xp = cuda.get_array_module(h)
    raw_scale = F.softplus(h[..., n_latent:])
    zeros_shape = raw_scale.shape[:-1] + (n_latent, )
    base = distributions.Independent(
        D.Normal(
            loc=xp.zeros(shape=zeros_shape, dtype=raw_scale.dtype),
            scale=raw_scale))
    return distributions.HyperbolicWrapped(
        base, functions.pseudo_polar_projection(h[..., :n_latent]))
def _log_det_jacobian(self, x, y):
    """Accumulate the log-det-Jacobian across the chain of ``self.parts``.

    Each part's contribution is summed over its trailing event
    dimensions up to this transform's ``event_dim`` before being added.
    An empty chain is the identity, whose log-det is zero everywhere.
    """
    if not self.parts:
        # Identity transform: zeros shaped like the input array.
        xp = cuda.get_array_module(x, y)
        return xp.zeros_like(x.array)
    total = 0
    current = x
    for part in self.parts:
        # Recompute the intermediate output; ``y`` is only used above.
        nxt = part.forward(current)
        total = total + _sum_rightmost(
            part.log_det_jacobian(current, nxt),
            self.event_dim - part.event_dim)
        current = nxt
    return total
def _log_det_jacobian(self, x, y):
    """Log |det J| of an affine transform (``y = loc + scale * x``).

    For a scalar ``self.scale`` the constant ``log|scale|`` is tiled over
    the shape of ``x``; otherwise ``log|scale|`` is taken elementwise.
    When ``self.event_dim`` is nonzero, contributions are summed over the
    rightmost event dimensions and the result is broadcast back to the
    batch shape.
    """
    shape = x.shape
    scale = self.scale
    if isinstance(scale, numbers.Number):
        # Scalar scale: log|scale| is a constant; tile it over x's shape.
        xp = cuda.get_array_module(x, y)
        result = exponential.log(basic_math.absolute(scale)) \
            * xp.ones(shape, dtype=x.dtype)
    else:
        # Elementwise scale: per-element log|scale|.
        result = exponential.log(basic_math.absolute(scale))
    if self.event_dim:
        # Flatten the event dimensions into one axis and sum over it.
        result_size = result.shape[:-self.event_dim] + (-1, )
        # NOTE(review): ``result.view(...)`` looks like a PyTorch API;
        # Chainer Variables expose ``reshape`` instead — confirm this
        # branch is actually exercised with a Chainer Variable.
        result = sum_mod.sum(result.view(result_size), axis=-1)
        shape = shape[:-self.event_dim]
    return broadcast.broadcast_to(result, shape)
def __call__(self, x):
    """Residual block forward pass: ``relu(F(x) + shortcut(x))``.

    When the residual branch changes the tensor shape, the shortcut is
    adapted parameter-free: extra channels are zero-padded and, if the
    spatial size differs, the shortcut is downsampled by average pooling.
    """
    out = F.relu(self.bn1(self.conv1(x)))
    out = self.bn2(self.conv2(out))
    # Shape mismatch between input and residual branch: pad the
    # shortcut's channels with zeros (option-A shortcut, no parameters).
    if x.data.shape != out.data.shape:
        xp = cuda.get_array_module(x.data)
        batch, in_ch, height, width = x.data.shape
        extra_ch = out.data.shape[1] - in_ch  # channels to add
        pad = chainer.Variable(
            xp.zeros((batch, extra_ch, height, width), dtype=xp.float32))
        with chainer.no_backprop_mode():
            x = F.concat((pad, x))
        # Spatial sizes still differ: halve the shortcut's resolution.
        if x.data.shape[2:] != out.data.shape[2:]:
            x = F.average_pooling_2d(x, 1, 2)
    return F.relu(out + x)