Example #1
    def forward(self, x):
        # Mean of the pre-activation: the usual affine map with the posterior mean weights.
        mean = self.out_bias(x, self.weight)

        # Per-weight variance of the factorized Gaussian posterior: alpha * W^2.
        sigma = torch.exp(self.log_alpha) * self.weight * self.weight

        # Standard deviation of the pre-activation; 1e-16 keeps the sqrt numerically stable.
        std = torch.sqrt(1e-16 + self.out_nobias(x * x, sigma))
        if self.training:
            # Noise with the same shape, dtype and device as std
            # (torch.randn(self.std_size) would be created on the CPU by default).
            epsilon = torch.randn_like(std)
        else:
            # The layer is deterministic at evaluation time.
            epsilon = 0.0

        # Local reparameterization trick: sample the pre-activation directly.
        out = mean + std * epsilon

        if cfg.record_mean_var and cfg.record_now and self.training and self.name in cfg.record_layers:
            utils.save_array_to_file(mean.cpu().detach().numpy(), self.mean_var_path, "mean")
            utils.save_array_to_file(std.cpu().detach().numpy(), self.mean_var_path, "std")

        return out
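
Example #1 calls two helpers, self.out_bias and self.out_nobias, whose definitions are not part of the snippet. A plausible reading (an assumption, not something the example confirms) is that they are thin wrappers around F.linear with and without the layer bias, roughly:

import torch
import torch.nn as nn
import torch.nn.functional as F

class OutHelpersSketch(nn.Module):
    # Hypothetical stand-ins for the helpers used in Example #1; the actual
    # implementations may differ (e.g. they could be nn.Linear submodules).
    def __init__(self, in_features, out_features):
        super().__init__()
        self.weight = nn.Parameter(torch.randn(out_features, in_features) * 0.01)
        self.bias = nn.Parameter(torch.zeros(out_features))

    def out_bias(self, x, w):
        # Affine map with the layer bias: x @ w.T + bias.
        return F.linear(x, w, self.bias)

    def out_nobias(self, x, w):
        # Same map without the bias; applied to x * x and the weight variances.
        return F.linear(x, w)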
Example #2
    def forward(self, x):
        mean = F.linear(x, self.W)
        if self.bias is not None:
            mean = mean + self.bias

        # Per-weight posterior variance alpha * W^2 and the resulting pre-activation std.
        sigma = torch.exp(self.log_alpha) * self.W * self.W
        std = torch.sqrt(1e-16 + F.linear(x * x, sigma))
        if self.training:
            # Fresh standard-normal noise with the same shape and device as std
            # (equivalent to torch.randn_like(std)).
            epsilon = std.data.new(std.size()).normal_()
        else:
            epsilon = 0.0
        # Local reparameterization trick
        out = mean + std * epsilon

        if cfg.record_mean_var and cfg.record_now and self.training and self.name in cfg.record_layers:
            utils.save_array_to_file(mean.cpu().detach().numpy(), self.mean_var_path, "mean")
            utils.save_array_to_file(std.cpu().detach().numpy(), self.mean_var_path, "std")

        return out
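
For context, either forward pass drops into a small module along the following lines. This is only a sketch under the assumption of a fully factorized Gaussian weight posterior with per-weight variance alpha * W^2; the class name, initialization, and hyperparameters are illustrative and not taken from the original code.

import torch
import torch.nn as nn
import torch.nn.functional as F

class LinearVariationalDropoutSketch(nn.Module):
    def __init__(self, in_features, out_features, bias=True):
        super().__init__()
        # Posterior mean of the weights and the (log) noise scale alpha.
        self.W = nn.Parameter(torch.empty(out_features, in_features))
        self.log_alpha = nn.Parameter(torch.full((out_features, in_features), -3.0))
        if bias:
            self.bias = nn.Parameter(torch.zeros(out_features))
        else:
            self.register_parameter("bias", None)
        nn.init.kaiming_uniform_(self.W, a=5 ** 0.5)

    def forward(self, x):
        mean = F.linear(x, self.W)
        if self.bias is not None:
            mean = mean + self.bias

        # Per-weight posterior variance alpha * W^2 and pre-activation std.
        sigma = torch.exp(self.log_alpha) * self.W * self.W
        std = torch.sqrt(1e-16 + F.linear(x * x, sigma))

        if self.training:
            epsilon = torch.randn_like(std)
        else:
            epsilon = 0.0

        # Local reparameterization trick: sample the pre-activation directly.
        return mean + std * epsilon


# Usage sketch
layer = LinearVariationalDropoutSketch(20, 5)
out = layer(torch.randn(8, 20))
print(out.shape)  # torch.Size([8, 5])

Sampling the pre-activations rather than the weights (the local reparameterization trick of Kingma, Salimans and Welling, 2015) yields a lower-variance gradient estimate and needs only one noise value per output unit per example instead of one per weight.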