def forward(self, x):
    # Mean of squared activations over the spatial dims (H, W)
    nu2 = nn.square(x).mean((-2, -1), keepdims=True)
    # Normalize by the root mean square; abs() keeps the learned eps positive
    x = x * (1.0 / nn.sqrt(nu2 + nn.abs(self.eps)))
    # Per-channel affine transform
    return x * self.weight.reshape((1, -1, 1, 1)) \
         + self.bias.reshape((1, -1, 1, 1))
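# A minimal NumPy reference sketch of the same computation, assuming NCHW
# input; `frn_reference` and its stand-in weight/bias/eps values are
# assumptions for illustration, not part of the layer above.
import numpy as np

def frn_reference(x, weight, bias, eps):
    nu2 = np.square(x).mean(axis=(-2, -1), keepdims=True)  # mean of squares over H, W
    x = x * (1.0 / np.sqrt(nu2 + np.abs(eps)))             # RMS-normalize each channel map
    return x * weight.reshape((1, -1, 1, 1)) + bias.reshape((1, -1, 1, 1))

# Example: y = frn_reference(np.random.randn(2, 8, 4, 4).astype(np.float32),
#                            np.ones(8, np.float32), np.zeros(8, np.float32), 1e-6)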
def forward(self, x, **kwargs):
    # Per-sample, per-channel statistics over the spatial dims (H, W)
    mean, var = nn.moments(x, axes=(2, 3), keepdims=True)
    # Standardize: subtract mean, divide by std (epsilon for stability)
    x = (x - mean) / (nn.sqrt(var) + 1e-5)
    x = x * self.gamma.reshape((1, -1, 1, 1)) \
      + self.beta.reshape((1, -1, 1, 1))
    return x
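# NumPy reference sketch of the instance-norm forward above (assumed NCHW
# layout; `gamma`/`beta` stand in for the layer's learned parameters).
import numpy as np

def instance_norm_reference(x, gamma, beta):
    mean = x.mean(axis=(2, 3), keepdims=True)  # per-sample, per-channel mean
    var = x.var(axis=(2, 3), keepdims=True)    # per-sample, per-channel variance
    x = (x - mean) / (np.sqrt(var) + 1e-5)
    return x * gamma.reshape((1, -1, 1, 1)) + beta.reshape((1, -1, 1, 1))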
def reduce_std(input_t, axes=None, keepdims=False):
    """
    Reduce std operator.

        input_t          Tensor

        axes(None)       int
                         Iterable of ints.
                         None - all axes

        keepdims(False)  keep reduced axes
    """
    return nn.sqrt(reduce_variance(input_t, axes, keepdims))
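# Cross-check sketch: assuming `reduce_variance` computes the population
# variance (as np.var does), reduce_std over axes (2, 3) should match the
# NumPy value below.
import numpy as np

a = np.random.randn(4, 3, 8, 8).astype(np.float32)
ref = np.sqrt(np.var(a, axis=(2, 3), keepdims=True))  # == np.std(a, axis=(2, 3), keepdims=True)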
def forward(self, x, **kwargs):
    if self.is_training():
        # Batch statistics over N, H, W; the kernel folds the current batch
        # statistics into the running mean/var
        mean, var = nn.moments(x, axes=(0, 2, 3), keepdims=True)
        BatchNorm2D.upd_krn.run(self.running_mean, self.running_var,
                                mean, var, np.float32(self.momentum),
                                global_shape=(self.in_ch,))
    else:
        # Inference: use the accumulated running statistics
        mean = self.running_mean.reshape((1, -1, 1, 1))
        var = self.running_var.reshape((1, -1, 1, 1))
    x = (x - mean) / (nn.sqrt(var) + 1e-5)
    x = x * self.gamma.reshape((1, -1, 1, 1)) \
      + self.beta.reshape((1, -1, 1, 1))
    return x
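# NumPy sketch of the running-statistics update that `upd_krn` presumably
# performs (an exponential moving average; the kernel source is not shown
# above, so this is an assumption about its behavior).
import numpy as np

def update_running_stats(running_mean, running_var, mean, var, momentum):
    # running stats have shape (C,); batch stats arrive as (1, C, 1, 1)
    running_mean[:] = running_mean * momentum + mean.reshape(-1) * (1.0 - momentum)
    running_var[:]  = running_var  * momentum + var.reshape(-1)  * (1.0 - momentum)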
def forward(self, x):
    # L2-normalize across the channel axis, then apply the per-channel scale
    x = x / (nn.sqrt(nn.reduce_sum(nn.square(x), axes=1, keepdims=True)) + 1e-10) \
        * self.weight.reshape((1, -1, 1, 1))
    return x
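# NumPy reference sketch of the channel-wise L2 normalization above
# (`weight` stands in for the layer's learned per-channel scale).
import numpy as np

def channel_l2_norm_reference(x, weight):
    norm = np.sqrt(np.sum(np.square(x), axis=1, keepdims=True)) + 1e-10
    return x / norm * weight.reshape((1, -1, 1, 1))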