Example #1
0
    def forward(self, x):
        """Blend instance- and layer-normalized activations (adaptive ILN).

        Args:
            x: input tensor; the reductions over axes [2, 3] and [1, 2, 3]
               assume an NCHW layout (batch, channel, H, W) — TODO confirm.

        Returns:
            Tensor of the same shape as ``x``: a ``rho``-weighted mix of the
            instance-normalized and layer-normalized input, followed by the
            affine transform ``gamma * out + beta``.
        """
        # Instance norm: statistics per (sample, channel) over spatial dims.
        in_mean, in_var = paddle.mean(x, axis=[2, 3], keepdim=True), paddle.var(x, axis=[2, 3], keepdim=True)
        out_in = (x - in_mean) / paddle.sqrt(in_var + self.eps)
        # Layer norm: statistics per sample over channel and spatial dims.
        ln_mean, ln_var = paddle.mean(x, axis=[1, 2, 3], keepdim=True), paddle.var(x, axis=[1, 2, 3], keepdim=True)
        out_ln = (x - ln_mean) / paddle.sqrt(ln_var + self.eps)
        # Broadcast each parameter across the batch exactly once; the original
        # re-expanded self.rho separately for both terms of the blend.
        batch = x.shape[0]
        rho = self.rho.expand([batch, -1, -1, -1])
        out = rho * out_in + (1 - rho) * out_ln
        out = out * self.gamma.expand([batch, -1, -1, -1]) + self.beta.expand([batch, -1, -1, -1])

        return out
Example #2
0
 def static(self):
     """Evaluate paddle.var through the static-graph API.

     Builds a fresh Program, declares a feed slot 'X' with this case's
     shape/dtype, computes the variance with the configured
     axis/unbiased/keepdim settings, and runs it on ``self.place``.

     Returns:
         The fetched numpy result for the variance op.
     """
     with paddle.static.program_guard(paddle.static.Program()):
         feed_var = paddle.data('X', self.shape, self.dtype)
         variance = paddle.var(feed_var, self.axis, self.unbiased, self.keepdim)
         executor = paddle.static.Executor(self.place)
         fetched = executor.run(feed={'X': self.x}, fetch_list=[variance])
     return fetched[0]
Example #3
0
 def dynamic(self, axis=None, keepdim=False, unbiased=True):
     """Compute paddle.var in dygraph (eager) mode.

     Args:
         axis: reduction axis/axes forwarded to paddle.var.
         keepdim: whether reduced dims are kept with size 1.
         unbiased: whether to use the unbiased (n-1) estimator.

     Returns:
         The variance of ``self._input`` as a numpy array.
     """
     with fluid.dygraph.guard(self._place):
         tensor = fluid.dygraph.to_variable(self._input)
         variance = paddle.var(input=tensor, axis=axis, keepdim=keepdim, unbiased=unbiased)
         return variance.numpy()
    def forward(self, input):
        """Blend instance- and layer-normalized activations (adaptive ILN).

        Args:
            input: input tensor; the reductions over axes [2, 3] and
                [1, 2, 3] assume an NCHW layout — TODO confirm.

        Returns:
            Tensor of the same shape as ``input``: a ``rho``-weighted mix of
            the instance-normalized and layer-normalized input, followed by
            the affine transform ``gamma * out + beta``.
        """
        # Instance norm: statistics per (sample, channel) over spatial dims.
        in_mean, in_var = paddle.mean(input, [2, 3],
                                      keepdim=True), paddle.var(input, [2, 3],
                                                                keepdim=True)
        out_in = (input - in_mean) / paddle.sqrt(in_var + self.eps)
        # Layer norm: statistics per sample over channel and spatial dims.
        ln_mean, ln_var = paddle.mean(input, [1, 2, 3],
                                      keepdim=True), paddle.var(input,
                                                                [1, 2, 3],
                                                                keepdim=True)
        out_ln = (input - ln_mean) / paddle.sqrt(ln_var + self.eps)
        # Broadcast each parameter across the batch exactly once; the original
        # re-expanded self.rho separately for both terms of the blend.
        batch = input.shape[0]
        rho = self.rho.expand([batch, -1, -1, -1])
        out = rho * out_in + (1 - rho) * out_ln
        out = out * self.gamma.expand([batch, -1, -1, -1]) + \
              self.beta.expand([batch, -1, -1, -1])

        return out
Example #5
0
    def static(self, axis=None, keepdim=False, unbiased=True):
        """Evaluate paddle.var via the legacy static-graph ``out=`` API.

        Builds a program that feeds ``self._input`` under the name 'data',
        writes the variance into a pre-created block variable through the
        old ``out=`` keyword, and executes on ``self._place``.

        Args:
            axis: reduction axis/axes forwarded to paddle.var.
            keepdim: whether reduced dims are kept with size 1.
            unbiased: whether to use the unbiased (n-1) estimator.

        Returns:
            The fetched numpy result for the variance variable.
        """
        main_program = fluid.Program()
        with fluid.program_guard(main_program):
            feed_var = fluid.data(name="data",
                                  dtype=self._dtype,
                                  shape=[None, 3, 4, 5])
            # Pre-created variable that paddle.var fills in place via out=.
            result_var = main_program.current_block().create_var(
                dtype=self._dtype, shape=[2, 3, 4, 5])
            paddle.var(input=feed_var,
                       axis=axis,
                       keepdim=keepdim,
                       unbiased=unbiased,
                       out=result_var)

        executor = fluid.Executor(self._place)
        fetched = executor.run(feed={"data": self._input},
                               program=main_program,
                               fetch_list=[result_var])
        return fetched[0]
Example #6
0
 def test_alias(self):
     """paddle.var must produce identical results under all public aliases."""
     paddle.disable_static()
     x = paddle.to_tensor(np.array([10, 12], 'float32'))
     reference = paddle.var(x).numpy()
     # Both aliases must agree with the top-level entry point.
     for alias in (paddle.tensor.var, paddle.tensor.stat.var):
         self.assertTrue(np.allclose(reference, alias(x).numpy()))
     paddle.enable_static()
Example #7
0
    def _style_pooling(self, x, eps=1e-5):
        """Summarize per-channel style statistics of a feature map.

        Args:
            x: feature tensor of shape (N, C, H, W).
            eps: numerical floor added to the variance before the sqrt to
                avoid a zero (or negative-rounded) argument.

        Returns:
            Tensor of shape (N, C, 2): the per-channel mean and standard
            deviation stacked along the last axis.
        """
        N, C, _, _ = x.shape

        # Flatten the spatial dims once; the original reshaped x twice.
        flat = paddle.reshape(x, [N, C, -1])
        channel_mean = paddle.mean(flat, axis=2, keepdim=True)
        channel_std = paddle.sqrt(paddle.var(flat, axis=2, keepdim=True) + eps)

        return paddle.concat((channel_mean, channel_std), axis=2)
Example #8
0
 def dygraph(self):
     """Compute paddle.var eagerly on self.x and return it as numpy.

     Temporarily switches to dynamic mode, evaluates the variance with the
     configured axis/unbiased/keepdim settings, restores static mode, then
     converts the result.
     """
     paddle.disable_static()
     result = paddle.var(paddle.to_tensor(self.x), self.axis, self.unbiased,
                         self.keepdim)
     paddle.enable_static()
     return result.numpy()