def weight(self, ext, module, g_inp, g_out, backproped):
        """Per-sample scalar from the weight-gradient factors.

        Contracts the output-gradient factor ``dE_dY`` with the input factor
        ``X`` twice in a single einsum, producing one value per batch sample
        (presumably the squared norm of each individual weight gradient —
        confirm against ``convUtils.get_weight_gradient_factors``).

        Args:
            ext: extension instance
            module: layer whose weight is targeted
            g_inp: gradients w.r.t. the module inputs
            g_out: gradients w.r.t. the module outputs
            backproped: backpropagated quantities (unused here)

        Returns:
            Tensor with one entry per batch sample.
        """
        X, dE_dY = convUtils.get_weight_gradient_factors(
            module.input0, g_out[0], module)

        # Fold the bias column into X so it takes part in the contraction.
        if module.has_bias():
            X = module.append_ones(X)

        equation = 'bml,bkl,bmi,bki->b'
        return einsum(equation, (dE_dY, X, dE_dY, X))
# Example #2
    def weight(self, ext, module, g_inp, g_out, backproped):
        """Batch-summed squared individual weight gradients, weight-shaped.

        Builds the per-sample weight gradients with one einsum contraction,
        squares them elementwise, and sums out the leading (batch) axis.

        Args:
            ext: extension instance
            module: layer whose weight is targeted
            g_inp: gradients w.r.t. the module inputs
            g_out: gradients w.r.t. the module outputs
            backproped: backpropagated quantities (unused here)

        Returns:
            Tensor reshaped like ``module.weight``.
        """
        X, dE_dY = convUtils.get_weight_gradient_factors(
            module.input0, g_out[0], module)

        # Fold the bias column into X so it takes part in the contraction.
        if module.has_bias():
            X = module.append_ones(X)

        grad_batch = einsum('bml,bkl->bmk', (dE_dY, X))
        squared = grad_batch ** 2
        return squared.sum(0).view_as(module.weight)
# Example #3
    def weight(self, ext, module, g_inp, g_out, backproped):
        """Compute batch_l2 for the weight.

        A single einsum contracts the gradient factors with themselves,
        yielding one value per batch sample.

        Args:
            ext: extension instance
            module: layer whose weight is targeted
            g_inp: gradients w.r.t. the module inputs
            g_out: gradients w.r.t. the module outputs
            backproped: backpropagated quantities (unused here)

        Returns:
            batch_l2 for weight, one entry per sample.
        """
        X, dE_dY = convUtils.get_weight_gradient_factors(
            module.input0, g_out[0], module)
        equation = "nmi,nki,nmj,nkj->n"
        return einsum(equation, dE_dY, X, dE_dY, X)
# Example #4
 def weight(self, ext, module, g_inp, g_out, backproped):
     """batch_l2 for the weight: one contraction value per batch sample.

     Args:
         ext: extension instance
         module: layer whose weight is targeted
         g_inp: gradients w.r.t. the module inputs
         g_out: gradients w.r.t. the module outputs
         backproped: backpropagated quantities (unused here)

     Returns:
         Tensor with one entry per batch sample.
     """
     X, dE_dY = convUtils.get_weight_gradient_factors(
         module.input0, g_out[0], module)
     # Pass operands as varargs rather than a single tuple: the tuple form
     # is the legacy torch.einsum calling convention, and the sibling
     # implementation in this file already uses the varargs form.
     return einsum("nmi,nki,nmj,nkj->n", dE_dY, X, dE_dY, X)
# Example #5
 def weight(self, ext, module, g_inp, g_out, backproped):
     """Batch-summed squared individual weight gradients, weight-shaped.

     Computes the per-sample weight gradients with one einsum, squares
     them elementwise, and reduces over the batch axis.

     Args:
         ext: extension instance
         module: layer whose weight is targeted
         g_inp: gradients w.r.t. the module inputs
         g_out: gradients w.r.t. the module outputs
         backproped: backpropagated quantities (unused here)

     Returns:
         Tensor reshaped like ``module.weight``.
     """
     batch_axis = 0
     X, dE_dY = convUtils.get_weight_gradient_factors(
         module.input0, g_out[0], module)
     per_sample = einsum("nml,nkl->nmk", (dE_dY, X))
     return per_sample.pow(2).sum(batch_axis).view_as(module.weight)