Example #1
0
class LRN(nn.Module):
    """nn.Module wrapper around the legacy SpatialCrossMapLRN implementation."""

    def __init__(self, size, alpha=1e-4, beta=0.75, k=1):
        super().__init__()
        # Delegate all LRN computation to the legacy module.
        self.lrn = SpatialCrossMapLRN(size, alpha, beta, k)

    def forward(self, x):
        """Apply cross-map LRN to *x* and return the result as a Variable."""
        # Drop any cached buffers from a previous call before recomputing.
        self.lrn.clearState()
        out = self.lrn.updateOutput(x.data)
        return Variable(out)
Example #2
0
class SpatialCrossMapLRNFunc(Function):
    """Old-style autograd Function that defers LRN to the legacy torch module."""

    def __init__(self, size, alpha=1e-4, beta=0.75, k=1):
        # LRN hyper-parameters; kept until forward builds the legacy module.
        self.size = size
        self.alpha = alpha
        self.beta = beta
        self.k = k

    def forward(self, input):
        """Run LRN on *input*; stashes the input and module for backward."""
        self.save_for_backward(input)
        # Build a fresh legacy module matched to the input's tensor type.
        legacy = SpatialCrossMapLRNOld(self.size, self.alpha, self.beta, self.k)
        legacy.type(input.type())
        self.lrn = legacy
        return self.lrn.forward(input)

    def backward(self, grad_output):
        """Compute the input gradient via the legacy module built in forward."""
        saved_input, = self.saved_tensors
        return self.lrn.backward(saved_input, grad_output)
Example #3
0
 def forward(self, input):
     """Apply legacy spatial cross-map LRN to *input*, saving it for backward."""
     # Import lazily so the legacy dependency is only touched when used.
     from torch.legacy.nn import SpatialCrossMapLRN as SpatialCrossMapLRNOld
     self.save_for_backward(input)
     legacy = SpatialCrossMapLRNOld(self.size, self.alpha, self.beta, self.k)
     # Match the legacy module's internal buffers to the input tensor type.
     legacy.type(input.type())
     self.lrn = legacy
     return self.lrn.forward(input)
Example #4
0
 def forward(self, input):
     """Apply legacy LRN to *input*; hard-fails if legacy torch is disabled."""
     self.save_for_backward(input)
     # NOTE(review): asserts vanish under `python -O`; presumably a deliberate
     # hard stop when the legacy path is switched off — confirm with callers.
     if disable_torch_legacy:
         assert False
     legacy = SpatialCrossMapLRNOld(self.size, self.alpha, self.beta, self.k)
     # Align the legacy module's buffers with the input tensor type.
     legacy.type(input.type())
     self.lrn = legacy
     return self.lrn.forward(input)
Example #5
0
class LRNFunc(Function):
    """Old-style autograd Function wrapping the legacy SpatialCrossMapLRN."""

    def __init__(self, size, alpha=1e-4, beta=0.75, k=1):
        super(LRNFunc, self).__init__()
        # LRN hyper-parameters used to build the legacy module at forward time.
        self.size = size
        self.alpha = alpha
        self.beta = beta
        self.k = k

    def forward(self, input):
        """Run LRN on *input*; stashes the input and legacy module for backward."""
        self.save_for_backward(input)
        # NOTE(review): assert is stripped under -O; presumably an intentional
        # hard stop when legacy torch support is turned off.
        if disable_torch_legacy:
            assert False
        legacy = SpatialCrossMapLRNOld(self.size, self.alpha, self.beta, self.k)
        legacy.type(input.type())
        self.lrn = legacy
        return self.lrn.forward(input)

    def backward(self, grad_output):
        """Compute the input gradient using the module built in forward."""
        saved_input, = self.saved_tensors
        return self.lrn.backward(saved_input, grad_output)
 def forward(self, input):
     """Run the legacy spatial cross-map LRN on *input*, saving it for backward."""
     self.save_for_backward(input)
     legacy = SpatialCrossMapLRNOld(self.size, self.alpha, self.beta, self.k)
     # Keep the legacy module's buffers in the same tensor type as the input.
     legacy.type(input.type())
     self.lrn = legacy
     return self.lrn.forward(input)
Example #7
0
 def __init__(self, size, alpha=1e-4, beta=0.75, k=1):
     """Initialize the module around a legacy SpatialCrossMapLRN instance."""
     super().__init__()
     # All LRN computation is delegated to the legacy module.
     self.lrn = SpatialCrossMapLRN(size, alpha, beta, k)