class LRN(nn.Module):
    """Local Response Normalization as an ``nn.Module``.

    Thin wrapper that delegates the computation to the deprecated
    ``torch.legacy.nn`` SpatialCrossMapLRN module.  Gradients do NOT flow
    through this wrapper: the output is re-wrapped in a fresh ``Variable``.
    """

    def __init__(self, size, alpha=1e-4, beta=0.75, k=1):
        super().__init__()
        # Legacy module instance that actually performs the normalization.
        self.lrn = SpatialCrossMapLRN(size, alpha, beta, k)

    def forward(self, x):
        # Drop any cached buffers from a previous call before reuse.
        self.lrn.clearState()
        out = self.lrn.updateOutput(x.data)
        # NOTE(review): wrapping in Variable detaches from autograd — presumably
        # intentional for inference-only use; confirm with callers.
        return Variable(out)
class SpatialCrossMapLRNFunc(Function):
    """Old-style (stateful) autograd Function wrapping the legacy LRN module.

    One instance per call: ``forward`` lazily builds the legacy module and
    ``backward`` reuses it to compute the input gradient.
    """

    def __init__(self, size, alpha=1e-4, beta=0.75, k=1):
        # LRN hyper-parameters, forwarded verbatim to the legacy module.
        self.size = size
        self.alpha = alpha
        self.beta = beta
        self.k = k

    def forward(self, input):
        self.save_for_backward(input)
        lrn = SpatialCrossMapLRNOld(self.size, self.alpha, self.beta, self.k)
        # Match the legacy module's tensor type (cpu/cuda, float/double) to the input.
        lrn.type(input.type())
        self.lrn = lrn
        return lrn.forward(input)

    def backward(self, grad_output):
        (input,) = self.saved_tensors
        # Delegate gradient computation to the same legacy module built in forward.
        return self.lrn.backward(input, grad_output)
def forward(self, input):
    """Run legacy spatial cross-map LRN on ``input`` and stash it for backward."""
    # Import locally so the deprecated torch.legacy dependency is only
    # touched when this path actually executes.
    from torch.legacy.nn import SpatialCrossMapLRN as SpatialCrossMapLRNOld

    self.save_for_backward(input)
    lrn = SpatialCrossMapLRNOld(self.size, self.alpha, self.beta, self.k)
    # Keep the legacy module on the same tensor type as the input.
    lrn.type(input.type())
    self.lrn = lrn
    return lrn.forward(input)
def forward(self, input):
    """Legacy-LRN forward pass; trips an assertion if legacy paths are disabled."""
    self.save_for_backward(input)
    # Equivalent to `if disable_torch_legacy: assert False` — hard-stop when
    # the legacy code path has been switched off.
    assert not disable_torch_legacy
    lrn = SpatialCrossMapLRNOld(self.size, self.alpha, self.beta, self.k)
    # Align the legacy module's tensor type with the input's.
    lrn.type(input.type())
    self.lrn = lrn
    return lrn.forward(input)
class LRNFunc(Function):
    """Old-style autograd Function for LRN, guarded by ``disable_torch_legacy``.

    Stores its hyper-parameters at construction time; ``forward`` builds the
    deprecated legacy module on demand and ``backward`` reuses it.
    """

    def __init__(self, size, alpha=1e-4, beta=0.75, k=1):
        super(LRNFunc, self).__init__()
        # Normalization hyper-parameters, passed through to the legacy module.
        self.size = size
        self.alpha = alpha
        self.beta = beta
        self.k = k

    def forward(self, input):
        self.save_for_backward(input)
        # Equivalent to `if disable_torch_legacy: assert False` — hard-stop
        # when the legacy code path has been switched off.
        assert not disable_torch_legacy
        lrn = SpatialCrossMapLRNOld(self.size, self.alpha, self.beta, self.k)
        # Match the legacy module's tensor type (cpu/cuda, dtype) to the input.
        lrn.type(input.type())
        self.lrn = lrn
        return lrn.forward(input)

    def backward(self, grad_output):
        (input,) = self.saved_tensors
        # Reuse the module created in forward to produce the input gradient.
        return self.lrn.backward(input, grad_output)
def forward(self, input):
    """Apply the legacy spatial cross-map LRN module to ``input``."""
    self.save_for_backward(input)
    lrn = SpatialCrossMapLRNOld(self.size, self.alpha, self.beta, self.k)
    # Keep the legacy module's tensor type in sync with the input.
    lrn.type(input.type())
    self.lrn = lrn
    return lrn.forward(input)
def __init__(self, size, alpha=1e-4, beta=0.75, k=1):
    """Build the wrapper and its underlying legacy LRN module.

    Parameters mirror the legacy SpatialCrossMapLRN: ``size`` is the number
    of channels in the normalization window; ``alpha``, ``beta`` and ``k``
    are the scaling, exponent and bias constants.
    """
    super().__init__()
    # The legacy module does all the actual work; this object merely holds it.
    self.lrn = SpatialCrossMapLRN(size, alpha, beta, k)