Example #1
0
 def forward(self, input_tensor: Tensor_Torch):
     """Flatten the wrapped tensor to 2-D: (batch, features).

     Collapses every dimension after the first into one. When
     ``self.inplace_forward`` is set, the flattened tensor also replaces
     the tensor held inside ``input_tensor``.
     """
     source = input_tensor.get_linked_tensor()
     # Keep the leading (batch) dimension, merge the rest into one.
     flattened = source.view(source.size(0), -1)
     if self.inplace_forward:
         input_tensor.set_linked_tensor(flattened)
     return flattened
Example #2
0
 def forward(self, input_tensor: Tensor_Torch):
     """Compute the normalized Gram matrix of the wrapped tensor.

     Follows the PyTorch Neural Style Transfer tutorial: the input is
     assumed to be 4-D ``(a, b, c, d)`` where ``b`` is the number of
     feature maps and ``(c, d)`` are the spatial dimensions of each map.
     When ``self.inplace_forward`` is set, the Gram matrix also replaces
     the tensor held inside ``input_tensor``.
     """
     source = input_tensor.get_linked_tensor()
     # Unpack the four dimensions (errors out if the tensor is not 4-D).
     a, b, c, d = source.size()
     # Reshape F_XL into \hat F_XL: one row per feature map instance.
     features = source.view(a * b, c * d)
     # Gram product of the feature matrix with its own transpose.
     gram = torch.mm(features, features.transpose(0, 1))
     # Normalize by the total number of elements in each feature map so
     # the magnitude is independent of the tensor's size.
     normalized = gram.div(a * b * c * d)
     if self.inplace_forward:
         input_tensor.set_linked_tensor(normalized)
     return normalized
Example #3
0
 def forward(self, input_tensor: Tensor_Torch):
     """Apply ``self.linear`` to the wrapped tensor and return the result.

     When ``self.inplace_forward`` is set, the result also replaces the
     tensor held inside ``input_tensor``.
     """
     result = self.linear(input_tensor.get_linked_tensor())
     if self.inplace_forward:
         input_tensor.set_linked_tensor(result)
     return result