Example #1
    def forward(self, input):
        if self.activation:
            # Equalized-lr linear: the weight is rescaled at runtime, then
            # bias and leaky ReLU are applied together in one fused op.
            out = F.linear(input, self.weight * self.scale)
            out = fused_leaky_relu(out, self.bias * self.lr_mul)
        else:
            # No activation: fold the (lr-scaled) bias into the linear call.
            out = F.linear(
                input, self.weight * self.scale, bias=self.bias * self.lr_mul
            )
        return out
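
This forward pass relies on attributes prepared in the module's constructor (self.scale, self.lr_mul, self.activation, and the unscaled self.weight). A minimal constructor sketch, assuming the usual StyleGAN2-style EqualLinear conventions; the argument names, defaults, and the import path of the fused op are assumptions, not taken from this example:

import math

import torch
from torch import nn
from torch.nn import functional as F

from op import fused_leaky_relu  # assumed location of the fused op


class EqualLinear(nn.Module):
    def __init__(self, in_dim, out_dim, lr_mul=1.0, activation=None):
        super().__init__()
        # Store the weight unscaled; the equalized-lr factor is applied
        # at runtime in forward() via self.weight * self.scale.
        self.weight = nn.Parameter(torch.randn(out_dim, in_dim).div_(lr_mul))
        self.bias = nn.Parameter(torch.zeros(out_dim))
        self.activation = activation  # e.g. "fused_lrelu" or None
        self.scale = (1 / math.sqrt(in_dim)) * lr_mul
        self.lr_mul = lr_mul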
Example #2
    def forward(self, input):
        if self.activation:
            out = F.linear(input, self.weight * self.scale)
            # t = out[0]
            # t = t[:5]
            # print('after linear', t)
            if self.v:
                out = fused_leaky_relu(out, self.bias * self.lr_mul)
                # t = out[0]
                # t = t[:5]
                # print('after leaky_relu', t)
            else:
                out = fused_leaky_relu_v(out, self.bias * self.lr_mul)
                # t = out[0]
                # t = t[:5]
                # print('after leaky_relu_v', t)

        else:
            out = F.linear(input,
                           self.weight * self.scale,
                           bias=self.bias * self.lr_mul)

        return out
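
In both examples the fused op combines bias addition, a leaky ReLU, and a fixed output gain in a single kernel. A plain-PyTorch sketch of what that computation amounts to, assuming the common negative_slope=0.2 and scale=sqrt(2) defaults (fused_leaky_relu_v is repository-specific and not reproduced here):

from torch.nn import functional as F


def fused_leaky_relu_reference(input, bias, negative_slope=0.2, scale=2 ** 0.5):
    # Broadcast the per-channel bias over the remaining dimensions,
    # apply leaky ReLU, then multiply by a constant gain.
    rest_dim = [1] * (input.ndim - bias.ndim - 1)
    out = input + bias.view(1, bias.shape[0], *rest_dim)
    return F.leaky_relu(out, negative_slope=negative_slope) * scale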