def forward(self, x):
    x = self.fc0(x)
    x = F.tanh(x)
    x = self.fc1(x)
    x = F.tanh(x)
    x = self.fc2(x)
    return x
def forward(self, x):
    x = self.fc0(x)
    x = self.bn0(x)
    # When converting to tflite, use softmax in place of tanh as the hidden activation.
    x = F.softmax(x) if self.converter == "tflite" else F.tanh(x)
    x = self.fc1(x)
    x = self.bn1(x)
    x = F.softmax(x) if self.converter == "tflite" else F.tanh(x)
    x = self.fc2(x)
    return x
def forward(self, x):
    y = self.fc0(x)
    x = F.tanh(y)
    y = self.fc1(x)
    x = F.tanh(y)
    x = self.fc2(x)
    y = (x + x) / 2  # in order to test drop()
    y._drop()
    return y
def forward(self, x):
    x = self.fc0(x)
    x = self.bn0(x)
    x = F.leaky_relu(x)
    x = F.leaky_relu(x)
    x = F.tanh(x)
    x = self.fc1(x)
    x = F.leaky_relu(x)
    x = self.bn1(x)
    x = F.tanh(x)
    x = self.fc2(x)
    x = F.leaky_relu(x)
    return x
import numpy as np
import torch.nn.functional as F


def gelu(x):
    """Implementation of the gelu activation function.

    For information: OpenAI GPT's gelu is slightly different (and gives slightly different results):
    x * 0.5 * (1.0 + F.tanh((np.sqrt(2 / np.pi) * (x + 0.044715 * (x ** 3)))))
    Also see https://arxiv.org/abs/1606.08415
    """
    return x * 0.5 * (1.0 + F.tanh((np.sqrt(2 / np.pi) * (x + 0.044715 * (x ** 3)))))
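# A minimal usage sketch, not part of the original snippet: it assumes the
# gelu() defined above is in scope (with numpy and torch.nn.functional
# imported as np / F) and simply applies the tanh-approximated GELU to a
# random activation tensor.
import torch

x = torch.randn(4, 16)   # hypothetical batch of pre-activations
y = gelu(x)              # element-wise GELU, same shape as x
print(y.shape)           # torch.Size([4, 16])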
def forward(self, x): r""" Feedforwards a batch of noise vectors into a batch of fake images. Args: x (Tensor): A batch of noise vectors of shape (N, nz). Returns: Tensor: A batch of fake images of shape (N, C, H, W). """ h = self.l1(x) h = h.reshape(x.shape[0], -1, self.bottom_width, self.bottom_width) h = self.block2(h) h = self.block3(h) h = self.block4(h) h = self.b5(h) h = self.activation(h) h = F.tanh(self.c5(h)) return h
def forward(self, input):
    """
    Forward pass of the function.
    """
    return input * F.tanh(F.softplus(input))
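# The expression above, input * tanh(softplus(input)), is the Mish activation
# (https://arxiv.org/abs/1908.08681). Below is a minimal self-contained sketch
# of the same forward wrapped as an nn.Module; the class name Mish and the
# demo tensor are illustrative assumptions, not taken from the original code.
import torch
import torch.nn as nn
import torch.nn.functional as F


class Mish(nn.Module):
    def forward(self, input):
        # Smooth, non-monotonic activation: x * tanh(ln(1 + exp(x)))
        return input * F.tanh(F.softplus(input))


act = Mish()
print(act(torch.randn(2, 3)).shape)  # torch.Size([2, 3])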