def forward(self, ctx: Context, x: np.ndarray) -> np.ndarray:
    """:math:`Softmax(x_i) = \\frac{exp(x_i)}{\\sum_j{exp(x_j)}}`"""
    # Shift by the row-wise max before exponentiating so large inputs
    # cannot overflow; the softmax value is mathematically unchanged.
    e_x = np.exp(x - np.max(x, axis=-1, keepdims=True))
    res = e_x / np.sum(e_x, axis=-1, keepdims=True)
    ctx.save_for_back(res)
    return res
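
# A sketch of the matching backward pass (hypothetical name and signature;
# this repo's Context/backward API is not shown here). For y = softmax(x),
# dL/dx = y * (dL/dy - sum_j(dL/dy_j * y_j)), which is why the forward
# saves `res`.
def softmax_backward_sketch(saved_res: np.ndarray, grad_output: np.ndarray) -> np.ndarray:
    dot = np.sum(grad_output * saved_res, axis=-1, keepdims=True)
    return saved_res * (grad_output - dot)
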
def forward(self, ctx: Context, image: np.ndarray) -> np.ndarray:
    """
    Performs 2d average pool over input tensor

    Args:
        ctx (Context): Autograd Context
        image (np.array): input image. Allowed shapes: [N, C, H, W], [C, H, W]
            N - batches, C - channels, H - height, W - width

    Returns:
        tensor (np.array): pooled output of shape [N, C, new_H, new_W]
    """
    img_w = image.shape[-1]
    img_h = image.shape[-2]
    channels = image.shape[-3]
    # new image width
    new_w = (img_w - self.kernel_size[0]) // self.stride[0] + 1
    # new image height
    new_h = (img_h - self.kernel_size[1]) // self.stride[1] + 1
    # unfold the image into columns, one per kernel window
    img_out = Img2Col.img_2_col_forward(self.kernel_size, self.stride, False, image)
    # average the elements of each window
    averaged = np.average(img_out, -2)
    ctx.save_for_back(img_out, image.shape, averaged.shape)
    return np.reshape(averaged, (-1, channels, new_h, new_w))
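
# Reference check (a standalone sketch, not part of the repo): the same
# average pooling computed with numpy's sliding_window_view, illustrating the
# output-shape formula new = (size - kernel) // stride + 1 used above.
# It assumes a (height, width) kernel/stride order; for square kernels this
# matches the code above either way.
def avg_pool2d_reference(image: np.ndarray, kernel_size, stride) -> np.ndarray:
    kh, kw = kernel_size
    sh, sw = stride
    windows = np.lib.stride_tricks.sliding_window_view(image, (kh, kw), axis=(-2, -1))
    windows = windows[..., ::sh, ::sw, :, :]  # keep every stride-th window
    return windows.mean(axis=(-2, -1))        # average each kh*kw window

# e.g. a [1, 1, 4, 4] input with kernel (2, 2), stride (2, 2) pools to [1, 1, 2, 2]
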
def forward(self, ctx: Context, image: np.ndarray) -> np.ndarray:
    """
    Performs Img to Col transformation.

    Args:
        ctx (Context): usual context,
        image (np.array): image to be transformed, allowed shapes:
            [N, C, H, W], [C, H, W]
            N - batches, C - channels, H - height, W - width
    """
    ctx.save_for_back(image.shape)
    return self.img_2_col_forward(self.kernel_size, self.stride, True, image)
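
# One common im2col layout, as a sketch only: the repo's img_2_col_forward may
# order axes differently, and the boolean flag it takes is not shown here.
# Each kernel-sized window becomes one column of kh*kw patch elements, which
# is consistent with the average over axis -2 in the pooling code above.
def im2col_reference(image: np.ndarray, kernel_size, stride) -> np.ndarray:
    kh, kw = kernel_size
    sh, sw = stride
    w = np.lib.stride_tricks.sliding_window_view(image, (kh, kw), axis=(-2, -1))
    w = w[..., ::sh, ::sw, :, :]                    # stride over window positions
    oh, ow = w.shape[-4], w.shape[-3]
    w = w.reshape(*w.shape[:-4], oh * ow, kh * kw)  # one row per patch
    return np.swapaxes(w, -1, -2)                   # (..., kh*kw, n_patches)
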
def forward(self, ctx: Context, tensor: np.ndarray) -> np.ndarray:
    """Softsign(x) = :math:`\\frac{x}{1 + |x|}`"""
    denominator = 1 + np.abs(tensor)
    ctx.save_for_back(denominator)
    return tensor / denominator
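
# Matching backward sketch (hypothetical signature): for y = x / (1 + |x|),
# dy/dx = 1 / (1 + |x|)^2, which is why the forward saves the denominator.
def softsign_backward_sketch(saved_denominator: np.ndarray, grad_output: np.ndarray) -> np.ndarray:
    return grad_output / saved_denominator ** 2
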
def forward(self, ctx: Context, tensor: np.ndarray) -> np.ndarray:
    """Softplus(x) = :math:`ln(1 + e^x)`"""
    # 1 + e^-x is saved so the backward can recover sigmoid(x) = 1 / (1 + e^-x)
    ctx.save_for_back(1 + np.exp(-tensor))
    # logaddexp(0, x) computes ln(1 + e^x) without overflowing for large x
    return np.logaddexp(0, tensor)
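
# Matching backward sketch (hypothetical signature): d/dx ln(1 + e^x) is the
# sigmoid 1 / (1 + e^-x), and the forward saved exactly 1 + e^-x.
def softplus_backward_sketch(saved: np.ndarray, grad_output: np.ndarray) -> np.ndarray:
    return grad_output / saved
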
def forward(self, ctx: Context, tensor: np.ndarray) -> np.ndarray:
    """ArcTan(x) = :math:`tan^{-1}(x)`"""
    ctx.save_for_back(tensor)
    return np.arctan(tensor)
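
# Matching backward sketch (hypothetical signature): d/dx arctan(x) = 1 / (1 + x^2),
# so the forward saves the raw input.
def arctan_backward_sketch(saved_tensor: np.ndarray, grad_output: np.ndarray) -> np.ndarray:
    return grad_output / (1 + saved_tensor ** 2)
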
def forward(self, ctx: Context, tensor: np.ndarray) -> np.ndarray:
    """Sigmoid(x) = :math:`\\frac{1}{1 + e^{-x}}`"""
    sig = 1 / (1 + np.exp(-tensor))
    ctx.save_for_back(sig)
    return sig
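
# Matching backward sketch (hypothetical signature): d/dx sigmoid(x)
# = sigmoid(x) * (1 - sigmoid(x)), so the forward saves the output itself.
def sigmoid_backward_sketch(saved_sig: np.ndarray, grad_output: np.ndarray) -> np.ndarray:
    return grad_output * saved_sig * (1 - saved_sig)
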
def forward(self, ctx: Context, tensor: np.ndarray) -> np.ndarray:
    """ReLU(x) = :math:`max(0, x)`"""
    # the raw input is saved so the backward can mask out positions where x <= 0
    ctx.save_for_back(tensor)
    return np.clip(tensor, a_min=0, a_max=None)
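
# Matching backward sketch (hypothetical signature): the gradient passes
# through only where the input was positive.
def relu_backward_sketch(saved_tensor: np.ndarray, grad_output: np.ndarray) -> np.ndarray:
    return grad_output * (saved_tensor > 0)
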
def forward(self, ctx: Context, tensor: np.ndarray) -> np.ndarray:
    """Tanh(x)"""
    tanh = np.tanh(tensor)
    ctx.save_for_back(tanh)
    return tanh
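
# Matching backward sketch (hypothetical signature): d/dx tanh(x) = 1 - tanh(x)^2,
# so the forward saves the tanh output rather than the input.
def tanh_backward_sketch(saved_tanh: np.ndarray, grad_output: np.ndarray) -> np.ndarray:
    return grad_output * (1 - saved_tanh ** 2)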