def forward(a, b, d, f):
    c = (a / b) + (d * f)
    h = sail.exp(c)
    g = h - sail.max(h, 1, True) + d - c
    i = sail.sum(h)
    return i
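# For reference only: a minimal NumPy sketch mirroring the same graph (NumPy is
# an assumption here). `g` is kept to match the sail version above, even though
# it does not feed into the returned sum.
import numpy as np

def forward_np(a, b, d, f):
    c = (a / b) + (d * f)
    h = np.exp(c)
    g = h - np.max(h, axis=1, keepdims=True) + d - c  # mirrors the unused intermediate above
    return np.sum(h)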
def test_max(self, rq):
    times = []
    for c in choices:
        arr1 = np.random.uniform(0, 1, c["shape"])
        x1 = sail.Tensor(arr1, requires_grad=rq)

        x3 = sail.max(x1, c["axis"], keepdims=c["keepdims"])
        arr3 = np.max(arr1, c["axis"], keepdims=c["keepdims"])

        self.assert_eq(x3.shape, c["result_shape"])
        self.assert_eq_np_sail(arr3, x3, 1e-7)
        self.assert_eq(x3.requires_grad, rq)
    return
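# `choices` and the assert helpers are defined elsewhere in the test suite. A
# hypothetical sketch of the entries this loop expects (shapes and axes below
# are illustrative assumptions, not the real test configuration):
choices_example = [
    {"shape": (12, 32, 4), "axis": 1, "keepdims": True, "result_shape": (12, 1, 4)},
    {"shape": (12, 32, 4), "axis": 2, "keepdims": False, "result_shape": (12, 32)},
]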
def forward(a):
    y = sail.max(a)
    return y
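# Hypothetical usage sketch: with no axis given, sail.max is assumed to reduce
# over every element (per the sail.max docstring below), matching np.max(arr).
import numpy as np
import sail

arr = np.random.uniform(0, 1, (8, 4))
x = sail.Tensor(arr, requires_grad=True)
y = forward(x)     # scalar sail result
ref = np.max(arr)  # NumPy reference value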
    tensor (Tensor): Input data
    axis (int or tuple of ints, optional): If provided, then `axis` represents the axis
        the mean will be computed over. If `axis` is a tuple, then the axes provided will
        be used to compute the mean
    keepdims (boolean, optional): If True, then the axes that are reduced will be replaced
        with 1, otherwise, those axes will be removed

Examples:
    >>> x = sail.random.uniform(0, 1, (12, 32, 4, 5))
    >>> y = sail.mean(x, 1, True)
    >>> y.shape
    (12, 1, 4, 5)
    >>> z = sail.mean(x, -2, False)
    >>> z.shape
    (12, 32, 5)
"""
add_docstring(sail.mean, descr)

descr = r"""
sail.max(tensor, axis=None, keepdims=False) -> Tensor

Returns the maximum of `tensor` over specified axis.

.. note::
    If ``axis < 0``, then the axis that will be computed over is ``tensor.ndim + axis``.

Args:
    tensor (Tensor): Input data
    axis (int or tuple of ints, optional): If provided, then `axis` represents the axis
        the max will be computed over. If `axis` is a tuple, then the axes provided will
        be used to compute the max
    keepdims (boolean, optional): If True, then the axes that are reduced will be replaced
        with 1, otherwise, those axes will be removed

Examples:
    >>> x = sail.random.uniform(0, 1, (12, 32, 4, 5))
    >>> y = sail.max(x, 1, True)
    >>> y.shape