def clip(a, a_min, a_max):
    """Clips the values of an array to a given interval.

    Given an interval, values outside the interval are clipped to the
    interval edges. For example, if an interval of ``[0, 1]`` is
    specified, values smaller than 0 become 0, and values larger than
    1 become 1.

    Args:
        a (~chainerx.ndarray): Array containing elements to clip.
        a_min (scalar): Minimum value.
        a_max (scalar): Maximum value.

    Returns:
        ~chainerx.ndarray: An array with the elements of ``a``, but where
        values < ``a_min`` are replaced with ``a_min``, and those >
        ``a_max`` with ``a_max``.

    Note:
        The :class:`~chainerx.ndarray` typed ``a_min`` and ``a_max`` are
        not supported yet.

    Note:
        During backpropagation, this function propagates the gradient of
        the output array to the input array ``a``.

    .. seealso:: :func:`numpy.clip`
    """
    # Identity: -maximum(-maximum(a, a_min), -a_max)
    #         == minimum(maximum(a, a_min), a_max),
    # i.e. clamp below at a_min, then above at a_max.
    return -chainerx.maximum(-chainerx.maximum(a, a_min), -a_max)
def test_maximum_invalid_dtypes(device, dtype):
    # maximum must reject mixing a bool array with a numeric array,
    # regardless of argument order.
    shape = (3, 2)
    bool_array = chainerx.array(array_utils.uniform(shape, 'bool_'))
    numeric_array = chainerx.array(array_utils.uniform(shape, dtype))
    for lhs, rhs in (
            (bool_array, numeric_array),
            (numeric_array, bool_array)):
        with pytest.raises(chainerx.DtypeError):
            chainerx.maximum(lhs, rhs)
def __call__(self, x):
    # Stem: conv + batch norm, ReLU via maximum(0, .), then 3x3 stride-2
    # max pooling.
    out = self.bn1(self.conv1(x))
    out = chx.max_pool(chx.maximum(0, out), 3, stride=2)
    # Four residual stages applied in sequence.
    for stage in (self.res2, self.res3, self.res4, self.res5):
        out = stage(out)
    # 7x7 average pooling followed by the fully-connected head.
    out = chx.average_pool(out, 7, stride=1)
    return self.fc(out)
def forward_chainerx(self, inputs):
    # Unpack the single input array.
    x, = inputs
    pooled = chainerx.max_pool(
        x,
        ksize=self.ksize,
        stride=self.stride,
        pad=self.pad,
        cover_all=self.cover_all)
    # max_pool can return -inf (or huge negative numbers in case of
    # CUDA) around boundaries; clamp them to a finite floor so numeric
    # gradients can be computed properly.
    return chainerx.maximum(pooled, -1e4),
def forward_chainerx(self, inputs):
    x, = inputs
    out = chainerx.max_pool(
        x, ksize=self.ksize, stride=self.stride, pad=self.pad,
        cover_all=self.cover_all)
    # Boundary cells may come back as -inf (or, on CUDA, very large
    # negative values). Replace them with a finite floor so that
    # numeric gradient checks remain well-defined.
    out = chainerx.maximum(out, -1e4)
    return out,
def relu(x):
    """Rectified Linear Unit function.

    Computes the element-wise maximum of the input and zero.

    Args:
        x (~chainerx.ndarray): Input array.

    Returns:
        :class:`~chainerx.ndarray`: Returned array:
        :math:`y = \\max (0, x)`.

    Note:
        During backpropagation, this function propagates the gradient of
        the output array to the input array ``x``.
    """
    # TODO(imanishi): The function should also be available to C++ users
    return chainerx.maximum(x, 0)
def forward_chainerx(self, inputs):
    x, = inputs
    # ReLU expressed as an element-wise maximum with zero.
    y = chainerx.maximum(0, x)
    return y,
def __call__(self, x):
    # Residual block with an identity shortcut: two conv+BN+ReLU
    # stages, a third conv+BN, then ReLU applied to (out + x).
    out = chx.maximum(0, self.bn1(self.conv1(x)))
    out = chx.maximum(0, self.bn2(self.conv2(out)))
    out = self.bn3(self.conv3(out))
    return chx.maximum(0, out + x)
def __call__(self, x):
    # Residual block whose shortcut is projected through conv4+bn4
    # before being added to the main branch.
    main = chx.maximum(0, self.bn1(self.conv1(x)))
    main = chx.maximum(0, self.bn2(self.conv2(main)))
    main = self.bn3(self.conv3(main))
    shortcut = self.bn4(self.conv4(x))
    return chx.maximum(0, main + shortcut)
def forward_chainerx(self, inputs):
    x, = inputs
    # Capped ReLU: clamp below at 0, then above at self.cap.
    y = chainerx.maximum(0, x)
    y = chainerx.minimum(y, self.cap)
    return y,