def forward_gpu(self, x): self.y = cuda.empty_like(x[0]) if cudnn.enabled and self.use_cudnn: handle = cudnn.get_default_handle() desc = cudnn.get_tensor_desc(x[0], 1, 1) libcudnn.cudnnActivationForward( handle, _mode, 1, desc.value, cudnn.get_ptr(x[0]), 0, desc.value, cudnn.get_ptr(self.y) ) else: cuda.elementwise("float* y, const float* x", "y[i] = tanhf(x[i])", "tanh_fwd")(self.y, x[0]) return (self.y,)
# GPU forward pass for the tanh activation. `cuda`, `cudnn`, `libcudnn`, and
# `_mode` (the cuDNN activation-mode constant) are module-level names from the
# surrounding file.
def forward_gpu(self, x):
    self.y = cuda.empty_like(x[0])
    if cudnn.enabled and self.use_cudnn:
        # cuDNN path: y = tanh(x) via cudnnActivationForward.
        handle = cudnn.get_default_handle()
        desc = cudnn.get_tensor_desc(x[0], 1, 1)
        libcudnn.cudnnActivationForward(
            handle, _mode, 1, desc.value, cudnn.get_ptr(x[0]),
            0, desc.value, cudnn.get_ptr(self.y))
    else:
        # Fallback: raw elementwise kernel applying tanhf to every element.
        cuda.elementwise(
            'float* y, const float* x',
            'y[i] = tanhf(x[i])',
            'tanh_fwd')(self.y, x[0])
    return self.y,
# GPU forward pass for the sigmoid activation.
def forward_gpu(self, x):
    self.y = cuda.empty_like(x[0])
    if cudnn.enabled and self.use_cudnn:
        # cuDNN path: y = sigmoid(x) via cudnnActivationForward.
        handle = cudnn.get_default_handle()
        desc = cudnn.get_tensor_desc(x[0], 1, 1)
        libcudnn.cudnnActivationForward(
            handle, _mode, 1, desc.value, cudnn.get_ptr(x[0]),
            0, desc.value, cudnn.get_ptr(self.y))
    else:
        # Fallback: elementwise kernel using the fast __expf intrinsic.
        cuda.elementwise(
            'float* y, const float* x',
            'y[i] = 1 / (1 + __expf(-x[i]))',
            'sigmoid_fwd')(self.y, x[0])
    return self.y,
# GPU forward pass for the ReLU activation.
def forward_gpu(self, x):
    y = cuda.empty_like(x[0])
    if cudnn.enabled and self.use_cudnn:
        # cuDNN path: y = max(0, x) via cudnnActivationForward; the output is
        # cached only here, since the cuDNN backward pass needs it.
        handle = cudnn.get_default_handle()
        desc = cudnn.get_tensor_desc(x[0], 1, 1)
        libcudnn.cudnnActivationForward(
            handle, _mode, 1, desc.value, cudnn.get_ptr(x[0]),
            0, desc.value, cudnn.get_ptr(y))
        self.y = y
    else:
        # Fallback: elementwise kernel clamping negative values to zero.
        cuda.elementwise(
            'float* y, const float* x',
            'y[i] = max(0.f, x[i])',
            'relu_fwd')(y, x[0])
    return y,
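# --- Hedged usage sketch (not part of the original source) ---
# A minimal check of the non-cuDNN fallback path above, assuming the
# PyCUDA-based `chainer.cuda` module of Chainer 1.x, where cuda.init(),
# cuda.to_gpu(), cuda.to_cpu(), and cuda.empty_like() exist and
# cuda.elementwise() returns a kernel callable applied element by element to
# its GPU array arguments. Names such as `x_cpu` and `y_gpu` are
# illustrative only.
import numpy
from chainer import cuda

cuda.init()
x_cpu = numpy.random.randn(4, 3).astype(numpy.float32)
x_gpu = cuda.to_gpu(x_cpu)
y_gpu = cuda.empty_like(x_gpu)
cuda.elementwise(
    'float* y, const float* x',
    'y[i] = tanhf(x[i])',
    'tanh_fwd')(y_gpu, x_gpu)
# The GPU kernel should agree with the NumPy reference up to float32 accuracy.
assert numpy.allclose(cuda.to_cpu(y_gpu), numpy.tanh(x_cpu), atol=1e-6)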