def testEluGradGradWRTinputs(self):
    inputs = constant_op.constant(
        [[-2, -1, 1, 3], [5, 7, 8, 9]], dtype=dtypes.float32)
    dummy = constant_op.constant(
        [[3, 1, -1, -2], [9, 8, 7, 6]], dtype=dtypes.float32)
    elu = gen_nn_ops.elu(inputs)
    elu_grad = gradients_impl.gradients(elu, inputs, grad_ys=dummy)[0]
    with self.cached_session():
        error = gradient_checker.compute_gradient_error(
            inputs, inputs.shape, elu_grad, elu_grad.shape)
        self.assertLess(error, 1e-4)
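# The test above numerically verifies the gradient of the ELU gradient, i.e.
# the second derivative of ELU(x) = x for x > 0 and alpha*(exp(x) - 1)
# otherwise (alpha = 1 for gen_nn_ops.elu). The first derivative is 1 for
# x > 0 and exp(x) otherwise, so the second derivative is 0 for x > 0 and
# exp(x) otherwise. A minimal NumPy sketch of the same finite-difference
# check, independent of TensorFlow (names here are illustrative):
import numpy as np

def elu_grad_np(x):
    # Analytic first derivative of ELU with alpha = 1.
    return np.where(x > 0, 1.0, np.exp(x))

x = np.array([-2.0, -1.0, 1.0, 3.0])
eps = 1e-4
# Central difference of elu_grad, i.e. the second derivative of ELU.
numeric = (elu_grad_np(x + eps) - elu_grad_np(x - eps)) / (2 * eps)
analytic = np.where(x > 0, 0.0, np.exp(x))
assert np.allclose(numeric, analytic, atol=1e-4)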
def dilated_conv1d_layer(x, kernel_size, num_filters, name, dilation=1,
                         stride=1, activation=True, padding='VALID'):
    # Note: `stride` is accepted for API symmetry but is currently unused.
    with tf.name_scope(name) as scope:
        kernel = tf.get_variable(
            f'{name}/weights',
            shape=[kernel_size, x.get_shape()[-1].value, num_filters],
            initializer=tf.truncated_normal_initializer(stddev=1e-2))
        post_out = atrous_conv1d(x, kernel, dilation, padding=padding)
        biases = tf.get_variable(
            f'{name}/biases', shape=[num_filters],
            initializer=tf.constant_initializer(0.0))
        post_out = tf.nn.bias_add(post_out, biases)
        if activation:
            post_out = elu(post_out, name=name)
        # tf.summary.histogram(f'{name}/kernel', kernel)
        # tf.summary.histogram(f'{name}/biases', biases)
        return post_out
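# atrous_conv1d and elu are not defined in this snippet. A minimal sketch of
# what they might look like, assuming TF 1.x and inputs shaped
# [batch, width, channels]; these are illustrative stand-ins, not the
# original helpers:
def atrous_conv1d(value, filters, rate, padding='VALID'):
    # tf.nn.convolution performs dilated (atrous) convolution for N-D
    # inputs; for 1-D data the dilation rate is a single-element list.
    return tf.nn.convolution(value, filters, padding=padding,
                             dilation_rate=[rate])

def elu(x, name=None):
    # Presumably a thin wrapper around the public ELU activation.
    return tf.nn.elu(x, name=name)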
def conv2d_layer(x, filter_size, units, name, strides=None, padding='SAME'):
    if strides is None:
        strides = [1, 1]
    with tf.name_scope(name) as scope:
        kernel = tf.get_variable(
            f'{name}/weights',
            shape=[filter_size[0], filter_size[1],
                   x.get_shape()[-1].value, units],
            initializer=tf.random_normal_initializer(0, 0.1))
        conv = tf.nn.conv2d(x, filter=kernel,
                            strides=[1, strides[0], strides[1], 1],
                            padding=padding)
        biases = tf.get_variable(
            f'{name}/biases', shape=[units],
            initializer=tf.constant_initializer(0.0))
        out = tf.nn.bias_add(conv, biases)
        post_out = elu(out, name=name)
        tf.summary.histogram(f'{name}/kernel', kernel)
        # tf.summary.histogram(f'{name}/biases', biases)
        return post_out, kernel, biases
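# A hypothetical usage sketch (placeholder names and shapes are assumed, not
# from the original source): wiring the two layer helpers into a TF 1.x
# graph.
import tensorflow as tf

images = tf.placeholder(tf.float32, [None, 28, 28, 1], name='images')
conv1, w1, b1 = conv2d_layer(images, filter_size=[3, 3], units=32,
                             name='conv1', strides=[1, 1])
seq = tf.placeholder(tf.float32, [None, 100, 16], name='seq')
dconv1 = dilated_conv1d_layer(seq, kernel_size=3, num_filters=32,
                              name='dconv1', dilation=2)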
def predict(self, inputTensor, _):
    # Apply ELU directly via the generated raw op.
    return gen_nn_ops.elu(inputTensor)
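# gen_nn_ops.elu is the generated raw op that the public tf.nn.elu wrapper
# dispatches to. A minimal equivalence check (TF 1.x session style; setup
# assumed, not part of the original class):
import tensorflow as tf
from tensorflow.python.ops import gen_nn_ops

x = tf.constant([-1.0, 0.0, 2.0])
with tf.Session() as sess:
    raw_out, public_out = sess.run([gen_nn_ops.elu(x), tf.nn.elu(x)])
assert (raw_out == public_out).all()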