import tensorflow as tf

# SOP and the create_upper_and_lower* data helpers belong to the project
# under test; illustrative stand-ins are sketched further down so that this
# file can also run on its own.


class TestSOP(tf.test.TestCase):

    def test_fwd_pass_connections_and_gradient(self):
        batch_size, width, height = 64, 14, 28
        hyper = {'width_height': (width, height, 1),
                 'model_type': 'GS',
                 'batch_size': batch_size,
                 'temp': tf.constant(0.1)}
        shape = (batch_size, width, height, 1)
        x_upper, x_lower = create_upper_and_lower(shape=shape)
        sop = SOP(hyper=hyper)

        with tf.GradientTape() as tape:
            logits = sop.call(x_upper=x_upper)
            # The logits carry a trailing sample dimension, so the labels
            # need one as well (see the explicit reshape in the pytest-style
            # variant below).
            x_lower = tf.expand_dims(x_lower, axis=-1)
            loss = tf.nn.sigmoid_cross_entropy_with_logits(labels=x_lower,
                                                           logits=logits)
        # tf.GradientTape.gradient takes the target first, then the sources.
        grad = tape.gradient(target=loss, sources=sop.trainable_variables)

        # gradient() returns one entry per trainable variable; a None entry
        # would mean that variable is disconnected from the loss, so check
        # each entry rather than the (never-None) list itself.
        self.assertTrue(all(g is not None for g in grad))
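
# Neither test defines its data helper, and the two names appear to refer to
# the same routine. A minimal sketch of the assumed behaviour: draw a batch
# of binary images with the given shape and split it along the height axis
# into an upper half (model input) and a lower half (reconstruction target).
# This is an illustration, not the project's actual implementation.
def create_upper_and_lower_dummy_data(shape):
    batch_size, width, height, rgb = shape
    images = tf.cast(
        tf.random.uniform(shape=(batch_size, width, height, rgb)) > 0.5,
        tf.float32)
    x_upper, x_lower = tf.split(images, num_or_size_splits=2, axis=2)
    return x_upper, x_lower


# Name used by the unittest-style case above, assumed to be the same helper.
create_upper_and_lower = create_upper_and_lower_dummy_data
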
# pytest-style variant of the same check; it additionally exercises
# 'units_per_layer' and makes the sample dimension explicit.
def test_fwd_pass_connections_and_gradient():
    batch_size, width, height, rgb, sample_size = 64, 14, 28, 1, 1
    hyper = {'width_height': (width, height, 1),
             'model_type': 'GS',
             'batch_size': batch_size,
             'units_per_layer': 240,
             'temp': tf.constant(0.1)}
    shape = (batch_size, width, height, rgb)
    x_upper, x_lower = create_upper_and_lower_dummy_data(shape=shape)
    sop = SOP(hyper=hyper)

    with tf.GradientTape() as tape:
        logits = sop.call(x_upper=x_upper)
        # Give the labels the same trailing sample dimension as the logits.
        x_lower = tf.reshape(x_lower, x_lower.shape + (sample_size,))
        loss = tf.nn.sigmoid_cross_entropy_with_logits(labels=x_lower,
                                                       logits=logits)
    # Target first, then sources.
    grad = tape.gradient(target=loss, sources=sop.trainable_variables)

    print('\nTEST: Forward pass and gradient computation')
    # Every trainable variable should receive a gradient.
    assert all(g is not None for g in grad)
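
# SOP itself is not shown in this file. Purely as an illustration of the
# interface the tests rely on, here is a minimal stand-in: a dense hidden
# layer followed by one Bernoulli logit per pixel of the lower image half,
# reshaped to carry a trailing sample dimension of size 1. The real model
# presumably uses 'model_type' and 'temp' for its Gumbel-Softmax sampling;
# the stand-in accepts them but leaves them unused.
class SOP(tf.keras.Model):

    def __init__(self, hyper):
        super().__init__()
        width, height, rgb = hyper['width_height']
        # Output shape: (batch, width, height // 2, rgb, sample_size=1).
        self.logits_shape = (-1, width, height // 2, rgb, 1)
        self.flatten = tf.keras.layers.Flatten()
        self.hidden = tf.keras.layers.Dense(
            units=hyper.get('units_per_layer', 240), activation='relu')
        self.out = tf.keras.layers.Dense(units=width * (height // 2) * rgb)

    def call(self, x_upper):
        logits = self.out(self.hidden(self.flatten(x_upper)))
        return tf.reshape(logits, self.logits_shape)


# Run the unittest-style case directly; executing the file with pytest picks
# up the function-style test as well.
if __name__ == '__main__':
    tf.test.main()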