def test_forward_pass_with_invalid_padding_value(self):
    """Forward pass must raise when the padding mode string is unrecognized."""
    # given
    weights = np.random.rand(5, 5, 3, 16)
    biases = np.random.rand(16)
    batch = np.random.rand(16, 11, 11, 3)
    bad_layer = SuperFastConvLayer2D(w=weights, b=biases, padding='lorem ipsum')

    # when / then
    with pytest.raises(InvalidPaddingModeError):
        _ = bad_layer.forward_pass(batch, training=True)
def test_backward_pass_only_size_valid_padding(self):
    """Backward pass must return a gradient shaped like the input activation ('valid' padding)."""
    # given
    weights = np.random.rand(5, 5, 3, 16)
    biases = np.random.rand(16)
    batch = np.random.rand(64, 11, 11, 3)
    layer = SuperFastConvLayer2D(w=weights, b=biases, padding='valid')

    # when
    forward_out = layer.forward_pass(batch, training=True)
    grad = layer.backward_pass(forward_out)

    # then
    assert grad.shape == batch.shape
def test_forward_pass_with_valid_padding(self):
    """Forward pass with 'valid' padding: verify output shape and one reference value."""
    # given
    weights = np.random.rand(5, 5, 3, 16)
    biases = np.random.rand(16)
    batch = np.random.rand(16, 11, 11, 3)
    layer = SuperFastConvLayer2D(w=weights, b=biases, padding='valid')

    # when
    out = layer.forward_pass(batch, training=True)

    # then
    # 'valid' padding with a 5x5 kernel on 11x11 input -> 11 - 5 + 1 = 7 spatial dims
    assert out.shape == (16, 7, 7, 16)
    # hand-compute filter 0 against the top-left 5x5 patch of sample 0
    reference = np.sum(weights[:, :, :, 0] * batch[0, 0:5, 0:5, :]) + biases[0]
    assert abs(reference - out[0, 0, 0, 0]) < 1e-8