def test_backward_brackets():
    """Check that backpropagating through the logsignature (brackets mode) yields
    the correct gradient values, across devices, depths and input sizes."""
    for use_class in (False, True):
        for device in h.get_devices():
            for batch_size, input_stream, input_channels, basepoint in h.random_sizes_and_basepoint():
                for depth in (1, 2, 4, 6):
                    for mode in (h.brackets_mode,):
                        # Sample these flags rather than iterating every
                        # combination, to keep the test runtime down.
                        stream = random.choice([False, True])
                        inverse = random.choice([False, True])
                        _test_backward(use_class, device, batch_size,
                                       input_stream, input_channels, depth,
                                       stream, basepoint, inverse, mode)
def test_backward():
    """Check that backpropagating through the signature yields the correct
    gradient values, exhaustively over devices, depths, sizes and flags."""
    for use_class in (False, True):
        for device in h.get_devices():
            for batch_size, input_stream, input_channels, basepoint in h.random_sizes_and_basepoint():
                for depth in (1, 2, 4, 6):
                    for stream in (False, True):
                        for inverse in (False, True):
                            for initial in (None, h.without_grad, h.with_grad):
                                for scalar_term in (False, True):
                                    _test_backward(use_class, device,
                                                   batch_size, input_stream,
                                                   input_channels, depth,
                                                   stream, basepoint, inverse,
                                                   initial, scalar_term)
def test_no_adjustments_logsignature():
    """Tests that the logsignature computations don't modify any memory that
    they're not supposed to.

    NOTE(review): renamed from ``test_no_adjustments`` — the signature variant
    later in this module reused that exact name, so this definition was
    shadowed and silently never collected by pytest. The rename makes it run
    again; the body is unchanged.
    """
    for class_ in (False, True):
        for device in h.get_devices():
            for batch_size, input_stream, input_channels, basepoint in h.random_sizes_and_basepoint():
                for depth in (1, 2, 5):
                    for mode in h.all_modes:
                        # Randomly sample these flags instead of iterating all
                        # combinations, to keep the runtime manageable.
                        path_grad = random.choice([False, True])
                        stream = random.choice([False, True])
                        inverse = random.choice([False, True])
                        _test_no_adjustments(class_, device, batch_size,
                                             input_stream, input_channels,
                                             depth, stream, basepoint, inverse,
                                             mode, path_grad)
def test_no_adjustments():
    """Check that the signature computations don't modify any memory that
    they're not supposed to, across every combination of options."""
    for use_class in (False, True):
        for path_grad in (False, True):
            for device in h.get_devices():
                for batch_size, input_stream, input_channels, basepoint in h.random_sizes_and_basepoint():
                    for depth in (1, 2, 5):
                        for stream in (False, True):
                            for inverse in (False, True):
                                for initial in (None, h.without_grad, h.with_grad):
                                    for scalar_term in (False, True):
                                        _test_no_adjustments(use_class, device,
                                                             batch_size,
                                                             input_stream,
                                                             input_channels,
                                                             depth, stream,
                                                             basepoint, inverse,
                                                             initial, path_grad,
                                                             scalar_term)
def test_repeat_and_memory_leaks_logsignature():
    """Performs two separate tests. First, that the computations are deterministic, and
    always give the same result when run multiple times; in particular that using the
    class signatory.LogSignature multiple times is fine. Second, that there are no
    memory leaks.

    NOTE(review): renamed from ``test_repeat_and_memory_leaks`` — the signature
    variant later in this module reused that exact name, so this definition was
    shadowed and silently never collected by pytest. The rename makes it run
    again; the body is unchanged.
    """
    for class_ in (False, True):
        for path_grad in (False, True):
            for batch_size, input_stream, input_channels, basepoint in h.random_sizes_and_basepoint():
                for depth in (1, 2, 5):
                    for mode in h.all_modes:
                        # Randomly sample these flags instead of iterating all
                        # combinations, to keep the runtime manageable.
                        stream = random.choice([False, True])
                        inverse = random.choice([False, True])
                        _test_repeat_and_memory_leaks(class_, path_grad,
                                                      batch_size, input_stream,
                                                      input_channels, depth,
                                                      stream, basepoint,
                                                      inverse, mode)
def test_repeat_and_memory_leaks():
    """Two checks in one pass. First: the signature computation is
    deterministic — repeated runs (including reuse of the class
    signatory.Signature) give identical results. Second: no memory leaks."""
    for use_class in (False, True):
        for path_grad in (False, True):
            for batch_size, input_stream, input_channels, basepoint in h.random_sizes_and_basepoint():
                for depth in (1, 2, 5):
                    for stream in (False, True):
                        for inverse in (False, True):
                            for initial in (None, h.without_grad, h.with_grad):
                                for scalar_term in (False, True):
                                    _test_repeat_and_memory_leaks(use_class,
                                                                  path_grad,
                                                                  batch_size,
                                                                  input_stream,
                                                                  input_channels,
                                                                  depth, stream,
                                                                  basepoint,
                                                                  inverse,
                                                                  initial,
                                                                  scalar_term)