def testSlogDet(self):
  """Check slog_determinant against np.linalg.slogdet on rank-1 matrices."""
  # TODO: use kron and -1 * kron matrices, when mul is implemented
  # the current version is platform-dependent
  tf.compat.v1.set_random_seed(5)  # seed chosen to yield a negative determinant
  neg_init = initializers.random_matrix(((2, 3), (2, 3)), tt_rank=1,
                                        dtype=self.dtype)
  kron_neg = variables.get_variable('kron_neg', initializer=neg_init)
  tf.compat.v1.set_random_seed(1)  # seed chosen to yield a positive determinant
  pos_init = initializers.random_matrix(((2, 3), (2, 3)), tt_rank=1,
                                        dtype=self.dtype)
  kron_pos = variables.get_variable('kron_pos', initializer=pos_init)
  self.evaluate(tf.compat.v1.global_variables_initializer())
  # Verify sign and log-magnitude for both the negative- and the
  # positive-determinant matrix.
  for mat in (kron_neg, kron_pos):
    want_sign, want_logdet = np.linalg.slogdet(self.evaluate(ops.full(mat)))
    got_sign, got_logdet = self.evaluate(kr.slog_determinant(mat))
    self.assertEqual(want_sign, got_sign)
    self.assertAllClose(want_logdet, got_logdet)
def testAttributes(self):
  """Converting an initializer into a variable must keep all TT attributes."""
  tens_init = initializers.random_tensor([2, 3, 2], tt_rank=2)
  tens_var = variables.get_variable('tt_tens', initializer=tens_init)
  mat_init = initializers.random_matrix([[3, 2, 2], [3, 3, 3]], tt_rank=3)
  mat_var = variables.get_variable('tt_mat', initializer=mat_init)
  attributes = ('get_shape', 'get_raw_shape', 'ndims', 'get_tt_ranks',
                'is_tt_matrix')
  for source, variable in ((tens_init, tens_var), (mat_init, mat_var)):
    for attr in attributes:
      self.assertEqual(getattr(source, attr)(), getattr(variable, attr)())
def testIsKronKron(self):
  """_is_kron must recognise a TT-rank-1 (Kronecker product) matrix."""
  init = initializers.random_matrix(((2, 3), (3, 2)), tt_rank=1,
                                    dtype=self.dtype)
  kron_mat = variables.get_variable('kron_mat', initializer=init)
  self.assertTrue(kr._is_kron(kron_mat))
def testIsKronNonKron(self):
  """_is_kron must reject a matrix whose TT-rank is greater than 1."""
  init = initializers.random_matrix(((2, 3), (3, 2)), tt_rank=2,
                                    dtype=self.dtype)
  tt_mat = variables.get_variable('tt_mat', initializer=init)
  self.assertFalse(kr._is_kron(tt_mat))
def testIsKronKron(self):
  """_is_kron must recognise a batch of TT-rank-1 (Kronecker) matrices."""
  # dtype=self.dtype added for consistency with the non-batch Kronecker
  # tests in this file, which all parameterize the initializer dtype.
  initializer = initializers.random_matrix_batch(((2, 3), (3, 2)), tt_rank=1,
                                                 batch_size=3,
                                                 dtype=self.dtype)
  kron_mat_batch = variables.get_variable('kron_mat_batch',
                                          initializer=initializer)
  self.assertTrue(kr._is_kron(kron_mat_batch))
def testIsKronNonKron(self):
  """_is_kron must reject a batch of matrices with TT-rank greater than 1."""
  # dtype=self.dtype added for consistency with the non-batch Kronecker
  # tests in this file, which all parameterize the initializer dtype.
  initializer = initializers.random_matrix_batch(((2, 3), (3, 2)), tt_rank=2,
                                                 batch_size=3,
                                                 dtype=self.dtype)
  tt_mat_batch = variables.get_variable('tt_mat_batch',
                                        initializer=initializer)
  self.assertFalse(kr._is_kron(tt_mat_batch))
def testProjectWeightedSumDtypeBug(self):
  """project_sum(batch, TT variable, tf weights) must not raise.

  Regression test: project_sum used to throw while converting the weights
  to the variable dtype (float32_ref).
  """
  batch = initializers.random_tensor_batch((2, 3, 4), batch_size=3,
                                           dtype=self.dtype)
  target = variables.get_variable('a', initializer=batch[0])
  coeffs = tf.zeros((3,), dtype=self.dtype)
  # The call itself is the assertion: it must not raise.
  riemannian.project_sum(batch, target, coeffs)
def testInv(self):
  """kr.inv of a Kronecker (TT-rank-1) matrix must match np.linalg.inv."""
  # Modernized: tf.compat.v1 API and self.evaluate instead of the deprecated
  # session-based pattern, and dtype parameterization — both for consistency
  # with the other tests in this file.
  initializer = initializers.random_matrix(((2, 3, 2), (2, 3, 2)), tt_rank=1,
                                           dtype=self.dtype)
  kron_mat = variables.get_variable('kron_mat', initializer=initializer)
  init_op = tf.compat.v1.global_variables_initializer()
  self.evaluate(init_op)
  desired = np.linalg.inv(self.evaluate(ops.full(kron_mat)))
  actual = self.evaluate(ops.full(kr.inv(kron_mat)))
  self.assertAllClose(desired, actual)
def testInv(self):
  """kr.inv on a TT-rank-1 matrix must agree with the dense numpy inverse."""
  init = initializers.random_matrix(((2, 3, 2), (2, 3, 2)), tt_rank=1,
                                    dtype=self.dtype)
  kron_mat = variables.get_variable('kron_mat', initializer=init)
  self.evaluate(tf.compat.v1.global_variables_initializer())
  dense = self.evaluate(ops.full(kron_mat))
  expected = np.linalg.inv(dense)
  observed = self.evaluate(ops.full(kr.inv(kron_mat)))
  self.assertAllClose(expected, observed)
def testDet(self):
  """kr.determinant of a Kronecker matrix must match np.linalg.det."""
  # Modernized: tf.compat.v1 API and self.evaluate instead of the deprecated
  # session-based pattern, for consistency with the other tests in this file.
  initializer = initializers.random_matrix(((2, 3, 2), (2, 3, 2)), tt_rank=1,
                                           dtype=self.dtype)
  kron_mat = variables.get_variable('kron_mat', initializer=initializer)
  init_op = tf.compat.v1.global_variables_initializer()
  self.evaluate(init_op)
  desired = np.linalg.det(self.evaluate(ops.full(kron_mat)))
  actual = self.evaluate(kr.determinant(kron_mat))
  self.assertAllClose(desired, actual)
def testDet(self):
  """Batch kr.determinant must agree with tf.linalg.det on the dense form."""
  init = initializers.random_matrix_batch(((2, 3, 2), (2, 3, 2)), tt_rank=1,
                                          batch_size=3, dtype=self.dtype)
  kron_mat_batch = variables.get_variable('kron_mat_batch', initializer=init)
  self.evaluate(tf.compat.v1.global_variables_initializer())
  expected = self.evaluate(tf.linalg.det(ops.full(kron_mat_batch)))
  observed = self.evaluate(kr.determinant(kron_mat_batch))
  self.assertAllClose(expected, observed)
def testInv(self):
  """Batch kr.inv must agree with np.linalg.inv on the dense form."""
  # Modernized: tf.compat.v1 API and self.evaluate instead of the deprecated
  # session-based pattern, for consistency with the other tests in this file.
  initializer = initializers.random_matrix_batch(((2, 3, 2), (2, 3, 2)),
                                                 tt_rank=1, batch_size=3,
                                                 dtype=self.dtype)
  kron_mat_batch = variables.get_variable('kron_mat_batch',
                                          initializer=initializer)
  init_op = tf.compat.v1.global_variables_initializer()
  self.evaluate(init_op)
  desired = np.linalg.inv(self.evaluate(ops.full(kron_mat_batch)))
  actual = self.evaluate(ops.full(kr.inv(kron_mat_batch)))
  # Looser tolerance: the batched inverse accumulates more rounding error.
  self.assertAllClose(desired, actual, atol=1e-4)
def testDet(self):
  """Batch kr.determinant must agree with tf.linalg.det on the dense form."""
  # Modernized: tf.matrix_determinant was deprecated/removed in favor of
  # tf.linalg.det; session-based code replaced with self.evaluate; dtype
  # parameterization added — all for consistency with the sibling batch test.
  initializer = initializers.random_matrix_batch(((2, 3, 2), (2, 3, 2)),
                                                 tt_rank=1, batch_size=3,
                                                 dtype=self.dtype)
  kron_mat_batch = variables.get_variable('kron_mat_batch',
                                          initializer=initializer)
  init_op = tf.compat.v1.global_variables_initializer()
  self.evaluate(init_op)
  desired = self.evaluate(tf.linalg.det(ops.full(kron_mat_batch)))
  actual = self.evaluate(kr.determinant(kron_mat_batch))
  self.assertAllClose(desired, actual)
def testAssign(self):
  """variables.assign must overwrite the TT variable's value in-place."""
  # Modernized: tf.compat.v1 API and self.evaluate instead of the deprecated
  # session-based pattern. The initial value is captured BEFORE building the
  # assign op so the comparison is valid in eager mode as well.
  old_init = initializers.random_tensor([2, 3, 2], tt_rank=2)
  tt = variables.get_variable('tt', initializer=old_init)
  new_init = initializers.random_tensor([2, 3, 2], tt_rank=2)
  self.evaluate(tf.compat.v1.global_variables_initializer())
  init_value = self.evaluate(ops.full(tt))
  assigner = variables.assign(tt, new_init)
  assigner_value = self.evaluate(ops.full(assigner))
  after_value = self.evaluate(ops.full(tt))
  # The assign op's result and the variable's new value must coincide.
  self.assertAllClose(assigner_value, after_value)
  # Assert that the value actually changed:
  abs_diff = np.linalg.norm((init_value - after_value).flatten())
  rel_diff = abs_diff / np.linalg.norm((init_value).flatten())
  self.assertGreater(rel_diff, 0.2)
def testCholesky(self):
  """kr.cholesky of a Kronecker SPD matrix must match np.linalg.cholesky."""
  # Modernized: tf.compat.v1 API and self.evaluate instead of the deprecated
  # session-based pattern, for consistency with the other tests in this file.
  np.random.seed(8)
  # Generate two symmetric positive-definite TT-cores: K_i = L_i L_i^T with
  # L_i lower-triangular, so K = kron(K_1, K_2) is SPD.
  L_1 = np.tril(np.random.normal(scale=2., size=(2, 2)))
  L_2 = np.tril(np.random.normal(scale=2., size=(3, 3)))
  K_1 = L_1.dot(L_1.T)
  K_2 = L_2.dot(L_2.T)
  K = np.kron(K_1, K_2)
  initializer = TensorTrain([K_1[None, :, :, None], K_2[None, :, :, None]],
                            tt_ranks=7 * [1])
  kron_mat = variables.get_variable('kron_mat', initializer=initializer)
  init_op = tf.compat.v1.global_variables_initializer()
  self.evaluate(init_op)
  desired = np.linalg.cholesky(K)
  actual = self.evaluate(ops.full(kr.cholesky(kron_mat)))
  self.assertAllClose(desired, actual)
def testAssign(self):
  """variables.assign must overwrite the TT variable's value in-place."""
  old_init = initializers.random_tensor([2, 3, 2], tt_rank=2,
                                        dtype=self.dtype)
  tt = variables.get_variable('tt', initializer=old_init)
  new_init = initializers.random_tensor([2, 3, 2], tt_rank=2,
                                        dtype=self.dtype)
  self.evaluate(tf.compat.v1.global_variables_initializer())
  before = self.evaluate(ops.full(tt))
  assign_op = variables.assign(tt, new_init)
  assigned = self.evaluate(ops.full(assign_op))
  after = self.evaluate(ops.full(tt))
  # The assign op's result must equal the variable's value afterwards.
  self.assertAllClose(assigned, after)
  # The new value must differ substantially from the old one.
  abs_change = np.linalg.norm((before - after).flatten())
  rel_change = abs_change / np.linalg.norm(before.flatten())
  self.assertGreater(rel_change, 0.2)
def testSlogDet(self):
  """Batch slog_determinant must agree with np.linalg.slogdet."""
  # Modernized: tf.compat.v1.set_random_seed and self.evaluate instead of the
  # deprecated tf.set_random_seed / session-based pattern.
  # Seed chosen so the batch contains both negative and positive determinants.
  tf.compat.v1.set_random_seed(1)
  initializer = initializers.random_matrix_batch(((2, 3), (2, 3)), tt_rank=1,
                                                 batch_size=3,
                                                 dtype=self.dtype)
  kron_mat_batch = variables.get_variable('kron_mat_batch',
                                          initializer=initializer)
  init_op = tf.compat.v1.global_variables_initializer()
  self.evaluate(init_op)
  desired_sign, desired_det = np.linalg.slogdet(
      self.evaluate(ops.full(kron_mat_batch)))
  actual_sign, actual_det = self.evaluate(kr.slog_determinant(kron_mat_batch))
  self.assertAllEqual(desired_sign, actual_sign)
  self.assertAllClose(desired_det, actual_det)
def testCholesky(self):
  """Batch kr.cholesky must agree with np.linalg.cholesky on the dense form."""
  # Modernized: tf.compat.v1 API and self.evaluate instead of the deprecated
  # session-based pattern, for consistency with the other tests in this file.
  np.random.seed(8)
  # Generate a batch of symmetric positive-definite TT-cores:
  # K_i[b] = L_i[b] L_i[b]^T with L_i[b] lower-triangular.
  L_1 = np.tril(np.random.normal(scale=2., size=(4, 2, 2)))
  L_2 = np.tril(np.random.normal(scale=2., size=(4, 3, 3)))
  K_1 = np.einsum('ijk,ilk->ijl', L_1, L_1)
  K_2 = np.einsum('ijk,ilk->ijl', L_2, L_2)
  initializer = TensorTrainBatch(
      [K_1[:, None, :, :, None], K_2[:, None, :, :, None]],
      tt_ranks=7 * [1])
  kron_mat_batch = variables.get_variable('kron_mat_batch',
                                          initializer=initializer)
  init_op = tf.compat.v1.global_variables_initializer()
  self.evaluate(init_op)
  desired = np.linalg.cholesky(self.evaluate(ops.full(kron_mat_batch)))
  actual = self.evaluate(ops.full(kr.cholesky(kron_mat_batch)))
  self.assertAllClose(desired, actual)
def testCholesky(self):
  """kr.cholesky of a Kronecker SPD matrix must match np.linalg.cholesky."""
  np.random.seed(8)
  # Build two SPD cores K_i = L_i L_i^T (L_i lower-triangular), cast to the
  # test dtype, so K = kron(K_1, K_2) is SPD.
  factor_a = np.tril(np.random.normal(scale=2., size=(2, 2)))
  factor_a = factor_a.astype(self.dtype.as_numpy_dtype)
  factor_b = np.tril(np.random.normal(scale=2., size=(3, 3)))
  factor_b = factor_b.astype(self.dtype.as_numpy_dtype)
  core_a = factor_a.dot(factor_a.T)
  core_b = factor_b.dot(factor_b.T)
  dense = np.kron(core_a, core_b)
  init = TensorTrain([core_a[None, :, :, None], core_b[None, :, :, None]],
                     tt_ranks=7 * [1])
  kron_mat = variables.get_variable('kron_mat', initializer=init)
  self.evaluate(tf.compat.v1.global_variables_initializer())
  expected = np.linalg.cholesky(dense)
  observed = self.evaluate(ops.full(kr.cholesky(kron_mat)))
  self.assertAllClose(expected, observed, atol=1e-5, rtol=1e-5)
def testGetExistingVariable(self):
  """Retrieving TT variables must follow tf variable_scope reuse semantics.

  Checks that get_variable raises for duplicates when reuse is off, raises
  for unknown names when reuse is on, and returns the same underlying value
  when an existing variable is retrieved under reuse (even if an initializer
  is passed).
  """
  init = initializers.random_tensor([2, 3, 2], tt_rank=2)
  tt_1 = variables.get_variable('tt_1', initializer=init)
  with tf.variable_scope('test'):
    tt_2 = variables.get_variable('tt_2', initializer=init)
  with self.test_session():
    tf.global_variables_initializer().run()
    with self.assertRaises(ValueError):
      # The variable already exists and scope.reuse is False by default.
      variables.get_variable('tt_1')
    with self.assertRaises(ValueError):
      with tf.variable_scope('', reuse=True):
        # The variable doesn't exist.
        variables.get_variable('tt_3')
    with tf.variable_scope('', reuse=True):
      tt_1_copy = variables.get_variable('tt_1')
      self.assertAllClose(ops.full(tt_1).eval(), ops.full(tt_1_copy).eval())
    with tf.variable_scope('', reuse=True):
      # Again try to retrieve an existing variable, but pass an initializer
      # and check that it still works.
      tt_1_copy = variables.get_variable('tt_1', initializer=0 * init)
      self.assertAllClose(ops.full(tt_1).eval(), ops.full(tt_1_copy).eval())
    with self.assertRaises(ValueError):
      with tf.variable_scope('', reuse=True):
        # The variable is defined in a different scope
        variables.get_variable('tt_2')
    with self.assertRaises(ValueError):
      with tf.variable_scope('nottest', reuse=True):
        # The variable is defined in a different scope
        variables.get_variable('tt_2')
    with tf.variable_scope('test', reuse=True):
      tt_2_copy = variables.get_variable('tt_2')
      self.assertAllClose(ops.full(tt_2).eval(), ops.full(tt_2_copy).eval())