Ejemplo n.º 1
0
    def test_B_grad(self, random_rank4_parafac2_tensor):
        """The RLS blueprint-B update should land at a stationary point of the projected loss.

        We verify this by numerically differentiating the squared reconstruction
        error with respect to the (raveled) blueprint B at the updated value and
        checking the gradient is (numerically) zero.
        """
        X = random_rank4_parafac2_tensor.construct_tensor()
        factors = [
            random_rank4_parafac2_tensor.A,
            random_rank4_parafac2_tensor.blueprint_B,
            random_rank4_parafac2_tensor.C,
        ]
        projected_X = KruskalTensor(factors).construct_tensor()

        decomposition = Parafac2Tensor.random_init(
            sizes=random_rank4_parafac2_tensor.shape,
            rank=random_rank4_parafac2_tensor.rank
        )

        sub_problem = Parafac2RLS()
        sub_problem.update_decomposition(
            X, decomposition, projected_X, should_update_projections=False
        )
        new_B = decomposition.blueprint_B

        def loss(flat_B):
            # Squared Frobenius reconstruction error as a function of B alone.
            B = flat_B.reshape(decomposition.blueprint_B.shape)
            estimated = np.einsum(
                'ir, jr, kr -> ijk', decomposition.A, B, decomposition.C
            )
            return np.linalg.norm(estimated - projected_X)**2

        step = np.sqrt(np.finfo(float).eps)
        gradient = approx_fprime(new_B.ravel(), loss, epsilon=step)
        assert np.allclose(gradient, 0, atol=1e-4, rtol=1e-4)
Ejemplo n.º 2
0
    def test_minimum(self, random_rank4_ktensor, **kwargs):
        """After each mode's sub-problem update, the factor matrix should be a minimiser.

        Perturbing the updated factor matrix with 1%-10% relative noise must not
        decrease the squared reconstruction loss.
        """
        X = random_rank4_ktensor.construct_tensor()
        wrong_decomposition = KruskalTensor.random_init(
            random_rank4_ktensor.shape,
            rank=random_rank4_ktensor.rank
        )

        for mode in range(3):
            sub_problem = self.SubProblem(mode=mode, **kwargs)
            sub_problem.update_decomposition(X, wrong_decomposition)
            factor_matrix = wrong_decomposition.factor_matrices[mode]

            def loss(x):
                # Squared Frobenius reconstruction error with this mode's
                # factor matrix replaced by x.
                factor_matrices = list(wrong_decomposition.factor_matrices)
                factor_matrices[mode] = x
                estimated = np.einsum('ir, jr, kr -> ijk', *factor_matrices)
                return np.linalg.norm(estimated - X)**2

            # The loss at the updated factor is fixed for this mode; compute once.
            baseline_loss = loss(factor_matrix)

            for noise in (level / 100 for level in range(1, 11)):
                factor_matrix_perturbed = self.perturb_factor_matrix(factor_matrix, noise)
                assert baseline_loss <= loss(factor_matrix_perturbed)
Ejemplo n.º 3
0
    def check_gradient(self, decomposition, **kwargs):
        """For every mode, the sub-problem update should zero the loss gradient.

        The gradient of the squared reconstruction error with respect to each
        mode's (raveled) factor matrix is estimated numerically and, after
        normalising by the loss value, asserted to be close to zero.
        """
        X = decomposition.construct_tensor()
        wrong_decomposition = KruskalTensor.random_init(
            decomposition.shape,
            rank=decomposition.rank,
            random_method='uniform'
        )
        num_modes = len(decomposition.factor_matrices)

        # Build a pattern such as 'ir, jr, kr -> ijk' for num_modes modes.
        letters = [chr(ord('i') + mode) for mode in range(num_modes)]
        einsum_pattern = ', '.join(f'{l}r' for l in letters) + ' -> ' + ''.join(letters)

        for mode in range(num_modes):
            rls = self.SubProblem(mode=mode, **kwargs)
            rls.update_decomposition(X, wrong_decomposition)

            def loss(x):
                # Squared Frobenius reconstruction error with this mode's
                # factor matrix replaced by (reshaped) x.
                factor_matrices = list(wrong_decomposition.factor_matrices)
                factor_matrices[mode] = x.reshape(factor_matrices[mode].shape)

                estimated = np.einsum(einsum_pattern, *factor_matrices)
                return np.linalg.norm(estimated - X)**2

            raveled_fm = wrong_decomposition.factor_matrices[mode].ravel()
            deriv = approx_fprime(
                raveled_fm,
                loss,
                epsilon=np.sqrt(np.finfo(float).eps)
            )

            # Normalise by the loss so the tolerance is scale-independent.
            np.testing.assert_allclose(0, deriv / loss(raveled_fm), atol=1e-3, rtol=1e-3)
Ejemplo n.º 4
0
    def test_projected_X(self, random_rank4_parafac2_tensor):
        """Updating with should_update_projections=True must yield orthonormal projections."""
        slices = random_rank4_parafac2_tensor.construct_slices()

        ktensor = KruskalTensor([
            random_rank4_parafac2_tensor.A,
            random_rank4_parafac2_tensor.blueprint_B,
            random_rank4_parafac2_tensor.C,
        ])
        projected_X = ktensor.construct_tensor()

        decomposition = Parafac2Tensor.random_init(
            sizes=random_rank4_parafac2_tensor.shape,
            rank=random_rank4_parafac2_tensor.rank
        )

        sub_problem = Parafac2RLS()
        sub_problem.update_decomposition(
            slices, decomposition, projected_X, should_update_projections=True
        )

        # Every projection matrix P must have orthonormal columns: P^T P = I.
        for P in decomposition.projection_matrices:
            identity = np.identity(P.shape[1])
            assert np.allclose(P.T @ P, identity)
Ejemplo n.º 5
0
 def random_nonnegative_rank4_ktensor(self):
     """Return a random rank-4 Kruskal tensor of shape 30x40x50 with nonnegative (uniform) factors."""
     # Pass rank as a keyword for consistency with random_rank4_ktensor.
     return KruskalTensor.random_init([30, 40, 50], rank=4, random_method='uniform')
Ejemplo n.º 6
0
 def random_rank4_ktensor(self):
     """Return a random rank-4 Kruskal tensor of shape 30x40x50."""
     sizes = [30, 40, 50]
     return KruskalTensor.random_init(sizes, rank=4)