Example #1
def test_missingness(algname):

    # Random data tensor.
    shape = (15, 16, 17)
    rank = 3

    if algname == "mcp_als":
        X = tt.randn_ktensor(shape, rank=rank, random_state=data_seed).full()
    elif algname == "ncp_hals":
        X = tt.rand_ktensor(shape, rank=rank, random_state=data_seed).full()

    # Random missingness mask.
    mask = np.random.binomial(1, .5, size=X.shape).astype(bool)

    # Create second tensor with corrupted entries.
    Y = X.copy()
    Y[~mask] = 999.

    # Algorithm fitting options.
    options = dict(rank=rank,
                   mask=mask,
                   verbose=False,
                   tol=1e-6,
                   random_state=alg_seed)

    # Fit decompositions for both X and Y.
    resultX = getattr(tt, algname)(X, **options)
    resultY = getattr(tt, algname)(Y, **options)

    # Test that learning curves are identical.
    assert np.allclose(resultX.obj_hist, resultY.obj_hist)

    # Test that final factors are identical.
    for uX, uY in zip(resultX.factors, resultY.factors):
        assert np.allclose(uX, uY)
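
These tests come from a pytest suite that supplies algname via parametrization and defines the seeds at module level. Below is a minimal sketch of that scaffolding, assuming pytest; the seed values, the tolerance value, and the parametrize list are illustrative guesses, not taken from the original file:

import numpy as np
import pytest
import tensortools as tt

# Illustrative module-level constants (values are assumptions).
data_seed = 0             # seeds the synthetic data tensor
alg_seed = 1              # seeds the optimizer's random initialization
obj_decreased_tol = 1e-5  # slack allowed when checking that the objective decreases

# Hypothetical parametrization: run the test once per mask-aware algorithm.
@pytest.mark.parametrize("algname", ["mcp_als", "ncp_hals"])
def test_missingness(algname):
    ...  # body as in Example #1 above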
Example #2
def test_objective_decreases(algname, shape, rank):

    # Generate data. If algorithm is made for nonnegative tensor decomposition
    # then generate nonnegative data.
    if algname in ['ncp_hals', 'ncp_bcd']:
        X = tt.rand_ktensor(shape, rank=rank, random_state=data_seed).full()
    else:
        X = tt.randn_ktensor(shape, rank=rank, random_state=data_seed).full()

    # Fit model.
    f = getattr(tt, algname)
    result = f(X, rank=rank, verbose=False, tol=1e-6, random_state=alg_seed)

    # Test that objective function monotonically decreases.
    assert np.all(np.diff(result.obj_hist) < obj_decreased_tol)
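
The final assertion allows each step of the learning curve to grow by at most obj_decreased_tol, a small positive slack that tolerates floating-point noise. A minimal illustration with a made-up learning curve (the tolerance value is an assumption):

import numpy as np

obj_hist = [1.00, 0.40, 0.25, 0.21, 0.208]  # made-up learning curve
steps = np.diff(obj_hist)                   # consecutive changes; negative when improving
obj_decreased_tol = 1e-5                    # assumed small positive slack
assert np.all(steps < obj_decreased_tol)    # passes: every step decreases (within tolerance)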
Example #3
def test_objective_decreases(algname, shape, rank):

    # Generate data. If algorithm is made for nonnegative tensor decomposition
    # then generate nonnegative data.
    if algname in ['ncp_hals', 'ncp_bcd']:
        X = tt.rand_ktensor(shape, rank=rank, random_state=data_seed).full()
    else:
        X = tt.randn_ktensor(shape, rank=rank, random_state=data_seed).full()

    # Algorithm fitting options.
    options = dict(rank=rank, verbose=False, tol=1e-6, random_state=alg_seed)

    # Add special options for particular algorithms.
    if algname == 'mcp_als':
        options['mask'] = np.ones_like(X).astype(bool)

    # Fit model.
    result = getattr(tt, algname)(X, **options)

    # Test that objective function monotonically decreases.
    assert np.all(np.diff(result.obj_hist) < obj_decreased_tol)
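
Beyond testing, the mask argument of mcp_als supports cross-validation by holding out entries: the decomposition is fit only where the mask is True, and the held-out entries measure generalization. A sketch under those assumptions (it also assumes result.factors is a KTensor with a .full() method, as the iteration in Example #1 suggests):

import numpy as np
import tensortools as tt

# Low-rank tensor plus noise.
shape, rank = (15, 16, 17), 3
X = tt.randn_ktensor(shape, rank=rank, random_state=0).full()
X = X + 0.1 * np.random.randn(*shape)

# Hold out roughly 20% of entries; mcp_als only fits where mask is True.
mask = np.random.rand(*shape) > 0.2
result = tt.mcp_als(X, rank=rank, mask=mask, verbose=False, tol=1e-6)

# Relative reconstruction error on the held-out entries.
Xhat = result.factors.full()
test_err = np.linalg.norm((Xhat - X)[~mask]) / np.linalg.norm(X[~mask])
print("held-out relative error:", test_err)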
Example #4
"""
Created on Thu Feb 14 14:24:14 2019
===============================================================
Examples of CPD
===============================================================
 
@author: Yongjie
"""

import tensortools as tt
import numpy as np
import matplotlib.pyplot as plt

# Make dataset.
I, J, K, R = 100, 100, 100, 4
X = tt.rand_ktensor((I, J, K), rank=R)

# Add noise, clipping at zero to keep the data nonnegative.
Xn = np.maximum(0, X.full() + .1 * np.random.randn(I, J, K))

# Fit an ensemble of tensor decompositions with each method;
# cp_als is unconstrained, while ncp_bcd and ncp_hals enforce nonnegativity.
methods = (
    'cp_als',
    'ncp_bcd',
    'ncp_hals',
)

ensembles = {}
for m in methods:
    ensembles[m] = tt.Ensemble(fit_method=m, fit_options=dict(tol=1e-4))
    ensembles[m].fit(Xn, ranks=range(1, 9), replicates=3)
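
A natural follow-up is to compare the fitted ensembles across ranks. tensortools ships plotting helpers for this (tt.plot_objective and tt.plot_similarity); exact keyword support may vary by version, so treat this as a sketch:

fig, axes = plt.subplots(1, 2, figsize=(8, 3))
for m in methods:
    tt.plot_objective(ensembles[m], ax=axes[0])   # reconstruction error vs. rank
    tt.plot_similarity(ensembles[m], ax=axes[1])  # similarity across replicates vs. rank
axes[0].set_title('objective')
axes[1].set_title('similarity')
fig.tight_layout()
plt.show()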