Example #1
0
def ctorTests(verbose):
    """Exercise the ttensor constructor.

    Builds a 2x3x4 core tensor from 0..23 and wraps it with three factor
    matrices, then prints the resulting ttensor and its shape.

    Parameters
    ----------
    verbose :
        Accepted for interface compatibility; not used by this test.
    """
    arr = numpy.arange(24).reshape([2, 3, 4])
    A = numpy.array([[1, 2], [3, 4], [5, 6]])
    B = numpy.array([[1, 2, 3], [4, 5, 6]])
    C = numpy.array([[1, 2, 3, 4]])

    obj = ttensor.ttensor(tensor.tensor(arr), [A, B, C])
    # print() with a single argument behaves identically under
    # Python 2 and Python 3 (the py2 `print x` statement does not parse in py3).
    print(obj)
    print(obj.shape)
Example #2
0
def ctorTests(verbose):
    """Exercise the ttensor constructor.

    Builds a 2x3x4 core tensor from 0..23 and wraps it with three factor
    matrices, then prints the resulting ttensor and its shape.

    Parameters
    ----------
    verbose :
        Accepted for interface compatibility; not used by this test.
    """
    # Trailing semicolons removed; they are redundant in Python.
    arr = numpy.arange(24).reshape([2, 3, 4])
    A = numpy.array([[1, 2], [3, 4], [5, 6]])
    B = numpy.array([[1, 2, 3], [4, 5, 6]])
    C = numpy.array([[1, 2, 3, 4]])

    obj = ttensor.ttensor(tensor.tensor(arr), [A, B, C])
    # print() with a single argument behaves identically under
    # Python 2 and Python 3.
    print(obj)
    print(obj.shape)
Example #3
0
def test1():
    """Smoke-test the DTA routine.

    Runs DTA twice: first on a random dense tensor assembled from a random
    ttensor, then on a small hand-constructed ttensor, printing the
    decomposition (a) and the updated covariance matrices (b).
    """
    # Random 10x30x40 tensor built from a 2x3x4 core and random factors.
    A = ttensor.ttensor(tensor.tenrands([2, 3, 4]),
                        [numpy.random.random([10, 2]),
                         numpy.random.random([30, 3]),
                         numpy.random.random([40, 4])]).totensor()

    [a, b] = DTA.DTA(A, [1, 2, 3])

    # Deterministic small example with hand-picked factor matrices.
    Core = numpy.arange(24).reshape([2, 3, 4])
    u1 = numpy.array([[1, 2], [3, 4]])
    u2 = numpy.array([[0, 1, 0], [1, 0, 1], [1, 1, 1]])
    u3 = numpy.array([[1, 1, 1, 1], [1, 2, 3, 4], [1, 1, 1, 1]])
    tt = ttensor.ttensor(tensor.tensor(Core), [u1, u2, u3])

    # print() calls are valid in both Python 2 and Python 3
    # (the py2 `print x` statement does not parse in py3).
    print(tt)
    [a, b] = DTA.DTA(tt.totensor(), [1, 2, 3])
    print(a)
    print(a.totensor())
    print(b)
Example #4
0
def test1():
    """Smoke-test the DTA routine.

    Runs DTA twice: first on a random dense tensor assembled from a random
    ttensor, then on a small hand-constructed ttensor, printing the
    decomposition (a) and the updated covariance matrices (b).
    """
    # Random 10x30x40 tensor built from a 2x3x4 core and random factors.
    A = ttensor.ttensor(tensor.tenrands([2, 3, 4]), [
        numpy.random.random([10, 2]),
        numpy.random.random([30, 3]),
        numpy.random.random([40, 4])
    ]).totensor()

    [a, b] = DTA.DTA(A, [1, 2, 3])

    # Deterministic small example with hand-picked factor matrices.
    Core = numpy.arange(24).reshape([2, 3, 4])
    u1 = numpy.array([[1, 2], [3, 4]])
    u2 = numpy.array([[0, 1, 0], [1, 0, 1], [1, 1, 1]])
    u3 = numpy.array([[1, 1, 1, 1], [1, 2, 3, 4], [1, 1, 1, 1]])
    tt = ttensor.ttensor(tensor.tensor(Core), [u1, u2, u3])

    # print() calls are valid in both Python 2 and Python 3
    # (the py2 `print x` statement does not parse in py3).
    print(tt)
    [a, b] = DTA.DTA(tt.totensor(), [1, 2, 3])
    print(a)
    print(a.totensor())
    print(b)
Example #5
0
def DTA(Xnew, R, C=None, alpha=None):
    """DTA (Dynamic Tensor Analysis).

    Updates the per-mode covariance matrices with the new tensor and
    decomposes it as a ttensor.

    Parameters
    ----------
    Xnew : tensor.tensor or sptensor.sptensor
        The incoming tensor.
    R : sequence of int
        Number of eigenvectors to keep per mode (R[i] for mode i).
    C : list, optional
        Previous covariance matrices, one per mode. When omitted they are
        initialized to empty (all-zero) sparse matrices of the right shape.
    alpha : number, optional
        Forgetting factor applied to the old covariance (defaults to 1,
        i.e. no forgetting).

    Returns
    -------
    (T, Cnew) : tuple
        T is the resulting ttensor.ttensor; Cnew is the list of updated
        covariance matrices (one numpy array per mode).

    Raises
    ------
    TypeError
        If Xnew is a ttensor (not yet supported) or any other
        unsupported type.
    """
    # Number of dimensions (modes) of the input tensor.
    N = Xnew.ndims()

    # If the covariance matrices are not given, initialize all of them
    # to zero. NOTE: `is None`, not `== None` -- equality against a
    # caller-supplied sparse/array matrix would be evaluated element-wise.
    if C is None:
        dv = Xnew.shape
        C = [sparse.coo_matrix(([], ([], [])), [dv[i], dv[i]])
             for i in range(N)]

    # If the forgetting factor is not given, it is 1.
    if alpha is None:
        alpha = 1

    U = []
    Cnew = []
    for i in range(N):
        # Matricize Xnew along mode i.
        if Xnew.__class__ == tensor.tensor:
            XM = tenmat.tenmat(Xnew, [i]).tondarray()
        elif Xnew.__class__ == sptensor.sptensor:
            XM = sptenmat.sptenmat(Xnew, [i]).tosparsemat()
        elif Xnew.__class__ == ttensor.ttensor:
            raise TypeError("It is not supported yet.")
        else:
            raise TypeError(
                "1st argument must be tensor, sptensor, or ttensor")

        # Decay the old mode-i covariance and add the new contribution.
        Cnew.append(
            numpy.array(alpha * C[i] + numpy.dot(XM, XM.transpose())))

        # Keep the leading R[i] eigenvectors as the mode-i projection.
        (w, v) = eigwrapper(Cnew[i], R[i])
        U.append(numpy.array(v))

    # Project onto the subspaces ('t' = transposed factors) to get the core.
    core = Xnew.ttm(U, None, 't')
    T = ttensor.ttensor(core, U)
    return (T, Cnew)
Example #6
0
def DTA(Xnew, R, C = None, alpha = None):
    """DTA (Dynamic Tensor Analysis).

    Folds the new tensor into the running per-mode covariance matrices
    and returns its ttensor decomposition.

    Parameters
    ----------
    Xnew : tensor.tensor or sptensor.sptensor
        The incoming tensor.
    R : sequence of int
        Eigenvectors kept per mode (R[i] for mode i).
    C : list, optional
        Previous covariance matrices; all-zero sparse matrices by default.
    alpha : number, optional
        Forgetting factor for the old covariance (default 1).

    Returns
    -------
    (T, Cnew) : tuple
        The ttensor decomposition and the updated covariance matrices.

    Raises
    ------
    TypeError
        For a ttensor input (unsupported) or any non-tensor type.
    """
    # Number of dimensions (modes) of the input tensor.
    N = Xnew.ndims()

    # Default covariances: empty (zero) sparse matrices, one per mode.
    # Use `is None` rather than `== None`: equality against a supplied
    # sparse/array matrix is element-wise and would misbehave here.
    if C is None:
        dv = Xnew.shape
        C = [sparse.coo_matrix(([], ([], [])), [dv[i], dv[i]])
             for i in range(N)]

    # Default forgetting factor: keep the full old covariance.
    if alpha is None:
        alpha = 1

    U = []
    Cnew = []
    for i in range(N):
        # Matricize Xnew along mode i.
        if Xnew.__class__ == tensor.tensor:
            XM = tenmat.tenmat(Xnew, [i]).tondarray()
        elif Xnew.__class__ == sptensor.sptensor:
            XM = sptenmat.sptenmat(Xnew, [i]).tosparsemat()
        elif Xnew.__class__ == ttensor.ttensor:
            raise TypeError("It is not supported yet.")
        else:
            raise TypeError(
                "1st argument must be tensor, sptensor, or ttensor")

        # Decayed old covariance plus the new data's contribution.
        Cnew.append(
            numpy.array(alpha * C[i] + numpy.dot(XM, XM.transpose())))

        # Leading R[i] eigenvectors form the mode-i projection matrix.
        (w, v) = eigwrapper(Cnew[i], R[i])
        U.append(numpy.array(v))

    # Multiply by the transposed factors ('t') to obtain the core tensor.
    core = Xnew.ttm(U, None, 't')
    T = ttensor.ttensor(core, U)
    return (T, Cnew)