Example #1
def testComputationMethods():
    import numpy as np
    from utils import testSignal, sliceData, sliceY
    irf = (0, 0, 0, 0, 0, 1, 0, 0, 0, 0)
    offset = 0
    # offset: 0->lag-by-5, -5->no-lag, -9->lead-by-4
    X, Y, st, A, B = testSignal(nTrl=1,
                                nSamp=500,
                                d=1,
                                nE=1,
                                nY=1,
                                isi=10,
                                tau=10,
                                offset=offset,
                                irf=irf,
                                noise2signal=0)
    #plt.subplot(311);plt.plot(X[0,:,0],label='X');plt.plot(Y[0,:,0,0],label='Y');plt.title("offset={}, irf={}".format(offset,irf));plt.legend()
    print("A={}\nB={}".format(A, B))
    print("X{}={}".format(X.shape, X[:30, np.argmax(np.abs(A))]))
    print("Y{}={}".format(Y.shape, Y[0, :30, 0]))
    Y_true = Y[..., 0:1, :]

    # other measures....
    from updateSummaryStatistics import updateSummaryStatistics, autocov, cov, crossautocov, updateCyy, updateCxx, updateCxy
    Cxdtxdt = autocov(X, tau=18)  # (tau[0],dx,tau[1],dx)
    Cxdtydt = crossautocov(X, Y[:, :, 0:1], tau=18)  # (tau[0],dx,tau[1],dy)
    Cydxdt = crossautocov(Y[:, :, 0:1], X, tau=[1, 18])  # (tauy,dy,taux,dx)
    Cydtydt = np.stack([
        crossautocov(Y[:, :, yi:yi + 1], Y[:, :, yi:yi + 1], tau=18)
        for yi in range(Y.shape[-1])
    ], 0)  # (nY,tau,d,tau,d)
    Cxxdd = cov(X)

    # compare
    Cxx = updateCxx(None, X, None)
    Cxy = updateCxy(None, X, Y[:, :, 0:1, np.newaxis], None,
                    tau=18)  #(1,dy,tau,dx)
    Cyy = updateCyy(None, Y[:, :, :, np.newaxis], None,
                    tau=18)  #(nY,yd,tau,yd,tau)

    print('Cxx-Cxxdd={}'.format(np.max(np.abs(Cxxdd - Cxx).ravel())))
    print('Cxy-Cydxdt={}'.format(np.max(np.abs(Cxy - Cydxdt).ravel())))
    print('Cyy-Cydtydt={}'.format(np.max(np.abs(Cyy.ravel() -
                                                Cydtydt.ravel()))))

    # test centering
    cX = X - np.mean(X, (0, 1))
    Cxx = updateCxx(None, X, None, center=True)
    cCxx = updateCxx(None, cX, None, center=False)
    print("Cxx(center) - cCxx={}".format(np.max(np.abs(Cxx - cCxx).ravel())))

    Cxy = updateCxy(None, X, Y, None, tau=3, center=True)
    cCxy = updateCxy(None, cX, Y, None, tau=3, center=False)
    print("Cxy(center) - cCxy={}".format(np.max(np.abs(Cxy - cCxy).ravel())))
Example #2
def testdataset(fn, **args):
    ''' a conforming toy dataset loader '''
    from utils import testSignal
    fs = 10
    X, Y, st, A, B = testSignal(**args)
    # make coords array for the meta-info about the dimensions of X
    coords = [None]*X.ndim
    coords[0] = {'name':'trial'}
    coords[1] = {'name':'time', 'unit':'ms',
                 'coords':[i*1000/fs for i in range(X.shape[1])],
                 'fs':fs}
    coords[2] = {'name':'channel','coords':None}
    return (X, Y, coords)
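
A short usage sketch for the coords meta-info returned above; the keyword arguments are borrowed from the other examples on this page and assumed to be accepted by testSignal:

X, Y, coords = testdataset(None, nTrl=2, nSamp=100, d=2, nE=1, nY=5)
fs = coords[1]['fs']            # sampling rate attached to the time dimension
time_ms = coords[1]['coords']   # per-sample time stamps in ms
print("X{} @ {}Hz, first samples at {}ms".format(X.shape, fs, time_ms[:3]))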
Example #3
def testcases():
    #   Fe = (nM, nTrl, nEpoch, nE) similarity score for each event type for each stimulus
    #   Ye = (nTrl, nEpoch, nY, nE) indicator of which events occurred for which outputs
    import numpy as np
    from scoreOutput import scoreOutput
    nE = 2
    nEpoch = 100
    nTrl = 30
    nY = 20
    nM = 20
    Fe = np.random.standard_normal((nM, nTrl, nEpoch, nE))
    Ye = np.random.standard_normal((nTrl, nEpoch, nY, nE))
    print("Fe={}".format(Fe.shape))
    print("Ye={}".format(Ye.shape))
    Fy = scoreOutput(Fe, Ye)  # (nM,nTrl,nEp,nY)
    print("Fy={}".format(Fy.shape))
    import matplotlib.pyplot as plt
    sFy = np.cumsum(Fy, axis=-2)
    plt.clf()
    plt.plot(sFy[0, 0, :, :])
    plt.xlabel('epoch')
    plt.ylabel('output')
    plt.show()

    # more complex example with actual signal/noise
    from utils import testSignal
    from scoreOutput import scoreOutput, plot_outputscore, convWX, convYR, convXYR
    from scoreStimulus import scoreStimulus
    from decodingSupervised import decodingSupervised
    from normalizeOutputScores import normalizeOutputScores
    import numpy as np
    irf = (1, 1, -1, -1, 0, 0, 0, 0, 0, 0)
    X, Y, st, W, R = testSignal(nTrl=1,
                                nSamp=1000,
                                d=1,
                                nE=1,
                                nY=10,
                                isi=2,
                                irf=irf,
                                noise2signal=0)

    plot_outputscore(X[0, ...], Y[0, :, 0:3, :], W, R)

    # add a correlated output
    Y[:, :, 1, :] = Y[:, :, 0, :] * .5
    plot_outputscore(X[0, ...], Y[0, :, 0:3, :], W, R)
Example #4
def testcase():
    import numpy as np
    from utils import testNoSignal, testSignal, sliceData, sliceY
    #from multipleCCA import *
    if False:
        X, Y, st = testNoSignal()
    else:
        X, Y, st, A, B = testSignal(tau=10, noise2signal=10)
    # TODO[]: summary stats directly without first slicing
    Y_true = Y[:, :, 0:1, :] if Y.ndim > 3 else Y[:, 0:1, :]  # N.B. the 0:1 slice keeps the singleton output dim
    from updateSummaryStatistics import updateSummaryStatistics, plot_summary_statistics
    Cxx, Cxy, Cyy = updateSummaryStatistics(X, Y_true, tau=10)

    #plot_summary_statistics(Cxx,Cxy,Cyy)

    # single supervised training
    from multipleCCA import multipleCCA, plot_multicca_solution
    J, w, r = multipleCCA(Cxx, Cxy, Cyy, rcond=-.3, symetric=True)  # rank-reduction / regularization controlled by rcond
    
    plot_multicca_solution(w, r)

    # apply to the data
    from scoreStimulus import scoreStimulus
    Fe = scoreStimulus(X, w, r)
    import matplotlib.pyplot as plt
    plt.clf()
    plt.plot(np.einsum("Tsd,d->s", X, w.ravel()), 'b', label="Xw")
    plt.plot(Y_true.ravel(), 'g', label="Y")
    plt.plot(Fe.ravel() / np.max(Fe.ravel()) / 2, 'r', label="Fe")
    plt.legend()
    from scoreOutput import scoreOutput
    print("Fe={}".format(Fe.shape))
    print("Y={}".format(Y.shape))
    Fy = scoreOutput(Fe, Y, r) # (nM,nTrl,nEp,nY)
    print("Fy={}".format(Fy.shape))
    sFy = np.cumsum(Fy, -2)
    plt.clf()
    plt.plot(sFy[0, 0, :, :])
    plt.xlabel('epoch')
    plt.ylabel('output')
    plt.show()

    from decodingSupervised import decodingSupervised
    decodingSupervised(Fy)
    from decodingCurveSupervised import decodingCurveSupervised
    decodingCurveSupervised(Fy)
Example #5
def testCases():
    import numpy as np
    from utils import testSignal, sliceData, sliceY
    from updateSummaryStatistics import updateSummaryStatistics, plot_summary_statistics, plot_erp
    import matplotlib.pyplot as plt

    irf = (0, 0, 0, 0, 0, 0, 0, 0, 0, 1)
    offset = 0
    # X->lag-by-10
    X, Y, st, A, B = testSignal(nTrl=1,
                                nSamp=500,
                                d=1,
                                nE=1,
                                nY=1,
                                isi=10,
                                tau=10,
                                offset=offset,
                                irf=irf,
                                noise2signal=0)
    print("A={}\nB={}".format(A, B))
    print("X{}={}".format(X.shape, X[:30, np.argmax(np.abs(A))]))
    print("Y{}={}".format(Y.shape, Y[0, :30, 0]))

    tau = 10
    uss_offset = 0
    Cxx, Cxy, Cyy = updateSummaryStatistics(X,
                                            Y,
                                            None,
                                            tau=tau,
                                            offset=uss_offset)
    print("Cxx={} Cxy={} Cyy={}".format(Cxx.shape, Cxy.shape, Cyy.shape))
    plt.figure(1)
    plot_summary_statistics(Cxx, Cxy, Cyy)
    plt.show()

    # leading X
    irf = (1, 0, 0, 0, 0, 0, 0, 0, 0, 0)
    offset = -9
    # X leads by 9
    X, Y, st, A, B = testSignal(nTrl=1,
                                nSamp=5000,
                                d=1,
                                nE=1,
                                nY=1,
                                isi=10,
                                tau=10,
                                offset=offset,
                                irf=irf,
                                noise2signal=0)
    plt.figure(0)
    plt.clf()
    plt.plot(X[0, :, 0], label='X')
    plt.plot(Y[0, :, 0, 0], label='Y')
    plt.title("offset={}, irf={}".format(offset, irf))
    plt.legend()

    # no-shift in analysis window
    tau = 10
    uss_offset = 0
    Cxx, Cxy, Cyy = updateSummaryStatistics(X,
                                            Y,
                                            None,
                                            tau=tau,
                                            offset=uss_offset)
    print("Cxx={} Cxy={} Cyy={}".format(Cxx.shape, Cxy.shape, Cyy.shape))
    plt.figure(1)
    plt.clf()
    plot_summary_statistics(Cxx, Cxy, Cyy)
    plt.show()

    # shifted analysis window
    tau = 10
    uss_offset = -9
    Cxx, Cxy, Cyy = updateSummaryStatistics(X,
                                            Y,
                                            None,
                                            tau=tau,
                                            offset=uss_offset)
    print("Cxx={} Cxy={} Cyy={}".format(Cxx.shape, Cxy.shape, Cyy.shape))
    plt.figure(2)
    plt.clf()
    plot_summary_statistics(Cxx, Cxy, Cyy)
    plt.show()

    plt.figure(3)
    plt.clf()
    plot_erp(Cxy)
    plt.show()
Example #6
def testSlicedvsContinuous():
    import numpy as np
    from utils import testSignal, sliceData, sliceY
    from updateSummaryStatistics import updateSummaryStatistics, plot_summary_statistics
    irf = (0, 0, 0, 0, 0, 1, 0, 0, 0, 0)
    offset = 0
    # offset: 0->lag-by-5, -5->no-lag, -9->lead-by-4
    X, Y, st, A, B = testSignal(nTrl=1,
                                nSamp=500,
                                d=1,
                                nE=1,
                                nY=1,
                                isi=10,
                                tau=10,
                                offset=offset,
                                irf=irf,
                                noise2signal=0)
    #plt.subplot(311);plt.plot(X[0,:,0],label='X');plt.plot(Y[0,:,0,0],label='Y');plt.title("offset={}, irf={}".format(offset,irf));plt.legend()
    print("A={}\nB={}".format(A, B))
    print("X{}={}".format(X.shape, X[:30, np.argmax(np.abs(A))]))
    print("Y{}={}".format(Y.shape, Y[0, :30, 0]))
    Y_true = Y[..., 0:1, :]

    # compute the reference statistics directly from the continuous data
    tau = 10
    Cxx1, Cxy1, Cyy1 = updateSummaryStatistics(X, Y_true, None, tau=tau)

    # slice then compute
    stimTimes = st  # assume the st returned by testSignal holds the stimulus event times
    Xe = sliceData(X, stimTimes, tau=tau)
    Ye = sliceY(Y, stimTimes)
    Ye_true = Ye[..., 0:1, :]
    print('Xe{}={}\nYe{}={}\nst{}={}'.format(Xe.shape, Xe[:5, :, 0], Ye.shape,
                                             Ye[:5, 0, 0], stimTimes.shape,
                                             stimTimes[:5]))
    Cxx, Cxy, Cyy = updateSummaryStatistics(Xe,
                                            Ye_true,
                                            stimTimes,
                                            tau=Xe.shape[-2])
    print("Cxx={} Cxy={} Cyy={}".format(Cxx.shape, Cxy.shape, Cyy.shape))

    plot_summary_statistics(Cxx, Cxy, Cyy)

    plot_Cxy(Cxy)  # N.B. assumes a plot_Cxy helper is available in scope

    dCxx = np.max((np.abs(Cxx / np.trace(Cxx) - Cxx1 / np.trace(Cxx1)) /
                   (np.abs(Cxx / np.trace(Cxx)) + 1e-5)).ravel())
    dCxy = np.max((np.abs(Cxy - Cxy1) / (np.abs(Cxy) + 1e-5)).ravel())
    dCyy = np.max((np.abs(Cyy - Cyy1) / (np.abs(Cyy) + 1e-5)).ravel())
    print("dCxx= {} dCxy={} dCyy={}".format(dCxx, dCxy, dCyy))

    # timing tests
    from timeit import timeit
    dur = timeit(
        lambda: updateSummaryStatistics(Xe, Ye_true, stimTimes, tau=10),
        number=1000,
        globals=globals())
    print("Sliced={}s".format(dur / 1000))

    def slicenuss(X, Y, stimTimes, tau):
        Xe = sliceData(X, stimTimes, tau)
        Ye = sliceY(Y, stimTimes)
        Cxx, Cxy, Cyy = updateSummaryStatistics(Xe, Ye, stimTimes, tau=tau)

    dur = timeit(lambda: slicenuss(X, Y_true, stimTimes, tau=10),
                 number=1000,
                 globals=globals())
    print("Slice+USS={}s".format(dur / 1000))

    dur = timeit(lambda: updateSummaryStatistics(X, Y_true, None, tau=10),
                 number=1000,
                 globals=globals())
    print("Raw={}s".format(dur / 1000))
Example #7
def testLeadLag():
    import numpy as np
    from utils import testSignal
    from model_fitting import MultiCCA
    from decodingCurveSupervised import decodingCurveSupervised
    import matplotlib.pyplot as plt

    irf = (0, 0, 0, 0, 0, 0, 0, 0, 0, 1)
    offset = 0
    # X->lag-by-10
    X, Y, st, A, B = testSignal(nTrl=1,
                                nSamp=500,
                                d=10,
                                nE=1,
                                nY=30,
                                isi=10,
                                tau=10,
                                offset=offset,
                                irf=irf,
                                noise2signal=0)

    # reference case: lagged-response test
    evtlabs = None
    tau = 10
    cca_offset = 0
    cca = MultiCCA(evtlabs=evtlabs, tau=tau, offset=cca_offset)
    scores = cca.cv_fit(X, Y)
    Fy = scores['estimator']
    (_) = decodingCurveSupervised(Fy)

    # leading X
    irf = (1, 0, 0, 0, 0, 0, 0, 0, 0, 0)
    offset = -9
    # X leads by 9
    X, Y, st, A, B = testSignal(nTrl=1,
                                nSamp=5000,
                                d=10,
                                nE=1,
                                nY=30,
                                isi=10,
                                tau=10,
                                offset=offset,
                                irf=irf,
                                noise2signal=0)
    plt.figure(0)
    plt.clf()
    plt.plot(X[0, :, 0], label='X')
    plt.plot(Y[0, :, 0, 0], label='Y')
    plt.title("offset={}, irf={}".format(offset, irf))
    plt.legend()

    # no-shift in analysis window
    evtlabs = None
    tau = 10
    cca_offset = 0
    cca = MultiCCA(evtlabs=evtlabs, tau=tau, offset=cca_offset)
    scores = cca.cv_fit(X, Y)
    Fy = scores['estimator']
    (_) = decodingCurveSupervised(Fy)

    # shifted analysis window
    evtlabs = None
    tau = 20
    cca_offset = -9
    cca = MultiCCA(evtlabs=evtlabs, tau=tau, offset=cca_offset)
    scores = cca.cv_fit(X, Y)
    Fy = scores['estimator']
    (_) = decodingCurveSupervised(Fy)

    plt.figure(1)
    plt.clf()
    plot_model_weights(cca)  # N.B. assumes a plot_model_weights helper for visualizing the fitted weights
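
Pulling the pieces together, a condensed end-to-end sketch in the style of the examples above, assuming MultiCCA's default event labelling and the same cv_fit / 'estimator' interface used in Example #7:

from utils import testSignal
from model_fitting import MultiCCA
from decodingCurveSupervised import decodingCurveSupervised

# simulate a small multi-trial dataset with a lagged impulse response
X, Y, st, A, B = testSignal(nTrl=5, nSamp=500, d=4, nE=1, nY=10,
                            isi=10, tau=10, noise2signal=1)
cca = MultiCCA(tau=10)              # CCA model over a 10-sample response window
scores = cca.cv_fit(X, Y)           # cross-validated fit
Fy = scores['estimator']            # per-output scores, as in Example #7
(_) = decodingCurveSupervised(Fy)   # decoding performance vs. amount of data seen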