def start_test(maxlag, col1, col2, start, stop):
    """Granger-test two columns of the example neuroscan ASCII recording.

    Reads rows of the hard-coded example file between line counts
    `start` and `stop`, skipping section-header lines that begin with
    '[', collects columns `col1` and `col2` as floats, and runs
    gtest.grangercausalitytests on the two-column array.

    maxlag       : maximum lag handed to grangercausalitytests
    col1, col2   : whitespace-delimited column indices to compare
    start, stop  : first/last line counts to include
    Returns the grangercausalitytests result dict.
    """
    Xnew = []
    Ynew = []
    counter = 0
    # BUGFIX: the file handle was opened and never closed; 'with'
    # guarantees it is released even if float() raises part-way through.
    with open('/home/lelz/towlelab/examples/test.rest.neuroscanascii', 'rb') as example:
        for l in example:
            if counter < start:
                counter += 1
                continue
            counter += 1
            if counter > stop:
                break
            l = l.strip().split()
            if len(l) > 0:
                if len(l[0]) > 0:
                    # lines beginning with '[' are header/section markers
                    if l[0][0] == '[':
                        continue
                    Xnew.append(float(l[col1]))
                    Ynew.append(float(l[col2]))
    Xnew = np.array(Xnew)
    Ynew = np.array(Ynew)
    # observations in rows, the two variables in columns
    d = np.vstack((Xnew, Ynew)).T
    res = gtest.grangercausalitytests(d, maxlag, verbose=False)
    return res
def start_test_signal_gen(lag, SNR, K1, K2, col1, col2):
    """Granger-test two channels of a synthetic signal.

    Builds test data with signal_gen.signal_gen(SNR, K1, K2), stacks the
    channels selected by col1 and col2 into a two-column array, and
    returns the grangercausalitytests result at the given lag.
    """
    generated = signal_gen.signal_gen(SNR, K1, K2)
    pair = np.vstack((generated[col1], generated[col2])).T
    return gtest.grangercausalitytests(pair, lag, verbose=True)
def start_test(maxlag, col1, col2, start, stop):
    """Granger-test two columns of the example neuroscan ASCII recording.

    Reads rows of the hard-coded example file between line counts
    `start` and `stop`, skipping section-header lines that begin with
    '[', collects columns `col1` and `col2` as floats, and runs
    gtest.grangercausalitytests on the two-column array.

    maxlag       : maximum lag handed to grangercausalitytests
    col1, col2   : whitespace-delimited column indices to compare
    start, stop  : first/last line counts to include
    Returns the grangercausalitytests result dict.
    """
    Xnew = []
    Ynew = []
    counter = 0
    # BUGFIX: the file handle was opened and never closed; 'with'
    # guarantees it is released even if float() raises part-way through.
    with open("/home/lelz/towlelab/examples/test.rest.neuroscanascii", "rb") as example:
        for l in example:
            if counter < start:
                counter += 1
                continue
            counter += 1
            if counter > stop:
                break
            l = l.strip().split()
            if len(l) > 0:
                if len(l[0]) > 0:
                    # lines beginning with '[' are header/section markers
                    if l[0][0] == "[":
                        continue
                    Xnew.append(float(l[col1]))
                    Ynew.append(float(l[col2]))
    Xnew = np.array(Xnew)
    Ynew = np.array(Ynew)
    # observations in rows, the two variables in columns
    d = np.vstack((Xnew, Ynew)).T
    res = gtest.grangercausalitytests(d, maxlag, verbose=False)
    return res
def test_grangercausality():
    """Check grangercausalitytests against R's lmtest::grangertest output."""
    # example macro data: log-differenced realgdp / realcons
    raw = macrodata.load().data
    raw = raw[['realgdp','realcons']]
    growth = np.diff(np.log(raw.view((float,2))), axis=0)
    # reference result computed in R with lmtest::grangertest (f_test)
    expected = [0.243097, 0.7844328, 195, 2]
    outcome = grangercausalitytests(growth[:,1::-1], 2, verbose=False)
    ssr_f = outcome[2][0]['ssr_ftest']
    assert_almost_equal(expected, ssr_f, decimal=7)
    assert_almost_equal(outcome[2][0]['params_ftest'], ssr_f, decimal=7)
def test_grangercausality():
    """Check grangercausalitytests against R's lmtest::grangertest output."""
    # example macro data: log-differenced realgdp / realcons
    frame = macrodata.load().data[['realgdp', 'realcons']]
    series = np.diff(np.log(frame.view((float, 2))), axis=0)
    # reference result computed in R with lmtest::grangertest (f_test)
    r_reference = [0.243097, 0.7844328, 195, 2]
    result = grangercausalitytests(series[:, 1::-1], 2, verbose=False)
    lag2 = result[2][0]
    assert_almost_equal(r_reference, lag2['ssr_ftest'], decimal=7)
    assert_almost_equal(lag2['params_ftest'], lag2['ssr_ftest'], decimal=7)
def old_granger_test(X, ij, newLength=256, NFFT=256, offset=0, Fs=2, maxlag=3, progressCallback=donothing_callback, window=window_hanning, noverlap=0, detrend=detrend_none, gv1=0, gv2=0): threshold = .05 / (len(ij) * 2) oldNFFT = NFFT NFFT = newLength numRows, numCols = X.shape if numRows < NFFT: tmp = X X = np.zeros((NFFT, numCols), X.dtype) X[:numRows, :] = tmp del tmp numRows, numCols = X.shape # get all the columns of X that we are interested in by checking # the ij tuples allColumns = set() for i, j in ij: allColumns.add(i) allColumns.add(j) Ncols = len(allColumns) # for real X, ignore the negative frequencies if np.iscomplexobj(X): numFreqs = NFFT else: numFreqs = NFFT // 2 + 1 if cbook.iterable(window): assert (len(window) == NFFT) windowVals = window else: windowVals = window_hanning( np.ones(NFFT, X.dtype) ) #I changed this from window to window_hanning. window was not doing anything!! -eli ind = range(offset, int(numRows - newLength + 1), int(oldNFFT - noverlap)) #coherence calcs on each sweep start at offset numSlices = len(ind) FFTSlices = {} Pxx = {} if newLength > oldNFFT: #make sure that the newlength is shorter than the segment (epoch) length (see below) newLength = oldNFFT slices = range(numSlices) # normVal = np.linalg.norm(windowVals)**2 for iCol in allColumns: progressCallback(i / Ncols, 'Cacheing FFTs') Slices = np.zeros((numSlices, newLength)) for iSlice in slices: #thisSlice = X[ind[iSlice]:ind[iSlice]+NFFT, iCol] #this is the line that reads the data normally thisSlice = X[ ind[iSlice]:ind[iSlice] + newLength, iCol] #this is the line that reads sections of epochs print "GRANGER TESTING: ", ind[ iSlice], " to ", ind[iSlice] + newLength print "shape of all: ", Slices.shape print "shape of slice: ", thisSlice.shape #thisSlice = windowVals*detrend(thisSlice) Slices[iSlice] = thisSlice # = np.fft.fft(thisSlice)[:numFreqs] #FFTSlices[iCol] = Slices Pxx[iCol] = np.mean(Slices, axis=0) # / normVal print "shape of pxx one col: ", Pxx[iCol].shape # 
print Pxx[iCol] del Slices, ind, windowVals Cxy = {} Phase = {} count = 0 N = len(ij) typedict = ['params_ftest', 'ssr_chi2test'] for i, j in ij: count += 1 if count % 10 == 0: progressCallback(count / N, 'Computing coherences') d = np.vstack((Pxx[i], Pxx[j])).T drev = np.vstack((Pxx[j], Pxx[i])).T res = gtest.grangercausalitytests(d, maxlag, verbose=False) resrev = gtest.grangercausalitytests(drev, maxlag, verbose=False) # print "looking at: ", res[maxlag][0] # this gets the p value result = res[maxlag][0][typedict[gv1]][not gv2] rev_result = resrev[maxlag][0][typedict[gv1]][not gv2] if result <= threshold and rev_result > threshold: Cxy[i, j] = 1 - (result / threshold) Phase[i, j] = 90 elif result > threshold and rev_result <= threshold: Cxy[i, j] = 1 - (rev_result / threshold) Phase[i, j] = -90 elif result <= threshold and rev_result <= threshold: Cxy[i, j] = min((result, rev_result)) direction = (result, rev_result).index(Cxy[i, j]) if direction == 0: Phase[i, j] = 10 else: Phase[i, j] = -10 else: Cxy[i, j] = 0 Phase[i, j] = 0 # print typedict[gv1],gv2 print "RESIS: ", Cxy[i, j], "RESULT ", result, "REVRESULT ", rev_result print "NFFT, NUMFREQS: ", NFFT, numFreqs freqs = Fs / NFFT * np.arange(numFreqs) print "FREQS ARE: ", freqs return Cxy, Phase, freqs
def granger_test2(X, ij, newLength=256, NFFT=256, offset=0, Fs=2, maxlag=3, progressCallback=donothing_callback, window=window_hanning, noverlap=0, detrend=detrend_none, gv1=0, gv2=0): oldNFFT = NFFT NFFT = newLength numRows, numCols = X.shape if numRows < NFFT: tmp = X X = np.zeros((NFFT, numCols), X.dtype) X[:numRows, :] = tmp del tmp numRows, numCols = X.shape # get all the columns of X that we are interested in by checking # the ij tuples allColumns = set() for i, j in ij: allColumns.add(i) allColumns.add(j) Ncols = len(allColumns) # for real X, ignore the negative frequencies if np.iscomplexobj(X): numFreqs = NFFT else: numFreqs = NFFT // 2 + 1 if cbook.iterable(window): assert (len(window) == NFFT) windowVals = window else: windowVals = window_hanning( np.ones(NFFT, X.dtype) ) #I changed this from window to window_hanning. window was not doing anything!! -eli ind = range(offset, int(numRows - newLength + 1), int(oldNFFT - noverlap)) #coherence calcs on each sweep start at offset numSlices = len(ind) threshold = .05 / (numSlices * 2) FFTSlices = {} Pxx = {} Cxy = {} Phase = {} if newLength > oldNFFT: #make sure that the newlength is shorter than the segment (epoch) length (see below) newLength = oldNFFT slices = range(numSlices) typedict = ['params_ftest', 'ssr_ftest'] # normVal = np.linalg.norm(windowVals)**2 counter = 0 for i, j in ij: # FOR EACH ELECTRODE PAIR progressCallback(counter / len(ij), 'Cacheing FFTs') Slices = np.zeros((numSlices, 1)) RevSlices = np.zeros((numSlices, 1)) counter += 1 counter2 = 0 #print i,j for iSlice in slices: # FOR EACH TRIAL thisSlice = X[ind[iSlice]:ind[iSlice] + newLength, i] #this is the line that reads sections of epochs thisSlice2 = X[ind[iSlice]:ind[iSlice] + newLength, j] #print "GRANGER TESTING: ", ind[iSlice], " to ", ind[iSlice] + newLength #print "shape of all: ", Slices.shape #print "shape of slice: ", thisSlice.shape thisSlice = windowVals * detrend(thisSlice) thisSlice2 = windowVals * detrend(thisSlice2) d = 
np.vstack((thisSlice, thisSlice2)).T drev = np.vstack((thisSlice2, thisSlice)).T res = gtest.grangercausalitytests(d, maxlag, verbose=False) resrev = gtest.grangercausalitytests(drev, maxlag, verbose=False) #print "RES: ", res presult = res[maxlag][0][typedict[gv1]][not gv2] prev_result = resrev[maxlag][0][typedict[gv1]][not gv2] fresult = np.log(res[maxlag][0][typedict[gv1]][gv2]) frev_result = np.log(resrev[maxlag][0][typedict[gv1]][gv2]) #print "RESULTS: ", fresult, frev_result if presult <= threshold and prev_result > threshold: Slices[counter2] = fresult RevSlices[counter2] = 0. elif presult > threshold and prev_result <= threshold: RevSlices[counter2] = frev_result Slices[counter2] = 0. elif presult <= threshold and prev_result <= threshold: Slices[counter2] = fresult RevSlices[counter2] = frev_result else: Slices[counter2] = 0. RevSlices[counter2] = 0. counter2 += 1 avg_forward = np.mean(Slices) avg_backwards = np.mean(RevSlices) Cxy[i, j] = max((avg_forward, avg_backwards)) direction = (avg_forward, avg_backwards).index(Cxy[i, j]) if direction == 0: Phase[i, j] = 90 else: Phase[i, j] = -90 print "TRODE RESULTS: ", Cxy[i, j], Phase[i, j] del Slices, RevSlices freqs = Fs / NFFT * np.arange(numFreqs) #print "FREQS ARE: ", freqs return Cxy, Phase, freqs
def old_granger_test( X, ij, newLength=256, NFFT=256, offset=0, Fs=2, maxlag=3, progressCallback=donothing_callback, window=window_hanning, noverlap=0, detrend=detrend_none, gv1=0, gv2=0, ): threshold = 0.05 / (len(ij) * 2) oldNFFT = NFFT NFFT = newLength numRows, numCols = X.shape if numRows < NFFT: tmp = X X = np.zeros((NFFT, numCols), X.dtype) X[:numRows, :] = tmp del tmp numRows, numCols = X.shape # get all the columns of X that we are interested in by checking # the ij tuples allColumns = set() for i, j in ij: allColumns.add(i) allColumns.add(j) Ncols = len(allColumns) # for real X, ignore the negative frequencies if np.iscomplexobj(X): numFreqs = NFFT else: numFreqs = NFFT // 2 + 1 if cbook.iterable(window): assert len(window) == NFFT windowVals = window else: windowVals = window_hanning( np.ones(NFFT, X.dtype) ) # I changed this from window to window_hanning. window was not doing anything!! -eli ind = range( offset, int(numRows - newLength + 1), int(oldNFFT - noverlap) ) # coherence calcs on each sweep start at offset numSlices = len(ind) FFTSlices = {} Pxx = {} if newLength > oldNFFT: # make sure that the newlength is shorter than the segment (epoch) length (see below) newLength = oldNFFT slices = range(numSlices) # normVal = np.linalg.norm(windowVals)**2 for iCol in allColumns: progressCallback(i / Ncols, "Cacheing FFTs") Slices = np.zeros((numSlices, newLength)) for iSlice in slices: # thisSlice = X[ind[iSlice]:ind[iSlice]+NFFT, iCol] #this is the line that reads the data normally thisSlice = X[ind[iSlice] : ind[iSlice] + newLength, iCol] # this is the line that reads sections of epochs print "GRANGER TESTING: ", ind[iSlice], " to ", ind[iSlice] + newLength print "shape of all: ", Slices.shape print "shape of slice: ", thisSlice.shape # thisSlice = windowVals*detrend(thisSlice) Slices[iSlice] = thisSlice # = np.fft.fft(thisSlice)[:numFreqs] # FFTSlices[iCol] = Slices Pxx[iCol] = np.mean(Slices, axis=0) # / normVal print "shape of pxx one col: ", 
Pxx[iCol].shape # print Pxx[iCol] del Slices, ind, windowVals Cxy = {} Phase = {} count = 0 N = len(ij) typedict = ["params_ftest", "ssr_chi2test"] for i, j in ij: count += 1 if count % 10 == 0: progressCallback(count / N, "Computing coherences") d = np.vstack((Pxx[i], Pxx[j])).T drev = np.vstack((Pxx[j], Pxx[i])).T res = gtest.grangercausalitytests(d, maxlag, verbose=False) resrev = gtest.grangercausalitytests(drev, maxlag, verbose=False) # print "looking at: ", res[maxlag][0] # this gets the p value result = res[maxlag][0][typedict[gv1]][not gv2] rev_result = resrev[maxlag][0][typedict[gv1]][not gv2] if result <= threshold and rev_result > threshold: Cxy[i, j] = 1 - (result / threshold) Phase[i, j] = 90 elif result > threshold and rev_result <= threshold: Cxy[i, j] = 1 - (rev_result / threshold) Phase[i, j] = -90 elif result <= threshold and rev_result <= threshold: Cxy[i, j] = min((result, rev_result)) direction = (result, rev_result).index(Cxy[i, j]) if direction == 0: Phase[i, j] = 10 else: Phase[i, j] = -10 else: Cxy[i, j] = 0 Phase[i, j] = 0 # print typedict[gv1],gv2 print "RESIS: ", Cxy[i, j], "RESULT ", result, "REVRESULT ", rev_result print "NFFT, NUMFREQS: ", NFFT, numFreqs freqs = Fs / NFFT * np.arange(numFreqs) print "FREQS ARE: ", freqs return Cxy, Phase, freqs
def granger_test2( X, ij, newLength=256, NFFT=256, offset=0, Fs=2, maxlag=3, progressCallback=donothing_callback, window=window_hanning, noverlap=0, detrend=detrend_none, gv1=0, gv2=0, ): oldNFFT = NFFT NFFT = newLength numRows, numCols = X.shape if numRows < NFFT: tmp = X X = np.zeros((NFFT, numCols), X.dtype) X[:numRows, :] = tmp del tmp numRows, numCols = X.shape # get all the columns of X that we are interested in by checking # the ij tuples allColumns = set() for i, j in ij: allColumns.add(i) allColumns.add(j) Ncols = len(allColumns) # for real X, ignore the negative frequencies if np.iscomplexobj(X): numFreqs = NFFT else: numFreqs = NFFT // 2 + 1 if cbook.iterable(window): assert len(window) == NFFT windowVals = window else: windowVals = window_hanning( np.ones(NFFT, X.dtype) ) # I changed this from window to window_hanning. window was not doing anything!! -eli ind = range( offset, int(numRows - newLength + 1), int(oldNFFT - noverlap) ) # coherence calcs on each sweep start at offset numSlices = len(ind) threshold = 0.05 / (numSlices * 2) FFTSlices = {} Pxx = {} Cxy = {} Phase = {} if newLength > oldNFFT: # make sure that the newlength is shorter than the segment (epoch) length (see below) newLength = oldNFFT slices = range(numSlices) typedict = ["params_ftest", "ssr_ftest"] # normVal = np.linalg.norm(windowVals)**2 counter = 0 for i, j in ij: # FOR EACH ELECTRODE PAIR progressCallback(counter / len(ij), "Cacheing FFTs") Slices = np.zeros((numSlices, 1)) RevSlices = np.zeros((numSlices, 1)) counter += 1 counter2 = 0 # print i,j for iSlice in slices: # FOR EACH TRIAL thisSlice = X[ind[iSlice] : ind[iSlice] + newLength, i] # this is the line that reads sections of epochs thisSlice2 = X[ind[iSlice] : ind[iSlice] + newLength, j] # print "GRANGER TESTING: ", ind[iSlice], " to ", ind[iSlice] + newLength # print "shape of all: ", Slices.shape # print "shape of slice: ", thisSlice.shape thisSlice = windowVals * detrend(thisSlice) thisSlice2 = windowVals * 
detrend(thisSlice2) d = np.vstack((thisSlice, thisSlice2)).T drev = np.vstack((thisSlice2, thisSlice)).T res = gtest.grangercausalitytests(d, maxlag, verbose=False) resrev = gtest.grangercausalitytests(drev, maxlag, verbose=False) # print "RES: ", res presult = res[maxlag][0][typedict[gv1]][not gv2] prev_result = resrev[maxlag][0][typedict[gv1]][not gv2] fresult = np.log(res[maxlag][0][typedict[gv1]][gv2]) frev_result = np.log(resrev[maxlag][0][typedict[gv1]][gv2]) # print "RESULTS: ", fresult, frev_result if presult <= threshold and prev_result > threshold: Slices[counter2] = fresult RevSlices[counter2] = 0.0 elif presult > threshold and prev_result <= threshold: RevSlices[counter2] = frev_result Slices[counter2] = 0.0 elif presult <= threshold and prev_result <= threshold: Slices[counter2] = fresult RevSlices[counter2] = frev_result else: Slices[counter2] = 0.0 RevSlices[counter2] = 0.0 counter2 += 1 avg_forward = np.mean(Slices) avg_backwards = np.mean(RevSlices) Cxy[i, j] = max((avg_forward, avg_backwards)) direction = (avg_forward, avg_backwards).index(Cxy[i, j]) if direction == 0: Phase[i, j] = 90 else: Phase[i, j] = -90 print "TRODE RESULTS: ", Cxy[i, j], Phase[i, j] del Slices, RevSlices freqs = Fs / NFFT * np.arange(numFreqs) # print "FREQS ARE: ", freqs return Cxy, Phase, freqs