def detrend_linear(x):
    """Remove the best-fit straight line from x and return the residual.

    A line b*xx + a is fitted by least squares against the sample index
    xx = 0 .. len(x)-1, and x - (b*xx + a) is returned as a float array.
    """
    x = asarray(x, dtype=float)
    # Regression abscissa: the sample index.  The previous code called
    # arange(len(x), type(x)), which passes the *type object* as arange's
    # `stop` argument (a TypeError under numpy) -- it was a broken port of
    # Numeric's arange(len(x), typecode=x.typecode()).
    xx = arange(len(x), dtype=float)
    # numpy.cov(xx, x) returns the 2x2 covariance matrix with
    # C[0, 0] = var(xx) and C[0, 1] = cov(xx, x); their ratio is the
    # least-squares slope.  (Building a (n, 2) matrix and calling cov on it,
    # as before, yields an n x n matrix under numpy's rows-are-variables
    # convention, not the intended 2 x 2.)
    C = numpy.cov(xx, x)
    b = C[0, 1] / C[0, 0]
    a = mean(x) - b * mean(xx)
    return x - (b * xx + a)
def normalizeTbt(self, plane): global nturns, tx, ty if plane == 'x': print 'HORIZONTAL PLANE:' b = tx[turn:,:] #truncate by the first 5 turns n_turns = shape(b)[0] elif plane == 'y': print 'VERTICAL PLANE' b = ty[turn:,:] #truncate by the first 5 turns n_turns = shape(b)[0] else: print "no tbt data acquired" b = (b-mean(b))/sqrt(n_turns) return b
def svdClean(self, plane): global nturns, tx, ty if plane == "x": b = tx[turn:, :] # truncate by the first 5 turns n_turns = shape(b)[0] elif plane == "y": b = ty[turn:, :] # truncate by the first 5 turns n_turns = shape(b)[0] else: print "no tbt data acquired" b_mean = mean(b) b = (b - b_mean) / sqrt(n_turns) n_bpms = shape(b)[1] # ----svd for matrix with bpms >10 if n_bpms > 10: A = singular_value_decomposition(b) # print "Singular values:",A[1] else: sys.exit("Exit, # of bpms < 10") # ----SVD cut for noise floor if sing_val > n_bpms: svdcut = n_bpms print "requested more singular values than available" print "# of sing_val used for", plane, "=", n_bpms else: svdcut = int(sing_val) print "# of sing_val used for", plane, "=", svdcut # print A[1][0] A[1][svdcut:] = 0.0 b = matrixmultiply(A[0], matrixmultiply(identity(len(A[1])) * A[1], A[2])) b = (b * sqrt(n_turns)) + b_mean # b = b*sqrt(n_turns) if plane == "x": tx[turn:, :] = b elif plane == "y": ty[turn:, :] = b else: print "no tbt data to analyze" nturns = shape(tx)[0]
def normalizeTbt(self, plane): global nturns, tx, ty if plane == 'x': print 'HORIZONTAL PLANE:' b = tx[turn:,:] #truncate by the first 5 turns n_turns = shape(b)[0] elif plane == 'y': print 'VERTICAL PLANE' b = ty[turn:,:] #truncate by the first 5 turns n_turns = shape(b)[0] else: print "no tbt data acquired" print 'Start Turn:',turn if shape(b)[0]==0 or shape(b)[1]==0 or len(b)<turn: print "No turn by turn data, check start turn \n exiting"; sys.exit() b = (b-mean(b))/sqrt(n_turns) return b
def setDescendantTipProperty(self, f, prop_name='Trait', force_recalc=False):
    """Attach <prop_name>Mean and <prop_name>Stdev of f(tip) to every node.

    These correspond to T_i and S_i respectively in AOT, Ackerley 2004.

    Example usage:
        (a) You have a dict mapping leaf -> property:
            leaf_f = lambda node: d[node.Data]
        (b) You have the property stored in leaf.prop_xyz:
            leaf_f = lambda node: node.prop_xyz
    tree.setDescendantTipProperty(leaf_f, 'MyProp') then sets
    n.MyPropMean and n.MyPropStdev on each node n.
    """
    for curr in self.traverse(self_before=False, self_after=True):
        # Cached tip lists are reused unless the caller forces a refresh.
        if force_recalc or not hasattr(curr, 'DescendantTips'):
            curr.setDescendantTips()
        tip_values = [f(tip) for tip in curr.DescendantTips]
        setattr(curr, prop_name + 'Mean', mean(tip_values))
        # stdev is undefined for a single observation; report 0 there.
        spread = stdev(tip_values) if len(tip_values) > 1 else 0
        setattr(curr, prop_name + 'Stdev', spread)
def psd(x, NFFT=256, Fs=2, detrend=detrend_none, window=window_hamming,
        noverlap=0):
    """
    The power spectral density by Welch's average periodogram method.

    The vector x is divided into NFFT length segments.  Each segment is
    detrended by function detrend and windowed by function window.
    noverlap gives the length of the overlap between segments.  The
    absolute(fft(segment))**2 of each segment are averaged to compute Pxx,
    with a scaling to correct for power loss due to windowing.

    Fs is the sampling frequency.

    -- NFFT must be a power of 2
    -- detrend and window are functions, unlike in matlab where they are
       vectors.
    -- if length x < NFFT, it will be zero padded to NFFT

    Returns the tuple (Pxx, freqs).

    Refs: Bendat & Piersol -- Random Data: Analysis and Measurement
    Procedures, John Wiley & Sons (1986)
    """
    # NOTE(review): this only rejects odd NFFT, not all non-powers-of-2;
    # kept as-is to avoid rejecting inputs the old code accepted.
    if NFFT % 2:
        raise ValueError('NFFT must be a power of 2')
    # zero pad x up to NFFT if it is shorter than NFFT
    if len(x) < NFFT:
        n = len(x)
        x = resize(x, (NFFT,))
        x[n:] = 0
    # For real x only the non-negative frequencies carry information.
    if any(numpy.iscomplex(x)):
        numFreqs = NFFT
    else:
        numFreqs = NFFT // 2 + 1
    windowVals = window(numpy.ones(NFFT))
    step = NFFT - noverlap
    ind = range(0, len(x) - NFFT + 1, step)
    n = len(ind)
    Pxx = numpy.zeros([numFreqs, n])
    # do the ffts of the slices
    for i in range(n):
        thisX = x[ind[i]:ind[i] + NFFT]
        thisX = windowVals * detrend(thisX)
        fx = absolute(fft(thisX)) ** 2
        Pxx[:, i] = fx[:int(numFreqs)]
    # Scale the spectrum by the norm of the window to compensate for
    # windowing loss; see Bendat & Piersol Sec 11.5.2
    if n > 1:
        Pxx = mean(Pxx, 1)
    Pxx = divide(Pxx, norm(windowVals) ** 2)
    # float(Fs): with the integer defaults Fs=2, NFFT=256 the old
    # expression Fs / NFFT truncated to 0 under Python 2, returning an
    # all-zero frequency axis.
    freqs = float(Fs) / NFFT * arange(0, numFreqs)
    return Pxx, freqs
def detrend_mean(x):
    """Return x with its mean subtracted (constant detrend)."""
    offset = mean(x)
    return x - offset
def csd(x, y, NFFT=256, Fs=2, detrend=detrend_none, window=window_hamming,
        noverlap=0):
    """
    The cross spectral density Pxy by Welch's average periodogram method.

    The vectors x and y are divided into NFFT length segments.  Each
    segment is detrended by function detrend and windowed by function
    window.  noverlap gives the length of the overlap between segments.
    The product of the direct FFTs of x and y are averaged over each
    segment to compute Pxy, with a scaling to correct for power loss due
    to windowing.

    Fs is the sampling frequency.

    NFFT must be a power of 2.

    Returns the tuple (Pxy, freqs); Pxy is complex.

    Refs: Bendat & Piersol -- Random Data: Analysis and Measurement
    Procedures, John Wiley & Sons (1986)
    """
    # NOTE(review): this only rejects odd NFFT, not all non-powers-of-2;
    # kept as-is to avoid rejecting inputs the old code accepted.
    if NFFT % 2:
        raise ValueError('NFFT must be a power of 2')
    # zero pad x and y up to NFFT if they are shorter than NFFT
    if len(x) < NFFT:
        n = len(x)
        x = resize(x, (NFFT,))
        x[n:] = 0
    if len(y) < NFFT:
        n = len(y)
        y = resize(y, (NFFT,))
        y[n:] = 0
    # For real x only the non-negative frequencies carry information.
    if any(numpy.iscomplex(x)):
        numFreqs = NFFT
    else:
        numFreqs = NFFT // 2 + 1
    windowVals = window(numpy.ones(NFFT))
    step = NFFT - noverlap
    ind = range(0, len(x) - NFFT + 1, step)
    n = len(ind)
    # dtype=complex: the accumulator holds fy * conj(fx).  The previous
    # real-valued zeros() made the slice assignment below raise a
    # TypeError (the old Numeric code, left in a comment, used Complex).
    Pxy = numpy.zeros([numFreqs, n], dtype=complex)
    # do the ffts of the slices
    for i in range(n):
        thisX = x[ind[i]:ind[i] + NFFT]
        thisX = windowVals * detrend(thisX)
        thisY = y[ind[i]:ind[i] + NFFT]
        thisY = windowVals * detrend(thisY)
        fx = fft(thisX)
        fy = fft(thisY)
        Pxy[:, i] = fy[:numFreqs] * conjugate(fx[:numFreqs])
    # Scale the spectrum by the norm of the window to compensate for
    # windowing loss; see Bendat & Piersol Sec 11.5.2
    if n > 1:
        Pxy = mean(Pxy, 1)
    Pxy = divide(Pxy, norm(windowVals) ** 2)
    # float(Fs): with the integer defaults Fs=2, NFFT=256 the old
    # expression Fs / NFFT truncated to 0 under Python 2, returning an
    # all-zero frequency axis.
    freqs = float(Fs) / NFFT * arange(0, numFreqs)
    return Pxy, freqs
def svdClean(self, plane): global nturns, tx, ty if plane == 'x': b = tx[turn:,:] #truncate by the first 5 turns n_turns = shape(b)[0] elif plane == 'y': b = ty[turn:,:] #truncate by the first 5 turns n_turns = shape(b)[0] else: print "no tbt data acquired" b_mean = mean(b) b = (b-b_mean)/sqrt(n_turns) n_bpms = shape(b)[1] print "number of bpms",n_bpms #----svd for matrix with bpms >10 if n_bpms > 10: A = singular_value_decomposition(b) #print "Singular values:",A[1] else: sys.exit('Exit, # of bpms < 10') #----SVD cut for noise floor if sing_val > n_bpms: svdcut = n_bpms print 'requested more singular values than available' print '# of sing_val used for', plane, '=', n_bpms else: svdcut = int(sing_val) print '# of sing_val used for', plane, '=', svdcut print shape(b)[1] print A[1] sing=open(options.file+"_sing_val","w") for xx in A[1]: print >> sing,xx sing.close() A[1][svdcut:] = 0. b = matrixmultiply(A[0],matrixmultiply(identity(len(A[1]))*A[1], A[2])) b = (b *sqrt(n_turns))+b_mean A = singular_value_decomposition(b) print A[1] sing=open(options.file+"sing_val_cut","w") for xx in A[1]: print >> sing,xx sing.close() #sys.exit() #sys.exit() #b = b*sqrt(n_turns) if plane == 'x': tx[turn:,:] = b elif plane == 'y': ty[turn:,:] = b else: print "no tbt data to analyze" nturns = shape(tx)[0]