def random2HemiPermutations(mat):
    """Return a matrix with the elements of a triangle permuted, without preserving the degrees.
    """
    s = mat.shape
    N = s[0]
    N2 = N / 2
    L = N * (N - 2) / 8
    nruter = array(mat)
    Sind = triSup(arange(N2 * N2).reshape((N2, N2))
                  + arange(0, N2 * N2, N2).reshape((N2, 1)))
    Mind = (arange(N2 * N2).reshape((N2, N2))
            + arange(N2, N2 * N2 + 1, N2).reshape((N2, 1))).reshape(N2 * N2)
    Iind = triSup(ones((N, N)), ind=1)[-L:]
    Salea = permutation(Sind)
    Malea = permutation(Mind)
    Ialea = permutation(Iind)
    nruter[unravel_index(Sind, s)] = nruter[unravel_index(Salea, s)]
    nruter[unravel_index(Mind, s)] = nruter[unravel_index(Malea, s)]
    nruter[unravel_index(Iind, s)] = nruter[unravel_index(Ialea, s)]
    # mirror the permuted upper triangle into the lower triangle
    for i in range(s[0]):
        nruter[i:, i] = nruter[i, i:]
    return nruter
def initialRand(self, dens, normX=None):
    '''...'''
    self.x = zeros(self.N)
    self.A = zeros(self.N)
    self.x[permutation(self.N)[range(int(dens * self.N))]] = 1.
    self.A[permutation(self.N)[range(int(dens * self.N))]] = 1.
    if normX is not None:
        self.x *= normX / norm(self.x)
def createSimilarAR(data):
    """
    creates an AR-process that is similar to a given data set.
    data must be given in n x d-format
    """
    # step 1: get "average" fit matrix
    l_A = []
    for rep in arange(100):
        idx = randint(0, data.shape[0] - 1, data.shape[0] - 1)
        idat = data[idx, :]
        odat = data[idx + 1, :]
        l_A.append(lstsq(idat, odat)[0])

    sysmat = meanMat(l_A).T

    # idea: get "dynamic noise" from input data as difference of
    # expected vs. predicted data:
    #     eta_i = (sysmat*(data[:,i-1]).T - data[:,i])
    # however, in order to destroy any possible correlations in the
    # input noise (they would also occur in the output), the
    # noise per section has to be permuted.
    prediction = dot(sysmat, data[:-1, :].T)
    dynNoise = data[1:, :].T - prediction
    res = [zeros((dynNoise.shape[0], 1)), ]
    for nidx in permutation(dynNoise.shape[1]):
        res.append(dot(sysmat, res[-1]) + dynNoise[:, nidx][:, newaxis])

    return hstack(res).T
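# Usage sketch (added, not from the original source). createSimilarAR relies on
# pylab-style star imports (arange, randint, lstsq, dot, zeros, newaxis, hstack,
# permutation) plus a helper `meanMat`; here it is assumed that `meanMat` simply
# averages a list of equally sized matrices.
from pylab import arange, sin, cos, randn, vstack, mean

def meanMat(mats):
    # assumed behaviour of the helper used by createSimilarAR
    return mean(mats, axis=0)

t = arange(1000.)
demo_data = vstack((sin(0.1 * t), cos(0.1 * t))).T + 0.05 * randn(1000, 2)
surrogate = createSimilarAR(demo_data)     # same n x d shape as the input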
def prepVStimDict(self):
    res = int(self.params['res'])
    if mod(res, 2) == 0:
        box_order = arange(res**2/2+1, res**2/2+1+res)
    else:
        box_order = arange(res**2/2.-res/2.+1, res**2/2.-res/2.+1+res)
    self.params['box_order'] = box_order[permutation(res).astype(int)]
    self.vStimDict = {'pre': self.params['pre']/1000.,
                      'dur': self.params['dur']/1000.,
                      'post': 0.1,
                      'bg': self.params['bg']/100.,
                      'fg': self.params['fg']/100.,
                      'reps': self.params['reps'],
                      'squarenum': self.params['box_order'],
                      'res': self.params['res'],
                      'radius': self.params['box_sz']/2,
                      'sz': self.params['sz'],
                      'type': 'squareboard',
                      'calProtName': 'fastSBLineCal',
                      'trigger': self.params['trigger']}
    self.calMod.calibrateBG(self.vStimDict, self.params, self.infoDict['dirVar'].get())
    self.calMod.calibrateFG(self.vStimDict, self.params, self.infoDict['dirVar'].get())
    self.notifyStimMachine()
    self.RFLineDur = zeros((self.params['reps'], self.params['res']))
    self.RFLinePost = zeros((self.params['reps'], self.params['res']))
def test_column_set_get():
    m = 256
    n = 128
    data = np.random.randn(m, n)
    cm_data = cm.CUDAMatrix(cm.reformat(data))

    indices = permutation(n)
    cm_indices = cm.CUDAMatrix(cm.reformat(indices.reshape(1, -1)))
    start = 0
    end = 10
    cm_columns = cm_data.get_column_vectors(cm_indices, start, end)
    get_error = np.sum((cm_columns.asarray() - data[:, indices[start:end]])**2)

    data_set = np.random.randn(m, end - start)
    cm_columns.free_device_memory()
    cm_columns = cm.CUDAMatrix(cm.reformat(data_set))
    cm_data.set_column_vectors(cm_indices, start, end, cm_columns)
    data[:, indices[start:end]] = data_set
    set_error = np.sum((cm_data.asarray() - data)**2)

    print "Get Error = ", get_error
    print "Set Error = ", set_error

    assert get_error < 10**-2 or set_error < 10**-2, \
        "Error in CUDAMatrix.get_column_vectors exceeded threshold"
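# Note (added): running this test standalone requires cudamat's CUBLAS context
# to be initialised before any CUDAMatrix is created; a typical invocation
# would be something like
#
#     import numpy as np
#     import cudamat as cm
#     from numpy.random import permutation
#     cm.cublas_init()
#     test_column_set_get()
#     cm.cublas_shutdown()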
def permute(lst):
    ans = []
    from pylab import permutation
    p = permutation(len(lst))
    for i in range(len(lst)):
        ans.append(lst[p[i]])
    return ans
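# Usage sketch (added): permute returns a new list whose elements are in random
# order; calling pylab's permutation on the list directly gives an equivalent
# shuffled array.
print permute(['a', 'b', 'c', 'd'])        # e.g. ['d', 'a', 'c', 'b']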
def main():
    XC = loadtxt('iris.data', delimiter=',', dtype=float, converters={4: cnvt})
    ind = arange(150)        # indices into the dataset
    ind = permutation(ind)   # random permutation
    L = ind[0:90]            # learning set indices
    T = ind[90:]             # test set indices

    # Learning Set
    X = transpose(XC[L, 0:4])
    nnc = NNb(X, XC[L, -1])

    # Classification of Test Set
    c = zeros(len(T))
    for i in arange(len(T)):
        print sys.argv[1]
        print int(sys.argv[1])
        c[i] = nnc.classify(XC[T[i], 0:4], int(sys.argv[1]))

    # Confusion Matrix
    CM = zeros((3, 3))
    for i in range(3):
        for j in range(3):
            CM[i, j] = sum(logical_and(XC[T, 4] == (i + 1), c == (j + 1)))
    print(CM)

    # Plot Test Set
    figure(1)
    color = array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
    for i in range(4):
        for j in range(4):
            subplot(4, 4, 4 * i + j + 1)
            if i == j:
                continue
            print color[XC[T, 4].astype(int) - 1]
            print [1, 1, 1] * len(T)
            print color[c.astype(int) - 1]
            scatter(XC[T, i], XC[T, j], s=100, marker='s',
                    edgecolor=color[XC[T, 4].astype(int) - 1],
                    facecolor=[1, 1, 1] * len(T))
            scatter(XC[T, i], XC[T, j], s=30, marker='+',
                    edgecolor=color[c.astype(int) - 1])
    savefig('figures/nnbtest.pdf')
def randomMatrixPermutations(mat):
    """Return a matrix with the elements of its upper triangle permuted, without preserving the degrees.
    """
    s = mat.shape
    nruter = array(mat)
    indices = find(triu(ones(s), k=1))
    aleaInd = permutation(indices)
    nruter[unravel_index(indices, s)] = nruter[unravel_index(aleaInd, s)]
    # mirror the permuted upper triangle into the lower triangle
    for i in range(s[0]):
        nruter[i:, i] = nruter[i, i:]
    return nruter
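# Usage sketch (added; assumes the pylab-style imports used above: find, triu,
# ones, permutation, unravel_index). Only the strict upper triangle is shuffled
# and then mirrored into the lower triangle, so a symmetric input stays symmetric:
from pylab import rand, allclose

A = rand(6, 6)
A = A + A.T                                # symmetric test matrix
B = randomMatrixPermutations(A)
print allclose(B, B.T)                     # True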
def initialRand(self, dens, normX=None):
    '''Initialize (x, A) with a certain density 'dens'.
    '''
    #self.x = rand(self.N)
    #self.x[self.x < (1. - dens)] = 0
    #self.x[self.x >= (1. - dens)] = 1
    #if normX != None:
    #    self.x *= normX / norm(self.x)
    #self.A = self.thresholding(self.x, dens)
    self.A = zeros(self.N)
    self.A[permutation(self.N)[range(int(dens * self.N))]] = 1.
    self.initTheta()
    self.x = (arctanh(1.99 * (self.A - 0.5)) / self.G + self.theta) / self.P
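# Standalone sketch (added) of the initialisation pattern used above: take the
# first int(dens * N) entries of a random permutation of the indices and set
# them to 1, giving a binary vector of approximate density `dens`.
from pylab import zeros, permutation

N, dens = 100, 0.2
A = zeros(N)
A[permutation(N)[:int(dens * N)]] = 1.
print A.sum() / N                          # ~0.2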
def loadMatlabData(filename):
    """
    Load the feature matrix X and label vector y from a MATLAB .mat file
    and display a random sample of 100 datapoints.
    """
    # Load the training data
    print "Loading training data..."
    data = loadmat(filename)
    X = data['X']
    y = data['y']

    # Randomly select 100 datapoints to display
    sel = permutation(X.shape[0])
    random_columns = sel[0:100]
    displayData(X[random_columns, :])

    return X, y
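# Usage sketch (added): loadmat is presumably scipy.io.loadmat and displayData
# is a helper defined elsewhere in this codebase; the filename below is only
# illustrative.
X, y = loadMatlabData('ex3data1.mat')
print X.shape, y.shape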
def main(k):
    XC = loadtxt('data/iris.data', delimiter=',', dtype=float, converters={4: cnvt})
    ind = arange(150)        # indices into the dataset
    ind = permutation(ind)   # random permutation
    L = ind[0:90]            # learning set indices
    T = ind[90:]             # test set indices

    # Learning Set
    # The k-NNb classifier is called for given k.
    X = transpose(XC[L, 0:4])
    nnc = NNb(X, XC[L, -1], k)

    # Classification of Test Set
    c = zeros(len(T))
    for i in range(len(T)):
        c[i] = nnc.classify(XC[T[i], 0:4])

    # Confusion Matrix
    CM = zeros((3, 3))
    for i in range(3):
        for j in range(3):
            CM[i, j] = sum(logical_and(XC[T, 4] == (i + 1), c == (j + 1)))
    print(CM)

    # Plot Test Set
    plt.figure(1)
    color = array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
    for i in range(4):
        for j in range(4):
            plt.subplot(4, 4, 4 * i + j + 1)
            if i == j:
                continue
            plt.scatter(XC[T, i], XC[T, j], s=100, marker='s',
                        edgecolor=color[XC[T, 4].astype(int) - 1],
                        facecolor=[1, 1, 1] * len(T))
            plt.scatter(XC[T, i], XC[T, j], s=30, marker='+',
                        edgecolor=color[c.astype(int) - 1])
    plt.savefig('lab_42.png')
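# Generic form of the split used above (added): a random permutation of the row
# indices yields disjoint, randomly chosen learning and test index sets.
from pylab import permutation

n_samples, n_train = 150, 90
idx = permutation(n_samples)
train_idx, test_idx = idx[:n_train], idx[n_train:]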
def __init__(self, args):
    self.defaults = {'fg': 3000, 'bg': 300, 'pre': 500, 'dur': 100, 'post': 1000,
                     'reps': 20, 'trigger': 1, 'res': 25, 'box_sz': 10, 'sz': 250,
                     'analysis': 'spike', 'slope': 1, 'respDelay': 100,
                     'box_order': arange(15**2)[permutation(15**2).astype(int)]}
    #self.paramsOrder = ['fg', 'bg', 'sz', 'res', 'box_sz', 'box_order', 'reps', 'pre', 'dur', 'post', 'trigger', 'analysis', 'slope', 'respDelay']
    self.paramsOrder = ['fg', 'bg', 'sz', 'res', 'box_sz', 'reps', 'pre', 'dur',
                        'post', 'analysis', 'respDelay']
    self.initOther(args)
    self.calMod = calibrationModule(self.defaults, self.paramsOrder)
    self.calMod.initCalibratedBG()
    self.calMod.initCalibratedFG(['sz', 'res'])
    self.initParams()
    self.protName = 'squareboardLineCalibrated'
        # Get N nearest neighbors
        minindex = np.argsort(dsq)[0:self.k]
        # Group sum by value
        return Counter(self.c[minindex]).most_common()[0][0]

def cnvt(s):
    tab = {'Iris-setosa': 1.0, 'Iris-versicolor': 2.0, 'Iris-virginica': 3.0}
    if tab.has_key(s):
        return tab[s]
    else:
        return -1.0

XC = loadtxt('data.csv', delimiter=',', dtype=float, converters={4: cnvt})
ind = arange(150)        # indices into the dataset
ind = permutation(ind)   # random permutation
L = ind[0:90]            # learning set indices
T = ind[90:]             # test set indices

# Learning Set
X = transpose(XC[L, 0:4])
nnc = NNb(X, XC[L, -1])

# Classification of Test Set
c = zeros(len(T))
for i in arange(len(T)):
    c[i] = nnc.classify(XC[T[i], 0:4])

# Confusion Matrix
CM = zeros((3, 3))
for i in range(3):
def getTestParamDict(symbol):
    if symbol == key._1:
        d = {'type': 'vsync_test', 'fg': 1, 'bg': 0.5, 'pre': .5, 'dur': 5.,
             'post': .5, 'reps': 1, 'trigger': 0}
    elif symbol == key._2:
        d = {'type': 'checkerboard', 'fg': 1.0, 'bg': 0.5, 'pre': .5, 'dur': 5.,
             'post': .5, 'sz': 400, 'res': array([20, 1]), 'mu': 0.50, 'std': 0.2,
             'reps': 1, 'trigger': 0}
    elif symbol == key._3:
        d = {'type': 'squareboard', 'fg': 1.0, 'bg': 0.2, 'pre': .1, 'dur': .2,
             'post': .1, 'radius': 50, 'squarenum': permutation(16)+1, 'sz': 400,
             'res': 4, 'reps': 1, 'trigger': 0}
    elif symbol == key._4:
        d = {'type': 'sectors', 'fg': 1.0, 'bg': 0.5, 'pre': .1, 'dur': .2,
             'post': .1, 'radius': 100, 'orientation': array((0, 90)), 'width': 45,
             'reps': 3, 'trigger': 0}
    elif symbol == key._5:
        d = {'type': 'fullfieldnoise', 'fg': 1.0, 'bg': 0.5, 'pre': .2, 'dur': 2.,
             'post': .2, 'mu': 0.50, 'std': 0.3, 'reps': 2, 'res': 3, 'trigger': 0}
    elif symbol == key._6:
        d = {'type': 'calibration', 'fg': 1.0, 'bg': 0.5, 'pre': .1, 'dur': .2,
             'post': .1, 'intensity': arange(10)/10., 'reps': 1, 'trigger': 0}
    else:
        d = None
    return d