def input_transform( Ashape, Bshape, FFTshape, first ):
    am,an = Ashape
    bm,bn = Bshape
    M,N = FFTshape
    DM = DFT( M )   # get DFT matrix for rows
    DN = DFT( N )   # get DFT matrix for cols
    if( first ):
        c = am*an
        W = unpaddedTransform( DM[:,:am], DN[:an,:] )
    else:
        c = bm*bn
        W = unpaddedTransform( DM[:,:bm], DN[:bn,:] )
    T = []
    for i in range(4*M*N):
        if( first ):
            if( i % 2 == 0 ):
                T.extend( array( [W[i/4,:].real, zeros(c)] ) )
            else:
                T.extend( array( [W[i/4,:].imag, zeros(c)] ) )
        else:
            if( i % 4 == 0 or i % 4 == 3 ):
                T.extend( array( [zeros(c), W[i/4,:].real] ) )
            else:
                T.extend( array( [zeros(c), W[i/4,:].imag] ) )
    return array(T)
def connect(self, lg=0.2, pstc_input=0.002, verbose=False, N_match=150, pstc_match=0.002):
    if verbose: print '  parsing rules'
    self.rules.initialize(self.spa)

    # Store rules in the documentation comment for this network for use in
    # the interactive mode view.
    # TODO: Figure out a different way to do this, as this line is pretty
    #   much the only Nengo-specific bit of code in here.
    self.net.network.documentation = 'BG: ' + ','.join(self.rules.names)

    for (a, b) in self.rules.get_lhs_matches():
        t = self.rules.lhs_match(a, b)
        name = 'match_%s_%s' % (a, b)
        vocab1 = self.spa.sources[a]
        vocab2 = self.spa.sources[b]
        assert vocab1 == vocab2
        dim = vocab1.dimensions
        self.net.make_array(name, N_match, dim, dimensions=2,
                            encoders=[[1,1],[1,-1],[-1,-1],[-1,1]], radius=1.4)
        t1 = numeric.zeros((dim*2, dim), typecode='f')
        t2 = numeric.zeros((dim*2, dim), typecode='f')
        for i in range(dim):
            t1[i*2, i] = 1.0
            t2[i*2+1, i] = 1.0
        self.spa.net.connect('source_'+a, self.name+'.'+name, transform=t1, pstc=pstc_match)
        self.spa.net.connect('source_'+b, self.name+'.'+name, transform=t2, pstc=pstc_match)
        transform = numeric.array([t for i in range(dim)]).T
        def product(x):
            return x[0]*x[1]
        self.net.connect(name, 'StrD1', transform=(1+lg)*transform, pstc=pstc_input, func=product)
        self.net.connect(name, 'StrD2', transform=(1-lg)*transform, pstc=pstc_input, func=product)
        self.net.connect(name, 'STN', transform=transform, pstc=pstc_input, func=product)

    # TODO: add support for matches (do this with a subnetwork, not a separate module)
    #if len(self.rules.get_lhs_matches())>0:
    #    self.match=spa.match.Match(self,pstc_match=self.p.pstc_input/2)
    #    self.spa.add_module(self.name+'_match',self.match,create=True,connect=True)

    for source in self.spa.sources.keys():
        if verbose: print '  connecting core inputs from', source
        transform = self.rules.lhs(source)
        if transform is None: continue
        self.spa.net.connect('source_'+source, self.name+'.StrD1', transform=(1+lg)*transform, pstc=pstc_input)
        self.spa.net.connect('source_'+source, self.name+'.StrD2', transform=(1-lg)*transform, pstc=pstc_input)
        self.spa.net.connect('source_'+source, self.name+'.STN', transform=transform, pstc=pstc_input)
def __init__(self, name, dimensions, pstc, vocab):
    self.pstc = pstc
    self.dimension = dimensions
    self.input = numeric.zeros(dimensions, 'f')
    self.vocab = vocab
    self.value = None
    nef.SimpleNode.__init__(self, name)
def complete(self, N_per_D=30, scaling=1, min_intercept=0.1, mutual_inhibit=0,
             feedback=0, pstc_feedback=0.01):
    vocab = self.spa.sources[self.name]
    self.net.make_array('cleanup', 50, len(vocab.keys),
                        intercept=(min_intercept, 1), encoders=[[1]])
    transform = [vocab.parse(k).v for k in vocab.keys]
    self.net.connect('input', 'cleanup', transform=transform, pstc=0.001)
    t = numeric.zeros((vocab.dimensions, len(vocab.keys)), typecode='f')
    for i in range(len(vocab.keys)):
        t[:, i] += vocab.parse(vocab.keys[i]).v * scaling
    self.net.connect('cleanup', 'output', transform=t, pstc=0.001)  #, func=lambda x: 1)

    if mutual_inhibit != 0 or feedback != 0:
        t = (numeric.eye(len(vocab.keys)) - 1) * mutual_inhibit
        t += numeric.eye(len(vocab.keys)) * feedback
        self.net.connect('cleanup', 'cleanup', transform=t, pstc=pstc_feedback)
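# The recurrent cleanup transform above combines two terms: the diagonal
# carries self-feedback and the off-diagonal entries carry mutual inhibition
# between cleanup units.  A minimal numpy check of that construction (numpy
# stands in for Nengo's `numeric` module here):
import numpy as np

_K = 3                                   # number of vocabulary keys
_mutual_inhibit, _feedback = 0.5, 0.2
_t = (np.eye(_K) - 1) * _mutual_inhibit + np.eye(_K) * _feedback
assert _t[0, 0] == _feedback             # diagonal: self-feedback
assert _t[0, 1] == -_mutual_inhibit      # off-diagonal: inhibition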
def compute_weights(encoder, decoder):
    N1 = len(decoder[0])
    D = len(decoder)
    N2 = len(encoder)
    w = numeric.zeros((N2, N1), typecode='f')
    for dim in range(D):
        sign, table = make_output_table([e[dim] for e in encoder])
        for i in range(N1):
            d = decoder[dim][i] / spike_strength
            if d < 0:
                decoder_sign = -1
                d = -d
            else:
                decoder_sign = 1
            histogram = compute_histogram(d, [e[dim] for e in encoder])
            cdf = compute_cdf(histogram)
            for k in range(generate_matrix_n):
                spike_count = determine_spike_count(cdf)
                for s in range(spike_count):
                    j = determine_target(table)
                    #TODO: check for multiple spikes to same target
                    w[j][i] += decoder_sign * sign[j]
    w /= generate_matrix_n
    #w2=numeric.array(MU.prod(encoder,decoder))
    return w
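# compute_histogram, compute_cdf, determine_spike_count, and determine_target
# are not defined in this snippet.  The sketch below illustrates the sampling
# step that determine_spike_count appears to perform, assuming `cdf` is a
# cumulative distribution over spike counts (numpy used for illustration;
# the helper name here is a hypothetical stand-in):
import numpy as np

def _sample_spike_count(cdf):
    # Inverse-transform sampling: return the first index whose cumulative
    # probability exceeds a uniform draw.
    return int(np.searchsorted(cdf, np.random.uniform(0.0, 1.0)))

_cdf = np.cumsum([0.5, 0.3, 0.2])        # P(0)=0.5, P(1)=0.3, P(2)=0.2
_counts = [_sample_spike_count(_cdf) for _ in range(10000)]
_frac0 = sum(1 for c in _counts if c == 0) / float(len(_counts))   # ~0.5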
def __init__(self, name, pstc, module, vocab):
    self.pstc = pstc
    self.vocab = vocab
    self.module = module
    self.mem = {}
    self.ZERO = numeric.zeros(vocab.dimensions, 'f')
    self.x = self.ZERO
    nef.SimpleNode.__init__(self, name)
def tick(self):
    dp = self.vocab.dot(self.input)
    m = max(dp)
    if m > 0.3:
        self.value = self.vocab.keys[list(dp).index(m)]
    else:
        self.value = None
    self.input = numeric.zeros(self.dimension, 'f')
def connect(self, and_neurons=50):
    # ensure all terms are parsed before starting
    for k, v in self.connections.items():
        pre_name, post_name = k
        for pre_term, post_term in v:
            pre = self.spa.sources[pre_name].parse(pre_term).v
            post = self.spa.sinks[post_name].parse(post_term).v

    for k, v in self.connections.items():
        pre_name, post_name = k
        t = numeric.zeros((self.spa.sinks[post_name].dimensions,
                           self.spa.sources[pre_name].dimensions), typecode='f')
        for pre_term, post_term in v:
            pre = self.spa.sources[pre_name].parse(pre_term).v
            post = self.spa.sinks[post_name].parse(post_term).v
            t += numeric.array([pre*bb for bb in post])
        if pre_name == post_name:
            if pre_name in self.inhibit:
                for pre_term in self.spa.sources[pre_name].keys:
                    pre = self.spa.sources[pre_name].parse(pre_term).v * self.inhibit[pre_name]
                    post_value = numeric.zeros(self.spa.sources[post_name].dimensions, typecode='f')
                    for post_term in self.spa.sources[pre_name].keys:
                        if pre_term != post_term:
                            post_value += self.spa.sources[post_name].parse(post_term).v
                    t += numeric.array([pre*bb for bb in post_value])
            if pre_name in self.excite:
                t += numeric.eye(len(t)) * self.excite[pre_name]
        self.spa.net.connect('source_'+pre_name, 'sink_'+post_name, transform=t)

    for i, (pre, post) in enumerate(self.ands):
        D = len(pre)
        aname = 'and%02d' % i
        self.net.make(aname, D*and_neurons, D)
        for j, p in enumerate(pre):
            t = numeric.zeros((D, self.spa.sources[p[0]].dimensions), typecode='f')
            t[j, :] = self.spa.sources[p[0]].parse(p[1]).v * math.sqrt(D)
            self.spa.net.connect('source_'+p[0], self.name+'.'+aname, transform=t)

        def result(x, v=self.spa.sinks[post[0]].parse(post[1]).v):
            for xx in x:
                if xx < 0.4:
                    return [0]*len(v)   #TODO: This is pretty arbitrary....
            return v
        self.spa.net.connect(self.name+'.'+aname, 'sink_'+post[0], func=result)
def replace_cleanup(net, learning_rate=5e-5, threshold=0.0, radius=1.0,
                    max_rate=(100, 200), cleanup_pstc=0.001, post_term="cleanup_00"):
    cleanup = net.get('cleanup')
    cleanup_neurons = cleanup.neurons
    D = cleanup.dimension

    try:
        bias_termination = cleanup.getTermination(u'bias')
        bias_node = net.get('bias')
        bias_weights = bias_termination.getNodeTerminations()[0].weights[0]
        bias_pstc = bias_termination.tau
        has_bias = True
    except:
        has_bias = False

    net.remove('cleanup')

    output = net.get('output')
    term = output.getTermination(post_term)
    net.network.removeProjection(term)

    # random weight matrix to initialize projection from pre to post
    def rand_weights(w):
        for i in range(len(w)):
            for j in range(len(w[0])):
                w[i][j] = random.uniform(-1e-3, 1e-3)
        return w

    weight = rand_weights(np.zeros((output.neurons, cleanup.neurons)).tolist())
    term.setTransform(weight, False)

    cleanup = net.make('cleanup', neurons=cleanup_neurons, dimensions=D,
                       radius=radius, intercept=threshold, max_rate=max_rate,
                       tau_ref=0.004)
    net.connect(cleanup.getOrigin('AXON'), term)
    net.connect('input', 'cleanup', pstc=cleanup_pstc)

    if has_bias:
        weights = [[bias_weights]] * cleanup_neurons
        tname = 'bias'
        cleanup.addTermination(tname, weights, bias_pstc, False)
        orig = bias_node.getOrigin('X')
        term = cleanup.getTermination(tname)
        net.network.addProjection(orig, term)
def connect(self):
    self.bg.rules.initialize(self.spa)
    N = self.p.match_neurons
    for (a, b) in self.bg.rules.get_lhs_matches():
        t = self.bg.rules.lhs_match(a, b)
        name = '%s_%s' % (a, b)
        dim = self.spa.sources[a].dimensions
        if N == 0:
            m = self.net.make(name, 1, dim*2, quick=True, mode='direct')
            def dotproduct(x):
                return sum([x[2*i]*x[2*i+1] for i in range(len(x)/2)])
            funcs = [nef.functions.PythonFunction(dotproduct)]
            m.addDecodedOrigin('product', funcs, 'AXON')
        else:
            m = self.net.make_array(name, N, dim, dimensions=2,
                                    encoders=[[1,1],[1,-1],[-1,-1],[-1,1]],
                                    quick=True, radius=1.4, storage_code="%d")
            def product(x):
                return x[0]*x[1]
            m.addDecodedOrigin('product', [nef.functions.PythonFunction(product, dim)], 'AXON')
        self.net.network.exposeOrigin(m.getOrigin('product'), name)

        t1 = numeric.zeros((dim*2, dim), typecode='f')
        t2 = numeric.zeros((dim*2, dim), typecode='f')
        for i in range(dim):
            t1[i*2, i] = 1.0
            t2[i*2+1, i] = 1.0
        va = self.spa.vocab(a)
        vb = self.spa.vocab(b)
        if va is not vb:
            t2 = numeric.dot(t2, vb.transform_to(va))
        m.addDecodedTermination(a, t1, self.p.pstc_match, False)
        m.addDecodedTermination(b, t2, self.p.pstc_match, False)
        self.net.network.exposeTermination(m.getTermination(a), name+'_1')
        self.net.network.exposeTermination(m.getTermination(b), name+'_2')
        self.spa.net.connect(self.spa.sources[a], self.net.network.getTermination(name+'_1'))
        self.spa.net.connect(self.spa.sources[b], self.net.network.getTermination(name+'_2'))

        if N == 0:
            transform = [t for i in range(1)]
        else:
            transform = [t for i in range(dim)]
        self.bg.add_input(self.net.network.getOrigin(name), numeric.array(transform).T)
def input_transform(dimensions, first, invert=False):
    fft = np.array(discrete_fourier_transform(dimensions))
    M = []
    for i in range((dimensions/2+1)*4):
        if invert:
            row = fft[-(i/4)]
        else:
            row = fft[i/4]
        if first:
            if i % 2 == 0:
                row2 = np.array([row.real, np.zeros(dimensions)])
            else:
                row2 = np.array([row.imag, np.zeros(dimensions)])
        else:
            if i % 4 == 0 or i % 4 == 3:
                row2 = np.array([np.zeros(dimensions), row.real])
            else:
                row2 = np.array([np.zeros(dimensions), row.imag])
        M.extend(row2)
    return M
def input_transform(dimensions, first, invert=False):
    fft = array(discrete_fourier_transform(dimensions))
    M = []
    for i in range((dimensions/2+1)*4):
        if invert:
            row = fft[-(i/4)]
        else:
            row = fft[i/4]
        if first:
            if i % 2 == 0:
                row2 = array([row.real, zeros(dimensions)])
            else:
                row2 = array([row.imag, zeros(dimensions)])
        else:
            if i % 4 == 0 or i % 4 == 3:
                row2 = array([zeros(dimensions), row.real])
            else:
                row2 = array([zeros(dimensions), row.imag])
        M.extend(row2)
    return M
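# Both variants of input_transform pack the real and imaginary DFT
# coefficients so that a downstream product network can implement circular
# convolution, the HRR binding operation.  A minimal numpy sketch of the
# underlying identity (circular convolution = elementwise multiplication in
# the Fourier domain):
import numpy as np

def _circconv(a, b):
    # Direct O(D^2) circular convolution, for reference.
    D = len(a)
    return np.array([sum(a[j] * b[(i - j) % D] for j in range(D))
                     for i in range(D)])

_a, _b = np.random.randn(8), np.random.randn(8)
_via_fft = np.fft.ifft(np.fft.fft(_a) * np.fft.fft(_b)).real
assert np.allclose(_circconv(_a, _b), _via_fft)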
def transform_to(self, other, keys=None):
    if keys is None:
        keys = list(self.keys)
        for k in other.keys:
            if k not in keys:
                keys.append(k)
    t = numeric.zeros((other.dimensions, self.dimensions), typecode='f')
    for k in keys:
        a = self[k].v
        b = other[k].v
        t += array([a * bb for bb in b])
    return t
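# The transform built above is a sum of outer products: each shared key k
# contributes other[k] * self[k]^T, which maps this vocabulary's vector for
# k onto the other vocabulary's vector for k.  A minimal numpy check for a
# single key (numpy stands in for Nengo's `numeric`/`array` here):
import numpy as np

_a = np.random.randn(16); _a /= np.linalg.norm(_a)   # key in this vocabulary
_b = np.random.randn(16); _b /= np.linalg.norm(_b)   # same key in the other
_t = np.outer(_b, _a)                    # same as array([_a*bb for bb in _b])
assert np.allclose(np.dot(_t, _a), _b)   # _t maps _a onto _b (unit vectors)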
def unpaddedTransform( DM, DN ):
    M,m = DM.shape
    n,N = DN.shape
    Tshape = (M*N,m*n)
    T = zeros( Tshape ) + 1.0j * zeros( Tshape )
    for i in range(M):
        for j in range(N):
            row = i*N + j
            for k in range(m):
                for l in range(n):
                    T[row,k*n + l] = DM[i,k] * DN[l,j]
#            p = ravel( repeat( DM[i,:], [n]*DM.shape[1] ) ) * ravel( tile( DN[:,j], m ) )
#            Tr[row,:] = p.real
#            Ti[row,:] = p.imag
#    for i in range(M):
#            T[row,:] = ravel( repeat( DM[i,:], [n]*DM.shape[1] ) ) * ravel( tile( DN[:,j], m ) )
#            T[row,:] = DM[i,:].repeat(n) * tile( DN[:,j].flatten(), (1,m) )
    return T
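# The quadruple loop fills T[i*N + j, k*n + l] = DM[i,k] * DN[l,j], which is
# exactly the Kronecker product of DM with the transpose of DN.  A vectorized
# equivalent and spot-check (a sketch, assuming numpy):
import numpy as np

_M, _m, _n, _N = 3, 4, 5, 2
_DM = np.random.randn(_M, _m) + 1j * np.random.randn(_M, _m)
_DN = np.random.randn(_n, _N) + 1j * np.random.randn(_n, _N)
_T = np.kron(_DM, _DN.T)                 # same layout as unpaddedTransform
assert np.isclose(_T[1*_N + 1, 2*_n + 3], _DM[1, 2] * _DN[3, 1])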
def __init__(self, name, Ashape, Bshape, rotateA=False, rotateB=False,
             pstc_gate=0.01, pstc_input=0):
    self.A = zeros( Ashape )
    self.B = zeros( Bshape )
    self.rotateA = rotateA
    self.rotateB = rotateB
    self.gate = 0

    # Determine DFT matrices
    ra,ca = Ashape
    rb,cb = Bshape
    M,N = (ra+rb, ca+cb)        # determine padded FFT size
    self.DM = DFT( M )          # get DFT matrix for rows
    self.DN = DFT( N )          # get DFT matrix for cols
    self.ZM = DFTinverse( M )   # get inverse DFT matrix
    self.ZN = DFTinverse( N )   # get inverse DFT matrix

    nef.simplenode.SimpleNode.__init__(self, name)
    self.getTermination('A').setDimensions( prod(Ashape) )
    self.getTermination('B').setDimensions( prod(Bshape) )
    self.getTermination('gate').setTau(pstc_gate)
    if( pstc_input > 0 ):
        self.getTermination('A').setTau(pstc_input)
        self.getTermination('B').setTau(pstc_input)
def compute_sparse_weights(origin, post, transform, fan_in, noise=0.1, num_samples=100):
    encoder = post.encoders
    radius = post.radii[0]

    if hasattr(transform, 'tolist'):
        transform = transform.tolist()

    approx = origin.node.getDecodingApproximator('AXON')

    # create X matrix
    X = approx.evalPoints
    X = MU.transpose([f.multiMap(X) for f in origin.functions])

    # create A matrix
    A = approx.values

    S = fan_in
    N_A = len(A)
    samples = len(A[0])
    N_B = len(encoder)
    w_sparse = np.zeros((N_B, N_A), 'f')
    noise_sd = MU.max(A) * noise

    # Solve num_samples decoders, each over a random subset of S presynaptic
    # neurons; keep each decoder paired with the subset it was solved for.
    decoder_list = [None for _ in range(num_samples)]
    index_list = [None for _ in range(num_samples)]
    for i in range(num_samples):
        indices = random.sample(range(N_A), S)
        index_list[i] = indices
        activity = [A[j] for j in indices]
        n = [[random.gauss(0, noise_sd) for _ in range(samples)] for j in range(S)]
        activity = MU.sum(activity, n)
        activityT = MU.transpose(activity)
        gamma = MU.prod(activity, activityT)
        upsilon = MU.prod(activity, X)
        gamma_inv = pinv(gamma, noise_sd * noise_sd)
        decoder_list[i] = MU.prod([[x for x in row] for row in gamma_inv], upsilon)

    for i in range(N_B):
        # Pick one sampled decoder and apply it with its own index set.
        choice = random.randrange(num_samples)
        indices = index_list[choice]
        ww = MU.prod(decoder_list[choice],
                     MU.prod(MU.transpose(transform), encoder[i]))
        for j, k in enumerate(indices):
            w_sparse[i, k] = float(ww[j]) / radius

    return list(w_sparse)
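# Each sample above solves the standard NEF least-squares decoder problem:
# gamma = A A^T (noise-regularized) and d = gamma^-1 A X.  A minimal dense
# numpy version of that solve (a sketch; `pinv(gamma, noise_sd*noise_sd)` in
# the code above is assumed to be a regularized pseudoinverse):
import numpy as np

_S, _P = 20, 200                                   # neurons, eval points
_A = np.maximum(0, np.random.randn(_S, _P))        # toy tuning curves
_X = np.sin(np.linspace(-1, 1, _P))[:, None]       # target function values
_sigma = 0.1 * _A.max()                            # assumed noise level
_gamma = np.dot(_A, _A.T) + _sigma**2 * _P * np.eye(_S)
_upsilon = np.dot(_A, _X)
_d = np.linalg.solve(_gamma, _upsilon)             # decoders, S x 1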
def complete(self, recurrent_cleanup=0, pstc_feedback=0.01):
    if recurrent_cleanup != 0:
        vocab = self.spa.sources[self.name]
        self.net.make_array('cleanup', 50, len(vocab.keys), intercept=(0,1), encoders=[[1]])
        transform = [vocab.parse(k).v for k in vocab.keys]
        self.net.connect('buffer', 'cleanup', transform=transform, pstc=pstc_feedback)
        t = numeric.zeros((vocab.dimensions, len(vocab.keys)), typecode='f')
        for i in range(len(vocab.keys)):
            for j in range(len(vocab.keys)):
                if i != j:
                    t[:, i] += vocab.parse(vocab.keys[j]).v * recurrent_cleanup
                else:
                    t[:, i] -= vocab.parse(vocab.keys[j]).v * recurrent_cleanup
        self.net.connect('cleanup', 'buffer', transform=t, pstc=pstc_feedback,
                         func=lambda x: 1)
def create_cleanup_inhibit(self, net, **params):
    for name, value in params.items():
        node = net.get(name)
        vocab = hrr.Vocabulary.registered[id(node)]
        cleanup = net.make_array('clean_' + name, 50, len(vocab.keys),
                                 intercept=(0, 1), encoders=[[1]])
        transform = [vocab.parse(k).v for k in vocab.keys]
        net.connect(node, cleanup, transform=transform)
        t = numeric.zeros((vocab.dimensions, len(vocab.keys)), typecode='f')
        for i in range(len(vocab.keys)):
            for j in range(len(vocab.keys)):
                if i != j:
                    t[:, i] += vocab.parse(vocab.keys[j]).v * value
        net.connect(cleanup, node, transform=t)
def make(net, node, index=0, dim=8, pattern='I', pstc=0.01, use_single_input=False):
    STN = node.getNode('STN')
    transform = numeric.zeros((STN.dimension, dim), 'f')

    if dim in hrr.Vocabulary.defaults.keys():
        vocab = hrr.Vocabulary.defaults[dim]
    else:
        vocab = hrr.Vocabulary(dim)

    # find an unused rule name
    terms = [t.name for t in node.terminations]
    STNterms = [t.name for t in STN.terminations]
    count = 0
    while 'rule_%02d' % count in terms or 'rule_%02d' % count in STNterms:
        count = count + 1
    name = 'rule_%02d' % count

    transform[index, :] = vocab.parse(pattern).v

    if use_single_input:
        input = node.getNode('input')
        input.addDecodedTermination(name, transform, pstc, False)
        node.exposeTermination(input.getTermination(name), name)
    else:
        StrD1 = node.getNode('StrD1')
        StrD2 = node.getNode('StrD2')
        STN.addDecodedTermination(name, transform, pstc, False)
        node.exposeTermination(STN.getTermination(name), name + '_STN')
        StrD1.addDecodedTermination(name, transform * (0.8), pstc, False)
        node.exposeTermination(StrD1.getTermination(name), name + '_StrD1')
        StrD2.addDecodedTermination(name, transform * (1.2), pstc, False)
        node.exposeTermination(StrD2.getTermination(name), name + '_StrD2')
def make(net, errName='error', N_err=50, preName='pre', postName='post', rate=5e-7):
    # get pre and post ensembles from their names
    pre = net.network.getNode(preName)
    post = net.network.getNode(postName)

    # modulatory termination (find unused termination)
    count = 0
    while 'mod_%02d' % count in [t.name for t in post.terminations]:
        count = count + 1
    modname = 'mod_%02d' % count
    post.addDecodedTermination(modname, numeric.eye(post.dimension), 0.005, True)

    # random weight matrix to initialize projection from pre to post
    def rand_weights(w):
        for i in range(len(w)):
            for j in range(len(w[0])):
                w[i][j] = random.uniform(-1e-3, 1e-3)
        return w
    weight = rand_weights(numeric.zeros((post.neurons, pre.neurons)).tolist())

    # non-decoded termination (to learn transformation)
    count = 0
    prename = pre.getName()
    while '%s_%02d' % (prename, count) in [t.name for t in post.terminations]:
        count = count + 1
    prename = '%s_%02d' % (prename, count)
    post.addPESTermination(prename, weight, 0.005, False)

    # Create error ensemble
    error = net.make(errName, N_err, post.dimension)

    # Add projections
    net.connect(error.getOrigin('X'), post.getTermination(modname))
    net.connect(pre.getOrigin('AXON'), post.getTermination(prename))

    # Set learning rule on the non-decoded termination
    net.learn(post, prename, modname, rate=rate)
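# A minimal usage sketch for the learning template above, assuming the
# Nengo 1.4 scripting API (nef.Network); the network and ensemble names are
# illustrative, not part of the template:
import nef

_net = nef.Network('Learning demo')
_net.make('pre', 50, 1)       # ensembles the template wires together
_net.make('post', 50, 1)
make(_net, errName='error', preName='pre', postName='post', rate=5e-7)
# During simulation, drive 'pre' and the 'error' ensemble; the PES rule
# shapes the learned pre->post weights from the modulatory error signal.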
def makeifftrow(D, i):
    if i == 0 or i*2 == D:
        return ifft[i]
    if i <= D/2:
        return ifft[i] + ifft[-i].real - ifft[-i].imag*1j
    return np.zeros(dimensions)
def origin_C(self):
    if( self.gate > 0.1 ):
        return zeros( self.A.shape )
    else:
        return ravel( conv2dftmatrix( self.A, self.B, self.DM, self.DN, self.ZM, self.ZN ) )
def make(net, errName='error', N_err=50, preName='pre', postName='post', rate=5e-4, oja=False):
    # get pre and post ensembles from their names
    pre = net.network.getNode(preName)
    post = net.network.getNode(postName)

    # modulatory termination (find unused termination)
    count = 0
    while 'mod_%02d' % count in [t.name for t in post.terminations]:
        count = count + 1
    modname = 'mod_%02d' % count
    post.addDecodedTermination(modname, numeric.eye(post.dimension), 0.005, True)

    # random weight matrix to initialize projection from pre to post
    def rand_weights(w):
        for i in range(len(w)):
            for j in range(len(w[0])):
                w[i][j] = random.uniform(-1e-3, 1e-3)
        return w
    weight = rand_weights(numeric.zeros((post.neurons, pre.neurons)).tolist())

    # non-decoded termination (to learn transformation)
    count = 0
    prename = pre.getName()
    while '%s_%02d' % (prename, count) in [t.name for t in post.terminations]:
        count = count + 1
    prename = '%s_%02d' % (prename, count)
    post.addPESTermination(prename, weight, 0.005, False)

    # Create error ensemble
    try:
        net.get(errName)    # if it already exists
    except StructuralException:
        net.make(errName, N_err, post.dimension)

    # Add projections
    net.connect(errName, post.getTermination(modname))
    net.connect(pre.getOrigin('AXON'), post.getTermination(prename))

    # Set learning rule on the non-decoded termination
    net.learn(post, prename, modname, rate=rate, oja=oja)

    if net.network.getMetaData("learnedterm") == None:
        net.network.setMetaData("learnedterm", HashMap())
    learnedterms = net.network.getMetaData("learnedterm")

    learnedterm = HashMap(5)
    learnedterm.put("errName", errName)
    learnedterm.put("N_err", N_err)
    learnedterm.put("preName", preName)
    learnedterm.put("postName", postName)
    learnedterm.put("rate", rate)

    learnedterms.put(errName, learnedterm)

    if net.network.getMetaData("templates") == None:
        net.network.setMetaData("templates", ArrayList())
    templates = net.network.getMetaData("templates")
    templates.add(errName)

    if net.network.getMetaData("templateProjections") == None:
        net.network.setMetaData("templateProjections", HashMap())
    templateproj = net.network.getMetaData("templateProjections")
    templateproj.put(errName, postName)
    templateproj.put(preName, postName)
def tick(self):
    length = numeric.norm(self.input)
    if length > self.input_threshold:
        self.x = [xx / length for xx in self.input]
    self.input = numeric.zeros(self.dimension)
def paintComponent(self, g):
    f, dimension, minx, maxx, miny, maxy, params, color, time_step = self.config
    core.DataViewComponent.paintComponent(self, g)

    width = self.size.width - self.border_left - self.border_right
    height = self.size.height - self.border_top - self.border_bottom - self.label_offset
    if width < 2: return

    dt_tau = None
    if self.view.tau_filter > 0:
        dt_tau = self.view.dt / self.view.tau_filter
    try:
        data = self.data.get(start=self.view.current_tick, count=1, dt_tau=dt_tau)[0]
    except:
        return

    if dimension == 2:
        if self.image is None:
            self.image = BI(width, height, BI.TYPE_INT_ARGB)
        if self.counter != time_step:
            # reuse the cached image between recomputation steps
            self.counter += 1
            g.drawImage(self.image, self.border_left, self.border_top, None)
        else:
            self.counter = 0

            # currently only for fixed grid
            grid_size = self.grid_size
            # step_size = width / grid_size

            coeffs = transpose(array([data]))
            basis = array(f(self.func_input, params))
            value = transpose(dot(transpose(basis), coeffs))

            maxv = max(value[0])
            minv = min(value[0])
            if maxv > self.max: self.max = maxv
            if minv < self.min: self.min = minv
            pvalue = (value - self.min) / (self.max - self.min)   # normalized pixel value

            if color == 'g':    ## gray
                pvalue = array(map(int, array(pvalue[0]) * 0xFF))
                pvalue = pvalue * 0x10000 + pvalue * 0x100 + pvalue
            elif color == 'c':  ## color
                pvalue = map(int, array(pvalue[0]) * 0xFF * 2)
                R = zeros(len(pvalue))
                G = zeros(len(pvalue))
                B = zeros(len(pvalue))
                for i, v in enumerate(pvalue):
                    if v < 0xFF:
                        B[i] = 0xFF - v
                        G[i] = v
                    else:
                        G[i] = 2*0xFF - v
                        R[i] = v - 0xFF
                pvalue = R * 0x10000 + G * 0x100 + B

            pvalue = reshape(pvalue, [grid_size, grid_size])
            rvalue = 0xFF000000 + pvalue

            # expand pixel value from grid to raster size
            # ratio = float(width) / grid_size
            # indeces = map(int, (floor(array(range(width)) / ratio)))
            ## Tooooooo slow here!
            # for i, ii in enumerate(indeces):
            #     for j, jj in enumerate(indeces):
            #         rvalue[i,j] = pvalue[ii,jj]
            for zoom in range(2):
                zgrid_size = grid_size * (zoom + 1)
                rvalue = reshape(rvalue, [zgrid_size * zgrid_size, 1])
                rvalue = concatenate([rvalue, rvalue], 1)
                rvalue = reshape(rvalue, [zgrid_size, zgrid_size * 2])
                rvalue = repeat(rvalue, ones(zgrid_size) * 2)

            # draw image
            rvalue = reshape(rvalue, [1, width * height])
            self.image.setRGB(0, 0, width, height, rvalue[0], 0, width)
            g.drawImage(self.image, self.border_left, self.border_top, None)

    elif dimension == 1:
        g.color = Color(0.8, 0.8, 0.8)
        g.drawRect(self.border_left, self.border_top + self.label_offset, width, height)
        g.color = Color.black
        txt = '%4g' % maxx
        bounds = g.font.getStringBounds(txt, g.fontRenderContext)
        g.drawString(txt, self.size.width - self.border_right - bounds.width/2,
                     self.size.height - self.border_bottom + bounds.height)
        txt = '%4g' % minx
        bounds = g.font.getStringBounds(txt, g.fontRenderContext)
        g.drawString(txt, self.border_left - bounds.width/2,
                     self.size.height - self.border_bottom + bounds.height)
        g.drawString('%6g' % maxy, 0, 10 + self.border_top + self.label_offset)
        g.drawString('%6g' % miny, 0, self.size.height - self.border_bottom)
        g.color = Color.black

        pdftemplate = getattr(self.view.area, 'pdftemplate', None)
        if pdftemplate is not None:
            pdf, scale = pdftemplate
            pdf.setLineWidth(0.5)
            steps = 100
            dx = float(maxx - minx) / (width * steps)
            for i in range(width * steps):
                x = minx + i * dx
                value = sum([f(j, x) * d for j, d in enumerate(data)])
                y = float((value - miny) * height / (maxy - miny))
                xx = self.border_left + i / float(steps)
                yy = self.height - self.border_bottom - y
                if i == 0:
                    pdf.moveTo((self.x + xx) * scale, 800 - (self.y + yy) * scale)
                else:
                    if 0 < y < height:
                        pdf.lineTo((self.x + xx) * scale, 800 - (self.y + yy) * scale)
            pdf.setRGBColorStroke(g.color.red, g.color.green, g.color.blue)
            pdf.stroke()
        else:
            dx = float(maxx - minx) / (width - 1)
            px, py = None, None
            for i in range(width):
                x = minx + i * dx
                value = sum([f(j, x) * d for j, d in enumerate(data)])
                y = int((value - miny) * height / (maxy - miny))
                xx = self.border_left + i
                yy = self.height - self.border_bottom - y
                if px is not None and miny < value < maxy:
                    g.drawLine(px, py, xx, yy)
                px, py = xx, yy
def reprow( r, N ):
    # Stack N copies of the row vector r into an (N x len(r)) matrix.
    A = zeros( (N,len(r)) )
    for i in range(N):
        A[i,:] = r
    return A
def reset(self, randomize=False):
    self.A = zeros( self.A.shape )
    self.B = zeros( self.B.shape )
    self.gate = 0
def create(self, net, N=50, dimensions=8, randomize=False):
    vocab = {}
    for k in self.nodes.keys():
        node = net.get(k, None)
        if node is None:
            dim = dimensions
            if randomize is False and len(self.nodes[k]) + 1 > dim:
                dim = len(self.nodes[k]) + 1
            node = net.make_array(k, N, dim)
        if not hrr.Vocabulary.registered.has_key(id(node)):
            v = hrr.Vocabulary(node.dimension, randomize=randomize)
            v.register(node)
        vocab[k] = hrr.Vocabulary.registered[id(node)]

    # ensure all terms are parsed before starting
    for k, v in self.connect.items():
        pre_name, post_name = k
        for pre_term, post_term in v:
            pre = vocab[pre_name].parse(pre_term).v
            post = vocab[post_name].parse(post_term).v

    for k, v in self.connect.items():
        pre_name, post_name = k
        t = numeric.zeros((vocab[post_name].dimensions,
                           vocab[pre_name].dimensions), typecode='f')
        for pre_term, post_term in v:
            pre = vocab[pre_name].parse(pre_term).v
            post = vocab[post_name].parse(post_term).v
            t += numeric.array([pre * bb for bb in post])
        if pre_name == post_name:
            if pre_name in self.inhibit:
                for pre_term in vocab[pre_name].keys:
                    pre = vocab[pre_name].parse(pre_term).v * self.inhibit[pre_name]
                    post_value = numeric.zeros(vocab[post_name].dimensions, typecode='f')
                    for post_term in vocab[pre_name].keys:
                        if pre_term != post_term:
                            post_value += vocab[post_name].parse(post_term).v
                    t += numeric.array([pre * bb for bb in post_value])
            if pre_name in self.excite:
                t += numeric.eye(len(t)) * self.excite[pre_name]
        net.connect(net.get(pre_name), net.get(post_name), transform=t)

    for i, (pre, post) in enumerate(self.ands):
        D = len(pre)
        node = net.make('and%02d' % i, D * N, D)
        for j, p in enumerate(pre):
            t = numeric.zeros((D, vocab[p[0]].dimensions), typecode='f')
            t[j, :] = vocab[p[0]].parse(p[1]).v * math.sqrt(D)
            net.connect(net.get(p[0]), node, transform=t)

        def result(x, v=vocab[post[0]].parse(post[1]).v):
            for xx in x:
                if xx < 0.4:
                    return [0] * len(v)   #TODO: This is pretty arbitrary....
            return v
        net.connect(node, net.get(post[0]), func=result)
    return net
def make(net, node, index=0, dimensions=8, pattern='I', pstc=0.01, use_single_input=False):
    STN = node.getNode('STN')
    transform = numeric.zeros((STN.dimension, dimensions), 'f')

    if dimensions in hrr.Vocabulary.defaults.keys():
        vocab = hrr.Vocabulary.defaults[dimensions]
    else:
        vocab = hrr.Vocabulary(dimensions)

    # find an unused rule name
    terms = [t.name for t in node.terminations]
    STNterms = [t.name for t in STN.terminations]
    count = 0
    while 'rule_%02d' % count in terms or 'rule_%02d' % count in STNterms:
        count = count + 1
    name = 'rule_%02d' % count

    transform[index, :] = vocab.parse(pattern).v

    if use_single_input:
        input = node.getNode('input')
        input.addDecodedTermination(name, transform, pstc, False)
        node.exposeTermination(input.getTermination(name), name)
    else:
        StrD1 = node.getNode('StrD1')
        StrD2 = node.getNode('StrD2')
        STN.addDecodedTermination(name, transform, pstc, False)
        node.exposeTermination(STN.getTermination(name), name + '_STN')
        StrD1.addDecodedTermination(name, transform * (0.8), pstc, False)
        node.exposeTermination(StrD1.getTermination(name), name + '_StrD1')
        StrD2.addDecodedTermination(name, transform * (1.2), pstc, False)
        node.exposeTermination(StrD2.getTermination(name), name + '_StrD2')

    if net.network.getMetaData("bgrule") == None:
        net.network.setMetaData("bgrule", HashMap())
    bgrules = net.network.getMetaData("bgrule")

    rule = HashMap(6)
    rule.put("name", node.getName())
    rule.put("index", index)
    rule.put("dimensions", dimensions)
    rule.put("pattern", pattern)
    rule.put("pstc", pstc)
    rule.put("use_single_input", use_single_input)

    bgrules.put(node.getName(), rule)

    if net.network.getMetaData("templates") == None:
        net.network.setMetaData("templates", ArrayList())
    templates = net.network.getMetaData("templates")
    templates.add(node.getName())
def __init__(self, dimensions, name='Buffer'):
    self.input = numeric.zeros(dimensions)
    self.x = [0] * dimensions
    self.dimension = dimensions
    nef.SimpleNode.__init__(self, name)
def makeifftrow(D, i):
    # note: `ifft` and `dimensions` must be defined in the enclosing scope
    if i == 0 or i * 2 == D:
        return ifft[i]
    if i <= D / 2:
        return ifft[i] + ifft[-i].real - ifft[-i].imag * 1j
    return zeros(dimensions)
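# Both makeifftrow variants exploit the conjugate symmetry of the DFT of a
# real signal (F[-i] is the complex conjugate of F[i]), so only the first
# D/2 + 1 spectrum rows need to be represented.  A numpy illustration of
# that symmetry (numpy assumed):
import numpy as np

_x = np.random.randn(8)                  # real signal
_F = np.fft.fft(_x)
assert np.allclose(_F[-1], np.conj(_F[1]))   # upper half is redundant
assert np.allclose(_F[-3], np.conj(_F[3]))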
def __init__(self, net, productions, dimensions, neurons_buffer=40, neurons_bg=40,
             neurons_product=300, subdimensions=None, bg_radius=1.5,
             tau_gaba=0.008, tau_ampa=0.002, noise=None, vocab=None, quick=True,
             bg_output_weight=-3, bg_same_neurons=True, align_hrr=False,
             direct_convolution=False, direct_buffer=False, direct_gate=False,
             direct_same=False, buffer_mode='rate'):
    if vocab is None:
        if dimensions in hrr.Vocabulary.defaults and \
           hrr.Vocabulary.defaults[dimensions].randomize != align_hrr:
            vocab = hrr.Vocabulary.defaults[dimensions]
        else:
            vocab = hrr.Vocabulary(dimensions, randomize=not align_hrr)
    self.vocab = vocab
    self.net = net
    self.production_count = len(productions.productions)
    self.dimensions = dimensions
    self.direct_convolution = direct_convolution
    self.direct_buffer = direct_buffer
    self.direct_gate = direct_gate
    self.direct_same = direct_same

    D = len(productions.productions)
    bias = net.make_input('prod_bias', [1])
    prod = net.make_array('prod', neurons_bg, D, intercept=(0.2, 1),
                          encoders=[[1]], quick=quick)
    net.connect(bias, prod)

    input = []
    transform = []
    for k in productions.get_buffers():
        if self.direct_buffer is True or \
           (isinstance(self.direct_buffer, list) and k in self.direct_buffer):
            buffer = net.make('buffer_'+k, 1, dimensions, quick=True, mode='direct')
        else:
            if subdimensions != None:
                buffer = net.make_array('buffer_'+k, neurons_buffer*subdimensions,
                                        dimensions/subdimensions,
                                        dimensions=subdimensions,
                                        quick=quick, mode=buffer_mode)
            else:
                buffer = net.make('buffer_'+k, neurons_buffer*dimensions, dimensions,
                                  quick=quick, mode=buffer_mode)
        input.append(buffer)
        transform.append(productions.calc_input_transform(k, vocab))

    for k in productions.get_same_buffers():
        a, b = k.split('_sameas_', 1)
        if self.direct_same:
            dp = net.make('dp_%s_%s' % (a, b), 1, 1, quick=quick, mode='direct')
        else:
            dp = net.make('dp_%s_%s' % (a, b), neurons_buffer, 1, quick=quick)
        transform.append(productions.calc_input_same_transform(k, vocab))
        input.append(dp)

    basalganglia.make_basal_ganglia(net, input, prod, D, neurons=neurons_bg,
                                    input_transform=transform,
                                    output_weight=bg_output_weight, noise=noise,
                                    radius=bg_radius, same_neurons=bg_same_neurons)

    for k in productions.get_same_buffers():
        a, b = k.split('_sameas_', 1)
        if self.direct_same:
            same = net.make_array('same_%s_%s' % (a, b), 1, dimensions,
                                  dimensions=2, quick=quick, mode='direct')
        else:
            same = net.make_array('same_%s_%s' % (a, b), neurons_product*2,
                                  dimensions, dimensions=2, quick=quick,
                                  encoders=[[1,1],[1,-1],[-1,-1],[-1,1]])
        t1 = []
        t2 = []
        for i in range(dimensions):
            m1 = numeric.zeros((2, dimensions), typecode='f')
            m2 = numeric.zeros((2, dimensions), typecode='f')
            m1[0, i] = 1.0
            m2[1, i] = 1.0
            for row in m1: t1.append(row)
            for row in m2: t2.append(row)
        net.connect('buffer_'+a, same, transform=t1, pstc=tau_ampa)
        net.connect('buffer_'+b, same, transform=t2, pstc=tau_ampa)
        def product(x):
            return x[0]*x[1]
        net.connect(same, 'dp_%s_%s' % (a, b), func=product,
                    transform=[[1]*dimensions], pstc=tau_ampa)

    for k in productions.get_direct_actions():
        if self.direct_buffer:
            net.make('thal_'+k, 1, dimensions, quick=True, mode='direct')
        else:
            net.make('thal_'+k, neurons_buffer*dimensions, dimensions, quick=quick)
        net.connect('thal_'+k, 'buffer_'+k, pstc=tau_ampa)
        net.connect(prod, 'thal_'+k,
                    transform=productions.calc_output_transform(k, vocab),
                    pstc=tau_ampa)

    for k in productions.get_transform_actions():
        a, b = k.split('_to_', 1)
        name = 'thal_%s_%s' % (a, b)
        net.make(name, neurons_buffer*dimensions, dimensions, quick=quick)
        net.connect(prod, name,
                    transform=productions.calc_output_transform(k, vocab),
                    pstc=tau_ampa)
        conv = nef.convolution.make_convolution(net, k, name, 'buffer_'+a,
                                                'buffer_'+b, 1, quick=True,
                                                mode='direct')

    for k in productions.get_gate_actions():
        a, b = k.split('_to_', 1)
        if self.direct_gate:
            c = DirectChannel('channel_%s_to_%s' % (a, b), dimensions,
                              pstc_gate=tau_gaba, pstc_input=tau_ampa)
            net.add(c)
            net.connect('buffer_'+a, c.getTermination('input'))
            net.connect(c.getOrigin('X'), 'buffer_'+b, pstc=tau_ampa)
        else:
            c = net.make('channel_%s_to_%s' % (a, b), neurons_buffer*dimensions,
                         dimensions, quick=quick)
            net.connect('buffer_'+a, c, pstc=tau_ampa)
            net.connect(c, 'buffer_'+b, pstc=tau_ampa)
            c.addTermination('gate', [[-10.0]]*(neurons_buffer*dimensions),
                             tau_gaba, False)
        name = 'gate_%s_%s' % (a, b)
        net.make(name, neurons_buffer, 1, quick=quick, encoders=[[1]], intercept=(0.3, 1))
        net.connect('prod', name, transform=productions.calc_output_gates(k, vocab),
                    pstc=tau_ampa)
        net.connect(bias, name)
        net.connect(name, c.getTermination('gate'))

    for k in productions.get_gate_deconv_actions():
        a, c = k.split('_to_', 1)
        a, b = a.split('_deconv_', 1)
        if self.direct_convolution:
            conv = nef.convolution.make_convolution(net, '%s_deconv_%s_to_%s' % (a, b, c),
                                                    'buffer_'+a, 'buffer_'+b, 'buffer_'+c,
                                                    1, quick=True, invert_second=True,
                                                    mode='direct', pstc_in=tau_ampa,
                                                    pstc_out=tau_ampa, pstc_gate=tau_gaba)
        else:
            conv = nef.convolution.make_convolution(net, '%s_deconv_%s_to_%s' % (a, b, c),
                                                    'buffer_'+a, 'buffer_'+b, 'buffer_'+c,
                                                    neurons_product, quick=quick,
                                                    invert_second=True,
                                                    pstc_in=tau_ampa, pstc_out=tau_ampa)
            conv.addTermination('gate', [[[-100.0]]*neurons_product]*conv.dimension,
                                tau_gaba, False)
        name = 'gate_%s_%s_%s' % (a, b, c)
        net.make(name, neurons_buffer, 1, quick=quick, encoders=[[1]], intercept=(0.3, 1))
        net.connect('prod', name, transform=productions.calc_output_gates(k, vocab),
                    pstc=tau_ampa)
        net.connect(bias, name)
        net.connect(name, conv.getTermination('gate'))