def setup_network(self, rng):
    """Create the grid network and imprint the given route images.

    Builds an M x N grid and connects node pairs with a distance-dependent
    probability.  Pairs of cells that are both active in at least one of the
    first ``num_imprinted`` route patterns get a strong link (strength 15);
    any other pair may still get a weak background link (strength 1) with a
    lower probability.

    Parameters
    ----------
    rng : numpy.random.RandomState
        Seeded random source driving every connection decision, so the
        resulting network is reproducible for a given seed.

    Side effects: replaces ``self.network`` with the freshly built graph.
    """
    print("Creating network from " + str(self.num_imprinted) + " route images")
    # Generate the network using route images.
    self.network = net.grid_empty(self.M, self.N)
    nodes = self.network.nodes()
    for i, u in enumerate(nodes):
        for v in nodes[i + 1:]:
            # Check if both nodes participate in the same imprinted pattern.
            in_pattern = False
            for pat in range(self.num_imprinted):
                if self.route_patterns[u[0], u[1], pat] and self.route_patterns[v[0], v[1], pat]:
                    in_pattern = True
                    break
            # The Euclidean distance is the same for both probabilities;
            # compute it once instead of twice per pair.
            dist = np.sqrt((u[0] - v[0])**2 + (u[1] - v[1])**2)
            p_connect_pattern = max(1.0 / (self.conn_b * dist) - self.conn_c, 0)
            p_connect_background = max(1.0 / (self.conn_b_bck * dist) - self.conn_c_bck, 0)
            # If both cells share a pattern, make a strong link with
            # distance-dependent probability.
            if in_pattern and rng.rand() < p_connect_pattern:
                self.network.add_edge(u, v, {"strength": 15})
            # Fewer and weaker background connections are created where
            # there was no common input.  (Note: a pattern pair that failed
            # the first draw still gets this second chance, as before.)
            elif rng.rand() < p_connect_background:
                self.network.add_edge(u, v, {"strength": 1})
    print("Done")
# define measurement spots mrg = 2 left_spot = line(mrg, mrg + 2) right_spot = line(N - 2 - mrg, N - mrg) connected_line = line(mrg, N - mrg) patterns = [connected_line] # generate the network conn_b = 1 conn_c = 0.2 conn_b_bck = 1 conn_c_bck = 0.3 network_template = net.grid_empty(M, N) nodes = network_template.nodes() for i, u in enumerate(nodes): for v in nodes[i + 1:]: # see if this pair of cells is part of the line: in_pattern = False for pat in patterns: if pat[u[0], u[1]] and pat[v[0], v[1]]: in_pattern = True break dist = np.sqrt((u[0] - v[0])**2 + (u[1] - v[1])**2) # p = max(1.0/(conn_b*dist-conn_c),0) p_patt = max(1.0 / (conn_b * dist) - conn_c, 0) p_backg = max(1.0 / (conn_b_bck * dist) - conn_c_bck, 0)
def setup(seed,seednr,num_patterns): print "sampling network",seednr,"with a pool of",num_patterns,"patterns" # Instead of generating patterns, get patterns from 'all_views' folder ## tp275 ## patterns = getPatterns.getPatternsInDirectory( '/home/ec2-user/environment/synchrony/images/datasets/boxes_90x7_002/', M, N, rotation=True, rot_step=1) rng = RandomState(seed) # generate the network: # import images to generate the network, with distance-dependent connection probability, # with stronger links between cells that participate in the first num_imprinted patterns. network = net.grid_empty(M,N) nodes = network.nodes() route_patterns = getPatterns.getPatternsInDirectory( '/home/ec2-user/environment/synchrony/images/routes/route_boxes_90x7/', M, N) for i,u in enumerate(nodes): for v in nodes[i+1:]: # if both nodes participate in the same pattern, make a strong link, # with some probability depending on distance in_pattern=False for pat in range(num_imprinted): if route_patterns[u[0],u[1],pat] and route_patterns[v[0],v[1],pat]: in_pattern = True break p_connect_pattern = max(1.0/(conn_b*np.sqrt((u[0]-v[0])**2 + (u[1]-v[1])**2))-conn_c,0) p_connect_background = max(1.0/(conn_b_bck*np.sqrt((u[0]-v[0])**2 + (u[1]-v[1])**2))-conn_c_bck,0) if in_pattern and rng.rand()<p_connect_pattern: network.add_edge(u,v,{"strength":15}) # fewer and weaker background connections are created where there was no common input. 
elif rng.rand()<p_connect_background: network.add_edge(u,v,{"strength":1}) # create a setup (experiment object) for each pattern to be presented to the network experiments_this_net = [] similarities_this_net = [] for i in range(num_patterns): i = np.random.randint(0, num_patterns) # Make pattern selection random for more variation (hopefully) current = patterns[:,:,i] ex = lab.experiment(network,[rng.randint(1,10000)],inputc=current, name="seed "+str(seednr)+" pattern "+str(i), downsample=downsample, verbose=True, con_upstr_exc=2, measures=[lab.spikey_rsync(roi=current,name="rsync",tau=10.0/downsample), lab.mean_spikecount(roi=current,name="spikes"), ]) # calculate this pattern's similarity to imprinted patterns # (the fraction of its cells it shares with an imprinted pattern) # Change: 'patterns' has been changed to 'route_patterns' where appropriate ## tp275 ## overlaps = [np.sum(current*route_patterns[:,:,j])/float(np.sum(current)) for j in range(num_imprinted)] nr_active = np.sum(current) # nr of active cells in the pattern (for normalization) all_imprinted = np.sum(route_patterns[:,:,0:num_imprinted],axis=2) all_imprinted[all_imprinted>1] = 1 similarity = np.sum(current*all_imprinted)/float(nr_active) activated_subnet = network.subgraph([node for node in zip(*np.where(current))]) edges = [edge for edge in activated_subnet.edges_iter(data=True) if edge[2]["strength"]>1] ex.network_match = len(edges)/float(np.sum(current > 0)) # import ipdb; ipdb.set_trace() ex.similarity = similarity ex.similar_to = zip(overlaps,[route_patterns[:,:,j].copy() for j in range(num_imprinted)]) similarities_this_net.append(similarity) # if i<num_imprinted: # ex.name+="_imprinted" experiments_this_net.append(ex) # sort all experiments that use this network by pattern similarity sort = np.digitize(similarities_this_net,bins,right=True) experiments_binned = [[] for _ in bins] similarities_binned = [[] for _ in bins] for i,ex in enumerate(experiments_this_net): 
experiments_binned[sort[i]].append(ex) similarities_binned[sort[i]].append(ex.similarity) # check whether there are enough experiments in each pattern similarity bin if np.min([len(s) for s in similarities_binned]) >= patterns_per_bin: return np.array([column[0:patterns_per_bin] for column in experiments_binned]).flatten() elif num_patterns<num_patterns_initial*100: print "seednr "+str(seednr)+": "+str(num_patterns)+" sample patterns not enough, trying with more" return setup(seed,seednr,num_patterns*2) else: raise Exception("couldn't find required number of samples in each bin after "+str(num_patterns)+" patterns")
def setup(seed, seednr, num_patterns):
    """Sample one network with generated patterns and build its experiments.

    Generates ``num_patterns`` random blob-like patterns, wires a grid
    network with distance-dependent connectivity (strong links between cells
    sharing one of the first ``num_imprinted`` patterns), then creates one
    ``lab.experiment`` per pattern, annotated with its similarity to the
    imprinted patterns, and returns them binned by similarity.

    NOTE(review): relies on module-level globals (M, N, pattern_b, pattern_c,
    num_imprinted, conn_b, conn_c, conn_b_bck, conn_c_bck, bins,
    patterns_per_bin, num_patterns_initial, downsample) — confirm they are
    defined before this is called.  The exact sequence of `rng` draws is
    load-bearing for reproducibility; do not reorder statements.
    """
    print "sampling network", seednr, "with a pool of", num_patterns, "patterns"
    rng = RandomState(seed)
    # generate patterns by choosing a point on the network and activating a random choice of cells near it
    patterns = np.zeros((M, N, num_patterns))
    for pat in range(num_patterns):
        margin = 2  # keep pattern centers away from the grid border
        center = rng.randint(margin, M - margin), rng.randint(margin, N - margin)
        for i in range(M):
            for j in range(N):
                # activation probability falls off with distance from the
                # center; the center cell itself is always active (p_on = 1)
                p_on = max(
                    1.0 / (pattern_b * np.sqrt((center[0] - i)**2 + (center[1] - j)**2)) - pattern_c,
                    0) if (i, j) != center else 1
                #patterns[i,j,pat] = p_on
                if rng.rand() < p_on:
                    patterns[i, j, pat] = 1
        ## visualize patterns:
        # clf()
        # imshow(patterns[:,:,pat]);colorbar()
        # import pdb;pdb.set_trace()
    rng = RandomState(
        seed
    )  # reinitialize rng so the sampled network is not dependent on the nr of previously sampled patterns
    # generate the network:
    # random network with distance-dependent connection probability,
    # with stronger links between cells that participate in the first num_imprinted patterns.
    network = net.grid_empty(M, N)
    nodes = network.nodes()
    for i, u in enumerate(nodes):
        for v in nodes[i + 1:]:
            # if both nodes participate in the same pattern, make a strong link,
            # with some probability depending on distance
            in_pattern = False
            for pat in range(num_imprinted):
                if patterns[u[0], u[1], pat] and patterns[v[0], v[1], pat]:
                    in_pattern = True
                    break
            p_connect_pattern = max(
                1.0 / (conn_b * np.sqrt((u[0] - v[0])**2 + (u[1] - v[1])**2)) - conn_c, 0)
            p_connect_background = max(
                1.0 / (conn_b_bck * np.sqrt((u[0] - v[0])**2 + (u[1] - v[1])**2)) - conn_c_bck, 0)
            if in_pattern and rng.rand() < p_connect_pattern:
                network.add_edge(u, v, {"strength": 15})
            # fewer and weaker background connections are created where there was no common input.
            elif rng.rand() < p_connect_background:
                network.add_edge(u, v, {"strength": 1})
    # create a setup (experiment object) for each pattern to be presented to the network
    experiments_this_net = []
    similarities_this_net = []
    for i in range(num_patterns):
        current = patterns[:, :, i]
        ex = lab.experiment(network, [rng.randint(1, 10000)],
                            inputc=current,
                            name="seed " + str(seednr) + " pattern " + str(i),
                            downsample=downsample,
                            verbose=True,
                            con_upstr_exc=2,
                            measures=[
                                lab.spikey_rsync(roi=current, name="rsync", tau=10.0 / downsample),
                                lab.mean_spikecount(roi=current, name="spikes"),
                            ])
        # calculate this pattern's similarity to imprinted patterns
        # (the fraction of its cells it shares with an imprinted pattern)
        overlaps = [
            np.sum(current * patterns[:, :, j]) / float(np.sum(current))
            for j in range(num_imprinted)
        ]
        nr_active = np.sum(
            current)  # nr of active cells in the pattern (for normalization)
        # union of all imprinted patterns, binarized
        all_imprinted = np.sum(patterns[:, :, 0:num_imprinted], axis=2)
        all_imprinted[all_imprinted > 1] = 1
        similarity = np.sum(current * all_imprinted) / float(nr_active)
        # fraction of strong (imprinted) edges among the cells this pattern activates
        activated_subnet = network.subgraph(
            [node for node in zip(*np.where(current))])
        edges = [
            edge for edge in activated_subnet.edges_iter(data=True)
            if edge[2]["strength"] > 1
        ]
        ex.network_match = len(edges) / float(np.sum(current > 0))
        # import ipdb; ipdb.set_trace()
        ex.similarity = similarity
        ex.similar_to = zip(
            overlaps, [patterns[:, :, j].copy() for j in range(num_imprinted)])
        similarities_this_net.append(similarity)
        # if i<num_imprinted:
        #     ex.name+="_imprinted"
        experiments_this_net.append(ex)
    # sort all experiments that use this network by pattern similarity
    sort = np.digitize(similarities_this_net, bins, right=True)
    experiments_binned = [[] for _ in bins]
    similarities_binned = [[] for _ in bins]
    for i, ex in enumerate(experiments_this_net):
        experiments_binned[sort[i]].append(ex)
        similarities_binned[sort[i]].append(ex.similarity)
    # check whether there are enough experiments in each pattern similarity bin
    if np.min([len(s) for s in similarities_binned]) >= patterns_per_bin:
        return np.array([
            column[0:patterns_per_bin] for column in experiments_binned
        ]).flatten()
    elif num_patterns < num_patterns_initial * 100:
        # not enough samples in some bin: retry with a doubled pattern pool
        print "seednr " + str(seednr) + ": " + str(
            num_patterns) + " sample patterns not enough, trying with more"
        return setup(seed, seednr, num_patterns * 2)
    else:
        raise Exception(
            "couldn't find required number of samples in each bin after " +
            str(num_patterns) + " patterns")