Beispiel #1
0
    def setup_experiments(self, rng):
        """Build one lab.experiment per input pattern, binned by source image.

        For every pattern slice in ``self.patterns`` this creates an
        experiment (with rsync and spike-count measures), scores the
        pattern's similarity against the imprinted route patterns, and
        appends both to ``self.experiments`` / ``self.similarities``,
        indexed by the un-rotated source image the pattern came from.

        rng -- seeded RandomState; consumed once per pattern to draw the
               experiment's simulation seed.
        """
        num_patterns = len(self.patterns[0][0])
        # Each source image in the folder appears once per rotation step.
        self.num_unrotated = int(num_patterns / self.rotations_per_image)
        # One bucket per un-rotated source image.
        self.experiments = [[] for _ in range(self.num_unrotated)]
        self.similarities = [[] for _ in range(self.num_unrotated)]

        for idx in range(num_patterns):
            pattern = self.patterns[:, :, idx]
            measures = [
                lab.spikey_rsync(roi=pattern,
                                 name="rsync",
                                 tau=10.0 / self.downsample),
                lab.mean_spikecount(roi=pattern, name="spikes")
            ]
            ex = lab.experiment(self.network, [rng.randint(1, 10000)],
                                inputc=pattern,
                                name="pattern " + str(idx),
                                downsample=self.downsample,
                                verbose=True,
                                con_upstr_exc=2,
                                measures=measures)

            # Similarity of this pattern to the imprinted route patterns:
            # the fraction of its active cells shared with an imprinted pattern.
            active_count = float(np.sum(pattern))
            overlaps = [
                np.sum(pattern * self.route_patterns[:, :, j]) / active_count
                for j in range(self.num_imprinted)
            ]
            # Binarized union of all imprinted patterns.
            imprinted_union = np.sum(
                self.route_patterns[:, :, 0:self.num_imprinted], axis=2)
            imprinted_union[imprinted_union > 1] = 1
            similarity = np.sum(pattern * imprinted_union) / active_count

            # Fraction of strong (imprinted, strength > 1) edges inside the
            # subnetwork of cells activated by this pattern, per active cell.
            active_nodes = [node for node in zip(*np.where(pattern))]
            subnet = self.network.subgraph(active_nodes)
            strong_edges = [
                e for e in subnet.edges_iter(data=True)
                if e[2]["strength"] > 1
            ]
            ex.network_match = len(strong_edges) / float(np.sum(pattern > 0))
            ex.similarity = similarity
            ex.similar_to = zip(overlaps, [
                self.route_patterns[:, :, j].copy()
                for j in range(self.num_imprinted)
            ])

            # Bin by un-rotated source image.
            image_idx = int(np.floor(idx / self.rotations_per_image))
            self.experiments[image_idx].append(ex)
            self.similarities[image_idx].append(similarity)
Beispiel #2
0
 def setup(inputc, name):
     """Build a standard experiment for *inputc* measuring global rsync
     plus jittered spike detection on the left and right spots."""
     measures = [
         lab.spikey_rsync(roi=left_spot + right_spot,
                          name="$R_{syn}$",
                          tau=10.0 / downsample),
         lab.spikedetect_add_jitter(roi=left_spot,
                                    name="left",
                                    jitter=jitter),
         lab.spikedetect_add_jitter(roi=right_spot,
                                    name="right",
                                    jitter=jitter),
     ]
     return lab.experiment(network,
                           lab.standard_seeds[0:16],
                           name=name,
                           inputc=inputc,
                           T=5000,
                           verbose=True,
                           downsample=downsample,
                           con_upstr_exc=con_upstr_exc,
                           measures=measures)
Beispiel #3
0
    # Single repetition; stimulate every cell except a one-cell border.
    seeds = lab.standard_seeds[0:1]
    inputc = np.ones((M, N))
    inputc[0, :] = 0
    inputc[-1, :] = 0
    inputc[:, 0] = 0
    inputc[:, -1] = 0

    ex = lab.experiment(network,
                        seeds,
                        inputc=inputc,
                        T=300,
                        transient=1000,
                        name=par,
                        verbose=True,
                        downsample=downsample,
                        con_upstr_exc=con_upstr_exc,
                        measures=[lab.spikey_rsync(roi=inputc, name='rsync')])

    # Convert simulation steps to milliseconds for the animation timebase.
    delta_t = ex.simulation_kwargs['delta_t']
    ms_per_step = delta_t * downsample

    close('all')
    # Render trial 0 as an mp4, plotting the network as a graph.
    ex.saveanimtr(trialnr=0,
                  start=0,
                  skip=6,
                  grid_as='graph',
                  filename="{}.mp4".format(ut.slug(ex.name)),
                  ms_per_step=ms_per_step,
                  dpi=70)

# Terminal bell to signal completion (Python 2 print statement).
print '\a'
Beispiel #4
0
                                     init,
                                     10,
                                     scatter=scatter,
                                     method="mean_shortest")
        # One experiment per scatter value: final rsync plus growing-window
        # rsync/spikecount time courses over the scattered pattern.
        experiments.append(
            lab.experiment(network,
                           seeds,
                           inputc=pattern,
                           transient=1000,
                           name="scatter " + str(scatter),
                           verbose=True,
                           downsample=downsample,
                           con_upstr_exc=con_upstr_exc,
                           measures=[
                               lab.spikey_rsync(roi=pattern,
                                                name="rsync",
                                                tau=10.0 / downsample),
                               lab.spikey_rsync(roi=pattern,
                                                window="growing",
                                                increment=2000 / downsample,
                                                name="growing_rsync",
                                                tau=10.0 / downsample),
                               lab.mean_spikecount(roi=pattern,
                                                   window="growing",
                                                   increment=2000 / downsample,
                                                   name="growing_spikecount")
                           ]))

    # # Plot
    close('all')
Beispiel #5
0
    # create the network
    network = net.grid_eightNN(M,N,strength=strength)


    # set up experiments
    seeds = lab.standard_seeds[0:10]


    # Three stimuli derived from `line`, increasingly scattered (sct=0,5,10).
    scattervalues = [0,5,10]
    names = ['A','B','C']
    experiments = []
    for name,scatter in zip(names,scattervalues):
        pattern = net.scramble_stimulus(network,line,sct=scatter,seed=1)
        experiments.append(lab.experiment(network,seeds,inputc=pattern,transient=1000, name=name, verbose=True, downsample=downsample,con_upstr_exc=con_upstr_exc,
                              measures=[lab.spikey_rsync(roi=pattern,name="$R_{syn}$", tau=10.0/downsample),
                                        lab.spikey_rsync(roi=pattern,window="growing",increment=2000/downsample,name="$R_{syn, t}$", tau=10.0/downsample),
                                        lab.mean_spikecount(roi=pattern,window="growing",increment=2000/downsample,name="$spikecount_{t}$")
                                        ]))

    # # Plot
    close('all')

    plo.eplotsetup(experiments[0], measurename='$R_{syn}$')

    # viewtrial() triggers a simulation run for the first experiment.
    experiments[0].viewtrial()
    savefig(par+'_example_trial.pdf', bbox_inches='tight')

    delta_t = experiments[0].simulation_kwargs['delta_t']

    # Boxplot comparison of rsync across the three scatter conditions.
    plo.compare(experiments,grid_as="graph",plot_as='boxplot',measurename="$R_{syn}$", vrange=[0, 1], label_names=True)
Beispiel #6
0
# the subpopulation whose synchrony we want to measure is defined as a binary mask.
# (a 3x3 patch; assumes inputc is at least 5x5 so the patch fits — TODO confirm)
roi = np.zeros(inputc.shape)
roi[1:4, 1:4] = 1

# let's say we want to compare two networks with different synapse strengths.
M, N = inputc.shape
networkA = net.grid_eightNN(M, N, strength=10)
networkB = net.grid_eightNN(M, N, strength=1)

# set up the experiments. we want to run 5 repetitions and each time measure the synchrony in the measured region.
seeds = lab.standard_seeds[0:5]
exprA = lab.experiment(networkA,
                       seeds,
                       inputc=inputc,
                       measures=[lab.spikey_rsync(roi)],
                       T=300,
                       name="A",
                       verbose=True)
exprB = lab.experiment(networkB,
                       seeds,
                       inputc=inputc,
                       measures=[lab.spikey_rsync(roi)],
                       T=300,
                       name="B",
                       verbose=True)

# plot voltage traces of all cells from one of the repetitions:
exprA.viewtrial()  # causes a single simulation to be computed
plt.show()
Beispiel #7
0
def setup(seed,seednr,num_patterns):
    print "sampling network",seednr,"with a pool of",num_patterns,"patterns"
    # Instead of generating patterns, get patterns from 'all_views' folder ## tp275 ##
    patterns = getPatterns.getPatternsInDirectory(
                '/home/ec2-user/environment/synchrony/images/datasets/boxes_90x7_002/', M, N, rotation=True, rot_step=1)
    rng = RandomState(seed)

    # generate the network:
    # import images to generate the network, with distance-dependent connection probability,
    # with stronger links between cells that participate in the first num_imprinted patterns.
    network = net.grid_empty(M,N)
    nodes = network.nodes()
    route_patterns = getPatterns.getPatternsInDirectory(
                '/home/ec2-user/environment/synchrony/images/routes/route_boxes_90x7/', M, N)
    
    for i,u in enumerate(nodes):
        for v in nodes[i+1:]:
            # if both nodes participate in the same pattern, make a strong link,
            # with some probability depending on distance
            in_pattern=False
            for pat in range(num_imprinted):
                if route_patterns[u[0],u[1],pat] and route_patterns[v[0],v[1],pat]:
                    in_pattern = True
                    break

            p_connect_pattern    = max(1.0/(conn_b*np.sqrt((u[0]-v[0])**2 + (u[1]-v[1])**2))-conn_c,0)
            p_connect_background = max(1.0/(conn_b_bck*np.sqrt((u[0]-v[0])**2 + (u[1]-v[1])**2))-conn_c_bck,0)
            if in_pattern and rng.rand()<p_connect_pattern:
                network.add_edge(u,v,{"strength":15})
            # fewer and weaker background connections are created where there was no common input.
            elif rng.rand()<p_connect_background:
                network.add_edge(u,v,{"strength":1})

    # create a setup (experiment object) for each pattern to be presented to the network
    experiments_this_net = []
    similarities_this_net = []
    for i in range(num_patterns):
        i = np.random.randint(0, num_patterns)  # Make pattern selection random for more variation (hopefully)
        current = patterns[:,:,i]
        ex = lab.experiment(network,[rng.randint(1,10000)],inputc=current, name="seed "+str(seednr)+" pattern "+str(i), downsample=downsample, verbose=True, con_upstr_exc=2,
                                            measures=[lab.spikey_rsync(roi=current,name="rsync",tau=10.0/downsample),
                                                      lab.mean_spikecount(roi=current,name="spikes"),
                                                      ])
        # calculate this pattern's similarity to imprinted patterns
        # (the fraction of its cells it shares with an imprinted pattern)
        # Change: 'patterns' has been changed to 'route_patterns' where appropriate ## tp275 ##
        overlaps = [np.sum(current*route_patterns[:,:,j])/float(np.sum(current)) for j in range(num_imprinted)]
        nr_active = np.sum(current) # nr of active cells in the pattern (for normalization)
        all_imprinted = np.sum(route_patterns[:,:,0:num_imprinted],axis=2)
        all_imprinted[all_imprinted>1] = 1
        similarity = np.sum(current*all_imprinted)/float(nr_active)

        activated_subnet = network.subgraph([node for node in zip(*np.where(current))])
        edges = [edge for edge in activated_subnet.edges_iter(data=True) if edge[2]["strength"]>1]

        ex.network_match = len(edges)/float(np.sum(current > 0))

        # import ipdb; ipdb.set_trace()

        ex.similarity = similarity
        ex.similar_to = zip(overlaps,[route_patterns[:,:,j].copy() for j in range(num_imprinted)])
        similarities_this_net.append(similarity)
        # if i<num_imprinted:
        #     ex.name+="_imprinted"
        experiments_this_net.append(ex)

    # sort all experiments that use this network by pattern similarity
    sort = np.digitize(similarities_this_net,bins,right=True)
    experiments_binned = [[] for _ in bins]
    similarities_binned = [[] for _ in bins]
    for i,ex in enumerate(experiments_this_net):
        experiments_binned[sort[i]].append(ex)
        similarities_binned[sort[i]].append(ex.similarity)

    # check whether there are enough experiments in each pattern similarity bin
    if np.min([len(s) for s in similarities_binned]) >= patterns_per_bin:
        return np.array([column[0:patterns_per_bin] for column in experiments_binned]).flatten()
    elif num_patterns<num_patterns_initial*100:
        print "seednr "+str(seednr)+": "+str(num_patterns)+" sample patterns not enough, trying with more"
        return setup(seed,seednr,num_patterns*2)
    else:
        raise Exception("couldn't find required number of samples in each bin after "+str(num_patterns)+" patterns")
Beispiel #8
0
    savefig('activity__{}.pdf'.format(slug(titlestr)), dpi=300)

    # For the unconnected control (strength == 0), also save a single-cell
    # voltage trace.  Cell (2, 4) appears to be an arbitrary example cell —
    # TODO confirm.
    if pars['strength'] == 0:
        figure(figsize=(14, 1.5))
        trace = volt[2,4,:]
        plot(np.linspace(0,max_seconds,volt.shape[-1]), trace, linewidth=0.5)
        xlim(0,max_seconds)
        ylabel('mV')
        tight_layout()
        savefig('singlecell_{}.pdf'.format(k), dpi=300)

    close('all')


    # Recompute rsync directly from the recorded spikes for this run and
    # collect it for the summary plots below.
    import hcLab
    rs_s = hcLab.spikey_rsync(tau=2, delta_t=delta_t*downsample, roi=inputc_)
    rsync = rs_s.compute(np.array(spikes, dtype='bool'))
    print rsync
    rsyncs.append(rsync)


# One figure per inhibition parameter: rsync plotted against parameter value.
_plot_specs = [("inh_random", "$g^{{inh, random}}$"),
               ("inh_recurrent", "$g^{{inh, recurr.}}$"),
               ("inh_lateral", "$g^{{inh, lateral}}$")]
for param_key, axis_label in _plot_specs:
    figure(figsize=(3, 2))
    # Collect (parameter value, rsync) pairs for the runs that opted into
    # this line plot via their 'lineplots' list.
    points = []
    for rsync_val, run_pars in zip(rsyncs, parameters):
        if param_key in run_pars.get('lineplots', []):
            points.append((run_pars[param_key], rsync_val))
    plot(*zip(*points))
    # Dotted baseline at the first run's rsync value.
    hlines(rsyncs[0], 0, 20, linestyles=':')
    ylim(0, 0.3)
    xlabel(axis_label)
    ylabel("$R_{syn}$")
    tight_layout()
    savefig("rsync_{}.pdf".format(param_key))
Beispiel #9
0
def setup(seed, seednr, num_patterns):
    """Sample a network with imprinted random patterns and build experiments
    binned by pattern similarity.

    Generates num_patterns random blob patterns, builds a network whose
    connection probability decays with distance (strong links where both
    cells share one of the first num_imprinted patterns), creates one
    lab.experiment per pattern, and bins the experiments by similarity to
    the imprinted patterns.  Recurses with a doubled pool if any bin comes
    up short; raises once the pool reaches 100x num_patterns_initial.

    All randomness is drawn from RandomState(seed), so results are
    reproducible per (seed, seednr, num_patterns).
    """
    print "sampling network", seednr, "with a pool of", num_patterns, "patterns"
    rng = RandomState(seed)

    # generate patterns by choosing a point on the network and activating a random choice of cells near it
    patterns = np.zeros((M, N, num_patterns))
    for pat in range(num_patterns):
        margin = 2
        # Blob center, kept `margin` cells away from the grid border.
        center = rng.randint(margin,
                             M - margin), rng.randint(margin, N - margin)
        for i in range(M):
            for j in range(N):
                # On-probability decays with distance from the center
                # (center cell itself is always on).
                p_on = max(
                    1.0 / (pattern_b * np.sqrt((center[0] - i)**2 +
                                               (center[1] - j)**2)) -
                    pattern_c, 0) if (i, j) != center else 1
                #patterns[i,j,pat] = p_on
                if rng.rand() < p_on:
                    patterns[i, j, pat] = 1
        ## visualize patterns:
        # clf()
        # imshow(patterns[:,:,pat]);colorbar()
        # import pdb;pdb.set_trace()
    rng = RandomState(
        seed
    )  # reinitialize rng so the sampled network is not dependent on the nr of previously sampled patterns

    # generate the network:
    # random network with distance-dependent connection probability,
    # with stronger links between cells that participate in the first num_imprinted patterns.
    network = net.grid_empty(M, N)
    nodes = network.nodes()
    for i, u in enumerate(nodes):
        for v in nodes[i + 1:]:
            # if both nodes participate in the same pattern, make a strong link,
            # with some probability depending on distance
            in_pattern = False
            for pat in range(num_imprinted):
                if patterns[u[0], u[1], pat] and patterns[v[0], v[1], pat]:
                    in_pattern = True
                    break

            p_connect_pattern = max(
                1.0 / (conn_b * np.sqrt((u[0] - v[0])**2 + (u[1] - v[1])**2)) -
                conn_c, 0)
            p_connect_background = max(
                1.0 / (conn_b_bck * np.sqrt((u[0] - v[0])**2 +
                                            (u[1] - v[1])**2)) - conn_c_bck, 0)
            if in_pattern and rng.rand() < p_connect_pattern:
                network.add_edge(u, v, {"strength": 15})
            # fewer and weaker background connections are created where there was no common input.
            elif rng.rand() < p_connect_background:
                network.add_edge(u, v, {"strength": 1})

    # create a setup (experiment object) for each pattern to be presented to the network
    experiments_this_net = []
    similarities_this_net = []
    for i in range(num_patterns):
        current = patterns[:, :, i]
        ex = lab.experiment(network, [rng.randint(1, 10000)],
                            inputc=current,
                            name="seed " + str(seednr) + " pattern " + str(i),
                            downsample=downsample,
                            verbose=True,
                            con_upstr_exc=2,
                            measures=[
                                lab.spikey_rsync(roi=current,
                                                 name="rsync",
                                                 tau=10.0 / downsample),
                                lab.mean_spikecount(roi=current,
                                                    name="spikes"),
                            ])
        # calculate this pattern's similarity to imprinted patterns
        # (the fraction of its cells it shares with an imprinted pattern)
        overlaps = [
            np.sum(current * patterns[:, :, j]) / float(np.sum(current))
            for j in range(num_imprinted)
        ]
        nr_active = np.sum(
            current)  # nr of active cells in the pattern (for normalization)
        # Binarized union of the imprinted patterns.
        all_imprinted = np.sum(patterns[:, :, 0:num_imprinted], axis=2)
        all_imprinted[all_imprinted > 1] = 1
        similarity = np.sum(current * all_imprinted) / float(nr_active)

        # Fraction of strong (imprinted, strength > 1) edges inside the
        # subnetwork of cells activated by this pattern, per active cell.
        activated_subnet = network.subgraph(
            [node for node in zip(*np.where(current))])
        edges = [
            edge for edge in activated_subnet.edges_iter(data=True)
            if edge[2]["strength"] > 1
        ]

        ex.network_match = len(edges) / float(np.sum(current > 0))

        # import ipdb; ipdb.set_trace()

        ex.similarity = similarity
        ex.similar_to = zip(
            overlaps, [patterns[:, :, j].copy() for j in range(num_imprinted)])
        similarities_this_net.append(similarity)
        # if i<num_imprinted:
        #     ex.name+="_imprinted"
        experiments_this_net.append(ex)

    # sort all experiments that use this network by pattern similarity
    # NOTE(review): a similarity above the last bin edge would make digitize
    # return len(bins) and raise IndexError below — assumes bins cover [0, 1].
    sort = np.digitize(similarities_this_net, bins, right=True)
    experiments_binned = [[] for _ in bins]
    similarities_binned = [[] for _ in bins]
    for i, ex in enumerate(experiments_this_net):
        experiments_binned[sort[i]].append(ex)
        similarities_binned[sort[i]].append(ex.similarity)

    # check whether there are enough experiments in each pattern similarity bin
    if np.min([len(s) for s in similarities_binned]) >= patterns_per_bin:
        return np.array([
            column[0:patterns_per_bin] for column in experiments_binned
        ]).flatten()
    elif num_patterns < num_patterns_initial * 100:
        print "seednr " + str(seednr) + ": " + str(
            num_patterns) + " sample patterns not enough, trying with more"
        return setup(seed, seednr, num_patterns * 2)
    else:
        raise Exception(
            "couldn't find required number of samples in each bin after " +
            str(num_patterns) + " patterns")