Code Example #1
def trial_averaged_tuning_curve_errorbar(sheet,
                                         folder,
                                         stimulus,
                                         parameter,
                                         start,
                                         end,
                                         xlabel="",
                                         ylabel="",
                                         color="black",
                                         percentile=False,
                                         useXlog=False,
                                         useYlog=False,
                                         ylim=[0., 100.],
                                         xlim=False,
                                         opposite=False,
                                         box=None,
                                         radius=None,
                                         addon="",
                                         data=None,
                                         data_curve=True):
    print inspect.stack()[0][3]
    print "folder: ", folder
    print "sheet: ", sheet
    data_store = PickledDataStore(load=True,
                                  parameters=ParameterSet({
                                      'root_directory':
                                      folder,
                                      'store_stimuli':
                                      False
                                  }),
                                  replace=True)
    data_store.print_content(full_recordings=False)

    neurons = param_filter_query(
        data_store, sheet_name=sheet,
        st_name=stimulus).get_segments()[0].get_stored_spike_train_ids()
    print "Recorded neurons:", len(neurons)

    if radius:
        sheet_ids = data_store.get_sheet_indexes(sheet_name=sheet,
                                                 neuron_ids=neurons)
        positions = data_store.get_neuron_postions()[sheet]
        ids1 = select_ids_by_position(positions, sheet_ids, radius=radius)
        neurons = data_store.get_sheet_ids(sheet_name=sheet, indexes=ids1)

    NeuronAnnotationsToPerNeuronValues(data_store, ParameterSet({})).analyse()
    l4_exc_or = data_store.get_analysis_result(
        identifier='PerNeuronValue',
        value_name='LGNAfferentOrientation',
        sheet_name=sheet)[0]
    l4_exc_or_many = numpy.array(neurons)[numpy.nonzero(
        numpy.array([
            circular_dist(l4_exc_or.get_value_by_id(i), 0, numpy.pi)
            for i in neurons
        ]) < .1)[0]]
    neurons = list(l4_exc_or_many)

    print "Selected neurons:", len(neurons)  #, neurons
    if len(neurons) < 1:
        return

    TrialAveragedFiringRate(
        param_filter_query(data_store, sheet_name=sheet, st_name=stimulus),
        ParameterSet({'neurons': list(neurons)})).analyse()

    PlotTuningCurve(
        param_filter_query(data_store,
                           st_name=stimulus,
                           analysis_algorithm=['TrialAveragedFiringRate']),
        ParameterSet({
            'polar': False,
            'pool': False,
            'centered': False,
            'percent': False,
            'mean': True,
            'parameter_name': parameter,
            'neurons': list(neurons),
            'sheet_name': sheet
        }),
        fig_param={
            'dpi': 200
        },
        plot_file_name=folder + "/TrialAveragedSensitivityNew_" + stimulus +
        "_" + parameter + "_" + str(sheet) + "_" + addon + "_mean.svg"
    ).plot({
        # '*.y_lim':(0,30),
        # '*.x_lim':(-10,100),
        # '*.x_scale':'log', '*.x_scale_base':10,
        '*.fontsize': 17
    })
    return
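
The selection step above keeps only the recorded neurons whose annotated LGNAfferentOrientation lies within 0.1 rad of the target orientation (0 here). A minimal standalone sketch of that filter, assuming numpy and circular_dist are imported as in the script above (the helper name select_by_orientation and its default tolerance are illustrative, not part of the Mozaik API):

def select_by_orientation(per_neuron_value, ids, target=0.0, tol=0.1):
    # circular distance (period pi) between each neuron's preferred orientation and the target
    dists = numpy.array([
        circular_dist(per_neuron_value.get_value_by_id(i), target, numpy.pi)
        for i in ids
    ])
    # keep only the ids whose preference falls within the tolerance
    return list(numpy.array(ids)[numpy.nonzero(dists < tol)[0]])

With the l4_exc_or result above, neurons = select_by_orientation(l4_exc_or, neurons) reproduces the filtering done inline in the function.
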
Code Example #2
def LFP(sheet,
        folder,
        stimulus,
        parameter,
        tip=[.0, .0, .0],
        sigma=0.300,
        ylim=[0., -1.],
        addon="",
        color='black'):
    import matplotlib
    import matplotlib.pyplot as plt  # used below for the per-stimulus figures
    import quantities as pq
    print inspect.stack()[0][3]
    print "folder: ", folder
    print "sheet: ", sheet

    data_store = PickledDataStore(load=True,
                                  parameters=ParameterSet({
                                      'root_directory':
                                      folder,
                                      'store_stimuli':
                                      False
                                  }),
                                  replace=True)
    data_store.print_content(full_recordings=False)

    ids = param_filter_query(
        data_store, sheet_name=sheet,
        st_name=stimulus).get_segments()[0].get_stored_esyn_ids()
    if ids is None or len(ids) < 1:
        print "No gesyn recorded.\n"
        return
    print "Recorded gesyn:", len(ids), ids

    ids = param_filter_query(
        data_store, sheet_name=sheet,
        st_name=stimulus).get_segments()[0].get_stored_vm_ids()
    if ids is None or len(ids) < 1:
        print "No Vm recorded.\n"
        return
    print "Recorded Vm:", len(ids), ids

    NeuronAnnotationsToPerNeuronValues(data_store, ParameterSet({})).analyse()
    l4_exc_or = data_store.get_analysis_result(
        identifier='PerNeuronValue',
        value_name='LGNAfferentOrientation',
        sheet_name=sheet)[0]
    l4_exc_or_many = numpy.array(ids)[numpy.nonzero(
        numpy.array([
            circular_dist(l4_exc_or.get_value_by_id(i), 0, numpy.pi)
            for i in ids
        ]) < .1)[0]]
    ids = list(l4_exc_or_many)

    print "Recorded neurons:", len(ids), ids
    # 900 neurons over 6000 micrometers, 200 micrometers interval

    sheet_indexes = data_store.get_sheet_indexes(sheet_name=sheet,
                                                 neuron_ids=ids)

    positions = data_store.get_neuron_postions()[sheet]
    print positions.shape  # all 10800

    # take the positions of the ids
    ids_positions = numpy.transpose(positions)[sheet_indexes, :]
    print ids_positions.shape
    print ids_positions

    # Pre-compute distances from the LFP tip
    distances = []
    for i in range(len(ids)):
        distances.append(
            numpy.linalg.norm(
                numpy.array(ids_positions[i][0]) - numpy.array(tip)))
    distances = numpy.array(distances)
    print "distances:", len(distances), distances

    # ##############################
    # LFP
    # tip = [[x],[y],[.0]]
    # For each recorded cell:
    # Gaussianly weight it by its distance from tip
    # produce the currents
    # Divide the whole by the norm factor (area): 4 * numpy.pi * sigma

    # 95% of the LFP signal is a result of all exc and inh cells conductances from 250um radius from the tip of the electrode (Katzner et al. 2009).
    # Mostly excitatory neurons are relevant for the LFP (because of their geometry) Bartos
    # Therefore we include all recorded cells but account for the distance-dependent contribution weighting currents /r^2
    # We assume that the electrode has been placed in the cortical coordinates <tip>
    # Given that the current V1 orientation map has a pixel for each 100 um, a reasonable way to look at a neighborhood is in a radius of 300 um

    print "LFP electrode tip location (x,y) in degrees:", tip

    # Gather vm and conductances
    segs = sorted(
        param_filter_query(data_store, st_name=stimulus,
                           sheet_name=sheet).get_segments(),
        key=lambda x: getattr(
            MozaikParametrized.idd(x.annotations['stimulus']), parameter))
    ticks = set([])
    for x in segs:
        ticks.add(
            getattr(MozaikParametrized.idd(x.annotations['stimulus']),
                    parameter))
    ticks = sorted(ticks)
    num_ticks = len(ticks)
    print ticks
    trials = len(segs) / num_ticks
    print "trials:", trials

    pop_vm = []
    pop_gsyn_e = []
    pop_gsyn_i = []
    for n, idd in enumerate(ids):
        print "idd", idd
        full_vm = [s.get_vm(idd) for s in segs]  # all segments
        full_gsyn_es = [s.get_esyn(idd) for s in segs]
        full_gsyn_is = [s.get_isyn(idd) for s in segs]
        print "len full_gsyn_e", len(
            full_gsyn_es)  # segments = stimuli * trials
        print "shape gsyn_e[0]", full_gsyn_es[0].shape  # stimulus lenght
        # mean input over trials
        mean_full_vm = numpy.zeros((num_ticks, full_vm[0].shape[0]))  # init
        mean_full_gsyn_e = numpy.zeros(
            (num_ticks, full_gsyn_es[0].shape[0]))  # init
        mean_full_gsyn_i = numpy.zeros((num_ticks, full_gsyn_es[0].shape[0]))
        # print "shape mean_full_gsyn_e/i", mean_full_gsyn_e.shape
        sampling_period = full_gsyn_es[0].sampling_period
        t_stop = float(full_gsyn_es[0].t_stop - sampling_period)  # 200.0
        t_start = float(full_gsyn_es[0].t_start)
        time_axis = numpy.arange(0, len(full_gsyn_es[0]), 1) / float(
            len(full_gsyn_es[0])) * abs(t_start - t_stop) + t_start
        # sum by size
        t = 0
        for v, e, i in zip(full_vm, full_gsyn_es, full_gsyn_is):
            s = int(t / trials)
            v = v.rescale(mozaik.tools.units.mV)
            e = e.rescale(
                mozaik.tools.units.nS)  # NEST is in nS, PyNN is in uS
            i = i.rescale(
                mozaik.tools.units.nS)  # NEST is in nS, PyNN is in uS
            mean_full_vm[s] = mean_full_vm[s] + numpy.array(v.tolist())
            mean_full_gsyn_e[s] = mean_full_gsyn_e[s] + numpy.array(e.tolist())
            mean_full_gsyn_i[s] = mean_full_gsyn_i[s] + numpy.array(i.tolist())
            t = t + 1

        # average by trials
        for st in range(num_ticks):
            mean_full_vm[st] = mean_full_vm[st] / trials
            mean_full_gsyn_e[st] = mean_full_gsyn_e[st] / trials
            mean_full_gsyn_i[st] = mean_full_gsyn_i[st] / trials

        pop_vm.append(mean_full_vm)
        pop_gsyn_e.append(mean_full_gsyn_e)
        pop_gsyn_i.append(mean_full_gsyn_i)

    pop_v = numpy.array(pop_vm)
    pop_e = numpy.array(pop_gsyn_e)
    pop_i = numpy.array(pop_gsyn_i)

    # Produce the current for each cell for this time interval, with the Ohm law:
    # I = ge(V-Ee) + gi(V+Ei)
    # where
    # Ee is the equilibrium for exc, which is 0.0
    # Ei is the equilibrium for inh, which is -80.0
    i = pop_e * (pop_v - 0.0) + pop_i * (pop_v - 80.0)
    # i = pop_e*(pop_v-0.0) + 0.3*pop_i*(pop_v-80.0)
    # i = pop_e*(pop_v-0.0) # only exc
    # the LFP is the result of cells' currents divided by the distance
    sum_i = numpy.sum(i, axis=0)
    lfp = sum_i / (4 * numpy.pi * sigma)  #
    lfp /= 1000.  # from milli to micro
    print "LFP:", lfp.shape, lfp.mean(), lfp.min(), lfp.max()
    # print lfp
    # lfp = np.convolve(lfp, np.ones((10,))/10, mode='valid') # moving avg or running mean implemented as a convolution over steps of 10, divided by 10

    #plot the LFP for each stimulus
    for s in range(num_ticks):
        # for each stimulus plot the average conductance per cell over time
        matplotlib.rcParams.update({'font.size': 22})
        fig, ax = plt.subplots()

        ax.plot(range(0, len(lfp[s])), lfp[s], color=color, linewidth=3)

        # ax.set_ylim([lfp.min(), lfp.max()])
        ax.set_ylim(ylim)
        ax.set_ylabel("LFP (uV)")
        ax.set_xlabel("Time (us)")
        ax.spines['right'].set_visible(False)
        ax.spines['top'].set_visible(False)
        ax.xaxis.set_ticks_position('bottom')
        ax.xaxis.set_ticks(ticks, ticks)
        ax.yaxis.set_ticks_position('left')

        # text
        plt.tight_layout()
        plt.savefig(folder + "/TimecourseLFP_" + sheet + "_" + parameter +
                    "_" + str(ticks[s]) + "_" + addon + ".svg",
                    dpi=200,
                    transparent=True)
        fig.clf()
        plt.close()
        # garbage
        gc.collect()
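
The comments in the function above describe a point-source LFP estimate: each cell's ohmic current contributes with a weight that falls off with its distance from the electrode tip, and the sum is normalised by 4*pi*sigma. A minimal sketch of the 1/r-weighted variant, assuming currents is an (n_cells, n_timepoints) array of per-cell currents for one stimulus and distances is the (n_cells,) array of tip distances computed above (the helper name point_source_lfp is illustrative):

def point_source_lfp(currents, distances, sigma=0.300):
    # weight each cell's current by the inverse of its distance from the electrode tip
    weighted = currents / distances[:, None]
    # sum over cells and apply the point-source normalisation factor 4*pi*sigma
    return numpy.sum(weighted, axis=0) / (4 * numpy.pi * sigma)

Applied per stimulus (e.g. to i[:, s, :] for each tick s), it yields one LFP time course per stimulus value.
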
Code Example #3
def trial_averaged_raster(sheet,
                          folder,
                          stimulus,
                          parameter,
                          opposite=False,
                          box=None,
                          radius=None,
                          addon=""):
    print inspect.stack()[0][3]
    print "folder: ", folder
    print "sheet: ", sheet
    data_store = PickledDataStore(load=True,
                                  parameters=ParameterSet({
                                      'root_directory':
                                      folder,
                                      'store_stimuli':
                                      False
                                  }),
                                  replace=True)
    data_store.print_content(full_recordings=False)

    spike_ids = param_filter_query(
        data_store,
        sheet_name=sheet).get_segments()[0].get_stored_spike_train_ids()
    if spike_ids is None:
        print "No spikes recorded.\n"
        return
    print "Recorded neurons:", len(spike_ids)

    if sheet == 'V1_Exc_L4' or sheet == 'V1_Inh_L4':
        NeuronAnnotationsToPerNeuronValues(data_store,
                                           ParameterSet({})).analyse()
        l4_exc_or = data_store.get_analysis_result(
            identifier='PerNeuronValue',
            value_name='LGNAfferentOrientation',
            sheet_name=sheet)[0]
        if opposite:
            addon = addon + "_opposite"
            l4_exc_or_many = numpy.array(spike_ids)[numpy.nonzero(
                numpy.array([
                    circular_dist(l4_exc_or.get_value_by_id(i), numpy.pi /
                                  2, numpy.pi) for i in spike_ids
                ]) < .1)[0]]
        else:
            addon = addon + "_same"
            l4_exc_or_many = numpy.array(spike_ids)[numpy.nonzero(
                numpy.array([
                    circular_dist(l4_exc_or.get_value_by_id(i), 0, numpy.pi)
                    for i in spike_ids
                ]) < .1)[0]]
        spike_ids = list(l4_exc_or_many)

    if radius or box:
        sheet_ids = data_store.get_sheet_indexes(sheet_name=sheet,
                                                 neuron_ids=spike_ids)
        positions = data_store.get_neuron_postions()[sheet]
        if box:
            ids1 = select_ids_by_position(positions, sheet_ids, box=box)
        if radius:
            ids1 = select_ids_by_position(positions, sheet_ids, radius=radius)
        spike_ids = data_store.get_sheet_ids(sheet_name=sheet, indexes=ids1)

    print "Selected neurons:", len(spike_ids)
    if len(spike_ids) < 1:
        return

    dsv = param_filter_query(data_store, sheet_name=sheet, st_name=stimulus)
    dist = box if not radius else radius

    # Raster + Histogram
    RasterPlot(dsv,
               ParameterSet({
                   'sheet_name': sheet,
                   'neurons': list(spike_ids),
                   'trial_averaged_histogram': True,
                   'spontaneous': True
               }),
               fig_param={
                   'dpi': 100,
                   'figsize': (100, 50)
               },
               plot_file_name=folder + "/HistRaster_" + parameter + "_" +
               str(sheet) + "_radius" + str(dist) + "_" + addon + ".svg").plot(
                   {'SpikeRasterPlot.group_trials': True})
Code Example #4
def trial_averaged_Vm(sheet,
                      folder,
                      stimulus,
                      parameter,
                      opposite=False,
                      box=None,
                      radius=None,
                      addon=""):
    print inspect.stack()[0][3]
    print "folder: ", folder
    print "sheet: ", sheet
    data_store = PickledDataStore(load=True,
                                  parameters=ParameterSet({
                                      'root_directory':
                                      folder,
                                      'store_stimuli':
                                      False
                                  }),
                                  replace=True)
    data_store.print_content(full_recordings=False)

    analog_ids = param_filter_query(
        data_store, sheet_name=sheet).get_segments()[0].get_stored_vm_ids()
    if analog_ids is None:
        print "No Vm recorded.\n"
        return
    print "Recorded neurons:", len(analog_ids)

    if sheet == 'V1_Exc_L4' or sheet == 'V1_Inh_L4':
        NeuronAnnotationsToPerNeuronValues(data_store,
                                           ParameterSet({})).analyse()
        l4_exc_or = data_store.get_analysis_result(
            identifier='PerNeuronValue',
            value_name='LGNAfferentOrientation',
            sheet_name=sheet)[0]
        if opposite:
            addon = addon + "_opposite"
            l4_exc_or_many = numpy.array(analog_ids)[numpy.nonzero(
                numpy.array([
                    circular_dist(l4_exc_or.get_value_by_id(i), numpy.pi /
                                  2, numpy.pi) for i in analog_ids
                ]) < .1)[0]]
        else:
            addon = addon + "_same"
            l4_exc_or_many = numpy.array(analog_ids)[numpy.nonzero(
                numpy.array([
                    circular_dist(l4_exc_or.get_value_by_id(i), 0, numpy.pi)
                    for i in analog_ids
                ]) < .1)[0]]
        analog_ids = list(l4_exc_or_many)

    if radius or box:
        sheet_ids = data_store.get_sheet_indexes(sheet_name=sheet,
                                                 neuron_ids=analog_ids)
        positions = data_store.get_neuron_postions()[sheet]
        if box:
            ids1 = select_ids_by_position(positions, sheet_ids, box=box)
        if radius:
            ids1 = select_ids_by_position(positions, sheet_ids, radius=radius)
        analog_ids = data_store.get_sheet_ids(sheet_name=sheet, indexes=ids1)

    print "Selected neurons:", len(analog_ids)
    if len(analog_ids) < 1:
        return

    dsv = param_filter_query(data_store, sheet_name=sheet, st_name=stimulus)

    dist = box if not radius else radius
    for n in analog_ids:
        VmPlot(
            dsv,
            ParameterSet({
                'neuron': n,
                'sheet_name': sheet,
                'spontaneous': True,
            }),
            fig_param={
                'dpi': 300,
                'figsize': (40, 5)
            },
            # plot_file_name=folder+"/Vm_"+parameter+"_"+str(sheet)+"_"+str(dist)+"_"+str(n)+"_"+addon+".png"
            plot_file_name=folder + "/Vm_" + parameter + "_" + str(sheet) +
            "_radius" + str(dist) + "_" + str(n) + "_" + addon + ".svg"
        ).plot({
            # '*.y_lim':(0,60),
            # '*.x_scale':'log', '*.x_scale_base':2,
            # '*.y_ticks':[5, 10, 25, 50, 60],
            # # '*.y_scale':'linear',
            # '*.y_scale':'log', '*.y_scale_base':2,
            # '*.fontsize':24
        })
Code Example #5
File: report.py  Project: JoelChavas/mozaik-contrib
logger = mozaik.getMozaikLogger("Mozaik")

setup_logging()
data_store = PickledDataStore(load=True,parameters=ParameterSet({'root_directory':'ST'}))
logger.info('Loaded data store')
 
NeuronAnnotationsToPerNeuronValues(data_store,ParameterSet({})).analyse()
# find the neuron whose orientation preference is closest to pi/2

analog_indexes = param_filter_query(data_store,sheet_name="V1_Exc_L4").get_segments()[0].get_stored_isyn_ids()
analog_indexes_inh = param_filter_query(data_store,sheet_name="V1_Inh_L4").get_segments()[0].get_stored_isyn_ids()

l4_exc_or = data_store.get_analysis_result(identifier='PerNeuronValue',value_name = 'LGNAfferentOrientation', sheet_name = 'V1_Exc_L4')
l4_exc_phase = data_store.get_analysis_result(identifier='PerNeuronValue',value_name = 'LGNAfferentPhase', sheet_name = 'V1_Exc_L4')
l4_exc = analog_indexes[numpy.argmin([circular_dist(o,numpy.pi/2,numpy.pi)  for (o,p) in zip(l4_exc_or[0].get_value_by_id(analog_indexes),l4_exc_phase[0].get_value_by_id(analog_indexes))])]
l4_inh_or = data_store.get_analysis_result(identifier='PerNeuronValue',value_name = 'LGNAfferentOrientation', sheet_name = 'V1_Inh_L4')
l4_inh_phase = data_store.get_analysis_result(identifier='PerNeuronValue',value_name = 'LGNAfferentPhase', sheet_name = 'V1_Inh_L4')
l4_inh = analog_indexes_inh[numpy.argmin([circular_dist(o,numpy.pi/2,numpy.pi)  for (o,p) in zip(l4_inh_or[0].get_value_by_id(analog_indexes_inh),l4_inh_phase[0].get_value_by_id(analog_indexes_inh))])]
l4_exc_or_many = numpy.array(l4_exc_or[0].ids)[numpy.nonzero(numpy.array([circular_dist(o,numpy.pi/2,numpy.pi)  for (o,p) in zip(l4_exc_or[0].values,l4_exc_phase[0].values)]) < 0.1)[0]]

os.mkdir('REPORT')

l4_exc_data = param_filter_query(data_store,sheet_name='V1_Exc_L4')
l4_inh_data = param_filter_query(data_store,sheet_name='V1_Inh_L4')

dsv = param_filter_query(data_store,st_name='FullfieldDriftingSinusoidalGrating',contrast=100)    
OverviewPlot(dsv,ParameterSet({'sheet_name' : 'V1_Exc_L4', 'neuron' : l4_exc, 'sheet_activity' : {}}),plot_file_name='REPORT/GratingsExc.png',fig_param={'dpi' : 100,'figsize': (14,8)}).plot({'Vm_plot.y_lim' : (-67,-56),'Conductance_plot.y_lim' : (0,35.0)})
OverviewPlot(dsv,ParameterSet({'sheet_name' : 'V1_Inh_L4', 'neuron' : l4_inh, 'sheet_activity' : {}}),plot_file_name='REPORT/GratingsInh.png',fig_param={'dpi' : 100,'figsize': (14,8)}).plot({'Vm_plot.y_lim' : (-67,-56),'Conductance_plot.y_lim' : (0,35.0)})
Code Example #6
# find the neuron whose orientation preference is closest to pref_or
l4_analog_ids = param_filter_query(
    data_store,
    sheet_name="V1_Exc_L4").get_segments()[0].get_stored_esyn_ids()
l4_analog_ids_inh = param_filter_query(
    data_store,
    sheet_name="V1_Inh_L4").get_segments()[0].get_stored_esyn_ids()
l23_analog_ids = param_filter_query(
    data_store,
    sheet_name="V1_Exc_L2/3").get_segments()[0].get_stored_esyn_ids()
l23_analog_ids_inh = param_filter_query(
    data_store,
    sheet_name="V1_Inh_L2/3").get_segments()[0].get_stored_esyn_ids()
NeuronAnnotationsToPerNeuronValues(data_store, ParameterSet({})).analyse()
l4_exc_or = data_store.get_analysis_result(identifier='PerNeuronValue',
                                           value_name='LGNAfferentOrientation',
                                           sheet_name='V1_Exc_L4')
l4_exc = l4_analog_ids[numpy.argmin([
    circular_dist(o, pref_or, numpy.pi)
    for o in l4_exc_or[0].get_value_by_id(l4_analog_ids)
])]
l4_inh_or = data_store.get_analysis_result(identifier='PerNeuronValue',
                                           value_name='LGNAfferentOrientation',
                                           sheet_name='V1_Inh_L4')
l4_inh = l4_analog_ids_inh[numpy.argmin([
    circular_dist(o, pref_or, numpy.pi)
    for o in l4_inh_or[0].get_value_by_id(l4_analog_ids_inh)
])]
l23_exc_or = data_store.get_analysis_result(
    identifier='PerNeuronValue',
    value_name='LGNAfferentOrientation',