Esempio n. 1
0
 def perform_analysis(self):
       """
       Compute the F0 (DC) and F1 (first harmonic) components of the recorded
       excitatory conductance, inhibitory conductance and membrane potential for
       each neuron, per sheet, in response to FullfieldDriftingSinusoidalGrating
       stimuli, and store them as PerNeuronValue analysis data structures.

       The first harmonic is the FFT bin corresponding to the stimulus
       temporal_frequency.
       """
       dsv1 = queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating')
       for sheet in dsv1.sheets():
           dsv = queries.param_filter_query(dsv1, sheet_name=sheet)
           # Group recorded segments across trials of the same stimulus so we can trial-average.
           segs1, stids = colapse(dsv.get_segments(),dsv.get_stimuli(),parameter_list=['trial'],allow_non_identical_objects=True)
           for segs,st in zip(segs1, stids):
               first_analog_signal = segs[0].get_esyn(segs[0].get_stored_esyn_ids()[0])
               duration = first_analog_signal.t_stop - first_analog_signal.t_start
               frequency = MozaikParametrized.idd(st).temporal_frequency * MozaikParametrized.idd(st).params()['temporal_frequency'].units
               period = 1/frequency
               period = period.rescale(first_analog_signal.t_start.units)
               cycles = duration / period
               # FFT bin of the stimulus temporal frequency.
               # BUGFIX: cast to int -- round() alone would not be a valid array index in all cases.
               first_har = int(round(cycles))
               # F0: |DC component| of the trial-averaged signal, normalized by the signal's own length.
               # BUGFIX: the isyn and vm values were previously looked up and normalized via
               # get_esyn(idd), i.e. with the excitatory signal's ids/length.
               e_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_esyn_ids()]
               i_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_isyn(idd))) for idd in segs[0].get_stored_isyn_ids()]
               v_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_vm(idd))) for idd in segs[0].get_stored_vm_ids()]
               # F1: 2 * |first-harmonic component|, same per-signal normalization.
               e_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs],axis=0).flatten())[first_har]/len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_esyn_ids()]
               i_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs],axis=0).flatten())[first_har]/len(segs[0].get_isyn(idd))) for idd in segs[0].get_stored_isyn_ids()]
               v_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs],axis=0).flatten())[first_har]/len(segs[0].get_vm(idd))) for idd in segs[0].get_stored_vm_ids()]
               
               # BUGFIX: Vm results were previously stored with the conductance units of
               # first_analog_signal; use the membrane-potential units for F0_Vm/F1_Vm.
               cond_units = first_analog_signal.units
               vm_units = segs[0].get_vm(segs[0].get_stored_vm_ids()[0]).units
               
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f0,segs[0].get_stored_esyn_ids(),cond_units,value_name = 'F0_Exc_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f0,segs[0].get_stored_isyn_ids(),cond_units,value_name = 'F0_Inh_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f0,segs[0].get_stored_vm_ids(),vm_units,value_name = 'F0_Vm',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f1,segs[0].get_stored_esyn_ids(),cond_units,value_name = 'F1_Exc_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f1,segs[0].get_stored_isyn_ids(),cond_units,value_name = 'F1_Inh_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f1,segs[0].get_stored_vm_ids(),vm_units,value_name = 'F1_Vm',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
Esempio n. 2
0
    def perform_analysis(self):
        """
        For each sheet, determine what percentage of neurons are significantly
        positively modulated, significantly negatively modulated, or not
        significantly modulated by synthetic texture stimuli relative to
        spectrally matched noise, using a randomization test per neuron.
        The three percentages are stored as SingleValue analysis data structures.
        """
        for sheet in self.parameters.sheet_list:
            #Obtain the average firing rate for each neuron and each samples of the stimuli, separately for the spectrally matched noise and synthetic texture stimuli
            dsv_noise = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',sheet_name=sheet, st_texture = self.parameters.texture_list, value_name = "Firing rate", st_stats_type = 2)
            dsv_texture = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',sheet_name=sheet, st_texture = self.parameters.texture_list, value_name = "Firing rate", st_stats_type = 1)
            pnvs_noise = dsv_noise.get_analysis_result()
            pnvs_texture = dsv_texture.get_analysis_result()
            # Rows: stimulus presentations; columns: neurons (id order fixed by the first PNV).
            firing_rates_noise = numpy.array([pnv.get_value_by_id(pnvs_noise[0].ids) for pnv in pnvs_noise])
            firing_rates_texture = numpy.array([pnv.get_value_by_id(pnvs_texture[0].ids) for pnv in pnvs_texture])

            assert firing_rates_noise.shape == firing_rates_texture.shape

            num_neurons = firing_rates_noise.shape[1]
            count_positively_modulated = 0
            count_negatively_modulated = 0

            #For every neuron, check if it is significantly modulated through a randomization test
            for rates_noise, rates_texture in zip(firing_rates_noise.T, firing_rates_texture.T):
                mean_texture = numpy.mean(rates_texture)
                mean_noise = numpy.mean(rates_noise)
                # Normalized modulation index in [-1, 1]: positive favours texture.
                modulation = (mean_texture - mean_noise) / (mean_texture + mean_noise)
                significant = self.randomization_test(rates_noise, rates_texture, modulation)
                if modulation > 0:
                    count_positively_modulated += significant
                elif modulation < 0:
                    count_negatively_modulated += significant

            # Strip the parameters that were collapsed over so the stored stimulus id
            # represents the whole stimulus family.
            st = MozaikParametrized.idd(pnvs_texture[0].stimulus_id)
            setattr(st,'stats_type',None)
            setattr(st,'sample',None)
            setattr(st,'texture',None)

            pct_not_modulated = num_neurons - count_positively_modulated - count_negatively_modulated
            self.datastore.full_datastore.add_analysis_result(SingleValue(float(count_positively_modulated)/num_neurons * 100, qt.percent, value_name = "Percentage of neurons significantly positively modulated", sheet_name=sheet, tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(SingleValue(float(count_negatively_modulated)/num_neurons * 100, qt.percent, value_name = "Percentage of neurons significantly negatively modulated", sheet_name=sheet, tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(SingleValue(float(pct_not_modulated)/num_neurons * 100,qt.percent, value_name = "Percentage of neurons not significantly modulated", sheet_name=sheet, tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))
Esempio n. 3
0
 def perform_analysis(self):
       """
       Compute the F0 (DC) and F1 (first harmonic) components of the recorded
       excitatory conductance, inhibitory conductance and membrane potential for
       each neuron, per sheet, in response to FullfieldDriftingSinusoidalGrating
       stimuli, and store them as PerNeuronValue analysis data structures.

       The first harmonic is the FFT bin corresponding to the stimulus
       temporal_frequency.
       """
       dsv1 = queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating')
       for sheet in dsv1.sheets():
           dsv = queries.param_filter_query(dsv1, sheet_name=sheet)
           # Group recorded segments across trials of the same stimulus so we can trial-average.
           segs1, stids = colapse(dsv.get_segments(),dsv.get_stimuli(),parameter_list=['trial'],allow_non_identical_objects=True)
           for segs,st in zip(segs1, stids):
               first_analog_signal = segs[0].get_esyn(segs[0].get_stored_esyn_ids()[0])
               duration = first_analog_signal.t_stop - first_analog_signal.t_start
               frequency = MozaikParametrized.idd(st).temporal_frequency * MozaikParametrized.idd(st).params()['temporal_frequency'].units
               period = 1/frequency
               period = period.rescale(first_analog_signal.t_start.units)
               cycles = duration / period
               # FFT bin of the stimulus temporal frequency.
               # BUGFIX: cast to int -- round() alone would not be a valid array index in all cases.
               first_har = int(round(cycles))
               # F0: |DC component| of the trial-averaged signal, normalized by the signal's own length.
               # BUGFIX: the isyn and vm values were previously looked up and normalized via
               # get_esyn(idd), i.e. with the excitatory signal's ids/length.
               e_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_esyn_ids()]
               i_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_isyn(idd))) for idd in segs[0].get_stored_isyn_ids()]
               v_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_vm(idd))) for idd in segs[0].get_stored_vm_ids()]
               # F1: 2 * |first-harmonic component|, same per-signal normalization.
               e_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs],axis=0).flatten())[first_har]/len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_esyn_ids()]
               i_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs],axis=0).flatten())[first_har]/len(segs[0].get_isyn(idd))) for idd in segs[0].get_stored_isyn_ids()]
               v_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs],axis=0).flatten())[first_har]/len(segs[0].get_vm(idd))) for idd in segs[0].get_stored_vm_ids()]
               
               # BUGFIX: Vm results were previously stored with the conductance units of
               # first_analog_signal; use the membrane-potential units for F0_Vm/F1_Vm.
               cond_units = first_analog_signal.units
               vm_units = segs[0].get_vm(segs[0].get_stored_vm_ids()[0]).units
               
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f0,segs[0].get_stored_esyn_ids(),cond_units,value_name = 'F0_Exc_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f0,segs[0].get_stored_isyn_ids(),cond_units,value_name = 'F0_Inh_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f0,segs[0].get_stored_vm_ids(),vm_units,value_name = 'F0_Vm',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f1,segs[0].get_stored_esyn_ids(),cond_units,value_name = 'F1_Exc_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f1,segs[0].get_stored_isyn_ids(),cond_units,value_name = 'F1_Inh_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f1,segs[0].get_stored_vm_ids(),vm_units,value_name = 'F1_Vm',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
Esempio n. 4
0
 def subplot(self, subplotspec):
     """
     Lay out two PerNeuronAnalogSignalScatterPlot panels comparing
     trial-to-trial Vm mean against trial-to-trial Vm variance: the top panel
     for natural-image-with-eye-movement stimuli, the bottom one for a
     full-field drifting grating (orientation 0, contrast 100).
     """
     gs = gridspec.GridSpecFromSubplotSpec(20, 1, subplot_spec=subplotspec, hspace=1.0, wspace=1.0)
     measures = ['Vm (no AP) trial-to-trial mean', 'Vm (no AP) trial-to-trial variance']

     plots = {}
     nat_dsv = queries.param_filter_query(self.datastore, y_axis_name=measures, st_name="NaturalImageWithEyeMovement")
     plots['plot1'] = (PerNeuronAnalogSignalScatterPlot(nat_dsv, ParameterSet({'neurons': self.parameters.neurons})), gs[2:10, 0], {})
     grating_dsv = queries.param_filter_query(self.datastore, y_axis_name=measures, st_name="FullfieldDriftingSinusoidalGrating", st_orientation=0, st_contrast=100)
     plots['plot2'] = (PerNeuronAnalogSignalScatterPlot(grating_dsv, ParameterSet({'neurons': self.parameters.neurons})), gs[12:, 0], {})
     return plots
Esempio n. 5
0
 def subplot(self, subplotspec):
     """
     Build the two scatter-plot panels of this figure: trial-to-trial Vm mean
     vs. variance under NaturalImageWithEyeMovement (rows 2-10) and under a
     FullfieldDriftingSinusoidalGrating at orientation 0 and contrast 100
     (rows 12 onward).
     """
     gs = gridspec.GridSpecFromSubplotSpec(20, 1, subplot_spec=subplotspec, hspace=1.0, wspace=1.0)
     y_names = ['Vm (no AP) trial-to-trial mean', 'Vm (no AP) trial-to-trial variance']

     panels = {}
     dsv_natural = queries.param_filter_query(self.datastore, y_axis_name=y_names, st_name="NaturalImageWithEyeMovement")
     panels['plot1'] = (PerNeuronAnalogSignalScatterPlot(dsv_natural, ParameterSet({'neurons': self.parameters.neurons})), gs[2:10, 0], {})
     dsv_grating = queries.param_filter_query(self.datastore, y_axis_name=y_names, st_name="FullfieldDriftingSinusoidalGrating", st_orientation=0, st_contrast=100)
     panels['plot2'] = (PerNeuronAnalogSignalScatterPlot(dsv_grating, ParameterSet({'neurons': self.parameters.neurons})), gs[12:, 0], {})
     return panels
Esempio n. 6
0
      def perform_analysis(self):
            """
            Compute the F0 (DC) and F1 (first harmonic) components of the recorded
            excitatory/inhibitory conductances and membrane potential, and of any
            AnalogSignalList analysis data structures, for every sheet in the
            datastore. The harmonic index is the FFT bin matching the stimulus
            temporal_frequency. Results are stored as PerNeuronValue structures.
            """
            for sheet in self.datastore.sheets():
                dsv = queries.param_filter_query(self.datastore, sheet_name=sheet)
                if len(dsv.get_segments()) != 0:
                  assert queries.equal_stimulus_type(self.datastore) , "Data store has to contain only recordings to the same stimulus type"
                  st = self.datastore.get_stimuli()[0]
                  # BUGFIX: dict.has_key() was removed in Python 3; use the `in` operator.
                  assert 'temporal_frequency' in MozaikParametrized.idd(st).getParams(), "The stimulus has to have parameter temporal_frequency which is used as first harmonic"

                  # Group the recorded segments across trials of the same stimulus.
                  segs1, stids = colapse(dsv.get_segments(),dsv.get_stimuli(),parameter_list=['trial'],allow_non_identical_objects=True)
                  for segs,st in zip(segs1, stids):
                      first_analog_signal = segs[0].get_esyn(segs[0].get_stored_esyn_ids()[0])
                      duration = first_analog_signal.t_stop - first_analog_signal.t_start
                      frequency = MozaikParametrized.idd(st).temporal_frequency * MozaikParametrized.idd(st).getParams()['temporal_frequency'].units
                      period = 1/frequency
                      period = period.rescale(first_analog_signal.t_start.units)
                      cycles = duration / period
                      # FFT bin of the stimulus temporal frequency; int so it can index the spectrum.
                      first_har = int(round(cycles))
                      
                      # F0: |DC component| of the trial-averaged signal, normalized by its length.
                      e_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_esyn_ids()]
                      i_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_isyn(idd))) for idd in segs[0].get_stored_isyn_ids()]
                      v_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_vm(idd))) for idd in segs[0].get_stored_vm_ids()]
                      # F1: 2 * |first-harmonic component|, same normalization.
                      e_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs],axis=0).flatten())[first_har]/len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_esyn_ids()]
                      i_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs],axis=0).flatten())[first_har]/len(segs[0].get_isyn(idd))) for idd in segs[0].get_stored_isyn_ids()]
                      v_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs],axis=0).flatten())[first_har]/len(segs[0].get_vm(idd))) for idd in segs[0].get_stored_vm_ids()]
                      
                      # Conductance vs membrane-potential results carry different units.
                      cond_units = segs[0].get_esyn(segs[0].get_stored_esyn_ids()[0]).units
                      vm_units = segs[0].get_vm(segs[0].get_stored_esyn_ids()[0]).units
                      
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f0,segs[0].get_stored_esyn_ids(),cond_units,value_name = 'F0_Exc_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f0,segs[0].get_stored_isyn_ids(),cond_units,value_name = 'F0_Inh_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f0,segs[0].get_stored_vm_ids(),vm_units,value_name = 'F0_Vm',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f1,segs[0].get_stored_esyn_ids(),cond_units,value_name = 'F1_Exc_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f1,segs[0].get_stored_isyn_ids(),cond_units,value_name = 'F1_Inh_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f1,segs[0].get_stored_vm_ids(),vm_units,value_name = 'F1_Vm',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        

                # AnalogSignalList part 
                dsv = queries.param_filter_query(dsv, sheet_name=sheet,name='AnalogSignalList')
                for asl in dsv.get_analysis_result():
                    # BUGFIX: dict.has_key() was removed in Python 3; use the `in` operator.
                    assert 'temporal_frequency' in MozaikParametrized.idd(asl.stimulus_id).getParams(), "The stimulus has to have parameter temporal_frequency which is used as first harmonic"

                    signals = asl.asl
                    first_analog_signal = signals[0]
                    duration = first_analog_signal.t_stop - first_analog_signal.t_start
                    frequency = MozaikParametrized.idd(asl.stimulus_id).temporal_frequency * MozaikParametrized.idd(asl.stimulus_id).getParams()['temporal_frequency'].units
                    period = 1/frequency
                    period = period.rescale(first_analog_signal.t_start.units)
                    cycles = duration / period
                    first_har = int(round(cycles))

                    # BUGFIX: normalize by the signal length, consistent with the
                    # segment-based F0/F1 computation above (amplitudes were previously
                    # a factor len(signal) too large).
                    f0 = [abs(numpy.fft.fft(signal)[0])/len(signal) for signal in signals]
                    f1 = [2*abs(numpy.fft.fft(signal)[first_har])/len(signal) for signal in signals]
                    
                    self.datastore.full_datastore.add_analysis_result(PerNeuronValue(f0,asl.ids,asl.y_axis_units,value_name = 'F0('+ asl.y_axis_name + ')',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=asl.stimulus_id))                            
                    self.datastore.full_datastore.add_analysis_result(PerNeuronValue(f1,asl.ids,asl.y_axis_units,value_name = 'F1('+ asl.y_axis_name + ')',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=asl.stimulus_id))                                                
Esempio n. 7
0
    def perform_analysis(self):
        """
        Run a nested ANOVA on per-neuron firing rates recorded for texture
        stimuli, partitioning variance across texture families, samples within
        a family, and trials within a sample. For each sheet it stores, per
        neuron, the variance ratio (F statistic of texture vs sample mean
        squares) and the R-squared of each factor as PerNeuronValue results.
        """
        dsv = queries.param_filter_query(self.datastore, identifier='PerNeuronValue')
        # Nothing to analyse if the datastore holds no PerNeuronValue results.
        if len(dsv.get_analysis_result()) == 0: return
        assert queries.ads_with_equal_stimulus_type(dsv)
        assert queries.equal_ads(dsv,except_params=['stimulus_id', 'sheet_name'])

        # Enumerate the distinct texture families, samples and trials present in the datastore.
        textures = list(set([MozaikParametrized.idd(ads.stimulus_id).texture for ads in dsv.get_analysis_result()]))
        samples = list(set([MozaikParametrized.idd(ads.stimulus_id).sample for ads in dsv.get_analysis_result()]))
        trials = list(set([MozaikParametrized.idd(ads.stimulus_id).trial for ads in dsv.get_analysis_result()]))

        for sheet in self.parameters.sheet_list:
            mean_rates = [] #This is a 4D array where we will store the firing rates of each neurons for each trial of each sample of each texture family
            for texture in textures:
                mean_rates_texture = []
                # st_stats_type=1 presumably selects the synthetic texture stimuli
                # (as opposed to noise) -- TODO confirm against the stimulus definition.
                dsv_tmp = queries.param_filter_query(dsv,identifier='PerNeuronValue',sheet_name=sheet,st_texture=texture,st_stats_type=1)
                for sample in samples:
                    mean_rates_sample = []
                    for trial in trials:
                        # Exactly one PerNeuronValue is expected per (texture, sample, trial).
                        pnv = queries.param_filter_query(dsv_tmp,identifier='PerNeuronValue',st_sample=sample,st_trial=trial).get_analysis_result()[0]
                        mean_rates_sample.append(pnv.values)
                    mean_rates_texture.append(mean_rates_sample)
                mean_rates.append(mean_rates_texture)

            # mean_rates has shape (textures, samples, trials, neurons); the averages
            # below successively collapse the leading axes.
            global_averaged_rates = numpy.mean(mean_rates, axis = (0,1,2)) #Calculating the global averaged firing rates for each neurons accross each texture family, samples and trials
            textures_averaged_rates = numpy.mean(mean_rates, axis = (1,2)) #Calculating the firing rates of each neurons for each texture family by averaging accross samples and trials
            samples_averaged_rates = numpy.mean(mean_rates, axis = 2) #Calculating the firing rates of each neurons for each sample by averaging accross trials

            # The transposes move the axis being subtracted into broadcasting position
            # against the higher-level average.
            SStextures = len(trials) * len(samples) * numpy.sum((textures_averaged_rates - global_averaged_rates)**2, axis=0) #Compute the Anova sum of squares accross texture families
            SSsamples = len(trials) * numpy.sum((numpy.transpose(samples_averaged_rates,(1,0,2)) - textures_averaged_rates)**2, axis=(0,1))  #Compute the Anova sum of squares accross samples
            SStrials = numpy.sum((numpy.transpose(mean_rates,(2,0,1,3)) - samples_averaged_rates)**2, axis=(0,1,2))  #Compute the Anova sum of squares accross trials (residuals)
            SStotal = numpy.sum((mean_rates - global_averaged_rates)**2, axis=(0,1,2)) #Compute tha Anova total sum of squares

            #We compute the mean squares of the nested Anova
            MStextures = SStextures/(len(textures)-1)
            MSsamples = SSsamples/(len(textures) * (len(samples) - 1))
            MStrials = SStrials/(len(textures) * len(samples) * (len(trials) - 1))

            #We compute the R-squared for each factor and for the residuals
            RsquaredTextures = SStextures/SStotal
            RsquaredSamples = SSsamples/SStotal
            RsquaredTrials = SStrials/SStotal
            
            #The variance ratio is the F statistic of the nested Anova
            varianceRatio = MStextures/MSsamples

            # NOTE(review): `pnv` here is whichever PerNeuronValue the innermost loop
            # saw last; its ids/stimulus_id are assumed representative of all of them.
            st = MozaikParametrized.idd(pnv.stimulus_id)
            # Blank out the parameters that were collapsed over by the ANOVA.
            setattr(st,'stats_type',None)
            setattr(st,'trial',None)
            setattr(st,'sample',None)
            setattr(st,'texture',None)

            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(varianceRatio,pnv.ids,None,value_name = "Texture variance ratio",sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(RsquaredTextures * 100,pnv.ids,value_units=qt.percent,value_name = "Texture r-squared",sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(RsquaredSamples * 100,pnv.ids,value_units=qt.percent,value_name = "Sample r-squared",sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(RsquaredTrials * 100,pnv.ids,value_units=qt.percent,value_name = "Trial r-squared",sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))
Esempio n. 8
0
      def perform_analysis(self):
            """
            Compute the F0 (DC) and F1 (first harmonic) components of the recorded
            excitatory/inhibitory conductances and membrane potential, and of any
            AnalogSignalList analysis data structures, for every sheet in the
            datastore. The harmonic index is the FFT bin matching the stimulus
            temporal_frequency. Results are stored as PerNeuronValue structures.
            """
            for sheet in self.datastore.sheets():
                dsv = queries.param_filter_query(self.datastore, sheet_name=sheet)
                if len(dsv.get_segments()) != 0:
                  assert queries.equal_stimulus_type(self.datastore) , "Data store has to contain only recordings to the same stimulus type"
                  st = self.datastore.get_stimuli()[0]
                  # BUGFIX: dict.has_key() was removed in Python 3; use the `in` operator.
                  assert 'temporal_frequency' in MozaikParametrized.idd(st).getParams(), "The stimulus has to have parameter temporal_frequency which is used as first harmonic"

                  # Group the recorded segments across trials of the same stimulus.
                  segs1, stids = colapse(dsv.get_segments(),dsv.get_stimuli(),parameter_list=['trial'],allow_non_identical_objects=True)
                  for segs,st in zip(segs1, stids):
                      first_analog_signal = segs[0].get_esyn(segs[0].get_stored_esyn_ids()[0])
                      duration = first_analog_signal.t_stop - first_analog_signal.t_start
                      frequency = MozaikParametrized.idd(st).temporal_frequency * MozaikParametrized.idd(st).getParams()['temporal_frequency'].units
                      period = 1/frequency
                      period = period.rescale(first_analog_signal.t_start.units)
                      cycles = duration / period
                      # FFT bin of the stimulus temporal frequency; int so it can index the spectrum.
                      first_har = int(round(cycles))
                      
                      # F0: |DC component| of the trial-averaged signal, normalized by its length.
                      e_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_esyn_ids()]
                      i_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_isyn(idd))) for idd in segs[0].get_stored_isyn_ids()]
                      v_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_vm(idd))) for idd in segs[0].get_stored_vm_ids()]
                      # F1: 2 * |first-harmonic component|, same normalization.
                      e_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs],axis=0).flatten())[first_har]/len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_esyn_ids()]
                      i_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs],axis=0).flatten())[first_har]/len(segs[0].get_isyn(idd))) for idd in segs[0].get_stored_isyn_ids()]
                      v_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs],axis=0).flatten())[first_har]/len(segs[0].get_vm(idd))) for idd in segs[0].get_stored_vm_ids()]
                      
                      # Conductance vs membrane-potential results carry different units.
                      cond_units = segs[0].get_esyn(segs[0].get_stored_esyn_ids()[0]).units
                      vm_units = segs[0].get_vm(segs[0].get_stored_esyn_ids()[0]).units
                      
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f0,segs[0].get_stored_esyn_ids(),cond_units,value_name = 'F0_Exc_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f0,segs[0].get_stored_isyn_ids(),cond_units,value_name = 'F0_Inh_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f0,segs[0].get_stored_vm_ids(),vm_units,value_name = 'F0_Vm',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f1,segs[0].get_stored_esyn_ids(),cond_units,value_name = 'F1_Exc_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f1,segs[0].get_stored_isyn_ids(),cond_units,value_name = 'F1_Inh_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f1,segs[0].get_stored_vm_ids(),vm_units,value_name = 'F1_Vm',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        

                # AnalogSignalList part 
                dsv = queries.param_filter_query(dsv, sheet_name=sheet,name='AnalogSignalList')
                for asl in dsv.get_analysis_result():
                    # BUGFIX: dict.has_key() was removed in Python 3; use the `in` operator.
                    assert 'temporal_frequency' in MozaikParametrized.idd(asl.stimulus_id).getParams(), "The stimulus has to have parameter temporal_frequency which is used as first harmonic"

                    signals = asl.asl
                    first_analog_signal = signals[0]
                    duration = first_analog_signal.t_stop - first_analog_signal.t_start
                    frequency = MozaikParametrized.idd(asl.stimulus_id).temporal_frequency * MozaikParametrized.idd(asl.stimulus_id).getParams()['temporal_frequency'].units
                    period = 1/frequency
                    period = period.rescale(first_analog_signal.t_start.units)
                    cycles = duration / period
                    first_har = int(round(cycles))

                    # F0/F1 of each analysis signal, normalized by the signal length.
                    f0 = [abs(numpy.fft.fft(signal)[0])/len(signal) for signal in signals]
                    f1 = [2*abs(numpy.fft.fft(signal)[first_har])/len(signal) for signal in signals]
                    
                    self.datastore.full_datastore.add_analysis_result(PerNeuronValue(f0,asl.ids,asl.y_axis_units,value_name = 'F0('+ asl.y_axis_name + ')',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=asl.stimulus_id))                            
                    self.datastore.full_datastore.add_analysis_result(PerNeuronValue(f1,asl.ids,asl.y_axis_units,value_name = 'F1('+ asl.y_axis_name + ')',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=asl.stimulus_id))                                                
Esempio n. 9
0
    def perform_analysis(self):
        """
        Estimate the length of spontaneous activity per sheet.

        If the population mean CV-of-ISI-squared is >= 0.95 and the mean
        pairwise PSTH correlation is <= 0.05 (i.e. activity looks asynchronous
        and irregular), the length is taken as the time of the last non-zero
        bin of the population-mean PSTH; otherwise it is 0. The result is
        stored as a SingleValue named 'Spontaneous activity length'.
        """
        for sheet in self.datastore.sheets():
            dsv_psth = queries.param_filter_query(
                self.datastore,
                analysis_algorithm="PopulationMean",
                y_axis_name='Mean(psth (bin=5.0))',
                sheet_name=sheet)
            dsv_cv = queries.param_filter_query(
                self.datastore,
                analysis_algorithm="PopulationMean",
                value_name='Mean(CV of ISI squared)',
                sheet_name=sheet,
                identifier="SingleValue")
            dsv_corr = queries.param_filter_query(
                self.datastore,
                analysis_algorithm="PopulationMean",
                value_name='Mean(Correlation coefficient(psth (bin=5.0)))',
                sheet_name=sheet,
                identifier="SingleValue")

            assert len(
                dsv_cv.get_analysis_result()
            ) == 1, "Error: SpontaneousActivityLength accepts only datastore that holds one  SingleValue analysis data structure with value_name: Mean(CV of ISI squared). It contains: %d" % len(
                dsv_cv.get_analysis_result())
            # BUGFIX: the two messages below previously reported len(dsv_cv...)
            # instead of the count of the datastore they actually assert on.
            assert len(
                dsv_corr.get_analysis_result()
            ) == 1, "Error: SpontaneousActivityLength accepts only datastore that holds one  SingleValue analysis data structure with value_name: 'Mean(Correlation coefficient(psth (bin=5.0))).It contains: %d" % len(
                dsv_corr.get_analysis_result())
            assert len(
                dsv_psth.get_analysis_result()
            ) == 1, "Error: SpontaneousActivityLength accepts only datastore that holds one  AnalogSignal analysis data structure with value_name: 'Mean(psth (bin=5.0)).It contains: %d" % len(
                dsv_psth.get_analysis_result())

            cv = dsv_cv.get_analysis_result()[0].value
            corr = dsv_corr.get_analysis_result()[0].value
            psth = dsv_psth.get_analysis_result()[0].analog_signal
            if cv >= 0.95 and corr <= 0.05:
                # Index of the last non-zero PSTH bin; its timestamp is the
                # spontaneous-activity length.
                i = numpy.nonzero(psth)[0][-1]
                logger.warning(i)
                l = psth.times[i]
            else:
                l = 0
            # NOTE(review): these warnings look like leftover debug output --
            # consider demoting to logger.debug or removing.
            logger.warning(psth)
            logger.warning(cv)
            logger.warning(corr)
            logger.warning(l)

            self.datastore.full_datastore.add_analysis_result(
                SingleValue(l,
                            qt.ms,
                            value_name='Spontaneous activity length',
                            sheet_name=sheet,
                            tags=self.tags,
                            analysis_algorithm=self.__class__.__name__))
Esempio n. 10
0
    def __init__(self, datastore, **params):
        """Discover the grid of (x, y) parameter values present in *datastore*.

        Fills ``self.x_axis_values`` / ``self.y_axis_values`` with the values
        found for ``self.x_axis_parameter`` / ``self.y_axis_parameter`` in the
        datastore's analysis results, and asserts that every x value occurs
        in combination with the same set of y values (a complete grid).
        """
        Parameterized.__init__(self, **params)
        self.datastore = datastore

        # All values the x-axis parameter takes across the analysis results.
        self.x_axis_values = list(parameter_value_list(datastore.get_analysis_result(), self.x_axis_parameter))
        # Y-axis values co-occurring with the first x-axis value.
        first_x_dsv = param_filter_query(datastore, **{self.x_axis_parameter: self.x_axis_values[0]})
        self.y_axis_values = list(parameter_value_list(first_x_dsv.get_analysis_result(), self.y_axis_parameter))

        # The combinations must form a full grid: each x value has to be
        # paired with exactly the same set of y values.
        expected_y = set(self.y_axis_values)
        for x_val in self.x_axis_values:
            dsv = param_filter_query(datastore, **{self.x_axis_parameter: x_val})
            assert expected_y == parameter_value_list(dsv.get_analysis_result(), self.y_axis_parameter)
Esempio n. 11
0
    def __init__(self, datastore, **params):
        """Find the two-parameter value grid contained in *datastore*.

        Records the datastore, collects the distinct values of
        ``self.x_axis_parameter`` and of ``self.y_axis_parameter`` (the
        latter taken at the first x value), and checks that the analysis
        results cover every (x, y) combination.
        """
        Parameterized.__init__(self, **params)
        self.datastore = datastore

        # Distinct values of the x-axis parameter in the analysis results.
        ads = datastore.get_analysis_result()
        self.x_axis_values = list(parameter_value_list(ads, self.x_axis_parameter))
        # Distinct y-axis values, sampled at the first x-axis value.
        dsv_first = param_filter_query(datastore, **{self.x_axis_parameter: self.x_axis_values[0]})
        self.y_axis_values = list(parameter_value_list(dsv_first.get_analysis_result(), self.y_axis_parameter))

        # Grid check: the same y-value set must appear for every x value.
        reference = set(self.y_axis_values)
        for value in self.x_axis_values:
            sub_dsv = param_filter_query(datastore, **{self.x_axis_parameter: value})
            assert reference == parameter_value_list(sub_dsv.get_analysis_result(), self.y_axis_parameter)
Esempio n. 12
0
 def perform_analysis(self):
     """Compute a surround-suppression index per neuron.

     For each stimulus group the index is the neuron's response with an
     iso-oriented surround (surround_orientation == 0) divided by its
     response with an orthogonal surround (surround_orientation == pi/2),
     stored as a 'Suppression index of ...' PerNeuronValue.

     Fixes over the previous version: removed the unused local
     ``surround_tuning`` and replaced Python-2-only ``xrange`` with
     ``range`` (equivalent on both Python 2 and 3).
     """
     dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',sheet_name=self.parameters.sheet_name,st_name='DriftingSinusoidalGratingCenterSurroundStimulus')

     # Nothing to do if no matching analysis results are present.
     if len(dsv.get_analysis_result()) == 0: return
     # All results must share the stimulus type and differ only in stimulus id.
     assert queries.ads_with_equal_stimulus_type(dsv)
     assert queries.equal_ads(dsv,except_params=['stimulus_id'])
     self.pnvs = dsv.get_analysis_result()

     # Parse the stimulus descriptor attached to each PerNeuronValue.
     self.st = [MozaikParametrized.idd(s.stimulus_id) for s in self.pnvs]

     # Collapse the pnvs into tuning curves along 'surround_orientation';
     # values are ordered according to the first pnv's ids.
     self.tc_dict = colapse_to_dictionary([z.get_value_by_id(self.parameters.neurons) for z in self.pnvs],self.st,"surround_orientation")
     for k in self.tc_dict.keys():
         sis = []
         # the calculation is done neuron by neuron
         for i in range(0, len(self.parameters.neurons)):
             ors = self.tc_dict[k][0]
             values = numpy.array([a[i] for a in self.tc_dict[k][1]])
             # map: surround orientation -> this neuron's response
             response = dict(zip(ors, values))
             # iso-oriented (0) over orthogonal (pi/2) surround response
             sis.append(response[0] / response[numpy.pi / 2])

         self.datastore.full_datastore.add_analysis_result(PerNeuronValue(sis,self.parameters.neurons,None,value_name = 'Suppression index of ' + self.pnvs[0].value_name ,sheet_name=self.parameters.sheet_name,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(k)))
Esempio n. 13
0
 def __init__(self, datastore, parameters, plot_file_name=None,fig_param=None):
     """Prepare tuning-curve data for plotting.

     Partitions the datastore's PerNeuronValue analysis results by
     'value_name', collapses each partition into tuning curves along
     ``self.parameters.parameter_name`` for the requested neurons, and
     (optionally) records, per curve, the index of the stimulus-parameter
     value producing the maximal mean response (used for centering).
     """
     Plotting.__init__(self, datastore, parameters, plot_file_name, fig_param)
     
     # Per-partition collections filled in the loop below.
     self.st = []
     self.tc_dict = []
     self.pnvs = []
     self.max_mean_response_indexes = []
     # All analysis results must come from the same stimulus type, and at
     # least one neuron must be requested.
     assert queries.ads_with_equal_stimulus_type(datastore)
     assert len(self.parameters.neurons) > 0 , "ERROR, empty list of neurons specified"
     # One sub-datastore-view per distinct 'value_name'.
     dsvs = queries.partition_analysis_results_by_parameters_query(self.datastore,parameter_list=['value_name'],excpt=True)
     for dsv in dsvs:
         dsv = queries.param_filter_query(dsv,identifier='PerNeuronValue',sheet_name=self.parameters.sheet_name)
         assert matching_parametrized_object_params(dsv.get_analysis_result(), params=['value_name'])
         self.pnvs.append(dsv.get_analysis_result())
         # get stimuli
         st = [MozaikParametrized.idd(s.stimulus_id) for s in self.pnvs[-1]]
         self.st.append(st)
         # transform the pnvs into a dictionary of tuning curves along the parameter_name
         # also make sure the values are ordered according to ids in the first pnv
         dic = colapse_to_dictionary([z.get_value_by_id(self.parameters.neurons) for z in self.pnvs[-1]],st,self.parameters.parameter_name)
         #sort the entries in dict according to the parameter parameter_name values 
         for k in  dic:
             # b: parameter values, a: per-neuron responses; sort both by b.
             (b, a) = dic[k]
             par, val = zip(
                          *sorted(
                             zip(b,
                                 numpy.array(a))))
             dic[k] = (par,numpy.array(val))
         self.tc_dict.append(dic)
         if self.parameters.centered:
            # Per neuron, index of the parameter value with maximal response
            # summed over all tuning curves of this partition.
            self.max_mean_response_indexes.append(numpy.argmax(sum([a[1] for a in dic.values()]),axis=0))
Esempio n. 14
0
 def perform_analysis(self):
     """Compute the Local Homogeneity Index (LHI) for every PerNeuronValue.

     For each neuron, nearby neurons' values (weighted by a Gaussian of
     width ``sigma`` over cortical distance) are summed as unit vectors
     exp(2i*value); the magnitude of the sum measures local homogeneity.
     NOTE(review): the factor 2 assumes the values are angles with period
     pi (e.g. orientation preference) — confirm for other value types.

     Improvement over the previous version: the per-neuron sheet-index,
     position and value lookups (and the Gaussian weight) are computed once
     instead of inside the O(n^2) pair loop; the accumulation order is
     unchanged, so the resulting values are identical.
     """
     sigma = self.parameters.sigma
     for sheet in self.datastore.sheets():
         positions = self.datastore.get_neuron_postions()[sheet]
         for pnv in queries.param_filter_query(self.datastore,sheet_name=sheet,identifier='PerNeuronValue').get_analysis_result():
             # Hoist loop-invariant lookups out of the O(n^2) loop.
             idxs = [self.datastore.get_sheet_indexes(sheet, nid) for nid in pnv.ids]
             xs = [positions[0][idx] for idx in idxs]
             ys = [positions[1][idx] for idx in idxs]
             vals = [pnv.get_value_by_id(nid) for nid in pnv.ids]
             n = len(pnv.ids)
             lhis = []
             for j in range(n):
                 sx = xs[j]
                 sy = ys[j]
                 lhi_current = [0, 0]
                 for m in range(n):
                     tx = xs[m]
                     ty = ys[m]
                     # Gaussian distance weight, computed once per pair.
                     w = numpy.exp(-((sx-tx)*(sx-tx)+(sy-ty)*(sy-ty))/(2*sigma*sigma))
                     lhi_current[0] += w*numpy.cos(2*vals[m])
                     lhi_current[1] += w*numpy.sin(2*vals[m])
                 lhis.append(numpy.sqrt(lhi_current[0]*lhi_current[0] + lhi_current[1]*lhi_current[1])/(2*numpy.pi*sigma*sigma))

             self.datastore.full_datastore.add_analysis_result(
                 PerNeuronValue(lhis,
                                pnv.ids,
                                qt.dimensionless,
                                value_name='LocalHomogeneityIndex' + '(' + str(self.parameters.sigma) + ':' + pnv.value_name + ')',
                                sheet_name=sheet,
                                tags=self.tags,
                                period=None,
                                analysis_algorithm=self.__class__.__name__,
                                stimulus_id=str(pnv.stimulus_id)))
Esempio n. 15
0
 def perform_analysis(self):
     """Compute the Local Homogeneity Index (LHI) for every PerNeuronValue.

     For each neuron, nearby neurons' values (weighted by a Gaussian of
     width ``sigma`` over cortical distance) are summed as unit vectors
     exp(2i*value); the magnitude of the sum measures local homogeneity.
     NOTE(review): the factor 2 assumes the values are angles with period
     pi (e.g. orientation preference) — confirm for other value types.

     Improvement over the previous version: the per-neuron sheet-index,
     position and value lookups (and the Gaussian weight) are computed once
     instead of inside the O(n^2) pair loop; the accumulation order is
     unchanged, so the resulting values are identical.
     """
     sigma = self.parameters.sigma
     for sheet in self.datastore.sheets():
         positions = self.datastore.get_neuron_postions()[sheet]
         for pnv in queries.param_filter_query(self.datastore,sheet_name=sheet,identifier='PerNeuronValue').get_analysis_result():
             # Hoist loop-invariant lookups out of the O(n^2) loop.
             idxs = [self.datastore.get_sheet_indexes(sheet, nid) for nid in pnv.ids]
             xs = [positions[0][idx] for idx in idxs]
             ys = [positions[1][idx] for idx in idxs]
             vals = [pnv.get_value_by_id(nid) for nid in pnv.ids]
             n = len(pnv.ids)
             lhis = []
             for j in range(n):
                 sx = xs[j]
                 sy = ys[j]
                 lhi_current = [0, 0]
                 for m in range(n):
                     tx = xs[m]
                     ty = ys[m]
                     # Gaussian distance weight, computed once per pair.
                     w = numpy.exp(-((sx-tx)*(sx-tx)+(sy-ty)*(sy-ty))/(2*sigma*sigma))
                     lhi_current[0] += w*numpy.cos(2*vals[m])
                     lhi_current[1] += w*numpy.sin(2*vals[m])
                 lhis.append(numpy.sqrt(lhi_current[0]*lhi_current[0] + lhi_current[1]*lhi_current[1])/(2*numpy.pi*sigma*sigma))

             self.datastore.full_datastore.add_analysis_result(
                 PerNeuronValue(lhis,
                                pnv.ids,
                                qt.dimensionless,
                                value_name='LocalHomogeneityIndex' + '(' + str(self.parameters.sigma) + ':' + pnv.value_name + ')',
                                sheet_name=sheet,
                                tags=self.tags,
                                period=None,
                                analysis_algorithm=self.__class__.__name__))
Esempio n. 16
0
 def perform_analysis(self):
     """Compute a surround-suppression index per neuron.

     For each stimulus group the index is the neuron's response with an
     iso-oriented surround (surround_orientation == 0) divided by its
     response with an orthogonal surround (surround_orientation == pi/2),
     stored as a 'Suppression index of ...' PerNeuronValue.

     Fixes over the previous version: removed the unused local
     ``surround_tuning`` and replaced Python-2-only ``xrange`` with
     ``range`` (equivalent on both Python 2 and 3).
     """
     dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',sheet_name=self.parameters.sheet_name,st_name='DriftingSinusoidalGratingCenterSurroundStimulus')

     # Nothing to do if no matching analysis results are present.
     if len(dsv.get_analysis_result()) == 0: return
     # All results must share the stimulus type and differ only in stimulus id.
     assert queries.ads_with_equal_stimulus_type(dsv)
     assert queries.equal_ads(dsv,except_params=['stimulus_id'])
     self.pnvs = dsv.get_analysis_result()

     # Parse the stimulus descriptor attached to each PerNeuronValue.
     self.st = [MozaikParametrized.idd(s.stimulus_id) for s in self.pnvs]

     # Collapse the pnvs into tuning curves along 'surround_orientation';
     # values are ordered according to the first pnv's ids.
     self.tc_dict = colapse_to_dictionary([z.get_value_by_id(self.parameters.neurons) for z in self.pnvs],self.st,"surround_orientation")
     for k in self.tc_dict.keys():
         sis = []
         # the calculation is done neuron by neuron
         for i in range(0, len(self.parameters.neurons)):
             ors = self.tc_dict[k][0]
             values = numpy.array([a[i] for a in self.tc_dict[k][1]])
             # map: surround orientation -> this neuron's response
             # (OrderedDict kept from the original implementation)
             response = OrderedDict(zip(ors, values))
             # iso-oriented (0) over orthogonal (pi/2) surround response
             sis.append(response[0] / response[numpy.pi / 2])

         self.datastore.full_datastore.add_analysis_result(PerNeuronValue(sis,self.parameters.neurons,None,value_name = 'Suppression index of ' + self.pnvs[0].value_name ,sheet_name=self.parameters.sheet_name,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(k)))
Esempio n. 17
0
    def subplot(self, subplotspec):
        """Build an orientation-tuning overview figure.

        Layout: scatter plots of set vs. measured orientation preference
        (exc/inh), an HWHH contrast-comparison scatter, mean and individual
        orientation tuning curves for both sheets, and HWHH histograms.
        Returns the ``plots`` dict expected by the plotting framework.
        """
        plots = {}
        gs = gridspec.GridSpecFromSubplotSpec(20, 29, subplot_spec=subplotspec,
                                              hspace=1.0, wspace=1.0)
        
        
        #analog_ids = sorted(numpy.random.permutation(queries.param_filter_query(self.datastore,sheet_name=self.parameters.exc_sheet_name).get_segments()[0].get_stored_esyn_ids()))
        #analog_ids_inh = sorted(numpy.random.permutation(queries.param_filter_query(self.datastore,sheet_name=self.parameters.inh_sheet_name).get_segments()[0].get_stored_isyn_ids()))

        # NOTE(review): sorted() undoes the random permutation, so the
        # permutation here appears to be a no-op — confirm intent.
        analog_ids = sorted(numpy.random.permutation(queries.param_filter_query(self.datastore,sheet_name=self.parameters.exc_sheet_name).get_segments()[0].get_stored_spike_train_ids()))
        analog_ids_inh = sorted(numpy.random.permutation(queries.param_filter_query(self.datastore,sheet_name=self.parameters.inh_sheet_name).get_segments()[0].get_stored_spike_train_ids()))
                
        #pnv = queries.param_filter_query(self.datastore,value_name=['orientation max of Firing rate'],sheet_name=self.parameters.exc_sheet_name,st_contrast=100).get_analysis_result()[0]
        #analog_ids = numpy.array(pnv.ids)[pnv.values>5.0]
        #pnv = queries.param_filter_query(self.datastore,value_name=['orientation max of Firing rate'],sheet_name=self.parameters.inh_sheet_name,st_contrast=100).get_analysis_result()[0]
        #analog_ids_inh = numpy.array(pnv.ids)[pnv.values>5.0]

        
        
        # Scatter of afferent (set) vs. measured orientation preference,
        # excitatory sheet.
        dsv1 = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['LGNAfferentOrientation'],sheet_name=self.parameters.exc_sheet_name)
        dsv2 = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['orientation preference of Firing rate'],sheet_name=self.parameters.exc_sheet_name,st_contrast=100,analysis_algorithm='GaussianTuningCurveFit')
        plots['Or_corr_exc'] = (PerNeuronValueScatterPlot(dsv1+dsv2, ParameterSet({'only_matching_units' : True , 'ignore_nan' : True})),gs[0:4,3:5],{'x_label' : 'OR measured','y_label' : 'OR set','x_lim': (0.0,numpy.pi),'y_lim' : (0.0,numpy.pi), 'cmp' : None})
        # Same scatter for the inhibitory sheet.
        dsv1 = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['LGNAfferentOrientation'],sheet_name=self.parameters.inh_sheet_name)
        dsv2 = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['orientation preference of Firing rate'],sheet_name=self.parameters.inh_sheet_name,st_contrast=100,analysis_algorithm='GaussianTuningCurveFit')
        plots['Or_corr_ing'] = (PerNeuronValueScatterPlot(dsv1+dsv2, ParameterSet({'only_matching_units' : True , 'ignore_nan' : True})),gs[0:4,5:7],{'x_label' : 'OR measured','y_label' : None,'x_lim': (0.0,numpy.pi),'y_lim' : (0.0,numpy.pi), 'cmp' : None})
        
        
                
        # HWHH comparison scatter across both sheets.
        dsv = queries.param_filter_query(self.datastore,value_name=['orientation HWHH of Firing rate'],sheet_name=[self.parameters.exc_sheet_name,self.parameters.inh_sheet_name])    
        plots['HWHH'] = (PerNeuronValueScatterPlot(dsv, ParameterSet({'only_matching_units' : True, 'ignore_nan' : True})),gs[0:4,8:12],{'x_lim': (0,50),'y_lim' : (0,50),'identity_line' : True, 'x_label' : 'HWHH Cont. 100%','y_label' : 'HWHH Cont. 50%', 'cmp' : None})

        # Orientation tuning curves: mean plus two rows of individual
        # neurons, excitatory sheet.
        dsv = queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',analysis_algorithm=['TrialAveragedFiringRate'])
        plots['ExcORTCMean'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids), 'sheet_name' : self.parameters.exc_sheet_name,'centered'  : True,'mean' : True,'pool' : False,'polar' : False})),gs[6:10,:3],{'title' : None,'x_label' : None , 'y_label' : 'EXC\nfiring rate (sp/s)','colors' : ['#FFAB00','#000000']})
        plots['ExcORTC1'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids[0:7]), 'sheet_name' : self.parameters.exc_sheet_name,'centered'  : True,'mean' : False,'pool' : False,'polar' : False})),gs[6:8,3:],{'title' : None,'x_label' : None,'x_axis' : False, 'x_ticks' : False,'colors' : ['#FFAB00','#000000']})
        plots['ExcORTC2'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids[7:14]), 'sheet_name' : self.parameters.exc_sheet_name,'centered'  : True,'mean' : False,'pool' : False,'polar': False})),gs[8:10,3:],{'title' : None,'x_label' : None,'x_axis' : False,'colors' : ['#FFAB00','#000000']})

        # Same tuning-curve block for the inhibitory sheet.
        plots['InhORTCMean'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids_inh), 'sheet_name' : self.parameters.inh_sheet_name,'centered'  : True,'mean' : True,'pool' : False,'polar' : False})),gs[11:15,:3],{'title' : None, 'y_label' : 'INH\nfiring rate (sp/s)','colors' : ['#FFAB00','#000000']})
        plots['InhORTC1'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids_inh[0:3]), 'sheet_name' : self.parameters.inh_sheet_name,'centered'  : True,'mean' : False,'pool' : False,'polar' : False})),gs[11:13,3:],{'title' : None,'x_label' : None,'y_axis' : False,'x_axis' : False,'colors' : ['#FFAB00','#000000']})
        plots['InhORTC2'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids_inh[3:6]), 'sheet_name' : self.parameters.inh_sheet_name,'centered'  : True,'mean' : False,'pool' : False,'polar' : False})),gs[13:15,3:],{'title' : None,'y_axis' : None,'colors' : ['#FFAB00','#000000']})

        
        # HWHH histograms for each sheet.
        dsv = queries.param_filter_query(self.datastore,value_name=['orientation HWHH of Firing rate'],sheet_name=[self.parameters.exc_sheet_name])    
        plots['HWHHHistogramExc'] = (PerNeuronValuePlot(dsv, ParameterSet({'cortical_view' : False})),gs[17:,1:7],{'title' : 'Excitatory' , 'x_lim' : (0.0,50.0), 'x_label' : 'HWHH'})
        dsv = queries.param_filter_query(self.datastore,value_name=['orientation HWHH of Firing rate'],sheet_name=[self.parameters.inh_sheet_name])    
        plots['HWHHHistogramInh'] = (PerNeuronValuePlot(dsv, ParameterSet({'cortical_view' : False})),gs[17:,8:14],{'title' : 'Inhibitory' , 'x_lim' : (0.0,50.0), 'x_label' : 'HWHH'})

        return plots
Esempio n. 18
0
    def subplot(self, subplotspec):
        """Bar plot comparing the mean trial-to-trial Fano factor under
        natural-image (NI) versus grating (GR) stimulation."""
        gs = gridspec.GridSpecFromSubplotSpec(1, 1, subplot_spec=subplotspec,hspace=1.0, wspace=1.0)

        dsv = queries.param_filter_query(self.datastore,value_name='Fano Factor (spike count (bin=13.0))')

        ni_results = queries.param_filter_query(dsv,analysis_algorithm='TrialToTrialFanoFactorOfAnalogSignal',st_name="NaturalImageWithEyeMovement").get_analysis_result()
        gr_results = queries.param_filter_query(dsv,analysis_algorithm='TrialToTrialFanoFactorOfAnalogSignal',st_name="FullfieldDriftingSinusoidalGrating",st_orientation=0).get_analysis_result()
        # Exactly one Fano-factor result is expected per stimulus condition.
        assert len(ni_results) == 1
        assert len(gr_results) == 1

        ni = ni_results[0].values
        gr = gr_results[0].values
        # Drop neurons whose Fano factor is undefined (NaN).
        ni = ni[~numpy.isnan(ni)]
        gr = gr[~numpy.isnan(gr)]

        return {'Bar': (BarComparisonPlot({"NI" : numpy.mean(ni), "GR" : numpy.mean(gr)}), gs[:,:], {})}
Esempio n. 19
0
    def subplot(self, subplotspec):
        """Compare mean trial-to-trial Fano factors between natural-image
        (NI) and grating (GR) stimulation as a bar plot."""
        gs = gridspec.GridSpecFromSubplotSpec(1, 1, subplot_spec=subplotspec,hspace=1.0, wspace=1.0)

        fano_dsv = queries.param_filter_query(self.datastore,value_name='Fano Factor (spike count (bin=13.0))')

        # One Fano-factor analysis result per condition is required.
        ni_ads = queries.param_filter_query(fano_dsv,analysis_algorithm='TrialToTrialFanoFactorOfAnalogSignal',st_name="NaturalImageWithEyeMovement").get_analysis_result()
        assert len(ni_ads) == 1
        gr_ads = queries.param_filter_query(fano_dsv,analysis_algorithm='TrialToTrialFanoFactorOfAnalogSignal',st_name="FullfieldDriftingSinusoidalGrating",st_orientation=0).get_analysis_result()
        assert len(gr_ads) == 1

        ni_values = ni_ads[0].values
        gr_values = gr_ads[0].values
        # Exclude undefined (NaN) Fano factors before averaging.
        ni_values = ni_values[~numpy.isnan(ni_values)]
        gr_values = gr_values[~numpy.isnan(gr_values)]

        plots = {}
        plots['Bar'] = (BarComparisonPlot({"NI" : numpy.mean(ni_values), "GR" : numpy.mean(gr_values)}),gs[:,:],{})
        return plots
Esempio n. 20
0
    def check_segments_gratings_merge(self, ref_dss, merged_ds):
        """
        Verify that all Drifting Sinusoidal Grating segments of the reference
        datastores are present, with identical spikes, in the merged
        datastore, and that the merged segment counts add up.

        Parameters
        ----------

        ref_dss : An iterable object containing the reference datastores that have been used for the merge
        merged_ds : The DataStore object that resulted from the merge
        """

        # Count the merged datastore's segments up front.
        merged_seg_count = len(merged_ds.get_segments())
        merged_null_seg_count = len(merged_ds.get_segments(null=True))

        total_ref_segs = 0
        total_ref_null_segs = 0
        for ref_ds in ref_dss:
            total_ref_segs += len(ref_ds.get_segments())
            total_ref_null_segs += len(ref_ds.get_segments(null=True))

            grating_segs = queries.param_filter_query(
                ref_ds, st_name="DriftingSinusoidalGrating").get_segments()
            for ref_seg in grating_segs:
                stimulus = eval(ref_seg.annotations["stimulus"])
                # Locate the matching segment in the merged datastore by
                # sheet name and stimulus parameters.
                merged_seg = queries.param_filter_query(
                    merged_ds,
                    st_name="DriftingSinusoidalGrating",
                    sheet_name=ref_seg.annotations["sheet_name"],
                    st_orientation=stimulus["orientation"],
                    st_contrast=stimulus["contrast"],
                    st_trial=stimulus["trial"],
                ).get_segments()[0]

                # Spike trains must be identical between the pair.
                np.testing.assert_equal(
                    self.get_spikes(ref_seg),
                    self.get_spikes(merged_seg),
                )
                # Release loaded segment data once compared.
                ref_seg.release()
                merged_seg.release()

        assert (
            merged_seg_count == total_ref_segs
        ), "The number of segments in the merged datastore must be equal to the sum of the number of segments in each reference datastores"
        assert (
            merged_null_seg_count == total_ref_null_segs
        ), "The number of null segments in the merged datastore must be equal to the sum of the number of null segments in each reference datastores"
Esempio n. 21
0
    def perform_analysis(self):
        """Dump every SingleValue analysis result to a text file.

        Writes one line per result into ``Global.root_directory +
        self.parameters.file_name``, in the form:
        ``sheet_name value_name value analysis_algorithm stimulus_id``.

        Fix: the file is now opened with a ``with`` statement so the handle
        is closed even if a write raises (the old code leaked it on error).
        """
        dsv = queries.param_filter_query(self.datastore,identifier='SingleValue')

        with open(Global.root_directory+self.parameters.file_name,'w') as f:
            for a in dsv.get_analysis_result():
                f.write("%s %s %s %s %s\n" % (a.sheet_name, a.value_name, str(a.value), a.analysis_algorithm, a.stimulus_id))
Esempio n. 22
0
 def perform_analysis(self):
           """Size-tuning analysis over DriftingSinusoidalGratingDisk radii.

           For each neuron: the radius of maximal response ("Max.
           facilitation radius"), the radius of the strongest response beyond
           that peak ("Max. suppressive radius"), the peak response itself,
           and a suppression index (peak - beyond-peak response) / peak.
           Results are stored as PerNeuronValue analysis data structures.
           """
           dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',sheet_name=self.parameters.sheet_name,st_name='DriftingSinusoidalGratingDisk')
           
           # Nothing to do if no matching results; all results must share the
           # stimulus type and differ only in stimulus id.
           if len(dsv.get_analysis_result()) == 0: return
           assert queries.ads_with_equal_stimulus_type(dsv)
           assert queries.equal_ads(dsv,except_params=['stimulus_id'])
           self.pnvs = dsv.get_analysis_result()
           
           # get stimuli
           self.st = [MozaikParametrized.idd(s.stimulus_id) for s in self.pnvs]
           
           
           # transform the pnvs into a dictionary of tuning curves according along the 'radius' parameter
           # also make sure they are ordered according to the first pnv's idds 
           
           self.tc_dict = colapse_to_dictionary([z.get_value_by_id(self.parameters.neurons) for z in self.pnvs],self.st,"radius")
           for k in self.tc_dict.keys():
                   crf_sizes = []
                   supp_sizes= []
                   sis = []
                   max_responses=[]
                   
                   # we will do the calculation neuron by neuron
                   for i in xrange(0,len(self.parameters.neurons)):
                       
                       rads = self.tc_dict[k][0]
                       values = numpy.array([a[i] for a in self.tc_dict[k][1]])
                       
                       # sort them based on radiuses
                       rads , values = zip(*sorted(zip(rads,values)))
                                                   
                       # Peak (classical receptive field) response and radius.
                       max_response = numpy.max(values)
                       crf_index  = numpy.argmax(values)
                       crf_size = rads[crf_index]
                       
                       # NOTE(review): this picks the *largest* response after
                       # the peak (argmax, not argmin) as the "suppressive"
                       # point — confirm this is the intended definition.
                       if crf_index < len(values)-1:
                           supp_index = crf_index+numpy.argmax(values[crf_index+1:])+1
                       else:
                           supp_index = len(values)-1
                                                   
                       supp_size = rads[supp_index]                                
                       
                       # Suppression index; guarded against a zero peak.
                       if values[crf_index] != 0:
                           si = (values[crf_index]-values[supp_index])/values[crf_index]
                       else:
                           si = 0
                       
                       crf_sizes.append(crf_size)
                       supp_sizes.append(supp_size)
                       sis.append(si)
                       max_responses.append(max_response)
                       
                       
                   # One PerNeuronValue per derived quantity, per stimulus group.
                   self.datastore.full_datastore.add_analysis_result(PerNeuronValue(max_responses,self.parameters.neurons,self.st[0].params()["radius"].units,value_name = 'Max. response of ' + self.pnvs[0].value_name ,sheet_name=self.parameters.sheet_name,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(k)))
                   self.datastore.full_datastore.add_analysis_result(PerNeuronValue(crf_sizes,self.parameters.neurons,self.st[0].params()["radius"].units,value_name = 'Max. facilitation radius of ' + self.pnvs[0].value_name ,sheet_name=self.parameters.sheet_name,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(k)))
                   self.datastore.full_datastore.add_analysis_result(PerNeuronValue(supp_sizes,self.parameters.neurons,self.st[0].params()["radius"].units,value_name = 'Max. suppressive radius of ' + self.pnvs[0].value_name ,sheet_name=self.parameters.sheet_name,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(k)))
                   self.datastore.full_datastore.add_analysis_result(PerNeuronValue(sis,self.parameters.neurons,None,value_name = 'Suppression index of ' + self.pnvs[0].value_name ,sheet_name=self.parameters.sheet_name,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(k)))
Esempio n. 23
0
 def subplot(self, subplotspec):
     """Overview figure for one neuron: responses to a full-field drifting
     grating (orientation 0, contrast 100) and to a natural image with
     eye movement.
     """
     plots = {}
     gs = gridspec.GridSpecFromSubplotSpec(4, 18, subplot_spec=subplotspec,
                                           hspace=1.0, wspace=1.0)
     
     # NOTE(review): the list of presented orientations (orr) was computed
     # here on every call but never used — it only fed the commented-out
     # preferred-orientation query below — so the dead computation was
     # folded into the comment.
     #orr = list(set([MozaikParametrized.idd(s).orientation for s in queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',st_contrast=100).get_stimuli()]))                
     #ors = self.datastore.get_analysis_result(identifier='PerNeuronValue',value_name = 'LGNAfferentOrientation', sheet_name = self.parameters.sheet_name)
     
     #dsv = queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',st_orientation=orr[numpy.argmin([circular_dist(o,ors[0].get_value_by_id(self.parameters.neuron),numpy.pi)  for o in orr])],st_contrast=100)             
     dsv = queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',st_orientation=0,st_contrast=100)             
     plots['Gratings'] = (OverviewPlot(dsv, ParameterSet({'sheet_name': self.parameters.sheet_name,'neuron': self.parameters.neuron,'spontaneous' : True, 'sheet_activity' : {}})),gs[0:2,:],{'x_label': None})
     #dsv = queries.param_filter_query(self.datastore,st_name='DriftingGratingWithEyeMovement')            
     #plots['GratingsWithEM'] = (OverviewPlot(dsv, ParameterSet({'sheet_name': self.parameters.sheet_name,'neuron': self.parameters.neuron, 'spontaneous' : True,'sheet_activity' : {}})),gs[2:4,:],{'x_label': None})
     dsv = queries.param_filter_query(self.datastore,st_name='NaturalImageWithEyeMovement')            
     plots['NIwEM'] = (OverviewPlot(dsv, ParameterSet({'sheet_name': self.parameters.sheet_name,'neuron': self.parameters.neuron,'spontaneous' : True, 'sheet_activity' : {}})),gs[2:4,:],{})
     
     
     return plots
Esempio n. 24
0
    def subplot(self, subplotspec):
        """Build an activity-overview figure.

        Left column: spike rasters and histograms of a few X_ON / X_OFF LGN
        cells. Middle: raster, Vm and conductance traces of one cortical
        neuron. Right: GSTA conductance signals (results tagged 'GSTA').
        Returns the ``plots`` dict expected by the plotting framework.
        """
        plots = {}
        gs = gridspec.GridSpecFromSubplotSpec(12, 18, subplot_spec=subplotspec,
                                              hspace=1.0, wspace=1.0)
        
        # All recordings must come from the same stimulus (up to trial).
        assert queries.equal_stimulus(self.datastore,['trial'])        
        
        # First two and next two spike trains of the ON / OFF LGN sheets.
        lgn_on_dsv = queries.param_filter_query(self.datastore, sheet_name='X_ON')
        lgn_off_dsv = queries.param_filter_query(self.datastore, sheet_name='X_OFF')
        lgn_spikes_a = [[s.spiketrains[0:2] for s in lgn_on_dsv.get_segments()],
                      [s.spiketrains[0:2] for s in lgn_off_dsv.get_segments()]]
        lgn_spikes_b = [[s.spiketrains[2:4] for s in lgn_on_dsv.get_segments()],
                      [s.spiketrains[2:4] for s in lgn_off_dsv.get_segments()]]
        
        plots['LGN_SRP1'] = (SpikeRasterPlot(lgn_spikes_a),gs[1:4, 0:5],{'x_axis' : False, 'x_label': None,'colors':['#FACC2E', '#0080FF']})
        plots['LGN_SHP1'] = (SpikeHistogramPlot(lgn_spikes_a),gs[4:5, 0:5],{'x_axis' : False, 'x_label': None,'colors':['#FACC2E', '#0080FF']})
        plots['LGN_SRP2'] = (SpikeRasterPlot(lgn_spikes_b),gs[7:10, 0:5],{'x_axis' : False, 'x_label': None,'colors':['#FACC2E', '#0080FF']})
        plots['LGN_SHP2'] = (SpikeHistogramPlot(lgn_spikes_b),gs[10:11, 0:5],{'colors':['#FACC2E', '#0080FF']})
                     
        dsv1 = queries.param_filter_query(self.datastore,sheet_name=self.parameters.sheet_name)
        #print self.parameters.neuron
        #sp = [[[s.get_spiketrain(self.parameters.neuron)] for s in dsv1.get_segments()]]
        
        # NOTE(review): 'V1_SRP1' and 'V1_SHP1' draw the same RasterPlot into
        # the same grid cells — the second overwrites the first; confirm.
        plots['V1_SRP1'] = (RasterPlot(dsv1,ParameterSet({'sheet_name' : self.parameters.sheet_name, 'spontaneous' : True,'neurons' : [self.parameters.neuron],'trial_averaged_histogram': False})),gs[:3, 6:14],{'x_axis' : False, 'x_label': None})
        plots['V1_SHP1'] = (RasterPlot(dsv1,ParameterSet({'sheet_name' :self.parameters.sheet_name, 'spontaneous' : True,'neurons' : [self.parameters.neuron],'trial_averaged_histogram': False})),gs[:3, 6:14],{'x_axis' : False, 'x_label': None})

        # Membrane potential and conductance traces of the selected neuron.
        p = {}
        p['title']=None
        p['x_axis']=None
        p['x_label']=None
        plots['Vm_Plot'] = (VmPlot(dsv1, ParameterSet({'sheet_name': self.parameters.sheet_name,'neuron': self.parameters.neuron, 'spontaneous' : True})),gs[4:8, 6:14],p)                                  
        p = {}
        p['title']=None
        plots['Gsyn_Plot'] = (GSynPlot(dsv1, ParameterSet({'sheet_name': self.parameters.sheet_name,'neuron': self.parameters.neuron,'spontaneous' : True})),gs[8:12, 6:14],p)                                  
        plots['GSTA_Plot'] = (ConductanceSignalListPlot(queries.TagBasedQuery(ParameterSet({'tags': ['GSTA']})).query(dsv1),ParameterSet({'normalize_individually': True, 'neurons' : [self.parameters.neuron]})),gs[7:10, 15:],{})                                  
        
        #p = {}
        #p['mean'] = False
        #AnalogSignalListPlot(dsv, ParameterSet({'sheet_name': self.parameters.sheet_name,
        #                                        'ylabel': 'AC (norm)'})).subplot(gs[2:5, 15:], p)

        return plots
Esempio n. 25
0
    def subplot(self, subplotspec):
        """Lay out the overview figure: LGN ON/OFF raster+histogram panels on
        the left, V1 raster, Vm and conductance traces in the middle column,
        and the GSTA conductance panel on the right.

        Returns a dict mapping panel name -> (plot, gridspec slot, params).
        """
        gs = gridspec.GridSpecFromSubplotSpec(12, 18, subplot_spec=subplotspec,
                                              hspace=1.0, wspace=1.0)

        # all recordings must share the stimulus (up to the trial number)
        assert queries.equal_stimulus(self.datastore,['trial'])

        on_dsv = queries.param_filter_query(self.datastore, sheet_name='X_ON')
        off_dsv = queries.param_filter_query(self.datastore, sheet_name='X_OFF')
        # first two / next two spike trains of each segment, ON and OFF sheets
        spikes_first = [[seg.spiketrains[0:2] for seg in on_dsv.get_segments()],
                        [seg.spiketrains[0:2] for seg in off_dsv.get_segments()]]
        spikes_second = [[seg.spiketrains[2:4] for seg in on_dsv.get_segments()],
                         [seg.spiketrains[2:4] for seg in off_dsv.get_segments()]]

        plots = {}
        plots['LGN_SRP1'] = (SpikeRasterPlot(spikes_first), gs[1:4, 0:5],
                             {'x_axis' : False, 'x_label': None, 'colors': ['#FACC2E', '#0080FF']})
        plots['LGN_SHP1'] = (SpikeHistogramPlot(spikes_first), gs[4:5, 0:5],
                             {'x_axis' : False, 'x_label': None, 'colors': ['#FACC2E', '#0080FF']})
        plots['LGN_SRP2'] = (SpikeRasterPlot(spikes_second), gs[7:10, 0:5],
                             {'x_axis' : False, 'x_label': None, 'colors': ['#FACC2E', '#0080FF']})
        plots['LGN_SHP2'] = (SpikeHistogramPlot(spikes_second), gs[10:11, 0:5],
                             {'colors': ['#FACC2E', '#0080FF']})

        sheet_dsv = queries.param_filter_query(self.datastore, sheet_name=self.parameters.sheet_name)

        # NOTE(review): both V1_SRP1 and V1_SHP1 are RasterPlots aimed at the
        # same grid slot — presumably one was meant to be a histogram; confirm.
        plots['V1_SRP1'] = (RasterPlot(sheet_dsv, ParameterSet({'sheet_name' : self.parameters.sheet_name, 'spontaneous' : True, 'neurons' : [self.parameters.neuron], 'trial_averaged_histogram': False})),
                            gs[:3, 6:14], {'x_axis' : False, 'x_label': None})
        plots['V1_SHP1'] = (RasterPlot(sheet_dsv, ParameterSet({'sheet_name' : self.parameters.sheet_name, 'spontaneous' : True, 'neurons' : [self.parameters.neuron], 'trial_averaged_histogram': False})),
                            gs[:3, 6:14], {'x_axis' : False, 'x_label': None})

        # membrane potential: suppress title and x decorations (Gsyn sits below)
        vm_params = {'title': None, 'x_axis': None, 'x_label': None}
        plots['Vm_Plot'] = (VmPlot(sheet_dsv, ParameterSet({'sheet_name': self.parameters.sheet_name, 'neuron': self.parameters.neuron, 'spontaneous' : True})),
                            gs[4:8, 6:14], vm_params)

        gsyn_params = {'title': None}
        plots['Gsyn_Plot'] = (GSynPlot(sheet_dsv, ParameterSet({'sheet_name': self.parameters.sheet_name, 'neuron': self.parameters.neuron, 'spontaneous' : True})),
                              gs[8:12, 6:14], gsyn_params)

        plots['GSTA_Plot'] = (ConductanceSignalListPlot(queries.TagBasedQuery(ParameterSet({'tags': ['GSTA']})).query(sheet_dsv),
                                                        ParameterSet({'normalize_individually': True, 'neurons' : [self.parameters.neuron]})),
                              gs[7:10, 15:], {})

        #p = {}
        #p['mean'] = False
        #AnalogSignalListPlot(dsv, ParameterSet({'sheet_name': self.parameters.sheet_name,
        #                                        'ylabel': 'AC (norm)'})).subplot(gs[2:5, 15:], p)

        return plots
Esempio n. 26
0
      def perform_analysis(self):
          """Estimate the length of spontaneous activity per sheet.

          For each sheet, three PopulationMean results must be present: the mean
          PSTH, the mean CV-of-ISI-squared and the mean pairwise correlation.
          If the population looks asynchronous-irregular (CV >= 0.95 and
          correlation <= 0.05 — presumably the criterion for sustained activity;
          confirm thresholds against the paper), the length is the time of the
          last non-zero PSTH bin, otherwise 0.  Stores the result as a
          'Spontaneous activity length' SingleValue.
          """
          for sheet in self.datastore.sheets():
              dsv_psth = queries.param_filter_query(self.datastore,analysis_algorithm="PopulationMean",y_axis_name='Mean(psth (bin=5.0))',sheet_name=sheet)
              dsv_cv = queries.param_filter_query(self.datastore,analysis_algorithm="PopulationMean",value_name='Mean(CV of ISI squared)',sheet_name=sheet,identifier="SingleValue")
              dsv_corr = queries.param_filter_query(self.datastore,analysis_algorithm="PopulationMean",value_name='Mean(Correlation coefficient(psth (bin=5.0)))',sheet_name=sheet,identifier="SingleValue")

              # Each view must hold exactly one analysis data structure.
              # BUG FIX: the corr/psth messages previously reported
              # len(dsv_cv...) instead of the length of the view being checked.
              assert len(dsv_cv.get_analysis_result()) == 1, "Error: SpontaneousActivityLength accepts only datastore that holds one  SingleValue analysis data structure with value_name: Mean(CV of ISI squared). It contains: %d" % len(dsv_cv.get_analysis_result())
              assert len(dsv_corr.get_analysis_result()) == 1, "Error: SpontaneousActivityLength accepts only datastore that holds one  SingleValue analysis data structure with value_name: 'Mean(Correlation coefficient(psth (bin=5.0))).It contains: %d" % len(dsv_corr.get_analysis_result())
              assert len(dsv_psth.get_analysis_result()) == 1, "Error: SpontaneousActivityLength accepts only datastore that holds one  AnalogSignal analysis data structure with value_name: 'Mean(psth (bin=5.0)).It contains: %d" % len(dsv_psth.get_analysis_result())

              if dsv_cv.get_analysis_result()[0].value >= 0.95 and dsv_corr.get_analysis_result()[0].value <= 0.05:
                  # index of the last non-zero PSTH bin marks the end of activity
                  i = numpy.nonzero(dsv_psth.get_analysis_result()[0].analog_signal)[0][-1]
                  logger.warning(i)
                  l = dsv_psth.get_analysis_result()[0].analog_signal.times[i]
              else:
                  l = 0
              # diagnostic output, kept from the original implementation
              logger.warning(dsv_psth.get_analysis_result()[0].analog_signal)
              logger.warning(dsv_cv.get_analysis_result()[0].value)
              logger.warning(dsv_corr.get_analysis_result()[0].value)
              logger.warning(l)

              self.datastore.full_datastore.add_analysis_result(SingleValue(value=l,value_name = 'Spontaneous activity length',sheet_name=sheet,tags=self.tags,analysis_algorithm=self.__class__.__name__))
Esempio n. 27
0
    def make_grid_plot(self, subplotspec):
        """
        Call to execute the grid plot.

        Parameters
        ----------
        subplotspec : subplotspec
                    Is the subplotspec into which the whole lineplot is to be plotted.
        """
        # carve the usable region out of a 100x100 virtual grid, leaving the
        # configured extra space at the top and on the right
        margin_grid = gridspec.GridSpecFromSubplotSpec(
            100, 100, subplot_spec=subplotspec)
        subplotspec = margin_grid[int(100 * self.extra_space_top):100,
                                  0:int(100 * (1 - self.extra_space_right))]

        grid = gridspec.GridSpecFromSubplotSpec(len(self.y_axis_values),
                                                len(self.x_axis_values),
                                                subplot_spec=subplotspec)

        params = OrderedDict()
        plots = OrderedDict()
        for col, x_value in enumerate(self.x_axis_values):
            for row, y_value in enumerate(self.y_axis_values):
                # when axes are shared, only the leftmost column keeps y labels
                if col > 0 and self.shared_axis:
                    params["y_label"] = None
                    if self.shared_lim:
                        params["y_axis"] = False
                else:
                    params["y_label"] = y_value

                # ... and only the bottom row keeps x labels
                if row < len(self.y_axis_values) - 1 and self.shared_axis:
                    params["x_label"] = None
                    if self.shared_lim:
                        params["x_axis"] = False
                else:
                    params["x_label"] = x_value
                # NOTE(review): params is shared across all cells, so an
                # "x_axis": False set for an earlier row is still present when
                # the bottom row is processed — confirm this is intended.

                cell_dsv = param_filter_query(
                    self.datastore, **{
                        self.x_axis_parameter: x_value,
                        self.y_axis_parameter: y_value
                    })
                for (name, plot, gss, par) in self._single_plot(cell_dsv, grid[row, col]):
                    par.update(params)
                    key = name + '.' + 'plot[' + str(col) + ',' + str(row) + ']'
                    plots[key] = (plot, gss, par)
        return plots
Esempio n. 28
0
 def subplot(self, subplotspec):
     """Compare per-neuron trial-to-trial Vm reliability for full-contrast
     gratings (GR, at each neuron's preferred orientation) and natural images
     (NI), each normalized by the neuron's spontaneous-activity variability.

     Returns a dict with a bar plot of the population means and a per-neuron
     GR-vs-NI scatter plot, keyed for the surrounding Plotting machinery.
     """
     plots = {}
     gs = gridspec.GridSpecFromSubplotSpec(1,2, subplot_spec=subplotspec,hspace=1.0, wspace=1.0)

     var_gr = 0
     std_gr = 0

     # presented orientations at full contrast, and afferent orientation preferences
     orr = list(set([MozaikParametrized.idd(s).orientation for s in queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',st_contrast=100).get_stimuli()]))
     l4_exc_or = self.datastore.get_analysis_result(identifier='PerNeuronValue',value_name = 'LGNAfferentOrientation', sheet_name = self.parameters.sheet_name)

     # Spontaneous-activity variability per neuron.
     # We assume that the spontaneous activity had already the spikes removed.
     dsv = queries.param_filter_query(self.datastore,st_name='InternalStimulus',st_direct_stimulation_name='None',sheet_name=self.parameters.sheet_name,analysis_algorithm='ActionPotentialRemoval',ads_unique=True)
     assert len(dsv.get_analysis_result()) == 1  # hoisted: loop-invariant check
     ids = dsv.get_analysis_result()[0].ids
     sp = {}
     for idd in ids:
         s = dsv.get_analysis_result()[0].get_asl_by_id(idd).magnitude
         # inverse of the mean SD across 10 equal chunks of the spontaneous trace
         sp[idd] = 1/numpy.mean(numpy.std([s[i*int(len(s)/10):(i+1)*int(len(s)/10)] for i in range(0,10)],axis=0,ddof=1))
         #sp[idd]  = 1/numpy.std(s,ddof=1)
     logger.info(str(sp[ids[1]]))  # was a bare py2 `print`; routed through the logger

     # mean trial-to-trial variance across neurons for gratings
     dsv = queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',sheet_name=self.parameters.sheet_name,st_contrast=100,analysis_algorithm='TrialVariability',y_axis_name='Vm (no AP) trial-to-trial variance')
     assert queries.equal_ads(dsv, except_params=['stimulus_id'])
     ids = dsv.get_analysis_result()[0].ids

     var_gr_ind = []
     logger.info("AA")
     logger.info(str([sp[i]  for i in ids]))
     for i in ids:
         # orientation closest to this neuron's preference
         o = orr[numpy.argmin([circular_dist(o,l4_exc_or[0].get_value_by_id(i),numpy.pi) for o in orr])]
         assert len(queries.param_filter_query(dsv,st_orientation=o,ads_unique=True).get_analysis_result())==1
         a = 1/numpy.mean(numpy.sqrt(queries.param_filter_query(dsv,st_orientation=o,ads_unique=True).get_analysis_result()[0].get_asl_by_id(i).magnitude))
         var_gr = var_gr + a / sp[i]
         var_gr_ind.append(a / sp[i])
         std_gr = std_gr + a
     var_gr = var_gr / len(ids)
     std_gr = std_gr / len(ids)

     logger.info(str(var_gr_ind))
     # same measure for natural images with eye movement
     dsv = queries.param_filter_query(self.datastore,st_name='NaturalImageWithEyeMovement',sheet_name=self.parameters.sheet_name,y_axis_name='Vm (no AP) trial-to-trial variance',ads_unique=True)
     var_ni_ind = [1/numpy.mean(numpy.sqrt(dsv.get_analysis_result()[0].get_asl_by_id(i).magnitude)) / sp[i] for i in ids]
     var_ni = numpy.mean(var_ni_ind)

     plots['Bar'] = (BarComparisonPlot({"NI" : var_ni*100.0, "GR" : var_gr*100.0}),gs[0,0],{})
     # BUG FIX: var_gr_ind / var_ni_ind are Python lists, so `list * 100`
     # replicated the list 100x instead of scaling each value; convert to
     # arrays for element-wise scaling.
     plots['Scatter'] = (ScatterPlot(numpy.array(var_gr_ind)*100, numpy.array(var_ni_ind)*100),gs[0,1],{'x_label' : 'GR', 'y_label' : 'NI','identity_line' : True})

     return plots
Esempio n. 29
0
    def __init__(self, datastore, parameters, plot_file_name=None,
                 fig_param=None):
        """Collect, per sheet, the neuron positions and the PerNeuronValue
        analysis results that will be plotted; sheets without any
        PerNeuronValue result are skipped."""
        Plotting.__init__(self, datastore, parameters, plot_file_name, fig_param)
        self.poss = []
        self.pnvs = []
        self.sheets = []
        for sheet in datastore.sheets():
            dsv = queries.param_filter_query(self.datastore, sheet_name=sheet)
            results = dsv.get_analysis_result(identifier='PerNeuronValue')
            if not results:
                continue
            if len(results) > 1:
                # only the first result will actually be used downstream
                logger.error('Warning currently only one PerNeuronValue per sheet will be plotted!!!')
            self.poss.append(datastore.get_neuron_postions()[sheet])
            self.pnvs.append(results)
            self.sheets.append(sheet)

        # number of sheets that actually contribute panels
        self.length = len(self.poss)
Esempio n. 30
0
    def perform_analysis(self):
        """Compute texture-modulation indices per neuron at three levels of
        aggregation: per sample, per texture family (rates averaged across
        samples) and globally (averaged across families).

        The modulation is (texture - noise) / (texture + noise) of the firing
        rates stored in PerNeuronValue results, where st_stats_type=1 marks
        texture stimuli and st_stats_type=2 their spectrally-matched noise.
        """
        dsv = queries.param_filter_query(self.datastore, identifier='PerNeuronValue')
        textures = list(set([MozaikParametrized.idd(ads.stimulus_id).texture for ads in dsv.get_analysis_result()]))
        samples = list(set([MozaikParametrized.idd(ads.stimulus_id).sample for ads in dsv.get_analysis_result()]))

        for sheet in self.parameters.sheet_list:
            for texture in textures:
                # First we calculate the modulation for each sample of each original image
                for sample in samples:
                    pnv_noise = queries.param_filter_query(dsv,sheet_name=sheet,st_sample=sample,st_texture=texture,st_stats_type=2).get_analysis_result()[0]
                    pnv_texture = queries.param_filter_query(dsv,sheet_name=sheet,st_sample=sample,st_texture=texture,st_stats_type=1).get_analysis_result()[0]
                    modulation = []
                    for texture_firing_rate, noise_firing_rate in zip(pnv_texture.get_value_by_id(pnv_texture.ids), pnv_noise.get_value_by_id(pnv_noise.ids)):
                        # nan_to_num guards the 0/0 case of silent neurons
                        modulation.append(numpy.nan_to_num((texture_firing_rate - noise_firing_rate)/(texture_firing_rate + noise_firing_rate)))
                    st = MozaikParametrized.idd(pnv_texture.stimulus_id)
                    setattr(st,'stats_type',None)
                    self.datastore.full_datastore.add_analysis_result(PerNeuronValue(modulation,pnv_texture.ids,None,value_name = "Sample Modulation of " + pnv_texture.value_name, sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))

                # Then we calculate the modulation for each texture family by averaging the firing rates accross samples
                pnvs_noise = queries.param_filter_query(dsv,sheet_name=sheet,st_texture=texture,st_stats_type=2).get_analysis_result()
                pnvs_texture = queries.param_filter_query(dsv,sheet_name=sheet,st_texture=texture,st_stats_type=1).get_analysis_result()
                # a single id ordering (pnvs_noise[0].ids) keeps rates aligned
                mean_rates_noise = [pnv.get_value_by_id(pnvs_noise[0].ids) for pnv in pnvs_noise]
                mean_rates_texture = [pnv.get_value_by_id(pnvs_noise[0].ids) for pnv in pnvs_texture]
                _mean_rates_noise = numpy.mean(mean_rates_noise,axis=0)
                _mean_rates_texture = numpy.mean(mean_rates_texture,axis=0)
                modulation = numpy.nan_to_num((_mean_rates_texture - _mean_rates_noise)/(_mean_rates_texture + _mean_rates_noise))
                st = MozaikParametrized.idd(pnvs_texture[0].stimulus_id)

                setattr(st,'stats_type',None)
                setattr(st,'sample',None)
                # BUG FIX: the original referenced the stale inner-loop variable
                # pnv_texture (last sample only); use the id ordering the averages
                # above were built with, and the family-level value_name.
                self.datastore.full_datastore.add_analysis_result(PerNeuronValue(modulation,pnvs_noise[0].ids,None,value_name = "Texture Modulation of " + pnvs_texture[0].value_name ,sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))

            # Finally  we calculate the global modulation by averaging the firing rates accross texture families
            pnvs_noise = queries.param_filter_query(dsv,identifier='PerNeuronValue',sheet_name=sheet,st_stats_type=2).get_analysis_result()
            pnvs_texture = queries.param_filter_query(dsv,identifier='PerNeuronValue',sheet_name=sheet,st_stats_type=1).get_analysis_result()
            mean_rates_noise = [pnv.get_value_by_id(pnvs_noise[0].ids) for pnv in pnvs_noise]
            mean_rates_texture = [pnv.get_value_by_id(pnvs_noise[0].ids) for pnv in pnvs_texture]
            _mean_rates_noise = numpy.mean(mean_rates_noise,axis=0)
            _mean_rates_texture = numpy.mean(mean_rates_texture,axis=0)
            modulation = numpy.nan_to_num((_mean_rates_texture - _mean_rates_noise)/(_mean_rates_texture + _mean_rates_noise))
            st = MozaikParametrized.idd(pnvs_texture[0].stimulus_id)

            setattr(st,'stats_type',None)
            setattr(st,'sample',None)
            setattr(st,'texture',None)
            # BUG FIX: same stale-variable issue as above at the global level.
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(modulation,pnvs_noise[0].ids,None,value_name = "Global Modulation of " + pnvs_texture[0].value_name ,sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))
0
    def perform_analysis(self):
        """Export neuron annotations as PerNeuronValue analysis results.

        For every sheet, each annotation key that is defined for *all* neurons
        becomes one PerNeuronValue; orientation/phase annotations get their
        circular period set (pi and 2*pi respectively).
        """
        logger.info('Starting NeuronAnnotationsToPerNeuronValues Analysis')
        anns = self.datastore.get_neuron_annotations()

        for sheet in self.datastore.sheets():
            dsv = queries.param_filter_query(self.datastore, sheet_name=sheet)
            sheet_anns = anns[sheet]

            # union of annotation keys seen on any neuron in this sheet
            keys = set()
            for ann in sheet_anns:
                keys = keys.union(ann.keys())

            for key in keys:
                # only export keys present on every neuron
                if any(key not in ann for ann in sheet_anns):
                    continue

                values = [ann[key] for ann in sheet_anns]

                period = None
                if key == 'LGNAfferentOrientation':
                    period = numpy.pi
                elif key == 'LGNAfferentPhase':
                    period = 2 * numpy.pi

                self.datastore.full_datastore.add_analysis_result(
                    PerNeuronValue(
                        values,
                        dsv.get_sheet_ids(sheet),
                        qt.dimensionless,
                        period=period,
                        value_name=key,
                        sheet_name=sheet,
                        tags=self.tags,
                        analysis_algorithm=self.__class__.__name__))
Esempio n. 32
0
    def perform_analysis(self):
        """Export neuron annotations as PerNeuronValue analysis results.

        Python 2 variant (uses ``Set``/``xrange``).  For every sheet, each
        annotation key defined for *all* neurons becomes one PerNeuronValue;
        orientation/phase annotations get their circular period set.
        """
        logger.info('Starting NeuronAnnotationsToPerNeuronValues Analysis')
        anns = self.datastore.get_neuron_annotations()

        for sheet in self.datastore.sheets():
            dsv = queries.param_filter_query(self.datastore,sheet_name=sheet)
            # union of annotation keys seen on any neuron in this sheet
            keys = Set([])

            for n in xrange(0, len(anns[sheet])):
                keys = keys.union(anns[sheet][n].keys())

            for k in keys:
                # first check if the key is defined for all neurons
                key_ok = True

                for n in xrange(0, len(anns[sheet])):
                    if not k in anns[sheet][n]:
                        key_ok = False
                        break

                if key_ok:
                    values = []
                    for n in xrange(0, len(anns[sheet])):
                        values.append(anns[sheet][n][k])

                    # orientation is circular over pi, phase over 2*pi
                    period = None
                    if k == 'LGNAfferentOrientation':
                        period = numpy.pi
                    if k == 'LGNAfferentPhase':
                        period = 2*numpy.pi

                    self.datastore.full_datastore.add_analysis_result(
                        PerNeuronValue(values,
                                       dsv.get_sheet_ids(sheet),
                                       qt.dimensionless,
                                       period=period,
                                       value_name=k,
                                       sheet_name=sheet,
                                       tags=self.tags,
                                       analysis_algorithm=self.__class__.__name__))
Esempio n. 33
0
    def make_grid_plot(self, subplotspec):
        """
        Call to execute the grid plot.

        Parameters
        ----------
        subplotspec : subplotspec
                    Is the subplotspec into which the whole lineplot is to be plotted.
        """
        # carve the usable region out of a 100x100 virtual grid, leaving the
        # configured extra space at the top and on the right
        subplotspec = gridspec.GridSpecFromSubplotSpec(
                                100, 100, subplot_spec=subplotspec
                            )[int(100*self.extra_space_top):100, 0:int(100*(1-self.extra_space_right))]
       
        gs = gridspec.GridSpecFromSubplotSpec(len(self.y_axis_values),len(self.x_axis_values),subplot_spec=subplotspec)
        
        params = {}
        d = {}
        for i in xrange(0,len(self.x_axis_values)):
            for j in xrange(0,len(self.y_axis_values)):
                # when axes are shared, only the leftmost column keeps y labels
                if i > 0 and self.shared_axis:
                    params["y_label"]=None
                    if self.shared_lim:
                        params["y_axis"] = False
                else:
                    params["y_label"]=self.y_axis_values[j]
                    
                # ... and only the bottom row keeps x labels
                if j < len(self.y_axis_values)-1 and self.shared_axis:
                    params["x_label"]=None
                    if self.shared_lim:
                        params["x_axis"] = False
                else:
                    params["x_label"]=self.x_axis_values[i]

                # NOTE(review): params is shared across cells, so an
                # "x_axis": False set for an earlier row persists when the
                # bottom row is processed — confirm this is intended.
                dsv = param_filter_query(self.datastore,**{self.x_axis_parameter:self.x_axis_values[i],self.y_axis_parameter:self.y_axis_values[j]})
                li = self._single_plot(dsv,gs[j,i])
                for (name,plot,gss,par) in li:
                    par.update(params)
                    d[name + '.' + 'plot[' +str(i) + ',' +str(j) + ']'] = (plot,gss,par)
        return d
Esempio n. 34
0
 def subplot(self, subplotspec):
     """Delegate to a per-stimulus line plot restricted to the configured sheet."""
     sheet_dsv = queries.param_filter_query(self.datastore,
                                            sheet_name=self.parameters.sheet_name)
     line_plot = PerStimulusPlot(sheet_dsv, function=self._ploter,
                                 title_style="Standard")
     return line_plot.make_line_plot(subplotspec)
Esempio n. 35
0
    def perform_analysis(self):
        """Compute per-neuron modulation ratio (F1/F0), F0 and F1 from
        trial-averaged PSTHs of full-field drifting gratings.

        For each sheet, each neuron is evaluated at the presented orientation
        closest to its 'orientation preference'; results are stored as three
        PerNeuronValue analysis data structures per stimulus combination.
        """
        for sheet in self.datastore.sheets():
            # Load up spike trains for the right sheet and the corresponding
            # stimuli, and transform spike trains into psth
            dsv = queries.param_filter_query(self.datastore,identifier='AnalogSignalList',sheet_name=sheet,analysis_algorithm='PSTH',st_name='FullfieldDriftingSinusoidalGrating')
            assert queries.equal_ads(dsv,except_params=['stimulus_id']) , "It seems PSTH computed in different ways are present in datastore, ModulationRatio can accept only one"
            psths = dsv.get_analysis_result()
            st = [MozaikParametrized.idd(p.stimulus_id) for p in psths]
            # average across trials
            psths, stids = colapse(psths,st,parameter_list=['trial'],func=neo_sum,allow_non_identical_objects=True)

            # retrieve the computed orientation preferences
            pnvs = self.datastore.get_analysis_result(identifier='PerNeuronValue',
                                                      sheet_name=sheet,
                                                      value_name='orientation preference')
            if len(pnvs) != 1:
                logger.error("ERROR: Expected only one PerNeuronValue per sheet "
                             "with value_name 'orientation preference' in datastore, got: "
                             + str(len(pnvs)))
                return None

            or_pref = pnvs[0]
            # distinct presented orientations, in first-seen order
            ps = OrderedDict()
            for s in st:
                ps[MozaikParametrized.idd(s).orientation] = True
            ps = list(ps.keys())
            # Closest presented orientation to each neuron's preference.
            # min(..., key=...) replaces the original manual search, which
            # computed circular_dist twice per candidate; ties still resolve
            # to the earliest entry, as before.
            closest_presented_orientation = numpy.array(
                [min(ps, key=lambda o: circular_dist(pref, o, numpy.pi))
                 for pref in or_pref.values])

            # collapse along orientation - we will calculate MR for each
            # parameter combination other than orientation
            d = colapse_to_dictionary(psths, stids, "orientation")
            for (st, vl) in d.items():
                # here we will store the modulation ratios, one per each neuron
                modulation_ratio = []
                f0 = []
                f1 = []
                ids = []
                frequency = MozaikParametrized.idd(st).temporal_frequency * MozaikParametrized.idd(st).getParams()['temporal_frequency'].units
                for (orr, ppsth) in zip(vl[0], vl[1]):
                    # neurons whose preferred orientation matches this stimulus
                    for j in numpy.nonzero(orr == closest_presented_orientation)[0]:
                        if or_pref.ids[j] in ppsth.ids:
                            mr,F0,F1 = self._calculate_MR(ppsth.get_asl_by_id(or_pref.ids[j]).flatten(),frequency)
                            modulation_ratio.append(mr)
                            f0.append(F0)
                            f1.append(F1)
                            ids.append(or_pref.ids[j])

                logger.debug('Adding PerNeuronValue:' + str(sheet))
                self.datastore.full_datastore.add_analysis_result(
                    PerNeuronValue(modulation_ratio,
                                   ids,
                                   qt.dimensionless,
                                   value_name='Modulation ratio' + '(' + psths[0].x_axis_name + ')',
                                   sheet_name=sheet,
                                   tags=self.tags,
                                   period=None,
                                   analysis_algorithm=self.__class__.__name__,
                                   stimulus_id=str(st)))

                self.datastore.full_datastore.add_analysis_result(
                    PerNeuronValue(f0,
                                   ids,
                                   qt.dimensionless,
                                   value_name='F0' + '(' + psths[0].x_axis_name + ')',
                                   sheet_name=sheet,
                                   tags=self.tags,
                                   period=None,
                                   analysis_algorithm=self.__class__.__name__,
                                   stimulus_id=str(st)))

                self.datastore.full_datastore.add_analysis_result(
                    PerNeuronValue(f1,
                                   ids,
                                   qt.dimensionless,
                                   value_name='F1' + '(' + psths[0].x_axis_name + ')',
                                   sheet_name=sheet,
                                   tags=self.tags,
                                   period=None,
                                   analysis_algorithm=self.__class__.__name__,
                                   stimulus_id=str(st)))

                # NOTE(review): diagnostic histogram left in place — presumably
                # a debug aid; consider removing for production runs.
                import pylab
                pylab.figure()
                pylab.hist(modulation_ratio)
Esempio n. 36
0
# Script entry: load the pickled datastore for the run directory given as the
# first command-line argument and normalize FlashedBar stimulus positions.
Global.root_directory = sys.argv[1] + '/'

setup_logging()

# re-open the existing datastore; stimuli themselves are not re-stored
data_store = PickledDataStore(load=True,
                              parameters=ParameterSet({
                                  'root_directory':
                                  sys.argv[1],
                                  'store_stimuli':
                                  False
                              }),
                              replace=True)

# export neuron annotations as PerNeuronValue analysis results
NeuronAnnotationsToPerNeuronValues(data_store, ParameterSet({})).analyse()
analog_ids = queries.param_filter_query(
    data_store,
    sheet_name="V1_Exc_L4").get_segments()[0].get_stored_esyn_ids()

# Rewrite every FlashedBar stimulus id (analysis results, recorded segments and
# null segments) so that x == 0.  NOTE(review): this mutates ids in place —
# presumably so later queries collapse recordings differing only in bar x
# position; confirm against downstream analysis.
dsv = queries.param_filter_query(data_store, st_name='FlashedBar')
for ads in dsv.get_analysis_result():
    sid = MozaikParametrized.idd(ads.stimulus_id)
    sid.x = 0
    ads.stimulus_id = str(sid)
for seg in dsv.get_segments():
    sid = MozaikParametrized.idd(seg.annotations['stimulus'])
    sid.x = 0
    seg.annotations['stimulus'] = str(sid)
for seg in dsv.get_segments(null=True):
    sid = MozaikParametrized.idd(seg.annotations['stimulus'])
    sid.x = 0
    seg.annotations['stimulus'] = str(sid)
Esempio n. 37
0
      def perform_analysis(self):
                """Size-tuning analysis over DriftingSinusoidalGratingDisk radius.

                For each neuron, builds the response-vs-radius tuning curve and
                derives: max response, max-facilitation radius (peak), max
                suppression radius (post-peak minimum), suppression index and
                counter-suppression index; each is stored as a PerNeuronValue.
                """
                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',sheet_name=self.parameters.sheet_name,st_name='DriftingSinusoidalGratingDisk')
                
                if len(dsv.get_analysis_result()) == 0: return
                # all results must share stimulus type and analysis parameters
                assert queries.ads_with_equal_stimulus_type(dsv)
                assert queries.equal_ads(dsv,except_params=['stimulus_id'])
                self.pnvs = dsv.get_analysis_result()
                
                # get stimuli
                self.st = [MozaikParametrized.idd(s.stimulus_id) for s in self.pnvs]
                
                
                # transform the pnvs into a dictionary of tuning curves according along the 'radius' parameter
                # also make sure they are ordered according to the first pnv's idds 
                
                self.tc_dict = colapse_to_dictionary([z.get_value_by_id(self.parameters.neurons) for z in self.pnvs],self.st,"radius")
                for k in self.tc_dict.keys():
                        crf_sizes = []
                        supp_sizes= []
                        sis = []
                        max_responses=[]
                        csis = []
                        
                        # we will do the calculation neuron by neuron
                        for i in xrange(0,len(self.parameters.neurons)):
                            
                            rads = self.tc_dict[k][0]
                            values = numpy.array([a[i] for a in self.tc_dict[k][1]])
                            
                            # sort them based on radiuses
                            rads , values = zip(*sorted(zip(rads,values)))
                                                        
                            # peak of the tuning curve = classical receptive field
                            max_response = numpy.max(values)
                            crf_index  = numpy.argmax(values)
                            crf_size = rads[crf_index]
                            
                            # strongest suppression beyond the peak (or last point)
                            if crf_index < len(values)-1:
                                supp_index = crf_index+numpy.argmin(values[crf_index+1:])+1
                            else:
                                supp_index = len(values)-1
                            supp_size = rads[supp_index]                                

                            # rebound peak beyond the suppression point (or last point)
                            if supp_index < len(values)-1:
                                cs_index = supp_index+numpy.argmax(values[supp_index+1:])+1
                            else:
                                cs_index = len(values)-1

                            
                            # suppression index: peak-to-trough drop relative to peak
                            if values[crf_index] != 0:
                                si = (values[crf_index]-values[supp_index])/values[crf_index]
                            else:
                                si = 0

                            # NOTE(review): the guard tests values[cs_index] but the
                            # denominator is values[crf_index] — looks inconsistent
                            # with the si branch above; confirm which is intended.
                            if values[cs_index] != 0:
                                csi = (values[cs_index]-values[supp_index])/values[crf_index]
                            else:
                                csi = 0

                            crf_sizes.append(crf_size)
                            supp_sizes.append(supp_size)
                            sis.append(si)
                            max_responses.append(max_response)
                            csis.append(csi)
                            
                            
                        # store one PerNeuronValue per derived quantity
                        self.datastore.full_datastore.add_analysis_result(PerNeuronValue(max_responses,self.parameters.neurons,self.st[0].getParams()["radius"].units,value_name = 'Max. response of ' + self.pnvs[0].value_name ,sheet_name=self.parameters.sheet_name,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(k)))
                        self.datastore.full_datastore.add_analysis_result(PerNeuronValue(crf_sizes,self.parameters.neurons,self.st[0].getParams()["radius"].units,value_name = 'Max. facilitation radius of ' + self.pnvs[0].value_name ,sheet_name=self.parameters.sheet_name,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(k)))
                        self.datastore.full_datastore.add_analysis_result(PerNeuronValue(supp_sizes,self.parameters.neurons,self.st[0].getParams()["radius"].units,value_name = 'Max. suppressive radius of ' + self.pnvs[0].value_name ,sheet_name=self.parameters.sheet_name,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(k)))
                        self.datastore.full_datastore.add_analysis_result(PerNeuronValue(sis,self.parameters.neurons,None,value_name = 'Suppression index of ' + self.pnvs[0].value_name ,sheet_name=self.parameters.sheet_name,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(k)))
                        self.datastore.full_datastore.add_analysis_result(PerNeuronValue(csis,self.parameters.neurons,None,value_name = 'Counter-suppression index of ' + self.pnvs[0].value_name ,sheet_name=self.parameters.sheet_name,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(k)))
Esempio n. 38
0
 def subplot(self, subplotspec):
     """Delegate to a per-ADS line plot over AnalogSignal results of the configured sheet."""
     signal_dsv = queries.param_filter_query(self.datastore,
                                             sheet_name=self.parameters.sheet_name,
                                             identifier='AnalogSignal')
     ads_plot = PerStimulusADSPlot(signal_dsv, function=self._ploter,
                                   title_style="Clever")
     return ads_plot.make_line_plot(subplotspec)
Esempio n. 39
0
        def plot(self):
            """
            Compare the wavelet decomposition of one neuron's spike-free Vm under
            full-field drifting gratings vs. natural images with eye movement.

            Layout is a 4x5 grid: rows are raw Vm traces, 'signal', 'noise' and
            'SNR' maps (as returned by self.wavelet_decomposition); columns 0-1
            show the grating condition, columns 2-3 the natural-image condition,
            and column 4 the per-row means of both conditions on log-log axes.
            The figure is saved to Global.root_directory if plot_file_name is set.
            """
            self.fig = pylab.figure(facecolor='w', **self.fig_param)
            gs = gridspec.GridSpec(1, 1)
            gs.update(left=0.07, right=0.97, top=0.9, bottom=0.1)
            gs = gs[0,0]
            
            # Subdivide the single cell into the 4x5 grid described above.
            gs = gridspec.GridSpecFromSubplotSpec(4, 5,subplot_spec=gs)

            # All grating orientations presented at full (100%) contrast.
            orr = list(set([MozaikParametrized.idd(s).orientation for s in queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',st_contrast=100).get_stimuli()]))        
            l4_exc_or = self.datastore.get_analysis_result(identifier='PerNeuronValue',value_name = 'LGNAfferentOrientation', sheet_name = 'V1_Exc_L4')

            
            # Presented orientation closest (circular distance, period pi) to the
            # neuron's afferent orientation preference.
            col = orr[numpy.argmin([circular_dist(o,l4_exc_or[0].get_value_by_id(self.parameters.neuron),numpy.pi)  for o in orr])]
            #segs = queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',st_contrast=100,st_orientation=col,sheet_name='V1_Exc_L4').get_segments()
            #signals = [seg.get_vm(self.parameters.neuron) for seg in segs] 
            # Per-trial spike-free Vm traces (ActionPotentialRemoval) at the
            # preferred orientation; all ADSs must share stimulus parameters
            # except the trial number.
            dsv = queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',sheet_name='V1_Exc_L4',st_contrast=100,analysis_algorithm='ActionPotentialRemoval',st_orientation=col)
            assert queries.ads_with_equal_stimuli(dsv,except_params=["trial"])
            adss = dsv.get_analysis_result()
            signals = [ads.get_asl_by_id(self.parameters.neuron) for ads in adss] 
            
            # wavelet_decomposition yields 2D maps; rows are plotted against a
            # 'frequency' axis in column 4 below — assumed time-frequency maps,
            # TODO confirm axis orientation.
            (signal,noise,snr) = self.wavelet_decomposition(signals)
            
            # --- column 0-1: grating condition ---
            ax = pylab.subplot(gs[0,0:2])            
            for s in signals:
                ax.plot(s,c='k')
            pylab.ylabel('Vm')
            pylab.title("Gratings",fontsize=20)
            pylab.xlim(0,len(signals[0]))
            pylab.ylim(-80,-50)
            
            ax = pylab.subplot(gs[1,0:2])            
            ax.imshow(signal,aspect='auto',origin='lower')
            pylab.ylabel('Signal')
             
            ax = pylab.subplot(gs[2,0:2])            
            ax.imshow(noise,aspect='auto',origin='lower')
            pylab.ylabel('Noise')

            ax = pylab.subplot(gs[3,0:2])            
            ax.imshow(snr,aspect='auto',origin='lower')
            pylab.ylabel('SNR')
            pylab.xlabel('time')
            
            
            #segs = queries.param_filter_query(self.datastore,st_name='NaturalImageWithEyeMovement',sheet_name='V1_Exc_L4').get_segments()
            #signals = [seg.get_vm(self.parameters.neuron) for seg in segs] 
            # Same extraction for the natural-image-with-eye-movement condition.
            dsv = queries.param_filter_query(self.datastore,st_name='NaturalImageWithEyeMovement',sheet_name='V1_Exc_L4',analysis_algorithm='ActionPotentialRemoval')
            assert queries.ads_with_equal_stimuli(dsv,except_params=["trial"])
            adss = dsv.get_analysis_result()
            signals = [ads.get_asl_by_id(self.parameters.neuron) for ads in adss] 
           
            (signal_ni,noise_ni,snr_ni) = self.wavelet_decomposition(signals)
            
            # --- column 2-3: natural-image condition ---
            ax = pylab.subplot(gs[0,2:4])            
            for s in signals:
                ax.plot(s,c='k')
            pylab.xlim(0,len(signals[0]))                
            pylab.ylim(-80,-50)
            pylab.title("NI",fontsize=20)
            ax = pylab.subplot(gs[1,2:4])            
            ax.imshow(signal_ni,aspect='auto',origin='lower')

            ax = pylab.subplot(gs[2,2:4])            
            ax.imshow(noise_ni,aspect='auto',origin='lower')

            ax = pylab.subplot(gs[3,2:4])            
            ax.imshow(snr_ni,aspect='auto',origin='lower')
            pylab.xlabel('time')
            
            # --- column 4: mean over time (axis=1) of each map, both conditions
            # overlaid, log-log scale ---
            ax = pylab.subplot(gs[1,4])            
            ax.plot(numpy.mean(signal,axis=1),label="GR")
            ax.plot(numpy.mean(signal_ni,axis=1),label="NI")
            ax.set_xscale('log')
            ax.set_yscale('log')
            pylab.legend()
            
            ax = pylab.subplot(gs[2,4])            
            ax.plot(numpy.mean(noise,axis=1))
            ax.plot(numpy.mean(noise_ni,axis=1))
            ax.set_xscale('log')
            ax.set_yscale('log')
            
            ax = pylab.subplot(gs[3,4])            
            ax.plot(numpy.mean(snr,axis=1))
            ax.plot(numpy.mean(snr_ni,axis=1))
            ax.set_xscale('log')
            ax.set_yscale('log')
            pylab.xlabel("frequency")
            
            
            if self.plot_file_name:
               pylab.savefig(Global.root_directory+self.plot_file_name)              
Esempio n. 40
0
        def plot(self):
            """
            Compare the wavelet decomposition of one neuron's spike-free Vm under
            full-field drifting gratings vs. natural images with eye movement.

            Layout is a 4x5 grid: rows are raw Vm traces, 'signal', 'noise' and
            'SNR' maps (as returned by self.wavelet_decomposition); columns 0-1
            show the grating condition, columns 2-3 the natural-image condition,
            and column 4 the per-row means of both conditions on log-log axes.
            The figure is saved to Global.root_directory if plot_file_name is set.
            """
            self.fig = pylab.figure(facecolor='w', **self.fig_param)
            gs = gridspec.GridSpec(1, 1)
            gs.update(left=0.07, right=0.97, top=0.9, bottom=0.1)
            gs = gs[0,0]
            
            # Subdivide the single cell into the 4x5 grid described above.
            gs = gridspec.GridSpecFromSubplotSpec(4, 5,subplot_spec=gs)

            # All grating orientations presented at full (100%) contrast.
            orr = list(set([MozaikParametrized.idd(s).orientation for s in queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',st_contrast=100).get_stimuli()]))        
            l4_exc_or = self.datastore.get_analysis_result(identifier='PerNeuronValue',value_name = 'LGNAfferentOrientation', sheet_name = 'V1_Exc_L4')

            
            # Presented orientation closest (circular distance, period pi) to the
            # neuron's afferent orientation preference.
            col = orr[numpy.argmin([circular_dist(o,l4_exc_or[0].get_value_by_id(self.parameters.neuron),numpy.pi)  for o in orr])]
            #segs = queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',st_contrast=100,st_orientation=col,sheet_name='V1_Exc_L4').get_segments()
            #signals = [seg.get_vm(self.parameters.neuron) for seg in segs] 
            # Per-trial spike-free Vm traces (ActionPotentialRemoval) at the
            # preferred orientation; all ADSs must share stimulus parameters
            # except the trial number.
            dsv = queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',sheet_name='V1_Exc_L4',st_contrast=100,analysis_algorithm='ActionPotentialRemoval',st_orientation=col)
            assert queries.ads_with_equal_stimuli(dsv,except_params=["trial"])
            adss = dsv.get_analysis_result()
            signals = [ads.get_asl_by_id(self.parameters.neuron) for ads in adss] 
            
            # wavelet_decomposition yields 2D maps; rows are plotted against a
            # 'frequency' axis in column 4 below — assumed time-frequency maps,
            # TODO confirm axis orientation.
            (signal,noise,snr) = self.wavelet_decomposition(signals)
            
            # --- column 0-1: grating condition ---
            ax = pylab.subplot(gs[0,0:2])            
            for s in signals:
                ax.plot(s,c='k')
            pylab.ylabel('Vm')
            pylab.title("Gratings",fontsize=20)
            pylab.xlim(0,len(signals[0]))
            pylab.ylim(-80,-50)
            
            ax = pylab.subplot(gs[1,0:2])            
            ax.imshow(signal,aspect='auto',origin='lower')
            pylab.ylabel('Signal')
             
            ax = pylab.subplot(gs[2,0:2])            
            ax.imshow(noise,aspect='auto',origin='lower')
            pylab.ylabel('Noise')

            ax = pylab.subplot(gs[3,0:2])            
            ax.imshow(snr,aspect='auto',origin='lower')
            pylab.ylabel('SNR')
            pylab.xlabel('time')
            
            
            #segs = queries.param_filter_query(self.datastore,st_name='NaturalImageWithEyeMovement',sheet_name='V1_Exc_L4').get_segments()
            #signals = [seg.get_vm(self.parameters.neuron) for seg in segs] 
            # Same extraction for the natural-image-with-eye-movement condition.
            dsv = queries.param_filter_query(self.datastore,st_name='NaturalImageWithEyeMovement',sheet_name='V1_Exc_L4',analysis_algorithm='ActionPotentialRemoval')
            assert queries.ads_with_equal_stimuli(dsv,except_params=["trial"])
            adss = dsv.get_analysis_result()
            signals = [ads.get_asl_by_id(self.parameters.neuron) for ads in adss] 
           
            (signal_ni,noise_ni,snr_ni) = self.wavelet_decomposition(signals)
            
            # --- column 2-3: natural-image condition ---
            ax = pylab.subplot(gs[0,2:4])            
            for s in signals:
                ax.plot(s,c='k')
            pylab.xlim(0,len(signals[0]))                
            pylab.ylim(-80,-50)
            pylab.title("NI",fontsize=20)
            ax = pylab.subplot(gs[1,2:4])            
            ax.imshow(signal_ni,aspect='auto',origin='lower')

            ax = pylab.subplot(gs[2,2:4])            
            ax.imshow(noise_ni,aspect='auto',origin='lower')

            ax = pylab.subplot(gs[3,2:4])            
            ax.imshow(snr_ni,aspect='auto',origin='lower')
            pylab.xlabel('time')
            
            # --- column 4: mean over time (axis=1) of each map, both conditions
            # overlaid, log-log scale ---
            ax = pylab.subplot(gs[1,4])            
            ax.plot(numpy.mean(signal,axis=1),label="GR")
            ax.plot(numpy.mean(signal_ni,axis=1),label="NI")
            ax.set_xscale('log')
            ax.set_yscale('log')
            pylab.legend()
            
            ax = pylab.subplot(gs[2,4])            
            ax.plot(numpy.mean(noise,axis=1))
            ax.plot(numpy.mean(noise_ni,axis=1))
            ax.set_xscale('log')
            ax.set_yscale('log')
            
            ax = pylab.subplot(gs[3,4])            
            ax.plot(numpy.mean(snr,axis=1))
            ax.plot(numpy.mean(snr_ni,axis=1))
            ax.set_xscale('log')
            ax.set_yscale('log')
            pylab.xlabel("frequency")
            
            
            if self.plot_file_name:
               pylab.savefig(Global.root_directory+self.plot_file_name)              
Esempio n. 41
0
    def perform_analysis(self):
        """
        For each sheet, compare responses to synthetic textures (st_stats_type=1)
        against their spectrally-matched noise counterparts (st_stats_type=2).

        Per texture: average the PSTHs across stimulus samples for both stimulus
        types, compute the per-timestep modulation index
        (texture - noise) / (texture + noise), and store all three as
        AnalogSignalList results ('... samples averaged').

        Across textures: normalize each neuron's PSTHs by its maximum firing
        rate over both stimulus types, then store the texture-averaged PSTHs,
        their variances across textures, and the texture-averaged modulation
        ('... textures averaged' / '... textures var').
        """
        # NOTE(review): `samples` is computed but never used anywhere below —
        # looks like dead code; confirm before removing.
        samples = list(set([MozaikParametrized.idd(ads.stimulus_id).sample for ads in self.datastore.get_analysis_result()]))

        for sheet in self.parameters.sheet_list:
            averaged_noise_psths = []
            averaged_texture_psths = []
            modulation_list = []

            #First, we compute the time-course of the modulation for every individual texture
            for texture in self.parameters.texture_list:
                #Get the PSTHs for both the spectrally-matched noise and the synthetic texture stimuli 
                psths_noise = queries.param_filter_query(self.datastore,identifier='AnalogSignalList',sheet_name=sheet, analysis_algorithm = "PSTH", st_stats_type = 2, st_texture = texture).get_analysis_result()
                psths_texture = queries.param_filter_query(self.datastore,identifier='AnalogSignalList',sheet_name=sheet, analysis_algorithm = "PSTH", st_stats_type = 1, st_texture = texture).get_analysis_result()
                # Timebase/units taken from the first noise PSTH; assumed
                # identical across all PSTHs and textures (these variables are
                # reused after the loop — see NOTE below).
                ids = psths_noise[0].ids
                t_start = psths_noise[0].asl[0].t_start
                sampling_period = psths_noise[0].asl[0].sampling_period
                units = psths_noise[0].asl[0].units
                assert len(psths_noise) == len(psths_texture)
                asls_noise = [psth.get_asl_by_id(ids) for psth in psths_noise]
                asls_texture = [psth.get_asl_by_id(ids) for psth in psths_texture]

                #For every neuron, compute the average of the PSTHs for both type of stimuli
                noise_psth = numpy.mean(asls_noise, axis = 0)
                texture_psth = numpy.mean(asls_texture, axis = 0)
                #Then calculate the modulation for every time step
                # nan_to_num maps 0/0 (no response to either stimulus) to 0.
                modulation = numpy.nan_to_num((texture_psth - noise_psth)/(texture_psth + noise_psth))

                #Store the values obtained for this texture in some lists
                averaged_noise_psths.append(noise_psth)
                averaged_texture_psths.append(texture_psth)
                modulation_list.append(modulation)

                averaged_noise_asls = [NeoAnalogSignal(asl, t_start=t_start, sampling_period=sampling_period,units = units) for asl in noise_psth]
                averaged_texture_asls = [NeoAnalogSignal(asl, t_start=t_start, sampling_period=sampling_period,units = units) for asl in texture_psth]
                modulation_asls = [NeoAnalogSignal(asl, t_start=t_start, sampling_period=sampling_period,units = qt.dimensionless) for asl in modulation]

                # Collapse the 'sample' (and for modulation also 'stats_type')
                # dimension out of the stimulus ids used to tag the results.
                st_noise = MozaikParametrized.idd(psths_noise[0].stimulus_id)
                setattr(st_noise,'sample',None)
                st_texture = MozaikParametrized.idd(psths_texture[0].stimulus_id)
                setattr(st_texture,'sample',None)

                st_modulation = MozaikParametrized.idd(psths_noise[0].stimulus_id)
                setattr(st_modulation,'sample',None)
                setattr(st_modulation,'stats_type',None)

                #Store both the averaged PSTHs, and the time-course of the modulation for every neuron in the population
                self.datastore.full_datastore.add_analysis_result(
                    AnalogSignalList(averaged_noise_asls,
                                         ids,
                                         psths_noise[0].y_axis_units,
                                         x_axis_name='time',
                                         y_axis_name='Noise ' + psths_noise[0].y_axis_name + ' samples averaged',
                                         sheet_name=sheet,
                                         tags=self.tags,
                                         analysis_algorithm=self.__class__.__name__,
                                         stimulus_id=str(st_noise)))
                self.datastore.full_datastore.add_analysis_result(
                    AnalogSignalList(averaged_texture_asls,
                                         ids,
                                         psths_noise[0].y_axis_units,
                                         x_axis_name='time',
                                         y_axis_name='Texture ' + psths_noise[0].y_axis_name + ' samples averaged',
                                         sheet_name=sheet,
                                         tags=self.tags,
                                         analysis_algorithm=self.__class__.__name__,
                                         stimulus_id=str(st_texture)))
                self.datastore.full_datastore.add_analysis_result(
                    AnalogSignalList(modulation_asls,
                                         ids,
                                         qt.dimensionless,
                                         x_axis_name='time',
                                         y_axis_name='Modulation ' + psths_noise[0].y_axis_name + ' samples averaged',
                                         sheet_name=sheet,
                                         tags=self.tags,
                                         analysis_algorithm=self.__class__.__name__,
                                         stimulus_id=str(st_modulation)))


            #Normalize the PSTHs
            # Per-neuron maximum across both stimulus types, all textures and all
            # time steps; assumes the stacked PSTH arrays are 4D with the neuron
            # dimension at axis 1 — TODO confirm shape.
            max_firing_rates = numpy.max(numpy.concatenate((averaged_noise_psths,  averaged_texture_psths)), axis = (0,2,3))
            # Move the neuron axis last, divide by the per-neuron maxima
            # (broadcast), then restore the original axis order.
            averaged_noise_psths = numpy.transpose(numpy.transpose(averaged_noise_psths,(0,2,3,1))/max_firing_rates, (0,3,1,2))
            averaged_texture_psths = numpy.transpose(numpy.transpose(averaged_texture_psths,(0,2,3,1))/max_firing_rates, (0,3,1,2))

            #Compute the average accross textures families of the time course of the modulation and of the PSTHs for both type of stimuli 
            noise_psth = numpy.mean(averaged_noise_psths, axis = 0)
            texture_psth = numpy.mean(averaged_texture_psths, axis = 0)
            modulation = numpy.mean(modulation_list, axis = 0)


            var_noise_psth = numpy.var(averaged_noise_psths, axis = 0)
            var_texture_psth = numpy.var(averaged_texture_psths, axis = 0)

            # NOTE(review): t_start/sampling_period/units/ids/psths_noise and the
            # st_* objects below are the values left over from the LAST texture
            # iteration — this relies on texture_list being non-empty and on all
            # textures sharing the same timebase, neuron ids and stimulus
            # parameters.
            averaged_noise_asls = [NeoAnalogSignal(asl, t_start=t_start, sampling_period=sampling_period,units = units) for asl in noise_psth]
            averaged_texture_asls = [NeoAnalogSignal(asl, t_start=t_start, sampling_period=sampling_period,units = units) for asl in texture_psth]
            modulation_asls = [NeoAnalogSignal(asl, t_start=t_start, sampling_period=sampling_period,units = qt.dimensionless) for asl in modulation]

            var_noise_asls = [NeoAnalogSignal(asl, t_start=t_start, sampling_period=sampling_period,units = units) for asl in var_noise_psth]
            var_texture_asls = [NeoAnalogSignal(asl, t_start=t_start, sampling_period=sampling_period,units = units) for asl in var_texture_psth]

            # Also collapse the 'texture' dimension out of the stimulus ids now
            # that results are averaged across texture families.
            setattr(st_noise,'texture',None)
            setattr(st_texture,'texture',None)
            setattr(st_modulation,'texture',None)

            self.datastore.full_datastore.add_analysis_result(
                    AnalogSignalList(averaged_noise_asls,
                                         ids,
                                         psths_noise[0].y_axis_units,
                                         x_axis_name='time',
                                         y_axis_name='Noise ' +  psths_noise[0].y_axis_name + ' textures averaged',
                                         sheet_name=sheet,
                                         tags=self.tags,
                                         analysis_algorithm=self.__class__.__name__,
                                         stimulus_id=str(st_noise)))
            self.datastore.full_datastore.add_analysis_result(
                    AnalogSignalList(averaged_texture_asls,
                                         ids,
                                         psths_noise[0].y_axis_units,
                                         x_axis_name='time',
                                         y_axis_name='Texture ' + psths_noise[0].y_axis_name + ' textures averaged',
                                         sheet_name=sheet,
                                         tags=self.tags,
                                         analysis_algorithm=self.__class__.__name__,
                                         stimulus_id=str(st_texture)))
            self.datastore.full_datastore.add_analysis_result(
                    AnalogSignalList(var_noise_asls,
                                         ids,
                                         psths_noise[0].y_axis_units,
                                         x_axis_name='time',
                                         y_axis_name='Noise ' + psths_noise[0].y_axis_name + ' textures var',
                                         sheet_name=sheet,
                                         tags=self.tags,
                                         analysis_algorithm=self.__class__.__name__,
                                         stimulus_id=str(st_noise)))
            self.datastore.full_datastore.add_analysis_result(
                    AnalogSignalList(var_texture_asls,
                                         ids,
                                         psths_noise[0].y_axis_units,
                                         x_axis_name='time',
                                         y_axis_name='Texture ' + psths_noise[0].y_axis_name + ' textures var',
                                         sheet_name=sheet,
                                         tags=self.tags,
                                         analysis_algorithm=self.__class__.__name__,
                                         stimulus_id=str(st_texture)))
            self.datastore.full_datastore.add_analysis_result(
                    AnalogSignalList(modulation_asls,
                                         ids,
                                         qt.dimensionless,
                                         x_axis_name='time',
                                         y_axis_name='Modulation ' + psths_noise[0].y_axis_name + ' textures averaged',
                                         sheet_name=sheet,
                                         tags=self.tags,
                                         analysis_algorithm=self.__class__.__name__,
                                         stimulus_id=str(st_modulation)))
Esempio n. 42
0
    def subplot(self, subplotspec):
        """
        Build a 16x14 grid of polar orientation-tuning plots for the configured
        sheet, comparing high (100%) and low (5%) contrast.

        Left columns (0-2): population means of F0/F1 excitatory and inhibitory
        conductances and of F0/F1 Vm. Remaining columns: per-neuron tuning
        curves — either many small ones over two batches of 10 neurons
        (parameters.many == True) or one row of six hand-picked neurons.

        Returns a dict mapping plot names to
        (PlotTuningCurve, gridspec slice, plot-options dict) tuples.
        """
        plots = {}
        gs = gridspec.GridSpecFromSubplotSpec(16, 14, subplot_spec=subplotspec,
                                              hspace=1.0, wspace=1.0)
        
        # Low-contrast value used in the color-key strings below; these keys
        # must match the value names generated by the analysis exactly.
        low_contrast = str(5)
        
        # Neuron ids for which analog signals (conductances/Vm) were recorded.
        analog_ids = sorted(queries.param_filter_query(self.datastore,sheet_name=self.parameters.sheet_name,value_name=['F0_Exc_Cond-Mean(ECond)']).get_analysis_result()[0].ids)
        
        # --- population means, columns 0-2 ---
        dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F0_Exc_Cond-Mean(ECond)','F0_Inh_Cond-Mean(ICond)'],sheet_name=self.parameters.sheet_name)
        plots['MeanF0'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids), 'sheet_name' : self.parameters.sheet_name,'centered'  : True,'mean' : True,'pool' : True,'polar' : True})),gs[:4,:3],{'legend' : False, 'y_label': 'F0(Cond)' ,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F0_Exc_Cond-Mean(ECond) contrast : 100' : '#FF0000' , 'F0_Exc_Cond-Mean(ECond) contrast : ' + low_contrast : '#FFACAC','F0_Inh_Cond-Mean(ICond) contrast : 100' : '#0000FF' , 'F0_Inh_Cond-Mean(ICond) contrast : ' +low_contrast : '#ACACFF'}})
        
        dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F1_Exc_Cond','F1_Inh_Cond'],sheet_name=self.parameters.sheet_name)
        plots['MeanF1'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids), 'sheet_name' : self.parameters.sheet_name,'centered'  : True,'mean' : True,'pool' : True,'polar' : True})),gs[4:8,:3],{'y_label': 'F1(Cond)','title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F1_Exc_Cond contrast : 100' : '#FF0000' , 'F1_Exc_Cond contrast : ' + low_contrast : '#FFACAC','F1_Inh_Cond contrast : 100' : '#0000FF' , 'F1_Inh_Cond contrast : ' + low_contrast : '#ACACFF'}})

        dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F0_Vm-Mean(VM)'],sheet_name=self.parameters.sheet_name)
        plots['MeanVMF0'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids), 'sheet_name' : self.parameters.sheet_name,'centered'  : True,'mean' : True,'pool' : True,'polar' : True})),gs[8:12,:3],{'y_label': 'F0(Vm)' ,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F0_Vm-Mean(VM) contrast : 100' : '#000000' , 'F0_Vm-Mean(VM) contrast : ' + low_contrast : '#ACACAC'}})

        dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F1_Vm'],sheet_name=self.parameters.sheet_name)
        plots['MeanVMF1'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids), 'sheet_name' : self.parameters.sheet_name,'centered'  : True,'mean' : True,'pool' : True,'polar' : True})),gs[12:16,:3],{'y_label': 'F1(Vm)','title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F1_Vm contrast : 100' : '#000000' , 'F1_Vm contrast : ' + low_contrast : '#ACACAC'}})
        
        if True:
            if self.parameters.many:
                # --- per-neuron curves, two batches of 10 neurons each ---
                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F0_Exc_Cond','F0_Inh_Cond'],sheet_name=self.parameters.sheet_name)
                plots['F0a'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids[0:10]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[:2,3:],{'y_label': None,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F0_Exc_Cond contrast : 100' : '#FF0000' , 'F0_Exc_Cond contrast : ' + low_contrast : '#FFACAC','F0_Inh_Cond contrast : 100' : '#0000FF' , 'F0_Inh_Cond contrast : ' + low_contrast : '#ACACFF'}})

                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F0_Exc_Cond','F0_Inh_Cond'],sheet_name=self.parameters.sheet_name)
                plots['F0b'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids[10:20]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[2:4,3:],{'y_label': None,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F0_Exc_Cond contrast : 100' : '#FF0000' , 'F0_Exc_Cond contrast : ' + low_contrast : '#FFACAC','F0_Inh_Cond contrast : 100' : '#0000FF' , 'F0_Inh_Cond contrast : ' + low_contrast : '#ACACFF'}})


                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F1_Exc_Cond','F1_Inh_Cond'],sheet_name=self.parameters.sheet_name)
                plots['F1a'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids[0:10]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[4:6,3:],{'y_label': None,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F1_Exc_Cond contrast : 100' : '#FF0000' , 'F1_Exc_Cond contrast : ' + low_contrast : '#FFACAC','F1_Inh_Cond contrast : 100' : '#0000FF' , 'F1_Inh_Cond contrast : ' + low_contrast : '#ACACFF'}})

                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F1_Exc_Cond','F1_Inh_Cond'],sheet_name=self.parameters.sheet_name)
                plots['F1b'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids[10:20]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[6:8,3:],{'y_label': None,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F1_Exc_Cond contrast : 100' : '#FF0000' , 'F1_Exc_Cond contrast : ' + low_contrast : '#FFACAC','F1_Inh_Cond contrast : 100' : '#0000FF' , 'F1_Inh_Cond contrast : ' + low_contrast : '#ACACFF'}})


                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F0_Vm-Mean(VM)'],sheet_name=self.parameters.sheet_name)
                plots['VMF0a'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids[0:10]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[8:10,3:],{'y_label': None ,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F0_Vm-Mean(VM) contrast : 100' : '#000000' , 'F0_Vm-Mean(VM) contrast : ' + low_contrast : '#ACACAC'}})

                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F0_Vm-Mean(VM)'],sheet_name=self.parameters.sheet_name)
                plots['VMF0b'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids[10:20]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[10:12,3:],{'y_label': None ,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F0_Vm-Mean(VM) contrast : 100' : '#000000' , 'F0_Vm-Mean(VM) contrast : ' + low_contrast : '#ACACAC'}})


                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F1_Vm'],sheet_name=self.parameters.sheet_name)
                plots['VMF1a'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids[0:10]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[12:14,3:],{'y_label': None,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F1_Vm contrast : 100' : '#000000' , 'F1_Vm contrast : ' + low_contrast : '#ACACAC'}})

                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F1_Vm'],sheet_name=self.parameters.sheet_name)
                plots['VMF1b'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids[10:20]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[14:16,3:],{'y_label': None,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F1_Vm contrast : 100' : '#000000' , 'F1_Vm contrast : ' + low_contrast : '#ACACAC'}})

            else:
                # --- hand-picked subset of six neurons, one larger row each ---
                #neurons = [0,6,2,4,9,15]
                #neurons = [i fori in xrange(0:10)]
                neurons = [5,15,3,38,18,24]
                #neurons = [30,31,32,33,34,35,36,37,38,39,40]
                
                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F0_Exc_Cond-Mean(ECond)','F0_Inh_Cond-Mean(ICond)'],sheet_name=self.parameters.sheet_name)
                plots['F0'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(numpy.array(analog_ids)[neurons]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[:4,3:],{'legend' : False, 'y_label': None ,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F0_Exc_Cond-Mean(ECond) contrast : 100' : '#FF0000' , 'F0_Exc_Cond-Mean(ECond) contrast : ' + low_contrast : '#FFACAC','F0_Inh_Cond-Mean(ICond) contrast : 100' : '#0000FF' , 'F0_Inh_Cond-Mean(ICond) contrast : ' + low_contrast : '#ACACFF'}})

                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F1_Exc_Cond','F1_Inh_Cond'],sheet_name=self.parameters.sheet_name)
                plots['F1'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(numpy.array(analog_ids)[neurons]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[4:8,3:],{'y_label': None,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F1_Exc_Cond contrast : 100' : '#FF0000' , 'F1_Exc_Cond contrast : ' + low_contrast : '#FFACAC','F1_Inh_Cond contrast : 100' : '#0000FF' , 'F1_Inh_Cond contrast : ' + low_contrast : '#ACACFF'}})

                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F0_Vm-Mean(VM)'],sheet_name=self.parameters.sheet_name)
                plots['VMF0'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(numpy.array(analog_ids)[neurons]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[8:12,3:],{'y_label': None ,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F0_Vm-Mean(VM) contrast : 100' : '#000000' , 'F0_Vm-Mean(VM) contrast : ' + low_contrast : '#ACACAC'}})

                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F1_Vm'],sheet_name=self.parameters.sheet_name)
                plots['VMF1'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(numpy.array(analog_ids)[neurons]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[12:16,3:],{'y_label': None,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F1_Vm contrast : 100' : '#000000' , 'F1_Vm contrast : ' + low_contrast : '#ACACAC'}})
                

        return plots        
Esempio n. 43
0
    def perform_analysis(self):
        """
        Compute the modulation ratio of each neuron's PSTH response to the
        drifting grating closest to that neuron's orientation preference, and
        store one 'Modulation ratio' PerNeuronValue per sheet and per
        combination of stimulus parameters (other than orientation).

        Expects the datastore to contain, for each sheet:
          * PSTH AnalogSignalLists for FullfieldDriftingSinusoidalGrating
            stimuli, all computed with identical PSTH parameters, and
          * exactly one PerNeuronValue named 'orientation preference'.
        """
        for sheet in self.datastore.sheets():
            # Load up spike trains for the right sheet and the corresponding
            # stimuli, and transform spike trains into psth
            # NOTE(review): the two prints below look like debug output left
            # in by the author.
            print sheet
            self.datastore.print_content()
            dsv = queries.param_filter_query(self.datastore,identifier='AnalogSignalList',sheet_name=sheet,analysis_algorithm='PSTH',st_name='FullfieldDriftingSinusoidalGrating')
            dsv.print_content()
            assert queries.equal_ads(dsv,except_params=['stimulus_id']) , "It seems PSTH computed in different ways are present in datastore, ModulationRatio can accept only one"
            psths = dsv.get_analysis_result()
            st = [MozaikParametrized.idd(p.stimulus_id) for p in psths]
            # average across trials
            psths, stids = colapse(psths,st,parameter_list=['trial'],func=neo_sum,allow_non_identical_objects=True)

            # retrieve the computed orientation preferences
            pnvs = self.datastore.get_analysis_result(identifier='PerNeuronValue',
                                                      sheet_name=sheet,
                                                      value_name='orientation preference')
            
            if len(pnvs) != 1:
                # NOTE(review): this aborts the analysis for ALL remaining
                # sheets, not just the current one.
                logger.error("ERROR: Expected only one PerNeuronValue per sheet "
                             "with value_name 'orientation preference' in datastore, got: "
                             + str(len(pnvs)))
                return None
        
            or_pref = pnvs[0]
            # find closest orientation of grating to a given orientation preference of a neuron
            # first find all the different presented stimuli:
            ps = {}
            for s in st:
                ps[MozaikParametrized.idd(s).orientation] = True
            ps = ps.keys()
            print ps
            # now find the closest presented orientations
            # For each neuron, scan all presented orientations and keep the
            # one at minimal circular distance (period pi) from its preference.
            closest_presented_orientation = []
            for i in xrange(0, len(or_pref.values)):
                circ_d = 100000
                idx = 0
                for j in xrange(0, len(ps)):
                    if circ_d > circular_dist(or_pref.values[i], ps[j], numpy.pi):
                        circ_d = circular_dist(or_pref.values[i], ps[j], numpy.pi)
                        idx = j
                closest_presented_orientation.append(ps[idx])

            closest_presented_orientation = numpy.array(closest_presented_orientation)

            # collapse along orientation - we will calculate MR for each
            # parameter combination other than orientation
            d = colapse_to_dictionary(psths, stids, "orientation")
            # NOTE: this loop variable deliberately shadows the earlier `st`
            # list; from here on `st` is the collapsed stimulus id string.
            for (st, vl) in d.items():
                # here we will store the modulation ratios, one per each neuron
                modulation_ratio = []
                ids = []
                frequency = MozaikParametrized.idd(st).temporal_frequency * MozaikParametrized.idd(st).params()['temporal_frequency'].units
                # vl[0] holds the orientations, vl[1] the matching PSTHs; only
                # neurons whose closest presented orientation equals the
                # current one (and that are present in the PSTH) contribute.
                for (orr, ppsth) in zip(vl[0], vl[1]):
                    for j in numpy.nonzero(orr == closest_presented_orientation)[0]:
                        if or_pref.ids[j] in ppsth.ids:
                            modulation_ratio.append(self._calculate_MR(ppsth.get_asl_by_id(or_pref.ids[j]),frequency))
                            ids.append(or_pref.ids[j])
                            
                logger.debug('Adding PerNeuronValue:' + str(sheet))
                self.datastore.full_datastore.add_analysis_result(
                    PerNeuronValue(modulation_ratio,
                                   ids,
                                   qt.dimensionless,
                                   value_name='Modulation ratio' + '(' + psths[0].x_axis_name + ')',
                                   sheet_name=sheet,
                                   tags=self.tags,
                                   period=None,
                                   analysis_algorithm=self.__class__.__name__,
                                   stimulus_id=str(st)))

                # NOTE(review): debug visualization of the distribution of
                # modulation ratios, apparently left in by the author.
                import pylab
                pylab.figure()
                pylab.hist(modulation_ratio)
Esempio n. 44
0
 def subplot(self, subplotspec):
     """Draw one line plot per stimulus of the stored conductance signals."""
     # Restrict the datastore view to conductance signal lists only.
     conductance_dsv = queries.param_filter_query(
         self.datastore, identifier='ConductanceSignalList')
     # Delegate the per-stimulus layout to PerStimulusADSPlot, using this
     # instance's plotting callback for each individual panel.
     plotter = PerStimulusADSPlot(
         conductance_dsv, function=self._ploter, title_style="Clever")
     return plotter.make_line_plot(subplotspec)
Esempio n. 45
0
        def plot(self):
            """
            Plot trial-to-trial cross-correlations of Vm (top panel) and PSTH
            (bottom panel) responses, comparing full-field drifting gratings
            (at each neuron's preferred orientation) against natural images
            with eye movement.
            """
            self.fig = pylab.figure(facecolor='w', **self.fig_param)
            gs = gridspec.GridSpec(1, 1)
            gs.update(left=0.1, right=0.9, top=0.9, bottom=0.1)
            gs = gs[0,0]
            gs = gridspec.GridSpecFromSubplotSpec(2, 1,subplot_spec=gs)

            # All orientations presented at full contrast, and the afferent
            # orientation preference of the neurons in this sheet.
            orr = list(set([MozaikParametrized.idd(s).orientation for s in queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',st_contrast=100).get_stimuli()]))        
            oor = self.datastore.get_analysis_result(identifier='PerNeuronValue',value_name = 'LGNAfferentOrientation', sheet_name = self.parameters.sheet_name)
            
            if True:
                # For every requested neuron, compute trial-to-trial
                # cross-correlation of Vm (APs removed) and of the PSTH at the
                # presented orientation closest to the neuron's preference.
                # Results are tagged 'helper' so they can be collected below.
                for neuron_idd in self.parameters.neurons:
                    col = orr[numpy.argmin([circular_dist(o,oor[0].get_value_by_id(neuron_idd),numpy.pi)  for o in orr])]
                    dsv =  queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',st_contrast=100,st_orientation=col,sheet_name=self.parameters.sheet_name,analysis_algorithm='ActionPotentialRemoval')
                    TrialToTrialCrossCorrelationOfAnalogSignalList(dsv,ParameterSet({'neurons' : [neuron_idd]}),tags=['helper']).analyse()
                    dsv =  queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',st_contrast=100,st_orientation=col,sheet_name=self.parameters.sheet_name,analysis_algorithm='PSTH')
                    TrialToTrialCrossCorrelationOfAnalogSignalList(dsv,ParameterSet({'neurons' : [neuron_idd]}),tags=['helper']).analyse()
                
                
            # Average the per-neuron cross-correlations (gratings condition).
            dsv =  queries.tag_based_query(self.datastore,['helper'])   
            dsv1 =  queries.param_filter_query(dsv,y_axis_name='trial-trial cross-correlation of Vm (no AP)',st_name='FullfieldDriftingSinusoidalGrating',sheet_name=self.parameters.sheet_name)
            vm_cc_gr = numpy.mean(numpy.array([asl.asl[0] for asl in dsv1.get_analysis_result()]),axis=0)
            dsv1 =  queries.param_filter_query(dsv,y_axis_name='trial-trial cross-correlation of psth (bin=2.0)',st_name='FullfieldDriftingSinusoidalGrating',sheet_name=self.parameters.sheet_name)
            psth_cc_gr = numpy.mean(numpy.array([asl.asl[0] for asl in dsv1.get_analysis_result()]),axis=0)
            
            
            #queries.param_filter_query(self.datastore,analysis_algorithm='TrialToTrialCrossCorrelationOfAnalogSignalList').print_content(full_ADS=True)
            
            # Natural-image condition: one precomputed ADS per signal type,
            # averaged over neurons.
            dsv =  queries.param_filter_query(self.datastore,y_axis_name='trial-trial cross-correlation of Vm (no AP)',st_name="NaturalImageWithEyeMovement",sheet_name=self.parameters.sheet_name,ads_unique=True)
            vm_cc_ni = numpy.mean(numpy.array(dsv.get_analysis_result()[0].asl),axis=0)
            dsv =  queries.param_filter_query(self.datastore,y_axis_name='trial-trial cross-correlation of psth (bin=2.0)',st_name="NaturalImageWithEyeMovement",sheet_name=self.parameters.sheet_name,ads_unique=True)
            psth_cc_ni = numpy.mean(numpy.array(dsv.get_analysis_result()[0].asl),axis=0)
            
            logger.info(str(vm_cc_gr))
            logger.info(str(vm_cc_ni))
            
            
            # Half-window (in samples) to display, clipped to the shortest
            # correlation trace and rounded to an even number.
            # NOTE(review): `len(vm_cc_gr-1)` equals len(vm_cc_gr) (the -1 is
            # applied elementwise) -- `(len(vm_cc_gr)-1)/2` was likely
            # intended; confirm before changing.
            z = int(min(self.parameters.window_length,len(vm_cc_gr-1)/2,len(vm_cc_ni-1)/2)/2)*2
            logger.info(str(psth_cc_ni))
            logger.info(str(psth_cc_gr))
            fontsize = 30
            pylab.rcParams['xtick.major.pad'] = fontsize-5
            pylab.rcParams['ytick.major.pad'] = 10
            pylab.rc('axes', linewidth=5)
            
            
            logger.info(len(vm_cc_gr[int(len(vm_cc_gr)/2)-z:int(len(vm_cc_gr)/2)+z+1]))
            logger.info(len(numpy.linspace(-z,z,2*z+1)))
                
            # Top panel: Vm cross-correlations, centered on zero lag.
            ax = pylab.subplot(gs[0,0])       
            ax.plot(numpy.linspace(-z,z,2*z+1),vm_cc_gr[int(len(vm_cc_gr)/2)-z:int(len(vm_cc_gr)/2)+z+1],label="Gratings")
            ax.plot(numpy.linspace(-z,z,2*z+1),vm_cc_ni[int(len(vm_cc_ni)/2)-z:int(len(vm_cc_ni)/2)+z+1],label="Natural images")
            pylab.legend()
            pylab.title("VM")
            pylab.xlabel("time (ms)")
            #pylab.ylabel("corr coef")
            
            # Bottom panel: PSTH cross-correlations. The PSTH traces have half
            # the sampling density (2 ms bins), hence the z/2 slicing.
            ax = pylab.subplot(gs[1,0])
            ax.plot(numpy.linspace(-z,z,z+1),psth_cc_gr[int(len(psth_cc_gr)/2)-z/2:int(len(psth_cc_gr)/2)+z/2+1],label="Gratings")
            ax.plot(numpy.linspace(-z,z,z+1),psth_cc_ni[int(len(psth_cc_ni)/2)-z/2:int(len(psth_cc_ni)/2)+z/2+1],label="Natural images")
            
            pylab.xlim(-z,z)
            # NOTE(review): tick labels are hardcoded to +/-250 ms regardless
            # of the actual window z -- confirm this is intentional.
            pylab.xticks([-z,0,z],[-250,0,250])#[-2*z,0,2*z])
            pylab.yticks([-1.0,0.0,1.0])
            
            #pylab.legend()
            #pylab.title("Spikes")
            #pylab.xlabel("time (ms)",fontsize=fontsize)
            #pylab.ylabel("corr. coef.",fontsize=fontsize)
            #three_tick_axis(pylab.gca().xaxis)
            for label in ax.get_xticklabels() + ax.get_yticklabels():
                label.set_fontsize(fontsize)
            
            if self.plot_file_name:
               pylab.savefig(Global.root_directory+self.plot_file_name)              
Esempio n. 46
0
    def perform_analysis(self):
        """
        Compute neuronal-avalanche statistics per cortical layer and store the
        power-law fits in the datastore (following Fontenele et al. 2019).

        For each layer (its excitatory and inhibitory sheets pooled together):
          1. pool all spikes of the layer,
          2. bin them with a data-derived bin size dt (the mean inter-spike
             interval of the pooled spike train),
          3. detect avalanches as runs of non-empty bins delimited by empty
             bins,
          4. fit power laws to the avalanche size and duration distributions
             and to the <S>(D) (size vs. duration) relation,
          5. store the exponents, amplitudes, fit errors, raw distributions and
             the distance-to-criticality as SingleValue / SingleValueList
             analysis results for every sheet of the layer.
        """
        layers = [["V1_Exc_L2/3", "V1_Inh_L2/3"], ["V1_Exc_L4", "V1_Inh_L4"]]
        for layer in layers:
            if not set(layer).issubset(set(self.datastore.sheets())):
                warnings.warn("Layer %s not part of data store sheets: %s!" %
                              (layer, self.datastore.sheets()))
                continue

            # Pool all spikes from the layer's sheets.
            dsv = queries.param_filter_query(self.datastore, sheet_name=layer)
            allspikes, tstart, tstop = self._pool_layer_spikes(dsv)

            assert (len({
                load_parameters(str(s))["name"]
                for s in dsv.get_stimuli()
            }) == 1), "All stimuli have to have the same name!"

            # Specific time bin as in Fontenele2019: the mean inter-spike
            # interval of the pooled spike train.
            dt = np.mean(np.diff(np.sort(allspikes)))

            # NOTE(review): the no-spike case is not handled (dt becomes NaN
            # for an empty pool); callers must guarantee activity.
            bins = np.arange(tstart, tstop, dt)
            hist, bins = np.histogram(allspikes, bins)

            durs, szs = self._avalanche_durations_and_sizes(hist, dt)

            # tau = exponent of the avalanche size distribution.
            s_distr, s_bins = self.create_hist(szs, self.parameters.num_bins)
            s_amp, s_slope, s_error_sq, s_error_diff = self.fit_powerlaw_distribution(
                s_bins, s_distr, "size")

            # tau_t = exponent of the avalanche duration distribution.
            d_distr, d_bins = self.create_hist(durs, self.parameters.num_bins)
            d_amp, d_slope, d_error_sq, d_error_diff = self.fit_powerlaw_distribution(
                d_bins, d_distr, "duration")

            # Fit the <S>(D) curve, S=size, D=duration; its slope is beta.
            [sd_amp, sd_slope], _ = curve_fit(f=self.powerlaw,
                                              xdata=durs,
                                              ydata=szs,
                                              p0=[0, 0])
            beta = sd_slope  # for dcc calculation
            error_diff = sum(szs - self.powerlaw(durs, sd_amp, sd_slope))
            error_sq = np.linalg.norm(szs -
                                      self.powerlaw(durs, sd_amp, sd_slope))
            # Distance to criticality: deviation of beta from the value
            # predicted by the size and duration exponents.
            crit_dist = np.abs(beta - (-d_slope - 1) / (-s_slope - 1))

            common_stim_params = self._common_stimulus_parameters(
                dsv.get_stimuli())

            # Ordered table of results to store (same insertion order as the
            # historical implementation). Entries: (ADS class, value(s),
            # units, value_name).
            results = [
                (SingleValue, crit_dist * pq.dimensionless, pq.dimensionless,
                 "DistanceToCriticality"),
                (SingleValue, dt * pq.s, pq.s, "AvalancheBinSize"),
                (SingleValueList, durs * pq.s, pq.s, "AvalancheDurations"),
                (SingleValueList, szs * pq.dimensionless, pq.dimensionless,
                 "AvalancheSizes"),
                (SingleValue, sd_amp * pq.dimensionless, pq.dimensionless,
                 "SDAmplitude"),
                (SingleValue, sd_slope * pq.dimensionless, pq.dimensionless,
                 "SDSlope"),
                (SingleValue, error_sq * pq.dimensionless, pq.dimensionless,
                 "SDErrorSq"),
                (SingleValue, error_diff * pq.dimensionless, pq.dimensionless,
                 "SDErrorDiff"),
                (SingleValue, s_slope * pq.dimensionless, pq.dimensionless,
                 "SSlope"),
                (SingleValue, s_amp * pq.dimensionless, pq.dimensionless,
                 "SAmplitude"),
                (SingleValueList, s_distr * pq.dimensionless, pq.dimensionless,
                 "SDistr"),
                (SingleValueList, s_bins * pq.dimensionless, pq.dimensionless,
                 "SBins"),
                (SingleValue, s_error_sq * pq.dimensionless, pq.dimensionless,
                 "SErrorSq"),
                (SingleValue, s_error_diff * pq.dimensionless,
                 pq.dimensionless, "SErrorDiff"),
                (SingleValue, d_slope * pq.dimensionless, pq.dimensionless,
                 "DSlope"),
                (SingleValue, d_amp * pq.dimensionless, pq.dimensionless,
                 "DAmplitude"),
                # NOTE(review): DDistr/DBins carry second units on the values
                # but dimensionless as values_unit; preserved verbatim from
                # the original -- confirm which is intended.
                (SingleValueList, d_distr * pq.s, pq.dimensionless, "DDistr"),
                (SingleValueList, d_bins * pq.s, pq.dimensionless, "DBins"),
                (SingleValue, d_error_sq * pq.dimensionless, pq.dimensionless,
                 "DErrorSq"),
                (SingleValue, d_error_diff * pq.dimensionless,
                 pq.dimensionless, "DErrorDiff"),
            ]

            # The same layer-wide results are stored once per member sheet.
            for sheet in layer:
                common_params = {
                    "sheet_name": sheet,
                    "tags": self.tags,
                    "analysis_algorithm": self.__class__.__name__,
                    "stimulus_id": str(common_stim_params),
                }
                add = self.datastore.full_datastore.add_analysis_result
                for cls, value, units, name in results:
                    if cls is SingleValue:
                        add(cls(value=value,
                                value_units=units,
                                value_name=name,
                                **common_params))
                    else:
                        add(cls(values=value,
                                values_unit=units,
                                value_name=name,
                                **common_params))

    def _pool_layer_spikes(self, dsv):
        """Pool all spike times (magnitudes) of every spike train in *dsv*.

        Returns (allspikes, tstart, tstop) where tstart/tstop span the pooled
        recording extent (both initialized at 0, matching the historical
        behavior even when all spikes occur after t=0).
        """
        allspikes = []
        tstart = tstop = 0
        for seg in dsv.get_segments():
            for st in seg.spiketrains:
                tstart = min(st.t_start.magnitude, tstart)
                tstop = max(st.t_stop.magnitude, tstop)
                allspikes.extend(st.magnitude)
        return allspikes, tstart, tstop

    def _avalanche_durations_and_sizes(self, hist, dt):
        """Extract avalanche durations and sizes from the binned spike count.

        An avalanche is a run of non-empty bins delimited by empty bins.
        Single-bin gaps (duration == dt) and empty runs (size 0) are dropped.
        """
        # Indices of empty bins delimit the avalanches.
        zeros = np.where(hist == 0)[0]
        # Durations of avalanches: gaps between consecutive empty bins.
        durs = np.diff(zeros) * dt
        durs = durs[durs > dt]
        # Sizes of avalanches: spike count between consecutive empty bins.
        szs = []
        for i in range(len(zeros) - 1):
            szs.append(np.sum(hist[zeros[i]:zeros[i + 1]]))
        szs = np.array(szs)
        szs = szs[szs > 0]
        return durs, szs

    def _common_stimulus_parameters(self, stims):
        """Inner-join all stimulus parameter dicts: keep only the parameters
        shared -- with equal values -- by every stimulus in *stims*."""
        common_stim_params = load_parameters(str(stims[0]))
        for st in stims:
            p = load_parameters(str(st))
            common_stim_params = {
                k: v
                for (k, v) in p.items() if k in common_stim_params
                and p[k] == common_stim_params[k]
            }
        return common_stim_params
Esempio n. 47
0
    def subplot(self, subplotspec):
        """
        Build a 16x14 grid of polar orientation-tuning plots for this sheet:
        the left column shows population means (F0/F1 of conductances and Vm,
        at high and low contrast); the right columns show the same quantities
        for individual neurons -- either twenty neurons in thin rows
        (parameters.many == True) or six hand-picked neurons in larger panels.

        Returns a dict mapping plot names to (PlotTuningCurve, gridspec slice,
        per-plot style overrides) tuples.
        """
        plots = {}
        gs = gridspec.GridSpecFromSubplotSpec(16, 14, subplot_spec=subplotspec,
                                              hspace=1.0, wspace=1.0)
        
        # The low-contrast condition used in the color-key strings below.
        low_contrast = str(5)
        
        # Neurons for which analog signals (and thus F0/F1 values) exist.
        analog_ids = sorted(queries.param_filter_query(self.datastore,sheet_name=self.parameters.sheet_name,value_name=['F0_Exc_Cond-Mean(ECond)']).get_analysis_result()[0].ids)
        
        # --- Left column: population-mean tuning curves ---
        dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F0_Exc_Cond-Mean(ECond)','F0_Inh_Cond-Mean(ICond)'],sheet_name=self.parameters.sheet_name)
        plots['MeanF0'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids), 'sheet_name' : self.parameters.sheet_name,'centered'  : True,'mean' : True,'pool' : True,'polar' : True})),gs[:4,:3],{'legend' : False, 'y_label': 'F0(Cond)' ,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F0_Exc_Cond-Mean(ECond) contrast : 100' : '#FF0000' , 'F0_Exc_Cond-Mean(ECond) contrast : ' + low_contrast : '#FFACAC','F0_Inh_Cond-Mean(ICond) contrast : 100' : '#0000FF' , 'F0_Inh_Cond-Mean(ICond) contrast : ' +low_contrast : '#ACACFF'}})
        
        dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F1_Exc_Cond','F1_Inh_Cond'],sheet_name=self.parameters.sheet_name)
        plots['MeanF1'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids), 'sheet_name' : self.parameters.sheet_name,'centered'  : True,'mean' : True,'pool' : True,'polar' : True})),gs[4:8,:3],{'y_label': 'F1(Cond)','title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F1_Exc_Cond contrast : 100' : '#FF0000' , 'F1_Exc_Cond contrast : ' + low_contrast : '#FFACAC','F1_Inh_Cond contrast : 100' : '#0000FF' , 'F1_Inh_Cond contrast : ' + low_contrast : '#ACACFF'}})

        dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F0_Vm-Mean(VM)'],sheet_name=self.parameters.sheet_name)
        plots['MeanVMF0'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids), 'sheet_name' : self.parameters.sheet_name,'centered'  : True,'mean' : True,'pool' : True,'polar' : True})),gs[8:12,:3],{'y_label': 'F0(Vm)' ,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F0_Vm-Mean(VM) contrast : 100' : '#000000' , 'F0_Vm-Mean(VM) contrast : ' + low_contrast : '#ACACAC'}})

        dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F1_Vm'],sheet_name=self.parameters.sheet_name)
        plots['MeanVMF1'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids), 'sheet_name' : self.parameters.sheet_name,'centered'  : True,'mean' : True,'pool' : True,'polar' : True})),gs[12:16,:3],{'y_label': 'F1(Vm)','title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F1_Vm contrast : 100' : '#000000' , 'F1_Vm contrast : ' + low_contrast : '#ACACAC'}})
        
        # --- Right columns: per-neuron tuning curves ---
        if True:
            if self.parameters.many:
                # Twenty neurons, plotted ten at a time in thin two-row strips.
                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F0_Exc_Cond','F0_Inh_Cond'],sheet_name=self.parameters.sheet_name)
                plots['F0a'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids[0:10]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[:2,3:],{'y_label': None,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F0_Exc_Cond contrast : 100' : '#FF0000' , 'F0_Exc_Cond contrast : ' + low_contrast : '#FFACAC','F0_Inh_Cond contrast : 100' : '#0000FF' , 'F0_Inh_Cond contrast : ' + low_contrast : '#ACACFF'}})

                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F0_Exc_Cond','F0_Inh_Cond'],sheet_name=self.parameters.sheet_name)
                plots['F0b'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids[10:20]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[2:4,3:],{'y_label': None,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F0_Exc_Cond contrast : 100' : '#FF0000' , 'F0_Exc_Cond contrast : ' + low_contrast : '#FFACAC','F0_Inh_Cond contrast : 100' : '#0000FF' , 'F0_Inh_Cond contrast : ' + low_contrast : '#ACACFF'}})


                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F1_Exc_Cond','F1_Inh_Cond'],sheet_name=self.parameters.sheet_name)
                plots['F1a'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids[0:10]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[4:6,3:],{'y_label': None,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F1_Exc_Cond contrast : 100' : '#FF0000' , 'F1_Exc_Cond contrast : ' + low_contrast : '#FFACAC','F1_Inh_Cond contrast : 100' : '#0000FF' , 'F1_Inh_Cond contrast : ' + low_contrast : '#ACACFF'}})

                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F1_Exc_Cond','F1_Inh_Cond'],sheet_name=self.parameters.sheet_name)
                plots['F1b'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids[10:20]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[6:8,3:],{'y_label': None,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F1_Exc_Cond contrast : 100' : '#FF0000' , 'F1_Exc_Cond contrast : ' + low_contrast : '#FFACAC','F1_Inh_Cond contrast : 100' : '#0000FF' , 'F1_Inh_Cond contrast : ' + low_contrast : '#ACACFF'}})


                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F0_Vm-Mean(VM)'],sheet_name=self.parameters.sheet_name)
                plots['VMF0a'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids[0:10]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[8:10,3:],{'y_label': None ,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F0_Vm-Mean(VM) contrast : 100' : '#000000' , 'F0_Vm-Mean(VM) contrast : ' + low_contrast : '#ACACAC'}})

                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F0_Vm-Mean(VM)'],sheet_name=self.parameters.sheet_name)
                plots['VMF0b'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids[10:20]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[10:12,3:],{'y_label': None ,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F0_Vm-Mean(VM) contrast : 100' : '#000000' , 'F0_Vm-Mean(VM) contrast : ' + low_contrast : '#ACACAC'}})


                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F1_Vm'],sheet_name=self.parameters.sheet_name)
                plots['VMF1a'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids[0:10]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[12:14,3:],{'y_label': None,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F1_Vm contrast : 100' : '#000000' , 'F1_Vm contrast : ' + low_contrast : '#ACACAC'}})

                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F1_Vm'],sheet_name=self.parameters.sheet_name)
                plots['VMF1b'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(analog_ids[10:20]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[14:16,3:],{'y_label': None,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F1_Vm contrast : 100' : '#000000' , 'F1_Vm contrast : ' + low_contrast : '#ACACAC'}})

            else:
                # Hand-picked subset of neurons, one large panel per quantity.
                #neurons = [0,6,2,4,9,15]
                neurons = [5,15,3,38,18,24]
                #neurons = [30,31,32,33,34,35,36,37,38,39,40]
                
                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F0_Exc_Cond-Mean(ECond)','F0_Inh_Cond-Mean(ICond)'],sheet_name=self.parameters.sheet_name)
                plots['F0'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(numpy.array(analog_ids)[neurons]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[:4,3:],{'legend' : False, 'y_label': None ,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F0_Exc_Cond-Mean(ECond) contrast : 100' : '#FF0000' , 'F0_Exc_Cond-Mean(ECond) contrast : ' + low_contrast : '#FFACAC','F0_Inh_Cond-Mean(ICond) contrast : 100' : '#0000FF' , 'F0_Inh_Cond-Mean(ICond) contrast : ' + low_contrast : '#ACACFF'}})

                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F1_Exc_Cond','F1_Inh_Cond'],sheet_name=self.parameters.sheet_name)
                plots['F1'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(numpy.array(analog_ids)[neurons]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[4:8,3:],{'y_label': None,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F1_Exc_Cond contrast : 100' : '#FF0000' , 'F1_Exc_Cond contrast : ' + low_contrast : '#FFACAC','F1_Inh_Cond contrast : 100' : '#0000FF' , 'F1_Inh_Cond contrast : ' + low_contrast : '#ACACFF'}})

                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F0_Vm-Mean(VM)'],sheet_name=self.parameters.sheet_name)
                plots['VMF0'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(numpy.array(analog_ids)[neurons]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[8:12,3:],{'y_label': None ,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F0_Vm-Mean(VM) contrast : 100' : '#000000' , 'F0_Vm-Mean(VM) contrast : ' + low_contrast : '#ACACAC'}})

                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',value_name=['F1_Vm'],sheet_name=self.parameters.sheet_name)
                plots['VMF1'] = (PlotTuningCurve(dsv, ParameterSet({'parameter_name' : 'orientation', 'neurons': list(numpy.array(analog_ids)[neurons]), 'sheet_name' : self.parameters.sheet_name,'centered'  : False,'mean' : False,'pool' : True,'polar' : True})),gs[12:16,3:],{'y_label': None,'title' : None, 'x_ticks' : None, 'x_label' : None,'colors': {'F1_Vm contrast : 100' : '#000000' , 'F1_Vm contrast : ' + low_contrast : '#ACACAC'}})
                

        return plots        
# Standard library.
import sys

# Third party.
import numpy
from parameters import ParameterSet

# Mozaik.
import mozaik
from mozaik.storage.datastore import Hdf5DataStore, PickledDataStore
from mozaik.tools.mozaik_parametrized import colapse, colapse_to_dictionary, MozaikParametrized
from mozaik.analysis.technical import NeuronAnnotationsToPerNeuronValues
from mozaik.storage import queries
from mozaik.controller import Global

# The simulation output directory is passed as the first CLI argument.
Global.root_directory = sys.argv[1] + '/'

# NOTE(review): setup_logging is not imported explicitly here; it must come
# from the surrounding context -- confirm the intended import.
setup_logging()

data_store = PickledDataStore(load=True, parameters=ParameterSet({'root_directory': sys.argv[1], 'store_stimuli': False}), replace=True)

# Export neuron annotations (e.g. afferent orientation) as PerNeuronValue ADSs.
NeuronAnnotationsToPerNeuronValues(data_store, ParameterSet({})).analyse()
analog_ids = queries.param_filter_query(data_store, sheet_name="V1_Exc_L4").get_segments()[0].get_stored_esyn_ids()


# Collapse the horizontal position of all FlashedBar stimuli to x=0 so that
# responses recorded at different bar positions are treated as the same
# stimulus in later analyses.
dsv = queries.param_filter_query(data_store, st_name='FlashedBar')
for ads in dsv.get_analysis_result():
    sid = MozaikParametrized.idd(ads.stimulus_id)
    sid.x = 0
    ads.stimulus_id = str(sid)
for seg in dsv.get_segments():
    sid = MozaikParametrized.idd(seg.annotations['stimulus'])
    sid.x = 0
    seg.annotations['stimulus'] = str(sid)
# Also patch the "null" (blank) segments recorded between stimuli.
for seg in dsv.get_segments(null=True):
    sid = MozaikParametrized.idd(seg.annotations['stimulus'])
    sid.x = 0
    seg.annotations['stimulus'] = str(sid)
# --- Esempio n. 49 (scraped example-separator marker; not Python code) ---
# 0
      def plot(self):
          self.fig = pylab.figure(facecolor='w', **self.fig_param)
          gs = gridspec.GridSpec(1, 1)
          gs.update(left=0.07, right=0.97, top=0.9, bottom=0.1)
          gs = gs[0,0]
        
          dsv_simple = self.datastore.get_analysis_result(identifier='PerNeuronValue',sheet_name=self.parameters.SimpleSheetName,analysis_algorithm='ModulationRatio')
          dsv_complex = self.datastore.get_analysis_result(identifier='PerNeuronValue',sheet_name=self.parameters.ComplexSheetName,analysis_algorithm='ModulationRatio')
          
          
          dsv = queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',st_orientation=0)
          dsv_simple_v_F0 = dsv.get_analysis_result(identifier='PerNeuronValue',sheet_name=self.parameters.SimpleSheetName,value_name='F0_Vm-Mean(VM)')
          dsv_complex_v_F0 = dsv.get_analysis_result(identifier='PerNeuronValue',sheet_name=self.parameters.ComplexSheetName,value_name='F0_Vm-Mean(VM)')
          dsv_simple_v_F1 = dsv.get_analysis_result(identifier='PerNeuronValue',sheet_name=self.parameters.SimpleSheetName,value_name='F1_Vm')
          dsv_complex_v_F1 = dsv.get_analysis_result(identifier='PerNeuronValue',sheet_name=self.parameters.ComplexSheetName,value_name='F1_Vm')
          
          
          print len(dsv_simple)
          assert len(dsv_simple) == 1
          assert len(dsv_complex) == 1
          assert len(dsv_simple_v_F0) == 1
          assert len(dsv_complex_v_F0) == 1
          assert len(dsv_simple_v_F1) == 1
          assert len(dsv_complex_v_F1) == 1

          print dsv_simple_v_F0[0].values
          print dsv_simple_v_F1[0].values
          
          simple_v_mr = 2*dsv_simple_v_F1[0].values/abs(dsv_simple_v_F0[0].values)
          print simple_v_mr
          
          complex_v_mr = 2*dsv_complex_v_F1[0].values/abs(dsv_complex_v_F0[0].values)



          dsv_simple = dsv_simple[0]
          dsv_complex = dsv_complex[0]

          gs = gridspec.GridSpecFromSubplotSpec(3, 2,subplot_spec=gs)
          ax = pylab.subplot(gs[0,0])
          ax.hist(dsv_simple.values,bins=numpy.arange(0,2.2,0.2),color='k')
          pylab.ylim(0,450)
          disable_xticks(ax)
          remove_x_tick_labels()
          remove_y_tick_labels()
          #pylab.ylabel('Layer 4',fontsize=15)
          ax = pylab.subplot(gs[1,0])
          ax.hist(dsv_complex.values,bins=numpy.arange(0,2.2,0.2),color='w')
          pylab.ylim(0,450)
          disable_xticks(ax)
          remove_x_tick_labels()
          remove_y_tick_labels()
          #pylab.ylabel('Layer 2/3',fontsize=15)
          ax = pylab.subplot(gs[2,0])
          ax.hist([dsv_simple.values,dsv_complex.values],bins=numpy.arange(0,2.2,0.2),histtype='barstacked',color=['k','w'])
          pylab.ylim(0,450)
          #pylab.ylabel('Pooled',fontsize=15)
          three_tick_axis(ax.xaxis)
          remove_y_tick_labels()
          #pylab.xlabel('Modulation ratio',fontsize=15)
          for label in ax.get_xticklabels() + ax.get_yticklabels(): 
              label.set_fontsize(30) 

          
          if True:
              ax = pylab.subplot(gs[0,1])
              ax.hist(simple_v_mr,bins=numpy.arange(0,2.2,0.2),color='k')
              disable_xticks(ax)
              remove_x_tick_labels()
              remove_y_tick_labels()
              ax = pylab.subplot(gs[1,1])
              ax.hist(complex_v_mr,bins=numpy.arange(0,2.2,0.2),color='k')
              disable_xticks(ax)
              remove_x_tick_labels()
              remove_y_tick_labels()
              ax = pylab.subplot(gs[2,1])
              ax.hist([simple_v_mr,complex_v_mr],bins=numpy.arange(0,2.2,0.2),histtype='barstacked',color=['k','w'])
              three_tick_axis(ax.xaxis)
              remove_y_tick_labels()
              
          for label in ax.get_xticklabels() + ax.get_yticklabels(): 
              label.set_fontsize(30) 
          
          if self.plot_file_name:
                        pylab.savefig(Global.root_directory+self.plot_file_name)