Code example #1
def ads_with_equal_stimulus_type(dsv, allow_None=False):
    """
    This functions tests whether DSV contains only ADS associated
    with the same stimulus type.
    
    Parameters
    ----------
    not_None : bool
             If true it will not allow ADS that are not associated with stimulus
    """
    if allow_None:
        return matching_parametrized_object_params(
            [MozaikParametrized.idd(ads.stimulus_id)
             for ads in dsv.analysis_results if ads.stimulus_id is not None],
            params=['name'])
    else:
        if any(ads.stimulus_id is None for ads in dsv.analysis_results):
            return False
        return matching_parametrized_object_params(
            [MozaikParametrized.idd(ads.stimulus_id)
             for ads in dsv.analysis_results],
            params=['name'])
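A minimal usage sketch (hedged: the data_store handle and the surrounding imports are assumed; the guard pattern mirrors the perform_analysis methods shown below):

# Guard an analysis that requires a single stimulus type across all ADS.
dsv = queries.param_filter_query(data_store, identifier='PerNeuronValue')
assert queries.ads_with_equal_stimulus_type(dsv), \
    "DSV mixes ADS from different stimulus types"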
Code example #2
    def __init__(self, datastore, single_trial=False, **params):
        self.single_trial = single_trial
        PerDSVPlot.__init__(self, datastore, **params)
        ss = self._get_stimulus_ids()
        assert ss != [], "Error, empty datastore!"
        if self.title_style == "Clever":
            stimulus = MozaikParametrized.idd(ss[0])
            for s in ss:
                s = MozaikParametrized.idd(s)
                if s.name != stimulus.name:
                    logger.warning('Datastore does not contain same type of stimuli: changing title_style from Clever to Standard')
                    self.title_style = "Standard"
                    break

        # let's find the parameters that vary, in case we need the 'Clever' title style
        if self.title_style == "Clever":
            self.varied = varying_parameters([MozaikParametrized.idd(s) for s in ss])
            
            if not self.single_trial:
                self.varied = [x for x in self.varied if x != 'trial']
            
            
        if self.title_style == "Standard":
            self.extra_space_top = 0.07
        if self.title_style == "Clever":
            self.extra_space_top = len(self.varied)*0.005
Code example #3
 def perform_analysis(self):
       assert queries.equal_stimulus_type(self.datastore) , "Data store has to contain only recordings to the same stimulus type"
       st = self.datastore.get_stimuli()[0]
       assert MozaikParametrized.idd(st).params().has_key('temporal_frequency'), "The stimulus has to have parameter temporal_frequency which is used as first harmonic"
       
       for sheet in self.datastore.sheets():
           dsv = queries.param_filter_query(self.datastore, sheet_name=sheet)
           segs1, stids = colapse(dsv.get_segments(),dsv.get_stimuli(),parameter_list=['trial'],allow_non_identical_objects=True)
           for segs,st in zip(segs1, stids):
               first_analog_signal = segs[0].get_esyn(segs[0].get_stored_esyn_ids()[0])
               duration = first_analog_signal.t_stop - first_analog_signal.t_start
               frequency = MozaikParametrized.idd(st).temporal_frequency * MozaikParametrized.idd(st).params()['temporal_frequency'].units
               period = 1/frequency
               period = period.rescale(first_analog_signal.t_start.units)
               cycles = duration / period
               first_har = int(round(cycles))
               
               e_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_esyn_ids()]
               i_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_isyn(idd))) for idd in segs[0].get_stored_isyn_ids()]
               v_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_vm(idd))) for idd in segs[0].get_stored_vm_ids()]
               e_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs],axis=0).flatten())[first_har]/len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_esyn_ids()]
               i_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs],axis=0).flatten())[first_har]/len(segs[0].get_isyn(idd))) for idd in segs[0].get_stored_isyn_ids()]
               v_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs],axis=0).flatten())[first_har]/len(segs[0].get_vm(idd))) for idd in segs[0].get_stored_vm_ids()]
               
               cond_units = segs[0].get_esyn(segs[0].get_stored_esyn_ids()[0]).units
               vm_units = segs[0].get_vm(segs[0].get_stored_esyn_ids()[0]).units
               
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f0,segs[0].get_stored_esyn_ids(),cond_units,value_name = 'F0_Exc_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f0,segs[0].get_stored_isyn_ids(),cond_units,value_name = 'F0_Inh_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f0,segs[0].get_stored_vm_ids(),vm_units,value_name = 'F0_Vm',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f1,segs[0].get_stored_esyn_ids(),cond_units,value_name = 'F1_Exc_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f1,segs[0].get_stored_isyn_ids(),cond_units,value_name = 'F1_Inh_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f1,segs[0].get_stored_vm_ids(),vm_units,value_name = 'F1_Vm',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
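The F0/F1 convention used above can be verified on a synthetic signal: for x(t) = m + a*sin(2*pi*k*t/N) sampled over an integer number k of cycles, abs(fft(x)[0])/N recovers the mean m and 2*abs(fft(x)[k])/N recovers the amplitude a. A self-contained sketch (not part of mozaik):

import numpy

n = 1000                               # number of samples
cycles = 5                             # integer number of stimulus cycles in the window
t = numpy.arange(n) / float(n)
x = 3.0 + 2.0 * numpy.sin(2 * numpy.pi * cycles * t)

spectrum = numpy.fft.fft(x)
f0 = abs(spectrum[0]) / n              # DC component -> mean, ~3.0
f1 = 2 * abs(spectrum[cycles]) / n     # first harmonic -> amplitude, ~2.0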
Code example #4
File: plotting.py  Project: brainscales/mozaik
    def _ploter(self, idx, gs):
        pair = self.pairs[idx]
        # Let's figure out the varying parameters
        p1 = varying_parameters(pair)
        if pair[0].stimulus_id is None or pair[1].stimulus_id is None:
            p2 = []
        elif MozaikParametrized.idd(pair[0].stimulus_id).name != MozaikParametrized.idd(pair[1].stimulus_id).name:
            p2 = ['name']
        else:
            p2 = varying_parameters([MozaikParametrized.idd(p.stimulus_id) for p in pair])
        p1 = [x for x in p1 if ((x != 'value_name') and (x != 'stimulus_id'))]

        x_label = pair[0].value_name + '(' + pair[0].value_units.dimensionality.latex + ')'
        y_label = pair[1].value_name + '(' + pair[1].value_units.dimensionality.latex + ')'

        for p in p1:
            x_label += '\n' + str(p) + " = " + str(getattr(pair[0],p))
            y_label += '\n' + str(p) + " = " + str(getattr(pair[1],p))
        
        for p in p2:
            x_label += '\n' + str(p) + " = " + str(getattr(MozaikParametrized.idd(pair[0].stimulus_id),p))
            y_label += '\n' + str(p) + " = " + str(getattr(MozaikParametrized.idd(pair[1].stimulus_id),p))
        
        params = {}
        params["x_label"] = x_label
        params["y_label"] = y_label
        params["title"] = self.sheets[idx]
        if pair[0].value_units != pair[1].value_units or pair[1].value_units == pq.dimensionless:
           params["equal_aspect_ratio"] = False
        
        ids = list(set(pair[0].ids) & set(pair[1].ids))
        return [("ScatterPlot",ScatterPlot(pair[0].get_value_by_id(ids), pair[1].get_value_by_id(ids)),gs,params)]
Code example #5
File: plot_constructors.py  Project: RCagnol/mozaik
    def __init__(self, datastore, single_trial=False, **params):
        self.single_trial = single_trial
        PerDSVPlot.__init__(self, datastore, **params)
        ss = self._get_stimulus_ids()
        assert ss != [], "Error, empty datastore!"
        if self.title_style == "Clever":
            stimulus = MozaikParametrized.idd(ss[0])
            for s in ss:
                s = MozaikParametrized.idd(s)
                if s.name != stimulus.name:
                    logger.warning(
                        'Datastore does not contain same type of stimuli: changing title_style from Clever to Standard'
                    )
                    self.title_style = "Standard"
                    break

        # let's find the parameters that vary, in case we need the 'Clever' title style
        if self.title_style == "Clever":
            self.varied = varying_parameters(
                [MozaikParametrized.idd(s) for s in ss])

            if not self.single_trial:
                self.varied = [x for x in self.varied if x != 'trial']

        if self.title_style == "Standard":
            self.extra_space_top = 0.07
        if self.title_style == "Clever":
            self.extra_space_top = len(self.varied) * 0.005
Code example #6
 def perform_analysis(self):
       dsv1 = queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating')
       for sheet in dsv1.sheets():
           dsv = queries.param_filter_query(dsv1, sheet_name=sheet)
           segs1, stids = colapse(dsv.get_segments(),dsv.get_stimuli(),parameter_list=['trial'],allow_non_identical_objects=True)
           for segs,st in zip(segs1, stids):
               first_analog_signal = segs[0].get_esyn(segs[0].get_stored_esyn_ids()[0])
               duration = first_analog_signal.t_stop - first_analog_signal.t_start
               frequency = MozaikParametrized.idd(st).temporal_frequency * MozaikParametrized.idd(st).params()['temporal_frequency'].units
               period = 1/frequency
               period = period.rescale(first_analog_signal.t_start.units)
               cycles = duration / period
               first_har = int(round(cycles))
               e_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_esyn_ids()]
               i_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_isyn_ids()]
               v_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_vm_ids()]
               e_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs],axis=0).flatten()/len(segs[0].get_esyn(idd)))[first_har]) for idd in segs[0].get_stored_esyn_ids()]
               i_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs],axis=0).flatten()/len(segs[0].get_esyn(idd)))[first_har]) for idd in segs[0].get_stored_isyn_ids()]
               v_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs],axis=0).flatten()/len(segs[0].get_esyn(idd)))[first_har]) for idd in segs[0].get_stored_vm_ids()]
               
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f0,segs[0].get_stored_esyn_ids(),first_analog_signal.units,value_name = 'F0_Exc_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f0,segs[0].get_stored_isyn_ids(),first_analog_signal.units,value_name = 'F0_Inh_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f0,segs[0].get_stored_vm_ids(),first_analog_signal.units,value_name = 'F0_Vm',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f1,segs[0].get_stored_esyn_ids(),first_analog_signal.units,value_name = 'F1_Exc_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f1,segs[0].get_stored_isyn_ids(),first_analog_signal.units,value_name = 'F1_Inh_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f1,segs[0].get_stored_vm_ids(),first_analog_signal.units,value_name = 'F1_Vm',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
Code example #7
File: vision.py  Project: dguarino/mozaik
 def perform_analysis(self):
       dsv1 = queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating')
       for sheet in dsv1.sheets():
           dsv = queries.param_filter_query(dsv1, sheet_name=sheet)
           segs1, stids = colapse(dsv.get_segments(),dsv.get_stimuli(),parameter_list=['trial'],allow_non_identical_objects=True)
           for segs,st in zip(segs1, stids):
               first_analog_signal = segs[0].get_esyn(segs[0].get_stored_esyn_ids()[0])
               duration = first_analog_signal.t_stop - first_analog_signal.t_start
               frequency = MozaikParametrized.idd(st).temporal_frequency * MozaikParametrized.idd(st).params()['temporal_frequency'].units
               period = 1/frequency
               period = period.rescale(first_analog_signal.t_start.units)
               cycles = duration / period
               first_har = int(round(cycles))
               e_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_esyn_ids()]
               i_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_isyn_ids()]
               v_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_vm_ids()]
               e_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs],axis=0).flatten()/len(segs[0].get_esyn(idd)))[first_har]) for idd in segs[0].get_stored_esyn_ids()]
               i_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs],axis=0).flatten()/len(segs[0].get_esyn(idd)))[first_har]) for idd in segs[0].get_stored_isyn_ids()]
               v_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs],axis=0).flatten()/len(segs[0].get_esyn(idd)))[first_har]) for idd in segs[0].get_stored_vm_ids()]
               
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f0,segs[0].get_stored_esyn_ids(),first_analog_signal.units,value_name = 'F0_Exc_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f0,segs[0].get_stored_isyn_ids(),first_analog_signal.units,value_name = 'F0_Inh_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f0,segs[0].get_stored_vm_ids(),first_analog_signal.units,value_name = 'F0_Vm',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f1,segs[0].get_stored_esyn_ids(),first_analog_signal.units,value_name = 'F1_Exc_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f1,segs[0].get_stored_isyn_ids(),first_analog_signal.units,value_name = 'F1_Inh_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
               self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f1,segs[0].get_stored_vm_ids(),first_analog_signal.units,value_name = 'F1_Vm',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
Code example #8
      def perform_analysis(self):
            
            for sheet in self.datastore.sheets():
                dsv = queries.param_filter_query(self.datastore, sheet_name=sheet)
                if len(dsv.get_segments()) != 0:
                  assert queries.equal_stimulus_type(self.datastore) , "Data store has to contain only recordings to the same stimulus type"
                  st = self.datastore.get_stimuli()[0]
                  assert MozaikParametrized.idd(st).getParams().has_key('temporal_frequency'), "The stimulus has to have parameter temporal_frequency which is used as first harmonic"

                  segs1, stids = colapse(dsv.get_segments(),dsv.get_stimuli(),parameter_list=['trial'],allow_non_identical_objects=True)
                  for segs,st in zip(segs1, stids):
                      first_analog_signal = segs[0].get_esyn(segs[0].get_stored_esyn_ids()[0])
                      duration = first_analog_signal.t_stop - first_analog_signal.t_start
                      frequency = MozaikParametrized.idd(st).temporal_frequency * MozaikParametrized.idd(st).getParams()['temporal_frequency'].units
                      period = 1/frequency
                      period = period.rescale(first_analog_signal.t_start.units)
                      cycles = duration / period
                      first_har = int(round(cycles))
                      
                      e_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_esyn_ids()]
                      i_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_isyn(idd))) for idd in segs[0].get_stored_isyn_ids()]
                      v_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_vm(idd))) for idd in segs[0].get_stored_vm_ids()]
                      e_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs],axis=0).flatten())[first_har]/len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_esyn_ids()]
                      i_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs],axis=0).flatten())[first_har]/len(segs[0].get_isyn(idd))) for idd in segs[0].get_stored_isyn_ids()]
                      v_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs],axis=0).flatten())[first_har]/len(segs[0].get_vm(idd))) for idd in segs[0].get_stored_vm_ids()]
                      
                      cond_units = segs[0].get_esyn(segs[0].get_stored_esyn_ids()[0]).units
                      vm_units = segs[0].get_vm(segs[0].get_stored_esyn_ids()[0]).units
                      
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f0,segs[0].get_stored_esyn_ids(),cond_units,value_name = 'F0_Exc_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f0,segs[0].get_stored_isyn_ids(),cond_units,value_name = 'F0_Inh_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f0,segs[0].get_stored_vm_ids(),vm_units,value_name = 'F0_Vm',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f1,segs[0].get_stored_esyn_ids(),cond_units,value_name = 'F1_Exc_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f1,segs[0].get_stored_isyn_ids(),cond_units,value_name = 'F1_Inh_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f1,segs[0].get_stored_vm_ids(),vm_units,value_name = 'F1_Vm',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        

                # AnalogSignalList part 
                dsv = queries.param_filter_query(dsv, sheet_name=sheet,name='AnalogSignalList')
                for asl in dsv.get_analysis_result():
                    assert MozaikParametrized.idd(asl.stimulus_id).getParams().has_key('temporal_frequency'), "The stimulus has to have parameter temporal_frequency which is used as first harmonic"

                    signals = asl.asl
                    first_analog_signal = signals[0]
                    duration = first_analog_signal.t_stop - first_analog_signal.t_start
                    frequency = MozaikParametrized.idd(asl.stimulus_id).temporal_frequency * MozaikParametrized.idd(asl.stimulus_id).getParams()['temporal_frequency'].units
                    period = 1/frequency
                    period = period.rescale(first_analog_signal.t_start.units)
                    cycles = duration / period
                    first_har = int(round(cycles))

                    f0 = [abs(numpy.fft.fft(signal)[0])/len(signal) for signal in signals]
                    f1 = [2*abs(numpy.fft.fft(signal)[first_har])/len(signal) for signal in signals]
                    
                    self.datastore.full_datastore.add_analysis_result(PerNeuronValue(f0,asl.ids,asl.y_axis_units,value_name = 'F0('+ asl.y_axis_name + ')',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=asl.stimulus_id))                            
                    self.datastore.full_datastore.add_analysis_result(PerNeuronValue(f1,asl.ids,asl.y_axis_units,value_name = 'F1('+ asl.y_axis_name + ')',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=asl.stimulus_id))                                                
Code example #9
    def perform_analysis(self):
        dsv = queries.param_filter_query(self.datastore, identifier='PerNeuronValue')
        if len(dsv.get_analysis_result()) == 0: return
        assert queries.ads_with_equal_stimulus_type(dsv)
        assert queries.equal_ads(dsv,except_params=['stimulus_id', 'sheet_name'])

        textures = list(set([MozaikParametrized.idd(ads.stimulus_id).texture for ads in dsv.get_analysis_result()]))
        samples = list(set([MozaikParametrized.idd(ads.stimulus_id).sample for ads in dsv.get_analysis_result()]))
        trials = list(set([MozaikParametrized.idd(ads.stimulus_id).trial for ads in dsv.get_analysis_result()]))

        for sheet in self.parameters.sheet_list:
            mean_rates = [] #This is a 4D array where we will store the firing rates of each neuron for each trial of each sample of each texture family
            for texture in textures:
                mean_rates_texture = []
                dsv_tmp = queries.param_filter_query(dsv,identifier='PerNeuronValue',sheet_name=sheet,st_texture=texture,st_stats_type=1)
                for sample in samples:
                    mean_rates_sample = []
                    for trial in trials:
                        pnv = queries.param_filter_query(dsv_tmp,identifier='PerNeuronValue',st_sample=sample,st_trial=trial).get_analysis_result()[0]
                        mean_rates_sample.append(pnv.values)
                    mean_rates_texture.append(mean_rates_sample)
                mean_rates.append(mean_rates_texture)

            global_averaged_rates = numpy.mean(mean_rates, axis = (0,1,2)) #Global average firing rate of each neuron across texture families, samples and trials
            textures_averaged_rates = numpy.mean(mean_rates, axis = (1,2)) #Firing rate of each neuron for each texture family, averaged across samples and trials
            samples_averaged_rates = numpy.mean(mean_rates, axis = 2) #Firing rate of each neuron for each sample, averaged across trials

            SStextures = len(trials) * len(samples) * numpy.sum((textures_averaged_rates - global_averaged_rates)**2, axis=0) #ANOVA sum of squares across texture families
            SSsamples = len(trials) * numpy.sum((numpy.transpose(samples_averaged_rates,(1,0,2)) - textures_averaged_rates)**2, axis=(0,1))  #ANOVA sum of squares across samples
            SStrials = numpy.sum((numpy.transpose(mean_rates,(2,0,1,3)) - samples_averaged_rates)**2, axis=(0,1,2))  #ANOVA sum of squares across trials (residuals)
            SStotal = numpy.sum((mean_rates - global_averaged_rates)**2, axis=(0,1,2)) #ANOVA total sum of squares

            #We compute the mean squares of the nested ANOVA
            MStextures = SStextures/(len(textures)-1)
            MSsamples = SSsamples/(len(textures) * (len(samples) - 1))
            MStrials = SStrials/(len(textures) * len(samples) * (len(trials) - 1))

            #We compute the R-squared for each factor and for the residuals
            RsquaredTextures = SStextures/SStotal
            RsquaredSamples = SSsamples/SStotal
            RsquaredTrials = SStrials/SStotal
            
            #The variance ratio is the F statistic of the nested ANOVA
            varianceRatio = MStextures/MSsamples

            st = MozaikParametrized.idd(pnv.stimulus_id)
            setattr(st,'stats_type',None)
            setattr(st,'trial',None)
            setattr(st,'sample',None)
            setattr(st,'texture',None)

            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(varianceRatio,pnv.ids,None,value_name = "Texture variance ratio",sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(RsquaredTextures * 100,pnv.ids,value_units=qt.percent,value_name = "Texture r-squared",sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(RsquaredSamples * 100,pnv.ids,value_units=qt.percent,value_name = "Sample r-squared",sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(RsquaredTrials * 100,pnv.ids,value_units=qt.percent,value_name = "Trial r-squared",sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))
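As a sanity check on the decomposition above: for balanced data the three sums of squares must add up to the total sum of squares. A toy sketch with random data, using the same axis order as mean_rates (texture x sample x trial x neuron):

import numpy

rates = numpy.random.rand(4, 3, 5, 10)        # textures x samples x trials x neurons
grand = numpy.mean(rates, axis=(0, 1, 2))     # global average per neuron
tex = numpy.mean(rates, axis=(1, 2))          # per-texture averages
samp = numpy.mean(rates, axis=2)              # per-sample averages

ss_tex = 3 * 5 * numpy.sum((tex - grand) ** 2, axis=0)            # samples * trials
ss_samp = 5 * numpy.sum((samp - tex[:, None, :]) ** 2, axis=(0, 1))
ss_trial = numpy.sum((rates - samp[:, :, None, :]) ** 2, axis=(0, 1, 2))
ss_total = numpy.sum((rates - grand) ** 2, axis=(0, 1, 2))
assert numpy.allclose(ss_tex + ss_samp + ss_trial, ss_total)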
Code example #10
File: space.py  Project: bernhardkaplan/mozaik
 def __init__(self,**params):
     MozaikParametrized.__init__(self,**params)
     assert self.size_x > 0 and self.size_y > 0
     
     half_width = self.size_x/2.0
     half_height = self.size_y/2.0
     self.left = self.location_x - half_width
     self.right = self.location_x + half_width
     self.top = self.location_y + half_height
     self.bottom = self.location_y - half_height
     self.width = self.right - self.left
     self.height = self.top - self.bottom
Code example #11
    def perform_analysis(self):

        dsv = queries.param_filter_query(self.datastore, identifier='PerNeuronValue')
        textures = list(set([MozaikParametrized.idd(ads.stimulus_id).texture for ads in dsv.get_analysis_result()]))
        samples = list(set([MozaikParametrized.idd(ads.stimulus_id).sample for ads in dsv.get_analysis_result()]))

        for sheet in self.parameters.sheet_list:
            for texture in textures:
                #First we calculate the modulation for each sample of each original image
                for sample in samples:
                    pnv_noise = queries.param_filter_query(dsv,sheet_name=sheet,st_sample=sample,st_texture=texture,st_stats_type=2).get_analysis_result()[0]
                    pnv_texture = queries.param_filter_query(dsv,sheet_name=sheet,st_sample=sample,st_texture=texture,st_stats_type=1).get_analysis_result()[0]
                    modulation=[]
                    for texture_firing_rate,noise_firing_rate in zip(pnv_texture.get_value_by_id(pnv_texture.ids),pnv_noise.get_value_by_id(pnv_noise.ids)):
                            modulation.append(numpy.nan_to_num((texture_firing_rate - noise_firing_rate)/(texture_firing_rate + noise_firing_rate)))
                    st = MozaikParametrized.idd(pnv_texture.stimulus_id)
                    setattr(st,'stats_type',None)
                    self.datastore.full_datastore.add_analysis_result(PerNeuronValue(modulation,pnv_texture.ids,None,value_name = "Sample Modulation of " + pnv_texture.value_name, sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))
               

                #Then we calculate the modulation for each texture family by averaging the firing rates across samples
                pnvs_noise = queries.param_filter_query(dsv,sheet_name=sheet,st_texture=texture,st_stats_type=2).get_analysis_result()
                pnvs_texture = queries.param_filter_query(dsv,sheet_name=sheet,st_texture=texture,st_stats_type=1).get_analysis_result()
                mean_rates_noise = [pnv.get_value_by_id(pnvs_noise[0].ids) for pnv in pnvs_noise]
                mean_rates_texture = [pnv.get_value_by_id(pnvs_noise[0].ids) for pnv in pnvs_texture]
                _mean_rates_noise = numpy.mean(mean_rates_noise,axis=0)
                _mean_rates_texture = numpy.mean(mean_rates_texture,axis=0)
                modulation = numpy.nan_to_num((_mean_rates_texture - _mean_rates_noise)/(_mean_rates_texture + _mean_rates_noise))
                st = MozaikParametrized.idd(pnvs_texture[0].stimulus_id)

                setattr(st,'stats_type',None)
                setattr(st,'sample',None)
                self.datastore.full_datastore.add_analysis_result(PerNeuronValue(modulation,pnv_texture.ids,None,value_name = "Texture Modulation of " + pnv_texture.value_name ,sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))

            #Finally we calculate the global modulation by averaging the firing rates across texture families
            pnvs_noise = queries.param_filter_query(dsv,identifier='PerNeuronValue',sheet_name=sheet,st_stats_type=2).get_analysis_result()
            pnvs_texture = queries.param_filter_query(dsv,identifier='PerNeuronValue',sheet_name=sheet,st_stats_type=1).get_analysis_result()
            mean_rates_noise = [pnv.get_value_by_id(pnvs_noise[0].ids) for pnv in pnvs_noise]
            mean_rates_texture = [pnv.get_value_by_id(pnvs_noise[0].ids) for pnv in pnvs_texture]
            _mean_rates_noise = numpy.mean(mean_rates_noise,axis=0)
            _mean_rates_texture = numpy.mean(mean_rates_texture,axis=0)
            modulation = numpy.nan_to_num((_mean_rates_texture - _mean_rates_noise)/(_mean_rates_texture + _mean_rates_noise))
            st = MozaikParametrized.idd(pnvs_texture[0].stimulus_id)

            setattr(st,'stats_type',None)
            setattr(st,'sample',None)
            setattr(st,'texture',None)
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(modulation,pnv_texture.ids,None,value_name = "Global Modulation of " + pnv_texture.value_name ,sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))
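The modulation index used throughout, (texture - noise) / (texture + noise), lies in [-1, 1]; numpy.nan_to_num maps the 0/0 case (both rates zero) to 0. A toy check:

import numpy

texture_rates = numpy.array([10.0, 2.0, 0.0])
noise_rates = numpy.array([5.0, 2.0, 0.0])
# numpy warns about the 0/0 division; nan_to_num then maps the resulting nan to 0
modulation = numpy.nan_to_num((texture_rates - noise_rates) / (texture_rates + noise_rates))
# -> [0.333..., 0.0, 0.0]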
Code example #12
def save_data(dirname,dsv,name):

    try:
        os.mkdir(dirname)
    except OSError:
        pass  # directory already exists
        
    for neuron_id in analog_ids:
        mat_vm = []
        mat_exc = []
        mat_inh = []
        for seg in dsv.get_segments():
            sid = MozaikParametrized.idd(seg.annotations['stimulus'])
            a = seg.get_vm(neuron_id).magnitude
            a= numpy.insert(a,0,sid.trial)
            a= numpy.insert(a,0,sid.y)
            mat_vm.append(a)
            
            a = seg.get_esyn(neuron_id).magnitude
            a= numpy.insert(a,0,sid.trial)
            a= numpy.insert(a,0,sid.y)
            mat_exc.append(a)

            a = seg.get_isyn(neuron_id).magnitude
            a= numpy.insert(a,0,sid.trial)
            a= numpy.insert(a,0,sid.y)
            mat_inh.append(a)

            
        numpy.savetxt(dirname+'/'+'VM_' + name+str(neuron_id)+'.csv',numpy.array(mat_vm))
        numpy.savetxt(dirname+'/'+'ExcC' + name+str(neuron_id)+'.csv',numpy.array(mat_exc))
        numpy.savetxt(dirname+'/'+'InhC' + name+str(neuron_id)+'.csv',numpy.array(mat_inh))
Code example #13
 def perform_analysis(self):
           dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',sheet_name=self.parameters.sheet_name,st_name='DriftingSinusoidalGratingCenterSurroundStimulus')
           
           if len(dsv.get_analysis_result()) == 0: return
           assert queries.ads_with_equal_stimulus_type(dsv)
           assert queries.equal_ads(dsv,except_params=['stimulus_id'])
           self.pnvs = dsv.get_analysis_result()
           
           # get stimuli
           self.st = [MozaikParametrized.idd(s.stimulus_id) for s in self.pnvs]
           
           
           # transform the pnvs into a dictionary of tuning curves along the 'surround_orientation' parameter
           # also make sure they are ordered according to the first pnv's ids
           
           self.tc_dict = colapse_to_dictionary([z.get_value_by_id(self.parameters.neurons) for z in self.pnvs],self.st,"surround_orientation")
           for k in self.tc_dict.keys():
                   sis = []
                   surround_tuning=[]
                   
                   # we will do the calculation neuron by neuron
                   for i in xrange(0,len(self.parameters.neurons)):
                       
                       ors = self.tc_dict[k][0]
                       values = numpy.array([a[i] for a in self.tc_dict[k][1]])
                       d=OrderedDict()
                       for o,v in zip(ors,values):
                           d[o] = v
                       sis.append(d[0] / d[numpy.pi/2])
                       
                       
                   self.datastore.full_datastore.add_analysis_result(PerNeuronValue(sis,self.parameters.neurons,None,value_name = 'Suppression index of ' + self.pnvs[0].value_name ,sheet_name=self.parameters.sheet_name,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(k)))
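The per-neuron loop above reduces each tuning curve to a single ratio: the response with a parallel surround (surround_orientation == 0) divided by the response with an orthogonal surround (surround_orientation == numpy.pi/2). Traced on made-up numbers:

import numpy

ors = [0.0, numpy.pi / 2]
values = numpy.array([4.0, 8.0])
d = dict(zip(ors, values))
si = d[0] / d[numpy.pi / 2]   # 0.5: the parallel surround halves the response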
Code example #14
File: plotting.py  Project: brainscales/mozaik
 def __init__(self, datastore, parameters, plot_file_name=None,fig_param=None):
     Plotting.__init__(self, datastore, parameters, plot_file_name, fig_param)
     
     self.st = []
     self.tc_dict = []
     self.pnvs = []
     self.max_mean_response_indexes = []
     assert queries.ads_with_equal_stimulus_type(datastore)
     assert len(self.parameters.neurons) > 0 , "ERROR, empty list of neurons specified"
     dsvs = queries.partition_analysis_results_by_parameters_query(self.datastore,parameter_list=['value_name'],excpt=True)
     for dsv in dsvs:
         dsv = queries.param_filter_query(dsv,identifier='PerNeuronValue',sheet_name=self.parameters.sheet_name)
         assert matching_parametrized_object_params(dsv.get_analysis_result(), params=['value_name'])
         self.pnvs.append(dsv.get_analysis_result())
         # get stimuli
         st = [MozaikParametrized.idd(s.stimulus_id) for s in self.pnvs[-1]]
         self.st.append(st)
         # transform the pnvs into a dictionary of tuning curves along the parameter_name
         # also make sure the values are ordered according to ids in the first pnv
         dic = colapse_to_dictionary([z.get_value_by_id(self.parameters.neurons) for z in self.pnvs[-1]],st,self.parameters.parameter_name)
          #sort the entries in the dict according to the values of the parameter parameter_name
          for k in dic:
              (b, a) = dic[k]
              par, val = zip(*sorted(zip(b, numpy.array(a))))
              dic[k] = (par, numpy.array(val))
         self.tc_dict.append(dic)
         if self.parameters.centered:
            self.max_mean_response_indexes.append(numpy.argmax(sum([a[1] for a in dic.values()]),axis=0))
Code example #15
    def perform_analysis(self):
        for sheet in self.parameters.sheet_list:
            #Obtain the average firing rate of each neuron for each sample of the stimuli, separately for the spectrally matched noise and the synthetic texture stimuli
            dsv_noise = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',sheet_name=sheet, st_texture = self.parameters.texture_list, value_name = "Firing rate", st_stats_type = 2)
            dsv_texture = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',sheet_name=sheet, st_texture = self.parameters.texture_list, value_name = "Firing rate", st_stats_type = 1)
            pnvs_noise = dsv_noise.get_analysis_result()
            pnvs_texture = dsv_texture.get_analysis_result()
            firing_rates_noise = numpy.array([pnv.get_value_by_id(pnvs_noise[0].ids) for pnv in pnvs_noise])
            firing_rates_texture = numpy.array([pnv.get_value_by_id(pnvs_texture[0].ids) for pnv in pnvs_texture])

            assert firing_rates_noise.shape == firing_rates_texture.shape

            count_positively_modulated = 0
            count_negatively_modulated = 0

            #For every neuron, check if it is significantly modulated through a randomization test
            for i in range(firing_rates_noise.shape[1]):
                mean_response_texture = numpy.mean(firing_rates_texture[:,i])
                mean_response_noise = numpy.mean(firing_rates_noise[:,i])
                modulation = (mean_response_texture - mean_response_noise)/(mean_response_texture + mean_response_noise)
                neuron_modulated = self.randomization_test(firing_rates_noise[:,i],firing_rates_texture[:,i], modulation)
                if modulation > 0:
                    count_positively_modulated += neuron_modulated
                elif modulation < 0:
                    count_negatively_modulated += neuron_modulated
            st = MozaikParametrized.idd(pnvs_texture[0].stimulus_id)

            setattr(st,'stats_type',None)
            setattr(st,'sample',None)
            setattr(st,'texture',None)

            self.datastore.full_datastore.add_analysis_result(SingleValue(float(count_positively_modulated)/firing_rates_noise.shape[1] * 100, qt.percent, value_name = "Percentage of neurons significantly positively modulated", sheet_name=sheet, tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(SingleValue(float(count_negatively_modulated)/firing_rates_noise.shape[1] * 100, qt.percent, value_name = "Percentage of neurons significantly negatively modulated", sheet_name=sheet, tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(SingleValue(float(firing_rates_noise.shape[1] - count_positively_modulated - count_negatively_modulated)/firing_rates_noise.shape[1] * 100,qt.percent, value_name = "Percentage of neurons not significantly modulated", sheet_name=sheet, tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))
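randomization_test itself is not shown on this page; below is a minimal permutation-test sketch consistent with how it is called above (it should return 1 if the observed modulation is significant and 0 otherwise; the pooling scheme, permutation count and significance threshold are assumptions):

import numpy

def randomization_test(noise_rates, texture_rates, observed_modulation,
                       n_perm=1000, alpha=0.05):
    # Pool both conditions and recompute the modulation on random splits.
    pooled = numpy.concatenate((noise_rates, texture_rates))
    n = len(noise_rates)
    count = 0
    for _ in range(n_perm):
        numpy.random.shuffle(pooled)
        m_noise = numpy.mean(pooled[:n])
        m_texture = numpy.mean(pooled[n:])
        shuffled = (m_texture - m_noise) / (m_texture + m_noise)
        if abs(shuffled) >= abs(observed_modulation):
            count += 1
    return 1 if count / float(n_perm) < alpha else 0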
Code example #16
File: vision.py  Project: antolikjan/mozaik
 def perform_analysis(self):
           dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',sheet_name=self.parameters.sheet_name,st_name='DriftingSinusoidalGratingCenterSurroundStimulus')
           
           if len(dsv.get_analysis_result()) == 0: return
           assert queries.ads_with_equal_stimulus_type(dsv)
           assert queries.equal_ads(dsv,except_params=['stimulus_id'])
           self.pnvs = dsv.get_analysis_result()
           
           # get stimuli
           self.st = [MozaikParametrized.idd(s.stimulus_id) for s in self.pnvs]
           
           
           # transform the pnvs into a dictionary of tuning curves along the 'surround_orientation' parameter
           # also make sure they are ordered according to the first pnv's ids
           
           self.tc_dict = colapse_to_dictionary([z.get_value_by_id(self.parameters.neurons) for z in self.pnvs],self.st,"surround_orientation")
           for k in self.tc_dict.keys():
                   sis = []
                   surround_tuning=[]
                   
                   # we will do the calculation neuron by neuron
                   for i in xrange(0,len(self.parameters.neurons)):
                       
                       ors = self.tc_dict[k][0]
                       values = numpy.array([a[i] for a in self.tc_dict[k][1]])
                       d={}
                       for o,v in zip(ors,values):
                           d[o] = v
                       sis.append(d[0] / d[numpy.pi/2])
                       
                       
                   self.datastore.full_datastore.add_analysis_result(PerNeuronValue(sis,self.parameters.neurons,None,value_name = 'Suppression index of ' + self.pnvs[0].value_name ,sheet_name=self.parameters.sheet_name,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(k)))
Code example #17
def save_data(dirname, dsv, name):

    try:
        os.mkdir(dirname)
    except OSError:
        pass  # directory already exists

    for neuron_id in analog_ids:
        mat_vm = []
        mat_exc = []
        mat_inh = []
        for seg in dsv.get_segments():
            sid = MozaikParametrized.idd(seg.annotations['stimulus'])
            a = seg.get_vm(neuron_id).magnitude
            a = numpy.insert(a, 0, sid.trial)
            a = numpy.insert(a, 0, sid.y)
            mat_vm.append(a)

            a = seg.get_esyn(neuron_id).magnitude
            a = numpy.insert(a, 0, sid.trial)
            a = numpy.insert(a, 0, sid.y)
            mat_exc.append(a)

            a = seg.get_isyn(neuron_id).magnitude
            a = numpy.insert(a, 0, sid.trial)
            a = numpy.insert(a, 0, sid.y)
            mat_inh.append(a)

        numpy.savetxt(dirname + '/' + 'VM_' + name + str(neuron_id) + '.csv',
                      numpy.array(mat_vm))
        numpy.savetxt(dirname + '/' + 'ExcC' + name + str(neuron_id) + '.csv',
                      numpy.array(mat_exc))
        numpy.savetxt(dirname + '/' + 'InhC' + name + str(neuron_id) + '.csv',
                      numpy.array(mat_inh))
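The first two columns of every saved row are metadata: sid.y ends up in column 0 and sid.trial in column 1 (the second insert pushes the first one over). Reading a file back therefore means splitting them off; a sketch with a hypothetical file name:

import numpy

mat = numpy.loadtxt('output_dir/VM_protocol42.csv')  # hypothetical path
y_positions = mat[:, 0]      # sid.y
trials = mat[:, 1]           # sid.trial
traces = mat[:, 2:]          # the recorded analog signal samples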
Code example #18
File: queries.py  Project: h-mayorquin/mozaik
def ads_with_equal_stimulus_type(dsv, allow_None=False):
    """
    This functions tests whether DSV contains only ADS associated
    with the same stimulus type.
    
    Parameters
    ----------
    not_None : bool
             If true it will not allow ADS that are not associated with stimulus
    """
    if allow_None:
        return matching_parametrized_object_params([MozaikParametrized.idd(ads.stimulus_id) for ads in dsv.analysis_results if ads.stimulus_id is not None],params=['name'])
    else:
        if any(ads.stimulus_id is None for ads in dsv.analysis_results):
           return False
        return matching_parametrized_object_params([MozaikParametrized.idd(ads.stimulus_id) for ads in dsv.analysis_results],params=['name'])
Code example #19
File: queries.py  Project: flcunha/mozaik
def ads_with_equal_stimuli(dsv,params=None,except_params=None):
    """
    This functions returns true if DSV contains only ADS associated with stimuli 
    of the same kind and with the same values for parameters supplied in *params* or 
    with the exception of parameters listed in *except_params*. 
    Otherwise False.
    """
    return matching_parametrized_object_params([MozaikParametrized.idd(ads.stimulus_id) for ads in dsv.analysis_results],params=params,except_params=except_params)
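A hedged usage sketch: to check that all ADS in a view were computed from the same stimulus up to the trial number (the call site is illustrative):

assert ads_with_equal_stimuli(dsv, except_params=['trial']), \
    "ADS in this DSV come from stimuli differing in more than the trial number"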
Code example #20
File: queries.py  Project: JoelChavas/mozaik
def ads_with_equal_stimuli(dsv,params=None,except_params=None):
    """
    This functions returns true if DSV contains only ADS associated with stimuli 
    of the same kind and with the same values for parameters supplied in *params* or 
    with the exception of parameters listed in *except_params*. 
    Otherwise False.
    """
    return matching_parametrized_object_params([MozaikParametrized.idd(ads.stimulus_id) for ads in dsv.analysis_results],params=params,except_params=except_params)
Code example #21
def equal_stimulus(dsv, except_params):
    """
    This function returns True if the DSV contains only recordings associated
    with stimuli of identical parameter values, with the exception of the
    stimulus type and of the parameters listed in *except_params*.
    """
    return matching_parametrized_object_params(
        [MozaikParametrized.idd(s) for s in dsv.get_stimuli()],
        except_params=['name'] + list(except_params))
Code example #22
def equal_stimulus_type(dsv):
    """
    This function returns True if the DSV contains only recordings associated
    with the same stimulus type. Otherwise False.
    """
    return matching_parametrized_object_params(
        [MozaikParametrized.idd(s) for s in dsv.get_stimuli()],
        params=['name'])
Code example #23
 def perform_analysis(self):
           dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',sheet_name=self.parameters.sheet_name,st_name='DriftingSinusoidalGratingDisk')
           
           if len(dsv.get_analysis_result()) == 0: return
           assert queries.ads_with_equal_stimulus_type(dsv)
           assert queries.equal_ads(dsv,except_params=['stimulus_id'])
           self.pnvs = dsv.get_analysis_result()
           
           # get stimuli
           self.st = [MozaikParametrized.idd(s.stimulus_id) for s in self.pnvs]
           
           
           # transform the pnvs into a dictionary of tuning curves along the 'radius' parameter
           # also make sure they are ordered according to the first pnv's ids
           
           self.tc_dict = colapse_to_dictionary([z.get_value_by_id(self.parameters.neurons) for z in self.pnvs],self.st,"radius")
           for k in self.tc_dict.keys():
                   crf_sizes = []
                   supp_sizes= []
                   sis = []
                   max_responses=[]
                   
                   # we will do the calculation neuron by neuron
                   for i in xrange(0,len(self.parameters.neurons)):
                       
                       rads = self.tc_dict[k][0]
                       values = numpy.array([a[i] for a in self.tc_dict[k][1]])
                       
                        # sort them based on radii
                       rads , values = zip(*sorted(zip(rads,values)))
                                                   
                       max_response = numpy.max(values)
                       crf_index  = numpy.argmax(values)
                       crf_size = rads[crf_index]
                       
                       if crf_index < len(values)-1:
                           supp_index = crf_index+numpy.argmax(values[crf_index+1:])+1
                       else:
                           supp_index = len(values)-1
                                                   
                       supp_size = rads[supp_index]                                
                       
                       if values[crf_index] != 0:
                           si = (values[crf_index]-values[supp_index])/values[crf_index]
                       else:
                           si = 0
                       
                       crf_sizes.append(crf_size)
                       supp_sizes.append(supp_size)
                       sis.append(si)
                       max_responses.append(max_response)
                       
                       
                   self.datastore.full_datastore.add_analysis_result(PerNeuronValue(max_responses,self.parameters.neurons,self.st[0].params()["radius"].units,value_name = 'Max. response of ' + self.pnvs[0].value_name ,sheet_name=self.parameters.sheet_name,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(k)))
                   self.datastore.full_datastore.add_analysis_result(PerNeuronValue(crf_sizes,self.parameters.neurons,self.st[0].params()["radius"].units,value_name = 'Max. facilitation radius of ' + self.pnvs[0].value_name ,sheet_name=self.parameters.sheet_name,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(k)))
                   self.datastore.full_datastore.add_analysis_result(PerNeuronValue(supp_sizes,self.parameters.neurons,self.st[0].params()["radius"].units,value_name = 'Max. suppressive radius of ' + self.pnvs[0].value_name ,sheet_name=self.parameters.sheet_name,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(k)))
                   self.datastore.full_datastore.add_analysis_result(PerNeuronValue(sis,self.parameters.neurons,None,value_name = 'Suppression index of ' + self.pnvs[0].value_name ,sheet_name=self.parameters.sheet_name,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(k)))
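Tracing the per-neuron extraction above on a toy size-tuning curve (numbers made up): the peak response defines the classical receptive field (CRF) size, and supp_index picks out the largest response found after the peak.

import numpy

rads = (0.5, 1.0, 2.0, 4.0)              # sorted stimulus radii
values = numpy.array([2.0, 8.0, 3.0, 4.0])

crf_index = numpy.argmax(values)          # 1 -> CRF size 1.0, peak response 8.0
supp_index = crf_index + numpy.argmax(values[crf_index + 1:]) + 1   # 3 -> radius 4.0
si = (values[crf_index] - values[supp_index]) / values[crf_index]   # 0.5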
Code example #24
    def print_content(self, full_recordings=False, full_ADS=False):
        """
        Prints the content of the data store (specifically the list of recordings and ADSs in the DSV).
        
        If the 
        
        Parameters
        ----------
            full_recordings : bool (optional)
                            If True each contained recording will be printed.
                            Otherwise only the overview of the recordings based on stimulus type will be shown.
                            
            full_ADS : bool (optional)
                     If True each contained ADS will be printed (for each this will print the set of their mozaik parameters together with their values).
                     Otherwise only the overview of the ADSs based on their identifier will be shown.
        """
        logger.info("DSV info:")
        logger.info("   Number of recordings: " +
                    str(len(self.block.segments)))
        d = {}
        for st in [s.annotations['stimulus'] for s in self.block.segments]:
            d[MozaikParametrized.idd(st).name] = d.get(
                MozaikParametrized.idd(st).name, 0) + 1

        for k in d.keys():
            logger.info("     " + str(k) + " : " + str(d[k]))

        logger.info("   Number of ADS: " + str(len(self.analysis_results)))
        d = {}
        for ads in self.analysis_results:
            d[ads.identifier] = d.get(ads.identifier, 0) + 1

        for k in d.keys():
            logger.info("     " + str(k) + " : " + str(d[k]))

        if full_recordings:
            logger.info('RECORDING RESULTS')
            for s in [s.annotations['stimulus'] for s in self.block.segments]:
                logger.info(str(s))

        if full_ADS:
            logger.info('ANALYSIS RESULTS')
            for a in self.analysis_results:
                logger.info(str(a))
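Typical interactive use (a hedged sketch; the data_store handle is assumed):

data_store.print_content()               # overview of recordings and ADS counts
data_store.print_content(full_ADS=True)  # additionally list every ADS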
Code example #25
def partition_analysis_results_by_stimulus_parameters_query(
        dsv, parameter_list=None, excpt=False):
    """
        This query will take all analysis results and return list of DataStoreViews
        each holding analysis results that have the same values of
        of stimulus parameters in parameter_list.

        Note that in most cases one wants to do this only against datastore holding
        only analysis results  measured to the same stimulus type! In that case the datastore is partitioned into
        subsets each holding recordings to the same stimulus with the same paramter
        values, with the exception to the parameters in parameter_list.
    
        Parameters
        ----------
        
        dsv : DataStoreView
            The input DSV.
        
        parameter_list : list(string)
               The list of stimulus parameters that will vary between the ASDs in the returned DSVs, all other parameters will have the same value within each of the 
               returned DSVs.

        except : bool
               If excpt is True the query is allowed only on DSVs holding the same AnalysisDataStructures type.
        """
    if dsv.analysis_results == []: return []

    for ads in dsv.analysis_results:
        assert ads.stimulus_id is not None, "partition_analysis_results_by_stimulus_parameters_query accepts only DSV with ADS that all have defined stimulus id"

    st = [
        MozaikParametrized.idd(ads.stimulus_id) for ads in dsv.analysis_results
    ]
    assert parameter_list is not None, "parameter_list has to be given"
    assert isinstance(parameter_list, list), "parameter_list has to be a list"

    if excpt:
        assert matching_parametrized_object_params(
            st, params=['name']
        ), "If excpt==True you have to provide a dsv containing the same ADS type"
        parameter_list = set(
            st[0].getParams().keys()) - (set(parameter_list) | set(['name']))

    values, st = colapse(dsv.analysis_results,
                         st,
                         parameter_list=parameter_list,
                         allow_non_identical_objects=True)
    dsvs = []

    for vals in values:
        new_dsv = dsv.fromDataStoreView()
        new_dsv.block.segments = dsv.recordings_copy()
        new_dsv.sensory_stimulus = dsv.sensory_stimulus_copy()
        new_dsv.analysis_results.extend(vals)
        dsvs.append(new_dsv)
    return dsvs
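A hedged usage sketch (the parameter name and the processing step are illustrative): partitioning per-neuron values so that each returned view holds one stimulus condition, varying only in trial:

for sub_dsv in partition_analysis_results_by_stimulus_parameters_query(
        dsv, parameter_list=['trial']):
    # each sub_dsv now holds ADS differing only in the stimulus 'trial' parameter
    process(sub_dsv)   # hypothetical per-condition processing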
Code example #26
File: datastore.py  Project: h-mayorquin/mozaik
    def print_content(self, full_recordings=False, full_ADS=False):
        """
        Prints the content of the data store (specifically the list of recordings and ADSs in the DSV).
        
        If the 
        
        Parameters
        ----------
            full_recordings : bool (optional)
                            If True each contained recording will be printed.
                            Otherwise only the overview of the recordings based on stimulus type will be shown.
                            
            full_ADS : bool (optional)
                     If True each contained ADS will be printed (for each this will print the set of their mozaik parameters together with their values).
                     Otherwise only the overview of the ADSs based on their identifier will be shown.
        """
        logger.info("DSV info:")
        logger.info("   Number of recordings: " + str(len(self.block.segments)))
        d = {}
        for st in [s.annotations['stimulus'] for s in self.block.segments]:
            d[MozaikParametrized.idd(st).name] = d.get(MozaikParametrized.idd(st).name, 0) + 1

        for k in d.keys():
            logger.info("     " + str(k) + " : " + str(d[k]))

        logger.info("   Number of ADS: " + str(len(self.analysis_results)))
        d = {}
        for ads in self.analysis_results:
            d[ads.identifier] = d.get(ads.identifier, 0) + 1

        for k in d.keys():
            logger.info("     " + str(k) + " : " + str(d[k]))

        if full_recordings:
            logger.info('RECORDING RESULTS')
            for s in [s.annotations['stimulus'] for s in self.block.segments]:
                logger.info(str(s))

        if full_ADS:
            logger.info('ANALYSIS RESULTS')
            for a in self.analysis_results:
                logger.info(str(a))
Code example #27
 def subplot(self, subplotspec):
     plots = {}
     gs = gridspec.GridSpecFromSubplotSpec(1,2, subplot_spec=subplotspec,hspace=1.0, wspace=1.0)
     
     var_gr = 0
     var_ni = 0
     std_gr = 0
     std_ni = 0
             
     orr = list(set([MozaikParametrized.idd(s).orientation for s in queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',st_contrast=100).get_stimuli()]))        
     l4_exc_or = self.datastore.get_analysis_result(identifier='PerNeuronValue',value_name = 'LGNAfferentOrientation', sheet_name = self.parameters.sheet_name)
     
     
     # let's calculate the trial-to-trial variability of spontaneous activity
     # we assume that spikes have already been removed from the spontaneous activity
     dsv = queries.param_filter_query(self.datastore,st_name='InternalStimulus',st_direct_stimulation_name='None',sheet_name=self.parameters.sheet_name,analysis_algorithm='ActionPotentialRemoval',ads_unique=True)
     ids = dsv.get_analysis_result()[0].ids
     sp = {}
     for idd in ids:
         assert len(dsv.get_analysis_result()) == 1
         s = dsv.get_analysis_result()[0].get_asl_by_id(idd).magnitude
         sp[idd] = 1/numpy.mean(numpy.std([s[i*int(len(s)/10):(i+1)*int(len(s)/10)] for i in xrange(0,10)],axis=0,ddof=1))
         #sp[idd]  = 1/numpy.std(s,ddof=1)
         
     #lets calculate the mean of trial-to-trial variances across the neurons in the datastore for gratings 
     dsv = queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',sheet_name=self.parameters.sheet_name,st_contrast=100,analysis_algorithm='TrialVariability',y_axis_name='Vm (no AP) trial-to-trial variance')
     assert queries.equal_ads(dsv, except_params=['stimulus_id'])
     ids = dsv.get_analysis_result()[0].ids
     
     var_gr_ind = []
     logger.info("AA")
     logger.info(str([sp[i]  for i in ids]))
     for i in ids:
         # find the presented orientation closest to the neuron's orientation preference
         o = orr[numpy.argmin([circular_dist(o,l4_exc_or[0].get_value_by_id(i),numpy.pi) for o in orr])]
         assert len(queries.param_filter_query(dsv,st_orientation=o,ads_unique=True).get_analysis_result())==1
         a = 1/numpy.mean(numpy.sqrt(queries.param_filter_query(dsv,st_orientation=o,ads_unique=True).get_analysis_result()[0].get_asl_by_id(i).magnitude))
         var_gr = var_gr + a / sp[i]
         var_gr_ind.append(a / sp[i])
         std_gr = std_gr + a
     var_gr = var_gr / len(ids)
     std_gr = std_gr / len(ids)
     
     logger.info(str(var_gr_ind))
     #lets calculate the mean of trial-to-trial variances across the neurons in the datastore for natural images 
     dsv = queries.param_filter_query(self.datastore,st_name='NaturalImageWithEyeMovement',sheet_name=self.parameters.sheet_name,y_axis_name='Vm (no AP) trial-to-trial variance',ads_unique=True)
     var_ni_ind = [1/numpy.mean(numpy.sqrt(dsv.get_analysis_result()[0].get_asl_by_id(i).magnitude)) / sp[i] for i in ids]
     var_ni = numpy.mean(var_ni_ind)
     
     plots['Bar'] = (BarComparisonPlot({"NI" : var_ni*100.0, "GR" : var_gr*100.0}),gs[0,0],{})
     plots['Scatter'] = (ScatterPlot(numpy.array(var_gr_ind)*100.0, numpy.array(var_ni_ind)*100.0),gs[0,1],{'x_label' : 'GR', 'y_label' : 'NI','identity_line' : True})
     
     return plots
Code example #28
0
def partition_analysis_results_by_stimulus_parameters_query(dsv,parameter_list=None,excpt=False):
        """
        This query takes all analysis results and returns a list of DataStoreViews,
        each holding analysis results whose stimuli share the same values of all
        parameters except those listed in parameter_list.

        Note that in most cases one wants to do this only against a datastore holding
        analysis results measured to the same stimulus type! In that case the datastore
        is partitioned into subsets, each holding results to the same stimulus with the
        same parameter values, with the exception of the parameters in parameter_list.

        Parameters
        ----------
        
        dsv : DataStoreView
            The input DSV.
        
        parameter_list : list(string)
               The list of stimulus parameters that will vary between the ADSs in the returned DSVs;
               all other parameters will have the same value within each of the returned DSVs.

        excpt : bool
               If excpt is True the query is allowed only on DSVs holding the same AnalysisDataStructure type.
        """
        if dsv.analysis_results == []: return []
            
        for ads in dsv.analysis_results:
            assert ads.stimulus_id != None , "partition_analysis_results_by_stimulus_parameters_query accepts only DSV with ADS that all have defined stimulus id"
            
        st = [MozaikParametrized.idd(ads.stimulus_id) for ads in dsv.analysis_results]
        assert parameter_list != None , "parameter_list has to be given"
        assert type(parameter_list) == list , "parameter_list has to be list"
        
        if excpt:
            assert matching_parametrized_object_params(st,params=['name']), "If excpt==True you have to provide a dsv containing the same ADS type"
            parameter_list = set(st[0].params().keys()) - (set(parameter_list) | set(['name']))
        
        
        
        values, st = colapse(dsv.analysis_results,st,parameter_list=parameter_list,allow_non_identical_objects=True)
        dsvs = []

        for vals in values:
            new_dsv = dsv.fromDataStoreView()
            new_dsv.block.segments = dsv.recordings_copy()
            new_dsv.sensory_stimulus = dsv.sensory_stimulus_copy()
            new_dsv.analysis_results.extend(vals)
            dsvs.append(new_dsv)
        return dsvs
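
A hedged usage sketch (hypothetical names; dsv stands for any DataStoreView
whose ADSs all carry a stimulus_id of the same stimulus type):

# Partition the ADSs into groups that are identical in all stimulus
# parameters except 'trial' -- one returned DSV per group.
dsvs = partition_analysis_results_by_stimulus_parameters_query(dsv, parameter_list=['trial'])
for sub_dsv in dsvs:
    print(len(sub_dsv.analysis_results))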
Code example #29
0
 def subplot(self, subplotspec):
     plots = {}
     gs = gridspec.GridSpecFromSubplotSpec(4, 18, subplot_spec=subplotspec,
                                           hspace=1.0, wspace=1.0)
     
     orr = list(set([MozaikParametrized.idd(s).orientation for s in queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',st_contrast=100).get_stimuli()]))                
     #ors = self.datastore.get_analysis_result(identifier='PerNeuronValue',value_name = 'LGNAfferentOrientation', sheet_name = self.parameters.sheet_name)
     
     #dsv = queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',st_orientation=orr[numpy.argmin([circular_dist(o,ors[0].get_value_by_id(self.parameters.neuron),numpy.pi)  for o in orr])],st_contrast=100)             
     dsv = queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',st_orientation=0,st_contrast=100)             
     plots['Gratings'] = (OverviewPlot(dsv, ParameterSet({'sheet_name': self.parameters.sheet_name,'neuron': self.parameters.neuron,'spontaneous' : True, 'sheet_activity' : {}})),gs[0:2,:],{'x_label': None})
     #dsv = queries.param_filter_query(self.datastore,st_name='DriftingGratingWithEyeMovement')            
     #plots['GratingsWithEM'] = (OverviewPlot(dsv, ParameterSet({'sheet_name': self.parameters.sheet_name,'neuron': self.parameters.neuron, 'spontaneous' : True,'sheet_activity' : {}})),gs[2:4,:],{'x_label': None})
     dsv = queries.param_filter_query(self.datastore,st_name='NaturalImageWithEyeMovement')            
     plots['NIwEM'] = (OverviewPlot(dsv, ParameterSet({'sheet_name': self.parameters.sheet_name,'neuron': self.parameters.neuron,'spontaneous' : True, 'sheet_activity' : {}})),gs[2:4,:],{})
     
     
     return plots
Code example #30
0
        def plot(self):
            self.fig = pylab.figure(facecolor='w', **self.fig_param)
            gs = gridspec.GridSpec(1, 1)
            gs.update(left=0.1, right=0.9, top=0.9, bottom=0.1)
            gs = gs[0,0]
            gs = gridspec.GridSpecFromSubplotSpec(2, 1,subplot_spec=gs)

            orr = list(set([MozaikParametrized.idd(s).orientation for s in queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',st_contrast=100).get_stimuli()]))        
            oor = self.datastore.get_analysis_result(identifier='PerNeuronValue',value_name = 'LGNAfferentOrientation', sheet_name = self.parameters.sheet_name)
            
            if True:
                for neuron_idd in self.parameters.neurons:
                    col = orr[numpy.argmin([circular_dist(o,oor[0].get_value_by_id(neuron_idd),numpy.pi)  for o in orr])]
                    dsv =  queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',st_contrast=100,st_orientation=col,sheet_name=self.parameters.sheet_name,analysis_algorithm='ActionPotentialRemoval')
                    TrialToTrialCrossCorrelationOfAnalogSignalList(dsv,ParameterSet({'neurons' : [neuron_idd]}),tags=['helper']).analyse()
                    dsv =  queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',st_contrast=100,st_orientation=col,sheet_name=self.parameters.sheet_name,analysis_algorithm='PSTH')
                    TrialToTrialCrossCorrelationOfAnalogSignalList(dsv,ParameterSet({'neurons' : [neuron_idd]}),tags=['helper']).analyse()
                
                
            dsv =  queries.tag_based_query(self.datastore,['helper'])   
            dsv1 =  queries.param_filter_query(dsv,y_axis_name='trial-trial cross-correlation of Vm (no AP)',st_name='FullfieldDriftingSinusoidalGrating',sheet_name=self.parameters.sheet_name)
            vm_cc_gr = numpy.mean(numpy.array([asl.asl[0] for asl in dsv1.get_analysis_result()]),axis=0)
            dsv1 =  queries.param_filter_query(dsv,y_axis_name='trial-trial cross-correlation of psth (bin=2.0)',st_name='FullfieldDriftingSinusoidalGrating',sheet_name=self.parameters.sheet_name)
            psth_cc_gr = numpy.mean(numpy.array([asl.asl[0] for asl in dsv1.get_analysis_result()]),axis=0)
            
            
            #queries.param_filter_query(self.datastore,analysis_algorithm='TrialToTrialCrossCorrelationOfAnalogSignalList').print_content(full_ADS=True)
            
            dsv =  queries.param_filter_query(self.datastore,y_axis_name='trial-trial cross-correlation of Vm (no AP)',st_name="NaturalImageWithEyeMovement",sheet_name=self.parameters.sheet_name,ads_unique=True)
            vm_cc_ni = numpy.mean(numpy.array(dsv.get_analysis_result()[0].asl),axis=0)
            dsv =  queries.param_filter_query(self.datastore,y_axis_name='trial-trial cross-correlation of psth (bin=2.0)',st_name="NaturalImageWithEyeMovement",sheet_name=self.parameters.sheet_name,ads_unique=True)
            psth_cc_ni = numpy.mean(numpy.array(dsv.get_analysis_result()[0].asl),axis=0)
            
            logger.info(str(vm_cc_gr))
            logger.info(str(vm_cc_ni))
            
            
            z = int(min(self.parameters.window_length,(len(vm_cc_gr)-1)/2,(len(vm_cc_ni)-1)/2)/2)*2
            logger.info(str(psth_cc_ni))
            logger.info(str(psth_cc_gr))
            fontsize = 30
            pylab.rcParams['xtick.major.pad'] = fontsize-5
            pylab.rcParams['ytick.major.pad'] = 10
            pylab.rc('axes', linewidth=5)
            
            
            logger.info(len(vm_cc_gr[int(len(vm_cc_gr)/2)-z:int(len(vm_cc_gr)/2)+z+1]))
            logger.info(len(numpy.linspace(-z,z,2*z+1)))
                
            ax = pylab.subplot(gs[0,0])       
            ax.plot(numpy.linspace(-z,z,2*z+1),vm_cc_gr[int(len(vm_cc_gr)/2)-z:int(len(vm_cc_gr)/2)+z+1],label="Gratings")
            ax.plot(numpy.linspace(-z,z,2*z+1),vm_cc_ni[int(len(vm_cc_ni)/2)-z:int(len(vm_cc_ni)/2)+z+1],label="Natural images")
            pylab.legend()
            pylab.title("VM")
            pylab.xlabel("time (ms)")
            #pylab.ylabel("corr coef")
            
            ax = pylab.subplot(gs[1,0])
            ax.plot(numpy.linspace(-z,z,z+1),psth_cc_gr[int(len(psth_cc_gr)/2)-z/2:int(len(psth_cc_gr)/2)+z/2+1],label="Gratings")
            ax.plot(numpy.linspace(-z,z,z+1),psth_cc_ni[int(len(psth_cc_ni)/2)-z/2:int(len(psth_cc_ni)/2)+z/2+1],label="Natural images")
            
            pylab.xlim(-z,z)
            pylab.xticks([-z,0,z],[-250,0,250])#[-2*z,0,2*z])
            pylab.yticks([-1.0,0.0,1.0])
            
            #pylab.legend()
            #pylab.title("Spikes")
            #pylab.xlabel("time (ms)",fontsize=fontsize)
            #pylab.ylabel("corr. coef.",fontsize=fontsize)
            #three_tick_axis(pylab.gca().xaxis)
            for label in ax.get_xticklabels() + ax.get_yticklabels():
                label.set_fontsize(fontsize)
            
            if self.plot_file_name:
               pylab.savefig(Global.root_directory+self.plot_file_name)              
Code example #31
0
File: plotting.py Project: brainscales/mozaik
    def _ploter(self, idx, gs):
        plots  = []
        gs = gridspec.GridSpecFromSubplotSpec(len(self.st), 1, subplot_spec=gs)
        for i,(dic, st, pnvs) in enumerate(zip(self.tc_dict,self.st,self.pnvs)):
            period = st[0].params()[self.parameters.parameter_name].period
            if self.parameters.centered:        
               assert period != None, "ERROR: You asked for centering of tuning curves even though the domain over which it is measured is not periodic." 
            xs = []
            ys = []
            labels = []
                
            for k in dic.keys():    
                (par, val) = dic[k]
                if self.parameters.mean:
                    v = 0
                    # use a dedicated loop variable so we do not shadow the
                    # method's `idx` argument, which indexes the plotted neuron
                    for n_idx in xrange(0,len(self.parameters.neurons)):
                        vv,p = self.center_tc(val[:,n_idx],par,period,self.max_mean_response_indexes[i][n_idx])
                        v = v + vv
                    val = v / len(self.parameters.neurons)
                    par = p
                else:
                    val,par = self.center_tc(val[:,idx],par,period,self.max_mean_response_indexes[i][idx])
                    
                if period != None:
                    par = list(par)
                    val = list(val)
                    par.append(par[0] + period)
                    val.append(val[0])
                    if par != sorted(par):
                       logger.warning("Tuning curve parameter values not sorted after centering: " + str(par))
              
                xs.append(numpy.array(par))
                ys.append(numpy.array(val))
                
                l = ""
                for p in varying_parameters([MozaikParametrized.idd(e) for e in dic.keys()]):
                    l = l + str(p) + " : " + str(getattr(MozaikParametrized.idd(k),p))
                labels.append(l)
            
                
            
            params={}
            params["x_label"] = self.parameters.parameter_name
            params["y_label"] = pnvs[0].value_name
            params['labels']=None
            params['linewidth'] = 2
            params['colors'] = [cm.jet(j/float(len(xs))) for j in xrange(0,len(xs))] 
            if pnvs == self.pnvs[0]:
                params["title"] =  'Neuron ID: %d' % self.parameters.neurons[idx]
            
            if self.parameters.centered:        
                if period == pi:
                    params["x_ticks"] = [-pi/2, 0, pi/2]
                    params["x_lim"] = (-pi/2, pi/2)
                    params["x_tick_style"] = "Custom"
                    params["x_tick_labels"] = ["-$\\frac{\\pi}{2}$", "0", "$\\frac{\\pi}{2}$"]
               
                if period == 2*pi:
                    params["x_ticks"] = [-pi, 0, pi]
                    params["x_lim"] = (-pi, pi)
                    params["x_tick_style"] = "Custom"
                    params["x_tick_labels"] = ["-$\\pi$","0", "$\\pi$"]
            else:
                if period == pi:
                    params["x_ticks"] = [0, pi/2, pi]
                    params["x_lim"] = (0, pi)
                    params["x_tick_style"] = "Custom"
                    params["x_tick_labels"] = ["0", "$\\frac{\\pi}{2}$", "$\\pi$"]
               
                if period == 2*pi:
                    params["x_ticks"] = [0, pi, 2*pi]
                    params["x_lim"] = (0, 2*pi)
                    params["x_tick_style"] = "Custom"
                    params["x_tick_labels"] = ["0", "$\\pi$", "$2\\pi$"]

            if pnvs != self.pnvs[-1]:
                params["x_axis"] = None
            plots.append(("TuningCurve_" + pnvs[0].value_name,StandardStyleLinePlot(xs, ys),gs[i],params))
        return plots
Code example #32
0
    def perform_analysis(self):
        for sheet in self.datastore.sheets():
            # Load up spike trains for the right sheet and the corresponding
            # stimuli, and transform spike trains into psth
            dsv = queries.param_filter_query(self.datastore,identifier='AnalogSignalList',sheet_name=sheet,analysis_algorithm='PSTH',st_name='FullfieldDriftingSinusoidalGrating')
            assert queries.equal_ads(dsv,except_params=['stimulus_id']) , "It seems PSTH computed in different ways are present in datastore, ModulationRatio can accept only one"
            psths = dsv.get_analysis_result()
            st = [MozaikParametrized.idd(p.stimulus_id) for p in psths]
            # average across trials
            psths, stids = colapse(psths,st,parameter_list=['trial'],func=neo_sum,allow_non_identical_objects=True)

            # retrieve the computed orientation preferences
            pnvs = self.datastore.get_analysis_result(identifier='PerNeuronValue',
                                                      sheet_name=sheet,
                                                      value_name='orientation preference')
            if len(pnvs) != 1:
                logger.error("ERROR: Expected only one PerNeuronValue per sheet "
                             "with value_name 'orientation preference' in datastore, got: "
                             + str(len(pnvs)))
                return None
        
            or_pref = pnvs[0]
            # find closest orientation of grating to a given orientation preference of a neuron
            # first find all the different presented stimuli:
            ps = OrderedDict()
            for s in st:
                ps[MozaikParametrized.idd(s).orientation] = True
            ps = ps.keys()
            # now find the closest presented orientations
            closest_presented_orientation = []
            for i in xrange(0, len(or_pref.values)):
                circ_d = 100000
                idx = 0
                for j in xrange(0, len(ps)):
                    if circ_d > circular_dist(or_pref.values[i], ps[j], numpy.pi):
                        circ_d = circular_dist(or_pref.values[i], ps[j], numpy.pi)
                        idx = j
                closest_presented_orientation.append(ps[idx])

            closest_presented_orientation = numpy.array(closest_presented_orientation)

            # collapse along orientation - we will calculate MR for each
            # parameter combination other than orientation
            d = colapse_to_dictionary(psths, stids, "orientation")
            for (st, vl) in d.items():
                # here we will store the modulation ratios, one per each neuron
                modulation_ratio = []
                f0 = []
                f1 = []
                ids = []
                frequency = MozaikParametrized.idd(st).temporal_frequency * MozaikParametrized.idd(st).getParams()['temporal_frequency'].units
                for (orr, ppsth) in zip(vl[0], vl[1]):
                    for j in numpy.nonzero(orr == closest_presented_orientation)[0]:
                        if or_pref.ids[j] in ppsth.ids:
                            a = or_pref.ids[j]
                            mr,F0,F1 = self._calculate_MR(ppsth.get_asl_by_id(or_pref.ids[j]).flatten(),frequency)
                            modulation_ratio.append(mr)
                            f0.append(F0)
                            f1.append(F1)
                            ids.append(or_pref.ids[j])
                            
                logger.debug('Adding PerNeuronValue:' + str(sheet))
                self.datastore.full_datastore.add_analysis_result(
                    PerNeuronValue(modulation_ratio,
                                   ids,
                                   qt.dimensionless,
                                   value_name='Modulation ratio' + '(' + psths[0].x_axis_name + ')',
                                   sheet_name=sheet,
                                   tags=self.tags,
                                   period=None,
                                   analysis_algorithm=self.__class__.__name__,
                                   stimulus_id=str(st)))

                self.datastore.full_datastore.add_analysis_result(
                    PerNeuronValue(f0,
                                   ids,
                                   qt.dimensionless,
                                   value_name='F0' + '(' + psths[0].x_axis_name + ')',
                                   sheet_name=sheet,
                                   tags=self.tags,
                                   period=None,
                                   analysis_algorithm=self.__class__.__name__,
                                   stimulus_id=str(st)))
                
                self.datastore.full_datastore.add_analysis_result(
                    PerNeuronValue(f1,
                                   ids,
                                   qt.dimensionless,
                                   value_name='F1' + '(' + psths[0].x_axis_name + ')',
                                   sheet_name=sheet,
                                   tags=self.tags,
                                   period=None,
                                   analysis_algorithm=self.__class__.__name__,
                                   stimulus_id=str(st)))


                import pylab
                pylab.figure()
                pylab.hist(modulation_ratio)
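
The loop above relies on a _calculate_MR helper that is not shown. A minimal
sketch of how such an F1/F0 modulation ratio could be computed from a
trial-averaged PSTH with numpy's FFT (a hypothetical stand-in, assuming the
PSTH covers an integer number of stimulus cycles and that bin_duration_ms
matches the PSTH bin size):

import numpy

def calculate_mr(psth, frequency_hz, bin_duration_ms=2.0):
    # index of the FFT bin corresponding to the stimulus temporal frequency
    duration_s = len(psth) * bin_duration_ms / 1000.0
    first_har = int(round(duration_s * frequency_hz))
    fft = numpy.fft.fft(psth)
    f0 = abs(fft[0]) / len(psth)               # mean (DC) component
    f1 = 2 * abs(fft[first_har]) / len(psth)   # first-harmonic amplitude
    mr = f1 / f0 if f0 != 0 else 0.0
    return mr, f0, f1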
Code example #33
0
def perform_analysis_and_visualization(data_store):
    analog_ids = param_filter_query(
        data_store,
        sheet_name="V1_Exc_L4").get_segments()[0].get_stored_esyn_ids()
    analog_ids_inh = param_filter_query(
        data_store,
        sheet_name="V1_Inh_L4").get_segments()[0].get_stored_esyn_ids()
    spike_ids = param_filter_query(
        data_store,
        sheet_name="V1_Exc_L4").get_segments()[0].get_stored_spike_train_ids()

    number_of_cells = len(analog_ids)
    stimuli_list = list(('SparseNoise', 'DenseNoise'))
    save_to = './Data/'

    for stimuli_type in stimuli_list:
        print 'Getting voltage and images for ' + stimuli_type

        # Saving parameters
        format = '.pickle'
        quality = '_3000_21_'  # This is the number of images followed by the interval that they take in ms

        # Load the segments
        dsv = queries.param_filter_query(data_store,
                                         sheet_name="V1_Exc_L4",
                                         st_name=stimuli_type)
        segments = dsv.get_segments()
        stimuli = [MozaikParametrized.idd(s) for s in dsv.get_stimuli()]
        # Take the seeds
        seeds = [s.experiment_seed for s in stimuli]

        # Sort them based on their seeds
        seeds, segments, stimuli = zip(*sorted(zip(seeds, segments, stimuli)))
        segment_length = segments[0].get_spiketrain(spike_ids[0]).t_stop

        # Values to obtain
        spikes = [[] for i in segments[0].get_spiketrain(spike_ids)]
        images = []

        ## Extract images
        print 'Extracting and processing images'
        for i, seg in enumerate(segments):
            """
            First we take out the stimuli and make them as small as we can First we take out the stimuli and make them 
            as small as we can than one pixel assigned to each value of luminance. In order to do so, we first call the class
            And re-adjust is parameter st.density = st.grid_size. After that we successively call the class to extract the images
            frames  
            """

            # First we take the class
            st = MozaikParametrized.idd_to_instance(stimuli[i])
            st.size_x = 1.0
            st.size_y = 1.0
            st.density = st.grid_size

            fr = st.frames()

            # First we generate as many frames as we need (total time / time per frame = total # of frames)
            ims = [
                fr.next()[0]
                for i in xrange(0, st.duration / st.frame_duration)
            ]
            # Now, we take the images that repeat themselves
            ims = [
                ims[i] for i in xrange(0, len(ims))
                if ((i % (st.time_per_image / st.frame_duration)) == 0)
            ]
            images.append(ims)

        # Saving images
        print 'Saving Images '

        # Concatenate and save
        ims = numpy.concatenate(images, axis=0)
        images_filename = save_to + 'images' + quality + stimuli_type + format
        f = open(images_filename, 'wb')
        cPickle.dump(ims, f)
        f.close()

        ## Get the voltage for all the cells
        for cell_number in range(number_of_cells):
            print 'Extracting Voltage for cell ', cell_number

            vm = []  # Intialize voltage list
            for i, seg in enumerate(segments):
                # get vm
                v = seg.get_vm(analog_ids[cell_number])
                # Check that the voltage between segments match
                if vm != []:
                    assert vm[-1][-1] == v.magnitude[0]
                # Append
                vm.append(v.magnitude)

            # Concatenate the experiments
            print 'Concatenating Voltage'
            vm = [v[:-1] for v in vm]  # Take the last element out
            vm = numpy.concatenate(vm, axis=0)

            print 'voltage shape=', numpy.shape(vm)
            # Save the voltage
            print 'Saving Voltage for cell', cell_number
            voltage_filename = save_to + 'vm' + '_cell_' + str(
                cell_number) + quality + stimuli_type + '.pickle'
            f = open(voltage_filename, 'wb')
            cPickle.dump(vm, f)
            f.close()
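
A minimal sketch of loading the pickled arrays back, assuming the save_to
directory and naming scheme used in the loop above:

import cPickle
import numpy

save_to = './Data/'
quality = '_3000_21_'
stimuli_type = 'SparseNoise'

with open(save_to + 'images' + quality + stimuli_type + '.pickle', 'rb') as f:
    images = cPickle.load(f)
with open(save_to + 'vm' + '_cell_0' + quality + stimuli_type + '.pickle', 'rb') as f:
    vm = cPickle.load(f)

print numpy.shape(images), numpy.shape(vm)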
Code example #34
0
      def perform_analysis(self):
                dsv = queries.param_filter_query(self.datastore,identifier='PerNeuronValue',sheet_name=self.parameters.sheet_name,st_name='DriftingSinusoidalGratingDisk')
                
                if len(dsv.get_analysis_result()) == 0: return
                assert queries.ads_with_equal_stimulus_type(dsv)
                assert queries.equal_ads(dsv,except_params=['stimulus_id'])
                self.pnvs = dsv.get_analysis_result()
                
                # get stimuli
                self.st = [MozaikParametrized.idd(s.stimulus_id) for s in self.pnvs]
                
                
                # transform the pnvs into a dictionary of tuning curves along the 'radius' parameter
                # also make sure they are ordered according to the first pnv's idds 
                
                self.tc_dict = colapse_to_dictionary([z.get_value_by_id(self.parameters.neurons) for z in self.pnvs],self.st,"radius")
                for k in self.tc_dict.keys():
                        crf_sizes = []
                        supp_sizes= []
                        sis = []
                        max_responses=[]
                        csis = []
                        
                        # we will do the calculation neuron by neuron
                        for i in xrange(0,len(self.parameters.neurons)):
                            
                            rads = self.tc_dict[k][0]
                            values = numpy.array([a[i] for a in self.tc_dict[k][1]])
                            
                            # sort them based on radiuses
                            rads , values = zip(*sorted(zip(rads,values)))
                                                        
                            max_response = numpy.max(values)
                            crf_index  = numpy.argmax(values)
                            crf_size = rads[crf_index]
                            
                            if crf_index < len(values)-1:
                                supp_index = crf_index+numpy.argmin(values[crf_index+1:])+1
                            else:
                                supp_index = len(values)-1
                            supp_size = rads[supp_index]                                

                            if supp_index < len(values)-1:
                                cs_index = supp_index+numpy.argmax(values[supp_index+1:])+1
                            else:
                                cs_index = len(values)-1

                            
                            if values[crf_index] != 0:
                                si = (values[crf_index]-values[supp_index])/values[crf_index]
                            else:
                                si = 0

                            if values[cs_index] != 0:
                                csi = (values[cs_index]-values[supp_index])/values[crf_index]
                            else:
                                csi = 0

                            crf_sizes.append(crf_size)
                            supp_sizes.append(supp_size)
                            sis.append(si)
                            max_responses.append(max_response)
                            csis.append(csi)
                            
                            
                        self.datastore.full_datastore.add_analysis_result(PerNeuronValue(max_responses,self.parameters.neurons,self.st[0].getParams()["radius"].units,value_name = 'Max. response of ' + self.pnvs[0].value_name ,sheet_name=self.parameters.sheet_name,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(k)))
                        self.datastore.full_datastore.add_analysis_result(PerNeuronValue(crf_sizes,self.parameters.neurons,self.st[0].getParams()["radius"].units,value_name = 'Max. facilitation radius of ' + self.pnvs[0].value_name ,sheet_name=self.parameters.sheet_name,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(k)))
                        self.datastore.full_datastore.add_analysis_result(PerNeuronValue(supp_sizes,self.parameters.neurons,self.st[0].getParams()["radius"].units,value_name = 'Max. suppressive radius of ' + self.pnvs[0].value_name ,sheet_name=self.parameters.sheet_name,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(k)))
                        self.datastore.full_datastore.add_analysis_result(PerNeuronValue(sis,self.parameters.neurons,None,value_name = 'Suppression index of ' + self.pnvs[0].value_name ,sheet_name=self.parameters.sheet_name,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(k)))
                        self.datastore.full_datastore.add_analysis_result(PerNeuronValue(csis,self.parameters.neurons,None,value_name = 'Counter-suppression index of ' + self.pnvs[0].value_name ,sheet_name=self.parameters.sheet_name,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(k)))
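
For intuition, a toy run of the size-tuning measures computed above (synthetic
numbers, following the same index conventions as the code):

import numpy

rads = [0.2, 0.5, 1.0, 2.0, 4.0]
values = numpy.array([2.0, 8.0, 10.0, 4.0, 6.0])

crf_index = numpy.argmax(values)                                  # 2 -> crf_size = rads[2] = 1.0
supp_index = crf_index + numpy.argmin(values[crf_index+1:]) + 1   # 3 -> supp_size = rads[3] = 2.0
cs_index = supp_index + numpy.argmax(values[supp_index+1:]) + 1   # 4

si = (values[crf_index] - values[supp_index]) / values[crf_index]   # suppression index = 0.6
csi = (values[cs_index] - values[supp_index]) / values[crf_index]   # counter-suppression index = 0.2
print si, csi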
Code example #35
0
File: plot_constructors.py Project: RCagnol/mozaik
 def title(self, idx):
     return self._title([
         MozaikParametrized.idd(ads.stimulus_id)
         for ads in self.dsvs[idx].get_analysis_result()
     ][0])
Code example #36
0
    def process_input(self, visual_space, stimulus, duration=None, offset=0):
        """
        Present a visual stimulus to the model, and create the LGN output
        (relay) neurons.
        
        Parameters
        ----------
        visual_space : VisualSpace
                     The visual space to which the stimuli are presented.
                     
        stimulus : VisualStimulus    
                 The visual stimulus to be shown.
        
        duration : int (ms)
                 The time for which we will simulate the stimulus
        
        offset : int(ms)
               The time (in absolute time of the whole simulation) at which the stimulus starts.
        
        Returns
        -------
        retinal_input : list(ndarray)
                      List of 2D arrays containing the frames of luminances that were presented to the retina.
        """
        logger.debug("Presenting visual stimulus from visual space %s" % visual_space)
        visual_space.set_duration(duration)
        self.input = visual_space
        st = MozaikParametrized.idd(stimulus)
        st.trial = None  # to avoid recalculating RFs response to multiple trials of the same stimulus

        cached = self.get_cache(st)

        if cached == None:
            logger.debug("Generating output spikes...")
            (input_currents, retinal_input) = self._calculate_input_currents(visual_space,
                                                                            duration)
        else:
            logger.debug("Retrieved spikes from cache...")
            (input_currents, retinal_input) = cached

        ts = self.model.sim.get_time_step()
        #import pylab
        #pylab.figure()
        for rf_type in self.rf_types:
            assert isinstance(input_currents[rf_type], list)
            for i, (lgn_cell, input_current, scs, ncs) in enumerate(
                                                            zip(self.sheets[rf_type].pop,
                                                                input_currents[rf_type],
                                                                self.scs[rf_type],
                                                                self.ncs[rf_type])):
                assert isinstance(input_current, dict)
                #if i==0:
                #    pylab.plot(self.parameters.linear_scaler * input_current['amplitudes'])
                t = input_current['times'] + offset
                a = self.parameters.linear_scaler * input_current['amplitudes']
                scs.set_parameters(times=t, amplitudes=a)
                if self.parameters.mpi_reproducible_noise:
                    t = numpy.arange(0, duration, ts) + offset
                    amplitudes = (self.parameters.noise.mean
                                   + self.parameters.noise.stdev
                                       * self.ncs_rng[rf_type][i].randn(len(t)))
                    ncs.set_parameters(times=t, amplitudes=amplitudes)
        # for debugging/testing, doesn't work with MPI !!!!!!!!!!!!
        #input_current_array = numpy.zeros((self.shape[1], self.shape[0], len(visual_space.time_points(duration))))
        #update_factor = int(visual_space.update_interval/self.parameters.receptive_field.temporal_resolution)
        #logger.debug("input_current_array.shape = %s, update_factor = %d, p.dim = %s" % (input_current_array.shape, update_factor, self.shape))
        #k = 0
        #for i in range(self.shape[1]): # self.shape gives (x,y), so self.shape[1] is the height
        #    for j in range(self.shape[0]):
                # where the kernel temporal resolution is finer than the frame update interval,
                # we only keep the current values at the start of each frame
        #        input_current_array[i,j, :] = input_currents['X_ON'][k]['amplitudes'][::update_factor]
        #        k += 1

        # if record() has already been called, setup the recording now
        self._built = True
        self.write_cache(st, input_currents, retinal_input)
        return retinal_input
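
A hedged call-pattern sketch (hypothetical object names; constructing the
model, visual space and stimulus depends on the mozaik configuration and is
not shown here):

# `lgn` is assumed to be an instance of the class above, and `stim` a
# VisualStimulus already added to `visual_space`.
retinal_frames = lgn.process_input(visual_space, stim, duration=stim.duration, offset=0)
# retinal_frames is a list of 2D luminance arrays presented to the retina.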
Code example #37
0
 def title(self, idx):
     return self._title(MozaikParametrized.idd(self.dsvs[idx].get_stimuli()[0]))
Code example #38
0
 def __init__(self, tags=[], **params):
     MozaikParametrized.__init__(self, **params)
     self.tags = tags
Code example #39
0
def param_filter_query(dsv, ads_unique=False, rec_unique=False, **kwargs):
    """
    Returns a DSV containing only those recordings and ADSs whose mozaik parameter
    values match the parameter value combinations provided in `kwargs`. 
    
    To restrict the mozaik parameters of the stimuli associated with the ADSs or
    recordings, prepend 'st_' to the parameter name.
    
    For recordings, the parameter sheet_name refers to the sheet for which the recording was done. 
    
    
    Parameters
    ----------
    
    dsv : DataStoreView
        The input DSV.
    
    ads_unique : bool, optional
               If True the query will raise an exception if the query does not identify a unique ADS.

    rec_unique : bool, optional
               If True the query will raise an exception if the query does not identify a unique recording.
    
    \*\*kwargs : dict
               Remaining keyword arguments will be interpreted as mozaik parameter names and their associated values that all ADSs
               or recordings have to match. The values of the parameters should be either directly the values to match or a list of values, in which
               case this list is interpreted as *one of* the values that each returned recording or ADS has to match (thus effectively there
               is an *and* operation between the different parameters and an *or* operation between the values specified for a given mozaik parameter). 
               
    Examples
    --------
    >>> datastore.param_filter_query(datastore,identifier=['PerNeuronValue','SingleValue'],sheet_name=sheet,value_name='orientation preference')
    
    This command should return a DSV containing all recordings and ADSs whose identifier is *PerNeuronValue* or *SingleValue*, that are associated with the sheet given by *sheet*, and whose value name is 'orientation preference'.
    Note that since recordings do not have these parameters, this query would return a DSV containing only ADSs.
    
    >>> datastore.param_filter_query(datastore,st_orientation=0.5)
    
    This command should return a DSV containing all recordings and ADSs that are associated with stimuli whose mozaik parameter orientation has value 0.5.
    """

    new_dsv = dsv.fromDataStoreView()

    st_kwargs = dict([(k[3:], kwargs[k]) for k in kwargs.keys()
                      if k[0:3] == 'st_'])
    kwargs = dict([(k, kwargs[k]) for k in kwargs.keys() if k[0:3] != 'st_'])

    seg_st = [
        MozaikParametrized.idd(seg.annotations['stimulus'])
        for seg in dsv.block.segments
    ]
    ads_st = [
        MozaikParametrized.idd(ads.stimulus_id) for ads in dsv.analysis_results
        if ads.stimulus_id != None
    ]
    if 'sheet_name' in set(kwargs):
        if len(kwargs) == 1:
            # This means that 'sheet_name' is the only non-stimulus parameter, so we
            # keep only the recordings associated with that sheet (otherwise no recordings would pass)
            kw = kwargs['sheet_name'] if isinstance(
                kwargs['sheet_name'], list) else [kwargs['sheet_name']]
            seg_filtered = set([
                s for s in dsv.block.segments
                if s.annotations['sheet_name'] in kw
            ])
        else:
            seg_filtered = set([])
    else:
        seg_filtered = set(dsv.block.segments)

    ads_filtered = set(filter_query(dsv.analysis_results, **kwargs))

    if st_kwargs != {}:
        seg_filtered_st = set(
            filter_query(seg_st,
                         extra_data_list=dsv.block.segments,
                         **st_kwargs)[1])
        ads_filtered_st = set(
            filter_query(ads_st,
                         extra_data_list=[
                             ads for ads in dsv.analysis_results
                             if ads.stimulus_id != None
                         ],
                         **st_kwargs)[1])
    else:
        ads_filtered_st = set(dsv.analysis_results)
        seg_filtered_st = set(dsv.block.segments)

    seg = seg_filtered_st & seg_filtered
    ads = ads_filtered_st & ads_filtered

    new_dsv.sensory_stimulus = dsv.sensory_stimulus_copy()
    new_dsv.block.segments = list(seg)
    new_dsv.analysis_results = list(ads)

    if ads_unique and len(ads) != 1:
        raise ValueError(
            "Result was expected to have only single ADS, it contains %d" %
            len(ads))

    if rec_unique and len(seg) != 1:
        raise ValueError(
            "Result was expected to have only single Segment, it contains %d" %
            len(seg))

    return new_dsv
Code example #40
0
 def _get_stimulus_ids(self):
     return [MozaikParametrized.idd(ads.stimulus_id) for ads in self.datastore.get_analysis_result()]
Code example #41
0
 def title(self, idx):
     return self._title([MozaikParametrized.idd(ads.stimulus_id) for ads in self.dsvs[idx].get_analysis_result()][0])
Code example #42
0
        def plot(self):
            self.fig = pylab.figure(facecolor='w', **self.fig_param)
            gs = gridspec.GridSpec(1, 1)
            gs.update(left=0.07, right=0.97, top=0.9, bottom=0.1)
            gs = gs[0,0]
            
            gs = gridspec.GridSpecFromSubplotSpec(4, 5,subplot_spec=gs)

            orr = list(set([MozaikParametrized.idd(s).orientation for s in queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',st_contrast=100).get_stimuli()]))        
            l4_exc_or = self.datastore.get_analysis_result(identifier='PerNeuronValue',value_name = 'LGNAfferentOrientation', sheet_name = 'V1_Exc_L4')

            
            col = orr[numpy.argmin([circular_dist(o,l4_exc_or[0].get_value_by_id(self.parameters.neuron),numpy.pi)  for o in orr])]
            #segs = queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',st_contrast=100,st_orientation=col,sheet_name='V1_Exc_L4').get_segments()
            #signals = [seg.get_vm(self.parameters.neuron) for seg in segs] 
            dsv = queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',sheet_name='V1_Exc_L4',st_contrast=100,analysis_algorithm='ActionPotentialRemoval',st_orientation=col)
            assert queries.ads_with_equal_stimuli(dsv,except_params=["trial"])
            adss = dsv.get_analysis_result()
            signals = [ads.get_asl_by_id(self.parameters.neuron) for ads in adss] 
            
            (signal,noise,snr) = self.wavelet_decomposition(signals)
            
            ax = pylab.subplot(gs[0,0:2])            
            for s in signals:
                ax.plot(s,c='k')
            pylab.ylabel('Vm')
            pylab.title("Gratings",fontsize=20)
            pylab.xlim(0,len(signals[0]))
            pylab.ylim(-80,-50)
            
            ax = pylab.subplot(gs[1,0:2])            
            ax.imshow(signal,aspect='auto',origin='lower')
            pylab.ylabel('Signal')
             
            ax = pylab.subplot(gs[2,0:2])            
            ax.imshow(noise,aspect='auto',origin='lower')
            pylab.ylabel('Noise')

            ax = pylab.subplot(gs[3,0:2])            
            ax.imshow(snr,aspect='auto',origin='lower')
            pylab.ylabel('SNR')
            pylab.xlabel('time')
            
            
            #segs = queries.param_filter_query(self.datastore,st_name='NaturalImageWithEyeMovement',sheet_name='V1_Exc_L4').get_segments()
            #signals = [seg.get_vm(self.parameters.neuron) for seg in segs] 
            dsv = queries.param_filter_query(self.datastore,st_name='NaturalImageWithEyeMovement',sheet_name='V1_Exc_L4',analysis_algorithm='ActionPotentialRemoval')
            assert queries.ads_with_equal_stimuli(dsv,except_params=["trial"])
            adss = dsv.get_analysis_result()
            signals = [ads.get_asl_by_id(self.parameters.neuron) for ads in adss] 
           
            (signal_ni,noise_ni,snr_ni) = self.wavelet_decomposition(signals)
            
            ax = pylab.subplot(gs[0,2:4])            
            for s in signals:
                ax.plot(s,c='k')
            pylab.xlim(0,len(signals[0]))                
            pylab.ylim(-80,-50)
            pylab.title("NI",fontsize=20)
            ax = pylab.subplot(gs[1,2:4])            
            ax.imshow(signal_ni,aspect='auto',origin='lower')

            ax = pylab.subplot(gs[2,2:4])            
            ax.imshow(noise_ni,aspect='auto',origin='lower')

            ax = pylab.subplot(gs[3,2:4])            
            ax.imshow(snr_ni,aspect='auto',origin='lower')
            pylab.xlabel('time')
            
            ax = pylab.subplot(gs[1,4])            
            ax.plot(numpy.mean(signal,axis=1),label="GR")
            ax.plot(numpy.mean(signal_ni,axis=1),label="NI")
            ax.set_xscale('log')
            ax.set_yscale('log')
            pylab.legend()
            
            ax = pylab.subplot(gs[2,4])            
            ax.plot(numpy.mean(noise,axis=1))
            ax.plot(numpy.mean(noise_ni,axis=1))
            ax.set_xscale('log')
            ax.set_yscale('log')
            
            ax = pylab.subplot(gs[3,4])            
            ax.plot(numpy.mean(snr,axis=1))
            ax.plot(numpy.mean(snr_ni,axis=1))
            ax.set_xscale('log')
            ax.set_yscale('log')
            pylab.xlabel("frequency")
            
            
            if self.plot_file_name:
               pylab.savefig(Global.root_directory+self.plot_file_name)              
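
The wavelet_decomposition helper used above is not shown. A rough stand-in for
the same signal/noise split, using scipy's short-time Fourier spectrogram
instead of wavelets (purely illustrative; the original method and its
parameters are unknown):

import numpy
from scipy.signal import spectrogram

def spectrogram_decomposition(signals, fs=1000.0, nperseg=128):
    # stack trials (assumed equal length) into a trials x time array
    trials = numpy.array([numpy.asarray(s).flatten() for s in signals])
    mean_trace = numpy.mean(trials, axis=0)
    # power of the across-trial mean = "signal"
    f, t, sig_pow = spectrogram(mean_trace, fs=fs, nperseg=nperseg)
    # mean power of the per-trial residuals = "noise"
    noise_pow = numpy.mean([spectrogram(tr - mean_trace, fs=fs, nperseg=nperseg)[2]
                            for tr in trials], axis=0)
    return sig_pow, noise_pow, sig_pow / (noise_pow + 1e-12)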
Code example #43
0
File: __init__.py Project: antolikjan/mozaik
 def __init__(self, **params):
     MozaikParametrized.__init__(self, **params)
     self.input = None
     self._frames = self.frames()
     self.n_frames = numpy.inf  # possibly very dangerous. Don't do 'for i in range(stim.n_frames)'!
Code example #44
0
File: plot_constructors.py Project: RCagnol/mozaik
 def title(self, idx):
     return self._title(
         MozaikParametrized.idd(self.dsvs[idx].get_stimuli()[0]))
Code example #45
0
File: data_structures.py Project: dguarino/mozaik
 def __init__(self,tags=[], **params):
     MozaikParametrized.__init__(self, **params)
     self.tags = tags
Code example #46
0
File: plot_constructors.py Project: RCagnol/mozaik
 def _get_stimulus_ids(self):
     return [
         MozaikParametrized.idd(ads.stimulus_id)
         for ads in self.datastore.get_analysis_result()
     ]
Code example #47
0
File: vision.py Project: dguarino/mozaik
    def perform_analysis(self):
        for sheet in self.datastore.sheets():
            # Load up spike trains for the right sheet and the corresponding
            # stimuli, and transform spike trains into psth
            print sheet
            self.datastore.print_content()
            dsv = queries.param_filter_query(self.datastore,identifier='AnalogSignalList',sheet_name=sheet,analysis_algorithm='PSTH',st_name='FullfieldDriftingSinusoidalGrating')
            dsv.print_content()
            assert queries.equal_ads(dsv,except_params=['stimulus_id']) , "It seems PSTH computed in different ways are present in datastore, ModulationRatio can accept only one"
            psths = dsv.get_analysis_result()
            st = [MozaikParametrized.idd(p.stimulus_id) for p in psths]
            # average across trials
            psths, stids = colapse(psths,st,parameter_list=['trial'],func=neo_sum,allow_non_identical_objects=True)

            # retrieve the computed orientation preferences
            pnvs = self.datastore.get_analysis_result(identifier='PerNeuronValue',
                                                      sheet_name=sheet,
                                                      value_name='orientation preference')
            
            if len(pnvs) != 1:
                logger.error("ERROR: Expected only one PerNeuronValue per sheet "
                             "with value_name 'orientation preference' in datastore, got: "
                             + str(len(pnvs)))
                return None
        
            or_pref = pnvs[0]
            # find closest orientation of grating to a given orientation preference of a neuron
            # first find all the different presented stimuli:
            ps = {}
            for s in st:
                ps[MozaikParametrized.idd(s).orientation] = True
            ps = ps.keys()
            print ps
            # now find the closest presented orientations
            closest_presented_orientation = []
            for i in xrange(0, len(or_pref.values)):
                circ_d = 100000
                idx = 0
                for j in xrange(0, len(ps)):
                    if circ_d > circular_dist(or_pref.values[i], ps[j], numpy.pi):
                        circ_d = circular_dist(or_pref.values[i], ps[j], numpy.pi)
                        idx = j
                closest_presented_orientation.append(ps[idx])

            closest_presented_orientation = numpy.array(closest_presented_orientation)

            # collapse along orientation - we will calculate MR for each
            # parameter combination other than orientation
            d = colapse_to_dictionary(psths, stids, "orientation")
            for (st, vl) in d.items():
                # here we will store the modulation ratios, one per each neuron
                modulation_ratio = []
                ids = []
                frequency = MozaikParametrized.idd(st).temporal_frequency * MozaikParametrized.idd(st).params()['temporal_frequency'].units
                for (orr, ppsth) in zip(vl[0], vl[1]):
                    for j in numpy.nonzero(orr == closest_presented_orientation)[0]:
                        if or_pref.ids[j] in ppsth.ids:
                            modulation_ratio.append(self._calculate_MR(ppsth.get_asl_by_id(or_pref.ids[j]),frequency))
                            ids.append(or_pref.ids[j])
                            
                logger.debug('Adding PerNeuronValue:' + str(sheet))
                self.datastore.full_datastore.add_analysis_result(
                    PerNeuronValue(modulation_ratio,
                                   ids,
                                   qt.dimensionless,
                                   value_name='Modulation ratio' + '(' + psths[0].x_axis_name + ')',
                                   sheet_name=sheet,
                                   tags=self.tags,
                                   period=None,
                                   analysis_algorithm=self.__class__.__name__,
                                   stimulus_id=str(st)))

                import pylab
                pylab.figure()
                pylab.hist(modulation_ratio)
Code example #48
0
def createSimulationRunDocumentAndUploadImages(path, gfs):
    print(path)
    # let's get the parameters
    param = load_parameters(os.path.join(path, 'parameters'), {})
    stim_docs = []
    experimental_protocols_docs = []

    if FULL:
        print("Loading...")
        data_store = PickledDataStore(load=True,
                                      parameters=ParameterSet({
                                          'root_directory':
                                          path,
                                          'store_stimuli':
                                          False
                                      }),
                                      replace=False)
        print("Loaded")
        unique_stimuli = [(s, MozaikParametrized.idd(s))
                          for s in set(data_store.get_stimuli())]
        if WITH_STIMULI:
            for s, sidd in unique_stimuli:
                raws = data_store.get_sensory_stimulus([s])
                if raws == []:
                    raws = numpy.array([[[0, 0], [0, 0.1]], [[0, 0], [0, 0]]])
                else:
                    raws = raws[0]
                if param['input_space'] != None:
                    imageio.mimwrite(
                        'movie.gif',
                        raws,
                        duration=param['input_space']['update_interval'] /
                        1000.0)
                else:
                    imageio.mimwrite('movie.gif', raws, duration=0.1)
                params = sidd.get_param_values()

                params = {k: (v, sidd.params()[k].doc) for k, v in params}

                stim_docs.append({
                    'code':
                    sidd.name,
                    'params':
                    params,
                    'short_description':
                    parse_docstring(
                        getattr(
                            __import__(sidd.module_path, globals(), locals(),
                                       sidd.name),
                            sidd.name).__doc__)["short_description"],
                    'long_description':
                    parse_docstring(
                        getattr(
                            __import__(sidd.module_path, globals(), locals(),
                                       sidd.name),
                            sidd.name).__doc__)["long_description"],
                    'gif':
                gfs.put(open('movie.gif', 'rb')),
                })

        #### EXPERIMENTAL PROTOCOLS ########
        print(data_store.get_experiment_parametrization_list())
        for ep in data_store.get_experiment_parametrization_list():
            name = ep[0][8:-2].split('.')[-1]
            module_path = '.'.join(ep[0][8:-2].split('.')[:-1])
            doc_par = get_params_from_docstring(
                getattr(__import__(module_path, globals(), locals(), name),
                        name))
            params = eval(ep[1])

            p = {
                k: (params[k], doc_par[k][0],
                    doc_par[k][1]) if k in doc_par else params[k]
                for k in params.keys()
            }

            experimental_protocols_docs.append({
                'code':
                module_path + '.' + name,
                'params':
                p,
                'short_description':
                parse_docstring(
                    getattr(__import__(module_path, globals(), locals(), name),
                            name).__doc__)["short_description"],
                'long_description':
                parse_docstring(
                    getattr(__import__(module_path, globals(), locals(), name),
                            name).__doc__)["long_description"],
            })

    ##### RECORDERS ###################
    recorders_docs = []
    for sh in param["sheets"].keys():
        for rec in param['sheets'][sh]["params"]["recorders"].keys():
            recorder = param['sheets'][sh]["params"]["recorders"][rec]
            name = recorder["component"].split('.')[-1]
            module_path = '.'.join(recorder["component"].split('.')[:-1])
            doc_par = get_params_from_docstring(
                getattr(__import__(module_path, globals(), locals(), name),
                        name))
            p = {
                k: (recorder["params"][k], doc_par[k][0], doc_par[k][1])
                for k in recorder["params"].keys()
            }

            recorders_docs.append({
                'code':
                module_path + '.' + name,
                'source':
                sh,
                'params':
                p,
                'variables':
                recorder["variables"],
                'short_description':
                parse_docstring(
                    getattr(__import__(module_path, globals(), locals(), name),
                            name).__doc__)["short_description"],
                'long_description':
                parse_docstring(
                    getattr(__import__(module_path, globals(), locals(), name),
                            name).__doc__)["long_description"],
            })

    # load basic info

    if os.path.exists(os.path.join(path, 'info')):
        f = open(os.path.join(path, 'info'), 'r')
        info = eval(f.read())
    else:
        info = {}
        info["creation_data"] = "????"
        info["simulation_run_name"] = "???"
        info["model_name"] = "??"

    # let's load up the results
    results = []

    if os.path.exists(os.path.join(path, 'results')):
        f = open(os.path.join(path, 'results'), 'r')
        lines = list(set([line for line in f]))
        lines = [eval(line) for line in lines]
    else:
        lines = []

    # Figures that may have been produced outside the standard 'results'
    # bookkeeping; append any that exist on disk.
    extra_figures = [
        'TrialToTrialVariabilityComparison.png',
        'TrialToTrialVariabilityComparisonNew.png',
        'SpontStatisticsOverview.png',
        'Orientation_responseL23.png',
        'Orientation_responseL4.png',
        'Orientation_responseInhL23.png',
        'Orientation_responseInh23.png',
        'MR.png',
        'MRReal.png',
        'aaa.png',
        'bbb.png',
        'Orientation_responsInheL4.png',
        'GratingExcL23.png',
        'GratingInhL23.png',
        'GratingExcL4.png',
        'GratingInhL4.png',
        'SpontExcL23.png',
        'SpontInhL23.png',
        'SpontExcL4.png',
        'SpontInhL4.png',
        'NatExcL4.png',
    ]
    for fname in extra_figures:
        if os.path.exists(os.path.join(path, fname)):
            lines.append({
                'parameters': {},
                'file_name': fname,
                'class_name': ''
            })

    for r in lines:
        # default to .png when the file name has no extension
        if not re.match(r'.*\..*$', r['file_name']):
            r['file_name'] += '.png'
        # class_name is stored as "<class 'x.y.Z'>"; strip the wrapper
        r['code'] = r['class_name'][8:-2]

        if r['code'] != '':
            name = r['code'].split('.')[-1]
            module_path = '.'.join(r['code'].split('.')[:-1])
            doc_par = get_params_from_docstring(
                getattr(__import__(module_path, globals(), locals(), name),
                        name))
            p = {
                k: (r["parameters"][k], doc_par[k][0], doc_par[k][1])
                if k in doc_par else (r["parameters"][k], "", "")
                for k in r["parameters"].keys()
            }
        else:
            p = {}
        r["parameters"] = p
        r["name"] = r['file_name']
        r["figure"] = gfs.put(open(os.path.join(path, r['file_name']), 'rb'))
        results.append(r)

    document = {
        'submission_date': datetime.datetime.now().strftime('%d/%m/%Y-%H:%M:%S'),
        'run_date': info["creation_data"],
        'simulation_run_name': info["simulation_run_name"],
        'model_name': info["model_name"],
        'model_description': info["model_docstring"] if 'model_docstring' in info else '',
        'results': results,
        'stimuli': stim_docs,
        'recorders': recorders_docs,
        'experimental_protocols': experimental_protocols_docs,
        # the parameter tree is serialized to a JSON string via the custom encoder
        'parameters': json.dumps(param, cls=ParametersEncoder)
    }
    return document
Code example #49
0
# opening of the call reconstructed from the identical script in code example #54
data_store = PickledDataStore(load=True,
                              parameters=ParameterSet({
                                  'root_directory': sys.argv[1],
                                  'store_stimuli': False
                              }),
                              replace=True)

NeuronAnnotationsToPerNeuronValues(data_store, ParameterSet({})).analyse()
analog_ids = queries.param_filter_query(
    data_store,
    sheet_name="V1_Exc_L4").get_segments()[0].get_stored_esyn_ids()

dsv = queries.param_filter_query(data_store, st_name='FlashedBar')
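# Reset the bar position on every stimulus id: stimuli that differ only in x
# become indistinguishable, so later queries treat them as repeats of the
# same stimulus.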
for ads in dsv.get_analysis_result():
    sid = MozaikParametrized.idd(ads.stimulus_id)
    sid.x = 0
    ads.stimulus_id = str(sid)
for seg in dsv.get_segments():
    sid = MozaikParametrized.idd(seg.annotations['stimulus'])
    sid.x = 0
    seg.annotations['stimulus'] = str(sid)
for seg in dsv.get_segments(null=True):
    sid = MozaikParametrized.idd(seg.annotations['stimulus'])
    sid.x = 0
    seg.annotations['stimulus'] = str(sid)


def save_data(dirname, dsv, name):

    try:
Code example #50
0
    def process_input(self, visual_space, stimulus, duration=None, offset=0):
        """
        Present a visual stimulus to the model, and create the LGN output
        (relay) neurons.
        
        Parameters
        ----------
        visual_space : VisualSpace
                     The visual space to which the stimuli are presented.
                     
        stimulus : VisualStimulus    
                 The visual stimulus to be shown.
        
        duration : int (ms)
                 The time for which we will simulate the stimulus
        
        offset : int (ms)
               The time (in absolute time of the whole simulation) at which the stimulus starts.
        
        Returns
        -------
        retinal_input : list(ndarray)
                      List of 2D arrays containing the frames of luminances that were presented to the retina.
        """
        logger.debug("Presenting visual stimulus from visual space %s" %
                     visual_space)
        visual_space.set_duration(duration)
        self.input = visual_space
        st = MozaikParametrized.idd(stimulus)
        st.trial = None  # to avoid recalculating RFs response to multiple trials of the same stimulus

        cached = self.get_cache(st)

        if cached is None:
            logger.debug("Generating output spikes...")
            # Even if we didn't find the stimulus in cache, we still check if we haven't already presented it during this simulation run.
            # This is mainly to avoid regenerating stimuli for multiple trials.

            if str(st) in self.internal_stimulus_cache:
                (input_currents,
                 retinal_input) = self.internal_stimulus_cache[str(st)]
            else:
                (input_currents,
                 retinal_input) = self._calculate_input_currents(
                     visual_space, duration)
        else:
            logger.debug("Retrieved spikes from cache...")
            (input_currents, retinal_input) = cached

        ts = self.model.sim.get_time_step()

        for rf_type in self.rf_types:
            assert isinstance(input_currents[rf_type], list)
            for i, (lgn_cell, input_current, scs, ncs) in enumerate(
                    zip(self.sheets[rf_type].pop, input_currents[rf_type],
                        self.scs[rf_type], self.ncs[rf_type])):
                assert isinstance(input_current, dict)
                t = input_current['times'] + offset
                a = self.parameters.linear_scaler * input_current['amplitudes']
                scs.set_parameters(times=t, amplitudes=a, copy=False)
                if self.parameters.mpi_reproducible_noise:
                    t = numpy.arange(0, duration, ts) + offset
                    amplitudes = (self.parameters.noise.mean +
                                  self.parameters.noise.stdev *
                                  self.ncs_rng[rf_type][i].randn(len(t)))
                    ncs.set_parameters(times=t,
                                       amplitudes=amplitudes,
                                       copy=False)

        # for debugging/testing only; note that this does not work with MPI
        #input_current_array = numpy.zeros((self.shape[1], self.shape[0], len(visual_space.time_points(duration))))
        #update_factor = int(visual_space.update_interval/self.parameters.receptive_field.temporal_resolution)
        #logger.debug("input_current_array.shape = %s, update_factor = %d, p.dim = %s" % (input_current_array.shape, update_factor, self.shape))
        #k = 0
        #for i in range(self.shape[1]): # self.shape gives (x,y), so self.shape[1] is the height
        #    for j in range(self.shape[0]):
        # where the kernel temporal resolution is finer than the frame update interval,
        # we only keep the current values at the start of each frame
        #        input_current_array[i,j, :] = input_currents['X_ON'][k]['amplitudes'][::update_factor]
        #        k += 1

        # if record() has already been called, setup the recording now
        self._built = True
        self.write_cache(st, input_currents, retinal_input)
        # also save into internal cache
        self.internal_stimulus_cache[str(st)] = (input_currents, retinal_input)

        return retinal_input
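
A minimal usage sketch of process_input, based only on the docstring above; the lgn, visual_space and stimulus objects are assumed to have been constructed elsewhere and their names are illustrative:

# Hypothetical usage: present one stimulus for 1000 ms starting at t=0 and
# collect the luminance frames that were shown to the retina.
frames = lgn.process_input(visual_space, stimulus, duration=1000, offset=0)
assert all(f.ndim == 2 for f in frames)  # each frame is a 2D luminance array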
Code example #51
0
# stdlib/third-party imports assumed by this snippet; helpers such as
# parse_docstring, get_params_from_docstring, load_parameters,
# remove_identation and ParametersEncoder are assumed to be defined in the
# surrounding module
import os
import re
import json
import datetime
import numpy
import imageio
from parameters import ParameterSet
from mozaik.storage.datastore import PickledDataStore
from mozaik.tools.mozaik_parametrized import MozaikParametrized


def createSimulationRunDocumentAndUploadImages(path, gfs):

    data_store = PickledDataStore(load=True,
                                  parameters=ParameterSet({
                                      'root_directory':
                                      path,
                                      'store_stimuli':
                                      False
                                  }),
                                  replace=False)

    # load the run parameters
    param = load_parameters(os.path.join(path, 'parameters'), {})

    ##### STIMULI ###########
    stimuli = [MozaikParametrized.idd(s) for s in data_store.get_stimuli()]
    unique_stimuli = [
        MozaikParametrized.idd(s) for s in set(data_store.get_stimuli())
    ]
    stimuli_types = {}
    for s in stimuli:
        stimuli_types[s.name] = True

    stim_docs = []
    i = 0
    for s in unique_stimuli:

        print data_store.sensory_stimulus.keys()
        raws = data_store.get_sensory_stimulus([str(s)])[0]
        if raws is None:
            # placeholder frames for stimuli whose raw movie was not stored
            raws = numpy.array([[[0, 0], [0, 0.1]], [[0, 0], [0, 0]]])
        if param['input_space'] is not None:
            imageio.mimwrite('movie' + str(i) + '.gif',
                             raws,
                             duration=param['input_space']['update_interval'] /
                             1000.0)
        else:
            imageio.mimwrite('movie' + str(i) + '.gif', raws, duration=0.1)
        params = s.get_param_values()

        params = dict([(k, (v, s.params()[k].__class__.__name__,
                            s.params()[k].doc)) for k, v in params])

        stim_docs.append({
            'code':
            s.name,
            'parameters':
            params,
            'short_description':
            parse_docstring(
                getattr(__import__(s.module_path, globals(), locals(), s.name),
                        s.name).__doc__)["short_description"],
            'long_description':
            parse_docstring(
                getattr(__import__(s.module_path, globals(), locals(), s.name),
                        s.name).__doc__)["long_description"],
            'movie':
            gfs.put(open('movie' + str(i) + '.gif', 'rb')),
        })
        i += 1

    ##### RECORDERS ###################
    recorders_docs = []
    for sh in param["sheets"].keys():
        for rec in param['sheets'][sh]["params"]["recorders"].keys():
            recorder = param['sheets'][sh]["params"]["recorders"][rec]
            name = recorder["component"].split('.')[-1]
            module_path = '.'.join(recorder["component"].split('.')[:-1])
            # import the recorder class once and reuse it below
            cls = getattr(__import__(module_path, globals(), locals(), name), name)
            doc_par = get_params_from_docstring(cls)
            docstring = parse_docstring(cls.__doc__)
            p = dict([(k, (recorder["params"][k], doc_par[k][0],
                           doc_par[k][1])) for k in recorder["params"].keys()])

            recorders_docs.append({
                'code': name,
                'source': sh,
                'parameters': p,
                'variables': recorder["variables"],
                'short_description': docstring["short_description"],
                'long_description': docstring["long_description"],
            })

    #### EXPERIMENTAL PROTOCOLS ########
    experimental_protocols_docs = []
    for ep in data_store.get_experiment_parametrization_list():
        # ep[0] is stored as "<class 'x.y.Z'>"; strip the wrapper
        name = ep[0][8:-2].split('.')[-1]
        module_path = '.'.join(ep[0][8:-2].split('.')[:-1])
        cls = getattr(__import__(module_path, globals(), locals(), name), name)
        doc_par = get_params_from_docstring(cls)
        params = eval(ep[1])
        p = {}
        for k in params.keys():
            p[k] = (params[k], doc_par[k][0], doc_par[k][1])

        experimental_protocols_docs.append({
            'code': name,
            'parameters': p,
            'short_description': parse_docstring(cls.__doc__)["short_description"],
            'long_description': parse_docstring(cls.__doc__, True)["long_description"],
        })

    # load basic info; the 'info' file stores a repr()-ed dict
    with open(os.path.join(path, 'info'), 'r') as f:
        info = eval(f.read())

    info["model_docstring"] = remove_identation(info["model_docstring"])

    # now load up the results; each line of the 'results' file is a repr()-ed dict
    results = []
    with open(os.path.join(path, 'results'), 'r') as f:
        lines = [eval(line) for line in f]

    # hard-coded figure files that live directly on disk rather than in the
    # 'results' file; include each one that exists
    for fname in ['Orientation_responseL23.png', 'Orientation_responseL4.png',
                  'MR.png', 'MRReal.png']:
        if os.path.exists(os.path.join(path, fname)):
            lines.append({
                'parameters': {},
                'file_name': fname,
                'class_name': ''
            })

    for r in lines:
        # default to .png when the file name has no extension
        if not re.match(r'.*\..*$', r['file_name']):
            r['file_name'] += '.png'
        # class_name is stored as "<class 'x.y.Z'>"; strip the wrapper
        r['code'] = r['class_name'][8:-2]
        del r['class_name']
        name = r['code'].split('.')[-1]
        module_path = '.'.join(r['code'].split('.')[:-1])
        cls = getattr(__import__(module_path, globals(), locals(), name), name)
        long_description = parse_docstring(cls.__doc__)["long_description"]
        doc_par = get_params_from_docstring(cls)
        p = dict([(k, (r["parameters"][k], doc_par[k][0], doc_par[k][1]))
                  for k in r["parameters"].keys()])
        r["parameters"] = p
        r["name"] = r['file_name']
        r["caption"] = long_description
        # figures are binary files, so open them in 'rb' mode
        r["figure"] = gfs.put(open(os.path.join(path, r['file_name']), 'rb'))
        results.append(r)

    # convert param to follow Arkheia format
    def convertParams(params):
        p = {}
        for k in params.keys():
            if isinstance(params[k], ParameterSet):
                p[k] = (convertParams(params[k]), 'N/A', 'N/A')
            else:
                p[k] = (params[k], 'N/A', 'N/A')
        return p

    document = {
        'submission_date': datetime.datetime.now().strftime('%d/%m/%Y-%H:%M:%S'),
        'run_date': info["creation_data"],
        'simulation_run_name': info["simulation_run_name"],
        'model_name': info["model_name"],
        'model_info': info["model_docstring"],
        'results': results,
        'stimuli': stim_docs,
        'recorders': recorders_docs,
        'experimental_protocols': experimental_protocols_docs,
        # round-trip through the custom encoder so the stored value is plain JSON data
        'parameters': json.loads(json.dumps(convertParams(param), cls=ParametersEncoder))
    }
    return document
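
A minimal driver sketch for this function, assuming a locally running MongoDB instance; the database and collection names ('arkheia', 'simulation_runs') are illustrative, not taken from the source:

import sys
import pymongo
import gridfs

# connect, build a GridFS bucket for the figures/movies, and store the document
db = pymongo.MongoClient()['arkheia']
gfs = gridfs.GridFS(db)
doc = createSimulationRunDocumentAndUploadImages(sys.argv[1], gfs)
db['simulation_runs'].insert_one(doc)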
Code example #52
0
        def plot(self):
            self.fig = pylab.figure(facecolor='w', **self.fig_param)
            gs = gridspec.GridSpec(1, 1)
            gs.update(left=0.07, right=0.97, top=0.9, bottom=0.1)
            gs = gs[0,0]
            
            gs = gridspec.GridSpecFromSubplotSpec(4, 5,subplot_spec=gs)

            orr = list(set([MozaikParametrized.idd(s).orientation for s in queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',st_contrast=100).get_stimuli()]))        
            l4_exc_or = self.datastore.get_analysis_result(identifier='PerNeuronValue',value_name = 'LGNAfferentOrientation', sheet_name = 'V1_Exc_L4')

            
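            # pick the grating orientation closest to this neuron's LGN afferent
            # orientation preference (smallest circular distance on [0, pi))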
            col = orr[numpy.argmin([circular_dist(o,l4_exc_or[0].get_value_by_id(self.parameters.neuron),numpy.pi)  for o in orr])]
            #segs = queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',st_contrast=100,st_orientation=col,sheet_name='V1_Exc_L4').get_segments()
            #signals = [seg.get_vm(self.parameters.neuron) for seg in segs] 
            dsv = queries.param_filter_query(self.datastore,st_name='FullfieldDriftingSinusoidalGrating',sheet_name='V1_Exc_L4',st_contrast=100,analysis_algorithm='ActionPotentialRemoval',st_orientation=col)
            assert queries.ads_with_equal_stimuli(dsv,except_params=["trial"])
            adss = dsv.get_analysis_result()
            signals = [ads.get_asl_by_id(self.parameters.neuron) for ads in adss] 
            
            (signal,noise,snr) = self.wavelet_decomposition(signals)
            
            ax = pylab.subplot(gs[0,0:2])            
            for s in signals:
                ax.plot(s,c='k')
            pylab.ylabel('Vm')
            pylab.title("Gratings",fontsize=20)
            pylab.xlim(0,len(signals[0]))
            pylab.ylim(-80,-50)
            
            ax = pylab.subplot(gs[1,0:2])            
            ax.imshow(signal,aspect='auto',origin='lower')
            pylab.ylabel('Signal')
             
            ax = pylab.subplot(gs[2,0:2])            
            ax.imshow(noise,aspect='auto',origin='lower')
            pylab.ylabel('Noise')

            ax = pylab.subplot(gs[3,0:2])            
            ax.imshow(snr,aspect='auto',origin='lower')
            pylab.ylabel('SNR')
            pylab.xlabel('time')
            
            
            #segs = queries.param_filter_query(self.datastore,st_name='NaturalImageWithEyeMovement',sheet_name='V1_Exc_L4').get_segments()
            #signals = [seg.get_vm(self.parameters.neuron) for seg in segs] 
            dsv = queries.param_filter_query(self.datastore,st_name='NaturalImageWithEyeMovement',sheet_name='V1_Exc_L4',analysis_algorithm='ActionPotentialRemoval')
            assert queries.ads_with_equal_stimuli(dsv,except_params=["trial"])
            adss = dsv.get_analysis_result()
            signals = [ads.get_asl_by_id(self.parameters.neuron) for ads in adss] 
           
            (signal_ni,noise_ni,snr_ni) = self.wavelet_decomposition(signals)
            
            ax = pylab.subplot(gs[0,2:4])            
            for s in signals:
                ax.plot(s,c='k')
            pylab.xlim(0,len(signals[0]))                
            pylab.ylim(-80,-50)
            pylab.title("NI",fontsize=20)
            ax = pylab.subplot(gs[1,2:4])            
            ax.imshow(signal_ni,aspect='auto',origin='lower')

            ax = pylab.subplot(gs[2,2:4])            
            ax.imshow(noise_ni,aspect='auto',origin='lower')

            ax = pylab.subplot(gs[3,2:4])            
            ax.imshow(snr_ni,aspect='auto',origin='lower')
            pylab.xlabel('time')
            
            ax = pylab.subplot(gs[1,4])            
            ax.plot(numpy.mean(signal,axis=1),label="GR")
            ax.plot(numpy.mean(signal_ni,axis=1),label="NI")
            ax.set_xscale('log')
            ax.set_yscale('log')
            pylab.legend()
            
            ax = pylab.subplot(gs[2,4])            
            ax.plot(numpy.mean(noise,axis=1))
            ax.plot(numpy.mean(noise_ni,axis=1))
            ax.set_xscale('log')
            ax.set_yscale('log')
            
            ax = pylab.subplot(gs[3,4])            
            ax.plot(numpy.mean(snr,axis=1))
            ax.plot(numpy.mean(snr_ni,axis=1))
            ax.set_xscale('log')
            ax.set_yscale('log')
            pylab.xlabel("frequency")
            
            
            if self.plot_file_name:
               pylab.savefig(Global.root_directory+self.plot_file_name)              
Code example #53
0
File: queries.py Project: h-mayorquin/mozaik
def param_filter_query(dsv,ads_unique=False,rec_unique=False,**kwargs):
    """
    It will return DSV with only recordings and ADSs with mozaik parameters 
    whose values match the parameter values combinations provided in `kwargs`. 
    
    To restrict mozaik parameters of the stimuli associated with the ADS or recordings 
    pre-pend 'st_' to the parameter name.
    
    For the recordings, parameter sheet refers to the sheet for which the recording was done. 
    
    
    Parameters
    ----------
    
    dsv : DataStoreView
        The input DSV.
    
    ads_unique : bool, optional
               If True the query will raise an exception if the query does not identify a unique ADS.

    rec_unique : bool, optional
               If True the query will raise an exception if the query does not identify a unique recording.
    
    \*\*kwargs : dict
               Remaining keyword arguments will be interpreted as the mozaik parameter names and their associated values that all ADSs
               or recordings have to match. The values of the parameters should be either directly the values to match or a list of values, in which
               case this list is interpreted as *one of* the values that each returned recording or ADS has to match (thus effectively there
               is an *and* operation between the different parameters and an *or* operation between the values specified for the given mozaik parameters). 
               
    Examples
    --------
    >>> datastore.param_filter_query(datastore,identifier=['PerNeuronValue','SingleValue'],sheet_name=sheet,value_name='orientation preference')
    
    This command should return a DSV containing all recordings and ADSs whose identifier is *PerNeuronValue* or *SingleValue*, that are associated with the sheet given by the variable *sheet*, and whose value name is 'orientation preference'.
    Note that since recordings do not have these parameters, this query would return a DSV containing only ADSs.
    
    >>> datastore.param_filter_query(datastore,st_orientation=0.5)
    
    This command should return a DSV containing all recordings and ADSs that are associated with stimuli whose mozaik parameter orientation has value 0.5.
    """
    
    new_dsv = dsv.fromDataStoreView()
    
    st_kwargs = dict([(k[3:],kwargs[k]) for k in kwargs.keys() if k[0:3] == 'st_'])
    kwargs = dict([(k,kwargs[k]) for k in kwargs.keys() if k[0:3] != 'st_'])
    
    seg_st = [MozaikParametrized.idd(seg.annotations['stimulus']) for seg in dsv.block.segments]
    ads_st = [MozaikParametrized.idd(ads.stimulus_id) for ads in dsv.analysis_results if ads.stimulus_id is not None]
    if 'sheet_name' in kwargs:
       if len(kwargs) == 1:
           # 'sheet_name' is the only non-stimulus parameter, so keep exactly the
           # recordings associated with that sheet (otherwise no recordings would pass)
           kw = kwargs['sheet_name'] if isinstance(kwargs['sheet_name'],list) else [kwargs['sheet_name']]
           seg_filtered = set([s for s in dsv.block.segments if s.annotations['sheet_name'] in kw])
       else:
           seg_filtered = set([])
    else:
           seg_filtered = set(dsv.block.segments)
    
    ads_filtered= set(filter_query(dsv.analysis_results,**kwargs))
    
    if st_kwargs != {}:
       seg_filtered_st= set(filter_query(seg_st,extra_data_list=dsv.block.segments,**st_kwargs)[1]) 
       ads_filtered_st= set(filter_query(ads_st,extra_data_list=[ads for ads in dsv.analysis_results if ads.stimulus_id is not None],**st_kwargs)[1])
    else:
       ads_filtered_st = set(dsv.analysis_results)
       seg_filtered_st = set(dsv.block.segments)
    
    
    seg = seg_filtered_st & seg_filtered
    ads = ads_filtered_st & ads_filtered
    
    new_dsv.sensory_stimulus = dsv.sensory_stimulus_copy()
    new_dsv.block.segments = list(seg)
    new_dsv.analysis_results = list(ads)
    
    if ads_unique and len(ads) != 1:
       raise ValueError("Result was expected to have only single ADS, it contains %d" % len(ads)) 
        
    if rec_unique and len(seg) != 1:
       raise ValueError("Result was expected to have only single Segment, it contains %d" % len(seg)) 
    
    return new_dsv
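
A short usage sketch of param_filter_query; data_store is assumed to be an existing DataStore and the parameter values are illustrative:

# Sketch: select V1_Exc_L4 recordings/ADSs of a full-field grating at either of
# two contrasts (a list value acts as an *or* over the listed values).
dsv = param_filter_query(data_store,
                         sheet_name='V1_Exc_L4',
                         st_name='FullfieldDriftingSinusoidalGrating',
                         st_contrast=[50, 100])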
Code example #54
0
import sys
import numpy
# import paths below are assumed from the mozaik codebase (the snippet does not show them)
from parameters import ParameterSet
from mozaik.storage.datastore import PickledDataStore
from mozaik.storage import queries
from mozaik.tools.mozaik_parametrized import MozaikParametrized
from mozaik.analysis.analysis import NeuronAnnotationsToPerNeuronValues
from mozaik.controller import Global, setup_logging
Global.root_directory = sys.argv[1]+'/'

setup_logging()

data_store = PickledDataStore(load=True,parameters=ParameterSet({'root_directory':sys.argv[1],'store_stimuli' : False}),replace=True)

NeuronAnnotationsToPerNeuronValues(data_store,ParameterSet({})).analyse()
analog_ids = queries.param_filter_query(data_store,sheet_name="V1_Exc_L4").get_segments()[0].get_stored_esyn_ids()


dsv = queries.param_filter_query(data_store,st_name='FlashedBar')
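# Reset the bar position on every stimulus id: stimuli that differ only in x
# become indistinguishable, so later queries treat them as repeats of the
# same stimulus.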
for ads in dsv.get_analysis_result():
    sid = MozaikParametrized.idd(ads.stimulus_id)
    sid.x = 0
    ads.stimulus_id = str(sid)
for seg in dsv.get_segments():
    sid = MozaikParametrized.idd(seg.annotations['stimulus'])
    sid.x = 0
    seg.annotations['stimulus'] = str(sid)
for seg in dsv.get_segments(null=True):
    sid = MozaikParametrized.idd(seg.annotations['stimulus'])
    sid.x = 0
    seg.annotations['stimulus'] = str(sid)


def save_data(dirname,dsv,name):

    try:
Code example #55
0
File: queries.py Project: h-mayorquin/mozaik
def equal_stimulus_type(dsv):
    """
    This function returns True if the DSV contains only recordings associated
    with the same stimulus type, and False otherwise.
    """
    return matching_parametrized_object_params([MozaikParametrized.idd(s) for s in dsv.get_stimuli()],params=['name'])
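
A short usage sketch (dsv is assumed to be an existing DataStoreView):

# guard an analysis that only makes sense for a homogeneous set of stimuli
assert equal_stimulus_type(dsv), "DSV mixes recordings from different stimulus types"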
Code example #56
0
File: queries.py Project: h-mayorquin/mozaik
def equal_stimulus(dsv,except_params):
    """
    This function returns True if the DSV contains only recordings associated
    with stimuli of identical parameter values, with the exception of the parameters in *except_params*.
    """
    # honour the caller-supplied exceptions, as the docstring describes
    return matching_parametrized_object_params([MozaikParametrized.idd(s) for s in dsv.get_stimuli()],except_params=except_params)
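
And a companion sketch for equal_stimulus; here the stimuli may differ only in their trial number:

# True when all stimuli in dsv share identical parameters except 'trial'
assert equal_stimulus(dsv, except_params=['trial'])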
Code example #57
0
# imports assumed from the mozaik codebase (the snippet itself does not show them)
import cPickle
import numpy
from mozaik.storage import queries
from mozaik.storage.queries import param_filter_query
from mozaik.tools.mozaik_parametrized import MozaikParametrized


def perform_analysis_and_visualization(data_store):
    analog_ids = param_filter_query(data_store,sheet_name="V1_Exc_L4").get_segments()[0].get_stored_esyn_ids()
    analog_ids_inh = param_filter_query(data_store,sheet_name="V1_Inh_L4").get_segments()[0].get_stored_esyn_ids()
    spike_ids = param_filter_query(data_store,sheet_name="V1_Exc_L4").get_segments()[0].get_stored_spike_train_ids()
    
    number_of_cells = len(analog_ids)
    print 'NUMBER OF CELLS: ' + str(number_of_cells)
    stimuli_list = ['SparseNoise', 'DenseNoise']
    #stimuli_list = ['SparseNoise']
    save_to = './Data/'
    
    print stimuli_list
    for stimuli_type in stimuli_list: 
        print 'Getting voltage and images for ' + stimuli_type
        
        # Parameters used to build the output file names
        file_format = '.pickle'
        quality = '_15000_21_' # the number of images followed by the interval (in ms) each one is shown for
        
        # Load the segments 
        dsv = queries.param_filter_query(data_store, sheet_name="V1_Exc_L4",st_name=stimuli_type)
        segments = dsv.get_segments()
        stimuli = [MozaikParametrized.idd(s) for s in dsv.get_stimuli()]
        # Take the seeds 
        seeds = [s.experiment_seed for s in stimuli]
        
        print seeds,segments,stimuli 
               
        # Sort them based on their seeds 
        seeds,segments,stimuli = zip(*sorted(zip(seeds,segments,stimuli))) 
        segment_length = segments[0].get_spiketrain(spike_ids[0]).t_stop     
        
        # Values to obtain 
        spikes = [[] for i in segments[0].get_spiketrain(spike_ids)]    
        images = []
        
        ## Extract images 
        print 'Extracting and processing images'
        for i, seg in enumerate(segments):
            """
            First we take out the stimuli and make them as small as we can First we take out the stimuli and make them 
            as small as we can than one pixel assigned to each value of luminance. In order to do so, we first call the class
            And re-adjust is parameter st.density = st.grid_size. After that we successively call the class to extract the images
            frames  
            """
                        
            # First we take the class 
            st = MozaikParametrized.idd_to_instance(stimuli[i])
            st.size_x = 1.0
            st.size_y = 1.0 
            st.density = st.grid_size
            
            fr = st.frames()

            # Pull as many frames as we need (total duration / frame duration = total number of frames)
            ims = [fr.next()[0] for i in xrange(0, int(st.duration / st.frame_duration))]
            # Keep only the first frame of each image (each image is displayed for several consecutive frames)
            ims = [ims[i] for i in xrange(0, len(ims)) if ((i % int(st.time_per_image / st.frame_duration)) == 0)]
            images.append(ims)
        
        # Save the images
        print 'Saving Images'

        # Concatenate and save
        ims = numpy.concatenate(images, axis=0)
        images_filename = save_to + 'images' + quality + stimuli_type + file_format
        f = open(images_filename, 'wb')
        cPickle.dump(ims, f)
        f.close()
        
        ## Get the voltage for all the cells 
        for cell_number in range(number_of_cells):
            print 'Extracting Voltage for cell ', cell_number 
            
            vm = []  # Initialize the voltage list
            for i,seg in enumerate(segments):
                # get vm 
                v = seg.get_vm(analog_ids[cell_number])
                # Check that the voltage traces of consecutive segments line up
                if vm:
                    assert vm[-1][-1] == v.magnitude[0]
                # Append
                vm.append(v.magnitude)
                
              
            # Concatenate the experiments
            print 'Concatenating Voltage'
            vm = [v[:-1] for v in vm] # Take the last element out  
            vm = numpy.concatenate(vm,axis=0)
            
            print 'voltage shape=', numpy.shape(vm)
            # Save the voltage
            print 'Saving Voltage for cell', cell_number
            voltage_filename = save_to + 'vm' + '_cell_' + str(cell_number) + quality + stimuli_type + file_format
            f = open(voltage_filename, 'wb')
            cPickle.dump(vm, f)
            f.close()