Example #1
def perform_analysis(self):
    dsv1 = queries.param_filter_query(self.datastore, st_name='FullfieldDriftingSinusoidalGrating')
    for sheet in dsv1.sheets():
        dsv = queries.param_filter_query(dsv1, sheet_name=sheet)
        segs1, stids = colapse(dsv.get_segments(), dsv.get_stimuli(), parameter_list=['trial'], allow_non_identical_objects=True)
        for segs, st in zip(segs1, stids):
            first_analog_signal = segs[0].get_esyn(segs[0].get_stored_esyn_ids()[0])
            duration = first_analog_signal.t_stop - first_analog_signal.t_start
            frequency = MozaikParametrized.idd(st).temporal_frequency * MozaikParametrized.idd(st).params()['temporal_frequency'].units
            period = 1 / frequency
            period = period.rescale(first_analog_signal.t_start.units)
            cycles = duration / period
            first_har = int(round(cycles))  # DFT bin index of the stimulus temporal frequency
            e_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs], axis=0).flatten())[0] / len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_esyn_ids()]
            i_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs], axis=0).flatten())[0] / len(segs[0].get_isyn(idd))) for idd in segs[0].get_stored_isyn_ids()]
            v_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs], axis=0).flatten())[0] / len(segs[0].get_vm(idd))) for idd in segs[0].get_stored_vm_ids()]
            e_f1 = [2 * abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs], axis=0).flatten())[first_har] / len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_esyn_ids()]
            i_f1 = [2 * abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs], axis=0).flatten())[first_har] / len(segs[0].get_isyn(idd))) for idd in segs[0].get_stored_isyn_ids()]
            v_f1 = [2 * abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs], axis=0).flatten())[first_har] / len(segs[0].get_vm(idd))) for idd in segs[0].get_stored_vm_ids()]

            cond_units = first_analog_signal.units
            vm_units = segs[0].get_vm(segs[0].get_stored_vm_ids()[0]).units

            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f0, segs[0].get_stored_esyn_ids(), cond_units, value_name='F0_Exc_Cond', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f0, segs[0].get_stored_isyn_ids(), cond_units, value_name='F0_Inh_Cond', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f0, segs[0].get_stored_vm_ids(), vm_units, value_name='F0_Vm', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f1, segs[0].get_stored_esyn_ids(), cond_units, value_name='F1_Exc_Cond', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f1, segs[0].get_stored_isyn_ids(), cond_units, value_name='F1_Inh_Cond', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f1, segs[0].get_stored_vm_ids(), vm_units, value_name='F1_Vm', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
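A minimal standalone sketch (not part of mozaik; every number below is assumed) of the Fourier logic used in this example: when the recording spans a whole number of stimulus cycles, the DFT bin at index round(duration/period) holds the response at the stimulus temporal frequency, so |X[0]|/N recovers the mean (F0) and 2*|X[k]|/N recovers the modulation amplitude (F1).

import numpy

dt = 1.0                    # sampling step, ms (assumed)
duration = 2000.0           # recording length, ms (assumed)
temporal_frequency = 2.0    # stimulus frequency, Hz (assumed)
period = 1000.0 / temporal_frequency           # ms per cycle
t = numpy.arange(0.0, duration, dt)
signal = 5.0 + 3.0 * numpy.sin(2 * numpy.pi * t / period)   # F0 = 5, F1 = 3

first_har = int(round(duration / period))      # bin of the stimulus frequency
spectrum = numpy.fft.fft(signal)
f0 = abs(spectrum[0]) / len(signal)              # -> ~5.0
f1 = 2 * abs(spectrum[first_har]) / len(signal)  # -> ~3.0
print(f0, f1)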
Example #3
def partition_by_stimulus_paramter_query(dsv, parameter_list):
    """
    This query will take all recordings and return a list of DataStoreViews,
    each holding recordings measured to the same stimulus, with the exception
    of the parameters referenced by parameter_list.

    Note that in most cases one wants to do this only against a datastore holding
    only a single Stimulus type! In that case the datastore is partitioned into
    subsets, each holding recordings to the same stimulus with the same parameter
    values, with the exception of the parameters in parameter_list.
    
    Parameters
    ----------
    
    dsv : DataStoreView
        The input DSV.
    
    parameter_list : list(string)
               The list of parameters of the associated stimuli that will vary in the
               returned DSVs; all other stimulus parameters will have the same value
               within each of the returned DSVs.

    """
    assert 'name' not in parameter_list, "One cannot partition against <name> parameter"
    st = dsv.get_stimuli()
    values, st = colapse(dsv.block.segments,
                         st,
                         parameter_list=parameter_list,
                         allow_non_identical_objects=True)
    dsvs = []
    for vals in values:
        new_dsv = dsv.fromDataStoreView()
        new_dsv.analysis_results = dsv.analysis_result_copy()
        new_dsv.block.segments.extend(vals)
        dsvs.append(new_dsv)
    return dsvs
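A hypothetical usage sketch of the function above; data_store and the queries import are assumed to exist as in the other examples, and the stimulus name is illustrative only.

dsv = queries.param_filter_query(data_store, st_name='FullfieldDriftingSinusoidalGrating')
for sub_dsv in partition_by_stimulus_paramter_query(dsv, ['trial']):
    # every segment in sub_dsv was recorded to the same stimulus,
    # differing only in the trial number
    print(len(sub_dsv.get_segments()))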
Example #4
def perform_analysis(self):
    assert queries.equal_stimulus_type(self.datastore), "Data store has to contain only recordings to the same stimulus type"
    st = self.datastore.get_stimuli()[0]
    assert 'temporal_frequency' in MozaikParametrized.idd(st).params(), "The stimulus has to have parameter temporal_frequency which is used as first harmonic"

    for sheet in self.datastore.sheets():
        dsv = queries.param_filter_query(self.datastore, sheet_name=sheet)
        segs1, stids = colapse(dsv.get_segments(), dsv.get_stimuli(), parameter_list=['trial'], allow_non_identical_objects=True)
        for segs, st in zip(segs1, stids):
            first_analog_signal = segs[0].get_esyn(segs[0].get_stored_esyn_ids()[0])
            duration = first_analog_signal.t_stop - first_analog_signal.t_start
            frequency = MozaikParametrized.idd(st).temporal_frequency * MozaikParametrized.idd(st).params()['temporal_frequency'].units
            period = 1 / frequency
            period = period.rescale(first_analog_signal.t_start.units)
            cycles = duration / period
            first_har = int(round(cycles))  # DFT bin index of the stimulus temporal frequency

            e_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs], axis=0).flatten())[0] / len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_esyn_ids()]
            i_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs], axis=0).flatten())[0] / len(segs[0].get_isyn(idd))) for idd in segs[0].get_stored_isyn_ids()]
            v_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs], axis=0).flatten())[0] / len(segs[0].get_vm(idd))) for idd in segs[0].get_stored_vm_ids()]
            e_f1 = [2 * abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs], axis=0).flatten())[first_har] / len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_esyn_ids()]
            i_f1 = [2 * abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs], axis=0).flatten())[first_har] / len(segs[0].get_isyn(idd))) for idd in segs[0].get_stored_isyn_ids()]
            v_f1 = [2 * abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs], axis=0).flatten())[first_har] / len(segs[0].get_vm(idd))) for idd in segs[0].get_stored_vm_ids()]

            cond_units = segs[0].get_esyn(segs[0].get_stored_esyn_ids()[0]).units
            vm_units = segs[0].get_vm(segs[0].get_stored_vm_ids()[0]).units

            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f0, segs[0].get_stored_esyn_ids(), cond_units, value_name='F0_Exc_Cond', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f0, segs[0].get_stored_isyn_ids(), cond_units, value_name='F0_Inh_Cond', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f0, segs[0].get_stored_vm_ids(), vm_units, value_name='F0_Vm', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f1, segs[0].get_stored_esyn_ids(), cond_units, value_name='F1_Exc_Cond', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f1, segs[0].get_stored_isyn_ids(), cond_units, value_name='F1_Inh_Cond', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f1, segs[0].get_stored_vm_ids(), vm_units, value_name='F1_Vm', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
Example #6
def partition_analysis_results_by_stimulus_parameters_query(
        dsv, parameter_list=None, excpt=False):
    """
        This query will take all analysis results and return a list of DataStoreViews,
        each holding analysis results whose associated stimuli share the same values
        of all stimulus parameters except those in parameter_list.

        Note that in most cases one wants to do this only against a datastore holding
        only analysis results measured to the same stimulus type! In that case the
        datastore is partitioned into subsets, each holding results to the same
        stimulus with the same parameter values, with the exception of the
        parameters in parameter_list.
    
        Parameters
        ----------
        
        dsv : DataStoreView
            The input DSV.
        
        parameter_list : list(string)
               The list of stimulus parameters that will vary between the ADSs in the
               returned DSVs; all other parameters will have the same value within
               each of the returned DSVs.

        excpt : bool
               If excpt is True, the meaning of parameter_list is inverted: all stimulus
               parameters except those listed (and 'name') may vary. This is allowed
               only on DSVs holding a single AnalysisDataStructure type.
        """
    if not dsv.analysis_results: return []

    for ads in dsv.analysis_results:
        assert ads.stimulus_id is not None, "partition_analysis_results_by_stimulus_parameters_query accepts only DSV with ADS that all have defined stimulus id"

    st = [
        MozaikParametrized.idd(ads.stimulus_id) for ads in dsv.analysis_results
    ]
    assert parameter_list is not None, "parameter_list has to be given"
    assert isinstance(parameter_list, list), "parameter_list has to be a list"

    if excpt:
        assert matching_parametrized_object_params(
            st, params=['name']
        ), "If excpt==True you have to provide a dsv containing the same ADS type"
        parameter_list = set(
            st[0].getParams().keys()) - (set(parameter_list) | set(['name']))

    values, st = colapse(dsv.analysis_results,
                         st,
                         parameter_list=parameter_list,
                         allow_non_identical_objects=True)
    dsvs = []

    for vals in values:
        new_dsv = dsv.fromDataStoreView()
        new_dsv.block.segments = dsv.recordings_copy()
        new_dsv.sensory_stimulus = dsv.sensory_stimulus_copy()
        new_dsv.analysis_results.extend(vals)
        dsvs.append(new_dsv)
    return dsvs
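A hypothetical sketch of how the excpt flag inverts the meaning of parameter_list; dsv is an assumed DataStoreView whose analysis results share a single stimulus type.

# excpt=False: the listed parameters vary inside each returned DSV
by_trial = partition_analysis_results_by_stimulus_parameters_query(
    dsv, parameter_list=['trial'])
# excpt=True: everything EXCEPT the listed parameters (and 'name') may vary,
# so each returned DSV holds the results for one fixed orientation
per_orientation = partition_analysis_results_by_stimulus_parameters_query(
    dsv, parameter_list=['orientation'], excpt=True)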
Example #7
      def perform_analysis(self):
            
            for sheet in self.datastore.sheets():
                dsv = queries.param_filter_query(self.datastore, sheet_name=sheet)
                if len(dsv.get_segments()) != 0:
                  assert queries.equal_stimulus_type(self.datastore) , "Data store has to contain only recordings to the same stimulus type"
                  st = self.datastore.get_stimuli()[0]
                  assert 'temporal_frequency' in MozaikParametrized.idd(st).getParams(), "The stimulus has to have parameter temporal_frequency which is used as first harmonic"

                  segs1, stids = colapse(dsv.get_segments(),dsv.get_stimuli(),parameter_list=['trial'],allow_non_identical_objects=True)
                  for segs,st in zip(segs1, stids):
                      first_analog_signal = segs[0].get_esyn(segs[0].get_stored_esyn_ids()[0])
                      duration = first_analog_signal.t_stop - first_analog_signal.t_start
                      frequency = MozaikParametrized.idd(st).temporal_frequency * MozaikParametrized.idd(st).getParams()['temporal_frequency'].units
                      period = 1/frequency
                      period = period.rescale(first_analog_signal.t_start.units)
                      cycles = duration / period
                      first_har = int(round(cycles))
                      
                      e_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_esyn_ids()]
                      i_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_isyn(idd))) for idd in segs[0].get_stored_isyn_ids()]
                      v_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs],axis=0).flatten())[0]/len(segs[0].get_vm(idd))) for idd in segs[0].get_stored_vm_ids()]
                      e_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs],axis=0).flatten())[first_har]/len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_esyn_ids()]
                      i_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs],axis=0).flatten())[first_har]/len(segs[0].get_isyn(idd))) for idd in segs[0].get_stored_isyn_ids()]
                      v_f1 = [2*abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs],axis=0).flatten())[first_har]/len(segs[0].get_vm(idd))) for idd in segs[0].get_stored_vm_ids()]
                      
                      cond_units = segs[0].get_esyn(segs[0].get_stored_esyn_ids()[0]).units
                      vm_units = segs[0].get_vm(segs[0].get_stored_vm_ids()[0]).units
                      
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f0,segs[0].get_stored_esyn_ids(),cond_units,value_name = 'F0_Exc_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f0,segs[0].get_stored_isyn_ids(),cond_units,value_name = 'F0_Inh_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f0,segs[0].get_stored_vm_ids(),vm_units,value_name = 'F0_Vm',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f1,segs[0].get_stored_esyn_ids(),cond_units,value_name = 'F1_Exc_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f1,segs[0].get_stored_isyn_ids(),cond_units,value_name = 'F1_Inh_Cond',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        
                      self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f1,segs[0].get_stored_vm_ids(),vm_units,value_name = 'F1_Vm',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(st)))        

                # AnalogSignalList part 
                dsv = queries.param_filter_query(dsv, sheet_name=sheet,name='AnalogSignalList')
                for asl in dsv.get_analysis_result():
                    assert 'temporal_frequency' in MozaikParametrized.idd(asl.stimulus_id).getParams(), "The stimulus has to have parameter temporal_frequency which is used as first harmonic"

                    signals = asl.asl
                    first_analog_signal = signals[0]
                    duration = first_analog_signal.t_stop - first_analog_signal.t_start
                    frequency = MozaikParametrized.idd(asl.stimulus_id).temporal_frequency * MozaikParametrized.idd(asl.stimulus_id).getParams()['temporal_frequency'].units
                    period = 1/frequency
                    period = period.rescale(first_analog_signal.t_start.units)
                    cycles = duration / period
                    first_har = int(round(cycles))

                    f0 = [abs(numpy.fft.fft(signal)[0])/len(signal) for signal in signals]
                    f1 = [2*abs(numpy.fft.fft(signal)[first_har])/len(signal) for signal in signals]
                    
                    self.datastore.full_datastore.add_analysis_result(PerNeuronValue(f0,asl.ids,asl.y_axis_units,value_name = 'F0('+ asl.y_axis_name + ')',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=asl.stimulus_id))                            
                    self.datastore.full_datastore.add_analysis_result(PerNeuronValue(f1,asl.ids,asl.y_axis_units,value_name = 'F1('+ asl.y_axis_name + ')',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=asl.stimulus_id))                                                
Example #8
def partition_analysis_results_by_stimulus_parameters_query(dsv,parameter_list=None,excpt=False):
        """
        This query will take all analysis results and return a list of DataStoreViews,
        each holding analysis results whose associated stimuli share the same values
        of all stimulus parameters except those in parameter_list.

        Note that in most cases one wants to do this only against a datastore holding
        only analysis results measured to the same stimulus type! In that case the
        datastore is partitioned into subsets, each holding results to the same
        stimulus with the same parameter values, with the exception of the
        parameters in parameter_list.
    
        Parameters
        ----------
        
        dsv : DataStoreView
            The input DSV.
        
        parameter_list : list(string)
               The list of stimulus parameters that will vary between the ADSs in the
               returned DSVs; all other parameters will have the same value within
               each of the returned DSVs.

        excpt : bool
               If excpt is True, the meaning of parameter_list is inverted: all stimulus
               parameters except those listed (and 'name') may vary. This is allowed
               only on DSVs holding a single AnalysisDataStructure type.
        """
        if not dsv.analysis_results: return []
            
        for ads in dsv.analysis_results:
            assert ads.stimulus_id is not None, "partition_analysis_results_by_stimulus_parameters_query accepts only DSV with ADS that all have defined stimulus id"
            
        st = [MozaikParametrized.idd(ads.stimulus_id) for ads in dsv.analysis_results]
        assert parameter_list is not None, "parameter_list has to be given"
        assert isinstance(parameter_list, list), "parameter_list has to be a list"
        
        if excpt:
            assert matching_parametrized_object_params(st,params=['name']), "If excpt==True you have to provide a dsv containing the same ADS type"
            parameter_list = set(st[0].params().keys()) - (set(parameter_list) | set(['name']))
        
        
        
        values, st = colapse(dsv.analysis_results,st,parameter_list=parameter_list,allow_non_identical_objects=True)
        dsvs = []

        for vals in values:
            new_dsv = dsv.fromDataStoreView()
            new_dsv.block.segments = dsv.recordings_copy()
            new_dsv.sensory_stimulus = dsv.sensory_stimulus_copy()
            new_dsv.analysis_results.extend(vals)
            dsvs.append(new_dsv)
        return dsvs
Example #9
def partition_analysis_results_by_parameters_query(dsv,
                                                   parameter_list=None,
                                                   excpt=False):
    """
        This query will take all analysis results and return a list of DataStoreViews,
        each holding analysis results that have the same values of all parameters
        except those in parameter_list.

        Note that in most cases one wants to do this only against a datastore holding
        only a single analysis result type! In that case the datastore is partitioned
        into subsets, each holding analysis results with the same parameter values,
        with the exception of the parameters in parameter_list.
        
        Parameters
        ----------
        
        dsv : DataStoreView
            The input DSV.
        
        parameter_list : list(string)
               The list of parameters that will vary in the returned DSVs; all other
               parameters will have the same value within each of the returned DSVs.

        excpt : bool
               If excpt is True, the meaning of parameter_list is inverted: all
               parameters except those listed (and 'name') may vary. This is allowed
               only on DSVs holding the same AnalysisDataStructures.
        """
    if not dsv.analysis_results: return []
    assert parameter_list is not None, "parameter_list has to be given"
    if excpt:
        assert equal_ads_type(
            dsv
        ), "If excpt==True you have to provide a dsv containing the same ADS type"
        parameter_list = set(dsv.analysis_results[0].getParams().keys()) - (
            set(parameter_list) | set(['name']))

    values, st = colapse(dsv.analysis_results,
                         dsv.analysis_results,
                         parameter_list=parameter_list,
                         allow_non_identical_objects=True)
    dsvs = []

    for vals in values:
        new_dsv = dsv.fromDataStoreView()
        new_dsv.block.segments = dsv.recordings_copy()
        new_dsv.sensory_stimulus = dsv.sensory_stimulus_copy()
        new_dsv.analysis_results.extend(vals)
        dsvs.append(new_dsv)
    return dsvs
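A hypothetical usage sketch of the function above; unlike the stimulus-parameter variant, the collapse here runs over the parameters of the analysis data structures themselves. dsv is an assumed DataStoreView, and using value_name as the varying PerNeuronValue parameter is an illustrative choice.

# split the results so that value_name varies inside each partition while
# every other ADS parameter is held fixed
for sub_dsv in partition_analysis_results_by_parameters_query(dsv, parameter_list=['value_name']):
    print(len(sub_dsv.analysis_results))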
Example #10
def exportToElphy(data_store_location, elphy_export_location, sheets=None, threshold=None):
    import os.path
    if not os.path.isdir(elphy_export_location):
       if os.path.exists(elphy_export_location):
          raise ValueError("The elphy export path is not a directory")
       else:
          os.makedirs(elphy_export_location)
              
    setup_logging()
    data_store = PickledDataStore(load=True,parameters=ParameterSet({'root_directory':data_store_location, 'store_stimuli' : False}))
    ps = MP.parameter_value_list([MP.MozaikParametrized.idd(s) for s in data_store.get_stimuli()],'name')
    for i,sn in enumerate(ps):
        if sheets is None: sheets = data_store.sheets()
        
        for shn in sheets:
            dsv = param_filter_query(data_store,st_name = sn,sheet_name = shn)
            if not dsv.get_stimuli(): continue
            varying_parameters = MP.varying_parameters([MP.MozaikParametrized.idd(s) for s in dsv.get_stimuli()])
            
            segments,stimuli = MP.colapse(dsv.get_segments(),[MP.MozaikParametrized.idd(s) for s in dsv.get_stimuli()],parameter_list=['trial'],allow_non_identical_objects=True)
            j = 0 
            for segs,st in zip(segments,stimuli):
                # just make sure all segments are fully loaded; in the future this should be sorted out so that this line can be deleted
                for s in segs: s.load_full()
                
                # create file name:
                filename = "name=" + sn + "#" + "sheet_name=" + shn 
                for pn in varying_parameters:
                    if pn != "trial":
                        filename += "#" + str(pn) + "=" + str(getattr(MP.MozaikParametrized.idd(st),pn)) 
                path = os.path.join(elphy_export_location,filename+".dat")
                
                # if the threshold is defined add spikes into Vms
                if threshold is not None:
                    for seg in segs: addSpikes(seg, threshold)
                    
                createFileFromSegmentList(segs, path)
                print("Finished saving file %d/%d for sheet %s and %d-th stimulus" % (j + 1, len(segments), shn, i))
                # release segments from memory
                for s in segs: s.release()
                j = j + 1
        print "Finished saving %d/%d stimulus" % (i+1,len(ps))
Example #11
    def perform_analysis(self):
        for sheet in self.datastore.sheets():
            # Load up spike trains for the right sheet and the corresponding
            # stimuli, and transform spike trains into psth
            dsv = queries.param_filter_query(self.datastore,identifier='AnalogSignalList',sheet_name=sheet,analysis_algorithm='PSTH',st_name='FullfieldDriftingSinusoidalGrating')
            assert queries.equal_ads(dsv,except_params=['stimulus_id']) , "It seems PSTH computed in different ways are present in datastore, ModulationRatio can accept only one"
            psths = dsv.get_analysis_result()
            st = [MozaikParametrized.idd(p.stimulus_id) for p in psths]
            # average across trials
            psths, stids = colapse(psths,st,parameter_list=['trial'],func=neo_sum,allow_non_identical_objects=True)

            # retrieve the computed orientation preferences
            pnvs = self.datastore.get_analysis_result(identifier='PerNeuronValue',
                                                      sheet_name=sheet,
                                                      value_name='orientation preference')
            if len(pnvs) != 1:
                logger.error("ERROR: Expected only one PerNeuronValue per sheet "
                             "with value_name 'orientation preference' in datastore, got: "
                             + str(len(pnvs)))
                return None
        
            or_pref = pnvs[0]
            # find closest orientation of grating to a given orientation preference of a neuron
            # first find all the different presented stimuli:
            ps = OrderedDict()
            for s in st:
                ps[MozaikParametrized.idd(s).orientation] = True
            ps = list(ps.keys())
            # now find the closest presented orientations
            closest_presented_orientation = []
            for i in range(len(or_pref.values)):
                circ_d = 100000
                idx = 0
                for j in range(len(ps)):
                    d = circular_dist(or_pref.values[i], ps[j], numpy.pi)
                    if circ_d > d:
                        circ_d = d
                        idx = j
                closest_presented_orientation.append(ps[idx])

            closest_presented_orientation = numpy.array(closest_presented_orientation)

            # collapse along orientation - we will calculate MR for each
            # parameter combination other than orientation
            d = colapse_to_dictionary(psths, stids, "orientation")
            for (st, vl) in d.items():
                # here we will store the modulation ratios, one per each neuron
                modulation_ratio = []
                f0 = []
                f1 = []
                ids = []
                frequency = MozaikParametrized.idd(st).temporal_frequency * MozaikParametrized.idd(st).getParams()['temporal_frequency'].units
                for (orr, ppsth) in zip(vl[0], vl[1]):
                    for j in numpy.nonzero(orr == closest_presented_orientation)[0]:
                        if or_pref.ids[j] in ppsth.ids:
                            mr, F0, F1 = self._calculate_MR(ppsth.get_asl_by_id(or_pref.ids[j]).flatten(), frequency)
                            modulation_ratio.append(mr)
                            f0.append(F0)
                            f1.append(F1)
                            ids.append(or_pref.ids[j])
                            
                logger.debug('Adding PerNeuronValue:' + str(sheet))
                self.datastore.full_datastore.add_analysis_result(
                    PerNeuronValue(modulation_ratio,
                                   ids,
                                   qt.dimensionless,
                                   value_name='Modulation ratio' + '(' + psths[0].x_axis_name + ')',
                                   sheet_name=sheet,
                                   tags=self.tags,
                                   period=None,
                                   analysis_algorithm=self.__class__.__name__,
                                   stimulus_id=str(st)))

                self.datastore.full_datastore.add_analysis_result(
                    PerNeuronValue(f0,
                                   ids,
                                   qt.dimensionless,
                                   value_name='F0' + '(' + psths[0].x_axis_name + ')',
                                   sheet_name=sheet,
                                   tags=self.tags,
                                   period=None,
                                   analysis_algorithm=self.__class__.__name__,
                                   stimulus_id=str(st)))
                
                self.datastore.full_datastore.add_analysis_result(
                    PerNeuronValue(f1,
                                   ids,
                                   qt.dimensionless,
                                   value_name='F1' + '(' + psths[0].x_axis_name + ')',
                                   sheet_name=sheet,
                                   tags=self.tags,
                                   period=None,
                                   analysis_algorithm=self.__class__.__name__,
                                   stimulus_id=str(st)))


                import pylab
                pylab.figure()
                pylab.hist(modulation_ratio)
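A standalone sketch of the nearest-orientation matching performed above; circular_dist here is an assumed re-implementation (mozaik ships its own), included only to make the period-pi wrap-around explicit.

import numpy

def circular_dist(a, b, period):
    # shortest distance between a and b on a circle of circumference `period`
    d = abs(a - b) % period
    return min(d, period - d)

presented = [0.0, numpy.pi / 4, numpy.pi / 2, 3 * numpy.pi / 4]
preference = 3.0  # close to pi, which wraps around to orientation 0
closest = min(presented, key=lambda o: circular_dist(preference, o, numpy.pi))
print(closest)  # -> 0.0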
Example #12
    def perform_analysis(self):
        for sheet in self.datastore.sheets():
            # Load up spike trains for the right sheet and the corresponding
            # stimuli, and transform spike trains into psth
            print(sheet)
            self.datastore.print_content()
            dsv = queries.param_filter_query(self.datastore,identifier='AnalogSignalList',sheet_name=sheet,analysis_algorithm='PSTH',st_name='FullfieldDriftingSinusoidalGrating')
            dsv.print_content()
            assert queries.equal_ads(dsv,except_params=['stimulus_id']) , "It seems PSTH computed in different ways are present in datastore, ModulationRatio can accept only one"
            psths = dsv.get_analysis_result()
            st = [MozaikParametrized.idd(p.stimulus_id) for p in psths]
            # average across trials
            psths, stids = colapse(psths,st,parameter_list=['trial'],func=neo_sum,allow_non_identical_objects=True)

            # retrieve the computed orientation preferences
            pnvs = self.datastore.get_analysis_result(identifier='PerNeuronValue',
                                                      sheet_name=sheet,
                                                      value_name='orientation preference')
            
            if len(pnvs) != 1:
                logger.error("ERROR: Expected only one PerNeuronValue per sheet "
                             "with value_name 'orientation preference' in datastore, got: "
                             + str(len(pnvs)))
                return None
        
            or_pref = pnvs[0]
            # find closest orientation of grating to a given orientation preference of a neuron
            # first find all the different presented stimuli:
            ps = {}
            for s in st:
                ps[MozaikParametrized.idd(s).orientation] = True
            ps = list(ps.keys())
            print(ps)
            # now find the closest presented orientations
            closest_presented_orientation = []
            for i in range(len(or_pref.values)):
                circ_d = 100000
                idx = 0
                for j in range(len(ps)):
                    d = circular_dist(or_pref.values[i], ps[j], numpy.pi)
                    if circ_d > d:
                        circ_d = d
                        idx = j
                closest_presented_orientation.append(ps[idx])

            closest_presented_orientation = numpy.array(closest_presented_orientation)

            # collapse along orientation - we will calculate MR for each
            # parameter combination other than orientation
            d = colapse_to_dictionary(psths, stids, "orientation")
            for (st, vl) in d.items():
                # here we will store the modulation ratios, one per each neuron
                modulation_ratio = []
                ids = []
                frequency = MozaikParametrized.idd(st).temporal_frequency * MozaikParametrized.idd(st).params()['temporal_frequency'].units
                for (orr, ppsth) in zip(vl[0], vl[1]):
                    for j in numpy.nonzero(orr == closest_presented_orientation)[0]:
                        if or_pref.ids[j] in ppsth.ids:
                            modulation_ratio.append(self._calculate_MR(ppsth.get_asl_by_id(or_pref.ids[j]),frequency))
                            ids.append(or_pref.ids[j])
                            
                logger.debug('Adding PerNeuronValue:' + str(sheet))
                self.datastore.full_datastore.add_analysis_result(
                    PerNeuronValue(modulation_ratio,
                                   ids,
                                   qt.dimensionless,
                                   value_name='Modulation ratio' + '(' + psths[0].x_axis_name + ')',
                                   sheet_name=sheet,
                                   tags=self.tags,
                                   period=None,
                                   analysis_algorithm=self.__class__.__name__,
                                   stimulus_id=str(st)))

                import pylab
                pylab.figure()
                pylab.hist(modulation_ratio)
Example #13
def exportToElphy(data_store_location,
                  elphy_export_location,
                  sheets=None,
                  threshold=None):
    import os.path
    if not os.path.isdir(elphy_export_location):
        if os.path.exists(elphy_export_location):
            raise ValueError("The elphy export path is not a directory")
        else:
            os.makedirs(elphy_export_location)

    setup_logging()
    data_store = PickledDataStore(load=True,
                                  parameters=ParameterSet({'root_directory': data_store_location,
                                                           'store_stimuli': False}))
    ps = MP.parameter_value_list(
        [MP.MozaikParametrized.idd(s) for s in data_store.get_stimuli()],
        'name')
    for i, sn in enumerate(ps):
        if sheets is None: sheets = data_store.sheets()

        for shn in sheets:
            dsv = param_filter_query(data_store, st_name=sn, sheet_name=shn)
            if not dsv.get_stimuli(): continue
            varying_parameters = MP.varying_parameters(
                [MP.MozaikParametrized.idd(s) for s in dsv.get_stimuli()])

            segments, stimuli = MP.colapse(
                dsv.get_segments(),
                [MP.MozaikParametrized.idd(s) for s in dsv.get_stimuli()],
                parameter_list=['trial'],
                allow_non_identical_objects=True)
            j = 0
            for segs, st in zip(segments, stimuli):
                # just make sure all segments are fully loaded; in the future this should be sorted out so that this line can be deleted
                for s in segs:
                    s.load_full()

                # create file name:
                filename = "name=" + sn + "#" + "sheet_name=" + shn
                for pn in varying_parameters:
                    if pn != "trial":
                        filename += "#" + str(pn) + "=" + str(
                            getattr(MP.MozaikParametrized.idd(st), pn))
                path = os.path.join(elphy_export_location, filename + ".dat")

                # if the threshold is defined add spikes into Vms
                if threshold is not None:
                    for seg in segs:
                        addSpikes(seg, threshold)

                createFileFromSegmentList(segs, path)
                print "Finished saving file %d/%d for sheet %s and %d-th stimulus" % (
                    j + 1, len(segments), shn, i)
                # release segments from memory
                for s in segs:
                    s.release()
                j = j + 1
        print "Finished saving %d/%d stimulus" % (i + 1, len(ps))