Example #1
      def perform_analysis(self):
            for sheet in self.datastore.sheets():
                # Load up spike trains for the right sheet and the corresponding stimuli, and
                # transform spike trains into psth
                dsv = select_result_sheet_query(self.datastore,sheet)
                assert equal_ads_except(dsv,['stimulus_id'])
                assert ads_with_equal_stimulus_type(dsv)
                assert equal_stimulus_type(dsv)

                psths = [psth(seg.spiketrains,self.parameters.bin_length) for seg in dsv.get_segments()]
                st = [StimulusID(s) for s in dsv.get_stimuli()]
                
                # average across trials
                psths,stids = colapse(psths,st,parameter_list=['trial'],func=neo_mean,allow_non_identical_stimuli=True)
                
                # retrieve the computed orientation preferences 
                pnvs = self.datastore.get_analysis_result(identifier='PerNeuronValue',sheet_name=sheet,value_name='orientation preference')
                if len(pnvs) != 1:
                   logger.error('ERROR: Expected only one PerNeuronValue per sheet with value_name \'orientation preference\' in datastore, got: ' + str(len(pnvs)))
                   return None
                else:
                   or_pref = pnvs[0]
                
                # for each neuron, find the presented grating orientation closest to its orientation preference
                # first collect the set of orientations that were actually presented:
                ps = {}
                for s in st:
                    # s is already a StimulusID, so read the orientation parameter directly
                    ps[s.params['orientation']] = True
                ps = ps.keys()
                
                # now find the closest presented orientations
                closest_presented_orientation = []
                for i in xrange(0,len(or_pref.values)):
                    # keep the presented orientation with the smallest circular distance (period pi)
                    # to this neuron's orientation preference
                    best_d = numpy.pi
                    idx = 0
                    for j in xrange(0,len(ps)):
                        d = circular_dist(or_pref.values[i],ps[j],numpy.pi)
                        if d < best_d:
                            best_d = d
                            idx = j
                    closest_presented_orientation.append(ps[idx])
                
                closest_presented_orientation = numpy.array(closest_presented_orientation)
                
                # collapse along orientation - we will calculate MR for each parameter combination other than orientation
                d = colapse_to_dictionary(psths,stids,"orientation")
                for (stim,vl) in d.items():
                    # store one modulation ratio per neuron
                    modulation_ratio = numpy.zeros((numpy.shape(psths[0])[1],))
                    frequency = StimulusID(stim).params['temporal_frequency'] * StimulusID(stim).units['temporal_frequency']
                    for (orr,ppsth) in zip(vl[0],vl[1]):
                        # update only the neurons whose closest presented orientation matches this one
                        for j in numpy.nonzero(orr == closest_presented_orientation)[0]:
                            modulation_ratio[j] = self.calculate_MR(ppsth[:,j],frequency)

                    self.datastore.full_datastore.add_analysis_result(PerNeuronValue(modulation_ratio,qt.dimensionless,value_name='Modulation ratio',sheet_name=sheet,tags=self.tags,period=None,analysis_algorithm=self.__class__.__name__,stimulus_id=str(stim)))
                   
                # diagnostic plot: histogram of the modulation ratios from the last stimulus combination processed
                import pylab
                pylab.figure()
                pylab.hist(modulation_ratio)
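
Note: the helper calculate_MR used above is not shown in this snippet. For reference, a modulation ratio is commonly computed as the F1/F0 ratio of the PSTH at the stimulus temporal frequency. The standalone sketch below illustrates that idea with plain numpy, assuming the PSTH is a 1D array with a known bin length in seconds; the function name f1_f0_modulation_ratio is hypothetical and this is not the mozaik implementation of calculate_MR.

    import numpy

    def f1_f0_modulation_ratio(psth_1d, frequency_hz, bin_length_s):
        # Hypothetical illustration, not mozaik's calculate_MR.
        # F0: mean of the PSTH (DC component)
        f0 = numpy.mean(psth_1d)
        if f0 == 0:
            return 0.0
        # F1: amplitude of the Fourier component at the stimulus temporal frequency
        t = numpy.arange(len(psth_1d)) * bin_length_s
        f1 = 2.0 * numpy.abs(numpy.mean(psth_1d * numpy.exp(-2j * numpy.pi * frequency_hz * t)))
        return f1 / f0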
Example #2
   def perform_analysis(self):
     for sheet in self.datastore.sheets():
         # Load up spike trains for the right sheet and the corresponding stimuli, and
         # transform spike trains into psth
         dsv = select_result_sheet_query(self.datastore,sheet)
         psths = [psth(seg.spiketrains,self.parameters.bin_length) for seg in dsv.get_segments()]
         st = [StimulusID(s) for s in dsv.get_stimuli()]
 
         # average across trials
         psths,stids = colapse(psths,st,parameter_list=['trial'],func=neo_mean,allow_non_identical_stimuli=True)
         
         for ppsth,stid in zip(psths,stids):
             t_start = ppsth[0].t_start
             duration = ppsth[0].t_stop-ppsth[0].t_start
             al = []
             for n in self.parameters.neurons:
                 # full (all-lags) autocorrelation of this neuron's trial-averaged PSTH
                 ac = numpy.correlate(numpy.array(ppsth[:,n]), numpy.array(ppsth[:,n]), mode='full')
                 # normalize by the signal's sum of squares so the zero-lag value is 1,
                 # guarding against division by zero for silent neurons
                 div = numpy.sum(numpy.power(numpy.array(ppsth[:,n]),2))
                 if div != 0:
                     ac = ac / div
                 al.append(AnalogSignal(ac, t_start=-duration,t_stop=duration-self.parameters.bin_length*t_start.units,sampling_period=self.parameters.bin_length*qt.ms,units=qt.dimensionless))
                
             logger.debug('Adding AnalogSignalList:' + str(sheet))
             self.datastore.full_datastore.add_analysis_result(AnalogSignalList(al,self.parameters.neurons,qt.ms,qt.dimensionless,x_axis_name='time',y_axis_name='autocorrelation',sheet_name=sheet,tags=self.tags,analysis_algorithm=self.__class__.__name__,stimulus_id=str(stid)))
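
As a quick sanity check on the normalization used above: dividing the full autocorrelation by the signal's sum of squares makes the zero-lag value equal to 1. The short standalone numpy snippet below (with made-up data) demonstrates this; it is an illustration only, not part of the analysis code.

    import numpy

    x = numpy.random.rand(200)                 # made-up PSTH-like signal
    ac = numpy.correlate(x, x, mode='full')    # all lags, length 2*len(x) - 1
    ac = ac / numpy.sum(numpy.power(x, 2))     # same normalization as in the loop above
    assert numpy.isclose(ac[len(x) - 1], 1.0)  # zero lag sits at index len(x) - 1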