def perform_analysis(self):
    """Compute a surround suppression index for each selected neuron.

    Collects the PerNeuronValue results recorded for the
    'DriftingSinusoidalGratingCenterSurroundStimulus' stimulus, groups them
    into tuning curves along the 'surround_orientation' parameter and, for
    each remaining stimulus combination, stores a new PerNeuronValue holding
    the ratio response(surround_orientation=0) /
    response(surround_orientation=pi/2) per neuron.
    """
    dsv = queries.param_filter_query(self.datastore,
                                     identifier='PerNeuronValue',
                                     sheet_name=self.parameters.sheet_name,
                                     st_name='DriftingSinusoidalGratingCenterSurroundStimulus')
    if len(dsv.get_analysis_result()) == 0:
        return
    assert queries.ads_with_equal_stimulus_type(dsv)
    assert queries.equal_ads(dsv, except_params=['stimulus_id'])
    self.pnvs = dsv.get_analysis_result()
    # get stimuli
    self.st = [MozaikParametrized.idd(s.stimulus_id) for s in self.pnvs]
    # transform the pnvs into a dictionary of tuning curves along the
    # 'surround_orientation' parameter, ordered according to the first pnv's ids
    self.tc_dict = colapse_to_dictionary(
        [z.get_value_by_id(self.parameters.neurons) for z in self.pnvs],
        self.st,
        "surround_orientation")
    for k in self.tc_dict.keys():
        sis = []
        # we will do the calculation neuron by neuron
        # (range instead of Py2-only xrange, consistent with the other analyses)
        for i in range(0, len(self.parameters.neurons)):
            ors = self.tc_dict[k][0]
            values = numpy.array([a[i] for a in self.tc_dict[k][1]])
            d = {}
            for o, v in zip(ors, values):
                d[o] = v
            # ratio of parallel-surround to orthogonal-surround response;
            # guard against a zero orthogonal response (same convention as the
            # size-tuning analysis: report 0 instead of dividing by zero)
            if d[numpy.pi / 2] != 0:
                sis.append(d[0] / d[numpy.pi / 2])
            else:
                sis.append(0)
        self.datastore.full_datastore.add_analysis_result(
            PerNeuronValue(sis, self.parameters.neurons, None,
                           value_name='Suppression index of ' + self.pnvs[0].value_name,
                           sheet_name=self.parameters.sheet_name,
                           tags=self.tags,
                           period=None,
                           analysis_algorithm=self.__class__.__name__,
                           stimulus_id=str(k)))
def __init__(self, datastore, parameters, plot_file_name=None, fig_param=None):
    """Collect per-neuron tuning curves from *datastore* and prepare them for plotting.

    Partitions the analysis results by ``value_name``, extracts for each
    partition the PerNeuronValue results of the requested sheet, and builds
    tuning curves along ``parameters.parameter_name`` sorted by that
    parameter's values.
    """
    Plotting.__init__(self, datastore, parameters, plot_file_name, fig_param)
    self.st = []
    self.tc_dict = []
    self.pnvs = []
    self.max_mean_response_indexes = []
    assert queries.ads_with_equal_stimulus_type(datastore)
    assert len(self.parameters.neurons) > 0, "ERROR, empty list of neurons specified"
    partitions = queries.partition_analysis_results_by_parameters_query(
        self.datastore, parameter_list=['value_name'], excpt=True)
    for partition in partitions:
        filtered = queries.param_filter_query(partition,
                                              identifier='PerNeuronValue',
                                              sheet_name=self.parameters.sheet_name)
        results = filtered.get_analysis_result()
        assert matching_parametrized_object_params(results, params=['value_name'])
        self.pnvs.append(results)
        # stimuli corresponding to this partition's results
        stimuli = [MozaikParametrized.idd(r.stimulus_id) for r in results]
        self.st.append(stimuli)
        # dictionary of tuning curves along parameter_name; values are ordered
        # according to the ids in the first pnv
        curves = colapse_to_dictionary(
            [r.get_value_by_id(self.parameters.neurons) for r in results],
            stimuli,
            self.parameters.parameter_name)
        # sort every curve's entries by the parameter_name values
        for key in curves:
            params, vals = curves[key]
            par, val = zip(*sorted(zip(params, numpy.array(vals))))
            curves[key] = (par, numpy.array(val))
        self.tc_dict.append(curves)
        if self.parameters.centered:
            # index of the condition with the largest response summed over curves
            self.max_mean_response_indexes.append(
                numpy.argmax(sum([c[1] for c in curves.values()]), axis=0))
def perform_analysis(self):
    """Size-tuning analysis over 'DriftingSinusoidalGratingDisk' stimuli.

    For each stimulus combination and each selected neuron it derives, from
    the response-vs-radius tuning curve:

    * the maximum response,
    * the radius of maximum facilitation (peak of the curve),
    * the radius of maximum suppression (minimum beyond the peak),
    * the suppression index (peak - suppressed) / peak.

    Each quantity is stored back into the datastore as a PerNeuronValue.
    """
    dsv = queries.param_filter_query(self.datastore,
                                     identifier='PerNeuronValue',
                                     sheet_name=self.parameters.sheet_name,
                                     st_name='DriftingSinusoidalGratingDisk')
    if len(dsv.get_analysis_result()) == 0:
        return
    assert queries.ads_with_equal_stimulus_type(dsv)
    assert queries.equal_ads(dsv, except_params=['stimulus_id'])
    self.pnvs = dsv.get_analysis_result()
    # get stimuli
    self.st = [MozaikParametrized.idd(s.stimulus_id) for s in self.pnvs]
    # transform the pnvs into a dictionary of tuning curves along the 'radius'
    # parameter, ordered according to the first pnv's ids
    self.tc_dict = colapse_to_dictionary(
        [z.get_value_by_id(self.parameters.neurons) for z in self.pnvs],
        self.st,
        "radius")
    for k in self.tc_dict.keys():
        crf_sizes = []
        supp_sizes = []
        sis = []
        max_responses = []
        # we will do the calculation neuron by neuron
        for i in range(0, len(self.parameters.neurons)):
            rads = self.tc_dict[k][0]
            values = numpy.array([a[i] for a in self.tc_dict[k][1]])
            # sort them based on radiuses
            rads, values = zip(*sorted(zip(rads, values)))
            max_response = numpy.max(values)
            crf_index = numpy.argmax(values)
            crf_size = rads[crf_index]
            # the most suppressed response beyond the peak.
            # BUGFIX: this used argmax, which picked the *largest* response
            # after the peak and thus underestimated suppression; the
            # suppression point is the minimum, hence argmin.
            if crf_index < len(values) - 1:
                supp_index = crf_index + numpy.argmin(values[crf_index + 1:]) + 1
            else:
                supp_index = len(values) - 1
            supp_size = rads[supp_index]
            # suppression index; guard against a zero peak response
            if values[crf_index] != 0:
                si = (values[crf_index] - values[supp_index]) / values[crf_index]
            else:
                si = 0
            crf_sizes.append(crf_size)
            supp_sizes.append(supp_size)
            sis.append(si)
            max_responses.append(max_response)
        units = self.st[0].params()["radius"].units
        self.datastore.full_datastore.add_analysis_result(
            PerNeuronValue(max_responses, self.parameters.neurons, units,
                           value_name='Max. response of ' + self.pnvs[0].value_name,
                           sheet_name=self.parameters.sheet_name, tags=self.tags,
                           period=None, analysis_algorithm=self.__class__.__name__,
                           stimulus_id=str(k)))
        self.datastore.full_datastore.add_analysis_result(
            PerNeuronValue(crf_sizes, self.parameters.neurons, units,
                           value_name='Max. facilitation radius of ' + self.pnvs[0].value_name,
                           sheet_name=self.parameters.sheet_name, tags=self.tags,
                           period=None, analysis_algorithm=self.__class__.__name__,
                           stimulus_id=str(k)))
        self.datastore.full_datastore.add_analysis_result(
            PerNeuronValue(supp_sizes, self.parameters.neurons, units,
                           value_name='Max. suppressive radius of ' + self.pnvs[0].value_name,
                           sheet_name=self.parameters.sheet_name, tags=self.tags,
                           period=None, analysis_algorithm=self.__class__.__name__,
                           stimulus_id=str(k)))
        self.datastore.full_datastore.add_analysis_result(
            PerNeuronValue(sis, self.parameters.neurons, None,
                           value_name='Suppression index of ' + self.pnvs[0].value_name,
                           sheet_name=self.parameters.sheet_name, tags=self.tags,
                           period=None, analysis_algorithm=self.__class__.__name__,
                           stimulus_id=str(k)))
def perform_analysis(self):
    """Compute the modulation ratio of each neuron to full-field gratings.

    For every sheet it takes the trial-averaged PSTHs recorded for the
    'FullfieldDriftingSinusoidalGrating' stimulus, pairs each neuron with
    the presented grating orientation closest to the neuron's orientation
    preference, and stores the resulting modulation ratios as one
    PerNeuronValue per stimulus combination (collapsed along orientation).

    Requires exactly one 'orientation preference' PerNeuronValue per sheet;
    otherwise an error is logged and the analysis aborts.
    """
    for sheet in self.datastore.sheets():
        # Load up PSTHs for the right sheet and the corresponding stimuli
        dsv = queries.param_filter_query(self.datastore,
                                         identifier='AnalogSignalList',
                                         sheet_name=sheet,
                                         analysis_algorithm='PSTH',
                                         st_name='FullfieldDriftingSinusoidalGrating')
        assert queries.equal_ads(dsv, except_params=['stimulus_id']), "It seems PSTH computed in different ways are present in datastore, ModulationRatio can accept only one"
        psths = dsv.get_analysis_result()
        st = [MozaikParametrized.idd(p.stimulus_id) for p in psths]
        # average across trials
        psths, stids = colapse(psths, st, parameter_list=['trial'],
                               func=neo_sum, allow_non_identical_objects=True)
        # retrieve the computed orientation preferences
        pnvs = self.datastore.get_analysis_result(identifier='PerNeuronValue',
                                                  sheet_name=sheet,
                                                  value_name='orientation preference')
        if len(pnvs) != 1:
            logger.error("ERROR: Expected only one PerNeuronValue per sheet "
                         "with value_name 'orientation preference' in datastore, got: "
                         + str(len(pnvs)))
            return None
        or_pref = pnvs[0]
        # find the closest presented grating orientation for every neuron's
        # orientation preference.  The set of presented orientations is sorted
        # so the result is deterministic (a plain dict's key order was not).
        ps = sorted(set(MozaikParametrized.idd(s).orientation for s in st))
        closest_presented_orientation = []
        for pref in or_pref.values:
            # circular distance on the half-circle (orientation has period pi)
            dists = [circular_dist(pref, p, numpy.pi) for p in ps]
            closest_presented_orientation.append(ps[numpy.argmin(dists)])
        closest_presented_orientation = numpy.array(closest_presented_orientation)
        # collapse along orientation - we will calculate MR for each
        # parameter combination other than orientation
        d = colapse_to_dictionary(psths, stids, "orientation")
        for (st, vl) in d.items():
            # here we will store the modulation ratios, one per each neuron
            modulation_ratio = []
            ids = []
            frequency = MozaikParametrized.idd(st).temporal_frequency * MozaikParametrized.idd(st).params()['temporal_frequency'].units
            for (orr, ppsth) in zip(vl[0], vl[1]):
                # only neurons whose preferred orientation maps onto this
                # presented orientation contribute to this PSTH group
                for j in numpy.nonzero(orr == closest_presented_orientation)[0]:
                    if or_pref.ids[j] in ppsth.ids:
                        modulation_ratio.append(
                            self._calculate_MR(ppsth.get_asl_by_id(or_pref.ids[j]), frequency))
                        ids.append(or_pref.ids[j])
            logger.debug('Adding PerNeuronValue:' + str(sheet))
            self.datastore.full_datastore.add_analysis_result(
                PerNeuronValue(modulation_ratio, ids, qt.dimensionless,
                               value_name='Modulation ratio' + '(' + psths[0].x_axis_name + ')',
                               sheet_name=sheet, tags=self.tags, period=None,
                               analysis_algorithm=self.__class__.__name__,
                               stimulus_id=str(st)))
def perform_analysis(self):
    """Size-tuning analysis over 'DriftingSinusoidalGratingDisk' stimuli.

    For each stimulus combination and each selected neuron it derives, from
    the response-vs-radius tuning curve:

    * the maximum response,
    * the radius of maximum facilitation (peak of the curve),
    * the radius of maximum suppression (minimum beyond the peak),
    * the suppression index (peak - suppressed) / peak,
    * the counter-suppression index (rebound - suppressed) / peak.

    Each quantity is stored back into the datastore as a PerNeuronValue.
    """
    dsv = queries.param_filter_query(
        self.datastore,
        identifier='PerNeuronValue',
        sheet_name=self.parameters.sheet_name,
        st_name='DriftingSinusoidalGratingDisk')
    if len(dsv.get_analysis_result()) == 0:
        return
    assert queries.ads_with_equal_stimulus_type(dsv)
    assert queries.equal_ads(dsv, except_params=['stimulus_id'])
    self.pnvs = dsv.get_analysis_result()
    # get stimuli
    self.st = [MozaikParametrized.idd(s.stimulus_id) for s in self.pnvs]
    # transform the pnvs into a dictionary of tuning curves along the 'radius'
    # parameter, ordered according to the first pnv's ids
    self.tc_dict = colapse_to_dictionary(
        [z.get_value_by_id(self.parameters.neurons) for z in self.pnvs],
        self.st, "radius")
    for k in self.tc_dict.keys():
        crf_sizes = []
        supp_sizes = []
        sis = []
        max_responses = []
        csis = []
        # we will do the calculation neuron by neuron
        for i in range(0, len(self.parameters.neurons)):
            rads = self.tc_dict[k][0]
            values = numpy.array([a[i] for a in self.tc_dict[k][1]])
            # sort them based on radiuses
            rads, values = zip(*sorted(zip(rads, values)))
            max_response = numpy.max(values)
            crf_index = numpy.argmax(values)
            crf_size = rads[crf_index]
            # most suppressed response beyond the peak
            if crf_index < len(values) - 1:
                supp_index = crf_index + numpy.argmin(values[crf_index + 1:]) + 1
            else:
                supp_index = len(values) - 1
            supp_size = rads[supp_index]
            # rebound (counter-suppression) peak beyond the suppression point
            if supp_index < len(values) - 1:
                cs_index = supp_index + numpy.argmax(values[supp_index + 1:]) + 1
            else:
                cs_index = len(values) - 1
            # both indices are normalized by the peak response, so a single
            # guard on the denominator values[crf_index] protects both.
            # BUGFIX: csi was previously guarded on values[cs_index], which is
            # not the denominator, allowing a division by zero.
            if values[crf_index] != 0:
                si = (values[crf_index] - values[supp_index]) / values[crf_index]
                csi = (values[cs_index] - values[supp_index]) / values[crf_index]
            else:
                si = 0
                csi = 0
            crf_sizes.append(crf_size)
            supp_sizes.append(supp_size)
            sis.append(si)
            max_responses.append(max_response)
            csis.append(csi)
        units = self.st[0].getParams()["radius"].units
        self.datastore.full_datastore.add_analysis_result(
            PerNeuronValue(max_responses, self.parameters.neurons, units,
                           value_name='Max. response of ' + self.pnvs[0].value_name,
                           sheet_name=self.parameters.sheet_name, tags=self.tags,
                           period=None, analysis_algorithm=self.__class__.__name__,
                           stimulus_id=str(k)))
        self.datastore.full_datastore.add_analysis_result(
            PerNeuronValue(crf_sizes, self.parameters.neurons, units,
                           value_name='Max. facilitation radius of ' + self.pnvs[0].value_name,
                           sheet_name=self.parameters.sheet_name, tags=self.tags,
                           period=None, analysis_algorithm=self.__class__.__name__,
                           stimulus_id=str(k)))
        self.datastore.full_datastore.add_analysis_result(
            PerNeuronValue(supp_sizes, self.parameters.neurons, units,
                           value_name='Max. suppressive radius of ' + self.pnvs[0].value_name,
                           sheet_name=self.parameters.sheet_name, tags=self.tags,
                           period=None, analysis_algorithm=self.__class__.__name__,
                           stimulus_id=str(k)))
        self.datastore.full_datastore.add_analysis_result(
            PerNeuronValue(sis, self.parameters.neurons, None,
                           value_name='Suppression index of ' + self.pnvs[0].value_name,
                           sheet_name=self.parameters.sheet_name, tags=self.tags,
                           period=None, analysis_algorithm=self.__class__.__name__,
                           stimulus_id=str(k)))
        self.datastore.full_datastore.add_analysis_result(
            PerNeuronValue(csis, self.parameters.neurons, None,
                           value_name='Counter-suppression index of ' + self.pnvs[0].value_name,
                           sheet_name=self.parameters.sheet_name, tags=self.tags,
                           period=None, analysis_algorithm=self.__class__.__name__,
                           stimulus_id=str(k)))
def perform_analysis(self):
    """Compute modulation ratio, F0 and F1 of each neuron to full-field gratings.

    For every sheet it takes the trial-averaged PSTHs recorded for the
    'FullfieldDriftingSinusoidalGrating' stimulus, pairs each neuron with
    the presented grating orientation closest to the neuron's orientation
    preference, and stores the modulation ratio together with the F0 and F1
    components as PerNeuronValues, one set per stimulus combination
    (collapsed along orientation).

    Requires exactly one 'orientation preference' PerNeuronValue per sheet;
    otherwise an error is logged and the analysis aborts.
    """
    for sheet in self.datastore.sheets():
        # Load up PSTHs for the right sheet and the corresponding stimuli
        dsv = queries.param_filter_query(
            self.datastore,
            identifier='AnalogSignalList',
            sheet_name=sheet,
            analysis_algorithm='PSTH',
            st_name='FullfieldDriftingSinusoidalGrating')
        assert queries.equal_ads(
            dsv, except_params=['stimulus_id']
        ), "It seems PSTH computed in different ways are present in datastore, ModulationRatio can accept only one"
        psths = dsv.get_analysis_result()
        st = [MozaikParametrized.idd(p.stimulus_id) for p in psths]
        # average across trials
        psths, stids = colapse(psths, st, parameter_list=['trial'],
                               func=neo_sum, allow_non_identical_objects=True)
        # retrieve the computed orientation preferences
        pnvs = self.datastore.get_analysis_result(
            identifier='PerNeuronValue',
            sheet_name=sheet,
            value_name='orientation preference')
        if len(pnvs) != 1:
            logger.error(
                "ERROR: Expected only one PerNeuronValue per sheet "
                "with value_name 'orientation preference' in datastore, got: "
                + str(len(pnvs)))
            return None
        or_pref = pnvs[0]
        # find closest orientation of grating to a given orientation preference of a neuron
        # first collect the distinct presented orientations
        # (OrderedDict keeps first-seen order deterministic)
        ps = OrderedDict()
        for s in st:
            ps[MozaikParametrized.idd(s).orientation] = True
        ps = list(ps.keys())
        # now find the closest presented orientation per neuron
        closest_presented_orientation = []
        for i in range(0, len(or_pref.values)):
            circ_d = 100000
            idx = 0
            for j in range(0, len(ps)):
                # hoisted: compute the circular distance once per candidate
                dist = circular_dist(or_pref.values[i], ps[j], numpy.pi)
                if circ_d > dist:
                    circ_d = dist
                    idx = j
            closest_presented_orientation.append(ps[idx])
        closest_presented_orientation = numpy.array(closest_presented_orientation)
        # collapse along orientation - we will calculate MR for each
        # parameter combination other than orientation
        d = colapse_to_dictionary(psths, stids, "orientation")
        for (st, vl) in d.items():
            # here we will store the modulation ratios, one per each neuron
            modulation_ratio = []
            f0 = []
            f1 = []
            ids = []
            frequency = MozaikParametrized.idd(st).temporal_frequency * MozaikParametrized.idd(st).getParams()['temporal_frequency'].units
            for (orr, ppsth) in zip(vl[0], vl[1]):
                # only neurons whose preferred orientation maps onto this
                # presented orientation contribute to this PSTH group
                for j in numpy.nonzero(orr == closest_presented_orientation)[0]:
                    if or_pref.ids[j] in ppsth.ids:
                        mr, F0, F1 = self._calculate_MR(
                            ppsth.get_asl_by_id(or_pref.ids[j]).flatten(),
                            frequency)
                        modulation_ratio.append(mr)
                        f0.append(F0)
                        f1.append(F1)
                        ids.append(or_pref.ids[j])
            logger.debug('Adding PerNeuronValue:' + str(sheet))
            self.datastore.full_datastore.add_analysis_result(
                PerNeuronValue(modulation_ratio, ids, qt.dimensionless,
                               value_name='Modulation ratio' + '(' + psths[0].x_axis_name + ')',
                               sheet_name=sheet, tags=self.tags, period=None,
                               analysis_algorithm=self.__class__.__name__,
                               stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(
                PerNeuronValue(f0, ids, qt.dimensionless,
                               value_name='F0' + '(' + psths[0].x_axis_name + ')',
                               sheet_name=sheet, tags=self.tags, period=None,
                               analysis_algorithm=self.__class__.__name__,
                               stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(
                PerNeuronValue(f1, ids, qt.dimensionless,
                               value_name='F1' + '(' + psths[0].x_axis_name + ')',
                               sheet_name=sheet, tags=self.tags, period=None,
                               analysis_algorithm=self.__class__.__name__,
                               stimulus_id=str(st)))