Example #1
# Imports assumed by these snippets; pcirc refers to the pycircstat package.
import numpy as np
import matplotlib.pyplot as plt
import pycircstat as pcirc


def histogramFromData(angleData, numBins, **kwargs):
    '''Plot a histogram of the angles with numBins bins, assuming the angles
    lie between 0 and 180 degrees.
    '''
    kwargs.setdefault('title', 'Bobby')
    kwargs.setdefault('ylimit', 0.1)
    # Bin edges chosen so that each bin spans 180/numBins degrees.
    binsRange = [((2.0 * i + 1) / 2) * (180.0 / numBins)
                 for i in xrange(-1, numBins)]
    bins = np.array(binsRange)
    hist1, bins = np.histogram(angleData, bins=bins)
    center = (bins[:-1] + bins[1:]) / 2
    widths = np.diff(bins)
    # Normalise the counts so the histogram integrates to one (a density).
    histplt = hist1 / np.sum(hist1 * widths)
    plt.bar(center, histplt, align='center', width=widths, facecolor='c')
    plt.ylim(0, kwargs['ylimit'])
    plt.title(kwargs['title'])
    plt.show()

    print('Total number of pixels on the centerline is = {}'.format(
        len(angleData)))
    # The angles are axial (0-180 degrees): double them for the circular
    # statistics and halve the resulting mean.
    mean = 0.5 * pcirc.mean(2 * np.array(angleData) * np.pi / 180)
    var = pcirc.var(2 * np.array(angleData) * np.pi / 180)
    print('The circular mean and variance are ' + str(mean * 180 / np.pi) +
          ' and ' + str(var) + ' respectively.')


# histogramFromData(angleData, numBins, title='Bobby', ylimit=0.1)
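This example treats the angles as axial data: an orientation of 178 degrees is only 4 degrees away from 2 degrees, so the angles are doubled before taking circular statistics and the resulting mean is halved. A minimal NumPy-only sketch of that doubling trick on made-up orientations, assuming the usual definitions (circular mean = direction of the mean unit vector, circular variance = one minus its length, which is what pcirc.mean and pcirc.var are expected to return for the doubled angles):

import numpy as np

angles_deg = np.array([178.0, 2.0, 4.0, 176.0])   # hypothetical orientations

doubled = 2 * np.deg2rad(angles_deg)        # map 0-180 degrees onto the full circle
z = np.exp(1j * doubled).mean()             # mean unit vector of the doubled angles
circ_var = 1 - np.abs(z)                    # circular variance, as in pcirc.var
mean_deg = (0.5 * np.rad2deg(np.angle(z))) % 180   # halve the mean to undo the doubling

print(mean_deg, circ_var)                   # mean near 0 degrees, small variance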
Example #2
def test_var():
    data = np.array([
        1.80044838, 2.02938314, 1.03534016, 4.84225057, 1.54256458, 5.19290675,
        2.18474784, 4.77054777, 1.51736933, 0.72727580
    ])
    s = pycircstat.var(data)
    assert_allclose(0.65842, s, atol=0.001, rtol=0.001)
Example #3
def histogramFromData(angleData, numBins, **kwargs):
  '''Plot a histogram of the angles with numBins bins, assuming the angles
  lie between 0 and 180 degrees.
  '''
  kwargs.setdefault('title', 'Bobby')
  kwargs.setdefault('ylimit', 0.1)
  binsRange = [((2.0 * i + 1) / 2) * (180.0 / numBins) for i in xrange(-1, numBins)]
  bins = np.array(binsRange)
  hist1, bins = np.histogram(angleData, bins=bins)
  center = (bins[:-1] + bins[1:]) / 2
  widths = np.diff(bins)
  histplt = hist1 / np.sum(hist1 * widths)
  plt.bar(center, histplt, align='center', width=widths, facecolor='c')
  plt.ylim(0, kwargs['ylimit'])
  plt.title(kwargs['title'])
  plt.show()

  print('Total number of pixels on the centerline is = {}'.format(len(angleData)))
  mean = 0.5 * pcirc.mean(2 * np.array(angleData) * np.pi / 180)
  var = pcirc.var(2 * np.array(angleData) * np.pi / 180)
  print('The circular mean and variance are ' + str(mean * 180 / np.pi) +
        ' and ' + str(var) + ' respectively.')

# histogramFromData(angleData, numBins, title='Bobby', ylimit=0.1)
Example #4
def drawHistograms(fileFactory, msMap, histRange):
    '''
    INPUT: histRange: (start, end) angles of the histogram, in degrees
           msMap: one (histCont, _) entry per file, where histCont is a
                  list of (nBands, mSize, angles) tuples
    '''
    numBands = msMap[0][0][0][0]
    # @TODO 2: 180 could be replaced with histRange[1] - histRange[0]; no use case now
    binsRange = [((2.0 * i + 1) / 2) * (180.0 / numBands)
                 for i in xrange(-1, numBands)]
    bins = np.array(binsRange) + histRange[0]
    #widths = np.zeros(numBands)   # @TODO not required. better way to look
    center = (bins[:-1] + bins[1:]) / 2
    # @TODO 1, review np.diff
    widths = np.diff(bins)
    maxYLimit = _estimateYLimit(msMap, histRange, bins, widths)
    for (xSeg, name), (histCont, _) in itertools.izip(fileFactory, msMap):
        # @TODO maybe make the color map a parameter
        plt.imshow(xSeg, cmap=plt.get_cmap('gray'))
        plt.title('multiscale directional histogram for file {0}'.format(name))
        plt.xticks([])
        plt.yticks([])
        plt.show()
        for index, (nBands, mSize, newAngles) in enumerate(histCont):
            #  hist=msMap[index][0][0] # histogram information
            #  newAngles=msMap[index][0][1] # angle information between 0 and 180 degrees
            ## converting angles to angles between -90 and 90 degrees
            if histRange == (-90, 90):
                newAngles1 = [
                    -180 + angle for angle in newAngles if angle > 90
                ]
                newAngles2 = [angle for angle in newAngles if angle <= 90]
                newAngles = newAngles1 + newAngles2

            #mSize=msMap[index][0][2]

            hist1, bins = np.histogram(newAngles, bins=bins)
            histplt = hist1 / np.sum(hist1 * widths)

            plt.bar(center,
                    histplt,
                    align='center',
                    width=widths,
                    facecolor='c')
            plt.ylim(0, maxYLimit)
            plt.title('At Scale: ' + str(index) + '_msize_' + str(mSize) +
                      ' Angles density')
            plt.show()

            print('Total number of pixels on the centerline is = {}'.format(
                len(newAngles)))
            mean = (0.5) * pcirc.mean(2 * np.array(newAngles) * np.pi / 180)
            var = pcirc.var(2 * np.array(newAngles) * np.pi / 180)
            print('The circular mean and variance are ' +
                  str(-180 + mean * 180 / np.pi) + ' and ' + str(var) +
                  ' respectively.')
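Both histogram examples normalise the raw counts by hand so that the bar areas integrate to one (hist1 / np.sum(hist1 * widths)). For the equal-width bins used here this is the same as NumPy's built-in density normalisation, as this small check with made-up angle data shows:

import numpy as np

np.random.seed(0)
angles = np.random.uniform(0, 180, 1000)   # hypothetical angle data in degrees
bins = np.linspace(0, 180, 19)             # 18 equal-width bins

counts, edges = np.histogram(angles, bins=bins)
widths = np.diff(edges)
manual_density = counts / np.sum(counts * widths)

auto_density, _ = np.histogram(angles, bins=bins, density=True)
assert np.allclose(manual_density, auto_density)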
Example #5
def direct_vector_strength_spectrum(event_times, frequencies):
    """
    Computes the direct vector strength spectrum for the given frequencies.

    :param event_times: event times in seconds
    :param frequencies: locking frequencies in Hz
    :return: vector strength spectrum
    """
    ret = np.asarray([1 - var((event_times % (1. / w)) * w * 2 * np.pi)
                      for w in frequencies])

    return ret
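For each candidate frequency w the comprehension wraps every event time into one period (event_times % (1. / w)), rescales to radians, and takes one minus the circular variance, i.e. the length of the mean resultant vector of those phases. A NumPy-only sketch with synthetic spike times locked to 100 Hz; the helper name and the test data are illustrative, not from the source:

import numpy as np

def vector_strength(event_times, freq):
    # Mean resultant length of the event phases at the given frequency.
    phases = (event_times % (1.0 / freq)) * freq * 2 * np.pi
    return np.abs(np.mean(np.exp(1j * phases)))

np.random.seed(1)
events = np.arange(0, 1, 0.01) + np.random.normal(0, 0.0005, 100)  # ~100 Hz locking

print(vector_strength(events, 100.0))   # close to 1: strong locking
print(vector_strength(events, 123.0))   # much smaller: no locking at this frequency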
Example #6
def _calc_variances(true_values, true_counts, bootstrap_values,
                    bootstrap_counts):

    true_variances = []
    p_vals = []

    bins = 2 * np.pi * np.arange(0, 1, 1. / len(true_values[0]))
    for values, counts, shuffle_values, shuffle_counts in it.izip(
            true_values, true_counts, bootstrap_values, bootstrap_counts):
        true_value = var(bins, values / counts)
        true_variances.append(true_value)

        roi_shuffles = []
        for shuffle in range(shuffle_values.shape[1]):
            roi_shuffles.append(
                var(bins,
                    shuffle_values[:, shuffle] / shuffle_counts[:, shuffle]))
        p_vals.append(percentileofscore(roi_shuffles, true_value) / 100.)

    return true_variances, p_vals
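Here var is called with two arguments, the bin centres in radians and a per-bin weight (the occupancy-normalised values), so the variance is computed over a binned tuning curve rather than over raw samples. A plain-NumPy sketch of that weighted form, assuming the second argument acts as a weight vector and the usual definition of weighted circular variance (one minus the weighted mean resultant length):

import numpy as np

def circ_var_weighted(alpha, w):
    # Circular variance of angles alpha with non-negative weights w:
    # one minus the length of the weighted mean resultant vector.
    w = np.asarray(w, dtype=float)
    z = np.sum(w * np.exp(1j * np.asarray(alpha)))
    return 1.0 - np.abs(z) / w.sum()

# Hypothetical tuning curve over 8 bins around the circle, peaked near pi.
bins = 2 * np.pi * np.arange(0, 1, 1.0 / 8)
rate = np.array([0.1, 0.3, 1.0, 2.5, 1.1, 0.4, 0.2, 0.1])

print(circ_var_weighted(bins, rate))        # well below 1: tuned
print(circ_var_weighted(bins, np.ones(8)))  # 1.0 for a perfectly flat profile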
Example #7
def direct_vector_strength_spectrum(event_times, frequencies):
    """
    Computes the direct vector strength spectrum for the given frequencies.

    :param event_times: event times in seconds
    :param frequencies: locking frequencies in Hz
    :return: vector strength spectrum
    """
    ret = np.asarray([
        1 - var((event_times % (1. / w)) * w * 2 * np.pi) for w in frequencies
    ])

    return ret
Example #8
    def _make_tuples(self, key):
        print('Processing', key['cell_id'])
        sampling_rate, eod = (Baseline() & key).fetch1['samplingrate', 'eod']
        dt = 1. / sampling_rate

        trials = Baseline.LocalEODPeaksTroughs() * Baseline.SpikeTimes() & key

        aggregated_spikes = np.hstack([s / 1000 - p[0] * dt for s, p in zip(*trials.fetch['times', 'peaks'])])

        aggregated_spikes %= 1 / eod

        aggregated_spikes *= eod * 2 * np.pi  # normalize to 2*pi
        key['base_var'], key['base_mean'], key['base_std'] = \
            circ.var(aggregated_spikes), circ.mean(aggregated_spikes), circ.std(aggregated_spikes)
        self.insert1(key)
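This example aligns each trial's spike times (in milliseconds, hence the / 1000) to the trial's first local EOD peak (stored as a sample index, hence the * dt), pools them, and wraps the pooled times into one EOD cycle before handing the phases to circ.var, circ.mean and circ.std. A toy version of the alignment with made-up arrays; the shapes and units follow the snippet above, not the actual DataJoint tables:

import numpy as np

sampling_rate = 20000.0            # Hz, hypothetical
dt = 1.0 / sampling_rate
eod = 800.0                        # Hz, hypothetical

# Two fake trials: spike times in ms and EOD peak positions in sample indices.
trial_spikes_ms = [np.array([12.3, 13.6, 15.1]), np.array([7.8, 9.05])]
trial_peaks_idx = [np.array([200, 225, 250]), np.array([110, 135])]

aggregated_spikes = np.hstack([s / 1000 - p[0] * dt
                               for s, p in zip(trial_spikes_ms, trial_peaks_idx)])
aggregated_spikes %= 1 / eod                 # wrap into one EOD cycle
aggregated_spikes *= eod * 2 * np.pi         # express as phases in [0, 2*pi)
print(aggregated_spikes)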
Example #9
 def mean_var(self, restrictions):
     """
     Computes the mean and variance of the baseline psth
     :param restrictions: restrictions that identify one baseline trial
     :return: mean and variance
     """
     rel = self & restrictions
     spikes = (Baseline.SpikeTimes() & rel).fetch1['times']
     eod = rel.fetch1['eod']
     period = 1 / eod
     factor = 2 * np.pi / period
     t = (spikes % period)
     mu = circ.mean(t * factor) / factor
     sigma2 = circ.var(t * factor) / factor ** 2
     return mu, sigma2
Example #10
 def mean_var(self, restrictions):
     """
     Computes the mean and variance of the baseline psth
     :param restrictions: restrictions that identify one baseline trial
     :return: mean and variance
     """
     rel = self & restrictions
     spikes = (Baseline.SpikeTimes() & rel).fetch1('times')
     eod = rel.fetch1('eod')
     period = 1 / eod
     factor = 2 * np.pi / period
     t = (spikes % period)
     mu = circ.mean(t * factor) / factor
     sigma2 = circ.var(t * factor) / factor**2
     return mu, sigma2
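Examples #9 and #10 fold the spike times into one EOD period, compute circular statistics on the resulting phases, and then undo the scaling: the mean is divided by factor to give seconds and the variance by factor squared to give seconds squared. A self-contained sketch of the same folding; the circular mean and variance are re-implemented with NumPy rather than taken from pycircstat, and the numbers are made up:

import numpy as np

def circ_mean(alpha):
    # Direction of the mean unit vector, wrapped to [0, 2*pi).
    return np.angle(np.mean(np.exp(1j * alpha))) % (2 * np.pi)

def circ_var(alpha):
    # One minus the mean resultant length.
    return 1 - np.abs(np.mean(np.exp(1j * alpha)))

eod = 800.0                                       # hypothetical EOD frequency in Hz
period = 1 / eod
factor = 2 * np.pi / period

np.random.seed(2)
# Hypothetical spikes locked 0.3 ms after the start of each EOD cycle.
spikes = np.arange(500) * period + 0.0003 + np.random.normal(0, 1e-4, 500)
t = spikes % period                               # position within one EOD cycle

mu = circ_mean(t * factor) / factor               # preferred phase, ~0.0003 s
sigma2 = circ_var(t * factor) / factor ** 2       # spread, in seconds squared
print(mu, sigma2)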
Example #11
def drawHistograms(fileFactory, msMap, histRange):
  '''
  INPUT: histRange: (start, end) angles of the histogram, in degrees
         msMap: one (histCont, _) entry per file, where histCont is a
                list of (nBands, mSize, angles) tuples
  '''
  numBands = msMap[0][0][0][0]
  # @TODO 2: 180 could be replaced with histRange[1] - histRange[0]; no use case now
  binsRange = [((2.0 * i + 1) / 2) * (180.0 / numBands) for i in xrange(-1, numBands)]
  bins = np.array(binsRange) + histRange[0]
  #widths = np.zeros(numBands)   # @TODO not required. better way to look
  center = (bins[:-1] + bins[1:]) / 2
  # @TODO 1: review np.diff
  widths = np.diff(bins)
  maxYLimit = _estimateYLimit(msMap, histRange, bins, widths)
  for (xSeg, name), (histCont, _) in itertools.izip(fileFactory,msMap):
    # @TODO maybe make the color map a parameter
    plt.imshow(xSeg, cmap=plt.get_cmap('gray'))
    plt.title('multiscale directional histogram for file {0}'.format(name))
    plt.xticks([])
    plt.yticks([])
    plt.show()
    for index, (nBands, mSize, newAngles) in enumerate(histCont):
    #  hist=msMap[index][0][0] # histogram information
    #  newAngles=msMap[index][0][1] # angle information between 0 and 180 degrees
      ## converting angles to angles between -90 and 90 degrees
      if histRange == (-90, 90):
        newAngles1 = [-180 + angle for angle in newAngles if angle > 90]
        newAngles2 = [angle for angle in newAngles if angle <= 90]
        newAngles = newAngles1 + newAngles2
        
        
      #mSize=msMap[index][0][2]

      hist1, bins = np.histogram(newAngles, bins=bins)
      histplt = hist1 / np.sum(hist1 * widths)

      plt.bar(center, histplt, align='center', width=widths, facecolor='c')
      plt.ylim(0, maxYLimit)
      plt.title('At Scale: ' + str(index) + '_msize_' + str(mSize) + ' Angles density')
      plt.show()

      print('Total number of pixels on the centerline is = {}'.format(len(newAngles)))
      mean = 0.5 * pcirc.mean(2 * np.array(newAngles) * np.pi / 180)
      var = pcirc.var(2 * np.array(newAngles) * np.pi / 180)
      print('The circular mean and variance are ' + str(-180 + mean * 180 / np.pi) +
            ' and ' + str(var) + ' respectively.')
Example #12
    def _make_tuples(self, key):
        print('Processing', key['cell_id'])
        sampling_rate, eod = (Baseline() & key).fetch1('samplingrate', 'eod')
        dt = 1. / sampling_rate

        trials = Baseline.LocalEODPeaksTroughs() * Baseline.SpikeTimes() & key

        aggregated_spikes = np.hstack([
            s / 1000 - p[0] * dt
            for s, p in zip(*trials.fetch('times', 'peaks'))
        ])

        aggregated_spikes %= 1 / eod

        aggregated_spikes *= eod * 2 * np.pi  # normalize to 2*pi
        key['base_var'], key['base_mean'], key['base_std'] = \
            circ.var(aggregated_spikes), circ.mean(aggregated_spikes), circ.std(aggregated_spikes)
        self.insert1(key)
Example #13
    def _make_tuples(self, key):
        key_sub = dict(key)
        delta_f, eod, samplingrate = (Runs() & key).fetch1(
            'delta_f', 'eod', 'samplingrate')
        locking_frequency = (FirstOrderSignificantPeaks()
                             & key).fetch1('frequency')

        if key['eod_coeff'] > 0:
            # convert spikes to s and center on first peak of eod
            # times, peaks = (Runs.SpikeTimes() * LocalEODPeaksTroughs() & key).fetch('times', 'peaks')
            peaks = (Runs.GlobalEODPeaksTroughs() & key).fetch('peaks')
        #
        #     spikes = np.hstack([s / 1000 - p[0] / samplingrate for s, p in zip(times, peaks)])
        else:
            #     # convert spikes to s and center on first peak of stimulus
            #     times, peaks = (Runs.SpikeTimes() * GlobalEFieldPeaksTroughs() & key).fetch('times', 'peaks')
            peaks = (Runs.GlobalEFieldPeaksTroughs() & key).fetch('peaks')
        # spikes = np.hstack([s / 1000 - p[0] / samplingrate for s, p in zip(times, peaks)])

        spikes = np.hstack(TrialAlign().load_trials(key))
        key['peak_frequency'] = samplingrate / np.mean(
            [np.diff(p).mean() for p in peaks])
        key['locking_frequency'] = locking_frequency

        cycle = 1 / locking_frequency
        spikes %= cycle

        key['spikes'] = spikes / cycle * 2 * np.pi
        key['vector_strength'] = 1 - circ.var(key['spikes'])

        self.insert1(key)

        histograms = self.Histograms()
        for n in SamplingPointsPerBin().fetch:
            n = int(n[0])
            bin_width_time = n / samplingrate
            bin_width_radians = bin_width_time / cycle * np.pi * 2
            bins = np.arange(0, cycle + bin_width_time, bin_width_time)
            key_sub['n'] = n
            key_sub['histogram'], _ = np.histogram(spikes, bins=bins)
            key_sub['bin_width_time'] = bin_width_time
            key_sub['bin_width_radians'] = bin_width_radians

            histograms.insert1(key_sub)
Example #14
    def _make_tuples(self, key):
        print('Processing', key['cell_id'], 'run', key['run_id'])
        if SecondOrderSignificantPeaks() & dict(key, eod_coeff=1, stimulus_coeff=0, baseline_coeff=0, refined=1):
            eod, vs = (SecondOrderSignificantPeaks() & dict(key, eod_coeff=1, stimulus_coeff=0, baseline_coeff=0,
                                                            refined=1)).fetch1['frequency', 'vector_strength']
        elif SecondOrderSignificantPeaks() & dict(key, eod_coeff=1, stimulus_coeff=0, baseline_coeff=0, refined=0):
            eod, vs = (SecondOrderSignificantPeaks() & dict(key, eod_coeff=1, stimulus_coeff=0, baseline_coeff=0,
                                                            refined=0)).fetch1['frequency', 'vector_strength']
        else:
            eod = (Runs() & key).fetch1['eod']

        aggregated_spikes = np.hstack(TrialAlign().load_trials(key))
        aggregated_spikes %= 1 / eod

        aggregated_spikes *= eod * 2 * np.pi  # normalize to 2*pi
        if len(aggregated_spikes) > 1:
            key['stim_var'], key['stim_mean'], key['stim_std'] = \
                circ.var(aggregated_spikes), circ.mean(aggregated_spikes), circ.std(aggregated_spikes)
            self.insert1(key)
Example #15
    def _make_tuples(self, key):
        key_sub = dict(key)
        delta_f, eod, samplingrate = (Runs() & key).fetch1['delta_f', 'eod', 'samplingrate']
        locking_frequency = (FirstOrderSignificantPeaks() & key).fetch1['frequency']

        if key['eod_coeff'] > 0:
            # convert spikes to s and center on first peak of eod
            # times, peaks = (Runs.SpikeTimes() * LocalEODPeaksTroughs() & key).fetch['times', 'peaks']
            peaks = (GlobalEODPeaksTroughs() & key).fetch['peaks']
        #
        #     spikes = np.hstack([s / 1000 - p[0] / samplingrate for s, p in zip(times, peaks)])
        else:
            #     # convert spikes to s and center on first peak of stimulus
            #     times, peaks = (Runs.SpikeTimes() * GlobalEFieldPeaksTroughs() & key).fetch['times', 'peaks']
            peaks = (GlobalEFieldPeaksTroughs() & key).fetch['peaks']
        # spikes = np.hstack([s / 1000 - p[0] / samplingrate for s, p in zip(times, peaks)])

        spikes = np.hstack(TrialAlign().load_trials(key))
        key['peak_frequency'] = samplingrate / np.mean([np.diff(p).mean() for p in peaks])
        key['locking_frequency'] = locking_frequency

        cycle = 1 / locking_frequency
        spikes %= cycle

        key['spikes'] = spikes / cycle * 2 * np.pi
        key['vector_strength'] = 1 - circ.var(key['spikes'])

        self.insert1(key)

        histograms = self.Histograms()
        for n in SamplingPointsPerBin().fetch:
            n = int(n[0])
            bin_width_time = n / samplingrate
            bin_width_radians = bin_width_time / cycle * np.pi * 2
            bins = np.arange(0, cycle + bin_width_time, bin_width_time)
            key_sub['n'] = n
            key_sub['histogram'], _ = np.histogram(spikes, bins=bins)
            key_sub['bin_width_time'] = bin_width_time
            key_sub['bin_width_radians'] = bin_width_radians

            histograms.insert1(key_sub)
Example #16
    def _make_tuples(self, key):
        print('Processing', key['cell_id'], 'run', key['run_id'])
        if SecondOrderSignificantPeaks() & dict(
                key, eod_coeff=1, stimulus_coeff=0, baseline_coeff=0,
                refined=1):
            eod, vs = (SecondOrderSignificantPeaks()
                       & dict(key,
                              eod_coeff=1,
                              stimulus_coeff=0,
                              baseline_coeff=0,
                              refined=1)).fetch1('frequency',
                                                 'vector_strength')
        elif SecondOrderSignificantPeaks() & dict(
                key, eod_coeff=1, stimulus_coeff=0, baseline_coeff=0,
                refined=0):
            eod, vs = (SecondOrderSignificantPeaks()
                       & dict(key,
                              eod_coeff=1,
                              stimulus_coeff=0,
                              baseline_coeff=0,
                              refined=0)).fetch1('frequency',
                                                 'vector_strength')
        else:
            eod = (Runs() & key).fetch1('eod')

        aggregated_spikes = TrialAlign().load_trials(key)
        if len(aggregated_spikes) == 0:
            warn('TrialAlign returned no spikes. Skipping')
            return
        else:
            aggregated_spikes = np.hstack(aggregated_spikes)
        aggregated_spikes %= 1 / eod

        aggregated_spikes *= eod * 2 * np.pi  # normalize to 2*pi
        if len(aggregated_spikes) > 1:
            key['stim_var'], key['stim_mean'], key['stim_std'] = \
                circ.var(aggregated_spikes), circ.mean(aggregated_spikes), circ.std(aggregated_spikes)
            self.insert1(key)
Example #17
def vector_strength_at(f, trial, alpha=None):
    if alpha is None:
        return 1 - circ.var((trial % (1. / f)) * f * 2 * np.pi)
    else:
        return 1 - circ.var((trial % (1. / f)) * f * 2 * np.pi), np.sqrt(
            -np.log(alpha) / len(trial))
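When alpha is given, the second return value sqrt(-log(alpha) / n) is the usual large-sample Rayleigh criterion: under uniformly distributed phases, n times the squared vector strength is approximately exponentially distributed, so a vector strength above this threshold is significant at level alpha. A quick numerical check of that relationship (my own sketch, not code from the source):

import numpy as np

np.random.seed(3)
n, alpha, n_repeats = 200, 0.05, 2000
threshold = np.sqrt(-np.log(alpha) / n)

# Under the null hypothesis (uniform phases) the vector strength should
# exceed the threshold in roughly an alpha fraction of repeats.
exceed = 0
for _ in range(n_repeats):
    phases = np.random.uniform(0, 2 * np.pi, n)
    exceed += np.abs(np.mean(np.exp(1j * phases))) > threshold
print(exceed / float(n_repeats))   # approximately 0.05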
Example #18
def _neg_vs_at(f, spikes):
    return -np.mean(
        [1 - circ.var((trial % (1. / f)) * f * 2 * np.pi) for trial in spikes])
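_neg_vs_at returns the negative of the trial-averaged vector strength, which makes it usable directly as an objective for locating the locking frequency. A sketch of that use with a simple grid search; the optimisation strategy and the synthetic trials are my illustration (the source only defines the objective), and the vector strength is expressed here with NumPy, using the fact that 1 - circ.var equals the mean resultant length:

import numpy as np

def neg_vs_at(f, spikes):
    # Negative mean vector strength across trials at frequency f.
    return -np.mean([np.abs(np.mean(np.exp(1j * (trial % (1. / f)) * f * 2 * np.pi)))
                     for trial in spikes])

np.random.seed(4)
# Three hypothetical trials locked to 500 Hz with 0.2 ms jitter.
spikes = [np.arange(0, 0.5, 1 / 500.) + np.random.normal(0, 2e-4, 250)
          for _ in range(3)]

freqs = np.linspace(400, 600, 2001)
best = freqs[np.argmin([neg_vs_at(f, spikes) for f in freqs])]
print(best)   # close to 500 Hz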
Example #19
def _neg_vs_at(f, spikes):
    return -np.mean([1 - circ.var((trial % (1. / f)) * f * 2 * np.pi) for trial in spikes])
Example #20
def vector_strength_at(f, trial, alpha=None):
    if alpha is None:
        return 1 - circ.var((trial % (1. / f)) * f * 2 * np.pi)
    else:
        return 1 - circ.var((trial % (1. / f)) * f * 2 * np.pi), np.sqrt(- np.log(alpha) / len(trial))
Example #21
        histInformation.append(collectrow)

    binNum, lBin, uBin, freq = zip(*histInformation)

    bins = (np.array(lBin) + np.array(uBin)) / 2

    hist1 = np.array(freq)

    widths = np.ones(len(bins)) * (bins[1] - bins[0])
    histplt = hist1 / np.sum(hist1 * widths)

    newAngles = [bins[i] * np.ones(freq[i]) for i in xrange(len(bins))]
    angles = np.array(list(itertools.chain(*newAngles)))

    mean = (0.5) * pcirc.mean(2 * np.array(angles) * np.pi / 180)
    var = pcirc.var(2 * np.array(angles) * np.pi / 180)

    meanIndegrees = mean * 180 / np.pi
    if meanIndegrees > 90:
        meanIndegrees = -180 + meanIndegrees

    print("The circular mean and variance are " + str(meanIndegrees) + " and " + str(var) + " respectively.")

    histName = (
        "Histogram_Sigma_" + str(Sigma) + "_mean" + str(round(meanIndegrees, 2)) + "_var_" + str(round(var, 2)) + ".png"
    )

    fig = plt.figure()
    plt.bar(bins, histplt, align="center", width=widths, facecolor="r")
    plt.ylim(0, ylimit)
    # plt.title('At Scale: '+ str(index)+'_msize_'+str(mSize) + 'Angles density')
Example #22
def _vector_strength(param):
    event_times, w = param
    return 1 - var((event_times % (1. / w)) * w * 2 * np.pi)
Example #23
def test_var():
    data = np.array([1.80044838, 2.02938314, 1.03534016, 4.84225057,
                     1.54256458, 5.19290675, 2.18474784,
                     4.77054777, 1.51736933, 0.72727580])
    s = pycircstat.var(data)
    assert_allclose(0.65842, s, atol=0.001, rtol=0.001)
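The asserted value is consistent with the definition used throughout these examples: the circular variance is one minus the length of the mean resultant vector. Recomputing it for the same ten angles with plain NumPy (a check of the definition, not part of the original test):

import numpy as np

data = np.array([1.80044838, 2.02938314, 1.03534016, 4.84225057, 1.54256458,
                 5.19290675, 2.18474784, 4.77054777, 1.51736933, 0.72727580])
R = np.abs(np.mean(np.exp(1j * data)))   # mean resultant length
print(1 - R)                             # ~0.658, matching the asserted value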
Example #24
def _vector_strength(param):
    event_times, w = param
    return 1 - var((event_times % (1. / w)) * w * 2 * np.pi)