Example #1
def spike_psth(spike_time_ms, t1_ms=-50., t2_ms=250., bin_ms=1):
    """."""
    N_trials = len(spike_time_ms)
    t2_ms = pylab.ceil((t2_ms - t1_ms) / bin_ms) * bin_ms + t1_ms
    N_bins = (t2_ms - t1_ms) / bin_ms

    spike_count_by_trial = pylab.zeros((N_trials, N_bins), dtype=float)
    if N_trials > 0:
        all_spikes_ms = pylab.array([], dtype=float)
        for trial in range(len(spike_time_ms)):
            if spike_time_ms[trial] is None:
                continue
            idx = pylab.find((spike_time_ms[trial] >= t1_ms)
                             & (spike_time_ms[trial] <= t2_ms))
            spike_count_by_trial[trial,:], bin_edges = \
              pylab.histogram(spike_time_ms[trial][idx], bins = N_bins,
                              range = (t1_ms, t2_ms))

        spike_rate = 1000 * spike_count_by_trial.mean(axis=0) / bin_ms
    else:
        spike_rate = pylab.nan

    dummy, bin_edges = \
      pylab.histogram(None, bins = N_bins, range = (t1_ms, t2_ms))
    bin_center_ms = (bin_edges[1:] + bin_edges[:-1]) / 2.0

    return spike_rate, spike_count_by_trial, bin_center_ms
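The PSTH computed by spike_psth is just the trial-averaged spike count per bin, rescaled to spikes per second. The sketch below is a hypothetical plain-numpy rewrite of that idea (not the original author's code) for spike times given per trial as arrays in ms:

import numpy as np

def psth_sketch(spike_times_by_trial_ms, t1_ms=-50., t2_ms=250., bin_ms=1.):
    # Bin edges covering [t1_ms, t2_ms] in steps of bin_ms.
    edges = np.arange(t1_ms, t2_ms + bin_ms, bin_ms)
    counts = np.array([np.histogram(trial, bins=edges)[0]
                       for trial in spike_times_by_trial_ms])
    # Mean spikes per bin and per trial, converted from counts/bin_ms to spikes/s.
    rate_hz = 1000. * counts.mean(axis=0) / bin_ms
    centers_ms = (edges[:-1] + edges[1:]) / 2.
    return rate_hz, centers_ms

rate, centers = psth_sketch([np.array([5., 12., 40.]), np.array([7., 15.])])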
Example #2
def corr_score(file1, file2, delta, bin=1., dur=100., ncell=500):
    """Similarity score by correlation coefficient. The spike trains are convolved with a triangular kernel."""
    d1 = numpy.loadtxt(file1)
    d2 = numpy.loadtxt(file2)
    x = numpy.zeros(int(ncell * dur / bin))
    y = numpy.zeros(int(ncell * dur / bin))
    for j in range(ncell):
        if d1.size == 2:
            s1 = numpy.array(d1[0] * (d1[1] == j))
        else:
            s1 = d1[d1[:, 1] == j, 0]
        if d2.size == 2:
            s2 = numpy.array(d2[0] * (d2[1] == j))
        else:
            s2 = d2[d2[:, 1] == j, 0]
        kern = numpy.append(numpy.arange(delta / bin),
                            numpy.arange(delta / bin, -1, -1))
        ts1, dump = pylab.histogram(s1, numpy.arange(0., dur + bin, bin))
        ts2, dump = pylab.histogram(s2, numpy.arange(0., dur + bin, bin))
        x[j * dur / bin:(j + 1) * dur / bin] = numpy.convolve(
            ts1, kern, 'same')
        y[j * dur / bin:(j + 1) * dur / bin] = numpy.convolve(
            ts2, kern, 'same')
    x = x - pylab.mean(x)
    y = y - pylab.mean(y)
    cor = sum(x * y) / (len(x) * pylab.std(x) * pylab.std(y))
    return cor
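To illustrate the core of corr_score without its file format: bin two spike trains, smooth each with the same triangular kernel, and take the Pearson correlation of the smoothed traces. A minimal sketch with made-up spike times (an illustration, not the original data layout):

import numpy as np

bin_, dur, delta = 1., 100., 5.
spikes_a = np.array([3., 10., 11., 40., 41., 90.])
spikes_b = np.array([4., 10., 12., 42., 88.])

edges = np.arange(0., dur + bin_, bin_)
ts_a, _ = np.histogram(spikes_a, edges)
ts_b, _ = np.histogram(spikes_b, edges)

# Triangular kernel rising to delta/bin_ and back down, as in corr_score.
kern = np.append(np.arange(delta / bin_), np.arange(delta / bin_, -1, -1))
x = np.convolve(ts_a, kern, 'same')
y = np.convolve(ts_b, kern, 'same')
x = x - x.mean()
y = y - y.mean()
cor = np.sum(x * y) / (len(x) * x.std() * y.std())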
Example #3
def his(im):
    '''
    Histogram equalization processing
    '''
    r = im[:, :, 0]
    g = im[:, :, 1]
    b = im[:, :, 2]
    imhist_r, bins_r = pl.histogram(r, 256, normed=True)
    imhist_g, bins_g = pl.histogram(g, 256, normed=True)
    imhist_b, bins_b = pl.histogram(b, 256, normed=True)
    cdf_r = imhist_r.cumsum()
    cdf_g = imhist_g.cumsum()
    cdf_b = imhist_b.cumsum()
    cdf_r = cdf_r * 255 / cdf_r[-1]
    cdf_g = cdf_g * 255 / cdf_g[-1]
    cdf_b = cdf_b * 255 / cdf_b[-1]
    im_r = pl.interp(r.flatten(), bins_r[:256], cdf_r)
    im_g = pl.interp(g.flatten(), bins_g[:256], cdf_g)
    im_b = pl.interp(b.flatten(), bins_b[:256], cdf_b)
    # original channel images
    # channel images after equalization
    im_r = im_r.reshape([im.shape[0], im.shape[1]])
    im_g = im_g.reshape([im.shape[0], im.shape[1]])
    im_b = im_b.reshape([im.shape[0], im.shape[1]])
    im_p = copy.deepcopy(im)
    im_p[:, :, 0] = im_r
    im_p[:, :, 1] = im_g
    im_p[:, :, 2] = im_b
    return im_p
Example #4
def plot_hist(X,Y,title,name):
    # get list of tracks and list of labels
    xs = X.values
    ys = Y.values
    ys = pl.reshape(ys,[ys.shape[0],])
    
    pl.figure(figsize=(15, 6), dpi=100)
    for i in range(many_features):
        if (i==2):
            counts0, bins0 = pl.histogram(xs[ys==0,i],100,range=(0.,0.08))
            counts1, bins1 = pl.histogram(xs[ys==1,i],100,range=(0.,0.08))
        elif (i==5):
            counts0, bins0 = pl.histogram(xs[ys==0,i],100,range=(1,5))
            counts1, bins1 = pl.histogram(xs[ys==1,i],100,range=(1,5))
        elif (i==6):
            counts0, bins0 = pl.histogram(xs[ys==0,i],100,range=(0,15))
            counts1, bins1 = pl.histogram(xs[ys==1,i],100,range=(0,15))
        elif (i==7):
            counts0, bins0 = pl.histogram(xs[ys==0,i],100,range=(-1.5,1.))
            counts1, bins1 = pl.histogram(xs[ys==1,i],100,range=(-1.5,1.))	      
        else:
            counts0, bins0 = pl.histogram(xs[ys==0,i],100)
            counts1, bins1 = pl.histogram(xs[ys==1,i],100)
        pl.hold()
        pl.subplot(2,4,i+1)
        pl.plot(bins0[0:100],counts0,'r',bins1[0:100],counts1,'b')
        pl.title(feature_names[i])
    pl.tight_layout()
    pl.savefig("../out/{0}/{1}".format(WHICH_EXP,name),bbox_inches='tight')
Example #5
def spike_psth(spike_time_ms, t1_ms = -50., t2_ms = 250., bin_ms = 1):
  """."""
  N_trials = len(spike_time_ms)
  t2_ms = pylab.ceil((t2_ms - t1_ms) / bin_ms)*bin_ms + t1_ms
  N_bins = (t2_ms - t1_ms) / bin_ms
  
  spike_count_by_trial = pylab.zeros((N_trials,N_bins),dtype=float)
  if N_trials > 0:
    all_spikes_ms = pylab.array([],dtype=float)
    for trial in range(len(spike_time_ms)):
      if spike_time_ms[trial] is None:
        continue
      idx = pylab.find((spike_time_ms[trial] >= t1_ms) & 
                       (spike_time_ms[trial] <= t2_ms))
      spike_count_by_trial[trial,:], bin_edges = \
        pylab.histogram(spike_time_ms[trial][idx], bins = N_bins, 
                        range = (t1_ms, t2_ms))
      
    spike_rate = 1000*spike_count_by_trial.mean(axis=0)/bin_ms
  else:
    spike_rate = pylab.nan

  dummy, bin_edges = \
    pylab.histogram(None, bins = N_bins, range = (t1_ms, t2_ms))
  bin_center_ms = (bin_edges[1:] + bin_edges[:-1])/2.0

  return spike_rate, spike_count_by_trial, bin_center_ms
Example #6
def plot_hipp_subfields(brain_file, subfield_file, figsize=(40.7, 20.3)):

    fig = plt.figure(figsize=figsize)
    ax = plt.subplot(1, 1, 1)

    print "hello"
    brain = nb.load(brain_file).get_data()
    brain_affine = nb.load(brain_file).get_affine()
    subfield = nb.load(subfield_file).get_data()
    subfield_affine = nb.load(subfield_file).get_affine()

    plt.histogram(subfield[:])
    #
    #plt.imshow(subfield[:,:,175], cmap="gray", origin="lower")
    #plt.show()
    subfield[subfield > 1] = 1
    slicer = viz.plot_anat(
        np.asarray(brain),
        np.asarray(brain_affine),
        cut_coords=np.arange(-238, -220, 2),  #None, #[0,50,100,150,200,250],
        slicer='z',
        black_bg=True,
        cmap=cm.Greys_r,  # @UndefinedVariable
        figure=fig,
        axes=ax,
        draw_cross=False)
    slicer.edge_map(np.asarray(subfield),
                    np.asarray(subfield_affine),
                    color='r')
    #
    #    fig.suptitle('subfields', fontsize='14')
    plt.show()

    return fig
Example #7
    def get_bcc_pz(self,filename_lenscat):

        if self.prob_z == None:

            # filename_lenscat = os.environ['HOME'] + '/data/BCC/bcc_a1.0b/aardvark_v1.0/lenscats/s2n10cats/aardvarkv1.0_des_lenscat_s2n10.351.fit'
            # filename_lenscat = os.environ['HOME'] + '/data/BCC/bcc_a1.0b/aardvark_v1.0/lenscats/s2n10cats/aardvarkv1.0_des_lenscat_s2n10.351.fit'

            if 'fits' in filename_lenscat:
                lenscat = tabletools.loadTable(filename_lenscat)
                if 'z' in lenscat.dtype.names:
                    self.prob_z , _  = pl.histogram(lenscat['z'],bins=self.grid_z_edges,normed=True)
                elif 'z-phot' in lenscat.dtype.names:
                    self.prob_z , _  = pl.histogram(lenscat['z-phot'],bins=self.grid_z_edges,normed=True)

                if 'e1' in lenscat.dtype.names:

                    select = lenscat['star_flag'] == 0
                    lenscat = lenscat[select]
                    select = lenscat['fitclass'] == 0
                    lenscat = lenscat[select]
                    select = (lenscat['e1'] != 0.0) * (lenscat['e2'] != 0.0)
                    lenscat = lenscat[select]
                    self.sigma_ell = np.std(lenscat['e1']*lenscat['weight'],ddof=1)

            elif 'pp2' in filename_lenscat:

                pickle = tabletools.loadPickle(filename_lenscat,log=0)
                self.prob_z =  pickle['prob_z']
                self.grid_z_centers = pickle['bins_z']
                self.grid_z_edges = plotstools.get_bins_edges(self.grid_z_centers)
Example #8
def stellar_massftn():
    gadget2msun=10.e10
    boxsize = 47.0
    max_mag=-16.
    min_mag = -23.
    nbins=14
    hubble_h = 0.7
    #subfind_folder = "/mnt/lustre/scratch/cs390/nIFTy/62.5_dmSF/outputs/"
    #ahf_folder = "/mnt/lustre/scratch/cs390/nIFTy/62.5_dm/outputs/"
    
    firstfile = first1
    lastfile = last1

    filter = LGalaxyStruct.properties_used
    filter['DiskMass'] = True
    filter['BulgeMass'] = True

    #file_prefix = "SA_z0.00"    
    (nTrees,nGals,nTreeGals,gal) = read_lgal.readsnap_lgal(folder1,file_prefix,first1,last1,filter)
    massf = gadget2msun*gal['DiskMass']+gadget2msun*gal['BulgeMass']
    mass = numpy.log10(massf)
    stellarmass = pylab.histogram(mass,bins=20,range=(9.0,14.0))
    print stellarmass
    massftn_y = stellarmass[0]
    massftn_x = []
    for i in range(len(stellarmass[0])):
        massftn_x.append((stellarmass[1][i]+stellarmass[1][i+1])/2.)

    delta_logM = massftn_x[1]-massftn_x[0]
    pylab.rc('text', usetex=True)
    fig = pylab.figure()
    ax = fig.add_subplot(111)
    ax.plot(massftn_x,massftn_y/boxsize1**3./delta_logM,'r-',label=label1)

    firstfile = first2
    lastfile = last2

     
    (nTrees,nGals,nTreeGals,gal) = read_lgal.readsnap_lgal(folder2,file_prefix,first2,last2,filter)
    massf = gadget2msun*gal['DiskMass']+gadget2msun*gal['BulgeMass']
    mass = numpy.log10(massf)
    stellarmass = pylab.histogram(mass,bins=20,range=(9.0,14.0))
    print stellarmass
    massftn_y = stellarmass[0]
    massftn_x = []
    for i in range(len(stellarmass[0])):
        massftn_x.append((stellarmass[1][i]+stellarmass[1][i+1])/2.)
    ax.set_xlabel(r"$\log(M_\star/M_\odot$ $h)$")
    ax.set_ylabel(r"galaxies$/(Mpc^3 h^{-3})/\Delta \log(M_\star/M_\odot$ $h)$")
    ax.plot(massftn_x,massftn_y/boxsize2**3./delta_logM,'b-',label=label2)

    print "Stellar mass"
    for i in range(len(massftn_x)):
        print massftn_x[i],"\t",massftn_y[i]/boxsize**3./delta_logM
    ax.set_yscale("log")
    ax.legend(loc='upper right',ncol=1, fancybox=True)


    #pylab.show()
    pylab.savefig('stellar_mass.pdf',bbox_inches='tight')
Example #9
def _temp_plot_(spk, ax, stim=0, yy=0):

	exid = np.where(spk[0] < ne)[0]
	inid = np.where(spk[0] >= ne)[0]

	htex = pl.histogram(spk[1][exid], bins=sim_time/10, range=(0, sim_time))
	htin = pl.histogram(spk[1][inid], bins=sim_time/10, range=(0, sim_time))

	hr = pl.histogram(spk[0], bins=n, range=(0, n)) 

	ax.plot(spk[1][exid]*dt, spk[0][exid], 'r.', markersize=mksz, label='Exc: '+ str(np.round(len(exid)/ne)) )
	ax.plot(spk[1][inid]*dt, spk[0][inid], 'b.', markersize=mksz, label='Inh: '+str(np.round(len(inid)/ne)) )

	ax.set_yticks([0, 99, 199, 299, ne-1, n-1])
	ax.set_yticklabels([])
	ax.set_ylim(0-10, n+10)
	ax.set_xlim([0-10, sim_time+10])

	ax.set_xticklabels([])	

	divider = make_axes_locatable(ax)	

	axHisty = divider.append_axes("right", size=.5, pad=0.1)
	#adjust_spines(axHisty,['left', 'bottom'], outward=0)

	axHisty.plot(hr[0]/(Ts), hr[1][0:-1], color='k', lw=2)
	
	if stim == 0: 
	   pl.text(.85, .5, str(np.round(len(exid)/ne / Ts,1)) +' Hz', transform = axHisty.transAxes, color='r')
	   pl.text(.85, .85, str(np.round(len(inid)/ni /Ts,1))+' Hz', transform = axHisty.transAxes, color='b')
	else:
	    pl.text(.9, .5, str(np.round(len(exid)/ne /Ts,1)) +' Hz', transform = axHisty.transAxes, color='r')
	    pl.text(.9, .85, str(np.round(len(inid)/ni /Ts,1))+' Hz', transform = axHisty.transAxes, color='b')

	axHisty.set_yticks([0, 99, 199, 299, ne-1, n-1])
	axHisty.set_yticklabels([])
	axHisty.set_xticks([0, 10])

	axHisty.set_ylim(0-10, n+10)

	axHistx = divider.append_axes("bottom", 1.2, pad=0.3)
	#adjust_spines(axHistx,['left', 'bottom'], outward=0)

	axHistx.plot(htex[1][0:-1], htex[0], color='r', lw=2, label='Exc')
	axHistx.plot(htin[1][0:-1], htin[0], color='b', lw=2, label='Inh')	
	
	axHistx.set_yticks([0, 50, 100, 150])
	axHistx.set_yticklabels([])
	
	if yy == 1:	
	   axHistx.set_xlabel('Time (ms)')
	   axHistx.set_ylabel('Population spike count')
	   axHistx.set_yticklabels([0, 50, 100, 150])
	   pl.legend(loc=1, frameon=False, prop={'size':12.5})
	   
	   axHisty.set_xlabel('Firing rate \n (spikes/s)', size=10)
Example #10
def modelYield(NFert, Irrigation= False, cycles = 10000):
    '''Model yield for 10,000 iterations and plot the yield in a histogram.
    '''
    field1 = hectareCrop(NFert, Irrigation)
    iterations = []
    while cycles > 0:
        iterations.append(field1.cropYield())
        cycles -= 1
    pylab.histogram(iterations, bins = 40)
    pylab.show()
Example #11
def plot_1d_pdfs(cldata):
    grid = cldata['match']['grid']
    logpr = grid['fit']
    pr = np.exp(-0.5 * logpr)
    pr /= pr.sum()

    plt.figure(figsize=(15, 4))

    plt.subplot(141)
    y = np.unique(grid['age'])
    dy = get_bins_from_center(y)
    n, b = plt.histogram(cldata['match']['grid']['age'], bins=dy, weights=pr)
    c = figrc.get_centers_from_bins(b)
    p = n * np.diff(b) * c
    plt.step(10**c, p / p.sum(), where='mid', color='k', lw=2)
    plt.xscale('log')
    figrc.hide_axis('top right'.split())
    plt.xlabel('age [yr]')

    plt.subplot(142)
    Y = grid['av']
    y = np.unique(Y)
    dy = get_bins_from_center(y)
    n, b = plt.histogram(Y, bins=dy, weights=pr)
    c = figrc.get_centers_from_bins(b)
    plt.step(c, n / n.sum(), where='mid', color='k', lw=2)
    figrc.hide_axis('top right'.split())
    plt.xlabel('Av [mag]')

    plt.subplot(143)
    Y = grid['z']
    y = np.unique(Y)
    dy = get_bins_from_center(y)
    n, b = plt.histogram(Y, bins=dy, weights=pr)
    c = figrc.get_centers_from_bins(b)
    plt.step(c, n / n.sum(), where='mid', color='k', lw=2)
    figrc.hide_axis('top right'.split())
    plt.xlabel(r'Log(Z/Z$_\odot$)')

    plt.subplot(144)
    Y = grid['mass']
    dlogm = 0.1
    y = np.unique(Y)
    dy = np.arange(dlogm, 6, dlogm)
    n, b = plt.histogram(Y, bins=dy, weights=pr)
    c = figrc.get_centers_from_bins(b)
    p = n * np.diff(b) * c
    # plt.step(c, n / n.sum(), where='mid')
    plt.step(10**c, p / p.sum(), where='mid', color='k', lw=2)
    plt.xscale('log')
    figrc.hide_axis('top right'.split())
    plt.xlabel(r'Mass [M$_\odot$]')

    plt.tight_layout()
Example #12
def semiLogFracFound(i,o,**pltKwds):
    from pylab import histogram,semilogx,loglog,logspace
    f=i[(o>0) &(i>0)]
    l=i[(o<1) &(i>0)]

    d=i.sum()
    fNorm=f/d
    lNorm=l/d
    
    fHist=histogram(fNorm,logspace(-6,-2.0,30))
    lHist=histogram(lNorm,logspace(-6,-2.0,30))
    semilogx(fHist[1][1:],fHist[0].astype(float)/(fHist[0]+lHist[0]),**pltKwds)
Example #13
def plot_hists(nus=[143,353],
               map1_name=None,
               map2_name=None,
               maskname='wmap_temperature_kq85_analysis_mask_r10_9yr_v5.fits',
               nside=2048,
               fwhm=0.0,
              bins=100,normed=True,
              atol=1e-6, ymin=0.01, ymax=None,
              xmin=-0.001, xmax=0.005):

    if map1_name is None:
        map1_name = 'HFI_SkyMap_{}_2048_R2.02_full.fits'.format(nus[0])
    label1 = '{} GHz'.format(nus[0])
    if map2_name is None:
        map2_name = 'HFI_SkyMap_{}_2048_R2.02_full.fits'.format(nus[1])
    label2 = '{} GHz'.format(nus[1])
   
    map1 = prepare_map( map1_name, field=0,
                        maskname=maskname,
                        nside_out=nside, fwhm=fwhm )
    map2 = prepare_map( map2_name, field=0,
                        maskname=maskname,
                        nside_out=nside, fwhm=fwhm )

    y1,x1 = pl.histogram(map1[np.where(np.negative(np.isclose(map1,0.,atol=atol)))],
                       bins=bins,normed=normed)
    bin1 = (x1[:-1] + x1[1:]) / 2.

    y2,x2 = pl.histogram(map2[np.where(np.negative(np.isclose(map2,0.,atol=atol)))],
                       bins=bins,normed=normed)
    bin2 = (x2[:-1] + x2[1:]) / 2.
    #return bin1,y1,bin2,y2
        

    fig = plt.figure()
    ax = plt.gca()
    
    ax.semilogy(bin1, y1, lw=3, label=label1,color='red')
    ax.semilogy(bin2, y2, lw=3, label=label2,color='gray')
    ax.set_xlim(xmin=xmin,xmax=xmax)
    ax.set_ylim(ymin=ymin, ymax=ymax)

    #ax.set_yscale('log')
    
    ax.set_xlabel('$\mu K$', fontsize=20)
    ax.set_yticks([])
    
    plt.draw()
    plt.legend(frameon=False, fontsize=20)

    plt.savefig('pdfs_{}GHz_{}GHz_fwhm{:.3}rad.pdf'.format(nus[0],nus[1],fwhm))
Example #14
def galaxy_stellar_massftn():
    gadget2msun=10.e10
    boxsize = 47.0
    max_mag=-16.
    min_mag = -23.
    nbins=14
    hubble_h = 0.7
    model2_folder = "/mnt/lustre/scratch/cs390/AHF_halos/cubepm_131212_6_1728_47Mpc_ext2/mergertrees/outputs/"
    nore_folder = "/mnt/lustre/scratch/cs390/AHF_halos/cubepm_131212_6_1728_47Mpc_ext2/mergertrees/outputs_nore/"
    snaplist_file = "/mnt/lustre/scratch/cs390/AHF_halos/cubepm_131212_6_1728_47Mpc_ext2/mergertrees/cubep3m_zlist_out"
    observe_folder="/mnt/lustre/scratch/cs390/codes/cubepm_131212_6_1728_47Mpc_ext2/observed_UVL/"
    firstfile = 0
    lastfile = 215

    filter = LGalaxyStruct.properties_used
    filter['DiskMass'] = True
    filter['BulgeMass'] = True

    file_prefix = "SA_z8.06"    
    (nTrees,nGals,nTreeGals,gal) = read_lgal.readsnap_lgal(model2_folder,file_prefix,firstfile,lastfile,filter)
    massf = gadget2msun*gal['DiskMass']+gadget2msun*gal['BulgeMass']
    mass = [i for i in massf if i > 10.e6]
    mass = numpy.log10(mass)
    stellarmass = pylab.histogram(mass)
    print stellarmass
    massftn_y = stellarmass[0]
    massftn_x = []
    for i in range(len(stellarmass[0])):
        massftn_x.append((stellarmass[1][i]+stellarmass[1][i+1])/2.)

    pylab.rc('text', usetex=True)
    fig = pylab.figure()
    ax = fig.add_subplot(111)
    ax.plot(massftn_x,massftn_y,'r-')


    file_prefix = "SA_z8.06"    
    (nTrees,nGals,nTreeGals,gal) = read_lgal.readsnap_lgal(nore_folder,file_prefix,firstfile,lastfile,filter)
    massf = gadget2msun*gal['DiskMass']+gadget2msun*gal['BulgeMass']
    mass = [i for i in massf if i > 10.e6]
    mass = numpy.log10(mass)
    stellarmass = pylab.histogram(mass)
    print stellarmass
    massftn_y = stellarmass[0]
    massftn_x = []
    for i in range(len(stellarmass[0])):
        massftn_x.append((stellarmass[1][i]+stellarmass[1][i+1])/2.)

    ax.plot(massftn_x,massftn_y,'b-')
    ax.set_yscale("log")
    pylab.show()
Example #15
def semiLogHistLostFound(i,o):
    from pylab import histogram,semilogx,logspace

    f=i[(o>0) &(i>0)]
    l=i[(o<1) &(i>0)]

    d=i.sum()
    fNorm=f/d
    lNorm=l/d
    
    fHist=histogram(fNorm,logspace(-6,-2.0,30))
    lHist=histogram(lNorm,logspace(-6,-2.0,30))
    
    semilogx(fHist[1][1:],fHist[0],label='out>0')
    semilogx(lHist[1][1:],lHist[0],label='out=0')
Example #16
def spikecorr_tri_conv(filename,
                       bin=5.,
                       maxtime=10000.,
                       start=100.,
                       path='./'):
    data = numpy.loadtxt(path + filename)
    data = data[data[:, 0] > start, :]
    z1 = pylab.histogram(data[data[:, 1] == 0., 0],
                         numpy.arange(0, maxtime + 0.1, 0.1))
    z2 = pylab.histogram(data[data[:, 1] == 1., 0],
                         numpy.arange(0, maxtime + 0.1, 0.1))
    x1 = miscfunc.tri_convolve(z1[0], bin * 10)
    x2 = miscfunc.tri_convolve(z2[0], bin * 10)
    cor = miscfunc.corrcoef(x1, x2)
    return cor
Example #17
def old_spike_psth(data, t1_ms=-250., t2_ms=0., bin_ms=10):
    """Uses data format returned by get_spikes"""
    spike_time_ms = data['spike times ms']
    N_trials = data['trials']
    t2_ms = pylab.ceil((t2_ms - t1_ms) / bin_ms) * bin_ms + t1_ms
    N_bins = (t2_ms - t1_ms) / bin_ms

    if N_trials > 0:
        all_spikes_ms = pylab.array([], dtype=float)
        for trial in range(len(spike_time_ms)):
            if spike_time_ms[trial] is None:
                continue
            idx = pylab.find((spike_time_ms[trial] >= t1_ms)
                             & (spike_time_ms[trial] <= t2_ms))
            all_spikes_ms = \
              pylab.concatenate((all_spikes_ms, spike_time_ms[trial][idx]))
        spike_n_bin, bin_edges = \
          pylab.histogram(all_spikes_ms, bins = N_bins,
                          range = (t1_ms, t2_ms), new = True)

        spikes_per_trial_in_bin = spike_n_bin / float(N_trials)
        spike_rate = 1000 * spikes_per_trial_in_bin / bin_ms
    else:
        spike_rate = pylab.nan

    bin_center_ms = (bin_edges[1:] + bin_edges[:-1]) / 2.0

    return spike_rate, bin_center_ms
Example #18
def logLogRatioFoundLost(i,o,**pltKwds):
    
    from pylab import histogram,semilogx,loglog,logspace

    f=i[(o>0) &(i>0)]
    l=i[(o<1) &(i>0)]

    d=i.sum()
    fNorm=f/d
    lNorm=l/d
    
    fHist=histogram(fNorm,logspace(-6,-2.0,30))
    lHist=histogram(lNorm,logspace(-6,-2.0,30))
    
    #semilogx(fHist[1][1:],fHist[0].astype(float)/lHist[0])
    loglog(fHist[1][1:],fHist[0].astype(float)/lHist[0],**pltKwds)
Example #19
def testCollisionsE8(n,d=8):
    M = pylab.eye(8,8)

    S = [0.0]*n
    C = [0]*n
    #generate distances and buckets
    for i in range(n):
        p = [random() for j in xrange(d)]
        q = [p[j] + (gauss(0,1)/(d**.5)) for j in xrange(d)]
        S[i]=distance(p,q,d)
        C[i]= int(decodeE8(dot(p,M)) == decodeE8(dot(q,M)))
    
    ranges = pylab.histogram(S,30)[1]   
    bucketsCol = [0]*len(ranges)
    bucketsDis = [0]*len(ranges)

    #fill buckets with counts 
    for i in xrange(n):
        k = len(ranges)-1
        while S[i] < ranges[k]:k=k-1
        if C[i]:bucketsCol[k]=bucketsCol[k]+1
        else:bucketsDis[k] = bucketsDis[k]+1
    print bucketsDis
    print ranges
    pylab.plot(ranges,[float(bucketsCol[i])/(float(bucketsDis[i]+.000000000001))  for i in range(len(ranges))],color='purple') 
Example #20
def plot_trait_distribution(params, trait_matrix, traits):
    """
    Plot the time evolution of trait distribution given the params map 
    containing the experimental parameters, the trait matrix containing 
    history of traits, and current list of traits.
    """

    l = pylab.histogram(traits,
                        100,
                        range=(0, params["max_trait"]),
                        normed=False)[0]
    r = []
    for bin in l:
        trait = bin / (1.0 * params["population"])
        r.append(1.0 - trait)
    trait_matrix.append(r)
    pylab.subplot(121).clear()
    pylab.xlabel(r"$x$")
    pylab.ylabel(r"$t$")
    pylab.imshow(trait_matrix, interpolation = "bilinear",
                 origin = "l", cmap = cm.gray,
                 extent = [0, params["max_trait"], 1, \
                               len(trait_matrix) * params["report_freq"]])
    pylab.axis("tight")
    ax = pylab.gca()
    ax.yaxis.major.formatter.set_powerlimits((0, 0))
    pylab.draw()
Example #21
def spike_make_diagram(ts, gids, name):
    pylab.figure()
    color_marker = "."
    color_bar = "blue"
    color_edge = "black"
    ylabel = "Neuron ID"

    hist_binwidth = 5.0

    ax1 = pylab.axes([0.1, 0.3, 0.85, 0.6])
    pylab.plot(ts, gids, color_marker)
    pylab.ylabel(ylabel)
    pylab.xticks([])
    xlim = pylab.xlim()

    pylab.axes([0.1, 0.1, 0.85, 0.17])
    t_bins = numpy.arange(numpy.amin(ts), numpy.amax(ts), hist_binwidth)
    n, bins = pylab.histogram(ts, bins=t_bins)
    t_bins = t_bins[:-1]                        # FixMe it must work without cutting the end value
    num_neurons = len(numpy.unique(gids))
    heights = (1000 * n / (hist_binwidth * num_neurons))
    pylab.bar(t_bins, heights, width=hist_binwidth, color=color_bar, edgecolor=color_edge)
    pylab.yticks([int(a) for a in numpy.linspace(0.0, int(max(heights) * 1.1) + 5, 4)])
    pylab.ylabel("Rate (Hz)")
    pylab.xlabel("Time (ms)")
    pylab.xlim(xlim)
    pylab.axes(ax1)

    pylab.title('Spike activity')
    pylab.draw()
    pylab.savefig(path + name + ".png", dpi=dpi_n, format='png')
    pylab.close()
Example #22
def dcf(X,Y,T,num_bin,noise_std):

	'''
	This function implements the discrete correlation function (DCF) delay
	estimation method described in Edelson RA, Krolik JH (1988) "The discrete
	correlation function - A new method for analyzing unevenly sampled
	variability data." The Astrophysical Journal 333: 646-659.

	'''

	#obtain the delta Ts
	deltaT=T[:,None]-T[None,:]
	#iu1 = np.triu_indices(len(T),1)
	iu1 = np.triu_indices(len(T))
	hist, bin_edges=pb.histogram(np.abs(deltaT[iu1]),num_bin)
	cent=bin_edges[0:len(bin_edges)-1]+np.diff(bin_edges)*.5
	dcf=np.zeros(len(cent))
	sigx=np.var(X)
	sigy=np.var(Y)
	muX=np.mean(X)
	muY=np.mean(Y)
	for i in range(0,len(cent)):
		for j in range(0,len(T)):
			for k in range(j,len(T)):
				if i<len(cent)-1:
					if (np.abs(deltaT[j,k])>=bin_edges[i])&(np.abs(deltaT[j,k])<bin_edges[i+1]):
						dcf[i]+=((X[j]-muX)*(Y[k]-muY))/np.sqrt((sigx-noise_std**2)*(sigy-noise_std**2))
				elif i==len(cent)-1:
					if (np.abs(deltaT[j,k])>=bin_edges[i])&(np.abs(deltaT[j,k])<=bin_edges[i+1]):
						dcf[i]+=((X[j]-muX)*(Y[k]-muY))/np.sqrt((sigx-noise_std**2)*(sigy-noise_std**2))


	dcf[hist>0]=dcf[hist>0]/hist[hist>0]
	return cent[np.argmax(dcf)],dcf,cent
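A minimal usage sketch for dcf, assuming the function above is in scope together with numpy as np and pylab as pb (for its histogram call); the unevenly sampled light curves and the 5-unit delay here are made up:

import numpy as np

np.random.seed(0)
T = np.sort(np.random.uniform(0., 100., 80))   # uneven sampling times
X = np.sin(2 * np.pi * T / 30.)                # driving signal
Y = np.sin(2 * np.pi * (T - 5.) / 30.)         # same signal delayed by 5 time units
best_lag, dcf_vals, lag_centers = dcf(X, Y, T, num_bin=20, noise_std=0.0)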
Example #23
def old_spike_psth(data, t1_ms = -250., t2_ms = 0., bin_ms = 10):
  """Uses data format returned by get_spikes"""
  spike_time_ms = data['spike times ms']
  N_trials = data['trials']
  t2_ms = pylab.ceil((t2_ms - t1_ms) / bin_ms)*bin_ms + t1_ms
  N_bins = (t2_ms - t1_ms) / bin_ms
  
  if N_trials > 0:
    all_spikes_ms = pylab.array([],dtype=float)
    for trial in range(len(spike_time_ms)):
      if spike_time_ms[trial] is None:
        continue
      idx = pylab.find((spike_time_ms[trial] >= t1_ms) & 
                       (spike_time_ms[trial] <= t2_ms))
      all_spikes_ms = \
        pylab.concatenate((all_spikes_ms, spike_time_ms[trial][idx]))
    spike_n_bin, bin_edges = \
      pylab.histogram(all_spikes_ms, bins = N_bins, 
                      range = (t1_ms, t2_ms), new = True)

    spikes_per_trial_in_bin = spike_n_bin/float(N_trials) 
    spike_rate = 1000*spikes_per_trial_in_bin/bin_ms
  else:
    spike_rate = pylab.nan
  
  bin_center_ms = (bin_edges[1:] + bin_edges[:-1])/2.0

  return spike_rate, bin_center_ms
Example #24
def testCollisionsE8(n, d=8):
    M = pylab.eye(8, 8)

    S = [0.0] * n
    C = [0] * n
    #generate distances and buckets
    for i in range(n):
        p = [random() for j in xrange(d)]
        q = [p[j] + (gauss(0, 1) / (d**.5)) for j in xrange(d)]
        S[i] = distance(p, q, d)
        C[i] = int(decodeE8(dot(p, M)) == decodeE8(dot(q, M)))

    ranges = pylab.histogram(S, 30)[1]
    bucketsCol = [0] * len(ranges)
    bucketsDis = [0] * len(ranges)

    #fill buckets with counts
    for i in xrange(n):
        k = len(ranges) - 1
        while S[i] < ranges[k]:
            k = k - 1
        if C[i]: bucketsCol[k] = bucketsCol[k] + 1
        else: bucketsDis[k] = bucketsDis[k] + 1
    print bucketsDis
    print ranges
    pylab.plot(ranges, [
        float(bucketsCol[i]) / (float(bucketsDis[i] + .000000000001))
        for i in range(len(ranges))
    ],
               color='purple')
Example #25
def __make_spikes_diagram(times, gids, name, path):
    """
    Draw spike diagram
    
    Description:
        Set parameters, include data, draw and save
        
    Args:
        times (list): times
        gids  (list): global IDs of neurons
        name   (str): name of brain part
        path   (str): path to save results
    """
    global successed

    path += "/img"

    if not os.path.exists(path):
        os.makedirs(path)

    pylab.figure()
    color_marker = "."
    color_bar = "blue"
    color_edge = "black"
    ylabel = "Neuron ID"
    hist_binwidth = 5.0
    location = pylab.axes([0.1, 0.3, 0.85, 0.6])
    pylab.plot(times, gids, color_marker)
    pylab.ylabel(ylabel)
    xlim = pylab.xlim()
    pylab.xticks([])
    pylab.axes([0.1, 0.1, 0.85, 0.17])
    t_bins = numpy.arange(numpy.amin(times), numpy.amax(times), hist_binwidth)
    if len(t_bins) == 0:
        pylab.close()
        return "t_bins for {0} is empty".format(name)
    n, bins = pylab.histogram(times, bins=t_bins)
    num_neurons = len(numpy.unique(gids))
    heights = (1000 * n / (hist_binwidth * num_neurons))
    # FixMe t_bins[:-1] should work without cutting the end value
    pylab.bar(t_bins[:-1],
              heights,
              width=hist_binwidth,
              color=color_bar,
              edgecolor=color_edge)
    pylab.yticks(
        [int(a) for a in numpy.linspace(0.0,
                                        int(max(heights) * 1.1) + 5, 4)])
    pylab.ylabel("Rate (Hz)")
    pylab.xlabel("Time (ms)")
    pylab.grid(True)
    pylab.axes(location)
    pylab.title(name)
    pylab.xlim(xlim)
    pylab.draw()
    pylab.savefig("{0}/{1}.png".format(path, name), dpi=120, format='png')
    pylab.close()

    successed += 1
    return "OK"
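The rate conversion used in these spike diagrams is heights = 1000 * n / (hist_binwidth * num_neurons): with hist_binwidth = 5.0 ms, 20 recorded neurons and 10 spikes falling into a bin, the bar height is 1000 * 10 / (5 * 20) = 100 Hz, i.e. the average per-neuron firing rate in that time bin.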
Example #26
    def get_bcc_pz(self,filename_lenscat):

        if self.prob_z == None:


            # filename_lenscat = os.environ['HOME'] + '/data/BCC/bcc_a1.0b/aardvark_v1.0/lenscats/s2n10cats/aardvarkv1.0_des_lenscat_s2n10.351.fit'
            # filename_lenscat = os.environ['HOME'] + '/data/BCC/bcc_a1.0b/aardvark_v1.0/lenscats/s2n10cats/aardvarkv1.0_des_lenscat_s2n10.351.fit'
            lenscat = tabletools.loadTable(filename_lenscat)

            if 'z' in lenscat.dtype.names:
                self.prob_z , _  = pl.histogram(lenscat['z'],bins=self.grid_z_edges,normed=True)
            elif 'z-phot' in lenscat.dtype.names:
                self.prob_z , _  = pl.histogram(lenscat['z-phot'],bins=self.grid_z_edges,normed=True)

            if 'e1' in lenscat.dtype.names:
                self.sigma_ell = np.std(lenscat['e1'],ddof=1)
Example #27
def spikecorr_tri_conv5(filename, bin=5., maxtime=10000., path='./'):
    data = numpy.loadtxt(path + filename)
    cor5 = []
    for j in range(5):
        z1 = pylab.histogram(
            data[data[:, 1] == 0., 0],
            numpy.arange(j * maxtime / 5., (j + 1) * maxtime / 5. + 0.1, 0.1))
        z2 = pylab.histogram(
            data[data[:, 1] == 1., 0],
            numpy.arange(j * maxtime / 5., (j + 1) * maxtime / 5. + 0.1, 0.1))
        x1 = miscfunc.tri_convolve(z1[0], bin * 10)
        x2 = miscfunc.tri_convolve(z2[0], bin * 10)
        cor5.append(miscfunc.corrcoef(x1, x2))
    cor = pylab.mean(cor5)
    sc = pylab.std(cor5)
    return cor, sc
Example #28
def plot_phases(in_file, plot_type, plot_log):
    flags = ['histogram','phases']
    plot_flag = 0
    log_flag = 0

    def no_log(x):
        return x

    fig = pylab.figure(1)
    ax = fig.add_subplot(111)

    try:
        img = spimage.sp_image_read(in_file,0)
    except:
        raise IOError("Can't read %s." % in_file)

    values = img.image.reshape(pylab.size(img.image))

    if plot_log:
        log_function = pylab.log
    else:
        log_function = no_log

    if plot_type == PHASES:
        hist = pylab.histogram(pylab.angle(values),bins=500)
        ax.plot((hist[1][:-1]+hist[1][1:])/2.0,log_function(hist[0]))
    elif plot_flag == HISTOGRAM:
        hist = pylab.histogram2d(pylab.real(values),pylab.imag(values),bins=500)
        ax.imshow(log_function(hist[0]),extent=(hist[2][0],hist[2][-1],-hist[1][-1],-hist[1][0]),interpolation='nearest')
    else:
        ax.plot(pylab.real(values),pylab.imag(values),'.')
    return fig
Example #29
def plot_phases(in_file, plot_type, plot_log):
    plot_flag = 0

    def no_log(x):
        return x

    fig = pylab.figure(1)
    ax = fig.add_subplot(111)

    try:
        img = spimage.sp_image_read(in_file, 0)
    except IOError:
        raise IOError("Can't read %s." % in_file)

    values = img.image.reshape(pylab.size(img.image))

    if plot_log:
        log_function = pylab.log
    else:
        log_function = no_log

    if plot_type == PHASES:
        hist = pylab.histogram(pylab.angle(values), bins=500)
        ax.plot((hist[1][:-1] + hist[1][1:]) / 2, log_function(hist[0]))
    elif plot_flag == HISTOGRAM:
        hist = pylab.histogram2d(pylab.real(values),
                                 pylab.imag(values),
                                 bins=500)
        ax.imshow(log_function(hist[0]),
                  extent=(hist[2][0], hist[2][-1], -hist[1][-1], -hist[1][0]),
                  interpolation='nearest')
    else:
        ax.plot(pylab.real(values), pylab.imag(values), '.')
    return fig
Example #30
def simulate_rule(Energies, Nsteps):
    Nstates = len(Energies)
    statesOverTime = np.zeros(Nsteps)
    statesOverTime[0] = np.random.randint(0, Nstates)
    for i in np.arange(Nsteps - 1):
        currentState = statesOverTime[i].astype(int)
        newState = currentState.astype(int)
        while newState == currentState:
            newState = np.random.randint(0, Nstates)

        move = False
        move = BoltzmannRule(currentState, newState)

        if (move):
            statesOverTime[i + 1] = newState
        else:
            statesOverTime[i + 1] = currentState

    bins = np.arange(Nstates + 1) + 0.5
    y, dummy = plt.histogram(statesOverTime + 1, bins=bins, normed=True)
    plt.bar(np.arange(len(energies)) + 0.9,
            y,
            width=0.4,
            fc='blue',
            alpha=0.5,
            label='sampledByRule')
    return statesOverTime
Example #31
def hist(x,nbins=100):

    vals,bin_edges=pylab.histogram(x,nbins)
    bins=(bin_edges[1:]+bin_edges[:-1])/2
    dx=bins[1]-bins[0]
    vals=1.0*vals/sum(vals)/dx

    return bins,vals
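A quick check of the normalisation performed by hist above (counts divided by their total and by the bin width, i.e. a probability density): assuming the function and its pylab import are in scope, the returned values should integrate to roughly one.

import numpy as np

x = np.random.randn(10000)
bins, vals = hist(x, nbins=100)
area = np.sum(vals) * (bins[1] - bins[0])   # ~ 1.0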
Example #32
    def get_bcc_pz(self):

        if self.prob_z == None:


            filename_lenscat = os.environ['HOME'] + '/data/BCC/bcc_a1.0b/aardvark_v1.0/lenscats/s2n10cats/aardvarkv1.0_des_lenscat_s2n10.351.fit'
            lenscat = tabletools.loadTable(filename_lenscat)
            self.prob_z , _  = pl.histogram(lenscat['z'],bins=self.grid_z_edges,normed=True)
Example #33
	def getTimeHistogramm(self, color, name):
		diffs = plt.diff(self.times)
		## compute standard histogram
		y,x = plt.histogram(diffs, bins=plt.linspace(diffs.min(), diffs.max(), 500))

		## notice that len(x) == len(y)+1
		## We are required to use stepMode=True so that PlotCurveItem will interpret this data correctly.
		curve = pg.PlotCurveItem(x, y, stepMode=True, fillLevel=0, pen=color, brush=color, name=name)
		return curve
Example #34
    def update_histogram_plt_data(self, histogram_save_data):
        self.hist_data = histogram_save_data
        bins = (self.Gbins, self.Dbins)

        G_breaking = histogram_save_data.get_G_breaking()
        D_breaking = histogram_save_data.get_D_breaking()
        G_making = histogram_save_data.get_G_making()

        break_histogram = pl.histogram(G_breaking, self.Gbins)
        make_histogram = pl.histogram(G_making, self.Gbins)

        self.histo1D_breaking_sum = self.histo1D_breaking_sum + break_histogram[
            0]
        self.histo1D_making_sum = self.histo1D_making_sum + make_histogram[0]

        break_histogram2D = pl.histogram2d(G_breaking, D_breaking, bins)
        self.histo2D_breaking_sum = self.histo2D_breaking_sum + break_histogram2D[
            0]
Example #35
def calculate_activity_histogram(spikes, total_neurons, bin=0.1):
    """Calculates histogram and bins specifically for neurons.

    Bins are provided in seconds instead of milliseconds."""
    hist, bin_edges = pylab.histogram(
        [spike[0] for spike in spikes],
        bins=pylab.ceil(max([spike[0] for spike in spikes])/bin))
    bin_edges = pylab.delete(bin_edges, len(bin_edges)-1) / 1000.
    return [[float(i)/total_neurons for i in hist], bin_edges]
Example #36
def histeq(im, nbr_bins = 256):
	""" Histogram equalization of a grayscale image. """
	# get image histogram
	imhist, bins = pl.histogram(im.flatten(), nbr_bins, normed = True)
	cdf = imhist.cumsum()  # cumulative distribution function
	cdf = 255 * cdf / cdf[-1]  # normalize
	# use linear interpolation of cdf to find new pixel values
	im2 = pl.interp(im.flatten(), bins[:-1], cdf)
	return im2.reshape(im.shape)
Example #37
def fit_gauss(ph):
    for i in range(4):
        # determine the counts and the bin size over a range of 0 through 1000 ADC
        y, bins =  histogram(ph[i], bins=500, range=[0, 1000]) # loop over each plate
        x = (bins[:-1] + bins[1:])/2.  # note: take the bins to find the center of each bin
        
        if y[np.random.random_integers(200, 300)] != 0 : # check whether the detector data is reliable, since plate 4 of 503 contained no good data 3-01-2013
            # following Bartels (2012) we choose ADC values between 150 and 410 for the Gaussian fit; schat gives the elements between which to fit.
            schat = np.where((x > 150) & (x < 600)) # returns a two-part array
            x1 = x[schat[0]] # the values of the elements found with the selection above, as the first element
            # the above can also be done with x1 = fit_xa[np.where(x>150) & (x< 420)]
            y1 = y[schat] # determination of the count in the measurement
            max_min = ndimage.extrema(y1)
            max_y = ndimage.extrema(y1)[1] # maximum count y value (the peak of the Gaussian!)
        
            min_x = max_min[0]  # the lowest value of the count
            max_x = max_min[1]  # highest value of the count -> the a value in the Gaussian fit
            b_temp = max_min[3]
            b_place = b_temp[0] # b value
            b = x1[b_place] # the b value of the Gaussian curve
        
            bound1 = max_x - (max_x - min_x)*0.75 
        
            if (max_x- min_x) <= 50:
                bound2 = max_x + (max_x - min_x)*1.5
            else:
                bound2 = max_x + (max_x - min_x)
        
            x2 = x1.compress((bound1 <= x1) & (x1 < bound2))
            y2 = y1.compress((bound1 <= x1) & (x1 < bound2))
        
              
            #popt, pcov = curve_fit(func1, x1, y1, [200, 250, 50] ) # working fit with a manual guess; small differences, of order 0.2
            popt, pcov = curve_fit(func1, x1, y1, [max_x, b, 20] ) # fit using the estimated guess also works
        
            pylab.plot(func1(range(0,600), *popt), '--')
            peak = popt[1]
        
            if i == 0:
                MIP1 = peak
                MPV1.append(MIP1)  # when the loop is repeated for another station, does this line append in the right way?
            elif i == 1:
                MIP2 = peak
                MPV2.append(MIP2)
            elif i == 2:
                MIP3 = peak
                MPV3.append(MIP3)
            elif i == 3:
                MIP4 = peak
                MPV4.append(MIP4)
        
            print 'The MPV of detector',i + 1, 'lies at', peak, 'ADC'
        
        else:
            
            print 'The data of the detector ',i + 1, 'could not be fitted to a gauss curve'
Example #38
 def SPOC_hist(self, deltacr=0.5, cat=[0,0.1,1/3.,0.5,2/3.,0.9,1], cat_percent=True, crpol=False, return_val=False):
     "Draws an histogram for planet visibility"
     if not ((hasattr(self,'cr') or hasattr(self,'cr_rel')) and (hasattr(self,'sep') or hasattr(self,'sep_rel'))):
         print "Contrast ratio or Separation is unknown, cannot process SPOC diagram."
         return
     if crpol and not hasattr(self,'pol'):
         print "No polarization output available, switching to intensity contrast ratio."
     color = ['w','#FFDD78','#7878FF','#D26E23','#AAFCED','k']
     hatch = ['','','','/','/']
     coloredge = ['k','k','k','k','k']
     pTimeMarkers=c_planet(fr=np.linspace(0,1,1001)[:-1], t=getattr(self,'t',None), a=getattr(self,'a',None), e=getattr(self,'e',None), i=getattr(self,'i',None), w=getattr(self,'w',None), tperi=getattr(self,'tperi',None), distance=getattr(self,'distance',None), radius=getattr(self,'radius',None), albedo_scat=getattr(self,'albedo_scat',None), albedo_geo=getattr(self,'albedo_geo',None), o=getattr(self,'o',None))
     pTimeMarkers.compute(silent=True)
     cat=np.sort(cat)[::-1]
     sep = getattr(self,'sep',0)+getattr(self,'sep_rel',0)
     if crpol:
         cr = np.log10(getattr(self,'crpol',0)+getattr(self,'crpol_rel',0))
         xlabel='Polarized contrast ratio [log10]'
     else:
         cr = np.log10(getattr(self,'cr',0)+getattr(self,'cr_rel',0))
         xlabel='Contrast ratio [log10]'
     sepmax = sep.max()*cat_percent+1*(not cat_percent)
     if cat[0]<sep.max(): cat = np.r_[sep.max(),cat] ###
     if cat[-1]>sep.min(): cat = np.r_[cat,0] ###
     bins = np.linspace(np.floor(cr.min()/deltacr)*deltacr, np.ceil(cr.max()/deltacr)*deltacr, np.ceil(cr.max()/deltacr)-np.floor(cr.min()/deltacr)+1)
     norma = 100./cr.size
     bottom = np.zeros(bins.size-1)
     fig = plt.figure()
     values=[]
     for i in range(cat.size):
         hh = cr[sep/sepmax>cat[i]]
         if return_val: values.append(plt.histogram(hh, bins=bins)[0]*norma)
         if hh.size!=0:
             a = plt.histogram(hh, bins=bins)[0]
             p = plt.bar((bins[:-1]+deltacr/2.)[a>0], a[a>0]*norma, deltacr/2., hatch=hatch[(i-1)%np.size(hatch)], color=color[(i-1)%np.size(color)], edgecolor=coloredge[(i-1)%np.size(coloredge)], bottom=bottom[a>0], label=str(round(cat[i]*sepmax,3)) + "-" + str(round(cat[i-1]*sepmax,3)))
             bottom = bottom+a*norma
             cr = cr[sep/sepmax<=cat[i]]
             sep = sep[sep/sepmax<=cat[i]]
     plt.xticks(bins+deltacr/4., np.round(bins,3))
     plt.legend(title="Separation in [arcsec]", loc=0, fancybox=True, shadow=True, prop={'size':12})
     plt.xlabel(xlabel)
     plt.ylabel('Duration [% of Period]')
     plt.show()
     if return_val: return bins[:-1]+deltacr/2., cat*sepmax, values
Example #39
def make_scatter_inputs(yvals, xvals,therange, nbins=10):
    if len(yvals) != len(xvals):
        print  len(yvals), ' does not equal ', len(xvals)
    
    vals, thebins = pylab.histogram(xvals, weights=yvals, bins=nbins,range=therange)
    vals_sq, thebins =  pylab.histogram(xvals, weights=yvals*yvals, bins=nbins ,range=therange)
    vals_n, thebins =  pylab.histogram(xvals, bins=nbins,range=therange)
    
    val_errs = numpy.sqrt((vals_sq/vals_n) - (vals/vals_n)*(vals/vals_n))/numpy.sqrt(vals_n) 

    bincenters=[]
    binerrs=[]
    # print 'The Bins = ', thebins
    for k in range(len(thebins)-1):
        bincenters.append((thebins[k]+thebins[k+1])/2.)
        binerrs.append((thebins[k+1]-thebins[k])/2.)
        
    # print 'bincenters = ',bincenters
    return bincenters, vals/vals_n, binerrs, val_errs
Example #40
    def plotpdf(self,x=None,xmin=None,alpha=None,nbins=50,dolog=True,dnds=False,
            drawstyle='steps-post', histcolor='k', plcolor='r', **kwargs):
        """
        Plots PDF and powerlaw.

        kwargs is passed to pylab.hist and pylab.plot
        """
        if not(x): x=self.data
        if not(xmin): xmin=self._xmin
        if not(alpha): alpha=self._alpha

        x=numpy.sort(x)
        n=len(x)

        pylab.gca().set_xscale('log')
        pylab.gca().set_yscale('log')

        if dnds:
            hb = pylab.histogram(x,bins=numpy.logspace(log10(min(x)),log10(max(x)),nbins))
            h = hb[0]
            b = hb[1]
            db = hb[1][1:]-hb[1][:-1]
            h = h/db
            pylab.plot(b[:-1],h,drawstyle=drawstyle,color=histcolor,**kwargs)
            #alpha -= 1
        elif dolog:
            hb = pylab.hist(x,bins=numpy.logspace(log10(min(x)),log10(max(x)),nbins),log=True,fill=False,edgecolor=histcolor,**kwargs)
            alpha -= 1
            h,b=hb[0],hb[1]
        else:
            hb = pylab.hist(x,bins=numpy.linspace((min(x)),(max(x)),nbins),fill=False,edgecolor=histcolor,**kwargs)
            h,b=hb[0],hb[1]
        # plotting points are at the center of each bin
        b = (b[1:]+b[:-1])/2.0

        q = x[x>=xmin]
        px = (alpha-1)/xmin * (q/xmin)**(-alpha)

        # Normalize by the median ratio between the histogram and the power-law
        # The normalization is semi-arbitrary; an average is probably just as valid
        plotloc = (b>xmin)*(h>0)
        norm = numpy.median( h[plotloc] / ((alpha-1)/xmin * (b[plotloc]/xmin)**(-alpha))  )
        px = px*norm

        plotx = pylab.linspace(q.min(),q.max(),1000)
        ploty = (alpha-1)/xmin * (plotx/xmin)**(-alpha) * norm

        #pylab.loglog(q,px,'r',**kwargs)
        pylab.loglog(plotx,ploty,color=plcolor,**kwargs)

        axlims = pylab.axis()
        pylab.vlines(xmin,axlims[2],max(px),colors=plcolor,linestyle='dashed')

        pylab.gca().set_xlim(min(x),max(x))
Example #41
    def get_kl(x):
        
        snr = np.sqrt(arr_normsq*(x**2)) / noise_std     
        h_sim, b_sim = pl.histogram(snr,bins=plotstools.get_bins_edges(b_des[b_des>min_snr_to_use],constant_spacing=True),normed=True)
        h_sim = h_sim/sum(h_sim)

        h_des_use = h_des[b_des>min_snr_to_use]
        KL_divergence = -sum(  h_des_use * np.log(h_sim)  ) + sum( h_des_use * np.log(h_des_use) )

        print 'KL_divergence=' , KL_divergence , 'scale=' , x
        return KL_divergence
Example #42
def histeq(im, numbins=256):
    """ Histogram equalization of grayscale image. """
    # Get image histogram
    hist, bins = plt.histogram(im.flatten(), numbins, normed=True)
    cdf = hist.cumsum()  # Cumulative distribution function
    cdf = 255 * cdf / cdf[-1]

    # User linear interpolation of cdf to find new pixel values
    im2 = np.interp(im.flatten(), bins[:-1], cdf)

    return im2.reshape(im.shape), cdf
Example #43
def testCollisionPr(n, d=24):
    M = [
        array([[gauss(0, 1) / (24**.5) for a in range(24)] for b in range(d)])
        for k in range(4)
    ]
    #M =[eye(24)]#[array([[gauss(0,1)/(24**.5) for a in range(24)] for b in range(d)])]
    S = [0.0] * n
    C = [0] * n
    print "data generated"
    #generate distances and buckets
    i = 0

    while i < n:

        p = array([(random()) for j in xrange(d)])

        q = array([p[j] + (gaussInv(3.0, 1) / (d**.5)) for j in xrange(d)])
        #q =array( [p[j] + (gauss(0.0,1.0)/(d**.5)) for j in xrange(d)])
        #q =array( [p[j] + (( random() -.5)/(d**.5)) for j in xrange(d)])
        S[i] = distance(p, q, d)
        j = 0
        m = 2

        while j < len(M) / m:
            C[i] = C[i] or decodeGt24(p, M[j * m:(j + 1) * m], 0.0,
                                      1.0) == decodeGt24(
                                          q, M[j * m:(j + 1) * m], -0.0, 1.0)
            j = j + 1

        i = i + 1

    ranges = histogram(S, 25)[1]
    bucketsCol = [0] * len(ranges)
    bucketsDis = [0] * len(ranges)

    #fill buckets with counts

    for i in xrange(n):
        k = len(ranges) - 1
        while S[i] < ranges[k]:
            k = k - 1
        if C[i]: bucketsCol[k] = bucketsCol[k] + 1
        bucketsDis[k] = bucketsDis[k] + 1

    print bucketsDis + bucketsCol
    print ranges

    p = [0] * len(ranges)
    for m in range(len(ranges)):
        if bucketsDis[m] > 0:
            p[m] = bucketsCol[m] / float(bucketsDis[m])

    plot(ranges, p)
Example #44
def simulate_rule(stateEnergies, Nsteps):
    # set physical constants
    k = 8.6e-5
    T = 100.0

    # check how many states were provided
    Nstates = len(stateEnergies)

    # Create an array to show were we were (which states we were in)
    # throughout the simulation
    statesOverTime = np.zeros(Nsteps)

    # Make the first state random by using the randint()-function
    statesOverTime[0] = np.random.randint(0, Nstates)

    # Now perform the simulation, by looping thourhg all the steps
    for i in np.arange(Nsteps - 1):

        # Randomize a new state until we get one which is
        # different from the one we are allready in
        currentState = statesOverTime[i].astype(int)
        newState = currentState.astype(int)
        while newState == currentState:
            newState = np.random.randint(0, Nstates)

        # Make a random number between 0 and 1
        chance = np.random.rand()

        #should we move?
        move = False
        #move=PrimeRule(newState)
        move = BoltzmannRule(stateEnergies[currentState],
                             stateEnergies[newState], k, T)

        if (move):
            # move to the proposed state
            statesOverTime[i + 1] = newState
        else:
            # set the next state to be the same as the current
            statesOverTime[i + 1] = currentState

    # Let's plot the resulting disitrbution in blue
    bins = np.arange(Nstates + 1) + 0.5
    y, dummy = plt.histogram(statesOverTime + 1, bins=bins, normed=True)
    plt.bar(np.arange(len(energies)) + 0.9,
            y,
            width=0.4,
            fc='blue',
            alpha=0.5,
            label='sampledByRule')

    # Let's also return the array so that we can plot more things if we want to
    return statesOverTime
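BoltzmannRule is not shown in this snippet; a plausible Metropolis-style acceptance rule matching the call BoltzmannRule(E_current, E_new, k, T) could look like the sketch below (an assumption, not the original author's implementation):

import numpy as np

def BoltzmannRule(E_current, E_new, k, T):
    # Always accept moves that lower the energy; accept uphill moves
    # with probability exp(-(E_new - E_current) / (k * T)).
    if E_new <= E_current:
        return True
    return np.random.rand() < np.exp(-(E_new - E_current) / (k * T))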
Example #45
def histeq(img, nb_bins=256):
    """ Histogram equalization of a grayscale image."""

    # get image histogram
    img_hist, bins = pl.histogram(img.flatten(), nb_bins, normed=True)
    cdf = img_hist.cumsum()  # cumulative distribution function
    cdf = 255 * cdf / cdf[-1]  # normalize

    # use linear interpolation of cdf to find new pixel values
    img2 = pl.interp(img.flatten(), bins[:-1], cdf)

    return img2.reshape(img.shape), cdf
Example #46
    def sizeHistogram(self, plot=True):
        '''
		Simplifies the process of displaying histograms of payload sizes. This is a useful way to classify packets.
		If plot = True, it will display the histogram, else it will return the tuple ( counts, bins ).
		
		If you want finer grained control just import pylab as pl and get cracking!
		'''
        if plot:
            pl.hist(self.size())
            pl.show()
        else:
            return pl.histogram(self.size())
Example #47
def draw_line_hist(data,bins=20,linewidth=3,color=None,normed=False,fig=1):
    '''draws an outline histogram, returns values,bins
    '''

    pylab.figure(fig)
    values,bins = pylab.histogram(data,bins=bins,new=True,normed=normed)
    if color:
        pylab.plot(bins[:-1],values,color,drawstyle='steps-post',linewidth=linewidth)
    else:
        pylab.plot(bins[:-1],values,drawstyle='steps-post',linewidth=linewidth)        
              
    return values,bins
Example #48
    def plot_layer_histogram(self, layer_name, filename):
        """ Draw a plot of the given layer.
        """
        layer = self._get_layer_securely(layer_name)

        # Calculate the histogram:
        histogram, bin_edges = pylab.histogram(layer, 50, normed=True)

        # Make a plot based on the histogram:
        pylab.plot(histogram)
        pylab.grid(True)
        pylab.title(layer_name.capitalize() + ' layer histogram plot.')
        pylab.savefig(filename)
Example #49
    def plot_layer_histogram(self, layer_name, filename):
        """ Draw a plot of the given layer.
        """
        layer = self._get_layer_securely(layer_name)

        # Calculate the histogram:
        histogram, bin_edges = pylab.histogram(layer, 50, normed=True)

        # Make a plot based on the histogram:
        pylab.plot(histogram)
        pylab.grid(True)
        pylab.title(layer_name.capitalize() + ' layer histogram plot.')
        pylab.savefig(filename)
Example #50
def equalize(image, bins=256):
    ''' Given an image, equalize it based on the generated
    histogram so all the intensities are balanced.

    :param image: The image to equalize
    :param bins: The number of bins for the histogram
    :returns: The equalized image
    '''
    im_hist, bins = pl.histogram(image.flatten(), bins, normed=True)
    cdf = im_hist.cumsum()                               # cumulative distribution function
    cdf = 255 * cdf / cdf[-1]                            # normalize
    im_norm = pl.interp(image.flatten(), bins[:-1], cdf) # linear interpolate new pixel values
    return im_norm.reshape(image.shape)                  # restore to the correct shape
Example #51
    def get_kl_sigma(sigma):
        
        snr = np.sqrt(arr_normsq) / sigma     
        h_sim, b_sim = pl.histogram(snr,bins=plotstools.get_bins_edges(b_des[b_des>min_snr_to_use],constant_spacing=True),normed=True)
        h_sim = h_sim/sum(h_sim)
        # pl.plot(b_des,h_sim)
        # pl.show()

        h_des_use = h_des[b_des>min_snr_to_use]
        KL_divergence = -sum(  h_des_use * np.log(h_sim)  ) + sum( h_des_use * np.log(h_des_use) )

        print 'KL_divergence=' , KL_divergence , 'sigma=' , sigma
        return KL_divergence
Example #52
def spike_make_diagram(ts, gids, name, title, hist):
    pylab.figure()
    color_marker = "."
    color_bar = "blue"
    color_edge = "black"
    ylabel = "Neuron ID"

    if hist == "True":
        #TODO this part doesn't work! Trying to fix
        hist_binwidth = 5.0
        ts1 = ts
        neurons = gids

        ax1 = pylab.axes([0.1, 0.3, 0.85, 0.6])
        pylab.plot(ts1, gids, color_marker)
        pylab.ylabel(ylabel)
        pylab.xticks([])
        xlim = pylab.xlim()

        pylab.axes([0.1, 0.1, 0.85, 0.17])
        t_bins = numpy.arange(numpy.amin(ts), numpy.amax(ts), hist_binwidth)
        n, bins = pylab.histogram(ts, bins=t_bins)
        num_neurons = len(numpy.unique(neurons))
        print "num_neurons " + str(num_neurons)
        heights = 1000 * n / (hist_binwidth * num_neurons)
        print "t_bins " + str(len(t_bins)) + "\n" + str(t_bins) + "\n" + \
               "height " + str(len(heights)) + "\n" + str(heights) + "\n"
        #bar(left,height, width=0.8, bottom=None, hold=None, **kwargs):
        pylab.bar(t_bins,
                  heights,
                  width=hist_binwidth,
                  color=color_bar,
                  edgecolor=color_edge)
        pylab.yticks([
            int(a) for a in numpy.linspace(0.0,
                                           int(max(heights) * 1.1) + 5, 4)
        ])
        pylab.ylabel("Rate (Hz)")
        pylab.xlabel("Time (ms)")
        pylab.xlim(xlim)
        pylab.axes(ax1)
    else:
        pylab.plot(ts, gids, color_marker)
        pylab.xlabel("Time (ms)")
        pylab.ylabel(ylabel)

    pylab.title(title)
    pylab.draw()
    pylab.savefig(path + name + ".png", dpi=dpi_n, format='png')
    pylab.close()
Example #53
def plotpdf(x=None,xmin=None,alpha=None,nbins=50,dolog=True,dnds=False,**kwargs):
    """
    Plots PDF and powerlaw.
    """
    #if not(x): x=self.data
    #if not(xmin): xmin=self._xmin
    #if not(alpha): alpha=self._alpha
    import numpy,pylab
    import numpy.random as npr
    from numpy import log,log10,sum,argmin,argmax,exp,min,max

    x=numpy.sort(x)
    n=len(x)

    pylab.gca().set_xscale('log')
    pylab.gca().set_yscale('log')

    if dnds:
        hb = pylab.histogram(x,bins=numpy.logspace(log10(min(x)),log10(max(x)),nbins))
        h = hb[0]
        b = hb[1]
        db = hb[1][1:]-hb[1][:-1]
        h = h/db
        pylab.plot(b[:-1],h,drawstyle='steps-post',color='k',**kwargs)
        #alpha -= 1
    elif dolog:
        hb = pylab.hist(x,bins=numpy.logspace(log10(min(x)),log10(max(x)),nbins),log=True,fill=False,edgecolor='k',**kwargs)
        alpha -= 1
        h,b=hb[0],hb[1]
        pylab.hold(False)
        pylab.loglog(b[1:],h,'o',mfc="None",mec='blue')
        pylab.hold(True)
    else:
        hb = pylab.hist(x,bins=numpy.linspace((min(x)),(max(x)),nbins),fill=False,edgecolor='k',**kwargs)
        h,b=hb[0],hb[1]
    b = b[1:]

    q = x[x>=xmin]
    px = (alpha-1)/xmin * (q/xmin)**(-alpha)

    arg = argmin(abs(b-xmin))
    plotloc = (b>xmin)*(h>0)
    norm = numpy.median( h[plotloc] / ((alpha-1)/xmin * (b[plotloc]/xmin)**(-alpha))  )
    px = px*norm

    pylab.loglog(q,px,'r',label=('$x_{min}=%.1f$, $\\alpha=%.2f$'%(xmin,alpha)),**kwargs)
    pylab.legend()
    #pylab.vlines(xmin,0.1,max(px),colors='r',linestyle='dashed')
    
    pylab.gca().set_xlim(min(x),max(x))
Example #54
def fitgauss(hulp, station, verschil):
    y, bins =  histogram(hulp, bins=np.arange(-41.25e-9, 41.25e-9, 2.5e-9), range=[-40e-9, 40e-9]) 
    x = (bins[:-1] + bins[1:])/2.
    N = max(y)
    
    f = lambda x, N, mu, sigma: N * scipy.stats.norm.pdf(x, mu, sigma)
    
    popt, pcov = curve_fit(gauss, x, y, [N * 1e-9, 0., 1e-9])
    xx = linspace(min(x), max(x), 1000)  
    versch = popt[1]
    waarden = 1e9*versch
    print 'for station %d and difference %s' %(station, verschil)
    print 'the shift is %f'% waarden
    print 80*'-'
Example #55
def isi_histogram(timestamps,
                  start_time=0,
                  zero_times=0,
                  end_time=None,
                  window_len=1,
                  range=.2,
                  nbins=11):
    """Given the time stamps compute the isi histogram with a jumping window.
  Inputs:
    timestamps - the spike timestamps
    start_time - time rel to zero_time we end our windows (needs to be <= 0).
                 If zero, means no pre windows.
                 If None, means prewindows stretch to begining of data
                 The start_time is extended to include an integer number of windows
    zero_times  - reference time. Can be a zx1 array, in which case will give us an array of windows. If scalar
                 will only give one set of windows.
    end_time   - time rel to zero_time we end our windows (needs to >= 0)
                 If zero, means no post-windows
                 If None, means post-windows stretch to end of data
                 The end_time is extended to include an integer number of windows
    window_len - length of window to look at isi (in same units as time stamps)
    range - maximum isi
    nbins - number of bins in the histogram
  Outputs:
    t - time vector
    be - bin edges
    isihist - the histogram matrix. Normalized for each time slice

  (Can be plotted by doing pylab.pcolor(t, be, isihist.T,vmin=0,vmax=1, cmap=pylab.cm.gray_r))
  """
    window_edges, windows, subwindows = window_spike_train(
        timestamps, start_time, zero_times, end_time, window_len=window_len)
    isi = pylab.diff(timestamps)

    if windows.shape[1]:
        windows[:, -1, 1] -= 1  #we have one less isi sample than timestamps

    bins = pylab.linspace(0, range, nbins + 1)  #We are doing bin edges

    isihist = pylab.zeros((windows.shape[1], nbins))
    for m in xrange(windows.shape[0]):
        for n in xrange(windows.shape[1]):
            hist, be = pylab.histogram(isi[windows[m, n, 0]:windows[m, n, 1]],
                                       bins,
                                       density=True)
            isihist[n, :] += hist

    t = (window_edges[1:] + window_edges[:-1]) / 2
    return t, be, isihist / windows.shape[0]
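The core of isi_histogram is simply np.diff on the spike times followed by a density histogram per window. A stripped-down sketch for a single window (the windowing helper window_spike_train is not defined in this snippet):

import numpy as np

timestamps = np.sort(np.random.uniform(0., 10., 200))       # made-up spike times
isi = np.diff(timestamps)
counts, bin_edges = np.histogram(isi, bins=np.linspace(0, .2, 12), density=True)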
Example #56
def Histeq(img, nbr_bins=256):
    """
        Image histogram equalization
    :param img: the image
    :param nbr_bins: pixel value range [0~255]
    :return: the image after histogram equalization
    """
    # get the histogram p(r)
    imhist, bins = pl.histogram(img.flatten(), nbr_bins, normed=True)
    # get T(r)
    cdf = imhist.cumsum()  # cumulative distribution function
    cdf = 255 * cdf / cdf[-1]
    # get s, and replace the corresponding gray values of the original image with s
    result = pl.interp(img.flatten(), bins[:-1], cdf)
    return result.reshape(img.shape)
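For reference, the normed= keyword used by these pylab/numpy histogram calls has been replaced by density= in current numpy; a self-contained sketch of the same equalization on a synthetic grayscale image (not the original function) would be:

import numpy as np

img = np.random.randint(0, 256, size=(64, 64))           # synthetic grayscale image
imhist, bins = np.histogram(img.flatten(), 256, density=True)
cdf = imhist.cumsum()                                     # T(r), up to scaling
cdf = 255 * cdf / cdf[-1]                                 # map to [0, 255]
equalized = np.interp(img.flatten(), bins[:-1], cdf).reshape(img.shape)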
Example #57
def testCollisionsE8(n, d=24):

    M = [
        array([[gauss(0, 1) / (24**.5) for a in range(24)] for b in range(d)])
        for k in range(12)
    ]
    S = [0.0] * n
    C = [0] * n
    #generate distances and buckets
    i = 0
    while i < n:
        p = [random() for j in xrange(d)]
        q = [p[j] + (gauss(0, 1) / (d**.5)) for j in xrange(d)]

        S[i] = distance(p, q, d)
        j = 0
        m = 3

        while j < len(M) / m:
            C[i] = C[i] or decodeE8gt8(p, M[j * m:(j + 1) * m], 0.0,
                                       1.0) == decodeE8gt8(
                                           q, M[j * m:(j + 1) * m], 0.0, 1.0)
            j = j + 1
        i = i + 1

    ranges = histogram(S, 25)[1]
    bucketsCol = [0] * len(ranges)
    bucketsDis = [0] * len(ranges)

    #fill buckets with counts
    for i in xrange(n):
        k = len(ranges) - 1
        while S[i] < ranges[k]:
            k = k - 1
        if C[i]: bucketsCol[k] = bucketsCol[k] + 1
        bucketsDis[k] = bucketsDis[k] + 1

    print bucketsDis
    print bucketsCol
    print ranges

    p = [0] * len(ranges)
    for m in range(len(ranges)):
        if bucketsDis[m] > 0:
            p[m] = bucketsCol[m] / float(bucketsDis[m])

    plot(ranges, p)
Example #58
def histeq(im, nb_bins=256):
    """
    This function equalises the histogram of im (numpy array), distributing
    the pixels across nb_bins bins.
    Returns the equalised image as a numpy array (same shape as input).
    Largely copied from https://stackoverflow.com/a/28520445  
        (author: Trilarion, 17/07/2017)
    """
    # get image histogram
    imhist, bins = pl.histogram(im.flatten(), nb_bins, normed=True)
    cdf = imhist.cumsum() #cumulative distribution function
    cdf = 65535 * cdf / cdf[-1] #normalize
    
    # use linear interpolation of cdf to find new pixel values
    im2 = pl.interp(im.flatten(), bins[:-1], cdf)
    
    return im2.reshape(im.shape)
Example #59
    def test_histogram1d(self):

        h = histogram.Histogram1DFast(10, 0, 10)
        self.assertTrue((np.abs(h.bin_edges - np.arange(11)) < 1.0e-12).all())

        x = np.array([-1.0, 0.5, 3.2, 0.77, 9.99, 10.1, 8.2])
        h.update(x)

        xc = np.array([1.5, 2.5, 8.3])
        cc = np.array([10, 5, 22])
        h.update_counts(xc, cc)
        self.assertTrue(
            (h.bin_count == np.array([3, 10, 5, 1, 0, 0, 0, 0, 23, 2])).all())

        # check compute_indices
        self.assertTrue((h.compute_indices(np.arange(12) - 0.5) == np.array(
            [0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 9])).all())

        # benchmark
        x = np.random.randn(1e7)
        time1 = time.time()
        h = histogram.Histogram1DFast(100, -5, 5)
        h.update(x)
        time2 = time.time()
        out = np.histogram(x, bins=100, range=[-5, 5])
        time3 = time.time()

        print "Time for fast = " + str(time2 - time1) + " s"
        print "Time for numpy = " + str(time3 - time2) + " s"

        # check sampler
        t1 = time.time()
        samples = h.sample(3e6)
        t2 = time.time()
        print "Time to sample 1D for 3e6 = " + str(t2 - t1) + " s"

        # TODO: replace this "eye norm" with an actual norm
        (counts, edges) = plt.histogram(samples, 50, normed=True)
        centers = 0.5 * (edges[1:] + edges[0:-1])
        actual_pdf = 1.0 / np.sqrt(2.0 * 3.14159) * np.exp(-centers**2 / 2.0)
        fig = plt.figure(1)
        fig.clf()
        plt.plot(centers, counts, label="Sample")
        plt.plot(centers, actual_pdf, label="Actual")
        plt.legend()
        fig.show()