def main():
    x1 = 0.0
    N = 5000
    x1Values = np.zeros(N)
    x2Values = np.zeros(N)

    for i in range(N):
        x2 = updateX2(x1)
        x2Values[i] = x2
        x1 = updateX1(x2)
        x1Values[i] = x1

    #plot
    plt.hist(x1Values, bins=20, alpha=0.6, label='calc. marginal', normed=True)
    x = np.linspace(-3,5)
    plt.plot(x,mlab.normpdf(x,1.0,1.0), lw=3)
    plt.xlabel("Value")
    plt.ylabel("Frequency")
    plt.legend(loc='upper right')
    plt.savefig('px1')
    plt.clf()

    plt.hist(x2Values, bins=20, alpha=0.6, label='calc. marginal', normed=True)
    x = np.linspace(-3,5)
    plt.plot(x,mlab.normpdf(x,1.0,1.0), lw=3)
    plt.xlabel("Value")
    plt.ylabel("Frequency")
    plt.legend(loc='upper right')
    plt.savefig('px2')
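The updateX1 and updateX2 conditional samplers are not shown in this snippet. A minimal sketch of what they could look like, assuming the target is a bivariate normal whose marginals match the N(1, 1) curve overlaid on the histograms (the correlation value is an arbitrary choice for illustration):

import numpy as np

RHO = 0.5  # hypothetical correlation, not specified in the original snippet

def updateX2(x1, mu=1.0, sigma=1.0, rho=RHO):
    # Draw x2 from its conditional given x1 under a bivariate normal
    cond_mean = mu + rho * (x1 - mu)
    cond_sd = sigma * np.sqrt(1.0 - rho ** 2)
    return np.random.normal(cond_mean, cond_sd)

def updateX1(x2, mu=1.0, sigma=1.0, rho=RHO):
    # Symmetric conditional draw for x1 given x2
    cond_mean = mu + rho * (x2 - mu)
    cond_sd = sigma * np.sqrt(1.0 - rho ** 2)
    return np.random.normal(cond_mean, cond_sd)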
Example #2
def plot_score_distributions(threshold, neg_devel, pos_devel, neg_test, pos_test, filename='score_dist.png'):

    plt.clf()
    plt.figure(1)
    plt.subplot(211)
    plt.title("Score distributions (Deve set)")
    n, bins, patches = plt.hist(neg_devel, bins=25, normed=1, histtype='bar', label='Negative class')
    na, bins_a, patches_a = plt.hist(pos_devel, bins=25, normed=1, histtype='bar', label='Positive class')

    # add a line showing the expected distribution
    y = mlab.normpdf(bins, np.mean(neg_devel), np.std(neg_devel))
    plt.plot(bins, y, 'k--', linewidth=1.5)
    y = mlab.normpdf(bins_a, np.mean(pos_devel), np.std(pos_devel))
    plt.plot(bins_a, y, 'k--', linewidth=1.5)
    plt.axvline(x=threshold, linewidth=2, color='blue')
    plt.legend()

    plt.subplot(212)
    plt.title("Score distributions (Test set)")
    n, bins, patches = plt.hist(neg_test, bins=25, normed=1, facecolor='green', alpha=0.5, histtype='bar',
                                label='Negative class')
    na, bins_a, patches_a = plt.hist(pos_test, bins=25, normed=1, facecolor='red', alpha=0.5, histtype='bar',
                                     label='Positive class')

    # add a line showing the expected distribution
    y = mlab.normpdf(bins, np.mean(neg_test), np.std(neg_test))
    plt.plot(bins, y, 'k--', linewidth=1.5)
    y = mlab.normpdf(bins_a, np.mean(pos_test), np.std(pos_test))
    plt.plot(bins_a, y, 'k--', linewidth=1.5)
    plt.axvline(x=threshold, linewidth=2, color='blue')
    plt.legend()

    current_dir = os.getcwd()
    output = '{0}/{1}.png'.format(current_dir, filename)
    plt.savefig(output)
Example #3
def plot_bourgdata(N1,N2):
	A=TRICLAIRModele()
	Tb15 = A.get_data_triathlon(link='/triathlon-bourg-resultats-1996.htm',year=2015)
	Tb14 = A.get_data_triathlon(link='/triathlon-bourg-resultats-1715.htm',year=2014)	
	S15_ = map(lambda x: x.total_seconds()/60,Tb15['Scratch'].dropna())
	S14_ = map(lambda x: x.total_seconds()/60,Tb14['Scratch'].dropna())
	
	S15 = S15_[N1:N2]
	S14 = S14_[N1:N2]

	(mu14, sigma14) = norm.fit(S14)
	(mu15, sigma15) = norm.fit(S15)

	N_BINS = 50
	fig = plt.figure()
	ax = fig.add_subplot(1, 1, 1)
	n, bins, patches = ax.hist(S14, N_BINS,normed=1, facecolor='red', alpha=0.5,label=r'$\mathrm{2014:}\ \mu=%.3f,\ \sigma=%.3f$' %(mu14, sigma14))
	y = mlab.normpdf( bins, mu14, sigma14)
	l = ax.plot(bins, y, 'r-', linewidth=4)
	n, bins, patches = ax.hist(S15, N_BINS, normed=1, facecolor='green', alpha=0.5,label=r'$\mathrm{ 2015:}\ \mu=%.3f,\ \sigma=%.3f$' %(mu15, sigma15))
	y = mlab.normpdf( bins, mu15, sigma15)
	l = ax.plot(bins, y, 'g-', linewidth=4)

	fig.tight_layout()
	ax.set_xlabel('Scratch Time (minutes)')
	ax.set_ylabel('Number of athletes per scratch time (normalized)')
	ax.legend(loc='best', fancybox=True, framealpha=0.5)
	ax.set_title(r'$\mathrm{Athletes\ from\ rank\ } %d \mathrm{\ to\ } %d$' %(N1, N2))

	plt.show()
	
    def extract_coarse_coding_features_absolute(self, phone_duration):
        dur = int(phone_duration)

        cc_feat_matrix = numpy.zeros((dur, 3))

        npoints1 = (dur*2)*10+1
        npoints2 = (dur-1)*10+1
        npoints3 = (2*dur-1)*10+1

        x1 = numpy.linspace(-dur, dur, npoints1)
        x2 = numpy.linspace(1, dur, npoints2)
        x3 = numpy.linspace(1, 2*dur-1, npoints3)

        mu1 = 0
        mu2 = (1+dur)/2
        mu3 = dur
        variance = 1
        sigma = variance*((dur/10)+2)
        sigma1 = sigma
        sigma2 = sigma-1
        sigma3 = sigma

        y1 = mlab.normpdf(x1, mu1, sigma1)
        y2 = mlab.normpdf(x2, mu2, sigma2)
        y3 = mlab.normpdf(x3, mu3, sigma3)

        for i in range(dur):
            cc_feat_matrix[i,0] = y1[(dur+1+i)*10]
            cc_feat_matrix[i,1] = y2[i*10]
            cc_feat_matrix[i,2] = y3[i*10]

        for i in range(3):
            cc_feat_matrix[:,i] = cc_feat_matrix[:,i]/max(cc_feat_matrix[:,i])

        return cc_feat_matrix
Example #5
def naive_bayes(w1train,w2train,test):
	# prior
	n = w1train.shape[0]+w2train.shape[0]
	w_1 = w1train.shape[0] / float(n)
	w_2 = w2train.shape[0] / float(n)
	print('prior w1:', w_1)
	print('prior w2:', w_2)
	# likelihood
	mu_1, s_1 = gauss_mle_1d(w1train)
	mu_2, s_2 = gauss_mle_1d(w2train)
	post_1 = gaussian(test, mu_1, s_1)  # likelihood p(x|w1)
	post_2 = gaussian(test, mu_2, s_2)  # likelihood p(x|w2)
	print('p(x|w1) =', post_1)
	print('p(x|w2) =', post_2)
	p_1 = post_1 * w_1
	p_2 = post_2 * w_2
	print('unnormalized posterior class 1:', p_1)
	print('unnormalized posterior class 2:', p_2)
	print('p(w1|x) =', p_1 / (p_1 + p_2))
	print('p(w2|x) =', p_2 / (p_1 + p_2))
	x1 = np.linspace(-3,8,100)
	plt.title('Epic Info')
	plt.ylabel('Y axis')
	plt.xlabel('X axis')
	plt.plot(x1,mlab.normpdf(x1,mu_1,s_1),label='estimate class1')
	plt.plot(x1,mlab.normpdf(x1,mu_2,s_2),label='estimate class2')
	plt.legend()
	plt.text(-2,0.7,'class 1:%s\nclass2: %s'%(p_1,p_2))
	plt.plot(test,0,'o',label='test point')
	plt.show()
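The gauss_mle_1d and gaussian helpers called above are not part of this example. A minimal sketch of the standard definitions they appear to assume (maximum-likelihood mean and standard deviation, and the univariate normal density that mlab.normpdf also computes):

import numpy as np

def gauss_mle_1d(samples):
    # ML estimates for a 1-D Gaussian: sample mean and (biased) standard deviation
    samples = np.asarray(samples, dtype=float)
    return samples.mean(), samples.std()

def gaussian(x, mu, sigma):
    # Univariate normal density, equivalent to mlab.normpdf(x, mu, sigma)
    return np.exp(-0.5 * ((x - mu) / sigma) ** 2) / (sigma * np.sqrt(2.0 * np.pi))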
def get_prob_for_distributions(p):
    """
    Based on the integral of the three normal distributions,
    the likelihood from which of the three distributions a distance is to be drawn 
    is calculated here.
    Returns the three probabilities for the three distributions.
    """
    w1 = p[0]
    mu1 = p[1]
    sigma1 = p[2]
    w2 = p[3]
    mu2 = p[4]
    sigma2 = p[5]
    w3 = p[6]
    mu3 = p[7]
    sigma3 = p[8]
    dist_range = (0, 4.330310991999920844e+01)
    x = np.linspace(dist_range[0], dist_range[1], 1000)
    A1 = np.array(w1 * mlab.normpdf(x, mu1, sigma1)).sum()
    A2 = np.array(w2 * mlab.normpdf(x, mu2, sigma2)).sum()
    A3 = np.array(w3 * mlab.normpdf(x, mu3, sigma3)).sum()
    p1 = A1 / (A1 + A2 + A3)
    p2 = A2 / (A1 + A2 + A3)
    p3 = A3 / (A1 + A2 + A3)
    return p1, p2, p3
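Since all three components are summed over the same evenly spaced grid, the common bin width cancels in the ratios, so each p_k approximates the normalized probability mass of component k over dist_range. An equivalent closed-form sketch using the normal CDF (assumes scipy is available; the function name is made up):

from scipy.stats import norm

def get_prob_for_distributions_analytic(p, dist_range=(0, 4.330310991999920844e+01)):
    # Mass of each weighted component over dist_range via the normal CDF
    lo, hi = dist_range
    masses = []
    for k in range(3):
        w, mu, sigma = p[3 * k], p[3 * k + 1], p[3 * k + 2]
        masses.append(w * (norm.cdf(hi, mu, sigma) - norm.cdf(lo, mu, sigma)))
    total = sum(masses)
    return tuple(m / total for m in masses)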
def classify_2d(data_a, data_b, x):
    x1 = x[0]
    x2 = x[1]

    probability_a = data_a.shape[1] / float(data_a.shape[1] + data_b.shape[1])
    probability_b = data_b.shape[1] / float(data_a.shape[1] + data_b.shape[1])

    mean_x1_a = np.mean(data_a[0,:])
    mean_x2_a = np.mean(data_a[1,:])

    mean_x1_b = np.mean(data_b[0,:])
    mean_x2_b = np.mean(data_b[1,:])

    variance_x1_a = np.var(data_a[0,:])
    variance_x2_a = np.var(data_a[1,:])

    variance_x1_b = np.var(data_b[0,:])
    variance_x2_b = np.var(data_b[1,:])

    # mlab.normpdf expects a standard deviation, not a variance
    pd_x1_given_a = mlab.normpdf(x1, mean_x1_a, np.sqrt(variance_x1_a))
    pd_x2_given_a = mlab.normpdf(x2, mean_x2_a, np.sqrt(variance_x2_a))
    pd_x1_given_b = mlab.normpdf(x1, mean_x1_b, np.sqrt(variance_x1_b))
    pd_x2_given_b = mlab.normpdf(x2, mean_x2_b, np.sqrt(variance_x2_b))

    posterior_numerator_a = probability_a * pd_x1_given_a * pd_x2_given_a
    posterior_numerator_b = probability_b * pd_x1_given_b * pd_x2_given_b

    posterior_numerators = { 'A': posterior_numerator_a, 'B': posterior_numerator_b }

    return max(posterior_numerators, key=posterior_numerators.get)
def compute_costed_threshold(weight, thresholds, meanS1, sdS1, meanS2, sdS2):
    """Compute the costed threshold of two spike responses to
two independent stimuli.

    Args:
        weight: costed threshold multiplier
        thresholds: values to test for suitability
        meanS1: mean of firing rate distribution triggered by stimulus 1
        sdS1: standard deviation of firing rate distribution triggered by stimulus 1
        meanS2: mean of firing rate distribution triggered by stimulus 2
        sdS1: standard deviation of firing rate distribution triggered by stimulus 2

    Returns:
        neuronal firing tate as which to set optimum costed threshold"""

    opt_thresh = 0.0

    for threshold in thresholds:
        Ps1 = mlab.normpdf(threshold, meanS1, sdS1)
        Ps2 = mlab.normpdf(threshold, meanS2, sdS2)

        ratio_raw = Ps2 / Ps1  # likelihood ratio of stimulus 2 to stimulus 1
        ratio = round(ratio_raw, 1)

        print "Ps1 = %s, Ps2 = %s. Threshold = %s. Ratio raw = %s Weight = %s\n" % (Ps1, Ps2, threshold, ratio_raw, ratio)
        if ratio == weight:
            opt_thresh = threshold

    return opt_thresh
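A hypothetical usage sketch for this search (the means, standard deviations, and threshold grid are made-up values, and matplotlib.mlab is assumed to be imported as in the snippets above; a fine grid is needed because the rounded ratio must hit the weight exactly):

import numpy as np

candidate_thresholds = np.arange(30.0, 70.0, 0.05)
best = compute_costed_threshold(weight=2.0, thresholds=candidate_thresholds,
                                meanS1=40.0, sdS1=5.0, meanS2=60.0, sdS2=5.0)
print(best)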
Example #9
    def test_kernel_smoothing(self):
        # Qualitatively view kernel smoothed noisy Gaussian landscape

        # Make Normal distribution, and secondary smaller normal.
        realNorm = np.array([mlab.normpdf(i,40,10) for i in range(100)])
        realNorm = realNorm / np.max(realNorm)
        gaussBlip = np.array([mlab.normpdf(i,80,3) for i in range(100)])
        gaussBlip = gaussBlip / np.max(gaussBlip)
        signal = realNorm + (gaussBlip * 0.5)

        # Add noise.
        noise = np.random.random(100)        
        signal = (signal * noise) + (0.2 * noise)
        signal = signal / np.sum(signal)
        signal = np.concatenate((signal, signal))
        
        # Apply kernel smoothing.
        smoothGauss = analysis.force._kernel_smoothing(signal, 0.25)
        smootherGauss = analysis.force._kernel_smoothing(signal, 0.75)
        smoothestGauss = analysis.force._kernel_smoothing(signal, 0.98)

        # Compare kernel smoothed plot to noisy data plot.
        plot.plot(signal, 'k')
        plot.hold(True)
        plot.plot(smoothGauss, 'm--')
        plot.plot(smootherGauss, 'c--')
        plot.plot(smoothestGauss, 'r--')
        plot.hold(False)
        plot.show()
Example #10
File: gmm.py  Project: cqian/GMM
def gaussian_1d(data, Pi, means, sds, N, K):
	x = np.linspace(min(data), max(data), N)
	gmm = np.zeros_like(x)  # accumulate the weighted mixture components
	for i in range(len(Pi)):
		gmm += Pi[i] * mlab.normpdf(x, means[i], sds[i])

	plt.plot(x, gmm)
Example #11
def grafix1(VP,VPp,m,x,y,c):
    error = []
    for i in range(len(VP)):
        error.append(abs(VP[i]-VPp[i]))
    bins_s=60
    bins_vp = np.linspace(min(VP), max(VP), bins_s)
    scatterP= m+' \n $r=$'+str(round(np.corrcoef(VP,VPp)[0,1],2))
    label_hist_pl = '\n '+x+'\n $\overline{e} =$'+str(round(np.mean(error))) \
    +'\n $\sigma_e =$'+str(round(np.std(error)))
    #--------------------------------------------------------------------------------------------------#
    X_VP  = np.linspace(min(VP), max(VP),bins_s)
    dx_VP = np.histogram(VP ,bins=bins_vp)[1][1] - np.histogram(VP ,bins=bins_vp)[1][0]
    Y_VP  = mlab.normpdf(np.linspace(min(VP),max(VP),bins_s),np.mean(VP),np.sqrt(np.var(VP)))*len(VP)*dx_VP
    #-----------------------------------------------------------------------------------------------------#
    X_VPp  = np.linspace(min(VPp), max(VPp),bins_s)
    dx_VPp = np.histogram(VPp ,bins=bins_vp)[1][1] - np.histogram(VPp ,bins=bins_vp)[1][0]
    Y_VPp  = mlab.normpdf(np.linspace(min(VPp),max(VPp),bins_s),np.mean(VPp),np.sqrt(np.var(VPp)))*len(VPp)*dx_VPp
    #-----------------------------------------------------------------------------------------------------#
    fig = plt.figure(figsize= (12,12))

    ax1 = plt.subplot(222)
    ax1.hist(VP,bins_vp,histtype='bar',stacked=True,color='k',alpha=0.5,label='Valores $VP$')
    ax1.plot(X_VP,Y_VP,linewidth=2,color='k')
    ax1.hist(VPp , bins_vp, histtype='bar', stacked=True, color=c, alpha=0.3,label=label_hist_pl)
    ax1.plot(X_VPp,Y_VPp,linewidth = 2, color=c)
    plt.xlabel('Velocidades $(m / s)$');plt.ylabel('Distribuição');plt.grid();plt.xlim(xmax=max(VP),xmin=min(VP));
    plt.ylim(ymax=180,ymin=0);legend = ax1.legend(loc=1, shadow=True)

    ax2=plt.subplot(221);ax2.plot(VP,VP,'+k');ax2.plot(VP,VPp,'+'+c,label=scatterP);legend=ax2.legend(loc=4)
    plt.xlim(xmax=max(VP),xmin=min(VP));plt.ylim(ymax=max(VP),ymin=min(VP));
    plt.xlabel('Velocidade Original $VP$ em $m/s$')
    plt.ylabel('Velocidade Estimada '+x+' em $m/s$');plt.grid()

    plt.show()
Example #12
def plotting(ls1, ls2, head):
    # ls1 normal, ls2 satire
    mu1 = np.mean(ls1)
    mu2 = np.mean(ls2)
    sigma1 = np.std(ls1) # standard deviation of distribution
    sigma2 = np.std(ls2)
    x = ls1
    y = ls2
    plt.figure(1)
    num_bins = 100
    # the histogram of the data
    n1, bins1, patches1 = plt.hist(x, num_bins, normed=1, facecolor='green', alpha=0.5, label='normal')
    plt.legend(loc=2)
    # add a 'best fit' line
    x1 = mlab.normpdf(bins1, mu1, sigma1)

    n2, bins2, patches2 = plt.hist(y, num_bins, normed=1, facecolor='red', alpha=0.5, label = 'satire')
    plt.legend(loc=2)
    # add a 'best fit' line
    y2 = mlab.normpdf(bins2, mu2, sigma2)

    plt.plot(bins1, x1, 'b--')
    plt.plot(bins2, y2, 'b--')
    plt.xlabel('Variance')
    plt.ylabel('Density')
    plt.title('Distribution of docs')
    

    # Tweak spacing to prevent clipping of ylabel
    plt.subplots_adjust(left=0.15)
    if head == True: filename = 'dist_head.png'
    elif head == False: filename = 'dist.png'
    plt.savefig(filename, format='png')
def kldistancecluster(planets):
    nlist = nall(knownplanets, 'earth')[1]
    ntrue = nall(knownplanets, 'earth')[0]
    difference = variance(ntrue,nlist)
    uniformdist = np.asarray(np.random.uniform(0.0,0.5,len(difference)))
    difference = np.asarray(difference)

    plt.hist(nlist, bins = 25, color = 'blue', alpha = 0.7, normed = True)
    plt.hist(ntrue, bins = 25, color = 'green', alpha = 0.5, normed = True)
    
    # Find best fit
    x = np.linspace(0.0, 8, 25)
    best_fit_uniform = mlab.normpdf(x, np.mean(nlist), np.std(nlist))
    best_fit_dif = mlab.normpdf(x, np.mean(ntrue), np.std(ntrue))
    plt.plot(x, best_fit_uniform, label = 'unif')
    plt.plot(x, best_fit_dif, label = 'dif')
    plt.xlabel('Distribution Value')
    plt.ylabel('Frequency')
    
    blue = mpatches.Patch(color='blue', label = 'Normed PDF for Integer Distribution')
    green = mpatches.Patch(color = 'green', label = 'Normed PDF for Calculate Rank')
    plt.legend(handles = [blue, green])
   
    plt.text(3.4, 1.0, 'KL Divergence: \n  ( rank, integer distribution) = 0.0112', style='italic', bbox={'facecolor':'red', 'alpha':0.5, 'pad':10})
    plt.show()
    #kldiv = stats.entropy(difference, qk=uniformdist, base=None)
    kldiv = stats.entropy(nlist, qk=ntrue, base=None)
    return(kldiv)
Example #14
def avg_score_distribution(n, m, data_points=10e4, bins=100, visualize=False):
    '''
    Returns estimated mean and standard deviation of score distribution
    for randomized amino acid recognition result
     
    n := sum of all fragment lengths
    m := length of sequence
    '''
    
    assert n <= m
    
    scores = []
    for i in range(int(data_points)):
    
        p = random(n)
        avg = 1 - ((m - n + p.sum()) / m)
        scores.append(avg)
        
    data = array(scores)
    mu = mean(data) ## mean value
    sigma = std(data) ## standard deviation
    
    if visualize:
        n, bins, patches = plt.hist(data, bins, normed=1, alpha=.3)
        y = mlab.normpdf(bins, mu, sigma)
        plt.plot(bins, y, 'r-', linewidth=1)
        plt.vlines(mu, 0, mlab.normpdf([mu], mu, sigma), colors='r')
        plt.show()
    
    return mu, sigma
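The snippet above relies on names imported elsewhere (random, array, mean, and std come from numpy). A plausible set of imports plus a small call, with arbitrary n and m:

import matplotlib.mlab as mlab
import matplotlib.pyplot as plt
from numpy import array, mean, std
from numpy.random import random

mu, sigma = avg_score_distribution(n=20, m=50, data_points=10e3)
print(mu, sigma)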
def peval_binormal(x, p):
    # p[0] = w1
    # p[1] = mu1
    # p[2] = sigma1
    # p[3] = w2
    # p[4] = mu2
    # p[5] = sigma2
    return (p[0] * mlab.normpdf(x, p[1], p[2]) + p[3] * mlab.normpdf(x, p[4], p[5]))
def kalman_plot(prediction, measurement, correction):
    """Helper to draw all curves in each filter step."""
    plot([normpdf(x, prediction.mu, sqrt(prediction.sigma2))
          for x in range(*arena)], color = 'b', linewidth=2)
    plot([normpdf(x, measurement.mu, sqrt(measurement.sigma2))
          for x in range(*arena)], color = 'g', linewidth=2)
    plot([normpdf(x, correction.mu, sqrt(correction.sigma2))
          for x in range(*arena)], color = 'r', linewidth=2)
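The prediction, measurement, and correction objects only need mu and sigma2 attributes. For reference, a minimal sketch of the 1-D Kalman correction step that such curves usually visualize (the Gauss type and the function name are assumptions mirroring the attributes used above):

from collections import namedtuple

Gauss = namedtuple('Gauss', ['mu', 'sigma2'])

def kalman_correct(prediction, measurement):
    # Product of two Gaussians: gain-weighted mean, reduced variance
    k = prediction.sigma2 / (prediction.sigma2 + measurement.sigma2)  # Kalman gain
    mu = prediction.mu + k * (measurement.mu - prediction.mu)
    sigma2 = (1.0 - k) * prediction.sigma2
    return Gauss(mu, sigma2)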
Example #17
 def max_score_func(m):
     mu = 40
     sigma = 8
     if m <= mu:
         return 1.
     else:
         top_val = normpdf(mu, mu, sigma)
         return normpdf(m, mu, sigma) / top_val
def plot_density(mean, class_prior):
    x = np.linspace(-5, 7, 100)
    pyplot.xticks([1*k for k in range(-5, 8)])
    pyplot.plot(x, (mlab.normpdf(x, mean[0], 1)*class_prior[0] + mlab.normpdf(x, mean[1], 1)*class_prior[1]))
    pyplot.title("Density plot for the data")
    pyplot.xlabel("Data")
    pyplot.ylabel("Density")
    pyplot.savefig('./density_plot.png')
    pyplot.close()
Example #19
  def plot_PDFs(self):
    """
    Plot probability density functions
    """
    import matplotlib.mlab as mlab
    NB_class = len(self.opdict['types'])
    feat_1 = self.x.columns[0]
    feat_2 = self.x.columns[1]

    binwidth = .05
    lim_sup_1 = (int(np.max(self.x[feat_1])/binwidth)+2)*binwidth
    lim_inf_1 = (int(np.min(self.x[feat_1])/binwidth)-2)*binwidth
    bins_1 = np.arange(lim_inf_1, lim_sup_1 + binwidth, binwidth)
    lim_sup_2 = (int(np.max(self.x[feat_2])/binwidth)+2)*binwidth
    lim_inf_2 = (int(np.min(self.x[feat_2])/binwidth)-2)*binwidth
    bins_2 = np.arange(lim_inf_2, lim_sup_2 + binwidth, binwidth)

    x_hist, y_hist = [],[]
    g_x, g_y = {}, {}
    for i in range(NB_class):
      index = self.y[self.y.NumType.values==i].index
      x1 = self.x.reindex(columns=[feat_1],index=index).values
      x2 = self.x.reindex(columns=[feat_2],index=index).values
      g_x[i] = mlab.normpdf(bins_1, np.mean(x1), np.std(x1))
      g_y[i] = mlab.normpdf(bins_2, np.mean(x2), np.std(x2))
      x_hist.append(x1)
      y_hist.append(x2)

    if NB_class > 2:
      colors_g = ('y','orange','r')
      colors_h = ('k','gray','w')
    elif NB_class == 2:
      colors_g = ('y','r')
      colors_h = ('k','w')

    fig = plt.figure()
    fig.set_facecolor('white')
    plt.hist(x_hist,bins=bins_1,color=colors_h,normed=1,histtype='stepfilled',alpha=.5)
    for key in sorted(g_x):
      plt.plot(bins_1,g_x[key],color=colors_g[key],lw=2.,label='Class %s'%self.opdict['types'][key])
    plt.xlabel(feat_1)
    plt.legend(loc=2)
    plt.savefig('%s/histo_%s.png'%(self.opdict['fig_path'],feat_1))

    fig = plt.figure()
    fig.set_facecolor('white')
    plt.hist(y_hist,bins=bins_2,color=colors_h,normed=1,histtype='stepfilled',alpha=.5)
    for key in sorted(g_y):
      plt.plot(bins_2,g_y[key],color=colors_g[key],lw=2.,label='Class %s'%self.opdict['types'][key])
    plt.plot([.5,.5],[0,2],'g--',lw=2.)
    plt.figtext(.52,.7,'?',color='g',size=20)
    plt.xlabel(feat_2)
    plt.legend(loc=2)
    plt.savefig('%s/histo_%s.png'%(self.opdict['fig_path'],feat_2))
    plt.show()
def peval_trimodal_gauss(x, p):
    w1 = p[0]
    mu1 = p[1]
    sigma1 = p[2]
    w2 = p[3]
    mu2 = p[4]
    sigma2 = p[5]
    w3 = p[6]
    mu3 = p[7]
    sigma3 = p[8]
    return w1 * mlab.normpdf(x, mu1, sigma1) + w2 * mlab.normpdf(x, mu2, sigma2) + w3 * mlab.normpdf(x, mu3, sigma3)
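A peval-style mixture like this is typically paired with a least-squares fit of its nine parameters. A hypothetical sketch using scipy.optimize.leastsq (x, y, and the starting point p0 are placeholders, not values from the original project):

from scipy.optimize import leastsq

def residuals_trimodal(p, y, x):
    # Difference between observed densities y and the three-component model
    return y - peval_trimodal_gauss(x, p)

# x: bin centers, y: normalized histogram heights
# p0 = [0.3, 1.0, 0.5, 0.4, 3.0, 0.5, 0.3, 5.0, 0.5]  # hypothetical initial guess
# p_fit, _ = leastsq(residuals_trimodal, p0, args=(y, x))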
Example #21
def plot(muVar, sigmaVar, muSDTrue, sigmaSDTrue):
    xMin = min(muVar, muSDTrue) - 2*max(sigmaVar, sigmaSDTrue)
    xMax = max(muVar, muSDTrue) + 2*max(sigmaVar, sigmaSDTrue)
    x = np.linspace(xMin,xMax, 1000)
    #plt.title('m= %f, sigma_e=%f, weight=2, no bias' % (m, sigma_e)) 
    plt.plot(x,mlab.normpdf(x,muVar,sigmaVar),label='variational mu=%f, sd=%f' % (muVar, sigmaVar),color='blue')
    plt.plot(x,mlab.normpdf(x,muSDTrue,sigmaSDTrue), ls='--',color='red')
    #, label='true mu=%f, sd=%f' % (muSDTrue, sigmaSDTrue))
    #plt.legend(('variational mu=%f, sd=%f' % (muVar, sigmaVar), 'true posterior mu=%f, sd=%f' % (muSDTrue, sigmaSDTrue)))
    #plt.legend()
    plt.show()
Example #22
    def draw_gaussian(self, ax, mu, sigma, label=None):
        # fig = plt.figure()
        # fig.suptitle(title)
        # ax = fig.add_subplot(111)

        sigma = math.sqrt(sigma)
        x = np.linspace(mu-3, mu+3, 100)

        if label is None:
            return ax.plot(x, mlab.normpdf(x, mu, sigma))[0]
        else:
            return ax.plot(x, mlab.normpdf(x, mu, sigma), c=self.label_to_color(label), label=label)[0]
Example #23
def ema_bgd(data):
    '''
    Calculate parameters based on Expectation–Maximization Algorithm; EMA
    '''
    pi = 0.5  # mixing weight (responsibility)
    ms = [random.choice(data), random.choice(data)]  # pick two random starting means
    vs = [np.var(data), np.var(data)]  # initialize both variances to the data variance
    T = 50  # number of iterations
    ls = []  # store the log-likelihood at each step

    # plot the results
    fig = plt.figure()
    ax1 = fig.add_subplot(211)
    ax2 = fig.add_subplot(212)

    # axes setup
    ax1.set_xlim(min(data), max(data))
    ax1.set_xlabel("x")
    ax1.set_ylabel("Probability")
    ax2.set_xlabel("step")
    ax2.set_ylabel("log_likelihood")
    ax2.set_ylim(-500,0)
    ax2.set_xlim(0, T)

    for i in range(T):

        '''EM Algorithm'''
        burden_rates = e_step(xs=data, ms=ms, vs=vs, pi=pi)  # E step
        ms, vs, pi = m_step(xs=data, burden_rates=burden_rates)  # M step
        ls.append(calc_log_likelihood(data, ms, vs, pi))  # update the log-likelihood
        print(ls[i])
        # draw
        xs = np.linspace(min(data), max(data), 200)
        norm1 = mlab.normpdf(xs, ms[0], math.sqrt(vs[0]))
        norm2 = mlab.normpdf(xs, ms[1], math.sqrt(vs[1]))
        ax1.hist(data, 20, normed=1, color='dodgerblue')
        ax1.plot(xs, (1 - pi) * norm1, color="orange", lw=3)
        ax1.plot(xs, pi * norm2, color="orange", lw=3)
        # ax1.plot(xs, (1 - p) * norm1 + p * norm2, color="red", lw=3)

        ax2.plot(np.arange(len(ls)), ls, color='dodgerblue')        

        if i == T-1:  # stopping condition
            print(i)
            ax1.plot(xs, (1 - pi) * norm1 + pi * norm2, color="red", lw=3)
            [ax1.lines.pop(0) for l in range(2)]  # remove lines
            print('...Converged at the {}th iteration...'.format(i+1))
            plt.pause(-1)
            break

        # plt.pause(0.1)
        [ax1.lines.pop(0) for _ in range(2)]  # remove the two curves just drawn
def get_intersection(G1, G2, ws, tol=0.01):
    #sort so G1.mu < G2.mu
    #ui < uj
    oGs = [G1, G2] 
    ows = ws
    Gs, ws = [], []
    args = np.argsort([G1[0],G2[0]])
    
    for i in args:
        Gs.append(oGs[i])
        ws.append(ows[i])
    ui, vi = Gs[0]
    uj, vj = Gs[1]
    si, sj = np.sqrt(vi), np.sqrt(vj)
    al, be = ws
    print(ui, si, uj, sj)
    
    if si == sj:
        x=(ui+uj)/2.0
    else:
        sq2pi = np.power(2*np.pi,0.5)
        c = (2*si*si*sj*sj) * ( np.log( al/(si*sq2pi) ) - np.log( be/(sj*sq2pi) ) )
        c = c  + (si*si*uj*uj)-(sj*sj*ui*ui)
        b = -((2*uj*si*si)-(2*ui*sj*sj))
        a = (si*si)-(sj*sj)
        
        q=(b**2 - 4*a*c)
        if q<0: 
            x=None
        else:
            x1 = (-b + np.sqrt(q)) / (2*a)
            x2 = (-b - np.sqrt(q)) / (2*a)
            
            x=x1
            if (x1 < ui and x1 < uj) or (x1 > ui and x1 > uj):
                x=x2
    
    if x is None:
        return None, None, None, None

    y = al*eval_G(G1, x) 

    mn = ui - 5*si
    mx = uj + 5*sj
    xis = np.arange(x,mx, tol)
    xjs = np.arange(mn,x, tol)

    i_integral = np.sum(mlab.normpdf(xis, ui, si)*al)*tol
    j_integral = np.sum(mlab.normpdf(xjs, uj, sj)*be)*tol
    overlap = i_integral+j_integral

    return x, y, overlap/al, overlap/be
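eval_G is defined elsewhere; from the way Gaussians are passed around as (mean, variance) pairs it is presumably just the normal density, as in the sketch below. The quadratic solved above comes from equating al*N(x; ui, si) with be*N(x; uj, sj) and taking logarithms, which yields a*x**2 + b*x + c = 0 with exactly the coefficients computed in the function.

import numpy as np

def eval_G(G, x):
    # Normal density for G = (mean, variance); an assumption based on how G1, G2 are used
    mu, var = G
    sigma = np.sqrt(var)
    return np.exp(-0.5 * ((x - mu) / sigma) ** 2) / (sigma * np.sqrt(2.0 * np.pi))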
Example #25
    def mixtureFunction(x, *p):
        """
        Mixture function modelling four Gaussian (nucleosomal) distributions and one exponential (nucleosome-free) distribution.
        """
        m1, s1, w1, m2, s2, w2, m3, s3, w3, m4, s4, w4, q, r = p
        nfr = expo(x, 2.9e-02, 2.8e-02)
        nfr[:smallestInsert] = 0

        return (mlab.normpdf(x, m1, s1) * w1 +
                mlab.normpdf(x, m2, s2) * w2 +
                mlab.normpdf(x, m3, s3) * w3 +
                mlab.normpdf(x, m4, s4) * w4 +
                nfr)
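The expo term and smallestInsert come from the enclosing scope and are not shown. A guess at their shape, based only on how they are used here (an exponential decay for the nucleosome-free fragment lengths, zeroed below some minimum insert size); the real definitions live in the surrounding module:

import numpy as np

smallestInsert = 30  # hypothetical cutoff; the actual value comes from the caller

def expo(x, a, b):
    # Hypothetical exponential-decay term with amplitude a and rate b
    return a * np.exp(-b * np.asarray(x, dtype=float))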
def gaussian_dist(mean, variance):
	"""Gaussian Distribution"""
	sigma = np.sqrt(variance)
	x = np.linspace(-3-len(mean),3+len(mean),100)
	for i in range(len(sigma)):
		plt.plot(x,mlab.normpdf(x, mean[0], sigma[i]), linewidth=4, label='$\mu=0, \sigma^2 =%0.1f$' %variance[i])
	for j in range(1,len(mean)):
		plt.plot(x,mlab.normpdf(x, mean[j], sigma[1]), linewidth=4, label='$\mu=%d, \sigma^2=%0.1f$' %(mean[j], variance[1]))
	plt.title('Gaussian Distribution', fontsize=26)
	plt.xlabel('X', fontsize=22)
	plt.ylabel('Probability', fontsize=22)
	plt.legend()
	plt.show()
Example #27
 def logEvidenceWeight(self, theta, t):
     """ Evaluate the probability of sampling z from a gaussian centered
         at theta.
     """
     (x,y) = theta
     (zx, zy) = self.zs[t]
     px = mlab.normpdf(zx, x, self.options.sigma)
     py = mlab.normpdf(zy, y, self.options.sigma)
     if px == 0 or py == 0:
         return True, 0
     else:
         ret = math.log(px) + math.log(py)
         return False, ret
Example #28
 def logEvidenceWeight(self, theta, t):
     """ The log evidence weight is a product of Gaussians as in the
         GaussianNoise Example.
     """
     (x,y) = theta
     (zx, zy) = self.zs[t]
     px = mlab.normpdf(zx, x, self.options.sigma)
     py = mlab.normpdf(zy, y, self.options.sigma)
     if px == 0 or py == 0:
         return True, 0
     else:
         ret = math.log(px) + math.log(py)
         return False, ret
Example #29
def overlapping_percentage(data1, data2):
	if (data1[1] == 0 or data2[1] == 0):
		return 0

	#plot_curves(data1, data2)
	bounds1 = get_bounds(*data1)
	bounds2 = get_bounds(*data2)

	#globalbounds
	gb = get_integration_bounds(bounds1, bounds2)
	result = quad(lambda x: min(normpdf(x, *data1), normpdf(x, *data2)), *gb)[0]

	#no need to calculate percentage, as the area of a normal distribution is always 1
	return result
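get_bounds and get_integration_bounds are defined elsewhere. A plausible sketch, under the assumption that each data tuple is (mean, sigma) and that the integration runs over an interval covering both distributions:

def get_bounds(mean, sigma, k=5):
    # Hypothetical: an interval holding essentially all of the mass of N(mean, sigma)
    return mean - k * sigma, mean + k * sigma

def get_integration_bounds(bounds1, bounds2):
    # Hypothetical: the smallest interval containing both ranges
    return min(bounds1[0], bounds2[0]), max(bounds1[1], bounds2[1])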
Example #30
def make_histogram(data, options, output_stub, numbins=10, norm=False):

    print "norm", norm

    try:
        if options.logarithm:
            data = np.log(data)
    except AttributeError:
        pass

    fig = plt.figure()          # noqa
    if np.iscomplexobj(data):
        realplot = plt.subplot(211)
        realplot.set_title("real")
        realdata = np.real(data)
        _, bins, _ = realplot.hist(realdata, numbins, normed=norm)
        if norm:
            bincenters = 0.5*(bins[1:]+bins[:-1])
            y = mlab.normpdf(bincenters, np.mean(realdata), np.std(realdata))
            realplot.plot(bincenters, y, 'r--', linewidth=1)

        imagplot = plt.subplot(212)
        imagplot.set_title("imag")
        imagdata = np.imag(data)
        _, bins, _ = imagplot.hist(np.imag(data), numbins, facecolor="green", normed=norm)
        if norm:
            bincenters = 0.5*(bins[1:]+bins[:-1])
            y = mlab.normpdf(bincenters, np.mean(imagdata), np.std(imagdata))
            imagplot.plot(bincenters, y, 'r--', linewidth=1)
    else:
        _, bins, _ = plt.hist(data, numbins, normed=norm)
        if norm:
            bincenters = 0.5*(bins[1:]+bins[:-1])
            y = mlab.normpdf(bincenters, np.mean(data), np.std(data))
            plt.plot(bincenters, y, 'r--', linewidth=1)
            plt.yticks([])

    # if options.title:
    #     plt.suptitle(options.title)
    try:
        if options.zero:
            plt.xlim(min(0,min(data)[0]), plt.xlim()[1])
    except AttributeError:
        pass

    if(output_stub):
        logging.info("Saving plot to {}".format(output_stub+".png"))
        plt.savefig(output_stub+".png",dpi=200)
    else:
        plt.show()
Example #31
ax[0].yaxis.set_ticks(np.arange(0, 0.6, 0.2))
ax[1].set_title("Empirical Distribution", fontsize=12)
ax[1].set_ylim(0, 1.5)
ax[1].yaxis.set_ticks(np.arange(0, 2, 1))
ax[2].set_title("Kernel Functions", fontsize=12)
ax[2].set_ylim(0, .65)
ax[2].yaxis.set_ticks(np.arange(0, .64, .3))
ax[3].set_title("Parzen Density Estimate", fontsize=12)
ax[3].set_ylim(0, 0.35)
ax[3].yaxis.set_ticks(np.arange(0, 0.6, 0.2))
for i in range(4):
    ax[i].set_xlim(0, 14)

pts = 100
x = np.linspace(0, 14, pts)
pdf = p_0 * mlab.normpdf(x, mu_0, sig_0) + p_1 * mlab.normpdf(x, mu_1, sig_1)
ax[0].plot(x, pdf)
cdf = np.cumsum(pdf) / sum(pdf)

emperical = np.interp(np.random.rand(pts, 1), cdf, x)
emperical = np.sort(emperical, axis=None)

ax[1].stem(emperical, np.ones(pts), 'b', markerfmt=' ')
ax[1].set_xlim(0, 14)

kernel = np.empty([emperical.shape[0], pts])
for i in range(emperical.shape[0]):
    kernel[i] = mlab.normpdf(x, emperical[i], lam)
    ax[2].plot(x, kernel[i], color='c')

parzen = np.empty(100)
Example #32
import numpy as np
import matplotlib.mlab as mlab
import matplotlib.pyplot as my_plt
#mean value
mean = 100
#standard deviation value
sd = 15
x = mean + sd * np.random.randn(10000)
num_bins = 20
# Histogram
n, bins, patches = my_plt.hist(x, num_bins, normed=1, facecolor='green', alpha=0.5)
 # add a 'best fit' line
y = mlab.normpdf(bins, mean, sd)
my_plt.plot(bins, y, 'r--')
my_plt.xlabel('Intelligent persons in a Organization')
my_plt.ylabel('Probability')
my_plt.title('Histogram')
 # Adjusting the spacing
my_plt.subplots_adjust(left=0.15)
my_plt.show()
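mlab.normpdf has since been removed from matplotlib, but it is just the univariate normal density; scipy.stats.norm.pdf(x, mean, sd) is an equivalent call. A drop-in sketch for running examples like the one above on a current matplotlib:

import numpy as np

def normpdf(x, mu, sigma):
    # exp(-(x - mu)^2 / (2 sigma^2)) / (sigma * sqrt(2 pi))
    x = np.asarray(x, dtype=float)
    return np.exp(-0.5 * ((x - mu) / sigma) ** 2) / (sigma * np.sqrt(2.0 * np.pi))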
# Plot the Distributions in this range:
x = np.linspace(-100,100,1000)

# <headingcell level=2>

# In the beginning

# <codecell>

mean0 = 0.0   # e.g. meters or miles
var0  = 20.0

# <codecell>

plt.figure(figsize=(fw,5))
plt.plot(x,mlab.normpdf(x, mean0, var0), label='Normal Distribution')
plt.ylim(0, 0.1);
plt.legend(loc='best');
plt.xlabel('Position');

# <markdowncell>

# You are at position `0` and you are pretty unsure (flat normal distribution)

# <headingcell level=2>

# Now we have something, which estimates the moved distance

# <codecell>

meanMove = 25.0  # e.g. meters, calculated from velocity*dt or step counter or wheel encoder ...
Example #34
#     i = int(row[2]) - 1
#     y[i] = y[i] + 1
# print('score distribution (1-10):', y)

data = []
# reader = csv.reader(open("../../data/image.csv"))
reader = csv.reader(open("../../data/score_decimal.csv"))
for row1 in reader:
    data.append(float(row1[2]))

data1 = np.array(data)
print('平均分', data1.mean())
# plt.hist(data,bins=30)
mu = data1.mean()
sigma = data1.std()
n, bins, patches = plt.hist(data,
                            30,
                            density=1,
                            facecolor='green',
                            edgecolor='black',
                            alpha=0.5)
# Histogram: density=1 makes it a probability density (area sums to one), green bars with alpha 0.5;
# returns the bin probabilities n, the bin edges, and the patch objects.
y = mlab.normpdf(bins, mu, sigma)  # draw a fitted normal curve
plt.plot(bins, y, 'r--')
# plt.bar(range(0,10), y)
plt.xlabel('score')
plt.ylabel('probability')
plt.title(r'Histogram of Grade')  # use a u'...' literal for a Chinese title
plt.show()

# np.save('../../data/statistics.npy', y)
Example #35
    def plot_PDFs(self):
        """
        Plot probability density functions
        """
        import matplotlib.mlab as mlab
        NB_class = len(self.opdict['types'])
        feat_1 = self.x.columns[0]
        feat_2 = self.x.columns[1]

        binwidth = .05
        lim_sup_1 = (int(np.max(self.x[feat_1]) / binwidth) + 2) * binwidth
        lim_inf_1 = (int(np.min(self.x[feat_1]) / binwidth) - 2) * binwidth
        bins_1 = np.arange(lim_inf_1, lim_sup_1 + binwidth, binwidth)
        lim_sup_2 = (int(np.max(self.x[feat_2]) / binwidth) + 2) * binwidth
        lim_inf_2 = (int(np.min(self.x[feat_2]) / binwidth) - 2) * binwidth
        bins_2 = np.arange(lim_inf_2, lim_sup_2 + binwidth, binwidth)

        x_hist, y_hist = [], []
        g_x, g_y = {}, {}
        for i in range(NB_class):
            index = self.y[self.y.NumType.values == i].index
            x1 = self.x.reindex(columns=[feat_1], index=index).values
            x2 = self.x.reindex(columns=[feat_2], index=index).values
            g_x[i] = mlab.normpdf(bins_1, np.mean(x1), np.std(x1))
            g_y[i] = mlab.normpdf(bins_2, np.mean(x2), np.std(x2))
            x_hist.append(x1)
            y_hist.append(x2)

        if NB_class > 2:
            colors_g = ('y', 'orange', 'r')
            colors_h = ('k', 'gray', 'w')
        elif NB_class == 2:
            colors_g = ('y', 'r')
            colors_h = ('k', 'w')

        fig = plt.figure()
        fig.set_facecolor('white')
        plt.hist(x_hist,
                 bins=bins_1,
                 color=colors_h,
                 normed=1,
                 histtype='stepfilled',
                 alpha=.5)
        for key in sorted(g_x):
            plt.plot(bins_1,
                     g_x[key],
                     color=colors_g[key],
                     lw=2.,
                     label='Class %s' % self.opdict['types'][key])
        plt.xlabel(feat_1)
        plt.legend(loc=2)
        plt.savefig('%s/histo_%s.png' % (self.opdict['fig_path'], feat_1))

        fig = plt.figure()
        fig.set_facecolor('white')
        plt.hist(y_hist,
                 bins=bins_2,
                 color=colors_h,
                 normed=1,
                 histtype='stepfilled',
                 alpha=.5)
        for key in sorted(g_y):
            plt.plot(bins_2,
                     g_y[key],
                     color=colors_g[key],
                     lw=2.,
                     label='Class %s' % self.opdict['types'][key])
        plt.plot([.5, .5], [0, 2], 'g--', lw=2.)
        plt.figtext(.52, .7, '?', color='g', size=20)
        plt.xlabel(feat_2)
        plt.legend(loc=2)
        plt.savefig('%s/histo_%s.png' % (self.opdict['fig_path'], feat_2))
        plt.show()
Example #36
 ax.hist(same, bins=bi, normed=True, alpha=0.8, label="Same person")
 ax.hist(diff, bins=bi, normed=True, alpha=0.8, label="Different person")
 ax.set_ylabel('Probability density')
 ax.set_xlabel('Similarity (the higher, the more similar)')
 ax.set_title('Histogram of Similarity')
 plt.legend()
 plt.show()
 fig = plt.figure()
 ax = fig.add_subplot(111)
 ax.hist(same, bins=bi, normed=True, alpha=0.8, label="Same person")
 ax.hist(diff, bins=bi, normed=True, alpha=0.8, label="Different person")
 ax.set_ylabel('Probability density')
 ax.set_xlabel('Similarity (the higher, the more similar)')
 ax.set_title('Histogram of Similarity')
 x1 = np.linspace(min(diff), max(diff), 1000)
 normal = mlab.normpdf(x1, np.mean(diff), np.std(diff))
 line1, = plt.plot(x1, normal, 'r-', linewidth=2)
 kde = mlab.GaussianKDE(diff)
 x2 = np.linspace(min(diff), max(diff), 1000)
 line2, = plt.plot(x2, kde(x2), 'g-', linewidth=2)
 plt.legend(
     [line1, line2],
     ['normal', 'gaussian kde'],
 )
 x3 = np.linspace(min(same), max(same), 1000)
 normal = mlab.normpdf(x3, np.mean(same), np.std(same))
 line3, = plt.plot(x3, normal, 'r-', linewidth=2)
 kde = mlab.GaussianKDE(same)
 x4 = np.linspace(min(same), max(same), 1000)
 line4, = plt.plot(x4, kde(x4), 'g-', linewidth=2)
 plt.legend([line1, line2], ['normal', 'gaussian kde'], loc="best")
Example #37
import numpy as np
from scipy.stats import norm
import matplotlib.mlab as mlab
import matplotlib.pyplot as plt
Data = np.loadtxt("data.dat")
_mu = []
_max = []
_N = Data[:, 0]
for dat in Data:
    (mu, sigma) = norm.fit(dat[1:])
    n, bins, patches = plt.hist(dat[1:],
                                60,
                                normed=1,
                                facecolor='green',
                                alpha=0.75)
    y = mlab.normpdf(bins, mu, sigma)
    l = plt.plot(bins, y, 'r--', linewidth=2)
    _mu = np.append(_mu, mu)
    _max = np.append(_max, max(dat[1:]))
    plt.xlabel('Smarts')
    plt.ylabel('Probability')
    plt.title(r'$\mathrm{Histogram\ of\ IQ:}\ \mu=%.3f,\ \sigma=%.3f$' %
              (mu, sigma))
    plt.grid(True)
    plt.show()
print(_N)
print(_mu)
print(_max)
np.savetxt("statistics.dat", np.c_[_N, _mu, _max])
plt.plot(_N, _mu, '.')
p = np.polyfit(_N, _mu, 2, rcond=None, full=False)
Example #38
    print("Minimum is " + fmt % mn)
    print("Maximum is " + fmt % mx)
    print("Mean is " + fmt % mu)
    print("Standard Deviation is " + fmt % sd)
    print(("Sigma %d boundaries are " + fmt + " and " + fmt) %
          (options.sigma, sigmin, sigmax))
    n, bins, patches = plt.hist(x,
                                options.nbins,
                                normed=True,
                                facecolor='green',
                                alpha=0.75,
                                range=(axisXmin, axisXmax))
    axisYmax = n.max() * 1.1

    # add a 'best fit' line
    y = mlab.normpdf(bins, mu, sd)
    l = plt.plot(bins, y, 'r--', linewidth=1)
    plt.axvspan(mu - options.sigma * sd,
                mu + options.sigma * sd,
                alpha=0.2,
                color="cyan")
    plt.xlabel(TRACE)
    plt.ylabel('Distribution [Normalised]')

    if options.title is None:
        title = (r'$\mathrm{Histogram\ of\ %s:}\ \mu=' + fmt + r',\ stdev=' +
                 fmt + r',\ \sigma=%d$') % (TRACE, mu, sd, options.sigma)
    else:
        title = options.title
    plt.title(title)
Example #39
                        array('d'),
                        array('d'),
                        array('d')
                    ]
                    sample = array('d')
#				quit(-1)

if plot == 3:
    for i in [2]:  #range(0-16)
        #	for i in range(0-16)
        print "\nplotting histagram of data for %d events from ch%d" % (
            num_events, i + 1)
        plt.hist(ch[i], normed=True, facecolor='blue', align='left')
        mean, std = norm.fit(ch[i])
        Xgau = np.arange(min(ch[i]), max(ch[i]), 0.1)
        Ygau = mlab.normpdf(Xgau, mean, std)
        plt.plot(Xgau,
                 Ygau,
                 'r--',
                 linewidth=2,
                 label='$\mu$:%.3f\n$\sigma$:%.3f\nEntries:%d' %
                 (mean, std, len(sample) / 1))
        plt.title('DC%d CH%d win:%d-%d' %
                  (DCNum, i + 1, WinStart, WinStart + 3))
        plt.legend(loc='best', frameon=False, prop={'size': 15})
        plt.xlabel("ADC counts")
        plt.ylabel("Probability")
        plt.grid(True)
        plotname = 'outdir/plots/DC%d_ch%d_hist_%d_events.png' % (DCNum, i + 1,
                                                                  num_events)
        plt.savefig(plotname)
Example #40
def main():
    # Check args
    if len(sys.argv) > 1:
        print(sys.argv[1])
        pos1 = sys.argv[1].find('-h')
        if (pos1 >= 0):
            printOutHelp()
            sys.exit()
        pos1 = sys.argv[1].find('-t')
        if (pos1 >= 0) and len(sys.argv) > 2:
            title = sys.argv[2]
            if len(sys.argv) > 3:
                fname = sys.argv[3]
                dataFile = open(fname, 'r')
            else:
                fname = ""
                dataFile = sys.stdin
        else:
            title = ""
            fname = sys.argv[2]
            dataFile = open(fname, 'r')
    else:
        title = ""
        fname = ""
        dataFile = sys.stdin

    parser = caMonitorArrayParser()
    pvs = []

    for line in dataFile:
        if not parser.lineValid(line):
            continue

        pvName, timeVal, data = parser.getValues(line)
        newPv = True
        pvToAddDataTo = caPVArray(pvName)
        # See if old or new pv
        for pv in pvs:
            if pv.getName() == pvName:
                pvToAddDataTo = pv
                newPv = False
                break

        pvToAddDataTo.setValues(timeVal, data)
        if newPv:
            pvs.append(pvToAddDataTo)
            print("Added PV: " + pvName)

    print("Statistics: ")
    legend = []
    count = 0
    for pv in pvs:

        count += 1
        timeSet, dataSet = pv.getData()
        #for d in dataSet:
        #  print(d)
        pvLength = pv.getLength()
        pvMax = np.max(dataSet)
        pvMin = np.min(dataSet)
        pvAvg = np.mean(dataSet)
        pvStd = np.std(dataSet)
        legStr = pv.getName() + "[" + str(pvLength) + "] " + str(
            pvMin) + ".." + str(pvMax) + ", mean: " + str(
                pvAvg) + ", std: " + str(pvStd) + ", range: " + str(pvMax -
                                                                    pvMin)
        #infoStr = "[" + str(pvLength) + "] " + str(pvMin) + ".." + str(pvMax) + ", mean: " + str(pvAvg) + ", std: " + str(pvStd) + ", range: " +str(pvMax-pvMin)
        infoStr = pv.getName(
        ) + "[{0}]:\n  range: {1:.7f}.. {2:.7f} ({3:.7f}), \n  mean: {4:.7f},\n  std: {5:.7f}".format(
            pvLength, pvMin, pvMax, pvMax - pvMin, pvAvg, pvStd)
        legend.append(pv.getName())

        x = timeSet

        print(legStr)
        plt.figure(figsize=(8, 8))
        n, bins, patches = plt.hist(dataSet, int(pvLength / 5), density=1)
        y = mlab.normpdf(bins, pvAvg, pvStd)
        l = plt.plot(bins, y, linewidth=1)

    if count == 1:
        plt.gcf().text(0.01, 0.91, infoStr)

    plt.legend(legend)
    plt.grid()
    plt.title(title)
    plt.show()
Example #41
#creating array of data
arr = np.array(list1, dtype=float)

#plt.hist(arr,7)
#plt.show

#plt.figure(1)

arr.min()

arr.max()

plt.hist(arr, normed=True, color='black', bins=7)
plt.xlim((min(arr), max(arr)))

x = np.linspace(min(arr), max(arr), 100)
plt.plot(x, mlab.normpdf(x, np.mean(arr), np.sqrt(np.var(arr))), color="red")
plt.show()

# Normal distribution Test

print("mean is:", np.mean(arr))
print("median is", np.median(arr))
print("mode is", stats.mode(arr))

stat, pvalue = stats.normaltest(list1)
if pvalue > 0.05:
    print("Data is normally distributed")
else:
    print("Data is not normally distributed")
j=0
v=0


for i in range(0, totalframe):
  n = l[i].split()
  k = [float(x) for x in n]
  a = (k[0] - k[2]) * (k[0] - k[2])
  b = (k[1] - k[3]) * (k[1] - k[3])
  s = math.sqrt(a + b)
  j = j + s

mean = j / totalframe
for i in range(0, totalframe):
  n = l[i].split()
  k = [float(x) for x in n]
  a = (k[0] - k[2]) * (k[0] - k[2])
  b = (k[1] - k[3]) * (k[1] - k[3])
  s = math.sqrt(a + b)
  v = v + (s - mean) * (s - mean)

variance = v / (totalframe - 1)  # unbiased variance (divide by n - 1)
sigma = math.sqrt(variance)

x = np.linspace(-100, 300, 400)  # can be changed if needed to show accuracy
plt.plot(x, mlab.normpdf(x, mean, sigma))  # plot the fitted normal curve
pylab.axvline(x=100)
plt.grid()

plt.show()  #Show the graph
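The explicit loops above accumulate the sample mean and the unbiased variance of the per-frame distances; the same statistics come straight from numpy, as in this small sketch (distances stands for the list of s values gathered in the first loop):

import numpy as np

def distance_stats(distances):
    # Sample mean and unbiased standard deviation (divide by n - 1), matching the loops above
    d = np.asarray(distances, dtype=float)
    return d.mean(), d.std(ddof=1)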
Example #43
                                normed=1,
                                facecolor='purple',
                                alpha=0.50,
                                label='Observed')
    handler.insert_legend(ax3)
    # ------------------------------

    # ------------------------------
    ax3 = plt.subplot(3, 3, 3)
    ax3.set_ylabel('Probability Density Function (%)', {'fontsize': 14})
    ax3.set_yticks([0.000, 0.0005, 0.001, 0.0015, 0.002])
    plt.yticks([0.000, 0.00075, 0.0015, 0.00225, 0.003],
               ['0.00%', '0.75%', '1.50%', '2.25%', '3.0%'])
    plt.axis([-1500, 1500, 0, 10])
    ax3.set_ylim(ymax=0.003, ymin=0)
    y = mlab.normpdf(bins, mu, sigma)
    synt_data = calibrate_arma(data=err_data)
    plt.plot(bins,
             y,
             linestyle='--',
             color='purple',
             linewidth=1.5,
             label='Observed')
    handler.insert_legend(ax3)
    # ------------------------------

    # ------------------------------
    ax3 = plt.subplot(3, 3, 4)
    ax3.set_xlim(xmax=725, xmin=0)
    ax3.set_ylim(ymax=2500, ymin=-2500)
    plt.axhline(0, color='gray', linestyle='--')
Example #44
    def plotFifoData(self, outList):
        import matplotlib.pyplot as plt
        import numpy as np
        import matplotlib.mlab as mlab

        coarseColumn= [row[1] for row in outList]
        fineColumn= [row[2] for row in outList]
        timeStamp= [sum(x) for x in zip(coarseColumn, fineColumn)]
        correctTs= [-1]*len(coarseColumn)
        coarseVal= 0.000000025 #coarse time value (40 Mhz, 25 ns)
        fineVal=   0.00000000078125 #fine time value (1280 MHz, 0.78125 ns)
        for iTs in range(0, len(coarseColumn)):
            correctTs[iTs]= coarseColumn[iTs]*coarseVal + fineColumn[iTs]*fineVal
            #if iTs:
                #print correctTs[iTs]-correctTs[iTs-1], "\t ", correctTs[iTs], "\t", coarseColumn[iTs], "\t", fineColumn[iTs]

        xdiff = np.diff(correctTs)
        np.all(xdiff[0] == xdiff)
        P= 1000000000 #display in ns
        nsDeltas = [x * P for x in xdiff]
        #centerRange= np.mean(nsDeltas)
        centerRange= 476
        windowsns= 30
        minRange= centerRange-windowsns
        maxRange= centerRange+windowsns

        #Divide figure in two axes
        plt.subplot(311)

        #Create first histogram
        plt.hist(nsDeltas, 60, range=[minRange, maxRange], facecolor='blue', align='mid', alpha= 0.75)
        #plt.hist(nsDeltas, 100, normed=True, facecolor='blue', align='mid', alpha=0.75)
        #plt.xlim((min(nsDeltas), max(nsDeltas)))
        plt.xlabel('Time (ns)')
        plt.ylabel('Entries')
        plt.title('Histogram DeltaTime')
        plt.grid(True)

        #Superimpose Gauss to first plot
        mean = np.mean(nsDeltas)
        variance = np.var(nsDeltas)
        sigma = np.sqrt(variance)
        x = np.linspace(min(nsDeltas), max(nsDeltas), 100)
        plt.plot(x, mlab.normpdf(x, mean, sigma))

        MSBTs= [-1]*len(fineColumn)
        LSBTs= [-1]*len(fineColumn)
        for iTs in range(0, len(fineColumn)):
            MSBTs[iTs]= fineColumn[iTs] & 0b11000
            LSBTs[iTs]= fineColumn[iTs] & 0b00111
            #if iTs:
                #print correctTs[iTs]-correctTs[iTs-1], "\t ", correctTs[iTs], "\t", coarseColumn[iTs], "\t", fineColumn[iTs]

        #Second plot
        plt.subplot(312)
        plt.xlabel('Clock sample')
        plt.ylabel('Entries')
        plt.title('Histogram Fine Time Stamp (2 MSB)')
        plt.grid(True)
        plt.hist(MSBTs, 100, normed=False, facecolor='blue', align='mid', alpha=0.75)

        #Third plot
        plt.subplot(313)
        plt.xlabel('Clock sample')
        plt.ylabel('Entries')
        plt.title('Histogram Fine Time Stamp (3 LSB)')
        plt.grid(True)
        plt.hist(LSBTs, 100, normed=False, facecolor='blue', align='mid', alpha=0.75)

        #Display plot
        plt.show()
Example #45
def plot_single_rho(args,work):
    # Plot rho stats for one ccd at at time

    if args.file != '':
        print 'Read file ',args.file
        with open(args.file) as fin:
            data = [ line.split() for line in fin ]
        runs, exps = zip(*data)
    else:
        runs = args.runs
        exps = args.exps

    nexp = len(exps)
    cat_dir = os.path.join(work,'psf_cats')

    if True:
        ccd_meanlogr = numpy.empty( (nexp*62,37) )
        ccd_rho1p = numpy.empty( (nexp*62,37) )
        ccd_rho1m = numpy.empty( (nexp*62,37) )
        ccd_rho2p = numpy.empty( (nexp*62,37) )
        ccd_rho2m = numpy.empty( (nexp*62,37) )
        ccd_rho3 = numpy.empty( (nexp*62,37) )
        exp_meanlogr = numpy.empty( (nexp,53) )
        exp_rho1p = numpy.empty( (nexp,53) )
        exp_rho1m = numpy.empty( (nexp,53) )
        exp_rho2p = numpy.empty( (nexp,53) )
        exp_rho2m = numpy.empty( (nexp,53) )
        exp_rho3 = numpy.empty( (nexp,53) )
        exp_var1 = numpy.empty( (nexp,53) )
        exp_var2 = numpy.empty( (nexp,53) )
        exp_var3 = numpy.empty( (nexp,53) )
        exp_var4 = numpy.empty( (nexp,53) )
        desdm_meanlogr = numpy.empty( (nexp,53) )
        desdm_rho1p = numpy.empty( (nexp,53) )
        desdm_rho1m = numpy.empty( (nexp,53) )
        desdm_rho2p = numpy.empty( (nexp,53) )
        desdm_rho2m = numpy.empty( (nexp,53) )
        desdm_rho3 = numpy.empty( (nexp,53) )
        desdm_var1 = numpy.empty( (nexp,53) )
        desdm_var2 = numpy.empty( (nexp,53) )
        desdm_var3 = numpy.empty( (nexp,53) )
        desdm_var4 = numpy.empty( (nexp,53) )

        meande1 = 0
        meande2 = 0
        varde1 = 0
        varde2 = 0
        nde = 0
        histde1 = numpy.zeros(200)  # bin size = 1.e-3
        histde2 = numpy.zeros(200)  # bin size = 1.e-3
        histnstars = numpy.zeros(200)  # bin size = 10
        listnstars = []
        meannstars = 0
        ngoodccd = 0

        iexp = 0
        iccd = 0
        for run,exp in zip(runs,exps):

            print 'Start work on run, exp = ',run,exp
            expnum = int(exp[6:])
            print 'expnum = ',expnum,'  ',iexp,'/',nexp,'  ',iccd

            exp_dir = os.path.join(work,exp)

            cat_file = os.path.join(cat_dir, exp + "_psf.fits")
            try:
                with pyfits.open(cat_file) as pyf:
                    data = pyf[1].data
            except IOError as e:
                print 'Caught exception: ',e
                print 'skipping this exposure'
                continue
            ccdnums = numpy.unique(data['ccdnum'])
            for ccdnum in ccdnums:
                nstars = ((data['ccdnum'] == ccdnum) & (data['flag'] == 0)).sum()
                if nstars > 0 and nstars < 2000:
                    histnstars[ int(numpy.floor(nstars/10)) ] += 1
                if nstars > 0:
                    meannstars += nstars
                    ngoodccd += 1
                    listnstars.append(nstars)
            mask = data['flag'] == 0
            de1 = data['obs_e1'][mask] - data['piff_e1'][mask]
            de2 = data['obs_e2'][mask] - data['piff_e2'][mask]
            meande1 += numpy.sum(de1)
            meande2 += numpy.sum(de2)
            varde1 += numpy.sum(de1*de1)
            varde2 += numpy.sum(de2*de2)
            nde += len(de1)
            histde1 += numpy.histogram(de1, bins=200, range=(-1.e-1,1.e-1))[0]
            histde2 += numpy.histogram(de2, bins=200, range=(-1.e-1,1.e-1))[0]

            stat_file = os.path.join(exp_dir, exp + ".json")

            # Read the json file 
            if not os.path.exists(stat_file):
                print stat_file,' not found'
                print 'No JSON file for this exposure.  Skipping.'
                continue
            with open(stat_file,'r') as f:
                stats = json.load(f)

            print "len stats = ",len(stats)
            ( expnum, 
              rho1_meanlogr,
              rho1_xip,
              rho1_xip_im,
              rho1_xim,
              rho1_xim_im,
              rho1_varxi,
              rho2_xip,
              rho2_xip_im,
              rho2_xim,
              rho2_xim_im,
              rho2_varxi,
              rho3_xi,
              rho3_varxi,
              drho1_meanlogr,
              drho1_xip,
              drho1_xip_im,
              drho1_xim,
              drho1_xim_im,
              drho1_varxi,
              drho2_xip,
              drho2_xip_im,
              drho2_xim,
              drho2_xim_im,
              drho2_varxi,
              drho3_xi,
              drho3_varxi,
            ) = stats[-1]
            exp_meanlogr[iexp,:] = rho1_meanlogr
            exp_rho1p[iexp,:] = rho1_xip
            exp_rho1m[iexp,:] = rho1_xim
            exp_rho2p[iexp,:] = rho2_xip
            exp_rho2m[iexp,:] = rho2_xim
            exp_rho3[iexp,:] = rho3_xi
            exp_var1[iexp,:] = rho1_varxi
            exp_var2[iexp,:] = rho2_varxi
            exp_var3[iexp,:] = rho3_varxi
            desdm_meanlogr[iexp,:] = drho1_meanlogr
            desdm_rho1p[iexp,:] = drho1_xip
            desdm_rho1m[iexp,:] = drho1_xim
            desdm_rho2p[iexp,:] = drho2_xip
            desdm_rho2m[iexp,:] = drho2_xim
            desdm_rho3[iexp,:] = drho3_xi
            desdm_var1[iexp,:] = drho1_varxi
            desdm_var2[iexp,:] = drho2_varxi
            desdm_var3[iexp,:] = drho3_varxi
            iexp += 1
 
            for s in stats[:-1]:

                ( ccdnum, 
                  rho1_meanlogr,
                  rho1_xip,
                  rho1_xim,
                  rho2_xip,
                  rho2_xim,
                  rho3_xi,
                ) = s

                ccd_meanlogr[iccd,:] = rho1_meanlogr
                ccd_rho1p[iccd,:] = rho1_xip
                ccd_rho1m[iccd,:] = rho1_xim
                ccd_rho2p[iccd,:] = rho2_xip
                ccd_rho2m[iccd,:] = rho2_xim
                ccd_rho3[iccd,:] = rho3_xi
                iccd += 1

        print '\nFinished processing all exposures'
        nexp = iexp

        nccd = iccd
        # Compute some stats and plot histograms
        meande1 /= nde
        meande2 /= nde
        varde1 -= nde * meande1**2
        varde2 -= nde * meande2**2
        varde1 /= nde
        varde2 /= nde
        print 'nde = ',nde
        print 'mean de1 = ',meande1
        print 'sigma = ',numpy.sqrt(varde1)
        print 'mean de2 = ',meande2
        print 'sigma = ',numpy.sqrt(varde2)

        plt.clf()
        left = [ (i-100) * 1.e-3 for i in range(200) ]
        plt.bar( left, histde1, width=1.e-3 )
        plt.xlim( [-1.e-1,1.e-1] )
        plt.xlabel(r'$e1_{psf} - e1_{model}$')
        plt.ylabel(r'$N_{stars}$')
        plt.title('Distribution of PSF e1 residuals')
        import matplotlib.mlab as mlab
        plt.plot(left,numpy.sum(histde1)*1.e-3*mlab.normpdf(left,meande1,numpy.sqrt(varde1)),
                 color='red')
        #plt.savefig('de1hist.png')
        plt.savefig('de1hist.pdf')

        plt.clf()
        plt.bar( left, histde2, width=1.e-3 )
        plt.xlim( [-1.e-1,1.e-1] )
        plt.xlabel(r'$e2_{psf} - e2_{model}$')
        plt.ylabel(r'$N_{stars}$')
        plt.title('Distribution of PSF e2 residuals')
        plt.plot(left,numpy.sum(histde2)*1.e-3*mlab.normpdf(left,meande2,numpy.sqrt(varde2)),
                 color='red')
        #plt.savefig('de2hist.png')
        plt.savefig('de2hist.pdf')

        plt.clf()
        left = [ 10*i for i in range(200) ]
        plt.bar( left, histnstars, width=10 )
        plt.xlim( [0,700] )
        plt.xlabel(r'stars per CCD')
        plt.ylabel(r'$N_\mathrm{CCDs}$')
        #plt.title('Distribution of number of stars per chip')
        #plt.savefig('nstarshist.png')
        plt.tight_layout()
        plt.savefig('nstarshist.pdf')

        imax = numpy.argmax(histnstars)
        meannstars /= ngoodccd
        print 'mode nstars = ',(imax+0.5)*10.
        print 'mean nstars = ',meannstars
        listnstars.sort()
        print 'mean nstars = ',sum(listnstars)/len(listnstars)
        print 'median nstars = ',listnstars[len(listnstars)//2]


    if False:
        # Plots for CCDs
        print 'nccd = ',nccd
        sqrtn = numpy.sqrt(nccd)
        meanr = numpy.exp(numpy.mean(ccd_meanlogr[:nccd,:], axis=0))
        rho1p = numpy.mean(ccd_rho1p[:nccd,:], axis=0)
        rho1m = numpy.mean(ccd_rho1m[:nccd,:], axis=0)
        rho2p = numpy.mean(ccd_rho2p[:nccd,:], axis=0)
        rho2m = numpy.mean(ccd_rho2m[:nccd,:], axis=0)
        sig_rho1p = numpy.std(ccd_rho1p[:nccd,:], axis=0)
        sig_rho1m = numpy.std(ccd_rho1m[:nccd,:], axis=0)
        sig_rho2p = numpy.std(ccd_rho2p[:nccd,:], axis=0)
        sig_rho2m = numpy.std(ccd_rho2m[:nccd,:], axis=0)
        print 'meanr = ',meanr
        print 'rho1p = ',rho1p
        print 'sig_rho1p = ',sig_rho1p
        plt.rc('font', family='serif')

        plt.clf()
        plt.title(r'SPTE $\rho_1$ (i.e. $\langle de de \rangle$) for individual CCDs')
        lines = plot_rho(meanr, rho1p, sig_rho1p, sqrtn, rho1m, sig_rho1m)
        plt.legend(lines, [r'$\rho_1(\theta)+$', r'$\rho_1(\theta)-$'] )
        plt.xlim( [0.5,20] )
        plt.ylabel(r'$\rho_1$')
        #plt.savefig('ccd_rho1.png')
        plt.savefig('ccd_rho1.pdf')

        plt.clf()
        plt.title(r'SPTE $\rho_2$ (i.e. $\langle e de \rangle$) for individual CCDs')
        lines = plot_rho(meanr, rho2p, sig_rho2p, sqrtn, rho2m, sig_rho2m)
        plt.legend(lines, [r'$\rho_2(\theta)+$', r'$\rho_2(\theta)-$'] )
        plt.xlim( [0.5,20] )
        plt.ylabel(r'$\rho_2$')
        #plt.savefig('ccd_rho2.png')
        plt.savefig('ccd_rho2.pdf')

    if True:
        # Plots for exposures:
        print 'nexp = ',nexp
        sqrtn = numpy.sqrt(nexp)
        meanr = numpy.exp(numpy.mean(exp_meanlogr[:nexp,:], axis=0))
        rho1p = numpy.mean(exp_rho1p[:nexp,:], axis=0)
        rho1m = numpy.mean(exp_rho1m[:nexp,:], axis=0)
        rho2p = numpy.mean(exp_rho2p[:nexp,:], axis=0)
        rho2m = numpy.mean(exp_rho2m[:nexp,:], axis=0)
        sig_rho1p = numpy.std(exp_rho1p[:nexp,:], axis=0)
        sig_rho1m = numpy.std(exp_rho1m[:nexp,:], axis=0)
        sig_rho2p = numpy.std(exp_rho2p[:nexp,:], axis=0)
        sig_rho2m = numpy.std(exp_rho2m[:nexp,:], axis=0)
        print 'meanr = ',meanr
        print 'rho1p = ',rho1p
        print 'sig_rho1p = ',sig_rho1p
        plt.rc('font', family='serif')

        plt.clf()
        plt.title(r'SPTE $\rho_1$ (i.e. $\langle de de \rangle$) for full exposures')
        lines = plot_rho(meanr, rho1p, sig_rho1p, sqrtn, rho1m, sig_rho1m)
        plt.legend(lines, [r'$\rho_1(\theta)+$', r'$\rho_1(\theta)-$'] )
        plt.xlim( [0.5,100] )
        plt.ylabel(r'$\rho_1$')
        #plt.savefig('exp_rho1.png')
        plt.savefig('exp_rho1.pdf')

        plt.clf()
        plt.title(r'SPTE $\rho_2$ (i.e. $\langle e de \rangle$) for full exposures')
        lines = plot_rho(meanr, rho2p, sig_rho2p, sqrtn, rho2m, sig_rho2m)
        plt.legend(lines, [r'$\rho_2(\theta)+$', r'$\rho_2(\theta)-$'] )
        plt.xlim( [0.5,100] )
        plt.ylabel(r'$\rho_2$')
        #plt.savefig('exp_rho2.png')
        plt.savefig('exp_rho2.pdf')

        # Prettier plots for Erin's talk
        plt.clf()
        pretty_rho1(meanr, rho1p, sig_rho1p, sqrtn)
        plt.savefig('rho1.pdf')
        #plt.savefig('rho1.png')

        plt.clf()
        pretty_rho2(meanr, rho2p, sig_rho2p, sqrtn)
        plt.savefig('rho2.pdf')
        #plt.savefig('rho2.png')

    k10arcmin = int(round(numpy.log(10 / 0.5)/0.1))  # bin index for theta = 10 arcmin (log bins of width 0.1 starting at 0.5 arcmin)
    if False:
        # Plots for worst rho1 exposure:
        # Find worst exposure based on rho1 at theta = 10 arcmin
        i = numpy.argmax(numpy.abs(exp_rho1p[:nexp,k10arcmin]), axis=0)
        print 'k10arcmin = ',k10arcmin
        print 'rho1[k] = ',exp_rho1p[:nexp,k10arcmin]
        print 'rho2[k] = ',exp_rho2p[:nexp,k10arcmin]
        print 'i = ',i
        print 'rho1[i] = ',exp_rho1p[i,:]
        meanr = numpy.exp(exp_meanlogr[i,:])
        rho1p = exp_rho1p[i,:]
        rho1m = exp_rho1m[i,:]
        rho2p = exp_rho2p[i,:]
        print 'rho2p = ',rho2p
        rho2m = exp_rho2m[i,:]
        sig_rho1p = numpy.sqrt(exp_var1[i,:])
        sig_rho1m = numpy.sqrt(exp_var1[i,:])
        sig_rho2p = numpy.sqrt(exp_var2[i,:])
        sig_rho2m = numpy.sqrt(exp_var2[i,:])

        plt.clf()
        plt.title(r'$\rho_1$ for exposure with worst $\rho_1$ at 10 arcmin')
        lines = plot_rho(meanr, rho1p, sig_rho1p, 1, rho1m, sig_rho1m)
        plt.legend(lines, [r'$\rho_1(\theta)+$', r'$\rho_1(\theta)-$'] )
        plt.xlim( [0.5,100] )
        plt.ylabel(r'$\rho_1$')
        #plt.savefig('w1_rho1.png')
        plt.savefig('w1_rho1.pdf')

        plt.clf()
        plt.title(r'$\rho_2$ for exposure with worst $\rho_1$ at 10 arcmin')
        lines = plot_rho(meanr, rho2p, sig_rho2p, 1, rho2m, sig_rho2m)
        plt.legend(lines, [r'$\rho_2(\theta)+$', r'$\rho_2(\theta)-$'] )
        plt.xlim( [0.5,100] )
        plt.ylabel(r'$\rho_2$')
        #plt.savefig('w1_rho2.png')
        plt.savefig('w1_rho2.pdf')

        # Plots for worst rho2 exposure:
        # Find worst exposure based on rho2 at theta = 10 arcmin
        i = numpy.argmax(numpy.abs(exp_rho2p[:nexp,k10arcmin]), axis=0)
        print 'k10arcmin = ',k10arcmin
        print 'rho1[k] = ',exp_rho1p[:nexp,k10arcmin]
        print 'rho2[k] = ',exp_rho2p[:nexp,k10arcmin]
        print 'i = ',i
        print 'rho2[i] = ',exp_rho2p[i,:]
        meanr = numpy.exp(exp_meanlogr[i,:])
        rho1p = exp_rho1p[i,:]
        rho1m = exp_rho1m[i,:]
        rho2p = exp_rho2p[i,:]
        print 'rho2p = ',rho2p
        rho2m = exp_rho2m[i,:]
        sig_rho1p = numpy.sqrt(exp_var1[i,:])
        sig_rho1m = numpy.sqrt(exp_var1[i,:])
        sig_rho2p = numpy.sqrt(exp_var2[i,:])
        sig_rho2m = numpy.sqrt(exp_var2[i,:])

        plt.clf()
        plt.title(r'$\rho_1$ for exposure with worst $\rho_2$ at 10 arcmin')
        lines = plot_rho(meanr, rho1p, sig_rho1p, 1, rho1m, sig_rho1m)
        plt.legend(lines, [r'$\rho_1(\theta)+$', r'$\rho_1(\theta)-$'] )
        plt.xlim( [0.5,100] )
        plt.ylabel(r'$\rho_1$')
        #plt.savefig('w2_rho1.png')
        plt.savefig('w2_rho1.pdf')

        plt.clf()
        plt.title(r'$\rho_2$ for exposure with worst $\rho_2$ at 10 arcmin')
        lines = plot_rho(meanr, rho2p, sig_rho2p, 1, rho2m, sig_rho2m)
        plt.legend(lines, [r'$\rho_2(\theta)+$', r'$\rho_2(\theta)-$'] )
        plt.xlim( [0.5,100] )
        plt.ylabel(r'$\rho_2$')
        #plt.savefig('w2_rho2.png')
        plt.savefig('w2_rho2.pdf')

    if False:
        # Plots for desdm:
        print 'nexp = ',nexp
        sqrtn = numpy.sqrt(nexp)
        meanr = numpy.exp(numpy.mean(desdm_meanlogr[:nexp,:], axis=0))
        rho1p = numpy.mean(desdm_rho1p[:nexp,:], axis=0)
        rho1m = numpy.mean(desdm_rho1m[:nexp,:], axis=0)
        rho2p = numpy.mean(desdm_rho2p[:nexp,:], axis=0)
        rho2m = numpy.mean(desdm_rho2m[:nexp,:], axis=0)
        sig_rho1p = numpy.std(desdm_rho1p[:nexp,:], axis=0)
        sig_rho1m = numpy.std(desdm_rho1m[:nexp,:], axis=0)
        sig_rho2p = numpy.std(desdm_rho2p[:nexp,:], axis=0)
        sig_rho2m = numpy.std(desdm_rho2m[:nexp,:], axis=0)
        print 'meanr = ',meanr
        print 'rho1p = ',rho1p
        print 'sig_rho1p = ',sig_rho1p
        plt.rc('font', family='serif')

        plt.clf()
        plt.title(r'SPTE $\rho_1$ (i.e. $\langle de de \rangle$) for DESDM PSFEx solution')
        lines = plot_rho(meanr, rho1p, sig_rho1p, sqrtn, rho1m, sig_rho1m)
        plt.legend(lines, [r'$\rho_1(\theta)+$', r'$\rho_1(\theta)-$'] )
        plt.xlim( [0.5,100] )
        plt.ylabel(r'$\rho_1$')
        #plt.savefig('desdm_rho1.png')
        plt.savefig('desdm_rho1.pdf')

        plt.clf()
        plt.title(r'SPTE $\rho_2$ (i.e. $\langle e de \rangle$) for DESDM PSFEx solution')
        lines = plot_rho(meanr, rho2p, sig_rho2p, sqrtn, rho2m, sig_rho2m)
        plt.legend(lines, [r'$\rho_2(\theta)+$', r'$\rho_2(\theta)-$'] )
        plt.xlim( [0.5,100] )
        plt.ylabel(r'$\rho_2$')
        #plt.savefig('desdm_rho2.png')
        plt.savefig('desdm_rho2.pdf')

        plt.clf()
        pretty_rho1(meanr, rho1p, sig_rho1p, sqrtn)
        #plt.savefig('desdm_pretty_rho1.png')
        plt.savefig('desdm_pretty_rho1.pdf')

        plt.clf()
        pretty_rho2(meanr, rho2p, sig_rho2p, sqrtn)
        #plt.savefig('desdm_pretty_rho2.png')
        plt.savefig('desdm_pretty_rho2.pdf')

    if False:
        # Count how many exposures (and CCDs) have high rho2
        count5 = (numpy.abs(exp_rho2p[:nexp,k10arcmin]) > 5.e-4).sum()
        count4 = (numpy.abs(exp_rho2p[:nexp,k10arcmin]) > 4.e-4).sum()
        count3 = (numpy.abs(exp_rho2p[:nexp,k10arcmin]) > 3.e-4).sum()
        count2 = (numpy.abs(exp_rho2p[:nexp,k10arcmin]) > 2.e-4).sum()
        count1 = (numpy.abs(exp_rho2p[:nexp,k10arcmin]) > 1.e-4).sum()
        count05 = (numpy.abs(exp_rho2p[:nexp,k10arcmin]) > 5.e-5).sum()
        count03 = (numpy.abs(exp_rho2p[:nexp,k10arcmin]) > 3.e-5).sum()
        count02 = (numpy.abs(exp_rho2p[:nexp,k10arcmin]) > 2.e-5).sum()

        print 'Exposure outliers:'
        print 'N with |rho2| > 5e-4 = ',count5
        print 'N with |rho2| > 4e-4 = ',count4
        print 'N with |rho2| > 3e-4 = ',count3
        print 'N with |rho2| > 2e-4 = ',count2
        print 'N with |rho2| > 1e-4 = ',count1
        print 'N with |rho2| > 5e-5 = ',count05
        print 'N with |rho2| > 3e-5 = ',count03
        print 'N with |rho2| > 2e-5 = ',count02

        count100 = (numpy.abs(ccd_rho2p[:nccd,k10arcmin]) > 1.e-2).sum()   # slice per-CCD arrays with nccd, not nexp
        count50 = (numpy.abs(ccd_rho2p[:nccd,k10arcmin]) > 5.e-3).sum()
        count30 = (numpy.abs(ccd_rho2p[:nccd,k10arcmin]) > 3.e-3).sum()
        count20 = (numpy.abs(ccd_rho2p[:nccd,k10arcmin]) > 2.e-3).sum()
        count10 = (numpy.abs(ccd_rho2p[:nccd,k10arcmin]) > 1.e-3).sum()
        count5 = (numpy.abs(ccd_rho2p[:nccd,k10arcmin]) > 5.e-4).sum()
        count4 = (numpy.abs(ccd_rho2p[:nccd,k10arcmin]) > 4.e-4).sum()
        count3 = (numpy.abs(ccd_rho2p[:nccd,k10arcmin]) > 3.e-4).sum()
        count2 = (numpy.abs(ccd_rho2p[:nccd,k10arcmin]) > 2.e-4).sum()
        count1 = (numpy.abs(ccd_rho2p[:nccd,k10arcmin]) > 1.e-4).sum()
        count05 = (numpy.abs(ccd_rho2p[:nccd,k10arcmin]) > 5.e-5).sum()
        count03 = (numpy.abs(ccd_rho2p[:nccd,k10arcmin]) > 3.e-5).sum()
        count02 = (numpy.abs(ccd_rho2p[:nccd,k10arcmin]) > 2.e-5).sum()

        print 'CCD outliers:'
        print 'N with |rho2| > 1e-2 = ',count100
        print 'N with |rho2| > 5e-3 = ',count50
        print 'N with |rho2| > 3e-3 = ',count30
        print 'N with |rho2| > 2e-3 = ',count20
        print 'N with |rho2| > 1e-3 = ',count10
        print 'N with |rho2| > 5e-4 = ',count5
        print 'N with |rho2| > 4e-4 = ',count4
        print 'N with |rho2| > 3e-4 = ',count3
        print 'N with |rho2| > 2e-4 = ',count2
        print 'N with |rho2| > 1e-4 = ',count1
        print 'N with |rho2| > 5e-5 = ',count05
        print 'N with |rho2| > 3e-5 = ',count03
        print 'N with |rho2| > 2e-5 = ',count02
Exemple #46
0
def genBell(mu=0, sigma=1, a=-3.5, b=5, n=1000):
    x = np.linspace(a, b, n)
    return x, mlab.normpdf(x, mu, sigma)
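
# Hedged usage sketch, not part of the original example: mlab.normpdf was
# removed in matplotlib >= 3.1, so an equivalent of genBell can be built on
# scipy.stats.norm.pdf. The parameter values below are illustrative.
import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import norm


def gen_bell_scipy(mu=0, sigma=1, a=-3.5, b=5, n=1000):
    # same curve as genBell above, evaluated with scipy instead of mlab
    x = np.linspace(a, b, n)
    return x, norm.pdf(x, mu, sigma)


x_bell, y_bell = gen_bell_scipy(mu=1.0, sigma=0.5)
plt.plot(x_bell, y_bell)
plt.show()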
Exemple #47
0
    i = i + 1
BsigmaOLS
# draw the five Bayesian densities
import matplotlib.mlab as mlab
import math
sigma = np.sqrt(varianceOLS)
#i = 0
#while i < len(bi):
# draw a (betabar, si)

i = 0
while i < len(bi):
    x = np.linspace(np.asscalar(bi[i] - 3 * sigma[0, i]),
                    np.asscalar(bi[i] + 3 * sigma[0, i]), 100)

    plt.plot(x, mlab.normpdf(x, np.asscalar(bi[i]), np.asscalar(sigma[0, i])))
    i = i + 1
plt.show()

# use LAD
i = 0
ssquareLAD = np.zeros([1, 5])
Ebeta = np.zeros([1, 5])
varianceLAD = np.zeros([1, 5])
while i < len(bi):
    ssquareLAD[0, i] = (1 / (n - 2)) * (
        np.square(excess_return[i, :] - beta[0, i] -
                  np.multiply(beta[1, i], market_excess_return[:, 1])).sum())
    ssquareLAD[0, i] = np.divide(ssquareLAD[0, i], v[0, i])
    ssquareLADsigma = np.sqrt(ssquareLAD)
    # draw of original LAD
Exemple #48
0
# Plot of synthetic weights vs calibration
plt.scatter(W, sol)
plt.plot([0, .2], [0, .2], color='red', linestyle='solid')
plt.xlabel('Calibration weight')
plt.ylabel('Synthetic weight')
plt.title(r'$\mathrm{Calibration\ v\ Synthetic\ weights}$')
plt.show()

# Get corresponding beta for synthetic control
ly = np.log(sol * sum(d))
beta_SC = inv(X[d == 0].T.dot(X[d == 0])).dot(X[d == 0].T.dot(ly))

### Distribution of SC estimate
n, bins, patches = plt.hist(val_s, 60, normed=1, facecolor='red', alpha=0.5)
y = mlab.normpdf(bins, 0, np.std(val_s))
l = plt.plot(bins, y, 'r--', linewidth=2)

n, bins, patches = plt.hist(val_b, 60, normed=1, facecolor='blue', alpha=0.5)

#plot
plt.xlabel('ATT')
plt.ylabel('Probability')
plt.title(
    r'$\mathrm{Histogram\ of\ Synthetic\ Control\ (red)\ and\ Calibration\ (blue)\ ATT:}$'
)
plt.grid(True)

plt.show()

### Distribution of SC counterfactual
# Get mean and std-dev of fitted gaussian, then array of x-values for plotting
(mu_res, sigma_res) = stats.norm.fit(filtered_residuals)
gaus_x_res = np.linspace(mu_res - 4 * sigma_res, mu_res + 4 * sigma_res, 500)

# Plot histogram of residuals
plt.subplot(1, 2, 2)
res_obs_bin_contents_unfiltered, res_obs_bin_edges, _ = plt.hist(
    filtered_residuals, bins=20)

# Plot fitted gaussian function
bin_width = (res_obs_bin_edges[-1] -
             res_obs_bin_edges[0]) / len(res_obs_bin_contents_unfiltered)
plt.plot(
    gaus_x_res,
    sum(res_obs_bin_contents_unfiltered) * bin_width *
    mlab.normpdf(gaus_x_res, mu_res, sigma_res), 'r')
plt.xlabel("Residual Value / mm")
plt.ylabel("Frequency")

# Output Gaussian parameters
print ""
print "Gaussian Goodness-of-Fit Testing:"
print ""
print "Fitted Gaussian Mean:", mu_res, "mm"
print "Fitted Gaussian Std-Dev:", sigma_res, "mm"
print ""

# Get expected number of residuals in each bin, from fitted gaussian
bin_count = len(res_obs_bin_contents_unfiltered)
res_func_bin_contents_unfiltered = [
    len(filtered_residuals) * bin_width *
    mlab.normpdf(0.5 * (res_obs_bin_edges[j] + res_obs_bin_edges[j + 1]),
                 mu_res, sigma_res) for j in range(bin_count)
]
def fx_func(nModels, x, mu, sig, w):
	fx = np.zeros(x.size)
	for i in range(nModels):
		fx = fx + w[i] * mlab.normpdf(x, mu[i], np.sqrt(sig[i]))
	return fx
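
# Hedged usage sketch for fx_func; the mixture parameters below are made-up
# illustrations, not values from the original code. Note that fx_func treats
# sig as variances and takes the square root before evaluating the normal pdf.
import numpy as np
from scipy.stats import norm


def fx_func_scipy(nModels, x, mu, sig, w):
    # same weighted Gaussian mixture as fx_func above, via scipy.stats.norm
    fx = np.zeros(x.size)
    for i in range(nModels):
        fx = fx + w[i] * norm.pdf(x, mu[i], np.sqrt(sig[i]))
    return fx


x_grid = np.linspace(-5, 5, 200)
mixture = fx_func_scipy(2, x_grid, mu=[-1.0, 2.0], sig=[0.5, 1.0], w=[0.3, 0.7])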
Exemple #51
0
def plot_hist(energy_file, system):

    energy_file = open(energy_file)
    energylines = energy_file.readlines()
    energy_file.close()
    ligand_energy_list = []
    decoy_energy_list = []

    for line in energylines:
        line = line.strip().split()
        if line[0][-8:] == "_ligands":
            lig_energy = float(line[2])
            ligand_energy_list.append(lig_energy)
        elif line[0][-7:] == "_decoys":
            dec_energy = float(line[2])
            decoy_energy_list.append(dec_energy)

    #mu = np.mean(rmsd_list)
    #sigma = np.std(rmsd_list)
    #x = mu + sigma*np.random.randn(10000)
    num_bins = 20
    f, axarr = plt.subplots(4)
    #axarr[0, 0].plot(x, y)
    #axarr[0, 0].set_title('Axis [0,0]')
    #axarr[0, 1].scatter(x, y)
    #axarr[0, 1].set_title('Axis [0,1]')
    #axarr[1, 0].plot(x, y ** 2)
    #axarr[1, 0].set_title('Axis [1,0]')
    #axarr[1, 1].scatter(x, y ** 2)
    #axarr[1, 1].set_title('Axis [1,1]')
    n, bins, patches = axarr[0].hist(ligand_energy_list, num_bins)
    n2, bins2, patches2 = axarr[1].hist(decoy_energy_list, num_bins)
    mu = np.mean(decoy_energy_list)
    sigma = np.std(decoy_energy_list)
    y = mlab.normpdf(bins2, mu, sigma)
    r = np.random.uniform(-15, 15, 1000)
    #      print r
    n3, bins3, patches3 = axarr[2].hist(r, num_bins)
    #       print n,bins,patches
    s1 = sum(n)
    s2 = sum(n2)
    s3 = sum(n3)
    meanbins = []
    for i in range(1, len(bins)):
        meanbins.append((bins[i] + bins[i - 1]) / 2.0)
    meanbins2 = []
    for i in range(1, len(bins2)):
        meanbins2.append((bins2[i] + bins2[i - 1]) / 2.0)
    meanbins3 = []
    for i in range(1, len(bins3)):
        meanbins3.append((bins3[i] + bins3[i - 1]) / 2.0)
    print(meanbins)
    print(meanbins2)
    print(meanbins3)
    #	print len(n),len(bins),len(patches)
    axarr[3].plot(meanbins, n / s1, 'r-', meanbins2, n2 / s2, 'g-', meanbins3,
                  n3 / s3, 'y-', bins2, y, 'k--')
    #plt.plot(bins, y, 'r--')
    #pylab.xlabel('GIST Energy')
    #pylab.ylabel('Count')
    #pylab.title(system)
    plt.subplots_adjust(left=0.15)
    #plt.show()
    #plt.savefig(system+"energy_GISTogram.png",dpi=1000)
    os.system("pwd")
    plt.savefig(system + "_energy_GISTogram.png")
    plt.clf()
Exemple #52
0
def gaussian(x, mu, sigma):
    return mlab.normpdf(x, mu, sigma)
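
# A drop-in alternative, sketched with plain numpy only: the normal pdf that
# mlab.normpdf used to return can be written out directly.
import numpy as np


def gaussian_np(x, mu, sigma):
    # N(mu, sigma^2) density evaluated at x, equivalent to mlab.normpdf(x, mu, sigma)
    x = np.asarray(x, dtype=float)
    return np.exp(-0.5 * ((x - mu) / sigma) ** 2) / (sigma * np.sqrt(2.0 * np.pi))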
Exemple #53
0
def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument("--image", type=str, help="input image to plot")
    parser.add_argument("--template",
                        type=str,
                        help="input template for subtraction")
    parser.add_argument("--cutoff",
                        type=float,
                        help="cutoff counts in the unit of SATCNTS",
                        default=1.0)
    parser.add_argument("--radius",
                        type=int,
                        help="radius to apply cutoff",
                        default=10)
    parser.add_argument("--method",
                        type=str,
                        help="what method...kernels,pca?",
                        default=None)
    parser.add_argument("--kernels",
                        type=str,
                        help="what kernels to convolve?",
                        default=None)
    parser.add_argument("--boundary",
                        type=str,
                        help="boundary to do the image differncing",
                        default=None)
    parser.add_argument("--maskreg",
                        type=str,
                        help="mask a region in the image",
                        default=None)
    parser.add_argument("--sqrt",
                        type=str,
                        help="use sqrt of the ivar",
                        default=False)
    parser.add_argument("--outfile",
                        type=str,
                        help="differenced image name",
                        default=None)
    parser.add_argument("--effout",
                        type=str,
                        help="output effective file",
                        default=None)
    parser.add_argument("--plot",
                        type=str,
                        help="show plot or not?",
                        default=False)
    parser.add_argument("--rdnoise",
                        type=str,
                        help="read noise from the image header",
                        default=False)
    parser.add_argument("--varlimit",
                        type=float,
                        help="variance limit to be expected",
                        default=0.8)

    args = parser.parse_args()

    print "processing", args.image
    image = fits.open(args.image)

    print "Reference ", args.template
    template = fits.open(args.template)

    diffimage, diffvar, efftemplate, Zs, R_var, chisq = do_subtraction(
        image,
        template,
        cutoff=args.cutoff,
        radius=args.radius,
        boundary=args.boundary,
        maskreg=args.maskreg,
        kerneltype=args.kernels,
        method=args.method,
        sqrt=args.sqrt,
        rdnoise=args.rdnoise)

    image_base = str.split(args.image, '_c.fit')
    image_break = str.split(args.image, '_')
    temp_base = str.split(args.template, '_c.fit')

    imghdr = image[0].header
    temphdr = template[0].header

    diff_imagename = image_base[0] + '-sub_c.fit'
    diff_header = temphdr
    diff_header["MJD"] = imghdr["MJD"]
    diff_header["EXPTIME"] = imghdr["EXPTIME"]
    diff_header["EFFTIME"] = imghdr["EFFTIME"]
    diff_header["DATE-OBS"] = imghdr["DATE-OBS"]
    diff_header["OBSTIME"] = imghdr["OBSTIME"]

    #- write the difference image to file only if the explained variance (R_var) meets the expectation
    print "diff_R2: ", R_var

    if R_var > args.varlimit:
        if args.outfile is not None:
            outfilename = args.outfile
        else:
            outfilename = diff_imagename

        #- fill in the diff image to full size of boundary
        if args.boundary is not None:
            print "Boundary:", args.boundary
            filledimage = image[0].data
            print filledimage.shape
            print diffimage.shape
            xlo, xhi, ylo, yhi = map(int, args.boundary.split(','))
            filledimage[xlo:xhi, ylo:yhi] = diffimage
        else:
            filledimage = diffimage

        fits.writeto(outfilename,
                     filledimage,
                     clobber=True,
                     header=diff_header)
        print "wrote differenced image", outfilename

        if args.effout is not None:
            fits.writeto(args.effout,
                         efftemplate,
                         clobber=True,
                         header=diff_header)  #- use same header
            print "wrote model image", args.effout
    else:
        print "INFO: Not enough variance in the model.... Not writing to file!"

    if args.plot:
        import matplotlib.mlab as mlab
        plt.xticks(fontsize=14)
        plt.yticks(fontsize=14)
        ax1 = plt.subplot(132, projection='3d')
        ax1.set_xlabel('X')
        ax1.set_ylabel('Y')

        sc_x = np.linspace(0, Zs.shape[0] - 1,
                           Zs.shape[0])[Zs.shape[0] / 2 - 40:Zs.shape[0] / 2 +
                                        40]
        sc_y = np.linspace(0, Zs.shape[1] - 1,
                           Zs.shape[1])[Zs.shape[0] / 2 - 40:Zs.shape[0] / 2 +
                                        40]

        SCX, SCY = np.meshgrid(sc_x, sc_y)

        surf = ax1.plot_surface(
            SCX,
            SCY,
            diffimage[Zs.shape[0] / 2 - 40:Zs.shape[0] / 2 + 40,
                      Zs.shape[0] / 2 - 40:Zs.shape[0] / 2 + 40],
            rstride=1,
            cstride=1,
            cmap=cm.Accent,
            linewidth=0.2)
        #ax1.text(0.7,0.7, r"$R = %.2f$"%R_var, verticalalignment='bottom', horizontalalignment='right',transform=ax1.transAxes,size=2)
        ax1.set_title(r"$R = %.2f$" % R_var, fontsize=20)
        ax2 = plt.subplot(131)

        refpixel = Zs.shape[0] / 2
        ax2.step(np.arange(int(Zs.shape[0] / 2 - 40),
                           int(Zs.shape[0] / 2 + 40)),
                 Zs[Zs.shape[0] / 2 - 40:Zs.shape[0] / 2 + 40, refpixel],
                 label='Data')
        ax2.step(np.arange(int(Zs.shape[0] / 2 - 40),
                           int(Zs.shape[0] / 2 + 40)),
                 efftemplate[Zs.shape[0] / 2 - 40:Zs.shape[0] / 2 + 40,
                             refpixel],
                 label='Model')
        ax2.step(np.arange(int(Zs.shape[0] / 2 - 40),
                           int(Zs.shape[0] / 2 + 40)),
                 diffimage[Zs.shape[0] / 2 - 40:Zs.shape[0] / 2 + 40,
                           refpixel],
                 label='Residual')

        ax2.text(0.85,
                 0.7,
                 r"$\chi^2/dof = %.2f$" % chisq,
                 verticalalignment='bottom',
                 horizontalalignment='right',
                 transform=ax2.transAxes,
                 fontsize=18)
        ax2.set_xlabel("Pixels (relative position)", fontsize=18)
        ax2.set_ylabel("Counts", fontsize=18)
        ylim0 = np.min(Zs[Zs.shape[0] / 2 - 40:Zs.shape[0] / 2 + 40,
                          refpixel]) - 10
        ylim1 = np.max(Zs[Zs.shape[0] / 2 - 40:Zs.shape[0] / 2 + 40,
                          refpixel]) + 10
        ax2.set_ylim(ylim0, ylim1)
        ax2.legend(fontsize=20)
        plt.xticks(fontsize=14)
        plt.yticks(fontsize=14)

        gd_diff = diffvar > 0.
        print "Mean diffimage", np.mean(diffimage)
        devs = diffimage[gd_diff] / np.sqrt(diffvar[gd_diff])  #- keep only pixels with positive variance
        print devs.shape
        meandevs = np.mean(devs)
        #sigmadevs=np.median(devs)-np.percentile(devs,15.865)
        sigmadevs = np.std(devs)
        print "Mean devs", meandevs
        print "Sigma devs", sigmadevs

        (mu, sig) = norm.fit(devs)
        print "norm fit of devs", mu, sig
        binsz = 0.3
        i0, i1 = int(np.min(devs) / binsz) - 1, int(np.max(devs) / binsz) + 1
        rng = tuple(binsz * np.array([i0, i1]))
        print i0, i1
        nbin = i1 - i0
        hist, edges = np.histogram(devs, range=rng, bins=nbin)
        xhist = (edges[1:] + edges[:-1]) / 2.
        ax3 = plt.subplot(133)
        ax3.hist(xhist, color='blue', bins=edges,
                 weights=hist)  #, histtype='step')
        # PDF for Gaussian
        area = binsz * np.sum(hist)

        xppf = np.linspace(scipy.stats.norm.ppf(0.0001),
                           scipy.stats.norm.ppf(0.9999), 100)
        xx = np.linspace(-6, 6, 100)
        gaussfit = mlab.normpdf(xx, meandevs, sigmadevs)
        ax3.plot(xx,
                 area * gaussfit,
                 'r-',
                 alpha=1.0,
                 linewidth=2,
                 label=r'$\mathcal{N}(%.2f,%.2f)$' % (meandevs, sigmadevs))
        ax3.plot(xppf,
                 area * scipy.stats.norm.pdf(xppf),
                 'k-',
                 alpha=1.0,
                 linewidth=2,
                 label=r'$\mathcal{N}(0,1)$')

        ax3.set_xlabel(r'Residual/$\sigma$', fontsize=18)
        ax3.set_ylabel('No. of pixels', fontsize=18)
        ax3.yaxis.set_label_position("right")
        ax3.yaxis.tick_right()
        ax3.set_xlim(-5, 5)
        plt.xticks(fontsize=14)
        plt.yticks(fontsize=14)

        plt.legend(frameon=False, fontsize=18, loc=2)
        #plt.tight_layout()
        plt.show()
Exemple #54
0
P = 1000000000  #display in ns
nsDeltas = [x * P for x in nsDeltas]
centerRange = 25
windowsns = 5
minRange = centerRange - windowsns
maxRange = centerRange + windowsns
plt.hist(nsDeltas,
         60,
         range=[minRange, maxRange],
         facecolor='blue',
         align='mid',
         alpha=0.75)
#plt.hist(nsDeltas, 100, normed=True, facecolor='blue', align='mid', alpha=0.75)
#plt.xlim((min(nsDeltas), max(nsDeltas)))
plt.xlabel('Time (ns)')
plt.ylabel('Entries')
plt.title('Histogram DeltaTime')
plt.grid(True)

#Superimpose Gauss
mean = np.mean(nsDeltas)
variance = np.var(nsDeltas)
sigma = np.sqrt(variance)
x = np.linspace(min(nsDeltas), max(nsDeltas), 100)
plt.plot(x, mlab.normpdf(x, mean, sigma))
print(mean, sigma)

#Display plot
plt.show()
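
# Sketch, not part of the original example: the histogram above shows raw
# counts while normpdf returns a density, so the overlaid curve is far too
# small to see; scaling the density by N * bin_width makes it match the bar
# heights. Synthetic data stand in for nsDeltas here.
import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import norm

deltas = np.random.normal(loc=25.0, scale=1.2, size=5000)  # stand-in for nsDeltas (ns)
counts, edges, _ = plt.hist(deltas, 60, range=[20, 30], facecolor='blue', alpha=0.75)
bin_width = edges[1] - edges[0]
x = np.linspace(20, 30, 200)
plt.plot(x, len(deltas) * bin_width * norm.pdf(x, deltas.mean(), deltas.std()), 'r-')
plt.show()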
Exemple #55
0
estimated_osnr = [Decimal(10 * np.log10(value[-2])) for value in est_results]
estimated_osnr = np.double(
    estimated_osnr) - 1.111613661127074  # difference mean

mu_measured = np.mean(measured_osnr)
variance_measured = np.var(measured_osnr)
sigma_measured = math.sqrt(variance_measured)
x_measured = np.linspace(mu_measured - 4 * sigma_measured,
                         mu_measured + 4 * sigma_measured, 100)

mu_estimated = np.mean(estimated_osnr)
variance_estimated = np.var(estimated_osnr)
sigma_estimated = math.sqrt(variance_estimated)
x_estimated = np.linspace(mu_estimated - 4 * sigma_estimated,
                          mu_estimated + 4 * sigma_estimated, 100)
plt.plot(x_estimated, mlab.normpdf(x_estimated, mu_estimated, sigma_estimated),
         'r')
plt.plot(x_measured, mlab.normpdf(x_measured, mu_measured, sigma_measured),
         'b')

blue_patch = mpatches.Patch(color='blue', label='Simulator Calculation')
red_patch = mpatches.Patch(color='red', label='Controller Estimation')

plt.xlabel('OSNR (dB)')
plt.grid(True)

TH_QPSK = [10] * 8
TH_8QAM = [14] * 8
TH_16QAM = [17] * 8

d_th = [0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.8]
Exemple #56
0
print numpy.average(h4), numpy.var(h4)
print numpy.average(h5), numpy.var(h5)

# pylab.subplot(3,1,1)
# pylab.title('QIGA$(\\theta)$')
# pylab.xlim((1380, 1500))
# pylab.hist(h1, 300)

pylab.subplot(4,1,1)
pylab.title('SGA')
pylab.xlim((1350, 1500))
n, bins, patches = pylab.hist(sga, 300)
bincenters = 0.5*(bins[1:]+bins[:-1])
mu = numpy.average(sga)
sigma = numpy.var(sga) ** .5
y = mlab.normpdf( bincenters, mu, sigma)
y *= 120. / max(y)
pylab.plot(bincenters, y, 'r--', linewidth=1)

pylab.subplot(4,1,2)
pylab.title('bQIGAo1')
pylab.xlim((1350, 1500))
n, bins, patches = pylab.hist(h2, 300)
bincenters = 0.5*(bins[1:]+bins[:-1])
mu = numpy.average(h2)
sigma = numpy.var(h2) ** .5
y = mlab.normpdf( bincenters, mu, sigma)
y *= 250. / max(y)
pylab.plot(bincenters, y, 'r--', linewidth=1)

Exemple #57
0
# mpl.style.use('classic')
mpl.style.use('default')

# To have the same random numbers repeated again and again.
np.random.seed(2785)

# For details
# https://stackoverflow.com/questions/21494489/what-does-numpy-random-seed0-do

mean = 100
sd = 15
N = 1000
binsize = 50

# Data
IQ = np.random.normal(mean, sd, N)

counts, bins, extras = plt.hist(IQ, binsize, facecolor='chocolate', edgecolor='k', label='IQs', density=True)

# An idealised PDF
pdf = mlab.normpdf(bins, mean, sd) # Creates the pdf of normal distribution
plt.plot(bins, pdf, label='series', color='xkcd:navy blue')

plt.xlabel('IQ')
plt.ylabel('Count/Fraction')
plt.xticks(bins)
plt.title('IQ Distribution Histogram')
plt.grid(True)
plt.legend()
plt.show()
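
# Sketch, an addition rather than part of the original: with density=True the
# histogram integrates to 1, so a fitted pdf overlays directly; here
# scipy.stats.norm.fit recovers (mu, sigma) from the sample instead of reusing
# the known generating values.
import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import norm

iq = np.random.normal(100, 15, 1000)
mu_fit, sd_fit = norm.fit(iq)
counts, bin_edges, _ = plt.hist(iq, 50, density=True, facecolor='chocolate', edgecolor='k')
grid = np.linspace(bin_edges[0], bin_edges[-1], 200)
plt.plot(grid, norm.pdf(grid, mu_fit, sd_fit), color='xkcd:navy blue')
plt.show()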
Exemple #58
0
import matplotlib.pyplot as plt
import numpy as np
import matplotlib.mlab as mlab
import math
from scipy.stats import uniform


def arran(x, y):
    # scale each element of x by y in place (the original loop reassigned only
    # the loop variable and tried to concatenate a str with a float)
    for i, xi in enumerate(x):
        print(xi)
        a = xi * y
        print(" " + str(a))
        x[i] = a
    return x


mu = 2
variance = 1
sigma = math.sqrt(variance)
x = np.linspace(-3, 6, 100)
y = mlab.normpdf(x, mu, sigma)
plt.plot(x, mlab.normpdf(x, mu, sigma))
plt.plot(x, arran(y, 0.4), 'r-')

xn = np.linspace(uniform.ppf(0.01), uniform.ppf(0.99), 100)
plt.plot(xn, uniform.pdf(xn), 'r-', lw=2, alpha=0.6, label='uniform pdf')

plt.show()
Exemple #59
0
x1 = np.linspace(0, 1, 100, dtype=np.float32)
y1 = np.sin(2 * np.pi * x1)

y_hat1 = (w1[0] * tf.exp((-1) * tf.pow(
    (x1 - mu1[0]), 2) / tf.pow(sig1[0], 2)) + w1[1] * tf.exp((-1) * tf.pow(
        (x1 - mu1[1]), 2) / tf.pow(sig1[1], 2)) + w1[2] * tf.exp((-1) * tf.pow(
            (x1 - mu1[2]), 2) / tf.pow(sig1[2], 2)) + w1[3] * tf.exp(
                (-1) * tf.pow((x1 - mu1[3]), 2) / tf.pow(sig1[3], 2)) + b1)

x2 = np.linspace(0, 1, 100)
print(mu1)
plt.figure(1)

plt.scatter(data.x, data.y)
plt.plot(x1, y1)
plt.plot(x1, sess.run(y_hat1), 'r--')
plt.ylabel('y')
plt.xlabel('x')
plt.title('Base Function, Training Data and Trained Model')

plt.figure(2)
plt.plot(x2, mlab.normpdf(x2, mu1[0], sig1[0]), label='gaussian 1')
plt.plot(x2, mlab.normpdf(x2, mu1[1], sig1[1]), label='gaussian 2')
plt.plot(x2, mlab.normpdf(x2, mu1[2], sig1[2]), label='gaussian 3')
plt.plot(x2, mlab.normpdf(x2, mu1[3], sig1[3]), label='gaussian 4')
plt.ylabel('y')
plt.xlabel('x')
plt.title('Gaussian Bases for Fit')
plt.show()
Exemple #60
0
def add_normal(mean, std):
    x = np.linspace(mean - 4 * std, mean + 4 * std, 100)
    #x = np.linspace(mean - 3*std, mean + 3*std, 100)
    plt.plot(x, mlab.normpdf(x, mean, std))
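
# Hedged usage sketch, not part of the original snippet: overlay two normal
# curves on one axes; the (mean, std) pairs are illustrative assumptions, and
# scipy.stats.norm.pdf stands in for mlab.normpdf.
import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import norm

for mean, std in [(0.0, 1.0), (2.0, 0.5)]:
    x = np.linspace(mean - 4 * std, mean + 4 * std, 100)
    plt.plot(x, norm.pdf(x, mean, std), label='N(%.1f, %.1f)' % (mean, std))
plt.legend()
plt.show()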