def test_skewed():
    """The skew-normal parameters computed by SkewedGaussian must reproduce
    the requested moments: zero mean, variance == length**2, given skewness.
    """
    from ctapipe.image.toymodel import SkewedGaussian

    np.random.seed(0)  # reproducible NSB noise in generate_image
    camera = CameraGeometry.from_name('LSTCam')

    # toy-model shower parameters
    centroid_x, centroid_y = u.Quantity([0.2, 0.3], u.m)
    shower_width = 0.05 * u.m
    shower_length = 0.15 * u.m
    requested_skewness = 0.3

    toy = SkewedGaussian(
        x=centroid_x,
        y=centroid_y,
        width=shower_width,
        length=shower_length,
        psi='30d',
        skewness=requested_skewness,
    )
    # exercise image generation as well (the image itself is not asserted on)
    image, signal, _ = toy.generate_image(
        camera, intensity=50, nsb_level_pe=5,
    )

    # recover the underlying skew-normal parameters and check their moments
    a, loc, scale = toy._moments_to_parameters()
    mean, var, skew = skewnorm(a=a, loc=loc, scale=scale).stats(moments='mvs')

    expected_var = shower_length.to_value(u.m) ** 2
    assert np.isclose(mean, 0)
    assert np.isclose(var, expected_var)
    assert np.isclose(skew, requested_skewness)
def plotpdf(mean=0.0, stdev=1.0, skew=0.0, npts=250):
    """Draw a skew-normal PDF over a histogram of matching random samples.

    Left panel: histogram of *npts* random draws (re-aligned to the target
    mean/stdev) with the analytic PDF curve overlaid, plus mean/median
    markers.  Right panel: every 5th raw sample, to eyeball the draws.
    """
    dist = skewnorm(convert_to_alpha(skew), loc=mean, scale=stdev)

    grid = np.linspace(-5, 5, 100)
    density = dist.pdf(grid)

    samples = dist.rvs(npts)
    samples = realign_data(samples, mean, stdev)

    # shift/rescale the plotting grid so the curve lines up with the
    # re-aligned data
    mn, var = dist.stats(moments='mv')
    scale_factor = stdev / np.sqrt(var)
    grid_aligned = mean + (grid - mn) * scale_factor

    plt.figure(1, figsize=(12, 5))

    plt.subplot(121)
    nbins = np.min([npts // 10, 50])
    counts, edges, _ = plt.hist(samples, bins=nbins)
    centers = edges[:-1] + (np.diff(edges) / 2)
    # rescale the unit-area PDF up to the histogram's count scale
    mult = np.sum(np.diff(edges) * counts) / np.sum(
        np.diff(edges * scale_factor) * dist.pdf(centers)
    )
    plt.plot(grid_aligned, density * mult, 'k-')
    plt.axvline(np.mean(samples), color='r', linestyle='--', linewidth=2,
                label='Mean')
    plt.axvline(np.median(samples), color='k', linestyle='--', linewidth=2,
                label='Median')
    plt.xlim(-4.5, 4.5)
    plt.ylim(0, npts // 8)
    plt.legend()
    plt.title('Histogram of data')

    plt.subplot(122)
    plt.plot(samples[::5], 'k.-')
    plt.axhline(np.mean(samples), color='r', linestyle='--')
    plt.ylim(-5, 5)
    plt.title('Sample of randomly-generated data\n with the given parameters')
def pdf(self, x, y):
    '''2d probability for photon electrons in the camera plane'''
    # Rotate camera coordinates into the shower frame: "longitudinal"
    # runs along the major axis (angle psi), "transverse" across it.
    center = u.Quantity([self.x, self.y]).to_value(u.m)
    rot = linalg.rotation_matrix_2d(-Angle(self.psi))
    cartesian = np.column_stack([x.to_value(u.m), y.to_value(u.m)])
    longitudinal, transverse = rot @ (cartesian - center).T

    # Gaussian across the axis, skew-normal along it; the skew-normal
    # parameters come from the model's stored moments.
    a, loc, scale = self._moments_to_parameters()
    p_transverse = norm(loc=0, scale=self.width).pdf(transverse)
    p_longitudinal = skewnorm(a=a, loc=loc, scale=scale).pdf(longitudinal)
    return p_transverse * p_longitudinal
def demo():
    """Ad-hoc demonstration driver: estimate the entropy of a zoo of
    distributions and compare against reference values via _show_entropy.

    Several experiment groups are parked behind ``if False:`` guards and
    commented-out calls; they are kept deliberately for easy re-enabling.
    """
    # hide module load time from Timer
    from sklearn.neighbors import NearestNeighbors
    ## Bootstrap didn't help, but leave the test code in place for now
    #D = Dirichlet(alpha=[0.02]*20)
    #theta = D.rvs(size=1000)
    #S, Serr = wnn_bootstrap(D.rvs(size=200000))
    #print("bootstrap", S, D.entropy())
    #return
    if False:
        # Multivariate T distribution
        D = stats.t(df=4)
        _show_entropy("T;df=4", D, N=20000)
        D = MultivariateT(sigma=np.diag([1]), df=4)
        _show_entropy("MT[1];df=4", D, N=20000)
        D = MultivariateT(sigma=np.diag([1, 12, 0.2])**2, df=4)
        _show_entropy("MT[1,12,0.2];df=4", D, N=10000)
        D = MultivariateT(sigma=np.diag([1]*10), df=4)
        _show_entropy("MT[1]*10;df=4", D, N=10000)
        D = MultivariateT(sigma=np.diag([1, 12, 0.2, 1e2, 1e-2, 1])**2, df=4)
        _show_entropy("MT[1,12,0.2,1e3,1e-3,1];df=4", D, N=10000)
        return
    if False:
        # Multivariate skew normal distribution
        D = stats.skewnorm(5)
        _show_entropy("skew=5 N[1]", D, N=20000)
        D = Joint(stats.skewnorm(5, 0, s) for s in [1, 12, 0.2])
        _show_entropy("skew=5 N[1,12,0.2]", D, N=10000)
        D = Joint(stats.skewnorm(5, 0, s) for s in [1]*10)
        _show_entropy("skew=5 N[1]*10", D, N=10000)
        D = Joint(stats.skewnorm(5, 0, s) for s in [1, 12, 0.2, 1e2, 1e-2, 1])
        _show_entropy("skew=5 N[1,12,0.2,1e3,1e-3,1]", D, N=10000)
        #print("double check entropy", D.entropy()/LN2, entropy_mc(D)/LN2)
        return
    # Uniform box over 10 dimensions with linearly increasing widths.
    D = Box(center=[100]*10, width=np.linspace(1, 10, 10))
    _show_entropy("Box 10!", D, N=10000)
    # 1-D normal at several sample sizes (only N=10000 currently active).
    # NOTE(review): label says N[100,8] but the distribution is
    # stats.norm(10, 8) — confirm which was intended.
    D = stats.norm(10, 8)
    #_show_entropy("N[100,8]", D, N=100)
    #_show_entropy("N[100,8]", D, N=200)
    #_show_entropy("N[100,8]", D, N=500)
    #_show_entropy("N[100,8]", D, N=1000)
    #_show_entropy("N[100,8]", D, N=2000)
    #_show_entropy("N[100,8]", D, N=5000)
    _show_entropy("N[100,8]", D, N=10000)
    #_show_entropy("N[100,8]", D, N=20000)
    #_show_entropy("N[100,8]", D, N=50000)
    #_show_entropy("N[100,8]", D, N=100000)
    # Diagonal multivariate normals of growing dimension / condition number.
    D = stats.multivariate_normal(cov=np.diag([1, 12, 0.2])**2)
    #_show_entropy("MVN[1,12,0.2]", D)
    D = stats.multivariate_normal(cov=np.diag([1]*10)**2)
    #_show_entropy("MVN[1]*10", D, N=1000)
    _show_entropy("MVN[1]*10", D, N=10000)
    #_show_entropy("MVN[1]*10", D, N=100000)
    #_show_entropy("MVN[1]*10", D, N=200000, N_entropy=20000)
    D = stats.multivariate_normal(cov=np.diag([1, 12, 0.2, 1, 1, 1])**2)
    #_show_entropy("MVN[1,12,0.2,1,1,1]", D, N=100)
    #_show_entropy("MVN[1,12,0.2,1,1,1]", D, N=1000)
    _show_entropy("MVN[1,12,0.2,1,1,1]", D, N=10000)
    #_show_entropy("MVN[1,12,0.2,1,1,1]", D, N=100000)
    D = stats.multivariate_normal(cov=np.diag([1, 12, 0.2, 1e2, 1e-2, 1])**2)
    #_show_entropy("MVN[1,12,0.2,1e3,1e-3,1]", D, N=100)
    #_show_entropy("MVN[1,12,0.2,1e3,1e-3,1]", D, N=1000)
    _show_entropy("MVN[1,12,0.2,1e3,1e-3,1]", D, N=10000)
    #_show_entropy("MVN[1,12,0.2,1e3,1e-3,1]", D, N=100000)
    # Well-separated bimodal mixture: narrow mode dominates the weight.
    D = GaussianMixture([1,10], mu=[[0]*10, [100]*10], sigma=[[10]*10, [0.1]*10])
    _show_entropy("bimodal mixture", D)
    # Sparse Dirichlet (alpha << 1) at several sample sizes.
    D = Dirichlet(alpha=[0.02]*20)
    #_show_entropy("Dirichlet[0.02]*20", D, N=1000)
    #_show_entropy("Dirichlet[0.02]*20", D, N=2000)
    #_show_entropy("Dirichlet[0.02]*20", D, N=5000)
    #_show_entropy("Dirichlet[0.02]*20", D, N=10000)
    _show_entropy("Dirichlet[0.02]*20", D, N=20000)