Example no. 1
    GM.plot()


    #! Sign, Abs
    #! ----------
    figure()
    N = NormalDistr(0.1,1)
    S = sign(N)
    A = abs(N)
    S.plot(color='r', linewidth=2.0, label="sign(N)")
    A.plot(color='g', linewidth=2.0, label="abs(N)")
    N.plot(color='b', linewidth=2.0, label="N")
    legend()
    figure()
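    #! S and A are both functions of the same N; PaCal's arithmetic assumes
    #! independent operands, so it would warn about the product S * A below.
    #! The warning is silenced for this demonstration.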
    params.general.warn_on_dependent = False
    demo_distr(S * A)#, theoretical=N)
    params.general.warn_on_dependent = True

    #! min, max, Conditional distributions
    #! ------------------------------------
    #!
    #!
    figure()
    E = max(ExponentialDistr() - 1, ZeroDistr())
    subplot(211)
    E.plot(linewidth=2.0)
    E.summary()
    subplot(212)
    params.general.warn_on_dependent = False
    S = E + E + E + E + E + E
    params.general.warn_on_dependent = True
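
    #! A minimal sketch of min, by analogy with max above (assuming pacal's min
    #! accepts distributions in the same way as max):
    figure()
    M = min(ExponentialDistr(), UniformDistr(0, 1))
    M.plot(linewidth=2.0)
    M.summary()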
Example no. 2
#!
from __future__ import print_function

from functools import partial
import numpy

from pylab import figure, show

from pacal import *
from pacal.distr import demo_distr


#! Exercise 6.3
d = NormalDistr() * NormalDistr() + NormalDistr() * NormalDistr()
figure()
demo_distr(d, theoretical = LaplaceDistr())

#! Example 7.3.1
w1 = WeibullDistr(2)
w2 = WeibullDistr(3)
figure()
demo_distr(w1 * w2)

#! Example 7.3.2
x1 = BetaDistr(9,3)
x2 = BetaDistr(8,3)
x3 = BetaDistr(4,2)
figure()
demo_distr(x1 * x2 * x3)

#! Example 8.6.1
Example no. 3
from functools import partial
from pylab import *
from mpl_toolkits.axes_grid.inset_locator import zoomed_inset_axes
from mpl_toolkits.axes_grid.inset_locator import mark_inset

from pacal import *
from pacal.distr import demo_distr

if __name__ == "__main__":
    #!-------------------------------------------
    #! Product of two shifted normal variables
    #!-------------------------------------------
    #! Such a product always has a singularity at 0, but the further the factors'
    #! means are from zero, the 'lighter' the singularity becomes.
    figure()
    d = NormalDistr(0, 1) * NormalDistr(0, 1)
    demo_distr(d, ymax=1.5, xmin=-5, xmax=5)
    #show()

    figure()
    d = NormalDistr(1, 1) * NormalDistr(1, 1)
    demo_distr(d)
    #show()

    figure()
    d = NormalDistr(2, 1) * NormalDistr(2, 1)
    demo_distr(d)
    #show()

    figure()
    d = NormalDistr(3, 1) * NormalDistr(3, 1)
    d.plot()
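    #! A minimal sketch: zooming in near 0 with pylab's xlim makes the
    #! singularity visible even when both factors have mean 3.
    figure()
    d.plot(linewidth=2.0)
    xlim(-0.1, 0.1)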
Example no. 4
#!-------------------
#!
from functools import partial
import numpy

from pylab import figure, show

from pacal import *
from pacal.distr import demo_distr
import time
if __name__ == "__main__":
        
    tic = time.time()
    #! Figure 3.1.1
    figure()
    demo_distr(UniformDistr(0,1) + UniformDistr(0,1),          
                theoretical = lambda x: x * ((x >= 0) & (x < 1)) + (2-x) * ((x >= 1) & (x <= 2)))
    #! Figure 3.1.2
    figure()
    demo_distr(UniformDistr(0,1) - UniformDistr(0,1),
               theoretical = lambda x: (x+1) * ((x >= -1) & (x < 0)) + (1-x) * ((x >= 0) & (x <= 1)))
    
    #!-------------------
    #! Section 3.2.2
    #!-------------------
    figure()
    demo_distr(ChiSquareDistr(1) + ChiSquareDistr(1),
              theoretical = ExponentialDistr(0.5))
    figure()
    demo_distr(ChiSquareDistr(1) + ChiSquareDistr(1) + ChiSquareDistr(1),
              theoretical = ChiSquareDistr(3))
    figure()
Example no. 5
from pylab import figure

from pacal import *
from pacal.distr import demo_distr

#!
#! Inverse of r.v.
#! ---------------

N = NormalDistr(0,1)
d = 1/(N**2)**0.5
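# d is the reciprocal of |N|: its density decays like x**(-2) for large x, so
# the mean (and all higher moments) of d do not exist.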
#d.summary()
C = CauchyDistr()
(2/C).summary()
(C/2).summary()
demo_distr(C/2, 0.5/C)

E1 = ChiSquareDistr(1)
E2 = ChiSquareDistr(3)
figure()
demo_distr(log(E1))
figure()
demo_distr(log(E2))
figure()
demo_distr(log(UniformDistr(0.0,1.0)))
figure()
demo_distr(exp(UniformDistr(0.0,1.0)))
figure()
demo_distr(exp(UniformDistr(0.0,1.0))+exp(UniformDistr(0.0,1.0)))
figure()
demo_distr(log(UniformDistr(0.0,1.0))+log(UniformDistr(0.0,1.0)))
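#! The last sum has a simple closed form: -log(U) for U uniform on (0, 1) is
#! Exp(1), so the sum of two such logs is minus a Gamma(2, 1) variable. A quick
#! check of this (a sketch, in the style of the examples above):
figure()
demo_distr(log(UniformDistr(0.0,1.0)) + log(UniformDistr(0.0,1.0)),
           theoretical=lambda x: -x * exp(x) * (x <= 0))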
Example no. 6
from functools import partial

from pylab import figure

from pacal import *
from pacal.distr import demo_distr
import time


def funEx4_20(m, a, b, x):
    return (m + 1) * x / (b**(m + 1) - a**(m + 1))


if __name__ == "__main__":
    #!-------------------
    #! Section 4.1
    #!-------------------
    tic = time.time()
    figure()
    demo_distr(UniformDistr(0, 1) * UniformDistr(0, 1),
               theoretical=lambda x: -log(x))
    figure()
    demo_distr(UniformDistr(0, 1) / UniformDistr(0, 1),
               theoretical=lambda x: (x <= 1) * 0.5 + (x > 1) * 0.5 / x**2)

    #! Section 4.4.1
    def prod_uni_pdf(n, x):
        # density of the product of n independent U(0, 1) variables:
        # (-log x)**(n - 1) / (n - 1)!
        pdf = (-log(x))**(n - 1)
        for i in range(2, n):
            pdf /= i
        return pdf

    figure()
    demo_distr(UniformDistr(0, 1) * UniformDistr(0, 1) * UniformDistr(0, 1),
               theoretical=partial(prod_uni_pdf, 3))
Example no. 7
from functools import partial

from pylab import figure

from pacal import *
from pacal.distr import demo_distr
import time


def funEx4_20(m, a, b, x):
    return (m + 1) * x / (b ** (m + 1) - a ** (m + 1))

if __name__ == "__main__":
    #!-------------------
    #! Section 4.1
    #!-------------------
    tic = time.time()
    figure()
    demo_distr(UniformDistr(0,1) * UniformDistr(0,1), theoretical = lambda x: -log(x))
    figure()
    demo_distr(UniformDistr(0,1) / UniformDistr(0,1), theoretical = lambda x: (x<=1) * 0.5 + (x>1) * 0.5 / x**2)
    
    #! Section 4.4.1
    def prod_uni_pdf(n, x):
        # density of the product of n independent U(0, 1) variables:
        # (-log x)**(n - 1) / (n - 1)!
        pdf = (-log(x)) ** (n-1)
        for i in range(2, n):
            pdf /= i
        return pdf
    figure()
    demo_distr(UniformDistr(0,1) * UniformDistr(0,1) * UniformDistr(0,1), theoretical = partial(prod_uni_pdf, 3))
    
    pu = UniformDistr(0,1)
    for i in range(4):
        pu *= UniformDistr(0,1)
Example no. 8
from pacal import *
from pacal.distr import demo_distr

if __name__ == "__main__":
    #! Here we describe known accuracy limitations of the PaCal project and topics for future work.
    
    #! Exponential function and logarithm
    #! ----------------------------------
    
    #$ The real power of a random variable $(X^\alpha)$ is always
    #$ well defined, but the logarithm and the exponential may cause trouble.
    #$ Below are some examples:
    #! 
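    #! A minimal sketch of the well-behaved case first: a fixed real power of a
    #! nonnegative random variable, here |N(0,1)|**2.5, poses no such problems.
    figure()
    demo_distr(abs(NormalDistr()) ** 2.5, histogram=True)
    #!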
    figure()
    Y = abs(NormalDistr()) ** NormalDistr()
    demo_distr(Y, histogram = True, xmin = 0, xmax = 3, ymax = 3, hist_bins = 500)
    Y.get_piecewise_cdf().plot(color='b', linewidth = 2.0) 
    #! 
    #! As one can see, we obtain only six digits of accuracy. The main problem
    #! is the singularity at the point 1: this kind of singularity is difficult
    #! to detect and difficult to interpolate.
    #! 
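    #! As a rough sanity check (a sketch), summary() prints the basic numerical
    #! characteristics of Y computed from the interpolated density.
    Y.summary()
    #!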
    Y = UniformDistr(0,1) ** NormalDistr(1,1)
    figure()
    demo_distr(Y, histogram = True, xmin = 0, xmax = 3, ymax = 3, hist_bins = 500)
    Y.get_piecewise_cdf().plot(color='b', linewidth = 2.0) 
    #! 
    Y = UniformDistr(0.5, 1.5) ** NormalDistr(0,1)
    figure()
    demo_distr(Y, histogram = True, xmin = 0, xmax = 3, ymax = 3, hist_bins = 500)
    Y.get_piecewise_cdf().plot(color='b', linewidth = 2.0) 
Example no. 9
    t0 = time.time()

    def theor_sum_exp(a1, a2, a3, x):
        # density of (E1 + E2 + E3) / 3 for independent Ei ~ Exp(ai) with
        # distinct rates (a scaled hypoexponential density)
        t1 = exp(-3 * a1 * x) / (a2 - a1) / (a3 - a1)
        t2 = exp(-3 * a2 * x) / (a1 - a2) / (a3 - a2)
        t3 = exp(-3 * a3 * x) / (a1 - a3) / (a2 - a3)
        return 3 * a1 * a2 * a3 * (t1 + t2 + t3)

    for a1, a2, a3 in [
        (1.0, 2.0, 3.0),
        (1, 0.01, 100.0),
    ]:
        figure()
        d = (ExponentialDistr(a1) + ExponentialDistr(a2) +
             ExponentialDistr(a3)) / 3
        demo_distr(d, theoretical=partial(theor_sum_exp, a1, a2, a3))

    #! Section 9.1.2
    #! the L1 statistic for variances
    #! Question: are the numerator and denominator independent?
    for ns in [
        [3, 5, 2],  # sample sizes
        [4, 5, 10, 7, 3]
    ]:
        print("sample sizes:", ns)
        N = sum(ns)
        num = ChiSquareDistr(ns[0] - 1)
        for n in ns[1:]:
            num *= ChiSquareDistr(n - 1)
        #num.summary()
        num **= (1.0 / N)
Example no. 10
#!------------------
#! CHAPTERS 6, 7, 8
#!------------------
#!
from functools import partial
import numpy

from pylab import figure, show

from pacal import *
from pacal.distr import demo_distr

#! Exercise 6.3
d = NormalDistr() * NormalDistr() + NormalDistr() * NormalDistr()
figure()
demo_distr(d, theoretical=LaplaceDistr())

#! Example 7.3.1
w1 = WeibullDistr(2)
w2 = WeibullDistr(3)
figure()
demo_distr(w1 * w2)

#! Example 7.3.2
x1 = BetaDistr(9, 3)
x2 = BetaDistr(8, 3)
x3 = BetaDistr(4, 2)
figure()
demo_distr(x1 * x2 * x3)

#! Example 8.6.1
Example no. 11
from functools import partial
import numpy
import time

from pylab import figure, show

from pacal import *
from pacal.distr import demo_distr

if __name__ == "__main__":

    tic = time.time()
    #! Example 5.1.3
    d = NormalDistr() + NormalDistr() * NormalDistr()
    demo_distr(d)

    #! Example 5.5
    d = ExponentialDistr() / (ExponentialDistr() + ExponentialDistr())
    figure()
    demo_distr(d, xmax=20, ymax=1.5)

    #! Exercise 5.5
    #! part a
    figure()
    demo_distr(NormalDistr() / sqrt((NormalDistr()**2 + NormalDistr()**2) / 2),
               xmin=-3,
               xmax=3)
    #! part b
    figure()
    demo_distr(2 * NormalDistr()**2 / (NormalDistr()**2 + NormalDistr()**2),
Example no. 12
from pylab import figure

from pacal import *
from pacal.distr import demo_distr

#!
#! Inverse of r.v. 
#! ---------------

N = NormalDistr(0,1)
d = 1/(N**2)**0.5
#d.summary()
C = CauchyDistr()
(2/C).summary()
(C/2).summary()
demo_distr(C/2, 0.5/C)

E1 = ChiSquareDistr(1)
E2 = ChiSquareDistr(3)
figure()
demo_distr(log(E1))
figure()
demo_distr(log(E2))
figure()
demo_distr(log(UniformDistr(0.0,1.0)))
figure()
demo_distr(exp(UniformDistr(0.0,1.0)))
figure()
demo_distr(exp(UniformDistr(0.0,1.0))+exp(UniformDistr(0.0,1.0)))
figure()
demo_distr(log(UniformDistr(0.0,1.0))+log(UniformDistr(0.0,1.0)))
Example no. 13
#!
from functools import partial
import numpy
import time

from pylab import figure, show

from pacal import *
from pacal.distr import demo_distr

if __name__ == "__main__":
        
    tic = time.time()
    #! Example 5.1.3
    d = NormalDistr() + NormalDistr() * NormalDistr()
    demo_distr(d)
    
    #! Example 5.5
    d = ExponentialDistr() / (ExponentialDistr() + ExponentialDistr())
    figure()
    demo_distr(d, xmax=20, ymax=1.5)
    
    #! Exercise 5.5
    #! part a
    figure()
    demo_distr(NormalDistr() / sqrt((NormalDistr()**2 + NormalDistr()**2) / 2), xmin=-3, xmax=3)
    #! part b
    figure()
    demo_distr(2 * NormalDistr()**2 / (NormalDistr()**2 + NormalDistr()**2), xmax=20, ymax=2)
    #! part c
    figure()
Example no. 14
from pacal import *
from pacal.distr import demo_distr

if __name__ == "__main__":
    #! Here we describe known accuracy limitations of the PaCal project and topics for future work.

    #! Exponential function and logarithm
    #! ----------------------------------

    #$ The real power of a random variable $(X^\alpha)$ is always
    #$ well defined, but the logarithm and the exponential may cause trouble.
    #$ Below are some examples:
    #!
    figure()
    Y = abs(NormalDistr())**NormalDistr()
    demo_distr(Y, histogram=True, xmin=0, xmax=3, ymax=3, hist_bins=500)
    Y.get_piecewise_cdf().plot(color='b', linewidth=2.0)
    #!
    #! As one can see, we obtain only six digits of accuracy. The main problem
    #! is the singularity at the point 1: this kind of singularity is difficult
    #! to detect and difficult to interpolate.
    #!
    Y = UniformDistr(0, 1)**NormalDistr(1, 1)
    figure()
    demo_distr(Y, histogram=True, xmin=0, xmax=3, ymax=3, hist_bins=500)
    Y.get_piecewise_cdf().plot(color='b', linewidth=2.0)
    #!
    Y = UniformDistr(0.5, 1.5)**NormalDistr(0, 1)
    figure()
    demo_distr(Y, histogram=True, xmin=0, xmax=3, ymax=3, hist_bins=500)
    Y.get_piecewise_cdf().plot(color='b', linewidth=2.0)
Example no. 15
from functools import partial
from pylab import *
from mpl_toolkits.axes_grid.inset_locator import zoomed_inset_axes
from mpl_toolkits.axes_grid.inset_locator import mark_inset

from pacal import *
from pacal.distr import demo_distr

if __name__ == "__main__":
    #!-------------------------------------------
    #! Product of two shifted normal variables
    #!-------------------------------------------
    #! Such a product always has a singularity at 0, but the further the factors'
    #! means are from zero, the 'lighter' the singularity becomes.
    figure()
    d = NormalDistr(0,1) * NormalDistr(0,1)
    demo_distr(d, ymax=1.5, xmin=-5, xmax=5)
    #show()

    figure()
    d = NormalDistr(1,1) * NormalDistr(1,1)
    demo_distr(d)
    #show()

    figure()
    d = NormalDistr(2,1) * NormalDistr(2,1)
    demo_distr(d)
    #show()

    figure()
    d = NormalDistr(3,1) * NormalDistr(3,1)
    d.plot()
Example no. 16
from functools import partial
import numpy

from pylab import figure, show

from pacal import *
from pacal.distr import demo_distr
import time
if __name__ == "__main__":

    tic = time.time()
    #! Figure 3.1.1
    figure()
    demo_distr(UniformDistr(0, 1) + UniformDistr(0, 1),
               theoretical=lambda x: x * ((x >= 0) & (x < 1)) + (2 - x) *
               ((x >= 1) & (x <= 2)))
    #! Figure 3.1.2
    figure()
    demo_distr(UniformDistr(0, 1) - UniformDistr(0, 1),
               theoretical=lambda x: (x + 1) * ((x >= -1) & (x < 0)) +
               (1 - x) * ((x >= 0) & (x <= 1)))

    #!-------------------
    #! Section 3.2.2
    #!-------------------
    figure()
    demo_distr(ChiSquareDistr(1) + ChiSquareDistr(1),
               theoretical=ExponentialDistr(0.5))
    figure()
    demo_distr(ChiSquareDistr(1) + ChiSquareDistr(1) + ChiSquareDistr(1),
Example no. 17
 
#! Example 9.1.1
#! Implemented elsewhere

#! Example 9.1.2
t0 = time.time()
def theor_sum_exp(a1, a2, a3, x):
    t1 = exp(-3*a1*x) / (a2-a1) / (a3-a1)
    t2 = exp(-3*a2*x) / (a1-a2) / (a3-a2)
    t3 = exp(-3*a3*x) / (a1-a3) / (a2-a3)
    return 3*a1*a2*a3 * (t1+t2+t3)
for a1, a2, a3 in [(1.0,2.0,3.0),
                   (1,0.01,100.0),]:
    figure()
    d = (ExponentialDistr(a1) + ExponentialDistr(a2) + ExponentialDistr(a3))/3
    demo_distr(d, theoretical = partial(theor_sum_exp, a1, a2, a3))

#! Section 9.1.2
#! the L1 statistic for variances
#! Question: are the numerator and denominator independent?
for ns in [[3, 5, 2], # sample sizes
           [4, 5, 10, 7, 3]
           ]:
    print("sample sizes:", ns)
    N = sum(ns)
    num = ChiSquareDistr(ns[0] - 1)
    for n in ns[1:]:
        num *= ChiSquareDistr(n - 1)
    #num.summary()
    num **= (1.0 / N)
    den = ChiSquareDistr(N - len(ns)) / N