Example 1
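All of the following snippets are test methods excerpted from a unittest test class, so they rely on surrounding context: numpy imported as np, sys, scipy's optimize and special modules, the library's Distributions, Data, Auxiliary, and NonlinearTransformFactory modules, and tolerance attributes such as self.TolParam and self.detTol defined on the enclosing class. Example 1 checks parameter estimation for the Gamma distribution: it samples from a Gamma with random shape u and scale s, re-estimates both parameters from the sample, and requires the recovered values to lie within self.TolParam of the truth.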
    def test_estimate(self):
        print("Testing parameter estimation of Gamma distribution ...")
        sys.stdout.flush()
        # draw random true parameters, sample, then re-estimate from scratch
        myu = 10 * np.random.rand(1)[0]
        mys = 10 * np.random.rand(1)[0]
        p = Distributions.Gamma({'u': myu, 's': mys})
        dat = p.sample(1000000)
        p = Distributions.Gamma()
        p.estimate(dat)
        self.assertFalse(
            np.abs(p.param['u'] - myu) > self.TolParam,
            'Difference in shape parameter for Gamma distribution greater than '
            + str(self.TolParam))
        self.assertFalse(
            np.abs(p.param['s'] - mys) > self.TolParam,
            'Difference in scale parameter for Gamma distribution greater than '
            + str(self.TolParam))
Example 2
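Example 2 validates the analytic derivative of the log-likelihood with respect to the data (dldx) against a forward finite difference of loglik with step h, up to tolerance tol.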
    def test_derivatives(self):
        print("Testing derivatives w.r.t. data ...")
        sys.stdout.flush()
        myu = 3.0 * np.random.rand(1)[0] + 1.0
        mys = 3.0 * np.random.rand(1)[0] + 1.0
        p = Distributions.Gamma({'u': myu, 's': mys})
        dat = p.sample(100)
        h = 1e-7
        tol = 1e-4
        # compare the analytic derivative dldx against a forward difference
        y = np.array(dat.X) + h
        df = p.dldx(dat)
        df2 = (p.loglik(Data(y)) - p.loglik(dat)) / h
        self.assertFalse(
            np.max(np.abs(df - df2)) > tol,
            'Difference ' + str(np.max(np.abs(df - df2)))
            + ' in derivative of log-likelihood for Gamma greater than ' + str(tol))
Example 3
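Example 3 checks the gradient of the log-likelihood with respect to the parameters (dldtheta) using scipy.optimize.check_grad: f evaluates the summed log-likelihood at a parameter vector, df the corresponding analytic gradient, and the two must agree at a random positive starting point.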
    def test_dldtheta(self):
        p = Distributions.Gamma({'u': 2.0, 's': 3.0})
        p.primary = ['u', 's']
        dat = p.sample(1000)

        def f(arr):
            p.array2primary(arr)
            return np.sum(p.loglik(dat))

        def df(arr):
            p.array2primary(arr)
            return np.sum(p.dldtheta(dat), axis=1)

        arr0 = p.primary2array()
        arr0 = abs(np.random.randn(len(arr0)))
        err = optimize.check_grad(f, df, arr0)
        print "Error in graident: ", err
        self.assertTrue(err < 1e-02)
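For reference, here is a minimal, self-contained sketch of the check_grad pattern the test above relies on, applied to a toy quadratic; the objective and gradient below are illustrative choices of mine, not part of the library:

# check_grad returns the root-sum-square difference between the analytic
# gradient and a finite-difference approximation at the point x0.
import numpy as np
from scipy import optimize

def f(x):                  # scalar objective: f(x) = sum(x_i^2)
    return np.sum(x ** 2)

def df(x):                 # its analytic gradient: 2x
    return 2 * x

x0 = np.random.randn(3)
err = optimize.check_grad(f, df, x0)
print("Error in gradient:", err)   # ~1e-7 or smaller for a correct gradient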
Example 4
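Example 4 verifies the log-determinant of the Jacobian of a radial transformation: the filter F maps a two-dimensional Lp-spherically symmetric source onto a target whose radial component is a Gamma distribution, and F.logDetJacobian is compared against a finite-difference estimate of the 2x2 Jacobian determinant at every sample.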
    def test_LogDetRadialTransform(self):
        print "Testing logdet of radial transformation ... "
        sys.stdout.flush()
        p = np.random.rand() * 3. + .5
        # source distribution
        psource = Distributions.LpSphericallySymmetric({'p': p})
        # target distribution
        ptarget = Distributions.LpSphericallySymmetric({
            'p': p,
            'rp': Distributions.Gamma({'u': np.random.rand() * 3.0,
                                       's': np.random.rand() * 2.0})
        })
        # create Filter
        F = NonlinearTransformFactory.RadialTransformation(psource, ptarget)
        # sample data from source distribution
        dat = psource.sample(100)

        # apply filter to data
        dat2 = F * dat
        logDetJ = F.logDetJacobian(dat)
        logDetJ2 = 0 * logDetJ

        h = 1e-8

        # finite-difference columns of the Jacobian of F
        tmp = Data(dat.X.copy())
        tmp.X[0, :] += h
        W1 = ((F * tmp).X - dat2.X) / h

        tmp = Data(dat.X.copy())
        tmp.X[1, :] += h
        W2 = ((F * tmp).X - dat2.X) / h
        for i in range(dat.numex()):
            logDetJ2[i] = np.log(
                np.abs(W1[0, i] * W2[1, i] - W1[1, i] * W2[0, i]))

        self.assertFalse(
            np.max(np.abs(logDetJ - logDetJ2)) > self.detTol,
            'Log determinant of radial transformation deviates by more than '
            + str(self.detTol) + '!')
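The same finite-difference recipe in isolation, on a toy elementwise map; tanh is my stand-in for the filter F here, chosen because its exact log-determinant is available in closed form:

import numpy as np

def g(x):                    # toy elementwise map; its Jacobian is diagonal
    return np.tanh(x)

x = np.random.randn(2, 5)    # 2 dimensions, 5 samples
h = 1e-8
W1 = (g(x + np.array([[h], [0.0]])) - g(x)) / h   # column dg/dx_0
W2 = (g(x + np.array([[0.0], [h]])) - g(x)) / h   # column dg/dx_1
logdet_fd = np.log(np.abs(W1[0] * W2[1] - W1[1] * W2[0]))
# exact value for elementwise tanh: sum_i log(1 - tanh(x_i)^2)
logdet_exact = np.sum(np.log(1.0 - np.tanh(x) ** 2), axis=0)
print(np.max(np.abs(logdet_fd - logdet_exact)))   # small, limited by h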
Example 5
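Example 5 tests radial factorization: F maps samples from an Lp-spherically symmetric source with Gamma radial distribution onto an Lp-generalized normal target, and the average log-losses (ALL) of source and target must agree once the mean absolute log-determinant of F, in bits per dimension, is accounted for.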
    def test_RadialFactorization(self):
        print("Testing Radial Factorization ...")
        sys.stdout.flush()
        p = np.random.rand() + 1.0
        n = 5
        psource = Distributions.LpSphericallySymmetric({
            'n': n,
            'p': p,
            'rp': Distributions.Gamma({'u': 2.0 * np.random.rand() + 1.0,
                                       's': 5.0 * np.random.rand() + 1.0})
        })
        ptarget = Distributions.LpGeneralizedNormal({
            'n': n,
            'p': p,
            's': (special.gamma(1.0 / p) / special.gamma(3.0 / p)) ** (p / 2.0)
        })

        F = NonlinearTransformFactory.RadialFactorization(psource)

        dat = psource.sample(10000)
        ld = F.logDetJacobian(dat)
        ld = np.mean(np.abs(ld)) / dat.size(0) / np.log(2)

        all_source = psource.all(dat)
        all_target = ptarget.all(F * dat)

        tol = 1e-2
        prot = {}
        prot['message'] = 'Difference in logdet corrected ALL > ' + str(tol)
        prot["1/n/log(2) * <|det J|> "] = ld
        prot["ALL(TARGET)"] = all_target
        prot["ALL(SOURCE)"] = all_source
        prot["ALL(TARGET) + 1/n/log(2) * <|det J|> - ALL(SOURCE)"] = \
            all_target + ld - all_source
        # the check announced in prot['message'] above
        self.assertTrue(np.abs(all_target + ld - all_source) < tol,
                        prot['message'])
Example 6
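Example 6 exercises parameter estimation for an Lp-nested symmetric distribution with a Gamma radial component: data are sampled from one randomly parameterized model, and the parameters p, u, and s are re-estimated with three different methods (greedy, Nelder-Mead, and gradient), each required to land within the per-parameter tolerances.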
    def test_estimate(self):
        print "Testing parameter estimation for p-nested symmetric distribution with radial gamma"
        sys.stdout.flush()
        L = Auxiliary.LpNestedFunction('(0,0,(1,1:3),3,(2,4:7))')
        L.p = np.random.rand(3) * 1.5 + .5

        d = Distributions.LpNestedSymmetric({'f': L, 'n': L.n[()]})
        L2 = Auxiliary.LpNestedFunction('(0,0,(1,1:3),3,(2,4:7))')
        L2.p = np.random.rand(3) * 1.5 + .5
        # lower/upper bounds on the p parameters for the estimation below
        L.lb = 0.0 * L.p
        L.ub = 2.0 * L2.p

        rd2 = Distributions.Gamma({
            'u': 5 * np.random.rand(),
            's': 10 * np.random.rand()
        })
        # create Distributions object and sample
        d2 = Distributions.LpNestedSymmetric({
            'f': L2,
            'n': L2.n[()],
            'rp': rd2
        })
        print "\t ... checking greedy method"
        sys.stdout.flush()
        dat = d2.sample(50000)
        d.estimate(dat, method="greedy")

        self.assertFalse(
            np.max(np.abs(d.param['f'].p - d2.param['f'].p)) > self.TolParam['p'],
            'Estimated parameter p deviates by more than ' + str(self.TolParam['p']) + '!')
        self.assertFalse(
            np.abs(d.param['rp'].param['u'] - d2.param['rp'].param['u']) > self.TolParam['u'],
            'Estimated parameter u deviates by more than ' + str(self.TolParam['u']) + '!')
        self.assertFalse(
            np.abs(d.param['rp'].param['s'] - d2.param['rp'].param['s']) > self.TolParam['s'],
            'Estimated parameter s deviates by more than ' + str(self.TolParam['s']) + '!')

        print "\t ... checking Nelder-Mead method"
        sys.stdout.flush()
        d = Distributions.LpNestedSymmetric({'f': L, 'n': L.n[()]})
        d.estimate(dat, method="neldermead")



        self.assertFalse( np.max(np.abs(d.param['f'].p - d2.param['f'].p)) > self.TolParam['p'],\
           'Estimated parameter p deviates by more than ' + str(self.TolParam['p']) + '!')
        self.assertFalse( np.abs(d.param['rp'].param['u'] -  d2.param['rp'].param['u']) > self.TolParam['u'],\
           'Estimated parameter u deviates by more than ' + str(self.TolParam['u']) + '!')
        self.assertFalse( np.abs(d.param['rp'].param['s'] -  d2.param['rp'].param['s']) > self.TolParam['s'],\
           'Estimated parameter s deviates by more than ' + str(self.TolParam['s']) + '!')

        print "\t ... checking Gradient method"
        sys.stdout.flush()
        d = Distributions.LpNestedSymmetric({'f': L, 'n': L.n[()]})
        d.estimate(dat, method="gradient")

        self.assertFalse(
            np.max(np.abs(d.param['f'].p - d2.param['f'].p)) > self.TolParam['p'],
            'Estimated parameter p deviates by more than ' + str(self.TolParam['p']) + '!')
        self.assertFalse(
            np.abs(d.param['rp'].param['u'] - d2.param['rp'].param['u']) > self.TolParam['u'],
            'Estimated parameter u deviates by more than ' + str(self.TolParam['u']) + '!')
        self.assertFalse(
            np.abs(d.param['rp'].param['s'] - d2.param['rp'].param['s']) > self.TolParam['s'],
            'Estimated parameter s deviates by more than ' + str(self.TolParam['s']) + '!')

        print "[Ok]"