Example #1
class CommonMinimizerTest(unittest.TestCase):
    def setUp(self):
        """
        test scalar minimizers except newton-cg (needs jacobian) and
        anneal (doesn't work out of the box).
        """
        p_true = Parameters()
        p_true.add('amp', value=14.0)
        p_true.add('period', value=5.33)
        p_true.add('shift', value=0.123)
        p_true.add('decay', value=0.010)
        self.p_true = p_true

        n = 2500
        xmin = 0.
        xmax = 250.0
        noise = np.random.normal(scale=0.7215, size=n)
        self.x = np.linspace(xmin, xmax, n)
        self.data = self.residual(p_true, self.x) + noise

        fit_params = Parameters()
        fit_params.add('amp', value=11.0, min=5, max=20)
        fit_params.add('period', value=5., min=1., max=7)
        fit_params.add('shift', value=.10, min=0.0, max=0.2)
        fit_params.add('decay', value=6.e-3, min=0, max=0.1)
        self.fit_params = fit_params

        self.mini = Minimizer(self.residual, fit_params, [self.x, self.data])

    def residual(self, pars, x, data=None):
        amp = pars['amp'].value
        per = pars['period'].value
        shift = pars['shift'].value
        decay = pars['decay'].value

        if abs(shift) > pi / 2:
            shift = shift - np.sign(shift) * pi
        model = amp * np.sin(shift + x / per) * np.exp(-x * x * decay * decay)
        if data is None:
            return model
        return model - data

    def test_diffev_bounds_check(self):
        # You need finite (min, max) for each parameter if you're using
        # differential_evolution.
        self.fit_params['decay'].min = None
        self.minimizer = 'differential_evolution'
        np.testing.assert_raises(ValueError, self.scalar_minimizer)

    def test_scalar_minimizers(self):
        # test all the scalar minimizers
        for method in SCALAR_METHODS:
            if method in ['newton', 'dogleg', 'trust-ncg']:
                continue
            self.minimizer = SCALAR_METHODS[method]
            if method == 'Nelder-Mead':
                sig = 0.2
            else:
                sig = 0.15
            self.scalar_minimizer(sig=sig)

    def scalar_minimizer(self, sig=0.15):
        try:
            from scipy.optimize import minimize as scipy_minimize
        except ImportError:
            raise SkipTest

        print(self.minimizer)
        out = self.mini.scalar_minimize(method=self.minimizer)

        self.residual(out.params, self.x)

        for name, par in out.params.items():
            nout = "%s:%s" % (name, ' ' * (20 - len(name)))
            print("%s: %s (%s) " % (nout, par.value, self.p_true[name].value))

        for para, true_para in zip(out.params.values(), self.p_true.values()):
            check_wo_stderr(para, true_para.value, sig=sig)

    @decorators.slow
    def test_emcee(self):
        # test emcee
        if not HAS_EMCEE:
            return True

        np.random.seed(123456)
        out = self.mini.emcee(nwalkers=100, steps=200, burn=50, thin=10)

        check_paras(out.params, self.p_true, sig=3)

    @decorators.slow
    def test_emcee_PT(self):
        # test emcee with parallel tempering
        if not HAS_EMCEE:
            return True

        np.random.seed(123456)
        self.mini.userfcn = residual_for_multiprocessing
        out = self.mini.emcee(ntemps=4,
                              nwalkers=50,
                              steps=200,
                              burn=100,
                              thin=10,
                              workers=2)

        check_paras(out.params, self.p_true, sig=3)

    @decorators.slow
    def test_emcee_multiprocessing(self):
        # test multiprocessing runs
        if not HAS_EMCEE:
            return True

        np.random.seed(123456)
        self.mini.userfcn = residual_for_multiprocessing
        out = self.mini.emcee(steps=10, workers=4)

    def test_emcee_bounds_length(self):
        # the log-probability functions check if the parameters are
        # inside the bounds. Check that the bounds and parameters
        # are the right lengths for comparison. This is only exercised
        # when nvarys != nparams
        if not HAS_EMCEE:
            return True
        self.mini.params['amp'].vary = False
        self.mini.params['period'].vary = False
        self.mini.params['shift'].vary = False

        out = self.mini.emcee(steps=10)

    @decorators.slow
    def test_emcee_partial_bounds(self):
        # mcmc with partial bounds
        if not HAS_EMCEE:
            return True

        np.random.seed(123456)
        # test mcmc output vs lm, some parameters not bounded
        self.fit_params['amp'].max = None
        # self.fit_params['amp'].min = None
        out = self.mini.emcee(nwalkers=100, steps=300, burn=100, thin=10)

        check_paras(out.params, self.p_true, sig=3)

    def test_emcee_init_with_chain(self):
        # can you initialise with a previous chain
        if not HAS_EMCEE:
            return True

        out = self.mini.emcee(nwalkers=100, steps=5)
        # can initialise with a chain
        out2 = self.mini.emcee(nwalkers=100, steps=1, pos=out.chain)

        # can initialise with a correct subset of a chain
        out3 = self.mini.emcee(nwalkers=100,
                               steps=1,
                               pos=out.chain[..., -1, :])

        # but you can't initialise if the shape is wrong.
        assert_raises(ValueError,
                      self.mini.emcee,
                      nwalkers=100,
                      steps=1,
                      pos=out.chain[..., -1, :-1])

    def test_emcee_reuse_sampler(self):
        if not HAS_EMCEE:
            return True

        self.mini.emcee(nwalkers=100, steps=5)

        # if you've run the sampler the Minimizer object should have a _lastpos
        # attribute
        assert_(hasattr(self.mini, '_lastpos'))

        # now try and re-use sampler
        out2 = self.mini.emcee(steps=10, reuse_sampler=True)
        assert_(out2.chain.shape[1] == 15)

        # you shouldn't be able to reuse the sampler if nvarys has changed.
        self.mini.params['amp'].vary = False
        assert_raises(ValueError, self.mini.emcee, reuse_sampler=True)

    def test_emcee_lnpost(self):
        # check ln likelihood is calculated correctly. It should be
        # -0.5 * chi**2.
        result = self.mini.minimize()

        # obtain the numeric values
        # note - in this example all the parameters are varied
        fvars = np.array([par.value for par in result.params.values()])

        # calculate the cost function with scaled values (parameters all have
        # lower and upper bounds).
        scaled_fvars = []
        for par, fvar in zip(result.params.values(), fvars):
            par.value = fvar
            scaled_fvars.append(par.setup_bounds())

        val = self.mini.penalty(np.array(scaled_fvars))

        # calculate the log-likelihood value
        bounds = np.array([(par.min, par.max)
                           for par in result.params.values()])
        val2 = _lnpost(fvars,
                       self.residual,
                       result.params,
                       result.var_names,
                       bounds,
                       userargs=(self.x, self.data))

        assert_almost_equal(-0.5 * val, val2)

    def test_emcee_output(self):
        # test mcmc output
        if not HAS_EMCEE:
            return True
        try:
            from pandas import DataFrame
        except ImportError:
            return True
        out = self.mini.emcee(nwalkers=10, steps=20, burn=5, thin=2)
        assert_(isinstance(out, MinimizerResult))
        assert_(isinstance(out.flatchain, DataFrame))

        # check that we can access the chains via parameter name
        assert_(out.flatchain['amp'].shape[0] == 80)
        assert_(out.errorbars is True)
        assert_(np.isfinite(out.params['amp'].correl['period']))

        # the lnprob array should have one value per chain sample
        assert_(np.size(out.chain) // 4 == np.size(out.lnprob))

    @decorators.slow
    def test_emcee_float(self):
        # test that it works if the residual function returns a float, not a vector
        if not HAS_EMCEE:
            return True

        def resid(pars, x, data=None):
            return -0.5 * np.sum(self.residual(pars, x, data=data)**2)

        # just return chi2
        def resid2(pars, x, data=None):
            return np.sum(self.residual(pars, x, data=data)**2)

        self.mini.userfcn = resid
        np.random.seed(123456)
        out = self.mini.emcee(nwalkers=100, steps=200, burn=50, thin=10)
        check_paras(out.params, self.p_true, sig=3)

        self.mini.userfcn = resid2
        np.random.seed(123456)
        out = self.mini.emcee(nwalkers=100,
                              steps=200,
                              burn=50,
                              thin=10,
                              float_behavior='chi2')
        check_paras(out.params, self.p_true, sig=3)
Example #2
class CommonMinimizerTest(unittest.TestCase):

    def setUp(self):
        """
        test scalar minimizers except newton-cg (needs jacobian) and
        anneal (doesn't work out of the box).
        """
        p_true = Parameters()
        p_true.add('amp', value=14.0)
        p_true.add('period', value=5.33)
        p_true.add('shift', value=0.123)
        p_true.add('decay', value=0.010)
        self.p_true = p_true

        n = 2500
        xmin = 0.
        xmax = 250.0
        noise = np.random.normal(scale=0.7215, size=n)
        self.x = np.linspace(xmin, xmax, n)
        self.data = self.residual(p_true, self.x) + noise

        fit_params = Parameters()
        fit_params.add('amp', value=11.0, min=5, max=20)
        fit_params.add('period', value=5., min=1., max=7)
        fit_params.add('shift', value=.10, min=0.0, max=0.2)
        fit_params.add('decay', value=6.e-3, min=0, max=0.1)
        self.fit_params = fit_params

        self.mini = Minimizer(self.residual, fit_params, [self.x, self.data])

    def residual(self, pars, x, data=None):
        amp = pars['amp']
        per = pars['period']
        shift = pars['shift']
        decay = pars['decay']

        if abs(shift) > pi/2:
            shift = shift - np.sign(shift) * pi
        model = amp*np.sin(shift + x/per) * np.exp(-x*x*decay*decay)
        if data is None:
            return model
        return model - data

    def test_diffev_bounds_check(self):
        # You need finite (min, max) for each parameter if you're using
        # differential_evolution.
        self.fit_params['decay'].min = -np.inf
        self.fit_params['decay'].vary = True
        self.minimizer = 'differential_evolution'
        pytest.raises(ValueError, self.scalar_minimizer)

        # but only if a parameter is not fixed
        self.fit_params['decay'].vary = False
        self.mini.scalar_minimize(method='differential_evolution', maxiter=1)

    def test_scalar_minimizers(self):
        # test all the scalar minimizers
        for method in SCALAR_METHODS:
            if method in ['newton', 'dogleg', 'trust-ncg', 'cg', 'trust-exact',
                          'trust-krylov', 'trust-constr']:
                continue
            self.minimizer = SCALAR_METHODS[method]
            if method == 'Nelder-Mead':
                sig = 0.2
            else:
                sig = 0.15
            self.scalar_minimizer(sig=sig)

    def scalar_minimizer(self, sig=0.15):
        out = self.mini.scalar_minimize(method=self.minimizer)

        self.residual(out.params, self.x)

        for para, true_para in zip(out.params.values(), self.p_true.values()):
            check_wo_stderr(para, true_para.value, sig=sig)

    def test_nan_policy(self):
        # check that an error is raised if there are nan in
        # the data returned by userfcn
        self.data[0] = np.nan

        major, minor, _micro = scipy_version.split('.', 2)
        for method in SCALAR_METHODS:
            if (method == 'differential_evolution' and int(major) > 0 and
                    int(minor) >= 2):
                pytest.raises(RuntimeError, self.mini.scalar_minimize,
                              SCALAR_METHODS[method])
            else:
                pytest.raises(ValueError, self.mini.scalar_minimize,
                              SCALAR_METHODS[method])

        pytest.raises(ValueError, self.mini.minimize)

        # now check that the fit proceeds if nan_policy is 'omit'
        self.mini.nan_policy = 'omit'
        res = self.mini.minimize()
        assert_equal(res.ndata, np.size(self.data, 0) - 1)

        for para, true_para in zip(res.params.values(), self.p_true.values()):
            check_wo_stderr(para, true_para.value, sig=0.15)

    def test_nan_policy_function(self):
        a = np.array([0, 1, 2, 3, np.nan])
        pytest.raises(ValueError, _nan_policy, a)
        assert_(np.isnan(_nan_policy(a, nan_policy='propagate')[-1]))
        assert_equal(_nan_policy(a, nan_policy='omit'), [0, 1, 2, 3])

        a[-1] = np.inf
        pytest.raises(ValueError, _nan_policy, a)
        assert_(np.isposinf(_nan_policy(a, nan_policy='propagate')[-1]))
        assert_equal(_nan_policy(a, nan_policy='omit'), [0, 1, 2, 3])
        assert_equal(_nan_policy(a, handle_inf=False), a)

    @dec.slow
    def test_emcee(self):
        # test emcee
        if not HAS_EMCEE:
            return True

        np.random.seed(123456)
        out = self.mini.emcee(nwalkers=100, steps=200, burn=50, thin=10)

        check_paras(out.params, self.p_true, sig=3)

    @dec.slow
    def test_emcee_method_kwarg(self):
        # test with emcee as method keyword argument
        if not HAS_EMCEE:
            return True

        np.random.seed(123456)
        out = self.mini.minimize(method='emcee', nwalkers=100, steps=200,
                                 burn=50, thin=10)
        assert out.method == 'emcee'
        assert out.nfev == 100*200

        check_paras(out.params, self.p_true, sig=3)

    @dec.slow
    def test_emcee_PT(self):
        # test emcee with parallel tempering
        if not HAS_EMCEE:
            return True

        np.random.seed(123456)
        self.mini.userfcn = residual_for_multiprocessing
        out = self.mini.emcee(ntemps=4, nwalkers=50, steps=200,
                              burn=100, thin=10, workers=2)

        check_paras(out.params, self.p_true, sig=3)

    @dec.slow
    def test_emcee_multiprocessing(self):
        # test multiprocessing runs
        if not HAS_EMCEE:
            return True

        np.random.seed(123456)
        self.mini.userfcn = residual_for_multiprocessing
        self.mini.emcee(steps=10, workers=4)

    def test_emcee_bounds_length(self):
        # the log-probability functions check if the parameters are
        # inside the bounds. Check that the bounds and parameters
        # are the right lengths for comparison. This is only exercised
        # when nvarys != nparams
        if not HAS_EMCEE:
            return True
        self.mini.params['amp'].vary = False
        self.mini.params['period'].vary = False
        self.mini.params['shift'].vary = False

        self.mini.emcee(steps=10)

    @dec.slow
    def test_emcee_partial_bounds(self):
        # mcmc with partial bounds
        if not HAS_EMCEE:
            return True

        np.random.seed(123456)
        # test mcmc output vs lm, some parameters not bounded
        self.fit_params['amp'].max = np.inf
        # self.fit_params['amp'].min = -np.inf
        out = self.mini.emcee(nwalkers=100, steps=300, burn=100, thin=10)

        check_paras(out.params, self.p_true, sig=3)

    def test_emcee_init_with_chain(self):
        # can you initialise with a previous chain
        if not HAS_EMCEE:
            return True

        out = self.mini.emcee(nwalkers=100, steps=5)
        # can initialise with a chain
        self.mini.emcee(nwalkers=100, steps=1, pos=out.chain)

        # can initialise with a correct subset of a chain
        self.mini.emcee(nwalkers=100, steps=1, pos=out.chain[..., -1, :])

        # but you can't initialise if the shape is wrong.
        pytest.raises(ValueError,
                      self.mini.emcee,
                      nwalkers=100,
                      steps=1,
                      pos=out.chain[..., -1, :-1])

    def test_emcee_reuse_sampler(self):
        if not HAS_EMCEE:
            return True

        self.mini.emcee(nwalkers=100, steps=5)

        # if you've run the sampler the Minimizer object should have a _lastpos
        # attribute
        assert_(hasattr(self.mini, '_lastpos'))

        # now try and re-use sampler
        out2 = self.mini.emcee(steps=10, reuse_sampler=True)
        assert_(out2.chain.shape[1] == 15)

        # you shouldn't be able to reuse the sampler if nvarys has changed.
        self.mini.params['amp'].vary = False
        pytest.raises(ValueError, self.mini.emcee, reuse_sampler=True)

    def test_emcee_lnpost(self):
        # check ln likelihood is calculated correctly. It should be
        # -0.5 * chi**2.
        result = self.mini.minimize()

        # obtain the numeric values
        # note - in this example all the parameters are varied
        fvars = np.array([par.value for par in result.params.values()])

        # calculate the cost function with scaled values (parameters all have
        # lower and upper bounds).
        scaled_fvars = []
        for par, fvar in zip(result.params.values(), fvars):
            par.value = fvar
            scaled_fvars.append(par.setup_bounds())

        val = self.mini.penalty(np.array(scaled_fvars))

        # calculate the log-likelihood value
        bounds = np.array([(par.min, par.max)
                           for par in result.params.values()])
        val2 = _lnpost(fvars,
                       self.residual,
                       result.params,
                       result.var_names,
                       bounds,
                       userargs=(self.x, self.data))

        assert_almost_equal(-0.5 * val, val2)

    def test_emcee_output(self):
        # test mcmc output
        if not HAS_EMCEE:
            return True
        try:
            from pandas import DataFrame
        except ImportError:
            return True
        out = self.mini.emcee(nwalkers=10, steps=20, burn=5, thin=2)
        assert_(isinstance(out, MinimizerResult))
        assert_(isinstance(out.flatchain, DataFrame))

        # check that we can access the chains via parameter name
        assert_(out.flatchain['amp'].shape[0] == 80)
        assert out.errorbars
        assert_(np.isfinite(out.params['amp'].correl['period']))

        # the lnprob array should have one value per chain sample
        assert_(np.size(out.chain)//out.nvarys == np.size(out.lnprob))

        # test chain output shapes
        assert_(out.lnprob.shape == (10, (20-5+1)/2))
        assert_(out.chain.shape == (10, (20-5+1)/2, out.nvarys))
        assert_(out.flatchain.shape == (10*(20-5+1)/2, out.nvarys))

    def test_emcee_PT_output(self):
        # test mcmc output when using parallel tempering
        if not HAS_EMCEE:
            return True
        try:
            from pandas import DataFrame
        except ImportError:
            return True
        out = self.mini.emcee(ntemps=6, nwalkers=10, steps=20, burn=5, thin=2)
        assert_(isinstance(out, MinimizerResult))
        assert_(isinstance(out.flatchain, DataFrame))

        # check that we can access the chains via parameter name
        assert_(out.flatchain['amp'].shape[0] == 80)
        assert out.errorbars
        assert_(np.isfinite(out.params['amp'].correl['period']))

        # the lnprob array should have one value per chain sample
        assert_(np.size(out.chain)//out.nvarys == np.size(out.lnprob))

        # test chain output shapes
        assert_(out.lnprob.shape == (6, 10, (20-5+1)/2))
        assert_(out.chain.shape == (6, 10, (20-5+1)/2, out.nvarys))
        # Only the 0th temperature is returned
        assert_(out.flatchain.shape == (10*(20-5+1)/2, out.nvarys))

    @dec.slow
    def test_emcee_float(self):
        # test that it works if the residual function returns a float, not a vector
        if not HAS_EMCEE:
            return True

        def resid(pars, x, data=None):
            return -0.5 * np.sum(self.residual(pars, x, data=data)**2)

        # just return chi2
        def resid2(pars, x, data=None):
            return np.sum(self.residual(pars, x, data=data)**2)

        self.mini.userfcn = resid
        np.random.seed(123456)
        out = self.mini.emcee(nwalkers=100, steps=200, burn=50, thin=10)
        check_paras(out.params, self.p_true, sig=3)

        self.mini.userfcn = resid2
        np.random.seed(123456)
        out = self.mini.emcee(nwalkers=100, steps=200,
                              burn=50, thin=10, float_behavior='chi2')
        check_paras(out.params, self.p_true, sig=3)

    @dec.slow
    def test_emcee_seed(self):
        # test emcee seeding can reproduce a sampling run
        if not HAS_EMCEE:
            return True

        out = self.mini.emcee(params=self.fit_params,
                              nwalkers=100,
                              steps=1, seed=1)
        out2 = self.mini.emcee(params=self.fit_params,
                               nwalkers=100,
                               steps=1, seed=1)

        assert_almost_equal(out.chain, out2.chain)
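
Both examples are class bodies excerpted from a larger test module: names such as HAS_EMCEE, SCALAR_METHODS, check_wo_stderr, check_paras and residual_for_multiprocessing are defined at module level in lmfit's own test suite and are not shown above. Below is a minimal sketch of that scaffolding so the excerpts can be read and run in isolation; the helper functions are illustrative stand-ins rather than the originals, and the underscore-prefixed imports are private to lmfit.minimizer and only exist in the lmfit release these tests target.

import unittest

import numpy as np
import pytest
from numpy import pi
from numpy.testing import (assert_, assert_almost_equal, assert_equal,
                           assert_raises)
from scipy import __version__ as scipy_version

from lmfit import Minimizer, Parameters
# _lnpost and _nan_policy are private helpers of lmfit.minimizer in the
# version these tests were written against
from lmfit.minimizer import (SCALAR_METHODS, MinimizerResult, _lnpost,
                             _nan_policy)

# Example #1 additionally relies on nose-era tooling (SkipTest and
# numpy.testing.decorators); Example #2 uses pytest and numpy.testing.dec.
# Both decorator modules were removed from recent numpy releases.

try:
    import emcee  # noqa: F401
    HAS_EMCEE = True
except ImportError:
    HAS_EMCEE = False


def residual_for_multiprocessing(pars, x, data=None):
    # module-level (and therefore picklable) copy of
    # CommonMinimizerTest.residual, needed when emcee runs with workers > 1
    amp = pars['amp'].value
    per = pars['period'].value
    shift = pars['shift'].value
    decay = pars['decay'].value
    if abs(shift) > pi / 2:
        shift = shift - np.sign(shift) * pi
    model = amp * np.sin(shift + x / per) * np.exp(-x * x * decay * decay)
    return model if data is None else model - data


def check_wo_stderr(para, true_value, sig=0.1):
    # stand-in: the fitted value must land within `sig` of the true value
    assert_(abs(para.value - true_value) < sig,
            '%s: %s vs %s' % (para.name, para.value, true_value))


def check_paras(params, p_true, sig=3):
    # stand-in: each fitted parameter must agree with the generating value
    # to within `sig` estimated standard errors
    for name, true_par in p_true.items():
        para = params[name]
        assert_(abs(para.value - true_par.value) < sig * para.stderr,
                '%s: %s vs %s' % (name, para.value, true_par.value))

With this scaffolding in place, either class can be collected and run with pytest or with python -m unittest.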