Code example #1
0
def test_MCMC_parameters_initialization():
    """Check that MCMC_parameters_initialization draws trials in the expected
    neighbourhood of each best-fit parameter.

    Expected sampling windows around the input value, per parameter key:
      - 'to'                : absolute offset in (-1.0, 1.0)
      - 'uo', 'tE'          : within +/-10% of the value
      - 'rho'               : within a factor of 10 either way
      - flux terms (fs*, g*): within +/-10%, returned as a 1-element sequence
    """
    parameters = [0.0, 1.1, 2.2, 3.3, 4.4, 5.5]
    parameters_dictionnary = {
        'to': 0,
        'uo': 1,
        'tE': 2,
        'rho': 3,
        'fs_LCOGT': 4,
        'g_LCOGT': 5
    }

    parameter_key_0 = 'to'
    trial_0 = microlguess.MCMC_parameters_initialization(
        parameter_key_0, parameters_dictionnary, parameters)[0]
    assert (trial_0 > -1.0) & (trial_0 < 1.0)

    parameter_key_1 = 'uo'
    trial_1 = microlguess.MCMC_parameters_initialization(
        parameter_key_1, parameters_dictionnary, parameters)[0]
    assert (trial_1 > 0.9 * 1.1) & (trial_1 < 1.1 * 1.1)

    parameter_key_2 = 'tE'
    trial_2 = microlguess.MCMC_parameters_initialization(
        parameter_key_2, parameters_dictionnary, parameters)[0]
    assert (trial_2 > 0.9 * 2.2) & (trial_2 < 1.1 * 2.2)

    parameter_key_3 = 'rho'
    trial_3 = microlguess.MCMC_parameters_initialization(
        parameter_key_3, parameters_dictionnary, parameters)[0]
    assert (trial_3 > 0.1 * 3.3) & (trial_3 < 10 * 3.3)

    parameter_key_4 = 'fs_LCOGT'
    trial_4 = microlguess.MCMC_parameters_initialization(
        parameter_key_4, parameters_dictionnary, parameters)

    assert len(trial_4) == 1
    assert (trial_4[0] > 0.9 * 4.4) & (trial_4[0] < 1.1 * 4.4)

    parameter_key_5 = 'g_LCOGT'
    trial_5 = microlguess.MCMC_parameters_initialization(
        parameter_key_5, parameters_dictionnary, parameters)
    # BUG FIX: the original re-asserted len(trial_4) here (copy-paste error);
    # trial_5 is the value produced just above and is what must be checked.
    assert len(trial_5) == 1
    assert (trial_5[0] > 0.9 * 5.5) & (trial_5[0] < 1.1 * 5.5)
Code example #2
0
File: microlfits.py  Project: AzNOAOTares/pyLIMA
    def MCMC(self):
        """ The MCMC method. Construct starting points of the chains around
            the best solution found by the 'DE' method.
            The objective function is :func:`chichi_MCMC`. Telescope flux (fs and g), can be optimized thanks to MCMC if
            flux_estimation_MCMC is 'MCMC', either they are derived through np.polyfit.

            Based on the emcee python package :
            " emcee: The MCMC Hammer" (Foreman-Mackey et al. 2013).
            Have a look here : http://dan.iel.fm/emcee/current/

            :return: a tuple containing (MCMC_chains, MCMC_probabilities)
            :rtype: tuple

            **WARNING** :
                   nwalkers is set to 100
                   nlinks is set to 100
                   4*nwalkers*nlinks MCMC steps in total
                   (nlinks pre-burn links, then 3*nlinks final links)
        """

        nwalkers = 100
        nlinks = 100

        # start = python_time.time()

        # No user-supplied guess: run differential evolution first to get a
        # starting solution (fluxes included in its output).
        if len(self.model.parameters_guess) == 0:

            differential_evolution_estimation = self.differential_evolution(
            )[0]
            self.DE_population_size = 10
            self.guess = differential_evolution_estimation

        else:

            # User guess covers only the Paczynski parameters; append the
            # telescope fluxes derived via find_fluxes.
            self.guess = list(self.model.parameters_guess)
            self.guess += self.find_fluxes(self.guess, self.model)

        # Best solution

        best_solution = self.guess

        # When fluxes are sampled by MCMC, walk over every model parameter;
        # otherwise restrict walkers to the bounded (Paczynski) parameters.
        if self.fluxes_MCMC_method == 'MCMC':
            limit_parameters = len(self.model.model_dictionnary.keys())

        else:
            limit_parameters = len(self.model.parameters_boundaries)

        # Initialize the population of MCMC
        population = []

        count_walkers = 0
        while count_walkers < nwalkers:

            # Construct an individual of the population around the best solution.
            individual = []
            for parameter_key in list(
                    self.model.model_dictionnary.keys())[:limit_parameters]:

                parameter_trial = microlguess.MCMC_parameters_initialization(
                    parameter_key, self.model.model_dictionnary, best_solution)

                # parameter_trial may be falsy (key not handled); skip it then.
                if parameter_trial:

                    for parameter in parameter_trial:
                        individual.append(parameter)

            # fluxes = self.find_fluxes(individual,self.model)
            # individual += fluxes

            # Only keep starting points with finite log-probability, so every
            # walker starts inside the allowed region.
            chichi = self.chichi_MCMC(individual)
            if chichi != -np.inf:
                # np.array(individual)
                # print count_walkers

                population.append(np.array(individual))
                count_walkers += 1
        # number_of_parameters = number_of_paczynski_parameters + len(fluxes)
        # number_of_parameters = number_of_paczynski_parameters
        print('pre MCMC done')

        # `individual` still holds the last accepted walker; its length is the
        # dimensionality of the sampled parameter space.
        number_of_parameters = len(individual)

        # pool = MPIPool()
        # if not pool.is_master():
        # pool.wait()
        # sys.exit(0)
        sampler = emcee.EnsembleSampler(nwalkers,
                                        number_of_parameters,
                                        self.chichi_MCMC,
                                        a=2.0,
                                        pool=self.pool)

        # First estimation using population as a starting points.

        final_positions, final_probabilities, state = sampler.run_mcmc(
            population, nlinks)

        print('MCMC preburn done')

        # Discard the pre-burn chain; only the final run below is returned.
        sampler.reset()

        # Final estimation using the previous output.

        sampler.run_mcmc(final_positions, 3 * nlinks)

        MCMC_chains = sampler.chain
        MCMC_probabilities = sampler.lnprobability
        # pool.close()
        # print python_time.time()-start
        print(sys._getframe().f_code.co_name, ' : MCMC fit SUCCESS')
        return MCMC_chains, MCMC_probabilities
Code example #3
0
File: microlfits.py  Project: ebachelet/pyLIMA
    def MCMC(self, pool):
        """ The MCMC method. Construct starting points of the chains around
            the best solution found by the 'DE' method.
            The objective function is :func:`chichi_MCMC`. Telescope flux (fs and g), can be optimized thanks to MCMC if
            flux_estimation_MCMC is 'MCMC', either they are derived through np.polyfit.

            Based on the emcee python package :
            " emcee: The MCMC Hammer" (Foreman-Mackey et al. 2013).
            Have a look here : http://dan.iel.fm/emcee/current/

            :param pool: worker pool passed to emcee (replaced by an MPI pool
                         when schwimmbad is available, see below)

            :return: MCMC_chains, an array of shape (nlinks*nwalkers,
                     number_of_parameters + 1) whose last column is the
                     log-probability of each sample
            :rtype: np.ndarray

            **WARNING** :
                   nwalkers is set to 2 times the number of fitted parameters
                   nlinks is set to 5000
                   nwalkers*nlinks MCMC steps in total
        """

        # start = python_time.time()

        # No user-supplied guess: run differential evolution first to get a
        # starting solution (fluxes included in its output).
        if len(self.model.parameters_guess) == 0:

            differential_evolution_estimation = self.differential_evolution(pool)[0]
            self.DE_population_size = 10
            self.guess = differential_evolution_estimation

        else:

            # User guess covers only the Paczynski parameters; append the
            # telescope fluxes derived via find_fluxes.
            self.guess = list(self.model.parameters_guess)
            self.guess += self.find_fluxes(self.guess, self.model)

        # Best solution: restrict to the bounded parameters unless fluxes are
        # themselves sampled by MCMC.
        if self.fluxes_MCMC_method != 'MCMC':
            limit_parameters = len(self.model.parameters_boundaries)
            best_solution = self.guess[:limit_parameters]
        else:
            limit_parameters = len(self.guess)
            best_solution = self.guess

        nwalkers = 2 * len(best_solution)
        nlinks = 5 * 1000

        # Initialize the population of MCMC
        population = []

        count_walkers = 0

        while count_walkers < nwalkers:

            # Construct an individual of the population around the best solution.
            individual = []
            for parameter_key in list(self.model.model_dictionnary.keys())[:limit_parameters]:

                parameter_trial = microlguess.MCMC_parameters_initialization(parameter_key,
                                                                             self.model.model_dictionnary,
                                                                             best_solution)

                # parameter_trial may be falsy (key not handled); skip it then.
                if parameter_trial:

                    for parameter in parameter_trial:
                        individual.append(parameter)

            # Only keep starting points with finite log-probability, so every
            # walker starts inside the allowed region.
            chichi = self.chichi_MCMC(individual)

            if chichi != -np.inf:

                population.append(np.array(individual))
                count_walkers += 1

        print('pre MCMC done')

        # `individual` still holds the last accepted walker; its length is the
        # dimensionality of the sampled parameter space.
        number_of_parameters = len(individual)

        try:
            # Prefer an MPI pool when schwimmbad is available; this replaces
            # the pool supplied by the caller.
            from schwimmbad import MPIPool
            pool = MPIPool()
            if not pool.is_master():
                pool.wait()
                sys.exit(0)
        except Exception:
            # BUG FIX: the original bare `except:` also caught the SystemExit
            # raised by sys.exit(0) above, so non-master MPI workers never
            # exited and fell through into the sampler. `except Exception`
            # keeps the best-effort fallback (no schwimmbad / no MPI) while
            # letting SystemExit and KeyboardInterrupt propagate.
            pass

        sampler = emcee.EnsembleSampler(nwalkers, number_of_parameters, self.chichi_MCMC,
                                        a=2.0, pool=pool)

        # Single run from the initial population; no separate pre-burn phase.
        sampler.run_mcmc(population, nlinks, progress=True)

        # Flatten the (nlinks, nwalkers, dim) chain and append the matching
        # log-probabilities as a last column.
        MCMC_chains = np.c_[sampler.get_chain().reshape(nlinks * nwalkers, number_of_parameters),
                            sampler.get_log_prob().reshape(nlinks * nwalkers)]

        print(sys._getframe().f_code.co_name, ' : MCMC fit SUCCESS')
        return MCMC_chains