Example #1
    def test_history_correct_after_sampling_simple_model(self):
        """Test that the history saved matches with the returned sampled parameter values for a one-dimensional test model."""
        self.param, self.like = onedmodel()
        model = Model(self.like, self.param)
        step = Dream(model=model,
                     save_history=True,
                     history_thin=1,
                     model_name='test_history_correct',
                     adapt_crossover=False)
        sampled_params, logps = run_dream(self.param,
                                          self.like,
                                          niterations=10,
                                          nchains=5,
                                          save_history=True,
                                          history_thin=1,
                                          model_name='test_history_correct',
                                          adapt_crossover=False,
                                          verbose=False)
        history = np.load('test_history_correct_DREAM_chain_history.npy')
        self.assertEqual(
            len(history),
            step.total_var_dimension *
            ((10 * 5 / step.history_thin) + step.nseedchains))
        history_no_seedchains = history[(step.total_var_dimension *
                                         step.nseedchains)::]
        sorted_history = np.sort(history_no_seedchains)
        sorted_sampled_params = np.sort(np.array(sampled_params).flatten())

        for history_param, sampled_param in zip(sorted_history,
                                                sorted_sampled_params):
            self.assertEqual(sampled_param, history_param)

        remove('test_history_correct_DREAM_chain_history.npy')
        remove('test_history_correct_DREAM_chain_adapted_crossoverprob.npy')
        remove('test_history_correct_DREAM_chain_adapted_gammalevelprob.npy')
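For orientation, the length assertion in this test encodes the flat layout of the saved history file: the seed points are stored first, then one entry per sampled dimension for every thinned draw from every chain, which is also why the test slices the seed block off the front before comparing. A minimal arithmetic sketch with placeholder values (in the test itself, nseedchains comes from step.nseedchains):

# Placeholder numbers; only the run settings match Example #1.
total_var_dimension = 1        # one-dimensional test model
niterations, nchains = 10, 5
history_thin = 1
nseedchains = 10               # placeholder; Dream picks its own default

expected_len = total_var_dimension * (
    (niterations * nchains // history_thin) + nseedchains)
print(expected_len)            # 60 with the placeholder seed count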
Example #2
    def test_crossover_file_loading(self):
        """Test that when a crossover file is loaded the crossover values are set to the file values and not adapted."""
        self.param, self.like = multidmodel()
        old_crossovervals = np.array([.45, .20, .35])
        np.save('testing_crossoverval_load_DREAM.npy', old_crossovervals)
        model = Model(self.like, self.param)
        dream = Dream(model=model,
                      crossover_file='testing_crossoverval_load_DREAM.npy',
                      save_history=True,
                      model_name='testing_crossover_load')
        self.assertTrue(
            np.array_equal(dream.CR_probabilities, old_crossovervals))

        sampled_vals, logps = run_dream(
            self.param,
            self.like,
            niterations=100,
            nchains=3,
            crossover_file='testing_crossoverval_load_DREAM.npy',
            model_name='testing_crossover_load',
            save_history=True,
            verbose=False)

        crossover_vals_after_sampling = np.load(
            'testing_crossover_load_DREAM_chain_adapted_crossoverprob.npy')
        self.assertIs(
            np.array_equal(crossover_vals_after_sampling, old_crossovervals),
            True)
        remove('testing_crossover_load_DREAM_chain_adapted_crossoverprob.npy')
        remove('testing_crossover_load_DREAM_chain_adapted_gammalevelprob.npy')
        remove('testing_crossoverval_load_DREAM.npy')
        remove('testing_crossover_load_DREAM_chain_history.npy')
Example #3
    def test_history_file_loading(self):
        """Test that when a history file is provided it is loaded and appended to the new history."""
        self.param, self.like = onedmodel()
        model = Model(self.like, self.param)
        step = Dream(model=model)
        old_history = np.array([1, 3, 5, 7, 9, 11])
        step.save_history_to_disc(old_history, 'testing_history_load_')
        sampled_params, logps = run_dream(self.param,
                                          self.like,
                                          niterations=3,
                                          nchains=3,
                                          history_thin=1,
                                          history_file='testing_history_load_DREAM_chain_history.npy',
                                          save_history=True,
                                          model_name='test_history_loading',
                                          verbose=False)
        new_history = np.load('test_history_loading_DREAM_chain_history.npy')
        self.assertEqual(len(new_history), (len(old_history.flatten())+(3*step.total_var_dimension*3)))
        new_history_seed = new_history[:len(old_history.flatten())]
        new_history_seed_reshaped = new_history_seed.reshape(old_history.shape)
        self.assertIs(np.array_equal(old_history, new_history_seed_reshaped), True)
        
        added_history = new_history[len(old_history.flatten())::]
        sorted_history = np.sort(added_history)
        sorted_sampled_params = np.sort(np.array(sampled_params).flatten())

        for history_param, sampled_param in zip(sorted_history, sorted_sampled_params):
            self.assertEqual(sampled_param, history_param)
        remove('testing_history_load_DREAM_chain_history.npy')
        remove('testing_history_load_DREAM_chain_adapted_crossoverprob.npy')
        remove('testing_history_load_DREAM_chain_adapted_gammalevelprob.npy')
        remove('test_history_loading_DREAM_chain_adapted_crossoverprob.npy')
        remove('test_history_loading_DREAM_chain_adapted_gammalevelprob.npy')
        remove('test_history_loading_DREAM_chain_history.npy')
Example #4
    def test_boundaries_obeyed_aftersampling(self):
        """Test that boundaries are respected if included."""

        self.param, self.like = multidmodel_uniform()
        model = Model(self.like, self.param)
        step = Dream(model=model,
                     save_history=True,
                     history_thin=1,
                     model_name='test_boundaries',
                     adapt_crossover=False,
                     hardboundaries=True,
                     nverbose=10)
        sampled_params, logps = run_dream(self.param,
                                          self.like,
                                          niterations=1000,
                                          nchains=5,
                                          save_history=True,
                                          history_thin=1,
                                          model_name='test_boundaries',
                                          adapt_crossover=False,
                                          verbose=True,
                                          hardboundaries=True,
                                          nverbose=10)
        history = np.load('test_boundaries_DREAM_chain_history.npy')
        variables = model.sampled_parameters
        dim = 0
        for var in variables:
            interval = var.interval()
            dim += var.dsize

        lowerbound = interval[0]
        upperbound = interval[1]

        npoints = int(len(history) / float(dim))
        reshaped_history = np.reshape(history, (npoints, dim))

        print('reshaped history: ', reshaped_history)
        print('upper ', upperbound, ' and lower ', lowerbound)
        print('lower bounds: ', (reshaped_history < lowerbound).any())
        print('upper bounds: ', (reshaped_history > upperbound).any())
        print('disobeyed lower: ',
              reshaped_history[reshaped_history < lowerbound])
        print('disobeyed upper: ',
              reshaped_history[reshaped_history > upperbound])

        self.assertFalse((reshaped_history < lowerbound).any())
        self.assertFalse((reshaped_history > upperbound).any())

        remove('test_boundaries_DREAM_chain_adapted_crossoverprob.npy')
        remove('test_boundaries_DREAM_chain_adapted_gammalevelprob.npy')
        remove('test_boundaries_DREAM_chain_history.npy')
Example #5
    def test_robertson_example(self):
        """Test that the Robertson example runs and returns values of the expected shape."""
        nchains = robertson_kwargs['nchains']
        robertson_kwargs['niterations'] = 100
        robertson_kwargs['verbose'] = False
        robertson_kwargs['save_history'] = False

        #Check likelihood fxn runs
        logp = robertson_like([3, 8, .11])

        #Check sampling runs and gives output of expected shape
        sampled_params, logps = run_dream(**robertson_kwargs)
        self.assertEqual(len(sampled_params), nchains)
        self.assertEqual(len(sampled_params[0]), 100)
        self.assertEqual(len(sampled_params[0][0]), 3)
        self.assertEqual(len(logps), nchains)
        self.assertEqual(len(logps[0]), 100)
        self.assertEqual(len(logps[0][0]), 1)
Example #6
    def test_ndimgaussian_example(self):
        """Test that the n-dimensional gaussian example runs and returns values of the expected shape."""
        nchains = ndimgauss_kwargs['nchains']
        ndimgauss_kwargs['niterations'] = 100
        ndimgauss_kwargs['verbose'] = False
        ndimgauss_kwargs['save_history'] = False

        #Check likelihood fxn runs
        logp = ndimgauss_like(np.random.random_sample((200, )) * 10)

        #Check sampling runs and gives output of expected shape
        sampled_params, logps = run_dream(**ndimgauss_kwargs)
        self.assertEqual(len(sampled_params), nchains)
        self.assertEqual(len(sampled_params[0]), 100)
        self.assertEqual(len(sampled_params[0][0]), 200)
        self.assertEqual(len(logps), nchains)
        self.assertEqual(len(logps[0]), 100)
        self.assertEqual(len(logps[0][0]), 1)
        remove('ndim_gaussian_seed.npy')
Example #7
    def test_CORM_example(self):
        """Test that the CORM example runs and returns values of the expected shape."""
        nchains = corm_kwargs['nchains']
        corm_kwargs['niterations'] = 100
        corm_kwargs['verbose'] = False
        #Check likelihood fxn works
        logp = corm_like([-5, -3, .1, 10, 8, 4, .33, -.58, 99, 1, 0, 11])

        #Check entire algorithm will run and give results of the expected shape
        sampled_params, logps = run_dream(**corm_kwargs)
        self.assertEqual(len(sampled_params), nchains)
        self.assertEqual(len(sampled_params[0]), 100)
        self.assertEqual(len(sampled_params[0][0]), 12)
        self.assertEqual(len(logps), nchains)
        self.assertEqual(len(logps[0]), 100)
        self.assertEqual(len(logps[0][0]), 1)
        remove('corm_dreamzs_5chain_DREAM_chain_adapted_crossoverprob.npy')
        remove('corm_dreamzs_5chain_DREAM_chain_adapted_gammalevelprob.npy')
        remove('corm_dreamzs_5chain_DREAM_chain_history.npy')
Example #8
    def test_mixturemodel_example(self):
        """Test that the mixture model example runs and returns values of the expected shape."""
        nchains = mix_kwargs['nchains']
        mix_kwargs['niterations'] = 100
        mix_kwargs['verbose'] = False
        mix_kwargs['save_history'] = False

        #Check likelihood fxn works
        logp = mix_like(np.array([1, -9, 3, .04, 2, -8, 11, .001, -1, 10]))

        #Check that sampling runs and gives output of expected shape
        sampled_params, logps = run_dream(**mix_kwargs)
        self.assertEqual(len(sampled_params), nchains)
        self.assertEqual(len(sampled_params[0]), 100)
        self.assertEqual(len(sampled_params[0][0]), 10)
        self.assertEqual(len(logps), nchains)
        self.assertEqual(len(logps[0]), 100)
        self.assertEqual(len(logps[0][0]), 1)
        remove('mixturemodel_seed.npy')
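The four tests above (Examples #5-#8) unpack keyword-argument dictionaries (robertson_kwargs, ndimgauss_kwargs, corm_kwargs, mix_kwargs) that are defined elsewhere in the test module. A hedged sketch of the shape such a dictionary takes, using only keyword names that appear in the other examples in this listing; the concrete values and the model name are placeholders:

# Illustrative only: the real dictionaries ship with the example scripts.
robertson_kwargs_sketch = {
    'parameters': sampled_parameter_names,     # list of SampledParam objects
    'likelihood': robertson_like,              # log-likelihood callable
    'niterations': 10000,
    'nchains': 5,
    'multitry': False,
    'model_name': 'robertson_dreamzs_5chain',  # placeholder name
    'verbose': True,
    'save_history': True,
}
sampled_params, logps = run_dream(**robertson_kwargs_sketch)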
Example #9
sampled_parameter_names = [parameters_to_sample]

niterations = 100
converged = False
total_iterations = niterations
nchains = 5

if __name__ == '__main__':

    #Run DREAM sampling.  Documentation of DREAM options is in Dream.py.
    sampled_params, log_ps = run_dream(sampled_parameter_names,
                                       likelihood,
                                       niterations=niterations,
                                       nchains=nchains,
                                       multitry=8,
                                       gamma_levels=4,
                                       adapt_gamma=True,
                                       history_thin=1,
                                       model_name='EF5',
                                       verbose=True,
                                       parallel=True)

    #Save sampling output (sampled parameter values and their corresponding logps).
    for chain in range(len(sampled_params)):
        np.save(
            'EF5_sampled_params_chain_' + str(chain) + '_' +
            str(total_iterations), sampled_params[chain])
        np.save('EF5_logps_chain_' + str(chain) + '_' + str(total_iterations),
                log_ps[chain])

    # Check convergence and continue sampling if not converged
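Example #9 ends at this comment. The body that typically follows it, mirroring the restart pattern in Examples #10, #17, and #22 below and assuming Gelman_Rubin is imported from pydream.convergence, can be sketched as:

    GR = Gelman_Rubin(sampled_params)
    print('At iteration: ', total_iterations, ' GR = ', GR)

    old_samples = sampled_params
    if np.any(GR > 1.2):
        starts = [sampled_params[chain][-1, :] for chain in range(nchains)]
        while not converged:
            total_iterations += niterations
            sampled_params, log_ps = run_dream(sampled_parameter_names,
                                               likelihood,
                                               start=starts,
                                               niterations=niterations,
                                               nchains=nchains,
                                               multitry=8,
                                               gamma_levels=4,
                                               adapt_gamma=True,
                                               history_thin=1,
                                               model_name='EF5',
                                               verbose=True,
                                               parallel=True,
                                               restart=True)
            old_samples = [np.concatenate((old_samples[chain],
                                           sampled_params[chain]))
                           for chain in range(nchains)]
            GR = Gelman_Rubin(old_samples)
            print('At iteration: ', total_iterations, ' GR = ', GR)
            if np.all(GR < 1.2):
                converged = True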
Example #10
def fit_with_DREAM(sim_name, parameter_dict, likelihood):
    original_params = [parameter_dict[k] for k in parameter_dict.keys()]

    priors_list = []
    for p in original_params:
        priors_list.append(SampledParam(norm, loc=np.log(p), scale=1.0))
    # Set simulation parameters
    niterations = 10000
    converged = False
    total_iterations = niterations
    nchains = 5

    # Make save directory
    today = datetime.now()
    save_dir = "PyDREAM_" + today.strftime('%d-%m-%Y') + "_" + str(niterations)
    os.makedirs(os.path.join(os.getcwd(), save_dir), exist_ok=True)

    # Run DREAM sampling.  Documentation of DREAM options is in Dream.py.
    sampled_params, log_ps = run_dream(priors_list, likelihood, start=np.log(original_params),
                                       niterations=niterations, nchains=nchains, multitry=False,
                                       gamma_levels=4, adapt_gamma=True, history_thin=1, model_name=sim_name,
                                       verbose=True)

    # Save sampling output (sampled parameter values and their corresponding logps).
    for chain in range(len(sampled_params)):
        np.save(os.path.join(save_dir, sim_name + str(chain) + '_' + str(total_iterations)), sampled_params[chain])
        # distinct filename so the log-ps do not overwrite the sampled parameters
        np.save(os.path.join(save_dir, sim_name + str(chain) + '_logPs_' + str(total_iterations)), log_ps[chain])

    # Check convergence and continue sampling if not converged

    GR = Gelman_Rubin(sampled_params)
    print('At iteration: ', total_iterations, ' GR = ', GR)
    np.savetxt(os.path.join(save_dir, sim_name + str(total_iterations) + '.txt'), GR)

    old_samples = sampled_params
    if np.any(GR > 1.2):
        starts = [sampled_params[chain][-1, :] for chain in range(nchains)]
        while not converged:
            total_iterations += niterations

            sampled_params, log_ps = run_dream(priors_list, likelihood, start=starts, niterations=niterations,
                                               nchains=nchains, multitry=False, gamma_levels=4, adapt_gamma=True,
                                               history_thin=1, model_name=sim_name, verbose=True, restart=True)

            for chain in range(len(sampled_params)):
                np.save(os.path.join(save_dir, sim_name + '_' + str(chain) + '_' + str(total_iterations)),
                        sampled_params[chain])
                np.save(os.path.join(save_dir, sim_name + '_' + str(chain) + '_' + str(total_iterations)),
                        log_ps[chain])

            old_samples = [np.concatenate((old_samples[chain], sampled_params[chain])) for chain in range(nchains)]
            GR = Gelman_Rubin(old_samples)
            print('At iteration: ', total_iterations, ' GR = ', GR)
            np.savetxt(os.path.join(save_dir, sim_name + '_' + str(total_iterations) + '.txt'), GR)

            if np.all(GR < 1.2):
                converged = True
    try:
        # Plot output
        total_iterations = len(old_samples[0])
        burnin = int(total_iterations / 2)
        samples = np.concatenate(list((old_samples[i][burnin:, :] for i in range(len(old_samples)))))
        np.save(os.path.join(save_dir, sim_name+'_samples'), samples)
        ndims = len(old_samples[0][0])
        colors = sns.color_palette(n_colors=ndims)
        for dim in range(ndims):
            fig = plt.figure()
            # histplot replaces the deprecated sns.distplot
            sns.histplot(samples[:, dim], color=colors[dim], kde=True, stat='density')
            fig.savefig(os.path.join(save_dir, sim_name + '_dimension_' + str(dim) + '_' + list(parameter_dict.keys())[dim] + '.pdf'))
            plt.close(fig)

        # Convert to dataframe
        df = pd.DataFrame(samples, columns=parameter_dict.keys())
        g = sns.pairplot(df)
        for i, j in zip(*np.triu_indices_from(g.axes, 1)):
            g.axes[i,j].set_visible(False)
        g.savefig(os.path.join(save_dir, 'corner_plot.pdf'))

        # Basic statistics
        mean_parameters = np.mean(samples, axis=0)
        median_parameters = np.median(samples, axis=0)
        np.save(os.path.join(save_dir, 'mean_parameters'), mean_parameters)
        np.save(os.path.join(save_dir, 'median_parameters'), median_parameters)
        df.describe().to_csv(os.path.join(save_dir, 'descriptive_statistics.csv'))

    except ImportError:
        pass
    return 0
Example #11
    return cost

# Run pydream


niterations = 10000
nchains = 4
converged = False
if __name__ == '__main__':
    sampled_params, log_ps = run_dream(parameters=sampled_params_list,
                                       likelihood=likelihood,
                                       niterations=niterations,
                                       nchains=nchains,
                                       multitry=False,
                                       gamma_levels=6,
                                       nCR=6,
                                       hardboundaries=True,
                                       snooker_=0.4,
                                       adapt_gamma=False,
                                       history_thin=1,
                                       model_name='dreamzs_5chain_dirichlet',
                                       verbose=True)
    total_iterations = niterations
    # Save sampling output (sampled parameter values and their corresponding logps).
    for chain in range(len(sampled_params)):
        np.save('dreamzs_5chain_dirichlet_sampled_params_chain_' + str(chain)+'_'+str(total_iterations), sampled_params[chain])
        np.save('dreamzs_5chain_dirichlet_logps_chain_' + str(chain)+'_'+str(total_iterations), log_ps[chain])

    GR = Gelman_Rubin(sampled_params)
    print('At iteration: ',total_iterations,' GR = ',GR)
    np.savetxt('dreamzs_5chain_dirichlet_GelmanRubin_iteration_'+str(total_iterations)+'.txt', GR)
Example #12
sampled_parameter_names = [parameters_to_sample]

niterations = 10
converged = False
total_iterations = niterations
nchains = 5

if __name__ == '__main__':

    #Run DREAM sampling.  Documentation of DREAM options is in Dream.py.
    sampled_params, log_ps = run_dream(
        sampled_parameter_names,
        likelihood,
        niterations=niterations,
        nchains=nchains,
        multitry=False,
        gamma_levels=4,
        adapt_gamma=True,
        history_thin=1,
        model_name='robertson_nopysb_dreamzs_5chain',
        verbose=True)

    #Save sampling output (sampled parameter values and their corresponding logps).
    for chain in range(len(sampled_params)):
        np.save(
            'robertson_nopysb_dreamzs_5chain_sampled_params_chain_' +
            str(chain) + '_' + str(total_iterations), sampled_params[chain])
        np.save(
            'robertson_nopysb_dreamzs_5chain_logps_chain_' + str(chain) + '_' +
            str(total_iterations), log_ps[chain])
Example #13
        return total_cost


# Run pydream

niterations = 10000
nchains = 5
converged = False
if __name__ == '__main__':
    sampled_params, log_ps = run_dream(
        parameters=sampled_params_list,
        likelihood=likelihood,
        niterations=niterations,
        nchains=nchains,
        multitry=False,
        gamma_levels=6,
        nCR=6,
        snooker_=0.4,
        adapt_gamma=False,
        history_thin=1,
        model_name='dreamzs_5chain_NEv2_Sage_test_NM',
        verbose=True)
    total_iterations = niterations
    # Save sampling output (sampled parameter values and their corresponding logps).
    for chain in range(len(sampled_params)):
        np.save(
            'dreamzs_5chain_NEv2_Sage_test_NM_sampled_params_chain_' +
            str(chain) + '_' + str(total_iterations), sampled_params[chain])
        np.save(
            'dreamzs_5chain_NEv2_Sage_test_NM_logps_chain_' + str(chain) +
            '_' + str(total_iterations), log_ps[chain])
Example #14
sim_name = 'mixed_IFN'

if __name__ == '__main__':

    # Make save directory
    today = datetime.now()
    save_dir = "PyDREAM_" + today.strftime('%d-%m-%Y') + "_" + str(niterations)
    os.makedirs(os.path.join(os.getcwd(), save_dir), exist_ok=True)

    #Run DREAM sampling.  Documentation of DREAM options is in Dream.py.
    sampled_params, log_ps = run_dream(priors_list,
                                       likelihood,
                                       start=original_params,
                                       niterations=niterations,
                                       nchains=nchains,
                                       multitry=False,
                                       gamma_levels=4,
                                       adapt_gamma=True,
                                       history_thin=1,
                                       model_name=sim_name,
                                       verbose=True)

    #Save sampling output (sampled parameter values and their corresponding logps).
    for chain in range(len(sampled_params)):
        np.save(
            os.path.join(save_dir,
                         sim_name + str(chain) + '_' + str(total_iterations)),
            sampled_params[chain])
        # distinct filename so the log-ps do not overwrite the sampled parameters
        np.save(
            os.path.join(save_dir,
                         sim_name + str(chain) + '_logPs_' +
                         str(total_iterations)),
            log_ps[chain])
Example #15
sp_kf_diff_nev1_nev2 = SampledParam(uniform, loc=np.log10(0.05), scale=2.5)
sampled_params_list.append(sp_kf_diff_nev1_nev2)
sp_kr_diff_nev1_nev2 = SampledParam(uniform, loc=np.log10(0.05), scale=2.5)
sampled_params_list.append(sp_kr_diff_nev1_nev2)

sp_kf_diff_nev2_nonNe = SampledParam(uniform, loc=np.log10(0.05), scale=2.5)
sampled_params_list.append(sp_kf_diff_nev2_nonNe)

converged = False
sampled_params, log_ps = run_dream(parameters=sampled_params_list,
                                   likelihood=likelihood,
                                   niterations=niterations,
                                   nchains=nchains,
                                   start=starts,
                                   multitry=False,
                                   gamma_levels=6,
                                   nCR=6,
                                   p_gamma_unity=0.5,
                                   adapt_gamma=True,
                                   history_thin=1,
                                   model_name='dreamzs_5chain_NEv2_Sage_NM',
                                   verbose=True)
total_iterations = niterations
# Save sampling output (sampled parameter values and their corresponding logps).
for chain in range(len(sampled_params)):
    np.save(
        'dreamzs_5chain_NEv2_Sage_NM_sampled_params_chain_' + str(chain) +
        '_' + str(total_iterations), sampled_params[chain])
    np.save(
        'dreamzs_5chain_NEv2_Sage_NM_logps_chain_' + str(chain) + '_' +
        str(total_iterations), log_ps[chain])
Example #16
                                    scale=upper_bounds)

# Parameters for run_dream
n_iterations = 500
nchains = 3

if __name__ == '__main__':
    total_iterations = n_iterations
    converged = False
    sampled_params, log_ps = core.run_dream(parameters_to_sample,
                                            calculate_likelihood_dream_wm,
                                            niterations=n_iterations,
                                            nchains=nchains,
                                            random_start=True,
                                            start=None,
                                            save_history=True,
                                            adapt_gamma=False,
                                            gamma_levels=1,
                                            tempering=False,
                                            multitry=False,
                                            parallel=False,
                                            model_name='wm_model')

    # Save sampling output (sampled parameter values and their corresponding logps).
    for chain in range(len(sampled_params)):
        np.save(
            'wm_model_sampled_params_chain_' + str(chain) + '_' +
            str(total_iterations), sampled_params[chain])
        np.save(
            'wm_model_logps_chain_' + str(chain) + '_' + str(total_iterations),
            log_ps[chain])
Example #17
def DREAM_fit(model, priors_list, posterior, start_params,
              sampled_param_names, niterations, nchains, sim_name,
              save_dir, custom_params={}, GR_cutoff=1.2, iteration_cutoff=1E7,
              verbose=True, plot_posteriors=True):
    """
    The DREAM fitting algorithm as implemented in run_dream(), plus decorations
    for saving run parameters, checking convergence, and post-fitting analysis.
    """
    converged = False
    total_iterations = niterations
    np.save(save_dir + os.sep + 'param_names.npy', sampled_param_names)
    with open(save_dir + os.sep + 'init_params.pkl', 'wb') as f:
        pickle.dump(dict(model.parameters), f)

    # Run DREAM sampling.  Documentation of DREAM options is in Dream.py.
    sampled_params, log_ps = run_dream(priors_list, posterior,
                                       start=start_params,
                                       niterations=niterations,
                                       nchains=nchains,
                                       multitry=True, parallel=True,
                                       gamma_levels=4, adapt_gamma=True,
                                       history_thin=1, model_name=sim_name,
                                       verbose=verbose)

    # Save sampling output (sampled param values and their corresponding logps)
    for chain in range(len(sampled_params)):
        np.save(os.path.join(save_dir, sim_name+str(chain) + '_' + str(total_iterations)), sampled_params[chain])
        np.save(os.path.join(save_dir, sim_name+str(chain) + '_logPs_' + str(total_iterations)), log_ps[chain])

    # Check convergence and continue sampling if not converged

    GR = Gelman_Rubin(sampled_params)
    print('At iteration: ', total_iterations, ' GR = ', GR)
    np.savetxt(os.path.join(save_dir, sim_name + '_' + str(total_iterations) + '.txt'), GR)

    old_samples = sampled_params
    if np.any(GR > GR_cutoff):
        starts = [sampled_params[chain][-1, :] for chain in range(nchains)]
        while not converged:
            total_iterations += niterations

            sampled_params, log_ps = run_dream(priors_list, posterior,
                                               start=starts,
                                               niterations=niterations,
                                               nchains=nchains,
                                               multitry=True, parallel=True,
                                               gamma_levels=4,
                                               adapt_gamma=True,
                                               history_thin=1,
                                               model_name=sim_name,
                                               verbose=verbose, restart=True)

            for chain in range(len(sampled_params)):
                np.save(os.path.join(save_dir, sim_name + '_' + str(chain) + '_' + str(total_iterations)), sampled_params[chain])
                np.save(os.path.join(save_dir, sim_name + '_' + str(chain) + '_logPs_' + str(total_iterations)), log_ps[chain])

            old_samples = [np.concatenate((old_samples[chain], sampled_params[chain])) for chain in range(nchains)]
            GR = Gelman_Rubin(old_samples)
            print('At iteration: ', total_iterations, ' GR = ', GR)
            np.savetxt(os.path.join(save_dir, sim_name + '_' + str(total_iterations)+'.txt'), GR)

            if np.all(GR < GR_cutoff) or total_iterations >= iteration_cutoff:
                converged = True

    log_ps = np.array(log_ps)
    sampled_params = np.array(sampled_params)

    try:
        # Maximum posterior model:
        max_in_each_chain = [np.argmax(chain) for chain in log_ps]
        global_max_chain_idx = np.argmax([log_ps[chain][max_idx] for chain, max_idx in enumerate(max_in_each_chain)])
        ml_params = sampled_params[global_max_chain_idx, max_in_each_chain[global_max_chain_idx]]
        ml_model = {pname: 10 ** pvalue for pname, pvalue in zip(sampled_param_names, ml_params)}
        print(ml_model,
              file=open(os.path.join(save_dir, sim_name + '_ML_params.txt'), 'w'))
        # Maximum posterior for each chain
        ml_samples = [{pname: 10 ** pvalue for pname, pvalue in zip(sampled_param_names, sampled_params[chain_idx, max_in_each_chain[chain_idx]])} for chain_idx in range(nchains)]
        print(ml_samples,
              file=open(os.path.join(save_dir, sim_name + '_ML_samples.txt'), 'w'))

    except IndexError:
        print("IndexError finding maximum posterior parameters")
        pass

    try:
        # Compute burn-in
        total_iterations = len(old_samples[0])
        burnin = int(total_iterations / 2)
        samples = np.concatenate(list((old_samples[i][burnin:, :] for i in range(len(old_samples)))))
        np.save(os.path.join(save_dir, sim_name+'_samples'), samples)

        # Basic statistics
        mean_parameters = np.mean(samples, axis=0)
        median_parameters = np.median(samples, axis=0)
        np.save(os.path.join(save_dir, 'mean_parameters'), mean_parameters)
        np.save(os.path.join(save_dir, 'median_parameters'), median_parameters)
        df = pd.DataFrame(samples, columns=sampled_param_names)
        df.describe().to_csv(os.path.join(save_dir,
                             'descriptive_statistics.csv'))

        if plot_posteriors:
            # Prepare plot canvas
            ndims = len(old_samples[0][0])
            colors = sns.color_palette(n_colors=ndims)
            priors_dict = dict(list(zip(sampled_param_names, priors_list)))
            # computes the factors of ndims:
            f1 = list(set(reduce(list.__add__, ([i, ndims//i] for i in range(1, int(ndims**0.5) + 1) if ndims % i == 0))))
            ncols = f1[int(len(f1) / 2 - 1)]
            nrows = f1[int(len(f1) / 2)]
            fig, axes = plt.subplots(nrows=nrows, ncols=ncols)

            # Plot posterior distributions
            for dim, ax in enumerate(fig.axes):
                p = sampled_param_names[dim]
                sns.histplot(samples[:, dim], color=colors[dim], ax=ax, kde=True, stat='density')
                xrange = np.arange(priors_dict[p][0] - 3 * priors_dict[p][1],
                                   priors_dict[p][0] + 3 * priors_dict[p][1], 0.01)
                yrange = norm.pdf(xrange, priors_dict[p][0], priors_dict[p][1])
                ax.plot(xrange, yrange, 'k--')
                ax.set_xlabel(p)
                ax.set_ylabel(None)
                ax.spines['right'].set_visible(False)
                ax.spines['top'].set_visible(False)
            plt.tight_layout()
            plt.savefig(os.path.join(save_dir, sim_name + 'posteriors.pdf'))
            plt.close()

            # Create pairplot
            g = sns.pairplot(df)
            for i, j in zip(*np.triu_indices_from(g.axes, 1)):
                g.axes[i, j].set_visible(False)
            g.savefig(os.path.join(save_dir, 'corner_plot.png'))

    except (ImportError, OSError, AttributeError, TypeError):
        pass

    # Clean up stray history/crossover files left in the working directory.
    # shutil.move does not expand wildcards, so glob the pattern explicitly
    # (requires `import glob` alongside the other imports).
    for stray in glob.glob(os.path.join(os.getcwd(), '*_DREAM_chain_*.*')):
        try:
            shutil.move(stray, save_dir)
        except (FileNotFoundError, shutil.Error):
            pass
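For context, a hedged sketch of how DREAM_fit might be invoked; every argument here is a placeholder supplied by the calling script (a model object whose .parameters can be turned into a dict, a list of SampledParam priors, and a posterior/log-probability callable):

# Hypothetical call; none of these objects are defined in the snippet above.
DREAM_fit(model=model,
          priors_list=priors_list,
          posterior=posterior,
          start_params=start_params,
          sampled_param_names=['kf1', 'kr1'],   # placeholder parameter names
          niterations=10000,
          nchains=5,
          sim_name='example_run',
          save_dir='PyDREAM_results')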
Example #18
starts = [m[chain] for chain in range(3)]

if __name__ == '__main__':

    niterations = 50000
    # Run DREAM sampling.  Documentation of DREAM options is in Dream.py.
    converged = False
    total_iterations = niterations
    nchains = 3

    sampled_params, log_ps = run_dream([params],
                                       likelihood,
                                       niterations=niterations,
                                       nchains=nchains,
                                       start=starts,
                                       start_random=False,
                                       save_history=True,
                                       history_file='mixturemodel_seed.npy',
                                       multitry=5,
                                       parallel=False)

    for chain in range(len(sampled_params)):
        np.save(
            'mixmod_mtdreamzs_3chain_sampled_params_chain_' + str(chain) +
            '_' + str(total_iterations), sampled_params[chain])
        np.save(
            'mixmod_mtdreamzs_3chain_logps_chain_' + str(chain) + '_' +
            str(total_iterations), log_ps[chain])

    os.remove('mixturemodel_seed.npy')
Example #19
params = FlatParam(test_value=mu)

if __name__ == '__main__':
    niterations = 150000
    # Run DREAM sampling.  Documentation of DREAM options is in Dream.py.
    converged = False
    total_iterations = niterations
    nchains = 3

    sampled_params, log_ps = run_dream([params],
                                       likelihood,
                                       niterations=niterations,
                                       nchains=nchains,
                                       start=starts,
                                       start_random=False,
                                       save_history=True,
                                       adapt_gamma=False,
                                       gamma_levels=1,
                                       tempering=False,
                                       history_file='ndim_gaussian_seed.npy',
                                       multitry=5,
                                       parallel=False,
                                       model_name='ndim_gaussian')

    for chain in range(len(sampled_params)):
        np.save(
            'ndimgauss_mtdreamzs_3chain_sampled_params_chain_' + str(chain) +
            '_' + str(total_iterations), sampled_params[chain])
        np.save(
            'ndimgauss_mtdreamzs_3chain_logps_chain_' + str(chain) + '_' +
            str(total_iterations), log_ps[chain])
Example #20

optimization_method = 'MCMC'  # Any value other than 'MCMC' makes the script fall back to scipy, using the named optimization routine

if optimization_method == 'MCMC':

    if __name__ == '__main__':

        #Run DREAM sampling.  Documentation of DREAM options is in Dream.py.
        sampled_params, log_ps = run_dream(
            parameters_to_sample,
            likelihood,
            niterations=niterations,
            nchains=nchains,
            multitry=True,
            gamma_levels=4,
            adapt_gamma=True,
            parallel=True,
            history_thin=200,
            model_name='bsfit_nopysb_dreamzs_5chain',
            verbose=True,
            start=startchains)

        #Save sampling output (sampled parameter values and their corresponding logps).
        for chain in range(len(sampled_params)):
            np.save(
                'dream/bsfit_sampled_params_chain' + str(chain) + '_' +
                str(total_iterations), sampled_params[chain])
            np.save(
                'dream/bsfit_logps_chain' + str(chain) + '_' +
                str(total_iterations), log_ps[chain])
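The comment defining optimization_method above states that any value other than 'MCMC' routes the fit through SciPy using the named routine. That branch is not part of the snippet, but a minimal sketch of what it could look like follows; the objective wrapper and the starting point are placeholders, and it assumes likelihood returns a scalar log-probability:

# Hypothetical non-MCMC branch (would pair with the outer
# `if optimization_method == 'MCMC':` block as an `else:`).
from scipy.optimize import minimize

def neg_log_likelihood(theta):
    # scipy minimizes, so flip the sign of the log-likelihood
    return -likelihood(theta)

result = minimize(neg_log_likelihood, x0=startchains[0],
                  method=optimization_method)
print('best-fit parameters:', result.x, 'objective:', result.fun)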
Example #21

nchains = 4
niterations = 50000

if __name__ == '__main__':

    # Run DREAM sampling.  Documentation of DREAM options is in Dream.py.
    converged = False
    total_iterations = niterations
    sampled_params, log_ps = run_dream(
        parameters=sampled_parameter_names,
        likelihood=likelihood,
        niterations=niterations,
        nchains=nchains,
        multitry=False,
        gamma_levels=4,
        adapt_gamma=True,
        history_thin=1,
        model_name='earm_smallest_dreamzs_5chain2',
        verbose=True)

    # Save sampling output (sampled parameter values and their corresponding logps).
    for chain in range(len(sampled_params)):
        np.save(
            'earm_smallest_dreamzs_5chain_sampled_params_chain_' + str(chain) +
            '_' + str(total_iterations), sampled_params[chain])
        np.save(
            'earm_smallest_dreamzs_5chain_logps_chain_' + str(chain) + '_' +
            str(total_iterations), log_ps[chain])
Example #22
def DREAM_fit(model, priors_list, posterior, start_params,
              sampled_param_names, niterations, nchains, sim_name,
              save_dir, custom_params={}, GR_cutoff=1.2):
    """
    The DREAM fitting algorithm as implemented in run_dream(), plus decorations
    for saving run parameters, checking convergence, and post-fitting analysis.
    """
    converged = False
    total_iterations = niterations

    # Run DREAM sampling.  Documentation of DREAM options is in Dream.py.
    sampled_params, log_ps = run_dream(priors_list, posterior,
                                       start=start_params,
                                       niterations=niterations,
                                       nchains=nchains,
                                       multitry=False,
                                       gamma_levels=4, adapt_gamma=True,
                                       history_thin=1, model_name=sim_name,
                                       verbose=True)

    # Save sampling output (sampled param values and their corresponding logps)
    for chain in range(len(sampled_params)):
        np.save(os.path.join(save_dir, sim_name+str(chain) + '_' +
                             str(total_iterations)), sampled_params[chain])
        # distinct filename so the log-ps do not overwrite the sampled parameters
        np.save(os.path.join(save_dir, sim_name+str(chain) + '_logPs_' +
                             str(total_iterations)), log_ps[chain])

    # Check convergence and continue sampling if not converged

    GR = Gelman_Rubin(sampled_params)
    print('At iteration: ', total_iterations, ' GR = ', GR)
    np.savetxt(os.path.join(save_dir, sim_name + str(total_iterations) +
                            '.txt'), GR)

    old_samples = sampled_params
    if np.any(GR > GR_cutoff):
        starts = [sampled_params[chain][-1, :] for chain in range(nchains)]
        while not converged:
            total_iterations += niterations

            sampled_params, log_ps = run_dream(priors_list, posterior,
                                               start=starts,
                                               niterations=niterations,
                                               nchains=nchains, multitry=False,
                                               gamma_levels=4,
                                               adapt_gamma=True,
                                               history_thin=1,
                                               model_name=sim_name,
                                               verbose=True, restart=True)

            for chain in range(len(sampled_params)):
                np.save(os.path.join(save_dir, sim_name + '_' + str(chain) +
                                     '_' + str(total_iterations)),
                        sampled_params[chain])
                np.save(os.path.join(save_dir, sim_name + '_' + str(chain) +
                                     '_' + str(total_iterations)),
                        log_ps[chain])

            old_samples = [np.concatenate((old_samples[chain],
                           sampled_params[chain])) for chain in range(nchains)]
            GR = Gelman_Rubin(old_samples)
            print('At iteration: ', total_iterations, ' GR = ', GR)
            np.savetxt(os.path.join(save_dir, sim_name + '_' +
                                    str(total_iterations)+'.txt'), GR)

            if np.all(GR < GR_cutoff):
                converged = True

    log_ps = np.array(log_ps)
    sampled_params = np.array(sampled_params)

    try:
        # Maximum posterior model:
        max_in_each_chain = [np.argmax(chain) for chain in log_ps]
        global_max_chain_idx = np.argmax([log_ps[chain][max_idx] for
                                          chain, max_idx in
                                          enumerate(max_in_each_chain)])
        ml_params = sampled_params[global_max_chain_idx,
                                   max_in_each_chain[global_max_chain_idx]]
        ml_model = {pname: 10 ** pvalue for pname, pvalue in
                    zip(sampled_param_names, ml_params)}
        print(ml_model,
              file=open(os.path.join(save_dir, sim_name +
                                     '_ML_params.txt'), 'w'))

    except IndexError:
        print("IndexError finding maximum posterior parameters")
        pass

    try:
        # Plot output
        total_iterations = len(old_samples[0])
        burnin = int(total_iterations / 2)
        samples = np.concatenate(list((old_samples[i][burnin:, :] for
                                       i in range(len(old_samples)))))
        np.save(os.path.join(save_dir, sim_name+'_samples'), samples)
        ndims = len(old_samples[0][0])
        colors = sns.color_palette(n_colors=ndims)
        for dim in range(ndims):
            fig = plt.figure()
            # histplot replaces the deprecated sns.distplot; a fresh figure per
            # dimension keeps each saved PDF from accumulating earlier traces
            sns.histplot(samples[:, dim], color=colors[dim], kde=True, stat='density')
            fig.savefig(os.path.join(save_dir, sim_name + '_dimension_' +
                                     str(dim) + '_' +
                                     sampled_param_names[dim] +
                                     '.pdf'))
            plt.close(fig)

        # Convert to dataframe
        df = pd.DataFrame(samples, columns=sampled_param_names)
        g = sns.pairplot(df)
        for i, j in zip(*np.triu_indices_from(g.axes, 1)):
            g.axes[i, j].set_visible(False)
        g.savefig(os.path.join(save_dir, 'corner_plot.pdf'))

        # Basic statistics
        mean_parameters = np.mean(samples, axis=0)
        median_parameters = np.median(samples, axis=0)
        np.save(os.path.join(save_dir, 'mean_parameters'), mean_parameters)
        np.save(os.path.join(save_dir, 'median_parameters'), median_parameters)
        df.describe().to_csv(os.path.join(save_dir,
                             'descriptive_statistics.csv'))

    except (ImportError, OSError):
        pass

    # Clean up stray history/crossover files left in the working directory.
    # shutil.move does not expand wildcards, so glob the pattern explicitly
    # (requires `import glob` alongside the other imports).
    for stray in glob.glob(os.path.join(os.getcwd(), '*_DREAM_chain_*.*')):
        try:
            shutil.move(stray, save_dir)
        except (FileNotFoundError, shutil.Error):
            pass