Example #1
    def __init__(self, distribution, num_components, num_sample_discard=1000,
                 num_samples_gmm=1000, num_samples_when_to_switch=40000, num_runs_em=1):
        StandardMetropolis.__init__(self, distribution)

        self.num_components = num_components
        self.num_sample_discard = num_sample_discard
        self.num_samples_gmm = num_samples_gmm
        self.num_samples_when_to_switch = num_samples_when_to_switch
        self.num_runs_em = num_runs_em

        # start with an empty proposal; it is replaced in the adapt method
        self.proposal = None
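The constructor above only stores the mixture-fitting parameters; self.proposal stays None until the sampler's adapt step (not shown here) fits a mixture to the chain history and switches to it. A rough, hypothetical sketch of such switching logic, written against scikit-learn's GaussianMixture rather than the package's own code; the function, its arguments, and the way the proposal is stored are assumptions:

import numpy as np
from sklearn.mixture import GaussianMixture

def maybe_fit_gmm_proposal(sampler, history, iteration):
    # history: (num_samples, dimension) array of past chain states
    # do nothing until enough samples have been collected, and only switch once
    if sampler.proposal is not None or iteration < sampler.num_samples_when_to_switch:
        return
    # drop the earliest samples, then thin down to the GMM training size
    usable = history[sampler.num_sample_discard:]
    idx = np.linspace(0, len(usable) - 1, sampler.num_samples_gmm).astype(int)
    gmm = GaussianMixture(n_components=sampler.num_components,
                          n_init=sampler.num_runs_em)
    gmm.fit(usable[idx])
    sampler.proposal = gmm  # construct_proposal returns this from now on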
Example #2
    def construct_proposal(self, y):
        # the fixed proposal (created in the adapt method) exists from a certain
        # iteration on; fall back to the standard Metropolis proposal otherwise
        if self.proposal is not None:
            return self.proposal
        else:
            return StandardMetropolis.construct_proposal(self, y)
Example #3
    def construct_proposal(self, y):
        # the fixed proposal (created in the adapt method) exists from a certain
        # iteration on; fall back to the standard Metropolis proposal otherwise
        if self.proposal is not None:
            return self.proposal
        else:
            return StandardMetropolis.construct_proposal(self, y)
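construct_proposal returns the distribution that a Metropolis-Hastings transition draws its next candidate from. A generic, hypothetical sketch of one such transition; the sample/log_pdf method names on the proposal and the target are assumptions about the interface, and a symmetric proposal is assumed so the proposal densities cancel:

import numpy as np

def mh_step(sampler, target, current, current_log_lik):
    proposal = sampler.construct_proposal(current)
    candidate = proposal.sample()
    candidate_log_lik = target.log_pdf(candidate)

    # accept with probability min(1, pi(candidate) / pi(current));
    # an asymmetric proposal (e.g. the fixed GMM) would also need the q-ratio here
    if np.log(np.random.rand()) < candidate_log_lik - current_log_lik:
        return candidate, candidate_log_lik
    return current, current_log_lik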
Example #4
def main():
    Log.set_loglevel(logging.DEBUG)

    prior = Gaussian(Sigma=eye(2) * 100)

    posterior = OzonePosterior(prior,
                               logdet_alg="scikits",
                               solve_method="scikits")

    proposal_cov = diag([4.000000000000000e-05, 1.072091680000000e+02])
    mcmc_sampler = StandardMetropolis(posterior, scale=1.0, cov=proposal_cov)

    start = asarray([-11.35, -13.1])
    mcmc_params = MCMCParams(start=start, num_iterations=5000)
    chain = MCMCChain(mcmc_sampler, mcmc_params)

    chain.append_mcmc_output(StatisticsOutput(print_from=1, lag=1))

    home = expanduser("~")
    folder = os.sep.join([home, "sample_ozone_posterior_average_serial"])
    store_chain_output = StoreChainOutput(folder)
    chain.append_mcmc_output(store_chain_output)

    loaded = store_chain_output.load_last_stored_chain()
    if loaded is None:
        logging.info("Running chain from scratch")
    else:
        logging.info("Running chain from iteration %d" % loaded.iteration)
        chain = loaded

    chain.run()

    # pickle the final chain; binary mode is required for pickle data
    with open(folder + os.sep + "final_chain", "wb") as f:
        dump(chain, f)
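For later inspection, the pickled chain can be read back with the standard pickle module. A minimal sketch, assuming the chain was written as in the example above; note that unpickling requires the same classes (MCMCChain, the posterior, etc.) to be importable:

import os
from os.path import expanduser
from pickle import load

folder = os.sep.join([expanduser("~"), "sample_ozone_posterior_average_serial"])
with open(folder + os.sep + "final_chain", "rb") as f:
    chain = load(f)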
Example #5
    def __init__(self,
                 distribution,
                 num_components,
                 num_sample_discard=1000,
                 num_samples_gmm=1000,
                 num_samples_when_to_switch=40000,
                 num_runs_em=1):
        StandardMetropolis.__init__(self, distribution)

        self.num_components = num_components
        self.num_sample_discard = num_sample_discard
        self.num_samples_gmm = num_samples_gmm
        self.num_samples_when_to_switch = num_samples_when_to_switch
        self.num_runs_em = num_runs_em

        # start with an empty proposal; it is replaced in the adapt method
        self.proposal = None
Example #6
    def __str__(self):
        s = self.__class__.__name__ + "=["
        s += "num_components=" + str(self.num_components)
        s += ", num_sample_discard=" + str(self.num_sample_discard)
        s += ", num_samples_gmm=" + str(self.num_samples_gmm)
        s += ", num_runs_em=" + str(self.num_runs_em)
        s += ", " + StandardMetropolis.__str__(self)
        s += "]"
        return s
Example #7
    def __str__(self):
        s = self.__class__.__name__ + "=["
        s += "num_components=" + str(self.num_components)
        s += ", num_sample_discard=" + str(self.num_sample_discard)
        s += ", num_samples_gmm=" + str(self.num_samples_gmm)
        s += ", num_runs_em=" + str(self.num_runs_em)
        s += ", " + StandardMetropolis.__str__(self)
        s += "]"
        return s
Example #8
def main():
    Log.set_loglevel(logging.DEBUG)

    prior = Gaussian(Sigma=eye(2) * 100)
    num_estimates = 1000

    home = expanduser("~")
    folder = os.sep.join([home, "sample_ozone_posterior_rr_sge"])

    # cluster admin set up the "jump" project for me to allocate nodes exclusively;
    # to use it, set this prefix to "#$ -P jump"
    parameter_prefix = ""

    cluster_parameters = BatchClusterParameters(
        foldername=folder,
        memory=7.8,
        loglevel=logging.DEBUG,
        parameter_prefix=parameter_prefix,
        max_walltime=60 * 60 * 24 - 1)

    computation_engine = SGEComputationEngine(cluster_parameters,
                                              check_interval=10)

    rr_instance = RussianRoulette(1e-3, block_size=400)

    posterior = OzonePosteriorRREngine(rr_instance=rr_instance,
                                       computation_engine=computation_engine,
                                       num_estimates=num_estimates,
                                       prior=prior)

    posterior.logdet_method = "shogun_estimate"

    proposal_cov = diag([4.000000000000000e-05, 1.072091680000000e+02])
    mcmc_sampler = StandardMetropolis(posterior, scale=1.0, cov=proposal_cov)

    start = asarray([-11.55, -10.1])
    mcmc_params = MCMCParams(start=start, num_iterations=5000)
    chain = MCMCChain(mcmc_sampler, mcmc_params)

    #    chain.append_mcmc_output(PlottingOutput(None, plot_from=1, lag=1))
    chain.append_mcmc_output(StatisticsOutput(print_from=1, lag=1))

    store_chain_output = StoreChainOutput(folder, lag=1)
    chain.append_mcmc_output(store_chain_output)

    loaded = store_chain_output.load_last_stored_chain()
    if loaded is None:
        logging.info("Running chain from scratch")
    else:
        logging.info("Running chain from iteration %d" % loaded.iteration)
        chain = loaded

    chain.run()

    # pickle the final chain; binary mode is required for pickle data
    with open(folder + os.sep + "final_chain", "wb") as f:
        dump(chain, f)
Example #9
def main():
    Log.set_loglevel(logging.DEBUG)

    modulename = "sample_ozone_posterior_average_slurm"
    if not FileSystem.cmd_exists("sbatch"):
        engine = SerialComputationEngine()
    else:
        johns_slurm_hack = "#SBATCH --partition=intel-ivy,wrkstn,compute"
        johns_slurm_hack = "#SBATCH --partition=intel-ivy,compute"

        folder = os.sep + os.sep.join(["nfs", "data3", "ucabhst", modulename])
        batch_parameters = BatchClusterParameters(
            foldername=folder,
            max_walltime=24 * 60 * 60,
            resubmit_on_timeout=False,
            memory=3,
            parameter_prefix=johns_slurm_hack)
        engine = SlurmComputationEngine(batch_parameters,
                                        check_interval=1,
                                        do_clean_up=True)

    prior = Gaussian(Sigma=eye(2) * 100)
    num_estimates = 100

    posterior = OzonePosteriorAverageEngine(computation_engine=engine,
                                            num_estimates=num_estimates,
                                            prior=prior)
    posterior.logdet_method = "shogun_estimate"

    proposal_cov = diag([4.000000000000000e-05, 1.072091680000000e+02])
    mcmc_sampler = StandardMetropolis(posterior, scale=1.0, cov=proposal_cov)

    start = asarray([-11.35, -13.1])
    mcmc_params = MCMCParams(start=start, num_iterations=2000)
    chain = MCMCChain(mcmc_sampler, mcmc_params)

    chain.append_mcmc_output(StatisticsOutput(print_from=1, lag=1))

    home = expanduser("~")
    folder = os.sep.join([home, modulename])
    store_chain_output = StoreChainOutput(folder)
    chain.append_mcmc_output(store_chain_output)

    loaded = store_chain_output.load_last_stored_chain()
    if loaded is None:
        logging.info("Running chain from scratch")
    else:
        logging.info("Running chain from iteration %d" % loaded.iteration)
        chain = loaded

    chain.run()

    # pickle the final chain; binary mode is required for pickle data
    with open(folder + os.sep + "final_chain", "wb") as f:
        dump(chain, f)
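The FileSystem.cmd_exists("sbatch") guard at the top of this example decides between the Slurm engine and the serial fallback by checking whether the Slurm submit command is available. With the standard library alone, an equivalent check (Python 3) could look like this; shutil.which is a stock function, only the variable name is mine:

import shutil

# True if the Slurm submit command is on the PATH, i.e. we are on a Slurm cluster
sbatch_available = shutil.which("sbatch") is not None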
Example #10
def main():
    Log.set_loglevel(logging.DEBUG)

    prior = Gaussian(Sigma=eye(2) * 100)
    num_estimates = 2

    home = expanduser("~")
    folder = os.sep.join([home, "sample_ozone_posterior_rr_sge"])

    computation_engine = SerialComputationEngine()

    rr_instance = RussianRoulette(1e-3, block_size=10)

    posterior = OzonePosteriorRREngine(rr_instance=rr_instance,
                                       computation_engine=computation_engine,
                                       num_estimates=num_estimates,
                                       prior=prior)

    posterior.logdet_method = "shogun_estimate"

    proposal_cov = diag([4.000000000000000e-05, 1.072091680000000e+02])
    mcmc_sampler = StandardMetropolis(posterior, scale=1.0, cov=proposal_cov)

    start = asarray([-11.35, -13.1])
    mcmc_params = MCMCParams(start=start, num_iterations=200)
    chain = MCMCChain(mcmc_sampler, mcmc_params)

    #    chain.append_mcmc_output(PlottingOutput(None, plot_from=1, lag=1))
    chain.append_mcmc_output(StatisticsOutput(print_from=1, lag=1))

    store_chain_output = StoreChainOutput(folder, lag=50)
    chain.append_mcmc_output(store_chain_output)

    loaded = store_chain_output.load_last_stored_chain()
    if loaded is None:
        logging.info("Running chain from scratch")
    else:
        logging.info("Running chain from iteration %d" % loaded.iteration)
        chain = loaded

    chain.run()

    # pickle the final chain; binary mode is required for pickle data
    with open(folder + os.sep + "final_chain", "wb") as f:
        dump(chain, f)
Example #11
def main():
    distribution = Banana(dimension=2, bananicity=0.03, V=100.0)

    mcmc_sampler = StandardMetropolis(distribution)

    #     start = zeros(distribution.dimension)
    start = asarray([0., -2.])
    mcmc_params = MCMCParams(start=start, num_iterations=10000)
    chain = MCMCChain(mcmc_sampler, mcmc_params)

    chain.append_mcmc_output(StatisticsOutput(plot_times=True, lag=1000))
    #     chain.append_mcmc_output(PlottingOutput(distribution, plot_from=1, num_samples_plot=0,
    #                                             colour_by_likelihood=False))

    chain.run()
    f = open("std_metropolis_chain_gaussian.bin", 'w')
    dump(chain, f)
    f.close()
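To look at the sampled Banana target afterwards, the dumped chain can be re-loaded and its trajectory plotted. A rough sketch; the "samples" attribute name is an assumption about the chain object, not something stated in the example:

from pickle import load
import matplotlib.pyplot as plt

with open("std_metropolis_chain_gaussian.bin", "rb") as f:
    chain = load(f)

# assumed: the chain stores its visited states as a (num_iterations, dimension) array
samples = chain.samples
plt.plot(samples[:, 0], samples[:, 1], ".", markersize=1)
plt.show()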
Example #12
        mean_est = zeros(distribution.dimension, dtype="float64")
        cov_est = 1.0 * eye(distribution.dimension)
        cov_est[0, 0] = distribution.V
        mcmc_samplers.append(
            AdaptiveMetropolisLearnScale(distribution,
                                         mean_est=mean_est,
                                         cov_est=cov_est))
        mcmc_samplers.append(
            AdaptiveMetropolis(distribution,
                               mean_est=mean_est,
                               cov_est=cov_est))
        #
        #        num_eigen = distribution.dimension
        #        mcmc_samplers.append(AdaptiveMetropolisPCA(distribution, num_eigen=num_eigen, mean_est=mean_est, cov_est=cov_est))
        #
        mcmc_samplers.append(StandardMetropolis(distribution))

        start = zeros(distribution.dimension, dtype="float64")
        mcmc_params = MCMCParams(start=start,
                                 num_iterations=num_iterations,
                                 burnin=burnin)

        mcmc_chains = [
            MCMCChain(mcmc_sampler, mcmc_params)
            for mcmc_sampler in mcmc_samplers
        ]
        for mcmc_chain in mcmc_chains:
            mcmc_chain.append_mcmc_output(StatisticsOutput())

        experiments = [
            SingleChainExperiment(mcmc_chain, experiment_dir)
            for mcmc_chain in mcmc_chains
        ]
Example #13
    experiment_dir = str(os.path.abspath(sys.argv[0])).split(
        os.sep)[-1].split(".")[0] + os.sep

    distribution = Flower(amplitude=6,
                          frequency=6,
                          variance=1,
                          radius=10,
                          dimension=8)
    sigma = 5
    kernel = GaussianKernel(sigma=sigma)

    burnin = 60000
    num_iterations = 120000

    #mcmc_sampler = KameleonWindowLearnScale(distribution, kernel, stop_adapt=burnin)
    mean_est = zeros(distribution.dimension, dtype="float64")
    cov_est = 1.0 * eye(distribution.dimension)
    #mcmc_sampler = AdaptiveMetropolisLearnScale(distribution, mean_est=mean_est, cov_est=cov_est)
    #mcmc_sampler = AdaptiveMetropolis(distribution, mean_est=mean_est, cov_est=cov_est)
    mcmc_sampler = StandardMetropolis(distribution)

    start = zeros(distribution.dimension, dtype="float64")
    mcmc_params = MCMCParams(start=start,
                             num_iterations=num_iterations,
                             burnin=burnin)

    mcmc_chain = MCMCChain(mcmc_sampler, mcmc_params)
    mcmc_chain.append_mcmc_output(StatisticsOutput())

    experiment = SingleChainExperiment(mcmc_chain, experiment_dir)
    experiment.run()
Example #14
        num_iterations = 120000

        mcmc_samplers.append(
            KameleonWindowLearnScale(distribution, kernel, stop_adapt=burnin))

        mean_est = zeros(distribution.dimension, dtype="float64")
        cov_est = 1.0 * eye(distribution.dimension)
        mcmc_samplers.append(
            AdaptiveMetropolisLearnScale(distribution,
                                         mean_est=mean_est,
                                         cov_est=cov_est))
        mcmc_samplers.append(
            AdaptiveMetropolis(distribution,
                               mean_est=mean_est,
                               cov_est=cov_est))
        mcmc_samplers.append(StandardMetropolis(distribution, cov=cov_est))

        start = zeros(distribution.dimension, dtype="float64")
        mcmc_params = MCMCParams(start=start,
                                 num_iterations=num_iterations,
                                 burnin=burnin)

        mcmc_chains = [
            MCMCChain(mcmc_sampler, mcmc_params)
            for mcmc_sampler in mcmc_samplers
        ]
        for mcmc_chain in mcmc_chains:
            mcmc_chain.append_mcmc_output(StatisticsOutput())

        experiments = [
            SingleChainExperiment(mcmc_chain, experiment_dir)
            for mcmc_chain in mcmc_chains
        ]
Example #15
    dim = shape(data)[1]

    # prior on theta and posterior target estimate
    theta_prior = Gaussian(mu=0 * ones(dim), Sigma=eye(dim) * 5)
    target = PseudoMarginalHyperparameterDistributionDiffusion(
        data, labels, n_importance=100, prior=theta_prior, ridge=1e-3)

    # create sampler
    burnin = 50000
    num_iterations = burnin + 500000
    kernel = GaussianKernel(sigma=23.0)
    #     sampler=KameleonWindowLearnScale(target, kernel, stop_adapt=burnin)
    #    sampler=AdaptiveMetropolisLearnScale(target)
    sampler = StandardMetropolis(target)

    # posterior mode derived by initial tests
    start = zeros(target.dimension)
    params = MCMCParams(start=start,
                        num_iterations=num_iterations,
                        burnin=burnin)

    # create MCMC chain
    chain = MCMCChain(sampler, params)
    chain.append_mcmc_output(StatisticsOutput(print_from=0, lag=100))
    #chain.append_mcmc_output(PlottingOutput(plot_from=0, lag=100))

    # create experiment instance to store results
    experiment_dir = str(os.path.abspath(sys.argv[0])).split(
        os.sep)[-1].split(".")[0] + os.sep
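The final two lines derive the experiment output directory from the script's own filename. A small worked example (the path is hypothetical): with sys.argv[0] pointing at "/home/user/sample_hyperparameters.py", split(os.sep)[-1] keeps the filename, split(".")[0] strips the extension, and appending os.sep yields the relative folder name "sample_hyperparameters/".

import os
import sys

script_path = str(os.path.abspath(sys.argv[0]))  # e.g. "/home/user/sample_hyperparameters.py"
experiment_dir = script_path.split(os.sep)[-1].split(".")[0] + os.sep
print(experiment_dir)  # -> "sample_hyperparameters/" on POSIX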