class BayesOptParticleFiltering(BayesOptContinuous):
    """Bayesian optimisation of the Gompertz model parameters (r, sigma, tau).

    The objective is a particle-filter (pomp::pfilter) estimate of the
    log-likelihood, computed in R through an rpy2 anonymous package.  In
    "refine" mode the objective is rescaled to exp(loglik - max_loglkd) so the
    optimiser works on likelihood ratios around the best known value.
    """

    def __init__(self,
                 idx_simul,
                 low_b,
                 upp_b,
                 nb_it,
                 nb_ini,
                 refine,
                 load_save_flag,
                 max_loglkd=None):
        """Configure the optimiser and compile the R particle-filter helper.

        Parameters
        ----------
        idx_simul : int
            Index of the simulated dataset to load from ../simulations/.
        low_b, upp_b : array-like
            Lower/upper bounds of the 3-dimensional search space.
        nb_it : int
            Number of Bayesian-optimisation iterations.
        nb_ini : int
            Number of initial (random) samples.
        refine : bool
            Refinement mode: low-noise cLCB criterion on the rescaled
            likelihood; otherwise cEI on the raw log-likelihood.
        load_save_flag : int
            BayesOpt load/save mode; 2 together with refine resumes from a
            temporary log file.
        max_loglkd : float, optional
            Best known log-likelihood; required when ``refine`` is set.

        Raises
        ------
        ValueError
            If ``refine`` is set but ``max_loglkd`` is not provided.
        """
        if refine:
            # The rescaled objective exp(loglik - max_loglkd) needs the
            # reference value; fail fast instead of an AttributeError later.
            if max_loglkd is None:
                raise ValueError("max_loglkd is required when refine is set")
            self.max_loglkd = max_loglkd

        # R source: Gompertz state-space model + particle-filter likelihood
        # estimator (kept verbatim; executed via rpy2).
        string = """
		library(pomp)

		square <- function(x) {
		    return(x^2)
		}

		pf_r <- function(Y, r = 0.1, sigma = 0.1, tau = 0.1)
		{
			gompertz.proc.sim <- function(x, t, params, delta.t,...) {
			eps <- exp(rnorm(n = 1, mean = 0, sd = params["sigma"]))
			S <- exp(-params["r"] * delta.t)
			setNames(params["K"]^(1 - S) * x["X"]^S * eps, "X")
			}

			gompertz.meas.sim <- function(x, t, params, ...) {
			setNames(rlnorm(n = 1, meanlog = log(x["X"]), sd = params["tau"]), "Y")
			}

			gompertz.meas.dens <- function(y, x, t, params, log, ...) {
			dlnorm(x = y["Y"], meanlog = log(x["X"]), sdlog = params["tau"],
			log = log)
			}

			gompertz <- pomp(data = data.frame(time = 1:100, Y = Y), times = "time",
			rprocess = discrete.time.sim(step.fun = gompertz.proc.sim, delta.t = 1),
			rmeasure = gompertz.meas.sim, t0 = 0)

			# set parameters
			theta <- c(r = r, K = 1, sigma = sigma, tau = tau, X.0 = 1)

			############# INFERENCE #############

			 gompertz <- pomp(gompertz, dmeasure = gompertz.meas.dens)

			 # lkd estimator with pf
			 pf           <- pfilter(gompertz, params = theta, Np = 20)
			 loglik.truth <- logLik(pf)
			 return(loglik.truth)
		}
		"""
        self.n_dim = 3
        self.refine = refine
        self.idx_simul = idx_simul
        BayesOptContinuous.__init__(self, self.n_dim)
        self.powerpack = SignatureTranslatedAnonymousPackage(
            string, "powerpack")
        self.params = {}
        self.params['n_iterations'] = nb_it  # 200
        self.params['n_init_samples'] = nb_ini  # 100
        self.params['n_iter_relearn'] = 1
        self.params['verbose_level'] = 2
        if self.refine:
            # low noise + lower-confidence-bound criterion for local refinement
            self.params['noise'] = .01
            self.params['crit_name'] = "cLCB"
        else:
            # high noise + expected improvement for global exploration
            self.params['noise'] = 1
            self.params['crit_name'] = "cEI"
        self.params['l_type'] = "L_EMPIRICAL"
        self.params['mean.name'] = "mZero"
        self.params['l_all'] = True
        self.params['sc_type'] = "SC_MTL"
        self.params['surr_name'] = "sGaussianProcessML"
        self.params['load_save_flag'] = load_save_flag
        self.params['force_jump'] = 5
        # Save/load the optimiser state to the same log file; the refine run
        # with load_save_flag == 2 resumes from a temporary "_tmp" log.
        suffix = "_tmp" if load_save_flag == 2 and refine == 1 else ""
        log_name = "logs/bayesopt_{0}_N_20_refine{1}{2}.dat".format(
            idx_simul, refine * 1, suffix)
        self.params['save_filename'] = log_name
        self.params['load_filename'] = log_name
        self.params['kernel_name'] = "kMaternARD5"
        self.params['n_inner_iterations'] = 1000  # 500
        self.params['lower_bound'] = low_b
        self.params['upper_bound'] = upp_b
        self.lower_bound = self.params['lower_bound']
        self.upper_bound = self.params['upper_bound']
        self.ub = self.upper_bound
        self.lb = self.lower_bound
        # Observation series of length T=100 for this simulation index.
        self.Y = get_Y(path='../simulations/simulation{0}.csv'.format(
            self.idx_simul),
                       T=100)
        print('Rough likelihood estimate is  {0}'.format(
            self.powerpack.pf_r(robjects.FloatVector(self.Y), .1, .1, .1)[0]))

    def _to_param(self, x):
        # Map one optimiser coordinate to (0, 0.5] on a log10 scale:
        # .5 * (10**x - 1) / (10**1 - 1).
        return .5 * (10**x - 10**0.) / (10**1. - 10**0.)

    def evaluateSample(self, Xin):
        """Return the (negated) objective for one candidate point ``Xin``.

        Coordinates are mapped to model parameters via ``_to_param``; the
        particle-filter log-likelihood is negated because BayesOpt minimises.
        """
        r = self._to_param(Xin[0])
        sigma = self._to_param(Xin[1])
        tau = self._to_param(Xin[2])
        print('parameters are : {0}, {1}, {2}'.format(r, sigma, tau))
        if self.refine:
            # likelihood ratio w.r.t. the best known log-likelihood
            return -np.exp(
                self.powerpack.pf_r(robjects.FloatVector(self.Y), r, sigma,
                                    tau)[0] - self.max_loglkd)
        else:
            return -self.powerpack.pf_r(robjects.FloatVector(self.Y), r, sigma,
                                        tau)[0]
# ---- Esempio n. 2 (scraped example separator) ----
	 gompertz <- pomp(gompertz, dmeasure = gompertz.meas.dens)

	 # lkd estimator with pf
	 pf           <- pfilter(gompertz, params = theta, Np = 1000)
	 loglik.truth <- logLik(pf)
	 return(loglik.truth)
}
"""


def get_Y(path='arguments.csv', T=100):
    """Load a space-delimited observation series from a CSV file.

    The first row is treated as a header and skipped; all remaining fields
    are flattened into a single float array.

    Parameters
    ----------
    path : str
        CSV file with one header row followed by space-delimited values.
    T : int
        Expected number of observations.

    Returns
    -------
    numpy.ndarray
        Flat float array of length ``T``.

    Raises
    ------
    ValueError
        If the file does not contain exactly ``T`` values.
    """
    warnings.warn('Only one space for csv')
    output = []
    # Text mode with newline='' is the documented way to feed csv.reader
    # (binary mode breaks on Python 3); `with` guarantees the file is closed.
    with open(path, newline='') as csvfile:
        spamreader = csv.reader(csvfile, delimiter=' ', quotechar='|')
        next(spamreader)  # skip header row (Py2's .next() no longer exists)
        for line in spamreader:
            output += line
    # np.float was removed in NumPy >= 1.24; the builtin float is equivalent.
    output = np.asarray(output, dtype=float).ravel()
    if len(output) != T:
        # raise instead of assert: validation must survive `python -O`
        raise ValueError(
            'expected {0} observations, got {1}'.format(T, len(output)))
    return output


# Build the R helper package at module level so pf_r can be called directly.
# NOTE(review): `string` (the R source) is not defined in this chunk — it is
# presumably the R listing from the truncated example above; confirm before
# running this module standalone.
powerpack = SignatureTranslatedAnonymousPackage(string, "powerpack")

# Rough log-likelihood of the default dataset under the default parameters
# (pf_r defaults: r = sigma = tau = 0.1).
Y = get_Y()
lkd = powerpack.pf_r(robjects.FloatVector(Y))[0]
# rsum = robjects.r['square.R']
# r = robjects.r
# r.source()[0][0]
# ---- Esempio n. 3 (scraped example separator) ----
        np.asarray(
            powerpack.traj_r(
                'N_20/iterated_filtering_4{0}'.format(idx_simul)))[:, 4][:-1])
    trajectories_tau[1, idx_simul - 1] = np.exp(
        np.asarray(
            powerpack.traj_r(
                'N_20/iterated_filtering_4{0}'.format(idx_simul)))[:, 5][:-1])
    #indexes                            = np.asarray(get_traj_index('N_20/python_bayesopt{0}_4.dat'.format(idx_simul)))[:-1]
    trajectories_r[0, idx_simul - 1] = get_params(
        'N_20/python_bayesopt{0}_4.dat'.format(idx_simul))[0]
    trajectories_sig[0, idx_simul - 1] = get_params(
        'N_20/python_bayesopt{0}_4.dat'.format(idx_simul))[1]
    trajectories_tau[0, idx_simul - 1] = get_params(
        'N_20/python_bayesopt{0}_4.dat'.format(idx_simul))[2]
    likelihoods[0, idx_simul - 1] = powerpack.pf_r(
        robjects.FloatVector(Y), r, sigma, tau
    )[0]  # -info[0] # get_lkd('python_bayesopt_1{0}.dat'.format(idx_simul)) #
    likelihoods[1, idx_simul - 1] = powerpack.pf_r(
        robjects.FloatVector(Y), trajectories_r[1, idx_simul - 1, -1],
        trajectories_sig[1, idx_simul - 1,
                         -1], trajectories_tau[1, idx_simul - 1, -1]
    )[0]  #powerpack.res_r(idx_simul)[0] # powerpack.res_r(idx_simul)[0] #


def to_normalized_weights(logWeights):
    """Convert log-weights to normalised (sum-to-one) linear weights.

    Parameters
    ----------
    logWeights : array-like of float
        Unnormalised log-weights (e.g. particle-filter log-weights).

    Returns
    -------
    numpy.ndarray
        Weights proportional to exp(logWeights), summing to 1.
    """
    logw = np.asarray(logWeights, dtype=float)
    # Shift by the maximum before exponentiating (log-sum-exp trick) so large
    # log-weights do not overflow; vectorised instead of a per-element loop.
    weights = np.exp(logw - np.max(logw))
    return weights / weights.sum()


idx_sim = 1