def new_figure(self):
    printlog('New figure...')
    fig = plt.figure(figsize=(12, 8))
    fig.suptitle(self.modelDescription + ' (' + self.country + ')')
    if self.silentPlot:
        plt.ion()
    return fig
def optimize(self, populationSize, maxiter=1000):
    self.nSamples = 1
    self.e = korali.Experiment()

    self.e['Problem']['Type'] = 'Bayesian/Reference'
    self.e['Problem']['Likelihood Model'] = self.likelihoodModel
    self.e['Problem']['Reference Data'] = list(
        map(float, self.data['Model']['y-data']))
    self.e['Problem']['Computational Model'] = self.computational_model

    self.e['Solver']['Type'] = 'Optimizer/CMAES'
    self.e['Solver']['Population Size'] = populationSize
    self.e['Solver']['Termination Criteria']['Max Generations'] = maxiter
    self.e['Solver']['Termination Criteria']['Min Value Difference Threshold'] = 1e-9

    js = self.get_variables_and_distributions()
    self.set_variables_and_distributions(js)

    self.set_korali_output_files(self.saveInfo['korali samples'], maxiter)

    self.e['Console Output']['Verbosity'] = 'Detailed'
    if self.silent:
        self.e['Console Output']['Verbosity'] = 'Silent'

    k = korali.Engine()
    k['Conduit']['Type'] = 'Concurrent'
    k['Conduit']['Concurrent Jobs'] = self.nThreads
    k.run(self.e)

    printlog('Copy variables from Korali to Epidemics...')
    self.parameters = []
    myDatabase = self.e['Results']['Best Sample']['Parameters']
    for j in range(self.nParameters):
        self.parameters.append({})
        self.parameters[j]['Name'] = self.e['Variables'][j]['Name']
        self.parameters[j]['Values'] = np.asarray([myDatabase[j]])

    self.has_been_called['optimize'] = True
    self.has_been_called['propagate'] = False
    printlog('Done copying variables.')

    names = []
    best = []
    for j in range(self.nParameters):
        best.append(myDatabase[j])
        names.append(self.parameters[j]['Name'])

    js = {}
    js['Value'] = self.e['Results']['Best Sample']['F(x)']
    js['Parameter'] = best
    js['Names'] = names
    save_file(js, self.saveInfo['cmaes'], 'Optimum', fileType='json')
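# A minimal usage sketch for the CMAES path. The concrete subclass name
# `Model` and its constructor arguments are hypothetical and not defined in
# this file; only the method calls are:
#
#     model = Model(country='switzerland', nThreads=8)
#     model.optimize(populationSize=24, maxiter=500)
#     model.propagate(nPropagate=1)   # nSamples is 1 after optimize()
#
# The optimum is also written to saveInfo['cmaes'] as a JSON record with the
# keys 'Value' (best F(x)), 'Parameter' (best parameter vector) and 'Names'.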
def load_parameters(self, samples_path):
    printlog('Loading posterior samples from {}'.format(samples_path))

    with open(samples_path + '/latest') as json_file:
        data = json.load(json_file)

    if 'Sample Database' in data['Results']:
        samples = data['Results']['Sample Database']
    elif 'Posterior Sample Database' in data['Results']:
        samples = data['Results']['Posterior Sample Database']
    else:
        abort('No sample database found in {}/latest '
              '(expected "Sample Database" or "Posterior Sample Database").'.format(samples_path))

    variables = data['Variables']
    self.nParameters = len(variables)
    self.nSamples = len(samples)

    self.parameters = []
    for j in range(self.nParameters):
        self.parameters.append({})
        self.parameters[j]['Name'] = variables[j]['Name']
        self.parameters[j]['Values'] = np.asarray(
            [samples[k][j] for k in range(self.nSamples)])

    self.has_been_called['sample'] = True
    printlog('Loaded')
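# Sketch of restoring a finished run instead of re-sampling. The path below
# is a placeholder; `samples_path` must be a Korali output directory that
# contains a 'latest' result file (i.e. the directory previously passed to
# set_korali_output_files):
#
#     model.load_parameters('data/switzerland/_korali_samples')
#     model.propagate(nPropagate=500)
#
# After loading, has_been_called['sample'] is True, so propagate() treats the
# restored posterior samples as if sample() had just run.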
def sample(self, nSamples=1000, cov=0.4, maxiter=100):
    self.e = korali.Experiment()
    self.nSamples = nSamples

    self.e['Problem']['Type'] = 'Bayesian/Reference'
    self.e['Problem']['Likelihood Model'] = self.likelihoodModel
    self.e['Problem']['Reference Data'] = list(
        map(float, self.data['Model']['y-data']))
    self.e['Problem']['Computational Model'] = self.computational_model

    self.e['Solver']['Type'] = 'Sampler/TMCMC'
    self.e['Solver']['Version'] = self.sampler
    self.e['Solver']['Step Size'] = 0.1
    self.e['Solver']['Population Size'] = self.nSamples
    self.e['Solver']['Target Coefficient Of Variation'] = cov
    self.e['Solver']['Termination Criteria']['Max Generations'] = maxiter

    js = self.get_variables_and_distributions()
    self.set_variables_and_distributions(js)

    self.set_korali_output_files(self.saveInfo['korali samples'], maxiter)

    self.e['Console Output']['Verbosity'] = 'Detailed'
    if self.silent:
        self.e['Console Output']['Verbosity'] = 'Silent'

    k = korali.Engine()
    k['Conduit']['Type'] = 'Concurrent'
    k['Conduit']['Concurrent Jobs'] = self.nThreads
    k.run(self.e)

    js = {}
    js['Log Evidence'] = self.e['Solver']['LogEvidence']
    printlog(f"Log Evidence = {js['Log Evidence']}")
    save_file(js, self.saveInfo['evidence'], 'Log Evidence', fileType='json')

    printlog('Copy variables from Korali to Epidemics...')
    self.parameters = []
    myDatabase = self.e['Results']['Sample Database']
    for j in range(self.nParameters):
        self.parameters.append({})
        self.parameters[j]['Name'] = self.e['Variables'][j]['Name']
        self.parameters[j]['Values'] = np.asarray(
            [myDatabase[k][j] for k in range(self.nSamples)])

    self.has_been_called['sample'] = True
    self.has_been_called['propagate'] = False
    printlog('Done copying variables.')
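# Typical TMCMC workflow, as a sketch (the concrete subclass `Model` is a
# placeholder; only the method calls below are defined in this file):
#
#     model = Model(...)
#     model.sample(nSamples=2000, cov=0.4, maxiter=100)   # TMCMC posterior
#     model.propagate(nPropagate=1000)                    # push samples through the model
#
# Note that the TMCMC population size equals nSamples, so the sample database
# copied into self.parameters holds exactly nSamples values per parameter.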
        for k, m in mean_params.items()
    }

    js['intervals_params'].update({
        k: [(
            p,
            np.quantile(samples[k], 0.5 - p / 2),
            np.quantile(samples[k], 0.5 + p / 2),
        ) for p in percentages]
        for k in samples.dtype.names
    })

    for k, v in vv.items():
        t, mean, median, intervals = v
        js['x-axis'] = list(t)
        r = dict()
        r['Intervals'] = [{
            'Percentage': float(q[0]),
            'Low Interval': list(q[1]),
            'High Interval': list(q[2]),
        } for q in intervals]
        r['Mean'] = list(mean)
        r['Median'] = list(median)
        js[k] = r

    fn = os.path.join(dataFolder, 'intervals.json')
    printlog(f'Save intervals in: {fn}')
    with open(fn, 'w') as f:
        json.dump(js, f, indent=2, sort_keys=True)
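# Sketch of reading the intervals.json file written above. The variable key
# 'Daily Incidence' is an example; the actual keys are whatever `vv` contains:
#
#     with open(os.path.join(dataFolder, 'intervals.json')) as f:
#         js = json.load(f)
#     t = js['x-axis']
#     mean = js['Daily Incidence']['Mean']
#     for q in js['Daily Incidence']['Intervals']:
#         print(q['Percentage'], q['Low Interval'][0], q['High Interval'][0])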
def compute_plot_intervals(self, varName, ns, ax, ylabel, cumulate=-1):
    Np = self.propagatedVariables[varName].shape[0]
    Nt = self.propagatedVariables[varName].shape[1]

    samples = np.zeros((Np * ns, Nt))

    start = time.process_time()
    printlog(
        f"Sampling from {self.likelihoodModel} for '{varName}' variable... ",
        end='',
        flush=True)

    if self.likelihoodModel == 'Normal':
        for k in range(Nt):
            m = self.propagatedVariables[varName][:, k]
            r = self.propagatedVariables['Standard Deviation {0}'.format(varName)][:, k]
            x = [np.random.normal(m, r) for _ in range(ns)]
            samples[:, k] = np.asarray(x).flatten()
    elif self.likelihoodModel == 'Positive Normal':
        for k in range(Nt):
            m = self.propagatedVariables[varName][:, k]
            s = self.propagatedVariables['Standard Deviation {0}'.format(varName)][:, k]
            t = get_truncated_normal(m, s, 0, np.inf)
            x = [t.rvs() for _ in range(ns)]
            samples[:, k] = np.asarray(x).flatten()
    elif self.likelihoodModel == 'StudentT':
        for k in range(Nt):
            m = self.propagatedVariables[varName][:, k]
            dof = self.propagatedVariables['Degrees Of Freedom {0}'.format(varName)][:, k]
            x = [m + np.random.standard_t(dof) for _ in range(ns)]
            samples[:, k] = np.asarray(x).flatten()
    elif self.likelihoodModel == 'Positive StudentT':
        for k in range(Nt):
            m = self.propagatedVariables[varName][:, k]
            dof = self.propagatedVariables['Degrees Of Freedom {0}'.format(varName)][:, k]
            x = [positive_standard_t(m, dof) for _ in range(ns)]
            samples[:, k] = np.asarray(x).flatten()
    elif self.likelihoodModel == 'Poisson':
        for k in range(Nt):
            m = self.propagatedVariables[varName][:, k]
            try:
                x = [np.random.poisson(m) for _ in range(ns)]
            except:
                printlog('Error m: {}'.format(m))
            samples[:, k] = np.asarray(x).flatten()
    elif self.likelihoodModel == 'Geometric':
        for k in range(Nt):
            m = self.propagatedVariables[varName][:, k]
            p = 1.0 / (1.0 + m)
            try:
                x = [np.random.geometric(p) - 1 for _ in range(ns)]
            except:
                printlog('Error p: {}'.format(p))
            samples[:, k] = np.asarray(x).flatten()
    elif self.likelihoodModel == 'Negative Binomial':
        for k in range(Nt):
            m = self.propagatedVariables[varName][:, k]
            r = self.propagatedVariables['Dispersion {0}'.format(varName)][:, k]
            p = m / (m + r)
            try:
                x = [np.random.negative_binomial(r, 1 - p) for _ in range(ns)]
            except:
                printlog('Error p: {}'.format(p))
            samples[:, k] = np.asarray(x).flatten()
    else:
        abort('Likelihood not found in compute_plot_intervals.')

    if cumulate > 0:
        samples = np.cumsum(samples, axis=cumulate)

    elapsed = time.process_time() - start
    printlog(f' elapsed {elapsed:.2f} sec')

    printlog('Computing quantiles...')
    mean = np.zeros((Nt, 1))
    median = np.zeros((Nt, 1))
    for k in range(Nt):
        median[k] = np.quantile(samples[:, k], 0.5)
        mean[k] = np.mean(samples[:, k])

    for p in np.sort(self.percentages)[::-1]:
        q1 = np.zeros((Nt,))
        q2 = np.zeros((Nt,))
        for k in range(Nt):
            q1[k] = np.quantile(samples[:, k], 0.5 - p / 2)
            q2[k] = np.quantile(samples[:, k], 0.5 + p / 2)
        ax.fill_between(self.data['Propagation']['x-data'],
                        q1,
                        q2,
                        alpha=0.5,
                        label=f'{100 * p:.1f}% credible interval')

    ax.plot(self.data['Propagation']['x-data'],
            mean,
            '-',
            lw=2,
            label='Mean',
            color='black')
    ax.plot(self.data['Propagation']['x-data'],
            median,
            '--',
            lw=2,
            label='Median',
            color='black')

    ax.legend(loc='upper left')
    ax.set_ylabel(ylabel)
    x = range(np.ceil(max(self.data['Propagation']['x-data']) + 1).astype(int))
    ax.set_xticks(x[0:-1:14])
    ax.grid()
    ax.set_xlim(left=x[1])

    if self.logPlot and cumulate < 1:
        ax.set_yscale('log')
        ax.set_ylim(bottom=1e-1)

    plt.draw()
    plt.pause(0.001)
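# Note on the 'Negative Binomial' branch above: with mean m and dispersion r,
# the code sets p = m / (m + r) and draws np.random.negative_binomial(r, 1 - p).
# NumPy's negative_binomial(n, q) counts failures before n successes with
# success probability q, so its mean is n * (1 - q) / q; with n = r and
# q = 1 - p this gives mean m and variance m + m**2 / r, i.e. the usual
# mean/dispersion parameterization. A quick standalone check (not part of
# this class):
#
#     m, r = 50.0, 5.0
#     draws = np.random.negative_binomial(r, 1 - m / (m + r), size=100000)
#     print(draws.mean(), draws.var())   # approx. 50 and 50 + 50**2 / 5 = 550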
def propagate(self, nPropagate=1000):
    if not self.has_been_called['sample'] and not self.has_been_called['optimize']:
        abort('[Error] Sample or Optimize before propagation')
        return

    self.e = korali.Experiment()
    self.nPropagate = nPropagate

    self.e['Problem']['Type'] = 'Propagation'
    self.e['Problem']['Execution Model'] = self.computational_model_propagate

    for k in range(self.nParameters):
        self.e['Variables'][k]['Name'] = self.parameters[k]['Name']
        self.e['Variables'][k]['Precomputed Values'] = self.parameters[k]['Values'].tolist()

    self.e['Solver']['Type'] = 'Executor'

    self.set_korali_output_files(self.saveInfo['korali propagation'])

    if self.silent:
        self.e['Console Output']['Verbosity'] = 'Silent'

    self.e['Store Sample Information'] = True

    k = korali.Engine()
    k.run(self.e)

    propagate_idx = random.sample(range(self.nSamples), nPropagate)

    Nv = self.e['Samples'][0]['Saved Results']['Number of Variables']
    Nt = self.e['Samples'][0]['Saved Results']['Length of Variables']

    varNames = []
    for k in range(Nv):
        varNames.append(self.e['Samples'][0]['Saved Results']['Variables'][k]['Name'])

    printlog('Copy variables from Korali to Epidemics...')
    self.propagatedVariables = {}
    for i, x in enumerate(varNames):
        self.propagatedVariables[x] = np.zeros((nPropagate, Nt))
        for k, idx in enumerate(propagate_idx):
            self.propagatedVariables[x][k] = np.asarray(
                self.e['Samples'][idx]['Saved Results']['Variables'][i]['Values'])

    varNames = []
    if self.likelihoodModel in ('Normal', 'Positive Normal'):
        if self.useInfections:
            varNames.append('Standard Deviation Daily Incidence')
        if self.useDeaths:
            varNames.append('Standard Deviation Daily Deaths')
    elif self.likelihoodModel in ('StudentT', 'Positive StudentT'):
        if self.useInfections:
            varNames.append('Degrees Of Freedom Daily Incidence')
        if self.useDeaths:
            varNames.append('Degrees Of Freedom Daily Deaths')
    elif self.likelihoodModel == 'Poisson':
        pass
    elif self.likelihoodModel == 'Geometric':
        pass
    elif self.likelihoodModel == 'Negative Binomial':
        if self.useInfections:
            varNames.append('Dispersion Daily Incidence')
        if self.useDeaths:
            varNames.append('Dispersion Daily Deaths')
    else:
        abort('Likelihood not found in propagate.')

    for varName in varNames:
        self.propagatedVariables[varName] = np.zeros((nPropagate, Nt))
        for k in range(nPropagate):
            self.propagatedVariables[varName][k] = np.asarray(
                self.e['Samples'][k]['Saved Results'][varName])

    printlog('Done copying variables.')

    # TODO clear variable?
    self.e = korali.Experiment()

    self.has_been_called['propagate'] = True
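# After propagate(), self.propagatedVariables maps each saved variable name
# to an (nPropagate, Nt) array: one model trajectory per retained posterior
# sample, plus the per-variable nuisance quantities (standard deviation,
# degrees of freedom or dispersion) required by the likelihood. Sketch
# (the variable name 'Daily Incidence' is an example):
#
#     traj = model.propagatedVariables['Daily Incidence']   # shape (nPropagate, Nt)
#     print(traj.mean(axis=0))                               # pointwise mean prediction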
def sample_knested(self,
                   nLiveSamples=1500,
                   freq=1500,
                   maxiter=1e9,
                   dlogz=0.1,
                   batch=1):
    self.e = korali.Experiment()

    self.e['Problem']['Type'] = 'Bayesian/Reference'
    self.e['Problem']['Likelihood Model'] = self.likelihoodModel
    self.e['Problem']['Reference Data'] = list(
        map(float, self.data['Model']['y-data']))
    self.e['Problem']['Computational Model'] = self.computational_model

    self.e['Solver']['Type'] = 'Sampler/Nested'
    self.e['Solver']['Resampling Method'] = 'Multi Ellipse'
    self.e['Solver']['Number Live Points'] = nLiveSamples
    self.e['Solver']['Proposal Update Frequency'] = freq
    self.e['Solver']['Ellipsoidal Scaling'] = 1.10
    self.e['Solver']['Batch Size'] = batch
    self.e['Solver']['Termination Criteria']['Max Generations'] = maxiter
    self.e['Solver']['Termination Criteria']['Min Log Evidence Delta'] = dlogz
    self.e['Solver']['Termination Criteria']['Max Effective Sample Size'] = 25000

    js = self.get_variables_and_distributions()
    self.set_variables_and_distributions(js)

    self.set_korali_output_files(self.saveInfo['korali samples'], maxiter)

    self.e['Console Output']['Verbosity'] = 'Detailed'
    self.e['Console Output']['Frequency'] = 100
    if self.silent:
        self.e['Console Output']['Verbosity'] = 'Silent'

    k = korali.Engine()
    k['Conduit']['Type'] = 'Concurrent'
    k['Conduit']['Concurrent Jobs'] = self.nThreads
    k.run(self.e)

    js = {}
    js['Log Evidence'] = self.e['Solver']['LogEvidence']
    js['Error'] = self.e['Solver']['LogEvidence Var']
    printlog(f"Log Evidence = {js['Log Evidence']}")
    printlog(f"Variance = {js['Error']}")
    save_file(js, self.saveInfo['evidence'], 'Log Evidence', fileType='json')

    printlog('Copy variables from Korali to Epidemics...')
    myDatabase = self.e['Results']['Posterior Sample Database']
    self.nSamples, _ = np.shape(myDatabase)

    self.parameters = []
    for j in range(self.nParameters):
        self.parameters.append({})
        self.parameters[j]['Name'] = self.e['Variables'][j]['Name']
        self.parameters[j]['Values'] = np.asarray(
            [myDatabase[k][j] for k in range(self.nSamples)])

    self.has_been_called['sample'] = True
    self.has_been_called['propagate'] = False
    printlog('Done copying variables.')
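# Nested sampling returns the log evidence directly, which allows model
# comparison via Bayes factors. Sketch (model_a and model_b are hypothetical
# instances of two competing subclasses):
#
#     model_a.sample_knested(nLiveSamples=1500, dlogz=0.1)
#     model_b.sample_knested(nLiveSamples=1500, dlogz=0.1)
#     # Both runs write {'Log Evidence': ..., 'Error': ...} to saveInfo['evidence'];
#     # the difference of the two log evidences is the log Bayes factor.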