# Tail of an optimizer-config dict (opened outside this chunk):
# binomial likelihood with nb_meas shots, Y left un-normalized.
'likelihood': 'Binomial_' + str(nb_meas), 'normalize_Y': False })

# Two constrained variants of the binomial optimizer config.
# NOTE(review): the strings look like '<type>_<value>_<lo>_<hi>' specs
# (max step 0.4 / smoothness 0.1 on params bounded in [0, 1]) — confirm
# against the project's constraint parser.
optim_b_max_step = copy.copy(optim_b)
optim_b_max_step['constraints'] = 'step_0.4_0_1'
optim_b_max_smooth = copy.copy(optim_b)
optim_b_max_smooth['constraints'] = 'smoothlin_0.1_0_1'

# ==================================================================
# Without constraints
# with binomial observations ()
# #===================================================================
if (do_base):
    # perfect measurement
    optim = Learner.learner_Opt(model=model_base, **optim_base)
    res_base = optim(track_learning=True)
    p_base = res_base['params']          # best parameters found
    p_base_exp = res_base['params_exp']  # params of the best *expected* value
    func_base.theta = p_base             # load optimum into the control function
    func_base.plot_function(x_plot)
    model_base(p_base)                   # re-evaluate model at the optimum (side effect: updates model state)

if (do_proj10):
    # binomial measurement, gaussian likelihood
    optim = Learner.learner_Opt(model=model_proj10, **optim_base)
    res_proj10 = optim(track_learning=True)
    p_proj10 = res_proj10['params']
    p_proj10_exp = res_proj10['params_exp']
    func_base.theta = p_proj10
    func_base.plot_function(x_plot)
} dico_simul = learner1DBH._process_controler(dico_simul) dico_simul['control_obj'] = learner1DBH._build_control_from_string( dico_simul['control_obj'], None, context_dico=dico_simul) model = bh1d.BH1D(**dico_simul) if (optim_type == 'BO2'): #BO optim_args = { 'algo': 'BO2', 'maxiter': 30, 'num_cores': 1, 'init_obj': 15, 'acq': 'EI' } optim = Learner.learner_Opt(model=model, **optim_args) resBO2 = optim(track_learning=True) resBO2['last_func'] = model.control_fun print(resBO2) res = resBO2 resBO2['opt_more'] if (optim_type == 'DE'): optim_args = {'algo': 'DE', 'popsize': 5, 'maxiter': 75} optim = Learner.learner_Opt(model=model, **optim_args) resDE = optim() print(resDE) res = resDE if (optim_type == 'BO'): optim_args = {'algo': 'BO', 'maxiter': 250}
'state_init': 'GS_i', 'state_tgt': 'GS_inf', 'fom': fom, 'fom_print': True, 'track_learning': True, 'ctl_shortcut': 'owbds01_pwl15' } dico_simul = learner1DBH._process_controler(dico_simul) dico_simul['control_obj'] = learner1DBH._build_control_from_string( dico_simul['control_obj'], None, context_dico=dico_simul) model = bh1d.BH1D(**dico_simul) try: func_used = pFunc_base.pFunc_base.read_func_from_file("SFtoMI_0") except: optim = Learner.learner_Opt(model=model, **optim_args) resBO2 = optim(track_learning=True) resBO2['last_func'] = model.control_fun res = resBO2 func_used = model.control_fun func_used.theta = res['params'] if (save): func_used.save_to_file("SFtoMI_0") #Testing fom_test = fom + ['f2t2', 'fluence', 'smooth', 'varN'] dico_test = copy.copy(dico_simul) dico_test['fom'] = fom_test dico_test['track_learning'] = False model_test = bh1d.BH1D(**dico_test) optim_params = func_used.theta
# Tail of the ground-state optimization config dict (opened outside this
# chunk); kblock/pblock restrict to the k=0, p=+1 symmetry sector —
# NOTE(review): confirm against BH1D's symmetry-block handling.
'state_init': 'GS_i', 'state_tgt': 'GS_inf', 'fom': fom_GS, 'fom_print': True,
'track_learning': True, 'ctl_shortcut': 'owbds01_pwl15', 'kblock': 0, 'pblock': 1
}
dico_GS = learner1DBH._process_controler(dico_GS)
dico_GS['control_obj'] = learner1DBH._build_control_from_string(
    dico_GS['control_obj'], None, context_dico=dico_GS)
model_GS = bh1d.BH1D(**dico_GS)

# Optimize and inspect the result.
optim_GS = Learner.learner_Opt(model=model_GS, **optim_args)
res_GS = optim_GS(track_learning=True)
# Plot the optimized control slightly beyond [0, T] to show the endpoints.
model_GS.control_fun.plot_function(np.arange(-0.01, T + 0.01, 0.01))
# Adiabatic population evolution over the 2 lowest eigenstates.
# NOTE(review): state_tmp is not used in this chunk — may be consumed later.
state_tmp = model_GS.EvolutionPopAdiab(nb_ev=2)
model_GS.plot_pop_adiab(plot_gap=True)

#==============================================================================
# Try to reach ES at the end
#==============================================================================
# FOM: projection on a symmetry subspace, penalized by fluence and smoothness.
fom_firstE = ['projSS:neg_fluence:0.0001_smooth:0.05']
# Config dict is cut off at the end of this chunk (continues elsewhere).
dico_firstE = {
    'L': 2, 'Nb': 2, 'mu': 0, 'T': T, 'dt': 0.01,
# is there a better way #=================================================================== optim_with_noise = True optim_ideal = True optim_with_noise_custom = True optim_args = { 'algo': 'BO2', 'maxiter': 50, 'num_cores': 4, 'init_obj': 25, 'acq': 'EI' } func_test = dico_no_noise['control_obj'] if (optim_with_noise): optim = Learner.learner_Opt(model=model_ensemble, **optim_args) res_ensemble = optim(track_learning=True) params_noise = res_ensemble['params'] params_noise_exp = res_ensemble['params_exp'] if (optim_with_noise_custom): optim = Learner.learner_Opt(model=model_ensemble_custom, **optim_args) res_ensemble_custom = optim(track_learning=True) params_noise_custom = res_ensemble_custom['params'] params_noise_custom_exp = res_ensemble_custom['params_exp'] if (optim_ideal): optim = Learner.learner_Opt(model=model_no_noise, **optim_args) res_no_noise = optim(track_learning=True) params_no_noise = res_no_noise['params']