#    1.00000000e-16, 1.00000000e-16,
#    5.00000000e-14, 5.00000000e-16,
#    5.00000000e-14, 5.00000000e-16,
#    5.00000000e-14, 3.00000000e-14,
#    5.00000000e-15, 5.00000000e-15,
#    5.00000000e-14, 5.00000000e-16,
#    5.00000000e-14, 5.00000000e-16]))
#
#process_model_coarse = GC.GeoModel(name='test_process_model_kericini',
#    datfile_name='input-files/kericini/coarse-model/Keriv0_027',
#    incon_name='input-files/kericini/coarse-model/Keriv0_027',
#    geom_name='input-files/kericini/coarse-model/gKerinci_v0',
#    islayered=True)

bmodel = IC.BayesModel(name='test_bayes_model',
                       process_model=process_model_coarse)

#---test calculate model run time
start = timeit.default_timer()
bmodel.process_model.simulate(do_update_obs_wells=False)
stop = timeit.default_timer()
print('time to run model (s): ')
print(stop - start)

#sflat = pickle.load(
#    open("./saved_data/sampler_flatchain_test_process_model_coarse.p", "rb"))
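# NOTE (added sketch, not part of the original script): a single timed run can
# be noisy; one option is to repeat the timing block above and average. The
# flag `time_repeated_runs` and the count `n_timing_runs` are hypothetical
# names added for illustration only.
time_repeated_runs = False
if time_repeated_runs:
    n_timing_runs = 3
    run_times = []
    for _ in range(n_timing_runs):
        t0 = timeit.default_timer()
        bmodel.process_model.simulate(do_update_obs_wells=False)
        run_times.append(timeit.default_timer() - t0)
    print('mean time to run model (s): ', sum(run_times) / n_timing_runs)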
else:
    synthetic_data = pickle.load(open("./saved_data/synthetic_data.p", "rb"))
    synthetic_model_fine.ss_temps = synthetic_data['T_measured']
    #syn_model.ss_temps = syn_model.T_measured
    synthetic_model_fine.T_noise = synthetic_data['T_noise']
    synthetic_model_fine.ss_temps_obs_well = synthetic_data['T_obs_well']
    synthetic_model_fine.d_obs_well = synthetic_data['d_obs_well']

#synthetic_data = GC.GeoModel(name='test_synthetic_data',
#    datfile_name='input-files/elvar-new-tighter-cap/fine-model/2DF002',
#    incon_name='input-files/elvar-new-tighter-cap/fine-model/2DF002_IC',
#    geom_name='input-files/elvar-new-tighter-cap/fine-model/g2fine',
#    islayered=True)

#---create a parameter model
parameter_space = IC.ParameterSpace(mu=-15, sigma=1.5)

#----create a process space. Needed for predictive checks.
process_space = IC.ProcessSpace()

#---create or load comparison space model (basis of likelihood function), pre-discrepancy
measurement_space = IC.MeasurementSpace(bias=0.0, sigma=5.0)  # without discrepancy

#-----set up discrepancy info
discrepancy_filename = 'discrepancies_combined.p'
load_discrepancy = True
map_coarse_discrep_to_data_grid = False  # default should be False?
if load_discrepancy:
#---load real data of appropriate resolution and store it in the data model above.
real_data_model.d_obs_well = {}
real_data_model.ss_temps_obs_well = {}
for i, welli in enumerate(list_of_obs_wells):
    df = pd.read_csv('./saved_data/kerinci_data/Temp_' + welli + '.dat',
                     header=None, sep=' ')
    df.rename(columns={1: 'd', 0: 'T'}, inplace=True)
    real_data_model.d_obs_well[i] = df['d']
    real_data_model.ss_temps_obs_well[i] = df['T']

#---create a basic comparison model (basis of likelihood function)
measurement_space = IC.MeasurementSpace(bias=0.0, sigma=5.0)

#---create a parameter model
parameter_space = IC.ParameterSpace(mu=-15, sigma=1.5)

#-----create a basic process space model
process_space = IC.ProcessSpace()

#---create a Bayes model
#use pro_model_coarse for coarse, pro_model_medium for medium
#bmodel = IC.BayesModel(name='test_bayes_model',
#    process_model=process_model_medium,
#    data_model=synthetic_model_fine,
#    comparison_model=comparison_model,
#    parameter_model=parameter_model)
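# NOTE (added sketch, not part of the original script): a quick visual sanity
# check of the well data loaded above. The flag `plot_well_profiles` and the
# use of matplotlib are assumptions; only the dictionaries built in the loop
# above are relied on.
plot_well_profiles = False
if plot_well_profiles:
    import matplotlib.pyplot as plt
    for i, welli in enumerate(list_of_obs_wells):
        plt.plot(real_data_model.ss_temps_obs_well[i],
                 real_data_model.d_obs_well[i], label=welli)
    plt.xlabel('temperature')
    plt.ylabel('depth (column 1 of the Temp_*.dat files)')
    plt.legend()
    plt.show()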
real_data_model.d_obs_well = {}
real_data_model.ss_temps_obs_well = {}
for i, welli in enumerate(list_of_obs_wells):
    df = pd.read_csv('./saved_data/kerinci_data/Temp_' + welli + '.dat',
                     header=None, sep=' ')
    df.rename(columns={1: 'd', 0: 'T'}, inplace=True)
    real_data_model.d_obs_well[i] = df['d']
    real_data_model.ss_temps_obs_well[i] = df['T']

#---create a basic comparison model (basis of likelihood function)
measurement_space = IC.MeasurementSpace(bias=0.0, sigma=10.0)

#---create a parameter model
parameter_space = IC.ParameterSpace(mu=-15, sigma=1.5)

#-----create a basic process space model
process_space = IC.ProcessSpace()

#-----set up discrepancy info
load_discrepancy = True
map_coarse_discrep_to_data_grid = False  # default should be False?
#discrepancy_filename = 'discrepancies_combined_kerinci.p'
discrepancy_filename = 'discrepancies_combined_kerinci_map_data.p'
if load_discrepancy:
    discrep = IC.ModelDiscrep(process_space=process_space,
else:
    synthetic_data = pickle.load(open("./saved_data/synthetic_data.p", "rb"))
    synthetic_model_fine.ss_temps = synthetic_data['T_measured']
    #syn_model.ss_temps = syn_model.T_measured
    synthetic_model_fine.T_noise = synthetic_data['T_noise']
    synthetic_model_fine.ss_temps_obs_well = synthetic_data['T_obs_well']
    synthetic_model_fine.d_obs_well = synthetic_data['d_obs_well']

#synthetic_data = GC.GeoModel(name='test_synthetic_data',
#    datfile_name='input-files/elvar-new-tighter-cap/fine-model/2DF002',
#    incon_name='input-files/elvar-new-tighter-cap/fine-model/2DF002_IC',
#    geom_name='input-files/elvar-new-tighter-cap/fine-model/g2fine',
#    islayered=True)

#---create a basic comparison model (basis of likelihood function)
measurement_space = IC.MeasurementSpace(bias=0.0, sigma=5.0)

#---create a parameter model
parameter_space = IC.ParameterSpace(mu=-15, sigma=1.5)

#----create a process space. Needed for predictive checks.
process_space = IC.ProcessSpace()

#use process_model_coarse for coarse, process_model_medium for medium.
bmodel = IC.BayesModel(name='test_bayes_model_med_fine_tighter_cap',
                       process_model=process_model_medium,
                       data_model=synthetic_model_fine,
                       measurement_space=measurement_space,
                       parameter_space=parameter_space,
                       process_space=process_space)
    perm_powers=perm_powers_truths)
synthetic_data_model_fine.simulate()

if generate_new_data:
    synthetic_data_model_fine.generate_synthetic_data(
        perm_powers_truths=perm_powers_truths)
else:
    synthetic_data = pickle.load(open("./saved_data/synthetic_data.p", "rb"))
    synthetic_data_model_fine.ss_temps = synthetic_data['T_measured']
    #syn_model.ss_temps = syn_model.T_measured
    synthetic_data_model_fine.T_noise = synthetic_data['T_noise']
    synthetic_data_model_fine.ss_temps_obs_well = synthetic_data['T_obs_well']
    synthetic_data_model_fine.d_obs_well = synthetic_data['d_obs_well']

#---create a basic comparison model (basis of likelihood function)
measurement_space = IC.MeasurementSpace(bias=0.0, sigma=5.0)

#---create a parameter model
parameter_space = IC.ParameterSpace(mu=-15, sigma=1.5)

#-----create a basic process space model
process_space = IC.ProcessSpace()

#---create a Bayes model
#use pro_model_coarse for coarse, pro_model_medium for medium
#bmodel = IC.BayesModel(name='test_bayes_model',
#    process_model=process_model_medium,
#    data_model=synthetic_data_model_fine,
#    comparison_model=comparison_model,
#    parameter_model=parameter_model)
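# NOTE (added sketch, not part of the original script): if new synthetic data
# are generated above, they could be persisted with the same keys that the
# pickle load in the else-branch expects. The flag `save_synthetic_data` is a
# hypothetical name; whether generate_synthetic_data already writes this file
# is not confirmed here.
save_synthetic_data = False
if save_synthetic_data and generate_new_data:
    pickle.dump({'T_measured': synthetic_data_model_fine.ss_temps,
                 'T_noise': synthetic_data_model_fine.T_noise,
                 'T_obs_well': synthetic_data_model_fine.ss_temps_obs_well,
                 'd_obs_well': synthetic_data_model_fine.d_obs_well},
                open("./saved_data/synthetic_data.p", "wb"))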
    name='test_process_model_fine',
    datfile_name='input-files/elvar-new-tighter-cap/fine-model/2DF002',
    incon_name='input-files/elvar-new-tighter-cap/fine-model/2DF002_IC',
    geom_name='input-files/elvar-new-tighter-cap/fine-model/g2fine',
    islayered=True)

#---fine synthetic model
synthetic_model_fine = GC.GeoModel(
    name='test_synthetic_model_fine',
    datfile_name='input-files/elvar-new-tighter-cap/fine-model/2DF002',
    incon_name='input-files/elvar-new-tighter-cap/fine-model/2DF002_IC',
    geom_name='input-files/elvar-new-tighter-cap/fine-model/g2fine',
    islayered=True)

#---create a basic comparison model (basis of likelihood function)
measurement_space = IC.MeasurementSpace(bias=0.0, sigma=5.0)

#just default/null process and parameter spaces required.
process_space = IC.ProcessSpace()
parameter_space = IC.ParameterSpace()

#sflat = pickle.load(
#    open("./saved_data/sampler_flatchain_test_process_model_coarse.p", "rb"))
sflat = pickle.load(
    open("./saved_data/sampler_flatchain_test_process_model_medium.p", "rb"))
param_sets = sflat
#param_sets = np.random.normal(loc=-15, scale=1.5, size=(30, 12))
#param_sets[:, 2:4] = -16
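# NOTE (added sketch, not part of the original script): if the full flatchain
# is too large for predictive checks, one option is to subsample it. The flag
# `subsample_flatchain` is a hypothetical name; numpy is assumed to be imported
# as `np`, consistent with the commented-out alternatives above.
subsample_flatchain = False
if subsample_flatchain:
    # draw 30 parameter sets without replacement (30 matches the size of the
    # commented-out random parameter sets above)
    idx = np.random.choice(sflat.shape[0], size=30, replace=False)
    param_sets = sflat[idx, :]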