def post_proc(state_file, work_dir):
    """Reload a saved EasyVVUQ campaign and post-process its results.

    Restores the campaign from *state_file*, copies the sample output back
    from the (remote) execution host via FabSim3, collates it, and applies
    a stochastic-collocation (SC) analysis.

    Parameters
    ----------
    state_file : str
        Path of the saved campaign state file.
    work_dir : str
        Working directory in which the campaign lives.

    Returns
    -------
    tuple
        (analysis results, SCAnalysis object, sampler, Campaign object).
    """
    # Restore the campaign from its persisted state.
    campaign = uq.Campaign(state_file=state_file, work_dir=work_dir)

    print('========================================================')
    print('Reloaded campaign', campaign.campaign_dir.split('/')[-1])
    print('========================================================')

    # Recover the sampler and QoI column names from the reloaded campaign.
    # NOTE(review): these are private Campaign attributes — confirm they
    # still exist in the installed EasyVVUQ version.
    sampler = campaign._active_sampler
    qoi_cols = campaign._active_app_decoder.output_columns

    # Fetch the results from the (remote) host via FabSim3.
    fab.get_uq_samples(campaign.campaign_dir, machine='localhost')

    # Collate the per-run output into a single data set.
    campaign.collate()

    # Stochastic-collocation post-processing of the collated data.
    analysis = uq.analysis.SCAnalysis(sampler=sampler, qoi_cols=qoi_cols)
    campaign.apply_analysis(analysis)
    results = campaign.get_last_analysis()
    results['n_samples'] = analysis._number_of_samples

    return results, analysis, sampler, campaign
############################################### fab.run_uq_ensemble(CONFIG, campaign.campaign_dir, script='ade', machine=MACHINE, PJ=PILOT_JOB) # wait for job to complete fab.wait(machine=MACHINE) # check if all output files are retrieved from the remote machine, returns a Boolean flag all_good = fab.verify(CONFIG, campaign.campaign_dir, TARGET_FILENAME, machine=MACHINE) if all_good: # copy the results from the FabSim results dir to the EasyVVUQ results dir fab.get_uq_samples(CONFIG, campaign.campaign_dir, sampler.n_samples, machine=MACHINE) else: print("Not all samples executed correctly") import sys sys.exit() ############################################# # All output files are present, decode them # ############################################# output_columns = ["u"] decoder = uq.decoders.SimpleCSV( target_filename=TARGET_FILENAME, output_columns=output_columns) actions = uq.actions.Actions(
# work_dir = home + "/VECMA/Campaigns/" work_dir = '/tmp' #Reload the campaign my_campaign = uq.Campaign(state_file = 'campaign_state.json', work_dir = work_dir) print('========================================================') print('Reloaded campaign', my_campaign.campaign_dir.split('/')[-1]) print('========================================================') #get sampler and output columns from my_campaign object my_sampler = my_campaign.get_active_sampler() output_columns = my_campaign._active_app_decoder.output_columns #fetch the results from the (remote) host via FabSim3 fab.get_uq_samples(my_campaign.campaign_dir, machine='localhost') #collate output my_campaign.collate() # Post-processing analysis sc_analysis = uq.analysis.SCAnalysis(sampler=my_sampler, qoi_cols=output_columns) my_campaign.apply_analysis(sc_analysis) results = my_campaign.get_last_analysis() results['n_samples'] = sc_analysis._number_of_samples mu_S = results['statistical_moments']['S']['mean'] std_S = results['statistical_moments']['S']['std'] mu_I = results['statistical_moments']['I']['mean'] std_I = results['statistical_moments']['I']['std'] mu_R = results['statistical_moments']['R']['mean']
# print(sampler._samples.shape) output_columns = campaign._active_app_decoder.output_columns #Manually specify a subset of the output QoIs, is faster #output_columns = ["IC_prev_avg_max", "IC_ex_max"] fab.verify(config, campaign.campaign_dir, campaign._active_app_decoder.target_filename, machine=machine, PJ=True) fab.get_uq_samples(config, campaign.campaign_dir, sampler.n_samples(), skip=0, machine=machine) # collate output campaign.collate() # get full dataset of data data = campaign.get_collation_result() # print(data) # Post-processing analysis qmc_analysis = uq.analysis.QMCAnalysis(sampler=sampler, qoi_cols=output_columns) campaign.apply_analysis(qmc_analysis) #manually execute analyse, such that we can supply output_index=-1, only using the last entry
def run_campaign(poly_order, work_dir='/tmp'):
    """Run a full EasyVVUQ stochastic-collocation campaign on the Sobol model.

    Sets up a fresh "sc" campaign, draws all SC samples, executes the
    ensemble via FabSim3 on the localhost, collates the output and computes
    the SC analysis.  The first-order Sobol indices of QoI "f" are written
    to ``<work_dir>/sobols/<campaign ID>/sobols.csv``.

    Parameters
    ----------
    poly_order : int
        Polynomial order of the stochastic-collocation sampler.
    work_dir : str, optional
        Directory in which the campaign and results are stored (default '/tmp').

    Returns
    -------
    tuple
        (analysis results, unique campaign ID string).
    """
    # Set up a fresh campaign called "sc".
    my_campaign = uq.Campaign(name='sc', work_dir=work_dir)

    # Define the parameter space of the model.
    params = {
        "x1": {"type": "float", "min": 0.0, "max": 1.0, "default": 0.5},
        "x2": {"type": "float", "min": 0.0, "max": 1.0, "default": 0.5},
        "out_file": {"type": "string", "default": "output.csv"}
    }

    output_filename = params["out_file"]["default"]
    output_columns = ["f"]

    # Create an encoder, decoder and collation element.
    encoder = uq.encoders.GenericEncoder(
        template_fname=HOME + '/sc/sobol.template',
        delimiter='$',
        target_filename='sobol_in.json')
    decoder = uq.decoders.SimpleCSV(target_filename=output_filename,
                                    output_columns=output_columns,
                                    header=0)
    collater = uq.collate.AggregateSamples(average=False)

    # Add the SC app (automatically set as current app).
    my_campaign.add_app(name="sc",
                        params=params,
                        encoder=encoder,
                        decoder=decoder,
                        collater=collater)

    # Input distributions for the sampler.
    vary = {"x1": cp.Uniform(0.0, 1.0), "x2": cp.Uniform(0.0, 1.0)}

    # SPARSE GRID PARAMETERS
    # - sparse = True: use a Smolyak sparse grid
    # - growth = True: use an exponential rule for the growth of the number
    #   of 1D collocation points per level.  Used to make e.g.
    #   Clenshaw-Curtis quadrature nested.
    my_sampler = uq.sampling.SCSampler(vary=vary,
                                       polynomial_order=poly_order,
                                       quadrature_rule="G",
                                       sparse=False,
                                       growth=False)

    # Associate the sampler with the campaign.
    my_campaign.set_sampler(my_sampler)

    print('Number of samples:', my_sampler.xi_d.shape[0])

    # Will draw all (of the finite set of) samples.
    my_campaign.draw_samples()
    my_campaign.populate_runs_dir()

    # Use this instead to run the samples using EasyVVUQ on the localhost:
    # my_campaign.apply_for_each_run_dir(uq.actions.ExecuteLocal(
    #     "sc/sobol_model.py sobol_in.json"))

    # Run the ensemble via FabSim3 and retrieve the sample output.
    fab.run_uq_ensemble(my_campaign.campaign_dir, 'sobol_test',
                        machine='localhost')
    fab.get_uq_samples(my_campaign.campaign_dir, machine='localhost')

    my_campaign.collate()

    # Post-processing: stochastic-collocation analysis.
    analysis = uq.analysis.SCAnalysis(sampler=my_sampler,
                                      qoi_cols=output_columns)
    my_campaign.apply_analysis(analysis)
    results = my_campaign.get_last_analysis()

    # The unique ID of this campaign.
    ID = my_campaign.campaign_dir.split('/')[-1]

    # Store the Sobol indices of each campaign under the same results root.
    # exist_ok=True replaces the racy "exists() == False" check-then-create.
    results_dir = os.path.join(work_dir, 'sobols', ID)
    os.makedirs(results_dir, exist_ok=True)

    # Store the 1st-order Sobol indices of QoI "f" to a CSV file.
    sobols = pd.DataFrame(results['sobols_first']['f'])
    sobols.to_csv(os.path.join(results_dir, 'sobols.csv'))

    return results, ID