sampler = uq.sampling.SCSampler(vary=vary, polynomial_order=4) # Associate the sampler with the campaign campaign.set_sampler(sampler) ############################### # execute the defined actions # ############################### campaign.execute().collate() ############################################### # run the UQ ensemble using FabSim3 interface # ############################################### fab.run_uq_ensemble(CONFIG, campaign.campaign_dir, script='ade', machine=MACHINE, PJ=PILOT_JOB) # wait for job to complete fab.wait(machine=MACHINE) # check if all output files are retrieved from the remote machine, returns a Boolean flag all_good = fab.verify(CONFIG, campaign.campaign_dir, TARGET_FILENAME, machine=MACHINE) if all_good: # copy the results from the FabSim results dir to the EasyVVUQ results dir fab.get_uq_samples(CONFIG, campaign.campaign_dir, sampler.n_samples, machine=MACHINE) else: print("Not all samples executed correctly") import sys
sampler = uq.sampling.MCSampler(vary=vary, n_mc_samples=2000) # Associate the sampler with the campaign campaign.set_sampler(sampler) # Will draw all (of the finite set of samples) campaign.draw_samples() campaign.populate_runs_dir() #Save the Campaign campaign.save_state("campaign_state_PO.json") # run the UQ ensemble fab.run_uq_ensemble(config, campaign.campaign_dir, script=script, machine=machine, PJ=True) #wait for job to complete # fab.wait(machine=machine) # #wait for jobs to complete and check if all output files are retrieved # #from the remote machine # fab.verify(config, campaign.campaign_dir, # campaign._active_app_decoder.target_filename, # machine=machine, PilotJob=True) # #run the UQ ensemble # fab.get_uq_samples(config, campaign.campaign_dir, sampler._n_samples, # skip=0, machine='eagle_vecma')
def run_sc_samples(work_dir):
    """Set up and launch the stochastic-collocation ensemble for the ocean app.

    Builds a fresh EasyVVUQ campaign in *work_dir*, wires up the encoder /
    decoder / collater, draws the full SC sample set, hands the run
    directories to FabSim for execution, and saves the campaign state.
    """
    # Fresh campaign rooted in the caller-supplied working directory.
    campaign = uq.Campaign(name='ocean', work_dir=work_dir)

    # Parameter space: two decay times plus the simulation output file name.
    params = {
        "decay_time_nu": {
            "type": "float",
            "min": 0.0,
            "max": 1000.0,
            "default": 5.0},
        "decay_time_mu": {
            "type": "float",
            "min": 0.0,
            "max": 1000.0,
            "default": 90.0},
        "out_file": {
            "type": "string",
            "default": "output.csv"}}

    out_file = params["out_file"]["default"]
    qoi_cols = ["E_mean", "Z_mean", "E_std", "Z_std"]

    # Elements that turn samples into input files and read results back.
    enc = uq.encoders.GenericEncoder(
        template_fname=HOME + '/sc/ocean.template',
        delimiter='$',
        target_filename='ocean_in.json')
    dec = uq.decoders.SimpleCSV(target_filename=out_file,
                                output_columns=qoi_cols,
                                header=0)
    col = uq.collate.AggregateSamples(average=False)

    # Register the app; add_app also makes it the campaign's current app.
    campaign.add_app(name="sc",
                     params=params,
                     encoder=enc,
                     decoder=dec,
                     collater=col)

    # Input distributions for the SC sampler.
    vary = {
        "decay_time_nu": cp.Uniform(1.0, 5.0),
        "decay_time_mu": cp.Uniform(85.0, 95.0)
    }
    sampler = uq.sampling.SCSampler(vary=vary, polynomial_order=2)
    campaign.set_sampler(sampler)

    # The SC sample set is finite: draw every point, create the run dirs.
    campaign.draw_samples()
    campaign.populate_runs_dir()

    # Hand the ensemble to FabSim for execution on the localhost.
    fab.run_uq_ensemble(campaign.campaign_dir, 'ocean', machine='localhost')

    # Persist the campaign so the analysis stage can restore it.
    campaign.save_state("campaign_state.json")
delimiter='$', target_filename='theta_initial_conditions.csv')
# Decoder reads the QoI columns back from the model's CSV output.
decoder = uq.decoders.SimpleCSV(target_filename=output_filename,
                                output_columns=output_columns,
                                header=0)
collater = uq.collate.AggregateSamples(average=False)

# Add the SC app (automatically set as current app)
my_campaign.add_app(name="sc",
                    params=params,
                    encoder=encoder,
                    decoder=decoder,
                    collater=collater)

# Create the sampler: normally distributed epidemiological inputs
# (incubation period and basic reproduction number r0).
vary = {"incubation": cp.Normal(5.2, 0.1),
        "r0": cp.Normal(2.5, 0.1)}

my_sampler = uq.sampling.SCSampler(vary=vary, polynomial_order=2)

# Associate the sampler with the campaign
my_campaign.set_sampler(my_sampler)

# Will draw all (of the finite set of samples)
my_campaign.draw_samples()
my_campaign.populate_runs_dir()

# Run execution using FabSim
fab.run_uq_ensemble(my_campaign.campaign_dir, 'corona', machine='localhost')

# Save the Campaign state for the analysis stage
my_campaign.save_state("campaign_state.json")
my_sampler = uq.sampling.SCSampler(vary=vary, polynomial_order=2, quadrature_rule="G", sparse=False, growth=False) # Associate the sampler with the campaign my_campaign.set_sampler(my_sampler) # Will draw all (of the finite set of samples) my_campaign.draw_samples() my_campaign.populate_runs_dir() ## Use this instead to run the samples using EasyVVUQ on the localhost #my_campaign.apply_for_each_run_dir(uq.actions.ExecuteLocal( # "sc_model.py ade_in.json")) fab.run_uq_ensemble(my_campaign.campaign_dir, script_name='ade', machine='localhost') fab.get_uq_samples(my_campaign.campaign_dir, machine='localhost') my_campaign.collate() # Post-processing analysis analysis = uq.analysis.SCAnalysis(sampler=my_sampler, qoi_cols=output_columns) my_campaign.apply_analysis(analysis) results = my_campaign.get_last_analysis() ################################### # Plot the moments and SC samples # ###################################
# Associate the sampler with the campaign campaign.set_sampler(sampler) ############################### # execute the defined actions # ############################### campaign.execute().collate() #################################### # Ensemble execution using FabSim3 # #################################### fab.run_uq_ensemble(CONFIG, campaign.campaign_dir, script='ohagan', machine=MACHINE, PJ=PILOT_JOB) # wait for job to complete fab.wait(machine=MACHINE) # check if all output files are retrieved from the remote machine, returns a Boolean flag all_good = fab.verify(CONFIG, campaign.campaign_dir, TARGET_FILENAME, machine=MACHINE) if all_good: # copy the results from the FabSim results dir to the EasyVVUQ results dir fab.get_uq_samples(CONFIG,
def run_campaign(poly_order, work_dir='/tmp'):
    """Run one stochastic-collocation campaign on the Sobol test function.

    Creates an EasyVVUQ campaign of the given polynomial order, executes
    the ensemble through FabSim on the localhost, performs the SC analysis,
    and writes the first-order Sobol indices of 'f' to
    <work_dir>/sobols/<campaign ID>/sobols.csv.

    Parameters
    ----------
    poly_order : int
        Polynomial order handed to the SCSampler.
    work_dir : str
        Directory in which the campaign and the results folder are created.

    Returns
    -------
    tuple
        (analysis results, unique campaign ID).
    """
    # Set up a fresh campaign called "sc"
    campaign = uq.Campaign(name='sc', work_dir=work_dir)

    # Two uncertain inputs on the unit interval plus the output file name.
    params = {
        "x1": {
            "type": "float",
            "min": 0.0,
            "max": 1.0,
            "default": 0.5
        },
        "x2": {
            "type": "float",
            "min": 0.0,
            "max": 1.0,
            "default": 0.5
        },
        "out_file": {
            "type": "string",
            "default": "output.csv"
        }
    }
    out_file = params["out_file"]["default"]
    qoi_cols = ["f"]

    # Encoder / decoder / collater wiring for the sobol test app.
    enc = uq.encoders.GenericEncoder(
        template_fname=HOME + '/sc/sobol.template',
        delimiter='$',
        target_filename='sobol_in.json')
    dec = uq.decoders.SimpleCSV(target_filename=out_file,
                                output_columns=qoi_cols,
                                header=0)
    col = uq.collate.AggregateSamples(average=False)

    # Register the app; add_app also makes it the campaign's current app.
    campaign.add_app(name="sc",
                     params=params,
                     encoder=enc,
                     decoder=dec,
                     collater=col)

    # Uniform input distributions for the sampler.
    vary = {"x1": cp.Uniform(0.0, 1.0), "x2": cp.Uniform(0.0, 1.0)}

    # SPARSE GRID PARAMETERS
    # ----------------------
    # - sparse = True: use a Smolyak sparse grid
    # - growth = True: use an exponential rule for the growth of the number
    #   of 1D collocation points per level. Used to make e.g.
    #   clenshaw-curtis quadrature nested.
    sampler = uq.sampling.SCSampler(vary=vary, polynomial_order=poly_order,
                                    quadrature_rule="G", sparse=False,
                                    growth=False)
    campaign.set_sampler(sampler)
    print('Number of samples:', sampler.xi_d.shape[0])

    # The SC sample set is finite: draw all of it and create the run dirs.
    campaign.draw_samples()
    campaign.populate_runs_dir()

    # Use this instead to run the samples using EasyVVUQ on the localhost
    # campaign.apply_for_each_run_dir(uq.actions.ExecuteLocal(
    #     "sc/sobol_model.py sobol_in.json"))

    # Execute the ensemble via FabSim and fetch the results, then collate.
    fab.run_uq_ensemble(campaign.campaign_dir, 'sobol_test',
                        machine='localhost')
    fab.get_uq_samples(campaign.campaign_dir, machine='localhost')
    campaign.collate()

    # Stochastic-collocation post-processing.
    analysis = uq.analysis.SCAnalysis(sampler=sampler, qoi_cols=qoi_cols)
    campaign.apply_analysis(analysis)
    results = campaign.get_last_analysis()

    # Unique ID of this campaign = last component of the campaign directory.
    ID = campaign.campaign_dir.split('/')[-1]

    # Every campaign stores its Sobol indices under the same parent folder.
    results_dir = work_dir + '/sobols/' + ID
    if not os.path.exists(results_dir):
        os.makedirs(results_dir)

    # Store the 1st-order Sobol indices of 'f' to a CSV file.
    sobols = pd.DataFrame(results['sobols_first']['f'])
    sobols.to_csv(results_dir + '/sobols.csv')

    return results, ID