# --- Corona (virsim) app: encoder/decoder wiring and sampler setup ---
# NOTE(review): this fragment relies on `params`, `HOME` and `campaign`
# defined earlier in the file (outside this view) -- confirm against caller.

# The decoder reads the CSV file whose name the app's params declare as default.
output_filename = params["out_file"]["default"]
# Quantities of interest extracted from the model output.
output_columns = ["IC_prev_avg_max", "IC_ex_max"]
# '$'-delimited placeholders in corona.template are substituted per run.
encoder = uq.encoders.GenericEncoder(template_fname=HOME + '/corona.template',
                                     delimiter='$',
                                     target_filename='corona_in.json')
decoder = uq.decoders.SimpleCSV(target_filename=output_filename,
                                output_columns=output_columns)
# Add the SC app (automatically set as current app)
campaign.add_app(name="sc",
                 params=params,
                 encoder=encoder,
                 decoder=decoder)
# Create the sampler: distributions for the uncertain inputs.
vary = {
    "seed": cp.DiscreteUniform(2**14, 2**16),
    "lockdown_effect": cp.Beta(alpha=14, beta=42),
    "phase_interval": cp.Gamma(shape=25, scale=2),
    "uptake": cp.Beta(alpha=16, beta=2),
    # Additional inputs kept for reference but not varied in this run:
    # "Rzero": cp.Gamma(shape=100,scale=.025),
    # "duration_infectiousness": cp.Gamma(shape=25,scale=.2),
    # "shape_exposed_time": cp.Gamma(shape=17.5,scale=1),
    # "intervention_effect_var_inv": cp.Gamma(shape=2,scale=.05)
}
# Stochastic-collocation alternative, kept for reference:
#sampler = uq.sampling.SCSampler(vary=vary, polynomial_order=3,
#                                quadrature_rule='G', sparse=False)
# Plain Monte Carlo sampling with 2000 samples.
sampler = uq.sampling.MCSampler(vary=vary, n_mc_samples=2000)
# Associate the sampler with the campaign
campaign.set_sampler(sampler)
def test_jinjaencoder(tmpdir):
    """
    Set up a campaign using the jinja2 template. This example is based
    on the DALES model. The input file is a Fortran namelist.
    """
    # Full parameter space of the DALES app (only a subset is varied below).
    params = {
        "Nc_0": {  # concentration of cloud condensation nuclei
            "type": "float",
            "min": 0.1e6,
            "max": 1000e6,
            "default": 70e6,
        },
        "cf": {  # cf subgrid filter constant
            "type": "float",
            "min": 1.0,
            "max": 4.0,
            "default": 2.5,
        },
        "cn": {  # subfilterscale parameter
            "type": "float",
            "min": 0.5,
            "max": 1.0,
            "default": 0.76,
        },
        "Rigc": {  # critical Richardson number
            "type": "float",
            "min": 0.1,
            "max": 1.0,
            "default": 0.25,
        },
        "Prandtl": {  # Prandtl number, subgrid.
            "type": "float",
            "min": 0.1,
            "max": 1.0,
            "default": 1.0 / 3,
        },
        "z0": {  # surface roughness
            "type": "float",
            "min": 1e-4,
            "max": 1.0,
            "default": 1.6e-4,
        },
        "l_sb": {  # microphysics scheme flag: false - KK00 Khairoutdinov
            "type": "integer",  # and Kogan, 2000; true - SB Seifert and
            "min": 0,           # Beheng, 2001, 2006 (default)
            "max": 1,
            "default": 1
        },
        "Nh": {  # number of grid points in the horizontal - itot, jtot
            "type": "integer",
            "min": 3,
            "max": 1024,
            "default": 10
        },
        "extent": {  # horizontal domain size in x, y - xsize, ysize. unit: m
            "type": "float",
            "min": 1,
            "max": 1000000,
            "default": 1000,
        },
        "seed": {  # random seed
            "type": "integer",
            "min": 1,
            "max": 1000000,
            "default": 43
        },
        "nprocx": {  # number of MPI tasks in x
            "type": "integer",
            "min": 1,
            "max": 1000,
            "default": 1
        },
        "nprocy": {  # number of MPI tasks in y
            "type": "integer",
            "min": 1,
            "max": 1000,
            "default": 1
        },
    }

    # Inputs actually varied in this test; the rest stay at their defaults.
    vary = {
        "Nc_0": cp.Uniform(50e6, 100e6),
        # "cf": cp.Uniform(2.4, 2.6),
        # "cn": cp.Uniform(0.5, 0.9),
        # "Rigc": cp.Uniform(0.1, 0.4),
        # "Prandtl": cp.Uniform(0.2, 0.4),
        # "z0": cp.Uniform(1e-4, 2e-4),
        "l_sb": cp.DiscreteUniform(0, 1),
        # "Nh": cp.DiscreteUniform(10, 20),
        # "extent": cp.Uniform(1000, 2000),
        "seed": cp.Uniform(1, 2000),
    }

    # Quantities of interest read back from the model's CSV output.
    output_columns = ['cfrac', 'lwp', 'rwp', 'zb', 'zi', 'prec', 'wq',
                      'wtheta', 'we', 'walltime']

    # Fresh in-memory campaign plus a stochastic-collocation sampler.
    campaign = uq.Campaign(name='dales',
                           work_dir=tmpdir,
                           db_location='sqlite:///')
    sc_sampler = uq.sampling.SCSampler(vary=vary, polynomial_order=2,
                                       quadrature_rule="C")

    # Jinja2 encoder renders the Fortran namelist; SimpleCSV decodes results.
    namelist_encoder = JinjaEncoder(
        template_fname='tests/jinjaencoder/namoptions.template',
        target_filename='namoptions.001')
    csv_decoder = uq.decoders.SimpleCSV(
        target_filename='results.csv',
        output_columns=output_columns)
    campaign.add_app(name="dales",
                     params=params,
                     encoder=namelist_encoder,
                     decoder=csv_decoder)

    # to prevent errors on integer quantities
    campaign.verify_all_runs = False

    campaign.set_sampler(sc_sampler)
    campaign.draw_samples()
    campaign.populate_runs_dir()
def init_VVP_campaign(campaign_name, campaign_config, polynomial_order,
                      campaign_work_dir):
    """
    Set up and execute a fresh EasyVVUQ campaign for flee VVP runs.

    Parameters
    ----------
    campaign_name : str
        Name for the campaign and its app.
    campaign_config : dict
        Configuration holding encoder settings, params, vary ranges,
        sampler name and sampler options.
    polynomial_order : int
        Polynomial order passed to the SC/PCE sampler.
    campaign_work_dir : str
        Working directory; deleted and recreated if it already exists.

    Returns
    -------
    tuple
        (runs_dir, campaign_dir): list of generated run names and the
        campaign directory path.

    Raises
    ------
    ValueError
        If campaign_config["sampler_name"] is not a supported sampler.
    """
    ######################################
    # delete campaign_work_dir if exists #
    ######################################
    if os.path.exists(campaign_work_dir):
        rmtree(campaign_work_dir)
    os.makedirs(campaign_work_dir)

    #####################
    # Create an encoder #
    #####################
    encoder = uq.encoders.GenericEncoder(
        template_fname=os.path.join(get_plugin_path("FabFlee"),
                                    "templates",
                                    campaign_config["encoder_template_fname"]
                                    ),
        delimiter=campaign_config["encoder_delimiter"],
        target_filename=campaign_config["encoder_target_filename"]
    )

    ###########################
    # Set up a fresh campaign #
    ###########################
    db_location = "sqlite:///" + campaign_work_dir + "/campaign.db"

    actions = uq.actions.Actions(
        uq.actions.CreateRunDirectory(root=campaign_work_dir, flatten=True),
        uq.actions.Encode(encoder),
    )

    campaign = uq.Campaign(
        name=campaign_name,
        db_location=db_location,
        work_dir=campaign_work_dir
    )

    ################################
    # Add the flee-vvp-Sampler app #
    ################################
    campaign.add_app(
        name=campaign_name,
        params=campaign_config["params"],
        actions=actions
    )

    ######################
    # parameters to vary #
    ######################
    vary = {}
    for param in campaign_config["selected_vary_parameters"]:
        lower_value = campaign_config[
            "vary_parameters_range"][param]["range"][0]
        upper_value = campaign_config[
            "vary_parameters_range"][param]["range"][1]
        if campaign_config["distribution_type"] == "DiscreteUniform":
            vary.update({param: cp.DiscreteUniform(lower_value, upper_value)})
        elif campaign_config["distribution_type"] == "Uniform":
            vary.update({param: cp.Uniform(lower_value, upper_value)})

    ####################
    # create Sampler   #
    ####################
    sampler_name = campaign_config["sampler_name"]
    if sampler_name == "SCSampler":
        sampler = uq.sampling.SCSampler(
            vary=vary,
            polynomial_order=polynomial_order,
            quadrature_rule=campaign_config["quadrature_rule"],
            growth=campaign_config["growth"],
            sparse=campaign_config["sparse"],
            midpoint_level1=campaign_config["midpoint_level1"],
            dimension_adaptive=campaign_config["dimension_adaptive"]
        )
    elif sampler_name == "PCESampler":
        sampler = uq.sampling.PCESampler(
            vary=vary,
            polynomial_order=polynomial_order,
            rule=campaign_config["quadrature_rule"],
            sparse=campaign_config["sparse"],
            growth=campaign_config["growth"]
        )
    # TODO: add other sampler here
    else:
        # Fail early with a clear message instead of an unbound-variable
        # NameError at campaign.set_sampler(sampler) below.
        raise ValueError(
            "Unsupported sampler_name '{}': expected 'SCSampler' or "
            "'PCESampler'".format(sampler_name)
        )

    ###########################################
    # Associate the sampler with the campaign #
    ###########################################
    campaign.set_sampler(sampler)

    #########################################
    # draw all of the finite set of samples #
    #########################################
    campaign.execute().collate()

    #########################################
    # extract generated runs id by campaign #
    #########################################
    runs_dir = []
    for _, run_info in campaign.campaign_db.runs(
            status=uq.constants.Status.NEW
    ):
        runs_dir.append(run_info["run_name"])

    campaign_dir = campaign.campaign_db.campaign_dir()

    ######################################################
    # backup campaign files, i.e, *.db, *.json, *.pickle #
    ######################################################
    backup_campaign_files(campaign.work_dir)

    print("=" * 50)
    print("With user's specified parameters for {}".format(sampler_name))
    print("campaign name : {}".format(campaign_name))
    print("number of generated runs : {}".format(len(runs_dir)))
    print("campaign dir : {}".format(campaign_work_dir))
    print("=" * 50)

    return runs_dir, campaign_dir
"""Testing polynomial related to distributions.""" import chaospy import numpy import pytest DISTRIBUTIONS = { "discrete": chaospy.DiscreteUniform(-10, 10), "normal": chaospy.Normal(0, 1), "uniform": chaospy.Uniform(-1, 1), "exponential": chaospy.Exponential(1), "gamma": chaospy.Gamma(1), "beta": chaospy.Beta(3, 3, lower=-1, upper=1), "mvnormal": chaospy.MvNormal([0], [1]), "custom": chaospy.UserDistribution( cdf=lambda x: (x+1)/2, pdf=lambda x: 1/2., lower=lambda: -1, upper=lambda: 1, ppf=lambda q: 2*q-1, mom=lambda k: ((k+1.)%2)/(k+1), ttr=lambda k: (0., k*k/(4.*k*k-1)), ), } BUILDERS = { "stieltjes": chaospy.expansion.stieltjes, "cholesky": chaospy.expansion.cholesky, # "gram_schmidt": chaospy.expansion.gram_schmidt, } @pytest.fixture(params=DISTRIBUTIONS)
def init_campaign():
    """
    Set up a fresh EasyVVUQ campaign for the BAC (NAMD) application.

    Loads the campaign configuration from a yml file, builds the run
    directory tree and encoders/decoder, creates the configured sampler
    (SCSampler or PCESampler), draws all samples, populates the run
    directories and saves/backs up the campaign state.

    Raises
    ------
    ValueError
        If campaign_config['sampler_name'] is not a supported sampler.
    """
    #############################################
    # load campaign configuration from yml file #
    #############################################
    campaign_config = load_campaign_config()

    campaign_name = 'BAC_%s' % (campaign_config['sampler_name'])
    campaign_work_dir = os.path.join(
        work_dir, 'BAC_easyvvuq_%s' % (campaign_config['sampler_name']))

    ######################################
    # delete campaign_work_dir if exists #
    ######################################
    if os.path.exists(campaign_work_dir):
        rmtree(campaign_work_dir)
    os.makedirs(campaign_work_dir)

    ###########################
    # Set up a fresh campaign #
    ###########################
    db_location = "sqlite:///" + campaign_work_dir + "/campaign.db"
    campaign = uq.Campaign(name=campaign_name,
                           db_location=db_location,
                           work_dir=campaign_work_dir)

    #################################
    # Create an encoder and decoder #
    #################################
    n_replicas = campaign_config['n_replicas']
    # Directory skeleton created inside each run directory.
    directory_tree = {
        'build': None,
        'constraint': None,
        'fe': {
            'build': None,
            'dcd': None,
            'mmpbsa': {'rep' + str(i): None
                       for i in range(1, n_replicas + 1)},
        },
        'par': None,
        'replica-confs': None,
        'replicas': {
            'rep' + str(i): {
                'equilibration': None,
                'simulation': None
            }
            for i in range(1, n_replicas + 1)
        }
    }

    src_confs = os.path.join(campaign_config_dir, 'templates',
                             campaign_config['app_name'], 'replica-confs')
    dst_confs = 'replica-confs'

    # One encoder per NAMD configuration stage (eq0/eq1/eq2/sim1), each in
    # single-run and replicas flavours, plus the tleap build input.
    multiencoder = uq.encoders.MultiEncoder(
        uq.encoders.DirectoryBuilder(tree=directory_tree),
        Eq0Encoder(template_fname=os.path.join(src_confs,
                                               'template_eq0.conf'),
                   target_filename=os.path.join(dst_confs, 'eq0.conf')),
        Eq0Encoder(template_fname=os.path.join(src_confs,
                                               'eq0-replicas.conf'),
                   target_filename=os.path.join(dst_confs,
                                                'eq0-replicas.conf')),
        Eq1Encoder(template_fname=os.path.join(src_confs,
                                               'template_eq1.conf'),
                   target_filename=os.path.join(dst_confs, 'eq1.conf')),
        Eq1Encoder(template_fname=os.path.join(src_confs,
                                               'eq1-replicas.conf'),
                   target_filename=os.path.join(dst_confs,
                                                'eq1-replicas.conf')),
        Eq2Encoder(template_fname=os.path.join(src_confs,
                                               'template_eq2.conf'),
                   target_filename=os.path.join(dst_confs, 'eq2.conf')),
        Eq2Encoder(template_fname=os.path.join(src_confs,
                                               'eq2-replicas.conf'),
                   target_filename=os.path.join(dst_confs,
                                                'eq2-replicas.conf')),
        SimEncoder(template_fname=os.path.join(src_confs,
                                               'template_sim1.conf'),
                   target_filename=os.path.join(dst_confs, 'sim1.conf')),
        SimEncoder(template_fname=os.path.join(src_confs,
                                               'sim1-replicas.conf'),
                   target_filename=os.path.join(dst_confs,
                                                'sim1-replicas.conf')),
        uq.encoders.GenericEncoder(
            delimiter=campaign_config['encoder_delimiter'],
            template_fname=os.path.join(campaign_config_dir,
                                        'templates',
                                        campaign_config['app_name'],
                                        'build',
                                        'tleap.in'),
            target_filename=os.path.join('build', 'tleap.in')))

    decoder = uq.decoders.SimpleCSV(
        target_filename=campaign_config['decoder_target_filename'],
        output_columns=campaign_config['decoder_output_columns'],
        header=0,
        delimiter=',')

    collater = uq.collate.AggregateSamples(average=False)

    campaign_config['params']['n_replicas']['default'] = n_replicas

    ###################
    # Add the BAC app #
    ###################
    campaign.add_app(name=campaign_name,
                     params=campaign_config['params'],
                     encoder=multiencoder,
                     collater=collater,
                     decoder=decoder)
    # campaign.set_app(campaign_name)

    ######################
    # parameters to vary #
    ######################
    # Physical and solver parameters are perturbed by +/-15% around their
    # nominal values.
    vary_physical = {
        "setTemperature": cp.Uniform(300.0 * 0.85, 300.0 * 1.15),
        "time_factor_eq": cp.Uniform(600.0 * 0.85, 600.0 * 1.15),
        "BerendsenPressureTarget": cp.Uniform(1.01325 * 0.85, 1.01325 * 1.15),
        "time_sim1": cp.Uniform(1000.0 * 0.85, 1000.0 * 1.15),
    }
    vary_solver = {
        "box_size": cp.Uniform(14.0 * 0.85, 14.0 * 1.15),
        "cutoff": cp.Uniform(12.0 * 0.85, 12.0 * 1.15),
        "timestep": cp.Uniform(2.0 * 0.85, 2.0 * 1.15),
        "rigidtolerance": cp.Uniform(0.00001 * 0.85, 0.00001 * 1.15),
        "PMEGridSpacing": cp.Uniform(1.0 * 0.85, 1.0 * 1.15),
        "initTemperature_eq1": cp.Uniform(50.0 * 0.85, 50.0 * 1.15),
        "reassignIncr_eq1": cp.Uniform(1.0 * 0.85, 1.0 * 1.15),
        "langevinDamping": cp.Uniform(5.0 * 0.85, 5.0 * 1.15),
        "BerendsenPressureCompressibility":
            cp.Uniform(0.0000457 * 0.85, 0.0000457 * 1.15),
        "BerendsenPressureRelaxationTime":
            cp.Uniform(100.0 * 0.85, 100.0 * 1.15),
    }
    # NOTE(review): vary_discrete is constructed but deliberately NOT merged
    # into `vary` below (only physical+solver parameters are varied) --
    # confirm this is intended and not an omission.
    vary_discrete = {
        "switching": cp.DiscreteUniform(0, 1),  # ["off", "on"]
        "rigidBonds": cp.DiscreteUniform(0, 2),  # ["none", "water", "all"]
        "rigidIterations": cp.DiscreteUniform(int(math.floor(100 * 0.85)),
                                              int(math.ceil(100 * 1.15))),
        "nonbondedFreq": cp.DiscreteUniform(0, 2),
        "fullElectFrequency": cp.DiscreteUniform(1, 3),
        "stepspercycle": cp.DiscreteUniform(8, 12),
        "minimize_eq0": cp.DiscreteUniform(int(1000 * 0.85),
                                           int(1000 * 1.15)),
        "reassignFreq_eq1": cp.DiscreteUniform(int(100 * 0.85),
                                               int(100 * 1.15)),
        "langevinHydrogen": cp.DiscreteUniform(0, 1),  # ["no", "yes"]
        "useGroupPressure": cp.DiscreteUniform(0, 1),  # ["no", "yes"]
        "BerendsenPressureFreq": cp.DiscreteUniform(1, 3),
    }

    vary = {}
    vary.update(vary_physical)
    vary.update(vary_solver)

    ####################
    # create Sampler   #
    ####################
    if campaign_config['sampler_name'] == 'SCSampler':
        sampler = uq.sampling.SCSampler(
            vary=vary,
            polynomial_order=campaign_config['polynomial_order'],
            quadrature_rule=campaign_config['quadrature_rule'],
            growth=campaign_config['growth'],
            sparse=campaign_config['sparse'],
            midpoint_level1=campaign_config['midpoint_level1'],
            dimension_adaptive=campaign_config['dimension_adaptive'])
    elif campaign_config['sampler_name'] == 'PCESampler':
        sampler = uq.sampling.PCESampler(
            vary=vary,
            # BUGFIX: was a bare `polynomial_order`, which is not defined in
            # this function (init_campaign takes no arguments) and raised a
            # NameError; read it from the config like the SCSampler branch.
            polynomial_order=campaign_config['polynomial_order'],
            rule=campaign_config['quadrature_rule'],
            sparse=campaign_config['sparse'],
            growth=campaign_config['growth'])
    # TODO: add other sampler here
    else:
        # Fail early with a clear message instead of an unbound-variable
        # NameError at campaign.set_sampler(sampler) below.
        raise ValueError(
            "Unsupported sampler_name '{}': expected 'SCSampler' or "
            "'PCESampler'".format(campaign_config['sampler_name'])
        )

    ###########################################
    # Associate the sampler with the campaign #
    ###########################################
    campaign.set_sampler(sampler)

    #########################################
    # draw all of the finite set of samples #
    #########################################
    campaign.draw_samples()
    run_ids = campaign.populate_runs_dir()

    ###################################
    # save campaign and sampler state #
    ###################################
    campaign.save_state(os.path.join(campaign_work_dir,
                                     "campaign_state.json"))
    sampler.save_state(
        os.path.join(campaign_work_dir, "namd_sampler_state.0.pickle"))

    backup_campaign_files(campaign_work_dir)
def test_worker(tmpdir):
    """
    End-to-end cannonsim test: build a campaign, draw random samples,
    encode/execute each run via an external worker function, then collate
    and analyse (BasicStats + an EnsembleBoot expected to fail on a
    missing column).
    """
    # Set up a fresh campaign called "cannon"
    my_campaign = uq.Campaign(name='cannon', work_dir=tmpdir)

    # Define parameter space for the cannonsim app
    params = {
        "angle": {
            "type": "float",
            "min": 0.0,
            "max": 6.28,
            "default": 0.79
        },
        "air_resistance": {
            "type": "float",
            "min": 0.0,
            "max": 1.0,
            "default": 0.2
        },
        "height": {
            "type": "integer",
            "min": 0,
            "max": 1000,
            "default": 1
        },
        "time_step": {
            "type": "float",
            "min": 0.0001,
            "max": 1.0,
            "default": 0.01
        },
        "gravity": {
            "type": "float",
            "min": 0.0,
            "max": 1000.0,
            "default": 9.8
        },
        "mass": {
            "type": "float",
            "min": 0.0001,
            "max": 1000.0,
            "default": 1.0
        },
        "velocity": {
            "type": "float",
            "min": 0.0,
            "max": 1000.0,
            "default": 10.0
        }
    }

    # Create an encoder, decoder and collater for the cannonsim app
    encoder = uq.encoders.GenericEncoder(
        template_fname='tests/cannonsim/test_input/cannonsim.template',
        delimiter='#',
        target_filename='in.cannon')
    decoder = uq.decoders.SimpleCSV(
        target_filename='output.csv',
        output_columns=['Dist', 'lastvx', 'lastvy'],
        header=0)
    collater = uq.collate.AggregateSamples(average=False)

    # Add the cannonsim app
    my_campaign.add_app(name="cannonsim",
                        params=params,
                        encoder=encoder,
                        decoder=decoder,
                        collater=collater)

    # Set the active app to be cannonsim (this is redundant when only one app
    # has been added)
    my_campaign.set_app("cannonsim")

    # Make a random sampler
    vary = {
        "angle": cp.Uniform(0.0, 1.0),
        "height": cp.DiscreteUniform(0, 100),
        "velocity": cp.Normal(10.0, 1.0),
        # BUGFIX: bounds were reversed (cp.Uniform(5.0, 1.0));
        # chaospy's Uniform expects lower < upper.
        "mass": cp.Uniform(1.0, 5.0)
    }
    sampler1 = uq.sampling.RandomSampler(vary=vary)

    print("Serialized sampler:", sampler1.serialize())

    # Set the campaign to use this sampler
    my_campaign.set_sampler(sampler1)

    # Draw 5 samples
    my_campaign.draw_samples(num_samples=5)

    # Print the list of runs now in the campaign db
    print("List of runs added:")
    pprint(my_campaign.list_runs())
    print("---")

    # User defined function: encodes a run via the external encoder tool,
    # executes cannonsim in the run directory, then marks the run ENCODED.
    def encode_and_execute_cannonsim(run_id, run_data):
        enc_args = [
            my_campaign.db_type,
            my_campaign.db_location,
            'FALSE',
            "cannon",
            "cannonsim",
            run_id
        ]
        encoder_path = os.path.realpath(
            os.path.expanduser("easyvvuq/tools/external_encoder.py"))
        subprocess.run(['python3', encoder_path] + enc_args)
        subprocess.run([CANNONSIM_PATH, "in.cannon", "output.csv"],
                       cwd=run_data['run_dir'])
        my_campaign.campaign_db.set_run_statuses(
            [run_id], Status.ENCODED)  # see note further down

    # Encode and execute. Note to call function for all runs with status NEW
    # (and not ENCODED)
    my_campaign.call_for_each_run(encode_and_execute_cannonsim,
                                  status=uq.constants.Status.NEW)

    ####
    # Important note: In this example the execution is done with subprocess
    # which is blocking. However, in practice this will be some sort of
    # middleware (e.g. PJM) which is generally non-blocking. In such a case
    # it is the job of the middleware section to keep track of which runs
    # have been encoded, and updating the database (all at the end if need
    # be) to indicate this to EasyVVUQ _before_ trying to run the
    # collation/analysis section. If EasyVVUQ has not been informed that
    # runs have been encoded, it will most likely just tell you that
    # 'nothing has been collated' or something to that effect.
    ####

    print("Runs list after encoding and execution:")
    pprint(my_campaign.list_runs())

    # Collate all data into one pandas data frame
    my_campaign.collate()
    print("data:", my_campaign.get_collation_result())

    # Create a BasicStats analysis element and apply it to the campaign
    stats = uq.analysis.BasicStats(qoi_cols=['Dist', 'lastvx', 'lastvy'])
    my_campaign.apply_analysis(stats)
    print("stats:\n", my_campaign.get_last_analysis())

    # 'lastv' is not a collated column, so this analysis must raise.
    bootstrap = uq.analysis.EnsembleBoot(groupby=['Dist'],
                                         qoi_cols=['lastv'])
    with pytest.raises(RuntimeError, match=r".* lastv"):
        my_campaign.apply_analysis(bootstrap)

    # Print the campaign log
    pprint(my_campaign._log)

    print("All completed?", my_campaign.all_complete())