def _campaign(work_dir, campaign_name, app_name, params, encoder, decoder, sampler,
              actions, stats, vary, num_samples=0, replicas=1, db_type='sql', call_fn=None):
    my_campaign = uq.Campaign(name=campaign_name, work_dir=work_dir)
    db_location = my_campaign.db_location

    # Add the app
    actions_ = Actions(CreateRunDirectory('/tmp'), Encode(encoder), actions, Decode(decoder))
    my_campaign.add_app(name=app_name, params=params, actions=actions_)
    my_campaign.set_app(app_name)

    # Set the campaign to use this sampler
    my_campaign.set_sampler(sampler)
    my_campaign.execute(nsamples=num_samples, sequential=True).collate()
    my_campaign = None

    # Load state in a new campaign object
    reloaded_campaign = uq.Campaign(name=campaign_name, db_location=db_location)
    reloaded_campaign.set_app(app_name)
    reloaded_campaign.execute(nsamples=num_samples).collate()

    # Draw more samples, execute, and collate onto the existing dataframe
    # reloaded_campaign.draw_samples(num_samples=num_samples, replicas=replicas)
    # reloaded_campaign.collate()
    if stats is not None:
        reloaded_campaign.apply_analysis(stats)

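
# Illustrative call sketch (not part of the original suite; the concrete objects here are
# hypothetical): the helper above expects a ready-made encoder/decoder pair, a sampler,
# an execution action and an optional stats element, e.g.
#
#     _campaign(tmp_path, 'cannon_test', 'cannonsim', params, encoder, decoder,
#               sampler, ExecuteLocal('cannonsim in.cannon output.csv'),
#               stats, vary, num_samples=3)
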
def test_mcmc(tmp_path):
    campaign = uq.Campaign(name="mcmc", work_dir=tmp_path)
    params = {
        "x1": {"type": "float", "default": 0.0},
        "x2": {"type": "float", "default": 0.0},
        "out_file": {"type": "string", "default": "output.json"},
        "chain_id": {"type": "integer", "default": 0}
    }
    encoder = uq.encoders.GenericEncoder(
        template_fname=os.path.abspath("tutorials/rosenbrock.template"),
        delimiter="$", target_filename="input.json")
    decoder = uq.decoders.JSONDecoder("output.json", ["value"])
    actions = Actions(ExecutePython(rosenbrock))
    campaign.add_app(name="mcmc", params=params, actions=actions)
    vary_init = {
        "x1": [-1.0, 0.0, 1.0, 0.5, 0.1],
        "x2": [1.0, 0.0, 0.5, 1.0, 0.2]
    }

    def q(x, b=1):
        return cp.J(cp.Normal(x['x1'], b), cp.Normal(x['x2'], b))

    np.random.seed(1969)
    sampler = uq.sampling.MCMCSampler(vary_init, q, 'value', 5)
    campaign.set_sampler(sampler)
    iterator = campaign.iterate()
    for _ in range(200):
        next(iterator).collate()
    df = campaign.get_collation_result()
    analysis = uq.analysis.MCMCAnalysis(sampler, 'value')
    result = analysis.analyse(df)
    result.plot_hist('x1')
    result.plot_hist('x2')
    result.plot_chains('x1')
    result.plot_chains('x2')

def campaign():
    def model(params):
        return {'y': params['x'] + 1}

    actions = Actions(ExecutePython(model))
    sampler = RandomSampler({'x': cp.Uniform(0, 1)})
    campaign = Campaign('test', {'x': {'default': 0}}, actions)
    campaign.set_sampler(sampler)
    return campaign

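
# Minimal usage sketch (hypothetical, not part of the original suite): a test consuming
# the fixture above would typically draw a few samples, execute them and inspect the
# collated results, mirroring the other tests in this section.
def _example_fixture_usage(campaign):
    campaign.execute(nsamples=3).collate()
    df = campaign.get_collation_result()
    assert len(df) > 0
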
def test_gauss_vector_pce(tmpdir):
    # vector version of test_gauss
    # loads json output containing vector data from gauss test
    params = {
        "sigma": {
            "type": "float",
            "min": 0.0,
            "max": 100000.0,
            "default": 0.25
        },
        "mu": {
            "type": "float",
            "min": 0.0,
            "max": 100000.0,
            "default": 1
        },
        "num_steps": {
            "type": "integer",
            "min": 0,
            "max": 100000,
            "default": 10
        },
        "out_file": {
            "type": "string",
            "default": "output.csv"
        },
    }
    vary = {
        "mu": cp.Uniform(1.0, 100.0),
    }
    encoder = uq.encoders.GenericEncoder(
        template_fname='tests/gauss/gauss.template',
        target_filename='gauss_in.json')
    # decoder = JSONDecoder(target_filename='output.csv.json', output_columns=['numbers'])
    decoder = uq.decoders.SimpleCSV(target_filename="output.csv",
                                    output_columns=["numbers"])
    execute = uq.actions.ExecuteLocal(
        os.path.abspath("tests/gauss/gauss_json.py") + " gauss_in.json")
    actions = Actions(CreateRunDirectory('/tmp'), Encode(encoder),
                      execute, Decode(decoder))
    sampler = uq.sampling.PCESampler(vary=vary, polynomial_order=4)
    my_campaign = uq.Campaign(name='gauss_vector', work_dir=tmpdir)
    my_campaign.add_app(name="gauss_vector", params=params, actions=actions)
    my_campaign.set_sampler(sampler)
    my_campaign.execute().collate()
    data = my_campaign.get_collation_result()
    analysis = uq.analysis.PCEAnalysis(sampler=sampler, qoi_cols=["numbers"])
    my_campaign.apply_analysis(analysis)
    results = my_campaign.get_last_analysis()

def test_gp(tmp_path):
    campaign = uq.Campaign(name='test', work_dir=tmp_path)
    params = {
        "temp_init": {
            "type": "float",
            "min": 0.0,
            "max": 100.0,
            "default": 95.0
        },
        "kappa": {
            "type": "float",
            "min": 0.0,
            "max": 0.1,
            "default": 0.025
        },
        "t_env": {
            "type": "float",
            "min": 0.0,
            "max": 40.0,
            "default": 15.0
        },
        "out_file": {
            "type": "string",
            "default": "output.csv"
        }
    }
    output_filename = params["out_file"]["default"]
    output_columns = ["te"]

    # Create an encoder and decoder for the cooling test app
    encoder = uq.encoders.GenericEncoder(
        template_fname='tests/cooling/cooling.template',
        delimiter='$',
        target_filename='cooling_in.json')
    decoder = uq.decoders.SimpleCSV(target_filename=output_filename,
                                    output_columns=output_columns)
    execute = ExecuteLocal("{} cooling_in.json".format(
        os.path.abspath("tests/cooling/cooling_model.py")))
    actions = Actions(CreateRunDirectory('/tmp'), Encode(encoder),
                      execute, Decode(decoder))
    vary = {"kappa": cp.Uniform(0.025, 0.075), "t_env": cp.Uniform(15, 25)}
    sampler = uq.sampling.quasirandom.LHCSampler(vary=vary)
    campaign.add_app(name='test_app', params=params, actions=actions)
    campaign.set_app('test_app')
    campaign.set_sampler(sampler)
    campaign.execute(nsamples=100).collate()
    df = campaign.get_collation_result()
    analysis = uq.analysis.gp_analyse.GaussianProcessSurrogate(
        ['kappa', 't_env'], ['te'])
    result = analysis.analyse(df)

def test_qcgpj(settings):
    cooling_action = uq.actions.ExecuteLocal(
        os.path.abspath("tests/cooling/cooling_model.py") + " cooling_in.json")
    actions = Actions(CreateRunDirectory('/tmp'), Encode(settings['encoder']),
                      cooling_action, Decode(settings['decoder']))
    campaign = uq.Campaign(name='beam', params=settings['params'], actions=actions)
    campaign.set_sampler(settings['cooling_sampler'])
    with QCGPJPool() as qcgpj:
        campaign.execute(pool=qcgpj).collate()
    campaign.apply_analysis(settings['cooling_stats'])

def test_draw(benchmark):
    params = {
        "S0": {
            "type": "float",
            "default": 997
        },
        "I0": {
            "type": "float",
            "default": 3
        },
        "beta": {
            "type": "float",
            "default": 0.2
        },
        "gamma": {
            "type": "float",
            "default": 0.04,
            "min": 0.0,
            "max": 1.0
        },
        "iterations": {
            "type": "integer",
            "default": 100
        },
        "outfile": {
            "type": "string",
            "default": "output.csv"
        }
    }
    encoder = uq.encoders.GenericEncoder(
        template_fname='tutorials/sir.template',
        delimiter='$',
        target_filename='input.json')
    decoder = uq.decoders.SimpleCSV(target_filename='output.csv',
                                    output_columns=['I'])
    execute = uq.actions.ExecuteLocal('test')
    actions = Actions(execute)
    campaign = uq.Campaign(name='sir_benchmark', params=params, actions=actions)
    pytest.shared = campaign
    vary = {
        "beta": cp.Uniform(0.15, 0.25),
        "gamma": cp.Normal(0.04, 0.001),
    }
    campaign.set_sampler(uq.sampling.RandomSampler(vary=vary))
    benchmark(campaign.draw_samples, 10000)

def setup_cooling_app():
    params = {
        "temp_init": {
            "type": "float",
            "min": 0.0,
            "max": 100.0,
            "default": 95.0
        },
        "kappa": {
            "type": "float",
            "min": 0.0,
            "max": 0.1,
            "default": 0.025
        },
        "t_env": {
            "type": "float",
            "min": 0.0,
            "max": 40.0,
            "default": 15.0
        },
        "out_file": {
            "type": "string",
            "default": "output.csv"
        }
    }
    output_filename = params["out_file"]["default"]
    output_columns = ["te"]
    encoder = uq.encoders.GenericEncoder(
        template_fname='tests/cooling/cooling.template',
        delimiter='$',
        target_filename='cooling_in.json')
    decoder = uq.decoders.SimpleCSV(target_filename=output_filename,
                                    output_columns=output_columns)
    vary = {"kappa": cp.Uniform(0.025, 0.075), "t_env": cp.Uniform(15, 25)}
    cooling_sampler = uq.sampling.PCESampler(vary=vary, polynomial_order=3)
    cooling_action = uq.actions.ExecuteLocal(
        os.path.abspath("tests/cooling/cooling_model.py") + " cooling_in.json")
    cooling_stats = uq.analysis.PCEAnalysis(sampler=cooling_sampler,
                                            qoi_cols=output_columns)
    actions = Actions(CreateRunDirectory('/tmp'), Encode(encoder),
                      cooling_action, Decode(decoder))
    return params, cooling_sampler, actions, cooling_stats

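
# Illustrative sketch (hypothetical helper, not part of the original suite): the tuple
# returned by setup_cooling_app() is wired into a campaign the same way test_qcgpj does
# above, but executed locally without a QCG-PilotJob pool.
def _example_cooling_campaign(tmp_path):
    params, cooling_sampler, actions, cooling_stats = setup_cooling_app()
    campaign = uq.Campaign(name='cooling_example', params=params,
                           actions=actions, work_dir=tmp_path)
    campaign.set_sampler(cooling_sampler)
    campaign.execute().collate()  # the PCE sampler is finite, so no nsamples is needed
    campaign.apply_analysis(cooling_stats)
    return campaign.get_last_analysis()
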
def test_surrogate_workflow(tmpdir):
    campaign = uq.Campaign(name='sc', work_dir=tmpdir)
    params = {
        "Pe": {
            "type": "float",
            "min": 1.0,
            "max": 2000.0,
            "default": 100.0
        },
        "f": {
            "type": "float",
            "min": 0.0,
            "max": 10.0,
            "default": 1.0
        },
        "out_file": {
            "type": "string",
            "default": "output.csv"
        }
    }
    output_filename = params["out_file"]["default"]
    output_columns = ["u"]
    encoder = uq.encoders.GenericEncoder(template_fname='tests/sc/sc.template',
                                         delimiter='$',
                                         target_filename='ade_in.json')
    decoder = uq.decoders.SimpleCSV(target_filename=output_filename,
                                    output_columns=output_columns)
    execute = ExecuteLocal("{} ade_in.json".format(
        os.path.abspath('tests/sc/sc_model.py')))
    actions = Actions(CreateRunDirectory('/tmp'), Encode(encoder),
                      execute, Decode(decoder))
    campaign.add_app(name="sc", params=params, actions=actions)
    vary = {"Pe": cp.Uniform(100.0, 200.0), "f": cp.Uniform(0.95, 1.05)}
    sampler = uq.sampling.SCSampler(vary=vary, polynomial_order=[2, 5],
                                    quadrature_rule="G")
    campaign.set_sampler(sampler)
    campaign.execute().collate()
    results = campaign.analyse(qoi_cols=output_columns)

def campaign(tmpdir):
    params = {
        "angle": {
            "type": "float",
            "min": 0.0,
            "max": 6.28,
            "default": 0.79
        },
        "air_resistance": {
            "type": "float",
            "min": 0.0,
            "max": 1.0,
            "default": 0.2
        },
        "height": {
            "type": "float",
            "min": 0.0,
            "max": 1000.0,
            "default": 1.0
        },
        "time_step": {
            "type": "float",
            "min": 0.0001,
            "max": 1.0,
            "default": 0.01
        },
        "gravity": {
            "type": "float",
            "min": 0.0,
            "max": 1000.0,
            "default": 9.8
        },
        "mass": {
            "type": "float",
            "min": 0.0001,
            "max": 1000.0,
            "default": 1.0
        },
        "velocity": {
            "type": "float",
            "min": 0.0,
            "max": 1000.0,
            "default": 10.0
        }
    }
    encoder = uq.encoders.GenericEncoder(
        template_fname=f'{TEST_PATH}/cannonsim/test_input/cannonsim.template',
        target_filename='in.cannon')
    decoder = uq.decoders.SimpleCSV(
        target_filename='output.csv',
        output_columns=['Dist', 'lastvx', 'lastvy'])
    execute = ExecuteLocal(
        f"{TEST_PATH}/cannonsim/bin/cannonsim in.cannon output.csv")
    campaign = uq.Campaign(name='test', work_dir=tmpdir)
    actions = Actions(CreateRunDirectory('/tmp'), Encode(encoder),
                      execute, Decode(decoder))
    campaign.add_app(name='test', params=params, actions=actions)
    campaign.set_app('test')
    stats = uq.analysis.BasicStats(qoi_cols=['Dist', 'lastvx', 'lastvy'])

    # Make a random sampler
    vary = {
        "angle": cp.Uniform(0.0, 1.0),
        "height": cp.Uniform(2.0, 10.0),
        "velocity": cp.Normal(10.0, 1.0),
        "mass": cp.Uniform(1.0, 5.0)
    }
    sampler = uq.sampling.RandomSampler(vary=vary)
    campaign.set_sampler(sampler)
    campaign.execute(nsamples=100, sequential=True).collate()
    return campaign

def test_surrogate_workflow(tmpdir, sampler):
    campaign = uq.Campaign(name='sc', work_dir=tmpdir)
    params = {
        "Pe": {
            "type": "float",
            "min": 1.0,
            "max": 2000.0,
            "default": 100.0
        },
        "f": {
            "type": "float",
            "min": 0.0,
            "max": 10.0,
            "default": 1.0
        },
        "out_file": {
            "type": "string",
            "default": "output.csv"
        },
        "chain_id": {
            "type": "integer",
            "default": 0
        }
    }
    output_filename = params["out_file"]["default"]
    output_columns = ["u"]
    encoder = uq.encoders.GenericEncoder(template_fname='tests/sc/sc.template',
                                         delimiter='$',
                                         target_filename='ade_in.json')
    decoder = uq.decoders.SimpleCSV(target_filename=output_filename,
                                    output_columns=output_columns)
    execute = ExecuteLocal("{} ade_in.json".format(
        os.path.abspath('tests/sc/sc_model.py')))
    actions = Actions(CreateRunDirectory('/tmp'), Encode(encoder),
                      execute, Decode(decoder))
    campaign.add_app(name="sc", params=params, actions=actions)
    campaign.set_sampler(sampler)
    campaign.execute().collate()
    results = campaign.analyse(qoi_cols=output_columns)
    surrogate = results.surrogate()
    df = campaign.get_collation_result()
    for index, row in df.iterrows():
        surrogate_y = surrogate({'Pe': row['Pe'][0], 'f': row['f'][0]})['u']
        model_y = row['u'].values
        assert surrogate_y == pytest.approx(model_y)

    # Attempt calibration with MCMC
    campaign.add_app(name='surrogate', params=params,
                     actions=Actions(ExecutePython(surrogate)))
    db_location = campaign.db_location
    campaign = None
    reloaded_campaign = uq.Campaign('sc', db_location=db_location)
    assert reloaded_campaign._active_app_name == 'surrogate'
    u = np.array([
        0., 0.00333333, 0.00666667, 0.01, 0.01333333, 0.01666667, 0.02, 0.02333333, 0.02666667,
        0.03, 0.03333333, 0.03666667, 0.04, 0.04333333, 0.04666667, 0.05, 0.05333333, 0.05666667,
        0.06, 0.06333333, 0.06666667, 0.07, 0.07333333, 0.07666667, 0.08, 0.08333333, 0.08666667,
        0.09, 0.09333333, 0.09666667, 0.1, 0.10333333, 0.10666667, 0.11, 0.11333333, 0.11666667,
        0.12, 0.12333333, 0.12666667, 0.13, 0.13333333, 0.13666667, 0.14, 0.14333333, 0.14666667,
        0.15, 0.15333333, 0.15666667, 0.16, 0.16333333, 0.16666667, 0.17, 0.17333333, 0.17666667,
        0.18, 0.18333333, 0.18666667, 0.19, 0.19333333, 0.19666667, 0.2, 0.20333333, 0.20666667,
        0.21, 0.21333333, 0.21666667, 0.22, 0.22333333, 0.22666667, 0.23, 0.23333333, 0.23666667,
        0.24, 0.24333333, 0.24666667, 0.25, 0.25333333, 0.25666667, 0.26, 0.26333333, 0.26666667,
        0.27, 0.27333333, 0.27666667, 0.28, 0.28333333, 0.28666667, 0.29, 0.29333333, 0.29666667,
        0.3, 0.30333333, 0.30666667, 0.31, 0.31333333, 0.31666667, 0.32, 0.32333333, 0.32666667,
        0.33, 0.33333333, 0.33666667, 0.34, 0.34333333, 0.34666667, 0.35, 0.35333333, 0.35666667,
        0.36, 0.36333333, 0.36666667, 0.37, 0.37333333, 0.37666667, 0.38, 0.38333333, 0.38666667,
        0.39, 0.39333333, 0.39666667, 0.4, 0.40333333, 0.40666667, 0.41, 0.41333333, 0.41666667,
        0.42, 0.42333333, 0.42666667, 0.43, 0.43333333, 0.43666667, 0.44, 0.44333333, 0.44666667,
        0.45, 0.45333333, 0.45666667, 0.46, 0.46333333, 0.46666667, 0.47, 0.47333333, 0.47666667,
        0.48, 0.48333333, 0.48666667, 0.49, 0.49333333, 0.49666667, 0.5, 0.50333333, 0.50666667,
        0.51, 0.51333333, 0.51666667, 0.52, 0.52333333, 0.52666667, 0.53, 0.53333333, 0.53666667,
        0.54, 0.54333333, 0.54666667, 0.55, 0.55333333, 0.55666667, 0.56, 0.56333333, 0.56666667,
        0.57, 0.57333333, 0.57666667, 0.58, 0.58333333, 0.58666667, 0.59, 0.59333333, 0.59666667,
        0.6, 0.60333333, 0.60666667, 0.61, 0.61333333, 0.61666667, 0.62, 0.62333333, 0.62666667,
        0.63, 0.63333333, 0.63666667, 0.64, 0.64333333, 0.64666667, 0.65, 0.65333333, 0.65666667,
        0.66, 0.66333333, 0.66666667, 0.67, 0.67333333, 0.67666667, 0.68, 0.68333333, 0.68666667,
        0.69, 0.69333333, 0.69666667, 0.7, 0.70333333, 0.70666667, 0.71, 0.71333333, 0.71666667,
        0.72, 0.72333333, 0.72666667, 0.73, 0.73333333, 0.73666667, 0.74, 0.74333333, 0.74666667,
        0.75, 0.75333333, 0.75666667, 0.76, 0.76333333, 0.76666667, 0.77, 0.77333333, 0.77666667,
        0.78, 0.78333333, 0.78666667, 0.79, 0.79333333, 0.79666667, 0.8, 0.80333333, 0.80666667,
        0.81, 0.81333333, 0.81666667, 0.82, 0.82333333, 0.82666667, 0.83, 0.83333333, 0.83666667,
        0.84, 0.84333333, 0.84666667, 0.85, 0.85333333, 0.85666667, 0.86, 0.86333333, 0.86666667,
        0.87, 0.87333333, 0.87666666, 0.87999999, 0.88333332, 0.88666664, 0.88999995, 0.89333325, 0.89666653,
        0.89999978, 0.90333296, 0.90666605, 0.90999898, 0.91333163, 0.91666382, 0.91999526, 0.92332544, 0.9266535,
        0.92997806, 0.93329677, 0.93660573, 0.93989844, 0.94316407, 0.94638456, 0.94952982, 0.95254969, 0.9553606,
        0.95782322, 0.95970536, 0.96062005, 0.9599223, 0.95653717, 0.94867307, 0.933344, 0.90557333, 0.85706667,
        0.774, 0.63333333, 0.39666667, 0.
    ])

    def proposal(x):
        return cp.J(cp.Normal(x['Pe'], 1.0), cp.Normal(x['f'], 0.001))

    def loglikelihood(x):
        return -((u - x)**2).sum()

    init = {'Pe': [110.0], 'f': [2.0]}
    reloaded_campaign.set_sampler(
        uq.sampling.MCMCSampler(init, proposal, 'u', 1, loglikelihood))
    iterator = reloaded_campaign.iterate(mark_invalid=True)
    for _ in range(100):
        next(iterator).collate()
    df = reloaded_campaign.get_collation_result()
    assert len(df) > 0
    assert len(df) <= 100
    results = reloaded_campaign.analyse()

"default": 200000 }, "outfile": { "type": "string", "default": "output.json" } } encoder = uq.encoders.GenericEncoder(template_fname='beam.template', delimiter='$', target_filename='input.json') decoder = uq.decoders.JSONDecoder(target_filename='output.json', output_columns=['g1']) execute = ExecuteLocal('{}/beam input.json'.format(os.getcwd())) actions = Actions(CreateRunDirectory('/tmp'), Encode(encoder), execute, Decode(decoder)) campaign = uq.Campaign(name='beam', params=params, actions=actions) vary = { "F": cp.Normal(1, 0.1), "L": cp.Normal(1.5, 0.01), "a": cp.Uniform(0.7, 1.2), "D": cp.Triangle(0.75, 0.8, 0.85) } campaign.set_sampler(uq.sampling.PCESampler(vary=vary, polynomial_order=1)) with QCGPJPool( template_params={'venv': '/home/bartek/.virtualenv/qcgpj7'}) as qcgpj: #with QCGPJPool() as qcgpj:
def adaptive_campaign():
    d = 3
    number_of_adaptations = 3
    campaign = uq.Campaign(name='sc', work_dir='/tmp')
    params = {}
    for i in range(d):
        params["x%d" % (i + 1)] = {"type": "float", "min": 0.0,
                                   "max": 1.0, "default": 0.5}
    params["out_file"] = {"type": "string", "default": "output.csv"}
    output_filename = params["out_file"]["default"]
    output_columns = ["f"]

    # Create an encoder, decoder and collation element
    encoder = uq.encoders.GenericEncoder(
        template_fname='tests/sc/poly_model_anisotropic.template',
        # template_fname='./sc/poly_model_anisotropic.template',
        delimiter='$',
        target_filename='poly_in.json')
    decoder = uq.decoders.SimpleCSV(target_filename=output_filename,
                                    output_columns=output_columns)
    execute = ExecuteLocal(os.path.abspath("tests/sc/poly_model_anisotropic.py")
                           + " poly_in.json")
    actions = Actions(CreateRunDirectory('/tmp'), Encode(encoder),
                      execute, Decode(decoder))

    # Add the SC app (automatically set as current app)
    campaign.add_app(name="sc", params=params, actions=actions)

    # Create the sampler
    vary = {}
    for i in range(d):
        vary["x%d" % (i + 1)] = cp.Uniform(0, 1)
    sampler = uq.sampling.SCSampler(vary=vary, polynomial_order=1,
                                    quadrature_rule="C", sparse=True,
                                    growth=True, midpoint_level1=True,
                                    dimension_adaptive=True)
    campaign.set_sampler(sampler)
    campaign.execute().collate()
    data_frame = campaign.get_collation_result()
    analysis = uq.analysis.SCAnalysis(sampler=sampler, qoi_cols=output_columns)
    campaign.apply_analysis(analysis)
    for i in range(number_of_adaptations):
        sampler.look_ahead(analysis.l_norm)
        campaign.execute().collate()
        data_frame = campaign.get_collation_result()
        analysis.adapt_dimension('f', data_frame)
        campaign.apply_analysis(analysis)
    logging.debug(analysis.l_norm)
    logging.debug(sampler.admissible_idx)
    results = campaign.get_last_analysis()
    return sampler, analysis, results

def sparse_campaign():
    # Set up a fresh campaign called "sc"
    campaign = uq.Campaign(name='sc', work_dir='/tmp')

    # Define parameter space
    params = {
        "x1": {
            "type": "float",
            "min": 0.0,
            "max": 1.0,
            "default": 0.5
        },
        "x2": {
            "type": "float",
            "min": 0.0,
            "max": 1.0,
            "default": 0.5
        },
        "out_file": {
            "type": "string",
            "default": "output.csv"
        }
    }
    output_filename = params["out_file"]["default"]
    output_columns = ["f"]

    # Create an encoder, decoder and collation element
    encoder = uq.encoders.GenericEncoder(template_fname=HOME + '/sc/sobol.template',
                                         delimiter='$',
                                         target_filename='poly_in.json')
    decoder = uq.decoders.SimpleCSV(target_filename=output_filename,
                                    output_columns=output_columns)
    execute = ExecuteLocal(
        os.path.abspath("tests/sc/sobol_model.py") + " poly_in.json")
    actions = Actions(CreateRunDirectory('/tmp'), Encode(encoder),
                      execute, Decode(decoder))

    # Add the SC app (automatically set as current app)
    campaign.add_app(name="sc", params=params, actions=actions)

    # Create the sampler
    vary = {"x1": cp.Uniform(0.0, 1.0), "x2": cp.Uniform(0.0, 1.0)}

    # To use 'next_level_sparse_grid' below, we must select a nested
    # sparse grid here
    sampler = uq.sampling.SCSampler(vary=vary, polynomial_order=5,
                                    quadrature_rule="C", sparse=True,
                                    growth=True)

    # Associate the sampler with the campaign
    campaign.set_sampler(sampler)
    logging.debug('Number of samples: %d', sampler.n_samples)
    campaign.execute().collate()

    # Post-processing analysis
    analysis = uq.analysis.SCAnalysis(sampler=sampler, qoi_cols=output_columns)
    campaign.apply_analysis(analysis)
    results = campaign.get_last_analysis()

    n_adaptations = 1
    for i in range(n_adaptations):
        # update the sparse grid to the next level
        sampler.next_level_sparse_grid()
        campaign.execute().collate()
        campaign.apply_analysis(analysis)
    results = campaign.get_last_analysis()
    return sampler, analysis, results

params["a%d" % (i + 1)] = { "type": "float", "min": 0.0, "max": 100.0, "default": a[i] } # create encoder, decoder, and execute locally encoder = uq.encoders.GenericEncoder(template_fname=HOME + '/model/func.template', delimiter='$', target_filename='in.json') decoder = uq.decoders.SimpleCSV(target_filename=output_filename, output_columns=output_columns) execute = ExecuteLocal('{}/model/func.py in.json'.format(os.getcwd())) actions = Actions(CreateRunDirectory(root=WORK_DIR), Encode(encoder), execute, Decode(decoder)) # uncertain variables vary = {} for i in range(D): vary["x%d" % (i + 1)] = cp.Uniform(0, 1) # Latin Hypercube sampler my_sampler = uq.sampling.quasirandom.LHCSampler(vary=vary, max_num=1000) # EasyVVUQ Campaign campaign = uq.Campaign(name='func', params=params, actions=actions, work_dir=WORK_DIR, db_location=DB_LOCATION)
def test_multiencoder(tmpdir):
    # Set up a fresh campaign called "cannon"
    my_campaign = uq.Campaign(name='cannon', work_dir=tmpdir)

    # Define parameter space for the cannonsim app
    params = {
        "angle": {
            "type": "float",
            "min": 0.0,
            "max": 6.28,
            "default": 0.79
        },
        "air_resistance": {
            "type": "float",
            "min": 0.0,
            "max": 1.0,
            "default": 0.2
        },
        "height": {
            "type": "float",
            "min": 0.0,
            "max": 1000.0,
            "default": 1.0
        },
        "time_step": {
            "type": "float",
            "min": 0.0001,
            "max": 1.0,
            "default": 0.01
        },
        "gravity": {
            "type": "float",
            "min": 0.0,
            "max": 1000.0,
            "default": 9.8
        },
        "mass": {
            "type": "float",
            "min": 0.0001,
            "max": 1000.0,
            "default": 1.0
        },
        "velocity": {
            "type": "float",
            "min": 0.0,
            "max": 1000.0,
            "default": 10.0
        }
    }

    # Specify a complicated directory hierarchy to test the DirectoryBuilder encoder
    directory_tree = {
        'dir1': {
            'dir2': {
                'dir3': None,
                'dir4': None
            }
        },
        'dir5': {
            'dir6': None
        }
    }

    # Create a multiencoder combining a directory builder and two template encoders
    multiencoder = uq.encoders.MultiEncoder(
        uq.encoders.DirectoryBuilder(tree=directory_tree),
        uq.encoders.GenericEncoder(
            template_fname='tests/cannonsim/test_input/cannonsim.template',
            delimiter='#',
            target_filename='dir1/dir2/dir3/in.cannon'),
        uq.encoders.GenericEncoder(
            template_fname='tests/cannonsim/test_input/cannonsim.template',
            delimiter='#',
            target_filename='dir5/dir6/in.cannon.2'))

    # Create a decoder for the cannonsim app
    decoder = uq.decoders.SimpleCSV(
        target_filename='output.csv',
        output_columns=['Dist', 'lastvx', 'lastvy'])
    actions = Actions(
        CreateRunDirectory('/tmp'),
        Encode(multiencoder),
        uq.actions.ExecuteLocal(
            os.path.abspath(
                "tests/cannonsim/bin/cannonsim dir5/dir6/in.cannon.2") + " output.csv"),
        Decode(decoder))

    # Add the cannonsim app
    my_campaign.add_app(name="cannonsim", params=params, actions=actions)

    # Set the active app to be cannonsim (this is redundant when only one app
    # has been added)
    my_campaign.set_app("cannonsim")

    # Set up sampler
    sweep1 = {
        "angle": [0.1, 0.2, 0.3],
        "height": [2.0, 10.0],
        "velocity": [10.0, 10.1, 10.2]
    }
    sampler = uq.sampling.BasicSweep(sweep=sweep1)

    # Set the campaign to use this sampler
    my_campaign.set_sampler(sampler)

    # Test reloading from the database
    reloaded_campaign = uq.Campaign('cannon', db_location=my_campaign.db_location)

    my_campaign.execute(sequential=True).collate()

    # Create a BasicStats analysis element and apply it to the campaign
    stats = uq.analysis.BasicStats(qoi_cols=['Dist', 'lastvx', 'lastvy'])
    my_campaign.apply_analysis(stats)

def test_anisotropic_order(tmpdir):
    # Set up a fresh campaign called "sc"
    my_campaign = uq.Campaign(name='sc', work_dir=tmpdir, db_location='sqlite:///')

    # Define parameter space
    params = {
        "Pe": {
            "type": "float",
            "min": 1.0,
            "max": 2000.0,
            "default": 100.0
        },
        "f": {
            "type": "float",
            "min": 0.0,
            "max": 10.0,
            "default": 1.0
        },
        "out_file": {
            "type": "string",
            "default": "output.csv"
        }
    }
    output_filename = params["out_file"]["default"]
    output_columns = ["u"]

    # Create an encoder, decoder and collation element
    encoder = uq.encoders.GenericEncoder(template_fname='tests/sc/sc.template',
                                         delimiter='$',
                                         target_filename='ade_in.json')
    decoder = uq.decoders.SimpleCSV(target_filename=output_filename,
                                    output_columns=output_columns)
    execute = ExecuteLocal("{} ade_in.json".format(
        os.path.abspath('tests/sc/sc_model.py')))
    actions = Actions(CreateRunDirectory('/tmp'), Encode(encoder),
                      execute, Decode(decoder))

    # Add the SC app (automatically set as current app)
    my_campaign.add_app(name="sc", params=params, actions=actions)

    # Create the sampler
    vary = {"Pe": cp.Uniform(100.0, 200.0), "f": cp.Uniform(0.95, 1.05)}

    # different orders for the 2 parameters
    sampler = uq.sampling.SCSampler(vary=vary, polynomial_order=[2, 5],
                                    quadrature_rule="G")

    # Associate the sampler with the campaign
    my_campaign.set_sampler(sampler)
    my_campaign.execute().collate()

    # Post-processing analysis
    analysis = uq.analysis.SCAnalysis(sampler=sampler, qoi_cols=output_columns)
    my_campaign.apply_analysis(analysis)
    results = my_campaign.get_last_analysis()
    return results, sampler, analysis

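
# Illustrative follow-up (hypothetical, assuming the standard EasyVVUQ results API): the
# tuple returned by test_anisotropic_order can be inspected for moments and first-order
# Sobol indices of the quantity of interest 'u'.
def _example_inspect_results(results):
    mean_u = results.describe('u', 'mean')
    sobols_u = results.sobols_first('u')
    return mean_u, sobols_u
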
def setup_cannonsim_app():
    params = {
        "angle": {
            "type": "float",
            "min": 0.0,
            "max": 6.28,
            "default": 0.79
        },
        "air_resistance": {
            "type": "float",
            "min": 0.0,
            "max": 1.0,
            "default": 0.2
        },
        "height": {
            "type": "float",
            "min": 0.0,
            "max": 1000.0,
            "default": 1.0
        },
        "time_step": {
            "type": "float",
            "min": 0.0001,
            "max": 1.0,
            "default": 0.01
        },
        "gravity": {
            "type": "float",
            "min": 0.0,
            "max": 1000.0,
            "default": 9.8
        },
        "mass": {
            "type": "float",
            "min": 0.0001,
            "max": 1000.0,
            "default": 1.0
        },
        "velocity": {
            "type": "float",
            "min": 0.0,
            "max": 1000.0,
            "default": 10.0
        }
    }
    encoder = uq.encoders.GenericEncoder(
        template_fname='tests/cannonsim/test_input/cannonsim.template',
        delimiter='#',
        target_filename='in.cannon')
    decoder = uq.decoders.SimpleCSV(
        target_filename='output.csv',
        output_columns=['Dist', 'lastvx', 'lastvy'])
    vary = {
        "gravity": cp.Uniform(1.0, 9.8),
        "mass": cp.Uniform(2.0, 10.0),
    }
    cannon_sampler = uq.sampling.RandomSampler(vary=vary, max_num=5)
    cannon_action = uq.actions.ExecuteLocal(
        os.path.abspath("tests/cannonsim/bin/cannonsim") + " in.cannon output.csv")
    cannon_stats = uq.analysis.BasicStats(
        qoi_cols=['Dist', 'lastvx', 'lastvy'])
    actions = Actions(CreateRunDirectory('/tmp'), Encode(encoder),
                      cannon_action, Decode(decoder))
    return params, cannon_sampler, actions, cannon_stats

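
# Illustrative sketch (hypothetical helper, not part of the original suite): wiring the
# cannonsim pieces returned above into a campaign follows the same pattern as the
# cooling example, with BasicStats applied after collation.
def _example_cannonsim_campaign(tmp_path):
    params, cannon_sampler, actions, cannon_stats = setup_cannonsim_app()
    campaign = uq.Campaign(name='cannonsim_example', params=params,
                           actions=actions, work_dir=tmp_path)
    campaign.set_sampler(cannon_sampler)
    campaign.execute(nsamples=5).collate()  # matches the sampler's max_num
    campaign.apply_analysis(cannon_stats)
    return campaign.get_last_analysis()
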
def test_multisampler(tmpdir):
    # Set up a fresh campaign called "cannon"
    my_campaign = uq.Campaign(name='cannon', work_dir=tmpdir)

    # Define parameter space for the cannonsim app
    params = {
        "angle": {
            "type": "float",
            "min": 0.0,
            "max": 6.28,
            "default": 0.79
        },
        "air_resistance": {
            "type": "float",
            "min": 0.0,
            "max": 1.0,
            "default": 0.2
        },
        "height": {
            "type": "float",
            "min": 0.0,
            "max": 1000.0,
            "default": 1.0
        },
        "time_step": {
            "type": "float",
            "min": 0.0001,
            "max": 1.0,
            "default": 0.01
        },
        "gravity": {
            "type": "float",
            "min": 0.0,
            "max": 1000.0,
            "default": 9.8
        },
        "mass": {
            "type": "float",
            "min": 0.0001,
            "max": 1000.0,
            "default": 1.0
        },
        "velocity": {
            "type": "float",
            "min": 0.0,
            "max": 1000.0,
            "default": 10.0
        }
    }

    # Create an encoder and decoder for the cannonsim app
    encoder = uq.encoders.GenericEncoder(
        template_fname='tests/cannonsim/test_input/cannonsim.template',
        delimiter='#',
        target_filename='in.cannon')
    decoder = uq.decoders.SimpleCSV(
        target_filename='output.csv',
        output_columns=['Dist', 'lastvx', 'lastvy'])
    execute = uq.actions.ExecuteLocal(
        os.path.abspath("tests/cannonsim/bin/cannonsim") + " in.cannon output.csv")
    actions = Actions(CreateRunDirectory('/tmp'), Encode(encoder),
                      execute, Decode(decoder))

    # Add the cannonsim app
    my_campaign.add_app(name="cannonsim", params=params, actions=actions)

    # Set the active app to be cannonsim (this is redundant when only one app
    # has been added)
    my_campaign.set_app("cannonsim")

    # Set up samplers
    sweep1 = {
        "angle": [0.1, 0.2, 0.3],
        "height": [2.0, 10.0],
        "velocity": [10.0, 10.1, 10.2]
    }
    sampler1 = uq.sampling.BasicSweep(sweep=sweep1)
    sweep2 = {"air_resistance": [0.2, 0.3, 0.4]}
    sampler2 = uq.sampling.BasicSweep(sweep=sweep2)
    vary = {
        "gravity": cp.Uniform(1.0, 9.8),
        "mass": cp.Uniform(2.0, 10.0),
    }
    sampler3 = uq.sampling.RandomSampler(vary=vary, max_num=5)

    # Make a multisampler
    multisampler = uq.sampling.MultiSampler(sampler1, sampler2, sampler3)

    # Set the campaign to use this sampler
    my_campaign.set_sampler(multisampler)

    # Test reloading
    reloaded_campaign = uq.Campaign('cannon', db_location=my_campaign.db_location)

    my_campaign.execute(sequential=True).collate()

    # Create a BasicStats analysis element and apply it to the campaign
    stats = uq.analysis.BasicStats(qoi_cols=['Dist', 'lastvx', 'lastvy'])
    my_campaign.apply_analysis(stats)