def _check_recorder_file(self, pb, cstr, filename):
    """Run *pb* with a SEGOMOE driver and sanity-check the recorded cases.

    Parameters
    ----------
    pb : the Problem to configure and run; a recorder is attached to its model.
    cstr : bool; when True, constraints use the same surrogate model as the objective.
    filename : unused here — recording goes to ``self.case_recorder_filename``.
        NOTE(review): consider wiring this parameter in or removing it.
    """
    pb.driver = OneraSegoDriver()
    pb.driver.options["optimizer"] = "SEGOMOE"
    pb.driver.opt_settings["maxiter"] = 10
    # default model: one kriging surrogate per objective (and constraints)
    n_var = 2
    mod_obj = {
        "type": "Krig",
        "corr": "squared_exponential",
        "regr": "constant",
        "theta0": [1.0] * n_var,
        "thetaL": [0.1] * n_var,
        "thetaU": [10.0] * n_var,
        "normalize": True,
    }
    model_type = {"obj": mod_obj}
    if cstr:
        model_type["con"] = mod_obj
    pb.driver.opt_settings["model_type"] = model_type
    recorder = SqliteRecorder(self.case_recorder_filename)
    pb.model.add_recorder(recorder)
    pb.setup()
    # BUG FIX: run the problem passed in as a parameter; the original called
    # self.pb.run_driver(), which relies on unrelated state set by another
    # test and fails when self.pb is unset or is a different problem.
    pb.run_driver()
    self.assertTrue(os.path.exists(self.case_recorder_filename))
    reader = CaseReader(self.case_recorder_filename)
    for case_id in reader.list_cases():
        case = reader.get_case(case_id)
        print(case.outputs["obj"])
def compute(self, inputs, outputs):
    """Plot convergence trends from the recorded optimization log.

    If the case recorder file exists, one figure per recorded response and
    design variable is written to the output folder as
    'Convergence_trend_<param>.pdf'.
    """
    folder_output = self.options['folder_output']
    optimization_log = self.options['folder_output'] + self.options['optimization_log']
    if not os.path.exists(optimization_log):
        return
    reader = CaseReader(optimization_log)
    case_ids = reader.list_cases()
    history = {}
    iterations = []
    for idx, case_id in enumerate(case_ids):
        iterations.append(idx)
        case = reader.get_case(case_id)
        # responses first, then design variables
        for values in (case.get_responses(), case.get_design_vars()):
            for name in values.keys():
                if idx == 0:
                    history[name] = []
                history[name].append(values[name])
    for name in history.keys():
        fig, ax = plt.subplots(1, 1, figsize=(5.3, 4))
        ax.plot(iterations, history[name])
        ax.set(xlabel='Number of Iterations', ylabel=name)
        fig_name = 'Convergence_trend_' + name + '.pdf'
        fig.savefig(folder_output + fig_name)
        plt.close(fig)
def assert_morris_case_generation(self, nt, driver):
    """Check a Morris DOE run records exactly (n_desvars + 1) * nt cases."""
    pb, recfile = TestSalibDoeDriver.run_driver("morris" + str(nt), driver)
    self.assertTrue(os.path.exists(recfile))
    reader = CaseReader(recfile)
    cases = reader.list_cases("driver")
    os.remove(recfile)
    # total number of scalar design variables across all desvar entries
    sizes = [data["size"] for data in pb.model.get_design_vars().values()]
    self.assertEqual(len(cases), (sum(sizes) + 1) * nt)
def test_recording(self):
    """Coloring triggers an extra run_model (and final_setup) under the hood;
    verify the recording setup logic handles that properly."""
    p = run_opt(pyOptSparseDriver, 'auto', assemble_type='csc',
                optimizer='SNOPT', dynamic_simul_derivs=True,
                print_results=False, recorder=SqliteRecorder('cases.sql'))
    expected = ['rank0:SNOPT|%d' % i for i in range(p.driver.iter_count)]
    reader = CaseReader('cases.sql')
    self.assertEqual(reader.list_cases(), expected)
def test_resids(self):
    """Tests that the reader returns resids correctly."""
    cr = CaseReader(self.filename)
    last_case = cr.get_case(-1)
    last_case_id = cr.list_cases()[-1]
    with SqliteDict(self.filename, 'iterations', flag='r') as db:
        for key in db[last_case_id]['Residuals'].keys():
            val = db[last_case_id]['Residuals'][key][()]
            # BUG FIX: the failure message said "Unknown" although this
            # test compares residuals
            np.testing.assert_almost_equal(last_case.resids[key], val,
                                           err_msg='Case reader gives '
                                                   'incorrect Residual value'
                                                   ' for {0}'.format(key))
def test_unknowns(self):
    """Tests that the reader returns unknowns correctly."""
    cr = CaseReader(self.filename)
    last_case = cr.get_case(-1)
    last_case_id = cr.list_cases()[-1]
    with h5py.File(self.filename, 'r') as f:
        unknowns = f[last_case_id]['Unknowns']
        for key in unknowns.keys():
            expected = unknowns[key][()]
            msg = ('Case reader gives '
                   'incorrect Unknown value'
                   ' for {0}'.format(key))
            np.testing.assert_almost_equal(last_case[key], expected,
                                           err_msg=msg)
def test_params(self):
    """Tests that the reader returns params correctly."""
    cr = CaseReader(self.filename)
    last_case = cr.get_case(-1)
    last_case_id = cr.list_cases()[-1]
    # removed unused local: n = cr.num_cases
    with SqliteDict(self.filename, 'iterations', flag='r') as db:
        for key in db[last_case_id]['Parameters'].keys():
            val = db[last_case_id]['Parameters'][key]
            np.testing.assert_almost_equal(last_case.parameters[key], val,
                                           err_msg='Case reader gives '
                                                   'incorrect Parameter value'
                                                   ' for {0}'.format(key))
def assert_case_generation(self, n, driver):
    """Run the Sellar problem under *driver* and check n cases are recorded."""
    pb = SellarProblem()
    pb.driver = driver
    recfile = "test_smt_doe_driver_{}.sqlite".format(n)
    pb.driver.add_recorder(SqliteRecorder(recfile))
    pb.setup()
    pb.run_driver()
    pb.cleanup()
    reader = CaseReader(recfile)
    cases = reader.list_cases("driver")
    os.remove(recfile)
    self.assertEqual(len(cases), n)
def test_resids(self):
    """Tests that the reader returns resids correctly."""
    cr = CaseReader(self.filename)
    last_case = cr.get_case(-1)
    last_case_id = cr.list_cases()[-1]
    # removed unused local: n = cr.num_cases
    with h5py.File(self.filename, 'r') as f:
        for key in f[last_case_id]['Residuals'].keys():
            val = f[last_case_id]['Residuals'][key][()]
            # BUG FIX: the failure message said "Unknown" although this
            # test compares residuals
            np.testing.assert_almost_equal(last_case.resids[key], val,
                                           err_msg='Case reader gives'
                                                   ' incorrect Residual'
                                                   ' value for'
                                                   ' {0}'.format(key))
def load_from_sqlite(filename):
    """Load driver cases from an OpenMDAO sqlite recording.

    Returns (name, cases, statuses) where *name* is the driver name parsed
    from the first case coordinate (e.g. "rank0:SLSQP|0"), falling back to
    the file basename when no driver name can be extracted.
    """
    reader = CaseReader(filename)
    cases = reader.list_cases("driver")
    if not cases:
        raise Exception("No case found in {}".format(filename))
    # find driver name
    driver_first_coord = cases[0]
    m = re.match(r"\w+:(\w+)|.*", driver_first_coord)
    name = os.path.splitext(os.path.basename(filename))[0]
    # BUG FIX: the regex always matches (via the ".*" alternative), so
    # checking m alone is not enough — when the coordinate has no
    # "<rank>:<driver>" prefix, m.group(1) is None and name was clobbered.
    if m and m.group(1):
        name = m.group(1)
    # format cases and statuses
    cases, statuses = format_upload_cases(reader)
    return name, cases, statuses
def upload(self, sqlite_filename, analysis_id=None, operation_id=None,
           cleanup=False, dry_run=False):
    """Upload cases from an OpenMDAO sqlite recording to the server.

    Parameters
    ----------
    sqlite_filename : path of the sqlite recording to upload.
    analysis_id : analysis to attach to (defaults to the current one).
    operation_id : when given, patch that existing operation instead of
        creating a new one.
    cleanup : when True, remove the sqlite file after a successful upload.
    dry_run : when True, print the cases and exit without uploading.
    """
    from socket import gethostname
    mda_id = self.get_analysis_id() if not analysis_id else analysis_id
    reader = CaseReader(sqlite_filename)
    cases = reader.list_cases('driver')
    if len(cases) == 0:
        raise Exception("No case found in {}".format(sqlite_filename))
    # Parse the driver name from the first case coordinate (e.g.
    # "rank0:SLSQP|0"); fall back to the filename when not parseable.
    driver_first_coord = cases[0]
    name = os.path.splitext(sqlite_filename)[0]
    m = re.match(r"\w+:(\w+)|.*", driver_first_coord)
    # BUG FIX: the regex always matches (via the ".*" alternative), so also
    # require a non-empty capture group; otherwise name could become None.
    if m and m.group(1):
        name = m.group(1)
    cases, statuses = self._format_upload_cases(reader)
    if dry_run:
        WhatsOpt._print_cases(cases, statuses)
        exit()
    # Map well-known driver names; otherwise suppose name is well-formed
    # <lib>-<doe|optimizer|screening>-<algoname> (defaults to doe server-side).
    # BUG FIX: this mapping is now computed before the operation_id branch;
    # previously `driver` was undefined in that branch and the final print
    # raised NameError.
    if name == 'LHS':
        driver = 'smt_doe_lhs'
    elif name == 'Morris':
        driver = 'salib_doe_morris'
    elif name == 'SLSQP':
        driver = 'scipy_optimizer_slsqp'
    else:
        driver = name.lower()
    resp = None
    if operation_id:
        # update an existing operation with the new cases
        url = self._endpoint(('/api/v1/operations/%s') % operation_id)
        operation_params = {'cases': cases}
        resp = self.session.patch(url, headers=self.headers,
                                  json={'operation': operation_params})
    else:
        # create a new operation under the analysis
        url = self._endpoint(('/api/v1/analyses/%s/operations') % mda_id)
        operation_params = {'name': name,
                            'driver': driver,
                            'host': gethostname(),
                            'cases': cases,
                            'success': statuses}
        resp = self.session.post(url, headers=self.headers,
                                 json={'operation': operation_params})
    resp.raise_for_status()
    print("Results data from {} uploaded with driver {}".format(sqlite_filename, driver))
    if cleanup:
        os.remove(sqlite_filename)
        print("%s removed" % sqlite_filename)
def load_sqlite_file(filename):
    """Load driver cases from an OpenMDAO sqlite recording.

    Returns (name, cases, statuses); *name* is the driver name parsed from
    the first case coordinate, falling back to the file basename.
    """
    log("Load {}...".format(filename))
    reader = CaseReader(filename)
    cases = reader.list_cases("driver", out_stream=None)
    if not cases:
        raise Exception("No case found in {}".format(filename))
    # find driver name
    driver_first_coord = cases[0]
    m = re.match(r"\w+:(\w+)|.*", driver_first_coord)
    name = os.path.splitext(os.path.basename(filename))[0]
    # BUG FIX: the regex always matches (via the ".*" alternative), so
    # require a non-empty capture group; otherwise name could become None.
    if m and m.group(1):
        name = m.group(1)
    # format cases and statuses
    # cases : [{"varname": varname, "coord_index": idx, "values": [...]}*]
    cases, statuses = _format_upload_cases(reader)
    return name, cases, statuses
def test_sellar(self):
    """Run the SEGOMOE driver on the Sellar MDA and print recorded objectives."""
    self.pb = pb = Problem(SellarMDA())
    pb.model.add_design_var("x", lower=0, upper=10)
    pb.model.add_design_var("z", lower=0, upper=10)
    pb.model.add_objective("obj")
    pb.model.add_constraint("con1", upper=0)
    pb.model.add_constraint("con2", upper=0)

    pb.driver = OneraSegoDriver(optimizer="SEGOMOE")
    pb.driver.opt_settings["maxiter"] = 10

    self.case_recorder_filename = "test_segomoe_driver_sellar.sqlite"
    pb.model.add_recorder(SqliteRecorder(self.case_recorder_filename))

    pb.setup()
    pb.run_driver()

    self.assertTrue(os.path.exists(self.case_recorder_filename))
    reader = CaseReader(self.case_recorder_filename)
    for case_id in reader.list_cases():
        print(reader.get_case(case_id).outputs["obj"])
"""
Plots objective and constraint histories from the recorded data in 'ru_mdp.sql'.
"""
from __future__ import print_function
from six.moves import range
import numpy as np
from matplotlib import pylab
from openmdao.api import CaseReader

# load cases from recording database
cr = CaseReader('ru_mdp.sql')
#cases = cr.get_cases('driver')
cases = cr.list_cases('driver')
case = cr.get_case(cases[0])
num_cases = len(cases)
# nothing to plot until at least one driver iteration has been recorded
if num_cases == 0:
    print('No data yet...')
    quit()
else:
    print('# cases:', num_cases)

# determine # of constraints (taken from the first recorded case)
constraints = list(case.get_constraints())
n_con = len(constraints)

# collect data into arrays for plotting
# Unbounded design variables (full float range) plus total-cost objective.
pb.model.add_design_var('p', lower=-sys.float_info.max, upper=sys.float_info.max)
pb.model.add_design_var('PayCap', lower=-sys.float_info.max, upper=sys.float_info.max)
pb.model.add_objective('C_TOTAL')
pb.setup()
pb.run_driver()

# In batch/parallel runs, skip the post-processing of the recording below.
if options.batch or options.parallel:
    exit(0)

# Re-read the recorded driver cases and repack values into numpy arrays,
# one row per recorded case.
reader = CaseReader(case_recorder_filename)
cases = reader.list_cases('driver')
n = len(cases)
data = {'inputs': {}, 'outputs': {}}
data['inputs']['LpA'] = np.zeros((n, ) + (1, ))
data['inputs']['Ns'] = np.zeros((n, ) + (1, ))
data['inputs']['p'] = np.zeros((n, ) + (1, ))
data['inputs']['PayCap'] = np.zeros((n, ) + (1, ))
data['outputs']['C_TOTAL'] = np.zeros((n, ) + (1, ))
for i in range(len(cases)):
    case = reader.get_case(cases[i])
    # values labeled "inputs" are read from case.outputs — presumably the
    # design vars are recorded as outputs of their source component (verify)
    data['inputs']['LpA'][i, :] = case.outputs['LpA']
    data['inputs']['Ns'][i, :] = case.outputs['Ns']
    data['inputs']['p'][i, :] = case.outputs['p']
# Design variables, objective and two inequality constraints.
pb.model.add_design_var('x', lower=0, upper=10)
pb.model.add_design_var('z', lower=0, upper=10)
pb.model.add_objective('f')
pb.model.add_constraint('g1', upper=0.)
pb.model.add_constraint('g2', upper=0.)
pb.setup()
pb.run_driver()

# In batch/parallel runs, skip the post-processing of the recording below.
if options.batch or options.parallel:
    exit(0)

# Re-read the top-level driver cases (recurse=False) and repack the values
# into numpy arrays, one row per recorded case.
reader = CaseReader(case_recorder_filename)
cases = reader.list_cases('driver', recurse=False)
n = len(cases)
data = {'inputs': {}, 'outputs': {}}
data['inputs']['x'] = np.zeros((n, ) + (1, ))
data['inputs']['z'] = np.zeros((n, ) + (2, ))  # 'z' holds two scalars
data['outputs']['f'] = np.zeros((n, ) + (1, ))
data['outputs']['g1'] = np.zeros((n, ) + (1, ))
data['outputs']['g2'] = np.zeros((n, ) + (1, ))
for i in range(len(cases)):
    case = reader.get_case(cases[i])
    # values labeled "inputs" are read from case.outputs — presumably the
    # design vars are recorded as outputs of their source component (verify)
    data['inputs']['x'][i, :] = case.outputs['x']
    data['inputs']['z'][i, :] = case.outputs['z']
    data['outputs']['f'][i, :] = case.outputs['f']
    data['outputs']['g1'][i, :] = case.outputs['g1']
    data['outputs']['g2'][i, :] = case.outputs['g2']
def test_brachistochrone_recording(self):
    """Solve the brachistochrone problem with recording attached at driver,
    model and phase level, then read the recording back and check the
    recorded control matches the in-memory solution."""
    import matplotlib
    matplotlib.use('Agg')  # headless backend so the test runs without a display
    from openmdao.api import Problem, Group, ScipyOptimizeDriver, DirectSolver, \
        SqliteRecorder, CaseReader
    from openmdao.utils.assert_utils import assert_rel_error
    from dymos import Phase, GaussLobatto
    from dymos.examples.brachistochrone.brachistochrone_ode import BrachistochroneODE

    p = Problem(model=Group())
    p.driver = ScipyOptimizeDriver()

    phase = Phase(ode_class=BrachistochroneODE,
                  transcription=GaussLobatto(num_segments=10))
    p.model.add_subsystem('phase0', phase)

    phase.set_time_options(initial_bounds=(0, 0), duration_bounds=(.5, 10))
    # endpoints of x and y are fixed; v is only fixed at the start
    phase.set_state_options('x', fix_initial=True, fix_final=True)
    phase.set_state_options('y', fix_initial=True, fix_final=True)
    phase.set_state_options('v', fix_initial=True)
    phase.add_control('theta', units='deg', rate_continuity=False,
                      lower=0.01, upper=179.9)
    phase.add_design_parameter('g', units='m/s**2', opt=False, val=9.80665)

    # Minimize time at the end of the phase
    phase.add_objective('time', loc='final', scaler=10)

    p.model.linear_solver = DirectSolver()

    # Recording: one recorder shared by driver, model and phase
    rec = SqliteRecorder('brachistochrone_solution.db')

    p.driver.recording_options['record_desvars'] = True
    p.driver.recording_options['record_responses'] = True
    p.driver.recording_options['record_objectives'] = True
    p.driver.recording_options['record_constraints'] = True
    p.model.recording_options['record_metadata'] = True

    p.driver.add_recorder(rec)
    p.model.add_recorder(rec)
    phase.add_recorder(rec)

    p.setup()

    p['phase0.t_initial'] = 0.0
    p['phase0.t_duration'] = 2.0

    # linear initial guesses for the states and control
    p['phase0.states:x'] = phase.interpolate(ys=[0, 10], nodes='state_input')
    p['phase0.states:y'] = phase.interpolate(ys=[10, 5], nodes='state_input')
    p['phase0.states:v'] = phase.interpolate(ys=[0, 9.9], nodes='state_input')
    p['phase0.controls:theta'] = phase.interpolate(ys=[5, 100.5],
                                                   nodes='control_input')

    # Solve for the optimal trajectory
    p.run_driver()

    # Test the results: final time should be ~1.8016 s for this setup
    assert_rel_error(self,
                     p.get_val('phase0.timeseries.time')[-1], 1.8016,
                     tolerance=1.0E-3)

    # Read back the last recorded 'root' system case and compare the recorded
    # theta control against the solution held in the Problem.
    cr = CaseReader('brachistochrone_solution.db')
    system_cases = cr.list_cases('root')
    case = cr.get_case(system_cases[-1])
    outputs = dict([(o[0], o[1]) for o in case.list_outputs(units=True,
                                                            shape=True,
                                                            out_stream=None)])
    assert_rel_error(
        self, p['phase0.controls:theta'],
        outputs['phase0.control_group.indep_controls.controls:theta']['value'])
# Initial control guesses: constant alpha, beta ramping from -75 deg to 0.
prob.set_val("traj.phase0.controls:alpha",
             phase0.interpolate(ys=[17.4*np.pi/180, 17.4*np.pi/180],
                                nodes="control_input"),
             units="rad")
prob.set_val("traj.phase0.controls:beta",
             phase0.interpolate(ys=[-75*np.pi/180, 0*np.pi/180],
                                nodes="control_input"),
             units="rad")

# Record driver iterations so the final constraint values can be read back.
recorder = SqliteRecorder("reentry.sql")
prob.driver.add_recorder(recorder)

prob.run_driver()
# prob.run_model()
sim_out = traj.simulate()
prob.cleanup()

# Read the last driver case to extract the recorded 'q' path constraint.
case_reader = CaseReader("reentry.sql")
driver_cases = case_reader.list_cases("driver")
last_case = case_reader.get_case(driver_cases[-1])
final_constraints = last_case.get_constraints()
final_q = final_constraints["traj.phase0.path_constraints.path:q"]

# Plot the optimized control histories (dots) vs. explicit simulation (line).
plt.figure(0)
plt.plot(prob.get_val("traj.phase0.timeseries.time", units="s"),
         prob.get_val("traj.phase0.timeseries.controls:alpha", units="deg"),
         "ro", label="Solution")
plt.plot(sim_out.get_val("traj.phase0.timeseries.time", units="s"),
         sim_out.get_val("traj.phase0.timeseries.controls:alpha", units="deg"),
         "b-", label="Simulation")
plt.title("Angle of Attack over Time")
plt.xlabel("Time (s)")
plt.ylabel("Angle of Attack (degrees)")
plt.legend()

plt.figure(1)
plt.plot(prob.get_val("traj.phase0.timeseries.time", units="s"),
         prob.get_val("traj.phase0.timeseries.controls:beta", units="deg"),
         "ro", label="Solution")
plt.plot(sim_out.get_val("traj.phase0.timeseries.time", units="s"),
         sim_out.get_val("traj.phase0.timeseries.controls:beta", units="deg"),
         "b-", label="Simulation")
def compute(self, inputs, outputs):
    # type: (Vector, Vector) -> None
    """Computation performed by the component: run the wrapped subdriver.

    Pushes the component inputs down into the sub-problem (either directly or
    as surrogate-model training data), runs the subdriver with a fresh case
    recorder, and pulls the results (scalars or DOE vectors) back up into the
    component outputs.

    Parameters
    ----------
    inputs : all inputs coming from outside the component in the group
    outputs : all outputs provided outside the component in the group"""
    # Define problem of subdriver
    p = self.prob
    m = p.model

    # Push global inputs down
    for input_name in m.model_constants:
        p[input_name] = inputs[input_name]

    failed_experiments = {}
    # sort to have outputs first
    sorted_model_super_inputs = sorted(m.model_super_inputs.keys(), reverse=True)
    for input_name in sorted_model_super_inputs:
        if input_name in m.sm_of_training_params.keys():
            # Add these inputs as training data for the matching surrogate model
            sm_uid = m.sm_of_training_params[input_name]
            pred_param = m.find_mapped_parameter(input_name,
                                                 m.sm_prediction_inputs[sm_uid] |
                                                 m.sm_prediction_outputs[sm_uid])
            sm_comp = getattr(m, str_to_valid_sys_name(sm_uid))
            if sm_uid not in failed_experiments.keys():
                failed_experiments[sm_uid] = (None, None)
            # postprocess_experiments filters out failed points and tracks them
            sm_comp.options['train:'+pred_param], failed_experiments[sm_uid]\
                = p.postprocess_experiments(inputs[input_name], input_name,
                                            failed_experiments[sm_uid])
        else:
            p[input_name] = inputs[input_name]

    # Provide message on failed experiments
    warn_about_failed_experiments(failed_experiments)

    # Set initial values of design variables back to original ones (to avoid
    # using values of last run)
    for des_var, attrbs in m.design_vars.items():
        p[des_var] = attrbs['initial']

    # Run the driver
    print('Running subdriver {}'.format(self.options['driver_uid']))
    if 'Sub-Optimizer' not in p.case_reader_path:
        # Attach a fresh recorder per loop so each run gets its own file
        p.driver.cleanup()
        basename, extension = os.path.splitext(p.case_reader_path)
        case_reader_filename = basename + '_loop' + str(self._run_count) + extension
        p.driver.add_recorder(SqliteRecorder(case_reader_filename))
        p.driver.recording_options['includes'] = ['*']
        p.driver.recording_options['record_model_metadata'] = True
        p.driver._setup_recording()
    p.run_driver()
    self._add_run_count()

    # Pull the value back up to the output array
    doe_out_vecs = {}
    for output_name in m.model_super_outputs:
        # Add these outputs as vectors based on DOE driver
        if output_name in m.doe_parameters.keys():
            doe_out_vecs[output_name] = []
        else:
            # scalar output: NaN signals a failed driver run to the caller
            if not p.driver.fail:
                outputs[output_name] = p[output_name]
            else:
                outputs[output_name] = float('nan')

    # If the driver failed (hence, optimization failed), then send message and clean
    if p.driver.fail:
        print('Driver run failed!')
        p.clean_driver_after_failure()

    # Provide DOE output vectors as output of the component, if this is expected
    if doe_out_vecs:
        # First read out the case reader
        cr = CaseReader(case_reader_filename)
        cases = cr.list_cases('driver')
        for n in range(len(cases)):
            cr_outputs = cr.get_case(n).outputs
            doe_param_matches = {}
            for output_name in doe_out_vecs.keys():
                doe_param_matches[output_name] = doe_param_match \
                    = m.find_mapped_parameter(output_name, cr_outputs.keys())
                doe_out_vecs[output_name].append(cr_outputs[doe_param_match][0])
        # Then write the final vectors to the global output array
        for output_name in doe_out_vecs.keys():
            # Design-variable samples are recorded scaled; unscale them first
            if output_name in p.doe_samples[p.driver_uid]['inputs']:
                des_var_match = m.find_mapped_parameter(output_name,
                                                        m._design_vars.keys())
                doe_out_vecs[output_name] = unscale_value(doe_out_vecs[output_name],
                                                          m._design_vars[des_var_match]['ref0'],
                                                          m._design_vars[des_var_match]['ref'])
            outputs[output_name] = np.array(doe_out_vecs[output_name])