def test_parab_FD(self):
    """Two 1-D parabolas under a ParallelGroup, optimized with forced FD (legacy API)."""
    model = Problem(impl=impl)
    root = model.root = Group()

    par = root.add("par", ParallelGroup())
    par.add("c1", Parab1D(root=2.0))
    par.add("c2", Parab1D(root=3.0))

    root.add("p1", ParamComp("x", val=0.0))
    root.add("p2", ParamComp("x", val=0.0))
    root.connect("p1.x", "par.c1.x")
    root.connect("p2.x", "par.c2.x")

    root.add("sumcomp", ExecComp("sum = x1+x2"))
    root.connect("par.c1.y", "sumcomp.x1")
    root.connect("par.c2.y", "sumcomp.x2")

    driver = model.driver = pyOptSparseDriver()
    driver.add_param("p1.x", low=-100, high=100)
    driver.add_param("p2.x", low=-100, high=100)
    driver.add_objective("sumcomp.sum")

    # force finite-difference derivatives at the root
    root.fd_options["force_fd"] = True

    model.setup(check=False)
    model.run()

    if not MPI or self.comm.rank == 0:
        # each parabola's minimum sits at its configured root value
        assert_rel_error(self, model["p1.x"], 2.0, 1.0e-6)
        assert_rel_error(self, model["p2.x"], 3.0, 1.0e-6)
def test_simple_paraboloid_scaled_objective_rev(self):
    """Paraboloid with a scaled objective, solved in reverse (adjoint) mode."""
    prob = Problem()
    root = prob.root = Group()

    root.add('p1', IndepVarComp('x', 50.0), promotes=['*'])
    root.add('p2', IndepVarComp('y', 50.0), promotes=['*'])
    root.add('comp', Paraboloid(), promotes=['*'])
    root.add('con', ExecComp('c = x - y'), promotes=['*'])

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = OPTIMIZER
    if OPTIMIZER == 'SNOPT':
        prob.driver.opt_settings['Verify level'] = 3
    prob.driver.options['print_results'] = False

    prob.driver.add_desvar('x', lower=-50.0, upper=50.0)
    prob.driver.add_desvar('y', lower=-50.0, upper=50.0)
    prob.driver.add_objective('f_xy', scaler=1 / 10.)
    prob.driver.add_constraint('c', lower=10.0, upper=11.0)

    root.ln_solver.options['mode'] = 'rev'

    prob.setup(check=False)
    prob.run()

    # Minimum should be at (7.166667, -7.833334)
    assert_rel_error(self, prob['x'] - prob['y'], 11.0, 1e-6)
def test_parab_FD_subbed_Pcomps(self):
    """Parabola points nested in sub-Groups under a ParallelGroup, forced FD (legacy API)."""
    model = Problem(impl=impl)
    root = model.root = Group()

    par = root.add("par", ParallelGroup())
    par.add("s1", MP_Point(root=2.0))
    par.add("s2", MP_Point(root=3.0))

    root.add("sumcomp", ExecComp("sum = x1+x2"))
    root.connect("par.s1.c.y", "sumcomp.x1")
    root.connect("par.s2.c.y", "sumcomp.x2")

    driver = model.driver = pyOptSparseDriver()
    driver.add_param("par.s1.p.x", low=-100, high=100)
    driver.add_param("par.s2.p.x", low=-100, high=100)
    driver.add_objective("sumcomp.sum")

    root.fd_options["force_fd"] = True

    model.setup(check=False)
    model.run()

    # each point's design variable lives on a different rank under MPI
    if not MPI or self.comm.rank == 0:
        assert_rel_error(self, model["par.s1.p.x"], 2.0, 1.0e-6)
    if not MPI or self.comm.rank == 1:
        assert_rel_error(self, model["par.s2.p.x"], 3.0, 1.0e-6)
def test_simple_paraboloid_equality_linear(self):
    """Paraboloid minimization subject to a linear equality constraint."""
    prob = Problem()
    root = prob.root = Group()

    root.add('p1', IndepVarComp('x', 50.0), promotes=['*'])
    root.add('p2', IndepVarComp('y', 50.0), promotes=['*'])
    root.add('comp', Paraboloid(), promotes=['*'])
    root.add('con', ExecComp('c = - x + y'), promotes=['*'])

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = OPTIMIZER
    if OPTIMIZER == 'SLSQP':
        prob.driver.opt_settings['ACC'] = 1e-9
    prob.driver.options['print_results'] = False

    prob.driver.add_desvar('x', lower=-50.0, upper=50.0)
    prob.driver.add_desvar('y', lower=-50.0, upper=50.0)
    prob.driver.add_objective('f_xy')
    prob.driver.add_constraint('c', equals=-15.0, linear=True)
    if OPTIMIZER == 'SNOPT':
        # there is currently a bug in SNOPT, it requires at least one
        # nonlinear inequality constraint, so provide a 'fake' one
        prob.driver.add_constraint('x', lower=-100.0)

    prob.setup(check=False)
    prob.run()

    # Minimum should be at (7.166667, -7.833334)
    assert_rel_error(self, prob['x'], 7.16667, 1e-6)
    assert_rel_error(self, prob['y'], -7.833334, 1e-6)
def test_simple_paraboloid_equality_linear(self):
    """Paraboloid minimization subject to a linear equality constraint."""
    # NOTE(review): this method is an exact duplicate of the identically-named
    # test above; the second definition shadows the first. Consider renaming
    # or removing one of them.
    prob = Problem()
    root = prob.root = Group()

    root.add('p1', IndepVarComp('x', 50.0), promotes=['*'])
    root.add('p2', IndepVarComp('y', 50.0), promotes=['*'])
    root.add('comp', Paraboloid(), promotes=['*'])
    root.add('con', ExecComp('c = - x + y'), promotes=['*'])

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = OPTIMIZER
    if OPTIMIZER == 'SLSQP':
        prob.driver.opt_settings['ACC'] = 1e-9
    prob.driver.options['print_results'] = False

    prob.driver.add_desvar('x', lower=-50.0, upper=50.0)
    prob.driver.add_desvar('y', lower=-50.0, upper=50.0)
    prob.driver.add_objective('f_xy')
    prob.driver.add_constraint('c', equals=-15.0, linear=True)
    if OPTIMIZER == 'SNOPT':
        # there is currently a bug in SNOPT, it requires at least one
        # nonlinear inequality constraint, so provide a 'fake' one
        prob.driver.add_constraint('x', lower=-100.0)

    prob.setup(check=False)
    prob.run()

    # Minimum should be at (7.166667, -7.833334)
    assert_rel_error(self, prob['x'], 7.16667, 1e-6)
    assert_rel_error(self, prob['y'], -7.833334, 1e-6)
def test_parab_subbed_Pcomps(self):
    """Parallel MP_Point sub-groups optimized with analytic derivatives."""
    model = Problem(impl=impl)
    root = model.root = Group()
    root.ln_solver = lin_solver()

    par = root.add('par', ParallelGroup())
    par.add('s1', MP_Point(root=2.0))
    par.add('s2', MP_Point(root=3.0))

    root.add('sumcomp', ExecComp('sum = x1+x2'))
    root.connect('par.s1.c.y', 'sumcomp.x1')
    root.connect('par.s2.c.y', 'sumcomp.x2')

    driver = model.driver = pyOptSparseDriver()
    driver.options['optimizer'] = OPTIMIZER
    driver.options['print_results'] = False
    driver.add_desvar('par.s1.p.x', lower=-100, upper=100)
    driver.add_desvar('par.s2.p.x', lower=-100, upper=100)
    driver.add_objective('sumcomp.sum')

    model.setup(check=False)
    model.run()

    # each point's design variable lives on a different rank under MPI
    if not MPI or self.comm.rank == 0:
        assert_rel_error(self, model['par.s1.p.x'], 2.0, 1.e-6)
    if not MPI or self.comm.rank == 1:
        assert_rel_error(self, model['par.s2.p.x'], 3.0, 1.e-6)
def run(typ="adjoint", m=300):
    """Run one MPPT MDP optimization with SNOPT and return wall-clock seconds.

    typ selects the derivative scheme: "fd" forces finite differences,
    "fwd" uses forward mode, anything else uses reverse (adjoint) mode.
    m is the problem-size parameter forwarded to MPPT_MDP.
    """
    t = time.time()

    model = Problem(impl=impl)
    model.root = MPPT_MDP(m)

    # add optimizer
    model.driver = pyOptSparseDriver()
    model.driver.options['optimizer'] = "SNOPT"
    model.driver.options['print_results'] = False
    model.driver.opt_settings = {
        'Major optimality tolerance': 1e-3,
        'Major feasibility tolerance': 1.0e-5,
        'Iterations limit': 500000000,
        "Verify level": -1,
    }

    model.driver.add_objective("perf.result")
    model.driver.add_desvar("pt0.param.CP_Isetpt", lower=0., upper=0.4)
    #model.driver.add_desvar("pt1.param.CP_Isetpt", lower=0., upper=0.4)

    if typ == "fd":
        model.root.fd_options['force_fd'] = True
    elif typ == "fwd":
        model.root.ln_solver.options['mode'] = "fwd"
    else:
        model.root.ln_solver.options['mode'] = "rev"

    model.setup(check=False)
    model.run()

    #os.rename("SNOPT_summary.out", "SNOPT_summary_%s_%d.out" % (typ, m))

    return time.time() - t
def test_driver_param_indices_snopt(self):
    """ Test driver param indices with pyOptSparse and force_fd=False
    """
    prob = Problem()
    prob.root = SellarStateConnection()
    prob.root.fd_options['force_fd'] = False

    prob.driver = pyOptSparseDriver()

    # only the first element of 'z' is a design variable
    prob.driver.add_desvar('z', low=np.array([-10.0]),
                           high=np.array([10.0]), indices=[0])
    prob.driver.add_desvar('x', low=0.0, high=10.0)

    prob.driver.add_objective('obj')
    prob.driver.add_constraint('con1', upper=0.0)
    prob.driver.add_constraint('con2', upper=0.0)

    prob.setup(check=False)

    prob['z'][1] = 0.0

    prob.run()

    assert_rel_error(self, prob['z'][0], 1.9776, 1e-3)
    # z[1] was not a desvar, so it must keep the value set before run()
    assert_rel_error(self, prob['z'][1], 0.0, 1e-3)
    assert_rel_error(self, prob['x'], 0.0, 1e-3)
def test_root_derivs_dict(self):
    """Verify derivatives recorded as dicts by the CSV recorder."""
    if OPT is None:
        raise unittest.SkipTest("pyoptsparse is not installed")
    if OPTIMIZER is None:
        raise unittest.SkipTest("pyoptsparse is not providing SNOPT or SLSQP")

    prob = Problem()
    prob.root = SellarDerivativesGrouped()

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = 'SLSQP'
    prob.driver.opt_settings['ACC'] = 1e-9
    prob.driver.options['print_results'] = False

    prob.driver.add_desvar('z', lower=np.array([-10.0, 0.0]),
                           upper=np.array([10.0, 10.0]))
    prob.driver.add_desvar('x', lower=0.0, upper=10.0)

    prob.driver.add_objective('obj')
    prob.driver.add_constraint('con1', upper=0.0)
    prob.driver.add_constraint('con2', upper=0.0)

    prob.driver.add_recorder(self.recorder)
    self.recorder.options['record_metadata'] = False
    self.recorder.options['record_derivs'] = True

    prob.setup(check=False)
    prob.run()
    prob.cleanup()

    self.io.seek(0)
    csv_reader = csv.DictReader(self.io)
    rows = [row for row in csv_reader]

    # execution
    row = rows[0]
    self.assertEqual(row['Derivatives'], '')

    # derivatives
    row = rows[1]
    self.assertEqual(row['obj'], '')
    # NOTE(review): eval() is only safe here because the CSV was produced
    # by this same test run; never eval() recorder output from elsewhere.
    J1 = eval(row['Derivatives'])[0]

    # reference Jacobian for the Sellar problem
    Jbase = {}
    Jbase['con1'] = {}
    Jbase['con1']['x'] = -0.98061433
    Jbase['con1']['z'] = np.array([-9.61002285, -0.78449158])
    Jbase['con2'] = {}
    Jbase['con2']['x'] = 0.09692762
    Jbase['con2']['z'] = np.array([1.94989079, 1.0775421 ])
    Jbase['obj'] = {}
    Jbase['obj']['x'] = 2.98061392
    Jbase['obj']['z'] = np.array([9.61001155, 1.78448534])

    for key1, val1 in Jbase.items():
        for key2, val2 in val1.items():
            assert_rel_error(self, J1[key1][key2], val2, .00001)
def test_parab_FD(self):
    """Two 1-D parabolas under a ParallelGroup, optimized with forced FD."""
    model = Problem(impl=impl)
    root = model.root = Group()

    par = root.add('par', ParallelGroup())
    par.add('c1', Parab1D(root=2.0))
    par.add('c2', Parab1D(root=3.0))

    root.add('p1', IndepVarComp('x', val=0.0))
    root.add('p2', IndepVarComp('x', val=0.0))
    root.connect('p1.x', 'par.c1.x')
    root.connect('p2.x', 'par.c2.x')

    root.add('sumcomp', ExecComp('sum = x1+x2'))
    root.connect('par.c1.y', 'sumcomp.x1')
    root.connect('par.c2.y', 'sumcomp.x2')

    driver = model.driver = pyOptSparseDriver()
    driver.options['optimizer'] = OPTIMIZER
    driver.options['print_results'] = False
    driver.add_desvar('p1.x', lower=-100, upper=100)
    driver.add_desvar('p2.x', lower=-100, upper=100)
    driver.add_objective('sumcomp.sum')

    # force finite-difference derivatives at the root
    root.fd_options['force_fd'] = True

    model.setup(check=False)
    model.run()

    if not MPI or self.comm.rank == 0:
        # each parabola's minimum sits at its configured root value
        assert_rel_error(self, model['p1.x'], 2.0, 1.e-6)
        assert_rel_error(self, model['p2.x'], 3.0, 1.e-6)
def test_simple_paraboloid_lower(self):
    """Paraboloid minimization with a lower-bounded inequality constraint."""
    prob = Problem()
    root = prob.root = Group()

    root.add('p1', IndepVarComp('x', 50.0), promotes=['*'])
    root.add('p2', IndepVarComp('y', 50.0), promotes=['*'])
    root.add('comp', Paraboloid(), promotes=['*'])
    root.add('con', ExecComp('c = x - y'), promotes=['*'])

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = OPTIMIZER
    if OPTIMIZER == 'SLSQP':
        prob.driver.opt_settings['ACC'] = 1e-9
    prob.driver.options['print_results'] = False

    prob.driver.add_desvar('x', lower=-50.0, upper=50.0)
    prob.driver.add_desvar('y', lower=-50.0, upper=50.0)
    prob.driver.add_objective('f_xy')
    prob.driver.add_constraint('c', lower=15.0)

    prob.setup(check=False)
    prob.run()

    # Minimum should be at (7.166667, -7.833334)
    assert_rel_error(self, prob['x'], 7.16667, 1e-6)
    assert_rel_error(self, prob['y'], -7.833334, 1e-6)
def test_inf_as_desvar_bounds(self):
    # User may use np.inf as a bound. It is unneccessary, but the user
    # may do it anyway, so make sure SLSQP doesn't blow up with it (bug
    # reported by rfalck)
    prob = Problem()
    root = prob.root = Group()

    root.add('p1', IndepVarComp('x', 50.0), promotes=['*'])
    root.add('p2', IndepVarComp('y', 50.0), promotes=['*'])
    root.add('comp', Paraboloid(), promotes=['*'])
    root.add('con', ExecComp('c = - x + y'), promotes=['*'])

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = 'SLSQP'
    prob.driver.opt_settings['ACC'] = 1e-9
    prob.driver.options['print_results'] = False

    # infinite bounds are the point of this test
    prob.driver.add_desvar('x', lower=-np.inf, upper=np.inf)
    prob.driver.add_desvar('y', lower=-50.0, upper=50.0)
    prob.driver.add_objective('f_xy')
    prob.driver.add_constraint('c', upper=-15.0)

    prob.setup(check=False)
    prob.run()

    # Minimum should be at (7.166667, -7.833334)
    assert_rel_error(self, prob['x'], 7.16667, 1e-6)
    assert_rel_error(self, prob['y'], -7.833334, 1e-6)
def test_pbo_desvar_slsqp(self):
    """A pass_by_obj desvar must be rejected by a gradient-based optimizer."""
    if pyOptSparseDriver is None:
        raise unittest.SkipTest("pyOptSparse not installed")

    top = Problem()
    root = top.root = Group()

    root.add('p1', IndepVarComp('x', u'var_x', pass_by_obj=True))
    root.add('p2', IndepVarComp('y', -4.0))
    root.add('p', PassByObjParaboloid())

    root.connect('p1.x', 'p.x')
    root.connect('p2.y', 'p.y')

    top.driver = pyOptSparseDriver()
    top.driver.options['optimizer'] = 'SLSQP'
    top.driver.add_desvar('p1.x')
    top.driver.add_desvar('p2.y')
    top.driver.add_objective('p.f_xy')

    # setup() should raise because 'p1.x' is pass_by_obj
    try:
        top.setup(check=False)
    except Exception as err:
        self.assertEqual(str(err),
                         "Parameter 'p1.x' is a 'pass_by_obj' variable and "
                         "can't be used with a gradient based driver of type 'SLSQP'.")
    else:
        self.fail("Exception expected")
def test_raised_error_sensfunc(self):
    # Component fails hard this time during gradient eval, so we expect
    # pyoptsparse to raise.
    prob = Problem()
    root = prob.root = Group()

    root.add('p1', IndepVarComp('x', 50.0), promotes=['*'])
    root.add('p2', IndepVarComp('y', 50.0), promotes=['*'])
    root.add('comp', ParaboloidAE(), promotes=['*'])
    root.add('con', ExecComp('c = - x + y'), promotes=['*'])

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = OPTIMIZER
    if OPTIMIZER == 'SLSQP':
        prob.driver.opt_settings['ACC'] = 1e-9
    prob.driver.options['print_results'] = False

    prob.driver.add_desvar('x', lower=-50.0, upper=50.0)
    prob.driver.add_desvar('y', lower=-50.0, upper=50.0)
    prob.driver.add_objective('f_xy')
    prob.driver.add_constraint('c', upper=-15.0)

    # fail hard on the 2nd gradient evaluation; never fail function evals
    prob.root.comp.fail_hard = True
    prob.root.comp.grad_fail_at = 2
    prob.root.comp.eval_fail_at = 100

    prob.setup(check=False)

    with self.assertRaises(Exception) as err:
        prob.run()
def test_parab_FD_subbed_Pcomps(self):
    """Parabola points nested in sub-Groups, forced FD (legacy add_param API)."""
    model = Problem(impl=impl)
    root = model.root = Group()

    par = root.add('par', ParallelGroup())
    par.add('s1', MP_Point(root=2.0))
    par.add('s2', MP_Point(root=3.0))

    root.add('sumcomp', ExecComp('sum = x1+x2'))
    root.connect('par.s1.c.y', 'sumcomp.x1')
    root.connect('par.s2.c.y', 'sumcomp.x2')

    driver = model.driver = pyOptSparseDriver()
    driver.add_param('par.s1.p.x', low=-100, high=100)
    driver.add_param('par.s2.p.x', low=-100, high=100)
    driver.add_objective('sumcomp.sum')

    root.fd_options['force_fd'] = True

    model.setup(check=False)
    model.run()

    # each point's design variable lives on a different rank under MPI
    if not MPI or self.comm.rank == 0:
        assert_rel_error(self, model['par.s1.p.x'], 2.0, 1.e-6)
    if not MPI or self.comm.rank == 1:
        assert_rel_error(self, model['par.s2.p.x'], 3.0, 1.e-6)
def test_simple_paraboloid_scaled_objective_rev(self):
    """Paraboloid with a scaled objective, solved in reverse (adjoint) mode."""
    prob = Problem()
    root = prob.root = Group()

    root.add('p1', IndepVarComp('x', 50.0), promotes=['*'])
    root.add('p2', IndepVarComp('y', 50.0), promotes=['*'])
    root.add('comp', Paraboloid(), promotes=['*'])
    root.add('con', ExecComp('c = x - y'), promotes=['*'])

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = OPTIMIZER
    if OPTIMIZER == 'SNOPT':
        prob.driver.opt_settings['Verify level'] = 3
    prob.driver.options['print_results'] = False

    prob.driver.add_desvar('x', lower=-50.0, upper=50.0)
    prob.driver.add_desvar('y', lower=-50.0, upper=50.0)
    prob.driver.add_objective('f_xy', scaler=1/10.)
    prob.driver.add_constraint('c', lower=10.0, upper=11.0)

    root.ln_solver.options['mode'] = 'rev'

    prob.setup(check=False)
    prob.run()

    # Minimum should be at (7.166667, -7.833334)
    assert_rel_error(self, prob['x'] - prob['y'], 11.0, 1e-6)
def test_driver_param_indices_force_fd(self):
    """ Test driver param indices with pyOptSparse and force_fd=True
    """
    prob = Problem()
    prob.root = SellarStateConnection()
    prob.root.fd_options['force_fd'] = True

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = OPTIMIZER
    prob.driver.options['print_results'] = False

    # only the first element of 'z' is a design variable
    prob.driver.add_desvar('z', lower=np.array([-10.0]),
                           upper=np.array([10.0]), indices=[0])
    prob.driver.add_desvar('x', lower=0.0, upper=10.0)

    prob.driver.add_objective('obj')
    prob.driver.add_constraint('con1', upper=0.0)
    prob.driver.add_constraint('con2', upper=0.0)
    #prob.driver.options['disp'] = False

    prob.setup(check=False)

    prob['z'][1] = 0.0

    prob.run()

    assert_rel_error(self, prob['z'][0], 1.9776, 1e-3)
    # z[1] was not a desvar, so it must keep the value set before run()
    assert_rel_error(self, prob['z'][1], 0.0, 1e-3)
    assert_rel_error(self, prob['x'], 0.0, 1e-3)
def setUp(self):
    """Build two serial array sub-problems under one ParallelGroup (promoted vars)."""
    if OPT is None:
        raise unittest.SkipTest("pyoptsparse is not installed")
    if OPTIMIZER is None:
        raise unittest.SkipTest("pyoptsparse is not providing SNOPT or SLSQP")

    prob = Problem(impl=impl)
    root = prob.root = Group()
    #root.ln_solver = lin_solver()
    root.ln_solver = LinearGaussSeidel()

    par = root.add('par', ParallelGroup())
    par.ln_solver = LinearGaussSeidel()

    ser1 = par.add('ser1', Group())
    ser1.ln_solver = LinearGaussSeidel()
    ser1.add('p1', IndepVarComp('x', np.zeros([2])), promotes=['x'])
    ser1.add('comp', SimpleArrayComp(), promotes=['x', 'y'])
    ser1.add('con', ExecComp('c = y - 20.0', c=np.array([0.0, 0.0]),
                             y=np.array([0.0, 0.0])),
             promotes=['c', 'y'])
    ser1.add('obj', ExecComp('o = y[0]', y=np.array([0.0, 0.0])),
             promotes=['y', 'o'])

    ser2 = par.add('ser2', Group())
    ser2.ln_solver = LinearGaussSeidel()
    ser2.add('p1', IndepVarComp('x', np.zeros([2])), promotes=['x'])
    ser2.add('comp', SimpleArrayComp(), promotes=['x', 'y'])
    ser2.add('con', ExecComp('c = y - 30.0', c=np.array([0.0, 0.0]),
                             y=np.array([0.0, 0.0])),
             promotes=['c', 'y'])
    ser2.add('obj', ExecComp('o = y[0]', y=np.array([0.0, 0.0])),
             promotes=['o', 'y'])

    root.add('total', ExecComp('obj = x1 + x2'))
    root.connect('par.ser1.o', 'total.x1')
    root.connect('par.ser2.o', 'total.x2')

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = OPTIMIZER
    prob.driver.options['print_results'] = False
    prob.driver.add_desvar('par.ser1.x', lower=-50.0, upper=50.0)
    prob.driver.add_desvar('par.ser2.x', lower=-50.0, upper=50.0)

    prob.driver.add_objective('total.obj')
    prob.driver.add_constraint('par.ser1.c', equals=0.0)
    prob.driver.add_constraint('par.ser2.c', equals=0.0)

    self.prob = prob
def test_record_derivs_dicts(self):
    """Verify derivatives recorded as dicts in the HDF5 recorder output."""
    if OPT is None:
        raise unittest.SkipTest("pyoptsparse is not installed")
    if OPTIMIZER is None:
        raise unittest.SkipTest("pyoptsparse is not providing SNOPT or SLSQP")

    prob = Problem()
    prob.root = SellarDerivativesGrouped()

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = 'SLSQP'
    prob.driver.opt_settings['ACC'] = 1e-9
    prob.driver.options['print_results'] = False

    prob.driver.add_desvar('z', lower=np.array([-10.0, 0.0]),
                           upper=np.array([10.0, 10.0]))
    prob.driver.add_desvar('x', lower=0.0, upper=10.0)

    prob.driver.add_objective('obj')
    prob.driver.add_constraint('con1', upper=0.0)
    prob.driver.add_constraint('con2', upper=0.0)

    prob.driver.add_recorder(self.recorder)
    self.recorder.options['record_metadata'] = False
    self.recorder.options['record_derivs'] = True

    prob.setup(check=False)
    prob.run()
    prob.cleanup()

    hdf = h5py.File(self.filename, 'r')

    deriv_group = hdf['rank0:SLSQP|1']['Derivs']
    self.assertEqual(deriv_group.attrs['success'],1)
    self.assertEqual(deriv_group.attrs['msg'],'')

    J1 = deriv_group['Derivatives']

    # reference Jacobian for the Sellar problem
    Jbase = {}
    Jbase['con1'] = {}
    Jbase['con1']['x'] = -0.98061433
    Jbase['con1']['z'] = np.array([-9.61002285, -0.78449158])
    Jbase['con2'] = {}
    Jbase['con2']['x'] = 0.09692762
    Jbase['con2']['z'] = np.array([1.94989079, 1.0775421 ])
    Jbase['obj'] = {}
    Jbase['obj']['x'] = 2.98061392
    Jbase['obj']['z'] = np.array([9.61001155, 1.78448534])

    for key1, val1 in Jbase.items():
        for key2, val2 in val1.items():
            assert_rel_error(self, J1[key1][key2][:], val2, .00001)

    hdf.close()
def test_simple_driver_recording(self, m):
    """Record desvars/objectives/constraints through the WebRecorder and verify."""
    self.setup_endpoints(m)
    recorder = WebRecorder(self._accepted_token, suppress_output=True)

    prob = Problem()
    model = prob.model = Group()

    model.add_subsystem('p1', IndepVarComp('x', 50.0), promotes=['*'])
    model.add_subsystem('p2', IndepVarComp('y', 50.0), promotes=['*'])
    model.add_subsystem('comp', Paraboloid(), promotes=['*'])
    model.add_subsystem('con', ExecComp('c = - x + y'), promotes=['*'])
    model.suppress_solver_output = True

    prob.driver = pyOptSparseDriver()
    prob.driver.add_recorder(recorder)
    prob.driver.recording_options['record_desvars'] = True
    prob.driver.recording_options['record_responses'] = True
    prob.driver.recording_options['record_objectives'] = True
    prob.driver.recording_options['record_constraints'] = True

    prob.driver.options['optimizer'] = OPTIMIZER
    if OPTIMIZER == 'SLSQP':
        prob.driver.opt_settings['ACC'] = 1e-9

    model.add_design_var('x', lower=-50.0, upper=50.0)
    model.add_design_var('y', lower=-50.0, upper=50.0)
    model.add_objective('f_xy')
    model.add_constraint('c', upper=-15.0)

    prob.setup(check=False)
    t0, t1 = run_driver(prob)
    prob.cleanup()

    driver_iteration_data = json.loads(self.driver_iteration_data)

    # expected optimum of the constrained paraboloid
    expected_desvars = [
        {'name': 'p1.x', 'values': [7.1666666]},
        {'name': 'p2.y', 'values': [-7.8333333]}
    ]
    expected_objectives = [
        {'name': 'comp.f_xy', 'values': [-27.083333]}
    ]
    expected_constraints = [
        {'name': 'con.c', 'values': [-15.0]}
    ]

    for d in expected_desvars:
        self.assert_array_close(d, driver_iteration_data['desvars'])
    for o in expected_objectives:
        self.assert_array_close(o, driver_iteration_data['objectives'])
    for c in expected_constraints:
        self.assert_array_close(c, driver_iteration_data['constraints'])
def test_multipoint_with_coloring(self):
    """Multipoint model with dynamic simultaneous-derivative coloring."""
    size = 10
    num_pts = self.N_PROCS

    np.random.seed(11)

    p = Problem()
    p.driver = pyOptSparseDriver()
    p.driver.options['optimizer'] = OPTIMIZER
    p.driver.options['dynamic_simul_derivs'] = True
    if OPTIMIZER == 'SNOPT':
        p.driver.opt_settings['Major iterations limit'] = 100
        p.driver.opt_settings['Major feasibility tolerance'] = 1.0E-6
        p.driver.opt_settings['Major optimality tolerance'] = 1.0E-6
        p.driver.opt_settings['iSumm'] = 6

    model = p.model
    for i in range(num_pts):
        model.add_subsystem('indep%d' % i, IndepVarComp('x', val=np.ones(size)))
        model.add_design_var('indep%d.x' % i)

    par1 = model.add_subsystem('par1', ParallelGroup())
    for i in range(num_pts):
        mat = _get_mat(5, size)
        par1.add_subsystem('comp%d' % i,
                           ExecComp('y=A.dot(x)', A=mat, x=np.ones(size), y=np.ones(5)))
        model.connect('indep%d.x' % i, 'par1.comp%d.x' % i)

    par2 = model.add_subsystem('par2', ParallelGroup())
    for i in range(num_pts):
        mat = _get_mat(size, 5)
        par2.add_subsystem('comp%d' % i,
                           ExecComp('y=A.dot(x)', A=mat, x=np.ones(5), y=np.ones(size)))
        model.connect('par1.comp%d.y' % i, 'par2.comp%d.x' % i)
        par2.add_constraint('comp%d.y' % i, lower=-1.)

        model.add_subsystem('normcomp%d' % i, ExecComp("y=sum(x*x)", x=np.ones(size)))
        model.connect('par2.comp%d.y' % i, 'normcomp%d.x' % i)

    model.add_subsystem('obj',
                        ExecComp("y=" + '+'.join(['x%d' % i for i in range(num_pts)])))

    for i in range(num_pts):
        model.connect('normcomp%d.y' % i, 'obj.x%d' % i)

    model.add_objective('obj.y')

    p.setup()
    p.run_driver()

    J = p.compute_totals()

    # compare totals against the composed local matrices on whichever
    # process owns each par2 component
    for i in range(num_pts):
        vname = 'par2.comp%d.A' % i
        if vname in model._var_abs_names['input']:
            norm = np.linalg.norm(J['par2.comp%d.y'%i,'indep%d.x'%i] -
                                  getattr(par2, 'comp%d'%i)._inputs['A'].dot(
                                      getattr(par1, 'comp%d'%i)._inputs['A']))
            self.assertLess(norm, 1.e-7)
        elif vname not in model._var_allprocs_abs_names['input']:
            self.fail("Can't find variable par2.comp%d.A" % i)
def test_record_derivs_dicts(self):
    """Verify derivatives recorded as dicts in the HDF5 recorder output."""
    if OPT is None:
        raise unittest.SkipTest("pyoptsparse is not installed")
    if OPTIMIZER is None:
        raise unittest.SkipTest("pyoptsparse is not providing SNOPT or SLSQP")

    prob = Problem()
    prob.root = SellarDerivativesGrouped()

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = 'SLSQP'
    prob.driver.opt_settings['ACC'] = 1e-9
    prob.driver.options['print_results'] = False

    prob.driver.add_desvar('z', lower=np.array([-10.0, 0.0]),
                           upper=np.array([10.0, 10.0]))
    prob.driver.add_desvar('x', lower=0.0, upper=10.0)

    prob.driver.add_objective('obj')
    prob.driver.add_constraint('con1', upper=0.0)
    prob.driver.add_constraint('con2', upper=0.0)

    prob.driver.add_recorder(self.recorder)
    self.recorder.options['record_metadata'] = False
    self.recorder.options['record_derivs'] = True

    prob.setup(check=False)
    prob.run()
    prob.cleanup()

    hdf = h5py.File(self.filename, 'r')

    deriv_group = hdf['rank0:SLSQP/1']['deriv']
    self.assertEqual(deriv_group.attrs['success'], 1)
    self.assertEqual(deriv_group.attrs['msg'], '')

    J1 = deriv_group['Derivatives']

    # reference Jacobian for the Sellar problem
    Jbase = {}
    Jbase['con1'] = {}
    Jbase['con1']['x'] = -0.98061433
    Jbase['con1']['z'] = np.array([-9.61002285, -0.78449158])
    Jbase['con2'] = {}
    Jbase['con2']['x'] = 0.09692762
    Jbase['con2']['z'] = np.array([1.94989079, 1.0775421])
    Jbase['obj'] = {}
    Jbase['obj']['x'] = 2.98061392
    Jbase['obj']['z'] = np.array([9.61001155, 1.78448534])

    for key1, val1 in Jbase.items():
        for key2, val2 in val1.items():
            assert_rel_error(self, J1[key1][key2][:], val2, .00001)
def test_multipoint_with_coloring(self):
    """Multipoint model with dynamic simultaneous-derivative coloring."""
    size = 10
    num_pts = self.N_PROCS

    np.random.seed(11)

    p = Problem()
    p.driver = pyOptSparseDriver()
    p.driver.options['optimizer'] = OPTIMIZER
    p.driver.options['dynamic_simul_derivs'] = True
    if OPTIMIZER == 'SNOPT':
        p.driver.opt_settings['Major iterations limit'] = 100
        p.driver.opt_settings['Major feasibility tolerance'] = 1.0E-6
        p.driver.opt_settings['Major optimality tolerance'] = 1.0E-6
        p.driver.opt_settings['iSumm'] = 6

    model = p.model
    for i in range(num_pts):
        model.add_subsystem('indep%d' % i, IndepVarComp('x', val=np.ones(size)))
        model.add_design_var('indep%d.x' % i)

    par1 = model.add_subsystem('par1', ParallelGroup())
    for i in range(num_pts):
        mat = _get_mat(5, size)
        par1.add_subsystem('comp%d' % i,
                           ExecComp('y=A.dot(x)', A=mat, x=np.ones(size), y=np.ones(5)))
        model.connect('indep%d.x' % i, 'par1.comp%d.x' % i)

    par2 = model.add_subsystem('par2', ParallelGroup())
    for i in range(num_pts):
        mat = _get_mat(size, 5)
        par2.add_subsystem('comp%d' % i,
                           ExecComp('y=A.dot(x)', A=mat, x=np.ones(5), y=np.ones(size)))
        model.connect('par1.comp%d.y' % i, 'par2.comp%d.x' % i)
        par2.add_constraint('comp%d.y' % i, lower=-1.)

        model.add_subsystem('normcomp%d' % i, ExecComp("y=sum(x*x)", x=np.ones(size)))
        model.connect('par2.comp%d.y' % i, 'normcomp%d.x' % i)

    model.add_subsystem('obj',
                        ExecComp("y=" + '+'.join(['x%d' % i for i in range(num_pts)])))

    for i in range(num_pts):
        model.connect('normcomp%d.y' % i, 'obj.x%d' % i)

    model.add_objective('obj.y')

    p.setup()
    p.run_driver()

    J = p.compute_totals()

    # compare totals against the composed local matrices on whichever
    # process owns each par2 component
    for i in range(num_pts):
        vname = 'par2.comp%d.A' % i
        if vname in model._var_abs_names['input']:
            norm = np.linalg.norm(J['par2.comp%d.y'%i,'indep%d.x'%i] -
                                  getattr(par2, 'comp%d'%i)._inputs['A'].dot(
                                      getattr(par1, 'comp%d'%i)._inputs['A']))
            self.assertLess(norm, 1.e-7)
        elif vname not in model._var_allprocs_abs_names['input']:
            self.fail("Can't find variable par2.comp%d.A" % i)
def setUp(self):
    """Build two serial array sub-problems with explicit connections."""
    if OPT is None:
        raise unittest.SkipTest("pyoptsparse is not installed")
    if OPTIMIZER is None:
        raise unittest.SkipTest("pyoptsparse is not providing SNOPT or SLSQP")

    self.concurrent_setUp(prefix='par_mpi_opt-')

    prob = Problem(impl=impl)
    root = prob.root = Group()
    #root.ln_solver = lin_solver()
    root.ln_solver = LinearGaussSeidel()

    par = root.add('par', ParallelGroup())
    par.ln_solver = LinearGaussSeidel()

    ser1 = par.add('ser1', Group())
    ser1.ln_solver = LinearGaussSeidel()
    ser1.add('p1', IndepVarComp('x', np.zeros([2])))
    ser1.add('comp', SimpleArrayComp())
    ser1.add('con', ExecComp('c = y - 20.0', c=np.array([0.0, 0.0]),
                             y=np.array([0.0, 0.0])))
    ser1.add('obj', ExecComp('o = y[0]', y=np.array([0.0, 0.0])))

    ser2 = par.add('ser2', Group())
    ser2.ln_solver = LinearGaussSeidel()
    ser2.add('p1', IndepVarComp('x', np.zeros([2])))
    ser2.add('comp', SimpleArrayComp())
    ser2.add('con', ExecComp('c = y - 30.0', c=np.array([0.0, 0.0]),
                             y=np.array([0.0, 0.0])))
    ser2.add('obj', ExecComp('o = y[0]', y=np.array([0.0, 0.0])))

    root.add('total', ExecComp('obj = x1 + x2'))

    # explicit wiring inside each serial sub-group
    ser1.connect('p1.x', 'comp.x')
    ser1.connect('comp.y', 'con.y')
    ser1.connect('comp.y', 'obj.y')
    root.connect('par.ser1.obj.o', 'total.x1')

    ser2.connect('p1.x', 'comp.x')
    ser2.connect('comp.y', 'con.y')
    ser2.connect('comp.y', 'obj.y')
    root.connect('par.ser2.obj.o', 'total.x2')

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = OPTIMIZER
    prob.driver.options['print_results'] = False
    prob.driver.add_desvar('par.ser1.p1.x', lower=-50.0, upper=50.0)
    prob.driver.add_desvar('par.ser2.p1.x', lower=-50.0, upper=50.0)

    prob.driver.add_objective('total.obj')
    prob.driver.add_constraint('par.ser1.con.c', equals=0.0)
    prob.driver.add_constraint('par.ser2.con.c', equals=0.0)

    self.prob = prob
def magneplane_brachistochrone(solver='SLSQP', num_seg=3, seg_ncn=3):
    """Build the magneplane brachistochrone trajectory optimization Problem.

    solver selects SNOPT (via pyOptSparse) or a SciPy optimizer; num_seg and
    seg_ncn set the collocation discretization. Returns the configured Problem.
    """
    prob = Problem()
    traj = prob.add_traj(Trajectory("traj0"))

    if solver == 'SNOPT':
        if pyOptSparseDriver is None:
            raise ValueError('Requested SNOPT but pyoptsparse is not available')
        driver = pyOptSparseDriver()
        driver.options['optimizer'] = solver
        driver.opt_settings['Major iterations limit'] = 1000
        driver.opt_settings['iSumm'] = 6
        driver.opt_settings['Major step limit'] = 0.5
        driver.opt_settings["Major feasibility tolerance"] = 1.0E-6
        driver.opt_settings["Major optimality tolerance"] = 1.0E-6
        driver.opt_settings["Minor feasibility tolerance"] = 1.0E-4
        driver.opt_settings['Verify level'] = 3
    else:
        driver = ScipyOptimizer()
        driver.options['tol'] = 1.0E-6
        driver.options['disp'] = True
        driver.options['maxiter'] = 500
    prob.driver = driver

    dynamic_controls = [
        {'name': 'g', 'units': 'm/s**2'},
        {'name': 'T', 'units': 'N'},
        {'name': 'D', 'units': 'N'},
        {'name': 'mass', 'units': 'kg'},
        {'name': 'psi', 'units': 'rad'},
        {'name': 'theta', 'units': 'rad'},
        {'name': 'phi', 'units': 'rad'},
    ]

    phase0 = CollocationPhase(name='phase0', rhs_class=MagneplaneRHS,
                              num_seg=num_seg, seg_ncn=seg_ncn,
                              rel_lengths="equal",
                              dynamic_controls=dynamic_controls,
                              static_controls=None)
    traj.add_phase(phase0)

    # states: fixed endpoints except the final speed, which is free
    phase0.set_state_options('x', lower=0, upper=10, ic_val=0, ic_fix=True,
                             fc_val=10, fc_fix=True, defect_scaler=0.1)
    phase0.set_state_options('y', lower=0, upper=0, ic_val=0, ic_fix=True,
                             fc_val=0, fc_fix=True, defect_scaler=0.1)
    phase0.set_state_options('z', lower=-10, upper=0, ic_val=-10, ic_fix=True,
                             fc_val=-5, fc_fix=True, defect_scaler=0.1)
    phase0.set_state_options('v', lower=0, upper=np.inf, ic_val=0.0, ic_fix=True,
                             fc_val=10.0, fc_fix=False, defect_scaler=0.1)

    # only theta is an optimized control; the rest are held fixed
    phase0.set_dynamic_control_options(name='psi',
                                       val=phase0.node_space(0.0, 0.0), opt=False)
    phase0.set_dynamic_control_options(name='theta',
                                       val=phase0.node_space(-.46, -.46), opt=True,
                                       lower=-1.57, upper=1.57, scaler=1.0)
    phase0.set_dynamic_control_options(name='phi',
                                       val=phase0.node_space(0.0, 0.0), opt=False)
    phase0.set_dynamic_control_options(name='g',
                                       val=phase0.node_space(9.80665, 9.80665),
                                       opt=False)
    phase0.set_dynamic_control_options(name='T',
                                       val=phase0.node_space(0.0, 0.0), opt=False)
    phase0.set_dynamic_control_options(name='D',
                                       val=phase0.node_space(0.0, 0.0), opt=False)
    phase0.set_dynamic_control_options(name='mass',
                                       val=phase0.node_space(1000.0, 1000.0),
                                       opt=False)

    phase0.set_time_options(t0_val=0, t0_lower=0, t0_upper=0,
                            tp_val=2.0, tp_lower=0.5, tp_upper=10.0)

    # brachistochrone objective: minimize final time
    traj.add_objective(name="t", phase="phase0", place="end", scaler=1.0)

    return prob
def test_reading_system_metadata(self):
    """CaseReader must return recorded system options and scaling factors."""
    if OPT is None:
        raise unittest.SkipTest("pyoptsparse is not installed")
    if OPTIMIZER is None:
        raise unittest.SkipTest("pyoptsparse is not providing SNOPT or SLSQP")

    self.setup_sellar_grouped_scaled_model()

    self.prob.driver = pyOptSparseDriver()
    self.prob.driver.options['optimizer'] = OPTIMIZER

    # declare options at three levels of the model tree
    self.prob.model.options.declare("test1", 1)
    self.prob.model.mda.d1.options.declare("test2", "2")
    self.prob.model.pz.options.declare("test3", True)
    # excluding everything on pz means test3 must NOT be recorded
    self.prob.model.pz.recording_options['options_excludes'] = ['*']

    if OPTIMIZER == 'SLSQP':
        self.prob.driver.opt_settings['ACC'] = 1e-9

    self.prob.model.recording_options['record_inputs'] = True
    self.prob.model.recording_options['record_outputs'] = True
    self.prob.model.recording_options['record_residuals'] = True
    self.prob.model.recording_options['record_metadata'] = True
    self.prob.model.add_recorder(self.recorder)

    pz = self.prob.model.pz  # IndepVarComp which is an ExplicitComponent
    pz.add_recorder(self.recorder)

    mda = self.prob.model.mda  # Group
    d1 = mda.d1
    d1.add_recorder(self.recorder)

    self.prob.setup(check=False, mode='rev')
    self.prob.run_driver()
    self.prob.cleanup()

    cr = CaseReader(self.filename)

    self.assertEqual(
        sorted(cr.system_metadata.keys()),
        sorted(['root', 'mda.d1', 'pz'])
    )

    self.assertEqual(cr.system_metadata['root']['component_options']['test1'], 1)
    self.assertEqual(cr.system_metadata['mda.d1']['component_options']['test2'], "2")
    self.assertFalse('test3' in cr.system_metadata['pz']['component_options'])

    assert_rel_error(
        self,
        cr.system_metadata['pz']['scaling_factors']['output']['nonlinear']['phys'][0][1],
        [2.0, 2.0], 1.0e-3)
def setUp(self):
    """Asymmetric parallel model: ser1 fully constrained, ser2 constrained at root."""
    if OPT is None:
        raise unittest.SkipTest("pyoptsparse is not installed")
    if OPTIMIZER is None:
        raise unittest.SkipTest("pyoptsparse is not providing SNOPT or SLSQP")

    prob = Problem(impl=impl)
    root = prob.root = Group()
    #root.ln_solver = lin_solver()  # this works too (PetscKSP)
    root.ln_solver = LinearGaussSeidel()

    par = root.add('par', ParallelGroup())
    par.ln_solver = LinearGaussSeidel()

    ser1 = par.add('ser1', Group())
    ser1.ln_solver = LinearGaussSeidel()
    ser1.add('p1', IndepVarComp('x', np.zeros([2])), promotes=['*'])
    ser1.add('comp', SimpleArrayComp(), promotes=['*'])
    ser1.add('con', ExecComp4Test('c = y - 20.0',
                                  # lin_delay=.1,
                                  c=np.array([0.0, 0.0]),
                                  y=np.array([0.0, 0.0])),
             promotes=['c', 'y'])
    ser1.add('obj', ExecComp4Test('o = y[0]',
                                  # lin_delay=.1,
                                  y=np.array([0.0, 0.0])),
             promotes=['y', 'o'])

    ser2 = par.add('ser2', Group())
    ser2.ln_solver = LinearGaussSeidel()
    ser2.add('p1', IndepVarComp('x', np.zeros([2])), promotes=['*'])
    ser2.add('comp', SimpleArrayComp(), promotes=['*'])
    ser2.add('obj', ExecComp('o = y[0]', y=np.array([0.0, 0.0])),
             promotes=['y', 'o'])

    # ser2's constraint lives outside the parallel group
    root.add('con', ExecComp('c = y - 30.0', c=np.array([0.0, 0.0]),
                             y=np.array([0.0, 0.0])))
    root.add('total', ExecComp('obj = x1 + x2'))

    root.connect('par.ser1.o', 'total.x1')
    root.connect('par.ser2.o', 'total.x2')
    root.connect('par.ser2.y', 'con.y')

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = OPTIMIZER
    prob.driver.add_desvar('par.ser1.x', low=-50.0, high=50.0)
    prob.driver.add_desvar('par.ser2.x', low=-50.0, high=50.0)

    prob.driver.add_objective('total.obj')
    prob.driver.add_constraint('par.ser1.c', equals=0.0)
    prob.driver.add_constraint('con.c', equals=0.0)

    self.prob = prob
def test_reading_driver_cases(self):
    """ Tests that the reader returns params correctly. """
    prob = SellarProblem(SellarDerivativesGrouped)

    driver = prob.driver = pyOptSparseDriver(optimizer='SLSQP')
    driver.options['print_results'] = False
    driver.opt_settings['ACC'] = 1e-9
    driver.recording_options['record_desvars'] = True
    driver.recording_options['record_responses'] = True
    driver.recording_options['record_objectives'] = True
    driver.recording_options['record_constraints'] = True
    driver.add_recorder(self.recorder)

    prob.setup()
    prob.run_driver()
    prob.cleanup()

    cr = CaseReader(self.filename)

    # Test to see if we got the correct number of cases
    self.assertEqual(cr.driver_cases.num_cases, 7)
    self.assertEqual(cr.system_cases.num_cases, 0)
    self.assertEqual(cr.solver_cases.num_cases, 0)

    # Test to see if the access by case keys works:
    seventh_slsqp_iteration_case = cr.driver_cases.get_case('rank0:SLSQP|5')
    np.testing.assert_almost_equal(seventh_slsqp_iteration_case.outputs['z'],
                                   [1.97846296, -2.21388305e-13],
                                   decimal=2,
                                   err_msg='Case reader gives '
                                           'incorrect Parameter value'
                                           ' for {0}'.format('pz.z'))

    # Test values from one case, the last case
    last_case = cr.driver_cases.get_case(-1)
    np.testing.assert_almost_equal(last_case.outputs['z'], prob['z'],
                                   err_msg='Case reader gives '
                                           'incorrect Parameter value'
                                           ' for {0}'.format('pz.z'))
    np.testing.assert_almost_equal(last_case.outputs['x'], [-0.00309521],
                                   decimal=2,
                                   err_msg='Case reader gives '
                                           'incorrect Parameter value'
                                           ' for {0}'.format('px.x'))

    # Test to see if the case keys (iteration coords) come back correctly.
    # (A leftover debugging print of the case keys was removed here.)
    case_keys = cr.driver_cases.list_cases()
    for i, iter_coord in enumerate(case_keys):
        self.assertEqual(iter_coord, 'rank0:SLSQP|{}'.format(i))
def setUp(self):
    """Build a parallel model with explicit (non-promoted) connections:
    two identical serial subgroups each with its own constraint."""
    if SKIP:
        raise unittest.SkipTest('Could not import pyOptSparseDriver. Is pyoptsparse installed?')

    prob = Problem(impl=impl)
    root = prob.root = Group()
    #root.ln_solver = lin_solver()
    root.ln_solver = LinearGaussSeidel()
    par = root.add('par', ParallelGroup())
    par.ln_solver = LinearGaussSeidel()

    ser1 = par.add('ser1', Group())
    ser1.ln_solver = LinearGaussSeidel()

    ser1.add('p1', IndepVarComp('x', np.zeros([2])))
    ser1.add('comp', SimpleArrayComp())
    ser1.add('con', ExecComp('c = y - 20.0', c=np.array([0.0, 0.0]),
                             y=np.array([0.0, 0.0])))
    ser1.add('obj', ExecComp('o = y[0]', y=np.array([0.0, 0.0])))

    ser2 = par.add('ser2', Group())
    ser2.ln_solver = LinearGaussSeidel()

    ser2.add('p1', IndepVarComp('x', np.zeros([2])))
    ser2.add('comp', SimpleArrayComp())
    ser2.add('con', ExecComp('c = y - 30.0', c=np.array([0.0, 0.0]),
                             y=np.array([0.0, 0.0])))
    ser2.add('obj', ExecComp('o = y[0]', y=np.array([0.0, 0.0])))

    root.add('total', ExecComp('obj = x1 + x2'))

    # Wire up each subgroup internally, then feed both objectives
    # into the root-level 'total' sum.
    ser1.connect('p1.x', 'comp.x')
    ser1.connect('comp.y', 'con.y')
    ser1.connect('comp.y', 'obj.y')
    root.connect('par.ser1.obj.o', 'total.x1')

    ser2.connect('p1.x', 'comp.x')
    ser2.connect('comp.y', 'con.y')
    ser2.connect('comp.y', 'obj.y')
    root.connect('par.ser2.obj.o', 'total.x2')

    prob.driver = pyOptSparseDriver()
    prob.driver.add_desvar('par.ser1.p1.x', low=-50.0, high=50.0)
    prob.driver.add_desvar('par.ser2.p1.x', low=-50.0, high=50.0)

    prob.driver.add_objective('total.obj')
    prob.driver.add_constraint('par.ser1.con.c', equals=0.0)
    prob.driver.add_constraint('par.ser2.con.c', equals=0.0)

    # Tests use this prepared (but not yet setup()) problem.
    self.prob = prob
def test_root_derivs_dict(self):
    """Check that recorded derivatives (read back from the SqliteDict file)
    match the known Sellar analytic totals."""
    if OPT is None:
        raise unittest.SkipTest("pyoptsparse is not installed")
    if OPTIMIZER is None:
        raise unittest.SkipTest("pyoptsparse is not providing SNOPT or SLSQP")

    prob = Problem()
    prob.root = SellarDerivativesGrouped()

    prob.driver = pyOptSparseDriver()
    prob.driver.options["optimizer"] = "SLSQP"
    prob.driver.opt_settings["ACC"] = 1e-9
    prob.driver.options["print_results"] = False

    prob.driver.add_desvar("z", lower=np.array([-10.0, 0.0]),
                           upper=np.array([10.0, 10.0]))
    prob.driver.add_desvar("x", lower=0.0, upper=10.0)

    prob.driver.add_objective("obj")
    prob.driver.add_constraint("con1", upper=0.0)
    prob.driver.add_constraint("con2", upper=0.0)

    prob.driver.add_recorder(self.recorder)
    self.recorder.options["record_metadata"] = False
    self.recorder.options["record_derivs"] = True
    prob.setup(check=False)

    prob.run()

    prob.cleanup()

    # Close the db handle when done -- the original left it open (leak).
    db = SqliteDict(self.filename, self.tablename_derivs, flag="r")
    try:
        J1 = db["rank0:SLSQP|1"]["Derivatives"]
    finally:
        db.close()

    # Known analytic totals for the Sellar problem at the first iteration.
    Jbase = {}
    Jbase["con1"] = {}
    Jbase["con1"]["x"] = -0.98061433
    Jbase["con1"]["z"] = np.array([-9.61002285, -0.78449158])
    Jbase["con2"] = {}
    Jbase["con2"]["x"] = 0.09692762
    Jbase["con2"]["z"] = np.array([1.94989079, 1.0775421])
    Jbase["obj"] = {}
    Jbase["obj"]["x"] = 2.98061392
    Jbase["obj"]["z"] = np.array([9.61001155, 1.78448534])

    for key1, val1 in Jbase.items():
        for key2, val2 in val1.items():
            assert_rel_error(self, J1[key1][key2], val2, 0.00001)
def test_fan_out(self):
    # This tests sparse-response specification.
    # This is a slightly modified FanOut
    prob = Problem()
    model = prob.model = Group()

    model.add_subsystem('p1', IndepVarComp('x', 1.0))
    model.add_subsystem('p2', IndepVarComp('x', 1.0))

    model.add_subsystem('comp1', ExecComp('y = 3.0*x'))
    model.add_subsystem('comp2', ExecComp('y = 5.0*x'))

    model.add_subsystem('obj', ExecComp('o = i1 + i2'))
    model.add_subsystem('con1', ExecComp('c = 15.0 - x'))
    model.add_subsystem('con2', ExecComp('c = 15.0 - x'))

    # hook up explicitly
    wiring = [('p1.x', 'comp1.x'),
              ('p2.x', 'comp2.x'),
              ('comp1.y', 'obj.i1'),
              ('comp2.y', 'obj.i2'),
              ('comp1.y', 'con1.x'),
              ('comp2.y', 'con2.x')]
    for src, tgt in wiring:
        model.connect(src, tgt)

    prob.set_solver_print(level=0)

    driver = prob.driver = pyOptSparseDriver()
    driver.options['optimizer'] = OPTIMIZER
    driver.options['print_results'] = False

    model.add_design_var('p1.x', lower=-50.0, upper=50.0)
    model.add_design_var('p2.x', lower=-50.0, upper=50.0)
    model.add_objective('obj.o')
    model.add_constraint('con1.c', equals=0.0)
    model.add_constraint('con2.c', equals=0.0)

    prob.setup(check=False)
    prob.run_driver()

    assert_rel_error(self, prob['obj.o'], 30.0, 1e-6)

    # Verify that pyOpt has the correct wrt names: each constraint depends
    # on exactly one of the two design variables.
    solution = prob.driver.pyopt_solution
    self.assertEqual(solution.constraints['con1.c'].wrt, ['p1.x'])
    self.assertEqual(solution.constraints['con2.c'].wrt, ['p2.x'])
def test_prob_split_comm(self):
    """Run the same optimization on two independent sub-communicators
    produced by splitting a size-4 world comm, and check both converge."""
    colors = [0, 0, 1, 1]
    # split the size 4 comm into 2 size 2 comms
    comm = MPI.COMM_WORLD.Split(colors[MPI.COMM_WORLD.rank])

    self.assertEqual(comm.size, 2)

    prob = Problem(comm=comm)
    model = prob.model

    p1 = model.add_subsystem('p1', IndepVarComp('x', 99.0))
    p1.add_design_var('x', lower=-50.0, upper=50.0)

    par = model.add_subsystem('par', ParallelGroup())
    c1 = par.add_subsystem('C1', ExecComp('y = x*x'))
    c2 = par.add_subsystem('C2', ExecComp('y = x*x'))

    model.add_subsystem('obj', ExecComp('o = a + b + 2.'))

    model.connect('p1.x', ['par.C1.x', 'par.C2.x'])
    model.connect('par.C1.y', 'obj.a')
    model.connect('par.C2.y', 'obj.b')

    model.add_objective('obj.o')

    prob.set_solver_print(level=0)

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = OPTIMIZER
    prob.driver.options['print_results'] = False

    prob.setup()
    prob.run_model()
    failed = prob.run_driver()

    # Gather failure status from all ranks so any single-rank failure
    # (with its optimizer message) fails the whole test.
    all_failed = comm.allgather(failed)
    if any(all_failed):
        all_msgs = comm.allgather(str(prob.driver.pyopt_solution.optInform))
        for i, tup in enumerate(zip(all_failed, all_msgs)):
            failed, msg = tup
            if failed:
                self.fail("Optimization failed on rank %d: %s" % (i, msg))

    # Minimum of o = 2*x*x + 2 is at x = 0, where o = 2.
    objs = comm.allgather(prob['obj.o'])
    for i, obj in enumerate(objs):
        assert_near_equal(obj, 2.0, 1e-6)
def test_analysis_error_sensfunc(self):
    # Component raises an analysis error during some linearize calls, and
    # pyopt attempts to recover.
    prob = Problem()
    model = prob.model = Group()

    model.add_subsystem('p1', IndepVarComp('x', 50.0), promotes=['*'])
    model.add_subsystem('p2', IndepVarComp('y', 50.0), promotes=['*'])
    comp = model.add_subsystem('comp', ParaboloidAE(), promotes=['*'])
    model.add_subsystem('con', ExecComp('c = - x + y'), promotes=['*'])

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = OPTIMIZER

    if OPTIMIZER == 'SLSQP':
        prob.driver.opt_settings['ACC'] = 1e-9
    prob.driver.options['print_results'] = False

    model.add_design_var('x', lower=-50.0, upper=50.0)
    model.add_design_var('y', lower=-50.0, upper=50.0)

    model.add_objective('f_xy')
    model.add_constraint('c', upper=-15.0)

    # Fail only in the gradient (linearize) call, never in evaluation:
    # grad call #2 raises AnalysisError, eval call 100 is never reached.
    comp.grad_fail_at = 2
    comp.eval_fail_at = 100

    prob.setup(check=False)
    prob.run_driver()

    # SLSQP does a bad job recovering from gradient failures
    if OPTIMIZER == 'SLSQP':
        tol = 1e-2
    else:
        tol = 1e-6

    # Minimum should be at (7.166667, -7.833334)
    assert_rel_error(self, prob['x'], 7.16667, tol)
    assert_rel_error(self, prob['y'], -7.833334, tol)

    # Normally it takes 9 iterations, but takes 13 here because of the
    # gradfunc failures. (note SLSQP just doesn't do well)
    if OPTIMIZER == 'SNOPT':
        self.assertEqual(prob.driver.iter_count, 15)
def setup(self):
    """Create the MPPT MDP problem with an SNOPT-configured pyOptSparse driver."""
    self.model = Problem(impl=impl)
    self.model.root = MPPT_MDP()

    # add SNOPT driver
    driver = self.model.driver = pyOptSparseDriver()
    driver.options['optimizer'] = "SNOPT"
    driver.opt_settings = {
        'Major optimality tolerance': 1e-3,
        'Major feasibility tolerance': 1.0e-5,
        'Iterations limit': 500000000,
        "New basis file": 10,
    }

    driver.add_objective("perf.result")

    # Both design points share the same current-setpoint bounds.
    driver.add_desvar("pt0.param.CP_Isetpt", lower=0., upper=0.4)
    driver.add_desvar("pt1.param.CP_Isetpt", lower=0., upper=0.4)

    self.model.setup()
def test_sparsity_fd(self):
    """With force_fd on the root, verify the driver's gradient callback
    still honors the constraint sparsity pattern (each con depends on
    exactly one desvar)."""
    prob = Problem()
    root = prob.root = Group()

    root.add('p1', IndepVarComp('x', 1.0))
    root.add('p2', IndepVarComp('x', 1.0))

    root.add('comp1', ExecComp('y = 3.0*x'))
    root.add('comp2', ExecComp('y = 5.0*x'))

    root.add('obj', ExecComp('o = i1 + i2'))
    root.add('con1', ExecComp('c = 15.0 - x'))
    root.add('con2', ExecComp('c = 15.0 - x'))

    # hook up explicitly
    root.connect('p1.x', 'comp1.x')
    root.connect('p2.x', 'comp2.x')
    root.connect('comp1.y', 'obj.i1')
    root.connect('comp2.y', 'obj.i2')
    root.connect('comp1.y', 'con1.x')
    root.connect('comp2.y', 'con2.x')

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = OPTIMIZER
    prob.driver.options['print_results'] = False
    prob.driver.add_desvar('p1.x', lower=-50.0, upper=50.0)
    prob.driver.add_desvar('p2.x', lower=-50.0, upper=50.0)

    prob.driver.add_objective('obj.o')
    prob.driver.add_constraint('con1.c', equals=0.0)
    prob.driver.add_constraint('con2.c', equals=0.0)

    # Finite-difference the whole model rather than using analytic derivs.
    prob.root.fd_options['force_fd'] = True

    prob.setup(check=False)
    prob.run()

    # Verify that the appropriate sparsity pattern is applied by calling
    # the driver's internal gradient function directly.
    dv_dict = {'p1.x': 1.0, 'p2.x': 1.0}
    prob.driver._problem = prob
    sens_dict, fail = prob.driver._gradfunc(dv_dict, {})

    self.assertTrue('p2.x' not in sens_dict['con1.c'])
    self.assertTrue('p1.x' in sens_dict['con1.c'])
    self.assertTrue('p2.x' in sens_dict['con2.c'])
    self.assertTrue('p1.x' not in sens_dict['con2.c'])
    self.assertTrue('p1.x' in sens_dict['obj.o'])
    self.assertTrue('p2.x' in sens_dict['obj.o'])
def test_root_derivs_dict(self):
    """Check that derivatives show up in the text-format recorder output
    at the expected line positions."""
    if OPT is None:
        raise unittest.SkipTest("pyoptsparse is not installed")
    if OPTIMIZER is None:
        raise unittest.SkipTest("pyoptsparse is not providing SNOPT or SLSQP")

    prob = Problem()
    prob.root = SellarDerivativesGrouped()

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = 'SLSQP'
    prob.driver.opt_settings['ACC'] = 1e-9
    prob.driver.options['print_results'] = False

    self.recorder.options['record_unknowns'] = True

    prob.driver.add_desvar('z', lower=np.array([-10.0, 0.0]),
                           upper=np.array([10.0, 10.0]))
    prob.driver.add_desvar('x', lower=0.0, upper=10.0)

    prob.driver.add_objective('obj')
    prob.driver.add_constraint('con1', upper=0.0)
    prob.driver.add_constraint('con2', upper=0.0)

    prob.driver.add_recorder(self.recorder)
    self.recorder.options['record_metadata'] = False
    self.recorder.options['record_derivs'] = True
    prob.setup(check=False)

    prob.run()

    prob.cleanup()

    # Use a context manager so the recorder output file is always closed
    # (the original leaked the handle).
    with open(self.filename) as sout:
        lines = sout.readlines()

    self.assertEqual(lines[14].rstrip(), 'Derivatives:')
    self.assertTrue('  con1 wrt x:' in lines[15])
    self.assertTrue('  con1 wrt z:' in lines[16])
    self.assertTrue('  con2 wrt x:' in lines[17])
    self.assertTrue('  con2 wrt z:' in lines[18])
    self.assertTrue('  obj wrt x:' in lines[19])
    self.assertTrue('  obj wrt z:' in lines[20])
    # Spot-check a known derivative value (d obj / d z).
    self.assertTrue('1.784' in lines[20])
def test_sparsity_fd(self):
    """Same sparsity check as above but exercised with force_fd: the
    gradient callback must drop desvars a constraint does not depend on."""
    prob = Problem()
    root = prob.root = Group()

    root.add('p1', IndepVarComp('x', 1.0))
    root.add('p2', IndepVarComp('x', 1.0))

    root.add('comp1', ExecComp('y = 3.0*x'))
    root.add('comp2', ExecComp('y = 5.0*x'))

    root.add('obj', ExecComp('o = i1 + i2'))
    root.add('con1', ExecComp('c = 15.0 - x'))
    root.add('con2', ExecComp('c = 15.0 - x'))

    # hook up explicitly
    root.connect('p1.x', 'comp1.x')
    root.connect('p2.x', 'comp2.x')
    root.connect('comp1.y', 'obj.i1')
    root.connect('comp2.y', 'obj.i2')
    root.connect('comp1.y', 'con1.x')
    root.connect('comp2.y', 'con2.x')

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = OPTIMIZER
    prob.driver.options['print_results'] = False
    prob.driver.add_desvar('p1.x', lower=-50.0, upper=50.0)
    prob.driver.add_desvar('p2.x', lower=-50.0, upper=50.0)

    prob.driver.add_objective('obj.o')
    prob.driver.add_constraint('con1.c', equals=0.0)
    prob.driver.add_constraint('con2.c', equals=0.0)

    # Finite-difference the whole model rather than using analytic derivs.
    prob.root.fd_options['force_fd'] = True

    prob.setup(check=False)
    prob.run()

    # Verify that the appropriate sparsity pattern is applied by calling
    # the driver's internal gradient function directly.
    dv_dict = {'p1.x': 1.0, 'p2.x': 1.0}
    prob.driver._problem = prob
    sens_dict, fail = prob.driver._gradfunc(dv_dict, {})

    self.assertTrue('p2.x' not in sens_dict['con1.c'])
    self.assertTrue('p1.x' in sens_dict['con1.c'])
    self.assertTrue('p2.x' in sens_dict['con2.c'])
    self.assertTrue('p1.x' not in sens_dict['con2.c'])
    self.assertTrue('p1.x' in sens_dict['obj.o'])
    self.assertTrue('p2.x' in sens_dict['obj.o'])
def test_totals_of_wrt_indices(self, method):
    """Check that simultaneous coloring of approximated totals works when
    one design variable uses an `indices` subset."""
    prob = Problem(coloring_dir=self.tempdir)
    model = prob.model = CounterGroup()
    prob.driver = pyOptSparseDriver(optimizer='SLSQP')
    prob.driver.declare_coloring()

    # Sparsity mask of the component jacobian (rows = outputs, cols = inputs).
    mask = np.array([[1, 0, 0, 1, 1],
                     [0, 1, 0, 1, 1],
                     [0, 1, 0, 1, 1],
                     [1, 0, 0, 0, 0],
                     [0, 1, 1, 0, 0]])

    isplit = 2
    sparsity = setup_sparsity(mask)
    indeps, conns = setup_indeps(isplit, mask.shape[1], 'indeps', 'comp')

    model.add_subsystem('indeps', indeps)
    comp = model.add_subsystem(
        'comp', SparseCompExplicit(sparsity, method, isplit=isplit, osplit=2))
    model.connect('indeps.x0', 'comp.x0')
    model.connect('indeps.x1', 'comp.x1')

    model.comp.add_objective('y0', index=1)
    model.comp.add_constraint('y1', lower=[1., 2.])
    # x0 uses only indices [0, 2]; x1 uses all entries.
    model.add_design_var('indeps.x0', indices=[0, 2],
                         lower=np.ones(2), upper=np.ones(2) + .1)
    model.add_design_var('indeps.x1',
                         lower=np.ones(2), upper=np.ones(2) + .1)
    model.approx_totals(method=method)

    prob.setup(check=False, mode='fwd')
    prob.set_solver_print(level=0)
    prob.run_driver()  # need this to trigger the dynamic coloring

    prob.driver._total_jac = None

    start_nruns = model._nruns
    derivs = prob.driver._compute_totals()  # colored

    # Coloring should reduce the FD sweeps to 2 model runs.
    self.assertEqual(model._nruns - start_nruns, 2)

    # Expected total jacobian: response rows [1, 3, 4] and the
    # columns selected by the desvar indices ([0, 2] of x0, all of x1).
    cols = [0, 2, 3, 4]
    rows = [1, 3, 4]
    _check_total_matrix(model, derivs, sparsity[rows, :][:, cols], method)
def test_report_generation_basic_pyoptsparse(self):
    """Verify the default N2 and scaling reports are generated when the
    problem is driven by pyOptSparseDriver."""
    # Just to try a different driver
    setup_default_reports()

    prob = self.setup_and_run_simple_problem(
        driver=pyOptSparseDriver(optimizer='SLSQP'))

    # get the path to the problem subdirectory
    reports_dir = pathlib.Path(_reports_dir) / prob._name

    n2_path = reports_dir / self.n2_filename
    self.assertTrue(n2_path.is_file(),
                    f'The N2 report file, {str(n2_path)} was not found')

    scaling_path = reports_dir / self.scaling_filename
    self.assertTrue(scaling_path.is_file(),
                    f'The scaling report file, {str(scaling_path)}, was not found')
def test_reading_driver_recording_with_system_vars(self):
    """Check that driver recording can include extra system variables
    (mda.d2.y2) alongside desvars/responses.

    The original set the optimizer and the SLSQP 'ACC' setting twice,
    verbatim; the duplicate configuration block has been removed.
    """
    self.setup_sellar_grouped_model()

    self.prob.driver = pyOptSparseDriver()
    self.prob.driver.options['optimizer'] = OPTIMIZER
    if OPTIMIZER == 'SLSQP':
        self.prob.driver.opt_settings['ACC'] = 1e-9
    self.prob.driver.add_recorder(self.recorder)

    driver = self.prob.driver
    driver.recording_options['record_desvars'] = True
    driver.recording_options['record_responses'] = True
    driver.recording_options['record_objectives'] = True
    driver.recording_options['record_constraints'] = True
    # Also record this non-desvar system output.
    driver.recording_options['includes'] = ['mda.d2.y2', ]

    self.prob.setup(check=False)
    self.prob.run_driver()
    self.prob.cleanup()

    cr = CaseReader(self.filename)

    # Test values from one case, the last case
    last_case = cr.driver_cases.get_case(-1)

    np.testing.assert_almost_equal(last_case.desvars['z'],
                                   self.prob['pz.z'],
                                   err_msg='Case reader gives '
                                           'incorrect Parameter value'
                                           ' for {0}'.format('pz.z'))
    np.testing.assert_almost_equal(last_case.desvars['x'],
                                   self.prob['px.x'],
                                   err_msg='Case reader gives '
                                           'incorrect Parameter value'
                                           ' for {0}'.format('px.x'))
    np.testing.assert_almost_equal(last_case.sysincludes['y2'],
                                   self.prob['mda.d2.y2'],
                                   err_msg='Case reader gives '
                                           'incorrect Parameter value'
                                           ' for {0}'.format('mda.d2.y2'))
def test_simple_totals(self, method):
    """Check that colored approximate totals produce the correct jacobian
    with the expected (reduced) number of model runs."""
    prob = Problem(coloring_dir=self.tempdir)
    model = prob.model = CounterGroup()
    prob.driver = pyOptSparseDriver(optimizer='SLSQP')
    prob.driver.declare_coloring()

    # Sparsity mask of the component jacobian (rows = outputs, cols = inputs).
    mask = np.array([[1, 0, 0, 1, 1],
                     [0, 1, 0, 1, 1],
                     [0, 1, 0, 1, 1],
                     [1, 0, 0, 0, 0],
                     [0, 1, 1, 0, 0]])

    isplit = 2
    sparsity = setup_sparsity(mask)
    indeps, conns = setup_indeps(isplit, mask.shape[1], 'indeps', 'comp')

    model.add_subsystem('indeps', indeps)
    comp = model.add_subsystem(
        'comp', SparseCompExplicit(sparsity, method, isplit=isplit, osplit=2))
    model.connect('indeps.x0', 'comp.x0')
    model.connect('indeps.x1', 'comp.x1')

    model.declare_coloring('*', method=method,
                           step=1e-6 if method == 'fd' else None)

    model.comp.add_objective(
        'y0', index=0
    )  # pyoptsparse SLSQP requires a scalar objective, so pick index 0
    model.comp.add_constraint('y1', lower=[1., 2.])
    model.add_design_var('indeps.x0', lower=np.ones(3),
                         upper=np.ones(3) + .1)
    model.add_design_var('indeps.x1', lower=np.ones(2),
                         upper=np.ones(2) + .1)
    model.approx_totals(method=method)

    prob.setup(check=False, mode='fwd')
    prob.set_solver_print(level=0)
    prob.run_driver()  # need this to trigger the dynamic coloring

    prob.driver._total_jac = None

    start_nruns = model._nruns
    derivs = prob.compute_totals()
    # Rows [0, 3, 4] of the sparsity are the response rows (y0[0] and y1).
    _check_total_matrix(model, derivs, sparsity[[0, 3, 4], :], method)
    nruns = model._nruns - start_nruns
    # Coloring should reduce the sweeps to 3 model runs.
    self.assertEqual(nruns, 3)
def test_reading_system_metadata(self):
    """Verify that system metadata (scaling factors) is recorded and can be
    read back through CaseReader (older get_subsystem-style API)."""
    if OPT is None:
        raise unittest.SkipTest("pyoptsparse is not installed")
    if OPTIMIZER is None:
        raise unittest.SkipTest(
            "pyoptsparse is not providing SNOPT or SLSQP")

    self.setup_sellar_grouped_scaled_model()

    self.prob.driver = pyOptSparseDriver()
    self.prob.driver.options['optimizer'] = OPTIMIZER
    if OPTIMIZER == 'SLSQP':
        self.prob.driver.opt_settings['ACC'] = 1e-9

    self.recorder.options['record_inputs'] = True
    self.recorder.options['record_outputs'] = True
    self.recorder.options['record_residuals'] = True
    self.recorder.options['record_metadata'] = True

    self.prob.model.add_recorder(self.recorder)

    pz = self.prob.model.get_subsystem(
        'pz')  # IndepVarComp which is an ExplicitComponent
    pz.add_recorder(self.recorder)

    mda = self.prob.model.get_subsystem('mda')  # Group
    d1 = mda.get_subsystem('d1')
    d1.add_recorder(self.recorder)

    self.prob.setup(check=False, mode='rev')
    self.prob.run_driver()
    self.prob.cleanup()

    cr = CaseReader(self.filename)

    # Metadata should exist exactly for the three systems with recorders.
    self.assertEqual(sorted(cr.system_metadata.keys()),
                     sorted(['root', 'mda.d1', 'pz']))

    assert_rel_error(
        self,
        cr.system_metadata['pz']['output']['nonlinear']['phys'][0][1],
        [2.0, 2.0], 1.0e-3)
def test_root_derivs_dict(self):
    """Check that derivatives show up in the text-format recorder output
    at the expected line positions (derivatives start at line 12)."""
    if OPT is None:
        raise unittest.SkipTest("pyoptsparse is not installed")
    if OPTIMIZER is None:
        raise unittest.SkipTest("pyoptsparse is not providing SNOPT or SLSQP")

    prob = Problem()
    prob.root = SellarDerivativesGrouped()

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = 'SLSQP'
    prob.driver.opt_settings['ACC'] = 1e-9
    prob.driver.options['print_results'] = False

    self.recorder.options['record_unknowns'] = True

    prob.driver.add_desvar('z', lower=np.array([-10.0, 0.0]),
                           upper=np.array([10.0, 10.0]))
    prob.driver.add_desvar('x', lower=0.0, upper=10.0)

    prob.driver.add_objective('obj')
    prob.driver.add_constraint('con1', upper=0.0)
    prob.driver.add_constraint('con2', upper=0.0)

    prob.driver.add_recorder(self.recorder)
    self.recorder.options['record_metadata'] = False
    self.recorder.options['record_derivs'] = True
    prob.setup(check=False)

    prob.run()

    prob.cleanup()

    # Use a context manager so the recorder output file is always closed
    # (the original leaked the handle).
    with open(self.filename) as sout:
        lines = sout.readlines()

    self.assertEqual(lines[12].rstrip(), 'Derivatives:')
    self.assertTrue('  con1 wrt x:' in lines[13])
    self.assertTrue('  con1 wrt z:' in lines[14])
    self.assertTrue('  con2 wrt x:' in lines[15])
    self.assertTrue('  con2 wrt z:' in lines[16])
    self.assertTrue('  obj wrt x:' in lines[17])
    self.assertTrue('  obj wrt z:' in lines[18])
    # Spot-check a known derivative value (d obj / d z).
    self.assertTrue('1.784' in lines[18])
def test_dido(self):
    """Solve the Dido (isoperimetric) problem -- maximize the area enclosed
    under a fixed-perimeter curve -- then verify total derivatives and the
    optimal area."""
    prob = Problem(root=Group(), impl=impl, driver=pyOptSparseDriver())

    # Total horizontal space of area to be enclosed.
    x = 100.0
    # Number of segments used to enclose area.
    n = 50
    # Horizontal size of each segment.
    dx = x / n

    prob.root.add(name="ys_ivc",
                  system=IndepVarComp("ys", val=np.zeros(n), units="m"),
                  promotes=["ys"])
    prob.root.add(name="rec_group", system=RectangleGroup(n, dx))
    prob.root.add(name="total_area_comp", system=Summer(n),
                  promotes=["total_area"])
    prob.root.add(name="perimeter_comp", system=PerimeterComp(n, dx),
                  promotes=["ys", "total_perimeter"])

    # Fan each y value out to its rectangle section and collect the areas.
    for i in range(n):
        prob.root.connect("ys", "rec_group.section_{0}.y".format(i),
                          src_indices=[i])
        prob.root.connect("rec_group.section_{0}.area".format(i),
                          "total_area_comp.area_{0}".format(i))

    # Only interior points are design variables; endpoints stay fixed.
    # (Replaces the old `range(n)[1:-1]` slicing idiom with an explicit list.)
    idxs = list(range(1, n - 1))

    prob.driver.options["optimizer"] = OPTIMIZER
    prob.driver.options["print_results"] = False
    prob.driver.add_desvar("ys", lower=np.zeros(n - 2), indices=idxs)
    prob.driver.add_constraint("total_perimeter", upper=150)
    # Negative scaler turns the area maximization into a minimization.
    prob.driver.add_objective("total_area", scaler=-1.0e-3)
    prob.setup(check=False)
    prob.run()

    # All total-derivative checks (fwd/rev/FD) should agree.
    data = prob.check_total_derivatives(out_stream=None)
    for key, val in data.items():
        assert_rel_error(self, val["abs error"][0], 0.0, 1e-5)
        assert_rel_error(self, val["abs error"][1], 0.0, 1e-5)
        assert_rel_error(self, val["abs error"][2], 0.0, 1e-5)
        assert_rel_error(self, val["rel error"][0], 0.0, 1e-5)
        assert_rel_error(self, val["rel error"][1], 0.0, 1e-5)
        assert_rel_error(self, val["rel error"][2], 0.0, 1e-5)

    assert_rel_error(self, 3574.94, prob["total_area"], 0.1)
def test_root_derivs_dict(self):
    """Check that derivatives show up in the text-format recorder output
    at the expected line positions (double-quoted-string variant)."""
    if OPT is None:
        raise unittest.SkipTest("pyoptsparse is not installed")
    if OPTIMIZER is None:
        raise unittest.SkipTest("pyoptsparse is not providing SNOPT or SLSQP")

    prob = Problem()
    prob.root = SellarDerivativesGrouped()

    prob.driver = pyOptSparseDriver()
    prob.driver.options["optimizer"] = "SLSQP"
    prob.driver.opt_settings["ACC"] = 1e-9
    prob.driver.options["print_results"] = False

    self.recorder.options["record_unknowns"] = True

    prob.driver.add_desvar("z", lower=np.array([-10.0, 0.0]),
                           upper=np.array([10.0, 10.0]))
    prob.driver.add_desvar("x", lower=0.0, upper=10.0)

    prob.driver.add_objective("obj")
    prob.driver.add_constraint("con1", upper=0.0)
    prob.driver.add_constraint("con2", upper=0.0)

    prob.driver.add_recorder(self.recorder)
    self.recorder.options["record_metadata"] = False
    self.recorder.options["record_derivs"] = True
    prob.setup(check=False)

    prob.run()

    prob.cleanup()

    # Use a context manager so the recorder output file is always closed
    # (the original leaked the handle).
    with open(self.filename) as sout:
        lines = sout.readlines()

    self.assertEqual(lines[12].rstrip(), "Derivatives:")
    self.assertTrue("  con1 wrt x:" in lines[13])
    self.assertTrue("  con1 wrt z:" in lines[14])
    self.assertTrue("  con2 wrt x:" in lines[15])
    self.assertTrue("  con2 wrt z:" in lines[16])
    self.assertTrue("  obj wrt x:" in lines[17])
    self.assertTrue("  obj wrt z:" in lines[18])
    # Spot-check a known derivative value (d obj / d z).
    self.assertTrue("1.784" in lines[18])
def test_analysis_error_sensfunc(self):
    # Component raises an analysis error during some linearize calls, and
    # pyopt attempts to recover.
    prob = Problem()
    root = prob.root = Group()

    root.add('p1', IndepVarComp('x', 50.0), promotes=['*'])
    root.add('p2', IndepVarComp('y', 50.0), promotes=['*'])
    root.add('comp', ParaboloidAE(), promotes=['*'])
    root.add('con', ExecComp('c = - x + y'), promotes=['*'])

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = OPTIMIZER

    if OPTIMIZER == 'SLSQP':
        prob.driver.opt_settings['ACC'] = 1e-9
    prob.driver.options['print_results'] = False
    prob.driver.add_desvar('x', lower=-50.0, upper=50.0)
    prob.driver.add_desvar('y', lower=-50.0, upper=50.0)

    prob.driver.add_objective('f_xy')
    prob.driver.add_constraint('c', upper=-15.0)

    # Fail only in the gradient (linearize) call, never in evaluation:
    # grad call #2 raises AnalysisError, eval call 100 is never reached.
    prob.root.comp.grad_fail_at = 2
    prob.root.comp.eval_fail_at = 100

    prob.setup(check=False)
    prob.run()

    # SLSQP does a bad job recovering from gradient failures
    if OPTIMIZER == 'SLSQP':
        tol = 1e-2
    else:
        tol = 1e-6

    # Minimum should be at (7.166667, -7.833334)
    assert_rel_error(self, prob['x'], 7.16667, tol)
    assert_rel_error(self, prob['y'], -7.833334, tol)

    # Normally it takes 9 iterations, but takes 12 here because of the
    # gradfunc failures. (note SLSQP just doesn't do well)
    if OPTIMIZER == 'SNOPT':
        self.assertEqual(prob.driver.iter_count, 12)
def test_fan_out(self):
    """Fan-out model: verify the optimum and that pyOpt records the
    correct 'wrt' names (sparse constraint dependencies)."""
    prob = Problem()
    root = prob.root = Group()

    root.add('p1', IndepVarComp('x', 1.0))
    root.add('p2', IndepVarComp('x', 1.0))
    root.add('comp1', ExecComp('y = 3.0*x'))
    root.add('comp2', ExecComp('y = 5.0*x'))
    root.add('obj', ExecComp('o = i1 + i2'))
    root.add('con1', ExecComp('c = 15.0 - x'))
    root.add('con2', ExecComp('c = 15.0 - x'))

    # hook up explicitly
    for src, tgt in (('p1.x', 'comp1.x'),
                     ('p2.x', 'comp2.x'),
                     ('comp1.y', 'obj.i1'),
                     ('comp2.y', 'obj.i2'),
                     ('comp1.y', 'con1.x'),
                     ('comp2.y', 'con2.x')):
        root.connect(src, tgt)

    driver = prob.driver = pyOptSparseDriver()
    driver.options['optimizer'] = OPTIMIZER
    driver.options['print_results'] = False
    driver.add_desvar('p1.x', lower=-50.0, upper=50.0)
    driver.add_desvar('p2.x', lower=-50.0, upper=50.0)
    driver.add_objective('obj.o')
    driver.add_constraint('con1.c', equals=0.0)
    driver.add_constraint('con2.c', equals=0.0)

    prob.setup(check=False)
    prob.run()

    assert_rel_error(self, prob['obj.o'], 30.0, 1e-6)

    # Verify that pyOpt has the correct wrt names: each constraint depends
    # on only one of the two design variables.
    solution = prob.driver.pyopt_solution
    self.assertEqual(solution.constraints['con1.c'].wrt, ['p1.x'])
    self.assertEqual(solution.constraints['con2.c'].wrt, ['p2.x'])
def test_fan_out(self):
    """Fan-out model: each of two constraints depends on exactly one
    design variable; check the optimum and the recorded wrt names."""
    prob = Problem()
    root = prob.root = Group()

    # Two independent inputs...
    root.add('p1', IndepVarComp('x', 1.0))
    root.add('p2', IndepVarComp('x', 1.0))
    # ...each scaled by its own component...
    root.add('comp1', ExecComp('y = 3.0*x'))
    root.add('comp2', ExecComp('y = 5.0*x'))
    # ...summed into the objective, with one constraint per branch.
    root.add('obj', ExecComp('o = i1 + i2'))
    root.add('con1', ExecComp('c = 15.0 - x'))
    root.add('con2', ExecComp('c = 15.0 - x'))

    # hook up explicitly
    root.connect('p1.x', 'comp1.x')
    root.connect('p2.x', 'comp2.x')
    root.connect('comp1.y', 'obj.i1')
    root.connect('comp2.y', 'obj.i2')
    root.connect('comp1.y', 'con1.x')
    root.connect('comp2.y', 'con2.x')

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = OPTIMIZER
    prob.driver.options['print_results'] = False

    prob.driver.add_desvar('p1.x', lower=-50.0, upper=50.0)
    prob.driver.add_desvar('p2.x', lower=-50.0, upper=50.0)
    prob.driver.add_objective('obj.o')
    prob.driver.add_constraint('con1.c', equals=0.0)
    prob.driver.add_constraint('con2.c', equals=0.0)

    prob.setup(check=False)
    prob.run()

    result = prob['obj.o']
    assert_rel_error(self, result, 30.0, 1e-6)

    # Verify that pyOpt has the correct wrt names
    pyopt_sol = prob.driver.pyopt_solution
    self.assertEqual(pyopt_sol.constraints['con1.c'].wrt, ['p1.x'])
    self.assertEqual(pyopt_sol.constraints['con2.c'].wrt, ['p2.x'])
def test_basic(self):
    """Optimize the grouped Sellar problem with SLSQP in 'rev' mode and
    check the known optimal value of z[0]."""
    prob = Problem()
    model = prob.model = SellarDerivativesGrouped()

    driver = prob.driver = pyOptSparseDriver()
    driver.options['optimizer'] = "SLSQP"

    model.add_design_var('z',
                         lower=np.array([-10.0, 0.0]),
                         upper=np.array([10.0, 10.0]))
    model.add_design_var('x', lower=0.0, upper=10.0)
    model.add_objective('obj')
    model.add_constraint('con1', upper=0.0)
    model.add_constraint('con2', upper=0.0)

    prob.set_solver_print(level=0)

    prob.setup(check=False, mode='rev')
    prob.run_driver()

    # Known optimum of the Sellar problem.
    assert_rel_error(self, prob['z'][0], 1.9776, 1e-3)
def test_fan_out(self):
    """Fan-out model (old ParamComp / add_param API): check the optimum
    and that pyOpt records the correct sparse wrt names."""
    prob = Problem()
    root = prob.root = Group()

    root.add('p1', ParamComp('x', 1.0))
    root.add('p2', ParamComp('x', 1.0))
    root.add('comp1', ExecComp('y = 3.0*x'))
    root.add('comp2', ExecComp('y = 5.0*x'))
    root.add('obj', ExecComp('o = i1 + i2'))
    root.add('con1', ExecComp('c = 15.0 - x'))
    root.add('con2', ExecComp('c = 15.0 - x'))

    # hook up non explicitly
    for src, tgt in (('p1.x', 'comp1.x'),
                     ('p2.x', 'comp2.x'),
                     ('comp1.y', 'obj.i1'),
                     ('comp2.y', 'obj.i2'),
                     ('comp1.y', 'con1.x'),
                     ('comp2.y', 'con2.x')):
        root.connect(src, tgt)

    driver = prob.driver = pyOptSparseDriver()
    driver.add_param('p1.x', low=-50.0, high=50.0)
    driver.add_param('p2.x', low=-50.0, high=50.0)
    driver.add_objective('obj.o')
    driver.add_constraint('con1.c', ctype='eq')
    driver.add_constraint('con2.c', ctype='eq')

    prob.setup(check=False)
    prob.run()

    assert_rel_error(self, prob['obj.o'], 30.0, 1e-6)

    # Verify that pyOpt has the correct wrt names
    solution = prob.driver.pyopt_solution
    self.assertEqual(solution.constraints['con1.c'].wrt, ['p1.x'])
    self.assertEqual(solution.constraints['con2.c'].wrt, ['p2.x'])
def test_analysis_error_objfunc(self):
    # Component raises an analysis error during some runs, and pyopt
    # attempts to recover.
    prob = Problem()
    model = prob.model = Group()

    model.add_subsystem('p1', IndepVarComp('x', 50.0), promotes=['*'])
    model.add_subsystem('p2', IndepVarComp('y', 50.0), promotes=['*'])
    # ParaboloidAE raises AnalysisError on preset evaluation counts.
    model.add_subsystem('comp', ParaboloidAE(), promotes=['*'])
    model.add_subsystem('con', ExecComp('c = - x + y'), promotes=['*'])

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = OPTIMIZER

    if OPTIMIZER == 'SLSQP':
        prob.driver.opt_settings['ACC'] = 1e-9
    prob.driver.options['print_results'] = False

    model.add_design_var('x', lower=-50.0, upper=50.0)
    model.add_design_var('y', lower=-50.0, upper=50.0)

    model.add_objective('f_xy')
    model.add_constraint('c', upper=-15.0)

    prob.setup(check=False)
    prob.run_driver()

    # Minimum should be at (7.166667, -7.833334)
    assert_rel_error(self, prob['x'], 7.16667, 1e-6)
    assert_rel_error(self, prob['y'], -7.833334, 1e-6)

    # Normally it takes 9 iterations, but takes 13 here because of the
    # analysis failures. (note SLSQP takes 5 instead of 4)
    # NOTE(review): the SLSQP branch asserts 7 iterations, which does not
    # match the "5 instead of 4" comment above -- presumably the counts
    # drifted across optimizer versions; confirm against CI history.
    if OPTIMIZER == 'SLSQP':
        self.assertEqual(prob.driver.iter_count, 7)
    else:
        self.assertEqual(prob.driver.iter_count, 15)
def test_fan_out(self):
    """Fan-out model (old ParamComp / add_param API): verify the optimum
    and the sparse wrt names recorded in the pyOpt solution."""
    prob = Problem()
    root = prob.root = Group()

    root.add('p1', ParamComp('x', 1.0))
    root.add('p2', ParamComp('x', 1.0))

    root.add('comp1', ExecComp('y = 3.0*x'))
    root.add('comp2', ExecComp('y = 5.0*x'))

    root.add('obj', ExecComp('o = i1 + i2'))
    root.add('con1', ExecComp('c = 15.0 - x'))
    root.add('con2', ExecComp('c = 15.0 - x'))

    # hook up non explicitly
    root.connect('p1.x', 'comp1.x')
    root.connect('p2.x', 'comp2.x')
    root.connect('comp1.y', 'obj.i1')
    root.connect('comp2.y', 'obj.i2')
    root.connect('comp1.y', 'con1.x')
    root.connect('comp2.y', 'con2.x')

    prob.driver = pyOptSparseDriver()
    prob.driver.add_param('p1.x', low=-50.0, high=50.0)
    prob.driver.add_param('p2.x', low=-50.0, high=50.0)

    prob.driver.add_objective('obj.o')
    prob.driver.add_constraint('con1.c', ctype='eq')
    prob.driver.add_constraint('con2.c', ctype='eq')

    prob.setup(check=False)
    prob.run()

    obj = prob['obj.o']
    assert_rel_error(self, obj, 30.0, 1e-6)

    # Verify that pyOpt has the correct wrt names: each constraint depends
    # on only one of the two design variables.
    con1 = prob.driver.pyopt_solution.constraints['con1.c']
    self.assertEqual(con1.wrt, ['p1.x'])
    con2 = prob.driver.pyopt_solution.constraints['con2.c']
    self.assertEqual(con2.wrt, ['p2.x'])
def test_run(self):
    """Optimize the Sellar super-group (4 sub-problems) under MPI and
    check the known optimum on rank 0.

    The original assigned a ScipyOptimizer driver and immediately
    overwrote it with pyOptSparseDriver; the dead assignment was removed.
    """
    nProblems = 4
    top = Problem(impl=impl)
    top.root = SellarDerivativesSuperGroup(nProblems=nProblems)

    top.driver = pyOptSparseDriver()

    if OPTIMIZER == 'SNOPT':
        top.driver.options['optimizer'] = 'SNOPT'
        top.driver.opt_settings['Verify level'] = 0
        top.driver.opt_settings['Print file'] = 'SNOPT_print_petsctest.out'
        top.driver.opt_settings['Summary file'] = 'SNOPT_summary_petsctest.out'
        top.driver.opt_settings['Major iterations limit'] = 1000
    else:
        top.driver.options['optimizer'] = 'SLSQP'

    top.driver.add_desvar('z', lower=np.array([-10.0, 0.0]),
                          upper=np.array([10.0, 10.0]))
    top.driver.add_desvar('x', lower=0.0, upper=10.0)

    top.driver.add_objective('obj')
    top.driver.add_constraint('con1', upper=0.0)
    top.driver.add_constraint('con2', upper=0.0)

    top.root.ln_solver.options['single_voi_relevance_reduction'] = True

    top.setup(check=False)

    # Setting initial values for design variables
    top['x'] = 1.0
    top['z'] = np.array([5.0, 2.0])

    top.run()

    if top.root.comm.rank == 0:
        # Known Sellar optimum.
        assert_rel_error(self, top['z'][0], 1.977639, 1.0e-6)
        assert_rel_error(self, top['z'][1], 0.0, 1.0e-6)
        assert_rel_error(self, top['x'], 0.0, 1.0e-6)
def test_simple_paraboloid_upper_indices(self):
    """Optimize only index 1 of size-3 array design variables; the other
    array entries must be left at their initial values."""
    prob = Problem()
    model = prob.model = Group()

    size = 3
    zeros = np.zeros(size)
    fifties = np.array([50.0] * size)

    model.add_subsystem('p1', IndepVarComp('x', fifties))
    model.add_subsystem('p2', IndepVarComp('y', fifties.copy()))
    model.add_subsystem('comp',
                        ExecComp('f_xy = (x-3.0)**2 + x*y + (y+4.0)**2 - 3.0',
                                 x=zeros.copy(), y=zeros.copy(),
                                 f_xy=zeros.copy()))
    model.add_subsystem('con', ExecComp('c = - x + y', c=zeros.copy(),
                                        x=zeros.copy(), y=zeros.copy()))

    for tgt in ('comp.x', 'con.x'):
        model.connect('p1.x', tgt)
    for tgt in ('comp.y', 'con.y'):
        model.connect('p2.y', tgt)

    prob.set_solver_print(level=0)

    prob.driver = pyOptSparseDriver()
    prob.driver.options['optimizer'] = OPTIMIZER
    if OPTIMIZER == 'SLSQP':
        prob.driver.opt_settings['ACC'] = 1e-9
    prob.driver.options['print_results'] = False

    # only the middle entry participates in the optimization
    model.add_design_var('p1.x', indices=[1], lower=-50.0, upper=50.0)
    model.add_design_var('p2.y', indices=[1], lower=-50.0, upper=50.0)
    model.add_objective('comp.f_xy', index=1)
    model.add_constraint('con.c', indices=[1], upper=-15.0)

    prob.setup(check=False)
    prob.run_driver()

    # Minimum should be at (7.166667, -7.833334)
    assert_rel_error(self, prob['p1.x'], np.array([50., 7.16667, 50.]), 1e-6)
    assert_rel_error(self, prob['p2.y'], np.array([50., -7.833334, 50.]), 1e-6)
def test_raised_error_sensfunc(self):
    """A hard component failure during the gradient evaluation should make
    pyoptsparse raise instead of soft-failing."""
    prob = Problem()
    model = prob.model = Group()

    model.add_subsystem('p1', IndepVarComp('x', 50.0), promotes=['*'])
    model.add_subsystem('p2', IndepVarComp('y', 50.0), promotes=['*'])
    comp = model.add_subsystem('comp', ParaboloidAE(), promotes=['*'])
    model.add_subsystem('con', ExecComp('c = - x + y'), promotes=['*'])

    prob.driver = pyOptSparseDriver()

    # SNOPT has a weird cleanup problem when this fails, so we use SLSQP. For the
    # regular failure, it doesn't matter which opt we choose since they all fail through.
    prob.driver.options['optimizer'] = 'SLSQP'
    prob.driver.opt_settings['ACC'] = 1e-9
    prob.driver.options['print_results'] = False

    model.add_design_var('x', lower=-50.0, upper=50.0)
    model.add_design_var('y', lower=-50.0, upper=50.0)
    model.add_objective('f_xy')
    model.add_constraint('c', upper=-15.0)

    # fail hard on the 2nd gradient call; never fail in the eval path
    comp.fail_hard = True
    comp.grad_fail_at = 2
    comp.eval_fail_at = 100

    prob.setup(check=False)

    with self.assertRaises(Exception) as err:
        prob.run_driver()

    # pyopt's failure message differs by platform and is not informative anyway

    del prob
def test_reading_driver_recording_with_system_vars(self):
    """Record desvars/responses plus an extra system variable ('mda.d2.y2')
    and verify the case reader returns the final values."""
    prob = SellarProblem(SellarDerivativesGrouped)

    driver = prob.driver = pyOptSparseDriver(optimizer='SLSQP')
    driver.options['print_results'] = False
    driver.opt_settings['ACC'] = 1e-9

    # record everything the driver knows about, plus one system output
    for opt in ('record_desvars', 'record_responses',
                'record_objectives', 'record_constraints'):
        driver.recording_options[opt] = True
    driver.recording_options['includes'] = ['mda.d2.y2', ]
    driver.add_recorder(self.recorder)

    prob.setup()
    prob.run_driver()
    prob.cleanup()

    cr = CaseReader(self.filename)

    # Test values from one case, the last case
    last_case = cr.driver_cases.get_case(-1)

    for out_name, var_name in (('z', 'pz.z'),
                               ('x', 'px.x'),
                               ('y2', 'mda.d2.y2')):
        np.testing.assert_almost_equal(
            last_case.outputs[out_name], prob[var_name],
            err_msg='Case reader gives '
                    'incorrect Parameter value'
                    ' for {0}'.format(var_name))
def test_pbo_desvar_nsga2(self):
    """Setup-only check: NSGA2 accepts a pass-by-object design variable
    without erroring during setup."""
    if pyOptSparseDriver is None:
        raise unittest.SkipTest("pyOptSparse not installed")

    top = Problem()
    root = top.root = Group()

    # one pass-by-object desvar (a unicode string) and one float desvar
    root.add('p1', IndepVarComp('x', u'var_x', pass_by_obj=True))
    root.add('p2', IndepVarComp('y', -4.0))
    root.add('p', PassByObjParaboloid())
    root.connect('p1.x', 'p.x')
    root.connect('p2.y', 'p.y')

    driver = top.driver = pyOptSparseDriver()
    driver.options['optimizer'] = 'NSGA2'
    driver.add_desvar('p1.x')
    driver.add_desvar('p2.y')
    driver.add_objective('p.f_xy')

    top.setup(check=False)
def test_sellar_mdf(self):
    """Grouped Sellar MDF optimization in reverse mode; checks the known
    optimum z = (1.9776, 0), x = 0."""
    prob = Problem()
    model = prob.model = SellarDerivativesGrouped()

    driver = prob.driver = pyOptSparseDriver()
    driver.options['optimizer'] = OPTIMIZER
    if OPTIMIZER == 'SNOPT':
        driver.opt_settings['Verify level'] = 3
    driver.options['print_results'] = False

    model.add_design_var('z', lower=np.array([-10.0, 0.0]),
                         upper=np.array([10.0, 10.0]))
    model.add_design_var('x', lower=0.0, upper=10.0)
    model.add_objective('obj')
    model.add_constraint('con1', upper=0.0)
    model.add_constraint('con2', upper=0.0)

    prob.setup(check=False, mode='rev')
    prob.run_driver()

    for value, expected in ((prob['z'][0], 1.9776),
                            (prob['z'][1], 0.0),
                            (prob['x'], 0.0)):
        assert_rel_error(self, value, expected, 1e-3)
def test_simple_paraboloid_equality(self):
    """Paraboloid with an equality constraint c = 15.0 - x + y = 0.

    Fix: the constraint was declared ``ctype='ineq'`` although the test
    name and the expected optimum (the constraint is active, x - y = 15)
    indicate an equality constraint; it is now ``ctype='eq'``.
    """
    prob = Problem()
    root = prob.root = Group()

    root.add('p1', ParamComp('x', 50.0), promotes=['*'])
    root.add('p2', ParamComp('y', 50.0), promotes=['*'])
    root.add('comp', Paraboloid(), promotes=['*'])
    root.add('con', ExecComp('c = 15.0 - x + y'), promotes=['*'])

    prob.driver = pyOptSparseDriver()
    prob.driver.add_param('x', low=-50.0, high=50.0)
    prob.driver.add_param('y', low=-50.0, high=50.0)
    prob.driver.add_objective('f_xy')
    prob.driver.add_constraint('c', ctype='eq')

    prob.setup(check=False)
    prob.run()

    # Minimum should be at (7.166667, -7.833334)
    assert_rel_error(self, prob['x'], 7.16667, 1e-6)
    assert_rel_error(self, prob['y'], -7.833334, 1e-6)
def test_simple_paraboloid_double_sided_high(self):
    """Double-sided constraint 10 <= x - y <= 11; the upper bound should be
    active at the optimum."""
    prob = Problem()
    root = prob.root = Group()

    for name, comp in (('p1', IndepVarComp('x', 50.0)),
                       ('p2', IndepVarComp('y', 50.0)),
                       ('comp', Paraboloid()),
                       ('con', ExecComp('c = x - y'))):
        root.add(name, comp, promotes=['*'])

    driver = prob.driver = pyOptSparseDriver()
    driver.options['optimizer'] = OPTIMIZER
    driver.options['print_results'] = False
    driver.add_desvar('x', lower=-50.0, upper=50.0)
    driver.add_desvar('y', lower=-50.0, upper=50.0)
    driver.add_objective('f_xy')
    driver.add_constraint('c', lower=10.0, upper=11.0)

    prob.setup(check=False)
    prob.run()

    # Minimum should be at (7.166667, -7.833334)
    assert_rel_error(self, prob['x'] - prob['y'], 11.0, 1e-6)