Example 1
    def test_basic_CONMIN(self):

        try:
            from pyopt_driver.pyopt_driver import pyOptDriver
        except ImportError:
            raise SkipTest("this test requires pyOpt to be installed")

        self.top = OptimizationConstrained()
        set_as_top(self.top)

        try:
            self.top.driver.optimizer = 'CONMIN'
        except ValueError:
            raise SkipTest("CONMIN not present on this system")

        self.top.driver.title = 'Little Test'
        optdict = {}
        self.top.driver.options = optdict
        self.top.driver.pyopt_diff = True

        self.top.run()

        assert_rel_error(self, self.top.paraboloid.x, 7.175775, 0.01)
        assert_rel_error(self, self.top.paraboloid.y, -7.824225, 0.01)
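All of the snippets in this listing exercise assert_rel_error from the classic OpenMDAO test utilities. As a reference point, here is a minimal scalar sketch of the relative-error check it performs; the failure-message format follows the one shown in test_rel_error_inf_nan further down, but the exact NaN/zero/array handling of the real helper may differ:

    from math import isnan

    def assert_rel_error(test_case, actual, desired, tolerance):
        # Minimal sketch: fail test_case if the relative error of 'actual'
        # with respect to 'desired' exceeds 'tolerance'; NaN always fails.
        try:
            error = (actual - desired) / desired
        except ZeroDivisionError:
            error = actual
        if isnan(error) or abs(error) > tolerance:
            test_case.fail("actual %s, desired %s, rel error %s, tolerance %s"
                           % (actual, desired, error, tolerance))
        return error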
    def test_in_assembly(self):
        asm = self._get_assembly()
        self.assertEqual(set(asm.list_connections()),
                         set([('metamodel.d', 'comp2.b'), ('metamodel.c', 'comp2.a'),
                              ('comp1.c', 'metamodel.a'), ('comp1.d', 'metamodel.b')]))

        # do some training
        data = [1, 2, 3, 4]

        for a, b in zip(data[:-1], data[1:]):
            asm.comp1.a = a
            asm.comp1.b = b
            asm.metamodel.train_next = 1
            asm.run()

        # now run and get some results
        asm.comp1.a = 1.
        asm.comp1.b = 2.

        asm.run()

        assert_rel_error(self, asm.comp2.c, 6, 0.02)
        assert_rel_error(self, asm.comp2.d, -2, 0.02)

        # set new model and verify disconnect
        asm.metamodel.model = Simple2()
        self.assertEqual(asm.list_connections(), [])
    def test_parameter_groups(self):

        self.top = set_as_top(Assembly())

        exp1 = ['y = 2.0*x']
        deriv1 = ['dy_dx = 2.0']
        self.top.add('driver', Driv())

        self.top.add('comp1', ExecCompWithDerivatives(exp1, deriv1))
        self.top.add('comp2', ExecCompWithDerivatives(exp1, deriv1))

        self.top.driver.workflow.add(['comp1', 'comp2'])

        # Top driver setup
        self.top.driver.differentiator = Analytic()
        obj = 'comp1.y+comp2.y'
        self.top.driver.add_parameter(['comp1.x', 'comp2.x'], low=-100., high=100., fd_step=.001)
        self.top.driver.add_objective(obj)

        self.top.comp1.x1 = 1.0
        self.top.comp2.x2 = 1.0
        self.top.run()
        self.top.driver.differentiator.calc_gradient()

        grad = self.top.driver.differentiator.get_gradient(obj)
        assert_rel_error(self, grad[0], 4.0, .001)
Example 4
    def test_opt1(self):

        self.top.driver.add_objective('comp.result')
        
        self.top.driver.add_parameter('comp.x[0]', 0.0, 100.0,
                                      fd_step = .00001)
        self.top.driver.add_parameter('comp.x[1]', 0.0, 100.0,
                                      fd_step = .00001)

        map(self.top.driver.add_constraint,[
            '2.0 * comp.x[0] - comp.x[1] - 1.0 > 0.0',
            'comp.x[0] - 2.0 * comp.x[1] + 1.0 > 0.0',
            '- comp.x[0]**2 + 2.0 * ( comp.x[0] + comp.x[1]) - 1.0 > 0.0'
            ])

        self.top.run()

        assert_rel_error(self,
                         self.top.driver.eval_objective(),
                         self.top.comp.opt_objective, 
                         0.005)
        self.assertAlmostEqual(self.top.comp.opt_design_vars[0], 
                               self.top.comp.x[0], places=2)
        self.assertAlmostEqual(self.top.comp.opt_design_vars[1], 
                               self.top.comp.x[1], places=2)
Example 5
    def test_sellar_Newton_parallel(self):

        top = set_as_top(SellarMDFwithDerivs())
        top.replace('driver', NewtonSolver())

        top.driver.add_parameter('C2.y1', low=-1e99, high=1e99)
        top.driver.add_constraint('C1.y1 = C2.y1')
        top.driver.add_parameter('C1.y2', low=-1.e99, high=1.e99)
        top.driver.add_constraint('C2.y2 = C1.y2')

        expected = {'C1.y1': 3.1598617768014536, 'C2.y2': 3.7551999159927316}

        top.driver.iprint = 0
        top.driver.max_iteration = 20
        top.run()
        # print top.C1.y1, top.C2.y1
        # print top.C1.y2, top.C2.y2

        # gather the values back to the rank 0 process and compare to expected
        dist_answers = top._system.mpi.comm.gather(
            [(k[0], v) for k, v in top._system.vec['u'].items()], root=0)
        if self.comm.rank == 0:
            for answers in dist_answers:
                for name, val in answers:
                    if name in expected:
                        #print self.comm.rank, name, val[0]
                        assert_rel_error(self, val[0], expected[name], 0.001)
                        del expected[name]

            if expected:
                self.fail("not all expected values were found")
    def test_varTree_on_boundary_subassembly(self):
        top = set_as_top(Assembly())
        top.add('comp', AssemblyWithBoundryVarTree())
        top.add('driver', SimpleDriver())
        top.driver.workflow.add('comp')
        top.driver.add_parameter('comp.ins.x1', low=-100, high=100)
        top.driver.add_objective('comp.y')

        top.run()

        # check for invalidation problems
        top.comp.ins.x1 = 123.4
        top.comp.run()
        self.assertEqual(top.comp.ins.x1, top.comp.comp1.ins.x1)

        #print top.comp.driver.workflow.calc_gradient(['ins.x1'], ['y'], mode='fd')
        inputs = ['comp.ins.x1',]
        outputs = ['comp.y']
        J_fd = top.driver.workflow.calc_gradient(inputs, outputs, mode='fd')
        top.driver.workflow.config_changed()
        J_forward = top.driver.workflow.calc_gradient(inputs, outputs, mode="forward")
        top.driver.workflow.config_changed()
        J_reverse = top.driver.workflow.calc_gradient(inputs, outputs, mode="adjoint")

        assert_rel_error(self, linalg.norm(J_fd - J_forward), 0, .00001)
        assert_rel_error(self, linalg.norm(J_fd - J_reverse), 0, .00001)
Example 7
    def test_chordProperties(self):
        comp = ChordProperties()
        comp.yN = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
                           dtype=np.float64)
        comp.c = np.array([
            0.2729, 1.3903, 1.1757, 1.0176, 0.8818, 0.7602, 0.6507, 0.5528,
            0.4666, 0.3925
        ])
        comp.d = np.array([
            0.0843, 0.0780, 0.0718, 0.0655, 0.0592, 0.0530, 0.0477, 0.0431,
            0.0384, 0.0338
        ])
        comp.GWing = 1
        comp.xtU = np.array([
            0.0500, 0.1500, 0.1500, 0.1500, 0.1500, 0.1500, 0.1500, 0.1500,
            0.1500, 0.1500
        ])

        comp.run()

        tol = 0.001
        expected_mChord = [
            0.018497, 0.167168, 0.133574, 0.110904, 0.092785, 0.077596,
            0.064695, 0.053795, 0.044734, 0.037342
        ]
        expected_xCGChord = [
            0.37481, 0.29252, 0.29447, 0.29634, 0.29838, 0.30073, 0.30350,
            0.30677, 0.31059, 0.31498
        ]
        for i, (e_mChord, e_xCGChord) in enumerate(
                zip(expected_mChord, expected_xCGChord)):
            assert_rel_error(self, comp.mChord[i], e_mChord, tol)
            assert_rel_error(self, comp.xCGChord[i], e_xCGChord, tol)
Example 8
    def test_splitterBPR(self): 
        comp = self.comp

        comp.BPR = 2.2285
        comp.MNexit1_des = 1.00
        comp.MNexit2_des = 1.00
        comp.design = True

        comp.Fl_I = self.fs

        comp.run()

        self.check(comp)

        #run off design
        comp.run()

        self.check(comp)

        #try changing something

        TOL = 0.001
        comp.Fl_I.W *= .95
        comp.run()
        assert_rel_error(self, comp.Fl_O1.Mach, .76922, TOL)
        assert_rel_error(self, comp.Fl_O2.Mach, .76922, TOL)
Example 9
    def test_splitterW(self): 
        comp = self.comp = set_as_top(splitter.SplitterW())

        comp.W1_des = 1.08
        comp.MNexit1_des = 1.00
        comp.MNexit2_des = 1.00
        comp.design = True

        comp.Fl_I = self.fs

        comp.run()

        self.check(comp)

        #run off design
        comp.run()

        self.check(comp)

        #try changing something

        TOL = 0.001
        comp.Fl_I.W *= .95
        comp.run()
        assert_rel_error(self, comp.Fl_O1.Mach, .76922, TOL)
        assert_rel_error(self, comp.Fl_O2.Mach, .76922, TOL)
Example 10
    def test_simple_array(self):

        model = set_as_top(Assembly())
        model.add('comp', SimpleCompArray())
        model.driver.workflow.add('comp')
        #model.driver.gradient_options.fd_form = 'complex_step'
        model.run()

        J = model.driver.workflow.calc_gradient(inputs=['comp.x'],
                                                outputs=['comp.y'])
        diff = abs(J - model.comp.J).max()
        assert_rel_error(self, diff, 0.0, .0001)
        self.assertTrue(J[0, 0] is not complex)

        model.add('driver', SimpleDriver())
        model.driver.add_parameter('comp.x', low=-10, high=10)
        model.driver.add_objective('comp.y - comp.x')
        model.run()

        model.driver.workflow.config_changed()
        J = model.driver.workflow.calc_gradient(mode='fd')

        diff = abs(J + eye(4) - model.comp.J).max()
        assert_rel_error(self, diff, 0.0, .0001)
        self.assertTrue(J[0, 0] is not complex)
    def test_SolverCO2scalar(self):
        sim = set_as_top(SolverCO2scalar())
        sim.x = 1.0
        sim.run()
        J = sim.driver.calc_gradient()

        assert_rel_error(self, J[0, 0], 2.0, .001)
Example 12
    def test_simple_float_subassy(self):

        model = set_as_top(Assembly())
        model.add('sub', Assembly())
        model.sub.add('subsub', Assembly())
        model.driver.workflow.add('sub')
        model.sub.driver.workflow.add('subsub')

        model.sub.subsub.add('comp', SimpleCompFloat())
        model.sub.subsub.driver.workflow.add('comp')
        model.sub.subsub.create_passthrough('comp.x')
        model.sub.subsub.create_passthrough('comp.y')
        model.sub.create_passthrough('subsub.x')
        model.sub.create_passthrough('subsub.y')

        model.driver.gradient_options.fd_form = 'complex_step'
        model.driver.gradient_options.force_fd = True
        model.run()

        J = model.driver.workflow.calc_gradient(inputs=['sub.x'],
                                                outputs=['sub.y'])

        assert_rel_error(self, J[0, 0], 2.0, .000001)
        self.assertTrue(model.sub.subsub.comp.x is not complex)
        self.assertTrue(model.sub.subsub.comp.y is not complex)
Example 13
    def test_Abasic_SNOPT_derivatives_linear_constraints(self):

        try:
            from pyoptsparse import Optimization
        except ImportError:
            raise SkipTest("this test requires pyoptsparse to be installed")

        self.top = OptimizationConstrainedDerivatives()
        set_as_top(self.top)

        try:
            self.top.driver.optimizer = 'SNOPT'
        except ValueError:
            raise SkipTest("SNOPT not present on this system")

        self.top.driver.title = 'Little Test with Gradient'
        optdict = {}
        self.top.driver.options = optdict

        self.top.driver.clear_constraints()
        self.top.driver.add_constraint('paraboloid.x-paraboloid.y >= 15.0')
        self.top.driver.add_constraint('paraboloid.x > 7.1', linear=True)

        self.top.run()

        assert_rel_error(self, self.top.paraboloid.x, 7.175775, 0.01)
        assert_rel_error(self, self.top.paraboloid.y, -7.824225, 0.01)
Example 14
    def test_2_drivers(self):
        print "*** test_2_drivers ***"
        self.rosen_setUp()
        drv = self.top.add('driver1a', CONMINdriver())
        self.top.add('comp1a', ExprComp(expr='x**2'))
        self.top.add('comp2a', ExprComp(expr='x-5.0*sqrt(x)'))
        self.top.connect('comp1a.f_x', 'comp2a.x')

        self.top.driver.workflow.add('driver1a')
        drv.workflow.add(['comp1a', 'comp2a'])

        drv.itmax = 40
        drv.add_objective('comp2a.f_x')
        drv.add_parameter('comp1a.x', low=0, high=99)

        self.top.run()

        self.assertAlmostEqual(self.opt_objective,
                               self.top.driver1.eval_objective(),
                               places=2)
        self.assertAlmostEqual(self.opt_design_vars[0],
                               self.top.comp1.x,
                               places=1)
        assert_rel_error(self, self.opt_design_vars[1], self.top.comp2.x, 0.01)
        assert_rel_error(self, self.opt_design_vars[2], self.top.comp3.x, 0.01)
        self.assertAlmostEqual(self.opt_design_vars[3],
                               self.top.comp4.x,
                               places=1)
        self.assertAlmostEqual(-6.2498054387439232,
                               self.top.driver1a.eval_objective(),
                               places=2)
        self.assertAlmostEqual(2.4860514783551508, self.top.comp1a.x, places=1)
Example 15
    def test_opt1_with_CONMIN_gradient_a(self):
        # Scalar parameters, array constraint, CONMIN gradient.
        # Note: all other tests use OpenMDAO gradient
        self.top.driver.add_objective('10*comp.result')
        self.top.driver.add_parameter('comp.x[0]', fd_step=.00001)
        self.top.driver.add_parameter('comp.x[1]', fd_step=.00001)
        self.top.driver.add_parameter('comp.x[2]', fd_step=.00001)
        self.top.driver.add_parameter('comp.x[3]', fd_step=.00001)

        self.top.driver.add_constraint('comp.g <= 0')

        self.top.driver.conmin_diff = True
        self.top.run()

        # pylint: disable-msg=E1101
        assert_rel_error(self, self.top.comp.opt_objective,
                         self.top.driver.eval_objective(), 0.01)
        assert_rel_error(self, 1 + self.top.comp.opt_design_vars[0],
                         1 + self.top.comp.x[0], 0.05)
        assert_rel_error(self, self.top.comp.opt_design_vars[1],
                         self.top.comp.x[1], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[2],
                         self.top.comp.x[2], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[3],
                         self.top.comp.x[3], 0.05)
    def test_vartree_subassy_behavior(self):

        class tree(VariableTree):
            x = Float(3.0)
            y = Float(4.0)
            z = Float(5.0)

        class MyComp(Component):
            ins = VarTree(tree(), iotype='in')
            outs = VarTree(tree(), iotype='out')

            def execute(self):
                self.outs = self.ins.copy()
                self.outs.z = 2.0*self.ins.x

        top = set_as_top(Assembly())
        top.add('sub', Assembly())
        top.driver.workflow.add('sub')
        top.sub.add('comp1', MyComp())
        top.sub.add('comp2', MyComp())
        top.sub.driver.workflow.add(['comp1', 'comp2'])
        top.sub.create_passthrough('comp1.ins')
        top.sub.create_passthrough('comp1.outs')
        top.sub.create_passthrough('comp2.outs', 'zzz')
        top.sub.connect('ins.z', 'comp2.ins.x')

        top.run()

        J = top.driver.workflow.calc_gradient(['sub.ins.x'], ['sub.outs.z'])
        assert_rel_error(self, J[0, 0], 2.0, .00001)

        top.driver.workflow.config_changed()
        J = top.driver.workflow.calc_gradient(['sub.ins.z'], ['sub.zzz.z'])
        print J
    def test_branch_output_in_opaque_system(self):

        # This test replicates a bug where an interior output was missing in
        # an opaque system.

        top = set_as_top(Assembly())
        top.add('nest', Assembly())
        top.nest.add('comp1', ExecComp(['y=7.0*x1']))
        top.nest.add('comp2', ExecComp(['y=5.0*x1 + 2.0*x2']))
        top.driver.workflow.add(['nest'])
        top.nest.driver.workflow.add(['comp1', 'comp2'])

        top.nest.add('x1', Float(3.0, iotype='in'))
        top.nest.add('y2', Float(3.0, iotype='out'))
        top.nest.connect('comp1.y', 'comp2.x2')
        top.nest.connect('x1', 'comp1.x1')
        top.nest.connect('x1', 'comp2.x1')
        top.nest.create_passthrough('comp1.y')
        top.nest.connect('comp2.y', 'y2')

        top.run()

        J = top.driver.calc_gradient(inputs=['nest.x1'],
                                     outputs=['nest.y', 'nest.y2'],
                                     mode='forward')

        assert_rel_error(self, J[0, 0], 7.0, .001)
        assert_rel_error(self, J[1, 0], 19.0, .001)
Example 18
    def verify(self, names, case, expected):
        for name, value in expected.items():
            i = names.index(name)
            if isinstance(value, float):
                assert_rel_error(self, case[i], value, 0.001)
            else:
                self.assertEqual(case[i], value)
Example 19
    def compare_derivatives(self, var_in, var_out):

        wflow = self.model.driver.workflow
        inputs = ['comp.%s' % v for v in var_in]
        outputs = ['comp.%s' % v for v in var_out]

        # Numeric
        self.model.driver.update_parameters()
        wflow.config_changed()
        Jn = wflow.calc_gradient(inputs=inputs, outputs=outputs, mode="fd")

        # Analytic forward
        self.model.driver.update_parameters()
        wflow.config_changed()
        Jf = wflow.calc_gradient(inputs=inputs, outputs=outputs)

        diff = abs(Jf - Jn)
        assert_rel_error(self, diff.max(), 0.0, 1e-5)

        # Analytic adjoint
        self.model.driver.update_parameters()
        wflow.config_changed()
        Ja = wflow.calc_gradient(inputs=inputs,
                                 outputs=outputs,
                                 mode='adjoint')

        diff = abs(Ja - Jn)
        assert_rel_error(self, diff.max(), 0.0, 1e-5)
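The compare_derivatives helper above is meant to be called from individual tests with lists of variable names on comp. A hypothetical caller (the names x and y are illustrative, not taken from the original fixture) would look like:

    def test_derivatives_match(self):
        # hypothetical usage of the compare_derivatives helper above: checks
        # the forward and adjoint Jacobians of comp.y w.r.t. comp.x against
        # the finite-difference result
        self.compare_derivatives(['x'], ['y'])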
    def test_optimization(self):
        model = set_as_top(OptimizationConstrained())
        model.driver.gradient_options.fd_form = 'complex_step'
        model.run()

        assert_rel_error(self, model.paraboloid.x, 7.175775, 0.01)
        assert_rel_error(self, model.paraboloid.y, -7.824225, 0.01)
    def test_2_drivers(self):
        print "*** test_2_drivers ***"
        self.rosen_setUp()
        drv = self.top.add('driver1a', CONMINdriver())
        self.top.add('comp1a', ExprComp(expr='x**2'))
        self.top.add('comp2a', ExprComp(expr='x-5.0*sqrt(x)'))
        self.top.connect('comp1a.f_x', 'comp2a.x')

        self.top.driver.workflow.add('driver1a')
        drv.workflow.add(['comp1a', 'comp2a'])

        drv.itmax = 40
        drv.add_objective('comp2a.f_x')
        drv.add_parameter('comp1a.x', low=0, high=99)

        self.top.run()

        self.assertAlmostEqual(self.opt_objective,
                               self.top.driver1.eval_objective(), places=2)
        self.assertAlmostEqual(self.opt_design_vars[0],
                               self.top.comp1.x, places=1)
        assert_rel_error(self, self.opt_design_vars[1], self.top.comp2.x, 0.01)
        assert_rel_error(self, self.opt_design_vars[2], self.top.comp3.x, 0.01)
        self.assertAlmostEqual(self.opt_design_vars[3],
                               self.top.comp4.x, places=1)
        self.assertAlmostEqual(-6.2498054387439232,
                               self.top.driver1a.eval_objective(),
                               places=2)
        self.assertAlmostEqual(2.4860514783551508,
                               self.top.comp1a.x, places=1)
Example 22
    def test_rel_error_inf_nan(self):

        try:
            assert_rel_error(self, float('nan'), 6.5, 0.0001)
        except AssertionError, exc:
            msg = "actual nan, desired 6.5, rel error nan, tolerance 0.0001"
            self.assertEqual(str(exc), msg)
    def test_scale_adder(self):

        opt_problem = OptimizationUnconstrainedScaleShift()
        opt_problem.run()

        assert_rel_error(self, opt_problem.paraboloid.x, 0.006667, 0.001)
        assert_rel_error(self, opt_problem.paraboloid.y, -1733.333313, 0.001)
    def test_opt1(self):
        # Run with scalar parameters, scalar constraints, and OpenMDAO gradient.
        self.top.driver.add_objective('10*comp.result')
        # pylint: disable-msg=C0301
        map(self.top.driver.add_parameter,
            ['comp.x[0]', 'comp.x[1]','comp.x[2]', 'comp.x[3]'])

        map(self.top.driver.add_constraint, [
            'comp.x[0]**2+comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2+comp.x[2]+comp.x[3]**2-comp.x[3] < 8',
            'comp.x[0]**2-comp.x[0]+2*comp.x[1]**2+comp.x[2]**2+2*comp.x[3]**2-comp.x[3] < 10',
            '2*comp.x[0]**2+2*comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2-comp.x[3] < 5'])
        self.top.driver.recorders = [ListCaseRecorder()]
        self.top.driver.printvars = ['comp.opt_objective']
        self.top.driver.iprint = 0
        self.top.run()

        # pylint: disable-msg=E1101
        assert_rel_error(self, self.top.comp.opt_objective,
                         self.top.driver.eval_objective(), 0.01)
        assert_rel_error(self, 1 + self.top.comp.opt_design_vars[0],
                         1 + self.top.comp.x[0], 0.05)
        assert_rel_error(self, self.top.comp.opt_design_vars[1],
                         self.top.comp.x[1], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[2],
                         self.top.comp.x[2], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[3],
                         self.top.comp.x[3], 0.05)

        cases = self.top.driver.recorders[0].get_iterator()
        end_case = cases[-1]

        self.assertEqual(self.top.comp.x[1],
                         end_case.get_input('comp.x[1]'))
        self.assertEqual(self.top.comp.opt_objective,
                         end_case.get_output('comp.opt_objective'))
    def test_gauss_seidel(self):

        self.top.run()

        assert_rel_error(self, self.top.d1.y1, self.top.d2.y1, 1.0e-4)
        assert_rel_error(self, self.top.d1.y2, self.top.d2.y2, 1.0e-4)
        self.assertTrue(self.top.d1.exec_count < 10)
    def test_opt1_with_CONMIN_gradient(self):
        # Note: all other tests use OpenMDAO gradient
        self.top.driver.add_objective('10*comp.result')
        self.top.driver.add_parameter('comp.x[0]', fd_step=.00001)
        self.top.driver.add_parameter('comp.x[1]', fd_step=.00001)
        self.top.driver.add_parameter('comp.x[2]', fd_step=.00001)
        self.top.driver.add_parameter('comp.x[3]', fd_step=.00001)

        # pylint: disable-msg=C0301
        map(self.top.driver.add_constraint, [
            'comp.x[0]**2+comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2+comp.x[2]+comp.x[3]**2-comp.x[3] < 8',
            'comp.x[0]**2-comp.x[0]+2*comp.x[1]**2+comp.x[2]**2+2*comp.x[3]**2-comp.x[3] < 10',
            '2*comp.x[0]**2+2*comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2-comp.x[3] < 5'])

        self.top.driver.conmin_diff = True
        self.top.run()

        # pylint: disable-msg=E1101
        assert_rel_error(self, self.top.comp.opt_objective,
                         self.top.driver.eval_objective(), 0.01)
        assert_rel_error(self, 1 + self.top.comp.opt_design_vars[0],
                         1 + self.top.comp.x[0], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[1],
                         self.top.comp.x[1], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[2],
                         self.top.comp.x[2], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[3],
                         self.top.comp.x[3], 0.05)
    def test_basic_CONMIN(self):

        try:
            from pyopt_driver.pyopt_driver import pyOptDriver
        except ImportError:
            raise SkipTest("this test requires pyOpt to be installed")

        self.top = OptimizationConstrained()
        set_as_top(self.top)

        try:
            self.top.driver.optimizer = 'CONMIN'
        except ValueError:
            raise SkipTest("CONMIN not present on this system")

        self.top.driver.title = 'Little Test'
        optdict = {}
        self.top.driver.options = optdict
        self.top.driver.pyopt_diff = True

        self.top.run()

        assert_rel_error(self, self.top.paraboloid.x, 7.175775, 0.01)
        assert_rel_error(self, self.top.paraboloid.y, -7.824225, 0.01)
    def test_opt1_with_CONMIN_gradient_a(self):
        # Scalar parameters, array constraint, CONMIN gradient.
        # Note: all other tests use OpenMDAO gradient
        self.top.driver.add_objective('10*comp.result')
        self.top.driver.add_parameter('comp.x[0]', fd_step=.00001)
        self.top.driver.add_parameter('comp.x[1]', fd_step=.00001)
        self.top.driver.add_parameter('comp.x[2]', fd_step=.00001)
        self.top.driver.add_parameter('comp.x[3]', fd_step=.00001)

        self.top.driver.add_constraint('comp.g <= 0')

        self.top.driver.conmin_diff = True
        self.top.run()

        # pylint: disable-msg=E1101
        assert_rel_error(self, self.top.comp.opt_objective,
                         self.top.driver.eval_objective(), 0.01)
        assert_rel_error(self, 1 + self.top.comp.opt_design_vars[0],
                         1 + self.top.comp.x[0], 0.05)
        assert_rel_error(self, self.top.comp.opt_design_vars[1],
                         self.top.comp.x[1], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[2],
                         self.top.comp.x[2], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[3],
                         self.top.comp.x[3], 0.05)
Example 29
    def test_gauss_seidel_sub(self):
        # Note, Fake Finite Difference is active in this test.

        self.top = set_as_top(Sellar_MDA_subbed())
        self.top.run()

        assert_rel_error(self, self.top.d1.y1,
                               self.top.d2.y1,
                               1.0e-4)
        assert_rel_error(self, self.top.d1.y2,
                               self.top.d2.y2,
                               1.0e-4)
        self.assertTrue(self.top.d1.exec_count < 10)

        inputs = ['d1.z1', 'd1.z2', 'd2.z1', 'd2.z2']
        outputs = ['d1.y1', 'd2.y2']
        self.top.driver.workflow.config_changed()
        J1 = self.top.driver.workflow.calc_gradient(inputs=inputs,
                                                   outputs=outputs)
        self.top.run()
        J2 = self.top.driver.workflow.calc_gradient(inputs=inputs,
                                                   outputs=outputs,
                                                   mode='fd')

        J = (J1 - J2)
        print J.max()
        self.assertTrue(J.max() < 1.0e-3)
    def test_simple_float(self):

        model = set_as_top(Assembly())
        model.add('comp', SimpleCompFloat())
        model.driver.workflow.add('comp')
        model.driver.gradient_options.fd_form = 'complex_step'

        model.run()

        J = model.driver.workflow.calc_gradient(inputs=['comp.x'],
                                                outputs=['comp.y'])

        assert_rel_error(self, J[0, 0], 2.0, .000001)
        self.assertTrue(model.comp.x is not complex)
        self.assertTrue(model.comp.y is not complex)

        # Make sure we can do whole workflows.
        model.add('comp2', SimpleCompFloat())
        model.driver.workflow.add('comp2')
        model.connect('comp.y', 'comp2.x')

        model.run()

        model.driver.workflow.config_changed()
        J = model.driver.workflow.calc_gradient(inputs=['comp.x'],
                                                outputs=['comp2.y'])

        assert_rel_error(self, J[0, 0], 4.0, .000001)
        self.assertTrue(model.comp.x is not complex)
        self.assertTrue(model.comp2.y is not complex)
Example 31
    def test_opt1_with_CONMIN_gradient(self):
        # Note: all other tests use OpenMDAO gradient
        self.top.driver.add_objective('10*comp.result')
        self.top.driver.add_parameter('comp.x[0]', fd_step=.00001)
        self.top.driver.add_parameter('comp.x[1]', fd_step=.00001)
        self.top.driver.add_parameter('comp.x[2]', fd_step=.00001)
        self.top.driver.add_parameter('comp.x[3]', fd_step=.00001)

        # pylint: disable-msg=C0301
        map(self.top.driver.add_constraint, [
            'comp.x[0]**2+comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2+comp.x[2]+comp.x[3]**2-comp.x[3] < 8',
            'comp.x[0]**2-comp.x[0]+2*comp.x[1]**2+comp.x[2]**2+2*comp.x[3]**2-comp.x[3] < 10',
            '2*comp.x[0]**2+2*comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2-comp.x[3] < 5'
        ])

        self.top.driver.conmin_diff = True
        self.top.run()

        # pylint: disable-msg=E1101
        assert_rel_error(self, self.top.comp.opt_objective,
                         self.top.driver.eval_objective(), 0.01)
        assert_rel_error(self, 1 + self.top.comp.opt_design_vars[0],
                         1 + self.top.comp.x[0], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[1],
                         self.top.comp.x[1], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[2],
                         self.top.comp.x[2], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[3],
                         self.top.comp.x[3], 0.05)
    def test_simple_float_subassy(self):

        model = set_as_top(Assembly())
        model.add('sub', Assembly())
        model.sub.add('subsub', Assembly())
        model.driver.workflow.add('sub')
        model.sub.driver.workflow.add('subsub')

        model.sub.subsub.add('comp', SimpleCompFloat())
        model.sub.subsub.driver.workflow.add('comp')
        model.sub.subsub.create_passthrough('comp.x')
        model.sub.subsub.create_passthrough('comp.y')
        model.sub.create_passthrough('subsub.x')
        model.sub.create_passthrough('subsub.y')

        model.driver.gradient_options.fd_form = 'complex_step'
        model.driver.gradient_options.force_fd = True
        model.run()

        J = model.driver.workflow.calc_gradient(inputs=['sub.x'],
                                                outputs=['sub.y'])

        assert_rel_error(self, J[0, 0], 2.0, .000001)
        self.assertTrue(model.sub.subsub.comp.x is not complex)
        self.assertTrue(model.sub.subsub.comp.y is not complex)
Example 33
    def test_conmin_gradient_s(self):
        # Run with 1D parameter, scalar constraints, and CONMIN gradient.
        # pylint: disable-msg=C0301
        map(self.top.driver.add_constraint, [
            'comp.x[0]**2+comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2+comp.x[2]+comp.x[3]**2-comp.x[3] < 8',
            'comp.x[0]**2-comp.x[0]+2*comp.x[1]**2+comp.x[2]**2+2*comp.x[3]**2-comp.x[3] < 10',
            '2*comp.x[0]**2+2*comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2-comp.x[3] < 5'
        ])

        self.top.driver.conmin_diff = True
        self.top.driver.fdch = .000001
        self.top.driver.fdchm = .000001
        self.top.run()

        # pylint: disable-msg=E1101
        assert_rel_error(self, self.top.comp.opt_objective,
                         self.top.driver.eval_objective(), 0.01)
        assert_rel_error(self, 1 + self.top.comp.opt_design_vars[0],
                         1 + self.top.comp.x[0], 0.05)
        assert_rel_error(self, self.top.comp.opt_design_vars[1],
                         self.top.comp.x[1], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[2],
                         self.top.comp.x[2], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[3],
                         self.top.comp.x[3], 0.05)
    def test_simple_array(self):

        model = set_as_top(Assembly())
        model.add('comp', SimpleCompArray())
        model.driver.workflow.add('comp')
        #model.driver.gradient_options.fd_form = 'complex_step'
        model.run()

        J = model.driver.workflow.calc_gradient(inputs=['comp.x'],
                                                outputs=['comp.y'])
        diff = abs(J - model.comp.J).max()
        assert_rel_error(self, diff, 0.0, .0001)
        self.assertTrue(J[0, 0] is not complex)

        model.add('driver', SimpleDriver())
        model.driver.add_parameter('comp.x', low=-10, high=10)
        model.driver.add_objective('comp.y - comp.x')
        model.run()

        model.driver.workflow.config_changed()
        J = model.driver.workflow.calc_gradient(mode='fd')

        diff = abs(J + eye(4) - model.comp.J).max()
        assert_rel_error(self, diff, 0.0, .0001)
        self.assertTrue(J[0, 0] is not complex)
Example 35
    def test_gauss_seidel(self):

        self.top.run()

        assert_rel_error(self, self.top.d1.y1, self.top.d2.y1, 1.0e-4)
        assert_rel_error(self, self.top.d1.y2, self.top.d2.y2, 1.0e-4)
        self.assertTrue(self.top.d1.exec_count < 10)
Example 36
    def test_splitterBPR(self):
        comp = self.comp

        comp.BPR = 2.2285
        comp.MNexit1_des = 1.00
        comp.MNexit2_des = 1.00
        comp.design = True

        comp.Fl_I = self.fs

        comp.run()

        self.check(comp)

        #run off design
        comp.run()

        self.check(comp)

        #try changing something

        TOL = 0.001
        comp.Fl_I.W *= .95
        comp.run()
        assert_rel_error(self, comp.Fl_O1.Mach, .76922, TOL)
        assert_rel_error(self, comp.Fl_O2.Mach, .76922, TOL)
    def test_rel_error_inf_nan(self):

        try:
            assert_rel_error(self, float('nan'), 6.5, 0.0001)
        except AssertionError, exc:
            msg = "actual nan, desired 6.5, rel error nan, tolerance 0.0001"
            self.assertEqual(str(exc), msg)
Example 38
    def test_splitterW(self):
        comp = self.comp = set_as_top(splitter.SplitterW())

        comp.W1_des = 1.08
        comp.MNexit1_des = 1.00
        comp.MNexit2_des = 1.00
        comp.design = True

        comp.Fl_I = self.fs

        comp.run()

        self.check(comp)

        #run off design
        comp.run()

        self.check(comp)

        #try changing something

        TOL = 0.001
        comp.Fl_I.W *= .95
        comp.run()
        assert_rel_error(self, comp.Fl_O1.Mach, .76922, TOL)
        assert_rel_error(self, comp.Fl_O2.Mach, .76922, TOL)
Example 39
    def test_sparProperties(self):
        comp = SparProperties()
        comp.yN = np.array([0, 14.5057])
        comp.d = np.array([
            0.1016,
        ])
        comp.theta = np.array([
            0.6109,
        ])
        comp.nTube = np.array([
            4,
        ])
        comp.nCap = np.array([0, 0])
        comp.lBiscuit = np.array([
            0.3048,
        ])
        #comp.CFRPType = 1
        comp.CFRPType = 'NCT301-1X HS40 G150 33 +/-2%RW'

        comp.run()

        tol = 0.0001
        assert_rel_error(self, comp.EIx[0], 23704.383, tol)
        assert_rel_error(self, comp.EIz[0], 23704.383, tol)
        assert_rel_error(self, comp.EA[0], 18167620.0, tol)
        assert_rel_error(self, comp.GJ[0], 2.2828e4, tol)
        assert_rel_error(self, comp.mSpar[0], 4.7244, tol)
Example 40
    def test_newton(self):

        self.top.driver.newton = True
        self.top.run()

        assert_rel_error(self, self.top.d1.y1, self.top.d2.y1, 1.0e-4)
        assert_rel_error(self, self.top.d1.y2, self.top.d2.y2, 1.0e-4)
Example 41
    def test_paramgroup(self):

        top = set_as_top(Assembly())
        top.add('comp1', GComp_noD())
        top.add('driver', SimpleDriver())
        top.driver.workflow.add(['comp1'])
        top.driver.gradient_options.directional_fd = True

        top.run()

        J = top.driver.calc_gradient(inputs=[('comp1.x1', 'comp1.x2')],
                                     outputs=['comp1.y1'],
                                     mode='forward')
        assert_rel_error(self, J[0, 0], 12.0, .001)

        J = top.driver.calc_gradient(inputs=[('comp1.x1', 'comp1.x2')],
                                     outputs=['comp1.y1'],
                                     mode='fd')
        assert_rel_error(self, J[0, 0], 12.0, .001)

        J = top.driver.calc_gradient(inputs=['comp1.x1', ('comp1.x2')],
                                     outputs=['comp1.y1'],
                                     mode='forward')
        assert_rel_error(self, J[0, 0], 5.0, .001)
        assert_rel_error(self, J[0, 1], 7.0, .001)

        J = top.driver.calc_gradient(inputs=[('comp1.x1', 'comp1.x2', 'comp1.x3')],
                                     outputs=['comp1.y1'],
                                     mode='forward')
        assert_rel_error(self, J[0, 0], 9.0, .001)
Example 42
    def test_turbine(self):
        self.turbine = set_as_top(turbine_prmap.TurbinePRmap())

        self.fs = flowstation.FlowStation()
        self.fs._species = [.9816, 0, .018355, 0, 0, 0, 0]
        self.fs.W = 103.47
        self.fs.setTotalTP(2371.94, 119.086)

        self.b1 = flowstation.FlowStation()
        self.b1.W = 6.1386
        self.b1.setTotalTP(1211.05, 126.017)

        self.b2 = flowstation.FlowStation()
        self.b2.W = 3.9064
        self.b2.setTotalTP(1211.05, 126.017)

        turbine = self.turbine
        turbine.eff = .9133
        turbine.PR = 2.670
        turbine.Nmech = 8000.

        turbine.Fl_I = self.fs
        turbine.Fl_bld1 = self.b1
        turbine.Fl_bld2 = self.b2
        turbine.run()
        TOL = .001

        assert_rel_error(self, turbine.Fl_O.W, 113.51, TOL)
        assert_rel_error(self, turbine.Fl_O.Pt, 44.60, TOL)
        assert_rel_error(self, turbine.Fl_O.Tt, 1850.6, TOL)

        assert_rel_error(self, turbine.pwr, 19684, TOL)
        assert_rel_error(self, turbine.trq, 1353.2, TOL)
Example 43
    def test_simple_float(self):

        model = set_as_top(Assembly())
        model.add('comp', SimpleCompFloat())
        model.driver.workflow.add('comp')
        model.driver.gradient_options.fd_form = 'complex_step'

        model.run()

        J = model.driver.calc_gradient(inputs=['comp.x'], outputs=['comp.y'])

        assert_rel_error(self, J[0, 0], 2.0, .000001)
        self.assertTrue(model.comp.x is not complex)
        self.assertTrue(model.comp.y is not complex)

        # Make sure we can do whole workflows.
        model.add('comp2', SimpleCompFloat())
        model.driver.workflow.add('comp2')
        model.connect('comp.y', 'comp2.x')

        model.run()

        J = model.driver.calc_gradient(inputs=['comp.x'], outputs=['comp2.y'])

        assert_rel_error(self, J[0, 0], 4.0, .000001)
        self.assertTrue(model.comp.x is not complex)
        self.assertTrue(model.comp2.y is not complex)
    def test_simple_float(self):

        model = set_as_top(Assembly())
        model.add("comp", SimpleCompFloat())
        model.driver.workflow.add("comp")
        model.driver.gradient_options.fd_form = "complex_step"

        model.run()

        J = model.driver.calc_gradient(inputs=["comp.x"], outputs=["comp.y"])

        assert_rel_error(self, J[0, 0], 2.0, 0.000001)
        self.assertTrue(model.comp.x is not complex)
        self.assertTrue(model.comp.y is not complex)

        # Make sure we can do whole workflows.
        model.add("comp2", SimpleCompFloat())
        model.driver.workflow.add("comp2")
        model.connect("comp.y", "comp2.x")

        model.run()

        J = model.driver.calc_gradient(inputs=["comp.x"], outputs=["comp2.y"])

        assert_rel_error(self, J[0, 0], 4.0, 0.000001)
        self.assertTrue(model.comp.x is not complex)
        self.assertTrue(model.comp2.y is not complex)
Example 45
    def test_in_assembly(self):
        asm = self._get_assembly()
        self.assertEqual(
            set(asm.list_connections()),
            set([('metamodel.d', 'comp2.b'), ('metamodel.c', 'comp2.a'),
                 ('comp1.c', 'metamodel.a'), ('comp1.d', 'metamodel.b')]))

        # do some training
        data = [1, 2, 3, 4]

        for a, b in zip(data[:-1], data[1:]):
            asm.comp1.a = a
            asm.comp1.b = b
            asm.metamodel.train_next = 1
            asm.run()

        # now run and get some results
        asm.comp1.a = 1.
        asm.comp1.b = 2.

        asm.run()

        assert_rel_error(self, asm.comp2.c, 6, 0.02)
        assert_rel_error(self, asm.comp2.d, -2, 0.02)

        # set new model and verify disconnect
        asm.metamodel.model = Simple2()
        self.assertEqual(asm.list_connections(), [])
Example 46
    def test_simple_units(self):

        self.top = set_as_top(Assembly())

        self.top.add('comp1', CompFoot())
        self.top.add('comp2', CompInch())

        self.top.connect('comp1.y', 'comp2.x')

        self.top.add('driver', Driv())
        self.top.driver.workflow.add(['comp1', 'comp2'])

        self.top.driver.differentiator = Analytic()

        obj = 'comp2.y'
        self.top.driver.add_parameter('comp1.x',
                                      low=-50.,
                                      high=50.,
                                      fd_step=.0001)
        self.top.driver.add_objective(obj)

        self.top.comp1.x = 2.0
        self.top.run()
        self.top.driver.differentiator.calc_gradient()

        grad = self.top.driver.differentiator.get_gradient(obj)
        assert_rel_error(self, grad[0], 48.0, .001)
    def test_Abasic_SNOPT_derivatives_linear_constraints(self):

        try:
            from pyoptsparse import Optimization
        except ImportError:
            raise SkipTest("this test requires pyoptsparse to be installed")

        self.top = OptimizationConstrainedDerivatives()
        set_as_top(self.top)

        try:
            self.top.driver.optimizer = 'SNOPT'
        except ValueError:
            raise SkipTest("SNOPT not present on this system")

        self.top.driver.title = 'Little Test with Gradient'
        optdict = {}
        self.top.driver.options = optdict

        self.top.driver.clear_constraints()
        self.top.driver.add_constraint('paraboloid.x-paraboloid.y >= 15.0')
        self.top.driver.add_constraint('paraboloid.x > 7.1', linear=True)

        self.top.run()

        assert_rel_error(self, self.top.paraboloid.x, 7.175775, 0.01)
        assert_rel_error(self, self.top.paraboloid.y, -7.824225, 0.01)
Example 48
    def test_parameter_groups(self):

        self.top = set_as_top(Assembly())

        exp1 = ['y = 2.0*x']
        deriv1 = ['dy_dx = 2.0']
        self.top.add('driver', Driv())

        self.top.add('comp1', ExecCompWithDerivatives(exp1, deriv1))
        self.top.add('comp2', ExecCompWithDerivatives(exp1, deriv1))

        self.top.driver.workflow.add(['comp1', 'comp2'])

        # Top driver setup
        self.top.driver.differentiator = Analytic()
        obj = 'comp1.y+comp2.y'
        self.top.driver.add_parameter(['comp1.x', 'comp2.x'],
                                      low=-100.,
                                      high=100.,
                                      fd_step=.001)
        self.top.driver.add_objective(obj)

        self.top.comp1.x1 = 1.0
        self.top.comp2.x2 = 1.0
        self.top.run()
        self.top.driver.differentiator.calc_gradient()

        grad = self.top.driver.differentiator.get_gradient(obj)
        assert_rel_error(self, grad[0], 4.0, .001)
Example 49
    def test_MIMO_ExcitingMixing(self):
        # Testing Broyden on a 2 input 2 output case

        self.prob = MIMOBroyden()
        set_as_top(self.prob)

        driver = self.prob.driver
        driver.add_parameter('dis1.x[0]')
        driver.add_parameter('dis1.x[1]')
        driver.add_parameter('dis1.x[2]')
        driver.add_parameter('dis1.x[3]')
        driver.add_parameter('dis1.x[4]')

        driver.add_constraint('dis1.f1 = 0.0')
        driver.add_constraint('dis1.f2 = 0.0')
        driver.add_constraint('dis1.f3 = 0.0')
        driver.add_constraint('dis1.f4 = 0.0')
        driver.add_constraint('dis1.f5 = 0.0')

        self.prob.dis1.x = [1., 1., 1., 1., 1.]
        driver.algorithm = "excitingmixing"
        driver.alpha = 0.1

        self.prob.run()

        assert_rel_error(self, 1.0 - self.prob.dis1.x[0], 1.0, 0.0001)
        assert_rel_error(self, 1.0 - self.prob.dis1.x[1], 1.0, 0.0001)
        assert_rel_error(self, 1.0 - self.prob.dis1.x[2], 1.0, 0.0001)
        assert_rel_error(self, 1.0 - self.prob.dis1.x[3], 1.0, 0.0001)
        assert_rel_error(self, 1.0 - self.prob.dis1.x[4], 1.0, 0.0001)
    def test_MIMO_ExcitingMixing(self):
        # Testing Broyden on a 2 input 2 output case

        prob = MIMOBroyden()
        set_as_top(prob)

        driver = prob.driver
        driver.add_parameter('dis1.x[0]', low=-9.e99, high=9.e99)
        driver.add_parameter('dis1.x[1]', low=-9.e99, high=9.e99)
        driver.add_parameter('dis1.x[2]', low=-9.e99, high=9.e99)
        driver.add_parameter('dis1.x[3]', low=-9.e99, high=9.e99)
        driver.add_parameter('dis1.x[4]', low=-9.e99, high=9.e99)

        driver.add_constraint('dis1.f1 = 0.0')
        driver.add_constraint('dis1.f2 = 0.0')
        driver.add_constraint('dis1.f3 = 0.0')
        driver.add_constraint('dis1.f4 = 0.0')
        driver.add_constraint('dis1.f5 = 0.0')

        prob.dis1.x = [1., 1., 1., 1., 1.]
        driver.algorithm = "excitingmixing"
        driver.alpha = 0.1

        prob.run()

        assert_rel_error(self, 1.0 - prob.dis1.x[0], 1.0, 0.0001)
        assert_rel_error(self, 1.0 - prob.dis1.x[1], 1.0, 0.0001)
        assert_rel_error(self, 1.0 - prob.dis1.x[2], 1.0, 0.0001)
        assert_rel_error(self, 1.0 - prob.dis1.x[3], 1.0, 0.0001)
        assert_rel_error(self, 1.0 - prob.dis1.x[4], 1.0, 0.0001)
Example 51
    def test_sellar_Newton_parallel(self):

        top = set_as_top(SellarMDFwithDerivs())
        top.replace('driver', NewtonSolver())

        top.driver.add_parameter('C2.y1', low=-1e99, high=1e99)
        top.driver.add_constraint('C1.y1 = C2.y1')
        top.driver.add_parameter('C1.y2', low=-1.e99, high=1.e99)
        top.driver.add_constraint('C2.y2 = C1.y2')

        expected = { 'C1.y1': 3.1598617768014536, 'C2.y2': 3.7551999159927316 }

        top.driver.iprint = 0
        top.driver.max_iteration = 20
        top.run()
        # print top.C1.y1, top.C2.y1
        # print top.C1.y2, top.C2.y2

        # gather the values back to the rank 0 process and compare to expected
        dist_answers = top._system.mpi.comm.gather([(k[0],v) for k,v in top._system.vec['u'].items()],
                                                   root=0)
        if self.comm.rank == 0:
            for answers in dist_answers:
                for name, val in answers:
                    if name in expected:
                        #print self.comm.rank, name, val[0]
                        assert_rel_error(self, val[0], expected[name], 0.001)
                        del expected[name]

            if expected:
                self.fail("not all expected values were found")
    def test_scale_adder(self):

        opt_problem = OptimizationUnconstrainedScaleShift()
        opt_problem.run()

        assert_rel_error(self, opt_problem.paraboloid.x, 0.006667, 0.001)
        assert_rel_error(self, opt_problem.paraboloid.y, -1733.333313, 0.001)
    def test_unconstrained(self):

        try:
            from pyopt_driver.pyopt_driver import pyOptDriver
        except ImportError:
            raise SkipTest("this test requires pyOpt to be installed")

        self.top = OptimizationUnconstrained()
        set_as_top(self.top)

        for optimizer in ['CONMIN', 'COBYLA', 'SNOPT', 'SLSQP']:

            try:
                self.top.driver.optimizer = optimizer
            except ValueError:
                raise SkipTest("%s not present on this system" % optimizer)

            self.top.driver.title = 'Little Test'
            optdict = {}
            self.top.driver.options = optdict
            self.top.driver.pyopt_diff = True

            self.top.run()

            assert_rel_error(self, self.top.paraboloid.x, 6.6667, 0.01)
            assert_rel_error(self, self.top.paraboloid.y, -7.3333, 0.01)
Example 54
    def test_varTree_on_boundary_subassembly(self):
        top = set_as_top(Assembly())
        top.add('comp', AssemblyWithBoundryVarTree())
        top.add('driver', SimpleDriver())
        top.driver.workflow.add('comp')
        top.driver.add_parameter('comp.ins.x1', low=-100, high=100)
        top.driver.add_objective('comp.y')
        top.comp.comp1.missing_deriv_policy = 'assume_zero'

        top.run()

        # check for invalidation problems
        top.comp.ins.x1 = 123.4
        top.run()
        self.assertEqual(top.comp.ins.x1, top.comp.comp1.ins.x1)

        #print top.comp.driver.calc_gradient(['ins.x1'], ['y'], mode='fd')
        inputs = ['comp.ins.x1', ]
        outputs = ['comp.y']
        J_fd = top.driver.calc_gradient(inputs, outputs, mode='fd')
        J_forward = top.driver.calc_gradient(inputs, outputs, mode="forward")
        J_reverse = top.driver.calc_gradient(inputs, outputs, mode="adjoint")

        assert_rel_error(self, linalg.norm(J_fd - J_forward), 0, .00001)
        assert_rel_error(self, linalg.norm(J_fd - J_reverse), 0, .00001)
    def test_branch_output_in_opaque_system(self):

        # This test replicates a bug where an interior output was missing in
        # an opaque system.

        top = set_as_top(Assembly())
        top.add('nest', Assembly())
        top.nest.add('comp1', ExecComp(['y=7.0*x1']))
        top.nest.add('comp2', ExecComp(['y=5.0*x1 + 2.0*x2']))
        top.driver.workflow.add(['nest'])
        top.nest.driver.workflow.add(['comp1', 'comp2'])

        top.nest.add('x1', Float(3.0, iotype='in'))
        top.nest.add('y2', Float(3.0, iotype='out'))
        top.nest.connect('comp1.y', 'comp2.x2')
        top.nest.connect('x1', 'comp1.x1')
        top.nest.connect('x1', 'comp2.x1')
        top.nest.create_passthrough('comp1.y')
        top.nest.connect('comp2.y', 'y2')

        top.run()

        J = top.driver.calc_gradient(inputs=['nest.x1'],
                                     outputs=['nest.y', 'nest.y2'],
                                     mode='forward')

        assert_rel_error(self, J[0, 0], 7.0, .001)
        assert_rel_error(self, J[1, 0], 19.0, .001)
Example 56
    def test_vartree_subassy_behavior(self):

        class tree(VariableTree):
            x = Float(3.0)
            y = Float(4.0)
            z = Float(5.0)

        class MyComp(Component):
            ins = VarTree(tree(), iotype='in')
            outs = VarTree(tree(), iotype='out')

            def execute(self):
                self.outs = self.ins.copy()
                self.outs.z = 2.0*self.ins.x

        top = set_as_top(Assembly())
        top.add('sub', Assembly())
        top.driver.workflow.add('sub')
        top.sub.add('comp1', MyComp())
        top.sub.add('comp2', MyComp())
        top.sub.driver.workflow.add(['comp1', 'comp2'])
        top.sub.create_passthrough('comp1.ins')
        top.sub.create_passthrough('comp1.outs')
        top.sub.create_passthrough('comp2.outs', 'zzz')
        top.sub.connect('ins.z', 'comp2.ins.x')

        top.run()

        J = top.driver.calc_gradient(['sub.ins.x'], ['sub.outs.z'])
        assert_rel_error(self, J[0, 0], 2.0, .00001)

        J = top.driver.calc_gradient(['sub.ins.z'], ['sub.zzz.z'])
        print J
    def test_cascade_opt(self):
        top = set_as_top(Assembly())

        eq = ["f = (x-3)**2 + x*y + (y+4)**2 - 3"]
        deriv = ["df_dx = 2.0*x - 6.0 + y", "df_dy = 2.0*y + 8.0 + x"]
        top.add("comp", ExecCompWithDerivatives(eq, deriv))
        top.add("driver", SimpleDriver())
        top.add("opt1", SLSQPdriver())
        top.add("opt2", SLSQPdriver())

        top.opt1.workflow.add(["comp"])
        top.opt2.workflow.add(["comp"])
        top.driver.workflow.add(["opt1", "opt2"])

        top.opt1.add_parameter("comp.x", low=-100, high=100)
        top.opt1.add_parameter("comp.y", low=-100, high=100)
        top.opt1.add_objective("comp.f")
        top.opt1.maxiter = 2
        top.opt2.add_parameter("comp.x", low=-100, high=100)
        top.opt2.add_parameter("comp.y", low=-100, high=100)
        top.opt2.add_objective("comp.f")
        top.opt2.maxiter = 50

        top.run()

        assert_rel_error(self, top.comp.x, 6.666309, 0.01)
        assert_rel_error(self, top.comp.y, -7.333026, 0.01)

        J = top.driver.workflow.calc_gradient(inputs=["comp.x", "comp.y"], outputs=["comp.f"])
        edges = top.driver.workflow._edges
        print edges
        self.assertEqual(set(edges["@in0"]), set(["~opt1.comp|x", "~opt2.comp|x"]))
        self.assertEqual(set(edges["@in1"]), set(["~opt1.comp|y", "~opt2.comp|y"]))
        self.assertEqual(set(edges["~opt1.comp|f"]), set(["@out0"]))
        self.assertEqual(set(edges["~opt2.comp|f"]), set(["@out0"]))
Example 58
    def test_opt1(self):
        # Run with scalar parameters, scalar constraints, and OpenMDAO gradient.
        self.top.driver.add_objective('10*comp.result')
        # pylint: disable-msg=C0301
        map(self.top.driver.add_parameter,
            ['comp.x[0]', 'comp.x[1]', 'comp.x[2]', 'comp.x[3]'])

        map(self.top.driver.add_constraint, [
            'comp.x[0]**2+comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2+comp.x[2]+comp.x[3]**2-comp.x[3] < 8',
            'comp.x[0]**2-comp.x[0]+2*comp.x[1]**2+comp.x[2]**2+2*comp.x[3]**2-comp.x[3] < 10',
            '2*comp.x[0]**2+2*comp.x[0]+comp.x[1]**2-comp.x[1]+comp.x[2]**2-comp.x[3] < 5'
        ])
        self.top.driver.recorders = [ListCaseRecorder()]
        self.top.driver.printvars = ['comp.opt_objective']
        self.top.driver.iprint = 0
        self.top.run()

        # pylint: disable-msg=E1101
        assert_rel_error(self, self.top.comp.opt_objective,
                         self.top.driver.eval_objective(), 0.01)
        assert_rel_error(self, 1 + self.top.comp.opt_design_vars[0],
                         1 + self.top.comp.x[0], 0.05)
        assert_rel_error(self, self.top.comp.opt_design_vars[1],
                         self.top.comp.x[1], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[2],
                         self.top.comp.x[2], 0.06)
        assert_rel_error(self, self.top.comp.opt_design_vars[3],
                         self.top.comp.x[3], 0.05)

        cases = self.top.driver.recorders[0].get_iterator()
        end_case = cases[-1]

        self.assertEqual(self.top.comp.x[1], end_case.get_input('comp.x[1]'))
        self.assertEqual(self.top.comp.opt_objective,
                         end_case.get_output('comp.opt_objective'))
Example 59
    def compare_derivatives(self, var_in, var_out):

        wflow = self.model.driver
        inputs = ['comp.%s' % v for v in var_in]
        outputs = ['comp.%s' % v for v in var_out]

        # Numeric
        self.model.driver.update_parameters()
        Jn = wflow.calc_gradient(inputs=inputs,
                                 outputs=outputs,
                                 mode="fd")

        # Analytic forward
        self.model.driver.update_parameters()
        Jf = wflow.calc_gradient(inputs=inputs,
                                 outputs=outputs)

        diff = abs(Jf - Jn)
        assert_rel_error(self, diff.max(), 0.0, 1e-5)

        # Analytic adjoint
        self.model.driver.update_parameters()
        Ja = wflow.calc_gradient(inputs=inputs,
                                 outputs=outputs,
                                 mode='adjoint')

        diff = abs(Ja - Jn)
        assert_rel_error(self, diff.max(), 0.0, 1e-5)
Example 60
    def test_unconstrained(self):

        try:
            from pyopt_driver.pyopt_driver import pyOptDriver
        except ImportError:
            raise SkipTest("this test requires pyOpt to be installed")

        self.top = OptimizationUnconstrained()
        set_as_top(self.top)

        for optimizer in ['CONMIN', 'COBYLA', 'SNOPT', 'SLSQP']:

            try:
                self.top.driver.optimizer = optimizer
            except ValueError:
                raise SkipTest("%s not present on this system" % optimizer)

            self.top.driver.title = 'Little Test'
            optdict = {}
            self.top.driver.options = optdict
            self.top.driver.pyopt_diff = True

            self.top.run()

            assert_rel_error(self, self.top.paraboloid.x, 6.6667, 0.01)
            assert_rel_error(self, self.top.paraboloid.y, -7.3333, 0.01)
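All of these methods live inside unittest.TestCase subclasses in their original files. A minimal standalone harness in the same style, assuming the helper is importable as openmdao.util.testutil.assert_rel_error (otherwise the sketch near the top of this listing can stand in for it), might look like:

    import unittest

    # assumed import path for the helper used throughout this listing
    from openmdao.util.testutil import assert_rel_error


    class RelErrorUsageTestCase(unittest.TestCase):

        def test_within_tolerance(self):
            # about 0.01% relative error, well inside the 1% tolerance
            assert_rel_error(self, 7.1765, 7.175775, 0.01)

        def test_outside_tolerance(self):
            # about 11% relative error, should trip the assertion
            self.assertRaises(AssertionError,
                              assert_rel_error, self, 8.0, 7.175775, 0.01)


    if __name__ == '__main__':
        unittest.main()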