Example #1
    def test_single_diamond_grouped(self):

        prob = Problem()
        prob.root = SingleDiamondGrouped()
        prob.root.ln_solver = ExplicitSolver()
        prob.setup(check=False)
        prob.run()

        param_list = ['p.x']
        unknown_list = ['comp4.y1', 'comp4.y2']

        J = prob.calc_gradient(param_list,
                               unknown_list,
                               mode='fwd',
                               return_format='dict')
        assert_rel_error(self, J['comp4.y1']['p.x'][0][0], 25, 1e-6)
        assert_rel_error(self, J['comp4.y2']['p.x'][0][0], -40.5, 1e-6)

        J = prob.calc_gradient(param_list,
                               unknown_list,
                               mode='rev',
                               return_format='dict')
        assert_rel_error(self, J['comp4.y1']['p.x'][0][0], 25, 1e-6)
        assert_rel_error(self, J['comp4.y2']['p.x'][0][0], -40.5, 1e-6)

        J = prob.calc_gradient(param_list,
                               unknown_list,
                               mode='fd',
                               return_format='dict')
        assert_rel_error(self, J['comp4.y1']['p.x'][0][0], 25, 1e-6)
        assert_rel_error(self, J['comp4.y2']['p.x'][0][0], -40.5, 1e-6)
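All of the examples below share one pattern: build a Problem, call setup() and run(), then ask calc_gradient for the Jacobian of some unknowns with respect to some params in forward ('fwd'), adjoint ('rev'), or finite-difference ('fd') mode. A minimal self-contained sketch of that pattern, assuming the OpenMDAO 1.x openmdao.api imports these suites rely on:

from openmdao.api import Problem, Group, ExecComp, IndepVarComp

# Tiny stand-in model (not taken from the suites above).
prob = Problem(root=Group())
prob.root.add('p', IndepVarComp('x', 3.0))
prob.root.add('comp', ExecComp('y = 2.0*x'))
prob.root.connect('p.x', 'comp.x')

prob.setup(check=False)
prob.run()

# With return_format='dict', J[unknown][param] is a 2-D array,
# hence the [0][0] indexing used throughout these tests.
for mode in ('fwd', 'rev', 'fd'):
    J = prob.calc_gradient(['p.x'], ['comp.y'], mode=mode, return_format='dict')
    print(mode, J['comp.y']['p.x'][0][0])   # 2.0 in every mode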
Example #2
    def test_double_arraycomp(self):
        # Mainly testing a bug in the array return for multiple arrays

        group = Group()
        group.add('x_param1', IndepVarComp('x1', np.ones((2))), promotes=['*'])
        group.add('x_param2', IndepVarComp('x2', np.ones((2))), promotes=['*'])
        group.add('mycomp', DoubleArrayComp(), promotes=['*'])

        prob = Problem(impl=impl)
        prob.root = group
        prob.root.ln_solver = PetscKSP()
        prob.setup(check=False)
        prob.run()

        Jbase = group.mycomp.JJ

        J = prob.calc_gradient(['x1', 'x2'], ['y1', 'y2'], mode='fwd',
                               return_format='array')
        diff = np.linalg.norm(J - Jbase)
        assert_rel_error(self, diff, 0.0, 1e-8)

        J = prob.calc_gradient(['x1', 'x2'], ['y1', 'y2'], mode='fd',
                               return_format='array')
        diff = np.linalg.norm(J - Jbase)
        assert_rel_error(self, diff, 0.0, 1e-8)

        J = prob.calc_gradient(['x1', 'x2'], ['y1', 'y2'], mode='rev',
                               return_format='array')
        diff = np.linalg.norm(J - Jbase)
        assert_rel_error(self, diff, 0.0, 1e-8)
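With return_format='array', calc_gradient returns a single 2-D array with the requested unknowns ('y1', 'y2') stacked along the rows and the params ('x1', 'x2') along the columns, in the order they were listed. Since every variable here has size 2, J is 4x4 and can be compared directly against the component's stored JJ matrix.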
Example #3
    def test_fan_in_grouped(self):

        prob = Problem(impl=impl)
        prob.root = FanInGrouped()
        prob.root.ln_solver = PetscKSP()

        param_list = ['p1.x1', 'p2.x2']
        unknown_list = ['comp3.y']

        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(param_list,
                               unknown_list,
                               mode='fwd',
                               return_format='dict')
        assert_rel_error(self, J['comp3.y']['p1.x1'][0][0], -6.0, 1e-6)
        assert_rel_error(self, J['comp3.y']['p2.x2'][0][0], 35.0, 1e-6)

        J = prob.calc_gradient(param_list,
                               unknown_list,
                               mode='rev',
                               return_format='dict')
        assert_rel_error(self, J['comp3.y']['p1.x1'][0][0], -6.0, 1e-6)
        assert_rel_error(self, J['comp3.y']['p2.x2'][0][0], 35.0, 1e-6)
Example #4
    def test_converge_diverge_compfd(self):

        prob = Problem(impl=impl)
        prob.root = ConvergeDivergePar()
        prob.root.ln_solver = PetscKSP()

        # fd comp2 and comp5. each is under a par group
        prob.root.par1.comp2.fd_options['force_fd'] = True
        prob.root.par2.comp5.fd_options['force_fd'] = True

        prob.setup(check=False)
        prob.run()

        # Make sure value is fine.
        assert_rel_error(self, prob['comp7.y1'], -102.7, 1e-6)

        indep_list = ['p.x']
        unknown_list = ['comp7.y1']

        J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)

        J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)

        J = prob.calc_gradient(indep_list, unknown_list, mode='fd', return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)
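Note what this test exercises: comp2 and comp5 are finite-differenced at the component level (force_fd), each inside a parallel group, while the rest of the model supplies analytic derivatives. The framework chains the FD sub-Jacobians into the global linear solve, which is why all three modes still reproduce the pure-analytic value of -40.75.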
Example #5
    def test_converge_diverge_groups(self):

        prob = Problem()
        prob.root = ConvergeDivergeGroups()
        prob.root.ln_solver = ExplicitSolver()
        prob.setup(check=False)
        prob.run()

        # Make sure value is fine.
        assert_rel_error(self, prob['comp7.y1'], -102.7, 1e-6)

        param_list = ['p.x']
        unknown_list = ['comp7.y1']

        J = prob.calc_gradient(param_list,
                               unknown_list,
                               mode='fwd',
                               return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)

        J = prob.calc_gradient(param_list,
                               unknown_list,
                               mode='rev',
                               return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)

        J = prob.calc_gradient(param_list,
                               unknown_list,
                               mode='fd',
                               return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)
Example #6
    def test_fan_out_grouped(self):

        prob = Problem(impl=impl)
        prob.root = FanOutGrouped()
        prob.root.ln_solver = PetscKSP()

        prob.setup(check=False)
        prob.run()

        param = 'sub.pgroup.p.x'
        unknown_list = ['sub.comp2.y', "sub.comp3.y"]

        J = prob.calc_gradient([param],
                               unknown_list,
                               mode='fwd',
                               return_format='dict')

        assert_rel_error(self, J[unknown_list[0]][param][0][0], -6.0, 1e-6)
        assert_rel_error(self, J[unknown_list[1]][param][0][0], 15.0, 1e-6)

        J = prob.calc_gradient([param],
                               unknown_list,
                               mode='rev',
                               return_format='dict')
        assert_rel_error(self, J[unknown_list[0]][param][0][0], -6.0, 1e-6)
        assert_rel_error(self, J[unknown_list[1]][param][0][0], 15.0, 1e-6)
Example #7
    def test_double_arraycomp(self):
        # Mainly testing a bug in the array return for multiple arrays

        group = Group()
        group.add('x_param1', ParamComp('x1', np.ones((2))), promotes=['*'])
        group.add('x_param2', ParamComp('x2', np.ones((2))), promotes=['*'])
        group.add('mycomp', DoubleArrayComp(), promotes=['*'])

        prob = Problem(impl=impl)
        prob.root = group
        prob.root.ln_solver = PetscKSP()
        prob.setup(check=False)
        prob.run()

        Jbase = group.mycomp.JJ

        J = prob.calc_gradient(['x1', 'x2'], ['y1', 'y2'],
                               mode='fwd',
                               return_format='array')
        diff = np.linalg.norm(J - Jbase)
        assert_rel_error(self, diff, 0.0, 1e-8)

        J = prob.calc_gradient(['x1', 'x2'], ['y1', 'y2'],
                               mode='fd',
                               return_format='array')
        diff = np.linalg.norm(J - Jbase)
        assert_rel_error(self, diff, 0.0, 1e-8)

        J = prob.calc_gradient(['x1', 'x2'], ['y1', 'y2'],
                               mode='rev',
                               return_format='array')
        diff = np.linalg.norm(J - Jbase)
        assert_rel_error(self, diff, 0.0, 1e-8)
Example #8
    def test_sellar_derivs_grouped(self):

        prob = Problem(impl=impl)
        prob.root = SellarDerivativesGrouped()
        prob.root.ln_solver = PetscKSP()

        prob.root.mda.nl_solver.options['atol'] = 1e-12
        prob.setup(check=False)
        prob.run()

        # Just make sure we are at the right answer
        assert_rel_error(self, prob['y1'], 25.58830273, .00001)
        assert_rel_error(self, prob['y2'], 12.05848819, .00001)

        param_list = ['x', 'z']
        unknown_list = ['obj', 'con1', 'con2']

        Jbase = {}
        Jbase['con1'] = {}
        Jbase['con1']['x'] = -0.98061433
        Jbase['con1']['z'] = np.array([-9.61002285, -0.78449158])
        Jbase['con2'] = {}
        Jbase['con2']['x'] = 0.09692762
        Jbase['con2']['z'] = np.array([1.94989079, 1.0775421])
        Jbase['obj'] = {}
        Jbase['obj']['x'] = 2.98061392
        Jbase['obj']['z'] = np.array([9.61001155, 1.78448534])

        J = prob.calc_gradient(param_list,
                               unknown_list,
                               mode='fwd',
                               return_format='dict')
        for key1, val1 in Jbase.items():
            for key2, val2 in val1.items():
                assert_rel_error(self, J[key1][key2], val2, .00001)

        J = prob.calc_gradient(param_list,
                               unknown_list,
                               mode='rev',
                               return_format='dict')
        for key1, val1 in Jbase.items():
            for key2, val2 in val1.items():
                assert_rel_error(self, J[key1][key2], val2, .00001)

        prob.root.fd_options['form'] = 'central'
        J = prob.calc_gradient(param_list,
                               unknown_list,
                               mode='fd',
                               return_format='dict')
        for key1, val1 in Jbase.items():
            for key2, val2 in val1.items():
                assert_rel_error(self, J[key1][key2], val2, .00001)
Example #9
    def test_fan_out_grouped(self):

        prob = Problem(impl=impl)
        prob.root = root = Group()

        root.add('p', IndepVarComp('x', 1.0))
        root.add('comp1', ExecComp(['y=3.0*x']))

        sub = root.add('sub', ParallelGroup())
        sub.add('comp2', ExecComp(['y=-2.0*x']))
        sub.add('comp3', ExecComp(['y=5.0*x']))

        root.add('c2', ExecComp(['y=-x']))
        root.add('c3', ExecComp(['y=3.0*x']))
        root.connect('sub.comp2.y', 'c2.x')
        root.connect('sub.comp3.y', 'c3.x')

        root.connect("comp1.y", "sub.comp2.x")
        root.connect("comp1.y", "sub.comp3.x")
        root.connect("p.x", "comp1.x")

        prob.root.ln_solver = LinearGaussSeidel()
        prob.root.sub.ln_solver = LinearGaussSeidel()

        prob.setup(check=False)
        prob.run()

        param = 'p.x'
        unknown_list = ['sub.comp2.y', "sub.comp3.y"]

        J = prob.calc_gradient([param], unknown_list, mode='fwd', return_format='dict')

        assert_rel_error(self, J[unknown_list[0]][param][0][0], -6.0, 1e-6)
        assert_rel_error(self, J[unknown_list[1]][param][0][0], 15.0, 1e-6)

        J = prob.calc_gradient([param], unknown_list, mode='rev', return_format='dict')
        assert_rel_error(self, J[unknown_list[0]][param][0][0], -6.0, 1e-6)
        assert_rel_error(self, J[unknown_list[1]][param][0][0], 15.0, 1e-6)

        unknown_list = ['c2.y', "c3.y"]

        J = prob.calc_gradient([param], unknown_list, mode='fwd', return_format='dict')

        assert_rel_error(self, J[unknown_list[0]][param][0][0], 6.0, 1e-6)
        assert_rel_error(self, J[unknown_list[1]][param][0][0], 45.0, 1e-6)

        J = prob.calc_gradient([param], unknown_list, mode='rev', return_format='dict')
        assert_rel_error(self, J[unknown_list[0]][param][0][0], 6.0, 1e-6)
        assert_rel_error(self, J[unknown_list[1]][param][0][0], 45.0, 1e-6)
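The expected values follow from the chain rule along each branch: d(c2.y)/d(p.x) = (-1.0)(-2.0)(3.0) = 6.0 and d(c3.y)/d(p.x) = (3.0)(5.0)(3.0) = 45.0. The first pair of checks (-6.0 and 15.0) stops one component earlier in each branch, before the c2 and c3 scalings are applied.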
Example #10
    def test_complex_step2(self):
        prob = Problem(Group())
        comp = prob.root.add('comp', ExecComp('y=x*x + x*2.0'))
        prob.root.add('p1', ParamComp('x', 2.0))
        prob.root.connect('p1.x', 'comp.x')

        comp.fd_options['force_fd'] = False

        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['p1.x'], ['comp.y'], mode='fwd', return_format='dict')
        assert_rel_error(self, J['comp.y']['p1.x'], np.array([6.0]), 0.00001)

        J = prob.calc_gradient(['p1.x'], ['comp.y'], mode='rev', return_format='dict')
        assert_rel_error(self, J['comp.y']['p1.x'], np.array([6.0]), 0.00001)
Example #11
    def test_simple(self):
        group = Group()
        group.add('x_param', IndepVarComp('x', 1.0), promotes=['*'])
        group.add('mycomp', SimpleCompDerivMatVec(), promotes=['x', 'y'])

        prob = Problem(impl=impl)
        prob.root = group
        prob.root.ln_solver = PetscKSP()
        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['x'], ['y'], mode='fwd', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)

        J = prob.calc_gradient(['x'], ['y'], mode='rev', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
Example #12
    def test_derivatives(self):
        meta = MetaModel()
        meta.add_param('x', 0.)
        meta.add_output('f', 0.)
        meta.default_surrogate = FloatKrigingSurrogate()

        prob = Problem(Group())
        prob.root.add('meta', meta, promotes=['x'])
        prob.root.add('p', IndepVarComp('x', 0.), promotes=['x'])
        prob.setup(check=False)

        prob['meta.train:x'] = [0., .25, .5, .75, 1.]
        prob['meta.train:f'] = [1., .75, .5, .25, 0.]
        prob['x'] = 0.125
        prob.run()

        Jf = prob.calc_gradient(['x'], ['meta.f'], mode='fwd')
        Jr = prob.calc_gradient(['x'], ['meta.f'], mode='rev')

        assert_rel_error(self, Jf[0][0], -1.00011, 1.0e-5)
        assert_rel_error(self, Jr[0][0], -1.00011, 1.0e-5)

        stream = cStringIO()
        prob.check_partial_derivatives(out_stream=stream)

        abs_errors = findall(r'Absolute Error \(.+\) : (.+)', stream.getvalue())
        self.assertTrue(len(abs_errors) > 0)
        for match in abs_errors:
            abs_error = float(match)
            self.assertTrue(abs_error < 1e-6)
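The expected slope of roughly -1.00011 is no mystery: the training data is f = 1 - x, whose exact derivative is -1 everywhere, and the FloatKrigingSurrogate reproduces that to within interpolation error at x = 0.125. The check_partial_derivatives call then confirms the surrogate's analytic gradient against finite differences.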
Example #13
    def test_simple(self):
        group = Group()
        group.add('x_param', ParamComp('x', 1.0), promotes=['*'])
        group.add('mycomp', SimpleCompDerivMatVec(), promotes=['x', 'y'])

        prob = Problem(impl=impl)
        prob.root = group
        prob.root.ln_solver = PetscKSP()
        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['x'], ['y'], mode='fwd', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)

        J = prob.calc_gradient(['x'], ['y'], mode='rev', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
Example #14
    def test_simple_jac(self):
        group = Group()
        group.add('x_param', ParamComp('x', 1.0), promotes=['*'])
        group.add('mycomp', ExecComp(['y=2.0*x']), promotes=['x', 'y'])

        prob = Problem()
        prob.root = group
        prob.root.ln_solver = ExplicitSolver()
        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['x'], ['y'], mode='fwd', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)

        J = prob.calc_gradient(['x'], ['y'], mode='rev', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
Example #15
    def test_simple_jac(self):
        group = Group()
        group.add('x_param', IndepVarComp('x', 1.0), promotes=['*'])
        group.add('mycomp', ExecComp(['y=2.0*x']), promotes=['x', 'y'])

        prob = Problem()
        prob.root = group
        prob.root.ln_solver = DirectSolver()
        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['x'], ['y'], mode='fwd', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)

        J = prob.calc_gradient(['x'], ['y'], mode='rev', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
Example #16
    def test_simple_in_group_matvec(self):
        group = Group()
        sub = group.add('sub', Group(), promotes=['x', 'y'])
        group.add('x_param', ParamComp('x', 1.0), promotes=['*'])
        sub.add('mycomp', SimpleCompDerivMatVec(), promotes=['x', 'y'])

        prob = Problem()
        prob.root = group
        prob.root.ln_solver = ExplicitSolver()
        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['x'], ['y'], mode='fwd', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)

        J = prob.calc_gradient(['x'], ['y'], mode='rev', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
Example #17
    def test_linear_system(self):
        root = Group()

        root.add('lin', LinearSystem(3))

        x = np.array([1, 2, -3])
        A = np.array([[5.0, -3.0, 2.0], [1.0, 7.0, -4.0], [1.0, 0.0, 8.0]])
        b = A.dot(x)

        root.add('p1', ParamComp('A', A))
        root.add('p2', ParamComp('b', b))
        root.connect('p1.A', 'lin.A')
        root.connect('p2.b', 'lin.b')

        prob = Problem(root)
        prob.setup(check=False)
        prob.run()

        # Make sure it gets the right answer
        assert_rel_error(self, prob['lin.x'], x, .0001)
        assert_rel_error(self, np.linalg.norm(prob.root.resids.vec), 0.0,
                         1e-10)

        # Compare against calculated derivs
        Ainv = np.linalg.inv(A)
        dx_dA = np.outer(Ainv, -x).reshape(3, 9)
        dx_db = Ainv

        J = prob.calc_gradient(['p1.A', 'p2.b'], ['lin.x'],
                               mode='fwd',
                               return_format='dict')
        assert_rel_error(self, J['lin.x']['p1.A'], dx_dA, .0001)
        assert_rel_error(self, J['lin.x']['p2.b'], dx_db, .0001)

        J = prob.calc_gradient(['p1.A', 'p2.b'], ['lin.x'],
                               mode='rev',
                               return_format='dict')
        assert_rel_error(self, J['lin.x']['p1.A'], dx_dA, .0001)
        assert_rel_error(self, J['lin.x']['p2.b'], dx_db, .0001)

        J = prob.calc_gradient(['p1.A', 'p2.b'], ['lin.x'],
                               mode='fd',
                               return_format='dict')
        assert_rel_error(self, J['lin.x']['p1.A'], dx_dA, .0001)
        assert_rel_error(self, J['lin.x']['p2.b'], dx_db, .0001)
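The reference Jacobians above come from differentiating A x = b: since x = A^-1 b, dx/db = A^-1, and taking a differential gives dx = -A^-1 dA x, i.e. dx_k/dA_ij = -(A^-1)_ki x_j, which flattened row-major over A is exactly the np.outer(Ainv, -x).reshape(3, 9) line. A quick numpy-only sanity check of that identity (independent of OpenMDAO):

import numpy as np

A = np.array([[5.0, -3.0, 2.0], [1.0, 7.0, -4.0], [1.0, 0.0, 8.0]])
x = np.array([1.0, 2.0, -3.0])
b = A.dot(x)

Ainv = np.linalg.inv(A)
dx_dA = np.outer(Ainv, -x).reshape(3, 9)

# Finite-difference each entry of A and compare column by column.
eps = 1e-7
fd = np.zeros((3, 9))
for i in range(3):
    for j in range(3):
        Ap = A.copy()
        Ap[i, j] += eps
        fd[:, 3 * i + j] = (np.linalg.solve(Ap, b) - x) / eps

print(np.max(np.abs(fd - dx_dA)))   # ~1e-6 or smaller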
Example #18
    def test_single_diamond(self):

        prob = Problem(impl=impl)
        prob.root = SingleDiamond()
        prob.root.ln_solver = PetscKSP()
        prob.setup(check=False)
        prob.run()

        indep_list = ['p.x']
        unknown_list = ['comp4.y1', 'comp4.y2']

        J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
        assert_rel_error(self, J['comp4.y1']['p.x'][0][0], 25, 1e-6)
        assert_rel_error(self, J['comp4.y2']['p.x'][0][0], -40.5, 1e-6)

        J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
        assert_rel_error(self, J['comp4.y1']['p.x'][0][0], 25, 1e-6)
        assert_rel_error(self, J['comp4.y2']['p.x'][0][0], -40.5, 1e-6)
Example #19
    def test_no_derivatives(self):

        prob = Problem()
        prob.root = Group()
        comp = prob.root.add('comp', ExecComp('y=x*2.0'))
        prob.root.add('p1', ParamComp('x', 2.0))
        prob.root.connect('p1.x', 'comp.x')

        comp.fd_options['force_fd'] = True

        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['p1.x'], ['comp.y'], mode='fwd', return_format='dict')
        assert_rel_error(self, J['comp.y']['p1.x'][0][0], 2.0, 1e-6)

        J = prob.calc_gradient(['p1.x'], ['comp.y'], mode='rev', return_format='dict')
        assert_rel_error(self, J['comp.y']['p1.x'][0][0], 2.0, 1e-6)
Example #20
    def test_fan_in_grouped(self):

        prob = Problem()
        prob.root = FanInGrouped()
        prob.root.ln_solver = ExplicitSolver()
        prob.setup(check=False)
        prob.run()

        param_list = ['p1.x1', 'p2.x2']
        unknown_list = ['comp3.y']

        J = prob.calc_gradient(param_list, unknown_list, mode='fwd', return_format='dict')
        assert_rel_error(self, J['comp3.y']['p1.x1'][0][0], -6.0, 1e-6)
        assert_rel_error(self, J['comp3.y']['p2.x2'][0][0], 35.0, 1e-6)

        J = prob.calc_gradient(param_list, unknown_list, mode='rev', return_format='dict')
        assert_rel_error(self, J['comp3.y']['p1.x1'][0][0], -6.0, 1e-6)
        assert_rel_error(self, J['comp3.y']['p2.x2'][0][0], 35.0, 1e-6)
Example #21
    def test_fan_in(self):

        prob = Problem(impl=impl)
        prob.root = FanIn()
        prob.root.ln_solver = PetscKSP()
        prob.setup(check=False)
        prob.run()

        indep_list = ['p1.x1', 'p2.x2']
        unknown_list = ['comp3.y']

        J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
        assert_rel_error(self, J['comp3.y']['p1.x1'][0][0], -6.0, 1e-6)
        assert_rel_error(self, J['comp3.y']['p2.x2'][0][0], 35.0, 1e-6)

        J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
        assert_rel_error(self, J['comp3.y']['p1.x1'][0][0], -6.0, 1e-6)
        assert_rel_error(self, J['comp3.y']['p2.x2'][0][0], 35.0, 1e-6)
Example #22
    def test_fan_out(self):

        prob = Problem()
        prob.root = FanOut()
        prob.root.ln_solver = DirectSolver()
        prob.setup(check=False)
        prob.run()

        indep_list = ['p.x']
        unknown_list = ['comp2.y', "comp3.y"]

        J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
        assert_rel_error(self, J['comp2.y']['p.x'][0][0], -6.0, 1e-6)
        assert_rel_error(self, J['comp3.y']['p.x'][0][0], 15.0, 1e-6)

        J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
        assert_rel_error(self, J['comp2.y']['p.x'][0][0], -6.0, 1e-6)
        assert_rel_error(self, J['comp3.y']['p.x'][0][0], 15.0, 1e-6)
Example #23
    def test_fan_out_grouped(self):

        prob = Problem(impl=impl)
        prob.root = FanOutGrouped()
        prob.root.ln_solver = PetscKSP()
        prob.setup(check=False)
        prob.run()

        indep_list = ['p.x']
        unknown_list = ['sub.comp2.y', "sub.comp3.y"]

        J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
        assert_rel_error(self, J['sub.comp2.y']['p.x'][0][0], -6.0, 1e-6)
        assert_rel_error(self, J['sub.comp3.y']['p.x'][0][0], 15.0, 1e-6)

        J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
        assert_rel_error(self, J['sub.comp2.y']['p.x'][0][0], -6.0, 1e-6)
        assert_rel_error(self, J['sub.comp3.y']['p.x'][0][0], 15.0, 1e-6)
Example #24
    def test_fd_options_meta_step_size(self):

        class MetaParaboloid(Component):
            """ Evaluates the equation f(x,y) = (x-3)^2 + xy + (y+4)^2 - 3 """

            def __init__(self):
                super(MetaParaboloid, self).__init__()

                # Params
                self.add_param('x', 1.0, fd_step_size=1.0e5)
                self.add_param('y', 1.0, fd_step_size=1.0e5)

                # Unknowns
                self.add_output('f_xy', 0.0)

            def solve_nonlinear(self, params, unknowns, resids):
                """f(x,y) = (x-3)^2 + xy + (y+4)^2 - 3
                Optimal solution (minimum): x = 6.6667; y = -7.3333
                """

                x = params['x']
                y = params['y']

                f_xy = ((x-3.0)**2 + x*y + (y+4.0)**2 - 3.0)
                unknowns['f_xy'] = f_xy

            def jacobian(self, params, unknowns, resids):
                """Analytical derivatives"""

                x = params['x']
                y = params['y']
                J = {}

                J['f_xy', 'x'] = (2.0*x - 6.0 + y)
                J['f_xy', 'y'] = (2.0*y + 8.0 + x)

                return J

        prob = Problem()
        prob.root = Group()
        comp = prob.root.add('comp', MetaParaboloid())
        prob.root.add('p1', ParamComp('x', 15.0))
        prob.root.add('p2', ParamComp('y', 15.0))
        prob.root.connect('p1.x', 'comp.x')
        prob.root.connect('p2.y', 'comp.y')

        comp.fd_options['force_fd'] = True

        prob.setup(check=False)
        prob.run()

        # Make sure bad meta step_size is used
        # Derivative should be way high with this.

        J = prob.calc_gradient(['p1.x'], ['comp.f_xy'], return_format='dict')
        self.assertGreater(J['comp.f_xy']['p1.x'][0][0], 1000.0)
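Why the assertion expects something north of 1000: a forward difference returns f'(x) + (h/2) f''(x) + O(h^2), and the paraboloid's second derivative with respect to x is 2, so the per-param metadata step of h = 1.0e5 inflates the computed slope by about 1e5. A quick check of the arithmetic, using the same function as the test:

def f(x, y):
    return (x - 3.0)**2 + x * y + (y + 4.0)**2 - 3.0

x, y, h = 15.0, 15.0, 1.0e5
exact = 2.0 * x - 6.0 + y              # analytic df/dx = 39.0
approx = (f(x + h, y) - f(x, y)) / h   # forward difference with the bad step
print(exact, approx)                   # 39.0 vs 100039.0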
Example #25
    def test_array2D(self):
        group = Group()
        group.add('x_param', IndepVarComp('x', np.ones((2, 2))), promotes=['*'])
        group.add('mycomp', ArrayComp2D(), promotes=['x', 'y'])

        prob = Problem(impl=impl)
        prob.root = group
        prob.root.ln_solver = PetscKSP()
        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['x'], ['y'], mode='fwd', return_format='dict')
        Jbase = prob.root.mycomp._jacobian_cache
        diff = np.linalg.norm(J['y']['x'] - Jbase['y', 'x'])
        assert_rel_error(self, diff, 0.0, 1e-8)

        J = prob.calc_gradient(['x'], ['y'], mode='rev', return_format='dict')
        diff = np.linalg.norm(J['y']['x'] - Jbase['y', 'x'])
        assert_rel_error(self, diff, 0.0, 1e-8)
Example #26
    def test_sellar_derivs_grouped(self):

        prob = Problem(impl=impl)
        prob.root = SellarDerivativesGrouped()
        prob.root.ln_solver = PetscKSP()

        prob.root.mda.nl_solver.options['atol'] = 1e-12
        prob.setup(check=False)
        prob.run()

        # Just make sure we are at the right answer
        assert_rel_error(self, prob['y1'], 25.58830273, .00001)
        assert_rel_error(self, prob['y2'], 12.05848819, .00001)

        indep_list = ['x', 'z']
        unknown_list = ['obj', 'con1', 'con2']

        Jbase = {}
        Jbase['con1'] = {}
        Jbase['con1']['x'] = -0.98061433
        Jbase['con1']['z'] = np.array([-9.61002285, -0.78449158])
        Jbase['con2'] = {}
        Jbase['con2']['x'] = 0.09692762
        Jbase['con2']['z'] = np.array([1.94989079, 1.0775421])
        Jbase['obj'] = {}
        Jbase['obj']['x'] = 2.98061392
        Jbase['obj']['z'] = np.array([9.61001155, 1.78448534])

        J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
        for key1, val1 in Jbase.items():
            for key2, val2 in val1.items():
                assert_rel_error(self, J[key1][key2], val2, .00001)

        J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
        for key1, val1 in Jbase.items():
            for key2, val2 in val1.items():
                assert_rel_error(self, J[key1][key2], val2, .00001)

        prob.root.fd_options['form'] = 'central'
        J = prob.calc_gradient(indep_list, unknown_list, mode='fd', return_format='dict')
        for key1, val1 in Jbase.items():
            for key2, val2 in val1.items():
                assert_rel_error(self, J[key1][key2], val2, .00001)
Example #27
    def test_array2D(self):
        group = Group()
        group.add('x_param', ParamComp('x', np.ones((2, 2))), promotes=['*'])
        group.add('mycomp', ArrayComp2D(), promotes=['x', 'y'])

        prob = Problem()
        prob.root = group
        prob.root.ln_solver = ExplicitSolver()
        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['x'], ['y'], mode='fwd', return_format='dict')
        Jbase = prob.root.mycomp._jacobian_cache
        diff = np.linalg.norm(J['y']['x'] - Jbase['y', 'x'])
        assert_rel_error(self, diff, 0.0, 1e-8)

        J = prob.calc_gradient(['x'], ['y'], mode='rev', return_format='dict')
        diff = np.linalg.norm(J['y']['x'] - Jbase['y', 'x'])
        assert_rel_error(self, diff, 0.0, 1e-8)
Example #28
    def test_complex_step2(self):
        prob = Problem(Group())
        comp = prob.root.add('comp', ExecComp('y=x*x + x*2.0'))
        prob.root.add('p1', ParamComp('x', 2.0))
        prob.root.connect('p1.x', 'comp.x')

        comp.fd_options['force_fd'] = False

        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['p1.x'], ['comp.y'],
                               mode='fwd',
                               return_format='dict')
        assert_rel_error(self, J['comp.y']['p1.x'], np.array([6.0]), 0.00001)

        J = prob.calc_gradient(['p1.x'], ['comp.y'],
                               mode='rev',
                               return_format='dict')
        assert_rel_error(self, J['comp.y']['p1.x'], np.array([6.0]), 0.00001)
Example #29
    def test_fd_options_meta_step_size(self):
        class MetaParaboloid(Component):
            """ Evaluates the equation f(x,y) = (x-3)^2 + xy + (y+4)^2 - 3 """
            def __init__(self):
                super(MetaParaboloid, self).__init__()

                # Params
                self.add_param('x', 1.0, fd_step_size=1.0e5)
                self.add_param('y', 1.0, fd_step_size=1.0e5)

                # Unknowns
                self.add_output('f_xy', 0.0)

            def solve_nonlinear(self, params, unknowns, resids):
                """f(x,y) = (x-3)^2 + xy + (y+4)^2 - 3
                Optimal solution (minimum): x = 6.6667; y = -7.3333
                """

                x = params['x']
                y = params['y']

                f_xy = ((x - 3.0)**2 + x * y + (y + 4.0)**2 - 3.0)
                unknowns['f_xy'] = f_xy

            def jacobian(self, params, unknowns, resids):
                """Analytical derivatives"""

                x = params['x']
                y = params['y']
                J = {}

                J['f_xy', 'x'] = (2.0 * x - 6.0 + y)
                J['f_xy', 'y'] = (2.0 * y + 8.0 + x)

                return J

        prob = Problem()
        prob.root = Group()
        comp = prob.root.add('comp', MetaParaboloid())
        prob.root.add('p1', ParamComp('x', 15.0))
        prob.root.add('p2', ParamComp('y', 15.0))
        prob.root.connect('p1.x', 'comp.x')
        prob.root.connect('p2.y', 'comp.y')

        comp.fd_options['force_fd'] = True

        prob.setup(check=False)
        prob.run()

        # Make sure bad meta step_size is used
        # Derivative should be way high with this.

        J = prob.calc_gradient(['p1.x'], ['comp.f_xy'], return_format='dict')
        self.assertGreater(J['comp.f_xy']['p1.x'][0][0], 1000.0)
Example #30
    def test_no_derivatives(self):

        prob = Problem()
        prob.root = Group()
        comp = prob.root.add('comp', ExecComp('y=x*2.0'))
        prob.root.add('p1', ParamComp('x', 2.0))
        prob.root.connect('p1.x', 'comp.x')

        comp.fd_options['force_fd'] = True

        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['p1.x'], ['comp.y'],
                               mode='fwd',
                               return_format='dict')
        assert_rel_error(self, J['comp.y']['p1.x'][0][0], 2.0, 1e-6)

        J = prob.calc_gradient(['p1.x'], ['comp.y'],
                               mode='rev',
                               return_format='dict')
        assert_rel_error(self, J['comp.y']['p1.x'][0][0], 2.0, 1e-6)
Example #31
    def test_single_diamond_grouped(self):

        prob = Problem()
        prob.root = SingleDiamondGrouped()
        prob.root.ln_solver = ExplicitSolver()
        prob.setup(check=False)
        prob.run()

        param_list = ['p.x']
        unknown_list = ['comp4.y1', 'comp4.y2']

        J = prob.calc_gradient(param_list, unknown_list, mode='fwd', return_format='dict')
        assert_rel_error(self, J['comp4.y1']['p.x'][0][0], 25, 1e-6)
        assert_rel_error(self, J['comp4.y2']['p.x'][0][0], -40.5, 1e-6)

        J = prob.calc_gradient(param_list, unknown_list, mode='rev', return_format='dict')
        assert_rel_error(self, J['comp4.y1']['p.x'][0][0], 25, 1e-6)
        assert_rel_error(self, J['comp4.y2']['p.x'][0][0], -40.5, 1e-6)

        J = prob.calc_gradient(param_list, unknown_list, mode='fd', return_format='dict')
        assert_rel_error(self, J['comp4.y1']['p.x'][0][0], 25, 1e-6)
        assert_rel_error(self, J['comp4.y2']['p.x'][0][0], -40.5, 1e-6)
Example #32
    def test_converge_diverge_groups(self):

        prob = Problem(impl=impl)
        prob.root = ConvergeDivergeGroups()
        prob.root.ln_solver = PetscKSP()
        prob.setup(check=False)
        prob.run()

        # Make sure value is fine.
        assert_rel_error(self, prob['comp7.y1'], -102.7, 1e-6)

        indep_list = ['p.x']
        unknown_list = ['comp7.y1']

        J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)

        J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)

        J = prob.calc_gradient(indep_list, unknown_list, mode='fd', return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)
Example #33
    def test_linear_system(self):
        root = Group()

        root.add('lin', LinearSystem(3))

        x = np.array([1, 2, -3])
        A = np.array([[5.0, -3.0, 2.0], [1.0, 7.0, -4.0], [1.0, 0.0, 8.0]])
        b = A.dot(x)

        root.add('p1', IndepVarComp('A', A))
        root.add('p2', IndepVarComp('b', b))
        root.connect('p1.A', 'lin.A')
        root.connect('p2.b', 'lin.b')

        prob = Problem(root)
        prob.setup(check=False)
        prob.run()

        # Make sure it gets the right answer
        assert_rel_error(self, prob['lin.x'], x, .0001)
        assert_rel_error(self, np.linalg.norm(prob.root.resids.vec), 0.0, 1e-10)

        # Compare against calculated derivs
        Ainv = np.linalg.inv(A)
        dx_dA = np.outer(Ainv, -x).reshape(3, 9)
        dx_db = Ainv

        J = prob.calc_gradient(['p1.A', 'p2.b'], ['lin.x'], mode='fwd', return_format='dict')
        assert_rel_error(self, J['lin.x']['p1.A'], dx_dA, .0001)
        assert_rel_error(self, J['lin.x']['p2.b'], dx_db, .0001)

        J = prob.calc_gradient(['p1.A', 'p2.b'], ['lin.x'], mode='rev', return_format='dict')
        assert_rel_error(self, J['lin.x']['p1.A'], dx_dA, .0001)
        assert_rel_error(self, J['lin.x']['p2.b'], dx_db, .0001)

        J = prob.calc_gradient(['p1.A', 'p2.b'], ['lin.x'], mode='fd', return_format='dict')
        assert_rel_error(self, J['lin.x']['p1.A'], dx_dA, .0001)
        assert_rel_error(self, J['lin.x']['p2.b'], dx_db, .0001)
Example #34
    def test_fd_options_step_size(self):

        prob = Problem()
        prob.root = Group()
        comp = prob.root.add('comp', Paraboloid())
        prob.root.add('p1', ParamComp([('x', 15.0), ('y', 15.0)]))
        prob.root.connect('p1.x', 'comp.x')
        prob.root.connect('p1.y', 'comp.y')

        comp.fd_options['force_fd'] = True

        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['p1.x'], ['comp.f_xy'], return_format='dict')
        assert_rel_error(self, J['comp.f_xy']['p1.x'][0][0], 39.0, 1e-6)

        # Make sure step_size is used
        # Derivative should be way high with this.
        comp.fd_options['step_size'] = 1e5

        J = prob.calc_gradient(['p1.x'], ['comp.f_xy'], return_format='dict')
        self.assertGreater(J['comp.f_xy']['p1.x'][0][0], 1000.0)
Example #36
    def test_converge_diverge(self):

        prob = Problem()
        prob.root = ConvergeDiverge()
        prob.root.ln_solver = ExplicitSolver()
        prob.setup(check=False)
        prob.run()

        param_list = ['p.x']
        unknown_list = ['comp7.y1']

        prob.run()

        # Make sure value is fine.
        assert_rel_error(self, prob['comp7.y1'], -102.7, 1e-6)

        J = prob.calc_gradient(param_list, unknown_list, mode='fwd', return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)

        J = prob.calc_gradient(param_list, unknown_list, mode='rev', return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)

        J = prob.calc_gradient(param_list, unknown_list, mode='fd', return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)
Example #37
    def test_converge_diverge_compfd(self):

        prob = Problem(impl=impl)
        prob.root = ConvergeDivergePar()
        prob.root.ln_solver = PetscKSP()

        # fd comp2 and comp5. each is under a par group
        prob.root.par1.comp2.fd_options['force_fd'] = True
        prob.root.par2.comp5.fd_options['force_fd'] = True

        prob.setup(check=False)
        prob.run()

        # Make sure value is fine.
        assert_rel_error(self, prob['comp7.y1'], -102.7, 1e-6)

        param_list = ['p.x']
        unknown_list = ['comp7.y1']

        J = prob.calc_gradient(param_list,
                               unknown_list,
                               mode='fwd',
                               return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)

        J = prob.calc_gradient(param_list,
                               unknown_list,
                               mode='rev',
                               return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)

        J = prob.calc_gradient(param_list,
                               unknown_list,
                               mode='fd',
                               return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)
Example #38
    def test_fan_out_grouped(self):

        prob = Problem()
        prob.root = FanOutGrouped()
        prob.root.ln_solver = ExplicitSolver()
        prob.setup(check=False)
        prob.run()

        param_list = ['p.x']
        unknown_list = ['sub.comp2.y', "sub.comp3.y"]

        J = prob.calc_gradient(param_list,
                               unknown_list,
                               mode='fwd',
                               return_format='dict')
        assert_rel_error(self, J['sub.comp2.y']['p.x'][0][0], -6.0, 1e-6)
        assert_rel_error(self, J['sub.comp3.y']['p.x'][0][0], 15.0, 1e-6)

        J = prob.calc_gradient(param_list,
                               unknown_list,
                               mode='rev',
                               return_format='dict')
        assert_rel_error(self, J['sub.comp2.y']['p.x'][0][0], -6.0, 1e-6)
        assert_rel_error(self, J['sub.comp3.y']['p.x'][0][0], 15.0, 1e-6)
Example #39
    def test_fan_out(self):

        prob = Problem(impl=impl)
        prob.root = FanOut()
        prob.root.ln_solver = PetscKSP()
        prob.setup(check=False)
        prob.run()

        param_list = ['p.x']
        unknown_list = ['comp2.y', "comp3.y"]

        J = prob.calc_gradient(param_list,
                               unknown_list,
                               mode='fwd',
                               return_format='dict')
        assert_rel_error(self, J['comp2.y']['p.x'][0][0], -6.0, 1e-6)
        assert_rel_error(self, J['comp3.y']['p.x'][0][0], 15.0, 1e-6)

        J = prob.calc_gradient(param_list,
                               unknown_list,
                               mode='rev',
                               return_format='dict')
        assert_rel_error(self, J['comp2.y']['p.x'][0][0], -6.0, 1e-6)
        assert_rel_error(self, J['comp3.y']['p.x'][0][0], 15.0, 1e-6)
Example #40
    def test_overrides(self):
        class OverrideComp(Component):
            def __init__(self):
                super(OverrideComp, self).__init__()

                # Params
                self.add_param('x', 3.0)

                # Unknowns
                self.add_output('y', 5.5)

            def solve_nonlinear(self, params, unknowns, resids):
                """ Doesn't do much. """
                unknowns['y'] = 7.0 * params['x']

            def apply_linear(self, params, unknowns, dparams, dunknowns,
                             dresids, mode):
                """Never Call."""
                raise RuntimeError(
                    "This should have been overridden by force_fd.")

            def jacobian(self, params, unknowns, resids):
                """Never Call."""
                raise RuntimeError(
                    "This should have been overridden by force_fd.")

        prob = Problem()
        prob.root = Group()
        comp = prob.root.add('comp', OverrideComp())
        prob.root.add('p1', ParamComp('x', 2.0))
        prob.root.connect('p1.x', 'comp.x')

        comp.fd_options['force_fd'] = True

        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['p1.x'], ['comp.y'],
                               mode='fwd',
                               return_format='dict')
        assert_rel_error(self, J['comp.y']['p1.x'][0][0], 7.0, 1e-6)
Example #41
    def test_fd_options_form(self):

        prob = Problem()
        prob.root = Group()
        comp = prob.root.add('comp', Paraboloid())
        prob.root.add('p1', ParamComp('x', 15.0))
        prob.root.add('p2', ParamComp('y', 15.0))
        prob.root.connect('p1.x', 'comp.x')
        prob.root.connect('p2.y', 'comp.y')

        comp.fd_options['force_fd'] = True
        comp.fd_options['form'] = 'forward'

        param_list = ['p1.x']
        unknowns_list = ['comp.f_xy']
        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(param_list, unknowns_list, return_format='dict')
        assert_rel_error(self, J['comp.f_xy']['p1.x'][0][0], 39.0, 1e-6)

        # Make sure it gives a good result with a small step size
        comp.fd_options['form'] = 'backward'

        J = prob.calc_gradient(['p1.x'], ['comp.f_xy'], return_format='dict')
        assert_rel_error(self, J['comp.f_xy']['p1.x'][0][0], 39.0, 1e-6)

        # Make sure it gives a good result with a small step size
        comp.fd_options['form'] = 'central'

        J = prob.calc_gradient(['p1.x'], ['comp.f_xy'], return_format='dict')
        assert_rel_error(self, J['comp.f_xy']['p1.x'][0][0], 39.0, 1e-6)

        # Now, make sure we really are going forward and backward
        comp.fd_options['form'] = 'forward'
        comp.fd_options['step_size'] = 1e3
        J = prob.calc_gradient(['p1.x'], ['comp.f_xy'], return_format='dict')
        self.assertGreater(J['comp.f_xy']['p1.x'][0][0], 0.0)

        comp.fd_options['form'] = 'backward'
        J = prob.calc_gradient(['p1.x'], ['comp.f_xy'], return_format='dict')
        self.assertLess(J['comp.f_xy']['p1.x'][0][0], 0.0)

        # Central should get pretty close even for the bad stepsize
        comp.fd_options['form'] = 'central'
        J = prob.calc_gradient(['p1.x'], ['comp.f_xy'], return_format='dict')
        assert_rel_error(self, J['comp.f_xy']['p1.x'][0][0], 39.0, 1e-1)
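The sign checks at the end are pure truncation error: with a quadratic f (so f'' = 2 with respect to x) and h = 1e3, the forward difference returns f' + (h/2) f'' = 39 + 1000 > 0, while the backward difference returns f' - (h/2) f'' = 39 - 1000 < 0. Central differencing cancels those terms exactly for a quadratic, which is why it recovers 39.0 even at this step size.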
Example #43
    def test_overrides(self):

        class OverrideComp(Component):

            def __init__(self):
                super(OverrideComp, self).__init__()

                # Params
                self.add_param('x', 3.0)

                # Unknowns
                self.add_output('y', 5.5)

            def solve_nonlinear(self, params, unknowns, resids):
                """ Doesn't do much. """
                unknowns['y'] = 7.0*params['x']

            def apply_linear(self, params, unknowns, dparams, dunknowns, dresids,
                             mode):
                """Never Call."""
                raise RuntimeError("This should have been overridden by force_fd.")

            def jacobian(self, params, unknowns, resids):
                """Never Call."""
                raise RuntimeError("This should have been overridden by force_fd.")

        prob = Problem()
        prob.root = Group()
        comp = prob.root.add('comp', OverrideComp())
        prob.root.add('p1', ParamComp('x', 2.0))
        prob.root.connect('p1.x', 'comp.x')

        comp.fd_options['force_fd'] = True

        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['p1.x'], ['comp.y'], mode='fwd', return_format='dict')
        assert_rel_error(self, J['comp.y']['p1.x'][0][0], 7.0, 1e-6)
Example #44
    def test_fd_options_meta_form(self):
        class MetaParaboloid(Component):
            """ Evaluates the equation f(x,y) = (x-3)^2 + xy + (y+4)^2 - 3 """
            def __init__(self):
                super(MetaParaboloid, self).__init__()

                # Params
                self.add_param('x1', 1.0, fd_form='forward')
                self.add_param('x2', 1.0, fd_form='backward')
                self.add_param('y', 1.0)

                # Unknowns
                self.add_output('f_xy', 0.0)

            def solve_nonlinear(self, params, unknowns, resids):
                """f(x,y) = (x-3)^2 + xy + (y+4)^2 - 3
                Optimal solution (minimum): x = 6.6667; y = -7.3333
                """

                x1 = params['x1']
                x2 = params['x2']
                y = params['y']

                f_xy = ((x1 - 3.0)**2 + (x2 - 3.0)**2 + (x2 + x2) * y +
                        (y + 4.0)**2 - 3.0)
                unknowns['f_xy'] = f_xy

            def jacobian(self, params, unknowns, resids):
                """Analytical derivatives"""

                x1 = params['x1']
                x2 = params['x2']
                y = params['y']
                J = {}

                J['f_xy', 'x1'] = (2.0 * x1 - 6.0 + x2 * y)
                J['f_xy', 'x2'] = (2.0 * x2 - 6.0 + x1 * y)
                J['f_xy', 'y'] = (2.0 * y + 8.0 + x1 + x2)

                return J

        prob = Problem()
        prob.root = Group()
        comp = prob.root.add('comp', MetaParaboloid())
        prob.root.add('p11', ParamComp('x1', 15.0))
        prob.root.add('p12', ParamComp('x2', 15.0))
        prob.root.add('p2', ParamComp('y', 15.0))
        prob.root.connect('p11.x1', 'comp.x1')
        prob.root.connect('p12.x2', 'comp.x2')
        prob.root.connect('p2.y', 'comp.y')

        comp.fd_options['force_fd'] = True
        comp.fd_options['step_size'] = 1e3

        params_list = ['p11.x1']
        unknowns_list = ['comp.f_xy']

        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(params_list,
                               unknowns_list,
                               return_format='dict')
        self.assertGreater(J['comp.f_xy']['p11.x1'][0][0], 0.0)

        J = prob.calc_gradient(['p12.x2'], unknowns_list, return_format='dict')
        self.assertLess(J['comp.f_xy']['p12.x2'][0][0], 0.0)
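This is the same truncation-error trick as the fd_options_form test above, but driven through per-parameter metadata: x1 is declared with fd_form='forward' and x2 with fd_form='backward', so with the oversized step the two computed gradients land on opposite sides of zero.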
Example #45
    def test_CADRE_MDP(self):

        n = 1500
        m = 300
        npts = 2

        # Instantiate
        model = Problem(impl=impl)
        root = model.root = CADRE_MDP_Group(n=n, m=m, npts=npts)

        # Add parameters and constraints to each CADRE instance.
        names = ['pt%s' % i for i in range(npts)]
        for i, name in enumerate(names):

            # add parameters to driver
            model.driver.add_desvar("%s.CP_Isetpt" % name, low=0., high=0.4)
            model.driver.add_desvar("%s.CP_gamma" % name, low=0, high=np.pi/2.)
            model.driver.add_desvar("%s.CP_P_comm" % name, low=0., high=25.)
            model.driver.add_desvar("%s.iSOC" % name, indices=[0], low=0.2, high=1.)

            model.driver.add_constraint('%s.ConCh' % name, upper=0.0)
            model.driver.add_constraint('%s.ConDs' % name, upper=0.0)
            model.driver.add_constraint('%s.ConS0' % name, upper=0.0)
            model.driver.add_constraint('%s.ConS1' % name, upper=0.0)
            model.driver.add_constraint('%s_con5.val' % name, equals=0.0)

        # Add Parameter groups
        model.driver.add_desvar("bp1.cellInstd", low=0., high=1.0)
        model.driver.add_desvar("bp2.finAngle", low=0., high=np.pi/2.)
        model.driver.add_desvar("bp3.antAngle", low=-np.pi/4, high=np.pi/4)

        # Add objective
        model.driver.add_objective('obj.val')

        # For parallel execution, we must use KSP
        if MPI:
            model.root.ln_solver = PetscKSP()

        model.setup()
        model.run()

        # Read pickle
        fpath = os.path.dirname(os.path.realpath(__file__))
        data = pickle.load(open(fpath + "/mdp_execute.pkl", 'rb'))

        for var in data:

            # We changed constraint names
            xvar = var
            if '_con1' in xvar:
                xvar = xvar.replace('_con1.val', '.ConCh')
            if '_con2' in xvar:
                xvar = xvar.replace('_con2.val', '.ConDs')
            if '_con3' in xvar:
                xvar = xvar.replace('_con3.val', '.ConS0')
            if '_con4' in xvar:
                xvar = xvar.replace('_con4.val', '.ConS1')

            computed = model[xvar]
            actual = data[var]
            if isinstance(computed, np.ndarray):
                rel = np.linalg.norm(actual - computed)/np.linalg.norm(actual)
            else:
                rel = np.abs(actual - computed)/np.abs(actual)

            print(xvar)
            print(computed)
            print(actual)
            if np.mean(actual) > 1e-3 or np.mean(computed) > 1e-3:
                assert rel <= 1e-3

        # Now do derivatives
        params = list(model.driver.get_desvars())
        unks = list(model.driver.get_objectives()) + list(model.driver.get_constraints())
        Jb = model.calc_gradient(params, unks, mode='rev', return_format='dict')

        # Read pickle
        fpath = os.path.dirname(os.path.realpath(__file__))
        Ja = pickle.load(open(fpath + "/mdp_derivs.pkl", 'rb'))

        for key1, value in sorted(Ja.items()):
            for key2 in sorted(value.keys()):

                # We changed constraint names
                xkey1 = key1
                if '_con1' in xkey1:
                    xkey1 = xkey1.replace('_con1.val', '.ConCh')
                if '_con2' in xkey1:
                    xkey1 = xkey1.replace('_con2.val', '.ConDs')
                if '_con3' in xkey1:
                    xkey1 = xkey1.replace('_con3.val', '.ConS0')
                if '_con4' in xkey1:
                    xkey1 = xkey1.replace('_con4.val', '.ConS1')

                computed = Jb[xkey1][key2]
                actual = Ja[key1][key2]
                if isinstance(computed, np.ndarray):
                    rel = np.linalg.norm(actual - computed)/np.linalg.norm(actual)
                else:
                    rel = np.abs(actual - computed)/np.abs(actual)

                print(xkey1, 'wrt', key2)
                print(computed)
                print(actual)
                if np.mean(actual) > 1e-3 or np.mean(computed) > 1e-3:
                    assert rel <= 1e-3
Example #46
    def test_fd_options_meta_form(self):

        class MetaParaboloid(Component):
            """ Evaluates the equation f(x,y) = (x-3)^2 + xy + (y+4)^2 - 3 """

            def __init__(self):
                super(MetaParaboloid, self).__init__()

                # Params
                self.add_param('x1', 1.0, fd_form='forward')
                self.add_param('x2', 1.0, fd_form='backward')
                self.add_param('y', 1.0)

                # Unknowns
                self.add_output('f_xy', 0.0)

            def solve_nonlinear(self, params, unknowns, resids):
                """f(x,y) = (x-3)^2 + xy + (y+4)^2 - 3
                Optimal solution (minimum): x = 6.6667; y = -7.3333
                """

                x1 = params['x1']
                x2 = params['x2']
                y = params['y']

                f_xy = ((x1-3.0)**2 + (x2-3.0)**2 + (x2+x2)*y + (y+4.0)**2 - 3.0)
                unknowns['f_xy'] = f_xy

            def jacobian(self, params, unknowns, resids):
                """Analytical derivatives"""

                x1 = params['x1']
                x2 = params['x2']
                y = params['y']
                J = {}

                J['f_xy', 'x1'] = (2.0*x1 - 6.0 + x2*y)
                J['f_xy', 'x2'] = (2.0*x2 - 6.0 + x1*y)
                J['f_xy', 'y'] = (2.0*y + 8.0 + x1 + x2)

                return J

        prob = Problem()
        prob.root = Group()
        comp = prob.root.add('comp', MetaParaboloid())
        prob.root.add('p11', ParamComp('x1', 15.0))
        prob.root.add('p12', ParamComp('x2', 15.0))
        prob.root.add('p2', ParamComp('y', 15.0))
        prob.root.connect('p11.x1', 'comp.x1')
        prob.root.connect('p12.x2', 'comp.x2')
        prob.root.connect('p2.y', 'comp.y')

        comp.fd_options['force_fd'] = True
        comp.fd_options['step_size'] = 1e3

        params_list = ['p11.x1']
        unknowns_list = ['comp.f_xy']

        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(params_list, unknowns_list, return_format='dict')
        self.assertGreater(J['comp.f_xy']['p11.x1'][0][0], 0.0)

        J = prob.calc_gradient(['p12.x2'], unknowns_list, return_format='dict')
        self.assertLess(J['comp.f_xy']['p12.x2'][0][0], 0.0)
Example #47
    def test_fd_options_step_type(self):

        class ScaledParaboloid(Component):
            """ Evaluates the equation f(x,y) = (x-3)^2 + xy + (y+4)^2 - 3 """

            def __init__(self):
                super(ScaledParaboloid, self).__init__()

                # Params
                self.add_param('x', 1.0)
                self.add_param('y', 1.0)

                # Unknowns
                self.add_output('f_xy', 0.0)

                self.scale = 1.0e-6

            def solve_nonlinear(self, params, unknowns, resids):
                """f(x,y) = (x-3)^2 + xy + (y+4)^2 - 3
                Optimal solution (minimum): x = 6.6667; y = -7.3333
                """

                x = params['x']
                y = params['y']

                f_xy = ((x-3.0)**2 + x*y + (y+4.0)**2 - 3.0)
                unknowns['f_xy'] = self.scale*f_xy

            def jacobian(self, params, unknowns, resids):
                """Analytical derivatives"""

                x = params['x']
                y = params['y']
                J = {}

                J['f_xy', 'x'] = (2.0*x - 6.0 + y) * self.scale
                J['f_xy', 'y'] = (2.0*y + 8.0 + x) * self.scale

                return J

        prob = Problem()
        prob.root = Group()
        comp = prob.root.add('comp', ScaledParaboloid())
        prob.root.add('p1', ParamComp('x', 8.0*comp.scale))
        prob.root.add('p2', ParamComp('y', 8.0*comp.scale))
        prob.root.connect('p1.x', 'comp.x')
        prob.root.connect('p2.y', 'comp.y')

        comp.fd_options['force_fd'] = True
        comp.fd_options['step_type'] = 'absolute'

        prob.setup(check=False)
        prob.run()

        J1 = prob.calc_gradient(['p1.x'], ['comp.f_xy'], return_format='dict')

        comp.fd_options['step_type'] = 'relative'
        J2 = prob.calc_gradient(['p1.x'], ['comp.f_xy'], return_format='dict')

        # Couldn't put together a case where one is much worse, so just make
        # sure they are not equal.
        self.assertNotEqual(J1['comp.f_xy']['p1.x'][0][0],
                            J2['comp.f_xy']['p1.x'][0][0])
Example #48
    def test_fd_options_step_type(self):
        class ScaledParaboloid(Component):
            """ Evaluates the equation f(x,y) = (x-3)^2 + xy + (y+4)^2 - 3 """
            def __init__(self):
                super(ScaledParaboloid, self).__init__()

                # Params
                self.add_param('x', 1.0)
                self.add_param('y', 1.0)

                # Unknowns
                self.add_output('f_xy', 0.0)

                self.scale = 1.0e-6

            def solve_nonlinear(self, params, unknowns, resids):
                """f(x,y) = (x-3)^2 + xy + (y+4)^2 - 3
                Optimal solution (minimum): x = 6.6667; y = -7.3333
                """

                x = params['x']
                y = params['y']

                f_xy = ((x - 3.0)**2 + x * y + (y + 4.0)**2 - 3.0)
                unknowns['f_xy'] = self.scale * f_xy

            def jacobian(self, params, unknowns, resids):
                """Analytical derivatives"""

                x = params['x']
                y = params['y']
                J = {}

                J['f_xy', 'x'] = (2.0 * x - 6.0 + y) * self.scale
                J['f_xy', 'y'] = (2.0 * y + 8.0 + x) * self.scale

                return J

        prob = Problem()
        prob.root = Group()
        comp = prob.root.add('comp', ScaledParaboloid())
        prob.root.add('p1', ParamComp('x', 8.0 * comp.scale))
        prob.root.add('p2', ParamComp('y', 8.0 * comp.scale))
        prob.root.connect('p1.x', 'comp.x')
        prob.root.connect('p2.y', 'comp.y')

        comp.fd_options['force_fd'] = True
        comp.fd_options['step_type'] = 'absolute'

        prob.setup(check=False)
        prob.run()

        J1 = prob.calc_gradient(['p1.x'], ['comp.f_xy'], return_format='dict')

        comp.fd_options['step_type'] = 'relative'
        J2 = prob.calc_gradient(['p1.x'], ['comp.f_xy'], return_format='dict')

        # Couldn't put together a case where one is much worse, so just make
        # sure they are not equal.
        self.assertNotEqual(J1['comp.f_xy']['p1.x'][0][0],
                            J2['comp.f_xy']['p1.x'][0][0])
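For reference on these last two examples: step_type='absolute' uses step_size as-is, while 'relative' scales the step by the magnitude of the variable being perturbed, which here is on the order of comp.scale = 1e-6. The two runs therefore use very different effective steps, and the tests only assert that the resulting derivatives differ.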