Example #1
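The methods in this example come from an OpenMDAO 1.x test case and rely on a surrounding test class plus a set of imports that the listing omits. Below is a minimal sketch of that scaffolding; the import paths, the MPITestCase base class, and the N_PROCS attribute are assumptions based on the OpenMDAO 1.x layout, not part of the original source.

# --- Assumed scaffolding (paths and names inferred from the OpenMDAO 1.x layout) ---
from six import iteritems

from openmdao.api import Problem, LinearGaussSeidel            # assumed import path
from openmdao.solvers.petsc_ksp import PetscKSP                # assumed import path
from openmdao.test.converge_diverge import ConvergeDivergePar  # assumed import path
from openmdao.test.util import assert_rel_error                # assumed import path
from openmdao.test.mpi_util import MPITestCase                 # assumed import path

# `impl` selects the data-passing implementation handed to Problem(impl=impl):
# PETSc when running under MPI, the basic serial implementation otherwise.
from openmdao.core.mpi_wrap import MPI                         # assumed import path
if MPI:
    from openmdao.core.petsc_impl import PetscImpl as impl
else:
    from openmdao.core.basic_impl import BasicImpl as impl


class TestConvergeDivergePar(MPITestCase):

    N_PROCS = 2  # processes the MPI test harness should launch (assumed value)

    # ... the test methods below sit inside a class like this ...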
    def test_converge_diverge_par(self):

        prob = Problem(impl=impl)
        prob.root = ConvergeDivergePar()
        prob.root.ln_solver = PetscKSP()

        prob.setup(check=False)
        prob.run()

        # Make sure value is fine.
        assert_rel_error(self, prob['comp7.y1'], -102.7, 1e-6)

        indep_list = ['p.x']
        unknown_list = ['comp7.y1']

        J = prob.calc_gradient(indep_list,
                               unknown_list,
                               mode='fwd',
                               return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)

        J = prob.calc_gradient(indep_list,
                               unknown_list,
                               mode='rev',
                               return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)

        J = prob.calc_gradient(indep_list,
                               unknown_list,
                               mode='fd',
                               return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)

    def test_converge_diverge_compfd(self):

        prob = Problem(impl=impl)
        prob.root = ConvergeDivergePar()
        prob.root.ln_solver = PetscKSP()

        # Finite-difference comp2 and comp5; each sits under a separate parallel group.
        prob.root.par1.comp2.fd_options['force_fd'] = True
        prob.root.par2.comp5.fd_options['force_fd'] = True

        prob.setup(check=False)
        prob.run()

        # Make sure value is fine.
        assert_rel_error(self, prob['comp7.y1'], -102.7, 1e-6)

        indep_list = ['p.x']
        unknown_list = ['comp7.y1']

        J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)

        J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)

        J = prob.calc_gradient(indep_list, unknown_list, mode='fd', return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)

    def test_with_relevance_rev(self):

        prob = Problem()
        prob.root = ConvergeDivergePar()
        prob.root.ln_solver = LinearGaussSeidel()

        prob.driver.add_desvar('p.x')
        prob.driver.add_objective('comp7.y1')
        prob.root.ln_solver.options['mode'] = 'rev'
        prob.root.ln_solver.options['single_voi_relevance_reduction'] = True

        prob.setup(check=False)
        prob.run()

        data = prob.check_total_derivatives(out_stream=None)

        for key, val in iteritems(data):
            assert_rel_error(self, val['abs error'][1], 0.0, 1e-5)
            assert_rel_error(self, val['rel error'][1], 0.0, 1e-5)

    def test_converge_diverge_comp_cs(self):

        prob = Problem(impl=impl)
        prob.root = ConvergeDivergePar()
        prob.root.ln_solver = PetscKSP()

        # Approximate derivatives for the whole model using complex step
        prob.root.fd_options['force_fd'] = True
        prob.root.fd_options['form'] = 'complex_step'

        prob.setup(check=False)
        prob.run()

        # Make sure value is fine.
        assert_rel_error(self, prob['comp7.y1'], -102.7, 1e-6)

        indep_list = ['p.x']
        unknown_list = ['comp7.y1']

        J = prob.calc_gradient(indep_list, unknown_list, mode='fd', return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)
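
All of the assertions above check the same total derivative, d(comp7.y1)/d(p.x). For reference, the same quantity can be computed outside of a test harness; a minimal sketch, run serially (so no impl is passed to Problem) and with import paths assumed from the 1.x layout:

# Hypothetical standalone use of Problem.calc_gradient (OpenMDAO 1.x API).
from openmdao.api import Problem                                # assumed import path
from openmdao.test.converge_diverge import ConvergeDivergePar   # assumed import path

prob = Problem()
prob.root = ConvergeDivergePar()
prob.setup(check=False)
prob.run()

# Total derivative of comp7.y1 with respect to p.x, forward mode.
J = prob.calc_gradient(['p.x'], ['comp7.y1'], mode='fwd', return_format='dict')
print(J['comp7.y1']['p.x'][0][0])  # expected to be close to -40.75, as asserted above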