Example No. 1
    def test_generate_numpydocstring(self):
        group = Group()
        group.add('x_param', IndepVarComp('x', 1.0), promotes=['*'])
        group.add('mycomp', SimpleCompDerivMatVec(), promotes=['x', 'y'])

        prob = Problem()
        prob.root = group
        prob.root.ln_solver = ScipyGMRES()

        test_string = prob.root.ln_solver.generate_docstring()

        original_string = \
"""    \"\"\"

    Options
    -------
    options['atol'] : float(1e-12)
        Absolute convergence tolerance.
    options['err_on_maxiter'] : bool(False)
        If True, raise an AnalysisError if not converged at maxiter.
    options['iprint'] : int(0)
        Set to 0 to disable printing, set to 1 to print the residual to stdout each iteration, set to 2 to print subiteration residuals as well.
    options['maxiter'] : int(1000)
        Maximum number of iterations.
    options['mode'] : str('auto')
        Derivative calculation mode, set to 'fwd' for forward mode, 'rev' for reverse mode, or 'auto' to let OpenMDAO determine the best mode.
    options['restart'] : int(20)
        Number of iterations between restarts. Larger values increase iteration cost, but may be necessary for convergence

    \"\"\"
"""
        for sorig, stest in zip(original_string.split('\n'),
                                test_string.split('\n')):
            self.assertEqual(sorig, stest)
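The snippets in this listing are shown without their imports. They target the OpenMDAO 1.x API; a header roughly like the one below is needed to run them. The module paths are assumptions based on the 1.x package layout, not taken from the snippets themselves, and names that appear only in some examples (PetscKSP, ParamComp, ExplicitSolver, impl) belong to particular 1.x releases or the PETSc-enabled install and are left out.

# Assumed imports for the OpenMDAO 1.x snippets in this listing
# (module paths are a best guess, not part of the original examples).
from openmdao.api import Problem, Group, IndepVarComp, ScipyGMRES, \
    LinearGaussSeidel, DirectSolver
from openmdao.test.simple_comps import SimpleCompDerivMatVec
from openmdao.test.util import assert_rel_error
from six import iteritems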
Example No. 2
    def test_generate_numpydocstring(self):
        group = Group()
        group.add('x_param', IndepVarComp('x', 1.0), promotes=['*'])
        group.add('mycomp', SimpleCompDerivMatVec(), promotes=['x', 'y'])

        prob = Problem()
        prob.root = group
        prob.root.ln_solver = ScipyGMRES()

        test_string = prob.root.ln_solver.generate_docstring()
        original_string = '    """\n\n    Options\n    -------\n    options[\'atol\'] :  float(1e-12)\n        Absolute convergence tolerance.\n    options[\'iprint\'] :  int(0)\n        Set to 0 to disable printing, set to 1 to print the residual to stdout each iteration, set to 2 to print subiteration residuals as well.\n    options[\'maxiter\'] :  int(1000)\n        Maximum number of iterations.\n    options[\'mode\'] :  str(\'auto\')\n        Derivative calculation mode, set to \'fwd\' for forward mode, \'rev\' for reverse mode, or \'auto\' to let OpenMDAO determine the best mode.\n    options[\'precondition\'] :  bool(False)\n        Set to True to turn on preconditioning.\n    options[\'restart\'] :  int(20)\n        Number of iterations between restarts. Larger values increase iteration cost, but may be necessary for convergence\n\n    """\n'
        self.assertEqual(original_string, test_string)
Example No. 3
    def test_simple_matvec(self):
        group = Group()
        group.add('x_param', IndepVarComp('x', 1.0), promotes=['*'])
        group.add('mycomp', SimpleCompDerivMatVec(), promotes=['x', 'y'])

        prob = Problem()
        prob.root = group
        prob.root.ln_solver = LinearGaussSeidel()
        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['x'], ['y'], mode='fwd', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)

        J = prob.calc_gradient(['x'], ['y'], mode='rev', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
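All of these tests check that dy/dx comes out as 2.0. That value is set by SimpleCompDerivMatVec, a small test component whose output is y = 2*x and whose derivative is supplied through apply_linear as matrix-vector products rather than a stored Jacobian, which is exactly what exercises the linear solvers above. A minimal sketch of such a component, assuming the OpenMDAO 1.x Component interface, follows; it illustrates the idea and is not the library's actual source.

# Hypothetical stand-in for SimpleCompDerivMatVec (the real class ships with
# OpenMDAO's test suite); the import path for Component is an assumption.
from openmdao.api import Component

class SimpleCompDerivMatVecSketch(Component):
    """Computes y = 2*x and provides derivatives as matrix-vector products."""

    def __init__(self):
        super(SimpleCompDerivMatVecSketch, self).__init__()
        self.add_param('x', val=0.0)
        self.add_output('y', val=0.0)

    def solve_nonlinear(self, params, unknowns, resids):
        unknowns['y'] = 2.0 * params['x']

    def apply_linear(self, params, unknowns, dparams, dunknowns, dresids, mode):
        # fwd: accumulate J * d(inputs) into d(residuals);
        # rev: accumulate J^T * d(residuals) into d(inputs).
        if mode == 'fwd':
            if 'x' in dparams:
                dresids['y'] += 2.0 * dparams['x']
        else:  # 'rev'
            if 'x' in dparams:
                dparams['x'] += 2.0 * dresids['y']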
Example No. 4
    def test_simple_in_group_matvec(self):
        group = Group()
        sub = group.add('sub', Group(), promotes=['x', 'y'])
        group.add('x_param', ParamComp('x', 1.0), promotes=['*'])
        sub.add('mycomp', SimpleCompDerivMatVec(), promotes=['x', 'y'])

        prob = Problem()
        prob.root = group
        prob.root.ln_solver = ExplicitSolver()
        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['x'], ['y'], mode='fwd', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)

        J = prob.calc_gradient(['x'], ['y'], mode='rev', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
Example No. 5
    def test_simple_choose_different_alg(self):
        group = Group()
        group.add('x_param', IndepVarComp('x', 1.0), promotes=['*'])
        group.add('mycomp', SimpleCompDerivMatVec(), promotes=['x', 'y'])

        prob = Problem(impl=impl)
        prob.root = group
        prob.root.ln_solver = PetscKSP()
        prob.root.ln_solver.options['ksp_type'] = 'gmres'
        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['x'], ['y'], mode='fwd', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)

        J = prob.calc_gradient(['x'], ['y'], mode='rev', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
Example No. 6
    def test_full_model_fd_simple_comp_promoted(self):

        prob = Problem()
        prob.root = Group()
        sub = prob.root.add('sub', Group(), promotes=['*'])
        sub.add('comp', SimpleCompDerivMatVec(), promotes=['*'])
        prob.root.add('p1', IndepVarComp('x', 1.0), promotes=['*'])

        prob.root.fd_options['force_fd'] = True

        prob.setup(check=False)
        prob.run()

        indep_list = ['x']
        unknown_list = ['y']

        J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
Example No. 7
    def test_simple_matvec(self):
        group = Group()
        group.add('x_param', IndepVarComp('x', 1.0), promotes=['*'])
        group.add('mycomp', SimpleCompDerivMatVec(), promotes=['x', 'y'])

        prob = Problem()
        prob.root = group
        prob.root.ln_solver = DirectSolver()
        prob.root.ln_solver.options['jacobian_method'] = 'assemble'
        prob.setup(check=False)
        prob.run()

        with self.assertRaises(RuntimeError) as cm:
            J = prob.calc_gradient(['x'], ['y'], mode='fwd', return_format='dict')

        expected_msg = "The 'assemble' jacobian_method is not supported when " + \
                       "'apply_linear' is used on a component (mycomp)."

        self.assertEqual(str(cm.exception), expected_msg)
Example No. 8
    def test_full_model_fd_simple_comp_promoted(self):

        prob = Problem()
        prob.root = Group()
        sub = prob.root.add('sub', Group(), promotes=['*'])
        sub.add('comp', SimpleCompDerivMatVec(), promotes=['*'])
        prob.root.add('p1', IndepVarComp('x', 1.0), promotes=['*'])

        prob.setup(check=False)
        prob.run()

        data = prob.check_total_derivatives(out_stream=None)

        for key, val in iteritems(data):
            assert_rel_error(self, val['abs error'][0], 0.0, 1e-5)
            assert_rel_error(self, val['abs error'][1], 0.0, 1e-5)
            assert_rel_error(self, val['abs error'][2], 0.0, 1e-5)
            assert_rel_error(self, val['rel error'][0], 0.0, 1e-5)
            assert_rel_error(self, val['rel error'][1], 0.0, 1e-5)
            assert_rel_error(self, val['rel error'][2], 0.0, 1e-5)
Example No. 9
    def test_simple_matvec_subbed(self):
        group = Group()
        group.add('mycomp', SimpleCompDerivMatVec(), promotes=['x', 'y'])

        prob = Problem(impl=impl)
        prob.root = Group()
        prob.root.add('x_param', ParamComp('x', 1.0), promotes=['*'])
        prob.root.add('sub', group, promotes=['*'])

        prob.root.ln_solver = PetscKSP()
        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['x'], ['y'], mode='fwd', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)

        J = prob.calc_gradient(['x'], ['y'], mode='rev', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)

        J = prob.calc_gradient(['x'], ['y'], mode='fd', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
Example No. 10
    def test_full_model_fd_simple_comp(self):

        prob = Problem()
        prob.root = Group()
        prob.root.add('comp', SimpleCompDerivMatVec())
        prob.root.add('p1', ParamComp('x', 1.0))

        prob.root.connect('p1.x', 'comp.x')

        prob.root.fd_options['force_fd'] = True

        prob.setup(check=False)
        prob.run()

        param_list = ['comp.x']
        unknown_list = ['comp.y']

        J = prob.calc_gradient(param_list,
                               unknown_list,
                               mode='fwd',
                               return_format='dict')
        assert_rel_error(self, J['comp.y']['comp.x'][0][0], 2.0, 1e-6)
Example No. 11
    def test_full_model_fd_simple_comp_promoted(self):

        prob = Problem()
        prob.root = Group()
        sub = prob.root.add('sub', Group(), promotes=['*'])
        sub.add('comp', SimpleCompDerivMatVec(), promotes=['*'])
        prob.root.add('p1', IndepVarComp('x', 1.0), promotes=['*'])

        prob.root.deriv_options['type'] = 'fd'

        prob.setup(check=False)
        prob.run()

        indep_list = ['x']
        unknown_list = ['y']

        J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)

        # We should not allocate deriv vectors for full model FD
        self.assertEqual(len(prob.root.dumat[None].vec), 0)
        self.assertEqual(len(prob.root.drmat[None].vec), 0)
        self.assertEqual(len(prob.root.dpmat[None].vec), 0)
Example No. 12
    def test_simple_matvec_subbed_like_multipoint(self):
        group = Group()
        group.add('mycomp', SimpleCompDerivMatVec(), promotes=['x', 'y'])

        prob = Problem()
        prob.root = Group()
        prob.root.add('sub', group, promotes=['*'])
        prob.root.sub.add('x_param', IndepVarComp('x', 1.0), promotes=['*'])

        prob.root.ln_solver = ScipyGMRES()
        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['x'], ['y'], mode='fwd', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)

        J = prob.calc_gradient(['x'], ['y'], mode='rev', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)

        J = prob.calc_gradient(['x'], ['y'], mode='fd', return_format='dict')
        assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)

        J = prob.calc_gradient(['x'], ['y'], mode='fd', return_format='array')
        assert_rel_error(self, J[0][0], 2.0, 1e-6)
Example No. 13
    def test_full_model_fd_simple_comp(self):

        prob = Problem()
        prob.root = Group()
        prob.root.add('comp', SimpleCompDerivMatVec())
        prob.root.add('p1', IndepVarComp('x', 1.0))

        prob.root.connect('p1.x', 'comp.x')

        prob.root.fd_options['force_fd'] = True

        prob.setup(check=False)
        prob.run()

        indep_list = ['comp.x']
        unknown_list = ['comp.y']

        J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
        assert_rel_error(self, J['comp.y']['comp.x'][0][0], 2.0, 1e-6)

        # We should not allocate deriv vectors for full model FD
        self.assertEqual(len(prob.root.dumat[None].vec), 0)
        self.assertEqual(len(prob.root.drmat[None].vec), 0)
        self.assertEqual(len(prob.root.dpmat[None].vec), 0)
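Each example above is a unittest method lifted out of a larger test class. To try one in isolation, it can be dropped into a minimal test case, using the assumed imports sketched after Example No. 1:

import unittest

class TestSimpleMatVec(unittest.TestCase):

    def test_full_model_fd_simple_comp(self):
        ...  # paste the body of Example No. 13 (or any other example) here

if __name__ == '__main__':
    unittest.main()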