示例#1
0
    def test_nested_relevancy_gmres(self):
        # Regression guard: stale values in the dp vector from a higher
        # scope must not keep "converting themselves" while a nested
        # sub-solver iterates.

        # This test is just to make sure that values in the dp vector from
        # higher scopes aren't sitting there converting themselves during sub
        # iterations.

        prob = Problem(impl=impl)
        root = prob.root = Group()
        root.add('p1', IndepVarComp('xx', 3.0))
        root.add(
            'c1',
            ExecComp(['y1=0.5*x + 1.0*xx', 'y2=0.3*x - 1.0*xx'],
                     units={'y2': 'km'}))
        root.add('c2', ExecComp(['y=0.5*x']))
        sub = root.add('sub', Group())
        # NOTE(review): 'fm' on cc1.x1 vs 'km' on c1.y2 implies a huge unit
        # conversion across the sub-group boundary — presumably the
        # combination that exposed the original bug; confirm against history.
        sub.add('cc1', ExecComp(['y=1.01*x1 + 1.01*x2'], units={'x1': 'fm'}))
        sub.add('cc2', ExecComp(['y=1.01*x']))

        # Outer cycle c1 <-> c2; inner cycle cc1 <-> cc2 inside 'sub'.
        root.connect('p1.xx', 'c1.xx')
        root.connect('c1.y1', 'c2.x')
        root.connect('c2.y', 'c1.x')
        root.connect('c1.y2', 'sub.cc1.x1')
        root.connect('sub.cc1.y', 'sub.cc2.x')
        root.connect('sub.cc2.y', 'sub.cc1.x2')

        # Top solvers deliberately capped at one iteration so the nested
        # solvers on 'sub' do the real converging.
        root.nl_solver = Newton()
        root.nl_solver.options['maxiter'] = 1
        root.ln_solver = PetscKSP()
        root.ln_solver.options['maxiter'] = 1

        sub.nl_solver = Newton()
        #sub.nl_solver.options['maxiter'] = 7
        sub.ln_solver = PetscKSP()

        prob.driver.add_desvar('p1.xx')
        prob.driver.add_objective('sub.cc2.y')

        prob.setup(check=False)
        prob.print_all_convergence()

        prob.run()

        # GMRES doesn't cause a successive build-up in the value of an out-of
        # scope param, but the linear solver doesn't converge. We can test to
        # make sure it does.
        iter_count = sub.ln_solver.iter_count
        self.assertTrue(iter_count < 20)
        self.assertTrue(not np.isnan(prob['sub.cc2.y']))
示例#2
0
    def test_analysis_error(self):
        # Verify that PetscKSP raises AnalysisError (with the expected
        # message) from calc_gradient when 'err_on_maxiter' is set and the
        # iteration cap is exceeded, while the nonlinear solve itself is fine.
        # Fix: the model was previously run twice back-to-back with no state
        # change in between; the redundant second run() is removed.

        prob = Problem(impl=impl)
        prob.root = ConvergeDiverge()
        prob.root.ln_solver = PetscKSP()
        prob.root.ln_solver.options['maxiter'] = 2
        prob.root.ln_solver.options['err_on_maxiter'] = True

        prob.setup(check=False)
        prob.run()

        indep_list = ['p.x']
        unknown_list = ['comp7.y1']

        # Make sure value is fine.
        assert_rel_error(self, prob['comp7.y1'], -102.7, 1e-6)

        try:
            J = prob.calc_gradient(indep_list,
                                   unknown_list,
                                   mode='fwd',
                                   return_format='dict')
        except AnalysisError as err:
            self.assertEqual(
                str(err),
                "Solve in '': PetscKSP FAILED to converge in 6 iterations")
        else:
            self.fail("expected AnalysisError")
示例#3
0
    def test_fan_in_grouped(self):
        """Gradients of the grouped fan-in model under PetscKSP, fwd and rev."""
        prob = Problem(impl=impl)
        prob.root = FanInGrouped()
        prob.root.ln_solver = PetscKSP()

        inputs = ['p1.x1', 'p2.x2']
        outputs = ['comp3.y']

        prob.setup(check=False)
        prob.run()

        # Both derivative modes must agree with the analytic values.
        for mode in ('fwd', 'rev'):
            J = prob.calc_gradient(inputs, outputs, mode=mode,
                                   return_format='dict')
            assert_rel_error(self, J['comp3.y']['p1.x1'][0][0], -6.0, 1e-6)
            assert_rel_error(self, J['comp3.y']['p2.x2'][0][0], 35.0, 1e-6)
示例#4
0
    def test_double_arraycomp(self):
        # Mainly testing a bug in the array return for multiple arrays
        group = Group()
        group.add('x_param1', IndepVarComp('x1', np.ones((2))), promotes=['*'])
        group.add('x_param2', IndepVarComp('x2', np.ones((2))), promotes=['*'])
        group.add('mycomp', DoubleArrayComp(), promotes=['*'])

        prob = Problem(impl=impl)
        prob.root = group
        prob.root.ln_solver = PetscKSP()
        prob.setup(check=False)
        prob.run()

        # Reference: the component's own analytic block Jacobian.
        Jbase = group.mycomp.JJ

        # The assembled 'array' return must match in every mode.
        for mode in ('fwd', 'fd', 'rev'):
            J = prob.calc_gradient(['x1', 'x2'], ['y1', 'y2'],
                                   mode=mode, return_format='array')
            assert_rel_error(self, np.linalg.norm(J - Jbase), 0.0, 1e-8)
示例#5
0
    def test_single_diamond_grouped(self):
        """Check fwd/rev/fd gradients on the grouped single-diamond model."""
        prob = Problem(impl=impl)
        prob.root = SingleDiamondGrouped()
        prob.root.ln_solver = PetscKSP()
        prob.setup(check=False)
        prob.run()

        for mode in ('fwd', 'rev', 'fd'):
            J = prob.calc_gradient(['p.x'], ['comp4.y1', 'comp4.y2'],
                                   mode=mode, return_format='dict')
            assert_rel_error(self, J['comp4.y1']['p.x'][0][0], 25, 1e-6)
            assert_rel_error(self, J['comp4.y2']['p.x'][0][0], -40.5, 1e-6)
示例#6
0
    def test_fan_out_grouped(self):
        """Fan-out model with comps nested in subgroups: fwd and rev gradients."""
        prob = Problem(impl=impl)
        prob.root = FanOutGrouped()
        prob.root.ln_solver = PetscKSP()

        prob.setup(check=False)
        prob.run()

        param = 'sub.pgroup.p.x'
        unknowns = ['sub.comp2.y', 'sub.comp3.y']

        for mode in ('fwd', 'rev'):
            J = prob.calc_gradient([param], unknowns, mode=mode,
                                   return_format='dict')
            assert_rel_error(self, J[unknowns[0]][param][0][0], -6.0, 1e-6)
            assert_rel_error(self, J[unknowns[1]][param][0][0], 15.0, 1e-6)
示例#7
0
    def test_converge_diverge_compfd(self):
        """Converge-diverge model with two components finite-differenced,
        one under each parallel group; all three gradient modes checked."""
        prob = Problem(impl=impl)
        prob.root = ConvergeDivergePar()
        prob.root.ln_solver = PetscKSP()

        # fd comp2 and comp5. each is under a par group
        prob.root.par1.comp2.fd_options['force_fd'] = True
        prob.root.par2.comp5.fd_options['force_fd'] = True

        prob.setup(check=False)
        prob.run()

        # Make sure value is fine.
        assert_rel_error(self, prob['comp7.y1'], -102.7, 1e-6)

        for mode in ('fwd', 'rev', 'fd'):
            J = prob.calc_gradient(['p.x'], ['comp7.y1'], mode=mode,
                                   return_format='dict')
            assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)
示例#8
0
    def test_converge_diverge_groups(self):
        """Gradient of the grouped converge-diverge model in all three modes."""
        prob = Problem(impl=impl)
        prob.root = ConvergeDivergeGroups()
        prob.root.ln_solver = PetscKSP()
        prob.setup(check=False)
        prob.run()

        # Make sure value is fine.
        assert_rel_error(self, prob['comp7.y1'], -102.7, 1e-6)

        for mode in ('fwd', 'rev', 'fd'):
            J = prob.calc_gradient(['p.x'], ['comp7.y1'], mode=mode,
                                   return_format='dict')
            assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)
示例#9
0
    def test_fan_out_all_grouped(self):
        """KSP with a Gauss-Seidel preconditioner over DirectSolver
        subgroups; fwd and rev gradients checked."""
        prob = Problem(impl=impl)
        prob.root = FanOutAllGrouped()
        prob.root.ln_solver = PetscKSP()

        prob.root.ln_solver.preconditioner = LinearGaussSeidel()
        prob.root.sub1.ln_solver = DirectSolver()
        prob.root.sub2.ln_solver = DirectSolver()
        prob.root.sub3.ln_solver = DirectSolver()

        prob.setup(check=False)
        prob.run()

        for mode in ('fwd', 'rev'):
            J = prob.calc_gradient(['p.x'], ['sub2.comp2.y', 'sub3.comp3.y'],
                                   mode=mode, return_format='dict')
            assert_rel_error(self, J['sub2.comp2.y']['p.x'][0][0], -6.0, 1e-6)
            assert_rel_error(self, J['sub3.comp3.y']['p.x'][0][0], 15.0, 1e-6)
示例#10
0
    def test_KSP_under_relevance_reduction(self):
        # Test for a bug reported by NREL, where KSP in a subgroup would bomb
        # out during top-level gradients under relevance reduction.

        nProblems = 4
        top = Problem(impl=impl)
        top.root = SellarDerivativesSuperGroup(nProblems=nProblems)

        # Extra constraint component added inside Sellar0 but promoted,
        # giving the driver a constraint that lives inside one subgroup.
        top.root.manySellars.Sellar0.add('extra_con_cmp3',
                                         ExecComp('con3 = 3.16 - y1_0'),
                                         promotes=['*'])

        top.driver.add_desvar('z',
                              lower=np.array([-10.0, 0.0]),
                              upper=np.array([10.0, 10.0]))
        top.driver.add_desvar('x', lower=0.0, upper=10.0)

        top.driver.add_objective('obj')
        top.driver.add_constraint('y1_0', upper=0.0)
        top.driver.add_constraint('y1_1', upper=0.0)
        top.driver.add_constraint('y2_2', upper=0.0)
        top.driver.add_constraint('y2_3', upper=0.0)
        top.driver.add_constraint('con3', upper=0.0)

        # Relevance reduction plus 'rev' mode is the combination this
        # regression test is pinned to.
        top.root.ln_solver.options['single_voi_relevance_reduction'] = True
        top.root.ln_solver.options['mode'] = 'rev'

        # PetscKSP only makes sense when a (parallel) impl is available.
        if impl is not None:
            top.root.manySellars.Sellar0.ln_solver = PetscKSP()
            top.root.manySellars.Sellar1.ln_solver = PetscKSP()
            top.root.manySellars.Sellar2.ln_solver = PetscKSP()
            top.root.manySellars.Sellar3.ln_solver = PetscKSP()

        top.setup(check=False)

        # Setting initial values for design variables
        top['x'] = 1.0
        top['z'] = np.array([5.0, 2.0])

        top.run()

        # Should get no error
        J = top.calc_gradient(['x', 'z'], ['obj', 'con3'])
示例#11
0
    def test_sellar_derivs_grouped(self):
        # Check Sellar gradients under PetscKSP against hard-coded reference
        # values in fwd, rev, and (central-difference) fd modes.

        prob = Problem(impl=impl)
        prob.root = SellarDerivativesGrouped()
        prob.root.ln_solver = PetscKSP()

        # Tighten the MDA so derivative comparisons aren't polluted by an
        # unconverged nonlinear solution.
        prob.root.mda.nl_solver.options['atol'] = 1e-12
        prob.setup(check=False)
        prob.run()

        # Just make sure we are at the right answer
        assert_rel_error(self, prob['y1'], 25.58830273, .00001)
        assert_rel_error(self, prob['y2'], 12.05848819, .00001)

        indep_list = ['x', 'z']
        unknown_list = ['obj', 'con1', 'con2']

        # Known-good reference Jacobian for the Sellar problem.
        Jbase = {}
        Jbase['con1'] = {}
        Jbase['con1']['x'] = -0.98061433
        Jbase['con1']['z'] = np.array([-9.61002285, -0.78449158])
        Jbase['con2'] = {}
        Jbase['con2']['x'] = 0.09692762
        Jbase['con2']['z'] = np.array([1.94989079, 1.0775421])
        Jbase['obj'] = {}
        Jbase['obj']['x'] = 2.98061392
        Jbase['obj']['z'] = np.array([9.61001155, 1.78448534])

        J = prob.calc_gradient(indep_list,
                               unknown_list,
                               mode='fwd',
                               return_format='dict')
        for key1, val1 in Jbase.items():
            for key2, val2 in val1.items():
                assert_rel_error(self, J[key1][key2], val2, .00001)

        J = prob.calc_gradient(indep_list,
                               unknown_list,
                               mode='rev',
                               return_format='dict')
        for key1, val1 in Jbase.items():
            for key2, val2 in val1.items():
                assert_rel_error(self, J[key1][key2], val2, .00001)

        # Cheat a bit so I can twiddle mode
        # NOTE(review): unlocking OptionsDictionary globally is a test-only
        # hack to change deriv options after setup; it is not restored here.
        OptionsDictionary.locked = False

        prob.root.deriv_options['form'] = 'central'
        J = prob.calc_gradient(indep_list,
                               unknown_list,
                               mode='fd',
                               return_format='dict')
        for key1, val1 in Jbase.items():
            for key2, val2 in val1.items():
                assert_rel_error(self, J[key1][key2], val2, .00001)
示例#12
0
    def __init__(self, problem_id=0):
        """Build the Sellar sub-group: two coupled disciplines promoted to
        this level, converged with nonlinear Gauss-Seidel at a tight
        tolerance; PetscKSP is used for linear solves only when a parallel
        impl is available.
        """
        super(SellarDerivativesSubGroup, self).__init__()

        self.add('d1', SellarDis1(problem_id=problem_id), promotes=['*'])
        self.add('d2', SellarDis2(problem_id=problem_id), promotes=['*'])

        self.nl_solver = NLGaussSeidel()
        self.nl_solver.options['atol'] = 1.0e-12

        # impl is None when no parallel implementation (PETSc) is present.
        if impl is not None:
            self.ln_solver = PetscKSP()
示例#13
0
    def test_simple(self):
        """Single matvec component: dy/dx must be 2.0 in both modes."""
        model = Group()
        model.add('x_param', IndepVarComp('x', 1.0), promotes=['*'])
        model.add('mycomp', SimpleCompDerivMatVec(), promotes=['x', 'y'])

        prob = Problem(impl=impl)
        prob.root = model
        prob.root.ln_solver = PetscKSP()
        prob.setup(check=False)
        prob.run()

        for mode in ('fwd', 'rev'):
            J = prob.calc_gradient(['x'], ['y'], mode=mode,
                                   return_format='dict')
            assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
示例#14
0
    def test_array2D(self):
        """2x2 array component: fwd and rev Jacobians match the cached one."""
        model = Group()
        model.add('x_param', ParamComp('x', np.ones((2, 2))), promotes=['*'])
        model.add('mycomp', ArrayComp2D(), promotes=['x', 'y'])

        prob = Problem(impl=impl)
        prob.root = model
        prob.root.ln_solver = PetscKSP()
        prob.setup(check=False)
        prob.run()

        J = prob.calc_gradient(['x'], ['y'], mode='fwd', return_format='dict')
        # Reference Jacobian cached on the component itself.
        Jbase = prob.root.mycomp._jacobian_cache
        error = np.linalg.norm(J['y']['x'] - Jbase['y', 'x'])
        assert_rel_error(self, error, 0.0, 1e-8)

        J = prob.calc_gradient(['x'], ['y'], mode='rev', return_format='dict')
        error = np.linalg.norm(J['y']['x'] - Jbase['y', 'x'])
        assert_rel_error(self, error, 0.0, 1e-8)
示例#15
0
    def test_converge_diverge_comp_cs(self):
        """Whole model finite-differenced via complex step; fd gradient only."""
        prob = Problem(impl=impl)
        prob.root = ConvergeDivergePar()
        prob.root.ln_solver = PetscKSP()

        # fd the whole model
        prob.root.fd_options['force_fd'] = True
        prob.root.fd_options['form'] = 'complex_step'

        prob.setup(check=False)
        prob.run()

        # Make sure value is fine.
        assert_rel_error(self, prob['comp7.y1'], -102.7, 1e-6)

        J = prob.calc_gradient(['p.x'], ['comp7.y1'], mode='fd',
                               return_format='dict')
        assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)
示例#16
0
    def test_fan_out(self):
        """Plain fan-out model under PetscKSP: fwd and rev gradients."""
        prob = Problem(impl=impl)
        prob.root = FanOut()
        prob.root.ln_solver = PetscKSP()
        prob.setup(check=False)
        prob.run()

        for mode in ('fwd', 'rev'):
            J = prob.calc_gradient(['p.x'], ['comp2.y', 'comp3.y'],
                                   mode=mode, return_format='dict')
            assert_rel_error(self, J['comp2.y']['p.x'][0][0], -6.0, 1e-6)
            assert_rel_error(self, J['comp3.y']['p.x'][0][0], 15.0, 1e-6)
示例#17
0
    def test_simple_matvec_subbed_like_multipoint(self):
        """Matvec comp promoted up through a subgroup, with the param
        component added to the subgroup after it is attached to root."""
        sub = Group()
        sub.add('mycomp', SimpleCompDerivMatVec(), promotes=['x', 'y'])

        prob = Problem(impl=impl)
        prob.root = Group()
        prob.root.add('sub', sub, promotes=['*'])
        prob.root.sub.add('x_param', ParamComp('x', 1.0), promotes=['*'])

        prob.root.ln_solver = PetscKSP()
        prob.setup(check=False)
        prob.run()

        for mode in ('fwd', 'rev', 'fd'):
            J = prob.calc_gradient(['x'], ['y'], mode=mode,
                                   return_format='dict')
            assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)

        # Also exercise the flat 'array' return format.
        J = prob.calc_gradient(['x'], ['y'], mode='fd', return_format='array')
        assert_rel_error(self, J[0][0], 2.0, 1e-6)
示例#18
0
    model.driver.add_constraint('%s.ConCh'% name, upper=0.0)
    model.driver.add_constraint('%s.ConDs'% name, upper=0.0)
    model.driver.add_constraint('%s.ConS0'% name, upper=0.0)
    model.driver.add_constraint('%s.ConS1'% name, upper=0.0)
    model.driver.add_constraint('%s_con5.val'% name, equals=0.0)

# Add Parameter groups
model.driver.add_desvar("bp1.cellInstd", low=0., high=1.0)
model.driver.add_desvar("bp2.finAngle", low=0., high=np.pi/2.)
model.driver.add_desvar("bp3.antAngle", low=-np.pi/4, high=np.pi/4)

# Add objective
model.driver.add_objective('obj.val')

# For parallel execution, we must use KSP
model.root.ln_solver = PetscKSP()
#model.root.ln_solver = LinearGaussSeidel()
#model.root.parallel.ln_solver = LinearGaussSeidel()
#model.root.parallel.pt0.ln_solver = LinearGaussSeidel()
#model.root.parallel.pt1.ln_solver = LinearGaussSeidel()

model.setup()
model.run()

#----------------------------------------------------------------
# Below this line, code I was using for verifying and profiling.
#----------------------------------------------------------------
# NOTE(review): concatenating dict views with '+' is Python-2-only; under
# Python 3 it raises TypeError. Wrap each .keys() in list() when porting.
params = model.driver.get_desvars().keys()
unks = model.driver.get_objectives().keys() + model.driver.get_constraints().keys()

import cProfile
示例#19
0
    def test_CADRE_MDP(self):
        """Run the 2-point CADRE MDP model and compare both the converged
        states and the reverse-mode Jacobian against pickled reference data.

        Fix: the design-var/response name lists were built by concatenating
        dict views with '+', which raises TypeError on Python 3; the views
        are now wrapped in list() (behavior-identical on Python 2).
        """
        n = 1500
        m = 300
        npts = 2

        # Instantiate
        model = Problem(impl=impl)
        root = model.root = CADRE_MDP_Group(n=n, m=m, npts=npts)

        # Add parameters and constraints to each CADRE instance.
        names = ['pt%s' % i for i in range(npts)]
        for i, name in enumerate(names):

            # add parameters to driver
            model.driver.add_desvar("%s.CP_Isetpt" % name, low=0., high=0.4)
            model.driver.add_desvar("%s.CP_gamma" % name, low=0, high=np.pi/2.)
            model.driver.add_desvar("%s.CP_P_comm" % name, low=0., high=25.)
            model.driver.add_desvar("%s.iSOC" % name, indices=[0], low=0.2, high=1.)

            model.driver.add_constraint('%s.ConCh'% name, upper=0.0)
            model.driver.add_constraint('%s.ConDs'% name, upper=0.0)
            model.driver.add_constraint('%s.ConS0'% name, upper=0.0)
            model.driver.add_constraint('%s.ConS1'% name, upper=0.0)
            model.driver.add_constraint('%s_con5.val'% name, equals=0.0)

        # Add Parameter groups
        model.driver.add_desvar("bp1.cellInstd", low=0., high=1.0)
        model.driver.add_desvar("bp2.finAngle", low=0., high=np.pi/2.)
        model.driver.add_desvar("bp3.antAngle", low=-np.pi/4, high=np.pi/4)

        # Add objective
        model.driver.add_objective('obj.val')

        # For parallel execution, we must use KSP
        if MPI:
            model.root.ln_solver = PetscKSP()

        model.setup()
        model.run()

        # Reference states pickled next to this test module.
        fpath = os.path.dirname(os.path.realpath(__file__))
        data = pickle.load(open(fpath + "/mdp_execute.pkl", 'rb'))

        for var in data:

            # We changed constraint names
            xvar = var
            if '_con1' in xvar:
                xvar = xvar.replace('_con1.val', '.ConCh')
            if '_con2' in xvar:
                xvar = xvar.replace('_con2.val', '.ConDs')
            if '_con3' in xvar:
                xvar = xvar.replace('_con3.val', '.ConS0')
            if '_con4' in xvar:
                xvar = xvar.replace('_con4.val', '.ConS1')

            computed = model[xvar]
            actual = data[var]
            if isinstance(computed, np.ndarray):
                rel = np.linalg.norm(actual - computed)/np.linalg.norm(actual)
            else:
                rel = np.abs(actual - computed)/np.abs(actual)

            print(xvar)
            print(computed)
            print(actual)
            # Skip the relative check when both values are near zero.
            if np.mean(actual) > 1e-3 or np.mean(computed) > 1e-3:
                assert rel <= 1e-3

        # Now do derivatives. list() wrappers keep this working on both
        # Python 2 and Python 3 (dict views don't support '+').
        params = list(model.driver.get_desvars().keys())
        unks = (list(model.driver.get_objectives().keys()) +
                list(model.driver.get_constraints().keys()))
        Jb = model.calc_gradient(params, unks, mode='rev', return_format='dict')

        # Reference Jacobian pickled next to this test module.
        fpath = os.path.dirname(os.path.realpath(__file__))
        Ja = pickle.load(open(fpath + "/mdp_derivs.pkl", 'rb'))

        for key1, value in sorted(Ja.items()):
            for key2 in sorted(value.keys()):

                # We changed constraint names
                xkey1 = key1
                if '_con1' in xkey1:
                    xkey1 = xkey1.replace('_con1.val', '.ConCh')
                if '_con2' in xkey1:
                    xkey1 = xkey1.replace('_con2.val', '.ConDs')
                if '_con3' in xkey1:
                    xkey1 = xkey1.replace('_con3.val', '.ConS0')
                if '_con4' in xkey1:
                    xkey1 = xkey1.replace('_con4.val', '.ConS1')

                computed = Jb[xkey1][key2]
                actual = Ja[key1][key2]
                if isinstance(computed, np.ndarray):
                    rel = np.linalg.norm(actual - computed)/np.linalg.norm(actual)
                else:
                    rel = np.abs(actual - computed)/np.abs(actual)

                print(xkey1, 'wrt', key2)
                print(computed)
                print(actual)
                if np.mean(actual) > 1e-3 or np.mean(computed) > 1e-3:
                    assert rel <= 1e-3
    def test_newton_backtrack_MPI(self):

        #------------------------------------------------------
        # Test that Newton doesn't drive it past lower bounds
        #------------------------------------------------------

        top = Problem(impl=impl)
        top.root = Group()
        par = top.root.add('par', ParallelGroup())
        par.add('comp1', SimpleImplicitComp1())
        par.add('comp2', SimpleImplicitComp2())
        top.root.ln_solver = PetscKSP()
        top.root.nl_solver = Newton()
        top.root.nl_solver.options['maxiter'] = 5
        top.root.add('px', IndepVarComp('x', np.ones((3, 1))))

        top.root.connect('px.x', 'par.comp1.x')
        top.root.connect('px.x', 'par.comp2.x')
        top.setup(check=False)

        top['px.x'] = np.array([2.0, 2.0, 2.0])
        top.run()

        # Under MPI the two comps live on different ranks, so each result
        # is only checked on the rank guarded for it (rank 0 / rank 1);
        # in serial (no MPI) both checks run.
        if not MPI or self.comm.rank == 0:
            self.assertEqual(top['par.comp1.z'][0], 1.5)
            self.assertEqual(top['par.comp1.z'][1], 1.5)
            self.assertEqual(top['par.comp1.z'][2], 1.5)

        if not MPI or self.comm.rank == 1:
            self.assertEqual(top['par.comp2.z'][0], -1.5)
            self.assertEqual(top['par.comp2.z'][1], -1.5)
            self.assertEqual(top['par.comp2.z'][2], -1.5)

        #------------------------------------------------------
        # Test that Newton doesn't drive it past upper bounds
        #------------------------------------------------------

        # Fresh problem: same model, driven toward the upper bounds instead.
        top = Problem(impl=impl)
        top.root = Group()
        par = top.root.add('par', ParallelGroup())
        par.add('comp1', SimpleImplicitComp1())
        par.add('comp2', SimpleImplicitComp2())
        top.root.ln_solver = PetscKSP()
        top.root.nl_solver = Newton()
        top.root.nl_solver.options['maxiter'] = 5
        top.root.add('px', IndepVarComp('x', np.ones((3, 1))))

        top.root.connect('px.x', 'par.comp1.x')
        top.root.connect('px.x', 'par.comp2.x')
        top.setup(check=False)

        top['px.x'] = 0.5*np.ones((3, 1))
        top.run()

        # Each bound is observed
        # Here the rank guard uses is_active() instead of comm.rank.
        if top.root.par.comp1.is_active():
            self.assertEqual(top['par.comp1.z'][0], 2.6)
            self.assertEqual(top['par.comp1.z'][1], 2.5)
            self.assertEqual(top['par.comp1.z'][2], 2.65)

        if top.root.par.comp2.is_active():
            self.assertEqual(top['par.comp2.z'][0], -2.6)
            self.assertEqual(top['par.comp2.z'][1], -2.5)
            self.assertEqual(top['par.comp2.z'][2], -2.65)