Example 1
from openmdao.api import Problem, Group, IndepVarComp, ExecComp, ScipyOptimizer

# The Paraboloid component (the OpenMDAO tutorial component computing
# f_xy = (x - 3)**2 + x*y + (y + 4)**2 - 3) is assumed to be defined or
# imported elsewhere in the surrounding script.


class Sub(Problem):
    def __init__(self):
        super(Sub, self).__init__()

        self.root = Group()

        # Add the Paraboloid component to sub's root Group (it could equally
        # well be placed inside a nested Group)
        self.root.add('P', Paraboloid())

        # Initialize x and y values in separate IndepVarComps and add them to sub's root Group
        self.root.add('p1', IndepVarComp('x', 13.0))
        self.root.add('p2', IndepVarComp('y', -14.0))

        # Define a constraint equation and add it to sub's root Group
        self.root.add('con', ExecComp('c = x - y'))

        # Connect 'p1.x' and 'p2.y' to 'con.x' and 'con.y' respectively
        self.root.connect('p1.x', 'con.x')
        self.root.connect('p2.y', 'con.y')

        # Connect the IndepVarComps 'p1.x' and 'p2.y' to 'P.x' and 'P.y' respectively
        self.root.connect('p1.x', 'P.x')
        self.root.connect('p2.y', 'P.y')

        # Instantiate sub's optimization driver
        self.driver = ScipyOptimizer()

        # Modify the optimization driver's settings
        self.driver.options['optimizer'] = 'COBYLA'  # Type of optimizer; COBYLA does not require derivatives
        self.driver.options['tol'] = 1.0e-4  # Tolerance for termination (default: 1.0e-6)
        self.driver.options['maxiter'] = 200  # Maximum iterations (default: 200)
        self.driver.opt_settings['rhobeg'] = 1.0  # Initial step size (default: 1.0)
        # self.driver.opt_settings['catol'] = 0.1  # Absolute tolerance for constraint violations

        # Add design variables, objective, and constraints to the optimization driver
        self.driver.add_desvar('p1.x', lower=-50, upper=50)
        self.driver.add_objective('P.f_xy')
        self.driver.add_constraint('con.c', lower=15.0)
        # COBYLA does not enforce design-variable bounds directly, so the bounds
        # on 'p1.x' are also added as a constraint to limit how far the optimizer
        # can stray outside them
        self.driver.add_constraint('p1.x', lower=-50.0, upper=50.0)
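A minimal usage sketch for the sub-problem above, assuming OpenMDAO 1.x and a tutorial-style Paraboloid component; the driver script below is illustrative and not part of the original snippet:

if __name__ == '__main__':
    # Hypothetical driver script: build the sub-problem, run its COBYLA
    # optimization, and read the results back out of the unknowns vector.
    sub = Sub()
    sub.setup()
    sub.run()
    print('x    =', sub['p1.x'])
    print('y    =', sub['p2.y'])
    print('f_xy =', sub['P.f_xy'])
    print('c    =', sub['con.c'])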
Example 2
import numpy as np

from openmdao.api import Problem, Group, ParallelGroup, IndepVarComp, ScipyOptimizer

# `impl` (the data-passing implementation handed to Problem), the standard
# `SellarDerivatives` model, and the `Randomize`/`Collector` helper components
# are assumed to be defined or imported elsewhere in the surrounding example.


class BruteForceSellarProblem(Problem):
    """ Performs optimization on the Sellar problem.

        Applies a normal distribution to the design vars and runs all of the
        samples, then collects the values of all of the outputs, calculates
        the mean of those and stuffs that back into the unknowns vector.

        This is the brute force version that just stamps out N separate
        sellar models in a parallel group and sets the input of each
        one to be one of these random design vars.

    Args
    ----
    n : number of randomized points to generate for each input value

    derivs : if True, use user-defined derivatives, else use Finite Difference
    """
    def __init__(self, n=10, derivs=False):
        super(BruteForceSellarProblem, self).__init__(impl=impl)

        root = self.root = Group()
        if not derivs:
            root.deriv_options['type'] = 'fd'

        # Stamp out n independent copies of the Sellar model in a ParallelGroup,
        # feed each one its own slice of the randomized inputs, and route its
        # outputs to its own slot in the collector
        sellars = root.add('sellars', ParallelGroup())
        for i in range(n):
            name = 'sellar%i' % i
            sellars.add(name, SellarDerivatives())

            root.connect('dist_x', 'sellars.' + name + '.x', src_indices=[i])
            root.connect('dist_z',
                         'sellars.' + name + '.z',
                         src_indices=[i * 2, i * 2 + 1])

            root.connect('sellars.' + name + '.obj', 'collect.obj_%i' % i)
            root.connect('sellars.' + name + '.con1', 'collect.con1_%i' % i)
            root.connect('sellars.' + name + '.con2', 'collect.con2_%i' % i)

        # Independent design variables, promoted to the top level
        root.add('indep',
                 IndepVarComp([('x', 1.0), ('z', np.array([5.0, 2.0]))]),
                 promotes=['x', 'z'])

        # Draw n normally distributed samples of x and z
        root.add(
            'random',
            Randomize(
                n=n,
                params=[
                    # name, value, std dev
                    ('x', 1.0, 1e-2),
                    ('z', np.array([5.0, 2.0]), 1e-2)
                ]),
            promotes=['x', 'z', 'dist_x', 'dist_z'])

        # Average each output over the n samples
        root.add('collect',
                 Collector(n=n, names=['obj', 'con1', 'con2']),
                 promotes=['obj', 'con1', 'con2'])

        # top level driver setup
        self.driver = ScipyOptimizer()
        self.driver.options['optimizer'] = 'SLSQP'
        self.driver.options['tol'] = 1.0e-8
        self.driver.options['maxiter'] = 50
        self.driver.options['disp'] = False

        self.driver.add_desvar('z',
                               lower=np.array([-10.0, 0.0]),
                               upper=np.array([10.0, 10.0]))
        self.driver.add_desvar('x', lower=0.0, upper=10.0)

        self.driver.add_objective('obj')
        self.driver.add_constraint('con1', upper=0.0)
        self.driver.add_constraint('con2', upper=0.0)
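The `Randomize` and `Collector` helpers referenced above are not shown in this snippet. Below is a minimal sketch of what they might look like under the OpenMDAO 1.x Component API; the components in the original example may differ (for instance by providing analytic derivatives, which this sketch leaves to finite difference):

import numpy as np
from openmdao.api import Component


class Randomize(Component):
    """Draw n normally distributed samples around each input value (sketch)."""

    def __init__(self, n=10, params=None):
        super(Randomize, self).__init__()
        self.n = n
        self.dists = params or []
        for name, value, std in self.dists:
            self.add_param(name, val=value)
            size = np.asarray(value).size
            # flattened array holding n samples of this input
            self.add_output('dist_' + name, val=np.zeros(n * size))

    def solve_nonlinear(self, params, unknowns, resids):
        for name, value, std in self.dists:
            size = np.asarray(value).size
            samples = np.random.normal(params[name], std, (self.n, size))
            unknowns['dist_' + name] = samples.flatten()


class Collector(Component):
    """Average the n copies of each named output back into one value (sketch)."""

    def __init__(self, n=10, names=None):
        super(Collector, self).__init__()
        self.n = n
        self.names = names or []
        for name in self.names:
            for i in range(n):
                self.add_param('%s_%i' % (name, i), val=0.0)
            self.add_output(name, val=0.0)

    def solve_nonlinear(self, params, unknowns, resids):
        for name in self.names:
            vals = [params['%s_%i' % (name, i)] for i in range(self.n)]
            unknowns[name] = np.mean(vals)

With those helpers (plus `SellarDerivatives` and `impl`) available, the problem runs like any other OpenMDAO 1.x problem:

prob = BruteForceSellarProblem(n=10, derivs=False)
prob.setup(check=False)
prob.run()
print(prob['obj'], prob['con1'], prob['con2'])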