Exemple #1
0
    def objectiveFunction(inVars, mission, includeDrag):
        """Evaluate the optimizer's objective and constraints.

        Returns the initial-mass objective, the constraint vector
        (equality followed by inequality, joined along axis 1), and a
        fail flag that is always 0.
        """
        # Objective value from the initial-mass cost function.
        objective = costFunction.ObjectiveMassINIT(inVars, mission)

        # Equality constraints come first, inequalities after.
        allCons = np.concatenate(
            (constraints.equality(inVars, mission, 'real', includeDrag),
             constraints.inequality(inVars, mission, 'real')),
            1)

        return objective, allCons, 0
Exemple #2
0
def objectiveFunction(inVars, mission):
    """Evaluate the optimizer's objective and constraints.

    Returns the mass objective, the constraint vector (equality
    followed by inequality, joined along axis 1), and a fail flag
    that is always 0.
    """
    # Objective value from the cost function.
    objective = costFunction.ObjectiveMass(inVars, mission)

    # Stack equality constraints ahead of the inequalities.
    allCons = np.concatenate(
        (constraints.equality(inVars, mission),
         constraints.inequality(inVars, mission)),
        1)

    return objective, allCons, 0
Exemple #3
0
 def objectiveFunction(inVars, mission, includeDrag):
     """Evaluate the optimizer's objective and constraints.

     Returns the mass objective, the constraint vector (equality
     followed by inequality, joined along axis 1), and a fail flag
     that is always 0.
     """
     # Objective value from the cost function.
     objective = costFunction.ObjectiveMass(inVars, mission)

     # Equality constraints first, then the inequalities.
     allCons = np.concatenate(
         (constraints.equality(inVars, mission, 'real', includeDrag),
          constraints.inequality(inVars, mission, 'real')),
         1)

     return objective, allCons, 0
Exemple #4
0
def objectiveFunction(inVars, mission):
    """Evaluate the cost and constraint values for the optimizer.

    NOTE(review): this excerpt appears truncated by the scrape — the
    sibling examples concatenate eq/ineq and return (x, g, fail), but
    that tail is missing here, so the function implicitly returns None.
    """
    x = costFunction.ObjectiveMass(inVars, mission)
    eq = constraints.equality(inVars, mission)
    ineq = constraints.inequality(inVars, mission)
Exemple #5
0
# Re-solve the trajectory optimization repeatedly, warm-starting each
# pass from the previous solution per the mission's iteration schedule.
numIters = mission['numerics']['iterationSchedule']['numIterations']

for curIter in range(numIters):
    # Create the initial guess for the current optimization problem
    mission['numerics']['iterationSchedule']['curIter'] = curIter

    if curIter == 0:
        # First pass: preprocess mission data and build a fresh guess.
        mission = LDP.processAllMissionData(mission)
        vars0 = IC.CreateInitialGuess(mission)
    else:
        # Later passes: update mission and reuse the prior solution.
        mission, vars0 = postProcessing.UpdateMissionAndInitialGuess(mission)
        #vars0 = IC.UpdateInitialGuess(mission)

    # Constraint counts size the optimization problem registered below.
    numEquality = len(constraints.equality(vars0, mission))
    numInequality = len(constraints.inequality(vars0, mission))

    # Find the upper and lower bounds
    #boundsCase = constraints.bounds(mission)
    lb, ub = constraints.getXLXU(vars0, mission)

    #TJC
    # NOTE(review): this variant wraps the two-argument objectiveFunction
    # (no includeDrag) — confirm it matches the intended signature.
    opt_prob = pyOpt.Optimization('Trajectory Optimization',
                                  lambda x: objectiveFunction(x, mission))
    opt_prob.addObj('Objective Mass')

    # Specify all of the variables
    print 'Setting up variables in a hackish way.  MUST CHANGE!!!'
    # One continuous ('c') design variable per entry of vars0.
    # NOTE(review): excerpt is truncated mid-call — the remaining addVar
    # keyword arguments (value/lower/upper) were cut off by the scrape.
    for curVar in range(len(vars0)):
        opt_prob.addVar('var' + str(curVar),
                        'c',
Exemple #6
0
def SNOPTrun(vars0, mission, includeDrag, flagFORCE=1):
    import pyOpt
    import constraints
    import costFunction
    import numpy as np

    # THESE FUNCTIONS ARE USED BY SNOPT
    # Define the functions SNOPT optimizer will call

    def objectiveFunction(inVars, mission, includeDrag):
        x = costFunction.ObjectiveMass(inVars, mission)
        eq = constraints.equality(inVars, mission, 'real', includeDrag)
        ineq = constraints.inequality(inVars, mission, 'real')

        g = np.concatenate((eq, ineq), 1)
        fail = 0
        return x, g, fail

    def sensitivityFunction(inVars, f, g, mission, includeDrag):
        x = costFunction.fprimeObjectiveMass(inVars, mission)
        eq = constraints.fprimeequality(inVars, mission, '2d', includeDrag)
        ineq = constraints.fprimeinequality(inVars, mission, '2d')

        g = np.concatenate((eq, ineq), 0)
        fail = 0
        return x, g, fail

    numEquality = len(constraints.equality(vars0, mission))
    numInequality = len(constraints.inequality(vars0, mission))

    # Find the upper and lower bounds
    #boundsCase = constraints.bounds(mission)
    lb, ub = constraints.getXLXU(vars0, mission)

    #TJC
    opt_prob = pyOpt.Optimization(
        'Trajectory Optimization',
        lambda x: objectiveFunction(x, mission, includeDrag))
    opt_prob.addObj('Objective Mass')

    # Specify all of the variables
    #print 'Setting up variables in a hackish way.  MUST CHANGE!!!'
    for curVar in range(len(vars0)):
        opt_prob.addVar('var' + str(curVar),
                        'c',
                        value=vars0[curVar],
                        lower=lb[curVar],
                        upper=ub[curVar])

    # Now add in equality constraints
    for curCon in range(numEquality):
        opt_prob.addCon('g' + str(curCon), 'e')

    # Now add in inequality constraints
    for curCon in range(numEquality, numEquality + numInequality):
        opt_prob.addCon('g' + str(curCon), 'i')

    # Confirm that everything is correct
    #print opt_prob

    # Set up the optimizer
    snopt = pyOpt.pySNOPT.SNOPT()
    snopt.setOption('Major feasibility tolerance', value=5e-6)
    snopt.setOption('Major optimality tolerance', value=1e-5)
    snopt.setOption('Minor feasibility tolerance', value=5e-6)
    snopt.setOption('Major iterations limit', 500)
    print 'Using SNOPT'

    # Optimize and save results
    sens2 = lambda x, f, g: sensitivityFunction(x, f, g, mission, includeDrag)

    # by default will try complex step first...if fails...then finite diference
    exitVal = snopt(opt_prob, sens_type=sens2)

    infoOpt = exitVal[2]['text']
    if infoOpt != 'finished successfully' and flagFORCE == 1:
        print 'Failed to finish successfully with CS .... trying FD'
        exitVal = snopt(opt_prob, sens_type='FD')

    return exitVal
Exemple #7
0
def SNOPTrun(vars0,mission,includeDrag,flagFORCE=1):
    import pyOpt
    import constraints
    import costFunction
    import numpy as np
            
    # THESE FUNCTIONS ARE USED BY SNOPT
    # Define the functions SNOPT optimizer will call


    def objectiveFunction(inVars,mission,includeDrag):
        x=costFunction.ObjectiveMass(inVars,mission)
        eq=constraints.equality(inVars,mission,'real',includeDrag)
        ineq=constraints.inequality(inVars,mission,'real')
        
        g = np.concatenate((eq,ineq),1)
        fail = 0
        return x,g,fail

    def sensitivityFunction(inVars,f,g,mission,includeDrag):
        x = costFunction.fprimeObjectiveMass(inVars,mission)
        eq = constraints.fprimeequality(inVars,mission,'2d',includeDrag)
        ineq = constraints.fprimeinequality(inVars,mission,'2d')
        
        g = np.concatenate((eq,ineq),0)
        fail = 0
        return x,g,fail


    numEquality = len(constraints.equality(vars0,mission))
    numInequality = len(constraints.inequality(vars0,mission))

    # Find the upper and lower bounds
    #boundsCase = constraints.bounds(mission)
    lb,ub = constraints.getXLXU(vars0,mission)


    #TJC
    opt_prob = pyOpt.Optimization('Trajectory Optimization',lambda x: objectiveFunction(x,mission,includeDrag))
    opt_prob.addObj('Objective Mass')

    # Specify all of the variables
    #print 'Setting up variables in a hackish way.  MUST CHANGE!!!'
    for curVar in range(len(vars0)):
     opt_prob.addVar('var'+str(curVar), 'c', value=vars0[curVar], lower=lb[curVar], upper=ub[curVar])

    # Now add in equality constraints
    for curCon in range(numEquality):
     opt_prob.addCon('g' + str(curCon), 'e')

    # Now add in inequality constraints
    for curCon in range(numEquality,numEquality + numInequality):
     opt_prob.addCon('g' + str(curCon), 'i')

    # Confirm that everything is correct
    #print opt_prob

    # Set up the optimizer
    snopt = pyOpt.pySNOPT.SNOPT()
    snopt.setOption('Major feasibility tolerance',value=5e-6)
    snopt.setOption('Major optimality tolerance',value=1e-5)
    snopt.setOption('Minor feasibility tolerance',value=5e-6)
    snopt.setOption('Major iterations limit',500)
    print 'Using SNOPT'

    # Optimize and save results
    sens2 = lambda x,f,g:sensitivityFunction(x,f,g,mission,includeDrag)

    # by default will try complex step first...if fails...then finite diference
    exitVal = snopt(opt_prob,sens_type= sens2)
    
    infoOpt = exitVal[2]['text']
    if infoOpt!='finished successfully' and flagFORCE==1:
       print 'Failed to finish successfully with CS .... trying FD'
       exitVal = snopt(opt_prob,sens_type= 'FD')

    return exitVal