Example #1
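# Setup assumed by the examples below (a minimal sketch based on NLPy's
# AmplModel interface; the import path and attribute names are assumptions,
# not part of the original listing).
import sys
import numpy
from nlpy.model import AmplModel

nlp = AmplModel(sys.argv[1])         # problem supplied as an AMPL .nl file
x0, pi0 = nlp.x0, nlp.pi0            # initial primal and dual estimates
n, m = nlp.n, nlp.m                  # number of variables and constraints
max_n, max_m = min(n, 5), min(m, 5)  # print at most 5 entries of each object
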
print
print ' Evaluating constraints individually, sparse gradients'
print

for i in range(max_m):
    # Evaluate the i-th constraint at x0 and its sparse gradient, which is
    # returned as a dictionary keyed by variable index.
    ci = nlp.icons(i, x0)
    print 'c%d( x0 ) = %-g' % (i, ci)
    sgi = nlp.sigrad(i, x0)
    k = sgi.keys()
    # Keep at most 5 entries of the sparse gradient for display.
    ssgi = {}
    for j in range(min(5, len(k))):
        ssgi[k[j]] = sgi[k[j]]
    print 'grad c%d( x0 ) = ' % i, ssgi
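
# Optional cross-check (sketch): compare a few entries of the sparse gradient of
# constraint 0 against central finite differences of icons(). Only calls already
# used above are needed; the step size h is an arbitrary illustrative choice.
if max_m > 0:
    sg0 = nlp.sigrad(0, x0)
    h = 1.0e-6
    for j in sorted(sg0.keys())[:3]:
        xp = x0.copy()
        xm = x0.copy()
        xp[j] += h
        xm[j] -= h
        fd = (nlp.icons(0, xp) - nlp.icons(0, xm)) / (2 * h)
        print 'dc0/dx%d: analytic = %g, finite difference = %g' % (j, sg0[j], fd)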

print
print ' Testing matrix-vector product:'
print

e = numpy.ones(n, 'd')
e[0] = 2
e[1] = -1
He = nlp.hprod(pi0, e)  # product of the Lagrangian Hessian (multipliers pi0) with e
print 'He = ', He[:max_n]
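
# Optional linearity check (sketch): a matrix-vector product is linear in its
# vector argument, so hprod(pi0, .) should reproduce this up to roundoff. The
# vector e2 below is introduced purely for illustration.
e2 = numpy.zeros(n, 'd')
e2[n - 1] = 1.0
lhs = nlp.hprod(pi0, 2.0 * e + 3.0 * e2)
rhs = 2.0 * He + 3.0 * nlp.hprod(pi0, e2)
print 'linearity residual = %g' % numpy.linalg.norm(lhs - rhs)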

# Output "solution"
nlp.writesol(x0, pi0, 'And the winner is')

# Clean-up
nlp.close()
Example #3
for i in range(max_m):
    # Evaluate each constraint and its sparse gradient, as in Example #1.
    ci = nlp.icons(i, x0)
    print 'c%d( x0 ) = %-g' % (i, ci)
    sgi = nlp.sigrad(i, x0)
    k = sgi.keys()
    ssgi = {}
    for j in range(min(5, len(k))):
        ssgi[k[j]] = sgi[k[j]]
    print 'grad c%d( x0 ) = ' % i, ssgi

print
print ' Testing matrix-vector product:'
print

e = numpy.ones(n, 'd')
e[0] = 2
e[1] = -1
He = nlp.hprod(x0, pi0, e)  # Lagrangian Hessian at (x0, pi0) times e
print 'He = ', He[:max_n]


print
print ' Testing objective scaling:'
print

g = nlp.grad(x0)
print 'Maximum/Minimum gradient (unscaled): %12.5e / %12.5e' \
      % (max(abs(g)), min(abs(g)))
nlp.compute_scaling_obj() # default is to use x0
g = nlp.grad(x0)
print 'Maximum/Minimum gradient (  scaled): %12.5e / %12.5e' \
      % (max(abs(g)), min(abs(g)))
nlp.compute_scaling_obj(reset=True)
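
# Optional (sketch): assuming reset=True removes the objective scaling again,
# the next gradient evaluation should match the unscaled values printed above.
g = nlp.grad(x0)
print 'Maximum/Minimum gradient (  reset ): %12.5e / %12.5e' \
      % (max(abs(g)), min(abs(g)))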