Example 1
def lm_jac(x, f, jac, p):
    # Copy the Jacobian from the ADOL-C tape (adolcID is a tape id assumed
    # to be defined in the enclosing scope) into the solver-provided array.
    tmpjac = adolc.jacobian(adolcID, x)
    m = tmpjac.shape[0]
    for i in range(m):
        jac[i][:] = tmpjac[i, :]

    # Refresh the function value as well, since the solver expects both.
    f[:] = adolc.function(adolcID, x)
    return
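
A minimal sketch of how the tape referenced by adolcID might be recorded beforehand, following the trace_on/independent/dependent/trace_off pattern used elsewhere on this page (the residual function and the tag value 0 are placeholders, not from the original project):

import numpy
import adolc

adolcID = 0  # placeholder tape id; the original code defines adolcID elsewhere

def residual(x):
    # hypothetical residual; the real model is not shown in the snippet above
    return numpy.array([x[0]**2 + x[1] - 1.0, x[0] - x[1]**2])

ax = adolc.adouble(numpy.zeros(2))
adolc.trace_on(adolcID)
adolc.independent(ax)
ay = residual(ax)
adolc.dependent(ay)
adolc.trace_off()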
Example 2
def eval(dev, vPort):
    """
    Evaluates current and charge sources of a nonlinear device. 

    vPort is a numpy vector with input voltages
    """
    try:
        tag = dev._tag
    except AttributeError:
        create_tape(dev, vPort)
        tag = dev._tag

    return ad.function(tag, vPort)
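
The create_tape helper is not shown in this snippet. A plausible sketch, assuming the device exposes an eval_cqs(vPort) method that returns its currents and charges, and that tags come from a simple module-level counter (both are assumptions, not taken from the original source):

import numpy as np
import adolc as ad

_next_tag = [1]  # hypothetical tag counter

def create_tape(dev, vPort):
    # Record one ADOL-C tape for this device and remember its tag on the device.
    tag = _next_tag[0]
    _next_tag[0] += 1
    a_vPort = ad.adouble(np.asarray(vPort, dtype=float))
    ad.trace_on(tag)
    ad.independent(a_vPort)
    a_out = dev.eval_cqs(a_vPort)  # hypothetical device evaluation method
    ad.dependent(a_out)
    ad.trace_off()
    dev._tag = tag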
Example 4
def eval_and_deriv(dev, vPort):
    """
    Evaluates current and charge sources of a nonlinear device. 

    vPort is a numpy vector with input voltages
    
    Returns a tuple with one vector for currents and charges and
    another for the jacobian.
    """
    try:
        tag = dev._tag
    except AttributeError:
        create_tape(dev, vPort)
        tag = dev._tag
    
    fout = ad.function(tag, vPort)
    jac = ad.jacobian(tag, vPort)
    
    return (fout, jac)
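
One way to exercise the pair returned above is a finite-difference check of the taped Jacobian (a sketch; dev and vPort are assumed to come from the surrounding simulator code):

import numpy as np

fout, jac = eval_and_deriv(dev, vPort)
eps = 1e-7
fd = np.empty_like(jac)
for j in range(vPort.size):
    vp = vPort.copy()
    vp[j] += eps
    fd[:, j] = (eval(dev, vp) - fout) / eps  # eval() is the function from Example 2
assert np.allclose(jac, fd, atol=1e-4)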
Example 6
def function(self, x):
    return adolc.function(0, x)

def adFun(x):
    return adolc.function(1, x)
Example 8
def A_jacaA_taped(self, XP):
    return adolc.function(self.adolcID,
                          XP), adolc.jacobian(self.adolcID, XP)
Example 9
def A_taped(self, XP, user_data=None):
    return adolc.function(self.adolcID, XP)

def evaluateCosts(self, cost, x):
    cost[0] = adolc.function(1, x)[0]
Example 11
def _adolc_cons(self, x, **kwargs):
    "Evaluate the constraints from the ADOL-C tape."
    return adolc.function(self._con_trace_id, x)

def eval_f_adolc(x, user_data=None):
    return adolc.function(1, x)[0]
Example 13
import numpy
import math
import adolc

# tape a function evaluation
ax = numpy.array([adolc.adouble(0.) for n in range(2)])
# ay = adolc.adouble(0)
adolc.trace_on(13)
adolc.independent(ax)
ay = numpy.sin(ax[0] + ax[1]*ax[0])
adolc.dependent(ay)
adolc.trace_off()

x = numpy.array([3., 7.])
y = numpy.zeros(1)
adolc.tape_to_latex(13, x, y)

y = adolc.function(13, x)
g = adolc.gradient(13, x)
J = adolc.jacobian(13, x)

print('function y=', y)
print('gradient g=', g)
print('Jacobian J=', J)
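
Since y = sin(x[0] + x[1]*x[0]), the taped gradient can be checked against the hand-computed derivative (a small sanity check added here, not part of the original example):

u = x[0] + x[1] * x[0]
g_exact = numpy.cos(u) * numpy.array([1.0 + x[1], x[0]])
assert numpy.allclose(g, g_exact)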


Example 14
def LLadolc(x):
    return adolc.function(1, x)
Example 15
def function(self, x):
    return adolc.function(0, x)
Example 16
def lm_func(x,f,p):
    f[:] = adolc.function(adolcID,x)
    return
Example 17
def alglib_func(x,grad,p):
    grad[:] = adolc.gradient(adolcID,x)    
    return  adolc.function(adolcID,x)
Example 18
def _adolc_obj(self, x):
    "Evaluate the objective function from the ADOL-C tape."
    return adolc.function(self._obj_trace_id, x)
Example 19
def PyAdolc_vLJ_for_Optimize(x):
    return adolc.function(0, x)
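
A scalar tape like the one assumed here (tag 0) can be handed straight to a gradient-based optimizer. A sketch with scipy.optimize.minimize, where the starting point and the tape contents are assumptions:

import numpy as np
import adolc
from scipy.optimize import minimize

def obj(x):
    return adolc.function(0, x)[0]

def grad(x):
    return adolc.gradient(0, x)

x0 = np.zeros(6)  # placeholder starting point; must match the tape's input dimension
res = minimize(obj, x0, jac=grad, method='BFGS')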
Example 20
    for n, sp in enumerate(sparsity_pattern_list):
        for ns, s in enumerate(sp):
            if s == 1:
                y[ns] *= x[n]

    return y


x = numpy.random.rand(N)

adolc.trace_on(0)
x = adolc.adouble(x)
adolc.independent(x)
y = F(x)
adolc.dependent(y)
adolc.trace_off()

x = numpy.random.rand(N)
y = F(x)
y2 = adolc.function(0, x)
assert numpy.allclose(y, y2)

options = numpy.array([0, 0, 0, 0], dtype=int)
pat = adolc.sparse.jac_pat(0, x, options)
result = adolc.colpack.sparse_jac_no_repeat(0, x, options)

print(adolc.jacobian(0, x))
print(pat)

print(result)
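
For reference, sparse_jac_no_repeat returns the nonzero entries separately; assuming the tuple layout (nnz, row indices, column indices, values) used in pyadolc's sparse examples, the result can be compared against the dense Jacobian:

import numpy
import scipy.sparse

nnz, rows, cols, values = result
J = adolc.jacobian(0, x)
J_sparse = scipy.sparse.coo_matrix((values, (rows, cols)), shape=J.shape)
assert numpy.allclose(J_sparse.toarray(), J)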
Example 21
        for ns, s in enumerate(sp):
            if s == 1:
                y[ns] *= x[n]
        
    return y
        
x = numpy.random.rand(N)

adolc.trace_on(0)
x = adolc.adouble(x)
adolc.independent(x)
y = F(x)
adolc.dependent(y)
adolc.trace_off()

x = numpy.random.rand(N)
y = F(x)
y2 = adolc.function(0,x)
assert numpy.allclose(y,y2)

options = numpy.array([0,0,0,0],dtype=int)
pat = adolc.sparse.jac_pat(0,x,options)
result = adolc.colpack.sparse_jac_no_repeat(0,x,options)

print(adolc.jacobian(0, x))
print(pat)

print(result)


H1 = scipy.sparse.coo_matrix((val, (pat[0], pat[1])), shape=(4, 4))
print "symbolic Hessian=\n", H1

pat = eval_h_adolc(x0, numpy.array([1.0, 2.0]), 1.0, True)
val = eval_h_adolc(x0, numpy.array([1.0, 2.0]), 1.0, False)
H2 = scipy.sparse.coo_matrix((val, (pat[0], pat[1])), shape=(4, 4))
print "pyadolc Hessian=\n", H2

# function of f
assert_almost_equal(eval_f(x0), eval_f_adolc(x0))

# gradient of f
assert_array_almost_equal(eval_grad_f(x0), eval_grad_f_adolc(x0))

# function of g
assert_array_almost_equal(eval_g(x0), adolc.function(2, x0))

# sparse jacobian of g
assert_array_equal(eval_jac_g_adolc(x0, True)[0], eval_jac_g(x0, True)[0])
assert_array_equal(eval_jac_g_adolc(x0, True)[1], eval_jac_g(x0, True)[1])
assert_array_equal(eval_jac_g_adolc(x0, False), eval_jac_g(x0, False))


# test optimization with PYIPOPT
nvar = 4
x_L = numpy.ones((nvar), dtype=numpy.float_) * 1.0
x_U = numpy.ones((nvar), dtype=numpy.float_) * 5.0

ncon = 2
g_L = numpy.array([25.0, 40.0])
g_U = numpy.array([2.0 * pow(10.0, 19), 40.0])
Example 23
def ffcn(self, t, x, f, p, u):
    if self.traced == False:
        dims = {'x': x.size, 'p': p.size, 'u': u.size}
        self.trace(dims)
    v = self.txpu_to_v(t, x, p, u)
    f[:] = adolc.function(123, v)

def eval_g_adolc(x, user_data=None):
    return adolc.function(2, x)
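
txpu_to_v and trace are not shown above; txpu_to_v presumably just packs time, states, parameters and controls into the single flat vector the tape with tag 123 was recorded over. A hypothetical sketch:

import numpy as np

def txpu_to_v(self, t, x, p, u):
    # pack (t, x, p, u) into one flat input vector for the tape
    return np.concatenate(([t], x, p, u))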
Example 25
def __call__(self, x):
    result = adolc.function(self.id, x)
    if self.scaler:
        return result[0]
    return result
Example 26
def A_taped(self, XP):
    return adolc.function(self.adolcID, XP)
Example 27
H1 = scipy.sparse.coo_matrix((val, (pat[0], pat[1])), shape=(4, 4))
print('symbolic Hessian=\n', H1)

pat = eval_h_adolc(x0, numpy.array([1., 2.]), 1., True)
val = eval_h_adolc(x0, numpy.array([1., 2.]), 1., False)
H2 = scipy.sparse.coo_matrix((val, (pat[0], pat[1])), shape=(4, 4))
print('pyadolc Hessian=\n', H2)

# function of f
assert_almost_equal(eval_f(x0), eval_f_adolc(x0))

# gradient of f
assert_array_almost_equal(eval_grad_f(x0), eval_grad_f_adolc(x0))

# function of g
assert_array_almost_equal(eval_g(x0), adolc.function(2, x0))

# sparse jacobian of g
assert_array_equal(eval_jac_g_adolc(x0, True)[0], eval_jac_g(x0, True)[0])
assert_array_equal(eval_jac_g_adolc(x0, True)[1], eval_jac_g(x0, True)[1])
assert_array_equal(eval_jac_g_adolc(x0, False), eval_jac_g(x0, False))

# test optimization with PYIPOPT
nvar = 4
x_L = numpy.ones((nvar), dtype=numpy.float_) * 1.0
x_U = numpy.ones((nvar), dtype=numpy.float_) * 5.0

ncon = 2
g_L = numpy.array([25.0, 40.0])
g_U = numpy.array([2.0 * pow(10.0, 19), 40.0])

def evaluateConstraints(self, g, x):
    ag = adolc.function(2, x)
    np.copyto(g, ag)
Example 29
def eval_f_adolc(x, user_data=None):
    return adolc.function(1, x)[0]
Example 30
def A_gradA_taped(self, XP):
    return adolc.function(self.adolcID,
                          XP), adolc.gradient(self.adolcID, XP)
Example 31
def eval_g_adolc(x, user_data=None):
    return adolc.function(2, x)

def function(self, x):
    return adolc.function(self.tape_number,
                          np.ravel(x))[0]
Example 33
def _adolc_obj(self, x):
    """Evaluate the objective function."""
    return adolc.function(self._obj_trace_id, x)

def const_adolc(x):
    return adolc.function(2, x)
Example 35
def _adolc_cons(self, x, **kwargs):
    """Evaluate the constraints from the ADOL-C tape."""
    return adolc.function(self._con_trace_id, x)