Example #1
	x0 = array([v[0], 1., 0.])
	x = explicit_euler(x0,f1,ts,p,q)
	h = measurement_model(x[:,0],p,q)
	etas = h + numpy.random.normal(size=Nm)
	p[0] -= 3.
	p[1] -= 2.

	# taping F
	av = array([adolc.adouble(0) for i in range(Nv)])
	y = zeros(Nm)
	adolc.trace_on(1)
	av[0].is_independent(p[0])
	av[1].is_independent(p[1])
	av[2].is_independent(q[0])
	ay = F(av[:Np],av[Np:],ts,Sigma,etas)
	for m in range(Nm):
		y[m] = adolc.depends_on(ay[m])
	adolc.trace_off()

	# taping dFdp
	av = array([adolc.adouble(0) for i in range(Nv)])
	adolc.trace_on(2)	# distinct tape tag, so the F tape (tag 1) is not overwritten
	av[0].is_independent(p[0])
	av[1].is_independent(p[1])
	av[2].is_independent(q[0])
	ay = dFdp(av[:Np],av[Np:],ts,Sigma,etas)
	for m in range(Nm):
		for n in range(Np):
			adolc.dependent(ay[m,n])
	adolc.trace_off()
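	# Hedged sketch (not in the original source): with the two distinct tape
	# tags, both tapes can be evaluated by PyADOLC's standard drivers. The
	# independent vector stacks the taped values in order: p[0], p[1], q[0].
	v_all = numpy.concatenate([p, q])      # assumes p, q are the arrays taped above
	F_vals = adolc.function(1, v_all)      # values of F (length Nm)
	dFdp_vals = adolc.function(2, v_all)   # values of dFdp (Nm*Np entries, flattened)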

	# tape the objective function with Algopy
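	# Hedged sketch (the excerpt ends here; Phi is a hypothetical scalar
	# objective, not defined above): with algopy, one records the objective
	# on a CGraph, analogous to the matrix-function taping in Example #2.
	# cg = CGraph()
	# fv = Function(v_all)                 # wrap the parameter vector
	# fPhi = Phi(fv)                       # record the objective
	# cg.independentFunctionList = [fv]
	# cg.dependentFunctionList = [fPhi]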
Example #2
def ffcn(x):
	return 0.5*array([[(x[0]-17.)*(x[0]-17.), (x[0]-17.)*(x[0]-17.)],
	                  [ x[1]-19.,             x[1]-19.]])

# TAPING THE FUNCTIONS
# --------------------
# taping function ffcn
u = 3.; v = 7.
ax = array([adolc.adouble(u), adolc.adouble(v)])
adolc.trace_on(1)
ax[0].is_independent(u)
ax[1].is_independent(v)
ay = ffcn(ax)
for n in range(2):
	for m in range(2):
		adolc.depends_on(ay[n,m])
adolc.trace_off()
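# Hedged sketch (not in the original): evaluating the recorded tape with the
# PyADOLC drivers; the 2x2 output of ffcn is flattened to four dependents.
y_taped = adolc.function(1, array([u, v]))   # shape (4,): taped values of ffcn
J_taped = adolc.jacobian(1, array([u, v]))   # shape (4, 2): d vec(ffcn) / d(u, v)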

# taping matrix functions with algopy
x = array([u,v])
F = ffcn(x)
Fdot = zeros((2,2))
cg = CGraph()                       # algopy computational graph that records operations
FF = Function(Mtc(F))               # wrap F as a matrix of Taylor coefficients
Fy = Phi(FF)                        # evaluate the matrix objective Phi on the graph
cg.independentFunctionList = [FF]
cg.dependentFunctionList = [Fy]

# COMPUTING THE HESSIAN H = d^2 Phi/ dx^2
# ---------------------------------------
# for that we need to propagate two directions
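# Hedged sketch (an assumption, not the source's method): had the scalar
# composition Phi(ffcn(x)) been taped, e.g. on tape 2, PyADOLC's hessian
# driver would do the two-direction propagation internally:
# H = adolc.hessian(2, array([u, v]))    # tape 2 is hypothetical here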
    tape_eval_times.append(end_time - start_time)

    ## reverse evaluation
    start_time = time()
    g_reverse2 = rm.gradient_from_graph(cg)
    end_time = time()
    rev_eval_times.append(end_time - start_time)

    ## PyADOLC taping
    start_time = time()
    ax = numpy.array([adolc.adouble(0.) for i in range(N)])
    adolc.trace_on(0)
    for n in range(N):
        ax[n].is_independent(x[n])
    ay = f(ax)
    adolc.depends_on(ay)
    adolc.trace_off()
    end_time = time()
    adolc_tape_times.append(end_time - start_time)

    ## PyADOLC gradient
    start_time = time()
    adolc_g = adolc.gradient(0, x)
    end_time = time()
    adolc_gradient_times.append(end_time - start_time)

    ### check that both derivatives give the same result
    # print('difference between forward and reverse gradient computation:', numpy.linalg.norm(g_forward - g_reverse))
    # print('difference between forward and reverse gradient2 computation:', numpy.linalg.norm(g_forward - g_reverse2))
    # print('difference between Algopy and PyADOLC:', numpy.linalg.norm(adolc_g - g_reverse2))
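# Hedged sketch (not in the original): assuming the *_times lists collect one
# entry per problem size N, summarize the mean runtimes of each stage.
print('mean Algopy tape evaluation time:', numpy.mean(tape_eval_times))
print('mean Algopy reverse gradient time:', numpy.mean(rev_eval_times))
print('mean PyADOLC taping time:', numpy.mean(adolc_tape_times))
print('mean PyADOLC gradient time:', numpy.mean(adolc_gradient_times))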