forked from tisimst/ad
-
Notifications
You must be signed in to change notification settings - Fork 0
/
test_adarray.py
127 lines (87 loc) · 3.09 KB
/
test_adarray.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
from __future__ import division
import pytest
from . import adnumber, ADF, array, dot, tensordot, fftconvolve, sum, get_order
import numpy as np
import random
'''To run, do
PYTHONPATH=$PWD py.test -v test_adarray.py
from the package directory'''
def check_gradient(x_ad, x_np, *variables):
    """Assert that AD-array derivatives match entry-wise adnumber derivatives.

    x_ad -- an AD array/scalar exposing .gradient(vars) and .hessian(vars)
    x_np -- a numpy object array of adnumber entries exposing .d(v),
            .d2(v) and .d2c(u, v)
    *variables -- the adnumber variables to differentiate with respect to

    Compares via the log of the entry-wise ratio, so values must agree to
    ~1e-12 relative error (and must be same-signed and nonzero).
    Hessians are only checked when get_order() > 1.
    """
    grad1 = x_ad.gradient(variables)
    grad2 = []
    for v in variables:
        curr = np.zeros(shape=x_np.shape)
        for i, xi in np.ndenumerate(x_np):
            curr[i] = xi.d(v)
        grad2.append(curr)
    # Parenthesized tuple target: `for g in grad1,grad2` is Python-2-only
    # syntax and a SyntaxError under Python 3.
    grad1, grad2 = [np.array(g) for g in (grad1, grad2)]
    assert np.max(np.abs(np.log(grad1 / grad2))) < 1e-12
    if get_order() == 1:
        # First-order mode: no Hessian to verify.
        return
    hess1 = x_ad.hessian(variables)
    hess2 = []
    for v in variables:
        currRow = []
        hess2.append(currRow)
        for u in variables:
            # Build the (v, u) entry of the reference Hessian entry-wise.
            curr = np.zeros(shape=x_np.shape)
            for i, xi in np.ndenumerate(x_np):
                if u is v:
                    curr[i] = xi.d2(v)
                else:
                    curr[i] = xi.d2c(u, v)
            currRow.append(curr)
    hess1, hess2 = [np.array(h) for h in (hess1, hess2)]
    assert np.max(np.abs(np.log(hess1 / hess2))) < 1e-12
def get_random_monomials(args, shape=None, maxorder=5, minorder=1):
    """Return an object array whose entries are random monomials in *args*.

    Each entry is the product, over every variable x in args, of
    x**k with k drawn uniformly from [minorder, maxorder] per entry.
    Every element of args must be an ADF instance.
    """
    result = np.zeros(shape=shape, dtype=object)
    result.fill(array(1))
    for x in args:
        assert isinstance(x, ADF)
        for idx, _ in np.ndenumerate(result):
            # Same draw order and multiplication order as before:
            # new power term multiplies in from the left.
            power = random.randint(minorder, maxorder)
            result[idx] = (x ** power) * result[idx]
    return result
def test_polynomial():
    """Gradient/Hessian of a summed elementwise product of monomial arrays."""
    x, y = adnumber(np.random.normal(size=2))
    u, v, w = (get_random_monomials([x, y], shape=5) for _ in range(3))
    ad_result = sum(array(u) * array(v) * array(w))
    np_result = np.sum(u * v * w)
    check_gradient(ad_result, np_result, x, y)
def test_dot():
    """AD dot product of two 1-D monomial vectors matches numpy's."""
    x, y = adnumber(np.random.normal(size=2))
    u, v = (get_random_monomials([x, y], shape=5) for _ in range(2))
    ad_result = dot(array(u), array(v))
    np_result = np.dot(u, v)
    check_gradient(ad_result, np_result, x, y)
def test_dot2():
    """AD matrix-vector product matches numpy's (10x5 dot 5)."""
    x, y = adnumber(np.random.normal(size=2))
    matrix = get_random_monomials([x, y], shape=(10, 5))
    vector = get_random_monomials([x, y], shape=5)
    ad_result = dot(array(matrix), array(vector))
    np_result = np.dot(matrix, vector)
    check_gradient(ad_result, np_result, x, y)
def test_convolve():
    """FFT-based AD convolution matches numpy's direct np.convolve."""
    x, y = adnumber(np.random.normal(size=2))
    left = get_random_monomials([x, y], shape=(10,))
    right = get_random_monomials([x, y], shape=(10,))
    ad_result = fftconvolve(array(left), array(right))
    np_result = np.convolve(left, right)
    check_gradient(ad_result, np_result, x, y)
def test_getitem():
    """Broadcasting a sliced (v[:, None]) AD array against a matrix."""
    x, y = adnumber(np.random.normal(size=2))
    matrix = get_random_monomials([x, y], shape=(5, 5))
    vector = get_random_monomials([x, y], shape=5)
    np_result = matrix * vector[:, None]
    ad_result = array(matrix) * (array(vector)[:, None])
    check_gradient(ad_result, np_result, x, y)
def test_tensordot():
    """AD tensordot over the last two axes matches numpy's tensordot."""
    x, y, z = adnumber(np.random.normal(size=3))
    lhs = get_random_monomials([x, y, z], shape=(2, 3, 4, 5))
    rhs = get_random_monomials([x, y, z], shape=(2, 3, 4, 5))
    # Contract axes 2 and 3 of both operands.
    contract_axes = [[2, 3], [2, 3]]
    np_result = np.tensordot(lhs, rhs, axes=contract_axes)
    ad_result = tensordot(array(lhs), array(rhs), axes=contract_axes)
    check_gradient(ad_result, np_result, x, y, z)