""" Unit tests for activation.py
Author: Genevieve Hayes
License: 3-clause BSD license.
"""
import unittest
import numpy as np
from activation import identity, sigmoid, softmax, tanh, relu
class TestActivation(unittest.TestCase):
"""Tests for activation.py."""
    @staticmethod
    def test_identity():
        """Test identity activation function"""
        x = np.array([[0, 1, 3],
                      [-1, 0, -5],
                      [1, 0, 3],
                      [10, -9, -7]])

        assert np.array_equal(identity(x), x)
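
    # The identity derivative is 1 everywhere, so deriv=True should give
    # an all-ones array of the same shape as x.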
    @staticmethod
    def test_identity_deriv():
        """Test identity activation function derivative"""
        x = np.array([[0, 1, 3],
                      [-1, 0, -5],
                      [1, 0, 3],
                      [10, -9, -7]])

        assert np.array_equal(identity(x, deriv=True), np.ones([4, 3]))
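
    # sigmoid(x) = 1/(1 + exp(-x)); the expected values are hand-computed
    # to five decimal places, hence the atol of 0.00001.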
    @staticmethod
    def test_sigmoid():
        """Test sigmoid activation function"""
        x = np.array([[0, 1, 3],
                      [-1, 0, -5],
                      [1, 0, 3],
                      [10, -9, -7]])

        y = np.array([[0.5, 0.73106, 0.95257],
                      [0.26894, 0.5, 0.00669],
                      [0.73106, 0.5, 0.95257],
                      [0.99995, 0.00012, 0.00091]])

        assert np.allclose(sigmoid(x), y, atol=0.00001)
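
    # The sigmoid derivative is s(x)*(1 - s(x)), where s is the sigmoid.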
    @staticmethod
    def test_sigmoid_deriv():
        """Test sigmoid activation function derivative"""
        x = np.array([[0, 1, 3],
                      [-1, 0, -5],
                      [1, 0, 3],
                      [10, -9, -7]])

        y = np.array([[0.25, 0.19661, 0.04518],
                      [0.19661, 0.25, 0.00665],
                      [0.19661, 0.25, 0.04518],
                      [0.00005, 0.00012, 0.00091]])

        assert np.allclose(sigmoid(x, deriv=True), y, atol=0.00001)
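
    # relu(x) = max(x, 0), applied element-wise.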
    @staticmethod
    def test_relu():
        """Test relu activation function"""
        x = np.array([[0, 1, 3],
                      [-1, 0, -5],
                      [1, 0, 3],
                      [10, -9, -7]])

        y = np.array([[0, 1, 3],
                      [0, 0, 0],
                      [1, 0, 3],
                      [10, 0, 0]])

        assert np.array_equal(relu(x), y)
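
    # The relu derivative is 1 where x > 0 and 0 elsewhere; note that
    # x = 0 maps to 0 here.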
    @staticmethod
    def test_relu_deriv():
        """Test relu activation function derivative"""
        x = np.array([[0, 1, 3],
                      [-1, 0, -5],
                      [1, 0, 3],
                      [10, -9, -7]])

        y = np.array([[0, 1, 1],
                      [0, 0, 0],
                      [1, 0, 1],
                      [1, 0, 0]])

        assert np.array_equal(relu(x, deriv=True), y)
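
    # tanh saturates quickly: inputs with |x| >= 5 are within 0.0001 of
    # +/-1, and |x| >= 10 rounds to +/-1 at five decimal places.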
    @staticmethod
    def test_tanh():
        """Test tanh activation function"""
        x = np.array([[0, 1, 3],
                      [-1, 0, -5],
                      [1, 0, 3],
                      [10, -9, -7]])

        y = np.array([[0, 0.76159, 0.99505],
                      [-0.76159, 0, -0.99991],
                      [0.76159, 0, 0.99505],
                      [1.00000, -1.00000, -1.00000]])

        assert np.allclose(tanh(x), y, atol=0.00001)
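
    # The tanh derivative is 1 - tanh(x)**2; the looser atol of 0.0001
    # accommodates rounding in the hand-computed expected values.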
    @staticmethod
    def test_tanh_deriv():
        """Test tanh activation function derivative"""
        x = np.array([[0, 1, 3],
                      [-1, 0, -5],
                      [1, 0, 3],
                      [10, -9, -7]])

        y = np.array([[1, 0.41998, 0.00988],
                      [0.41998, 1, 0.00018],
                      [0.41998, 1, 0.00988],
                      [0, 0, -0]])

        assert np.allclose(tanh(x, deriv=True), y, atol=0.0001)
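
    # softmax maps each row to a probability distribution:
    # softmax(x)_i = exp(x_i) / sum_j exp(x_j), computed row-wise.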
    @staticmethod
    def test_softmax():
        """Test softmax activation function"""
        x = np.array([[0, 1, 3],
                      [-1, 0, -5],
                      [1, 0, 3],
                      [10, -9, -7]])

        y = np.array([[0.04201, 0.11420, 0.84379],
                      [0.26762, 0.72747, 0.00490],
                      [0.11420, 0.04201, 0.84379],
                      [1, 0, 0]])

        assert np.allclose(softmax(x), y, atol=0.00001)


if __name__ == '__main__':
    unittest.main()
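
# To run these tests: `python test_activation.py`, or `pytest` if it is
# installed. This assumes activation.py sits alongside this file; in the
# upstream mlrose package the same functions would be imported from
# mlrose.activation instead.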