"""Example #1
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE."""

from random import randint
from MLP import MLP
from numpy import array

# Example script: train a small MLP on a 3-feature toy dataset, then
# classify one sample typed in by the user.
mlp = MLP()

# Four training samples, three binary features each.
inputs = array([[0, 0, 1], [1, 1, 1], [1, 0, 1], [0, 1, 1]])
# One expected output per sample, as a column vector (hence the transpose).
training_data = array([[0, 1, 1, 0]]).T

# NOTE(review): targets are passed before inputs here — confirm this matches
# MLP.train_weights' parameter order.
mlp.train_weights(training_data, inputs)

# Read one new sample from the user; dtype=int parses the digit strings
# (raises ValueError on non-integer input).
new_input = array((input('X:'), input('Y:'), input('Z:')), dtype=int)
new_input_result = mlp.feedforward(new_input)

print()
print("New input values: ")
print(new_input)
print()
print("New input result: ")
print(new_input_result)
# Example #2
import numpy as np

from MLP import MLP
from utils import *

# Example script: train an MLP to classify points from a synthetic spiral
# dataset and report accuracy on a held-out test split.

# build dataset: 1000 points, 2 features, 3 spiral arms (classes)
xs, ys = generate_spiral(1000, 2, 3, show=False)
train_xs, train_ys, test_xs, test_ys = train_test_split(xs, ys)

# Standardize features using TRAINING statistics only, applied to both
# splits — using test statistics would leak information into evaluation.
train_mean = np.mean(train_xs, axis=0)
train_std = np.std(train_xs, axis=0)
train_xs = (train_xs - train_mean) / train_std
test_xs = (test_xs - train_mean) / train_std

# Network: 2 input features -> 100 hidden units -> 3 class scores.
layer_sizes = [2, 100, 3]
hyperparam_dict = {'mb_size': 64, 'lr': 0.001}

# Initialize the mini-batch provider (DataSet) and the network.
ds = DataSet(train_xs, train_ys, hyperparam_dict['mb_size'])
mlp = MLP(layer_sizes, hyperparam_dict)

# Train for a fixed number of epochs, one mini-batch step at a time.
n_epochs = 1000
n_batches = n_epochs * ds.batches_in_epoch
for _ in range(n_batches):
    mb_xs, mb_ys = ds.get_mb()
    mlp.fit_mb(mb_xs, mb_ys)

# Evaluate once on the (normalized) test split.
test_output = mlp.feedforward(test_xs)
print(mlp.accuracy(test_output, test_ys))