示例#1
0
def show_spiral():
    """Scatter-plot the three spiral classes (100 consecutive points each)."""
    data, _ = spiral.load_data()
    # Each class occupies a contiguous run of 100 rows; one series per class.
    for start in range(0, 300, 100):
        plt.scatter(data[start:start + 100, 0], data[start:start + 100, 1])
    plt.show()
示例#2
0
def show_learning_spiral(model, loss_list, x):
    """Draw the model's decision regions with the spiral samples overlaid.

    model: trained network exposing ``predict`` over (M, 2) inputs.
    loss_list: training-loss history (curve plotting is kept disabled below).
    x: (N, 2) input array; only its bounding box is used for the plot window.
    """
    # Learning-curve plot, deliberately left disabled as in the original demo:
    #plt.plot(loss_list)
    #plt.show()

    # Dense grid covering the data's bounding box (small margin on each side).
    step = 0.001
    x_lo, x_hi = x[:, 0].min() - .1, x[:, 0].max() + .1
    y_lo, y_hi = x[:, 1].min() - .1, x[:, 1].max() + .1
    grid_x, grid_y = np.meshgrid(np.arange(x_lo, x_hi, step),
                                 np.arange(y_lo, y_hi, step))
    flat_points = np.c_[grid_x.ravel(), grid_y.ravel()]
    # Color every grid cell by its predicted class.
    labels = np.argmax(model.predict(flat_points), axis=1)
    plt.contourf(grid_x, grid_y, labels.reshape(grid_x.shape))
    plt.axis('off')

    # Overlay the data points, one marker per class.
    # NOTE(review): data is reloaded without a seed here; if the caller loaded
    # with a seed argument, these points may differ from *x* — confirm.
    pts, _ = spiral.load_data()
    per_class = 100
    for cls, mark in enumerate(['o', 'x', '^']):
        lo = cls * per_class
        plt.scatter(pts[lo:lo + per_class, 0],
                    pts[lo:lo + per_class, 1],
                    s=40,
                    marker=mark)
    plt.show()
示例#3
0
def main():
    """Train a two-layer net on the spiral dataset via the Trainer helper."""
    # Hyperparameters.
    max_epoch = 300
    batch_size = 30
    hidden_size = 10
    learning_rate = 1.0

    # Data, model and optimizer.
    data, labels = spiral.load_data()
    net = TwoLayerNet(input_size=2, hidden_size=hidden_size, output_size=3)
    sgd = SGD(lr=learning_rate)

    coach = Trainer(net, sgd)
    coach.fit(data, labels, max_epoch, batch_size, eval_interval=10)
示例#4
0
文件: sample.py 项目: jojonki/BabyNet
def main():
    """Fit a TwoLayerNet on the spiral data, then plot the training curve."""
    # Hyperparameters.
    n_epochs = 300
    n_batch = 30
    n_hidden = 10
    n_out = 3
    lr = 1.0

    inputs, targets = spiral.load_data()
    net = TwoLayerNet(2, n_hidden, n_out)
    trainer = Trainer(net, SGD(lr))
    trainer.fit(inputs, targets, n_epochs, n_batch)
    trainer.plot()
示例#5
0
def spiral_prot():
    """Print the spiral data shapes and scatter-plot the three classes."""
    data, labels = spiral.load_data()
    print(f"x \n{data.shape}")
    print(f"t \n{labels.shape}")

    # One scatter series (and marker) per class; classes are 100 rows each.
    class_size = 100
    for idx, mk in enumerate(["o", "x", "^"]):
        lo = idx * class_size
        plt.scatter(data[lo:lo + class_size, 0],
                    data[lo:lo + class_size, 1],
                    s=40,
                    marker=mk)
    plt.show()
import sys

sys.path.append('..')  # allow importing modules from the parent directory
from dataset import spiral
import matplotlib.pyplot as plt

x, t = spiral.load_data()
print('x', x.shape)  # (300, 2)
print('t', t.shape)  # (300, 3)

# Plot the data points: one scatter series per class (100 points each)
N = 100
CLS_NUM = 3
markers = ['o', 'x', '^']
for i in range(CLS_NUM):
    plt.scatter(x[i * N:(i + 1) * N, 0],
                x[i * N:(i + 1) * N, 1],
                s=40,
                marker=markers[i])
plt.show()
import sys
sys.path.append('..')  # allow importing modules from the parent directory
import numpy as np
from common.optimizer import SGD
from dataset import spiral
import matplotlib.pyplot as plt
from two_layer_net import TwoLayerNet

# Hyperparameters
max_epoch = 300
batch_size = 30
hidden_size = 10
learning_rate = 1.0

# Load the data; create the model and the optimizer
x, t = spiral.load_data(
)  # (300,2), (300,3): 300 training samples, 2 input features each; t holds one-hot labels over 3 classes
#print(f'x.shape:{x.shape}, type(x):{type(x)}\nt.shape:{t.shape}, type(t):{type(t)}')
model = TwoLayerNet(input_size=2, hidden_size=hidden_size, output_size=3)
optimizer = SGD(lr=learning_rate)

# Variables used during training
data_size = len(x)
max_iters = data_size // batch_size  # floor division: the remainder samples are dropped
#print(data_size, batch_size, max_iters, 11//3);exit(1)
total_loss = 0
loss_count = 0
loss_list = []

for epoch in range(max_epoch):
    # 데이터 뒤섞기
    idx = np.random.permutation(data_size)  #0~data_size-1 숫자를 랜덤으로 섞어줌.
示例#8
0
    b = np.array([4, 5, 6])
    print(np.dot(a, b))
    # 行列の積
    A = np.array([[1, 2], [3, 4]])
    B = np.array([[5, 6], [7, 8]])
    print(np.dot(A, B))


def all_connection():
    """Push 10 random 2-D samples through a tiny 2-4-3 fully connected net."""
    # Hidden-layer parameters (2 -> 4), then output-layer parameters (4 -> 3).
    W1 = np.random.randn(2, 4)
    b1 = np.random.randn(4)
    W2 = np.random.randn(4, 3)
    b2 = np.random.randn(3)

    samples = np.random.randn(10, 2)  # 10 sample points
    hidden = np.dot(samples, W1) + b1
    print(hidden)
    # NOTE(review): Sigmoid.forward is invoked on the class, not an instance;
    # if forward is an instance method (as in common.layers) this passes
    # `hidden` as self and raises — confirm whether forward is static here.
    activated = Sigmoid.forward(hidden)
    scores = np.dot(activated, W2) + b2
    print(scores)


if __name__ == "__main__":
    # Earlier experiments, kept disabled:
    #show_sigmoid()
    #show_spiral()
    #all_connection()
    data, labels = spiral.load_data(4343)  # 4343 — presumably an RNG seed; verify against load_data
    trained_model, losses = train_custom_loop(data, labels)
    show_learning_spiral(trained_model, losses, data)
def _train_spiral(model, optimizer, x, t, max_epoch, batch_size):
    """Run the mini-batch SGD loop and return the averaged-loss history.

    The data is reshuffled every epoch; the running-average loss is printed
    and recorded every 10 iterations (counters carry across epochs so the
    averaging window is always exactly 10 updates).
    """
    data_size = len(x)
    max_iters = data_size // batch_size
    total_loss = 0
    loss_count = 0
    loss_list = []

    for epoch in range(max_epoch):
        # Shuffle the data (fancy indexing copies, so the caller's arrays
        # are left untouched).
        idx = np.random.permutation(data_size)
        x = x[idx]
        t = t[idx]

        for iters in range(max_iters):
            batch_x = x[iters*batch_size:(iters+1)*batch_size]
            batch_t = t[iters*batch_size:(iters+1)*batch_size]

            # Forward/backward pass, then parameter update.
            loss = model.forward(batch_x, batch_t)
            model.backward()
            optimizer.update(model.params, model.grads)

            total_loss += loss
            loss_count += 1

            # Periodically report and record the running average loss.
            if (iters + 1) % 10 == 0:
                avg_loss = total_loss / loss_count
                print(f"| epoch {epoch+1} | iter {iters+1} / {max_iters} | loss {avg_loss}")
                loss_list.append(avg_loss)
                total_loss, loss_count = 0, 0

    return loss_list


def _plot_loss(loss_list):
    """Plot the recorded average losses against their evaluation index."""
    plt.plot(np.arange(len(loss_list)), np.asarray(loss_list))
    plt.xlabel('iterations (x10)')
    plt.ylabel('loss')
    plt.show()


def _plot_decision_regions(model, x):
    """Color the plane by predicted class and overlay the spiral samples.

    x is only used for its bounding box (plot limits).
    """
    # Dense grid over the data's bounding box, with a small margin.
    h = 0.001
    x_min, x_max = x[:, 0].min() - 0.1, x[:, 0].max() + 0.1
    y_min, y_max = x[:, 1].min() - 0.1, x[:, 1].max() + 0.1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
    X = np.c_[xx.ravel(), yy.ravel()]
    score = model.predict(X)
    predict_cls = np.argmax(score, axis=1)
    Z = predict_cls.reshape(xx.shape)
    plt.contourf(xx, yy, Z)
    plt.axis('off')

    # Overlay freshly loaded data points, one marker per class.
    x, t = spiral.load_data()
    N = 100
    CLS_NUM = 3
    markers = ['o', 'x', '^']
    for i in range(CLS_NUM):
        plt.scatter(x[i*N:(i+1)*N, 0], x[i*N:(i+1)*N, 1], s=40, marker=markers[i])
    plt.show()


def main():
    """Train a TwoLayerNet on the spiral data, then plot loss and regions."""
    # (1) Hyperparameters.
    max_epoch = 300
    batch_size = 30
    hidden_size = 10
    learning_rate = 1.0

    # (2) Load the data; build the model and the optimizer.
    x, t = spiral.load_data()
    model = TwoLayerNet(input_size=2, hidden_size=hidden_size, output_size=3)
    optimizer = SGD(lr=learning_rate)

    loss_list = _train_spiral(model, optimizer, x, t, max_epoch, batch_size)
    print(f"loss_list: \n{loss_list}")

    _plot_loss(loss_list)
    # min/max bounds are shuffle-invariant, so passing the original x is
    # equivalent to the post-training shuffled copy the inline version used.
    _plot_decision_regions(model, x)
sys.path.append('..')  # allow importing files from the parent directory
import numpy as np
from common.optimizer import SGD
from dataset import spiral
import matplotlib.pyplot as plt
from two_layer_net import TwoLayerNet


# Hyperparameters
max_epoch = 300
batch_size = 30
hidden_size = 10
learning_rate = 1.0

# NOTE: if load_data() is given its random-seed argument, later load_data() calls should use the same seed.
x, t = spiral.load_data()  # 300 coordinate pairs and 300 one-hot correct labels.
# 2 input neurons, 10 neurons in the single hidden layer, 3 output neurons.
model = TwoLayerNet(input_size=2, hidden_size=hidden_size, output_size=3)
optimizer = SGD(lr=learning_rate)  # learning rate is 1. stochastic gradient descent.

# Variables used during training
data_size = len(x)  # 300
max_iters = data_size // batch_size  # 300/30==10
total_loss = 0
loss_count = 0
loss_list = []

for epoch in range(max_epoch):
    # データのシャッフル
    idx = np.random.permutation(data_size)
    x = x[idx]