Example #1
import numpy as np
import matplotlib.pyplot as plt

import basic_nodes as nodes
# dataset_generator is assumed to come from a project utility module (not shown here)

np.random.seed(0)
plt.style.use('seaborn')
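
# For reference, a minimal sketch of the node interface this example expects
# from basic_nodes (an illustration only, not the library's actual source):
# each node caches its inputs during forward() and returns the local
# gradients from backward().
class _sketch_mul_node:
    def forward(self, x, y):
        self.x, self.y = x, y              # cache inputs for the backward pass
        return x * y

    def backward(self, dz):
        # gradients w.r.t. x and y, given the upstream gradient dz
        return dz * self.y, dz * self.x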

# dataset preparation
dataset_gen = dataset_generator()
dataset_gen.set_coefficient([5,0])
x_data, y_data = dataset_gen.make_dataset()
dataset_gen.dataset_visualizer()

# model part
node1 = nodes.mul_node()

# square error loss part
node2 = nodes.minus_node()
node3 = nodes.square_node()

# hyperparameter setting
epochs = 2  # number of training epochs
lr = 0.01   # learning rate

th = -1  # arbitrary initial theta (= weight)
loss_list = []
th_list = []

for epoch in range(epochs):
    # shuffle the training data once at the start of each epoch
    random_idx = np.arange(len(x_data))
    np.random.shuffle(random_idx)
    x_data = x_data[random_idx]
    y_data = y_data[random_idx]
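    # The inner update step below is a sketch and not part of the original
    # snippet: the backward() signatures are assumed from the usual
    # forward/backward node interface used throughout this example.
    for x, y in zip(x_data, y_data):
        pred = node1.forward(th, x)   # prediction: th * x
        z = node2.forward(y, pred)    # error: y - prediction
        loss = node3.forward(z)       # squared error loss

        dz = node3.backward(1)        # d(loss)/dz = 2z
        dy, dpred = node2.backward(dz)
        dth, dx = node1.backward(dpred)

        th = th - lr * dth            # gradient descent update
        th_list.append(th)
        loss_list.append(loss)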

Example #2

np.random.seed(0)


dataset_gen = dataset_generator()

dataset_gen.set_coefficient([5,2])
x_data, y_data = dataset_gen.make_dataset()
data = np.hstack((x_data, y_data))


# model implementation
node1 = nodes.mul_node()
node2 = nodes.plus_node()

# square loss/MSE cost implementation
node3 = nodes.minus_node()
node4 = nodes.square_node()

th1, th0 = 1, 0  # initial weight (theta1) and bias (theta0)
lr = 0.01
epochs = 2

th1_list, th0_list = [], []
loss_list = []

for epoch in range(epochs):
    for data_idx, (x, y) in enumerate(data):
        z1 = node1.forward(th1, x)   # th1 * x
        z2 = node2.forward(z1, th0)  # prediction: th1*x + th0
        z3 = node3.forward(y, z2)    # error: y - prediction
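        # The rest of this loop is a sketch and not in the original snippet:
        # the final forward step, the backward pass, and the parameter
        # updates, assuming the usual forward/backward node interface.
        loss = node4.forward(z3)           # squared error for this sample

        dz3 = node4.backward(1)            # 2 * z3
        dy, dz2 = node3.backward(dz3)      # (dz3, -dz3)
        dz1, dth0 = node2.backward(dz2)    # plus node passes the gradient through
        dth1, dx = node1.backward(dz1)     # (dz1 * x, dz1 * th1)

        th1 = th1 - lr * dth1              # gradient descent updates
        th0 = th0 - lr * dth0

        th1_list.append(th1)
        th0_list.append(th0)
        loss_list.append(loss)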

    # Fragment of a class-based refactor: the MSE cost is assembled from a
    # minus node, a square node, and a mean node.
    def cost_imp(self):
        self.node3 = nodes.minus_node()
        self.node4 = nodes.square_node()
        self.node5 = nodes.mean_node()
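    # A sketch of how these nodes would be wired in the cost's forward pass;
    # this method is not in the original fragment and only illustrates the
    # assumed flow: error -> square -> mean gives the MSE cost.
    def forward(self, y, pred):
        z3 = self.node3.forward(y, pred)   # per-sample error: y - prediction
        z4 = self.node4.forward(z3)        # element-wise squared error
        cost = self.node5.forward(z4)      # mean over the batch -> MSE
        return cost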