def w_indicate(d):
    """Estimate least-squares weights for artificial regression data and print them.

    Parameters
    ----------
    d : str
        Dataset type forwarded to ``rg.artificial`` (e.g. ``"1D"`` or ``"2D"``).

    Returns
    -------
    numpy.ndarray
        Weight vector ``w`` solving the normal equations ``(X X^T) w = X y``,
        where ``X`` is the training input augmented with a constant bias row.
    """
    myData = rg.artificial(200, 100, dataType=d)
    x = myData.xTrain
    # Insert a constant row of ones (bias term) as the second row, keeping the
    # same position the original code used with np.insert.
    x = np.insert(x, 1, 1, axis=0)
    y = myData.yTrain

    # The original per-sample loops accumulated sum_i x_i x_i^T and
    # sum_i y_i x_i; those sums are exactly the matrix products X X^T and X y,
    # which also fixes the broken 2D branch (np.append flattened the
    # accumulators) and removes the leftover pdb.set_trace() breakpoint.
    gram = np.matmul(x, x.T)      # (D+1, D+1)
    moment = np.matmul(x, y)      # (D+1,)

    # The original used element-wise `*` here, which broadcasts instead of
    # performing the matrix-vector product; np.matmul is the correct solve.
    w = np.matmul(np.linalg.inv(gram), moment)
    print(w)
    return w
#self.w = np.zeros([self.xDim,1]) #------------------------------------ # クラスの定義終わり #------------------- #------------------- # メインの始まり if __name__ == "__main__": #------------------------------------ #liner # 1) 学習入力次元が2の場合のデーター生成 myData = rg.artificial(200, 100, dataType="1D") #myData = rg.artificial(200,100, dataType="1D",isNonlinear=True) # 2) 線形回帰モデル regression = linearRegression(myData.xTrain, myData.yTrain) #regression = linearRegression(myData.xTrain,myData.yTrain,kernelType="gaussian",kernelParam=1) # 3) 学習(For文版) sTime = time.time() regression.train() eTime = time.time() print("train with for-loop: time={0:.4} sec".format(eTime - sTime)) # 4) 学習(行列版) sTime = time.time() regression.trainMat()
# coding: utf-8
import regressionData as rg
import numpy as np
import pdb


if __name__ == "__main__":
    # Closed-form least squares on 2-D artificial data: w = (X X^T)^{-1} X y.
    data = rg.artificial(200, 100, dataType="2D")
    n_samples = data.xTrain.shape[1]

    # Stack a constant row of ones under the inputs so the last weight
    # acts as the bias term.
    design = np.append(data.xTrain, np.ones((1, n_samples)), axis=0)

    # Normal equations, split into named intermediates for readability.
    gram_inv = np.linalg.inv(np.matmul(design, design.T))
    target_moment = np.sum(data.yTrain * design, axis=1)
    w = np.matmul(gram_inv, target_moment)

    print(w)
import regressionData as rg

# Generate both artificial datasets up front, then fit each one with its own
# train() routine and display the learned weights. Statement order matches
# the original so any shared random state is consumed identically.
data_1d = rg.artificial(200, 100, dataType="1D")
data_2d = rg.artificial(200, 100, dataType="2D")

weights_1d = data_1d.train(data_1d.xTrain)
print("1次元\n", weights_1d)

weights_2d = data_2d.train(data_2d.xTrain)
print("2次元\n", weights_2d)
# NOTE(review): the first statements below are the tail of a method whose
# `def` line lies above this chunk (it computes a loss from predictions f_x);
# indentation reconstructed — TODO confirm against the full file.
        y = y[np.newaxis]
        # Mean squared error; shape[1] of the residual is the sample count.
        loss = np.sum(pow(y - f_x, 2)) / (y - f_x).shape[1]
        return loss
#------------------------------------
# End of the class definition
#-------------------
#-------------------
# Start of main
if __name__ == "__main__":
    # 1) Generate the artificial training data (1-D input case; other
    # variants kept commented out)
    myData = rg.artificial(200, 100, dataType="1D")
    # myData = rg.artificial(200,100, dataType="1D",isNonlinear=True)
    # myData = rg.artificial(200,100, dataType="2D")
    # myData = rg.artificial(200,100, dataType="2D",isNonlinear=True)

    # 2) Linear regression model — here the Gaussian-kernel variant is the
    # active one; linearRegression is defined above this chunk.
    #regression = linearRegression(myData.xTrain,myData.yTrain)
    regression = linearRegression(myData.xTrain, myData.yTrain, kernelType="gaussian", kernelParam=1)

    # 3) Training (for-loop version) — NOTE(review): the training call is
    # commented out, so only the elapsed-time bookkeeping runs here.
    sTime = time.time()
    # regression.trainMat()
    eTime = time.time()