def model(dr):
    num_input = dr.num_feature
    # num_hidden1 = 32
    # num_hidden2 = 16
    # num_hidden3 = 8
    # num_hidden4 = 4
    nums_hidden = [64, 32, 16, 8, 4]
    num_output = 1
    max_epoch = 2000
    batch_size = 16
    learning_rate = 0.1

    params = HyperParameters_4_0(
        learning_rate, max_epoch, batch_size,
        net_type=NetType.Fitting,
        init_method=InitialMethod.MSRA,
        stopper=Stopper(StopCondition.StopDiff, 1e-7))

    net = NeuralNet_4_0(params, "HouseSingle")

    fc1 = FcLayer_1_0(num_input, nums_hidden[0], params)
    net.add_layer(fc1, "fc1")
    r1 = ActivationLayer(Relu())
    net.add_layer(r1, "r1")

    fc2 = FcLayer_1_0(nums_hidden[0], nums_hidden[1], params)
    net.add_layer(fc2, "fc2")
    r2 = ActivationLayer(Relu())
    net.add_layer(r2, "r2")

    fc3 = FcLayer_1_0(nums_hidden[1], nums_hidden[2], params)
    net.add_layer(fc3, "fc3")
    r3 = ActivationLayer(Relu())
    net.add_layer(r3, "r3")

    fc4 = FcLayer_1_0(nums_hidden[2], nums_hidden[3], params)
    net.add_layer(fc4, "fc4")
    r4 = ActivationLayer(Relu())
    net.add_layer(r4, "r4")

    if len(nums_hidden) == 4:
        fc5 = FcLayer_1_0(nums_hidden[3], num_output, params)
        net.add_layer(fc5, "fc5")
    if len(nums_hidden) == 5:
        fc5 = FcLayer_1_0(nums_hidden[3], nums_hidden[4], params)
        net.add_layer(fc5, "fc5")
        r5 = ActivationLayer(Relu())
        net.add_layer(r5, "r5")
        fc6 = FcLayer_1_0(nums_hidden[4], num_output, params)
        net.add_layer(fc6, "fc6")

    net.train(dr, checkpoint=10, need_test=True)
    return net
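# A minimal usage sketch for the model above. It assumes LoadData() returns the
# same data reader used by the other scripts in this section; the call sequence
# is illustrative, not part of the original script.
if __name__ == '__main__':
    dr = LoadData()
    net = model(dr)
    net.ShowLossHistory()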
def model():
    dr = LoadData()
    num_input = dr.num_feature
    num_hidden1 = 64
    num_hidden2 = 64
    num_hidden3 = 32
    num_hidden4 = 16
    num_output = 1
    max_epoch = 100
    batch_size = 16
    learning_rate = 0.1

    params = HyperParameters_4_0(
        learning_rate, max_epoch, batch_size,
        net_type=NetType.BinaryClassifier,
        init_method=InitialMethod.Xavier,
        stopper=Stopper(StopCondition.StopDiff, 1e-3))

    net = NeuralNet_4_0(params, "Income")

    fc1 = FcLayer_1_0(num_input, num_hidden1, params)
    net.add_layer(fc1, "fc1")
    a1 = ActivationLayer(Relu())
    net.add_layer(a1, "relu1")

    fc2 = FcLayer_1_0(num_hidden1, num_hidden2, params)
    net.add_layer(fc2, "fc2")
    a2 = ActivationLayer(Relu())
    net.add_layer(a2, "relu2")

    fc3 = FcLayer_1_0(num_hidden2, num_hidden3, params)
    net.add_layer(fc3, "fc3")
    a3 = ActivationLayer(Relu())
    net.add_layer(a3, "relu3")

    fc4 = FcLayer_1_0(num_hidden3, num_hidden4, params)
    net.add_layer(fc4, "fc4")
    a4 = ActivationLayer(Relu())
    net.add_layer(a4, "relu4")

    fc5 = FcLayer_1_0(num_hidden4, num_output, params)
    net.add_layer(fc5, "fc5")
    logistic = ClassificationLayer(Logistic())
    net.add_layer(logistic, "logistic")

    #net.load_parameters()
    net.train(dr, checkpoint=10, need_test=True)
    net.ShowLossHistory()
def model():
    dataReader = LoadData()
    num_input = 1
    num_hidden1 = 4
    num_output = 1
    max_epoch = 10000
    batch_size = 10
    learning_rate = 0.5
    eps = 1e-5

    params = HyperParameters_4_0(
        learning_rate, max_epoch, batch_size,
        net_type=NetType.Fitting,
        init_method=InitialMethod.Xavier,
        stopper=Stopper(StopCondition.StopLoss, 0.001))

    net = NeuralNet_4_0(params, "Level1_CurveFittingNet")

    fc1 = FcLayer_1_0(num_input, num_hidden1, params)
    net.add_layer(fc1, "fc1")
    sigmoid1 = ActivationLayer(Sigmoid())
    net.add_layer(sigmoid1, "sigmoid1")
    fc2 = FcLayer_1_0(num_hidden1, num_output, params)
    net.add_layer(fc2, "fc2")

    net.train(dataReader, checkpoint=100, need_test=True)
    net.ShowLossHistory()
    ShowResult(net, dataReader)
def model(dataReader):
    num_input = 2
    num_hidden = 3
    num_output = 1
    max_epoch = 1000
    batch_size = 5
    learning_rate = 0.1

    params = HyperParameters_4_1(
        learning_rate, max_epoch, batch_size,
        net_type=NetType.BinaryClassifier,
        init_method=InitialMethod.Xavier,
        stopper=Stopper(StopCondition.StopLoss, 0.02))

    net = NeuralNet_4_1(params, "Arc")

    fc1 = FcLayer_1_1(num_input, num_hidden, params)
    net.add_layer(fc1, "fc1")
    sigmoid1 = ActivationLayer(Sigmoid())
    net.add_layer(sigmoid1, "sigmoid1")
    fc2 = FcLayer_1_1(num_hidden, num_output, params)
    net.add_layer(fc2, "fc2")
    logistic = ClassificationLayer(Logistic())
    net.add_layer(logistic, "logistic")

    net.train(dataReader, checkpoint=10, need_test=True)
    return net
def model(dr, lr, bs):
    num_input = dr.num_feature
    num_hidden1 = 32
    num_hidden2 = 16
    num_hidden3 = 8
    num_hidden4 = 4
    num_output = 1
    max_epoch = 1000
    #batch_size = 16
    batch_size = bs
    learning_rate = lr

    params = HyperParameters_4_0(
        learning_rate, max_epoch, batch_size,
        net_type=NetType.Fitting,
        init_method=InitialMethod.MSRA,
        stopper=Stopper(StopCondition.StopDiff, 1e-7))

    net = NeuralNet_4_0(params, "HouseSingle")

    fc1 = FcLayer_1_0(num_input, num_hidden1, params)
    net.add_layer(fc1, "fc1")
    r1 = ActivationLayer(Relu())
    net.add_layer(r1, "r1")

    fc2 = FcLayer_1_0(num_hidden1, num_hidden2, params)
    net.add_layer(fc2, "fc2")
    r2 = ActivationLayer(Relu())
    net.add_layer(r2, "r2")

    fc3 = FcLayer_1_0(num_hidden2, num_hidden3, params)
    net.add_layer(fc3, "fc3")
    r3 = ActivationLayer(Relu())
    net.add_layer(r3, "r3")

    fc4 = FcLayer_1_0(num_hidden3, num_hidden4, params)
    net.add_layer(fc4, "fc4")
    r4 = ActivationLayer(Relu())
    net.add_layer(r4, "r4")

    fc5 = FcLayer_1_0(num_hidden4, num_output, params)
    net.add_layer(fc5, "fc5")

    net.train(dr, checkpoint=10, need_test=True)
    return net
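# A hedged grid-search sketch for the parameterized model above. The candidate
# learning rates and batch sizes below are illustrative assumptions, not tuned
# values from the original experiment.
if __name__ == '__main__':
    dr = LoadData()
    for lr in (0.1, 0.01):
        for bs in (16, 32):
            model(dr, lr, bs)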
def net(init_method, activator):
    max_epoch = 1
    batch_size = 5
    learning_rate = 0.02
    eps = 0.01

    params = HyperParameters_4_1(
        learning_rate, max_epoch, batch_size, eps,
        net_type=NetType.Fitting,
        init_method=init_method)

    net = NeuralNet_4_1(params, "level1")
    num_hidden = [128, 128, 128, 128, 128, 128, 128]
    fc_count = len(num_hidden) - 1
    layers = []

    for i in range(fc_count):
        fc = FcLayer_1_1(num_hidden[i], num_hidden[i + 1], params)
        net.add_layer(fc, "fc")
        layers.append(fc)

        ac = ActivationLayer(activator)
        net.add_layer(ac, "activator")
        layers.append(ac)
    # end for

    # draw 1000 samples from a standard normal distribution; each sample has
    # num_hidden[0] feature values so it can be matrix-multiplied with w1
    x = np.random.randn(1000, num_hidden[0])

    # list of the output matrices of the activation layers
    a_value = []

    # run the forward pass through every layer in order
    input = x
    for i in range(len(layers)):
        output = layers[i].forward(input)
        # but only record the outputs of the activation layers
        if isinstance(layers[i], ActivationLayer):
            a_value.append(output)
        # end if
        input = output
    # end for

    for i in range(len(a_value)):
        ax = plt.subplot(1, fc_count + 1, i + 1)
        ax.set_title("layer" + str(i + 1))
        plt.ylim(0, 10000)
        if i > 0:
            plt.yticks([])
        ax.hist(a_value[i].flatten(), bins=25, range=[0, 1])
    # end for

    # super title
    plt.suptitle(init_method.name + " : " + activator.get_name())
    plt.show()
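# A hedged usage sketch for the histogram experiment above. The initializer /
# activator pairs are assumptions drawn from the classes that appear elsewhere
# in this section (InitialMethod.Xavier, InitialMethod.MSRA, Sigmoid, Relu).
if __name__ == '__main__':
    net(InitialMethod.Xavier, Sigmoid())
    net(InitialMethod.MSRA, Relu())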
def __init__(self, init_method, activator, stop_thresh=0.12):
    # fixed hyperparameters
    self.dataReader = LoadData()
    self.num_input = self.dataReader.num_feature
    self.num_hidden1 = 128
    self.num_hidden2 = 64
    self.num_hidden3 = 32
    self.num_hidden4 = 16
    self.num_output = 10
    self.max_epoch = 20
    self.batch_size = 64
    self.learning_rate = 0.1

    params = HyperParameters_4_1(
        self.learning_rate, self.max_epoch, self.batch_size,
        net_type=NetType.MultipleClassifier,
        init_method=init_method,
        stopper=Stopper(StopCondition.Nothing, 0))

    net = NeuralNet_4_1(params, "MNIST")

    fc1 = FcLayer_1_1(self.num_input, self.num_hidden1, params)
    net.add_layer(fc1, "fc1")
    r1 = ActivationLayer(Relu())
    net.add_layer(r1, "r1")

    fc2 = FcLayer_1_1(self.num_hidden1, self.num_hidden2, params)
    net.add_layer(fc2, "fc2")
    r2 = ActivationLayer(Relu())
    net.add_layer(r2, "r2")

    fc3 = FcLayer_1_1(self.num_hidden2, self.num_hidden3, params)
    net.add_layer(fc3, "fc3")
    r3 = ActivationLayer(Relu())
    net.add_layer(r3, "r3")

    fc4 = FcLayer_1_1(self.num_hidden3, self.num_hidden4, params)
    net.add_layer(fc4, "fc4")
    r4 = ActivationLayer(Relu())
    net.add_layer(r4, "r4")

    fc5 = FcLayer_1_1(self.num_hidden4, self.num_output, params)
    net.add_layer(fc5, "fc5")
    softmax = ClassificationLayer(Softmax())
    net.add_layer(softmax, "softmax")

    self.net = net
def model_relu(num_input, num_hidden, num_output, hp):
    net = NeuralNet_4_1(hp, "chinabank_relu")

    fc1 = FcLayer_1_1(num_input, num_hidden, hp)
    net.add_layer(fc1, "fc1")
    r1 = ActivationLayer(Relu())
    net.add_layer(r1, "Relu1")

    fc2 = FcLayer_1_1(num_hidden, num_hidden, hp)
    net.add_layer(fc2, "fc2")
    r2 = ActivationLayer(Relu())
    net.add_layer(r2, "Relu2")

    fc3 = FcLayer_1_1(num_hidden, num_output, hp)
    net.add_layer(fc3, "fc3")
    softmax = ClassificationLayer(Softmax())
    net.add_layer(softmax, "softmax")

    net.train(dataReader, checkpoint=50, need_test=True)
    net.ShowLossHistory()

    ShowResult(net, hp.toString())
    ShowData(dataReader)
def model_sigmoid(num_input, num_hidden, num_output, hp):
    net = NeuralNet_4_1(hp, "chinabank_sigmoid")

    fc1 = FcLayer_1_1(num_input, num_hidden, hp)
    net.add_layer(fc1, "fc1")
    s1 = ActivationLayer(Sigmoid())
    net.add_layer(s1, "Sigmoid1")

    fc2 = FcLayer_1_1(num_hidden, num_output, hp)
    net.add_layer(fc2, "fc2")
    softmax1 = ClassificationLayer(Softmax())
    net.add_layer(softmax1, "softmax1")

    net.train(dataReader, checkpoint=50, need_test=True)
    net.ShowLossHistory()

    ShowResult(net, hp.toString())
    ShowData(dataReader)
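# A hedged comparison sketch for model_sigmoid/model_relu above. Both functions
# read dataReader as a module-level global, so it must be created first; the
# hyperparameter values and layer sizes below are illustrative assumptions.
if __name__ == '__main__':
    dataReader = LoadData()
    hp = HyperParameters_4_1(
        0.1, 1000, 10,
        net_type=NetType.MultipleClassifier,
        init_method=InitialMethod.Xavier)
    model_sigmoid(dataReader.num_feature, 8, 3, hp)
    model_relu(dataReader.num_feature, 8, 3, hp)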
max_epoch = 30
batch_size = 64
learning_rate = 0.1

params = HyperParameters_4_0(
    learning_rate, max_epoch, batch_size,
    net_type=NetType.MultipleClassifier,
    init_method=InitialMethod.MSRA,
    stopper=Stopper(StopCondition.StopLoss, 0.26))

net = NeuralNet_4_0(params, "Cifar10")

fc1 = FcLayer_1_0(num_input, num_hidden1, params)
net.add_layer(fc1, "fc1")
r1 = ActivationLayer(Relu())
net.add_layer(r1, "r1")

fc2 = FcLayer_1_0(num_hidden1, num_hidden2, params)
net.add_layer(fc2, "fc2")
r2 = ActivationLayer(Relu())
net.add_layer(r2, "r2")

fc3 = FcLayer_1_0(num_hidden2, num_hidden3, params)
net.add_layer(fc3, "fc3")
r3 = ActivationLayer(Relu())
net.add_layer(r3, "r3")

fc4 = FcLayer_1_0(num_hidden3, num_hidden4, params)
net.add_layer(fc4, "fc4")
r4 = ActivationLayer(Relu())
net.add_layer(r4, "r4")

# output layer and softmax classifier, assumed to mirror the other
# multiple-classifier models in this section
fc5 = FcLayer_1_0(num_hidden4, num_output, params)
net.add_layer(fc5, "fc5")
softmax = ClassificationLayer(Softmax())
net.add_layer(softmax, "softmax")
def model():
    dr = LoadData()
    num_input = dr.num_feature
    num_hidden1 = 32
    num_hidden2 = 16
    num_hidden3 = 8
    num_hidden4 = 4
    num_output = 1
    max_epoch = 1000
    batch_size = 16
    learning_rate = 0.01

    params = HyperParameters_4_0(
        learning_rate, max_epoch, batch_size,
        net_type=NetType.Fitting,
        init_method=InitialMethod.Xavier,
        stopper=Stopper(StopCondition.StopDiff, 1e-6))

    net = NeuralNet_4_0(params, "HouseSingle")

    fc1 = FcLayer_1_0(num_input, num_hidden1, params)
    net.add_layer(fc1, "fc1")
    r1 = ActivationLayer(Relu())
    net.add_layer(r1, "r1")

    fc2 = FcLayer_1_0(num_hidden1, num_hidden2, params)
    net.add_layer(fc2, "fc2")
    r2 = ActivationLayer(Relu())
    net.add_layer(r2, "r2")

    fc3 = FcLayer_1_0(num_hidden2, num_hidden3, params)
    net.add_layer(fc3, "fc3")
    r3 = ActivationLayer(Relu())
    net.add_layer(r3, "r3")

    fc4 = FcLayer_1_0(num_hidden3, num_hidden4, params)
    net.add_layer(fc4, "fc4")
    r4 = ActivationLayer(Relu())
    net.add_layer(r4, "r4")

    fc5 = FcLayer_1_0(num_hidden4, num_output, params)
    net.add_layer(fc5, "fc5")

    #ShowResult(net, dr)
    #net.load_parameters()
    #Inference(net, dr)
    #exit()
    #ShowResult(net, dr)

    net.train(dr, checkpoint=10, need_test=True)

    output = net.inference(dr.XTest)
    real_output = dr.DeNormalizeY(output)
    mse = np.sum((dr.YTestRaw - real_output)**2) / dr.YTest.shape[0] / 10000
    print("mse=", mse)

    net.ShowLossHistory()
    ShowResult(net, dr)
batch_size = 128
learning_rate = 0.1
eta = 0.2
eps = 0.01

params = HyperParameters_4_0(
    learning_rate, max_epoch, batch_size,
    net_type=NetType.Fitting,
    init_method=InitialMethod.Xavier,
    stopper=Stopper(StopCondition.StopDiff, 1e-7))

net = NeuralNet_4_0(params, "SAFE")

fc1 = FcLayer_1_0(num_input, num_hidden1, params)
net.add_layer(fc1, "fc1")
r1 = ActivationLayer(Sigmoid())
net.add_layer(r1, "r1")

fc2 = FcLayer_1_0(num_hidden1, num_hidden2, params)
net.add_layer(fc2, "fc2")
r2 = ActivationLayer(Sigmoid())
net.add_layer(r2, "r2")

fc3 = FcLayer_1_0(num_hidden2, num_output, params)
net.add_layer(fc3, "fc3")
r3 = ActivationLayer(Sigmoid())
net.add_layer(r3, "r3")

softmax = ClassificationLayer(Softmax())
net.add_layer(softmax, "softmax")
num_output = 1
max_epoch = 10000
batch_size = 5
learning_rate = 0.1

params = HyperParameters_4_0(
    learning_rate, max_epoch, batch_size,
    net_type=NetType.BinaryClassifier,
    init_method=InitialMethod.Xavier,
    stopper=Stopper(StopCondition.StopLoss, 0.05))

net = NeuralNet_4_0(params, "Arc")

fc1 = FcLayer_1_0(num_input, num_hidden, params)
net.add_layer(fc1, "fc1")
sigmoid1 = ActivationLayer(Sigmoid())
net.add_layer(sigmoid1, "sigmoid1")
fc2 = FcLayer_1_0(num_hidden, num_output, params)
net.add_layer(fc2, "fc2")
logistic = ClassificationLayer(Logistic())
net.add_layer(logistic, "logistic")

#net.load_parameters()
net.train(dataReader, checkpoint=10, need_test=True)
net.ShowLossHistory()
ShowResult2D(net, dataReader)