from matplotlib import pyplot

def drowPicture(History):
    # Plot the training/validation loss and accuracy curves recorded in a Keras History object
    pyplot.plot(History.history['loss'])
    pyplot.plot(History.history['val_loss'])
    pyplot.plot(History.history['acc'])
    pyplot.plot(History.history['val_acc'])
    pyplot.title('model train vs validation loss/accuracy')
    pyplot.ylabel('loss / accuracy')
    pyplot.xlabel('epoch')
    # Four curves are plotted, so the legend needs four labels
    pyplot.legend(['train loss', 'validation loss', 'train acc', 'validation acc'], loc='upper right')
    pyplot.show()
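# A usage sketch (an assumption, not part of the original snippet): drowPicture expects the
# History object returned by model.fit, and the 'acc'/'val_acc' keys only exist when the model
# was compiled with an accuracy metric. The model and data names below are placeholders.
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['acc'])
history = model.fit(X_train, y_train, epochs=50,
                    validation_data=(X_val, y_val), verbose=0)
drowPicture(history)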
# Run the training for a fixed number of epochs inside a Session
n_samples = xs.shape[0]
init = tf.global_variables_initializer()
with tf.Session() as sess:
    # First initialize all the variables
    sess.run(init)
    # Write the graph to a log directory so it can be inspected with TensorBoard
    writer = tf.summary.FileWriter('./graphs', sess.graph)
    result_loss = []
    # Train the model
    for i in range(50):
        total_loss = 0
        for x, y in zip(xs, ys):
            # Feed the data in one sample at a time through feed_dict
            _, l = sess.run([optimizer, loss], feed_dict={X: x, Y: y})
            total_loss += l
        if i % 5 == 0:
            print('Epoch {0}: {1}'.format(i, total_loss / n_samples))
    # Close the writer
    writer.close()
    # Fetch the trained parameter values
    w, b = sess.run([w, b])
print(w, b)
print("W:" + str(w[0]))
print("b:" + str(b[0]))
plt.plot(xs, ys, "bo", label="Real data")
plt.plot(xs, xs * w + b, 'r', label="Predicted data")
plt.legend()
plt.show()
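# The loop above assumes that xs, ys, the placeholders X and Y, the variables w and b, and the
# loss/optimizer ops were built earlier. A minimal sketch of such a graph definition (the toy
# data, learning rate and squared-error loss are assumptions, not the original code):
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt

# Toy 1-D data standing in for the dataset loaded earlier
xs = np.linspace(0, 3, 100).astype(np.float32)
ys = 2 * xs + 1 + np.random.normal(0, 0.2, 100).astype(np.float32)

# Placeholders for a single sample, matching feed_dict={X: x, Y: y}
X = tf.placeholder(tf.float32, name='X')
Y = tf.placeholder(tf.float32, name='Y')

# Parameters with shape [1] so that w[0] and b[0] can be printed after training
w = tf.Variable(tf.zeros([1]), name='weight')
b = tf.Variable(tf.zeros([1]), name='bias')

# Linear model, squared-error loss and a plain gradient-descent optimizer
Y_pred = X * w + b
loss = tf.square(Y - Y_pred, name='loss')
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01).minimize(loss)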
# Reshape the input to 3D [samples, timesteps, features]; timesteps = n_hours = 3, i.e. 3 records predict 1
train_X = train_X.reshape((train_X.shape[0], n_hours, n_features))
test_X = test_X.reshape((test_X.shape[0], n_hours, n_features))
print(train_X.shape, train_y.shape, test_X.shape, test_y.shape)
# Design the network
model = Sequential()
model.add(LSTM(50, input_shape=(train_X.shape[1], train_X.shape[2])))
model.add(Dense(1))
model.compile(loss='mae', optimizer='adam')
# Fit the network
history = model.fit(train_X, train_y, epochs=50, batch_size=72,
                    validation_data=(test_X, test_y), verbose=2, shuffle=False)
# Plot history
pyplot.plot(history.history['loss'], label='train')
pyplot.plot(history.history['val_loss'], label='test')
pyplot.legend()
pyplot.show()
# Make a prediction
yhat = model.predict(test_X)
test_X = test_X.reshape((test_X.shape[0], n_hours * n_features))
# Concatenate the predicted column with the last 7 feature columns so the column count
# matches what the scaler was fitted on
inv_yhat = concatenate((yhat, test_X[:, -7:]), axis=1)
# Invert the scaling on the concatenated data
inv_yhat = scaler.inverse_transform(inv_yhat)
inv_yhat = inv_yhat[:, 0]
test_y = test_y.reshape((len(test_y), 1))
# Concatenate the true values with the last 7 feature columns in the same way
inv_y = concatenate((test_y, test_X[:, -7:]), axis=1)
# Invert the scaling on the concatenated data
inv_y = scaler.inverse_transform(inv_y)
inv_y = inv_y[:, 0]
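# With both predictions and ground truth back on the original scale, a typical next step
# (not shown in the original snippet) is to report an error metric such as RMSE.
# A minimal sketch using scikit-learn:
from math import sqrt
from sklearn.metrics import mean_squared_error

# Root mean squared error between the inverse-scaled true and predicted values
rmse = sqrt(mean_squared_error(inv_y, inv_yhat))
print('Test RMSE: %.3f' % rmse)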
    return X, y

# Define the model
model = Sequential()
model.add(LSTM(10, input_shape=(1, 1)))
# activation selects the activation function; 'linear' means a linear (identity) output
model.add(Dense(1, activation='linear'))
# Compile the model: loss selects the loss function ('mse' = mean_squared_error),
# optimizer selects the optimizer; Adam improves on plain gradient descent by keeping
# moving averages of the gradients (momentum)
model.compile(loss='mse', optimizer='adam')
# Fit the model
X, y = get_train()
valX, valY = get_val()
# validation_data is the validation set; shuffle controls whether the training data is shuffled.
# This run underfits because epochs=100 gives too few training epochs
history = model.fit(X, y, epochs=100, validation_data=(valX, valY), shuffle=False)
# Plot train and validation loss
# 'loss' is the loss on the training set, 'val_loss' is the loss on the validation set
pyplot.plot(history.history['loss'])
pyplot.plot(history.history['val_loss'])
pyplot.title('model train vs validation loss')
pyplot.ylabel('loss')
pyplot.xlabel('epoch')
pyplot.legend(['train', 'validation'], loc='upper right')
pyplot.show()
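# The snippet above relies on get_train() and get_val() helpers whose bodies are not shown in
# this section (only a trailing "return X, y" survives above). A minimal sketch of what such
# helpers could look like for an LSTM with input_shape=(1, 1); the sequences below are
# illustrative assumptions, not the original data:
from numpy import array

def get_train():
    # A short increasing sequence of (input, target) pairs,
    # reshaped to [samples, timesteps, features] = [5, 1, 1]
    seq = [[0.0, 0.1], [0.1, 0.2], [0.2, 0.3], [0.3, 0.4], [0.4, 0.5]]
    seq = array(seq)
    X, y = seq[:, 0], seq[:, 1]
    X = X.reshape((len(X), 1, 1))
    return X, y

def get_val():
    # A second, non-overlapping sequence used only for validation
    seq = [[0.5, 0.6], [0.6, 0.7], [0.7, 0.8], [0.8, 0.9], [0.9, 1.0]]
    seq = array(seq)
    X, y = seq[:, 0], seq[:, 1]
    X = X.reshape((len(X), 1, 1))
    return X, y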