print(data_size)

# Build the network from the hyperparameters defined earlier in the file.
lstm_parameter = LstmParameter(cell_count, x_dim, y_dim)
lstm_network = LstmNetwork(lstm_parameter)

loss_list = []        # per-batch averaged losses, for later inspection/plotting
batch_size = 128      # gradient-accumulation window (in iterations)
accuracy_best = 0.4   # NOTE(review): unused in this chunk — presumably read further down

for iters in range(1000000):
    # Draw one training example at random.
    sample_idx = random.randint(0, data_size - 1)
    sequence = data[sample_idx].reshape((cell_count, x_dim))
    target = label[sample_idx]
    # print("iters is", iters)

    # Feed the sequence through the network one timestep at a time.
    for step in range(cell_count):
        lstm_network.forward_compute(sequence[step])

    if iters % batch_size != 0:
        # Mid-batch: backprop this example and fold its loss into the accumulator.
        loss += lstm_network.backward_compute(target, ToyLossLayer)
    elif iters == 0:
        # Very first iteration: seed the running loss accumulator.
        loss = lstm_network.backward_compute(target, ToyLossLayer)
    else:
        # Batch boundary: the accumulator holds exactly `batch_size` losses.
        # Average it, record it, apply the accumulated gradients, then restart
        # the accumulator with the current example's loss.
        loss = loss / batch_size
        # print("predict is", lstm_network.lstm_cell_list[cell_count-1].state.h.shape)
        # print("iter is {} loss is {}".format(iters, loss))
        loss_list.append(loss)
        lstm_parameter.update_parameter(lr=0.3, batch_size=batch_size)
        loss = lstm_network.backward_compute(target, ToyLossLayer)