def Draw():
    """Replay pickled per-iteration gradient magnitudes into TensorBoard logs.

    For each network tag, reads ``<tag>_grad.data`` (a pickled sequence of
    gradient values) and writes two scalar series — the raw value and its
    natural log — into a ``<tag>_tbdata/`` log directory via the project's
    TensorboardHelper (``TB``).

    Side effects only; returns ``None``.
    """
    for net_name in ["p120", "r120", "d120", "d40", "r40"]:
        tb = TB(net_name + "_tbdata/")
        # Fix: the original did pickle.load(open(...)) and never closed the
        # file handle; use a context manager so it is released promptly.
        with open(net_name + "_grad.data", "rb") as f:
            grad_lis = pickle.load(f)
        for grad in grad_lis:
            tb.tick()
            tb.add_scalar("loged_grad", np.log(grad))
            tb.add_scalar("grad", grad)
        # NOTE(review): the collapsed original is ambiguous about whether
        # flush() sat inside the inner loop; flushing once per network is the
        # sensible reading (per-tick flush would only add I/O) — confirm.
        tb.flush()
# --- One iteration of the training loop -------------------------------------
# NOTE(review): this is the body of an enclosing loop whose header (the `for`
# over minibatch index `i`) is not visible here; `i`, `tr_p`, `minibatch_size`,
# `train_func`, `optimizer`, `his`, `his_test`, `epoch`, `max_acc`, `a`,
# `ORI_IT`, `TOT_IT`, `EPOCH_NUM`, `C`, `valid_func` all come from that outer
# scope — verify against the full file.  Indentation below reconstructs the
# most plausible grouping of the collapsed original.

tb.tick()

# Time how long fetching the next minibatch takes, separately from the
# train step itself, so we can warn when the input pipeline is the bottleneck.
token1 = time.time()
data = get_minibatch(tr_p, minibatch_size)
time_data = time.time() - token1

token2 = time.time()
out = train_func(data = data['data'], label = data["label"])
time_train = time.time() - token2

# Warn if data loading consumed more than 20% of this iteration's wall time.
if time_data > (time_train + time_data) * 0.2:
    print("Loading data may spends too much time {}".format(time_data / (time_train + time_data)))

# Minibatch training accuracy: argmax over class scores vs. ground-truth labels.
loss = out["pre_loss"]
pred = np.array(out["outputs"]).argmax(axis = 1)
acc = (pred == np.array(data["label"])).mean()
his.append([loss, acc])
tb.add_scalar("loss", loss)
tb.add_scalar("traing_acc", acc)
print("Minibatch = {}, Loss = {}, Acc = {}".format(i, loss, acc))

#Learning Rate Adjusting
# Step-decay schedule: divide LR by 10 at 1/2 and 3/4 of the original
# iteration budget, then pin it to 1e-5 once ORI_IT is reached.
if i == ORI_IT // 2 or i == ORI_IT // 4 * 3:
    optimizer.learning_rate /= 10
if i == ORI_IT:
    optimizer.learning_rate = 1e-5

# End-of-epoch bookkeeping: run validation, record it, and estimate the
# finish time by linear extrapolation from elapsed time (`a` is presumably
# the loop's start timestamp — confirm against the outer scope).
if i % (EPOCH_NUM) == 0:
    epoch += 1
    acc = C.test(valid_func)
    his_test.append([i, acc])
    print("Epoch = {}, Acc = {}, Max_acc = {}".format(epoch, acc, max_acc))
    b = time.time()
    b = b + (b - a) / i * (TOT_IT - i)
# Smoke test for the TensorboardHelper wrapper: write a short ramp of scalar
# values into a log directory rooted at the current working directory.
import sys

# The helper lives outside the package path, so extend sys.path before the
# import below — this ordering is required.
sys.path.append("/home/liuyanyi02/CIFAR/latest_tools")
from th import TensorboardHelper as TB
import os

tb = TB(os.getcwd())
for step in range(100):
    tb.add_scalar("tmp", step)
    tb.tick()
tb.flush()