    # NOTE(review): this fragment is the tail of a reporting function whose
    # `def` line lies before this chunk; its indentation is reconstructed —
    # confirm against the full file.
    # Report the group verdict, then dump predicted vs. ground-truth digits
    # and display the plotted figures.
    if index:
        print("All the figures in this group are predicted correctly")
    print(pred, "<--Predicted figures")
    print(labels, "<--The right number")
    plt.show()


if __name__ == "__main__":
    # Script entry point: rebuild LeNet-5, restore a trained checkpoint, and
    # run inference on the MNIST test images.
    mnist_path = "./MNIST/"
    test_data_path = "./MNIST/test/"
    model_path = "./model/ckpt/mindspore_quick_start/"
    lr = 0.01        # learning rate handed to the optimizer below
    momentum = 0.9   # momentum handed to the optimizer below

    # create the network
    network = LeNet5()
    # Restore the trained weights into the freshly-built network.
    param_dict = load_checkpoint(
        "./model/ckpt/mindspore_quick_start/checkpoint_lenet-5_1500.ckpt")
    load_param_into_net(network, param_dict)

    # define the optimizer
    net_opt = nn.Momentum(network.trainable_params(), lr, momentum)
    # define the loss function (sparse labels, batch-mean reduction)
    net_loss = SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
    model = Model(network, net_loss, net_opt, metrics={"Accuracy": Accuracy()})

    # test_net(network, model, mnist_path)
    test_inference(model, test_data_path)
# --- Evaluation section: score the trained SentimentNet LSTM classifier ---

# Rebuild the network with the same hyper-parameters used for training; the
# pretrained embedding table supplies both the vocabulary size and the
# embedding weights.
network = SentimentNet(vocab_size=embedding_table.shape[0],
                       embed_size=cfg.embed_size,
                       num_hiddens=cfg.num_hiddens,
                       num_layers=cfg.num_layers,
                       bidirectional=cfg.bidirectional,
                       num_classes=cfg.num_classes,
                       weight=Tensor(embedding_table),
                       batch_size=cfg.batch_size)

# Sparse softmax cross-entropy, averaged over the batch.
loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')

# Evaluation dataset (training=False selects the held-out split).
ds_eval = lstm_create_dataset(args.preprocess_path, cfg.batch_size,
                              training=False)

# Metrics reported by model.eval().
eval_metrics = {
    'acc': Accuracy(),
    'recall': Recall(),
    'f1': F1(),
}
model = Model(network, loss, metrics=eval_metrics)

print("============== Starting Testing ==============")

# Restore the trained weights before evaluating.
param_dict = load_checkpoint(args.ckpt_path)
load_param_into_net(network, param_dict)

# On the CPU target, dataset sink mode is explicitly switched off.
acc = (model.eval(ds_eval, dataset_sink_mode=False)
       if args.device_target == "CPU"
       else model.eval(ds_eval))
print("============== {} ==============".format(acc))
    # NOTE(review): this is the first branch of an if/else whose condition
    # line lies before this chunk — it selects between a scheduled and a
    # fixed learning rate; confirm against the full file.
    # Build a per-step learning-rate schedule covering warm-up and decay.
    lr = Tensor(
        get_lr(global_step=cfg.global_step,
               lr_init=cfg.lr_init,
               lr_end=cfg.lr_end,
               lr_max=cfg.lr_max,
               warmup_epochs=cfg.warmup_epochs,
               total_epochs=cfg.num_epochs,
               steps_per_epoch=ds_train.get_dataset_size(),
               lr_adjust_epoch=cfg.lr_adjust_epoch))
else:
    # Fall back to a constant learning rate from the config.
    lr = cfg.learning_rate

# Momentum optimizer over all trainable parameters.
opt = nn.Momentum(network.trainable_params(), lr, cfg.momentum)
loss_cb = LossMonitor()  # prints the loss value during training
model = Model(network, loss, opt, {'acc': Accuracy()})

print("============== Starting Training ==============")

# Periodically save "lstm" checkpoints, keeping at most
# cfg.keep_checkpoint_max of them.
config_ck = CheckpointConfig(
    save_checkpoint_steps=cfg.save_checkpoint_steps,
    keep_checkpoint_max=cfg.keep_checkpoint_max)
ckpoint_cb = ModelCheckpoint(prefix="lstm",
                             directory=args.ckpt_path,
                             config=config_ck)
time_cb = TimeMonitor(data_size=ds_train.get_dataset_size())

# On the CPU target, dataset sink mode is explicitly switched off.
if args.device_target == "CPU":
    model.train(cfg.num_epochs, ds_train,
                callbacks=[time_cb, ckpoint_cb, loss_cb],
                dataset_sink_mode=False)
else:
    # NOTE(review): the body of this branch continues past this chunk.
        # NOTE(review): tail of Net.__init__ — the `def __init__` line (and
        # the fc1 definition it must contain) lies before this chunk.
        self.fc2 = nn.Dense(hidden_size, 1)  # hidden layer -> single output
        self.sig = ops.Sigmoid()             # activation used between layers

    def construct(self, x):
        """Forward pass: fc1 -> sigmoid -> fc2; no activation on the output."""
        x = self.fc1(x)
        x = self.sig(x)
        x = self.fc2(x)
        return x


# Script section: train the two-input network, then probe it on all four
# boolean input combinations (a truth-table check of what it learned).
m = Net(HIDDEN_SIZE)
optim = nn.Momentum(m.trainable_params(), 0.05, 0.9)
loss = nn.MSELoss()
loss_cb = LossMonitor()  # prints the loss value during training
model = Model(m, loss, optim, {'acc': Accuracy()})
time_cb = TimeMonitor(data_size=ds_train.get_dataset_size())
model.train(ITERATIONS, ds_train,
            callbacks=[time_cb, loss_cb],
            dataset_sink_mode=False)

# Raw (pre-threshold) predictions for each input pair.
print("TF", model.predict(Tensor([[1, 0]], mindspore.float32)).asnumpy())
print("FF", model.predict(Tensor([[0, 0]], mindspore.float32)).asnumpy())
print("TT", model.predict(Tensor([[1, 1]], mindspore.float32)).asnumpy())
print("FT", model.predict(Tensor([[0, 1]], mindspore.float32)).asnumpy())