def __call__(self, output, target, *args, **kwargs):
    """Return the metric object(s) used for accuracy evaluation.

    NOTE(review): despite the original docstring ("Forward and calculate
    accuracy"), `output` and `target` are ignored — this returns freshly
    constructed metric objects instead of computing a value from them.
    Presumably the result is fed to something like `Model(metrics=...)`;
    confirm against callers before relying on (or changing) this contract.

    Returns:
        A single `Accuracy()` instance when only one top-k value is
        configured, otherwise a dict of named metric objects
        (plain accuracy plus top-1 and top-5 categorical accuracy).
    """
    # `self.topk` is configured elsewhere (not visible here); only its
    # length is consulted — the actual k values are not used below.
    if len(self.topk) == 1:
        return Accuracy()
    else:
        return {
            "accuracy": Accuracy(),
            "accuracy_top1": nn.Top1CategoricalAccuracy(),
            "accuracy_top5": nn.Top5CategoricalAccuracy()
        }
}, { 'params': no_decayed_params }, { 'order_params': net.trainable_params() }] optimizer = RMSProp(group_params, lr, decay=0.9, weight_decay=cfg.weight_decay, momentum=cfg.momentum, epsilon=cfg.opt_eps, loss_scale=cfg.loss_scale) eval_metrics = { 'Loss': nn.Loss(), 'Top1-Acc': nn.Top1CategoricalAccuracy(), 'Top5-Acc': nn.Top5CategoricalAccuracy() } if args_opt.resume: ckpt = load_checkpoint(args_opt.resume) load_param_into_net(net, ckpt) model = Model(net, loss_fn=loss, optimizer=optimizer, metrics={'acc'}) print("============== Starting Training ==============") loss_cb = LossMonitor(per_print_times=batches_per_epoch) time_cb = TimeMonitor(data_size=batches_per_epoch) callbacks = [loss_cb, time_cb] config_ck = CheckpointConfig(save_checkpoint_steps=batches_per_epoch, keep_checkpoint_max=cfg.keep_checkpoint_max) ckpoint_cb = ModelCheckpoint(prefix=f"inceptionv3-rank{cfg.rank}", directory=cfg.ckpt_path,
# Evaluation flow: build the eval dataset, restore a trained Xception
# checkpoint, and report loss / top-1 / top-5 accuracy on it.
dataset = create_dataset(args_opt.dataset_path, do_train=False, batch_size=config.batch_size, device_num=1, rank=0)
# Batches per epoch.
# NOTE(review): `step_size` is never used in this visible chunk — either
# dead code or consumed by code outside this view; verify before removing.
step_size = dataset.get_dataset_size()
# define net
net = xception(class_num=config.class_num)
# load checkpoint and freeze the network for inference
param_dict = load_checkpoint(args_opt.checkpoint_path)
load_param_into_net(net, param_dict)
net.set_train(False)
# define loss (label-smoothed cross entropy, matching training config)
loss = CrossEntropySmooth(smooth_factor=config.label_smooth_factor, num_classes=config.class_num)
# define model with the metrics reported below
eval_metrics = {'Loss': nn.Loss(), 'Top_1_Acc': nn.Top1CategoricalAccuracy(), 'Top_5_Acc': nn.Top5CategoricalAccuracy()}
model = Model(net, loss_fn=loss, metrics=eval_metrics)
# eval model; sink mode disabled so data is fed batch-by-batch from host
res = model.eval(dataset, dataset_sink_mode=False)
print("result:", res, "ckpt=", args_opt.checkpoint_path)