def clf_fun(self, n_class, device, s=20, m=0.01):
    """Build the classifier head selected by ``self.method``.

    Args:
        n_class: Number of output classes for the classifier.
        device: Torch device the classifier is moved to.
        s: Scale factor for the cosine/arc margin heads (default 20).
        m: Angular margin for the arcMax head (default 0.01).

    Returns:
        The classifier module (softMax / cosMax / arcMax), already on ``device``.

    Raises:
        ValueError: If ``self.method`` is not one of the supported names.
    """
    if self.method == 'softMax':
        clf = softMax(self.out_dim, n_class).to(device)
    elif self.method == 'cosMax':
        clf = cosMax(self.out_dim, n_class, s).to(device)
    elif self.method == 'arcMax':
        clf = arcMax(self.out_dim, n_class, s, m).to(device)
    else:
        # Fail loudly instead of hitting UnboundLocalError on `return clf`.
        raise ValueError(f"unknown method: {self.method!r}")
    return clf
def clf_optimizer(args, net, device, frozen_net, s=15, m=0.01):
    """Build the evaluation classifier head and its Adam optimizer.

    Args:
        args: Config object; reads ``method``, ``out_dim``, ``test_n_way``, ``lr``.
        net: Backbone network; its parameters are optimized only when
            ``frozen_net`` is False.
        device: Torch device the classifier is moved to.
        frozen_net: If True, the backbone is kept fixed — only the classifier
            is optimized, and the scale ``s`` is lowered to 5.
        s: Scale factor for the cosine/arc margin heads (default 15).
        m: Angular margin for the arcMax head (default 0.01).

    Returns:
        Tuple ``(clf, optimizer)``.

    Raises:
        ValueError: If ``args.method`` is not one of the supported names.
    """
    if frozen_net:
        # Smaller scale when only the classifier head is trained.
        s = 5
    if args.method == 'softMax':
        clf = softMax(args.out_dim, args.test_n_way).to(device)
    elif args.method == 'cosMax':
        clf = cosMax(args.out_dim, args.test_n_way, s).to(device)
    elif args.method == 'arcMax':
        clf = arcMax(args.out_dim, args.test_n_way, s, m).to(device)
    else:
        # Fail loudly instead of hitting UnboundLocalError on `return clf`.
        raise ValueError(f"unknown method: {args.method!r}")
    if frozen_net:
        optimizer = torch.optim.Adam(clf.parameters(), lr=args.lr)
    else:
        # Use the same fully-qualified reference in both branches
        # (the original mixed `torch.optim.Adam` and a bare `Adam`).
        optimizer = torch.optim.Adam(
            [{'params': net.parameters()}, {'params': clf.parameters()}],
            lr=args.lr,
        )
    return clf, optimizer