# NOTE(review): this physical line is a whitespace-mangled (collapsed) chunk of
# top-level script code.  Logically it contains, in order: the tail of an
# if/elif chain mapping args.quantize_algo to a `quantize_algo` callable
# ("kmeans_nnz_fixed_0_center" -> kmeans_nnz_fixed_0_center,
#  "model_size_quant" -> pt.quantize_admm_ms, otherwise None); derivation of
# `quantize_name`; a commented-out `descripter` format string (kept disabled,
# descripter = ''); base model / weight-name setup mirroring the block below;
# and `model_path` resolution driven by args.raw_train / args.model_name /
# args.defend_algo.  It begins mid elif-chain (the opening `if` is outside this
# view) and ends with a dangling `if args.abc_special:` whose body follows
# elsewhere, so it cannot be safely reformatted in isolation — reconstruct it
# manually against the original (pre-mangling) file.
elif args.quantize_algo == "kmeans_nnz_fixed_0_center": quantize_algo = kmeans_nnz_fixed_0_center elif args.quantize_algo == "model_size_quant": quantize_algo = pt.quantize_admm_ms else: quantize_algo = None quantize_name = "None" if args.quantize_algo is None else args.quantize_algo # descripter = "{}_proj_{}_nnz_{}_quant_{}_bits_{}_"\ # .format(defend_name, prune_name, args.prune_ratio, quantize_name, args.quantize_bits) \ # if args.prune_algo is not None or args.quantize_algo is not None else "" descripter = '' model_base = CLdense() weight_name = ["weight"] if not args.abc_special else ["weightA", "weightB", "weightC"] weight_name = ["weightA", "weightB"] if args.lr_special else weight_name if args.raw_train: pass else: if args.model_name is None: if args.defend_algo is not None: model_path = os.path.join(args.loaddir, args.defend_algo + "_densepretrain.pth") else: model_path = os.path.join(args.loaddir, '_densepretrain.pth') else: model_path = os.path.join(args.loaddir, args.prefix_name + descripter + args.model_name) if args.abc_special:
# --- CLI arguments (continuation of an argparse parser defined earlier) ---
parser.add_argument('--prune_ratio', type=float, default=0.1, help='sparse ratio or energy budget')
parser.add_argument('--loaddir', default='log/default', help='folder to load the log')
parser.add_argument('--savedir', default=None, help="folder to save the log")
parser.add_argument('--data_root', default='/media/hdd/mnist/', help='folder to save the data')
parser.add_argument('--model_name', default=None, help="file name of pre-train model")
parser.add_argument('--prefix_name', default="", help="save model name after training")
parser.add_argument("-e", "--exp_logger", default=None, help="exp results stored to")
args = parser.parse_args()

# Restrict visible GPUs before any CUDA work happens.
# NOTE(review): `--gpu` is not declared among the visible arguments above —
# presumably added earlier in the file; confirm it exists and is a string.
os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu

# model:
# Base dense model; which parameter names the pipeline operates on depends on
# the factorization variant selected by the flags.
model_base = CLdense()
weight_name = ["weight"] if not args.abc_special else ["weightA", "weightB", "weightC"]
weight_name = ["weightA", "weightB"] if args.lr_special else weight_name

# Checkpoint to load.
# NOTE(review): --model_name defaults to None, so this concatenation raises
# TypeError unless the caller always passes --model_name — confirm intent.
model_path = os.path.join(args.loaddir, args.prefix_name + args.model_name)

if args.abc_special:
    # ABC-factorized model: ranks are derived from the dense base model.
    ranks_up = model_base.get_ranks()
    model = CLabcv2(ranks_up)
elif args.lr_special:
    # Low-rank model: ranks were pickled alongside the checkpoint as <stem>.npy.
    with open(os.path.join(args.loaddir, args.model_name[0:-4] + ".npy"), "rb") as filestream:
        ranks_up = pickle.load(filestream)
    model = CLlr(ranks_up)
else:
    model = model_base

model.load_state_dict(torch.load(model_path))