def test_speed(model, batch_size=64, ctx=mx.gpu()):
    """Measure the total forward-pass time of `model` over one pass of the
    training split.

    Args:
        model: an MXNet gluon network (hybridized in place for speed).
        batch_size: batch size for the data loader.
        ctx: device the batches are copied to before the forward pass.
    """
    train_data_loader, _, _ = dataset.train_valid_test_loader(
        '/home1/CASIA-WebFace/aligned_Webface-112X96', (0.9, 0.05),
        batch_size=batch_size)  # bug fix: was hard-coded to 64, ignoring the parameter
    total = 0
    model.hybridize()
    for batch, label in train_data_loader:
        batch = batch.as_in_context(ctx)
        start = time.time()
        out = model(batch)
        # MXNet execution is asynchronous: without a barrier we would only time
        # how long it takes to enqueue the op, not to run it.
        mx.nd.waitall()
        total += time.time() - start
    print(total)  # print() call form works under both Python 2 and 3
def __init__(self, train_path, model, log_dir=None, ctx=mx.cpu()):
    """Set up training/validation loaders, loss, and logging state.

    Args:
        train_path: root directory of the training image set.
        model: the gluon network to be trained.
        log_dir: directory for logs and model checkpoints.
        ctx: MXNet device context used for training.
    """
    self.train_data_loader = dataset.train_loader(train_path, batch_size=128)
    # Only the validation split from this call is used; train/test are discarded.
    _, self.valid_data_loader, _ = dataset.train_valid_test_loader(
        train_path, (0.9, 0.05), batch_size=64)
    self.model = model
    self.ctx = ctx
    self.criterion = models.AngleLoss()
    self.log_dir = log_dir
    self.p = Printer(log_dir)
    # NOTE(review): os.path.join raises TypeError when log_dir is None even
    # though None is the default — callers apparently always pass a log_dir;
    # confirm before relying on the default.
    self.model_save_path = os.path.join(self.log_dir, "model")
    self.model_saved = False
    self.device_id = 6  # hard-coded GPU id; consider promoting to a parameter
def __init__(self, train_path, test_path, model, log_dir=None, device_id=0):
    """Set up data loaders, loss, and a FilterPrunner for channel pruning.

    Args:
        train_path: root directory split into train/valid/test loaders.
        test_path: path to the held-out test set (stored, not loaded here).
        model: the torch network to prune/fine-tune.
        log_dir: directory for logs and the saved model checkpoint.
        device_id: GPU index used by the trainer.
    """
    self.train_data_loader, self.valid_data_loader, self.test_data_loader\
        = dataset.train_valid_test_loader(train_path)
    self.test_path = test_path
    self.model = model
    self.criterion = torch.nn.CrossEntropyLoss()
    # Prunner wraps the model to rank filters during backprop.
    self.prunner = FilterPrunner(self.model)
    self.model.train()
    self.log_dir = log_dir
    self.p = Printer(log_dir)
    # NOTE(review): os.path.join fails with TypeError when log_dir is None
    # despite the None default — confirm callers always supply log_dir.
    self.model_save_path = os.path.join(self.log_dir, "model")
    self.model_saved = False
    self.device_id = device_id
def init_model(pkl_path):
    """Build a SphereNet20, load weights from a pickle, run one forward/loss
    pass on a validation batch, then save the parameters to disk.

    Args:
        pkl_path: path to the pickled weights consumed by initialize_from.

    Returns:
        The initialized SphereNet20 network.
    """
    mnet = SphereNet20(use_custom_relu=False)
    mnet.initialize_from(pkl_path, mx.gpu())
    train_data_loader, valid_data_loader, test_data_loader \
        = dataset.train_valid_test_loader("../pytorch-pruning/train", batch_size=16)
    # NOTE(review): attribute is `feature` here but `get_feature` is set below
    # and in export_sym — one of the two spellings is likely a typo; confirm
    # which attribute the model actually reads.
    mnet.feature = False
    for batch, label in valid_data_loader:
        batch = batch.as_in_context(mx.gpu())
        label = label.as_in_context(mx.gpu())
        mnet.get_feature = False
        out = mnet(batch)
        criterion = AngleLoss()
        # The loss value is discarded; presumably only the forward/backward
        # graph construction matters here — TODO confirm save_params truly
        # requires this warm-up pass.
        loss = criterion(out[0], out[1], label)
        break  # a single batch is enough
    mnet.save_params("./spherenet_model2")  # hard-coded output path
    return mnet
def export_sym(model_path, ctx=mx.gpu()):
    """Reload a (possibly pruned) SphereNet20 from `model_path`, trace it with
    a single forward pass, and export the hybridized symbol + params.

    Args:
        model_path: directory holding 'model' params and optionally
            'ModelAchi.pkl' (the pruned-architecture description).
        ctx: device to run the tracing forward pass on.
    """
    archi = None
    archi_path = os.path.join(model_path, 'ModelAchi.pkl')
    if os.path.exists(archi_path):
        with open(archi_path, 'rb') as pkl_file:
            archi = pickle.load(pkl_file)
    net = models.SphereNet20(archi_dict=archi)
    net.load_params(os.path.join(model_path, 'model'), ctx=ctx)
    loader, _, _ = dataset.train_valid_test_loader(
        '/home1/CASIA-WebFace/aligned_Webface-112X96', (0.9, 0.05), batch_size=1)
    net.hybridize()
    net.get_feature = False
    # One forward pass records the computation graph required by export().
    for images, _label in loader:
        net(images.as_in_context(ctx))
        break
    net.export(os.path.join(model_path, 'model_symbol'))
def get_args():
    """Parse command-line arguments (path of the pruned model to evaluate)."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--model_path", type=str,
                        default="./log/prune-2018-06-05_175834_f/model_pruned")
    args = parser.parse_args()
    return args


def test(model, data_loader):
    """Compute top-1 accuracy of `model` over `data_loader` on the GPU.

    Args:
        model: a torch network (switched to eval mode here).
        data_loader: iterable of (batch, label) pairs; labels stay on CPU.

    Returns:
        float accuracy in [0, 1].
    """
    model.eval()
    correct = 0
    total = 0
    for i, (batch, label) in enumerate(data_loader):
        batch = batch.cuda()
        # NOTE(review): volatile=True is deprecated in torch >= 0.4 (use
        # torch.no_grad()); kept as-is since this codebase targets older torch.
        output = model(Variable(batch, volatile=True))
        pred = output.data.max(1)[1]
        correct += pred.cpu().eq(label).sum()
        total += label.size(0)
    acc = float(correct) / total
    print("Test Accuracy :%.4f" % (acc))
    return acc


if __name__ == '__main__':
    args = get_args()
    model = torch.load(args.model_path).cuda()
    print(model)  # bug fix: was a Python-2 print statement amid py3-style calls
    _, _, data_loader = dataset.train_valid_test_loader("train")
    acc = test(model, data_loader)
    print(acc)