Beispiel #1
0
def pruning_process(model):
    """Prune conv and/or fc layers of *model*, retrain, and log everything.

    args.model_mode selects what to prune: 'c' = conv layers only,
    'd' = dense (fc) layers only, 'a' = both.  The per-layer numbers are
    the percentage of weights to KEEP, so q = 100 - keep is the percentile
    threshold below which weights are zeroed.

    Returns the pruned-and-retrained model.
    """
    print("------------------------- Before pruning --------------------------------")
    criterion = nn.CrossEntropyLoss().cuda()
    accuracy = util.validate(args, test_loader, model, criterion)

    print("------------------------- pruning --------------------------------------")
    if args.model_mode == 'c' or args.model_mode == 'a':
        # Per-layer keep ratios were presumably tuned by hand — TODO confirm source.
        model.prune_by_percentile(['conv1'], q=100-58.0)
        model.prune_by_percentile(['conv2'], q=100-22.0)
        model.prune_by_percentile(['conv3'], q=100-34.0)
        model.prune_by_percentile(['conv4'], q=100-36.0)
        model.prune_by_percentile(['conv5'], q=100-53.0)
        model.prune_by_percentile(['conv6'], q=100-24.0)
        model.prune_by_percentile(['conv7'], q=100-42.0)
        model.prune_by_percentile(['conv8'], q=100-32.0)
        model.prune_by_percentile(['conv9'], q=100-27.0)
        model.prune_by_percentile(['conv10'], q=100-34.0)
        model.prune_by_percentile(['conv11'], q=100-35.0)
        model.prune_by_percentile(['conv12'], q=100-29.0)
        model.prune_by_percentile(['conv13'], q=100-36.0)
    if args.model_mode == 'd' or args.model_mode == 'a':
        model.prune_by_percentile(['fc1'], q=100-10.0)
        model.prune_by_percentile(['fc2'], q=100-10.0)
        model.prune_by_percentile(['fc3'], q=100-10.0)

    print("------------------------- After pruning --------------------------------")
    util.print_nonzeros(model, f"{args.save_dir}/{args.log}")
    accuracy = util.validate(args, test_loader, model, criterion)
    torch.save(model, f"{args.save_dir}/model_pruned.ptmodel")

    util.log(f"{args.save_dir}/{args.log}", f"weight\t{args.save_dir}/{args.out_pruned_folder}")
    util.log(f"{args.save_dir}/{args.log}", f"model\t{args.save_dir}/model_pruned.ptmodel")
    util.log(f"{args.save_dir}/{args.log}", f"accuracy after pruning\t{accuracy}")

    util.layer2torch(model, f"{args.save_dir}/{args.out_pruned_folder}")
    weight_list = util.parameters2list(model.children())
    util.save_parameters(f"{args.save_dir}/{args.out_pruned_folder}", weight_list)

    # Retrain the surviving weights so accuracy recovers after pruning.
    print("------------------------- Prune and Retrain ----------------------------")
    tok = "prune_re"
    util.initial_train(model, args, train_loader, test_loader, tok, use_cuda=True)

    print("------------------------- After Retraining -----------------------------")
    util.print_nonzeros(model, f"{args.save_dir}/{args.log}")
    accuracy = util.validate(args, test_loader, model, criterion)
    torch.save(model, f"{args.save_dir}/model_prune_retrain_{args.reepochs}.ptmodel")

    util.log(f"{args.save_dir}/{args.log}", f"weight\t{args.save_dir}/{args.out_pruned_re_folder}")
    # BUG FIX: this log line previously said "mmodel_prune_retrain_..." which
    # does not match the filename actually saved with torch.save() above.
    util.log(f"{args.save_dir}/{args.log}", f"model\t{args.save_dir}/model_prune_retrain_{args.reepochs}.ptmodel")
    util.log(f"{args.save_dir}/{args.log}", f"accuracy after prune retrain\t{accuracy}")

    util.layer2torch(model, f"{args.save_dir}/{args.out_pruned_re_folder}")
    weight_list = util.parameters2list(model.children())
    util.save_parameters(f"{args.save_dir}/{args.out_pruned_re_folder}", weight_list)

    return model
Beispiel #2
0
def train_and_report(model_name, kernel, warp, ard):
    """Train a GP model on each of the 10 CV folds of DATASET and save results.

    For every fold: load the train/test splits, optionally warm-start an ARD
    model from the matching isotropic run, train, then write params, grads,
    metrics, cautious curves, predictions and asymmetric metrics into the
    per-fold output directory.

    FIX: converted from Python 2 (`print` statement, `xrange`) to Python 3,
    consistent with the rest of this file (which uses f-strings).
    """
    dataset_dir = os.path.join(MODEL_DIR, DATASET)
    try:
        os.makedirs(dataset_dir)
    except OSError:
        # Best-effort: directory most likely already exists from a prior run.
        print("skipping output folder")
    for fold in range(10):
        fold_dir = os.path.join(SPLIT_DIR, DATASET, str(fold))
        train_data = np.loadtxt(os.path.join(fold_dir, 'train'))
        test_data = np.loadtxt(os.path.join(fold_dir, 'test'))
        params_file = None
        output_dir = os.path.join(dataset_dir, str(fold))
        try:
            os.makedirs(output_dir)
        except OSError:
            print("skipping output folder")

        if ard:
            # Seed the ARD model from the isotropic (ard=False) run's params;
            # assumes output_dir embeds the ard flag as 'True'/'False' — TODO confirm.
            iso_dir = output_dir.replace('True', 'False')
            params_file = os.path.join(iso_dir, 'params')
        gp = util.train_gp_model(train_data, kernel, warp, ard, params_file)
        util.save_parameters(gp, os.path.join(output_dir, 'params'))
        util.save_gradients(gp, os.path.join(output_dir, 'grads'))
        metrics = util.get_metrics(gp, test_data)

        util.save_metrics(metrics, os.path.join(output_dir, 'metrics'))
        util.save_cautious_curves(gp, test_data, os.path.join(output_dir, 'curves'))
        util.save_predictions(gp, test_data, os.path.join(output_dir, 'preds'))

        asym_metrics = util.get_asym_metrics(gp, test_data)
        util.save_asym_metrics(asym_metrics, os.path.join(output_dir, 'asym_metrics'))
        gc.collect(2)  # buggy GPy has allocation cycles...
Beispiel #3
0
def initial_process(model):
    """Run the initial (pre-compression) training pass on *model*.

    Trains with util.initial_train, validates, saves the full model and the
    per-layer weights, and appends paths/accuracy to the run log.
    Returns the trained model.
    """
    print(model)
    util.print_model_parameters(model)
    print(
        "------------------------- Initial training -------------------------------"
    )
    tok = "initial"
    criterion = nn.CrossEntropyLoss().cuda()
    util.initial_train(model, args, train_loader, test_loader, tok, use_cuda=True)
    accuracy = util.validate(args, test_loader, model, criterion)
    torch.save(model, f"{args.save_dir}/model_initial_end.ptmodel")

    # Hoist the two paths used repeatedly below.
    log_file = f"{args.save_dir}/{args.log}"
    weight_dir = f"{args.save_dir}/{args.out_oldweight_folder}"
    util.log(log_file, f"weight:{weight_dir}")
    util.log(log_file, f"model:{args.save_dir}/model_initial_end.ptmodel")
    util.log(log_file, f"initial_accuracy {accuracy}")

    util.layer2torch(model, weight_dir)
    util.save_parameters(weight_dir, util.parameters2list(model.children()))

    return model
Beispiel #4
0
def train_and_report(model_name, kernel, warp, ard, likelihood='gaussian'):
    """Train a GP with the given likelihood on each of the 10 CV folds.

    Like the plain variant but passes *likelihood* through to
    util.train_gp_model and trains before creating the output directory.

    FIX: converted from Python 2 (`print` statement, `xrange`) to Python 3,
    consistent with the rest of this file (which uses f-strings).
    """
    dataset_dir = os.path.join(MODEL_DIR, DATASET)
    try:
        os.makedirs(dataset_dir)
    except OSError:
        # Best-effort: directory most likely already exists from a prior run.
        print("skipping output folder")
    for fold in range(10):
        fold_dir = os.path.join(SPLIT_DIR, DATASET, str(fold))
        train_data = np.loadtxt(os.path.join(fold_dir, 'train'))
        test_data = np.loadtxt(os.path.join(fold_dir, 'test'))
        output_dir = os.path.join(dataset_dir, str(fold))
        params_file = None
        if ard:
            # Seed the ARD model from the isotropic (ard=False) run's params;
            # assumes output_dir embeds the ard flag as 'True'/'False' — TODO confirm.
            iso_dir = output_dir.replace('True', 'False')
            params_file = os.path.join(iso_dir, 'params')
        gp = util.train_gp_model(train_data, kernel, warp, ard, params_file, likelihood=likelihood)
        metrics = util.get_metrics(gp, test_data)

        try:
            os.makedirs(output_dir)
        except OSError:
            print("skipping output folder")
        util.save_parameters(gp, os.path.join(output_dir, 'params'))
        util.save_metrics(metrics, os.path.join(output_dir, 'metrics'))
        #util.save_gradients(gp, os.path.join(output_dir, 'grads'))
        util.save_cautious_curves(gp, test_data, os.path.join(output_dir, 'curves'))
        util.save_predictions(gp, test_data, os.path.join(output_dir, 'preds'))
def pruning_process(model):
    """Prune conv1/conv2 of *model*, retrain, and log/save at each step.

    The numbers below are the percentage of weights to KEEP per layer, so
    q = 100 - keep is the percentile below which weights are zeroed.
    Returns the pruned-and-retrained model.
    """
    log_file = f"{args.save_dir}/{args.log}"

    print(
        "------------------------- Before pruning --------------------------------"
    )
    util.print_nonzeros(model, log_file)
    accuracy = util.validate(val_loader, model, args)

    print(
        "------------------------- pruning CNN--------------------------------------"
    )
    for layer, keep in (('conv1', 66.0), ('conv2', 12.0)):
        model.prune_by_percentile([layer], q=100 - keep)

    print(
        "------------------------------- After prune CNN ----------------------------"
    )
    util.print_nonzeros(model, log_file)
    prec1 = util.validate(val_loader, model, args)
    torch.save(model, f"{args.save_dir}/model_pruned.ptmodel")

    pruned_dir = f"{args.save_dir}/{args.out_pruned_folder}"
    util.log(log_file, f"weight\t{pruned_dir}")
    util.log(log_file, f"model\t{args.save_dir}/model_pruned.ptmodel")
    util.log(log_file, f"prune acc\t{prec1}")

    util.layer2torch(pruned_dir, model)
    util.save_parameters(pruned_dir, util.parameters2list(model.children()))

    print(
        "------------------------- start retrain after prune CNN----------------------------"
    )
    util.initial_train(model, args, train_loader, val_loader, 'prune_re')

    print(
        "------------------------- After Retraining -----------------------------"
    )
    util.print_nonzeros(model, log_file)
    accuracy = util.validate(val_loader, model, args)
    torch.save(model, f"{args.save_dir}/model_prune_retrain_{args.reepochs}.ptmodel")

    retrain_dir = f"{args.save_dir}/{args.out_pruned_re_folder}"
    util.log(log_file, f"weight\t{retrain_dir}")
    util.log(log_file, f"model\t{args.save_dir}/model_prune_retrain_{args.reepochs}.ptmodel")
    util.log(log_file, f"prune and retrain acc\t{accuracy}")

    util.layer2torch(retrain_dir, model)
    util.save_parameters(retrain_dir, util.parameters2list(model.children()))

    return model
Beispiel #6
0
def quantize_process(model):
    """Apply weight sharing (quantization) to *model*, then retrain the centroids.

    Quantizes with apply_weight_sharing using args.bits cluster bits and
    args.model_mode, logs/saves the shared weights, retrains the quantized
    model with util.quantized_retrain, and saves/logs the retrained result.
    Returns the retrained model.
    """
    #util.print_model_parameters(model)
    print(
        '---------------------- Before weight sharing ---------------------------'
    )
    criterion = nn.CrossEntropyLoss().cuda()
    acc = util.validate(args, test_loader, model, criterion)
    util.log(f"{args.save_dir}/{args.log}",
             f"accuracy before weight sharing\t{acc}")

    # Weight sharing: cluster weights and replace them by shared centroids.
    old_weight_list, new_weight_list, quantized_index_list, quantized_center_list = apply_weight_sharing(
        model, args.model_mode, args.bits)

    print(
        '----------------------- After weight sharing ---------------------------'
    )
    acc = util.validate(args, test_loader, model, criterion)
    torch.save(model, f"{args.save_dir}/model_quantized.ptmodel")

    util.log(f"{args.save_dir}/{args.log}",
             f"weight\t{args.save_dir}/{args.out_quantized_folder}")
    util.log(f"{args.save_dir}/{args.log}",
             f"model\t{args.save_dir}/model_quantized.ptmodel")
    util.log(f"{args.save_dir}/{args.log}",
             f"accuracy after weight sharing {args.bits}bits\t{acc}")

    util.layer2torch(model, f"{args.save_dir}/{args.out_quantized_folder}")
    util.save_parameters(f"{args.save_dir}/{args.out_quantized_folder}",
                         new_weight_list)

    print(
        '----------------------- quantize retrain -------------------------------'
    )
    # NOTE(review): `use_cuda` is a bare module-level name here; other call
    # sites in this file pass use_cuda=True explicitly — confirm it is defined.
    util.quantized_retrain(model, args, quantized_index_list,
                           quantized_center_list, train_loader, use_cuda)
    #acc = util.test(model, test_loader, use_cuda=True)
    acc = util.validate(args, test_loader, model, criterion)
    torch.save(
        model,
        f"{args.save_dir}/model_quantized_retrain{args.reepochs}.ptmodel")
    util.layer2torch(model, f"{args.save_dir}/{args.out_quantized_re_folder}")

    util.log(f"{args.save_dir}/{args.log}",
             f"weight\t{args.save_dir}/{args.out_quantized_re_folder}")
    # BUG FIX: previously logged "model_quantized_bit{bits}_retrain...", a
    # filename that is never written; log the path actually saved above.
    util.log(
        f"{args.save_dir}/{args.log}",
        f"model\t{args.save_dir}/model_quantized_retrain{args.reepochs}.ptmodel"
    )
    util.log(f"{args.save_dir}/{args.log}",
             f"accuracy retrain after weight sharing\t{acc}")

    weight_list = util.parameters2list(model.children())
    util.save_parameters(f"{args.save_dir}/{args.out_quantized_re_folder}",
                         weight_list)

    return model
Beispiel #7
0
def quantize_process(model):
    """Quantize fc1-fc3 of *model* via weight sharing, then retrain.

    The fc weights appear to be stored under a row/column permutation
    (model.rowp*/colp* with inverses invrow*/invcol*): the inverse
    permutation is applied before clustering and the forward permutation is
    re-applied afterwards — TODO confirm against the model definition.
    The order (un-permute -> share -> re-permute) is load-bearing.
    Returns the retrained model.
    """
    print('------------------------------- accuracy before weight sharing ----------------------------------')
    acc = util.validate(val_loader, model, args)
    util.log(f"{args.save_dir}/{args.log}", f"accuracy before weight sharing\t{acc}")

    # NOTE(review): banner typo "accuacy" (runtime string, left unchanged here).
    print('------------------------------- accuacy after weight sharing -------------------------------')

    # Undo the stored row/column permutation of each fc layer so that
    # apply_weight_sharing clusters the weights in their canonical layout.
    tempfc1=torch.index_select(model.fc1.weight, 0, model.invrow1.cuda())
    model.fc1.weight=torch.nn.Parameter(torch.index_select(tempfc1, 1, model.invcol1.cuda()))
    tempfc2=torch.index_select(model.fc2.weight, 0, model.invrow2.cuda())
    model.fc2.weight=torch.nn.Parameter(torch.index_select(tempfc2, 1, model.invcol2.cuda()))
    tempfc3=torch.index_select(model.fc3.weight, 0, model.invrow3.cuda())
    model.fc3.weight=torch.nn.Parameter(torch.index_select(tempfc3, 1, model.invcol3.cuda()))

    # Cluster weights into 2**args.bits shared centroids per args.model_mode.
    old_weight_list, new_weight_list, quantized_index_list, quantized_center_list = apply_weight_sharing(model, args.model_mode, args.bits)

    # Re-apply the forward row/column permutation so the model matches the
    # layout expected by the rest of the pipeline.
    temp1=torch.index_select(model.fc1.weight, 0, model.rowp1.cuda())
    model.fc1.weight=torch.nn.Parameter(torch.index_select(temp1, 1, model.colp1.cuda()))
    temp2=torch.index_select(model.fc2.weight, 0, model.rowp2.cuda())
    model.fc2.weight=torch.nn.Parameter(torch.index_select(temp2, 1, model.colp2.cuda()))
    temp3=torch.index_select(model.fc3.weight, 0, model.rowp3.cuda())
    model.fc3.weight=torch.nn.Parameter(torch.index_select(temp3, 1, model.colp3.cuda()))

    acc = util.validate(val_loader, model, args)
    # Checkpoint the quantized (pre-retrain) model.
    util.save_checkpoint({
        'state_dict': model.state_dict(),
        'best_prec1': acc,
    }, True, filename=os.path.join(args.save_dir, 'checkpoint_{}_alpha_{}.tar'.format('quantized',args.alpha)))

    util.log(f"{args.save_dir}/{args.log}", f"weight\t{args.save_dir}/{args.out_quantized_folder}")
    util.log(f"{args.save_dir}/{args.log}", f"model\t{args.save_dir}/model_quantized.ptmodel")
    util.log(f"{args.save_dir}/{args.log}", f"accuracy after weight sharing {args.bits}bits\t{acc}")

    util.layer2torch(f"{args.save_dir}/{args.out_quantized_folder}" , model)
    util.save_parameters(f"{args.save_dir}/{args.out_quantized_folder}", new_weight_list)

    print('------------------------------- retraining -------------------------------------------')

    # Retrain while keeping each weight tied to its shared centroid.
    util.quantized_retrain(model, args, quantized_index_list, quantized_center_list, train_loader, val_loader)

    acc = util.validate(val_loader, model, args)
    util.save_checkpoint({
        'state_dict': model.state_dict(),
        'best_prec1': acc,
    }, True, filename=os.path.join(args.save_dir, 'checkpoint_{}_alpha_{}.tar'.format('quantized_re',args.alpha)))

    util.layer2torch(f"{args.save_dir}/{args.out_quantized_re_folder}" , model)

    util.log(f"{args.save_dir}/{args.log}", f"weight:{args.save_dir}/{args.out_quantized_re_folder}")
    util.log(f"{args.save_dir}/{args.log}", f"model:{args.save_dir}/model_quantized_bit{args.bits}_retrain{args.reepochs}.ptmodel")
    # NOTE(review): log-message typo "qauntize" (runtime string, left unchanged).
    util.log(f"{args.save_dir}/{args.log}", f"acc after qauntize and retrain\t{acc}")

    weight_list = util.parameters2list(model.children())
    util.save_parameters(f"{args.save_dir}/{args.out_quantized_re_folder}", weight_list)
    return model
Beispiel #8
0
def pruning_process(model):
    """Prune all 13 conv layers of *model*, checkpoint, retrain, and log.

    The table below holds the percentage of weights to KEEP per layer;
    q = 100 - keep is the percentile below which weights are zeroed.
    Returns the pruned-and-retrained model.
    """
    log_file = f"{args.save_dir}/{args.log}"

    print("------------------------- Before pruning --------------------------------")
    util.print_nonzeros(model, log_file)
    accuracy = util.validate(val_loader, model, args)

    print("------------------------- pruning CNN--------------------------------------")
    keep_percent = {
        'conv1': 58.0, 'conv2': 22.0, 'conv3': 34.0, 'conv4': 36.0,
        'conv5': 53.0, 'conv6': 24.0, 'conv7': 42.0, 'conv8': 32.0,
        'conv9': 27.0, 'conv10': 34.0, 'conv11': 35.0, 'conv12': 29.0,
        'conv13': 36.0,
    }
    for layer, keep in keep_percent.items():
        model.prune_by_percentile([layer], q=100 - keep)
    print("------------------------------- After prune CNN ----------------------------")
    util.print_nonzeros(model, log_file)

    prec1 = util.validate(val_loader, model, args)

    # Checkpoint the pruned (pre-retrain) model.
    util.save_checkpoint(
        {'state_dict': model.state_dict(), 'best_prec1': prec1},
        True,
        filename=os.path.join(args.save_dir, 'checkpoint_{}_alpha_{}.tar'.format('pruned', args.alpha)))

    pruned_dir = f"{args.save_dir}/{args.out_pruned_folder}"
    util.log(log_file, f"weight\t{pruned_dir}")
    util.log(log_file, f"model\t{args.save_dir}/model_pruned.ptmodel")
    util.log(log_file, f"prune acc\t{prec1}")

    util.layer2torch(pruned_dir, model)
    util.save_parameters(pruned_dir, util.parameters2list(model.children()))

    print("------------------------- start retrain after prune CNN----------------------------")
    util.initial_train(model, args, train_loader, val_loader, 'prune_re')

    print("------------------------- After Retraining -----------------------------")
    util.print_nonzeros(model, log_file)
    accuracy = util.validate(val_loader, model, args)

    retrain_dir = f"{args.save_dir}/{args.out_pruned_re_folder}"
    util.log(log_file, f"weight\t{retrain_dir}")
    util.log(log_file, f"model\t{args.save_dir}/model_prune_retrain_{args.reepochs}.ptmodel")
    util.log(log_file, f"prune and retrain acc\t{accuracy}")

    util.layer2torch(retrain_dir, model)
    util.save_parameters(retrain_dir, util.parameters2list(model.children()))

    return model
Beispiel #9
0
def quantize_process(model):
    """Apply weight sharing to *model*, retrain the shared weights, log all.

    Quantizes with apply_weight_sharing (args.model_mode, args.bits),
    saves/logs the shared weights, retrains with util.quantized_retrain,
    and saves/logs the retrained result.  Returns the retrained model.
    """
    print(
        '------------------------------- accuracy before weight sharing ----------------------------------'
    )
    acc = util.validate(val_loader, model, args)
    util.log(f"{args.save_dir}/{args.log}",
             f"accuracy before weight sharing\t{acc}")

    # BUG FIX: banner previously read "accuacy".
    print(
        '------------------------------- accuracy after weight sharing -------------------------------'
    )

    # Cluster weights into 2**args.bits shared centroids per args.model_mode.
    old_weight_list, new_weight_list, quantized_index_list, quantized_center_list = apply_weight_sharing(
        model, args.model_mode, args.bits)

    acc = util.validate(val_loader, model, args)

    util.log(f"{args.save_dir}/{args.log}",
             f"weight\t{args.save_dir}/{args.out_quantized_folder}")
    util.log(f"{args.save_dir}/{args.log}",
             f"model\t{args.save_dir}/model_quantized.ptmodel")
    util.log(f"{args.save_dir}/{args.log}",
             f"accuracy after weight sharing {args.bits}bits\t{acc}")

    util.layer2torch(f"{args.save_dir}/{args.out_quantized_folder}", model)
    util.save_parameters(f"{args.save_dir}/{args.out_quantized_folder}",
                         new_weight_list)

    print(
        '------------------------------- retraining -------------------------------------------'
    )

    # Retrain while keeping each weight tied to its shared centroid.
    util.quantized_retrain(model, args, quantized_index_list,
                           quantized_center_list, train_loader, val_loader)

    acc = util.validate(val_loader, model, args)
    util.layer2torch(f"{args.save_dir}/{args.out_quantized_re_folder}", model)

    util.log(f"{args.save_dir}/{args.log}",
             f"weight:{args.save_dir}/{args.out_quantized_re_folder}")
    util.log(
        f"{args.save_dir}/{args.log}",
        f"model:{args.save_dir}/model_quantized_retrain{args.reepochs}.ptmodel"
    )
    # BUG FIX: log message previously read "qauntize".
    util.log(f"{args.save_dir}/{args.log}",
             f"acc after quantize and retrain\t{acc}")

    weight_list = util.parameters2list(model.children())
    util.save_parameters(f"{args.save_dir}/{args.out_quantized_re_folder}",
                         weight_list)
    return model
Beispiel #10
0
def initial_process(model):
    """Train *model* from scratch, validate, and log/save the weights.

    Returns the trained model.
    """
    print(model)
    util.print_model_parameters(model)
    print("------------------------- Initial training -------------------------------")
    util.initial_train(model, args, train_loader, val_loader, 'initial')
    accuracy = util.validate(val_loader, model, args)

    # Hoist the paths used repeatedly below.
    log_file = f"{args.save_dir}/{args.log}"
    weight_dir = f"{args.save_dir}/{args.out_oldweight_folder}"
    util.log(log_file, f"weight\t{weight_dir}")
    util.log(log_file, f"model\t{args.save_dir}/model_initial_end.ptmodel")
    util.log(log_file, f"initial_accuracy\t{accuracy}")

    util.layer2torch(weight_dir, model)
    util.save_parameters(weight_dir, util.parameters2list(model.children()))
    return model
def quantize_process(model):
    """Quantize fc1/fc2 of *model* via weight sharing, then retrain.

    The fc weights appear to be stored under a row/column permutation
    (model.rowp*/colp* with inverses invrow*/invcol*): the inverse
    permutation is applied before clustering and the forward permutation is
    re-applied afterwards — TODO confirm against the model definition.
    The order (un-permute -> share -> re-permute) is load-bearing.
    Returns the retrained model.
    """
    print(
        '------------------------------- accuracy before weight sharing ----------------------------------'
    )
    acc = util.validate(val_loader, model, args)
    util.log(f"{args.save_dir}/{args.log}",
             f"accuracy before weight sharing\t{acc}")

    # NOTE(review): banner typo "accuacy" (runtime string, left unchanged here).
    print(
        '------------------------------- accuacy after weight sharing -------------------------------'
    )
    # Undo the stored row/column permutation of fc1/fc2 so that
    # apply_weight_sharing clusters the weights in their canonical layout.
    tempfc1 = torch.index_select(model.fc1.weight, 0, model.invrow1.cuda())
    model.fc1.weight = torch.nn.Parameter(
        torch.index_select(tempfc1, 1, model.invcol1.cuda()))
    tempfc2 = torch.index_select(model.fc2.weight, 0, model.invrow2.cuda())
    model.fc2.weight = torch.nn.Parameter(
        torch.index_select(tempfc2, 1, model.invcol2.cuda()))

    # Cluster weights into 2**args.bits shared centroids per args.model_mode.
    old_weight_list, new_weight_list, quantized_index_list, quantized_center_list = apply_weight_sharing(
        model, args.model_mode, args.bits)

    # Re-apply the forward row/column permutation so the model matches the
    # layout expected by the rest of the pipeline.
    temp1 = torch.index_select(model.fc1.weight, 0, model.rowp1.cuda())
    model.fc1.weight = torch.nn.Parameter(
        torch.index_select(temp1, 1, model.colp1.cuda()))
    temp2 = torch.index_select(model.fc2.weight, 0, model.rowp2.cuda())
    model.fc2.weight = torch.nn.Parameter(
        torch.index_select(temp2, 1, model.colp2.cuda()))

    acc = util.validate(val_loader, model, args)

    torch.save(model, f"{args.save_dir}/model_quantized.ptmodel")

    util.log(f"{args.save_dir}/{args.log}",
             f"weight\t{args.save_dir}/{args.out_quantized_folder}")
    util.log(f"{args.save_dir}/{args.log}",
             f"model\t{args.save_dir}/model_quantized.ptmodel")
    util.log(f"{args.save_dir}/{args.log}",
             f"accuracy after weight sharing {args.bits}bits\t{acc}")

    util.layer2torch(f"{args.save_dir}/{args.out_quantized_folder}", model)
    util.save_parameters(f"{args.save_dir}/{args.out_quantized_folder}",
                         new_weight_list)

    print(
        '------------------------------- retraining -------------------------------------------'
    )

    # Retrain while keeping each weight tied to its shared centroid.
    util.quantized_retrain(model, args, quantized_index_list,
                           quantized_center_list, train_loader, val_loader)

    acc = util.validate(val_loader, model, args)
    torch.save(
        model,
        f"{args.save_dir}/model_quantized_retrain{args.reepochs}.ptmodel")
    util.layer2torch(f"{args.save_dir}/{args.out_quantized_re_folder}", model)

    util.log(f"{args.save_dir}/{args.log}",
             f"weight:{args.save_dir}/{args.out_quantized_re_folder}")
    util.log(
        f"{args.save_dir}/{args.log}",
        f"model:{args.save_dir}/model_quantized_retrain{args.reepochs}.ptmodel"
    )
    # NOTE(review): log-message typo "qauntize" (runtime string, left unchanged).
    util.log(f"{args.save_dir}/{args.log}",
             f"acc after qauntize and retrain\t{acc}")

    weight_list = util.parameters2list(model.children())
    util.save_parameters(f"{args.save_dir}/{args.out_quantized_re_folder}",
                         weight_list)
    return model