Example #1
def cnn_process():
    use_model = args.cnn_use_model
    util.topic_log(use_model)
    if use_model == 'resnet50':
        pretrain_model = torch_models.resnet50(pretrained=True)
    elif use_model == 'resnet101':
        pretrain_model = torch_models.resnet101(pretrained=True)
    elif use_model == 'resnet152':
        pretrain_model = torch_models.resnet152(pretrained=True)
    elif use_model == 'alexnet':
        pretrain_model = torch_models.alexnet(pretrained=True)
    else:
        raise ValueError(f'unsupported cnn_use_model: {use_model}')
    model = models.FineTuneModel(
        pretrain_model,
        'resnet' if 'resnet' in use_model else use_model).to(args.device)
    # only parameters that still require gradients are optimized
    # (layers frozen by the fine-tune wrapper are skipped)
    optimizer = optim.Adam(filter(lambda p: p.requires_grad,
                                  model.parameters()),
                           lr=args.lr,
                           weight_decay=0.0001)
    best_top1 = 0
    if args.load_model:
        best_top1 = util.load_model(model, optimizer, args,
                                    args.save_model_path)
        print(f'load_model: {args.load_model} ({best_top1})')
    util.model_fit(model, optimizer, args, train_loader, test_loader,
                   best_top1)
    _, predicts = util.test_epoch(model, args, test_loader, get_predicts=True)
    util.evaluate_log(predicts, test_labels)
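FineTuneModel is defined elsewhere in this project; a minimal sketch of what such a wrapper might do, assuming it freezes the pretrained backbone and retrains only a new classifier head (all names and the num_classes default below are hypothetical), is:

import torch.nn as nn

class FineTuneModelSketch(nn.Module):
    """Hypothetical sketch of a fine-tuning wrapper: frozen backbone, new classifier head."""

    def __init__(self, pretrain_model, arch, num_classes=2):
        super().__init__()
        if arch == 'resnet':
            # keep everything up to the global average pool, drop the original fc layer
            self.features = nn.Sequential(*list(pretrain_model.children())[:-1])
            in_features = pretrain_model.fc.in_features
        else:  # e.g. 'alexnet': reuse its convolutional feature extractor
            self.features = pretrain_model.features
            in_features = 256 * 6 * 6
        for p in self.features.parameters():
            p.requires_grad = False  # frozen, so the requires_grad filter in the optimizer skips them
        self.classifier = nn.Linear(in_features, num_classes)

    def forward(self, x):
        x = self.features(x)
        return self.classifier(x.flatten(1))

Freezing the backbone is what makes the requires_grad filter in the Adam call meaningful: only the new head receives gradient updates.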
Example #2
def encoding_process():
    util.topic_log("Accuracy before huffman encoding")
    util.print_nonzeros(model, args.log_file_path)
    top1_acc, top5_acc = util.val_epoch(val_loader, model, args, topk=(1, 5))
    util.log(args.log_file_path,
             f"accuracy before huffman encoding\t{top1_acc} ({top5_acc})")

    util.topic_log("encoding")
    mesa2_huffman_encode_model(model, args)
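mesa2_huffman_encode_model is project code; the underlying idea is ordinary Huffman coding of the quantized weight values, where frequent values receive short codes. A hypothetical, self-contained sketch that only computes per-symbol code lengths (enough to estimate the compressed size) could look like this:

import heapq
import itertools
from collections import Counter

def huffman_code_lengths(symbols):
    """Hypothetical sketch: per-symbol Huffman code lengths derived from symbol frequency."""
    counts = Counter(symbols)
    if len(counts) == 1:                      # degenerate case: a single distinct symbol
        return {sym: 1 for sym in counts}
    lengths = {sym: 0 for sym in counts}
    order = itertools.count()                 # tie-breaker so heapq never compares lists
    heap = [(freq, next(order), [sym]) for sym, freq in counts.items()]
    heapq.heapify(heap)
    while len(heap) > 1:
        freq1, _, group1 = heapq.heappop(heap)
        freq2, _, group2 = heapq.heappop(heap)
        merged = group1 + group2
        for sym in merged:                    # every merge adds one bit to each member's code
            lengths[sym] += 1
        heapq.heappush(heap, (freq1 + freq2, next(order), merged))
    return lengths

Summing counts[s] * lengths[s] over all symbols would then give an estimate of the encoded size in bits.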
Example #3
def part1():
    util.topic_log('part1', 80)
    first_image_path, _ = train_set.image_paths_with_labels[0]
    image_ds = dcmread(first_image_path)
    ori_image_data = image_ds.pixel_array.astype(np.float32)
    trans_image_data = train_set.transform(ori_image_data /
                                           np.max(ori_image_data)).numpy()
    print(f'csv-field: {train_set.csv_df.columns}')
    print(
        f'original image: size ({ori_image_data.shape}), min ({np.min(ori_image_data)}), max ({np.max(ori_image_data)}), mean ({np.mean(ori_image_data)})'
    )
    print(
        f'transformed image: size ({trans_image_data.shape}), min ({np.min(trans_image_data)}), max ({np.max(trans_image_data)}), mean ({np.mean(trans_image_data)})'
    )
Example #4
def pruning_process():
    util.topic_log("Before pruning")
    top1_acc, top5_acc = util.val_epoch(val_loader, model, args, topk=(1, 5))
    util.log(args.log_file_path,
             f"before pruning accuracy\t{top1_acc} ({top5_acc})")

    util.topic_log("Pruning CNN")
    model.prune(args)
    util.topic_log("After prune CNN")
    util.print_nonzeros(model, args.log_file_path)
    top1_acc, top5_acc = util.val_epoch(val_loader, model, args, topk=(1, 5))
    util.log(args.log_file_path,
             f"after pruning accuracy\t{top1_acc} ({top5_acc})")

    util.topic_log("Start retrain after prune CNN")
    util.train(model, args, train_loader, val_loader, 'prune_retrain')

    util.topic_log("After retraining")
    top1_acc, top5_acc = util.val_epoch(val_loader, model, args, topk=(1, 5))
    util.log(args.log_file_path,
             f"after pruning and retrain accuracy\t{top1_acc} ({top5_acc})")
Example #5
def quantize_process():
    util.topic_log("Accuracy before weight sharing")
    top1_acc, top5_acc = util.val_epoch(val_loader, model, args, topk=(1, 5))
    util.log(args.log_file_path,
             f"accuracy before weight sharing\t{top1_acc} ({top5_acc})")

    util.topic_log("Accuracy after weight sharing")
    layer_name_to_quan_indices = apply_weight_sharing(model, args)
    top1_acc, top5_acc = util.val_epoch(val_loader, model, args, topk=(1, 5))
    util.save_masked_checkpoint(model, "quantized", top1_acc, "initial", args)
    util.log(
        args.log_file_path,
        f"accuracy after weight sharing {args.bits}bits\t{top1_acc} ({top5_acc})"
    )

    util.topic_log("Quantize retraining")
    util.quantized_retrain(model, args, layer_name_to_quan_indices,
                           train_loader, val_loader)
    top1_acc, top5_acc = util.val_epoch(val_loader, model, args, topk=(1, 5))
    util.save_masked_checkpoint(model, "quantized", top1_acc, "end", args)
    util.log(args.log_file_path,
             f"accuracy after quantize and retrain\t{top1_acc} ({top5_acc})")
Example #6
def initial_process():
    util.topic_log("Initial training")
    util.train(model, args, train_loader, val_loader, 'initial_train')
    top1_acc, top5_acc = util.val_epoch(val_loader, model, args, topk=(1, 5))
    util.log(args.log_file_path, f"initial_accuracy\t{top1_acc} ({top5_acc})")
Example #7
def part2():
    util.topic_log('part2', 80)
    svm_process()
    kmeans_process()
    random_foreset_process()
    cnn_process()
Example #8
def random_foreset_process():
    util.topic_log('Random Forest')
    model = RandomForestClassifier()
    model.fit(train_data2d, train_labels)
    predicts = model.predict(test_data2d)
    util.evaluate_log(predicts, test_labels)
Example #9
def kmeans_process():
    util.topic_log('Kmeans')
    # note: KMeans assigns arbitrary cluster ids, so predicted 0/1 may be flipped
    # relative to the ground-truth labels when evaluating
    model = KMeans(n_clusters=2, random_state=0)
    model.fit(train_data2d)
    predicts = model.predict(test_data2d)
    util.evaluate_log(predicts, test_labels)
Example #10
def svm_process():
    util.topic_log('SVM')
    model = svm.SVC()
    model.fit(train_data2d, train_labels)
    predicts = model.predict(test_data2d)
    util.evaluate_log(predicts, test_labels)
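The train_data2d, test_data2d, train_labels, and test_labels used by these three classical baselines are prepared elsewhere; a plausible construction, assuming each sample is an image array, is to flatten every image into one feature row (the helper below is hypothetical):

import numpy as np

def to_2d(images):
    """Hypothetical sketch: stack image arrays into an (n_samples, n_features) matrix for scikit-learn."""
    return np.stack([np.asarray(img, dtype=np.float32).reshape(-1) for img in images])

# e.g. train_data2d = to_2d(train_images); test_data2d = to_2d(test_images)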