# --- Example #1 ---
import matplotlib.pyplot as plt
import math

# Per-epoch metrics accumulated during training; each key maps to a list
# that grows by one entry per recorded epoch.
learning_history = {
    metric: []
    for metric in (
        'epoch',
        'train_acc',
        'train_total_loss',
        'train_class_loss',
        'train_ae_loss',
        'train_error_1_loss',
        'train_error_2_loss',
        'test_acc',
    )
}

# Load the pretrained 10-prototype checkpoint and prepare SGD fine-tuning.
prototype = "10"
train_net = parameter_use(
    f'./result/pkl/prototype_{prototype}/train_model_epoch500_{prototype}.pkl')
optimizer = optim.SGD(
    train_net.parameters(), lr=0.01, momentum=0.9, weight_decay=5e-4)

# training parameters
num_epochs = 10
save_step = 10
# elastic deformation parameters
sigma = 4
alpha = 20

# Every convolutional layer of the loaded network, in module order.
conv_list = [m for m in train_net.modules() if isinstance(m, nn.Conv2d)]
conv_count = len(conv_list)

# Seed the history with one placeholder record: zero losses and a
# known test accuracy for the loaded checkpoint.
for metric in ('train_total_loss', 'train_class_loss', 'train_ae_loss',
               'train_error_1_loss', 'train_error_2_loss'):
    learning_history[metric].append(0)
learning_history['test_acc'].append(0.9920)

# Switch to the 15-prototype model for the pruning / fine-tuning runs below.
prototype = "15"
# training parameters
num_epochs = 20
save_step = 10
# elastic deformation parameters
sigma = 4
alpha = 20

# Keep a reference to the unpruned classifier weights so each pruned
# checkpoint can have them restored before fine-tuning.
original_net = parameter_use(f'./result/pkl/prototype_{prototype}/train_model_epoch500_{prototype}.pkl')
original_dense_weight = original_net.classifier[0].weight
# NOTE(review): this loop body appears truncated at the fragment boundary
# below — the remainder of each iteration is not visible here.
for k in range(2, 11):
    print(f'count_{k}')
    # Load the k-th pruned checkpoint (conv + prototype pruning, smallest-first order).
    train_net = parameter_use(f'./result/pkl/prototype_{prototype}/prune_dense/not_abs/prune_conv_proto_finetune_from_small/'
                              f'prune_train_model_epoch{k}_{prototype}.pkl')
    optimizer = optim.Adam(train_net.parameters(), lr=0.002)
    # Restore the original dense weights without recording the copy on the
    # autograd graph.
    with torch.no_grad():
        train_net.classifier[0].weight = original_dense_weight
        # train_net.classifier[0].weight.data = torch.tensor(torch.rand(class_num, int(prototype)), device=device, dtype=dtype)

    # Freeze everything, then unfreeze only the prototype vectors and the
    # classifier weights so fine-tuning touches just those parameters.
    for param in train_net.parameters():
        param.requires_grad = False
    train_net.prototype_feature_vectors.requires_grad = True
    for dense in train_net.classifier:
        dense.weight.requires_grad = True
# --- Example #3 ---
    # --- alternative checkpoint paths kept from earlier experiments ----------
    # train_net = parameter_use(f'./result/pkl/prototype_{prototype}/prune_train_model_epoch500_{prototype}.pkl')
    # train_net = parameter_use(f'./result/pkl/3NN_prototype_{prototype}/train_model_epoch500_{prototype}.pkl')
    # train_net = parameter_use(f'./result/pkl/prototype_{prototype}/prune_dense/not_abs/prune_all_finetune_from_small_at_least_1weight/'
    #                           f'prune_train_model_epoch2_{prototype}.pkl')
    # train_net = parameter_use(f'./result/pkl/prototype_{prototype}/prune_dense/prune_finetune_once/prune_negative/'
    #                           f'prune_train_model_epoch2_15.pkl')
    # if offset <= 700:
    #     train_net = parameter_use(f'./result/pkl/prototype_{prototype}/acc_once_epoch50/train_model_epoch{offset+1}_{prototype}.pkl')
    # elif offset == 750:
    #     train_net = parameter_use(f'./result/pkl/prototype_{prototype}/acc_once_epoch50/train_model_epoch{offset}_{prototype}.pkl')
    # if offset <= 750:
    #     offset += 50
    # elif offset > 150:
    #     offset += 10
    # Load the k-th pruned FashionMNIST checkpoint (abs-value, smallest-first
    # prototype pruning order).
    train_net = parameter_use(
        f'./result/pkl/fashionmnist_prototype{prototype}/prune_dense/abs/prune_proto_finetune_from_abs_small/'
        f'prune_train_model_epoch{k}_{prototype}.pkl')
    # train_net = parameter_use(f'./result/pkl/prototype_{prototype}/prune_proto/prune_classifier_finetune_from_abs_large_epoch30/'
    #                           f'prune_train_model_epoch{k}_15.pkl')
    # Transposed so rows index prototypes and columns index classes.
    classifier_weight = train_net.classifier[0].weight.cpu().detach().numpy().T

    # Column labels are the 10 class ids; rows are named A, B, C, ... per prototype.
    class_name = [i for i in range(10)]
    prototype_name = [chr(ord('A') + i) for i in range(int(prototype))]
    df = pd.DataFrame(classifier_weight, columns=class_name)
    # print(df)
    # Table-style figure: width scales with column count, height with row count.
    fig, ax = plt.subplots(figsize=((len(df.columns)) * 1.2, (len(df)) * 0.4))
    ax.axis('off')
    # Color the weights that contribute a lot
    # NOTE(review): the expression below is cut off at the fragment boundary —
    # the rest of the cell-color comprehension is not visible here.
    colors = [[
        "silver" if classifier_weight[j][i] - min(classifier_weight[j]) <
        abs(min(classifier_weight[j])) * 0.4 else "w"
# --- Example #4 ---
from util_func import parameter_use, pruningtestacc_vis, result_save, batch_elastic_transform, list_of_norms, result_save, parameter_save, conv_vis
from loss import *
from dataset import *
from model import class_num, in_channel_num
import torch
import torch.optim as optim
import numpy as np
import matplotlib.pyplot as plt
import math

# Per-epoch metrics recorded during training; one growing list per metric.
learning_history = {
    metric: []
    for metric in (
        'epoch', 'train_acc', 'train_total_loss', 'train_class_loss',
        'train_ae_loss', 'train_error_1_loss', 'train_error_2_loss',
        'test_acc',
    )
}

# Load the pretrained FashionMNIST 15-prototype model for dense-layer pruning.
prototype = "15"
# train_net = parameter_use(f'./result/pkl/prototype_{prototype}/train_model_epoch500_{prototype}.pkl')
train_net = parameter_use(f'./result/pkl/fashionmnist_prototype15/train_model_epoch500_{prototype}.pkl')
# Make sure all classes have weight
# train_net = parameter_use(f'./result/pkl/prototype_{prototype}/prune_dense/prune_not_finetune_from_abs_small/'
#                           f'prune_train_model_epoch9_{prototype}.pkl')
optimizer = optim.Adam(train_net.parameters(), lr=0.002)

# training parameters
num_epochs = 5
save_step = 10
# elastic deformation parameters
sigma = 4
alpha = 20

# Dense classifier weights plus a same-shape mask of ones (1 = keep weight).
weight_matrix = train_net.classifier[0].weight
de_mask = torch.ones(weight_matrix.shape)
# pruning from the large value--------------------------------------------------
# argsort ascending per column, then flip rows so index 0 is the largest weight.
weight_values = weight_matrix.detach().cpu().numpy()
prune_index_list = np.argsort(weight_values, axis=0)[::-1]
# --- Example #5 ---
    # NOTE(review): this dict literal is truncated above the fragment —
    # the opening "learning_history = {" and the 'epoch'/'train_acc'
    # entries are outside this view.
    'train_total_loss': [],
    'train_class_loss': [],
    'train_ae_loss': [],
    'train_error_1_loss': [],
    'train_error_2_loss': [],
    # Per-epoch validation metrics, mirroring the training keys above.
    'val_acc': [],
    'val_total_loss': [],
    'val_class_loss': [],
    'val_ae_loss': [],
    'val_error_1_loss': [],
    'val_error_2_loss': [],
    'test_acc': []
}

# Load the pretrained prototype model for transfer learning and move it to
# the target device; fine-tune every parameter with Adam.
transfer_net = parameter_use(
    f'./result/pkl/prototype_{prototype_num}/train_model_epoch500_{prototype_num}.pkl'
).to(device)
optimizer = optim.Adam(transfer_net.parameters(), lr=0.002)
# Every convolutional layer of the transferred network, in module order.
conv_list = [m for m in transfer_net.modules() if isinstance(m, nn.Conv2d)]

# training parameters
num_epochs = 500
test_display_step = 100
save_step = 50

# elastic deformation parameters
sigma = 4
alpha = 20