Example #1
import torch.nn as nn
import torchvision.transforms as transforms
from torch.utils.data import DataLoader

# ------------------------------------ step 1/5 : Load the data ------------------------------------

# Data preprocessing
normMean = [0.4948052, 0.48568845, 0.44682974]
normStd = [0.24580306, 0.24236229, 0.2603115]
normTransform = transforms.Normalize(normMean, normStd)
trainTransform = transforms.Compose([
    transforms.Resize(32),
    transforms.RandomCrop(32, padding=4),
    transforms.ToTensor(), normTransform
])

validTransform = transforms.Compose([transforms.ToTensor(), normTransform])

# Build MyDataset instances
train_data = MyDataset(txt_path=train_txt_path, transform=trainTransform)
valid_data = MyDataset(txt_path=valid_txt_path, transform=validTransform)

# Build the DataLoaders
train_loader = DataLoader(dataset=train_data,
                          batch_size=train_bs,
                          shuffle=True)
valid_loader = DataLoader(dataset=valid_data, batch_size=valid_bs)
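
MyDataset is not defined in this excerpt. Below is a minimal sketch of what it might look like, assuming the index txt file holds one "path label" pair per line; the file layout and the class body are assumptions, not the original implementation:

from PIL import Image
from torch.utils.data import Dataset


class MyDataset(Dataset):
    def __init__(self, txt_path, transform=None):
        # assumed format: each non-empty line is "<image_path> <integer_label>"
        with open(txt_path, 'r') as f:
            self.imgs = [(path, int(label))
                         for path, label in
                         (line.split() for line in f if line.strip())]
        self.transform = transform

    def __getitem__(self, index):
        path, label = self.imgs[index]
        img = Image.open(path).convert('RGB')
        if self.transform is not None:
            img = self.transform(img)
        return img, label

    def __len__(self):
        return len(self.imgs)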

# -------------------------------------- step 2/5 : Define the network -----------------------------------


class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        self.conv1 = nn.Conv2d(3, 6, 5)
        # ... (remaining layers and forward() are omitted in this excerpt)
Example #2
txt_path = '../../Data/visual.txt'
pretrained_path = '../../Data/net_params_72p.pkl'

net = Net()
pretrained_dict = torch.load(pretrained_path)
net.load_state_dict(pretrained_dict)
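
If the checkpoint was saved on a GPU but this script runs on a CPU-only machine, the load call above can fail; a common adjustment (not in the original) is to pin the map location:

# hedged alternative: force the checkpoint tensors onto the CPU
pretrained_dict = torch.load(pretrained_path, map_location=torch.device('cpu'))
net.load_state_dict(pretrained_dict)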

# Data preprocessing
normMean = [0.49139968, 0.48215827, 0.44653124]
normStd = [0.24703233, 0.24348505, 0.26158768]
normTransform = transforms.Normalize(normMean, normStd)
testTransform = transforms.Compose(
    [transforms.Resize((32, 32)),
     transforms.ToTensor(), normTransform])
# Load the data
test_data = MyDataset(txt_path=txt_path, transform=testTransform)
test_loader = DataLoader(dataset=test_data, batch_size=1)
img, label = next(iter(test_loader))

x = img

# Visualize feature maps
features_dict = {}


def get_features(name):
    def hook(model, input, output):
        features_dict[name] = output.detach()

    return hook
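
The excerpt stops after defining the hook factory. Here is a minimal sketch of how it might be wired up, assuming Net defines a forward() that passes the input through conv1:

# register the hook on the first conv layer, run one forward pass, then clean up
handle = net.conv1.register_forward_hook(get_features('conv1'))
with torch.no_grad():
    net(x)  # the hook fills features_dict['conv1'] during this pass
handle.remove()

# for a 1x3x32x32 input, conv1 (3 -> 6 channels, 5x5 kernel) yields [1, 6, 28, 28]
fmaps = features_dict['conv1']
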
Example #3
        dev_instances = prepare_instance_func(
            dicts, args.data_path.replace('train', 'dev'), args,
            args.MAX_LENGTH)
        print("dev_instances {}".format(len(dev_instances)))
    else:
        dev_instances = None
    test_instances = prepare_instance_func(
        dicts, args.data_path.replace('train', 'test'), args, args.MAX_LENGTH)
    print("test_instances {}".format(len(test_instances)))

    if args.model.find("bert") != -1:
        collate_func = my_collate_bert
    else:
        collate_func = my_collate

    train_loader = DataLoader(MyDataset(train_instances),
                              args.batch_size,
                              shuffle=True,
                              collate_fn=collate_func)
    if args.version != 'mimic2':
        dev_loader = DataLoader(MyDataset(dev_instances),
                                1,
                                shuffle=False,
                                collate_fn=collate_func)
    else:
        dev_loader = None
    test_loader = DataLoader(MyDataset(test_instances),
                             1,
                             shuffle=False,
                             collate_fn=collate_func)
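
my_collate and my_collate_bert are defined elsewhere in this repository. Purely as an illustration of the non-BERT path, a hypothetical pad-to-longest collate for (token_ids, label) instances might look like this; the names and instance layout are assumptions:

import torch


def my_collate(batch):
    # batch: list of (token_id_list, label) pairs
    max_len = max(len(ids) for ids, _ in batch)
    inputs = torch.zeros(len(batch), max_len, dtype=torch.long)
    for i, (ids, _) in enumerate(batch):
        inputs[i, :len(ids)] = torch.tensor(ids, dtype=torch.long)
    labels = torch.tensor([label for _, label in batch])
    return inputs, labels
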
Example #4
])
X_abc_test = np.concatenate([X_a_test, X_b_test, X_c_test])
y_abc_test = np.concatenate([y_a_test, y_b_test, y_c_test])
d_abc_test = np.concatenate(
    [np.zeros(len(y_a_test)),
     np.zeros(len(y_b_test)),
     np.ones(len(y_c_test))])
d_a_test = np.ones(len(y_a_test))
d_b_test = np.ones(len(y_b_test))
d_c_test = np.ones(len(y_c_test))
d_d_test = np.ones(len(y_d_test))

transform_train, transform_test = get_transform(task)

# Create the datasets
ds_abc_train = MyDataset(X_abc_train, y_abc_train, d_abc_train,
                         transform_train)

ds_abc_test = MyDataset(X_abc_test, y_abc_test, d_abc_test, transform_test)
ds_a_test = MyDataset(X_a_test, y_a_test, d_a_test, transform_test)
ds_b_test = MyDataset(X_b_test, y_b_test, d_b_test, transform_test)
ds_c_test = MyDataset(X_c_test, y_c_test, d_c_test, transform_test)
ds_d_test = MyDataset(X_d_test, y_d_test, d_d_test, transform_test)

# Create the loaders
loader_abc_train = MyDataLoader(ds_abc_train,
                                batch_size=batch_size,
                                shuffle=True)
loader_abc_test = MyDataLoader(ds_abc_test,
                               batch_size=batch_size,
                               shuffle=False)
loader_a_test = MyDataLoader(ds_a_test, batch_size=batch_size, shuffle=False)
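
get_transform is not shown in this excerpt. A plausible sketch, assuming it maps a task name to an augmented train pipeline and a deterministic test pipeline (the body below is an assumption, not the original):

import torchvision.transforms as transforms


def get_transform(task):
    # hypothetical: light augmentation for training, plain conversion for testing
    transform_train = transforms.Compose([
        transforms.ToTensor(),              # HWC ndarray -> CHW float tensor
        transforms.RandomHorizontalFlip(),  # also operates on tensors
    ])
    transform_test = transforms.Compose([transforms.ToTensor()])
    return transform_train, transform_test
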
Example #5
    train_instances = prepare_instance_func(dicts, args.data_path, args, args.MAX_LENGTH)
    print("train_instances {}".format(len(train_instances)))
    if args.version != 'mimic2':
        dev_instances = prepare_instance_func(dicts, args.data_path.replace('train','dev'), args, args.MAX_LENGTH)
        print("dev_instances {}".format(len(dev_instances)))
    else:
        dev_instances = None
    test_instances = prepare_instance_func(dicts, args.data_path.replace('train','test'), args, args.MAX_LENGTH)
    print("test_instances {}".format(len(test_instances)))

    if args.model.find("bert") != -1:
        collate_func = my_collate_bert
    else:
        collate_func = my_collate

    train_loader = DataLoader(MyDataset(train_instances), args.batch_size, shuffle=True, collate_fn=collate_func)
    if args.version != 'mimic2':
        dev_loader = DataLoader(MyDataset(dev_instances), 1, shuffle=False, collate_fn=collate_func)
    else:
        dev_loader = None
    test_loader = DataLoader(MyDataset(test_instances), 1, shuffle=False, collate_fn=collate_func)

    if not args.test_model and args.model.find("bert") != -1:
        param_optimizer = list(model.named_parameters())
        param_optimizer = [n for n in param_optimizer if 'pooler' not in n[0]]
        no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight']
        optimizer_grouped_parameters = [
            {'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)],
             'weight_decay': 0.01},
            {'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay': 0.0}
        ]
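
The excerpt ends before the optimizer itself is built. A typical continuation over these grouped parameters uses AdamW; the learning rate here is an assumption, not taken from the original script:

from torch.optim import AdamW

# weight decay is applied per group as configured above
optimizer = AdamW(optimizer_grouped_parameters, lr=2e-5)  # lr is a placeholder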