Example #1
def homework2(test_dir):
    test_transform = transforms.Compose([
        transforms.Resize((224, 224)),
        AddPepperNoise(0.9, p=0.8),
        transforms.ToTensor(),
    ])
    test_data = RMBDataset(data_dir=test_dir, transform=test_transform)
    test_loader = DataLoader(dataset=test_data, batch_size=1)

    for i, data in enumerate(test_loader):
        inputs, labels = data  # B C H W
        img_tensor = inputs[0, ...]  # C H W
        img = transform_invert(img_tensor, test_transform)
        plt.imshow(img)
        plt.show()
        plt.pause(0.5)
        plt.close()
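AddPepperNoise above is a custom transform that is not defined in any of these snippets. Below is a minimal sketch, assuming AddPepperNoise(snr, p) adds salt-and-pepper noise to a PIL image with probability p and leaves a fraction snr of the pixels untouched; the original implementation may differ.

import random
import numpy as np
from PIL import Image

class AddPepperNoise(object):
    """Add salt-and-pepper noise to a PIL image.

    snr: signal-to-noise ratio, i.e. the fraction of pixels left unchanged.
    p:   probability of applying the transform at all.
    """
    def __init__(self, snr, p=0.9):
        self.snr = snr
        self.p = p

    def __call__(self, img):
        if random.uniform(0, 1) > self.p:
            return img
        img_ = np.array(img).copy()
        h, w, c = img_.shape
        noise_pct = 1 - self.snr
        # 0 = keep the pixel, 1 = salt, 2 = pepper
        mask = np.random.choice((0, 1, 2), size=(h, w, 1),
                                p=[self.snr, noise_pct / 2., noise_pct / 2.])
        mask = np.repeat(mask, c, axis=2)
        img_[mask == 1] = 255   # salt noise
        img_[mask == 2] = 0     # pepper noise
        return Image.fromarray(img_.astype('uint8')).convert('RGB')

It is placed after Resize and before ToTensor in the Compose above because it operates on PIL images, not tensors.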
Example #2
def homework1(test_dir):
    test_transform = transforms.Compose([
        transforms.Resize((224, 224)),
        # 1 Crop
        # transforms.CenterCrop(120),
        # 2 Flip
        # transforms.RandomHorizontalFlip(p=1),
        # 3 Rotation
        # transforms.RandomRotation(45),
        # 4 Hue
        # transforms.ColorJitter(hue=0.4),
        # 5 Saturation
        # transforms.ColorJitter(saturation=50),
        # 6 Grayscale
        # transforms.Grayscale(3),
        # 7 Shear
        # transforms.RandomAffine(0, shear=45),
        # 8 Scale
        # transforms.RandomAffine(0, scale=(0.5, 0.5)),
        # 9 Translation
        # transforms.RandomAffine(0, translate=(0.5, 0)),
        # 10 Erasing (occlusion); RandomErasing works on tensors, so ToTensor must come first
        # transforms.ToTensor(),
        # transforms.RandomErasing(p=0.5, scale=(0.1, 0.4), value=0),
        transforms.ToTensor(),
    ])
    # Build the MyDataset instance
    test_data = RMBDataset(data_dir=test_dir, transform=test_transform)
    test_loader = DataLoader(dataset=test_data, batch_size=1)

    for i, data in enumerate(test_loader):
        inputs, labels = data  # B C H W
        img_tensor = inputs[0, ...]  # C H W
        img = transform_invert(img_tensor, test_transform)
        plt.imshow(img)
        plt.show()
        plt.pause(0.5)
        plt.close()
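transform_invert is used by every example to undo ToTensor/Normalize before plt.imshow, but its definition is not shown anywhere in these snippets. A minimal sketch follows; the course's actual helper may handle more cases.

import torch
from PIL import Image
from torchvision import transforms

def transform_invert(img_tensor, transform_train):
    """Undo Normalize / ToTensor so a transformed C H W tensor can be displayed."""
    # Undo Normalize if it is part of the pipeline
    norms = [t for t in transform_train.transforms if isinstance(t, transforms.Normalize)]
    if norms:
        mean = torch.tensor(norms[0].mean, dtype=img_tensor.dtype, device=img_tensor.device)
        std = torch.tensor(norms[0].std, dtype=img_tensor.dtype, device=img_tensor.device)
        img_tensor = img_tensor * std[:, None, None] + mean[:, None, None]

    # C H W -> H W C, then back to a 0-255 uint8 array
    img_array = img_tensor.permute(1, 2, 0).numpy() * 255
    img_array = img_array.clip(0, 255).astype('uint8')

    if img_array.shape[2] == 3:
        return Image.fromarray(img_array, mode='RGB')
    if img_array.shape[2] == 1:
        return Image.fromarray(img_array.squeeze(axis=2), mode='L')
    raise ValueError('unexpected channel count: {}'.format(img_array.shape[2]))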
Example #3
train_transform = transforms.Compose([
    transforms.Resize((32, 32)),
    transforms.RandomCrop(32, padding=4),
    transforms.RandomGrayscale(p=0.9),
    transforms.ToTensor(),
    transforms.Normalize(norm_mean, norm_std),
])

valid_transform = transforms.Compose([
    transforms.Resize((32, 32)),
    transforms.ToTensor(),
    transforms.Normalize(norm_mean, norm_std),
])

# Build the MyDataset instances
train_data = RMBDataset(data_dir=train_dir, transform=train_transform)
valid_data = RMBDataset(data_dir=valid_dir, transform=valid_transform)

# Build the DataLoaders
train_loader = DataLoader(dataset=train_data, batch_size=BATCH_SIZE, shuffle=True)
valid_loader = DataLoader(dataset=valid_data, batch_size=BATCH_SIZE)

# ============================ step 2/5 Model ============================

net = LeNet(classes=2)
net.initialize_weights()

# ============================ step 3/5 Loss function ============================
criterion = nn.CrossEntropyLoss()  # select the loss function

# ============================ step 4/5 Optimizer ============================

Example #4

train_transform = transforms.Compose([
    # 1 Horizontal Flip
    # transforms.RandomHorizontalFlip(p=1),

    # 2 Vertical Flip
    # transforms.RandomVerticalFlip(p=0.5),

    # 3 RandomRotation
    # transforms.RandomRotation(90),
    # transforms.RandomRotation((90), expand=True),
    # transforms.RandomRotation(30, center=(0, 0)),
    # transforms.RandomRotation(30, center=(0, 0), expand=True),   # expand only for center rotation

    transforms.ToTensor(),
    transforms.Normalize(norm_mean, norm_std),
])

train_data = RMBDataset(data_dir=train_dir, transform=train_transform)
train_loader = DataLoader(dataset=train_data, batch_size=BATCH_SIZE, shuffle=True)

# ============================ step 5/5 Training ============================
for i, data in enumerate(train_loader):
    inputs, labels = data  # B C H W

    img_tensor = inputs[0, ...]  # C H W
    img = transform_invert(img_tensor, train_transform)
    plt.imshow(img)
    plt.show()
    plt.pause(0.5)
    plt.close()

    # bs, ncrops, c, h, w = inputs.shape
    # for n in range(ncrops):
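The commented bs, ncrops lines refer to multi-crop transforms such as transforms.TenCrop. Below is a minimal sketch of how such a pipeline is usually built and how the resulting (B, ncrops, C, H, W) batch is folded for the forward pass; net is an assumption carried over from Example #3, and the crop sizes are placeholders.

import torch
from torchvision import transforms

# TenCrop returns a tuple of 10 PIL crops, so a Lambda is needed to stack them into one tensor
tencrop_transform = transforms.Compose([
    transforms.Resize((256, 256)),
    transforms.TenCrop(224, vertical_flip=False),
    transforms.Lambda(lambda crops: torch.stack([transforms.ToTensor()(c) for c in crops])),
])

# In the loop the batch then has shape (B, ncrops, C, H, W):
# bs, ncrops, c, h, w = inputs.shape
# outputs = net(inputs.view(-1, c, h, w))              # fold the crops into the batch dimension
# outputs_avg = outputs.view(bs, ncrops, -1).mean(1)   # average the predictions over the crops

With this pipeline the DataLoader delivers five-dimensional inputs, which is why the commented loop unpacks five values instead of four.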
Example #5
train_transform = transforms.Compose([
    transforms.Resize((32, 32)),
    transforms.RandomCrop(32, padding=4),
    transforms.ToTensor(),
    transforms.Normalize(norm_mean, norm_std)
])

valid_transform = transforms.Compose([
    transforms.Resize((32, 32)),
    transforms.ToTensor(),
    transforms.Normalize(norm_mean, norm_std)
])

# Build the MyDataset instances
train_data = RMBDataset(data_dir=train_dir,
                        transform=train_transform,
                        label_name=cat_dog_label)
valid_data = RMBDataset(data_dir=valid_dir,
                        transform=valid_transform,
                        label_name=cat_dog_label)

# Build the DataLoaders
train_loader = DataLoader(dataset=train_data,
                          batch_size=BATCH_SIZE,
                          shuffle=True)
valid_loader = DataLoader(dataset=valid_data, batch_size=BATCH_SIZE)

# 2. Build the model
net = LeNet(classes=2)
net.initialize_weights()
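RMBDataset is assumed throughout these examples but never defined. A minimal sketch of such a folder-per-class Dataset follows, assuming label_name is a dict mapping sub-directory names to integer labels (Example #5 suggests something like {"cat": 0, "dog": 1}); the real class may differ, and the default mapping is only a placeholder.

import os
from PIL import Image
from torch.utils.data import Dataset

class RMBDataset(Dataset):
    """Reads <data_dir>/<class_name>/<image> and maps class_name to an integer label."""
    def __init__(self, data_dir, transform=None, label_name=None):
        # the default mapping is an assumption, not taken from the original code
        self.label_name = label_name if label_name is not None else {"1": 0, "100": 1}
        self.data_info = self._get_img_info(data_dir)   # list of (image path, label)
        self.transform = transform

    def __getitem__(self, index):
        path_img, label = self.data_info[index]
        img = Image.open(path_img).convert('RGB')
        if self.transform is not None:
            img = self.transform(img)                    # apply the Compose pipeline
        return img, label

    def __len__(self):
        return len(self.data_info)

    def _get_img_info(self, data_dir):
        data_info = []
        for sub_dir in os.listdir(data_dir):
            class_dir = os.path.join(data_dir, sub_dir)
            if not os.path.isdir(class_dir):
                continue
            for name in os.listdir(class_dir):
                if name.lower().endswith(('.jpg', '.jpeg', '.png')):
                    data_info.append((os.path.join(class_dir, name),
                                      self.label_name[sub_dir]))
        return data_info

Each item is a (transformed image, label) pair, which DataLoader collates into the B C H W batches unpacked in the loops above.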