# Project-specific names used below (Dataset_IP102_Augmentation, plant,
# densenet, residual_block, resnet, utils) are assumed to be imported from
# this repo's own modules; only the standard imports are added here.
import numpy as np
import torch
import torch as t
from torch import nn
from torch.autograd import Variable
from torchvision.datasets import CIFAR10

file_dir = 'F:/5.datasets/ip102_v2.2'
for k, v in plant.items():
    train_dataset = Dataset_IP102_Augmentation(file_dir,
                                               train=True,
                                               transforms=data_tf,
                                               category=v)
    print('Number of training images for category', k, ':', len(train_dataset))
    train_data = t.utils.data.DataLoader(
        train_dataset,
        batch_size=16,
        shuffle=True,
        drop_last=True)

    test_dataset = Dataset_IP102_Augmentation(file_dir,
                                              train=False,
                                              transforms=data_tf,
                                              category=v)
    test_data = t.utils.data.DataLoader(
        test_dataset,
        batch_size=16,
        shuffle=True,
        drop_last=True)

    net = densenet(3, 102)
    optimizer = t.optim.SGD(net.parameters(), lr=0.01)
    criterion = t.nn.CrossEntropyLoss()

    net = utils.train(net, train_data, test_data, 25, optimizer, criterion,
                      'densenet' + '_on_' + k)

    print('\n')
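# Each pass through the loop above trains a separate DenseNet on a single
# plant category: k is the category name, v its id in the `plant` mapping,
# and the final utils.train argument ('densenet_on_<k>') is presumably the
# name used for logging/checkpoints.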
def data_tf(x):
    x = np.array(x, dtype='float32') / 255  # assumed first step: scale pixel values to [0, 1]
    x = (x - 0.5) / 0.5  # normalize to [-1, 1]
    x = x.transpose((2, 0, 1))  # move channels to the first dimension (CHW), the layout PyTorch expects
    x = torch.from_numpy(x)
    return x
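
# Illustrative use of data_tf (hypothetical input, not part of the original
# script): for a 96x96 RGB image `img`,
#
#     x = data_tf(img)
#     x.shape  # torch.Size([3, 96, 96]), values normalized to [-1, 1]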


if __name__ == '__main__':
    # check the output shape of a residual block

    test_net = residual_block(3, 32, False)
    test_x = Variable(torch.zeros(1, 3, 96, 96))
    print('input:{}'.format(test_x.shape))
    test_y = test_net(test_x)
    print('output:{}'.format(test_y.shape))
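    # If residual_block follows the common tutorial signature
    # residual_block(in_channel, out_channel, same_shape=True), then passing
    # False above halves the spatial size, giving an output of
    # (1, 32, 48, 48) for this (1, 3, 96, 96) input; with same_shape=True
    # the 96x96 spatial size would be preserved.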

    train_set = CIFAR10('../data', train=True, transform=data_tf)
    train_data = torch.utils.data.DataLoader(train_set,
                                             batch_size=64,
                                             shuffle=True)

    test_set = CIFAR10('../data', train=False, transform=data_tf)
    test_data = torch.utils.data.DataLoader(test_set,
                                            batch_size=128,
                                            shuffle=True)

    net = resnet(3, 10)
    optimizer = torch.optim.SGD(net.parameters(), lr=0.01)
    criterion = nn.CrossEntropyLoss()

    utils.train(net, train_data, test_data, 20, optimizer, criterion)
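
    # utils.train is defined elsewhere in the repo. A minimal sketch with the
    # same positional signature (net, train_data, test_data, num_epochs,
    # optimizer, criterion) -- an assumption, not the project's actual code:
    #
    #     def train(net, train_data, test_data, num_epochs, optimizer, criterion):
    #         device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    #         net = net.to(device)
    #         for epoch in range(num_epochs):
    #             net.train()
    #             for im, label in train_data:
    #                 im, label = im.to(device), label.to(device)
    #                 loss = criterion(net(im), label)
    #                 optimizer.zero_grad()
    #                 loss.backward()
    #                 optimizer.step()
    #             net.eval()
    #             # ... evaluate on test_data and print epoch statistics ...
    #         return net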
import os
import sys

import torch as t

# add the current working directory (project root) to the import path
o_path = os.getcwd()
sys.path.append(o_path)

from vgg_net import utils

from IP102.dataset_ip102 import Dataset_IP102, transform as data_tf  # alias the exported transform to the name used below

file_dir = 'F:/5.datasets/ip102_v1.1/'
train_dataset = Dataset_IP102(file_dir, train=True, transforms=data_tf)
train_data = t.utils.data.DataLoader(
    train_dataset,
    batch_size=64,
    shuffle=True,
    drop_last=True)

test_dataset = Dataset_IP102(file_dir, train=False, transforms=data_tf)
test_data = t.utils.data.DataLoader(
    test_dataset,
    batch_size=128,
    shuffle=True,
    drop_last=True)

net = vgg_IP102()  # model class from the project's vgg_net package; its import is not shown in this snippet
optimizer = t.optim.SGD(net.parameters(), lr=0.01)
criterion = t.nn.CrossEntropyLoss()

utils.train(net, train_data, test_data, 20, optimizer, criterion, 'vgg_net')
from IP102.dataset_ip102_category import Dataset_IP102, plant

file_dir = 'F:/5.datasets/ip102_v1.1'
for k, v in plant.items():
    train_dataset = Dataset_IP102(file_dir,
                                  train=True,
                                  transforms=data_tf,
                                  category=v)
    train_data = t.utils.data.DataLoader(
        train_dataset,
        batch_size=16,
        shuffle=True,
        drop_last=True)

    test_dataset = Dataset_IP102(file_dir,
                                 train=False,
                                 transforms=data_tf,
                                 category=v)
    test_data = t.utils.data.DataLoader(
        test_dataset,
        batch_size=16,
        shuffle=True,
        drop_last=True)

    net = vgg_IP102()
    optimizer = t.optim.SGD(net.parameters(), lr=0.01)
    criterion = t.nn.CrossEntropyLoss()

    utils.train(net, train_data, test_data, 50, optimizer, criterion,
                'vgg_net' + '_on_' + k)
    print('\n')

for k, v in plant.items():
    train_dataset = Dataset_IP102_Augmentation(file_dir,
                                               train=True,
                                               transforms=data_tf,
                                               category=v)
    train_data = t.utils.data.DataLoader(
        train_dataset,
        batch_size=16,
        shuffle=True,
        drop_last=True)

    test_dataset = Dataset_IP102(file_dir,
                                 train=False,
                                 transforms=data_tf,
                                 category=v)
    test_data = t.utils.data.DataLoader(
        test_dataset,
        batch_size=16,
        shuffle=True,
        drop_last=True)

    net = densenet(3, 102)
    optimizer = t.optim.SGD(net.parameters(), lr=0.01)
    criterion = t.nn.CrossEntropyLoss()

    net = utils.train(net, train_data, test_data, 1, optimizer, criterion,
                      'densenet' + '_on_augmentation_' + k)
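    # Note: this run uses num_epochs=1 (vs. 25 in the earlier densenet loop),
    # which looks like a quick check of the augmented pipeline rather than a
    # full training run.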