# ---- Data pipeline: triplet training set + plain test set -----------------
root = args.data_dir
train_dir = os.path.join(root, "train")
test_dir = os.path.join(root, "test")

# ImageNet mean/std normalization; the horizontal flip augments training only.
transform_train = torchvision.transforms.Compose([
    torchvision.transforms.Resize((128, 128)),
    torchvision.transforms.RandomHorizontalFlip(),
    torchvision.transforms.ToTensor(),
    torchvision.transforms.Normalize([0.485, 0.456, 0.406],
                                     [0.229, 0.224, 0.225]),
])
transform_test = torchvision.transforms.Compose([
    torchvision.transforms.Resize((128, 128)),
    torchvision.transforms.ToTensor(),
    torchvision.transforms.Normalize([0.485, 0.456, 0.406],
                                     [0.229, 0.224, 0.225]),
])

# TripletFolder is a project-local dataset (defined elsewhere in this repo);
# it exposes `.classes` like torchvision's ImageFolder.
trainloader = torch.utils.data.DataLoader(
    TripletFolder(train_dir, transform=transform_train),
    batch_size=64,
    shuffle=True,
)
testloader = torch.utils.data.DataLoader(
    torchvision.datasets.ImageFolder(test_dir, transform=transform_test),
    batch_size=64,
    shuffle=True,
)
num_classes = len(trainloader.dataset.classes)

# ---- Network definition / optional resume ---------------------------------
start_epoch = 0
net = Net(num_classes=num_classes)
if args.resume:
    ckpt_path = "./checkpoint/ckpt.t7"
    # Raise explicitly instead of `assert`: asserts are stripped under
    # `python -O`, which would turn a missing checkpoint into a later crash.
    if not os.path.isfile(ckpt_path):
        raise FileNotFoundError("Error: no checkpoint file found!")
    print('Loading from checkpoint/ckpt.t7')
    # map_location="cpu" lets a GPU-saved checkpoint load on a CPU-only
    # machine; the model can be moved to the target device afterwards.
    checkpoint = torch.load(ckpt_path, map_location="cpu")
# NOTE(review): the left-hand side of this first statement is cut off in the
# chunk upstream of here; reconstructed as the usual "prepend ColorJitter to
# the training augmentation list" idiom — confirm against the full file.
transform_train_list = [
    transforms.ColorJitter(brightness=0.1, contrast=0.1,
                           saturation=0.1, hue=0),
] + transform_train_list
print(transform_train_list)

data_transforms = {
    'train': transforms.Compose(transform_train_list),
    'val': transforms.Compose(transform_val_list),
}

# Use the full training split ('train_all') only when requested; otherwise
# train on 'train' with 'val' held out.
train_all = ''
if opt.train_all:
    train_all = '_all'

image_datasets = {}
# BUG FIX: the training directory was hard-coded to 'train_all', silently
# ignoring opt.train_all (and leaving the computed `train_all` suffix
# unused). Build the directory name from the flag instead.
image_datasets['train'] = TripletFolder(
    os.path.join(data_dir, 'train' + train_all),
    data_transforms['train'])
image_datasets['val'] = TripletFolder(
    os.path.join(data_dir, 'val'),
    data_transforms['val'])

batch = {}  # presumably filled later in the script — kept as-is

class_names = image_datasets['train'].classes
# Integer class label of every sample, in dataset order (samples are
# (path, class_index) pairs, as in torchvision's ImageFolder).
class_vector = [s[1] for s in image_datasets['train'].samples]

dataloaders = {
    x: torch.utils.data.DataLoader(image_datasets[x],
                                   batch_size=opt.batchsize,
                                   shuffle=True,
                                   num_workers=8)
    for x in ['train', 'val']
}