# --- Melanoma (ISIC-2017) data setup ----------------------------------------
# NOTE(review): this block was recovered from a whitespace-mangled one-line
# paste (everything after the first '#' had become dead comment text); the
# original line breaks were reconstructed from the syntax.
validation_size = 0.1

# image_path = 'data/ISIC-2017_Training_Data'
image_path = 'data/processed'
mask_path = 'data/ISIC-2017_Training_Part1_GroundTruth'
label_file = 'data/ISIC-2017_Training_Part3_GroundTruth.csv'

# Build the DataManager that reads the images, masks and label CSV.
mymanager = DataManager(image_path, mask_path, label_file)

# load_data() is defined elsewhere in the project; it returns the
# train/test/validation splits as tensors.
X_train, y_train, X_test, y_test, X_val, y_val = load_data()

# Convert the raw labels once, then move everything to the GPU if one is
# available.  (The pasted original duplicated the convert_labels() calls in
# both branches and had no-op self-assignments in the else branch.)
y_train = mymanager.convert_labels(y_train)  # .type(torch.LongTensor)
y_test = mymanager.convert_labels(y_test)    # .type(torch.LongTensor)
if torch.cuda.is_available():
    X_train = X_train.cuda()
    y_train = y_train.cuda()
    X_test = X_test.cuda()
    y_test = y_test.cuda()

# Wrap the tensors so the DataLoaders can serve mini-batches.
train_dataset = torch.utils.data.TensorDataset(X_train, y_train)
test_dataset = torch.utils.data.TensorDataset(X_test, y_test)

# Shuffle the training batches: the pasted original had shuffle=False, which
# feeds every epoch the samples in the same order (the sibling script in this
# file shuffles its training loader).
train_dataloader = torch.utils.data.DataLoader(train_dataset,
                                               batch_size=batch_size,
                                               shuffle=True)
# NOTE(review): the source was truncated mid-call here; the keyword arguments
# below mirror the training loader (evaluation left unshuffled) -- TODO confirm
# against the original file.
test_dataloader = torch.utils.data.DataLoader(test_dataset,
                                              batch_size=batch_size,
                                              shuffle=False)
# --- Transforms and dataset selection ----------------------------------------
# NOTE(review): recovered from a whitespace-mangled one-line paste; the first
# statement was cut mid-expression, so train_transform is reconstructed to
# mirror the adjacent test_transform -- TODO confirm against the original file.
train_transform = transforms.Compose([transforms.ToTensor(),
                                      transforms.Normalize(mean, std)])
test_transform = transforms.Compose([transforms.ToTensor(),
                                     transforms.Normalize(mean, std)])

if args.dataset == 'melanoma':
    # Paths to the ISIC-2017 melanoma data.
    image_path = './data/processed'
    mask_path = './data/ISIC-2017_Training_Part1_GroundTruth'
    label_file = './data/ISIC-2017_Training_Part3_GroundTruth.csv'

    # Generate a DataManager (project class; reads images/masks/labels).
    mymanager = DataManager(image_path, mask_path, label_file)

    # Load the data and map the raw labels to class indices.
    X_train, y_train, X_test, y_test, X_val, y_val = load_data()
    y_train = mymanager.convert_labels(y_train)
    y_test = mymanager.convert_labels(y_test)

    # NOTE(review): TensorDataset ignores train_transform/test_transform, so
    # the normalization above is not applied on this branch -- verify that the
    # tensors from load_data() are already normalized.
    train_data = torch.utils.data.TensorDataset(X_train, y_train)
    test_data = torch.utils.data.TensorDataset(X_test, y_test)

    # Number of labels (melanoma / seborrheic keratosis / nevus).
    nlabels = 3
else:
    # Fallback: CIFAR-100 with the normalize transforms applied.
    train_data = dset.CIFAR100(args.data_path, train=True,
                               transform=train_transform, download=True)
    test_data = dset.CIFAR100(args.data_path, train=False,
                              transform=test_transform, download=True)
    nlabels = 100

train_loader = torch.utils.data.DataLoader(train_data,
                                           batch_size=args.batch_size,
                                           shuffle=True,
                                           num_workers=args.prefetch,
                                           pin_memory=True)
# NOTE(review): the source was truncated mid-call here; the trailing keyword
# arguments mirror the training loader -- TODO confirm against the original
# file.
test_loader = torch.utils.data.DataLoader(test_data,
                                          batch_size=args.test_bs,
                                          shuffle=False,
                                          num_workers=args.prefetch,
                                          pin_memory=True)