# --- Train/validation split (index-based, served via SubsetRandomSampler) ---
# Reconstructed from a line-collapsed paste: the original statements had been
# flattened onto one line, leaving everything after the first `#` as dead text.
shuffle = False  # set True to shuffle indices before splitting

# Creating data indices for training and validation splits:
dataset_size = len(data)
indices = list(range(dataset_size))
validation_split = 0.2  # fraction of samples held out for validation
split = int(np.floor(validation_split * dataset_size))
print(split)

if shuffle:
    # NOTE(review): `random_seed` is not defined in this chunk — the later
    # variant of this section uses `opt.seed`; confirm where it comes from.
    np.random.seed(random_seed)
    np.random.shuffle(indices)

# First `split` indices become the validation set, the remainder the
# training set (split is taken from the front of `indices`).
train_indices, val_indices = indices[split:], indices[:split]

# Creating PT data samplers and loaders:
train_sampler = SubsetRandomSampler(train_indices)
valid_sampler = SubsetRandomSampler(val_indices)
trainloader = dataset.CSILoader(data, opt, sampler=train_sampler)
testloader = dataset.CSILoader(data, opt, sampler=valid_sampler)

print('==> Building model..')
# Alternative architectures kept for quick swapping:
# net = VGG('VGG19')
# net = ResNet18()
# net = GoogLeNet()
# net = DenseNet121()
# net = ResNeXt29_2x64d()
# net = MobileNet()
# net = DPN92()
# net = ShuffleNetG2()
# net = SENet18()
net = vgg.VGG('VGG11')
# --- Train/validation split (pre-split arrays wrapped in CSISet datasets) ---
# Reconstructed from a line-collapsed paste: the original statements had been
# flattened onto one line, leaving everything after the first `#` as dead text.
shuffle = True

# Creating data indices for training and validation splits:
data_train = dataset.CSISet(data_x_train, data_y_train)
data_test = dataset.CSISet(data_x_test, data_y_test)

# Superseded index-based split, kept for reference:
# dataset_size = len(data)
# indices = list(range(dataset_size))
# split = 0.8
# split = int(np.floor(split * dataset_size))
# print(split)
# if shuffle:
#     np.random.seed(opt.seed)
#     np.random.shuffle(indices)
# train_indices, val_indices = indices[split:], indices[:split]
# train_indices, val_indices = indices[:split], indices[split:]

# Loaders shuffle internally now, so no sampler is needed.
trainloader = dataset.CSILoader(data_train, opt, shuffle=True)
testloader = dataset.CSILoader(data_test, opt, shuffle=True)

print('==> Building model..')
net = vgg.VGG('VGG11', linear_in=2048)
# Alternative architectures kept for quick swapping:
# net = ResNet18()
# net = LeNet.LeNet(in_channel=3, linear_in=9216)
# net = DenseNet121()
# net = ResNeXt29_2x64d()
# net = MobileNet()
# net = DPN92()
# net = ShuffleNetG2()
# net = SENet18()

# Result-folder setup (was truncated in the pasted chunk):
# result_folder = './results/'
# if not os.path.exists(result_folder):