def mds_loader(params, is_key_frame=True):
    # Datasets
    # TODO: put the path to your train, test, validation txt files
    if is_key_frame:
        label_file_train = 'dataloader_files/keyframe_data_train.txt'
        label_file_val = 'dataloader_files/keyframe_data_val.txt'
    else:
        label_file_train = 'dataloader_files/videoframe_data_train.txt'
        label_file_val = 'dataloader_files/videoframe_data_val.txt'
        label_file_test = 'dataloader_files/videoframe_data_test.txt'

    mean = [0.5, 0.5, 0.5]
    std = [0.5, 0.5, 0.5]

    train_dataset = Mds189(label_file_train, loader=default_loader, transform=transforms.Compose([
        transforms.ColorJitter(hue=.05, saturation=.05),
        transforms.RandomHorizontalFlip(p=0.33),
        transforms.RandomRotation(degrees=15),
        transforms.ToTensor(),
        transforms.Normalize(mean, std)
    ]))
    train_loader = data.DataLoader(train_dataset, **params)

    val_dataset = Mds189(label_file_val, loader=default_loader, transform=transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize(mean, std)
    ]))
    val_loader = data.DataLoader(val_dataset, **params)

    if is_key_frame:
        return train_loader, val_loader
    else:
        test_dataset = Mds189(label_file_test, loader=default_loader, transform=transforms.Compose([
            transforms.ToTensor(),
            transforms.Normalize(mean, std)
        ]))
        test_loader = data.DataLoader(test_dataset, **params)
        return train_loader, val_loader, test_loader
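# Example usage (a minimal sketch, not part of the original starter code).
# It assumes `params` is simply a dict of standard torch.utils.data.DataLoader
# keyword arguments, since mds_loader forwards it via **params, and that the
# Mds189 dataset yields (image, label) pairs. The specific values below are
# illustrative assumptions.
params = {'batch_size': 32, 'shuffle': True, 'num_workers': 2}
train_loader, val_loader = mds_loader(params, is_key_frame=True)
for images, labels in train_loader:
    # images: (batch_size, 3, H, W) float tensor; labels: movement class indices
    break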
label_file_train = 'dataloader_files/videoframe_data_train.txt'
label_file_val = 'dataloader_files/videoframe_data_val.txt'
label_file_test = 'dataloader_files/videoframe_data_test.txt'

####################### NORMALIZE + DATA GEN #######################
if is_key_frame:
    mean = np.array([134.010302198, 118.599587912, 102.038804945]) / 255
    std = np.array([23.5033438916, 23.8827343458, 24.5498666589]) / 255
else:
    mean = np.array([133.714058398, 118.396875912, 102.262895484]) / 255
    std = np.array([23.2021839891, 23.7064439547, 24.3690056102]) / 255

# Generators
train_dataset = Mds189(label_file_train, loader=default_loader, transform=transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize(mean, std)
]))
train_loader = data.DataLoader(train_dataset, **params)

val_dataset = Mds189(label_file_val, loader=default_loader, transform=transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize(mean, std)
]))
val_loader = data.DataLoader(val_dataset, **params)

####################### TRAIN/VAL #######################
start = time.time()
print('Beginning training..')
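# Sketch: recomputing the per-channel statistics used in the NORMALIZE block above.
# The hardcoded mean/std values are per-channel pixel statistics of the training
# frames on a 0-255 scale (hence the division by 255). This is one way they could
# be recomputed; it assumes the same Mds189 / default_loader used above and that
# all frames in a batch share the same resolution (otherwise use batch_size=1).
import torch
from torch.utils import data
from torchvision import transforms

def compute_channel_stats(label_file):
    # Only ToTensor here (no Normalize), so pixel values stay in [0, 1].
    dataset = Mds189(label_file, loader=default_loader, transform=transforms.ToTensor())
    loader = data.DataLoader(dataset, batch_size=64, shuffle=False)
    n_pixels = 0
    channel_sum = torch.zeros(3)
    channel_sq_sum = torch.zeros(3)
    for images, _ in loader:
        # images: (B, 3, H, W); accumulate sums over the batch and spatial dims
        n_pixels += images.shape[0] * images.shape[2] * images.shape[3]
        channel_sum += images.sum(dim=[0, 2, 3])
        channel_sq_sum += (images ** 2).sum(dim=[0, 2, 3])
    mean = channel_sum / n_pixels
    std = torch.sqrt(channel_sq_sum / n_pixels - mean ** 2)
    return mean, std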
]
std = [23.2021839891 / 255.0, 23.7064439547 / 255.0, 24.3690056102 / 255.0]
# TODO: you should normalize based on the average image in the training set. This shows
# an example of doing normalization.
# TODO: if you want to pad or resize your images, you can put the parameters for that below.

# Generators
# NOTE: if you don't want to pad or resize your images, you should delete the Pad and Resize
# transforms from all three _dataset definitions.
train_dataset = Mds189(
    label_file_train,
    loader=default_loader,
    transform=transforms.Compose([
        # transforms.Pad(requires_parameters),     # TODO: if you want to pad your images
        # transforms.Resize(requires_parameters),  # TODO: if you want to resize your images
        transforms.RandomAffine((-30, 30), shear=(-10, 10), fillcolor=0),
        transforms.ToTensor(),
        transforms.Normalize(mean, std)
    ]))
train_loader = data.DataLoader(train_dataset, **params)

val_dataset = Mds189(
    label_file_val,
    loader=default_loader,
    transform=transforms.Compose([
        # transforms.Pad(),
        # transforms.Resize(),
        transforms.ToTensor(),
        transforms.Normalize(mean, std)
    ]))
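# Sketch: one way the Pad/Resize TODOs above could be filled in. The parameter
# values are illustrative assumptions, not values prescribed by the assignment
# (224x224 is simply a common input size for ImageNet-style CNNs), and
# `example_transform` is a hypothetical name used only for this sketch.
example_transform = transforms.Compose([
    transforms.Pad(padding=8, fill=0),  # pad 8 pixels on each side with black
    transforms.Resize((224, 224)),      # resize to a fixed 224x224 input
    transforms.ToTensor(),
    transforms.Normalize(mean, std),
])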
label_file_train = '../data/hw6_mds189/videoframe_data_train.txt'
label_file_val = '../data/hw6_mds189/videoframe_data_val.txt'
label_file_test = '../data/hw6_mds189/videoframe_data_test.txt'

# TODO: you should normalize based on the average image in the training set. This shows
# an example of doing normalization.
mean = [0.5, 0.5, 0.5]
std = [0.5, 0.5, 0.5]

# TODO: if you want to pad or resize your images, you can put the parameters for that below.
# Generators
# NOTE: if you don't want to pad or resize your images, you should delete the Pad and Resize
# transforms from all three _dataset definitions.
train_dataset = Mds189(label_file_train, loader=default_loader, transform=transforms.Compose([
    transforms.Pad(requires_parameters),     # TODO: if you want to pad your images
    transforms.Resize(requires_parameters),  # TODO: if you want to resize your images
    transforms.ToTensor(),
    transforms.Normalize(mean, std)
]))
train_loader = data.DataLoader(train_dataset, **params)

val_dataset = Mds189(label_file_val, loader=default_loader, transform=transforms.Compose([
    transforms.Pad(),
    transforms.Resize(),
    transforms.ToTensor(),
    transforms.Normalize(mean, std)
]))
val_loader = data.DataLoader(val_dataset, **params)

if not is_key_frame:
    test_dataset = Mds189(label_file_test, loader=default_loader, transform=transforms.Compose([
        transforms.Pad(