# Example #1
def load_EPFL_dataset(args):
	"""Load the EPFL dataset for training and validation.

	Args:
		args: parsed command-line arguments; must provide ``datasets``,
			``cache_path`` and ``batch_size``.

	Returns:
		tuple: ``(train_dataset, val_dataset)`` — the training dataset
		(built with random augmentation) and the validation dataset
		(built with the deterministic test transform).
	"""
	# Training images get random augmentation; validation images only get
	# the deterministic resize/normalise transform.
	train_transform = TrainAugmentation(config.image_size, config.image_mean, config.image_std)
	val_transform = TestTransform(config.image_size, config.image_mean, config.image_std)  # used for the validation dataset
	# Matches ground-truth boxes to the SSD prior boxes; 0.5 is presumably
	# the IoU matching threshold — confirm against net.MatchPrior.
	target_transform = net.MatchPrior(config.priors, config.center_variance, config.size_variance, 0.5)

	train_dataset = EPFLDataset(args.datasets, args.cache_path, transform=train_transform, target_transform=target_transform, batch_size=args.batch_size)
	val_dataset = EPFLDataset(args.datasets, args.cache_path, transform=val_transform, target_transform=target_transform, batch_size=args.batch_size, is_val=True)

	return train_dataset, val_dataset
# Example #2
		# Partially load pretrained weights: copy only the parameters whose
		# names also exist in the current model, leaving all other entries
		# of pred_dec's state dict untouched.
		model_dict = pred_dec.state_dict()
		# 1. filter out unnecessary keys
		pretrained_dict = {k: v for k, v in pretrained_net_dict.items() if k in model_dict}
		# 2. overwrite entries in the existing state dict
		model_dict.update(pretrained_dict)
		# 3. load the merged state dict back into the model (model_dict was
		#    taken from pred_dec itself, so every expected key is present)
		pred_dec.load_state_dict(model_dict)



if __name__ == '__main__':
	timer = Timer()

	logging.info(args)
	config = mobilenetv1_ssd_config	# SSD config: priors, image size, normalisation constants
	# Random augmentation for training images.
	train_transform = TrainAugmentation(config.image_size, config.image_mean, config.image_std)
	# Matches ground-truth boxes to SSD priors; 0.5 is presumably the IoU
	# matching threshold — confirm against MatchPrior.
	target_transform = MatchPrior(config.priors, config.center_variance,
								  config.size_variance, 0.5)

	# Deterministic transform for evaluation.
	test_transform = TestTransform(config.image_size, config.image_mean, config.image_std)

	logging.info("Prepare training datasets.")
	train_dataset = VIDDataset(args.datasets, transform=train_transform,
								 target_transform=target_transform)
	# Persist the class-name list next to the model so inference can map
	# predicted class ids back to human-readable labels.
	label_file = os.path.join("models/", "vid-model-labels.txt")
	store_labels(label_file, train_dataset._classes_names)
	num_classes = len(train_dataset._classes_names)
	logging.info(f"Stored labels into file {label_file}.")
	logging.info("Train dataset size: {}".format(len(train_dataset)))
	train_loader = DataLoader(train_dataset, args.batch_size,
							  num_workers=args.num_workers,