import sys

# Dataset, DataAugmentation and message_logger are expected to be imported
# elsewhere in this module.


def config_datasets(data):
    """Build the training and validation datasets from the parsed configuration."""
    train_path = data["train_path"]
    valid_path = data["valid_path"]
    minibatch_size = int(data["minibatch_size"])

    # Data augmentation parameters
    rgb_noise = float(data["data_augmentation"]["rgb_noise"])
    depth_noise = float(data["data_augmentation"]["depth_noise"])
    occluder_path = data["data_augmentation"]["occluder_path"]
    background_path = data["data_augmentation"]["background_path"]
    blur_noise = int(data["data_augmentation"]["blur_noise"])
    h_noise = float(data["data_augmentation"]["h_noise"])
    s_noise = float(data["data_augmentation"]["s_noise"])
    v_noise = float(data["data_augmentation"]["v_noise"])
    # The config stores booleans as strings, so compare against "True"
    channel_hide = data["data_augmentation"]["channel_hide"] == "True"

    data_augmentation = DataAugmentation()
    data_augmentation.set_rgb_noise(rgb_noise)
    data_augmentation.set_depth_noise(depth_noise)
    if occluder_path != "":
        data_augmentation.set_occluder(occluder_path)
    if background_path != "":
        data_augmentation.set_background(background_path)
    if channel_hide:
        data_augmentation.set_channel_hide(0.25)
    data_augmentation.set_blur(blur_noise)
    data_augmentation.set_hsv_noise(h_noise, s_noise, v_noise)

    message_logger.info("Setup Train : {}".format(train_path))
    train_dataset = Dataset(train_path, minibatch_size=minibatch_size)
    if not train_dataset.load():
        message_logger.error("Train dataset empty")
        sys.exit(-1)
    train_dataset.set_data_augmentation(data_augmentation)
    # Normalization statistics are computed on the training set only
    train_dataset.compute_mean_std()
    message_logger.info("Computed mean : {}\nComputed Std : {}".format(
        train_dataset.mean, train_dataset.std))

    message_logger.info("Setup Valid : {}".format(valid_path))
    valid_dataset = Dataset(valid_path, minibatch_size=minibatch_size, max_samples=20000)
    if not valid_dataset.load():
        message_logger.error("Valid dataset empty")
        sys.exit(-1)
    valid_dataset.set_data_augmentation(data_augmentation)
    # Reuse the training statistics so both splits are normalized identically
    valid_dataset.mean = train_dataset.mean
    valid_dataset.std = train_dataset.std
    return train_dataset, valid_dataset
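# Hypothetical example configuration (all paths and values are placeholders):
# a sketch of the dict layout config_datasets above expects. Numeric fields are
# written as strings because the function converts them with int()/float(), and
# "channel_hide" is compared against the string "True".
EXAMPLE_CONFIG = {
    "train_path": "/path/to/train_dataset",
    "valid_path": "/path/to/valid_dataset",
    "minibatch_size": "64",
    "data_augmentation": {
        "rgb_noise": "0.01",
        "depth_noise": "0.01",
        "occluder_path": "",       # empty string: occluder augmentation disabled
        "background_path": "",     # empty string: background replacement disabled
        "blur_noise": "3",
        "h_noise": "0.05",
        "s_noise": "0.05",
        "v_noise": "0.05",
        "channel_hide": "False",
    },
}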
# Variant of config_datasets that reads a single "hue_noise" parameter instead
# of the per-channel HSV noise and channel-hide options above.
def config_datasets(data):
    train_path = data["train_path"]
    valid_path = data["valid_path"]
    minibatch_size = int(data["minibatch_size"])
    rgb_noise = float(data["data_augmentation"]["rgb_noise"])
    depth_noise = float(data["data_augmentation"]["depth_noise"])
    occluder_path = data["data_augmentation"]["occluder_path"]
    background_path = data["data_augmentation"]["background_path"]
    blur_noise = int(data["data_augmentation"]["blur_noise"])
    hue_noise = float(data["data_augmentation"]["hue_noise"])

    data_augmentation = DataAugmentation()
    data_augmentation.set_rgb_noise(rgb_noise)
    data_augmentation.set_depth_noise(depth_noise)
    if occluder_path != "":
        data_augmentation.set_occluder(occluder_path)
    if background_path != "":
        data_augmentation.set_background(background_path)
    data_augmentation.set_blur(blur_noise)
    data_augmentation.set_hue_noise(hue_noise)

    train_dataset = Dataset(train_path, minibatch_size=minibatch_size)
    if not train_dataset.load():
        message_logger.error("Train dataset empty")
        sys.exit(-1)
    train_dataset.set_data_augmentation(data_augmentation)
    train_dataset.compute_mean_std()
    message_logger.info("Computed mean : {}\nComputed Std : {}".format(
        train_dataset.mean, train_dataset.std))

    valid_dataset = Dataset(valid_path, minibatch_size=minibatch_size)
    if not valid_dataset.load():
        message_logger.error("Valid dataset empty")
        sys.exit(-1)
    valid_dataset.set_data_augmentation(data_augmentation)
    valid_dataset.mean = train_dataset.mean
    valid_dataset.std = train_dataset.std
    return train_dataset, valid_dataset
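# Hypothetical usage sketch, assuming the configuration is stored in a JSON
# file shaped like EXAMPLE_CONFIG above (this variant expects "hue_noise"
# instead of the per-channel HSV keys). The file name is a placeholder.
if __name__ == '__main__':
    import json

    with open("train_config.json") as config_file:
        config = json.load(config_file)
    train_dataset, valid_dataset = config_datasets(config)
    message_logger.info("Datasets ready, train mean : {}".format(train_dataset.mean))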