Beispiel #1
0
    def __init__(self, root_dir, split="train", hd=False):
        """Index one dataset split and build the train/validation transforms.

        Args:
            root_dir: Dataset root containing "RGB_Images", "Masks" and
                "Sets" subdirectories.
            split: Name of the split list (without ".txt") under "Sets".
            hd: If True, random scale-cropping uses a 1080px base size
                instead of the default 513px.
        """
        self.root_dir = root_dir
        self.split = split
        self.imgs_dir = os.path.join(root_dir, "RGB_Images")
        self.masks_dir = os.path.join(root_dir, "Masks")
        base_size = 1080 if hd else 513
        self.transform_train = transforms.Compose([
            tr.RandomHorizontalFlip(),
            tr.RandomScaleCrop(base_size=base_size, crop_size=513, fill=0),
            tr.RandomRotate(15),
            tr.RandomGaussianBlur(),
            tr.Normalize(mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)),
            tr.ToTensor()
        ])

        self.transform_validation = transforms.Compose([
            tr.FixScaleCrop(crop_size=base_size),
            tr.Normalize(mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)),
            tr.ToTensor()
        ])

        with open(os.path.join(root_dir, "Sets", f"{split}.txt"), 'r') as f:
            basenames, imgs, masks = [], [], []
            for line in f:
                # Iterating a file yields lines with a trailing newline;
                # strip it so the stored basename (and the name passed to
                # get_filenames) is clean — the other loader in this file
                # already does this. Blank lines are skipped for robustness.
                basename = line.strip("\n")
                if not basename:
                    continue
                img, mask = self.get_filenames(basename)
                basenames.append(basename)
                imgs.append(img)
                masks.append(mask)
            self.files = pd.DataFrame(
                list(zip(basenames, imgs, masks)),
                columns=['basename', 'img_filename', 'label_filename'])
Beispiel #2
0
    def transform_tr(self, sample):
        """Run *sample* through the training-time augmentation pipeline.

        The pipeline flips, rotates, scale-crops, blurs, normalizes with
        ImageNet statistics, and finally converts to a tensor.
        """
        pipeline = transforms.Compose([
            tr.RandomHorizontalFlip(),
            tr.RandomRotateB(),
            tr.RandomScaleCrop(base_size=self.base_size,
                               crop_size=self.crop_size,
                               fill=255),
            tr.RandomGaussianBlur(),
            tr.Normalize(mean=(0.485, 0.456, 0.406),
                         std=(0.229, 0.224, 0.225)),
            tr.ToTensor(),
        ])
        return pipeline(sample)
Beispiel #3
0
    def __init__(self,
                 root_dir,
                 split="train",
                 train_ratio=0.9,
                 transform=None,
                 seed=1234):
        """Index frames by walking the XML annotation tree and split them
        into train/validation subsets with a reproducible random mask.

        Args:
            root_dir: Dataset root with "Depths", "Images", "Masks",
                "Overlays", "PLYs" and "XMLs" subdirectories.
            split: "train" selects the sampled fraction, anything else the
                complement.
            train_ratio: Fraction of frames assigned to the training split.
            transform: Unused here; kept for interface compatibility.
            seed: Seed for the NumPy RNG so the split is deterministic.
        """
        self.root_dir = root_dir
        self.depths_dir = os.path.join(root_dir, "Depths")
        self.images_dir = os.path.join(root_dir, "Images")
        self.masks_dir = os.path.join(root_dir, "Masks")
        self.overlays_dir = os.path.join(root_dir, "Overlays")
        self.plys_dir = os.path.join(root_dir, "PLYs")
        self.xmls_dir = os.path.join(root_dir, "XMLs")
        self.split = split
        # Per-class weights; zeros disable classes 6, 9 and 10.
        # NOTE(review): semantics of each index not visible here — confirm
        # against the label map used elsewhere in the project.
        self.weights = [1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1]

        self.transform_train = transforms.Compose([
            tr.RandomHorizontalFlip(),
            tr.RandomScaleCrop(base_size=(287, 352),
                               crop_size=(287, 352),
                               fill=0),
            tr.RandomRotate(15),
            tr.RandomGaussianBlur(),
            tr.Normalize(mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)),
            tr.ToTensor()
        ])

        self.transform_validation = transforms.Compose([
            tr.Normalize(mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)),
            tr.ToTensor()
        ])

        # Walk the XML tree once, recording each file's parent directory
        # (the sequence name) and its stem up to the first dot (frame id).
        # os.path is used instead of splitting on "/" so this also works
        # with platform-specific separators produced by os.walk.
        sequences, frame_ids = [], []
        for dir_path, _dir_names, file_names in os.walk(self.xmls_dir):
            sequence = os.path.basename(dir_path)
            for file_name in file_names:
                sequences.append(sequence)
                frame_ids.append(file_name.split(".")[0])
        self.files = pd.DataFrame(list(zip(sequences, frame_ids)),
                                  columns=['sequence', 'frame_id'])

        # Deterministic random train/validation partition.
        np.random.seed(seed)
        mask = np.random.rand(len(self.files)) < train_ratio
        if split == "train":
            self.files = self.files[mask]
        else:
            self.files = self.files[~mask]
        # drop=True: plain reset_index() would add the old index as a
        # spurious "index" column alongside 'sequence'/'frame_id'.
        self.files = self.files.reset_index(drop=True)
Beispiel #4
0
    def __init__(self,
                 root_dir,
                 split="train",
                 sampeled_images_path="/home/deepsight/DeepSightData",
                 train_range=2000,
                 eval_distance=5):
        """Set up a temporal (multi-frame) dataset split.

        Args:
            root_dir: Root containing "RGB_Images", "Masks",
                "unlabeled_rgb_images" and "Sets" subdirectories.
            split: Name of the split list (without ".txt") under "Sets".
            sampeled_images_path: Location of the sampled source videos
                used to build the scene-name mapping.
            train_range: Stored range parameter for training sampling.
            eval_distance: Stored frame distance used during evaluation.
        """
        self.root_dir = root_dir
        self.split = split
        self.imgs_dir = os.path.join(root_dir, "RGB_Images")
        self.masks_dir = os.path.join(root_dir, "Masks")
        self.unlabeled_dir = os.path.join(root_dir, "unlabeled_rgb_images")
        self.train_range = train_range
        self.eval_distance = eval_distance

        self.datasets = ['Feb_11_CDE', 'Feb_8_CDE', 'March_1_CDE']
        self.mapping = video_scene_to_name(self.datasets, sampeled_images_path)

        # All transforms run in temporal mode so augmentations stay
        # consistent across the frames of one sample.
        self.transform_train = transforms.Compose([
            tr.RandomHorizontalFlip(temporal=True),
            tr.RandomScaleCrop(base_size=513,
                               crop_size=513,
                               fill=0,
                               temporal=True),
            tr.RandomRotate(15, temporal=True),
            tr.RandomGaussianBlur(temporal=True),
            tr.Normalize(mean=(0.5, 0.5, 0.5),
                         std=(0.5, 0.5, 0.5),
                         temporal=True),
            tr.ToTensor(temporal=True)
        ])

        self.transform_validation = transforms.Compose([
            tr.FixScaleCrop(crop_size=513, temporal=True),
            tr.Normalize(mean=(0.5, 0.5, 0.5),
                         std=(0.5, 0.5, 0.5),
                         temporal=True),
            tr.ToTensor(temporal=True)
        ])

        # Each split-file line has the form "<scene>scene<id>"; split on
        # the literal "scene" marker into its two halves.
        scene_list, id_list = [], []
        set_path = os.path.join(root_dir, "Sets", f"{split}.txt")
        with open(set_path, 'r') as handle:
            for raw_line in handle:
                scene_name, frame_id = raw_line.strip("\n").split("scene")
                scene_list.append(scene_name)
                id_list.append(frame_id)
        self.files = pd.DataFrame(list(zip(scene_list, id_list)),
                                  columns=['scene', 'id'])