# Example #1
def traindataloader(multiscale=False, factor_scale=[8, 5], augmentation=True, path="Dataset/train",
                    input_size=(512, 512), batch_size=8, batch_interval=10, num_workers=4, shuffle=True,
                    mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225], scale_factor=4, make_target=True):
    """Build a CenterNet-style training dataloader over ``DetectionDataset``.

    With ``multiscale`` enabled, one transform per candidate resolution is
    created and the loader switches between them every ``batch_interval``
    batches; otherwise a single fixed-size transform is used.

    Returns:
        (dataloader, dataset) tuple.
    """
    dataset = DetectionDataset(path=path)

    def _transform_for(size):
        # All candidate resolutions share the dataset's class count and settings.
        return CenterTrainTransform(size, mean=mean, std=std, scale_factor=scale_factor,
                                    augmentation=augmentation, make_target=make_target,
                                    num_classes=dataset.num_class)

    if multiscale:
        # Seed cell sizes from factor_scale[0], then sweep multipliers centred
        # on factor_scale[0] over a span of factor_scale[1] (inclusive ends).
        seed_h = input_size[0] // factor_scale[0]
        seed_w = input_size[1] // factor_scale[0]
        half = factor_scale[1] // 2
        multipliers = range(factor_scale[0] - half, factor_scale[0] + half + 1)
        train_transform = [_transform_for([m * seed_h, m * seed_w]) for m in multipliers]
    else:
        train_transform = [_transform_for(input_size)]

    batchify = Tuple(Stack(use_shared_mem=True),
                     Pad(pad_val=-1),
                     Stack(use_shared_mem=True),
                     Stack(use_shared_mem=True),
                     Stack(use_shared_mem=True),
                     Stack(use_shared_mem=True),
                     Stack())
    dataloader = RandomTransformDataLoader(
        train_transform, dataset, batch_size=batch_size, interval=batch_interval,
        last_batch='rollover', shuffle=shuffle, batchify_fn=batchify,
        num_workers=num_workers)

    return dataloader, dataset
# Example #2
def validdataloader(path="Dataset/valid",
                    input_size=(512, 512),
                    batch_size=1,
                    num_workers=4,
                    shuffle=True,
                    mean=[0.485, 0.456, 0.406],
                    std=[0.229, 0.224, 0.225],
                    net=None,
                    foreground_iou_thresh=0.5,
                    background_iou_thresh=0.4,
                    make_target=False):
    """Build a validation dataloader around ``EfficientValidTransform``.

    Returns:
        (dataloader, dataset) tuple.
    """
    height, width = input_size[0], input_size[1]
    transform = EfficientValidTransform(height, width, net=net, mean=mean, std=std,
                                        foreground_iou_thresh=foreground_iou_thresh,
                                        background_iou_thresh=background_iou_thresh,
                                        make_target=make_target)
    dataset = DetectionDataset(path=path, transform=transform)

    batchify = Tuple(Stack(use_shared_mem=True),
                     Pad(pad_val=-1),
                     Stack(use_shared_mem=True),
                     Stack(use_shared_mem=True),
                     Stack())
    # 'rollover' carries leftover samples into the next epoch
    # (alternatives: "keep", "discard").
    dataloader = DataLoader(dataset,
                            batch_size=batch_size,
                            shuffle=shuffle,
                            batchify_fn=batchify,
                            last_batch='rollover',
                            num_workers=num_workers)

    return dataloader, dataset
def testdataloader(path="Dataset/test",
                   input_size=(512, 512),
                   input_frame_number=2,
                   pin_memory=True,
                   num_workers=4,
                   mean=[0.485, 0.456, 0.406],
                   std=[0.229, 0.224, 0.225],
                   scale_factor=4):
    """Build a single-sample test dataloader over a frame-sequence dataset.

    Returns:
        (dataloader, dataset) tuple.
    """
    # Pinned memory and worker processes are mutually exclusive here;
    # pinning wins and loading happens in-process.
    if pin_memory:
        num_workers = 0

    transform = CenterValidTransform(input_size,
                                     input_frame_number=input_frame_number,
                                     mean=mean,
                                     std=std,
                                     scale_factor=scale_factor,
                                     make_target=False)
    dataset = DetectionDataset(path=path,
                               transform=transform,
                               sequence_number=input_frame_number)

    collate = Tuple(Stack(), Pad(pad_val=-1), Stack(), Stack(), Pad(pad_val=-1))
    dataloader = DataLoader(dataset,
                            batch_size=1,
                            collate_fn=collate,
                            pin_memory=pin_memory,
                            num_workers=num_workers)
    return dataloader, dataset
# Example #4
def traindataloader(multiscale=False,
                    factor_scale=[8, 6],
                    augmentation=True,
                    path="Dataset/train",
                    input_size=(512, 512),
                    batch_size=8,
                    batch_interval=10,
                    num_workers=4,
                    shuffle=True,
                    mean=[0.485, 0.456, 0.406],
                    std=[0.229, 0.224, 0.225],
                    net=None,
                    foreground_iou_thresh=0.5,
                    make_target=True):
    """Build an SSD training dataloader, optionally multi-scale.

    Returns:
        (dataloader, dataset) tuple.
    """
    dataset = DetectionDataset(path=path)

    def _transform_for(height, width):
        # Every candidate resolution shares the same matching/augmentation settings.
        return SSDTrainTransform(height, width, net=net, mean=mean, std=std,
                                 foreground_iou_thresh=foreground_iou_thresh,
                                 augmentation=augmentation,
                                 make_target=make_target)

    if multiscale:
        # Seed cell sizes from factor_scale[0], then sweep multipliers centred
        # on factor_scale[0] over a span of factor_scale[1] (inclusive ends).
        seed_h = input_size[0] // factor_scale[0]
        seed_w = input_size[1] // factor_scale[0]
        half = factor_scale[1] // 2
        multipliers = range(factor_scale[0] - half, factor_scale[0] + half + 1)
        train_transform = [_transform_for(m * seed_h, m * seed_w) for m in multipliers]
    else:
        train_transform = [_transform_for(input_size[0], input_size[1])]

    batchify = Tuple(Stack(use_shared_mem=True),
                     Pad(pad_val=-1),
                     Stack(use_shared_mem=True),
                     Stack(use_shared_mem=True),
                     Stack())
    dataloader = RandomTransformDataLoader(train_transform,
                                           dataset,
                                           batch_size=batch_size,
                                           interval=batch_interval,
                                           last_batch='rollover',
                                           shuffle=shuffle,
                                           batchify_fn=batchify,
                                           num_workers=num_workers)

    return dataloader, dataset
# Example #5
def validdataloader(path="Dataset/valid", input_size=(512, 512),
                    batch_size=1, num_workers=4, shuffle=True, mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225],
                    scale_factor=4, make_target=True):
    """Build a CenterNet validation dataloader.

    Returns:
        (dataloader, dataset) tuple.
    """
    # A throwaway dataset instance supplies the class count that the
    # transform needs before the transformed dataset can be constructed.
    num_classes = DetectionDataset(path=path).num_class
    transform = CenterValidTransform(input_size, mean=mean, std=std,
                                     scale_factor=scale_factor,
                                     make_target=make_target,
                                     num_classes=num_classes)
    dataset = DetectionDataset(path=path, transform=transform)

    batchify = Tuple(Stack(use_shared_mem=True),
                     Pad(pad_val=-1),
                     Stack(use_shared_mem=True),
                     Stack(use_shared_mem=True),
                     Stack(use_shared_mem=True),
                     Stack(use_shared_mem=True),
                     Stack())
    # 'rollover' carries leftover samples into the next epoch
    # (alternatives: "keep", "discard").
    dataloader = DataLoader(dataset,
                            batch_size=batch_size,
                            shuffle=shuffle,
                            batchify_fn=batchify,
                            last_batch='rollover',
                            num_workers=num_workers)

    return dataloader, dataset
def traindataloader(augmentation=True,
                    path="Dataset/train",
                    input_size=(512, 512),
                    input_frame_number=2,
                    batch_size=8,
                    pin_memory=True,
                    num_workers=4,
                    shuffle=True,
                    mean=[0.485, 0.456, 0.406],
                    std=[0.229, 0.224, 0.225],
                    scale_factor=4,
                    make_target=True):
    """Build a CenterNet training dataloader over frame sequences.

    Returns:
        (dataloader, dataset) tuple.
    """
    # Pinned memory and worker processes are mutually exclusive here;
    # pinning wins and loading happens in-process.
    if pin_memory:
        num_workers = 0

    # Probe the dataset once for its class count before building the transform.
    num_classes = DetectionDataset(path=path).num_class
    transform = CenterTrainTransform(input_size,
                                     input_frame_number=input_frame_number,
                                     mean=mean,
                                     std=std,
                                     scale_factor=scale_factor,
                                     augmentation=augmentation,
                                     make_target=make_target,
                                     num_classes=num_classes)
    dataset = DetectionDataset(path=path,
                               transform=transform,
                               sequence_number=input_frame_number)

    collate = Tuple(Stack(), Pad(pad_val=-1), Stack(),
                    Stack(), Stack(), Stack(), Stack())
    dataloader = DataLoader(dataset,
                            batch_size=batch_size,
                            shuffle=shuffle,
                            collate_fn=collate,
                            pin_memory=pin_memory,
                            drop_last=False,
                            num_workers=num_workers)

    return dataloader, dataset
# Example #7
def testdataloader(path="Dataset/test", input_size=(512, 512),
                   num_workers=4, mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225], scale_factor=4):
    """Build a single-sample CenterNet test dataloader (no target generation).

    Returns:
        (dataloader, dataset) tuple.
    """
    transform = CenterValidTransform(input_size, mean=mean, std=std,
                                     scale_factor=scale_factor, make_target=False)
    dataset = DetectionDataset(path=path, transform=transform)

    batchify = Tuple(Stack(use_shared_mem=True),
                     Pad(pad_val=-1),
                     Stack(),
                     Stack(use_shared_mem=True),
                     Pad(pad_val=-1))
    dataloader = DataLoader(dataset,
                            batch_size=1,
                            batchify_fn=batchify,
                            num_workers=num_workers)
    return dataloader, dataset
# Example #8
def traindataloader(multiscale=False,
                    factor_scale=[10, 9],
                    augmentation=True,
                    path="Dataset/train",
                    input_size=(512, 512),
                    batch_size=8,
                    batch_interval=10,
                    num_workers=4,
                    shuffle=True,
                    mean=[0.485, 0.456, 0.406],
                    std=[0.229, 0.224, 0.225]):
    """Build a YOLO training dataloader; multiscale sweeps 32-pixel multiples.

    Returns:
        (dataloader, dataset) tuple.
    """
    dataset = DetectionDataset(path=path, test=False)

    def _transform_for(height, width):
        return YoloTrainTransform(height, width, mean=mean, std=std,
                                  augmentation=augmentation)

    if multiscale:
        # Square sizes 32 * m for m in
        # [factor_scale[0], factor_scale[0] + factor_scale[1]] inclusive.
        start = factor_scale[0]
        stop = start + factor_scale[1] + 1
        train_transform = [_transform_for(m * 32, m * 32) for m in range(start, stop)]
    else:
        train_transform = [_transform_for(input_size[0], input_size[1])]

    batchify = Tuple(Stack(use_shared_mem=True), Pad(pad_val=-1), Stack())
    dataloader = RandomTransformDataLoader(train_transform,
                                           dataset,
                                           batch_size=batch_size,
                                           interval=batch_interval,
                                           last_batch='rollover',
                                           shuffle=shuffle,
                                           batchify_fn=batchify,
                                           num_workers=num_workers)

    return dataloader, dataset
# Example #9
def testdataloader(path="Dataset/test",
                   input_size=(512, 512),
                   num_workers=4,
                   mean=[0.485, 0.456, 0.406],
                   std=[0.229, 0.224, 0.225]):
    """Build a single-sample YOLO test dataloader.

    Returns:
        (dataloader, dataset) tuple.
    """
    height, width = input_size[0], input_size[1]
    transform = YoloValidTransform(height, width, mean=mean, std=std)
    dataset = DetectionDataset(path=path, transform=transform, test=True)

    batchify = Tuple(Stack(use_shared_mem=True),
                     Pad(pad_val=-1),
                     Stack(),
                     Stack(use_shared_mem=True),
                     Pad(pad_val=-1))
    dataloader = DataLoader(dataset,
                            batch_size=1,
                            batchify_fn=batchify,
                            num_workers=num_workers)
    return dataloader, dataset
# Example #10
def validdataloader(path="Dataset/valid",
                    input_size=(512, 512),
                    batch_size=8,
                    num_workers=4,
                    shuffle=True,
                    mean=[0.485, 0.456, 0.406],
                    std=[0.229, 0.224, 0.225],
                    net=None,
                    ignore_threshold=0.5,
                    dynamic=True,
                    from_sigmoid=False,
                    make_target=True):
    """Build a YOLO validation dataloader with target generation.

    Returns:
        (dataloader, dataset) tuple.
    """
    height, width = input_size[0], input_size[1]
    transform = YoloValidTransform(height, width,
                                   net=net,
                                   mean=mean,
                                   std=std,
                                   ignore_threshold=ignore_threshold,
                                   dynamic=dynamic,
                                   from_sigmoid=from_sigmoid,
                                   make_target=make_target)
    dataset = DetectionDataset(path=path, transform=transform)

    batchify = Tuple(Stack(use_shared_mem=True),
                     Pad(pad_val=-1),
                     Stack(use_shared_mem=True),
                     Stack(use_shared_mem=True),
                     Stack(use_shared_mem=True),
                     Stack(use_shared_mem=True),
                     Stack(use_shared_mem=True),
                     Stack())
    # 'rollover' carries leftover samples into the next epoch
    # (alternatives: "keep", "discard").
    dataloader = DataLoader(dataset,
                            batch_size=batch_size,
                            batchify_fn=batchify,
                            last_batch='rollover',
                            num_workers=num_workers,
                            shuffle=shuffle)

    return dataloader, dataset
# Example #11
def validdataloader(path="Dataset/valid",
                    input_size=(512, 512),
                    batch_size=2,
                    num_workers=2,
                    shuffle=True,
                    mean=[0.485, 0.456, 0.406],
                    std=[0.229, 0.224, 0.225]):
    """Build a plain YOLO validation dataloader (no target generation).

    Returns:
        (dataloader, dataset) tuple.
    """
    height, width = input_size[0], input_size[1]
    transform = YoloValidTransform(height, width, mean=mean, std=std)
    dataset = DetectionDataset(path=path, transform=transform, test=False)

    batchify = Tuple(Stack(use_shared_mem=True), Pad(pad_val=-1), Stack())
    # 'rollover' carries leftover samples into the next epoch
    # (alternatives: "keep", "discard").
    dataloader = DataLoader(dataset,
                            batch_size=batch_size,
                            batchify_fn=batchify,
                            last_batch='rollover',
                            num_workers=num_workers,
                            shuffle=shuffle)

    return dataloader, dataset
# Example #12
                img.context)

            return img, bbox[0], heatmap[0], offset_target[0], wh_target[
                0], mask_target[0], name
        else:
            return img, bbox, name


# Smoke test: draw one random sample and print its shapes.
if __name__ == "__main__":
    import random
    from core.utils.dataprocessing.dataset import DetectionDataset

    input_size = (960, 1280)
    scale_factor = 4
    # Project root: four dirname hops up from this file.
    root = os.path.abspath(__file__)
    for _ in range(4):
        root = os.path.dirname(root)
    transform = CenterTrainTransform(input_size,
                                     mean=(0.485, 0.456, 0.406),
                                     std=(0.229, 0.224, 0.225),
                                     scale_factor=scale_factor)
    dataset = DetectionDataset(path=os.path.join(root, 'Dataset', 'train'),
                               transform=transform)
    length = len(dataset)
    sample_index = random.randint(0, length - 1)
    image, label, file_name, _, _ = dataset[sample_index]

    print('images length:', length)
    print('image shape:', image.shape)
    print('label shape:', label.shape)
            return img, bbox[0], heatmap[0], offset_target[0], wh_target[0], mask_target[0], name
        else:
            bbox = torch.as_tensor(bbox)
            return img, bbox, name


# Smoke test: draw one random frame-sequence sample and print its shapes.
if __name__ == "__main__":
    import random
    from core.utils.dataprocessing.dataset import DetectionDataset

    input_size = (960, 1280)
    scale_factor = 4
    # Project root: four dirname hops up from this file.
    root = os.path.abspath(__file__)
    for _ in range(4):
        root = os.path.dirname(root)
    transform = CenterTrainTransform(input_size, input_frame_number=2,
                                     mean=(0.485, 0.456, 0.406),
                                     std=(0.229, 0.224, 0.225),
                                     scale_factor=scale_factor)
    dataset = DetectionDataset(path=os.path.join(root, 'valid'),
                               transform=transform, sequence_number=2)
    length = len(dataset)
    sample_index = random.randint(0, length - 1)
    image, label, file_name, _, _ = dataset[sample_index]

    print('images length:', length)
    print('image shape:', image.shape)
    print('label shape:', label.shape)
    '''
    images length: 1500
    image shape: torch.Size([6, 960, 1280])
    label shape: torch.Size([1, 5])
    '''