def get_dataloader(model, train_dataset, validation_dataset, height, width,
                   batch_size, num_workers):
    """Build the training DataLoader for an SSD model.

    Args:
        model (SSD model): Object detection model; run once in train mode on
            a fake batch to obtain the fixed anchors used for target
            generation.
        train_dataset (Dataset): Training images and labels.
        validation_dataset (Dataset): Unused here; kept so callers that pass
            it keep working.
        height (int): Height of the training image.
        width (int): Width of the training image.
        batch_size (int): Number of images in a mini batch.
        num_workers (int): Number of multiprocessing workers.

    Returns:
        DataLoader: Mini batches of (image, cls_targets, box_targets).
    """
    # Run the net once in train mode on a dummy NCHW batch to get its anchors.
    with autograd.train_mode():
        _, _, anchors = model(mx.nd.zeros((1, 3, height, width)))
    # Stack image, cls_targets and box_targets into dense batches.
    batchify_fn = Tuple(Stack(), Stack(), Stack())

    # BUGFIX: SSDDefaultTrainTransform's signature is (width, height, anchors);
    # the original passed (height, width), which swaps the output dimensions
    # whenever the input is not square.
    train_loader = gluon.data.DataLoader(
        train_dataset.transform(
            SSDDefaultTrainTransform(width, height, anchors)),
        batch_size,
        True,  # shuffle
        batchify_fn=batchify_fn,
        last_batch='rollover',
        num_workers=num_workers)

    return train_loader
def get_dataloader(net, train_dataset, val_dataset, data_shape, batch_size,
                   num_workers):
    """Build train/val DataLoaders for a RefineDet-style network."""
    width = height = data_shape
    # A dummy train-mode forward pass makes the net emit its fixed anchors,
    # which the training transform needs for target generation.
    with autograd.train_mode():
        anchors, _, _, _, _, _, _ = net(mx.nd.zeros((1, 3, height, width)))

    # Train batchify: stack image, anchor_cls_targets, anchor_box_targets and
    # pad the variable-length real targets (xmin, ymin, xmax, ymax, label)
    # with -1, also returning each sample's true length.
    train_batchify = Tuple(Stack(), Stack(), Stack(),
                           Pad(axis=0, pad_val=-1, ret_length=True))
    train_transform = RefineDetDefaultTrainTransform(width, height, anchors)
    train_loader = gluon.data.DataLoader(
        train_dataset.transform(train_transform),
        batch_size,
        True,
        batchify_fn=train_batchify,
        last_batch='rollover',
        num_workers=num_workers)
    # Batches: img; anchor_cls_targets (B, N); anchor_box_targets (B, N, 4);
    # targets (B, P, 5) plus per-sample lengths (B,), where P is the padded
    # object count.

    # Validation re-uses the plain SSD val transform; ragged labels are padded
    # with -1 to the longest sample in the batch.
    val_batchify = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(SSDDefaultValTransform(width, height)),
        batch_size,
        False,
        batchify_fn=val_batchify,
        last_batch='keep',
        num_workers=num_workers)
    return train_loader, val_loader
# ---- Example 3 (scraped-snippet separator; original "Esempio n. 3" / vote count 0) ----
def get_dataloader(net, data_shape, batch_size, num_workers, ctx):
    """Build the SSD training DataLoader from a SageMaker RecordIO channel.

    Args:
        net: SSD network; run once in train mode to obtain fixed anchors.
        data_shape (int): Square input size (width == height).
        batch_size (int): Mini-batch size.
        num_workers (int): Data-loading worker processes.
        ctx: Context (CPU/GPU) on which to run the dummy forward pass.

    Returns:
        DataLoader yielding (image, cls_targets, box_targets) batches.
    """
    import os
    import subprocess
    import sys

    # Install gluoncv at runtime (the training container may not ship it).
    # BUGFIX: use the *current* interpreter's pip and fail loudly; the
    # original `os.system('pip3 install gluoncv --pre')` ignored failures and
    # could install into a different Python than the one running this code.
    subprocess.run(
        [sys.executable, '-m', 'pip', 'install', 'gluoncv', '--pre'],
        check=True)

    from gluoncv import data as gdata
    from gluoncv.data.batchify import Tuple, Stack, Pad
    from gluoncv.data.transforms.presets.ssd import SSDDefaultTrainTransform

    width, height = data_shape, data_shape
    # use fake data to generate fixed anchors for target generation
    with autograd.train_mode():
        _, _, anchors = net(mx.nd.zeros((1, 3, height, width), ctx))
    # Worker processes batchify on CPU, so anchors must live there.
    anchors = anchors.as_in_context(mx.cpu())
    batchify_fn = Tuple(Stack(), Stack(),
                        Stack())  # stack image, cls_targets, box_targets
    train_dataset = gdata.RecordFileDetection(
        os.path.join(os.environ['SM_CHANNEL_TRAIN'], 'train.rec'))
    train_loader = gluon.data.DataLoader(train_dataset.transform(
        SSDDefaultTrainTransform(width, height, anchors)),
                                         batch_size,
                                         True,
                                         batchify_fn=batchify_fn,
                                         last_batch='rollover',
                                         num_workers=num_workers)
    return train_loader
def ssd_train_dataloader(net,
                         train_dataset,
                         data_shape=512,
                         batch_size=10,
                         num_workers=0):
    '''
    Return the gluoncv training DataLoader for an SSD network, using a
    transform whose __call__ is overridden by `new_trainloader_call`.
    '''
    from gluoncv.data.batchify import Tuple, Stack, Pad
    from gluoncv.data.transforms.presets.ssd import SSDDefaultTrainTransform

    width, height = data_shape, data_shape
    # use fake data to generate fixed anchors for target generation
    with autograd.train_mode():
        _, _, anchors = net(mx.nd.zeros((1, 3, height, width)))
    batchify_fn = Tuple(Stack(), Stack(),
                        Stack())  # stack image, cls_targets, box_targets

    # BUGFIX: Python looks special methods up on the *type*, so the original
    # `instance.__call__ = new_trainloader_call` was silently ignored and the
    # stock SSDDefaultTrainTransform ran instead. Overriding on a subclass
    # makes `new_trainloader_call(self, ...)` actually run.
    # NOTE(review): `new_trainloader_call` is defined elsewhere in this module
    # -- confirm it accepts `self` as its first argument.
    patched_cls = type('PatchedSSDTrainTransform',
                       (SSDDefaultTrainTransform,),
                       {'__call__': new_trainloader_call})
    train_transform = patched_cls(width, height, anchors)
    train_loader = gluon.data.DataLoader(
        train_dataset.transform(train_transform),
        batch_size,
        True,
        batchify_fn=batchify_fn,
        last_batch="rollover",
        num_workers=num_workers,
    )
    return train_loader
# ---- Example 5 (scraped-snippet separator; original "Esempio n. 5" / vote count 0) ----
def get_coco_data_loaders(net, train_data, val_data, in_size, bs, n_workers,
                          ctx):
    """Create COCO-style train/val DataLoaders for an SSD network."""
    # Dummy forward pass (batch of 1, 3 channels) in train mode yields the
    # net's fixed anchors for target generation.
    with autograd.train_mode():
        _, _, anchors = net(nd.zeros((1, 3, in_size, in_size), ctx))
    # Worker processes batchify on CPU, so move the anchors there.
    anchors = anchors.as_in_context(mx.cpu())

    # Train batches: stack image, class targets and box targets.
    train_batchify_fn = Tuple(Stack(), Stack(), Stack())
    train_data_loader = gluon.data.DataLoader(
        train_data.transform(SSDDefaultTrainTransform(in_size, in_size, anchors)),
        batch_size=bs,
        shuffle=True,
        batchify_fn=train_batchify_fn,
        last_batch='rollover',
        num_workers=n_workers)

    # Val batches: stack images, pad ragged labels with -1.
    val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    val_data_loader = gluon.data.DataLoader(
        val_data.transform(SSDDefaultValTransform(in_size, in_size)),
        batch_size=bs,
        shuffle=False,
        batchify_fn=val_batchify_fn,
        last_batch='keep',
        num_workers=n_workers)

    return train_data_loader, val_data_loader
# ---- Example 6 (scraped-snippet separator; original "Esempio n. 6" / vote count 0) ----
def get_dataloader(net, train_dataset, val_dataset, data_shape, batch_size,
                   num_workers):
    """Return (train_loader, val_loader) for SSD training."""
    width = height = data_shape
    # A dummy train-mode forward pass produces the fixed anchors the train
    # transform needs for target generation.
    with autograd.train_mode():
        _, _, anchors = net(mx.nd.zeros((1, 3, height, width)))
    # stack image, cls_targets, box_targets
    train_batchify = Tuple(Stack(), Stack(), Stack())
    train_loader = gluon.data.DataLoader(
        train_dataset.transform(SSDDefaultTrainTransform(width, height, anchors)),
        batch_size,
        True,
        batchify_fn=train_batchify,
        last_batch='rollover',
        num_workers=num_workers)
    # stack images, pad ragged labels with -1
    val_batchify = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(SSDDefaultValTransform(width, height)),
        batch_size,
        False,
        batchify_fn=val_batchify,
        last_batch='keep',
        num_workers=num_workers)
    return train_loader, val_loader
# ---- Example 7 (scraped-snippet separator; original "Esempio n. 7" / vote count 0) ----
def tri_data_loader(batch_size, transform_train, transform_valid, num_workers=0,
                    train_dataset=None, valid_dataset=None,
                    valid_train_dataset=None, batchify_fns=None,
                    train_shuffle=True):
    """Build train / valid (and optionally valid-train) DataLoaders.

    batchify_fns: dict with 'train', 'valid' and 'valid_train' entries,
    e.g. the SSD-style defaults used below.
    """
    if batchify_fns is None:
        # SSD-style defaults: stack the train targets; pad ragged labels
        # with -1 on the validation side.
        batchify_fns = {'train': Tuple(Stack(), Stack(), Stack()),
                        'valid': Tuple(Stack(), Pad(pad_val=-1)),
                        'valid_train': Tuple(Stack(), Pad(pad_val=-1))}

    # 1. Validation loader: fixed order, keep the ragged last batch.
    valid_data = DataLoader(valid_dataset.transform(transform_valid),
                            batch_size, shuffle=False,
                            batchify_fn=batchify_fns['valid'],
                            last_batch='keep', num_workers=num_workers)

    # 2. Training loader: optionally shuffled, leftovers roll over.
    train_data = DataLoader(train_dataset.transform(transform_train),
                            batch_size, shuffle=train_shuffle,
                            batchify_fn=batchify_fns['train'],
                            last_batch='rollover', num_workers=num_workers)

    # 3. Optional loader over the training set with validation transforms.
    if valid_train_dataset is None:
        return train_data, valid_data, train_dataset.classes
    valid_train_data = DataLoader(valid_train_dataset.transform(transform_valid),
                                  batch_size, shuffle=False,
                                  batchify_fn=batchify_fns['valid_train'],
                                  last_batch='keep', num_workers=num_workers)
    return train_data, valid_data, train_dataset.classes, valid_train_data
# ---- Example 8 (scraped-snippet separator; original "Esempio n. 8" / vote count 0) ----
def get_dataloader(net, data_shape, batch_size, num_workers, ctx):
    """Build the SSD training DataLoader from the SageMaker train channel.

    Args:
        net: SSD network, used once in train mode to produce fixed anchors.
        data_shape (int): Square input size.
        batch_size (int): Mini-batch size.
        num_workers (int): Data-loading worker processes.
        ctx: Context for the dummy forward pass.

    Returns:
        DataLoader yielding (image, cls_targets, box_targets) batches.
    """
    width, height = data_shape, data_shape
    # use fake data to generate fixed anchors for target generation
    with autograd.train_mode():
        _, _, anchors = net(mx.nd.zeros((1, 3, height, width), ctx))
    # Anchors must live on CPU for the (possibly multiprocess) workers.
    anchors = anchors.as_in_context(mx.cpu())
    batchify_fn = Tuple(Stack(), Stack(),
                        Stack())  # stack image, cls_targets, box_targets

    # RecordIO file produced ahead of time; pointing this at a folder of raw
    # .png files would require building a .rec/.idx pair first.
    train_dataset = gdata.RecordFileDetection(
        os.path.join(os.environ['SM_CHANNEL_TRAIN'], 'train.rec'))

    # (Removed the unused local `train_folder`; the channel path is already
    # consumed above.)
    train_loader = gluon.data.DataLoader(train_dataset.transform(
        SSDDefaultTrainTransform(width, height, anchors)),
                                         batch_size,
                                         True,
                                         batchify_fn=batchify_fn,
                                         last_batch='rollover',
                                         num_workers=num_workers)
    return train_loader
# ---- Example 9 (scraped-snippet separator; original "Esempio n. 9" / vote count 0) ----
def get_dataloader(net, train_dataset, valid_dataset, data_shape, batch_size,
                   num_workers):
    """Build SSD train/val DataLoaders plus a VOC07 mAP metric.

    Args:
        net: SSD network; run once in train mode to obtain fixed anchors.
        train_dataset: training dataset.
        valid_dataset: validation dataset.
        data_shape (int): square input size.
        batch_size (int): mini-batch size.
        num_workers (int): data-loading worker processes.

    Returns:
        (train_loader, val_loader, eval_metric)
    """
    # CONSISTENCY FIX: the original imported only the train transform locally
    # yet also used SSDDefaultValTransform and VOC07MApMetric; import all
    # three here so the function is self-contained like its siblings.
    from gluoncv.data.batchify import Tuple, Stack, Pad
    from gluoncv.data.transforms.presets.ssd import (SSDDefaultTrainTransform,
                                                     SSDDefaultValTransform)
    from gluoncv.utils.metrics.voc_detection import VOC07MApMetric

    width, height = data_shape, data_shape
    # use fake data to generate fixed anchors for target generation
    with autograd.train_mode():
        _, _, anchors = net(mx.nd.zeros((1, 3, height, width)))
    batchify_fn = Tuple(Stack(), Stack(),
                        Stack())  # stack image, cls_targets, box_targets
    train_loader = gluon.data.DataLoader(train_dataset.transform(
        SSDDefaultTrainTransform(width, height, anchors)),
                                         batch_size,
                                         True,
                                         batchify_fn=batchify_fn,
                                         last_batch='rollover',
                                         num_workers=num_workers)

    val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))

    val_loader = gluon.data.DataLoader(valid_dataset.transform(
        SSDDefaultValTransform(width, height)),
                                       batch_size,
                                       False,
                                       batchify_fn=val_batchify_fn,
                                       last_batch='keep',
                                       num_workers=num_workers)

    # NOTE(review): `classes` is a free module-level name -- confirm it is
    # defined in this module; `train_dataset.classes` may be the safer source.
    eval_metric = VOC07MApMetric(iou_thresh=0.5, class_names=classes)
    return train_loader, val_loader, eval_metric
# ---- Example 10 (scraped-snippet separator; original "Esempio n. 10" / vote count 0) ----
def get_dataloader(model, train_dataset, validation_dataset, height, width,
                   batch_size, num_workers):
    """
    Build the SSD training DataLoader.

    `validation_dataset` is accepted but unused here; kept so existing
    callers keep working.
    """
    # (Removed the unused `import gluoncv as gcv` and the unused `Pad` name.)
    from gluoncv.data.batchify import Tuple, Stack
    from gluoncv.data.transforms.presets.ssd import SSDDefaultTrainTransform

    # In training mode, SSD returns three intermediate values:
    # cls_preds are the class predictions prior to softmax;
    # box_preds are bounding box offsets with one-to-one correspondence to
    # anchors; anchors are the fixed priors needed for target generation.
    with autograd.train_mode():
        _, _, anchors = model(mx.nd.zeros((1, 3, height, width)))
    batchify_fn = Tuple(Stack(), Stack(), Stack())

    # SSDDefaultTrainTransform: data augmentation and preprocessing --
    # random color jittering, random expansion with prob 0.5, random cropping,
    # resize with random interpolation, random horizontal flip,
    # normalize (subtract mean and divide by std).
    # BUGFIX: the transform's signature is (width, height, anchors); the
    # original passed (height, width), swapping dimensions for non-square
    # inputs.
    train_loader = gluon.data.DataLoader(
        train_dataset.transform(
            SSDDefaultTrainTransform(width, height, anchors)),
        batch_size,
        True,
        batchify_fn=batchify_fn,
        last_batch='rollover',
        num_workers=num_workers)

    return train_loader
# ---- Example 11 (scraped-snippet separator; original "Esempio n. 11" / vote count 0) ----
def get_dataloader(net, train_dataset, val_dataset, data_shape, batch_size,
                   num_workers):
    """Return (train_loader, val_loader)."""
    width, height = data_shape, data_shape

    # BUGFIX: generate the anchors at the actual training resolution instead
    # of a hard-coded 512x512 dummy; otherwise the anchors disagree with the
    # TrainTransform targets whenever data_shape != 512.
    with autograd.train_mode():
        _, _, anchors = net(mx.nd.zeros((1, 3, height, width)))
    batchify_fn_train = Tuple(Stack(), Stack(), Stack())
    train_loader = gluon.data.DataLoader(train_dataset.transform(
        TrainTransform(width, height, anchors)),
                                         batch_size,
                                         shuffle=True,
                                         batchify_fn=batchify_fn_train,
                                         last_batch='rollover',
                                         num_workers=num_workers)

    # NOTE(review): stacking validation labels assumes every image has the
    # same number of objects; sibling loaders in this file use
    # Pad(pad_val=-1) here -- confirm which is intended.
    batchify_fn = Tuple(Stack(), Stack())
    val_loader = gluon.data.DataLoader(val_dataset.transform(
        SSDDefaultValTransform(width, height)),
                                       batch_size,
                                       shuffle=False,
                                       batchify_fn=batchify_fn,
                                       last_batch='keep',
                                       num_workers=num_workers)
    return train_loader, val_loader
# ---- Example 12 (scraped-snippet separator; original "Esempio n. 12" / vote count 0) ----
def get_dataloader(net, train_dataset, val_dataset, data_shape, batch_size, num_workers, args):
    """Build YOLOv3 train/val DataLoaders, optionally with random input shapes."""
    # Outside the final_fit stage, carve a validation split out of the
    # training set when none was supplied.
    if not args.final_fit and not val_dataset:
        train_dataset, val_dataset = _train_val_split(train_dataset, args.split_ratio)

    width = height = data_shape
    # Six stacked target arrays plus one padded array of raw labels.
    stacked = [Stack() for _ in range(6)]
    padded = [Pad(axis=0, pad_val=-1)]
    batchify_fn = Tuple(*(stacked + padded))  # stack image, all targets generated
    if args.no_random_shape:
        train_transform = YOLO3DefaultTrainTransform(width, height, net, mixup=args.mixup)
        train_loader = gluon.data.DataLoader(
            train_dataset.transform(train_transform),
            batch_size, True, batchify_fn=batchify_fn, last_batch='rollover',
            num_workers=num_workers)
    else:
        # Multi-scale training: resample an input size (320..608, step 32)
        # every 10 batches.
        transform_fns = [
            YOLO3DefaultTrainTransform(s * 32, s * 32, net, mixup=args.mixup)
            for s in range(10, 20)
        ]
        train_loader = RandomTransformDataLoader(
            transform_fns, train_dataset, batch_size=batch_size, interval=10,
            last_batch='rollover', shuffle=True, batchify_fn=batchify_fn,
            num_workers=num_workers)

    val_loader = None
    if val_dataset:
        val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
        val_loader = gluon.data.DataLoader(
            val_dataset.transform(YOLO3DefaultValTransform(width, height)),
            batch_size, False, batchify_fn=val_batchify_fn, last_batch='keep',
            num_workers=num_workers)
    return train_loader, val_loader
# ---- Example 13 (scraped-snippet separator; original "Esempio n. 13" / vote count 0) ----
def get_dataloader(net, train_dataset, validation_dataset, image_h, image_w,
                   batch_size, num_workers):
    """
    Build YOLOv3 train/val DataLoaders with extra prefetching.
    """
    # Prefetch roughly 1.5x the worker count to keep consumers fed.
    prefetch = num_workers + num_workers // 2

    # stack image and the six generated target arrays; pad raw labels with -1
    train_batchify = Tuple(*([Stack() for _ in range(6)] +
                             [Pad(axis=0, pad_val=-1)]))
    train_loader = gluon.data.DataLoader(
        train_dataset.transform(YOLO3DefaultTrainTransform(image_w, image_h, net)),
        batch_size,
        True,
        batchify_fn=train_batchify,
        last_batch='rollover',
        num_workers=num_workers,
        prefetch=prefetch)

    # Validation: stack images, pad ragged labels with -1.
    val_transform = YOLO3DefaultValTransform(image_w, image_h)
    val_batchify = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(
        validation_dataset.transform(val_transform),
        batch_size,
        False,
        batchify_fn=val_batchify,
        last_batch='keep',
        num_workers=num_workers,
        prefetch=prefetch)

    return train_loader, val_loader
# ---- Example 14 (scraped-snippet separator; original "Esempio n. 14" / vote count 0) ----
def get_dataloader(net, train_dataset, val_dataset, data_shape, batch_size,
                   num_workers, ctx):
    """Build CenterNet train/val DataLoaders. `ctx` is accepted but unused."""
    width = height = data_shape
    num_class = len(train_dataset.classes)
    # Six stacked arrays per sample (image plus generated targets).
    train_batchify = Tuple([Stack() for _ in range(6)])
    train_transform = CenterNetDefaultTrainTransform(
        width, height, num_class=num_class, scale_factor=net.scale)
    train_loader = gluon.data.DataLoader(
        train_dataset.transform(train_transform),
        batch_size,
        True,
        batchify_fn=train_batchify,
        last_batch='rollover',
        num_workers=num_workers)
    # Validation: stack images, pad ragged labels with -1.
    val_batchify = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(CenterNetDefaultValTransform(width, height)),
        batch_size,
        False,
        batchify_fn=val_batchify,
        last_batch='keep',
        num_workers=num_workers)
    return train_loader, val_loader
# ---- Example 15 (scraped-snippet separator; original "Esempio n. 15" / vote count 0) ----
 def getDataloader(self, train_dataset, val_dataset):
     """Build SSD train/val DataLoaders from the settings in self.args."""
     width = height = self.args.data_shape
     # A dummy train-mode forward pass on the first context yields the fixed
     # anchors used for target generation.
     with autograd.train_mode():
         _, _, anchors = self.net(
             mx.nd.zeros((1, 3, height, width), self.ctx[0]))
     # Worker processes batchify on CPU.
     anchors = anchors.as_in_context(mx.cpu())
     # stack image, cls_targets, box_targets
     train_batchify = Tuple(Stack(), Stack(), Stack())
     train_transform = SSDDefaultTrainTransform(width, height, anchors)
     train_loader = gluon.data.DataLoader(
         train_dataset.transform(train_transform),
         self.args.batch_size,
         True,
         batchify_fn=train_batchify,
         last_batch='rollover',
         num_workers=self.args.num_workers)
     # Validation is optional: stack images, pad ragged labels with -1.
     val_batchify = Tuple(Stack(), Pad(pad_val=-1))
     val_loader = None
     if val_dataset is not None:
         val_loader = gluon.data.DataLoader(
             val_dataset.transform(SSDDefaultValTransform(width, height)),
             self.args.batch_size,
             False,
             batchify_fn=val_batchify,
             last_batch='keep',
             num_workers=self.args.num_workers)
     return train_loader, val_loader
# ---- Example 16 (scraped-snippet separator; original "Esempio n. 16" / vote count 0) ----
def test_transforms_presets_center_net():
    """Smoke-test CenterNet preset transforms and a couple of batches."""
    im_fname = gcv.utils.download('https://github.com/dmlc/web-data/blob/master/' +
                                  'gluoncv/detection/biking.jpg?raw=true', path='biking.jpg')
    # load_test and transform_test must agree on the same image.
    x, orig_img = center_net.load_test(im_fname, short=512)
    x1, orig_img1 = center_net.transform_test(mx.image.imread(im_fname), short=512)
    np.testing.assert_allclose(x.asnumpy(), x1.asnumpy())
    np.testing.assert_allclose(orig_img, orig_img1)
    # The dataloader checks need the tiny VOC dataset on disk.
    if not osp.isdir(osp.expanduser('~/.mxnet/datasets/voc')):
        return
    train_dataset = VOCDetectionTiny()
    val_dataset = VOCDetectionTiny(splits=[('tiny_motorbike', 'test')])
    width = height = 512
    net = gcv.model_zoo.get_model('center_net_resnet18_v1b_voc',
                                  pretrained=False, pretrained_base=False)
    net.initialize()
    num_workers, batch_size = 0, 4
    train_batchify = Tuple([Stack() for _ in range(6)])
    train_transform = center_net.CenterNetDefaultTrainTransform(
        width, height, num_class=len(train_dataset.classes),
        scale_factor=net.scale)
    train_loader = gluon.data.DataLoader(
        train_dataset.transform(train_transform), batch_size, True,
        batchify_fn=train_batchify, last_batch='rollover',
        num_workers=num_workers)
    val_batchify = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(center_net.CenterNetDefaultValTransform(width, height)),
        batch_size, False, batchify_fn=val_batchify, last_batch='keep',
        num_workers=num_workers)

    # Drain two batches from each loader to prove the pipeline runs.
    for loader in (train_loader, val_loader):
        for i, _batch in enumerate(loader):
            if i > 1:
                break
# ---- Example 17 (scraped-snippet separator; original "Esempio n. 17" / vote count 0) ----
def test_transforms_presets_ssd():
    """Smoke-test SSD preset transforms and a couple of batches."""
    im_fname = gcv.utils.download('https://github.com/dmlc/web-data/blob/master/' +
                                  'gluoncv/detection/biking.jpg?raw=true', path='biking.jpg')
    # load_test and transform_test must produce identical tensors.
    x, orig_img = ssd.load_test(im_fname, short=512)
    x1, orig_img1 = ssd.transform_test(mx.image.imread(im_fname), short=512)
    np.testing.assert_allclose(x.asnumpy(), x1.asnumpy())
    np.testing.assert_allclose(orig_img, orig_img1)
    # The dataloader checks need the tiny VOC dataset on disk.
    if not osp.isdir(osp.expanduser('~/.mxnet/datasets/voc')):
        return
    train_dataset = VOCDetectionTiny()
    val_dataset = VOCDetectionTiny(splits=[('tiny_motorbike', 'test')])
    width = height = 512
    net = gcv.model_zoo.get_model('ssd_512_resnet50_v1_voc',
                                  pretrained=False, pretrained_base=False)
    net.initialize()
    num_workers, batch_size = 0, 4
    # Dummy train-mode pass -> anchors for the anchored train transform.
    with autograd.train_mode():
        _, _, anchors = net(mx.nd.zeros((1, 3, height, width)))
    train_batchify = Tuple(Stack(), Stack(), Stack())  # stack image, cls_targets, box_targets
    train_loader = gluon.data.DataLoader(
        train_dataset.transform(ssd.SSDDefaultTrainTransform(width, height, anchors)),
        batch_size, True, batchify_fn=train_batchify, last_batch='rollover',
        num_workers=num_workers)
    val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(ssd.SSDDefaultValTransform(width, height)),
        batch_size, False, batchify_fn=val_batchify_fn, last_batch='keep',
        num_workers=num_workers)
    # The anchor-less train transform pairs with the padding batchify.
    train_loader2 = gluon.data.DataLoader(
        train_dataset.transform(ssd.SSDDefaultTrainTransform(width, height)),
        batch_size, True, batchify_fn=val_batchify_fn, last_batch='rollover',
        num_workers=num_workers)

    # Drain two batches from each loader to prove the pipeline runs.
    for loader in (train_loader, val_loader, train_loader2):
        for i, _batch in enumerate(loader):
            if i > 1:
                break
# ---- Example 18 (scraped-snippet separator; original "Esempio n. 18" / vote count 0) ----
def get_dataloader(net, train_dataset, val_dataset, data_shape, batch_size,
                   num_workers):
    """Build a YOLOv3 multi-scale train loader and a fixed-size val loader."""
    width = height = data_shape
    # stack image and the six generated targets; pad raw labels with -1
    batchify_fn = Tuple(*([Stack() for _ in range(6)] +
                          [Pad(axis=0, pad_val=-1)]))
    # One transform per input scale (320..608, step 32); the loader rotates
    # among them every 10 batches for multi-scale training.
    transform_fns = [
        YOLO3DefaultTrainTransform(s * 32, s * 32, net) for s in range(10, 20)
    ]
    train_loader = RandomTransformDataLoader(
        transform_fns,
        train_dataset,
        batch_size=batch_size,
        interval=10,
        last_batch='rollover',
        shuffle=True,
        batchify_fn=batchify_fn,
        num_workers=num_workers)
    # (Fixed-shape alternative kept from the original, for reference:)
    # train_loader = gluon.data.DataLoader(
    #     train_dataset.transform(YOLO3DefaultTrainTransform(width, height, net)),
    #     batch_size, True, batchify_fn=batchify_fn, last_batch='rollover', num_workers=num_workers)
    val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(YOLO3DefaultValTransform(width, height)),
        batch_size,
        False,
        batchify_fn=val_batchify_fn,
        last_batch='keep',
        num_workers=num_workers)
    return train_loader, val_loader
# ---- Example 19 (scraped-snippet separator; original "Esempio n. 19" / vote count 0) ----
def get_dataloader(net, train_dataset, val_dataset, data_shape, batch_size,
                   num_workers, args):
    """Build YOLOv3 train/val DataLoaders (installs gluoncv at runtime)."""
    import subprocess
    import sys

    # BUGFIX: install gluoncv with the *current* interpreter's pip and fail
    # loudly; the original `os.system('pip3 install gluoncv')` ignored errors
    # and could install into a different Python installation.
    subprocess.run([sys.executable, '-m', 'pip', 'install', 'gluoncv'],
                   check=True)

    import gluoncv as gcv
    gcv.utils.check_version('0.6.0')
    from gluoncv.data.batchify import Tuple, Stack, Pad
    from gluoncv.data.transforms.presets.yolo import YOLO3DefaultTrainTransform
    from gluoncv.data.transforms.presets.yolo import YOLO3DefaultValTransform
    from gluoncv.data.dataloader import RandomTransformDataLoader
    # (Removed unused local imports: gdata, gutils, get_model, VOC07MApMetric,
    #  COCODetectionMetric, LRScheduler, LRSequential -- nothing in this
    #  function referenced them.)

    width, height = data_shape, data_shape
    batchify_fn = Tuple(
        *([Stack() for _ in range(6)] +
          [Pad(axis=0, pad_val=-1)
           for _ in range(1)]))  # stack image, all targets generated
    if args.no_random_shape:
        # Debug output kept from the original implementation.
        print(len(train_dataset))
        img, label = train_dataset[0]
        print(img.shape, label.shape)
        train_loader = gluon.data.DataLoader(train_dataset.transform(
            YOLO3DefaultTrainTransform(width, height, net, mixup=args.mixup)),
                                             batch_size,
                                             True,
                                             batchify_fn=batchify_fn,
                                             last_batch='rollover',
                                             num_workers=num_workers)
    else:
        # Multi-scale training: one transform per scale in 320..608 (step 32).
        transform_fns = [
            YOLO3DefaultTrainTransform(x * 32, x * 32, net, mixup=args.mixup)
            for x in range(10, 20)
        ]
        train_loader = RandomTransformDataLoader(transform_fns,
                                                 train_dataset,
                                                 batch_size=batch_size,
                                                 interval=10,
                                                 last_batch='rollover',
                                                 shuffle=True,
                                                 batchify_fn=batchify_fn,
                                                 num_workers=num_workers)
    val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(val_dataset.transform(
        YOLO3DefaultValTransform(width, height)),
                                       batch_size,
                                       False,
                                       batchify_fn=val_batchify_fn,
                                       last_batch='keep',
                                       num_workers=num_workers)
    return train_loader, val_loader
# ---- Example 20 (scraped-snippet separator; original "Esempio n. 20" / vote count 0) ----
def get_dataloader(dataset, batch_size):
    """Build an inference DataLoader sized from module-level FLAGS."""
    width = height = FLAGS.data_shape
    # stack image, pad ragged labels with -1, stack a third per-sample array
    batchify_fn = Tuple(Stack(), Pad(pad_val=-1), Stack())
    transform = YOLO3VideoInferenceTransform(width, height)
    return gluon.data.DataLoader(dataset.transform(transform),
                                 batch_size,
                                 False,
                                 last_batch='keep',
                                 num_workers=FLAGS.num_workers,
                                 batchify_fn=batchify_fn)
# ---- Example 21 (scraped-snippet separator; original "Esempio n. 21" / vote count 0) ----
def get_dataloader(net, train_dataset, data_shape, batch_size, num_workers):
    """Build the SSD training DataLoader."""
    from gluoncv.data.batchify import Tuple, Stack, Pad
    from gluoncv.data.transforms.presets.ssd import SSDDefaultTrainTransform

    width = height = data_shape
    # A dummy train-mode forward pass yields the fixed anchors used for
    # target generation.
    with autograd.train_mode():
        _, _, anchors = net(mx.nd.zeros((1, 3, height, width)))
    # stack image, cls_targets, box_targets
    batchify_fn = Tuple(Stack(), Stack(), Stack())
    transform = SSDDefaultTrainTransform(width, height, anchors)
    return gluon.data.DataLoader(train_dataset.transform(transform),
                                 batch_size,
                                 True,
                                 batchify_fn=batchify_fn,
                                 last_batch='rollover',
                                 num_workers=num_workers)
# ---- Example 22 (scraped-snippet separator; original "Esempio n. 22" / vote count 0) ----
def get_traindataloader(net, train_dataset, data_shape, batch_size, num_workers, is_shuffle=True):
    """Build the SSD training DataLoader; shuffling is optional."""
    width = height = data_shape
    # A dummy train-mode forward pass produces the fixed anchors
    # (an mxnet ndarray, e.g. 1 x num_anchors x 4).
    with autograd.train_mode():
        _, _, anchors = net(mx.nd.zeros((1, 3, height, width)))
    # stack image, cls_targets, box_targets
    batchify_fn = Tuple(Stack(), Stack(), Stack())
    transform = SSDDefaultTrainTransform(width, height, anchors)
    return gluon.data.DataLoader(train_dataset.transform(transform),
                                 batch_size,
                                 is_shuffle,
                                 batchify_fn=batchify_fn,
                                 last_batch='rollover',
                                 num_workers=num_workers)
# ---- Example 23 (scraped-snippet separator; original "Esempio n. 23" / vote count 0) ----
def get_dataloader(net, train_dataset, val_dataset, data_shape, batch_size,
                   num_workers, ctx):
    """Create training and validation dataloaders for an SSD model.

    Arguments:
        net: the Gluon model you will train, used to generate fake anchors for target generation.
        train_dataset: Training dataset. Note that numpy and mxnet arrays can be directly used as a Dataset.
        val_dataset: Validation dataset. Note that numpy and mxnet arrays can be directly used as a Dataset.
        data_shape: Tuple, the input_shape of the model
        batch_size: Size of mini-batch.
        num_workers: The number of multiprocessing workers to use for data preprocessing.
        ctx: Indicator to the usage of GPU.
    Returns:
        train_loader: Gluon training dataloader
        val_loader: Gluon testing dataloader
    """
    width, height = data_shape

    # A dummy forward pass in train mode makes the net emit its fixed anchors.
    with autograd.train_mode():
        _, _, anchors = net(mx.nd.zeros((1, 3, height, width), ctx))

    # The preprocessing workers run on the CPU, so move the anchors there.
    anchors = anchors.as_in_context(mx.cpu())

    # Image, class targets and box targets are all fixed-size -> stack them.
    train_batchify = Tuple(Stack(), Stack(), Stack())
    train_transform = SSDDefaultTrainTransform(width, height, anchors)
    train_loader = gluon.data.DataLoader(
        train_dataset.transform(train_transform),
        batch_size=batch_size,
        shuffle=True,
        batchify_fn=train_batchify,
        last_batch="rollover",
        num_workers=num_workers,
    )

    # Per-image labels vary in length, so pad them with -1 for validation.
    val_batchify = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(SSDDefaultValTransform(width, height)),
        batch_size=batch_size,
        shuffle=False,
        batchify_fn=val_batchify,
        last_batch="keep",
        num_workers=num_workers,
    )

    return train_loader, val_loader
Esempio n. 24
0
def test_transforms_presets_yolo():
    """Smoke-test the YOLOv3 preset transforms and dataloaders."""
    im_fname = gcv.utils.download(
        'https://github.com/dmlc/web-data/blob/master/' +
        'gluoncv/detection/biking.jpg?raw=true',
        path='biking.jpg')
    # load_test and transform_test must produce identical outputs.
    x, orig_img = yolo.load_test(im_fname, short=512)
    x1, orig_img1 = yolo.transform_test(mx.image.imread(im_fname), short=512)
    np.testing.assert_allclose(x.asnumpy(), x1.asnumpy())
    np.testing.assert_allclose(orig_img, orig_img1)
    # The dataloader checks need the local VOC data; bail out if absent.
    if not osp.isdir(osp.expanduser('~/.mxnet/datasets/voc')):
        return
    train_dataset = gcv.data.VOCDetection(
        splits=((2007, 'trainval'), (2012, 'trainval')))
    val_dataset = gcv.data.VOCDetection(splits=[(2007, 'test')])
    width, height = (512, 512)
    net = gcv.model_zoo.get_model('yolo3_darknet53_voc',
                                  pretrained=False,
                                  pretrained_base=False)
    net.initialize()
    num_workers = 0
    batch_size = 4
    # Six stacked target tensors plus one padded label tensor per batch.
    train_batchify = Tuple(Stack(), Stack(), Stack(),
                           Stack(), Stack(), Stack(),
                           Pad(axis=0, pad_val=-1))
    train_loader = gluon.data.DataLoader(
        train_dataset.transform(
            yolo.YOLO3DefaultTrainTransform(width, height, net)),
        batch_size=batch_size,
        shuffle=True,
        batchify_fn=train_batchify,
        last_batch='rollover',
        num_workers=num_workers)
    val_batchify = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(yolo.YOLO3DefaultValTransform(width, height)),
        batch_size=batch_size,
        shuffle=False,
        batchify_fn=val_batchify,
        last_batch='keep',
        num_workers=num_workers)
    # Without a net, the train transform yields (img, label) like the val one.
    train_loader2 = gluon.data.DataLoader(
        train_dataset.transform(
            yolo.YOLO3DefaultTrainTransform(width, height)),
        batch_size=batch_size,
        shuffle=True,
        batchify_fn=val_batchify,
        last_batch='rollover',
        num_workers=num_workers)

    # Pull two batches from each loader to make sure they iterate cleanly.
    for loader in (train_loader, val_loader, train_loader2):
        for i, _batch in enumerate(loader):
            if i > 1:
                break
Esempio n. 25
0
def get_dataloader(net, train_dataset, val_dataset, data_shape, batch_size, num_workers, args):
    """Build PyramidBox train/val dataloaders (transform and batchify)."""
    height, width = data_shape, data_shape
    # A dummy forward pass in train mode yields fixed anchors for each branch;
    # the net returns (cls, box, anchors) triples for face, head and body.
    with autograd.train_mode():
        outs = net(mx.nd.zeros((1, 3, height, width)))
    anchors = [outs[2], outs[5], outs[8]]
    # Batch layout: image, then cls targets and box targets for the three
    # branches -- everything is fixed-size, so all fields are stacked.
    train_batchify_fn = Tuple(Stack(),                    # source image
                              Stack(), Stack(), Stack(),  # face/head/body cls targets
                              Stack(), Stack(), Stack())  # face/head/body box targets
    train_loader = gluon.data.DataLoader(
        train_dataset.transform(PyramidBoxTrainTransform(width, height, anchors)),
        batch_size=batch_size,
        shuffle=True,
        batchify_fn=train_batchify_fn,
        num_workers=num_workers,
        last_batch='rollover')
    # Validation labels vary per image, so pad them with -1.
    val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(PyramidBoxValTransform()),
        batch_size=batch_size,
        shuffle=False,
        batchify_fn=val_batchify_fn,
        last_batch='keep',
        num_workers=num_workers)
    return train_loader, val_loader
Esempio n. 26
0
    def get_dataloader(self, net, train_dataset, val_dataset, width, height, batch_size, num_workers):
        """Build YOLOv3 training and validation dataloaders.

        Args:
            net: YOLOv3 network; used by the train transform to generate targets.
            train_dataset: training detection dataset.
            val_dataset: validation detection dataset.
            width (int): input image width.
            height (int): input image height.
            batch_size (int): number of images per mini-batch.
            num_workers (int): multiprocessing workers for preprocessing.

        Returns:
            (train_loader, val_loader) tuple of Gluon dataloaders.
        """
        from gluoncv.data.batchify import Tuple, Stack, Pad
        from gluoncv.data.transforms.presets.yolo import YOLO3DefaultTrainTransform, YOLO3DefaultValTransform

        # Stack the image and the six generated target tensors; pad the
        # variable-length raw labels with -1.
        train_batchify_fn = Tuple(*([Stack() for _ in range(6)] + [Pad(axis=0, pad_val=-1) for _ in range(1)]))
        train_loader = gluon.data.DataLoader(
            train_dataset.transform(YOLO3DefaultTrainTransform(width, height, net)),
            batch_size, True, batchify_fn=train_batchify_fn, last_batch='rollover', num_workers=num_workers)

        val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
        # BUG FIX: validation must see every sample exactly once and in order.
        # The original shuffled (True) and used last_batch='rollover', which
        # reorders the val set and silently defers the final partial batch so
        # some samples are never evaluated in a single pass.
        val_loader = gluon.data.DataLoader(
            val_dataset.transform(YOLO3DefaultValTransform(width, height)),
            batch_size, False, batchify_fn=val_batchify_fn, last_batch='keep', num_workers=num_workers)

        return train_loader, val_loader
Esempio n. 27
0
 def _get_dataloader(net, test_dataset, data_shape, batch_size,
                     num_workers, num_devices, args):
     """Build a test dataloader for either YOLOv3 or Faster R-CNN."""
     if args.meta_arch == 'yolo3':
         width, height = data_shape, data_shape
         # Stack images; pad variable-length labels with -1.
         batchify = Tuple(Stack(), Pad(pad_val=-1))
         transform = YOLO3DefaultValTransform(width, height)
         return gluon.data.DataLoader(
             test_dataset.transform(transform),
             batch_size=batch_size,
             shuffle=False,
             batchify_fn=batchify,
             last_batch='keep',
             num_workers=num_workers)
     if args.meta_arch == 'faster_rcnn':
         # Faster R-CNN samples are appended per-device, not stacked.
         batchify = Tuple(*[Append() for _ in range(3)])
         short = net.short
         if isinstance(short, (tuple, list)):
             short = short[-1]
         # Validation uses 1 sample per device.
         return gluon.data.DataLoader(
             test_dataset.transform(
                 FasterRCNNDefaultValTransform(short, net.max_size)),
             batch_size=num_devices,
             shuffle=False,
             batchify_fn=batchify,
             last_batch='keep',
             num_workers=args.num_workers)
     raise NotImplementedError('%s not implemented.' % args.meta_arch)
Esempio n. 28
0
def get_dataloader(net, val_dataset, data_shape, batch_size, num_workers, args):
    """Build the COCO YOLOv3 validation dataloader."""
    width, height = data_shape, data_shape
    # Stack images; pad the per-image label arrays with -1.
    pad_labels = Tuple(Stack(), Pad(pad_val=-1))
    # Extra args (50, 'coco') are passed straight to the transform --
    # presumably a max-object count and the class set; confirm against
    # YOLO3DefaultValTransform's signature.
    transform = YOLO3DefaultValTransform(width, height, 50, 'coco')
    return gluon.data.DataLoader(val_dataset.transform(transform),
                                 batch_size=batch_size,
                                 shuffle=False,
                                 batchify_fn=pad_labels,
                                 last_batch='keep',
                                 num_workers=num_workers)
Esempio n. 29
0
def get_ori_evaldataloader(val_dataset, data_shape, batch_size, num_workers):
    """Build an evaluation dataloader using the SSDOriTransform preset."""
    width, height = data_shape, data_shape
    # Stack images; pad variable-length labels with -1.
    batchify = Tuple(Stack(), Pad(pad_val=-1))
    transformed = val_dataset.transform(SSDOriTransform(width, height))
    # NOTE: last_batch='discard' drops the trailing partial batch -- kept
    # as-is to preserve the original behavior.
    loader = gluon.data.DataLoader(transformed,
                                   batchify_fn=batchify,
                                   batch_size=batch_size,
                                   shuffle=False,
                                   last_batch='discard',
                                   num_workers=num_workers)
    return loader
Esempio n. 30
0
def get_dataloader(val_dataset, data_shape, batch_size, num_workers):
    """Get the SSD validation dataloader.

    Args:
        val_dataset: validation detection dataset.
        data_shape (int): square input size fed to the SSD val transform.
        batch_size (int): images per mini-batch.
        num_workers (int): multiprocessing workers for preprocessing.

    Returns:
        Gluon DataLoader over the transformed validation set.
    """
    width, height = data_shape, data_shape
    # Stack images; pad the variable-length label arrays with -1.
    batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    # BUG FIX: last_batch was 'rollover', which defers the final partial batch
    # to a "next epoch" that never comes in single-pass evaluation, so the
    # tail of the dataset was never evaluated. 'keep' returns it as a smaller
    # batch (consistent with the other val loaders in this file).
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(SSDDefaultValTransform(width, height)), batchify_fn=batchify_fn,
        batch_size=batch_size, shuffle=False, last_batch='keep', num_workers=num_workers)
    return val_loader