def get_dataloader(net, train_dataset, val_dataset, data_shape, batch_size,
                   num_workers):
    """Build train/val dataloaders for RefineDet training.

    A fake forward pass in train mode yields the fixed anchors required by
    the target-generating train transform.
    """
    width = height = data_shape
    # Fake data pass: only the anchors output is needed here.
    with autograd.train_mode():
        anchors, _, _, _, _, _, _ = net(mx.nd.zeros((1, 3, height, width)))

    # Stack image, anchor_cls_targets, anchor_box_targets; pad the raw
    # targets (xmin, ymin, xmax, ymax, label) and also return their lengths.
    train_batchify = Tuple(Stack(), Stack(), Stack(),
                           Pad(axis=0, pad_val=-1, ret_length=True))
    train_transformed = train_dataset.transform(
        RefineDetDefaultTrainTransform(width, height, anchors))
    train_loader = gluon.data.DataLoader(
        train_transformed, batch_size, True,
        batchify_fn=train_batchify, last_batch='rollover',
        num_workers=num_workers)
    # Yields: img (B, H, W, C); anchor_cls_targets (B, N);
    # anchor_box_targets (B, N, 4); targets (B, P, 5); target_len (B,),
    # where P is the padded per-image object count.

    val_batchify = Tuple(Stack(), Pad(pad_val=-1))
    val_transformed = val_dataset.transform(
        SSDDefaultValTransform(width, height))
    val_loader = gluon.data.DataLoader(
        val_transformed, batch_size, False,
        batchify_fn=val_batchify, last_batch='keep',
        num_workers=num_workers)
    return train_loader, val_loader
Ejemplo n.º 2
0
def get_dataloader(net, train_dataset, validation_dataset, image_h, image_w,
                   batch_size, num_workers):
    """Build YOLOv3 train/val dataloaders with worker prefetching."""
    prefetch = num_workers + num_workers // 2

    # Stack the image and the six generated training targets; pad the
    # variable-length ground-truth boxes with -1.
    train_batchify = Tuple(*([Stack() for _ in range(6)]
                             + [Pad(axis=0, pad_val=-1)]))
    train_loader = gluon.data.DataLoader(
        train_dataset.transform(
            YOLO3DefaultTrainTransform(image_w, image_h, net)),
        batch_size, True,
        batchify_fn=train_batchify, last_batch='rollover',
        num_workers=num_workers, prefetch=prefetch)

    # Stack image; pad variable-length labels with -1.
    val_batchify = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(
        validation_dataset.transform(
            YOLO3DefaultValTransform(image_w, image_h)),
        batch_size, False,
        batchify_fn=val_batchify, last_batch='keep',
        num_workers=num_workers, prefetch=prefetch)

    return train_loader, val_loader
Ejemplo n.º 3
0
def get_dataloader(net, train_dataset, val_dataset, data_shape, batch_size,
                   num_workers):
    """Build YOLOv3 dataloaders with multi-scale random-shape training."""
    width, height = data_shape, data_shape
    # Stack image plus the six generated targets; pad variable-length
    # ground-truth boxes with -1.
    batchify_fn = Tuple(*([Stack() for _ in range(6)]
                          + [Pad(axis=0, pad_val=-1)]))
    # One transform per input resolution (320..608, multiples of 32); the
    # loader switches between them every `interval` batches.
    transform_fns = [
        YOLO3DefaultTrainTransform(x * 32, x * 32, net) for x in range(10, 20)
    ]
    train_loader = RandomTransformDataLoader(
        transform_fns, train_dataset, batch_size=batch_size, interval=10,
        last_batch='rollover', shuffle=True, batchify_fn=batchify_fn,
        num_workers=num_workers)
    # Validation uses a single fixed shape.
    val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(YOLO3DefaultValTransform(width, height)),
        batch_size, False, batchify_fn=val_batchify_fn, last_batch='keep',
        num_workers=num_workers)
    return train_loader, val_loader
Ejemplo n.º 4
0
def tri_data_loader(batch_size, transform_train, transform_valid, num_workers=0,
                train_dataset=None, valid_dataset=None, valid_train_dataset=None, 
                batchify_fns=None, train_shuffle=True):
    """Build train/valid (and optionally valid_train) dataloaders.

    Parameters
    ----------
    batchify_fns : dict or None
        Mapping with 'train', 'valid' and 'valid_train' batchify functions
        (SSD style); sensible defaults are used when None.

    Returns
    -------
    tuple
        (train_data, valid_data, classes) when valid_train_dataset is None,
        otherwise (train_data, valid_data, classes, valid_train_data).
    """
    if batchify_fns is None:
        batchify_fns = {
            'train': Tuple(Stack(), Stack(), Stack()),
            'valid': Tuple(Stack(), Pad(pad_val=-1)),
            'valid_train': Tuple(Stack(), Pad(pad_val=-1)),
        }

    # Validation: deterministic order, keep every sample.
    valid_data = DataLoader(valid_dataset.transform(transform_valid),
                            batch_size, shuffle=False,
                            batchify_fn=batchify_fns['valid'],
                            last_batch='keep', num_workers=num_workers)

    # Training: optionally shuffled, roll partial batches over.
    train_data = DataLoader(train_dataset.transform(transform_train),
                            batch_size, shuffle=train_shuffle,
                            batchify_fn=batchify_fns['train'],
                            last_batch='rollover', num_workers=num_workers)

    if valid_train_dataset is None:
        return train_data, valid_data, train_dataset.classes

    # Extra loader that evaluates on (a subset of) the training data.
    valid_train_data = DataLoader(
        valid_train_dataset.transform(transform_valid),
        batch_size, shuffle=False,
        batchify_fn=batchify_fns['valid_train'],
        last_batch='keep', num_workers=num_workers)
    return train_data, valid_data, train_dataset.classes, valid_train_data
Ejemplo n.º 5
0
def test_transforms_presets_yolo():
    """Smoke-test YOLOv3 preset load/transform helpers and dataloaders."""
    im_fname = gcv.utils.download('https://github.com/dmlc/web-data/blob/master/' +
                                  'gluoncv/detection/biking.jpg?raw=true', path='biking.jpg')
    # load_test and transform_test must produce identical results.
    x, orig_img = yolo.load_test(im_fname, short=512)
    x1, orig_img1 = yolo.transform_test(mx.image.imread(im_fname), short=512)
    np.testing.assert_allclose(x.asnumpy(), x1.asnumpy())
    np.testing.assert_allclose(orig_img, orig_img1)
    # The remainder needs the tiny VOC dataset on disk.
    if not osp.isdir(osp.expanduser('~/.mxnet/datasets/voc')):
        return
    train_dataset = VOCDetectionTiny()
    val_dataset = VOCDetectionTiny(splits=[('tiny_motorbike', 'test')])
    width = height = 512
    net = gcv.model_zoo.get_model('yolo3_darknet53_voc', pretrained=False, pretrained_base=False)
    net.initialize()
    num_workers, batch_size = 0, 4
    # Stack image + six targets; pad variable-length gt boxes with -1.
    batchify_fn = Tuple(*([Stack() for _ in range(6)]
                          + [Pad(axis=0, pad_val=-1)]))
    train_loader = gluon.data.DataLoader(
        train_dataset.transform(yolo.YOLO3DefaultTrainTransform(width, height, net)),
        batch_size, True, batchify_fn=batchify_fn, last_batch='rollover',
        num_workers=num_workers)
    val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(yolo.YOLO3DefaultValTransform(width, height)),
        batch_size, False, batchify_fn=val_batchify_fn, last_batch='keep',
        num_workers=num_workers)
    # Without a net, the train transform emits only (img, label).
    train_loader2 = gluon.data.DataLoader(
        train_dataset.transform(yolo.YOLO3DefaultTrainTransform(width, height)),
        batch_size, True, batchify_fn=val_batchify_fn, last_batch='rollover',
        num_workers=num_workers)

    # Pull two batches from each loader to exercise the pipelines.
    for loader in (train_loader, val_loader, train_loader2):
        for idx, _batch in enumerate(loader):
            if idx > 1:
                break
Ejemplo n.º 6
0
def get_dataloader(net, train_dataset, val_dataset, data_shape, batch_size, num_workers, args):
    """Build YOLOv3 train/val dataloaders, optionally with random shapes.

    When not in the final-fit stage and no validation set is given, a
    (1 - args.split_ratio) fraction of the training data is held out for
    validation.
    """
    if not args.final_fit and not val_dataset:
        train_dataset, val_dataset = _train_val_split(train_dataset,
                                                      args.split_ratio)

    width = height = data_shape
    # Stack image plus all six generated targets; pad gt boxes with -1.
    batchify_fn = Tuple(*([Stack() for _ in range(6)]
                          + [Pad(axis=0, pad_val=-1)]))
    if args.no_random_shape:
        train_transform = YOLO3DefaultTrainTransform(width, height, net,
                                                     mixup=args.mixup)
        train_loader = gluon.data.DataLoader(
            train_dataset.transform(train_transform), batch_size, True,
            batchify_fn=batchify_fn, last_batch='rollover',
            num_workers=num_workers)
    else:
        # One transform per resolution in 320..608 (multiples of 32),
        # switched every 10 batches.
        transform_fns = [YOLO3DefaultTrainTransform(x * 32, x * 32, net,
                                                    mixup=args.mixup)
                         for x in range(10, 20)]
        train_loader = RandomTransformDataLoader(
            transform_fns, train_dataset, batch_size=batch_size, interval=10,
            last_batch='rollover', shuffle=True, batchify_fn=batchify_fn,
            num_workers=num_workers)

    val_loader = None
    if val_dataset:
        val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
        val_loader = gluon.data.DataLoader(
            val_dataset.transform(YOLO3DefaultValTransform(width, height)),
            batch_size, False, batchify_fn=val_batchify_fn,
            last_batch='keep', num_workers=num_workers)
    return train_loader, val_loader
Ejemplo n.º 7
0
def get_dataloader(net, train_dataset, val_dataset, data_shape, batch_size,
                   num_workers, args):
    """Get dataloader.

    Builds a YOLOv3 training loader (fixed-shape or multi-scale, depending
    on ``args.no_random_shape``) and a validation loader.

    NOTE(review): installing a package at call time via ``pip3`` is fragile
    and a supply-chain risk; gluoncv should be a declared dependency.  The
    call is kept for backward compatibility.
    """
    import os
    os.system('pip3 install gluoncv')
    import gluoncv as gcv
    gcv.utils.check_version('0.6.0')
    # Only the names actually used below are imported; the original block
    # also pulled in metrics, LR schedulers and model-zoo helpers that were
    # never referenced, plus leftover debug prints — removed.
    from gluoncv.data.batchify import Tuple, Stack, Pad
    from gluoncv.data.transforms.presets.yolo import YOLO3DefaultTrainTransform
    from gluoncv.data.transforms.presets.yolo import YOLO3DefaultValTransform
    from gluoncv.data.dataloader import RandomTransformDataLoader

    width, height = data_shape, data_shape
    # Stack image and the six generated targets; pad gt boxes with -1.
    batchify_fn = Tuple(
        *([Stack() for _ in range(6)] + [Pad(axis=0, pad_val=-1)]))
    if args.no_random_shape:
        train_loader = gluon.data.DataLoader(
            train_dataset.transform(
                YOLO3DefaultTrainTransform(width, height, net,
                                           mixup=args.mixup)),
            batch_size, True, batchify_fn=batchify_fn,
            last_batch='rollover', num_workers=num_workers)
    else:
        # One transform per input size 320..608; switched every 10 batches.
        transform_fns = [
            YOLO3DefaultTrainTransform(x * 32, x * 32, net, mixup=args.mixup)
            for x in range(10, 20)
        ]
        train_loader = RandomTransformDataLoader(
            transform_fns, train_dataset, batch_size=batch_size, interval=10,
            last_batch='rollover', shuffle=True, batchify_fn=batchify_fn,
            num_workers=num_workers)
    val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(YOLO3DefaultValTransform(width, height)),
        batch_size, False, batchify_fn=val_batchify_fn, last_batch='keep',
        num_workers=num_workers)
    return train_loader, val_loader
Ejemplo n.º 8
0
def get_coco_data_loaders(net, train_data, val_data, in_size, bs, n_workers,
                          ctx):
    """Build SSD train/val dataloaders for COCO.

    A fake forward pass in train mode yields the fixed anchors needed by
    the target-generating train transform; they are moved to CPU so data
    workers can use them.
    """
    with autograd.train_mode():
        # Batch of one zero image; only the anchors output matters.
        _, _, anchors = net(nd.zeros((1, 3, in_size, in_size), ctx))
    anchors = anchors.as_in_context(mx.cpu())

    # Stack image, cls_targets and box_targets.
    train_batchify_fn = Tuple(Stack(), Stack(), Stack())
    train_data_loader = gluon.data.DataLoader(
        train_data.transform(
            SSDDefaultTrainTransform(in_size, in_size, anchors)),
        batch_size=bs, shuffle=True, batchify_fn=train_batchify_fn,
        last_batch='rollover', num_workers=n_workers)

    # Stack image; pad variable-length labels with -1.
    val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    val_data_loader = gluon.data.DataLoader(
        val_data.transform(SSDDefaultValTransform(in_size, in_size)),
        batch_size=bs, shuffle=False, batchify_fn=val_batchify_fn,
        last_batch='keep', num_workers=n_workers)

    return train_data_loader, val_data_loader
Ejemplo n.º 9
0
def test_transforms_presets_ssd():
    """Smoke-test SSD preset load/transform helpers and dataloaders."""
    im_fname = gcv.utils.download('https://github.com/dmlc/web-data/blob/master/' +
                                  'gluoncv/detection/biking.jpg?raw=true', path='biking.jpg')
    # load_test and transform_test must produce identical results.
    x, orig_img = ssd.load_test(im_fname, short=512)
    x1, orig_img1 = ssd.transform_test(mx.image.imread(im_fname), short=512)
    np.testing.assert_allclose(x.asnumpy(), x1.asnumpy())
    np.testing.assert_allclose(orig_img, orig_img1)
    # The remainder needs the tiny VOC dataset on disk.
    if not osp.isdir(osp.expanduser('~/.mxnet/datasets/voc')):
        return
    train_dataset = VOCDetectionTiny()
    val_dataset = VOCDetectionTiny(splits=[('tiny_motorbike', 'test')])
    width = height = 512
    net = gcv.model_zoo.get_model('ssd_512_resnet50_v1_voc', pretrained=False, pretrained_base=False)
    net.initialize()
    num_workers, batch_size = 0, 4
    # Fake forward pass in train mode to get the fixed anchors.
    with autograd.train_mode():
        _, _, anchors = net(mx.nd.zeros((1, 3, height, width)))
    # Stack image, cls_targets and box_targets.
    batchify_fn = Tuple(Stack(), Stack(), Stack())
    train_loader = gluon.data.DataLoader(
        train_dataset.transform(ssd.SSDDefaultTrainTransform(width, height, anchors)),
        batch_size, True, batchify_fn=batchify_fn, last_batch='rollover',
        num_workers=num_workers)
    val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(ssd.SSDDefaultValTransform(width, height)),
        batch_size, False, batchify_fn=val_batchify_fn, last_batch='keep',
        num_workers=num_workers)
    # Without anchors, the train transform emits only (img, label).
    train_loader2 = gluon.data.DataLoader(
        train_dataset.transform(ssd.SSDDefaultTrainTransform(width, height)),
        batch_size, True, batchify_fn=val_batchify_fn, last_batch='rollover',
        num_workers=num_workers)

    # Pull two batches from each loader to exercise the pipelines.
    for loader in (train_loader, val_loader, train_loader2):
        for idx, _batch in enumerate(loader):
            if idx > 1:
                break
Ejemplo n.º 10
0
def get_dataloader(net, train_dataset, val_dataset, data_shape, batch_size,
                   num_workers, ctx):
    """Build CenterNet train/val dataloaders."""
    width, height = data_shape, data_shape
    num_class = len(train_dataset.classes)
    # The train transform emits six outputs (image plus targets), all
    # stacked; Tuple accepts a single list of batchify functions.
    batchify_fn = Tuple([Stack() for _ in range(6)])
    train_transform = CenterNetDefaultTrainTransform(
        width, height, num_class=num_class, scale_factor=net.scale)
    train_loader = gluon.data.DataLoader(
        train_dataset.transform(train_transform), batch_size, True,
        batchify_fn=batchify_fn, last_batch='rollover',
        num_workers=num_workers)
    # Validation: stack image, pad variable-length labels with -1.
    val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(CenterNetDefaultValTransform(width, height)),
        batch_size, False, batchify_fn=val_batchify_fn, last_batch='keep',
        num_workers=num_workers)
    return train_loader, val_loader
Ejemplo n.º 11
0
def get_dataloader(net, train_dataset, valid_dataset, data_shape, batch_size,
                   num_workers):
    """Build SSD train/val dataloaders plus a VOC07 mAP metric."""
    from gluoncv.data.batchify import Tuple, Stack, Pad
    from gluoncv.data.transforms.presets.ssd import SSDDefaultTrainTransform
    width, height = data_shape, data_shape
    # Fake forward pass in train mode to obtain the fixed anchors used for
    # target generation.
    with autograd.train_mode():
        _, _, anchors = net(mx.nd.zeros((1, 3, height, width)))
    # Stack image, cls_targets and box_targets.
    batchify_fn = Tuple(Stack(), Stack(), Stack())
    train_loader = gluon.data.DataLoader(
        train_dataset.transform(
            SSDDefaultTrainTransform(width, height, anchors)),
        batch_size, True, batchify_fn=batchify_fn, last_batch='rollover',
        num_workers=num_workers)

    val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(
        valid_dataset.transform(SSDDefaultValTransform(width, height)),
        batch_size, False, batchify_fn=val_batchify_fn, last_batch='keep',
        num_workers=num_workers)

    # NOTE(review): `classes` is not defined in this function; it must be
    # a module-level name — confirm it exists where this is called.
    eval_metric = VOC07MApMetric(iou_thresh=0.5, class_names=classes)
    return train_loader, val_loader, eval_metric
Ejemplo n.º 12
0
 def getDataloader(self, train_dataset, val_dataset):
     """Build SSD train/val dataloaders using settings from self.args."""
     width, height = self.args.data_shape, self.args.data_shape
     # Fake forward pass in train mode on the first context to obtain the
     # fixed anchors for target generation; moved to CPU so the data
     # workers can use them.
     with autograd.train_mode():
         _, _, anchors = self.net(
             mx.nd.zeros((1, 3, height, width), self.ctx[0]))
     anchors = anchors.as_in_context(mx.cpu())
     # Stack image, cls_targets and box_targets.
     batchify_fn = Tuple(Stack(), Stack(), Stack())
     train_loader = gluon.data.DataLoader(
         train_dataset.transform(
             SSDDefaultTrainTransform(width, height, anchors)),
         self.args.batch_size, True, batchify_fn=batchify_fn,
         last_batch='rollover', num_workers=self.args.num_workers)
     val_loader = None
     if val_dataset is not None:
         val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
         val_loader = gluon.data.DataLoader(
             val_dataset.transform(SSDDefaultValTransform(width, height)),
             self.args.batch_size, False, batchify_fn=val_batchify_fn,
             last_batch='keep', num_workers=self.args.num_workers)
     return train_loader, val_loader
Ejemplo n.º 13
0
def get_dataloader(net, train_dataset, val_dataset, data_shape, batch_size,
                   num_workers):
    """Build SSD train/val dataloaders."""
    width = height = data_shape
    # Fake forward pass in train mode yields the fixed anchors required by
    # the target-generating train transform.
    with autograd.train_mode():
        _, _, anchors = net(mx.nd.zeros((1, 3, height, width)))
    # Stack image, cls_targets and box_targets.
    train_batchify = Tuple(Stack(), Stack(), Stack())
    train_loader = gluon.data.DataLoader(
        train_dataset.transform(
            SSDDefaultTrainTransform(width, height, anchors)),
        batch_size, True, batchify_fn=train_batchify,
        last_batch='rollover', num_workers=num_workers)
    # Stack image; pad variable-length labels with -1.
    val_batchify = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(SSDDefaultValTransform(width, height)),
        batch_size, False, batchify_fn=val_batchify,
        last_batch='keep', num_workers=num_workers)
    return train_loader, val_loader
Ejemplo n.º 14
0
def get_dataloader(net, train_dataset, val_dataset, data_shape, batch_size, num_workers, args):
    """Build PyramidBox train/val dataloaders (face/head/body branches)."""
    height, width = data_shape, data_shape
    # Fake forward pass in train mode yields fixed anchors for each of the
    # three branches.
    with autograd.train_mode():
        (_, _, face_anchors,
         _, _, head_anchors,
         _, _, body_anchors) = net(mx.nd.zeros((1, 3, height, width)))
    anchors = [face_anchors, head_anchors, body_anchors]
    # Stack the image, then the three cls-target and three box-target
    # outputs (face, head, body).
    train_batchify_fn = Tuple(Stack(),
                              Stack(), Stack(), Stack(),
                              Stack(), Stack(), Stack())
    train_loader = gluon.data.DataLoader(
        train_dataset.transform(
            PyramidBoxTrainTransform(width, height, anchors)),
        batch_size=batch_size, shuffle=True,
        batchify_fn=train_batchify_fn, num_workers=num_workers,
        last_batch='rollover')
    # Validation: stack image, pad variable-length labels with -1.
    val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(PyramidBoxValTransform()),
        batch_size=batch_size, shuffle=False, batchify_fn=val_batchify_fn,
        last_batch='keep', num_workers=num_workers)
    return train_loader, val_loader
Ejemplo n.º 15
0
 def _get_dataloader(net, test_dataset, data_shape, batch_size,
                     num_workers, num_devices, args):
     """Build a test dataloader for the configured meta architecture."""
     if args.meta_arch == 'yolo3':
         width, height = data_shape, data_shape
         val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
         return gluon.data.DataLoader(
             test_dataset.transform(YOLO3DefaultValTransform(width, height)),
             batch_size, False, batchify_fn=val_batchify_fn,
             last_batch='keep', num_workers=num_workers)
     if args.meta_arch == 'faster_rcnn':
         # Faster R-CNN evaluation batchifies by appending; one sample per
         # device.
         test_bfn = Tuple(*[Append() for _ in range(3)])
         short = net.short
         if isinstance(short, (tuple, list)):
             short = short[-1]
         return gluon.data.DataLoader(
             test_dataset.transform(
                 FasterRCNNDefaultValTransform(short, net.max_size)),
             num_devices, False, batchify_fn=test_bfn,
             last_batch='keep', num_workers=args.num_workers)
     raise NotImplementedError('%s not implemented.' % args.meta_arch)
Ejemplo n.º 16
0
    def get_dataloader(self, net, train_dataset, val_dataset, width, height, batch_size, num_workers):
        """Build YOLOv3 train/val dataloaders.

        Fix: the validation loader previously used shuffle=True and
        last_batch='rollover', which shuffles the evaluation data and can
        silently carry samples over between epochs; validation now iterates
        deterministically over every sample (shuffle=False,
        last_batch='keep'), consistent with the other loaders in this file.
        """
        from gluoncv.data.batchify import Tuple, Stack, Pad
        from gluoncv.data.transforms.presets.yolo import YOLO3DefaultTrainTransform, YOLO3DefaultValTransform

        # Stack image and all six generated targets; pad gt boxes with -1.
        train_batchify_fn = Tuple(
            *([Stack() for _ in range(6)] + [Pad(axis=0, pad_val=-1)]))
        train_loader = gluon.data.DataLoader(
            train_dataset.transform(
                YOLO3DefaultTrainTransform(width, height, net)),
            batch_size, True, batchify_fn=train_batchify_fn,
            last_batch='rollover', num_workers=num_workers)

        val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
        val_loader = gluon.data.DataLoader(
            val_dataset.transform(YOLO3DefaultValTransform(width, height)),
            batch_size, False, batchify_fn=val_batchify_fn,
            last_batch='keep', num_workers=num_workers)

        return train_loader, val_loader
    def Model(self, model_name, use_pretrained=True, use_gpu=True, gpu_devices=[0]):
        """Select a model-zoo detector and build its training dataloader.

        Reads classes, train_dataset, batch_size and num_workers from
        ``self.system_dict`` and stores the net, train_loader, img_shape
        and device context back into it.

        Parameters
        ----------
        model_name : str
            Zoo model name; must belong to one of model_set_1..model_set_6.
        use_pretrained : bool
            Whether to load pretrained weights.
        use_gpu : bool
            Whether to place the network on GPU(s).
        gpu_devices : list of int
            GPU ids.  NOTE(review): mutable default argument — shared
            across calls; safe only if never mutated. Verify.
        """
        self.system_dict["model_name"] = model_name;
        self.system_dict["use_pretrained"] = use_pretrained;
        # SSD-family models with 300x300 input.
        if(self.system_dict["model_name"] in self.system_dict["model_set_1"]):
            self.system_dict["local"]["net"] = gcv.model_zoo.get_model(self.system_dict["model_name"], 
                pretrained=self.system_dict["use_pretrained"]);
            # Re-target the detector's output heads to the user's classes.
            self.system_dict["local"]["net"].reset_class(self.system_dict["classes"])
            self.system_dict["img_shape"] = (300, 300); 

            width, height = self.system_dict["img_shape"][0], self.system_dict["img_shape"][1]
            # Fake forward pass in train mode to obtain fixed anchors for
            # target generation.
            with autograd.train_mode():
                _, _, anchors = self.system_dict["local"]["net"](mx.nd.zeros((1, 3, height, width)))

            # Stack image, cls_targets, box_targets.
            batchify_fn = Tuple(Stack(), Stack(), Stack())
            self.system_dict["local"]["train_loader"] = gluon.data.DataLoader(
                self.system_dict["local"]["train_dataset"].transform(SSDDefaultTrainTransform(width, height, anchors)),
                self.system_dict["batch_size"], True, batchify_fn=batchify_fn, last_batch='rollover', 
                num_workers=self.system_dict["num_workers"])

            self.set_device(use_gpu=use_gpu ,gpu_devices=gpu_devices);
            self.system_dict["local"]["net"].collect_params().reset_ctx(self.system_dict["local"]["ctx"])

        # SSD-family models with 512x512 input.
        elif((self.system_dict["model_name"] in self.system_dict["model_set_2"]) or (self.system_dict["model_name"] in self.system_dict["model_set_3"])
            or (self.system_dict["model_name"] in self.system_dict["model_set_4"])):
            self.system_dict["local"]["net"] = gcv.model_zoo.get_model(self.system_dict["model_name"], 
                pretrained=self.system_dict["use_pretrained"]);
            self.system_dict["local"]["net"].reset_class(self.system_dict["classes"])
            self.system_dict["img_shape"] = (512, 512); 

            width, height = self.system_dict["img_shape"][0], self.system_dict["img_shape"][1]
            # Same fake-forward anchor generation as the 300x300 branch.
            with autograd.train_mode():
                _, _, anchors = self.system_dict["local"]["net"](mx.nd.zeros((1, 3, height, width)))

            batchify_fn = Tuple(Stack(), Stack(), Stack())
            self.system_dict["local"]["train_loader"] = gluon.data.DataLoader(
                self.system_dict["local"]["train_dataset"].transform(SSDDefaultTrainTransform(width, height, anchors)),
                self.system_dict["batch_size"], True, batchify_fn=batchify_fn, last_batch='rollover', 
                num_workers=self.system_dict["num_workers"])

            self.set_device(use_gpu=use_gpu, gpu_devices=gpu_devices);
            self.system_dict["local"]["net"].collect_params().reset_ctx(self.system_dict["local"]["ctx"])

        # YOLOv3-family models with 416x416 input; targets come from the
        # transform itself (net passed in), so no anchor pass is needed.
        elif((self.system_dict["model_name"] in self.system_dict["model_set_5"]) or (self.system_dict["model_name"] in self.system_dict["model_set_6"])) :
            self.system_dict["local"]["net"] = gcv.model_zoo.get_model(self.system_dict["model_name"], 
                pretrained=self.system_dict["use_pretrained"]);
            self.system_dict["local"]["net"].reset_class(self.system_dict["classes"])
            self.system_dict["img_shape"] = (416, 416); 

            width, height = self.system_dict["img_shape"][0], self.system_dict["img_shape"][1]

            train_transform = YOLO3DefaultTrainTransform(width, height, self.system_dict["local"]["net"])
            # Stack image + six targets; pad variable-length gt boxes.
            batchify_fn = Tuple(*([Stack() for _ in range(6)] + [Pad(axis=0, pad_val=-1) for _ in range(1)]))

            self.system_dict["local"]["train_loader"] = gluon.data.DataLoader(
                self.system_dict["local"]["train_dataset"].transform(train_transform),
                self.system_dict["batch_size"], True, batchify_fn=batchify_fn, last_batch='rollover', 
                num_workers=self.system_dict["num_workers"])

            self.set_device(use_gpu=use_gpu, gpu_devices=gpu_devices);
            self.system_dict["local"]["net"].collect_params().reset_ctx(self.system_dict["local"]["ctx"])
Ejemplo n.º 18
0
def get_dataloader(val_dataset, data_shape, batch_size, num_workers):
    """Build an SSD evaluation dataloader."""
    width, height = data_shape, data_shape
    # Stack image; pad variable-length labels with -1.
    batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    # NOTE(review): last_batch='rollover' defers tail samples to a batch
    # that may never be drawn — confirm 'keep' is not wanted for eval.
    return gluon.data.DataLoader(
        val_dataset.transform(SSDDefaultValTransform(width, height)),
        batchify_fn=batchify_fn, batch_size=batch_size, shuffle=False,
        last_batch='rollover', num_workers=num_workers)
Ejemplo n.º 19
0
def get_dataloader(net, val_dataset, data_shape, batch_size, num_workers, args):
    """Build a YOLOv3 COCO evaluation dataloader."""
    width, height = data_shape, data_shape
    # Stack image; pad variable-length labels with -1.
    val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    transform = YOLO3DefaultValTransform(width, height, 50, 'coco')
    return gluon.data.DataLoader(
        val_dataset.transform(transform), batch_size, False,
        batchify_fn=val_batchify_fn, last_batch='keep',
        num_workers=num_workers)
Ejemplo n.º 20
0
def get_ori_evaldataloader(val_dataset, data_shape, batch_size, num_workers):
    """Build an evaluation dataloader using the original SSD transform."""
    width, height = data_shape, data_shape
    batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    # last_batch='discard' drops the final partial batch.
    return gluon.data.DataLoader(
        val_dataset.transform(SSDOriTransform(width, height)),
        batchify_fn=batchify_fn, batch_size=batch_size, shuffle=False,
        last_batch='discard', num_workers=num_workers)
Ejemplo n.º 21
0
    def get_dataloader(val_dataset, data_shape, batch_size, num_workers):
        """Build a YOLOv3 validation dataloader."""
        from gluoncv.data.batchify import Tuple, Stack, Pad
        from gluoncv.data.transforms.presets.yolo import YOLO3DefaultValTransform

        # Stack image; pad variable-length labels with -1.
        val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
        transform = YOLO3DefaultValTransform(data_shape, data_shape)
        return gluon.data.DataLoader(
            val_dataset.transform(transform), batch_size, False,
            batchify_fn=val_batchify_fn, last_batch='keep',
            num_workers=num_workers)
Ejemplo n.º 22
0
def get_dataloader(val_dataset, data_shape, batch_size, num_workers, ctx):
    """Build a validation DataLoader over an already-transformed dataset.

    ``data_shape`` and ``ctx`` are accepted for interface compatibility but
    are not used here — the dataset is consumed as-is, without a transform.
    """
    width, height = data_shape, data_shape
    # Stack images; pad ragged label arrays with -1.
    collate = Tuple(Stack(), Pad(pad_val=-1))
    return gluon.data.DataLoader(
        val_dataset, batch_size, False, batchify_fn=collate,
        last_batch='rollover', num_workers=num_workers)
Ejemplo n.º 23
0
def get_loader(net, train_dataset, val_dataset, data_shape, batch_size,
               num_workers, args):
    """Build YOLOv3 training and validation DataLoaders.

    Training uses either a fixed-shape transform or random multi-scale
    transforms (sizes 320..608, step 32, re-sampled every 10 batches)
    depending on ``args.no_random_shape``.
    """
    width, height = data_shape, data_shape
    # Stack the image and the five generated target arrays; pad the
    # variable-length ground-truth boxes with -1.
    train_collate = Tuple(
        *([Stack() for _ in range(6)] + [Pad(axis=0, pad_val=-1)]))
    if args.no_random_shape:
        # Fixed input size for every batch. The transform yields
        # (img, objectness, center_targets, scale_targets, weights,
        #  class_targets, gt_bboxes).
        transform = YOLO3DefaultTrainTransform(width, height, net,
                                               mixup=args.mixup)
        train_loader = gluon.data.DataLoader(
            train_dataset.transform(transform),
            batch_size,
            shuffle=True,
            last_batch='rollover',
            batchify_fn=train_collate,
            num_workers=num_workers)
    else:
        # Multi-scale training: one transform per candidate size.
        scale_transforms = [
            YOLO3DefaultTrainTransform(s * 32, s * 32, net, mixup=args.mixup)
            for s in range(10, 20)
        ]
        train_loader = RandomTransformDataLoader(
            scale_transforms,
            dataset=train_dataset,
            batch_size=batch_size,
            shuffle=True,
            interval=10,
            last_batch='rollover',
            batchify_fn=train_collate,
            num_workers=num_workers)

    # Stack images, pad labels: columns 0-3 are the box, column 4 the class.
    val_collate = Tuple(Stack(), Pad(pad_val=-1))
    val_transform = YOLO3DefaultValTransform(width, height)
    # The val transform returns (img, bbox.astype(img.dtype)).
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(val_transform),
        batch_size,
        False,
        last_batch='keep',
        batchify_fn=val_collate,
        num_workers=num_workers)
    return train_loader, val_loader
Ejemplo n.º 24
0
def get_dataloader(net, train_dataset, val_dataset, data_shape, batch_size,
                   num_workers, args):
    """Build multi-scale YOLOv3 training and fixed-size validation loaders.

    The training transform performs data augmentation and parses each
    sample's label array (shape [M, 6] for M ground-truth boxes: columns
    0-3 are the box, column 4 the class id), returning
    (img, objectness, center_targets, scale_targets, weights,
    class_targets, gt_bboxes) so each batch element matches the inputs
    the network expects.
    """
    width, height = data_shape, data_shape
    # Stack the image and the five generated targets; pad gt boxes with -1.
    train_collate = Tuple(*([Stack() for _ in range(6)] +
                            [Pad(axis=0, pad_val=-1)]))

    # Multi-scale training over sizes 320..608 (step 32), with a new size
    # sampled every 10 batches.
    scale_transforms = [
        YOLO3DefaultTrainTransform(s * 32, s * 32, net, mixup=args.mixup)
        for s in range(10, 20)
    ]
    train_loader = RandomTransformDataLoader(scale_transforms,
                                             train_dataset,
                                             batch_size=batch_size,
                                             interval=10,
                                             last_batch='rollover',
                                             shuffle=True,
                                             batchify_fn=train_collate,
                                             num_workers=num_workers)

    # Validation always runs at the single fixed (width, height) resolution.
    val_collate = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(YOLO3DefaultValTransform(width, height)),
        batch_size,
        False,
        batchify_fn=val_collate,
        last_batch='keep',
        num_workers=num_workers)
    return train_loader, val_loader
Ejemplo n.º 25
0
def get_dataloader(net, train_dataset, val_dataset, data_shape, batch_size,
                   num_workers, args):
    """Build YOLOv3 training and validation DataLoaders.

    Training is fixed-shape when ``args.no_random_shape`` is set, otherwise
    multi-scale (sizes 320..608, step 32, re-sampled every 10 batches).
    """
    width, height = data_shape, data_shape
    # Stack the image and all five generated targets; pad gt boxes with -1.
    train_collate = Tuple(*([Stack() for _ in range(6)] +
                            [Pad(axis=0, pad_val=-1)]))
    if args.no_random_shape:
        transform = YOLO3DefaultTrainTransform(width, height, net,
                                               mixup=args.mixup)
        train_loader = gluon.data.DataLoader(
            train_dataset.transform(transform),
            batch_size,
            True,
            batchify_fn=train_collate,
            last_batch='rollover',
            num_workers=num_workers)
    else:
        scale_transforms = [
            YOLO3DefaultTrainTransform(s * 32, s * 32, net, mixup=args.mixup)
            for s in range(10, 20)
        ]
        train_loader = RandomTransformDataLoader(scale_transforms,
                                                 train_dataset,
                                                 batch_size=batch_size,
                                                 interval=10,
                                                 last_batch='rollover',
                                                 shuffle=True,
                                                 batchify_fn=train_collate,
                                                 num_workers=num_workers)
    val_collate = Tuple(Stack(), Pad(pad_val=-1))
    # NOTE: last_batch='discard' (instead of 'keep') avoids an exception when
    # the final batch is smaller than the number of GPUs; fixed upstream in
    # gluon PR 14607 (https://github.com/apache/incubator-mxnet/pull/14607)
    # but not yet in an official release. Discarding the last batch causes
    # minor changes in val results since some val data goes unprocessed.
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(YOLO3DefaultValTransform(width, height)),
        batch_size,
        False,
        batchify_fn=val_collate,
        last_batch='discard',
        num_workers=num_workers)

    return train_loader, val_loader
Ejemplo n.º 26
0
def validate(net, test_dataset, ctx):
    """Evaluate ``net`` on ``test_dataset`` with the VOC07 mAP metric.

    Builds a 512x512 SSD validation loader internally, runs detection on
    every batch, prints throughput, and returns ``metric.get()``.
    """
    if isinstance(ctx, mx.Context):
        ctx = [ctx]
    size = len(test_dataset)
    metric = gcv.utils.metrics.voc_detection.VOC07MApMetric(
        iou_thresh=0.5, class_names=test_dataset.classes)
    net.collect_params().reset_ctx(ctx)
    metric.reset()
    width, height = 512, 512
    batch_size = 4
    collate = Tuple(Stack(), Pad(pad_val=-1))
    loader = mx.gluon.data.DataLoader(
        test_dataset.transform(SSDDefaultValTransform(width, height)),
        batchify_fn=collate,
        batch_size=batch_size,
        shuffle=False,
        last_batch='rollover',
        num_workers=0)
    with tqdm(total=size) as pbar:
        start = time.time()
        for batch in loader:
            # Spread data/labels across the available contexts.
            data = mx.gluon.utils.split_and_load(
                batch[0], ctx_list=ctx, batch_axis=0, even_split=False)
            label = mx.gluon.utils.split_and_load(
                batch[1], ctx_list=ctx, batch_axis=0, even_split=False)
            det_bboxes, det_ids, det_scores = [], [], []
            gt_bboxes, gt_ids, gt_difficults = [], [], []
            for x, y in zip(data, label):
                ids, scores, bboxes = net(x)
                det_ids.append(ids)
                det_scores.append(scores)
                # Clip predicted boxes to the image extent.
                det_bboxes.append(bboxes.clip(0, batch[0].shape[2]))
                # Label layout: cols 0-3 box, col 4 class, col 5 difficult.
                gt_ids.append(y.slice_axis(axis=-1, begin=4, end=5))
                gt_bboxes.append(y.slice_axis(axis=-1, begin=0, end=4))
                gt_difficults.append(
                    y.slice_axis(axis=-1, begin=5, end=6)
                    if y.shape[-1] > 5 else None)

            metric.update(det_bboxes, det_ids, det_scores, gt_bboxes,
                          gt_ids, gt_difficults)
            pbar.update(batch[0].shape[0])
        end = time.time()
        speed = size / (end - start)
        print('Throughput is %f img/sec.' % speed)
    return metric.get()
Ejemplo n.º 27
0
def get_dali_dataloader(net, train_dataset, val_dataset, data_shape,
                        global_batch_size, num_workers, devices, ctx, horovod):
    """Build a DALI training iterator and a gluon validation loader for SSD.

    With ``horovod`` enabled, one pipeline is built for the local rank and
    the global batch/epoch sizes are divided across workers; otherwise one
    pipeline is built per device. Validation runs only on the Horovod root
    (or always when Horovod is off); other ranks get ``None``.
    """
    width, height = data_shape, data_shape
    # Run the net once in train mode on a dummy input to extract the anchors
    # needed for DALI-side target generation.
    with autograd.train_mode():
        _, _, anchors = net(mx.nd.zeros((1, 3, height, width), ctx=ctx))
    anchors = anchors.as_in_context(mx.cpu())

    if horovod:
        # Each Horovod worker processes a shard of the global batch.
        batch_size = global_batch_size // hvd.size()
        pipelines = [
            SSDDALIPipeline(device_id=hvd.local_rank(),
                            batch_size=batch_size,
                            data_shape=data_shape,
                            anchors=anchors,
                            num_workers=num_workers,
                            dataset_reader=train_dataset[0])
        ]
    else:
        # One pipeline per device; split the global batch evenly.
        batch_size = global_batch_size // len(devices)
        pipelines = [
            SSDDALIPipeline(device_id=dev_id,
                            batch_size=batch_size,
                            data_shape=data_shape,
                            anchors=anchors,
                            num_workers=num_workers,
                            dataset_reader=train_dataset[idx])
            for idx, dev_id in enumerate(pipelines_devices)
        ]

    epoch_size = train_dataset[0].size()
    if horovod:
        epoch_size //= hvd.size()
    train_loader = DALIGenericIterator(
        pipelines, [('data', DALIGenericIterator.DATA_TAG),
                    ('bboxes', DALIGenericIterator.LABEL_TAG),
                    ('label', DALIGenericIterator.LABEL_TAG)],
        epoch_size,
        auto_reset=True)

    # Validation: only rank 0 evaluates under Horovod.
    if not horovod or hvd.rank() == 0:
        val_collate = Tuple(Stack(), Pad(pad_val=-1))
        val_loader = gluon.data.DataLoader(
            val_dataset.transform(SSDDefaultValTransform(width, height)),
            global_batch_size,
            False,
            batchify_fn=val_collate,
            last_batch='keep',
            num_workers=num_workers)
    else:
        val_loader = None

    return train_loader, val_loader
Ejemplo n.º 28
0
 def getDataloader(self, train_dataset, val_dataset):
     """Build YOLOv3 train/val DataLoaders from ``self.args`` settings.

     Returns ``(train_loader, val_loader)``; ``val_loader`` is ``None``
     when no validation dataset is given.
     """
     width, height = self.args.data_shape, self.args.data_shape
     # Stack the image and the five generated targets; pad gt boxes with -1.
     collate = Tuple(*([Stack() for _ in range(6)] +
                       [Pad(axis=0, pad_val=-1)]))
     if self.args.no_random_shape:
         logger.debug('no random shape')
         train_loader = gluon.data.DataLoader(
             train_dataset.transform(YOLO3DefaultTrainTransform(
                 width, height, self.net, mixup=self.args.mixup)),
             self.args.batch_size, True, batchify_fn=collate,
             last_batch='rollover', num_workers=self.args.num_workers)
     else:
         logger.debug('with random shape')
         # Multi-scale training over sizes 320..608 (step 32).
         scale_transforms = [
             YOLO3DefaultTrainTransform(s * 32, s * 32, self.net,
                                        mixup=self.args.mixup)
             for s in range(10, 20)]
         train_loader = RandomTransformDataLoader(
             scale_transforms, train_dataset,
             batch_size=self.args.batch_size, interval=10,
             last_batch='rollover', shuffle=True, batchify_fn=collate,
             num_workers=self.args.num_workers)
     val_collate = Tuple(Stack(), Pad(pad_val=-1))
     val_loader = None
     if val_dataset is not None:
         # NOTE(review): shuffle=True on the validation loader mirrors the
         # original code but is unusual — confirm it is intended.
         val_loader = gluon.data.DataLoader(
             val_dataset.transform(YOLO3DefaultValTransform(width, height)),
             self.args.batch_size, True, batchify_fn=val_collate,
             last_batch='keep', num_workers=self.args.num_workers)
     return train_loader, val_loader
Ejemplo n.º 29
0
def get_coco_data_loaders(net, train_data, val_data, in_size, bs, n_workers):
    """Build fixed-size YOLOv3 COCO training and validation DataLoaders."""
    # Stack the image and the five generated targets; pad gt boxes with -1.
    train_collate = Tuple(*([Stack() for _ in range(6)] +
                            [Pad(pad_val=-1)]))
    train_transformed = train_data.transform(
        YOLO3DefaultTrainTransform(in_size, in_size, net, mixup=False))
    train_loader = gluon.data.DataLoader(train_transformed,
                                         batch_size=bs,
                                         shuffle=True,
                                         batchify_fn=train_collate,
                                         last_batch='rollover',
                                         num_workers=n_workers)

    val_collate = Tuple(Stack(), Pad(pad_val=-1))
    val_transformed = val_data.transform(
        YOLO3DefaultValTransform(in_size, in_size))
    val_loader = gluon.data.DataLoader(val_transformed,
                                       batch_size=bs,
                                       shuffle=False,
                                       batchify_fn=val_collate,
                                       last_batch='keep',
                                       num_workers=n_workers)

    return train_loader, val_loader
Ejemplo n.º 30
0
def get_dataloader(dataset, batch_size):
    """Build an inference DataLoader for YOLOv3 video frames.

    Reads input size and worker count from the module-level ``FLAGS``.
    """
    height = width = FLAGS.data_shape
    # Stack image and extra tensor; pad ragged labels with -1.
    collate = Tuple(Stack(), Pad(pad_val=-1), Stack())
    transformed = dataset.transform(YOLO3VideoInferenceTransform(width, height))
    return gluon.data.DataLoader(transformed,
                                 batch_size,
                                 False,
                                 last_batch='keep',
                                 num_workers=FLAGS.num_workers,
                                 batchify_fn=collate)