Example #1
def get_dataset(dataset, args):
    if dataset.lower() == 'voc':
        train_dataset = gdata.VOCDetection(
            splits=[(2007, 'trainval'), (2012, 'trainval')])
        val_dataset = gdata.VOCDetection(splits=[(2007, 'test')])
        val_metric = VOC07MApMetric(iou_thresh=0.5,
                                    class_names=val_dataset.classes)
    elif dataset.lower() == 'coco':
        train_dataset = gdata.COCODetection(splits='instances_train2017',
                                            use_crowd=False)
        val_dataset = gdata.COCODetection(splits='instances_val2017',
                                          skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset,
                                         args.save_prefix + '_eval',
                                         cleanup=True)
    elif dataset.lower() == 'visualgenome':
        train_dataset = VGObject(root=os.path.join('~', '.mxnet', 'datasets',
                                                   'visualgenome'),
                                 splits='detections_train',
                                 use_crowd=False)
        val_dataset = VGObject(root=os.path.join('~', '.mxnet', 'datasets',
                                                 'visualgenome'),
                               splits='detections_val',
                               skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset,
                                         args.save_prefix + '_eval',
                                         cleanup=True)
    else:
        raise NotImplementedError(
            'Dataset: {} not implemented.'.format(dataset))
    if args.mixup:
        from gluoncv.data.mixup import detection
        train_dataset = detection.MixupDetection(train_dataset)
    return train_dataset, val_dataset, val_metric
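Most of the get_dataset variants in these examples return a (train_dataset, val_dataset, val_metric) triple. The following is a minimal sketch, not taken from any of the examples, of how that triple is typically consumed for COCO validation; it assumes the default COCO layout under ~/.mxnet/datasets/coco, and the model name, batch size and 512x512 data shape are illustrative choices only.

import mxnet as mx
import gluoncv as gcv
from gluoncv.data.batchify import Tuple, Stack, Pad
from gluoncv.data.transforms.presets.ssd import SSDDefaultValTransform
from gluoncv.utils.metrics.coco_detection import COCODetectionMetric

# validation dataset and metric, mirroring the 'coco' branch above
val_dataset = gcv.data.COCODetection(splits='instances_val2017', skip_empty=False)
val_metric = COCODetectionMetric(
    val_dataset, '_eval', cleanup=True, data_shape=(512, 512))

# any COCO-trained detector from the model zoo would do here
net = gcv.model_zoo.get_model('ssd_512_resnet50_v1_coco', pretrained=True)

# keep shuffle=False and last_batch='keep': the metric matches predictions
# to ground truth by iteration order over val_dataset
batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
val_loader = mx.gluon.data.DataLoader(
    val_dataset.transform(SSDDefaultValTransform(512, 512)),
    batch_size=8, shuffle=False, batchify_fn=batchify_fn, last_batch='keep')

val_metric.reset()
for x, _ in val_loader:
    ids, scores, bboxes = net(x)
    val_metric.update(bboxes, ids, scores)

names, values = val_metric.get()
print(dict(zip(names, values)))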
Example #2
    def metrics(self):
        _, _, H, W = self.ishape
        metric = COCODetectionMetric(self.val_dataset,
                                     '_eval',
                                     cleanup=True,
                                     data_shape=(H, W))
        metric.reset()
        return metric
Example #3
    def metrics(self):
        """ Customized metrics method introduction.

            COCODetectionMetric is used which is the detection metric for COCO bbox task.
        """
        _, _, H, W = self.ishape
        metric = COCODetectionMetric(
            self.val_dataset, '_eval', cleanup=True, data_shape=(H, W))
        metric.reset()
        return metric
Example #4
def get_coco(opt, coco_path="/export/guanghan/coco"):
    """Get coco dataset."""
    val_dataset = CenterCOCODataset(opt, split = 'val')   # custom dataset
    eval_metric = COCODetectionMetric(val_dataset,
                                     save_prefix = '_eval',
                                     data_shape=(opt.input_res, opt.input_res))
    return val_dataset, eval_metric
Example #5
def get_coco_validation_metric(val_data, consts):
    val_metric = COCODetectionMetric(val_data,
                                     consts.VAL_METRIC_F_NAME,
                                     cleanup=True,
                                     data_shape=(consts.IN_SIZE,
                                                 consts.IN_SIZE))
    return val_metric
Example #6
def get_dataset(dataset, args):
    if dataset.lower() == 'voc':
        if 0:
            train_dataset = gdata.VOCDetection(root='E:/dataset/VOCdevkit',
                                               splits=[(2007, 'trainval'),
                                                       (2012, 'trainval')])
            val_dataset = gdata.VOCDetection(root='E:/dataset/VOCdevkit',
                                             splits=[(2007, 'test')])
            val_metric = VOC07MApMetric(iou_thresh=0.5,
                                        class_names=val_dataset.classes)
        else:
            voc_root = 'G:/MSDataset/'  # layout same as VOC07
            train_dataset = gdata.MSDetection(root=voc_root,
                                              splits=[(2007, 'trainval')])
            val_dataset = gdata.MSDetection(root=voc_root,
                                            splits=[(2007, 'test')])
            val_metric = VOC07MApMetric(iou_thresh=0.5,
                                        class_names=val_dataset.classes)
    elif dataset.lower() == 'coco':
        train_dataset = gdata.COCODetection(splits='instances_train2017',
                                            use_crowd=False)
        val_dataset = gdata.COCODetection(splits='instances_val2017',
                                          skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset,
                                         args.save_prefix + '_eval',
                                         cleanup=True)
    else:
        raise NotImplementedError(
            'Dataset: {} not implemented.'.format(dataset))
    if args.mixup:
        from gluoncv.data.mixup import MixupDetection
        train_dataset = MixupDetection(train_dataset)
    return train_dataset, val_dataset, val_metric
Example #7
def get_dataset(dataset, args):
    if dataset.lower() == 'voc':
        train_dataset = gdata.VOCDetection(root=args.dataset_root,
                                           splits=[(2007, 'trainval'),
                                                   (2012, 'trainval')])
        val_dataset = gdata.VOCDetection(root=args.dataset_root,
                                         splits=[(2007, 'test')])
        val_metric = VOC07MApMetric(iou_thresh=0.5,
                                    class_names=val_dataset.classes)
    elif dataset.lower() == 'coco':
        train_dataset = gdata.COCODetection(splits='instances_train2017')
        val_dataset = gdata.COCODetection(splits='instances_val2017',
                                          skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset,
                                         args.save_prefix + '_eval',
                                         cleanup=True,
                                         data_shape=(args.data_shape,
                                                     args.data_shape))
        # COCO validation is slow; consider increasing the validation interval
        if args.val_interval == 1:
            args.val_interval = 10
    elif dataset.lower() == 'tt100k':
        train_dataset = gdata.TT100KDetection(root=args.dataset_root,
                                              splits='train')
        val_dataset = None
        val_metric = None
    else:
        raise NotImplementedError(
            'Dataset: {} not implemented.'.format(dataset))
    return train_dataset, val_dataset, val_metric
Example #8
def get_dataset(dataset, args):
    if dataset.lower() == 'voc':
        pass
        # train_dataset = gdata.VOCDetection(
        #     splits=[(2007, 'trainval'), (2012, 'trainval')])
        # val_dataset = gdata.VOCDetection(
        #     splits=[(2007, 'test')])
        # val_metric = VOC07MApMetric(iou_thresh=0.5, class_names=val_dataset.classes)
    elif dataset.lower() == 'coco':
        train_dataset = gdata.COCODetection(
            root='/home/xcq/PycharmProjects/datasets/coco/',
            splits='instances_train2017',
            use_crowd=False)
        val_dataset = gdata.COCODetection(
            root='/home/xcq/PycharmProjects/datasets/coco/',
            splits='instances_val2017',
            skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset,
                                         args.save_prefix + '_eval',
                                         cleanup=True)
    else:
        raise NotImplementedError(
            'Dataset: {} not implemented.'.format(dataset))
    if args.mixup:
        from gluoncv.data.mixup import detection
        train_dataset = detection.MixupDetection(train_dataset)
    return train_dataset, val_dataset, val_metric
Example #9
def get_dataset(dataset, args):
    if dataset.lower() == 'voc':
        train_dataset = VOCLike(
            root='C:/Users/DELL/Desktop/traindata/VOCtemplate',
            splits=((2018, 'train'), ))
        val_dataset = VOCLike(
            root='C:/Users/DELL/Desktop/traindata/VOCtemplate',
            splits=((2018, 'val'), ))
        val_metric = VOC07MApMetric(iou_thresh=0.5,
                                    class_names=val_dataset.classes)
    elif dataset.lower() == 'coco':
        train_dataset = gdata.COCODetection(root=args.dataset_root + "/coco",
                                            splits='instances_train2017')
        val_dataset = gdata.COCODetection(root=args.dataset_root + "/coco",
                                          splits='instances_val2017',
                                          skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset,
                                         args.save_prefix + '_eval',
                                         cleanup=True,
                                         data_shape=(args.data_shape,
                                                     args.data_shape))
        # COCO validation is slow; consider increasing the validation interval
        if args.val_interval == 1:
            args.val_interval = 10
    else:
        raise NotImplementedError(
            'Dataset: {} not implemented.'.format(dataset))
    return train_dataset, val_dataset, val_metric
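Example #9 above and several of the later snippets construct a VOCLike dataset that none of them define. A minimal sketch, assuming the usual GluonCV pattern of subclassing VOCDetection for data stored in Pascal VOC layout (the CLASSES list is a placeholder; substitute your own labels):

from gluoncv.data import VOCDetection

class VOCLike(VOCDetection):
    # placeholder class list; replace with the labels of your dataset
    CLASSES = ['pedestrian', 'car']

    def __init__(self, root, splits, transform=None, index_map=None, preload_label=True):
        super(VOCLike, self).__init__(root, splits, transform, index_map, preload_label)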
Example #10
def get_dataset(dataset, args):
    # load training and validation images
    if dataset.lower() == 'voc':
        train_dataset = gdata.VOCDetection(root=args.dataset_root + "/voc",
                                           splits=[(2007, 'trainval'),
                                                   (2012, 'trainval')])
        val_dataset = gdata.VOCDetection(root=args.dataset_root + "/voc",
                                         splits=[(2007, 'test')])
        val_metric = VOC07MApMetric(iou_thresh=0.5,
                                    class_names=val_dataset.classes)
        # if class_names is provided, will print out AP for each class
    elif dataset.lower() == 'coco':
        train_dataset = gdata.COCODetection(root=args.dataset_root + "/coco",
                                            splits='instances_train2017')
        val_dataset = gdata.COCODetection(root=args.dataset_root + "/coco",
                                          splits='instances_val2017',
                                          skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset,
                                         args.save_prefix + '_eval',
                                         cleanup=True,
                                         data_shape=(args.data_shape,
                                                     args.data_shape))
        # will print out AP for each class
        if args.val_interval == 1:
            # args.val_interval: the epoch interval between validation runs
            # if validation is slow, increase this value to speed up training
            args.val_interval = 10
    else:
        raise NotImplementedError(
            "dataset: {} not implemented".format(dataset))
    return train_dataset, val_dataset, val_metric
Example #11
def get_dataset(dataset, args):
    if dataset.lower() == 'voc':
        train_dataset = VOCLike(
            root="/content/drive/My Drive/Research/Dataset_conversion/Dataset/",
            splits=[(2007, 'train')])
        val_dataset = VOCLike(
            root="/content/drive/My Drive/Research/Dataset_conversion/Dataset/",
            splits=[(2007, 'validation')])
        print(train_dataset.classes)
        print(val_dataset.classes)
        val_metric = VOC07MApMetric(iou_thresh=0.5,
                                    class_names=val_dataset.classes)
    elif dataset.lower() == 'coco':
        train_dataset = gdata.COCODetection(splits='instances_train2017',
                                            use_crowd=False)
        val_dataset = gdata.COCODetection(splits='instances_val2017',
                                          skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset,
                                         args.save_prefix + '_eval',
                                         cleanup=True)
    else:
        raise NotImplementedError(
            'Dataset: {} not implemented.'.format(dataset))
    if args.mixup:
        from gluoncv.data.mixup import detection
        train_dataset = detection.MixupDetection(train_dataset)
    return train_dataset, val_dataset, val_metric
Example #12
def get_dataset(dataset, args):
    if dataset.lower() == 'voc':
        train_dataset = VOCLike(root='/opt/ml/input/data/training',
                                splits=(('VOCTrain', 'train'), ))
        val_dataset = VOCLike(root='/opt/ml/input/data/training',
                              splits=(('VOCValidate', 'val'), ))
        #train_dataset = VOCLike(root='VOC-PlayingCards', splits=(('VOC2019', 'train'),))
        #val_dataset = VOCLike(root='VOC-PlayingCards', splits=(('VOC2018', 'val'),))
        val_metric = VOC07MApMetric(iou_thresh=0.5,
                                    class_names=val_dataset.classes)
        for c in val_metric.class_names:
            print("Class: {}".format(c))
    elif dataset.lower() == 'coco':
        train_dataset = gdata.COCODetection(root=args.dataset_root + "/coco",
                                            splits='instances_train2017')
        val_dataset = gdata.COCODetection(root=args.dataset_root + "/coco",
                                          splits='instances_val2017',
                                          skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset,
                                         args.save_prefix + '_eval',
                                         cleanup=True,
                                         data_shape=(args.data_shape,
                                                     args.data_shape))
        # COCO validation is slow; consider increasing the validation interval
        if args.val_interval == 1:
            args.val_interval = 10
    else:
        raise NotImplementedError(
            'Dataset: {} not implemented.'.format(dataset))
    return train_dataset, val_dataset, val_metric
Example #13
def get_dataset(dataset, args):
    if dataset.lower() == 'voc':
        train_dataset = gdata.VOCDetection(
            splits=[(2007, 'trainval'), (2012, 'trainval')])
        val_dataset = gdata.VOCDetection(splits=[(2007, 'test')])
        val_metric = VOC07MApMetric(iou_thresh=0.5,
                                    class_names=val_dataset.classes)
    elif dataset.lower() == 'coco':
        train_dataset = gdata.COCODetection(root=args.dataset_root + "/coco",
                                            splits='instances_train2017')
        val_dataset = gdata.COCODetection(root=args.dataset_root + "/coco",
                                          splits='instances_val2017',
                                          skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset,
                                         args.save_prefix + '_eval',
                                         cleanup=True,
                                         data_shape=(args.data_shape,
                                                     args.data_shape))
        # COCO validation is slow; consider increasing the validation interval
        if args.val_interval == 1:
            args.val_interval = 10
    else:
        train_dataset = petVOC(splits=[(2019, 'train_val')])
        val_dataset = train_dataset
        val_metric = VOC07MApMetric(iou_thresh=0.5,
                                    class_names=val_dataset.classes)

    return train_dataset, val_dataset, val_metric
Example #14
def get_dataset(dataset, args):
    if dataset.lower() == 'voc':
        #train_dataset = VOCLike(root='/opt/ml/input/data/training', splits=((2019, 'train'),))
        #val_dataset = VOCLike(root='/opt/ml/input/data/training', splits=((2018, 'val'),))
        train_dataset = VOCLike(
            root='~/code/gluoncv-yolo-playing_cards/VOCTemplate',
            splits=((2019, 'train'), ))
        val_dataset = VOCLike(
            root='~/code/gluoncv-yolo-playing_cards/VOCTemplate',
            splits=((2018, 'val'), ))
        val_metric = VOC07MApMetric(iou_thresh=0.5,
                                    class_names=val_dataset.classes)
    elif dataset.lower() == 'coco':
        train_dataset = gdata.COCODetection(splits='instances_train2017',
                                            use_crowd=False)
        val_dataset = gdata.COCODetection(splits='instances_val2017',
                                          skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset,
                                         args.save_prefix + '_eval',
                                         cleanup=True,
                                         data_shape=(args.data_shape,
                                                     args.data_shape))
    else:
        raise NotImplementedError(
            'Dataset: {} not implemented.'.format(dataset))
    if args.num_samples < 0:
        args.num_samples = len(train_dataset)
    if args.mixup:
        from gluoncv.data import MixupDetection
        train_dataset = MixupDetection(train_dataset)
    return train_dataset, val_dataset, val_metric
Example #15
def get_dataset(dataset, args):
    if dataset.lower() == 'voc':
        train_dataset = gdata.VOCDetection(
            root='/home/users/chenxin.lu/VOCdevkit/VOCdevkit',
            splits=[(2007, 'trainval'), (2012, 'trainval')])
        val_dataset = gdata.VOCDetection(splits=[(2007, 'test')])
        val_metric = VOC07MApMetric(iou_thresh=0.5,
                                    class_names=val_dataset.classes)
    elif dataset.lower() == 'coco':
        train_dataset = gdata.COCODetection(
            root=args.dataset_root + "/coco/stuff_annotations_trainval2017",
            splits='stuff_train2017')
        val_dataset = gdata.COCODetection(
            root=args.dataset_root + "/coco/stuff_annotations_trainval2017",
            splits='stuff_val2017',
            skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset,
                                         args.save_prefix + '_eval',
                                         cleanup=True,
                                         data_shape=(args.data_shape,
                                                     args.data_shape),
                                         post_affine=get_post_transform)
        # COCO validation is slow; consider increasing the validation interval
        if args.val_interval == 1:
            args.val_interval = 10
    else:
        raise NotImplementedError(
            'Dataset: {} not implemented.'.format(dataset))
    if args.num_samples < 0:
        args.num_samples = len(train_dataset)
    return train_dataset, val_dataset, val_metric
Example #16
def get_dataset(dataset, args):
    if dataset.lower() == 'voc':
        train_dataset = gdata.VOCDetection(
            splits=[(2007, 'trainval'), (2012, 'trainval')])
        val_dataset = gdata.VOCDetection(splits=[(2007, 'test')])
        val_metric = VOC07MApMetric(iou_thresh=0.5,
                                    class_names=val_dataset.classes)
    elif dataset.lower() == 'coco':
        train_dataset = gdata.PersonDetection(root=COCO_ROOT_DIR,
                                              splits=('person_train2014',
                                                      'person_train2017'),
                                              use_crowd=False)
        val_dataset = gdata.PersonDetection(root=COCO_ROOT_DIR,
                                            splits='person_val2017',
                                            skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset,
                                         args.save_prefix + '_eval',
                                         cleanup=True,
                                         data_shape=(args.data_shape,
                                                     args.data_shape))
    else:
        raise NotImplementedError(
            'Dataset: {} not implemented.'.format(dataset))
    if args.num_samples < 0:
        args.num_samples = len(train_dataset)
    return train_dataset, val_dataset, val_metric
Example #17
def get_dataset(dataset, args):
    if dataset.lower() == 'voc':
        train_dataset = gdata.VOCDetection(
            splits=[(2007, 'trainval'), (2012, 'trainval')])
        val_dataset = gdata.VOCDetection(splits=[(2007, 'test')])
        val_metric = VOC07MApMetric(iou_thresh=0.5,
                                    class_names=val_dataset.classes)
    elif dataset.lower() == 'coco':
        train_dataset = gdata.COCODetection(splits='instances_train2017',
                                            use_crowd=False)
        val_dataset = gdata.COCODetection(splits='instances_val2017',
                                          skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset,
                                         args.save_prefix + '_eval',
                                         cleanup=True,
                                         data_shape=(args.data_shape,
                                                     args.data_shape))
    else:
        raise NotImplementedError(
            'Dataset: {} not implemented.'.format(dataset))
    if args.num_samples < 0:
        args.num_samples = len(train_dataset)
    if args.mixup:
        from gluoncv.data import MixupDetection
        train_dataset = MixupDetection(train_dataset)
    return train_dataset, val_dataset, val_metric
Example #18
def get_dataset(dataset, args):
    if dataset.lower() == 'voc':
        train_dataset = gdata.VOCDetection(
            splits=[(2007, 'trainval'), (2012, 'trainval')])
        val_dataset = gdata.VOCDetection(
            splits=[(2007, 'test')])
        #print(val_dataset.classes)
        #('aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike', 'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor')

        val_metric = VOC07MApMetric(iou_thresh=0.5, class_names=val_dataset.classes)
    elif dataset.lower() == 'coco':
        train_dataset = gdata.COCODetection(splits='instances_train2017', use_crowd=False)
        val_dataset = gdata.COCODetection(splits='instances_val2017', skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset, args.save_prefix + '_eval', cleanup=True)
    elif dataset.lower() == 'pedestrian':
        lst_dataset = LstDetection('train_val.lst', root=os.path.expanduser('.'))
        print(len(lst_dataset))
        first_img = lst_dataset[0][0]

        print(first_img.shape)
        print(lst_dataset[0][1])

        train_dataset = LstDetection('train.lst', root=os.path.expanduser('.'))
        val_dataset = LstDetection('val.lst', root=os.path.expanduser('.'))
        classes = ('pedestrian',)
        val_metric = VOC07MApMetric(iou_thresh=0.5, class_names=classes)

    else:
        raise NotImplementedError('Dataset: {} not implemented.'.format(dataset))
    if args.mixup:
        from gluoncv.data.mixup import MixupDetection
        train_dataset = MixupDetection(train_dataset)
    return train_dataset, val_dataset, val_metric
Example #19
def get_dali_dataset(dataset_name, devices, args):
    if dataset_name.lower() == "coco":
        # training
        expanded_file_root = os.path.expanduser(args.dataset_root)
        coco_root = os.path.join(expanded_file_root,
                                 'coco',
                                 'train2017')
        coco_annotations = os.path.join(expanded_file_root,
                                        'coco',
                                        'annotations',
                                        'instances_train2017.json')
        if args.horovod:
            train_dataset = [gdata.COCODetectionDALI(num_shards=hvd.size(), shard_id=hvd.rank(), file_root=coco_root,
                                                     annotations_file=coco_annotations, device_id=hvd.local_rank())]
        else:
            train_dataset = [gdata.COCODetectionDALI(num_shards=len(devices), shard_id=i, file_root=coco_root,
                                                     annotations_file=coco_annotations, device_id=i) for i, _ in enumerate(devices)]

        # validation
        if (not args.horovod or hvd.rank() == 0):
            val_dataset = gdata.COCODetection(root=os.path.join(args.dataset_root + '/coco'),
                                              splits='instances_val2017',
                                              skip_empty=False)
            val_metric = COCODetectionMetric(
                val_dataset, args.save_prefix + '_eval', cleanup=True,
                data_shape=(args.data_shape, args.data_shape))
        else:
            val_dataset = None
            val_metric = None
    else:
        raise NotImplementedError('Dataset: {} not implemented with DALI.'.format(dataset_name))

    return train_dataset, val_dataset, val_metric
Example #20
def get_dataset(dataset, args):
    if dataset.lower() == 'voc':
        train_dataset = gdata.VOCDetection(
            splits=[(2007, 'trainval'), (2012, 'trainval')])
        val_dataset = gdata.VOCDetection(splits=[(2007, 'test')])
        val_metric = VOC07MApMetric(iou_thresh=0.5,
                                    class_names=val_dataset.classes)
    elif dataset.lower() == 'coco':
        train_dataset = gdata.COCODetection(root=args.dataset_root + "/coco",
                                            splits='instances_train2017')
        val_dataset = gdata.COCODetection(root=args.dataset_root + "/coco",
                                          splits='instances_val2017',
                                          skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset,
                                         args.save_prefix + '_eval',
                                         cleanup=True,
                                         data_shape=(args.data_shape,
                                                     args.data_shape))
        # COCO validation is slow; consider increasing the validation interval
        if args.val_interval == 1:
            args.val_interval = 10
    elif dataset.lower() == 'nzrc':
        # The classes for the dataset need to be reset after net is loaded to prevent a classes mismatch errors when loading net.
        gdata.COCODetection.CLASSES = classes
        print("train_efficirntdet.py-50 get_dataset CLASSES=",
              gdata.COCODetection.CLASSES)
        train_dataset = gdata.COCODetection(root=args.dataset_root +
                                            "/NZRC/ML4DR_v2",
                                            splits='coco_export2_train')
        val_dataset = gdata.COCODetection(root=args.dataset_root +
                                          "/NZRC/ML4DR_v2",
                                          splits='coco_export2_val',
                                          skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset,
                                         args.save_prefix + '_eval',
                                         cleanup=True,
                                         data_shape=(args.data_shape,
                                                     args.data_shape))
        # COCO validation is slow; consider increasing the validation interval
        if args.val_interval == 1:
            args.val_interval = 10
    else:
        raise NotImplementedError(
            'Dataset: {} not implemented.'.format(dataset))
    if args.num_samples < 0:
        args.num_samples = len(train_dataset)
    return train_dataset, val_dataset, val_metric
Example #21
def get_dataset(args):
    train_dataset = gdata.COCODetection(
        root=args.datasetloc, splits="instances_train2017", use_crowd=False, skip_empty=True
    )
    val_dataset = gdata.COCODetection(
        root=args.datasetloc, splits="instances_val2017", skip_empty=False
    )
    val_metric = COCODetectionMetric(val_dataset, args.save_prefix + "_eval")
    return train_dataset, val_dataset, val_metric
Example #22
def get_dataset(args):
    train_dataset = COCODetection(args.train, 'train', args.images, 'labels')
    val_dataset = COCODetection(args.test, 'test', args.images, 'labels')

    val_metric = COCODetectionMetric(
        val_dataset, args.save_prefix + '_eval', cleanup=True,
        data_shape=(args.data_shape, args.data_shape)
    )

    args.num_samples = len(train_dataset)
    return train_dataset, val_dataset, val_metric
Example #23
def get_dataset(dataset, args):
    if dataset.lower() == 'voc':
        val_dataset = gdata.VOCDetection(
            splits=[(2007, 'test')])
        val_metric = VOC07MApMetric(iou_thresh=0.5, class_names=val_dataset.classes)
    elif dataset.lower() == 'coco':
        val_dataset = gdata.COCODetection(splits='instances_val2017', skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset, args.save_prefix + '_eval',
                                         cleanup=not args.save_json)
    else:
        raise NotImplementedError('Dataset: {} not implemented.'.format(dataset))
    return val_dataset, val_metric
Example #24
def get_dataset(dataset, data_shape):
    if dataset.lower() == 'voc':
        val_dataset = gdata.VOCDetection(splits=[(2007, 'test')])
        val_metric = VOC07MApMetric(iou_thresh=0.5, class_names=val_dataset.classes)
    elif dataset.lower() == 'coco':
        #val_dataset = gdata.PersonDetection(root=COCO_ROOT_DIR, splits='person_train2017', skip_empty=False)
        #val_dataset = gdata.COCODetection(root=COCO_ROOT_DIR, splits='instances_val2017', skip_empty=False)
        val_dataset = gdata.PersonDetection(root=COCO_ROOT_DIR, splits='person_val2017', skip_empty=False)
        val_metric = COCODetectionMetric(
            val_dataset, args.save_prefix + '_eval', cleanup=True,
            data_shape=(data_shape, data_shape))
    else:
        raise NotImplementedError('Dataset: {} not implemented.'.format(dataset))
    return val_dataset, val_metric
Example #25
def get_dataset(dataset, args):
    if dataset.lower() == 'coco':
        from lib.dataset.coco import COCODataset
        val_dataset = COCODataset(root=args.val_data_root,
                                  annFile=args.val_ann_file,
                                  use_crowd=False)
        # train_dataset = gdata.COCODetection(splits='instances_train2017', use_crowd=False)
        # val_dataset = gdata.COCODetection(splits='instances_val2017', skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset,
                                         args.save_prefix + '_eval',
                                         cleanup=True)
    else:
        raise NotImplementedError(
            'Dataset: {} not implemented.'.format(dataset))
    return None, val_dataset, val_metric
Example #26
def get_dataset(dataset, args):
    width, height = args.data_shape, args.data_shape
    if dataset.lower() == 'coco':
        val_dataset = COCODetection(root='./data/coco',
                                    splits='instances_val2017',
                                    skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset,
                                         args.save_prefix + '_eval',
                                         cleanup=not args.save_json,
                                         data_shape=(height, width),
                                         score_thresh=args.score_thresh)
    else:
        raise NotImplementedError(
            'Dataset: {} not implemented.'.format(dataset))
    return val_dataset, val_metric
Example #27
def get_dataset(dataset, args):
    if dataset.lower() == 'voc':

        #        train_dataset = gdata.VOCDetection(
        #            splits=[(2007, 'trainval'), (2012, 'trainval')])
        #        val_dataset = gdata.VOCDetection(
        #            splits=[(2007, 'test')])
        #        val_metric = VOC07MApMetric(iou_thresh=0.5, class_names=val_dataset.classes)

        root = '/home/wangqiushi/pycode/datasets/DR4/'
        image_root = os.path.join(root, 'region_pics')
        #image_root = '/'
        train_label_file = os.path.join(root, 'train.txt')
        val_label_file = os.path.join(root, 'valid.txt')
        classes = ('40', '50')
        map_file = os.path.join(root, 'DR2_map.txt')

        train_dataset = DetectionDataset(image_root,
                                         train_label_file,
                                         classes,
                                         map_file=map_file)
        val_dataset = DetectionDataset(image_root,
                                       val_label_file,
                                       classes,
                                       map_file=map_file,
                                       shuffle=False)
        val_metric = VOC07MApMetric(iou_thresh=0.5,
                                    class_names=val_dataset.classes)

    elif dataset.lower() == 'coco':
        train_dataset = gdata.COCODetection(splits='instances_train2017')
        val_dataset = gdata.COCODetection(splits='instances_val2017',
                                          skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset,
                                         args.save_prefix + '_eval',
                                         cleanup=True,
                                         data_shape=(args.data_shape,
                                                     args.data_shape))
        # COCO validation is slow; consider increasing the validation interval
        if args.val_interval == 1:
            args.val_interval = 10

    else:
        raise NotImplementedError(
            'Dataset: {} not implemented.'.format(dataset))
    return train_dataset, val_dataset, val_metric
Example #28
def get_eval_metric(args, val_dataset=None, CLASSES=None, IOU_THRESH=0.5):

    if args.pre_trained_weights == 'VOC':
        eval_metric = VOC07MApMetric(iou_thresh=IOU_THRESH,
                                     class_names=CLASSES)
    elif args.pre_trained_weights == 'COCO':
        # val_dataset must be supplied so the metric can look up ground truth
        eval_metric = COCODetectionMetric(val_dataset,
                                          args.save_prefix + '_eval',
                                          cleanup=True,
                                          data_shape=(args.data_shape,
                                                      args.data_shape))
    elif args.pre_trained_weights == 'NONE':
        eval_metric = VOC07MApMetric(iou_thresh=IOU_THRESH,
                                     class_names=CLASSES)
    else:
        raise NotImplementedError(
            'Pre-trained weights: {} not implemented.'.format(
                args.pre_trained_weights))

    return eval_metric
Example #29
def get_dataset(dataset, args):
    if dataset.lower() == 'voc':
        train_dataset = gdata.VOCDetection(
            splits=[(2007, 'trainval'), (2012, 'trainval')])
        val_dataset = gdata.VOCDetection(splits=[(2007, 'test')])
        val_metric = VOC07MApMetric(iou_thresh=0.5,
                                    class_names=val_dataset.classes)
    elif dataset.lower() == 'coco':
        train_dataset = gdata.COCODetection(splits='instances_train2017',
                                            use_crowd=False)
        val_dataset = gdata.COCODetection(splits='instances_val2017',
                                          skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset,
                                         os.path.join(args.logdir, 'eval'),
                                         cleanup=True)
    else:
        raise NotImplementedError(
            'Dataset: {} not implemented.'.format(dataset))
    if cfg.TRAIN.MODE_MIXUP:
        from gluoncv.data.mixup import MixupDetection
        train_dataset = MixupDetection(train_dataset)
    return train_dataset, val_dataset, val_metric
Example #30
def get_dataset(dataset, args):
    if dataset.lower() == 'voc':
        train_dataset = gdata.VOCDetection(root=args.data_path,
            splits=[(2007, 'trainval'), (2012, 'trainval')])
        val_dataset = gdata.VOCDetection(root=args.data_path,
            splits=[(2007, 'test')])
        val_metric = VOC07MApMetric(iou_thresh=0.5, class_names=val_dataset.classes)
    elif dataset.lower() == 'coco':
        #train_dataset = gdata.COCODetection(splits='instances_train2014', use_crowd=False)
        train_dataset = gdata.COCODetection(root=args.data_path, splits='instances_train2017')
        val_dataset = gdata.COCODetection(root=args.data_path, splits='instances_val2017', skip_empty=False)
        val_metric = COCODetectionMetric(val_dataset, args.save_prefix + '_eval', cleanup=True)
    elif dataset.lower() == 'rec':
        train_dataset = gdata.RecordFileDetection(os.path.join(args.data_path, 'pikachu_train.rec'))
        val_dataset = gdata.RecordFileDetection(os.path.join(args.data_path, 'pikachu_train.rec'))
        val_metric = VOC07MApMetric(iou_thresh=0.5, class_names=rec_classes)
    else:
        raise NotImplementedError('Dataset: {} not implemented.'.format(dataset))
    if args.mixup:
        from gluoncv.data.mixup import MixupDetection
        train_dataset = MixupDetection(train_dataset)
    return train_dataset, val_dataset, val_metric