Example #1
    def _update(self):
        """Use coco to get real scores. """
        if not self._current_id == len(self._img_ids):
            warnings.warn(
                'Recorded {} out of {} validation images, incomplete results'.
                format(self._current_id, len(self._img_ids)))
        if not self._results:
            # in case of empty results, push a dummy result
            self._results.append({
                'image_id': self._img_ids[0],
                'category_id': 0,
                'bbox': [0, 0, 0, 0],
                'score': 0
            })
        import json
        try:
            with open(self._filename, 'w') as f:
                json.dump(self._results, f)
        except IOError as e:
            raise RuntimeError(
                "Unable to dump json file, ignored. What(): {}".format(str(e)))

        pred = self.dataset.coco.loadRes(self._filename)
        gt = self.dataset.coco
        # lazy import pycocotools
        try_import_pycocotools()
        from pycocotools.cocoeval import COCOeval
        coco_eval = COCOeval(gt, pred, 'bbox')
        coco_eval.evaluate()
        coco_eval.accumulate()
        self._coco_eval = coco_eval
        return coco_eval
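
The method above returns the prepared COCOeval object without summarizing it. A minimal consumption sketch, assuming `metric` is an instance of the class that owns `_update()` and relying on standard pycocotools behavior (summarize() fills coco_eval.stats, where stats[0] is AP@[0.50:0.95] and stats[1] is AP@0.50):

# Hypothetical usage sketch; `metric` is an assumed instance of the metric class.
coco_eval = metric._update()
coco_eval.summarize()   # prints the standard COCO table and fills coco_eval.stats
print('mAP@[0.50:0.95]: {:.3f}'.format(coco_eval.stats[0]))
print('mAP@0.50:        {:.3f}'.format(coco_eval.stats[1]))
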
Example #2
    def __init__(self, dataset, save_prefix, use_time=True, cleanup=False, score_thresh=1e-3):
        super(COCOInstanceMetric, self).__init__('COCOInstance')
        self.dataset = dataset
        self._img_ids = sorted(dataset.coco.getImgIds())
        self._current_id = 0
        self._cleanup = cleanup
        self._results = []
        self._score_thresh = score_thresh

        try_import_pycocotools()
        import pycocotools.mask as cocomask
        self._cocomask = cocomask

        if use_time:
            import datetime
            t = datetime.datetime.now().strftime('_%Y_%m_%d_%H_%M_%S')
        else:
            t = ''
        self._filename = osp.abspath(osp.expanduser(save_prefix) + t + '.json')
        try:
            f = open(self._filename, 'w')
        except IOError as e:
            raise RuntimeError("Unable to open json file to dump. What(): {}".format(str(e)))
        else:
            f.close()
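
A hypothetical instantiation sketch for the constructor above; `val_dataset` stands in for any COCO-style dataset that exposes a pycocotools COCO object as `.coco`:

# `val_dataset` is an assumed COCO-style dataset, not defined in this snippet.
metric = COCOInstanceMetric(val_dataset, save_prefix='coco_instance_eval',
                            use_time=True, cleanup=True, score_thresh=1e-3)
# With use_time=True, results go to a timestamped file such as
# coco_instance_eval_2024_01_01_12_00_00.json in the working directory,
# which the constructor checks for writability up front.
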
Example #3
    def _update(self):
        """Use coco to get real scores. """
        # if not self._current_id == len(self._img_ids):
        #     warnings.warn(
        #         'Recorded {} out of {} validation images, incomplete results'.format(
        #             self._current_id, len(self._img_ids)))
        import json
        try:
            with open(self._filename, 'w') as f:
                json.dump(self._results, f)
        except IOError as e:
            raise RuntimeError("Unable to dump json file, ignored. What(): {}".format(str(e)))

        pred = self.dataset.coco.loadRes(self._filename)
        gt = self.dataset.coco
        # lazy import pycocotools
        try_import_pycocotools()
        from pycocotools.cocoeval import COCOeval
        coco_eval = COCOeval(gt, pred, 'keypoints')
        coco_eval.params.useSegm = None
        coco_eval.evaluate()
        coco_eval.accumulate()
        coco_eval.summarize()
        self._coco_eval = coco_eval
        return coco_eval
Example #4
 def __init__(self,
              root=os.path.expanduser('~/.torch/datasets/coco'),
              split='train',
              mode=None,
              transform=None,
              **kwargs):
     super(COCOSegmentation, self).__init__(root, split, mode, transform,
                                            **kwargs)
     # lazy import pycocotools
     try_import_pycocotools()
     from pycocotools.coco import COCO
     from pycocotools import mask
     if split == 'train':
         print('train set')
         ann_file = os.path.join(root,
                                 'annotations/instances_train2017.json')
         ids_file = os.path.join(root, 'annotations/train_ids.mx')
         self.root = os.path.join(root, 'train2017')
     else:
         print('val set')
         ann_file = os.path.join(root, 'annotations/instances_val2017.json')
         ids_file = os.path.join(root, 'annotations/val_ids.mx')
         self.root = os.path.join(root, 'val2017')
     self.coco = COCO(ann_file)
     self.coco_mask = mask
     if os.path.exists(ids_file):
         with open(ids_file, 'rb') as f:
             self.ids = pickle.load(f)
     else:
         ids = list(self.coco.imgs.keys())
         self.ids = self._preprocess(ids, ids_file)
     self.transform = transform
Example #5
    def _load_jsons(self):
        """Load all image paths and labels from JSON annotation files into buffer."""
        items = []
        labels = []
        segms = []
        # lazy import pycocotools
        try_import_pycocotools()
        from pycocotools.coco import COCO
        for split in self._splits:
            anno = os.path.join(self._root, 'annotations', split) + '.json'
            _coco = COCO(anno)
            self._coco.append(_coco)
            classes = [c['name'] for c in _coco.loadCats(_coco.getCatIds())]
            if not classes == self.classes:
                raise ValueError("Incompatible category names with COCO: "
                                 "{} vs. {}".format(classes, self.classes))
            json_id_to_contiguous = {
                v: k
                for k, v in enumerate(_coco.getCatIds())
            }
            if self.json_id_to_contiguous is None:
                self.json_id_to_contiguous = json_id_to_contiguous
                self.contiguous_id_to_json = {
                    v: k
                    for k, v in self.json_id_to_contiguous.items()
                }
            else:
                assert self.json_id_to_contiguous == json_id_to_contiguous

            # iterate through the annotations
            image_ids = sorted(_coco.getImgIds())
            for entry in _coco.loadImgs(image_ids):
                dirname, filename = entry['coco_url'].split('/')[-2:]
                abs_path = os.path.join(self._root, dirname, filename)
                if not os.path.exists(abs_path):
                    raise IOError('Image: {} does not exist.'.format(abs_path))
                label, segm = self._check_load_bbox(_coco, entry)
                # skip images without objects
                if self._skip_empty and label is None:
                    continue
                items.append(abs_path)
                labels.append(label)
                segms.append(segm)
        return items, labels, segms
Example #6
 def _update(self, annType='bbox'):
     """Use coco to get real scores. """
     pred = self.dataset.coco.loadRes(self._filename)
     gt = self.dataset.coco
     # lazy import pycocotools
     try_import_pycocotools()
     from pycocotools.cocoeval import COCOeval
     coco_eval = COCOeval(gt, pred, annType)
     coco_eval.evaluate()
     coco_eval.accumulate()
     names, values = [], []
     names.append('~~~~ Summary {} metrics ~~~~\n'.format(annType))
     # catch coco print string, don't want directly print here
     _stdout = sys.stdout
     sys.stdout = io.StringIO()
     coco_eval.summarize()
     coco_summary = sys.stdout.getvalue()
     sys.stdout = _stdout
     values.append(str(coco_summary).strip())
     names.append('~~~~ Mean AP for {} ~~~~\n'.format(annType))
     values.append('{:.1f}'.format(100 * self._get_ap(coco_eval)))
     return names, values
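
A minimal sketch of consuming the parallel (names, values) lists returned above, assuming `metric` is an instance of the owning metric class:

# Hypothetical usage sketch; `metric` is an assumed instance of the metric class.
names, values = metric._update(annType='bbox')
for name, value in zip(names, values):
    print(name)
    print(value)
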
Example #7
def to_mask(polys, size):
    """Convert list of polygons to full size binary mask

    Parameters
    ----------
    polys : list of numpy.ndarray
        Numpy.ndarray with shape (N, 2) where N is the number of bounding boxes.
        The second axis represents points of the polygons.
        Specifically, these are :math:`(x, y)`.
    size : tuple
        Tuple of length 2: (width, height).

    Returns
    -------
    numpy.ndarray
        Full size binary mask of shape (height, width)
    """
    try_import_pycocotools()
    import pycocotools.mask as cocomask
    width, height = size
    polys = [p.flatten().tolist() for p in polys]
    rles = cocomask.frPyObjects(polys, height, width)
    rle = cocomask.merge(rles)
    return cocomask.decode(rle)
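
A small usage sketch for to_mask; the square polygon and image size below are made-up illustration values, and pycocotools must be installed:

import numpy as np

# A hypothetical 40x40 axis-aligned square inside a 100x100 image.
square = np.array([[10, 10], [50, 10], [50, 50], [10, 50]], dtype=np.float64)
mask = to_mask([square], (100, 100))
print(mask.shape)   # (100, 100), binary uint8 mask
print(mask.sum())   # roughly 40 * 40 foreground pixels
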
Example #8
        # test2017.zip, for those who want to attend the competition.
        # ('http://images.cocodataset.org/zips/test2017.zip',
        #  '4e443f8a2eca6b1dac8a6c57641b67dd40621a49'),
    ]
    makedirs(path)
    for url, checksum in _DOWNLOAD_URLS:
        filename = download(url, path=path, overwrite=overwrite, sha1_hash=checksum)
        # extract
        with zipfile.ZipFile(filename) as zf:
            zf.extractall(path=path)


if __name__ == '__main__':
    args = parse_args()
    path = os.path.expanduser(args.download_dir)
    if not os.path.isdir(path) or not os.path.isdir(os.path.join(path, 'train2017')) \
            or not os.path.isdir(os.path.join(path, 'val2017')) \
            or not os.path.isdir(os.path.join(path, 'annotations')):
        if args.no_download:
            raise ValueError(('{} is not a valid directory, make sure it is present.'
                              ' Or remove the "--no-download" flag so it can be'
                              ' downloaded automatically'.format(path)))
        else:
            download_coco(path, overwrite=args.overwrite)

    # make symlink
    makedirs(os.path.expanduser('~/.torch/datasets'))
    if os.path.isdir(_TARGET_DIR):
        # _TARGET_DIR is expected to be a symlink; os.remove drops the old link
        os.remove(_TARGET_DIR)
    os.symlink(path, _TARGET_DIR)
    try_import_pycocotools()