def setup(self):
    """Performs setup for the import.

    Reads ``metadata.json`` from the dataset directory (if present) and
    loads the sample list from ``samples.json``.
    """
    metadata_path = os.path.join(self.dataset_dir, "metadata.json")
    self._metadata = (
        etas.load_json(metadata_path)
        if os.path.isfile(metadata_path)
        else {}
    )

    samples_path = os.path.join(self.dataset_dir, "samples.json")
    all_samples = etas.load_json(samples_path).get("samples", [])
    self._samples = self._preprocess_list(all_samples)
    self._num_samples = len(self._samples)
def load_coco_detection_annotations(json_path):
    """Loads the COCO annotations from the given JSON file.

    See :class:`fiftyone.types.dataset_types.COCODetectionDataset` for format
    details.

    Args:
        json_path: the path to the annotations JSON file

    Returns:
        a tuple of

        -   classes: a list of classes, or ``None`` if no categories were
            found
        -   images: a dict mapping image filenames to image dicts
        -   annotations: a dict mapping image IDs to list of
            :class:`COCOObject` instances
    """
    d = etas.load_json(json_path)

    # Load classes
    categories = d.get("categories", None)
    if categories:
        classes = coco_categories_to_classes(categories)
    else:
        classes = None

    # Load image metadata
    images = {i["id"]: i for i in d.get("images", [])}

    # Load annotations
    #
    # Use `get()` so that unlabeled datasets (no "annotations" key) yield an
    # empty mapping rather than raising a `KeyError`
    annotations = defaultdict(list)
    for a in d.get("annotations", []):
        annotations[a["image_id"]].append(COCOObject.from_annotation_dict(a))

    return classes, images, dict(annotations)
def setup(self):
    """Performs setup for the import.

    Builds the map of image UUIDs to paths in ``data/``, loads
    ``labels.json`` (if present), and applies any label filtering.
    """
    self._sample_parser = FiftyOneImageClassificationSampleParser()

    data_dir = os.path.join(self.dataset_dir, "data")
    image_paths = etau.list_files(data_dir, abs_paths=True)
    self._image_paths_map = {
        os.path.splitext(os.path.basename(image_path))[0]: image_path
        for image_path in image_paths
    }

    labels_path = os.path.join(self.dataset_dir, "labels.json")
    labels = etas.load_json(labels_path) if os.path.isfile(labels_path) else {}

    self._classes = labels.get("classes", None)
    self._sample_parser.classes = self._classes

    labels_map = labels.get("labels", {})
    if self.skip_unlabeled:
        # Drop samples whose label is `None`
        labels_map = {
            uuid: label
            for uuid, label in labels_map.items()
            if label is not None
        }

    self._labels_map = labels_map
    self._uuids = self._preprocess_list(sorted(labels_map.keys()))
    self._num_samples = len(self._uuids)
def get_classes(dataset_dir):
    """Returns the classes list declared in the ``metadata.json`` file within
    the given dataset directory, if any.

    Args:
        dataset_dir: the dataset directory

    Returns:
        the list of classes, or ``None`` if unavailable
    """
    json_path = os.path.join(dataset_dir, "metadata.json")
    if os.path.isfile(json_path):
        info = etas.load_json(json_path).get("info", {})
        return info.get("classes", None)

    return None
def setup(self):
    """Performs setup for the import.

    Reads ``metadata.json`` (if present) to determine the media type,
    records the frame labels directory, and loads the sample list from
    ``samples.json``.
    """
    metadata_path = os.path.join(self.dataset_dir, "metadata.json")
    if os.path.isfile(metadata_path):
        metadata = etas.load_json(metadata_path)
        self._metadata = metadata
        # Datasets with no declared media type are treated as image datasets
        self._is_video_dataset = (
            metadata.get("media_type", fomm.IMAGE) == fomm.VIDEO
        )
    else:
        self._metadata = {}

    self._frame_labels_dir = os.path.join(self.dataset_dir, "frames")

    samples_path = os.path.join(self.dataset_dir, "samples.json")
    all_samples = etas.load_json(samples_path).get("samples", [])
    self._samples = self._preprocess_list(all_samples)
    self._num_samples = len(self._samples)
def _parse_label(self, target, img=None):
    """Parses the given detection target into a
    :class:`fiftyone.core.labels.Detections` instance.

    Args:
        target: the target to parse, which may be ``None``, a JSON string,
            or an iterable of object dicts
        img (None): an optional image to provide context when parsing

    Returns:
        a :class:`fiftyone.core.labels.Detections`, or ``None``
    """
    if target is None:
        return None

    # A string target is a path/serialization to load via JSON
    if etau.is_str(target):
        target = etas.load_json(target)

    detections = [self._parse_detection(obj, img=img) for obj in target]
    return fol.Detections(detections=detections)
def load_coco_detection_annotations(json_path):
    """Loads the COCO annotations from the given JSON file.

    See :class:`fiftyone.types.dataset_types.COCODetectionDataset` for format
    details.

    Args:
        json_path: the path to the annotations JSON file

    Returns:
        a tuple of

        -   info: a dict of dataset info
        -   classes: a list of classes
        -   supercategory_map: a dict mapping class labels to supercategories
        -   images: a dict mapping image filenames to image dicts
        -   annotations: a dict mapping image IDs to list of
            :class:`COCOObject` instances, or ``None`` for unlabeled datasets
    """
    d = etas.load_json(json_path)

    # Load dataset info, folding licenses/categories into it when present
    info = d.get("info", {})
    licenses = d.get("licenses", None)
    categories = d.get("categories", None)

    if licenses is not None:
        info["licenses"] = licenses

    if categories is not None:
        info["categories"] = categories

    # Load classes
    if categories is None:
        classes = None
        supercategory_map = None
    else:
        classes, supercategory_map = parse_coco_categories(categories)

    # Load image metadata
    images = {image["id"]: image for image in d.get("images", [])}

    # Load annotations; a missing "annotations" key indicates an unlabeled
    # dataset, which is reported as `None`
    raw_annotations = d.get("annotations", None)
    if raw_annotations is None:
        annotations = None
    else:
        annotations = defaultdict(list)
        for obj in raw_annotations:
            annotations[obj["image_id"]].append(
                COCOObject.from_annotation_dict(obj)
            )

        annotations = dict(annotations)

    return info, classes, supercategory_map, images, annotations
def load_bdd_annotations(json_path):
    """Loads the BDD annotations from the given JSON file.

    See :class:`fiftyone.types.dataset_types.BDDDataset` for more format
    details.

    Args:
        json_path: the path to the annotations JSON file

    Returns:
        a dict mapping filenames to BDD annotation dicts
    """
    # The JSON file contains a list of per-image annotation dicts, each
    # keyed on its image filename via the "name" field
    anno_list = etas.load_json(json_path)
    return {anno["name"]: anno for anno in anno_list}
def setup(self):
    """Performs setup for the import.

    Builds the map of image UUIDs to paths in ``data/`` and loads the
    classes and labels from ``labels.json``.
    """
    self._sample_parser = FiftyOneImageClassificationSampleParser()

    data_dir = os.path.join(self.dataset_dir, "data")
    self._image_paths_map = {
        os.path.splitext(os.path.basename(image_path))[0]: image_path
        for image_path in etau.list_files(data_dir, abs_paths=True)
    }

    labels_path = os.path.join(self.dataset_dir, "labels.json")
    labels = etas.load_json(labels_path)

    self._sample_parser.classes = labels.get("classes", None)
    self._labels = labels.get("labels", {})
    self._num_samples = len(self._labels)
def _parse_label(self, labels, img):
    """Parses the given BDD annotation into a label.

    Args:
        labels: the labels to parse, which may be ``None``, a JSON string,
            or a BDD annotation dict
        img: the image, used to compute the frame size

    Returns:
        the parsed label, or ``None``
    """
    if labels is None:
        return None

    # A string is a path/serialization to load via JSON
    if etau.is_str(labels):
        labels = etas.load_json(labels)

    frame_size = etai.to_frame_size(img=img)
    label = _parse_bdd_annotation(labels, frame_size)

    if label is None or not self.expand:
        return label

    return label.expand(
        prefix=self.prefix,
        labels_dict=self.labels_dict,
        multilabel=self.multilabel,
        skip_non_categorical=self.skip_non_categorical,
    )
def parse_dict(d, key, env_var=None, default=no_default):
    """Parses a dictionary attribute.

    Args:
        d: a JSON dictionary
        key: the key to parse
        env_var: an optional environment variable to load the attribute from
            rather than using the JSON dictionary
        default: a default dict to return if key is not present

    Returns:
        a dictionary

    Raises:
        EnvConfigError: if the environment variable, the dictionary key, or a
            default value was not provided
    """
    # Environment variable values are JSON strings, so they are decoded via
    # `etas.load_json`
    return _parse_env_var_or_key(
        d, key, dict, env_var, etas.load_json, False, default
    )
def from_json(cls, path_or_str, name=None, rel_dir=None):
    """Loads a :class:`Dataset` from JSON generated by
    :func:`fiftyone.core.collections.SampleCollection.write_json` or
    :func:`fiftyone.core.collections.SampleCollection.to_json`.

    The JSON file can contain an export of any
    :class:`fiftyone.core.collections.SampleCollection`, e.g.,
    :class:`Dataset` or :class:`fiftyone.core.view.DatasetView`.

    Args:
        path_or_str: the path to a JSON file on disk or a JSON string
        name (None): a name for the new dataset. By default, ``d["name"]`` is
            used
        rel_dir (None): a relative directory to prepend to the ``filepath``
            of each sample, if the filepath is not absolute (begins with a
            path separator). The path is converted to an absolute path (if
            necessary) via
            ``os.path.abspath(os.path.expanduser(rel_dir))``

    Returns:
        a :class:`Dataset`
    """
    # `etas.load_json` accepts both file paths and raw JSON strings
    return cls.from_dict(
        etas.load_json(path_or_str), name=name, rel_dir=rel_dir
    )
def _import_frame_labels(self, sample, labels_path):
    """Loads the frame labels JSON at ``labels_path`` and attaches each
    frame to the given video sample.
    """
    frames = etas.load_json(labels_path).get("frames", {})
    for frame_number, frame_dict in frames.items():
        # JSON keys are strings; frame numbers must be ints
        sample.frames[int(frame_number)] = fof.Frame.from_dict(frame_dict)
def _parse_label(self, labels, img):
    """Parses the given BDD annotation into a label.

    Args:
        labels: the labels to parse, which may be ``None``, a JSON string,
            or a BDD annotation dict
        img: the image, used to compute the frame size

    Returns:
        the parsed label, or ``None``
    """
    # Gracefully handle unlabeled samples, consistent with the other label
    # parsers in this module
    if labels is None:
        return None

    # A string is a path/serialization to load via JSON
    if etau.is_str(labels):
        labels = etas.load_json(labels)

    frame_size = etai.to_frame_size(img=img)
    return _parse_bdd_annotation(labels, frame_size)
def setup(self):
    """Performs setup for the import.

    Loads the sample list from ``samples.json`` in the dataset directory.
    """
    json_path = os.path.join(self.dataset_dir, "samples.json")
    d = etas.load_json(json_path)
    self._samples = d.get("samples", [])