def execute(parser, args):
    """Launches the FiftyOne App on the dataset and blocks until exit.

    Args:
        parser: the argument parser (unused)
        args: parsed arguments with ``name``, ``port``, and ``remote``
    """
    remote = args.remote
    dataset = fod.load_dataset(args.name)
    session = fos.launch_app(
        dataset=dataset, port=args.port, remote=remote
    )
    _watch_session(session, remote=remote)
def _get_dataset(self):
    """Returns the dataset backing this document, or ``None`` if the
    document is not persisted in the database.
    """
    if not self._in_db:
        return None

    # Local import to avoid a circular dependency at module load time
    from fiftyone.core.dataset import load_dataset

    return load_dataset(self.dataset_name)
def execute(parser, args):
    """Prints the last ``num_samples`` samples of the dataset.

    Args:
        parser: the argument parser (unused)
        args: parsed arguments with ``name`` and ``num_samples``
    """
    dataset = fod.load_dataset(args.name)
    for sample in dataset.tail(num_samples=args.num_samples):
        print(sample)
def execute(parser, args):
    """Renames the dataset.

    Args:
        parser: the argument parser (unused)
        args: parsed arguments with ``name`` and ``new_name``
    """
    old_name = args.name
    new_name = args.new_name

    dataset = fod.load_dataset(old_name)
    dataset.name = new_name

    print("Dataset '%s' renamed to '%s'" % (old_name, new_name))
def from_dict(cls, d, **kwargs):
    """Constructs a :class:`StateDescription` from a JSON dictionary.

    Args:
        d: a JSON dictionary

    Returns:
        :class:`StateDescription`
    """
    dataset_dict = d.get("dataset", None)
    dataset = (
        fod.load_dataset(dataset_dict.get("name"))
        if dataset_dict is not None
        else None
    )

    # A view can only exist on top of a loaded dataset
    view = None
    view_dict = d.get("view", None)
    if dataset is not None:
        view = fov.DatasetView(dataset)
        if view_dict is not None:
            view._stages = [
                fos.ViewStage._from_dict(s) for s in view_dict["view"]
            ]

    return cls(
        close=d.get("close", False),
        connected=d.get("connected", False),
        dataset=dataset,
        selected=d.get("selected", []),
        view=view,
        **kwargs
    )
def add_field( cls, field_name, ftype, embedded_doc_type=None, subfield=None, save=True, ): """Adds a new field to the sample. Args: field_name: the field name ftype: the field type to create. Must be a subclass of :class:`fiftyone.core.fields.Field` embedded_doc_type (None): the :class:`fiftyone.core.odm.BaseEmbeddedDocument` type of the field. Used only when ``ftype`` is :class:`fiftyone.core.fields.EmbeddedDocumentField` subfield (None): the type of the contained field. Used only when ``ftype`` is a list or dict type """ # Additional arg `save` is to prevent saving the fields when reloading # a dataset from the database. # pylint: disable=no-member if field_name in cls._fields: raise ValueError("Field '%s' already exists" % field_name) field = _create_field( field_name, ftype, embedded_doc_type=embedded_doc_type, subfield=subfield, ) cls._fields[field_name] = field cls._fields_ordered += (field_name,) try: if issubclass(cls, DatasetSampleDocument): # Only set the attribute if it is a class setattr(cls, field_name, field) except TypeError: # Instance, not class, so do not `setattr` pass if save: # Update dataset meta class # @todo(Tyler) refactor to avoid local import here import fiftyone.core.dataset as fod dataset = fod.load_dataset(cls.__name__) field = cls._fields[field_name] sample_field = SampleFieldDocument.from_field(field) dataset._meta.sample_fields.append(sample_field) dataset._meta.save()
def execute(parser, args):
    """Renders annotated versions of the dataset's samples to disk.

    Args:
        parser: the argument parser (unused)
        args: parsed arguments with ``name``, ``anno_dir``, and
            ``label_fields`` (a comma-separated string, or ``None``)
    """
    anno_dir = args.anno_dir

    dataset = fod.load_dataset(args.name)

    # Parse the comma-separated list of label fields, if provided
    label_fields = args.label_fields
    if label_fields is not None:
        label_fields = [f.strip() for f in label_fields.split(",")]

    dataset.draw_labels(anno_dir, label_fields=label_fields)
    print("Annotations written to '%s'" % anno_dir)
def execute(parser, args):
    """Exports the dataset to disk.

    Exactly one of ``export_dir`` (with optional ``label_field`` and
    dataset ``type``) or ``json_path`` must be provided.

    Args:
        parser: the argument parser (unused)
        args: parsed arguments with ``name``, ``export_dir``,
            ``json_path``, ``label_field``, and ``type``

    Raises:
        ValueError: if neither ``export_dir`` nor ``json_path`` is given
    """
    name = args.name

    # Resolve the dataset type eagerly so an invalid `--type` fails fast
    dataset_type = None
    if args.type:
        dataset_type = etau.get_class(args.type)

    dataset = fod.load_dataset(name)

    if args.export_dir:
        dataset.export(
            args.export_dir,
            label_field=args.label_field,
            dataset_type=dataset_type,
        )
        print("Dataset '%s' exported to '%s'" % (name, args.export_dir))
    elif args.json_path:
        dataset.write_json(args.json_path)
        print("Dataset '%s' exported to '%s'" % (name, args.json_path))
    else:
        raise ValueError(
            "Either `export_dir` or `json_path` must be provided"
        )
def execute(parser, args):
    """Prints the samples in the dataset through a ``less`` pager.

    Args:
        parser: the argument parser (unused)
        args: parsed arguments with ``name``
    """
    dataset = fod.load_dataset(args.name)

    # @todo support Windows and other environments without `less`
    # Look at pydoc.pager() for inspiration?
    #
    # BUG FIX: the original passed an args *list* together with
    # `shell=True`; on POSIX that hands `-F -R -S -X -K` to the shell
    # itself rather than to `less`, silently dropping the flags. Running
    # without a shell delivers the flags to `less` as intended
    p = subprocess.Popen(
        ["less", "-F", "-R", "-S", "-X", "-K"],
        shell=False,
        stdin=subprocess.PIPE,
    )

    try:
        with io.TextIOWrapper(p.stdin, errors="backslashreplace") as pipe:
            for sample in dataset:
                pipe.write(str(sample) + "\n")

        p.wait()
    except (KeyboardInterrupt, OSError):
        # User quit the pager or the pipe closed early; best-effort exit
        pass
def delete_field(cls, field_name):
    """Deletes the field from the sample.

    If the sample is in a dataset, the field will be removed from all
    samples in the dataset.

    Args:
        field_name: the field name

    Raises:
        AttributeError: if the field does not exist
    """
    try:
        # Delete the field's value from all samples in the collection
        # pylint: disable=no-member
        cls.objects.update(**{"unset__%s" % field_name: None})
    except InvalidQueryError:
        # mongoengine rejects `unset` on unknown fields
        raise AttributeError("Sample has no field '%s'" % field_name)

    # Remove the field from the mongoengine schema of this sample class
    # pylint: disable=no-member
    del cls._fields[field_name]
    cls._fields_ordered = tuple(
        fn for fn in cls._fields_ordered if fn != field_name
    )
    delattr(cls, field_name)

    # Update dataset meta class
    # @todo(Tyler) refactor to avoid local import here
    import fiftyone.core.dataset as fod

    # NOTE(review): assumes the dataset name equals the sample class name
    # (`cls.__name__`) — confirm against how these classes are generated
    dataset = fod.load_dataset(cls.__name__)
    dataset._meta.sample_fields = [
        sf for sf in dataset._meta.sample_fields if sf.name != field_name
    ]
    dataset._meta.save()
def execute(parser, args):
    """Prints summary information about the dataset.

    Args:
        parser: the argument parser (unused)
        args: parsed arguments with ``name``
    """
    print(fod.load_dataset(args.name))