Example #1
0
def parse_field_str(field_str):
    """Parses the ``str(field)`` representation of a :class:`Field` into
    components that can be passed to
    :meth:`fiftyone.core.dataset.Dataset.add_sample_field`.

    Returns:
        a tuple of

        -   ftype: the :class:`fiftyone.core.fields.Field` class
        -   embedded_doc_type: the
                :class:`fiftyone.core.odm.BaseEmbeddedDocument` type of the
                field, or ``None``
        -   subfield: the :class:`fiftyone.core.fields.Field` class of the
                subfield, or ``None``
    """
    parts = field_str.strip().split("(", 1)
    ftype = etau.get_class(parts[0])

    # No parenthesized parameter => a simple field type
    if len(parts) == 1:
        return ftype, None, None

    # Drop the trailing ")" and resolve the parameter class
    param = etau.get_class(parts[1][:-1])

    if issubclass(ftype, EmbeddedDocumentField):
        return ftype, param, None

    if issubclass(ftype, (ListField, DictField)):
        return ftype, None, param

    raise ValueError("Failed to parse field string '%s'" % field_str)
Example #2
0
    def parse(class_name, module_name=None):
        """Parses a Configurable subclass name string.

        Both the Configurable class and its Config class are assumed to live
        in the same module, which is loaded on demand if necessary.

        Args:
            class_name: a string containing the name of the Configurable class,
                e.g. "ClassName", or a fully-qualified class name, e.g.
                "eta.core.config.ClassName"
            module_name: a string containing the fully-qualified module name,
                e.g. "eta.core.config", or None if class_name includes the
                module name. Set module_name = __name__ to load a class from
                the calling module

        Returns:
            cls: the Configurable class
            config_cls: the Config class associated with cls
        """
        if module_name is None:
            # class_name is fully-qualified; split off the module portion
            module_name, class_name = class_name.rsplit(".", 1)

        configurable_cls = etau.get_class(class_name, module_name=module_name)
        config_cls = etau.get_class(
            "%sConfig" % class_name, module_name=module_name)

        return configurable_cls, config_cls
Example #3
0
def _load_dataset(name):
    """Loads the backing document and dynamically-generated sample document
    class for the dataset with the given name.

    Raises:
        DoesNotExistError: if no dataset with the given name exists
    """
    try:
        # pylint: disable=no-member
        _meta = foo.DatasetDocument.objects.get(name=name)
    except DoesNotExist:
        raise DoesNotExistError("Dataset '%s' not found" % name)

    # Build a per-dataset sample document class
    _sample_doc_cls = type(name, (foo.DatasetSampleDocument,), {})

    # Only the non-default (user-added) fields must be registered explicitly
    num_default = len(foos.default_sample_fields(include_private=True))

    for field in _meta.sample_fields[num_default:]:
        subfield = None
        if field.subfield:
            subfield = etau.get_class(field.subfield)

        embedded_doc_type = None
        if field.embedded_doc_type:
            embedded_doc_type = etau.get_class(field.embedded_doc_type)

        _sample_doc_cls.add_field(
            field.name,
            etau.get_class(field.ftype),
            subfield=subfield,
            embedded_doc_type=embedded_doc_type,
            save=False,
        )

    return _meta, _sample_doc_cls
Example #4
0
    def execute(parser, args):
        """Converts the dataset on disk from the input format to the output
        format.
        """
        foud.convert_dataset(
            input_dir=args.input_dir,
            input_type=etau.get_class(args.input_type),
            output_dir=args.output_dir,
            output_type=etau.get_class(args.output_type),
        )
Example #5
0
    def execute(parser, args):
        """Loads the requested dataset and launches the App to view it."""
        if args.zoo_dataset:
            # View a zoo dataset
            dataset = foz.load_zoo_dataset(
                args.zoo_dataset,
                splits=args.splits,
                dataset_dir=args.dataset_dir,
            )
        elif args.dataset_dir:
            # View a dataset from a directory
            dataset = fod.Dataset.from_dir(
                args.dataset_dir,
                etau.get_class(args.type),
                name=args.name,
            )
        elif args.json_path:
            # View a dataset from a JSON file
            dataset = fod.Dataset.from_json(args.json_path, name=args.name)
        else:
            raise ValueError(
                "Either `zoo_dataset`, `dataset_dir`, or `json_path` must be "
                "provided")

        session = fos.launch_app(
            dataset=dataset, port=args.port, remote=args.remote
        )

        _watch_session(session, remote=args.remote)
Example #6
0
    def execute(parser, args):
        """Loads the requested dataset and launches the App to view it.

        Exactly one of the dataset source arguments (`zoo_dataset`,
        `dataset_dir`, `images_dir`, `images_patt`, `videos_dir`,
        `videos_patt`, or `json_path`) must be provided.
        """
        if args.zoo_dataset:
            # View a zoo dataset
            kwargs = _parse_dataset_import_kwargs(args)
            dataset = foz.load_zoo_dataset(
                args.zoo_dataset,
                splits=args.splits,
                dataset_dir=args.dataset_dir,
                **kwargs)
        elif args.dataset_dir:
            # View a dataset from a directory
            dataset_type = etau.get_class(args.type)
            kwargs = _parse_dataset_import_kwargs(args)
            dataset = fod.Dataset.from_dir(
                args.dataset_dir,
                dataset_type,
                name=args.name,
                **kwargs)
        elif args.images_dir:
            # View a directory of images
            dataset = fod.Dataset.from_images_dir(
                args.images_dir, name=args.name)
        elif args.images_patt:
            # View a glob pattern of images
            dataset = fod.Dataset.from_images_patt(
                args.images_patt, name=args.name)
        elif args.videos_dir:
            # View a directory of videos
            # (fixed: this comment previously said "images")
            dataset = fod.Dataset.from_videos_dir(
                args.videos_dir, name=args.name)
        elif args.videos_patt:
            # View a glob pattern of videos
            dataset = fod.Dataset.from_videos_patt(
                args.videos_patt, name=args.name)
        elif args.json_path:
            # View a dataset from a JSON file
            dataset = fod.Dataset.from_json(args.json_path, name=args.name)
        else:
            # Fixed: the previous message listed only three of the seven
            # supported sources
            raise ValueError(
                "Either `zoo_dataset`, `dataset_dir`, `images_dir`, "
                "`images_patt`, `videos_dir`, `videos_patt`, or `json_path` "
                "must be provided")

        session = fos.launch_app(
            dataset=dataset, port=args.port, remote=args.remote
        )

        _watch_session(session, remote=args.remote)
Example #7
0
    def from_dict(cls, d):
        """Loads a :class:`ZooDatasetInfo` from a JSON dictionary.

        Args:
            d: a JSON dictionary

        Returns:
            a :class:`ZooDatasetInfo`
        """
        # @legacy field name
        if "zoo_dataset_cls" in d:
            zoo_dataset = d["zoo_dataset_cls"]
        else:
            zoo_dataset = d["zoo_dataset"]

        # @legacy field name
        if "format_cls" in d:
            dataset_type = d["format_cls"]
        else:
            dataset_type = d["dataset_type"]

        # @legacy dataset types
        _dt = "fiftyone.types.dataset_types"
        if dataset_type.endswith(".ImageClassificationDataset"):
            dataset_type = _dt + ".FiftyOneImageClassificationDataset"
        if dataset_type.endswith(".ImageDetectionDataset"):
            dataset_type = _dt + ".FiftyOneImageDetectionDataset"

        # Instantiate the zoo dataset and dataset type classes
        zoo_dataset = etau.get_class(zoo_dataset)()
        dataset_type = etau.get_class(dataset_type)()

        downloaded_splits = d.get("downloaded_splits", None)
        if downloaded_splits is not None:
            downloaded_splits = {
                split: ZooDatasetSplitInfo.from_dict(split_d)
                for split, split_d in downloaded_splits.items()
            }

        return cls(
            zoo_dataset,
            dataset_type,
            d["num_samples"],
            downloaded_splits=downloaded_splits,
            classes=d.get("classes", None),
        )
Example #8
0
    def _from_dict(cls, d):
        """Creates a :class:`ViewStage` instance from a serialized JSON dict
        representation of it.

        Args:
            d: a JSON dict

        Returns:
            a :class:`ViewStage`
        """
        stage_cls = etau.get_class(d["_cls"])

        # `kwargs` is serialized as a list of (name, value) pairs
        kwargs = dict(d["kwargs"])

        return stage_cls(**kwargs)
Example #9
0
    def from_dict(cls, d):
        """Constructs a Container from a JSON dictionary.

        When the dictionary contains the `"_CLS"` and `cls._ELE_CLS_FIELD`
        keys, they determine the Container class and the underlying element
        class, respectively, so this method may be invoked on any `Container`
        subclass that shares the same `_ELE_CLS_FIELD` setting.

        Otherwise, this method must be invoked on the same concrete
        `Container` subclass from which the JSON was generated.
        """
        if cls._ELE_CLS_FIELD is None:
            raise ContainerError(
                "%s is an abstract container and cannot be used to load a "
                "JSON dictionary. Please use a Container subclass that "
                "defines its `_ELE_CLS_FIELD` member" % cls)

        if "_CLS" not in d:
            # Validates the cls settings
            cls()
            # Parse using provided class
            ele_cls = cls._ELE_CLS
        else:
            if cls._ELE_CLS_FIELD not in d:
                raise ContainerError(
                    "Cannot use %s to reflectively load this container "
                    "because the expected field '%s' was not found in the "
                    "JSON dictionary" % (cls, cls._ELE_CLS_FIELD))

            # Parse reflectively
            cls = etau.get_class(d["_CLS"])
            ele_cls = etau.get_class(d[cls._ELE_CLS_FIELD])

        elements = [ele_cls.from_dict(dd) for dd in d[cls._ELE_ATTR]]
        return cls(**{cls._ELE_ATTR: elements})
Example #10
0
def parse_type(type_str):
    """Parses the type string and returns the associated Type.

    Raises:
        TypeError: if the type string was not a recognized type
    """
    try:
        type_cls = etau.get_class(type_str)
    except ImportError:
        raise TypeError("Unknown type '%s'" % type_str)

    if issubclass(type_cls, Type):
        return type_cls

    raise TypeError("Type '%s' must be a subclass of Type" % type_cls)
Example #11
0
    def execute(parser, args):
        """Creates a persistent dataset from the given source and prints a
        confirmation message.
        """
        dataset_type = etau.get_class(args.type) if args.type else None

        if args.dataset_dir:
            dataset = fod.Dataset.from_dir(
                args.dataset_dir, dataset_type, name=args.name
            )
        elif args.json_path:
            dataset = fod.Dataset.from_json(args.json_path, name=args.name)
        else:
            raise ValueError(
                "Either `dataset_dir` or `json_path` must be provided")

        # Persist the dataset so it survives database cleanup
        dataset.persistent = True

        print("Dataset '%s' created" % dataset.name)
Example #12
0
    def execute(parser, args):
        """Exports the dataset with the given name to disk and prints a
        confirmation message.
        """
        dataset_type = etau.get_class(args.type) if args.type else None

        dataset = fod.load_dataset(args.name)

        if args.export_dir:
            dataset.export(
                args.export_dir,
                label_field=args.label_field,
                dataset_type=dataset_type,
            )
            print(
                "Dataset '%s' exported to '%s'" % (args.name, args.export_dir)
            )
        elif args.json_path:
            dataset.write_json(args.json_path)
            print(
                "Dataset '%s' exported to '%s'" % (args.name, args.json_path)
            )
        else:
            raise ValueError(
                "Either `export_dir` or `json_path` must be provided")