Example #1
    def test_irrelevant_bases(self):
        class NotADataclass:
            # Like torch.nn.Module, this class contains annotations
            # but is not designed to be dataclass'd.
            # This test ensures that such classes, when inherited from,
            # are not accidentally affected by expand_args_fields.
            a: int = 9
            b: int

        class LeftConfigured(Configurable, NotADataclass):
            left: int = 1

        class RightConfigured(NotADataclass, Configurable):
            right: int = 2

        class Outer(Configurable):
            left: LeftConfigured
            right: RightConfigured

            def __post_init__(self):
                run_auto_creation(self)

        outer = Outer(**get_default_args(Outer))
        self.assertEqual(outer.left.left, 1)
        self.assertEqual(outer.right.right, 2)
        with self.assertRaisesRegex(TypeError, "non-default argument"):
            dataclass(NotADataclass)
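The final assertion relies on a plain-Python rule: a class whose annotations place a field without a default after one with a default cannot be turned into a dataclass. A minimal standalone sketch of just that behavior (the class name is made up for illustration):

from dataclasses import dataclass

class PlainBase:
    a: int = 9
    b: int  # non-default annotation after a default one

try:
    dataclass(PlainBase)
except TypeError as err:
    print(err)  # non-default argument 'b' follows default argument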
Example #2
def datafile(
    pattern: Union[str, Callable, None] = None,
    attrs: Optional[Dict[str, Converter]] = None,
    manual: bool = Meta.datafile_manual,
    defaults: bool = Meta.datafile_defaults,
    infer: bool = Meta.datafile_infer,
    **kwargs,
):
    """Synchronize a data class to the specified path."""

    if pattern is None:
        return dataclasses.dataclass(**kwargs)

    if callable(pattern):
        return dataclasses.dataclass(pattern)  # type: ignore

    def decorator(cls=None):
        if dataclasses.is_dataclass(cls):
            dataclass = cls
        else:
            dataclass = dataclasses.dataclass(cls)

        return create_model(
            dataclass,
            attrs=attrs,
            pattern=pattern,
            manual=manual,
            defaults=defaults,
            infer=infer,
        )

    return decorator
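The same three-way dispatch (called with no arguments, applied bare to a class, or called with a pattern string) can be reproduced without the datafiles package. A minimal self-contained sketch of the pattern; the `configurable` name and `_pattern` attribute are hypothetical and not part of the datafile API:

import dataclasses
from typing import Callable, Union

def configurable(pattern: Union[str, Callable, None] = None, **kwargs):
    if pattern is None:
        # @configurable() -> behaves like dataclasses.dataclass(**kwargs)
        return dataclasses.dataclass(**kwargs)
    if callable(pattern):
        # @configurable applied bare to a class
        return dataclasses.dataclass(pattern)

    def decorator(cls):
        # @configurable("...") -> dataclassify if needed, then attach configuration
        if not dataclasses.is_dataclass(cls):
            cls = dataclasses.dataclass(cls)
        cls._pattern = pattern  # hypothetical attribute, for illustration only
        return cls

    return decorator

@configurable("{self.key}.yml")
class Item:
    key: str
    value: int = 0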
Example #3
    def __init_subclass__(cls, byteorder: str = ">"):
        """
        Subclass initializer. This makes the inheriting class a dataclass.
        :param str byteorder: byteorder for binary data
        """
        dataclasses.dataclass(cls)
        type_hints = get_type_hints(cls)

        cls._datafields = []
        cls._datafieldsmap = {}
        cls._formatstring = byteorder

        for field_ in dataclasses.fields(cls):
            _base, _type = datatypemapping[type_hints[field_.name]]
            if "constant" in field_.metadata:
                _base = ConstField
            elif "enum" in field_.metadata:
                _base = EnumField
            setattr(cls, field_.name, _base(name=field_.name))
            if type_hints[field_.name] is types.pad:
                _type = field_.default * _type
            if type_hints[field_.name] in (types.string, types.pascalstring,
                                           str):
                _type = str(field_.metadata["length"]) + _type
            cls._formatstring += _type
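A self-contained sketch of the same technique on a much smaller scale: __init_subclass__ applies dataclass() to each subclass and builds a struct format string from its type hints. The two-entry mapping and the class names are hypothetical stand-ins, not the original datatypemapping:

import dataclasses
import struct
from typing import get_type_hints

_FORMATS = {int: "i", float: "f"}  # hypothetical, tiny stand-in for datatypemapping

class Record:
    def __init_subclass__(cls, byteorder: str = ">"):
        dataclasses.dataclass(cls)
        cls._formatstring = byteorder + "".join(
            _FORMATS[hint] for hint in get_type_hints(cls).values()
        )

class Point(Record):
    x: int = 0
    y: float = 0.0

p = Point(x=3, y=1.5)
packed = struct.pack(Point._formatstring, p.x, p.y)  # format ">if", big-endian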
Example #4
 def _customized_dataclass_transform(cls):
     """Handles final optional dataclass attributes: `parent` and `name`."""
     # Use cls.__dict__ to get annotations of cls itself (no parent class).
     annotations = dict(cls.__dict__.get('__annotations__', {}))
     if 'parent' in annotations or 'name' in annotations:
         raise ValueError(
             f'properties `parent` and `name` are reserved: {annotations}')
     # Add `parent` and `name` default fields at end.
     # We temporarily modify base class __dataclass_fields__ to force desired
     # argument behavior and ordering from dataclass class-transform.
     parent_dataclass_fields = dict(getattr(cls, '__dataclass_fields__',
                                            {}))
     # Remove 'parent' and 'name' from parents because we always want parent and
     # name to show up last in the dataclass args.
     if 'parent' in parent_dataclass_fields:
         cls.__dataclass_fields__.pop('parent')  # pytype: disable=attribute-error
     if 'name' in parent_dataclass_fields:
         cls.__dataclass_fields__.pop('name')  # pytype: disable=attribute-error
     annotations['parent'] = Union[Type["Module"], Type["Scope"],
                                   Type["_Sentinel"], None]
     cls.parent = dataclasses.field(repr=False, default=_unspecified_parent)
     annotations['name'] = str
     cls.name = None  # default value of name is None.
     cls.__annotations__ = annotations
     # Now apply dataclass transform (which operates in-place).
     dataclasses.dataclass(cls, unsafe_hash=True, repr=False)  # pytype: disable=wrong-keyword-args
     cls.__hash__ = _wrap_hash(cls.__hash__)
     # Restore original base class __dataclass_fields__.
     if dataclasses.is_dataclass(cls.__bases__[0]):
         cls.__bases__[0].__dataclass_fields__ = parent_dataclass_fields
Example #5
    def wrap(cls: Type):
        # If no `dataclass` found in the class, dataclassify it automatically.
        if not is_dataclass(cls):
            dataclass(cls)

        g: Dict[str, Any] = {}

        # Create a scope storage used by serde.
        # Each class should get its own scope. Child classes cannot share a scope with the parent class.
        # That's why we need the "scope.cls is not cls" check.
        scope: SerdeScope = getattr(cls, SERDE_SCOPE, None)
        if scope is None or scope.cls is not cls:
            scope = SerdeScope(
                cls, reuse_instances_default=reuse_instances_default, convert_sets_default=convert_sets_default
            )
            setattr(cls, SERDE_SCOPE, scope)

        # Set some globals for all generated functions
        g['cls'] = cls
        g['copy'] = copy
        g['serde_scope'] = scope
        g['SerdeError'] = SerdeError
        g['raise_unsupported_type'] = raise_unsupported_type
        g['enum_value'] = enum_value
        g['is_dataclass'] = is_dataclass
        g['typename'] = typename  # used in union functions
        g['is_instance'] = is_instance  # used in union functions
        g['to_obj'] = to_obj
        if serialize:
            g['serde_custom_class_serializer'] = functools.partial(serde_custom_class_serializer, custom=serializer)

        # Collect types used in the generated code.
        for typ in iter_types(cls):
            if typ is cls or (is_primitive(typ) and not is_enum(typ)):
                continue
            g[typename(typ)] = typ

        # render all union functions
        for union in iter_unions(cls):
            union_args = type_args(union)
            union_key = union_func_name(UNION_SE_PREFIX, union_args)
            add_func(scope, union_key, render_union_func(cls, union_args), g)
            scope.union_se_args[union_key] = union_args

        for f in sefields(cls):
            if f.skip_if:
                g[f.skip_if.name] = f.skip_if
            if f.serializer:
                g[f.serializer.name] = f.serializer

        add_func(scope, TO_ITER, render_to_tuple(cls, serializer), g)
        add_func(scope, TO_DICT, render_to_dict(cls, rename_all, serializer), g)

        logger.debug(f'{cls.__name__}: {SERDE_SCOPE} {scope}')

        return cls
Example #6
def test_fuzz_dataclass(_cls, init, repr, eq, order, unsafe_hash, frozen):
    dataclasses.dataclass(
        _cls=_cls,
        init=init,
        repr=repr,
        eq=eq,
        order=order,
        unsafe_hash=unsafe_hash,
        frozen=frozen,
    )
Example #7
 def __init_subclass__(cls,
                       init: bool = True,
                       repr: bool = True,
                       eq: bool = True,
                       order: bool = False,
                       unsafe_hash: bool = False) -> None:
     super().__init_subclass__()
     if not hasattr(cls, '__annotations__'):
         cls.__annotations__ = {}
     dataclass(frozen=True, init=init, repr=repr, eq=eq, order=order)(cls)
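Hedged usage sketch of the pattern above, with a hypothetical base class name: inheriting from the base is enough to get a frozen dataclass, and class keyword arguments are forwarded to __init_subclass__:

from dataclasses import dataclass, FrozenInstanceError

class Value:
    def __init_subclass__(cls, order: bool = False) -> None:
        super().__init_subclass__()
        if not hasattr(cls, '__annotations__'):
            cls.__annotations__ = {}
        dataclass(frozen=True, order=order)(cls)

class Point(Value, order=True):
    x: int = 0
    y: int = 0

assert Point(1, 2) < Point(2, 0)   # order=True generates the comparison methods
try:
    Point(1, 2).x = 5
except FrozenInstanceError:
    pass                           # frozen=True rejects attribute assignment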
Example #8
    def dataclass_with_defaults(cls):
        """Like `dataclass`, but add some DAPPER-specific things.

        This adds `__init__`, `__repr__`, `__eq__`, ...,
        but also includes inherited defaults,
        ref https://stackoverflow.com/a/58130805,
        and enhances the `assimilate` method.
        """

        def set_field(name, type_, val):
            """Set the inherited (i.e. default, i.e. has value) field."""
            # Ensure annotations
            cls.__annotations__ = getattr(cls, '__annotations__', {})
            # Set annotation
            cls.__annotations__[name] = type_
            # Set value
            setattr(cls, name, val)

        # APPend default fields without overwriting.
        # NB: Don't implement (by PREpending?) non-default args -- too messy!
        for default_params in default_dataclasses:
            # NB: Calling dataclass twice always makes repr=True
            for field in dataclasses.fields(dataclass(default_params)):
                if field.name not in cls.__annotations__:
                    set_field(field.name, field.type, field)

        # Create new class (NB: old/new classes have same id)
        cls = dataclass(cls)

        # The new assimilate method
        def assimilate(self, HMM, xx, yy, desc=None, **stat_kwargs):
            # Progressbar name
            pb_name_hook = self.da_method if desc is None else desc # noqa
            # Init stats
            self.stats = dapper.stats.Stats(self, HMM, xx, yy, **stat_kwargs)
            # Assimilate
            time_start = time.time()
            _assimilate(self, HMM, xx, yy)
            dapper.stats.register_stat(
                self.stats, "duration", time.time()-time_start)

        # Overwrite the assimilate method with the new one
        try:
            _assimilate = cls.assimilate
        except AttributeError as error:
            raise AttributeError(
                "Classes decorated by da_method()"
                " must define a method called 'assimilate'.") from error
        cls.assimilate = functools.wraps(_assimilate)(assimilate)

        # Make self.__class__.__name__ an attrib.
        # Used by xpList.split_attrs().
        cls.da_method = cls.__name__

        return cls
Example #9
    def wrap(cls: Type):
        # If no `dataclass` found in the class, dataclassify it automatically.
        if not is_dataclass(cls):
            dataclass(cls)

        g: Dict[str, Any] = {}

        # Create a scope storage used by serde.
        # Each class should get its own scope. Child classes cannot share a scope with the parent class.
        # That's why we need the "scope.cls is not cls" check.
        scope: SerdeScope = getattr(cls, SERDE_SCOPE, None)
        if scope is None or scope.cls is not cls:
            scope = SerdeScope(cls,
                               reuse_instances_default=reuse_instances_default)
            setattr(cls, SERDE_SCOPE, scope)

        # Set some globals for all generated functions
        g['cls'] = cls
        g['serde_scope'] = scope
        g['SerdeError'] = SerdeError
        g['raise_unsupported_type'] = raise_unsupported_type
        g['typename'] = typename  # used in union functions
        if deserialize:
            g['serde_custom_class_deserializer'] = functools.partial(
                serde_custom_class_deserializer, custom=deserializer)

        # Collect types used in the generated code.
        for typ in iter_types(cls):
            if typ is cls or (is_primitive(typ) and not is_enum(typ)):
                continue
            g[typename(typ)] = typ

        # render all union functions
        for union in iter_unions(cls):
            union_args = type_args(union)
            add_func(scope, union_func_name(UNION_DE_PREFIX, union_args),
                     render_union_func(cls, union_args), g)

        # Collect default values and default factories used in the generated code.
        for f in defields(cls):
            assert f.name
            if has_default(f):
                scope.defaults[f.name] = f.default
            elif has_default_factory(f):
                scope.defaults[f.name] = f.default_factory
            if f.deserializer:
                g[f.deserializer.name] = f.deserializer

        add_func(scope, FROM_ITER, render_from_iter(cls, deserializer), g)
        add_func(scope, FROM_DICT,
                 render_from_dict(cls, rename_all, deserializer), g)

        logger.debug(f'{cls.__name__}: {SERDE_SCOPE} {scope}')

        return cls
Example #10
    def remove_field(cls, name):
        """
        Removes the named field from FlightPoint class.

        :param name: field name
        """
        if name in cls.__annotations__:
            del cls.__init__  # Delete the constructor so it can be rebuilt by the dataclass() call
            delattr(cls, name)
            del cls.__annotations__[name]
            dataclass(cls)
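The trick being relied on: dataclass() only generates methods that are not already in the class dict, so deleting __init__ first lets a second dataclass() call rebuild it against the pruned annotations. A minimal sketch with hypothetical field names (not the FlightPoint schema):

from dataclasses import dataclass

@dataclass
class Record:
    time: float = 0.0
    altitude: float = 0.0

del Record.__init__                       # let dataclass() regenerate the constructor
delattr(Record, 'altitude')               # drop the stored default value
del Record.__annotations__['altitude']    # drop the annotation
dataclass(Record)

Record(time=1.0)        # fine
# Record(altitude=2.0)  # would now raise TypeError: unexpected keyword argument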
Example #11
    def __init_subclass__(cls, **kwargs):
        # initialize .name attribute. HttpRequestHook -> http_request
        if not getattr(cls, "name", None):
            cls.name = re.sub('(?!^)([A-Z]+)', r'_\1', cls.__name__.replace("Hook", "")).lower()
        if cls.name in all_hooks:
            other = all_hooks[cls.name]
            raise RuntimeError(f"Two conflicting hooks for {cls.name}: {cls} and {other}")
        all_hooks[cls.name] = cls

        # a bit hacky: add a default constructor.
        dataclasses.dataclass(cls, repr=False, eq=False)
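A quick self-contained check of the name derivation used above, reproducing the documented HttpRequestHook -> http_request conversion:

import re

cls_name = "HttpRequestHook"
name = re.sub('(?!^)([A-Z]+)', r'_\1', cls_name.replace("Hook", "")).lower()
assert name == "http_request"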
Example #12
 def _complete_objecttype(self, t, definition):
     for prop in definition.properties:
         if prop.target.__type__.name in {
             "schema::ObjectType",
             "schema::ScalarType",
         }:
             target_name = prop.target.name
             type_ = self._py_types[target_name]
             t.__annotations__[prop.name] = self.type_mapping.get(target_name, type_)
             setattr(t, prop.name, type_)
     _dataclasses.dataclass(t)
Example #13
    def wrapper(cls):
        dc = dataclass(cls, **kwargs)
        if issubclass(dc, base):
            return dc

        class Wrapper(dc, base):
            pass

        Wrapper.__name__ = cls.__name__

        return dataclass(Wrapper, **kwargs)
Example #14
def init_subclass(cls: Type[Entity], bases: Tuple[Type[Entity], ...]) -> None:
    if cls not in getattr(cls, '__skip_dataclass__', ()):
        if not hasattr(cls, '__annotations__'):
            cls.__annotations__ = {'id': str}

        for base_cls in bases:
            cls.__annotations__.update({
                k: v
                for k, v in base_cls.__annotations__.items()
                if k not in cls.__annotations__
            })

        dataclasses.dataclass(cls)
Example #15
        def wrap(cls):
            # We don't want to look at parent class
            if "__post_init__" in cls.__dict__:
                raise TypeError(f"{cls} has __post_init__. "
                                "Please use __post_init_post_parse__ instead.")

            if USE_VANILLA_DATACLASS:
                try:
                    post_init_post_parse = cls.__dict__[
                        "__post_init_post_parse__"]
                    logger.info(f"Setting {cls.__name__}.__post_init__ to its "
                                "__post_init_post_parse__")
                    cls.__post_init__ = post_init_post_parse
                except KeyError:
                    pass

                return dataclasses.dataclass(**kwargs)(cls)
            else:
                if ARBITRARY_TYPES_ALLOWED:

                    class Config:
                        arbitrary_types_allowed = ARBITRARY_TYPES_ALLOWED

                    assert "config" not in kwargs
                    kwargs["config"] = Config

                return pydantic.dataclasses.dataclass(cls, **kwargs)
Example #16
def _proxied_class_schema(
    clazz: type,
    base_schema: Optional[Type[marshmallow.Schema]] = None
) -> Type[marshmallow.Schema]:

    try:
        # noinspection PyDataclass
        fields: Tuple[dataclasses.Field, ...] = dataclasses.fields(clazz)
    except TypeError:  # Not a dataclass
        try:
            return class_schema(dataclasses.dataclass(clazz), base_schema)
        except Exception:
            raise TypeError(
                f"{getattr(clazz, '__name__', repr(clazz))} is not a dataclass and cannot be turned into one."
            )

    # Copy all marshmallow hooks and whitelisted members of the dataclass to the schema.
    attributes = {
        k: v
        for k, v in inspect.getmembers(clazz)
        if hasattr(v, "__marshmallow_hook__") or k in MEMBERS_WHITELIST
    }
    # Update the schema members to contain marshmallow fields instead of dataclass fields
    attributes.update((
        field.name,
        field_for_schema(field.type, _get_field_default(field), field.metadata,
                         base_schema),
    ) for field in fields if field.init)

    schema_class = type(clazz.__name__, (_base_schema(clazz, base_schema), ),
                        attributes)
    return cast(Type[marshmallow.Schema], schema_class)
Example #17
    def wrap(type__: Type[Any]) -> Any:
        if isinstance(type__, _TypedDictMeta):
            set_typed_dict_fields(type__)

        else:
            type__ = dataclasses.dataclass(type__)
            OrjsonDefaultTypes.set_type(type__)

        if deserialize_fields is not None:
            DeserializeFields.set_type(type__, deserialize_fields)
        else:
            DeserializeFields.clean_fields(type__)

        if serialize_fields is not None:
            SerializeFields.set_type(type__, serialize_fields)
        else:
            SerializeFields.clean_fields(type__)

        internal_classes = inspect.getmembers(
            type__, lambda a: inspect.isclass(a) and not issubclass(a, type))

        for _, cls in internal_classes:
            wrap(cls)

        return type__
Example #18
def _make_class_swappable(cls: Type[C], **kwargs) -> Type[C]:
    """
    Creates a new class that subclasses ModularComponent.

    Modifies that class to accept constructors for the components passed to the
    decorator.
    """
    def _class_dict(new_fields) -> Dict[str, Any]:
        """
        Sets up the class attributes, along with type annotations.
        """
        return {
            **new_fields,
            '__annotations__': {
                **{k: type(v)
                   for k, v in new_fields.items()}
            },
        }

    # Create SwappableSubcomponents dataclass with components passed to @swappable
    subcomponent_class_name = ModularComponent.SwappableSubcomponents.__name__
    subcomponent_dataclass = dataclass(
        type(subcomponent_class_name, (), _class_dict(kwargs)))

    # Create a new class that subclasses the decorated class. This new class holds
    # the SwappableSubcomponents dataclass created above (list of components that are
    # swappable) and the swappables attribute (the actual swappable component constructors).
    return type(
        # We append "_Swappable" to the new class name for transparency.
        f"{cls.__name__}_Swappable",
        # ModularComponent comes before the class so we can intercept __init__ calls.
        (ModularComponent, cls),  # type: ignore
        # Items in this dictionary are converted to class attributes by type()
        _class_dict({subcomponent_class_name: subcomponent_dataclass}),
    )
Example #19
    def wrapper(config_cls):
        # Add __annotations__ for dataclass
        config_cls.__annotations__ = {
            field_name: parameters[field_name].annotation
            for field_name in whitelist
        }
        # Set default values
        for field_name in whitelist:
            default = parameters[field_name].default
            if default != Parameter.empty:
                setattr(config_cls, field_name, default)

        # Add hashing to support hashing list and dict
        config_cls.__hash__ = param_hash

        # Add non-recursive asdict(). dataclasses.asdict() is recursive
        def asdict(self):
            return {
                field.name: getattr(self, field.name)
                for field in fields(self)
            }

        config_cls.asdict = asdict

        return dataclass(frozen=True)(config_cls)
Example #20
    def wrapper(cls):
        cls = dataclass(cls, **kwargs)
        original_init = cls.__init__

        def __init__(self, *args, **kwargs):
            for name, value in kwargs.items():
                field_type = cls.__annotations__.get(name, None)
                if isinstance(value, dict):
                    if is_dataclass(field_type):
                        # Handle simple Dataclass type
                        new_obj = field_type(**value)
                        kwargs[name] = new_obj
                    if (
                        hasattr(field_type, "__args__")
                        and len(field_type.__args__) == 2
                        and field_type.__args__[-1] == type(None)  # noqa: E721
                    ):
                        # Handle a dataclass type wrapped in Optional (i.e. Union[X, None])
                        field_type = [T for T in field_type.__args__ if is_dataclass(T)]
                        if field_type:
                            field_type = field_type[0]
                        new_obj = field_type(**value)
                        kwargs[name] = new_obj

            original_init(self, *args, **kwargs)

        cls.__init__ = __init__
        return cls
Example #21
def _optionals_impl(cls: type, *args, **kwargs) -> type:
    if dc.is_dataclass(cls):
        assert not args and not kwargs
    else:
        cls = dc.dataclass(*args, **kwargs)(cls)
    fields = []
    for x in dc.fields(cls):
        # https://docs.python.org/3/library/dataclasses.html#dataclasses.Field
        kwargs = {
            k: getattr(x, k)
            for k in [
                'default',
                'default_factory',
                'init',
                'repr',
                'hash',
                'compare',
                'metadata',
            ]
        }
        if isinstance(kwargs['default'], dc._MISSING_TYPE) and isinstance(
                kwargs['default_factory'], dc._MISSING_TYPE):
            kwargs['default'] = None
        type_ = (x.type if getattr(x.type, '_name', None) == 'Optional' else
                 ty.Optional[x.type])
        fields.append((x.name, type_, dc.field(**kwargs)))
    return dc.make_dataclass(
        cls.__name__, fields, **{
            k: getattr(cls.__dataclass_params__, k)
            for k in dir(cls.__dataclass_params__) if not k.startswith('_')
        })
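The rebuild at the end is the key move. A self-contained sketch of that step alone, using made-up field names: dc.make_dataclass is fed field specs rewritten so every field is Optional and defaults to None unless it already had a default (default_factory handling is omitted here for brevity):

import dataclasses as dc
import typing as ty

@dc.dataclass
class Settings:
    host: str
    port: int = 8080

specs = [
    (f.name, ty.Optional[f.type],
     dc.field(default=None if f.default is dc.MISSING else f.default))
    for f in dc.fields(Settings)
]
OptionalSettings = dc.make_dataclass("OptionalSettings", specs)

s = OptionalSettings()
assert s.host is None and s.port == 8080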
Example #22
def _node_type(node_class):

    # One approach for class decorators is to create a dynamic wrapper class, e.g. class NodeWrapper(node_class)
    # However, there are a *lot* of special variables that need to be copied across for everything to work
    # Instead, this approach mutates the class being decorated by adding members directly
    # So we guarantee all the special attributes of the original class are left in place
    # The dataclasses module itself works the same way

    class NodeBuilder(Node):

        explicit_deps: dc.InitVar[tp.List[NodeId]] = None

        bundle: dc.InitVar[NodeNamespace] = None

        def __post_init__(self, explicit_deps: tp.List[NodeId], bundle: NodeNamespace):
            dependencies = self._node_dependencies()
            if explicit_deps:
                dependencies.update({dep_id: DependencyType.HARD for dep_id in explicit_deps})
            object.__setattr__(self, "dependencies", dependencies)
            if bundle:
                object.__setattr__(self, "bundle_result", True)
                object.__setattr__(self, "bundle_namespace", bundle)

    setattr(node_class, "explicit_deps", NodeBuilder.explicit_deps)
    setattr(node_class, "bundle", NodeBuilder.bundle)
    setattr(node_class, "__post_init__", NodeBuilder.__post_init__)

    node_class.__annotations__.update(NodeBuilder.__annotations__)

    return dc.dataclass(frozen=True)(node_class)
Example #23
def dataclass(clz):

    data_clz = dataclasses.dataclass(frozen=True)(clz)
    meta_fields = []
    data_fields = []
    for name, field_info in data_clz.__dataclass_fields__.items():
        is_static = field_info.metadata.get('static', False)
        if is_static:
            meta_fields.append(name)
        else:
            data_fields.append(name)

    def iterate_clz(x):
        meta = tuple(getattr(x, name) for name in meta_fields)
        data = tuple(getattr(x, name) for name in data_fields)
        return data, meta

    def clz_from_iterable(meta, data):
        meta_args = tuple(zip(meta_fields, meta))
        data_args = tuple(zip(data_fields, data))
        kwargs = dict(meta_args + data_args)
        return data_clz(**kwargs)

    jax.tree_util.register_pytree_node(data_clz, iterate_clz,
                                       clz_from_iterable)

    return data_clz
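Hedged usage sketch, assuming jax is installed and the decorator above is in scope as `dataclass`: a field flagged static via metadata stays in the meta tuple, so jax.tree_util.tree_map only touches the data fields:

import dataclasses
import jax
import jax.numpy as jnp

@dataclass          # the decorator defined above, not dataclasses.dataclass
class Particle:
    position: jnp.ndarray
    name: str = dataclasses.field(default="p", metadata={'static': True})

p = Particle(position=jnp.zeros(3))
doubled = jax.tree_util.tree_map(lambda x: x * 2, p)  # maps position, keeps name as-is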
Example #24
 def make_instance_doc(self, attributes):
     """Creates a dataclass instance holding all the attribute definitions"""
     attr = {x.name: x for x in attributes}
     annotations = {x.name: type(x) for x in attributes}
     This = dataclass()(type('doc', self.ibases,
                             dict(__annotations__=annotations)))
     return This(**attr)
Example #25
 def __init_subclass__(cls):
     """Automatically initialize all subclasses as custom dataclasses."""
     # All Flax Modules are dataclasses.  We force this convention since
     # it encourages the stateless behavior needed to clone module instances for
     # functional transformation.  Instead of using a python metaclass, we
     # automatically transform Modules into dataclasses at subclass creation
     # time, and we set the last dataclass arguments to `parent` and `name`.
     cls._add_parent_and_name_attrs()
     dataclasses.dataclass(cls)
     # We wrap user-defined methods including setup and __call__ to enforce
     # a number of different checks and to provide clear error messages.
     cls._verify_single_or_no_compact()
     cls._wrap_module_methods()
     # Set empty class defaults.
     cls._state = _uninitialized_module_internal_state
     cls.scope = None
Example #26
  def wrapped(clz):
    data_clz = dataclasses.dataclass(
        frozen=frozen, unsafe_hash=unsafe_hash)(
            clz)
    meta_fields = []
    data_fields = []
    for name, field_info in data_clz.__dataclass_fields__.items():
      is_pytree_node = field_info.metadata.get('pytree_node', True)
      if is_pytree_node:
        data_fields.append(name)
      else:
        meta_fields.append(name)

    def replace(self, **updates):
      """"Returns a new object replacing the specified fields with new values."""
      return dataclasses.replace(self, **updates)

    data_clz.replace = replace

    def iterate_clz(x):
      meta = tuple(getattr(x, name) for name in meta_fields)
      data = tuple(getattr(x, name) for name in data_fields)
      return data, meta

    def clz_from_iterable(meta, data):
      meta_args = tuple(zip(meta_fields, meta))
      data_args = tuple(zip(data_fields, data))
      kwargs = dict(meta_args + data_args)
      return data_clz(**kwargs)

    jax.tree_util.register_pytree_node(data_clz, iterate_clz, clz_from_iterable)

    def to_state_dict(x):
      state_dict = {
          name: serialization.to_state_dict(getattr(x, name))
          for name in data_fields
      }
      return state_dict

    def from_state_dict(x, state):
      """Restore the state of a data class."""
      state = state.copy()  # copy the state so we can pop the restored fields.
      updates = {}
      for name in data_fields:
        if name not in state:
          raise ValueError(f'Missing field {name} in state dict while restoring'
                           f' an instance of {clz.__name__}')
        value = getattr(x, name)
        value_state = state.pop(name)
        updates[name] = serialization.from_state_dict(value, value_state)
      if state:
        names = ','.join(state.keys())
        raise ValueError(f'Unknown field(s) "{names}" in state dict while'
                         f' restoring an instance of {clz.__name__}')
      return x.replace(**updates)

    serialization.register_serialization_state(data_clz, to_state_dict,
                                               from_state_dict)

    return data_clz
Example #27
def config(clazz):
    """Decorator allowing to transform a python object into a configuration file, and vice versa

    :param clazz: class to decorate
    :return: the decorated class
    """
    return deserialize(serialize(dataclass(clazz)))
Example #28
def use_automatically(cls):
    """ Class decorator used to annotate FaceDancer inner classes. Implies @dataclass.

    This decorator can be placed on inner classes that describe "subordinate"
    objects on USB devices. For example, a USBDevice can have several subordinate
    USBConfigurations, which select the various configurations for that class.

    When placed on a subordinate class, this allows the parent class to automatically
    instantiate the given class during its creation, automatically populating
    the subordinate properties of the relevant device.

    For example, assume we have a FaceDancer class representing a custom USB device::

        @dataclass
        class ExampleDevice(USBDevice):
            product_string : str = "My Example Device"

            @use_automatically
            class DefaultConfiguration(USBConfiguration):
                number : int = 1

    In this case, when an ExampleDevice is instantiated, the USBDevice code knows how
    to instantiate DefaultConfiguration, and will do so automatically.

    Note that this decorator should _only_ be used for subordinate types; and expects that
    the decorated class has no explicitly-declared __init__ method. The __post_init__ mechanism
    of python dataclasses can be overridden to perform any needed initialization.
    """
    return AutoInstantiator(dataclass(cls))
Example #29
    def wrapper(cls):
        cls = dataclass(cls, **kwargs)
        original_init = cls.__init__

        def fill_fields(fillable_cls, fields_dict):
            for name, value in fields_dict.items():
                field_type = fillable_cls.__annotations__.get(name, None)
                if is_dataclass(field_type) and isinstance(value, dict):
                    new_obj = field_type(**value)
                    fields_dict[name] = new_obj
                elif isinstance(value, List) and hasattr(field_type, "__args__") and is_dataclass(field_type.__args__[0]):
                    list_field_type = field_type.__args__[0]
                    tmp_list = list()
                    for inner_dict in value:
                        if isinstance(inner_dict, dict):
                            inner_dataclass = list_field_type(**inner_dict)
                            inner_dataclass = fill_fields(inner_dataclass, inner_dict)
                            tmp_list.append(inner_dataclass)
                    fields_dict[name] = tmp_list
            return fillable_cls

        def __init__(self, *args, **kwargs):
            fill_fields(self, kwargs)
            original_init(self, *args, **kwargs)
        cls.__init__ = __init__
        return cls
Example #30
def main():
    dic()
    sets()
    dataclass()

    box = box_count(cookie='thin mints', count=5)
    print(box.cookie)
    # box.cooky = 'lemonades'  # no error: adds cooky as a new attribute, unless __slots__ is defined and excludes it
    # print(box.cooky)

    donut = food(name='donut', calories=300, sugar=20, fat=20)
    ass = food(name='booty', calories=0, sugar=0, fat=400)

    print()
    print(donut.name)
    print(ass.fat)