def test_renaming(cl_and_vals, data):
    """A class round-trips when one attribute is renamed in the payload.

    A random attribute is renamed to the reserved word ``"class"`` (which
    cannot be a Python attribute name) via ``override(rename=...)`` on both
    the structure and unstructure hooks.
    """
    converter = Converter()
    cl, vals = cl_and_vals

    # Draw one attribute at random and rename it in the serialized form.
    picked = data.draw(sampled_from(fields(cl)))
    overrides = {picked.name: override(rename="class")}

    converter.register_structure_hook(
        cl, make_dict_structure_fn(cl, converter, **overrides)
    )
    converter.register_unstructure_hook(
        cl, make_dict_unstructure_fn(cl, converter, **overrides)
    )

    original = cl(*vals)
    payload = converter.unstructure(original)
    # The renamed key must be present in the raw dict...
    assert "class" in payload
    # ...and structuring it back must reproduce the original instance.
    assert converter.structure(payload, cl) == original
def test_nodefs_generated_unstructuring(cl_and_vals):
    """Test omitting default values on a per-attribute basis."""
    converter = Converter()
    cl, vals = cl_and_vals
    attr_is_default = False
    # Find the first attribute with a default and build a hook that omits
    # *only that attribute* when it equals its default.
    for attr, val in zip(cl.__attrs_attrs__, vals):
        if attr.default is not NOTHING:
            fn = make_dict_unstructure_fn(
                cl, converter, **{attr.name: override(omit_if_default=True)}
            )
            # Record whether the drawn value happens to equal the default,
            # which is what decides whether the key should be omitted below.
            if attr.default == val:
                attr_is_default = True
            break
    else:
        # No attribute with a default in this generated example; discard it.
        assume(False)
    converter.register_unstructure_hook(cl, fn)
    inst = cl(*vals)
    res = converter.unstructure(inst)
    # Only the equal-to-default case is asserted; when the value differs the
    # key's presence is not checked here.
    if attr_is_default:
        assert attr.name not in res
def init_converter():
    """Build a ``Converter`` with datetime (de)serialization hooks.

    Datetimes are serialized as ISO-8601 strings with a trailing ``'Z'``
    and parsed back with ``parser.parse`` (dateutil-style, which accepts
    that suffix).
    """
    converter = Converter()

    def _datetime_to_str(dt):
        # NOTE(review): the 'Z' is appended unconditionally — presumably the
        # datetimes handled here are naive UTC; confirm against callers.
        return dt.isoformat() + 'Z'

    def _str_to_datetime(ts, _):
        return parser.parse(ts)

    converter.register_unstructure_hook(datetime, _datetime_to_str)
    converter.register_structure_hook(datetime, _str_to_datetime)
    return converter
def with_datetime_hooks(converter: cattr.Converter) -> cattr.Converter:
    """Register datetime/date (un)structure hooks and return the converter.

    Unstructuring delegates to ``datetime_to_serializable`` /
    ``date_to_serializable``; structuring wraps the inverse helpers so the
    two-argument hook signature ``(value, type)`` is satisfied.
    """
    def _structure_datetime(date_str, _):
        return serializable_to_datetime(date_str)

    def _structure_date(date_str, _):
        return serializable_to_date(date_str)

    converter.register_unstructure_hook(
        datetime.datetime, datetime_to_serializable
    )
    converter.register_structure_hook(datetime.datetime, _structure_datetime)
    converter.register_unstructure_hook(datetime.date, date_to_serializable)
    converter.register_structure_hook(datetime.date, _structure_date)
    return converter
def create_cattrs_converter():
    """Create a ``Converter`` wired up with the module's custom hooks."""
    converter = Converter()

    # (type, hook) pairs, registered in this exact order.
    structure_hooks = (
        (bool, _structure_bool),
        (string_type, _structure_string),
        (Model, _structure_schematics),
        (BaseType, _structure_basetype),
        (datetime, _structure_datetime),
    )
    unstructure_hooks = (
        (Model, _unstructure_schematics),
        (datetime, _unstructure_datetime),
        (BaseType, _unstructure_basetype),
    )

    for tp, hook in structure_hooks:
        converter.register_structure_hook(tp, hook)
    for tp, hook in unstructure_hooks:
        converter.register_unstructure_hook(tp, hook)

    return converter
def _register_converter(converter: Converter) -> Converter:
    """Register all custom (un)structure hooks on *converter* and return it.

    Hooks cover Snowflake ids, bitwise permission flags, ``Union[int, str]``
    and any ``Union`` that contains the ``UNKNOWN_TYPE`` sentinel.
    """
    # Snowflakes arrive as strings; coerce through int first.
    converter.register_structure_hook(
        base_models.Snowflake, lambda d, _: base_models.Snowflake(int(d)))
    converter.register_structure_hook(
        permission_models.BitwisePermissionFlags,
        lambda d, _: permission_models.BitwisePermissionFlags(int(d)),
    )

    def unstruct_permissions(
            d: permission_models.BitwisePermissionFlags) -> str:
        # Permission flags are serialized as the stringified numeric value.
        return str(d.value)

    converter.register_unstructure_hook(
        permission_models.BitwisePermissionFlags, unstruct_permissions)

    def struct_int_or_str(d: typing.Any, _: object) -> typing.Union[int, str]:
        # Prefer int when the payload parses as one, else keep it a string.
        try:
            return int(d)
        except ValueError:
            return str(d)

    converter.register_structure_hook(typing.Union[int, str], struct_int_or_str)

    UNKNOWN_TYPE = base_models.UNKNOWN_TYPE

    # TODO: use the newer typing helpers (typing.get_origin / get_args).
    def is_unknown(cls: type) -> bool:
        """Return True when *cls* is a Union containing UNKNOWN_TYPE."""
        # Bug fix: getattr without a default raised AttributeError for any
        # non-generic type this predicate was probed with; default to None/()
        # so plain classes simply return False.
        return (getattr(cls, '__origin__', None) is typing.Union
                and UNKNOWN_TYPE in getattr(cls, '__args__', ()))

    def unknown_function(data: object, cls: typing.Type[typing.Any]) -> object:
        """Structure *data* as *cls* with the UNKNOWN_TYPE member removed."""
        args: typing.Tuple[typing.Type[typing.Any], ...] = getattr(
            cls, '__args__', ())
        remaining = [n for n in args if n != UNKNOWN_TYPE]
        if len(args) == 2:
            # Exactly one real member remains; structure directly as it.
            return converter.structure(data, remaining[0])
        # Re-assemble the Union without the sentinel. (Renamed from `type`,
        # which shadowed the builtin.)
        target: typing.Any = typing.Union[tuple(remaining)]
        return converter.structure(data, target)

    converter.register_structure_hook_func(is_unknown, unknown_function)

    models.setup_cattrs(converter)
    return converter
def test_omitting():
    """An attribute overridden with ``omit=True`` is left out entirely."""
    converter = Converter()

    @define
    class A:
        a: int
        b: int = field(init=False)

    # Generate an unstructure hook that drops `b` unconditionally.
    hook = make_dict_unstructure_fn(A, converter, b=override(omit=True))
    converter.register_unstructure_hook(A, hook)

    assert converter.unstructure(A(1)) == {"a": 1}
def test_individual_overrides(cl_and_vals):
    """
    Test omitting default values on a per-class basis, but with individual
    overrides.
    """
    converter = Converter()
    cl, vals = cl_and_vals
    # Find the first attribute that has a default; skip examples without one.
    for attr, val in zip(adapted_fields(cl), vals):
        if attr.default is not NOTHING:
            break
    else:
        assume(False)
    # This attribute gets an individual override forcing it to stay present
    # even though the class-wide setting is omit_if_default=True.
    chosen_name = attr.name
    converter.register_unstructure_hook(
        cl,
        make_dict_unstructure_fn(
            cl,
            converter,
            omit_if_default=True,
            **{attr.name: override(omit_if_default=False)}
        ),
    )
    inst = cl(*vals)
    res = converter.unstructure(inst)
    # Guard against hypothesis/attrs internals leaking into the result.
    assert "Hyp" not in repr(res)
    assert "Factory" not in repr(res)
    for attr, val in zip(adapted_fields(cl), vals):
        if attr.name == chosen_name:
            # The individual override wins over the class-wide omission.
            assert attr.name in res
        elif attr.default is not NOTHING:
            if not isinstance(attr.default, Factory):
                # Plain default: key omitted iff the value equals it.
                if val == attr.default:
                    assert attr.name not in res
                else:
                    assert attr.name in res
            else:
                # Factory default: compute the effective default; a
                # takes_self factory receives the instance.
                if attr.default.takes_self:
                    if val == attr.default.factory(inst):
                        assert attr.name not in res
                    else:
                        assert attr.name in res
                else:
                    if val == attr.default.factory():
                        assert attr.name not in res
                    else:
                        assert attr.name in res
def test_unmodified_generated_unstructuring(cl_and_vals):
    """A generated hook with no overrides matches default unstructuring."""
    converter = Converter()
    cl, vals = cl_and_vals

    # Generate the hook first, then capture the baseline result produced
    # without it being registered.
    generated_hook = make_dict_unstructure_fn(cl, converter)
    instance = cl(*vals)
    baseline = converter.unstructure(instance)

    converter.register_unstructure_hook(cl, generated_hook)
    assert converter.unstructure(instance) == baseline
def test_individual_overrides(cl_and_vals):
    """
    Test omitting default values on a per-class basis, but with individual
    overrides.
    """
    converter = Converter()
    cl, vals = cl_and_vals

    # Pick the first attribute with a default; discard examples without one.
    chosen = next(
        (a for a in cl.__attrs_attrs__ if a.default is not NOTHING), None
    )
    assume(chosen is not None)

    hook = make_dict_unstructure_fn(
        cl,
        converter,
        omit_if_default=True,
        **{chosen.name: override(omit_if_default=False)},
    )
    converter.register_unstructure_hook(cl, hook)

    inst = cl(*vals)
    res = converter.unstructure(inst)

    for attr, val in zip(cl.__attrs_attrs__, vals):
        if attr is chosen:
            # The per-attribute override forces this key to stay present.
            assert attr.name in res
            continue
        if attr.default is NOTHING:
            continue
        # Effective default: call the factory if there is one.
        effective_default = (
            attr.default.factory()
            if isinstance(attr.default, Factory)
            else attr.default
        )
        if val == effective_default:
            assert attr.name not in res
        else:
            assert attr.name in res
def test_nodefs_generated_unstructuring_cl(cl_and_vals):
    """Test omitting default values on a per-class basis."""
    converter = Converter()
    cl, vals = cl_and_vals
    # Skip examples in which no attribute has a default at all.
    for attr, val in zip(cl.__attrs_attrs__, vals):
        if attr.default is not NOTHING:
            break
    else:
        assume(False)

    converter.register_unstructure_hook(
        cl, make_dict_unstructure_fn(cl, converter, omit_if_default=True)
    )

    inst = cl(*vals)
    res = converter.unstructure(inst)

    for attr, val in zip(cl.__attrs_attrs__, vals):
        if attr.default is not NOTHING:
            if not isinstance(attr.default, Factory):
                # Plain default: key omitted iff value equals the default.
                if val == attr.default:
                    assert attr.name not in res
                else:
                    assert attr.name in res
            else:
                # The default is a factory, but might take self.
                if attr.default.takes_self:
                    # Bug fix: a takes_self factory receives the *instance*,
                    # not the class — previously this called factory(cl),
                    # which computed the wrong expected default (cf. the
                    # sibling test that correctly uses the instance).
                    if val == attr.default.factory(inst):
                        assert attr.name not in res
                    else:
                        assert attr.name in res
                else:
                    if val == attr.default.factory():
                        assert attr.name not in res
                    else:
                        assert attr.name in res
import pickle

# Protocol-2 / bytes-encoded pickle helpers, partial-applied for reuse.
# NOTE(review): pickle on untrusted input is unsafe — presumably these are
# only used on locally produced data; confirm against callers.
pickle_loads = f.partial(pickle.loads, encoding='bytes')
pickle_load = f.partial(pickle.load, encoding='bytes')
pickle_dumps = f.partial(pickle.dumps, protocol=2)
pickle_dump = f.partial(pickle.dump, protocol=2)


def u2c(value):
    """Convert underscore string to capitalized string."""
    # Make a list of capitalized words and underscores to be preserved
    # (an empty split segment means a run of underscores: keep a '_').
    capitalized_words = [w.capitalize() if w else '_' for w in value.split('_')]
    return "".join(capitalized_words)


# Shared converter with numpy array support: arrays round-trip through a
# {dtype, data (raw bytes), shape} dict.
CUSTOM_CVT = Converter()
CUSTOM_CVT.register_unstructure_hook(
    np.ndarray,
    lambda a: dict(dtype=a.dtype.name, data=a.tobytes(), shape=list(a.shape)))
CUSTOM_CVT.register_structure_hook(
    np.ndarray,
    lambda a, _: np.frombuffer(a['data'], dtype=a['dtype']).reshape(
        tuple(a['shape'])))


def to_dict(obj):
    """Convert object to dict"""
    global CUSTOM_CVT
    return CUSTOM_CVT.unstructure(obj)


def from_dict(d, cls, compatible=True):
    """Convert dict to obj of class."""
    # NOTE(review): body continues beyond this excerpt.
    global CUSTOM_CVT
def setup_cattrs(converter: cattr.Converter) -> None:
    """Register this module's unstructure hooks on the given converter."""
    # A named function (rather than passing `str` directly) keeps mypy
    # happy about the hook's inferred signature.
    def _snowflake_to_str(struct: Snowflake) -> str:
        return str(struct)

    converter.register_unstructure_hook(Snowflake, _snowflake_to_str)
# Shared schematics DateTimeType used by the datetime structure hook below.
datetime_type = DateTimeType()


def _structure_datetime(data, cls):
    """Parse *data* into a datetime via the schematics type; reject empty."""
    if not data:
        raise ValueError("datetime is empty")
    return datetime_type.to_native(data)


def _unstructure_datetime(data):
    """Serialize a datetime as its ISO-8601 string."""
    return data.isoformat()


# Module-level converter with the datetime hooks pre-registered.
converter = Converter()
converter.register_structure_hook(datetime, _structure_datetime)
converter.register_unstructure_hook(datetime, _unstructure_datetime)


def validate_len(instance, attribute, value):
    """attrs validator: reject values longer than 100 characters."""
    if len(value) > 100:
        raise ValueError("val should <= 100")


@attr.s
class Artist2:
    # name: length-validated string (<= 100 chars).
    name = attr.ib(type=str, validator=validate_len)


@attr.s
class Album2:
    # title: length-validated string (<= 100 chars).
    title = attr.ib(type=str, validator=validate_len)