def convert(self, ctx: Context) -> t.Any:
    """Convert a mapping value according to *ctx.type* (a #MapType).

    Keys and values are converted recursively through #Context.push() using
    the #MapType.key_type / #MapType.value_type when they are set; otherwise
    they pass through unchanged.

    Raises a type error if *ctx.value* is not a #t.Mapping.
    """
    # NOTE(review): removed a large block of commented-out dead code that
    # referenced a stale `context` variable and obsolete `Dict`/`find_orig_base`
    # helpers not used anywhere in this function.
    assert isinstance(ctx.type, MapType)
    if not isinstance(ctx.value, t.Mapping):
        raise ctx.type_error(expected=t.Mapping)
    result = dict()
    # TODO (@NiklasRosenstein): Check MapType.impl_hint
    for key, value in ctx.value.items():
        if ctx.type.key_type is not None:
            key = ctx.push(ctx.type.key_type, key, f'Key({key!r})', ctx.field).convert()
        if ctx.type.value_type is not None:
            # Note: the (possibly converted) key is used as the location element.
            value = ctx.push(ctx.type.value_type, value, key, ctx.field).convert()
        result[key] = value
    return result
def convert(self, ctx: Context) -> t.Any:
    """Convert *ctx.value* by trying each member of an #ImplicitUnionType in
    order and returning the first successful conversion.

    If every member type rejects the value, a conversion error is raised that
    aggregates the per-member error messages. When debug logging is enabled,
    the captured tracebacks are additionally logged.
    """
    assert isinstance(ctx.type, ImplicitUnionType)
    errors: t.List[str] = []
    tracebacks: t.List[str] = []
    # NOTE(review): assumes _get_log_level() returns a value where >= DEBUG
    # means "debug enabled" — confirm against _get_log_level's contract
    # (logging levels are normally compared the other way around).
    is_debug = _get_log_level(logger) >= logging.DEBUG
    for type_ in ctx.type.types:
        try:
            return ctx.push(type_, ctx.value, None, ctx.field).convert()
        except ConversionError as exc:
            errors.append(f'{type_}: {exc}')
            if is_debug:
                tracebacks.append(traceback.format_exc())
    if is_debug:
        # BUGFIX: message previously read "individiual".
        logger.debug(
            f'Error converting `{ctx.type}` ({ctx.direction.name}). This message is logged in '
            f'conjunction with a ConversionTypeError to provide information about the tracebacks '
            f'that have been caught when converting the individual union members. This might not '
            f'indicate an error in the program if the exception is handled.\n' + '\n'.join(tracebacks))
    errors_text = '\n'.join(errors)
    raise ctx.error(_indent_exc(
        f'expected {ctx.type}, got `{type(ctx.value).__name__}`\n{errors_text}'))
def convert(self, ctx: Context) -> t.Any:
    """Serialize a #duration to its string form, or parse a string back into
    a #duration, depending on *ctx.direction*."""
    assert isinstance(ctx.type, ConcreteType)
    serializing = ctx.direction == Direction.serialize
    if serializing:
        if not isinstance(ctx.value, duration):
            raise ctx.type_error(expected=duration)
        return str(ctx.value)
    if ctx.direction == Direction.deserialize:
        if not isinstance(ctx.value, str):
            raise ctx.type_error(expected=str)
        # TODO (@NiklasRosenstein): Reraise as ConversionError?
        return duration.parse(ctx.value)
def convert(self, ctx: Context) -> t.Any:
    """Apply a registered plain-type adapter to convert *ctx.value* from its
    runtime type to the target #ConcreteType.

    Serialization, or a strict field, selects the strict adapter table.
    """
    source_type = type(ctx.value)
    target_type = preconditions.check_instance_of(ctx.location.type, ConcreteType).type
    fieldinfo = ctx.get_annotation(A.fieldinfo) or A.fieldinfo()
    strict = ctx.direction == Direction.serialize or fieldinfo.strict
    adapters = self._strict_adapters if strict else self._nonstrict_adapters
    adapter = adapters.get((source_type, target_type))
    if adapter is None:
        raise ctx.error(
            f'unable to {ctx.direction.name} {source_type.__name__} -> {target_type.__name__}')
    try:
        return adapter(ctx.value)
    except ValueError as exc:
        raise ctx.error(str(exc))
def to_python(self, value: BufferedBinaryStream, context: Context) -> Any:
    """Read a struct-formatted record from *value* and build the target type
    from the unpacked fields."""
    parts, _ = zip(*self.get_format_parts(context))
    fmt = ''.join(parts)
    size = struct.calcsize(fmt)
    with value.try_read(size) as data:
        unpacked = struct.unpack(fmt, data)
        # TODO(NiklasRosenstein): Unpack nested datamodels.
        return context.type(*unpacked)
def _serialize(self, ctx: Context, type_: ObjectType) -> t.Dict[str, t.Any]:
    """Serialize *ctx.value* (an instance of the schema's Python type) into a
    JSON-like dictionary.

    Handles flat (inlined) group fields, field aliases and a remainder field
    whose items are exploded into the result. Fields whose value equals their
    default are skipped.

    Raises a type error if *ctx.value* is not an instance of the schema type,
    and a conversion error when a remainder-field key conflicts with an
    already-serialized key.
    """
    skip_default_values = True
    if not isinstance(ctx.value, type_.schema.python_type):
        raise ctx.type_error(expected=type_.schema.python_type)

    # Serialize all flat (group) fields up front; their members are merged
    # into the result via the flattened schema below.
    groups: t.Dict[str, t.Dict[str, t.Any]] = {}
    for field in type_.schema.fields.values():
        if not field.flat:
            continue
        value = getattr(ctx.value, field.name)
        if skip_default_values and value == field.get_default():
            continue
        groups[field.name] = ctx.push(field.type, value, field.name, field).convert()

    result: t.Dict[str, t.Any] = {}
    flattened = type_.schema.flattened()
    for name, flat_field in flattened.fields.items():
        alias = (flat_field.field.aliases or [name])[0]
        if not flat_field.group:
            value = getattr(ctx.value, name)
            # BUGFIX: was `skip_default_values and value != default`, which
            # would serialize nothing at all if skip_default_values were ever
            # set to False.
            if not skip_default_values or value != flat_field.field.get_default():
                result[alias] = ctx.push(flat_field.field.type, value, name, flat_field.field).convert()
        # BUGFIX: use .get() — the group may be absent from `groups` entirely
        # when the whole flat field was skipped for holding its default value.
        elif alias in groups.get(flat_field.group or '$', {}):
            # May not be contained if we skipped default values.
            result[alias] = groups[flat_field.group or '$'][alias]

    # Explode values from the remainder field into the result.
    if flattened.remainder_field:
        assert isinstance(flattened.remainder_field.type, MapType)
        remnants = ctx.push(
            flattened.remainder_field.type,
            getattr(ctx.value, flattened.remainder_field.name),
            None, flattened.remainder_field).convert()
        for key, value in remnants.items():
            if key in result:
                raise ctx.error(
                    f'key {key!r} of remainder field {flattened.remainder_field.name!r} cannot be exploded '
                    'into resulting JSON object because of a conflict.')
            result[key] = value

    return result
def convert(self, ctx: Context) -> t.Any:
    """Convert between a concrete Python type and its string representation
    via the converter's #from_string / #to_string callables."""
    assert isinstance(ctx.type, ConcreteType)
    if not ctx.direction.is_deserialize():
        # Serialization: require an instance of the concrete type and
        # stringify it (str() unless a to_string callable is configured).
        if not isinstance(ctx.value, ctx.type.type):
            raise ctx.type_error(expected=ctx.type.type)
        if self.to_string is None:
            return str(ctx.value)
        return self.to_string(ctx.type.type, ctx.value)
    if not isinstance(ctx.value, str):
        raise ctx.type_error(expected=str)
    try:
        return self.from_string(ctx.type.type, ctx.value)
    except (TypeError, ValueError) as exc:
        raise ctx.error(str(exc))
def convert(self, ctx: Context) -> t.Any:
    """Convert every element of a collection recursively, producing the
    collection's Python type on deserialization and the JSON collection type
    on serialization."""
    assert isinstance(ctx.type, CollectionType)
    if ctx.direction == Direction.serialize:
        if not isinstance(ctx.value, ctx.type.python_type):
            raise ctx.type_error(expected=ctx.type.python_type)
        constructor = self.json_type
    elif ctx.direction == Direction.deserialize:
        # Strings and byte-like objects are technically collections but must
        # not be deserialized element-wise.
        if not isinstance(ctx.value, t.Collection) or isinstance(
                ctx.value, (str, bytes, bytearray, memoryview)):
            raise ctx.type_error(expected=t.Collection)
        constructor = ctx.type.python_type
    else:
        assert False, ctx.direction
    converted = (
        ctx.push(ctx.type.item_type, item, index, ctx.field).convert()
        for index, item in enumerate(ctx.value))
    return constructor(converted)  # type: ignore
def convert(self, ctx: Context) -> t.Any:
    """Convert between #decimal.Decimal and strings.

    In non-strict mode, ints and floats are also accepted on deserialization.
    An #A.precision annotation, when present, supplies the decimal context.
    """
    preconditions.check_instance_of(ctx.type, ConcreteType)
    preconditions.check_argument(
        t.cast(ConcreteType, ctx.type).type is decimal.Decimal, 'must be Decimal')
    context = Optional(ctx.get_annotation(A.precision)) \
        .map(lambda b: b.to_context()).or_else(None)
    fieldinfo = ctx.get_annotation(A.fieldinfo) or A.fieldinfo()
    if ctx.direction != Direction.deserialize:
        if not isinstance(ctx.value, decimal.Decimal):
            raise ctx.type_error(expected=decimal.Decimal)
        return str(ctx.value)
    acceptable = isinstance(ctx.value, str) or (
        not fieldinfo.strict and isinstance(ctx.value, (int, float)))
    if acceptable:
        return decimal.Decimal(ctx.value, context)
    raise ctx.type_error(expected='str')
def convert(self, ctx: Context) -> t.Any:
    """Serialize enum members to their value (for #enum.IntEnum) or their
    possibly-aliased name (for plain #enum.Enum); deserialize from an int
    value or a name/alias string accordingly.

    Raises a conversion error if the value does not identify a member.
    """
    assert isinstance(ctx.type, ConcreteType)
    assert issubclass(ctx.type.type, enum.Enum)

    if ctx.direction == Direction.serialize:
        if not isinstance(ctx.value, ctx.type.type):
            raise ctx.type_error(expected=ctx.type.type)
        if issubclass(ctx.type.type, enum.IntEnum):
            return ctx.value.value
        if issubclass(ctx.type.type, enum.Enum):
            name = ctx.value.name
            # An #A.alias annotation on the member takes precedence over its
            # Python name.
            alias = ctx.annotations.get_field_annotation(ctx.type.type, name, A.alias)
            if alias and alias.aliases:
                return alias.aliases[0]
            return name

    elif ctx.direction == Direction.deserialize:
        if issubclass(ctx.type.type, enum.IntEnum):
            if not isinstance(ctx.value, int):
                raise ctx.type_error(expected=int)
            try:
                return ctx.type.type(ctx.value)
            except ValueError:
                # BUGFIX: raise a conversion error instead of leaking the raw
                # ValueError, consistent with the Enum branch below.
                raise ctx.error(
                    f'{ctx.value!r} is not a member of enumeration {ctx.type}')
        if issubclass(ctx.type.type, enum.Enum):
            if not isinstance(ctx.value, str):
                raise ctx.type_error(expected=str)
            # Aliases are checked before the member's Python name.
            for enum_value in ctx.type.type:
                alias = ctx.annotations.get_field_annotation(
                    ctx.type.type, enum_value.name, A.alias)
                if alias and ctx.value in alias.aliases:
                    return enum_value
            try:
                return ctx.type.type[ctx.value]
            except KeyError:
                raise ctx.error(
                    f'{ctx.value!r} is not a member of enumeration {ctx.type}')

    assert False
def convert(self, ctx: Context) -> t.Any:
    """Convert between date/time/datetime objects and formatted strings.

    An #A.datefmt annotation, when present, overrides the default format for
    the concrete type. On deserialization, a value that is already of the
    target type is passed through unchanged.
    """
    preconditions.check_instance_of(ctx.type, ConcreteType)
    type_ = t.cast(ConcreteType, ctx.type).type
    default_fmt = (
        self.DEFAULT_DATE_FMT if type_ == datetime.date else
        self.DEFAULT_TIME_FMT if type_ == datetime.time else
        self.DEFAULT_DATETIME_FMT if type_ == datetime.datetime else None)
    datefmt = ctx.get_annotation(A.datefmt) or default_fmt
    assert datefmt is not None
    if ctx.direction != Direction.deserialize:
        if not isinstance(ctx.value, type_):
            raise ctx.type_error(expected=type_)
        return datefmt.format(ctx.value)
    if isinstance(ctx.value, type_):
        return ctx.value
    if isinstance(ctx.value, str):
        # TODO(NiklasRosenstein): Rethrow as ConversionError
        parsed = datefmt.parse(type_, ctx.value)
        assert isinstance(parsed, type_)
        return parsed
    raise ctx.type_error(expected=f'str|{type_.__name__}')
def _deserialize(self, ctx: Context, type_: ObjectType) -> t.Any:
    """Deserialize a mapping into an instance of the schema's Python type.

    Handles field aliases, flat (grouped) fields, an optional remainder field
    that captures unrecognized keys, and the #A.enable_unknowns setting which
    controls whether leftover keys raise an error or are reported through a
    callback.
    """
    enable_unknowns = ctx.settings.get(A.enable_unknowns)
    typeinfo = ctx.get_annotation(A.typeinfo)
    if not isinstance(ctx.value, t.Mapping):
        raise ctx.type_error(expected=t.Mapping)
    # '$' is the root group; once populated, its items become the keyword
    # arguments for the constructor call at the end.
    groups: t.Dict[str, t.Dict[str, t.Any]] = {'$': {}}
    # Collect keys into groups.
    used_keys: t.Set[str] = set()
    flattened = type_.schema.flattened()
    for name, flat_field in flattened.fields.items():
        aliases = flat_field.field.aliases or [name]
        for alias in aliases:
            if alias in ctx.value:
                # Only the first alias present in the input is consumed.
                value = ctx.value[alias]
                groups.setdefault(flat_field.group or '$', {})[name] = \
                    ctx.push(flat_field.field.type, value, name, flat_field.field).convert()
                used_keys.add(alias)
                break
    # Move captured groups into the root group ($).
    for group, values in groups.items():
        if group == '$':
            continue
        field = type_.schema.fields[group]
        groups['$'][group] = ctx.push(field.type, values, group, field).convert()
    # Collect unknown fields into the remainder field if there is one.
    if flattened.remainder_field:
        assert isinstance(flattened.remainder_field.type, MapType)
        remanants = {k: ctx.value[k] for k in ctx.value.keys() - used_keys}
        groups['$'][flattened.remainder_field.name] = ctx.push(
            flattened.remainder_field.type, remanants, None,
            flattened.remainder_field).convert()
        # The remainder field swallows everything, so no key is "unused".
        used_keys.update(ctx.value.keys())
    if not enable_unknowns or (enable_unknowns and enable_unknowns.callback):
        unused_keys = ctx.value.keys() - used_keys
        if unused_keys and not enable_unknowns:
            raise ConversionError(
                f'unknown keys found while deserializing {ctx.type}: {unused_keys}',
                ctx.location)
        elif unused_keys and enable_unknowns and enable_unknowns.callback:
            enable_unknowns.callback(ctx, set(unused_keys))
    try:
        # A typeinfo.deserialize_as override takes precedence over the
        # schema's own Python type.
        return ((typeinfo.deserialize_as if typeinfo else None) or
                type_.schema.python_type)(**groups['$'])
    except TypeError as exc:
        raise ctx.error(str(exc))
def calc_size(type_: Type[T], registry: Registry = None) -> int:
    """Return the size in bytes of the packed binary representation of *type_*.

    Falls back to the module-level registry when *registry* is not given.
    """
    active_registry = registry or globals()['registry']
    context = Context.new(active_registry, type_, None)
    converter = context.get_converter()
    assert isinstance(converter, _BinaryConverter)
    format_parts, _ = zip(*converter.get_format_parts(context))
    fmt = ''.join(format_parts)
    return struct.calcsize(fmt)
def to_bytes(value: T, type_: Type[T] = None, registry: Registry = None) -> bytes:
    """Serialize *value* to its packed binary representation.

    *type_* defaults to the runtime type of *value*; *registry* defaults to
    the module-level registry.
    """
    if type_ is None:
        type_ = type(value)
    active_registry = registry or globals()['registry']
    return Context.new(active_registry, type_, value).from_python()
def from_bytes(type_: Type[T], data: bytes, registry: Registry = None) -> T:
    """Deserialize an instance of *type_* from the packed bytes in *data*.

    *registry* defaults to the module-level registry.
    """
    active_registry = registry or globals()['registry']
    stream = BufferedBinaryStream(io.BytesIO(data))
    return Context.new(active_registry, type_, stream).to_python()
def convert(self, ctx: Context) -> t.Any:
    """Convert a union value according to its #A.union style (nested, flat or
    keyed), using a discriminator key to identify the member type.

    On deserialization, the member type is resolved from the input mapping;
    on serialization, it is derived from the runtime type of *ctx.value* and
    the serialized member is wrapped according to the union style.
    """
    assert isinstance(ctx.type, UnionType)
    fallback = ctx.get_annotation(A.union) or A.union()
    style = ctx.type.style or fallback.style or UnionType.DEFAULT_STYLE
    discriminator_key = ctx.type.discriminator_key or fallback.discriminator_key or UnionType.DEFAULT_DISCRIMINATOR_KEY
    is_deserialize = ctx.direction == Direction.deserialize

    # Resolve the union member's name and type.
    if is_deserialize:
        if not isinstance(ctx.value, t.Mapping):
            raise ctx.type_error(expected=t.Mapping)
        member_name = self._get_deserialize_member_name(
            ctx.value, style, discriminator_key, ctx.location)
        member_type = ctx.type.subtypes.get_type_by_name(
            member_name, ctx.type_hint_adapter)
        assert isinstance(member_type, BaseType), \
            f'"{type(ctx.type.subtypes).__name__}" returned member_type must '\
            f'be BaseType, got "{type(member_type).__name__}"'
    else:
        member_type = ctx.type_hint_adapter.adapt_type_hint(type(ctx.value))
        member_name = ctx.type.subtypes.get_type_name(member_type, ctx.type_hint_adapter)

    type_hint = member_type
    if is_deserialize:
        if style == A.union.Style.nested:
            if member_name not in ctx.value:
                raise ConversionError(
                    f'missing union value key {member_name!r}', ctx.location)
            child_context = ctx.push(type_hint, ctx.value[member_name], member_name, ctx.field)
        elif style == A.union.Style.flat:
            # Work on a copy so removing the discriminator key does not
            # mutate the caller's mapping.
            child_context = ctx.push(type_hint, dict(ctx.value), None, ctx.field)
            t.cast(t.Dict, child_context.value).pop(discriminator_key)
        elif style == A.union.Style.keyed:
            child_context = ctx.push(type_hint, ctx.value[member_name], member_name, ctx.field)
        else:
            raise RuntimeError(f'bad style: {style!r}')
    else:
        child_context = ctx.push(type_hint, ctx.value, None, ctx.field)

    result = child_context.convert()
    if is_deserialize:
        return result

    # Wrap the serialized member according to the union style.
    if style == A.union.Style.nested:
        result = {discriminator_key: member_name, member_name: result}
    elif style == A.union.Style.flat:
        if not isinstance(result, t.MutableMapping):
            raise RuntimeError(
                f'union.Style.flat is not supported for non-object member types')
        result[discriminator_key] = member_name
    elif style == A.union.Style.keyed:
        if not isinstance(result, t.MutableMapping):
            raise RuntimeError(
                f'union.Style.keyed is not supported for non-object member types')
        result = {member_name: result}
    else:
        # BUGFIX: message previously read 'bda style', inconsistent with the
        # 'bad style' message in the deserialize branch above.
        raise RuntimeError(f'bad style: {style!r}')
    return result