def validate_serialization_config(config):
    """Validate that ``config`` contains :class:`AttributeConfiguration` objects.

    :param config: Object or iterable of objects that represent
      :class:`AttributeConfigurations <AttributeConfiguration>`
    :type config: iterable of :class:`AttributeConfiguration` objects / iterable
      of :class:`dict <python:dict>` objects corresponding to a
      :class:`AttributeConfiguration` / :class:`AttributeConfiguration` /
      :class:`dict <python:dict>` object corresponding to a
      :class:`AttributeConfiguration`

    :rtype: :class:`list <python:list>` of :class:`AttributeConfiguration` objects
    """
    # A single configuration object (or dict) is wrapped in a list so the
    # de-duplication loop below can treat every input uniformly.
    if config and not checkers.is_iterable(config,
                                           forbid_literals = (str,
                                                              bytes,
                                                              dict,
                                                              AttributeConfiguration)):
        config = [config]

    if not config:
        return []

    validated = []
    for entry in config:
        # AttributeConfiguration is tested before dict on purpose: the branch
        # order must be preserved in case the configuration type is dict-like.
        if isinstance(entry, AttributeConfiguration):
            if entry not in validated:
                validated.append(entry)
        elif isinstance(entry, dict):
            candidate = AttributeConfiguration(**entry)
            if candidate not in validated:
                validated.append(candidate)

    return validated
def ground_temperatures(self, value):
    """Set the twelve monthly ground temperatures.

    A single numeric value is replicated across all twelve months; otherwise
    the value must be an iterable of exactly twelve entries.
    """
    if checkers.is_numeric(value):
        monthly_values = [value] * 12
    elif checkers.is_iterable(value):
        # Enforce exactly 12 entries (one per month) before copying.
        validated = validators.iterable(value,
                                        minimum_length = 12,
                                        maximum_length = 12)
        monthly_values = list(validated)
    else:
        raise ValueError(
            "Input error for value 'ground_temperature'. Value must "
            "be numeric or an iterable of length 12.")
    self._ground_temperatures = monthly_values
def declarative_base(cls=BaseModel, **kwargs):
    """Construct a base class for declarative class definitions.

    The new base class will be given a metaclass that produces appropriate
    :class:`Table <sqlalchemy:sqlalchemy.schema.Table>` objects and makes the
    appropriate :func:`mapper <sqlalchemy:sqlalchemy.orm.mapper>` calls based on
    the information provided declaratively in the class and any subclasses of
    the class.

    :param cls: Defaults to :class:`BaseModel` to provide
      serialization/de-serialization support. If a
      :class:`tuple <python:tuple>` of classes, will include :class:`BaseModel`
      in that list of classes to mixin serialization/de-serialization support.
      If not :obj:`None <python:None>` and not a :class:`tuple <python:tuple>`,
      will mixin :class:`BaseModel` with the value passed to provide
      serialization/de-serialization support.
    :type cls: :obj:`None <python:None>` / :class:`tuple <python:tuple>` of
      classes / class object

    :param kwargs: Additional keyword arguments supported by the original
      :func:`sqlalchemy.ext.declarative.declarative_base() <sqlalchemy:sqlalchemy.ext.declarative.declarative_base>`
      function
    :type kwargs: keyword arguments

    :returns: Base class for declarative class definitions with support for
      serialization and de-serialization.
    """
    if isinstance(cls, tuple):
        # Prepend BaseModel so serialization support takes MRO precedence
        # over the caller-supplied bases.
        class_list = [BaseModel]
        class_list.extend(cls)
        # BUG FIX: this previously built a generator expression, but
        # SQLAlchemy's declarative_base() requires ``cls`` to be a class or a
        # tuple of classes -- a generator used as a bases sequence raises
        # TypeError when the metaclass constructs the new base class.
        cls = tuple(class_list)
    elif checkers.is_iterable(cls):
        class_list = [BaseModel]
        class_list.extend(cls)
        # BUG FIX: same generator -> tuple correction as above.
        cls = tuple(class_list)

    return SA_declarative_base(cls=cls, **kwargs)
def get_attribute_names(obj,
                        include_callable = False,
                        include_nested = True,
                        include_private = False,
                        include_special = False,
                        include_utilities = False):
    """Return a list of attribute names within ``obj``.

    :param include_callable: If ``True``, will include callable attributes
      (methods). Defaults to ``False``.
    :type include_callable: :class:`bool <python:bool>`

    :param include_nested: If ``True``, will include attributes that are
      arbitrarily-nestable types (such as a :term:`model class` or
      :class:`dict <python:dict>`). Defaults to ``True``.
    :type include_nested: :class:`bool <python:bool>`

    :param include_private: If ``True``, will include attributes whose names
      begin with ``_`` (but *not* ``__``). Defaults to ``False``.
    :type include_private: :class:`bool <python:bool>`

    :param include_special: If ``True``, will include attributes whose names
      begin with ``__``. Defaults to ``False``.
    :type include_special: :class:`bool <python:bool>`

    :param include_utilities: If ``True``, will include utility properties
      added by SQLAlchemy or **SQLAthanor**. Defaults to ``False``.
    :type include_utilities: :class:`bool <python:bool>`

    :returns: :term:`Model Attribute` names attached to ``obj``.
    :rtype: :class:`list <python:list>` of :class:`str <python:str>`
    """
    # Start from dir(), dropping SQLAlchemy/SQLAthanor utility columns unless
    # the caller explicitly asked for them.
    attribute_names = [x for x in dir(obj)
                       if (include_utilities and x in UTILITY_COLUMNS) or \
                          (x not in UTILITY_COLUMNS)]
    attributes = []
    for attribute in attribute_names:
        # Skip private (single-underscore) names unless requested.
        if (attribute[0] == '_' and attribute[0:2] != '__') and not include_private:
            continue
        # Skip special (dunder) names unless requested.
        if attribute[0:2] == '__' and not include_special:
            continue

        try:
            attribute_value = getattr(obj, attribute)
        except SA_InvalidRequestError:
            # SQLAlchemy can raise when touching instrumented/relationship
            # attributes outside an active session; such attributes are
            # treated as nested and kept only when nesting is included.
            if not include_nested:
                continue
            attributes.append(attribute)
            continue

        if not include_nested:
            # Drop attributes whose value is itself a nestable type.
            if checkers.is_type(attribute_value, ('BaseModel',
                                                  'RelationshipProperty',
                                                  'AssociationProxy',
                                                  dict)):
                continue

            try:
                is_iterable = checkers.is_iterable(attribute_value,
                                                   forbid_literals = (str,
                                                                      bytes,
                                                                      dict))
            except SA_InvalidRequestError as error:
                # NOTE(review): include_nested is always False on this path,
                # so the else-branch below appears unreachable -- confirm
                # whether this guard was meant to live at loop level.
                if not include_nested:
                    continue
                else:
                    is_iterable = False

            if is_iterable:
                # An iterable that CONTAINS nestable items also counts as
                # nested and is skipped.
                loop = False
                try:
                    for item in attribute_value:
                        if checkers.is_type(item, ('BaseModel',
                                                   'RelationshipProperty',
                                                   'AssociationProxy',
                                                   dict)):
                            loop = True
                            break
                except (NotImplementedError, TypeError):
                    # Value advertised iterability but cannot actually be
                    # looped over; treat it as non-nested.
                    pass

                if loop:
                    continue

        if not include_callable and checkers.is_callable(attribute_value):
            continue

        attributes.append(attribute)

    return attributes
def parse_csv(input_data,
              delimiter = '|',
              wrap_all_strings = False,
              null_text = 'None',
              wrapper_character = "'",
              double_wrapper_character_when_nested = False,
              escape_character = "\\",
              line_terminator = '\r\n'):
    """De-serialize CSV data into a Python :class:`dict <python:dict>` object.

    .. versionadded:: 0.3.0

    .. tip::

      Unwrapped empty column values are automatically interpreted as null
      (:obj:`None <python:None>`).

    :param input_data: The CSV data to de-serialize. Should include column
      headers and at least **one** row of data. Will ignore any rows of data
      beyond the first row.
    :type input_data: :class:`str <python:str>`

    :param delimiter: The delimiter used between columns. Defaults to ``|``.
    :type delimiter: :class:`str <python:str>`

    :param wrapper_character: The string used to wrap string values when
      wrapping is applied. Defaults to ``'``.
    :type wrapper_character: :class:`str <python:str>`

    :param null_text: The string used to indicate an empty value if empty
      values are wrapped. Defaults to `None`.
    :type null_text: :class:`str <python:str>`

    :returns: A :class:`dict <python:dict>` representation of the CSV record.
    :rtype: :class:`dict <python:dict>`

    :raises DeserializationError: if ``input_data`` is not a valid
      :class:`str <python:str>`
    :raises CSVStructureError: if there are less than 2 (two) rows in
      ``input_data`` or if column headers are not valid Python variable names
    """
    use_file = False
    # ``input_data`` may be: a path to a CSV file, an iterable of CSV lines,
    # or a single CSV string (which is wrapped in a list so csv.DictReader can
    # iterate it line-wise).
    if not checkers.is_file(input_data) and not checkers.is_iterable(input_data):
        try:
            input_data = validators.string(input_data, allow_empty = False)
        except (ValueError, TypeError):
            raise DeserializationError("input_data expects a 'str', received '%s'" \
                                       % type(input_data))
        input_data = [input_data]
    elif checkers.is_file(input_data):
        use_file = True

    if not wrapper_character:
        wrapper_character = '\''

    if wrap_all_strings:
        quoting = csv.QUOTE_NONNUMERIC
    else:
        quoting = csv.QUOTE_MINIMAL

    # (Re-)register a throwaway 'sqlathanor' dialect reflecting the requested
    # formatting options; it is unregistered again before returning.
    if 'sqlathanor' in csv.list_dialects():
        csv.unregister_dialect('sqlathanor')

    csv.register_dialect('sqlathanor',
                         delimiter = delimiter,
                         doublequote = double_wrapper_character_when_nested,
                         escapechar = escape_character,
                         quotechar = wrapper_character,
                         quoting = quoting,
                         lineterminator = line_terminator)

    if not use_file:
        csv_reader = csv.DictReader(input_data,
                                    dialect = 'sqlathanor',
                                    restkey = None,
                                    restval = None)
        rows = [x for x in csv_reader]
    else:
        if not is_py2:
            # newline='' lets the csv module handle line terminators itself
            # (Python 3 recommendation for csv file handles).
            with open(input_data, 'r', newline = '') as input_file:
                csv_reader = csv.DictReader(input_file,
                                            dialect = 'sqlathanor',
                                            restkey = None,
                                            restval = None)
                rows = [x for x in csv_reader]
        else:
            with open(input_data, 'r') as input_file:
                csv_reader = csv.DictReader(input_file,
                                            dialect = 'sqlathanor',
                                            restkey = None,
                                            restval = None)
                rows = [x for x in csv_reader]

    # DictReader consumes the header row, so an empty ``rows`` means the input
    # had no data row (or nothing at all).
    if len(rows) < 1:
        raise CSVStructureError('expected 1 row of data and 1 header row, missing 1')
    else:
        data = rows[0]

    for key in data:
        try:
            validators.variable_name(key)
        except ValueError:
            raise CSVStructureError(
                'column (%s) is not a valid Python variable name' % key)

        # Convert the textual null marker into a real None.
        if data[key] == null_text:
            data[key] = None

    csv.unregister_dialect('sqlathanor')

    return data
def notes(self, value):
    """Set the notes text, accepting either a string or an iterable of lines."""
    # An iterable of note lines is collapsed into one newline-delimited
    # string before validation.
    if checkers.is_iterable(value):
        joined = '\n'.join(value)
    else:
        joined = value
    self._notes = validators.string(joined, allow_empty = True)
def get_type_mapping(value, type_mapping=None, skip_nested=True, default_to_str=False):
    """Retrieve the SQL type mapping for ``value``.

    :param value: The value whose SQL type will be returned.

    :param type_mapping: Determines how the value type of ``value`` map to
      SQL column data types. To add a new mapping or override a default, set a
      key to the name of the value type in Python, and set the value to a
      :doc:`SQLAlchemy Data Type <sqlalchemy:core/types>`. The following are
      the default mappings applied:

      .. list-table::
         :widths: 30 30
         :header-rows: 1

         * - Python Literal
           - SQL Column Type
         * - ``bool``
           - :class:`Boolean <sqlalchemy:sqlalchemy.types.Boolean>`
         * - ``str``
           - :class:`Text <sqlalchemy:sqlalchemy.types.Text>`
         * - ``int``
           - :class:`Integer <sqlalchemy:sqlalchemy.types.Integer>`
         * - ``float``
           - :class:`Float <sqlalchemy:sqlalchemy.types.Float>`
         * - ``date``
           - :class:`Date <sqlalchemy:sqlalchemy.types.Date>`
         * - ``datetime``
           - :class:`DateTime <sqlalchemy:sqlalchemy.types.DateTime>`
         * - ``time``
           - :class:`Time <sqlalchemy:sqlalchemy.types.Time>`
         * - ``timedelta``
           - :class:`Interval <sqlalchemy:sqlalchemy.types.Interval>`

    :type type_mapping: :class:`dict <python:dict>` with type names as keys
      and column data types as values / :obj:`None <python:None>`

    :param skip_nested: If ``True`` then if ``value`` is a nested item (e.g.
      iterable, :class:`dict <python:dict>` objects, etc.) it will return
      :obj:`None <python:None>`. If ``False``, will treat nested items as
      :class:`str <python:str>`. Defaults to ``True``.
    :type skip_nested: :class:`bool <python:bool>`

    :param default_to_str: If ``True``, will automatically set a ``value``
      whose value type cannot be determined to ``str``
      (:class:`Text <sqlalchemy:sqlalchemy.types.Text>`). If ``False``, will
      use the value type's ``__name__`` attribute and attempt to find a
      mapping. Defaults to ``False``.
    :type default_to_str: :class:`bool <python:bool>`

    :returns: The :doc:`SQLAlchemy Data Type <sqlalchemy:core/types>` for
      ``value``, or :obj:`None <python:None>` if the value should be skipped
    :rtype: :doc:`SQLAlchemy Data Type <sqlalchemy:core/types>` / :obj:`None`

    :raises UnsupportedValueTypeError: when ``value`` does not have
      corresponding :doc:`SQLAlchemy Data Type <sqlalchemy:core/types>`
    """
    # BUG FIX: build the effective mapping by layering the caller's overrides
    # over a *copy* of the defaults. The previous implementation wrote the
    # default entries back into the caller-supplied ``type_mapping`` dict,
    # mutating the caller's object as a side effect.
    merged_mapping = dict(DEFAULT_PYTHON_SQL_TYPE_MAPPING)
    if type_mapping:
        merged_mapping.update(type_mapping)

    # Classify the value into a Python type name used as the mapping key.
    if checkers.is_callable(value):
        raise UnsupportedValueTypeError('value ("%s") cannot be callable' % value)
    elif checkers.is_iterable(value) and skip_nested:
        # Nested/iterable values are skipped entirely when requested.
        return None
    elif checkers.is_iterable(value) and default_to_str:
        target_type = 'str'
    elif value is None and default_to_str:
        target_type = 'str'
    elif isinstance(value, bool):
        # bool must be tested before the numeric checks: True/False are
        # numeric, but should map to Boolean, not Integer.
        target_type = 'bool'
    elif checkers.is_numeric(value):
        if checkers.is_integer(value):
            target_type = 'int'
        else:
            target_type = 'float'
    elif checkers.is_time(value) and not checkers.is_datetime(value):
        target_type = 'time'
    elif checkers.is_datetime(value):
        target_type = 'datetime'
    elif checkers.is_date(value):
        target_type = 'date'
    elif default_to_str:
        target_type = 'str'
    else:
        # Last resort: look the runtime type's name up in the mapping.
        target_type = type(value).__name__

    column_type = merged_mapping.get(target_type, None)

    if not column_type:
        raise UnsupportedValueTypeError(
            'value ("%s") is not a supported type (%s)' % (value, target_type)
        )

    return column_type
def backoff(to_execute,
            args = None,
            kwargs = None,
            strategy = None,
            retry_execute = None,
            retry_args = None,
            retry_kwargs = None,
            max_tries = None,
            max_delay = None,
            catch_exceptions = None,
            on_failure = None,
            on_success = None):
    """Retry a function call multiple times with a delay per the strategy given.

    :param to_execute: The function call that is to be attempted.
    :type to_execute: callable

    :param args: The positional arguments to pass to the function on the first
      attempt. If ``retry_args`` is :class:`None <python:None>`, will re-use
      these arguments on retry attempts as well.
    :type args: iterable / :class:`None <python:None>`.

    :param kwargs: The keyword arguments to pass to the function on the first
      attempt. If ``retry_kwargs`` is :class:`None <python:None>`, will re-use
      these keyword arguments on retry attempts as well.
    :type kwargs: :class:`dict <python:dict>` / :class:`None <python:None>`

    :param strategy: The :class:`BackoffStrategy` to use when determining the
      delay between retry attempts. If :class:`None <python:None>`, defaults to
      :class:`Exponential`.
    :type strategy: :class:`BackoffStrategy`

    :param retry_execute: The function to call on retry attempts. If
      :class:`None <python:None>`, will retry ``to_execute``. Defaults to
      :class:`None <python:None>`.
    :type retry_execute: callable / :class:`None <python:None>`

    :param retry_args: The positional arguments to pass to the function on
      retry attempts. If :class:`None <python:None>`, will re-use ``args``.
      Defaults to :class:`None <python:None>`.
    :type retry_args: iterable / :class:`None <python:None>`

    :param retry_kwargs: The keyword arguments to pass to the function on retry
      attempts. If :class:`None <python:None>`, will re-use ``kwargs``.
      Defaults to :class:`None <python:None>`.
    :type retry_kwargs: :class:`dict <python:dict>` / :class:`None <python:None>`

    :param max_tries: The maximum number of times to attempt the call. If
      :class:`None <python:None>`, will apply an environment variable
      ``BACKOFF_DEFAULT_TRIES``. If that environment variable is not set, will
      apply a default of ``3``.
    :type max_tries: int / :class:`None <python:None>`

    :param max_delay: The maximum number of seconds to wait before giving up
      once and for all. If :class:`None <python:None>`, will apply an
      environment variable ``BACKOFF_DEFAULT_DELAY`` if that environment
      variable is set. If it is not set, will not apply a max delay at all.
    :type max_delay: :class:`None <python:None>` / int

    :param catch_exceptions: The ``type(exception)`` to catch and retry. If
      :class:`None <python:None>`, will catch all exceptions. Defaults to
      :class:`None <python:None>`.

      .. caution::

        The iterable must contain one or more types of exception *instances*,
        and not class objects. For example:

        .. code-block:: python

          # GOOD:
          catch_exceptions = (type(ValueError()), type(TypeError()))

          # BAD:
          catch_exceptions = (type(ValueError), type(ValueError))

          # BAD:
          catch_exceptions = (ValueError, TypeError)

          # BAD:
          catch_exceptions = (ValueError(), TypeError())

    :type catch_exceptions: iterable of form ``[type(exception()), ...]``

    :param on_failure: The :class:`exception <python:Exception>` or function to
      call when all retry attempts have failed. If
      :class:`None <python:None>`, will raise the last-caught
      :class:`exception <python:Exception>`. If an
      :class:`exception <python:Exception>`, will raise the exception with the
      same message as the last-caught exception. If a function, will call the
      function and pass the last-raised exception, its message, and stacktrace
      to the function. Defaults to :class:`None <python:None>`.
    :type on_failure: :class:`Exception <python:Exception>` / function /
      :class:`None <python:None>`

    :param on_success: The function to call when the operation was successful.
      The function receives the result of the ``to_execute`` or
      ``retry_execute`` function that was successful, and is called before that
      result is returned to whatever code called the backoff function. If
      :class:`None <python:None>`, will just return the result of
      ``to_execute`` or ``retry_execute`` without calling a handler. Defaults
      to :class:`None <python:None>`.
    :type on_success: callable / :class:`None <python:None>`

    :returns: The result of the attempted function.

    Example:

    .. code-block:: python

      from backoff_utils import backoff

      def some_function(arg1, arg2, kwarg1 = None):
          # Function does something
          pass

      result = backoff(some_function,
                       args = ['value1', 'value2'],
                       kwargs = { 'kwarg1': 'value3' },
                       max_tries = 3,
                       max_delay = 30,
                       strategy = strategies.Exponential)
    """
    # pylint: disable=too-many-branches,too-many-statements

    # ---- input validation ------------------------------------------------
    if to_execute is None:
        raise ValueError('to_execute cannot be None')
    elif not checkers.is_callable(to_execute):
        raise TypeError('to_execute must be callable')

    if strategy is None:
        strategy = strategies.Exponential

    if not hasattr(strategy, 'IS_INSTANTIATED'):
        raise TypeError('strategy must be a BackoffStrategy or descendent')
    # ``strategy`` may be either the strategy class or an instance; a
    # throwaway instance is built purely to type-check it below.
    if not strategy.IS_INSTANTIATED:
        test_strategy = strategy(attempt = 0)
    else:
        test_strategy = strategy

    if not checkers.is_type(test_strategy, 'BackoffStrategy'):
        raise TypeError('strategy must be a BackoffStrategy or descendent')

    if args:
        args = validators.iterable(args)
    if kwargs:
        kwargs = validators.dict(kwargs)

    if retry_execute is None:
        retry_execute = to_execute
    elif not checkers.is_callable(retry_execute):
        raise TypeError('retry_execute must be None or a callable')

    # Retry args/kwargs fall back to the first-attempt values when not given.
    if not retry_args:
        retry_args = args
    else:
        retry_args = validators.iterable(retry_args)

    if not retry_kwargs:
        retry_kwargs = kwargs
    else:
        retry_kwargs = validators.dict(retry_kwargs)

    if max_tries is None:
        max_tries = DEFAULT_MAX_TRIES
    max_tries = validators.integer(max_tries)

    if max_delay is None:
        max_delay = DEFAULT_MAX_DELAY

    if catch_exceptions is None:
        # Default: retry on any exception type.
        catch_exceptions = [type(Exception())]
    else:
        if not checkers.is_iterable(catch_exceptions):
            catch_exceptions = [catch_exceptions]
        catch_exceptions = validators.iterable(catch_exceptions)

    if on_failure is not None and not checkers.is_callable(on_failure):
        raise TypeError('on_failure must be None or a callable')
    if on_success is not None and not checkers.is_callable(on_success):
        raise TypeError('on_success must be None or a callable')

    # ---- attempt / retry loop --------------------------------------------
    cached_error = None
    return_value = None
    returned = False
    failover_counter = 0
    start_time = datetime.utcnow()
    # NOTE(review): ``<=`` permits max_tries + 1 total attempts (attempt 0
    # plus max_tries retries) -- confirm whether that is the intended contract
    # given the docstring's "maximum number of times to attempt the call".
    while failover_counter <= (max_tries):
        elapsed_time = (datetime.utcnow() - start_time).total_seconds()
        # Wall-clock budget check happens before each attempt.
        if max_delay is not None and elapsed_time >= max_delay:
            if cached_error is None:
                raise BackoffTimeoutError('backoff timed out after:'
                                          ' {}s'.format(elapsed_time))
            else:
                _handle_failure(on_failure, cached_error)
        if failover_counter == 0:
            # First attempt: call to_execute with whichever of args / kwargs
            # were supplied.
            try:
                if args is not None and kwargs is not None:
                    return_value = to_execute(*args, **kwargs)
                elif args is not None:
                    return_value = to_execute(*args)
                elif kwargs is not None:
                    return_value = to_execute(**kwargs)
                else:
                    return_value = to_execute()
                returned = True
                break
            except Exception as error:                                         # pylint: disable=broad-except
                if type(error) in catch_exceptions:
                    # Retryable: remember the error, sleep per strategy,
                    # then move to the retry path.
                    cached_error = error
                    strategy.delay(failover_counter)
                    failover_counter += 1
                    continue
                else:
                    # Non-retryable exception type: fail immediately.
                    _handle_failure(on_failure = on_failure,
                                    error = error)
                    return
        else:
            # Retry attempts: call retry_execute with retry_args/retry_kwargs.
            try:
                if retry_args is not None and retry_kwargs is not None:
                    return_value = retry_execute(*retry_args, **retry_kwargs)
                elif retry_args is not None:
                    return_value = retry_execute(*retry_args)
                elif retry_kwargs is not None:
                    return_value = retry_execute(**retry_kwargs)
                else:
                    return_value = retry_execute()
                returned = True
                break
            except Exception as error:                                         # pylint: disable=broad-except
                if type(error) in catch_exceptions:
                    strategy.delay(failover_counter)
                    cached_error = error
                    failover_counter += 1
                    continue
                else:
                    _handle_failure(on_failure = on_failure,
                                    error = error)
                    return

    if not returned:
        # All attempts exhausted: surface the last cached error.
        _handle_failure(on_failure = on_failure,
                        error = cached_error)
        return
    elif returned and on_success is not None:
        on_success(return_value)

    return return_value
def automap_base(declarative_base=None, **kwargs): """Produce a declarative automap base. This function produces a new base class that is a product of the :class:`AutomapBase <sqlalchemy:sqlalchemy.ext.automap.AutomapBase>` class as well as a declarative base that you supply. If no declarative base is supplied, then the **SQLAthanor** default :class:`BaseModel <sqlathanor.declarative.BaseModel>` will be used, to provide serialization/de-serialization support to the resulting automapped base class. :param declarative_base: The declarative base class that is to be combined with the :class:`AutomapBase <sqlalchemy:sqlalchemy.ext.automap.AutomapBase>` class to construct the resulting automapped :term:`model class`. To ensure that **SQLAthanor** :term:`serialization`/:term:`de-serialization` functionality is provided to your automapped model class, make sure that the value provided is produced by :func:`sqlathanor.declarative_base() <sqlathanor.declarative.declarative_base>` or otherwise inherits from :class:`sqlathanor.declarative.BaseModel`. If ``None``, will default to :class:`sqlathanor.declarative.BaseModel`. :type declarative_base: The declarative base model to combine with the automapped :term:`model class` produced. :param kwargs: Passed to :func:`declarative_base() <sqlalchemy:sqlalchemy.ext.declarative.declarative_base>` :type kwargs: keyword arguments :returns: A :class:`AutomapBase <sqlalchemy:sqlalchemy.ext.automap.AutomapBase>` that can reflect your database schema structure with auto-generated declarative models that support **SQLAthanor** serialization/de-serialization. :rtype: :class:`AutomapBase <sqlalchemy:sqlalchemy.ext.automap.AutomapBase>` :raises SQLAlchemySupportError: if called in an environment where `SQLAlchemy`_ is installed with a version less than 0.9.1 (which introduces automap support). 
""" # pylint: disable=redefined-variable-type if not SUPPORTS_AUTOMAP: raise SQLAlchemySupportError( 'automap is only available in SQLAlchemy v.0.9.1 and higher, ' + \ 'but you are using %s. Please upgrade.' % sqlalchemy.__version__ ) if declarative_base is None: cls = BaseModel elif isinstance(declarative_base, BaseModel) or declarative_base == BaseModel: cls = declarative_base elif isinstance(declarative_base, tuple): for item in declarative_base: if item == BaseModel or isinstance(item, BaseModel): cls = declarative_base break else: cls = kwargs.pop('cls', None) if cls is None and checkers.is_iterable(declarative_base): class_list = [BaseModel] class_list.extend([x for x in declarative_base]) elif cls is None and not checkers.is_iterable(declarative_base): class_list = [BaseModel, declarative_base] elif checkers.is_iterable(cls) and checkers.is_iterable( declarative_base): class_list = [BaseModel] class_list.extend([x for x in cls]) class_list.extend([x for x in declarative_base]) elif cls is not None and checkers.is_iterable(declarative_base): class_list = [BaseModel, cls] class_list.extend([x for x in declarative_base]) elif cls is not None and not checkers.is_iterable(declarative_base): class_list = [BaseModel, cls, declarative_base] for item in class_list[1:]: if item == BaseModel or isinstance(item, BaseModel): class_list = class_list[1:] break cls = tuple(x for x in class_list) automapped_base = SA_automap_base(declarative_base=cls, **kwargs) return automapped_base
def _to_dict(self,
             format,
             max_nesting = 0,
             current_nesting = 0,
             is_dumping = False,
             config_set = None):
    """Return a :class:`dict <python:dict>` representation of the object.

    .. warning::

      This method is an **intermediate** step that is used to produce the
      contents for certain public JSON, YAML, and
      :class:`dict <python:dict>` serialization methods. It should not be
      called directly.

    :param format: The format to which the :class:`dict <python:dict>` will
      ultimately be serialized. Accepts: ``'csv'``, ``'json'``, ``'yaml'``,
      and ``'dict'``.
    :type format: :class:`str <python:str>`

    :param max_nesting: The maximum number of levels that the resulting
      :class:`dict <python:dict>` object can be nested. If set to ``0``, will
      not nest other serializable objects. Defaults to ``0``.
    :type max_nesting: :class:`int <python:int>`

    :param current_nesting: The current nesting level at which the
      :class:`dict <python:dict>` representation will reside. Defaults to
      ``0``.
    :type current_nesting: :class:`int <python:int>`

    :param is_dumping: If ``True``, retrieves all attributes except callables,
      utilities, and specials (``__<name>``). If ``False``, only retrieves
      those that have JSON serialization enabled. Defaults to ``False``.
    :type is_dumping: :class:`bool <python:bool>`

    :param config_set: If not :obj:`None <python:None>`, the named
      configuration set to use when processing the input. Defaults to
      :obj:`None <python:None>`.
    :type config_set: :class:`str <python:str>` / :obj:`None <python:None>`

    :returns: A :class:`dict <python:dict>` representation of the object.
    :rtype: :class:`dict <python:dict>`

    :raises InvalidFormatError: if ``format`` is not recognized
    :raises SerializableAttributeError: if attributes is empty
    :raises UnsupportedSerializationError: if unable to serialize a value
    :raises MaximumNestingExceededError: if ``current_nesting`` is greater
      than ``max_nesting``
    :raises MaximumNestingExceededWarning: if an attribute requires nesting
      beyond ``max_nesting``
    """
    # pylint: disable=too-many-branches
    next_nesting = current_nesting + 1
    if format not in ['csv', 'json', 'yaml', 'dict']:
        raise InvalidFormatError("format '%s' not supported" % format)
    if current_nesting > max_nesting:
        raise MaximumNestingExceededError(
            'current nesting level (%s) exceeds maximum %s' % (current_nesting,
                                                               max_nesting)
        )

    dict_object = dict_()

    # Pick the per-format serialization-config getter.
    if format == 'csv':
        attribute_getter = self.get_csv_serialization_config
    elif format == 'json':
        attribute_getter = self.get_json_serialization_config
    elif format == 'yaml':
        attribute_getter = self.get_yaml_serialization_config
    elif format == 'dict':
        attribute_getter = self.get_dict_serialization_config

    if not is_dumping:
        # Normal mode: serialize only attributes explicitly enabled for this
        # format (and actually present on the instance).
        attributes = [x for x in attribute_getter(deserialize = None,
                                                  serialize = True,
                                                  config_set = config_set)
                      if hasattr(self, x.name)]
    else:
        # Dump mode: serialize every non-callable, non-nested, non-special
        # attribute, synthesizing an AttributeConfiguration for each one.
        attribute_names = [x for x in get_attribute_names(self,
                                                          include_callable = False,
                                                          include_nested = False,
                                                          include_private = True,
                                                          include_special = False,
                                                          include_utilities = False)]
        attributes = []
        for item in attribute_names:
            attribute_config = self.get_attribute_serialization_config(
                item,
                config_set = config_set)
            # Preserve any explicitly-configured on_serialize hook for this
            # format; otherwise serialize the raw value.
            if attribute_config is not None:
                on_serialize_function = attribute_config.on_serialize.get(format,
                                                                          None)
            else:
                on_serialize_function = None

            attribute = AttributeConfiguration(name = item,
                                               supports_json = True,
                                               supports_yaml = True,
                                               supports_dict = True,
                                               on_serialize = on_serialize_function)
            attributes.append(attribute)

    if not attributes:
        # NOTE(review): type(self.__class__) evaluates to the metaclass, not
        # the model class name -- self.__class__.__name__ may have been
        # intended for this message; confirm before changing.
        raise SerializableAttributeError(
            "'%s' has no '%s' serializable attributes" % (type(self.__class__),
                                                          format)
        )

    for attribute in attributes:
        item = getattr(self, attribute.name, None)
        if hasattr(item, '_to_dict'):
            # Nested serializable model: recurse one level deeper.
            try:
                value = item._to_dict(format,                                  # pylint: disable=protected-access
                                      max_nesting = max_nesting,
                                      current_nesting = next_nesting,
                                      is_dumping = is_dumping,
                                      config_set = config_set)
            except MaximumNestingExceededError:
                warnings.warn(
                    "skipping key '%s' because maximum nesting has been exceeded" \
                        % attribute.name,
                    MaximumNestingExceededWarning
                )
                continue
        else:
            # Apply the per-format on_serialize hook, if configured.
            if attribute.on_serialize[format]:
                on_serialize_function = attribute.on_serialize[format]
                item = on_serialize_function(item)

            if checkers.is_iterable(item, forbid_literals = (str, bytes, dict)):
                # Collection of values: serialize element-by-element.
                try:
                    value = iterable__to_dict(item,
                                              format,
                                              max_nesting = max_nesting,
                                              current_nesting = next_nesting,
                                              is_dumping = is_dumping,
                                              config_set = config_set)
                except MaximumNestingExceededError:
                    warnings.warn(
                        "skipping key '%s' because maximum nesting has been exceeded" \
                            % attribute.name,
                        MaximumNestingExceededWarning
                    )
                    continue
                except NotAnIterableError:
                    # The value only looked iterable: fall back to scalar
                    # serialization.
                    try:
                        value = self._get_serialized_value(format,
                                                           attribute.name,
                                                           config_set = config_set)
                    except UnsupportedSerializationError as error:
                        # When dumping, emit the raw value instead of failing.
                        if is_dumping:
                            value = getattr(self, attribute.name)
                        else:
                            raise error
            else:
                # Scalar value: serialize through the configured pipeline.
                try:
                    value = self._get_serialized_value(format,
                                                       attribute.name,
                                                       config_set = config_set)
                except UnsupportedSerializationError as error:
                    if is_dumping:
                        value = getattr(self, attribute.name)
                    else:
                        raise error

        # Keys prefer the configured display name over the attribute name.
        serialized_key = attribute.display_name or attribute.name
        dict_object[str(serialized_key)] = value

    return dict_object