def _update_data(self, *data_dict, **kwargs): """ A private method to process and update entity values correctly. :param data: A dictionary of values to be updated for the entity :param kwargs: keyword arguments with key-value pairs to be updated """ # Load each of the fields given in the data dictionary self.errors = {} for data in data_dict: if not isinstance(data, dict): raise AssertionError( f'Positional argument "{data}" passed must be a dict.' f'This argument serves as a template for loading common ' f'values.', ) for field_name, val in data.items(): setattr(self, field_name, val) # Now load against the keyword arguments for field_name, val in kwargs.items(): setattr(self, field_name, val) # Raise any errors found during update if self.errors: logger.error(self.errors) raise ValidationError(self.errors)
def commit(self):
    """Commit the Unit of Work.

    Flushes every active session, publishes all collected events to every
    registered broker, and finally resets internal UoW state.

    :raises InvalidOperationError: if the UnitOfWork is not in progress
    :raises ValidationError: wrapping any exception raised during commit,
        after the transaction has been rolled back
    """
    logger.debug(f"Committing {self}...")

    # Raise error if the Unit Of Work is not active
    if not self._in_progress:
        raise InvalidOperationError("UnitOfWork is not in progress")

    # Exit from Unit of Work
    _uow_context_stack.pop()

    # Commit and destroy session
    try:
        # Only the session objects are needed, so iterate values directly
        for session in self._sessions.values():
            session.commit()

        # Fan out every collected event to every registered broker
        for event in self._events:
            for broker in self.domain.brokers.values():
                broker.send_message(event)

        logger.debug("Commit Successful")
    except Exception as exc:
        logger.error(
            f"Error during Commit: {str(exc)}. Rolling back Transaction..."
        )
        self.rollback()
        # Chain the original exception so the root cause stays visible
        raise ValidationError(
            {"_entity": [f"Error during Data Commit: - {repr(exc)}"]}
        ) from exc

    self._reset()
def __call__(self, value): """ Validate that the input contains (or does *not* contain, if inverse_match is True) a match for the regular expression. """ regex_matches = self.regex.search(str(value)) invalid_input = regex_matches if self.inverse_match else not regex_matches if invalid_input: raise ValidationError(self.message)
def __init__(self, *template, owner=None, raise_errors=True, **kwargs): """ Initialise the container. During initialization, set value on fields if validation passes. This initialization technique supports keyword arguments as well as dictionaries. You can even use a template for initial data. """ if self.meta_.abstract is True: raise NotSupportedError( f'{self.__class__.__name__} class has been marked abstract' f' and cannot be instantiated') self.errors = defaultdict(list) self.raise_errors = raise_errors # Entity/Aggregate to which this Command is connected to self.owner = owner # Load the attributes based on the template loaded_fields = [] for dictionary in template: if not isinstance(dictionary, dict): raise AssertionError( f'Positional argument "{dictionary}" passed must be a dict.' f'This argument serves as a template for loading common ' f'values.', ) for field_name, val in dictionary.items(): loaded_fields.append(field_name) setattr(self, field_name, val) # Now load against the keyword arguments for field_name, val in kwargs.items(): loaded_fields.append(field_name) setattr(self, field_name, val) # Now load the remaining fields with a None value, which will fail # for required fields for field_name, field_obj in self.meta_.declared_fields.items(): if field_name not in loaded_fields: setattr(self, field_name, None) self.defaults() # `clean()` will return a `defaultdict(list)` if errors are to be raised custom_errors = self.clean() or {} for field in custom_errors: self.errors[field].extend(custom_errors[field]) # Raise any errors found during load if self.errors and self.raise_errors: logger.error(self.errors) raise ValidationError(self.errors)
def __init__(self, content_type=Text, pickled=False, **kwargs):
    """Initialise the field, ensuring the requested content type is one of
    the supported scalar field types."""
    supported_types = (
        Boolean,
        Date,
        DateTime,
        Float,
        Identifier,
        Integer,
        String,
        Text,
    )
    if content_type not in supported_types:
        raise ValidationError({"content_type": ["Content type not supported"]})

    self.content_type = content_type
    self.pickled = pickled

    super().__init__(**kwargs)
def add(self, aggregate):  # noqa: C901
    """This method helps persist or update aggregates into the persistence
    store. Returns the persisted aggregate.

    Protean adopts a collection-oriented design pattern to handle persistence.
    What this means is that the Repository interface does not hint in any way
    that there is an underlying persistence mechanism, avoiding any notion of
    saving or persisting data in the design layer. The task of syncing the
    data back into the persistence store is handled automatically.

    To be specific, a Repository mimics a `set` collection. Whatever the
    implementation, the repository will not allow instances of the same
    object to be added twice. Also, when retrieving objects from a Repository
    and modifying them, you don’t need to “re-save” them to the Repository.

    If there is a :ref:`Unit of Work <unit-of-work>` in progress, then the
    changes are performed on the UoW's active session. They are committed
    whenever the entire UoW is committed. If there is no transaction in
    progress, changes are committed immediately to the persistence store.
    This mechanism is part of the DAO's design, and is automatically used
    wherever one tries to persist data.
    """
    # Ensure that aggregate is clean and good to save
    # FIXME Let `clean()` raise validation errors
    errors = aggregate.clean() or {}

    # Raise any errors found during load
    if errors:
        logger.error(errors)
        raise ValidationError(errors)

    # If there are HasMany fields in the aggregate, sync child objects added/removed,
    # but not yet persisted to the database.
    #
    # The details of in-transit child objects are maintained as part of the `has_many_field` itself
    # in a variable called `_temp_cache`
    for field_name, field in aggregate.meta_.declared_fields.items():
        if isinstance(field, HasMany):
            has_many_field = getattr(aggregate, field_name)

            # Children marked for removal are deleted via their own DAO
            for item in has_many_field._temp_cache["removed"]:
                dao = current_domain.get_dao(field.to_cls)
                dao.delete(item)
            has_many_field._temp_cache["removed"] = list(
            )  # Empty contents of `removed` cache

            # Children pending addition are persisted as brand-new records
            for item in has_many_field._temp_cache["added"]:
                dao = current_domain.get_dao(field.to_cls)
                item.state_.mark_new()
                dao.save(item)
            has_many_field._temp_cache["added"] = list(
            )  # Empty contents of `added` cache

        if isinstance(field, HasOne):
            if field.has_changed:
                dao = current_domain.get_dao(field.to_cls)
                # `change` appears to take "ADDED"/"UPDATED"/other (removal)
                # values — NOTE(review): confirm against the HasOne field
                if field.change == "ADDED":
                    dao.save(field.value)
                elif field.change == "UPDATED":
                    if field.change_old_value is not None:
                        # The object was replaced, so delete the old record
                        dao.delete(field.change_old_value)
                    else:
                        # The same object was updated, so mark it as new to be able to save
                        # FIXME This should have been automatic with `is_changed` flag in `state_`
                        field.value.state_.mark_new()
                    dao.save(field.value)
                else:
                    dao.delete(field.change_old_value)

                # Reset temporary fields after processing
                field.change = None
                field.change_old_value = None

    # Persist only if the aggregate object is new, or it has changed since last persistence
    if (not aggregate.state_.is_persisted) or (
            aggregate.state_.is_persisted and aggregate.state_.is_changed):
        dao = current_domain.get_dao(self.meta_.aggregate_cls)
        dao.save(aggregate)

    return aggregate
def __call__(self, value): if len(value) < self.min_length: raise ValidationError(self.error)
def medium_string_validator(value, max_length=15):
    """Check that the value does not exceed the maximum allowed length.

    :param value: the string (or any sized object) to validate
    :param max_length: maximum allowed length; defaults to 15 to remain
        backward compatible with the original hard-coded limit
    :raises ValidationError: when the value is longer than ``max_length``
    """
    if len(value) > max_length:
        raise ValidationError(
            f'Value cannot be more than {max_length} characters long.')
def __init__(self, *template, raise_errors=True, **kwargs):  # noqa: C901
    """
    Initialise the entity object.

    During initialization, set value on fields if validation passes.

    This initialization technique supports keyword arguments as well as
    dictionaries. The objects initialized in the following example have the
    same structure::

        user1 = User({'first_name': 'John', 'last_name': 'Doe'})
        user2 = User(first_name='John', last_name='Doe')

    You can also specify a template for initial data and override specific
    attributes::

        base_user = User({'age': 15})
        user = User(base_user.to_dict(), first_name='John', last_name='Doe')

    :param template: zero or more dictionaries of initial values; explicit
        keyword arguments take precedence over template entries
    :param raise_errors: when True (default), raise `ValidationError` at the
        end of loading if any field errors were recorded
    :param kwargs: field values to set on the entity
    :raises NotSupportedError: if the entity class is marked abstract
    :raises AssertionError: if a positional argument is not a dict
    :raises ValidationError: if errors were recorded and `raise_errors` is True
    """
    # Abstract entity classes cannot be instantiated
    if self.meta_.abstract is True:
        raise NotSupportedError(
            f'{self.__class__.__name__} class has been marked abstract'
            f' and cannot be instantiated')

    self.errors = defaultdict(list)
    self.raise_errors = raise_errors

    # Set up the storage for instance state
    self.state_ = _EntityState()

    # Load the attributes based on the template
    loaded_fields = []
    for dictionary in template:
        if not isinstance(dictionary, dict):
            raise AssertionError(
                f'Positional argument "{dictionary}" passed must be a dict.'
                f'This argument serves as a template for loading common '
                f'values.',
            )
        # Template values act only as defaults: explicit kwargs win
        for field_name, val in dictionary.items():
            if field_name not in kwargs:
                kwargs[field_name] = val

    # Now load against the keyword arguments
    for field_name, val in kwargs.items():
        loaded_fields.append(field_name)
        try:
            setattr(self, field_name, val)
        except ValidationError as err:
            # NOTE: `field_name` is deliberately rebound here to the keys of
            # `err.messages` while collecting per-field errors
            for field_name in err.messages:
                self.errors[field_name].extend(err.messages[field_name])

    # Load Value Objects from associated fields
    # This block will dynamically construct value objects from field values
    # and associated the vo with the entity
    # If the value object was already provided, it will not be overridden.
    for field_name, field_obj in self.meta_.declared_fields.items():
        if isinstance(
                field_obj, (ValueObjectField)) and not getattr(self, field_name):
            # Map each embedded field's name to the kwarg attribute that
            # carries its value
            attributes = [
                (embedded_field.field_name, embedded_field.attribute_name)
                for embedded_field in field_obj.embedded_fields.values()
            ]
            values = {name: kwargs.get(attr) for name, attr in attributes}
            try:
                value_object = field_obj.value_object_cls.build(**values)
                # Set VO value only if the value object is not None/Empty
                if value_object:
                    setattr(self, field_name, value_object)
                    loaded_fields.append(field_name)
            except ValidationError as err:
                # VO errors are recorded under `<vo_field>_<sub_field>` keys
                for sub_field_name in err.messages:
                    self.errors['{}_{}'.format(
                        field_name, sub_field_name)].extend(
                            err.messages[sub_field_name])

    # Load Identities
    # Generate an identity only when none was supplied and the id field is
    # an `Auto` field (exact type check, subclasses excluded)
    if not getattr(self, self.meta_.id_field.field_name, None) \
            and type(self.meta_.id_field) is Auto:
        setattr(self, self.meta_.id_field.field_name, self.generate_identity())
        loaded_fields.append(self.meta_.id_field.field_name)

    # Load Associations
    for field_name, field_obj in self.meta_.declared_fields.items():
        if isinstance(field_obj, Association):
            getattr(self, field_name)  # This refreshes the values in associations

    # Now load the remaining fields with a None value, which will fail
    # for required fields
    for field_name, field_obj in self.meta_.declared_fields.items():
        if field_name not in loaded_fields:
            # References and Associations are resolved elsewhere, so they
            # are not defaulted to None here
            if not isinstance(field_obj, (Reference, _ReferenceField, Association)):
                try:
                    setattr(self, field_name, None)

                    # If field is a VO, set underlying attributes to None as well
                    if isinstance(field_obj, ValueObjectField):
                        for embedded_field in field_obj.embedded_fields.values():
                            setattr(self, embedded_field.attribute_name, None)
                except ValidationError as err:
                    # Required fields record an error when set to None;
                    # `field_name` is rebound to the error keys here as well
                    for field_name in err.messages:
                        self.errors[field_name].extend(
                            err.messages[field_name])

    # Default any remaining meta attributes not yet present on the instance
    # (presumably shadow/derived attributes — TODO confirm against `meta_`)
    for field_name, field_obj in self.meta_.attributes.items():
        if field_name not in loaded_fields and not hasattr(self, field_name):
            setattr(self, field_name, None)

    self.defaults()

    # `clean()` will return a `defaultdict(list)` if errors are to be raised
    custom_errors = self.clean() or {}
    for field in custom_errors:
        self.errors[field].extend(custom_errors[field])

    # Raise any errors found during load
    if self.errors and self.raise_errors:
        logger.error(self.errors)
        raise ValidationError(self.errors)
def __call__(self, value): if self.max_value and value > self.max_value: raise ValidationError(self.message)
def __call__(self, value): if self.max_length and len(value) > self.max_length: raise ValidationError(self.message)