def update_all(cls, documents: List[dict]) -> (List[dict], List[dict]):
    """
    Update documents formatted as a list of dictionaries.

    :raises ValidationFailed in case validation fails.
    :returns A tuple containing previous documents (first item)
    and new documents (second item).
    """
    if not documents:
        raise ValidationFailed([], message="No data provided.")

    if not isinstance(documents, list):
        raise ValidationFailed(documents, message="Must be a list.")

    new_documents = copy.deepcopy(documents)

    errors = cls.validate_and_deserialize_update(new_documents)
    if errors:
        raise ValidationFailed(documents, errors)

    try:
        if cls.logger.isEnabledFor(logging.DEBUG):
            cls.logger.debug(f"Updating {new_documents}...")
        previous_documents, updated_documents = cls._update_many(new_documents)
        if cls.logger.isEnabledFor(logging.DEBUG):
            cls.logger.debug(f"Documents updated to {updated_documents}.")
        return (
            [cls.serialize(document) for document in previous_documents],
            [cls.serialize(document) for document in updated_documents],
        )
    except pymongo.errors.DuplicateKeyError:
        raise ValidationFailed(
            [cls.serialize(document) for document in documents],
            message="One document already exists.",
        )
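# Illustrative usage sketch (not part of the source): `car_collection` stands for a
# hypothetical model class built on this MongoDB CRUD base; only the update_all
# signature above is assumed. update_all returns a (previous, updated) tuple of
# serialized documents, which can be used to build a diff or an audit trail.
def apply_bulk_update(car_collection, payload: List[dict]):
    try:
        previous_docs, updated_docs = car_collection.update_all(payload)
    except ValidationFailed as error:
        # Raised when the payload is empty, not a list, fails field validation,
        # or collides with an existing document.
        logging.getLogger(__name__).warning("Bulk update rejected: %s", error)
        return []
    return list(zip(previous_docs, updated_docs))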
def add_all(cls, documents: List[dict]) -> List[dict]:
    """
    Add documents formatted as a list of dictionaries.

    :raises ValidationFailed in case validation fails.
    :returns The inserted documents formatted as a list of dictionaries.
    """
    if not documents:
        raise ValidationFailed([], message="No data provided.")

    if not isinstance(documents, list):
        raise ValidationFailed(documents, message="Must be a list.")

    new_documents = copy.deepcopy(documents)

    errors = cls.validate_and_deserialize_insert(new_documents)
    if errors:
        raise ValidationFailed(documents, errors)

    try:
        if cls.logger.isEnabledFor(logging.DEBUG):
            cls.logger.debug(f"Inserting {new_documents}...")
        cls._insert_many(new_documents)
        if cls.logger.isEnabledFor(logging.DEBUG):
            cls.logger.debug("Documents inserted.")
        return [cls.serialize(document) for document in new_documents]
    except pymongo.errors.BulkWriteError as e:
        raise ValidationFailed(documents, message=str(e.details))
def add_all(cls, rows: List[dict]) -> List[dict]:
    """
    Add models formatted as a list of dictionaries.

    :raises ValidationFailed in case Marshmallow validation fails.
    :returns The inserted models formatted as a list of dictionaries.
    """
    if not rows:
        raise ValidationFailed({}, message="No data provided.")

    try:
        models = cls.schema().load(rows, many=True, session=cls._session)
    except exc.sa_exc.DBAPIError:
        cls._handle_connection_failure()
    except ValidationError as e:
        raise ValidationFailed(rows, e.messages)

    try:
        cls._session.add_all(models)
        if cls.audit_model:
            for row in rows:
                cls.audit_model.audit_add(row)
        cls._session.commit()
        return _models_field_values(models)
    except exc.sa_exc.DBAPIError:
        cls._session.rollback()
        cls._handle_connection_failure()
    except Exception:
        cls._session.rollback()
        raise
def add(cls, row: dict) -> dict:
    """
    Add a model formatted as a dictionary.

    :raises ValidationFailed in case Marshmallow validation fails.
    :returns The inserted model formatted as a dictionary.
    """
    if not row:
        raise ValidationFailed({}, message="No data provided.")

    try:
        model = cls.schema().load(row, session=cls._session)
    except exc.sa_exc.DBAPIError:
        logger.exception("Database could not be reached.")
        raise Exception("Database could not be reached.")
    except ValidationError as e:
        raise ValidationFailed(row, e.messages)

    try:
        cls._session.add(model)
        if cls.audit_model:
            cls.audit_model.audit_add(row)
        cls._session.commit()
        return _model_field_values(model)
    except exc.sa_exc.DBAPIError:
        cls._session.rollback()
        cls._handle_connection_failure()
    except Exception:
        cls._session.rollback()
        raise
def get(cls, **filters) -> dict: """ Return the model formatted as a dictionary. """ cls._check_required_query_fields(filters) query = cls._session.query(cls) for column_name, value in filters.items(): if value is not None: if isinstance(value, list): if not value: continue if len(value) > 1: raise ValidationFailed( filters, {column_name: ["Only one value must be queried."]}) value = value[0] query = query.filter(getattr(cls, column_name) == value) try: model = query.one_or_none() cls._session.close() return cls.schema().dump(model) except exc.MultipleResultsFound: cls._session.rollback( ) # SQLAlchemy state is not coherent with the reality if not rollback raise ValidationFailed( filters, message="More than one result: Consider another filtering.") except exc.sa_exc.DBAPIError: cls._handle_connection_failure()
def _get_revision(cls, filters: dict) -> int:
    # TODO Use an int Column validate + deserialize
    revision = filters.get("revision")
    if revision is None:
        raise ValidationFailed(
            filters, {"revision": ["Missing data for required field."]}
        )

    if not isinstance(revision, int):
        raise ValidationFailed(filters, {"revision": ["Not a valid int."]})

    del filters["revision"]
    return revision
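# Illustrative sketch (not part of the source): _get_revision pops the mandatory
# "revision" entry out of the filters so that the remaining keys can be used as a
# plain document query. With hypothetical input:
#
#     filters = {"revision": 2, "name": "test"}
#     revision = cls._get_revision(filters)
#     # revision == 2 and filters == {"name": "test"}
#
# A missing or non-integer revision raises ValidationFailed instead.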
def update_all(cls, rows: List[dict]) -> (List[dict], List[dict]):
    """
    Update models formatted as a list of dictionaries.

    :raises ValidationFailed in case Marshmallow validation fails.
    :returns A tuple containing previous models formatted as a list of dictionaries (first item)
    and new models formatted as a list of dictionaries (second item).
    """
    if not rows:
        raise ValidationFailed({}, message="No data provided.")

    previous_rows = []
    new_rows = []
    new_models = []
    for row in rows:
        if not isinstance(row, dict):
            raise ValidationFailed(row, message="Must be a dictionary.")
        try:
            previous_model = cls.schema().get_instance(row)
        except exc.sa_exc.DBAPIError:
            cls._handle_connection_failure()
        if not previous_model:
            raise ModelCouldNotBeFound(row)
        previous_row = _model_field_values(previous_model)
        try:
            new_model = cls.schema().load(
                row, instance=previous_model, partial=True, session=cls._session
            )
        except ValidationError as e:
            raise ValidationFailed(row, e.messages)
        new_row = _model_field_values(new_model)

        previous_rows.append(previous_row)
        new_rows.append(new_row)
        new_models.append(new_model)

    try:
        cls._session.add_all(new_models)
        if cls.audit_model:
            for new_row in new_rows:
                cls.audit_model.audit_update(new_row)
        cls._session.commit()
        return previous_rows, new_rows
    except exc.sa_exc.DBAPIError:
        cls._session.rollback()
        cls._handle_connection_failure()
    except Exception:
        cls._session.rollback()
        raise
def get_all(cls, **filters) -> List[dict]:
    """
    Return all documents matching provided filters.
    """
    limit = filters.pop("limit", 0) or 0
    offset = filters.pop("offset", 0) or 0
    errors = cls.validate_query(filters)
    if errors:
        raise ValidationFailed(filters, errors)

    cls.deserialize_query(filters)

    if cls.logger.isEnabledFor(logging.DEBUG):
        if filters:
            cls.logger.debug(f"Query documents matching {filters}...")
        else:
            cls.logger.debug("Query all documents...")
    documents = cls.__collection__.find(filters, skip=offset, limit=limit)
    if cls.logger.isEnabledFor(logging.DEBUG):
        nb_documents = (
            cls.__collection__.count_documents(filters, skip=offset, limit=limit)
            if limit
            else cls.__collection__.count_documents(filters, skip=offset)
        )
        cls.logger.debug(
            f'{nb_documents if nb_documents else "No corresponding"} documents retrieved.'
        )
    return [cls.serialize(document) for document in documents]
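# Illustrative pagination sketch (not part of the source): "limit" and "offset" are
# popped from the filters before querying, so they can be mixed freely with field
# filters. `car_collection` and the generator helper below are hypothetical.
def iter_pages(car_collection, page_size: int = 100, **field_filters):
    offset = 0
    while True:
        page = car_collection.get_all(limit=page_size, offset=offset, **field_filters)
        if not page:
            break
        yield page
        offset += page_size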
def _check_required_query_fields(cls, filters):
    for required_field in cls._get_required_query_fields():
        if required_field not in filters:
            raise ValidationFailed(
                filters,
                errors={required_field: ["Missing data for required field."]},
            )
def get_last(self, request_arguments: dict) -> dict:
    """
    Return the last revision of a model formatted as a dictionary.
    """
    if not self._model:
        raise ControllerModelNotSet(self)
    if not isinstance(request_arguments, dict):
        raise ValidationFailed(request_arguments, message="Must be a dictionary.")
    return self._model.get_last(**request_arguments)
def get_history(self, request_arguments: dict) -> List[dict]:
    """
    Return all models formatted as a list of dictionaries.
    """
    if not self._model:
        raise ControllerModelNotSet(self)
    if not isinstance(request_arguments, dict):
        raise ValidationFailed(request_arguments, message="Must be a dictionary.")
    return self._model.get_history(**request_arguments)
def rollback_to(self, request_arguments: dict) -> int:
    """
    Rollback to the model(s) matching those criteria.

    :returns Number of affected rows.
    """
    if not self._model:
        raise ControllerModelNotSet(self)
    if not isinstance(request_arguments, dict):
        raise ValidationFailed(request_arguments, message="Must be a dictionary.")
    return self._model.rollback_to(**request_arguments)
def add(cls, document: dict) -> dict:
    """
    Add a model formatted as a dictionary.

    :raises ValidationFailed in case validation fails.
    :returns The inserted model formatted as a dictionary.
    """
    errors = cls.validate_insert(document)
    if errors:
        raise ValidationFailed(document, errors)

    cls.deserialize_insert(document)
    try:
        if cls.logger.isEnabledFor(logging.DEBUG):
            cls.logger.debug(f"Inserting {document}...")
        cls._insert_one(document)
        if cls.logger.isEnabledFor(logging.DEBUG):
            cls.logger.debug("Document inserted.")
        return cls.serialize(document)
    except pymongo.errors.DuplicateKeyError:
        raise ValidationFailed(
            cls.serialize(document), message="This document already exists."
        )
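# Illustrative usage sketch (not part of the source): inserting a document whose
# key already exists surfaces as ValidationFailed ("This document already exists.")
# rather than a raw pymongo DuplicateKeyError. `car_collection` is a hypothetical
# model class built on this CRUD base.
def add_if_absent(car_collection, document: dict) -> dict:
    try:
        return car_collection.add(document)
    except ValidationFailed:
        # Duplicate key or field validation error; nothing was inserted.
        return {}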
def get(cls, **filters) -> dict: """ Return the document matching provided filters. """ errors = cls.validate_query(filters) if errors: raise ValidationFailed(filters, errors) cls.deserialize_query(filters) if cls.__collection__.count_documents(filters) > 1: raise ValidationFailed( filters, message="More than one result: Consider another filtering.") if cls.logger.isEnabledFor(logging.DEBUG): cls.logger.debug(f"Query document matching {filters}...") document = cls.__collection__.find_one(filters) if cls.logger.isEnabledFor(logging.DEBUG): cls.logger.debug( f'{"1" if document else "No corresponding"} document retrieved.' ) return cls.serialize(document)
def update(cls, document: dict) -> (dict, dict):
    """
    Update a model formatted as a dictionary.

    :raises ValidationFailed in case validation fails.
    :returns A tuple containing previous document (first item)
    and new document (second item).
    """
    errors = cls.validate_update(document)
    if errors:
        raise ValidationFailed(document, errors)

    cls.deserialize_update(document)
    try:
        if cls.logger.isEnabledFor(logging.DEBUG):
            cls.logger.debug(f"Updating {document}...")
        previous_document, new_document = cls._update_one(document)
        if cls.logger.isEnabledFor(logging.DEBUG):
            cls.logger.debug(f"Document updated to {new_document}.")
        return cls.serialize(previous_document), cls.serialize(new_document)
    except pymongo.errors.DuplicateKeyError:
        raise ValidationFailed(
            cls.serialize(document), message="This document already exists."
        )
def _ignore_read_only_fields(model_properties: dict, model_as_dict: dict):
    if model_as_dict:
        if not isinstance(model_as_dict, dict):
            raise ValidationFailed(model_as_dict, message="Must be a dictionary.")
        read_only_fields = [
            field_name
            for field_name, field_properties in model_properties.items()
            if field_properties.get("readOnly")
        ]
        return {
            field_name: field_value
            for field_name, field_value in model_as_dict.items()
            if field_name not in read_only_fields
        }
    return model_as_dict
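# Illustrative sketch (not part of the source): model_properties follows the
# OpenAPI-style layout read above, where a field is dropped from the payload when
# its properties contain a truthy "readOnly" entry. The property and field names
# below are hypothetical.
_EXAMPLE_PROPERTIES = {
    "id": {"type": "integer", "readOnly": True},
    "name": {"type": "string"},
}
# _ignore_read_only_fields(_EXAMPLE_PROPERTIES, {"id": 1, "name": "test"})
# would return {"name": "test"}, leaving the read-only "id" to be generated
# server side.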
def post_many(self, new_dicts: List[dict]) -> List[dict]:
    """
    Add models formatted as a list of dictionaries.

    :raises ValidationFailed in case Marshmallow validation fails.
    :returns The inserted models formatted as a list of dictionaries.
    """
    if not self._model:
        raise ControllerModelNotSet(self)
    if new_dicts and hasattr(self.json_post_model, "_schema"):
        if not isinstance(new_dicts, list):
            raise ValidationFailed(
                new_dicts, message="Must be a list of dictionaries."
            )
        new_dicts = [
            _ignore_read_only_fields(
                self.json_post_model._schema.get("properties", {}), new_dict
            )
            for new_dict in new_dicts
        ]
    return self._model.add_all(new_dicts)
def remove(cls, **filters) -> int:
    """
    Remove the document(s) matching those criteria.

    :param filters: Provided filters.
    Each entry is composed of a field name associated to a value.
    :returns Number of removed documents.
    """
    errors = cls.validate_remove(filters)
    if errors:
        raise ValidationFailed(filters, errors)

    cls.deserialize_query(filters)

    if cls.logger.isEnabledFor(logging.DEBUG):
        if filters:
            cls.logger.debug(f"Removing documents corresponding to {filters}...")
        else:
            cls.logger.debug("Removing all documents...")
    nb_removed = cls._delete_many(filters)
    if cls.logger.isEnabledFor(logging.DEBUG):
        cls.logger.debug(f"{nb_removed} documents removed.")
    return nb_removed
def rollback_to(cls, **filters) -> int:
    revision = cls._get_revision(filters)

    errors = cls.validate_query(filters)
    if errors:
        raise ValidationFailed(filters, errors)

    cls.deserialize_query(filters)

    # Select documents that were valid at the time of the requested revision
    # but have expired since then.
    previously_expired = {
        cls.valid_since_revision.name: {"$lte": revision},
        cls.valid_until_revision.name: {
            "$exists": True,
            "$ne": -1,
            "$gt": revision,
        },
    }
    expired_documents = cls.__collection__.find(
        {**filters, **previously_expired}, projection={"_id": False}
    )
    expired_documents = list(expired_documents)  # Convert Cursor to list

    errors = cls.validate_rollback(filters, expired_documents)
    if errors:
        raise ValidationFailed({**filters, "revision": revision}, errors)

    new_revision = cls._increment(*REVISION_COUNTER)

    # Expire the currently valid versions of those documents (a new version of
    # each one is reinserted below with this revision as its validity start).
    for expired_document in expired_documents:
        expired_document_keys = cls._to_primary_keys_model(expired_document)
        expired_document_keys[cls.valid_until_revision.name] = -1

        cls.__collection__.find_one_and_update(
            expired_document_keys,
            {"$set": {cls.valid_until_revision.name: new_revision}},
        )

    # Expire the currently valid documents that did not exist yet at the time
    # of the requested revision.
    new_still_valid = {
        cls.valid_since_revision.name: {"$gt": revision},
        cls.valid_until_revision.name: -1,
    }
    nb_removed = cls.__collection__.update_many(
        {**filters, **new_still_valid},
        {"$set": {cls.valid_until_revision.name: new_revision}},
    ).modified_count

    # Reinsert the expired documents as the currently valid versions.
    for expired_document in expired_documents:
        expired_document[cls.valid_since_revision.name] = new_revision
        expired_document[cls.valid_until_revision.name] = -1

    if expired_documents:
        cls.__collection__.insert_many(expired_documents)

    if cls.audit_model:
        cls.audit_model.audit_rollback(new_revision)

    return len(expired_documents) + nb_removed
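# Illustrative usage sketch (not part of the source): rollback_to expects the
# target revision among the keyword filters and returns how many documents were
# touched (expired versions reinserted plus newer versions invalidated).
# `car_collection` and the helper below are hypothetical.
def rollback_cars(car_collection, revision: int) -> int:
    try:
        return car_collection.rollback_to(revision=revision)
    except ValidationFailed as error:
        # Missing/non-integer revision, invalid filters, or a rejected rollback.
        logging.getLogger(__name__).warning("Rollback refused: %s", error)
        return 0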