def diff_models(from_: BaseModel, to_: BaseModel) -> MutableMapping:
    """Return the fields of ``to_`` whose values differ from ``from_``.

    Only fields explicitly set on ``to_`` are considered
    (``exclude_unset=True``), which makes this useful for computing a
    minimal patch before an update.
    """
    baseline = from_.dict()
    candidate = to_.dict(exclude_unset=True)
    diff = {}
    for field, value in candidate.items():
        if baseline.get(field) != value:
            diff[field] = value
    return diff
def test_get_one(client, url: str = URL, model: BaseModel = basic_potato, id_key: str = 'id'):
    """Create one record, then fetch it by id and verify the payload round-trips."""
    create_res = client.post(url, json=model.dict())
    assert create_res.status_code == 200
    listing = client.get(url).json()
    assert len(listing) == 1
    one_res = client.get(f'{url}/{listing[0][id_key]}')
    assert one_res.status_code == 200
    assert compare_dict(one_res.json(), model.dict(), exclude=[id_key])
def create_from_schema(
    cls,
    db,
    schema: BaseModel,
    additions=None,
):
    """Instantiate ``cls`` from a pydantic schema, persist it, and return the result.

    ``additions`` are extra constructor keyword arguments passed alongside the
    schema fields.  ``skip_defaults`` is the deprecated pydantic spelling —
    presumably equivalent to ``exclude_unset`` on this pydantic version; confirm.
    """
    base_kwargs = schema.dict(skip_defaults=True)
    if additions is None:
        return cls(**base_kwargs).save_return(db)
    return cls(**base_kwargs, **additions).save_return(db)
def add_local_resource(db: Session, Resource: declarative_base, user_input: BaseModel, r_id=None):
    """Create a new ``Resource`` row, or patch an existing one when ``r_id`` is given.

    When updating, only fields whose value is not None are copied onto the
    existing row, so partial updates leave other columns untouched.

    :param db: active SQLAlchemy session.
    :param Resource: mapped model class to create or update.
    :param user_input: pydantic model carrying the field values.
    :param r_id: primary key of an existing resource to update, or None to create.
    :return: the created or updated (committed) resource instance.
    """
    if r_id:
        data_dict = user_input.dict()
        # NOTE(review): relies on module-level get_local_resources(); assumed to
        # return the single mapped instance for this id — confirm.
        resource = get_local_resources(db=db, Resource=Resource, r_id=r_id)
        for attr, val in data_dict.items():
            # Idiom fix: identity check instead of `not val == None`
            # (avoids surprises from custom __eq__ implementations).
            if val is not None:
                setattr(resource, attr, val)
    else:
        resource = Resource(**user_input.dict())
        db.add(resource)
    db.commit()
    return resource
def test_delete_all(client, url: str = URL, model: BaseModel = basic_potato):
    """Insert two records, delete the whole collection, and verify it is empty."""
    for _ in range(2):
        res = client.post(url, json=model.dict())
        assert res.status_code == 200
    assert len(client.get(url).json()) == 2
    res = client.delete(url)
    assert res.status_code == 200
    assert len(res.json()) == 0
    assert len(client.get(url).json()) == 0
async def publish(
    self, stream_id: str, data: BaseModel, key: Optional[bytes] = None
) -> Awaitable[aiokafka.structs.ConsumerRecord]:
    """Serialize ``data`` and send it to the Kafka topic mapped to ``stream_id``.

    Raises UnregisteredSchemaException when the model's ``__key__`` is not in
    the registered schemas.  The topic is created lazily on first publish.
    """
    # NOTE(review): attribute is spelled `_intialized` (sic) — presumably the
    # same spelling is used where it is assigned; do not "fix" one side only.
    if not self._intialized:
        async with self.get_lock("_"):
            await self.initialize()
    schema_key = getattr(data, "__key__", None)
    if schema_key not in self._schemas:
        raise UnregisteredSchemaException(model=data)
    data_ = data.dict()
    topic_id = self.topic_mng.get_topic_id(stream_id)
    async with self.get_lock(stream_id):
        # Create the topic on demand; retention comes from the registered
        # schema (seconds -> milliseconds) when configured.
        if not await self.topic_mng.topic_exists(topic_id):
            reg = self.get_schema_reg(data)
            await self.topic_mng.create_topic(
                topic_id,
                replication_factor=self._replication_factor,
                retention_ms=reg.retention * 1000
                if reg.retention is not None
                else None,
            )
    logger.info(f"Sending kafka msg: {stream_id}")
    producer = await self._get_producer()
    return await producer.send(
        topic_id, value=orjson.dumps({"schema": schema_key, "data": data_}), key=key
    )
async def update(table: Table, model: BaseModel, db: Database) -> int:
    """Write ``model``'s fields to the row sharing its id and return that id.

    The ``updated_at`` column is refreshed via create_datetime() on every call.
    """
    values = dict(model.dict())
    values["updated_at"] = create_datetime()["updated_at"]
    statement = table.update().where(table.c.id == model.id).values(values)
    await db.execute(query=statement)
    return model.id
async def _update_and_reencrypt(self, identifier: UUID, body: BaseModel, session: Optional[AsyncSession] = None): """ Support re-encryption by enforcing that every update triggers a new encryption call, even if the the original call does not update the encrypted field. """ encrypted_field_name = self.store.model_class.__plaintext__ current_model = await self.store.retrieve(identifier, session=session) current_value = current_model.plaintext dict_body = body.dict() null_update = ( # Check if the update is for the encrypted field, and if it's explicitly set to null encrypted_field_name in dict_body and dict_body.get(encrypted_field_name) is None) new_value = dict_body.pop(self.store.model_class.__plaintext__, None) use_new_value = new_value is not None or null_update updated_value = new_value if use_new_value else current_value model_kwargs = { id: identifier, encrypted_field_name: updated_value, **dict_body, } model = self.store.model_class(id=identifier, **model_kwargs) return self.store.update(identifier, model, session=session)
def to_json(obj: BaseModel) -> JSON:
    """Convert a pydantic model to a JSON-serializable dict (mutated in place).

    Paths become strings, callables the literal ``"function"``, enums their
    name, and non-list/str iterables (sets, tuples, generators) become lists.
    Nested dicts and lists are converted recursively; lists nested inside
    lists are now handled too (the previous version skipped them).
    """

    def to_serializable(v):
        # Path covers PosixPath/WindowsPath — the separate PosixPath branch
        # was redundant (PosixPath is a Path subclass).
        if isinstance(v, Path):
            return str(v)
        if isinstance(v, Callable):
            return "function"
        if isinstance(v, Enum):
            return v.name
        if isinstance(v, Iterable) and not isinstance(v, list) and not isinstance(v, str):
            v = list(v)
        return v

    base = obj.dict()

    def to_json_rec(_obj: Any):
        if isinstance(_obj, dict):
            for k, v in _obj.items():
                if isinstance(v, (dict, list)):
                    to_json_rec(v)
                else:
                    _obj[k] = to_serializable(v)
        elif isinstance(_obj, list):
            for i, e in enumerate(_obj):
                if isinstance(e, (dict, list)):
                    to_json_rec(e)
                else:
                    _obj[i] = to_serializable(e)

    to_json_rec(base)
    return base
async def update_entry(table: Table, payload: BaseModel, id: int = Path(..., gt=0)):
    """Update an existing entry and return its payload together with its id.

    get_entry() is awaited first so a missing id raises before any write.
    """
    await get_entry(table, id)
    entry_id = await crud.put(id, payload, table)
    response = dict(payload.dict())
    response["id"] = entry_id
    return response
async def get_data_with_input(self, path: str, input: BaseModel) -> Dict:
    """Evaluate an OPA data document against an input — i.e. how OPA "runs queries".

    How OPA evaluates documents:
    https://www.openpolicyagent.org/docs/latest/philosophy/#the-opa-document-model
    API reference:
    https://www.openpolicyagent.org/docs/latest/rest-api/#get-a-document-with-input
    """
    # The OPA data API expects the document input nested under "input".
    request_body = {"input": input.dict()}
    if path.startswith("/"):
        path = path[1:]
    try:
        async with aiohttp.ClientSession() as session:
            post_ctx = session.post(
                f"{self._opa_url}/data/{path}", data=json.dumps(request_body)
            )
            async with post_ctx as opa_response:
                return await proxy_response(opa_response)
    except aiohttp.ClientError as e:
        logger.warning("Opa connection error: {err}", err=e)
        raise
async def publish(
    self,
    stream_id: str,
    data: BaseModel,
    key: Optional[bytes] = None,
    headers: Optional[List[Tuple[str, bytes]]] = None,
) -> Awaitable[aiokafka.structs.ConsumerRecord]:
    """Serialize ``data`` and publish it on ``stream_id``, creating the topic if needed.

    Models without a registered ``__key__`` fall back to a
    ``"<ClassName>:1"`` schema key instead of raising.
    """
    if not self._initialized:
        async with self.get_lock("_"):
            await self.initialize()
    schema_key = getattr(data, "__key__", None)
    if schema_key not in self._schemas:
        # unregistered models are allowed: derive a schema key from the class
        schema_key = f"{data.__class__.__name__}:1"
    data_ = data.dict()
    await self._maybe_create_topic(stream_id, data)
    payload = orjson.dumps({"schema": schema_key, "data": data_})
    return await self.raw_publish(stream_id, payload, key, headers=headers)
def first_or_create(self, values: BaseModel, match: Optional[dict] = None) -> Base:
    """Return the first row matching ``match`` (default: all of ``values``'s
    fields); create one from ``values`` when nothing matches."""
    criteria = match or values.dict()
    query = self.db.query(self.model)
    for field, expected in criteria.items():
        query = query.filter(getattr(self.model, field) == expected)
    found = query.first()
    return found or self.create(values)
def create(self, data: BaseModel) -> Base:
    """Insert a new row built from ``data`` and return the refreshed entity."""
    entity = self.model(**data.dict())
    self.db.add(entity)
    self.db.commit()
    self.db.refresh(entity)
    return entity
def filter_by_model(self, filter_obj: BaseModel):
    """Build filter criteria from a pydantic filter model and apply them.

    Each explicitly-set field on ``filter_obj`` becomes one criterion.
    Per-field behaviour is driven by the pydantic ``field_info.extra``
    metadata: ``op`` (operator name, default ``eq``), ``parser`` (value
    transform), and ``column``/``columns`` (target column override).
    LIKE-style operators may match several columns OR-ed together.
    Returns ``self`` unchanged when ``filter_obj`` is None.
    """
    if filter_obj is None:
        return self
    values = filter_obj.dict(exclude_unset=True)
    criterions = []
    for k, v in values.items():
        # pydantic v1: extra kwargs passed to Field() land in field_info.extra.
        extra = filter_obj.__fields__[k].field_info.extra
        op_name = extra.get('op', 'eq')
        op = OPERATOR_MAP.get(op_name)
        if not op:
            # Unknown operator name: skip the field rather than fail.
            continue
        # An explicit parser wins; otherwise fall back to a per-op default.
        parser = extra.get('parser') or PARSER_MAP.get(op_name)
        v = parser(v) if parser and isfunction(parser) else v
        if op_name in {'like', 'ilike', 'notlike', 'notilike'}:
            # LIKE variants may target several columns, OR-ed together.
            columns = extra.get('columns') or [k]
            like_clauses = []
            for col in columns:
                criterion = self.compute_criterion(col, op, v)
                if criterion is not None:
                    like_clauses.append(criterion)
            criterion = or_(*like_clauses)
        else:
            criterion = self.compute_criterion(extra.get('column') or k, op, v)
        if criterion is not None:
            criterions.append(criterion)
    return self.filter(*criterions)
async def create(self, schema: BaseModel, **kwargs: tp.Any) -> ormar.Model:
    """Create and persist a new model instance.

    Fields come from the schema (only explicitly-set ones) plus any extra
    keyword overrides.
    """
    created = await self.model.objects.create(
        **schema.dict(exclude_unset=True), **kwargs
    )
    return created
def update(uuid: UUID, obj: BaseModel, db_table: DeclarativeMeta, db: Session):
    """Update the row with the given UUID from the set fields of ``obj``.

    Designed to be called only by the API since it raises an HTTPException:
    404 when no row matches, 409 on an IntegrityError (duplicate value or
    illegal None assignment).
    """
    try:
        # exclude_unset keeps untouched fields out of the UPDATE instead of
        # nulling them.
        statement = sql_update(db_table).where(db_table.uuid == uuid).values(
            **obj.dict(exclude_unset=True)
        )
        result = db.execute(statement)
        if result.rowcount != 1:
            raise HTTPException(status_code=404, detail=f"UUID {uuid} does not exist.")
        commit(db)
    except IntegrityError:
        db.rollback()
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail=f"Got an IntegrityError while updating UUID {uuid}.",
        )
def model_to_primitive(
    model: BaseModel,
    without_id: bool = False,
    exclude: Sequence[str] = None,
    keep_python_primitives: bool = False,
) -> Dict:
    """Dump a pydantic model to a plain dict.

    :param model: Pydantic model to convert.
    :param without_id: Drop the ``id`` key from the result.
    :param exclude: Extra field names to leave out.
    :param keep_python_primitives: When True the result keeps rich Python
        values (e.g. datetime, Decimal); otherwise it is round-tripped
        through JSON so only JSON primitives remain.
    :return: Dict with the model's remaining fields.
    """
    excluded: Set[Union[int, str]] = set(exclude) if exclude else set()
    if without_id:
        excluded.add("id")
    if keep_python_primitives:
        return model.dict(exclude=excluded)
    return json.loads(model.json(exclude=excluded))
def _filtered_query(self, filters: BaseModel):
    """Translate a pydantic filter model into a filtered query on ``self.model``.

    Lookup keys are presumably Django-style (e.g. ``name__icontains``) and are
    mapped to SQLAlchemy operators via dj_lookup_to_sqla() — confirm there.
    """
    query = db.session.query(self.model)
    lookups = clean(filters.dict(exclude_defaults=True))
    for lookup, value in lookups.items():
        operator, column_name = dj_lookup_to_sqla(lookup)
        query = query.filter(operator(getattr(self.model, column_name), value))
    return query
def update_with_dict(m: BaseModel, updated) -> BaseModel:
    """Return a new model of ``m``'s type with ``updated`` applied on top.

    A ``meta`` mapping present (and non-None) on both sides is merged key-wise
    (``updated`` keys win) instead of being replaced wholesale.

    :param m: source model; never modified.
    :param updated: mapping of field overrides.  BUG FIX: no longer mutated —
        the old implementation wrote the merged ``meta`` back into the
        caller's dict.
    :return: a fresh instance of ``type(m)``.
    """
    merged = m.dict()
    overrides = dict(updated)  # copy so the caller's mapping stays intact
    if merged.get('meta') is not None and overrides.get('meta') is not None:
        combined_meta = dict(merged['meta'])
        combined_meta.update(overrides['meta'])
        overrides['meta'] = combined_meta
    merged.update(overrides)
    return type(m)(**merged)
def update_item_with_etag(self, item: BaseModel, etag: str) -> BaseModel:
    """Replace an item guarded by its etag, then return the freshly read item.

    Optimistic concurrency: the replace succeeds only while the stored etag
    still equals ``etag`` (MatchConditions.IfNotModified) — presumably an
    Azure Cosmos container; confirm against the class definition.
    """
    self.container.replace_item(
        item=item.id,
        body=item.dict(),
        etag=etag,
        match_condition=MatchConditions.IfNotModified,
    )
    return self.read_item_by_id(item.id)
async def update(collection: Collection, id: str, data: BaseModel):
    """Patch the document with ``id``: $set every truthy field from ``data``.

    :param collection: Mongo collection to update.
    :param id: document id (hex string for ObjectId).
    :param data: pydantic model with the candidate field values.
    :return: the updated document, or None when no document has this id.
    """
    logging.info(">>> " + __name__ + ":" + update.__name__ )
    found = await collection.find_one({"_id": ObjectId(id)}, {"_id": 1})
    if not found:
        return None
    # TODO(review): falsy values (0, "", False) are dropped along with None —
    # confirm whether only None should be excluded (matches the original TODO).
    _dict = {k: v for k, v in data.dict().items() if v}
    return await collection.find_one_and_update(
        {"_id": ObjectId(id)},
        {"$set": _dict},
        return_document=ReturnDocument.AFTER
    )
async def update_or_insert(self, obj_to_save: BaseModel, filters: dict):
    """Upsert ``obj_to_save`` into the collection, matched by ``filters``.

    Returns the document as stored after the operation.
    """
    update_doc = {"$set": obj_to_save.dict()}
    return await self._collection.find_one_and_update(
        filters,
        update_doc,
        upsert=True,
        return_document=ReturnDocument.AFTER,
    )
def create(obj: BaseModel, db_table: DeclarativeMeta, db: Session) -> UUID:
    """Insert ``obj`` as a new ``db_table`` row and return its generated UUID.

    Designed to be called only by the API since commit() may raise an
    HTTPException.
    """
    row = db_table(**obj.dict())
    db.add(row)
    commit(db)
    return row.uuid
def update(self, id: str, update: BaseModel):
    """Apply ``update``'s fields to document ``id`` and return the new document.

    Refreshes the ``updated`` timestamp; raises DocumentNotFoundException
    when no document was modified.
    """
    payload = update.dict()
    payload["updated"] = get_timestamp()
    outcome = self.collection.update_one({"_id": id}, {"$set": payload})
    if not outcome.modified_count:
        raise DocumentNotFoundException(collection=self.__collection__, identifier=id)
    return self.get(id)
def test_update(client, url: str = URL, model: BaseModel = basic_potato, id_key: str = 'id'):
    """Create a record, PUT a modified copy, and verify both the PUT response
    and a follow-up GET reflect the change."""
    create_res = client.post(url, json=model.dict())
    created = create_res.json()
    assert create_res.status_code == 200
    tuber = model.copy()
    tuber.color = 'yellow'
    put_res = client.put(f'{url}/{created[id_key]}', json=tuber.dict())
    get_res = client.get(f'{url}/{created[id_key]}')
    for res in (put_res, get_res):
        assert res.status_code == 200
        assert compare_dict(res.json(), tuber.dict(), exclude=[id_key])
        assert not compare_dict(res.json(), model.dict(), exclude=[id_key])
async def add(table: Table, info: BaseModel):
    """Insert ``info`` into ``table`` and return the stored row.

    ``skip_defaults`` (deprecated pydantic spelling) keeps default-valued
    fields out of the INSERT; a unique-constraint violation becomes a 409.
    """
    new_values = info.dict(skip_defaults=True)
    insert_query = table.insert().values(new_values)
    try:
        new_id = await db.execute(insert_query)
    except UniqueViolationError as e:
        raise HTTPException(status_code=HTTP_409_CONFLICT, detail=e.detail)
    return await getById(table, new_id)
def create(self, create: BaseModel):
    """Insert ``create`` as a new document and return it as stored.

    Adds identical created/updated timestamps and one generated UUID used for
    both ``_id`` and ``id``.
    """
    doc = create.dict()
    doc["created"] = doc["updated"] = get_timestamp()
    new_id = get_uuid()
    doc["_id"] = new_id
    doc["id"] = new_id
    result = self.collection.insert_one(doc)
    # Sanity check; note asserts are stripped under -O.
    assert result.acknowledged
    return self.get(result.inserted_id)
async def update_entry(
        table: Table, payload: BaseModel,
        entry_id: int = Path(..., gt=0)) -> Dict[str, Any]:
    """Persist ``payload`` for ``entry_id`` and return the payload plus its id.

    Raises a 404 when put() does not yield an integer id.
    """
    entry_id = await put(entry_id, payload, table)
    if not isinstance(entry_id, int):
        raise HTTPException(status_code=404, detail="Entry not found")
    result = dict(payload.dict())
    result["id"] = entry_id
    return result
async def put(id: int, payload: BaseModel, table: Table):
    """Update the row whose ``id`` column matches and return the executed
    statement's result (the updated row's id via RETURNING)."""
    statement = (
        table.update()
        .where(id == table.c.id)
        .values(**payload.dict())
        .returning(table.c.id)
    )
    return await database.execute(query=statement)