def model_to_primitive(
    model: BaseModel,
    without_id: bool = False,
    exclude: Union[Sequence[str], None] = None,
    keep_python_primitives: bool = False,
) -> Dict:
    """
    Convert pydantic-{model} to dict transforming complex types to
    primitives (e.g. datetime to str).

    :param model: Pydantic model
    :param without_id: Remove id key from result dict
    :param exclude: List of fields to exclude from result dict
    :param keep_python_primitives: If True result dict will have
        python-primitives (e.g. datetime, Decimal)
    :return: Dict with fields from given model
    """
    # Implicit-Optional annotation made explicit: the parameter defaulted
    # to None while being typed as a plain Sequence[str].
    exclude_set: Set[Union[int, str]] = set(exclude or [])
    if without_id:
        exclude_set.add("id")

    data: Dict
    if keep_python_primitives:
        # Keep native Python objects (datetime, Decimal, ...).
        data = model.dict(exclude=exclude_set)
    else:
        # Round-trip through JSON so complex types collapse to primitives.
        data = json.loads(model.json(exclude=exclude_set))
    return data
def commit(self, resource, object, method, bg, status="init", dependence_uuid=None):
    """
    Persist a TaskModel row describing a queued task and schedule it.

    :param resource: target resource name stored on the task row
    :param object: target object stored on the task row (shadows the
        builtin name; kept for caller compatibility)
    :param method: method name stored on the task row
    :param bg: background-task runner exposing ``add_task``
    :param status: initial task status (default "init")
    :param dependence_uuid: optional uuid of a task this one depends on
    :return: a shallow copy of the persisted row, taken before commit
    """
    uuid_str = str(uuid.uuid4())
    time = datetime.now()
    user_id = self.user.id
    # Identity comparison with None (was `== None`).
    if self.model is None:
        self.model = BaseModel()
    row = TaskModel(
        uuid=uuid_str,
        post_time=time,
        run_time=0,
        user_id=user_id,
        status=status,
        dependence_uuid=dependence_uuid,
        resource=resource,
        object=object,
        method=method,
        request=self.model.json(),
        message="queing task",
    )
    # Copy before add/commit so the returned object is detached from the
    # session's state of the inserted row.
    res = copy.copy(row)
    self.db.add(row)
    self.db.commit()
    bg.add_task(do_task)
    return res
def __init__(__pydantic_self__, **data: Any) -> None:
    """
    Validate the resource type of incoming data against this model's
    expected default before running normal pydantic initialization.

    Accepts the type under either ``resource_type`` or the FHIR-style
    ``resourceType`` key, and raises a pydantic ``ValidationError``
    (wrapping ``WrongResourceType``) when it does not match the model's
    declared default.
    """
    # Pop a possibly supplied snake_case resource type first.
    resource_type = data.pop("resource_type", None)
    errors = []
    # FHIR payloads carry camelCase "resourceType"; only consume it when
    # the model itself does not declare a field of that exact name.
    if ("resourceType" in data and "resourceType" not in __pydantic_self__.__fields__):
        resource_type = data.pop("resourceType", None)
    if (resource_type is not None and resource_type != __pydantic_self__.__fields__["resource_type"].default):
        expected_resource_type = __pydantic_self__.__fields__[
            "resource_type"].default
        error = (f"``{__pydantic_self__.__class__.__module__}."
                 f"{__pydantic_self__.__class__.__name__}`` "
                 f"expects resource type ``{expected_resource_type}``, "
                 f"but got ``{resource_type}``. "
                 "Make sure resource type name is correct and right "
                 "ModelClass has been chosen.")
        errors.append(
            ErrorWrapper(WrongResourceType(error=error), loc="resource_type"))
    if errors:
        raise ValidationError(errors, __pydantic_self__.__class__)
    # Delegate the actual field validation/assignment to pydantic.
    BaseModel.__init__(__pydantic_self__, **data)
def diff_models(from_: BaseModel, to_: BaseModel) -> MutableMapping: ''' Return a dict with differences of the second in relation to the first model. Useful for getting only the fields that have changed before an update, for example. ''' from_dict = from_.dict() to_dict = to_.dict(exclude_unset=True) return {k: v for k, v in to_dict.items() if from_dict.get(k) != v}
def __init__(self, samplers, datasets):
    """Build a zipped sampler over *datasets*, driven by *samplers*."""
    # Combined length is dictated by the longest sampler.
    longest = max(len(sampler) for sampler in samplers)
    mapping = Dataset.create_from_combine_mapping(datasets)
    zipped = ZipSampler.zip_samplers(samplers, datasets)
    BaseModel.__init__(
        self,
        samplers=samplers,
        datasets=datasets,
        length=longest,
        from_mapping=mapping,
        zipped_samplers=zipped,
    )
def __init__(self, length, proportion=1.0, replacement=False):
    """Sampler drawing ``int(length * proportion)`` uniformly weighted samples."""
    uniform_weights = torch.ones(length).double()
    n_samples = int(length * proportion)
    BaseModel.__init__(
        self,
        proportion=proportion,
        replacement=replacement,
        sampler=torch.utils.data.WeightedRandomSampler(
            uniform_weights,
            num_samples=n_samples,
            replacement=replacement,
        ),
    )
def __init__(self, samplers, dataset):
    """Sample *dataset* once per sampler, merging them with equal weight."""
    equal_weights = [1 for _ in samplers]
    merged = MultiSampler.merge_samplers(samplers, equal_weights)
    BaseModel.__init__(
        self,
        samplers=samplers,
        dataset=dataset,
        length=len(dataset) * len(samplers),
        merged_samplers=merged,
    )
def __init__(self, sampler, length, epoch_bound=False):
    '''
    Wrapper that repeats and limits length of sampling based on epoch
    length and batch size
    '''
    # Prime the queue with a fresh iterator over the wrapped sampler.
    initial_queue = iter(sampler)
    BaseModel.__init__(
        self,
        sampler=sampler,
        length=length,
        epoch_bound=epoch_bound,
        queue=initial_queue,
    )
def __init__(self, dictionary):
    """
    Initialize from a plain dict, copying only the keys that are present.

    :param dictionary: mapping that may contain ``user_id``, ``accepted``,
        ``waiting`` and ``turn`` entries
    """
    BaseModel.__init__(self)
    # Copy each known key only when present, leaving absent attributes
    # untouched (replaces four duplicated if-blocks).
    for key in ("user_id", "accepted", "waiting", "turn"):
        if key in dictionary:
            setattr(self, key, dictionary[key])
def create_from_schema(
    cls,
    db,
    schema: BaseModel,
    additions=None,
):
    """Instantiate *cls* from a pydantic schema and persist it via save_return."""
    # A missing additions mapping behaves exactly like an empty one, so
    # both branches collapse into a single construction call.
    extra_fields = additions if additions is not None else {}
    instance = cls(**schema.dict(skip_defaults=True), **extra_fields)
    return instance.save_return(db)
def test_get_one(client, url: str = URL, model: BaseModel = basic_potato, id_key: str = 'id'):
    """Create one item, fetch it back by id, and compare the fields."""
    created = client.post(url, json=model.dict())
    assert created.status_code == 200

    listing = client.get(url).json()
    assert len(listing) == 1

    item_id = listing[0][id_key]
    fetched = client.get(f'{url}/{item_id}')
    assert fetched.status_code == 200
    assert compare_dict(fetched.json(), model.dict(), exclude=[id_key])
def __init__(self, samplers, datasets, ns):
    """Concatenate *datasets* and merge their samplers with weights *ns*."""
    total_length = MergeSampler.merged_samplers_length(samplers)
    concat_mapping = Dataset.create_from_concat_mapping(datasets)
    merged = MergeSampler.merge_samplers(samplers, datasets, ns)
    BaseModel.__init__(
        self,
        samplers=samplers,
        datasets=datasets,
        ns=ns,
        length=total_length,
        from_mapping=concat_mapping,
        merged_samplers=merged,
    )
def add_local_resource(db: Session, Resource: declarative_base, user_input: BaseModel, r_id=None):
    """
    Create a new resource row, or partially update an existing one.

    :param db: SQLAlchemy session
    :param Resource: mapped model class to create/update
    :param user_input: pydantic model carrying the field values
    :param r_id: when given, update the existing row with this id instead
        of creating a new one
    :return: the created or updated resource instance
    """
    if r_id:
        data_dict = user_input.dict()
        resource = get_local_resources(db=db, Resource=Resource, r_id=r_id)
        for attr, val in data_dict.items():
            # Skip None fields so a partial update does not wipe existing
            # values (identity check replaces `not val == None`).
            if val is not None:
                setattr(resource, attr, val)
    else:
        resource = Resource(**user_input.dict())
    db.add(resource)
    db.commit()
    return resource
def add_init_forgiveness(self, **kwargs):
    """
    Run pydantic ``__init__`` while temporarily marking missing required
    fields as optional, so partially-populated data does not fail
    validation.

    :param kwargs: field values to initialize the model with
    """
    fields: Dict[str, ModelField] = self.__class__.__fields__
    relaxed_fields: list = []
    for model_field in fields.values():
        if model_field.name not in kwargs and getattr(model_field, 'required', False):
            relaxed_fields.append(model_field)
            model_field.required = False
    try:
        BaseModel.__init__(self, **kwargs)
    finally:
        # Always restore the class-level `required` flags, even when
        # validation raises — previously a failed init left the model
        # class permanently relaxed for every other instance.
        for model_field in relaxed_fields:
            model_field.required = True
def parse_document_to_dict(doc: BaseModel) -> Dict[str, Any]:
    """Dump a pydantic document to a dict, unwrapping secret and enum values."""
    data = doc.dict()
    # Replace wrapper values with their plain representations so the
    # result contains only primitive-friendly values.
    for key, value in data.items():
        if isinstance(value, SecretStr):
            data[key] = value.get_secret_value()
        elif isinstance(value, Enum):
            data[key] = value.value
    return data
def test_delete_all(client, url: str = URL, model: BaseModel = basic_potato):
    """Create two items, delete the whole collection, verify it is empty."""
    for _ in range(2):
        created = client.post(url, json=model.dict())
        assert created.status_code == 200
    assert len(client.get(url).json()) == 2

    deleted = client.delete(url)
    assert deleted.status_code == 200
    assert len(deleted.json()) == 0
    assert len(client.get(url).json()) == 0
def deploy_json_api_output(
    intervention: Intervention,
    area_result: pydantic.BaseModel,
    output_dir: pathlib.Path,
    filename_override=None,
):
    """
    Serialize *area_result* to a JSON file under *output_dir*.

    :param intervention: passed to ``area_result.output_key`` to derive
        the default filename
    :param area_result: pydantic model providing ``output_key`` and ``json``
    :param output_dir: directory to write into (created if missing)
    :param filename_override: optional explicit filename
    :return: the (unmodified) *area_result*
    """
    # mkdir with exist_ok=True is already a no-op for an existing
    # directory, so the previous exists() pre-check was redundant and
    # race-prone.
    output_dir.mkdir(parents=True, exist_ok=True)
    filename = filename_override or (area_result.output_key(intervention) + ".json")
    output_path = output_dir / filename
    output_path.write_text(area_result.json())
    return area_result
def __init__(self, dictionary):
    """
    Initialize from a plain dict, copying only the keys that are present.

    :param dictionary: mapping that may contain ``user_id``, ``accepted``,
        ``locations``, ``eggs``, ``splashes`` and ``last_check`` entries
    """
    BaseModel.__init__(self)
    # Copy each known key only when present, leaving absent attributes
    # untouched (replaces six duplicated if-blocks).
    for key in ("user_id", "accepted", "locations", "eggs", "splashes", "last_check"):
        if key in dictionary:
            setattr(self, key, dictionary[key])
def get_ref_entity_name_to_ref_field_value(
        self, entity: BaseModel,
        entity_type: Type[BaseModel]) -> Dict[str, str]:
    """
    Get a dictionary with name of reference entities to value of the
    referencing field, being id of the referencing entity.

    Walks reference fields recursively, so identifiers of transitively
    referenced entities are included as well.

    :param entity: CSR entity
    :param entity_type: type of the entity
    :return: dictionary from reference entity name to value of reference field
    :raises MappingException: when a reference points to an id that does
        not exist in the subject registry
    """
    entity_ref_to_ref_id = dict()
    id_attribute = self.get_id_field_name(entity_type)
    entity_type_name = entity_type.schema()['title']
    entity_id = entity.__getattribute__(id_attribute)
    # The entity always maps its own type name to its own id.
    entity_ref_to_ref_id[entity_type_name] = entity_id
    # 'Individual' is treated as the root of the reference graph:
    # nothing further to follow from here.
    if entity_type_name == 'Individual':
        return entity_ref_to_ref_id
    # Follow reference fields to obtain identifiers of linked entities
    ref_fields = self.get_field_properties_by_keyword(
        entity_type, 'references')
    for field_name, ref_entity_name in ref_fields.items():
        if not self.skip_reference(type(entity), ref_entity_name):
            # Lookup referenced entity
            referenced_entity_type = list([
                entity for entity in SubjectEntity.__args__
                if entity.schema()['title'] == ref_entity_name
            ])[0]
            referenced_id = entity.__getattribute__(field_name)
            # Unset reference fields are simply skipped.
            if not referenced_id:
                continue
            referenced_id_attribute = self.get_id_field_name(
                referenced_entity_type)
            # Resolve the referenced entity instance in the registry.
            referenced_entities = [
                e for e in self.subject_registry.entity_data[ref_entity_name]
                if e.__getattribute__(referenced_id_attribute) == referenced_id
            ]
            if not referenced_entities:
                raise MappingException(
                    f'{entity_type_name} with id {entity_id} has reference to non-existing'
                    f' {ref_entity_name} with id {referenced_id}.')
            # Recursively add identifiers from referenced entity
            referenced_ids = self.get_ref_entity_name_to_ref_field_value(
                referenced_entities[0], referenced_entity_type)
            entity_ref_to_ref_id.update(referenced_ids)
    return entity_ref_to_ref_id
async def create(self, schema: BaseModel, **kwargs: tp.Any) -> ormar.Model:
    """
    Create a new object
    """
    # Only explicitly provided schema fields are sent; extra keyword
    # arguments are merged in by the call itself.
    fields = schema.dict(exclude_unset=True)
    created = await self.model.objects.create(**fields, **kwargs)
    return created
def update_item_with_etag(self, item: BaseModel, etag: str) -> BaseModel:
    """Replace *item* in the container, guarded by an etag precondition."""
    replacement_body = item.dict()
    self.container.replace_item(
        item=item.id,
        body=replacement_body,
        etag=etag,
        match_condition=MatchConditions.IfNotModified,
    )
    # Read the stored version back so callers get the fresh document.
    return self.read_item_by_id(item.id)
def deploy_json_api_output(region_result: pydantic.BaseModel, output_path: pathlib.Path) -> None:
    """Write *region_result* as JSON to *output_path*.

    Fields that were never explicitly set are excluded so they stay
    undefined in the emitted JSON instead of appearing as null defaults.
    """
    serialized = region_result.json(exclude_unset=True)
    output_path.write_text(serialized)
def filter_by_model(self, filter_obj: BaseModel):
    """
    Build query criterions from a pydantic filter model and apply them.

    Each explicitly-set field on *filter_obj* is turned into one
    criterion; the field's ``field_info.extra`` controls the operator
    (``op``), the value parser (``parser``) and the target column(s).

    :param filter_obj: filter model; ``None`` leaves the query unchanged
    :return: self with the collected criterions applied via ``filter``
    """
    if filter_obj is None:
        return self
    # Only fields the caller actually set take part in filtering.
    values = filter_obj.dict(exclude_unset=True)
    criterions = []
    for k, v in values.items():
        extra = filter_obj.__fields__[k].field_info.extra
        op_name = extra.get('op', 'eq')
        op = OPERATOR_MAP.get(op_name)
        if not op:
            # Unknown operators are silently skipped.
            continue
        # An explicit per-field parser wins over the per-operator default.
        parser = extra.get('parser') or PARSER_MAP.get(op_name)
        v = parser(v) if parser and isfunction(parser) else v
        if op_name in {'like', 'ilike', 'notlike', 'notilike'}:
            # LIKE-style operators may match against several columns,
            # combined with OR.
            columns = extra.get('columns') or [k]
            like_clauses = []
            for col in columns:
                criterion = self.compute_criterion(col, op, v)
                if criterion is not None:
                    like_clauses.append(criterion)
            criterion = or_(*like_clauses)
        else:
            criterion = self.compute_criterion(extra.get('column') or k, op, v)
        if criterion is not None:
            criterions.append(criterion)
    return self.filter(*criterions)
class GoalsExportPolicy(BaseExportPolicy):
    # Export policy for "goals": optional request params plus the set of
    # columns to include in the export.

    class ParamsPolicy(BaseModel):
        # Whether deleted goals should be included; None means "unset".
        useDeleted: Optional[bool] = None

    # NOTE(review): the default here is a plain BaseModel(), not
    # ParamsPolicy() — it therefore lacks the useDeleted field. Confirm
    # whether ParamsPolicy() was intended.
    params: ParamsPolicy = BaseModel()

    # Columns exported by default: every supported column, in this order.
    columns: Optional[
        list[
            Literal[
                "id",
                "name",
                "type",
                "is_retargeting",
                "flag",
                "conditions",
                "steps",
                "depth",
                "default_price",
            ]
        ]
    ] = [
        "id",
        "name",
        "type",
        "is_retargeting",
        "flag",
        "conditions",
        "steps",
        "depth",
        "default_price",
    ]
class ClientsExportPolicy(BaseExportPolicy):
    # Export policy for "clients": fixed column set, no request params.

    # Columns exported by default (all supported ones).
    columns: list[Literal["id", "name", "chief_login"]] = [
        "id",
        "name",
        "chief_login",
    ]
    # ClassVar: a shared empty params object, deliberately not a
    # per-instance pydantic field.
    params: ClassVar = BaseModel()
async def checkId(expectedId: int, model: BaseModel):
    """
    Ensure *model* carries *expectedId* as its id.

    :param expectedId: id implied by the request path
    :param model: payload model whose id is checked or filled in
    :return: the model, with id set to *expectedId* when it was missing
    :raises unmatchedIdException: when the model already has a different id
    """
    # Compare against None explicitly: a legitimate id of 0 is falsy and
    # was previously overwritten instead of being validated.
    if model.id is not None:
        if model.id != expectedId:
            raise unmatchedIdException
    else:
        model.id = expectedId
    return model
def update_with_dict(m: BaseModel, updated) -> BaseModel:
    """
    Build a new model of ``type(m)`` with *updated* applied on top of
    ``m.dict()``; nested ``meta`` dicts are merged rather than replaced.

    :param m: base pydantic model
    :param updated: mapping of field overrides (left unmodified)
    :return: a fresh model instance of the same concrete type as *m*
    """
    base = m.dict()
    # Work on a shallow copy so the caller's dict is never mutated
    # (the previous version wrote the merged meta back into *updated*).
    overrides = dict(updated)
    if base.get('meta') is not None and overrides.get('meta') is not None:
        merged_meta = dict(base['meta'])
        merged_meta.update(overrides['meta'])
        overrides['meta'] = merged_meta
    base.update(overrides)
    return type(m)(**base)
def update(uuid: UUID, obj: BaseModel, db_table: DeclarativeMeta, db: Session): """Updates the object with the given UUID in the database. Designed to be called only by the API since it raises an HTTPException.""" # Try to perform the update try: result = db.execute( sql_update(db_table).where(db_table.uuid == uuid).values( # exclude_unset is needed for update routes so that any values in the Pydantic model # that are not being updated are not set to None. Instead they will be removed from the dict. **obj.dict(exclude_unset=True))) # Verify a row was actually updated if result.rowcount != 1: raise HTTPException(status_code=404, detail=f"UUID {uuid} does not exist.") commit(db) # An IntegrityError will happen if value already exists or was set to None except IntegrityError: db.rollback() raise HTTPException( status_code=status.HTTP_409_CONFLICT, detail=f"Got an IntegrityError while updating UUID {uuid}.", )
def _filtered_query(self, filters: BaseModel):
    """Translate django-style lookups from *filters* into query filters."""
    query = db.session.query(self.model)
    lookups = clean(filters.dict(exclude_defaults=True))
    for lookup, value in lookups.items():
        operator_fn, column_name = dj_lookup_to_sqla(lookup)
        column = getattr(self.model, column_name)
        query = query.filter(operator_fn(column, value))
    return query
def __init__(self, from_db=False, forgiveness=None, **kwargs):
    """
    Initialize the model, optionally relaxing required-field validation.

    :param from_db: data originates from the database; also the default
        for *forgiveness* when that is not given explicitly
    :param forgiveness: when truthy, init through ``add_init_forgiveness``
        so missing required fields do not fail validation
    :param kwargs: field values
    """
    if forgiveness is None:
        forgiveness = from_db
    self.set_db_fields(kwargs, from_db)
    # Plain if/else instead of a conditional expression used purely for
    # its side effects.
    if forgiveness:
        self.add_init_forgiveness(**kwargs)
    else:
        BaseModel.__init__(self, **kwargs)