def json(
    self,
    *,
    include: Union["AbstractSetIntStr", "MappingIntStrAny"] = None,
    exclude: Union["AbstractSetIntStr", "MappingIntStrAny"] = None,
    by_alias: bool = None,
    skip_defaults: bool = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = None,
    encoder: Optional[Callable[[Any], Any]] = None,
    **dumps_kwargs: Any,
) -> str:
    """Serialize the model to a JSON string.

    Overrides ``BaseModel.json`` to default ``by_alias`` and
    ``exclude_none`` to ``True`` when the caller does not set them
    (``None`` acts as the "unset" sentinel for both).

    Bug fix: ``exclude`` was accepted but never forwarded to
    ``BaseModel.json``, so excluded fields still appeared in output.
    """
    if by_alias is None:
        by_alias = True
    if exclude_none is None:
        exclude_none = True
    return BaseModel.json(
        self,
        include=include,
        exclude=exclude,  # was silently dropped before this fix
        by_alias=by_alias,
        skip_defaults=skip_defaults,
        exclude_unset=exclude_unset,
        exclude_defaults=exclude_defaults,
        exclude_none=exclude_none,
        encoder=encoder,
        **dumps_kwargs,
    )
def deploy_json_api_output(region_result: pydantic.BaseModel, output_path: pathlib.Path) -> None:
    """Serialize ``region_result`` to JSON and write it to ``output_path``.

    Fields that were never explicitly set are excluded, so an undefined
    field simply does not appear in the emitted document.
    """
    output_path.write_text(region_result.json(exclude_unset=True))
def model_to_primitive(
    model: BaseModel,
    without_id: bool = False,
    exclude: Sequence[str] = None,
    keep_python_primitives: bool = False,
) -> Dict:
    """
    Convert pydantic-{model} to dict transforming complex types to primitives (e.g. datetime to str)

    :param model: Pydantic model
    :param without_id: Remove id key from result dict
    :param exclude: List of field to exclude from result dict
    :param keep_python_primitives: If True result dict will have python-primitives (e.g. datetime, Decimal)
    :return: Dict with fields from given model
    """
    excluded: Set[Union[int, str]] = set(exclude) if exclude else set()
    if without_id:
        excluded.add("id")
    if keep_python_primitives:
        return model.dict(exclude=excluded)
    # Round-trip through JSON so complex types (datetime, Decimal, …)
    # become plain primitives.
    return json.loads(model.json(exclude=excluded))
def model_to_pretty_json_str(pydantic_model: BaseModel) -> str:
    """Render a pydantic model as indented, key-sorted JSON (for logging etc.)."""
    as_dict = json.loads(pydantic_model.json())
    return json.dumps(as_dict, indent=2, sort_keys=True)
def process_bind_param(
        self, value: pydantic.BaseModel, dialect: 'DefaultDialect') -> typing.Union[str, typing.Any]:
    """Encode data, if required.

    ``None`` passes through untouched; any model is serialized to its
    JSON string representation before binding.
    """
    return value if value is None else value.json()
async def update_or_create(self, entity: BaseModel, *args, **kwargs) -> Tuple[BaseModel, bool]:
    """Store ``entity`` as JSON under its formatted key.

    Returns the entity and whether the backend reported the write as
    performed.
    """
    redis_key: str = self.key_name.format(getattr(entity, self.key_field))
    result: int = await self._client.set(
        key=redis_key, value=entity.json(), expire=self.timeout
    )
    return entity, bool(result)
async def save_model(instance: BaseModel):
    """Saving model to Redis.

    The key combines the model's class name and its ``id`` attribute;
    the value is the model's JSON serialization, expiring after
    ``settings.model_ttl``.
    """
    key = f"{instance.__class__.__name__}:{instance.id}"
    connection = await get_connection()
    await connection.set(key, instance.json(), expire=settings.model_ttl)
def generate_json(shrub_config: BaseModel) -> str:
    """
    Generate a json version of the given configuration.

    Unset and ``None`` fields are omitted and field aliases are used as
    JSON keys.

    :param shrub_config: Shrub configuration to generate.
    :return: JSON version of given shrub configuration.
    """
    return shrub_config.json(
        by_alias=True,
        exclude_none=True,
        exclude_unset=True,
    )
def post(self, endpoint: Endpoint, sub: str, request: BaseModel = None) -> requests.Response:
    """POST ``request`` (a pydantic model) to the endpoint's URL.

    Bug fix: ``request.json()`` returns an already-serialized JSON
    string; passing it via the ``json=`` kwarg made ``requests``
    serialize it a *second* time, so the server received a quoted,
    double-encoded JSON string.  Send the raw string as the body with
    an explicit JSON content type instead.
    """
    url = f"{self.endpoints[endpoint]}{sub}"
    print(url)  # NOTE(review): debug leftover — consider a logger instead
    if request is None:
        return requests.post(url)
    return requests.post(
        url,
        data=request.json(),
        headers={"Content-Type": "application/json"},
    )
async def update_or_create(
    self, entity: BaseModel, *args, **kwargs
) -> Tuple[BaseModel, bool]:
    """Upsert ``entity`` into the hash at ``self.key_name``.

    The hash field is taken from the entity's ``key_field`` attribute;
    a positive ``timeout`` refreshes the key's TTL after the write.
    """
    field_value: str = getattr(entity, self.key_field)
    stored: int = await self._client.hset(
        key=self.key_name, field=field_value, value=entity.json()
    )
    if self.timeout > 0:
        await self._client.expire(self.key_name, self.timeout)
    return entity, bool(stored)
def model_to_JSON(model: BaseModel):
    """Convert a pydantic model (or list of models) to JSON primitives.

    Unset fields are excluded from the serialization.

    Bug fix: the list branch concatenated each item's JSON with no
    separating commas, producing invalid JSON (``json.loads`` raised a
    decode error) for any list with more than one element.
    """
    if not isinstance(model, list):
        return json.loads(model.json(exclude_unset=True))
    json_str = "[" + ",".join(item.json(exclude_unset=True) for item in model) + "]"
    return json.loads(json_str)
def json(
    self,
    *,
    include: Union["AbstractSetIntStr", "MappingIntStrAny"] = None,
    exclude: Union["AbstractSetIntStr", "MappingIntStrAny"] = None,
    by_alias: bool = None,
    skip_defaults: bool = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = None,
    encoder: Optional[Callable[[Any], Any]] = None,
    **dumps_kwargs: Any,
) -> str:
    """Serialize the model to a JSON string.

    Defaults ``by_alias`` and ``exclude_none`` to ``True`` when unset,
    and translates ``indent``/``sort_keys`` kwargs to ``orjson``
    ``option`` flags when the model is configured to dump via orjson.

    Bug fix: ``exclude`` was accepted but never forwarded to
    ``BaseModel.json``, so excluded fields still appeared in output.
    """
    if by_alias is None:
        by_alias = True
    if exclude_none is None:
        exclude_none = True
    if self.__config__.json_dumps == orjson_dumps:
        # Respect an explicit ``option`` kwarg; otherwise build one from
        # the stdlib-style kwargs orjson understands.
        if "option" not in dumps_kwargs:
            option = 0
            if "indent" in dumps_kwargs:
                dumps_kwargs.pop("indent")
                # only indent 2 is accepted
                option |= orjson.OPT_INDENT_2
            sort_keys = dumps_kwargs.pop("sort_keys", False)
            if sort_keys:
                option |= orjson.OPT_SORT_KEYS
            if len(dumps_kwargs) > 0:
                logger.warning(
                    "When ``dumps`` method is used from ``orjson`` "
                    "all dumps kwargs are ignored except `indent`, `sort_keys` "
                    "and of course ``option`` from orjson"
                )
            if option > 0:
                dumps_kwargs = {"option": option}
    return BaseModel.json(
        self,
        include=include,
        exclude=exclude,  # was silently dropped before this fix
        by_alias=by_alias,
        skip_defaults=skip_defaults,
        exclude_unset=exclude_unset,
        exclude_defaults=exclude_defaults,
        exclude_none=exclude_none,
        encoder=encoder,
        **dumps_kwargs,
    )
def deploy_json_api_output(
    intervention: Intervention,
    area_result: pydantic.BaseModel,
    output_dir: pathlib.Path,
    filename_override=None,
):
    """Write ``area_result`` as JSON into ``output_dir`` and return it.

    The filename defaults to ``<output_key>.json`` derived from the
    intervention unless an override is supplied.
    """
    if not output_dir.exists():
        output_dir.mkdir(parents=True, exist_ok=True)
    filename = filename_override or (area_result.output_key(intervention) + ".json")
    target = output_dir / filename
    target.write_text(area_result.json())
    return area_result
async def create(self, entity: BaseModel, left: bool = True, pivot: Optional[BaseModel] = None, pipe: Optional[Pipeline] = None, *args, **kwargs) -> BaseModel:
    """Add ``entity`` to the Redis list, optionally relative to ``pivot``.

    Without a pivot, pushes to the left (head) or right (tail).  With a
    pivot, inserts before/after the pivot's JSON value.  When no pipeline
    is supplied, a local one is created, the TTL is refreshed if
    configured, and the pipeline is executed here; otherwise the caller
    owns execution.
    """
    owns_pipe = pipe is None
    _pipe: Pipeline = self._client.pipeline() if owns_pipe else pipe
    payload = entity.json()
    if pivot is not None:
        _pipe.linsert(
            key=self.key_name,
            pivot=pivot.json(),
            value=payload,
            before=left,
        )
    else:
        push_op: Callable = _pipe.lpush if left else _pipe.rpush
        push_op(key=self.key_name, value=payload)
    if owns_pipe:
        if self.timeout > 0:
            _pipe.expire(self.key_name, self.timeout)
        await _pipe.execute()
    return entity
async def update(self, entity: BaseModel, entities: Optional[List[BaseModel]] = None, pipe: Optional[Pipeline] = None, *args, **kwargs) -> BaseModel:
    """Overwrite every list position that holds ``entity``.

    Positions come from ``find_index``.  When no pipeline is supplied a
    local one is created, the TTL refreshed if configured, and executed
    here; otherwise execution is left to the caller.
    """
    positions = await self.find_index(entity=entity, entities=entities)
    owns_pipe = pipe is None
    _pipe: Pipeline = self._client.pipeline() if owns_pipe else pipe
    payload = entity.json()
    for position in positions:
        _pipe.lset(key=self.key_name, index=position, value=payload)
    if owns_pipe:
        if self.timeout > 0:
            _pipe.expire(self.key_name, self.timeout)
        await _pipe.execute()
    return entity
class PostTask():
    """Queue a task row for later execution and return an unpersisted copy.

    Holds the DB session, the acting user, and the request model that
    will be serialized into the task record.
    """

    db = None
    user = None
    model = None
    # NOTE(review): class-level mutable default shared by ALL instances;
    # it is never read in this class — confirm no external caller uses
    # ``PostTask.task_model`` before removing it.
    task_model = TaskModel()

    def __init__(self, db: Session, user: CurrentUser, model: BaseModel):
        self.db = db
        self.user = user
        self.model = model

    def commit(self, resource, object, method):
        """Insert an "init" task row and return a non-persisted mirror.

        The stored row carries the model's JSON; the returned object
        carries the model itself (``request`` differs between the two).
        """
        uuid_str = str(uuid.uuid4())
        time = datetime.now()
        user_id = self.user.user_id
        # Fixed: was ``== None`` — identity comparison is the correct check.
        if self.model is None:
            self.model = BaseModel()
        # "queing" [sic] preserved: consumers may match on this message text.
        row = TaskModel(uuid=uuid_str, post_time=time, run_time=0,
                        user_id=user_id, status="init", resource=resource,
                        object=object, method=method,
                        request=self.model.json(), message="queing task")
        res = TaskModel(uuid=uuid_str, post_time=time, run_time=0,
                        user_id=user_id, status="init", resource=resource,
                        object=object, method=method,
                        request=self.model, message="queing task")
        self.db.add(row)
        self.db.commit()
        return res
def queue_object(
    name: QueueNameType,
    message: BaseModel,
    *,
    account_id: str,
    visibility_timeout: Optional[int] = None,
) -> bool:
    """Base64-encode ``message`` as JSON and enqueue it.

    Returns True on success, False when the queue resource has vanished.
    Raises when the queue cannot be resolved at all.
    """
    queue = get_queue(name, account_id=account_id)
    if not queue:
        raise Exception("unable to queue object, no such queue: %s" % queue)
    payload = message.json(exclude_none=True).encode()
    encoded = base64.b64encode(payload).decode()
    try:
        queue.send_message(encoded, visibility_timeout=visibility_timeout)
    except ResourceNotFoundError:
        return False
    return True
def write_json(self, name: str, data: BaseModel) -> str:
    """Write artifact data to self.output_dir/name.json

    Args:
        name: filename
        data: data

    Returns:
        Full filesystem path of artifact file
    """
    logger = Logger()
    os.makedirs(self.output_dir, exist_ok=True)
    target_path = os.path.join(self.output_dir, f"{name}.json")
    with logger.bind(artifact_path=target_path):
        logger.info(event=LogEvent.WriteToFSStart)
        with open(target_path, "w") as out_fp:
            out_fp.write(data.json(exclude_unset=True))
        logger.info(event=LogEvent.WriteToFSEnd)
    return target_path
def __init__(
    self,
    model: BaseModel,
    status_code: str = HTTP_200,
    headers: Optional[Dict[Any, Any]] = None,
) -> None:
    """Init custom response.

    Arguments:
        model: pydantic model that should be encoded.
        status_code: response HTTP status code.
        headers: headers for response.
    """
    # Reuse the caller's dict when given (matching original in-place
    # behavior); always force a JSON content type.
    final_headers = headers or {}
    final_headers["Content-Type"] = "application/json"
    body = model.json(by_alias=True)
    super().__init__(status_code, final_headers, body)
def queue_object(
    name: QueueNameType,
    message: BaseModel,
    storage_type: StorageType,
    *,
    visibility_timeout: Optional[int] = None,
    time_to_live: int = DEFAULT_TTL,
) -> bool:
    """Base64-encode ``message`` as JSON and enqueue it.

    Returns True on success, False when the queue resource has vanished.
    Raises when the queue cannot be resolved at all.
    """
    queue = get_queue(name, storage_type)
    if not queue:
        raise Exception("unable to queue object, no such queue: %s" % queue)
    payload = message.json(exclude_none=True).encode()
    encoded = base64.b64encode(payload).decode()
    try:
        queue.send_message(
            encoded,
            visibility_timeout=visibility_timeout,
            time_to_live=time_to_live,
        )
    except ResourceNotFoundError:
        return False
    return True
def __init__(
    self,
    model: BaseModel,
    status_code: int = 200,
    media_type: str = "application/json",
    **kwargs: Any,
) -> None:
    """Init custom response.

    Arguments:
        model: pydantic model that should be encoded.
        status_code: response HTTP status code.
        media_type: content type of response.
        kwargs: other arguments to response constructor from starlette.
    """
    body = model.json(by_alias=True)
    super().__init__(body, status_code, media_type=media_type, **kwargs)
async def update_or_create(self, entity: BaseModel, left: bool = True, pivot: Optional[BaseModel] = None, entities: Optional[List[BaseModel]] = None, *args, **kwargs) -> Tuple[BaseModel, bool]:
    """Update ``entity`` in place in the list, or create it if absent.

    Returns the entity and a flag indicating whether it was created.

    Bug fix: EXPIRE was previously issued unconditionally.  The sibling
    ``create``/``update`` methods guard on ``self.timeout > 0``, and a
    Redis EXPIRE with a non-positive TTL deletes the key immediately —
    so an unconfigured timeout would wipe the whole list on update.
    """
    indexes = await self.find_index(entity=entity, entities=entities)
    created: bool = False
    if len(indexes) == 0:
        entity = await self.create(entity=entity, left=left, pivot=pivot)
        created = True
    else:
        pipe: Pipeline = self._client.pipeline()
        for i in indexes:
            pipe.lset(key=self.key_name, index=i, value=entity.json())
        if self.timeout > 0:
            pipe.expire(self.key_name, self.timeout)
        await pipe.execute()
    return entity, created
def write_json(self, name: str, data: BaseModel) -> str:
    """Write artifact data to s3://self.bucket/self.key_prefix/name.json

    Args:
        name: s3 key name
        data: data

    Returns:
        S3 uri (s3://bucket/key/path) to artifact
    """
    output_key = f"{self.key_prefix}/{name}.json"
    logger = Logger()
    with logger.bind(bucket=self.bucket, key=output_key):
        logger.info(event=LogEvent.WriteToS3Start)
        s3_client = boto3.Session().client("s3")
        payload = data.json(exclude_unset=True).encode("utf-8")
        with io.BytesIO(payload) as payload_stream:
            s3_client.upload_fileobj(payload_stream, self.bucket, output_key)
        logger.info(event=LogEvent.WriteToS3End)
    return f"s3://{self.bucket}/{output_key}"
def _write_result(self, result: BaseModel, filename: str,
                  keep_inputs: bool = True, keep_outputs: bool = True):
    """Write result to a log file

    Args:
        result: Result to be written
        filename: Name of the log file
        keep_inputs: Whether to write the function inputs
        keep_outputs: Whether to write the function outputs
    """
    # Fields to drop from the serialized record
    skipped = set()
    if not keep_inputs:
        skipped.add('inputs')
    if not keep_outputs:
        skipped.add('value')

    # Append one JSON line per result
    log_path = os.path.join(self.output_dir, filename)
    with open(log_path, 'a') as fp:
        fp.write(result.json(exclude=skipped) + '\n')
def serialize_message(message: BaseModel, compress: bool = False) -> bytes:
    """Serialize ``message`` to UTF-8 JSON bytes.

    When ``compress`` is set, the payload is zstd-compressed and
    prefixed with ``b"Z"`` so readers can detect the encoding.
    """
    raw = message.json().encode("utf-8")
    if not compress:
        return raw
    return b"Z" + zstandard.compress(raw)
def make_pydantic_model_bq_safe(model: BaseModel) -> Dict[str, Any]:
    """
    This is ugly but I think it's the best option until
    https://github.com/pydantic/pydantic/issues/1409
    """
    # JSON round-trip turns complex field types into plain primitives
    # before the BigQuery-safety pass.
    as_dict = json.loads(model.json())
    return make_dict_bq_safe(as_dict)
async def post(base_url: str, path: str, data: BaseModel):
    """POST the model's JSON body to ``base_url + path`` and return the response."""
    payload = data.json()
    async with AsyncClient(base_url=base_url) as client:
        return await client.post(url=path, data=payload)
def perform_base_model(self, verb, path, data: BaseModel):
    """Invoke ``verb`` (an HTTP-method callable) on this host's URL.

    Sends the model's JSON string as the request payload and returns
    the decoded JSON response body.
    """
    url = f"http://{self.ip_addr}:{self.port}{path}"
    return verb(url, data.json()).json()
def write_model(dirpath: Path, name: str, model: BaseModel):
    """Serialize ``model`` to JSON at the path derived from ``dirpath``/``name``.

    Raises:
        FileExistsError: the target exists but is not a regular file
            (e.g. a directory), so it must not be overwritten.
    """
    target: Path = _get_file_path(dirpath, name)
    if target.exists() and not target.is_file():
        raise FileExistsError()
    target.write_text(model.json())
def pydantic_to_json_string(instance: pydantic.BaseModel) -> str:
    """Convert Pydantic model instance to JSON string."""
    serialized: str = instance.json()
    return serialized