def dump_schema(f, schema: Schema = None, *args, **kwargs):
    """Call ``f(*args, **kwargs)`` and serialize its result with *schema*.

    If ``f`` returns a ``(payload, status)`` tuple, only the payload is
    dumped and the status element is passed through unchanged.

    Raises:
        ProgrammingError: if *schema* is missing or not a ``Schema``.
    """
    if schema is None:
        raise ProgrammingError("No Schema specified")
    if not isinstance(schema, Schema):
        raise ProgrammingError("schema must be of type marshmallow.Schema")
    response = f(*args, **kwargs)
    if isinstance(response, tuple):
        # Dump only the payload; keep the status element untouched.
        return schema.dump(response[0]), response[1]
    return schema.dump(response)
def respond_with_schema(self, schema: Schema, value, status: int = 200) -> Response:
    """Validate and serialize *value* with *schema*, then respond.

    Bug fix: ``marshmallow.Schema.validate`` returns a dict of errors —
    it does not raise ``ValidationError`` — so the previous
    ``try/except ValidationError`` never fired and invalid payloads
    slipped straight through to ``dump``. Check the returned dict instead.

    :param schema: schema used for validation and serialization
    :param value: payload to validate and dump
    :param status: HTTP status for the success response
    :return: error response on validation failure, otherwise the dumped payload
    """
    errors = schema.validate(value)
    if errors:
        return self.error("invalid schema supplied")
    result = schema.dump(value)
    return self.respond(result, status)
def _export(self,
            export_type: Literal['vulns', 'assets', 'compliance'],
            schema: Schema,
            **kwargs
            ) -> Union[ExportsIterator, UUID]:
    '''
    Initiate (or resume) an export job for the specified datatype.

    API Documentation for the job listings for
    :devportal:`assets <exports-assets-request-export>`,
    :devportal:`compliance <io-exports-compliance-create>`, and
    :devportal:`vulnerabilities <exports-vulns-request-export>` datatypes.
    '''
    # Pull the control options out of kwargs; whatever remains is the
    # export payload to be validated through the schema.
    export_uuid = kwargs.pop('uuid', None)
    use_iterator = kwargs.pop('use_iterator', True)
    when_done = kwargs.pop('when_done', False)
    iterator_cls = kwargs.pop('iterator', ExportsIterator)
    timeout = kwargs.pop('timeout', None)
    payload = schema.dump(schema.load(kwargs))

    if not export_uuid:
        # No job handed to us: create a fresh one via the API.
        export_uuid = self._api.post(f'{export_type}/export',
                                     json=payload,
                                     box=True).export_uuid
        self._log.debug(
            f'{export_type} export job {export_uuid} initiated')

    if use_iterator:
        return iterator_cls(self._api,
                            type=export_type,
                            uuid=export_uuid,
                            _wait_for_complete=when_done,
                            timeout=timeout)
    return UUID(export_uuid)
def paginate(query: BaseQuery, schema: Schema) -> dict:
    """Paginate *query* from request args and serialize the page via *schema*.

    Bug fix: ``request.args`` values are strings; without ``type=int``
    the raw string (e.g. ``"2"``) was handed to ``query.paginate``,
    which expects integers. ``type=int`` coerces valid values and falls
    back to the default otherwise.

    :param query: the query to paginate
    :param schema: schema used to dump the page items
    :return: dict with totals, next/prev links and serialized results
    :raises Exception: when called outside a request context
    """
    if not request.endpoint:
        raise Exception("Cannot paginate outside of request context.")
    page = request.args.get("page", DEFAULT_PAGE_NUMBER, type=int)
    per_page = request.args.get("page_size", DEFAULT_PAGE_SIZE, type=int)
    page_obj = query.paginate(page=page, per_page=per_page)
    # NOTE(review): the links advertise a ``per_page`` parameter but this
    # handler reads ``page_size`` — confirm which name clients send.
    next_ = url_for(
        request.endpoint,
        page=page_obj.next_num if page_obj.has_next else page_obj.page,
        per_page=per_page,
        **request.view_args,
    )
    prev = url_for(
        request.endpoint,
        page=page_obj.prev_num if page_obj.has_prev else page_obj.page,
        per_page=per_page,
        **request.view_args,
    )
    return {
        "total": page_obj.total,
        "pages": page_obj.pages,
        "next": next_,
        "prev": prev,
        "results": schema.dump(page_obj.items),
    }
def paginate(self, page: int = 1, per_page: int = 20, schema: Schema = None,
             only_data: bool = False) -> Pagination:
    """
    Args:
        page (int): Page number
        per_page (int): Limit in the number of items per page
        schema (Schema): class:`Schema` to serialize items
        only_data (bool): Flag to return schema dump of the query result

    Returns:
        class:`Pagination` instance contains `per_page` items from page
        `page` (or, when ``only_data`` is set, the serialized items).

    Raises:
        ValueError: when ``only_data`` is True but no schema is given.
    """
    items = self.limit(per_page).offset((page - 1) * per_page).all()

    if only_data:
        if schema is None:
            # fix: error message previously said "scheme"
            raise ValueError('schema cannot be None when only_data is set True')
        return schema.dump(items)

    if not items and page != 1:
        # Past the end of the result set: empty page, zero total.
        return Pagination(self, page, per_page, 0, [], schema)

    # No need to count if we're on the first page and there are fewer
    # items than we expected.
    if page == 1 and len(items) < per_page:
        total = len(items)
    else:
        total = self.order_by(None).count()

    return Pagination(self, page, per_page, total, items, schema)
def respond_with_schema(self, schema: Schema, value, status: int = 200) -> Response:
    """Dump *value* through *schema* (marshmallow-2 result object) and respond.

    Returns an error response if the dump produced errors, otherwise the
    serialized data with the given status.
    """
    marshalled = schema.dump(value)
    if marshalled.errors:
        return self.error('invalid schema supplied')
    return self.respond(marshalled.data, status)
def test_dataclass_serialize(schema: Schema):
    # A 3x3 zero matrix should dump to nested lists plus its dtype name.
    expected = {
        'array': {
            'data': [[0.0, 0.0, 0.0],
                     [0.0, 0.0, 0.0],
                     [0.0, 0.0, 0.0]],
            'dtype': 'float64',
        }
    }
    assert schema.dump(TestDTO(array=np.zeros((3, 3)))) == expected
def __init__(self, page_queryset: PaginatedQuery, schema: Schema):
    """Build a paginated response envelope from *page_queryset*.

    Items are serialized with *schema* (marshmallow-2 ``.data`` result)
    and collected into an ordered ``result`` mapping together with the
    count, page count and navigation links.
    """
    self.page = page_queryset.get_page()
    self.count = page_queryset.query.count()
    self.page_nums = page_queryset.get_page_count()
    dumped = schema.dump(obj=page_queryset.get_object_list(), many=True)
    self.data = dumped.data
    envelope = OrderedDict()
    envelope['count'] = self.count
    envelope['page_nums'] = self.page_nums
    envelope['next'] = self.get_next_url()
    envelope['previous'] = self.get_previous_url()
    envelope['results'] = self.data
    self.result = envelope
def dump(self, obj, *, many=None):
    """Dump via the base ``Schema``, then merge in catch-all parameters.

    The catch-all field cannot be represented statically in the schema,
    so the dumped dict(s) are patched after the fact — admittedly hacky,
    but the alternative (separate schemas for dump and load) is worse.
    """
    result = Schema.dump(self, obj, many=many)
    if many:
        for idx, source in enumerate(obj):
            result[idx].update(
                _handle_undefined_parameters_safe(cls=source, kvs={}, usage="dump"))
    else:
        result.update(
            _handle_undefined_parameters_safe(cls=obj, kvs={}, usage="dump"))
    return result
def data_dump_and_validation(schema: Schema, data: Any) -> Dict:
    """
    Use a marshmallow schema to dump and validate input data.

    :param schema: the schema to use to dump an object and validate it
    :param data: the data to dump and validate
    :raises ValidationError: when the dumped payload fails validation
    :return: the resulting dumped data from marshmallow
    """
    dumped = schema.dump(data)
    validation_errors = schema.validate(dumped)
    if validation_errors:
        raise ValidationError(validation_errors)
    return dumped
def serialize(data_object, format='json'):
    """Serialize *data_object* into *format* via its generated schema.

    A schema class is built from the object's doc map; every declared
    field is read off the object (defaulting to None) and dumped.
    """
    if format not in formats:
        raise Exception(
            "Format '{format}' not supported".format(format=format))
    doc_map = get_doc_map(data_object.__class__)
    schema_cls = build_schema(doc_map, data_object.__module__)
    schema = schema_cls()
    data = {name: getattr(data_object, name, None)
            for name in schema.declared_fields.keys()}
    dumped = schema.dump(data)
    return formats[format].dumps(dumped.data)
def assert_serde(
    self,
    dictionary: Dict[str, Any],
    schema: marshmallow.Schema,
    typ: type,
) -> None:
    """
    Round-trip *dictionary* through *schema*.

    Loads the dict, checks the loaded object's type is *typ*, dumps it
    again and checks the result equals the original dict. Objects that
    expose ``to_string`` must render without newlines.
    """
    loaded = schema.load(dictionary)
    self.assertEqual(type(loaded), typ)
    roundtripped = schema.dump(loaded)
    if hasattr(loaded, 'to_string'):
        self.assertFalse('\n' in loaded.to_string())
    self.assertDictEqual(dictionary, roundtripped)
def _base_request(request: Request,
                  request_schema: Schema,
                  response_schema: Schema,
                  method: Callable[[object, Optional[str]], object],
                  is_paging: bool = False,
                  code=None,
                  code_name: Optional[str] = None):
    """Request processing base method.

    :param request: Request object
    :param request_schema: RequestSchema Instance
    :param response_schema: ResponseSchema Instance
    :param method: method to be processed by the request
    :param is_paging: paging type request
    :param code: request identifier
    :param code_name: parameter name of request identifier
    :return: response appropriate to the HTTP method and outcome
    """
    if not request:
        return response.HttpResponseNotFound()

    # Merge query string, body and the optional URL identifier into one
    # parameter dict before schema validation.
    request_param = {}
    if request.query_params:
        request_param.update(request.query_params.dict())
    if request.data:
        request_param.update(request.data)
    if code and code_name:
        request_param[code_name] = code

    request_obj, errors = request_schema.load(request_param)
    if errors:
        return response.HttpResponseBadRequest(errors)

    try:
        if is_paging:
            path = _parse_path(request)
            response_obj = method(request_obj, path)
        else:
            response_obj = method(request_obj)
    except RequestParameterException:
        return response.HttpResponseBadRequest()
    except ResourceNotFoundException:
        return response.HttpResponseNotFound()
    except Exception:  # fix: previously bound an unused name ``e`` (F841)
        return response.HttpResponseServerError()

    data, _ = response_schema.dump(response_obj)
    if request.method == 'POST':
        return Response(data, status.HTTP_201_CREATED)
    elif request.method == 'DELETE':
        return Response({}, status.HTTP_204_NO_CONTENT)
    else:
        return Response(data, status.HTTP_200_OK)
def build_response(self, schema: Schema, response):
    """
    Validate *response* against *schema* and return its serialized form.

    Validating a response is unusual, but done deliberately here to
    ensure the schema stays up to date; failures are raised and handled
    upstream as a 500 internal error.

    :param schema: schema describing the response payload
    :param response: raw response data
    :return: the dumped payload
    :raises exceptions.ValidationError: when validation fails
    """
    validation_errors = schema.validate(response)
    if validation_errors:
        raise exceptions.ValidationError(validation_errors)
    return schema.dump(response)
def read_json_request(self, schema: Schema):
    """
    Decode the request body as JSON, validate it against *schema*, and
    return the load/dump round-trip of the payload.

    :param schema: Schema descendant used for validation and marshalling
    :raises BaseApiError: when the body is not JSON or fails validation
    """
    # Ensure body can be JSON decoded
    try:
        payload = json.loads(self.request.body)
    except JSONDecodeError as exc:
        self.set_status(self.STATUS_ERROR_EXTERNAL, reason=str(exc))
        self.write_error()
        raise BaseApiError("Expected request body to be JSON. Received '{}'".format(self.request.body))

    validation_errors = schema.validate(payload)
    if validation_errors:
        self.error_messages = validation_errors
        self.set_status(self.STATUS_ERROR_EXTERNAL, reason="Failed request schema validation")
        self.write_error()
        raise BaseApiError("Failed schema validation: {}".format(str(validation_errors)))

    return schema.dump(schema.load(payload))
def codec(schema: Schema) -> JsonCodec[V]:
    """Wrap *schema* into a ``JsonCodec`` that dumps/loads via marshmallow.

    Encoding dumps a value and takes ``.data``; decoding loads JSON data
    and takes ``.data`` (marshmallow-2 result objects).
    """
    def encode(value):
        return schema.dump(value).data

    def decode(payload):
        return schema.load(payload).data

    return JsonCodec[V](encode, decode)
def _entity_to_data(self, schema: Schema, entity) -> Dict[str, Any]:
    """Serialize *entity* into a plain dict using *schema*."""
    dumped = schema.dump(entity)
    return dumped
def test_fields_dump(app: Flask, pendulum_field_schema: Schema):
    # Dumping a naive datetime should render it shifted into the
    # configured timezone (08:20 -> 16:20).
    with app.app_context():
        payload = {"time": datetime(1994, 9, 11, 8, 20)}
        dumped = pendulum_field_schema.dump(payload)
        assert dumped["time"] == "1994-09-11 16:20:00"
def test_fileds_none_dump_handle(app: Flask, pendulum_field_schema: Schema):
    # A None value must survive dumping as None, not raise or stringify.
    with app.app_context():
        dumped = pendulum_field_schema.dump({"time": None})
        assert dumped["time"] is None
def as_dict(schema: Schema, ob: dataclasses.dataclass) -> t.Dict[str, t.Any]:
    """Convert dataclass *ob* to a dict via *schema*, dropping None fields."""
    raw = schema.from_dataclass(ob)
    cleaned = schema.omit_none(raw)
    loaded = schema.load(cleaned)  # or validate()?
    return schema.dump(loaded)
def _assert_load_dump(
    schema: marshmallow.Schema, loaded: t.Any, dumped: t.Dict[t.Any, t.Any]
) -> None:
    """Assert that loading then dumping *dumped* round-trips to itself."""
    # NOTE(review): *loaded* is accepted but never checked — confirm
    # whether ``schema.load(dumped) == loaded`` was also intended.
    roundtripped = schema.dump(schema.load(dumped))
    assert roundtripped == dumped
def asdict(schema: ms.Schema, data: str):
    """Return the schema-dumped representation of *data*."""
    dumped = schema.dump(data)
    return dumped
def render(result: QueryResult, schema: Schema) -> Any:
    """Serialize *result* with *schema* for output."""
    serialized = schema.dump(result)
    return serialized