def create_upload_func(self, ns, definition, path, operation):
    """
    Build and register an upload endpoint for the given path/operation.

    The endpoint loads query-string data against the definition's request
    schema, stages each (non-excluded) uploaded file, and invokes the
    definition's func with the staged files plus merged path/query data.

    :param ns: the namespace
    :param definition: the endpoint definition (func, request/response schemas)
    :param path: the route path to register
    :param operation: the operation (supplies the default response code)
    """
    request_schema = definition.request_schema or Schema()
    response_schema = definition.response_schema or Schema()

    @self.graph.route(path, operation, ns)
    def upload(**path_data):
        request_data = load_query_string_data(request_schema)
        if not request.files:
            raise BadRequest("No files were uploaded")
        # Stage each upload as a context manager; exclude_func filters out
        # unwanted parts by (name, fileobj).
        uploads = [
            temporary_upload(name, fileobj)
            for name, fileobj in request.files.items()
            if not self.exclude_func(name, fileobj)
        ]
        # NOTE(review): ``nested`` enters all staged uploads at once
        # (contextlib.nested-style) so temp files are released on exit —
        # confirm which ``nested`` implementation is imported here.
        with nested(*uploads) as files:
            response_data = definition.func(files, **merge_data(path_data, request_data))
            if response_data is None:
                # Handler produced no body: reply 204 No Content.
                return "", 204
            return dump_response_data(response_schema, response_data, operation.value.default_code)

    # Only attach qs/response decorators when schemas were actually supplied.
    if definition.request_schema:
        upload = qs(definition.request_schema)(upload)
    if definition.response_schema:
        upload = response(definition.response_schema)(upload)
    return upload
def test_notebooks_field_circular_dependency(monkeypatch):
    """
    Test that NotebooksField raises a ValidationError if notebook
    specifications have circular dependencies.
    """
    monkeypatch.setattr("pathlib.Path.exists", lambda self: True)
    # Two notebooks that each declare the other as a parameter: a cycle.
    specs = {
        "notebook1": {
            "filename": "NOTEBOOK1.ipynb",
            "parameters": {"param": "notebook2"},
        },
        "notebook2": {
            "filename": "NOTEBOOK2.ipynb",
            "parameters": {"param": "notebook1"},
        },
    }
    field = NotebooksField()
    # Context cannot be set on a Field directly — attach via a parent Schema.
    field._bind_to_schema("notebooks", Schema(context={"inputs_dir": "DUMMY_INPUTS_DIR"}))
    with pytest.raises(ValidationError) as excinfo:
        field.deserialize(specs)
    assert ("Notebook specifications contain circular dependencies."
            in excinfo.value.messages)
def schema(cls: Type[A], *, infer_missing: bool = False, only=None, exclude=(), many: bool = False, context=None, load_only=(), dump_only=(), partial: bool = False, unknown=None) -> SchemaType:
    """Build a marshmallow schema class for dataclass *cls* and instantiate it
    with the given marshmallow keyword arguments.

    When ``unknown`` is not given, it is derived from the dataclass's
    configured undefined-parameter action, if any.
    """
    schema_cls = build_schema(cls, DataClassJsonMix, infer_missing, partial)

    if unknown is None:
        action = _undefined_parameter_action_safe(cls)
        if action is not None:
            # The action names line up with marshmallow's ``unknown`` keywords.
            unknown = action.name.lower()

    return schema_cls(
        only=only,
        exclude=exclude,
        many=many,
        context=context,
        load_only=load_only,
        dump_only=dump_only,
        partial=partial,
        unknown=unknown,
    )
def configure_delete(self, ns, definition):
    """
    Register a delete endpoint.

    The definition's func should be a delete function, which must:
    - accept kwargs for path data
    - return truthy/falsey

    :param ns: the namespace
    :param definition: the endpoint definition
    """
    request_schema = definition.request_schema or Schema()

    @self.add_route(ns.instance_path, Operation.Delete, ns)
    @qs(request_schema)
    @wraps(definition.func)
    def delete(**path_data):
        headers = dict()
        request_data = load_query_string_data(request_schema)
        # require_response_data raises when the delete func returns falsey.
        response_data = require_response_data(definition.func(**merge_data(path_data, request_data)))
        # Allow the definition to inject response headers based on the result.
        definition.header_func(headers, response_data)
        response_format = self.negotiate_response_content(definition.response_formats)
        # Deletes return an empty body; only the status code and headers matter.
        return dump_response_data(
            "",
            None,
            status_code=Operation.Delete.value.default_code,
            headers=headers,
            response_format=response_format,
        )

    delete.__doc__ = "Delete a {} by id".format(ns.subject_name)
def configure_retrievefor(self, ns, definition):
    """
    Register a relation endpoint.

    The definition's func should be a retrieve function, which must:
    - accept kwargs for path data and optional request data
    - return an item

    The definition's request_schema will be used to process query string
    arguments, if any.

    :param ns: the namespace
    :param definition: the endpoint definition
    """
    request_schema = definition.request_schema or Schema()

    @self.graph.route(ns.relation_path, Operation.RetrieveFor, ns)
    @qs(request_schema)
    @response(definition.response_schema)
    def retrieve(**path_data):
        # Merge query-string arguments into the path data before delegating.
        query_data = load_query_string_data(request_schema)
        merged = merge_data(path_data, query_data)
        result = require_response_data(definition.func(**merged))
        return dump_response_data(definition.response_schema, result)

    retrieve.__doc__ = "Retrieve {} relative to a {}".format(
        pluralize(ns.object_name), ns.subject_name)
class BaseView(web.View):
    # Default serializer: a permissive empty schema; subclasses override.
    serializer = Schema()

    async def pre_process_request(self):
        """Validate the incoming request against ``self.serializer``.

        Body-bearing methods (POST/PUT/PATCH/DELETE) are parsed as JSON;
        all other methods use the query string. Raises APIException with
        status 400 on validation errors; otherwise stores the validated
        payload on ``self.validated_data``.
        """
        if self.request._method in {
                hdrs.METH_POST, hdrs.METH_PUT, hdrs.METH_PATCH, hdrs.METH_DELETE
        }:
            data = await self.request.json()
        else:
            data = self.request.query
        # marshmallow 2.x-style result object exposing .errors and .data.
        result = self.serializer.load(data)
        if result.errors:
            raise APIException(result.errors, 400)
        self.validated_data = result.data

    @asyncio.coroutine
    def __iter__(self):
        """Dispatch the request: validate input, call the handler matching
        the HTTP method, and convert APIException into its prepared response.
        """
        if self.request._method not in hdrs.METH_ALL:
            self._raise_allowed_methods()
        method = getattr(self, self.request._method.lower(), None)
        if method is None:
            self._raise_allowed_methods()
        try:
            # Validation runs before the handler; both may raise APIException.
            yield from self.pre_process_request()
            resp = yield from method()
        except APIException as e:
            return e.response
        return resp
def deserialize(string, object_class, format='json'):
    """Deserialize *string* (or an already-loaded mapping) into an instance
    of *object_class*, recursively materializing nested objects and lists.
    """
    if format not in formats:
        raise Exception(
            "Format '{format}' not supported".format(format=format))

    doc_map = get_doc_map(object_class)
    schema = build_schema(doc_map, object_class.__module__)()

    if isinstance(string, str):
        raw = formats[format].loads(string)
        loaded_data = schema.load(raw).data
    else:
        # Already-parsed data (nested recursion path): use it as-is.
        loaded_data = string

    module = importlib.import_module(object_class.__module__)
    for key, value in loaded_data.items():
        if isinstance(value, list):
            # Doc map stores list element types as "[TypeName]"; strip brackets.
            element_name = doc_map[key][1:-1]
            element_class = getattr(module, element_name)
            loaded_data[key] = [
                deserialize(item, element_class) for item in value
            ]
        elif isinstance(value, dict):
            nested_class = getattr(module, doc_map[key])
            loaded_data[key] = deserialize(value, nested_class)

    return object_class(**loaded_data)
def configure_retrieve(self, ns, definition):
    """
    Register a retrieve endpoint.

    The definition's func should be a retrieve function, which must:
    - accept kwargs for path data
    - return an item or falsey

    :param ns: the namespace
    :param definition: the endpoint definition
    """
    request_schema = definition.request_schema or Schema()

    @self.add_route(ns.instance_path, Operation.Retrieve, ns)
    @qs(request_schema)
    @response(definition.response_schema)
    @wraps(definition.func)
    def retrieve(**path_data):
        headers = dict()
        query_data = load_query_string_data(request_schema)
        # A falsey result from the retrieve func is turned into an error here.
        result = require_response_data(
            definition.func(**merge_data(path_data, query_data)))
        definition.header_func(headers, result)
        chosen_format = self.negotiate_response_content(definition.response_formats)
        return dump_response_data(
            definition.response_schema,
            result,
            headers=headers,
            response_format=chosen_format,
        )

    retrieve.__doc__ = "Retrieve a {} by id".format(ns.subject_name)
class OperationResult(PrettyObject):
    """Result of an operation: id, code, result payload, message, error."""

    __slots__ = [
        "id",
        "code",
        "result",
        "message",
        "error",
    ]

    # Inner validation schema (deliberately shadows the imported Schema name).
    class Schema(Schema):
        id = fields.Str(allow_none=True)
        code = fields.Int(allow_none=True)
        result = fields.Dict(allow_none=True)
        message = fields.Str(allow_none=True)
        error = fields.Str(allow_none=True)

    # Shared schema instance; marshmallow 2.x-style (strict, tuple-returning load).
    SCHEMA = Schema(strict=True)

    def __init__(self, **kwargs):
        self.update(**kwargs)

    def update(self, **kwargs):
        """Validate *kwargs* against SCHEMA and assign every schema field.

        Missing fields are set to None so all __slots__ attributes exist.

        NOTE(review): attributes are taken from the raw ``kwargs`` rather
        than the deserialized ``values`` — confirm whether load()'s converted
        values should be used instead.
        """
        values, errors = self.SCHEMA.load(kwargs)
        if errors:
            raise ValidationError(errors)
        for name in self.SCHEMA.fields.keys():
            value = kwargs.pop(name, None)
            setattr(self, name, value)
class ViewSet(BaseView):
    """View that selects request/response serializers per HTTP method,
    falling back to permissive defaults when no mapping exists."""

    serializers_map = {}
    default_serializer = Schema()
    default_response_serializer = Schema()
    response_serializer_map = {}

    @property
    def serializer(self):
        """Request serializer for the current HTTP method (or the default)."""
        return self.serializers_map.get(
            self.request.method, self.default_serializer)

    @property
    def response_serializer(self):
        """Response serializer for the current HTTP method (or the default)."""
        return self.response_serializer_map.get(
            self.request.method, self.default_response_serializer)
def field_to_property(field: fields.Field):
    """Translate a marshmallow field into its JSON-schema property mapping.

    :param field: the marshmallow field to translate
    """
    # pylint: disable=protected-access
    json_schema = JSONSchema()
    # An empty Schema serves as the (unused) owning-schema argument.
    return json_schema._get_schema_for_field(Schema(), field)
def test_toggle_load_instance_per_schema(self, models, Schema):
    """load_instance can be toggled per schema instance, inverting whether
    load() returns a model instance or a plain dict."""
    teacher_name = "Teachy T"
    payload = {"full_name": teacher_name}

    # Without an override, the class-level load_instance option applies.
    default_schema = Schema()
    loaded = default_schema.load(payload)
    default = default_schema.opts.load_instance
    expected_default_type = models.Teacher if default else dict
    assert isinstance(loaded, expected_default_type)

    # Flipping the flag on a fresh instance inverts the result type.
    overriding_schema = Schema(load_instance=not default)
    loaded = overriding_schema.load(payload)
    expected_override_type = dict if default else models.Teacher
    assert isinstance(loaded, expected_override_type)
def test_handle_error_reraises_errors(web_request):
    """handle_error must propagate the ValidationError it is handed, even
    when custom status code and headers are supplied."""
    parser = Parser()
    error = ValidationError("error raised")
    with pytest.raises(ValidationError):
        parser.handle_error(
            error,
            web_request,
            Schema(),
            error_status_code=422,
            error_headers={},
        )
class SensorPrivateResource(BaseResource):
    # Validates the JSON payload for sensor create/update requests.
    class Schema(Schema):
        name = fields.String(required=False)
        status = fields.Integer(default=1)
        sensor_type = fields.Integer(required=True)

    @provide_db_session
    @schematic_request(Schema())
    def post(self, sensor_id, request_obj):
        """Create or update the sensor identified by *sensor_id* from the
        validated request payload; returns 201 unconditionally."""
        SensorManager(self.db_session).create_or_update(sensor_id, request_obj)
        return 201
def test_register_both_create_and_create_collection(self):
    """Registering both Create and CreateCollection on the same namespace
    must raise RouteAlreadyRegisteredException."""
    definitions = {
        operation: EndpointDefinition(
            func=lambda x: x,
            request_schema=Schema(),
            response_schema=Schema(),
        )
        for operation in (Operation.Create, Operation.CreateCollection)
    }
    assert_that(
        calling(configure_crud).with_args(self.graph, self.ns, definitions),
        raises(RouteAlreadyRegisteredException),
    )
def conditional_crypto_deserialize(object_dict, parent_object_dict):
    """Return the WebPush Crypto Schema if there's a data payload"""
    if not parent_object_dict.get("body"):
        # No payload: nothing crypto-related to validate.
        return Schema()
    # Choose a validator from the declared content encoding; anything
    # unrecognized falls through to the invalid-encoding schema.
    schema_by_encoding = {
        "aesgcm128": WebPushCrypto01HeaderSchema,
        "aesgcm": WebPushCrypto04HeaderSchema,
    }
    encoding = object_dict.get("content-encoding")
    schema_cls = schema_by_encoding.get(encoding, WebPushInvalidContentEncodingSchema)
    return schema_cls()
def test_notebooks_field_invalid_keys(monkeypatch, key, message):
    """
    Test that NotebooksField raises a ValidationError if a notebook key is
    not a string, or has a disallowed value.
    """
    monkeypatch.setattr("pathlib.Path.exists", lambda self: True)
    field = NotebooksField()
    # Context cannot be set on a Field directly — attach via a parent Schema.
    parent = Schema(context={"inputs_dir": "DUMMY_INPUTS_DIR"})
    field._bind_to_schema("notebooks", parent)
    with pytest.raises(ValidationError) as excinfo:
        field.deserialize({key: {"filename": "NOTEBOOK1.ipynb"}})
    assert message in excinfo.value.messages[key]["key"]
def serialize(data_object, format='json'):
    """Serialize *data_object* to a string in the given *format*, using a
    schema built from the object's doc map."""
    if format not in formats:
        raise Exception(
            "Format '{format}' not supported".format(format=format))

    doc_map = get_doc_map(data_object.__class__)
    schema = build_schema(doc_map, data_object.__module__)()
    # Collect every declared field from the object; absent attrs become None.
    payload = {
        name: getattr(data_object, name, None)
        for name in schema.declared_fields
    }
    dumped = schema.dump(payload)
    return formats[format].dumps(dumped.data)
def configure_status_convention(graph):
    """Register a CRUD search endpoint exposing the index status.

    :param graph: the object graph
    :return: the namespace the endpoint was registered under
    """
    store = IndexStatusStore(graph)
    ns = Namespace(subject="index_status")

    def search(**kwargs):
        # Status is global; query arguments are accepted but ignored.
        status = store.get_status()
        return status, len(status)

    configure_crud(graph, ns, {
        Operation.Search: EndpointDefinition(
            func=search,
            request_schema=Schema(),
            response_schema=IndexStatusSchema(),
        ),
    })
    return ns
def test_offset_limit_page_to_paginated_list():
    """OffsetLimitPage.to_paginated_list builds a paginated envelope with
    offset/limit/count, the items, and self/prev links that carry extra
    query arguments (``foo=bar``) through into the hrefs."""
    graph = create_object_graph(name="example", testing=True)
    ns = Namespace("foo")

    # Register a dummy search endpoint so pagination links can resolve.
    @graph.flask.route("/", methods=["GET"], endpoint="foo.search.v1")
    def search():
        pass

    with graph.flask.test_request_context():
        page = OffsetLimitPage(
            offset=10,
            limit=10,
            foo="bar",  # extra kwarg: must survive into the link query strings
        )
        result = [], 0  # empty item list with a zero total count
        paginated_list, headers = page.to_paginated_list(
            result, _ns=ns, _operation=Operation.Search)
        schema_cls = page.make_paginated_list_schema_class(ns, Schema())
        data = schema_cls().dump(paginated_list).data
        assert_that(
            data,
            is_(
                equal_to(
                    dict(
                        offset=10,
                        limit=10,
                        count=0,
                        items=[],
                        _links=dict(
                            self=dict(
                                href="http://localhost/?offset=10&limit=10&foo=bar",
                            ),
                            # prev exists because offset >= limit; no next on
                            # an empty result.
                            prev=dict(
                                href="http://localhost/?offset=0&limit=10&foo=bar",
                            ),
                        ),
                    ))))
def get(self, **kwargs):
    """External facing subscription list endpoint GET

    Gets per-subscription data usage for the current billing cycle,
    including whether a metered plan has exceeded its allowance.

    Args:
        kwargs (dict): filters to apply to query Subscriptions

    Returns:
        json: serialized list of usage rows
    """
    # TODO(review): cycle anchor is hard-coded (development stub) —
    # presumably this should become date.today(); confirm before changing.
    from_date = date.fromisoformat('2019-09-10')
    cycle = BillingCycle.get_current_cycle(from_date)

    # Overage flag: metered plans (is_unlimited == 0) compare summed usage
    # against the allowance; unlimited plans can never be over.
    xpr = case(
        [
            (Plan.is_unlimited == 0,
             func.sum(DataUsage.mb_used) > Plan.mb_available),
        ],
        else_=0,
    ).label('mb_used')

    query = (
        db.session.query(DataUsage, xpr, Plan.mb_available, Plan.is_unlimited)
        .join(Subscription, Subscription.id == DataUsage.subscription_id)
        .join(Plan, Plan.id == Subscription.plan_id)
        .filter(DataUsage.from_date >= cycle.start_date)
        .filter(DataUsage.to_date <= cycle.end_date)
        .group_by(DataUsage.subscription_id)
    )
    rows = query.all()
    result = Schema().dump(rows, many=True)
    return jsonify(result)
class SchemedEntity(Entity, metaclass=SchemedEntityMeta): schema = Schema() # __slots__ = False def __init__(self, **kwargs): for name, field in self.schema.fields.items(): if field.default is not missing: kwargs.setdefault(name, field.default) super().__init__(**kwargs) @class_or_instance_property def _fields(self): return self.schema.fields def dump(self, many=False, **kwargs): assert not many return self.schema.dump(self, **kwargs) @classmethod def load(cls, data, many=False, **kwargs): if not many: return cls(**cls.schema.load(data, **kwargs)) else: return tuple( cls(**item) for item in cls.schema.load(data, many=many, **kwargs)) @classmethod def __deepcopy__(cls, memo): rv = deepcopy(cls) rv.schema.entity = rv return rv def to_dict(self, *args, **kwargs): # TODO: Obviously this should use dump in some way return super().to_dict(*args, **kwargs)
def test_notebooks_field_deserialise(monkeypatch):
    """
    Test that NotebooksField deserialises a dict of notebook specifications
    as an OrderedDict.
    """
    monkeypatch.setattr("pathlib.Path.exists", lambda self: True)
    specs = {
        "notebook1": {"filename": "NOTEBOOK1.ipynb"},
        "notebook2": {
            "filename": "NOTEBOOK2.ipynb",
            "parameters": {"other_notebook": "notebook1"},
        },
    }
    field = NotebooksField()
    # Context cannot be set on a Field directly — attach via a parent Schema.
    field._bind_to_schema(
        "notebooks", Schema(context={"inputs_dir": "DUMMY_INPUTS_DIR"}))
    result = field.deserialize(specs)
    assert isinstance(result, OrderedDict)
    assert result == specs
def _get_update_schema(self) -> Schema:
    """Schema used to validate update payloads; the default is permissive."""
    return Schema()
def _get_create_schema(self) -> Schema:
    """Schema used to validate create payloads; the default is permissive."""
    return Schema()
api = Api(app)

# Make sure the log directory exists before pointing file logging at it.
os.makedirs(os.path.dirname(Setting.FLASK_API_LOG_PATH), exist_ok=True)
logging.basicConfig(
    filename=Setting.FLASK_API_LOG_PATH,
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)

# Payload contract for incoming accident reports.
# from_dict is a classmethod — call it on the class, not a throwaway instance.
AccidentSchema = Schema.from_dict({
    'lat': fields.Float(required=True),
    'lng': fields.Float(required=True),
    'frontal': fields.Bool(required=True),
    'tilt': fields.Bool(required=True),
    'fire': fields.Bool(required=True),
    'fall': fields.Bool(required=True),
    'temp': fields.Float(required=True),
    'license_plate': fields.Str(required=True, validate=validate.Length(7)),
    'date': fields.DateTime(required=True),
})


def validate_json_data(body):
    """Return True if *body* satisfies the accident schema, else False."""
    try:
        AccidentSchema().load(body)
        return True
    except ValidationError as err:
        # Best-effort diagnostics; invalid payloads are an expected case.
        print(err.messages)
        return False
def test_with_schema_instance(self):
    """Passing a schema *instance* to ModuleLocalNested still yields a
    Nested field."""
    nested_field = ModuleLocalNested(Schema())
    assert isinstance(nested_field, fields.Nested)
def test_handle_error_reraises_errors(web_request):
    """handle_error must re-raise the ValidationError passed to it."""
    parser = Parser()
    error = ValidationError("error raised")
    with pytest.raises(ValidationError):
        parser.handle_error(error, web_request, Schema())
def test_method_with_no_serialize_is_missing(self):
    """A Method field configured with no serializer serializes to missing."""
    method_field = fields.Method()
    method_field.parent = Schema()
    assert method_field.serialize('', '', '') is missing_
def api_products_by_farmer():
    """Return the serialized products belonging to a farmer.

    NOTE(review): ``farmerId`` is not defined anywhere in this scope — this
    will raise NameError at request time unless it is a module-level global.
    It most likely should come from the route/request parameters; confirm
    and fix at the source.
    """
    products = Product.query.filter(Product.farmer_id == farmerId).all()
    return Schema(many=True).dump(products).data