def __init__(self) -> None:
    """Initialize empty endpoint state and the default pagination schema."""
    super().__init__()
    # Query-string arguments shared by paginated list endpoints.
    self.arg_schema = {
        "page": Str(missing=None, description="Page"),
        "per_page": Int(missing=20, description="Number to show"),
        "all": Boolean(missing=False, description="Return all results."),
    }
    # Populated later by subclasses / setup code.
    self.schema = None
    self.fields = {}
    self.basic_info = {}
class OpenSearchEndpoint(HTTPEndpoint):
    """Free-text search endpoint over samples, projects, and sessions.

    Expects ``?query=<text>&model=<sample|project|session|all>``.
    """

    args_schema = dict(
        page=Str(missing=None, description="Page"),
        per_page=Int(missing=20, description="Number to show"),
        all=Boolean(missing=False, description="Return all results."),
    )

    async def get(self, request):
        """Run the requested search and return serialized results."""
        db = app_context().database
        query_params = request.query_params
        if "query" not in query_params or "model" not in query_params:
            return JSONResponse({
                "Status": "Success",
                "Message": "Provide a string as a query parameter or model",
                "example": "/api/v2/search?query=lava&model=sample",
            })

        params = {"query": query_params["query"]}
        model = query_params["model"]
        models = {
            "sample": search_sample,
            "project": search_project,
            "session": search_session,
        }

        if model == "all":
            # Aggregated search; rows come back as (model, data) pairs.
            query_res = db.exec_sql_query(fn=search_total, params=params)
            json_res = [dict(model=x, data=y) for x, y in query_res]
            json_res.reverse()
            return JSONResponse(json_res)

        sql_fn = models.get(model)
        if sql_fn is None:
            # BUG FIX: the original for-loop fell through for an unrecognized
            # model and implicitly returned None (a server error). Return an
            # explicit error payload instead.
            return JSONResponse({
                "Status": "Error",
                "Message": f"Unknown model '{model}'",
                "example": "/api/v2/search?query=lava&model=sample",
            })

        schema = getattr(db.interface, model)(many=True)
        query_res = db.exec_sql_query(fn=sql_fn, params=params)
        json_res = [dict(r) for r in query_res]
        return APIResponse(json_res, total_count=len(json_res), schema=schema)
class PaginationArgs(ma.Schema):
    """Schema for reading pagination arguments from a request."""

    class Meta:
        """Reject payloads carrying fields beyond those declared here."""
        strict = True

    page = Int(missing=1, strict=True, validate=lambda p: p > 0)
    per_page = Int(missing=10, strict=True, validate=lambda p: p > 0)

    @validates_schema(pass_original=True)
    def check_unknown_fields(self, data, original_data):
        """Raise if the raw payload contains keys not declared on the schema."""
        unknown = set(original_data) - set(self.fields)
        if unknown:
            raise BadRequestKeyError("Unknown field {}".format(unknown))
def __init__(self, *args, **kwargs):
    """Validate the Meta configuration and build the argument schema.

    Raises:
        ValueError: if ``Meta.table`` is missing or None.
    """
    super().__init__(*args, **kwargs)
    self.meta = self.Meta()
    # BUG FIX: getattr had no default (AttributeError when the attribute is
    # absent) and the message referenced an undefined name ``k`` (NameError
    # the moment the error path was hit).
    if getattr(self.meta, "table", None) is None:
        raise ValueError(
            "Meta value 'table' must be provided for ViewAPIEndpoint")
    self._model_name = classname_for_table(self.meta.table)
    log.info(self._model_name)
    self.args_schema = dict(
        page=Str(missing=None),
        per_page=Int(missing=20),
        all=Boolean(missing=False),
    )
def __init__(self, *args, **kwargs):
    """Check Meta config, build the argument schema, and register filters.

    Raises:
        ValueError: if ``Meta.database`` or ``Meta.schema`` is None.
    """
    super().__init__(*args, **kwargs)
    self.meta = self.Meta()
    for k in ("database", "schema"):
        if getattr(self.meta, k) is None:
            raise ValueError(
                f"Meta value '{k}' must be provided for ModelAPIEndpoint")
    schema = self.meta.schema
    self._model_name = classname_for_table(schema.opts.model.__table__)
    log.info(self._model_name)

    d = "related models to nest within response [allowed_nests]"
    e = "?nest=session,project"
    des = create_params(d, e)
    self.instance_schema = dict(
        nest=NestedModelField(Str(), missing=[], description=des))

    # BUG FIX: the "page" description was a copy-paste of the "per_page" one.
    des_page = create_params("page of results to return", "?page=>i:5039")
    des_per_page = create_params("number of results per page", "?per_page=15")
    des_all = create_params("return all results", "?all=true")
    self.args_schema = dict(
        **self.instance_schema,
        page=Str(missing=None, description=des_page),
        per_page=Int(missing=20, description=des_per_page),
        all=Boolean(missing=False, description=des_all),
    )

    self._filters = []
    # Same registration order as before; order may affect parameter names.
    for filter_cls in (
        OpenSearchFilter,
        AuthorityFilter,
        FieldExistsFilter,
        FieldNotExistsFilter,
        EmbargoFilter,
        DateFilter,
        DOIFilter,
        CoordinateFilter,
        GeometryFilter,
        TextSearchFilter,
        AgeRangeFilter,
        IdListFilter,
        TagsFilter,
    ):
        self.register_filter(filter_cls)
class JobArgs(ma.Schema):
    """Schema describing the arguments required to create a new job."""

    class Meta:
        """Restrict input to the declared fields only."""
        strict = True

    case_id = Int(required=True)
    author = Str(required=True)
    name = Str(required=True)

    @validates_schema(pass_original=True)
    def check_unknown_fields(self, data, original_data):
        """Reject payloads that carry keys not declared on this schema."""
        extra = set(original_data) - set(self.fields)
        if len(extra) > 0:
            raise BadRequestKeyError("Unknown field", extra)
class DataFileListEndpoint(HTTPEndpoint):
    """A simple demonstration endpoint for paginating a select statement.
    Extremely quick, but somewhat hand-constructed."""

    args_schema = dict(
        page=Str(missing=None, description="Page"),
        per_page=Int(missing=20, description="Number to show"),
        all=Boolean(missing=False, description="Return all results."),
    )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        db = get_database()
        self.schema = db.interface.data_file
        self.model = db.model.data_file
        # BUG FIX: register_filter mutates args_schema. Take a per-instance
        # copy so filter params don't accumulate on the shared class
        # attribute across instances.
        self.args_schema = dict(self.args_schema)
        self._filters = []
        for filter_cls in (
            OpenSearchFilter,
            AuthorityFilter,
            FieldExistsFilter,
            FieldNotExistsFilter,
            EmbargoFilter,
            DateFilter,
            TextSearchFilter,
            AgeRangeFilter,
            IdListFilter,
            TagsFilter,
        ):
            self.register_filter(filter_cls)

    def register_filter(self, _filter: BaseFilter):
        """Register a filter specification to control parametrization of the model query"""
        f = _filter(self.model, schema=self.schema)
        if not f.should_apply():
            return
        # BUG FIX: getattr without a default raises AttributeError for
        # filters that define no params attribute.
        if params := getattr(f, "params", None):
            self.args_schema.update(**params)
        self._filters.append(f)
class ProductSchema(Schema):
    """Serialization schema for a purchasable product line item."""
    name = String()
    # Billing period; restricted to "monthly" or "yearly".
    # NOTE(review): `enum=` looks like doc/spec metadata rather than a
    # marshmallow validator — confirm against the Schema base in use.
    period = String(enum=["monthly", "yearly"])
    # Price as an integer — presumably smallest currency unit; verify.
    price = Int()
    product_listing_id = String(required=True)
    quantity = Int(required=True)
class ProductListing(Schema):
    """Schema identifying a product listing and the quantity requested."""
    product_listing_id = String(required=True)
    quantity = Int(required=True)
class RequestParser:
    """Parse request arguments with webargs, then run post-processors."""

    def __init__(self, fields=None, processors=None, *args, **kwargs):
        """Store the field schema and the post-processing callables."""
        self.fields = fields or {}
        self.processors = processors or []

    def parse(self, *args, **kwargs):
        """Parse the current request and post-process the result."""
        parsed = parser.parse(self.fields, request, *args, **kwargs)
        return self.post_process(parsed)

    def post_process(self, request_arguments, *args, **kwargs):
        """Run each registered processor, in order, over the parsed args."""
        for processor in self.processors:
            processor(request_arguments)
        return request_arguments


search_request_parser = RequestParser(
    fields={
        "page": Int(validate=Range(min=1), missing=1),
        "from": Int(validate=Range(min=1)),
        "size": Int(validate=Range(min=1), missing=10),
        "q": String(missing=""),
    },
    processors=[build_pagination],
)
from webargs.flaskparser import use_args
from webargs.fields import List, Str, Int

from osp.common import config
from osp.citations.models import Text_Index
from osp.www import utils
from osp.www.cache import cache
from osp.www.hit import Hit

app = Flask(__name__)
cache.init_app(app)


@app.route('/')
@use_args({'institution_id': List(Int(), missing=None)})
def home(args):
    """ Home page + ranking interface. """
    facets = utils.bootstrap_facets()

    # Bootstrap URL institution(s).
    selected = args['institution_id']
    facets['institution'] = utils.institution_facets(include=selected)

    return render_template('home.html', facets=facets)
return query, new_offset DEF_MAX_N_RESULTS = 2048 ORDER_BY_OPTIONS = [ 'newest', 'oldest', None, ] ORDER_TOPICS_OPTIONS = ORDER_BY_OPTIONS + ['newest_last_post'] DEF_GET_COLL_ARGS = { 'fields': DelimitedList(Str()), 'offset': Int(validate=lambda n: n >= 0), 'max_n_results': Int(validate=lambda n: n >= 0, missing=DEF_MAX_N_RESULTS), 'order': Str(validate=validate.OneOf(ORDER_BY_OPTIONS), missing='newest'), } GET_TOPICS_ARGS = DEF_GET_COLL_ARGS.copy() GET_TOPICS_ARGS.update({ 'order': Str(validate=validate.OneOf(ORDER_TOPICS_OPTIONS), missing='newest_last_post'), 'statuses': DelimitedList(Str()), }) def parse_get_coll_args(req,
def params(self):
    """Declare the query parameter used to select models by id."""
    description = create_params(
        "A list of model id's to grab from db.",
        ["?ids=213,221,423"],
    )
    return {self.key: DelimitedList(Int(), description=description)}
from webargs import core
from webargs.flaskparser import use_args
from webargs.fields import Str, Int, Bool

from dha_poc.util import zipcode_regex, coordinates_regex, sort_criteria, dimensions_regex
from dha_poc.data.hotel_attributes import available_languages

parser = core.Parser()

# Query-string schema for hotel offer lookups.
query_args = {
    'zipcode': Str(required=True,
                   validate=lambda z: zipcode_regex.match(str(z)) is not None),
    'email': Str(),
    'price': Int(validate=lambda n: 0 <= n <= 10e6),
    'title': Str(validate=lambda s: len(s) < 1000),
    # Two-letter language code that must be known to the catalogue.
    'language': Str(validate=lambda s: len(s) == 2 and s.lower() in available_languages),
    # ISO-4217 currency codes are exactly three characters.
    'currency': Str(validate=lambda s: len(s) == 3),
    # CONSISTENCY FIX: return an explicit bool like the zipcode validator
    # (Match/None truthiness gave the same result, but implicitly).
    'coord': Str(validate=lambda s: coordinates_regex.match(s) is not None),
    'event_date': Str(),
    'num': Int(),
    'context_id': Int()
}
class PurchaseTotalSchema(Schema):
    """Schema for the monetary breakdown of a purchase."""
    # Currency code applying to all the amounts below.
    currency = String(required=True)
    # Amounts are integers — presumably smallest currency unit; verify.
    subtotal = Int(required=True)
    # Optional: tax may be absent.
    tax = Int()
    total = Int(required=True)
class DataFileFilterByModelID(HTTPEndpoint):
    """
    A filterable datafile endpoint to find related models
    creates a custom json serialization that returns the model with id of the link
    """

    args_schema = dict(
        sample_id=DelimitedList(Int(), description="sample id to filter datafile by"),
        session_id=DelimitedList(
            Int(), description="session id to filter datafile by"),
        analysis_id=DelimitedList(
            Int(), description="analysis id to filter datafile by"),
    )

    async def get(self, request):
        """Handler for all GET requests"""
        model_names = await parser.parse(
            self.args_schema, request, location="querystring")
        db = get_database()

        n_models = sum(len(v) for v in model_names.values())
        if n_models == 0:
            raise SparrowAPIError("No IDs were passed to filter by")

        # Wrap entire query infrastructure in error-handling block.
        with db.session_scope(commit=False):
            DataFile = db.model.data_file
            DataFileLink = db.model.data_file_link
            data = []
            for model_name, model_ids in model_names.items():
                # Hoisted: the link column is used in both select and filter.
                link_col = getattr(DataFileLink, model_name)
                q = db.session.query(
                    DataFile.file_hash,
                    DataFile.file_mtime,
                    DataFile.basename,
                    DataFile.type_id,
                    link_col,
                ).order_by(DataFile.file_mtime)
                q = q.join(DataFile.data_file_link_collection).filter(
                    link_col.in_(model_ids))
                # Custom serialization: unpack each row straight into a dict
                # (replaces the previous field-by-field assignment).
                for file_hash, file_mtime, basename, type_id, model_id in q.all():
                    data.append({
                        "file_hash": file_hash,
                        "file_mtime": file_mtime.isoformat(),
                        "basename": basename,
                        "type": type_id,
                        # e.g. "sample_id" -> "sample": strip the "_id" suffix
                        "model": model_name[:-3],
                        "model_id": model_id,
                    })

        return JSONResponse(dict(data=data, total_count=len(data)))
def parse(self):
    """Parse the current request's view args through the field schema."""
    # NOTE(review): `parser` and `request` come from module scope (webargs /
    # Flask); the use of self.fields/self.processors suggests these defs
    # belong to a RequestParser-style class — confirm enclosing definition.
    return self.post_process(
        parser.parse(self.fields, request, locations=["view_args"]))


def post_process(self, request_arguments):
    """Run each registered processor, in order, over the parsed arguments."""
    for func in self.processors:
        func(request_arguments)
    return request_arguments


# Parser for search endpoints: pagination + free-text query.
search_request_parser = RequestParser(
    fields={
        "page": Int(validate=Range(min=1), ),
        "from": Int(
            load_from="from",
            validate=Range(min=1),
        ),
        "size": Int(
            validate=Range(min=1),
            missing=10,
        ),
        "q": String(),  # TODO: allow getting it from "query" maybe a Function
    },
    processors=[build_pagination],
)

# Parser for create endpoints; no fields or processors configured yet.
create_request_parser = RequestParser()
def params(self):
    """Declare the query parameter used to filter results by tag ids."""
    description = create_params(
        "A list of tag ids to filter by",
        ["?tags=3,2,1"],
    )
    return {self.key: DelimitedList(Int(), description=description)}