def http_error(httpexception, errno=None, code=None, error=None, message=None, info=None, details=None):
    """Return a JSON formatted response matching the error protocol.

    :param httpexception: Instance of :mod:`~pyramid:pyramid.httpexceptions`
    :param errno: stable application-level error number (e.g. 109)
    :param code: matches the HTTP status code (e.g 400)
    :param error: string description of error type (e.g. "Bad request")
    :param message: context information (e.g. "Invalid request parameters")
    :param info: information about error (e.g. URL to troubleshooting)
    :param details: additional structured details (conflicting record)

    :returns: the formatted response object
    :rtype: pyramid.httpexceptions.HTTPException
    """
    # Explicit ``None`` check: ``errno or ERRORS.UNDEFINED`` would silently
    # discard a legitimate falsy error number (e.g. 0).
    if errno is None:
        errno = ERRORS.UNDEFINED

    # Track error number for request summary
    logger.bind(errno=errno)

    body = {
        "code": code or httpexception.code,
        "errno": errno,
        "error": error or httpexception.title,
    }
    if message is not None:
        body["message"] = message
    if info is not None:
        body["info"] = info
    if details is not None:
        body["details"] = details

    # Mutate the passed exception in place so callers can raise it directly.
    response = httpexception
    response.body = json.dumps(body).encode("utf-8")
    response.content_type = "application/json"
    return response
def __init__(self, request):
    """Set up the resource from the incoming request.

    Captures the backend handle, the collection timestamp and the
    optional record id found in the URL ``matchdict``.
    """
    self.request = request
    self.db = request.db
    self.db_kwargs = {'resource': self,
                      'user_id': request.authenticated_userid}
    self.timestamp = self.db.collection_timestamp(**self.db_kwargs)
    self.record_id = request.matchdict.get('id')

    # Log resource context.
    logger.bind(resource_name=self.name,
                resource_timestamp=self.timestamp)
def collection_get(self):
    """Model ``GET`` endpoint: retrieve multiple records.

    :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPNotModified` if
        ``If-None-Match`` header is provided and collection not
        modified in the interim.

    :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPPreconditionFailed`
        if ``If-Match`` header is provided and collection modified
        in the interim.

    :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPBadRequest`
        if filters or sorting are invalid.
    """
    response = self.request.response
    self._add_timestamp_header(response)
    self._add_cache_header(response)

    # Conditional-request handling happens before any storage work.
    self._raise_304_if_not_modified()
    self._raise_412_if_modified()

    headers = response.headers

    filters = self._extract_filters()
    limit = self._extract_limit()
    sorting = self._extract_sorting(limit)
    partial_fields = self._extract_partial_fields()

    # Tombstones are only relevant when filtering on the modified field.
    include_deleted = self.model.modified_field in [f.field for f in filters]

    pagination_rules, offset = self._extract_pagination_rules_from_token(
        limit, sorting)

    records, total_records = self.model.get_records(
        filters=filters,
        sorting=sorting,
        limit=limit,
        pagination_rules=pagination_rules,
        include_deleted=include_deleted)

    offset += len(records)

    if limit and len(records) == limit and offset < total_records:
        next_url = self._next_page_url(sorting, limit, records[-1], offset)
        headers['Next-Page'] = encode_header(next_url)

    if partial_fields:
        records = [dict_subset(record, partial_fields)
                   for record in records]

    # Bind metric about response size.
    logger.bind(nb_records=len(records), limit=limit)
    headers['Total-Records'] = encode_header('%s' % total_records)

    return self.postprocess(records)
def post_batch(request):
    """Replay a batch of subrequests and return their aggregated responses.

    Rejects over-sized batches and recursive calls on the batch endpoint
    itself, then runs each subrequest sequentially (no individual
    transaction) and collects one response dict per subrequest.

    :returns: ``{'responses': [...]}`` or ``None`` when validation failed
        (errors are attached to ``request.errors``).
    """
    requests = request.validated['requests']
    batch_size = len(requests)

    limit = request.registry.settings['batch_max_requests']
    if limit and len(requests) > int(limit):
        error_msg = 'Number of requests is limited to %s' % limit
        request.errors.add('body', 'requests', error_msg)
        return

    # Forbid recursion: a subrequest must not target the batch endpoint.
    if any(batch.path in req['path'] for req in requests):
        error_msg = 'Recursive call on %s endpoint is forbidden.' % batch.path
        request.errors.add('body', 'requests', error_msg)
        return

    responses = []

    sublogger = logger.new()

    for subrequest_spec in requests:
        subrequest = build_request(request, subrequest_spec)

        sublogger.bind(path=subrequest.path, method=subrequest.method)
        try:
            # Invoke subrequest without individual transaction.
            resp, subrequest = request.follow_subrequest(subrequest,
                                                         use_tweens=False)
        except httpexceptions.HTTPException as e:
            if e.content_type == 'application/json':
                resp = e
            else:
                # JSONify raw Pyramid errors.
                resp = errors.http_error(e)
        except Exception as e:
            resp = render_view_to_response(e, subrequest)
            if resp is None:
                # No error view matched the exception: re-raise the
                # original error instead of failing below with an
                # AttributeError on ``None.status_code``.
                raise e
            if resp.status_code >= 500:
                raise e

        sublogger.bind(code=resp.status_code)
        sublogger.info('subrequest.summary')

        dict_resp = build_response(resp, subrequest)
        responses.append(dict_resp)

    # Rebind batch request for summary
    logger.bind(path=batch.path,
                method=request.method,
                batch_size=batch_size,
                agent=request.headers.get('User-Agent'))

    return {'responses': responses}
def post_batch(request):
    """Replay a batch of subrequests and return their aggregated responses.

    Rejects over-sized batches and recursive calls on the batch endpoint
    itself, then invokes each subrequest sequentially. HTTP errors are
    converted to protocol error bodies; unexpected errors are logged and
    reported as 500 responses without aborting the batch.

    :returns: ``{'responses': [...]}`` or ``None`` when validation failed
        (errors are attached to ``request.errors``).
    """
    requests = request.validated['requests']
    batch_size = len(requests)

    limit = request.registry.settings['batch_max_requests']
    if limit and len(requests) > int(limit):
        error_msg = 'Number of requests is limited to %s' % limit
        request.errors.add('body', 'requests', error_msg)
        return

    # Forbid recursion: a subrequest must not target the batch endpoint.
    if any(batch.path in req['path'] for req in requests):
        error_msg = 'Recursive call on %s endpoint is forbidden.' % batch.path
        request.errors.add('body', 'requests', error_msg)
        return

    responses = []

    sublogger = logger.new()

    for subrequest_spec in requests:
        subrequest = build_request(request, subrequest_spec)
        subrequest.parent = request

        sublogger.bind(path=subrequest.path, method=subrequest.method)
        try:
            subresponse = request.invoke_subrequest(subrequest)
        except httpexceptions.HTTPException as e:
            # Wrap Pyramid HTTP errors into the JSON error protocol.
            error_msg = 'Failed batch subrequest'
            subresponse = errors.http_error(e, message=error_msg)
        except Exception as e:
            # Unexpected failure: log it and report a 500 for this
            # subrequest only, so the rest of the batch still runs.
            logger.error(e)
            subresponse = errors.http_error(
                httpexceptions.HTTPInternalServerError())

        sublogger.bind(code=subresponse.status_code)
        sublogger.info('subrequest.summary')

        subresponse = build_response(subresponse, subrequest)
        responses.append(subresponse)

    # Rebind batch request for summary
    logger.bind(path=batch.path,
                method=request.method,
                batch_size=batch_size,
                agent=request.headers.get('User-Agent'))

    return {'responses': responses}
def on_new_response(event):
    """Emit the request summary log line when a response is produced.

    :param event: Pyramid ``NewResponse`` event carrying the request and
        the outgoing response.
    """
    response = event.response
    request = event.request

    # Compute the request processing time in msec (-1 if unknown).
    # The fallback must be ``current + 1`` so that a missing
    # ``_received_at`` yields -1; ``current - 1`` would wrongly report
    # a 1 msec duration.
    current = utils.msec_time()
    duration = current - getattr(request, "_received_at", current + 1)
    # NOTE(review): naive local-time timestamp — confirm whether UTC is
    # expected here.
    isotimestamp = datetime.fromtimestamp(current / 1000).isoformat()

    # Bind infos for request summary logger.
    logger.bind(time=isotimestamp, code=response.status_code, t=duration)

    # Output application request summary.
    logger.info("request.summary")
def post_batch(request):
    """Replay a batch of subrequests and return their aggregated responses.

    Rejects over-sized batches and recursive calls on the batch endpoint
    itself, then runs each subrequest sequentially without an individual
    transaction. Pyramid HTTP errors are converted to the JSON error
    protocol; other exceptions propagate.

    :returns: ``{'responses': [...]}`` or ``None`` when validation failed
        (errors are attached to ``request.errors``).
    """
    requests = request.validated['requests']
    batch_size = len(requests)

    limit = request.registry.settings['batch_max_requests']
    if limit and len(requests) > int(limit):
        error_msg = 'Number of requests is limited to %s' % limit
        request.errors.add('body', 'requests', error_msg)
        return

    # Forbid recursion: a subrequest must not target the batch endpoint.
    if any(batch.path in req['path'] for req in requests):
        error_msg = 'Recursive call on %s endpoint is forbidden.' % batch.path
        request.errors.add('body', 'requests', error_msg)
        return

    responses = []

    sublogger = logger.new()

    for subrequest_spec in requests:
        subrequest = build_request(request, subrequest_spec)

        sublogger.bind(path=subrequest.path, method=subrequest.method)
        try:
            # Invoke subrequest without individual transaction.
            resp, subrequest = request.follow_subrequest(subrequest,
                                                         use_tweens=False)
        except httpexceptions.HTTPException as e:
            if e.content_type == 'application/json':
                resp = e
            else:
                # JSONify raw Pyramid errors.
                resp = errors.http_error(e)

        sublogger.bind(code=resp.status_code)
        sublogger.info('subrequest.summary')

        dict_resp = build_response(resp, subrequest)
        responses.append(dict_resp)

    # Rebind batch request for summary
    logger.bind(
        path=batch.path,
        method=request.method,
        batch_size=batch_size,
        agent=request.headers.get('User-Agent'),
    )

    return {'responses': responses}
def collection_get(self):
    """Collection ``GET`` endpoint: retrieve multiple records.

    :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPNotModified` if
        ``If-None-Match`` header is provided and collection not
        modified in the interim.

    :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPPreconditionFailed`
        if ``If-Match`` header is provided and collection modified
        in the interim.

    :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPBadRequest`
        if filters or sorting are invalid.
    """
    response = self.request.response
    self._add_timestamp_header(response)

    # Conditional-request handling happens before any storage work.
    self._raise_304_if_not_modified()
    self._raise_412_if_modified()

    headers = response.headers

    filters = self._extract_filters()
    sorting = self._extract_sorting()
    limit = self._extract_limit()

    # Tombstones are only relevant when filtering on the modified field.
    include_deleted = self.collection.modified_field in [f.field
                                                         for f in filters]

    pagination_rules = self._extract_pagination_rules_from_token(
        limit, sorting)

    records, total_records = self.collection.get_records(
        filters=filters,
        sorting=sorting,
        limit=limit,
        pagination_rules=pagination_rules,
        include_deleted=include_deleted)

    if limit and len(records) == limit and total_records > limit:
        headers['Next-Page'] = self._next_page_url(sorting, limit,
                                                   records[-1])

    # Bind metric about response size.
    logger.bind(nb_records=len(records), limit=limit)
    headers['Total-Records'] = ('%s' % total_records)

    return {'data': records}
def __init__(self, request, context=None):
    """Instantiate the resource around the current request.

    The model storage is partitioned per user (parent id), and the
    ``Authorization`` header is forwarded as-is to the storage backend.
    """
    # Models are isolated by user.
    parent_id = self.get_parent_id(request)

    # Authentication to storage is transmitted as is (cf. cloud_storage).
    auth = request.headers.get('Authorization')

    self.model = self.default_model(
        storage=request.registry.storage,
        id_generator=request.registry.id_generator,
        collection_id=classname(self),
        parent_id=parent_id,
        auth=auth)

    self.request = request
    self.context = context
    self.record_id = request.matchdict.get('id')
    self.force_patch_update = False

    # Log resource context.
    # NOTE(review): ``self.timestamp`` is read here but never assigned in
    # this method — presumably provided by a base class or property;
    # confirm it is set before this point.
    logger.bind(collection_id=self.model.collection_id,
                collection_timestamp=self.timestamp)
def __init__(self, request, context):
    """Build the resource, its collection handle and request context.

    Collections are partitioned per user, and the ``Authorization``
    header is forwarded as-is to the storage backend.
    """
    # Collections are isolated by user.
    parent_id = context.prefixed_userid

    # Authentication to storage is transmitted as is (cf. cloud_storage).
    auth = request.headers.get('Authorization')

    self.collection = Collection(
        storage=request.registry.storage,
        id_generator=request.registry.id_generator,
        collection_id=classname(self),
        parent_id=parent_id,
        auth=auth)

    self.request = request
    self.context = context
    self.timestamp = self.collection.timestamp()
    self.record_id = request.matchdict.get('id')

    # Log resource context.
    logger.bind(collection_id=self.collection.collection_id,
                collection_timestamp=self.timestamp)
def get_records(self):
    """Fetch the collection records, using querystring arguments for
    sorting, filtering and pagination.

    Override to implement custom querystring parsing, or post-process
    records after their retrieval from storage.

    :raises: :class:`pyramid.httpexceptions.HTTPBadRequest`
        if filters or sorting are invalid.
    :returns: A tuple with the list of records in the current page,
        the total number of records in the result set, and the next page
        url.
    :rtype: tuple
    """
    filters = self._extract_filters()
    sorting = self._extract_sorting()
    pagination_rules, limit = self._extract_pagination_rules_from_token(
        sorting)

    # Tombstones are only relevant when filtering on the modified field.
    include_deleted = any(f.field == self.modified_field for f in filters)

    records, total_records = self.db.get_all(
        filters=filters,
        sorting=sorting,
        pagination_rules=pagination_rules,
        limit=limit,
        include_deleted=include_deleted,
        **self.db_kwargs)

    next_page = None
    if limit and len(records) == limit and total_records > limit:
        next_page = self._next_page_url(sorting, limit, records[-1])

    # Bind metric about response size.
    logger.bind(nb_records=len(records), limit=limit)

    return records, total_records, next_page