Example #1
    def __init__(self, request, context=None):
        # Models are isolated by user.
        parent_id = self.get_parent_id(request)

        # Authentication to storage is transmitted as is (cf. cloud_storage).
        auth = request.headers.get('Authorization')

        # ID generator by resource name in settings.
        default_id_generator = request.registry.id_generators['']
        resource_name = context.resource_name if context else ''
        id_generator = request.registry.id_generators.get(resource_name,
                                                          default_id_generator)

        self.model = self.default_model(
            storage=request.registry.storage,
            id_generator=id_generator,
            collection_id=classname(self),
            parent_id=parent_id,
            auth=auth)

        self.request = request
        self.context = context
        self.record_id = self.request.matchdict.get('id')
        self.force_patch_update = False

        # Log resource context.
        logger.bind(collection_id=self.model.collection_id,
                    collection_timestamp=self.timestamp)
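The constructor above wires the storage backend, the per-resource id generator, and the parent id into the resource's model. A minimal sketch of a concrete resource built on it follows; the `BookmarkResource` name, the import path, and the per-user parent id policy are illustrative assumptions, not part of the example's code.

# Minimal sketch, assuming the constructor above belongs to a
# UserResource-style base class (the import path is an assumption).
from kinto.core.resource import UserResource


class BookmarkResource(UserResource):
    def get_parent_id(self, request):
        # Scope records to the authenticated user; fall back to a shared
        # "anonymous" parent when no credentials were supplied.
        return request.authenticated_userid or 'anonymous'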
Example #2
    def collection_get(self):
        """Model ``GET`` endpoint: retrieve multiple records.

        :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPNotModified` if
            ``If-None-Match`` header is provided and collection not
            modified in the interim.

        :raises:
            :exc:`~pyramid:pyramid.httpexceptions.HTTPPreconditionFailed` if
            ``If-Match`` header is provided and collection modified
            in the interim.
        :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPBadRequest`
            if filters or sorting are invalid.
        """
        self._add_timestamp_header(self.request.response)
        self._add_cache_header(self.request.response)
        self._raise_304_if_not_modified()
        self._raise_412_if_modified()

        headers = self.request.response.headers

        filters = self._extract_filters()
        limit = self._extract_limit()
        sorting = self._extract_sorting(limit)
        partial_fields = self._extract_partial_fields()

        filter_fields = [f.field for f in filters]
        include_deleted = self.model.modified_field in filter_fields

        pagination_rules, offset = self._extract_pagination_rules_from_token(
            limit, sorting)

        records, total_records = self.model.get_records(
            filters=filters,
            sorting=sorting,
            limit=limit,
            pagination_rules=pagination_rules,
            include_deleted=include_deleted)

        offset = offset + len(records)
        next_page = None

        if limit and len(records) == limit and offset < total_records:
            lastrecord = records[-1]
            next_page = self._next_page_url(sorting, limit, lastrecord, offset)
            headers['Next-Page'] = encode_header(next_page)

        if partial_fields:
            records = [
                dict_subset(record, partial_fields)
                for record in records
            ]

        # Bind metric about response size.
        logger.bind(nb_records=len(records), limit=limit)
        headers['Total-Records'] = encode_header('%s' % total_records)

        return self.postprocess(records)
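The endpoint above exposes pagination through the `Next-Page` and `Total-Records` response headers. The loop below is a client-side sketch of how that contract can be consumed; the `requests` library, the helper name, and the `data` key in the response body are assumptions for illustration.

import requests


def fetch_all_records(url, headers=None):
    # Follow Next-Page links until the server stops emitting them.
    records = []
    while url:
        resp = requests.get(url, headers=headers)
        resp.raise_for_status()
        records.extend(resp.json()['data'])   # response body layout assumed
        url = resp.headers.get('Next-Page')   # header absent on the last page
    return records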
Example #3
def post_batch(request):
    requests = request.validated['body']['requests']
    batch_size = len(requests)

    limit = request.registry.settings['batch_max_requests']
    if limit and len(requests) > int(limit):
        error_msg = 'Number of requests is limited to %s' % limit
        request.errors.add('body', 'requests', error_msg)
        return

    if any([batch.path in req['path'] for req in requests]):
        error_msg = 'Recursive call on %s endpoint is forbidden.' % batch.path
        request.errors.add('body', 'requests', error_msg)
        return

    responses = []

    sublogger = logger.new()

    for subrequest_spec in requests:
        subrequest = build_request(request, subrequest_spec)

        sublogger.bind(path=subrequest.path,
                       method=subrequest.method)
        try:
            # Invoke subrequest without individual transaction.
            resp, subrequest = request.follow_subrequest(subrequest,
                                                         use_tweens=False)
        except httpexceptions.HTTPException as e:
            if e.content_type == 'application/json':
                resp = e
            else:
                # JSONify raw Pyramid errors.
                resp = errors.http_error(e)

        sublogger.bind(code=resp.status_code)
        sublogger.info('subrequest.summary')

        dict_resp = build_response(resp, subrequest)
        responses.append(dict_resp)

    # Rebind batch request for summary.
    logger.bind(path=batch.path,
                method=request.method,
                batch_size=batch_size,
                agent=request.headers.get('User-Agent'),)

    return {
        'responses': responses
    }
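Each entry of the validated `requests` list becomes a subrequest built by `build_request()` and replayed without tweens. A payload accepted by this view looks roughly like the sketch below; field names other than `requests`, `path`, and `method` are assumptions about the validation schema.

# Illustrative batch payload; POSTing it to the batch endpoint yields
# {"responses": [...]}, one entry per subrequest, in order.
payload = {
    "requests": [
        {"method": "GET", "path": "/articles"},
        {"method": "POST", "path": "/articles",
         "body": {"data": {"title": "draft"}}},
    ]
}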
Example #4
def post_batch(request):
    requests = request.validated['requests']
    batch_size = len(requests)

    limit = request.registry.settings['batch_max_requests']
    if limit and len(requests) > int(limit):
        error_msg = 'Number of requests is limited to %s' % limit
        request.errors.add('body', 'requests', error_msg)
        return

    if any([batch.path in req['path'] for req in requests]):
        error_msg = 'Recursive call on %s endpoint is forbidden.' % batch.path
        request.errors.add('body', 'requests', error_msg)
        return

    responses = []

    sublogger = logger.new()

    for subrequest_spec in requests:
        subrequest = build_request(request, subrequest_spec)

        sublogger.bind(path=subrequest.path,
                       method=subrequest.method)
        try:
            # Invoke subrequest without individual transaction.
            resp, subrequest = request.follow_subrequest(subrequest,
                                                         use_tweens=False)
        except httpexceptions.HTTPException as e:
            if e.content_type == 'application/json':
                resp = e
            else:
                # JSONify raw Pyramid errors.
                resp = errors.http_error(e)

        sublogger.bind(code=resp.status_code)
        sublogger.info('subrequest.summary')

        dict_resp = build_response(resp, subrequest)
        responses.append(dict_resp)

    # Rebind batch request for summary.
    logger.bind(path=batch.path,
                method=request.method,
                batch_size=batch_size,
                agent=request.headers.get('User-Agent'),)

    return {
        'responses': responses
    }
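The only difference from the previous example is where the validated payload lives: here the requests list is read from `request.validated['requests']` rather than `request.validated['body']['requests']`, presumably reflecting a different version of the body validation schema.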