Code example #1
File: batch.py Project: michielbdejong/cliquet
def build_request(original, dict_obj):
    """
    Transform a dict object into a ``pyramid.request.Request`` object.

    :param original: the original batch request.
    :param dict_obj: a dict object with the sub-request specifications.
    """
    api_prefix = '/%s' % original.upath_info.split('/')[1]
    path = dict_obj['path']
    if not path.startswith(api_prefix):
        path = api_prefix + path

    path = path.encode('utf-8')

    method = dict_obj.get('method') or 'GET'
    headers = dict(original.headers)
    headers.update(**dict_obj.get('headers') or {})
    payload = dict_obj.get('body') or ''

    # Payload is always a dict (from ``BatchRequestSchema.body``).
    # Send it as JSON for subrequests.
    if isinstance(payload, dict):
        headers['Content-Type'] = 'application/json; charset=utf-8'
        payload = json.dumps(payload)

    if six.PY3:  # pragma: no cover
        path = path.decode('latin-1')

    request = Request.blank(path=path,
                            headers=headers,
                            POST=payload,
                            method=method)

    return request
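
The comment above points out that a dict body is always re-serialized to JSON before the subrequest is built. Below is a minimal, stdlib-only sketch of just that serialization step (no pyramid required); serialize_subrequest_body and the sample sub-request are illustrative names, not part of cliquet.

import json

def serialize_subrequest_body(dict_obj, headers):
    # Mirror the branch above: dict payloads become JSON text with an
    # explicit utf-8 charset, other payloads pass through untouched.
    payload = dict_obj.get('body') or ''
    if isinstance(payload, dict):
        headers['Content-Type'] = 'application/json; charset=utf-8'
        payload = json.dumps(payload)
    return payload, headers

payload, headers = serialize_subrequest_body(
    {'path': '/articles', 'body': {'title': 'Hello'}}, {})
assert payload == '{"title": "Hello"}'
assert headers['Content-Type'] == 'application/json; charset=utf-8'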
Code example #2
 def test_subrequests_body_are_json_serialized(self):
     request = {'path': '/', 'body': {'json': 'payload'}}
     self.post({'requests': [request]})
     wanted = {"json": "payload"}
     subrequest, = self.request.invoke_subrequest.call_args[0]
     self.assertEqual(subrequest.body.decode('utf8'),
                      json.dumps(wanted))
Code example #3
File: __init__.py Project: MrChoclate/cliquet
    def create(self, collection_id, parent_id, record, id_generator=None,
               unique_fields=None, id_field=DEFAULT_ID_FIELD,
               modified_field=DEFAULT_MODIFIED_FIELD,
               auth=None):
        id_generator = id_generator or self.id_generator
        record = record.copy()
        record_id = record.setdefault(id_field, id_generator())

        query = """
        WITH delete_potential_tombstone AS (
            DELETE FROM deleted
             WHERE id = :object_id
               AND parent_id = :parent_id
               AND collection_id = :collection_id
        )
        INSERT INTO records (id, parent_id, collection_id, data)
        VALUES (:object_id, :parent_id,
                :collection_id, (:data)::JSONB)
        RETURNING id, as_epoch(last_modified) AS last_modified;
        """
        placeholders = dict(object_id=record_id,
                            parent_id=parent_id,
                            collection_id=collection_id,
                            data=json.dumps(record))
        with self.client.connect() as conn:
            # Check that it does not violate the resource unicity rules.
            self._check_unicity(conn, collection_id, parent_id, record,
                                unique_fields, id_field, modified_field,
                                for_creation=True)
            result = conn.execute(query, placeholders)
            inserted = result.fetchone()

        record[modified_field] = inserted['last_modified']
        return record
Code example #4
File: __init__.py Project: brouberol/cliquet
    def create(self, collection_id, parent_id, record, id_generator=None,
               unique_fields=None, id_field=DEFAULT_ID_FIELD,
               modified_field=DEFAULT_MODIFIED_FIELD,
               auth=None):
        id_generator = id_generator or self.id_generator
        record = record.copy()
        record_id = record.setdefault(id_field, id_generator())

        query = """
        INSERT INTO records (id, parent_id, collection_id, data)
        VALUES (%(object_id)s, %(parent_id)s,
                %(collection_id)s, %(data)s::JSONB)
        RETURNING id, as_epoch(last_modified) AS last_modified;
        """
        placeholders = dict(object_id=record_id,
                            parent_id=parent_id,
                            collection_id=collection_id,
                            data=json.dumps(record))
        with self.connect() as cursor:
            # Check that it does not violate the resource unicity rules.
            self._check_unicity(cursor, collection_id, parent_id, record,
                                unique_fields, id_field, modified_field,
                                for_creation=True)
            cursor.execute(query, placeholders)
            inserted = cursor.fetchone()

        record[modified_field] = inserted['last_modified']
        return record
Code example #5
File: test_views_batch.py Project: ayusharma/cliquet
 def test_subrequests_body_have_utf8_charset(self):
     request = {"path": "/", "body": {"json": u"😂"}}
     self.post({"requests": [request]})
     subrequest, = self.request.invoke_subrequest.call_args[0]
     self.assertIn("charset=utf-8", subrequest.headers["Content-Type"])
     wanted = {"json": u"😂"}
     self.assertEqual(subrequest.body.decode("utf8"), json.dumps(wanted))
Code example #6
File: errors.py Project: elemoine/cliquet
def http_error(httpexception, errno=None,
               code=None, error=None, message=None, info=None, details=None):
    """Return a JSON formatted response matching the error protocol.

    :param httpexception: Instance of :mod:`~pyramid:pyramid.httpexceptions`
    :param errno: stable application-level error number (e.g. 109)
    :param code: matches the HTTP status code (e.g. 400)
    :param error: string description of error type (e.g. "Bad request")
    :param message: context information (e.g. "Invalid request parameters")
    :param info: information about error (e.g. URL to troubleshooting)
    :param details: additional structured details (conflicting record)
    :returns: the formatted response object
    :rtype: pyramid.httpexceptions.HTTPException
    """
    errno = errno or ERRORS.UNDEFINED

    # Track error number for request summary
    logger.bind(errno=errno)

    body = {
        "code": code or httpexception.code,
        "errno": errno,
        "error": error or httpexception.title
    }

    if message is not None:
        body["message"] = message

    if info is not None:
        body["info"] = info

    if details is not None:
        body["details"] = details

    response = httpexception
    response.body = json.dumps(body).encode("utf-8")
    response.content_type = "application/json"
    return response
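
For reference, this is roughly the body the helper serializes and attaches to the exception, built here with the stdlib only and using the example values from the docstring; the concrete numbers and message are illustrative.

import json

body = {
    "code": 400,                             # code or httpexception.code
    "errno": 109,                            # stable application-level number
    "error": "Bad Request",                  # error or httpexception.title
    "message": "Invalid request parameters"  # optional context information
}
raw = json.dumps(body).encode("utf-8")       # what ends up in response.body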
Code example #7
 def test_subrequests_body_have_utf8_charset(self):
     request = {'path': '/', 'body': {'json': u"😂"}}
     self.post({'requests': [request]})
     subrequest, = self.request.invoke_subrequest.call_args[0]
     self.assertIn('charset=utf-8', subrequest.headers['Content-Type'])
     wanted = {"json": u"😂"}
     self.assertEqual(subrequest.body.decode('utf8'),
                      json.dumps(wanted))
Code example #8
File: cloud_storage.py Project: jotes/cliquet
 def create(self, resource, user_id, record):
     self.check_unicity(resource, user_id, record)
     url = self._build_url(self.collection_url.format(resource.name))
     resp = self._client.post(url,
                              data=json.dumps(record),
                              headers=self._build_headers(resource))
     resp.raise_for_status()
     return resp.json()
Code example #9
File: test_pagination.py Project: rodo/cliquet
 def test_raises_bad_request_if_token_has_bad_data_structure(self):
     invalid_token = json.dumps([[("last_modified", 0, ">")]])
     self.resource.request.GET = {
         "_since": "123",
         "_limit": "20",
         "_token": b64encode(invalid_token.encode("ascii")).decode("ascii"),
     }
     self.assertRaises(HTTPBadRequest, self.resource.collection_get)
Code example #10
File: __init__.py Project: ayusharma/cliquet
    def _format_conditions(self, filters, id_field, modified_field,
                           prefix='filters'):
        """Format the filters list in SQL, with placeholders for safe escaping.

        .. note::
            All conditions are combined using AND.

        .. note::

            Field names and values are escaped, as they come from the HTTP API.

        :returns: A SQL string with placeholders, and a dict mapping
            placeholders to actual values.
        :rtype: tuple
        """
        operators = {
            COMPARISON.EQ.value: '=',
            COMPARISON.NOT.value: '<>',
            COMPARISON.IN.value: 'IN',
            COMPARISON.EXCLUDE.value: 'NOT IN',
        }

        conditions = []
        holders = {}
        for i, filtr in enumerate(filters):
            value = filtr.value

            if filtr.field == id_field:
                sql_field = 'id'
            elif filtr.field == modified_field:
                sql_field = 'as_epoch(last_modified)'
            else:
                # Safely escape field name
                field_holder = '%s_field_%s' % (prefix, i)
                holders[field_holder] = filtr.field
                # JSON operator ->> retrieves values as text.
                # If field is missing, we default to ''.
                sql_field = "coalesce(data->>:%s, '')" % field_holder

            if filtr.operator not in (COMPARISON.IN.value,
                                      COMPARISON.EXCLUDE.value):
                # For the IN operator, let psycopg escape the values list.
                # Otherwise JSON-ify the native value (e.g. True -> 'true')
                if not isinstance(filtr.value, six.string_types):
                    value = json.dumps(filtr.value).strip('"')
            else:
                value = tuple(value)

            # Safely escape value
            value_holder = '%s_value_%s' % (prefix, i)
            holders[value_holder] = value

            sql_operator = operators.setdefault(filtr.operator, filtr.operator)
            cond = "%s %s :%s" % (sql_field, sql_operator, value_holder)
            conditions.append(cond)

        safe_sql = ' AND '.join(conditions)
        return safe_sql, holders
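
To make the placeholder convention concrete, here is a hand-built example of the (safe_sql, holders) pair the method above would produce for a single equality filter on a JSON field; the field name 'status' and value 'done' are illustrative, and the strings simply follow the naming scheme in the code.

field_holder = 'filters_field_0'
value_holder = 'filters_value_0'
safe_sql = "coalesce(data->>:%s, '') = :%s" % (field_holder, value_holder)
holders = {field_holder: 'status', value_holder: 'done'}
# safe_sql: "coalesce(data->>:filters_field_0, '') = :filters_value_0"
# The SQL string and the holders dict are returned together, so the
# driver escapes the actual field name and value.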
Code example #11
 def update(self, resource, user_id, record_id, record):
     self.check_unicity(resource, user_id, record)
     url = self._build_url(self.record_url.format(resource.name,
                                                  record_id))
     try:
         self.get(resource, user_id, record_id)
     except exceptions.RecordNotFoundError:
         resp = self._client.put(url,
                                 data=json.dumps(record),
                                 headers=self._build_headers(resource))
     else:
         if resource.id_field in record:
             del record[resource.id_field]
         resp = self._client.patch(url,
                                   data=json.dumps(record),
                                   headers=self._build_headers(resource))
     resp.raise_for_status()
     return resp.json()
Code example #12
 def create(self, resource, user_id, record):
     self.check_unicity(resource, user_id, record)
     record_id = resource.id_generator()
     url = self._build_url(self.record_url.format(resource.name,
                                                  record_id))
     resp = self._client.put(url,
                             data=json.dumps(record),
                             headers=self._build_headers(resource))
     resp.raise_for_status()
     return resp.json()
Code example #13
    def test_every_available_migration(self):
        """Test every migration available in the cliquet code base since
        version 1.6.

        The records migration test is currently very naive, and should be
        elaborated along with future migrations.
        """
        self._delete_everything()

        # Install old schema
        with self.storage.client.connect() as conn:
            here = os.path.abspath(os.path.dirname(__file__))
            filepath = 'schema/postgresql-storage-1.6.sql'
            old_schema = open(os.path.join(here, filepath)).read()
            conn.execute(old_schema)

        # Create a sample record using some code that is compatible with the
        # schema in place in cliquet 1.6.
        with self.storage.client.connect() as conn:
            before = {'drink': 'cacao'}
            query = """
            INSERT INTO records (user_id, resource_name, data)
            VALUES (:user_id, :resource_name, (:data)::JSON)
            RETURNING id, as_epoch(last_modified) AS last_modified;
            """
            placeholders = dict(user_id='jean-louis',
                                resource_name='test',
                                data=json.dumps(before))
            result = conn.execute(query, placeholders)
            inserted = result.fetchone()
            before['id'] = six.text_type(inserted['id'])
            before['last_modified'] = inserted['last_modified']

        # In cliquet 1.6, version = 1.
        version = self.storage._get_installed_version()
        self.assertEqual(version, 1)

        # Run every migration available.
        self.storage.initialize_schema()

        # Version matches current one.
        version = self.storage._get_installed_version()
        self.assertEqual(version, self.version)

        # Check that the previously created record is still here
        migrated, count = self.storage.get_all('test', 'jean-louis')
        self.assertEqual(migrated[0], before)

        # Check that new records can be created
        r = self.storage.create('test', 'jean-louis', {'drink': 'mate'})

        # And deleted
        self.storage.delete('test', 'jean-louis', r['id'])
Code example #14
File: __init__.py Project: FooBarQuaxx/cliquet
 def set(self, key, value, ttl=None):
     query = """
     WITH upsert AS (
         UPDATE cache SET value = :value, ttl = sec2ttl(:ttl)
          WHERE key=:key
         RETURNING *)
     INSERT INTO cache (key, value, ttl)
     SELECT :key, :value, sec2ttl(:ttl)
     WHERE NOT EXISTS (SELECT * FROM upsert)
     """
     value = json.dumps(value)
     with self.client.connect() as conn:
         conn.execute(query, dict(key=key, value=value, ttl=ttl))
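
Since the value is stored as a JSON string, a matching get() would presumably json.loads it back (the backend's get() is not shown on this page). A dict-backed sketch of that round trip, ignoring TTL; cache_set, cache_get and _store are illustrative names.

import json

_store = {}

def cache_set(key, value, ttl=None):
    _store[key] = json.dumps(value)      # values are persisted as JSON text

def cache_get(key):
    value = _store.get(key)
    return json.loads(value) if value is not None else None

cache_set('user:123', {'name': 'alice'})
assert cache_get('user:123') == {'name': 'alice'}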
Code example #15
    def _build_pagination_token(self, sorting, last_record, offset):
        """Build a pagination token.

        It is a base64-encoded JSON object with the sorting field values
        of the last_record.

        """
        token = {'last_record': {}, 'offset': offset}

        for field, _ in sorting:
            token['last_record'][field] = last_record[field]

        return encode64(json.dumps(token))
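
encode64 is a cliquet helper not shown here; assuming it base64-encodes the UTF-8 JSON (consistent with the b64encode calls in the pagination tests elsewhere on this page), a token round-trips like this. The field values are illustrative.

import json
from base64 import b64encode, b64decode

token = {'last_record': {'last_modified': 1234, 'id': 'abc'}, 'offset': 10}
encoded = b64encode(json.dumps(token).encode('utf-8')).decode('ascii')
decoded = json.loads(b64decode(encoded).decode('utf-8'))
assert decoded == token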
Code example #16
File: __init__.py Project: rodo/cliquet
 def set(self, key, value, ttl=None):
     query = """
     WITH upsert AS (
         UPDATE cache SET value = %(value)s, ttl = sec2ttl(%(ttl)s)
          WHERE key=%(key)s
         RETURNING *)
     INSERT INTO cache (key, value, ttl)
     SELECT %(key)s, %(value)s, sec2ttl(%(ttl)s)
     WHERE NOT EXISTS (SELECT * FROM upsert)
     """
     value = json.dumps(value)
     with self.connect() as cursor:
         cursor.execute(query, dict(key=key, value=value, ttl=ttl))
Code example #17
File: resource.py Project: brouberol/cliquet
    def _build_pagination_token(self, sorting, last_record):
        """Build a pagination token.

        It is a base64-encoded JSON object with the sorting field values
        of the last_record.

        """
        token = {}

        for field, _ in sorting:
            token[field] = last_record[field]

        return encode64(json.dumps(token))
Code example #18
 def set(self, key, value, ttl=None):
     query = """
     WITH upsert AS (
         UPDATE cache SET value = :value, ttl = sec2ttl(:ttl)
          WHERE key=:key
         RETURNING *)
     INSERT INTO cache (key, value, ttl)
     SELECT :key, :value, sec2ttl(:ttl)
     WHERE NOT EXISTS (SELECT * FROM upsert)
     """
     value = json.dumps(value)
     with self.client.connect() as conn:
         conn.execute(query,
                      dict(key=self.prefix + key, value=value, ttl=ttl))
Code example #19
File: errors.py Project: ayusharma/cliquet
def send_alert(request, message=None, url=None, code='soft-eol'):
    """Helper to add an Alert header to the response.

    :param code: The type of error, either 'soft-eol' or 'hard-eol'.
    :param message: The description message.
    :param url: The URL for more information; defaults to the documentation URL.
    """
    if url is None:
        url = request.registry.settings['project_docs']

    request.response.headers['Alert'] = encode_header(json.dumps({
        'code': code,
        'message': message,
        'url': url
    }))
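
The Alert header therefore carries a small JSON document. A stdlib-only sketch of what gets serialized, stopping before encode_header() (a cliquet helper not shown here); the message and URL are illustrative.

import json

alert = json.dumps({
    'code': 'soft-eol',
    'message': 'This API version is deprecated.',
    'url': 'https://example.org/docs',
})
# `alert` is the string handed to encode_header() and set as the
# response's 'Alert' header.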
Code example #20
def send_alert(request, message=None, url=None, code='soft-eol'):
    """Helper to add an Alert header to the response.

    :param code: The type of error, either 'soft-eol' or 'hard-eol'.
    :param message: The description message.
    :param url: The URL for more information; defaults to the documentation URL.
    """
    if url is None:
        url = request.registry.settings['project_docs']

    request.response.headers['Alert'] = encode_header(json.dumps({
        'code': code,
        'message': message,
        'url': url
    }))
Code example #21
File: __init__.py Project: glasserc/cliquet
    def update(self, collection_id, parent_id, object_id, record,
               unique_fields=None, id_field=DEFAULT_ID_FIELD,
               modified_field=DEFAULT_MODIFIED_FIELD,
               auth=None):
        query_create = """
        INSERT INTO records (id, parent_id, collection_id, data, last_modified)
        VALUES (:object_id, :parent_id,
                :collection_id, (:data)::JSONB,
                from_epoch(:last_modified))
        RETURNING as_epoch(last_modified) AS last_modified;
        """

        query_update = """
        UPDATE records SET data=(:data)::JSONB,
                           last_modified=from_epoch(:last_modified)
        WHERE id = :object_id
           AND parent_id = :parent_id
           AND collection_id = :collection_id
        RETURNING as_epoch(last_modified) AS last_modified;
        """
        placeholders = dict(object_id=object_id,
                            parent_id=parent_id,
                            collection_id=collection_id,
                            last_modified=record.get(modified_field),
                            data=json.dumps(record))

        record = record.copy()
        record[id_field] = object_id

        with self.client.connect() as conn:
            # Check that it does not violate the resource unicity rules.
            self._check_unicity(conn, collection_id, parent_id, record,
                                unique_fields, id_field, modified_field)
            # Create or update?
            query = """
            SELECT id FROM records
            WHERE id = :object_id
              AND parent_id = :parent_id
              AND collection_id = :collection_id;
            """
            result = conn.execute(query, placeholders)
            query = query_update if result.rowcount > 0 else query_create

            result = conn.execute(query, placeholders)
            updated = result.fetchone()

        record[modified_field] = updated['last_modified']
        return record
Code example #22
    def test_every_available_migration(self):
        """Test every migration available in the cliquet code base since
        version 1.6.

        The records migration test is currently very naive, and should be
        elaborated along with future migrations.
        """
        self._delete_everything()

        # Install old schema
        with self.db.connect() as cursor:
            here = os.path.abspath(os.path.dirname(__file__))
            filepath = 'schema/postgresql-storage-1.6.sql'
            old_schema = open(os.path.join(here, filepath)).read()
            cursor.execute(old_schema)

        resource = TestResource()

        # Create a sample record using some code that is compatible with the
        # schema in place in cliquet 1.6.
        with self.db.connect() as cursor:
            before = {'drink': 'cacao'}
            query = """
            INSERT INTO records (user_id, resource_name, data)
            VALUES (%(user_id)s, %(resource_name)s, %(data)s::JSON)
            RETURNING id, as_epoch(last_modified) AS last_modified;
            """
            placeholders = dict(user_id='jean-louis',
                                resource_name=resource.name,
                                data=json.dumps(before))
            cursor.execute(query, placeholders)
            inserted = cursor.fetchone()
            before[resource.id_field] = inserted['id']
            before[resource.modified_field] = inserted['last_modified']

        # In cliquet 1.6, version = 1.
        version = self.db._get_installed_version()
        self.assertEqual(version, 1)

        # Run every migration available.
        self.db.initialize_schema()

        # Version matches current one.
        version = self.db._get_installed_version()
        self.assertEqual(version, self.version)

        migrated, count = self.db.get_all(TestResource(), 'jean-louis')
        self.assertEqual(migrated[0], before)
Code example #23
File: __init__.py Project: timgates42/cliquet
    def create(self,
               collection_id,
               parent_id,
               record,
               id_generator=None,
               unique_fields=None,
               id_field=DEFAULT_ID_FIELD,
               modified_field=DEFAULT_MODIFIED_FIELD,
               auth=None):
        id_generator = id_generator or self.id_generator
        record = record.copy()
        record_id = record.setdefault(id_field, id_generator())

        query = """
        WITH delete_potential_tombstone AS (
            DELETE FROM deleted
             WHERE id = :object_id
               AND parent_id = :parent_id
               AND collection_id = :collection_id
        )
        INSERT INTO records (id, parent_id, collection_id, data, last_modified)
        VALUES (:object_id, :parent_id,
                :collection_id, (:data)::JSONB,
                from_epoch(:last_modified))
        RETURNING id, as_epoch(last_modified) AS last_modified;
        """
        placeholders = dict(object_id=record_id,
                            parent_id=parent_id,
                            collection_id=collection_id,
                            last_modified=record.get(modified_field),
                            data=json.dumps(record))
        with self.client.connect() as conn:
            # Check that it does not violate the resource unicity rules.
            self._check_unicity(conn,
                                collection_id,
                                parent_id,
                                record,
                                unique_fields,
                                id_field,
                                modified_field,
                                for_creation=True)
            result = conn.execute(query, placeholders)
            inserted = result.fetchone()

        record[modified_field] = inserted['last_modified']
        return record
Code example #24
File: __init__.py Project: brouberol/cliquet
    def update(self, collection_id, parent_id, object_id, record,
               unique_fields=None, id_field=DEFAULT_ID_FIELD,
               modified_field=DEFAULT_MODIFIED_FIELD,
               auth=None):
        query_create = """
        INSERT INTO records (id, parent_id, collection_id, data)
        VALUES (%(object_id)s, %(parent_id)s,
                %(collection_id)s, %(data)s::JSONB)
        RETURNING as_epoch(last_modified) AS last_modified;
        """

        query_update = """
        UPDATE records SET data=%(data)s::JSONB
        WHERE id = %(object_id)s
           AND parent_id = %(parent_id)s
           AND collection_id = %(collection_id)s
        RETURNING as_epoch(last_modified) AS last_modified;
        """
        placeholders = dict(object_id=object_id,
                            parent_id=parent_id,
                            collection_id=collection_id,
                            data=json.dumps(record))

        record = record.copy()
        record[id_field] = object_id

        with self.connect() as cursor:
            # Check that it does not violate the resource unicity rules.
            self._check_unicity(cursor, collection_id, parent_id, record,
                                unique_fields, id_field, modified_field)
            # Create or update?
            query = """
            SELECT id FROM records
            WHERE id = %(object_id)s
              AND parent_id = %(parent_id)s
              AND collection_id = %(collection_id)s;
            """
            cursor.execute(query, placeholders)
            query = query_update if cursor.rowcount > 0 else query_create

            cursor.execute(query, placeholders)
            result = cursor.fetchone()

        record[modified_field] = result['last_modified']
        return record
Code example #25
File: __init__.py Project: michielbdejong/cliquet
    def update(self, resource, user_id, record_id, record):
        query_create = """
        INSERT INTO records (id, user_id, resource_name, data)
        VALUES (%(record_id)s, %(user_id)s,
                %(resource_name)s, %(data)s::JSONB)
        RETURNING as_epoch(last_modified) AS last_modified;
        """

        query_update = """
        UPDATE records SET data=%(data)s::JSONB
        WHERE id = %(record_id)s
           AND user_id = %(user_id)s
           AND resource_name = %(resource_name)s
        RETURNING as_epoch(last_modified) AS last_modified;
        """
        placeholders = dict(record_id=record_id,
                            user_id=user_id,
                            resource_name=resource.name,
                            data=json.dumps(record))

        with self.connect() as cursor:
            # Check that it does not violate the resource unicity rules.
            self._check_unicity(cursor, resource, user_id, record)

            # Create or update?
            query = """
            SELECT id FROM records
            WHERE id = %(record_id)s
              AND user_id = %(user_id)s
              AND resource_name = %(resource_name)s;
            """
            cursor.execute(query, placeholders)
            query = query_update if cursor.rowcount > 0 else query_create

            cursor.execute(query, placeholders)
            result = cursor.fetchone()

        record = record.copy()
        record[resource.id_field] = record_id
        record[resource.modified_field] = result['last_modified']
        return record
Code example #26
def http_error(httpexception, errno=None,
               code=None, error=None, message=None, info=None, details=None):
    """Return a JSON formatted response matching the error protocol.

    :param httpexception: Instance of :mod:`~pyramid:pyramid.httpexceptions`
    :param errno: stable application-level error number (e.g. 109)
    :param code: matches the HTTP status code (e.g. 400)
    :param error: string description of error type (e.g. "Bad request")
    :param message: context information (e.g. "Invalid request parameters")
    :param info: information about error (e.g. URL to troubleshooting)
    :param details: additional structured details (conflicting record)
    :returns: the formatted response object
    :rtype: pyramid.httpexceptions.HTTPException
    """
    errno = errno or ERRORS.UNDEFINED

    if isinstance(errno, Enum):
        errno = errno.value

    # Track error number for request summary
    logger.bind(errno=errno)

    body = {
        "code": code or httpexception.code,
        "errno": errno,
        "error": error or httpexception.title
    }

    if message is not None:
        body['message'] = message

    if info is not None:
        body['info'] = info

    if details is not None:
        body['details'] = details

    response = httpexception
    response.body = json.dumps(body).encode("utf-8")
    response.content_type = 'application/json'
    return response
Code example #27
File: __init__.py Project: jotes/cliquet
    def create(self, resource, user_id, record):
        query = """
        INSERT INTO records (user_id, resource_name, data)
        VALUES (%(user_id)s, %(resource_name)s, %(data)s::json)
        RETURNING id, as_epoch(last_modified) AS last_modified;
        """
        placeholders = dict(user_id=user_id,
                            resource_name=resource.name,
                            data=json.dumps(record))

        with self.connect() as cursor:
            # Check that it does not violate the resource unicity rules.
            self._check_unicity(cursor, resource, user_id, record)

            cursor.execute(query, placeholders)
            inserted = cursor.fetchone()

        record = record.copy()
        record[resource.id_field] = inserted['id']
        record[resource.modified_field] = inserted['last_modified']
        return record
Code example #28
File: __init__.py Project: timgates42/cliquet
    def get_all(self,
                collection_id,
                parent_id,
                filters=None,
                sorting=None,
                pagination_rules=None,
                limit=None,
                include_deleted=False,
                id_field=DEFAULT_ID_FIELD,
                modified_field=DEFAULT_MODIFIED_FIELD,
                deleted_field=DEFAULT_DELETED_FIELD,
                auth=None):
        query = """
        WITH total_filtered AS (
            SELECT COUNT(id) AS count
              FROM records
             WHERE parent_id = :parent_id
               AND collection_id = :collection_id
               %(conditions_filter)s
        ),
        collection_filtered AS (
            SELECT id, last_modified, data
              FROM records
             WHERE parent_id = :parent_id
               AND collection_id = :collection_id
               %(conditions_filter)s
             LIMIT %(max_fetch_size)s
        ),
        fake_deleted AS (
            SELECT (:deleted_field)::JSONB AS data
        ),
        filtered_deleted AS (
            SELECT id, last_modified, fake_deleted.data AS data
              FROM deleted, fake_deleted
             WHERE parent_id = :parent_id
               AND collection_id = :collection_id
               %(conditions_filter)s
               %(deleted_limit)s
        ),
        all_records AS (
            SELECT * FROM filtered_deleted
             UNION ALL
            SELECT * FROM collection_filtered
        ),
        paginated_records AS (
            SELECT DISTINCT id
              FROM all_records
              %(pagination_rules)s
        )
        SELECT total_filtered.count AS count_total,
               a.id, as_epoch(a.last_modified) AS last_modified, a.data
          FROM paginated_records AS p JOIN all_records AS a ON (a.id = p.id),
               total_filtered
          %(sorting)s
          %(pagination_limit)s;
        """
        deleted_field = json.dumps(dict([(deleted_field, True)]))

        # Unsafe strings escaped by PostgreSQL
        placeholders = dict(parent_id=parent_id,
                            collection_id=collection_id,
                            deleted_field=deleted_field)

        # Safe strings
        safeholders = defaultdict(six.text_type)
        safeholders['max_fetch_size'] = self._max_fetch_size

        if filters:
            safe_sql, holders = self._format_conditions(
                filters, id_field, modified_field)
            safeholders['conditions_filter'] = 'AND %s' % safe_sql
            placeholders.update(**holders)

        if not include_deleted:
            safeholders['deleted_limit'] = 'LIMIT 0'

        if sorting:
            sql, holders = self._format_sorting(sorting, id_field,
                                                modified_field)
            safeholders['sorting'] = sql
            placeholders.update(**holders)

        if pagination_rules:
            sql, holders = self._format_pagination(pagination_rules, id_field,
                                                   modified_field)
            safeholders['pagination_rules'] = 'WHERE %s' % sql
            placeholders.update(**holders)

        if limit:
            assert isinstance(limit, six.integer_types)  # asserted in resource
            safeholders['pagination_limit'] = 'LIMIT %s' % limit

        with self.client.connect(readonly=True) as conn:
            result = conn.execute(query % safeholders, placeholders)
            retrieved = result.fetchmany(self._max_fetch_size)

        if not len(retrieved):
            return [], 0

        count_total = retrieved[0]['count_total']

        records = []
        for result in retrieved:
            record = result['data']
            record[id_field] = result['id']
            record[modified_field] = result['last_modified']
            records.append(record)

        return records, count_total
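
The query above mixes two substitution mechanisms: %-interpolation for SQL fragments built by the code itself (safeholders) and :named placeholders for user-supplied values (placeholders), which the driver escapes. A reduced sketch of that two-level substitution; the filter fragment and the values are illustrative.

from collections import defaultdict

query = ("SELECT id FROM records"
         " WHERE parent_id = :parent_id"
         " %(conditions_filter)s"
         " %(pagination_limit)s;")
safeholders = defaultdict(str,
                          conditions_filter="AND collection_id = :collection_id",
                          pagination_limit="LIMIT 20")
placeholders = {'parent_id': 'user:123', 'collection_id': 'articles'}

sql = query % safeholders
# sql == ("SELECT id FROM records WHERE parent_id = :parent_id"
#         " AND collection_id = :collection_id LIMIT 20;")
# `sql` and `placeholders` are then passed together to conn.execute().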
Code example #29
    def get_all(self, resource, user_id, filters=None, sorting=None,
                pagination_rules=None, limit=None, include_deleted=False):
        url = self.collection_url.format(resource.name)

        params = []

        sort_fields = []
        if sorting:
            for field, direction in sorting:
                prefix = '-' if direction < 0 else ''
                sort_fields.append(prefix + field)

        if sort_fields:
            params += [("_sort", ','.join(sort_fields))]

        if filters:
            params += self._filters_as_params(filters)

        if limit:
            params.append(("_limit", limit))

        if not pagination_rules:
            resp = self._client.get(self._build_url(url),
                                    params=params,
                                    headers=self._build_headers(resource))
            resp.raise_for_status()

            count = resp.headers['Total-Records']
            records = resp.json()['items']

        else:
            batch_payload = {'defaults': {'body': {}}, 'requests': []}

            querystring = self._params_as_querystring(params)
            batch_payload['requests'].append({
                'method': 'HEAD',
                'path': url + '?%s' % querystring,
            })

            for filters in pagination_rules:
                params_ = list(params)
                params_ += self._filters_as_params(filters)
                querystring = self._params_as_querystring(params_)
                batch_payload['requests'].append({
                    'path': url + '?%s' % querystring,
                })

            resp = self._client.post(self._build_url('/batch'),
                                     data=json.dumps(batch_payload),
                                     headers=self._build_headers(resource))
            resp.raise_for_status()
            batch_responses = resp.json()['responses']

            if any([r['status'] >= 400 for r in batch_responses]):
                http_error = requests.HTTPError('Batch error', response=resp)
                raise exceptions.BackendError(original=http_error)

            count = batch_responses[0]['headers']['Total-Records']
            records = {}
            for batch_response in batch_responses[1:]:
                for record in batch_response['body']['items']:
                    records[record[resource.id_field]] = record

            if sorting:
                records = apply_sorting(records.values(), sorting)[:limit]

        return records, int(count)
Code example #30
File: test_logging.py Project: elemoine/cliquet
 def test_list_of_homogeneous_values_are_serialized_as_string(self):
     list_values = ["life", "of", "pi", 3.14]
     logged = self.renderer(self.logger, "info", {"params": list_values})
     log = json.loads(logged)
     self.assertEqual(log["Fields"]["params"], json.dumps(list_values))
Code example #31
File: test_logging.py Project: elemoine/cliquet
 def test_objects_values_are_serialized_as_string(self):
     querystring = {"_sort": "name"}
     logged = self.renderer(self.logger, "info", {"params": querystring})
     log = json.loads(logged)
     self.assertEqual(log["Fields"]["params"], json.dumps(querystring))
Code example #32
File: test_logging.py Project: ayusharma/cliquet
 def test_objects_values_are_serialized_as_string(self):
     querystring = {'_sort': 'name'}
     logged = self.renderer(self.logger, 'info', {'params': querystring})
     log = json.loads(logged)
     self.assertEqual(log['Fields']['params'], json.dumps(querystring))
Code example #33
File: test_views_batch.py Project: ayusharma/cliquet
 def test_subrequests_body_are_json_serialized(self):
     request = {"path": "/", "body": {"json": "payload"}}
     self.post({"requests": [request]})
     wanted = {"json": "payload"}
     subrequest, = self.request.invoke_subrequest.call_args[0]
     self.assertEqual(subrequest.body.decode("utf8"), json.dumps(wanted))
Code example #34
File: __init__.py Project: timgates42/cliquet
    def _format_conditions(self,
                           filters,
                           id_field,
                           modified_field,
                           prefix='filters'):
        """Format the filters list in SQL, with placeholders for safe escaping.

        .. note::
            All conditions are combined using AND.

        .. note::

            Field names and values are escaped, as they come from the HTTP API.

        :returns: A SQL string with placeholders, and a dict mapping
            placeholders to actual values.
        :rtype: tuple
        """
        operators = {
            COMPARISON.EQ: '=',
            COMPARISON.NOT: '<>',
            COMPARISON.IN: 'IN',
            COMPARISON.EXCLUDE: 'NOT IN',
        }

        conditions = []
        holders = {}
        for i, filtr in enumerate(filters):
            value = filtr.value

            if filtr.field == id_field:
                sql_field = 'id'
            elif filtr.field == modified_field:
                sql_field = 'as_epoch(last_modified)'
            else:
                # Safely escape field name
                field_holder = '%s_field_%s' % (prefix, i)
                holders[field_holder] = filtr.field

                # JSON operator ->> retrieves values as text.
                # If field is missing, we default to ''.
                sql_field = "coalesce(data->>:%s, '')" % field_holder
                if isinstance(value, (int, float)) and \
                   value not in (True, False):
                    sql_field = "(data->>:%s)::numeric" % field_holder

            if filtr.operator not in (COMPARISON.IN, COMPARISON.EXCLUDE):
                # For the IN operator, let psycopg escape the values list.
                # Otherwise JSON-ify the native value (e.g. True -> 'true')
                if not isinstance(filtr.value, six.string_types):
                    value = json.dumps(filtr.value).strip('"')
            else:
                value = tuple(value)

            # Safely escape value
            value_holder = '%s_value_%s' % (prefix, i)
            holders[value_holder] = value

            sql_operator = operators.setdefault(filtr.operator,
                                                filtr.operator.value)
            cond = "%s %s :%s" % (sql_field, sql_operator, value_holder)
            conditions.append(cond)

        safe_sql = ' AND '.join(conditions)
        return safe_sql, holders
Code example #35
 def test_list_of_homogeneous_values_are_serialized_as_string(self):
     list_values = ['life', 'of', 'pi', 3.14]
     logged = self.renderer(self.logger, 'info', {'params': list_values})
     log = json.loads(logged)
     self.assertEqual(log['Fields']['params'], json.dumps(list_values))
Code example #36
File: test_logging.py Project: ayusharma/cliquet
 def test_list_of_homogeneous_values_are_serialized_as_string(self):
     list_values = ['life', 'of', 'pi', 3.14]
     logged = self.renderer(self.logger, 'info', {'params': list_values})
     log = json.loads(logged)
     self.assertEqual(log['Fields']['params'], json.dumps(list_values))
Code example #37
File: redis.py Project: timgates42/cliquet
 def set(self, key, value, ttl=None):
     value = json.dumps(value)
     if ttl:
         self._client.psetex(self.prefix + key, int(ttl * 1000), value)
     else:
         self._client.set(self.prefix + key, value)
Code example #38
File: __init__.py Project: MrChoclate/cliquet
    def get_all(self, collection_id, parent_id, filters=None, sorting=None,
                pagination_rules=None, limit=None, include_deleted=False,
                id_field=DEFAULT_ID_FIELD,
                modified_field=DEFAULT_MODIFIED_FIELD,
                deleted_field=DEFAULT_DELETED_FIELD,
                auth=None):
        query = """
        WITH total_filtered AS (
            SELECT COUNT(id) AS count
              FROM records
             WHERE parent_id = :parent_id
               AND collection_id = :collection_id
               %(conditions_filter)s
        ),
        collection_filtered AS (
            SELECT id, last_modified, data
              FROM records
             WHERE parent_id = :parent_id
               AND collection_id = :collection_id
               %(conditions_filter)s
             LIMIT %(max_fetch_size)s
        ),
        fake_deleted AS (
            SELECT (:deleted_field)::JSONB AS data
        ),
        filtered_deleted AS (
            SELECT id, last_modified, fake_deleted.data AS data
              FROM deleted, fake_deleted
             WHERE parent_id = :parent_id
               AND collection_id = :collection_id
               %(conditions_filter)s
               %(deleted_limit)s
        ),
        all_records AS (
            SELECT * FROM filtered_deleted
             UNION ALL
            SELECT * FROM collection_filtered
        ),
        paginated_records AS (
            SELECT DISTINCT id
              FROM all_records
              %(pagination_rules)s
        )
        SELECT total_filtered.count AS count_total,
               a.id, as_epoch(a.last_modified) AS last_modified, a.data
          FROM paginated_records AS p JOIN all_records AS a ON (a.id = p.id),
               total_filtered
          %(sorting)s
          %(pagination_limit)s;
        """
        deleted_field = json.dumps(dict([(deleted_field, True)]))

        # Unsafe strings escaped by PostgreSQL
        placeholders = dict(parent_id=parent_id,
                            collection_id=collection_id,
                            deleted_field=deleted_field)

        # Safe strings
        safeholders = defaultdict(six.text_type)
        safeholders['max_fetch_size'] = self._max_fetch_size

        if filters:
            safe_sql, holders = self._format_conditions(filters,
                                                        id_field,
                                                        modified_field)
            safeholders['conditions_filter'] = 'AND %s' % safe_sql
            placeholders.update(**holders)

        if not include_deleted:
            safeholders['deleted_limit'] = 'LIMIT 0'

        if sorting:
            sql, holders = self._format_sorting(sorting, id_field,
                                                modified_field)
            safeholders['sorting'] = sql
            placeholders.update(**holders)

        if pagination_rules:
            sql, holders = self._format_pagination(pagination_rules, id_field,
                                                   modified_field)
            safeholders['pagination_rules'] = 'WHERE %s' % sql
            placeholders.update(**holders)

        if limit:
            assert isinstance(limit, six.integer_types)  # asserted in resource
            safeholders['pagination_limit'] = 'LIMIT %s' % limit

        with self.client.connect(readonly=True) as conn:
            result = conn.execute(query % safeholders, placeholders)
            retrieved = result.fetchmany(self._max_fetch_size)

        if not len(retrieved):
            return [], 0

        count_total = retrieved[0]['count_total']

        records = []
        for result in retrieved:
            record = result['data']
            record[id_field] = result['id']
            record[modified_field] = result['last_modified']
            records.append(record)

        return records, count_total
Code example #39
 def test_objects_values_are_serialized_as_string(self):
     querystring = {'_sort': 'name'}
     logged = self.renderer(self.logger, 'info', {'params': querystring})
     log = json.loads(logged)
     self.assertEqual(log['Fields']['params'], json.dumps(querystring))
Code example #40
File: test_pagination.py Project: MrChoclate/cliquet
 def test_raises_bad_request_if_token_has_bad_data_structure(self):
     invalid_token = json.dumps([[('last_modified', 0, '>')]])
     self.resource.request.GET = {
         '_since': '123', '_limit': '20',
         '_token': b64encode(invalid_token.encode('ascii')).decode('ascii')}
     self.assertRaises(HTTPBadRequest, self.resource.collection_get)
Code example #41
File: test_pagination.py Project: timgates42/cliquet
 def test_raises_bad_request_if_token_has_bad_data_structure(self):
     invalid_token = json.dumps([[('last_modified', 0, '>')]])
     self.resource.request.GET = {
         '_since': '123', '_limit': '20',
         '_token': b64encode(invalid_token.encode('ascii')).decode('ascii')}
     self.assertRaises(HTTPBadRequest, self.resource.collection_get)