def test_debug_error_message(self):
    """debug_error_message returns None unless DEBUG is enabled.

    Uses assertEqual: the camel-case assertEquals alias is deprecated
    and removed in Python 3.12.
    """
    with self.app.test_request_context():
        # Production mode: error details must be suppressed.
        self.app.config['DEBUG'] = False
        self.assertEqual(debug_error_message('An error message'), None)
        # Debug mode: the original message is passed through.
        self.app.config['DEBUG'] = True
        self.assertEqual(debug_error_message('An error message'),
                         'An error message')
def _resolve_embedded_documents(resource, req, documents):
    """Loops through the documents, adding embedded representations
    of any fields that are (1) defined eligible for embedding in the
    DOMAIN and (2) requested to be embedded in the current `req`

    Currently we only support a single layer of embedding, i.e.
    /invoices/?embedded={"user":1}
    *NOT*
    /invoices/?embedded={"user.friends":1}

    :param resource: the resource name.
    :param req: an instance of :class:`eve.utils.ParsedRequest`.
    :param documents: list of documents returned by the query.
        NOTE: documents are mutated in place (the reference value is
        replaced with the embedded document).

    .. versionchanged:: 0.1.1
       'collection' key has been renamed to 'resource' (data_relation).

    .. versionadded:: 0.1.0
    """
    if req.embedded:
        # Parse the embedded clause, we are expecting
        # something like: '{"user":1}'
        try:
            client_embedding = json.loads(req.embedded)
        except ValueError:
            abort(400, description=debug_error_message(
                'Unable to parse `embedded` clause'
            ))

        # Build the list of fields where embedding is being requested
        try:
            embedded_fields = [k for k, v in client_embedding.items()
                               if v == 1]
        except AttributeError:
            # We got something other than a dict
            abort(400, description=debug_error_message(
                'Unable to parse `embedded` clause'
            ))

        # For each field, is the field allowed to be embedded?
        # Pick out fields that have a `data_relation` where `embeddable=True`
        enabled_embedded_fields = []
        for field in embedded_fields:
            # Reject bogus field names
            if field in config.DOMAIN[resource]['schema']:
                field_definition = config.DOMAIN[resource]['schema'][field]
                if 'data_relation' in field_definition and \
                        field_definition['data_relation'].get('embeddable'):
                    # or could raise 400 here
                    enabled_embedded_fields.append(field)

        for document in documents:
            for field in enabled_embedded_fields:
                field_definition = config.DOMAIN[resource]['schema'][field]
                # Retrieve and serialize the requested document
                embedded_doc = app.data.find_one(
                    field_definition['data_relation']['resource'],
                    **{config.ID_FIELD: document[field]}
                )
                # Only substitute when the lookup actually found something;
                # otherwise the original reference value is kept.
                if embedded_doc:
                    document[field] = embedded_doc
def test_debug_error_message(self):
    """debug_error_message echoes its input only when DEBUG is on."""
    cases = [(False, None), (True, 'An error message')]
    with self.app.test_request_context():
        for debug_enabled, expected in cases:
            self.app.config['DEBUG'] = debug_enabled
            self.assertEqual(
                debug_error_message('An error message'), expected)
def payload():
    """ Decode the current request body into a plain dict, keyed on the
    request's Content-Type. Unsupported or empty payloads trigger a
    400 (Bad Request) via ``abort``.

    .. versionchanged:: 0.1.1
       Payload returned as a standard python dict regardless of request
       content type.

    .. versionchanged:: 0.0.9
       More informative error messages.
       request.get_json() replaces the now deprecated request.json

    .. versionchanged:: 0.0.7
       Native Flask request.json preferred over json.loads.

    .. versionadded: 0.0.5
    """
    # Strip any parameters (e.g. '; charset=utf-8') from the header value.
    content_type = request.headers['Content-Type'].split(';')[0]

    if content_type == 'application/json':
        return request.get_json()

    if content_type == 'application/x-www-form-urlencoded':
        if len(request.form):
            return request.form.to_dict()
        abort(400, description=debug_error_message(
            'No form-urlencoded data supplied'
        ))

    abort(400, description=debug_error_message(
        'Unknown or no Content-Type header supplied'))
def insert(self, resource, doc_or_docs):
    """ Inserts a document into a resource collection.

    :param resource: resource being accessed.
    :param doc_or_docs: single document or list of documents to insert.
    :returns: whatever pymongo's ``insert`` returns — presumably the
        inserted id(s); TODO confirm against the pymongo version in use.

    .. versionchanged:: 0.0.9
       More informative error messages.

    .. versionchanged:: 0.0.8
       'write_concern' support.

    .. versionchanged:: 0.0.6
       projection queries ('?projection={"name": 1}')
       'document' param renamed to 'doc_or_docs', making support for bulk
       inserts apparent.

    .. versionchanged:: 0.0.4
       retrieves the target collection via the new config.SOURCES helper.
    """
    datasource, _, _, _ = self._datasource_ex(resource)
    try:
        # write-concern options for this resource are merged in via _wc()
        return self.driver.db[datasource].insert(doc_or_docs,
                                                 **self._wc(resource))
    except pymongo.errors.InvalidOperation as e:
        abort(500, description=debug_error_message(
            'pymongo.errors.InvalidOperation: %s' % e
        ))
    except pymongo.errors.OperationFailure as e:
        # most likely a 'w' (write_concern) setting which needs an
        # existing ReplicaSet which doesn't exist. Please note that the
        # update will actually succeed (a new ETag will be needed).
        abort(500, description=debug_error_message(
            'pymongo.errors.OperationFailure: %s' % e
        ))
def _change_request(self, resource, id_, changes, original, replace=False):
    """ Performs a change, be it a replace or update.

    :param resource: resource whose document is being changed.
    :param id_: value of the document's id field.
    :param changes: replacement document (replace=True) or update spec.
    :param original: currently stored document; its ETag (if any) is used
        for optimistic concurrency control.
    :param replace: True to replace the whole document, False to update.

    :raises OriginalChangedError: if the ETag-constrained write matched
        nothing, i.e. the stored document changed since `original` was read.

    .. versionchanged:: 0.8.2
       Return 400 if update/replace with malformed DBRef field. See #1257.

    .. versionchanged:: 0.6.1
       Support for PyMongo 3.0.

    .. versionchanged:: 0.6
       Return 400 if an attempt is made to update/replace an immutable
       field.
    """
    id_field = config.DOMAIN[resource]["id_field"]
    query = {id_field: id_}
    if config.ETAG in original:
        # constrain the filter on the original ETag so a concurrent change
        # surfaces as modified_count == 0 below
        query[config.ETAG] = original[config.ETAG]
    datasource, filter_, _, _ = self._datasource_ex(resource, query)
    coll = self.get_collection_with_write_concern(datasource, resource)
    try:
        result = (coll.replace_one(filter_, changes) if replace else
                  coll.update_one(filter_, changes))
        if (config.ETAG in original and result and result.acknowledged and
                result.modified_count == 0):
            # nothing matched the ETag-constrained filter: document changed
            raise self.OriginalChangedError()
    except pymongo.errors.DuplicateKeyError as e:
        abort(
            400,
            description=debug_error_message(
                "pymongo.errors.DuplicateKeyError: %s" % e),
        )
    except (pymongo.errors.WriteError, pymongo.errors.OperationFailure) as e:
        # server error codes and messages changed between 2.4 and 2.6/3.0.
        server_version = self.driver.db.client.server_info()["version"][:3]
        if (server_version == "2.4" and e.code in (13596, 10148)) or (
                server_version in ("2.6", "3.0", "3.2", "3.4", "3.6", "4.0")
                and e.code in (66, 16837)):
            # attempt to update an immutable field. this usually
            # happens when a PATCH or PUT includes a mismatching ID_FIELD.
            self.app.logger.warning(e)
            description = (
                debug_error_message(
                    "pymongo.errors.OperationFailure: %s" % e)
                or "Attempt to update an immutable field. Usually happens "
                "when PATCH or PUT include a '%s' field, "
                "which is immutable (PUT can include it as long as "
                "it is unchanged)." % id_field)
            abort(400, description=description)
        else:
            # see comment in :func:`insert()`.
            self.app.logger.exception(e)
            abort(
                500,
                description=debug_error_message(
                    "pymongo.errors.OperationFailure: %s" % e),
            )
def process_login(items):
    """Hook to add token on POST to /sessions.

    Attempts to first login via LDAP (if enabled), then login via database.

    If the login is successful, the fields "username" and "password" are
    removed and the fields "user" and "token" are added, which will be stored
    in the db.

    If the login is unsuccessful, abort(401)

    NOTE(review): the loop either returns or aborts on the first item, so
    only a single session item is ever processed — presumably POSTs to
    /sessions are single-item; confirm against the endpoint config.

    Args:
        items (list): List of items as passed by EVE to post hooks.
    """
    for item in items:
        username = item['username']
        password = item['password']

        # LDAP
        if (app.config.get('ldap_connector') and
                ldap.authenticate_user(username, password)):
            # Success, sync user and get token
            try:
                user = ldap.sync_one(username)
                app.logger.info(
                    "User '%s' was authenticated with LDAP" % username)
            except LDAPException:
                # Sync failed! Try to find user in db.
                user = _find_user(username)
                if user:
                    # Auth succeeded and the user exists locally: log the
                    # sync failure but let the login go through.
                    app.logger.error(
                        f"User '{username}' authenticated with LDAP and found "
                        "in db, but LDAP sync failed.")
                else:
                    status = (f"Login failed: user '{username}' authenticated "
                              "with LDAP but not found in db, and LDAP sync "
                              "failed.")
                    app.logger.error(status)
                    abort(401, description=debug_error_message(status))

            _prepare_token(item, user['_id'])
            return

        # Database, try to find via nethz, mail or objectid
        user = _find_user(username)
        if user:
            app.logger.debug("User found in db.")
            if verify_password(user, item['password']):
                app.logger.debug("Login for user '%s' successful." % username)
                _prepare_token(item, user['_id'])
                return
            else:
                status = "Login failed: Password does not match!"
                app.logger.debug(status)
                abort(401, description=debug_error_message(status))

        # Abort if everything else fails
        status = "Login with db failed: User not found!"
        app.logger.debug(status)
        abort(401, description=debug_error_message(status))
def test_debug_error_message(self):
    """Message passthrough is gated on the DEBUG flag."""
    msg = "An error message"
    with self.app.test_request_context():
        self.app.config["DEBUG"] = False
        self.assertEqual(debug_error_message(msg), None)
        self.app.config["DEBUG"] = True
        self.assertEqual(debug_error_message(msg), msg)
def new_resolve_embedded_fields(resource, req, document):
    """Return the list of fields eligible for embedding: the fields requested
    via the request's `embedded` clause merged with the resource definition's
    default `embedded_fields`, filtered down to those whose data_relation is
    flagged 'embeddable'.

    :param resource: the resource name.
    :param req: an instance of :class:`eve.utils.ParsedRequest`.
    :param document: document handed to `new_field_definition` when resolving
        each field's schema definition.
    """
    embedded_fields = []
    if req.embedded:
        # Parse the embedded clause; expecting something like '{"user":1}'.
        try:
            client_embedding = json.loads(req.embedded)
        except ValueError:
            abort(400, description=debug_error_message(
                'Unable to parse `embedded` clause'))

        try:
            embedded_fields = [
                k for k, v in client_embedding.items() if v == 1
            ]
        except AttributeError:
            # We got something other than a dict
            abort(400, description=debug_error_message(
                'Unable to parse `embedded` clause'))

    # Merge with the fields the resource definition always embeds.
    embedded_fields = list(
        set(config.DOMAIN[resource]['embedded_fields']) |
        set(embedded_fields))

    enabled_embedded_fields = []
    for field in embedded_fields:
        field_def = new_field_definition(resource, field, document)
        # isinstance() is the idiomatic type check (and, unlike
        # `type(x) is dict`, also accepts dict subclasses).
        if isinstance(field_def, dict):
            if field_def.get('type') == 'list':
                # for list fields, the relation lives on the item schema
                field_def = field_def['schema']
            if 'data_relation' in field_def and \
                    field_def['data_relation'].get('embeddable'):
                enabled_embedded_fields.append(field)
    return enabled_embedded_fields
def insert(self, resource, doc_or_docs):
    """ Inserts a document into a resource collection.

    :param resource: resource being accessed.
    :param doc_or_docs: single document (dict) or list of documents.
    :returns: list of inserted ids.

    .. versionchanged:: 0.6.1
       Support for PyMongo 3.0.

    .. versionchanged:: 0.6
       Support for multiple databases.

    .. versionchanged:: 0.0.9
       More informative error messages.

    .. versionchanged:: 0.0.8
       'write_concern' support.

    .. versionchanged:: 0.0.6
       projection queries ('?projection={"name": 1}')
       'document' param renamed to 'doc_or_docs', making support for bulk
       inserts apparent.

    .. versionchanged:: 0.0.4
       retrieves the target collection via the new config.SOURCES helper.
    """
    datasource, _, _, _ = self._datasource_ex(resource)
    coll = self.get_collection_with_write_concern(datasource, resource)
    if isinstance(doc_or_docs, dict):
        # normalize: insert_many always takes a list
        doc_or_docs = [doc_or_docs]
    try:
        return coll.insert_many(doc_or_docs, ordered=True).inserted_ids
    except pymongo.errors.BulkWriteError as e:
        self.app.logger.exception(e)

        # since this is an ordered bulk operation, all remaining inserts
        # are aborted. Be aware that if BULK_ENABLED is True and more than
        # one document is included with the payload, some documents might
        # have been successfully inserted, even if the operation was
        # aborted.

        # report a duplicate key error since this can probably be
        # handled by the client.
        for error in e.details["writeErrors"]:
            # amazingly enough, pymongo does not appear to be exposing
            # error codes as constants.
            if error["code"] == 11000:
                abort(
                    409,
                    description=debug_error_message(
                        "Duplicate key error at index: %s, message: %s"
                        % (error["index"], error["errmsg"])),
                )

        abort(
            500,
            description=debug_error_message(
                "pymongo.errors.BulkWriteError: %s" % e),
        )
def test_debug_error_message(self):
    """Only DEBUG mode exposes the error message text."""
    with self.app.test_request_context():
        for flag, expected in ((False, None),
                               (True, "An error message")):
            self.app.config["DEBUG"] = flag
            self.assertEqual(
                debug_error_message("An error message"), expected
            )
def insert(self, resource, doc_or_docs):
    """ Inserts a document into a resource collection.

    :param resource: resource being accessed.
    :param doc_or_docs: single document (dict) or list of documents.
    :returns: list of inserted ids.

    .. versionchanged:: 0.6.1
       Support for PyMongo 3.0.

    .. versionchanged:: 0.6
       Support for multiple databases.

    .. versionchanged:: 0.0.9
       More informative error messages.

    .. versionchanged:: 0.0.8
       'write_concern' support.

    .. versionchanged:: 0.0.6
       projection queries ('?projection={"name": 1}')
       'document' param renamed to 'doc_or_docs', making support for bulk
       inserts apparent.

    .. versionchanged:: 0.0.4
       retrieves the target collection via the new config.SOURCES helper.
    """
    datasource, _, _, _ = self._datasource_ex(resource)
    coll = self.get_collection_with_write_concern(datasource, resource)
    if isinstance(doc_or_docs, dict):
        # normalize: insert_many always takes a list
        doc_or_docs = [doc_or_docs]
    try:
        return coll.insert_many(doc_or_docs, ordered=True).inserted_ids
    except pymongo.errors.BulkWriteError as e:
        self.app.logger.exception(e)

        # since this is an ordered bulk operation, all remaining inserts
        # are aborted. Be aware that if BULK_ENABLED is True and more than
        # one document is included with the payload, some documents might
        # have been successfully inserted, even if the operation was
        # aborted.

        # report a duplicate key error since this can probably be
        # handled by the client.
        for error in e.details['writeErrors']:
            # amazingly enough, pymongo does not appear to be exposing
            # error codes as constants.
            if error['code'] == 11000:
                abort(409, description=debug_error_message(
                    'Duplicate key error at index: %s, message: %s' % (
                        error['index'], error['errmsg'])
                ))

        abort(500, description=debug_error_message(
            'pymongo.errors.BulkWriteError: %s' % e
        ))
def _change_request(self, resource, id_, changes, original, replace=False):
    """ Performs a change, be it a replace or update.

    :param resource: resource whose document is being changed.
    :param id_: value of the document's id field.
    :param changes: replacement document (replace=True) or update spec.
    :param original: currently stored document; its ETag (if any) is used
        for optimistic concurrency control.
    :param replace: True to replace the whole document, False to update.

    :raises OriginalChangedError: if the ETag-constrained write matched
        nothing, i.e. the stored document changed since `original` was read.

    .. versionchanged:: 0.6.1
       Support for PyMongo 3.0.

    .. versionchanged:: 0.6
       Return 400 if an attempt is made to update/replace an immutable
       field.
    """
    id_field = config.DOMAIN[resource]['id_field']
    query = {id_field: id_}
    if config.ETAG in original:
        query[config.ETAG] = original[config.ETAG]
    datasource, filter_, _, _ = self._datasource_ex(
        resource, query)
    coll = self.get_collection_with_write_concern(datasource, resource)
    try:
        result = coll.replace_one(filter_, changes) if replace else \
            coll.update_one(filter_, changes)
        if result and result.acknowledged and result.modified_count == 0:
            # ETag-constrained filter matched nothing: concurrent change
            raise self.OriginalChangedError()
    except pymongo.errors.DuplicateKeyError as e:
        abort(400, description=debug_error_message(
            'pymongo.errors.DuplicateKeyError: %s' % e
        ))
    except pymongo.errors.OperationFailure as e:
        # server error codes and messages changed between 2.4 and 2.6/3.0.
        server_version = \
            self.driver.db.client.server_info()['version'][:3]
        if (
            (server_version == '2.4' and e.code in (13596, 10148)) or
            (server_version in ('2.6', '3.0') and e.code in (66, 16837))
        ):
            # attempt to update an immutable field. this usually
            # happens when a PATCH or PUT includes a mismatching ID_FIELD.
            # logger.warn() is a deprecated alias (removed in Python 3.13),
            # so use warning() — matches the other _change_request variant.
            self.app.logger.warning(e)
            description = debug_error_message(
                'pymongo.errors.OperationFailure: %s' % e) or \
                "Attempt to update an immutable field. Usually happens " \
                "when PATCH or PUT include a '%s' field, " \
                "which is immutable (PUT can include it as long as " \
                "it is unchanged)." % id_field
            abort(400, description=description)
        else:
            # see comment in :func:`insert()`.
            self.app.logger.exception(e)
            abort(500, description=debug_error_message(
                'pymongo.errors.OperationFailure: %s' % e
            ))
def process_login(items):
    """Hook to add token on POST to /sessions.

    Attempts to first login via LDAP (if enabled), then login via database.

    If the login is successful, the fields "username" and "password" are
    removed and the fields "user" and "token" are added, which will be stored
    in the db.

    If the login is unsuccessful, abort(401)

    NOTE(review): the loop either returns or aborts on the first item, so
    only a single session item is ever processed — presumably POSTs to
    /sessions are single-item; confirm against the endpoint config.

    Args:
        items (list): List of items as passed by EVE to post hooks.
    """
    for item in items:
        username = item['username']
        password = item['password']

        # LDAP
        if (app.config.get('ldap_connector') and
                ldap.authenticate_user(username, password)):
            # Success, sync user and get token
            updated = ldap.sync_one(username)
            _prepare_token(item, updated['_id'])
            app.logger.info(
                "User '%s' was authenticated with LDAP" % username)
            return

        # Database, try to find via nethz, mail or objectid
        users = app.data.driver.db['users']
        lookup = {'$or': [{'nethz': username}, {'email': username}]}
        try:
            # only add an _id clause when the input is a valid ObjectId
            objectid = ObjectId(username)
            lookup['$or'].append({'_id': objectid})
        except InvalidId:
            pass  # input can't be used as ObjectId
        user = users.find_one(lookup)

        if user:
            app.logger.debug("User found in db.")
            if verify_password(user, item['password']):
                app.logger.debug("Login for user '%s' successful." % username)
                _prepare_token(item, user['_id'])
                return
            else:
                status = "Login failed: Password does not match!"
                app.logger.debug(status)
                abort(401, description=debug_error_message(status))

        # Abort if everything else fails
        status = "Login with db failed: User not found!"
        app.logger.debug(status)
        abort(401, description=debug_error_message(status))
def payload():
    """ Performs sanity checks or decoding depending on the Content-Type,
    then returns the request payload as a dict. If request Content-Type is
    unsupported, aborts with a 400 (Bad Request).

    .. versionchanged:: 0.3
       Allow 'multipart/form-data' content type.

    .. versionchanged:: 0.1.1
       Payload returned as a standard python dict regardless of request
       content type.

    .. versionchanged:: 0.0.9
       More informative error messages.
       request.get_json() replaces the now deprecated request.json

    .. versionchanged:: 0.0.7
       Native Flask request.json preferred over json.loads.

    .. versionadded: 0.0.5
    """
    # drop any header parameters, e.g. '; charset=utf-8'
    content_type = request.headers['Content-Type'].split(';')[0]

    if content_type == 'application/json':
        return request.get_json()
    elif content_type == 'application/x-www-form-urlencoded':
        return request.form.to_dict() if len(request.form) else \
            abort(400, description=debug_error_message(
                'No form-urlencoded data supplied'
            ))
    elif content_type == 'multipart/form-data':
        # as multipart is also used for file uploads, we let an empty
        # request.form go through as long as there are also files in the
        # request.
        if len(request.form) or len(request.files):
            # merge form fields and request files, so we get a single
            # payload to be validated against the resource schema.
            # list() is needed because Python3 items() returns a dict_view,
            # not a list as in Python2.
            return dict(
                list(request.form.to_dict().items()) +
                list(request.files.to_dict().items()))
        else:
            abort(400, description=debug_error_message(
                'No multipart/form-data supplied'))
    else:
        abort(400, description=debug_error_message(
            'Unknown or no Content-Type header supplied'))
def _change_request(self, resource, id_, changes, original, replace=False):
    """ Performs a change, be it a replace or update.

    :param resource: resource whose document is being changed.
    :param id_: value of the document's id field.
    :param changes: replacement document (replace=True) or update spec.
    :param original: currently stored document; its ETag (if any) is
        added to the lookup query.
    :param replace: True to replace the whole document, False to update.

    .. versionchanged:: 0.6.1
       Support for PyMongo 3.0.

    .. versionchanged:: 0.6
       Return 400 if an attempt is made to update/replace an immutable
       field.
    """
    id_field = config.DOMAIN[resource]['id_field']
    query = {id_field: id_}
    if config.ETAG in original:
        query[config.ETAG] = original[config.ETAG]
    datasource, filter_, _, _ = self._datasource_ex(
        resource, query)
    coll = self.get_collection_with_write_concern(datasource, resource)
    try:
        coll.replace_one(filter_, changes) if replace else \
            coll.update_one(filter_, changes)
    except pymongo.errors.DuplicateKeyError as e:
        abort(400, description=debug_error_message(
            'pymongo.errors.DuplicateKeyError: %s' % e
        ))
    except pymongo.errors.OperationFailure as e:
        # server error codes and messages changed between 2.4 and 2.6/3.0.
        server_version = \
            self.driver.db.client.server_info()['version'][:3]
        if (
            (server_version == '2.4' and e.code in (13596, 10148)) or
            (server_version in ('2.6', '3.0', '3.2', '3.4') and
             e.code in (66, 16837))
        ):
            # attempt to update an immutable field. this usually
            # happens when a PATCH or PUT includes a mismatching ID_FIELD.
            # logger.warn() is a deprecated alias (removed in Python 3.13),
            # so use warning() instead.
            self.app.logger.warning(e)
            description = debug_error_message(
                'pymongo.errors.OperationFailure: %s' % e) or \
                "Attempt to update an immutable field. Usually happens " \
                "when PATCH or PUT include a '%s' field, " \
                "which is immutable (PUT can include it as long as " \
                "it is unchanged)." % id_field
            abort(400, description=description)
        else:
            # see comment in :func:`insert()`.
            self.app.logger.exception(e)
            abort(500, description=debug_error_message(
                'pymongo.errors.OperationFailure: %s' % e
            ))
def process_login(items):
    """Hook to add token on POST to /sessions.

    Attempts to first login via LDAP (if enabled), then login via database.

    If the login is successful, the fields "username" and "password" are
    removed and the fields "user" and "token" are added, which will be stored
    in the db.

    If the login is unsuccessful, abort(401)

    NOTE(review): the loop either returns or aborts on the first item, so
    only a single session item is ever processed — presumably POSTs to
    /sessions are single-item; confirm against the endpoint config.

    Args:
        items (list): List of items as passed by EVE to post hooks.
    """
    for item in items:
        username = item['username']
        password = item['password']

        # LDAP
        if (app.config.get('ldap_connector') and
                ldap.authenticate_user(username, password)):
            # Success, sync user and get token
            updated = ldap.sync_one(username)
            _prepare_token(item, updated['_id'])
            app.logger.info("User '%s' was authenticated with LDAP"
                            % username)
            return

        # Database, try to find via nethz, mail or objectid
        users = app.data.driver.db['users']
        lookup = {'$or': [{'nethz': username}, {'email': username}]}
        try:
            # only add an _id clause when the input is a valid ObjectId
            objectid = ObjectId(username)
            lookup['$or'].append({'_id': objectid})
        except InvalidId:
            pass  # input can't be used as ObjectId
        user = users.find_one(lookup)

        if user:
            app.logger.debug("User found in db.")
            if verify_password(user, item['password']):
                app.logger.debug("Login for user '%s' successful." % username)
                _prepare_token(item, user['_id'])
                return
            else:
                status = "Login failed: Password does not match!"
                app.logger.debug(status)
                abort(401, description=debug_error_message(status))

        # Abort if everything else fails
        status = "Login with db failed: User not found!"
        app.logger.debug(status)
        abort(401, description=debug_error_message(status))
def resolve_embedded_fields(resource, req):
    """ Returns a list of validated embedded fields from the incoming request
    or from the resource definition is the request does not specify.

    :param resource: the resource name.
    :param req: an instance of :class:`eve.utils.ParsedRequest`.

    .. versionchanged:: 0.5
       Enables subdocuments embedding. #389.

    .. versionadded:: 0.4
    """
    embedded_fields = []
    if req.embedded:
        # Parse the embedded clause, we are expecting
        # something like: '{"user":1}'
        try:
            client_embedding = json.loads(req.embedded)
        except ValueError:
            abort(400, description=debug_error_message(
                'Unable to parse `embedded` clause'
            ))

        # Build the list of fields where embedding is being requested
        try:
            embedded_fields = [k for k, v in client_embedding.items()
                               if v == 1]
        except AttributeError:
            # We got something other than a dict
            abort(400, description=debug_error_message(
                'Unable to parse `embedded` clause'
            ))

    # merge with the fields the resource definition embeds by default
    embedded_fields = list(
        set(config.DOMAIN[resource]['embedded_fields']) |
        set(embedded_fields))

    # For each field, is the field allowed to be embedded?
    # Pick out fields that have a `data_relation` where `embeddable=True`
    enabled_embedded_fields = []
    for field in embedded_fields:
        # Reject bogus field names
        field_def = field_definition(resource, field)
        if field_def:
            if field_def['type'] == 'list':
                # list fields: the relation lives on the item schema
                field_def = field_def['schema']
            if 'data_relation' in field_def and \
                    field_def['data_relation'].get('embeddable'):
                # or could raise 400 here
                enabled_embedded_fields.append(field)
    return enabled_embedded_fields
def resolve_embedded_fields(resource, req):
    """ Validate and return the embeddable fields for a request.

    Fields requested through the `embedded` query parameter are merged with
    those enabled by default in the resource definition; only fields whose
    data_relation is flagged 'embeddable' make the cut.

    :param resource: the resource name.
    :param req: an instance of :class:`eve.utils.ParsedRequest`.

    .. versionchanged:: 0.5
       Enables subdocuments embedding. #389.

    .. versionadded:: 0.4
    """
    requested = []
    if req.embedded:
        # The clause is expected to look like '{"user":1}'.
        try:
            embedding_spec = json.loads(req.embedded)
        except ValueError:
            abort(400, description=debug_error_message(
                'Unable to parse `embedded` clause'))
        try:
            requested = [name for name, flag in embedding_spec.items()
                         if flag == 1]
        except AttributeError:
            # We got something other than a dict
            abort(400, description=debug_error_message(
                'Unable to parse `embedded` clause'))

    # union of client-requested and resource-default embedded fields
    candidates = set(config.DOMAIN[resource]['embedded_fields']) | \
        set(requested)

    allowed = []
    for field in candidates:
        definition = field_definition(resource, field)
        if not definition:
            # bogus field name: silently ignore
            continue
        if definition['type'] == 'list':
            definition = definition['schema']
        relation = definition.get('data_relation')
        if relation and relation.get('embeddable'):
            allowed.append(field)
    return allowed
def sanitize_keys(spec):
    """ Abort with 400 (Bad Request) if the query spec contains MongoDB
    operators that are unknown/unsupported or explicitly banned via
    MONGO_QUERY_BLACKLIST.

    :param spec: parsed query (filter) dict to validate.
    """
    # set comprehension instead of set([...]); iterating the dict directly
    # yields its keys, so .keys() is redundant
    ops = {op for op in spec if op[0] == '$'}
    unknown = ops - Mongo.operators
    if unknown:
        abort(400, description=debug_error_message(
            'Query contains unknown or unsupported operators: %s' %
            ', '.join(unknown)
        ))

    if set(spec.keys()) & set(config.MONGO_QUERY_BLACKLIST):
        abort(400, description=debug_error_message(
            'Query contains operators banned in MONGO_QUERY_BLACKLIST'
        ))
def payload():
    """ Performs sanity checks or decoding depending on the Content-Type,
    then returns the request payload as a dict. If request Content-Type is
    unsupported, aborts with a 400 (Bad Request).

    .. versionchanged:: 0.3
       Allow 'multipart/form-data' content type.

    .. versionchanged:: 0.1.1
       Payload returned as a standard python dict regardless of request
       content type.

    .. versionchanged:: 0.0.9
       More informative error messages.
       request.get_json() replaces the now deprecated request.json

    .. versionchanged:: 0.0.7
       Native Flask request.json preferred over json.loads.

    .. versionadded: 0.0.5
    """
    # drop any header parameters, e.g. '; charset=utf-8'
    content_type = request.headers['Content-Type'].split(';')[0]

    if content_type == 'application/json':
        return request.get_json()
    elif content_type == 'application/x-www-form-urlencoded':
        return request.form.to_dict() if len(request.form) else \
            abort(400, description=debug_error_message(
                'No form-urlencoded data supplied'
            ))
    elif content_type == 'multipart/form-data':
        # as multipart is also used for file uploads, we let an empty
        # request.form go through as long as there are also files in the
        # request.
        if len(request.form) or len(request.files):
            # merge form fields and request files, so we get a single
            # payload to be validated against the resource schema.
            # list() is needed because Python3 items() returns a dict_view,
            # not a list as in Python2.
            return dict(list(request.form.to_dict().items()) +
                        list(request.files.to_dict().items()))
        else:
            abort(400, description=debug_error_message(
                'No multipart/form-data supplied'
            ))
    else:
        abort(400, description=debug_error_message(
            'Unknown or no Content-Type header supplied'))
def insert(self, resource, doc_or_docs):
    """ Inserts a document into a resource collection.

    :param resource: resource being accessed.
    :param doc_or_docs: single document (dict) or list of documents.
    :returns: list of inserted ids.

    .. versionchanged:: 0.6.1
       Support for PyMongo 3.0.

    .. versionchanged:: 0.6
       Support for multiple databases.

    .. versionchanged:: 0.0.9
       More informative error messages.

    .. versionchanged:: 0.0.8
       'write_concern' support.

    .. versionchanged:: 0.0.6
       projection queries ('?projection={"name": 1}')
       'document' param renamed to 'doc_or_docs', making support for bulk
       inserts apparent.

    .. versionchanged:: 0.0.4
       retrieves the target collection via the new config.SOURCES helper.
    """
    datasource, _, _, _ = self._datasource_ex(resource)
    coll = self.get_collection_with_write_concern(datasource, resource)
    if isinstance(doc_or_docs, dict):
        # normalize: insert_many always takes a list
        doc_or_docs = [doc_or_docs]
    try:
        return coll.insert_many(doc_or_docs).inserted_ids
    except pymongo.errors.DuplicateKeyError as e:
        # 409 Conflict: the client can usually recover from this
        abort(409, description=debug_error_message(
            'pymongo.errors.DuplicateKeyError: %s' % e
        ))
    except pymongo.errors.InvalidOperation as e:
        self.app.logger.exception(e)
        abort(500, description=debug_error_message(
            'pymongo.errors.InvalidOperation: %s' % e
        ))
    except pymongo.errors.OperationFailure as e:
        # most likely a 'w' (write_concern) setting which needs an
        # existing ReplicaSet which doesn't exist. Please note that the
        # update will actually succeed (a new ETag will be needed).
        self.app.logger.exception(e)
        abort(500, description=debug_error_message(
            'pymongo.errors.OperationFailure: %s' % e
        ))
def get_document(resource, concurrency_check, **lookup):
    """ Retrieves and return a single document. Since this function is used by
    the editing methods (POST, PATCH, DELETE), we make sure that the client
    request references the current representation of the document before
    returning it. However, this concurrency control may be turned off by
    internal functions.

    :param resource: the name of the resource to which the document belongs
        to.
    :param concurrency_check: boolean check for concurrency control
    :param **lookup: document lookup query

    .. versionchanged:: 0.5
       Concurrency control optional for internal functions.
       ETAG are now stored with the document (#369).

    .. versionchanged:: 0.0.9
       More informative error messages.

    .. versionchanged:: 0.0.5
       Pass current resource to ``parse_request``, allowing for proper
       processing of new configuration settings: `filters`, `sorting`,
       `paging`.
    """
    req = parse_request(resource)
    document = app.data.find_one(resource, None, **lookup)
    if document:
        if not req.if_match and config.IF_MATCH and concurrency_check:
            # we don't allow editing unless the client provides an etag
            # for the document
            abort(403, description=debug_error_message(
                'An etag must be provided to edit a document'
            ))

        # ensure the retrieved document has LAST_UPDATED and DATE_CREATED,
        # eventually with same default values as in GET.
        document[config.LAST_UPDATED] = last_updated(document)
        document[config.DATE_CREATED] = date_created(document)

        if req.if_match and concurrency_check:
            # prefer the stored ETag; fall back to computing one on the fly
            etag = document.get(config.ETAG, document_etag(document))
            if req.if_match != etag:
                # client and server etags must match, or we don't allow
                # editing (ensures that client's version of the document
                # is up to date)
                abort(412, description=debug_error_message(
                    'Client and server etags don\'t match'
                ))

    return document
def get_document(resource, concurrency_check, **lookup):
    """ Fetch a single document and, unless disabled, enforce concurrency
    control (If-Match / ETag) before handing it to an editing method
    (POST, PATCH, DELETE).

    :param resource: the name of the resource to which the document belongs
        to.
    :param concurrency_check: boolean check for concurrency control
    :param **lookup: document lookup query

    .. versionchanged:: 0.5
       Concurrency control optional for internal functions.
       ETAG are now stored with the document (#369).

    .. versionchanged:: 0.0.9
       More informative error messages.

    .. versionchanged:: 0.0.5
       Pass current resource to ``parse_request``, allowing for proper
       processing of new configuration settings: `filters`, `sorting`,
       `paging`.
    """
    req = parse_request(resource)
    document = app.data.find_one(resource, None, **lookup)
    if document:
        if concurrency_check and config.IF_MATCH and not req.if_match:
            # editing requires the client to prove it holds the current
            # representation of the document
            abort(403, description=debug_error_message(
                'An etag must be provided to edit a document'))

        # mirror the default values GET would apply for the meta fields
        document[config.LAST_UPDATED] = last_updated(document)
        document[config.DATE_CREATED] = date_created(document)

        if concurrency_check and req.if_match:
            stored_etag = document.get(config.ETAG, document_etag(document))
            if req.if_match != stored_etag:
                # stale client copy: refuse the edit
                abort(412, description=debug_error_message(
                    'Client and server etags don\'t match'))

    return document
def aggregate(self, resource, req):
    """ Runs the resource's default aggregation (groupers/groupees taken
    from the DOMAIN definition) over the documents matching the request's
    `where` clause and returns the result wrapped in a Cursor.

    :param resource: the resource name.
    :param req: an instance of :class:`eve.utils.ParsedRequest`.
    """
    client_projection = {}
    spec = {}
    if req.where:
        try:
            spec = self._sanitize(
                self._jsondatetime(json.loads(
                    req.where, object_hook=json_util.object_hook)))
        except Exception:
            # was a bare `except:`, which would also swallow SystemExit and
            # KeyboardInterrupt; narrowed to Exception. Non-JSON input
            # falls back to the python-style filter syntax.
            try:
                spec = parse(req.where)
            except ParseError:
                abort(400, description=debug_error_message(
                    'Unable to parse `where` clause'
                ))

    bad_filter = validate_filters(spec, resource)
    if bad_filter:
        abort(400, bad_filter)

    if req.projection:
        try:
            client_projection = json.loads(req.projection)
        except Exception:
            # was a bare `except:` — narrowed to Exception
            abort(400, description=debug_error_message(
                'Unable to parse `projection` clause'
            ))

    datasource, spec, projection = self._datasource_ex(resource, spec,
                                                       client_projection)

    groupers = config.DOMAIN[resource]["default_groupers"]
    groupees = config.DOMAIN[resource]["default_groupees"]

    # $group stage: one _id key per grouper field, one accumulator
    # ($sum, $avg, ...) per groupee definition.
    group_val = {}
    group_val["_id"] = {g: "$%s" % g for g in groupers}
    for group_info in groupees:
        name = group_info["name"]
        group_type = group_info["type"]
        group_val[name] = {"$%s" % group_type: "$%s" % name}

    pipeline = []
    pipeline.append({"$match": spec})
    pipeline.append({"$project": projection})
    pipeline.append({"$group": group_val})
    pipeline.append({"$limit": 1000})

    docs = self.driver.db[datasource].aggregate(pipeline)["result"]
    cursor = Cursor(docs)  # gives required functions to returned result
    return cursor
def home_endpoint():
    """ Home/API entry point. Will provide links to each available resource
    when HATEOAS is enabled; aborts with 404 otherwise.

    .. versionchanged:: 0.4
       Prevent versioning collections from being added in links.

    .. versionchanged:: 0.2
       Use new 'resource_title' setting for link titles.

    .. versionchanged:: 0.1.0
       Support for optional HATEOAS.
    """
    if not config.HATEOAS:
        # nothing to show without HATEOAS
        abort(404, debug_error_message("HATEOAS is disabled so we have no data"
                                       " to display at the API homepage."))

    links = []
    for resource in config.DOMAIN.keys():
        # skip the shadow collections that hold document versions
        if resource.endswith(config.VERSIONS):
            continue
        links.append({
            'href': '%s' % resource_uri(resource),
            'title': '%s' % config.DOMAIN[resource]['resource_title'],
        })
    response = {config.LINKS: {'child': links}}
    return send_response(None, (response,))
def resolve_document_version(document, resource, method, latest_doc=None):
    """ Version number logic for all methods.

    :param document: the document in question.
    :param resource: the resource of the request/document.
    :param method: method corresponding to the request.
    :param latest_doc: the most recent version of the document.

    .. versionadded:: 0.4
    """
    resource_def = app.config['DOMAIN'][resource]
    version = app.config['VERSION']
    latest_version = app.config['LATEST_VERSION']

    if resource_def['versioning'] is True:
        # especially on collection endpoints, we don't want to incur an extra
        # lookup if we are already pulling the latest version
        if method == 'GET' and latest_doc is None:
            if version not in document:
                # well it should be... the api designer must have turned on
                # versioning after data was already in the collection or the
                # collection has been modified without respecting versioning
                document[version] = 1  # the first saved version will be 2
            document[latest_version] = document[version]

        # include latest_doc if the request is for an older version so that we
        # can set the latest_version field in the response
        if method == 'GET' and latest_doc is not None:
            if version not in latest_doc:
                # well it should be... the api designer must have turned on
                # versioning after data was already in the collection or the
                # collection has been modified without respecting versioning
                document[version] = 1  # the first saved version will be 2
                document[latest_version] = document[version]
            else:
                document[latest_version] = latest_doc[version]
                if version not in document:
                    # this version was put in the database before versioning
                    # was turned on or outside of Eve
                    document[version] = 1

        if method == 'POST':
            # this one is easy! it is a new document
            document[version] = 1

        if method == 'PUT' or method == 'PATCH' or \
                (method == 'DELETE' and resource_def['soft_delete'] is True):
            if not latest_doc:
                abort(500, description=debug_error_message(
                    'I need the latest document here!'))
            if version in latest_doc:
                # all is right in the world :)
                document[version] = latest_doc[version] + 1
            else:
                # if versioning was just turned on, then we will start
                # versioning now. if the db was modified outside of Eve or
                # versioning was turned off for a while, version numbers will
                # not be consistent! you have been warned
                document[version] = 1
def resolve_document_version(document, resource, method, latest_doc=None):
    """ Apply the version-numbering rules for every HTTP method, mutating
    `document` in place.

    :param document: the document in question.
    :param resource: the resource of the request/document.
    :param method: method corresponding to the request.
    :param latest_doc: the most recent version of the document.

    .. versionadded:: 0.4
    """
    resource_def = app.config['DOMAIN'][resource]
    version = app.config['VERSION']
    latest_version = app.config['LATEST_VERSION']

    if resource_def['versioning'] is not True:
        # versioning disabled for this resource: nothing to do
        return

    if method == 'GET' and latest_doc is None:
        # collection endpoints already carry the latest version; avoid an
        # extra lookup
        if version not in document:
            # versioning must have been enabled after this document was
            # written, or the collection was modified out-of-band
            document[version] = 1  # the first saved version will be 2
        document[latest_version] = document[version]

    if method == 'GET' and latest_doc is not None:
        # request is for an older version: latest_doc lets us fill in the
        # latest_version field of the response
        if version not in latest_doc:
            # same late-versioning situation as above
            document[version] = 1  # the first saved version will be 2
            document[latest_version] = document[version]
        else:
            document[latest_version] = latest_doc[version]
            if version not in document:
                # document predates versioning (or was written outside Eve)
                document[version] = 1

    if method == 'POST':
        # brand new document: it starts at version 1
        document[version] = 1

    if method in ('PUT', 'PATCH') or \
            (method == 'DELETE' and resource_def['soft_delete'] is True):
        if not latest_doc:
            abort(500, description=debug_error_message(
                'I need the latest document here!'))
        if version in latest_doc:
            # normal case: bump the stored version
            document[version] = latest_doc[version] + 1
        else:
            # versioning was just enabled (or was off for a while); start
            # counting now — historical numbers may be inconsistent
            document[version] = 1
def _best_mime():
    """ Picks the best match between the mime types the client accepts and
    the ones supported by the configured renderers, returning the mime type
    together with its renderer.

    .. versionchanged:: 0.8
       Support for optional renderers via RENDERERS. XML and JSON
       configuration keywords removed.

    .. versionchanged:: 0.3
       Support for optional renderers via XML and JSON configuration
       keywords.
    """
    renders = {}
    supported = []
    for renderer_cls in app.config.get('RENDERERS'):
        renderer = import_from_string(renderer_cls)
        for mime_type in renderer.mime:
            renders[mime_type] = renderer
            supported.append(mime_type)

    if not supported:
        abort(500, description=debug_error_message(
            'Configuration error: no supported mime types'))

    # fall back to the first configured mime when content negotiation fails
    chosen = request.accept_mimetypes.best_match(supported) or supported[0]
    return chosen, renders[chosen]
def replace(self, resource, id_, document, original):
    """ Replaces a graph node.

    :param resource: resource being accessed.
    :param id_: the unique id of the node.
    :param document: the new json document
    :param original: definition of the json document that should be updated.
        NOTE(review): `original` is not used in this body — presumably kept
        for interface compatibility; confirm against the base class.
    :raise OriginalChangedError: raised if the database layer notices a
        change from the supplied `original` parameter.
    """
    label, _, _, _ = self._datasource_ex(resource, [])
    id_field = config.DOMAIN[resource]['id_field']
    filter_ = {id_field: id_}
    old_node = self.driver.select(label, **filter_).first()
    # Delete the old node
    # (but first make sure it actually exists)
    if old_node is None:
        abort(500, description=debug_error_message('Object not existent'))
    self.remove(resource, filter_)
    # create and insert the new one
    node = create_node(label, document)
    node[id_field] = id_
    self.driver.graph.create(node)
def replace(self, resource, id_, document):
    """ Replaces an existing document in its entirety.

    :param resource: resource being accessed.
    :param id_: value of the resource's ID_FIELD for the target document.
    :param document: the replacement json document.

    .. versionchanged:: 0.3.0
       Custom ID_FIELD lookups would fail. See #203.

    .. versionchanged:: 0.2
       Don't explicitly convert ID_FIELD to ObjectId anymore, so we can
       also process different types (UUIDs etc).

    .. versionadded:: 0.1.0
    """
    lookup = {config.ID_FIELD: id_}
    datasource, filter_, _, _ = self._datasource_ex(resource, lookup)

    # TODO consider using find_and_modify() instead. The document might
    # have changed since the ETag was computed. This would require getting
    # the original document as an argument though.
    collection = self.driver.db[datasource]
    try:
        collection.update(filter_, document, **self._wc(resource))
    except pymongo.errors.OperationFailure as e:
        # see comment in :func:`insert()`.
        abort(500, description=debug_error_message(
            'pymongo.errors.OperationFailure: %s' % e))
def synthesize_versioned_document(document, delta, resource_def):
    """ Synthesizes an old document from the latest document and the values
    of all versioned fields from the old version. This is accomplished by
    removing all versioned fields from the latest document before updating
    fields to ensure that fields with required=False can be removed.

    :param document: the current version of a document.
    :param delta: the versioned fields from a specific document version.
    :param resource_def: a resource definition.
    :returns: the synthesized (old) document.

    .. versionadded:: 0.4
    """
    old_doc = copy.deepcopy(document)

    if versioned_id_field() not in delta:
        # the projection stripped the versioned id; we cannot reconstruct
        abort(
            400,
            description=debug_error_message(
                'You must include %s in any projection with a version query.'
                % versioned_id_field()))
    # rename the versioned id key back to the canonical ID_FIELD
    delta[app.config['ID_FIELD']] = delta[versioned_id_field()]
    del delta[versioned_id_field()]

    # remove all versioned fields from document
    fields = versioned_fields(resource_def)
    for field in document:
        if field in fields:
            del old_doc[field]

    # add versioned fields
    old_doc.update(delta)
    return old_doc
def _best_mime():
    """ Negotiates the response mime type against the configured renderers
    and returns the winning mime type with its renderer.

    .. versionchanged:: 0.8
       Support for optional renderers via RENDERERS. XML and JSON
       configuration keywords removed.

    .. versionchanged:: 0.3
       Support for optional renderers via XML and JSON configuration
       keywords.
    """
    renders = {}
    supported = []
    for renderer_cls in app.config.get("RENDERERS"):
        renderer = import_from_string(renderer_cls)
        for mime_type in renderer.mime:
            renders[mime_type] = renderer
            supported.append(mime_type)

    if not supported:
        abort(
            500,
            description=debug_error_message(
                "Configuration error: no supported mime types"),
        )

    best = request.accept_mimetypes.best_match(supported)
    if not best:
        # negotiation failed: default to the first configured mime type
        best = supported[0]
    return best, renders[best]
def insert(self, resource, doc_or_docs):
    """Called when performing POST request.

    Saves one document or a list of documents through the MongoEngine
    model layer, refreshing each input dict from the saved model and
    recomputing its ETag.

    :param resource: resource being accessed.
    :param doc_or_docs: a single document dict or a list of them; each
        dict is updated in place with the stored representation.
    :returns: list of the ids of the inserted documents.
    """
    datasource, filter_, _, _ = self._datasource_ex(resource)
    try:
        # normalize to a list so a single doc takes the same path
        if not isinstance(doc_or_docs, list):
            doc_or_docs = [doc_or_docs]

        ids = []
        for doc in doc_or_docs:
            model = self._doc_to_model(resource, doc)
            model.save(write_concern=self._wc(resource))
            ids.append(model.id)
            # reflect what was actually stored back into the input dict
            doc.update(dict(model.to_mongo()))
            doc[config.ID_FIELD] = model.id
            # Recompute ETag since MongoEngine can modify the data via
            # save hooks.
            clean_doc(doc)
            doc['_etag'] = document_etag(doc)
        return ids
    except pymongo.errors.OperationFailure as e:
        # most likely a 'w' (write_concern) setting which needs an
        # existing ReplicaSet which doesn't exist. Please note that the
        # update will actually succeed (a new ETag will be needed).
        abort(500, description=debug_error_message(
            'pymongo.errors.OperationFailure: %s' % e))
    except Exception as exc:
        self._handle_exception(exc)
def on_fetched_item_callback(resource, response):
    """Removes `_id` from the fetched document and embeds related items
    requested in the `embed` URL query parameter. Allows multilevel
    embedding (limited to 3 levels). Nested embedded relations are separated
    by dot. Example:

        .../organizations/1234567890?embed=["parent", "memberships.person"]

    We cannot use Eve's built-in embedded resource serialization because it
    handles only entities directly referenced by a field in the document and
    replaces that field. However, we need to embed reverse relations too,
    e.g. `memberships` in the organization entity lists all memberships
    referencing the organization. Furthermore, unlike the Eve built-in
    embedding Popolo requires that referenced items are embedded as a field
    with different name from the referencing one (e.g. `organization` vs.
    `organization_id`) and allows multilevel embedding.

    :param resource: the resource name.
    :param response: the fetched document; mutated in place.

    Fix: the bare ``except:`` is narrowed to ``except Exception`` so
    SystemExit/KeyboardInterrupt are no longer swallowed; any failure while
    parsing or resolving the `embed` clause still maps to a 400.
    """
    del response['_id']
    if 'embed' in request.args:
        try:
            embed = json.loads(request.args['embed'])
            for path in embed:
                # seed the visited list with this item to break cycles
                _embed_relation(resource, path, response,
                                [(resource, response['id'])])
        except Exception:
            # deliberately broad: a bad path inside _embed_relation is a
            # client error just like malformed JSON
            abort(400,
                  description=debug_error_message(
                      'Unable to parse `embed` clause'))
def _best_mime():
    """ Returns the best match between the requested mime type and the ones
    supported by Eve. Along with the mime, also the corresponding render
    function is returned.

    :returns: a ``(mime_type, renderer)`` tuple.

    .. versionchanged:: 0.3
       Support for optional renderers via XML and JSON configuration
       keywords.
    """
    supported = []
    renders = {}
    for mime in _MIME_TYPES:
        # only mime types that have not been disabled via configuration
        if app.config.get(mime['tag'], True):
            for mime_type in mime['mime']:
                supported.append(mime_type)
                renders[mime_type] = mime['renderer']

    if len(supported) == 0:
        # every renderer was disabled via configuration
        abort(500, description=debug_error_message(
            'Configuration error: no supported mime types'))

    # default to the first supported type when negotiation finds no match
    best_match = request.accept_mimetypes.best_match(supported) or \
        supported[0]
    return best_match, renders[best_match]
def _best_mime():
    """ Negotiates the response mime type against the enabled `_MIME_TYPES`
    entries; returns the chosen mime type and its render function.

    .. versionchanged:: 0.3
       Support for optional renderers via XML and JSON configuration
       keywords.
    """
    supported = []
    renders = {}
    for mime in _MIME_TYPES:
        # skip renderers that have been disabled via configuration
        if not app.config.get(mime['tag'], True):
            continue
        for mime_type in mime['mime']:
            supported.append(mime_type)
            renders[mime_type] = mime['renderer']

    if not supported:
        abort(500, description=debug_error_message(
            'Configuration error: no supported mime types'))

    chosen = request.accept_mimetypes.best_match(supported) or supported[0]
    return chosen, renders[chosen]
def home_endpoint():
    """ Home/API entry point. Will provide links to each available resource
    when HATEOAS is enabled; aborts with 404 otherwise.

    .. versionchanged:: 0.5
       Resource URLs are relative to API root.
       Don't list internal resources.

    .. versionchanged:: 0.4
       Prevent versioning collections from being added in links.

    .. versionchanged:: 0.2
       Use new 'resource_title' setting for link titles.

    .. versionchanged:: 0.1.0
       Support for optional HATEOAS.
    """
    if not config.HATEOAS:
        abort(404, debug_error_message("HATEOAS is disabled so we have no data"
                                       " to display at the API homepage."))

    links = []
    for resource, settings in config.DOMAIN.items():
        internal = settings['internal_resource']
        # skip version shadow collections and internal-only resources
        if resource.endswith(config.VERSIONS) or bool(internal):
            continue
        links.append({'href': '%s' % config.URLS[resource],
                      'title': '%s' % settings['resource_title']})
    response = {config.LINKS: {'child': links}}
    return send_response(None, (response,))
def is_empty(self, resource):
    """ Returns True if resource is empty; False otherwise. If there is
    no predefined filter on the resource we're relying on the
    db.collection.count(). However, if we do have a predefined filter we
    have to fallback on the find() method, which can be much slower.

    :param resource: resource being accessed.

    .. versionchanged:: 0.6
       Support for multiple databases.

    .. versionadded:: 0.3

    Fix: the bare ``except: pass`` guarding the ``del`` is replaced with
    ``dict.pop(..., None)``, which removes the key if present without
    swallowing unrelated exceptions.
    """
    datasource, filter_, _, _ = self.datasource(resource)
    coll = self.pymongo(resource).db[datasource]

    try:
        if not filter_:
            # faster, but we can only afford it if there's no predefined
            # filter on the datasource.
            return coll.count() == 0
        else:
            # fallback on find() since we have a filter to apply.
            # drop the If-Modified-Since clause (if any) so we check
            # whether the whole resultset is missing, no matter the IMS
            # header.
            filter_.pop(config.LAST_UPDATED, None)
            return coll.find(filter_).count() == 0
    except pymongo.errors.OperationFailure as e:
        # see comment in :func:`insert()`.
        self.app.logger.exception(e)
        abort(500, description=debug_error_message(
            'pymongo.errors.OperationFailure: %s' % e
        ))
def synthesize_versioned_document(document, delta, resource_def):
    """ Rebuilds an old document version: strips all versioned fields from a
    deep copy of the latest document (so that fields with required=False can
    disappear), then overlays the versioned field values from `delta`.

    :param document: the current version of a document.
    :param delta: the versioned fields from a specific document version.
    :param resource_def: a resource definition.
    :returns: the synthesized (old) document.

    .. versionadded:: 0.4
    """
    if versioned_id_field() not in delta:
        # cannot reconstruct without the versioned id
        abort(400, description=debug_error_message(
            'You must include %s in any projection with a version query.'
            % versioned_id_field()))

    old_doc = copy.deepcopy(document)

    # restore the canonical id key in the delta
    delta[app.config['ID_FIELD']] = delta.pop(versioned_id_field())

    # strip every versioned field from the copy of the latest document
    versioned = versioned_fields(resource_def)
    for field in document:
        if field in versioned:
            del old_doc[field]

    # overlay the historical values
    old_doc.update(delta)
    return old_doc
def insert(self, resource, doc_or_docs):
    """Called when performing POST request.

    Persists one document (returns its id) or a list of documents (returns
    the list of ids) via the MongoEngine model layer, syncing each input
    dict with the stored representation and cleaning it afterwards.
    """
    datasource, filter_, _, _ = self._datasource_ex(resource)
    try:
        if isinstance(doc_or_docs, list):
            new_ids = []
            for document in doc_or_docs:
                model = self._doc_to_model(resource, document)
                model.save(write_concern=self._wc(resource))
                new_ids.append(model.id)
                # reflect the stored state back into the input dict
                document.update(dict(model.to_mongo()))
                document[config.ID_FIELD] = model.id
                clean_doc(document)
            return new_ids

        # single-document path
        model = self._doc_to_model(resource, doc_or_docs)
        model.save(write_concern=self._wc(resource))
        doc_or_docs.update(dict(model.to_mongo()))
        doc_or_docs[config.ID_FIELD] = model.id
        clean_doc(doc_or_docs)
        return model.id
    except pymongo.errors.OperationFailure as e:
        # most likely a 'w' (write_concern) setting which needs an
        # existing ReplicaSet which doesn't exist. Please note that the
        # update will actually succeed (a new ETag will be needed).
        abort(500, description=debug_error_message(
            'pymongo.errors.OperationFailure: %s' % e
        ))
    except Exception as exc:
        self._handle_exception(exc)
def is_empty(self, resource):
    """ Returns True if resource is empty; False otherwise. If there is
    no predefined filter on the resource we're relying on the
    db.collection.count(). However, if we do have a predefined filter we
    have to fallback on the find() method, which can be much slower.

    :param resource: resource being accessed.

    .. versionchanged:: 0.6
       Support for multiple databases.

    .. versionadded:: 0.3

    Fix: replaced the bare ``except: pass`` around the key deletion with
    ``dict.pop(..., None)`` — same effect when the key is absent, but no
    longer hides unrelated errors.
    """
    datasource, filter_, _, _ = self.datasource(resource)
    coll = self.pymongo(resource).db[datasource]

    try:
        if not filter_:
            # faster, but we can only afford it if there's no predefined
            # filter on the datasource.
            return coll.count() == 0
        else:
            # fallback on find() since we have a filter to apply.
            # remove any If-Modified-Since constraint so we test whether
            # the whole resultset is missing, no matter the IMS header.
            filter_.pop(config.LAST_UPDATED, None)
            return coll.find(filter_).count() == 0
    except pymongo.errors.OperationFailure as e:
        # see comment in :func:`insert()`.
        self.app.logger.exception(e)
        abort(500, description=debug_error_message(
            'pymongo.errors.OperationFailure: %s' % e))
def update(self, resource, id_, updates):
    """Updates a collection document.

    :param resource: resource being accessed.
    :param id_: ObjectId-compatible id of the target document.
    :param updates: dict of fields to set on the document.

    .. versionchanged:: 0.0.9
       More informative error messages.

    .. versionchanged:: 0.0.8
       'write_concern' support.

    .. versionchanged:: 0.0.6
       projection queries ('?projection={"name": 1}')

    .. versionchanged:: 0.0.4
       retrieves the target collection via the new config.SOURCES helper.
    """
    datasource, filter_, _ = self._datasource_ex(resource,
                                                 {ID_FIELD: ObjectId(id_)})

    # TODO consider using find_and_modify() instead. The document might
    # have changed since the ETag was computed. This would require getting
    # the original document as an argument though.
    try:
        # partial update: only the given fields are replaced ($set)
        self.driver.db[datasource].update(filter_, {"$set": updates},
                                          **self._wc(resource))
    except pymongo.errors.OperationFailure as e:
        # see comment in :func:`insert()`.
        abort(500, description=debug_error_message(
            'pymongo.errors.OperationFailure: %s' % e
        ))
def remove(self, resource, id_=None):
    """Removes a document or the entire set of documents from a collection.

    :param resource: resource being accessed.
    :param id_: ObjectId-compatible id of the document to remove; when
        None, the whole (filtered) collection is removed.

    .. versionchanged:: 0.0.9
       More informative error messages.

    .. versionchanged:: 0.0.8
       'write_concern' support.

    .. versionchanged:: 0.0.6
       projection queries ('?projection={"name": 1}')

    .. versionchanged:: 0.0.4
       retrieves the target collection via the new config.SOURCES helper.

    .. versionadded:: 0.0.2
        Support for deletion of entire documents collection.
    """
    # no id means "remove everything matching the datasource filter"
    query = {ID_FIELD: ObjectId(id_)} if id_ else None
    datasource, filter_, _ = self._datasource_ex(resource, query)
    try:
        self.driver.db[datasource].remove(filter_, **self._wc(resource))
    except pymongo.errors.OperationFailure as e:
        # see comment in :func:`insert()`.
        abort(500, description=debug_error_message(
            'pymongo.errors.OperationFailure: %s' % e
        ))
def insert(self, resource, doc_or_docs):
    """Inserts a document (or a list of documents) into a resource
    collection, honoring the resource's write concern.

    :param resource: resource being accessed.
    :param doc_or_docs: document dict or list of document dicts.
    :returns: whatever the driver's ``insert`` returns (id or list of ids).

    .. versionchanged:: 0.0.9
       More informative error messages.

    .. versionchanged:: 0.0.8
       'write_concern' support.

    .. versionchanged:: 0.0.6
       projection queries ('?projection={"name": 1}')
       'document' param renamed to 'doc_or_docs', making support for bulk
       inserts apparent.

    .. versionchanged:: 0.0.4
       retrieves the target collection via the new config.SOURCES helper.
    """
    datasource, filter_, _ = self._datasource_ex(resource)
    collection = self.driver.db[datasource]
    write_concern = self._wc(resource)
    try:
        return collection.insert(doc_or_docs, **write_concern)
    except pymongo.errors.OperationFailure as e:
        # most likely a 'w' (write_concern) setting which needs an
        # existing ReplicaSet which doesn't exist. Please note that the
        # update will actually succeed (a new ETag will be needed).
        abort(500, description=debug_error_message(
            'pymongo.errors.OperationFailure: %s' % e))
def insert(self, resource, doc_or_docs):
    """Called when performing POST request.

    Saves one document (returns its id) or a list of documents (returns
    the list of ids) through the MongoEngine model layer, updating each
    input dict with the stored representation.
    """
    datasource, filter_, _, _ = self._datasource_ex(resource)
    try:
        if isinstance(doc_or_docs, list):
            ids = []
            for doc in doc_or_docs:
                model = self._doc_to_model(resource, doc)
                model.save(write_concern=self._wc(resource))
                ids.append(model.id)
                # reflect what was actually stored back into the input dict
                doc.update(dict(model.to_mongo()))
                doc[config.ID_FIELD] = model.id
            return ids
        else:
            model = self._doc_to_model(resource, doc_or_docs)
            model.save(write_concern=self._wc(resource))
            doc_or_docs.update(dict(model.to_mongo()))
            doc_or_docs[config.ID_FIELD] = model.id
            return model.id
    except pymongo.errors.OperationFailure as e:
        # most likely a 'w' (write_concern) setting which needs an
        # existing ReplicaSet which doesn't exist. Please note that the
        # update will actually succeed (a new ETag will be needed).
        abort(500, description=debug_error_message(
            'pymongo.errors.OperationFailure: %s' % e
        ))
def embedded_document(reference, data_relation, field_name): """ Returns a document to be embedded by reference using data_relation taking into account document versions :param reference: reference to the document to be embedded. :param data_relation: the relation schema definition. :param field_name: field name used in abort message only .. versionadded:: 0.5 """ # Retrieve and serialize the requested document if 'version' in data_relation and data_relation['version'] is True: # support late versioning if reference[config.VERSION] == 1: # there is a chance this document hasn't been saved # since versioning was turned on embedded_doc = missing_version_field(data_relation, reference) if embedded_doc is None: # this document has been saved since the data_relation was # made - we basically do not have the copy of the document # that existed when the data relation was made, but we'll # try the next best thing - the first version reference[config.VERSION] = 1 embedded_doc = get_data_version_relation_document( data_relation, reference) latest_embedded_doc = embedded_doc else: # grab the specific version embedded_doc = get_data_version_relation_document( data_relation, reference) # grab the latest version latest_embedded_doc = get_data_version_relation_document( data_relation, reference, latest=True) # make sure we got the documents if embedded_doc is None or latest_embedded_doc is None: # your database is not consistent!!! that is bad abort(404, description=debug_error_message( "Unable to locate embedded documents for '%s'" % field_name )) # build the response document build_response_document(embedded_doc, data_relation['resource'], [], latest_embedded_doc) else: subresource = data_relation['resource'] embedded_doc = app.data.find_one(subresource, None, **{config.ID_FIELD: reference}) if embedded_doc: resolve_media_files(embedded_doc, subresource) return embedded_doc
def find(self, resource, req):
    """ Runs a query against the resource's collection, honoring the
    request's paging, sorting, geo (`latLng`) / `city` filters, projection
    and If-Modified-Since clauses.

    :param resource: resource being accessed.
    :param req: an instance of :class:`eve.utils.ParsedRequest`.
    :returns: a pymongo cursor over the matching documents.

    Fixes: the bare ``except:`` on the projection parse is narrowed, and
    the ``ast.literal_eval(str([...]))`` round-trip is replaced by building
    the sort pair directly (same value, no re-parsing).
    """
    args = dict()

    if req.max_results:
        args['limit'] = req.max_results

    if req.page > 1:
        args['skip'] = (req.page - 1) * req.max_results

    # TODO sort syntax should probably be coherent with 'where': either
    # mongo-like # or python-like. Currently accepts only mongo-like sort
    # syntax.
    # TODO should validate on unknown sort fields (mongo driver doesn't
    # return an error)
    if req.sortMethod:
        sortMethod = req.sortMethod
    else:
        sortMethod = 1
    if req.sort:
        # build the (field, direction) pair directly; the old
        # str()/literal_eval round-trip produced the very same list
        args['sort'] = [(req.sort, sortMethod)]

    client_projection = {}
    spec = {}

    if req.latLng:
        # geo filter takes precedence over the city filter
        spec['latLng'] = {'$within': {'$center':
                          [[float(req.latLng[0]), float(req.latLng[1])],
                           .75]}}
    if not req.latLng and req.city:
        spec['city'] = req.city

    bad_filter = validate_filters(spec, resource)
    if bad_filter:
        abort(400, bad_filter)

    if req.projection:
        try:
            client_projection = json.loads(req.projection)
        except (TypeError, ValueError):
            abort(400, description=debug_error_message(
                'Unable to parse `projection` clause'
            ))

    datasource, spec, projection = self._datasource_ex(resource, spec,
                                                       client_projection)

    if req.if_modified_since:
        spec[config.LAST_UPDATED] = \
            {'$gt': req.if_modified_since}

    if len(spec) > 0:
        args['spec'] = spec

    if projection is not None:
        args['fields'] = projection

    return self.driver.db[datasource].find(**args)
def embedded_document(reference, data_relation, field_name): """ Returns a document to be embedded by reference using data_relation taking into account document versions :param reference: reference to the document to be embedded. :param data_relation: the relation schema definition. :param field_name: field name used in abort message only .. versionadded:: 0.5 """ # Retrieve and serialize the requested document if 'version' in data_relation and data_relation['version'] is True: # support late versioning if reference[config.VERSION] == 1: # there is a chance this document hasn't been saved # since versioning was turned on embedded_doc = missing_version_field(data_relation, reference) if embedded_doc is None: # this document has been saved since the data_relation was # made - we basically do not have the copy of the document # that existed when the data relation was made, but we'll # try the next best thing - the first version reference[config.VERSION] = 1 embedded_doc = get_data_version_relation_document( data_relation, reference) latest_embedded_doc = embedded_doc else: # grab the specific version embedded_doc = get_data_version_relation_document( data_relation, reference) # grab the latest version latest_embedded_doc = get_data_version_relation_document( data_relation, reference, latest=True) # make sure we got the documents if embedded_doc is None or latest_embedded_doc is None: # your database is not consistent!!! that is bad abort(404, description=debug_error_message( "Unable to locate embedded documents for '%s'" % field_name)) # build the response document build_response_document(embedded_doc, data_relation['resource'], [], latest_embedded_doc) else: subresource = data_relation['resource'] embedded_doc = app.data.find_one(subresource, None, **{config.ID_FIELD: reference}) if embedded_doc: resolve_media_files(embedded_doc, subresource) return embedded_doc
def update(self, resource, id_, updates):
    """ Updates a collection document via a `$set` partial update.

    :param resource: resource being accessed.
    :param id_: value of the resource's ID_FIELD for the target document.
    :param updates: dict of fields to set.

    .. versionchanged:: 0.4
       Return a 400 on pymongo DuplicateKeyError.

    .. versionchanged:: 0.3.0
       Custom ID_FIELD lookups would fail. See #203.

    .. versionchanged:: 0.2
       Don't explicitly convert ID_FIELD to ObjectId anymore, so we can
       also process different types (UUIDs etc).

    .. versionchanged:: 0.0.9
       More informative error messages.

    .. versionchanged:: 0.0.8
       'write_concern' support.

    .. versionchanged:: 0.0.6
       projection queries ('?projection={"name": 1}')

    .. versionchanged:: 0.0.4
       retrieves the target collection via the new config.SOURCES helper.
    """
    lookup = {config.ID_FIELD: id_}
    datasource, filter_, _, _ = self._datasource_ex(resource, lookup)

    # TODO consider using find_and_modify() instead. The document might
    # have changed since the ETag was computed. This would require getting
    # the original document as an argument though.
    collection = self.driver.db[datasource]
    change = {"$set": updates}
    try:
        collection.update(filter_, change, **self._wc(resource))
    except pymongo.errors.DuplicateKeyError as e:
        # a unique-index violation is the client's fault
        abort(400, description=debug_error_message(
            'pymongo.errors.DuplicateKeyError: %s' % e))
    except pymongo.errors.OperationFailure as e:
        # see comment in :func:`insert()`.
        abort(500, description=debug_error_message(
            'pymongo.errors.OperationFailure: %s' % e))
def pre_delete_realm(item):
    """ Hook before deleting a realm. Denies deletion if realm has
    child / children

    :param item: fields of the item / record
    :type item: dict
    :return: None

    Fix: use container truthiness instead of the non-idiomatic
    ``len(...) > 0`` comparison (same behavior for any sized container).
    """
    if item['_children']:
        abort(409, description=debug_error_message("Item have children, "
                                                   "so can't delete it"))
def update(self, resource, id_, updates, *args, **kwargs):
    """Called when performing PATCH request.

    Delegates the partial update to the configured updater; extra
    positional/keyword arguments are accepted but not forwarded.

    :param resource: resource being accessed.
    :param id_: id of the document to update.
    :param updates: dict of fields to change.
    :returns: whatever the underlying updater returns.
    """
    try:
        return self.updater.update(resource, id_, updates)
    except pymongo.errors.OperationFailure as e:
        # see comment in :func:`insert()`.
        abort(500, description=debug_error_message(
            'pymongo.errors.OperationFailure: %s' % e))
    except Exception as exc:
        # delegate everything else to the shared exception handler
        self._handle_exception(exc)
def replace(self, resource, id_, document):
    """Called when performing PUT request.

    Replaces the stored document by saving a freshly-built model.

    :param resource: resource being accessed.
    :param id_: id of the document to replace.
        NOTE(review): `id_` is not used here — presumably the model carries
        its own id; confirm against `_doc_to_model`.
    :param document: the replacement json document.
    """
    try:
        # FIXME: filters?
        model = self._doc_to_model(resource, document)
        model.save(write_concern=self._wc(resource))
    except pymongo.errors.OperationFailure as e:
        # see comment in :func:`insert()`.
        abort(500, description=debug_error_message(
            'pymongo.errors.OperationFailure: %s' % e
        ))