def test_no_spec(self):
    """A document passed with spec=None must come through unchanged."""
    obj = {u'body': {'event': 'start_backup'}, 'ttl': 300}
    raw = six.text_type(json.dumps(obj, ensure_ascii=False))

    parsed = utils.deserialize(io.StringIO(raw), len(raw))

    result = utils.sanitize(parsed, spec=None)
    self.assertEqual(obj, result)

    # The *spec* keyword is expected to default to None
    result_default = utils.sanitize(parsed)
    self.assertEqual(result, result_default)
def test_no_spec(self):
    """sanitize() with spec=None should be an identity transform."""
    obj = {u"body": {"event": "start_backup"}, "ttl": 300}
    document = six.text_type(json.dumps(obj, ensure_ascii=False))
    stream = io.StringIO(document)

    deserialized = utils.deserialize(stream, len(document))
    filtered = utils.sanitize(deserialized, spec=None)
    self.assertEqual(obj, filtered)

    # Omitting *spec* entirely must behave the same as spec=None
    self.assertEqual(filtered, utils.sanitize(deserialized))
def on_put(self, req, resp, project_id, queue_name):
    """Replace a queue's metadata with the request body."""
    LOG.debug(u'Queue metadata PUT - queue: %(queue)s, '
              u'project: %(project)s',
              {'queue': queue_name, 'project': project_id})

    try:
        # Reject oversized bodies before attempting to parse them
        self._validate.queue_metadata_length(req.content_length)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))

    # Parse the metadata document from the request stream
    doc = wsgi_utils.deserialize(req.stream, req.content_length)
    metadata = wsgi_utils.sanitize(doc, spec=None)

    try:
        self._queue_ctrl.set_metadata(queue_name,
                                      metadata=metadata,
                                      project=project_id)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))
    except storage_errors.QueueDoesNotExist:
        raise falcon.HTTPNotFound()
    except Exception as err:
        LOG.exception(err)
        description = _(u'Metadata could not be updated.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    resp.status = falcon.HTTP_204
    resp.location = req.path
def on_patch(self, req, resp, project_id, queue_name, claim_id):
    """Update (e.g. renew) an existing claim."""
    # Parse claim metadata (e.g., TTL), keeping only fields
    # permitted by the patch spec
    doc = wsgi_utils.deserialize(req.stream, req.content_length)
    metadata = wsgi_utils.sanitize(doc, CLAIM_PATCH_SPEC)

    try:
        self._validate.claim_updating(metadata)
        self._claim_controller.update(queue_name,
                                      claim_id=claim_id,
                                      metadata=metadata,
                                      project=project_id)
        resp.status = falcon.HTTP_204
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))
    except storage_errors.DoesNotExist as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPNotFound(six.text_type(err))
    except Exception:
        description = _(u'Claim could not be updated.')
        LOG.exception(description)
        raise wsgi_errors.HTTPServiceUnavailable(description)
def on_put(self, req, resp, project_id, topic_name):
    """Create a topic, optionally seeding it with metadata."""
    try:
        # Reject oversized bodies before attempting to parse them
        self._validate.queue_metadata_length(req.content_length)

        # Parse topic metadata only when a body was actually sent
        metadata = None
        if req.content_length:
            doc = wsgi_utils.deserialize(req.stream, req.content_length)
            metadata = wsgi_utils.sanitize(doc)
        self._validate.queue_metadata_putting(metadata)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))

    try:
        created = self._topic_controller.create(topic_name,
                                                metadata=metadata,
                                                project=project_id)
    except storage_errors.FlavorDoesNotExist as err:
        LOG.exception('Flavor "%s" does not exist', topic_name)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))
    except Exception:
        description = _(u'Topic could not be created.')
        LOG.exception(description)
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # 201 only when the topic did not exist before this call
    resp.status = falcon.HTTP_201 if created else falcon.HTTP_204
    resp.location = req.path
def on_patch(self, req, resp, project_id, queue_name, claim_id):
    """Update (e.g. renew) an existing claim."""
    # Parse claim metadata (e.g., TTL), keeping only fields
    # permitted by the patch spec
    doc = wsgi_utils.deserialize(req.stream, req.content_length)
    metadata = wsgi_utils.sanitize(doc, self._claim_patch_spec)

    try:
        self._validate.claim_updating(metadata)
        self._claim_controller.update(queue_name,
                                      claim_id=claim_id,
                                      metadata=metadata,
                                      project=project_id)
        resp.status = falcon.HTTP_204
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))
    except storage_errors.DoesNotExist as err:
        LOG.debug(err)
        raise falcon.HTTPNotFound()
    except Exception as err:
        LOG.exception(err)
        description = _(u'Claim could not be updated.')
        raise wsgi_errors.HTTPServiceUnavailable(description)
def on_post(self, req, resp, project_id, queue_name):
    """Claim a batch of messages from the queue."""
    LOG.debug(u'Claims collection POST - queue: %(queue)s, '
              u'project: %(project)s',
              {'queue': queue_name, 'project': project_id})

    # Honor an explicit cap on the number of messages to claim
    limit = req.get_param_as_int('limit')
    claim_options = {} if limit is None else {'limit': limit}

    # An empty body may arrive with Content-Length of 0 or with the
    # header missing entirely (None); both mean "use the defaults."
    if not req.content_length:
        metadata = self._default_meta
    else:
        # Parse claim metadata (e.g., TTL), translating parse
        # failures into the appropriate HTTP errors
        doc = wsgi_utils.deserialize(req.stream, req.content_length)
        metadata = wsgi_utils.sanitize(doc, self._claim_post_spec)

    try:
        self._validate.claim_creation(metadata, limit=limit)

        cid, msgs = self._claim_controller.create(queue_name,
                                                  metadata=metadata,
                                                  project=project_id,
                                                  **claim_options)

        # Materialize the claimed messages up front
        # TODO(kgriffs): optimize, along with serialization (below)
        resp_msgs = list(msgs)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))
    except Exception as err:
        LOG.exception(err)
        description = _(u'Claim could not be created.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Render the claimed messages, if any; this assumes the
    # storage driver returned them well-formed.
    if len(resp_msgs) != 0:
        base_path = req.path.rpartition('/')[0]
        resp_msgs = [wsgi_utils.format_message_v1_1(msg, base_path, cid)
                     for msg in resp_msgs]

        resp.location = req.path + '/' + cid
        resp.body = utils.to_json({'messages': resp_msgs})
        resp.status = falcon.HTTP_201
    else:
        resp.status = falcon.HTTP_204
def on_put(self, req, resp, project_id, queue_name):
    """Create a queue, optionally seeding it with metadata."""
    try:
        # Reject oversized bodies before attempting to parse them
        self._validate.queue_metadata_length(req.content_length)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))

    # Parse queue metadata only when a body was actually sent
    metadata = None
    if req.content_length:
        doc = wsgi_utils.deserialize(req.stream, req.content_length)
        metadata = wsgi_utils.sanitize(doc, spec=None)

    try:
        created = self._queue_controller.create(queue_name,
                                                metadata=metadata,
                                                project=project_id)
    except storage_errors.FlavorDoesNotExist as err:
        LOG.exception(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))
    except Exception as err:
        LOG.exception(err)
        description = _(u'Queue could not be created.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # 201 only when the queue did not exist before this call
    resp.status = falcon.HTTP_201 if created else falcon.HTTP_204
    resp.location = req.path
def on_put(self, req, resp, project_id, queue_name):
    """Replace a queue's metadata with the request body."""
    try:
        # Reject oversized bodies before attempting to parse them
        self._validate.queue_metadata_length(req.content_length)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))

    # Parse the metadata document from the request stream
    doc = wsgi_utils.deserialize(req.stream, req.content_length)
    metadata = wsgi_utils.sanitize(doc, spec=None)

    try:
        self._queue_ctrl.set_metadata(queue_name,
                                      metadata=metadata,
                                      project=project_id)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))
    except storage_errors.QueueDoesNotExist:
        raise falcon.HTTPNotFound()
    except Exception as err:
        LOG.exception(err)
        description = _(u'Metadata could not be updated.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    resp.status = falcon.HTTP_204
    resp.location = req.path
def on_put(self, req, resp, project_id, queue_name):
    """Create a queue, validating any supplied metadata first."""
    try:
        # Reject oversized bodies before attempting to parse them
        self._validate.queue_metadata_length(req.content_length)

        # Parse queue metadata only when a body was actually sent
        metadata = None
        if req.content_length:
            doc = wsgi_utils.deserialize(req.stream, req.content_length)
            metadata = wsgi_utils.sanitize(doc, spec=None)

        # NOTE(Eva-i): reserved queue attributes is Zaqar's feature since
        # API v2. But we have to ensure the bad data will not come from
        # older APIs, so we validate metadata here.
        self._validate.queue_metadata_putting(metadata)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))

    try:
        created = self._queue_controller.create(queue_name,
                                                metadata=metadata,
                                                project=project_id)
    except storage_errors.FlavorDoesNotExist as err:
        LOG.exception(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))
    except Exception as err:
        LOG.exception(err)
        description = _(u'Queue could not be created.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # 201 only when the queue did not exist before this call
    resp.status = falcon.HTTP_201 if created else falcon.HTTP_204
    resp.location = req.path
def on_post(self, req, resp, project_id, queue_name):
    """Enqueue one or more messages, lazily creating the queue."""
    client_uuid = wsgi_helpers.get_client_uuid(req)

    try:
        # Enforce the body-size limit before parsing
        self._validate.message_length(req.content_length)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))

    # Parse the request body and pull out the message list
    doc = wsgi_utils.deserialize(req.stream, req.content_length)

    if 'messages' not in doc:
        description = _(u'No messages were found in the request body.')
        raise wsgi_errors.HTTPBadRequestAPI(description)

    messages = wsgi_utils.sanitize(doc['messages'],
                                   self._message_post_spec,
                                   doctype=wsgi_utils.JSONArray)

    try:
        self._validate.message_posting(messages)

        # Queues are created lazily on first post
        if not self._queue_controller.exists(queue_name, project_id):
            self._queue_controller.create(queue_name, project=project_id)

        message_ids = self._message_controller.post(
            queue_name,
            messages=messages,
            project=project_id,
            client_uuid=client_uuid)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))
    except storage_errors.DoesNotExist as err:
        LOG.debug(err)
        raise falcon.HTTPNotFound()
    except storage_errors.MessageConflict as err:
        LOG.exception(err)
        description = _(u'No messages could be enqueued.')
        raise wsgi_errors.HTTPServiceUnavailable(description)
    except Exception as err:
        LOG.exception(err)
        description = _(u'Messages could not be enqueued.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Build the response document
    ids_value = ','.join(message_ids)
    resp.location = req.path + '?ids=' + ids_value

    hrefs = [req.path + '/' + message_id for message_id in message_ids]
    resp.body = utils.to_json({'resources': hrefs})
    resp.status = falcon.HTTP_201
def on_post(self, req, resp, project_id, queue_name):
    """Enqueue one or more messages (v1.0 response format)."""
    LOG.debug(u"Messages collection POST - queue: %(queue)s, "
              u"project: %(project)s",
              {"queue": queue_name, "project": project_id})

    client_uuid = wsgi_helpers.get_client_uuid(req)

    try:
        # Enforce the body-size limit before parsing
        self._validate.message_length(req.content_length)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))

    # Parse and filter the request body
    doc = wsgi_utils.deserialize(req.stream, req.content_length)
    messages = wsgi_utils.sanitize(doc,
                                   MESSAGE_POST_SPEC,
                                   doctype=wsgi_utils.JSONArray)

    try:
        self._validate.message_posting(messages)

        message_ids = self._message_controller.post(
            queue_name,
            messages=messages,
            project=project_id,
            client_uuid=client_uuid)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))
    except storage_errors.DoesNotExist as err:
        LOG.debug(err)
        raise falcon.HTTPNotFound()
    except storage_errors.MessageConflict as err:
        LOG.exception(err)
        description = _(u"No messages could be enqueued.")
        raise wsgi_errors.HTTPServiceUnavailable(description)
    except Exception as err:
        LOG.exception(err)
        description = _(u"Messages could not be enqueued.")
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Build the response document
    ids_value = ",".join(message_ids)
    resp.location = req.path + "?ids=" + ids_value

    hrefs = [req.path + "/" + message_id for message_id in message_ids]

    # NOTE(kgriffs): As of the Icehouse release, drivers are
    # no longer allowed to enqueue a subset of the messages
    # submitted by the client; it's all or nothing. Therefore,
    # 'partial' is now always False in the v1.0 API, and the
    # field has been removed in v1.1.
    resp.body = utils.to_json({"resources": hrefs, "partial": False})
    resp.status = falcon.HTTP_201
def test_no_spec_array(self):
    """A JSON array must survive sanitize() untouched when spec is None."""
    things = [{u"body": {"event": "start_backup"}, "ttl": 300}]
    raw = six.text_type(json.dumps(things, ensure_ascii=False))

    parsed = utils.deserialize(io.StringIO(raw), len(raw))
    result = utils.sanitize(parsed, doctype=utils.JSONArray, spec=None)

    self.assertEqual(things, result)
def on_post(self, req, resp, project_id, queue_name):
    """Enqueue one or more messages (v1.0 response format)."""
    client_uuid = wsgi_helpers.get_client_uuid(req)

    try:
        # Enforce the body-size limit before parsing
        self._validate.message_length(req.content_length)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))

    # Parse and filter the request body
    doc = wsgi_utils.deserialize(req.stream, req.content_length)
    messages = wsgi_utils.sanitize(doc,
                                   MESSAGE_POST_SPEC,
                                   doctype=wsgi_utils.JSONArray)

    try:
        self._validate.message_posting(messages)

        message_ids = self._message_controller.post(
            queue_name,
            messages=messages,
            project=project_id,
            client_uuid=client_uuid)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))
    except storage_errors.DoesNotExist as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPNotFound(six.text_type(err))
    except storage_errors.MessageConflict as err:
        LOG.exception(err)
        description = _(u'No messages could be enqueued.')
        raise wsgi_errors.HTTPServiceUnavailable(description)
    except Exception as err:
        LOG.exception(err)
        description = _(u'Messages could not be enqueued.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Build the response document
    ids_value = ','.join(message_ids)
    resp.location = req.path + '?ids=' + ids_value

    hrefs = [req.path + '/' + message_id for message_id in message_ids]

    # NOTE(kgriffs): As of the Icehouse release, drivers are
    # no longer allowed to enqueue a subset of the messages
    # submitted by the client; it's all or nothing. Therefore,
    # 'partial' is now always False in the v1.0 API, and the
    # field has been removed in v1.1.
    resp.body = utils.to_json({'resources': hrefs, 'partial': False})
    resp.status = falcon.HTTP_201
def test_no_spec_array(self):
    """sanitize() with spec=None must leave a JSON array unchanged."""
    things = [{u'body': {'event': 'start_backup'}, 'ttl': 300}]
    document = six.text_type(json.dumps(things, ensure_ascii=False))
    stream = io.StringIO(document)

    deserialized = utils.deserialize(stream, len(document))
    filtered = utils.sanitize(deserialized,
                              doctype=utils.JSONArray,
                              spec=None)

    self.assertEqual(things, filtered)
def test_no_spec_array(self):
    """Round-trip a JSON array through sanitize() with no spec."""
    things = [{u'body': {'event': 'start_backup'}, 'ttl': 300}]
    raw = six.text_type(json.dumps(things, ensure_ascii=False))

    parsed = utils.deserialize(io.StringIO(raw), len(raw))
    result = utils.sanitize(parsed, doctype=utils.JSONArray, spec=None)

    self.assertEqual(result, things)
def on_post(self, req, resp, project_id, queue_name):
    """Claim a batch of messages from the queue."""
    LOG.debug(u'Claims collection POST - queue: %(queue)s, '
              u'project: %(project)s',
              {'queue': queue_name, 'project': project_id})

    # Honor an explicit cap on the number of messages to claim
    limit = req.get_param_as_int('limit')
    claim_options = {} if limit is None else {'limit': limit}

    # An empty body may arrive with Content-Length of 0 or with the
    # header missing entirely (None); both mean "use the defaults."
    if not req.content_length:
        metadata = self._default_meta
    else:
        # Parse claim metadata (e.g., TTL), translating parse
        # failures into the appropriate HTTP errors
        doc = wsgi_utils.deserialize(req.stream, req.content_length)
        metadata = wsgi_utils.sanitize(doc, self._claim_post_spec)

    try:
        self._validate.claim_creation(metadata, limit=limit)

        cid, msgs = self._claim_controller.create(queue_name,
                                                  metadata=metadata,
                                                  project=project_id,
                                                  **claim_options)

        # Materialize the claimed messages up front
        # TODO(kgriffs): optimize, along with serialization (below)
        resp_msgs = list(msgs)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))
    except Exception as err:
        LOG.exception(err)
        description = _(u'Claim could not be created.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Render the claimed messages, if any; this assumes the
    # storage driver returned them well-formed.
    if len(resp_msgs) != 0:
        base_path = req.path.rpartition('/')[0]
        resp_msgs = [wsgi_utils.format_message_v1_1(msg, base_path, cid)
                     for msg in resp_msgs]

        resp.location = req.path + '/' + cid
        resp.body = utils.to_json({'messages': resp_msgs})
        resp.status = falcon.HTTP_201
    else:
        resp.status = falcon.HTTP_204
def test_deserialize_and_sanitize_json_array(self):
    """sanitize() accepts a matching array doctype and rejects a mismatch."""
    array = [{u"body": {u"x": 1}}, {u"body": {u"x": 2}}]
    raw = six.text_type(json.dumps(array, ensure_ascii=False))
    stream = io.StringIO(raw)

    spec = [("body", dict, None)]

    # Happy path: the array round-trips through sanitize()
    parsed = utils.deserialize(stream, len(raw))
    result = utils.sanitize(parsed, spec, doctype=utils.JSONArray)
    self.assertEqual(array, result)

    # Declaring the wrong doctype must be rejected
    self.assertRaises(falcon.HTTPBadRequest,
                      utils.sanitize,
                      parsed,
                      spec,
                      doctype=utils.JSONObject)
def test_deserialize_and_sanitize_json_obj(self):
    """sanitize() accepts a matching object doctype and rejects a mismatch."""
    obj = {u"body": {"event": "start_backup"}, "id": "DEADBEEF"}
    raw = six.text_type(json.dumps(obj, ensure_ascii=False))
    stream = io.StringIO(raw)

    spec = [("body", dict, None), ("id", six.string_types, None)]

    # Happy path: the object round-trips through sanitize()
    parsed = utils.deserialize(stream, len(raw))
    result = utils.sanitize(parsed, spec)
    self.assertEqual(obj, result)

    # Declaring the wrong doctype must be rejected
    self.assertRaises(falcon.HTTPBadRequest,
                      utils.sanitize,
                      parsed,
                      spec,
                      doctype=utils.JSONArray)
def test_deserialize_and_sanitize_json_obj(self):
    """Object documents pass through sanitize(); arrays are refused."""
    obj = {u'body': {'event': 'start_backup'}, 'id': 'DEADBEEF'}
    document = six.text_type(json.dumps(obj, ensure_ascii=False))
    stream = io.StringIO(document)

    spec = [('body', dict, None), ('id', six.string_types, None)]

    # Happy path
    deserialized = utils.deserialize(stream, len(document))
    filtered = utils.sanitize(deserialized, spec)
    self.assertEqual(obj, filtered)

    # Mismatched doctype must raise
    self.assertRaises(falcon.HTTPBadRequest,
                      utils.sanitize,
                      deserialized,
                      spec,
                      doctype=utils.JSONArray)
def test_deserialize_and_sanitize_json_obj(self):
    """Round-trip an object through sanitize(); reject the array doctype."""
    obj = {u'body': {'event': 'start_backup'}, 'id': 'DEADBEEF'}
    raw = six.text_type(json.dumps(obj, ensure_ascii=False))
    stream = io.StringIO(raw)

    spec = [('body', dict, None), ('id', six.string_types, None)]

    # Happy path
    parsed = utils.deserialize(stream, len(raw))
    result = utils.sanitize(parsed, spec)
    self.assertEqual(result, obj)

    # Mismatched doctype must raise
    self.assertRaises(falcon.HTTPBadRequest,
                      utils.sanitize,
                      parsed,
                      spec,
                      doctype=utils.JSONArray)
def test_deserialize_and_sanitize_json_array(self):
    """Array documents pass through sanitize(); objects are refused."""
    array = [{u'body': {u'x': 1}}, {u'body': {u'x': 2}}]
    document = six.text_type(json.dumps(array, ensure_ascii=False))
    stream = io.StringIO(document)

    spec = [('body', dict, None)]

    # Happy path
    deserialized = utils.deserialize(stream, len(document))
    filtered = utils.sanitize(deserialized, spec, doctype=utils.JSONArray)
    self.assertEqual(array, filtered)

    # Mismatched doctype must raise
    self.assertRaises(falcon.HTTPBadRequest,
                      utils.sanitize,
                      deserialized,
                      spec,
                      doctype=utils.JSONObject)
def on_post(self, req, resp, project_id, queue_name):
    """Claim a batch of messages (v1 response format)."""
    # Honor an explicit cap on the number of messages to claim
    limit = req.get_param_as_int('limit')
    claim_options = {} if limit is None else {'limit': limit}

    # Parse claim metadata (e.g., TTL), translating parse
    # failures into the appropriate HTTP errors
    doc = wsgi_utils.deserialize(req.stream, req.content_length)
    metadata = wsgi_utils.sanitize(doc, CLAIM_POST_SPEC)

    try:
        self._validate.claim_creation(metadata, limit=limit)

        cid, msgs = self._claim_controller.create(queue_name,
                                                  metadata=metadata,
                                                  project=project_id,
                                                  **claim_options)

        # Materialize the claimed messages up front
        # TODO(kgriffs): optimize, along with serialization (below)
        resp_msgs = list(msgs)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))
    except Exception as err:
        LOG.exception(err)
        description = _(u'Claim could not be created.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Render the claimed messages, if any; this assumes the
    # storage driver returned them well-formed.
    if len(resp_msgs) != 0:
        base_path = req.path.rpartition('/')[0]
        resp_msgs = [wsgi_utils.format_message_v1(msg, base_path, cid)
                     for msg in resp_msgs]

        resp.location = req.path + '/' + cid
        resp.body = utils.to_json(resp_msgs)
        resp.status = falcon.HTTP_201
    else:
        resp.status = falcon.HTTP_204
def on_post(self, req, resp, project_id, queue_name):
    """Claim a batch of messages (v1 response format)."""
    LOG.debug(u"Claims collection POST - queue: %(queue)s, "
              u"project: %(project)s",
              {"queue": queue_name, "project": project_id})

    # Honor an explicit cap on the number of messages to claim
    limit = req.get_param_as_int("limit")
    claim_options = {} if limit is None else {"limit": limit}

    # Parse claim metadata (e.g., TTL), translating parse
    # failures into the appropriate HTTP errors
    doc = wsgi_utils.deserialize(req.stream, req.content_length)
    metadata = wsgi_utils.sanitize(doc, CLAIM_POST_SPEC)

    try:
        self._validate.claim_creation(metadata, limit=limit)

        cid, msgs = self._claim_controller.create(queue_name,
                                                  metadata=metadata,
                                                  project=project_id,
                                                  **claim_options)

        # Materialize the claimed messages up front
        # TODO(kgriffs): optimize, along with serialization (below)
        resp_msgs = list(msgs)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))
    except Exception as err:
        LOG.exception(err)
        description = _(u"Claim could not be created.")
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Render the claimed messages, if any; this assumes the
    # storage driver returned them well-formed.
    if len(resp_msgs) != 0:
        base_path = req.path.rpartition("/")[0]
        resp_msgs = [wsgi_utils.format_message_v1(msg, base_path, cid)
                     for msg in resp_msgs]

        resp.location = req.path + "/" + cid
        resp.body = utils.to_json(resp_msgs)
        resp.status = falcon.HTTP_201
    else:
        resp.status = falcon.HTTP_204
def on_put(self, req, resp, project_id, queue_name):
    """Replace queue metadata, refusing reserved (v1) attributes."""
    try:
        # Reject oversized bodies before attempting to parse them
        self._validate.queue_metadata_length(req.content_length)

        # Parse the metadata document from the request stream
        doc = wsgi_utils.deserialize(req.stream, req.content_length)
        metadata = wsgi_utils.sanitize(doc)

        # Reserved attributes (leading underscore) may not be set via v1
        for key in metadata:
            if key.startswith('_'):
                description = _(u'Reserved queue attributes in metadata '
                                u'(which names start with "_") can not be '
                                u'set in API v1.')
                raise validation.ValidationFailed(description)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))

    try:
        self._queue_ctrl.set_metadata(queue_name,
                                      metadata=metadata,
                                      project=project_id)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))
    except storage_errors.QueueDoesNotExist as err:
        raise wsgi_errors.HTTPNotFound(six.text_type(err))
    except Exception as err:
        LOG.exception(err)
        description = _(u'Metadata could not be updated.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    resp.status = falcon.HTTP_204
    resp.location = req.path
def on_put(self, req, resp, project_id, queue_name):
    """Replace queue metadata, refusing reserved (v1) attributes."""
    try:
        # Reject oversized bodies before attempting to parse them
        self._validate.queue_metadata_length(req.content_length)

        # Parse the metadata document from the request stream
        doc = wsgi_utils.deserialize(req.stream, req.content_length)
        metadata = wsgi_utils.sanitize(doc)

        # Reserved attributes (leading underscore) may not be set via v1
        for key in metadata:
            if key.startswith('_'):
                description = _(u'Reserved queue attributes in metadata '
                                u'(which names start with "_") can not be '
                                u'set in API v1.')
                raise validation.ValidationFailed(description)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))

    try:
        self._queue_ctrl.set_metadata(queue_name,
                                      metadata=metadata,
                                      project=project_id)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))
    except storage_errors.QueueDoesNotExist as err:
        raise wsgi_errors.HTTPNotFound(six.text_type(err))
    except Exception:
        description = _(u'Metadata could not be updated.')
        LOG.exception(description)
        raise wsgi_errors.HTTPServiceUnavailable(description)

    resp.status = falcon.HTTP_204
    resp.location = req.path
def on_put(self, req, resp, project_id, queue_name):
    """Create a queue, optionally seeding it with metadata."""
    LOG.debug(u'Queue item PUT - queue: %(queue)s, '
              u'project: %(project)s',
              {'queue': queue_name, 'project': project_id})

    try:
        # Reject oversized bodies before attempting to parse them
        self._validate.queue_metadata_length(req.content_length)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))

    # Parse queue metadata only when a body was actually sent
    metadata = None
    if req.content_length:
        doc = wsgi_utils.deserialize(req.stream, req.content_length)
        metadata = wsgi_utils.sanitize(doc, spec=None)

    try:
        created = self._queue_controller.create(queue_name,
                                                metadata=metadata,
                                                project=project_id)
    except storage_errors.FlavorDoesNotExist as err:
        LOG.exception(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))
    except Exception as err:
        LOG.exception(err)
        description = _(u'Queue could not be created.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # 201 only when the queue did not exist before this call
    resp.status = falcon.HTTP_201 if created else falcon.HTTP_204
    resp.location = req.path
def on_post(self, req, resp, project_id, queue_name):
    """Enqueue messages, honoring per-queue TTL/size overrides."""
    client_uuid = wsgi_helpers.get_client_uuid(req)

    try:
        # NOTE(flwang): Replace 'exists' with 'get_metadata' won't impact
        # the performance since both of them will call
        # collection.find_one()
        queue_meta = None
        try:
            queue_meta = self._queue_controller.get_metadata(queue_name,
                                                             project_id)
        except storage_errors.DoesNotExist:
            self._validate.queue_identification(queue_name, project_id)
            self._queue_controller.create(queue_name, project=project_id)
            # The queue is created lazily, so no metadata has been set
            queue_meta = {}

        queue_max_msg_size = queue_meta.get('_max_messages_post_size',
                                            None)
        queue_default_ttl = queue_meta.get('_default_message_ttl', None)

        # TODO(flwang): To avoid any unexpected regression issue, we just
        # leave the _message_post_spec attribute of class as it's. It
        # should be removed in Newton release.
        if queue_default_ttl:
            message_post_spec = (('ttl', int, queue_default_ttl),
                                 ('body', '*', None),)
        else:
            message_post_spec = (('ttl', int, self._default_message_ttl),
                                 ('body', '*', None),)

        # Enforce the body-size limit before parsing
        self._validate.message_length(
            req.content_length, max_msg_post_size=queue_max_msg_size)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))

    # Parse the request body and pull out the message list
    doc = wsgi_utils.deserialize(req.stream, req.content_length)

    if 'messages' not in doc:
        description = _(u'No messages were found in the request body.')
        raise wsgi_errors.HTTPBadRequestAPI(description)

    messages = wsgi_utils.sanitize(doc['messages'],
                                   message_post_spec,
                                   doctype=wsgi_utils.JSONArray)

    try:
        self._validate.message_posting(messages)

        message_ids = self._message_controller.post(
            queue_name,
            messages=messages,
            project=project_id,
            client_uuid=client_uuid)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(err))
    except storage_errors.DoesNotExist as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPNotFound(six.text_type(err))
    except storage_errors.MessageConflict as err:
        LOG.exception(err)
        description = _(u'No messages could be enqueued.')
        raise wsgi_errors.HTTPServiceUnavailable(description)
    except Exception as err:
        LOG.exception(err)
        description = _(u'Messages could not be enqueued.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Build the response document
    ids_value = ','.join(message_ids)
    resp.location = req.path + '?ids=' + ids_value

    hrefs = [req.path + '/' + message_id for message_id in message_ids]
    resp.body = utils.to_json({'resources': hrefs})
    resp.status = falcon.HTTP_201
def on_patch(self, req, resp, project_id, queue_name):
    """Allows one to update a queue's metadata.

    This method expects the user to submit a JSON object. There is also
    strict format checking through the use of jsonschema. Appropriate
    errors are returned in each case for badly formatted input.

    :returns: HTTP | 200,400,409,503
    """
    LOG.debug(u'PATCH queue - name: %s', queue_name)

    try:
        # Reject oversized bodies before attempting to parse them
        self._validate.queue_metadata_length(req.content_length)
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestBody(six.text_type(err))

    # NOTE(flwang): See below link to get more details about draft 10,
    # tools.ietf.org/html/draft-ietf-appsawg-json-patch-10
    content_types = {
        'application/openstack-messaging-v2.0-json-patch': 10,
    }

    if req.content_type not in content_types:
        headers = {'Accept-Patch':
                   ', '.join(sorted(content_types.keys()))}
        msg = _("Accepted media type for PATCH: %s.")
        LOG.debug(msg % headers)
        raise wsgi_errors.HTTPUnsupportedMediaType(msg % headers)

    if req.content_length:
        try:
            changes = utils.read_json(req.stream, req.content_length)
            changes = wsgi_utils.sanitize(changes,
                                          spec=None,
                                          doctype=list)
        except utils.MalformedJSON as err:
            LOG.debug(err)
            description = _(u'Request body could not be parsed.')
            raise wsgi_errors.HTTPBadRequestBody(description)
        except utils.OverflowedJSONInteger as err:
            LOG.debug(err)
            description = _(u'JSON contains integer that is too large.')
            raise wsgi_errors.HTTPBadRequestBody(description)
        except Exception as err:
            # Something failed while reading from the network/server
            LOG.exception(err)
            description = _(u'Request body could not be read.')
            raise wsgi_errors.HTTPServiceUnavailable(description)
    else:
        msg = _("PATCH body could not be empty for update.")
        LOG.debug(msg)
        raise wsgi_errors.HTTPBadRequestBody(msg)

    try:
        changes = self._validate.queue_patching(req, changes)

        # NOTE(Eva-i): using 'get_metadata' instead of 'get', so
        # QueueDoesNotExist error will be thrown in case of non-existent
        # queue.
        metadata = self._queue_controller.get_metadata(queue_name,
                                                       project=project_id)

        # Apply each patch operation via its _do_<op> handler
        for change in changes:
            change_method = getattr(self, '_do_%s' % change['op'])
            change_method(req, metadata, change)

        self._validate.queue_metadata_putting(metadata)

        self._queue_controller.set_metadata(queue_name,
                                            metadata,
                                            project_id)
    except storage_errors.DoesNotExist as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPNotFound(six.text_type(err))
    except validation.ValidationFailed as err:
        LOG.debug(err)
        raise wsgi_errors.HTTPBadRequestBody(six.text_type(err))
    except wsgi_errors.HTTPConflict:
        raise
    except Exception as err:
        LOG.exception(err)
        description = _(u'Queue could not be updated.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    resp.body = utils.to_json(metadata)
def on_post(self, req, resp, project_id, queue_name):
    """Post one or more messages to a queue, lazily creating it if needed.

    Reads a JSON document of the form ``{"messages": [...]}`` from the
    request body, sanitizes each message against the queue's TTL/size
    limits, and enqueues them.

    :returns: HTTP | 201,400,404,503
    """
    client_uuid = wsgi_helpers.get_client_uuid(req)

    try:
        # NOTE(flwang): Replace 'exists' with 'get_metadata' won't impact
        # the performance since both of them will call
        # collection.find_one()
        queue_meta = None
        try:
            queue_meta = self._queue_controller.get_metadata(queue_name,
                                                             project_id)
        except storage_errors.DoesNotExist:
            self._validate.queue_identification(queue_name, project_id)
            self._queue_controller.create(queue_name, project=project_id)
            # NOTE(flwang): Queue is created in lazy mode, so no metadata
            # set.
            queue_meta = {}

        queue_max_msg_size = queue_meta.get('_max_messages_post_size',
                                            None)
        queue_default_ttl = queue_meta.get('_default_message_ttl', None)

        # TODO(flwang): To avoid any unexpected regression issue, we just
        # leave the _message_post_spec attribute of class as it's. It
        # should be removed in Newton release.
        if queue_default_ttl:
            message_post_spec = (
                ('ttl', int, queue_default_ttl),
                ('body', '*', None),
            )
        else:
            message_post_spec = (
                ('ttl', int, self._default_message_ttl),
                ('body', '*', None),
            )

        # Place JSON size restriction before parsing
        self._validate.message_length(
            req.content_length, max_msg_post_size=queue_max_msg_size)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))

    # Deserialize and validate the incoming messages
    document = wsgi_utils.deserialize(req.stream, req.content_length)

    if 'messages' not in document:
        description = _(u'No messages were found in the request body.')
        raise wsgi_errors.HTTPBadRequestAPI(description)

    messages = wsgi_utils.sanitize(document['messages'],
                                   message_post_spec,
                                   doctype=wsgi_utils.JSONArray)

    try:
        self._validate.message_posting(messages)

        message_ids = self._message_controller.post(
            queue_name,
            messages=messages,
            project=project_id,
            client_uuid=client_uuid)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPNotFound(six.text_type(ex))
    except storage_errors.MessageConflict as ex:
        LOG.exception(ex)
        description = _(u'No messages could be enqueued.')
        raise wsgi_errors.HTTPServiceUnavailable(description)
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Messages could not be enqueued.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Prepare the response
    ids_value = ','.join(message_ids)
    resp.location = req.path + '?ids=' + ids_value

    # NOTE: avoid shadowing the builtin `id` inside the comprehension
    hrefs = [req.path + '/' + message_id for message_id in message_ids]
    body = {'resources': hrefs}
    resp.body = utils.to_json(body)
    resp.status = falcon.HTTP_201
def on_patch(self, req, resp, project_id, topic_name):
    """Allows one to update a topic's metadata.

    This method expects the user to submit a JSON object. There is also
    strict format checking through the use of
    jsonschema. Appropriate errors are returned in each case for
    badly formatted input.

    :returns: HTTP | 200,400,409,503
    """
    LOG.debug(u'PATCH topic - name: %s', topic_name)

    try:
        # Place JSON size restriction before parsing
        self._validate.queue_metadata_length(req.content_length)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestBody(six.text_type(ex))

    # NOTE(flwang): See below link to get more details about draft 10,
    # tools.ietf.org/html/draft-ietf-appsawg-json-patch-10
    content_types = {
        'application/openstack-messaging-v2.0-json-patch': 10,
    }

    # Only the custom JSON-patch media type is accepted; advertise it
    # via the Accept-Patch header on rejection.
    if req.content_type not in content_types:
        headers = {'Accept-Patch':
                   ', '.join(sorted(content_types.keys()))}
        msg = _("Accepted media type for PATCH: %s.")
        LOG.debug(msg, headers)
        raise wsgi_errors.HTTPUnsupportedMediaType(msg % headers)

    if req.content_length:
        try:
            changes = utils.read_json(req.stream, req.content_length)
            # Top-level document must be a JSON array of patch operations
            changes = wsgi_utils.sanitize(changes, doctype=list)
        except utils.MalformedJSON as ex:
            LOG.debug(ex)
            description = _(u'Request body could not be parsed.')
            raise wsgi_errors.HTTPBadRequestBody(description)
        except utils.OverflowedJSONInteger as ex:
            LOG.debug(ex)
            description = _(u'JSON contains integer that is too large.')
            raise wsgi_errors.HTTPBadRequestBody(description)
        except Exception:
            # Error while reading from the network/server
            description = _(u'Request body could not be read.')
            LOG.exception(description)
            raise wsgi_errors.HTTPServiceUnavailable(description)
    else:
        # An empty PATCH is meaningless; reject it outright
        msg = _("PATCH body could not be empty for update.")
        LOG.debug(msg)
        raise wsgi_errors.HTTPBadRequestBody(msg)

    try:
        changes = self._validate.queue_patching(req, changes)

        # NOTE(Eva-i): using 'get_metadata' instead of 'get', so
        # QueueDoesNotExist error will be thrown in case of non-existent
        # queue.
        metadata = self._topic_controller.get_metadata(topic_name,
                                                       project=project_id)
        reserved_metadata = _get_reserved_metadata(self._validate)
        # Dispatch each patch op to its handler (_do_add/_do_remove/etc.)
        for change in changes:
            change_method_name = '_do_%s' % change['op']
            change_method = getattr(self, change_method_name)
            change_method(req, metadata, reserved_metadata, change)

        self._validate.queue_metadata_putting(metadata)
        self._topic_controller.set_metadata(topic_name,
                                            metadata,
                                            project_id)
    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPNotFound(six.text_type(ex))
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestBody(six.text_type(ex))
    except wsgi_errors.HTTPConflict:
        raise
    except Exception:
        description = _(u'Topic could not be updated.')
        LOG.exception(description)
        raise wsgi_errors.HTTPServiceUnavailable(description)
    # Fill in reserved defaults for any key the stored metadata lacks
    # (or holds a falsy value for) before echoing it back to the client.
    for meta, value in _get_reserved_metadata(self._validate).items():
        if not metadata.get(meta):
            metadata[meta] = value
    resp.body = utils.to_json(metadata)