def on_put(self, req, resp, project_id, queue_name):
    """Replace the metadata document of an existing queue.

    :param req: falcon request (body is the JSON metadata document)
    :param resp: falcon response; 204 with Location header on success
    :param project_id: scoping project for the queue
    :param queue_name: name of the queue whose metadata is replaced
    :raises HTTPBadRequestAPI: on validation failure
    :raises HTTPNotFound: when the queue does not exist
    :raises HTTPServiceUnavailable: on unexpected storage errors
    """
    try:
        # Place JSON size restriction before parsing
        self._validate.queue_metadata_length(req.content_length)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))

    # Deserialize queue metadata
    document = wsgi_utils.deserialize(req.stream, req.content_length)
    metadata = wsgi_utils.sanitize(document, spec=None)

    try:
        self._queue_ctrl.set_metadata(queue_name,
                                      metadata=metadata,
                                      project=project_id)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except storage_errors.QueueDoesNotExist:
        # NOTE(review): sibling handlers raise
        # wsgi_errors.HTTPNotFound(six.text_type(ex)) here; confirm the
        # detail-free 404 is intentional.
        raise falcon.HTTPNotFound()
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Metadata could not be updated.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    resp.status = falcon.HTTP_204
    resp.location = req.path
def on_put(self, req, resp, project_id, queue_name):
    """Create a queue (idempotent), optionally with initial metadata.

    :param req: falcon request; body, if present, is the metadata doc
    :param resp: falcon response; 201 when created, 204 when it existed
    :param project_id: scoping project for the queue
    :param queue_name: name of the queue to create
    """
    try:
        # Place JSON size restriction before parsing
        self._validate.queue_metadata_length(req.content_length)
        # Deserialize queue metadata
        metadata = None
        if req.content_length:
            document = wsgi_utils.deserialize(req.stream,
                                              req.content_length)
            metadata = wsgi_utils.sanitize(document, spec=None)
        # NOTE(Eva-i): reserved queue attributes is Zaqar's feature since
        # API v2. But we have to ensure the bad data will not come from
        # older APIs, so we validate metadata here.
        self._validate.queue_metadata_putting(metadata)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))

    try:
        created = self._queue_controller.create(queue_name,
                                                metadata=metadata,
                                                project=project_id)
    except storage_errors.FlavorDoesNotExist as ex:
        LOG.exception(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Queue could not be created.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # 201 for a newly created queue, 204 when it already existed.
    resp.status = falcon.HTTP_201 if created else falcon.HTTP_204
    resp.location = req.path
def on_post(self, req, resp, project_id, queue_name):
    """Create a subscription on the given queue.

    :param req: falcon request; body must contain subscriber, ttl, options
    :param resp: falcon response; 201 + subscription_id on success,
        409 when an equivalent subscription already exists
    """
    if req.content_length:
        document = wsgi_utils.deserialize(req.stream, req.content_length)
    else:
        document = {}

    try:
        self._validate.subscription_posting(document)
        subscriber = document['subscriber']
        ttl = int(document['ttl'])
        options = document['options']
        created = self._subscription_controller.create(queue_name,
                                                       subscriber,
                                                       ttl,
                                                       options,
                                                       project=project_id)
    except storage_errors.QueueDoesNotExist as ex:
        LOG.exception(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Subscription could not be created.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # `created` is falsy when the subscription already existed.
    resp.status = falcon.HTTP_201 if created else falcon.HTTP_409
    resp.location = req.path
    if created:
        resp.body = utils.to_json(
            {'subscription_id': six.text_type(created)})
def on_put(self, req, resp, project_id, topic_name):
    """Create a topic (idempotent), optionally with initial metadata.

    :param resp: falcon response; 201 when created, 204 when it existed
    :param topic_name: name of the topic to create
    """
    try:
        # Place JSON size restriction before parsing
        self._validate.queue_metadata_length(req.content_length)
        # Deserialize Topic metadata
        metadata = None
        if req.content_length:
            document = wsgi_utils.deserialize(req.stream,
                                              req.content_length)
            metadata = wsgi_utils.sanitize(document)
        self._validate.queue_metadata_putting(metadata)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))

    try:
        created = self._topic_controller.create(topic_name,
                                                metadata=metadata,
                                                project=project_id)
    except storage_errors.FlavorDoesNotExist as ex:
        LOG.exception('Flavor "%s" does not exist', topic_name)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except Exception:
        description = _(u'Topic could not be created.')
        LOG.exception(description)
        raise wsgi_errors.HTTPServiceUnavailable(description)

    resp.status = falcon.HTTP_201 if created else falcon.HTTP_204
    resp.location = req.path
def on_post(self, req, resp, project_id, queue_name):
    """Enqueue one or more messages, lazily creating the queue.

    Fix: the href list comprehension used `id` as its loop variable,
    shadowing the builtin; renamed to `message_id`.

    :param req: falcon request; body is {"messages": [...]}
    :param resp: falcon response; 201 with resource hrefs on success
    :raises HTTPBadRequestAPI: on validation failure or missing messages
    :raises HTTPNotFound: when the target does not exist in storage
    :raises HTTPServiceUnavailable: on conflict or unexpected errors
    """
    client_uuid = wsgi_helpers.get_client_uuid(req)

    try:
        # Place JSON size restriction before parsing
        self._validate.message_length(req.content_length)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))

    # Deserialize and validate the incoming messages
    document = wsgi_utils.deserialize(req.stream, req.content_length)

    if 'messages' not in document:
        description = _(u'No messages were found in the request body.')
        raise wsgi_errors.HTTPBadRequestAPI(description)

    messages = wsgi_utils.sanitize(document['messages'],
                                   self._message_post_spec,
                                   doctype=wsgi_utils.JSONArray)

    try:
        self._validate.message_posting(messages)

        # Lazily create the queue on first post.
        if not self._queue_controller.exists(queue_name, project_id):
            self._queue_controller.create(queue_name, project=project_id)

        message_ids = self._message_controller.post(
            queue_name,
            messages=messages,
            project=project_id,
            client_uuid=client_uuid)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise falcon.HTTPNotFound()
    except storage_errors.MessageConflict as ex:
        LOG.exception(ex)
        description = _(u'No messages could be enqueued.')
        raise wsgi_errors.HTTPServiceUnavailable(description)
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Messages could not be enqueued.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Prepare the response
    ids_value = ','.join(message_ids)
    resp.location = req.path + '?ids=' + ids_value

    # NOTE: loop variable renamed so it no longer shadows builtin `id`.
    hrefs = [req.path + '/' + message_id for message_id in message_ids]
    body = {'resources': hrefs}
    resp.body = utils.to_json(body)
    resp.status = falcon.HTTP_201
def on_post(self, req, resp, project_id, queue_name):
    """Enqueue one or more messages (v1.0 API; body is a bare JSON array).

    :param resp: falcon response; 201 with resource hrefs on success
    :raises HTTPBadRequestAPI: on validation failure
    :raises HTTPNotFound: when the queue does not exist
    :raises HTTPServiceUnavailable: on conflict or unexpected errors
    """
    client_uuid = wsgi_helpers.get_client_uuid(req)

    try:
        # Place JSON size restriction before parsing
        self._validate.message_length(req.content_length)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))

    # Deserialize and validate the request body
    document = wsgi_utils.deserialize(req.stream, req.content_length)
    messages = wsgi_utils.sanitize(document, MESSAGE_POST_SPEC,
                                   doctype=wsgi_utils.JSONArray)

    try:
        self._validate.message_posting(messages)

        message_ids = self._message_controller.post(
            queue_name,
            messages=messages,
            project=project_id,
            client_uuid=client_uuid)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPNotFound(six.text_type(ex))
    except storage_errors.MessageConflict as ex:
        LOG.exception(ex)
        description = _(u'No messages could be enqueued.')
        raise wsgi_errors.HTTPServiceUnavailable(description)
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Messages could not be enqueued.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Prepare the response
    ids_value = ','.join(message_ids)
    resp.location = req.path + '?ids=' + ids_value

    hrefs = [req.path + '/' + id for id in message_ids]

    # NOTE(kgriffs): As of the Icehouse release, drivers are
    # no longer allowed to enqueue a subset of the messages
    # submitted by the client; it's all or nothing. Therefore,
    # 'partial' is now always False in the v1.0 API, and the
    # field has been removed in v1.1.
    body = {'resources': hrefs, 'partial': False}

    resp.body = utils.to_json(body)
    resp.status = falcon.HTTP_201
def _queue_list(self, project_id, path, kfilter, **kwargs):
    """List queues for a project, optionally with a total count.

    :param path: base URL path used to build each queue's href
    :param kfilter: storage-level filter passed through to the controller
    :param kwargs: listing options (marker, limit, detailed, with_count)
    :returns: (queues, next_marker, total_number) where total_number is
        None unless with_count was requested
    """
    try:
        self._validate.queue_listing(**kwargs)
        with_count = kwargs.pop('with_count', False)
        results = self._queue_controller.list(project=project_id,
                                              kfilter=kfilter, **kwargs)

        # Buffer list of queues
        queues = list(next(results))

        total_number = None
        if with_count:
            total_number = self._queue_controller.calculate_resource_count(
                project=project_id)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except Exception:
        description = _(u'Queues could not be listed.')
        LOG.exception(description)
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Got some. Prepare the response.
    kwargs['marker'] = next(results) or kwargs.get('marker', '')
    reserved_metadata = _get_reserved_metadata(self._validate).items()
    for each_queue in queues:
        each_queue['href'] = path + '/' + each_queue['name']
        if kwargs.get('detailed'):
            # Fill in defaults for reserved metadata keys not set on
            # the queue itself.
            for meta, value in reserved_metadata:
                if not each_queue.get('metadata', {}).get(meta):
                    each_queue['metadata'][meta] = value

    return queues, kwargs['marker'], total_number
def _get(self, req, project_id, queue_name):
    """List messages on a queue and build the response document.

    :returns: dict with 'messages' and 'links'; a missing queue is
        treated the same as an empty listing
    """
    client_uuid = wsgi_helpers.get_client_uuid(req)
    kwargs = {}

    # NOTE(kgriffs): This syntax ensures that
    # we don't clobber default values with None.
    req.get_param('marker', store=kwargs)
    req.get_param_as_int('limit', store=kwargs)
    req.get_param_as_bool('echo', store=kwargs)
    req.get_param_as_bool('include_claimed', store=kwargs)

    try:
        self._validate.message_listing(**kwargs)
        results = self._message_controller.list(
            queue_name,
            project=project_id,
            client_uuid=client_uuid,
            **kwargs)

        # Buffer messages
        cursor = next(results)
        messages = list(cursor)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except storage_errors.QueueDoesNotExist as ex:
        # A missing queue yields an empty listing rather than a 404.
        LOG.debug(ex)
        messages = None
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Messages could not be listed.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    if not messages:
        messages = []
    else:
        # Found some messages, so prepare the response
        kwargs['marker'] = next(results)

        base_path = req.path.rsplit('/', 1)[0]
        messages = [wsgi_utils.format_message_v1_1(m, base_path,
                                                   m['claim_id'])
                    for m in messages]

    links = []
    if messages:
        links = [
            {
                'rel': 'next',
                'href': req.path + falcon.to_query_str(kwargs)
            }
        ]

    return {
        'messages': messages,
        'links': links
    }
def _get_by_id(self, base_path, project_id, queue_name, ids):
    """Returns one or more messages from the queue by ID."""
    try:
        self._validate.message_listing(limit=len(ids))
        raw_messages = self._message_controller.bulk_get(
            queue_name, message_ids=ids, project=project_id)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Message could not be retrieved.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Materialize the cursor; an empty result means nothing matched.
    found = list(raw_messages)
    if not found:
        return None

    # Format each message for the v1.1 wire representation.
    formatted = []
    for msg in found:
        formatted.append(
            wsgi_utils.format_message_v1_1(msg, base_path,
                                           msg['claim_id']))

    return {'messages': formatted}
def _get_by_id(self, base_path, project_id, queue_name, ids):
    """Returns one or more messages from the queue by ID.

    Fix: `messages` is now initialized before the try block. Previously,
    if bulk_get itself raised QueueDoesNotExist, the handler set
    queue_meta to None but `messages` was never bound, so the later
    `list(messages)` raised NameError (an unhandled 500).
    """
    messages = []
    try:
        self._validate.message_listing(limit=len(ids))
        messages = self._message_controller.bulk_get(
            queue_name, message_ids=ids, project=project_id)
        queue_meta = self._queue_controller.get_metadata(queue_name,
                                                         project_id)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except storage_errors.QueueDoesNotExist:
        LOG.exception('Queue name "%s" does not exist', queue_name)
        queue_meta = None
    except Exception:
        description = _(u'Message could not be retrieved.')
        LOG.exception(description)
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Prepare response
    messages = list(messages)
    if not messages:
        return None

    # Decrypt messages when the queue opted into encryption.
    if queue_meta and queue_meta.get('_enable_encrypt_messages', False):
        self._encryptor.message_decrypted(messages)

    messages = [wsgi_utils.format_message_v1_1(m, base_path,
                                               m['claim_id'])
                for m in messages]

    return {'messages': messages}
def _topic_list(self, project_id, path, kfilter, **kwargs):
    """List topics for a project.

    :param path: base URL path used to build each topic's href
    :param kfilter: storage-level filter passed through to the controller
    :returns: (topics, next_marker)
    """
    try:
        self._validate.queue_listing(**kwargs)
        results = self._topic_controller.list(project=project_id,
                                              kfilter=kfilter, **kwargs)

        # Buffer list of topics
        topics = list(next(results))
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except Exception:
        description = _(u'Topics could not be listed.')
        LOG.exception(description)
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Got some. Prepare the response.
    kwargs['marker'] = next(results) or kwargs.get('marker', '')
    reserved_metadata = _get_reserved_metadata(self._validate).items()
    for each_topic in topics:
        each_topic['href'] = path + '/' + each_topic['name']
        if kwargs.get('detailed'):
            # Fill in defaults for reserved metadata keys not set on
            # the topic itself.
            for meta, value in reserved_metadata:
                if not each_topic.get('metadata', {}).get(meta):
                    each_topic['metadata'][meta] = value

    return topics, kwargs['marker']
def on_patch(self, req, resp, project_id, queue_name, subscription_id):
    """Update mutable fields of an existing subscription.

    :param resp: falcon response; 204 with Location header on success
    :raises HTTPNotFound: when the subscription does not exist
    :raises HTTPConflict: when the update collides with an existing one
    :raises HTTPBadRequestAPI: on validation failure
    """
    if req.content_length:
        document = wsgi_utils.deserialize(req.stream, req.content_length)
    else:
        document = {}

    try:
        self._validate.subscription_patching(document)
        self._subscription_controller.update(queue_name, subscription_id,
                                             project=project_id,
                                             **document)
        resp.status = falcon.HTTP_204
        resp.location = req.path
    except storage_errors.SubscriptionDoesNotExist as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPNotFound(six.text_type(ex))
    except storage_errors.SubscriptionAlreadyExists as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPConflict(six.text_type(ex))
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except Exception as ex:
        LOG.exception(ex)
        # NOTE(review): an unexpected failure is mapped to 400 here,
        # not 503 — confirm this is intentional.
        description = (_(u'Subscription %(subscription_id)s could not be'
                         ' updated.') %
                       dict(subscription_id=subscription_id))
        raise falcon.HTTPBadRequest(_('Unable to update subscription'),
                                    description)
def on_post(self, req, resp, project_id, queue_name): LOG.debug( u'Claims collection POST - queue: %(queue)s, ' u'project: %(project)s', { 'queue': queue_name, 'project': project_id }) # Check for an explicit limit on the # of messages to claim limit = req.get_param_as_int('limit') claim_options = {} if limit is None else {'limit': limit} # NOTE(kgriffs): Clients may or may not actually include the # Content-Length header when the body is empty; the following # check works for both 0 and None. if not req.content_length: # No values given, so use defaults metadata = self._default_meta else: # Read claim metadata (e.g., TTL) and raise appropriate # HTTP errors as needed. document = wsgi_utils.deserialize(req.stream, req.content_length) metadata = wsgi_utils.sanitize(document, self._claim_post_spec) # Claim some messages try: self._validate.claim_creation(metadata, limit=limit) cid, msgs = self._claim_controller.create(queue_name, metadata=metadata, project=project_id, **claim_options) # Buffer claimed messages # TODO(kgriffs): optimize, along with serialization (below) resp_msgs = list(msgs) except validation.ValidationFailed as ex: LOG.debug(ex) raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex)) except Exception as ex: LOG.exception(ex) description = _(u'Claim could not be created.') raise wsgi_errors.HTTPServiceUnavailable(description) # Serialize claimed messages, if any. This logic assumes # the storage driver returned well-formed messages. if len(resp_msgs) != 0: base_path = req.path.rpartition('/')[0] resp_msgs = [ wsgi_utils.format_message_v1_1(msg, base_path, cid) for msg in resp_msgs ] resp.location = req.path + '/' + cid resp.body = utils.to_json({'messages': resp_msgs}) resp.status = falcon.HTTP_201 else: resp.status = falcon.HTTP_204
def on_patch(self, req, resp, project_id, queue_name, claim_id):
    """Update an existing claim's metadata (e.g., its TTL)."""
    # Read claim metadata (e.g., TTL) and raise appropriate
    # HTTP errors as needed.
    doc = wsgi_utils.deserialize(req.stream, req.content_length)
    claim_meta = wsgi_utils.sanitize(doc, CLAIM_PATCH_SPEC)

    try:
        self._validate.claim_updating(claim_meta)
        self._claim_controller.update(queue_name,
                                      claim_id=claim_id,
                                      metadata=claim_meta,
                                      project=project_id)
        resp.status = falcon.HTTP_204
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPNotFound(six.text_type(ex))
    except Exception:
        description = _(u'Claim could not be updated.')
        LOG.exception(description)
        raise wsgi_errors.HTTPServiceUnavailable(description)
def on_post(self, req, resp, project_id, queue_name):
    """Purge messages and/or subscriptions from a queue.

    Fix: the two LOG.debug calls eagerly interpolated queue_name with
    '%'; they now pass it as a lazy logging argument, the project-wide
    convention (and skips formatting when DEBUG is disabled).

    :param req: falcon request; optional body selects resource_types
        (defaults to both messages and subscriptions)
    :param resp: falcon response; 204 on success
    """
    try:
        if req.content_length:
            document = wsgi_utils.deserialize(req.stream,
                                              req.content_length)
            self._validate.queue_purging(document)
        else:
            document = {'resource_types': ['messages', 'subscriptions']}
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))

    try:
        if "messages" in document['resource_types']:
            pop_limit = 100
            LOG.debug("Purge all messages under queue %s", queue_name)
            # Keep popping batches until the queue is drained.
            messages = self._message_ctrl.pop(queue_name, pop_limit,
                                              project=project_id)
            while messages:
                messages = self._message_ctrl.pop(queue_name, pop_limit,
                                                  project=project_id)

        if "subscriptions" in document['resource_types']:
            LOG.debug("Purge all subscriptions under queue %s",
                      queue_name)
            results = self._subscription_ctrl.list(queue_name,
                                                   project=project_id)
            subscriptions = list(next(results))
            for sub in subscriptions:
                self._subscription_ctrl.delete(queue_name,
                                               sub['id'],
                                               project=project_id)
    except ValueError as err:
        raise wsgi_errors.HTTPBadRequestAPI(str(err))
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Queue could not be purged.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    resp.status = falcon.HTTP_204
def on_get(self, request, response, project_id):
    """Returns a pool listing as objects embedded in an object:

    ::

        {
            "pools": [
                {"href": "", "weight": 100, "uri": ""},
                ...
            ],
            "links": [
                {"href": "", "rel": "next"}
            ]
        }

    :returns: HTTP | 200
    """
    LOG.debug(u'LIST pools')

    store = {}
    request.get_param('marker', store=store)
    request.get_param_as_int('limit', store=store)
    request.get_param_as_bool('detailed', store=store)

    try:
        self._validate.pool_listing(**store)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))

    cursor = self._ctrl.list(**store)
    pools = list(next(cursor))

    results = {'links': []}

    if pools:
        # Second item of the cursor is the next-page marker.
        store['marker'] = next(cursor)

        for entry in pools:
            entry['href'] = request.path + '/' + entry['name']

        results['links'] = [
            {
                'rel': 'next',
                'href': request.path + falcon.to_query_str(store)
            }
        ]

    results['pools'] = pools

    response.content_location = request.relative_uri
    response.body = transport_utils.to_json(results)
    response.status = falcon.HTTP_200
def on_post(self, req, resp, project_id, queue_name):
    """Create a pre-signed URL granting scoped access to a queue.

    :param req: falcon request; body may contain signing options and an
        optional "paths" list restricted to _VALID_PATHS
    :raises HTTPBadRequestAPI: on malformed body, unknown keys, invalid
        paths, or signing errors
    """
    LOG.debug(u'Pre-Signed URL Creation for queue: %(queue)s, '
              u'project: %(project)s',
              {'queue': queue_name, 'project': project_id})

    try:
        document = wsgi_utils.deserialize(req.stream, req.content_length)
    except ValueError as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))

    # Reject any keys not in the documented request schema.
    diff = set(document.keys()) - _KNOWN_KEYS
    if diff:
        msg = six.text_type(_LE('Unknown keys: %s') % diff)
        raise wsgi_errors.HTTPBadRequestAPI(msg)

    key = self._conf.signed_url.secret_key
    paths = document.pop('paths', None)
    if not paths:
        # NOTE(review): req.path[:-6] presumably strips a trailing
        # '/share' segment to get the queue's base path — TODO confirm.
        paths = [os.path.join(req.path[:-6], 'messages')]
    else:
        diff = set(paths) - _VALID_PATHS
        if diff:
            msg = six.text_type(_LE('Invalid paths: %s') % diff)
            raise wsgi_errors.HTTPBadRequestAPI(msg)
        paths = [os.path.join(req.path[:-6], path) for path in paths]

    try:
        data = urls.create_signed_url(key, paths,
                                      project=project_id,
                                      **document)
    except ValueError as err:
        raise wsgi_errors.HTTPBadRequestAPI(str(err))

    resp.body = utils.to_json(data)
def on_get(self, req, resp, project_id): kwargs = {} # NOTE(kgriffs): This syntax ensures that # we don't clobber default values with None. req.get_param('marker', store=kwargs) req.get_param_as_int('limit', store=kwargs) req.get_param_as_bool('detailed', store=kwargs) try: self._validate.queue_listing(**kwargs) results = self._queue_controller.list(project=project_id, **kwargs) # Buffer list of queues queues = list(next(results)) except validation.ValidationFailed as ex: LOG.debug(ex) raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex)) except Exception as ex: LOG.exception(ex) description = _(u'Queues could not be listed.') raise wsgi_errors.HTTPServiceUnavailable(description) # Got some. Prepare the response. kwargs['marker'] = next(results) or kwargs.get('marker', '') reserved_metadata = _get_reserved_metadata(self._validate).items() for each_queue in queues: each_queue['href'] = req.path + '/' + each_queue['name'] if kwargs.get('detailed'): for meta, value in reserved_metadata: if not each_queue.get('metadata', {}).get(meta): each_queue['metadata'][meta] = value links = [] if queues: links = [ { 'rel': 'next', 'href': req.path + falcon.to_query_str(kwargs) } ] response_body = { 'queues': queues, 'links': links } resp.body = utils.to_json(response_body)
def on_put(self, req, resp, project_id, queue_name):
    """Replace queue metadata (v1 API; reserved "_" keys are rejected).

    :param resp: falcon response; 204 with Location header on success
    :raises HTTPBadRequestAPI: on validation failure or reserved keys
    :raises HTTPNotFound: when the queue does not exist
    """
    try:
        # Place JSON size restriction before parsing
        self._validate.queue_metadata_length(req.content_length)
        # Deserialize queue metadata
        document = wsgi_utils.deserialize(req.stream, req.content_length)
        metadata = wsgi_utils.sanitize(document)
        # Restrict setting any reserved queue attributes
        for key in metadata:
            if key.startswith('_'):
                description = _(u'Reserved queue attributes in metadata '
                                u'(which names start with "_") can not be '
                                u'set in API v1.')
                raise validation.ValidationFailed(description)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))

    try:
        self._queue_ctrl.set_metadata(queue_name,
                                      metadata=metadata,
                                      project=project_id)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except storage_errors.QueueDoesNotExist as ex:
        raise wsgi_errors.HTTPNotFound(six.text_type(ex))
    except Exception:
        description = _(u'Metadata could not be updated.')
        LOG.exception(description)
        raise wsgi_errors.HTTPServiceUnavailable(description)

    resp.status = falcon.HTTP_204
    resp.location = req.path
def on_put(self, req, resp, project_id, queue_name):
    """Create a queue (idempotent), optionally with initial metadata.

    :param resp: falcon response; 201 when created, 204 when it existed
    """
    LOG.debug(u'Queue item PUT - queue: %(queue)s, '
              u'project: %(project)s',
              {'queue': queue_name, 'project': project_id})

    try:
        # Place JSON size restriction before parsing
        self._validate.queue_metadata_length(req.content_length)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))

    # Deserialize queue metadata
    metadata = None
    if req.content_length:
        document = wsgi_utils.deserialize(req.stream,
                                          req.content_length)
        metadata = wsgi_utils.sanitize(document, spec=None)

    try:
        created = self._queue_controller.create(queue_name,
                                                metadata=metadata,
                                                project=project_id)
    except storage_errors.FlavorDoesNotExist as ex:
        LOG.exception(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Queue could not be created.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    resp.status = falcon.HTTP_201 if created else falcon.HTTP_204
    resp.location = req.path
def on_get(self, req, resp, project_id):
    """List queues (v1 API; an empty listing yields 204, no body)."""
    LOG.debug(u'Queue collection GET')

    kwargs = {}

    # NOTE(kgriffs): This syntax ensures that
    # we don't clobber default values with None.
    req.get_param('marker', store=kwargs)
    req.get_param_as_int('limit', store=kwargs)
    req.get_param_as_bool('detailed', store=kwargs)

    try:
        self._validate.queue_listing(**kwargs)
        results = self._queue_controller.list(project=project_id,
                                              **kwargs)

        # Buffer list of queues
        queues = list(next(results))
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except Exception:
        description = _(u'Queues could not be listed.')
        LOG.exception(description)
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Check for an empty list
    if len(queues) == 0:
        resp.status = falcon.HTTP_204
        return

    # Got some. Prepare the response.
    kwargs['marker'] = next(results)
    for each_queue in queues:
        each_queue['href'] = req.path + '/' + each_queue['name']

    response_body = {
        'queues': queues,
        'links': [
            {
                'rel': 'next',
                'href': req.path + falcon.to_query_str(kwargs)
            }
        ]
    }

    resp.content_location = req.relative_uri
    resp.body = utils.to_json(response_body)
def on_delete(self, req, resp, project_id, queue_name):
    """Delete messages by ID, or pop a batch off the queue.

    Exactly one of the "ids" or "pop" query parameters drives the
    operation; validation rejects invalid combinations.
    """
    message_ids = req.get_param_as_list('ids')
    pop_count = req.get_param_as_int('pop')

    try:
        self._validate.message_deletion(message_ids, pop_count)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))

    if message_ids:
        resp.status = self._delete_messages_by_id(queue_name,
                                                  message_ids,
                                                  project_id)
    elif pop_count:
        resp.status, resp.body = self._pop_messages(queue_name,
                                                    project_id,
                                                    pop_count)
def on_post(self, req, resp, project_id, queue_name):
    """Claim a batch of messages from a queue (v1 representation).

    :param resp: falcon response; 201 with claimed messages, or 204
        when nothing was available to claim
    """
    # Check for an explicit limit on the # of messages to claim
    limit = req.get_param_as_int('limit')
    claim_options = {} if limit is None else {'limit': limit}

    # Read claim metadata (e.g., TTL) and raise appropriate
    # HTTP errors as needed.
    document = wsgi_utils.deserialize(req.stream, req.content_length)
    metadata = wsgi_utils.sanitize(document, CLAIM_POST_SPEC)

    # Claim some messages
    try:
        self._validate.claim_creation(metadata, limit=limit)

        cid, msgs = self._claim_controller.create(
            queue_name,
            metadata=metadata,
            project=project_id,
            **claim_options)

        # Buffer claimed messages
        # TODO(kgriffs): optimize, along with serialization (below)
        resp_msgs = list(msgs)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Claim could not be created.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Serialize claimed messages, if any. This logic assumes
    # the storage driver returned well-formed messages.
    if len(resp_msgs) != 0:
        resp_msgs = [wsgi_utils.format_message_v1(
            msg, req.path.rpartition('/')[0], cid) for msg in resp_msgs]

        resp.location = req.path + '/' + cid
        resp.body = utils.to_json(resp_msgs)
        resp.status = falcon.HTTP_201
    else:
        resp.status = falcon.HTTP_204
def on_get(self, req, resp, project_id, queue_name):
    """List subscriptions on a queue and render the response document."""
    LOG.debug(u'Subscription collection GET - project: %(project)s, '
              u'queue: %(queue)s',
              {'project': project_id, 'queue': queue_name})

    kwargs = {}

    # NOTE(kgriffs): This syntax ensures that
    # we don't clobber default values with None.
    req.get_param('marker', store=kwargs)
    req.get_param_as_int('limit', store=kwargs)

    try:
        self._validate.subscription_listing(**kwargs)
        results = self._subscription_controller.list(queue_name,
                                                     project=project_id,
                                                     **kwargs)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Subscriptions could not be listed.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Buffer list of subscriptions
    subscriptions = list(next(results))

    # Got some. Prepare the response.
    kwargs['marker'] = next(results) or kwargs.get('marker', '')

    response_body = {
        'subscriptions': subscriptions,
        'links': [
            {
                'rel': 'next',
                'href': req.path + falcon.to_query_str(kwargs)
            }
        ]
    }

    resp.body = utils.to_json(response_body)
def on_delete(self, req, resp, project_id, queue_name):
    """Bulk-delete the messages named by the required "ids" parameter."""
    # NOTE(zyuan): Attempt to delete the whole message collection
    # (without an "ids" parameter) is not allowed
    message_ids = req.get_param_as_list('ids', required=True)

    try:
        self._validate.message_listing(limit=len(message_ids))
        self._message_controller.bulk_delete(queue_name,
                                             message_ids=message_ids,
                                             project=project_id)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Messages could not be deleted.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    resp.status = falcon.HTTP_204
def on_put(self, req, resp, project_id, queue_name, subscription_id):
    """Confirm (or un-confirm) a subscription.

    When the client sets confirmed to False, a fresh confirmation
    notification is sent out so the subscriber can re-confirm.

    :param resp: falcon response; 204 with Location header on success
    """
    if req.content_length:
        document = wsgi_utils.deserialize(req.stream, req.content_length)
    else:
        document = {}

    try:
        self._validate.subscription_confirming(document)
        confirmed = document.get('confirmed', None)
        self._subscription_controller.confirm(queue_name, subscription_id,
                                              project=project_id,
                                              confirmed=confirmed)
        if confirmed is False:
            # Re-send the confirmation notification with a fresh
            # expiration based on the configured default TTL.
            now = timeutils.utcnow_ts()
            now_dt = datetime.datetime.utcfromtimestamp(now)
            ttl = self._conf.transport.default_subscription_ttl
            expires = now_dt + datetime.timedelta(seconds=ttl)
            api_version = req.path.split('/')[1]
            sub = self._subscription_controller.get(queue_name,
                                                    subscription_id,
                                                    project=project_id)
            self._notification.send_confirm_notification(
                queue_name, sub, self._conf, project_id,
                str(expires), api_version, True)
        resp.status = falcon.HTTP_204
        resp.location = req.path
    except storage_errors.SubscriptionDoesNotExist as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPNotFound(six.text_type(ex))
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except Exception as ex:
        LOG.exception(ex)
        # NOTE(review): an unexpected failure is mapped to 400 here,
        # not 503 — confirm this is intentional.
        description = (_(u'Subscription %(subscription_id)s could not be'
                         ' confirmed.') %
                       dict(subscription_id=subscription_id))
        raise falcon.HTTPBadRequest(_('Unable to confirm subscription'),
                                    description)
def on_post(self, req, resp, project_id, queue_name):
    """Enqueue messages, honoring per-queue TTL and size limits.

    The queue's metadata (if any) supplies _default_message_ttl and
    _max_messages_post_size; a missing queue is lazily created with
    empty metadata.

    :param resp: falcon response; 201 with resource hrefs on success
    """
    client_uuid = wsgi_helpers.get_client_uuid(req)
    try:
        # NOTE(flwang): Replace 'exists' with 'get_metadata' won't impact
        # the performance since both of them will call
        # collection.find_one()
        queue_meta = None
        try:
            queue_meta = self._queue_controller.get_metadata(queue_name,
                                                             project_id)
        except storage_errors.DoesNotExist as ex:
            self._validate.queue_identification(queue_name, project_id)
            self._queue_controller.create(queue_name, project=project_id)
            # NOTE(flwang): Queue is created in lazy mode, so no metadata
            # set.
            queue_meta = {}

        queue_max_msg_size = queue_meta.get('_max_messages_post_size',
                                            None)
        queue_default_ttl = queue_meta.get('_default_message_ttl', None)

        # TODO(flwang): To avoid any unexpected regression issue, we just
        # leave the _message_post_spec attribute of class as it's. It
        # should be removed in Newton release.
        if queue_default_ttl:
            message_post_spec = (('ttl', int, queue_default_ttl),
                                 ('body', '*', None),)
        else:
            message_post_spec = (('ttl', int, self._default_message_ttl),
                                 ('body', '*', None),)
        # Place JSON size restriction before parsing
        self._validate.message_length(
            req.content_length, max_msg_post_size=queue_max_msg_size)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))

    # Deserialize and validate the incoming messages
    document = wsgi_utils.deserialize(req.stream, req.content_length)

    if 'messages' not in document:
        description = _(u'No messages were found in the request body.')
        raise wsgi_errors.HTTPBadRequestAPI(description)

    messages = wsgi_utils.sanitize(document['messages'],
                                   message_post_spec,
                                   doctype=wsgi_utils.JSONArray)

    try:
        self._validate.message_posting(messages)

        message_ids = self._message_controller.post(
            queue_name,
            messages=messages,
            project=project_id,
            client_uuid=client_uuid)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPNotFound(six.text_type(ex))
    except storage_errors.MessageConflict as ex:
        LOG.exception(ex)
        description = _(u'No messages could be enqueued.')
        raise wsgi_errors.HTTPServiceUnavailable(description)
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Messages could not be enqueued.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Prepare the response
    ids_value = ','.join(message_ids)
    resp.location = req.path + '?ids=' + ids_value

    hrefs = [req.path + '/' + id for id in message_ids]
    body = {'resources': hrefs}
    resp.body = utils.to_json(body)
    resp.status = falcon.HTTP_201
def on_post(self, req, resp, project_id, queue_name):
    """Create (or re-confirm) a subscription on a queue.

    Deserializes the request document (treating an empty body as ``{}``),
    lazily creates the queue if it does not exist, loads the notification
    task driver matching the subscriber URI's scheme, registers the
    subscriber, and persists the subscription.

    Responds with 201 and the subscription id when a new subscription is
    created, or when an existing *unconfirmed* one is found (in which
    case the confirmation notification is re-sent). Responds with 409
    when a confirmed subscription already exists for the same subscriber,
    400 on validation failure, and 503 on any other error.
    """
    if req.content_length:
        document = wsgi_utils.deserialize(req.stream, req.content_length)
    else:
        document = {}

    try:
        if not self._queue_controller.exists(queue_name, project_id):
            self._queue_controller.create(queue_name, project=project_id)
        self._validate.subscription_posting(document)
        subscriber = document['subscriber']
        options = document.get('options', {})
        url = netutils.urlsplit(subscriber)
        ttl = document.get('ttl', self._default_subscription_ttl)
        # The URI scheme (e.g. http, mailto) selects the notification
        # task driver used to deliver to this subscriber.
        mgr = driver.DriverManager('zaqar.notification.tasks',
                                   url.scheme,
                                   invoke_on_load=True)
        req_data = req.headers.copy()
        req_data.update(req.env)
        mgr.driver.register(subscriber, options, ttl, project_id,
                            req_data)

        created = self._subscription_controller.create(queue_name,
                                                       subscriber,
                                                       ttl,
                                                       options,
                                                       project=project_id)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Subscription could not be created.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    now = timeutils.utcnow_ts()
    now_dt = datetime.datetime.utcfromtimestamp(now)
    expires = now_dt + datetime.timedelta(seconds=ttl)
    api_version = req.path.split('/')[1]
    if created:
        subscription = self._subscription_controller.get(queue_name,
                                                         created,
                                                         project_id)
        # send confirm notification
        self._notification.send_confirm_notification(
            queue_name, subscription, self._conf, project_id,
            str(expires), api_version)

        resp.location = req.path
        resp.status = falcon.HTTP_201
        resp.body = utils.to_json(
            {'subscription_id': six.text_type(created)})
    else:
        # The controller returned a falsy id: an identical subscription
        # already exists. Decide between 409 and re-confirmation.
        subscription = self._subscription_controller.get_with_subscriber(
            queue_name, subscriber, project_id)
        confirmed = subscription.get('confirmed', True)
        if confirmed:
            # BUG FIX: the two concatenated literals previously lacked a
            # separating space, rendering "...exists.Subscriptions...".
            description = _(u'Such subscription already exists. '
                            u'Subscriptions are unique by project + queue '
                            u'+ subscriber URI.')
            raise wsgi_errors.HTTPConflict(description,
                                           headers={'location': req.path})
        else:
            # The subscription is not confirmed, re-send confirm
            # notification
            self._notification.send_confirm_notification(
                queue_name, subscription, self._conf, project_id,
                str(expires), api_version)

            resp.location = req.path
            resp.status = falcon.HTTP_201
            resp.body = utils.to_json(
                {'subscription_id': six.text_type(subscription['id'])})
def _get(self, req, project_id, queue_name): client_uuid = wsgi_helpers.get_client_uuid(req) kwargs = {} # NOTE(kgriffs): This syntax ensures that # we don't clobber default values with None. req.get_param('marker', store=kwargs) req.get_param_as_int('limit', store=kwargs) req.get_param_as_bool('echo', store=kwargs) req.get_param_as_bool('include_claimed', store=kwargs) req.get_param_as_bool('include_delayed', store=kwargs) try: queue_meta = {} try: # NOTE(cdyangzhenyu): In order to determine whether the # queue has a delay attribute, the metadata of the queue # is obtained here. This may have a little performance impact. # So maybe a refactor is needed in the future. queue_meta = self._queue_controller.get_metadata( queue_name, project_id) except storage_errors.DoesNotExist as ex: LOG.exception(ex) queue_delay = queue_meta.get('_default_message_delay') if not queue_delay: # NOTE(cdyangzhenyu): If the queue without the metadata # attribute _default_message_delay, we don't filter # for delay messages. kwargs['include_delayed'] = True self._validate.message_listing(**kwargs) results = self._message_controller.list(queue_name, project=project_id, client_uuid=client_uuid, **kwargs) # Buffer messages cursor = next(results) messages = list(cursor) except validation.ValidationFailed as ex: LOG.debug(ex) raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex)) except storage_errors.QueueDoesNotExist as ex: LOG.debug(ex) messages = None except Exception as ex: LOG.exception(ex) description = _(u'Messages could not be listed.') raise wsgi_errors.HTTPServiceUnavailable(description) if not messages: messages = [] else: # Found some messages, so prepare the response kwargs['marker'] = next(results) base_path = req.path.rsplit('/', 1)[0] messages = [ wsgi_utils.format_message_v1_1(m, base_path, m['claim_id']) for m in messages ] links = [] if messages: links = [{ 'rel': 'next', 'href': req.path + falcon.to_query_str(kwargs) }] return {'messages': messages, 'links': links}
def on_get(self, request, response, project_id): """Returns a flavor listing as objects embedded in an object: :: { "flavors": [ {"href": "", "capabilities": {}, "pool_list": ""}, ... ], "links": [ {"rel": "next", "href": ""}, ... ] } :returns: HTTP | 200 """ LOG.debug(u'LIST flavors for project_id %s', project_id) store = {} request.get_param('marker', store=store) request.get_param_as_int('limit', store=store) detailed = request.get_param_as_bool('detailed') try: self._validate.flavor_listing(**store) except validation.ValidationFailed as ex: LOG.debug(ex) raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex)) cursor = self._ctrl.list(project=project_id, **store) flavors = list(next(cursor)) results = {'links': []} if flavors: store['marker'] = next(cursor) for entry in flavors: entry['href'] = request.path + '/' + entry['name'] data = {} data['name'] = entry['name'] pool_list = \ list(self._pools_ctrl.get_pools_by_flavor(flavor=data)) pool_name_list = [] if len(pool_list) > 0: pool_name_list = [x['name'] for x in pool_list] entry['pool_list'] = pool_name_list if detailed: caps = self._pools_ctrl.capabilities(flavor=entry) entry['capabilities'] = [ str(cap).split('.')[-1] for cap in caps ] if detailed is not None: store['detailed'] = detailed if flavors: results['links'] = [{ 'rel': 'next', 'href': request.path + falcon.to_query_str(store) }] results['flavors'] = flavors response.body = transport_utils.to_json(results) response.status = falcon.HTTP_200