def _get_by_id(self, base_path, project_id, queue_name, ids):
    """Returns one or more messages from the queue by ID."""
    try:
        self._validate.message_listing(limit=len(ids))
        messages = self.message_controller.bulk_get(
            queue_name,
            message_ids=ids,
            project=project_id)

    except validation.ValidationFailed as ex:
        raise wsgi_exceptions.HTTPBadRequestAPI(six.text_type(ex))

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Message could not be retrieved.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    # Prepare response
    messages = list(messages)
    if not messages:
        return None

    base_path += '/'
    for each_message in messages:
        each_message['href'] = base_path + each_message['id']
        del each_message['id']

    return messages

def on_post(self, req, resp, project_id, queue_name):
    LOG.debug(_(u'Claims collection POST - queue: %(queue)s, '
                u'project: %(project)s'),
              {'queue': queue_name, 'project': project_id})

    # Check for an explicit limit on the # of messages to claim
    limit = req.get_param_as_int('limit')
    claim_options = {} if limit is None else {'limit': limit}

    # Place JSON size restriction before parsing
    if req.content_length > self._metadata_max_length:
        description = _(u'Claim metadata size is too large.')
        raise wsgi_exceptions.HTTPBadRequestBody(description)

    # Read claim metadata (e.g., TTL) and raise appropriate
    # HTTP errors as needed.
    metadata, = wsgi_utils.filter_stream(req.stream, req.content_length,
                                         CLAIM_POST_SPEC)

    # Claim some messages
    try:
        self._validate.claim_creation(metadata, **claim_options)
        cid, msgs = self.claim_controller.create(
            queue_name,
            metadata=metadata,
            project=project_id,
            **claim_options)

        # Buffer claimed messages
        # TODO(kgriffs): optimize, along with serialization (below)
        resp_msgs = list(msgs)

    except validation.ValidationFailed as ex:
        raise wsgi_exceptions.HTTPBadRequestAPI(six.text_type(ex))

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Claim could not be created.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    # Serialize claimed messages, if any. This logic assumes
    # the storage driver returned well-formed messages.
    if len(resp_msgs) != 0:
        for msg in resp_msgs:
            msg['href'] = _msg_uri_from_claim(
                req.path.rpartition('/')[0], msg['id'], cid)
            del msg['id']

        resp.location = req.path + '/' + cid
        resp.body = utils.to_json(resp_msgs)
        resp.status = falcon.HTTP_201
    else:
        resp.status = falcon.HTTP_204

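# Illustrative client-side sketch (not part of the handler above): creating a
# claim against this endpoint. The base URL, queue name, Client-ID value, and
# use of the third-party `requests` library are assumptions for demonstration
# only; the TTL and grace values are arbitrary examples.
import requests

claims_url = 'http://localhost:8888/v1/queues/fizbit/claims'  # hypothetical
headers = {'Client-ID': '30387f00-39a0-11e3-aa6e-0800200c9a66'}

# Claim up to 5 messages; the JSON body carries the claim metadata.
r = requests.post(claims_url, params={'limit': 5},
                  json={'ttl': 300, 'grace': 60}, headers=headers)

if r.status_code == 201:
    # Location header points at the new claim; body lists the claimed messages.
    print(r.headers['Location'], r.json())
elif r.status_code == 204:
    print('No messages were available to claim.')
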
def _get(self, req, project_id, queue_name):
    client_uuid = wsgi_utils.get_client_uuid(req)
    kwargs = {}

    # NOTE(kgriffs): This syntax ensures that
    # we don't clobber default values with None.
    req.get_param('marker', store=kwargs)
    req.get_param_as_int('limit', store=kwargs)
    req.get_param_as_bool('echo', store=kwargs)
    req.get_param_as_bool('include_claimed', store=kwargs)

    try:
        self._validate.message_listing(**kwargs)
        results = self.message_controller.list(
            queue_name,
            project=project_id,
            client_uuid=client_uuid,
            **kwargs)

        # Buffer messages
        cursor = next(results)
        messages = list(cursor)

    except validation.ValidationFailed as ex:
        raise wsgi_exceptions.HTTPBadRequestAPI(six.text_type(ex))

    except storage_exceptions.DoesNotExist:
        raise falcon.HTTPNotFound()

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Messages could not be listed.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    if not messages:
        return None

    # Found some messages, so prepare the response
    kwargs['marker'] = next(results)
    for each_message in messages:
        each_message['href'] = req.path + '/' + each_message['id']
        del each_message['id']

    return {
        'messages': messages,
        'links': [
            {
                'rel': 'next',
                'href': req.path + falcon.to_query_str(kwargs)
            }
        ]
    }

def get_client_uuid(req):
    """Read a required Client-ID from a request.

    :param req: A falcon.Request object
    :raises: HTTPBadRequest if the Client-ID header is missing or
        does not represent a valid UUID
    :returns: A UUID object
    """
    try:
        return uuid.UUID(req.get_header('Client-ID', required=True))

    except ValueError:
        description = _(u'Malformed hexadecimal UUID.')
        raise exceptions.HTTPBadRequestAPI(description)

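# Minimal sketch of why the ValueError handler above is needed: uuid.UUID()
# accepts a well-formed hexadecimal UUID string but raises ValueError for
# anything else, which get_client_uuid() translates into an HTTP 400 response.
import uuid

uuid.UUID('30387f00-39a0-11e3-aa6e-0800200c9a66')  # ok, returns a UUID object

try:
    uuid.UUID('not-a-uuid')
except ValueError:
    print('rejected: malformed hexadecimal UUID')
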
def on_get(self, req, resp, project_id):
    kwargs = {}

    # NOTE(kgriffs): This syntax ensures that
    # we don't clobber default values with None.
    req.get_param('marker', store=kwargs)
    req.get_param_as_int('limit', store=kwargs)
    req.get_param_as_bool('detailed', store=kwargs)

    try:
        self._validate.queue_listing(**kwargs)
        results = self.queue_controller.list(project=project_id, **kwargs)

    except validation.ValidationFailed as ex:
        raise wsgi_exceptions.HTTPBadRequestAPI(six.text_type(ex))

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Queues could not be listed.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    # Buffer list of queues
    queues = list(next(results))

    # Check for an empty list
    if len(queues) == 0:
        resp.status = falcon.HTTP_204
        return

    # Got some. Prepare the response.
    kwargs['marker'] = next(results)
    for each_queue in queues:
        each_queue['href'] = req.path + '/' + each_queue['name']

    response_body = {
        'queues': queues,
        'links': [
            {
                'rel': 'next',
                'href': req.path + falcon.to_query_str(kwargs)
            }
        ]
    }

    resp.content_location = req.relative_uri
    resp.body = utils.to_json(response_body)

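# Illustrative paging sketch (an assumption, not part of the resource above):
# a client walks the queue listing by following the 'next' link until the
# server answers 204 No Content. The base URL and the `requests` library are
# hypothetical choices for this example.
import requests

url = 'http://localhost:8888/v1/queues'  # hypothetical endpoint
params = {'limit': 10}

while True:
    r = requests.get(url, params=params)
    if r.status_code == 204:
        break  # no (more) queues

    listing = r.json()
    for queue in listing['queues']:
        print(queue['name'], queue['href'])

    # The 'next' href already embeds the marker and limit as a query string.
    url = 'http://localhost:8888' + listing['links'][0]['href']
    params = None
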
def on_get(self, request, response):
    project = helpers.get_project(request)
    LOG.debug('LIST queues - project: {0}'.format(project))

    kwargs = {}
    request.get_param('marker', store=kwargs)
    request.get_param_as_int('limit', store=kwargs)
    request.get_param_as_bool('detailed', store=kwargs)

    resp = collections.defaultdict(list)
    limit = kwargs.get('limit', STORAGE_LIMITS.default_queue_paging)

    try:
        validate.queue_listing(limit=limit)
    except validate.ValidationFailed as ex:
        raise wsgi_exceptions.HTTPBadRequestAPI(six.text_type(ex))

    for queue in self._catalogue.list(project):
        queue_name = queue['name']
        if queue_name < kwargs.get('marker', ''):
            continue

        entry = {
            'href': request.path + '/' + queue_name,
            'name': queue_name
        }

        if kwargs.get('detailed', None):
            entry['metadata'] = queue['metadata']

        resp['queues'].append(entry)
        kwargs['marker'] = queue_name

        if len(resp['queues']) == limit:
            break

    if not resp:
        LOG.debug('LIST queues - no queues found')
        response.status = falcon.HTTP_204
        return

    resp['links'].append({
        'rel': 'next',
        'href': request.path + falcon.to_query_str(kwargs)
    })

    response.content_location = request.relative_uri
    response.body = json.dumps(resp, ensure_ascii=False)

def on_delete(self, req, resp, project_id, queue_name):
    # NOTE(zyuan): Attempting to delete the whole message collection
    # (without an "ids" parameter) is not allowed
    ids = req.get_param_as_list('ids', required=True)

    try:
        self._validate.message_listing(limit=len(ids))
        self.message_controller.bulk_delete(
            queue_name,
            message_ids=ids,
            project=project_id)

    except validation.ValidationFailed as ex:
        raise wsgi_exceptions.HTTPBadRequestAPI(six.text_type(ex))

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Messages could not be deleted.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    resp.status = falcon.HTTP_204

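# Illustrative sketch of the bulk delete request this handler expects: the
# "ids" query parameter is mandatory and carries a comma-separated list of
# message IDs. The URL, IDs, and `requests` usage are hypothetical.
import requests

msgs_url = 'http://localhost:8888/v1/queues/fizbit/messages'  # hypothetical
r = requests.delete(msgs_url, params={'ids': 'msgid1,msgid2'})
assert r.status_code == 204  # success returns no content
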
def on_put(self, req, resp, project_id, queue_name):
    LOG.debug(_(u'Queue metadata PUT - queue: %(queue)s, '
                u'project: %(project)s'),
              {'queue': queue_name, 'project': project_id})

    # Place JSON size restriction before parsing
    if req.content_length > self._wsgi_conf.metadata_max_length:
        description = _(u'Queue metadata size is too large.')
        raise wsgi_exceptions.HTTPBadRequestBody(description)

    # Deserialize queue metadata
    metadata, = wsgi_utils.filter_stream(req.stream,
                                         req.content_length,
                                         spec=None)

    try:
        self._validate.queue_content(
            metadata, check_size=(
                self._validate._limits_conf.metadata_size_uplimit <
                self._wsgi_conf.metadata_max_length))
        self.queue_ctrl.set_metadata(queue_name,
                                     metadata=metadata,
                                     project=project_id)

    except validation.ValidationFailed as ex:
        raise wsgi_exceptions.HTTPBadRequestAPI(six.text_type(ex))

    except storage_exceptions.QueueDoesNotExist:
        raise falcon.HTTPNotFound()

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Metadata could not be updated.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    resp.status = falcon.HTTP_204
    resp.location = req.path

def on_patch(self, req, resp, project_id, queue_name, claim_id):
    LOG.debug(_(u'Claim Item PATCH - claim: %(claim_id)s, '
                u'queue: %(queue_name)s, project: %(project_id)s') %
              {'queue_name': queue_name,
               'project_id': project_id,
               'claim_id': claim_id})

    # Place JSON size restriction before parsing
    if req.content_length > self._metadata_max_length:
        description = _(u'Claim metadata size is too large.')
        raise wsgi_exceptions.HTTPBadRequestBody(description)

    # Read claim metadata (e.g., TTL) and raise appropriate
    # HTTP errors as needed.
    metadata, = wsgi_utils.filter_stream(req.stream, req.content_length,
                                         CLAIM_PATCH_SPEC)

    try:
        self._validate.claim_updating(metadata)
        self.claim_controller.update(queue_name,
                                     claim_id=claim_id,
                                     metadata=metadata,
                                     project=project_id)

        resp.status = falcon.HTTP_204

    except validation.ValidationFailed as ex:
        raise wsgi_exceptions.HTTPBadRequestAPI(six.text_type(ex))

    except storage_exceptions.DoesNotExist:
        raise falcon.HTTPNotFound()

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Claim could not be updated.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

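# Illustrative sketch of renewing a claim through this handler: PATCH the
# claim resource with a new TTL. The URL, claim ID, TTL value, and `requests`
# usage are assumptions for demonstration only.
import requests

claim_url = 'http://localhost:8888/v1/queues/fizbit/claims/someclaimid'  # hypothetical
r = requests.patch(claim_url, json={'ttl': 300})
assert r.status_code == 204  # claim TTL was extended
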
def on_post(self, req, resp, project_id, queue_name):
    LOG.debug(_(u'Messages collection POST - queue: %(queue)s, '
                u'project: %(project)s'),
              {'queue': queue_name, 'project': project_id})

    client_uuid = wsgi_utils.get_client_uuid(req)

    # Place JSON size restriction before parsing
    if req.content_length > self._wsgi_conf.content_max_length:
        description = _(u'Message collection size is too large.')
        raise wsgi_exceptions.HTTPBadRequestBody(description)

    # Pull out just the fields we care about
    messages = wsgi_utils.filter_stream(
        req.stream,
        req.content_length,
        MESSAGE_POST_SPEC,
        doctype=wsgi_utils.JSONArray)

    # Enqueue the messages
    partial = False

    try:
        # No need to check each message's size if it
        # cannot exceed the request size limit
        self._validate.message_posting(
            messages, check_size=(
                self._validate._limits_conf.message_size_uplimit <
                self._wsgi_conf.content_max_length))

        message_ids = self.message_controller.post(
            queue_name,
            messages=messages,
            project=project_id,
            client_uuid=client_uuid)

    except validation.ValidationFailed as ex:
        raise wsgi_exceptions.HTTPBadRequestAPI(six.text_type(ex))

    except storage_exceptions.DoesNotExist:
        raise falcon.HTTPNotFound()

    except storage_exceptions.MessageConflict as ex:
        LOG.exception(ex)
        partial = True
        message_ids = ex.succeeded_ids

        if not message_ids:
            # TODO(kgriffs): Include an error code that is different
            # from the code used in the generic case, below.
            description = _(u'No messages could be enqueued.')
            raise wsgi_exceptions.HTTPServiceUnavailable(description)

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Messages could not be enqueued.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    # Prepare the response
    ids_value = ','.join(message_ids)
    resp.location = req.path + '?ids=' + ids_value

    hrefs = [req.path + '/' + id for id in message_ids]
    body = {'resources': hrefs, 'partial': partial}
    resp.body = utils.to_json(body)

    resp.status = falcon.HTTP_201

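# Illustrative sketch of the request this handler parses: a JSON array of
# messages, each with a TTL and an arbitrary body, plus the Client-ID header
# required by get_client_uuid(). The URL, header value, payload, and
# `requests` usage are assumptions for demonstration only.
import requests

post_url = 'http://localhost:8888/v1/queues/fizbit/messages'  # hypothetical
headers = {'Client-ID': '30387f00-39a0-11e3-aa6e-0800200c9a66'}

payload = [
    {'ttl': 300, 'body': {'event': 'BackupStarted'}},
    {'ttl': 60, 'body': {'event': 'BackupProgress', 'percent': 0}},
]

r = requests.post(post_url, json=payload, headers=headers)
if r.status_code == 201:
    result = r.json()
    # 'resources' holds one href per enqueued message; 'partial' is True when
    # only a subset of the batch could be enqueued.
    print(result['resources'], result['partial'])
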