def on_get(self, req, resp, project_id, queue_name):
    """Returns statistics for a queue.

    When the queue holds messages, the storage-provided `newest` and
    `oldest` message IDs are rewritten as fully-qualified hrefs.
    """
    try:
        stats = self.queue_ctrl.stats(queue_name, project=project_id)
        message_stats = stats['messages']

        if message_stats['total'] != 0:
            # Replace message IDs with hrefs rooted at this queue's
            # messages collection.
            base_path = req.path[:req.path.rindex('/')] + '/messages/'

            for marker_name in ('newest', 'oldest'):
                marker = message_stats[marker_name]
                marker['href'] = base_path + marker['id']
                del marker['id']

        resp.content_location = req.path
        resp.body = utils.to_json(stats)
        # status defaults to 200

    except storage_exceptions.DoesNotExist:
        raise falcon.HTTPNotFound()

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Queue stats could not be read.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)
def on_get(self, req, resp, project_id, queue_name, claim_id):
    """Returns a single claim, including the messages it holds."""
    LOG.debug(_(u'Claim item GET - claim: %(claim_id)s, '
                u'queue: %(queue_name)s, project: %(project_id)s') %
              {'queue_name': queue_name,
               'project_id': project_id,
               'claim_id': claim_id})

    try:
        meta, claimed = self.claim_controller.get(
            queue_name, claim_id=claim_id, project=project_id)

        # Buffer claimed messages
        # TODO(kgriffs): Optimize along with serialization (see below)
        meta['messages'] = list(claimed)

    except storage_exceptions.DoesNotExist:
        raise falcon.HTTPNotFound()

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Claim could not be queried.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    # Serialize claimed messages
    # TODO(kgriffs): Optimize
    queue_path = req.path.rsplit('/', 2)[0]
    for msg in meta['messages']:
        msg['href'] = _msg_uri_from_claim(queue_path, msg['id'], meta['id'])
        del msg['id']

    meta['href'] = req.path
    del meta['id']

    resp.content_location = req.relative_uri
    resp.body = utils.to_json(meta)
def on_delete(self, req, resp, project_id, queue_name, message_id):
    """Deletes a single message, honoring any claim held on it."""
    LOG.debug(
        _(u'Messages item DELETE - message: %(message)s, '
          u'queue: %(queue)s, project: %(project)s')
        % {'message': message_id,
           'queue': queue_name,
           'project': project_id})

    try:
        # A claimed message may only be deleted by its claimant, so
        # forward the optional claim_id query param to storage.
        self.message_controller.delete(queue_name,
                                       message_id=message_id,
                                       project=project_id,
                                       claim=req.get_param('claim_id'))

    except storage_exceptions.NotPermitted as ex:
        LOG.exception(ex)
        title = _(u'Unable to delete')
        description = _(u'This message is claimed; it cannot be '
                        u'deleted without a valid claim_id.')
        raise falcon.HTTPForbidden(title, description)

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Message could not be deleted.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    # Alles guete
    resp.status = falcon.HTTP_204
def _get_by_id(self, base_path, project_id, queue_name, ids):
    """Returns one or more messages from the queue by ID."""
    try:
        # Reuse the listing validator to cap how many IDs may be
        # requested at once.
        validate.message_listing(limit=len(ids))
        results = self.message_controller.bulk_get(queue_name,
                                                   message_ids=ids,
                                                   project=project_id)

    except input_exceptions.ValidationFailed as ex:
        raise wsgi_exceptions.HTTPBadRequestBody(str(ex))

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Message could not be retrieved.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    # Prepare response
    found = list(results)
    if not found:
        return None

    # Replace each internal ID with an href under base_path.
    prefix = base_path + '/'
    for message in found:
        message['href'] = prefix + message['id']
        del message['id']

    return found
def on_get(self, req, resp, project_id, queue_name, message_id):
    """Returns a single message by ID."""
    LOG.debug(
        _(u'Messages item GET - message: %(message)s, '
          u'queue: %(queue)s, project: %(project)s')
        % {'message': message_id,
           'queue': queue_name,
           'project': project_id})

    # Look the message up, translating storage errors to HTTP ones.
    try:
        message = self.message_controller.get(queue_name,
                                              message_id,
                                              project=project_id)

    except storage_exceptions.DoesNotExist:
        raise falcon.HTTPNotFound()

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Message could not be retrieved.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    # Prepare response: the request path is the message's href.
    message['href'] = req.path
    del message['id']

    resp.content_location = req.relative_uri
    resp.body = utils.to_json(message)
def on_post(self, req, resp, project_id, queue_name):
    """Claims a batch of messages from the queue.

    Responds with 201 plus the claimed messages, or 204 when the
    queue had no claimable messages.
    """
    LOG.debug(_(u'Claims collection POST - queue: %(queue)s, '
                u'project: %(project)s') %
              {'queue': queue_name, 'project': project_id})

    # Check for an explicit limit on the # of messages to claim
    limit = req.get_param_as_int('limit')
    claim_options = {} if limit is None else {'limit': limit}

    # Place JSON size restriction before parsing.
    # NOTE(review): guard against a missing Content-Length header;
    # under Python 3, comparing None with an int raises TypeError,
    # which would surface as a 500. filter_stream() already raises
    # the proper 400 when the body is empty.
    if (req.content_length is not None and
            req.content_length > CFG.metadata_max_length):
        description = _(u'Claim metadata size is too large.')
        raise wsgi_exceptions.HTTPBadRequestBody(description)

    # Read claim metadata (e.g., TTL) and raise appropriate
    # HTTP errors as needed.
    metadata, = wsgi_utils.filter_stream(req.stream, req.content_length,
                                         CLAIM_POST_SPEC)

    # Claim some messages
    try:
        validate.claim_creation(metadata, **claim_options)
        cid, msgs = self.claim_controller.create(
            queue_name,
            metadata=metadata,
            project=project_id,
            **claim_options)

        # Buffer claimed messages
        # TODO(kgriffs): optimize, along with serialization (below)
        resp_msgs = list(msgs)

    except input_exceptions.ValidationFailed as ex:
        raise wsgi_exceptions.HTTPBadRequestBody(str(ex))

    except storage_exceptions.DoesNotExist:
        raise falcon.HTTPNotFound()

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Claim could not be created.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    # Serialize claimed messages, if any. This logic assumes
    # the storage driver returned well-formed messages.
    if len(resp_msgs) != 0:
        for msg in resp_msgs:
            msg['href'] = _msg_uri_from_claim(
                req.path.rpartition('/')[0], msg['id'], cid)

            del msg['id']

        resp.location = req.path + '/' + cid
        resp.body = utils.to_json(resp_msgs)
        resp.status = falcon.HTTP_201
    else:
        resp.status = falcon.HTTP_204
def on_delete(self, req, resp, project_id, queue_name):
    """Deletes a queue."""
    LOG.debug(_(u'Queue item DELETE - queue: %(queue)s, '
                u'project: %(project)s') %
              {'queue': queue_name, 'project': project_id})

    try:
        self.queue_controller.delete(queue_name, project=project_id)

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Queue could not be deleted.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    resp.status = falcon.HTTP_204
def _get(self, req, project_id, queue_name):
    """Lists messages in a queue, returning None when there are none."""
    uuid = req.get_header('Client-ID', required=True)

    # NOTE(kgriffs): This syntax ensures that
    # we don't clobber default values with None.
    kwargs = {}
    req.get_param('marker', store=kwargs)
    req.get_param_as_int('limit', store=kwargs)
    req.get_param_as_bool('echo', store=kwargs)
    req.get_param_as_bool('include_claimed', store=kwargs)

    try:
        validate.message_listing(**kwargs)
        results = self.message_controller.list(queue_name,
                                               project=project_id,
                                               client_uuid=uuid,
                                               **kwargs)

        # Buffer messages: the first item yielded is the message
        # cursor itself.
        cursor = next(results)
        messages = list(cursor)

    except input_exceptions.ValidationFailed as ex:
        raise wsgi_exceptions.HTTPBadRequestBody(str(ex))

    except storage_exceptions.DoesNotExist:
        raise falcon.HTTPNotFound()

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Messages could not be listed.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    if not messages:
        return None

    # Found some messages, so prepare the response. The second item
    # yielded by the storage driver is the next-page marker.
    kwargs['marker'] = next(results)

    for message in messages:
        message['href'] = req.path + '/' + message['id']
        del message['id']

    next_link = {'rel': 'next',
                 'href': req.path + falcon.to_query_str(kwargs)}

    return {'messages': messages, 'links': [next_link]}
def on_get(self, req, resp, project_id):
    """Lists queues for a project; responds 204 when there are none."""
    # NOTE(kgriffs): This syntax ensures that
    # we don't clobber default values with None.
    kwargs = {}
    req.get_param('marker', store=kwargs)
    req.get_param_as_int('limit', store=kwargs)
    req.get_param_as_bool('detailed', store=kwargs)

    try:
        validate.queue_listing(**kwargs)
        results = self.queue_controller.list(project=project_id, **kwargs)

    except input_exceptions.ValidationFailed as ex:
        raise wsgi_exceptions.HTTPBadRequestBody(str(ex))

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Queues could not be listed.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    # Buffer list of queues
    queues = list(next(results))

    # Check for an empty list
    if not queues:
        resp.status = falcon.HTTP_204
        return

    # Got some. Prepare the response; the second item yielded by
    # the storage driver is the next-page marker.
    kwargs['marker'] = next(results)
    for queue in queues:
        queue['href'] = req.path + '/' + queue['name']

    next_link = {'rel': 'next',
                 'href': req.path + falcon.to_query_str(kwargs)}

    resp.content_location = req.relative_uri
    resp.body = utils.to_json({'queues': queues, 'links': [next_link]})
def on_delete(self, req, resp, project_id, queue_name, claim_id):
    """Releases a claim, returning its messages to the queue."""
    LOG.debug(_(u'Claim item DELETE - claim: %(claim_id)s, '
                u'queue: %(queue_name)s, project: %(project_id)s') %
              {'queue_name': queue_name,
               'project_id': project_id,
               'claim_id': claim_id})

    try:
        self.claim_controller.delete(queue_name,
                                     claim_id=claim_id,
                                     project=project_id)
        resp.status = falcon.HTTP_204

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Claim could not be deleted.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)
def on_get(self, req, resp, project_id, queue_name):
    """Returns the metadata document for a queue."""
    LOG.debug(_(u'Queue metadata GET - queue: %(queue)s, '
                u'project: %(project)s') %
              {'queue': queue_name, 'project': project_id})

    try:
        metadata = self.queue_ctrl.get_metadata(queue_name,
                                                project=project_id)

    except storage_exceptions.DoesNotExist:
        raise falcon.HTTPNotFound()

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Queue metadata could not be retrieved.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    resp.content_location = req.path
    resp.body = utils.to_json(metadata)
def on_delete(self, req, resp, project_id, queue_name):
    """Bulk-deletes the messages named by the 'ids' query parameter."""
    # NOTE(zyuan): Attempt to delete the whole message collection
    # (without an "ids" parameter) is not allowed
    message_ids = req.get_param_as_list('ids', required=True)

    try:
        # Reuse the listing validator to cap the batch size.
        validate.message_listing(limit=len(message_ids))
        self.message_controller.bulk_delete(queue_name,
                                            message_ids=message_ids,
                                            project=project_id)

    except input_exceptions.ValidationFailed as ex:
        raise wsgi_exceptions.HTTPBadRequestBody(str(ex))

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Messages could not be deleted.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    resp.status = falcon.HTTP_204
def on_put(self, req, resp, project_id, queue_name):
    """Creates a queue; 201 on creation, 204 when it already existed."""
    LOG.debug(_(u'Queue item PUT - queue: %(queue)s, '
                u'project: %(project)s') %
              {'queue': queue_name, 'project': project_id})

    try:
        validate.queue_creation(name=queue_name)
        created = self.queue_controller.create(queue_name,
                                               project=project_id)

    except input_exceptions.ValidationFailed as ex:
        raise wsgi_exceptions.HTTPBadRequestBody(str(ex))

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Queue could not be created.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    if created:
        resp.status = falcon.HTTP_201
    else:
        resp.status = falcon.HTTP_204

    resp.location = req.path
def on_patch(self, req, resp, project_id, queue_name, claim_id):
    """Updates a claim's metadata (e.g., extends its TTL)."""
    LOG.debug(
        _(u'Claim Item PATCH - claim: %(claim_id)s, '
          u'queue: %(queue_name)s, project:%(project_id)s')
        % {'queue_name': queue_name,
           'project_id': project_id,
           'claim_id': claim_id})

    # Place JSON size restriction before parsing.
    # NOTE(review): guard against a missing Content-Length header;
    # under Python 3, comparing None with an int raises TypeError,
    # which would surface as a 500. filter_stream() already raises
    # the proper 400 when the body is empty.
    if (req.content_length is not None and
            req.content_length > CFG.metadata_max_length):
        description = _(u'Claim metadata size is too large.')
        raise wsgi_exceptions.HTTPBadRequestBody(description)

    # Read claim metadata (e.g., TTL) and raise appropriate
    # HTTP errors as needed.
    metadata, = wsgi_utils.filter_stream(req.stream, req.content_length,
                                         CLAIM_PATCH_SPEC)

    try:
        validate.claim_updating(metadata)
        self.claim_controller.update(queue_name,
                                     claim_id=claim_id,
                                     metadata=metadata,
                                     project=project_id)

        resp.status = falcon.HTTP_204

    except input_exceptions.ValidationFailed as ex:
        raise wsgi_exceptions.HTTPBadRequestBody(str(ex))

    except storage_exceptions.DoesNotExist:
        raise falcon.HTTPNotFound()

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Claim could not be updated.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)
def on_put(self, req, resp, project_id, queue_name):
    """Replaces a queue's metadata document."""
    LOG.debug(_(u'Queue metadata PUT - queue: %(queue)s, '
                u'project: %(project)s') %
              {'queue': queue_name, 'project': project_id})

    # Place JSON size restriction before parsing.
    # NOTE(review): guard against a missing Content-Length header;
    # under Python 3, comparing None with an int raises TypeError,
    # which would surface as a 500. filter_stream() already raises
    # the proper 400 when the body is empty.
    if (req.content_length is not None and
            req.content_length > CFG.metadata_max_length):
        description = _(u'Queue metadata size is too large.')
        raise wsgi_exceptions.HTTPBadRequestBody(description)

    # Deserialize queue metadata
    metadata, = wsgi_utils.filter_stream(req.stream,
                                         req.content_length,
                                         spec=None)

    try:
        # Only re-check the document size here when the configured
        # uplimit is tighter than the transport-level cap above.
        validate.queue_content(
            metadata, check_size=(
                validate.CFG.metadata_size_uplimit <
                CFG.metadata_max_length))
        self.queue_ctrl.set_metadata(queue_name,
                                     metadata=metadata,
                                     project=project_id)

    except input_exceptions.ValidationFailed as ex:
        raise wsgi_exceptions.HTTPBadRequestBody(str(ex))

    except storage_exceptions.QueueDoesNotExist:
        raise falcon.HTTPNotFound()

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Metadata could not be updated.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    resp.status = falcon.HTTP_204
    resp.location = req.path
def filter_stream(stream, len, spec=None, doctype=JSONObject):
    """Reads, deserializes, and validates a document from a stream.

    :param stream: file-like object from which to read an object or
        array of objects.
    :param len: number of bytes to read from stream
    :param spec: (Default None) Iterable describing expected fields,
        yielding tuples with the form of: (field_name, value_type). Note
        that value_type may either be a Python type, or the special
        string '*' to accept any type. If spec is None, the incoming
        documents will not be validated.
    :param doctype: type of document to expect; must be either
        JSONObject or JSONArray.
    :raises: HTTPBadRequest, HTTPServiceUnavailable
    :returns: A sanitized, filtered version of the document list read
        from the stream. If the document contains a list of objects,
        each object will be filtered and returned in a new list. If,
        on the other hand, the document is expected to contain a
        single object, that object will be filtered and returned as
        a single-element iterable.
    """
    # NOTE(review): the parameter name 'len' shadows the builtin, but
    # renaming it would break keyword callers — left as-is.
    if len is None:
        # A missing Content-Length means the client sent no body.
        description = _(u'Request body can not be empty')
        raise exceptions.HTTPBadRequestBody(description)

    try:
        # TODO(kgriffs): read_json should stream the resulting list
        # of messages, returning a generator rather than buffering
        # everything in memory (bp/streaming-serialization).
        document = utils.read_json(stream, len)

    except utils.MalformedJSON as ex:
        LOG.exception(ex)
        description = _(u'Request body could not be parsed.')
        raise exceptions.HTTPBadRequestBody(description)

    except utils.OverflowedJSONInteger as ex:
        LOG.exception(ex)
        description = _(u'JSON contains integer that is too large.')
        raise exceptions.HTTPBadRequestBody(description)

    except Exception as ex:
        # Error while reading from the network/server
        LOG.exception(ex)
        description = _(u'Request body could not be read.')
        raise exceptions.HTTPServiceUnavailable(description)

    if doctype is JSONObject:
        if not isinstance(document, JSONObject):
            raise exceptions.HTTPDocumentTypeNotSupported()

        # NOTE(review): 'filter' here takes (document, spec) — it is
        # presumably a sibling helper in this module that shadows the
        # builtin; confirm against the module's other definitions.
        return (document,) if spec is None else (filter(document, spec),)

    if doctype is JSONArray:
        if not isinstance(document, JSONArray):
            raise exceptions.HTTPDocumentTypeNotSupported()

        if spec is None:
            return document

        return [filter(obj, spec) for obj in document]

    raise TypeError('doctype must be either a JSONObject or JSONArray')
def on_post(self, req, resp, project_id, queue_name):
    """Enqueues one or more messages posted by a client.

    Responds with 201 and hrefs for the enqueued messages; the
    'partial' flag in the body indicates that only a subset of the
    posted messages was enqueued.
    """
    LOG.debug(
        _(u'Messages collection POST - queue: %(queue)s, '
          u'project: %(project)s')
        % {'queue': queue_name, 'project': project_id})

    uuid = req.get_header('Client-ID', required=True)

    # Place JSON size restriction before parsing.
    # NOTE(review): guard against a missing Content-Length header;
    # under Python 3, comparing None with an int raises TypeError,
    # which would surface as a 500. filter_stream() already raises
    # the proper 400 when the body is empty.
    if (req.content_length is not None and
            req.content_length > CFG.content_max_length):
        description = _(u'Message collection size is too large.')
        raise wsgi_exceptions.HTTPBadRequestBody(description)

    # Pull out just the fields we care about
    messages = wsgi_utils.filter_stream(
        req.stream,
        req.content_length,
        MESSAGE_POST_SPEC,
        doctype=wsgi_utils.JSONArray)

    # Enqueue the messages
    partial = False

    try:
        # No need to check each message's size if it
        # can not exceed the request size limit
        validate.message_posting(
            messages, check_size=(
                validate.CFG.message_size_uplimit <
                CFG.content_max_length))
        message_ids = self.message_controller.post(queue_name,
                                                   messages=messages,
                                                   project=project_id,
                                                   client_uuid=uuid)

    except input_exceptions.ValidationFailed as ex:
        raise wsgi_exceptions.HTTPBadRequestBody(str(ex))

    except storage_exceptions.DoesNotExist:
        raise falcon.HTTPNotFound()

    except storage_exceptions.MessageConflict as ex:
        LOG.exception(ex)
        partial = True
        message_ids = ex.succeeded_ids

        if not message_ids:
            # TODO(kgriffs): Include error code that is different
            # from the code used in the generic case, below.
            description = _(u'No messages could be enqueued.')
            raise wsgi_exceptions.HTTPServiceUnavailable(description)

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Messages could not be enqueued.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    # Prepare the response
    ids_value = ','.join(message_ids)
    resp.location = req.path + '?ids=' + ids_value

    # NOTE(review): renamed the loop variable from 'id', which
    # shadowed the builtin.
    hrefs = [req.path + '/' + message_id for message_id in message_ids]
    body = {'resources': hrefs, 'partial': partial}
    resp.body = utils.to_json(body)

    resp.status = falcon.HTTP_201