def on_put(self, req, resp, tenant_id, queue_name):
    """Create or update a queue along with its metadata.

    :param req: WSGI request; body must be a JSON document of
        queue metadata no larger than MAX_QUEUE_METADATA_SIZE.
    :param resp: WSGI response; 201 if the queue was created,
        204 if an existing queue was updated.
    :raises: HTTPBadRequest on missing/oversized/malformed body,
        HTTPServiceUnavailable on storage errors.
    """
    # Check for a missing/empty body FIRST: some WSGI stacks report
    # content_length as None, and comparing None against an int
    # raises TypeError on Python 3. The size check below is only
    # safe once we know the length is a real number.
    if req.content_length is None or req.content_length == 0:
        raise falcon.HTTPBadRequest(_('Bad request'),
                                    _('Missing queue metadata.'))

    if req.content_length > transport.MAX_QUEUE_METADATA_SIZE:
        raise falcon.HTTPBadRequest(_('Bad request'),
                                    _('Queue metadata size is too large.'))

    try:
        metadata = _filtered(helpers.read_json(req.stream))
        created = self.queue_ctrl.upsert(queue_name,
                                         metadata=metadata,
                                         tenant=tenant_id)
    except helpers.MalformedJSON:
        raise falcon.HTTPBadRequest(_('Bad request'),
                                    _('Malformed queue metadata.'))
    except Exception as ex:
        # Unknown storage failure; ask the client to retry.
        LOG.exception(ex)
        title = _('Service temporarily unavailable')
        msg = _('Please try again in a few seconds.')
        raise falcon.HTTPServiceUnavailable(title, msg, 30)

    resp.status = falcon.HTTP_201 if created else falcon.HTTP_204
    resp.location = req.path
def filter_stream(stream, len, spec, doctype=JSONObject):
    """Read, deserialize, and validate a JSON document from a stream.

    :param stream: file-like object from which to read an object or
        array of objects.
    :param len: number of bytes to read from the stream. (Name kept
        for interface compatibility, though it shadows the builtin.)
    :param spec: iterable of (field_name, value_type) tuples describing
        the expected fields; value_type may be a Python type or the
        special string '*' to accept any type.
    :param doctype: expected document type; must be JSONObject
        or JSONArray.
    :raises: HTTPBadRequest, HTTPServiceUnavailable
    :returns: an iterable of sanitized, filtered documents — a
        single-element tuple for JSONObject, or a generator yielding
        one filtered object per array element for JSONArray.
    """
    try:
        # TODO(kgriffs): read_json should stream the resulting list
        # of messages, returning a generator rather than buffering
        # everything in memory (bp/streaming-serialization).
        document = helpers.read_json(stream, len)

    except helpers.MalformedJSON as ex:
        LOG.exception(ex)
        raise exceptions.HTTPBadRequestBody(
            _('Body could not be parsed.'))

    except helpers.OverflowedJSONInteger as ex:
        LOG.exception(ex)
        raise exceptions.HTTPBadRequestBody(
            _('JSON contains integer that is too large.'))

    except Exception as ex:
        # Failure while reading from the network/server, not a
        # client formatting problem.
        LOG.exception(ex)
        raise exceptions.HTTPServiceUnavailable(
            _('Request body could not be read.'))

    if doctype is JSONObject:
        if not isinstance(document, JSONObject):
            raise exceptions.HTTPDocumentTypeNotSupported()

        return (filter(document, spec),)

    if doctype is JSONArray:
        if not isinstance(document, JSONArray):
            raise exceptions.HTTPDocumentTypeNotSupported()

        # Generator form keeps the door open for a streaming JSON
        # deserializer (see the TODO above).
        return (filter(obj, spec) for obj in document)

    raise ValueError('doctype not in (JSONObject, JSONArray)')
def on_patch(self, req, resp, tenant_id, queue_name, claim_id):
    """Update the metadata of an existing claim.

    Responds 204 on success, 400 on a missing or malformed body,
    and 404 when the claim does not exist.
    """
    body_absent = req.content_length is None or req.content_length == 0
    if body_absent:
        raise falcon.HTTPBadRequest(_('Bad request'),
                                    _('Missing claim metadata.'))

    try:
        document = helpers.read_json(req.stream)
        self.claim_ctrl.update(queue_name,
                               claim_id=claim_id,
                               metadata=_filtered(document),
                               tenant=tenant_id)

        resp.status = falcon.HTTP_204

    except helpers.MalformedJSON:
        raise falcon.HTTPBadRequest(_('Bad request'),
                                    _('Malformed claim metadata.'))

    except exceptions.DoesNotExist:
        raise falcon.HTTPNotFound
def on_put(self, req, resp, project_id, queue_name):
    """Replace the metadata of an existing queue.

    :param req: WSGI request; body must be a JSON object no larger
        than MAX_QUEUE_METADATA_SIZE.
    :param resp: WSGI response; 204 on success with Location set
        to the request path.
    :raises: HTTPBadRequestBody on oversized/unparsable/non-object
        metadata, HTTPNotFound if the queue does not exist,
        HTTPServiceUnavailable on read or storage failures.
    """
    LOG.debug(_("Queue metadata PUT - queue: %(queue)s, "
                "project: %(project)s")
              % {"queue": queue_name, "project": project_id})

    # TODO(kgriffs): Migrate this check to input validator middleware
    #
    # NOTE: content_length may be None when the header is absent;
    # on Python 3, comparing None with an int raises TypeError, so
    # guard the size check explicitly. A None length falls through,
    # matching the old (Python 2) comparison semantics.
    if (req.content_length is not None and
            req.content_length > transport.MAX_QUEUE_METADATA_SIZE):
        description = _('Queue metadata size is too large.')
        raise wsgi_exceptions.HTTPBadRequestBody(description)

    # Deserialize queue metadata
    try:
        metadata = helpers.read_json(req.stream, req.content_length)
    except helpers.MalformedJSON:
        description = _('Request body could not be parsed.')
        raise wsgi_exceptions.HTTPBadRequestBody(description)
    except Exception as ex:
        # Error while reading from the network/server
        LOG.exception(ex)
        description = _('Request body could not be read.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    # Metadata must be a JSON object
    if not isinstance(metadata, dict):
        description = _('Queue metadata must be an object.')
        raise wsgi_exceptions.HTTPBadRequestBody(description)

    try:
        self.queue_ctrl.set_metadata(queue_name,
                                     metadata=metadata,
                                     project=project_id)
    except storage_exceptions.QueueDoesNotExist:
        raise falcon.HTTPNotFound()
    except Exception as ex:
        LOG.exception(ex)
        description = _('Metadata could not be updated.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    resp.status = falcon.HTTP_204
    resp.location = req.path
def on_post(self, req, resp, tenant_id, queue_name):
    """Create a new claim on a queue's messages.

    Responds 200 with the claimed messages (and a Location header
    for the claim) when messages were claimed, or 204 when no
    messages were available.
    """
    if req.content_length is None or req.content_length == 0:
        raise falcon.HTTPBadRequest(_('Bad request'),
                                    _('Missing claim metadata.'))

    # TODO(zyuan): where do we define the limits?
    # Forward 'limit' only when the client actually supplied it.
    kwargs = {}
    limit = req.get_param_as_int('limit')
    if limit is not None:
        kwargs['limit'] = limit

    try:
        metadata = _filtered(helpers.read_json(req.stream))
        cid, msgs = self.claim_ctrl.create(queue_name,
                                           metadata=metadata,
                                           tenant=tenant_id,
                                           **kwargs)

        resp_msgs = list(msgs)
        if not resp_msgs:
            resp.status = falcon.HTTP_204
        else:
            # Rewrite each message's id into a claim-scoped href.
            base_path = req.path.rpartition('/')[0]
            for msg in resp_msgs:
                msg['href'] = _msg_uri_from_claim(base_path,
                                                  msg['id'],
                                                  cid)
                del msg['id']

            resp.location = req.path + '/' + cid
            resp.body = helpers.to_json(resp_msgs)
            resp.status = falcon.HTTP_200

    except helpers.MalformedJSON:
        raise falcon.HTTPBadRequest(_('Bad request'),
                                    _('Malformed claim metadata.'))

    except exceptions.DoesNotExist:
        raise falcon.HTTPNotFound
def on_put(self, req, resp, project_id, queue_name):
    """Create or update a queue along with its metadata.

    :param req: WSGI request; body must be a JSON object no larger
        than MAX_QUEUE_METADATA_SIZE.
    :param resp: WSGI response; 201 if the queue was created,
        204 if an existing queue was updated; Location is set to
        the request path.
    :raises: HTTPBadRequestBody on oversized/unparsable/non-object
        metadata, HTTPServiceUnavailable on read or storage failures.
    """
    # TODO(kgriffs): Migrate this check to input validator middleware
    #
    # NOTE: content_length may be None when the header is absent;
    # on Python 3, comparing None with an int raises TypeError, so
    # guard the size check explicitly. A None length falls through,
    # matching the old (Python 2) comparison semantics.
    if (req.content_length is not None and
            req.content_length > transport.MAX_QUEUE_METADATA_SIZE):
        description = _('Queue metadata size is too large.')
        raise wsgi_exceptions.HTTPBadRequestBody(description)

    # Deserialize queue metadata
    try:
        metadata = helpers.read_json(req.stream, req.content_length)
    except helpers.MalformedJSON:
        description = _('Request body could not be parsed.')
        raise wsgi_exceptions.HTTPBadRequestBody(description)
    except Exception as ex:
        # Error while reading from the network/server
        LOG.exception(ex)
        description = _('Request body could not be read.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    # Metadata must be a JSON object
    if not isinstance(metadata, dict):
        description = _('Queue metadata must be an object.')
        raise wsgi_exceptions.HTTPBadRequestBody(description)

    # Create or update the queue
    try:
        created = self.queue_controller.upsert(
            queue_name, metadata=metadata, project=project_id)
    except Exception as ex:
        LOG.exception(ex)
        description = _('Queue could not be created.')
        raise wsgi_exceptions.HTTPServiceUnavailable(description)

    resp.status = falcon.HTTP_201 if created else falcon.HTTP_204
    resp.location = req.path
def on_post(self, req, resp, tenant_id, queue_name):
    """Post one or more messages to a queue.

    Requires a Client-ID header; the body must be a non-empty JSON
    array of messages, each carrying 'ttl' and 'body' fields.
    Responds 201 with a Location header listing the new message ids.
    """
    uuid = req.get_header('Client-ID', required=True)

    if req.content_length is None or req.content_length == 0:
        raise falcon.HTTPBadRequest(_('Bad request'),
                                    _('Missing message contents.'))

    def scrub(documents):
        # Lazy generator: each message is stripped down to its
        # ttl/body fields as the storage controller iterates, and
        # any missing key, bad type, or empty list is reported as
        # malformed input (caught by the outer handler below).
        try:
            if len(documents) < 1:
                raise helpers.MalformedJSON

            for doc in documents:
                # TODO(zyuan): verify the TTL values
                yield {'ttl': doc['ttl'], 'body': doc['body']}
        except Exception:
            raise helpers.MalformedJSON

    try:
        messages = scrub(helpers.read_json(req.stream))
        message_ids = self.msg_ctrl.post(queue_name,
                                         messages=messages,
                                         tenant=tenant_id,
                                         client_uuid=uuid)

        resp.location = req.path + '/' + ','.join(
            [each_id.encode('utf-8') for each_id in message_ids])
        resp.status = falcon.HTTP_201

    except helpers.MalformedJSON:
        raise falcon.HTTPBadRequest(_('Bad request'),
                                    _('Malformed messages.'))

    except exceptions.DoesNotExist:
        raise falcon.HTTPNotFound