def on_get(self, request, response, project_id, flavor):
    """Return a single flavor entry as a JSON object.

    Response shape::

        {"pool": "", capabilities: {...}}

    :returns: HTTP | [200, 404]
    """
    LOG.debug(u'GET flavor - name: %s', flavor)

    flavor_data = None
    try:
        flavor_data = self._ctrl.get(flavor, project=project_id)
        caps = self._pools_ctrl.capabilities(group=flavor_data['pool'])
        flavor_data['capabilities'] = [str(cap).split('.')[-1]
                                       for cap in caps]
    except errors.FlavorDoesNotExist as ex:
        LOG.debug(ex)
        raise falcon.HTTPNotFound()

    flavor_data['href'] = request.path

    # The flavor name is already in the request path, so it is
    # redundant in the GET body.
    del flavor_data['name']

    response.body = transport_utils.to_json(flavor_data)
def on_get(self, request, response, project_id, flavor):
    """Return a single flavor entry as a JSON object.

    Response shape::

        {"pool": "", "pool_list": [], capabilities: {...}}

    :returns: HTTP | [200, 404]
    """
    LOG.debug(u'GET flavor - name: %s', flavor)

    try:
        data = self._ctrl.get(flavor, project=project_id)
        capabilities = self._pools_ctrl.capabilities(flavor=data)
        data['capabilities'] = [str(cap).split('.')[-1]
                                for cap in capabilities]
        pool_list = list(
            self._pools_ctrl.get_pools_by_flavor(flavor=data))
        # A comprehension over an empty list already yields [], so the
        # previous `if len(pool_list) > 0` guard was redundant.
        data['pool_list'] = [x['name'] for x in pool_list]
    except errors.FlavorDoesNotExist as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPNotFound(six.text_type(ex))

    data['href'] = request.path
    response.body = transport_utils.to_json(data)
def on_get(self, req, resp, project_id, queue_name):
    """Return queue statistics, rewriting message ids as hrefs."""
    try:
        resp_dict = self._queue_ctrl.stats(queue_name,
                                           project=project_id)

        stats = resp_dict['messages']
        if stats['total'] != 0:
            base_path = req.path[:req.path.rindex('/')] + '/messages/'

            # Replace raw message ids with browsable hrefs; process
            # 'newest' first to match the original statement order.
            for age in ('newest', 'oldest'):
                entry = stats[age]
                entry['href'] = base_path + entry['id']
                del entry['id']

        resp.content_location = req.path
        resp.body = utils.to_json(resp_dict)
        # status defaults to 200

    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise falcon.HTTPNotFound()

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Queue stats could not be read.')
        raise wsgi_errors.HTTPServiceUnavailable(description)
def on_post(self, req, resp, project_id, queue_name):
    """Create a subscription on a queue.

    Responds 201 with the new subscription id, or 409 when the
    controller reports the subscription was not created.
    """
    doc = (wsgi_utils.deserialize(req.stream, req.content_length)
           if req.content_length else {})

    try:
        self._validate.subscription_posting(doc)
        subscriber = doc['subscriber']
        ttl = int(doc['ttl'])
        options = doc['options']
        created = self._subscription_controller.create(
            queue_name, subscriber, ttl, options, project=project_id)
    except storage_errors.QueueDoesNotExist as ex:
        LOG.exception(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Subscription could not be created.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    resp.status = falcon.HTTP_201 if created else falcon.HTTP_409
    resp.location = req.path
    if created:
        resp.body = utils.to_json(
            {'subscription_id': six.text_type(created)})
def on_get(self, req, resp, project_id, queue_name, message_id):
    """Fetch a single message and serialize it for the response."""
    LOG.debug(u'Messages item GET - message: %(message)s, '
              u'queue: %(queue)s, project: %(project)s',
              {'message': message_id,
               'queue': queue_name,
               'project': project_id})

    try:
        msg = self._message_controller.get(
            queue_name, message_id, project=project_id)
    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise falcon.HTTPNotFound()
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Message could not be retrieved.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Prepare response
    msg['href'] = req.path
    queue_path = req.path.rsplit('/', 2)[0]
    msg = wsgi_utils.format_message_v1_1(msg, queue_path,
                                         msg['claim_id'])

    resp.body = utils.to_json(msg)
def on_get(self, req, resp, project_id, queue_name, claim_id):
    """Query a claim and serialize its buffered messages."""
    try:
        meta, msgs = self._claim_controller.get(
            queue_name,
            claim_id=claim_id,
            project=project_id)

        # Buffer claimed messages
        # TODO(kgriffs): Optimize along with serialization (see below)
        meta['messages'] = list(msgs)

    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise falcon.HTTPNotFound()
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Claim could not be queried.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Serialize claimed messages
    # TODO(kgriffs): Optimize
    queue_path = req.path.rsplit('/', 2)[0]
    formatted = []
    for msg in meta['messages']:
        formatted.append(
            wsgi_utils.format_message_v1_1(msg, queue_path, claim_id))
    meta['messages'] = formatted

    meta['href'] = req.path
    del meta['id']

    resp.body = utils.to_json(meta)
def on_get(self, request, response, project_id, flavor):
    """Return a single flavor entry as a JSON object.

    Response shape::

        {"pool": "", capabilities: {...}}

    :returns: HTTP | [200, 404]
    """
    LOG.debug(u'GET flavor - name: %s', flavor)

    flavor_data = None
    try:
        flavor_data = self._ctrl.get(flavor, project=project_id)
        group = flavor_data['pool_group']
        # NOTE(wanghao): remove this in Newton.
        flavor_data['pool'] = flavor_data['pool_group']
        caps = self._pools_ctrl.capabilities(group=group)
        flavor_data['capabilities'] = [str(cap).split('.')[-1]
                                       for cap in caps]
    except errors.FlavorDoesNotExist as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPNotFound(six.text_type(ex))

    flavor_data['href'] = request.path
    response.body = transport_utils.to_json(flavor_data)
def on_get(self, request, response, project_id, pool):
    """Return a single pool entry as a JSON object.

    Response shape::

        {"weight": 100, "uri": "", options: {...}}

    :returns: HTTP | [200, 404]
    """
    LOG.debug(u'GET pool - name: %s', pool)

    pool_record = None
    wants_detail = request.get_param_as_bool('detailed') or False

    try:
        pool_record = self._ctrl.get(pool, wants_detail)
    except errors.PoolDoesNotExist as ex:
        LOG.debug(ex)
        raise falcon.HTTPNotFound()

    pool_record['href'] = request.path

    # The pool name is already in the request path, so it is
    # redundant in the GET body.
    del pool_record['name']

    response.body = transport_utils.to_json(pool_record)
def on_get(self, request, response, project_id, flavor):
    """Return a single flavor entry as a JSON object.

    Response shape::

        {"pool": "", capabilities: {...}}

    :returns: HTTP | [200, 404]
    """
    LOG.debug(u"GET flavor - name: %s", flavor)

    record = None
    try:
        record = self._ctrl.get(flavor, project=project_id)
        caps = self._pools_ctrl.capabilities(group=record["pool"])
        record["capabilities"] = [str(cap).split(".")[-1]
                                  for cap in caps]
    except errors.FlavorDoesNotExist as ex:
        LOG.debug(ex)
        raise falcon.HTTPNotFound()

    record["href"] = request.path
    response.body = transport_utils.to_json(record)
def on_get(self, request, response, project_id, flavor):
    """Return a single flavor entry as a JSON object.

    Response shape::

        {"pool_group": "", capabilities: {...}}

    :returns: HTTP | [200, 404]
    """
    LOG.debug(u'GET flavor - name: %s', flavor)

    flavor_data = None
    wants_detail = request.get_param_as_bool('detailed') or False

    try:
        flavor_data = self._ctrl.get(flavor,
                                     project=project_id,
                                     detailed=wants_detail)
        # NOTE(wanghao): remove this in Newton.
        flavor_data['pool'] = flavor_data['pool_group']
    except errors.FlavorDoesNotExist as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPNotFound(six.text_type(ex))

    flavor_data['href'] = request.path
    response.body = transport_utils.to_json(flavor_data)
def _on_get_with_kfilter(self, req, resp, project_id, kfilter=None):
    """List queues for a project, applying an optional storage filter.

    :param kfilter: filter dict passed through to ``_queue_list``;
        defaults to no filtering.

    Fix: the previous default was a mutable ``{}``, which is shared
    across all calls of the function (classic Python pitfall); a
    ``None`` sentinel is backward-compatible and safe.
    """
    if kfilter is None:
        kfilter = {}

    kwargs = {}

    # NOTE(kgriffs): This syntax ensures that
    # we don't clobber default values with None.
    req.get_param('marker', store=kwargs)
    req.get_param_as_int('limit', store=kwargs)
    req.get_param_as_bool('detailed', store=kwargs)
    req.get_param('name', store=kwargs)

    queues, marker = self._queue_list(project_id, req.path,
                                      kfilter, **kwargs)

    links = []
    kwargs['marker'] = marker
    if queues:
        links = [
            {
                'rel': 'next',
                'href': req.path + falcon.to_query_str(kwargs)
            }
        ]

    response_body = {
        'queues': queues,
        'links': links
    }

    resp.body = utils.to_json(response_body)
def on_get(self, request, response, project_id, flavor):
    """Return a single flavor entry as a JSON object.

    Response shape::

        {"pool": "", capabilities: {...}}

    :returns: HTTP | [200, 404]
    """
    LOG.debug(u'GET flavor - name: %s', flavor)

    flavor_data = None
    wants_detail = request.get_param_as_bool('detailed') or False

    try:
        flavor_data = self._ctrl.get(flavor,
                                     project=project_id,
                                     detailed=wants_detail)
    except errors.FlavorDoesNotExist as ex:
        LOG.debug(ex)
        raise falcon.HTTPNotFound()

    flavor_data['href'] = request.path
    response.body = transport_utils.to_json(flavor_data)
def on_get(self, req, resp, project_id, queue_name, claim_id):
    """Query a claim (v1) and serialize its buffered messages."""
    LOG.debug(u"Claim item GET - claim: %(claim_id)s, "
              u"queue: %(queue_name)s, project: %(project_id)s",
              {"queue_name": queue_name,
               "project_id": project_id,
               "claim_id": claim_id})

    try:
        meta, msgs = self._claim_controller.get(queue_name,
                                                claim_id=claim_id,
                                                project=project_id)

        # Buffer claimed messages
        # TODO(kgriffs): Optimize along with serialization (see below)
        meta["messages"] = list(msgs)

    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise falcon.HTTPNotFound()
    except Exception as ex:
        LOG.exception(ex)
        description = _(u"Claim could not be queried.")
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Serialize claimed messages
    # TODO(kgriffs): Optimize
    queue_path = req.path.rsplit("/", 2)[0]
    claim_ref = meta["id"]
    meta["messages"] = [
        wsgi_utils.format_message_v1(msg, queue_path, claim_ref)
        for msg in meta["messages"]
    ]

    meta["href"] = req.path
    del meta["id"]

    resp.content_location = req.relative_uri
    resp.body = utils.to_json(meta)
def on_post(self, req, resp, project_id, queue_name):
    """Create a pre-signed URL for the given queue."""
    LOG.debug(u'Pre-Signed URL Creation for queue: %(queue)s, '
              u'project: %(project)s',
              {'queue': queue_name, 'project': project_id})

    try:
        document = wsgi_utils.deserialize(req.stream, req.content_length)
    except ValueError as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))

    unknown = set(document.keys()) - _KNOWN_KEYS
    if unknown:
        msg = six.text_type(_LE('Unknown keys: %s') % unknown)
        raise wsgi_errors.HTTPBadRequestAPI(msg)

    key = self._conf.signed_url.secret_key

    # NOTE(review): req.path[:-6] strips the trailing 6-character
    # action segment to recover the queue path — confirm against the
    # route this responder is mounted on.
    paths = document.pop('paths', None)
    if paths:
        invalid = set(paths) - _VALID_PATHS
        if invalid:
            msg = six.text_type(_LE('Invalid paths: %s') % invalid)
            raise wsgi_errors.HTTPBadRequestAPI(msg)
        paths = [os.path.join(req.path[:-6], path) for path in paths]
    else:
        paths = [os.path.join(req.path[:-6], 'messages')]

    try:
        data = urls.create_signed_url(key, paths,
                                      project=project_id,
                                      **document)
    except ValueError as err:
        raise wsgi_errors.HTTPBadRequestAPI(str(err))

    resp.body = utils.to_json(data)
def on_post(self, req, resp, project_id, queue_name):
    """Post one or more messages to a queue (v1 API).

    Responds 201 with the created message hrefs. Raises HTTP 400 on
    validation failure, 404 when the queue does not exist, and 503 on
    storage conflicts or unexpected errors.
    """
    LOG.debug(u"Messages collection POST - queue: %(queue)s, "
              u"project: %(project)s",
              {"queue": queue_name, "project": project_id})

    # NOTE(review): the client UUID header is forwarded to the storage
    # driver along with the messages (see the post() call below).
    client_uuid = wsgi_helpers.get_client_uuid(req)

    try:
        # Place JSON size restriction before parsing
        self._validate.message_length(req.content_length)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))

    # Deserialize and validate the request body
    document = wsgi_utils.deserialize(req.stream, req.content_length)
    messages = wsgi_utils.sanitize(document,
                                   MESSAGE_POST_SPEC,
                                   doctype=wsgi_utils.JSONArray)

    try:
        self._validate.message_posting(messages)

        message_ids = self._message_controller.post(
            queue_name,
            messages=messages,
            project=project_id,
            client_uuid=client_uuid)

    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise falcon.HTTPNotFound()
    except storage_errors.MessageConflict as ex:
        LOG.exception(ex)
        description = _(u"No messages could be enqueued.")
        raise wsgi_errors.HTTPServiceUnavailable(description)
    except Exception as ex:
        LOG.exception(ex)
        description = _(u"Messages could not be enqueued.")
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Prepare the response
    ids_value = ",".join(message_ids)
    resp.location = req.path + "?ids=" + ids_value

    hrefs = [req.path + "/" + id for id in message_ids]

    # NOTE(kgriffs): As of the Icehouse release, drivers are
    # no longer allowed to enqueue a subset of the messages
    # submitted by the client; it's all or nothing. Therefore,
    # 'partial' is now always False in the v1.0 API, and the
    # field has been removed in v1.1.
    body = {"resources": hrefs, "partial": False}
    resp.body = utils.to_json(body)

    resp.status = falcon.HTTP_201
def on_post(self, req, resp, project_id, queue_name):
    """Post messages to a queue, creating the queue on first use.

    Responds 201 with the created message hrefs. Raises HTTP 400 when
    the body is missing a 'messages' key or fails validation, 404 when
    the storage layer reports a missing resource, and 503 on conflicts
    or unexpected errors.
    """
    client_uuid = wsgi_helpers.get_client_uuid(req)

    try:
        # Place JSON size restriction before parsing
        self._validate.message_length(req.content_length)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))

    # Deserialize and validate the incoming messages
    document = wsgi_utils.deserialize(req.stream, req.content_length)

    if 'messages' not in document:
        description = _(u'No messages were found in the request body.')
        raise wsgi_errors.HTTPBadRequestAPI(description)

    messages = wsgi_utils.sanitize(document['messages'],
                                   self._message_post_spec,
                                   doctype=wsgi_utils.JSONArray)

    try:
        self._validate.message_posting(messages)

        # Queues are created lazily, on the first message posted.
        if not self._queue_controller.exists(queue_name, project_id):
            self._queue_controller.create(queue_name, project=project_id)

        message_ids = self._message_controller.post(
            queue_name,
            messages=messages,
            project=project_id,
            client_uuid=client_uuid)

    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise falcon.HTTPNotFound()
    except storage_errors.MessageConflict as ex:
        LOG.exception(ex)
        description = _(u'No messages could be enqueued.')
        raise wsgi_errors.HTTPServiceUnavailable(description)
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Messages could not be enqueued.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Prepare the response
    ids_value = ','.join(message_ids)
    resp.location = req.path + '?ids=' + ids_value

    hrefs = [req.path + '/' + id for id in message_ids]
    body = {'resources': hrefs}
    resp.body = utils.to_json(body)

    resp.status = falcon.HTTP_201
def on_get(self, request, response, project_id):
    """List flavors as objects embedded in an object.

    Response shape::

        {
            "flavors": [
                {"href": "", "capabilities": {}, "pool_group": ""},
                ...
            ],
            "links": [
                {"rel": "next", "href": ""},
                ...
            ]
        }

    :returns: HTTP | 200
    """
    LOG.debug(u'LIST flavors for project_id %s' % project_id)

    params = {}
    request.get_param('marker', store=params)
    request.get_param_as_int('limit', store=params)
    detailed = request.get_param_as_bool('detailed')

    cursor = self._ctrl.list(project=project_id, **params)
    flavors = list(next(cursor))

    results = {'links': []}

    if flavors:
        params['marker'] = next(cursor)
        for item in flavors:
            item['href'] = request.path + '/' + item['name']
            group = item['pool_group']
            # NOTE(wanghao): remove this in Newton.
            item['pool'] = item['pool_group']
            if detailed:
                caps = self._pools_ctrl.capabilities(group=group)
                item['capabilities'] = [str(cap).split('.')[-1]
                                        for cap in caps]

    if detailed is not None:
        params['detailed'] = detailed

    if flavors:
        results['links'] = [
            {
                'rel': 'next',
                'href': request.path + falcon.to_query_str(params)
            }
        ]

    results['flavors'] = flavors

    response.body = transport_utils.to_json(results)
    response.status = falcon.HTTP_200
def on_get(self, req, resp, **kwargs):
    """Report the transport driver's health status as JSON."""
    try:
        status = self._driver.health()
        resp.body = utils.to_json(status)
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Health status could not be read.')
        raise wsgi_errors.HTTPServiceUnavailable(description)
def on_get(self, request, response, project_id):
    """List flavors as objects embedded in an object.

    Response shape::

        {
            "flavors": [
                {"href": "", "capabilities": {}, "pool": ""},
                ...
            ],
            "links": [
                {"rel": "next", "href": ""},
                ...
            ]
        }

    :returns: HTTP | 200
    """
    LOG.debug(u'LIST flavors for project_id %s' % project_id)

    params = {}
    request.get_param('marker', store=params)
    request.get_param_as_int('limit', store=params)
    detailed = request.get_param_as_bool('detailed')

    cursor = self._ctrl.list(project=project_id, **params)
    flavors = list(next(cursor))

    results = {'links': []}

    if flavors:
        params['marker'] = next(cursor)
        for item in flavors:
            item['href'] = request.path + '/' + item['name']
            if detailed:
                caps = self._pools_ctrl.capabilities(group=item['pool'])
                item['capabilities'] = [str(cap).split('.')[-1]
                                        for cap in caps]

    if detailed is not None:
        params['detailed'] = detailed

    if flavors:
        results['links'] = [
            {
                'rel': 'next',
                'href': request.path + falcon.to_query_str(params)
            }
        ]

    results['flavors'] = flavors

    response.body = transport_utils.to_json(results)
    response.status = falcon.HTTP_200
def on_post(self, req, resp, project_id, queue_name):
    """Claim a batch of messages; 201 with the messages, or 204."""
    LOG.debug(u'Claims collection POST - queue: %(queue)s, '
              u'project: %(project)s',
              {'queue': queue_name, 'project': project_id})

    # Check for an explicit limit on the # of messages to claim
    limit = req.get_param_as_int('limit')
    claim_options = {} if limit is None else {'limit': limit}

    # NOTE(kgriffs): Clients may or may not actually include the
    # Content-Length header when the body is empty; the following
    # check works for both 0 and None.
    if req.content_length:
        # Read claim metadata (e.g., TTL) and raise appropriate
        # HTTP errors as needed.
        document = wsgi_utils.deserialize(req.stream, req.content_length)
        metadata = wsgi_utils.sanitize(document, self._claim_post_spec)
    else:
        # No values given, so use defaults
        metadata = self._default_meta

    # Claim some messages
    try:
        self._validate.claim_creation(metadata, limit=limit)

        cid, msgs = self._claim_controller.create(
            queue_name,
            metadata=metadata,
            project=project_id,
            **claim_options)

        # Buffer claimed messages
        # TODO(kgriffs): optimize, along with serialization (below)
        resp_msgs = list(msgs)

    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Claim could not be created.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Serialize claimed messages, if any. This logic assumes
    # the storage driver returned well-formed messages.
    if resp_msgs:
        base_path = req.path.rpartition('/')[0]
        resp_msgs = [wsgi_utils.format_message_v1_1(msg, base_path, cid)
                     for msg in resp_msgs]

        resp.location = req.path + '/' + cid
        resp.body = utils.to_json({'messages': resp_msgs})
        resp.status = falcon.HTTP_201
    else:
        resp.status = falcon.HTTP_204
def on_get(self, request, response, project_id):
    """List pools as objects embedded in an object.

    Response shape::

        {
            "pools": [
                {"href": "", "weight": 100, "uri": ""},
                ...
            ],
            "links": [
                {"href": "", "rel": "next"}
            ]
        }

    :returns: HTTP | 200
    """
    LOG.debug(u'LIST pools')

    params = {}
    request.get_param('marker', store=params)
    request.get_param_as_int('limit', store=params)
    request.get_param_as_bool('detailed', store=params)

    try:
        self._validate.pool_listing(**params)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))

    cursor = self._ctrl.list(**params)
    pools = list(next(cursor))

    results = {'links': []}

    if pools:
        params['marker'] = next(cursor)
        for item in pools:
            item['href'] = request.path + '/' + item['name']
        results['links'] = [
            {
                'rel': 'next',
                'href': request.path + falcon.to_query_str(params)
            }
        ]

    results['pools'] = pools

    response.content_location = request.relative_uri
    response.body = transport_utils.to_json(results)
    response.status = falcon.HTTP_200
def on_post(self, req, resp, project_id, queue_name):
    """Claim a batch of messages; 201 with the messages, or 204."""
    # Check for an explicit limit on the # of messages to claim
    limit = req.get_param_as_int('limit')
    claim_options = {} if limit is None else {'limit': limit}

    # NOTE(kgriffs): Clients may or may not actually include the
    # Content-Length header when the body is empty; the following
    # check works for both 0 and None.
    if req.content_length:
        # Read claim metadata (e.g., TTL) and raise appropriate
        # HTTP errors as needed.
        document = wsgi_utils.deserialize(req.stream, req.content_length)
        metadata = wsgi_utils.sanitize(document, self._claim_post_spec)
    else:
        # No values given, so use defaults
        metadata = self._default_meta

    # Claim some messages
    try:
        self._validate.claim_creation(metadata, limit=limit)

        cid, msgs = self._claim_controller.create(
            queue_name,
            metadata=metadata,
            project=project_id,
            **claim_options)

        # Buffer claimed messages
        # TODO(kgriffs): optimize, along with serialization (below)
        resp_msgs = list(msgs)

    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Claim could not be created.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Serialize claimed messages, if any. This logic assumes
    # the storage driver returned well-formed messages.
    if resp_msgs:
        base_path = req.path.rpartition('/')[0]
        resp_msgs = [wsgi_utils.format_message_v1_1(msg, base_path, cid)
                     for msg in resp_msgs]

        resp.location = req.path + '/' + cid
        resp.body = utils.to_json({'messages': resp_msgs})
        resp.status = falcon.HTTP_201
    else:
        resp.status = falcon.HTTP_204
def on_get(self, req, resp, project_id, queue_name):
    """Return queue stats; missing or empty queues yield zeroed counters."""
    try:
        resp_dict = self._queue_ctrl.stats(queue_name,
                                           project=project_id)

        stats = resp_dict['messages']
        if stats['total'] != 0:
            base_path = req.path[:req.path.rindex('/')] + '/messages/'
            for age in ('newest', 'oldest'):
                entry = stats[age]
                entry['href'] = base_path + entry['id']
                del entry['id']

        resp.body = utils.to_json(resp_dict)
        # status defaults to 200

    except (storage_errors.QueueDoesNotExist,
            storage_errors.QueueIsEmpty):
        # Missing or empty queue: report zeroed stats instead of
        # surfacing an error to the client.
        resp_dict = {
            'messages': {
                'claimed': 0,
                'free': 0,
                'total': 0
            }
        }
        resp.body = utils.to_json(resp_dict)

    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPNotFound(six.text_type(ex))

    except Exception:
        description = _(u'Queue stats could not be read.')
        LOG.exception(description)
        raise wsgi_errors.HTTPServiceUnavailable(description)
def on_get(self, req, resp, project_id, topic_name):
    """Return topic stats; missing or empty topics yield zeroed counters.

    Fix: the first except clause bound the exception to ``ex`` without
    ever using it (pyflakes F841); the binding has been removed.
    """
    try:
        resp_dict = self._topic_ctrl.stats(topic_name,
                                           project=project_id)

        message_stats = resp_dict['messages']
        if message_stats['total'] != 0:
            base_path = req.path[:req.path.rindex('/')] + '/messages/'

            newest = message_stats['newest']
            newest['href'] = base_path + newest['id']
            del newest['id']

            oldest = message_stats['oldest']
            oldest['href'] = base_path + oldest['id']
            del oldest['id']

        resp.body = utils.to_json(resp_dict)
        # status defaults to 200

    except (storage_errors.TopicDoesNotExist,
            storage_errors.TopicIsEmpty):
        # Missing or empty topic: report zeroed stats instead of
        # surfacing an error to the client.
        resp_dict = {
            'messages': {
                'claimed': 0,
                'free': 0,
                'total': 0
            }
        }
        resp.body = utils.to_json(resp_dict)

    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPNotFound(six.text_type(ex))

    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Topic stats could not be read.')
        raise wsgi_errors.HTTPServiceUnavailable(description)
def on_post(self, req, resp, project_id, queue_name): LOG.debug( u'Claims collection POST - queue: %(queue)s, ' u'project: %(project)s', { 'queue': queue_name, 'project': project_id }) # Check for an explicit limit on the # of messages to claim limit = req.get_param_as_int('limit') claim_options = {} if limit is None else {'limit': limit} # Read claim metadata (e.g., TTL) and raise appropriate # HTTP errors as needed. document = wsgi_utils.deserialize(req.stream, req.content_length) metadata = wsgi_utils.sanitize(document, CLAIM_POST_SPEC) # Claim some messages try: self._validate.claim_creation(metadata, limit=limit) cid, msgs = self._claim_controller.create(queue_name, metadata=metadata, project=project_id, **claim_options) # Buffer claimed messages # TODO(kgriffs): optimize, along with serialization (below) resp_msgs = list(msgs) except validation.ValidationFailed as ex: LOG.debug(ex) raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex)) except Exception as ex: LOG.exception(ex) description = _(u'Claim could not be created.') raise wsgi_errors.HTTPServiceUnavailable(description) # Serialize claimed messages, if any. This logic assumes # the storage driver returned well-formed messages. if len(resp_msgs) != 0: resp_msgs = [ wsgi_utils.format_message_v1(msg, req.path.rpartition('/')[0], cid) for msg in resp_msgs ] resp.location = req.path + '/' + cid resp.body = utils.to_json(resp_msgs) resp.status = falcon.HTTP_201 else: resp.status = falcon.HTTP_204
def on_get(self, request, response, project_id):
    """List flavors as objects embedded in an object.

    Response shape::

        {
            "flavors": [
                {"href": "", "capabilities": {}, "pool": ""},
                ...
            ],
            "links": [
                {"rel": "next", "href": ""},
                ...
            ]
        }

    :returns: HTTP | 200
    """
    LOG.debug(u'LIST flavors for project_id %s' % project_id)

    params = {}
    request.get_param('marker', store=params)
    request.get_param_as_int('limit', store=params)
    request.get_param_as_bool('detailed', store=params)

    cursor = self._ctrl.list(project=project_id, **params)
    flavors = list(next(cursor))

    results = {'links': []}
    if flavors:
        params['marker'] = next(cursor)
        for item in flavors:
            item['href'] = request.path + '/' + item['name']
            # NOTE(wanghao): remove this in Newton.
            item['pool'] = item['pool_group']
        results['links'] = [
            {
                'rel': 'next',
                'href': request.path + falcon.to_query_str(params)
            }
        ]

    results['flavors'] = flavors

    response.body = transport_utils.to_json(results)
    response.status = falcon.HTTP_200
def on_get(self, req, resp, project_id):
    """List queues for a project, filling in reserved metadata defaults.

    Fix: when 'detailed' was requested, the old code read
    ``each_queue['metadata'][meta]`` after a defensive
    ``each_queue.get('metadata', {})`` check, so a queue record
    lacking a 'metadata' key raised KeyError; ``setdefault`` makes
    the write path consistent with the read path.
    """
    kwargs = {}

    # NOTE(kgriffs): This syntax ensures that
    # we don't clobber default values with None.
    req.get_param('marker', store=kwargs)
    req.get_param_as_int('limit', store=kwargs)
    req.get_param_as_bool('detailed', store=kwargs)

    try:
        self._validate.queue_listing(**kwargs)
        results = self._queue_controller.list(project=project_id,
                                              **kwargs)

        # Buffer list of queues
        queues = list(next(results))
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Queues could not be listed.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Got some. Prepare the response.
    kwargs['marker'] = next(results) or kwargs.get('marker', '')

    reserved_metadata = _get_reserved_metadata(self._validate).items()
    for each_queue in queues:
        each_queue['href'] = req.path + '/' + each_queue['name']
        if kwargs.get('detailed'):
            # setdefault tolerates queue records stored without
            # a metadata dict.
            metadata = each_queue.setdefault('metadata', {})
            for meta, value in reserved_metadata:
                if not metadata.get(meta):
                    metadata[meta] = value

    links = []
    if queues:
        links = [
            {
                'rel': 'next',
                'href': req.path + falcon.to_query_str(kwargs)
            }
        ]

    response_body = {
        'queues': queues,
        'links': links
    }

    resp.body = utils.to_json(response_body)
def on_get(self, request, response, project_id):
    """List flavors as objects embedded in an object.

    Response shape::

        {
            "flavors": [
                {"href": "", "capabilities": {}, "pool": ""},
                ...
            ],
            "links": [
                {"rel": "next", "href": ""},
                ...
            ]
        }

    :returns: HTTP | 200
    """
    LOG.debug(u"LIST flavors for project_id %s" % project_id)

    params = {}
    request.get_param("marker", store=params)
    request.get_param_as_int("limit", store=params)
    detailed = request.get_param_as_bool("detailed")

    cursor = self._ctrl.list(project=project_id, **params)
    flavors = list(next(cursor))

    results = {"links": []}

    if flavors:
        params["marker"] = next(cursor)
        for item in flavors:
            item["href"] = request.path + "/" + item["name"]
            if detailed:
                caps = self._pools_ctrl.capabilities(group=item["pool"])
                item["capabilities"] = [str(cap).split(".")[-1]
                                        for cap in caps]

    if detailed is not None:
        params["detailed"] = detailed

    if flavors:
        results["links"] = [
            {
                "rel": "next",
                "href": request.path + falcon.to_query_str(params)
            }
        ]

    results["flavors"] = flavors

    response.body = transport_utils.to_json(results)
    response.status = falcon.HTTP_200
def queue_get_stats(self, req):
    """Gets queue stats

    :param req: Request instance ready to be sent.
    :type req: `api.common.Request`
    :return: resp: Response instance
    :type: resp: `api.common.Response`
    """
    project_id = req._headers.get('X-Project-ID')
    queue_name = req._body.get('queue_name')

    try:
        resp_dict = self._queue_controller.stats(queue_name,
                                                 project=project_id)
        body = utils.to_json(resp_dict)
    except storage_errors.QueueDoesNotExist as ex:
        LOG.exception(ex)
        # Missing queue: respond 404 but still include zeroed
        # message counters in the body.
        resp_dict = {
            'messages': {
                'claimed': 0,
                'free': 0,
                'total': 0
            }
        }
        body = utils.to_json(resp_dict)
        headers = {'status': 404}
        resp = response.Response(req, body, headers)
        return resp
    except storage_errors.BaseException as ex:
        # Any other storage failure maps to a 503 error response.
        LOG.exception(ex)
        error = _('Cannot retrieve queue "%s" stats.') % queue_name
        headers = {'status': 503}
        return api_utils.error_response(req, ex, headers, error)
    else:
        # Success path: 200 with the serialized stats.
        headers = {'status': 200}
        resp = response.Response(req, body, headers)
        return resp
def on_get(self, req, resp, project_id):
    """List queues (v1); responds 204 when there are none."""
    LOG.debug(u'Queue collection GET')

    kwargs = {}

    # NOTE(kgriffs): This syntax ensures that
    # we don't clobber default values with None.
    req.get_param('marker', store=kwargs)
    req.get_param_as_int('limit', store=kwargs)
    req.get_param_as_bool('detailed', store=kwargs)

    try:
        self._validate.queue_listing(**kwargs)
        results = self._queue_controller.list(project=project_id,
                                              **kwargs)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Queues could not be listed.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Buffer list of queues
    queues = list(next(results))

    # Check for an empty list
    if not queues:
        resp.status = falcon.HTTP_204
        return

    # Got some. Prepare the response.
    kwargs['marker'] = next(results)
    for each_queue in queues:
        each_queue['href'] = req.path + '/' + each_queue['name']

    response_body = {
        'queues': queues,
        'links': [
            {
                'rel': 'next',
                'href': req.path + falcon.to_query_str(kwargs)
            }
        ]
    }

    resp.content_location = req.relative_uri
    resp.body = utils.to_json(response_body)
def on_get(self, req, resp, project_id, queue_name):
    """List messages, or fetch a specific set when ?ids= is given."""
    resp.content_location = req.relative_uri

    ids = req.get_param_as_list('ids')
    if ids is None:
        payload = self._get(req, project_id, queue_name)
    else:
        base = req.path.rsplit('/', 1)[0]
        payload = self._get_by_id(base, project_id, queue_name, ids)

    if payload is None:
        resp.status = falcon.HTTP_204
        return

    resp.body = utils.to_json(payload)
def on_get(self, req, resp, project_id, queue_name):
    """Fetch and serialize a queue's metadata."""
    try:
        metadata = self._queue_controller.get(queue_name,
                                              project=project_id)
    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise falcon.HTTPNotFound()
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Queue metadata could not be retrieved.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    resp.body = utils.to_json(metadata)
def queue_list(self, req):
    """Gets a list of queues

    :param req: Request instance ready to be sent.
    :type req: `api.common.Request`
    :return: resp: Response instance
    :type: resp: `api.common.Response`
    """
    project_id = req._headers.get('X-Project-ID')
    LOG.debug(u'Queue list - project: %(project)s',
              {'project': project_id})

    kwargs = {}
    if req._body.get('marker') is not None:
        kwargs['marker'] = req._body.get('marker')
    if req._body.get('limit') is not None:
        kwargs['limit'] = req._body.get('limit')
    if req._body.get('detailed') is not None:
        kwargs['detailed'] = req._body.get('detailed')

    try:
        self._validate.queue_listing(**kwargs)
        results = self._queue_controller.list(
            project=project_id, **kwargs)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        headers = {'status': 400}
        return api_utils.error_response(req, ex, headers)
    except storage_errors.BaseException as ex:
        LOG.exception(ex)
        error = 'Queues could not be listed.'
        headers = {'status': 503}
        # FIX: error_response takes (req, exception, headers, error) —
        # see the sibling queue_get_stats handler and this function's
        # own 400 path above; the two arguments were previously swapped.
        return api_utils.error_response(req, ex, headers, error)

    # Buffer list of queues
    queues = list(next(results))

    # Got some. Prepare the response.
    body = utils.to_json({'queues': queues})
    headers = {'status': 200}

    resp = response.Response(req, body, headers)
    return resp
def on_get(self, request, response, project_id):
    """List pools as objects embedded in an object.

    Response shape::

        {
            "pools": [
                {"href": "", "weight": 100, "uri": ""},
                ...
            ],
            "links": [
                {"href": "", "rel": "next"}
            ]
        }

    :returns: HTTP | 200
    """
    LOG.debug(u'LIST pools')

    params = {}
    request.get_param('marker', store=params)
    request.get_param_as_int('limit', store=params)
    request.get_param_as_bool('detailed', store=params)

    cursor = self._ctrl.list(**params)
    pools = list(next(cursor))

    results = {'links': []}
    if pools:
        params['marker'] = next(cursor)
        for item in pools:
            item['href'] = request.path + '/' + item['name']
        results['links'] = [
            {
                'rel': 'next',
                'href': request.path + falcon.to_query_str(params)
            }
        ]

    results['pools'] = pools

    response.content_location = request.relative_uri
    response.body = transport_utils.to_json(results)
    response.status = falcon.HTTP_200
def on_get(self, req, resp, project_id):
    """List the project's queues; respond 204 when there are none."""
    LOG.debug(u'Queue collection GET')

    options = {}

    # NOTE(kgriffs): This syntax ensures that
    # we don't clobber default values with None.
    req.get_param('marker', store=options)
    req.get_param_as_int('limit', store=options)
    req.get_param_as_bool('detailed', store=options)

    try:
        self._validate.queue_listing(**options)
        results = self._queue_controller.list(project=project_id,
                                              **options)

        # Buffer list of queues
        queues = list(next(results))
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except Exception:
        description = _(u'Queues could not be listed.')
        LOG.exception(description)
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Check for an empty list
    if not queues:
        resp.status = falcon.HTTP_204
        return

    # Got some. Prepare the response.
    options['marker'] = next(results)
    for queue in queues:
        queue['href'] = req.path + '/' + queue['name']

    resp.content_location = req.relative_uri
    resp.body = utils.to_json({
        'queues': queues,
        'links': [{
            'rel': 'next',
            'href': req.path + falcon.to_query_str(options)
        }]
    })
def on_get(self, request, response, project_id):
    """Returns a flavor listing as objects embedded in an object:

    ::

        {
            "flavors": [
                {"href": "", "capabilities": {}, "pool": ""},
                ...
            ],
            "links": [
                {"rel": "next", "href": ""},
                ...
            ]
        }

    :returns: HTTP | 200
    """
    # BUG FIX: use lazy %-style logging arguments instead of eager
    # interpolation with the '%' operator, matching the logging style
    # used by every other handler in this file; the message is only
    # formatted when debug logging is enabled.
    LOG.debug(u'LIST flavors for project_id %s', project_id)

    store = {}
    request.get_param('marker', store=store)
    request.get_param_as_int('limit', store=store)
    request.get_param_as_bool('detailed', store=store)

    cursor = self._ctrl.list(project=project_id, **store)
    flavors = list(next(cursor))

    results = {'links': []}

    if flavors:
        # Second cursor item is the marker for the next page.
        store['marker'] = next(cursor)

        for entry in flavors:
            entry['href'] = request.path + '/' + entry['name']

        results['links'] = [
            {
                'rel': 'next',
                'href': request.path + falcon.to_query_str(store)
            }
        ]

    results['flavors'] = flavors

    response.body = transport_utils.to_json(results)
    response.status = falcon.HTTP_200
def on_get(self, req, resp, project_id, queue_name):
    """Return the queue's metadata document as JSON; 404 if missing."""
    try:
        metadata = self._queue_ctrl.get_metadata(queue_name,
                                                 project=project_id)
    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPNotFound(six.text_type(ex))
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Queue metadata could not be retrieved.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    resp.content_location = req.path
    resp.body = utils.to_json(metadata)
def on_get(self, req, resp, project_id, queue_name, subscription_id):
    """Fetch a single subscription and serialize it as JSON."""
    try:
        subscription = self._subscription_controller.get(
            queue_name, subscription_id, project=project_id)
    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPNotFound(six.text_type(ex))
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Subscription could not be retrieved.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    resp.body = utils.to_json(subscription)
def on_get(self, req, resp, project_id, queue_name):
    """Return queue metadata; 404 when the queue does not exist."""
    try:
        metadata = self._queue_ctrl.get_metadata(queue_name,
                                                 project=project_id)
    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPNotFound(six.text_type(ex))
    except Exception:
        description = _(u'Queue metadata could not be retrieved.')
        LOG.exception(description)
        raise wsgi_errors.HTTPServiceUnavailable(description)

    resp.content_location = req.path
    resp.body = utils.to_json(metadata)
def on_patch(self, request, response, project_id, pool):
    """Allows one to update a pool's weight, uri, and/or options.

    This method expects the user to submit a JSON object containing
    at least one of: 'uri', 'weight', 'flavor', 'options'. If none
    are found, the request is flagged as bad. There is also strict
    format checking through the use of jsonschema. Appropriate errors
    are returned in each case for badly formatted input.

    :returns: HTTP | 200,400
    """
    LOG.debug(u'PATCH pool - name: %s', pool)
    data = wsgi_utils.load(request)

    MUTABLE = ('weight', 'uri', 'flavor', 'options')
    if not any(field in data for field in MUTABLE):
        LOG.debug(u'PATCH pool, bad params')
        raise wsgi_errors.HTTPBadRequestBody(
            'One of `uri`, `weight`, `flavor`, or `options` needs '
            'to be specified'
        )

    # jsonschema validation of each recognized field.
    for field in MUTABLE:
        wsgi_utils.validate(self._validators[field], data)

    # Reject a URI we cannot actually connect to before persisting it.
    conf = self._ctrl.driver.conf
    if 'uri' in data and not storage_utils.can_connect(data['uri'],
                                                       conf=conf):
        raise wsgi_errors.HTTPBadRequestBody(
            'cannot connect to %s' % data['uri']
        )

    fields = common_utils.fields(data, MUTABLE,
                                 pred=lambda v: v is not None)
    resp_data = None

    try:
        self._ctrl.update(pool, **fields)
        resp_data = self._ctrl.get(pool, False)
    except errors.PoolDoesNotExist as ex:
        LOG.exception('Pool "%s" does not exist', pool)
        raise wsgi_errors.HTTPNotFound(six.text_type(ex))

    resp_data['href'] = request.path
    response.body = transport_utils.to_json(resp_data)
def on_get(self, req, resp, project_id, queue_name):
    """Return queue metadata merged with reserved-metadata defaults."""
    try:
        queue_meta = self._queue_controller.get(queue_name,
                                                project=project_id)

        # Backfill any reserved keys the stored document left
        # falsy or unset.
        reserved = _get_reserved_metadata(self._validate)
        for key, default in reserved.items():
            if not queue_meta.get(key):
                queue_meta[key] = default
    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPNotFound(six.text_type(ex))
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Queue metadata could not be retrieved.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    resp.body = utils.to_json(queue_meta)
def on_get(self, req, resp, project_id, topic_name):
    """Return topic metadata merged with reserved-metadata defaults."""
    try:
        topic_meta = self._topic_controller.get(topic_name,
                                                project=project_id)

        # Backfill any reserved keys the stored document left
        # falsy or unset.
        reserved = _get_reserved_metadata(self._validate)
        for key, default in reserved.items():
            if not topic_meta.get(key):
                topic_meta[key] = default
    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPNotFound(six.text_type(ex))
    except Exception:
        description = _(u'Topic metadata could not be retrieved.')
        LOG.exception(description)
        raise wsgi_errors.HTTPServiceUnavailable(description)

    resp.body = utils.to_json(topic_meta)
def on_patch(self, request, response, project_id, flavor):
    """Allows one to update a flavors's pool_group.

    This method expects the user to submit a JSON object containing
    'pool_group'. If none is found, the request is flagged as bad.
    There is also strict format checking through the use of
    jsonschema. Appropriate errors are returned in each case for
    badly formatted input.

    :returns: HTTP | [200, 400]
    """
    LOG.debug(u'PATCH flavor - name: %s', flavor)
    doc = wsgi_utils.load(request)

    MUTABLE = ('pool_group', 'pool')
    if not any(field in doc for field in MUTABLE):
        LOG.debug(u'PATCH flavor, bad params')
        raise wsgi_errors.HTTPBadRequestBody(
            '`pool_group` or `pool` needs to be specified')

    # jsonschema validation of each recognized field.
    for field in MUTABLE:
        wsgi_utils.validate(self._validators[field], doc)

    fields = common_utils.fields(doc, MUTABLE,
                                 pred=lambda v: v is not None)

    # NOTE(wanghao): remove this in Newton.
    if fields.get('pool') and fields.get('pool_group') is None:
        fields['pool_group'] = fields.get('pool')
        fields.pop('pool')

    resp_data = None

    try:
        self._ctrl.update(flavor, project=project_id, **fields)
        resp_data = self._ctrl.get(flavor, project=project_id)
        capabilities = self._pools_ctrl.capabilities(
            group=resp_data['pool_group'])
        resp_data['capabilities'] = [str(cap).split('.')[-1]
                                     for cap in capabilities]
    except errors.FlavorDoesNotExist as ex:
        LOG.exception(ex)
        raise wsgi_errors.HTTPNotFound(six.text_type(ex))

    resp_data['href'] = request.path
    response.body = transport_utils.to_json(resp_data)
def on_patch(self, request, response, project_id, pool):
    """Allows one to update a pool's weight, uri, and/or options.

    This method expects the user to submit a JSON object containing
    at least one of: 'uri', 'weight', 'group', 'options'. If none
    are found, the request is flagged as bad. There is also strict
    format checking through the use of jsonschema. Appropriate errors
    are returned in each case for badly formatted input.

    :returns: HTTP | 200,400
    """
    LOG.debug(u'PATCH pool - name: %s', pool)
    data = wsgi_utils.load(request)

    MUTABLE = ('weight', 'uri', 'group', 'options')
    if not any(field in data for field in MUTABLE):
        LOG.debug(u'PATCH pool, bad params')
        raise wsgi_errors.HTTPBadRequestBody(
            'One of `uri`, `weight`, `group`, or `options` needs '
            'to be specified'
        )

    # jsonschema validation of each recognized field.
    for field in MUTABLE:
        wsgi_utils.validate(self._validators[field], data)

    # Reject a URI we cannot actually connect to before persisting it.
    conf = self._ctrl.driver.conf
    if 'uri' in data and not storage_utils.can_connect(data['uri'],
                                                       conf=conf):
        raise wsgi_errors.HTTPBadRequestBody(
            'cannot connect to %s' % data['uri']
        )

    fields = common_utils.fields(data, MUTABLE,
                                 pred=lambda v: v is not None)
    resp_data = None

    try:
        self._ctrl.update(pool, **fields)
        resp_data = self._ctrl.get(pool, False)
    except errors.PoolDoesNotExist as ex:
        LOG.exception(ex)
        raise wsgi_errors.HTTPNotFound(six.text_type(ex))

    resp_data['href'] = request.path
    response.body = transport_utils.to_json(resp_data)
def on_get(self, req, resp, project_id, queue_name):
    """List a queue's subscriptions, including a next-page link."""
    LOG.debug(u'Subscription collection GET - project: %(project)s, '
              u'queue: %(queue)s',
              {'project': project_id, 'queue': queue_name})

    kwargs = {}

    # NOTE(kgriffs): This syntax ensures that
    # we don't clobber default values with None.
    req.get_param('marker', store=kwargs)
    req.get_param_as_int('limit', store=kwargs)

    try:
        self._validate.subscription_listing(**kwargs)
        results = self._subscription_controller.list(
            queue_name, project=project_id, **kwargs)
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Subscriptions could not be listed.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Buffer list of subscriptions
    subscriptions = list(next(results))

    # Got some. Prepare the response, falling back to the request's
    # marker (or '') when the cursor yields no new one.
    kwargs['marker'] = next(results) or kwargs.get('marker', '')

    resp.body = utils.to_json({
        'subscriptions': subscriptions,
        'links': [{
            'rel': 'next',
            'href': req.path + falcon.to_query_str(kwargs)
        }]
    })
def on_get(self, req, resp, project_id, queue_name):
    """Return a message listing, or specific messages by ID."""
    ids = req.get_param_as_list('ids')

    if ids is None:
        response = self._get(req, project_id, queue_name)
    else:
        base_path = req.path.rsplit('/', 1)[0]
        response = self._get_by_id(base_path, project_id,
                                   queue_name, ids)

    if response is None:
        # NOTE(TheSriram): Trying to get a message by id, should
        # return the message if its present, otherwise a 404 since
        # the message might have been deleted.
        resp.status = falcon.HTTP_404
    else:
        resp.body = utils.to_json(response)
def on_get(self, req, resp, project_id, queue_name, message_id):
    """Fetch a single message and serialize it for the v1 API."""
    try:
        message = self._message_controller.get(queue_name,
                                               message_id,
                                               project=project_id)
    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPNotFound(six.text_type(ex))
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Message could not be retrieved.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    resp.content_location = req.relative_uri

    base_path = req.path.rsplit('/', 2)[0]
    resp.body = utils.to_json(
        wsgi_utils.format_message_v1(message, base_path))
def _on_patch_by_group(self, request, response, project_id, flavor,
                       pool_group):
    """Update a flavor's pool group and refresh its capabilities."""
    LOG.debug(u'PATCH flavor - name: %s by group', flavor)

    resp_data = None
    try:
        # Resolve the capabilities provided by the new pool group.
        flavor_obj = {'pool_group': pool_group}
        capabilities = self._pools_ctrl.capabilities(flavor=flavor_obj)

        self._ctrl.update(flavor, project=project_id,
                          pool_group=pool_group,
                          capabilities=capabilities)
        resp_data = self._ctrl.get(flavor, project=project_id)
        resp_data['capabilities'] = [str(cap).split('.')[-1]
                                     for cap in capabilities]
    except errors.FlavorDoesNotExist as ex:
        LOG.exception(ex)
        raise wsgi_errors.HTTPNotFound(six.text_type(ex))

    resp_data['href'] = request.path
    response.body = transport_utils.to_json(resp_data)
def on_post(self, req, resp, project_id, queue_name):
    """Create a subscription; 201 on success, 409 if it already exists."""
    LOG.debug(u'Subscription item POST - project: %(project)s, '
              u'queue: %(queue)s',
              {'project': project_id, 'queue': queue_name})

    # An empty body is treated as an empty document; validation
    # below decides whether that is acceptable.
    if req.content_length:
        document = wsgi_utils.deserialize(req.stream,
                                          req.content_length)
    else:
        document = {}

    try:
        self._validate.subscription_posting(document)
        subscriber = document['subscriber']
        ttl = int(document['ttl'])
        options = document['options']
        created = self._subscription_controller.create(
            queue_name, subscriber, ttl, options, project=project_id)
    except storage_errors.QueueDoesNotExist as ex:
        LOG.exception(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except validation.ValidationFailed as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Subscription could not be created.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    resp.location = req.path

    if created:
        resp.status = falcon.HTTP_201
        resp.body = utils.to_json(
            {'subscription_id': six.text_type(created)})
    else:
        resp.status = falcon.HTTP_409
def on_post(self, req, resp, project_id, queue_name):
    """Create a pre-signed URL granting access to queue resources."""
    LOG.debug(u'Pre-Signed URL Creation for queue: %(queue)s, '
              u'project: %(project)s',
              {'queue': queue_name, 'project': project_id})

    try:
        document = wsgi_utils.deserialize(req.stream,
                                          req.content_length)
    except ValueError as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))

    unknown = set(document.keys()) - _KNOWN_KEYS
    if unknown:
        msg = six.text_type(_LE('Unknown keys: %s') % unknown)
        raise wsgi_errors.HTTPBadRequestAPI(msg)

    key = self._conf.signed_url.secret_key

    # NOTE(review): the slice drops a six-character suffix from the
    # request path (presumably '/share') to get the queue root —
    # confirm against the route registration.
    root = req.path[:-6]

    paths = document.pop('paths', None)
    if not paths:
        paths = [os.path.join(root, 'messages')]
    else:
        invalid = set(paths) - _VALID_PATHS
        if invalid:
            msg = six.text_type(_LE('Invalid paths: %s') % invalid)
            raise wsgi_errors.HTTPBadRequestAPI(msg)
        paths = [os.path.join(root, path) for path in paths]

    try:
        data = urls.create_signed_url(key, paths,
                                      project=project_id,
                                      **document)
    except ValueError as err:
        raise wsgi_errors.HTTPBadRequestAPI(str(err))

    resp.body = utils.to_json(data)
def on_get(self, req, resp, project_id, queue_name):
    """Return a message listing, or messages by ID; 404 if none found."""
    ids = req.get_param_as_list('ids')

    if ids is None:
        response = self._get(req, project_id, queue_name)
    else:
        base_path = req.path.rsplit('/', 1)[0]
        response = self._get_by_id(base_path, project_id,
                                   queue_name, ids)

    if response is None:
        # NOTE(TheSriram): Trying to get a message by id, should
        # return the message if its present, otherwise a 404 since
        # the message might have been deleted.
        msg = _(u'No messages with IDs: {ids} found in the queue {queue} '
                u'for project {project}.')
        description = msg.format(queue=queue_name,
                                 project=project_id,
                                 ids=ids)
        raise wsgi_errors.HTTPNotFound(description)

    resp.body = utils.to_json(response)
def on_get(self, req, resp, project_id, queue_name, claim_id):
    """Fetch a claim along with its claimed messages."""
    LOG.debug(u'Claim item GET - claim: %(claim_id)s, '
              u'queue: %(queue_name)s, project: %(project_id)s',
              {'queue_name': queue_name,
               'project_id': project_id,
               'claim_id': claim_id})

    try:
        meta, msgs = self._claim_controller.get(queue_name,
                                                claim_id=claim_id,
                                                project=project_id)

        # Buffer claimed messages
        # TODO(kgriffs): Optimize along with serialization (see below)
        meta['messages'] = list(msgs)
    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise falcon.HTTPNotFound()
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Claim could not be queried.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Serialize claimed messages
    # TODO(kgriffs): Optimize
    base_path = req.path.rsplit('/', 2)[0]
    meta['messages'] = [wsgi_utils.format_message_v1(msg, base_path,
                                                     meta['id'])
                        for msg in meta['messages']]

    meta['href'] = req.path
    del meta['id']

    resp.content_location = req.relative_uri
    resp.body = utils.to_json(meta)
def on_get(self, req, resp, project_id, queue_name):
    """List messages, or fetch by IDs; 204 when nothing is found."""
    LOG.debug(u'Messages collection GET - queue: %(queue)s, '
              u'project: %(project)s',
              {'queue': queue_name, 'project': project_id})

    resp.content_location = req.relative_uri

    ids = req.get_param_as_list('ids')
    if ids is None:
        response = self._get(req, project_id, queue_name)
    else:
        base_path = req.path.rsplit('/', 1)[0]
        response = self._get_by_id(base_path, project_id,
                                   queue_name, ids)

    if response is None:
        resp.status = falcon.HTTP_204
        return

    resp.body = utils.to_json(response)
def on_get(self, req, resp, project_id, queue_name, message_id):
    """Fetch a single message, serialized for the v1.1 API."""
    try:
        message = self._message_controller.get(queue_name,
                                               message_id,
                                               project=project_id)
    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise falcon.HTTPNotFound()
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Message could not be retrieved.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Prepare response
    message['href'] = req.path

    base_path = req.path.rsplit('/', 2)[0]
    message = wsgi_utils.format_message_v1_1(message, base_path,
                                             message['claim_id'])

    resp.body = utils.to_json(message)
def on_get(self, req, resp, project_id, queue_name, subscription_id):
    """Fetch a single subscription document."""
    LOG.debug(u'Subscription GET - subscription id: %(subscription_id)s,'
              u' project: %(project)s, queue: %(queue)s',
              {'subscription_id': subscription_id,
               'project': project_id,
               'queue': queue_name})

    try:
        subscription = self._subscription_controller.get(
            queue_name, subscription_id, project=project_id)
    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise falcon.HTTPNotFound()
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Subscription could not be retrieved.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    resp.body = utils.to_json(subscription)
def _pop_messages(self, queue_name, project_id, pop_limit):
    """Pop up to pop_limit messages; return (status, JSON body)."""
    try:
        LOG.debug(u'POP messages - queue: %(queue)s, '
                  u'project: %(project)s',
                  {'queue': queue_name, 'project': project_id})

        popped = self._message_controller.pop(queue_name,
                                              project=project_id,
                                              limit=pop_limit)
    except Exception:
        description = _(u'Messages could not be popped.')
        LOG.exception(description)
        raise wsgi_errors.HTTPServiceUnavailable(description)

    # Prepare response: normalize a falsy result to an empty list.
    body = utils.to_json({'messages': popped or []})

    return falcon.HTTP_200, body
def on_get(self, req, resp, project_id, queue_name):
    """Return the queue metadata document as JSON."""
    LOG.debug(u'Queue metadata GET - queue: %(queue)s, '
              u'project: %(project)s',
              {'queue': queue_name, 'project': project_id})

    try:
        metadata = self._queue_ctrl.get_metadata(queue_name,
                                                 project=project_id)
    except storage_errors.DoesNotExist as ex:
        LOG.debug(ex)
        raise falcon.HTTPNotFound()
    except Exception as ex:
        LOG.exception(ex)
        description = _(u'Queue metadata could not be retrieved.')
        raise wsgi_errors.HTTPServiceUnavailable(description)

    resp.content_location = req.path
    resp.body = utils.to_json(metadata)