def step(self):
    """Run another step of the task, and return True if the task is
    complete; False otherwise.
    """
    if not self.done():
        assert self._runner is not None, "Task not started"
        if self._timeout is not None and self._timeout.expired():
            logger.info(_('%s timed out') % str(self))
            try:
                self._runner.throw(self._timeout)
            except StopIteration:
                self._done = True
            else:
                # Clean up in case task swallows exception without exiting
                self.cancel()
        else:
            logger.debug(_('%s running') % str(self))
            try:
                next(self._runner)
            except StopIteration:
                self._done = True
                logger.debug(_('%s complete') % str(self))
    return self._done
def __call__(self, message_data):
    """Consumer callback to call a method on a proxy object.

    Parses the message for validity and fires off a thread to call the
    proxy object method.

    Message data should be a dictionary with two keys:
        method: string representing the method to call
        args: dictionary of arg: value

    Example: {'method': 'echo', 'args': {'value': 42}}
    """
    # It is important to clear the context here, because at this point
    # the previous context is stored in local.store.context
    if hasattr(local.store, 'context'):
        del local.store.context
    rpc_common._safe_log(LOG.debug, 'received %s', message_data)
    self.msg_id_cache.check_duplicate_message(message_data)
    ctxt = unpack_context(self.conf, message_data)
    method = message_data.get('method')
    args = message_data.get('args', {})
    version = message_data.get('version')
    namespace = message_data.get('namespace')
    if not method:
        LOG.warn(_('no method for message: %s') % message_data)
        ctxt.reply(_('No method for message: %s') % message_data,
                   connection_pool=self.connection_pool)
        return
    self.pool.spawn_n(self._process_data, ctxt, version, method,
                      namespace, args)
def start(self):
    verstr = version.version_string()
    LOG.info(_('Starting %(topic)s node (version %(version)s)'),
             {'topic': self.topic, 'version': verstr})
    self.basic_config_check()
    self.manager.init_host()
    self.model_disconnected = False

    LOG.debug(_("Creating RPC server for service %s") % self.topic)

    target = messaging.Target(topic=self.topic, server=self.host)
    endpoints = [
        self.manager,
        baserpc.BaseRPCAPI(self.manager.service_name)
    ]
    endpoints.extend(self.manager.additional_endpoints)
    serializer = objects_base.HealingObjectSerializer()

    self.rpcserver = rpc.get_server(target, endpoints, serializer)
    self.rpcserver.start()

    if self.periodic_enable:
        if self.periodic_fuzzy_delay:
            initial_delay = random.randint(0, self.periodic_fuzzy_delay)
        else:
            initial_delay = None
        self.tg.add_dynamic_timer(
            self.periodic_tasks,
            initial_delay=initial_delay,
            periodic_interval_max=self.periodic_interval_max)
def _multi_send(method, context, topic, msg, timeout=None,
                envelope=False, _msg_id=None):
    """Wraps the sending of messages.

    Dispatches to the matchmaker and sends message to all relevant hosts.
    """
    conf = CONF
    LOG.debug("%(msg)s" % {'msg': ' '.join(map(pformat, (topic, msg)))})

    queues = _get_matchmaker().queues(topic)
    LOG.debug("Sending message(s) to: %s", queues)

    # Don't stack if we have no matchmaker results
    if not queues:
        LOG.warn(_("No matchmaker results. Not casting."))
        # While not strictly a timeout, callers know how to handle
        # this exception and a timeout isn't too big a lie.
        raise rpc_common.Timeout(_("No match from matchmaker."))

    # This supports brokerless fanout (addresses > 1)
    for queue in queues:
        (_topic, ip_addr) = queue
        _addr = "tcp://%s:%s" % (ip_addr, conf.rpc_zmq_port)

        if method.__name__ == '_cast':
            eventlet.spawn_n(method, _addr, context,
                             _topic, msg, timeout, envelope, _msg_id)
            return
        return method(_addr, context, _topic, msg, timeout, envelope)
def _process_data(self, message_data):
    msg_id = message_data.pop('_msg_id', None)
    waiter = self._call_waiters.get(msg_id)
    if not waiter:
        LOG.warn(_('No calling threads waiting for msg_id : %(msg_id)s'
                   ', message : %(data)s'),
                 {'msg_id': msg_id, 'data': message_data})
        LOG.warn(_('_call_waiters: %s') % self._call_waiters)
    else:
        waiter.put(message_data)
def _get_not_supported_column(col_name_col_instance, column_name):
    try:
        column = col_name_col_instance[column_name]
    except KeyError:
        msg = _("Please specify column %s in col_name_col_instance "
                "param. It is required because the column has a type "
                "that is not supported by sqlite.")
        raise ColumnError(msg % column_name)

    if not isinstance(column, Column):
        msg = _("col_name_col_instance param has a wrong type of "
                "column instance for column %s. It should be an "
                "instance of sqlalchemy.Column.")
        raise ColumnError(msg % column_name)
    return column
def _db_schema_sanity_check(engine):
    """Ensure all database tables were created with required parameters.

    :param engine: SQLAlchemy engine instance for a given database
    """
    if engine.name == 'mysql':
        onlyutf8_sql = ('SELECT TABLE_NAME,TABLE_COLLATION '
                        'from information_schema.TABLES '
                        'where TABLE_SCHEMA=%s and '
                        'TABLE_COLLATION NOT LIKE "%%utf8%%"')

        # NOTE(morganfainberg): exclude the sqlalchemy-migrate and alembic
        # versioning tables from the tables we need to verify utf8 status on.
        # Non-standard table names are not supported.
        EXCLUDED_TABLES = ['migrate_version', 'alembic_version']

        table_names = [res[0] for res in
                       engine.execute(onlyutf8_sql, engine.url.database)
                       if res[0].lower() not in EXCLUDED_TABLES]

        if len(table_names) > 0:
            raise ValueError(_('Tables "%s" have non utf8 collation, '
                               'please make sure all tables are CHARSET=utf8'
                               ) % ','.join(table_names))
class BadRequest(HTTPClientError):
    """HTTP 400 - Bad Request.

    The request cannot be fulfilled due to bad syntax.
    """
    http_status = 400
    message = _("Bad Request")
class HttpVersionNotSupported(HttpServerError):
    """HTTP 505 - HttpVersion Not Supported.

    The server does not support the HTTP protocol version used
    in the request.
    """
    http_status = 505
    message = _("HTTP Version Not Supported")
def __init__(self, addr, zmq_type, bind=True, subscribe=None):
    self.sock = _get_ctxt().socket(zmq_type)
    self.addr = addr
    self.type = zmq_type
    self.subscriptions = []

    # Support failures on sending/receiving on wrong socket type.
    self.can_recv = zmq_type in (zmq.PULL, zmq.SUB)
    self.can_send = zmq_type in (zmq.PUSH, zmq.PUB)
    self.can_sub = zmq_type in (zmq.SUB, )

    # Support list, str, & None for subscribe arg (cast to list)
    do_sub = {
        list: subscribe,
        str: [subscribe],
        type(None): []
    }[type(subscribe)]

    for f in do_sub:
        self.subscribe(f)

    str_data = {'addr': addr, 'type': self.socket_s(),
                'subscribe': subscribe, 'bind': bind}

    LOG.debug("Connecting to %(addr)s with %(type)s", str_data)
    LOG.debug("-> Subscribed to %(subscribe)s", str_data)
    LOG.debug("-> bind: %(bind)s", str_data)

    try:
        if bind:
            self.sock.bind(addr)
        else:
            self.sock.connect(addr)
    except Exception:
        raise RPCException(_("Could not open socket."))
class InternalServerError(HttpServerError):
    """HTTP 500 - Internal Server Error.

    A generic error message, given when no more specific message
    is suitable.
    """
    http_status = 500
    message = _("Internal Server Error")
class ServiceUnavailable(HttpServerError):
    """HTTP 503 - Service Unavailable.

    The server is currently unavailable.
    """
    http_status = 503
    message = _("Service Unavailable")
class RequestUriTooLong(HTTPClientError):
    """HTTP 414 - Request-URI Too Long.

    The URI provided was too long for the server to process.
    """
    http_status = 414
    message = _("Request-URI Too Long")
class ExpectationFailed(HTTPClientError):
    """HTTP 417 - Expectation Failed.

    The server cannot meet the requirements of the Expect
    request-header field.
    """
    http_status = 417
    message = _("Expectation Failed")
def find(self, base_url=None, **kwargs):
    """Find a single item with attributes matching ``**kwargs``.

    :param base_url: if provided, the generated URL will be appended to it
    """
    kwargs = self._filter_kwargs(kwargs)

    rl = self._list(
        '%(base_url)s%(query)s' % {
            'base_url': self.build_url(base_url=base_url, **kwargs),
            'query': '?%s' % parse.urlencode(kwargs) if kwargs else '',
        },
        self.collection_key)
    num = len(rl)

    if num == 0:
        msg = _("No %(name)s matching %(args)s.") % {
            'name': self.resource_class.__name__,
            'args': kwargs
        }
        raise exceptions.NotFound(404, msg)
    elif num > 1:
        raise exceptions.NoUniqueMatch
    else:
        return rl[0]
def coerce(self, obj, attr, value):
    if not isinstance(value, list):
        raise ValueError(_('A list is required here'))
    for index, element in enumerate(list(value)):
        value[index] = self._element_type.coerce(
            obj, '%s[%i]' % (attr, index), element)
    return value
def bool_from_string(subject, strict=False, default=False):
    """Interpret a string as a boolean.

    A case-insensitive match is performed such that strings matching 't',
    'true', 'on', 'y', 'yes', or '1' are considered True and, when
    `strict=False`, anything else returns the value specified by 'default'.

    Useful for JSON-decoded stuff and config file parsing.

    If `strict=True`, unrecognized values, including None, will raise a
    ValueError which is useful when parsing values passed in from an API
    call. Strings yielding False are 'f', 'false', 'off', 'n', 'no', or '0'.
    """
    if not isinstance(subject, six.string_types):
        subject = six.text_type(subject)

    lowered = subject.strip().lower()

    if lowered in TRUE_STRINGS:
        return True
    elif lowered in FALSE_STRINGS:
        return False
    elif strict:
        acceptable = ', '.join(
            "'%s'" % s for s in sorted(TRUE_STRINGS + FALSE_STRINGS))
        msg = _("Unrecognized value '%(val)s', acceptable values are:"
                " %(acceptable)s") % {'val': subject,
                                      'acceptable': acceptable}
        raise ValueError(msg)
    else:
        return default
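# A minimal, hedged usage sketch of the matching rules described in the
# docstring above. The constants and helper below are local stand-ins
# (assumed values, not the module's own TRUE_STRINGS/FALSE_STRINGS) so the
# example runs on its own.
_TRUE_STRINGS = ('1', 't', 'true', 'on', 'y', 'yes')
_FALSE_STRINGS = ('0', 'f', 'false', 'off', 'n', 'no')


def _bool_from_string_sketch(subject, strict=False, default=False):
    lowered = str(subject).strip().lower()
    if lowered in _TRUE_STRINGS:
        return True
    if lowered in _FALSE_STRINGS:
        return False
    if strict:
        raise ValueError("Unrecognized value %r" % subject)
    return default


print(_bool_from_string_sketch('YES'))               # True
print(_bool_from_string_sketch('maybe'))             # False (falls back to default)
print(_bool_from_string_sketch(None, default=True))  # True
# _bool_from_string_sketch('maybe', strict=True) would raise ValueError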
class Timeout(RPCException):
    """Signifies that a timeout has occurred.

    This exception is raised if the rpc_response_timeout is reached while
    waiting for a response from the remote side.
    """
    msg_fmt = _('Timeout while waiting on RPC response - '
                'topic: "%(topic)s", RPC method: "%(method)s" '
                'info: "%(info)s"')

    def __init__(self, info=None, topic=None, method=None):
        """Initiates Timeout object.

        :param info: Extra info to convey to the user
        :param topic: The topic that the rpc call was sent to
        :param method: The name of the rpc method being called
        """
        self.info = info
        self.topic = topic
        self.method = method
        super(Timeout, self).__init__(
            None,
            info=info or _('<unknown>'),
            topic=topic or _('<unknown>'),
            method=method or _('<unknown>'))
class HttpServerError(HttpError):
    """Server-side HTTP error.

    Exception for cases in which the server is aware that it has
    erred or is incapable of performing the request.
    """
    message = _("HTTP Server Error")
class HttpError(ClientException):
    """The base exception class for all HTTP exceptions."""

    http_status = 0
    message = _("HTTP Error")

    def __init__(self, message=None, details=None,
                 response=None, request_id=None,
                 url=None, method=None, http_status=None):
        self.http_status = http_status or self.http_status
        self.message = message or self.message
        self.details = details
        self.request_id = request_id
        self.response = response
        self.url = url
        self.method = method

        formatted_string = "%s (HTTP %s)" % (self.message, self.http_status)
        if request_id:
            formatted_string += " (Request-ID: %s)" % request_id

        super(HttpError, self).__init__(formatted_string)
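# Hedged illustration of the formatted string assembled by HttpError above.
# _format is a hypothetical standalone helper used only to show the output
# shape; it is not part of the exception class.
def _format(message, http_status, request_id=None):
    formatted = "%s (HTTP %s)" % (message, http_status)
    if request_id:
        formatted += " (Request-ID: %s)" % request_id
    return formatted


print(_format("Bad Request", 400, "req-5bb21dbb"))
# -> Bad Request (HTTP 400) (Request-ID: req-5bb21dbb)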
def start(self):
    verstr = version.version_string()
    LOG.info(_('Starting %(topic)s node (version %(version)s)'),
             {'topic': self.topic, 'version': verstr})
    self.basic_config_check()

    # endpoints.extend(self.manager.additional_endpoints)
    targets = []
    endpoints = []
    for x in self.topic:
        targets.append(messaging.Target(topic=x))
        endpoints.append(self.manager)

    listener = messaging.get_notification_listener(rpc.TRANSPORT,
                                                   targets,
                                                   endpoints,
                                                   allow_requeue=True)

    if self.periodic_enable:
        if self.periodic_fuzzy_delay:
            initial_delay = random.randint(0, self.periodic_fuzzy_delay)
        else:
            initial_delay = None
        self.tg.add_dynamic_timer(
            self.periodic_tasks,
            initial_delay=initial_delay,
            periodic_interval_max=self.periodic_interval_max)

    listener.start()
def read(self, i=None):
    result = self.data.read(i)
    self.bytes_read += len(result)
    if self.bytes_read > self.limit:
        msg = _("Request is too large.")
        raise webob.exc.HTTPRequestEntityTooLarge(explanation=msg)
    return result
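# Standalone sketch of the limiting-read behaviour above, assuming the reader
# wraps a file-like body with a byte limit. A plain ValueError stands in for
# webob's HTTPRequestEntityTooLarge so the example has no dependencies.
import io


class _LimitingReaderSketch(object):
    def __init__(self, data, limit):
        self.data = data
        self.limit = limit
        self.bytes_read = 0

    def read(self, i=None):
        result = self.data.read(i) if i is not None else self.data.read()
        self.bytes_read += len(result)
        if self.bytes_read > self.limit:
            raise ValueError("Request is too large.")
        return result


reader = _LimitingReaderSketch(io.BytesIO(b'x' * 10), limit=4)
try:
    reader.read()
except ValueError as exc:
    print(exc)  # the 10-byte body exceeds the 4-byte limit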
class PaymentRequired(HTTPClientError):
    """HTTP 402 - Payment Required.

    Reserved for future use.
    """
    http_status = 402
    message = _("Payment Required")
def obj_load_attr(self, attrname):
    """Load an additional attribute from the real object.

    This should use self._conductor, and cache any data that might be
    useful for future load operations.
    """
    raise NotImplementedError(
        _("Cannot load '%s' in the base class") % attrname)
class ProxyAuthenticationRequired(HTTPClientError):
    """HTTP 407 - Proxy Authentication Required.

    The client must first authenticate itself with the proxy.
    """
    http_status = 407
    message = _("Proxy Authentication Required")
def __init__(self, expected, value):
    super(KeyTypeError, self).__init__(
        _('Key %(key)s must be of type %(expected)s not %(actual)s') %
        {'key': repr(value),
         'expected': expected.__name__,
         'actual': value.__class__.__name__,
         })
class RequestTimeout(HTTPClientError):
    """HTTP 408 - Request Timeout.

    The server timed out waiting for the request.
    """
    http_status = 408
    message = _("Request Timeout")
def db_sync(engine, abs_path, version=None, init_version=0,
            sanity_check=True):
    """Upgrade or downgrade a database.

    Function runs the upgrade() or downgrade() functions in change scripts.

    :param engine:       SQLAlchemy engine instance for a given database
    :param abs_path:     Absolute path to migrate repository.
    :param version:      Database will upgrade/downgrade until this version.
                         If None - database will update to the latest
                         available version.
    :param init_version: Initial database version
    :param sanity_check: Require schema sanity checking for all tables
    """

    if version is not None:
        try:
            version = int(version)
        except ValueError:
            raise exception.DbMigrationError(
                message=_("version should be an integer"))

    current_version = db_version(engine, abs_path, init_version)
    repository = _find_migrate_repo(abs_path)
    if sanity_check:
        _db_schema_sanity_check(engine)
    if version is None or version > current_version:
        return versioning_api.upgrade(engine, repository, version)
    else:
        return versioning_api.downgrade(engine, repository, version)
def obj_class_from_name(cls, objname, objver):
    """Returns a class from the registry based on a name and version."""
    if objname not in cls._obj_classes:
        LOG.error(_('Unable to instantiate unregistered object type '
                    '%(objtype)s') % dict(objtype=objname))
        raise exceptions.UnsupportedObjectError(objtype=objname)

    # NOTE(comstud): If there's not an exact match, return the highest
    # compatible version. The objects stored in the class are sorted
    # such that highest version is first, so only set compatible_match
    # once below.
    compatible_match = None
    for objclass in cls._obj_classes[objname]:
        if objclass.VERSION == objver:
            return objclass
        if (not compatible_match and
                versionutils.is_compatible(objver, objclass.VERSION)):
            compatible_match = objclass

    if compatible_match:
        return compatible_match

    # As mentioned above, latest version is always first in the list.
    latest_ver = cls._obj_classes[objname][0].VERSION
    raise exceptions.IncompatibleObjectVersion(objname=objname,
                                               objver=objver,
                                               supported=latest_ver)
def publisher(waiter):
    LOG.info(_LI("Creating proxy for topic: %s"), topic)

    try:
        # The topic is received over the network,
        # don't trust this input.
        if self.badchars.search(topic) is not None:
            emsg = _("Topic contained dangerous characters.")
            LOG.warn(emsg)
            raise RPCException(emsg)

        out_sock = ZmqSocket("ipc://%s/zmq_topic_%s" % (ipc_dir, topic),
                             sock_type, bind=True)
    except RPCException:
        waiter.send_exception(*sys.exc_info())
        return

    self.topic_proxy[topic] = eventlet.queue.LightQueue(
        CONF.rpc_zmq_topic_backlog)
    self.sockets.append(out_sock)

    # It takes some time for a pub socket to open,
    # before we can have any faith in doing a send() to it.
    if sock_type == zmq.PUB:
        eventlet.sleep(.5)

    waiter.send(True)

    while True:
        data = self.topic_proxy[topic].get()
        out_sock.send(data, copy=False)
def deprecated(self, msg, *args, **kwargs):
    """Call this method when a deprecated feature is used.

    If the system is configured for fatal deprecations then the message
    is logged at the 'critical' level and :class:`DeprecatedConfig` will
    be raised.

    Otherwise, the message will be logged (once) at the 'warn' level.

    :raises: :class:`DeprecatedConfig` if the system is configured for
             fatal deprecations.
    """
    stdmsg = _("Deprecated: %s") % msg
    if CONF.fatal_deprecations:
        self.critical(stdmsg, *args, **kwargs)
        raise DeprecatedConfig(msg=stdmsg)

    # Using a list because a tuple with dict can't be stored in a set.
    sent_args = self._deprecated_messages_sent.setdefault(msg, list())

    if args in sent_args:
        # Already logged this message, so don't log it again.
        return

    sent_args.append(args)
    self.warn(stdmsg, *args, **kwargs)
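# Sketch of the once-per-unique-args bookkeeping used by deprecated() above,
# lifted out of the logger class so it runs standalone; the fatal_deprecations
# branch is intentionally omitted and print() stands in for the logger.
_deprecated_messages_sent = {}


def _deprecated_sketch(msg, *args):
    sent_args = _deprecated_messages_sent.setdefault(msg, list())
    if args in sent_args:
        return False  # already warned with these args; stay quiet
    sent_args.append(args)
    print("Deprecated: %s" % (msg % args))
    return True


print(_deprecated_sketch("use of %s is deprecated", "foo"))  # True, logs
print(_deprecated_sketch("use of %s is deprecated", "foo"))  # False, suppressed
print(_deprecated_sketch("use of %s is deprecated", "bar"))  # True, new args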
def __iter__(self):
    for chunk in self.data:
        self.bytes_read += len(chunk)
        if self.bytes_read > self.limit:
            msg = _("Request is too large.")
            raise webob.exc.HTTPRequestEntityTooLarge(explanation=msg)
        else:
            yield chunk
def _read_deleted_filter(query, db_model, read_deleted):
    if 'deleted' not in db_model.__table__.columns:
        raise ValueError(_("There is no `deleted` column in `%s` table. "
                           "Project doesn't use soft-deleted feature.")
                         % db_model.__name__)

    default_deleted_value = db_model.__table__.c.deleted.default.arg
    if read_deleted == 'no':
        query = query.filter(db_model.deleted == default_deleted_value)
    elif read_deleted == 'yes':
        pass  # omit the filter to include deleted and active
    elif read_deleted == 'only':
        query = query.filter(db_model.deleted != default_deleted_value)
    else:
        raise ValueError(_("Unrecognized read_deleted value '%s'")
                         % read_deleted)
    return query
def add_call_waiter(self, waiter, msg_id):
    self._num_call_waiters += 1
    if self._num_call_waiters > self._num_call_waiters_wrn_threshold:
        LOG.warn(_('Number of call waiters is greater than warning '
                   'threshold: %d. There could be a MulticallProxyWaiter '
                   'leak.') % self._num_call_waiters_wrn_threshold)
        self._num_call_waiters_wrn_threshold *= 2
    self._call_waiters[msg_id] = waiter
def coerce(obj, attr, value):
    # FIXME(danms): We should really try to avoid the need to do this
    if isinstance(value, (six.string_types, int, long, float,
                          datetime.datetime)):
        return unicode(value)
    else:
        raise ValueError(_('A string is required here, not %s') %
                         value.__class__.__name__)
def __init__(self, expected, key, value):
    super(ElementTypeError, self).__init__(
        _('Element %(key)s:%(val)s must be of type %(expected)s'
          ' not %(actual)s') %
        {'key': key,
         'val': repr(value),
         'expected': expected,
         'actual': value.__class__.__name__,
         })
def string_to_bytes(text, unit_system='IEC', return_int=False):
    """Converts a string into a float representation of bytes.

    The units supported for IEC ::

        Kb(it), Kib(it), Mb(it), Mib(it), Gb(it), Gib(it), Tb(it), Tib(it)
        KB, KiB, MB, MiB, GB, GiB, TB, TiB

    The units supported for SI ::

        kb(it), Mb(it), Gb(it), Tb(it)
        kB, MB, GB, TB

    Note that the SI unit system does not support capital letter 'K'

    :param text: String input for bytes size conversion.
    :param unit_system: Unit system for byte size conversion.
    :param return_int: If True, returns integer representation of text
                       in bytes. (default: decimal)
    :returns: Numerical representation of text in bytes.
    :raises ValueError: If text has an invalid value.
    """
    try:
        base, reg_ex = UNIT_SYSTEM_INFO[unit_system]
    except KeyError:
        msg = _('Invalid unit system: "%s"') % unit_system
        raise ValueError(msg)
    match = reg_ex.match(text)
    if match:
        magnitude = float(match.group(1))
        unit_prefix = match.group(2)
        if match.group(3) in ['b', 'bit']:
            magnitude /= 8
    else:
        msg = _('Invalid string format: %s') % text
        raise ValueError(msg)
    if not unit_prefix:
        res = magnitude
    else:
        res = magnitude * pow(base, UNIT_PREFIX_EXPONENT[unit_prefix])
    if return_int:
        return int(math.ceil(res))
    return res
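# Worked examples of the conversion rules described in the docstring above.
# The bases and exponents used here are assumptions consistent with IEC
# (1024) and SI (1000) prefixes; the module's actual UNIT_SYSTEM_INFO and
# UNIT_PREFIX_EXPONENT tables are not used.
import math


def _to_bytes_sketch(magnitude, base, exponent, is_bit=False):
    if is_bit:
        magnitude /= 8.0  # bits -> bytes
    return magnitude * pow(base, exponent)


print(_to_bytes_sketch(1, 1024, 1))               # '1KiB' under IEC -> 1024.0
print(_to_bytes_sketch(1, 1000, 1))               # '1kB' under SI   -> 1000.0
print(_to_bytes_sketch(1, 1000, 1, is_bit=True))  # '1kb' under SI   -> 125.0
print(int(math.ceil(_to_bytes_sketch(1, 1000, 1, is_bit=True))))  # return_int=True -> 125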
def _null(self, obj, attr):
    if self.nullable:
        return None
    elif self._default != UnspecifiedDefault:
        # NOTE(danms): We coerce the default value each time the field
        # is set to None as our contract states that we'll let the type
        # examine the object and attribute name at that time.
        return self._type.coerce(obj, attr, self._default)
    else:
        raise ValueError(_("Field `%s' cannot be None") % attr)
def coerce(self, obj, attr, value):
    try:
        obj_name = value.obj_name()
    except AttributeError:
        obj_name = ""

    if obj_name != self._obj_name:
        raise ValueError(_('An object of type %s is required here') %
                         self._obj_name)
    return value
def setter(self, value, name=name, field=field):
    self._changed_fields.add(name)
    try:
        return setattr(self, get_attrname(name),
                       field.coerce(self, name, value))
    except Exception:
        attr = "%s.%s" % (self.obj_name(), name)
        LOG.exception(_('Error setting %(attr)s') % {'attr': attr})
        raise
def __call__(self, req):
    if (req.content_length is not None and
            req.content_length > CONF.max_request_body_size):
        msg = _("Request is too large.")
        raise webob.exc.HTTPRequestEntityTooLarge(explanation=msg)
    if req.content_length is None and req.is_body_readable:
        limiter = LimitingReader(req.body_file,
                                 CONF.max_request_body_size)
        req.body_file = limiter
    return self.application
def model_query(context, model, session, args=None, project_only=False,
                read_deleted=None):
    """Query helper that accounts for context's `read_deleted` field.

    :param context:      context to query under
    :param model:        Model to query. Must be a subclass of ModelBase.
    :type model:         models.ModelBase
    :param session:      The session to use.
    :type session:       sqlalchemy.orm.session.Session
    :param args:         Arguments to query. If None - model is used.
    :type args:          tuple
    :param project_only: If present and context is user-type, then restrict
                         query to match the context's project_id. If set to
                         'allow_none', restriction includes project_id = None.
    :type project_only:  bool
    :param read_deleted: If present, overrides context's read_deleted field.
    :type read_deleted:  bool

    Usage:

    .. code:: python

        result = (utils.model_query(context, models.Instance,
                                    session=session)
                       .filter_by(uuid=instance_uuid)
                       .all())

        query = utils.model_query(
            context, Node,
            session=session,
            args=(func.count(Node.id), func.sum(Node.ram))
        ).filter_by(project_id=project_id)
    """

    if not read_deleted:
        if hasattr(context, 'read_deleted'):
            # NOTE(viktors): some projects use `read_deleted` attribute in
            # their contexts instead of `show_deleted`.
            read_deleted = context.read_deleted
        else:
            read_deleted = context.show_deleted

    if not issubclass(model, models.ModelBase):
        raise TypeError(_("model should be a subclass of ModelBase"))

    query = session.query(model) if not args else session.query(*args)
    query = _read_deleted_filter(query, model, read_deleted)
    query = _project_filter(query, model, context, project_only)

    return query
def notify(context, publisher_id, event_type, priority, payload):
    """Sends a notification using the specified driver

    :param publisher_id: the source worker_type.host of the message
    :param event_type:   the literal type of event (ex. Instance Creation)
    :param priority:     patterned after the enumeration of Python logging
                         levels in the set (DEBUG, WARN, INFO, ERROR, CRITICAL)
    :param payload:      A python dictionary of attributes

    Outgoing message format includes the above parameters, and appends the
    following:

    message_id
      a UUID representing the id for this notification

    timestamp
      the GMT timestamp the notification was sent at

    The composite message will be constructed as a dictionary of the above
    attributes, which will then be sent via the transport mechanism defined
    by the driver.

    Message example::

        {'message_id': str(uuid.uuid4()),
         'publisher_id': 'compute.host1',
         'timestamp': timeutils.utcnow(),
         'priority': 'WARN',
         'event_type': 'compute.create_instance',
         'payload': {'instance_id': 12, ... }}

    """
    if priority not in log_levels:
        raise BadPriorityException(
            _('%s not in valid priorities') % priority)

    # Ensure everything is JSON serializable.
    payload = jsonutils.to_primitive(payload, convert_instances=True)

    msg = dict(message_id=str(uuid.uuid4()),
               publisher_id=publisher_id,
               event_type=event_type,
               priority=priority,
               payload=payload,
               timestamp=str(timeutils.utcnow()))

    for driver in _get_drivers():
        try:
            driver.notify(context, msg)
        except Exception as e:
            LOG.exception(_LE("Problem '%(e)s' attempting to "
                              "send to notification system. "
                              "Payload=%(payload)s")
                          % dict(e=e, payload=payload))
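# A hedged sketch of what notify() above expects from a driver: any object
# exposing notify(context, msg). PrintDriver and the literal message dict are
# illustrative only (placeholder message_id and timestamp) and bypass the
# real _get_drivers() machinery and transport.
class PrintDriver(object):
    def notify(self, context, msg):
        print(msg['priority'], msg['event_type'], msg['payload'])


PrintDriver().notify(None, {
    'message_id': 'e4f1bd9e-0000-0000-0000-000000000000',
    'publisher_id': 'compute.host1',
    'timestamp': '2014-01-01 00:00:00',
    'priority': 'WARN',
    'event_type': 'compute.create_instance',
    'payload': {'instance_id': 12},
})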
def _get_matchmaker(*args, **kwargs):
    global matchmaker
    if not matchmaker:
        mm = CONF.rpc_zmq_matchmaker
        if mm.endswith('matchmaker.MatchMakerRing'):
            # str.replace() returns a new string; assign it back so the
            # deprecated path is actually rewritten before importing.
            mm = mm.replace('matchmaker', 'matchmaker_ring')
            LOG.warn(_('rpc_zmq_matchmaker = %(orig)s is deprecated; use'
                       ' %(new)s instead') % dict(
                     orig=CONF.rpc_zmq_matchmaker, new=mm))
        matchmaker = importutils.import_object(mm, *args, **kwargs)
    return matchmaker