def stdattrs_extended_resources(attributes):
    """Map every standard-attr resource/sub-resource to *attributes*.

    Top-level resources get *attributes* directly; sub-resources wrap them
    under a 'parameters' key, as the API extension framework expects.
    """
    resources = standard_attr.get_standard_attr_resource_model_map(
        include_resources=True, include_sub_resources=False)
    sub_resources = standard_attr.get_standard_attr_resource_model_map(
        include_resources=False, include_sub_resources=True)
    extended = {name: attributes for name in resources}
    extended.update(
        {name: {'parameters': attributes} for name in sub_resources})
    return extended
def stdattrs_extended_resources(attributes):
    """Build the extended-resources dict for all standard-attr resources.

    Resources map straight to *attributes*; sub-resources nest them under
    'parameters'.
    """
    extended = {}
    for collection in standard_attr.get_standard_attr_resource_model_map(
            include_resources=True, include_sub_resources=False):
        extended[collection] = attributes
    for sub_collection in standard_attr.get_standard_attr_resource_model_map(
            include_resources=False, include_sub_resources=True):
        extended[sub_collection] = {'parameters': attributes}
    return extended
def _get_constrained_instance_match(self, session):
    """Returns instance and constraint of if-match criterion if present.

    Checks the context associated with the session for compare-and-swap
    update revision number constraints. If one is found, this returns
    the instance that is constrained as well as the requested revision
    number to match.
    """
    # The request context driving this session carries the if-match
    # constraint, when one was supplied by the API caller.
    context = session.info.get('using_context')
    if context:
        # NOTE(ralonsoh): use "pop_transaction_constraint" once implemented
        criteria = context.get_transaction_constraint()
        # Read-and-clear: the constraint must be enforced at most once
        # per transaction.
        context.clear_transaction_constraint()
    else:
        criteria = None
    if not criteria:
        # No constraint registered for this transaction.
        return None, None
    match = criteria.if_revision_match
    mmap = standard_attr.get_standard_attr_resource_model_map()
    model = mmap.get(criteria.resource)
    if not model:
        # Only resources with standard attributes carry revision numbers.
        msg = _("Revision matching not supported for this resource")
        raise exc.BadRequest(resource=criteria.resource, msg=msg)
    instance = self._find_instance_by_column_value(
        session, model, 'id', criteria.resource_id)
    return instance, match
class TimeStamp_db_mixin(object):
    """Mixin class to add Time Stamp methods."""

    def __new__(cls, *args, **kwargs):
        # Register the changed-since result filter on every model with
        # standard attributes, so all such resources support filtering
        # by timestamp.
        rs_model_maps = standard_attr.get_standard_attr_resource_model_map()
        for model in rs_model_maps.values():
            model_query.register_hook(
                model, "change_since_query", query_hook=None,
                filter_hook=None,
                result_filters=_change_since_result_filter_hook)
        return super(TimeStamp_db_mixin, cls).__new__(cls, *args, **kwargs)

    def register_db_events(self):
        # Subscribe the DB event handlers that maintain created_at /
        # updated_at on standard-attr rows.
        listen = db_api.sqla_listen
        listen(standard_attr.StandardAttribute, 'before_insert',
               _add_timestamp)
        listen(se.Session, 'before_flush', _update_timestamp)

    @staticmethod
    @resource_extend.extends(
        list(standard_attr.get_standard_attr_resource_model_map()))
    def _extend_resource_dict_timestamp(resource_res, resource_db):
        # Only fill in the API dict when the DB row actually carries
        # both timestamps.
        if (resource_db and resource_db.created_at and
                resource_db.updated_at):
            _format_timestamp(resource_db, resource_res)
def test_bulk_delete_protection(self):
    """Bulk deletion of security group rules must be rejected.

    Rules bump their parent security group's revision on change, so a
    bulk query-level delete (which bypasses per-object events) has to
    raise instead of silently skipping those bumps.
    """
    model_map = standard_attr.get_standard_attr_resource_model_map()
    rule_model = model_map['security_group_rules']
    with testtools.ExpectedException(RuntimeError):
        admin_ctx = context.get_admin_context()
        admin_ctx.session.query(rule_model).delete()
def test_standard_attr_resource_model_map(self):
    # Build the map once before declaring new models, so the registry
    # machinery has been exercised at least once.
    rs_map = standard_attr.get_standard_attr_resource_model_map()
    base = self._make_decl_base()

    class MyModel(standard_attr.HasStandardAttributes,
                  standard_attr.model_base.HasId,
                  base):
        # One model may register multiple API collections.
        api_collections = ['my_resource', 'my_resource2']

    # Both collections must resolve back to the declaring model.
    rs_map = standard_attr.get_standard_attr_resource_model_map()
    self.assertEqual(MyModel, rs_map['my_resource'])
    self.assertEqual(MyModel, rs_map['my_resource2'])

    class Dup(standard_attr.HasStandardAttributes,
              standard_attr.model_base.HasId,
              base):
        # Re-registers a collection name already claimed by MyModel.
        api_collections = ['my_resource']

    # Duplicate collection names must make map construction fail loudly.
    with testtools.ExpectedException(RuntimeError):
        standard_attr.get_standard_attr_resource_model_map()
class StandardAttrDescriptionMixin(object):
    """Adds the 'description' field to standard-attr resource dicts."""

    supported_extension_aliases = ['standard-attr-description']

    @staticmethod
    @resource_extend.extends(
        list(standard_attr.get_standard_attr_resource_model_map()))
    def _extend_standard_attr_description(res, db_object):
        # Some DB objects passed through the extender may not carry a
        # description attribute; leave the API dict untouched for those.
        if hasattr(db_object, 'description'):
            res['description'] = db_object.description
def __new__(cls, *args, **kwargs):
    # Install the changed-since result filter on every standard-attr
    # model before the mixin instance is created.
    models = standard_attr.get_standard_attr_resource_model_map().values()
    for db_model in models:
        model_query.register_hook(
            db_model, "change_since_query",
            query_hook=None,
            filter_hook=None,
            result_filters=_change_since_result_filter_hook)
    return super(TimeStamp_db_mixin, cls).__new__(cls, *args, **kwargs)
def __new__(cls, *args, **kwargs):
    # Every standard-attr model gets the same changed-since hook; only
    # the result filter is populated.
    hook_kwargs = {
        'query_hook': None,
        'filter_hook': None,
        'result_filters': _change_since_result_filter_hook,
    }
    rs_models = standard_attr.get_standard_attr_resource_model_map()
    for db_model in rs_models.values():
        model_query.register_hook(
            db_model, "change_since_query", **hook_kwargs)
    return super(TimeStamp_db_mixin, cls).__new__(cls, *args, **kwargs)
def test_standard_attr_resource_model_map(self):
    # Prime the registry before new models are declared.
    rs_map = standard_attr.get_standard_attr_resource_model_map()
    base = self._make_decl_base()

    class MyModel(standard_attr.HasStandardAttributes,
                  standard_attr.model_base.HasId,
                  base):
        # A single model may back several API collections.
        api_collections = ['my_resource', 'my_resource2']

    # Each declared collection must map back to the declaring model.
    rs_map = standard_attr.get_standard_attr_resource_model_map()
    self.assertEqual(MyModel, rs_map['my_resource'])
    self.assertEqual(MyModel, rs_map['my_resource2'])

    class Dup(standard_attr.HasStandardAttributes,
              standard_attr.model_base.HasId,
              base):
        # Deliberately collides with MyModel's 'my_resource'.
        api_collections = ['my_resource']

    # Colliding collection names must raise during map construction.
    with testtools.ExpectedException(RuntimeError):
        standard_attr.get_standard_attr_resource_model_map()
def __init__(self):
    super(TimeStampPlugin, self).__init__()
    self.register_db_events()
    # Wire each standard-attr collection to the timestamp dict extender
    # and each model to the changed-since query hook.
    for collection, db_model in (
            standard_attr.get_standard_attr_resource_model_map().items()):
        resource_extend.register_funcs(
            collection, [self.extend_resource_dict_timestamp])
        model_query.register_hook(
            db_model, "change_since_query", None, None,
            self._change_since_result_filter_hook)
    # TODO(jlibosva): Move this to register_model_query_hook
    base_obj.register_filter_hook_on_model(
        models_v2.SubnetPool, ts_db.CHANGED_SINCE)
def test_standard_attr_resource_model_map(self):
    # Prime the registry before declaring new models.
    rs_map = standard_attr.get_standard_attr_resource_model_map()
    base = self._make_decl_base()

    class MyModel(standard_attr.HasStandardAttributes,
                  standard_attr.model_base.HasId,
                  base):
        # Registered both as top-level collections and as a sub-resource.
        api_collections = ['my_resource', 'my_resource2']
        api_sub_resources = ['my_subresource']

    # Default map contains resources and sub-resources alike.
    rs_map = standard_attr.get_standard_attr_resource_model_map()
    self.assertEqual(MyModel, rs_map['my_resource'])
    self.assertEqual(MyModel, rs_map['my_resource2'])
    self.assertEqual(MyModel, rs_map['my_subresource'])

    # Sub-resources only: top-level collections must be absent.
    sub_rs_map = standard_attr.get_standard_attr_resource_model_map(
        include_resources=False,
        include_sub_resources=True)
    self.assertNotIn('my_resource', sub_rs_map)
    self.assertNotIn('my_resource2', sub_rs_map)
    self.assertEqual(MyModel, sub_rs_map['my_subresource'])

    # Resources only: the sub-resource must be absent.
    nosub_rs_map = standard_attr.get_standard_attr_resource_model_map(
        include_resources=True,
        include_sub_resources=False)
    self.assertEqual(MyModel, nosub_rs_map['my_resource'])
    self.assertEqual(MyModel, nosub_rs_map['my_resource2'])
    self.assertNotIn('my_subresource', nosub_rs_map)

    class Dup(standard_attr.HasStandardAttributes,
              standard_attr.model_base.HasId,
              base):
        # Collides with MyModel's 'my_resource' collection.
        api_collections = ['my_resource']

    # Duplicate collection names must fail map construction.
    with testtools.ExpectedException(RuntimeError):
        standard_attr.get_standard_attr_resource_model_map()
def __init__(self):
    super(TimeStampPlugin, self).__init__()
    self.register_db_events()
    # Attach the timestamp dict extender and the changed-since query
    # hook to every standard-attr collection/model pair.
    for collection, db_model in (
            standard_attr.get_standard_attr_resource_model_map().items()):
        db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs(
            collection, [self.extend_resource_dict_timestamp])
        db_base_plugin_v2.NeutronDbPluginV2.register_model_query_hook(
            db_model, "change_since_query", None, None,
            self._change_since_result_filter_hook)
    # TODO(jlibosva): Move this to register_model_query_hook
    base_obj.register_filter_hook_on_model(
        models_v2.SubnetPool, ts_db.CHANGED_SINCE)
def test_api_collections_are_expected(self):
    # NOTE to reviewers. If this test is being modified, it means the
    # resources being extended by standard attr extensions have changed.
    # Ensure that the patch has made this discoverable to API users.
    # This means a new extension for a new resource or a new extension
    # indicating that an existing resource now has standard attributes.
    # Ensure devref list of resources is updated at
    # doc/source/devref/api_extensions.rst
    expected = {'subnets', 'trunks', 'routers', 'segments',
                'security_group_rules', 'networks', 'policies',
                'subnetpools', 'ports', 'security_groups', 'floatingips'}
    observed = set(standard_attr.get_standard_attr_resource_model_map())
    self.assertEqual(expected, observed)
def _get_constrained_instance_match(self, session):
    """Return (instance, revision) for a pending if-match constraint.

    Looks up the compare-and-swap revision-number constraint registered
    on the context driving this session. Returns the constrained
    instance together with the revision number it must match, or
    (None, None) when no constraint is present.
    """
    ctx = session.info.get('using_context')
    criteria = ctx.get_transaction_constraint() if ctx else None
    if not criteria:
        return None, None
    model_map = standard_attr.get_standard_attr_resource_model_map()
    model = model_map.get(criteria.resource)
    if not model:
        # Only standard-attr resources track revision numbers.
        msg = _("Revision matching not supported for this resource")
        raise exc.BadRequest(resource=criteria.resource, msg=msg)
    instance = self._find_instance_by_column_value(
        session, model, 'id', criteria.resource_id)
    return instance, criteria.if_revision_match
def _get_constrained_instance_match(self, session):
    """Returns instance and constraint of if-match criterion if present.

    Checks the context associated with the session for compare-and-swap
    update revision number constraints. If one is found, this returns
    the instance that is constrained as well as the requested revision
    number to match.
    """
    # The constraint is stashed as a dict on the request context under
    # the private '_CONSTRAINT' attribute; a missing context or missing
    # attribute both yield None.
    criteria = getattr(session.info.get('using_context'), '_CONSTRAINT',
                       None)
    if not criteria:
        return None, None
    match = criteria['if_revision_match']
    mmap = standard_attr.get_standard_attr_resource_model_map()
    model = mmap.get(criteria['resource'])
    if not model:
        # Only standard-attr resources carry revision numbers.
        msg = _("Revision matching not supported for this resource")
        raise exc.BadRequest(resource=criteria['resource'], msg=msg)
    instance = self._find_instance_by_column_value(
        session, model, 'id', criteria['resource_id'])
    return instance, match
def test_standard_attr_resource_model_map(self):
    # Exercise the registry once before the new models exist.
    rs_map = standard_attr.get_standard_attr_resource_model_map()
    base = self._make_decl_base()

    class MyModel(standard_attr.HasStandardAttributes,
                  standard_attr.model_base.HasId,
                  base):
        # Exposed as two top-level collections plus one sub-resource.
        api_collections = ['my_resource', 'my_resource2']
        api_sub_resources = ['my_subresource']

    # The default map includes both resources and sub-resources.
    rs_map = standard_attr.get_standard_attr_resource_model_map()
    self.assertEqual(MyModel, rs_map['my_resource'])
    self.assertEqual(MyModel, rs_map['my_resource2'])
    self.assertEqual(MyModel, rs_map['my_subresource'])

    # Sub-resources-only view.
    sub_rs_map = standard_attr.get_standard_attr_resource_model_map(
        include_resources=False,
        include_sub_resources=True)
    self.assertNotIn('my_resource', sub_rs_map)
    self.assertNotIn('my_resource2', sub_rs_map)
    self.assertEqual(MyModel, sub_rs_map['my_subresource'])

    # Resources-only view.
    nosub_rs_map = standard_attr.get_standard_attr_resource_model_map(
        include_resources=True,
        include_sub_resources=False)
    self.assertEqual(MyModel, nosub_rs_map['my_resource'])
    self.assertEqual(MyModel, nosub_rs_map['my_resource2'])
    self.assertNotIn('my_subresource', nosub_rs_map)

    class Dup(standard_attr.HasStandardAttributes,
              standard_attr.model_base.HasId,
              base):
        # Re-uses MyModel's 'my_resource' name on purpose.
        api_collections = ['my_resource']

    # Duplicate registration must raise when the map is rebuilt.
    with testtools.ExpectedException(RuntimeError):
        standard_attr.get_standard_attr_resource_model_map()
def get_extended_resources(self, version):
    """Expose the revision attribute body on all standard-attr resources.

    Only API version "2.0" is extended; any other version gets an empty
    map.
    """
    if version == "2.0":
        return dict.fromkeys(
            standard_attr.get_standard_attr_resource_model_map(),
            REVISION_BODY)
    return {}
def __init__(self):
    super(RevisionPlugin, self).__init__()
    # Extend every standard-attr resource dict with its revision number.
    collections = standard_attr.get_standard_attr_resource_model_map()
    for collection in collections:
        db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs(
            collection, [self.extend_resource_dict_revision])
    # Bump revisions whenever a session flushes pending changes.
    db_api.sqla_listen(se.Session, 'before_flush', self.bump_revisions)
def __new__(cls, *args, **kwargs):
    # Hook the description extender into every standard-attr collection
    # before the mixin instance is created.
    collections = standard_attr.get_standard_attr_resource_model_map()
    for collection in collections:
        resource_extend.register_funcs(
            collection, ['_extend_standard_attr_description'])
    return super(StandardAttrDescriptionMixin, cls).__new__(
        cls, *args, **kwargs)
def __init__(self):
    super(RevisionPlugin, self).__init__()
    # Every standard-attr collection reports its revision number.
    for collection in standard_attr.get_standard_attr_resource_model_map():
        db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs(
            collection, [self.extend_resource_dict_revision])
    # Bump revisions as part of every session flush.
    event.listen(se.Session, 'before_flush', self.bump_revisions)
# under the License. from neutron.db import standard_attr from neutron_lib.db import api as db_api from oslo_db import api as oslo_db_api from oslo_log import log from sqlalchemy.orm import exc from networking_ovn.common import constants as ovn_const from networking_ovn.common import exceptions as ovn_exc from networking_ovn.common import utils from networking_ovn.db import models LOG = log.getLogger(__name__) STD_ATTR_MAP = standard_attr.get_standard_attr_resource_model_map() _wrap_db_retry = oslo_db_api.wrap_db_retry( max_retries=ovn_const.DB_MAX_RETRIES, retry_interval=ovn_const.DB_INITIAL_RETRY_INTERVAL, max_retry_interval=ovn_const.DB_MAX_RETRY_INTERVAL, inc_retry_interval=True, retry_on_deadlock=True) def _get_standard_attr_id(session, resource_uuid, resource_type): try: row = session.query( STD_ATTR_MAP[resource_type]).filter_by(id=resource_uuid).one() return row.standard_attr_id except exc.NoResultFound:
def get_extended_resources(self, version):
    """Expose timestamp attributes on all standard-attr resources.

    Only API version "2.0" is extended; other versions get an empty map.
    """
    if version == "2.0":
        return dict.fromkeys(
            standard_attr.get_standard_attr_resource_model_map(),
            TIMESTAMP_BODY)
    return {}
class RevisionPlugin(service_base.ServicePluginBase):
    """Plugin to populate revision numbers into standard attr resources."""

    supported_extension_aliases = ['standard-attr-revisions']

    def __init__(self):
        super(RevisionPlugin, self).__init__()
        db_api.sqla_listen(se.Session, 'before_flush', self.bump_revisions)
        # flags must be cleared on both transaction outcomes so the next
        # transaction can bump the same objects again
        db_api.sqla_listen(se.Session, 'after_commit',
                           self._clear_rev_bumped_flags)
        db_api.sqla_listen(se.Session, 'after_rollback',
                           self._clear_rev_bumped_flags)

    def bump_revisions(self, session, context, instances):
        """before_flush hook: bump revisions of dirty and related objects."""
        # bump revision number for any updated objects in the session
        for obj in session.dirty:
            if isinstance(obj, standard_attr.HasStandardAttributes):
                self._bump_obj_revision(session, obj)

        # see if any created/updated/deleted objects bump the revision
        # of another object
        objects_with_related_revisions = [
            o for o in session.deleted | session.dirty | session.new
            if getattr(o, 'revises_on_change', ())]
        for obj in objects_with_related_revisions:
            self._bump_related_revisions(session, obj)

    def _bump_related_revisions(self, session, obj):
        """Recursively bump revisions of objects related via
        revises_on_change.
        """
        for revises_col in getattr(obj, 'revises_on_change', ()):
            try:
                related_obj = self._find_related_obj(session, obj,
                                                     revises_col)
                if not related_obj:
                    LOG.warning(_LW("Could not find related %(col)s for "
                                    "resource %(obj)s to bump revision."),
                                {'obj': obj, 'col': revises_col})
                    continue
                # if related object revises others, bump those as well
                self._bump_related_revisions(session, related_obj)

                # no need to bump revisions on related objects being deleted
                if related_obj not in session.deleted:
                    self._bump_obj_revision(session, related_obj)
            except exc.ObjectDeletedError:
                # object was in session but another writer deleted it
                pass

    def get_plugin_type(self):
        return "revision_plugin"

    def get_plugin_description(self):
        return "Adds revision numbers to resources."

    @staticmethod
    @resource_extend.extends(
        list(standard_attr.get_standard_attr_resource_model_map()))
    def extend_resource_dict_revision(resource_res, resource_db):
        resource_res['revision_number'] = resource_db.revision_number

    def _find_related_obj(self, session, obj, relationship_col):
        """Gets a related object off of a relationship.

        Raises a runtime error if the relationship isn't configured
        correctly for revision bumping.
        """
        # first check to see if it's directly attached to the object already
        related_obj = getattr(obj, relationship_col)
        if related_obj:
            return related_obj
        for rel in sqlalchemy.inspect(obj).mapper.relationships:
            if rel.key != relationship_col:
                continue
            if not rel.load_on_pending:
                # bug fix: the column name was previously passed as a
                # second positional argument to RuntimeError instead of
                # being %-interpolated into the message, so the rendered
                # error never contained the offending relationship name.
                raise RuntimeError(
                    _("revises_on_change relationships must "
                      "have load_on_pending set to True to "
                      "bump parent revisions on create: %s") %
                    relationship_col)

    def _clear_rev_bumped_flags(self, session):
        """This clears all flags on commit/rollback to enable rev bumps."""
        for inst in session:
            setattr(inst, '_rev_bumped', False)

    def _bump_obj_revision(self, session, obj):
        """Increment object revision at most once per transaction.

        The '_rev_bumped' flag prevents double-increments within a single
        transaction; it is reset by _clear_rev_bumped_flags on commit or
        rollback.
        """
        if getattr(obj, '_rev_bumped', False):
            # we've already bumped the revision of this object in this txn
            return
        obj.bump_revision()
        setattr(obj, '_rev_bumped', True)
def __new__(cls, *args, **kwargs):
    # Register the description extender for each standard-attr
    # collection before instantiation.
    collections = standard_attr.get_standard_attr_resource_model_map()
    for collection in collections:
        common_db_mixin.CommonDbMixin.register_dict_extend_funcs(
            collection, ['_extend_standard_attr_description'])
    return super(StandardAttrDescriptionMixin, cls).__new__(
        cls, *args, **kwargs)
class RevisionPlugin(service_base.ServicePluginBase):
    """Plugin to populate revision numbers into standard attr resources."""

    supported_extension_aliases = ['standard-attr-revisions',
                                   'revision-if-match']

    __filter_validation_support = True

    def __init__(self):
        super(RevisionPlugin, self).__init__()
        # background on these event hooks:
        # https://docs.sqlalchemy.org/en/latest/orm/session_events.html
        db_api.sqla_listen(se.Session, 'before_flush', self.bump_revisions)
        db_api.sqla_listen(se.Session, "after_flush_postexec",
                           self._emit_related_revision_bumps)
        db_api.sqla_listen(se.Session, 'after_commit',
                           self._clear_rev_bumped_flags)
        db_api.sqla_listen(se.Session, 'after_rollback',
                           self._clear_rev_bumped_flags)

    def bump_revisions(self, session, context, instances):
        """before_flush hook: bump dirty objects, collect related ones."""
        self._enforce_if_match_constraints(session)
        # bump revision number for any updated objects in the session
        self._bump_obj_revisions(
            session,
            [obj for obj in session.dirty
             if isinstance(obj, standard_attr.HasStandardAttributes)])

        # see if any created/updated/deleted objects bump the revision
        # of another object
        objects_with_related_revisions = [
            o for o in session.deleted | session.dirty | session.new
            if getattr(o, 'revises_on_change', ())]
        # the set is stored in session.info so it survives until the
        # after_flush_postexec phase, where the bumps are emitted
        collected = session.info.setdefault('_related_bumped', set())
        self._collect_related_tobump(
            session, objects_with_related_revisions, collected)

    def _emit_related_revision_bumps(self, session, context):
        # within after_flush_postexec, emit an UPDATE statement to increment
        # revision flags for related objects that were located in the
        # before_flush phase.
        #
        # note that this event isn't called if the flush fails;
        # in that case, the transaction is rolled back and the
        # after_rollback event will invoke self._clear_rev_bumped_flags
        # to clean out state.
        collected = session.info.get('_related_bumped', None)
        if collected:
            try:
                self._bump_obj_revisions(
                    session, collected, version_check=False)
            finally:
                collected.clear()

    def _collect_related_tobump(self, session, objects, collected):
        """Accumulate into *collected* the related objects to rev-bump."""
        for obj in objects:
            if obj in collected:
                continue
            for revises_col in getattr(obj, 'revises_on_change', ()):
                related_obj = self._find_related_obj(obj, revises_col)
                if not related_obj:
                    LOG.warning("Could not find related %(col)s for "
                                "resource %(obj)s to bump revision.",
                                {'obj': obj, 'col': revises_col})
                    continue
                # if related object revises others, bump those as well
                self._collect_related_tobump(
                    session, [related_obj], collected)
                # no need to bump revisions on related objects being deleted
                if related_obj not in session.deleted:
                    collected.add(related_obj)
        return collected

    def get_plugin_type(self):
        return "revision_plugin"

    def get_plugin_description(self):
        return "Adds revision numbers to resources."

    @staticmethod
    @resource_extend.extends(
        list(standard_attr.get_standard_attr_resource_model_map()))
    def extend_resource_dict_revision(resource_res, resource_db):
        resource_res['revision_number'] = resource_db.revision_number

    def _find_related_obj(self, obj, relationship_col):
        """Gets a related object off of a relationship.

        Raises a runtime error if the relationship isn't configured
        correctly for revision bumping.
        """
        # first check to see if it's directly attached to the object already
        try:
            related_obj = getattr(obj, relationship_col)
        except exc.ObjectDeletedError:
            # object was in session but another writer deleted it
            return None
        if related_obj:
            return related_obj
        for rel in sqlalchemy.inspect(obj).mapper.relationships:
            if rel.key != relationship_col:
                continue
            if not rel.load_on_pending:
                raise RuntimeError(_("revises_on_change relationships must "
                                     "have load_on_pending set to True to "
                                     "bump parent revisions on create: %s") %
                                   relationship_col)

    def _clear_rev_bumped_flags(self, session):
        """This clears all flags on commit/rollback to enable rev bumps."""
        session.info.pop('_related_bumped', None)
        for inst in session:
            setattr(inst, '_rev_bumped', False)

    def _bump_obj_revisions(self, session, objects, version_check=True):
        """Increment object revisions.

        If version_check=True, uses SQLAlchemy ORM's compare-and-swap
        feature (known as "version_id_col" in the ORM mapping), which is
        part of the StandardAttribute class.

        If version_check=False, runs an UPDATE statement directly against
        the set of all StandardAttribute objects at once, without using
        any compare and swap logic.

        If a revision number constraint rule was associated with the
        Session, this is retrieved and each object is tested to see if it
        matches this condition; if so, the constraint is enforced.
        """
        # filter objects for which we've already bumped the revision
        to_bump = [
            obj for obj in objects
            if not getattr(obj, '_rev_bumped', False)]
        if not to_bump:
            return
        self._run_constrained_instance_match_check(session, to_bump)
        if not version_check:
            # this UPDATE statement could alternatively be written to run
            # as an UPDATE-per-object with Python-generated revision numbers
            # as parameters.
            session.query(standard_attr.StandardAttribute).filter(
                standard_attr.StandardAttribute.id.in_(
                    [obj._effective_standard_attribute_id
                     for obj in to_bump])
            ).update(
                {
                    # note that SQLAlchemy runs the onupdate function for
                    # the updated_at column and applies it to the SET
                    # clause as well.
                    standard_attr.StandardAttribute.revision_number:
                    standard_attr.StandardAttribute.revision_number + 1
                }, synchronize_session=False)

            # run a SELECT to get back the new values we just generated.
            # if MySQL supported RETURNING, we could get these numbers
            # back from the UPDATE without running another SELECT.
            retrieve_revision_numbers = {
                row.id: (row.revision_number, row.updated_at)
                for row in session.query(
                    standard_attr.StandardAttribute.id,
                    standard_attr.StandardAttribute.revision_number,
                    standard_attr.StandardAttribute.updated_at,
                ).filter(
                    standard_attr.StandardAttribute.id.in_(
                        [obj._effective_standard_attribute_id
                         for obj in to_bump]))
            }

        for obj in to_bump:
            if version_check:
                # full version check, run the ORM routine to UPDATE
                # the row with a WHERE clause
                obj.bump_revision()
            else:
                # no version check - get back what we did in our one-step
                # UPDATE statement and set it without causing change in
                # ORM flush state
                try:
                    new_version_id, new_updated_at = retrieve_revision_numbers[
                        obj._effective_standard_attribute_id]
                except KeyError:
                    # in case the object was deleted concurrently
                    LOG.warning(
                        "No standard attr row found for resource: %(obj)s",
                        {'obj': obj})
                else:
                    obj._set_updated_revision_number(new_version_id,
                                                     new_updated_at)
            setattr(obj, '_rev_bumped', True)

    def _run_constrained_instance_match_check(self, session, objects):
        # Re-validate the if-match constraint right before the bump for
        # the object it targets.
        instance, match = self._get_constrained_instance_match(session)
        for obj in objects:
            if instance and instance == obj:
                # one last check before bumping revision
                self._enforce_if_match_constraints(session)

    def _find_instance_by_column_value(self, session, model, column, value):
        """Lookup object in session or from DB based on a column's value."""
        for session_obj in session:
            if not isinstance(session_obj, model):
                continue
            if getattr(session_obj, column) == value:
                return session_obj
        # object isn't in session so we have to query for it
        related_obj = (session.query(model).filter_by(
            **{column: value}).first())
        return related_obj

    def _get_constrained_instance_match(self, session):
        """Returns instance and constraint of if-match criterion if present.

        Checks the context associated with the session for compare-and-swap
        update revision number constraints. If one is found, this returns
        the instance that is constrained as well as the requested revision
        number to match.
        """
        context = session.info.get('using_context')
        criteria = context.get_transaction_constraint() if context else None
        if not criteria:
            return None, None
        match = criteria.if_revision_match
        mmap = standard_attr.get_standard_attr_resource_model_map()
        model = mmap.get(criteria.resource)
        if not model:
            msg = _("Revision matching not supported for this resource")
            raise exc.BadRequest(resource=criteria.resource, msg=msg)
        instance = self._find_instance_by_column_value(
            session, model, 'id', criteria.resource_id)
        return instance, match

    def _enforce_if_match_constraints(self, session):
        """Check for if-match constraints and raise exception if violated.

        We determine the collection being modified and look for any
        objects of the collection type in the dirty/deleted items in the
        session. If they don't match the revision_number constraint
        supplied, we throw an exception.

        We are protected from a concurrent update because if we match
        revision number here and another update commits to the database
        first, the compare and swap of revision_number will fail and a
        StaleDataError (or deadlock in galera multi-writer) will
        be raised, at which point this will be retried and fail to match.
        """
        instance, match = self._get_constrained_instance_match(session)
        if not instance or getattr(instance, '_rev_bumped', False):
            # no constraints present or constrain satisfied in this
            # transaction
            return
        if instance.revision_number != match:
            raise RevisionNumberConstraintFailed(
                match, instance.revision_number)
from neutron_lib.plugins import directory from oslo_log import helpers as log_helpers from sqlalchemy.orm import exc from neutron.db import _model_query as model_query from neutron.db import _resource_extend as resource_extend from neutron.db import api as db_api from neutron.db import common_db_mixin from neutron.db import standard_attr from neutron.db import tag_db as tag_methods from neutron.extensions import tagging from neutron.objects import tag as tag_obj # Taggable resources resource_model_map = standard_attr.get_standard_attr_resource_model_map() @resource_extend.has_resource_extenders class TagPlugin(common_db_mixin.CommonDbMixin, tagging.TagPluginBase): """Implementation of the Neutron Tag Service Plugin.""" supported_extension_aliases = ['tag', 'tag-ext', 'standard-attr-tag'] def __new__(cls, *args, **kwargs): inst = super(TagPlugin, cls).__new__(cls, *args, **kwargs) inst._filter_methods = [] # prevent GC of our partial functions for model in resource_model_map.values(): method = functools.partial(tag_methods.apply_tag_filters, model) inst._filter_methods.append(method) model_query.register_hook(model, "tag",
from neutron_lib.db import api as lib_db_api
from neutron_lib.objects import exceptions as obj_exc
from neutron_lib.plugins import directory
from oslo_log import helpers as log_helpers
from sqlalchemy.orm import exc

from neutron.db import _model_query as model_query
from neutron.db import _resource_extend as resource_extend
from neutron.db import api as db_api
from neutron.db import common_db_mixin
from neutron.db import standard_attr
from neutron.extensions import tagging
from neutron.objects import tag as tag_obj


# Taggable resources
resource_model_map = standard_attr.get_standard_attr_resource_model_map()


@resource_extend.has_resource_extenders
class TagPlugin(common_db_mixin.CommonDbMixin, tagging.TagPluginBase):
    """Implementation of the Neutron Tag Service Plugin."""

    supported_extension_aliases = ['standard-attr-tag']

    __filter_validation_support = True

    def __new__(cls, *args, **kwargs):
        inst = super(TagPlugin, cls).__new__(cls, *args, **kwargs)
        # tag hooks (query filters / dict extenders) are installed by
        # the tag OVO module rather than per-instance here
        tag_obj.register_tag_hooks()
        return inst
class RevisionPlugin(service_base.ServicePluginBase):
    """Plugin to populate revision numbers into standard attr resources."""

    supported_extension_aliases = ['standard-attr-revisions',
                                   'revision-if-match']

    def __init__(self):
        super(RevisionPlugin, self).__init__()
        db_api.sqla_listen(se.Session, 'before_flush', self.bump_revisions)
        # flags must be cleared on both transaction outcomes so objects
        # can be bumped again in the next transaction
        db_api.sqla_listen(se.Session, 'after_commit',
                           self._clear_rev_bumped_flags)
        db_api.sqla_listen(se.Session, 'after_rollback',
                           self._clear_rev_bumped_flags)

    def bump_revisions(self, session, context, instances):
        """before_flush hook: bump revisions of dirty and related objects."""
        self._enforce_if_match_constraints(session)
        # bump revision number for any updated objects in the session
        for obj in session.dirty:
            if isinstance(obj, standard_attr.HasStandardAttributes):
                self._bump_obj_revision(session, obj)

        # see if any created/updated/deleted objects bump the revision
        # of another object
        objects_with_related_revisions = [
            o for o in session.deleted | session.dirty | session.new
            if getattr(o, 'revises_on_change', ())]
        for obj in objects_with_related_revisions:
            self._bump_related_revisions(session, obj)

    def _bump_related_revisions(self, session, obj):
        """Recursively bump revisions of objects related via
        revises_on_change.
        """
        for revises_col in getattr(obj, 'revises_on_change', ()):
            try:
                related_obj = self._find_related_obj(session, obj,
                                                     revises_col)
                if not related_obj:
                    LOG.warning("Could not find related %(col)s for "
                                "resource %(obj)s to bump revision.",
                                {'obj': obj, 'col': revises_col})
                    continue
                # if related object revises others, bump those as well
                self._bump_related_revisions(session, related_obj)

                # no need to bump revisions on related objects being deleted
                if related_obj not in session.deleted:
                    self._bump_obj_revision(session, related_obj)
            except exc.ObjectDeletedError:
                # object was in session but another writer deleted it
                pass

    def get_plugin_type(self):
        return "revision_plugin"

    def get_plugin_description(self):
        return "Adds revision numbers to resources."

    @staticmethod
    @resource_extend.extends(
        list(standard_attr.get_standard_attr_resource_model_map()))
    def extend_resource_dict_revision(resource_res, resource_db):
        resource_res['revision_number'] = resource_db.revision_number

    def _find_related_obj(self, session, obj, relationship_col):
        """Gets a related object off of a relationship.

        Raises a runtime error if the relationship isn't configured
        correctly for revision bumping.
        """
        # first check to see if it's directly attached to the object already
        related_obj = getattr(obj, relationship_col)
        if related_obj:
            return related_obj
        for rel in sqlalchemy.inspect(obj).mapper.relationships:
            if rel.key != relationship_col:
                continue
            if not rel.load_on_pending:
                # bug fix: the column name was previously passed as a
                # second positional argument to RuntimeError instead of
                # being %-interpolated into the message, so the rendered
                # error never contained the offending relationship name.
                raise RuntimeError(
                    _("revises_on_change relationships must "
                      "have load_on_pending set to True to "
                      "bump parent revisions on create: %s") %
                    relationship_col)

    def _clear_rev_bumped_flags(self, session):
        """This clears all flags on commit/rollback to enable rev bumps."""
        for inst in session:
            setattr(inst, '_rev_bumped', False)

    def _bump_obj_revision(self, session, obj):
        """Increment object revision in compare and swap fashion.

        Before the increment, this checks and enforces any revision number
        constraints.
        """
        if getattr(obj, '_rev_bumped', False):
            # we've already bumped the revision of this object in this txn
            return
        instance, match = self._get_constrained_instance_match(session)
        if instance and instance == obj:
            # one last check before bumping revision
            self._enforce_if_match_constraints(session)
        obj.bump_revision()
        setattr(obj, '_rev_bumped', True)

    def _find_instance_by_column_value(self, session, model, column, value):
        """Lookup object in session or from DB based on a column's value."""
        for session_obj in session:
            if not isinstance(session_obj, model):
                continue
            if getattr(session_obj, column) == value:
                return session_obj
        # object isn't in session so we have to query for it
        related_obj = (session.query(model).filter_by(
            **{column: value}).first())
        return related_obj

    def _get_constrained_instance_match(self, session):
        """Returns instance and constraint of if-match criterion if present.

        Checks the context associated with the session for compare-and-swap
        update revision number constraints. If one is found, this returns
        the instance that is constrained as well as the requested revision
        number to match.
        """
        context = session.info.get('using_context')
        criteria = context.get_transaction_constraint() if context else None
        if not criteria:
            return None, None
        match = criteria.if_revision_match
        mmap = standard_attr.get_standard_attr_resource_model_map()
        model = mmap.get(criteria.resource)
        if not model:
            msg = _("Revision matching not supported for this resource")
            raise exc.BadRequest(resource=criteria.resource, msg=msg)
        instance = self._find_instance_by_column_value(
            session, model, 'id', criteria.resource_id)
        return instance, match

    def _enforce_if_match_constraints(self, session):
        """Check for if-match constraints and raise exception if violated.

        We determine the collection being modified and look for any
        objects of the collection type in the dirty/deleted items in the
        session. If they don't match the revision_number constraint
        supplied, we throw an exception.

        We are protected from a concurrent update because if we match
        revision number here and another update commits to the database
        first, the compare and swap of revision_number will fail and a
        StaleDataError (or deadlock in galera multi-writer) will
        be raised, at which point this will be retried and fail to match.
        """
        instance, match = self._get_constrained_instance_match(session)
        if not instance or getattr(instance, '_rev_bumped', False):
            # no constraints present or constrain satisfied in this
            # transaction
            return
        if instance.revision_number != match:
            raise RevisionNumberConstraintFailed(
                match, instance.revision_number)