Example #1
    def __init__(self, app, session):
        self.app = app
        self.session = session
        self._action_signal = signal('action_signal')
        self._action_signal.connect(_event_pitcher)
        if not hasattr(app, 'act_manager'):
            app.act_manager = self

        # Guarantee idempotence for database events.
        if event.contains(self.session, 'before_flush', db_event):
            return
        event.listen(self.session, 'before_flush', db_event)

        if event.contains(self.session, 'after_rollback', db_rollback):
            return
        event.listen(self.session, 'after_rollback', db_rollback)

        # Don't need to worry about threads yet since the _heartbeat thread
        # isn't running.
        _ha = HeartbeatAction()
        if not _ha:
            raise ValueError()
        
        event_registry.setdefault('heartbeat-action', []).append(_ha)

        # Periodic events. Smallest periodic interval is 15 seconds. Could be
        # configurable.
        _hb['func'] = _heartbeat
        t = Timer(_hb['interval'], _hb['func'])
        t.start()
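The constructor above leans on the guard that recurs throughout these examples: check event.contains() before event.listen() so that re-running setup code never stacks duplicate listeners. A minimal, self-contained sketch of that pattern (the log_flush handler and in-memory engine are illustrative, not taken from the example above):

    from sqlalchemy import create_engine, event
    from sqlalchemy.orm import Session

    def log_flush(session, flush_context, instances):
        # Fires once per flush; safe to wire up repeatedly thanks to the guard below.
        print("flushing", len(session.new), "new objects")

    def ensure_flush_listener(session):
        # event.contains() matches the exact callable, so calling this helper
        # twice attaches the listener only once.
        if not event.contains(session, "before_flush", log_flush):
            event.listen(session, "before_flush", log_flush)

    session = Session(create_engine("sqlite://"))
    ensure_flush_listener(session)
    ensure_flush_listener(session)  # no-op the second time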
Example #2
 def test__validate_disjoint_outlaws(self):
     special_years = [year for year in range(FAKE.random_int(min=1))]
     same_special_years_calendar = self.calendar_factory.build(
         special_leap_years=special_years,
         special_common_years=special_years,
     )
     assert event.contains(
         ConvertibleCalendar,
         "before_insert",
         ConvertibleCalendar.validate_disjoint_special_years,
     )
     assert event.contains(
         ConvertibleCalendar,
         "before_update",
         ConvertibleCalendar.validate_disjoint_special_years,
     )
     with pytest.raises(AssertionError), self.session:
         self.session.add(same_special_years_calendar)
         self.session.commit()
     with pytest.raises(AssertionError), self.session:
         calendar = self.calendar_factory.build(
             special_leap_years=special_years,
             special_common_years=[],
         )
         self.session.add(calendar)
         self.session.commit()
         calendar.special_common_years = special_years
         self.session.commit()
Example #3
 def test_remove(self):
     from sqlalchemy_mptt import tree_manager
     tree_manager.register_events(remove=True)
     self.assertFalse(
         contains(
             Tree,
             'before_insert',
             tree_manager.before_insert
         )
     )
     self.assertFalse(
         contains(
             Tree,
             'before_update',
             tree_manager.before_update
         )
     )
     self.assertFalse(
         contains(
             Tree,
             'before_delete',
             tree_manager.before_delete
         )
     )
     tree_manager.register_events()
Example #4
 def test_register(self):
     from sqlalchemy_mptt import tree_manager
     tree_manager.register_events()
     self.assertTrue(contains(BaseNestedSets, 'before_insert',
                              tree_manager.before_insert))
     self.assertTrue(contains(BaseNestedSets, 'before_update',
                              tree_manager.before_update))
     self.assertTrue(contains(BaseNestedSets, 'before_delete',
                              tree_manager.before_delete))
Example #5
 def test_remove(self):
     from sqlalchemy_mptt import tree_manager
     tree_manager.register_events(remove=True)
     self.assertFalse(contains(Tree, 'before_insert',
                               tree_manager.before_insert))
     self.assertFalse(contains(Tree, 'before_update',
                               tree_manager.before_update))
     self.assertFalse(contains(Tree, 'before_delete',
                               tree_manager.before_delete))
     tree_manager.register_events()
Example #6
def test_install_event_listeners():
    from sqlalchemy_postgresql_audit.event_listeners import enable_event_listeners
    from sqlalchemy_postgresql_audit.event_listeners.sqlalchemy import (
        create_audit_table,
    )

    assert not event.contains(Table, "after_parent_attach", create_audit_table)

    enable_event_listeners()

    assert event.contains(Table, "after_parent_attach", create_audit_table)
Example #7
 def test_restore(self):
     instrument = self._track_changes({MockModel1.dict1: dict})
     # set instance attribute, load instance, refresh instance and flush_refresh listeners
     assert len(instrument.listeners) == 4
     for listener_args in instrument.listeners:
         assert event.contains(*listener_args)
     instrument.restore()
     assert len(instrument.listeners) == 4
     for listener_args in instrument.listeners:
         assert not event.contains(*listener_args)
     return instrument
Example #8
 def _add_commit_hook(self, context):
     # TODO(ivar): this is SQLAlchemy-specific. Find a cleaner way to manage
     # tree manager's hooks.
     if context.store.supports_hooks:
         session = context.store.db_session
         if not sa_event.contains(session, 'after_flush',
                                  self._after_tree_session_flush):
             sa_event.listen(session, 'after_flush',
                             self._after_tree_session_flush)
         if not sa_event.contains(session, 'after_transaction_end',
                                  self._after_tree_transaction_end):
             sa_event.listen(session, 'after_transaction_end',
                             self._after_tree_transaction_end)
Example #9
 def add_commit_hook(self):
     if not sa_event.contains(self.db_session, 'before_flush',
                              self._before_session_commit):
         sa_event.listen(self.db_session, 'before_flush',
                         self._before_session_commit, self)
     if not sa_event.contains(self.db_session, 'after_transaction_end',
                              self._after_transaction_end):
         sa_event.listen(self.db_session, 'after_transaction_end',
                         self._after_transaction_end)
     if not sa_event.contains(self.db_session, 'after_flush',
                              self._after_session_flush):
         sa_event.listen(self.db_session, 'after_flush',
                         self._after_session_flush)
Example #10
 def test_commit(self):
     profiler = SessionProfiler()
     profiler.begin()
     with mock.patch.object(profiler, "_get_stats") as mocked:
         profiler.commit()
         mocked.assert_called()
         self.assertFalse(profiler.alive)
         self.assertFalse(
             event.contains(profiler.engine, profiler._before,
                            profiler._before_cursor_execute))
         self.assertFalse(
             event.contains(profiler.engine, profiler._after,
                            profiler._after_cursor_execute))
Example #11
    def apply_driver_hacks(self, app, info, options):
        """Call before engine creation."""
        # Don't forget to apply hacks defined on parent object.
        super(SQLAlchemy, self).apply_driver_hacks(app, info, options)

        # Set database pool connection
        self.__set_db_connection_pool(app, options)

        if info.drivername == 'sqlite':
            connect_args = options.setdefault('connect_args', {})

            if 'isolation_level' not in connect_args:
                # disable pysqlite's emitting of the BEGIN statement entirely.
                # also stops it from emitting COMMIT before any DDL.
                connect_args['isolation_level'] = None

            if not event.contains(Engine, 'connect', do_sqlite_connect):
                event.listen(Engine, 'connect', do_sqlite_connect)
            if not event.contains(Engine, 'begin', do_sqlite_begin):
                event.listen(Engine, 'begin', do_sqlite_begin)

            from sqlite3 import register_adapter

            def adapt_proxy(proxy):
                """Get current object and try to adapt it again."""
                return proxy._get_current_object()

            register_adapter(LocalProxy, adapt_proxy)

        elif info.drivername == 'postgresql+psycopg2':  # pragma: no cover
            from psycopg2.extensions import adapt, register_adapter

            def adapt_proxy(proxy):
                """Get current object and try to adapt it again."""
                return adapt(proxy._get_current_object())

            register_adapter(LocalProxy, adapt_proxy)

        elif info.drivername == 'mysql+pymysql':  # pragma: no cover
            from pymysql import converters

            def escape_local_proxy(val, mapping):
                """Get current object and try to adapt it again."""
                return converters.escape_item(
                    val._get_current_object(),
                    self.engine.dialect.encoding,
                    mapping=mapping,
                )

            converters.conversions[LocalProxy] = escape_local_proxy
            converters.encoders[LocalProxy] = escape_local_proxy
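do_sqlite_connect and do_sqlite_begin are imported from elsewhere in the package and are not shown in this excerpt. They presumably follow SQLAlchemy's standard pysqlite transactional-DDL recipe, roughly like the sketch below (an assumption, not necessarily this project's actual handlers):

    def do_sqlite_connect(dbapi_connection, connection_record):
        # Disable pysqlite's own BEGIN/COMMIT handling entirely.
        dbapi_connection.isolation_level = None

    def do_sqlite_begin(connection):
        # Emit our own BEGIN so DDL runs inside the transaction.
        # (Older SQLAlchemy versions used connection.execute("BEGIN") here.)
        connection.exec_driver_sql("BEGIN")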
Example #12
 def test_begin(self):
     profiler = SessionProfiler()
     with mock.patch.object(profiler, "_reset_stats") as mocked:
         profiler.begin()
         mocked.assert_called()
         self.assertTrue(profiler.alive)
         self.assertIsInstance(profiler.queries, Queue)
         self.assertTrue(profiler.queries.empty())
         self.assertTrue(
             event.contains(profiler.engine, profiler._before,
                            profiler._before_cursor_execute))
         self.assertTrue(
             event.contains(profiler.engine, profiler._after,
                            profiler._after_cursor_execute))
Example #13
    def apply_driver_hacks(self, app, sa_url, options):
        """Call before engine creation."""
        # Don't forget to apply hacks defined on parent object.
        super(SQLAlchemy, self).apply_driver_hacks(app, sa_url, options)

        if sa_url.drivername == "sqlite":
            connect_args = options.setdefault("connect_args", {})

            if "isolation_level" not in connect_args:
                # disable pysqlite's emitting of the BEGIN statement entirely.
                # also stops it from emitting COMMIT before any DDL.
                connect_args["isolation_level"] = None

            if not event.contains(Engine, "connect", do_sqlite_connect):
                event.listen(Engine, "connect", do_sqlite_connect)
            if not event.contains(Engine, "begin", do_sqlite_begin):
                event.listen(Engine, "begin", do_sqlite_begin)

            from sqlite3 import register_adapter

            def adapt_proxy(proxy):
                """Get current object and try to adapt it again."""
                return proxy._get_current_object()

            register_adapter(LocalProxy, adapt_proxy)

        elif sa_url.drivername == "postgresql+psycopg2":  # pragma: no cover
            from psycopg2.extensions import adapt, register_adapter

            def adapt_proxy(proxy):
                """Get current object and try to adapt it again."""
                return adapt(proxy._get_current_object())

            register_adapter(LocalProxy, adapt_proxy)

        elif sa_url.drivername == "mysql+pymysql":  # pragma: no cover
            from pymysql import converters

            def escape_local_proxy(val, mapping):
                """Get current object and try to adapt it again."""
                return converters.escape_item(
                    val._get_current_object(),
                    self.engine.dialect.encoding,
                    mapping=mapping,
                )

            converters.conversions[LocalProxy] = escape_local_proxy
            converters.encoders[LocalProxy] = escape_local_proxy

        return sa_url, options
Example #14
    def apply_driver_hacks(self, app, info, options):
        """Called before engine creation."""
        # Don't forget to apply hacks defined on parent object.
        super(SQLAlchemy, self).apply_driver_hacks(app, info, options)

        if info.drivername == 'sqlite':
            connect_args = options.setdefault('connect_args', {})

            if 'isolation_level' not in connect_args:
                # disable pysqlite's emitting of the BEGIN statement entirely.
                # also stops it from emitting COMMIT before any DDL.
                connect_args['isolation_level'] = None

            if not event.contains(Engine, "connect", do_sqlite_connect):
                event.listen(Engine, "connect", do_sqlite_connect)
            if not event.contains(Engine, "begin", do_sqlite_begin):
                event.listen(Engine, "begin", do_sqlite_begin)

            from sqlite3 import register_adapter

            def adapt_proxy(proxy):
                """Get current object and try to adapt it again."""
                return proxy._get_current_object()

            register_adapter(LocalProxy, adapt_proxy)

        elif info.drivername == 'postgresql+psycopg2':  # pragma: no cover
            from psycopg2.extensions import adapt, register_adapter

            def adapt_proxy(proxy):
                """Get current object and try to adapt it again."""
                return adapt(proxy._get_current_object())

            register_adapter(LocalProxy, adapt_proxy)

        elif info.drivername == 'mysql+pymysql':  # pragma: no cover
            from pymysql import converters

            def escape_local_proxy(val, mapping):
                """Get current object and try to adapt it again."""
                return converters.escape_item(
                    val._get_current_object(),
                    self.engine.dialect.encoding,
                    mapping=mapping,
                )

            converters.conversions[LocalProxy] = escape_local_proxy
            converters.encoders[LocalProxy] = escape_local_proxy
Example #15
    def __create_event(self):
        """ Create an SQLAlchemy event listening the 'set' in a particular column.

        :rtype : object
        """
        if not event.contains(self.field, 'set', self.__validate):
            event.listen(self.field, 'set', self.__validate, retval=True)
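Passing retval=True tells SQLAlchemy to use the listener's return value as the value actually assigned to the attribute, which is what lets __validate act as a coercing validator. A self-contained sketch of that kind of attribute-level 'set' listener (the User model and normalize_email handler are illustrative, not this library's code):

    from sqlalchemy import event
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    class Base(DeclarativeBase):
        pass

    class User(Base):
        __tablename__ = "user"
        id: Mapped[int] = mapped_column(primary_key=True)
        email: Mapped[str] = mapped_column(default="")

    def normalize_email(target, value, oldvalue, initiator):
        # With retval=True, whatever is returned here is what gets assigned.
        return value.strip().lower()

    if not event.contains(User.email, "set", normalize_email):
        event.listen(User.email, "set", normalize_email, retval=True)

    user = User(email="  Alice@Example.COM ")
    assert user.email == "alice@example.com"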
Example #16
    def register(cls, encryptor: Encryptor):
        """
        Register this encryptable with an encryptor.

        Instances of this encryptor will be encrypted on initialization and decrypted on load.

        """
        # save the current encryptor statically
        cls.__encryptor__ = encryptor

        # NB: we cannot use the before_insert listener in conjunction with a foreign key relationship
        # for encrypted data; SQLAlchemy will warn about using 'related attribute set' operation so
        # late in its insert/flush process.
        listeners = dict(
            init=on_init,
            load=on_load,
        )

        for name, func in listeners.items():
            # If we initialize the graph multiple times (as in many unit testing scenarios),
            # we will accumulate listener functions -- with unpredictable results. As protection,
            # we need to remove existing listeners before adding new ones; this solution only
            # works if the id (e.g. memory address) of the listener does not change, which means
            # they cannot be closures around the `encryptor` reference.
            #
            # Hence the `__encryptor__` hack above...
            if contains(cls, name, func):
                remove(cls, name, func)
            listen(cls, name, func)
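The comment about closures comes down to identity: event.contains() and event.remove() match the exact callable object, so a listener rebuilt as a closure on every call to register() would never compare equal to the one already attached. A small illustration of the pitfall (names are illustrative):

    def make_listener(tag):
        def on_load(target, context):
            print(tag, "loaded", target)
        return on_load

    # Each call builds a distinct function object, so a contains() guard keyed
    # on a fresh closure never matches and duplicate listeners would pile up.
    assert make_listener("x") is not make_listener("x")

    # Module-level functions such as on_init / on_load above keep a stable
    # identity across calls, which is what makes contains()/remove()/listen()
    # behave idempotently here.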
Example #17
    def test_mapping_listeners(self, mocker, event):
        mapper_mock, store, table = self.setup_mapper(mocker)
        Repository.mapping(Repository, store)

        assert contains(Repository, event, Repository._events[event])
        Repository._events[event](mapper_mock, None, table)
        getattr(table, event).assert_called_once_with()
Example #18
    def test_make_mutual_conversions(self):
        with self.session:
            conversion1_2 = self.conversion_factory()
            calendar1 = conversion1_2.source_calendar
            calendar2 = conversion1_2.target_calendar
            conversion2_3 = self.conversion_factory(source_calendar=calendar2)
            calendar3 = conversion2_3.target_calendar
            expected_sync_ordinal_difference = abs(
                conversion1_2.source_sync_ordinal -
                conversion2_3.target_sync_ordinal)
            self.session.flush()

            conversions = set(object_ for object_ in self.session
                              if isinstance(object_, CalendarConversion))
            conversion1_3 = conversions - {conversion1_2, conversion2_3}
            conversion1_3 = conversion1_3.pop()  # set -> CalendarConversion

            assert event.contains(
                Session,
                "before_flush",
                CalendarConversion.make_mutual_conversions,
            )
            assert (abs(conversion1_3.source_sync_ordinal -
                        conversion1_3.target_sync_ordinal) ==
                    expected_sync_ordinal_difference)
            if calendar1.id == conversion1_3.source_calendar.id:
                assert calendar3.id == conversion1_3.target_calendar.id
            elif calendar3.id == conversion1_3.source_calendar.id:
                assert calendar1.id == conversion1_3.target_calendar.id
            else:
                self.fail("Automatic conversion not made")
Example #19
    def register(cls, encryptor: Encryptor):
        """
        Register this encryptable with an encryptor.

        Instances of this encryptor will be encrypted on initialization and decrypted on load.

        """
        # save the current encryptor statically
        cls.__encryptor__ = encryptor

        # NB: we cannot use the before_insert listener in conjunction with a foreign key relationship
        # for encrypted data; SQLAlchemy will warn about using 'related attribute set' operation so
        # late in its insert/flush process.
        listeners = dict(
            init=on_init,
            load=on_load,
        )

        for name, func in listeners.items():
            # If we initialize the graph multiple times (as in many unit testing scenarios),
            # we will accumulate listener functions -- with unpredictable results. As protection,
            # we need to remove existing listeners before adding new ones; this solution only
            # works if the id (e.g. memory address) of the listener does not change, which means
            # they cannot be closures around the `encryptor` reference.
            #
            # Hence the `__encryptor__` hack above...
            if contains(cls, name, func):
                remove(cls, name, func)
            listen(cls, name, func)
Example #20
    def __create_event(self):
        """ Create an SQLAlchemy event listening the 'set' in a particular column.

        :rtype : object
        """
        if not event.contains(self.field, 'set', self.__validate):
            event.listen(self.field, 'set', self.__validate, retval=True)
Example #21
    def apply_driver_hacks(self, app, info, options):
        """Called before engine creation."""
        # Don't forget to apply hacks defined on parent object.
        super(SQLAlchemy, self).apply_driver_hacks(app, info, options)

        if info.drivername == 'sqlite':
            connect_args = options.setdefault('connect_args', {})

            if 'isolation_level' not in connect_args:
                # disable pysqlite's emitting of the BEGIN statement entirely.
                # also stops it from emitting COMMIT before any DDL.
                connect_args['isolation_level'] = None

            if not event.contains(Engine, "connect", do_sqlite_connect):
                event.listen(Engine, "connect", do_sqlite_connect)
            if not event.contains(Engine, "begin", do_sqlite_begin):
                event.listen(Engine, "begin", do_sqlite_begin)
Example #22
    def apply_driver_hacks(self, app, info, options):
        """Called before engine creation."""
        # Don't forget to apply hacks defined on parent object.
        super(SQLAlchemy, self).apply_driver_hacks(app, info, options)

        if info.drivername == "sqlite":
            connect_args = options.setdefault("connect_args", {})

            if "isolation_level" not in connect_args:
                # disable pysqlite's emitting of the BEGIN statement entirely.
                # also stops it from emitting COMMIT before any DDL.
                connect_args["isolation_level"] = None

            if not event.contains(Engine, "connect", do_sqlite_connect):
                event.listen(Engine, "connect", do_sqlite_connect)
            if not event.contains(Engine, "begin", do_sqlite_begin):
                event.listen(Engine, "begin", do_sqlite_begin)
Example #23
 def unregister_signals_oaiset(self):
     """Unregister signals oaiset."""
     from .models import OAISet
     from .receivers import after_delete_oai_set, after_insert_oai_set, \
         after_update_oai_set
     if contains(OAISet, 'after_insert', after_insert_oai_set):
         remove(OAISet, 'after_insert', after_insert_oai_set)
         remove(OAISet, 'after_update', after_update_oai_set)
         remove(OAISet, 'after_delete', after_delete_oai_set)
Example #24
 def unregister_signals_oaiset(self):
     """Unregister signals oaiset."""
     from .models import OAISet
     from .receivers import after_insert_oai_set, \
         after_update_oai_set, after_delete_oai_set
     if contains(OAISet, 'after_insert', after_insert_oai_set):
         remove(OAISet, 'after_insert', after_insert_oai_set)
         remove(OAISet, 'after_update', after_update_oai_set)
         remove(OAISet, 'after_delete', after_delete_oai_set)
Example #25
 def test_instrumentation_context_manager(self, storage):
     instance = MockModel1(name='name')
     storage.mock_model_1.put(instance)
     with self._track_changes({MockModel1.dict1: dict}) as instrument:
         instance = storage.mock_model_1.get(instance.id)
         instance.dict1 = {'new': 'value'}
         assert instrument.tracked_changes == {
             'mock_model_1': {
                 instance.id: {
                     'dict1': Value(STUB, {'new': 'value'})
                 }
             }
         }
         assert len(instrument.listeners) == 4
         for listener_args in instrument.listeners:
             assert event.contains(*listener_args)
     for listener_args in instrument.listeners:
         assert not event.contains(*listener_args)
Example #26
    def __declare_last__(cls):
        # Unconfigure the event set in _SQLAMutationTracker; we have _save_data
        mapper = cls._sa_class_manager.mapper
        args = (mapper.attrs['data'], 'set', _SQLAMutationTracker._field_set)
        if event.contains(*args):
            event.remove(*args)

        # Declaring the event on the class attribute instead of mapper property
        # enables proper registration on its subclasses
        event.listen(cls.data, 'set', cls._set_data, retval=True)
Example #27
    def __declare_last__(cls):
        # Unconfigure the event set in _SQLAMutationTracker; we have _save_data
        mapper = cls._sa_class_manager.mapper
        args = (mapper.attrs['data'], 'set', _SQLAMutationTracker._field_set)
        if event.contains(*args):
            event.remove(*args)

        # Declaring the event on the class attribute instead of mapper property
        # enables proper registration on its subclasses
        event.listen(cls.data, 'set', cls._save_data, retval=True)
Example #28
File: env.py Project: gonicus/gosa
 def remove_flush_listeners(self):
     """
     Remove all before_flush listeners. Currently this is only used by a test
     that switches between proxied and normal mode.
     """
     for session in self.__db_session.values():
         if event.contains(session, "before_flush", before_proxy_flush):
             event.remove(session, "before_flush", before_proxy_flush)
     self.__db_session = {}
     for factory in self.__db_factory.values():
         factory.reset_events()
Example #29
 def register_events(self, remove=False):
     for e, h in (
         ('before_insert', self.before_insert),
         ('before_update', self.before_update),
         ('before_delete', self.before_delete),
     ):
         is_event_exist = event.contains(self.base_class, e, h)
         if remove and is_event_exist:
             event.remove(self.base_class, e, h)
         elif not is_event_exist:
             event.listen(self.base_class, e, h, propagate=True)
     return self
Example #30
    def __init__(self, engine, alias):
        self.engine = engine
        self.alias = alias
        self.loggers = {}
        self.tmp = {}

        for i in (
            (self.engine, "before_execute", self.before_execute),
            (self.engine, "after_execute", self.after_execute),
        ):
            if not event.contains(*i):
                event.listen(*i)
Example #31
 def add_commit_hook(self):
     # By session lifecycle order.
     if not sa_event.contains(self.db_session, 'before_flush',
                              SqlAlchemyStore._before_session_commit):
         sa_event.listen(self.db_session, 'before_flush',
                         SqlAlchemyStore._before_session_commit)
     if not sa_event.contains(self.db_session, 'after_flush',
                              SqlAlchemyStore._after_session_flush):
         sa_event.listen(self.db_session, 'after_flush',
                         SqlAlchemyStore._after_session_flush)
     if not sa_event.contains(self.db_session, 'after_rollback',
                              SqlAlchemyStore._after_session_rollback):
         sa_event.listen(self.db_session, 'after_rollback',
                         SqlAlchemyStore._after_session_rollback)
     if not sa_event.contains(self.db_session, 'after_rollback',
                              SqlAlchemyStore._clear_epoch_bumped_flags):
         sa_event.listen(self.db_session, 'after_rollback',
                         SqlAlchemyStore._clear_epoch_bumped_flags)
     if not sa_event.contains(self.db_session, 'after_commit',
                              SqlAlchemyStore._clear_epoch_bumped_flags):
         sa_event.listen(self.db_session, 'after_commit',
                         SqlAlchemyStore._clear_epoch_bumped_flags)
     if not sa_event.contains(self.db_session, 'after_transaction_end',
                              SqlAlchemyStore._after_transaction_end):
         sa_event.listen(self.db_session, 'after_transaction_end',
                         SqlAlchemyStore._after_transaction_end)
Example #32
 def add_commit_hook(self):
     # By session lifecycle order.
     if not sa_event.contains(self.db_session, 'before_flush',
                              SqlAlchemyStore._before_session_commit):
         sa_event.listen(self.db_session, 'before_flush',
                         SqlAlchemyStore._before_session_commit)
     if not sa_event.contains(self.db_session, 'after_flush',
                              SqlAlchemyStore._after_session_flush):
         sa_event.listen(self.db_session, 'after_flush',
                         SqlAlchemyStore._after_session_flush)
     if not sa_event.contains(self.db_session, 'after_rollback',
                              SqlAlchemyStore._after_session_rollback):
         sa_event.listen(self.db_session, 'after_rollback',
                         SqlAlchemyStore._after_session_rollback)
     if not sa_event.contains(self.db_session, 'after_rollback',
                              SqlAlchemyStore._clear_epoch_bumped_flags):
         sa_event.listen(self.db_session, 'after_rollback',
                         SqlAlchemyStore._clear_epoch_bumped_flags)
     if not sa_event.contains(self.db_session, 'after_commit',
                              SqlAlchemyStore._clear_epoch_bumped_flags):
         sa_event.listen(self.db_session, 'after_commit',
                         SqlAlchemyStore._clear_epoch_bumped_flags)
     if not sa_event.contains(self.db_session, 'after_transaction_end',
                              SqlAlchemyStore._after_transaction_end):
         sa_event.listen(self.db_session, 'after_transaction_end',
                         SqlAlchemyStore._after_transaction_end)
Example #33
    def register_db_event(self, action):
        """Registers an action's event.

        The event is identified by category, name, and event type. The only
        category currently supported is 'database'.
        For database events:
             model,
             field_name,
             event_type is one of new|deleted|dirty
        """
        action.act_manager = self
        
        valid_categories = ['database']
        valid_event_types = ['new', 'deleted', 'dirty']

        # The registry is a flat dict keyed by the combined category and name.
        k = 'database'    # Database event

        if action.model:
            k = k + ':' + action.model.__tablename__
        else:
            raise ValueError('%s must be a valid model' % action.model)

        if action.field_name:
            k = k + ':' + action.field_name

        if action.event_type not in valid_event_types:
            raise ValueError('%s is not a valid event type %s, %s' %
                             (action.event_type, k, str(valid_event_types)))
        k = k + ':' + action.event_type

        # The registry: keys are like: 'database:model:field_name:dirty'
        # Don't allow dupes. Make this idempotent.
        _pv_semaphore.acquire()
        if k not in event_registry:
            event_registry.setdefault(k, []).append(action)
        else:
            if action in event_registry[k]:
                logging.debug('duplicate action register rejected, %s' % repr(action))
            else:
                logging.info('action registered, %s' % action.name)
                event_registry[k].append(action)
        _pv_semaphore.release()


        # The listener needs the model.attribute such as: User.first_name
        if action.model and action.field_name and action.event_type == 'dirty':
            collection = getattr(action.model, action.field_name)
            # Guarantee idempotence by not repeating this event registration.
            if event.contains(collection, 'set', db_attr_event):
                return
            event.listen(collection, 'set', db_attr_event)
Example #34
def _register_connection_events(conn):
    '''
    Register clean up events for our
    connection only once, as adding/removing them
    seems an expensive operation.
    '''

    # Use 'commit' as a mark to guess the events being handled.
    if contains(conn, 'commit', _connection_cleanup_handler):
        return

    # Plug post-operation clean up handlers.
    listen(conn, 'commit', _connection_cleanup_handler)
    listen(conn, 'rollback', _connection_cleanup_handler)
Example #35
 def unregister_signals(self):
     """Unregister signals."""
     from .models import Collection
     from .percolator import collection_inserted_percolator, \
         collection_removed_percolator, collection_updated_percolator
     # Unregister Record signals
     if hasattr(self, 'update_function'):
         signals.before_record_insert.disconnect(self.update_function)
         signals.before_record_update.disconnect(self.update_function)
     # Unregister collection signals
     if contains(Collection, 'after_insert',
                 collection_inserted_percolator):
         remove(Collection, 'after_insert', collection_inserted_percolator)
         remove(Collection, 'after_update', collection_updated_percolator)
         remove(Collection, 'after_delete', collection_removed_percolator)
Example #36
 def unregister_signals(self):
     """Unregister signals."""
     from .models import Collection
     from .percolator import collection_inserted_percolator, \
         collection_removed_percolator, collection_updated_percolator
     # Unregister Record signals
     if hasattr(self, 'update_function'):
         signals.before_record_insert.disconnect(self.update_function)
         signals.before_record_update.disconnect(self.update_function)
     # Unregister collection signals
     if contains(Collection, 'after_insert',
                 collection_inserted_percolator):
         remove(Collection, 'after_insert', collection_inserted_percolator)
         remove(Collection, 'after_update', collection_updated_percolator)
         remove(Collection, 'after_delete', collection_removed_percolator)
Example #37
def remove_mutable_association_listener():
    """
    Remove the event listener that associates ``Dict`` and ``List`` column types with
    ``MutableDict`` and ``MutableList``, respectively.

    This call must happen before any model instance is instantiated.
    This is because once it does, that would trigger the listener we are trying to remove.
    Once it is triggered, many other listeners will then be registered.
    At that point, it is too late.

    The reason this function exists is that the association listener interferes with ARIA change
    tracking instrumentation, so a way to disable it is required.

    Note that the event listener this call removes is registered by default.
    """
    if event.contains(*_LISTENER_ARGS):
        event.remove(*_LISTENER_ARGS)
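_LISTENER_ARGS is defined elsewhere in the module and does not appear in this excerpt. It is simply a (target, identifier, fn) triple that can be splatted into listen(), contains(), and remove() alike; a hypothetical illustration (not ARIA's actual listener arguments):

    from sqlalchemy import event
    from sqlalchemy.orm import Mapper

    def _on_mapper_configured(mapper_, class_):
        # Hypothetical handler standing in for the association listener.
        pass

    _LISTENER_ARGS = (Mapper, "mapper_configured", _on_mapper_configured)

    event.listen(*_LISTENER_ARGS)
    if event.contains(*_LISTENER_ARGS):
        event.remove(*_LISTENER_ARGS)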
Example #38
    def clear(self):
        logger.info(
            f"$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ clearing Syned Models"
        )
        for nr, ev in enumerate(
            ['after_insert', 'after_update', 'after_delete']):
            for model in self.models:
                listener = self.model_handlers[model]
                logger.info(f"remove listener {listener} for {model} on {ev}")

                if event.contains(model, ev, listener[nr]):
                    event.remove(model, ev, listener[nr])

        self.models.clear()
        self.model_names.clear()
        self.tables.clear()
        self.ids.clear()
Example #39
def atomic_transaction(commit_at_end=False):
    event.listen(db.session(), "before_commit", _raise_commit_error)
    try:
        yield
        if commit_at_end:
            event.remove(db.session(), "before_commit", _raise_commit_error)
            db.session.commit()
        else:
            db.session.rollback()
            event.remove(db.session(), "before_commit", _raise_commit_error)
    except Exception as e:
        if event.contains(db.session(), "before_commit", _raise_commit_error):
            event.remove(db.session(), "before_commit", _raise_commit_error)
        db.session.rollback()
        if isinstance(e, DatabaseError):
            handle_database_error(e)
        raise e
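Neither _raise_commit_error nor the contextlib.contextmanager decorator that presumably wraps this generator appears in the excerpt. The hook is most likely a Session-level before_commit listener that aborts any premature commit, roughly like this (an assumption, not the project's code):

    def _raise_commit_error(session):
        # before_commit receives the Session; raising here aborts any commit()
        # attempted inside the atomic block before commit_at_end allows it.
        raise RuntimeError("Explicit commit is not allowed inside atomic_transaction()")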
Example #40
def _register_session_events(session):
    '''
    Register connection/transaction and clean up events
    for our session only once, as adding/removing them
    seems an expensive operation.
    '''

    # Use 'after_begin' as a mark to guess the events being handled.
    if contains(session, 'after_begin', _session_after_begin_handler):
        return

    # Have the connections inherit the tracing info
    # from the session (including parent span, if any).
    listen(session, 'after_begin', _session_after_begin_handler)

    # Plug post-operation clean up handlers.
    # The actual session commit/rollback is not traced by us.
    listen(session, 'after_commit', _session_cleanup_handler)
    listen(session, 'after_rollback', _session_cleanup_handler)
Example #41
def create_engine_from_conf(config):
    need_connection_pool_fix = True

    backend = config.get('DATABASE', 'type')

    if backend == 'mysql':
        if config.has_option('DATABASE', 'host'):
            host = config.get('DATABASE', 'host').lower()
        else:
            host = 'localhost'

        if config.has_option('DATABASE', 'port'):
            port = config.getint('DATABASE', 'port')
        else:
            port = 3306

        username = config.get('DATABASE', 'username')
        password = config.get('DATABASE', 'password')
        db_name = config.get('DATABASE', 'db_name')

        db_url = "mysql+mysqldb://%s:%s@%s:%s/%s?charset=utf8" % \
                 (username, quote_plus(password), host, port, db_name)
        logger.debug('[dtable_events] database: mysql, name: %s', db_name)
    else:
        logger.error("Unknown database backend: %s" % backend)
        raise RuntimeError("Unknown database backend: %s" % backend)

    # Add pool recycle, or mysql connection will be closed
    # by mysql daemon if idle for too long.
    kwargs = dict(pool_recycle=300, echo=False, echo_pool=False)

    engine = create_engine(db_url, **kwargs)

    if need_connection_pool_fix and not contains(Pool, 'checkout',
                                                 ping_connection):
        # We use contains to double check in case we call
        # create_engine multiple times in the same process.
        listen(Pool, 'checkout', ping_connection)

    return engine
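ping_connection is imported from elsewhere in the module and is not shown here; it presumably follows the classic pessimistic-disconnect recipe for the Pool 'checkout' event, roughly like this sketch (an assumption, not this project's code):

    from sqlalchemy import exc

    def ping_connection(dbapi_connection, connection_record, connection_proxy):
        # Run a cheap query on checkout; on failure, tell the pool to discard
        # this connection and transparently retry the checkout with a fresh one.
        cursor = dbapi_connection.cursor()
        try:
            cursor.execute("SELECT 1")
        except Exception:
            raise exc.DisconnectionError()
        finally:
            cursor.close()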
Example #42
 def __enter__(self):
     """Disable date update."""
     self._found = contains(
         Timestamp, 'before_update', timestamp_before_update)
     if self._found:
         remove(Timestamp, 'before_update', timestamp_before_update)
Example #43
 def start(self):
     """ Restart the listener
     """
     if not event.contains(self.field, 'set', self.__validate):
         self.__create_event()
Example #44
 def stop(self):
     """ Remove the listener to stop the validation
     """
     if event.contains(self.field, 'set', self.__validate):
         event.remove(self.field, 'set', self.__validate)
Example #45
 def stop(self):
     """ Remove the listener to stop the validation
     """
     if event.contains(self.field, 'set', self.__validate):
         event.remove(self.field, 'set', self.__validate)
Example #46
 def start(self):
     """ Restart the listener
     """
     if not event.contains(self.field, 'set', self.__validate):
         self.__create_event()