def _create_lookup_keys(self, sender_receivers_tuple_list):
    """Build signal lookup keys from (receiver, sender) tuples.

    Each entry becomes ``(_make_id(receiver), _make_id(sender))``; a falsy
    receiver is kept as-is in the key instead of being hashed.
    """
    return [
        (_make_id(entry[0]) if entry[0] else entry[0], _make_id(entry[1]))
        for entry in sender_receivers_tuple_list
    ]
def propagate_signal(self, sender, **named):
    """
    Send signal from sender to all connected receivers, catching errors.

    Arguments:

        sender
            The sender of the signal. Can be any python object (normally one
            registered with a connect if you actually want something to
            occur).

        named
            Named arguments which will be passed to receivers. These
            arguments must be a subset of the argument names defined in
            providing_args.

    Returns None. If a receiver raises (any subclass of Exception), the
    error is logged and dispatch continues with the remaining receivers.
    """
    logger = get_task_logger(__name__)
    logger.info("START propagate_signal")
    logger.info(self)
    logger.info(sender)
    logger.info(_make_id(sender))
    logger.info(self._live_receivers(_make_id(sender)))
    # Call each receiver with whatever arguments it can accept.
    for receiver in self._live_receivers(_make_id(sender)):
        try:
            # BUG FIX: the log format calls referenced an undefined name
            # ``signal`` (NameError at runtime); the signal being sent
            # is ``self``.
            logger.info(
                "START Receiver: {}; Signal: {}; sender: {}, kwargs:{}".format(
                    receiver, self, sender, named))
            receiver(signal=self, sender=sender, **named)
            logger.info(
                "END Receiver: {}; Signal: {}; sender: {}, kwargs:{}".format(
                    receiver, self, sender, named))
        except Exception as ex:
            logger.info(
                "EXCEPT START Receiver: {}; Signal: {}; sender: {}, kwargs:{}".
                format(receiver, self, sender, named))
            logger.error(ex)
            logger.info(
                "EXCEPT END Receiver: {}; Signal: {}; sender: {}, kwargs:{}".
                format(receiver, self, sender, named))
    logger.info("END propagate_signal")
def _live_receivers(self, sender):
    """
    Filter sequence of receivers to get resolved, live receivers.

    This checks for weak references and resolves them, then returning only
    live receivers.
    """
    with self.lock:
        self._clear_dead_receivers()
        # NOTE(review): several callers in this file pass ``_make_id(sender)``
        # in, yet this hashes again and also uses ``sender`` directly in
        # issubclass() below — confirm which form this override expects.
        senderkey = _make_id(sender)
        receivers = []
        for (receiverkey, r_senderkey), receiver in self.receivers:
            # Map the stored sender key back to its sender object so that
            # receivers registered on a parent class can also match.
            r_sender = self._sender_map[r_senderkey]
            if r_senderkey == NONE_ID or r_senderkey == senderkey:
                receivers.append(receiver)
            elif sender and issubclass(sender, r_sender):
                receivers.append(receiver)
        if self.use_caching and sender:
            if not receivers:
                self.sender_receivers_cache[sender] = NO_RECEIVERS
            else:
                # Note, we must cache the weakref versions.
                self.sender_receivers_cache[sender] = receivers
        non_weak_receivers = []
        for receiver in receivers:
            if isinstance(receiver, weakref.ReferenceType):
                # Dereference the weak reference.
                receiver = receiver()
                if receiver is not None:
                    non_weak_receivers.append(receiver)
            else:
                non_weak_receivers.append(receiver)
        return non_weak_receivers
def send_robust(self, sender, **named):
    """
    Send signal from sender to all connected receivers catching errors.

    Arguments:

        sender
            The sender of the signal. Can be any python object (normally one
            registered with a connect if you actually want something to
            occur).

        named
            Named arguments which will be passed to receivers. These
            arguments must be a subset of the argument names defined in
            providing_args.

    Return nothing.
    """
    if not self.receivers:
        return
    # Dispatch each receiver as an asynchronous task; enqueue failures are
    # deliberately swallowed (best-effort semantics).
    for receiver in self._live_receivers(_make_id(sender)):
        try:
            SignalTask.delay(receiver=receiver, sender=sender, **named)
        except Exception:
            # BUG FIX: ``except Exception, err`` is Python-2-only syntax
            # (a SyntaxError on Python 3) and the bound name was unused.
            pass
def receiver_exist(receiver, signal, dispatch_uid):
    """Code adapted from Django code to test whether a receiver already exists."""
    # The sender part of the key is always _make_id(None), matching how
    # the receivers were registered here.
    if dispatch_uid:
        key = (dispatch_uid, _make_id(None))
    else:
        key = (_make_id(receiver), _make_id(None))
    signal.lock.acquire()
    try:
        return any(r_key == key for r_key, _ in signal.receivers)
    finally:
        signal.lock.release()
def get_or_create_user(self, **kwargs):
    """Return a user for the given social-auth details.

    Resolution order: an existing account matched by email (when
    ASSOCIATE_BY_MAIL is enabled), a user produced by a ``create_user``
    signal receiver, or a freshly created account (when CREATE_USERS is
    enabled). Returns None if nothing matched.
    """
    details = kwargs['details']
    candidate = None
    email = details.get('email')

    if email and ASSOCIATE_BY_MAIL:
        # Associate accounts registered with the same email address, but
        # only when exactly one matches; ValueError is raised if multiple
        # objects are returned.
        try:
            candidate = User.objects.get(email=email)
        except MultipleObjectsReturned:
            raise ValueError('Not unique email address supplied')
        except User.DoesNotExist:
            pass

    if candidate is None and create_user.receivers:
        sender = self.__class__
        for receiver in create_user._live_receivers(_make_id(sender)):
            candidate = receiver(signal=create_user, sender=sender, **kwargs)
            if candidate is not None:
                candidate.is_new = True
                break

    if candidate is None and CREATE_USERS:
        username = self.username(details)
        candidate = User.objects.create_user(username=username, email=email)
        candidate.is_new = True

    return candidate
def send(self, sender, **named):
    """
    Send this signal asynchronously to the registered receivers

    @param sender: sender
    @param named: named arguments
    @return:
    """
    if not self.receivers:
        logging.warning("no receivers found. sender: %s, signal type: %s"
                        % (sender, self.signal_type))
        return None
    logging.info("sending signal: %s" % self.signal_type)
    for recv in self._live_receivers(_make_id(sender)):
        # Base payload first; explicit named arguments win on collision.
        payload = {
            "receiver": recv,
            "sender": sender,
            "signal_type": self.signal_type,
        }
        payload.update(named)
        task_name = "send-signal-%s-%s-%s" % (sender, self.signal_type,
                                              get_uuid())
        deferred.defer(send_async,
                       signal_data=AsyncSignal.dump_signal_data(payload),
                       _queue="signals",
                       _name=task_name)
    return None  # discard the responses
def propagate_signal(self, sender, **named): """ Send signal from sender to all connected receivers catching errors. Arguments: sender The sender of the signal. Can be any python object (normally one registered with a connect if you actually want something to occur). named Named arguments which will be passed to receivers. These arguments must be a subset of the argument names defined in providing_args. Return a list of tuple pairs [(receiver, response), ... ]. May raise DispatcherKeyError. If any receiver raises an error (specifically any subclass of Exception), the error instance is returned as the result for that receiver. """ # Call each receiver with whatever arguments it can accept. for receiver in self._live_receivers(_make_id(sender)): try: receiver(signal=self, sender=sender, **named) except Exception: pass
def send(self, sender, **named):
    """
    Dispatch this signal from ``sender`` to every connected receiver as an
    asynchronous SignalTask.

    An optional ``task_args`` entry in ``named`` is popped off and forwarded
    to ``apply_async`` as Celery task options rather than to the receivers.

    Arguments:

        sender
            The sender of the signal. Either a specific object or None.

        named
            Named arguments which will be passed to receivers.

    Return nothing.
    """
    if not self.receivers:
        return
    task_args = named.pop('task_args') if 'task_args' in named else {}
    for receiver in self._live_receivers(_make_id(sender)):
        # Explicit named arguments override receiver/sender on collision,
        # matching the original update() order.
        call_kwargs = {"receiver": receiver, "sender": sender}
        call_kwargs.update(named)
        SignalTask.apply_async(kwargs=call_kwargs, **task_args)
def priority_connect(self, receiver, sender=None, children=True):
    # Connect ``receiver`` ahead of all existing receivers (highest
    # priority). When ``sender`` is an abstract model and ``children`` is
    # true, connect to every concrete model subclassing it instead.
    if sender and children:
        if sender._meta.abstract:
            for child in apps.get_models():
                if issubclass(child, sender):
                    # NOTE(review): this invokes priority_connect as a bare
                    # module-level function, passing ``self`` explicitly. If
                    # it is actually bound as a method, this bare name would
                    # raise NameError — confirm how it is registered.
                    priority_connect(self, receiver, child, children=False)
            return
    lookup_key = (_make_id(receiver), _make_id(sender))
    with self.lock:
        self._clear_dead_receivers()
        # Skip registration if this (receiver, sender) pair is already
        # connected; otherwise prepend so it fires first.
        for r_key, _ in self.receivers:
            if r_key == lookup_key:
                break
        else:
            # Adding priority receiver to beginning of the list
            self.receivers.insert(0, (lookup_key, receiver))
        self.sender_receivers_cache.clear()
def check_for_receivers(sender, sending_signal, **kwargs):
    """Checks that no other signal receivers have been connected."""
    sender_id = _make_id(sender)
    skip_check = sending_signal.use_caching and isinstance(sender_id, int)
    if skip_check:
        # XXX: this should probably be handled differently, as _make_id
        # is always likely to return an integer.
        return
    if len(sending_signal._live_receivers(sender_id)) > 1:
        warnings.warn("pre_save and post_save signals will not longer be sent for Revision and Version models in django-reversion 1.8. Please use the pre_revision_commit and post_revision_commit signals instead.")
def connect(self, receiver, label, sender=None, weak=True, dispatch_uid=None,
            before=None, after=None):
    """%s
    priority
        The lower the number the sooner the handler will be called
    """ % Signal.connect.__doc__
    # BUG FIX: ``before=[]`` / ``after=[]`` were mutable default arguments,
    # shared across every call to connect(); use None sentinels instead.
    if before is None:
        before = []
    if after is None:
        after = []
    from django.conf import settings

    # If DEBUG is on, check that we got a good receiver
    if settings.DEBUG:
        import inspect
        assert callable(receiver), "Signal receivers must be callable."
        # Check for **kwargs
        # Not all callables are inspectable with getargspec, so we'll
        # try a couple different ways but in the end fall back on assuming
        # it is -- we don't want to prevent registration of valid but weird
        # callables.
        try:
            argspec = inspect.getargspec(receiver)
        except TypeError:
            try:
                argspec = inspect.getargspec(receiver.__call__)
            except (TypeError, AttributeError):
                argspec = None
        if argspec:
            assert argspec[2] is not None, \
                "Signal receivers must accept keyword arguments (**kwargs)."

    if dispatch_uid:
        lookup_key = (dispatch_uid, _make_id(sender))
    else:
        lookup_key = (_make_id(receiver), _make_id(sender))

    if weak:
        receiver = saferef.safeRef(receiver, onDelete=self._remove_receiver)

    # Only register if this (receiver, sender) pair is not already present.
    for r_key, _ in self.receivers:
        if r_key == lookup_key:
            break
    else:
        self.receivers.add((lookup_key, receiver), label, before, after)
def test_auto_index(self):
    """
    Check that the elasticsearch object is receiver for the post save /
    delete signals
    """
    from django.db.models.signals import post_save, post_delete
    from django.dispatch.dispatcher import _make_id
    import weakref

    expected = (
        (post_save, self.Article.elasticsearch.django_post_save),
        (post_delete, self.Article.elasticsearch.django_post_delete),
    )
    for signal, callback in expected:
        # A receiver is registered if any lookup key hashes the callback.
        matches = [
            receiver
            for lookup_key, receiver in signal.receivers
            if lookup_key[0] == _make_id(callback)
        ]
        self.assertTrue(bool(matches))
def suppress_signal(signal, suppress=None):
    """Generator (context-manager body) that temporarily disconnects
    receivers from ``signal``.

    When ``suppress`` is given, only receivers registered against those
    senders (matched via ``_make_id``) are removed; otherwise all receivers
    are removed. The original receiver list and sender-receiver cache are
    restored on exit, even if the wrapped block raises.
    """
    handlers = signal.receivers
    receiver_cache = signal.sender_receivers_cache.copy()
    # BUG FIX: removed a redundant ``signal.receivers = []`` assignment that
    # was immediately overwritten by both branches below.
    if suppress:
        refs = [_make_id(sup) for sup in suppress]
        signal.receivers = [h for h in handlers if not h[0][1] in refs]
    else:
        signal.receivers = []
    signal.sender_receivers_cache.clear()
    try:
        yield
    finally:
        signal.sender_receivers_cache = receiver_cache
        signal.receivers = handlers
def connect(self, receiver, sender=None, weak=True, dispatch_uid=None,
            priority=50):
    # Connect ``receiver`` with an ordering hint: the priority is prefixed
    # onto the dispatch_uid and the receiver list re-sorted so lower keys
    # run first.
    if dispatch_uid is None:
        dispatch_uid = _make_id(receiver)
    # NOTE(review): the priority is string-concatenated, so ordering is
    # lexicographic — e.g. priority 9 sorts AFTER priority 50. Confirm
    # whether all priorities are expected to have the same digit count.
    inner_uid = '{0}{1}'.format(priority, dispatch_uid)
    super(Signal, self).connect(receiver, sender=sender, weak=weak,
                                dispatch_uid=inner_uid)
    # NOTE(review): sorting (lookup_key, receiver) tuples can fall through
    # to comparing receiver objects on key ties, which raises TypeError on
    # Python 3 — verify against the targeted Python/Django versions.
    self.receivers.sort()
def safe_send(self, sender, **named):
    """Send signal from sender to all connected receivers, catching errors.

    Returns a list of ``(receiver, response_or_exception)`` tuple pairs; a
    receiver that raises contributes its exception instance instead of a
    response.
    """
    responses = []
    if not self.receivers:
        return responses
    # Call each receiver with whatever arguments it can accept.
    for receiver in self._live_receivers(_make_id(sender)):
        try:
            response = receiver(signal=self, sender=sender, **named)
        # BUG FIX: ``except Exception, err`` is Python-2-only syntax.
        except Exception as err:
            log.error('Error calling signal', exc_info=True)
            responses.append((receiver, err))
        else:
            responses.append((receiver, response))
    # BUG FIX: the collected responses were built but never returned.
    return responses
def defer(f, *args, **kwargs):
    ''' Wrapper that defers a function's execution until the current
    transaction commits, if a transaction is active. Otherwise, executes
    as usual.

    Note that a deferred function will NOT be called if the transaction
    completes without committing (e.g. when transaction.is_dirty() is False
    upon exiting the transaction).

    An implicit assumption is that a deferred function does not return an
    important value, since there is no way to retrieve the return value in
    the normal execution order.

    Before being connected to the 'post_commit' signal of an existing
    managed transaction, the deferred function is wrapped by the
    @commit_on_success decorator to ensure that it behaves properly by
    committing or rolling back any updates it makes to a current
    transaction.

    >>> from django.db import transaction
    >>> from django_transaction_signals import defer
    >>>
    >>> def log_success(msg):
    >>>     print 'logging success'
    >>>
    >>> @transaction.atomic
    >>> def transactional_update(value):
    >>>     print 'starting transaction'
    >>>     defer(log_success, 'The transaction was successful')
    >>>     print 'finishing transaction'
    >>>
    >>> transactional_update('foo')
    ... starting transaction
    ... finishing transaction
    ... logging success
    '''
    # NOTE(review): the docstring mentions wrapping with
    # @commit_on_success, but no such wrapping is visible in this code —
    # confirm whether the docstring or the implementation is stale.
    connection = get_connection(kwargs.pop('using', None))
    if not connection.get_autocommit() or connection.in_atomic_block:
        # A transaction is active: run f only after the commit fires.
        def f_deferred(*a, **kw):
            # Ignore whatever arguments the signal passes; call f with the
            # arguments captured at defer() time.
            f(*args, **kwargs)
        # Key the dispatch_uid on the innermost savepoint so the deferred
        # call is scoped to the current (possibly nested) atomic block.
        if connection.savepoint_ids:
            savepoint_id = connection.savepoint_ids[-1]
        else:
            savepoint_id = None
        dispatch_uid = (savepoint_id, _make_id(f_deferred))
        # weak=False: f_deferred is a local closure and would otherwise be
        # garbage-collected before the post_commit signal fires.
        transaction.signals.post_commit.connect(f_deferred, weak=False,
                                                dispatch_uid=dispatch_uid)
    else:
        # No active transaction: execute immediately and propagate the
        # return value.
        return f(*args, **kwargs)
def safe_send(self, sender, **named):
    """Send signal from sender to all connected receivers, catching errors.

    Each receiver call is timed via statsd. Returns a list of
    ``(receiver, response_or_exception)`` tuple pairs. When
    ``settings.RAISE_ON_SIGNAL_ERROR`` is true, receiver exceptions are
    re-raised instead of collected.
    """
    responses = []
    if not self.receivers:
        return responses
    do_raise = getattr(settings, 'RAISE_ON_SIGNAL_ERROR', False)
    # Call each receiver with whatever arguments it can accept.
    for receiver in self._live_receivers(_make_id(sender)):
        try:
            with statsd.timer('signal.send'):
                response = receiver(signal=self, sender=sender, **named)
        # BUG FIX: ``except Exception, err`` is Python-2-only syntax.
        except Exception as err:
            if do_raise:
                raise
            log.error('Error calling signal', exc_info=True)
            responses.append((receiver, err))
        else:
            responses.append((receiver, response))
    # BUG FIX: the collected responses were built but never returned.
    return responses
def __init__(self, senders, signal_list=None, ignore_receivers=None):
    """
    Inhibits Django's signals from being fired for the given senders.

    with inhibit_signals(
            [None, obj.__class__],
            ignore_receivers=[receiver_to_keep_connected]):
        obj.save()  # only receiver_to_keep_connected() should be called
    """
    # Accept a single sender as well as an iterable of senders.
    try:
        iter(senders)
    except TypeError:
        senders = [senders]
    self.senders = {_make_id(sender): sender for sender in senders}
    # BUG FIX: map() returns a one-shot iterator on Python 3; materialize
    # it as a list so membership tests work more than once.
    self.ignore_receivers_keys = list(map(_make_id, ignore_receivers or []))
    signals = signal_list if signal_list is not None else ALL_SIGNALS
    self.signal_map = {
        signal: self.get_signal_receivers(signal) for signal in signals
    }
def send(self, sender, **named):
    """
    Send this signal asynchronously to the registered receivers

    @param sender: sender
    @param named: named arguments
    @return:
    """
    if not self.receivers:
        logging.warning("no receivers found. sender: %s, signal type: %s"
                        % (sender, self.signal_type))
        return None
    logging.info("sending signal: %s" % self.signal_type)
    live = self._live_receivers(_make_id(sender))
    for live_receiver in live:
        # Start from the dispatch metadata, then let explicit named
        # arguments win on key collisions.
        signal_kwargs = {"receiver": live_receiver,
                         "sender": sender,
                         "signal_type": self.signal_type}
        signal_kwargs.update(named)
        deferred.defer(
            send_async,
            signal_data=AsyncSignal.dump_signal_data(signal_kwargs),
            _queue="signals",
            _name="send-signal-%s-%s-%s" % (sender, self.signal_type,
                                            get_uuid()))
    return None  # discard the responses
def send(self, sender, **named):
    """Send the signal via Celery."""
    logger = get_task_logger(__name__)
    # Trace everything about this dispatch for debugging.
    logger.info("START send")
    logger.info(self)
    logger.info(sender)
    logger.info(_make_id(sender))
    logger.info(self._live_receivers(sender))
    for receiver in self._live_receivers(sender):
        try:
            logger.info(
                "START Receiver: {}; Signal: {}; sender: {}, kwargs:{}"
                .format(receiver, self, sender, named))
            # Each receiver runs in its own Celery task on this signal's
            # queue.
            call_receiver.apply_async(
                args=(receiver, self, sender),
                kwargs=named,
                queue=self.queue,
            )
            logger.info(
                "END Receiver: {}; Signal: {}; sender: {}, kwargs:{}"
                .format(receiver, self, sender, named))
        except Exception as exc:
            # Best-effort: log the failure and continue with the rest.
            logger.info(
                "EXCEPT START Receiver: {}; Signal: {}; sender: {}, kwargs:{}"
                .format(receiver, self, sender, named))
            logger.error(exc)
            logger.info(
                "EXCEPT END Receiver: {}; Signal: {}; sender: {}, kwargs:{}"
                .format(receiver, self, sender, named))
    logger.info("END send")
def authenticate(self, *args, **kwargs):
    """Authenticate user using social credentials

    Authentication is made if this is the correct backend, backend
    verification is made by kwargs inspection for current backend name
    presence.
    """
    # Validate backend and arguments. Require that the Social Auth
    # response be passed in as a keyword argument, to make sure we
    # don't match the username/password calling conventions of
    # authenticate.
    if not (self.name and kwargs.get(self.name) and 'response' in kwargs):
        return None

    response = kwargs.get('response')
    details = self.get_user_details(response)
    kwargs['details'] = details
    uid = self.get_user_id(details, response)
    is_new = False
    user = kwargs.get('user')
    try:
        social_user = self.get_social_auth_user(uid)
    except UserSocialAuth.DoesNotExist:
        # No social account for this uid yet: try signal receivers first,
        # then email association, then account creation.
        if user is None and create_user.receivers:
            sender = self.__class__
            for receiver in create_user._live_receivers(_make_id(sender)):
                user = receiver(signal=create_user, sender=sender, **kwargs)
                if user is not None:
                    is_new = True
                    break
        if user is None:
            # new user
            if not CREATE_USERS or not kwargs.get('create_user', True):
                # Send signal for cases where tracking failed registering
                # is useful.
                socialauth_not_registered.send(sender=self.__class__,
                                               uid=uid,
                                               response=response,
                                               details=details)
                return None
            email = details.get('email')
            if email and ASSOCIATE_BY_MAIL:
                # try to associate accounts registered with the same email
                # address, only if it's a single object. ValueError is
                # raised if multiple objects are returned
                try:
                    user = User.objects.get(email=email)
                except MultipleObjectsReturned:
                    raise ValueError('Not unique email address supplied')
                except User.DoesNotExist:
                    user = None
            if not user:
                username = self.username(details)
                logger.debug('Creating new user with username %s and email %s',
                             username, sanitize_log_data(email))
                user = User.objects.create_user(username=username,
                                                email=email)
                is_new = True
        try:
            social_user = self.associate_auth(user, uid, response, details)
        except IntegrityError:
            # Protect for possible race condition, those bastard with FTL
            # clicking capabilities
            social_user = self.get_social_auth_user(uid)
    # Raise ValueError if this account was registered by another user.
    if user and user != social_user.user:
        logger.info('Account already in use', extra=dict(data=details))
        return None
    user = social_user.user
    # Flag user "new" status
    setattr(user, 'is_new', is_new)
    # Update extra_data storage, unless disabled by setting
    if LOAD_EXTRA_DATA:
        extra_data = self.extra_data(user, uid, response, details)
        if extra_data and social_user.extra_data != extra_data:
            social_user.extra_data = extra_data
            social_user.save()
    user.social_user = social_user
    # Update user account data.
    self.update_user_details(user, response, details, is_new)
    return user
def check_for_receivers(sender, sending_signal, **kwargs):
    """Checks that no other signal receivers have been connected."""
    live = sending_signal._live_receivers(_make_id(sender))
    if len(live) > 1:
        warnings.warn(
            "pre_save and post_save signals will not longer be sent for Revision and Version models in django-reversion 1.8. Please use the pre_revision_commit and post_revision_commit signals instead."
        )
def receivers_for_model(model):
    """Yield ``(signal, receiver)`` pairs for every live receiver connected
    to ``model`` on the model-sender signals."""
    # TODO: Remove `_make_id` reference check when support for 1.5 is dropped
    if django.VERSION >= (1, 6):
        sender = model
    else:
        sender = _make_id(model)
    for signal in model_sender_signals:
        for receiver in signal._live_receivers(sender):
            yield signal, receiver
def check_for_receivers(sender, sending_signal, **kwargs):
    """Checks that no other signal receivers have been connected."""
    receiver_count = len(sending_signal._live_receivers(_make_id(sender)))
    if receiver_count <= 1:
        return
    warnings.warn("pre_save and post_save signals will not longer be sent for Revision and Version models in django-reversion 1.8. Please use the pre_revision_commit and post_revision_commit signals instead.")
def receivers_for_model(model):
    """Yield ``(signal, receiver)`` for each live receiver connected to
    ``model`` on the model-sender signals."""
    # Django < 1.6 keyed receivers on _make_id(model) rather than the
    # model class itself.
    sender = _make_id(model) if django.VERSION < (1, 6) else model
    for signal in model_sender_signals:
        for receiver in signal._live_receivers(sender):
            yield signal, receiver
def connect(self, receiver, sender=None, weak=True, dispatch_uid=None):
    """Register ``receiver`` via the parent class, and remember ``sender``
    keyed by its id so stored keys can be mapped back to the object."""
    super(Provider, self).connect(receiver, sender, weak, dispatch_uid)
    sender_key = _make_id(sender)
    self._sender_map[sender_key] = sender