def __init__(self, hs: "HomeServer"):
    """Set up the pool; pushers themselves are started later via `start`."""
    self.hs = hs
    self.pusher_factory = PusherFactory(hs)
    self.store = hs.get_datastores().main
    self.clock = hs.get_clock()

    # Push handling is sharded across workers by user ID: this instance only
    # starts pushers when it appears in the shard configuration.
    shard_config = hs.config.worker.pusher_shard_config
    self._pusher_shard_config = shard_config
    self._instance_name = hs.get_instance_name()
    self._should_start_pushers = self._instance_name in shard_config.instances

    # Pushers can only be deleted on the master process; workers go through a
    # replication client, while on master this stays None.
    if hs.config.worker.worker_app:
        self._remove_pusher_client = ReplicationRemovePusherRestServlet.make_client(hs)
    else:
        self._remove_pusher_client = None

    # Last stream ID we were poked about, so we can fetch changes since then.
    # Initialised to the current maximum because every pusher checks for
    # changes itself on startup.
    self._last_room_stream_id_seen = self.store.get_room_max_stream_ordering()

    # user_id -> "app_id:pushkey" -> pusher
    self.pushers: Dict[str, Dict[str, Pusher]] = {}

    self._account_validity_handler = hs.get_account_validity_handler()
def __init__(self, _hs):
    """Set up the pool from the homeserver's config and dependencies."""
    self.hs = _hs
    self.store = _hs.get_datastore()
    self.clock = _hs.get_clock()
    self.pusher_factory = PusherFactory(_hs)
    self._should_start_pushers = _hs.config.start_pushers
    # user_id -> "app_id:pushkey" -> pusher
    self.pushers = {}
def __init__(self, _hs):
    """Set up the pool and register a gauge reporting active pusher counts."""
    self.hs = _hs
    self.store = _hs.get_datastore()
    self.clock = _hs.get_clock()
    self.pusher_factory = PusherFactory(_hs)
    self._should_start_pushers = _hs.config.start_pushers

    # user_id -> "app_id:pushkey" -> pusher
    self.pushers = {}  # type: Dict[str, Dict[str, Union[HttpPusher, EmailPusher]]]

    # Guards `self.pushers`: the metrics callback below is invoked from a
    # different thread (per the original note), so unsynchronised iteration
    # would risk concurrent-modification errors.
    self._pushers_lock = Lock()

    def count_pushers():
        # Tally active pushers, keyed by (pusher class name, app_id).
        counts = defaultdict(int)  # type: Dict[Tuple[str, str], int]
        with self._pushers_lock:
            for by_appkey in self.pushers.values():
                for p in by_appkey.values():
                    counts[(type(p).__name__, p.app_id)] += 1
        return counts

    LaterGauge(
        name="synapse_pushers",
        desc="the number of active pushers",
        labels=["kind", "app_id"],
        caller=count_pushers,
    )
def __init__(self, hs: "HomeServer"):
    """Set up the pool from the homeserver's config and dependencies."""
    self.hs = hs
    self.store = hs.get_datastore()
    self.clock = hs.get_clock()
    self.pusher_factory = PusherFactory(hs)
    self._should_start_pushers = hs.config.start_pushers

    # Push notification handling is sharded across instances by user ID.
    self._pusher_shard_config = hs.config.push.pusher_shard_config
    self._instance_name = hs.get_instance_name()

    # user_id -> "app_id:pushkey" -> pusher
    self.pushers = {}  # type: Dict[str, Dict[str, Union[HttpPusher, EmailPusher]]]
def __init__(self, hs: "HomeServer"):
    """Set up the pool from the homeserver's config and dependencies."""
    self.hs = hs
    self.store = hs.get_datastore()
    self.clock = hs.get_clock()
    self.pusher_factory = PusherFactory(hs)
    self._should_start_pushers = hs.config.start_pushers

    self._account_validity = hs.config.account_validity

    # Push notification handling is sharded across instances by user ID.
    self._pusher_shard_config = hs.config.push.pusher_shard_config
    self._instance_name = hs.get_instance_name()

    # Last stream ID we were poked about, so we can fetch changes since then.
    # Initialised to the current maximum because every pusher checks for
    # changes itself on startup.
    self._last_room_stream_id_seen = self.store.get_room_max_stream_ordering()

    # user_id -> "app_id:pushkey" -> pusher
    self.pushers = {}  # type: Dict[str, Dict[str, Union[HttpPusher, EmailPusher]]]
class PusherPool:
    """
    The pusher pool. This is responsible for dispatching notifications of new events to
    the http and email pushers.

    It provides three methods which are designed to be called by the rest of the
    application: `start`, `on_new_notifications`, and `on_new_receipts`: each of these
    delegates to each of the relevant pushers.

    Note that it is expected that each pusher will have its own 'processing' loop which
    will send out the notifications in the background, rather than blocking until the
    notifications are sent; accordingly Pusher.on_started, Pusher.on_new_notifications and
    Pusher.on_new_receipts are not expected to return awaitables.
    """

    def __init__(self, hs: "HomeServer"):
        self.hs = hs
        self.pusher_factory = PusherFactory(hs)
        self._should_start_pushers = hs.config.start_pushers
        self.store = self.hs.get_datastore()
        self.clock = self.hs.get_clock()

        self._account_validity = hs.config.account_validity

        # We shard the handling of push notifications by user ID.
        self._pusher_shard_config = hs.config.push.pusher_shard_config
        self._instance_name = hs.get_instance_name()

        # Record the last stream ID that we were poked about so we can get
        # changes since then. We set this to the current max stream ID on
        # startup as every individual pusher will have checked for changes on
        # startup.
        self._last_room_stream_id_seen = self.store.get_room_max_stream_ordering()

        # map from user id to app_id:pushkey to pusher
        self.pushers = {}  # type: Dict[str, Dict[str, Union[HttpPusher, EmailPusher]]]

    def start(self):
        """Starts the pushers off in a background process."""
        if not self._should_start_pushers:
            logger.info(
                "Not starting pushers because they are disabled in the config")
            return
        run_as_background_process("start_pushers", self._start_pushers)

    async def add_pusher(
        self,
        user_id,
        access_token,
        kind,
        app_id,
        app_display_name,
        device_display_name,
        pushkey,
        lang,
        data,
        profile_tag="",
    ):
        """Creates a new pusher and adds it to the pool

        Returns:
            EmailPusher|HttpPusher
        """
        time_now_msec = self.clock.time_msec()

        # we try to create the pusher just to validate the config: it
        # will then get pulled out of the database,
        # recreated, added and started: this means we have only one
        # code path adding pushers.
        self.pusher_factory.create_pusher({
            "id": None,
            "user_name": user_id,
            "kind": kind,
            "app_id": app_id,
            "app_display_name": app_display_name,
            "device_display_name": device_display_name,
            "pushkey": pushkey,
            "ts": time_now_msec,
            "lang": lang,
            "data": data,
            "last_stream_ordering": None,
            "last_success": None,
            "failing_since": None,
        })

        # create the pusher setting last_stream_ordering to the current maximum
        # stream ordering in event_push_actions, so it will process
        # pushes from this point onwards.
        last_stream_ordering = await self.store.get_latest_push_action_stream_ordering(
        )

        await self.store.add_pusher(
            user_id=user_id,
            access_token=access_token,
            kind=kind,
            app_id=app_id,
            app_display_name=app_display_name,
            device_display_name=device_display_name,
            pushkey=pushkey,
            pushkey_ts=time_now_msec,
            lang=lang,
            data=data,
            last_stream_ordering=last_stream_ordering,
            profile_tag=profile_tag,
        )
        pusher = await self.start_pusher_by_id(app_id, pushkey, user_id)

        return pusher

    async def remove_pushers_by_app_id_and_pushkey_not_user(
            self, app_id, pushkey, not_user_id):
        # Remove every pusher with this app_id/pushkey that belongs to a user
        # *other* than `not_user_id`.
        to_remove = await self.store.get_pushers_by_app_id_and_pushkey(
            app_id, pushkey)
        for p in to_remove:
            if p["user_name"] != not_user_id:
                logger.info(
                    "Removing pusher for app id %s, pushkey %s, user %s",
                    app_id,
                    pushkey,
                    p["user_name"],
                )
                await self.remove_pusher(p["app_id"], p["pushkey"], p["user_name"])

    async def remove_pushers_by_access_token(self, user_id, access_tokens):
        """Remove the pushers for a given user corresponding to a set of
        access_tokens.

        Args:
            user_id (str): user to remove pushers for
            access_tokens (Iterable[int]): access token *ids* to remove pushers
                for
        """
        # Only the instance responsible for this user (per the shard config)
        # acts on the removal.
        if not self._pusher_shard_config.should_handle(self._instance_name, user_id):
            return

        tokens = set(access_tokens)
        for p in await self.store.get_pushers_by_user_id(user_id):
            if p["access_token"] in tokens:
                logger.info(
                    "Removing pusher for app id %s, pushkey %s, user %s",
                    p["app_id"],
                    p["pushkey"],
                    p["user_name"],
                )
                await self.remove_pusher(p["app_id"], p["pushkey"], p["user_name"])

    async def on_new_notifications(self, max_token: RoomStreamToken):
        """Poke the pushers of all users with new push actions up to `max_token`."""
        if not self.pushers:
            # nothing to do here.
            return

        # We just use the minimum stream ordering and ignore the vector clock
        # component. This is safe to do as long as we *always* ignore the vector
        # clock components.
        max_stream_id = max_token.stream

        if max_stream_id < self._last_room_stream_id_seen:
            # Nothing to do
            return

        prev_stream_id = self._last_room_stream_id_seen
        self._last_room_stream_id_seen = max_stream_id

        try:
            users_affected = await self.store.get_push_action_users_in_range(
                prev_stream_id, max_stream_id)

            for u in users_affected:
                # Don't push if the user account has expired
                if self._account_validity.enabled:
                    expired = await self.store.is_account_expired(
                        u, self.clock.time_msec())
                    if expired:
                        continue

                if u in self.pushers:
                    for p in self.pushers[u].values():
                        p.on_new_notifications(max_token)

        except Exception:
            logger.exception("Exception in pusher on_new_notifications")

    async def on_new_receipts(self, min_stream_id, max_stream_id, affected_room_ids):
        """Poke the pushers of all users who sent receipts in the stream range."""
        if not self.pushers:
            # nothing to do here.
            return

        try:
            # Need to subtract 1 from the minimum because the lower bound here
            # is not inclusive
            users_affected = await self.store.get_users_sent_receipts_between(
                min_stream_id - 1, max_stream_id)

            for u in users_affected:
                # Don't push if the user account has expired
                if self._account_validity.enabled:
                    expired = await self.store.is_account_expired(
                        u, self.clock.time_msec())
                    if expired:
                        continue

                if u in self.pushers:
                    for p in self.pushers[u].values():
                        p.on_new_receipts(min_stream_id, max_stream_id)

        except Exception:
            logger.exception("Exception in pusher on_new_receipts")

    async def start_pusher_by_id(self, app_id, pushkey, user_id):
        """Look up the details for the given pusher, and start it

        Returns:
            EmailPusher|HttpPusher|None: The pusher started, if any
        """
        if not self._should_start_pushers:
            return

        # Only start the pusher if this instance handles the user's shard.
        if not self._pusher_shard_config.should_handle(self._instance_name, user_id):
            return

        resultlist = await self.store.get_pushers_by_app_id_and_pushkey(
            app_id, pushkey)

        pusher_dict = None
        for r in resultlist:
            if r["user_name"] == user_id:
                pusher_dict = r

        pusher = None
        if pusher_dict:
            pusher = await self._start_pusher(pusher_dict)

        return pusher

    async def _start_pushers(self) -> None:
        """Start all the pushers"""
        pushers = await self.store.get_all_pushers()

        # Stagger starting up the pushers so we don't completely drown the
        # process on start up.
        await concurrently_execute(self._start_pusher, pushers, 10)

        logger.info("Started pushers")

    async def _start_pusher(self, pusherdict):
        """Start the given pusher

        Args:
            pusherdict (dict): dict with the values pulled from the db table

        Returns:
            EmailPusher|HttpPusher
        """
        if not self._pusher_shard_config.should_handle(
                self._instance_name, pusherdict["user_name"]):
            return

        try:
            p = self.pusher_factory.create_pusher(pusherdict)
        except PusherConfigException as e:
            logger.warning(
                "Pusher incorrectly configured id=%i, user=%s, appid=%s, pushkey=%s: %s",
                pusherdict["id"],
                pusherdict.get("user_name"),
                pusherdict.get("app_id"),
                pusherdict.get("pushkey"),
                e,
            )
            return
        except Exception:
            logger.exception(
                "Couldn't start pusher id %i: caught Exception",
                pusherdict["id"],
            )
            return

        if not p:
            return

        appid_pushkey = "%s:%s" % (pusherdict["app_id"], pusherdict["pushkey"])

        byuser = self.pushers.setdefault(pusherdict["user_name"], {})
        # Replace any existing pusher for this app_id/pushkey, stopping the old
        # one first.
        if appid_pushkey in byuser:
            byuser[appid_pushkey].on_stop()
        byuser[appid_pushkey] = p

        synapse_pushers.labels(type(p).__name__, p.app_id).inc()

        # Check if there *may* be push to process. We do this as this check is a
        # lot cheaper to do than actually fetching the exact rows we need to
        # push.
        user_id = pusherdict["user_name"]
        last_stream_ordering = pusherdict["last_stream_ordering"]
        if last_stream_ordering:
            have_notifs = await self.store.get_if_maybe_push_in_range_for_user(
                user_id, last_stream_ordering)
        else:
            # We always want to default to starting up the pusher rather than
            # risk missing push.
            have_notifs = True

        p.on_started(have_notifs)

        return p

    async def remove_pusher(self, app_id, pushkey, user_id):
        """Stop the given pusher (if running here) and delete it from the db."""
        appid_pushkey = "%s:%s" % (app_id, pushkey)

        byuser = self.pushers.get(user_id, {})

        if appid_pushkey in byuser:
            logger.info("Stopping pusher %s / %s", user_id, appid_pushkey)
            pusher = byuser.pop(appid_pushkey)
            pusher.on_stop()
            synapse_pushers.labels(type(pusher).__name__, pusher.app_id).dec()

        await self.store.delete_pusher_by_app_id_pushkey_user_id(
            app_id, pushkey, user_id)
class PusherPool:
    """
    The pusher pool. This is responsible for dispatching notifications of new events to
    the http and email pushers.

    It provides three methods which are designed to be called by the rest of the
    application: `start`, `on_new_notifications`, and `on_new_receipts`: each of these
    delegates to each of the relevant pushers.

    Note that it is expected that each pusher will have its own 'processing' loop which
    will send out the notifications in the background, rather than blocking until the
    notifications are sent; accordingly Pusher.on_started, Pusher.on_new_notifications and
    Pusher.on_new_receipts are not expected to return deferreds.
    """

    def __init__(self, _hs):
        self.hs = _hs
        self.pusher_factory = PusherFactory(_hs)
        self._should_start_pushers = _hs.config.start_pushers
        self.store = self.hs.get_datastore()
        self.clock = self.hs.get_clock()
        # map from user id to app_id:pushkey to pusher
        self.pushers = {}

    def start(self):
        """Starts the pushers off in a background process.
        """
        if not self._should_start_pushers:
            logger.info("Not starting pushers because they are disabled in the config")
            return
        run_as_background_process("start_pushers", self._start_pushers)

    @defer.inlineCallbacks
    def add_pusher(self, user_id, access_token, kind, app_id,
                   app_display_name, device_display_name, pushkey, lang, data,
                   profile_tag=""):
        """Creates a new pusher and adds it to the pool."""
        time_now_msec = self.clock.time_msec()

        # we try to create the pusher just to validate the config: it
        # will then get pulled out of the database,
        # recreated, added and started: this means we have only one
        # code path adding pushers.
        self.pusher_factory.create_pusher({
            "id": None,
            "user_name": user_id,
            "kind": kind,
            "app_id": app_id,
            "app_display_name": app_display_name,
            "device_display_name": device_display_name,
            "pushkey": pushkey,
            "ts": time_now_msec,
            "lang": lang,
            "data": data,
            "last_stream_ordering": None,
            "last_success": None,
            "failing_since": None
        })

        # create the pusher setting last_stream_ordering to the current maximum
        # stream ordering in event_push_actions, so it will process
        # pushes from this point onwards.
        last_stream_ordering = (
            yield self.store.get_latest_push_action_stream_ordering()
        )

        yield self.store.add_pusher(
            user_id=user_id,
            access_token=access_token,
            kind=kind,
            app_id=app_id,
            app_display_name=app_display_name,
            device_display_name=device_display_name,
            pushkey=pushkey,
            pushkey_ts=time_now_msec,
            lang=lang,
            data=data,
            last_stream_ordering=last_stream_ordering,
            profile_tag=profile_tag,
        )
        yield self.start_pusher_by_id(app_id, pushkey, user_id)

    @defer.inlineCallbacks
    def remove_pushers_by_app_id_and_pushkey_not_user(self, app_id, pushkey,
                                                      not_user_id):
        # Remove every pusher with this app_id/pushkey belonging to a user
        # *other* than `not_user_id`.
        to_remove = yield self.store.get_pushers_by_app_id_and_pushkey(
            app_id, pushkey
        )
        for p in to_remove:
            if p['user_name'] != not_user_id:
                logger.info(
                    "Removing pusher for app id %s, pushkey %s, user %s",
                    app_id, pushkey, p['user_name']
                )
                yield self.remove_pusher(p['app_id'], p['pushkey'], p['user_name'])

    @defer.inlineCallbacks
    def remove_pushers_by_access_token(self, user_id, access_tokens):
        """Remove the pushers for a given user corresponding to a set of
        access_tokens.

        Args:
            user_id (str): user to remove pushers for
            access_tokens (Iterable[int]): access token *ids* to remove pushers
                for
        """
        tokens = set(access_tokens)
        for p in (yield self.store.get_pushers_by_user_id(user_id)):
            if p['access_token'] in tokens:
                logger.info(
                    "Removing pusher for app id %s, pushkey %s, user %s",
                    p['app_id'], p['pushkey'], p['user_name']
                )
                yield self.remove_pusher(
                    p['app_id'], p['pushkey'], p['user_name'],
                )

    @defer.inlineCallbacks
    def on_new_notifications(self, min_stream_id, max_stream_id):
        # Poke the pushers of all users with new push actions in the stream
        # range.
        if not self.pushers:
            # nothing to do here.
            return

        try:
            users_affected = yield self.store.get_push_action_users_in_range(
                min_stream_id, max_stream_id
            )

            for u in users_affected:
                if u in self.pushers:
                    for p in self.pushers[u].values():
                        p.on_new_notifications(min_stream_id, max_stream_id)

        except Exception:
            logger.exception("Exception in pusher on_new_notifications")

    @defer.inlineCallbacks
    def on_new_receipts(self, min_stream_id, max_stream_id, affected_room_ids):
        # Poke the pushers of all users who sent receipts in the stream range.
        if not self.pushers:
            # nothing to do here.
            return

        try:
            # Need to subtract 1 from the minimum because the lower bound here
            # is not inclusive
            updated_receipts = yield self.store.get_all_updated_receipts(
                min_stream_id - 1, max_stream_id
            )
            # This returns a tuple, user_id is at index 3
            users_affected = set([r[3] for r in updated_receipts])

            for u in users_affected:
                if u in self.pushers:
                    for p in self.pushers[u].values():
                        p.on_new_receipts(min_stream_id, max_stream_id)

        except Exception:
            logger.exception("Exception in pusher on_new_receipts")

    @defer.inlineCallbacks
    def start_pusher_by_id(self, app_id, pushkey, user_id):
        """Look up the details for the given pusher, and start it"""
        if not self._should_start_pushers:
            return

        resultlist = yield self.store.get_pushers_by_app_id_and_pushkey(
            app_id, pushkey
        )

        p = None
        for r in resultlist:
            if r['user_name'] == user_id:
                p = r

        if p:
            yield self._start_pusher(p)

    @defer.inlineCallbacks
    def _start_pushers(self):
        """Start all the pushers

        Returns:
            Deferred
        """
        pushers = yield self.store.get_all_pushers()
        logger.info("Starting %d pushers", len(pushers))

        # Stagger starting up the pushers so we don't completely drown the
        # process on start up.
        yield concurrently_execute(self._start_pusher, pushers, 10)

        logger.info("Started pushers")

    @defer.inlineCallbacks
    def _start_pusher(self, pusherdict):
        """Start the given pusher

        Args:
            pusherdict (dict): dict with the values pulled from the db table

        Returns:
            None
        """
        try:
            p = self.pusher_factory.create_pusher(pusherdict)
        except PusherConfigException as e:
            logger.warning(
                "Pusher incorrectly configured user=%s, appid=%s, pushkey=%s: %s",
                pusherdict.get('user_name'),
                pusherdict.get('app_id'),
                pusherdict.get('pushkey'),
                e,
            )
            return
        except Exception:
            logger.exception("Couldn't start a pusher: caught Exception")
            return

        if not p:
            return

        appid_pushkey = "%s:%s" % (
            pusherdict['app_id'],
            pusherdict['pushkey'],
        )
        byuser = self.pushers.setdefault(pusherdict['user_name'], {})

        # Replace any existing pusher for this app_id/pushkey, stopping the
        # old one first.
        if appid_pushkey in byuser:
            byuser[appid_pushkey].on_stop()
        byuser[appid_pushkey] = p

        # Check if there *may* be push to process. We do this as this check is a
        # lot cheaper to do than actually fetching the exact rows we need to
        # push.
        user_id = pusherdict["user_name"]
        last_stream_ordering = pusherdict["last_stream_ordering"]
        if last_stream_ordering:
            have_notifs = yield self.store.get_if_maybe_push_in_range_for_user(
                user_id, last_stream_ordering,
            )
        else:
            # We always want to default to starting up the pusher rather than
            # risk missing push.
            have_notifs = True

        p.on_started(have_notifs)

    @defer.inlineCallbacks
    def remove_pusher(self, app_id, pushkey, user_id):
        """Stop the given pusher (if running) and delete it from the db."""
        appid_pushkey = "%s:%s" % (app_id, pushkey)

        byuser = self.pushers.get(user_id, {})

        if appid_pushkey in byuser:
            logger.info("Stopping pusher %s / %s", user_id, appid_pushkey)
            byuser[appid_pushkey].on_stop()
            del byuser[appid_pushkey]
        yield self.store.delete_pusher_by_app_id_pushkey_user_id(
            app_id, pushkey, user_id
        )
class PusherPool:
    """
    The pusher pool. This is responsible for dispatching notifications of new events to
    the http and email pushers.

    It provides three methods which are designed to be called by the rest of the
    application: `start`, `on_new_notifications`, and `on_new_receipts`: each of these
    delegates to each of the relevant pushers.

    Note that it is expected that each pusher will have its own 'processing' loop which
    will send out the notifications in the background, rather than blocking until the
    notifications are sent; accordingly Pusher.on_started, Pusher.on_new_notifications and
    Pusher.on_new_receipts are not expected to return awaitables.
    """

    def __init__(self, hs: "HomeServer"):
        self.hs = hs
        self.pusher_factory = PusherFactory(hs)
        self.store = self.hs.get_datastores().main
        self.clock = self.hs.get_clock()

        # We shard the handling of push notifications by user ID.
        self._pusher_shard_config = hs.config.worker.pusher_shard_config
        self._instance_name = hs.get_instance_name()
        self._should_start_pushers = (
            self._instance_name in self._pusher_shard_config.instances
        )

        # We can only delete pushers on master.
        self._remove_pusher_client = None
        if hs.config.worker.worker_app:
            self._remove_pusher_client = ReplicationRemovePusherRestServlet.make_client(
                hs
            )

        # Record the last stream ID that we were poked about so we can get
        # changes since then. We set this to the current max stream ID on
        # startup as every individual pusher will have checked for changes on
        # startup.
        self._last_room_stream_id_seen = self.store.get_room_max_stream_ordering()

        # map from user id to app_id:pushkey to pusher
        self.pushers: Dict[str, Dict[str, Pusher]] = {}

        self._account_validity_handler = hs.get_account_validity_handler()

    def start(self) -> None:
        """Starts the pushers off in a background process."""
        if not self._should_start_pushers:
            logger.info("Not starting pushers because they are disabled in the config")
            return
        run_as_background_process("start_pushers", self._start_pushers)

    async def add_pusher(
        self,
        user_id: str,
        access_token: Optional[int],
        kind: str,
        app_id: str,
        app_display_name: str,
        device_display_name: str,
        pushkey: str,
        lang: Optional[str],
        data: JsonDict,
        profile_tag: str = "",
    ) -> Optional[Pusher]:
        """Creates a new pusher and adds it to the pool

        Returns:
            The newly created pusher.
        """

        # For email pushers the pushkey is the email address: reject the
        # request unless it is a verified threepid of this user.
        if kind == "email":
            email_owner = await self.store.get_user_id_by_threepid(
                "email", canonicalise_email(pushkey)
            )
            if email_owner != user_id:
                raise SynapseError(400, "Email not found", Codes.THREEPID_NOT_FOUND)

        time_now_msec = self.clock.time_msec()

        # create the pusher setting last_stream_ordering to the current maximum
        # stream ordering, so it will process pushes from this point onwards.
        last_stream_ordering = self.store.get_room_max_stream_ordering()

        # we try to create the pusher just to validate the config: it
        # will then get pulled out of the database,
        # recreated, added and started: this means we have only one
        # code path adding pushers.
        self.pusher_factory.create_pusher(
            PusherConfig(
                id=None,
                user_name=user_id,
                access_token=access_token,
                profile_tag=profile_tag,
                kind=kind,
                app_id=app_id,
                app_display_name=app_display_name,
                device_display_name=device_display_name,
                pushkey=pushkey,
                ts=time_now_msec,
                lang=lang,
                data=data,
                last_stream_ordering=last_stream_ordering,
                last_success=None,
                failing_since=None,
            )
        )

        await self.store.add_pusher(
            user_id=user_id,
            access_token=access_token,
            kind=kind,
            app_id=app_id,
            app_display_name=app_display_name,
            device_display_name=device_display_name,
            pushkey=pushkey,
            pushkey_ts=time_now_msec,
            lang=lang,
            data=data,
            last_stream_ordering=last_stream_ordering,
            profile_tag=profile_tag,
        )
        pusher = await self.start_pusher_by_id(app_id, pushkey, user_id)

        return pusher

    async def remove_pushers_by_app_id_and_pushkey_not_user(
        self, app_id: str, pushkey: str, not_user_id: str
    ) -> None:
        # Remove every pusher with this app_id/pushkey belonging to a user
        # *other* than `not_user_id`.
        to_remove = await self.store.get_pushers_by_app_id_and_pushkey(app_id, pushkey)
        for p in to_remove:
            if p.user_name != not_user_id:
                logger.info(
                    "Removing pusher for app id %s, pushkey %s, user %s",
                    app_id,
                    pushkey,
                    p.user_name,
                )
                await self.remove_pusher(p.app_id, p.pushkey, p.user_name)

    async def remove_pushers_by_access_token(
        self, user_id: str, access_tokens: Iterable[int]
    ) -> None:
        """Remove the pushers for a given user corresponding to a set of
        access_tokens.

        Args:
            user_id: user to remove pushers for
            access_tokens: access token *ids* to remove pushers for
        """
        tokens = set(access_tokens)
        for p in await self.store.get_pushers_by_user_id(user_id):
            if p.access_token in tokens:
                logger.info(
                    "Removing pusher for app id %s, pushkey %s, user %s",
                    p.app_id,
                    p.pushkey,
                    p.user_name,
                )
                await self.remove_pusher(p.app_id, p.pushkey, p.user_name)

    def on_new_notifications(self, max_token: RoomStreamToken) -> None:
        """Cheap synchronous entry point: kicks off background processing of
        new push actions if there is anything to do."""
        if not self.pushers:
            # nothing to do here.
            return

        # We just use the minimum stream ordering and ignore the vector clock
        # component. This is safe to do as long as we *always* ignore the vector
        # clock components.
        max_stream_id = max_token.stream

        if max_stream_id < self._last_room_stream_id_seen:
            # Nothing to do
            return

        # We only start a new background process if necessary rather than
        # optimistically (to cut down on overhead).
        self._on_new_notifications(max_token)

    @wrap_as_background_process("on_new_notifications")
    async def _on_new_notifications(self, max_token: RoomStreamToken) -> None:
        # We just use the minimum stream ordering and ignore the vector clock
        # component. This is safe to do as long as we *always* ignore the vector
        # clock components.
        max_stream_id = max_token.stream

        prev_stream_id = self._last_room_stream_id_seen
        self._last_room_stream_id_seen = max_stream_id

        try:
            users_affected = await self.store.get_push_action_users_in_range(
                prev_stream_id, max_stream_id
            )

            for u in users_affected:
                # Don't push if the user account has expired
                expired = await self._account_validity_handler.is_user_expired(u)
                if expired:
                    continue

                if u in self.pushers:
                    for p in self.pushers[u].values():
                        p.on_new_notifications(max_token)

        except Exception:
            logger.exception("Exception in pusher on_new_notifications")

    async def on_new_receipts(
        self, min_stream_id: int, max_stream_id: int, affected_room_ids: Iterable[str]
    ) -> None:
        """Poke the pushers of all users who sent receipts in the stream range."""
        if not self.pushers:
            # nothing to do here.
            return

        try:
            # Need to subtract 1 from the minimum because the lower bound here
            # is not inclusive
            users_affected = await self.store.get_users_sent_receipts_between(
                min_stream_id - 1, max_stream_id
            )

            for u in users_affected:
                # Don't push if the user account has expired
                expired = await self._account_validity_handler.is_user_expired(u)
                if expired:
                    continue

                if u in self.pushers:
                    for p in self.pushers[u].values():
                        p.on_new_receipts(min_stream_id, max_stream_id)

        except Exception:
            logger.exception("Exception in pusher on_new_receipts")

    async def start_pusher_by_id(
        self, app_id: str, pushkey: str, user_id: str
    ) -> Optional[Pusher]:
        """Look up the details for the given pusher, and start it

        Returns:
            The pusher started, if any
        """
        if not self._should_start_pushers:
            return None

        # Only start the pusher if this instance handles the user's shard.
        if not self._pusher_shard_config.should_handle(self._instance_name, user_id):
            return None

        resultlist = await self.store.get_pushers_by_app_id_and_pushkey(app_id, pushkey)

        pusher_config = None
        for r in resultlist:
            if r.user_name == user_id:
                pusher_config = r

        pusher = None
        if pusher_config:
            pusher = await self._start_pusher(pusher_config)

        return pusher

    async def _start_pushers(self) -> None:
        """Start all the pushers"""
        pushers = await self.store.get_all_pushers()

        # Stagger starting up the pushers so we don't completely drown the
        # process on start up.
        await concurrently_execute(self._start_pusher, pushers, 10)

        logger.info("Started pushers")

    async def _start_pusher(self, pusher_config: PusherConfig) -> Optional[Pusher]:
        """Start the given pusher

        Args:
            pusher_config: The pusher configuration with the values pulled from the db table

        Returns:
            The newly created pusher or None.
        """
        if not self._pusher_shard_config.should_handle(
            self._instance_name, pusher_config.user_name
        ):
            return None

        try:
            p = self.pusher_factory.create_pusher(pusher_config)
        except PusherConfigException as e:
            logger.warning(
                "Pusher incorrectly configured id=%i, user=%s, appid=%s, pushkey=%s: %s",
                pusher_config.id,
                pusher_config.user_name,
                pusher_config.app_id,
                pusher_config.pushkey,
                e,
            )
            return None
        except Exception:
            logger.exception(
                "Couldn't start pusher id %i: caught Exception",
                pusher_config.id,
            )
            return None

        if not p:
            return None

        appid_pushkey = "%s:%s" % (pusher_config.app_id, pusher_config.pushkey)

        byuser = self.pushers.setdefault(pusher_config.user_name, {})
        # Replace any existing pusher for this app_id/pushkey, stopping the
        # old one first.
        if appid_pushkey in byuser:
            byuser[appid_pushkey].on_stop()
        byuser[appid_pushkey] = p
        synapse_pushers.labels(type(p).__name__, p.app_id).inc()

        # Check if there *may* be push to process. We do this as this check is a
        # lot cheaper to do than actually fetching the exact rows we need to
        # push.
        user_id = pusher_config.user_name
        last_stream_ordering = pusher_config.last_stream_ordering
        if last_stream_ordering:
            have_notifs = await self.store.get_if_maybe_push_in_range_for_user(
                user_id, last_stream_ordering
            )
        else:
            # We always want to default to starting up the pusher rather than
            # risk missing push.
            have_notifs = True

        p.on_started(have_notifs)

        return p

    async def remove_pusher(self, app_id: str, pushkey: str, user_id: str) -> None:
        """Stop the given pusher (if running here) and delete it, via
        replication when running on a worker."""
        appid_pushkey = "%s:%s" % (app_id, pushkey)

        byuser = self.pushers.get(user_id, {})

        if appid_pushkey in byuser:
            logger.info("Stopping pusher %s / %s", user_id, appid_pushkey)
            pusher = byuser.pop(appid_pushkey)
            pusher.on_stop()
            synapse_pushers.labels(type(pusher).__name__, pusher.app_id).dec()

        # We can only delete pushers on master.
        if self._remove_pusher_client:
            await self._remove_pusher_client(
                app_id=app_id, pushkey=pushkey, user_id=user_id
            )
        else:
            await self.store.delete_pusher_by_app_id_pushkey_user_id(
                app_id, pushkey, user_id
            )
# NOTE(review): this chunk contains several definitions of `PusherPool`; at
# import time the last definition in the file shadows the earlier ones —
# confirm whether this revision is dead code.
class PusherPool:
    """
    The pusher pool. This is responsible for dispatching notifications of new events to
    the http and email pushers.

    It provides three methods which are designed to be called by the rest of the
    application: `start`, `on_new_notifications`, and `on_new_receipts`: each of these
    delegates to each of the relevant pushers.

    Note that it is expected that each pusher will have its own 'processing' loop which
    will send out the notifications in the background, rather than blocking until the
    notifications are sent; accordingly Pusher.on_started, Pusher.on_new_notifications and
    Pusher.on_new_receipts are not expected to return deferreds.
    """
    def __init__(self, _hs):
        self.hs = _hs
        self.pusher_factory = PusherFactory(_hs)
        # Whether this process is configured to run pushers at all.
        self._should_start_pushers = _hs.config.start_pushers
        self.store = self.hs.get_datastore()
        self.clock = self.hs.get_clock()
        # map from user id to app_id:pushkey to pusher
        self.pushers = {}

    def start(self):
        """Starts the pushers off in a background process.
        """
        if not self._should_start_pushers:
            logger.info(
                "Not starting pushers because they are disabled in the config")
            return
        run_as_background_process("start_pushers", self._start_pushers)

    @defer.inlineCallbacks
    def add_pusher(
        self,
        user_id,
        access_token,
        kind,
        app_id,
        app_display_name,
        device_display_name,
        pushkey,
        lang,
        data,
        profile_tag="",
    ):
        """Creates a new pusher and adds it to the pool

        Returns:
            Deferred[EmailPusher|HttpPusher]
        """
        time_now_msec = self.clock.time_msec()

        # we try to create the pusher just to validate the config: it
        # will then get pulled out of the database,
        # recreated, added and started: this means we have only one
        # code path adding pushers.
        self.pusher_factory.create_pusher({
            "id": None,
            "user_name": user_id,
            "kind": kind,
            "app_id": app_id,
            "app_display_name": app_display_name,
            "device_display_name": device_display_name,
            "pushkey": pushkey,
            "ts": time_now_msec,
            "lang": lang,
            "data": data,
            "last_stream_ordering": None,
            "last_success": None,
            "failing_since": None,
        })

        # create the pusher setting last_stream_ordering to the current maximum
        # stream ordering in event_push_actions, so it will process
        # pushes from this point onwards.
        last_stream_ordering = (
            yield self.store.get_latest_push_action_stream_ordering())

        yield self.store.add_pusher(
            user_id=user_id,
            access_token=access_token,
            kind=kind,
            app_id=app_id,
            app_display_name=app_display_name,
            device_display_name=device_display_name,
            pushkey=pushkey,
            pushkey_ts=time_now_msec,
            lang=lang,
            data=data,
            last_stream_ordering=last_stream_ordering,
            profile_tag=profile_tag,
        )
        pusher = yield self.start_pusher_by_id(app_id, pushkey, user_id)

        return pusher

    @defer.inlineCallbacks
    def remove_pushers_by_app_id_and_pushkey_not_user(self, app_id, pushkey,
                                                      not_user_id):
        """Remove all pushers with the given app_id/pushkey belonging to any
        user other than `not_user_id` (e.g. when a pushkey is re-registered by
        a different user — presumably; confirm against callers).
        """
        to_remove = yield self.store.get_pushers_by_app_id_and_pushkey(
            app_id, pushkey)
        for p in to_remove:
            if p["user_name"] != not_user_id:
                logger.info(
                    "Removing pusher for app id %s, pushkey %s, user %s",
                    app_id, pushkey, p["user_name"],
                )
                yield self.remove_pusher(p["app_id"], p["pushkey"], p["user_name"])

    @defer.inlineCallbacks
    def remove_pushers_by_access_token(self, user_id, access_tokens):
        """Remove the pushers for a given user corresponding to a set of
        access_tokens.

        Args:
            user_id (str): user to remove pushers for
            access_tokens (Iterable[int]): access token *ids* to remove pushers
                for
        """
        tokens = set(access_tokens)
        for p in (yield self.store.get_pushers_by_user_id(user_id)):
            if p["access_token"] in tokens:
                logger.info(
                    "Removing pusher for app id %s, pushkey %s, user %s",
                    p["app_id"], p["pushkey"], p["user_name"],
                )
                yield self.remove_pusher(p["app_id"], p["pushkey"], p["user_name"])

    @defer.inlineCallbacks
    def on_new_notifications(self, min_stream_id, max_stream_id):
        """Fan out a new-notification poke to every running pusher whose user
        has push actions in the given stream range."""
        if not self.pushers:
            # nothing to do here.
            return
        try:
            users_affected = yield self.store.get_push_action_users_in_range(
                min_stream_id, max_stream_id)

            for u in users_affected:
                if u in self.pushers:
                    for p in self.pushers[u].values():
                        p.on_new_notifications(min_stream_id, max_stream_id)

        except Exception:
            # Best-effort: never propagate to the caller.
            logger.exception("Exception in pusher on_new_notifications")

    @defer.inlineCallbacks
    def on_new_receipts(self, min_stream_id, max_stream_id, affected_room_ids):
        """Fan out a new-receipt poke to every running pusher whose user sent
        a receipt in the given stream range."""
        if not self.pushers:
            # nothing to do here.
            return
        try:
            # Need to subtract 1 from the minimum because the lower bound here
            # is not inclusive
            updated_receipts = yield self.store.get_all_updated_receipts(
                min_stream_id - 1, max_stream_id)
            # This returns a tuple, user_id is at index 3
            users_affected = set([r[3] for r in updated_receipts])

            for u in users_affected:
                if u in self.pushers:
                    for p in self.pushers[u].values():
                        p.on_new_receipts(min_stream_id, max_stream_id)

        except Exception:
            logger.exception("Exception in pusher on_new_receipts")

    @defer.inlineCallbacks
    def start_pusher_by_id(self, app_id, pushkey, user_id):
        """Look up the details for the given pusher, and start it

        Returns:
            Deferred[EmailPusher|HttpPusher|None]: The pusher started, if any
        """
        if not self._should_start_pushers:
            return

        resultlist = yield self.store.get_pushers_by_app_id_and_pushkey(
            app_id, pushkey)

        # Pick the row belonging to `user_id` (last match wins).
        pusher_dict = None
        for r in resultlist:
            if r["user_name"] == user_id:
                pusher_dict = r

        pusher = None
        if pusher_dict:
            pusher = yield self._start_pusher(pusher_dict)

        return pusher

    @defer.inlineCallbacks
    def _start_pushers(self):
        """Start all the pushers

        Returns:
            Deferred
        """
        pushers = yield self.store.get_all_pushers()
        logger.info("Starting %d pushers", len(pushers))

        # Stagger starting up the pushers so we don't completely drown the
        # process on start up.
        yield concurrently_execute(self._start_pusher, pushers, 10)

        logger.info("Started pushers")

    @defer.inlineCallbacks
    def _start_pusher(self, pusherdict):
        """Start the given pusher

        Args:
            pusherdict (dict): pusher row pulled from the database

        Returns:
            Deferred[EmailPusher|HttpPusher]
        """
        try:
            p = self.pusher_factory.create_pusher(pusherdict)
        except PusherConfigException as e:
            # Misconfigured rows are skipped with a warning, not an error.
            logger.warning(
                "Pusher incorrectly configured user=%s, appid=%s, pushkey=%s: %s",
                pusherdict.get("user_name"),
                pusherdict.get("app_id"),
                pusherdict.get("pushkey"),
                e,
            )
            return
        except Exception:
            logger.exception("Couldn't start a pusher: caught Exception")
            return

        if not p:
            return

        appid_pushkey = "%s:%s" % (pusherdict["app_id"], pusherdict["pushkey"])
        byuser = self.pushers.setdefault(pusherdict["user_name"], {})

        # Stop any pusher we already had for this key before replacing it.
        if appid_pushkey in byuser:
            byuser[appid_pushkey].on_stop()
        byuser[appid_pushkey] = p

        # Check if there *may* be push to process. We do this as this check is a
        # lot cheaper to do than actually fetching the exact rows we need to
        # push.
        user_id = pusherdict["user_name"]
        last_stream_ordering = pusherdict["last_stream_ordering"]
        if last_stream_ordering:
            have_notifs = yield self.store.get_if_maybe_push_in_range_for_user(
                user_id, last_stream_ordering)
        else:
            # We always want to default to starting up the pusher rather than
            # risk missing push.
            have_notifs = True

        p.on_started(have_notifs)

        return p

    @defer.inlineCallbacks
    def remove_pusher(self, app_id, pushkey, user_id):
        """Stop the given pusher (if running) and delete it from the database."""
        appid_pushkey = "%s:%s" % (app_id, pushkey)

        byuser = self.pushers.get(user_id, {})

        if appid_pushkey in byuser:
            logger.info("Stopping pusher %s / %s", user_id, appid_pushkey)
            byuser[appid_pushkey].on_stop()
            del byuser[appid_pushkey]
        yield self.store.delete_pusher_by_app_id_pushkey_user_id(
            app_id, pushkey, user_id)
# NOTE(review): another, apparently older, revision of `PusherPool` — shadowed
# by any later definition in the file; confirm whether it is dead code.
class PusherPool:
    """Pool of active pushers: creates, starts, pokes and removes them."""

    def __init__(self, _hs):
        self.hs = _hs
        self.pusher_factory = PusherFactory(_hs)
        # Whether this process is configured to run pushers at all.
        self.start_pushers = _hs.config.start_pushers
        self.store = self.hs.get_datastore()
        self.clock = self.hs.get_clock()
        # map from user id to app_id:pushkey to pusher
        self.pushers = {}

    @defer.inlineCallbacks
    def start(self):
        """Load every pusher from the database and start it."""
        pushers = yield self.store.get_all_pushers()
        self._start_pushers(pushers)

    @defer.inlineCallbacks
    def add_pusher(self, user_id, access_token, kind, app_id,
                   app_display_name, device_display_name, pushkey, lang, data,
                   profile_tag=""):
        """Validate, persist and start a new pusher for the given user."""
        time_now_msec = self.clock.time_msec()

        # we try to create the pusher just to validate the config: it
        # will then get pulled out of the database,
        # recreated, added and started: this means we have only one
        # code path adding pushers.
        self.pusher_factory.create_pusher({
            "id": None,
            "user_name": user_id,
            "kind": kind,
            "app_id": app_id,
            "app_display_name": app_display_name,
            "device_display_name": device_display_name,
            "pushkey": pushkey,
            "ts": time_now_msec,
            "lang": lang,
            "data": data,
            "last_stream_ordering": None,
            "last_success": None,
            "failing_since": None
        })

        # create the pusher setting last_stream_ordering to the current maximum
        # stream ordering in event_push_actions, so it will process
        # pushes from this point onwards.
        last_stream_ordering = (
            yield self.store.get_latest_push_action_stream_ordering()
        )

        yield self.store.add_pusher(
            user_id=user_id,
            access_token=access_token,
            kind=kind,
            app_id=app_id,
            app_display_name=app_display_name,
            device_display_name=device_display_name,
            pushkey=pushkey,
            pushkey_ts=time_now_msec,
            lang=lang,
            data=data,
            last_stream_ordering=last_stream_ordering,
            profile_tag=profile_tag,
        )
        yield self._refresh_pusher(app_id, pushkey, user_id)

    @defer.inlineCallbacks
    def remove_pushers_by_app_id_and_pushkey_not_user(self, app_id, pushkey,
                                                      not_user_id):
        """Remove all pushers with this app_id/pushkey belonging to users other
        than `not_user_id`."""
        to_remove = yield self.store.get_pushers_by_app_id_and_pushkey(
            app_id, pushkey
        )
        for p in to_remove:
            if p['user_name'] != not_user_id:
                logger.info(
                    "Removing pusher for app id %s, pushkey %s, user %s",
                    app_id, pushkey, p['user_name']
                )
                yield self.remove_pusher(p['app_id'], p['pushkey'], p['user_name'])

    @defer.inlineCallbacks
    def remove_pushers_by_access_token(self, user_id, access_tokens):
        """Remove the pushers for a given user corresponding to a set of
        access_tokens.

        Args:
            user_id (str): user to remove pushers for
            access_tokens (Iterable[int]): access token *ids* to remove pushers
                for
        """
        tokens = set(access_tokens)
        for p in (yield self.store.get_pushers_by_user_id(user_id)):
            if p['access_token'] in tokens:
                logger.info(
                    "Removing pusher for app id %s, pushkey %s, user %s",
                    p['app_id'], p['pushkey'], p['user_name']
                )
                yield self.remove_pusher(
                    p['app_id'], p['pushkey'], p['user_name'],
                )

    @defer.inlineCallbacks
    def on_new_notifications(self, min_stream_id, max_stream_id):
        """Poke every affected user's pushers about new push actions in the
        given stream range, fanning out in the background and waiting for all
        of them."""
        yield run_on_reactor()
        try:
            users_affected = yield self.store.get_push_action_users_in_range(
                min_stream_id, max_stream_id
            )

            deferreds = []

            for u in users_affected:
                if u in self.pushers:
                    for p in self.pushers[u].values():
                        deferreds.append(
                            run_in_background(
                                p.on_new_notifications,
                                min_stream_id, max_stream_id,
                            )
                        )

            yield make_deferred_yieldable(
                defer.gatherResults(deferreds, consumeErrors=True),
            )
        except Exception:
            # Best-effort: never propagate to the caller.
            logger.exception("Exception in pusher on_new_notifications")

    @defer.inlineCallbacks
    def on_new_receipts(self, min_stream_id, max_stream_id, affected_room_ids):
        """Poke every affected user's pushers about new read receipts in the
        given stream range."""
        yield run_on_reactor()
        try:
            # Need to subtract 1 from the minimum because the lower bound here
            # is not inclusive
            updated_receipts = yield self.store.get_all_updated_receipts(
                min_stream_id - 1, max_stream_id
            )
            # This returns a tuple, user_id is at index 3
            users_affected = set([r[3] for r in updated_receipts])

            deferreds = []

            for u in users_affected:
                if u in self.pushers:
                    for p in self.pushers[u].values():
                        deferreds.append(
                            run_in_background(
                                p.on_new_receipts,
                                min_stream_id, max_stream_id,
                            )
                        )

            yield make_deferred_yieldable(
                defer.gatherResults(deferreds, consumeErrors=True),
            )
        except Exception:
            logger.exception("Exception in pusher on_new_receipts")

    @defer.inlineCallbacks
    def _refresh_pusher(self, app_id, pushkey, user_id):
        """Re-fetch the given pusher's row from the database and (re)start it."""
        resultlist = yield self.store.get_pushers_by_app_id_and_pushkey(
            app_id, pushkey
        )

        # Pick the row belonging to `user_id` (last match wins).
        p = None
        for r in resultlist:
            if r['user_name'] == user_id:
                p = r

        if p:
            self._start_pushers([p])

    def _start_pushers(self, pushers):
        """Start each pusher dict in `pushers`, replacing any already running
        with the same user/app_id:pushkey."""
        if not self.start_pushers:
            logger.info("Not starting pushers because they are disabled in the config")
            return
        logger.info("Starting %d pushers", len(pushers))
        for pusherdict in pushers:
            try:
                p = self.pusher_factory.create_pusher(pusherdict)
            except Exception:
                # One broken row must not prevent the rest from starting.
                logger.exception("Couldn't start a pusher: caught Exception")
                continue
            if p:
                appid_pushkey = "%s:%s" % (
                    pusherdict['app_id'], pusherdict['pushkey'],
                )
                byuser = self.pushers.setdefault(pusherdict['user_name'], {})

                if appid_pushkey in byuser:
                    byuser[appid_pushkey].on_stop()
                byuser[appid_pushkey] = p
                run_in_background(p.on_started)

        logger.info("Started pushers")

    @defer.inlineCallbacks
    def remove_pusher(self, app_id, pushkey, user_id):
        """Stop the given pusher (if running) and delete it from the database."""
        appid_pushkey = "%s:%s" % (app_id, pushkey)

        byuser = self.pushers.get(user_id, {})

        if appid_pushkey in byuser:
            logger.info("Stopping pusher %s / %s", user_id, appid_pushkey)
            byuser[appid_pushkey].on_stop()
            del byuser[appid_pushkey]
        yield self.store.delete_pusher_by_app_id_pushkey_user_id(
            app_id, pushkey, user_id
        )
# NOTE(review): another revision of `PusherPool`; shadowed by any later
# definition in the file — confirm whether it is dead code.
class PusherPool:
    """Pool of active pushers: creates, starts, pokes and removes them.

    Notification/receipt pokes are dispatched as named background processes
    rather than awaited inline by the caller.
    """

    def __init__(self, _hs):
        self.hs = _hs
        self.pusher_factory = PusherFactory(_hs)
        # Whether this process is configured to run pushers at all.
        self.start_pushers = _hs.config.start_pushers
        self.store = self.hs.get_datastore()
        self.clock = self.hs.get_clock()
        # map from user id to app_id:pushkey to pusher
        self.pushers = {}

    @defer.inlineCallbacks
    def start(self):
        """Load every pusher from the database and start it."""
        pushers = yield self.store.get_all_pushers()
        self._start_pushers(pushers)

    @defer.inlineCallbacks
    def add_pusher(self, user_id, access_token, kind, app_id,
                   app_display_name, device_display_name, pushkey, lang, data,
                   profile_tag=""):
        """Validate, persist and start a new pusher for the given user."""
        time_now_msec = self.clock.time_msec()

        # we try to create the pusher just to validate the config: it
        # will then get pulled out of the database,
        # recreated, added and started: this means we have only one
        # code path adding pushers.
        self.pusher_factory.create_pusher({
            "id": None,
            "user_name": user_id,
            "kind": kind,
            "app_id": app_id,
            "app_display_name": app_display_name,
            "device_display_name": device_display_name,
            "pushkey": pushkey,
            "ts": time_now_msec,
            "lang": lang,
            "data": data,
            "last_stream_ordering": None,
            "last_success": None,
            "failing_since": None
        })

        # create the pusher setting last_stream_ordering to the current maximum
        # stream ordering in event_push_actions, so it will process
        # pushes from this point onwards.
        last_stream_ordering = (
            yield self.store.get_latest_push_action_stream_ordering())

        yield self.store.add_pusher(
            user_id=user_id,
            access_token=access_token,
            kind=kind,
            app_id=app_id,
            app_display_name=app_display_name,
            device_display_name=device_display_name,
            pushkey=pushkey,
            pushkey_ts=time_now_msec,
            lang=lang,
            data=data,
            last_stream_ordering=last_stream_ordering,
            profile_tag=profile_tag,
        )
        yield self._refresh_pusher(app_id, pushkey, user_id)

    @defer.inlineCallbacks
    def remove_pushers_by_app_id_and_pushkey_not_user(self, app_id, pushkey,
                                                      not_user_id):
        """Remove all pushers with this app_id/pushkey belonging to users other
        than `not_user_id`."""
        to_remove = yield self.store.get_pushers_by_app_id_and_pushkey(
            app_id, pushkey)
        for p in to_remove:
            if p['user_name'] != not_user_id:
                logger.info(
                    "Removing pusher for app id %s, pushkey %s, user %s",
                    app_id, pushkey, p['user_name'])
                yield self.remove_pusher(p['app_id'], p['pushkey'], p['user_name'])

    @defer.inlineCallbacks
    def remove_pushers_by_access_token(self, user_id, access_tokens):
        """Remove the pushers for a given user corresponding to a set of
        access_tokens.

        Args:
            user_id (str): user to remove pushers for
            access_tokens (Iterable[int]): access token *ids* to remove pushers
                for
        """
        tokens = set(access_tokens)
        for p in (yield self.store.get_pushers_by_user_id(user_id)):
            if p['access_token'] in tokens:
                logger.info(
                    "Removing pusher for app id %s, pushkey %s, user %s",
                    p['app_id'], p['pushkey'], p['user_name'])
                yield self.remove_pusher(
                    p['app_id'], p['pushkey'], p['user_name'],
                )

    def on_new_notifications(self, min_stream_id, max_stream_id):
        """Fire-and-forget wrapper: dispatch the notification poke as a
        tracked background process."""
        run_as_background_process(
            "on_new_notifications",
            self._on_new_notifications, min_stream_id, max_stream_id,
        )

    @defer.inlineCallbacks
    def _on_new_notifications(self, min_stream_id, max_stream_id):
        """Poke every affected user's pushers about new push actions in the
        given stream range, fanning out in the background."""
        try:
            users_affected = yield self.store.get_push_action_users_in_range(
                min_stream_id, max_stream_id)

            deferreds = []

            for u in users_affected:
                if u in self.pushers:
                    for p in self.pushers[u].values():
                        deferreds.append(
                            run_in_background(
                                p.on_new_notifications,
                                min_stream_id, max_stream_id,
                            ))

            yield make_deferred_yieldable(
                defer.gatherResults(deferreds, consumeErrors=True),
            )
        except Exception:
            # Best-effort: never propagate out of the background process.
            logger.exception("Exception in pusher on_new_notifications")

    def on_new_receipts(self, min_stream_id, max_stream_id, affected_room_ids):
        """Fire-and-forget wrapper: dispatch the receipt poke as a tracked
        background process."""
        run_as_background_process(
            "on_new_receipts",
            self._on_new_receipts, min_stream_id, max_stream_id,
            affected_room_ids,
        )

    @defer.inlineCallbacks
    def _on_new_receipts(self, min_stream_id, max_stream_id, affected_room_ids):
        """Poke every affected user's pushers about new read receipts in the
        given stream range."""
        try:
            # Need to subtract 1 from the minimum because the lower bound here
            # is not inclusive
            updated_receipts = yield self.store.get_all_updated_receipts(
                min_stream_id - 1, max_stream_id)
            # This returns a tuple, user_id is at index 3
            users_affected = set([r[3] for r in updated_receipts])

            deferreds = []

            for u in users_affected:
                if u in self.pushers:
                    for p in self.pushers[u].values():
                        deferreds.append(
                            run_in_background(
                                p.on_new_receipts,
                                min_stream_id, max_stream_id,
                            ))

            yield make_deferred_yieldable(
                defer.gatherResults(deferreds, consumeErrors=True),
            )
        except Exception:
            logger.exception("Exception in pusher on_new_receipts")

    @defer.inlineCallbacks
    def _refresh_pusher(self, app_id, pushkey, user_id):
        """Re-fetch the given pusher's row from the database and (re)start it."""
        resultlist = yield self.store.get_pushers_by_app_id_and_pushkey(
            app_id, pushkey)

        # Pick the row belonging to `user_id` (last match wins).
        p = None
        for r in resultlist:
            if r['user_name'] == user_id:
                p = r

        if p:
            self._start_pushers([p])

    def _start_pushers(self, pushers):
        """Start each pusher dict in `pushers`, replacing any already running
        with the same user/app_id:pushkey."""
        if not self.start_pushers:
            logger.info(
                "Not starting pushers because they are disabled in the config")
            return
        logger.info("Starting %d pushers", len(pushers))
        for pusherdict in pushers:
            try:
                p = self.pusher_factory.create_pusher(pusherdict)
            except Exception:
                # One broken row must not prevent the rest from starting.
                logger.exception("Couldn't start a pusher: caught Exception")
                continue
            if p:
                appid_pushkey = "%s:%s" % (
                    pusherdict['app_id'], pusherdict['pushkey'],
                )
                byuser = self.pushers.setdefault(pusherdict['user_name'], {})

                if appid_pushkey in byuser:
                    byuser[appid_pushkey].on_stop()
                byuser[appid_pushkey] = p
                run_in_background(p.on_started)

        logger.info("Started pushers")

    @defer.inlineCallbacks
    def remove_pusher(self, app_id, pushkey, user_id):
        """Stop the given pusher (if running) and delete it from the database."""
        appid_pushkey = "%s:%s" % (app_id, pushkey)

        byuser = self.pushers.get(user_id, {})

        if appid_pushkey in byuser:
            logger.info("Stopping pusher %s / %s", user_id, appid_pushkey)
            byuser[appid_pushkey].on_stop()
            del byuser[appid_pushkey]
        yield self.store.delete_pusher_by_app_id_pushkey_user_id(
            app_id, pushkey, user_id)
# NOTE(review): another revision of `PusherPool`; if a later definition follows
# in the file it shadows this one — confirm whether it is dead code.
class PusherPool:
    """
    The pusher pool. This is responsible for dispatching notifications of new events to
    the http and email pushers.

    It provides three methods which are designed to be called by the rest of the
    application: `start`, `on_new_notifications`, and `on_new_receipts`: each of these
    delegates to each of the relevant pushers.

    Note that it is expected that each pusher will have its own 'processing' loop which
    will send out the notifications in the background, rather than blocking until the
    notifications are sent; accordingly Pusher.on_started, Pusher.on_new_notifications and
    Pusher.on_new_receipts are not expected to return deferreds.
    """
    def __init__(self, _hs):
        self.hs = _hs
        self.pusher_factory = PusherFactory(_hs)
        # Whether this process is configured to run pushers at all.
        self._should_start_pushers = _hs.config.start_pushers
        self.store = self.hs.get_datastore()
        self.clock = self.hs.get_clock()
        # map from user id to app_id:pushkey to pusher
        self.pushers = {}

    def start(self):
        """Starts the pushers off in a background process.
        """
        if not self._should_start_pushers:
            logger.info(
                "Not starting pushers because they are disabled in the config")
            return
        run_as_background_process("start_pushers", self._start_pushers)

    @defer.inlineCallbacks
    def add_pusher(self, user_id, access_token, kind, app_id,
                   app_display_name, device_display_name, pushkey, lang, data,
                   profile_tag=""):
        """Validate, persist and start a new pusher for the given user."""
        time_now_msec = self.clock.time_msec()

        # we try to create the pusher just to validate the config: it
        # will then get pulled out of the database,
        # recreated, added and started: this means we have only one
        # code path adding pushers.
        self.pusher_factory.create_pusher({
            "id": None,
            "user_name": user_id,
            "kind": kind,
            "app_id": app_id,
            "app_display_name": app_display_name,
            "device_display_name": device_display_name,
            "pushkey": pushkey,
            "ts": time_now_msec,
            "lang": lang,
            "data": data,
            "last_stream_ordering": None,
            "last_success": None,
            "failing_since": None
        })

        # create the pusher setting last_stream_ordering to the current maximum
        # stream ordering in event_push_actions, so it will process
        # pushes from this point onwards.
        last_stream_ordering = (
            yield self.store.get_latest_push_action_stream_ordering())

        yield self.store.add_pusher(
            user_id=user_id,
            access_token=access_token,
            kind=kind,
            app_id=app_id,
            app_display_name=app_display_name,
            device_display_name=device_display_name,
            pushkey=pushkey,
            pushkey_ts=time_now_msec,
            lang=lang,
            data=data,
            last_stream_ordering=last_stream_ordering,
            profile_tag=profile_tag,
        )
        yield self.start_pusher_by_id(app_id, pushkey, user_id)

    @defer.inlineCallbacks
    def remove_pushers_by_app_id_and_pushkey_not_user(self, app_id, pushkey,
                                                      not_user_id):
        """Remove all pushers with this app_id/pushkey belonging to users other
        than `not_user_id`."""
        to_remove = yield self.store.get_pushers_by_app_id_and_pushkey(
            app_id, pushkey)
        for p in to_remove:
            if p['user_name'] != not_user_id:
                logger.info(
                    "Removing pusher for app id %s, pushkey %s, user %s",
                    app_id, pushkey, p['user_name'])
                yield self.remove_pusher(p['app_id'], p['pushkey'], p['user_name'])

    @defer.inlineCallbacks
    def remove_pushers_by_access_token(self, user_id, access_tokens):
        """Remove the pushers for a given user corresponding to a set of
        access_tokens.

        Args:
            user_id (str): user to remove pushers for
            access_tokens (Iterable[int]): access token *ids* to remove pushers
                for
        """
        tokens = set(access_tokens)
        for p in (yield self.store.get_pushers_by_user_id(user_id)):
            if p['access_token'] in tokens:
                logger.info(
                    "Removing pusher for app id %s, pushkey %s, user %s",
                    p['app_id'], p['pushkey'], p['user_name'])
                yield self.remove_pusher(
                    p['app_id'], p['pushkey'], p['user_name'],
                )

    @defer.inlineCallbacks
    def on_new_notifications(self, min_stream_id, max_stream_id):
        """Poke every affected user's pushers about new push actions in the
        given stream range. Exceptions are logged, never raised."""
        try:
            users_affected = yield self.store.get_push_action_users_in_range(
                min_stream_id, max_stream_id)

            for u in users_affected:
                if u in self.pushers:
                    for p in self.pushers[u].values():
                        p.on_new_notifications(min_stream_id, max_stream_id)

        except Exception:
            logger.exception("Exception in pusher on_new_notifications")

    @defer.inlineCallbacks
    def on_new_receipts(self, min_stream_id, max_stream_id, affected_room_ids):
        """Poke every affected user's pushers about new read receipts in the
        given stream range. Exceptions are logged, never raised."""
        try:
            # Need to subtract 1 from the minimum because the lower bound here
            # is not inclusive
            updated_receipts = yield self.store.get_all_updated_receipts(
                min_stream_id - 1, max_stream_id)
            # This returns a tuple, user_id is at index 3
            users_affected = set([r[3] for r in updated_receipts])

            for u in users_affected:
                if u in self.pushers:
                    for p in self.pushers[u].values():
                        p.on_new_receipts(min_stream_id, max_stream_id)

        except Exception:
            logger.exception("Exception in pusher on_new_receipts")

    @defer.inlineCallbacks
    def start_pusher_by_id(self, app_id, pushkey, user_id):
        """Look up the details for the given pusher, and start it"""
        if not self._should_start_pushers:
            return

        resultlist = yield self.store.get_pushers_by_app_id_and_pushkey(
            app_id, pushkey)

        # Pick the row belonging to `user_id` (last match wins).
        p = None
        for r in resultlist:
            if r['user_name'] == user_id:
                p = r

        if p:
            self._start_pusher(p)

    @defer.inlineCallbacks
    def _start_pushers(self):
        """Start all the pushers

        Returns:
            Deferred
        """
        pushers = yield self.store.get_all_pushers()
        logger.info("Starting %d pushers", len(pushers))
        for pusherdict in pushers:
            self._start_pusher(pusherdict)
        logger.info("Started pushers")

    def _start_pusher(self, pusherdict):
        """Start the given pusher

        Args:
            pusherdict (dict): pusher row pulled from the database

        Returns:
            None
        """
        try:
            p = self.pusher_factory.create_pusher(pusherdict)
        except Exception:
            # One broken row must not prevent others from starting.
            logger.exception("Couldn't start a pusher: caught Exception")
            return

        if not p:
            return

        appid_pushkey = "%s:%s" % (
            pusherdict['app_id'], pusherdict['pushkey'],
        )
        byuser = self.pushers.setdefault(pusherdict['user_name'], {})

        # Stop any pusher we already had for this key before replacing it.
        if appid_pushkey in byuser:
            byuser[appid_pushkey].on_stop()
        byuser[appid_pushkey] = p
        p.on_started()

    @defer.inlineCallbacks
    def remove_pusher(self, app_id, pushkey, user_id):
        """Stop the given pusher (if running) and delete it from the database."""
        appid_pushkey = "%s:%s" % (app_id, pushkey)

        byuser = self.pushers.get(user_id, {})

        if appid_pushkey in byuser:
            logger.info("Stopping pusher %s / %s", user_id, appid_pushkey)
            byuser[appid_pushkey].on_stop()
            del byuser[appid_pushkey]
        yield self.store.delete_pusher_by_app_id_pushkey_user_id(
            app_id, pushkey, user_id)