def __call__(self, difference, old_val, new_val):
    """
    Fire the registered callback for a record change on its own daemon thread.

    Arguments
    ---------
    difference : list
        Raw diff entries describing what changed.
    old_val, new_val
        The record value before and after the change.
    """
    kwargs = {}
    kwargs.update(self.extra_kwargs)
    kwargs["record"] = self.record
    kwargs["callback_id"] = self.callback_id
    kwargs["difference"] = difference
    kwargs["changes"] = self.record._convert_diff_to_changelist(
        difference, old_val, new_val
    )

    logger.debug(f"Firing callback {self.callback} with kwargs: {kwargs}")

    # trim down the passed parameters
    # to include only those the callback will accept
    params = signature(self.callback).parameters
    if not any("**" in str(p) for p in params.values()):
        # there's no "**kwargs" in the callback signature, so remove any
        # unaccepted params; iterate over a snapshot of the keys, because
        # deleting from a dict while iterating it raises RuntimeError
        for arg in list(kwargs):
            if arg not in params:
                del kwargs[arg]

    # perform the callback, gracefully handling any exceptions
    try:
        # trigger the callback within its own thread,
        # so it won't block others if it's long-running
        Thread(target=self.callback, kwargs=kwargs, daemon=True).start()
    except Exception as e:
        logger.error(
            f"Error while processing callback for {repr(self.record)}: {repr(e)}"
        )
def poll_async(self):
    """Start `poll_forever` on a background daemon thread; no-op if already running."""
    if self.thread:
        # Already polling async; no need to have two threads
        return
    logger.debug("Starting new thread for async polling")
    worker = threading.Thread(target=self.poll_forever, daemon=True)
    self.thread = worker
    worker.start()
def subscribe(self, records: Set[Record]):
    """
    Subscribe to changes of passed records.

    Arguments
    ---------
    records : set of Record
        Set of `Record` objects to subscribe to.
    """
    # normalize the input to a set
    if isinstance(records, list):
        records = set(records)
    # TODO: how to describe that you can also pass
    # record explicitly or should we block it?
    if not isinstance(records, set):
        records = {records}

    payload = []
    for record in records.difference(self._subscriptions):
        key = f"{record.id}:{record._table}"
        logger.debug(f"Subscribing new record: {key}")

        # remember it so we can resubscribe after a disconnect
        self._subscriptions.add(record)

        # TODO: hide that dict generation in Record class
        payload.append(
            {
                "type": "/api/v1/registerSubscription",
                "requestId": str(uuid.uuid4()),
                "key": f"versions/{key}",
                "version": record.get("version", -1),
            }
        )
        # TODO: fix imports, then also subscribe to changes to a
        # Collection's children here (registerSubscription on
        # "collection/<id>" with version -1)

    self.post_data(self._encode_numbered_json_thing(payload))
def _refresh_updated_records(self, events: list):
    """
    Refresh local records based on "notification" events from Notion.

    Arguments
    ---------
    events : list
        Decoded entries from the monitoring long-poll response; anything
        that isn't a dict with type == "notification" is ignored.
    """
    records_to_refresh = defaultdict(list)
    versions_pattern = re.compile(r"versions/([^:]+):(.+)")
    collection_pattern = re.compile(r"collection/(.+)")
    events = filter(lambda e: isinstance(e, dict), events)
    events = filter(lambda e: e.get("type", "") == "notification", events)

    for event in events:
        logger.debug(f"Received the following event from notion: {event}")
        # default to "" so a notification without a key can't crash startswith
        key = event.get("key", "")

        # TODO: rewrite below if cases to something simpler
        if key.startswith("versions/"):
            match = versions_pattern.match(key)
            if not match:
                continue

            record_id, record_table = match.groups()
            old = self.client._store.get_current_version(record_table, record_id)
            new = event["value"]
            name = f"{record_table}/{record_id}"

            if new > old:
                logger.debug(
                    (
                        f"Record {name} has changed; refreshing to update "
                        f"from version {old} to version {new}"
                    )
                )
                records_to_refresh[record_table].append(record_id)
            else:
                logger.debug(
                    (
                        f"Record {name} already at version {old}; "
                        f"not trying to update to version {new}"
                    )
                )

        if key.startswith("collection/"):
            match = collection_pattern.match(key)
            if not match:
                continue

            collection_id = match.groups()[0]
            self.client.refresh_collection_rows(collection_id)
            row_ids = self.client._store.get_collection_rows(collection_id)
            logger.debug(
                (
                    f"Something inside collection {collection_id} has changed; "
                    f"refreshing all {len(row_ids)} rows inside it"
                )
            )
            records_to_refresh["block"] += row_ids

    self.client.refresh_records(**records_to_refresh)
def poll(self, retries: int = 10):
    """
    Poll for changes.

    Arguments
    ---------
    retries : int, optional
        Number of times to retry request if it fails.
        Defaults to 10.

    Raises
    ------
    HTTPError
        When GET request fails for `retries` times.
    """
    logger.debug("Starting new long-poll request")
    while True:
        try:
            response = self.client.session.get(self.url(EIO=3, sid=self.sid))
            response.raise_for_status()
            # success: stop retrying and process the response below
            break
        except HTTPError as e:
            try:
                message = f"{response.content} / {e}"
            except (AttributeError, NameError):
                # `response` may be unbound if the GET itself raised
                message = str(e)

            # count the failure (previously this was decremented on the
            # success path, so persistent failures looped forever)
            retries -= 1
            logger.warning(
                f"Problem with submitting poll request: {message} (will retry {retries} more times)"
            )
            if retries <= 0:
                raise
            time.sleep(0.1)
            if retries <= 5:
                logger.error(
                    f"Persistent error submitting poll request: {message} (will retry {retries} more times)"
                )
                # if we're close to giving up, also try reinitializing the session
                self.initialize()

    self._refresh_updated_records(
        self._decode_numbered_json_thing(response.content)
    )
def _decode_numbered_json_thing(self, thing: bytes) -> list:
    """
    Parse a monitoring response body into a list of JSON payload dicts,
    answering any embedded "primus" pings with pongs along the way.
    """
    text = thing.decode().strip()

    # answer keep-alive pings so the server doesn't drop the session
    for ping in re.findall(r'\d+:\d+"primus::ping::\d+"', text):
        logger.debug("Received ping: {}".format(ping))
        self.post_data(ping.replace("::ping::", "::pong::"))

    parsed = [
        json.loads(blob)
        for blob in re.findall(r"\d+:\d+({.+})(?=\d|$)", text)
    ]

    if text and not parsed and "::ping::" not in text:
        logger.debug("Could not parse monitoring response: {}".format(text))

    return parsed
def post_data(self, data: bytes):
    """
    Send monitoring requests to Notion.

    Arguments
    ---------
    data : bytes
        Form encoded request data.
    """
    if data:
        logger.debug(f"Posting monitoring data: {data}")
        endpoint = self.url(sid=self.sid)
        self.client.session.post(endpoint, data=data)
def initialize(self):
    """
    Initialize the monitoring session.
    """
    logger.debug("Initializing new monitoring session.")

    content = self.client.session.get(self.url(EIO=3)).content
    # TODO: add error handling
    decoded = self._decode_numbered_json_thing(content)
    self.sid = decoded[0]["sid"]
    logger.debug(f"New monitoring session ID is: {self.sid}")

    # resubscribe to any existing subscriptions if we're reconnecting
    previous, self._subscriptions = self._subscriptions, set()
    self.subscribe(previous)
def call_get_record_values(self, **kwargs):
    """
    Call the server's getRecordValues endpoint to update the local record store.
    The keyword arguments map table names into lists of (or singular) record IDs
    to load for that table. Use True to refresh all known records for that table.
    """
    requestlist = []

    for table, ids in kwargs.items():
        # ensure "ids" is a proper list
        if ids is True:
            ids = list(self._values.get(table, {}).keys())
        if isinstance(ids, str):
            ids = [ids]

        # if we're in a transaction, add the requested IDs to a queue to refresh
        # when the transaction completes
        if self._client.in_transaction():
            queued = set(self._records_to_refresh.get(table, []))
            queued.update(ids)
            self._records_to_refresh[table] = list(queued)
            continue

        for record_id in ids:
            requestlist.append({"table": table, "id": extract_id(record_id)})

    if not requestlist:
        return

    logger.debug(
        "Calling 'getRecordValues' endpoint for requests: {}".format(requestlist))
    results = self._client.post(
        "getRecordValues", {"requests": requestlist}).json()["results"]

    for request, result in zip(requestlist, results):
        self._update_record(
            request["table"],
            request["id"],
            value=result.get("value"),
            role=result.get("role"),
        )
def _update_record(self, table, record_id, value=None, role=None):
    """
    Update the cached role and/or value for a record, then fire change callbacks.

    Arguments
    ---------
    table : str
        Record table name.
    record_id : str
        ID of the record within that table.
    value : dict, optional
        New record value; diffed against the cached value when provided.
    role : str, optional
        New permission role for the record.
    """
    callback_queue = []

    with self._mutex:
        if role:
            logger.debug(f"Updating 'role' for '{table}/{record_id}' to '{role}'")
            self._role[table][record_id] = role
            self._save_cache("_role")
        if value:
            p_value = json.dumps(value, indent=2)
            logger.debug(
                f"Updating 'value' for '{table}/{record_id}' to \n{p_value}"
            )
            old_val = self._values[table][record_id]
            difference = list(
                diff(
                    old_val,
                    value,
                    ignore=["version", "last_edited_time", "last_edited_by"],
                    expand=True,
                )
            )
            self._values[table][record_id] = value
            self._save_cache("_values")
            if old_val and difference:
                # dump the actual diff — previously this dumped `value`,
                # so the "Difference" log line showed the new value instead
                p_difference = json.dumps(difference, indent=2)
                logger.debug(f"Value changed! Difference:\n{p_difference}")
                callback = (table, record_id, difference, old_val, value)
                callback_queue.append(callback)

    # run callbacks outside the mutex to avoid lockups
    for cb in callback_queue:
        self._trigger_callbacks(*cb)
def _update_record(self, table, id, value=None, role=None):
    """
    Update the cached role and/or value for a record under the mutex,
    then fire any change callbacks after the mutex is released.
    """
    pending = []

    with self._mutex:
        if role:
            logger.debug("Updating 'role' for {}/{} to {}".format(
                table, id, role))
            self._role[table][id] = role
            self._save_cache("_role")
        if value:
            logger.debug("Updating 'value' for {}/{} to {}".format(
                table, id, value))
            previous = self._values[table][id]
            changes = list(
                diff(
                    previous,
                    value,
                    ignore=[
                        "version", "last_edited_time", "last_edited_by"
                    ],
                    expand=True,
                ))
            self._values[table][id] = value
            self._save_cache("_values")
            if previous and changes:
                logger.debug(
                    "Value changed! Difference: {}".format(changes))
                pending.append((table, id, changes, previous, value))

    # run callbacks outside the mutex to avoid lockups
    for entry in pending:
        self._trigger_callbacks(*entry)
def call_get_record_values(self, **kwargs):
    """
    Call the server's getRecordValues endpoint to update the local record store.
    The keyword arguments map table names into lists of (or singular) record IDs
    to load for that table. Use True to refresh all known records for that table.
    """
    requests = []

    for table, ids in kwargs.items():
        # TODO: ids can be `True` and if it is then we take every
        # key from collection_view into consideration, is it OK?
        if ids is True:
            ids = self._values.get(table, {}).keys()
        ids = to_list(ids)

        # if we're in a transaction, add the requested IDs
        # to a queue to refresh when the transaction completes
        if self._client.in_transaction():
            pending = set(self._records_to_refresh.get(table, []))
            pending.update(ids)
            self._records_to_refresh[table] = list(pending)
            continue

        for raw_id in ids:
            requests.append({"table": table, "id": extract_id(raw_id)})

    if not requests:
        return

    logger.debug(f"Calling 'getRecordValues' endpoint for requests: {requests}")
    response = self._client.post("getRecordValues", {"requests": requests}).json()

    for request, result in zip(requests, response["results"]):
        self._update_record(
            table=request["table"],
            record_id=request["id"],
            value=result.get("value"),
            role=result.get("role"),
        )