def get_updated_account_data_for_user_txn(txn):
    sql = (
        "SELECT account_data_type, content FROM account_data"
        " WHERE user_id = ? AND stream_id > ?"
    )
    txn.execute(sql, (user_id, stream_id))

    global_account_data = {row[0]: json.loads(row[1]) for row in txn}

    sql = (
        "SELECT room_id, account_data_type, content FROM room_account_data"
        " WHERE user_id = ? AND stream_id > ?"
    )
    txn.execute(sql, (user_id, stream_id))

    account_data_by_room = {}
    for row in txn:
        room_account_data = account_data_by_room.setdefault(row[0], {})
        room_account_data[row[1]] = json.loads(row[2])

    return (global_account_data, account_data_by_room)

def get_account_data_for_user_txn(txn):
    rows = self._simple_select_list_txn(
        txn,
        "account_data",
        {"user_id": user_id},
        ["account_data_type", "content"],
    )

    global_account_data = {
        row["account_data_type"]: json.loads(row["content"]) for row in rows
    }

    rows = self._simple_select_list_txn(
        txn,
        "room_account_data",
        {"user_id": user_id},
        ["room_id", "account_data_type", "content"],
    )

    by_room = {}
    for row in rows:
        room_data = by_room.setdefault(row["room_id"], {})
        room_data[row["account_data_type"]] = json.loads(row["content"])

    return (global_account_data, by_room)

def _get_event_from_row(self, internal_metadata, js, redacted, rejected_reason=None):
    with Measure(self._clock, "_get_event_from_row"):
        d = json.loads(js)
        internal_metadata = json.loads(internal_metadata)

        if rejected_reason:
            rejected_reason = yield self._simple_select_one_onecol(
                table="rejections",
                keyvalues={"event_id": rejected_reason},
                retcol="reason",
                desc="_get_event_from_row_rejected_reason",
            )

        original_ev = FrozenEvent(
            d,
            internal_metadata_dict=internal_metadata,
            rejected_reason=rejected_reason,
        )

        redacted_event = None
        if redacted:
            redacted_event = prune_event(original_ev)

            redaction_id = yield self._simple_select_one_onecol(
                table="redactions",
                keyvalues={"redacts": redacted_event.event_id},
                retcol="event_id",
                desc="_get_event_from_row_redactions",
            )

            redacted_event.unsigned["redacted_by"] = redaction_id

            # Get the redaction event.
            because = yield self.get_event(
                redaction_id,
                check_redacted=False,
                allow_none=True,
            )

            if because:
                # It's fine to add the event directly, since get_pdu_json
                # will serialise this field correctly
                redacted_event.unsigned["redacted_because"] = because

        cache_entry = _EventCacheEntry(
            event=original_ev,
            redacted_event=redacted_event,
        )

        self._get_event_cache.prefill((original_ev.event_id,), cache_entry)

        defer.returnValue(cache_entry)

def _get_devices_with_keys_by_user_txn(self, txn, user_id):
    now_stream_id = self._device_list_id_gen.get_current_token()

    devices = self._get_e2e_device_keys_txn(
        txn, [(user_id, None)], include_all_devices=True
    )

    if devices:
        user_devices = devices[user_id]
        results = []
        for device_id, device in iteritems(user_devices):
            result = {"device_id": device_id}

            key_json = device.get("key_json", None)
            if key_json:
                result["keys"] = json.loads(key_json)

            device_display_name = device.get("device_display_name", None)
            if device_display_name:
                result["device_display_name"] = device_display_name

            results.append(result)

        return now_stream_id, results

    return now_stream_id, []

async def on_GET(self, request, room_id):
    requester = await self.auth.get_user_by_req(request, allow_guest=True)
    pagination_config = PaginationConfig.from_request(request, default_limit=10)
    as_client_event = b"raw" not in request.args
    filter_str = parse_string(request, b"filter", encoding="utf-8")
    if filter_str:
        filter_json = urlparse.unquote(filter_str)
        event_filter = Filter(json.loads(filter_json))  # type: Optional[Filter]
        if (
            event_filter
            and event_filter.filter_json.get("event_format", "client") == "federation"
        ):
            as_client_event = False
    else:
        event_filter = None

    msgs = await self.pagination_handler.get_messages(
        room_id=room_id,
        requester=requester,
        pagin_config=pagination_config,
        as_client_event=as_client_event,
        event_filter=event_filter,
    )

    return 200, msgs

def parse_json_value_from_request(request, allow_empty_body=False):
    """Parse a JSON value from the body of a twisted HTTP request.

    Args:
        request: the twisted HTTP request.
        allow_empty_body (bool): if True, an empty body will be accepted and
            turned into None

    Returns:
        The JSON value.

    Raises:
        SynapseError if the request body couldn't be decoded as JSON.
    """
    try:
        content_bytes = request.content.read()
    except Exception:
        raise SynapseError(400, "Error reading JSON content.")

    if not content_bytes and allow_empty_body:
        return None

    try:
        content = json.loads(content_bytes)
    except Exception as e:
        logger.warn("Unable to parse JSON: %s", e)
        raise SynapseError(400, "Content not JSON.", errcode=Codes.NOT_JSON)

    return content

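# A minimal sketch of how parse_json_value_from_request might be exercised,
# assuming the function and its dependencies above are in scope. `FakeRequest`
# is a hypothetical stand-in for a twisted request, not part of the original.
from io import BytesIO


class FakeRequest:
    def __init__(self, body):
        self.content = BytesIO(body)


# An empty body is only legal when allow_empty_body=True; otherwise the body
# must decode as JSON.
assert parse_json_value_from_request(FakeRequest(b""), allow_empty_body=True) is None
assert parse_json_value_from_request(FakeRequest(b'{"a": 1}')) == {"a": 1}
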
def on_claim_client_keys(self, origin, content):
    query = []
    for user_id, device_keys in content.get("one_time_keys", {}).items():
        for device_id, algorithm in device_keys.items():
            query.append((user_id, device_id, algorithm))

    results = yield self.store.claim_e2e_one_time_keys(query)

    json_result = {}
    for user_id, device_keys in results.items():
        for device_id, keys in device_keys.items():
            for key_id, json_bytes in keys.items():
                json_result.setdefault(user_id, {})[device_id] = {
                    key_id: json.loads(json_bytes)
                }

    logger.info(
        "Claimed one-time-keys: %s",
        ",".join((
            "%s for %s:%s" % (key_id, user_id, device_id)
            for user_id, user_keys in iteritems(json_result)
            for device_id, device_keys in iteritems(user_keys)
            for key_id, _ in iteritems(device_keys)
        )),
    )

    defer.returnValue({"one_time_keys": json_result})

def get_json(self, destination, path, args={}, retry_on_dns_fail=True,
             timeout=None, ignore_backoff=False):
    """ GETs some json from the given host homeserver and path

    Args:
        destination (str): The remote server to send the HTTP request to.
        path (str): The HTTP path.
        args (dict): A dictionary used to create query strings, defaults
            to an empty dict.
        timeout (int): How long to try (in ms) the destination for before
            giving up. None indicates no timeout and that the request will
            be retried.
        ignore_backoff (bool): true to ignore the historical backoff data
            and try the request anyway.

    Returns:
        Deferred: Succeeds when we get a 2xx HTTP response. The result will
        be the decoded JSON body.

        Fails with ``HTTPRequestException`` if we get an HTTP response
        code >= 300.

        Fails with ``NotRetryingDestination`` if we are not yet ready
        to retry this server.

        Fails with ``FederationDeniedError`` if this destination
        is not on our federation whitelist
    """
    logger.debug("get_json args: %s", args)
    logger.debug("Query bytes: %s Retry DNS: %s", args, retry_on_dns_fail)

    def body_callback(method, url_bytes, headers_dict):
        self.sign_request(destination, method, url_bytes, headers_dict)
        return None

    response = yield self._request(
        destination,
        "GET",
        path,
        query_bytes=encode_query_args(args),
        body_callback=body_callback,
        retry_on_dns_fail=retry_on_dns_fail,
        timeout=timeout,
        ignore_backoff=ignore_backoff,
    )

    if 200 <= response.code < 300:
        # We need to update the transactions table to say it was sent?
        check_content_type_is_json(response.headers)

    with logcontext.PreserveLoggingContext():
        body = yield readBody(response)

    defer.returnValue(json.loads(body))

async def on_GET(self, request, room_id, event_id):
    requester = await self.auth.get_user_by_req(request, allow_guest=True)

    limit = parse_integer(request, "limit", default=10)

    # picking the API shape for symmetry with /messages
    filter_bytes = parse_string(request, "filter")
    if filter_bytes:
        filter_json = urlparse.unquote(filter_bytes)
        event_filter = Filter(json.loads(filter_json))  # type: Optional[Filter]
    else:
        event_filter = None

    results = await self.room_context_handler.get_event_context(
        requester.user, room_id, event_id, limit, event_filter
    )

    if not results:
        raise SynapseError(404, "Event not found.", errcode=Codes.NOT_FOUND)

    time_now = self.clock.time_msec()
    results["events_before"] = await self._event_serializer.serialize_events(
        results["events_before"], time_now
    )
    results["event"] = await self._event_serializer.serialize_event(
        results["event"], time_now
    )
    results["events_after"] = await self._event_serializer.serialize_events(
        results["events_after"], time_now
    )
    results["state"] = await self._event_serializer.serialize_events(
        results["state"], time_now
    )

    return 200, results

def span_context_from_string(carrier):
    """
    Returns:
        The active span context decoded from a string.
    """
    carrier = json.loads(carrier)
    return opentracing.tracer.extract(opentracing.Format.TEXT_MAP, carrier)

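# For context, a sketch of the inverse direction: serialise the active span
# context to a JSON string via the same TEXT_MAP format. This helper is an
# illustrative assumption, not part of the original excerpt.
def span_context_to_string_sketch():
    carrier = {}
    # inject() fills the carrier dict with the propagation headers
    opentracing.tracer.inject(
        opentracing.tracer.active_span.context,
        opentracing.Format.TEXT_MAP,
        carrier,
    )
    return json.dumps(carrier)
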
def bind_threepid(self, creds, mxid):
    logger.debug("binding threepid %r to %s", creds, mxid)
    data = None

    if 'id_server' in creds:
        id_server = creds['id_server']
    elif 'idServer' in creds:
        id_server = creds['idServer']
    else:
        raise SynapseError(400, "No id_server in creds")

    if 'client_secret' in creds:
        client_secret = creds['client_secret']
    elif 'clientSecret' in creds:
        client_secret = creds['clientSecret']
    else:
        raise SynapseError(400, "No client_secret in creds")

    try:
        data = yield self.http_client.post_urlencoded_get_json(
            "https://%s%s" % (id_server, "/_matrix/identity/api/v1/3pid/bind"),
            {
                'sid': creds['sid'],
                'client_secret': client_secret,
                'mxid': mxid,
            },
        )
        logger.debug("bound threepid %r to %s", creds, mxid)
    except CodeMessageException as e:
        data = json.loads(e.msg)  # XXX WAT?

    defer.returnValue(data)

async def on_claim_client_keys(self, origin, content):
    query = []
    for user_id, device_keys in content.get("one_time_keys", {}).items():
        for device_id, algorithm in device_keys.items():
            query.append((user_id, device_id, algorithm))

    log_kv({"message": "Claiming one time keys.", "user, device pairs": query})
    results = await self.store.claim_e2e_one_time_keys(query)

    json_result = {}
    for user_id, device_keys in results.items():
        for device_id, keys in device_keys.items():
            for key_id, json_bytes in keys.items():
                json_result.setdefault(user_id, {})[device_id] = {
                    key_id: json.loads(json_bytes)
                }

    logger.info(
        "Claimed one-time-keys: %s",
        ",".join((
            "%s for %s:%s" % (key_id, user_id, device_id)
            for user_id, user_keys in iteritems(json_result)
            for device_id, device_keys in iteritems(user_keys)
            for key_id, _ in iteritems(device_keys)
        )),
    )

    return {"one_time_keys": json_result}

def _get_linearized_receipts_for_room(self, room_id, to_key, from_key=None):
    """See get_linearized_receipts_for_room
    """

    def f(txn):
        if from_key:
            sql = (
                "SELECT * FROM receipts_linearized WHERE"
                " room_id = ? AND stream_id > ? AND stream_id <= ?"
            )
            txn.execute(sql, (room_id, from_key, to_key))
        else:
            sql = (
                "SELECT * FROM receipts_linearized WHERE"
                " room_id = ? AND stream_id <= ?"
            )
            txn.execute(sql, (room_id, to_key))

        rows = self.db.cursor_to_dict(txn)

        return rows

    rows = yield self.db.runInteraction("get_linearized_receipts_for_room", f)

    if not rows:
        return []

    content = {}
    for row in rows:
        content.setdefault(row["event_id"], {}).setdefault(row["receipt_type"], {})[
            row["user_id"]
        ] = json.loads(row["data"])

    return [{"type": "m.receipt", "room_id": room_id, "content": content}]

def db_to_json(db_content):
    """
    Take some data from a database row and return a JSON-decoded object.

    Args:
        db_content (memoryview|buffer|bytes|bytearray|unicode)
    """
    # psycopg2 on Python 3 returns memoryview objects, which we need to
    # cast to bytes to decode
    if isinstance(db_content, memoryview):
        db_content = db_content.tobytes()

    # psycopg2 on Python 2 returns buffer objects, which we need to cast to
    # bytes to decode
    if PY2 and isinstance(db_content, builtins.buffer):
        db_content = bytes(db_content)

    # Decode it to a Unicode string before feeding it to json.loads, so we
    # consistently get a Unicode-containing object out.
    if isinstance(db_content, (bytes, bytearray)):
        db_content = db_content.decode("utf8")

    try:
        return json.loads(db_content)
    except Exception:
        logging.warning("Tried to decode '%r' as JSON and failed", db_content)
        raise

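# A small usage sketch, assuming db_to_json and its six/builtins imports are
# in scope; the sample values are illustrative only. memoryview input (as
# returned by psycopg2 on Python 3) and plain text decode to the same object.
raw = b'{"displayname": "Alice"}'
assert db_to_json(memoryview(raw)) == {"displayname": "Alice"}
assert db_to_json(raw.decode("utf8")) == {"displayname": "Alice"}
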
def get_e2e_device_keys(
    self, query_list, include_all_devices=False, include_deleted_devices=False
):
    """Fetch a list of device keys.

    Args:
        query_list(list): List of pairs of user_ids and device_ids.
        include_all_devices (bool): whether to include entries for devices
            that don't have device keys
        include_deleted_devices (bool): whether to include null entries for
            devices which no longer exist (but were in the query_list).
            This option only takes effect if include_all_devices is true.

    Returns:
        Dict mapping from user-id to dict mapping from device_id to
        dict containing "key_json", "device_display_name".
    """
    if not query_list:
        defer.returnValue({})

    results = yield self.runInteraction(
        "get_e2e_device_keys",
        self._get_e2e_device_keys_txn,
        query_list,
        include_all_devices,
        include_deleted_devices,
    )

    for user_id, device_keys in iteritems(results):
        for device_id, device_info in iteritems(device_keys):
            device_info["keys"] = json.loads(device_info.pop("key_json"))

    defer.returnValue(results)

def to_synapse_error(self):
    """Make a SynapseError based on an HTTPResponseException

    This is useful when a proxied request has failed, and we need to
    decide how to map the failure onto a matrix error to send back to the
    client.

    An attempt is made to parse the body of the http response as a matrix
    error. If that succeeds, the errcode and error message from the body
    are used as the errcode and error message in the new synapse error.

    Otherwise, the errcode is set to M_UNKNOWN, and the error message is
    set to the reason code from the HTTP response.

    Returns:
        SynapseError:
    """
    # try to parse the body as json, to get better errcode/msg, but
    # default to M_UNKNOWN with the HTTP status as the error text
    try:
        j = json.loads(self.response)
    except ValueError:
        j = {}

    if not isinstance(j, dict):
        j = {}

    errcode = j.pop('errcode', Codes.UNKNOWN)
    errmsg = j.pop('error', self.msg)

    return ProxiedRequestError(self.code, errmsg, errcode, j)

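# A pure-json illustration of the parsing step above, with a fabricated body:
# a well-formed matrix error keeps its errcode and message, while anything
# else would fall back to M_UNKNOWN plus the HTTP reason.
body = b'{"errcode": "M_FORBIDDEN", "error": "You are banned"}'
try:
    j = json.loads(body)
except ValueError:
    j = {}
if not isinstance(j, dict):
    j = {}
assert j.pop("errcode", "M_UNKNOWN") == "M_FORBIDDEN"
assert j.pop("error", "Forbidden") == "You are banned"
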
def reindex_search_txn(txn):
    sql = (
        "SELECT stream_ordering, event_id FROM events"
        " WHERE ? <= stream_ordering AND stream_ordering < ?"
        " ORDER BY stream_ordering DESC"
        " LIMIT ?"
    )

    txn.execute(sql, (target_min_stream_id, max_stream_id, batch_size))

    rows = txn.fetchall()
    if not rows:
        return 0

    min_stream_id = rows[-1][0]
    event_ids = [row[1] for row in rows]

    rows_to_update = []

    chunks = [event_ids[i : i + 100] for i in range(0, len(event_ids), 100)]
    for chunk in chunks:
        ev_rows = self._simple_select_many_txn(
            txn,
            table="event_json",
            column="event_id",
            iterable=chunk,
            retcols=["event_id", "json"],
            keyvalues={},
        )

        for row in ev_rows:
            event_id = row["event_id"]
            event_json = json.loads(row["json"])
            try:
                origin_server_ts = event_json["origin_server_ts"]
            except (KeyError, AttributeError):
                # If the event is missing a necessary field then
                # skip over it.
                continue

            rows_to_update.append((origin_server_ts, event_id))

    sql = "UPDATE events SET origin_server_ts = ? WHERE event_id = ?"

    for index in range(0, len(rows_to_update), INSERT_CLUMP_SIZE):
        clump = rows_to_update[index : index + INSERT_CLUMP_SIZE]
        txn.executemany(sql, clump)

    progress = {
        "target_min_stream_id_inclusive": target_min_stream_id,
        "max_stream_id_exclusive": min_stream_id,
        "rows_inserted": rows_inserted + len(rows_to_update),
    }

    self._background_update_progress_txn(
        txn, self.EVENT_ORIGIN_SERVER_TS_NAME, progress
    )

    return len(rows_to_update)

def _exceptionFromFailedRequest(self, response, body):
    try:
        jsonBody = json.loads(body)
        errcode = jsonBody['errcode']
        error = jsonBody['error']
        return MatrixCodeMessageException(response.code, error, errcode)
    except (ValueError, KeyError):
        return CodeMessageException(response.code, body)

def from_line(cls, line):
    stream_name, instance_name, token, row_json = line.split(" ", 3)
    return cls(
        stream_name,
        instance_name,
        None if token == "batch" else int(token),
        json.loads(row_json),
    )

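# Illustrative split of a replication line of the form
# "<stream_name> <instance_name> <token> <row_json>", as parsed by from_line
# above. The sample line is a stand-in, not taken from the original excerpt.
line = 'events master 12345 {"event_id": "$abc"}'
stream_name, instance_name, token, row_json = line.split(" ", 3)
assert stream_name == "events"
assert int(token) == 12345
assert json.loads(row_json) == {"event_id": "$abc"}
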
def _get_linearized_receipts_for_rooms(self, room_ids, to_key, from_key=None):
    if not room_ids:
        return {}

    def f(txn):
        if from_key:
            sql = """
                SELECT * FROM receipts_linearized WHERE
                stream_id > ? AND stream_id <= ? AND
            """
            clause, args = make_in_list_sql_clause(
                self.database_engine, "room_id", room_ids
            )

            txn.execute(sql + clause, [from_key, to_key] + list(args))
        else:
            sql = """
                SELECT * FROM receipts_linearized WHERE
                stream_id <= ? AND
            """

            clause, args = make_in_list_sql_clause(
                self.database_engine, "room_id", room_ids
            )

            txn.execute(sql + clause, [to_key] + list(args))

        return self.db.cursor_to_dict(txn)

    txn_results = yield self.db.runInteraction(
        "_get_linearized_receipts_for_rooms", f
    )

    results = {}
    for row in txn_results:
        # We want a single event per room, since we want to batch the
        # receipts by room, event and type.
        room_event = results.setdefault(
            row["room_id"],
            {"type": "m.receipt", "room_id": row["room_id"], "content": {}},
        )

        # The content is of the form:
        # {"$foo:bar": { "read": { "@user:host": <receipt> }, .. }, .. }
        event_entry = room_event["content"].setdefault(row["event_id"], {})
        receipt_type = event_entry.setdefault(row["receipt_type"], {})

        receipt_type[row["user_id"]] = json.loads(row["data"])

    results = {
        room_id: [results[room_id]] if room_id in results else []
        for room_id in room_ids
    }
    return results

def _get_devices_by_remote_txn(self, txn, destination, from_stream_id, now_stream_id):
    sql = """
        SELECT user_id, device_id, max(stream_id) FROM device_lists_outbound_pokes
        WHERE destination = ? AND ? < stream_id AND stream_id <= ? AND sent = ?
        GROUP BY user_id, device_id
        LIMIT 20
    """
    txn.execute(sql, (destination, from_stream_id, now_stream_id, False))

    # maps (user_id, device_id) -> stream_id
    query_map = {(r[0], r[1]): r[2] for r in txn}
    if not query_map:
        return (now_stream_id, [])

    if len(query_map) >= 20:
        now_stream_id = max(stream_id for stream_id in itervalues(query_map))

    devices = self._get_e2e_device_keys_txn(
        txn, query_map.keys(), include_all_devices=True
    )

    prev_sent_id_sql = """
        SELECT coalesce(max(stream_id), 0) as stream_id
        FROM device_lists_outbound_last_success
        WHERE destination = ? AND user_id = ? AND stream_id <= ?
    """

    results = []
    for user_id, user_devices in iteritems(devices):
        # The prev_id for the first row is always the last row before
        # `from_stream_id`
        txn.execute(prev_sent_id_sql, (destination, user_id, from_stream_id))
        rows = txn.fetchall()
        prev_id = rows[0][0]
        for device_id, device in iteritems(user_devices):
            stream_id = query_map[(user_id, device_id)]
            result = {
                "user_id": user_id,
                "device_id": device_id,
                "prev_id": [prev_id] if prev_id else [],
                "stream_id": stream_id,
            }

            prev_id = stream_id

            key_json = device.get("key_json", None)
            if key_json:
                result["keys"] = json.loads(key_json)
            device_display_name = device.get("device_display_name", None)
            if device_display_name:
                result["device_display_name"] = device_display_name

            results.append(result)

    return (now_stream_id, results)

def _get_media_mxcs_in_room_txn(self, txn, room_id):
    """Retrieves all the local and remote media MXC URIs in a given room

    Args:
        txn (cursor)
        room_id (str)

    Returns:
        The local and remote media as lists of tuples where the first
        element is the hostname and the second is the media ID.
    """
    mxc_re = re.compile("^mxc://([^/]+)/([^/#?]+)")

    sql = """
        SELECT stream_ordering, json
        FROM events
        JOIN event_json USING (room_id, event_id)
        WHERE room_id = ?
            %(where_clause)s
            AND contains_url = ? AND outlier = ?
        ORDER BY stream_ordering DESC
        LIMIT ?
    """
    txn.execute(sql % {"where_clause": ""}, (room_id, True, False, 100))

    local_media_mxcs = []
    remote_media_mxcs = []

    while True:
        next_token = None
        for stream_ordering, content_json in txn:
            next_token = stream_ordering
            event_json = json.loads(content_json)
            content = event_json["content"]
            content_url = content.get("url")
            thumbnail_url = content.get("info", {}).get("thumbnail_url")

            for url in (content_url, thumbnail_url):
                if not url:
                    continue
                matches = mxc_re.match(url)
                if matches:
                    hostname = matches.group(1)
                    media_id = matches.group(2)
                    if hostname == self.hs.hostname:
                        local_media_mxcs.append(media_id)
                    else:
                        remote_media_mxcs.append((hostname, media_id))

        if next_token is None:
            # We've gone through the whole room, so we're finished.
            break

        txn.execute(
            sql % {"where_clause": "AND stream_ordering < ?"},
            (room_id, next_token, True, False, 100),
        )

    return local_media_mxcs, remote_media_mxcs

def claim_one_time_keys(self, query, timeout):
    local_query = []
    remote_queries = {}

    for user_id, device_keys in query.get("one_time_keys", {}).items():
        # we use UserID.from_string to catch invalid user ids
        if self.is_mine(UserID.from_string(user_id)):
            for device_id, algorithm in device_keys.items():
                local_query.append((user_id, device_id, algorithm))
        else:
            domain = get_domain_from_id(user_id)
            remote_queries.setdefault(domain, {})[user_id] = device_keys

    results = yield self.store.claim_e2e_one_time_keys(local_query)

    json_result = {}
    failures = {}
    for user_id, device_keys in results.items():
        for device_id, keys in device_keys.items():
            for key_id, json_bytes in keys.items():
                json_result.setdefault(user_id, {})[device_id] = {
                    key_id: json.loads(json_bytes)
                }

    @defer.inlineCallbacks
    def claim_client_keys(destination):
        device_keys = remote_queries[destination]
        try:
            remote_result = yield self.federation.claim_client_keys(
                destination, {"one_time_keys": device_keys}, timeout=timeout
            )
            for user_id, keys in remote_result["one_time_keys"].items():
                if user_id in device_keys:
                    json_result[user_id] = keys
        except Exception as e:
            failures[destination] = _exception_to_failure(e)

    yield make_deferred_yieldable(defer.gatherResults([
        run_in_background(claim_client_keys, destination)
        for destination in remote_queries
    ], consumeErrors=True))

    logger.info(
        "Claimed one-time-keys: %s",
        ",".join((
            "%s for %s:%s" % (key_id, user_id, device_id)
            for user_id, user_keys in iteritems(json_result)
            for device_id, device_keys in iteritems(user_keys)
            for key_id, _ in iteritems(device_keys)
        )),
    )

    defer.returnValue({
        "one_time_keys": json_result,
        "failures": failures,
    })

def _get_linearized_receipts_for_rooms(self, room_ids, to_key, from_key=None):
    if not room_ids:
        defer.returnValue({})

    def f(txn):
        if from_key:
            sql = (
                "SELECT * FROM receipts_linearized WHERE"
                " room_id IN (%s) AND stream_id > ? AND stream_id <= ?"
            ) % (",".join(["?"] * len(room_ids)))

            args = list(room_ids)
            args.extend([from_key, to_key])

            txn.execute(sql, args)
        else:
            sql = (
                "SELECT * FROM receipts_linearized WHERE"
                " room_id IN (%s) AND stream_id <= ?"
            ) % (",".join(["?"] * len(room_ids)))

            args = list(room_ids)
            args.append(to_key)

            txn.execute(sql, args)

        return self.cursor_to_dict(txn)

    txn_results = yield self.runInteraction("_get_linearized_receipts_for_rooms", f)

    results = {}
    for row in txn_results:
        # We want a single event per room, since we want to batch the
        # receipts by room, event and type.
        room_event = results.setdefault(row["room_id"], {
            "type": "m.receipt",
            "room_id": row["room_id"],
            "content": {},
        })

        # The content is of the form:
        # {"$foo:bar": { "read": { "@user:host": <receipt> }, .. }, .. }
        event_entry = room_event["content"].setdefault(row["event_id"], {})
        receipt_type = event_entry.setdefault(row["receipt_type"], {})

        receipt_type[row["user_id"]] = json.loads(row["data"])

    results = {
        room_id: [results[room_id]] if room_id in results else []
        for room_id in room_ids
    }
    defer.returnValue(results)

def get_account_data_for_room_txn(txn):
    rows = self._simple_select_list_txn(
        txn,
        "room_account_data",
        {"user_id": user_id, "room_id": room_id},
        ["account_data_type", "content"],
    )

    return {
        row["account_data_type"]: json.loads(row["content"]) for row in rows
    }

def claim_one_time_keys(self, query, timeout):
    local_query = []
    remote_queries = {}

    for user_id, device_keys in query.get("one_time_keys", {}).items():
        # we use UserID.from_string to catch invalid user ids
        if self.is_mine(UserID.from_string(user_id)):
            for device_id, algorithm in device_keys.items():
                local_query.append((user_id, device_id, algorithm))
        else:
            domain = get_domain_from_id(user_id)
            remote_queries.setdefault(domain, {})[user_id] = device_keys

    results = yield self.store.claim_e2e_one_time_keys(local_query)

    json_result = {}
    failures = {}
    for user_id, device_keys in results.items():
        for device_id, keys in device_keys.items():
            for key_id, json_bytes in keys.items():
                json_result.setdefault(user_id, {})[device_id] = {
                    key_id: json.loads(json_bytes)
                }

    @defer.inlineCallbacks
    def claim_client_keys(destination):
        device_keys = remote_queries[destination]
        try:
            remote_result = yield self.federation.claim_client_keys(
                destination, {"one_time_keys": device_keys}, timeout=timeout
            )
            for user_id, keys in remote_result["one_time_keys"].items():
                if user_id in device_keys:
                    json_result[user_id] = keys
        except Exception as e:
            failure = _exception_to_failure(e)
            failures[destination] = failure

    yield make_deferred_yieldable(
        defer.gatherResults(
            [
                run_in_background(claim_client_keys, destination)
                for destination in remote_queries
            ],
            consumeErrors=True,
        )
    )

    logger.info(
        "Claimed one-time-keys: %s",
        ",".join(
            "%s for %s:%s" % (key_id, user_id, device_id)
            for user_id, user_keys in iteritems(json_result)
            for device_id, device_keys in iteritems(user_keys)
            for key_id, _ in iteritems(device_keys)
        ),
    )

    return {"one_time_keys": json_result, "failures": failures}

def _get_event_from_row(
    self, internal_metadata, js, redactions, format_version, rejected_reason=None
):
    """Parse an event row which has been read from the database

    Args:
        internal_metadata (str): json-encoded internal_metadata column
        js (str): json-encoded event body from event_json
        redactions (list[str]): a list of the events which claim to have
            redacted this event, from the redactions table
        format_version (str): the 'format_version' column
        rejected_reason (str|None): the reason this event was rejected, if
            any

    Returns:
        _EventCacheEntry
    """
    with Measure(self._clock, "_get_event_from_row"):
        d = json.loads(js)
        internal_metadata = json.loads(internal_metadata)

        if format_version is None:
            # This means that we stored the event before we had the concept
            # of an event format version, so it must be a V1 event.
            format_version = EventFormatVersions.V1

        original_ev = event_type_from_format_version(format_version)(
            event_dict=d,
            internal_metadata_dict=internal_metadata,
            rejected_reason=rejected_reason,
        )

        redacted_event = yield self._maybe_redact_event_row(original_ev, redactions)

        cache_entry = _EventCacheEntry(
            event=original_ev, redacted_event=redacted_event
        )

        self._get_event_cache.prefill((original_ev.event_id,), cache_entry)

        defer.returnValue(cache_entry)

def add_membership_profile_txn(txn):
    sql = """
        SELECT stream_ordering, event_id, events.room_id, event_json.json
        FROM events
        INNER JOIN event_json USING (event_id)
        INNER JOIN room_memberships USING (event_id)
        WHERE ? <= stream_ordering AND stream_ordering < ?
        AND type = 'm.room.member'
        ORDER BY stream_ordering DESC
        LIMIT ?
    """

    txn.execute(sql, (target_min_stream_id, max_stream_id, batch_size))

    rows = self.cursor_to_dict(txn)
    if not rows:
        return 0

    min_stream_id = rows[-1]["stream_ordering"]

    to_update = []
    for row in rows:
        event_id = row["event_id"]
        room_id = row["room_id"]
        try:
            event_json = json.loads(row["json"])
            content = event_json['content']
        except Exception:
            continue

        display_name = content.get("displayname", None)
        avatar_url = content.get("avatar_url", None)

        if display_name or avatar_url:
            to_update.append((display_name, avatar_url, event_id, room_id))

    to_update_sql = """
        UPDATE room_memberships SET display_name = ?, avatar_url = ?
        WHERE event_id = ? AND room_id = ?
    """

    for index in range(0, len(to_update), INSERT_CLUMP_SIZE):
        clump = to_update[index:index + INSERT_CLUMP_SIZE]
        txn.executemany(to_update_sql, clump)

    progress = {
        "target_min_stream_id_inclusive": target_min_stream_id,
        "max_stream_id_exclusive": min_stream_id,
    }

    self._background_update_progress_txn(
        txn, _MEMBERSHIP_PROFILE_UPDATE_NAME, progress
    )

    return len(rows)

def _deserialize_action(actions, is_highlight):
    """Custom deserializer for actions. This allows us to "compress" common actions
    """
    if actions:
        return json.loads(actions)

    if is_highlight:
        return DEFAULT_HIGHLIGHT_ACTION
    else:
        return DEFAULT_NOTIF_ACTION

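# For context, a sketch of the matching serializer under the same convention:
# the common default action sets are stored as an empty string, so the
# deserializer above can rebuild them from the is_highlight flag alone. The
# constants below are illustrative assumptions, not taken from this excerpt.
DEFAULT_NOTIF_ACTION = ["notify", {"set_tweak": "highlight", "value": False}]
DEFAULT_HIGHLIGHT_ACTION = [
    "notify", {"set_tweak": "sound", "value": "default"}, {"set_tweak": "highlight"}
]


def _serialize_action_sketch(actions, is_highlight):
    if is_highlight:
        if actions == DEFAULT_HIGHLIGHT_ACTION:
            return ""  # the is_highlight flag is enough to recover the actions
    elif actions == DEFAULT_NOTIF_ACTION:
        return ""
    return json.dumps(actions)


# Round trip: a default action set compresses to "" and decompresses intact.
assert _deserialize_action(
    _serialize_action_sketch(DEFAULT_NOTIF_ACTION, False), False
) == DEFAULT_NOTIF_ACTION
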
def get_group_role(self, group_id, role_id):
    role = yield self._simple_select_one(
        table="group_roles",
        keyvalues={"group_id": group_id, "role_id": role_id},
        retcols=("is_public", "profile"),
        desc="get_group_role",
    )

    role["profile"] = json.loads(role["profile"])

    return role

def get_group_category(self, group_id, category_id):
    category = yield self._simple_select_one(
        table="group_room_categories",
        keyvalues={"group_id": group_id, "category_id": category_id},
        retcols=("is_public", "profile"),
        desc="get_group_category",
    )

    category["profile"] = json.loads(category["profile"])

    return category

def _get_cached_user_device(self, user_id, device_id):
    content = yield self._simple_select_one_onecol(
        table="device_lists_remote_cache",
        keyvalues={"user_id": user_id, "device_id": device_id},
        retcol="content",
        desc="_get_cached_user_device",
    )
    defer.returnValue(json.loads(content))

def _load_rules(rawrules, enabled_map):
    ruleslist = []
    for rawrule in rawrules:
        rule = dict(rawrule)
        rule["conditions"] = json.loads(rawrule["conditions"])
        rule["actions"] = json.loads(rawrule["actions"])
        ruleslist.append(rule)

    # We're going to be mutating this a lot, so do a deep copy
    rules = list(list_with_base_rules(ruleslist))

    for i, rule in enumerate(rules):
        rule_id = rule['rule_id']
        if rule_id in enabled_map:
            if rule.get('enabled', True) != bool(enabled_map[rule_id]):
                # Rules are cached across users.
                rule = dict(rule)
                rule['enabled'] = bool(enabled_map[rule_id])
                rules[i] = rule

    return rules

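# A quick illustration of the enabled_map override logic above, on a made-up
# rule list (base-rule merging elided). Copy-before-mutate matters because the
# loaded rules are cached across users.
rules = [{"rule_id": ".m.rule.example", "enabled": True, "actions": ["notify"]}]
enabled_map = {".m.rule.example": False}

for i, rule in enumerate(rules):
    if rule["rule_id"] in enabled_map:
        if rule.get("enabled", True) != bool(enabled_map[rule["rule_id"]]):
            rule = dict(rule)  # copy so the shared cached rule is untouched
            rule["enabled"] = bool(enabled_map[rule["rule_id"]])
            rules[i] = rule

assert rules[0]["enabled"] is False
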
def _get_all_groups_changes_txn(txn):
    sql = """
        SELECT stream_id, group_id, user_id, type, content
        FROM local_group_updates
        WHERE ? < stream_id AND stream_id <= ?
        LIMIT ?
    """
    txn.execute(sql, (from_token, to_token, limit))
    return [
        (stream_id, group_id, user_id, gtype, json.loads(content_json))
        for stream_id, group_id, user_id, gtype, content_json in txn
    ]

def _get_rooms_for_summary_txn(txn):
    keyvalues = {"group_id": group_id}
    if not include_private:
        keyvalues["is_public"] = True

    sql = """
        SELECT room_id, is_public, category_id, room_order
        FROM group_summary_rooms
        WHERE group_id = ?
    """

    if not include_private:
        sql += " AND is_public = ?"
        txn.execute(sql, (group_id, True))
    else:
        txn.execute(sql, (group_id,))

    rooms = [
        {
            "room_id": row[0],
            "is_public": row[1],
            "category_id": row[2] if row[2] != _DEFAULT_CATEGORY_ID else None,
            "order": row[3],
        }
        for row in txn
    ]

    sql = """
        SELECT category_id, is_public, profile, cat_order
        FROM group_summary_room_categories
        INNER JOIN group_room_categories USING (group_id, category_id)
        WHERE group_id = ?
    """

    if not include_private:
        sql += " AND is_public = ?"
        txn.execute(sql, (group_id, True))
    else:
        txn.execute(sql, (group_id,))

    categories = {
        row[0]: {
            "is_public": row[1],
            "profile": json.loads(row[2]),
            "order": row[3],
        }
        for row in txn
    }

    return rooms, categories

def _get_users_for_summary_txn(txn):
    keyvalues = {"group_id": group_id}
    if not include_private:
        keyvalues["is_public"] = True

    sql = """
        SELECT user_id, is_public, role_id, user_order
        FROM group_summary_users
        WHERE group_id = ?
    """

    if not include_private:
        sql += " AND is_public = ?"
        txn.execute(sql, (group_id, True))
    else:
        txn.execute(sql, (group_id,))

    users = [
        {
            "user_id": row[0],
            "is_public": row[1],
            "role_id": row[2] if row[2] != _DEFAULT_ROLE_ID else None,
            "order": row[3],
        }
        for row in txn
    ]

    sql = """
        SELECT role_id, is_public, profile, role_order
        FROM group_summary_roles
        INNER JOIN group_roles USING (group_id, role_id)
        WHERE group_id = ?
    """

    if not include_private:
        sql += " AND is_public = ?"
        txn.execute(sql, (group_id, True))
    else:
        txn.execute(sql, (group_id,))

    roles = {
        row[0]: {
            "is_public": row[1],
            "profile": json.loads(row[2]),
            "order": row[3],
        }
        for row in txn
    }

    return users, roles

def handleResponse(self, response_body_bytes):
    try:
        json_response = json.loads(response_body_bytes)
    except ValueError:
        # logger.info("Invalid JSON response from %s",
        #             self.transport.getHost())
        self.transport.abortConnection()
        return

    certificate = self.transport.getPeerCertificate()
    self.callback((json_response, certificate))
    self.transport.abortConnection()
    self.timer.cancel()

def get_all_updated_receipts_txn(txn):
    sql = (
        "SELECT stream_id, room_id, receipt_type, user_id, event_id, data"
        " FROM receipts_linearized"
        " WHERE ? < stream_id AND stream_id <= ?"
        " ORDER BY stream_id ASC"
    )
    args = [last_id, current_id]
    if limit is not None:
        sql += " LIMIT ?"
        args.append(limit)
    txn.execute(sql, args)

    # Materialise the rows as a list: returning a lazy generator here would
    # read from the cursor after the transaction has completed.
    return [r[0:5] + (json.loads(r[5]),) for r in txn]

def _get_groups_changes_for_user_txn(txn):
    sql = """
        SELECT group_id, membership, type, u.content
        FROM local_group_updates AS u
        INNER JOIN local_group_membership USING (group_id, user_id)
        WHERE user_id = ? AND ? < stream_id AND stream_id <= ?
    """
    txn.execute(sql, (user_id, from_token, to_token))
    return [
        {
            "group_id": group_id,
            "membership": membership,
            "type": gtype,
            "content": json.loads(content_json),
        }
        for group_id, membership, gtype, content_json in txn
    ]

def _get_media_mxcs_in_room_txn(self, txn, room_id):
    """Retrieves all the local and remote media MXC URIs in a given room

    Args:
        txn (cursor)
        room_id (str)

    Returns:
        The local and remote media as lists of tuples where the first
        element is the hostname and the second is the media ID.
    """
    mxc_re = re.compile("^mxc://([^/]+)/([^/#?]+)")

    next_token = self.get_current_events_token() + 1
    local_media_mxcs = []
    remote_media_mxcs = []

    while next_token:
        sql = """
            SELECT stream_ordering, json
            FROM events
            JOIN event_json USING (room_id, event_id)
            WHERE room_id = ?
                AND stream_ordering < ?
                AND contains_url = ? AND outlier = ?
            ORDER BY stream_ordering DESC
            LIMIT ?
        """
        txn.execute(sql, (room_id, next_token, True, False, 100))

        next_token = None
        for stream_ordering, content_json in txn:
            next_token = stream_ordering
            event_json = json.loads(content_json)
            content = event_json["content"]
            content_url = content.get("url")
            thumbnail_url = content.get("info", {}).get("thumbnail_url")

            for url in (content_url, thumbnail_url):
                if not url:
                    continue
                matches = mxc_re.match(url)
                if matches:
                    hostname = matches.group(1)
                    media_id = matches.group(2)
                    if hostname == self.hs.hostname:
                        local_media_mxcs.append(media_id)
                    else:
                        remote_media_mxcs.append((hostname, media_id))

    return local_media_mxcs, remote_media_mxcs

def get_group_category(self, group_id, category_id):
    category = yield self._simple_select_one(
        table="group_room_categories",
        keyvalues={"group_id": group_id, "category_id": category_id},
        retcols=("is_public", "profile"),
        desc="get_group_category",
    )

    category["profile"] = json.loads(category["profile"])

    defer.returnValue(category)

def get_account_data_for_room_and_type_txn(txn):
    content_json = self._simple_select_one_onecol_txn(
        txn,
        table="room_account_data",
        keyvalues={
            "user_id": user_id,
            "room_id": room_id,
            "account_data_type": account_data_type,
        },
        retcol="content",
        allow_none=True,
    )

    return json.loads(content_json) if content_json else None

def get_group_role(self, group_id, role_id):
    role = yield self._simple_select_one(
        table="group_roles",
        keyvalues={"group_id": group_id, "role_id": role_id},
        retcols=("is_public", "profile"),
        desc="get_group_role",
    )

    role["profile"] = json.loads(role["profile"])

    defer.returnValue(role)

def _one_time_keys_match(old_key_json, new_key):
    old_key = json.loads(old_key_json)

    # if either is a string rather than an object, they must match exactly
    if not isinstance(old_key, dict) or not isinstance(new_key, dict):
        return old_key == new_key

    # otherwise, we strip off the 'signatures' if any, because it's legitimate
    # for different upload attempts to have different signatures.
    old_key.pop("signatures", None)
    new_key_copy = dict(new_key)
    new_key_copy.pop("signatures", None)

    return old_key == new_key_copy

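# Illustrative check of the signature-stripping behaviour; the key material
# below is fabricated for the example.
stored = '{"key": "base64+bytes", "signatures": {"@a:hs": {"ed25519:X": "sig1"}}}'
uploaded = {"key": "base64+bytes", "signatures": {"@a:hs": {"ed25519:X": "sig2"}}}
assert _one_time_keys_match(stored, uploaded)  # same key, different signature
assert not _one_time_keys_match(stored, {"key": "other"})  # different key
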
def _get_linearized_receipts_for_room(self, room_id, to_key, from_key=None):
    """See get_linearized_receipts_for_room
    """

    def f(txn):
        if from_key:
            sql = (
                "SELECT * FROM receipts_linearized WHERE"
                " room_id = ? AND stream_id > ? AND stream_id <= ?"
            )
            txn.execute(sql, (room_id, from_key, to_key))
        else:
            sql = (
                "SELECT * FROM receipts_linearized WHERE"
                " room_id = ? AND stream_id <= ?"
            )
            txn.execute(sql, (room_id, to_key))

        rows = self.cursor_to_dict(txn)

        return rows

    rows = yield self.runInteraction("get_linearized_receipts_for_room", f)

    if not rows:
        defer.returnValue([])

    content = {}
    for row in rows:
        content.setdefault(row["event_id"], {}).setdefault(row["receipt_type"], {})[
            row["user_id"]
        ] = json.loads(row["data"])

    defer.returnValue([{
        "type": "m.receipt",
        "room_id": room_id,
        "content": content,
    }])

def _do_background_update(self, update_name, desired_duration_ms):
    logger.info("Starting update batch on background update '%s'", update_name)

    update_handler = self._background_update_handlers[update_name]

    performance = self._background_update_performance.get(update_name)

    if performance is None:
        performance = BackgroundUpdatePerformance(update_name)
        self._background_update_performance[update_name] = performance

    items_per_ms = performance.average_items_per_ms()

    if items_per_ms is not None:
        batch_size = int(desired_duration_ms * items_per_ms)
        # Clamp the batch size so that we always make progress
        batch_size = max(batch_size, self.MINIMUM_BACKGROUND_BATCH_SIZE)
    else:
        batch_size = self.DEFAULT_BACKGROUND_BATCH_SIZE

    progress_json = yield self._simple_select_one_onecol(
        "background_updates",
        keyvalues={"update_name": update_name},
        retcol="progress_json",
    )

    progress = json.loads(progress_json)

    time_start = self._clock.time_msec()
    items_updated = yield update_handler(progress, batch_size)
    time_stop = self._clock.time_msec()

    duration_ms = time_stop - time_start

    logger.info(
        "Updating %r. Updated %r items in %rms."
        " (total_rate=%r/ms, current_rate=%r/ms, total_updated=%r, batch_size=%r)",
        update_name,
        items_updated,
        duration_ms,
        performance.total_items_per_ms(),
        performance.average_items_per_ms(),
        performance.total_item_count,
        batch_size,
    )

    performance.update(items_updated, duration_ms)

    defer.returnValue(len(self._background_update_performance))

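# The batch sizing above is simple rate extrapolation: if past batches
# averaged items_per_ms, a batch aiming for desired_duration_ms is sized
# proportionally, floored at a minimum so progress never stalls. The numbers
# below are illustrative only.
desired_duration_ms = 100
items_per_ms = 2.5  # measured from previous batches
MINIMUM_BACKGROUND_BATCH_SIZE = 100
batch_size = max(
    int(desired_duration_ms * items_per_ms), MINIMUM_BACKGROUND_BATCH_SIZE
)
assert batch_size == 250
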
def get_new_messages_for_remote_destination_txn(txn):
    sql = (
        "SELECT stream_id, messages_json FROM device_federation_outbox"
        " WHERE destination = ?"
        " AND ? < stream_id AND stream_id <= ?"
        " ORDER BY stream_id ASC"
        " LIMIT ?"
    )
    txn.execute(sql, (destination, last_stream_id, current_stream_id, limit))

    messages = []
    for row in txn:
        stream_pos = row[0]
        messages.append(json.loads(row[1]))

    if len(messages) < limit:
        stream_pos = current_stream_id

    return (messages, stream_pos)

def get_tags_for_room(self, user_id, room_id):
    """Get all the tags for the given room

    Args:
        user_id(str): The user to get tags for
        room_id(str): The room to get tags for

    Returns:
        A deferred dict mapping each tag name to its JSON-decoded content.
    """
    return self._simple_select_list(
        table="room_tags",
        keyvalues={"user_id": user_id, "room_id": room_id},
        retcols=("tag", "content"),
        desc="get_tags_for_room",
    ).addCallback(
        lambda rows: {row["tag"]: json.loads(row["content"]) for row in rows}
    )

def get_global_account_data_by_type_for_user(self, data_type, user_id):
    """
    Returns:
        Deferred: A dict
    """
    result = yield self._simple_select_one_onecol(
        table="account_data",
        keyvalues={"user_id": user_id, "account_data_type": data_type},
        retcol="content",
        desc="get_global_account_data_by_type_for_user",
        allow_none=True,
    )

    if result:
        defer.returnValue(json.loads(result))
    else:
        defer.returnValue(None)

def get_group_roles(self, group_id):
    rows = yield self._simple_select_list(
        table="group_roles",
        keyvalues={"group_id": group_id},
        retcols=("role_id", "is_public", "profile"),
        desc="get_group_roles",
    )

    defer.returnValue({
        row["role_id"]: {
            "is_public": row["is_public"],
            "profile": json.loads(row["profile"]),
        }
        for row in rows
    })