Example #1
    def insert_graph_receipt_txn(self, txn, room_id, receipt_type,
                                 user_id, event_ids, data):
        txn.call_after(
            self.get_receipts_for_room.invalidate, (room_id, receipt_type)
        )
        txn.call_after(
            self._invalidate_get_users_with_receipts_in_room,
            room_id, receipt_type, user_id,
        )
        txn.call_after(
            self.get_receipts_for_user.invalidate, (user_id, receipt_type)
        )
        # FIXME: This shouldn't invalidate the whole cache
        txn.call_after(self._get_linearized_receipts_for_room.invalidate_many, (room_id,))

        self._simple_delete_txn(
            txn,
            table="receipts_graph",
            keyvalues={
                "room_id": room_id,
                "receipt_type": receipt_type,
                "user_id": user_id,
            }
        )
        self._simple_insert_txn(
            txn,
            table="receipts_graph",
            values={
                "room_id": room_id,
                "receipt_type": receipt_type,
                "user_id": user_id,
                "event_ids": json.dumps(event_ids),
                "data": json.dumps(data),
            }
        )
Example #3
    def _add_messages_to_local_device_inbox_txn(
        self, txn, stream_id, messages_by_user_then_device
    ):
        sql = "UPDATE device_max_stream_id" " SET stream_id = ?" " WHERE stream_id < ?"
        txn.execute(sql, (stream_id, stream_id))

        local_by_user_then_device = {}
        for user_id, messages_by_device in messages_by_user_then_device.items():
            messages_json_for_user = {}
            devices = list(messages_by_device.keys())
            if len(devices) == 1 and devices[0] == "*":
                # Handle wildcard device_ids.
                sql = "SELECT device_id FROM devices" " WHERE user_id = ?"
                txn.execute(sql, (user_id,))
                message_json = json.dumps(messages_by_device["*"])
                for row in txn:
                    # Add the message for all devices for this user on this
                    # server.
                    device = row[0]
                    messages_json_for_user[device] = message_json
            else:
                if not devices:
                    continue
                sql = (
                    "SELECT device_id FROM devices"
                    " WHERE user_id = ? AND device_id IN ("
                    + ",".join("?" * len(devices))
                    + ")"
                )
                # TODO: Maybe this needs to be done in batches if there are
                # too many local devices for a given user.
                txn.execute(sql, [user_id] + devices)
                for row in txn:
                    # Only insert into the local inbox if the device exists on
                    # this server
                    device = row[0]
                    message_json = json.dumps(messages_by_device[device])
                    messages_json_for_user[device] = message_json

            if messages_json_for_user:
                local_by_user_then_device[user_id] = messages_json_for_user

        if not local_by_user_then_device:
            return

        sql = (
            "INSERT INTO device_inbox"
            " (user_id, device_id, stream_id, message_json)"
            " VALUES (?,?,?,?)"
        )
        rows = []
        for user_id, messages_by_device in local_by_user_then_device.items():
            for device_id, message_json in messages_by_device.items():
                rows.append((user_id, device_id, stream_id, message_json))

        txn.executemany(sql, rows)
Example #5
    def _add_messages_to_local_device_inbox_txn(self, txn, stream_id,
                                                messages_by_user_then_device):
        local_by_user_then_device = {}
        for user_id, messages_by_device in messages_by_user_then_device.items(
        ):
            messages_json_for_user = {}
            devices = list(messages_by_device.keys())
            if len(devices) == 1 and devices[0] == "*":
                # Handle wildcard device_ids.
                sql = "SELECT device_id FROM devices WHERE user_id = ?"
                txn.execute(sql, (user_id, ))
                message_json = json.dumps(messages_by_device["*"])
                for row in txn:
                    # Add the message for all devices for this user on this
                    # server.
                    device = row[0]
                    messages_json_for_user[device] = message_json
            else:
                if not devices:
                    continue

                clause, args = make_in_list_sql_clause(txn.database_engine,
                                                       "device_id", devices)
                sql = "SELECT device_id FROM devices WHERE user_id = ? AND " + clause

                # TODO: Maybe this needs to be done in batches if there are
                # too many local devices for a given user.
                txn.execute(sql, [user_id] + list(args))
                for row in txn:
                    # Only insert into the local inbox if the device exists on
                    # this server
                    device = row[0]
                    message_json = json.dumps(messages_by_device[device])
                    messages_json_for_user[device] = message_json

            if messages_json_for_user:
                local_by_user_then_device[user_id] = messages_json_for_user

        if not local_by_user_then_device:
            return

        sql = ("INSERT INTO device_inbox"
               " (user_id, device_id, stream_id, message_json)"
               " VALUES (?,?,?,?)")
        rows = []
        for user_id, messages_by_device in local_by_user_then_device.items():
            for device_id, message_json in messages_by_device.items():
                rows.append((user_id, device_id, stream_id, message_json))

        txn.executemany(sql, rows)
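
Compared with the hand-rolled ",".join("?" * len(devices)) in Example #3, this version delegates placeholder construction to make_in_list_sql_clause. A minimal sketch of what such a helper plausibly returns (the real Synapse helper also special-cases engines with native list support, so treat the name and behaviour here as assumptions):

    def make_in_list_sql_clause_sketch(column, values):
        # Build "column IN (?,?,...)" plus the matching argument list,
        # mirroring the manual construction in Example #3.
        placeholders = ",".join("?" * len(values))
        return "%s IN (%s)" % (column, placeholders), list(values)

    clause, args = make_in_list_sql_clause_sketch("device_id", ["d1", "d2"])
    # clause == "device_id IN (?,?)", args == ["d1", "d2"]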
Example #6
    def upsert_group_role(self, group_id, role_id, profile, is_public):
        """Add/remove user role
        """
        insertion_values = {}
        update_values = {"role_id": role_id}  # This cannot be empty

        if profile is None:
            insertion_values["profile"] = "{}"
        else:
            update_values["profile"] = json.dumps(profile)

        if is_public is None:
            insertion_values["is_public"] = True
        else:
            update_values["is_public"] = is_public

        return self._simple_upsert(
            table="group_roles",
            keyvalues={
                "group_id": group_id,
                "role_id": role_id,
            },
            values=update_values,
            insertion_values=insertion_values,
            desc="upsert_group_role",
        )
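
The split between values and insertion_values follows upsert semantics: values are written on every call, while insertion_values only take effect when the row is first created. A toy in-memory model of that behaviour (not Synapse's actual implementation, which goes through SQL):

    def simple_upsert_sketch(rows, keyvalues, values, insertion_values):
        # 'values' always win; 'insertion_values' only apply to a new row.
        key = tuple(sorted(keyvalues.items()))
        if key in rows:
            rows[key].update(values)
        else:
            rows[key] = {**keyvalues, **insertion_values, **values}
        return rows[key]

    rows = {}
    simple_upsert_sketch(
        rows,
        keyvalues={"group_id": "g", "role_id": "r"},
        values={"is_public": False},
        insertion_values={"profile": "{}"},  # only stored on first insert
    )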
Example #7
    def upsert_group_category(self, group_id, category_id, profile, is_public):
        """Add/update room category for group
        """
        insertion_values = {}
        update_values = {"category_id": category_id}  # This cannot be empty

        if profile is None:
            insertion_values["profile"] = "{}"
        else:
            update_values["profile"] = json.dumps(profile)

        if is_public is None:
            insertion_values["is_public"] = True
        else:
            update_values["is_public"] = is_public

        return self._simple_upsert(
            table="group_room_categories",
            keyvalues={
                "group_id": group_id,
                "category_id": category_id,
            },
            values=update_values,
            insertion_values=insertion_values,
            desc="upsert_group_category",
        )
Example #8
def respond_with_json(
    request,
    code,
    json_object,
    send_cors=False,
    response_code_message=None,
    pretty_print=False,
    canonical_json=True,
):
    # could alternatively use request.notifyFinish() and flip a flag when
    # the Deferred fires, but since the flag is RIGHT THERE it seems like
    # a waste.
    if request._disconnected:
        logger.warning(
            "Not sending response to request %s, already disconnected.", request
        )
        return

    if pretty_print:
        json_bytes = encode_pretty_printed_json(json_object) + b"\n"
    else:
        if canonical_json or synapse.events.USE_FROZEN_DICTS:
            # canonicaljson already encodes to bytes
            json_bytes = encode_canonical_json(json_object)
        else:
            json_bytes = json.dumps(json_object).encode("utf-8")

    return respond_with_json_bytes(
        request,
        code,
        json_bytes,
        send_cors=send_cors,
        response_code_message=response_code_message,
    )
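
On the canonical path, encode_canonical_json comes from the canonicaljson package and returns compact, key-sorted bytes. A rough stdlib approximation (the real encoder additionally enforces Matrix's canonical-JSON rules, e.g. around floats and escaping):

    import json

    def approx_canonical_json(obj):
        # Sorted keys, no insignificant whitespace, UTF-8 bytes.
        return json.dumps(
            obj, sort_keys=True, separators=(",", ":"), ensure_ascii=False
        ).encode("utf-8")

    approx_canonical_json({"b": 1, "a": 2})  # b'{"a":2,"b":1}'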
Example #9
    def _update_remote_device_list_cache_txn(self, txn, user_id, devices, stream_id):
        self._simple_delete_txn(
            txn, table="device_lists_remote_cache", keyvalues={"user_id": user_id}
        )

        self._simple_insert_many_txn(
            txn,
            table="device_lists_remote_cache",
            values=[
                {
                    "user_id": user_id,
                    "device_id": content["device_id"],
                    "content": json.dumps(content),
                }
                for content in devices
            ],
        )

        txn.call_after(self._get_cached_devices_for_user.invalidate, (user_id,))
        txn.call_after(self._get_cached_user_device.invalidate_many, (user_id,))
        txn.call_after(
            self.get_device_list_last_stream_id_for_remote.invalidate, (user_id,)
        )

        self._simple_upsert_txn(
            txn,
            table="device_lists_remote_extremeties",
            keyvalues={"user_id": user_id},
            values={"stream_id": stream_id},
            # we don't need to lock, because we can assume we are the only thread
            # updating this user's extremity.
            lock=False,
        )
Example #10
    def login(
        self,
        username,
        password,
        device_id=None,
        custom_headers: Optional[Iterable[Tuple[Union[bytes, str],
                                                Union[bytes, str]]]] = None,
    ):
        """
        Log in a user, and get an access token. Requires the Login API be
        registered.

        """
        body = {
            "type": "m.login.password",
            "user": username,
            "password": password
        }
        if device_id:
            body["device_id"] = device_id

        channel = self.make_request(
            "POST",
            "/_matrix/client/r0/login",
            json.dumps(body).encode("utf8"),
            custom_headers=custom_headers,
        )
        self.assertEqual(channel.code, 200, channel.result)

        access_token = channel.json_body["access_token"]
        return access_token
Example #11
        def _create_appservice_txn(txn):
            # work out new txn id (highest txn id for this service += 1)
            # The highest id may be the last one sent (in which case it is last_txn)
            # or it may be the highest in the txns list (which are waiting to be/are
            # being sent)
            last_txn_id = self._get_last_txn(txn, service.id)

            txn.execute(
                "SELECT MAX(txn_id) FROM application_services_txns WHERE as_id=?",
                (service.id,),
            )
            highest_txn_id = txn.fetchone()[0]
            if highest_txn_id is None:
                highest_txn_id = 0

            new_txn_id = max(highest_txn_id, last_txn_id) + 1

            # Insert new txn into txn table
            event_ids = json.dumps([e.event_id for e in events])
            txn.execute(
                "INSERT INTO application_services_txns(as_id, txn_id, event_ids) "
                "VALUES(?,?,?)",
                (service.id, new_txn_id, event_ids),
            )
            return AppServiceTransaction(service=service, id=new_txn_id, events=events)
Example #12
def respond_with_json(request, code, json_object, send_cors=False,
                      response_code_message=None, pretty_print=False,
                      canonical_json=True):
    # could alternatively use request.notifyFinish() and flip a flag when
    # the Deferred fires, but since the flag is RIGHT THERE it seems like
    # a waste.
    if request._disconnected:
        logger.warn(
            "Not sending response to request %s, already disconnected.",
            request)
        return

    if pretty_print:
        json_bytes = encode_pretty_printed_json(json_object) + b"\n"
    else:
        if canonical_json or synapse.events.USE_FROZEN_DICTS:
            # canonicaljson already encodes to bytes
            json_bytes = encode_canonical_json(json_object)
        else:
            json_bytes = json.dumps(json_object).encode("utf-8")

    return respond_with_json_bytes(
        request, code, json_bytes,
        send_cors=send_cors,
        response_code_message=response_code_message,
    )
Example #13
    def _update_remote_device_list_cache_txn(self, txn, user_id, devices,
                                             stream_id):
        self._simple_delete_txn(
            txn,
            table="device_lists_remote_cache",
            keyvalues={
                "user_id": user_id,
            },
        )

        self._simple_insert_many_txn(txn,
                                     table="device_lists_remote_cache",
                                     values=[{
                                         "user_id": user_id,
                                         "device_id": content["device_id"],
                                         "content": json.dumps(content),
                                     } for content in devices])

        txn.call_after(self._get_cached_devices_for_user.invalidate,
                       (user_id, ))
        txn.call_after(self._get_cached_user_device.invalidate_many,
                       (user_id, ))
        txn.call_after(
            self.get_device_list_last_stream_id_for_remote.invalidate,
            (user_id, ))

        self._simple_upsert_txn(txn,
                                table="device_lists_remote_extremeties",
                                keyvalues={
                                    "user_id": user_id,
                                },
                                values={
                                    "stream_id": stream_id,
                                })
Example #14
    def upsert_group_role(self, group_id, role_id, profile, is_public):
        """Add/remove user role
        """
        insertion_values = {}
        update_values = {"role_id": role_id}  # This cannot be empty

        if profile is None:
            insertion_values["profile"] = "{}"
        else:
            update_values["profile"] = json.dumps(profile)

        if is_public is None:
            insertion_values["is_public"] = True
        else:
            update_values["is_public"] = is_public

        return self.db.simple_upsert(
            table="group_roles",
            keyvalues={
                "group_id": group_id,
                "role_id": role_id
            },
            values=update_values,
            insertion_values=insertion_values,
            desc="upsert_group_role",
        )
Example #15
    def make_request(
        self,
        method,
        path,
        content=b"",
        access_token=None,
        request=SynapseRequest,
        shorthand=True,
    ):
        """
        Create a SynapseRequest at the path using the method and containing the
        given content.

        Args:
            method (bytes/unicode): The HTTP request method ("verb").
            path (bytes/unicode): The HTTP path, suitably URL encoded (e.g.
            escaped UTF-8 & spaces and such).
            content (bytes or dict): The body of the request. JSON-encoded, if
            a dict.
            shorthand: Whether to try and be helpful and prefix the given URL
            with the usual REST API path, if it doesn't contain it.

        Returns:
            A synapse.http.site.SynapseRequest.
        """
        if isinstance(content, dict):
            content = json.dumps(content).encode('utf8')

        return make_request(self.reactor, method, path, content, access_token,
                            request, shorthand)
Example #16
    def add_tag_to_room(self, user_id, room_id, tag, content):
        """Add a tag to a room for a user.
        Args:
            user_id(str): The user to add a tag for.
            room_id(str): The room to add a tag for.
            tag(str): The tag name to add.
            content(dict): A json object to associate with the tag.
        Returns:
            A deferred that completes once the tag has been added.
        """
        content_json = json.dumps(content)

        def add_tag_txn(txn, next_id):
            self._simple_upsert_txn(
                txn,
                table="room_tags",
                keyvalues={
                    "user_id": user_id,
                    "room_id": room_id,
                    "tag": tag,
                },
                values={
                    "content": content_json,
                }
            )
            self._update_revision_txn(txn, user_id, room_id, next_id)

        with self._account_data_id_gen.get_next() as next_id:
            yield self.runInteraction("add_tag", add_tag_txn, next_id)

        self.get_tags_for_user.invalidate((user_id,))

        result = self._account_data_id_gen.get_current_token()
        defer.returnValue(result)
Example #17
    async def mark_ui_auth_stage_complete(
        self, session_id: str, stage_type: str, result: Union[str, bool, JsonDict],
    ):
        """
        Mark a session stage as completed.

        Args:
            session_id: The ID of the corresponding session.
            stage_type: The completed stage type.
            result: The result of the stage verification.
        Raises:
            StoreError if the session cannot be found.
        """
        # Add (or update) the results of the current stage to the database.
        #
        # Note that we need to allow for the same stage to complete multiple
        # times here so that registration is idempotent.
        try:
            await self.db.simple_upsert(
                table="ui_auth_sessions_credentials",
                keyvalues={"session_id": session_id, "stage_type": stage_type},
                values={"result": json.dumps(result)},
                desc="mark_ui_auth_stage_complete",
            )
        except self.db.engine.module.IntegrityError:
            raise StoreError(400, "Unknown session ID: %s" % (session_id,))
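
result may be a str, bool, or dict, and json.dumps handles all three uniformly; json.loads recovers the original value when the row is read back. A quick check:

    import json

    for result in ["email", True, {"sid": "abc"}]:
        stored = json.dumps(result)          # '"email"', 'true', '{"sid": "abc"}'
        assert json.loads(stored) == result  # round-trips to the same value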
Example #18
 def on_POST(self, request):
     saml2_auth = None
     try:
         conf = config.SPConfig()
         conf.load_file(self.sp_config)
         SP = Saml2Client(conf)
         saml2_auth = SP.parse_authn_request_response(
             request.args['SAMLResponse'][0], BINDING_HTTP_POST)
     except Exception as e:        # Not authenticated
         logger.exception(e)
     if saml2_auth and saml2_auth.status_ok() and not saml2_auth.not_signed:
         username = saml2_auth.name_id.text
         handler = self.handlers.registration_handler
         (user_id, token) = yield handler.register_saml2(username)
         # Forward to the RelayState callback along with ava
         if 'RelayState' in request.args:
             request.redirect(urllib.parse.unquote(
                              request.args['RelayState'][0]) +
                              '?status=authenticated&access_token=' +
                              token + '&user_id=' + user_id + '&ava=' +
                              urllib.parse.quote(json.dumps(saml2_auth.ava)))
             finish_request(request)
             defer.returnValue(None)
         defer.returnValue((200, {"status": "authenticated",
                                  "user_id": user_id, "token": token,
                                  "ava": saml2_auth.ava}))
     elif 'RelayState' in request.args:
         request.redirect(urllib.parse.unquote(
                          request.args['RelayState'][0]) +
                          '?status=not_authenticated')
         finish_request(request)
         defer.returnValue(None)
     defer.returnValue((200, {"status": "not_authenticated"}))
Example #19
    def set_push_rule_actions(self, user_id, rule_id, actions, is_default_rule):
        actions_json = json.dumps(actions)

        def set_push_rule_actions_txn(txn, stream_id, event_stream_ordering):
            if is_default_rule:
                # Add a dummy rule to the rules table with the user specified
                # actions.
                priority_class = -1
                priority = 1
                self._upsert_push_rule_txn(
                    txn, stream_id, event_stream_ordering, user_id, rule_id,
                    priority_class, priority, "[]", actions_json,
                    update_stream=False
                )
            else:
                self._simple_update_one_txn(
                    txn,
                    "push_rules",
                    {'user_name': user_id, 'rule_id': rule_id},
                    {'actions': actions_json},
                )

            self._insert_push_rules_update_txn(
                txn, stream_id, event_stream_ordering, user_id, rule_id,
                op="ACTIONS", data={"actions": actions_json}
            )

        with self._push_rules_stream_id_gen.get_next() as ids:
            stream_id, event_stream_ordering = ids
            yield self.runInteraction(
                "set_push_rule_actions", set_push_rule_actions_txn,
                stream_id, event_stream_ordering
            )
Example #20
    def _update_remote_device_list_cache_txn(self, txn, user_id, devices,
                                             stream_id):
        self._simple_delete_txn(txn,
                                table="device_lists_remote_cache",
                                keyvalues={"user_id": user_id})

        self._simple_insert_many_txn(
            txn,
            table="device_lists_remote_cache",
            values=[{
                "user_id": user_id,
                "device_id": content["device_id"],
                "content": json.dumps(content),
            } for content in devices],
        )

        txn.call_after(self._get_cached_devices_for_user.invalidate,
                       (user_id, ))
        txn.call_after(self._get_cached_user_device.invalidate_many,
                       (user_id, ))
        txn.call_after(
            self.get_device_list_last_stream_id_for_remote.invalidate,
            (user_id, ))

        self._simple_upsert_txn(
            txn,
            table="device_lists_remote_extremeties",
            keyvalues={"user_id": user_id},
            values={"stream_id": stream_id},
            # we don't need to lock, because we can assume we are the only thread
            # updating this user's extremity.
            lock=False,
        )
Example #21
    def update_e2e_room_keys_version(self,
                                     user_id,
                                     version,
                                     info=None,
                                     version_etag=None):
        """Update a given backup version

        Args:
            user_id(str): the user whose backup version we're updating
            version(str): the version ID of the backup version we're updating
            info (dict): the new backup version info to store.  If None, then
                the backup version info is not updated
            version_etag (Optional[int]): etag of the keys in the backup.  If
                None, then the etag is not updated
        """
        updatevalues = {}

        if info is not None and "auth_data" in info:
            updatevalues["auth_data"] = json.dumps(info["auth_data"])
        if version_etag is not None:
            updatevalues["etag"] = version_etag

        if updatevalues:
            return self.db.simple_update(
                table="e2e_room_keys_versions",
                keyvalues={
                    "user_id": user_id,
                    "version": version
                },
                updatevalues=updatevalues,
                desc="update_e2e_room_keys_version",
            )
Example #22
    def update_e2e_room_key(self, user_id, version, room_id, session_id,
                            room_key):
        """Replaces the encrypted E2E room key for a given session in a given backup

        Args:
            user_id(str): the user whose backup we're setting
            version(str): the version ID of the backup we're updating
            room_id(str): the ID of the room whose keys we're setting
            session_id(str): the session whose room_key we're setting
            room_key(dict): the room_key being set
        Raises:
            StoreError
        """

        yield self.db.simple_update_one(
            table="e2e_room_keys",
            keyvalues={
                "user_id": user_id,
                "version": version,
                "room_id": room_id,
                "session_id": session_id,
            },
            updatevalues={
                "first_message_index": room_key["first_message_index"],
                "forwarded_count": room_key["forwarded_count"],
                "is_verified": room_key["is_verified"],
                "session_data": json.dumps(room_key["session_data"]),
            },
            desc="update_e2e_room_key",
        )
Example #23
    async def add_tag_to_room(self, user_id: str, room_id: str, tag: str,
                              content: JsonDict) -> int:
        """Add a tag to a room for a user.

        Args:
            user_id: The user to add a tag for.
            room_id: The room to add a tag for.
            tag: The tag name to add.
            content: A json object to associate with the tag.

        Returns:
            The next account data ID.
        """
        content_json = json.dumps(content)

        def add_tag_txn(txn, next_id):
            self.db_pool.simple_upsert_txn(
                txn,
                table="room_tags",
                keyvalues={
                    "user_id": user_id,
                    "room_id": room_id,
                    "tag": tag
                },
                values={"content": content_json},
            )
            self._update_revision_txn(txn, user_id, room_id, next_id)

        with self._account_data_id_gen.get_next() as next_id:
            await self.db_pool.runInteraction("add_tag", add_tag_txn, next_id)

        self.get_tags_for_user.invalidate((user_id, ))

        return self._account_data_id_gen.get_current_token()
Example #24
    def _add_device_outbound_poke_to_stream_txn(
        self, txn, user_id, device_ids, hosts, stream_ids, context,
    ):
        for host in hosts:
            txn.call_after(
                self._device_list_federation_stream_cache.entity_has_changed,
                host,
                stream_ids[-1],
            )

        now = self._clock.time_msec()
        next_stream_id = iter(stream_ids)

        self.db.simple_insert_many_txn(
            txn,
            table="device_lists_outbound_pokes",
            values=[
                {
                    "destination": destination,
                    "stream_id": next(next_stream_id),
                    "user_id": user_id,
                    "device_id": device_id,
                    "sent": False,
                    "ts": now,
                    "opentracing_context": json.dumps(context)
                    if whitelisted_homeserver(destination)
                    else "{}",
                }
                for destination in hosts
                for device_id in device_ids
            ],
        )
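
Wrapping stream_ids in iter() lets the nested comprehension draw one pre-allocated id per (destination, device_id) row, in order. The trick in isolation:

    stream_ids = [101, 102, 103, 104]
    hosts = ["hs1", "hs2"]
    device_ids = ["devA", "devB"]

    next_stream_id = iter(stream_ids)
    rows = [
        # next() pulls a fresh id for every generated row.
        (destination, next(next_stream_id), device_id)
        for destination in hosts
        for device_id in device_ids
    ]
    # rows == [("hs1", 101, "devA"), ("hs1", 102, "devB"),
    #          ("hs2", 103, "devA"), ("hs2", 104, "devB")]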
Example #25
        def _create_appservice_txn(txn):
            # work out new txn id (highest txn id for this service += 1)
            # The highest id may be the last one sent (in which case it is last_txn)
            # or it may be the highest in the txns list (which are waiting to be/are
            # being sent)
            last_txn_id = self._get_last_txn(txn, service.id)

            txn.execute(
                "SELECT MAX(txn_id) FROM application_services_txns WHERE as_id=?",
                (service.id, ),
            )
            highest_txn_id = txn.fetchone()[0]
            if highest_txn_id is None:
                highest_txn_id = 0

            new_txn_id = max(highest_txn_id, last_txn_id) + 1

            # Insert new txn into txn table
            event_ids = json.dumps([e.event_id for e in events])
            txn.execute(
                "INSERT INTO application_services_txns(as_id, txn_id, event_ids) "
                "VALUES(?,?,?)",
                (service.id, new_txn_id, event_ids),
            )
            return AppServiceTransaction(service=service,
                                         id=new_txn_id,
                                         events=events)
Example #26
    def add_tag_to_room(self, user_id, room_id, tag, content):
        """Add a tag to a room for a user.
        Args:
            user_id(str): The user to add a tag for.
            room_id(str): The room to add a tag for.
            tag(str): The tag name to add.
            content(dict): A json object to associate with the tag.
        Returns:
            A deferred that completes once the tag has been added.
        """
        content_json = json.dumps(content)

        def add_tag_txn(txn, next_id):
            self.db.simple_upsert_txn(
                txn,
                table="room_tags",
                keyvalues={
                    "user_id": user_id,
                    "room_id": room_id,
                    "tag": tag
                },
                values={"content": content_json},
            )
            self._update_revision_txn(txn, user_id, room_id, next_id)

        with self._account_data_id_gen.get_next() as next_id:
            yield self.db.runInteraction("add_tag", add_tag_txn, next_id)

        self.get_tags_for_user.invalidate((user_id, ))

        result = self._account_data_id_gen.get_current_token()
        return result
Example #27
    def make_request(
        self,
        method,
        path,
        content=b"",
        access_token=None,
        request=SynapseRequest,
        shorthand=True,
    ):
        """
        Create a SynapseRequest at the path using the method and containing the
        given content.

        Args:
            method (bytes/unicode): The HTTP request method ("verb").
            path (bytes/unicode): The HTTP path, suitably URL encoded (e.g.
            escaped UTF-8 & spaces and such).
            content (bytes or dict): The body of the request. JSON-encoded, if
            a dict.
            shorthand: Whether to try and be helpful and prefix the given URL
            with the usual REST API path, if it doesn't contain it.

        Returns:
            A synapse.http.site.SynapseRequest.
        """
        if isinstance(content, dict):
            content = json.dumps(content).encode('utf8')

        return make_request(
            self.reactor, method, path, content, access_token, request, shorthand
        )
Example #28
    def upsert_group_category(self, group_id, category_id, profile, is_public):
        """Add/update room category for group
        """
        insertion_values = {}
        update_values = {"category_id": category_id}  # This cannot be empty

        if profile is None:
            insertion_values["profile"] = "{}"
        else:
            update_values["profile"] = json.dumps(profile)

        if is_public is None:
            insertion_values["is_public"] = True
        else:
            update_values["is_public"] = is_public

        return self.db.simple_upsert(
            table="group_room_categories",
            keyvalues={
                "group_id": group_id,
                "category_id": category_id
            },
            values=update_values,
            insertion_values=insertion_values,
            desc="upsert_group_category",
        )
Example #29
    def write_events(self, room_id, events):
        room_directory = os.path.join(self.base_directory, "rooms", room_id)
        os.makedirs(room_directory, exist_ok=True)
        events_file = os.path.join(room_directory, "events")

        with open(events_file, "a") as f:
            for event in events:
                print(json.dumps(event.get_pdu_json()), file=f)
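
Because each event is printed as one json.dumps line, the resulting file is newline-delimited JSON. A matching reader might look like this (the helper name is illustrative, not part of the exporter):

    import json

    def read_events(events_file):
        # Each line holds one PDU as written by print(json.dumps(...), file=f).
        with open(events_file) as f:
            return [json.loads(line) for line in f if line.strip()]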
Example #30
 def get_secret_key(self, request, body):
     """
     Makes the model write out a secret key file and returns its content
     to the caller. Requires the servername to be passed.
     """
     return json.dumps(
         {"secret_key": self.model.generate_secret_key(body["server_name"])}
     )
Example #31
    def add_account_data_to_room(self, user_id, room_id, account_data_type,
                                 content):
        """Add some account_data to a room for a user.
        Args:
            user_id(str): The user to add a tag for.
            room_id(str): The room to add a tag for.
            account_data_type(str): The type of account_data to add.
            content(dict): A json object to associate with the tag.
        Returns:
            A deferred that completes once the account_data has been added.
        """
        content_json = json.dumps(content)

        with self._account_data_id_gen.get_next() as next_id:
            # no need to lock here as room_account_data has a unique constraint
            # on (user_id, room_id, account_data_type) so _simple_upsert will
            # retry if there is a conflict.
            yield self._simple_upsert(
                desc="add_room_account_data",
                table="room_account_data",
                keyvalues={
                    "user_id": user_id,
                    "room_id": room_id,
                    "account_data_type": account_data_type,
                },
                values={
                    "stream_id": next_id,
                    "content": content_json,
                },
                lock=False,
            )

            # it's theoretically possible for the above to succeed and the
            # below to fail - in which case we might reuse a stream id on
            # restart, and the above update might not get propagated. That
            # doesn't sound any worse than the whole update getting lost,
            # which is what would happen if we combined the two into one
            # transaction.
            yield self._update_max_stream_id(next_id)

            self._account_data_stream_cache.entity_has_changed(
                user_id, next_id)
            self.get_account_data_for_user.invalidate((user_id, ))
            self.get_account_data_for_room.invalidate((
                user_id,
                room_id,
            ))
            self.get_account_data_for_room_and_type.prefill(
                (
                    user_id,
                    room_id,
                    account_data_type,
                ),
                content,
            )

        result = self._account_data_id_gen.get_current_token()
        defer.returnValue(result)
Example #32
    def _set_e2e_cross_signing_key_txn(self, txn, user_id, key_type, key):
        """Set a user's cross-signing key.

        Args:
            txn (twisted.enterprise.adbapi.Connection): db connection
            user_id (str): the user to set the signing key for
            key_type (str): the type of key that is being set: either 'master'
                for a master key, 'self_signing' for a self-signing key, or
                'user_signing' for a user-signing key
            key (dict): the key data
        """
        # the cross-signing keys need to occupy the same namespace as devices,
        # since signatures are identified by device ID.  So add an entry to the
        # device table to make sure that we don't have a collision with device
        # IDs

        # the 'key' dict will look something like:
        # {
        #   "user_id": "@alice:example.com",
        #   "usage": ["self_signing"],
        #   "keys": {
        #     "ed25519:base64+self+signing+public+key": "base64+self+signing+public+key",
        #   },
        #   "signatures": {
        #     "@alice:example.com": {
        #       "ed25519:base64+master+public+key": "base64+signature"
        #     }
        #   }
        # }
        # The "keys" property must only have one entry, which will be the public
        # key, so we just grab the first value in there
        pubkey = next(iter(key["keys"].values()))
        self.db.simple_insert_txn(
            txn,
            "devices",
            values={
                "user_id": user_id,
                "device_id": pubkey,
                "display_name": key_type + " signing key",
                "hidden": True,
            },
        )

        # and finally, store the key itself
        with self._cross_signing_id_gen.get_next() as stream_id:
            self.db.simple_insert_txn(
                txn,
                "e2e_cross_signing_keys",
                values={
                    "user_id": user_id,
                    "keytype": key_type,
                    "keydata": json.dumps(key),
                    "stream_id": stream_id,
                },
            )

        self._invalidate_cache_and_stream(
            txn, self._get_bare_e2e_cross_signing_keys, (user_id, ))
Example #33
    def _update_remote_device_list_cache_entry_txn(self, txn, user_id,
                                                   device_id, content,
                                                   stream_id):
        if content.get("deleted"):
            self._simple_delete_txn(
                txn,
                table="device_lists_remote_cache",
                keyvalues={
                    "user_id": user_id,
                    "device_id": device_id,
                },
            )

            txn.call_after(self.device_id_exists_cache.invalidate, (
                user_id,
                device_id,
            ))
        else:
            self._simple_upsert_txn(
                txn,
                table="device_lists_remote_cache",
                keyvalues={
                    "user_id": user_id,
                    "device_id": device_id,
                },
                values={
                    "content": json.dumps(content),
                },

                # we don't need to lock, because we assume we are the only thread
                # updating this user's devices.
                lock=False,
            )

        txn.call_after(self._get_cached_user_device.invalidate, (
            user_id,
            device_id,
        ))
        txn.call_after(self._get_cached_devices_for_user.invalidate,
                       (user_id, ))
        txn.call_after(
            self.get_device_list_last_stream_id_for_remote.invalidate,
            (user_id, ))

        self._simple_upsert_txn(
            txn,
            table="device_lists_remote_extremeties",
            keyvalues={
                "user_id": user_id,
            },
            values={
                "stream_id": stream_id,
            },

            # again, we can assume we are the only thread updating this user's
            # extremity.
            lock=False,
        )
Example #34
    async def create_ui_auth_session(
        self, clientdict: JsonDict, uri: str, method: str, description: str,
    ) -> UIAuthSessionData:
        """
        Creates a new user interactive authentication session.

        The session can be used to track the stages necessary to authenticate a
        user across multiple HTTP requests.

        Args:
            clientdict:
                The dictionary from the client root level, not the 'auth' key.
            uri:
                The URI this session was initiated with, this is checked at each
                stage of the authentication to ensure that the asked for
                operation has not changed.
            method:
                The method this session was initiated with, this is checked at each
                stage of the authentication to ensure that the asked for
                operation has not changed.
            description:
                A string description of the operation that the current
                authentication is authorising.
        Returns:
            The newly created session.
        Raises:
            StoreError if a unique session ID cannot be generated.
        """
        # The clientdict gets stored as JSON.
        clientdict_json = json.dumps(clientdict)

        # autogen a session ID and try to create it. We may clash, so just
        # try a few times till one goes through, giving up eventually.
        attempts = 0
        while attempts < 5:
            session_id = stringutils.random_string(24)

            try:
                await self.db.simple_insert(
                    table="ui_auth_sessions",
                    values={
                        "session_id": session_id,
                        "clientdict": clientdict_json,
                        "uri": uri,
                        "method": method,
                        "description": description,
                        "serverdict": "{}",
                        "creation_time": self.hs.get_clock().time_msec(),
                    },
                    desc="create_ui_auth_session",
                )
                return UIAuthSessionData(
                    session_id, clientdict, uri, method, description
                )
            except self.db.engine.module.IntegrityError:
                attempts += 1
        raise StoreError(500, "Couldn't generate a session ID.")
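
The retry loop leans on the table's unique session_id constraint: a clash surfaces as an IntegrityError and a fresh random id is tried, up to five times. The same shape in isolation, with stand-ins for random_string and the insert (KeyError plays the role of IntegrityError here):

    import secrets

    def insert_with_unique_id(try_insert, attempts=5):
        # try_insert(session_id) should raise KeyError on a duplicate id.
        for _ in range(attempts):
            session_id = secrets.token_urlsafe(18)  # ~24 chars, like random_string(24)
            try:
                try_insert(session_id)
                return session_id
            except KeyError:
                continue
        raise RuntimeError("Couldn't generate a session ID.")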
Example #35
    def register_user(
        self,
        username: str,
        password: str,
        admin: Optional[bool] = False,
        displayname: Optional[str] = None,
    ) -> str:
        """
        Register a user. Requires the Admin API be registered.

        Args:
            username: The user part of the new user.
            password: The password of the new user.
            admin: Whether the user should be created as an admin or not.
            displayname: The displayname of the new user.

        Returns:
            The MXID of the new user.
        """
        self.hs.config.registration_shared_secret = "shared"

        # Create the user
        request, channel = self.make_request("GET", "/_matrix/client/r0/admin/register")
        self.render(request)
        self.assertEqual(channel.code, 200, msg=channel.result)
        nonce = channel.json_body["nonce"]

        want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1)
        nonce_str = b"\x00".join([username.encode("utf8"), password.encode("utf8")])
        if admin:
            nonce_str += b"\x00admin"
        else:
            nonce_str += b"\x00notadmin"

        want_mac.update(nonce.encode("ascii") + b"\x00" + nonce_str)
        want_mac = want_mac.hexdigest()

        body = json.dumps(
            {
                "nonce": nonce,
                "username": username,
                "displayname": displayname,
                "password": password,
                "admin": admin,
                "mac": want_mac,
                "inhibit_login": True,
            }
        )
        request, channel = self.make_request(
            "POST", "/_matrix/client/r0/admin/register", body.encode("utf8")
        )
        self.render(request)
        self.assertEqual(channel.code, 200, channel.json_body)

        user_id = channel.json_body["user_id"]
        return user_id
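
The MAC expected by the admin register endpoint can be computed standalone; this mirrors the field order used above (nonce, username, password, admin flag, joined by NUL bytes):

    import hashlib
    import hmac

    def admin_register_mac(shared_secret, nonce, username, password, admin):
        mac = hmac.new(key=shared_secret.encode("utf8"), digestmod=hashlib.sha1)
        mac.update(
            b"\x00".join([
                nonce.encode("ascii"),
                username.encode("utf8"),
                password.encode("utf8"),
                b"admin" if admin else b"notadmin",
            ])
        )
        return mac.hexdigest()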
Example #36
    def _add_device_change_txn(self, txn, user_id, device_ids, hosts,
                               stream_id):
        now = self._clock.time_msec()

        txn.call_after(self._device_list_stream_cache.entity_has_changed,
                       user_id, stream_id)
        for host in hosts:
            txn.call_after(
                self._device_list_federation_stream_cache.entity_has_changed,
                host,
                stream_id,
            )

        # Delete older entries in the table, as we really only care about
        # when the latest change happened.
        txn.executemany(
            """
            DELETE FROM device_lists_stream
            WHERE user_id = ? AND device_id = ? AND stream_id < ?
            """,
            [(user_id, device_id, stream_id) for device_id in device_ids],
        )

        self._simple_insert_many_txn(
            txn,
            table="device_lists_stream",
            values=[{
                "stream_id": stream_id,
                "user_id": user_id,
                "device_id": device_id
            } for device_id in device_ids],
        )

        context = get_active_span_text_map()

        self._simple_insert_many_txn(
            txn,
            table="device_lists_outbound_pokes",
            values=[{
                "destination":
                destination,
                "stream_id":
                stream_id,
                "user_id":
                user_id,
                "device_id":
                device_id,
                "sent":
                False,
                "ts":
                now,
                "opentracing_context":
                json.dumps(context)
                if whitelisted_homeserver(destination) else "{}",
            } for destination in hosts for device_id in device_ids],
        )
Example #37
    def attempt_wrong_password_login(self, username, password):
        """Attempts to login as the user with the given password, asserting
        that the attempt *fails*.
        """
        body = {"type": "m.login.password", "user": username, "password": password}

        channel = self.make_request(
            "POST", "/_matrix/client/r0/login", json.dumps(body).encode("utf8")
        )
        self.assertEqual(channel.code, 403, channel.result)
Example #38
    def write_state(self, room_id, event_id, state):
        room_directory = os.path.join(self.base_directory, "rooms", room_id)
        state_directory = os.path.join(room_directory, "state")
        os.makedirs(state_directory, exist_ok=True)

        event_file = os.path.join(state_directory, event_id)

        with open(event_file, "a") as f:
            for event in state.values():
                print(json.dumps(event.get_pdu_json()), file=f)
Example #39
def active_span_context_as_string():
    """
    Returns:
        The active span context encoded as a string.
    """
    carrier = {}  # type: Dict[str, str]
    if opentracing:
        opentracing.tracer.inject(opentracing.tracer.active_span,
                                  opentracing.Format.TEXT_MAP, carrier)
    return json.dumps(carrier)
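
The string produced here is plain JSON over the injected carrier, so the receiving side can recover the Dict[str, str] with json.loads before handing it to the tracer's extract step (not shown, since it depends on the opentracing setup):

    import json

    def span_context_from_string(carrier_string):
        # Inverse of active_span_context_as_string.
        return json.loads(carrier_string or "{}")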
Example #40
 def check_ports(self, request, body):
     """
     Given an array of ports, this returns an array of booleans specifying
     whether the api was capable of starting a process listening on each
     port. This gives a loose indication that a port is generally available.
     """
     results = []
     for port in body["ports"]:
         results.append(port_checker(port))
     return json.dumps({"ports": results})
Example #41
 def add_push_rule(
     self,
     user_id,
     rule_id,
     priority_class,
     conditions,
     actions,
     before=None,
     after=None,
 ):
     conditions_json = json.dumps(conditions)
     actions_json = json.dumps(actions)
     with self._push_rules_stream_id_gen.get_next() as ids:
         stream_id, event_stream_ordering = ids
         if before or after:
             yield self.runInteraction(
                 "_add_push_rule_relative_txn",
                 self._add_push_rule_relative_txn,
                 stream_id,
                 event_stream_ordering,
                 user_id,
                 rule_id,
                 priority_class,
                 conditions_json,
                 actions_json,
                 before,
                 after,
             )
         else:
             yield self.runInteraction(
                 "_add_push_rule_highest_priority_txn",
                 self._add_push_rule_highest_priority_txn,
                 stream_id,
                 event_stream_ordering,
                 user_id,
                 rule_id,
                 priority_class,
                 conditions_json,
                 actions_json,
             )
Example #42
    def _update_remote_device_list_cache_entry_txn(self, txn, user_id, device_id,
                                                   content, stream_id):
        if content.get("deleted"):
            self._simple_delete_txn(
                txn,
                table="device_lists_remote_cache",
                keyvalues={
                    "user_id": user_id,
                    "device_id": device_id,
                },
            )

            txn.call_after(
                self.device_id_exists_cache.invalidate, (user_id, device_id,)
            )
        else:
            self._simple_upsert_txn(
                txn,
                table="device_lists_remote_cache",
                keyvalues={
                    "user_id": user_id,
                    "device_id": device_id,
                },
                values={
                    "content": json.dumps(content),
                },

                # we don't need to lock, because we assume we are the only thread
                # updating this user's devices.
                lock=False,
            )

        txn.call_after(self._get_cached_user_device.invalidate, (user_id, device_id,))
        txn.call_after(self._get_cached_devices_for_user.invalidate, (user_id,))
        txn.call_after(
            self.get_device_list_last_stream_id_for_remote.invalidate, (user_id,)
        )

        self._simple_upsert_txn(
            txn,
            table="device_lists_remote_extremeties",
            keyvalues={
                "user_id": user_id,
            },
            values={
                "stream_id": stream_id,
            },

            # again, we can assume we are the only thread updating this user's
            # extremity.
            lock=False,
        )
Example #43
        def get_tag_content(txn, tag_ids):
            sql = (
                "SELECT tag, content" " FROM room_tags" " WHERE user_id=? AND room_id=?"
            )
            results = []
            for stream_id, user_id, room_id in tag_ids:
                txn.execute(sql, (user_id, room_id))
                tags = []
                for tag, content in txn:
                    tags.append(json.dumps(tag) + ":" + content)
                tag_json = "{" + ",".join(tags) + "}"
                results.append((stream_id, user_id, room_id, tag_json))

            return results
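
Since content is already JSON text, the code splices strings rather than nesting json.dumps. A dict-based equivalent, at the cost of a decode/encode round-trip (output matches modulo whitespace):

    import json

    def build_tag_json(pairs):
        # pairs: iterable of (tag, content) where content is a JSON string,
        # as read from room_tags.
        return json.dumps({tag: json.loads(content) for tag, content in pairs})

    build_tag_json([("m.favourite", '{"order": 0.5}')])
    # '{"m.favourite": {"order": 0.5}}'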
Example #44
 def update_remote_attestion(self, group_id, user_id, attestation):
     """Update an attestation that a remote has renewed
     """
     return self._simple_update_one(
         table="group_attestations_remote",
         keyvalues={
             "group_id": group_id,
             "user_id": user_id,
         },
         updatevalues={
             "valid_until_ms": attestation["valid_until_ms"],
             "attestation_json": json.dumps(attestation)
         },
         desc="update_remote_attestion",
     )
Example #45
    def add_account_data_to_room(self, user_id, room_id, account_data_type, content):
        """Add some account_data to a room for a user.
        Args:
            user_id(str): The user to add a tag for.
            room_id(str): The room to add a tag for.
            account_data_type(str): The type of account_data to add.
            content(dict): A json object to associate with the tag.
        Returns:
            A deferred that completes once the account_data has been added.
        """
        content_json = json.dumps(content)

        with self._account_data_id_gen.get_next() as next_id:
            # no need to lock here as room_account_data has a unique constraint
            # on (user_id, room_id, account_data_type) so _simple_upsert will
            # retry if there is a conflict.
            yield self._simple_upsert(
                desc="add_room_account_data",
                table="room_account_data",
                keyvalues={
                    "user_id": user_id,
                    "room_id": room_id,
                    "account_data_type": account_data_type,
                },
                values={
                    "stream_id": next_id,
                    "content": content_json,
                },
                lock=False,
            )

            # it's theoretically possible for the above to succeed and the
            # below to fail - in which case we might reuse a stream id on
            # restart, and the above update might not get propagated. That
            # doesn't sound any worse than the whole update getting lost,
            # which is what would happen if we combined the two into one
            # transaction.
            yield self._update_max_stream_id(next_id)

            self._account_data_stream_cache.entity_has_changed(user_id, next_id)
            self.get_account_data_for_user.invalidate((user_id,))
            self.get_account_data_for_room.invalidate((user_id, room_id,))
            self.get_account_data_for_room_and_type.prefill(
                (user_id, room_id, account_data_type,), content,
            )

        result = self._account_data_id_gen.get_current_token()
        defer.returnValue(result)
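
The with self._account_data_id_gen.get_next() pattern hands out a fresh stream id and publishes it as the current token once the block exits. A simplified sketch of that contract (the real generator also tracks in-flight ids, so the published token never points past an unfinished write):

import threading
from contextlib import contextmanager

class StreamIdGenerator(object):
    # Simplified: hands out increasing ids under a lock.
    def __init__(self, current):
        self._lock = threading.Lock()
        self._current = current

    @contextmanager
    def get_next(self):
        with self._lock:
            self._current += 1
            next_id = self._current
        yield next_id

    def get_current_token(self):
        with self._lock:
            return self._current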
Example #46
0
def _serialize_action(actions, is_highlight):
    """Custom serializer for actions. This allows us to "compress" common actions.

    We use the fact that most users have the same actions for notifs (and for
    highlights), and store these default actions as the empty string rather than
    the full JSON. Since the empty string isn't valid JSON, there is no risk of
    this clashing with any real JSON actions.
    """
    if is_highlight:
        if actions == DEFAULT_HIGHLIGHT_ACTION:
            return ""  # We use empty string as the column is non-NULL
    else:
        if actions == DEFAULT_NOTIF_ACTION:
            return ""
    return json.dumps(actions)
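
The inverse operation expands the empty-string sentinel back into the default action list. A sketch of that deserializer, with plausible (not source-verified) values for the two default constants:

import json

# Plausible defaults for illustration; the real values live next to
# _serialize_action in the source module.
DEFAULT_NOTIF_ACTION = ["notify", {"set_tweak": "highlight", "value": False}]
DEFAULT_HIGHLIGHT_ACTION = [
    "notify",
    {"set_tweak": "sound", "value": "default"},
    {"set_tweak": "highlight"},
]

def _deserialize_action(actions, is_highlight):
    # Empty string means "the default actions for this kind of event".
    if actions:
        return json.loads(actions)
    return DEFAULT_HIGHLIGHT_ACTION if is_highlight else DEFAULT_NOTIF_ACTION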
Example #47
0
    def register_user(self, username, password, admin=False):
        """
        Register a user. Requires the Admin API be registered.

        Args:
            username (bytes/unicode): The user part of the new user.
            password (bytes/unicode): The password of the new user.
            admin (bool): Whether the user should be created as an admin
            or not.

        Returns:
            The MXID of the new user (unicode).
        """
        self.hs.config.registration_shared_secret = u"shared"

        # Create the user
        request, channel = self.make_request("GET", "/_matrix/client/r0/admin/register")
        self.render(request)
        nonce = channel.json_body["nonce"]

        want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1)
        nonce_str = b"\x00".join([username.encode('utf8'), password.encode('utf8')])
        if admin:
            nonce_str += b"\x00admin"
        else:
            nonce_str += b"\x00notadmin"

        want_mac.update(nonce.encode('ascii') + b"\x00" + nonce_str)
        want_mac = want_mac.hexdigest()

        body = json.dumps(
            {
                "nonce": nonce,
                "username": username,
                "password": password,
                "admin": admin,
                "mac": want_mac,
            }
        )
        request, channel = self.make_request(
            "POST", "/_matrix/client/r0/admin/register", body.encode('utf8')
        )
        self.render(request)
        self.assertEqual(channel.code, 200)

        user_id = channel.json_body["user_id"]
        return user_id
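
The MAC construction above is easy to get wrong, so here it is as a standalone, runnable helper: HMAC-SHA1 keyed with the shared secret over the NUL-joined nonce, username, password and admin flag. The helper name is ours, not Synapse's:

import hashlib
import hmac

def registration_mac(shared_secret, nonce, username, password, admin=False):
    # NUL-join the fields exactly as register_user does above.
    mac = hmac.new(key=shared_secret, digestmod=hashlib.sha1)
    mac.update(
        b"\x00".join(
            [
                nonce.encode("ascii"),
                username.encode("utf8"),
                password.encode("utf8"),
                b"admin" if admin else b"notadmin",
            ]
        )
    )
    return mac.hexdigest()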
Example #48
0
    def _background_update_progress_txn(self, txn, update_name, progress):
        """Update the progress of a background update

        Args:
            txn(cursor): The transaction.
            update_name(str): The name of the background update task
            progress(dict): The progress of the update.
        """

        progress_json = json.dumps(progress)

        self._simple_update_one_txn(
            txn,
            "background_updates",
            keyvalues={"update_name": update_name},
            updatevalues={"progress_json": progress_json},
        )
Example #49
0
    def add_event_report(
        self, room_id, event_id, user_id, reason, content, received_ts
    ):
        next_id = self._event_reports_id_gen.get_next()
        return self._simple_insert(
            table="event_reports",
            values={
                "id": next_id,
                "received_ts": received_ts,
                "room_id": room_id,
                "event_id": event_id,
                "user_id": user_id,
                "reason": reason,
                "content": json.dumps(content),
            },
            desc="add_event_report",
        )
Example #50
0
    def render_GET(self, request):
        # no auth here on purpose, to allow anyone to view, even across home
        # servers.

        # TODO: A little crude here, we could do this better.
        filename = request.path.decode('ascii').split('/')[-1]
        # be paranoid: strip everything except safe filename characters
        filename = re.sub(r"[^0-9A-Za-z._-]", "", filename)

        file_path = self.directory + "/" + filename

        logger.debug("Searching for %s", file_path)

        if os.path.isfile(file_path):
            # filename has the content type
            base64_contentype = filename.split(".")[1]
            content_type = base64.urlsafe_b64decode(base64_contentype)
            logger.info("Sending file %s", file_path)
            f = open(file_path, 'rb')
            request.setHeader('Content-Type', content_type)

            # cache for at least a day.
            # XXX: we might want to turn this off for data we don't want to
            # recommend caching as it's sensitive or private - or at least
            # select private. don't bother setting Expires as all our matrix
            # clients are smart enough to be happy with Cache-Control (right?)
            request.setHeader(
                b"Cache-Control", b"public,max-age=86400,s-maxage=86400"
            )

            d = FileSender().beginFileTransfer(f, request)

            # after the file has been sent, clean up and finish the request
            def cbFinished(ignored):
                f.close()
                finish_request(request)
            d.addCallback(cbFinished)
        else:
            respond_with_json_bytes(
                request,
                404,
                json.dumps(cs_error("Not found", code=Codes.NOT_FOUND)),
                send_cors=True)

        return server.NOT_DONE_YET
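
The handler recovers the Content-Type from the filename itself: the second dot-separated component is the urlsafe-base64 encoding of the MIME type. A round-trip sketch (the filename key here is invented):

import base64

content_type = b"image/png"
token = base64.urlsafe_b64encode(content_type).decode("ascii")
filename = "somekey." + token  # e.g. "somekey.aW1hZ2UvcG5n"

recovered = base64.urlsafe_b64decode(filename.split(".")[1])
assert recovered == content_type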
Example #51
0
    def login(self, username, password, device_id=None):
        """
        Log in a user and get an access token. Requires the Login API to be
        registered.
        """
        body = {"type": "m.login.password", "user": username, "password": password}
        if device_id:
            body["device_id"] = device_id

        request, channel = self.make_request(
            "POST", "/_matrix/client/r0/login", json.dumps(body).encode('utf8')
        )
        self.render(request)
        self.assertEqual(channel.code, 200, channel.result)

        access_token = channel.json_body["access_token"]
        return access_token
Example #52
0
        def add_messages_txn(txn, now_ms, stream_id):
            # Add the local messages directly to the local inbox.
            self._add_messages_to_local_device_inbox_txn(
                txn, stream_id, local_messages_by_user_then_device
            )

            # Add the remote messages to the federation outbox.
            # We'll send them to a remote server when we next send a
            # federation transaction to that destination.
            sql = (
                "INSERT INTO device_federation_outbox"
                " (destination, stream_id, queued_ts, messages_json)"
                " VALUES (?,?,?,?)"
            )
            rows = []
            for destination, edu in remote_messages_by_destination.items():
                edu_json = json.dumps(edu)
                rows.append((destination, stream_id, now_ms, edu_json))
            txn.executemany(sql, rows)
Example #53
0
        def _add_user_to_group_txn(txn):
            self._simple_insert_txn(
                txn,
                table="group_users",
                values={
                    "group_id": group_id,
                    "user_id": user_id,
                    "is_admin": is_admin,
                    "is_public": is_public,
                },
            )

            self._simple_delete_txn(
                txn,
                table="group_invites",
                keyvalues={
                    "group_id": group_id,
                    "user_id": user_id,
                },
            )

            if local_attestation:
                self._simple_insert_txn(
                    txn,
                    table="group_attestations_renewals",
                    values={
                        "group_id": group_id,
                        "user_id": user_id,
                        "valid_until_ms": local_attestation["valid_until_ms"],
                    },
                )
            if remote_attestation:
                self._simple_insert_txn(
                    txn,
                    table="group_attestations_remote",
                    values={
                        "group_id": group_id,
                        "user_id": user_id,
                        "valid_until_ms": remote_attestation["valid_until_ms"],
                        "attestation_json": json.dumps(remote_attestation),
                    },
                )
Example #54
0
    def start_background_update(self, update_name, progress):
        """Starts a background update running.

        Args:
            update_name: The update to set running.
            progress: The initial state of the progress of the update.

        Returns:
            A deferred that completes once the task has been added to the
            queue.
        """
        # Clear the background update queue so that we will pick up the new
        # task on the next iteration of do_background_update.
        self._background_update_queue = []
        progress_json = json.dumps(progress)

        return self._simple_insert(
            "background_updates",
            {"update_name": update_name, "progress_json": progress_json},
        )
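
The read side of this table is symmetrical: fetch progress_json for the update name and decode it back into a dict. A hedged sketch using plain DB-API calls rather than Synapse's select helpers:

import json

def get_background_update_progress_txn(txn, update_name):
    # Fetch the stored blob and decode it back into a progress dict.
    txn.execute(
        "SELECT progress_json FROM background_updates WHERE update_name = ?",
        (update_name,),
    )
    row = txn.fetchone()
    return json.loads(row[0]) if row else None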
Example #55
0
    def make_request(
        self,
        method,
        path,
        content=b"",
        access_token=None,
        request=SynapseRequest,
        shorthand=True,
        federation_auth_origin=None,
    ):
        """
        Create a SynapseRequest at the path using the method and containing the
        given content.

        Args:
            method (bytes/unicode): The HTTP request method ("verb").
            path (bytes/unicode): The HTTP path, suitably URL encoded (e.g.
                escaped UTF-8 & spaces and such).
            content (bytes or dict): The body of the request. JSON-encoded, if
                a dict.
            shorthand: Whether to try and be helpful and prefix the given URL
                with the usual REST API path, if it doesn't contain it.
            federation_auth_origin (bytes|None): if set to not-None, we will add a fake
                Authorization header pretending to be the given server name.

        Returns:
            Tuple[synapse.http.site.SynapseRequest, channel]
        """
        if isinstance(content, dict):
            content = json.dumps(content).encode('utf8')

        return make_request(
            self.reactor,
            method,
            path,
            content,
            access_token,
            request,
            shorthand,
            federation_auth_origin,
        )
Example #56
0
    @defer.inlineCallbacks
    def _do_preview(self, url, user, ts):
        """Check the db, and download the URL and build a preview

        Args:
            url (str):
            user (str):
            ts (int):

        Returns:
            Deferred[str]: json-encoded og data
        """
        # check the URL cache in the DB (which will also provide us with
        # historical previews, if we have any)
        cache_result = yield self.store.get_url_cache(url, ts)
        if (
            cache_result and
            cache_result["expires_ts"] > ts and
            cache_result["response_code"] / 100 == 2
        ):
            # It may be stored as text in the database, not as bytes (such as
            # PostgreSQL). If so, encode it back before handing it on.
            og = cache_result["og"]
            if isinstance(og, six.text_type):
                og = og.encode('utf8')
            defer.returnValue(og)
            return

        media_info = yield self._download_url(url, user)

        logger.debug("got media_info of '%s'" % media_info)

        if _is_media(media_info['media_type']):
            file_id = media_info['filesystem_id']
            dims = yield self.media_repo._generate_thumbnails(
                None, file_id, file_id, media_info["media_type"],
                url_cache=True,
            )

            og = {
                "og:description": media_info['download_name'],
                "og:image": "mxc://%s/%s" % (
                    self.server_name, media_info['filesystem_id']
                ),
                "og:image:type": media_info['media_type'],
                "matrix:image:size": media_info['media_length'],
            }

            if dims:
                og["og:image:width"] = dims['width']
                og["og:image:height"] = dims['height']
            else:
                logger.warn("Couldn't get dims for %s" % url)

            # define our OG response for this media
        elif _is_html(media_info['media_type']):
            # TODO: somehow stop a big HTML tree from exploding synapse's RAM

            with open(media_info['filename'], 'rb') as file:
                body = file.read()

            encoding = None

            # Let's try and figure out if it has an encoding set in a meta tag.
            # Limit it to the first 1kb, since it ought to be in the meta tags
            # at the top.
            match = _charset_match.search(body[:1000])

            # If we find a match, it should take precedence over the
            # Content-Type header, so set it here.
            if match:
                encoding = match.group(1).decode('ascii')

            # If we don't find a match, we'll look at the HTTP Content-Type, and
            # if that doesn't exist, we'll fall back to UTF-8.
            if not encoding:
                match = _content_type_match.match(
                    media_info['media_type']
                )
                encoding = match.group(1) if match else "utf-8"

            og = decode_and_calc_og(body, media_info['uri'], encoding)

            # pre-cache the image for posterity
            # FIXME: it might be cleaner to use the same flow as the main /preview_url
            # request itself and benefit from the same caching etc.  But for now we
            # just rely on the caching on the master request to speed things up.
            if 'og:image' in og and og['og:image']:
                image_info = yield self._download_url(
                    _rebase_url(og['og:image'], media_info['uri']), user
                )

                if _is_media(image_info['media_type']):
                    # TODO: make sure we don't choke on white-on-transparent images
                    file_id = image_info['filesystem_id']
                    dims = yield self.media_repo._generate_thumbnails(
                        None, file_id, file_id, image_info["media_type"],
                        url_cache=True,
                    )
                    if dims:
                        og["og:image:width"] = dims['width']
                        og["og:image:height"] = dims['height']
                    else:
                        logger.warn("Couldn't get dims for %s" % og["og:image"])

                    og["og:image"] = "mxc://%s/%s" % (
                        self.server_name, image_info['filesystem_id']
                    )
                    og["og:image:type"] = image_info['media_type']
                    og["matrix:image:size"] = image_info['media_length']
                else:
                    del og["og:image"]
        else:
            logger.warn("Failed to find any OG data in %s", url)
            og = {}

        logger.debug("Calculated OG for %s as %s" % (url, og))

        jsonog = json.dumps(og).encode('utf8')

        # store OG in history-aware DB cache
        yield self.store.store_url_cache(
            url,
            media_info["response_code"],
            media_info["etag"],
            media_info["expires"] + media_info["created_ts"],
            jsonog,
            media_info["filesystem_id"],
            media_info["created_ts"],
        )

        defer.returnValue(jsonog)
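
This version of _do_preview references _charset_match and _content_type_match without defining them. Plausible reconstructions (the content-type pattern mirrors the inline regex used in the later variant of this function below; the meta-charset pattern is our assumption, not copied from the source):

import re

# Assumed definitions, for illustration only:
_charset_match = re.compile(
    br'<\s*meta[^>]*charset\s*=\s*"?([a-zA-Z0-9_-]+)"?', re.I
)
_content_type_match = re.compile(r'.*; *charset="?(.*?)"?(;|$)', re.I)

assert _charset_match.search(b'<meta charset="utf-8">').group(1) == b"utf-8"
assert _content_type_match.match('text/html; charset=UTF-8').group(1) == "UTF-8"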
Example #57
0
    def insert_linearized_receipt_txn(self, txn, room_id, receipt_type,
                                      user_id, event_id, data, stream_id):
        res = self._simple_select_one_txn(
            txn,
            table="events",
            retcols=["topological_ordering", "stream_ordering"],
            keyvalues={"event_id": event_id},
            allow_none=True
        )

        stream_ordering = int(res["stream_ordering"]) if res else None

        # We don't want to clobber receipts for more recent events, so we
        # have to compare orderings of existing receipts
        if stream_ordering is not None:
            sql = (
                "SELECT stream_ordering, event_id FROM events"
                " INNER JOIN receipts_linearized as r USING (event_id, room_id)"
                " WHERE r.room_id = ? AND r.receipt_type = ? AND r.user_id = ?"
            )
            txn.execute(sql, (room_id, receipt_type, user_id))

            for so, eid in txn:
                if int(so) >= stream_ordering:
                    logger.debug(
                        "Ignoring new receipt for %s in favour of existing "
                        "one for later event %s",
                        event_id, eid,
                    )
                    return False

        txn.call_after(
            self.get_receipts_for_room.invalidate, (room_id, receipt_type)
        )
        txn.call_after(
            self._invalidate_get_users_with_receipts_in_room,
            room_id, receipt_type, user_id,
        )
        txn.call_after(
            self.get_receipts_for_user.invalidate, (user_id, receipt_type)
        )
        # FIXME: This shouldn't invalidate the whole cache
        txn.call_after(self._get_linearized_receipts_for_room.invalidate_many, (room_id,))

        txn.call_after(
            self._receipts_stream_cache.entity_has_changed,
            room_id, stream_id
        )

        txn.call_after(
            self.get_last_receipt_event_id_for_user.invalidate,
            (user_id, room_id, receipt_type)
        )

        self._simple_delete_txn(
            txn,
            table="receipts_linearized",
            keyvalues={
                "room_id": room_id,
                "receipt_type": receipt_type,
                "user_id": user_id,
            }
        )

        self._simple_insert_txn(
            txn,
            table="receipts_linearized",
            values={
                "stream_id": stream_id,
                "room_id": room_id,
                "receipt_type": receipt_type,
                "user_id": user_id,
                "event_id": event_id,
                "data": json.dumps(data),
            }
        )

        if receipt_type == "m.read" and stream_ordering is not None:
            self._remove_old_push_actions_before_txn(
                txn,
                room_id=room_id,
                user_id=user_id,
                stream_ordering=stream_ordering,
            )

        return True
Example #58
0
    @defer.inlineCallbacks
    def _do_preview(self, url, user, ts):
        """Check the db, and download the URL and build a preview

        Args:
            url (str):
            user (str):
            ts (int):

        Returns:
            Deferred[str]: json-encoded og data
        """
        # check the URL cache in the DB (which will also provide us with
        # historical previews, if we have any)
        cache_result = yield self.store.get_url_cache(url, ts)
        if (
            cache_result and
            cache_result["expires_ts"] > ts and
            cache_result["response_code"] / 100 == 2
        ):
            # It may be stored as text in the database, not as bytes (such as
            # PostgreSQL). If so, encode it back before handing it on.
            og = cache_result["og"]
            if isinstance(og, six.text_type):
                og = og.encode('utf8')
            defer.returnValue(og)
            return

        media_info = yield self._download_url(url, user)

        logger.debug("got media_info of '%s'" % media_info)

        if _is_media(media_info['media_type']):
            file_id = media_info['filesystem_id']
            dims = yield self.media_repo._generate_thumbnails(
                None, file_id, file_id, media_info["media_type"],
                url_cache=True,
            )

            og = {
                "og:description": media_info['download_name'],
                "og:image": "mxc://%s/%s" % (
                    self.server_name, media_info['filesystem_id']
                ),
                "og:image:type": media_info['media_type'],
                "matrix:image:size": media_info['media_length'],
            }

            if dims:
                og["og:image:width"] = dims['width']
                og["og:image:height"] = dims['height']
            else:
                logger.warn("Couldn't get dims for %s" % url)

            # define our OG response for this media
        elif _is_html(media_info['media_type']):
            # TODO: somehow stop a big HTML tree from exploding synapse's RAM

            with open(media_info['filename'], 'rb') as file:
                body = file.read()

            # clobber the encoding from the content-type, or default to utf-8
            # XXX: this overrides any <meta/> or XML charset headers in the body
            # which may pose problems, but so far seems to work okay.
            match = re.match(
                r'.*; *charset="?(.*?)"?(;|$)',
                media_info['media_type'],
                re.I
            )
            encoding = match.group(1) if match else "utf-8"

            og = decode_and_calc_og(body, media_info['uri'], encoding)

            # pre-cache the image for posterity
            # FIXME: it might be cleaner to use the same flow as the main /preview_url
            # request itself and benefit from the same caching etc.  But for now we
            # just rely on the caching on the master request to speed things up.
            if 'og:image' in og and og['og:image']:
                image_info = yield self._download_url(
                    _rebase_url(og['og:image'], media_info['uri']), user
                )

                if _is_media(image_info['media_type']):
                    # TODO: make sure we don't choke on white-on-transparent images
                    file_id = image_info['filesystem_id']
                    dims = yield self.media_repo._generate_thumbnails(
                        None, file_id, file_id, image_info["media_type"],
                        url_cache=True,
                    )
                    if dims:
                        og["og:image:width"] = dims['width']
                        og["og:image:height"] = dims['height']
                    else:
                        logger.warn("Couldn't get dims for %s" % og["og:image"])

                    og["og:image"] = "mxc://%s/%s" % (
                        self.server_name, image_info['filesystem_id']
                    )
                    og["og:image:type"] = image_info['media_type']
                    og["matrix:image:size"] = image_info['media_length']
                else:
                    del og["og:image"]
        else:
            logger.warn("Failed to find any OG data in %s", url)
            og = {}

        logger.debug("Calculated OG for %s as %s" % (url, og))

        jsonog = json.dumps(og).encode('utf8')

        # store OG in history-aware DB cache
        yield self.store.store_url_cache(
            url,
            media_info["response_code"],
            media_info["etag"],
            media_info["expires"] + media_info["created_ts"],
            jsonog,
            media_info["filesystem_id"],
            media_info["created_ts"],
        )

        defer.returnValue(jsonog)
Example #59
0
    def __repr__(self):
        return "<FilterCollection %s>" % (json.dumps(self._filter_json),)