def admin_new() -> _Response:
    """Render the "new post" admin page, optionally pre-filled as a reply.

    When a ``reply`` query parameter is present, the target activity is
    loaded from the local DB (or fetched remotely as a fallback), the
    content is pre-filled with an @-mention of its author, and the
    surrounding thread is built for display.
    """
    reply_id = None
    content = ""
    thread: List[Any] = []
    # (removed a leftover `print(request.args)` debug statement)
    if request.args.get("reply"):
        data = DB.activities.find_one(
            {"activity.object.id": request.args.get("reply")})
        if data:
            reply = ap.parse_activity(data["activity"])
        else:
            # Not in the local DB: fetch the remote object directly
            data = dict(
                meta={},
                activity=dict(object=ap.get_backend().fetch_iri(
                    request.args.get("reply"))),
            )
            reply = ap.parse_activity(data["activity"]["object"])

        reply_id = reply.id
        if reply.ACTIVITY_TYPE == ap.ActivityType.CREATE:
            # Reply targets the wrapped object, not the Create wrapper
            reply_id = reply.get_object().id
        actor = reply.get_actor()
        domain = urlparse(actor.id).netloc
        # FIXME(tsileo): if reply of reply, fetch all participants
        content = f"@{actor.preferredUsername}@{domain} "
        thread = _build_thread(data)

    return render_template(
        "new.html",
        reply=reply_id,
        content=content,
        thread=thread,
        visibility=ap.Visibility,
        emojis=config.EMOJIS.split(" "),
    )
def migrate(self) -> None:
    """Backfill ``meta.object`` / ``meta.actor`` on Follow activities.

    For "following" entries (we are the actor) the followed actor is
    embedded as ``meta.object``; for "followers" entries the remote actor
    is embedded as ``meta.actor``.  Remote actors are fetched at most once
    per run thanks to a local cache keyed by the actor IRI.
    """
    actor_cache: Dict[str, Dict[str, Any]] = {}

    def _fetch_actor(iri: str) -> Dict[str, Any]:
        # Fetch (and cache) the embeddable representation of an actor.
        actor = actor_cache.get(iri)
        if not actor:
            actor = ap.parse_activity(ap.get_backend().fetch_iri(
                iri, no_cache=True)).to_dict(embed=True)
            if not actor:
                raise ValueError(f"missing actor {iri!r}")
            # NOTE(review): cached under the actor's canonical id, which may
            # differ from the requested IRI (e.g. after a redirect), in which
            # case the cache will simply miss next time — same as before.
            actor_cache[actor["id"]] = actor
        return actor

    for data in DB.activities.find({"type": ap.ActivityType.FOLLOW.value}):
        try:
            if data["meta"]["actor_id"] == ID:
                # It's a "following"
                actor = _fetch_actor(data["meta"]["object_id"])
                DB.activities.update_one(
                    {"_id": data["_id"]},
                    {"$set": {"meta.object": actor}},
                )
            else:
                # It's a "followers"
                actor = _fetch_actor(data["meta"]["actor_id"])
                DB.activities.update_one(
                    {"_id": data["_id"]},
                    {"$set": {"meta.actor": actor}},
                )
        except Exception:
            logger.exception(f"failed to process actor {data!r}")
def post_to_remote_inbox(self, as_actor: ap.Person, payload_encoded: str,
                         recp: str) -> None:
    """Process a JSON-encoded activity addressed to a remote inbox URL.

    :param as_actor: sending actor (NOTE: rebound below, see comment).
    :param payload_encoded: the activity serialized as a JSON string.
    :param recp: recipient inbox URL (".../inbox").
    """
    payload = json.loads(payload_encoded)
    # NOTE(review): leftover debug print — consider a logger call instead
    print(f"post_to_remote_inbox {payload} {recp}")
    act = ap.parse_activity(payload)
    # The ``as_actor`` parameter is deliberately overwritten: the recipient
    # actor is derived from the inbox URL by stripping the "/inbox" suffix.
    # TODO confirm this shadowing is intentional.
    as_actor = ap.parse_activity(self.fetch_iri(recp.replace("/inbox", "")))
    act.process_from_inbox(as_actor)
def admin_new() -> _Response:
    """Render the "new post" admin page, optionally pre-filled as a reply.

    With a ``reply`` query parameter, loads the target activity (from the
    DB or remotely), picks a sensible default visibility, pre-fills the
    content with @-mentions of the author and other participants, and
    builds the thread for display.
    """
    reply_id = None
    content = ""
    thread: List[Any] = []
    # NOTE(review): leftover debug print — consider removing
    print(request.args)
    default_visibility = None  # ap.Visibility.PUBLIC
    if request.args.get("reply"):
        data = DB.activities.find_one(
            {"activity.object.id": request.args.get("reply")})
        if data:
            reply = ap.parse_activity(data["activity"])
        else:
            # Not in the local DB: fetch the remote object and fake a doc
            obj = ap.get_backend().fetch_iri(request.args.get("reply"))
            data = dict(meta=_meta(ap.parse_activity(obj)),
                        activity=dict(object=obj))
            data["_id"] = obj["id"]
            data["remote_id"] = obj["id"]
            reply = ap.parse_activity(data["activity"]["object"])
        # Fetch the post visibility, in case it's follower only
        default_visibility = ap.get_visibility(reply)
        # If it's public, we default the reply to unlisted
        if default_visibility == ap.Visibility.PUBLIC:
            default_visibility = ap.Visibility.UNLISTED
        reply_id = reply.id
        if reply.ACTIVITY_TYPE == ap.ActivityType.CREATE:
            # Reply targets the wrapped object, not the Create wrapper
            reply_id = reply.get_object().id
        actor = reply.get_actor()
        domain = urlparse(actor.id).netloc
        # FIXME(tsileo): if reply of reply, fetch all participants
        content = f"@{actor.preferredUsername}@{domain} "
        if reply.has_type(ap.ActivityType.CREATE):
            reply = reply.get_object()
        # Mention the other participants (skipping the author and ourselves)
        for mention in reply.get_mentions():
            if mention.href in [actor.id, ID]:
                continue
            m = ap.fetch_remote_activity(mention.href)
            if m.has_type(ap.ACTOR_TYPES):
                d = urlparse(m.id).netloc
                content += f"@{m.preferredUsername}@{d} "
        thread = _build_thread(data)
    return htmlify(
        render_template(
            "new.html",
            reply=reply_id,
            content=content,
            thread=thread,
            default_visibility=default_visibility,
            visibility=ap.Visibility,
            emojis=config.EMOJIS.split(" "),
            custom_emojis=sorted(
                [ap.Emoji(**dat) for name, dat in EMOJIS_BY_NAME.items()],
                key=lambda e: e.name,
            ),
        ))
def outbox_activity_replies(item_id):
    """Serve the public replies collection for one outbox activity."""
    if not is_api_request():
        abort(404)
    _log_sig()
    doc = DB.activities.find_one({
        "box": Box.OUTBOX.value,
        "remote_id": activity_url(item_id),
        "meta.deleted": False,
        "meta.public": True,
    })
    if doc is None:
        abort(404)
    activity = ap.parse_activity(doc["activity"])
    if activity.ACTIVITY_TYPE != ActivityType.CREATE:
        abort(404)
    # Replies are public Create activities whose object points back at ours
    replies_query = {
        "meta.deleted": False,
        "meta.public": True,
        "type": ActivityType.CREATE.value,
        "activity.object.inReplyTo": activity.get_object().id,
    }
    collection = activitypub.build_ordered_collection(
        DB.activities,
        q=replies_query,
        cursor=request.args.get("cursor"),
        map_func=lambda d: d["activity"]["object"],
        col_name=f"outbox/{item_id}/replies",
        first_page=request.args.get("page") == "first",
    )
    return jsonify(**collection)
def migrate(self) -> None:
    """Backfill mention/hashtag metadata on non-deleted Create activities."""
    query = {**by_type(ap.ActivityType.CREATE), **not_deleted()}
    for data in find_activities(query):
        try:
            obj = ap.parse_activity(data["activity"]).get_object()
            mentions = [mention.href for mention in obj.get_mentions()]
            # Strip the leading "#" from each hashtag name
            hashtags = [tag.name[1:] for tag in obj.get_hashtags()]
            update_one_activity(
                by_remote_id(data["remote_id"]),
                upsert({
                    MetaKey.MENTIONS: mentions,
                    MetaKey.HASHTAGS: hashtags,
                }),
            )
        except Exception:
            logger.exception(f"failed to process activity {data!r}")
def outbox_activity_replies(item_id):
    """Serve the public replies collection for one outbox activity."""
    if not is_api_request():
        abort(404)
    _log_sig()
    doc = DB.activities.find_one({
        **in_outbox(),
        **by_remote_id(activity_url(item_id)),
        **not_deleted(),
        **is_public(),
    })
    if doc is None:
        abort(404)
    activity = ap.parse_activity(doc["activity"])
    if activity.ACTIVITY_TYPE != ActivityType.CREATE:
        abort(404)
    # Replies are public Create activities whose object points back at ours
    replies_query = {
        **is_public(),
        **not_deleted(),
        **by_type(ActivityType.CREATE),
        "activity.object.inReplyTo": activity.get_object().id,
    }
    collection = activitypub.build_ordered_collection(
        DB.activities,
        q=replies_query,
        cursor=request.args.get("cursor"),
        map_func=lambda d: d["activity"]["object"],
        col_name=f"outbox/{item_id}/replies",
        first_page=request.args.get("page") == "first",
    )
    return activitypubify(**collection)
def undo_new_following(self, as_actor: ap.Person, object: ap.Follow) -> None:
    """Process an Undo of a Follow for a "following" relationship.

    Looks up the original Follow activity by its URL, resolves both actors
    from the local database, and removes the follow relation.
    """
    current_app.logger.info("undo following")
    actor_me = object.get_actor()
    current_app.logger.debug(f"{actor_me!r} unfollowing-undoed {object!r}")
    # An unfollowing is in fact "Undo an Activity"
    # ActivityPub is trash.
    follow_activity = Activity.query.filter(Activity.url == object.id).first()
    if not follow_activity:
        current_app.logger.error(f"cannot find activity {object}")
        return
    activity = ap.parse_activity(payload=follow_activity.payload)
    ap_actor_me = follow_activity.actor
    ap_actor_target = activity.get_object_id()
    # NOTE(review): the source actor is looked up by ``Actor.id`` while the
    # target is looked up by ``Actor.url`` — confirm this asymmetry matches
    # how ``Activity.actor`` is stored.
    db_actor = Actor.query.filter(Actor.id == ap_actor_me).first()
    db_follow = Actor.query.filter(Actor.url == ap_actor_target).first()
    if not db_actor:
        current_app.logger.error(f"cannot find actor {ap_actor_me!r}")
        return
    if not db_follow:
        current_app.logger.error(f"cannot find follow {ap_actor_target!r}")
        return
    # FIXME: may be the reverse, db_follow unfollow db_actor
    db_actor.unfollow(db_follow)
    db.session.commit()
    current_app.logger.info("undo following saved")
def undo_new_follower(self, as_actor: ap.Person, object: ap.Follow) -> None:
    """Process an Undo of a Follow received from a remote follower.

    Looks up the original Follow activity by its URL, resolves both actors
    from the local database, and removes the follow relation.
    """
    current_app.logger.info("undo follower")
    current_app.logger.debug(f"{as_actor!r} unfollow-undoed {object!r}")
    # An unfollow is in fact "Undo an Activity"
    # ActivityPub is trash.
    undo_activity = object.id
    # fetch the activity
    activity = Activity.query.filter(Activity.url == undo_activity).first()
    if not activity:
        current_app.logger.error(f"cannot find activity"
                                 f" to undo: {undo_activity}")
        return
    # Parse the activity
    ap_activity = ap.parse_activity(activity.payload)
    if not ap_activity:
        current_app.logger.error(f"cannot parse undo follower activity")
        return
    actor = ap_activity.get_actor()
    follow = ap_activity.get_object()
    db_actor = Actor.query.filter(Actor.url == actor.id).first()
    db_follow = Actor.query.filter(Actor.url == follow.id).first()
    if not db_actor:
        current_app.logger.error(f"cannot find actor {actor!r}")
        return
    if not db_follow:
        current_app.logger.error(f"cannot find follow {follow!r}")
        return
    db_actor.unfollow(db_follow)
    db.session.commit()
    current_app.logger.info("undo follower saved")
def outbox_delete(self, as_actor: ap.Person, delete: ap.Delete) -> None:
    """Process a Delete activity posted to the outbox.

    Marks the deleted object as ``meta.deleted``, resolves the actual
    object when the Delete targets a non-Note wrapper, flags the single
    referencing activity as undone, and updates the parent's reply state.
    """
    obj = delete.get_object()
    DB.activities.update_one(
        {"activity.object.id": obj.id},
        {"$set": {"meta.deleted": True}},
    )
    if obj.ACTIVITY_TYPE != ap.ActivityType.NOTE:
        # The Delete may target the Create wrapper; resolve the real object
        obj = ap.parse_activity(
            DB.activities.find_one({
                "activity.object.id": obj.id,
                "type": ap.ActivityType.CREATE.value,
            })["activity"]).get_object()
    # The deprecated ``update`` call defaulted to a single-document update,
    # so ``update_one`` preserves the original behavior.
    # Typo fix: the flag used to be written as "meta.exta".
    DB.activities.update_one(
        {"meta.object.id": obj.id},
        {"$set": {"meta.undo": True, "meta.extra": "object deleted"}},
    )
    self._handle_replies_delete(as_actor, obj.inReplyTo)
def outbox_item(item_id):
    """Serve a single outbox activity as AP JSON (410 Tombstone if deleted)."""
    be = activitypub.get_backend()
    if not be:
        abort(500)
    # data = request.get_json()
    # if not data:
    #     abort(500)
    current_app.logger.debug(f"req_headers={request.headers}")
    # current_app.logger.debug(f"raw_data={data}")
    current_app.logger.debug(f"activity url {be.activity_url(item_id)}")
    item = Activity.query.filter(
        Activity.box == Box.OUTBOX.value,
        Activity.url == be.activity_url(item_id)).first()
    if not item:
        abort(404)
    if item.meta_deleted:
        # Deleted activities are served as a Tombstone with HTTP 410
        obj = activitypub.parse_activity(item.payload)
        resp = jsonify(**obj.get_tombstone().to_dict())
        resp.status_code = 410
        return resp
    current_app.logger.debug(f"item payload=={item.payload}")
    return jsonify(**activity_from_doc(item.payload))
def user_inbox(name):
    """Handle an ActivityPub POST delivered to a user's inbox.

    Verifies the HTTP signature first; on failure, falls back to
    re-fetching the activity by its IRI so the payload is at least known
    to be authentic before processing.

    NOTE(review): the ``name`` route parameter is not used in the body.
    """
    be = activitypub.get_backend()
    if not be:
        abort(500)
    data = request.get_json(force=True)
    if not data:
        abort(500)
    current_app.logger.debug(f"req_headers={request.headers}")
    current_app.logger.debug(f"raw_data={data}")
    try:
        if not verify_request(request.method, request.path, request.headers,
                              request.data):
            raise Exception("failed to verify request")
    except Exception:
        current_app.logger.exception("failed to verify request")
        # Signature check failed: fall back to fetching the activity by IRI
        try:
            data = be.fetch_iri(data["id"])
        except Exception:
            current_app.logger.exception(f"failed to fetch remote id "
                                         f"at {data['id']}")
            resp = {
                "error":
                "failed to verify request "
                "(using HTTP signatures or fetching the IRI)"
            }
            response = jsonify(resp)
            response.mimetype = "application/json; charset=utf-8"
            response.status = 422
            return response
    activity = activitypub.parse_activity(data)
    current_app.logger.debug(f"inbox_activity={activity}/{data}")
    post_to_inbox(activity)
    return Response(status=201)
def outbox_item_activity(item_id):
    """
    Outbox activity
    ---
    tags:
        - ActivityPub
    responses:
        200:
            description: Returns something
    """
    be = activitypub.get_backend()
    if not be:
        abort(500)
    item = Activity.query.filter(
        Activity.box == Box.OUTBOX.value,
        Activity.url == be.activity_url(item_id)).first()
    if not item:
        abort(404)
    obj = activity_from_doc(item.payload)
    if item.meta_deleted:
        # Deleted: serve a Tombstone with HTTP 410
        obj = activitypub.parse_activity(item.payload)
        # FIXME not sure about that /activity
        tomb = obj.get_tombstone().to_dict()
        tomb["id"] = tomb["id"] + "/activity"
        resp = jsonify(tomb)
        resp.status_code = 410
        return resp
    # Only Create activities expose their wrapped object here
    if obj["type"] != activitypub.ActivityType.CREATE.value:
        abort(404)
    return jsonify(**obj["object"])
def api_undo() -> _Response:
    """Undo an outbox activity identified by the "id" API argument."""
    oid = _user_api_arg("id")
    # The id may be passed either as a short id or as a full IRI
    match = {
        "box": Box.OUTBOX.value,
        "$or": [{"remote_id": activity_url(oid)}, {"remote_id": oid}],
    }
    doc = DB.activities.find_one(match)
    if doc is None:
        raise ActivityNotFoundError(f"cannot found {oid}")
    target = ap.parse_activity(doc.get("activity"))
    undo_activity = ap.Undo(
        actor=MY_PERSON.id,
        context=new_context(target),
        object=target.to_dict(embed=True, embed_object_id_only=True),
        published=now(),
        to=target.to,
        cc=target.cc,
    )
    # FIXME(tsileo): detect already undo-ed and make this API call idempotent
    return _user_api_response(activity=post_to_outbox(undo_activity))
def outbox():
    """Outbox endpoint: GET serves the public collection, POST publishes."""
    if request.method != "GET":
        # Handle POST request (client-to-server publishing, auth required)
        try:
            _api_required()
        except BadSignature:
            abort(401)
        incoming = request.get_json(force=True)
        new_id = post_to_outbox(ap.parse_activity(incoming))
        return Response(status=201, headers={"Location": new_id})

    # GET: serve the public outbox collection
    if not is_api_request():
        abort(404)
    _log_sig()
    # TODO(tsileo): returns the whole outbox if authenticated and look at OCAP support
    outbox_query = {
        "box": Box.OUTBOX.value,
        "meta.deleted": False,
        "meta.undo": False,
        "meta.public": True,
        "type": {
            "$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]
        },
    }
    return jsonify(**activitypub.build_ordered_collection(
        DB.activities,
        q=outbox_query,
        cursor=request.args.get("cursor"),
        map_func=lambda doc: activity_from_doc(doc, embed=True),
        col_name="outbox",
    ))
def _user_api_get_note(from_outbox: bool = False) -> ap.BaseActivity:
    """Resolve the "id" user-API argument into a parsed activity.

    When ``from_outbox`` is true, the activity must be owned by this
    server, otherwise ``NotFromOutboxError`` is raised.
    """
    object_id = _user_api_arg("id")
    app.logger.info(f"fetching {object_id}")
    activity = ap.parse_activity(ap.get_backend().fetch_iri(object_id))
    if not from_outbox:
        return activity
    if activity.id.startswith(ID):
        return activity
    raise NotFromOutboxError(
        f"cannot load {activity.id}, id must be owned by the server")
def api_accept_follow() -> _Response:
    """Accept a pending Follow from the inbox (identified by "id").

    Raises ActivityNotFoundError when the id is unknown and ValueError
    when the referenced activity is not a Follow.
    """
    oid = _user_api_arg("id")
    doc = DB.activities.find_one({"box": Box.INBOX.value, "remote_id": oid})
    # (removed a leftover `print(doc)` debug statement)
    if not doc:
        raise ActivityNotFoundError(f"cannot found {oid}")
    obj = ap.parse_activity(doc.get("activity"))
    if not obj.has_type(ap.ActivityType.FOLLOW):
        raise ValueError(f"{obj} is not a Follow activity")
    accept_id = accept_follow(obj)
    return _user_api_response(activity=accept_id)
def admin_direct_messages() -> _Response: all_dms = DB.activities.find({ **not_poll_answer(), **by_type(ap.ActivityType.CREATE), **by_object_visibility(ap.Visibility.DIRECT), }).sort("meta.published", -1) # Group by threads _threads = defaultdict(list) # type: ignore for dm in all_dms: # Skip poll answers if dm["activity"].get("object", {}).get("name"): continue _threads[dm["meta"].get("thread_root_parent", dm["meta"]["object_id"])].append(dm) # Now build the data needed for the UI threads = [] for thread_root, thread in _threads.items(): # We need the list of participants participants = set() for raw_activity in thread: activity = ap.parse_activity(raw_activity["activity"]) actor = activity.get_actor() domain = urlparse(actor.id).netloc if actor.id != ID: participants.add(f"@{actor.preferredUsername}@{domain}") if activity.has_type(ap.ActivityType.CREATE): activity = activity.get_object() for mention in activity.get_mentions(): if mention.href in [actor.id, ID]: continue m = ap.fetch_remote_activity(mention.href) if m.has_type(ap.ACTOR_TYPES) and m.id != ID: d = urlparse(m.id).netloc participants.add(f"@{m.preferredUsername}@{d}") if not participants: continue # Build the UI data for this conversation oid = thread[-1]["meta"]["object_id"] threads.append({ "participants": list(participants), "oid": oid, "last_reply": thread[0], "len": len(thread), }) return htmlify(render_template("direct_messages.html", threads=threads))
def lookup(url: str) -> ap.BaseActivity:
    """Try to find an AP object related to the given URL.

    Resolution order:
      1. "@user@host" handles via webfinger.
      2. HEAD the URL; on a non-2xx status, fetch it directly as AP.
      3. An HTML alternate link of type "application/activity+json".
      4. The response body parsed as JSON-LD.
      5. Content negotiation with the AP Accept header.

    Raises RemoteServerUnavailableError when the remote cannot be reached.
    """
    try:
        if url.startswith("@"):
            actor_url = get_actor_url(url)
            if actor_url:
                return ap.fetch_remote_activity(actor_url)
    except NotAnActivityError:
        pass
    except requests.HTTPError:
        # Some websites may returns 404, 503 or others when they don't support webfinger, and we're just taking a guess
        # when performing the lookup.
        pass
    except requests.RequestException as err:
        raise RemoteServerUnavailableError(f"failed to fetch {url}: {err!r}")

    backend = ap.get_backend()
    try:
        resp = requests.head(
            url,
            timeout=10,
            allow_redirects=True,
            headers={"User-Agent": backend.user_agent()},
        )
    except requests.RequestException as err:
        raise RemoteServerUnavailableError(f"failed to GET {url}: {err!r}")

    try:
        resp.raise_for_status()
    except Exception:
        # Non-2xx: try to fetch the URL as an ActivityPub object directly
        return ap.fetch_remote_activity(url)

    # If the page is HTML, maybe it contains an alternate link pointing to an AP object
    # NOTE(review): a HEAD response normally has no body, so ``resp.text``
    # and ``resp.json()`` below may always be empty — confirm whether this
    # was meant to be a GET.
    for alternate in mf2py.parse(resp.text).get("alternates", []):
        if alternate.get("type") == "application/activity+json":
            return ap.fetch_remote_activity(alternate["url"])

    try:
        # Maybe the page was JSON-LD?
        data = resp.json()
        return ap.parse_activity(data)
    except Exception:
        pass

    # Try content negotiation (retry with the AP Accept header)
    return ap.fetch_remote_activity(url)
def api_vote() -> _Response:
    """Cast a vote on a poll: publish a bare Note naming the chosen option."""
    poll_id = _user_api_arg("id")
    app.logger.info(f"fetching {poll_id}")
    poll = ap.parse_activity(ap.get_backend().fetch_iri(poll_id))
    choice = _user_api_arg("choice")
    # A vote is a Note whose ``name`` is the selected choice, addressed
    # only to the poll's author.
    ballot = {
        "attributedTo": MY_PERSON.id,
        "cc": [],
        "to": poll.get_actor().id,
        "name": choice,
        "tag": [],
        "inReplyTo": poll.id,
        "@context": config.DEFAULT_CTX,
    }
    vote_note = ap.Note(**ballot)
    create = vote_note.build_create()
    return _user_api_response(activity=post_to_outbox(create))
def _delete_process_outbox(delete: ap.Delete, new_meta: _NewMeta) -> None:
    """Process a Delete activity leaving the outbox.

    Flags every activity referencing the deleted object as deleted/undone
    (except the Delete itself), then decrements the parent's reply
    counters when the deleted activity was a locally-stored reply.

    NOTE(review): ``new_meta`` is not used in this body.
    """
    _logger.info(f"process_outbox activity={delete!r}")
    obj_id = delete.get_object_id()
    # Flag everything referencing the deleted object as deleted (except the Delete activity itself)
    update_many_activities(
        {
            **by_object_id(obj_id),
            "remote_id": {"$ne": delete.id},
        },
        upsert({
            MetaKey.DELETED: True,
            MetaKey.UNDO: True,
        }),
    )
    # If the deleted activity was in DB, decrease some threads-related counter
    data = find_one_activity({
        **by_object_id(obj_id),
        **by_type(ap.ActivityType.CREATE),
    })
    _logger.info(f"found local copy of deleted activity: {data}")
    if data:
        obj = ap.parse_activity(data["activity"]).get_object()
        _logger.info(f"obj={obj!r}")
        in_reply_to = obj.get_in_reply_to()
        if in_reply_to:
            # Decrement the parent's reply counters
            update_one_activity(
                {
                    **by_type(ap.ActivityType.CREATE),
                    **by_object_id(in_reply_to),
                },
                {
                    "$inc": {
                        "meta.count_reply": -1,
                        "meta.count_direct_reply": -1,
                    }
                },
            )
def outbox():
    """Outbox endpoint: GET serves the public collection, POST publishes."""
    if request.method != "GET":
        # Handle POST request aka C2S API
        try:
            _api_required()
        except BadSignature:
            abort(401)
        incoming = request.get_json(force=True)
        new_id = post_to_outbox(ap.parse_activity(incoming))
        return Response(status=201, headers={"Location": new_id})

    # GET: serve the public outbox collection
    if not is_api_request():
        abort(404)
    _log_sig()
    # TODO(tsileo): returns the whole outbox if authenticated and look at OCAP support
    outbox_query = {
        **in_outbox(),
        "$or": [
            {
                **by_type(ActivityType.CREATE),
                **not_deleted(),
                **by_visibility(ap.Visibility.PUBLIC),
            },
            {
                **by_type(ActivityType.ANNOUNCE),
                **not_undo(),
            },
        ],
    }
    return activitypubify(**activitypub.build_ordered_collection(
        DB.activities,
        q=outbox_query,
        cursor=request.args.get("cursor"),
        map_func=lambda doc: activity_from_doc(doc, embed=True),
        col_name="outbox",
    ))
def outbox_activity_shares(item_id):
    """Serve the shares (Announce) collection for one outbox activity."""
    if not is_api_request():
        abort(404)
    doc = DB.activities.find_one({
        "box": Box.OUTBOX.value,
        "remote_id": activity_url(item_id),
        "meta.deleted": False,
    })
    if doc is None:
        abort(404)
    _log_sig()
    activity = ap.parse_activity(doc["activity"])
    if activity.ACTIVITY_TYPE != ActivityType.CREATE:
        abort(404)
    object_id = activity.get_object().id
    # Announces may reference the object either embedded or by bare id
    shares_query = {
        "meta.undo": False,
        "type": ActivityType.ANNOUNCE.value,
        "$or": [
            {"activity.object.id": object_id},
            {"activity.object": object_id},
        ],
    }
    return activitypubify(**activitypub.build_ordered_collection(
        DB.activities,
        q=shares_query,
        cursor=request.args.get("cursor"),
        map_func=lambda d: remove_context(d["activity"]),
        col_name=f"outbox/{item_id}/shares",
        first_page=request.args.get("page") == "first",
    ))
def _store_payload_in_trash(data) -> None:
    """Track/store a rejected payload in the trash collection for analysis."""
    ip, geoip = _get_ip()
    DB.trash.insert({
        "activity": data,
        "meta": {
            "ts": datetime.now().timestamp(),
            "ip_address": ip,
            "geoip": geoip,
            "tb": traceback.format_exc(),
            "headers": dict(request.headers),
            "request_id": g.request_id,
        },
    })


def _unverified_response() -> Response:
    """Build the 422 JSON response for a payload that cannot be verified."""
    return Response(
        status=422,
        headers={"Content-Type": "application/json"},
        response=json.dumps({
            "error":
            "failed to verify request (using HTTP signatures or fetching the IRI)",
            "request_id": g.request_id,
        }),
    )


def inbox():
    """ActivityPub inbox endpoint.

    GET returns the (authenticated) inbox collection.  POST accepts an
    incoming activity, verifies it (HTTP signature first, then by
    re-fetching its IRI as a fallback) and hands it to ``post_to_inbox``.
    Unverifiable payloads are stored in the trash collection.
    """
    # GET /inbox
    if request.method == "GET":
        if not is_api_request():
            abort(404)
        try:
            _api_required()
        except BadSignature:
            abort(404)
        return activitypubify(**activitypub.build_ordered_collection(
            DB.activities,
            q={"meta.deleted": False, "box": Box.INBOX.value},
            cursor=request.args.get("cursor"),
            map_func=lambda doc: remove_context(doc["activity"]),
            col_name="inbox",
        ))

    # POST /inbox
    try:
        data = request.get_json(force=True)
        if not isinstance(data, dict):
            raise ValueError("not a dict")
    except Exception:
        return Response(
            status=422,
            headers={"Content-Type": "application/json"},
            response=json.dumps({
                "error": "failed to decode request body as JSON",
                "request_id": g.request_id,
            }),
        )

    # Check the blacklist now to see if we can return super early
    if is_blacklisted(data):
        logger.info(f"dropping activity from blacklisted host: {data['id']}")
        return Response(status=201)

    logger.info(f"request_id={g.request_id} req_headers={request.headers!r}")
    logger.info(f"request_id={g.request_id} raw_data={data}")
    try:
        req_verified, actor_id = verify_request(request.method, request.path,
                                                request.headers, request.data)
        if not req_verified:
            raise Exception("failed to verify request")
        logger.info(f"request_id={g.request_id} signed by {actor_id}")
    except Exception:
        logger.exception(
            f"failed to verify request {g.request_id}, trying to verify the payload by fetching the remote"
        )
        try:
            remote_data = get_backend().fetch_iri(data["id"])
        except ActivityGoneError:
            # XXX Mastodon sends Delete activities that are not dereferencable, it's the actor url with #delete
            # appended, so an `ActivityGoneError` kind of ensure it's "legit"
            if data["type"] == ActivityType.DELETE.value and data[
                    "id"].startswith(data["object"]):
                # If we're here, this means the key is not saved, so we cannot verify the object
                logger.info(
                    f"received a Delete for an unknown actor {data!r}, drop it"
                )
                return Response(status=201)
            # NOTE(review): falling through here leaves ``remote_data``
            # unbound and raises NameError below — preserved as-is.
        except Exception:
            logger.exception(f"failed to fetch remote for payload {data!r}")
            if "type" in data:
                # Friendica does not returns a 410, but a 302 that redirect to an HTML page
                if ap._has_type(data["type"], ActivityType.DELETE):
                    logger.info(
                        f"received a Delete for an unknown actor {data!r}, drop it"
                    )
                    return Response(status=201)
            if "id" in data:
                if DB.trash.find_one({"activity.id": data["id"]}):
                    # It's already stored in trash, returns early
                    return _unverified_response()
            # Now we can store this activity in the trash for later analysis
            _store_payload_in_trash(data)
            return _unverified_response()
        # We fetched the remote data successfully
        data = remote_data
    try:
        activity = ap.parse_activity(data)
    except ValueError:
        # Bug fix: the ``f`` prefix was missing, so the request id and
        # payload were never interpolated into the log message.
        logger.exception(
            f"failed to parse activity for req {g.request_id}: {data!r}")
        # Track/store the payload for analysis
        _store_payload_in_trash(data)
        return Response(status=201)

    logger.debug(f"inbox activity={g.request_id}/{activity}/{data}")
    post_to_inbox(activity)
    return Response(status=201)
def unfollow(username_or_id):
    """
    Unfollow an account.
    ---
    tags:
        - Accounts
    parameters:
        - name: id
          in: path
          type: integer
          required: true
          description: User ID to unfollow
    responses:
        200:
            description: Returns Relationship
            schema:
                $ref: '#/definitions/Relationship'
    """
    current_user = current_token.user
    if not current_user:
        abort(400)
    # The target may be referenced by local name or by flake id
    user = User.query.filter(User.name == username_or_id,
                             User.local.is_(True)).first()
    if not user:
        try:
            user = User.query.filter(
                User.flake_id == username_or_id).first()
        except sqlalchemy.exc.DataError:
            abort(404)
    if not user:
        abort(404)
    actor_me = current_user.actor[0]
    actor_them = user.actor[0]
    if user.local:
        # Local target: drop the relation directly
        actor_me.unfollow(actor_them)
        return jsonify([to_json_relationship(current_user, user)])
    else:
        # Get the relation of the follow
        follow_relation = Follower.query.filter(
            Follower.actor_id == actor_me.id,
            Follower.target_id == actor_them.id).first()
        if not follow_relation:
            return jsonify({"error": "follow relation not found"}), 404
        # Fetch the related Activity of the Follow relation
        accept_activity = Activity.query.filter(
            Activity.url == follow_relation.activity_url).first()
        if not accept_activity:
            current_app.logger.error(
                f"cannot find accept activity {follow_relation.activity_url}")
            return jsonify({"error": "cannot found the accept activity"}), 500
        # Then the Activity ID of the Accept will be the object id
        activity = ap.parse_activity(payload=accept_activity.payload)
        # get the final activity (the Follow one)
        follow_activity = Activity.query.filter(
            Activity.url == activity.get_object_id()).first()
        if not follow_activity:
            current_app.logger.error(
                f"cannot find follow activity {activity.get_object_id()}")
            return jsonify({"error": "cannot find follow activity"}), 500
        ap_follow_activity = ap.parse_activity(payload=follow_activity.payload)
        # initiate an Undo of the Follow request
        unfollow = ap_follow_activity.build_undo()
        post_to_outbox(unfollow)
        return jsonify(""), 202
def follow():
    """Follow a user, local or remote (via webfinger + ActivityPub Follow).

    Reads the target from the ``user`` query parameter: either a full
    actor URL ("https://...") or a username / user@instance handle.
    """
    user = request.args.get("user")
    actor_me = current_user.actor[0]

    if user.startswith("https://"):
        actor = Actor.query.filter(Actor.url == user).first()
        if actor:
            local_user = actor.user
        else:
            local_user = None
    else:
        local_user = User.query.filter(User.name == user).first()

    if local_user:
        # Process local follow
        actor_me.follow(None, local_user.actor[0])
        flash(gettext("Follow successful"), "success")
    else:
        # Might be a remote follow
        # 1. Webfinger the user
        try:
            remote_actor_url = get_actor_url(user, debug=current_app.debug)
        except InvalidURLError:
            current_app.logger.exception(f"Invalid webfinger URL: {user}")
            remote_actor_url = None
        if not remote_actor_url:
            flash(gettext("User not found"), "error")
            return redirect(url_for("bp_users.profile",
                                    name=current_user.name))
        # 2. Check if we have a local user
        actor_target = Actor.query.filter(
            Actor.url == remote_actor_url).first()
        if not actor_target:
            # 2.5 Fetch and save remote actor
            backend = ap.get_backend()
            iri = backend.fetch_iri(remote_actor_url)
            if not iri:
                flash(gettext("User not found"), "error")
                return redirect(url_for("bp_main.home"))
            act = ap.parse_activity(iri)
            actor_target, user_target = create_remote_actor(act)
            db.session.add(user_target)
            db.session.add(actor_target)
        # 2.7 Check if we already have a relation
        rel = Follower.query.filter(
            Follower.actor_id == actor_me.id,
            Follower.target_id == actor_target.id).first()
        if not rel:
            # 3. Initiate a Follow request from actor_me to actor_target
            follow = ap.Follow(actor=actor_me.url, object=actor_target.url)
            post_to_outbox(follow)
            flash(gettext("Follow request have been transmitted"), "success")
        else:
            # Bug fix: the "info" category used to be passed to gettext()
            # instead of flash()
            flash(gettext("You already follow this user"), "info")

    return redirect(url_for("bp_users.profile", name=current_user.name))
def search():
    """
    Search.
    ---
    tags:
        - Global
    parameters:
        - name: q
          in: query
          type: string
          required: true
          description: search string
    responses:
        200:
            description: fixme.
    """
    # Get logged in user from bearer token, or None if not logged in
    if current_token:
        current_user = current_token.user
    else:
        current_user = None
    s = request.args.get("q", None)
    if not s:
        return jsonify({"error": "No search string provided"}), 400
    # This is the old search endpoint and needs to be improved
    # Especially tracks and accounts needs to be returned in the right format, with the data helpers
    # Users should be searched from known Actors or fetched
    # URI should be searched from known activities or fetched
    # FTS, well, FTS needs to be implemented
    results = {"accounts": [], "sounds": [], "mode": None, "from": None}
    if current_user:
        results["from"] = current_user.name
    # Search for sounds
    # TODO: Implement FTS to get sounds search
    sounds = []
    # Search for accounts
    accounts = []
    is_user_at_account = RE_ACCOUNT.match(s)
    if s.startswith("https://"):
        # Try to match the URI from Activities in database
        results["mode"] = "uri"
        users = Actor.query.filter(Actor.meta_deleted.is_(False),
                                   Actor.url == s).all()
    elif is_user_at_account:
        # It matches a user@instance handle, try to match it locally
        results["mode"] = "acct"
        user = is_user_at_account.group("user")
        instance = is_user_at_account.group("instance")
        users = Actor.query.filter(Actor.meta_deleted.is_(False),
                                   Actor.preferred_username == user,
                                   Actor.domain == instance).all()
    else:
        # It's a FTS search
        results["mode"] = "username"
        # Match actor username in database
        if current_user:
            # NOTE(review): this query yields (Actor, Follower) row tuples,
            # while the loop below accesses ``actor.user`` directly — verify
            # the rows are unpacked as intended
            users = (db.session.query(Actor, Follower).outerjoin(
                Follower,
                and_(Actor.id == Follower.target_id,
                     Follower.actor_id == current_user.actor[0].id)).filter(
                         or_(Actor.preferred_username.contains(s),
                             Actor.name.contains(s))).filter(
                                 not_(Actor.id ==
                                      current_user.actor[0].id)).all())
        else:
            users = (db.session.query(Actor).filter(
                or_(Actor.preferred_username.contains(s),
                    Actor.name.contains(s))).all())
    # Handle the found users
    if len(users) > 0:
        for actor in users:
            relationship = False
            if current_user:
                relationship = to_json_relationship(current_user, actor.user)
            accounts.append(to_json_account(actor.user, relationship))
    if len(accounts) <= 0:
        # Do a webfinger
        # TODO FIXME: We should do this only if https:// or user@account submitted
        # And rework it slightly differently since we needs to backend.fetch_iri() for https:// who
        # can match a Sound and not only an Actor
        current_app.logger.debug(f"webfinger for {s}")
        try:
            remote_actor_url = get_actor_url(s, debug=current_app.debug)
            # We need to get the remote Actor
            backend = ap.get_backend()
            iri = backend.fetch_iri(remote_actor_url)
            if iri:
                # We have fetched an unknown Actor
                # Save it in database and return it properly
                current_app.logger.debug(
                    f"got remote actor URL {remote_actor_url}")
                act = ap.parse_activity(iri)
                fetched_actor, fetched_user = create_remote_actor(act)
                db.session.add(fetched_user)
                db.session.add(fetched_actor)
                db.session.commit()
                relationship = False
                if current_user:
                    relationship = to_json_relationship(
                        current_user, fetched_user)
                accounts.append(to_json_account(fetched_user, relationship))
                results["mode"] = "webfinger"
        except (InvalidURLError, ValueError):
            current_app.logger.exception(f"Invalid AP URL: {s}")
    # Then test fetching as a "normal" Activity ?
    # Finally fill the results dict
    results["accounts"] = accounts
    # FIXME: handle exceptions
    if results["mode"] == "uri" and len(sounds) <= 0:
        backend = ap.get_backend()
        iri = backend.fetch_iri(s)
        if iri:
            # FIXME: Is INBOX the right choice here ?
            backend.save(Box.INBOX, iri)
            # Fetch again, but get it from database
            activity = Activity.query.filter(Activity.url == iri).first()
            if not activity:
                current_app.logger.exception("WTF Activity is not saved")
            else:
                from tasks import create_sound_for_remote_track, upload_workflow
                sound_id = create_sound_for_remote_track(activity)
                sound = Sound.query.filter(Sound.id == sound_id).one()
                upload_workflow.delay(sound.id)
                relationship = False
                if current_user:
                    relationship = to_json_relationship(
                        current_user, sound.user)
                acct = to_json_account(sound.user, relationship)
                # NOTE(review): ``sounds`` is built here but never copied
                # into ``results["sounds"]`` — confirm whether that is
                # intentional
                sounds.append(to_json_track(sound, acct))
    return jsonify({"who": s, "results": results})
def unfollow():
    """Unfollow a user, local or remote.

    Reads the target from the ``user`` query arg (either a full actor URL or
    a local/webfinger-able name). Local unfollows are handled directly; remote
    ones rebuild the original Follow activity and post an Undo to the outbox.

    Returns a redirect to the current user's profile (or home on failure).
    """
    user = request.args.get("user")
    actor_me = current_user.actor[0]
    if user.startswith("https://"):
        actor = Actor.query.filter(Actor.url == user).first()
        # FIX: actor can be None for an unknown URL; previously this raised
        # AttributeError on `actor.user`. Fall through to the remote path.
        local_user = actor.user if actor else None
    else:
        local_user = User.query.filter(User.name == user).first()

    if local_user:
        # Process local unfollow
        actor_me.unfollow(local_user.actor[0])
        flash(gettext("Unfollow successful"), "success")
    else:
        # Might be a remote unfollow
        # 1. Webfinger the user
        try:
            remote_actor_url = get_actor_url(user, debug=current_app.debug)
        except InvalidURLError:
            current_app.logger.exception(f"Invalid webfinger URL: {user}")
            remote_actor_url = None
        except requests.exceptions.HTTPError:
            # FIX: was "Invali webfinger URL" (typo in log message)
            current_app.logger.exception(f"Invalid webfinger URL: {user}")
            remote_actor_url = None
        if not remote_actor_url:
            flash(gettext("User not found"), "error")
            return redirect(url_for("bp_users.profile", name=current_user.name))

        # 2. Check if we have a local user
        actor_target = Actor.query.filter(
            Actor.url == remote_actor_url).first()
        if not actor_target:
            # 2.5 Fetch and save remote actor
            backend = ap.get_backend()
            iri = backend.fetch_iri(remote_actor_url)
            if not iri:
                flash(gettext("User not found"), "error")
                return redirect(url_for("bp_main.home"))
            act = ap.parse_activity(iri)
            actor_target, user_target = create_remote_actor(act)
            db.session.add(user_target)
            db.session.add(actor_target)
            # NOTE(review): no db.session.commit() here, unlike the search
            # code path — presumably flushed at request teardown; confirm.

        # 2.5 Get the relation of the follow
        follow_relation = Follower.query.filter(
            Follower.actor_id == actor_me.id,
            Follower.target_id == actor_target.id).first()
        if not follow_relation:
            flash(gettext("You don't follow this user"), "error")
            return redirect(url_for("bp_users.profile", name=current_user.name))

        # 3. Fetch the Activity of the Follow
        accept_activity = Activity.query.filter(
            Activity.url == follow_relation.activity_url).first()
        if not accept_activity:
            current_app.logger.error(
                f"cannot find accept activity {follow_relation.activity_url}")
            flash(gettext("Whoops, something went wrong"))
            return redirect(url_for("bp_users.profile", name=current_user.name))
        # Then the Activity ID of the Accept will be the object id
        activity = ap.parse_activity(payload=accept_activity.payload)
        # Get the final activity (the Follow one)
        follow_activity = Activity.query.filter(
            Activity.url == activity.get_object_id()).first()
        if not follow_activity:
            current_app.logger.error(
                f"cannot find follow activity {activity.get_object_id()}")
            flash(gettext("Whoops, something went wrong"))
            return redirect(url_for("bp_users.profile", name=current_user.name))
        ap_follow_activity = ap.parse_activity(payload=follow_activity.payload)

        # 4. Initiate a Follow request from actor_me to actor_target
        unfollow = ap_follow_activity.build_undo()
        post_to_outbox(unfollow)
        flash(gettext("Unfollow request have been transmitted"), "success")

    return redirect(url_for("bp_users.profile", name=current_user.name))
def perform() -> None:  # noqa: C901
    """Garbage-collect old inbox activities.

    Deletes inbox Delete activities, then old Create activities (inbox and
    replies collections), then old Announce activities, all published before
    the ``DAYS_TO_KEEP`` cutoff. Activities the server actor cares about
    (bookmarked, mentioning us, replying to us, in a thread of interest,
    boosted/liked, or announcing local content) are flagged via ``_keep``
    instead of deleted. Cached media attachments are removed alongside.
    """
    start = perf_counter()
    d = (datetime.utcnow() - timedelta(days=DAYS_TO_KEEP)).strftime("%Y-%m-%d")
    toi = threads_of_interest()
    logger.info(f"thread_of_interest={toi!r}")

    # Inbox Delete activities are never interesting once old: purge outright.
    delete_deleted = DB.activities.delete_many({
        **in_inbox(),
        **by_type(ap.ActivityType.DELETE),
        _meta(MetaKey.PUBLISHED): {
            "$lt": d
        },
    }).deleted_count
    logger.info(f"{delete_deleted} Delete deleted")

    create_deleted = 0
    create_count = 0
    # Go over the old Create activities
    for data in DB.activities.find({
            "box": Box.INBOX.value,
            "type": ap.ActivityType.CREATE.value,
            _meta(MetaKey.PUBLISHED): {
                "$lt": d
            },
            "meta.gc_keep": {
                "$exists": False
            },
    }).limit(500):
        try:
            logger.info(f"data={data!r}")
            create_count += 1
            remote_id = data["remote_id"]
            meta = data["meta"]

            # This activity has been bookmarked, keep it
            if meta.get("bookmarked"):
                _keep(data)
                continue

            obj = None
            # FIX: initialize so the `meta.get("keep")` branch below cannot
            # hit a NameError when the activity was never parsed (previously
            # that NameError was swallowed by the outer `except Exception`).
            activity = None
            in_reply_to = None
            if not meta.get("deleted"):
                try:
                    activity = ap.parse_activity(data["activity"])
                    logger.info(f"activity={activity!r}")
                    obj = activity.get_object()
                except (RemoteServerUnavailableError, ActivityGoneError):
                    logger.exception(
                        f"failed to load {remote_id}, this activity will be deleted"
                    )

            # This activity mentions the server actor, keep it
            if obj and obj.has_mention(ID):
                _keep(data)
                continue

            # This activity is a direct reply of one the server actor activity, keep it
            if obj:
                in_reply_to = obj.get_in_reply_to()
                if in_reply_to and in_reply_to.startswith(ID):
                    _keep(data)
                    continue

            # This activity is part of a thread we want to keep, keep it
            if obj and in_reply_to and meta.get("thread_root_parent"):
                thread_root_parent = meta["thread_root_parent"]
                if thread_root_parent.startswith(
                        ID) or thread_root_parent in toi:
                    _keep(data)
                    continue

            # This activity was boosted or liked, keep it
            if meta.get("boosted") or meta.get("liked"):
                _keep(data)
                continue

            # TODO(tsileo): remove after tests
            if meta.get("keep"):
                logger.warning(
                    f"{activity!r} would not have been deleted, skipping for now"
                )
                _keep(data)
                continue

            # Delete the cached attachment
            for grid_item in MEDIA_CACHE.fs.find({"remote_id": remote_id}):
                MEDIA_CACHE.fs.delete(grid_item._id)

            # Delete the activity
            DB.activities.delete_one({"_id": data["_id"]})
            create_deleted += 1
        except Exception:
            logger.exception(f"failed to process {data!r}")

    # Same pass over the dedicated replies collection.
    for data in DB.replies.find({
            _meta(MetaKey.PUBLISHED): {
                "$lt": d
            },
            "meta.gc_keep": {
                "$exists": False
            }
    }).limit(500):
        try:
            logger.info(f"data={data!r}")
            create_count += 1
            remote_id = data["remote_id"]
            meta = data["meta"]

            # This activity has been bookmarked, keep it
            if meta.get("bookmarked"):
                _keep(data)
                continue

            obj = ap.parse_activity(data["activity"])

            # This activity is a direct reply of one the server actor activity, keep it
            in_reply_to = obj.get_in_reply_to()

            # This activity is part of a thread we want to keep, keep it
            if in_reply_to and meta.get("thread_root_parent"):
                thread_root_parent = meta["thread_root_parent"]
                if thread_root_parent.startswith(
                        ID) or thread_root_parent in toi:
                    _keep(data)
                    continue

            # This activity was boosted or liked, keep it
            if meta.get("boosted") or meta.get("liked"):
                _keep(data)
                continue

            # Delete the cached attachment
            for grid_item in MEDIA_CACHE.fs.find({"remote_id": remote_id}):
                MEDIA_CACHE.fs.delete(grid_item._id)

            # Delete the activity
            DB.replies.delete_one({"_id": data["_id"]})
            create_deleted += 1
        except Exception:
            logger.exception(f"failed to process {data!r}")

    after_gc_create = perf_counter()
    time_to_gc_create = after_gc_create - start
    logger.info(
        f"{time_to_gc_create:.2f} seconds to analyze {create_count} Create, {create_deleted} deleted"
    )

    announce_count = 0
    announce_deleted = 0
    # Go over the old Create activities
    for data in DB.activities.find({
            "box": Box.INBOX.value,
            "type": ap.ActivityType.ANNOUNCE.value,
            _meta(MetaKey.PUBLISHED): {
                "$lt": d
            },
            "meta.gc_keep": {
                "$exists": False
            },
    }).limit(500):
        try:
            announce_count += 1
            remote_id = data["remote_id"]
            meta = data["meta"]
            activity = ap.parse_activity(data["activity"])
            logger.info(f"activity={activity!r}")

            # This activity has been bookmarked, keep it
            if meta.get("bookmarked"):
                _keep(data)
                continue

            object_id = activity.get_object_id()

            # This announce is for a local activity (i.e. from the outbox), keep it
            if object_id.startswith(ID):
                _keep(data)
                continue

            for grid_item in MEDIA_CACHE.fs.find({"remote_id": remote_id}):
                MEDIA_CACHE.fs.delete(grid_item._id)

            # TODO(tsileo): here for legacy reason, this needs to be removed at some point
            for grid_item in MEDIA_CACHE.fs.find({"remote_id": object_id}):
                MEDIA_CACHE.fs.delete(grid_item._id)

            # Delete the activity
            DB.activities.delete_one({"_id": data["_id"]})
            announce_deleted += 1
        except Exception:
            logger.exception(f"failed to process {data!r}")

    after_gc_announce = perf_counter()
    time_to_gc_announce = after_gc_announce - after_gc_create
    logger.info(
        f"{time_to_gc_announce:.2f} seconds to analyze {announce_count} Announce, {announce_deleted} deleted"
    )