def _undo_process_inbox(activity: ap.Undo, new_meta: _NewMeta) -> None:
    """Process an incoming Undo: flag the undone activity and roll back counters."""
    _logger.info(f"process_inbox activity={activity!r}")
    # Fetch the activity being undone
    undone = activity.get_object()
    # Flag it as undone so it disappears from streams/counters
    update_one_activity(by_remote_id(undone.id), upsert({MetaKey.UNDO: True}))
    # Keep the cached like/boost counters in sync
    if undone.has_type(ap.ActivityType.LIKE):
        # Decrement the like counter on the local Create (if any)
        update_one_activity(
            {**by_object_id(undone.get_object_id()), **by_type(ap.ActivityType.CREATE)},
            inc(MetaKey.COUNT_LIKE, -1),
        )
    elif undone.has_type(ap.ActivityType.ANNOUNCE):
        boosted = undone.get_object()
        # Decrement the boost counter on the local Create (if any)
        update_one_activity(
            {**by_type(ap.ActivityType.CREATE), **by_object_id(boosted.id)},
            inc(MetaKey.COUNT_BOOST, -1),
        )
def _emoji_reaction_process_inbox(emoji_reaction: ap.EmojiReaction, new_meta: _NewMeta) -> None:
    """Process an incoming EmojiReact: bump the per-emoji counter on the note."""
    _logger.info(f"process_inbox activity={emoji_reaction!r}")
    reacted = emoji_reaction.get_object()
    # Try to bump an existing counter entry for this emoji first
    # (positional `$` targets the matched array element)
    bumped = update_one_activity(
        {
            **by_type(ap.ActivityType.CREATE),
            **by_object_id(reacted.id),
            "meta.emoji_reactions.emoji": emoji_reaction.content,
        },
        {"$inc": {"meta.emoji_reactions.$.count": 1}},
    )
    if not bumped:
        # No entry for this emoji yet: bootstrap the counter at 1
        update_one_activity(
            {**by_type(ap.ActivityType.CREATE), **by_object_id(reacted.id)},
            {
                "$push": {
                    "meta.emoji_reactions": {
                        "emoji": emoji_reaction.content,
                        "count": 1,
                    }
                }
            },
        )
def _undo_process_outbox(undo: ap.Undo, new_meta: _NewMeta) -> None:
    """Process an outgoing Undo: flag the undone activity and clear local meta.

    Mirrors `_undo_process_inbox`; consistency fix: use the shared
    `by_remote_id`/`upsert`/`MetaKey` helpers instead of hand-written
    query/update dicts.
    """
    _logger.info(f"process_outbox activity={undo!r}")
    obj = undo.get_object()
    update_one_activity(by_remote_id(obj.id), upsert({MetaKey.UNDO: True}))

    # Undo Like: roll back the counter and clear the "liked" UI flag
    if obj.has_type(ap.ActivityType.LIKE):
        liked = obj.get_object_id()
        update_one_activity(
            {**by_object_id(liked), **by_type(ap.ActivityType.CREATE)},
            {**inc(MetaKey.COUNT_LIKE, -1), **upsert({MetaKey.LIKED: False})},
        )
    # Undo Announce: clear the "boosted" UI flag
    elif obj.has_type(ap.ActivityType.ANNOUNCE):
        announced = obj.get_object_id()
        update_one_activity(
            {**by_object_id(announced), **by_type(ap.ActivityType.CREATE)},
            upsert({MetaKey.BOOSTED: False}),
        )
    # Undo Follow (undo new following): nothing to update locally
    elif obj.has_type(ap.ActivityType.FOLLOW):
        pass
def note_by_id(note_id):
    """Render a single public note (HTML) with its thread, likes and shares.

    Fix: the likes and shares collection loops were duplicated verbatim;
    extracted into `_note_actors_by_type`.
    """
    if is_api_request():
        return redirect(url_for("outbox_activity", item_id=note_id))

    query = {}
    # Prevent displaying direct messages on the public frontend
    if not session.get("logged_in", False):
        query = is_public()
    data = DB.activities.find_one(
        {**in_outbox(), **by_remote_id(activity_url(note_id)), **query}
    )
    if not data:
        abort(404)
    if data["meta"].get("deleted", False):
        abort(410)  # Gone: the note existed but was deleted

    thread = _build_thread(data, query=query)
    app.logger.info(f"thread={thread!r}")

    obj_id = data["activity"]["object"]["id"]
    likes = _note_actors_by_type(obj_id, ActivityType.LIKE)
    app.logger.info(f"likes={likes!r}")
    shares = _note_actors_by_type(obj_id, ActivityType.ANNOUNCE)
    app.logger.info(f"shares={shares!r}")

    return htmlify(
        render_template("note.html", likes=likes, shares=shares, thread=thread, note=data))


def _note_actors_by_type(obj_id, activity_type):
    """Return the actors of live (not undone/deleted) activities of
    `activity_type` targeting `obj_id`."""
    actors = []
    for doc in DB.activities.find({
            **not_undo(),
            **not_deleted(),
            **by_type(activity_type),
            **by_object_id(obj_id),
    }):
        try:
            actors.append(doc["meta"]["actor"])
        except Exception:
            # Tolerate malformed docs, just log them
            app.logger.exception(f"invalid doc: {doc!r}")
    return actors
def index():
    """Landing page: actor JSON for API requests, paginated HTML stream otherwise."""
    if is_api_request():
        _log_sig()
        return activitypubify(**ME)

    # Public, non-deleted Creates that aren't pinned, plus live Announces
    q = {
        **in_outbox(),
        "$or": [
            {
                **by_type(ActivityType.CREATE),
                **not_deleted(),
                **by_visibility(ap.Visibility.PUBLIC),
                "$or": [
                    {"meta.pinned": False},
                    {"meta.pinned": {"$exists": False}},
                ],
            },
            {**by_type(ActivityType.ANNOUNCE), **not_undo()},
        ],
    }

    # Pinned notes are only shown on the first page
    apinned = []
    on_first_page = not request.args.get("older_than") and not request.args.get("newer_than")
    if on_first_page:
        apinned = list(DB.activities.find({
            **in_outbox(),
            **by_type(ActivityType.CREATE),
            **not_deleted(),
            **pinned(),
            **by_visibility(ap.Visibility.PUBLIC),
        }))

    outbox_data, older_than, newer_than = paginated_query(
        DB.activities, q, limit=25 - len(apinned)
    )

    return htmlify(render_template(
        "index.html",
        outbox_data=outbox_data,
        older_than=older_than,
        newer_than=newer_than,
        pinned=apinned,
    ))
def _announce_process_inbox(announce: ap.Announce, new_meta: _NewMeta) -> None:
    """Process an incoming Announce: embed the boosted object and bump its counter."""
    _logger.info(f"process_inbox activity={announce!r}")
    # TODO(tsileo): actually drop it without storing it and better logging, also move the check somewhere else
    # or remove it?
    try:
        boosted = announce.get_object()
    except NotAnActivityError:
        # OStatus notices can't be dereferenced as ActivityPub objects
        _logger.exception(
            f'received an Annouce referencing an OStatus notice ({announce._data["object"]}), dropping the message'
        )
        return

    if boosted.has_type(ap.ActivityType.QUESTION):
        # Poll results live remotely; fetch them asynchronously
        Tasks.fetch_remote_question(boosted)

    # Embed the boosted object and its actor into the Announce's meta
    update_one_activity(
        by_remote_id(announce.id),
        upsert({
            MetaKey.OBJECT: boosted.to_dict(embed=True),
            MetaKey.OBJECT_ACTOR: boosted.get_actor().to_dict(embed=True),
        }),
    )
    # Bump the boost counter on the local Create (if any)
    update_one_activity(
        {**by_type(ap.ActivityType.CREATE), **by_object_id(boosted.id)},
        inc(MetaKey.COUNT_BOOST, 1),
    )
def api_ack_reply() -> _Response:
    """Mark a reply as acknowledged and publish a Read activity for it."""
    reply_iri = _user_api_arg("reply_iri")
    reply = ap.fetch_remote_activity(reply_iri)
    # Unwrap a Create down to its object
    if reply.has_type(ap.ActivityType.CREATE):
        reply = reply.get_object()
    # TODO(tsileo): tweak the adressing?
    update_one_activity(
        {**by_type(ap.ActivityType.CREATE), **by_object_id(reply.id)},
        {"$set": {"meta.reply_acked": True}},
    )
    # Publish the acknowledgement as a Read activity
    read = ap.Read(
        actor=MY_PERSON.id,
        object=reply.id,
        to=[MY_PERSON.followers],
        cc=[ap.AS_PUBLIC, reply.get_actor().id],
        published=now(),
        context=new_context(reply),
    )
    return _user_api_response(activity=post_to_outbox(read))
def _announce_set_inbox_flags(activity: ap.Announce, new_meta: _NewMeta) -> None:
    """Set inbox flags for an incoming Announce (notification + stream dedup)."""
    _logger.info(f"set_inbox_flags activity={activity!r}")
    boosted = activity.get_object()
    # Is this a boost of one of our own (outbox) activities?
    if is_from_outbox(boosted):
        # Notify about it...
        _flag_as_notification(activity, new_meta)
        # ...and protect it from the GC (we want to keep it forever)
        _set_flag(new_meta, MetaKey.GC_KEEP)
    # Dedup boosts: only show the object in the stream if it wasn't already
    # shown there within the last 12 hours (avoids repeated notes on one page)
    already_streamed = find_one_activity({
        **in_inbox(),
        **by_type([ap.ActivityType.CREATE, ap.ActivityType.ANNOUNCE]),
        **by_object_id(boosted.id),
        **flag(MetaKey.STREAM, True),
        **published_after(datetime.now(timezone.utc) - timedelta(hours=12)),
    })
    if not already_streamed:
        _set_flag(new_meta, MetaKey.STREAM)
    return None
def admin_thread() -> _Response:
    """Render the thread (admin view) of the object given by the `oid` query arg.

    Fix: if the object was in neither `activities` nor `replies`,
    `DB.replies.find_one` returned None and `dat["activity"]` raised a
    TypeError; now returns a clean 404.
    """
    oid = request.args.get("oid")
    if not oid:
        abort(404)
    # Look for a local Create first...
    data = find_one_activity({
        **by_type(ap.ActivityType.CREATE),
        **by_object_id(oid),
    })
    if not data:
        # ...then fall back to the cached replies collection
        dat = DB.replies.find_one({**by_remote_id(oid)})
        if not dat:
            abort(404)
        # Normalize the reply doc to the activity shape the template expects
        data = {
            "activity": {"object": dat["activity"]},
            "meta": dat["meta"],
            "_id": dat["_id"],
        }
    if data["meta"].get("deleted", False):
        abort(410)  # Gone
    thread = _build_thread(data)

    tpl = "note.html"
    if request.args.get("debug"):
        tpl = "note_debug.html"
    return htmlify(render_template(tpl, thread=thread, note=data))
def _accept_set_inbox_flags(activity: ap.Accept, new_meta: _NewMeta) -> None:
    """Handle notifications for "accepted" following requests."""
    _logger.info(f"set_inbox_flags activity={activity!r}")
    # Does this actor already follow us back?
    follow_query = {
        **in_inbox(),
        **by_type(ap.ActivityType.FOLLOW),
        **by_actor(activity.get_actor()),
        **not_undo(),
    }
    follows_back = bool(DB.activities.find_one(follow_query))
    if follows_back:
        # Record the mutual-follow state on their Follow as well
        DB.activities.update_many(
            follow_query,
            {"$set": {_meta(MetaKey.NOTIFICATION_FOLLOWS_BACK): True}},
        )
    # This Accept will be a "You started following $actor" notification
    _flag_as_notification(activity, new_meta)
    _set_flag(new_meta, MetaKey.GC_KEEP)
    _set_flag(new_meta, MetaKey.NOTIFICATION_FOLLOWS_BACK, follows_back)
    return None
def _follow_set_inbox_flags(activity: ap.Follow, new_meta: _NewMeta) -> None:
    """Handle notification for new followers."""
    _logger.info(f"set_inbox_flags activity={activity!r}")
    # Are we already following this actor (i.e. do they have our Accept)?
    accept_query = {
        **in_inbox(),
        **by_type(ap.ActivityType.ACCEPT),
        **by_actor(activity.get_actor()),
        **not_undo(),
    }
    follows_back = bool(DB.activities.find_one(accept_query))
    if follows_back:
        # Record the mutual-follow state on the Accept as well
        DB.activities.update_many(
            accept_query,
            {"$set": {_meta(MetaKey.NOTIFICATION_FOLLOWS_BACK): True}},
        )
    # This Follow will be a "$actor started following you" notification
    _flag_as_notification(activity, new_meta)
    _set_flag(new_meta, MetaKey.GC_KEEP)
    _set_flag(new_meta, MetaKey.NOTIFICATION_FOLLOWS_BACK, follows_back)
    return None
def _delete_process_inbox(delete: ap.Delete, new_meta: _NewMeta) -> None:
    """Process an incoming Delete: fix reply counters, then tombstone local copies.

    Fix: `DB.activities.update` is deprecated/removed in PyMongo; use
    `update_many` (the intent here is to undo *all* related activities).
    """
    _logger.info(f"process_inbox activity={delete!r}")
    obj_id = delete.get_object_id()
    _logger.debug(f"delete object={obj_id}")
    try:
        # FIXME(tsileo): call the DB here instead? like for the outbox
        obj = ap.fetch_remote_activity(obj_id)
        _logger.info(f"inbox_delete handle_replies obj={obj!r}")
        in_reply_to = obj.get_in_reply_to() if obj.inReplyTo else None
        if obj.has_type(ap.CREATE_TYPES):
            # Prefer the locally-stored inReplyTo over the remote one
            in_reply_to = ap._get_id(
                DB.activities.find_one({
                    "meta.object_id": obj_id,
                    "type": ap.ActivityType.CREATE.value,
                })["activity"]["object"].get("inReplyTo"))
        if in_reply_to:
            back._handle_replies_delete(MY_PERSON, in_reply_to)
    except Exception:
        # Best-effort: a failure here must not prevent the tombstoning below
        _logger.exception(f"failed to handle delete replies for {obj_id}")

    # Flag the local Create (if any) as deleted
    update_one_activity(
        {**by_object_id(obj_id), **by_type(ap.ActivityType.CREATE)},
        upsert({MetaKey.DELETED: True}),
    )
    # Force-undo all other related activities
    DB.activities.update_many(by_object_id(obj_id), upsert({MetaKey.UNDO: True}))
def following():
    """Following collection: ActivityPub JSON for API requests, HTML otherwise."""
    q = {**in_outbox(), **by_type(ActivityType.FOLLOW), **not_undo()}

    if is_api_request():
        _log_sig()
        return jsonify(**activitypub.build_ordered_collection(
            DB.activities,
            q=q,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: doc["activity"]["object"],
            col_name="following",
        ))

    # The HTML view can be hidden from anonymous visitors
    if config.HIDE_FOLLOWING and not session.get("logged_in", False):
        abort(404)

    docs, older_than, newer_than = paginated_query(DB.activities, q)
    # Keep only well-formed docs, as (remote_id, cached actor) pairs
    following_data = [
        (doc["remote_id"], doc["meta"]["object"])
        for doc in docs
        if "remote_id" in doc and "object" in doc.get("meta", {})
    ]
    return render_template(
        "following.html",
        following_data=following_data,
        older_than=older_than,
        newer_than=newer_than,
        lists=list(DB.lists.find()),
    )
def outbox_activity_replies(item_id):
    """Replies collection (ActivityPub JSON) for a public outbox Create."""
    if not is_api_request():
        abort(404)
    _log_sig()
    data = DB.activities.find_one({
        **in_outbox(),
        **by_remote_id(activity_url(item_id)),
        **not_deleted(),
        **is_public(),
    })
    if not data:
        abort(404)
    parent = ap.parse_activity(data["activity"])
    # Only Creates have a replies collection
    if parent.ACTIVITY_TYPE != ActivityType.CREATE:
        abort(404)

    replies_q = {
        **is_public(),
        **not_deleted(),
        **by_type(ActivityType.CREATE),
        "activity.object.inReplyTo": parent.get_object().id,
    }
    return activitypubify(**activitypub.build_ordered_collection(
        DB.activities,
        q=replies_q,
        cursor=request.args.get("cursor"),
        map_func=lambda doc: doc["activity"]["object"],
        col_name=f"outbox/{item_id}/replies",
        first_page=request.args.get("page") == "first",
    ))
def migrate(self) -> None:
    """Backfill `meta.mentions` and `meta.hashtags` on live Create activities."""
    for data in find_activities({**by_type(ap.ActivityType.CREATE), **not_deleted()}):
        try:
            note = ap.parse_activity(data["activity"]).get_object()
            mentions = [m.href for m in note.get_mentions()]
            # Strip the leading '#' from each hashtag name
            hashtags = [h.name[1:] for h in note.get_hashtags()]
            update_one_activity(
                by_remote_id(data["remote_id"]),
                upsert({MetaKey.MENTIONS: mentions, MetaKey.HASHTAGS: hashtags}),
            )
        except Exception:
            # Keep migrating the rest even if one activity is malformed
            logger.exception(f"failed to process activity {data!r}")
def migrate(self) -> None:
    """Backfill `meta.in_reply_to` on activities and on cached replies."""
    # Live Create activities first
    for data in find_activities({**by_type(ap.ActivityType.CREATE), **not_deleted()}):
        try:
            in_reply_to = data["activity"]["object"].get("inReplyTo")
            if in_reply_to:
                update_one_activity(
                    by_remote_id(data["remote_id"]),
                    upsert({MetaKey.IN_REPLY_TO: in_reply_to}),
                )
        except Exception:
            logger.exception(f"failed to process activity {data!r}")

    # Same backfill for the dedicated replies collection
    for data in DB.replies.find({**not_deleted()}):
        try:
            in_reply_to = data["activity"].get("inReplyTo")
            if in_reply_to:
                DB.replies.update_one(
                    by_remote_id(data["remote_id"]),
                    upsert({MetaKey.IN_REPLY_TO: in_reply_to}),
                )
        except Exception:
            logger.exception(f"failed to process activity {data!r}")
def post_to_inbox(activity: ap.BaseActivity) -> None:
    """Entry point for every activity delivered to the inbox.

    Runs a sequence of order-sensitive guards (block list, relay/forwarded
    messages, cache-poisoning Updates, duplicates); if none of them drops or
    diverts the activity, it is saved to the inbox and async processing tasks
    are spawned.
    """
    # Check for Block activity
    actor = activity.get_actor()
    if outbox_is_blocked(actor.id):
        logger.info(
            f"actor {actor!r} is blocked, dropping the received activity {activity!r}"
        )
        return

    # If the message is coming from a Pleroma relay, we process it as a possible reply for a stream activity
    # (only when neither the activities nor the replies collection knows the object yet)
    if (
        actor.has_type(ap.ActivityType.APPLICATION)
        and actor.id.endswith("/relay")
        and activity.has_type(ap.ActivityType.ANNOUNCE)
        and not find_one_activity(
            {
                **by_object_id(activity.get_object_id()),
                **by_type(ap.ActivityType.CREATE),
            }
        )
        and not DB.replies.find_one(by_remote_id(activity.get_object_id()))
    ):
        Tasks.process_reply(activity.get_object_id())
        return

    # Hubzilla sends Update with the same ID as the actor, and it poisons the cache
    if (
        activity.has_type(ap.ActivityType.UPDATE)
        and activity.id == activity.get_object_id()
    ):
        # Start a task to update the cached actor
        Tasks.cache_actor(activity.id)
        return

    # Honk forwards activities in a Read, process them as replies
    if activity.has_type(ap.ActivityType.READ):
        Tasks.process_reply(activity.get_object_id())
        return

    # TODO(tsileo): support ignore from Honk

    # Hubzilla forwards activities in a Create, process them as possible replies
    # (detected by the Create and its object living on different servers)
    if activity.has_type(ap.ActivityType.CREATE) and server(activity.id) != server(
        activity.get_object_id()
    ):
        Tasks.process_reply(activity.get_object_id())
        return

    if DB.activities.find_one({"box": Box.INBOX.value, "remote_id": activity.id}):
        # The activity is already in the inbox
        logger.info(f"received duplicate activity {activity!r}, dropping it")
        return

    # Persist the activity, then fan out the async post-processing
    save(Box.INBOX, activity)
    logger.info(f"spawning tasks for {activity!r}")
    # NOTE(review): Delete/Update are excluded from actor caching here —
    # presumably to avoid caching stale/gone actors; confirm before relying on it
    if not activity.has_type([ap.ActivityType.DELETE, ap.ActivityType.UPDATE]):
        Tasks.cache_actor(activity.id)
    Tasks.process_new_activity(activity.id)
    Tasks.finish_post_to_inbox(activity.id)
def migrate(self) -> None:
    """Backfill `meta.follow_status` and the "follows back" notification flag.

    Fix: leftover debug `print(...)` calls replaced with `logger.debug`.
    """
    # Pass 1: mark inbound Follows that we Accept-ed as accepted
    for data in find_activities({**by_type(ap.ActivityType.ACCEPT), **in_inbox()}):
        try:
            update_one_activity(
                {
                    **by_type(ap.ActivityType.FOLLOW),
                    **by_remote_id(data["meta"]["object_id"]),
                },
                upsert({MetaKey.FOLLOW_STATUS: FollowStatus.ACCEPTED.value}),
            )
            # Check if we are following this actor
            follow_query = {
                **in_inbox(),
                **by_type(ap.ActivityType.FOLLOW),
                **by_object_id(data["meta"]["actor_id"]),
                **not_undo(),
            }
            raw_follow = DB.activities.find_one(follow_query)
            if raw_follow:
                DB.activities.update_many(
                    follow_query,
                    {"$set": {_meta(MetaKey.NOTIFICATION_FOLLOWS_BACK): True}},
                )
        except Exception:
            logger.exception(f"failed to process activity {data!r}")

    # Pass 2: flag our outbound Follows whose actor also follows us
    for data in find_activities({**by_type(ap.ActivityType.FOLLOW), **in_outbox()}):
        try:
            logger.debug(f"data={data!r}")
            follow_query = {
                **in_inbox(),
                **by_type(ap.ActivityType.FOLLOW),
                **by_actor_id(data["meta"]["object_id"]),
                **not_undo(),
            }
            raw_accept = DB.activities.find_one(follow_query)
            logger.debug(f"raw_accept={raw_accept!r}")
            if raw_accept:
                DB.activities.update_many(
                    by_remote_id(data["remote_id"]),
                    {"$set": {_meta(MetaKey.NOTIFICATION_FOLLOWS_BACK): True}},
                )
        except Exception:
            logger.exception(f"failed to process activity {data!r}")
def migrate(self) -> None:
    """Default `meta.follow_status` to "accepted" for inbox Follows lacking it."""
    legacy_follows = {
        **by_type(ap.ActivityType.FOLLOW),
        **in_inbox(),
        "meta.follow_status": {"$exists": False},
    }
    DB.activities.update_many(
        legacy_follows, {"$set": {"meta.follow_status": "accepted"}}
    )
def handle_question_reply(create: ap.Create, question: ap.Question) -> None:
    """Record a vote (a Create whose object name is the choice) on a local poll.

    Fix: leftover debug `print(...)` replaced with `logger.debug`.
    """
    choice = create.get_object().name

    # Ensure it's a valid choice
    if choice not in [
            c["name"] for c in question._data.get("oneOf", question.anyOf)
    ]:
        logger.info("invalid choice")
        return

    # Hash the choice/answer (so we can use it as a key)
    answer_key = _answer_key(choice)

    is_single_choice = bool(question._data.get("oneOf", []))
    # For multiple-choice polls, an actor may vote once *per choice*
    dup_query = {
        "activity.object.actor": create.get_actor().id,
        "meta.answer_to": question.id,
        **({} if is_single_choice else {
            "meta.poll_answer_choice": choice
        }),
    }
    logger.debug(f"dup_q={dup_query}")

    # Check for duplicate votes
    if DB.activities.find_one(dup_query):
        logger.info("duplicate response")
        return

    # Update the DB: bump the poll counters...
    DB.activities.update_one(
        {
            **by_object_id(question.id),
            **by_type(ap.ActivityType.CREATE)
        },
        {
            "$inc": {
                "meta.question_replies": 1,
                f"meta.question_answers.{answer_key}": 1,
            }
        },
    )
    # ...and mark the vote itself (kept out of the stream)
    DB.activities.update_one(
        by_remote_id(create.id),
        {
            "$set": {
                "meta.poll_answer_to": question.id,
                "meta.poll_answer_choice": choice,
                "meta.stream": False,
                "meta.poll_answer": True,
            }
        },
    )

    return None
def _like_process_inbox(like: ap.Like, new_meta: _NewMeta) -> None:
    """Process an incoming Like: bump the like counter on the local Create."""
    _logger.info(f"process_inbox activity={like!r}")
    liked = like.get_object()
    # Update the meta counter if the object is published by the server
    update_one_activity(
        {**by_type(ap.ActivityType.CREATE), **by_object_id(liked.id)},
        inc(MetaKey.COUNT_LIKE, 1),
    )
def _delete_process_outbox(delete: ap.Delete, new_meta: _NewMeta) -> None:
    """Process an outgoing Delete: tombstone local data and fix reply counters."""
    _logger.info(f"process_outbox activity={delete!r}")
    obj_id = delete.get_object_id()

    # Flag everything referencing the deleted object as deleted
    # (except the Delete activity itself)
    update_many_activities(
        {**by_object_id(obj_id), "remote_id": {"$ne": delete.id}},
        upsert({MetaKey.DELETED: True, MetaKey.UNDO: True}),
    )

    # If the deleted activity was in DB, decrease some threads-related counters
    data = find_one_activity({
        **by_object_id(obj_id),
        **by_type(ap.ActivityType.CREATE),
    })
    _logger.info(f"found local copy of deleted activity: {data}")
    if not data:
        return

    obj = ap.parse_activity(data["activity"]).get_object()
    _logger.info(f"obj={obj!r}")
    in_reply_to = obj.get_in_reply_to()
    if in_reply_to:
        # The deleted note was a reply: roll back the parent's reply counters
        update_one_activity(
            {**by_type(ap.ActivityType.CREATE), **by_object_id(in_reply_to)},
            {"$inc": {"meta.count_reply": -1, "meta.count_direct_reply": -1}},
        )
def outbox():
    """Outbox endpoint: ActivityPub collection on GET, C2S posting on POST."""
    if request.method == "GET":
        if not is_api_request():
            abort(404)
        _log_sig()
        # TODO(tsileo): returns the whole outbox if authenticated and look at OCAP support
        q = {
            **in_outbox(),
            "$or": [
                {
                    **by_type(ActivityType.CREATE),
                    **not_deleted(),
                    **by_visibility(ap.Visibility.PUBLIC),
                },
                {**by_type(ActivityType.ANNOUNCE), **not_undo()},
            ],
        }
        return activitypubify(**activitypub.build_ordered_collection(
            DB.activities,
            q=q,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: activity_from_doc(doc, embed=True),
            col_name="outbox",
        ))

    # Handle POST request aka C2S API
    try:
        _api_required()
    except BadSignature:
        abort(401)

    payload = request.get_json(force=True)
    new_activity = ap.parse_activity(payload)
    location = post_to_outbox(new_activity)
    return Response(status=201, headers={"Location": location})
def _announce_process_outbox(announce: ap.Announce, new_meta: _NewMeta) -> None:
    """Process an outgoing Announce: cache the object and mark the note boosted."""
    _logger.info(f"process_outbox activity={announce!r}")
    boosted = announce.get_object()
    if boosted.has_type(ap.ActivityType.QUESTION):
        # Poll results live remotely; fetch them asynchronously
        Tasks.fetch_remote_question(boosted)

    Tasks.cache_object(announce.id)

    # Remember which Announce boosted the local Create (used by the UI/undo)
    update_one_activity(
        {**by_object_id(boosted.id), **by_type(ap.ActivityType.CREATE)},
        upsert({MetaKey.BOOSTED: announce.id}),
    )
def _like_process_outbox(like: ap.Like, new_meta: _NewMeta) -> None:
    """Process an outgoing Like: cache it, bump the counter, mark the note liked."""
    _logger.info(f"process_outbox activity={like!r}")
    liked = like.get_object()
    if liked.has_type(ap.ActivityType.QUESTION):
        # Poll results live remotely; fetch them asynchronously
        Tasks.fetch_remote_question(liked)

    # Cache the object for display on the "Liked" public page
    Tasks.cache_object(like.id)

    update_one_activity(
        {**by_object_id(liked.id), **by_type(ap.ActivityType.CREATE)},
        {**inc(MetaKey.COUNT_LIKE, 1), **upsert({MetaKey.LIKED: like.id})},
    )
def admin_direct_messages() -> _Response: all_dms = DB.activities.find({ **not_poll_answer(), **by_type(ap.ActivityType.CREATE), **by_object_visibility(ap.Visibility.DIRECT), }).sort("meta.published", -1) # Group by threads _threads = defaultdict(list) # type: ignore for dm in all_dms: # Skip poll answers if dm["activity"].get("object", {}).get("name"): continue _threads[dm["meta"].get("thread_root_parent", dm["meta"]["object_id"])].append(dm) # Now build the data needed for the UI threads = [] for thread_root, thread in _threads.items(): # We need the list of participants participants = set() for raw_activity in thread: activity = ap.parse_activity(raw_activity["activity"]) actor = activity.get_actor() domain = urlparse(actor.id).netloc if actor.id != ID: participants.add(f"@{actor.preferredUsername}@{domain}") if activity.has_type(ap.ActivityType.CREATE): activity = activity.get_object() for mention in activity.get_mentions(): if mention.href in [actor.id, ID]: continue m = ap.fetch_remote_activity(mention.href) if m.has_type(ap.ACTOR_TYPES) and m.id != ID: d = urlparse(m.id).netloc participants.add(f"@{m.preferredUsername}@{d}") if not participants: continue # Build the UI data for this conversation oid = thread[-1]["meta"]["object_id"] threads.append({ "participants": list(participants), "oid": oid, "last_reply": thread[0], "len": len(thread), }) return htmlify(render_template("direct_messages.html", threads=threads))
def all():
    """Render the full outbox timeline (every live Create and Announce)."""
    timeline_q = {
        **in_outbox(),
        **by_type([ActivityType.CREATE, ActivityType.ANNOUNCE]),
        **not_deleted(),
        **not_undo(),
        **not_poll_answer(),
    }
    page_data, older_than, newer_than = paginated_query(DB.activities, timeline_q)
    return htmlify(render_template(
        "index.html",
        outbox_data=page_data,
        older_than=older_than,
        newer_than=newer_than,
    ))
def _announce_process_inbox(announce: ap.Announce, new_meta: _NewMeta) -> None: _logger.info(f"process_inbox activity={announce!r}") # TODO(tsileo): actually drop it without storing it and better logging, also move the check somewhere else # or remove it? try: obj = announce.get_object() except NotAnActivityError: _logger.exception( f'received an Annouce referencing an OStatus notice ({announce._data["object"]}), dropping the message' ) return if obj.has_type(ap.ActivityType.QUESTION): Tasks.fetch_remote_question(obj) # Cache the announced object Tasks.cache_object(announce.id) # Process the reply of the announced object if any in_reply_to = obj.get_in_reply_to() if in_reply_to: reply = ap.fetch_remote_activity(in_reply_to) if reply.has_type(ap.ActivityType.CREATE): reply = reply.get_object() in_reply_to_data = {MetaKey.IN_REPLY_TO: in_reply_to} # Update the activity to save some data about the reply if reply.get_actor().id == obj.get_actor().id: in_reply_to_data.update({MetaKey.IN_REPLY_TO_SELF: True}) else: in_reply_to_data.update({ MetaKey.IN_REPLY_TO_ACTOR: reply.get_actor().to_dict(embed=True) }) update_one_activity(by_remote_id(announce.id), upsert(in_reply_to_data)) # Spawn a task to process it (and determine if it needs to be saved) Tasks.process_reply(reply.id) update_one_activity( { **by_type(ap.ActivityType.CREATE), **by_object_id(obj.id) }, inc(MetaKey.COUNT_BOOST, 1), )
def admin_profile() -> _Response:
    """Admin view of a remote actor's profile and their recent inbox activity."""
    actor_id = request.args.get("actor_id")
    if not actor_id:
        abort(404)

    actor = ap.fetch_remote_activity(actor_id)
    # Recent Creates/Announces from this actor in our inbox
    q = {
        "meta.actor_id": actor_id,
        "box": "inbox",
        **not_deleted(),
        "type": {
            "$in": [ap.ActivityType.CREATE.value, ap.ActivityType.ANNOUNCE.value]
        },
    }
    inbox_data, older_than, newer_than = paginated_query(
        DB.activities, q, limit=int(request.args.get("limit", 25))
    )

    # Do they follow us?
    follower = find_one_activity({
        "box": "inbox",
        "type": ap.ActivityType.FOLLOW.value,
        "meta.actor_id": actor.id,
        "meta.undo": False,
    })
    # Do we follow them (with an accepted follow request)?
    following = find_one_activity({
        **by_type(ap.ActivityType.FOLLOW),
        **by_object_id(actor.id),
        **not_undo(),
        **in_outbox(),
        **follow_request_accepted(),
    })

    return htmlify(render_template(
        "stream.html",
        actor_id=actor_id,
        actor=actor.to_dict(),
        inbox_data=inbox_data,
        older_than=older_than,
        newer_than=newer_than,
        follower=follower,
        following=following,
        lists=list(DB.lists.find()),
    ))
def admin_lookup() -> _Response:
    """Admin lookup: resolve a URL/handle and display the object or actor found.

    Fixes: leftover debug `print(data)` removed (the structured
    `app.logger.debug` call is kept), and the debug log is now guarded so a
    falsy lookup result cannot raise on `data.to_dict()`.
    """
    data = None
    meta = None
    follower = None
    following = None
    if request.args.get("url"):
        data = lookup(request.args.get("url"))  # type: ignore
        if data:
            if not data.has_type(ap.ACTOR_TYPES):
                meta = _meta(data)
            else:
                # It's an actor: check the follow relationship in both directions
                follower = find_one_activity({
                    "box": "inbox",
                    "type": ap.ActivityType.FOLLOW.value,
                    "meta.actor_id": data.id,
                    "meta.undo": False,
                })
                following = find_one_activity({
                    **by_type(ap.ActivityType.FOLLOW),
                    **by_object_id(data.id),
                    **not_undo(),
                    **in_outbox(),
                    **follow_request_accepted(),
                })

            if data.has_type(ap.ActivityType.QUESTION):
                # Poll results live remotely; fetch them asynchronously
                p.push(data.id, "/task/fetch_remote_question")

            app.logger.debug(data.to_dict())

    return htmlify(render_template(
        "lookup.html",
        data=data,
        meta=meta,
        follower=follower,
        following=following,
        url=request.args.get("url"),
    ))