def followers():
    """Serve the followers collection: AP OrderedCollection or HTML page."""
    # Followers are the non-undone Follow activities sitting in the inbox.
    query = {
        "box": Box.INBOX.value,
        "type": ActivityType.FOLLOW.value,
        "meta.undo": False,
    }

    if is_api_request():
        _log_sig()
        collection = activitypub.build_ordered_collection(
            DB.activities,
            q=query,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: doc["activity"]["actor"],
            col_name="followers",
        )
        return activitypubify(**collection)

    docs, older_than, newer_than = paginated_query(DB.activities, query)
    # Keep only the entries whose metadata carries a cached actor.
    followers = []
    for doc in docs:
        meta = doc.get("meta", {})
        if "actor" in meta:
            followers.append(meta)
    return htmlify(
        render_template(
            "followers.html",
            followers_data=followers,
            older_than=older_than,
            newer_than=newer_than,
        ))
def tags(tag):
    """Serve all public outbox activities carrying hashtag *tag*.

    Returns 404 when nothing matches, an HTML page for browsers and an
    ActivityPub OrderedCollection for API requests.
    """
    # The exact same filter was previously duplicated in three query
    # literals (existence check, HTML page, AP collection); build it once.
    base_query = {
        **in_outbox(),
        **by_hashtag(tag),
        **by_visibility(ap.Visibility.PUBLIC),
        **not_deleted(),
    }
    if not DB.activities.count(base_query):
        abort(404)

    if not is_api_request():
        return htmlify(
            render_template(
                "tags.html",
                tag=tag,
                # Newest first, by publication timestamp.
                outbox_data=DB.activities.find(base_query).sort(
                    "meta.published", -1),
            ))

    _log_sig()
    # Pass a copy in case the collection builder mutates its query.
    return activitypubify(**activitypub.build_ordered_collection(
        DB.activities,
        q=dict(base_query),
        cursor=request.args.get("cursor"),
        map_func=lambda doc: doc["activity"]["object"]["id"],
        col_name=f"tags/{tag}",
    ))
def following():
    """Serve the accounts this instance follows (AP JSON or HTML)."""
    query = {**in_outbox(), **by_type(ActivityType.FOLLOW), **not_undo()}

    if is_api_request():
        _log_sig()
        # NOTE(review): this branch is served before the HIDE_FOLLOWING
        # check below — confirm exposing the collection here is intended.
        collection = activitypub.build_ordered_collection(
            DB.activities,
            q=query,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: doc["activity"]["object"],
            col_name="following",
        )
        return jsonify(**collection)

    if config.HIDE_FOLLOWING and not session.get("logged_in", False):
        abort(404)

    docs, older_than, newer_than = paginated_query(DB.activities, query)
    # (remote_id, followed object) pairs; skip malformed documents.
    following = [
        (doc["remote_id"], doc["meta"]["object"])
        for doc in docs
        if "remote_id" in doc and "object" in doc.get("meta", {})
    ]
    return render_template(
        "following.html",
        following_data=following,
        older_than=older_than,
        newer_than=newer_than,
        lists=list(DB.lists.find()),
    )
def outbox_activity_replies(item_id):
    """Serve the replies collection of a public outbox Create (API only)."""
    if not is_api_request():
        abort(404)
    _log_sig()

    doc = DB.activities.find_one({
        **in_outbox(),
        **by_remote_id(activity_url(item_id)),
        **not_deleted(),
        **is_public(),
    })
    if not doc:
        abort(404)

    activity = ap.parse_activity(doc["activity"])
    if activity.ACTIVITY_TYPE != ActivityType.CREATE:
        abort(404)

    # Public, non-deleted Creates whose object replies to this one.
    replies_query = {
        **is_public(),
        **not_deleted(),
        **by_type(ActivityType.CREATE),
        "activity.object.inReplyTo": activity.get_object().id,
    }
    return activitypubify(**activitypub.build_ordered_collection(
        DB.activities,
        q=replies_query,
        cursor=request.args.get("cursor"),
        map_func=lambda d: d["activity"]["object"],
        col_name=f"outbox/{item_id}/replies",
        first_page=request.args.get("page") == "first",
    ))
def liked():
    """Serve the Like activities: an HTML page or an AP OrderedCollection."""
    if not is_api_request():
        q = {
            "box": Box.OUTBOX.value,
            "type": ActivityType.LIKE.value,
            "meta.deleted": False,
            "meta.undo": False,
        }
        liked, older_than, newer_than = paginated_query(DB.activities, q)
        page = render_template(
            "liked.html",
            liked=liked,
            older_than=older_than,
            newer_than=newer_than,
        )
        return htmlify(page)

    # NOTE(review): unlike the HTML branch above, this query is not
    # restricted to the outbox — confirm that is intended.
    q = {
        "meta.deleted": False,
        "meta.undo": False,
        "type": ActivityType.LIKE.value
    }
    collection = activitypub.build_ordered_collection(
        DB.activities,
        q=q,
        cursor=request.args.get("cursor"),
        map_func=lambda doc: doc["activity"]["object"],
        col_name="liked",
    )
    return activitypubify(**collection)
def outbox_activity_replies(item_id):
    """Serve the replies to a public outbox activity as an OrderedCollection."""
    if not is_api_request():
        abort(404)
    _log_sig()

    doc = DB.activities.find_one({
        "box": Box.OUTBOX.value,
        "remote_id": activity_url(item_id),
        "meta.deleted": False,
        "meta.public": True,
    })
    if not doc:
        abort(404)

    parent = ap.parse_activity(doc["activity"])
    if parent.ACTIVITY_TYPE != ActivityType.CREATE:
        abort(404)

    # Public, non-deleted Creates replying to the parent's object.
    replies_q = {
        "meta.deleted": False,
        "meta.public": True,
        "type": ActivityType.CREATE.value,
        "activity.object.inReplyTo": parent.get_object().id,
    }
    return jsonify(**activitypub.build_ordered_collection(
        DB.activities,
        q=replies_q,
        cursor=request.args.get("cursor"),
        map_func=lambda d: d["activity"]["object"],
        col_name=f"outbox/{item_id}/replies",
        first_page=request.args.get("page") == "first",
    ))
def outbox():
    """GET: serve the public outbox; POST: accept a C2S activity."""
    if request.method != "GET":
        # Handle POST request: authenticated client-to-server posting.
        try:
            _api_required()
        except BadSignature:
            abort(401)
        payload = request.get_json(force=True)
        new_activity = ap.parse_activity(payload)
        location = post_to_outbox(new_activity)
        return Response(status=201, headers={"Location": location})

    if not is_api_request():
        abort(404)
    _log_sig()
    # TODO(tsileo): returns the whole outbox if authenticated and look at OCAP support
    q = {
        "box": Box.OUTBOX.value,
        "meta.deleted": False,
        "meta.undo": False,
        "meta.public": True,
        "type": {
            "$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]
        },
    }
    return jsonify(**activitypub.build_ordered_collection(
        DB.activities,
        q=q,
        cursor=request.args.get("cursor"),
        map_func=lambda doc: activity_from_doc(doc, embed=True),
        col_name="outbox",
    ))
def note_by_id(note_id):
    """Render a single outbox note with its thread, likes and shares.

    API requests are redirected to the raw outbox activity; deleted notes
    return 410 and unknown ones 404.
    """
    if is_api_request():
        return redirect(url_for("outbox_activity", item_id=note_id))

    query = {}
    # Prevent displaying direct messages on the public frontend
    if not session.get("logged_in", False):
        query = is_public()
    data = DB.activities.find_one({
        **in_outbox(),
        **by_remote_id(activity_url(note_id)),
        **query
    })
    if not data:
        abort(404)
    if data["meta"].get("deleted", False):
        abort(410)

    thread = _build_thread(data, query=query)
    app.logger.info(f"thread={thread!r}")

    # The likes and shares lookups were duplicated verbatim; share a helper.
    obj_id = data["activity"]["object"]["id"]
    likes = _note_interaction_actors(ActivityType.LIKE, obj_id)
    app.logger.info(f"likes={likes!r}")
    shares = _note_interaction_actors(ActivityType.ANNOUNCE, obj_id)
    app.logger.info(f"shares={shares!r}")

    return htmlify(
        render_template("note.html",
                        likes=likes,
                        shares=shares,
                        thread=thread,
                        note=data))


def _note_interaction_actors(activity_type, obj_id):
    """Return actors of non-undone *activity_type* activities targeting *obj_id*."""
    docs = DB.activities.find({
        **not_undo(),
        **not_deleted(),
        **by_type(activity_type),
        **by_object_id(obj_id),
    })
    actors = []
    for doc in docs:
        try:
            actors.append(doc["meta"]["actor"])
        except Exception:
            # Keep rendering the page even if one document is malformed.
            app.logger.exception(f"invalid doc: {doc!r}")
    return actors
def index():
    """Serve the actor document (API) or the paginated public timeline."""
    if is_api_request():
        _log_sig()
        return activitypubify(**ME)

    # Timeline: non-pinned public Creates plus non-undone Announces.
    timeline_q = {
        **in_outbox(),
        "$or": [
            {
                **by_type(ActivityType.CREATE),
                **not_deleted(),
                **by_visibility(ap.Visibility.PUBLIC),
                "$or": [{
                    "meta.pinned": False
                }, {
                    "meta.pinned": {
                        "$exists": False
                    }
                }],
            },
            {
                **by_type(ActivityType.ANNOUNCE),
                **not_undo()
            },
        ],
    }

    pinned_notes = []
    # Only fetch the pinned notes if we're on the first page
    first_page = (not request.args.get("older_than")
                  and not request.args.get("newer_than"))
    if first_page:
        pinned_notes = list(
            DB.activities.find({
                **in_outbox(),
                **by_type(ActivityType.CREATE),
                **not_deleted(),
                **pinned(),
                **by_visibility(ap.Visibility.PUBLIC),
            }))

    outbox_data, older_than, newer_than = paginated_query(
        DB.activities, timeline_q, limit=25 - len(pinned_notes))

    return htmlify(
        render_template(
            "index.html",
            outbox_data=outbox_data,
            older_than=older_than,
            newer_than=newer_than,
            pinned=pinned_notes,
        ))
def index():
    """Serve the actor document (API) or the HTML timeline."""
    if is_api_request():
        _log_sig()
        return jsonify(**ME)

    # Top-level (non-reply), public, non-pinned Creates and Announces.
    q = {
        "box": Box.OUTBOX.value,
        "type": {
            "$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]
        },
        "activity.object.inReplyTo": None,
        "meta.deleted": False,
        "meta.undo": False,
        "meta.public": True,
        "$or": [{
            "meta.pinned": False
        }, {
            "meta.pinned": {
                "$exists": False
            }
        }],
    }

    pinned_docs = []
    # Only fetch the pinned notes if we're on the first page
    if not request.args.get("older_than") and not request.args.get(
            "newer_than"):
        pinned_docs = list(
            DB.activities.find({
                "box": Box.OUTBOX.value,
                "type": ActivityType.CREATE.value,
                "meta.deleted": False,
                "meta.undo": False,
                "meta.public": True,
                "meta.pinned": True,
            }))

    outbox_data, older_than, newer_than = paginated_query(
        DB.activities, q, limit=25 - len(pinned_docs))

    return render_template(
        "index.html",
        outbox_data=outbox_data,
        older_than=older_than,
        newer_than=newer_than,
        pinned=pinned_docs,
    )
def featured():
    """Serve the pinned notes as the AP "featured" collection (API only)."""
    if not is_api_request():
        abort(404)
    _log_sig()

    pinned_q = {
        "box": Box.OUTBOX.value,
        "type": ActivityType.CREATE.value,
        "meta.deleted": False,
        "meta.undo": False,
        "meta.pinned": True,
    }
    objects = []
    for doc in DB.activities.find(pinned_q):
        objects.append(clean_activity(doc["activity"]["object"]))
    return activitypubify(
        **activitypub.simple_build_ordered_collection("featured", objects))
def tags(tag):
    """Serve activities tagged with hashtag *tag* (HTML or AP collection)."""
    # Every query below matches on the outbox + the hashtag; the pair was
    # previously duplicated in three query literals, so build it once.
    tag_match = {
        "box": Box.OUTBOX.value,
        "activity.object.tag.type": "Hashtag",
        "activity.object.tag.name": "#" + tag,
    }
    if not DB.activities.count(tag_match):
        abort(404)

    if not is_api_request():
        return render_template(
            "tags.html",
            tag=tag,
            outbox_data=DB.activities.find({
                **tag_match,
                "type": ActivityType.CREATE.value,
                "meta.deleted": False,
            }),
        )

    _log_sig()
    q = {
        **tag_match,
        "meta.deleted": False,
        "meta.undo": False,
        "type": ActivityType.CREATE.value,
    }
    return jsonify(**activitypub.build_ordered_collection(
        DB.activities,
        q=q,
        cursor=request.args.get("cursor"),
        map_func=lambda doc: doc["activity"]["object"]["id"],
        col_name=f"tags/{tag}",
    ))
def outbox():
    """GET: the public ActivityPub outbox; POST: C2S activity submission."""
    if request.method != "GET":
        # Handle POST request aka C2S API
        try:
            _api_required()
        except BadSignature:
            abort(401)
        payload = request.get_json(force=True)
        new_activity = ap.parse_activity(payload)
        location = post_to_outbox(new_activity)
        return Response(status=201, headers={"Location": location})

    if not is_api_request():
        abort(404)
    _log_sig()
    # TODO(tsileo): returns the whole outbox if authenticated and look at OCAP support
    q = {
        **in_outbox(),
        "$or": [
            {
                **by_type(ActivityType.CREATE),
                **not_deleted(),
                **by_visibility(ap.Visibility.PUBLIC),
            },
            {
                **by_type(ActivityType.ANNOUNCE),
                **not_undo()
            },
        ],
    }
    return activitypubify(**activitypub.build_ordered_collection(
        DB.activities,
        q=q,
        cursor=request.args.get("cursor"),
        map_func=lambda doc: activity_from_doc(doc, embed=True),
        col_name="outbox",
    ))
def outbox_activity_shares(item_id):
    """Serve the Announce (share) collection of an outbox activity (API only)."""
    if not is_api_request():
        abort(404)

    doc = DB.activities.find_one({
        "box": Box.OUTBOX.value,
        "remote_id": activity_url(item_id),
        "meta.deleted": False,
    })
    if not doc:
        abort(404)
    _log_sig()

    parent = ap.parse_activity(doc["activity"])
    if parent.ACTIVITY_TYPE != ActivityType.CREATE:
        abort(404)

    target_id = parent.get_object().id
    # Announces may reference the object inline or by bare IRI.
    shares_q = {
        "meta.undo": False,
        "type": ActivityType.ANNOUNCE.value,
        "$or": [
            {
                "activity.object.id": target_id
            },
            {
                "activity.object": target_id
            },
        ],
    }
    return activitypubify(**activitypub.build_ordered_collection(
        DB.activities,
        q=shares_q,
        cursor=request.args.get("cursor"),
        map_func=lambda d: remove_context(d["activity"]),
        col_name=f"outbox/{item_id}/shares",
        first_page=request.args.get("page") == "first",
    ))
def inbox():
    """ActivityPub inbox endpoint.

    GET: serve the (authenticated) inbox as an OrderedCollection.
    POST: receive a server-to-server activity, verify its HTTP signature —
    falling back to re-fetching the payload from its origin — and hand it
    to `post_to_inbox`. Unverifiable payloads are stored in `DB.trash`
    for later analysis.

    Fix: the `logger.exception` call in the parse-failure branch was
    missing its f-prefix, so the request id and payload were never
    interpolated into the log message.
    """
    # GET /inbox
    if request.method == "GET":
        if not is_api_request():
            abort(404)
        try:
            _api_required()
        except BadSignature:
            abort(404)
        return activitypubify(**activitypub.build_ordered_collection(
            DB.activities,
            q={
                "meta.deleted": False,
                "box": Box.INBOX.value
            },
            cursor=request.args.get("cursor"),
            map_func=lambda doc: remove_context(doc["activity"]),
            col_name="inbox",
        ))

    # POST/ inbox
    try:
        data = request.get_json(force=True)
        if not isinstance(data, dict):
            raise ValueError("not a dict")
    except Exception:
        return Response(
            status=422,
            headers={"Content-Type": "application/json"},
            response=json.dumps({
                "error": "failed to decode request body as JSON",
                "request_id": g.request_id,
            }),
        )

    # Check the blacklist now to see if we can return super early
    if is_blacklisted(data):
        logger.info(f"dropping activity from blacklisted host: {data['id']}")
        return Response(status=201)

    logger.info(f"request_id={g.request_id} req_headers={request.headers!r}")
    logger.info(f"request_id={g.request_id} raw_data={data}")
    try:
        req_verified, actor_id = verify_request(request.method, request.path,
                                                request.headers, request.data)
        if not req_verified:
            raise Exception("failed to verify request")
        logger.info(f"request_id={g.request_id} signed by {actor_id}")
    except Exception:
        # Signature verification failed; fall back to dereferencing the
        # activity IRI to prove the payload really exists at its origin.
        logger.exception(
            f"failed to verify request {g.request_id}, trying to verify the payload by fetching the remote"
        )
        try:
            remote_data = get_backend().fetch_iri(data["id"])
        except ActivityGoneError:
            # XXX Mastodon sends Delete activities that are not dereferencable, it's the actor url with #delete
            # appended, so an `ActivityGoneError` kind of ensure it's "legit"
            if data["type"] == ActivityType.DELETE.value and data[
                    "id"].startswith(data["object"]):
                # If we're here, this means the key is not saved, so we cannot verify the object
                logger.info(
                    f"received a Delete for an unknown actor {data!r}, drop it"
                )
                return Response(status=201)
            # NOTE(review): when the Delete heuristic above does not match,
            # control falls through to `data = remote_data` with
            # `remote_data` unbound, raising NameError — confirm intended
            # handling for other gone activities.
        except Exception:
            logger.exception(f"failed to fetch remote for payload {data!r}")
            if "type" in data:
                # Friendica does not returns a 410, but a 302 that redirect to an HTML page
                if ap._has_type(data["type"], ActivityType.DELETE):
                    logger.info(
                        f"received a Delete for an unknown actor {data!r}, drop it"
                    )
                    return Response(status=201)
            if "id" in data:
                if DB.trash.find_one({"activity.id": data["id"]}):
                    # It's already stored in trash, returns early
                    return Response(
                        status=422,
                        headers={"Content-Type": "application/json"},
                        response=json.dumps({
                            "error":
                            "failed to verify request (using HTTP signatures or fetching the IRI)",
                            "request_id": g.request_id,
                        }),
                    )
            # Now we can store this activity in the trash for later analysis
            # Track/store the payload for analysis
            ip, geoip = _get_ip()
            DB.trash.insert({
                "activity": data,
                "meta": {
                    "ts": datetime.now().timestamp(),
                    "ip_address": ip,
                    "geoip": geoip,
                    "tb": traceback.format_exc(),
                    "headers": dict(request.headers),
                    "request_id": g.request_id,
                },
            })
            return Response(
                status=422,
                headers={"Content-Type": "application/json"},
                response=json.dumps({
                    "error":
                    "failed to verify request (using HTTP signatures or fetching the IRI)",
                    "request_id": g.request_id,
                }),
            )

        # We fetched the remote data successfully
        data = remote_data

    try:
        activity = ap.parse_activity(data)
    except ValueError:
        # FIX: this message was missing the f-prefix, so the placeholders
        # were logged literally instead of being interpolated.
        logger.exception(
            f"failed to parse activity for req {g.request_id}: {data!r}")
        # Track/store the payload for analysis
        ip, geoip = _get_ip()
        DB.trash.insert({
            "activity": data,
            "meta": {
                "ts": datetime.now().timestamp(),
                "ip_address": ip,
                "geoip": geoip,
                "tb": traceback.format_exc(),
                "headers": dict(request.headers),
                "request_id": g.request_id,
            },
        })
        return Response(status=201)

    logger.debug(f"inbox activity={g.request_id}/{activity}/{data}")
    post_to_inbox(activity)
    return Response(status=201)
def note_by_id(note_id):
    """Display a single outbox note with its thread, likes and shares."""
    if is_api_request():
        return redirect(url_for("outbox_activity", item_id=note_id))

    data = DB.activities.find_one({
        "box": Box.OUTBOX.value,
        "remote_id": activity_url(note_id)
    })
    if not data:
        abort(404)
    if data["meta"].get("deleted", False):
        abort(410)

    thread = _build_thread(data)
    app.logger.info(f"thread={thread!r}")

    object_id = data["activity"]["object"]["id"]
    # FIXME(tsileo): remove all the useless $or
    object_match = {
        "$or": [
            {
                "activity.object.id": object_id
            },
            {
                "activity.object": object_id
            },
        ]
    }

    likes = []
    for doc in DB.activities.find({
            "meta.undo": False,
            "meta.deleted": False,
            "type": ActivityType.LIKE.value,
            **object_match,
    }):
        try:
            likes.append(doc["meta"]["actor"])
        except Exception:
            app.logger.exception(f"invalid doc: {doc!r}")
    app.logger.info(f"likes={likes!r}")

    shares = []
    for doc in DB.activities.find({
            "meta.undo": False,
            "meta.deleted": False,
            "type": ActivityType.ANNOUNCE.value,
            **object_match,
    }):
        try:
            shares.append(doc["meta"]["actor"])
        except Exception:
            app.logger.exception(f"invalid doc: {doc!r}")
    app.logger.info(f"shares={shares!r}")

    return render_template("note.html",
                           likes=likes,
                           shares=shares,
                           thread=thread,
                           note=data)