def tags(tag):
    """Serve the public outbox notes carrying a given hashtag.

    Returns the HTML tag page for browser requests, or an ActivityPub
    ordered collection of object IDs for API requests. Aborts with 404
    when no public, non-deleted outbox activity matches the tag.
    """
    # Single source of truth for the tag filter; the original duplicated
    # this exact dict in three places (count, find, and the collection query).
    q = {
        **in_outbox(),
        **by_hashtag(tag),
        **by_visibility(ap.Visibility.PUBLIC),
        **not_deleted(),
    }

    if not DB.activities.count(q):
        abort(404)

    if not is_api_request():
        return htmlify(
            render_template(
                "tags.html",
                tag=tag,
                outbox_data=DB.activities.find(q).sort("meta.published", -1),
            )
        )

    _log_sig()
    return activitypubify(
        **activitypub.build_ordered_collection(
            DB.activities,
            q=q,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: doc["activity"]["object"]["id"],
            col_name=f"tags/{tag}",
        )
    )
def followers():
    """Serve the followers collection, as AP JSON or as an HTML page."""
    q = {
        "box": Box.INBOX.value,
        "type": ActivityType.FOLLOW.value,
        "meta.undo": False,
    }

    if is_api_request():
        _log_sig()
        collection = activitypub.build_ordered_collection(
            DB.activities,
            q=q,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: doc["activity"]["actor"],
            col_name="followers",
        )
        return activitypubify(**collection)

    raw_followers, older_than, newer_than = paginated_query(DB.activities, q)
    # Only keep entries whose cached actor metadata is present
    followers_data = [
        doc["meta"] for doc in raw_followers if "actor" in doc.get("meta", {})
    ]
    return htmlify(
        render_template(
            "followers.html",
            followers_data=followers_data,
            older_than=older_than,
            newer_than=newer_than,
        )
    )
def liked():
    """Serve the liked collection: AP JSON for API requests, HTML otherwise."""
    if is_api_request():
        api_q = {
            "meta.deleted": False,
            "meta.undo": False,
            "type": ActivityType.LIKE.value,
        }
        return activitypubify(
            **activitypub.build_ordered_collection(
                DB.activities,
                q=api_q,
                cursor=request.args.get("cursor"),
                map_func=lambda doc: doc["activity"]["object"],
                col_name="liked",
            )
        )

    html_q = {
        "box": Box.OUTBOX.value,
        "type": ActivityType.LIKE.value,
        "meta.deleted": False,
        "meta.undo": False,
    }
    liked_page, older_than, newer_than = paginated_query(DB.activities, html_q)
    return htmlify(
        render_template(
            "liked.html",
            liked=liked_page,
            older_than=older_than,
            newer_than=newer_than,
        )
    )
def outbox_activity_replies(item_id):
    """AP-only ordered collection of public replies to an outbox Create."""
    if not is_api_request():
        abort(404)
    _log_sig()

    doc = DB.activities.find_one(
        {
            **in_outbox(),
            **by_remote_id(activity_url(item_id)),
            **not_deleted(),
            **is_public(),
        }
    )
    if not doc:
        abort(404)

    parsed = ap.parse_activity(doc["activity"])
    # Replies only make sense on Create activities (i.e. notes)
    if parsed.ACTIVITY_TYPE != ActivityType.CREATE:
        abort(404)

    replies_q = {
        **is_public(),
        **not_deleted(),
        **by_type(ActivityType.CREATE),
        "activity.object.inReplyTo": parsed.get_object().id,
    }
    return activitypubify(
        **activitypub.build_ordered_collection(
            DB.activities,
            q=replies_q,
            cursor=request.args.get("cursor"),
            map_func=lambda d: d["activity"]["object"],
            col_name=f"outbox/{item_id}/replies",
            first_page=request.args.get("page") == "first",
        )
    )
def index():
    """Homepage: the AP actor document for API requests, the HTML stream otherwise."""
    if is_api_request():
        _log_sig()
        return activitypubify(**ME)

    stream_q = {
        **in_outbox(),
        "$or": [
            {
                **by_type(ActivityType.CREATE),
                **not_deleted(),
                **by_visibility(ap.Visibility.PUBLIC),
                # Exclude pinned notes from the regular stream; they are
                # prepended separately on the first page
                "$or": [
                    {"meta.pinned": False},
                    {"meta.pinned": {"$exists": False}},
                ],
            },
            {**by_type(ActivityType.ANNOUNCE), **not_undo()},
        ],
    }

    # Only fetch the pinned notes if we're on the first page
    on_first_page = not request.args.get("older_than") and not request.args.get(
        "newer_than"
    )
    pinned_notes = []
    if on_first_page:
        pinned_q = {
            **in_outbox(),
            **by_type(ActivityType.CREATE),
            **not_deleted(),
            **pinned(),
            **by_visibility(ap.Visibility.PUBLIC),
        }
        pinned_notes = list(DB.activities.find(pinned_q))

    outbox_data, older_than, newer_than = paginated_query(
        DB.activities, stream_q, limit=25 - len(pinned_notes)
    )
    return htmlify(
        render_template(
            "index.html",
            outbox_data=outbox_data,
            older_than=older_than,
            newer_than=newer_than,
            pinned=pinned_notes,
        )
    )
def following():
    """Serve the following collection, honoring the HIDE_FOLLOWING setting."""
    q = {
        **in_outbox(),
        **by_type(ActivityType.FOLLOW),
        **not_deleted(),
        **follow_request_accepted(),
        **not_undo(),
    }

    if is_api_request():
        _log_sig()
        if config.HIDE_FOLLOWING:
            # Serve an empty collection rather than exposing the follow list
            return activitypubify(
                **activitypub.simple_build_ordered_collection("following", [])
            )
        return activitypubify(
            **activitypub.build_ordered_collection(
                DB.activities,
                q=q,
                cursor=request.args.get("cursor"),
                map_func=lambda doc: doc["activity"]["object"],
                col_name="following",
            )
        )

    # HTML view is restricted to the logged-in owner when hidden
    if config.HIDE_FOLLOWING and not session.get("logged_in", False):
        abort(404)

    docs, older_than, newer_than = paginated_query(DB.activities, q)
    following_data = [
        (doc["remote_id"], doc["meta"])
        for doc in docs
        if "remote_id" in doc and "object" in doc.get("meta", {})
    ]
    return htmlify(
        render_template(
            "following.html",
            following_data=following_data,
            older_than=older_than,
            newer_than=newer_than,
            lists=list(DB.lists.find()),
        )
    )
def featured():
    """AP-only ordered collection of the pinned (featured) public notes."""
    if not is_api_request():
        abort(404)
    _log_sig()

    pinned_q = {
        "box": Box.OUTBOX.value,
        "type": ActivityType.CREATE.value,
        "meta.deleted": False,
        "meta.undo": False,
        "meta.pinned": True,
    }
    items = [
        clean_activity(doc["activity"]["object"])
        for doc in DB.activities.find(pinned_q)
    ]
    return activitypubify(
        **activitypub.simple_build_ordered_collection("featured", items)
    )
def outbox_detail(item_id):
    """Serve one outbox activity as AP JSON; browsers get redirected to the note page."""
    if "text/html" in request.headers.get("Accept", ""):
        return redirect(url_for("note_by_id", note_id=item_id))

    doc = DB.activities.find_one(
        {
            **in_outbox(),
            **by_remote_id(activity_url(item_id)),
            **not_deleted(),
            **is_public(),
        }
    )
    if not doc:
        abort(404)

    _log_sig()
    # NOTE(review): the query already filters with not_deleted(); this extra
    # guard looks redundant but is kept as-is — confirm before removing.
    if doc["meta"].get("deleted", False):
        abort(404)
    return activitypubify(**activity_from_doc(doc))
def outbox_activity(item_id):
    """Serve the inner object of an outbox Create activity as AP JSON."""
    if "text/html" in request.headers.get("Accept", ""):
        return redirect(url_for("note_by_id", note_id=item_id))

    data = find_one_activity(
        {**in_outbox(), **by_remote_id(activity_url(item_id)), **is_public()}
    )
    if not data:
        abort(404)

    _log_sig()
    obj = activity_from_doc(data)
    if data["meta"].get("deleted", False):
        abort(404)
    # Only Create activities have an embedded object to serve
    if obj["type"] != ActivityType.CREATE.value:
        abort(404)
    return activitypubify(**obj["object"])
def outbox_activity_likes(item_id):
    """AP-only ordered collection of Like activities targeting an outbox note."""
    if not is_api_request():
        abort(404)
    _log_sig()

    doc = DB.activities.find_one(
        {
            "box": Box.OUTBOX.value,
            "remote_id": activity_url(item_id),
            "meta.deleted": False,
            "meta.public": True,
        }
    )
    if not doc:
        abort(404)

    parsed = ap.parse_activity(doc["activity"])
    if parsed.ACTIVITY_TYPE != ActivityType.CREATE:
        abort(404)

    obj_id = parsed.get_object().id
    likes_q = {
        "meta.undo": False,
        "type": ActivityType.LIKE.value,
        # Remote likes may embed the full object or just reference its IRI
        "$or": [
            {"activity.object.id": obj_id},
            {"activity.object": obj_id},
        ],
    }
    return activitypubify(
        **activitypub.build_ordered_collection(
            DB.activities,
            q=likes_q,
            cursor=request.args.get("cursor"),
            map_func=lambda d: remove_context(d["activity"]),
            col_name=f"outbox/{item_id}/likes",
            first_page=request.args.get("page") == "first",
        )
    )
def outbox():
    """GET: public outbox as an AP ordered collection; POST: C2S publishing."""
    if request.method == "GET":
        if not is_api_request():
            abort(404)
        _log_sig()
        # TODO(tsileo): returns the whole outbox if authenticated and look at OCAP support
        public_q = {
            **in_outbox(),
            "$or": [
                {
                    **by_type(ActivityType.CREATE),
                    **not_deleted(),
                    **by_visibility(ap.Visibility.PUBLIC),
                },
                {**by_type(ActivityType.ANNOUNCE), **not_undo()},
            ],
        }
        return activitypubify(
            **activitypub.build_ordered_collection(
                DB.activities,
                q=public_q,
                cursor=request.args.get("cursor"),
                map_func=lambda doc: activity_from_doc(doc, embed=True),
                col_name="outbox",
            )
        )

    # Handle POST request aka C2S API
    try:
        _api_required()
    except BadSignature:
        abort(401)

    payload = request.get_json(force=True)
    activity = ap.parse_activity(payload)
    activity_id = post_to_outbox(activity)
    return Response(status=201, headers={"Location": activity_id})
def _trash_payload(data):
    """Store a rejected/unparseable payload in the trash collection for later analysis."""
    ip, geoip = _get_ip()
    DB.trash.insert(
        {
            "activity": data,
            "meta": {
                "ts": datetime.now().timestamp(),
                "ip_address": ip,
                "geoip": geoip,
                "tb": traceback.format_exc(),
                "headers": dict(request.headers),
                "request_id": g.request_id,
            },
        }
    )


def inbox():
    """Inbox endpoint.

    GET: serve the inbox as an AP ordered collection (API + auth required).
    POST: S2S delivery — verify the HTTP signature, falling back to
    re-fetching the payload by IRI, then hand the activity to post_to_inbox.
    """
    # GET /inbox
    if request.method == "GET":
        if not is_api_request():
            abort(404)
        try:
            _api_required()
        except BadSignature:
            abort(404)

        return activitypubify(
            **activitypub.build_ordered_collection(
                DB.activities,
                q={"meta.deleted": False, "box": Box.INBOX.value},
                cursor=request.args.get("cursor"),
                map_func=lambda doc: remove_context(doc["activity"]),
                col_name="inbox",
            )
        )

    # POST /inbox
    try:
        data = request.get_json(force=True)
        if not isinstance(data, dict):
            raise ValueError("not a dict")
    except Exception:
        return Response(
            status=422,
            headers={"Content-Type": "application/json"},
            response=json.dumps(
                {
                    "error": "failed to decode request body as JSON",
                    "request_id": g.request_id,
                }
            ),
        )

    # Check the blacklist now to see if we can return super early
    if is_blacklisted(data):
        logger.info(f"dropping activity from blacklisted host: {data['id']}")
        return Response(status=201)

    logger.info(f"request_id={g.request_id} req_headers={request.headers!r}")
    logger.info(f"request_id={g.request_id} raw_data={data}")
    try:
        req_verified, actor_id = verify_request(
            request.method, request.path, request.headers, request.data
        )
        if not req_verified:
            raise Exception("failed to verify request")
        logger.info(f"request_id={g.request_id} signed by {actor_id}")
    except Exception:
        logger.exception(
            f"failed to verify request {g.request_id}, trying to verify the payload by fetching the remote"
        )
        try:
            remote_data = get_backend().fetch_iri(data["id"])
        except ActivityGoneError:
            # XXX Mastodon sends Delete activities that are not dereferencable, it's the actor url with #delete
            # appended, so an `ActivityGoneError` kind of ensure it's "legit"
            if data["type"] == ActivityType.DELETE.value and data["id"].startswith(
                data["object"]
            ):
                # If we're here, this means the key is not saved, so we cannot verify the object
                logger.info(
                    f"received a Delete for an unknown actor {data!r}, drop it"
                )
                return Response(status=201)
            # NOTE(review): if the condition above is false, control falls
            # through to `data = remote_data` with `remote_data` unbound
            # (NameError) — pre-existing behavior, left unchanged; confirm
            # intent before tightening.
        except Exception:
            logger.exception(f"failed to fetch remote for payload {data!r}")

            if "type" in data:
                # Friendica does not returns a 410, but a 302 that redirect to an HTML page
                if ap._has_type(data["type"], ActivityType.DELETE):
                    logger.info(
                        f"received a Delete for an unknown actor {data!r}, drop it"
                    )
                    return Response(status=201)

            if "id" in data:
                if DB.trash.find_one({"activity.id": data["id"]}):
                    # It's already stored in trash, returns early
                    return Response(
                        status=422,
                        headers={"Content-Type": "application/json"},
                        response=json.dumps(
                            {
                                "error": "failed to verify request (using HTTP signatures or fetching the IRI)",
                                "request_id": g.request_id,
                            }
                        ),
                    )

            # Now we can store this activity in the trash for later analysis
            # Track/store the payload for analysis
            _trash_payload(data)

            return Response(
                status=422,
                headers={"Content-Type": "application/json"},
                response=json.dumps(
                    {
                        "error": "failed to verify request (using HTTP signatures or fetching the IRI)",
                        "request_id": g.request_id,
                    }
                ),
            )

        # We fetched the remote data successfully
        data = remote_data

    try:
        activity = ap.parse_activity(data)
    except ValueError:
        # FIX: this log call was missing its f-string prefix, so the
        # placeholders were emitted literally instead of being interpolated
        logger.exception(
            f"failed to parse activity for req {g.request_id}: {data!r}"
        )
        # Track/store the payload for analysis
        _trash_payload(data)
        return Response(status=201)

    logger.debug(f"inbox activity={g.request_id}/{activity}/{data}")
    post_to_inbox(activity)
    return Response(status=201)
def ap_emoji(name):
    """Serve a custom emoji as an AP object, or 404 when it is unknown."""
    if name not in EMOJIS:
        abort(404)
    # Attach the default JSON-LD context to the stored emoji payload
    payload = {**EMOJIS[name], "@context": config.DEFAULT_CTX}
    return activitypubify(**payload)