def nodeinfo() -> Any:
    """NodeInfo endpoint."""
    # Local post count: public-ish outbox activities that are not deleted.
    local_posts_query = {
        "box": Box.OUTBOX.value,
        "meta.deleted": False,
        "type": {"$in": [ap.ActivityType.CREATE.value, ap.ActivityType.ANNOUNCE.value]},
    }
    payload = {
        "version": "2.1",
        "software": {
            "name": "microblogpub",
            "version": config.VERSION,
            "repository": "https://github.com/tsileo/microblog.pub",
        },
        "protocols": ["activitypub"],
        "services": {"inbound": [], "outbound": []},
        "openRegistrations": False,
        "usage": {
            "users": {"total": 1},
            "localPosts": DB.activities.count(local_posts_query),
        },
        "metadata": {
            "nodeName": f"@{config.USERNAME}@{config.DOMAIN}",
            "version": config.VERSION,
            "versionDate": config.VERSION_DATE,
        },
    }
    return jsonify(
        payload,
        "application/json; profile=http://nodeinfo.diaspora.software/ns/schema/2.1#",
    )
def outbox():
    """ActivityPub outbox: OrderedCollection on GET, activity submission on POST."""
    if request.method == "GET":
        if not is_api_request():
            abort(404)
        _log_sig()
        # TODO(tsileo): returns the whole outbox if authenticated and look at OCAP support
        public_query = {
            "box": Box.OUTBOX.value,
            "meta.deleted": False,
            "meta.undo": False,
            "meta.public": True,
            "type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
        }
        collection = activitypub.build_ordered_collection(
            DB.activities,
            q=public_query,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: activity_from_doc(doc, embed=True),
            col_name="outbox",
        )
        return jsonify(**collection)

    # POST: only the (authenticated) owner may push new activities
    try:
        _api_required()
    except BadSignature:
        abort(401)

    payload = request.get_json(force=True)
    new_activity = ap.parse_activity(payload)
    location = post_to_outbox(new_activity)
    return Response(status=201, headers={"Location": location})
def handle_activitypub_error(error):
    """Serialize an ActivityPub error as JSON, tagged with the request id."""
    tb = traceback.format_tb(error.__traceback__)
    logger.error(f"caught activitypub error for {g.request_id}: {error!r}, {tb}")
    resp = jsonify({**error.to_dict(), "request_id": g.request_id})
    resp.status_code = error.status_code
    return resp
def liked():
    """Liked collection: HTML page for browsers, AP OrderedCollection otherwise."""
    if not is_api_request():
        html_query = {
            "box": Box.OUTBOX.value,
            "type": ActivityType.LIKE.value,
            "meta.deleted": False,
            "meta.undo": False,
        }
        likes, older_than, newer_than = paginated_query(DB.activities, html_query)
        return render_template(
            "liked.html",
            liked=likes,
            older_than=older_than,
            newer_than=newer_than,
        )

    _log_sig()
    api_query = {
        "meta.deleted": False,
        "meta.undo": False,
        "type": ActivityType.LIKE.value,
    }
    return jsonify(
        **activitypub.build_ordered_collection(
            DB.activities,
            q=api_query,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: doc["activity"]["object"],
            col_name="liked",
        )
    )
def following():
    """Following collection: AP OrderedCollection or HTML page."""
    query = {**in_outbox(), **by_type(ActivityType.FOLLOW), **not_undo()}

    if is_api_request():
        _log_sig()
        return jsonify(
            **activitypub.build_ordered_collection(
                DB.activities,
                q=query,
                cursor=request.args.get("cursor"),
                map_func=lambda doc: doc["activity"]["object"],
                col_name="following",
            )
        )

    # The HTML page can be hidden from anonymous visitors
    if config.HIDE_FOLLOWING and not session.get("logged_in", False):
        abort(404)

    docs, older_than, newer_than = paginated_query(DB.activities, query)
    following_data = [
        (doc["remote_id"], doc["meta"]["object"])
        for doc in docs
        if "remote_id" in doc and "object" in doc.get("meta", {})
    ]
    return render_template(
        "following.html",
        following_data=following_data,
        older_than=older_than,
        newer_than=newer_than,
        lists=list(DB.lists.find()),
    )
def outbox_activity_replies(item_id):
    """AP replies collection for a public outbox Create activity."""
    if not is_api_request():
        abort(404)
    _log_sig()

    record = DB.activities.find_one(
        {
            "box": Box.OUTBOX.value,
            "remote_id": activity_url(item_id),
            "meta.deleted": False,
            "meta.public": True,
        }
    )
    if not record:
        abort(404)

    parent = ap.parse_activity(record["activity"])
    # Only Create activities (i.e. notes) have a replies collection
    if parent.ACTIVITY_TYPE != ActivityType.CREATE:
        abort(404)

    replies_query = {
        "meta.deleted": False,
        "meta.public": True,
        "type": ActivityType.CREATE.value,
        "activity.object.inReplyTo": parent.get_object().id,
    }
    return jsonify(
        **activitypub.build_ordered_collection(
            DB.activities,
            q=replies_query,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: doc["activity"]["object"],
            col_name=f"outbox/{item_id}/replies",
            first_page=request.args.get("page") == "first",
        )
    )
def followers():
    """Followers collection: AP OrderedCollection or HTML page."""
    query = {
        "box": Box.INBOX.value,
        "type": ActivityType.FOLLOW.value,
        "meta.undo": False,
    }

    if is_api_request():
        _log_sig()
        return jsonify(
            **activitypub.build_ordered_collection(
                DB.activities,
                q=query,
                cursor=request.args.get("cursor"),
                map_func=lambda doc: doc["activity"]["actor"],
                col_name="followers",
            )
        )

    raw_followers, older_than, newer_than = paginated_query(DB.activities, query)
    followers_data = [
        doc["meta"]["actor"] for doc in raw_followers if "actor" in doc.get("meta", {})
    ]
    return render_template(
        "followers.html",
        followers_data=followers_data,
        older_than=older_than,
        newer_than=newer_than,
    )
def api_debug() -> _Response:
    """Endpoint used/needed for testing, only works in DEBUG_MODE."""
    if not DEBUG_MODE:
        return jsonify({"message": "DEBUG_MODE is off"})

    if request.method == "DELETE":
        _drop_db()
        return jsonify({"message": "DB dropped"})

    inbox_count = DB.activities.count({"box": Box.INBOX.value})
    outbox_count = DB.activities.count({"box": Box.OUTBOX.value})
    return jsonify(
        {
            "inbox": inbox_count,
            "outbox": outbox_count,
            "outbox_data": without_id(DB.activities.find({"box": Box.OUTBOX.value})),
        }
    )
def handle_value_error(error):
    """Turn an uncaught ValueError into a 400 JSON response."""
    tb = traceback.format_tb(error.__traceback__)
    logger.error(f"caught value error for {g.request_id}: {error!r}, {tb}")
    resp = jsonify({"message": error.args[0], "request_id": g.request_id})
    resp.status_code = 400
    return resp
def wellknown_webfinger() -> Any:
    """Exposes/servers WebFinger data."""
    acct = f"acct:{config.USERNAME}@{config.DOMAIN}"
    if request.args.get("resource") not in [acct, config.ID]:
        abort(404)

    links = [
        {
            "rel": "http://webfinger.net/rel/profile-page",
            "type": "text/html",
            "href": config.ID,
        },
        {"rel": "self", "type": "application/activity+json", "href": config.ID},
        {
            "rel": "http://ostatus.org/schema/1.0/subscribe",
            "template": config.BASE_URL + "/authorize_follow?profile={uri}",
        },
        {"rel": "magic-public-key", "href": config.KEY.to_magic_key()},
        {
            "href": config.ICON_URL,
            "rel": "http://webfinger.net/rel/avatar",
            "type": mimetypes.guess_type(config.ICON_URL)[0],
        },
    ]
    return jsonify(
        {"subject": acct, "aliases": [config.ID], "links": links},
        "application/jrd+json; charset=utf-8",
    )
def build_auth_resp(payload):
    """Return *payload* as JSON or form-encoded, per the client's Accept header."""
    wants_json = request.headers.get("Accept") == "application/json"
    if wants_json:
        return jsonify(payload)
    return Response(
        status=200,
        headers={"Content-Type": "application/x-www-form-urlencoded"},
        response=urlencode(payload),
    )
def _user_api_response(**kwargs) -> _Response:
    """201 JSON response, or a redirect if the client passed a `redirect` arg."""
    redirect_to = _user_api_arg("redirect", default=None)
    if redirect_to:
        return redirect(redirect_to)

    response = jsonify(kwargs)
    response.status_code = 201
    return response
def handle_task_error(error):
    """Turn an uncaught task error into a 500 JSON response.

    Logs the failure (tagged with the request id) and returns the task's
    captured traceback to the caller.
    """
    # FIX: the log line said "caught activitypub error" — a copy/paste from
    # handle_activitypub_error; label it as a task error so logs are searchable.
    logger.error(
        f"caught task error for {g.request_id}: {error!r}, {traceback.format_tb(error.__traceback__)}"
    )
    # `error.message` carries the traceback recorded by the failed task —
    # presumably set by the task runner; verify against the error class.
    response = jsonify({
        "traceback": error.message,
        "request_id": g.request_id
    })
    response.status_code = 500
    return response
def wellknown_nodeinfo() -> Any:
    """Exposes the NodeInfo endpoint (http://nodeinfo.diaspora.software/)."""
    schema_link = {
        "rel": "http://nodeinfo.diaspora.software/ns/schema/2.1",
        "href": f"{config.ID}/nodeinfo",
    }
    return jsonify({"links": [schema_link]})
def outbox_detail(item_id):
    """Serve a single public outbox activity as ActivityPub JSON."""
    doc = DB.activities.find_one(
        {
            "box": Box.OUTBOX.value,
            "remote_id": activity_url(item_id),
            "meta.public": True,
        }
    )
    if not doc:
        abort(404)
    _log_sig()
    # Deleted activities 404 as well (after the signature has been logged)
    if doc["meta"].get("deleted", False):
        abort(404)
    return jsonify(**activity_from_doc(doc))
def index():
    """Homepage: the AP actor document for API requests, the HTML timeline otherwise."""
    if is_api_request():
        _log_sig()
        return jsonify(**ME)

    # Public, top-level (non-reply), non-pinned timeline entries
    timeline_query = {
        "box": Box.OUTBOX.value,
        "type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
        "activity.object.inReplyTo": None,
        "meta.deleted": False,
        "meta.undo": False,
        "meta.public": True,
        "$or": [{"meta.pinned": False}, {"meta.pinned": {"$exists": False}}],
    }

    pinned = []
    # Only fetch the pinned notes if we're on the first page
    first_page = not request.args.get("older_than") and not request.args.get("newer_than")
    if first_page:
        pinned = list(
            DB.activities.find(
                {
                    "box": Box.OUTBOX.value,
                    "type": ActivityType.CREATE.value,
                    "meta.deleted": False,
                    "meta.undo": False,
                    "meta.public": True,
                    "meta.pinned": True,
                }
            )
        )

    # Pinned notes eat into the 25-item page budget
    outbox_data, older_than, newer_than = paginated_query(
        DB.activities, timeline_query, limit=25 - len(pinned)
    )

    return render_template(
        "index.html",
        outbox_data=outbox_data,
        older_than=older_than,
        newer_than=newer_than,
        pinned=pinned,
    )
def outbox_activity(item_id):
    """Serve the object of a public outbox Create activity as AP JSON."""
    record = find_one_activity(
        {**in_outbox(), **by_remote_id(activity_url(item_id)), **is_public()}
    )
    if not record:
        abort(404)
    _log_sig()

    doc = activity_from_doc(record)
    if record["meta"].get("deleted", False):
        abort(404)
    # Only Create activities expose their inner object here
    if doc["type"] != ActivityType.CREATE.value:
        abort(404)
    return jsonify(**doc["object"])
def featured():
    """AP featured (pinned notes) collection."""
    if not is_api_request():
        abort(404)
    _log_sig()

    pinned_query = {
        "box": Box.OUTBOX.value,
        "type": ActivityType.CREATE.value,
        "meta.deleted": False,
        "meta.undo": False,
        "meta.pinned": True,
    }
    items = [
        clean_activity(doc["activity"]["object"])
        for doc in DB.activities.find(pinned_query)
    ]
    return jsonify(**activitypub.simple_build_ordered_collection("featured", items))
def tags(tag):
    """Hashtag page: HTML for browsers, AP OrderedCollection of note ids otherwise."""
    hashtag = "#" + tag

    # 404 for hashtags never used in the outbox
    if not DB.activities.count(
        {
            "box": Box.OUTBOX.value,
            "activity.object.tag.type": "Hashtag",
            "activity.object.tag.name": hashtag,
        }
    ):
        abort(404)

    if not is_api_request():
        return render_template(
            "tags.html",
            tag=tag,
            outbox_data=DB.activities.find(
                {
                    "box": Box.OUTBOX.value,
                    "type": ActivityType.CREATE.value,
                    "meta.deleted": False,
                    "activity.object.tag.type": "Hashtag",
                    "activity.object.tag.name": hashtag,
                }
            ),
        )

    _log_sig()
    api_query = {
        "box": Box.OUTBOX.value,
        "meta.deleted": False,
        "meta.undo": False,
        "type": ActivityType.CREATE.value,
        "activity.object.tag.type": "Hashtag",
        "activity.object.tag.name": hashtag,
    }
    return jsonify(
        **activitypub.build_ordered_collection(
            DB.activities,
            q=api_query,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: doc["activity"]["object"]["id"],
            col_name=f"tags/{tag}",
        )
    )
def api_stream() -> _Response:
    """Return the inbox as a JSON feed (paginated via the `cursor` query arg)."""
    cursor = request.args.get("cursor")
    return jsonify(feed.build_inbox_json_feed("/api/stream", cursor))
def api_new_note() -> _Response:
    """Create a new Note from either the custom API or a Micropub request.

    Accepts form-data or JSON (microformats2), parses the markdown content,
    resolves visibility/recipients, attaches uploads, and posts the resulting
    Create activity to the outbox. Returns 201 with a Location header for
    Micropub clients, or the standard user-API response otherwise.
    """
    # Basic Micropub (https://www.w3.org/TR/micropub/) query configuration support
    if request.method == "GET" and request.args.get("q") == "config":
        return jsonify({})
    elif request.method == "GET":
        abort(405)

    source = None
    summary = None
    place_tags = []

    # Basic Micropub (https://www.w3.org/TR/micropub/) "create" support
    is_micropub = False
    # First, check if the Micropub specific fields are present
    if (
        _user_api_arg("h", default=None) == "entry"
        or _user_api_arg("type", default=[None])[0] == "h-entry"
    ):
        is_micropub = True
        # Ensure the "create" scope is set
        if "jwt_payload" not in flask.g or "create" not in flask.g.jwt_payload["scope"]:
            abort(403)

        # Handle location sent via form-data
        # `geo:28.5,9.0,0.0`
        location = _user_api_arg("location", default="")
        if location.startswith("geo:"):
            # geo URI: "geo:<lat>,<lng>[,<alt>]" — altitude (if any) is ignored
            slat, slng, *_ = location[4:].split(",")
            place_tags.append(
                {
                    "type": ap.ActivityType.PLACE.value,
                    "url": "",
                    "name": "",
                    "latitude": float(slat),
                    "longitude": float(slng),
                }
            )

        # Handle JSON microformats2 data
        if _user_api_arg("type", default=None):
            _logger.info(f"Micropub request: {request.json}")
            try:
                source = request.json["properties"]["content"][0]
            except (ValueError, KeyError):
                pass

            # Handle HTML
            if isinstance(source, dict):
                source = source.get("html")

            try:
                summary = request.json["properties"]["name"][0]
            except (ValueError, KeyError):
                pass

    # Try to parse the name as summary if the payload is POSTed using form-data
    if summary is None:
        summary = _user_api_arg("name", default=None)

    # This step will also parse content from Micropub request
    if source is None:
        source = _user_api_arg("content", default=None)

    if not source:
        # Caught by the app-wide ValueError handler (400 response)
        raise ValueError("missing content")

    if summary is None:
        summary = _user_api_arg("summary", default="")

    # Explicit lat/lng form fields only apply when no geo: location was given
    if not place_tags:
        if _user_api_arg("location_lat", default=None):
            lat = float(_user_api_arg("location_lat"))
            lng = float(_user_api_arg("location_lng"))
            loc_name = _user_api_arg("location_name", default="")
            place_tags.append(
                {
                    "type": ap.ActivityType.PLACE.value,
                    "url": "",
                    "name": loc_name,
                    "latitude": lat,
                    "longitude": lng,
                }
            )

    # All the following fields are specific to the API (i.e. not Micropub related)
    _reply, reply = None, None
    try:
        _reply = _user_api_arg("reply")
    except ValueError:
        # No "reply" arg: this is a top-level note
        pass

    visibility = ap.Visibility[
        _user_api_arg("visibility", default=ap.Visibility.PUBLIC.name)
    ]

    content, tags = parse_markdown(source)

    # Check for custom emojis
    tags = tags + emojis.tags(content) + place_tags

    to: List[str] = []
    cc: List[str] = []

    # Addressing per visibility; DIRECT leaves both lists empty here
    # (recipients come from the reply/mentions below)
    if visibility == ap.Visibility.PUBLIC:
        to = [ap.AS_PUBLIC]
        cc = [ID + "/followers"]
    elif visibility == ap.Visibility.UNLISTED:
        to = [ID + "/followers"]
        cc = [ap.AS_PUBLIC]
    elif visibility == ap.Visibility.FOLLOWERS_ONLY:
        to = [ID + "/followers"]
        cc = []

    if _reply:
        reply = ap.fetch_remote_activity(_reply)
        if visibility == ap.Visibility.DIRECT:
            to.append(reply.attributedTo)
        else:
            cc.append(reply.attributedTo)

    context = new_context(reply)

    # Mentions are always added to the "to" list
    for tag in tags:
        if tag["type"] == "Mention":
            to.append(tag["href"])

    raw_note = dict(
        attributedTo=MY_PERSON.id,
        # Never address ourselves; de-duplicate recipients
        cc=list(set(cc) - set([MY_PERSON.id])),
        to=list(set(to) - set([MY_PERSON.id])),
        summary=summary,
        content=content,
        tag=tags,
        source={"mediaType": "text/markdown", "content": source},
        inReplyTo=reply.id if reply else None,
        context=context,
    )

    if request.files:
        # NOTE(review): each file overwrites "attachment", so only the last
        # uploaded file is kept — confirm single-attachment is intended
        for f in request.files.keys():
            if not request.files[f].filename:
                continue
            file = request.files[f]
            rfilename = secure_filename(file.filename)
            with BytesIO() as buf:
                file.save(buf)
                oid = MEDIA_CACHE.save_upload(buf, rfilename)
            mtype = mimetypes.guess_type(rfilename)[0]
            raw_note["attachment"] = [
                {
                    "mediaType": mtype,
                    "name": _user_api_arg("file_description", default=rfilename),
                    "type": "Document",
                    "url": f"{BASE_URL}/uploads/{oid}/{rfilename}",
                }
            ]

    note = ap.Note(**raw_note)
    create = note.build_create()
    create_id = post_to_outbox(create)

    # Return a 201 with the note URL in the Location header if this was a Micropub request
    if is_micropub:
        resp = flask.Response("", headers={"Location": create_id})
        resp.status_code = 201
        return resp

    return _user_api_response(activity=create_id)
def api_user_key() -> _Response:
    """Expose the admin API key as JSON."""
    payload = {"api_key": ADMIN_API_KEY}
    return jsonify(payload)
def inbox():
    """ActivityPub inbox.

    GET (authenticated API requests only) returns the inbox as an
    OrderedCollection. POST accepts an incoming activity: the payload is
    JSON-decoded, checked against the blacklist, then verified via HTTP
    signatures with a fallback to re-fetching the activity by IRI;
    unverifiable payloads are stored in the trash collection for analysis.
    """
    # GET /inbox
    if request.method == "GET":
        if not is_api_request():
            abort(404)
        try:
            _api_required()
        except BadSignature:
            abort(404)

        return jsonify(
            **activitypub.build_ordered_collection(
                DB.activities,
                q={"meta.deleted": False, "box": Box.INBOX.value},
                cursor=request.args.get("cursor"),
                map_func=lambda doc: remove_context(doc["activity"]),
                col_name="inbox",
            )
        )

    # POST/ inbox
    try:
        data = request.get_json(force=True)
        if not isinstance(data, dict):
            raise ValueError("not a dict")
    except Exception:
        # Undecodable/ill-typed body: reject with 422
        return Response(
            status=422,
            headers={"Content-Type": "application/json"},
            response=json.dumps(
                {
                    "error": "failed to decode request body as JSON",
                    "request_id": g.request_id,
                }
            ),
        )

    # Check the blacklist now to see if we can return super early
    if is_blacklisted(data):
        logger.info(f"dropping activity from blacklisted host: {data['id']}")
        # 201 so the sender believes delivery succeeded and stops retrying
        return Response(status=201)

    logger.info(f"request_id={g.request_id} req_headers={request.headers!r}")
    logger.info(f"request_id={g.request_id} raw_data={data}")
    try:
        # First try HTTP-signature verification of the raw request
        req_verified, actor_id = verify_request(
            request.method, request.path, request.headers, request.data
        )
        if not req_verified:
            raise Exception("failed to verify request")
        logger.info(f"request_id={g.request_id} signed by {actor_id}")
    except Exception:
        # Signature check failed: fall back to dereferencing the activity IRI
        logger.exception(
            f"failed to verify request {g.request_id}, trying to verify the payload by fetching the remote"
        )
        try:
            remote_data = get_backend().fetch_iri(data["id"])
        except ActivityGoneError:
            # XXX Mastodon sends Delete activities that are not dereferencable, it's the actor url with #delete
            # appended, so an `ActivityGoneError` kind of ensure it's "legit"
            if data["type"] == ActivityType.DELETE.value and data["id"].startswith(
                data["object"]
            ):
                # If we're here, this means the key is not saved, so we cannot verify the object
                logger.info(f"received a Delete for an unknown actor {data!r}, drop it")
                return Response(status=201)
        except Exception:
            logger.exception(f"failed to fetch remote for payload {data!r}")

            if "type" in data:
                # Friendica does not returns a 410, but a 302 that redirect to an HTML page
                if ap._has_type(data["type"], ActivityType.DELETE):
                    logger.info(
                        f"received a Delete for an unknown actor {data!r}, drop it"
                    )
                    return Response(status=201)

            if "id" in data:
                if DB.trash.find_one({"activity.id": data["id"]}):
                    # It's already stored in trash, returns early
                    return Response(
                        status=422,
                        headers={"Content-Type": "application/json"},
                        response=json.dumps(
                            {
                                "error": "failed to verify request (using HTTP signatures or fetching the IRI)",
                                "request_id": g.request_id,
                            }
                        ),
                    )

            # Now we can store this activity in the trash for later analysis
            # Track/store the payload for analysis
            ip, geoip = _get_ip()

            DB.trash.insert(
                {
                    "activity": data,
                    "meta": {
                        "ts": datetime.now().timestamp(),
                        "ip_address": ip,
                        "geoip": geoip,
                        "tb": traceback.format_exc(),
                        "headers": dict(request.headers),
                        "request_id": g.request_id,
                    },
                }
            )

            return Response(
                status=422,
                headers={"Content-Type": "application/json"},
                response=json.dumps(
                    {
                        "error": "failed to verify request (using HTTP signatures or fetching the IRI)",
                        "request_id": g.request_id,
                    }
                ),
            )

        # We fetched the remote data successfully
        data = remote_data

    activity = ap.parse_activity(data)
    logger.debug(f"inbox activity={g.request_id}/{activity}/{data}")
    post_to_inbox(activity)

    return Response(status=201)