def test_httpsig():
    back = InMemBackend()
    ap.use_backend(back)

    # Generate a keypair for the test actor and expose its public key through the
    # mocked backend so verify_request can resolve the keyId.
    k = Key("https://lol.com")
    k.new()
    back.FETCH_MOCK["https://lol.com#main-key"] = {
        "publicKey": k.to_dict(),
        "id": "https://lol.com",
    }

    httpretty.register_uri(httpretty.POST, "https://remote-instance.com", body="ok")

    # Sign an outgoing request with the private key...
    auth = httpsig.HTTPSigAuth(k)
    resp = requests.post("https://remote-instance.com", json={"ok": 1}, auth=auth)

    # ...and check that the signature on the prepared request verifies.
    assert httpsig.verify_request(
        resp.request.method,
        resp.request.path_url,
        resp.request.headers,
        resp.request.body,
    )
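For reference (illustrative, not taken from the library's documentation), the auth class attaches a draft-cavage style `Signature` header to the prepared request; the snippet below could follow the signed POST in the test above, with `resp` being the response it returns. The exact list of signed headers depends on the library version, so the commented value is only representative.

# Illustrative fragment: `resp` is the response from the signed POST in test_httpsig above.
sig_header = resp.request.headers["Signature"]
assert "https://lol.com#main-key" in sig_header  # the keyId registered in FETCH_MOCK
assert "signature=" in sig_header
# A representative value looks roughly like:
#   keyId="https://lol.com#main-key",algorithm="rsa-sha256",
#   headers="(request-target) host date digest content-type",
#   signature="<base64-encoded RSA-SHA256 over the signing string>"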
def user_inbox(name):
    be = activitypub.get_backend()
    if not be:
        abort(500)
    data = request.get_json(force=True)
    if not data:
        abort(500)
    current_app.logger.debug(f"req_headers={request.headers}")
    current_app.logger.debug(f"raw_data={data}")
    try:
        if not verify_request(
            request.method, request.path, request.headers, request.data
        ):
            raise Exception("failed to verify request")
    except Exception:
        current_app.logger.exception("failed to verify request")

        # The HTTP signature could not be verified; fall back to fetching the
        # activity by its IRI to confirm it really exists on the origin server.
        try:
            data = be.fetch_iri(data["id"])
        except Exception:
            current_app.logger.exception(f"failed to fetch remote id at {data['id']}")
            resp = {
                "error": "failed to verify request "
                "(using HTTP signatures or fetching the IRI)"
            }
            response = jsonify(resp)
            response.mimetype = "application/json; charset=utf-8"
            response.status_code = 422
            return response

    activity = activitypub.parse_activity(data)
    current_app.logger.debug(f"inbox_activity={activity}/{data}")
    post_to_inbox(activity)
    return Response(status=201)
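Conceptually, the `verify_request` call used above does something like the sketch below: parse the `Signature` header, rebuild the signing string from the listed headers, dereference the actor's public key via `keyId`, and check the RSA-SHA256 signature. This is a simplified illustration of the draft-cavage HTTP Signatures scheme, not the actual library code; `fetch_public_key_pem` is a hypothetical helper.

import base64

from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import padding


def _parse_signature_header(value):
    # Turn 'keyId="...",algorithm="...",headers="...",signature="..."' into a dict.
    fields = {}
    for part in value.split(","):
        name, _, val = part.partition("=")
        fields[name.strip()] = val.strip('"')
    return fields


def verify_request_sketch(method, path, headers, fetch_public_key_pem):
    # fetch_public_key_pem(key_id) is a hypothetical helper that dereferences the
    # actor document behind keyId and returns its PEM-encoded public key (bytes).
    # `headers` is assumed to be a case-insensitive mapping like Flask's request.headers.
    sig = _parse_signature_header(headers["Signature"])

    # Rebuild the signing string from the signed header list, in order.
    lines = []
    for name in sig["headers"].split():
        if name == "(request-target)":
            lines.append(f"(request-target): {method.lower()} {path}")
        else:
            lines.append(f"{name}: {headers[name]}")
    signing_string = "\n".join(lines).encode("utf-8")

    public_key = serialization.load_pem_public_key(fetch_public_key_pem(sig["keyId"]))
    try:
        public_key.verify(
            base64.b64decode(sig["signature"]),
            signing_string,
            padding.PKCS1v15(),
            hashes.SHA256(),
        )
        return True
    except Exception:
        return False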
def _log_sig():
    sig = request.headers.get("Signature")
    if sig:
        app.logger.info(f"received an authenticated fetch: {sig}")
        try:
            req_verified, actor_id = verify_request(
                request.method, request.path, request.headers, None
            )
            app.logger.info(
                f"authenticated fetch: {req_verified}: {actor_id} {request.headers}"
            )
        except Exception:
            app.logger.exception("failed to verify authenticated fetch")
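For context on the inbox handler that follows, this is roughly what an incoming delivery looks like from the sending side, mirroring the signed POST in the test above and reusing its `Key`/`HTTPSigAuth` pair. The URL and payload are placeholders, not values from the original source.

# Placeholder delivery example: sign an outgoing Create activity with `k` (the key
# from test_httpsig above) and POST it to another instance's inbox.
activity = {
    "@context": "https://www.w3.org/ns/activitystreams",
    "type": "Create",
    "id": "https://lol.com/outbox/1/activity",
    "actor": "https://lol.com",
    "object": {"type": "Note", "content": "hello"},
}
requests.post(
    "https://remote-instance.com/inbox",
    json=activity,
    headers={"Content-Type": "application/activity+json"},
    auth=httpsig.HTTPSigAuth(k),
)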
def inbox():
    # GET /inbox
    if request.method == "GET":
        if not is_api_request():
            abort(404)
        try:
            _api_required()
        except BadSignature:
            abort(404)

        return activitypubify(
            **activitypub.build_ordered_collection(
                DB.activities,
                q={"meta.deleted": False, "box": Box.INBOX.value},
                cursor=request.args.get("cursor"),
                map_func=lambda doc: remove_context(doc["activity"]),
                col_name="inbox",
            )
        )

    # POST /inbox
    try:
        data = request.get_json(force=True)
        if not isinstance(data, dict):
            raise ValueError("not a dict")
    except Exception:
        return Response(
            status=422,
            headers={"Content-Type": "application/json"},
            response=json.dumps(
                {
                    "error": "failed to decode request body as JSON",
                    "request_id": g.request_id,
                }
            ),
        )

    # Check the blacklist now to see if we can return super early
    if is_blacklisted(data):
        logger.info(f"dropping activity from blacklisted host: {data['id']}")
        return Response(status=201)

    logger.info(f"request_id={g.request_id} req_headers={request.headers!r}")
    logger.info(f"request_id={g.request_id} raw_data={data}")

    try:
        req_verified, actor_id = verify_request(
            request.method, request.path, request.headers, request.data
        )
        if not req_verified:
            raise Exception("failed to verify request")
        logger.info(f"request_id={g.request_id} signed by {actor_id}")
    except Exception:
        logger.exception(
            f"failed to verify request {g.request_id}, trying to verify the payload by fetching the remote"
        )
        try:
            remote_data = get_backend().fetch_iri(data["id"])
        except ActivityGoneError:
            # XXX Mastodon sends Delete activities that are not dereferencable, it's the actor URL
            # with #delete appended, so an `ActivityGoneError` kind of ensures it's "legit"
            if data["type"] == ActivityType.DELETE.value and data["id"].startswith(
                data["object"]
            ):
                # If we're here, this means the key is not saved, so we cannot verify the object
                logger.info(f"received a Delete for an unknown actor {data!r}, drop it")
                return Response(status=201)
        except Exception:
            logger.exception(f"failed to fetch remote for payload {data!r}")

            if "type" in data:
                # Friendica does not return a 410, but a 302 that redirects to an HTML page
                if ap._has_type(data["type"], ActivityType.DELETE):
                    logger.info(
                        f"received a Delete for an unknown actor {data!r}, drop it"
                    )
                    return Response(status=201)

            if "id" in data:
                if DB.trash.find_one({"activity.id": data["id"]}):
                    # It's already stored in the trash, return early
                    return Response(
                        status=422,
                        headers={"Content-Type": "application/json"},
                        response=json.dumps(
                            {
                                "error": "failed to verify request (using HTTP signatures or fetching the IRI)",
                                "request_id": g.request_id,
                            }
                        ),
                    )

            # Now we can store this activity in the trash for later analysis

            # Track/store the payload for analysis
            ip, geoip = _get_ip()

            DB.trash.insert(
                {
                    "activity": data,
                    "meta": {
                        "ts": datetime.now().timestamp(),
                        "ip_address": ip,
                        "geoip": geoip,
                        "tb": traceback.format_exc(),
                        "headers": dict(request.headers),
                        "request_id": g.request_id,
                    },
                }
            )

            return Response(
                status=422,
                headers={"Content-Type": "application/json"},
                response=json.dumps(
                    {
                        "error": "failed to verify request (using HTTP signatures or fetching the IRI)",
                        "request_id": g.request_id,
                    }
                ),
            )

        # We fetched the remote data successfully
        data = remote_data

    try:
        activity = ap.parse_activity(data)
    except ValueError:
        logger.exception(f"failed to parse activity for req {g.request_id}: {data!r}")

        # Track/store the payload for analysis
        ip, geoip = _get_ip()

        DB.trash.insert(
            {
                "activity": data,
                "meta": {
                    "ts": datetime.now().timestamp(),
                    "ip_address": ip,
                    "geoip": geoip,
                    "tb": traceback.format_exc(),
                    "headers": dict(request.headers),
                    "request_id": g.request_id,
                },
            }
        )

        return Response(status=201)

    logger.debug(f"inbox activity={g.request_id}/{activity}/{data}")
    post_to_inbox(activity)
    return Response(status=201)