def update_domain_nodeinfo(domain):
    """Fetch, store and timestamp the nodeinfo document for ``domain``.

    On fetch/validation failure the error is recorded in the stored
    nodeinfo payload instead of raising. If the nodeinfo advertises a
    service actor id (``payload.metadata.actorId``), that actor is
    retrieved and attached to the domain; failures there are logged and
    the previously stored service actor is left untouched.
    """
    now = timezone.now()
    try:
        nodeinfo = {"status": "ok", "payload": fetch_nodeinfo(domain.name)}
    except (
        requests.RequestException,
        serializers.serializers.ValidationError,
        ValueError,
    ) as e:
        # Best-effort: persist the failure so admins can see why the
        # domain has no nodeinfo instead of crashing the task.
        nodeinfo = {"status": "error", "error": str(e)}
    service_actor_id = common_utils.recursive_getattr(
        nodeinfo, "payload.metadata.actorId", permissive=True
    )
    try:
        domain.service_actor = (
            utils.retrieve_ap_object(
                service_actor_id,
                actor=actors.get_service_actor(),
                queryset=models.Actor,
                serializer_class=serializers.ActorSerializer,
            )
            if service_actor_id
            else None
        )
    # Use requests.RequestException consistently with the handler above
    # (same class as the bare RequestException name used previously).
    except (serializers.serializers.ValidationError, requests.RequestException) as e:
        logger.warning(
            "Cannot fetch system actor for domain %s: %s", domain.name, str(e)
        )
    domain.nodeinfo_fetch_date = now
    domain.nodeinfo = nodeinfo
    domain.save(update_fields=["nodeinfo", "nodeinfo_fetch_date", "service_actor"])
def check_allow_list(payload, **kwargs):
    """
    A MRF policy that only works when the moderation__allow_list_enabled
    setting is on. It will extract domain names from the activity ID, actor ID
    and activity object ID and discard the activity if any of those domain
    names isn't on the allow list.
    """
    if not preferences.get("moderation__allow_list_enabled"):
        raise mrf.Skip("Allow-listing is disabled")
    allowed_domains = set(
        federation_models.Domain.objects.filter(allowed=True).values_list(
            "name", flat=True
        )
    )
    relevant_ids = [
        payload.get("actor"),
        kwargs.get("sender_id", payload.get("id")),
        utils.recursive_getattr(payload, "object.id", permissive=True),
    ]
    # Set comprehension instead of set([...]) (flake8-comprehensions C403);
    # hostnames can be None for malformed URLs, hence the second filter.
    hostnames = (urllib.parse.urlparse(i).hostname for i in relevant_ids if i)
    relevant_domains = {domain for domain in hostnames if domain}
    # Compute the difference once instead of twice (check + error message).
    forbidden_domains = relevant_domains - allowed_domains
    if forbidden_domains:
        raise mrf.Discard(
            "These domains are not allowed: {}".format(", ".join(forbidden_domains))
        )
def test_get_target_owner(factory_name, factory_kwargs, owner_field, factories):
    """get_target_owner resolves the actor owning the given target object."""
    target = factories[factory_name](**factory_kwargs)
    # "self" means the target object is its own owner (it is an actor).
    owner = (
        target
        if owner_field == "self"
        else common_utils.recursive_getattr(target, owner_field)
    )
    assert isinstance(owner, federation_models.Actor)
    assert serializers.get_target_owner(target) == owner
def test_spa_views_raise_api_redirect_when_accept_json_set(
    factory_name,
    factory_kwargs,
    route_name,
    route_arg_name,
    route_arg,
    factories,
    fake_request,
):
    """An ActivityPub Accept header on a SPA URL raises ApiRedirect to the object fid."""
    obj = factories[factory_name](**factory_kwargs)
    route_kwargs = {route_arg_name: utils.recursive_getattr(obj, route_arg)}
    spa_url = utils.spa_reverse(route_name, kwargs=route_kwargs)
    request = fake_request.get(spa_url, HTTP_ACCEPT="application/activity+json")
    with pytest.raises(middleware.ApiRedirect) as excinfo:
        middleware.get_request_head_tags(request)
    assert excinfo.value.url == obj.fid
def test_channel_detail(field, factories, logged_in_api_client):
    """The channel detail endpoint returns the fully serialized channel.

    Parametrized on ``field``: the "composite" URL kwarg is built from the
    given channel attribute path — presumably uuid or username; confirm
    against the parametrize list outside this view.
    """
    channel = factories["audio.Channel"](
        artist__description=None, local=True, artist__with_cover=True
    )
    url = reverse(
        "api:v1:channels-detail",
        kwargs={"composite": utils.recursive_getattr(channel, field)},
    )
    # Direct assignment instead of setattr() with constant attribute names
    # (flake8-bugbear B010). These look like annotations the serializer
    # reads when present — TODO confirm in ChannelSerializer.
    channel.artist._tracks_count = 0
    channel.artist._prefetched_tagged_items = []
    expected = serializers.ChannelSerializer(
        channel, context={"subscriptions_count": True}
    ).data
    response = logged_in_api_client.get(url)
    assert response.status_code == 200
    assert response.data == expected
def test_channel_detail(attribute, spa_html, no_api_auth, client, factories, settings):
    """The SPA HTML page for a channel carries the expected meta/link tags.

    Parametrized on ``attribute``: the URL is built from that channel
    attribute path — presumably uuid or actor username; confirm against
    the parametrize list outside this view.
    """
    channel = factories["audio.Channel"](
        library__privacy_level="everyone", artist__with_cover=True
    )
    # A playable upload in the channel's library — presumably required for
    # the page/meta rendering; confirm.
    factories["music.Upload"](playable=True, library=channel.library)
    url = "/channels/{}".format(utils.recursive_getattr(channel, attribute))
    # The canonical detail URL (og:url / oembed) always uses the actor's
    # full username, regardless of which attribute built the request URL.
    detail_url = "/channels/{}".format(channel.actor.full_username)
    response = client.get(url)
    assert response.status_code == 200
    expected_metas = [
        {
            "tag": "meta",
            "property": "og:url",
            "content": utils.join_url(settings.FUNKWHALE_URL, detail_url),
        },
        {"tag": "meta", "property": "og:title", "content": channel.artist.name},
        {"tag": "meta", "property": "og:type", "content": "profile"},
        {
            "tag": "meta",
            "property": "og:image",
            "content": channel.artist.attachment_cover.download_url_medium_square_crop,
        },
        {
            "tag": "link",
            "rel": "alternate",
            "type": "application/activity+json",
            "href": channel.actor.fid,
        },
        {
            "tag": "link",
            "rel": "alternate",
            "type": "application/rss+xml",
            "href": channel.get_rss_url(),
            "title": "{} - RSS Podcast Feed".format(channel.artist.name),
        },
        {
            "tag": "link",
            "rel": "alternate",
            "type": "application/json+oembed",
            "href": (
                utils.join_url(settings.FUNKWHALE_URL, reverse("api:v1:oembed"))
                + "?format=json&url={}".format(
                    urllib.parse.quote_plus(
                        utils.join_url(settings.FUNKWHALE_URL, detail_url)
                    )
                )
            ),
        },
        {"tag": "meta", "property": "twitter:card", "content": "player"},
        {
            "tag": "meta",
            "property": "twitter:player",
            "content": serializers.get_embed_url("channel", id=channel.uuid),
        },
        {"tag": "meta", "property": "twitter:player:width", "content": "600"},
        {"tag": "meta", "property": "twitter:player:height", "content": "400"},
    ]
    metas = utils.parse_meta(response.content.decode())
    # we only test our custom metas, not the default ones
    assert metas[:len(expected_metas)] == expected_metas