def save(self, request):
    user = super().save(request)
    update_fields = ["actor"]
    user.actor = models.create_actor(user)
    user_request = None
    if self.approval_enabled:
        # manually approve users
        user.is_active = False
        user_request = moderation_models.UserRequest.objects.create(
            submitter=user.actor,
            type="signup",
            metadata=self.validated_data.get("request_fields", None) or None,
        )
        update_fields.append("is_active")

    if self.validated_data.get("invitation"):
        user.invitation = self.validated_data.get("invitation")
        update_fields.append("invitation")

    user.save(update_fields=update_fields)

    if user_request:
        common_utils.on_commit(
            moderation_tasks.user_request_handle.delay,
            user_request_id=user_request.pk,
            new_status=user_request.status,
        )
    return user
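# All of the snippets in this section defer Celery task dispatch through an
# on_commit helper. A minimal sketch of such a helper, assuming it simply
# wraps Django's transaction.on_commit (only the call sites in this section
# come from the source; this body is an illustration):
import functools

from django.db import transaction


def on_commit(f, *args, **kwargs):
    # register the task dispatch to run only once the surrounding database
    # transaction commits, so a worker can never pick up the job before the
    # rows it needs are visible
    return transaction.on_commit(functools.partial(f, *args, **kwargs))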
def fetches(self, request, *args, **kwargs):
    obj = self.get_object()
    if request.method == "GET":
        queryset = models.Fetch.objects.get_for_object(obj).select_related("actor")
        queryset = queryset.order_by("-creation_date")
        filterset = filters.FetchFilter(request.GET, queryset=queryset)
        page = self.paginate_queryset(filterset.qs)
        if page is not None:
            serializer = api_serializers.FetchSerializer(page, many=True)
            return self.get_paginated_response(serializer.data)
        serializer = api_serializers.FetchSerializer(queryset, many=True)
        return response.Response(serializer.data)
    if request.method == "POST":
        if utils.is_local(obj.fid):
            return response.Response(
                {"detail": "Cannot fetch a local object"}, status=400
            )
        fetch = models.Fetch.objects.create(
            url=obj.fid, actor=request.user.actor, object=obj
        )
        common_utils.on_commit(tasks.fetch.delay, fetch_id=fetch.pk)
        serializer = api_serializers.FetchSerializer(fetch)
        return response.Response(serializer.data, status=status.HTTP_201_CREATED)
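# The view above reads like a Django REST Framework detail action accepting
# both verbs on the same route. A hedged sketch of the wiring, assuming
# standard DRF (the @action decorator and GenericViewSet are real
# rest_framework API; the class name and placement are assumptions):
from rest_framework import viewsets
from rest_framework.decorators import action


class FetchableViewSet(viewsets.GenericViewSet):  # hypothetical host viewset
    @action(detail=True, methods=["get", "post"])
    def fetches(self, request, *args, **kwargs):
        ...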
def perform_create(self, serializer):
    fetch = serializer.save(actor=self.request.user.actor)
    if fetch.status == "finished":
        # a duplicate was returned, no need to fetch again
        return
    if settings.FEDERATION_SYNCHRONOUS_FETCH:
        # run the fetch inline, then refresh so the instance reflects the
        # status the task just wrote to the database
        tasks.fetch(fetch_id=fetch.pk)
        fetch.refresh_from_db()
    else:
        common_utils.on_commit(tasks.fetch.delay, fetch_id=fetch.pk)
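# Testing caveat: Django's TestCase runs each test inside a transaction that
# is rolled back, so transaction.on_commit callbacks never fire on their own.
# A sketch of exercising the asynchronous branch, assuming Django >= 3.2
# (captureOnCommitCallbacks is real Django test API; the URL, payload, and
# assertion are hypothetical):
from django.test import TestCase


class FetchCreateTest(TestCase):
    def test_fetch_task_enqueued_on_commit(self):
        with self.captureOnCommitCallbacks(execute=False) as callbacks:
            self.client.post("/api/fetches/", {"object": "https://example.org/note"})
        # exactly one hook should be registered: the tasks.fetch.delay call
        self.assertEqual(len(callbacks), 1)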
def schedule_scan(self, actor, force=False):
    latest_scan = (
        self.scans.exclude(status="errored").order_by("-creation_date").first()
    )
    delay_between_scans = datetime.timedelta(seconds=3600 * 24)  # 24 hours
    now = timezone.now()
    if (
        not force
        and latest_scan
        and latest_scan.creation_date + delay_between_scans > now
    ):
        # a scan already ran within the last 24 hours; skip unless forced
        return

    scan = self.scans.create(total_files=self.uploads_count, actor=actor)
    from . import tasks

    common_utils.on_commit(tasks.start_library_scan.delay, library_scan_id=scan.pk)
    return scan
def perform_update(self, serializer):
    old_status = serializer.instance.status
    new_status = serializer.validated_data.get("status")
    if old_status != new_status and new_status != "pending":
        # the request was resolved; assign it to the moderator making the
        # change and notify the handling task of the status transition
        serializer.save(assigned_to=self.request.user.actor)
        common_utils.on_commit(
            moderation_tasks.user_request_handle.delay,
            user_request_id=serializer.instance.pk,
            new_status=new_status,
            old_status=old_status,
        )
    else:
        serializer.save()
def apply(self, obj, validated_data):
    if not obj.is_local or not obj.user:
        raise mutations.serializers.ValidationError("Cannot delete this account")
    # delete OAuth apps / reset all passwords immediately
    obj.user.set_unusable_password()
    obj.user.subsonic_api_token = None
    # force logout
    obj.user.secret_key = uuid.uuid4()
    obj.user.users_grant.all().delete()
    obj.user.users_accesstoken.all().delete()
    obj.user.users_refreshtoken.all().delete()
    obj.user.save()
    # since deleting related objects and sending messages can take a long
    # time, we do that in a separate task
    utils.on_commit(tasks.delete_account.delay, user_id=obj.user.id)
def receive(activity, on_behalf_of):
    from . import models
    from . import serializers
    from . import tasks

    # we ensure the activity has the bare minimum structure before storing
    # it in our database
    serializer = serializers.BaseActivitySerializer(
        data=activity,
        context={"actor": on_behalf_of, "local_recipients": True},
    )
    serializer.is_valid(raise_exception=True)
    try:
        copy = serializer.save()
    except IntegrityError:
        logger.warning(
            "[federation] Discarding already delivered activity %s",
            serializer.validated_data.get("id"),
        )
        return

    local_to_recipients = get_actors_from_audience(activity.get("to", []))
    local_to_recipients = local_to_recipients.exclude(user=None)

    local_cc_recipients = get_actors_from_audience(activity.get("cc", []))
    local_cc_recipients = local_cc_recipients.exclude(user=None)

    inbox_items = []
    for recipients, type in [
        (local_to_recipients, "to"),
        (local_cc_recipients, "cc"),
    ]:
        for r in recipients.values_list("pk", flat=True):
            inbox_items.append(models.InboxItem(actor_id=r, type=type, activity=copy))

    models.InboxItem.objects.bulk_create(inbox_items)

    # at this point, we have the activity in database. Even if we crash, it's
    # okay, as we can retry later
    funkwhale_utils.on_commit(tasks.dispatch_inbox.delay, activity_id=copy.pk)
    return copy
def save(self, *args, **kwargs):
    instance = super().save(*args, **kwargs)
    need_purge = self.instance.is_active and (
        self.instance.block_all or self.instance.reject_media
    )
    if need_purge:
        only = []
        if self.instance.reject_media:
            only.append("media")
        target = instance.target
        if target["type"] == "domain":
            common_utils.on_commit(
                federation_tasks.purge_actors.delay,
                domains=[target["obj"].pk],
                only=only,
            )
        if target["type"] == "actor":
            common_utils.on_commit(
                federation_tasks.purge_actors.delay,
                ids=[target["obj"].pk],
                only=only,
            )
    return instance
def perform_create(self, serializer):
    upload = serializer.save()
    common_utils.on_commit(tasks.process_upload.delay, upload_id=upload.pk)
def dispatch(self, routing, context):
    """
    Receives a routing payload and some business objects in the context
    and may yield data that should be persisted in the Activity model
    for further delivery.
    """
    from funkwhale_api.common import preferences

    from . import models
    from . import tasks

    allow_list_enabled = preferences.get("moderation__allow_list_enabled")
    allowed_domains = None
    if allow_list_enabled:
        allowed_domains = set(
            models.Domain.objects.filter(allowed=True).values_list("name", flat=True)
        )

    for route, handler in self.routes:
        if not match_route(route, routing):
            continue

        activities_data = []
        for e in handler(context):
            # a route can yield zero, one or more activity payloads
            if e:
                activities_data.append(e)

        deletions = [
            a["actor"].id
            for a in activities_data
            if a["payload"]["type"] == "Delete"
        ]
        for actor_id in deletions:
            # we may need to trigger a blind key rotation
            if should_rotate_actor_key(actor_id):
                schedule_key_rotation(actor_id, settings.ACTOR_KEY_ROTATION_DELAY)

        inbox_items_by_activity_uuid = {}
        deliveries_by_activity_uuid = {}
        prepared_activities = []
        for activity_data in activities_data:
            activity_data["payload"]["actor"] = activity_data["actor"].fid
            to = activity_data["payload"].pop("to", [])
            cc = activity_data["payload"].pop("cc", [])
            a = models.Activity(**activity_data)
            a.uuid = uuid.uuid4()
            to_inbox_items, to_deliveries, new_to = prepare_deliveries_and_inbox_items(
                to, "to", allowed_domains=allowed_domains
            )
            cc_inbox_items, cc_deliveries, new_cc = prepare_deliveries_and_inbox_items(
                cc, "cc", allowed_domains=allowed_domains
            )
            if not any([to_inbox_items, to_deliveries, cc_inbox_items, cc_deliveries]):
                continue
            deliveries_by_activity_uuid[str(a.uuid)] = to_deliveries + cc_deliveries
            inbox_items_by_activity_uuid[str(a.uuid)] = to_inbox_items + cc_inbox_items
            if new_to:
                a.payload["to"] = new_to
            if new_cc:
                a.payload["cc"] = new_cc
            prepared_activities.append(a)

        activities = models.Activity.objects.bulk_create(prepared_activities)

        for activity in activities:
            # attach the prepared deliveries and inbox items to the activity
            # row they were built for
            if str(activity.uuid) in deliveries_by_activity_uuid:
                for obj in deliveries_by_activity_uuid[str(activity.uuid)]:
                    obj.activity = activity
            if str(activity.uuid) in inbox_items_by_activity_uuid:
                for obj in inbox_items_by_activity_uuid[str(activity.uuid)]:
                    obj.activity = activity

        # create all deliveries and items, in bulk
        models.Delivery.objects.bulk_create(
            [
                obj
                for collection in deliveries_by_activity_uuid.values()
                for obj in collection
            ]
        )
        models.InboxItem.objects.bulk_create(
            [
                obj
                for collection in inbox_items_by_activity_uuid.values()
                for obj in collection
            ]
        )

        for a in activities:
            funkwhale_utils.on_commit(tasks.dispatch_outbox.delay, activity_id=a.pk)
        return activities
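# The two dispatch() variants in this section call
# prepare_deliveries_and_inbox_items with an audience list and a type, and
# unpack a three-tuple from it. A stub of the contract implied by those call
# sites (only the signature and return shape are grounded in the code above;
# the body is elided and the docstring is an interpretation):
def prepare_deliveries_and_inbox_items(recipient_list, type, allowed_domains=None):
    """
    Split an audience list into InboxItem instances for local recipients,
    Delivery instances for remote inboxes (skipping domains outside
    allowed_domains when an allow-list is active), and the filtered
    audience list to write back into the payload.
    """
    inbox_items, deliveries, audience = [], [], []
    # ... actor/inbox resolution happens here in the real implementation ...
    return inbox_items, deliveries, audience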
def trigger_moderator_email(report, **kwargs):
    if settings.MODERATION_EMAIL_NOTIFICATIONS_ENABLED:
        utils.on_commit(send_new_report_email_to_moderators.delay, report_id=report.pk)
def get_channel_from_rss_url(url, raise_exception=False):
    # first, check if the url is blocked
    is_valid, _ = mrf.inbox.apply({"id": url})
    if not is_valid:
        logger.warning("Feed fetch for url %s dropped by MRF", url)
        raise BlockedFeedException("This feed or domain is blocked")

    # retrieve the XML payload at the given URL
    response = retrieve_feed(url)

    parsed_feed = feedparser.parse(response.text)
    serializer = RssFeedSerializer(data=parsed_feed["feed"])

    if not serializer.is_valid(raise_exception=raise_exception):
        raise FeedFetchException("Invalid xml content: {}".format(serializer.errors))

    # second MRF check with validated data
    urls_to_check = set()
    atom_link = serializer.validated_data.get("atom_link")
    if atom_link and atom_link != url:
        urls_to_check.add(atom_link)
    if serializer.validated_data["link"] != url:
        urls_to_check.add(serializer.validated_data["link"])
    for u in urls_to_check:
        is_valid, _ = mrf.inbox.apply({"id": u})
        if not is_valid:
            logger.warning("Feed fetch for url %s dropped by MRF", u)
            raise BlockedFeedException("This feed or domain is blocked")

    # now we're clear, we can save the data
    channel = serializer.save(rss_url=url)
    entries = parsed_feed.entries or []
    uploads = []
    track_defaults = {}
    existing_uploads = list(
        channel.library.uploads.all().select_related(
            "track__description", "track__attachment_cover"
        )
    )
    if parsed_feed.feed.get("rights"):
        track_defaults["copyright"] = parsed_feed.feed.rights[
            : music_models.MAX_LENGTHS["COPYRIGHT"]
        ]
    for entry in entries[: settings.PODCASTS_RSS_FEED_MAX_ITEMS]:
        logger.debug("Importing feed item %s", entry.id)
        s = RssFeedItemSerializer(data=entry)
        if not s.is_valid(raise_exception=raise_exception):
            logger.debug("Skipping invalid RSS feed item %s: %s", entry, s.errors)
            continue
        uploads.append(
            s.save(channel, existing_uploads=existing_uploads, **track_defaults)
        )

    common_utils.on_commit(
        music_models.TrackActor.create_entries,
        library=channel.library,
        delete_existing=True,
    )
    if uploads:
        latest_track_date = max([upload.track.creation_date for upload in uploads])
        common_utils.update_modification_date(channel.artist, date=latest_track_date)
    return channel, uploads
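# For reference, the feedparser structures consumed above look like this
# (feedparser.parse is real library API; the feed URL is hypothetical):
import feedparser

parsed_feed = feedparser.parse("https://example.org/feed.xml")
parsed_feed["feed"]             # channel-level metadata, fed to RssFeedSerializer
parsed_feed.entries             # item list, each fed to RssFeedItemSerializer,
                                # capped at settings.PODCASTS_RSS_FEED_MAX_ITEMS
parsed_feed.feed.get("rights")  # optional copyright string used for track_defaults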
def handle_purge(self, objects):
    ids = objects.values_list("id", flat=True)
    common_utils.on_commit(federation_tasks.purge_actors.delay, ids=list(ids))
def handle_relaunch_import(self, objects):
    qs = objects.exclude(import_status="finished")
    pks = list(qs.values_list("id", flat=True))
    qs.update(import_status="pending")
    for pk in pks:
        common_utils.on_commit(tasks.process_upload.delay, upload_id=pk)
def receive(activity, on_behalf_of, inbox_actor=None):
    from funkwhale_api.moderation import mrf

    from . import models
    from . import serializers
    from . import tasks
    from .routes import inbox

    logger.debug(
        "[federation] Received activity from %s : %s", on_behalf_of.fid, activity
    )
    # we ensure the activity has the bare minimum structure before storing
    # it in our database
    serializer = serializers.BaseActivitySerializer(
        data=activity,
        context={
            "actor": on_behalf_of,
            "local_recipients": True,
            "recipients": [inbox_actor] if inbox_actor else [],
        },
    )
    serializer.is_valid(raise_exception=True)

    payload, updated = mrf.inbox.apply(activity, sender_id=on_behalf_of.fid)
    if not payload:
        logger.info(
            "[federation] Discarding activity due to mrf %s",
            serializer.validated_data.get("id"),
        )
        return

    if not inbox.get_matching_handlers(payload):
        # discard unhandlable activity
        logger.debug(
            "[federation] No matching route found for activity, discarding: %s",
            payload,
        )
        return

    try:
        copy = serializer.save(payload=payload, type=payload["type"])
    except IntegrityError:
        logger.warning(
            "[federation] Discarding already delivered activity %s",
            serializer.validated_data.get("id"),
        )
        return

    local_to_recipients = get_actors_from_audience(
        serializer.validated_data.get("to", [])
    )
    local_to_recipients = local_to_recipients.local()
    local_to_recipients = local_to_recipients.values_list("pk", flat=True)
    local_to_recipients = list(local_to_recipients)
    if inbox_actor:
        local_to_recipients.append(inbox_actor.pk)

    local_cc_recipients = get_actors_from_audience(
        serializer.validated_data.get("cc", [])
    )
    local_cc_recipients = local_cc_recipients.local()
    local_cc_recipients = local_cc_recipients.values_list("pk", flat=True)

    inbox_items = []
    for recipients, type in [
        (local_to_recipients, "to"),
        (local_cc_recipients, "cc"),
    ]:
        for r in recipients:
            inbox_items.append(models.InboxItem(actor_id=r, type=type, activity=copy))

    models.InboxItem.objects.bulk_create(inbox_items)

    # at this point, we have the activity in database. Even if we crash, it's
    # okay, as we can retry later
    funkwhale_utils.on_commit(tasks.dispatch_inbox.delay, activity_id=copy.pk)
    return copy
def perform_update(self, serializer):
    upload = serializer.save()
    if upload.import_status == "pending":
        common_utils.on_commit(tasks.process_upload.delay, upload_id=upload.pk)
def handle_publish(self, objects):
    qs = objects.filter(import_status="draft")
    pks = list(qs.values_list("id", flat=True))
    qs.update(import_status="pending")
    for pk in pks:
        common_utils.on_commit(tasks.process_upload.delay, upload_id=pk)
def handle_relaunch_import(self, objects):
    qs = objects.filter(import_status__in=["pending", "skipped", "errored"])
    pks = list(qs.values_list("id", flat=True))
    qs.update(import_status="pending")
    for pk in pks:
        common_utils.on_commit(tasks.process_upload.delay, upload_id=pk)
def handle_purge(self, objects):
    # this action targets domains, whose primary key is the domain name
    ids = objects.values_list("pk", flat=True).order_by("pk")
    common_utils.on_commit(federation_tasks.purge_actors.delay, domains=list(ids))
def dispatch(self, routing, context):
    """
    Receives a routing payload and some business objects in the context
    and may yield data that should be persisted in the Activity model
    for further delivery.
    """
    from . import models
    from . import tasks

    for route, handler in self.routes:
        if not match_route(route, routing):
            continue

        activities_data = []
        for e in handler(context):
            # a route can yield zero, one or more activity payloads
            if e:
                activities_data.append(e)

        inbox_items_by_activity_uuid = {}
        deliveries_by_activity_uuid = {}
        prepared_activities = []
        for activity_data in activities_data:
            activity_data["payload"]["actor"] = activity_data["actor"].fid
            to = activity_data["payload"].pop("to", [])
            cc = activity_data["payload"].pop("cc", [])
            a = models.Activity(**activity_data)
            a.uuid = uuid.uuid4()
            to_inbox_items, to_deliveries, new_to = prepare_deliveries_and_inbox_items(
                to, "to"
            )
            cc_inbox_items, cc_deliveries, new_cc = prepare_deliveries_and_inbox_items(
                cc, "cc"
            )
            if not any([to_inbox_items, to_deliveries, cc_inbox_items, cc_deliveries]):
                continue
            deliveries_by_activity_uuid[str(a.uuid)] = to_deliveries + cc_deliveries
            inbox_items_by_activity_uuid[str(a.uuid)] = to_inbox_items + cc_inbox_items
            if new_to:
                a.payload["to"] = new_to
            if new_cc:
                a.payload["cc"] = new_cc
            prepared_activities.append(a)

        activities = models.Activity.objects.bulk_create(prepared_activities)

        for activity in activities:
            # attach the prepared deliveries and inbox items to the activity
            # row they were built for
            if str(activity.uuid) in deliveries_by_activity_uuid:
                for obj in deliveries_by_activity_uuid[str(activity.uuid)]:
                    obj.activity = activity
            if str(activity.uuid) in inbox_items_by_activity_uuid:
                for obj in inbox_items_by_activity_uuid[str(activity.uuid)]:
                    obj.activity = activity

        # create all deliveries and items, in bulk
        models.Delivery.objects.bulk_create(
            [
                obj
                for collection in deliveries_by_activity_uuid.values()
                for obj in collection
            ]
        )
        models.InboxItem.objects.bulk_create(
            [
                obj
                for collection in inbox_items_by_activity_uuid.values()
                for obj in collection
            ]
        )

        for a in activities:
            funkwhale_utils.on_commit(tasks.dispatch_outbox.delay, activity_id=a.pk)
        return activities
def perform_destroy(self, instance):
    instance.__class__.objects.filter(pk=instance.pk).delete()
    common_utils.on_commit(
        federation_tasks.remove_actor.delay, actor_id=instance.actor.pk
    )