def fix_file_data(self, dry_run, **kwargs):
    self.stdout.write("Fixing missing bitrate or length...")
    matching = models.Upload.objects.filter(
        Q(bitrate__isnull=True) | Q(duration__isnull=True)
    )
    total = matching.count()
    self.stdout.write(
        "[bitrate/length] {} entries found with missing values".format(total)
    )
    if dry_run:
        return
    # work on the queryset in batches so we never hold every upload in memory
    chunks = common_utils.chunk_queryset(
        matching.only("id", "audio_file", "source"), kwargs["batch_size"]
    )
    handled = 0
    for chunk in chunks:
        updated = []
        for upload in chunk:
            handled += 1
            self.stdout.write(
                "[bitrate/length] {}/{} fixing file #{}".format(
                    handled, total, upload.pk
                )
            )
            try:
                audio_file = upload.get_audio_file()
                data = utils.get_audio_file_data(audio_file)
                upload.bitrate = data["bitrate"]
                upload.duration = data["length"]
            except Exception as e:
                self.stderr.write(
                    "[bitrate/length] error with file #{}: {}".format(
                        upload.pk, str(e)
                    )
                )
            else:
                updated.append(upload)
        # persist each batch with a single UPDATE
        models.Upload.objects.bulk_update(updated, ["bitrate", "duration"])
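
# utils.get_audio_file_data is not shown in this excerpt. As a hedged,
# illustrative sketch only: a helper with the same contract (a dict with
# "bitrate" and "length" keys, read from a file-like object) could be built
# on mutagen; the real helper may differ.
import mutagen

def get_audio_file_data_sketch(audio_file):
    # mutagen.File accepts file-like objects and returns None when it cannot
    # recognise the format
    parsed = mutagen.File(audio_file)
    if parsed is None:
        raise ValueError("unable to parse audio file")
    # .info.length is available for all formats; .info.bitrate for most
    return {
        "bitrate": getattr(parsed.info, "bitrate", None),
        "length": parsed.info.length,
    }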
def fix_file_checksum(self, dry_run, **kwargs):
    self.stdout.write("Fixing missing checksums...")
    matching = models.Upload.objects.filter(
        Q(checksum=None)
        & (Q(audio_file__isnull=False) | Q(source__startswith="file://"))
    )
    total = matching.count()
    self.stdout.write(
        "[checksum] {} entries found with missing values".format(total)
    )
    if dry_run:
        return
    chunks = common_utils.chunk_queryset(
        matching.only("id", "audio_file", "source"), kwargs["batch_size"]
    )
    handled = 0
    for chunk in chunks:
        updated = []
        for upload in chunk:
            handled += 1
            self.stdout.write(
                "[checksum] {}/{} fixing file #{}".format(handled, total, upload.pk)
            )
            try:
                upload.checksum = common_utils.get_file_hash(
                    upload.get_audio_file()
                )
            except Exception as e:
                self.stderr.write(
                    "[checksum] error with file #{}: {}".format(upload.pk, str(e))
                )
            else:
                updated.append(upload)
        models.Upload.objects.bulk_update(updated, ["checksum"])
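
# common_utils.get_file_hash is defined elsewhere in the codebase. As an
# illustration only (the digest algorithm and chunk size are assumptions), a
# checksum helper of this shape reads the file in fixed-size chunks so that
# large uploads are never loaded into memory at once.
import hashlib

def get_file_hash_sketch(file_obj, chunk_size=1024 * 1024):
    # feed the file to a SHA-256 digest one chunk at a time
    digest = hashlib.sha256()
    for chunk in iter(lambda: file_obj.read(chunk_size), b""):
        digest.update(chunk)
    return digest.hexdigest()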
def test_chunk_queryset(factories):
    actors = factories["federation.Actor"].create_batch(size=4)
    queryset = actors[0].__class__.objects.all()
    chunks = list(utils.chunk_queryset(queryset, 2))

    assert list(chunks[0]) == actors[0:2]
    assert list(chunks[1]) == actors[2:4]
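
# utils.chunk_queryset itself is not part of this excerpt. A minimal sketch
# matching the behaviour the test above asserts (chunks of at most
# `chunk_size` objects, in primary-key order) could use keyset pagination;
# the actual implementation may differ.
def chunk_queryset_sketch(queryset, chunk_size):
    last_pk = None
    queryset = queryset.order_by("pk")
    while True:
        chunk = queryset
        if last_pk is not None:
            # resume after the last object of the previous chunk instead of
            # using OFFSET, which degrades on large tables
            chunk = chunk.filter(pk__gt=last_pk)
        page = list(chunk[:chunk_size])
        if not page:
            return
        last_pk = page[-1].pk
        yield page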
def fix_file_size(self, dry_run, **kwargs):
    self.stdout.write("Fixing missing size...")
    matching = models.Upload.objects.filter(size__isnull=True)
    total = matching.count()
    self.stdout.write(
        "[size] {} entries found with missing values".format(total)
    )
    if dry_run:
        return
    chunks = common_utils.chunk_queryset(
        matching.only("id", "audio_file", "source"), kwargs["batch_size"]
    )
    handled = 0
    for chunk in chunks:
        updated = []
        for upload in chunk:
            handled += 1
            self.stdout.write(
                "[size] {}/{} fixing file #{}".format(handled, total, upload.pk)
            )
            try:
                upload.size = upload.get_file_size()
            except Exception as e:
                self.stderr.write(
                    "[size] error with file #{}: {}".format(upload.pk, str(e))
                )
            else:
                updated.append(upload)
        models.Upload.objects.bulk_update(updated, ["size"])
def handle_delete(self, objects):
    libraries = sorted(set(objects.values_list("library", flat=True)))
    for id in libraries:
        # we group deletes by library for easier federation
        uploads = objects.filter(library__pk=id).select_related("library__actor")
        for chunk in common_utils.chunk_queryset(uploads, 100):
            routes.outbox.dispatch(
                {"type": "Delete", "object": {"type": "Audio"}},
                context={"uploads": chunk},
            )

    return objects.delete()