def get(self, request, *args, **kwargs):
    """Stream a CSV export of the latest transcription for every asset in a campaign.

    The campaign is selected by the ``campaign_slug`` URL kwarg; the
    downloaded file is named ``<campaign_slug>.csv``.

    Returns:
        A streaming CSV response produced by ``export_to_csv_response``.
    """
    campaign_slug = self.kwargs["campaign_slug"]
    asset_qs = Asset.objects.filter(
        item__project__campaign__slug=campaign_slug
    )
    # presumably annotates each asset with its newest transcription text
    # and a download URL — TODO confirm against the helper's definition
    assets = get_latest_transcription_data(asset_qs)
    headers, data = flatten_queryset(
        assets,
        field_names=[
            "item__project__campaign__title",
            "item__project__title",
            "item__title",
            "item__item_id",
            "title",
            "transcription_status",
            "download_url",
            "latest_transcription",
        ],
        extra_verbose_names={
            "item__project__campaign__title": "Campaign",
            "item__project__title": "Project",
            "item__title": "Item",
            # NOTE(review): dropped the duplicate bare "item_id" mapping —
            # the field list only references "item__item_id", so the extra
            # key was dead weight and could mask a future typo.
            "item__item_id": "ItemId",
            "title": "Asset",
            "transcription_status": "AssetStatus",
            "download_url": "DownloadUrl",
            "latest_transcription": "Transcription",
        },
    )
    return export_to_csv_response(
        "%s.csv" % campaign_slug, headers, data
    )
def events_csv(request):
    """Stream every load-batch event, newest first, as a CSV download."""
    event_qs = models.LoadBatchEvent.objects.all().order_by("-created")
    column_labels = {"created": "Time", "batch_name": "Batch name"}
    headers, data = flatten_queryset(
        event_qs,
        field_names=["created", "batch_name", "message"],
        extra_verbose_names=column_labels,
    )
    return export_to_csv_response("chronam_events.csv", headers, data)
def admin_site_report_view(request):
    """Export all site reports as a CSV download named ``site-report.csv``."""
    report_qs = SiteReport.objects.all()
    # Column set comes from the model's own export configuration.
    headers, data = flatten_queryset(
        report_qs,
        field_names=SiteReport.DEFAULT_EXPORT_FIELDNAMES,
        extra_verbose_names={
            "created_on": "Date",
            "campaign__title": "Campaign",
        },
    )
    return export_to_csv_response("site-report.csv", headers, data)
def test_export_to_csv_response(self):
    """The CSV response has correct headers and renders every fixture row."""
    headers, rows = self.get_test_data()
    response = export_to_csv_response('test.csv', headers, rows)
    # Exhaust the streaming body into decoded text lines.
    body_lines = [chunk.decode('utf-8') for chunk in response.streaming_content]
    self.assertEqual('text/csv; charset=utf-8', response['Content-Type'])
    self.assertEqual(
        "attachment; filename*=UTF-8''test.csv",
        response['Content-Disposition'],
    )
    expected_lines = [
        'Foo Column,Bar Column\r\n',
        '1,2\r\n',
        '3,4\r\n',
        'abc,def\r\n',
        '2015-08-28T00:00:00,2015-08-28\r\n',
    ]
    self.assertEqual(body_lines, expected_lines)
def batches_csv(request):
    """Stream the viewable batches, each with its total page count, as CSV."""
    batch_qs = models.Batch.viewable_batches().annotate(
        page_count=Count("issues__pages")
    )
    headers, data = flatten_queryset(
        batch_qs,
        field_names=[
            "created",
            "name",
            "awardee__name",
            "page_count",
            "released",
        ],
        extra_verbose_names={"awardee__name": "Awardee"},
    )
    return export_to_csv_response("chronam_batches.csv", headers, data)
def test_export_csv_using_generator(self):
    """Rows supplied by a generator are streamed lazily, one CSV line each."""
    headers = ['A Number', 'Status']
    rows = ((n, u'\N{WARNING SIGN}') for n in range(0, 1000))
    resp = export_to_csv_response('numbers.csv', headers, rows)
    self.assertIsInstance(resp, StreamingHttpResponse)
    self.assertEqual(
        "attachment; filename*=UTF-8''numbers.csv",
        resp['Content-Disposition'],
    )
    # Exhaust the streaming iterator so every generated row is rendered.
    content = [line.decode('utf-8') for line in resp.streaming_content]
    # One header row followed by 1000 data rows.
    self.assertEqual(len(content), 1001)
    self.assertEqual(content[0], u'A Number,Status\r\n')
    self.assertEqual(content[-1], u'999,\u26a0\r\n')