def post(self, request, *args, **kwargs):
    """Handle upload of a new data file.

    Validates the file against the per-file and per-project-space size
    limits, stores the blob with an expiry, and redirects back to the
    listing on success.  On a limit violation, re-renders the page with
    a warning message instead.
    """
    uploaded_file = request.FILES['file']
    if uploaded_file.size > MAX_DATA_FILE_SIZE:
        messages.warning(
            request,
            _('The data file exceeds the maximum size of {} MB.').format(
                MAX_DATA_FILE_SIZE // (1024 * 1024))
        )
        return self.get(request, *args, **kwargs)
    total_size = DataFile.get_total_size(self.domain)
    if total_size and total_size + uploaded_file.size > MAX_DATA_FILE_SIZE_TOTAL:
        messages.warning(
            request,
            _('Uploading this data file would exceed the total allowance of {} GB for this project space. '
              'Please remove some files in order to upload new files.').format(
                MAX_DATA_FILE_SIZE_TOTAL // (1024 * 1024 * 1024))
        )
        return self.get(request, *args, **kwargs)
    data_file = DataFile.save_blob(
        uploaded_file,
        domain=self.domain,
        filename=uploaded_file.name,
        description=request.POST['description'],
        content_type=uploaded_file.content_type,
        # 'ttl' is a number of hours; the blob is reaped after this moment.
        delete_after=datetime.utcnow() + timedelta(hours=int(request.POST['ttl'])),
    )
    # Bug fix: format AFTER the translation lookup.  The original called
    # _('...'.format(...)), which looks up the already-formatted string —
    # a string that can never exist in the message catalog.
    messages.success(request, _('Data file "{}" uploaded').format(data_file.description))
    return HttpResponseRedirect(reverse(self.urlname, kwargs={'domain': self.domain}))
def test_export_delete(self):
    """Deleting a domain removes its data-file blob and export requests
    while leaving a second domain's data untouched."""
    blobdb = get_blob_db()
    saved_files = []
    for name in (self.domain.name, self.domain2.name):
        payload = BytesIO((name + " csv").encode('utf-8'))
        saved_files.append(DataFile.save_blob(
            payload,
            domain=name,
            filename="data.csv",
            description="data file",
            content_type="text/csv",
            delete_after=datetime.utcnow() + timedelta(minutes=10),
        ))
        EmailExportWhenDoneRequest.objects.create(domain=name)
        self._assert_export_counts(name, 1)

    self.domain.delete()

    # The deleted domain's blob is gone ...
    with self.assertRaises(NotFound):
        blobdb.get(key=saved_files[0].blob_id)
    # ... and the surviving domain's blob still round-trips its content.
    with blobdb.get(key=saved_files[1].blob_id) as blob:
        expected = (self.domain2.name + " csv").encode('utf-8')
        self.assertEqual(blob.read(), expected)
    self._assert_export_counts(self.domain.name, 0)
    self._assert_export_counts(self.domain2.name, 1)
def delete(self, request, *args, **kwargs):
    """Delete the data file identified by ``kwargs['pk']``.

    Returns an empty 204 response on success; raises Http404 when no
    such file exists in this domain.
    """
    pk = kwargs['pk']
    try:
        target = DataFile.get(self.domain, pk)
    except DataFile.DoesNotExist:
        raise Http404
    target.delete()
    return HttpResponse(status=204)
def test_export_delete(self):
    """After deleting one domain, its blob and export-request rows are
    removed; the other domain's remain readable."""
    blobdb = get_blob_db()
    stored = {}
    for name in (self.domain.name, self.domain2.name):
        stored[name] = DataFile.save_blob(
            BytesIO((name + " csv").encode('utf-8')),
            domain=name,
            filename="data.csv",
            description="data file",
            content_type="text/csv",
            delete_after=datetime.utcnow() + timedelta(minutes=10),
        )
        EmailExportWhenDoneRequest.objects.create(domain=name)
        self._assert_export_counts(name, 1)

    self.domain.delete()

    with self.assertRaises(NotFound):
        blobdb.get(key=stored[self.domain.name].blob_id)
    with blobdb.get(key=stored[self.domain2.name].blob_id) as blob:
        self.assertEqual(
            blob.read(), (self.domain2.name + " csv").encode('utf-8'))
    self._assert_export_counts(self.domain.name, 0)
    self._assert_export_counts(self.domain2.name, 1)
def get_context_data(self, **kwargs):
    """Extend the base context with everything the data-file listing
    template needs: the user's timezone, the files themselves, admin
    status, and the site's base URL."""
    context = super(DataFileDownloadList, self).get_context_data(**kwargs)
    context['timezone'] = get_timezone_for_user(self.request.couch_user, self.domain)
    context['data_files'] = DataFile.get_all(self.domain)
    context['is_admin'] = self.request.couch_user.is_domain_admin(self.domain)
    context['url_base'] = get_url_base()
    return context
def get(self, request, *args, **kwargs):
    """Stream the requested data file to the client.

    Raises Http404 when either the metadata row or the underlying
    blob is missing.
    """
    try:
        data_file = DataFile.get(self.domain, kwargs['pk'])
        blob = data_file.get_blob()
    except (DataFile.DoesNotExist, NotFound):
        raise Http404
    # Build a minimal Format carrying only the stored content type.
    # (Renamed from `format` to avoid shadowing the builtin.)
    download_format = Format('', data_file.content_type, '', True)
    return get_download_response(
        blob, data_file.content_length, download_format, data_file.filename, request)
def get(self, request, *args, **kwargs):
    """Serve the data file identified by ``kwargs['pk']`` as a download;
    404 when the record or its blob cannot be found."""
    pk = kwargs['pk']
    try:
        data_file = DataFile.get(self.domain, pk)
        blob = data_file.get_blob()
    except (DataFile.DoesNotExist, NotFound):
        raise Http404
    # Local renamed from `format` so the builtin is not shadowed.
    fmt = Format('', data_file.content_type, '', True)
    return get_download_response(
        blob,
        data_file.content_length,
        fmt,
        data_file.filename,
        request,
    )
def setUpClass(cls):
    """Register the fake blob db and create a DataFile record whose
    blob_id points into it."""
    super(DataFileDownloadDetailTest, cls).setUpClass()
    _db.append(FakeDB)
    data_file = DataFile(
        domain=cls.domain,
        filename='foo.txt',
        description='all of the foo',
        content_type='text/plain',
        blob_id='fake',
        content_length=4,
    )
    data_file.save()
    cls.data_file = data_file
def setUpClass(cls):
    """Upload this test module's own source file as the fixture data
    file, keeping a copy of its bytes in ``cls.content``."""
    super(DataFileDownloadDetailTest, cls).setUpClass()
    source_path = os.path.abspath(__file__)
    with open(source_path, 'rb') as fileobj:
        cls.content = fileobj.read()
        # Rewind so save_blob re-reads the stream from the start.
        fileobj.seek(0)
        cls.data_file = DataFile.save_blob(
            fileobj,
            domain=cls.domain.name,
            filename='foo.txt',
            description='all of the foo',
            content_type='text/plain',
            delete_after=datetime.datetime.utcnow() + datetime.timedelta(days=3))
def setUpClass(cls):
    """Store this module's own bytes as the fixture blob via save_blob
    and remember the expected content on the class."""
    super(DataFileDownloadDetailTest, cls).setUpClass()
    with open(os.path.abspath(__file__), 'rb') as fileobj:
        cls.content = fileobj.read()
        fileobj.seek(0)  # rewind: save_blob consumes the stream from here
        expiry = datetime.datetime.utcnow() + datetime.timedelta(days=3)
        cls.data_file = DataFile.save_blob(
            fileobj,
            domain=cls.domain.name,
            filename='foo.txt',
            description='all of the foo',
            content_type='text/plain',
            delete_after=expiry,
        )
def setUpClass(cls):
    """Seed a FakeDB with this module's bytes under key 'abc' and save
    a DataFile record describing that blob."""
    super(DataFileDownloadDetailTest, cls).setUpClass()
    with open(os.path.abspath(__file__), 'rb') as fileobj:
        cls.content = fileobj.read()
    _db.append(FakeDB({'abc': cls.content}))
    data_file = DataFile(
        domain=cls.domain.name,
        filename='foo.txt',
        description='all of the foo',
        content_type='text/plain',
        blob_id='abc',
        content_length=len(cls.content),
        delete_after=datetime.datetime.utcnow() + datetime.timedelta(days=3),
    )
    data_file.save()
    cls.data_file = data_file
def _assert_export_counts(self, domain_name, count):
    """Assert that both the DataFile metadata queryset and the
    EmailExportWhenDoneRequest queryset for ``domain_name`` each
    contain exactly ``count`` rows."""
    querysets = [
        DataFile.meta_query(domain_name),
        EmailExportWhenDoneRequest.objects.filter(domain=domain_name),
    ]
    self._assert_queryset_count(querysets, count)