def test_export_delete(self):
    """Deleting a domain removes its data-file blob and export requests,
    while the second domain's data survives intact."""
    blob_db = get_blob_db()
    saved = []
    for name in (self.domain.name, self.domain2.name):
        payload = (name + " csv").encode('utf-8')
        saved.append(DataFile.save_blob(
            BytesIO(payload),
            domain=name,
            filename="data.csv",
            description="data file",
            content_type="text/csv",
            delete_after=datetime.utcnow() + timedelta(minutes=10),
        ))
        EmailExportWhenDoneRequest.objects.create(domain=name)
        self._assert_export_counts(name, 1)

    self.domain.delete()

    # First domain's blob must be gone; the other domain's must still read back.
    with self.assertRaises(NotFound):
        blob_db.get(key=saved[0].blob_id)
    with blob_db.get(key=saved[1].blob_id) as fileobj:
        self.assertEqual(fileobj.read(), (self.domain2.name + " csv").encode('utf-8'))
    self._assert_export_counts(self.domain.name, 0)
    self._assert_export_counts(self.domain2.name, 1)
def test_export_delete(self):
    """Verify domain deletion cascades to that domain's data-file blob and
    EmailExportWhenDoneRequest rows, leaving the other domain untouched."""
    blobdb = get_blob_db()
    stored = []
    for dom in [self.domain.name, self.domain2.name]:
        blob = DataFile.save_blob(
            BytesIO("{} csv".format(dom).encode('utf-8')),
            domain=dom,
            filename="data.csv",
            description="data file",
            content_type="text/csv",
            delete_after=datetime.utcnow() + timedelta(minutes=10),
        )
        stored.append(blob)
        EmailExportWhenDoneRequest.objects.create(domain=dom)
        self._assert_export_counts(dom, 1)

    self.domain.delete()

    deleted_blob, kept_blob = stored
    with self.assertRaises(NotFound):
        blobdb.get(key=deleted_blob.blob_id)
    with blobdb.get(key=kept_blob.blob_id) as stream:
        self.assertEqual(
            stream.read(),
            "{} csv".format(self.domain2.name).encode('utf-8'),
        )
    self._assert_export_counts(self.domain.name, 0)
    self._assert_export_counts(self.domain2.name, 1)
def post(self, request, *args, **kwargs):
    """Handle a data-file upload for this project space.

    Rejects the upload (with a warning message and a re-render of the page)
    when the file exceeds MAX_DATA_FILE_SIZE, or when adding it would push
    the domain past MAX_DATA_FILE_SIZE_TOTAL. Otherwise saves the blob with
    a TTL-based expiry and redirects back to the listing view.
    """
    uploaded = request.FILES['file']
    if uploaded.size > MAX_DATA_FILE_SIZE:
        messages.warning(
            request,
            _('The data file exceeds the maximum size of {} MB.').format(
                MAX_DATA_FILE_SIZE // (1024 * 1024))
        )
        return self.get(request, *args, **kwargs)
    total_size = DataFile.get_total_size(self.domain)
    if total_size and total_size + uploaded.size > MAX_DATA_FILE_SIZE_TOTAL:
        messages.warning(
            request,
            _('Uploading this data file would exceed the total allowance of {} GB for this project space. '
              'Please remove some files in order to upload new files.').format(
                MAX_DATA_FILE_SIZE_TOTAL // (1024 * 1024 * 1024))
        )
        return self.get(request, *args, **kwargs)
    data_file = DataFile.save_blob(
        uploaded,
        domain=self.domain,
        filename=uploaded.name,
        description=request.POST['description'],
        content_type=uploaded.content_type,
        # expire the file after the user-chosen number of hours
        delete_after=datetime.utcnow() + timedelta(hours=int(request.POST['ttl'])),
    )
    # Bug fix: format AFTER translation. The original called .format() on the
    # literal before passing it to _(), so the formatted string could never
    # match the msgid in the translation catalog.
    messages.success(request, _('Data file "{}" uploaded').format(data_file.description))
    return HttpResponseRedirect(reverse(self.urlname, kwargs={'domain': self.domain}))
def setUpClass(cls):
    """Create a DataFile fixture whose payload is this test module's own bytes."""
    super(DataFileDownloadDetailTest, cls).setUpClass()
    source_path = os.path.abspath(__file__)
    with open(source_path, 'rb') as source:
        cls.content = source.read()
        # rewind so save_blob streams the file from the start
        source.seek(0)
        cls.data_file = DataFile.save_blob(
            source,
            domain=cls.domain.name,
            filename='foo.txt',
            description='all of the foo',
            content_type='text/plain',
            delete_after=datetime.datetime.utcnow() + datetime.timedelta(days=3))
def setUpClass(cls):
    """Seed the test class with a DataFile backed by this file's contents."""
    super(DataFileDownloadDetailTest, cls).setUpClass()
    with open(os.path.abspath(__file__), 'rb') as fh:
        cls.content = fh.read()
        fh.seek(0)  # reset position after the full read above
        expiry = datetime.datetime.utcnow() + datetime.timedelta(days=3)
        cls.data_file = DataFile.save_blob(
            fh,
            domain=cls.domain.name,
            filename='foo.txt',
            description='all of the foo',
            content_type='text/plain',
            delete_after=expiry,
        )