class TemporaryFileUploadHandler(FileUploadHandler):
    """Upload handler that streams incoming data into a temporary file."""

    def new_file(self, *args, **kwargs):
        """Create the temporary file object that chunks get appended to."""
        super().new_file(*args, **kwargs)
        # Size starts at 0; the real size is recorded in file_complete().
        self.file = TemporaryUploadedFile(
            self.file_name,
            self.content_type,
            0,
            self.charset,
            self.content_type_extra,
        )

    def receive_data_chunk(self, raw_data, start):
        # Stream each chunk straight into the backing temp file.
        self.file.write(raw_data)

    def file_complete(self, file_size):
        """Rewind the temp file, record its final size, and return it."""
        self.file.seek(0)
        self.file.size = file_size
        return self.file

    def upload_interrupted(self):
        """Best-effort removal of the partially written temp file."""
        if hasattr(self, 'file'):
            path = self.file.temporary_file_path()
            try:
                self.file.close()
                os.remove(path)
            except FileNotFoundError:
                # Already gone — nothing to clean up.
                pass
def process_video(request):
    """Split the uploaded video and respond with the path of the result zip.

    Only POST is accepted; any other method raises Http404.
    """
    if request.method != 'POST':
        raise Http404
    max_division = request.POST['max_division']
    video_file = request.FILES['video_file']
    # The upload handler already streamed the file to disk; work on that path.
    source_path = TemporaryUploadedFile.temporary_file_path(video_file)
    work_dir = os.path.split(source_path)[0]
    splitter = VideoDivision(source_path, int(max_division), work_dir)
    archive_path = splitter.inter_and_build_sub_clip()
    return JsonResponse({'file_path': archive_path})
def test_parse_valid_pom_temp_file(self):
    """parse_pom extracts the maven coordinates from a valid pom on disk.

    Fix: reuse the already-fetched ``pombytes`` instead of calling
    ``get_valid_pom_as_bytes()`` a second time when writing the file.
    """
    pombytes = self.get_valid_pom_as_bytes()
    testpomfile = TemporaryUploadedFile('pom.xml', 'text/xml',
                                        len(pombytes), 0)
    # Write the pom into the temp file backing the upload object.
    with open(testpomfile.temporary_file_path(), 'wb') as f:
        f.write(pombytes)
    res = pomparse.parse_pom(testpomfile)
    self.assertEqual(
        {
            'artifactId': 'DisGeNET-app',
            'groupId': 'es.imim',
            'version': '6.3.2'
        }, res)
def write(self):
    """Persist the buffered bytes into a TemporaryUploadedFile and return it.

    Clears the internal buffer (``self._data``) once written.
    """
    self.name = uuid.uuid4().hex
    tmp_file = TemporaryUploadedFile(self.name, self.mimetype, self.size, None)
    tmp_file.write(self._data)
    tmp_file.flush()
    # Use the on-disk size when the response carried no content-length
    # header. The file size is validated at the converter.
    if self.size == 0:
        tmp_file.size = os.path.getsize(tmp_file.temporary_file_path())
    self._data = None
    return tmp_file
def value_from_datadict(self, data, files, name):
    """
    Normally returns files.get(name, None). Here we also check `data`. --
    if the appropriate hidden _sticky_file input is set, we can look for
    the temporary file instead and return that if it exists.

    This method seems to be called multiple times with the same arguments,
    so to prevent excessive storage activity the return value is cached
    and returned without processing on subsequent calls. There is an
    assumption that the arguments will not change between calls for any
    given instance, which appears to be valid, so no argument checks are
    performed.
    """
    # Cached result from a previous call — return it unchanged.
    if hasattr(self, '_value'):
        return self._value
    # NOTE(review): the CSRF token is stashed on the widget here,
    # presumably used by save/load_sticky_copy() — confirm with those
    # helpers (not visible in this file).
    self.user_token = data.get('csrfmiddlewaretoken', None)
    # look for normal file
    value = super(
        StickyFileInput, self).value_from_datadict(data, files, name)
    if value and hasattr(value, 'name'):
        # got one, save a temporary copy just in case
        self.sticky_file_name = value.name
        # Session id is a timestamp string with microsecond precision.
        self.sticky_session_id = '%.6f' % time.time()
        self.save_sticky_copy(value.file)
    else:
        # check for temporary copy
        self.sticky_file_name = (
            data.get(
                self.get_hidden_input_name(name, 'sticky_file'),
                None))
        self.sticky_session_id = data.get(
            self.get_hidden_input_name(name, 'sticky_session_id'),
            None)
        sticky_copy = self.load_sticky_copy()
        if sticky_copy:
            sticky_copy.seek(0, 2)  # seek to end
            # Wrap the sticky copy so downstream code sees a regular
            # TemporaryUploadedFile; size is taken from the end offset.
            value = TemporaryUploadedFile(
                name = self.sticky_file_name,
                content_type = None,
                size = sticky_copy.tell(),
                charset = None
            )
            value.file = sticky_copy
            value.file.seek(0)
            # Override the instance method so the path points at the
            # sticky copy rather than the (empty) wrapper's temp file.
            value.temporary_file_path = lambda: self.get_sticky_path()
    setattr(self, '_value', value)  # cache
    return self._value
def test_process_jar_on_temporary_uploaded_file(self):
    """process_jar should accept a TemporaryUploadedFile wrapping a jar."""
    # Build a minimal in-memory jar containing only a manifest.
    jar_buffer = io.BytesIO()
    with zipfile.ZipFile(jar_buffer, mode='w') as archive:
        archive.writestr('META-INF/MANIFEST.MF', TEST_VALID_MANIFEST_ONE)
    upload = TemporaryUploadedFile('foo.jar', 'application/java-archive',
                                   100, 0)
    # Copy the jar bytes into the temp file backing the upload object.
    with open(upload.temporary_file_path(), 'wb') as out:
        out.write(jar_buffer.getvalue())
    (a_name, a_ver, a_works,
     a_dep, has_exp) = processjar.process_jar(upload,
                                              'CyCommunityDetectionTest')
    self.assertEqual('CyCommunityDetectionTest', a_name)
    self.assertEqual('1.11.0', a_ver)
    self.assertEqual('3.7', a_works)
    self.assertEqual([], a_dep)
    self.assertEqual(True, has_exp)
def test_with_temporary_uploaded_file(self):
    """serialize_value is stable for the same TemporaryUploadedFile."""
    upload = TemporaryUploadedFile(
        name='test.jpg',
        content_type='image/jpeg',
        size=100,
        charset=None,
    )
    # Fill the backing temp file with 100 dummy bytes.
    with open(upload.temporary_file_path(), 'wb') as handle:
        handle.write(b'0' * 100)
    file_upload = FileUpload()
    first = file_upload.serialize_value(upload)
    # Serializing the same upload twice must yield the same filename.
    self.assertEqual(first, file_upload.serialize_value(upload))
    # Clean up the file that serialization stored under MEDIA_ROOT.
    stored_path = os.path.join(
        settings.MEDIA_ROOT,
        file_upload.generate_filename(upload.name))
    os.remove(stored_path)
def test_with_temporary_uploaded_file(self):
    """serialize_value gives the same name for repeated serializations."""
    temp_file = TemporaryUploadedFile(
        name='test.jpg',
        content_type='image/jpeg',
        size=100,
        charset=None,
    )
    # Write 100 placeholder bytes into the backing temp file.
    with open(temp_file.temporary_file_path(), 'wb') as fh:
        fh.write(b'0' * 100)
    uploader = FileUpload()
    filename = uploader.serialize_value(temp_file)
    self.assertEqual(filename, uploader.serialize_value(temp_file))
    # Remove the copy that serialization placed under MEDIA_ROOT.
    saved = os.path.join(
        settings.MEDIA_ROOT,
        uploader.generate_filename(temp_file.name),
    )
    os.remove(saved)
def test_rack_form_clean_photo(self):
    """RackForm accepts an EXIF-bearing JPEG upload and strips rotation.

    Fix: the JPEG was read via ``open(path).read()`` — text mode, which
    corrupts binary data on some platforms — and the handle was never
    closed. Read in binary mode inside a ``with`` block instead.
    """
    from fixcity.exif_utils import get_exif_info
    from PIL import Image
    import os.path
    data = self.data.copy()
    # Jump through a few hoops to simulate a real upload.
    HERE = os.path.abspath(os.path.dirname(__file__))
    path = os.path.join(HERE, 'files', 'test_exif.jpg')
    with open(path, 'rb') as img:
        content = img.read()
    photofile = TemporaryUploadedFile('test_exif.jpg', 'image/jpeg',
                                      len(content), None)
    photofile.write(content)
    photofile.seek(0)
    # Okay, now we have something like a file upload.
    data['photo'] = photofile
    form = RackForm(data, {'photo': photofile})
    self.assert_(form.is_valid())
    # Make sure it doesn't have a bad rotation.
    self.assertEqual(
        {}, get_exif_info(Image.open(photofile.temporary_file_path())))
def ensure_saved(self, file): """This may create a temporary file, which will be deleted when it's closed, so always close() it but only when you've finished!""" if isinstance(file, InMemoryUploadedFile): print "Writing %s to disk (%d bytes)" % (file, file.size) tmp = TemporaryUploadedFile(name=file.name, content_type=file.content_type, size=file.size, charset=file.charset) file.seek(0) buf = file.read() tmp.write(buf) print "Wrote %d bytes" % len(buf) tmp.flush() else: tmp = file if isinstance(tmp, TemporaryUploadedFile): path = tmp.temporary_file_path() else: path = tmp.name return (tmp, path)
def push_mis():
    """Export insurance MIS reports, zip them (password-protected), store the
    zip as an InsuranceMIS attachment, and e-mail a link to it."""
    from ondoc.api.v1.utils import CustomTemporaryUploadedFile
    from ondoc.insurance.models import InsuranceMIS
    import pyminizip
    from ondoc.notification.models import EmailNotification
    from ondoc.api.v1.utils import util_absolute_url
    from ondoc.crm.admin.insurance import UserInsuranceResource, UserInsuranceDoctorResource, UserInsuranceLabResource, InsuredMemberResource
    from datetime import datetime, timedelta
    # Each resource class is paired with the attachment type it produces.
    resources = [(UserInsuranceResource, InsuranceMIS.AttachmentType.USER_INSURANCE_RESOURCE),
                 (UserInsuranceDoctorResource, InsuranceMIS.AttachmentType.USER_INSURANCE_DOCTOR_RESOURCE),
                 (UserInsuranceLabResource, InsuranceMIS.AttachmentType.USER_INSURANCE_LAB_RESOURCE),
                 (InsuredMemberResource, InsuranceMIS.AttachmentType.INSURED_MEMBERS_RESOURCE)]
    # Yesterday's single-day window.
    from_date = str(datetime.now().date() - timedelta(days=1))
    to_date = from_date
    # arguments = {
    #     'from_date': from_date,
    #     'to_date': to_date,
    # }
    earliest_date = str(datetime(2019, 1, 1).date())
    # NOTE(review): future_date is computed but never used below.
    future_date = str(datetime.now().date() + timedelta(days=1))
    # Two export windows per resource: yesterday only, and all-time-to-date.
    date_tuple = ((from_date, to_date), (earliest_date, to_date))
    email_attachments = []
    mis_temporary_file = []
    mis_temporary_file_paths = []
    for resource in resources:
        resource_obj = resource[0]()
        for date in date_tuple:
            arguments = {
                'from_date': date[0],
                'to_date': date[1],
            }
            dataset = resource_obj.export(**arguments)
            filename = "%s_%s_%s.xls" % (resource_obj.__class__.__name__, date[0], date[1])
            filename_prefix = "%s_%s_%s_" % (resource_obj.__class__.__name__, date[0], date[1])
            filename_suffix = ".xls"
            mis_temporary_file.append(
                CustomTemporaryUploadedFile(filename, 'byte', 1000, 'utf-8', filename_prefix, filename_suffix))
            # NOTE(review): this handle is never closed explicitly; the temp
            # files are only closed later via tf.close() on the upload
            # wrappers — verify that releases the underlying descriptor.
            f = open(
                mis_temporary_file[len(mis_temporary_file) - 1].temporary_file_path(), 'wb')
            f.write(dataset.xls)
            f.seek(0)
            mis_temporary_file_paths.append(
                mis_temporary_file[len(mis_temporary_file) - 1].temporary_file_path())
    zipfilename = "All_MIS_%s.zip" % from_date
    # NOTE(review): the local name 'zipfile' shadows the stdlib zipfile
    # module for the rest of this function.
    zipfile = TemporaryUploadedFile(zipfilename, 'byte', 1000, 'utf-8')
    # NOTE(review): zf is opened 'wb' but never written through —
    # pyminizip writes to the same path directly; opening it here
    # truncates the temp file first. Confirm zf is actually needed.
    zf = open(zipfile.temporary_file_path(), 'wb')
    # Build the password-protected zip (compression level 8).
    pyminizip.compress_multiple(mis_temporary_file_paths, [], zipfile.temporary_file_path(),
                                settings.INSURANCE_MIS_PASSWORD, int(8))
    for tf in mis_temporary_file:
        tf.close()
    # NOTE(review): zipfile.tell() is likely 0 here since the file was
    # never read/written through this object — looks like the intended
    # value was the zip's on-disk size; confirm before relying on it.
    attachment = InMemoryUploadedFile(zipfile, None, zipfilename, 'application/zip', zipfile.tell(), None)
    insurance_mis_obj = InsuranceMIS(
        attachment_file=attachment,
        attachment_type=InsuranceMIS.AttachmentType.ALL_MIS_ZIP)
    insurance_mis_obj.save()
    zf.close()
    email_attachments.append({
        'filename': zipfilename,
        'path': util_absolute_url(insurance_mis_obj.attachment_file.url)
    })
    EmailNotification.send_insurance_mis(email_attachments)
def test_parse_pom_empty_temp_file(self):
    """An empty pom file should parse to an empty dict."""
    empty_pom = TemporaryUploadedFile('pom.xml', 'text/xml', 0, 0)
    # Ensure the backing temp file exists and is empty.
    with open(empty_pom.temporary_file_path(), 'wb') as handle:
        handle.write(b'')
    self.assertEqual({}, pomparse.parse_pom(empty_pom))
def import_from_file(self, file_to_import: TemporaryUploadedFile, published_at: datetime.date, is_final_import: bool):
    """Import theses from an uploaded XLS (or CSV fallback) file.

    Each line is validated into a ``self.model`` instance inside a DB
    savepoint; the savepoint is committed only when ``is_final_import``
    is True and no line had an error, otherwise it is rolled back.
    Returns either an HttpResponse from the CSV loader or a DRF Response
    carrying per-line statuses.
    """
    sid = transaction.savepoint()
    try:
        # Try the xlsx path first; read_only/data_only avoid formulas
        # and keep memory low.
        wb = load_workbook(
            file_to_import.temporary_file_path(),
            read_only=True,
            data_only=True,
        )
        data_or_response = self._load_xls(wb=wb)
    except InvalidFileException as e:
        # Not a valid workbook — fall back to CSV parsing.
        data_or_response = self._load_csv(file_to_import)
    # Loaders may return an error HttpResponse instead of row data.
    if isinstance(data_or_response, HttpResponse):
        return data_or_response
    statuses = []
    for line in data_or_response:
        # The raw line is preserved in the note for traceability.
        thesis = self.model(published_at=published_at, note=dict(imported_from=tuple(line)))
        line_status: List[Dict[str, Any]] = []
        # NOTE(review): data_or_response is rebound here while the for
        # loop still iterates the original object's iterator — works,
        # but the name now means "parsed line dict", not "all rows".
        data_or_response = self._line_to_dict(line=line)
        _store_value = self._prepare_store_value(thesis=thesis, data=data_or_response)
        # Apply each field setter; every call yields a status dict.
        line_status.extend((
            _store_value(self._set_category),
            _store_value(self._set_title),
            _store_value(self._set_supervisor),
            _store_value(self._set_opponent),
            _store_value(self._set_submit_deadline),
        ))
        try:
            thesis.full_clean(exclude=('registration_number', 'published_at'))
        except ValidationError as e:
            # Flatten field->message pairs into one error status entry.
            line_status.append(
                dict(
                    value=', '.join(
                        map(lambda t: f'{t[0]}: {t[1]}', e.error_dict.items())),
                    error=True,
                ))
        else:
            line_status.append(dict(success=True, ))
        line_has_error = any(map(methodcaller('get', 'error'), line_status))
        if not line_has_error:
            # Authors can only be attached after the thesis row exists.
            thesis.save()
            line_status.insert(0, _store_value(self._set_authors))
        else:
            # Placeholder keeps the status list aligned with columns.
            line_status.insert(0, dict())
        statuses.append(
            dict(
                statuses=line_status,
                # Also surface an error from the authors step (index 0).
                error=line_has_error or line_status[0].get('error'),
            ))
    has_error = any(map(methodcaller('get', 'error'), statuses))
    if is_final_import and not has_error:
        transaction.savepoint_commit(sid)
        message = _('Theses have been imported.')
    else:
        # Dry-run or errors: discard everything written in this savepoint.
        transaction.savepoint_rollback(sid)
        message = _('Cannot import theses containing errors.')
    return Response(
        data=dict(
            statuses=statuses,
            error=has_error,
            message=message,
            success=True,
        ),
        status=HTTP_400_BAD_REQUEST if has_error else HTTP_201_CREATED)