class FileWriteTest(TestCase):
    """Check that saving an Upload stores the file under a content-hash name."""

    def setUp(self):
        # Known payload plus its SHA-1 digest, wrapped in a temporary upload.
        self.text = "Spam Spam Spam Spam.\n"
        self.digest = hashlib.sha1(self.text).hexdigest()
        self.tempfile = TemporaryUploadedFile('spam4.txt', 'text/plain',
                                              len(self.text), 'utf-8')
        self.tempfile.file.write(self.text)
        self.tempfile.file.seek(0)

    def test(self):
        new_upload = Upload(file=self.tempfile)
        new_upload.save()
        # Upload has been saved to the database.
        self.assert_(new_upload.pk)
        # Upload contains correct content.
        self.assertEqual(new_upload.file.read(), self.text)
        # Filename is the hash of the file contents.
        self.assert_(new_upload.file.name.startswith(self.digest))

    def tearDown(self):
        self.tempfile.close()  # Also deletes the temp file.
        # Remove the upload in `MEDIA_ROOT`.
        directory = os.path.join(settings.MEDIA_ROOT, '24')
        if os.path.exists(directory):
            shutil.rmtree(directory)
class TemporaryFileUploadHandler(FileUploadHandler):
    """Upload handler that streams data into a temporary file."""

    def __init__(self, *args, **kwargs):
        super(TemporaryFileUploadHandler, self).__init__(*args, **kwargs)

    def new_file(self, file_name, *args, **kwargs):
        """Create the temporary file object that incoming chunks append to."""
        super(TemporaryFileUploadHandler, self).new_file(
            file_name, *args, **kwargs)
        self.file = TemporaryUploadedFile(self.file_name, self.content_type,
                                          0, self.charset,
                                          self.content_type_extra)

    def receive_data_chunk(self, raw_data, start):
        # Append the incoming chunk to the temporary file.
        self.file.write(raw_data)

    def file_complete(self, file_size):
        # Rewind and record the final size before handing the file over.
        self.file.seek(0)
        self.file.size = file_size
        return self.file
def setUp(self):
    """Prepare a known text payload, its SHA-1 digest, and a temp upload."""
    self.text = "Spam Spam Spam Spam.\n"
    self.digest = hashlib.sha1(self.text).hexdigest()
    self.tempfile = TemporaryUploadedFile('spam4.txt', 'text/plain',
                                          len(self.text), 'utf-8')
    # Fill the underlying file and rewind so tests read from the start.
    self.tempfile.file.write(self.text)
    self.tempfile.file.seek(0)
def handle_filedrop_upload(request):
    """
    Squeeze out an UploadedFile from a request sent through FileDrop.js.

    FileDrop.js's AJAX mode passes the actual file data as an unembellished
    binary stream as the POST payload so we need to do some magic that
    normal (multipart/form-data) uploads would not require. Here's that magic.

    :param request: HTTP request.
    :type request: django.http.HttpRequest
    :return: Uploaded file, rewound to position 0 so callers can read it.
    :rtype: django.core.files.uploadedfile.UploadedFile
    """
    content_type = request.META.get("HTTP_X_FILE_TYPE", "")
    filename = request.META["HTTP_X_FILE_NAME"]
    size = int(request.META["HTTP_X_FILE_SIZE"])

    # Spool large payloads to disk, keep small ones in memory — mirroring
    # Django's own upload-handler threshold.
    if size >= settings.FILE_UPLOAD_MAX_MEMORY_SIZE:
        upload_file = TemporaryUploadedFile(name=filename,
                                            content_type=content_type,
                                            size=size,
                                            charset="binary")
    else:
        upload_file = InMemoryUploadedFile(
            name=filename,
            content_type=content_type,
            size=size,
            charset="binary",
            field_name="none",
            file=BytesIO()
        )
    upload_file.write(request.read())
    # BUG FIX: rewind after writing — without this the file position is at
    # EOF and the first read() by a caller returns empty content.
    upload_file.seek(0)
    return upload_file
def test_backup(self):
    """Storage must keep a .bak copy when a same-named file is re-saved."""
    with TemporaryUploadedFile("test_backup.txt", "text/plain", 0,
                               "UTF-8") as tmp_file:
        tmp_file.write(b"content one")
        tmp_file.flush()
        tmp_file.seek(0)
        instance1 = OverwriteFileSystemStorageModel.objects.create(
            file=tmp_file)

    # create a second instance with the same filename and different content:
    with TemporaryUploadedFile("test_backup.txt", "text/plain", 0,
                               "UTF-8") as tmp_file:
        tmp_file.write(b"content two")
        tmp_file.flush()
        tmp_file.seek(0)
        instance2 = OverwriteFileSystemStorageModel.objects.create(
            file=tmp_file)

    assert_is_file(instance1.file.path)
    assert_is_file(instance2.file.path)
    assert instance1.file.path == instance2.file.path
    assert instance1.file.name == "test_backup.txt"
    assert instance2.file.name == "test_backup.txt"
    assert_filenames_and_content(
        path=temp_storage_location,
        reference=[("test_backup.txt", b"content two"),
                   ("test_backup.txt.bak01", b"content one")],
    )
def new_file(self, file_name, *args, **kwargs):
    """
    Create the file object to append to as data is coming in.
    Ignores and overwrites most of the arguments and relies on the
    existing request (validated before the temp file is created).
    """
    super(FeedUploadHandler, self).new_file(file_name, *args, **kwargs)
    self._validate_file()
    self.file = TemporaryUploadedFile(self.file_name, self.content_type,
                                      0, self.charset)
def new_file(self, *args, **kwargs):
    """Create the temporary file object that incoming data appends to."""
    super().new_file(*args, **kwargs)
    self.file = TemporaryUploadedFile(self.file_name, self.content_type,
                                      0, self.charset,
                                      self.content_type_extra)
def new_file(self, *args, **kwargs):
    """Open the temp file for the upload and mark the tracked job STARTED."""
    super().new_file(*args, **kwargs)
    self.file = TemporaryUploadedFile(self.file_name, self.content_type,
                                      0, self.charset,
                                      self.content_type_extra)
    # The job id travels in the query string; flag the job as running.
    job = JobModel.objects.get(job_id=self.request.GET["job_id"])
    job.job_status = JobModel.STARTED
    job.save()
def new_file(self, file_name, *args, **kwargs):
    """Create the file object to append to as data is coming in."""
    super(TemporaryFileUploadHandler, self).new_file(file_name, *args,
                                                     **kwargs)
    self.file = TemporaryUploadedFile(self.file_name, self.content_type,
                                      0, self.charset)
def decorate(request, *args, **kwargs):
    """Wrap a view: materialise a raw X-File upload into request.FILES."""
    if request.method == 'POST' and 'HTTP_X_FILE_NAME' in request.META:
        upload = TemporaryUploadedFile('rawdata',
                                       request.META['HTTP_X_FILE_TYPE'],
                                       int(request.META['CONTENT_LENGTH']),
                                       None)
        # Prime the loop with a non-empty sentinel, then drain the body.
        piece = ' '
        while len(piece) > 0:
            piece = request.read(1024)
            upload.write(piece)
        upload.seek(0)
        request.FILES['file'] = upload
    return func(request, *args, **kwargs)
def _make_tempfile(self, filename, content):
    """Build a TemporaryUploadedFile named `<filename>.tempfile` holding
    `content` (flushed, file position left at EOF)."""
    upload = TemporaryUploadedFile(
        name=filename + ".tempfile",
        content_type='text/plain',
        size=0,
        charset='utf8',
    )
    upload.write(content)
    upload.flush()
    return upload
class FeedUploadHandler(TemporaryFileUploadHandler):
    """This handler specifically handles feed uploads."""

    QUOTA = 42 * 2**20  # 42 MB
    # doesn't seem to be a good way to identify zip files
    MIME_TYPES = (
        'application/zip',
        'application/x-zip',
        'application/x-gzip',
    )

    def __init__(self, *args, **kwargs):
        super(FeedUploadHandler, self).__init__(*args, **kwargs)
        self.total_upload = 0  # bytes received so far
        self.file_name = ""

    def _validate_file(self):
        """Validate mime type, declared length and filename from the request
        headers; raises on any problem.  Sets self.file_name/content_type/
        content_length on success."""
        filename_re = re.compile(r'filename="(?P<name>[^"]+)"')
        content_type = str(self.request.META.get('CONTENT_TYPE', ""))
        content_length = int(self.request.META.get('CONTENT_LENGTH', 0))
        m = filename_re.search(
            self.request.META.get("HTTP_CONTENT_DISPOSITION", ""))
        if content_type not in self.MIME_TYPES:
            raise IncorrectMimeTypeError("Incorrect mime type",
                                         connection_reset=True)
        if content_length > self.QUOTA:
            raise StopUpload(connection_reset=True)
        if not m:
            raise FileNameUnspecified("File name not specified",
                                      connection_reset=True)
        # FIX: the original assigned self.file_name twice in one statement
        # and kept a dead local `charset = 'binary'`; both removed.
        self.file_name = m.group('name')
        self.content_type = content_type
        self.content_length = content_length

    def new_file(self, file_name, *args, **kwargs):
        """
        Create the file object to append to as data is coming in.
        Ignores and overwrites most of the arguments and relies on the
        existing request.
        """
        super(FeedUploadHandler, self).new_file(file_name, *args, **kwargs)
        self._validate_file()
        self.file = TemporaryUploadedFile(self.file_name, self.content_type,
                                          0, self.charset)

    def receive_data_chunk(self, raw_data, start):
        # Enforce the quota on actual bytes received, not the declared length.
        self.total_upload += len(raw_data)
        if self.total_upload >= self.QUOTA:
            raise StopUpload(connection_reset=True)
        self.file.write(raw_data)
def parse_distutils_request(request):
    """
    Work around the distutils upload bug: older distutils separates POST
    parts with bare LF instead of the CRLF the HTTP spec demands, which
    breaks Django's form parser, so we parse the body by hand.

    The upstream bug was fixed in Python 2.7.4 and 3.4:
    http://bugs.python.org/issue10510
    """
    if not request.body.endswith('\r\n'):
        # Broken (LF-separated) body: split and rebuild POST/FILES manually.
        sep = request.body.splitlines()[1]
        request.POST = QueryDict('', mutable=True)
        try:
            request._files = MultiValueDict()
        except Exception:
            pass
        for part in filter(lambda e: e.strip(), request.body.split(sep)):
            try:
                header, content = part.lstrip().split('\n', 1)
            except Exception:
                continue
            if content.startswith('\n'):
                content = content[1:]
            if content.endswith('\n'):
                content = content[:-1]
            headers = parse_header(header)
            if "name" not in headers:
                continue
            if "filename" in headers and headers['name'] == 'content':
                dist = TemporaryUploadedFile(name=headers["filename"],
                                             size=len(content),
                                             content_type="application/gzip",
                                             charset='utf-8')
                dist.write(content)
                dist.seek(0)
                request.FILES.appendlist('distribution', dist)
            else:
                request.POST.appendlist(headers["name"], content)
    else:
        # Well-formed body: Django already parsed it; just alias the file.
        request.FILES['distribution'] = request.FILES['content']
    # Distutils sends UNKNOWN for empty fields (e.g platform)
    for key, value in request.POST.items():
        if value == 'UNKNOWN':
            request.POST[key] = None
def handle(f, g):
    """
    Copy uploaded file `f` into a TemporaryUploadedFile named `g` and
    dispatch it to the save routine matching its extension.
    """
    ext = os.path.splitext(g)[1].lower()
    tmp = TemporaryUploadedFile(g, f.content_type, f.size, f.charset)
    for chunk in f.chunks():
        tmp.write(chunk)
    if ext == ".json":
        savejson(tmp)
    elif ext == ".csv":
        savecsv(tmp)
    elif ext in (".xls", ".xlsx"):
        # BUG FIX: the original condition `ext == ".xls" or ".xlsx"` was
        # always truthy (a non-empty string), so *every* non-json/csv file
        # fell into the Excel branch regardless of extension.
        savexls(tmp)
class FeedUploadHandler(TemporaryFileUploadHandler):
    """This handler specifically handles feed uploads."""

    QUOTA = 42 * 2 ** 20  # 42 MB
    # doesn't seem to be a good way to identify zip files
    MIME_TYPES = ("application/zip", "application/x-zip", "application/x-gzip")

    def __init__(self, *args, **kwargs):
        super(FeedUploadHandler, self).__init__(*args, **kwargs)
        self.total_upload = 0  # bytes received so far
        self.file_name = ""

    def _validate_file(self):
        """Validate mime type, declared length and filename from the request
        headers; raises on any problem."""
        filename_re = re.compile(r'filename="(?P<name>[^"]+)"')
        content_type = str(self.request.META.get("CONTENT_TYPE", ""))
        content_length = int(self.request.META.get("CONTENT_LENGTH", 0))
        m = filename_re.search(
            self.request.META.get("HTTP_CONTENT_DISPOSITION", ""))
        if content_type not in self.MIME_TYPES:
            raise IncorrectMimeTypeError("Incorrect mime type",
                                         connection_reset=True)
        if content_length > self.QUOTA:
            raise StopUpload(connection_reset=True)
        if not m:
            raise FileNameUnspecified("File name not specified",
                                      connection_reset=True)
        # FIX: the original assigned self.file_name twice in one statement
        # and kept a dead local `charset = "binary"`; both removed.
        self.file_name = m.group("name")
        self.content_type = content_type
        self.content_length = content_length

    def new_file(self, file_name, *args, **kwargs):
        """
        Create the file object to append to as data is coming in.
        Ignores and overwrites most of the arguments and relies on the
        existing request.
        """
        super(FeedUploadHandler, self).new_file(file_name, *args, **kwargs)
        self._validate_file()
        self.file = TemporaryUploadedFile(self.file_name, self.content_type,
                                          0, self.charset)

    def receive_data_chunk(self, raw_data, start):
        # Enforce the quota on actual bytes received, not the declared length.
        self.total_upload += len(raw_data)
        if self.total_upload >= self.QUOTA:
            raise StopUpload(connection_reset=True)
        self.file.write(raw_data)
def test_parse_valid_pom_temp_file(self):
    """parse_pom must extract coordinates from a pom in a temp upload."""
    pombytes = self.get_valid_pom_as_bytes()
    testpomfile = TemporaryUploadedFile('pom.xml', 'text/xml',
                                        len(pombytes), 0)
    # Write directly to the backing temp file on disk.
    with open(testpomfile.temporary_file_path(), 'wb') as f:
        f.write(self.get_valid_pom_as_bytes())
    res = pomparse.parse_pom(testpomfile)
    self.assertEqual(
        {
            'artifactId': 'DisGeNET-app',
            'groupId': 'es.imim',
            'version': '6.3.2'
        }, res)
def test_save_tempfile(self):
    """Saving a TemporaryUploadedFile must keep the requested name/content."""
    with media_root():
        storage = SafeFileSystemStorage()
        content = 'Hello world!'
        upload = TemporaryUploadedFile(name='filename',
                                       content_type='text/plain',
                                       size=len(content),
                                       charset='utf-8')
        upload.write(content)
        upload.seek(0)
        saved_name = storage.save('hello.txt', upload)
        self.assertEqual(saved_name, 'hello.txt')
        self.assertEqual(open(storage.path(saved_name)).read(), content)
def create_photo_versions(sender, instance, **kwargs):
    """Create `PhotoVersion`` objects for the photo object defined by
    `instance`.  A version is created for a bounding box defined by each
    PhotoSize instance."""
    from photo.models import Photo, PhotoSize, PhotoVersion
    photo = instance
    ext = '.jpg'
    t = None
    try:
        pth = photo.image.path
    except NotImplementedError:
        # Storage has no local path: spool the image to a named temp file.
        from django.core.files.temp import NamedTemporaryFile
        t = NamedTemporaryFile(suffix=ext)
        ix = photo.image
        if ix.closed:
            # Reload from DB
            photo = Photo.objects.get(pk=photo.pk)
            ix = photo.image
        for d in ix.chunks(4000000):
            t.write(d)
        t.flush()
        t.seek(0)
        pth = t
    for size in PhotoSize.objects.all():
        # Create a suitable filename.
        filename = '%s-%s-%s%s' % (photo.pk, uuid4().hex[::7],
                                   slugify(size.name)[:10], ext)
        ctype = guess_type(filename)[0]
        temp_file = TemporaryUploadedFile(name=filename, content_type=ctype,
                                          size=0, charset=None)
        if t:
            t.seek(0)
        # Reuse an existing version for this (photo, size) pair if present.
        try:
            version = PhotoVersion.objects.get(photo=photo, size=size)
            remove_model_image(version, 'image')
            version.image = None
        except PhotoVersion.DoesNotExist:
            version = PhotoVersion(photo=photo, size=size)
        if size.do_crop:
            resize_to, crop_box, input_image = get_perfect_fit_resize_crop(
                size.bounding_box, (photo.width, photo.height))
        else:
            resize_to = size.bounding_box
            crop_box = None
        # Resize to a temporary location.
        resize(pth, resize_to, out_file_path=temp_file, crop=crop_box)
        # Save resized copy to `version` instance.
        temp_file.seek(0)  # Prepare file for a re-read.
        version.image.save(name=filename, content=temp_file, save=True)
        temp_file.close()
    if t:
        t.close()
class CustomFileUploadHandler(FileUploadHandler):
    """
    Custom file upload handler which handles the file upload as job and
    can be stopped midway if the job is revoked.
    Acts mainly as a TemporaryFileUpload with additional logic.
    """
    # TODO:Logging file progress

    def new_file(self, *args, **kwargs):
        """Open the temp file for the upload and mark the job STARTED."""
        super().new_file(*args, **kwargs)
        self.file = TemporaryUploadedFile(self.file_name, self.content_type,
                                          0, self.charset,
                                          self.content_type_extra)
        job = JobModel.objects.get(job_id=self.request.GET["job_id"])
        job.job_status = JobModel.STARTED
        job.save()

    def receive_data_chunk(self, raw_data, start):
        # Depending on the job id the task handling will stop
        # midway if the status is 'REVOKED'
        time.sleep(0.5)
        try:
            job = JobModel.objects.get(job_id=self.request.GET["job_id"])
            print(job)
        except JobModel.DoesNotExist:
            raise StopUpload(connection_reset=True)
        if job.job_status == JobModel.PAUSED:
            # Poll until the job is resumed or we exhaust the pulse budget.
            pulse_try = 0
            while (job.job_status == JobModel.PAUSED
                   and pulse_try <= settings.PULSE_MAX_TRIES):
                pulse_try += 1
                time.sleep(settings.PAUSE_PULSE)
                job = JobModel.objects.get(
                    job_id=self.request.GET["job_id"])
            if pulse_try == settings.PULSE_MAX_TRIES:
                job.delete()
                job.save()
                raise StopUpload(connection_reset=True)
        elif job.job_status == JobModel.REVOKED:
            job.delete()
            raise StopUpload(connection_reset=True)
        self.file.write(raw_data)

    def file_complete(self, file_size):
        # delete the job after completion
        job = JobModel.objects.get(job_id=self.request.GET["job_id"])
        job.delete()
        self.file.seek(0)
        self.file.size = file_size
        return self.file
def parse_distutils_request(request):
    """Parse the `request.raw_post_data` and return a `MultiValueDict`
    for the POST data and the FILES data.  This method is taken from the
    chishop source."""
    try:
        sep = request.raw_post_data.splitlines()[1]
    except:
        raise ValueError('Invalid post data')

    request.POST = QueryDict('', mutable=True)
    try:
        request._files = MultiValueDict()
    except Exception:
        pass

    for part in filter(lambda e: e.strip(), request.raw_post_data.split(sep)):
        try:
            header, content = part.lstrip().split('\n', 1)
        except Exception:
            continue
        # Trim the single leading/trailing newline around the payload.
        if content.startswith('\n'):
            content = content[1:]
        if content.endswith('\n'):
            content = content[:-1]
        headers = parse_header(header)
        if "name" not in headers:
            continue
        if "filename" in headers and headers['name'] == 'content':
            dist = TemporaryUploadedFile(name=headers["filename"],
                                         size=len(content),
                                         content_type="application/gzip",
                                         charset='utf-8')
            dist.write(content)
            dist.seek(0)
            request.FILES.appendlist('distribution', dist)
        else:
            # Distutils sends UNKNOWN for empty fields (e.g platform)
            # [[email protected]]
            if content == 'UNKNOWN':
                content = None
            request.POST.appendlist(headers["name"], content)
def parse_distutils_request(request):
    """
    This is being used because the built in request parser that Django uses,
    django.http.multipartparser.MultiPartParser is interperting the POST data
    incorrectly and/or the post data coming from distutils is invalid.

    One portion of this is the end marker: \r\n\r\n (what Django expects)
    versus \n\n (what distutils is sending).
    """
    sep = request.body.splitlines()[1]
    request.POST = QueryDict('', mutable=True)
    try:
        request._files = MultiValueDict()
    except Exception:
        pass
    for part in filter(lambda e: e.strip(), request.body.split(sep)):
        try:
            header, content = part.lstrip().split('\n', 1)
        except Exception:
            continue
        try:
            # Trim the single leading/trailing newline around the payload.
            if content.startswith('\n'):
                content = content[1:]
            if content.endswith('\n'):
                content = content[:-1]
            headers = parse_header(header)
            if "name" not in headers:
                continue
            if "filename" in headers:
                dist = TemporaryUploadedFile(name=headers["filename"],
                                             size=len(content),
                                             content_type="application/gzip",
                                             charset='utf-8')
                dist.write(content)
                dist.seek(0)
                request.FILES.appendlist(headers['name'], dist)
            else:
                request.POST.appendlist(headers["name"], content)
        except Exception as e:
            # BUG FIX: `print e` is Python 2-only syntax and a SyntaxError on
            # Python 3; `print(e)` behaves identically on both.
            print(e)
            return
def parse_distutils_request(request):
    """Parse the `request.raw_post_data` and update the request POST and
    FILES attributes ."""
    lines = request.raw_post_data.splitlines()
    # The multipart boundary is the first line starting with dashes.
    seperator = next(line for line in lines if line.startswith('----'))
    request.POST = QueryDict('', mutable=True)
    raw_post = request.raw_post_data.split(seperator)
    raw_lines = [line.lstrip() for line in raw_post if line.lstrip()]
    try:
        request._files = MultiValueDict()
    except Exception:
        pass
    for line in raw_lines:
        line_content = line.lstrip().split('\n', 1)
        header = line_content[0]
        content = line_content[1]
        # Trim the single leading/trailing newline around the payload.
        if content.startswith('\n'):
            content = content[1:]
        if content.endswith('\n'):
            content = content[:-1]
        headers = parse_header(header)
        if "name" not in headers:
            continue
        if "filename" in headers and headers['name'] == 'content':
            dist = TemporaryUploadedFile(name=headers["filename"],
                                         size=len(content),
                                         content_type="application/gzip",
                                         charset='utf-8')
            dist.write(content)
            dist.seek(0)
            request.FILES.appendlist('distribution', dist)
        else:
            # Distutils sends UNKNOWN for empty fields (e.g platform)
            # [[email protected]]
            if content == 'UNKNOWN':
                content = None
            request.POST.appendlist(headers["name"], content)
def value_from_datadict(self, data, files, name):
    """
    Normally returns files.get(name, None).  Here we also check `data` --
    if the appropriate hidden _sticky_file input is set, we can look for
    the temporary file instead and return that if it exists.

    This method seems to be called multiple times with the same arguments,
    so to prevent excessive storage activity the return value is cached and
    returned without processing on subsequent calls.  There is an assumption
    that the arguments will not change between calls for any given instance,
    which appears to be valid, so no argument checks are performed.
    """
    if hasattr(self, '_value'):
        return self._value
    self.user_token = data.get('csrfmiddlewaretoken', None)
    # look for normal file
    value = super(StickyFileInput, self).value_from_datadict(
        data, files, name)
    if value and hasattr(value, 'name'):
        # got one, save a temporary copy just in case
        self.sticky_file_name = value.name
        self.sticky_session_id = '%.6f' % time.time()
        self.save_sticky_copy(value.file)
    else:
        # check for temporary copy
        self.sticky_file_name = data.get(
            self.get_hidden_input_name(name, 'sticky_file'), None)
        self.sticky_session_id = data.get(
            self.get_hidden_input_name(name, 'sticky_session_id'), None)
        sticky_copy = self.load_sticky_copy()
        if sticky_copy:
            sticky_copy.seek(0, 2)  # seek to end (to measure size)
            value = TemporaryUploadedFile(
                name=self.sticky_file_name,
                content_type=None,
                size=sticky_copy.tell(),
                charset=None
            )
            value.file = sticky_copy
            value.file.seek(0)
            value.temporary_file_path = lambda: self.get_sticky_path()
    setattr(self, '_value', value)  # cache
    return self._value
def test_save_temporary_file(self):
    """Saving an (empty) over-threshold temporary upload must round-trip."""
    name = "foo/bar/toto.txt"
    # Declared size exceeds the memory threshold; no content is written,
    # so reading back yields empty bytes.
    content = TemporaryUploadedFile(
        "temporary", "application/binary",
        (settings.FILE_UPLOAD_MAX_MEMORY_SIZE + 1), None)
    self.storage.save(name, content)
    self.assertEqual(self.storage.open(name).read(), b'')
def process_request(self, request):
    """
    Middleware hook: turn a raw XHR octet-stream POST (with an
    X-File-Name header) into an entry in request.FILES.
    """
    file_name = request.META.get('HTTP_X_FILE_NAME')
    self.uploaded_file = None
    if ('application/octet-stream' in request.META.get('CONTENT_TYPE') and
            request.META.get('HTTP_X_REQUESTED_WITH') == 'XMLHttpRequest' and
            request.method == 'POST' and file_name):
        initial_size = request._stream.remaining
        self.uploaded_file = TemporaryUploadedFile(
            name=unquote(file_name),
            content_type='application/octet-stream',
            size=initial_size,
            charset=None)
        size = 0
        while True:
            chunk = request._stream.read(self.chunk_size)
            if not chunk:
                break
            size += len(chunk)
            self.uploaded_file.write(chunk)
        if size != initial_size:
            # BUG FIX: the original did `raise HttpResponseBadRequest` --
            # HttpResponseBadRequest is not an exception class, so that
            # raises TypeError at runtime.  Middleware short-circuits by
            # *returning* a response instead.
            return HttpResponseBadRequest()
        self.uploaded_file.seek(0)
        self.uploaded_file.size = size
        request.FILES['file'] = self.uploaded_file
        request.POST = request.GET
def stage_replica(replica):
    """Verify an external replica into local storage; return True on success."""
    from django.core.files.uploadedfile import TemporaryUploadedFile
    from tardis.tardis_portal.models import Replica, Location
    if not replica.location.type == 'external':
        raise ValueError('Only external replicas can be staged')
    if getattr(settings, "DEEP_DATASET_STORAGE", False):
        # Preserve the directory layout below the sync temp path
        # (strip the leading 'file://'-style prefix and the first component).
        relurl = path.relpath(replica.url[7:], settings.SYNC_TEMP_PATH)
        spliturl = relurl.split(os.sep)[1:]
        subdir = path.dirname(path.join(*spliturl))
    else:
        subdir = None
    with TemporaryUploadedFile(replica.datafile.filename,
                               None, None, None) as tf:
        if not replica.verify(tempfile=tf.file):
            return False
        if not replica.stay_remote:
            tf.file.flush()
            target_replica = {
                'datafile': replica.datafile,
                'url': write_uploaded_file_to_dataset(
                    replica.datafile.dataset, tf, subdir=subdir),
                'location': Location.get_default_location(),
                'verified': True,
                'protocol': ''
            }
            Replica.objects.filter(id=replica.id).update(**target_replica)
        return True
def test_get_attachment(self):
    """Fetching an attachment should redirect to a presigned S3 URL."""
    user_specified_filename = 'a'
    internal_filename = 'b'
    file = TemporaryUploadedFile(internal_filename, 'text/plain',
                                 10000, 'utf-8')
    attachment = ReportAttachment.objects.create(
        file=file,
        user=self.user,
        filename=user_specified_filename,
        report=self.report)
    attachment.save()

    response = self.client.get(
        reverse('crt_forms:get-report-attachment',
                kwargs={
                    'id': self.pk,
                    'attachment_id': attachment.pk
                }),
    )

    # we should reply with a redirect to a presigned s3 url
    self.assertEqual(response.status_code, 302)
    # the presigned url should target the private S3 bucket
    self.assertTrue('/crt-private/' in str(response.url))
    # the presigned url should have a 30 second expiration
    self.assertTrue('Expires=30' in str(response.url))
    # the presigned url should target the internal (not user specified) filename
    self.assertTrue(
        f'/attachments/{internal_filename}' in str(response.url))
    # the response-content-disposition should be set so that the file downloads with the user specified filename
    self.assertTrue(
        f'response-content-disposition=attachment%3Bfilename%3D{user_specified_filename}'
        in str(response.url))
def process_zip(self):
    """Extract every regular file from the source zip and upload each one."""
    with file_storage.get_document_as_local_fn(
            self.source_path) as (local_file_path, _):
        with zipfile.ZipFile(local_file_path) as zip_file:
            zip_file_filelist = zip_file.filelist
            self.log_info('Start extracting {} documents from {}'.format(
                len(zip_file_filelist), local_file_path))
            for n, a_file in enumerate(zip_file_filelist):
                if a_file.is_dir():
                    continue
                file_size = a_file.file_size
                file_name = os.path.basename(a_file.filename)
                mime_type = self.get_mime_type(file_name)
                self.log_info(
                    'Extract/start LoadDocument for {} of {} files: name={}, size={}, mime_type={}'.format(
                        n + 1, len(zip_file_filelist), file_name, file_size,
                        mime_type))
                with TemporaryUploadedFile(file_name, mime_type, file_size,
                                           'utf-8') as tempfile:
                    # Stream straight out of the archive; neutralise seek()
                    # since zip members are not seekable.
                    tempfile.file = zip_file.open(a_file)
                    tempfile.file.seek = lambda *args: 0
                    self.upload_file(
                        file_name=file_name,
                        file_size=file_size,
                        contents=tempfile,
                        directory_path=self.directory_path)
def test_default_server_url(self):
    """The compiled export command must point at the default HQ server."""
    server = CommCareServer.objects.get()
    project = CommCareProject(server=server, domain="foo")
    account = CommCareAccount(server=server, username="******",
                              api_key="P@ssWord")
    database = ExportDatabase(
        name="Test DB",
        connection_string="postgresql://*****:*****@123.4.0.0/test")
    config_file = TemporaryUploadedFile(name="config_file",
                                        content_type="application/xml",
                                        size=100,
                                        charset="utf-8")
    export_config = ExportConfig(
        name="Test Config",
        project=project,
        account=account,
        database=database,
        config_file=config_file,
        extra_args="",
    )
    command = _compile_export_command(export_config, project, force=False)
    self.assertIn('https://www.commcarehq.org', command)
def process_tar(self):
    """Extract every regular member of the source tar and upload each one."""
    with file_storage.get_document_as_local_fn(
            self.source_path) as (local_file_path, _):
        with tarfile.TarFile(local_file_path) as tar_file:
            tar_file_members = tar_file.getmembers()
            self.log_info('Start extracting {} documents from {}'.format(
                len(tar_file_members), local_file_path))
            for n, a_file in enumerate(tar_file_members):
                if a_file.isdir():
                    continue
                file_size = a_file.size
                file_name = os.path.basename(a_file.name)
                mime_type = self.get_mime_type(file_name)
                self.log_info(
                    'Extract/start LoadDocument for {} of {} files: name={}, size={}, mime_type={}'.format(
                        n + 1, len(tar_file_members), file_name, file_size,
                        mime_type))
                with TemporaryUploadedFile(file_name, mime_type, file_size,
                                           'utf-8') as tempfile:
                    # Stream the member straight out of the archive.
                    tempfile.file = tar_file.extractfile(a_file)
                    self.upload_file(
                        file_name=file_name,
                        file_size=file_size,
                        contents=tempfile,
                        directory_path=self.directory_path)
def _make_form(self, project_path, instance=None, name=None):
    """Build a QdjangoProjectForm whose qgis_file is the project at
    `project_path` (optionally renamed to `name`)."""
    payload = open(project_path, 'rb').read()
    if name is None:
        name = os.path.basename(project_path)
    qgis_file = TemporaryUploadedFile(name, 'application/xml',
                                      len(payload), 'utf-8')
    qgis_file.file.write(payload)
    qgis_file.file.seek(0)
    form_data = {
        'feature_count_wms': 10,
        'group_slug': self.project_group.slug,
        'toc_tab_default': 'layers',
        'toc_layers_init_status': 'not_collapsed',
        'toc_themes_init_status': 'collapsed',
        'legend_position': 'tab',
        'multilayer_query': 'single',
        'multilayer_querybybbox': 'single',
        'multilayer_querybypolygon': 'single'
    }
    return QdjangoProjectForm(
        data=form_data,
        files={'qgis_file': qgis_file},
        group=self.project_group,
        request=type('', (object,), {'user': self.test_user1})(),
        instance=instance,
        initial={}
    )
def clean_picture(self):
    """
    Ensure that picture is big enough (width & height >=
    PORTRAIT_PICTURE_MIN_DIMENSION)

    1) Clears the picture field in the field isn't an uploaded file
    2) Detects the size of the uploaded image, and create ValidationError
       if neccessary
    3) Resize the image so it conforms with the minimum size - minimize
       the space used
    """
    min_size = settings.PORTRAIT_PICTURE_MIN_DIMENSION
    # If user has ticked to delete his portrait - then the picture
    # attribute can be False
    if not self.cleaned_data['picture']:
        self.cleaned_data['picture'] = ''
        return ''
    # Grab content of the Uploaded file and validate size - use StringIO
    image_file = self.cleaned_data['picture']
    image_data = StringIO(image_file.read())
    image = Image.open(image_data)
    w, h = image.size
    if min(w, h) < min_size:
        raise forms.ValidationError(
            'Picture is too small : must be a minimum of %(min)s x %(min)s pixels',
            code='invalid',
            params={'min': min_size})
    if min(w, h) == min_size:
        # Already exactly at the minimum; nothing to resize.
        return self.cleaned_data['picture']
    # Resize image so the smallest side conforms to the minimum dimension.
    ratio = max(min_size / float(w), min_size / float(h))
    pic = image.resize((int(w * ratio), int(h * ratio)), Image.ANTIALIAS)
    new_image = StringIO()
    pic.save(new_image, 'JPEG', quality=90)
    # Create a new File for the resized image - can't simply overwrite
    # contents of old file.
    new_Temp = TemporaryUploadedFile(
        name=image_file.name,
        content_type=image_file.content_type,
        content_type_extra=image_file.content_type_extra,
        charset=image_file.charset,
        size=new_image.len)
    new_Temp.write(new_image.getvalue())
    self.cleaned_data['picture'] = new_Temp
    return self.cleaned_data['picture']
def clean(self, value):
    """Validate the URL and, if it points at an image, download it into a
    TemporaryUploadedFile; otherwise return the cleaned URL unchanged."""
    url = super(ImageFromURLField, self).clean(value)
    if not url:
        return url
    wf = urllib.urlopen(url)
    if wf.headers.getmaintype() != "image":
        raise forms.ValidationError(u"Enter a URL for a valid image.")
    importedFile = TemporaryUploadedFile(
        url.split("/")[-1],
        wf.headers.gettype(),
        int(wf.headers.get("Content-Length")),
        None
    )
    importedFile.write(wf.read())
    wf.close()
    importedFile.seek(0)
    if not is_valid_image(importedFile):
        raise forms.ValidationError(u"Enter a URL for a valid image.")
    return importedFile
def test_file_extension_ok(self):
    """A .txt upload must pass validate_file_extension without error."""
    file = TemporaryUploadedFile('file.txt', 'text/plain', 5000, 'utf-8')
    try:
        validate_file_extension(file)
    except ValidationError:
        self.fail('validate_file_extension unexpectedly raised ValidationError!')
def test_file_size_uploadble(self):
    """A file under the size limit must pass validate_file_size."""
    # BUG FIX: the original passed the file *content*
    # (b'this is a small file') as the second positional argument, but
    # TemporaryUploadedFile(name, content_type, size, charset) expects a
    # content type there — the arguments were misaligned.
    small_file = TemporaryUploadedFile('file.txt', 'text/plain',
                                       10000, 'utf-8')
    try:
        validate_file_size(small_file)
    except ValidationError:
        self.fail('validate_file_size unexpectedly raised ValidationError!')
def stage_file(datafile):
    """Verify `datafile` through a temp file and, on success, write it into
    its dataset and persist the updated url/protocol."""
    from django.core.files.uploadedfile import TemporaryUploadedFile
    with TemporaryUploadedFile(datafile.filename, None, None, None) as tf:
        if not datafile.verify(tempfile=tf.file):
            return
        tf.file.flush()
        datafile.url = write_uploaded_file_to_dataset(datafile.dataset, tf)
        datafile.protocol = ''
        datafile.save()
class TemporaryFileUploadHandler(FileUploadHandler):
    """Upload handler that streams data into a temporary file."""

    def new_file(self, *args, **kwargs):
        """Create the temporary file object that incoming chunks append to."""
        super().new_file(*args, **kwargs)
        self.file = TemporaryUploadedFile(self.file_name, self.content_type,
                                          0, self.charset,
                                          self.content_type_extra)

    def receive_data_chunk(self, raw_data, start):
        # Append the incoming chunk to the temporary file.
        self.file.write(raw_data)

    def file_complete(self, file_size):
        # Rewind and record the final size before handing the file over.
        self.file.seek(0)
        self.file.size = file_size
        return self.file

    def upload_interrupted(self):
        # Best-effort cleanup of the partially written temp file.
        if hasattr(self, 'file'):
            temp_location = self.file.temporary_file_path()
            try:
                self.file.close()
                os.remove(temp_location)
            except FileNotFoundError:
                pass
class TemporaryFileUploadHandler(FileUploadHandler):
    """Upload handler that streams data into a temporary file."""

    def new_file(self, *args, **kwargs):
        """Create the temporary file object incoming data is appended to."""
        super().new_file(*args, **kwargs)
        self.file = TemporaryUploadedFile(self.file_name, self.content_type,
                                          0, self.charset,
                                          self.content_type_extra)

    def receive_data_chunk(self, raw_data, start):
        # Append each received chunk to the temp file.
        self.file.write(raw_data)

    def file_complete(self, file_size):
        # Rewind and record the final size before returning the file.
        self.file.seek(0)
        self.file.size = file_size
        return self.file
def test_move_temporary_file(self):
    """
    The temporary uploaded file is moved rather than copied to the
    destination.
    """
    with TemporaryUploadedFile('something.txt', 'text/plain', 0,
                               'UTF-8') as tmp_file:
        tmp_file_path = tmp_file.temporary_file_path()
        Document.objects.create(myfile=tmp_file)
        # A move leaves nothing behind at the original temp path.
        self.assertFalse(os.path.exists(tmp_file_path),
                         'Temporary file still exists')
def save_uploaded_file(source_file: TemporaryUploadedFile):
    """Copy `source_file` into FLEIO_TEMP_DIR under a random name and
    return the resulting path."""
    if not path.isdir(settings.FLEIO_TEMP_DIR):
        mkdir(settings.FLEIO_TEMP_DIR)
    target_path = path.join(settings.FLEIO_TEMP_DIR, str(uuid4()))
    with open(target_path, 'wb+') as dest:
        for chunk in source_file.chunks():
            dest.write(chunk)
    return target_path
def test_media_root_pathlib(self):
    """MEDIA_ROOT given as a pathlib.Path must still receive the upload."""
    with tempfile.TemporaryDirectory() as tmp_dir:
        with override_settings(MEDIA_ROOT=Path(tmp_dir)):
            with TemporaryUploadedFile('foo.txt', 'text/plain', 1,
                                       'utf-8') as tmp_file:
                Document.objects.create(myfile=tmp_file)
                self.assertTrue(
                    os.path.exists(
                        os.path.join(tmp_dir, 'unused', 'foo.txt')))
def new_file(self, file_name, *args, **kwargs):
    """Create the file object to append to as data is coming in."""
    super(TemporaryFileUploadHandler, self).new_file(
        file_name, *args, **kwargs)
    self.file = TemporaryUploadedFile(self.file_name, self.content_type,
                                      0, self.charset,
                                      self.content_type_extra)
def test_image_is_resized_for_setting_size_and_big_image(self):
    """A big source image must be shrunk to the field's configured size."""
    image_path = get_full_file_path(os.path.join('data', 'big_image.jpg'))
    with open(image_path, 'rb') as f:
        upload_file = TemporaryUploadedFile(
            name='medium_image.jpg',
            content_type='image/jpeg',
            size=os.path.getsize(image_path),
            charset=None
        )
        upload_file.write(f.read())
        image_field = ResizedImageField()
        new_image = image_field.clean(upload_file)
        # clean() must hand back a different (resized) file object.
        tools.assert_not_equals(f, new_image.file)
        with Image.open(new_image) as im:
            tools.assert_equals(im.size, (300, 150))
def clean_picture(self):
    """
    Ensure that picture is big enough (width & height >= PORTRAIT_PICTURE_MIN_DIMENSION
    1) Clears the picture field in the field isn't an uploaded file
    2) Detects the size of the uploaded image, and create ValidationError if neccessary
    3) Resize the image so it conforms with the minimum size - minimize the space used
    """
    min_size = settings.PORTRAIT_PICTURE_MIN_DIMENSION
    # If user has ticked to delete his portrait - then the picture attribute can be False
    if not self.cleaned_data['picture']:
        self.cleaned_data['picture'] = ''
        return ''
    # Grab content of the Uploaded file and validate size - use StringIO to keep
    # the bytes in memory while Pillow inspects them.
    # NOTE(review): StringIO(...) on file bytes and `new_image.len` below are
    # Python 2 StringIO APIs — confirm this module runs under Python 2 / has a
    # compatible shim before touching this code.
    image_file = self.cleaned_data['picture']
    image_data = StringIO(image_file.read())
    image = Image.open(image_data)
    w, h = image.size
    # Reject images whose smaller dimension is below the configured minimum.
    if min(w,h) < min_size:
        raise forms.ValidationError('Picture is too small : must be a minimum of %(min)s x %(min)s pixels',
                                    code='invalid',
                                    params={'min':min_size}
                                    )
    # Exactly at the minimum: nothing to resize, return the upload unchanged.
    if min(w,h) == min_size:
        return self.cleaned_data['picture']
    # Resize image to ensure that the smallest size conforms to PORTRAIT_PICTURE_MIN_DIMENSION
    # ratio < 1 here because both dimensions exceed min_size at this point.
    ratio = max(min_size/float(w), min_size/float(h))
    # NOTE(review): Image.ANTIALIAS was removed in Pillow 10 (use
    # Image.Resampling.LANCZOS) — confirm the pinned Pillow version.
    pic = image.resize((int(w*ratio), int(h*ratio)), Image.ANTIALIAS)
    new_image = StringIO()
    pic.save(new_image, 'JPEG', quality=90)
    # Create a new File for the resized image - can't simply overwrite contents of old file.
    new_Temp = TemporaryUploadedFile(
        name=image_file.name,
        content_type= image_file.content_type,
        content_type_extra=image_file.content_type_extra,
        charset=image_file.charset,
        size = new_image.len)
    new_Temp.write(new_image.getvalue())
    # NOTE(review): the temp file is not seek(0)'d after the write — presumably
    # downstream save logic rewinds it; verify against the form's caller.
    self.cleaned_data['picture'] = new_Temp
    return self.cleaned_data['picture']
def test_process_jar_on_temporary_uploaded_file(self):
    """process_jar() reads manifest data out of a TemporaryUploadedFile."""
    jar_bytes = io.BytesIO()
    with zipfile.ZipFile(jar_bytes, mode='w') as archive:
        archive.writestr('META-INF/MANIFEST.MF', TEST_VALID_MANIFEST_ONE)
    testjarfile = TemporaryUploadedFile('foo.jar', 'application/java-archive', 100, 0)
    # Write the fake jar directly to the backing temp file on disk.
    with open(testjarfile.temporary_file_path(), 'wb') as backing:
        backing.write(jar_bytes.getvalue())
    (a_name, a_ver, a_works,
     a_dep, has_exp) = processjar.process_jar(testjarfile, 'CyCommunityDetectionTest')
    self.assertEqual('CyCommunityDetectionTest', a_name)
    self.assertEqual('1.11.0', a_ver)
    self.assertEqual('3.7', a_works)
    self.assertEqual([], a_dep)
    self.assertEqual(True, has_exp)
def handle(self, *args, **options):
    """Re-download every UserInsurance COI file and re-save it under a
    unique name of the form '<stem>-<uuid>.<ext>'.

    Rows whose download does not return HTTP 200 are skipped.
    """
    queryset = UserInsurance.objects.all()
    for data in queryset:
        if not data.coi:
            continue
        file_path = data.coi.url
        request = requests.get(file_path, stream=True)
        if request.status_code != requests.codes.ok:
            continue
        file_name = file_path.split('/')[-1]
        parts = file_name.split('.')
        # BUG FIX: the original reassigned `splited = splited[0]` (the stem)
        # and then used `splited[-1]`, which is the stem's LAST CHARACTER,
        # not the file extension. Take the real extension instead.
        stem = parts[0]
        extension = parts[-1]
        file_name = stem + '-' + uuid.uuid4().hex + '.' + extension
        temp_file = TemporaryUploadedFile(file_name, 'byte', 1000, 'utf-8')
        for block in request.iter_content(1024 * 8):
            if not block:
                break
            temp_file.write(block)
        size = temp_file.tell()
        # BUG FIX: rewind before wrapping — otherwise the file position sits
        # at EOF and saving data.coi would persist zero bytes.
        temp_file.seek(0)
        data.coi = InMemoryUploadedFile(temp_file, None, file_name,
                                        'application/pdf', size, None)
        data.save()
def test_uploaded_file_name(self):
    """uploaded_file_name() yields a real on-disk path for both temp-file
    and in-memory uploads, and cleans up any file it materialized."""
    tmp_file = TemporaryUploadedFile('whatever', 'application/octet-stream', 0, 'utf-8')
    with utils.uploaded_file_name(tmp_file) as name:
        # Temp-backed uploads expose their existing backing file directly.
        self.assertEqual(name, tmp_file.file.name)  # assertEquals is deprecated
    mem_file = SimpleUploadedFile('whatever', 'hello42')
    with utils.uploaded_file_name(mem_file) as name:
        self.assertTrue(name.endswith('whatever'))
        # FIX: close the file handle deterministically instead of leaking
        # the handle from a bare open(...).read().
        with open(name, 'rb') as materialized:
            self.assertEqual(materialized.read(), 'hello42')
    # The materialized copy is removed once the context exits.
    self.assertFalse(os.path.exists(name))
def test_with_temporary_uploaded_file(self):
    """serialize_value() is stable for the same TemporaryUploadedFile."""
    temp_uploaded_file = TemporaryUploadedFile(
        name='test.jpg',
        content_type='image/jpeg',
        size=100,
        charset=None,
    )
    # Fill the backing temp file with 100 zero bytes.
    with open(temp_uploaded_file.temporary_file_path(), 'wb') as backing:
        backing.write(b'0' * 100)
    file_upload = FileUpload()
    first = file_upload.serialize_value(temp_uploaded_file)
    second = file_upload.serialize_value(temp_uploaded_file)
    self.assertEqual(first, second)
    uploaded_file_path = os.path.join(
        settings.MEDIA_ROOT,
        file_upload.generate_filename(temp_uploaded_file.name),
    )
    os.remove(uploaded_file_path)
class AjaxFileUploadSessionMiddleware(object):
    """Middleware that converts a raw application/octet-stream AJAX POST
    (FileDrop-style, with X-File-Name header) into request.FILES['file'].
    """

    chunk_size = 64 * 2 ** 10  # The default chunk size is 64 KB.

    def process_request(self, request):
        file_name = request.META.get('HTTP_X_FILE_NAME')
        self.uploaded_file = None
        # FIX: CONTENT_TYPE may be absent; `x in None` raises TypeError,
        # so fall back to an empty string.
        content_type = request.META.get('CONTENT_TYPE') or ''
        if ('application/octet-stream' in content_type
                and request.META.get('HTTP_X_REQUESTED_WITH') == 'XMLHttpRequest'
                and request.method == 'POST'
                and file_name):
            initial_size = request._stream.remaining
            self.uploaded_file = TemporaryUploadedFile(
                name=unquote(file_name),
                content_type='application/octet-stream',
                size=initial_size,
                charset=None)
            size = 0
            while True:
                chunk = request._stream.read(self.chunk_size)
                if not chunk:
                    break
                size += len(chunk)
                self.uploaded_file.write(chunk)
            if size != initial_size:
                # FIX: HttpResponseBadRequest is a response class, not an
                # exception — `raise` would itself raise TypeError. Returning
                # a response from process_request short-circuits the request.
                return HttpResponseBadRequest()
            self.uploaded_file.seek(0)
            self.uploaded_file.size = size
            request.FILES['file'] = self.uploaded_file
            request.POST = request.GET

    def process_response(self, request, response):
        # Best-effort cleanup of the temp file created in process_request.
        if hasattr(self, 'uploaded_file') and self.uploaded_file is not None:
            tmp_file_name = self.uploaded_file.file.name
            if os.path.exists(tmp_file_name):
                os.remove(tmp_file_name)
        return response
def get(self, key):
    """
    Regenerates a MultiValueDict instance containing the files related
    to all file states stored for the given key.

    Large payloads (above FILE_UPLOAD_MAX_MEMORY_SIZE) are wrapped in a
    TemporaryUploadedFile, smaller ones in an InMemoryUploadedFile.
    """
    files_states = self.backend.get(key)
    files = MultiValueDict()
    if files_states:
        for name, state in files_states.items():
            # One shared in-memory buffer per stored state.
            # FIX: the else-branch previously rebuilt an identical BytesIO
            # and rewrote the same content — dead duplicated work, removed.
            f = BytesIO()
            f.write(state['content'])
            # If the post is too large, we cannot use a
            # InMemoryUploadedFile instance.
            if state['size'] > settings.FILE_UPLOAD_MAX_MEMORY_SIZE:
                upload = TemporaryUploadedFile(
                    state['name'],
                    state['content_type'],
                    state['size'],
                    state['charset'],
                )
                upload.file = f
            else:
                upload = InMemoryUploadedFile(
                    file=f,
                    field_name=name,
                    name=state['name'],
                    content_type=state['content_type'],
                    size=state['size'],
                    charset=state['charset'],
                )
            files[name] = upload
            # Go to the first byte in the file for future use
            upload.file.seek(0)
    return files
def new_file(self, field_name, file_name, content_type, content_length, charset=None):
    """Record the incoming file's metadata and open a fresh temp file
    that subsequent chunks will be written into."""
    self.field_name = field_name
    self.file_name = file_name
    self.content_type = content_type
    self.content_length = content_length
    self.charset = charset
    # Size starts at 0 and is updated once the upload completes.
    self.file = TemporaryUploadedFile(self.file_name, self.content_type, 0, self.charset)
def create_customer_image(self, fp, anfexi): self.customer_image = self.create() # Process image (if the uploaded file has an image format) try: image = Image.open(fp) except IOError: # An IOError here means the file is not a recognized image format generic_file = TemporaryUploadedFile() extension = self._get_extension(fp) # Store the file as is and don't create a thumbnail self.customer_image.document.save( '{}-full.{}'.format( anfexi, extension ), ImageFile(generic_file) ) generic_file.close() return self.customer_image # Otherwise process images normally thumbnail, resize = self._process_image(image) # Write images on S3 thumbnail_file = self._make_temp_file(thumbnail.getvalue()) image_file = self._make_temp_file(resize.getvalue()) self.customer_image.thumbnail.save( '{}-thumbnail.png'.format(anfexi), ImageFile(thumbnail_file), save=False ) thumbnail_file.close() self.customer_image.document.save( '{}-full.png'.format(anfexi), ImageFile(image_file), save=False ) self.customer_image.save() image_file.close() return self.customer_image