def _handle_new_chunk_upload(self, request, upload_id, file_id):
    LOG.debug('Processing a new chunked upload request...')
    file_obj = self._get_file_obj(request)
    if file_obj != '{}':
        return Response('An invalid file object has been received '
                        'for a new chunked upload request.',
                        status=status.HTTP_400_BAD_REQUEST)

    ulen = request.META.get('HTTP_UPLOAD_LENGTH', None)
    if not ulen:
        return Response('No length for new chunked upload request.',
                        status=status.HTTP_400_BAD_REQUEST)
    LOG.debug('Handling a new chunked upload request for an upload '
              'with total length %s bytes' % (ulen))

    # Do some general checks to make sure that the storage location
    # exists and that we're not being made to try and store something
    # outside the base storage location. Then create the new
    # temporary directory into which chunks will be stored.
    base_loc = storage.base_location
    chunk_dir = os.path.abspath(os.path.join(base_loc, upload_id))
    if not chunk_dir.startswith(base_loc):
        return Response('Unable to create storage for upload data.',
                        status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    if os.path.exists(base_loc):
        try:
            os.makedirs(chunk_dir, exist_ok=False)
        except OSError as e:
            LOG.debug('Unable to create chunk storage dir: %s' % (str(e)))
            return Response('Unable to prepare storage for upload data.',
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    else:
        LOG.debug('The base data store location <%s> doesn\'t exist. '
                  'Unable to create chunk dir.' % (base_loc))
        return Response('Data storage error occurred.',
                        status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    # We now create the temporary chunked upload object; this will be
    # updated as we receive the chunks.
    tuc = TemporaryUploadChunked(upload_id=upload_id, file_id=file_id,
                                 upload_dir=upload_id, total_size=ulen,
                                 uploaded_by=_get_user(request))
    tuc.save()

    return Response(upload_id, status=status.HTTP_200_OK,
                    content_type='text/plain')
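# A minimal client-side sketch of the handshake this handler expects,
# based on the checks above: the client initiates a chunked upload by
# POSTing the literal string '{}' as the file value together with an
# Upload-Length header (surfaced as HTTP_UPLOAD_LENGTH in request.META),
# and the plain-text response body is the upload_id used for subsequent
# chunk requests. The endpoint URL and the 'filepond' field name below
# are assumptions for illustration only.
import requests

def start_chunked_upload(base_url, total_size):
    # Without the '{}' file value and the Upload-Length header the
    # handler above responds with HTTP 400.
    resp = requests.post('%s/fp/process/' % base_url,  # assumed endpoint
                         data={'filepond': '{}'},      # assumed field name
                         headers={'Upload-Length': str(total_size)})
    resp.raise_for_status()
    return resp.text  # the upload_id to use when sending chunks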
def handle_upload(self, request, upload_id, file_id):
    # Since the upload_id and file_id are being provided here as
    # parameters, we check that they are valid. This should be done by
    # the DB and an error would be generated in the tu.save() call
    # below; however, SQLite doesn't handle field length validation so
    # this won't be picked up when using SQLite.
    if ((not self._file_id_valid(file_id)) or
            (not self._upload_id_valid(upload_id))):
        return Response('Invalid ID for handling upload.',
                        content_type='text/plain',
                        status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    file_obj = self._get_file_obj(request)

    # The type of the parsed data should be a descendant of the
    # UploadedFile type.
    if not isinstance(file_obj, UploadedFile):
        raise ParseError('Invalid data type has been parsed.')

    # Save the original file name and set the name of the saved file
    # to the unique file ID.
    upload_filename = file_obj.name
    file_obj.name = file_id

    # Before we attempt to save the file, make sure that the upload
    # directory we're going to save to exists.
    # *** It's not necessary to explicitly create the directory since
    # *** the FileSystemStorage object creates the directory on save.
    # if not os.path.exists(storage.location):
    #     LOG.debug('Filepond app: Creating file upload directory '
    #               '<%s>...' % storage.location)
    #     os.makedirs(storage.location, mode=0o700)

    LOG.debug('About to store uploaded temp file with filename: %s'
              % (upload_filename))

    # We now need to create the temporary upload object and store the
    # file and metadata.
    tu = TemporaryUpload(upload_id=upload_id, file_id=file_id,
                         file=file_obj, upload_name=upload_filename,
                         upload_type=TemporaryUpload.FILE_DATA,
                         uploaded_by=_get_user(request))
    tu.save()

    return Response(upload_id, status=status.HTTP_200_OK,
                    content_type='text/plain')
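# The _file_id_valid and _upload_id_valid helpers called above are not
# shown in this excerpt. A minimal sketch of what such checks could
# look like, assuming the IDs are 22-character alphanumeric
# (shortuuid-style) strings; this is an illustration, not necessarily
# the library's actual implementation. They are written here as plain
# functions, whereas in the view class they would be methods taking self.
import re

_ID_PATTERN = re.compile(r'^[A-Za-z0-9]{22}$')

def _upload_id_valid(upload_id):
    # Fail fast on malformed IDs rather than relying on the database,
    # which (per the comment above) SQLite won't enforce on save.
    return bool(_ID_PATTERN.match(upload_id))

def _file_id_valid(file_id):
    return bool(_ID_PATTERN.match(file_id))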
def head(self, request):
    LOG.debug('Filepond API: Fetch view HEAD called...')
    result = self._process_request(request)
    if isinstance(result, tuple):
        buf, file_id, upload_file_name, content_type = result
    elif isinstance(result, Response):
        return result
    else:
        raise ValueError('process_request result is of an unexpected '
                         'type')

    file_size = buf.seek(0, os.SEEK_END)
    buf.seek(0)

    # The addressing of filepond issue #154
    # (https://github.com/pqina/filepond/issues/154) means that fetch
    # can now store a file downloaded from a remote URL and return the
    # file metadata in the headers if a HEAD request is received. If we
    # get a GET request then the standard approach of proxying the file
    # back to the client is used.
    upload_id = _get_file_id()
    memfile = InMemoryUploadedFile(buf, None, file_id, content_type,
                                   file_size, None)
    tu = TemporaryUpload(upload_id=upload_id, file_id=file_id,
                         file=memfile, upload_name=upload_file_name,
                         upload_type=TemporaryUpload.URL,
                         uploaded_by=_get_user(request))
    tu.save()

    response = Response(status=status.HTTP_200_OK)
    response['Content-Type'] = content_type
    response['Content-Length'] = file_size
    response['X-Content-Transfer-Id'] = upload_id
    response['Content-Disposition'] = ('inline; filename=%s'
                                       % upload_file_name)
    return response
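# A short usage sketch of the HEAD behaviour above: the client issues a
# HEAD request naming a remote URL and reads the stored file's metadata
# from the response headers, including the X-Content-Transfer-Id that
# identifies the TemporaryUpload created server-side. The endpoint URL
# and the query parameter name are assumptions for illustration.
import requests

def fetch_remote_file_metadata(base_url, remote_url):
    resp = requests.head('%s/fp/fetch/' % base_url,      # assumed endpoint
                         params={'target': remote_url})  # assumed param
    resp.raise_for_status()
    return {'upload_id': resp.headers['X-Content-Transfer-Id'],
            'size': int(resp.headers['Content-Length']),
            'type': resp.headers['Content-Type']}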
def test_get_user_anonymous(self):
    req = MagicMock(spec=Request)
    req.user = AnonymousUser()
    u = _get_user(req)
    self.assertEqual(u, None, 'Anonymous user not handled correctly.')
def test_get_user_regular(self):
    req = MagicMock(spec=Request)
    req.user = User(username='test_user')
    u = _get_user(req)
    self.assertEqual(u.username, 'test_user', 'Incorrect user returned.')
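# The two tests above pin down the contract of _get_user: an
# AnonymousUser maps to None (so uploaded_by can be stored as a null
# foreign key), while an authenticated User is returned as-is. A
# minimal sketch that satisfies both tests, offered as an assumption
# rather than the library's actual implementation:
from django.contrib.auth.models import AnonymousUser

def _get_user(request):
    upload_user = getattr(request, 'user', None)
    if isinstance(upload_user, AnonymousUser):
        # Anonymous requests carry no storable user object.
        upload_user = None
    return upload_user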