class LocalUploadBackend(AbstractUploadBackend):
    """Streams uploads into MEDIA_ROOT under a (strftime-expanded) UPLOAD_DIR."""

    # TODO: allow this to be overridden per-widget/view
    UPLOAD_DIR = ajaxuploader_settings.UPLOAD_DIRECTORY

    def setup(self, filename):
        """Compute the destination path and open it for buffered writing."""
        # UPLOAD_DIR may contain strftime placeholders (%Y, %s, etc.): expand
        # them against the current time before joining with the filename.
        self._relative_path = os.path.normpath(
            os.path.join(
                force_unicode(
                    datetime.datetime.now().strftime(
                        smart_str(self.UPLOAD_DIR))),
                filename))
        self._path = os.path.join(settings.MEDIA_ROOT, self._relative_path)
        try:
            os.makedirs(os.path.realpath(os.path.dirname(self._path)))
        except OSError:
            # Directory already exists; any other failure surfaces when the
            # destination file is opened below.  (Was a bare except.)
            pass
        self._dest = BufferedWriter(FileIO(self._path, "w"))

    def upload_chunk(self, chunk):
        """Append one chunk of the incoming stream to the destination file."""
        self._dest.write(chunk)

    def upload_complete(self, request, filename):
        """Close the destination and report its MEDIA_ROOT-relative path."""
        self._dest.close()
        return {"path": self._relative_path}

    def update_filename(self, request, filename):
        """Sanitize the client-supplied filename before it is used on disk."""
        return ajaxuploader_settings.SANITIZE_FILENAME(filename)
class PANDAUploadBackend(AbstractUploadBackend):
    """
    Customized backend to handle AJAX uploads.
    """

    def update_filename(self, request, filename):
        """
        Verify that the filename is unique, if it isn't append and iterate
        a counter until it is.
        """
        self._original_filename = filename
        filename = self._original_filename
        root, ext = os.path.splitext(self._original_filename)
        path = os.path.join(settings.MEDIA_ROOT, filename)
        i = 1
        while os.path.exists(path):
            filename = '%s%i%s' % (root, i, ext)
            path = os.path.join(settings.MEDIA_ROOT, filename)
            i += 1
        return filename

    def setup(self, filename):
        """
        Open the destination file for writing.
        """
        self._path = os.path.join(settings.MEDIA_ROOT, filename)
        try:
            os.makedirs(os.path.realpath(os.path.dirname(self._path)))
        except OSError:
            # Directory already exists.  (Was a bare except.)
            pass
        self._dest = BufferedWriter(FileIO(self._path, "w"))

    def upload_chunk(self, chunk):
        """
        Write a chunk of data to the destination.
        """
        self._dest.write(chunk)

    def upload_complete(self, request, filename):
        """
        Close the destination file and create an Upload object in the
        database recording its existence.
        """
        self._dest.close()
        path = os.path.join(settings.MEDIA_ROOT, filename)
        size = os.path.getsize(path)
        # (Removed an unused "root, ext = os.path.splitext(filename)".)
        upload = Upload.objects.create(
            filename=filename,
            original_filename=self._original_filename,
            size=size)
        return {'id': upload.id}
class AbstractUploadBackend(object):
    """Base class for AJAX upload backends: name mangling, chunked writes and
    size enforcement."""

    BUFFER_SIZE = 10485760  # 10MB

    def __init__(self, **kwargs):
        self._timedir = get_date_directory()
        # Allow callers to inject attributes (e.g. upload_dir, upload_size).
        self.__dict__.update(kwargs)

    def update_filename(self, request, filename):
        """Returns a new name for the file being uploaded."""
        # Keep the original name for upload_complete(); store under an MD5 of
        # the name so uploads cannot clash or inject path characters.
        self.oldname = filename
        ext = os.path.splitext(filename)[1]
        return md5(filename.encode('utf8')).hexdigest() + ext

    def upload_chunk(self, chunk):
        """Called when a string was read from the client, responsible
        for writing that string to the destination file."""
        self._dest.write(chunk)

    def max_size(self):
        """Return True (after discarding the partial file) if the upload
        exceeds self.upload_size."""
        if int(self._dest.tell()) > self.upload_size:
            self._dest.close()
            os.remove(self._path)
            return True
        return False  # explicit (previously fell through returning None)

    def upload(self, uploaded, filename, raw_data):
        """Stream the upload into the destination; True on success."""
        try:
            if raw_data:
                # File was uploaded via ajax, and is streaming in.
                chunk = uploaded.read(self.BUFFER_SIZE)
                while len(chunk) > 0:
                    self.upload_chunk(chunk)
                    if self.max_size():
                        return False
                    chunk = uploaded.read(self.BUFFER_SIZE)
            else:
                # File was uploaded via a POST, and is here.
                for chunk in uploaded.chunks():
                    self.upload_chunk(chunk)
                    if self.max_size():
                        return False
            return True
        except Exception:
            # Best-effort contract: any failure is reported as False rather
            # than propagating to the view.  (Was a bare except.)
            return False

    def setup(self, filename):
        """Open MEDIA_ROOT/<upload_dir>/<date>/<filename> for writing."""
        self._path = os.path.join(settings.MEDIA_ROOT, self.upload_dir,
                                  self._timedir, filename)
        try:
            os.makedirs(os.path.realpath(os.path.dirname(self._path)))
        except OSError:
            # Directory already exists.
            pass
        self._dest = BufferedWriter(FileIO(self._path, "w"))

    def upload_complete(self, request, filename):
        """Close the file and report its relative path plus original name."""
        path = self.upload_dir + "/" + self._timedir + "/" + filename
        self._dest.close()
        return {"path": path, 'oldname': self.oldname}
class LocalUploadBackend(AbstractUploadBackend):
    """Writes uploads to MEDIA_ROOT/<UPLOAD_DIR>, deduplicating filenames."""

    UPLOAD_DIR = getattr(settings, "UPLOAD_DIR", "uploads")

    def setup(self, filename, *args, **kwargs):
        self._path = os.path.join(settings.MEDIA_ROOT,
                                  self.UPLOAD_DIR, filename)
        try:
            os.makedirs(os.path.realpath(os.path.dirname(self._path)))
        except OSError:
            # Directory already exists.  (Was a bare except.)
            pass
        self._dest = BufferedWriter(FileIO(self._path, "w"))

    def upload_chunk(self, chunk, *args, **kwargs):
        self._dest.write(chunk)

    def upload_complete(self, request, filename, *args, **kwargs):
        path = settings.MEDIA_URL + self.UPLOAD_DIR + "/" + filename
        self._dest.close()
        return {"path": path}

    def update_filename(self, request, filename, *args, **kwargs):
        """
        Returns a new name for the file being uploaded.
        Ensure file with name doesn't exist, and if it does, create a unique
        filename to avoid overwriting.
        """
        # basename strips any client-supplied directory components.
        filename = os.path.basename(filename)
        self._dir = os.path.join(settings.MEDIA_ROOT, self.UPLOAD_DIR)
        # Probe with os.path.isfile rather than open() so that no file
        # handles are leaked (the original opened files without closing them).
        if not os.path.isfile(os.path.join(self._dir, filename)):
            return filename
        filename_no_extension, extension = os.path.splitext(filename)
        filename_suffix = 1
        while os.path.isfile(os.path.join(
                self._dir,
                filename_no_extension + str(filename_suffix) + extension)):
            filename_suffix += 1
        return filename_no_extension + str(filename_suffix) + extension

    @property
    def path(self):
        """
        Return a path of file uploaded
        """
        return self._path
def chunked_download(node_id: str, file: io.BufferedWriter, **kwargs):
    """Download a node's content in CHUNK_SIZE-ranged requests into *file*.

    Keyword args:
        offset: byte offset to start from
        length: total length, equal to end - 1
        write_callback: called with each chunk after it is written
    Raises RequestError after CHUNK_MAX_RETRY failures of one chunk, or on a
    connection/read timeout.
    """
    ok_codes = [http.PARTIAL_CONTENT]
    write_callback = kwargs.get('write_callback', None)
    # Effectively unbounded default for unknown-length downloads (100 TiB).
    length = kwargs.get('length', 100 * 1024 ** 4)
    pgo = progress.Progress()
    chunk_start = kwargs.get('offset', 0)
    retries = 0
    while chunk_start < length:
        chunk_end = chunk_start + CHUNK_SIZE - 1
        if chunk_end >= length:
            chunk_end = length - 1
        if retries >= CHUNK_MAX_RETRY:
            raise RequestError(RequestError.CODE.FAILED_SUBREQUEST,
                               '[acd_cli] Downloading chunk failed multiple times.')
        r = BackOffRequest.get(get_content_url() + 'nodes/' + node_id + '/content',
                               stream=True, acc_codes=ok_codes,
                               headers={'Range': 'bytes=%d-%d' % (chunk_start, chunk_end)})
        logger.debug('Range %d-%d' % (chunk_start, chunk_end))
        # this should only happen at the end of unknown-length downloads
        if r.status_code == http.REQUESTED_RANGE_NOT_SATISFIABLE:
            logger.debug('Invalid byte range requested %d-%d' % (chunk_start, chunk_end))
            break
        if r.status_code not in ok_codes:
            r.close()
            retries += 1
            # BUGFIX: arguments were previously ordered (retries, start, end)
            # against a "[%d-%d], retry %d" format; also use the module logger
            # instead of the root "logging" module.
            logger.debug('Chunk [%d-%d], retry %d.' % (chunk_start, chunk_end, retries))
            continue
        try:
            curr_ln = 0
            for chunk in r.iter_content(chunk_size=FS_RW_CHUNK_SZ):
                if chunk:  # filter out keep-alive new chunks
                    file.write(chunk)
                    file.flush()
                    if write_callback:
                        write_callback(chunk)
                    curr_ln += len(chunk)
                pgo.print_progress(length, curr_ln + chunk_start)
            chunk_start += CHUNK_SIZE
            retries = 0
            r.close()
        except (ConnectionError, ReadTimeoutError) as e:
            file.close()
            raise RequestError(RequestError.CODE.READ_TIMEOUT,
                               '[acd_cli] Timeout. ' + e.__str__())
    print()  # break progress line
    return
class LocalUploadBackend(AbstractUploadBackend):
    """Local upload backend for MEDIA_ROOT/<UPLOAD_DIR> with collision-safe
    filenames."""

    UPLOAD_DIR = getattr(settings, "UPLOAD_DIR", "uploads")

    def setup(self, filename, *args, **kwargs):
        self._path = os.path.join(
            settings.MEDIA_ROOT, self.UPLOAD_DIR, filename)
        try:
            os.makedirs(os.path.realpath(os.path.dirname(self._path)))
        except OSError:
            # Directory already exists.  (Was a bare except.)
            pass
        self._dest = BufferedWriter(FileIO(self._path, "w"))

    def upload_chunk(self, chunk, *args, **kwargs):
        self._dest.write(chunk)

    def upload_complete(self, request, filename, *args, **kwargs):
        path = settings.MEDIA_URL + self.UPLOAD_DIR + "/" + filename
        self._dest.close()
        return {"path": path}

    def update_filename(self, request, filename, *args, **kwargs):
        """
        Returns a new name for the file being uploaded.
        Ensure file with name doesn't exist, and if it does, create a unique
        filename to avoid overwriting.
        """
        filename = os.path.basename(filename)
        self._dir = os.path.join(
            settings.MEDIA_ROOT, self.UPLOAD_DIR)
        # os.path.isfile replaces the old open() probes, which leaked one
        # file handle per existence check.
        if not os.path.isfile(os.path.join(self._dir, filename)):
            return filename
        filename_no_extension, extension = os.path.splitext(filename)
        filename_suffix = 1
        while os.path.isfile(os.path.join(
                self._dir,
                filename_no_extension + str(filename_suffix) + extension)):
            filename_suffix += 1
        return filename_no_extension + str(filename_suffix) + extension

    @property
    def path(self):
        """
        Return a path of file uploaded
        """
        return self._path
class LocalUploadBackend(AbstractUploadBackend):
    """Writes AJAX uploads to MEDIA_ROOT/uploads, deduplicating filenames."""

    UPLOAD_DIR = "uploads"

    def setup(self, filename):
        self._path = os.path.join(
            settings.MEDIA_ROOT, self.UPLOAD_DIR, filename)
        try:
            os.makedirs(os.path.realpath(os.path.dirname(self._path)))
        except OSError:
            # Directory already exists.  (Was a bare except.)
            pass
        self._dest = BufferedWriter(FileIO(self._path, "w"))

    def upload_chunk(self, chunk):
        self._dest.write(chunk)

    def upload_complete(self, request, filename):
        path = settings.MEDIA_URL + self.UPLOAD_DIR + "/" + filename
        self._dest.close()
        return {"path": path}

    def update_filename(self, request, filename):
        """
        Returns a new name for the file being uploaded.
        Ensure file with name doesn't exist, and if it does, create a unique
        filename to avoid overwriting.
        """
        # Python-2-only debug "print" statements removed; open() probes
        # (which leaked file handles) replaced with os.path.isfile.
        self._dir = os.path.join(
            settings.MEDIA_ROOT, self.UPLOAD_DIR)
        if not os.path.isfile(os.path.join(self._dir, filename)):
            return filename
        filename_no_extension, extension = os.path.splitext(filename)
        filename_suffix = 1
        while os.path.isfile(os.path.join(
                self._dir,
                filename_no_extension + str(filename_suffix) + extension)):
            filename_suffix += 1
        return filename_no_extension + str(filename_suffix) + extension
class PANDAAbstractUploadBackend(AbstractUploadBackend):
    """
    Customized backend to handle AJAX uploads.
    """

    def update_filename(self, request, filename):
        """
        Verify that the filename is unique, if it isn't append and iterate
        a counter until it is.
        """
        self._original_filename = filename
        filename = self._original_filename
        root, ext = os.path.splitext(self._original_filename)
        path = os.path.join(settings.MEDIA_ROOT, filename)
        i = 1
        while os.path.exists(path):
            filename = '%s%i%s' % (root, i, ext)
            path = os.path.join(settings.MEDIA_ROOT, filename)
            i += 1
        return filename

    def setup(self, filename):
        """
        Open the destination file for writing.
        """
        self._path = os.path.join(settings.MEDIA_ROOT, filename)
        try:
            os.makedirs(os.path.realpath(os.path.dirname(self._path)))
        except OSError:
            # Directory already exists.  (Was a bare except.)
            pass
        self._dest = BufferedWriter(FileIO(self._path, "w"))

    def upload_chunk(self, chunk):
        """
        Write a chunk of data to the destination.
        """
        self._dest.write(chunk)

    def upload_complete(self, request, filename):
        """
        Close the destination file.
        """
        self._dest.close()
def write_to_stream(self, writer_stream: BufferedWriter):
    """Copy the referenced result file into writer_stream, then close it.

    Raises AttributeError when this FileRef does not point to an operation
    result.
    """
    # Guard clause instead of the original if/else nesting.
    if not self._is_operation_result():
        self._logger.error(
            "Invalid use of write_to_stream(). Method invoked on FileRef instance which does not point to an operation "
            "result")
        raise AttributeError(
            "Method write_to_stream() only allowed on operation results")
    # Log before streaming (was logged only after the stream was closed).
    self._logger.info(
        "Writing file at {tmp_file_path} to writer stream".format(
            tmp_file_path=self._file_path))
    with open(self._file_path, "rb", -1) as file:
        while True:
            buffer = file.read(io.DEFAULT_BUFFER_SIZE)
            writer_stream.write(buffer)
            # A short read means EOF was reached.
            if len(buffer) < io.DEFAULT_BUFFER_SIZE:
                break
    # NOTE(review): closing the caller's stream here mirrors the original
    # behavior -- confirm callers rely on it.
    writer_stream.close()
class LocalUploadBackend(AbstractUploadBackend):
    """Minimal backend: write uploads to MEDIA_ROOT/uploads as-is."""

    UPLOAD_DIR = "uploads"

    def setup(self, filename):
        """Create the upload directory if needed and open the destination."""
        self._path = os.path.join(
            settings.MEDIA_ROOT, self.UPLOAD_DIR, filename)
        try:
            os.makedirs(os.path.realpath(os.path.dirname(self._path)))
        except OSError:
            # Directory already exists.  (Was a bare except.)
            pass
        self._dest = BufferedWriter(FileIO(self._path, "w"))

    def upload_chunk(self, chunk):
        self._dest.write(chunk)

    def upload_complete(self, request, filename, **kwargs):
        """Close the file and return its URL path."""
        path = settings.MEDIA_URL + self.UPLOAD_DIR + "/" + filename
        self._dest.close()
        return {"path": path}
class LocalUploadBackend(AbstractUploadBackend):
    """Minimal backend writing uploads to MEDIA_ROOT/uploads."""

    UPLOAD_DIR = "uploads"

    def setup(self, filename):
        """Create the upload directory if needed and open the destination."""
        self._path = os.path.join(settings.MEDIA_ROOT,
                                  self.UPLOAD_DIR, filename)
        try:
            os.makedirs(os.path.realpath(os.path.dirname(self._path)))
        except OSError:
            # Directory already exists.  (Was a bare except.)
            pass
        self._dest = BufferedWriter(FileIO(self._path, "w"))

    def upload_chunk(self, chunk):
        self._dest.write(chunk)

    def upload_complete(self, request, filename):
        """Close the file and return its URL path."""
        path = settings.MEDIA_URL + self.UPLOAD_DIR + "/" + filename
        self._dest.close()
        return {"path": path}
class LocalUploadBackend(AbstractUploadBackend):
    """Stores AJAX uploads under UPLOAD_DIR via Django's default storage and
    renders a thumbnail snippet when the upload finishes."""

    UPLOAD_DIR = 'tmp'

    def update_filename(self, request, filename):
        """Slugify the base name, keeping the original extension intact."""
        name, ext = os.path.splitext(filename)
        return slughifi(name) + ext

    def setup(self, filename):
        """Reserve a unique storage path and open it for buffered writing."""
        self._path = os.path.join(self.UPLOAD_DIR, filename)
        # Let the storage backend allocate a unique name by saving an empty
        # file first, then stream the real content into its absolute path.
        self.path = default_storage.save(self._path, ContentFile(''))
        self._abs_path = default_storage.path(self.path)
        self._dest = BufferedWriter(FileIO(self._abs_path, "w"))

    def upload_chunk(self, chunk):
        """Write one chunk of the upload to the destination file."""
        self._dest.write(chunk)

    def upload_complete(self, request, filename):
        """Close the file and return its path plus a rendered thumbnail."""
        self._dest.close()
        context = {
            'thumbnail_path': self._path,
            'file_name': filename,
        }
        thumbnail = render_to_string(
            'ajaxupload/includes/thumbnail.html', context)
        return {"path": self._path, 'thumbnail': thumbnail}
class FileDocumentLocalUploadBackend(LocalUploadBackend):
    """Files uploads under a dated directory and creates a versioned
    FileDocument/ImageDocument item for each completed upload."""

    def upload_to(self):
        """Return the dated relative directory, e.g. filedocument/2024/01/31."""
        d = datetime.datetime.now()
        return d.strftime('filedocument/%Y/%m/%d')

    def setup(self, filename, *args, **kwargs):
        self._path = os.path.join(
            settings.MEDIA_ROOT, self.upload_to(), filename)
        try:
            os.makedirs(os.path.realpath(os.path.dirname(self._path)))
        except OSError:
            # Directory already exists.  (Was a bare except.)
            pass
        self._dest = BufferedWriter(FileIO(self._path, "w"))

    def upload_complete(self, request, filename, *args, **kwargs):
        """Close the file, create the document item, return path/url/name."""
        cur_agent = kwargs['cur_agent']
        permissions = kwargs['permissions']
        relative_path = self.upload_to() + "/" + filename
        full_path = settings.MEDIA_URL + relative_path
        name = filename
        # auto categorize images -- compare case-insensitively so ".JPG",
        # ".PNG" etc. are recognised too (previously case-sensitive).
        image_extensions = ('.jpg', '.jpeg', '.png', '.gif',)
        if filename.lower().endswith(image_extensions):
            new_item = ImageDocument(name=name, datafile=relative_path)
        else:
            new_item = FileDocument(name=name, datafile=relative_path)
        # link to item
        new_item.save_versioned(action_agent=cur_agent,
                                initial_permissions=permissions)
        self._dest.close()
        return {
            "path": full_path,
            "url": new_item.get_absolute_url(),
            "name": new_item.name,
        }
class MyBaseUploadBackend(AbstractUploadBackend):
    """Upload backend bound to a QuastSession; stores contigs files on disk
    and keeps database records of them."""

    def __init__(self, dirname, **kwargs):
        super(MyBaseUploadBackend, self).__init__(**kwargs)
        self.report_id = None

    def set_report_id(self, report_id):
        """Look up the QuastSession for report_id; True on success.
        Retries indefinitely on transient database errors."""
        self.report_id = report_id
        try_number = 1
        while True:
            try:
                self.quast_session = QuastSession.objects.get(report_id=self.report_id)
                return True
            except QuastSession.DoesNotExist:
                logger.error('No quast session with report_id=%s' % self.report_id)
                return False
            except OperationalError:
                logger.error(traceback.format_exc())
                try_number += 1
                logger.error('Retrying. Try number ' + str(try_number))

    def setup(self, filename):
        """Open the destination inside the session's contigs directory."""
        dirpath = self.quast_session.get_contigs_dirpath()
        logger.info('filename is %s' % filename)
        logger.info('contigs dirpath is %s' % dirpath)
        if not os.path.exists(dirpath):
            logger.error("contigs directory doesn't exist")
            return False
        fpath = os.path.join(dirpath, filename)
        self._path = fpath
        self._dest = BufferedWriter(FileIO(self._path, 'w'))
        return True

    def upload_chunk(self, chunk):
        self._dest.write(chunk)

    def upload_complete(self, request, filename):
        """Close the file and record it under a random 128-bit hex index."""
        self._dest.close()
        file_index = "%x" % random.getrandbits(128)
        c_fn = ContigsFile(fname=filename, file_index=file_index)
        c_fn.save()
        qc = QuastSession_ContigsFile(contigs_file=c_fn,
                                      quast_session=self.quast_session)
        qc.save()
        logger.info('%s' % filename)
        return {
            'file_index': file_index,
        }

    def update_filename(self, request, filename):
        """Append __2, __3, ... until the name is free in the contigs dir."""
        dirpath = self.quast_session.get_contigs_dirpath()
        logger.info('contigs dirpath is %s' % dirpath)
        fpath = os.path.join(dirpath, filename)
        logger.info('file path is %s' % fpath)
        i = 2
        base_fpath = fpath
        base_filename = filename
        while os.path.isfile(fpath):
            fpath = str(base_fpath) + '__' + str(i)
            filename = str(base_filename) + '__' + str(i)
            i += 1
        return filename

    def remove(self, request):
        """Remove the file named by request.GET['fileIndex'].
        Returns (success, message)."""
        if 'fileIndex' not in request.GET:
            logger.error('Request.GET must contain "fileIndex"')
            return False, 'Request.GET must contain "fileIndex"'
        file_index = request.GET['fileIndex']
        try:
            contigs_file = self.quast_session.contigs_files.get(file_index=file_index)
        except ContigsFile.DoesNotExist:
            # file_index is a string: %s (was %d, which raised TypeError).
            logger.error('No file with such index %s in this quast_session' % file_index)
            return False, 'No file with such index'
        success, msg = self.__remove(contigs_file)
        return success, msg

    def __remove(self, contigs_file):
        """Delete the file from disk and its database record;
        returns (success, message)."""
        fname = contigs_file.fname
        # file_index was previously undefined here, raising NameError in the
        # error branches below.
        file_index = contigs_file.file_index
        contigs_fpath = os.path.join(self.quast_session.get_contigs_dirpath(), fname)
        if os.path.isfile(contigs_fpath):
            try:
                os.remove(contigs_fpath)
            except IOError as e:
                # str(e) instead of the Python-2-only e.message.
                logger.error('IOError when removing "%s", fileIndex=%s": %s'
                             % (fname, file_index, e))
                return False, 'Cannot remove file'
        try:
            contigs_file.delete()
        except DatabaseError as e:
            logger.warn('DatabaseError when removing "%s", fileIndex=%s: %s'
                        % (fname, file_index, e))
            return False, 'Data base error when removing file'
        except Exception as e:
            logger.error('Exception when removing "%s", fileIndex=%s: %s'
                         % (fname, file_index, e))
            return False, 'Data base exception when removing file'
        return True, ''

    def remove_all(self, request):
        """Remove every contigs file attached to this quast session."""
        logger.info('uploader_backend.remove_all')
        for c_f in self.quast_session.contigs_files.all():
            success, msg = self.__remove(c_f)
        return True

    def get_uploads(self, request):
        """List the session's contigs files in the uploader's wire format."""
        contigs_files = self.quast_session.contigs_files.all()
        return [{"fileName": c_f.fname,
                 "fileIndex": c_f.file_index,
                 "file_index": c_f.file_index,
                 } for c_f in contigs_files]
class LocalUploadBackend(AbstractUploadBackend):
    """Upload backend for MEDIA_ROOT/uploads with a helper that produces
    display-sized copies of uploaded images."""

    UPLOAD_DIR = "uploads"

    def setup(self, filename, *args, **kwargs):
        self._path = os.path.join(
            settings.MEDIA_ROOT, self.UPLOAD_DIR, filename)
        try:
            os.makedirs(os.path.realpath(os.path.dirname(self._path)))
        except OSError:
            # Directory already exists.  (Was a bare except.)
            pass
        self._dest = BufferedWriter(FileIO(self._path, "w"))

    def upload_chunk(self, chunk, *args, **kwargs):
        self._dest.write(chunk)

    def upload_complete(self, request, filename, *args, **kwargs):
        path = settings.MEDIA_URL + self.UPLOAD_DIR + "/" + filename
        self._dest.close()
        return {"path": path}

    def update_filename(self, request, filename, *args, **kwargs):
        """
        Returns a new name for the file being uploaded.
        Ensure file with name doesn't exist, and if it does, create a unique
        filename to avoid overwriting.
        """
        self._dir = os.path.join(
            settings.MEDIA_ROOT, self.UPLOAD_DIR)
        # os.path.isfile replaces the old open() probes, which leaked one
        # file handle per existence check.
        if not os.path.isfile(os.path.join(self._dir, filename)):
            return filename
        filename_no_extension, extension = os.path.splitext(filename)
        filename_suffix = 1
        while os.path.isfile(os.path.join(
                self._dir,
                filename_no_extension + str(filename_suffix) + extension)):
            filename_suffix += 1
        return filename_no_extension + str(filename_suffix) + extension

    def resize_for_display(self, filename, width, height):
        """Create a downscaled copy ("<name>_displayed<ext>") when the image
        exceeds width x height; return its URL and the original size."""
        upload_dir_path = os.path.join(
            settings.MEDIA_ROOT, self.UPLOAD_DIR) + "/"
        original_path = upload_dir_path + filename
        filename_no_extension, extension = os.path.splitext(filename)
        need_ratio = float(width) / float(height)
        im = Image.open(original_path)
        real_width, real_height = [float(x) for x in im.size]
        real_ratio = real_width / real_height
        if real_width > width or real_height > height:
            # Fit inside the bounding box while preserving aspect ratio.
            if real_ratio > need_ratio:
                displayed_width = width
                displayed_height = int(width / real_ratio)
            else:
                displayed_height = height
                displayed_width = int(height * real_ratio)
            resized_im = im.resize((displayed_width, displayed_height))
            displayed_filename = '%s_displayed%s' % (filename_no_extension,
                                                     extension)
            resized_im.save(upload_dir_path + displayed_filename)
            displayed_path = (settings.MEDIA_URL + self.UPLOAD_DIR + "/" +
                              displayed_filename)
        else:
            displayed_path = (settings.MEDIA_URL + self.UPLOAD_DIR + "/" +
                              filename)
        return {'displayed_path': displayed_path, 'true_size': im.size}
def _close_stdout(self, writer: BufferedWriter): writer.close()
class LocalUploadBackend(AbstractUploadBackend):
    """Stores uploads directly under MEDIA_ROOT and creates a File record,
    attributing anonymous uploads to a default owner."""

    # The below key must be synchronized with the implementing project.
    # Used to store an array of unclaimed file_pks in the django session
    # so they can be claimed later when the anon user authenticates.
    SESSION_UNCLAIMED_FILES_KEY = KarmaSettings.SESSION_UNCLAIMED_FILES_KEY

    # When a file is uploaded anonymously, what username should we assign
    # ownership to?  This is important because File.save behavior will not
    # set awarded_karma to True until an owner is assigned who has
    # username != this
    DEFAULT_UPLOADER_USERNAME = KarmaSettings.DEFAULT_UPLOADER_USERNAME

    def setup(self, filename):
        self._path = os.path.join(settings.MEDIA_ROOT, filename)
        try:
            os.makedirs(os.path.realpath(os.path.dirname(self._path)))
        except OSError:
            # Directory already exists.  (Was a bare except.)
            pass
        self._dest = BufferedWriter(FileIO(self._path, "w"))

    def upload_chunk(self, chunk):
        self._dest.write(chunk)

    def upload(self, uploaded, filename, raw_data):
        """Stream the whole upload; True on success, False on any failure."""
        try:
            if raw_data:
                # File was uploaded via ajax, and is streaming in.
                chunk = uploaded.read(self.BUFFER_SIZE)
                while len(chunk) > 0:
                    self.upload_chunk(chunk)
                    chunk = uploaded.read(self.BUFFER_SIZE)
            else:
                # File was uploaded via a POST, and is here.
                for chunk in uploaded.chunks():
                    self.upload_chunk(chunk)
            return True
        except Exception:
            # Best-effort: report failure instead of propagating.
            return False

    def upload_complete(self, request, filename, upload):
        """Close the file, create the File record, remember unclaimed files
        in the session for anonymous users, and start async processing."""
        path = settings.MEDIA_URL + "/" + filename
        self._dest.close()
        self._dir = settings.MEDIA_ROOT
        # Avoid File.objects.create, as this will try to make
        # another file copy at FileField's 'upload_to' dir
        new_File = File()
        new_File.file = os.path.join(self._dir, filename)
        new_File.type = "N"  # This field was initially not allowed NULL
        if request.user.is_authenticated():
            new_File.owner = request.user
        else:
            new_File.owner, _created = User.objects.get_or_create(
                username=self.DEFAULT_UPLOADER_USERNAME)
        new_File.save()
        if not request.user.is_authenticated():
            # Track the upload so it can be claimed after login.
            # BUGFIX: the fallback branch previously wrote the literal key
            # 'unclaimed_files' instead of SESSION_UNCLAIMED_FILES_KEY, so
            # the two branches populated different session entries.
            if self.SESSION_UNCLAIMED_FILES_KEY in request.session:
                request.session[self.SESSION_UNCLAIMED_FILES_KEY].append(
                    new_File.pk)
            else:
                request.session[self.SESSION_UNCLAIMED_FILES_KEY] = [new_File.pk]
        # Asynchronously process document with Google Documents API
        tasks.process_document.delay(new_File)
        return {
            "path": path,
            "file_pk": new_File.pk,
            "file_url": new_File.get_absolute_url()
        }

    def update_filename(self, request, filename):
        """
        Returns a new name for the file being uploaded.
        Ensure file with name doesn't exist, and if it does, create a unique
        filename to avoid overwriting.
        """
        self._dir = settings.MEDIA_ROOT
        # os.path.isfile replaces the old open() probes, which leaked one
        # file handle per existence check.
        if not os.path.isfile(os.path.join(self._dir, filename)):
            return filename
        filename_no_extension, extension = os.path.splitext(filename)
        filename_suffix = 1
        while os.path.isfile(os.path.join(
                self._dir,
                filename_no_extension + str(filename_suffix) + extension)):
            filename_suffix += 1
        return filename_no_extension + str(filename_suffix) + extension
class AjaxUploader(object):
    """Handles qq-style AJAX uploads, optionally post-processing images
    (bounded thumbnail plus format conversion)."""

    BUFFER_SIZE = 10485760  # 10MB

    def __init__(self, filetype='file', upload_dir='files', size_limit=10485760):
        self._upload_dir = os.path.join(settings.MEDIA_ROOT, upload_dir,
                                        get_date_directory())
        self._filetype = filetype
        if filetype == 'image':
            self._save_format = setting('IMAGE_UPLOAD_FORMAT', 'JPEG')
        else:
            self._save_format = None
        self._size_limit = size_limit

    def max_size(self):
        """True (after discarding the partial file) if over the size limit."""
        if int(self._destination.tell()) > self._size_limit:
            self._destination.close()
            os.remove(self._path)
            return True
        return False

    def setup(self, filename):
        """Open a destination named by the MD5 of the original filename."""
        ext = os.path.splitext(filename)[1]
        self._filename = md5(filename.encode('utf8')).hexdigest() + ext
        self._path = os.path.join(self._upload_dir, self._filename)
        try:
            os.makedirs(os.path.realpath(os.path.dirname(self._path)))
        except OSError:
            # Directory already exists.  (Was a bare except.)
            pass
        self._destination = BufferedWriter(FileIO(self._path, "w"))

    def handle_upload(self, request):
        """Stream the request's file to disk and post-process images.
        Returns a dict with a success flag plus paths/filenames or an error
        message."""
        is_raw = True
        if request.FILES:
            is_raw = False
            if len(request.FILES) == 1:
                # list() so this works whether values() returns a list
                # (Python 2 Django) or a view (Python 3).
                upload = list(request.FILES.values())[0]
            else:
                return dict(success=False, error=_("Bad upload."))
            filename = upload.name
        else:
            # the file is stored raw in the request
            upload = request
            try:
                filename = request.GET['qqfile']
            except KeyError:
                return dict(success=False, error=_("Can't read file name"))
        self.setup(filename)
        try:
            if is_raw:
                # File was uploaded via ajax, and is streaming in.
                chunk = upload.read(self.BUFFER_SIZE)
                while len(chunk) > 0:
                    self._destination.write(chunk)
                    if self.max_size():
                        raise IOError
                    chunk = upload.read(self.BUFFER_SIZE)
            else:
                # File was uploaded via a POST, and is here.
                for chunk in upload.chunks():
                    self._destination.write(chunk)
                    if self.max_size():
                        raise IOError
        except Exception:
            # Covers both oversize uploads (IOError above) and read errors.
            return dict(success=False, error=_("Upload error"))
        self._destination.close()
        if self._filetype == 'image':
            try:
                i = Image.open(self._path)
            except Exception:
                os.remove(self._path)
                return dict(success=False, error=_("File is not image format"))
            f_name, f_ext = os.path.splitext(self._filename)
            f_without_ext = os.path.splitext(self._path)[0]
            new_path = ".".join([f_without_ext, self._save_format.lower()])
            if setting('IMAGE_STORE_ORIGINAL', False):
                # TODO need change the extension
                orig_path = ".".join([f_without_ext + '_orig',
                                      self._save_format.lower()])
                shutil.copy2(self._path, orig_path)
            # Bound the stored image to 1200x1200, preserving aspect ratio.
            i.thumbnail((1200, 1200), Image.ANTIALIAS)
            try:
                if self._path == new_path:
                    i.save(self._path, self._save_format)
                else:
                    i.save(new_path, self._save_format)
                    os.remove(self._path)
                    self._path = new_path
            except Exception:
                # Clean up whatever was written before reporting failure.
                try:
                    os.remove(self._path)
                    os.remove(new_path)
                except OSError:
                    pass
                return dict(success=False, error=_("Error saving image"))
            self._filename = ".".join([f_name, self._save_format.lower()])
        return dict(success=True,
                    fullpath=self._path,
                    path=os.path.relpath(self._path, '/' + settings.MEDIA_ROOT),
                    old_filename=filename,
                    filename=self._filename)
class LocalUploadBackend(AbstractUploadBackend):
    """Stores uploads under MEDIA_ROOT and creates a draft Note per upload."""

    def setup(self, filename):
        self._path = os.path.join(settings.MEDIA_ROOT, filename)
        try:
            os.makedirs(os.path.realpath(os.path.dirname(self._path)))
        except OSError:
            # Directory already exists.  (Was a bare except.)
            pass
        self._dest = BufferedWriter(FileIO(self._path, "w"))

    def upload_chunk(self, chunk):
        self._dest.write(chunk)

    def upload(self, uploaded, filename, raw_data):
        """
        :raw_data: is 0/1
        """
        try:
            if raw_data:
                # File was uploaded via ajax, and is streaming in.
                chunk = uploaded.read(self.BUFFER_SIZE)
                while len(chunk) > 0:
                    self.upload_chunk(chunk)
                    chunk = uploaded.read(self.BUFFER_SIZE)
            else:
                # File was uploaded via a POST, and is here.
                for chunk in uploaded.chunks():
                    self.upload_chunk(chunk)
            return True
        except Exception:
            # Best-effort: report failure rather than propagating.
            return False

    def upload_complete(self, request, filename, upload):
        """Close the file, create a draft Note for it, remember its pk in the
        session and schedule async processing.  Returns the note's URL."""
        self._dest.close()
        self._dir = settings.MEDIA_ROOT
        # (Removed an unused "path" local and Python-2-only debug prints.)
        # Avoid File.objects.create, as this will try to make
        # another file copy at FileField's 'upload_to' dir
        note = Note()
        note.name = filename
        note.note_file = os.path.join(self._dir, filename)
        note.course_id = request.GET['course_id']
        note.draft = True  # Pending approval from user
        note.save()
        # FIXME: Make get or create
        if 'uploaded_files' in request.session:
            request.session['uploaded_files'].append(note.pk)
        else:
            request.session['uploaded_files'] = [note.pk]
        # Asynchronously process document with Google Documents API
        tasks.process_document.delay(note)
        return {'note_url': note.get_absolute_url()}

    def update_filename(self, request, filename):
        """
        Returns a new name for the file being uploaded.
        Ensure file with name doesn't exist, and if it does, create a unique
        filename to avoid overwriting.
        """
        self._dir = settings.MEDIA_ROOT
        # os.path.isfile replaces the old open() probes, which leaked one
        # file handle per existence check.
        if not os.path.isfile(os.path.join(self._dir, filename)):
            return filename
        filename_no_extension, extension = os.path.splitext(filename)
        filename_suffix = 1
        while os.path.isfile(os.path.join(
                self._dir,
                filename_no_extension + str(filename_suffix) + extension)):
            filename_suffix += 1
        return filename_no_extension + str(filename_suffix) + extension
class LocalUploadBackend(AbstractUploadBackend):
    """Appends chunked uploads under tracker/media, storing files under an
    MD5-hashed name derived from the original filename."""

    UPLOAD_DIR = ""

    def setup(self, filename, *args, **kwargs):
        # NOTE(review): writes under the hard-coded 'tracker/media' while
        # update_filename() probes settings.MEDIA_ROOT -- confirm these
        # resolve to the same directory.
        self._path = os.path.join('tracker/media', self.UPLOAD_DIR, filename)
        try:
            os.makedirs(os.path.realpath(os.path.dirname(self._path)))
        except OSError:
            # Directory already exists.  (Was a bare except.)
            pass
        # Append mode: successive parts of a chunked upload accumulate.
        self._dest = BufferedWriter(FileIO(self._path, "a"))

    def upload_chunk(self, chunk, *args, **kwargs):
        self._dest.write(chunk)

    def upload_complete(self, request, filename, *args, **kwargs):
        path = os.path.join(self.UPLOAD_DIR, filename)
        self._dest.close()
        return {"path": path}

    def update_filename(self, request, filename, *args, **kwargs):
        """
        Returns a new name for the file being uploaded.
        Ensure file with name doesn't exist, and if it does, create a unique
        filename to avoid overwriting.
        """
        self._dir = os.path.join(settings.MEDIA_ROOT, self.UPLOAD_DIR)
        # Always store under an MD5 of the original name (keeping the
        # extension) to sidestep filesystem-encoding issues.  Only the hash
        # input is encoded -- the original rebound `filename` to bytes and
        # then applied str operations, which breaks under Python 3.
        ext = filename.split('.')[-1]
        digest = hashlib.new('md5')  # renamed from 'hash' (shadowed builtin)
        digest.update(filename.encode('utf-8'))
        filename = '{}.{}'.format(digest.hexdigest(), ext)
        filename_suffix = 0
        # os.path.isfile replaces the old open() probes, which leaked one
        # file handle per existence check.
        if os.path.isfile(os.path.join(self._dir, filename)):
            filename_no_extension, extension = os.path.splitext(filename)
            filename_suffix = 1
            while os.path.isfile(os.path.join(
                    self._dir,
                    filename_no_extension + str(filename_suffix) + extension)):
                filename_suffix += 1
        if filename_suffix == 0:
            return filename
        # For continuation parts of a chunked upload, reuse the name of the
        # part already on disk (suffix - 1) so that data is appended to it.
        if not kwargs['first_part']:
            filename_suffix = filename_suffix - 1
        if filename_suffix == 0:
            filename_suffix = ''
        return filename_no_extension + str(filename_suffix) + extension
class BinaryWriter:
    """Serialises binary data onto a stream.

    When no stream is supplied, an in-memory ``BytesIO`` buffer is created,
    so the writer doubles as a "memory stream". All numbers are encoded
    little endian, as required by MTProto
    (https://core.telegram.org/mtproto).
    """

    def __init__(self, stream=None):
        self.writer = BufferedWriter(stream or BytesIO())
        self.written_count = 0

    # region Writing

    def write(self, data):
        """Writes the given bytes array, keeping the byte counter current."""
        self.writer.write(data)
        self.written_count += len(data)

    def write_byte(self, value):
        """Writes a single byte value."""
        self.write(pack('B', value))

    def write_int(self, value, signed=True):
        """Writes a 4-byte integer, signed unless told otherwise."""
        self.write(int.to_bytes(value, length=4, byteorder='little',
                                signed=signed))

    def write_long(self, value, signed=True):
        """Writes an 8-byte integer, signed unless told otherwise."""
        self.write(int.to_bytes(value, length=8, byteorder='little',
                                signed=signed))

    def write_float(self, value):
        """Writes a 4-byte floating point value."""
        self.write(pack('<f', value))

    def write_double(self, value):
        """Writes an 8-byte floating point value."""
        self.write(pack('<d', value))

    def write_large_int(self, value, bits, signed=True):
        """Writes an arbitrary-width (``bits``-bit) integer."""
        self.write(int.to_bytes(value, length=bits // 8, byteorder='little',
                                signed=signed))

    # endregion

    # region Telegram custom writing

    def tgwrite_bytes(self, data):
        """Writes a byte blob using Telegram's length-prefix/padding rules."""
        length = len(data)
        if length < 254:
            # One length byte; pad (prefix + payload) to a multiple of 4.
            header = bytes([length])
            padding = -(length + 1) % 4
        else:
            # 0xFE marker plus 3-byte little-endian length; pad the payload
            # itself to a multiple of 4.
            header = bytes([254,
                            length % 256,
                            (length >> 8) % 256,
                            (length >> 16) % 256])
            padding = -length % 4
        self.write(header)
        self.write(data)
        self.write(bytes(padding))

    def tgwrite_string(self, string):
        """Writes a string using Telegram's UTF-8 byte-blob encoding."""
        self.tgwrite_bytes(string.encode('utf-8'))

    def tgwrite_bool(self, boolean):
        """Writes a boolean as the boolTrue/boolFalse constructor ID."""
        self.write_int(0x997275b5 if boolean else 0xbc799737, signed=False)

    def tgwrite_date(self, datetime):
        """Writes a Python datetime as 4-byte Unix time (0 when None)."""
        value = 0 if datetime is None else int(datetime.timestamp())
        self.write_int(value)

    def tgwrite_object(self, tlobject):
        """Writes a Telegram object via its own on_send serialiser."""
        tlobject.on_send(self)

    def tgwrite_vector(self, vector):
        """Writes a vector of Telegram objects."""
        self.write_int(0x1cb5c415, signed=False)  # Vector's constructor ID
        self.write_int(len(vector))
        for item in vector:
            self.tgwrite_object(item)

    # endregion

    def flush(self):
        """Flushes the underlying stream so pending writes become visible."""
        self.writer.flush()

    def close(self):
        """Closes the underlying stream."""
        self.writer.close()

    def get_bytes(self, flush=True):
        """Returns the buffer's current contents, flushing first by default."""
        if flush:
            self.writer.flush()
        return self.writer.raw.getvalue()

    def get_written_bytes_count(self):
        """Returns how many bytes this writer has emitted; this may differ
        from the stream length when an external stream was supplied."""
        return self.written_count

    # with block
    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
class LocalUploadBackend(AbstractUploadBackend):
    """Upload backend storing files below ``MEDIA_ROOT/uploads`` and able to
    produce a bounded-size "display" copy of uploaded images."""

    UPLOAD_DIR = "uploads"

    def setup(self, filename, *args, **kwargs):
        """Open the destination file for writing, creating its directory tree."""
        self._path = os.path.join(settings.MEDIA_ROOT, self.UPLOAD_DIR,
                                  filename)
        try:
            os.makedirs(os.path.realpath(os.path.dirname(self._path)))
        except OSError:
            # Directory already exists; FileIO below surfaces real errors.
            # (Was a bare ``except:`` that hid every failure.)
            pass
        self._dest = BufferedWriter(FileIO(self._path, "w"))

    def upload_chunk(self, chunk, *args, **kwargs):
        """Write one chunk of the incoming stream to the destination file."""
        self._dest.write(chunk)

    def upload_complete(self, request, filename, *args, **kwargs):
        """Close the destination and return its public (MEDIA_URL) path."""
        path = settings.MEDIA_URL + self.UPLOAD_DIR + "/" + filename
        self._dest.close()
        return {"path": path}

    def update_filename(self, request, filename, *args, **kwargs):
        """
        Returns a new name for the file being uploaded.

        If a file with that name already exists in the upload directory, a
        numeric suffix is appended (before the extension) until a free name
        is found.
        """
        self._dir = os.path.join(settings.MEDIA_ROOT, self.UPLOAD_DIR)
        filename_suffix = 0
        filename_no_extension, extension = os.path.splitext(filename)

        # Probe candidate names until one is free. os.path.isfile() replaces
        # the previous bare open() probe, which leaked one file handle per
        # attempt and treated permission errors as "name is free".
        if os.path.isfile(os.path.join(self._dir, filename)):
            while True:
                if filename_suffix == 0:
                    candidate = filename
                else:
                    candidate = (filename_no_extension +
                                 str(filename_suffix) + extension)
                if not os.path.isfile(os.path.join(self._dir, candidate)):
                    break
                filename_suffix += 1

        if filename_suffix == 0:
            return filename
        return filename_no_extension + str(filename_suffix) + extension

    def resize_for_display(self, filename, width, height):
        """Create (if needed) a copy of the image that fits within
        ``width`` x ``height``, preserving aspect ratio.

        Returns the public path of the displayed image and the original
        pixel size.
        """
        upload_dir_path = os.path.join(settings.MEDIA_ROOT,
                                       self.UPLOAD_DIR) + "/"
        original_path = upload_dir_path + filename
        filename_no_extension, extension = os.path.splitext(filename)
        need_ratio = float(width) / float(height)
        im = Image.open(original_path)
        real_width, real_height = [float(x) for x in im.size]
        real_ratio = real_width / real_height
        if real_width > width or real_height > height:
            # Scale down along the dimension that would overflow first.
            if real_ratio > need_ratio:
                displayed_width = width
                displayed_height = int(width / real_ratio)
            else:
                displayed_height = height
                displayed_width = int(height * real_ratio)
            resized_im = im.resize((displayed_width, displayed_height))
            displayed_filename = '%s_displayed%s' % (filename_no_extension,
                                                     extension)
            resized_im.save(upload_dir_path + displayed_filename)
            displayed_path = (settings.MEDIA_URL + self.UPLOAD_DIR + "/" +
                              displayed_filename)
        else:
            # Already small enough: serve the original.
            displayed_path = (settings.MEDIA_URL + self.UPLOAD_DIR + "/" +
                              filename)
        return {'displayed_path': displayed_path, 'true_size': im.size}
class BinaryWriter:
    """Writes little-endian binary data to a stream.

    An in-memory ``BytesIO`` buffer is created when no stream is given.
    ``known_length`` sizes the internal ``BufferedWriter`` buffer.
    """

    def __init__(self, stream=None, known_length=None):
        if not stream:
            stream = BytesIO()
        if known_length is None:
            # On some systems, DEFAULT_BUFFER_SIZE defaults to 8192 --
            # over 16 times as big as necessary for most messages.
            known_length = max(DEFAULT_BUFFER_SIZE, 1024)
        self.writer = BufferedWriter(stream, buffer_size=known_length)
        self.written_count = 0

    # region Writing
    # "All numbers are written as little endian."
    # https://core.telegram.org/mtproto

    def write(self, data):
        """Writes the given bytes array, keeping the byte counter current."""
        self.writer.write(data)
        self.written_count += len(data)

    def _write_le_int(self, value, size, signed):
        """Writes ``value`` as a ``size``-byte little-endian integer."""
        self.write(int.to_bytes(value, length=size, byteorder='little',
                                signed=signed))

    def write_byte(self, value):
        """Writes a single byte value."""
        self.write(pack('B', value))

    def write_int(self, value, signed=True):
        """Writes an integer value (4 bytes), optionally signed."""
        self._write_le_int(value, 4, signed)

    def write_long(self, value, signed=True):
        """Writes a long integer value (8 bytes), optionally signed."""
        self._write_le_int(value, 8, signed)

    def write_float(self, value):
        """Writes a floating point value (4 bytes)."""
        self.write(pack('<f', value))

    def write_double(self, value):
        """Writes a floating point value (8 bytes)."""
        self.write(pack('<d', value))

    def write_large_int(self, value, bits, signed=True):
        """Writes an n-bits long integer value."""
        self._write_le_int(value, bits // 8, signed)

    # endregion

    # region Telegram custom writing

    def tgwrite_bytes(self, data):
        """Writes bytes following Telegram's length-prefix/padding rules."""
        size = len(data)
        if size < 254:
            # One length byte; pad (prefix + payload) to a multiple of 4.
            prefix = bytes([size])
            pad = (4 - (size + 1) % 4) % 4
        else:
            # 0xFE marker plus 3-byte little-endian length; pad the payload.
            prefix = bytes((254, size % 256, (size >> 8) % 256,
                            (size >> 16) % 256))
            pad = (4 - size % 4) % 4
        self.write(prefix)
        self.write(data)
        self.write(bytes(pad))

    def tgwrite_string(self, string):
        """Writes a string using Telegram's UTF-8 byte-blob encoding."""
        self.tgwrite_bytes(string.encode('utf-8'))

    def tgwrite_bool(self, boolean):
        """Writes a boolean as the boolTrue/boolFalse constructor ID."""
        self.write_int(0x997275b5 if boolean else 0xbc799737, signed=False)

    def tgwrite_date(self, datetime):
        """Converts a Python datetime into Unix time (Telegram's format)
        and writes it; ``None`` is written as 0."""
        value = 0 if datetime is None else int(datetime.timestamp())
        self.write_int(value)

    def tgwrite_object(self, tlobject):
        """Writes a Telegram object via its own on_send serialiser."""
        tlobject.on_send(self)

    def tgwrite_vector(self, vector):
        """Writes a vector of Telegram objects."""
        self.write_int(0x1cb5c415, signed=False)  # Vector's constructor ID
        self.write_int(len(vector))
        for item in vector:
            self.tgwrite_object(item)

    # endregion

    def flush(self):
        """Flush the current stream to "update" changes."""
        self.writer.flush()

    def close(self):
        """Close the current stream."""
        self.writer.close()

    def get_bytes(self, flush=True):
        """Returns the buffer's current contents, flushing first by default."""
        if flush:
            self.writer.flush()
        return self.writer.raw.getvalue()

    def get_written_bytes_count(self):
        """Gets the count of bytes written through this writer. This may NOT
        equal the stream length if a stream was supplied at construction."""
        return self.written_count

    # with block
    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
def chunked_download(node_id: str, file: io.BufferedWriter, **kwargs):
    """Download a node's content in CHUNK_SIZE byte ranges into *file*.

    Keyword args:
        offset: byte offset to start at (default 0)
        length: total length, equal to end - 1 (defaults to a huge sentinel
                for unknown-length downloads)
        write_callback: called with each chunk after it has been written

    Raises RequestError when a chunk fails CHUNK_MAX_RETRY times or the
    connection times out mid-chunk.
    """
    ok_codes = [http.PARTIAL_CONTENT]

    write_callback = kwargs.get('write_callback', None)
    length = kwargs.get('length', 100 * 1024 ** 4)

    pgo = progress.Progress()
    chunk_start = kwargs.get('offset', 0)
    retries = 0
    while chunk_start < length:
        chunk_end = chunk_start + CHUNK_SIZE - 1
        if chunk_end >= length:
            chunk_end = length - 1

        if retries >= CHUNK_MAX_RETRY:
            raise RequestError(
                RequestError.CODE.FAILED_SUBREQUEST,
                '[acd_cli] Downloading chunk failed multiple times.')
        r = BackOffRequest.get(
            get_content_url() + 'nodes/' + node_id + '/content',
            stream=True,
            acc_codes=ok_codes,
            headers={'Range': 'bytes=%d-%d' % (chunk_start, chunk_end)})

        logger.debug('Range %d-%d' % (chunk_start, chunk_end))
        # this should only happen at the end of unknown-length downloads
        if r.status_code == http.REQUESTED_RANGE_NOT_SATISFIABLE:
            logger.debug('Invalid byte range requested %d-%d'
                         % (chunk_start, chunk_end))
            break
        if r.status_code not in ok_codes:
            r.close()
            retries += 1
            # FIX: arguments were passed as (retries, chunk_start, chunk_end)
            # although the format string reads "Chunk [start-end], retry N";
            # also use the module-level logger like every other call here.
            logger.debug('Chunk [%d-%d], retry %d.'
                         % (chunk_start, chunk_end, retries))
            continue

        try:
            curr_ln = 0
            for chunk in r.iter_content(chunk_size=FS_RW_CHUNK_SZ):
                if chunk:  # filter out keep-alive new chunks
                    file.write(chunk)
                    file.flush()
                    if write_callback:
                        write_callback(chunk)
                    curr_ln += len(chunk)
                    pgo.print_progress(length, curr_ln + chunk_start)
            chunk_start += CHUNK_SIZE
            retries = 0
            r.close()
        except (ConnectionError, ReadTimeoutError) as e:
            file.close()
            raise RequestError(RequestError.CODE.READ_TIMEOUT,
                               '[acd_cli] Timeout. ' + e.__str__())

    print()  # break progress line
    return
def convert_with_google_drive(note):
    """ Upload a local note and download HTML
        using Google Drive
        :note: a File model instance # FIXME

        Side effects: uploads the note's file to Google Drive, downloads the
        converted content, and saves html/text (and, for PPTs, a PDF copy)
        onto a fresh Note row fetched from the database.
    """
    # TODO: set the permission of the file to permissive so we can use the
    # gdrive_url to serve files directly to users

    # Get file_type and encoding of uploaded file
    # i.e: file_type = 'text/plain', encoding = None
    (file_type, encoding) = mimetypes.guess_type(note.note_file.path)

    # Build a resumable 1 MiB-chunk upload; pass the mimetype only when
    # guess_type managed to detect one.
    if file_type != None:
        media = MediaFileUpload(note.note_file.path, mimetype=file_type,
                                chunksize=1024*1024, resumable=True)
    else:
        media = MediaFileUpload(note.note_file.path,
                                chunksize=1024*1024, resumable=True)

    # NOTE(review): assumes at least one DriveAuth row exists for GOOGLE_USER
    # -- IndexError otherwise; verify against deployment setup.
    auth = DriveAuth.objects.filter(email=GOOGLE_USER).all()[0]
    creds = auth.transform_to_cred()

    creds, auth = check_and_refresh(creds, auth)

    service, http = build_api_service(creds)

    # get the file extension
    filename, extension = os.path.splitext(note.note_file.path)

    file_dict = upload_to_gdrive(service, media, filename, extension)
    content_dict = download_from_gdrive(file_dict, http, extension)

    # Get a new copy of the file from the database with the new metadata from filemeta
    new_note = Note.objects.get(id=note.id)

    if extension.lower() == '.pdf':
        new_note.file_type = 'pdf'
    elif extension.lower() in ['.ppt', '.pptx']:
        new_note.file_type = 'ppt'
        now = datetime.datetime.utcnow()
        # create a folder path to store the ppt > pdf file with year and month folders
        # NOTE(review): nonce_path is computed but never used -- the PDF is
        # written next to the original file instead; confirm intent.
        nonce_path = '/ppt_pdf/%s/%s/' % (now.year, now.month)
        _path = filename + '.pdf'
        try:
            # If those folders don't exist, create them
            os.makedirs(os.path.realpath(os.path.dirname(_path)))
        except:
            print "we failed to create those directories"
        # Persist the PDF rendition Google produced for the PPT.
        _writer = BufferedWriter(FileIO(_path, "w"))
        _writer.write(content_dict['pdf'])
        _writer.close()
        new_note.pdf_file = _path
    else:
        # PPT files do not have this export ability
        new_note.gdrive_url = file_dict[u'exportLinks']['application/vnd.oasis.opendocument.text']

    new_note.html = content_dict['html']
    new_note.text = content_dict['text']

    # before we save new html, sanitize a tags in note.html
    #new_note.sanitize_html(save=False)
    #FIXME: ^^^ disabled until we can get html out of an Etree html element

    # Finally, save whatever data we got back from google
    new_note.save()
class UploadStorage(object):
    """Receives an uploaded file (AJAX stream or POST), stores it below
    MEDIA_ROOT, and optionally resizes/crops it to a square thumbnail."""

    BUFFER_SIZE = 10485760  # 10MB read size for streaming (raw) uploads

    def __init__(self, **kwargs):
        # Arbitrary attributes may be injected by the caller via kwargs.
        self.__dict__.update(kwargs)
        self.size = None  # optional (width, height) target set via set_size()

    def set_size(self, width, height):
        """Record the target thumbnail size as an int (width, height) tuple."""
        self.size = int(width), int(height)
        #logger.debug(self.size)

    def setup(self, filename, upload_to):
        """
        Creates the filename on the system, along with the required folder structure.
        """
        self.filename = filename
        self.upload_to = upload_to
        #logger.debug('File: '+self.filename)
        # update_filename() also normalises upload_to and sets self._dir.
        self._path = self.update_filename()
        #logger.debug('Dir: '+self._dir)
        #logger.debug('Path: '+self._path)
        #logger.debug(os.path.realpath(os.path.dirname(self._path)))
        try:
            os.makedirs(os.path.realpath(os.path.dirname(self._path)))
        except:
            # NOTE(review): bare except also hides real failures (permissions);
            # the intent appears to be "directory already exists".
            pass
        self._dest = BufferedWriter(FileIO(self._path, "w"))

    def upload_chunk(self, chunk, *args, **kwargs):
        """Write one chunk of the incoming stream to the destination file."""
        self._dest.write(chunk)

    def upload_complete(self):
        """Close the destination and return its public (MEDIA_URL) path."""
        # NOTE(review): may produce a double slash if MEDIA_URL already ends
        # with '/' -- confirm against settings.
        path = settings.MEDIA_URL + "/" + self.upload_to + "/" + self.filename
        self._dest.close()
        return {"path": path}

    def update_filename(self):
        """
        Returns a new name for the file being uploaded.
        Ensure file with name doesn't exist, and if it does,
        create a unique filename to avoid overwriting

        Also normalises self.upload_to (strips './', expands strftime
        patterns) and sets self._dir as a side effect.
        """
        unique_filename = False
        filename_suffix = 0

        # remove trailing current folder if given
        if self.upload_to[:2] == './':
            self.upload_to = self.upload_to[2:]

        # format upload path with date formats
        self.upload_to = time.strftime(self.upload_to)

        self._dir = os.path.join(settings.MEDIA_ROOT, self.upload_to)
        #logger.debug('Upload to: '+self._dir)

        # Check if file at filename exists)
        if os.path.isfile(os.path.join(self._dir, self.filename)):
            #logger.debug('this file already exists')
            # Probe name_1, name_2, ... until open() fails (name is free).
            # NOTE(review): each successful open() leaks a file handle, and a
            # permission error is mistaken for "name is free".
            while not unique_filename:
                try:
                    if filename_suffix == 0:
                        open(os.path.join(self._dir, self.filename))
                    else:
                        filename_no_extension, extension = os.path.splitext(self.filename)
                        open(os.path.join(self._dir, "{}_{}{}".format(filename_no_extension, str(filename_suffix), extension)))
                    filename_suffix += 1
                except IOError:
                    unique_filename = True

        if filename_suffix > 0:
            self.filename = "{}_{}{}".format(filename_no_extension, str(filename_suffix), extension)

        return os.path.join(self._dir, self.filename)

    def upload(self, uploaded, raw_data):
        """Stream ``uploaded`` to disk, then (if set_size was called) resize
        and centre-crop the image to a square thumbnail.

        Returns True on success, False on any error (logged).
        """
        try:
            if raw_data:
                # File was uploaded via ajax, and is streaming in.
                chunk = uploaded.read(self.BUFFER_SIZE)
                while len(chunk) > 0:
                    self.upload_chunk(chunk)
                    chunk = uploaded.read(self.BUFFER_SIZE)
            else:
                # File was uploaded via a POST, and is here.
                for chunk in uploaded.chunks():
                    self.upload_chunk(chunk)

            # make sure the file is closed
            self._dest.close()

            # file has been uploaded; from here on self.filename holds the
            # public (MEDIA_URL-based) path rather than the bare name.
            self.filename = os.path.join(settings.MEDIA_URL, self.upload_to, self.filename)

            image = Image.open(self._path)
            #logger.debug("{} {} {}".format(image.format, image.size, image.mode))

            # resize image
            if self.size:
                #logger.debug(self.size)
                image = Image.convert('RGBA') if False else image.convert('RGBA')
                # the image is resized using the minimum dimension
                width, height = self.size
                if image.size[0] < image.size[1]:
                    # the height is bigger than the width
                    # we set the maximum height to the original height,
                    # so that the image fits the width
                    height = image.size[1]
                elif image.size[0] > image.size[1]:
                    # the width is bigger than the height
                    # we set the maximum width to the original width,
                    # so that the image fits the height
                    width = image.size[0]
                else:
                    # we have a square
                    pass
                image.thumbnail((width, height), Image.ANTIALIAS)

                # if the image is not a square, we crop the exceeding width/length as required,
                # to fit the square
                if image.size[0] != image.size[1]:
                    # we crop in the middle of the image
                    # NOTE(review): the centring arithmetic below relies on
                    # integer division (Python 2 '/'); under Python 3 these
                    # become floats -- confirm interpreter version.
                    if self.size[0] == image.size[0]:
                        # the width fits the container, center the height
                        x0 = 0
                        y0 = (image.size[1] / 2) - (self.size[1] / 2)
                        x1 = self.size[0]
                        y1 = y0 + self.size[1]
                    else:
                        # center the width
                        x0 = (image.size[0] / 2) - (self.size[0] / 2)
                        y0 = 0
                        x1 = x0 + self.size[0]
                        y1 = self.size[1]
                    box = (x0, y0, x1, y1)
                    region = image.crop(box)
                    # Paste the crop onto a transparent square canvas.
                    background = Image.new('RGBA', size = self.size, color = (255, 255, 255, 0))
                    background.paste(region, (0, 0))
                    #logger.debug("{} {} {}".format(background.format, background.size, background.mode))
                    background.save(self._path)
                else:
                    image.save(self._path)

            return True
        except Exception as e:
            logger.error(e)
            return False
class AjaxUploader(object):
    """Handles qq-style AJAX uploads (raw stream or multipart POST),
    enforcing a size limit and optionally re-encoding images."""

    BUFFER_SIZE = 10485760  # 10MB read size for streaming (raw) uploads

    def __init__(self, filetype='file', upload_dir='files', size_limit=10485760):
        # Files are grouped under a per-date directory below MEDIA_ROOT.
        self._upload_dir = os.path.join(settings.MEDIA_ROOT, upload_dir, get_date_directory())
        self._filetype = filetype
        if filetype == 'image':
            # Target format for re-encoding uploaded images (e.g. 'JPEG').
            self._save_format = setting('IMAGE_UPLOAD_FORMAT', 'JPEG')
        else:
            self._save_format = None
        self._size_limit = size_limit

    def max_size(self):
        """ Checking file max size

        Returns True (after closing and deleting the partial file) when the
        bytes written so far exceed the size limit; returns None otherwise.
        """
        if int(self._destination.tell()) > self._size_limit:
            self._destination.close()
            os.remove(self._path)
            return True

    def setup(self, filename):
        """Derive a storage name (MD5 of the original name + extension) and
        open the destination file, creating directories as needed."""
        ext = os.path.splitext(filename)[1]
        self._filename = md5(filename.encode('utf8')).hexdigest() + ext
        self._path = os.path.join(self._upload_dir, self._filename)
        # noinspection PyBroadException
        try:
            os.makedirs(os.path.realpath(os.path.dirname(self._path)))
        except:
            # Directory presumably already exists; real errors surface below.
            pass
        self._destination = BufferedWriter(FileIO(self._path, "w"))

    def handle_upload(self, request):
        """Store the file carried by ``request`` and return a result dict
        (``success`` plus either ``error`` or path/filename details)."""
        is_raw = True
        if request.FILES:
            is_raw = False
            # NOTE(review): indexing .values()[0] is Python 2 behaviour --
            # dict views are not indexable on Python 3; confirm interpreter.
            if len(request.FILES) == 1:
                upload = request.FILES.values()[0]
            else:
                return dict(success=False, error=_("Bad upload."))
            filename = upload.name
        else:
            # the file is stored raw in the request
            upload = request
            # get file size
            try:
                filename = request.GET['qqfile']
            except KeyError as aerr:
                return dict(success=False, error=_("Can't read file name"))
        self.setup(filename)
        # noinspection PyBroadException
        try:
            if is_raw:
                # File was uploaded via ajax, and is streaming in.
                chunk = upload.read(self.BUFFER_SIZE)
                while len(chunk) > 0:
                    self._destination.write(chunk)
                    # IOError is raised to bail out when the limit is hit;
                    # max_size() has already removed the partial file.
                    if self.max_size():
                        raise IOError
                    chunk = upload.read(self.BUFFER_SIZE)
            else:
                # File was uploaded via a POST, and is here.
                for chunk in upload.chunks():
                    self._destination.write(chunk)
                    if self.max_size():
                        raise IOError
        except:
            # things went badly.
            return dict(success=False, error=_("Upload error"))
        self._destination.close()
        if self._filetype == 'image':
            # Validate that the payload really is an image.
            # noinspection PyBroadException
            try:
                i = Image.open(self._path)
            except:
                os.remove(self._path)
                return dict(success=False, error=_("File is not image format"))
            f_name, f_ext = os.path.splitext(self._filename)
            f_without_ext = os.path.splitext(self._path)[0]
            # Re-encode to the configured format, renaming the extension.
            new_path = ".".join([f_without_ext, self._save_format.lower()])
            if setting('IMAGE_STORE_ORIGINAL', False):
                # TODO need change the extension
                orig_path = ".".join(
                    [f_without_ext + '_orig', self._save_format.lower()])
                shutil.copy2(self._path, orig_path)
            # Bound the stored image to 1200x1200 (in place, keeps ratio).
            i.thumbnail((1200, 1200), Image.ANTIALIAS)
            # noinspection PyBroadException
            try:
                if self._path == new_path:
                    i.save(self._path, self._save_format)
                else:
                    i.save(new_path, self._save_format)
                    os.remove(self._path)
                    self._path = new_path
            except:
                # Best-effort cleanup of both candidate paths on failure.
                # noinspection PyBroadException
                try:
                    os.remove(self._path)
                    os.remove(new_path)
                except:
                    pass
                return dict(success=False, error=_("Error saving image"))
            self._filename = ".".join([f_name, self._save_format.lower()])
        return dict(success=True, fullpath=self._path,
                    path=os.path.relpath(self._path, '/' + settings.MEDIA_ROOT),
                    old_filename=filename, filename=self._filename)
class AbstractUploadBackend(object):
    """Base class for AJAX upload backends.

    Files are stored below ``upload_dir`` in a per-day directory
    (``self._timedir``) and renamed to the MD5 hex digest of their original
    name; the original name is kept for upload_complete()'s response.
    """

    BUFFER_SIZE = 10485760  # 10MB read size for streaming (raw) uploads

    upload_dir = None   # destination sub-directory below MEDIA_ROOT
    upload_size = None  # maximum allowed upload size in bytes

    def __init__(self, **kwargs):
        self._timedir = get_date_directory()
        # Allow callers to override attributes (upload_dir, upload_size, ...).
        self.__dict__.update(kwargs)

    def update_filename(self, request, filename):
        """Return a collision-safe storage name: MD5 of the original name
        plus the original extension. Remembers the original in
        ``self.oldname`` for upload_complete()."""
        self.oldname = filename
        ext = os.path.splitext(filename)[1]
        return md5(filename.encode('utf8')).hexdigest() + ext

    def upload_chunk(self, chunk):
        """Called when a string was read from the client; writes that
        string to the destination file."""
        self._dest.write(chunk)

    def max_size(self):
        """Return True (after closing and deleting the partial file) when
        the upload has grown beyond ``upload_size``."""
        if int(self._dest.tell()) > self.upload_size:
            self._dest.close()
            os.remove(self._path)
            return True
        return False  # was an implicit None; callers test truthiness

    def upload(self, uploaded, filename, raw_data):
        """Stream ``uploaded`` into the destination file.

        Returns True on success, False when the size limit is hit or any
        read/write error occurs.
        """
        try:
            if raw_data:
                # File was uploaded via ajax, and is streaming in.
                chunk = uploaded.read(self.BUFFER_SIZE)
                while len(chunk) > 0:
                    self.upload_chunk(chunk)
                    if self.max_size():
                        return False
                    chunk = uploaded.read(self.BUFFER_SIZE)
            else:
                # File was uploaded via a POST, and is here.
                for chunk in uploaded.chunks():
                    self.upload_chunk(chunk)
                    if self.max_size():
                        return False
            return True
        except Exception:
            # Deliberate best-effort contract: failures are reported as an
            # unsuccessful upload rather than raised. (Was a bare ``except:``
            # that also swallowed SystemExit/KeyboardInterrupt.)
            return False

    def setup(self, filename):
        """Open the destination file for writing, creating its directory tree."""
        self._path = os.path.join(settings.MEDIA_ROOT, self.upload_dir,
                                  self._timedir, filename)
        try:
            os.makedirs(os.path.realpath(os.path.dirname(self._path)))
        except OSError:
            # Directory already exists; FileIO below surfaces real errors.
            pass
        self._dest = BufferedWriter(FileIO(self._path, "w"))

    def upload_complete(self, request, filename):
        """Close the destination and describe the stored file (its relative
        path and the original client-side name)."""
        path = self.upload_dir + "/" + self._timedir + "/" + filename
        self._dest.close()
        return {"path": path, 'oldname': self.oldname}
def convert_with_google_drive(note):
    """ Upload a local note and download HTML
        using Google Drive
        :note: a File model instance # FIXME

        Side effects: uploads the note's file to Google Drive, downloads
        the converted content, and saves html/text (and, for PPTs, a PDF
        copy) onto a fresh Note row fetched from the database.
    """
    # TODO: set the permission of the file to permissive so we can use the
    # gdrive_url to serve files directly to users

    # Get file_type and encoding of uploaded file
    # i.e: file_type = 'text/plain', encoding = None
    (file_type, encoding) = mimetypes.guess_type(note.note_file.path)

    # Build a resumable 1 MiB-chunk upload; only pass mimetype when known.
    if file_type != None:
        media = MediaFileUpload(note.note_file.path, mimetype=file_type,
                                chunksize=1024 * 1024, resumable=True)
    else:
        media = MediaFileUpload(note.note_file.path,
                                chunksize=1024 * 1024, resumable=True)

    # NOTE(review): assumes at least one DriveAuth row exists for
    # GOOGLE_USER -- IndexError otherwise.
    auth = DriveAuth.objects.filter(email=GOOGLE_USER).all()[0]
    creds = auth.transform_to_cred()

    creds, auth = check_and_refresh(creds, auth)

    service, http = build_api_service(creds)

    # get the file extension
    filename, extension = os.path.splitext(note.note_file.path)

    file_dict = upload_to_gdrive(service, media, filename, extension)
    content_dict = download_from_gdrive(file_dict, http, extension)

    # Get a new copy of the file from the database with the new metadata from filemeta
    new_note = Note.objects.get(id=note.id)

    if extension.lower() == '.pdf':
        new_note.file_type = 'pdf'
    elif extension.lower() in ['.ppt', '.pptx']:
        new_note.file_type = 'ppt'
        now = datetime.datetime.utcnow()
        # create a folder path to store the ppt > pdf file with year and month folders
        # NOTE(review): nonce_path is computed but never used; the PDF is
        # written next to the original file instead -- confirm intent.
        nonce_path = '/ppt_pdf/%s/%s/' % (now.year, now.month)
        _path = filename + '.pdf'
        try:
            # If those folders don't exist, create them
            os.makedirs(os.path.realpath(os.path.dirname(_path)))
        except:
            print "we failed to create those directories"
        # Persist the PDF rendition Google produced for the PPT.
        _writer = BufferedWriter(FileIO(_path, "w"))
        _writer.write(content_dict['pdf'])
        _writer.close()
        new_note.pdf_file = _path
    else:
        # PPT files do not have this export ability
        new_note.gdrive_url = file_dict[u'exportLinks'][
            'application/vnd.oasis.opendocument.text']

    new_note.html = content_dict['html']
    new_note.text = content_dict['text']

    # before we save new html, sanitize a tags in note.html
    #new_note.sanitize_html(save=False)
    #FIXME: ^^^ disabled until we can get html out of an Etree html element

    # Finally, save whatever data we got back from google
    new_note.save()
class UploadStorage(object):
    """Receives an uploaded file (AJAX stream or POST), stores it below
    MEDIA_ROOT, and optionally resizes/crops it to a square thumbnail."""

    BUFFER_SIZE = 10485760  # 10MB read size for streaming (raw) uploads

    def __init__(self, **kwargs):
        # Arbitrary attributes may be injected by the caller via kwargs.
        self.__dict__.update(kwargs)
        self.size = None  # optional (width, height) target set via set_size()

    def set_size(self, width, height):
        """Record the target thumbnail size as an int (width, height) tuple."""
        self.size = int(width), int(height)
        #logger.debug(self.size)

    def setup(self, filename, upload_to):
        """
        Creates the filename on the system, along with the required folder structure.
        """
        self.filename = filename
        self.upload_to = upload_to
        #logger.debug('File: '+self.filename)
        # update_filename() also normalises upload_to and sets self._dir.
        self._path = self.update_filename()
        #logger.debug('Dir: '+self._dir)
        #logger.debug('Path: '+self._path)
        #logger.debug(os.path.realpath(os.path.dirname(self._path)))
        try:
            os.makedirs(os.path.realpath(os.path.dirname(self._path)))
        except:
            # NOTE(review): bare except also hides real failures (permissions);
            # the intent appears to be "directory already exists".
            pass
        self._dest = BufferedWriter(FileIO(self._path, "w"))

    def upload_chunk(self, chunk, *args, **kwargs):
        """Write one chunk of the incoming stream to the destination file."""
        self._dest.write(chunk)

    def upload_complete(self):
        """Close the destination and return its public (MEDIA_URL) path."""
        # NOTE(review): may produce a double slash if MEDIA_URL already ends
        # with '/' -- confirm against settings.
        path = settings.MEDIA_URL + "/" + self.upload_to + "/" + self.filename
        self._dest.close()
        return {"path": path}

    def update_filename(self):
        """
        Returns a new name for the file being uploaded.
        Ensure file with name doesn't exist, and if it does,
        create a unique filename to avoid overwriting

        Also normalises self.upload_to (strips './', expands strftime
        patterns) and sets self._dir as a side effect.
        """
        unique_filename = False
        filename_suffix = 0

        # remove trailing current folder if given
        if self.upload_to[:2] == './':
            self.upload_to = self.upload_to[2:]

        # format upload path with date formats
        self.upload_to = time.strftime(self.upload_to)

        self._dir = os.path.join(settings.MEDIA_ROOT, self.upload_to)
        #logger.debug('Upload to: '+self._dir)

        # Check if file at filename exists)
        if os.path.isfile(os.path.join(self._dir, self.filename)):
            #logger.debug('this file already exists')
            # Probe name_1, name_2, ... until open() fails (name is free).
            # NOTE(review): each successful open() leaks a file handle, and a
            # permission error is mistaken for "name is free".
            while not unique_filename:
                try:
                    if filename_suffix == 0:
                        open(os.path.join(self._dir, self.filename))
                    else:
                        filename_no_extension, extension = os.path.splitext(
                            self.filename)
                        open(
                            os.path.join(
                                self._dir,
                                "{}_{}{}".format(filename_no_extension,
                                                 str(filename_suffix),
                                                 extension)))
                    filename_suffix += 1
                except IOError:
                    unique_filename = True

        if filename_suffix > 0:
            self.filename = "{}_{}{}".format(filename_no_extension,
                                             str(filename_suffix), extension)

        return os.path.join(self._dir, self.filename)

    def upload(self, uploaded, raw_data):
        """Stream ``uploaded`` to disk, then (if set_size was called) resize
        and centre-crop the image to a square thumbnail.

        Returns True on success, False on any error (logged).
        """
        try:
            if raw_data:
                # File was uploaded via ajax, and is streaming in.
                chunk = uploaded.read(self.BUFFER_SIZE)
                while len(chunk) > 0:
                    self.upload_chunk(chunk)
                    chunk = uploaded.read(self.BUFFER_SIZE)
            else:
                # File was uploaded via a POST, and is here.
                for chunk in uploaded.chunks():
                    self.upload_chunk(chunk)

            # make sure the file is closed
            self._dest.close()

            # file has been uploaded; from here on self.filename holds the
            # public (MEDIA_URL-based) path rather than the bare name.
            self.filename = os.path.join(settings.MEDIA_URL, self.upload_to,
                                         self.filename)

            image = Image.open(self._path)
            #logger.debug("{} {} {}".format(image.format, image.size, image.mode))

            # resize image
            if self.size:
                #logger.debug(self.size)
                image = image.convert('RGBA')
                # the image is resized using the minimum dimension
                width, height = self.size
                if image.size[0] < image.size[1]:
                    # the height is bigger than the width
                    # we set the maximum height to the original height,
                    # so that the image fits the width
                    height = image.size[1]
                elif image.size[0] > image.size[1]:
                    # the width is bigger than the height
                    # we set the maximum width to the original width,
                    # so that the image fits the height
                    width = image.size[0]
                else:
                    # we have a square
                    pass
                image.thumbnail((width, height), Image.ANTIALIAS)

                # if the image is not a square, we crop the exceeding width/length as required,
                # to fit the square
                if image.size[0] != image.size[1]:
                    # we crop in the middle of the image
                    # NOTE(review): the centring arithmetic relies on integer
                    # division (Python 2 '/'); under Python 3 these become
                    # floats -- confirm interpreter version.
                    if self.size[0] == image.size[0]:
                        # the width fits the container, center the height
                        x0 = 0
                        y0 = (image.size[1] / 2) - (self.size[1] / 2)
                        x1 = self.size[0]
                        y1 = y0 + self.size[1]
                    else:
                        # center the width
                        x0 = (image.size[0] / 2) - (self.size[0] / 2)
                        y0 = 0
                        x1 = x0 + self.size[0]
                        y1 = self.size[1]
                    box = (x0, y0, x1, y1)
                    region = image.crop(box)
                    # Paste the crop onto a transparent square canvas.
                    background = Image.new('RGBA', size=self.size,
                                           color=(255, 255, 255, 0))
                    background.paste(region, (0, 0))
                    #logger.debug("{} {} {}".format(background.format, background.size, background.mode))
                    background.save(self._path)
                else:
                    image.save(self._path)

            return True
        except Exception as e:
            logger.error(e)
            return False