def value_from_datadict(self, data, files, name):
    """
    Normally returns files.get(name, None). Here we also check `data`. --
    if the appropriate hidden _sticky_file input is set, we can look for
    the temporary file instead and return that if it exists.

    This method seems to be called multiple times with the same arguments,
    so to prevent excessive storage activity the return value is cached
    and returned without processing on subsequent calls. There is an
    assumption that the arguments will not change between calls for any
    given instance, which appears to be valid, so no argument checks are
    performed.
    """
    # Cached result from an earlier call on this instance -- see docstring.
    if hasattr(self, '_value'):
        return self._value
    # Remember the CSRF token; presumably used to key the sticky copy
    # storage per user -- TODO confirm against save/load_sticky_copy.
    self.user_token = data.get('csrfmiddlewaretoken', None)
    # look for normal file
    value = super(
        StickyFileInput, self).value_from_datadict(data, files, name)
    if value and hasattr(value, 'name'):
        # got one, save a temporary copy just in case
        self.sticky_file_name = value.name
        # Session id is just a timestamp with microsecond precision.
        self.sticky_session_id = '%.6f' % time.time()
        self.save_sticky_copy(value.file)
    else:
        # check for temporary copy: the hidden inputs carry the original
        # file name and the session id from the previous submission.
        self.sticky_file_name = (
            data.get(
                self.get_hidden_input_name(name, 'sticky_file'), None))
        self.sticky_session_id = data.get(
            self.get_hidden_input_name(name, 'sticky_session_id'), None)
        sticky_copy = self.load_sticky_copy()
        if sticky_copy:
            sticky_copy.seek(0, 2)  # seek to end (to measure size below)
            value = TemporaryUploadedFile(
                name=self.sticky_file_name,
                content_type=None,
                size=sticky_copy.tell(),
                charset=None
            )
            value.file = sticky_copy
            value.file.seek(0)
            # Consumers may call temporary_file_path(); point it at the
            # sticky copy's on-disk location.
            value.temporary_file_path = lambda: self.get_sticky_path()
    setattr(self, '_value', value)  # cache
    return self._value
def get(self, key):
    """
    Regenerate a MultiValueDict instance containing the files related
    to all file states stored for the given key.

    Each stored state dict provides 'content', 'name', 'content_type',
    'size' and 'charset'.  Files whose size exceeds
    settings.FILE_UPLOAD_MAX_MEMORY_SIZE are wrapped in a
    TemporaryUploadedFile, smaller ones in an InMemoryUploadedFile.
    Returns an empty MultiValueDict when nothing is stored for the key.
    """
    files_states = self.backend.get(key)
    files = MultiValueDict()
    if files_states:
        for name, state in files_states.items():
            # One buffer per file; both branches below reuse it.  (The
            # original rebuilt and rewrote the buffer in the else branch,
            # duplicating this work for no effect.)
            f = BytesIO()
            f.write(state['content'])
            # If the post is too large, we cannot use an
            # InMemoryUploadedFile instance.
            if state['size'] > settings.FILE_UPLOAD_MAX_MEMORY_SIZE:
                upload = TemporaryUploadedFile(
                    state['name'],
                    state['content_type'],
                    state['size'],
                    state['charset'],
                )
                upload.file = f
            else:
                upload = InMemoryUploadedFile(
                    file=f,
                    field_name=name,
                    name=state['name'],
                    content_type=state['content_type'],
                    size=state['size'],
                    charset=state['charset'],
                )
            files[name] = upload
            # Go to the first byte in the file for future use
            upload.file.seek(0)
    return files
def get(self, key):
    """
    Regenerate a MultiValueDict instance containing the files related
    to all file states stored for the given key.

    State dicts provide 'content', 'name', 'content_type', 'size' and
    'charset'.  Oversized files (above
    settings.FILE_UPLOAD_MAX_MEMORY_SIZE) become TemporaryUploadedFile
    instances; the rest become InMemoryUploadedFile instances.  An empty
    MultiValueDict is returned when no state exists for the key.
    """
    files = MultiValueDict()
    files_states = self.backend.get(key)
    if not files_states:
        return files
    for name, state in files_states.items():
        # Single buffer shared by both branches; the original needlessly
        # created and filled a second BytesIO in the else branch.
        f = BytesIO(state['content'])
        # Posts larger than the in-memory threshold cannot use an
        # InMemoryUploadedFile instance.
        if state['size'] > settings.FILE_UPLOAD_MAX_MEMORY_SIZE:
            upload = TemporaryUploadedFile(
                state['name'],
                state['content_type'],
                state['size'],
                state['charset'],
            )
            upload.file = f
        else:
            upload = InMemoryUploadedFile(
                file=f,
                field_name=name,
                name=state['name'],
                content_type=state['content_type'],
                size=state['size'],
                charset=state['charset'],
            )
        # Rewind so consumers can read from the first byte.
        upload.file.seek(0)
        files[name] = upload
    return files
def recreate_tmp_file(name, path, mime_type):
    """
    Rebuild a TemporaryUploadedFile wrapping the existing file at `path`.

    The size passed to the constructor is a placeholder (0); the real
    size is taken from the open descriptor afterwards.
    """
    tmp_file = TemporaryUploadedFile(name, mime_type, 0, None)
    # 'rb': uploaded content is a byte stream; text mode would attempt to
    # decode it and could raise UnicodeDecodeError or mangle the data.
    tmp_file.file = open(path, 'rb')
    tmp_file.size = os.fstat(tmp_file.fileno()).st_size
    return tmp_file
def recreate_tmp_file(name, path, mime_type):
    """
    Wrap the already-written file at `path` in a TemporaryUploadedFile.

    Constructed with a size of 0, then fixed up from the descriptor via
    os.fstat once the file is open.
    """
    tmp_file = TemporaryUploadedFile(name, mime_type, 0, None)
    # Open in binary mode: the file holds raw upload bytes, and text-mode
    # decoding could fail or corrupt non-UTF-8 content.
    tmp_file.file = open(path, 'rb')
    tmp_file.size = os.fstat(tmp_file.fileno()).st_size
    return tmp_file
def create_pngs(doc, type='pngs'):
    """
    Render the PDF associated with `doc` to scaled PNG slides and store
    them as DerivedDocument records attached to doc's blob.

    `type` is unused but kept for interface compatibility with callers.

    Returns True on success, False when conversion is skipped (document
    is already a PNG, or PNGs were already derived), and None when no
    PDF source is available.
    """
    blob = doc._blob
    log.info('Starting png generation of: %s', doc)

    # Check to make sure that we don't already have a pngs pack.
    if doc.file_type == 'png':
        log.info('%s is a PNG, no need to convert' % blob)
        return False
    elif blob.derived_documents.filter(file_type='png'):
        log.info('%s has derived PNG, no need to convert' % blob)
        return False

    # Locate a pdf file -- either the document itself or a derivative.
    if doc.type == 'pdf':
        pdf = doc
    else:
        pdf = doc.get_derived_documents_of_type('pdf')
        if pdf:
            pdf = pdf[0]
    if not pdf:
        log.info("No PDF avaliable for %s" % blob)
        return

    # Create a temp folder.  The try/finally guarantees cleanup on every
    # exit path; the original leaked the folder on the "canceling upload"
    # return and on any exception while scaling/uploading.
    temp_folder = tempfile.mkdtemp()
    log.debug('working with: %s', temp_folder)
    try:
        pdf_copy = tempfile.NamedTemporaryFile(dir=temp_folder, delete=False)
        for data in pdf.file.chunks():
            pdf_copy.write(data)
        pdf_copy.close()

        # Now call ghostscript to rasterize one PNG per page.
        return_code = subprocess.call(
            ["gs", "-sDEVICE=png16m",
             "-sOutputFile=%s/slide-%s.png" % (temp_folder, '%03d'),
             "-r600", "-dNOPAUSE", "-dBATCH", "-dMaxBitmap=1000000000",
             "%s" % pdf_copy.name])
        if return_code != 0:
            log.error('Ghostscript error')
            # retry() raises in Celery, aborting this run; the finally
            # block below removes the temp folder.
            create_pngs.retry()

        # Scale each generated page with PIL.
        compiled_regex = re.compile(r'^slide-(\w+)\.png$')
        scaled_images = {}
        for generated in os.listdir(temp_folder):
            match = compiled_regex.match(generated)
            if not match:
                continue
            log.debug('scaling image: %s', generated)
            order = int(match.group(1))
            slide = Image.open(os.path.join(temp_folder, generated))
            slide.thumbnail((1920, 1200), Image.ANTIALIAS)
            new_filename = os.path.join(
                temp_folder, 'slide-scaled-%03d.png' % order)
            slide.save(new_filename)
            scaled_images[order] = new_filename

        # Renumber so the order starts at 0 and has no gaps.  sorted()
        # works on both Python 2 and 3; the previous keys().sort() broke
        # under Python 3.
        scaled_images = dict(
            (index, scaled_images[key])
            for index, key in enumerate(sorted(scaled_images)))

        # Before uploading check that there are still no other pngs up there.
        if blob.derived_documents.filter(file_type='png'):
            log.info('%s has derived PNG now, canceling upload' % blob)
            return False

        # Upload every scaled slide as a DerivedDocument.
        filename = None
        try:
            # .items() instead of the Python-2-only .iteritems().
            for order, filename in scaled_images.items():
                slide_fh = open(filename, 'rb')
                parts = os.path.split(filename)
                filename = os.path.join(parts[-2], '%s_%s' % (
                    doc.file_name[0:60], parts[-1]))
                upfile = TemporaryUploadedFile(filename, 'image/png', 0, None)
                upfile.file = slide_fh
                derived_doc = DerivedDocument(derived_from=blob)
                derived_doc.file = upfile
                derived_doc.index = order
                derived_doc.save()
        except Exception:
            # Record which file we were handling, then re-raise.
            log.error(filename)
            raise
        return True
    finally:
        # ignore_errors so a cleanup failure never masks the real error.
        shutil.rmtree(temp_folder, ignore_errors=True)