def retrieve(self):
    """Yield the stored file's content in chunks.

    Opens the file at ``self.path`` and yields chunks of
    ``settings.RETRIEVE_CHUNK_SIZE`` bytes.

    FIX: the original returned ``plain.chunks(...)`` directly and leaked
    the underlying file descriptor; yielding from inside try/finally
    closes it once iteration completes (or is abandoned).
    """
    plain = File(open(self.path, 'rb'))
    try:
        for chunk in plain.chunks(settings.RETRIEVE_CHUNK_SIZE):
            yield chunk
    finally:
        plain.close()
def get_file_metadata(
        file: File, detect_mime_type: bool = True) -> Tuple[str, Optional[str]]:
    """Calculate checksum and detect mime type in one go.

    Streams the file once, feeding every chunk to SHA-256 and sniffing the
    MIME type from the first chunk only (when *detect_mime_type* is True).

    :param file: a Django ``File``; may be closed or open at any position.
    :param detect_mime_type: when False, the returned mime type is None.
    :return: ``('sha256:<hexdigest>', mime_type_or_None)``.

    FIX: the caller's file state (open/closed, seek position) is now
    restored in a ``finally`` block, so an exception while reading no
    longer leaves the file open or mispositioned.
    """
    sha256 = hashlib.sha256()
    mime_type = None
    closed = file.closed
    if closed:
        file_pos = 0
        file.open()
    else:
        # Remember where the caller left the cursor so we can restore it.
        file_pos = file.tell()
        file.seek(0)
    try:
        for i, chunk in enumerate(file.chunks()):
            sha256.update(chunk)
            if i == 0 and detect_mime_type:
                mime_type = magic.from_buffer(chunk, mime=True)
    finally:
        # Restore the caller-visible state even if hashing failed.
        if closed:
            file.close()
        else:
            file.seek(file_pos)
    return f'sha256:{sha256.hexdigest()}', mime_type
def UploadFile(request):
    """Handle a POST upload: log it, replace any previous DB record for the
    same user/path, write the file under the user's directory, and store an
    MD5 content hash.

    NOTE(review): ``path`` comes straight from the client and is joined into
    a filesystem path — a '../' component allows path traversal; validate or
    normalize it upstream.
    """
    # manipulating the header to get vars
    uFile = File(request.FILES['file'])
    uname = request.user.username
    path = request.DATA['path']
    # logging
    logDict = {}
    logDict['User'] = request.user.username
    logDict['Action'] = 'UploadFile'
    logDict['HTTP'] = 'POST'
    logDict['File'] = request.DATA['path'] + uFile.name
    logger.info(json.dumps(logDict))
    # deleting obj in db
    query = ODFile.objects.filter(name=request.user,
                                  fileName=path + uFile.name).delete()
    # writing file and creating md5 hash
    md5 = hashlib.md5()
    if not os.path.exists('../Server/Files/' + uname + '/' + path):
        os.makedirs('../Server/Files/' + uname + '/' + path)
    # BUG FIX: uploaded chunks are bytes, so the destination must be opened
    # in binary mode; the original 'w+' raised TypeError on Python 3.
    with open('../Server/Files/' + uname + '/' + path + uFile.name,
              'wb+') as destination:
        for chunk in uFile.chunks():
            md5.update(chunk)
            destination.write(chunk)
    # BUG FIX: ``uFile.name`` and ``hexdigest()`` are already str on
    # Python 3; the original called .decode("utf-8") on them, which raises
    # AttributeError.
    f = ODFile(fileName=path + uFile.name, name=request.user,
               fileHash=md5.hexdigest(), fileSize=uFile.size)
    f.save()
    return HttpResponse(constants.h_uploadFile_success)
def download_scanfile(request):  # , userid
    """Serve the bundled ``scanGUI.exe`` binary as an attachment download."""
    file_path = os.path.join(settings.MEDIA_ROOT, "exefiles", "scanGUI.exe")
    with open(file_path, 'rb') as handle:
        wrapped = File(handle)
        # HttpResponse consumes the chunk iterator eagerly, while the
        # underlying handle is still open inside this ``with``.
        response = HttpResponse(wrapped.chunks(),
                                content_type='APPLICATION/OCTET-STREAM')
        response['Content-Disposition'] = 'attachment; filename=scanGUI.exe'
        response['Content-Length'] = os.path.getsize(file_path)
        return response
def download(request, filename):
    """Stream a file from the media directory as an attachment."""
    target = os.path.join(media, filename)
    size = os.path.getsize(target)
    with open(target, 'rb') as handle:
        payload = File(handle)
        response = HttpResponse(payload.chunks(),
                                content_type='APPLICATION/OCTET-STREAM')
        response['Content-Disposition'] = 'attachment; filename=' + filename
        response['Content-Length'] = size
        return response
def getfile(request, filename):  # person = request.user.person
    """Return a result file from ``RESULT_FILES_DIR`` as an attachment."""
    result_path = os.path.join(RESULT_FILES_DIR, filename)
    with open(result_path, 'rb') as source:
        wrapped = File(source)
        response = HttpResponse(wrapped.chunks(),
                                content_type='APPLICATION/OCTET-STREAM')
        response['Content-Disposition'] = 'attachment; filename=' + filename
        response['Content-Length'] = os.path.getsize(result_path)
        return response
def test_handle_uploaded_csv_file(self):
    """Tests csv file upload.

    FIX: the source file handle is now closed via ``with`` (the original
    leaked it), and the helper is invoked once instead of twice.
    """
    with open(TEST_CSV_FILE_PATH, 'rb') as f_csv:
        file_csv_in = File(f_csv)
        # Copy the fixture into the temp location the helper reads from.
        with open(TEMP_FILE_PATH, 'wb+') as destination:
            for chunk in file_csv_in.chunks():
                destination.write(chunk)
        result = file_helper.handle_uploaded_file(file_csv_in.name)
    self.assertTrue(result['success'])
    self.assertIsInstance(result['dataframe'], pd.DataFrame)
def test_handle_uploaded_dummy_file(self):
    """Tests uploaded dummy or unsupported files.

    FIX: the source file handle is now closed via ``with`` (the original
    leaked it), and the helper is invoked once instead of twice.
    """
    with open(TEST_DUMMY_FILE_PATH, 'rb') as f_dummy:
        file_dummy_in = File(f_dummy)
        # Copy the fixture into the temp location the helper reads from.
        with open(TEMP_FILE_PATH, 'wb+') as destination:
            for chunk in file_dummy_in.chunks():
                destination.write(chunk)
        result = file_helper.handle_uploaded_file(file_dummy_in.name)
    self.assertFalse(result['success'])
    self.assertEqual(
        result['error'],
        'Sorry. File type xcel is not supported or parsing error occurred')
def setUp(self):
    """
    opens a file and writes it to temp file in vivarana/media
    :return: self with a dataframe

    FIX: the source log file is now closed via ``with`` — the original
    leaked the handle for the lifetime of the test run.
    """
    with open(TEST_LOG_FILE_PATH, 'rb') as f_log:
        file_log_in = File(f_log)
        with open(TEMP_FILE_PATH, 'wb+') as destination:
            for chunk in file_log_in.chunks():
                destination.write(chunk)
        # get result from handle log file
        self.dataframe = file_helper.handle_uploaded_file(
            file_log_in.name)['dataframe']
def show_image(request, pth):
    """Stream the image identified by ``<category>/<tag>`` in *pth*.

    404s when no matching ``ImageFile`` row exists.
    """
    tag = os.path.basename(pth)
    category = os.path.dirname(pth)
    obj = get_object_or_404(ImageFile, tag=tag, category__name=category)
    real_path = os.path.join(settings.MEDIA_ROOT, obj.content.name)
    wrapped = File(open(real_path, "rb"), obj.content.name)
    # Stream in 8 KiB chunks; length comes from the DB record.
    response = StreamingHttpResponse(wrapped.chunks(8192),
                                     content_type=obj.mime_type)
    response['Content-Length'] = obj.size
    return response
def download(request, filename):
    """Serve *filename* from the backbone results directory as a download."""
    backbone_dir = SAVED_FILES_DIR.replace('CA_info', 'backbone')
    target = os.path.join(backbone_dir, filename)
    with open(target, 'rb') as handle:
        response = HttpResponse(File(handle).chunks(),
                                content_type='APPLICATION/OCTET-STREAM')
        response['Content-Disposition'] = 'attachment; filename=' + filename
        response['Content-Length'] = os.path.getsize(target)
        # os.unlink(file_pathname)
        return response
def open(self, name, mode='rb'):
    """Open *name* from the appropriate storage backend.

    When the remote backend is in use for this name, verify the local copy
    against the SHA-1 encoded in the file's basename before saving it to
    the local backend; raise ``IOError`` if the remote copy is missing or
    the hashes disagree.
    """
    if self.using_remote(name):
        if not self.remote.exists(name):
            raise IOError('File has not been synced yet. File does not exists.')
        wrapped = File(self.get_storage(name).open(name, 'rb'))
        digest = hashlib.sha1()
        for piece in wrapped.chunks():
            digest.update(force_bytes(piece))
        expected = name.split("/")[-1]
        # The basename doubles as the content hash; mismatch means the
        # sync has not completed.
        if str(digest.hexdigest()) != str(expected):
            raise IOError('File has not been synced yet. Hash Conflict.')
        self.local._save(name, wrapped)
    return self.get_storage(name).open(name, mode)
def download(request, filename):
    """Serve *filename* from ``SAVED_FILES_DIR``, logging the client IP."""
    target = os.path.join(SAVED_FILES_DIR, filename)
    client_ip = get_ip(request)
    logger.info(f'IP: {client_ip} have downloaded file (unknown)')
    with open(target, 'rb') as handle:
        response = HttpResponse(File(handle).chunks(),
                                content_type='APPLICATION/OCTET-STREAM')
        response['Content-Disposition'] = 'attachment; filename=' + filename
        response['Content-Length'] = os.path.getsize(target)
        return response
def get(self, request, fid):
    """Stream the upload with primary key *fid* back as an attachment."""
    record = UploadModel.objects.filter(id=fid)[0]
    src = record.src
    with open(src, 'rb') as handle:
        payload = File(handle)
        response = HttpResponse(payload.chunks(),
                                content_type='APPLICATION/OCTET-STREAM')
        response['Content-Disposition'] = (
            'attachment; filename=' + os.path.basename(src))
        response['Content-Length'] = os.path.getsize(src)
        return response
def open(self, name, mode='rb'):
    """Open *name*, syncing a verified copy into local storage first when
    the remote backend is active for this name.

    Raises ``IOError`` when the remote file is absent or its content hash
    does not match the hash embedded in the basename.
    """
    if not self.using_remote(name):
        return self.get_storage(name).open(name, mode)
    if not self.remote.exists(name):
        raise IOError(
            'File has not been synced yet. File does not exists.')
    local_file = self.get_storage(name).open(name, 'rb')
    content = File(local_file)
    hasher = hashlib.sha1()
    for blob in content.chunks():
        hasher.update(force_bytes(blob))
    # Filename basename encodes the expected SHA-1 of the content.
    if str(hasher.hexdigest()) != str(name.split("/")[-1]):
        raise IOError('File has not been synced yet. Hash Conflict.')
    self.local._save(name, content)
    return self.get_storage(name).open(name, mode)
def save(self):
    """Persist the form's Code instance.

    Sets publication/expiry timestamps, then either (a) mirrors an uploaded
    text file into ``code_paste``, or (b) writes the pasted text out to a
    generated file — so file and paste always match.

    NOTE(review): ``code.code_paste += chunk`` concatenates raw chunks from
    ``File.chunks()`` onto a str; if the storage yields bytes (typical on
    Python 3) this raises TypeError — confirm the targeted Python/storage.
    """
    code = super(CodeForm, self).save(commit=False)
    code.pub_date = datetime.datetime.now()
    # Pastes expire one week after publication.
    code.del_date = datetime.datetime.now() + relativedelta(days=+7)
    code.hits = 0
    if code.txt_file:
        # Use the filename as the title if the user does not choose a title
        if not code.title:
            code.title = code.txt_file.name
        # find the lexer based on file extension before we add the timestamp
        code.language = find_file_lexer(code.txt_file.name)
        # add a unique timestamp
        name = str(code.pub_date.strftime("%f")) + "-" + code.txt_file.name
        content = code.txt_file
        # save file content
        code.txt_file.save(name, content)
        # set code_paste to match file content
        f = File(code.txt_file)
        code.code_paste = ""
        for chunk in f.chunks():
            code.code_paste += chunk
        f.close()
    else:
        ext = find_lang_ext(code.language)
        if not code.title:
            code.title = "file"
        # Generated filename: title + microsecond timestamp + extension.
        name = code.title + str(code.pub_date.strftime("%f")) + ext
        if code.title == "file":
            code.title = "Untitled Submission"
        content = ContentFile(str(code.code_paste))
        code.txt_file.save(name, content)
        code.txt_file.close()
    if not code.author:
        code.author = "an unknown author"
    code.save()
    return code
def store_temp_file(fd):
    """
    Given a file-like object and dumps it to the temporary directory.
    Returns the temporary file object, rewound to the beginning.
    """
    temp_file = tempfile.NamedTemporaryFile(
        encoding=getattr(fd, 'encoding', None))
    # writelines() performs the same sequential writes as a chunk loop.
    temp_file.writelines(FileWrapper(fd).chunks())
    temp_file.seek(0)
    return temp_file
def hash_filename(filename, digestmod=hashlib.sha1,
                  chunk_size=UploadedFile.DEFAULT_CHUNK_SIZE):
    """
    Return the hash of the contents of a filename, using chunks.

    >>> import os.path as p
    >>> filename = p.join(p.abspath(p.dirname(__file__)), 'models.py')
    >>> hash_filename(filename)
    'da39a3ee5e6b4b0d3255bfef95601890afd80709'
    """
    # BUG FIX: open in binary mode — text-mode chunks are str on Python 3
    # (the hasher needs bytes) and are subject to newline translation,
    # which would change the digest across platforms.
    fileobj = File(open(filename, 'rb'))
    try:
        return hash_chunks(fileobj.chunks(chunk_size=chunk_size))
    finally:
        fileobj.close()
def save_to_remote_storage(path, storage=None):
    """
    Push a local file to remote media storage.

    :param path: local path of file
    :param storage: storage instance to use (default: default_storage)
    :returns: url of file on remote storage server
    """
    if not path or not os.path.isfile(path):
        logger().debug('save to remote storage - cannot find file: %s' % path)
        # Nothing to push: echo the input path back to the caller.
        remote_path = path
    else:
        if not storage:
            storage = default_storage
        # Derive the storage key by stripping the matching local root prefix.
        if storage == resources_storage:
            fkey = path.replace(RESOURCES_ROOT, '')
        else:
            fkey = path.replace(MEDIA_ROOT, '')
        # determine mime type
        fext = get_img_ext(path)
        content_type = MIME_TYPES.get(fext, UNKNOWN_MIME_TYPE)
        # write file to remote server
        file = storage.open(fkey, 'w')
        storage.headers.update({"Content-Type": content_type})
        f = open(path, 'rb')
        media = File(f)
        for chunk in media.chunks(chunk_size=FILE_IO_CHUNK_SIZE):
            file.write(chunk)
        file.close()
        media.close()
        f.close()
        # construct remote url
        if storage == resources_storage:
            remote_path = '%s%s' % (RESOURCES_URL, fkey)
        else:
            remote_path = '%s%s' % (MEDIA_URL, fkey)
        # NOTE(review): this debug uses content_type, which is only bound in
        # this branch — keeping it inside the else avoids a NameError.
        logger().debug(
            'save_to_remote_storage - local: %s / remote: %s / mime-type: %s'
            % (path, remote_path, content_type))
    return remote_path
def random_image(request, pth):
    """Stream a randomly chosen image from category *pth*.

    Responds 404 when the category contains no images.
    """
    candidates = ImageFile.objects.all().filter(category__name=pth)
    if len(candidates) > 0:
        obj = choice(candidates)
        real_path = os.path.join(settings.MEDIA_ROOT, obj.content.name)
        wrapped = File(open(real_path, "rb"), obj.content.name)
        # 8 KiB streaming chunks; Content-Length comes from the DB record.
        response = StreamingHttpResponse(wrapped.chunks(8192),
                                         content_type=obj.mime_type)
        response['Content-Length'] = obj.size
        return response
    return HttpResponseNotFound("""
        <h1>No images found</h1>
        <p>No images of category {} were found in the database.</p>
        """.format(pth))
def save_to_tempdir(file: File) -> tp.Tuple[str, bool]:
    """
    Save open file to disk using blake2b hash.

    :param file: open Django file.
    :return: ``(path, already_existed)`` — path is "" when a file with the
        same content hash is already on disk.
    """
    file_hash = compute_blake2b_hash(file)
    target_dir = "/tmp/files"
    # FIX: ensure the target directory exists — the original raised
    # FileNotFoundError on a fresh host where /tmp/files was absent.
    os.makedirs(target_dir, exist_ok=True)
    path = os.path.join(target_dir, file_hash)
    if os.path.exists(path):
        return "", True
    with open(path, "wb") as tempfile:
        for chunk in file.chunks():
            tempfile.write(chunk)
    return path, False
def _save(self, name, content: File):
    """Save *content* to the OSS bucket and return the cleaned name.

    To stay consistent with Django's storage contract, the returned value
    is a path relative to the media root. Small files are uploaded in one
    request; larger files use OSS multipart upload.
    """
    target_name = self._get_target_name(name)
    logger.debug('OSS存储后端:保存文件 %s' % target_name)
    content.open()
    # Default chunk size: 1 MB.
    DEFAULT_CHUNK_SIZE = 1 * 1024 * 1024
    logger.debug('OSS存储后端:读取完成,文件大小 %d' % content.size)
    if not content.multiple_chunks(chunk_size=DEFAULT_CHUNK_SIZE):
        logger.debug('OSS存储后端:不分片,开始上传')
        # Fits in one chunk: single-request upload.
        content_str = content.file.read()
        self.bucket.put_object(target_name, content_str)
    else:
        logger.debug('OSS存储后端:分片,开始上传')
        # Switch to multipart upload for large files.
        upload_id = self.bucket.init_multipart_upload(
            target_name).upload_id
        parts = []
        part_id = 1
        for chunk in content.chunks(chunk_size=DEFAULT_CHUNK_SIZE):
            # TODO Create an API endpoint for getting uploading process
            result = self.bucket.upload_part(target_name, upload_id,
                                             part_id, chunk)
            parts.append(PartInfo(part_id, result.etag))
            part_id += 1
            logger.debug('OSS存储后端:上传分片 #%d' % part_id)
        result = self.bucket.complete_multipart_upload(
            target_name, upload_id, parts)
    logger.debug('OSS存储后端:上传完毕,关闭文件')
    content.close()
    return self._clean_name(name)
def test_subject_restore_post(self):
    """Round-trip check: back up one subject's resource to a zip, restore
    the zip into another subject, and verify the restored content."""
    self.client.force_login(self.professors[0])
    data = {
        "resource[]": [self.resource.id]
    }
    response = self.client.post(reverse("subjects:do_backup", kwargs={"subject": self.subjects[1].slug}), data)
    numberTopics = self.subjects[0].topic_subject.all().count()
    try:
        # Persist the backup payload so it can be re-uploaded below.
        f = io.BytesIO(response.content)
        fi = File(f)
        filepath = "backup.zip"
        with default_storage.open(filepath, 'wb+') as dest:
            for chunk in fi.chunks():
                dest.write(chunk)
        with default_storage.open(filepath, "rb") as attach:
            response = self.client.post(reverse("subjects:do_restore", kwargs={"subject": self.subjects[0].slug}), {"zip_file": attach}, follow=True)
        self.assertEquals(response.status_code, 200)
        self.assertRedirects(response, reverse("subjects:restore", kwargs={"slug": self.subjects[0].slug}))
        message = list(response.context.get("messages"))[0]
        self.assertEquals(message.tags, "success")
        self.assertIn(ugettext("Backup restored successfully!"), message.message)
        subject = Subject.objects.get(id=self.subjects[0].id)
        # Restoring should add exactly one topic and recreate the Webpage.
        self.assertEquals(subject.topic_subject.all().count(), numberTopics + 1)
        self.assertTrue(Webpage.objects.filter(topic__subject__id=self.subjects[0].id).exists())
    finally:
        # Clean up the temp zip regardless of assertion outcomes.
        default_storage.delete(filepath)
        f.close()
def handle_uploaded_file(file: File, output_format):
    """Convert an uploaded PDF to a spreadsheet and return it as a
    downloadable response in *output_format*.

    :param file: uploaded PDF as a Django ``File``.
    :param output_format: extension understood by ``django_excel``.
    """
    api = API()
    with NamedTemporaryFile(suffix='.pdf', mode='wb') as pdf_file, \
            NamedTemporaryFile(suffix='.csv', mode='r') as csv_file:
        # Convert PDF to CSV
        for chunk in file.chunks():
            pdf_file.write(chunk)
        # BUG FIX: flush buffered bytes before tabula re-opens the file by
        # name — otherwise the tail of the PDF may still sit in the Python
        # buffer and the conversion sees a truncated document.
        pdf_file.flush()
        convert_into(pdf_file.name, csv_file.name, spreadsheet=True,
                     pages='all', output_format='csv')
        # Process
        sheet = pyexcel.get_sheet(file_name=csv_file.name)
        sheet.name = 'Output'
        api.process_spreadsheet(sheet)
        # Output file
        return django_excel.make_response(
            sheet, output_format,
            file_name=path.splitext(path.basename(file.name))[0])
def doUploadDataset(request):
    """Handle a dataset upload (pickle or CSV), load it into a DataFrame,
    and stash the derived features into the project session.

    NOTE(review): writes to the fixed relative filename 'upfile', so two
    concurrent uploads would clobber each other — confirm single-user use.
    """
    global proj_name
    session, _ = loadSession(proj_name)
    inmemory_file = request.FILES['file']
    upfile = File(inmemory_file)
    with open('upfile', 'wb+') as ftemp:
        for chunk in upfile.chunks():
            ftemp.write(chunk)
    # Choose the parser from the filename extension: pickle, or CSV with
    # European conventions (';' separator, '.' thousands, ',' decimal).
    if (upfile.name[-3:] == 'pkl'):
        df = pd.read_pickle('upfile')
    else:
        df = pd.read_csv('upfile', sep=';', thousands='.', decimal=',')
    # Replace NaNs with 0 in numeric (float64/int64) columns only.
    for i in range(len(df.columns)):
        col = df.columns[i]
        if (df[col].dtype in [np.dtype('float64'), np.dtype('int64')]):
            df[col].fillna(0, inplace=True)
    feature_list, val_mtx, value_list = readDataFromDf(session, df)
    session['feature_list'] = feature_list
    session['val_mtx'] = val_mtx
    session['value_list'] = value_list
    session['df_pklfname'] = pkl_dir + 'df.pkl'
    # Advance the project workflow to stage 1 (data loaded).
    session['stage'] = 1
    savePKL(df, session['df_pklfname'])
    saveSession(session)
    return JsonResponse({})
def resume_upload(self, remote_file_path, local_file_path,
                  multipart_threshold=100 * 1024, part_size=100 * 1024,
                  num_threads=5, progress_callback=None):
    """Resumable upload of a local file to OSS.

    :param remote_file_path: destination key in the bucket.
    :param local_file_path: path of the local source file.
    :raises AliyunOSSException: when the local file is missing or empty.
    :return: the ``oss2.resumable_upload`` result.
    """
    import os  # local import: stat the file instead of reading it

    name = local_file_path.split('/')[-1]
    content_type = mimetypes.guess_type(
        name)[0] or 'application/x-octet-stream'
    # PERF FIX: the original read the entire file into memory only to
    # measure its length; stat it instead. A missing or empty file raises
    # the same AliyunOSSException the original intended.
    try:
        size = os.path.getsize(local_file_path)
    except OSError:
        size = 0
    if not size:
        raise AliyunOSSException(
            '`local_file_path` not exists error, local_file_path: {}'.
            format(local_file_path))
    headers = {
        'x-oss-acl': self.acl,
        'Content-Type': content_type,
        'Content-Length': str(size),
    }
    return oss2.resumable_upload(self.bucket, remote_file_path,
                                 local_file_path,
                                 store=oss2.ResumableStore(root='/tmp'),
                                 headers=headers,
                                 multipart_threshold=multipart_threshold,
                                 part_size=part_size,
                                 num_threads=num_threads,
                                 progress_callback=progress_callback)
class FileWrapper(WrapperBase):
    """elFinder wrapper around a single file on the local filesystem.

    Lazily opens the underlying file (``self._file``) on first content
    access and closes it whenever the path changes.
    """

    def __init__(self, file_path, root):
        if not os.path.isfile(file_path):
            raise ValueError("'%s' is not a valid file path" % file_path)
        self._file = None  # lazily-opened handle; see get_chunks/get_contents
        self.path = file_path
        super(FileWrapper, self).__init__(root)

    def is_file(self):
        """Always True: this wrapper only represents files."""
        return True

    def get_path(self):
        return self._file_path

    def set_path(self, path):
        # Changing the path invalidates any open handle.
        self._file_path = path
        if self._file is not None:
            self._file.close()
            self._file = None

    path = property(get_path, set_path)

    @property
    def name(self):
        # NOTE(review): reads self._file.name, so this raises if the file
        # has not been opened yet — confirm callers open first.
        return self._file.name

    def get_chunks(self):
        """Return a chunk iterator over the file, opening it binary-lazily."""
        if self._file is None:
            self._file = File(open(self.path, 'rb'))
        return self._file.chunks()

    def get_contents(self):
        """Return the full file contents (text-mode open)."""
        if self._file is None:
            self._file = File(open(self.path))
        self._file.seek(0)
        return self._file.read()

    def set_contents(self, data):
        """Append *data* to the file using a fresh handle."""
        if self._file is not None:
            self._file.close()
            self._file = None
        _file = File(open(self.path, "ab"))
        _file.write(data)
        _file.close()

    contents = property(get_contents, set_contents)

    def get_info(self):
        """Build the elFinder file-info dict for this file."""
        path = self.path
        info = {
            'name': os.path.basename(path),
            'hash': self.get_hash(),
            'date': datetime.fromtimestamp(
                os.stat(path).st_mtime).strftime("%d %b %Y %H:%M"),
            'size': self.get_size(),
            'read': os.access(path, os.R_OK),
            'write': os.access(path, os.W_OK),
            'rm': os.access(path, os.W_OK),
            'url': self.get_url(),
            'phash': self.get_parent_hash() or '',
        }
        if settings.DEBUG:
            info['abs_path'] = path
        # parent_hash = self.get_parent_hash()
        # if parent_hash:
        #     info['phash'] = parent_hash
        mime, is_image = self.get_mime(path)
        # if is_image and self.imglib and False:
        #     try:
        #         import Image
        #         l['tmb'] = self.get_thumb_url(f)
        #     except ImportError:
        #         pass
        #     except Exception:
        #         raise
        info['mime'] = mime
        return info

    def get_size(self):
        """Return the file size in bytes (lstat: does not follow symlinks)."""
        return os.lstat(self.path).st_size

    def get_url(self):
        """Return the public URL under the configured FS driver root."""
        rel_path = os.path.relpath(self.path, self.root).replace('\\', '/')
        user_path = '%s/' % (self.root.split('/')[-1],)
        return '%s%s%s' % (elfinder_settings.ELFINDER_FS_DRIVER_URL,
                           user_path, rel_path)

    def get_mime(self, path):
        """Return ``(mime_type, is_image)`` guessed from the path."""
        mime = mimes.guess_type(path)[0] or 'Unknown'
        if mime.startswith('image/'):
            return mime, True
        else:
            return mime, False

    def remove(self):
        """Delete the underlying file from disk."""
        os.remove(self.path)

    @classmethod
    def mkfile(cls, file_path, root):
        """Create an empty file at *file_path* and return a wrapper for it."""
        if not os.path.exists(file_path):
            f = open(file_path, "w")
            f.close()
            return cls(file_path, root)
        else:
            raise Exception("File '%s' already exists"
                            % os.path.basename(file_path))
def handle_uploaded_file(content):
    """Save an uploaded file under MEDIA_ROOT at a generated path and
    return that (relative) generated path.

    Mirrors Django's FileSystemStorage._save: creates intermediate
    directories and retries with a fresh generated name on filename
    collisions (O_EXCL open).
    """
    generated_path = generate_upload_path(None, content.name)
    full_path = (os.path.join(settings.MEDIA_ROOT,
                              generated_path)).replace('\\', '/')
    # Ensure we have a Django File-like object with .chunks().
    if not hasattr(content, 'chunks'):
        content = File(content)
    # Create any intermediate directories that do not exist.
    # Note that there is a race between os.path.exists and os.makedirs:
    # if os.makedirs fails with EEXIST, the directory was created
    # concurrently, and we can continue normally. Refs #16082.
    directory = os.path.dirname(full_path)
    if not os.path.exists(directory):
        try:
            os.makedirs(directory)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
    if not os.path.isdir(directory):
        raise IOError("%s exists and is not a directory." % directory)
    # There's a potential race condition between get_available_name and
    # saving the file; it's possible that two threads might return the
    # same name, at which point all sorts of fun happens. So we need to
    # try to create the file, but if it already exists we have to go back
    # to get_available_name() and try again.
    while True:
        try:
            # This file has a file path that we can move.
            if hasattr(content, 'temporary_file_path'):
                file_move_safe(content.temporary_file_path(), full_path)
            # This is a normal uploadedfile that we can stream.
            else:
                # This fun binary flag incantation makes os.open throw an
                # OSError if the file already exists before we open it.
                flags = (os.O_WRONLY | os.O_CREAT | os.O_EXCL |
                         getattr(os, 'O_BINARY', 0))
                # The current umask value is masked out by os.open!
                fd = os.open(full_path, flags, 0o666)
                _file = None
                try:
                    locks.lock(fd, locks.LOCK_EX)
                    for chunk in content.chunks():
                        if _file is None:
                            # Pick text/binary mode from the first chunk.
                            mode = 'wb' if isinstance(chunk, bytes) else 'wt'
                            _file = os.fdopen(fd, mode)
                        _file.write(chunk)
                finally:
                    locks.unlock(fd)
                    if _file is not None:
                        _file.close()
                    else:
                        os.close(fd)
        except OSError as e:
            if e.errno == errno.EEXIST:
                # Name collision: generate a new path and retry.
                generated_path = generate_upload_path(None, content.name)
                full_path = (os.path.join(settings.MEDIA_ROOT,
                                          generated_path)).replace('\\', '/')
            else:
                raise
        else:
            # OK, the file save worked. Break out of the loop.
            break
    return generated_path
def mystreamview(request):
    """Serve the hard-coded MP3 file as a ``video/mpeg`` response.

    NOTE(review): the absolute path is hard-coded — consider moving it to
    settings.
    """
    # FIX: the original never closed the opened file. HttpResponse consumes
    # an iterator eagerly when given one as content, so reading inside the
    # ``with`` is safe and the descriptor is released afterwards.
    with io.open("/home/crispander/Escritorio/k.mp3", "rb") as handle:
        stream_file = File(handle)
        response = HttpResponse(stream_file.chunks(),
                                content_type="video/mpeg")
    return response
class FileWrapper(WrapperBase):
    """elFinder wrapper around a single ``pathlib`` file.

    Lazily opens the underlying file (``self._file``) on first content
    access and closes it whenever the path changes.
    """

    def __init__(self, file_path, root, fs_driver_url, **options):
        if not file_path.is_file():
            raise ValueError("'%s' is not a valid file path" % file_path)
        self._file = self._file_path = None  # handle opened lazily
        self.path = file_path
        self.fs_driver_url = fs_driver_url
        super(FileWrapper, self).__init__(root, **options)

    def is_file(self):
        return self.path.is_file()

    @property
    def path(self):
        return self._file_path

    @path.setter
    def path(self, path):
        # Changing the path invalidates any open handle.
        self._file_path = path
        if self._file is not None:
            self._file.close()
            self._file = None

    @property
    def name(self):
        # NOTE(review): reads self._file.name, so this raises if the file
        # has not been opened yet — confirm callers open first.
        return self._file.name

    def get_chunks(self):
        """Return a chunk iterator over the file, opening it binary-lazily."""
        if self._file is None:
            self._file = File(self.path.open('rb'))
        return self._file.chunks()

    def get_contents(self):
        """Return the full file contents (text-mode open)."""
        if self._file is None:
            self._file = File(self.path.open())
        self._file.seek(0)
        return self._file.read()

    def set_contents(self, data):
        """Append *data* to the file using a fresh handle."""
        if self._file is not None:
            self._file.close()
            self._file = None
        _file = File(self.path.open("ab"))
        _file.write(data)
        _file.close()

    contents = property(get_contents, set_contents)

    def get_info(self):
        """Build the elFinder file-info dict for this file."""
        path = self.path
        spath = str(path)
        info = {
            'name': self.bytes_safe_decode(path.name),
            'hash': self.get_hash(),
            'date': datetime.fromtimestamp(
                path.stat().st_mtime).strftime("%d %b %Y %H:%M"),
            'size': self.get_size(),
            'read': os.access(spath, os.R_OK),
            'write': os.access(spath, os.W_OK),
            'rm': os.access(spath, os.W_OK),
            'url': self.get_url(),
            'phash': self.get_parent_hash() or '',
        }
        if settings.DEBUG:
            info['abs_path'] = self.bytes_safe_decode(spath)
        mime, is_image = self.get_mime(spath)
        # if is_image and self.imglib and False:
        #     try:
        #         import Image
        #         l['tmb'] = self.get_thumb_url(f)
        #     except ImportError:
        #         pass
        #     except Exception:
        #         raise
        info['mime'] = mime
        return info

    def get_size(self):
        """Return the file size in bytes (lstat: does not follow symlinks)."""
        return self.path.lstat().st_size

    def get_url(self):
        """Return the public URL under the configured FS driver root."""
        rel_path = self.path.relative_to(self.root).as_posix()
        user_path = '%s/' % (self.root.parts[-1],)
        fs_driver_url = self.fs_driver_url
        # Append the user directory segment unless the driver URL already
        # ends with it.
        if not re.search("(?:%s)$" % re.escape(user_path),
                         fs_driver_url, re.U):
            fs_driver_url += user_path
        if not fs_driver_url.endswith("/") and not rel_path.startswith("/"):
            fs_driver_url += '/'
        return urllib.quote_plus(fs_driver_url + rel_path, safe="/")

    def get_mime(self, path):
        """Return ``(mime_type, is_image)`` guessed from the path."""
        mime = mimes.guess_type(path)[0] or 'Unknown'
        if mime.startswith('image/'):
            return mime, True
        else:
            return mime, False

    def remove(self):
        """Delete the underlying file from disk."""
        self.path.unlink()

    @classmethod
    def mkfile(cls, file_path, root, fs_driver_url):
        """Create an empty file at *file_path* and return a wrapper for it."""
        if not file_path.is_file():
            with file_path.open("w"):
                return cls(file_path, root, fs_driver_url=fs_driver_url)
        else:
            raise Exception("File '%s' already exists" % file_path.name)
def write_file(f: File, filename: str) -> None:
    """Copy the content of *f* into *filename*, chunk by chunk."""
    with open(filename, 'wb+') as sink:
        # writelines() performs the same sequential writes as a chunk loop.
        sink.writelines(f.chunks())
class FileWrapper(WrapperBase):
    """elFinder wrapper around a single file on the local filesystem
    (os.path flavour).

    Lazily opens the underlying file (``self._file``) on first content
    access and closes it whenever the path changes.
    """

    def __init__(self, file_path, root):
        if not os.path.isfile(file_path):
            raise ValueError("'%s' is not a valid file path" % file_path)
        self._file = None  # lazily-opened handle
        self.path = file_path
        super(FileWrapper, self).__init__(root)

    def is_file(self):
        """Always True: this wrapper only represents files."""
        return True

    def get_path(self):
        return self._file_path

    def set_path(self, path):
        # Changing the path invalidates any open handle.
        self._file_path = path
        if self._file is not None:
            self._file.close()
            self._file = None

    path = property(get_path, set_path)

    @property
    def name(self):
        # NOTE(review): reads self._file.name, so this raises if the file
        # has not been opened yet — confirm callers open first.
        return self._file.name

    def get_chunks(self):
        """Return a chunk iterator over the file, opening it binary-lazily."""
        if self._file is None:
            self._file = File(open(self.path, 'rb'))
        return self._file.chunks()

    def get_contents(self):
        """Return the full file contents (text-mode open)."""
        if self._file is None:
            self._file = File(open(self.path))
        self._file.seek(0)
        return self._file.read()

    def set_contents(self, data):
        """Append *data* to the file using a fresh handle."""
        if self._file is not None:
            self._file.close()
            self._file = None
        _file = File(open(self.path, "ab"))
        _file.write(data)
        _file.close()

    contents = property(get_contents, set_contents)

    def get_info(self):
        """Build the elFinder file-info dict for this file."""
        path = self.path
        info = {
            'name': os.path.basename(path),
            'hash': self.get_hash(),
            'date': datetime.fromtimestamp(
                os.stat(path).st_mtime).strftime("%d %b %Y %H:%M"),
            'size': self.get_size(),
            'read': os.access(path, os.R_OK),
            'write': os.access(path, os.W_OK),
            'rm': os.access(path, os.W_OK),
            'url': self.get_url(),
            'phash': self.get_parent_hash() or '',
        }
        if settings.DEBUG:
            info['abs_path'] = path
        # parent_hash = self.get_parent_hash()
        # if parent_hash:
        #     info['phash'] = parent_hash
        mime, is_image = self.get_mime(path)
        # if is_image and self.imglib and False:
        #     try:
        #         import Image
        #         l['tmb'] = self.get_thumb_url(f)
        #     except ImportError:
        #         pass
        #     except Exception:
        #         raise
        info['mime'] = mime
        return info

    def get_size(self):
        """Return the file size in bytes (lstat: does not follow symlinks)."""
        return os.lstat(self.path).st_size

    def get_url(self):
        """Return the public URL under the configured FS driver root."""
        rel_path = os.path.relpath(self.path, self.root).replace('\\', '/')
        user_path = '%s/' % (self.root.split('/')[-1], )
        return '%s%s%s' % (elfinder_settings.ELFINDER_FS_DRIVER_URL,
                           user_path, rel_path)

    def get_mime(self, path):
        """Return ``(mime_type, is_image)`` guessed from the path."""
        mime = mimes.guess_type(path)[0] or 'Unknown'
        if mime.startswith('image/'):
            return mime, True
        else:
            return mime, False

    def remove(self):
        """Delete the underlying file from disk."""
        os.remove(self.path)

    @classmethod
    def mkfile(cls, file_path, root):
        """Create an empty file at *file_path* and return a wrapper for it."""
        if not os.path.exists(file_path):
            f = open(file_path, "w")
            f.close()
            return cls(file_path, root)
        else:
            raise Exception("File '%s' already exists"
                            % os.path.basename(file_path))
def hash_file(file: File) -> str:
    """Return the hex SHA-256 digest of a Django file's content."""
    digest = hashlib.sha256()
    for piece in file.chunks():
        digest.update(to_bytes(piece))
    return digest.hexdigest()
def process(request):
    """Run the BMS saliency pipeline on two images (uploaded or samples)
    and render the comparison result page.

    Computes a grid size such that one grid cell spans ~1 degree of visual
    angle from the supplied display dimensions, resolution, and viewing
    distance.
    """
    if request.method == 'POST':
        # Timestamp doubles as a per-request working-directory name.
        currenttime = time.strftime('%y%m%d%H%M%S')
        os.makedirs(os.path.join(settings.PROJECT_DIR,
                                 'bms_executable/src/%s' % currenttime))
        os.makedirs(os.path.join(settings.PROJECT_DIR,
                                 'bms_executable/output/%s' % currenttime))
        dw = int(request.POST['dimension_width'])
        dh = int(request.POST['dimension_height'])
        rw = int(request.POST['resolution_width'])
        rh = int(request.POST['resolution_height'])
        vd = int(request.POST['viewing_distance'])
        # Pixels per degree of visual angle, per axis.
        optimal_grid_size_width = math.tan(math.radians(1))*2*vd*rw/dw
        optimal_grid_size_height = math.tan(math.radians(1))*2*vd*rh/dh
        gsize_w = math.ceil(optimal_grid_size_width)
        gsize_h = math.ceil(optimal_grid_size_height)
        # Fall back to bundled sample images for any missing upload.
        if 'image1' in request.FILES and 'image2' in request.FILES:
            image1 = request.FILES['image1']
            image2 = request.FILES['image2']
        elif 'image1' in request.FILES and 'image2' not in request.FILES:
            image1 = request.FILES['image1']
            image2 = File(open('sample4.jpg', 'r+b'))
        elif 'image1' not in request.FILES and 'image2' in request.FILES:
            image1 = File(open('sample2.jpg', 'r+b'))
            image2 = request.FILES['image2']
        else:
            image1 = File(open('sample2.jpg', 'r+b'))
            image2 = File(open('sample4.jpg', 'r+b'))
            # return HttpResponse("Process failed. Please upload image files.")
        # image1.name = time.strftime('%y%m%d%H%M%S') + image1.name
        # image2.name = time.strftime('%y%m%d%H%M%S') + image2.name
        with open('bms_executable/src/%s/' % currenttime + image1.name,
                  'wb+') as destination:
            for chunk in image1.chunks():
                destination.write(chunk)
        _execute_bms(currenttime)
        output_name1, output_extension1 = os.path.splitext(image1.name)
        with open('bms_executable/src/%s/' % currenttime + image2.name,
                  'wb+') as destination:
            for chunk in image2.chunks():
                destination.write(chunk)
        _execute_bms(currenttime)
        output_name2, output_extension2 = os.path.splitext(image2.name)
        # Template context: input/output paths plus initial crop boxes.
        context = {
            "input_name1": "src/%s/" % currenttime + image1.name,
            "input_name2": "src/%s/" % currenttime + image2.name,
            "output_name1": "output/%s/" % currenttime + output_name1 + ".png",
            "output_name2": "output/%s/" % currenttime + output_name2 + ".png",
            "gsize_w": gsize_w,
            "gsize_h": gsize_h,
            "rw": rw,
            "rh": rh,
            "img1_x1": 30,
            "img1_x2": 250,
            "img1_y1": 20,
            "img1_y2": 200,
            "img2_x1": 30,
            "img2_x2": 250,
            "img2_y1": 20,
            "img2_y2": 200,
        }
        return render_to_response("result.html", context)
    return HttpResponse("Process failed. Please make sure you uploaded a correct image file.")
def upload_to_storage(file: File, path: str):
    """Write an uploaded file to *path* on local disk, creating parent
    directories as needed.

    :param file: Django ``File`` to persist.
    :param path: destination filesystem path.
    """
    parent = os.path.dirname(path)
    # FIX: exist_ok avoids the check-then-create race of the original, and
    # guarding on a non-empty dirname prevents os.makedirs('') from raising
    # when *path* is a bare filename.
    if parent:
        os.makedirs(parent, exist_ok=True)
    with open(path, 'wb+') as dest:
        for c in file.chunks():
            dest.write(c)