def set_file_info(filenames): """ Using the list of filenames being shared, fill in details that the web page will need to display. This includes zipping up the file in order to get the zip file's name and size. """ global file_info, zip_filename, zip_filesize # build file info list file_info = {'files': [], 'dirs': []} for filename in filenames: info = { 'filename': filename, 'basename': os.path.basename(filename.rstrip('/')) } if os.path.isfile(filename): info['size'] = os.path.getsize(filename) info['size_human'] = helpers.human_readable_filesize(info['size']) file_info['files'].append(info) if os.path.isdir(filename): info['size'] = helpers.dir_size(filename) info['size_human'] = helpers.human_readable_filesize(info['size']) file_info['dirs'].append(info) file_info['files'] = sorted(file_info['files'], key=lambda k: k['basename']) file_info['dirs'] = sorted(file_info['dirs'], key=lambda k: k['basename']) # zip up the files and folders z = helpers.ZipWriter() for info in file_info['files']: z.add_file(info['filename']) for info in file_info['dirs']: z.add_dir(info['filename']) z.close() zip_filename = z.zip_filename zip_filesize = os.path.getsize(zip_filename)
def set_file_info(filenames):
    global file_info, zip_filename, zip_filesize

    # build file info list
    file_info = {'files': [], 'dirs': []}
    for filename in filenames:
        info = {
            'filename': filename,
            'basename': os.path.basename(filename)
        }
        if os.path.isfile(filename):
            info['size'] = os.path.getsize(filename)
            info['size_human'] = helpers.human_readable_filesize(info['size'])
            file_info['files'].append(info)
        if os.path.isdir(filename):
            info['size'] = helpers.dir_size(filename)
            info['size_human'] = helpers.human_readable_filesize(info['size'])
            file_info['dirs'].append(info)
    file_info['files'] = sorted(file_info['files'], key=lambda k: k['basename'])
    file_info['dirs'] = sorted(file_info['dirs'], key=lambda k: k['basename'])

    # zip up the files and folders
    z = helpers.ZipWriter()
    for info in file_info['files']:
        z.add_file(info['filename'])
    for info in file_info['dirs']:
        z.add_dir(info['filename'])
    z.close()
    zip_filename = z.zip_filename
    zip_filesize = os.path.getsize(zip_filename)
def set_file_info(filenames):
    global file_info, zip_filename, zip_filesize

    # build file info list
    file_info = {'files': [], 'dirs': []}
    for filename in filenames:
        info = {
            'filename': filename,
            'basename': os.path.basename(filename.rstrip('/'))
        }
        if os.path.isfile(filename):
            info['size'] = os.path.getsize(filename)
            info['size_human'] = helpers.human_readable_filesize(info['size'])
            file_info['files'].append(info)
        if os.path.isdir(filename):
            info['size'] = helpers.dir_size(filename)
            info['size_human'] = helpers.human_readable_filesize(info['size'])
            file_info['dirs'].append(info)
    file_info['files'] = sorted(file_info['files'], key=lambda k: k['basename'])
    file_info['dirs'] = sorted(file_info['dirs'], key=lambda k: k['basename'])

    # zip up the files and folders
    z = helpers.ZipWriter()
    for info in file_info['files']:
        z.add_file(info['filename'])
    for info in file_info['dirs']:
        z.add_dir(info['filename'])
    z.close()
    zip_filename = z.zip_filename
    zip_filesize = os.path.getsize(zip_filename)
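# The ZipWriter helper used by set_file_info() above is not shown in this
# listing. Judging from the calls made here, it needs add_file(), add_dir(),
# close(), and a zip_filename attribute. A minimal sketch under those
# assumptions; the temp-file naming and directory-walk details below are
# illustrative, not the actual helpers implementation:

import os
import tempfile
import zipfile


class ZipWriter(object):
    def __init__(self):
        # write the archive to a temporary file and expose its path
        fd, self.zip_filename = tempfile.mkstemp(suffix='.zip')
        os.close(fd)
        self.z = zipfile.ZipFile(self.zip_filename, 'w', allowZip64=True)

    def add_file(self, filename):
        # store single files under their basename
        self.z.write(filename, os.path.basename(filename))

    def add_dir(self, filename):
        # recurse into the directory, storing paths relative to its parent
        parent = os.path.dirname(filename.rstrip('/'))
        for dirpath, dirnames, filenames in os.walk(filename):
            for f in filenames:
                full_filename = os.path.join(dirpath, f)
                arc_filename = os.path.relpath(full_filename, parent)
                self.z.write(full_filename, arc_filename)

    def close(self):
        self.z.close()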
def generate():
    chunk_size = 102400  # 100kb
    fp = open(zip_filename, 'rb')
    done = False
    while not done:
        chunk = fp.read(chunk_size)
        if chunk == '':
            done = True
        else:
            yield chunk

            # tell GUI the progress
            downloaded_bytes = fp.tell()
            percent = round((1.0 * downloaded_bytes / zip_filesize) * 100, 2)
            sys.stdout.write("\r{0}, {1}% ".format(helpers.human_readable_filesize(downloaded_bytes), percent))
            sys.stdout.flush()
            add_request(REQUEST_PROGRESS, path, {'id': download_id, 'bytes': downloaded_bytes})
    fp.close()
    sys.stdout.write("\n")

    # download is finished, close the server
    if not stay_open:
        print strings._("closing_automatically")
        if shutdown_func is None:
            raise RuntimeError('Not running with the Werkzeug Server')
        shutdown_func()
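# add_request() and the REQUEST_* constants used above are assumed rather
# than shown: the calls suggest they queue events (page loads, download
# progress, cancellations) for the GUI to pick up later. A minimal sketch,
# assuming a lock-guarded module-level list and simple integer constants:

import threading

REQUEST_LOAD = 0
REQUEST_PROGRESS = 1
REQUEST_CANCELED = 2

request_q = []
request_lock = threading.Lock()


def add_request(request_type, path, data=None):
    # record the event; a GUI thread can drain this list periodically
    with request_lock:
        request_q.append({
            'type': request_type,
            'path': path,
            'data': data
        })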
def index(): """ Render the template for the onionshare landing page. """ add_request(REQUEST_LOAD, request.path) return render_template_string( open(helpers.get_html_path('index.html')).read(), slug=slug, file_info=file_info, filename=os.path.basename(zip_filename).decode("utf-8"), filesize=zip_filesize, filesize_human=helpers.human_readable_filesize(zip_filesize) )
def index(slug_candidate):
    if not helpers.constant_time_compare(slug.encode('ascii'), slug_candidate.encode('ascii')):
        abort(404)

    add_request(REQUEST_LOAD, request.path)
    return render_template_string(
        open('{0}/index.html'.format(helpers.get_onionshare_dir())).read(),
        slug=slug,
        file_info=file_info,
        filename=os.path.basename(zip_filename).decode("utf-8"),
        filesize=zip_filesize,
        filesize_human=helpers.human_readable_filesize(zip_filesize),
        strings=strings.strings
    )
def index(slug_candidate):
    if not helpers.constant_time_compare(slug.encode('ascii'), slug_candidate.encode('ascii')):
        abort(404)

    add_request(REQUEST_LOAD, request.path)
    return render_template_string(
        open(helpers.get_html_path('index.html')).read(),
        slug=slug,
        file_info=file_info,
        filename=os.path.basename(zip_filename).decode("utf-8"),
        filesize=zip_filesize,
        filesize_human=helpers.human_readable_filesize(zip_filesize),
        strings=strings.strings
    )
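# helpers.constant_time_compare() above is assumed to be a timing-safe
# equality check on the slug, so that response timing doesn't reveal how many
# leading characters of a guessed slug are correct. A minimal sketch,
# preferring hmac.compare_digest (Python 2.7.7+/3.3+) with an illustrative
# fallback loop:

import hmac


def constant_time_compare(val1, val2):
    try:
        # preferred: the standard library's timing-safe comparison
        return hmac.compare_digest(val1, val2)
    except AttributeError:
        # fallback: XOR every byte pair so the time taken doesn't depend on
        # where the first mismatch occurs
        if len(val1) != len(val2):
            return False
        result = 0
        for x, y in zip(bytearray(val1), bytearray(val2)):
            result |= x ^ y
        return result == 0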
def generate():
    chunk_size = 102400  # 100kb
    fp = open(zip_filename, 'rb')
    done = False
    canceled = False
    while not done:
        chunk = fp.read(chunk_size)
        if chunk == '':
            done = True
        else:
            try:
                yield chunk

                # tell GUI the progress
                downloaded_bytes = fp.tell()
                percent = (1.0 * downloaded_bytes / zip_filesize) * 100

                # suppress stdout progress output on OS X (#203)
                if helpers.get_platform() != 'Darwin':
                    sys.stdout.write("\r{0:s}, {1:.2f}% ".format(helpers.human_readable_filesize(downloaded_bytes), percent))
                    sys.stdout.flush()

                add_request(REQUEST_PROGRESS, path, {'id': download_id, 'bytes': downloaded_bytes})
            except:
                # looks like the download was canceled
                done = True
                canceled = True

                # tell the GUI the download was canceled
                add_request(REQUEST_CANCELED, path, {'id': download_id})

    fp.close()
    if helpers.get_platform() != 'Darwin':
        sys.stdout.write("\n")

    # download is finished, close the server
    if not stay_open and not canceled:
        print strings._("closing_automatically")
        if shutdown_func is None:
            raise RuntimeError('Not running with the Werkzeug Server')
        shutdown_func()
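# generate() is a generator yielding file chunks, so it is presumably wrapped
# in a streaming Flask Response by the download route. A hedged usage sketch;
# the download_sketch() name, the header choices, and the route body are
# assumptions, not the actual code:

from flask import Response


def download_sketch():
    # stream the zip to the client chunk by chunk instead of loading it
    # into memory
    basename = os.path.basename(zip_filename)
    r = Response(generate())
    r.headers.set('Content-Length', str(zip_filesize))
    r.headers.set('Content-Disposition', 'attachment', filename=basename)
    r.headers.set('Content-Type', 'application/zip')
    return r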