def rejpg_argparse(args):
    '''
    Re-save JPEG files matching the given patterns at args.quality,
    rotating by EXIF first, and overwrite each file only when the
    re-encoded version is smaller. Prints total bytes saved and the
    remaining total size.
    '''
    patterns = pipeable.input_many(args.patterns, skip_blank=True, strip=True)
    files = spinal.walk(recurse=args.recurse, glob_filenames=patterns)
    files = [f.absolute_path for f in files]
    bytes_saved = 0
    remaining_size = 0
    for filename in files:
        # Strip unprintable characters so odd filenames don't break the console.
        print(''.join(c for c in filename if c in string.printable))
        bytesio = io.BytesIO()
        image = PIL.Image.open(filename)
        image = imagetools.rotate_by_exif(image)
        image.save(bytesio, format='jpeg', quality=args.quality)
        bytesio.seek(0)
        new_bytes = bytesio.read()
        old_size = os.path.getsize(filename)
        new_size = len(new_bytes)
        remaining_size += new_size
        if new_size < old_size:
            bytes_saved += (old_size - new_size)
            # Bug fix: context manager guarantees the handle is closed even
            # if the write raises (original used bare open/close).
            with open(filename, 'wb') as handle:
                handle.write(new_bytes)
    print('Saved', bytestring.bytestring(bytes_saved))
    print('Remaining are', bytestring.bytestring(remaining_size))
def reserve_disk_space_argparse(args):
    '''
    Check whether the requested amount of space is available on the drive.
    Returns 0 on success, 1 when there is not enough space.
    '''
    try:
        status = reserve_disk_space(
            reserve=bytestring.parsebytes(args.reserve),
            drive=args.drive,
        )
        log.info(
            'There is %s available out of %s.',
            bytestring.bytestring(status.free),
            bytestring.bytestring(status.reserve),
        )
        return 0
    except NotEnoughSpace as exc:
        log.fatal(
            'Only %s available out of %s.',
            bytestring.bytestring(exc.free),
            bytestring.bytestring(exc.reserve),
        )
        return 1
def kbps(time=None, size=None, kbps=None):
    '''
    Solve for the one unknown among time, size, and bitrate.
    Exactly one of the three arguments must be None; the computed
    value is returned as a human-readable string.
    '''
    if [time, size, kbps].count(None) != 1:
        raise ValueError('Incorrect number of unknowns')

    if size is None:
        # size = (kbps / 8) KiB/s * 1024 * seconds
        duration = hms_s(time)
        kibibytes_per_second = int(kbps) / 8
        total = kibibytes_per_second * 1024 * duration
        return bytestring.bytestring(total)

    if time is None:
        # bytes / 128 = kilobits; divide by rate to get seconds.
        byte_count = bytestring.parsebytes(size)
        kilobits = byte_count / 128
        return s_hms(kilobits / int(kbps))

    # kbps is the unknown.
    duration = hms_s(time)
    byte_count = bytestring.parsebytes(size)
    kilobits = (byte_count / 1024) * 8
    rate = kilobits / duration
    return '%d kbps' % int(round(rate))
def step(self, bytes_downloaded):
    '''
    Progress callback: feed the byte delta to the rate meters and, at most
    once per 0.1s tick (or when the download completes), print a one-line
    status: percent, two rates, and elapsed seconds.
    '''
    if g.start is None:
        g.start = time.time()
    fraction = 100 * bytes_downloaded / self.bytes_total
    percent = '%07.3f%%:' % fraction
    chunk = bytes_downloaded - g.total
    g.total = bytes_downloaded
    METER.digest(chunk)
    METER_2.digest(chunk)
    now = round(time.time(), 1)
    finished = bytes_downloaded >= self.bytes_total
    if now > g.last or finished:
        g.last = now
        columns = [
            percent.rjust(9, ' '),
            bytestring.bytestring(METER.report()[2]).rjust(15, ' '),
            bytestring.bytestring(METER_2.report()[2]).rjust(15, ' '),
            str(round(now - g.start, 1)).rjust(10, ' '),
        ]
        print(*columns, end='\r', flush=True)
def __init__(self, total_bytes):
    '''
    Prepare a progress bar for a download of total_bytes.
    '''
    # Reject-mode limiter throttles how often the bar may redraw.
    self.limiter = ratelimiter.Ratelimiter(allowance=8, mode='reject')
    self.limiter.balance = 1
    self.total_bytes = max(1, total_bytes)
    divisor = bytestring.get_appropriate_divisor(total_bytes)
    self.divisor = divisor
    self.total_format = bytestring.bytestring(total_bytes, force_unit=divisor)
    # Right-justify the downloaded amount to the width of the total.
    self.downloaded_format = '{:>%d}' % len(self.total_format)
    self.blank_char = ' '
    self.solid_char = '█'
def table_row(self, display_name=None, shaded=False):
    '''
    Render this item as an HTML table row with its anchor and size.
    '''
    template = '<tr style="background-color:#{bg}"><td style="width:90%">{anchor}</td><td>{size}</td></tr>'
    if self.size is None:
        size = ''
    else:
        size = bytestring.bytestring(self.size)
    return template.format(
        bg=('ddd' if shaded else 'fff'),
        anchor=self.anchor(display_name=display_name),
        size=size,
    )
def table_row(self, display_name=None, shaded=False):
    '''
    Render this item as an HTML table row with its anchor and size.
    '''
    form = '<tr style="background-color:#{bg}"><td style="width:90%">{anchor}</td><td>{size}</td></tr>'
    size = '' if self.size is None else bytestring.bytestring(self.size)
    bg = 'ddd' if shaded else 'fff'
    anchor = self.anchor(display_name=display_name)
    return form.format(bg=bg, anchor=anchor, size=size)
def encrypt_file(aes, input_handle, output_handle):
    '''
    Encrypt input_handle to output_handle in BLOCK_SIZE chunks.
    The final (short) chunk is padded out with a byte equal to
    (last data byte + 1) % 256, so decryption can strip it later.
    Prints progress at each exact MiB boundary.
    '''
    last_byte = 0
    bytes_read = 0
    done = False
    while not done:
        chunk = input_handle.read(BLOCK_SIZE)
        if chunk:
            last_byte = chunk[-1]
        if len(chunk) < BLOCK_SIZE:
            # Pad with a byte guaranteed to differ from the last data byte.
            pad = bytes([(last_byte + 1) % 256])
            chunk += pad * (BLOCK_SIZE - len(chunk))
            done = True
        bytes_read += len(chunk)
        if bytes_read % bytestring.MIBIBYTE == 0:
            print(bytestring.bytestring(bytes_read))
        output_handle.write(aes.encrypt(chunk))
def decrypt_file(aes, input_handle, output_handle):
    '''
    Decrypt input_handle to output_handle in BLOCK_SIZE chunks.
    On the final chunk, trailing bytes equal to the very last byte
    (the padding written by encrypt_file) are stripped off.
    Prints progress at each exact MiB boundary.
    '''
    start = input_handle.tell()
    input_size = input_handle.seek(0, SEEK_END) - start
    input_handle.seek(start)
    bytes_read = 0
    while True:
        chunk = input_handle.read(BLOCK_SIZE)
        if not chunk:
            break
        bytes_read += len(chunk)
        chunk = aes.decrypt(chunk)
        if bytes_read == input_size:
            # Final chunk: strip the run of pad bytes from the tail.
            pad_byte = chunk[-1]
            while chunk and chunk[-1] == pad_byte:
                chunk = chunk[:-1]
        if bytes_read % bytestring.MIBIBYTE == 0:
            print(bytestring.bytestring(bytes_read))
        output_handle.write(chunk)
def table_row(path, display_name=None, shaded=False):
    '''
    Render the given path as an HTML table row: an anchor cell and a
    size cell (empty for directories).
    '''
    form = ''' <tr style="background-color:#{bg}"> <td class="column_name">{anchor}</td> <td class="column_size">{size}</td></tr> '''.replace('\n', ' ')
    size = bytestring.bytestring(path.size) if path.is_file else ''
    bg = 'ddd' if shaded else 'fff'
    anchor = atag(path, display_name=display_name)
    return form.format(bg=bg, anchor=anchor, size=size)
def step(self, bytes_downloaded):
    '''
    Redraw the progress bar for the current byte count. Redraws are
    rate-limited except for the final (100%) update, which always
    prints and ends the line.
    '''
    fraction = min(1, bytes_downloaded / self.total_bytes)
    if self.limiter.limit(1) is False and fraction < 1:
        return
    downloaded = bytestring.bytestring(bytes_downloaded, force_unit=self.divisor)
    downloaded = self.downloaded_format.format(downloaded)
    block_count = 50
    filled = self.solid_char * int(block_count * fraction)
    bar = self.solid_char + filled.ljust(block_count, self.blank_char) + self.solid_char
    line = '\r{bytes_downloaded} {statusbar} {total_bytes}'.format(
        bytes_downloaded=downloaded,
        statusbar=bar,
        total_bytes=self.total_format,
    )
    print(line, end=('\n' if fraction == 1 else ''), flush=True)
def list_basenames(databasename, output_filename=None):
    '''
    Print the Enabled entries in order of the file basenames.
    This makes it easier to find interesting titles without worrying about
    what directory they're in.

    If output_filename is given, lines are written to that file instead.
    '''
    sql = sqlite3.connect(databasename)
    cur = sql.cursor()
    cur.execute('SELECT * FROM urls WHERE do_download == 1')
    items = cur.fetchall()
    # Bug fix: original called max() on the raw list, which raises
    # ValueError when no rows match. default=0 makes the empty case a no-op.
    longest = max((len(item[SQL_BASENAME]) for item in items), default=0)
    items.sort(key=lambda item: item[SQL_BASENAME].lower())
    form = '{basename:<%ds} : {url} : {size}' % longest
    output_file = None
    try:
        if output_filename is not None:
            output_file = open(output_filename, 'w', encoding='utf-8')
        for item in items:
            size = item[SQL_CONTENT_LENGTH]
            size = '' if size is None else bytestring.bytestring(size)
            line = form.format(
                basename=item[SQL_BASENAME],
                url=item[SQL_URL],
                size=size,
            )
            write(line, output_file)
    finally:
        # Bug fix: close the output file even if a write fails
        # (original leaked the handle on exception).
        if output_file:
            output_file.close()
def recursive_print_node(node, depth=0, use_html=False, header=None, footer=None):
    '''
    Given a tree node (presumably the root), yield printable lines for it
    and all of its children.

    use_html:
        Generate a neat HTML page instead of plain text.

    header:
        This text goes at the top of the file, or just below the <body> tag.

    footer:
        This text goes at the end of the file, or just above the </body> tag.
    '''
    at_root = depth == 0
    if at_root:
        if use_html:
            yield '<!DOCTYPE html>\n<html>'
            yield HTML_TREE_HEAD
            yield '<body>'
        if header is not None:
            yield header

    size = node.size
    size = '???' if size is None else bytestring.bytestring(size)

    if use_html:
        if node.item_type == 'directory':
            # Alternate row shading by depth; only directories carry the css class.
            css_class = 'directory_even' if depth % 2 == 0 else 'directory_odd'
            directory_url = node.path
            arrow = '<a href="{url}">→</a>' if directory_url else ''
            arrow = arrow.format(url=directory_url)
            line = HTML_FORMAT_DIRECTORY.format(
                css=css_class,
                directory_anchor=arrow,
                name=node.display_name,
                size=size,
            )
        else:
            line = HTML_FORMAT_FILE.format(
                name=node.display_name,
                size=size,
                url=node.path,
            )
    else:
        line = '{space}{bar}{name} : ({size})'.format(
            space='| ' * (depth - 1),
            bar='|---' if depth > 0 else '',
            name=node.display_name,
            size=size,
        )
    yield line

    # Directories first, then case-insensitive by path.
    # NOTE: header/footer are deliberately not forwarded to children;
    # they apply only at the root.
    sort_key = lambda child: (child.item_type == 'file', child.path.lower())
    for child in node.list_children(sort=sort_key):
        yield from recursive_print_node(child, depth=depth + 1, use_html=use_html)

    if node.item_type == 'directory':
        if use_html:
            # Close the directory div.
            yield '</div>'
        else:
            # This helps put some space between sibling directories.
            yield '| ' * depth

    if at_root:
        if footer is not None:
            yield footer
        if use_html:
            yield '</body>\n</html>'
walker = spinal.walk_generator() files = list(walker) files = [f.absolute_path for f in files] else: files = os.listdir() files = [f for f in files if os.path.isfile(f)] files = [f for f in files if any(ext in f.lower() for ext in ['.jpg', '.jpeg'])] bytes_saved = 0 remaining_size = 0 for filename in files: print(''.join(c for c in filename if c in string.printable)) bytesio = io.BytesIO() i = PIL.Image.open(filename) i.save(bytesio, format='jpeg', quality=80) bytesio.seek(0) new_bytes = bytesio.read() old_size = os.path.getsize(filename) new_size = len(new_bytes) remaining_size += new_size if new_size < old_size: bytes_saved += (old_size - new_size) f = open(filename, 'wb') f.write(new_bytes) f.close() print('Saved', bytestring.bytestring(bytes_saved)) print('Remaining are', bytestring.bytestring(remaining_size))
def measure(databasename, fullscan=False, new_only=False, threads=4):
    '''
    Given a database, print the sum of all Content-Lengths.
    URLs will be HEAD requested if:
        `new_only` is True and the file has no stored content length, or
        `fullscan` is True and `new_only` is False

    Returns the total size in bytes.
    '''
    if isinstance(fullscan, str):
        # NOTE(review): bool() of any non-empty string is True, so even
        # fullscan='False' enables a full scan. Preserved as-is; confirm
        # whether a real string-to-bool parser was intended here.
        fullscan = bool(fullscan)
    sql = sqlite3.connect(databasename)
    cur = sql.cursor()
    if new_only:
        cur.execute('SELECT * FROM urls WHERE do_download == 1 AND content_length IS NULL')
    else:
        cur.execute('SELECT * FROM urls WHERE do_download == 1')
    items = cur.fetchall()
    filecount = len(items)
    totalsize = 0
    unmeasured_file_count = 0
    if threads is None:
        threads = 1
    thread_queue = threadqueue.ThreadQueue(threads)
    try:
        for fetch in items:
            size = fetch[SQL_CONTENT_LENGTH]
            if fullscan or new_only:
                thread_queue.add(do_head, fetch[SQL_URL], raise_for_status=False)
            elif size is None:
                # Unmeasured and no intention to measure.
                unmeasured_file_count += 1
            else:
                totalsize += size
        for head in thread_queue.run():
            fetch = smart_insert(sql, cur, head=head, commit=False)
            size = fetch[SQL_CONTENT_LENGTH]
            if size is None:
                # Bug fix: report the URL of THIS response. The original
                # printed the loop variable `url` left over from the enqueue
                # loop above, which was always the last URL enqueued.
                write('"%s" is not revealing Content-Length' % fetch[SQL_URL])
                size = 0
            totalsize += size
    except (Exception, KeyboardInterrupt):
        # Persist whatever was learned before re-raising.
        sql.commit()
        raise
    sql.commit()
    size_string = bytestring.bytestring(totalsize)
    totalsize_string = '{size_short} ({size_exact:,} bytes) in {filecount:,} files'
    totalsize_string = totalsize_string.format(
        size_short=size_string,
        size_exact=totalsize,
        filecount=filecount,
    )
    write(totalsize_string)
    if unmeasured_file_count > 0:
        write(UNMEASURED_WARNING % unmeasured_file_count)
    return totalsize
files = [f.absolute_path for f in files] else: files = os.listdir() files = [f for f in files if os.path.isfile(f)] files = [ f for f in files if any(ext in f.lower() for ext in ['.jpg', '.jpeg']) ] bytes_saved = 0 remaining_size = 0 for filename in files: print(''.join(c for c in filename if c in string.printable)) bytesio = io.BytesIO() i = PIL.Image.open(filename) i.save(bytesio, format='jpeg', quality=80) bytesio.seek(0) new_bytes = bytesio.read() old_size = os.path.getsize(filename) new_size = len(new_bytes) remaining_size += new_size if new_size < old_size: bytes_saved += (old_size - new_size) f = open(filename, 'wb') f.write(new_bytes) f.close() print('Saved', bytestring.bytestring(bytes_saved)) print('Remaining are', bytestring.bytestring(remaining_size))