def cookies_write_func():
    import json
    from pprint import pprint
    from mylib.web_client import convert_cookies_json_to_netscape
    args = rtd.args
    files = args.file or list_files(clipboard, recursive=False)
    verbose = args.verbose
    for fp in files:
        tui_lp.l()
        print(f'* {fp}')
        data = input(
            '# input cookies data, or copy data to clipboard and press enter:\n'
        )
        if not data:
            print('# empty input, paste from clipboard')
            data = clipboard.get()
        if verbose:
            pprint(data)
        try:
            # JSON cookie data -> Netscape cookies.txt text
            j = json.loads(data)
            c = convert_cookies_json_to_netscape(j, disable_filepath=True)
        except json.decoder.JSONDecodeError:
            # not JSON, assume the data is already in Netscape format
            c = data
        if verbose:
            pprint(c)
        with open(fp, 'w') as f:
            f.write(c)
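
# Illustration only (not part of the toolkit): the Netscape cookies.txt layout that
# convert_cookies_json_to_netscape is expected to produce -- one cookie per line,
# seven TAB-separated fields. The sample values below are made up.
_COOKIES_TXT_SAMPLE = '\n'.join([
    '# Netscape HTTP Cookie File',
    # domain, include-subdomains flag, path, secure flag, expiry (unix time), name, value
    '\t'.join(['.example.com', 'TRUE', '/', 'FALSE', '1735689600', 'sessionid', 'abc123']),
])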
def tail_filter_files_func():
    from collections import defaultdict
    from mylib.ex.ostk import filter_filename_tail, join_filename_tail
    args = rtd.args
    tk = set(args.tails_keep or [])
    xk = set(args.extensions_keep or [])
    tg = set(args.tails_gone or [])
    xg = set(args.extensions_gone or [])
    dry = args.dry_run
    src = list_files(args.src or clipboard, recursive=False)
    # group (tail, ext) variants by (dirname, filename): ones to keep, ones to trash
    keep = defaultdict(list)
    gone = defaultdict(list)
    for dn, fn, tail, ext in filter_filename_tail(src, tk | tg, tk, xk):
        keep[(dn, fn)].append((tail, ext))
    for dn, fn, tail, ext in filter_filename_tail(src, tk | tg, tg, xg):
        gone[(dn, fn)].append((tail, ext))
    for g in gone:
        # only trash a "gone" variant when a "keep" variant of the same file exists
        if g in keep:
            dn, fn = g
            tui_lp.l()
            print(f'* {os.path.join(dn, fn)}')
            for tail, ext in keep[g]:
                print(f'@ {tail} {ext}')
            for tail, ext in gone[g]:
                print(f'- {tail} {ext}')
                if not dry:
                    send2trash(join_filename_tail(dn, fn, tail, ext))
def json_edit_func():
    from mylib.ex.fstk import read_json_file, write_json_file
    args = rtd.args
    file = args.file or list_files(clipboard)[0]
    indent = args.indent
    delete = args.delete
    item_l = args.item
    d = read_json_file(file)
    if delete:
        def handle(key, value):
            # delete the key; if a value is given, only delete on exact match
            if key in d:
                if value:
                    if d[key] == value:
                        del d[key]
                else:
                    del d[key]
    else:
        def handle(key, value):
            d[key] = value
    for item in item_l:
        k, v = map(eval_or_str, item.split('=', maxsplit=1))
        handle(k, v)
    write_json_file(file, d, indent=indent)
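
# Illustration only (hypothetical helper, not part of the toolkit): how a
# 'key=value' item is interpreted above. eval_or_str is assumed to behave
# roughly like the literal_eval-with-fallback sketch below.
def _json_edit_item_demo():
    import ast

    def eval_or_str_sketch(s):
        try:
            return ast.literal_eval(s)  # numbers, lists, quoted strings, ...
        except (ValueError, SyntaxError):
            return s  # anything else stays a plain string

    k, v = map(eval_or_str_sketch, 'retries=3'.split('=', maxsplit=1))
    assert (k, v) == ('retries', 3)
    k, v = map(eval_or_str_sketch, 'name=demo'.split('=', maxsplit=1))
    assert (k, v) == ('name', 'demo')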
def regex_rename_func():
    args = rtd.args
    source = args.source
    recursive = args.recursive
    pattern = args.pattern
    replace = args.replace
    only_basename = args.only_basename
    dry_run = args.dry_run
    only_dirs = args.only_dirs
    if only_dirs:
        src_l = list_dirs(source or clipboard, recursive=recursive)
    else:
        src_l = list_files(source or clipboard, recursive=recursive)
    for src in src_l:
        try:
            fs_inplace_rename_regex(src, pattern, replace, only_basename, dry_run)
        except OSError as e:
            print(repr(e))
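
# Illustration only (hypothetical sketch, not how fs_inplace_rename_regex is
# implemented): the only_basename switch decides whether the regex is applied
# to the file name alone or to the whole path.
def _regex_rename_sketch(path, pattern, replace, only_basename=True):
    import os
    import re
    if only_basename:
        head, base = os.path.split(path)
        return os.path.join(head, re.sub(pattern, replace, base))
    return re.sub(pattern, replace, path)
    # e.g. _regex_rename_sketch('/tmp/a (1).txt', r' \(\d+\)', '') -> '/tmp/a.txt'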
def video_guess_crf_func():
    from mylib.ffmpeg_alpha import guess_video_crf, file_is_video
    args = rtd.args
    path_l = [path for path in list_files(args.src or clipboard) if file_is_video(path)]
    codec = args.codec
    work_dir = args.work_dir
    redo = args.redo
    auto_clean = not args.no_clean
    for path in path_l:
        tui_lp.l()
        tui_lp.p(path)
        try:
            tui_lp.p(guess_video_crf(
                src=path, codec=codec, work_dir=work_dir,
                redo=redo, auto_clean=auto_clean,
            ))
        except (KeyError, ZeroDivisionError) as e:
            tui_lp.p(f'! {repr(e)}')
            tui_lp.p(f'- {path}')
def ccj_func():
    from mylib.web_client import convert_cookies_file_json_to_netscape
    files = rtd.args.file or list_files(clipboard, recursive=False)
    for fp in files:
        print(f'* {fp}')
        convert_cookies_file_json_to_netscape(fp)
    # end of the progress() worker body: show a throttled single-line progress indicator
    m = (w - 5) // 4  # how many leading/trailing characters of the path to show
    t0 = time()
    while True:
        p = q.get()
        if p is None:
            break
        ps = f'{" " * w}\r{p[:m]} ... {p[-m:]}'
        t1 = time()
        if t1 - t0 > 0.2:
            print(ps, end='\r')
            t0 = t1

t = thread_factory(daemon=True)(progress)
t.start()  # run the progress printer on its own daemon thread instead of blocking here
files_l = list_files(src or cb, recursive=recursive, progress_queue=q)
x, y, z = 0, 0, 0
print()
for fp in files_l:
    z += 1
    try:
        zf = ZipFile(fp)
    except BadZipFile:
        continue
    y += 1
    # look for a .webp entry; the else branch runs only when none is found
    for f in zf.namelist():
        if f.endswith('.webp'):
            break
    else:
        zf.close()
        dfp = os.path.join(dest, os.path.split(fp)[-1])