def check_vars(ctx):
    """Check all var files for consistency.

    All vars content found on disk are extracted for verification.

    vamtb [-vv] [-p] [-f <file pattern> ] checkvar

    -p: progress bar
    """
    file, dir, pattern = get_filepattern(ctx)
    vars_list = search_files_indir(dir, pattern)
    # No progress bar when disabled (-p absent) or when verbose logging
    # would interleave with it.
    if not ctx.obj['progress'] or ctx.obj['debug_level']:
        iterator = vars_list
    else:
        iterator = tqdm(vars_list, desc="Checking vars…", ascii=True,
                        maxinterval=3, ncols=75, unit='var')
    # Renamed loop variable: the original reused `file`, shadowing the
    # value unpacked from get_filepattern above.
    for varfile in iterator:
        try:
            with Var(varfile, dir, checkVar=True) as var:
                try:
                    _ = var.meta()
                except FileNotFoundError:
                    error(
                        f"Var {var.var} does not contain a correct meta json file"
                    )
                else:
                    info(f"{var} is OK")
        except KeyboardInterrupt:
            return
        except Exception as e:
            error(f"{varfile} is not OK [{e}]")
def dupinfo(ctx):
    """ Return duplication information.

    Will print in red vars which have either 50 dup files or +20MB dup content

    vamtb [-vv] [-r] [-f <file pattern> ] dupinfo

    -r : only scan vars from creators not part of "references"
    """
    skip_refs = ctx.obj['ref']
    file, dir, pattern = get_filepattern(ctx)
    for varfile in search_files_indir(dir, pattern):
        with Var(varfile, dir, use_db=True) as var:
            # Without an explicit -f file, -r skips reference vars entirely.
            if not file and skip_refs and var.get_ref == "YES":
                continue
            dup_stats = var.dupinfo()
            n_dup = dup_stats['numdupfiles']
            s_dup = dup_stats['dupsize']
            # When scanning a whole directory, stay silent on clean vars.
            if not file and not n_dup:
                continue
            n_total = var.get_numfiles()
            msg = f"{var.var:<64} : Dups:{n_dup:<5}/{n_total:<5} Dup Size:{toh(s_dup):<10} (ref:{var.get_ref})"
            # Color by severity: green = clean, blue = minor, red = heavy.
            if not n_dup:
                colored = green(msg)
            elif n_dup < C_MAX_FILES and s_dup < C_MAX_SIZE:
                colored = blue(msg)
            else:
                colored = red(msg)
            print(colored)
def dbscan(ctx):
    """ Scan vars and store props in db.

    vamtb [-vv] [-a] [-p] [-f <file pattern> ] dbscan

    -p: Display progress bar (only when not using -v)
    -a: Do not confirm, always answer yes (will overwrite DB with new content)
    """
    stored = 0
    # quiet unless verbose logging was requested (idiomatic form of
    # `False if debug_level else True`).
    quiet = not ctx.obj['debug_level']
    file, dir, pattern = get_filepattern(ctx)
    vars_list = search_files_indir(dir, pattern)
    # Progress bar only when both quiet and -p given.
    if not quiet or not ctx.obj['progress']:
        iterator = vars_list
    else:
        iterator = tqdm(vars_list, desc="Writing database…", ascii=True,
                        maxinterval=3, ncols=75, unit='var')
    for varfile in iterator:
        with Var(varfile, dir, use_db=True, check_exists=False) as var:
            try:
                # -a/--force means "always yes": skip confirmation prompt.
                if var.store_update(confirm=not ctx.obj['force']):
                    stored += 1
            except VarMalformed as e:
                error(f"Var {var.var} malformed [{e}].")
    info(f"{stored} var files stored")
def checkdep(ctx):
    """Check dependencies of var recursively.

    vamtb [-vv] [-m] [-b] [-f <file pattern> ] checkdep

    When using -m, files considered bad will be moved to directory "00Dep".
    This directory can then be moved away from the directory.

    When using -b, use database rather than file system.

    You can redo the same dependency check later by moving back
    the directory and correct vars will be moved out of this directory
    if they are now valid.
    """
    move = ctx.obj['move']
    usedb = ctx.obj['usedb']
    file, dir, pattern = get_filepattern(ctx)
    if move:
        # Destination directory for vars with broken dependencies.
        full_bad_dir = Path(dir) / C_BAD_DIR
        full_bad_dir.mkdir(parents=True, exist_ok=True)
    # When moving, stop at the first missing dependency — one is enough
    # to classify the var as bad.
    stop = True if move else False
    for mfile in sorted(search_files_indir(dir, pattern)):
        try:
            with Var(mfile, dir, use_db=usedb) as var:
                try:
                    # -b resolves dependencies via the DB; otherwise read
                    # the var files on disk recursively.
                    if usedb:
                        _ = var.rec_dep(stop=stop)
                    else:
                        _ = var.depend(recurse=True, stop=stop)
                except (VarNotFound, zlib.error) as e:
                    error(f'Missing or wrong dependency for {var} [{e}]')
                    if move:
                        try:
                            # copy + remove rather than shutil.move: keeps
                            # control over the SameFileError case below.
                            shutil.copy(var.path, str(full_bad_dir))
                            os.remove(var.path)
                        except shutil.SameFileError:
                            # A copy already sits in 00Dep: only delete the
                            # source when the crc matches the existing copy.
                            dvar = full_bad_dir / var.file
                            scrc = var.crc
                            dcrc = FileName(dvar).crc
                            if scrc == dcrc:
                                os.remove(var.path)
                            else:
                                error(
                                    f"Can't move {var} (crc {scrc}) as {dvar} exists with diferent crc ({dcrc})"
                                )
                        except shutil.Error:
                            # Old code for older python
                            assert (False)
                        else:
                            print(f"Moved {var} to {full_bad_dir}")
        except (VarExtNotCorrect, VarMetaJson, VarNameNotCorrect,
                VarVersionNotCorrect):
            # Malformed var names/metadata are silently skipped here;
            # other commands report them.
            # info(f"Wrong file {mfile}")
            pass
def noroot(ctx):
    """Remove root node stored in pose presets.

    vamtb [-vv] -f <file pattern> noroot
    """
    file, dir, pattern = get_filepattern(ctx)
    # This command requires an explicit -f file.
    if not file:
        critical("Need a file parameter", doexit=True)
    with Var(file, dir) as var:
        var.remroot()
def dumpvar(ctx):
    """Dump meta.json from var.

    vamtb [-vv] -f <file pattern> dumpvar
    """
    file, dir, pattern = get_filepattern(ctx)
    # This command requires an explicit -f file.
    if not file:
        critical("Need a file parameter", doexit=True)
    with Var(file, dir) as var:
        meta = var.load_json_file("meta.json")
        print(prettyjson(meta))
def dupinfo(ctx):
    """ Return information on var.

    vamtb [-vv] [-f <file pattern> ] info
    """
    # NOTE(review): this function shares its name with the dup-info command
    # defined earlier in the file; unless a decorator rebinds each command,
    # the later `def` shadows the earlier one — confirm the intended name
    # (the docstring suggests this is the `info` command).
    file, dir, pattern = get_filepattern(ctx)
    for varfile in search_files_indir(dir, pattern):
        with Var(varfile, dir) as var:
            # Dump every zip entry's info line.
            for entry in var.get_zipinfolist:
                print(entry)
def printdep(ctx):
    """Print dependencies of a var from reading meta.

    vamtb [-vv] [-f <file pattern> ] printdep

    Recursive (will print deps of deps etc)"""
    file, dir, pattern = get_filepattern(ctx)
    for varfile in search_files_indir(dir, pattern):
        # BUG FIX: was `Var(file, dir)`, which ignored the loop variable and
        # breaks when no -f file was supplied; open the var being iterated.
        with Var(varfile, dir) as var:
            # Compute the sorted dependency list once (was computed twice).
            depvarfiles = sorted(var.dep_frommeta(), key=str.casefold)
            print(
                f"Printing dependencies for {green(var.var):<50} : {len(depvarfiles) if len(depvarfiles) else 'No'} dependencies"
            )
            for depvarfile in depvarfiles:
                # Probe whether the dependency resolves on disk.
                try:
                    _ = Var(depvarfile, dir)
                except VarNotFound:
                    mess = red("Not found")
                else:
                    mess = green("Found")
                print(f"{depvarfile:<68}: {mess}")
def setref(ctx):
    """ Set var and files as reference.

    vamtb [-vv] -f file setref
    """
    # TODO set noref..
    file, dir, pattern = get_filepattern(ctx)
    # This command requires an explicit -f file.
    if not file:
        critical("Need a file parameter", doexit=True)
    for varfile in search_files_indir(dir, pattern):
        with Var(varfile, dir, use_db=True, check_exists=False) as var:
            info(f"Setting var {var} as reference")
            var.db_var_setref(isref=True, files=True)
def printrealdep(ctx):
    """Print dependencies of a var from inspecting all json files.

    vamtb [-vv] [-f <file pattern> ] printrealdep

    Not recursive"""
    file, dir, pattern = get_filepattern(ctx)
    for varfile in search_files_indir(dir, pattern):
        with Var(varfile, dir) as var:
            deps = var.dep_fromfiles()
            depvarfiles = sorted(deps, key=str.casefold)
            print(
                f"Printing dependencies for {green(var.var):<50} : {len(depvarfiles) if len(depvarfiles) else 'No'} dependencies"
            )
            for depvarfile in depvarfiles:
                # Probe whether the dependency resolves on disk.
                # (Removed the dead store `mess = green("Found")` that
                # preceded the try — the try/else always reassigns it.)
                try:
                    _ = Var(depvarfile, dir)
                except VarNotFound:
                    mess = red("Not found")
                else:
                    mess = green("Found")
                print(f"{depvarfile:<68}: {mess}")
def sort_vars(ctx):
    """Moves vars to subdirectory named by its creator.

    vamtb [-vv] [-f <file pattern> ] sortvar

    Crc is checked before erasing duplicates"""
    file, dir, pattern = get_filepattern(ctx)
    info(f"Sorting var in {dir}")
    # Loop variable renamed from `file` to avoid shadowing the value
    # unpacked from get_filepattern above.
    for varfile in search_files_indir(dir, pattern):
        try:
            with Var(varfile, dir) as var:
                var.move_creator()
        except zlib.error:
            error(f"Zip error on var {varfile}")
def dotty(ctx):
    """ Generate graph of deps, one per var.

    vamtb [-vv] [-f <file pattern> ] graph
    """
    # Bail out early unless the graphviz `dot` binary is on PATH.
    if shutil.which(C_DOT) is None:
        critical(f"Make sure you have graphviz installed in {C_DOT}.",
                 doexit=True)
    file, dir, pattern = get_filepattern(ctx)
    for varfile in search_files_indir(dir, pattern):
        with Var(varfile, dir, use_db=True) as var:
            info(f"Calculating dependency graph for {var.var}")
            Graph.dotty(var)
def ia(ctx):
    """ Upload var to Internet Archive item.

    vamtb [-vv] [-f <file pattern>] [-a] [-e] [-n] [-i <prefix>] ia

    -a: Do not confirm, always answer yes (will overwrite IA with new content).
    -e: Only update metadata subject.
    -n: Dry-run upload, don't do anything.
    -f: Upload all jpg, not only scene jpgs.
    -c: Only upload CC* license content.
    -i: Change prefix used for the identifier on IA (use only when you are sure the default identifer is already used).
    """
    file, dir, pattern = get_filepattern(ctx)
    n_up = 0  # count of successful uploads, reported at the end
    for varfile in search_files_indir(dir, pattern):
        with Var(varfile, dir, use_db=True) as var:
            # Skip vars not present (per Var.exists, presumably DB-backed
            # here — confirm against Var implementation).
            if not var.exists():
                info("Skipping")
                continue
            try:
                # Flags map 1:1 onto the CLI options documented above.
                res = var.ia_upload(
                    meta_only=ctx.obj['meta'],
                    confirm=not ctx.obj['force'],
                    verbose=True if ctx.obj['debug_level'] else False,
                    dry_run=ctx.obj['dryrun'],
                    full_thumbs=ctx.obj['full'],
                    only_cc=ctx.obj['cc'],
                    iaprefix=ctx.obj['iaprefix'])
                if res:
                    info(
                        f"Var {var.var} uploaded successfully to Internet Archive."
                    )
                    n_up += 1
                else:
                    error(
                        f"Var {var.var} was not uploaded to Internet Archive.")
            except Exception as e:
                # Broad catch: a single failed upload must not stop the batch.
                error(
                    f"Var {var.var} could not be uploaded to Internet Archive., error is:\n{e}"
                )
    print(green(f"{n_up} vars were uploaded"))
def reref(ctx):
    """ Remove embedded content and point to reference var.

    vamtb [-vv] [-f <file pattern> ] [-x reference_to_remove.xxx] reref

    -a: Do not confirm, always answer yes (there will still be a prompt if there's two reference)
    -f: will operate only on this var
    -x: will remove only this embedded content
    """
    dup = ctx.obj['dup']
    file, dir, pattern = get_filepattern(ctx)
    # Creator whose remaining vars should be skipped (set when reref()
    # returns C_NEXT_CREATOR, i.e. the user asked to skip this creator).
    creator = ""
    critical(
        "Be cautious with what you accept (Y). If some bundled content was modified, you might get some split content."
    )
    critical(
        "Also vars referencing this content will have broken dependencies. Check that manually for now."
    )
    for varfile in search_files_indir(dir, pattern):
        with Var(varfile, dir, use_db=True) as var:
            msg = f"Reref on {varfile.name:<100} size:"
            if not var.exists():
                print(red(f"{msg} UNKNOWN"))
                continue
            print(green(f"{msg} {toh(var.size)}"))
            # Honor a previous "skip this creator" answer.
            if var.creator == creator:
                debug("Skipping creator..")
                continue
            if var.exists():
                res = var.reref(dryrun=False,
                                dup=dup,
                                confirm=not ctx.obj['force'])
                # C_NEXT_CREATOR means: skip the rest of this creator's vars.
                if res and res == C_NEXT_CREATOR:
                    creator = var.creator
                else:
                    creator = ""
            else:
                # NOTE(review): this branch looks unreachable — the same
                # var.exists() test above already `continue`d when false.
                warn(
                    f"{var.var} exists as {var.path} but is not in the DB, skipping.."
                )
def orig(ctx):
    """ Revert to orig files.

    vamtb [-vv] [-f <file pattern>] orig
    """
    file, dir, pattern = get_filepattern(ctx)
    # Scan for .orig backups matching the var pattern.
    for mfile in search_files_indir(dir, pattern.replace(".var", ".orig")):
        varfile = mfile.with_suffix(".var")
        debug(f"Restoring {mfile} to {varfile}")
        # Replace the current .var with its .orig backup.
        if varfile.exists():
            os.unlink(varfile)
        os.rename(mfile, varfile)
        with Var(varfile, dir, use_db=True, check_exists=False) as var:
            # If someone corrupted an .orig file
            try:
                var.store_update(confirm=False)
            except Exception as e:
                # BUG FIX: message previously said "could not be uploaded"
                # (copy/paste from the upload commands); this command stores
                # the restored var into the database.
                error(f"Var {var.var} could not be stored in database, error is:\n{e}")
def stats_vars(ctx):
    """Get stats on all vars.

    vamtb [-vv] [-f <file pattern> ] statsvar
    """
    file, dir, pattern = get_filepattern(ctx)
    info(f"Checking vars in {dir}")
    # creator name -> list of var names by that creator
    creators_file = defaultdict(list)
    for mfile in search_files_indir(dir, pattern):
        try:
            with Var(mfile, dir) as var:
                creators_file[var.creator].append(var.var)
        except KeyboardInterrupt:
            return
        except Exception as e:
            # Broad catch: one unreadable var must not stop the stats run.
            error(f"{mfile} is not OK [{e}]")
    # Most prolific creators first (idiomatic form of reversed(sorted(...))).
    for creator, varnames in sorted(creators_file.items(),
                                    key=lambda item: len(item[1]),
                                    reverse=True):
        print(f"Creator {creator} has {len(varnames)} files")
def anon(ctx):
    """ Upload var to Anonfiles. You need an account overthere.

    vamtb [-vv] [-f <file pattern>] [-n] anon

    -n : Dry-run upload, don't do anything.
    """
    # Load the config; prompt for the API key once and persist it.
    with open(C_YAML, 'r') as stream:
        conf = yaml.load(stream, Loader=yaml.BaseLoader)
    if 'anon_apikey' not in conf or not conf['anon_apikey']:
        conf['anon_apikey'] = input(blue("Enter Anonfiles apikey ?:"))
        with open(C_YAML, 'w') as outfile:
            yaml.dump(conf, outfile, default_flow_style=False)
        info(f"Stored apikey for future use.")
    file, dir, pattern = get_filepattern(ctx)
    n_up = 0  # count of successful uploads, reported at the end
    for varfile in search_files_indir(dir, pattern):
        with Var(varfile, dir, use_db=True) as var:
            try:
                res = var.anon_upload(apikey=conf['anon_apikey'],
                                      dry_run=ctx.obj['dryrun'])
                if res:
                    info(f"Var {var.var} uploaded successfully to anonfiles.")
                    n_up += 1
                else:
                    error(f"Var {var.var} was not uploaded to anonfiles.")
            except Exception as e:
                # Broad catch: a single failed upload must not stop the batch.
                error(
                    f"Var {var.var} could not be uploaded to anonfiles, error is:\n{e}"
                )
    print(green(f"{n_up} vars were uploaded"))