def download_pack(packname):
    """Download and extract a resource pack, then verify its files.

    Looks up *packname* in the pack registry, downloads the pack archive
    (unless a cached copy already exists), extracts the entries matching
    the pack's ``valid`` regex into the destination directory, and then
    checks every registered resource of the pack against its stored hash.
    Errors are reported via ``cover.err`` and abort the function.
    """
    packs = cover.load_res_packs()
    if packname not in packs:
        cover.err("Unknown pack \"%s\"" % packname)
        return
    name, url, filename, dest, valid = packs[packname]
    cover.log("Downloading: " + name)
    destdir = os.path.join(cover.basepath, *dest)
    if not os.path.exists(destdir):
        os.makedirs(destdir)
    zippath = os.path.join(cover.basepath, filename)
    if not os.path.exists(zippath):
        u = urlopen(url)
        meta = u.info()
        # BUG FIX: the server may omit Content-Length, in which case
        # meta.get(...) returns None and int(None) raised TypeError.
        # Fall back to byte-count progress when the size is unknown.
        try:
            file_size = int(meta.get("Content-Length"))
            cover.log("Downloading:", file_size, "bytes")
        except (TypeError, ValueError):
            file_size = None
            cover.log("Downloading:")
        with open(zippath, "wb") as f:
            file_size_dl = 0
            while True:
                buff = u.read(64 * 1024)
                if not buff:
                    break
                file_size_dl += len(buff)
                f.write(buff)
                if file_size:
                    cover.log(" ", file_size_dl * 100. / file_size, "%")
                else:
                    cover.log(" ", file_size_dl, "bytes")
    # unpack
    cover.log("Extracting")
    import zipfile, re
    with open(zippath, "rb") as fh:
        z = zipfile.ZipFile(fh)
        # BUG FIX: the loop variable used to shadow the pack display
        # name ``name``; use a distinct identifier for archive members.
        for member in z.namelist():
            if not re.match(valid, member):
                cover.log(" skip " + member)
                continue
            cover.log(" ok ", member)
            with open(os.path.join(destdir, *member.split("/")),
                      "wb") as outfile:
                outfile.write(z.read(member))
    # check extracted resources against the registered hashes
    for res, (hs, tags, pack) in cover.load_res_list().items():
        if pack != packname:
            continue
        fp = os.path.join(cover.basepath, *res.split("/"))
        if not os.path.exists(fp):
            cover.err("Resource \"%s\" not found" % res)
            return
        if cover.file_hash(fp) != hs:
            cover.err("Resource \"%s\" damaged (hash mismatch)" % res)
            return
    cover.log("Done")
def main():
    """Command-line entry point.

    Parses ``sys.argv``, collects the tests and interpreters to use,
    optionally downloads missing resource packs (``--autodownload``),
    and finally runs the whole test matrix via ``do_all_test``.
    """
    cover.log("Test PyFPDF")
    testsn = []
    interpsn = []
    autodownloadres = False
    args = sys.argv[1:]
    while len(args):
        arg = args[0]
        args = args[1:]
        if arg == "--hash":
            if len(args) == 0:
                cover.log("Param without value")
                return usage()
            return hasher(args[0], args[1:])
        elif arg == "--help":
            # NOTE(review): printing PACKHASH in the help path looks like
            # debug leftover - confirm before removing.
            print(cover.PACKHASH)
            return usage()
        elif arg == "--test":
            if len(args) > 0:
                value = args[0]
                args = args[1:]
            else:
                cover.log("Param without value")
                return usage()
            if value[:1] == "@":
                # "@file" form: read test names from a list file
                testsn += read_list(value[1:])
            else:
                testsn.append(value)
        elif arg == "--interp":
            if len(args) > 0:
                value = args[0]
                args = args[1:]
            else:
                cover.log("Param without value")
                return usage()
            if value[:1] == "@":
                # "@file" form: read interpreter paths from a list file
                interpsn += read_list(value[1:])
            else:
                interpsn.append(value)
        elif arg == "--listtests":
            return list_tests()
        elif arg == "--listinterps":
            return print_interps(find_python_version(search_python()))
        elif arg.startswith("--download"):
            return download_pack(arg[10:])
        elif arg == "--ignore-res-hash":
            cover.common.RESHASH = "{IGNORE}"
        elif arg == "--ignore-pack-hash":
            cover.common.PACKHASH = "{IGNORE}"
        elif arg == "--autodownload":
            autodownloadres = True
        else:
            cover.log("Unknown param")
            return usage()
    if len(testsn) == 0:
        tests = search_tests()
    else:
        # check that every requested test exists
        tests = []
        for test in testsn:
            test = test.strip()
            fn = os.path.join(cover.basepath, "cover",
                              "test_" + test + ".py")
            if os.path.exists(fn):
                tests.append(fn)
            else:
                cover.err("Test \"%s\" not found" % test)
                return
    if len(interpsn) == 0:
        interps = find_python_version(search_python())
    else:
        # check that every requested interpreter exists
        interps = []
        for interp in interpsn:
            fn = os.path.abspath(interp)
            if os.path.exists(fn):
                interps.append(fn)
            else:
                # BUG FIX: the message used the unrelated variable
                # ``test`` (a NameError when no --test args were given);
                # report the missing interpreter instead.
                cover.err("Interpreter \"%s\" not found" % interp)
                return
        interps = find_python_version(interps)
    # download any resource packs required by the selected tests
    if autodownloadres:
        usedres = []
        usedpacks = []
        for test in tests:
            settings = cover.read_cover_info(test)
            for res in settings.get("res", []):
                if res in usedres:
                    continue
                usedres.append(res)
        allres = cover.load_res_list()
        for ures in usedres:
            if ures in allres:
                hs, tags, pack = allres[ures]
                if pack and pack not in usedpacks:
                    usedpacks.append(pack)
        for pack in usedpacks:
            download_pack(pack)
    do_all_test(interps, tests)
def download_pack(packname):
    """Download one resource pack (or all packs) and verify contents.

    *packname* may carry a leading "-" (as produced by the
    ``--download-<pack>`` command-line form), which is stripped.  The
    special value "_all" selects every known pack.  For each selected
    pack the archive is downloaded (unless cached), entries matching the
    pack's ``valid`` regex are extracted with the first ``strip`` path
    components removed, and the registered resources are checked against
    their stored hashes (honoring the ``{IGNORE}`` override).
    """
    if packname[:1] == "-":
        packname = packname[1:]
    packs = cover.load_res_packs()
    if packname == "_all":
        dnpacks = list(packs.keys())
    else:
        # BUG FIX: this unknown-pack check used to run unconditionally,
        # so the "_all" path always failed ("_all" is not itself a pack
        # name).  It only applies to an explicitly named pack.
        if packname not in packs:
            cover.err("Unknown pack \"%s\"" % packname)
            return
        dnpacks = [packname]
    for pidx, packname in enumerate(dnpacks):
        name, url, filename, dest, valid, strip = packs[packname]
        cover.log("Downloading: %d/%d %s" % (pidx + 1, len(dnpacks), name))
        destdir = os.path.join(cover.basepath, *dest)
        if not os.path.exists(destdir):
            os.makedirs(destdir)
        zippath = os.path.join(cover.basepath, filename)
        if not os.path.exists(zippath):
            u = urlopen(url)
            meta = u.info()
            try:
                file_size = int(meta.get("Content-Length"))
                cover.log("Downloading: %s bytes" % str(file_size))
            except Exception:
                # Content-Length missing or malformed: no percentage
                file_size = None
                cover.log("Downloading:")
            with open(zippath, "wb") as f:
                file_size_dl = 0
                while True:
                    buff = u.read(64 * 1024)
                    if not buff:
                        break
                    file_size_dl += len(buff)
                    f.write(buff)
                    if file_size:
                        cover.log(" ", file_size_dl * 100. / file_size, "%")
                    else:
                        cover.log(" ", file_size_dl, "bytes")
        # unpack
        cover.log("Extracting")
        import zipfile, re
        newfiles = []
        with open(zippath, "rb") as fh:
            z = zipfile.ZipFile(fh)
            for name in z.namelist():
                if not re.match(valid, name):
                    cover.log(" skip " + name)
                    continue
                # drop the first ``strip`` leading path components
                ename = name
                ns = strip
                while ns > 0:
                    ns -= 1
                    ps = ename.find("/")
                    if ps > 0:
                        ename = ename[ps + 1:]
                    else:
                        ename = ""
                        break
                if not ename:
                    cover.log(" strip " + name)
                    continue
                if name != ename:
                    cover.log(" ok " + name + " -> " + ename)
                else:
                    cover.log(" ok " + name)
                # extract
                fn = os.path.join(destdir, *ename.split("/"))
                if ename[-1:] == "/":
                    # directory entry - just ensure it exists
                    if not os.path.exists(fn):
                        os.makedirs(fn)
                else:
                    base = os.path.dirname(fn)
                    if not os.path.exists(base):
                        os.makedirs(base)
                    with open(fn, "wb") as outfile:
                        outfile.write(z.read(name))
                    # track extracted files so unregistered ones can be
                    # reported below (directories carry no hash)
                    newfn = "/".join(dest + ename.split("/"))
                    newfiles.append(newfn)
        # check extracted resources against the registered hashes
        for res, (hs, tags, pack) in cover.load_res_list().items():
            if pack != packname:
                continue
            fp = os.path.join(cover.basepath, *res.split("/"))
            if not os.path.exists(fp):
                cover.err("Resource \"%s\" not found" % res)
                return
            if cover.file_hash(fp) != hs:
                if cover.common.RESHASH == "{IGNORE}":
                    cover.log(" ignore hash " + res)
                else:
                    cover.err("Resource \"%s\" damaged (hash mismatch)" % res)
                    return
            if res in newfiles:
                newfiles.remove(res)
        # report extracted files that have no registered hash
        for fn in newfiles:
            print(" no hash for " + fn)
    cover.log("Done")
def do_all_test(interps, tests):
    """Run every test under every interpreter and report statistics.

    ``interps`` holds interpreter descriptors (index 1 is used as the
    display/statistics key); ``tests`` holds test file paths.  After the
    run, prints per-interpreter and overall counters, hints at missing
    modules when everything was skipped, and lists resources that were
    not found on disk together with the packs that provide them.
    """
    print_interps(interps)
    dests = {}
    stats = {"_": {"_": 0}}
    for idx, interp in enumerate(interps):
        dests[interp[1]] = prepare_dest(interp)
        stats[interp[1]] = {"_": 0}
    cover.log(">> Tests:", len(tests))
    for idx, test in enumerate(tests):
        do_test(test, interps, dests, stats,
                "%d / %d" % (idx + 1, len(tests)))
    cover.log()
    cover.log(">> Statistics:")

    def stat_str(stat):
        # render one counter dict as "total - N, key - N, ..."
        keys = list(stat.keys())
        keys.sort()
        st = "total - %d" % stat["_"]
        for key in keys:
            if key == "_":
                continue
            st += (", %s - %d" % (key, stat[key]))
        return st

    for interp in interps:
        cover.log(interp[1] + ":", stat_str(stats[interp[1]]))
    cover.log("-" * 10)
    cover.log("All:", stat_str(stats["_"]))
    # check if no FPDF at all
    total = stats["_"]["_"]
    fpdf = stats["_"].get("nofpdf", 0)
    skip = stats["_"].get("skip", 0)
    if skip == total:
        cover.log(
            "*** All tests skipped. Install some modules (PIL, PyBIDI, " +
            "Gluon, etc)")
    elif fpdf + skip == total:
        hint_prepare()
    # check whether some resources were missing (NORES)
    nores = stats["_"].get("nores", 0)
    if nores > 0:
        items = cover.load_res_list()
        tested = []
        packs = []
        cover.log("*** Some resources are not found")
        for test in tests:
            settings = cover.read_cover_info(test)
            for res in settings.get("res", []):
                if res in tested:
                    continue
                tested.append(res)
                fn = os.path.join(cover.basepath, *res.split("/"))
                if os.path.exists(fn):
                    continue
                print(" not found " + res)
                # map the missing resource to its downloadable pack
                if res in items:
                    hs, tags, pack = items[res]
                    if pack and pack not in packs:
                        packs.append(pack)
        if len(packs) > 0:
            # BUG FIX: typo "theese" -> "these" in the user-facing hint
            cover.log("*** You can download these resources with:")
            for pack in packs:
                cover.log(" runtest.py --download%s" % pack)
def do_all_test(interps, tests):
    """Execute the full test matrix and print summary statistics.

    Prepares a destination per interpreter, runs each test against all
    interpreters, then prints per-interpreter and overall counters,
    followed by hints when everything was skipped or resources are
    missing from disk.
    """
    print_interps(interps)
    dests = {}
    stats = {"_": {"_": 0}}
    for interp in interps:
        key = interp[1]
        dests[key] = prepare_dest(interp)
        stats[key] = {"_": 0}
    cover.log(">> Tests:", len(tests))
    total_tests = len(tests)
    for pos, test in enumerate(tests, 1):
        do_test(test, interps, dests, stats, "%d / %d" % (pos, total_tests))
    cover.log()
    cover.log(">> Statistics:")

    def render(stat):
        # "total - N" first, then the remaining counters sorted by key
        parts = ["total - %d" % stat["_"]]
        for key in sorted(stat):
            if key != "_":
                parts.append("%s - %d" % (key, stat[key]))
        return ", ".join(parts)

    for interp in interps:
        cover.log(interp[1] + ":", render(stats[interp[1]]))
    cover.log("-" * 10)
    cover.log("All:", render(stats["_"]))
    # detect "everything skipped" / "fpdf unavailable" situations
    overall = stats["_"]
    total = overall["_"]
    no_fpdf = overall.get("nofpdf", 0)
    skipped = overall.get("skip", 0)
    if skipped == total:
        cover.log("*** All tests skipped. Install some modules (PIL, PyBIDI, "
                  "Gluon, etc)")
    elif no_fpdf + skipped == total:
        hint_prepare()
    # report resources missing on disk, grouped by downloadable pack
    if overall.get("nores", 0) > 0:
        registry = cover.load_res_list()
        seen = []
        missing_packs = []
        cover.log("*** Some resources are not found")
        for test in tests:
            info = cover.read_cover_info(test)
            for res in info.get("res", []):
                if res in seen:
                    continue
                seen.append(res)
                path = os.path.join(cover.basepath, *res.split("/"))
                if os.path.exists(path):
                    continue
                print(" not found " + res)
                # find which pack (if any) provides this resource
                entry = registry.get(res)
                if entry is not None:
                    hs, tags, pack = entry
                    if pack and pack not in missing_packs:
                        missing_packs.append(pack)
        if missing_packs:
            cover.log("*** You can download theese resources with:")
            for pack in missing_packs:
                cover.log(" runtest.py --download%s" % pack)