def hasher(path, args):
    """Hash ``path`` (a file, or every entry of a directory) and log results.

    Recognized option in *args*: ``--tag VALUE`` (repeatable; duplicate
    values are collapsed).  For each hashed resource three lines are logged:
    ``res=…``, ``hash=…`` and ``tags=…``, followed by a blank log line.
    Unknown options or a trailing ``--tag`` without a value abort early.
    """
    tags = []
    remaining = list(args)
    while remaining:
        opt = remaining.pop(0)
        if opt == "--tag":
            if not remaining:
                cover.log("Param without value")
                return
            value = remaining.pop(0)
            if value not in tags:
                tags.append(value)
        else:
            cover.log("Unknown param")
            return

    if os.path.isdir(path):
        # Sort case-insensitively (original case breaks ties), matching the
        # decorate-sort-undecorate of the previous implementation.
        entries = []
        for item in sorted(os.listdir(path), key=lambda x: (x.lower(), x)):
            full = os.path.join(path, item)
            # Normalize to forward slashes on Windows so logged resource
            # paths are platform-independent.
            shown = full.replace("\\", "/") if sys.platform.startswith("win") else full
            entries.append([shown, cover.file_hash(full)])
    else:
        entries = [[path, cover.file_hash(path)]]

    for res, digest in entries:
        cover.log("res=" + res)
        cover.log("hash=" + digest)
        cover.log("tags=" + ",".join(tags))
        cover.log()
def download_pack(packname):
    """Download a resource pack, extract whitelisted entries, verify hashes.

    The pack table comes from ``cover.load_res_packs()``; each entry is a
    ``(name, url, filename, dest, valid)`` tuple where *valid* is a regex
    matched against zip member names.  The archive is cached at
    ``cover.basepath/filename`` and only downloaded when missing.  After
    extraction every resource registered for this pack is checked against
    its recorded hash; any mismatch or missing file aborts with an error.
    """
    packs = cover.load_res_packs()
    if packname not in packs:
        cover.err("Unknown pack \"%s\"" % packname)
        return
    name, url, filename, dest, valid = packs[packname]
    cover.log("Downloading: " + name)

    destdir = os.path.join(cover.basepath, *dest)
    if not os.path.exists(destdir):
        os.makedirs(destdir)

    zippath = os.path.join(cover.basepath, filename)
    if not os.path.exists(zippath):
        u = urlopen(url)
        meta = u.info()
        # FIX: Content-Length may be absent, in which case meta.get()
        # returns None and int(None) raised TypeError.  Fall back to
        # reporting raw byte counts instead of a percentage.
        try:
            file_size = int(meta.get("Content-Length"))
            cover.log("Downloading:", file_size, "bytes")
        except (TypeError, ValueError):
            file_size = None
            cover.log("Downloading:")
        with open(zippath, "wb") as f:
            file_size_dl = 0
            while True:
                buff = u.read(64 * 1024)
                if not buff:
                    break
                file_size_dl += len(buff)
                f.write(buff)
                if file_size:
                    cover.log(" ", file_size_dl * 100. / file_size, "%")
                else:
                    cover.log(" ", file_size_dl, "bytes")

    # unpack
    cover.log("Extracting")
    import zipfile, re
    with open(zippath, "rb") as fh:
        # FIX: close the ZipFile deterministically instead of leaking it.
        with zipfile.ZipFile(fh) as z:
            for name in z.namelist():
                if not re.match(valid, name):
                    cover.log(" skip " + name)
                    continue
                cover.log(" ok ", name)
                outpath = os.path.join(destdir, *name.split("/"))
                # FIX: directory members (trailing "/") used to be opened
                # with open(..., "wb") and crashed; create the directory.
                if name.endswith("/"):
                    if not os.path.exists(outpath):
                        os.makedirs(outpath)
                    continue
                # FIX: ensure parent directories of nested members exist.
                parent = os.path.dirname(outpath)
                if parent and not os.path.exists(parent):
                    os.makedirs(parent)
                with open(outpath, "wb") as outfile:
                    outfile.write(z.read(name))

    # check extracted
    for res, (hs, tags, pack) in cover.load_res_list().items():
        if pack != packname:
            continue
        fp = os.path.join(cover.basepath, *res.split("/"))
        if not os.path.exists(fp):
            cover.err("Resource \"%s\" not found" % res)
            return
        if cover.file_hash(fp) != hs:
            cover.err("Resource \"%s\" damaged (hash mismatch)" % res)
            return
    cover.log("Done")
def download_pack(packname):
    """Download one resource pack — or all of them — extract and verify.

    ``packname`` may carry a leading "-" (stripped), or be the special
    value ``"_all"`` to download every known pack.  Each pack entry is a
    ``(name, url, filename, dest, valid, strip)`` tuple: *valid* is a regex
    whitelist over zip member names and *strip* is the number of leading
    path components removed from each member before extraction.  After
    extraction, registered resources are hash-checked (skipped when
    ``cover.common.RESHASH == "{IGNORE}"``) and extracted files with no
    registered hash are reported.
    """
    # Tolerate a leading "-" on the pack name.
    if packname[:1] == "-":
        packname = packname[1:]

    packs = cover.load_res_packs()
    if packname == "_all":
        dnpacks = list(packs.keys())
    else:
        # FIX: the membership check used to run unconditionally, so the
        # special "_all" name was itself looked up in the pack table and
        # always rejected with "Unknown pack", making the download-all
        # path unreachable.  Check only explicit pack names.
        if packname not in packs:
            cover.err("Unknown pack \"%s\"" % packname)
            return
        dnpacks = [packname]

    for pidx, packname in enumerate(dnpacks):
        name, url, filename, dest, valid, strip = packs[packname]
        cover.log("Downloading: %d/%d %s" % (pidx + 1, len(dnpacks), name))

        destdir = os.path.join(cover.basepath, *dest)
        if not os.path.exists(destdir):
            os.makedirs(destdir)

        zippath = os.path.join(cover.basepath, filename)
        if not os.path.exists(zippath):
            u = urlopen(url)
            meta = u.info()
            # Content-Length may be missing; fall back to raw byte counts.
            try:
                file_size = int(meta.get("Content-Length"))
                cover.log("Downloading: %s bytes" % str(file_size))
            except Exception:
                file_size = None
                cover.log("Downloading:")
            with open(zippath, "wb") as f:
                file_size_dl = 0
                while True:
                    buff = u.read(64 * 1024)
                    if not buff:
                        break
                    file_size_dl += len(buff)
                    f.write(buff)
                    if file_size:
                        cover.log(" ", file_size_dl * 100. / file_size, "%")
                    else:
                        cover.log(" ", file_size_dl, "bytes")

        # unpack
        cover.log("Extracting")
        import zipfile, re
        newfiles = []
        with open(zippath, "rb") as fh:
            # FIX: close the ZipFile deterministically instead of leaking it.
            with zipfile.ZipFile(fh) as z:
                for name in z.namelist():
                    if not re.match(valid, name):
                        cover.log(" skip " + name)
                        continue

                    # Strip the first `strip` path components; members that
                    # vanish entirely (e.g. the top-level folder) are skipped.
                    ename = name
                    ns = strip
                    while ns > 0:
                        ns -= 1
                        ps = ename.find("/")
                        if ps > 0:
                            ename = ename[ps + 1:]
                        else:
                            ename = ""
                            break
                    if not ename:
                        cover.log(" strip " + name)
                        continue

                    if name != ename:
                        cover.log(" ok " + name + " -> " + ename)
                    else:
                        cover.log(" ok " + name)

                    # extract
                    fn = os.path.join(destdir, *ename.split("/"))
                    if ename[-1:] == "/":
                        # Directory member: just create it.
                        if not os.path.exists(fn):
                            os.makedirs(fn)
                    else:
                        base = os.path.dirname(fn)
                        if not os.path.exists(base):
                            os.makedirs(base)
                        with open(fn, "wb") as outfile:
                            outfile.write(z.read(name))
                    # Record the extracted path relative to cover.basepath.
                    # NOTE(review): assumes `dest` is a list (tuple + list
                    # would raise here) — confirm against load_res_packs().
                    newfn = "/".join(dest + ename.split("/"))
                    newfiles.append(newfn)

        # check extracted
        for res, (hs, tags, pack) in cover.load_res_list().items():
            if pack != packname:
                continue
            fp = os.path.join(cover.basepath, *res.split("/"))
            if not os.path.exists(fp):
                cover.err("Resource \"%s\" not found" % res)
                return
            if cover.file_hash(fp) != hs:
                if cover.common.RESHASH == "{IGNORE}":
                    cover.log(" ignore hash " + res)
                else:
                    cover.err("Resource \"%s\" damaged (hash mismatch)" % res)
                    return
            if res in newfiles:
                newfiles.remove(res)

        # check unchecked
        for fn in newfiles:
            # FIX (consistency): report through cover.log like every other
            # message in this module, not via bare print().
            cover.log(" no hash for " + fn)

    cover.log("Done")