def AFBFFF(item: str, db: str, big_item_split_parts: int = -1, split: bool = False,
           split_size: int = 1024 * 1024 * 4000, host: str = "AnonFiles",
           mirror: bool = False, _7z_exe: str = r"C:\Program Files\7-Zip\7z.exe",
           temp_dir: str = None, _depth: int = 0) -> None:
    """Upload a file or directory to a file host, zipping/splitting it first if needed.

    A plain file (with ``split=False``) is uploaded as-is. Anything else is packed
    into an uncompressed multi-volume zip via the 7-Zip CLI and every resulting
    part is uploaded individually.

    :param item: file or directory to upload; made absolute relative to the
        main script's directory when given as a relative path.
    :param db: upload-record database path; also resolved to an absolute path.
    :param big_item_split_parts: when > 1, override ``split_size`` so the item is
        split into exactly this many volumes.
    :param split: force zipping/splitting even for a single file.
    :param split_size: volume size in bytes passed to 7-Zip (``-v...b``).
    :param host: name of an uploader class looked up in ``globals()`` (e.g. "AnonFiles").
    :param mirror: when True, upload every part to AnonFiles, BayFiles and ForumFiles.
    :param _7z_exe: path to the 7-Zip executable.
    :param temp_dir: scratch directory for zip volumes; defaults to %TEMP%.
    :param _depth: internal recursion-depth marker (currently unused here).
    :raises Exception: wrapping any failure, chained to the original cause.
    """
    if not temp_dir:
        temp_dir = os.environ["TEMP"]  # Windows-style temp lookup; matches the 7z.exe default above
    if not os.path.isabs(db):
        db = join_path(abs_main_dir(2), db)
    if not os.path.isabs(item):
        item = join_path(abs_main_dir(2), item)
    p(f"[Started] {item}")
    try:
        if os.path.isfile(item) and not split:
            # Single file, no splitting requested: upload it directly.
            files = [item]
        else:
            basename = os.path.basename(item) + ".zip"
            # Unique work dir: random prefix + epoch seconds.
            temp = randstr(2**3) + "_" + str(int(time.time()))
            dest = join_path(temp_dir, temp, basename)
            fs = file_size(item)
            if big_item_split_parts > 1:
                # Sanity floor so every part ends up non-empty; the exact
                # threshold formula is inherited from the original author.
                if fs >= (big_item_split_parts - 1)**2 + 1:
                    import math
                    split_size = math.ceil(fs / big_item_split_parts)
                else:
                    raise Exception(
                        f"{item} is too small ({fs}B) to split into {big_item_split_parts} parts"
                    )
            # -mx=0: store only (no compression); -v<N>b: volume size in bytes.
            cmd = [
                _7z_exe, "a", "-tzip", f"-v{split_size}b", "-mx=0", dest, item
            ]
            if os.path.isdir(item):
                cmd.append("-r")  # recurse into the directory
            p(f"[Zipping] {item}", cmd)
            process = subprocess.Popen(cmd,
                                       stderr=subprocess.PIPE,
                                       stdout=subprocess.PIPE)
            process.communicate()  # block until 7-Zip finishes
            files = [
                join_path(temp_dir, temp, part_name)
                for part_name in os.listdir(join_path(temp_dir, temp))
            ]
            p(f"[Zipped] {item} has {len(files)} parts", files)
        for part_path in files:
            # AFBFFF(part_path, db=db, host=host, mirror=mirror, _depth=_depth+1)
            if not mirror:
                # Resolve the uploader class by name from module globals.
                globals()[host](db).upload(filename=part_path)
            else:
                AnonFiles(db).upload(filename=part_path)
                BayFiles(db).upload(filename=part_path)
                ForumFiles(db).upload(filename=part_path)
    except Exception as e:
        # BUGFIX: chain with `from e` so the original traceback is preserved.
        raise Exception(f"{item} failed to upload", e) from e
    p(f"[Ended] {item}")
def download(self, url: str, connections: int = 2**3, cal_hash: bool = False, quiet: bool = False) -> dict:
    """Download ``url`` in pieces using multiple connections and reassemble it.

    Pre-allocates the target file, queues every piece index as "failed"
    (i.e. pending), then repeatedly calls ``retry_download`` until no piece
    remains or ``self.retry`` rounds are exhausted.

    :param url: source URL.
    :param connections: number of parallel connections per round.
    :param cal_hash: when True, also compute the SHA-1 of the finished file.
    :param quiet: suppress progress output.
    :return: ``{"file_path": ...}`` plus ``"sha1"`` when ``cal_hash`` is set.
    :raises Exception: when pieces are still missing after ``self.retry`` retries.
    """
    self.url = url
    self.file_size = self.__get_file_size(url)
    self.__create_empty_file()  # pre-allocate so writers can seek anywhere
    if not quiet:
        p(f"[MFD] Downloading {url} with {connections} connections", end="")
    # Every piece starts out in failed_parts; workers remove entries as they land.
    for i in range(0, self.file_size // self.piece_size + 1):
        self.failed_parts.append(i)
    self.retry_download(connections)
    retry_ct = 0
    while len(self.failed_parts) > 0:
        if retry_ct >= self.retry:
            raise Exception(
                f"failed to download {self.url} after {self.retry} retries"
            )
        self.retry_download(connections)
        retry_ct += 1
    _f = join_path(self.save_dir, self.filename)
    if not quiet:
        p(f"\r[MFD] Downloaded {url} => " + _f)
    if cal_hash:
        # BUGFIX: the original left this handle open (resource leak);
        # a context manager guarantees it is closed.
        # return {"md5": md5hd(fd), "crc32": crc32hd(fd), "sha1": sha1hd(fd), "file_path": _f}
        with open(_f, "rb") as fd:
            return {"sha1": sha1hd(fd), "file_path": _f}
    else:
        return {"file_path": _f}
def upload(self, identifier: str, root: str, path: str, check_overwrite, check_skip_same_size):
    """Upload one local file to archive.org's S3-compatible endpoint.

    The file's extension is "cloaked" for the duration of the upload and
    always restored afterwards. Transient network errors are retried
    indefinitely with a 10-second countdown between attempts.

    :param identifier: archive.org item identifier, optionally with extra
        "/"-separated path segments appended (they become a remote prefix).
    :param root: local root directory; stripped from ``path`` to build the
        remote path.
    :param path: file path relative to ``root``.
    :param check_overwrite: callable(remote_path) -> bool; False means the
        remote file already exists.
    :param check_skip_same_size: callable(remote_path) -> bool; True means
        the existing remote file should be skipped.
    :raises Exception: on a non-200 response from the server.
    """
    path_prefix = identifier.split("/")[1:]
    identifier = identifier.split("/")[0]
    file = join_path(root, path)
    file = self.cloak_file_ext(file)
    remote_filename = os.path.basename(file)
    # Remote path: optional prefix segments + local path relative to root,
    # with OS separators normalized to "/".
    _path = "/".join(path_prefix + file.replace(root, "")[1:].split(os.path.sep))
    if not check_overwrite(_path) and check_skip_same_size(_path):
        p("[Upload] [Warning] File {} is skipped due to existing remote file".format(join_path(root, path)))
        self.uncloak_file_ext(file)
        return
    fs = str(file_size(file))
    headers = {
        # "authorization": f"LOW {self.access}:{self.secret}",
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
        "Content-Type": "multipart/form-data; charset=UTF-8",
        "Referer": f"https://archive.org/upload/?identifier={identifier}",
        # "User-Agent": USER_AGENT(self.access),
        "x-amz-acl": "bucket-owner-full-control",
        "x-amz-auto-make-bucket": "1",
        # "x-archive-interactive-priority": "1",
        "x-archive-queue-derive": "0",
        "x-archive-size-hint": fs,
        "X-File-Size": fs,
        "Content-Length": fs,
        "X-File-Name": f"uri({remote_filename})",
        "Sec-Fetch-Dest": "empty",
        "Sec-Fetch-Mode": "cors",
        "Sec-Fetch-Site": "same-site",
        "X-Requested-With": "XMLHttpRequest"
    }
    url = f"https://s3.us.archive.org/"
    # url_path = identifier+"/"+path.replace("\\", "/")+"/"+remote_filename
    url_path = identifier + "/" + _path
    url_path = url_path.replace("//", "/")
    uri = url + urllib.parse.quote(url_path, safe="")
    p(f"[Uploading] {file} => {uri}", end="")
    fo = open(file, "rb")
    # BUGFIX: the original only closed/uncloaked on success or KeyboardInterrupt;
    # any other exception leaked the handle and left the file cloaked. try/finally
    # guarantees cleanup on every exit path (subsuming the old KeyboardInterrupt arm).
    try:
        while True:
            try:
                fo.seek(0)  # rewind for each retry so the full file is re-sent
                r = self.__session.put(uri, data=fo, headers=headers)
                break
            except requests.exceptions.RequestException as ex:
                import time
                print(ex)
                for i in range(0, 10):
                    time.sleep(1)
                    print("\rretry in", i, end="", flush=True)
                print(flush=True)
    finally:
        fo.close()
        self.uncloak_file_ext(file)
    if r.status_code != 200:
        raise Exception(f"failed to upload {file} => {uri}",
                        r.status_code, r.request.headers, r.content)
    p(f"\r[Uploaded] {file} => https://archive.org/download/{url_path}")
def create(self, text: str, padding: int = -1, filename: str = None) -> str:
    """Render ``text`` onto a fresh RGB image and write it into ``save_dir``.

    :param text: the string to draw.
    :param padding: padding forwarded to ``textbox`` when sizing the canvas.
    :param filename: output file name; defaults to "__tmp.png".
    :return: the full path of the saved image.
    """
    target_name = "__tmp.png" if filename is None else filename
    typeface = ImageFont.truetype(self.font_file, self.font_size)
    dimensions, origin = textbox(typeface, text, padding)
    canvas = Image.new("RGB", dimensions, color=self.background_color)
    pen = ImageDraw.Draw(canvas)
    pen.text(origin, text, font=typeface, fill=self.text_color)
    destination = join_path(self.save_dir, target_name)
    canvas.save(destination)
    return destination
def combiner(self) -> None:
    """Worker loop: drain ``self.parts`` and write each piece into the target file.

    Each queue entry is ``(piece_index, bytes_or_None)``. A non-None payload is
    written at ``piece_index * piece_size`` and the index is removed from the
    pending/failed bookkeeping lists.
    """
    while not self.terminate:
        k, v = self.parts.get()
        try:
            if v is not None:
                with open(join_path(self.save_dir, self.filename), 'r+b') as f:
                    f.seek(k * self.piece_size, 0)
                    f.write(v)
                    # note: the `with` block closes the file; no explicit close needed
                self.pending_write_parts.remove(k)
                self.failed_parts.remove(k)
        except Exception as e:
            # BUGFIX: the original unconditionally evaluated len(v)/v[:16],
            # which raises TypeError when v is None and masks the real error.
            if v is None:
                print(e, k, flush=True)
            else:
                print(e, k, len(v), v[:16], v[-16:], flush=True)
        finally:
            # BUGFIX: always balance the get() — skipping task_done() on an
            # exception would deadlock any thread blocked in parts.join().
            self.parts.task_done()
def __create_empty_file(self) -> None:
    """Pre-allocate the download target as a (sparse where supported) file
    of ``self.file_size`` bytes, so piece writers can seek-and-write anywhere.
    """
    with open(join_path(self.save_dir, self.filename), "wb") as f:
        # BUGFIX: guard zero-length downloads — seek(-1) on an empty file
        # raises; for size 0 simply creating (truncating) the file is enough.
        if self.file_size > 0:
            f.seek(self.file_size - 1)
            f.write(b"\0")
        # the `with` block closes the file; the original's explicit close was redundant