def postprocess_nzb(nzbname, articlequeue, resultqueue, mp_work_queue, pipes, mpp0, mp_events, cfg,
                    verifiedrar_dir, unpack_dir, nzbdir, rename_dir, main_dir, download_dir, dirs,
                    pw_file, mp_loggerqueue):
    """Postprocess a downloaded NZB: verify rars (par2), unrar, and move results to the complete dir.

    Runs as a separate process. Orchestrates the par_verifier and partial_unrar child
    processes, then copies/cleans the incomplete/unpack directories. Terminates itself
    via sys.exit() on success and on every failure path; failure is recorded in the DB
    as nzb status -4, success as status 4.

    Args:
        nzbname: name of the NZB job being postprocessed.
        articlequeue/resultqueue/mp_work_queue: download/decoder queues to drain.
        pipes: dict of multiprocessing pipes, e.g. pipes["verifier"] = (parent_conn, child_conn).
        mpp0: dict of child processes (copied locally; "verifier"/"unrarer"/"decoder" keys used).
        mp_events: dict of idle-events for verifier/unrarer deadlock detection.
        cfg: configuration mapping (reads cfg["OPTIONS"]["GET_PW_DIRECTLY"]).
        verifiedrar_dir/unpack_dir/nzbdir/rename_dir/main_dir/download_dir/dirs: working dirs.
        pw_file: password file used for passworded rar archives.
        mp_loggerqueue: queue for the multiprocessing logger.
    """
    setproctitle("gzbx." + os.path.basename(__file__))
    pwdb = PWDBSender()
    # already finished (4) or already failed (-4): nothing to do
    if pwdb.exc("db_nzb_getstatus", [nzbname], {}) in [4, -4]:
        sys.exit()
    logger = setup_logger(mp_loggerqueue, __file__)
    logger.debug(whoami() + "starting ...")
    nzbdirname = re.sub(r"[.]nzb$", "", nzbname, flags=re.IGNORECASE) + "/"
    incompletedir = dirs["incomplete"] + nzbdirname
    if not os.path.isdir(incompletedir):
        logger.error(whoami() + "no incomplete_dir, aborting postprocessing")
        pwdb.exc("db_nzb_update_status", [nzbname, -4], {})
        sys.exit()
    mpp = mpp0.copy()
    sh = SigHandler_Postprocessing(logger)
    signal.signal(signal.SIGINT, sh.sighandler_postprocessing)
    signal.signal(signal.SIGTERM, sh.sighandler_postprocessing)
    stop_wait(nzbname, dirs, pwdb)
    event_verifieridle = mp_events["verifier"]
    event_unrareridle = mp_events["unrarer"]
    pwdb.exc("db_msg_insert", [nzbname, "starting postprocess", "info"], {})
    logger.debug(whoami() + "starting clearing queues & pipes")
    # clear pipes
    do_mpconnections(pipes, "clearqueues", None)
    try:
        for key, item in pipes.items():
            if pipes[key][0].poll():
                pipes[key][0].recv()
    except Exception as e:
        logger.error(whoami() + str(e))
        pwdb.exc("db_nzb_update_status", [nzbname, -4], {})
        pwdb.exc("db_msg_insert", [nzbname, "postprocessing/clearing pipes failed!", "error"], {})
        sys.exit()
    logger.debug(whoami() + "clearing queues & pipes done!")
    # join decoder: wait up to 3 minutes for it to work off its queue, then kill it
    if mpp_is_alive(mpp, "decoder"):
        t0 = time.time()
        timeout_reached = False
        try:
            while mp_work_queue.qsize() > 0:
                if time.time() - t0 > 60 * 3:
                    timeout_reached = True
                    break
                time.sleep(0.5)
        except Exception as e:
            # qsize() may be unimplemented on some platforms -> treat as timeout
            logger.debug(whoami() + str(e))
            timeout_reached = True
        if timeout_reached:
            logger.warning(whoami() + "Timeout reached/error in joining decoder, terminating decoder ...")
            kill_mpp(mpp, "decoder")
            logger.info(whoami() + "decoder terminated!")
        # drain whatever is left on the decoder work queue
        # BUGFIX: was mp_work_queue.get_no_wait() (no such method); the AttributeError
        # was silently swallowed by the except, so the queue was never drained.
        try:
            while not mp_work_queue.empty():
                mp_work_queue.get_nowait()
        except Exception:
            pass
    stop_wait(nzbname, dirs, pwdb)
    # --- PAR_VERIFIER ---
    verifystatus = pwdb.exc("db_nzb_get_verifystatus", [nzbname], {})
    all_rars_are_verified, _ = pwdb.exc("db_only_verified_rars", [nzbname], {})
    renamed_rar_files = pwdb.exc("get_all_renamed_rar_files", [nzbname], {})
    # verifier not running, status = 0 --> start & wait
    if not mpp_is_alive(mpp, "verifier") and verifystatus == 0 and not all_rars_are_verified and renamed_rar_files:
        logger.info(whoami() + "there are files to check by par_verifier, starting par_verifier ...")
        p2 = pwdb.exc("get_renamed_p2", [rename_dir, nzbname], {})
        # with a par2 file we can md5-verify; without we can only copy
        if p2:
            pvmode = "verify"
        else:
            pvmode = "copy"
        mpp_verifier = mp.Process(target=par_verifier.par_verifier,
                                  args=(pipes["verifier"][1], rename_dir, verifiedrar_dir, main_dir,
                                        mp_loggerqueue, nzbname, pvmode, event_verifieridle, cfg, ))
        mpp_verifier.start()
        mpp["verifier"] = mpp_verifier
        time.sleep(1)
    stop_wait(nzbname, dirs, pwdb)
    verifystatus = pwdb.exc("db_nzb_get_verifystatus", [nzbname], {})
    # verifier is running, status == 1 (running) or -2 -> wait/join
    if mpp_is_alive(mpp, "verifier") and verifystatus in [1, -2]:
        logger.info(whoami() + "Waiting for par_verifier to complete")
        try:
            # kill par_verifier in deadlock
            while True:
                mpp_join(mpp, "verifier", timeout=5)
                if mpp_is_alive(mpp, "verifier"):
                    # if not finished, check if idle longer than 30 sec -> deadlock!!!
                    t0 = time.time()
                    while event_verifieridle.is_set() and time.time() - t0 < 30:
                        time.sleep(0.5)
                    if time.time() - t0 >= 30:
                        logger.info(whoami() + "Verifier deadlock, killing verifier!")
                        kill_mpp(mpp, "verifier")
                        break
                    else:
                        continue
                else:
                    break
        except Exception as e:
            logger.warning(whoami() + str(e))
        mpp_join(mpp, "verifier")
        mpp["verifier"] = None
        logger.debug(whoami() + "par_verifier completed/terminated!")
    # if verifier is running but wrong status, or correct status but not running, exit
    elif mpp_is_alive(mpp, "verifier") or verifystatus == 1:
        pwdb.exc("db_nzb_update_status", [nzbname, -4], {})
        pwdb.exc("db_msg_insert", [nzbname, "par_verifier status inconsistency", "error"], {})
        logger.debug(whoami() + "something is wrong with par_verifier, exiting postprocessor")
        logger.info(whoami() + "postprocess of NZB " + nzbname + " failed!")
        if mpp_is_alive(mpp, "verifier"):
            logger.debug(whoami() + "terminating par_verifier")
            kill_mpp(mpp, "verifier")
            logger.info(whoami() + "verifier terminated!")
        sys.exit()
    stop_wait(nzbname, dirs, pwdb)
    # --- UNRARER ---
    # already checked if pw protected?
    is_pw_checked = pwdb.exc("db_nzb_get_ispw_checked", [nzbname], {})
    if not is_pw_checked:
        logger.debug(whoami() + "testing if pw protected")
        # ispw: 1 = protected, -1 = not protected, 0/-2/-3 = test not possible
        ispw = passworded_rars.is_rar_password_protected(verifiedrar_dir, logger)
        pwdb.exc("db_nzb_set_ispw_checked", [nzbname, True], {})
        if ispw == 0 or ispw == -2 or ispw == -3:
            logger.warning(whoami() + "cannot test rar if pw protected, something is wrong: " + str(ispw) + ", exiting ...")
            pwdb.exc("db_msg_insert", [nzbname, "postprocessing failed due to pw test not possible", "error"], {})
            pwdb.exc("db_nzb_update_status", [nzbname, -4], {})
            sys.exit()
        elif ispw == 1:
            pwdb.exc("db_nzb_set_ispw", [nzbname, True], {})
            pwdb.exc("db_msg_insert", [nzbname, "rar archive is password protected", "warning"], {})
            logger.info(whoami() + "rar archive is pw protected")
        elif ispw == -1:
            pwdb.exc("db_nzb_set_ispw", [nzbname, False], {})
            pwdb.exc("db_msg_insert", [nzbname, "rar archive is NOT password protected", "info"], {})
            logger.info(whoami() + "rar archive is NOT pw protected")
    ispw = pwdb.exc("db_nzb_get_ispw", [nzbname], {})
    unrarernewstarted = False
    if ispw:
        get_pw_direct0 = False
        try:
            get_pw_direct0 = (cfg["OPTIONS"]["GET_PW_DIRECTLY"].lower() == "yes")
        except Exception as e:
            logger.warning(whoami() + str(e))
        if pwdb.exc("db_nzb_get_password", [nzbname], {}) == "N/A":
            logger.info(whoami() + "Trying to get password from file for NZB " + nzbname)
            pwdb.exc("db_msg_insert", [nzbname, "trying to get password", "info"], {})
            pw = passworded_rars.get_password(verifiedrar_dir, pw_file, nzbname, logger,
                                             get_pw_direct=get_pw_direct0)
            if pw:
                logger.info(whoami() + "Found password " + pw + " for NZB " + nzbname)
                pwdb.exc("db_msg_insert", [nzbname, "found password " + pw, "info"], {})
                pwdb.exc("db_nzb_set_password", [nzbname, pw], {})
        else:
            pw = pwdb.exc("db_nzb_get_password", [nzbname], {})
        if not pw:
            pwdb.exc("db_msg_insert", [nzbname,
                                       "Provided password was not correct / no password found in PW file! ",
                                       "error"], {})
            logger.error(whoami() + "Cannot find password for NZB " + nzbname + "in postprocess, exiting ...")
            pwdb.exc("db_nzb_update_status", [nzbname, -4], {})
            mpp["unrarer"] = None
            # sighandler.mpp = mpp
            sys.exit()
        event_unrareridle = mp.Event()
        # passing p2list to unrarer
        mpp_unrarer = mp.Process(target=partial_unrar.partial_unrar,
                                 args=(verifiedrar_dir, unpack_dir, nzbname, mp_loggerqueue, pw,
                                       event_unrareridle, cfg, ))
        unrarernewstarted = True
        mpp_unrarer.start()
        mpp["unrarer"] = mpp_unrarer
        # sighandler.mpp = self.mpp
    # start unrarer if never started and ok verified/repaired
    elif not mpp["unrarer"]:
        logger.debug(whoami() + "checking if unrarer should be started")
        try:
            verifystatus = pwdb.exc("db_nzb_get_verifystatus", [nzbname], {})
            unrarstatus = pwdb.exc("db_nzb_get_unrarstatus", [nzbname], {})
        except Exception as e:
            logger.warning(whoami() + str(e))
        logger.debug(whoami() + "verifystatus: " + str(verifystatus) + " / unrarstatus: " + str(unrarstatus))
        if (verifystatus > 0 or verifystatus == -2) and unrarstatus == 0:
            try:
                logger.debug(whoami() + "unrarer passiv until now, starting ...")
                event_unrareridle = mp.Event()
                # passing p2list to unrarer
                mpp_unrarer = mp.Process(target=partial_unrar.partial_unrar,
                                         args=(verifiedrar_dir, unpack_dir, nzbname, mp_loggerqueue, None,
                                               event_unrareridle, cfg, ))
                unrarernewstarted = True
                mpp_unrarer.start()
                mpp["unrarer"] = mpp_unrarer
            except Exception as e:
                logger.warning(whoami() + str(e))
    finalverifierstate = (pwdb.exc("db_nzb_get_verifystatus", [nzbname], {}) in [0, 2])
    stop_wait(nzbname, dirs, pwdb)
    # join unrarer
    if mpp_is_alive(mpp, "unrarer"):
        if finalverifierstate:
            logger.info(whoami() + "Waiting for unrar to complete")
            while True:
                # try to join unrarer
                mpp_join(mpp, "unrarer", timeout=5)
                isalive = mpp_is_alive(mpp, "unrarer")
                if isalive:
                    # if not finished, check if idle longer than timeout -> deadlock!!!
                    t0 = time.time()
                    # a freshly started unrarer gets effectively unlimited time
                    timeout0 = 99999999 if unrarernewstarted else 120 * 2
                    while event_unrareridle.is_set() and time.time() - t0 < timeout0:
                        time.sleep(0.5)
                    if time.time() - t0 >= timeout0:
                        logger.info(whoami() + "Unrarer deadlock, killing unrarer!")
                        kill_mpp(mpp, "unrarer")
                        break
                    else:
                        logger.debug(whoami() + "Unrarer not idle, waiting before terminating")
                        stop_wait(nzbname, dirs, pwdb)
                        time.sleep(0.5)
                        continue
                else:
                    break
        else:
            logger.info(whoami() + "Repair/unrar not possible, killing unrarer!")
            kill_mpp(mpp, "unrarer")
        logger.debug(whoami() + "unrarer completed/terminated!")
    stop_wait(nzbname, dirs, pwdb)
    # get status
    finalverifierstate = (pwdb.exc("db_nzb_get_verifystatus", [nzbname], {}) in [0, 2])
    finalnonrarstate = pwdb.exc("db_allnonrarfiles_getstate", [nzbname], {})
    finalrarstate = (pwdb.exc("db_nzb_get_unrarstatus", [nzbname], {}) in [0, 2])
    logger.info(whoami() + "Finalverifierstate: " + str(finalverifierstate) + " / Finalrarstate: " +
                str(finalrarstate) + " / Finalnonrarstate: " + str(finalnonrarstate))
    if finalrarstate and finalnonrarstate and finalverifierstate:
        pwdb.exc("db_msg_insert", [nzbname, "unrar/par-repair ok!", "success"], {})
        logger.info(whoami() + "unrar/par-repair of NZB " + nzbname + " success!")
    else:
        pwdb.exc("db_nzb_update_status", [nzbname, -4], {})
        pwdb.exc("db_msg_insert", [nzbname, "unrar/par-repair failed!", "error"], {})
        logger.info(whoami() + "postprocess of NZB " + nzbname + " failed!")
        sys.exit()
    stop_wait(nzbname, dirs, pwdb)
    # copy to complete
    logger.info(whoami() + "starting copy-to-complete")
    pwdb.exc("db_msg_insert", [nzbname, "copying & cleaning directories", "info"], {})
    complete_dir = make_complete_dir(dirs, nzbdir, logger)
    if not complete_dir:
        pwdb.exc("db_nzb_update_status", [nzbname, -4], {})
        # BUGFIX: "postprocessing failed!" message was inserted twice here
        pwdb.exc("db_msg_insert", [nzbname, "postprocessing failed!", "error"], {})
        logger.info("Cannot create complete_dir for " + nzbname + ", exiting ...")
        sys.exit()
    # move all non-rar/par2/par2vol files from renamed to complete
    for f00 in glob.glob(rename_dir + "*") + glob.glob(rename_dir + ".*"):
        logger.debug(whoami() + "renamed_dir: checking " + f00 + " / " + str(os.path.isdir(f00)))
        if os.path.isdir(f00):
            logger.debug(f00 + "is a directory, skipping")
            continue
        f0 = f00.split("/")[-1]
        file0type = pwdb.exc("db_file_getftype_renamed", [f0], {})
        logger.debug(whoami() + "Moving/deleting " + f0)
        if not file0type:
            # not in DB: delete leftover rar.x parts, move everything else
            gg = re.search(r"[0-9]+[.]rar[.]+[0-9]", f0, flags=re.IGNORECASE)
            if gg:
                try:
                    os.remove(f00)
                    logger.debug(whoami() + "Removed rar.x file " + f0)
                except Exception as e:
                    pwdb.exc("db_nzb_update_status", [nzbname, -4], {})
                    logger.warning(whoami() + str(e) + ": cannot remove corrupt rar file!")
            else:
                # if unknown file (not in db) move to complete anyway
                try:
                    shutil.move(f00, complete_dir)
                    logger.debug(whoami() + "moved " + f00 + " to " + complete_dir)
                except Exception as e:
                    logger.warning(whoami() + str(e) + ": cannot move unknown file to complete!")
            continue
        if file0type in ["rar", "par2", "par2vol"]:
            # archives/parity are consumed by unrar/repair -> delete
            try:
                os.remove(f00)
                logger.debug(whoami() + "removed rar/par2 file " + f0)
            except Exception as e:
                pwdb.exc("db_nzb_update_status", [nzbname, -4], {})
                logger.warning(whoami() + str(e) + ": cannot remove rar/par2 file!")
        else:
            try:
                shutil.move(f00, complete_dir)
                logger.debug(whoami() + "moved non-rar/non-par2 file " + f0 + " to complete")
            except Exception as e:
                pwdb.exc("db_nzb_update_status", [nzbname, -4], {})
                logger.warning(whoami() + str(e) + ": cannot move non-rar/non-par2 file " + f00 + "!")
    stop_wait(nzbname, dirs, pwdb)
    # remove download_dir
    try:
        shutil.rmtree(download_dir)
    except Exception as e:
        pwdb.exc("db_nzb_update_status", [nzbname, -4], {})
        logger.warning(whoami() + str(e) + ": cannot remove download_dir!")
    # move content of unpack dir to complete
    logger.debug(whoami() + "moving unpack_dir to complete: " + unpack_dir)
    for f00 in glob.glob(unpack_dir + "*") + glob.glob(unpack_dir + ".*"):
        logger.debug(whoami() + "unpack_dir: checking " + f00 + " / " + str(os.path.isdir(f00)))
        d0 = f00.split("/")[-1]
        logger.debug(whoami() + "Does " + complete_dir + d0 + " already exist?")
        if os.path.isfile(complete_dir + d0):
            try:
                logger.debug(whoami() + complete_dir + d0 + " already exists, deleting!")
                os.remove(complete_dir + d0)
            except Exception:
                logger.debug(whoami() + f00 + " already exists but cannot delete")
                pwdb.exc("db_nzb_update_status", [nzbname, -4], {})
                break
        else:
            logger.debug(whoami() + complete_dir + d0 + " does not exist!")
        if not os.path.isdir(f00):
            try:
                shutil.move(f00, complete_dir)
                logger.debug(whoami() + ": moved " + f00 + " to " + complete_dir)
            except Exception as e:
                pwdb.exc("db_nzb_update_status", [nzbname, -4], {})
                logger.warning(whoami() + str(e) + ": cannot move unrared file to complete dir!")
        else:
            # directory: replace any existing tree in complete, then copy over
            if os.path.isdir(complete_dir + d0):
                try:
                    shutil.rmtree(complete_dir + d0)
                    logger.debug(whoami() + ": removed tree " + complete_dir + d0)
                except Exception as e:
                    pwdb.exc("db_nzb_update_status", [nzbname, -4], {})
                    logger.warning(whoami() + str(e) + ": cannot remove unrared dir in complete!")
            try:
                logger.debug(whoami() + "copying tree " + f00 + " to " + complete_dir + d0)
                shutil.copytree(f00, complete_dir + d0)
                logger.debug(whoami() + "copied tree " + f00 + " to " + complete_dir + d0)
            except Exception as e:
                pwdb.exc("db_nzb_update_status", [nzbname, -4], {})
                logger.warning(whoami() + str(e) + ": cannot move non-rar/non-par2 file!")
    # remove unpack_dir
    logger.debug(whoami() + ": removing unpack_dir, verified_rardir and incomplete_dir")
    if pwdb.exc("db_nzb_getstatus", [nzbname], {}) != -4:
        try:
            shutil.rmtree(unpack_dir)
            shutil.rmtree(verifiedrar_dir)
            logger.debug(whoami() + ": removed " + unpack_dir + verifiedrar_dir)
        except Exception as e:
            pwdb.exc("db_nzb_update_status", [nzbname, -4], {})
            logger.warning(whoami() + str(e) + ": cannot remove unpack_dir / verifiedrar_dir")
    # remove incomplete_dir
    if pwdb.exc("db_nzb_getstatus", [nzbname], {}) != -4:
        try:
            shutil.rmtree(main_dir)
            logger.debug(whoami() + ": removed " + main_dir)
        except Exception as e:
            pwdb.exc("db_nzb_update_status", [nzbname, -4], {})
            logger.warning(whoami() + str(e) + ": cannot remove incomplete_dir!")
    # finalize
    if pwdb.exc("db_nzb_getstatus", [nzbname], {}) == -4:
        logger.info(whoami() + "Copy/Move of NZB " + nzbname + " failed!")
    else:
        logger.info(whoami() + "Copy/Move of NZB " + nzbname + " success!")
        pwdb.exc("db_nzb_update_status", [nzbname, 4], {})
        logger.info(whoami() + "Postprocess of NZB " + nzbname + " success!")
    sys.exit()
def decode_articles(mp_work_queue0, mp_loggerqueue, filewrite_lock):
    """Worker-process loop: yEnc-decode article chunks from the work queue and write files.

    Pulls (infolist, save_dir, filename, filetype) tuples from mp_work_queue0,
    decodes each article segment with ginzyenc, concatenates the payloads and
    writes the result to save_dir/filename (guarded by filewrite_lock). The file
    status is pushed to the DB: 2 on success, -1 on decoding/file errors.
    Runs until the queue yields a falsy sentinel or the module-level TERMINATED
    flag is set.

    Args:
        mp_work_queue0: multiprocessing queue delivering decode jobs (falsy item = stop).
        mp_loggerqueue: queue for the multiprocessing logger.
        filewrite_lock: lock serializing directory creation / file writes.
    """
    setproctitle("gzbx." + os.path.basename(__file__))
    logger = mplogging.setup_logger(mp_loggerqueue, __file__)
    logger.info(whoami() + "starting article decoder process")
    sh = SigHandler_Decoder(logger)
    signal.signal(signal.SIGINT, sh.sighandler)
    signal.signal(signal.SIGTERM, sh.sighandler)
    pwdb = PWDBSender()
    bytesfinal = bytearray()
    while not TERMINATED:
        res0 = None
        decoder_set_idle(True)
        # poll the work queue until a job arrives or we are terminated
        while not TERMINATED:
            try:
                res0 = mp_work_queue0.get_nowait()
                break
            except (queue.Empty, EOFError):
                pass
            except Exception as e:
                logger.warning(whoami() + str(e))
            time.sleep(0.1)
        if not res0 or TERMINATED:
            logger.info(whoami() + "exiting decoder process!")
            break
        decoder_set_idle(False)
        infolist, save_dir, filename, filetype = res0
        logger.debug(whoami() + "starting decoding for " + filename)
        bytesfinal = bytearray()
        status = 0  # 1: ok, 0: wrong yenc structure, -1: no crc32, -2: crc32 checksum error, -3: decoding error
        statusmsg = "ok"
        # ginzyenc
        full_filename = save_dir + filename
        i = 0
        for info in infolist:
            # determine payload size from the yEnc trailer; fall back to a guestimate
            # BUGFIX: previously `size` stayed unbound (or stale from the previous
            # segment) when the regex did not match without raising, which made
            # ginzyenc fail with a misleading "decoding error".
            size = None
            try:
                lastline = info[-1].decode("latin-1")
                m = re.search('size=(.\d+?) ', lastline)
                if m:
                    size = int(m.group(1))
            except Exception as e:
                logger.warning(whoami() + str(e) + ", guestimate size ...")
            if size is None:
                size = int(sum(len(ln) for ln in info.lines) * 1.1)
            try:
                decoded_data, output_filename, crc, crc_yenc, crc_correct = ginzyenc.decode_usenet_chunks(info, size)
                bytesfinal.extend(decoded_data)
            except Exception as e:
                logger.warning(whoami() + str(e) + ": cannot perform ginzyenc")
                status = -3
                statusmsg = "ginzyenc decoding error!"
                # continue decoding, maybe it can be repaired ...?
            i += 1
        logger.debug(whoami() + "decoding for " + filename + ": success!")
        try:
            with filewrite_lock:
                if not os.path.isdir(save_dir):
                    os.makedirs(save_dir)
                with open(full_filename, "wb") as f0:
                    f0.write(bytesfinal)
                    f0.flush()
        except Exception as e:
            statusmsg = "file_error"
            logger.error(whoami() + str(e) + " in file " + filename)
            status = -4
        logger.info(whoami() + filename + " decoded with status " + str(status) + " / " + statusmsg)
        pwdbstatus = 2
        if status in [-3, -4]:
            pwdbstatus = -1
        try:
            # pwdb.db_file_update_status(filename, pwdbstatus)
            pwdb.exc("db_file_update_status", [filename, pwdbstatus], {})
            logger.debug(whoami() + "updated DB for " + filename + ", db.status=" + str(pwdbstatus))
        except Exception as e:
            logger.error(whoami() + str(e) + ": cannot update DB for " + filename)
        i += 1
    logger.debug(whoami() + "exited!")
def par_verifier(child_pipe, renamed_dir, verifiedrar_dir, main_dir, mp_loggerqueue, nzbname,
                 pvmode, event_idle, cfg):
    """Verify (md5 via par2) or plainly copy renamed rar files into verifiedrar_dir.

    Runs as a separate process. In pvmode "verify" each rar is md5-checked against
    the par2 file list; in "copy" mode (or when no par2 is available) files are
    crc32-checked via sfv where possible and copied. After the initial sweep, the
    renamed_dir is watched via inotify until all rars are verified. Corrupt rars
    are repaired with par2verify/par2repair if par2 data exists.

    Verify status written to DB: 1 = running, 2 = ok/repaired, -1 = repair failed,
    -2 = verify error on at least one file. child_pipe.send(True) signals the
    parent that par2vol blocks are needed. Exits via sys.exit().

    Args:
        child_pipe: pipe end used to signal verification failures to the parent.
        renamed_dir: directory with the renamed downloaded files.
        verifiedrar_dir: target directory for verified rars.
        main_dir: main working directory of the NZB.
        mp_loggerqueue: queue for the multiprocessing logger.
        nzbname: name of the NZB being processed.
        pvmode: "verify" (par2 available) or "copy".
        event_idle: mp.Event set while this process is idle (deadlock detection).
        cfg: configuration mapping (unused directly here).
    """
    setproctitle("gzbx." + os.path.basename(__file__))
    logger = mplogging.setup_logger(mp_loggerqueue, __file__)
    logger.debug(whoami() + "starting ...")
    sh = SigHandler_Verifier(logger)
    signal.signal(signal.SIGINT, sh.sighandler_verifier)
    signal.signal(signal.SIGTERM, sh.sighandler_verifier)
    pwdb = PWDBSender()
    event_idle.clear()
    # BUGFIX: p2/p2list are now always bound; previously a copy-mode run (or a
    # failing P2() constructor) left them undefined -> NameError further down.
    p2 = None
    p2list = []
    if pvmode == "verify":
        # p2 = pwdb.get_renamed_p2(renamed_dir, nzbname)
        try:
            p2list = pwdb.exc("db_p2_get_p2list", [nzbname], {})
            p2 = P2(p2list)
        except Exception as e:
            logger.warning(whoami() + str(e))
    # pwdb.db_nzb_update_verify_status(nzbname, 1)
    pwdb.exc("db_nzb_update_verify_status", [nzbname, 1], {})
    # a: verify all unverified files in "renamed"
    unverified_rarfiles = None
    try:
        # unverified_rarfiles = pwdb.get_all_renamed_rar_files(nzbname)
        unverified_rarfiles = pwdb.exc("get_all_renamed_rar_files", [nzbname], {})
    except Exception as e:
        logger.debug(whoami() + str(e) + ": no unverified rarfiles met in first run, skipping!")
    doloadpar2vols = False
    if pvmode == "verify" and not p2:
        logger.debug(whoami() + "no par2 file found")
    if pvmode == "verify" and unverified_rarfiles and p2:
        logger.debug(whoami() + "verifying all unchecked rarfiles")
        for filename, f_origname in unverified_rarfiles:
            f_short = filename.split("/")[-1]
            md5 = calc_file_md5hash(renamed_dir + filename)
            md5match = [(pmd5 == md5) for pname, pmd5 in p2.filenames() if pname == filename]
            if False in md5match:
                logger.warning(whoami() + " error in md5 hash match for file " + f_short)
                pwdb.exc("db_msg_insert", [nzbname, "error in md5 hash match for file " + f_short,
                                           "warning"], {})
                pwdb.exc("db_nzb_update_verify_status", [nzbname, -2], {})
                pwdb.exc("db_file_update_parstatus", [f_origname, -1], {})
                child_pipe.send(True)
            else:
                logger.info(whoami() + f_short + " md5 hash match ok, copying to verified_rar dir")
                pwdb.exc("db_msg_insert", [nzbname, f_short + " md5 hash match ok, copying to verified_rar dir ",
                                           "info"], {})
                shutil.copy(renamed_dir + filename, verifiedrar_dir)
                pwdb.exc("db_file_update_parstatus", [f_origname, 1], {})
    elif (pvmode == "verify" and not p2) or (pvmode == "copy"):
        logger.info(whoami() + "copying all rarfiles")
        # BUGFIX: guard against unverified_rarfiles being None (TypeError before)
        for filename, f_origname in (unverified_rarfiles or []):
            f_short = filename.split("/")[-1]
            sfvcheck = pwdb.exc("db_nzb_check_sfvcrc32", [nzbname, renamed_dir, renamed_dir + filename], {})
            if sfvcheck == -1:
                logger.warning(whoami() + " error in crc32 check for file " + f_short)
                pwdb.exc("db_msg_insert", [nzbname, "error in crc32 check for file " + f_short,
                                           "warning"], {})
                pwdb.exc("db_nzb_update_verify_status", [nzbname, -2], {})
                pwdb.exc("db_file_update_parstatus", [f_origname, -1], {})
                child_pipe.send(True)
                continue
            logger.debug(whoami() + "copying " + f_short + " to verified_rar dir")
            pwdb.exc("db_msg_insert", [nzbname, "copying " + f_short + " to verified_rar dir ",
                                       "info"], {})
            shutil.copy(renamed_dir + filename, verifiedrar_dir)
            pwdb.exc("db_file_update_parstatus", [f_origname, 1], {})
    # b: inotify renamed_dir
    inotify = inotify_simple.INotify()
    watch_flags = inotify_simple.flags.CREATE | inotify_simple.flags.DELETE | \
        inotify_simple.flags.MODIFY | inotify_simple.flags.DELETE_SELF
    inotify.add_watch(renamed_dir, watch_flags)
    while not TERMINATED:
        # allparstatus = pwdb.db_file_getallparstatus(0)
        allparstatus = pwdb.exc("db_file_getallparstatus", [0], {})
        if 0 not in allparstatus:
            event_idle.clear()
            logger.info(whoami() + "all renamed rars checked, exiting par_verifier")
            break
        events = get_inotify_events(inotify)
        event_idle.set()
        if events or 0 in allparstatus:
            event_idle.clear()
            # par2 may have arrived late -> retry loading it
            if pvmode == "verify" and not p2:
                try:
                    p2list = pwdb.exc("db_p2_get_p2list", [nzbname], {})
                    p2 = P2(p2list)
                except Exception as e:
                    logger.debug(whoami() + str(e))
            if pvmode == "verify" and p2:
                for rar in glob.glob(renamed_dir + "*") + glob.glob(renamed_dir + ".*"):
                    rar0 = rar.split("/")[-1]
                    f0 = pwdb.exc("db_file_get_renamed", [rar0], {})
                    if not f0:
                        continue
                    f0_origname, f0_renamedname, f0_ftype = f0
                    if not f0_ftype == "rar":
                        continue
                    if pwdb.exc("db_file_getparstatus", [rar0], {}) == 0 and f0_renamedname != "N/A":
                        f_short = f0_renamedname.split("/")[-1]
                        md5 = calc_file_md5hash(renamed_dir + rar0)
                        md5match = [(pmd5 == md5) for pname, pmd5 in p2.filenames()
                                    if pname == f0_renamedname]
                        if True in md5match:
                            logger.info(whoami() + f_short + " md5 hash match ok, copying to verified_rar dir")
                            pwdb.exc("db_msg_insert", [nzbname, f_short + " md5 hash match ok, copying to verified_rar dir ",
                                                       "info"], {})
                            shutil.copy(renamed_dir + f0_renamedname, verifiedrar_dir)
                            pwdb.exc("db_file_update_parstatus", [f0_origname, 1], {})
                        elif False in md5match:
                            logger.warning(whoami() + "error in md5 hash match for file " + f_short)
                            pwdb.exc("db_msg_insert", [nzbname, "error in md5 hash match for file " + f_short,
                                                       "warning"], {})
                            pwdb.exc("db_nzb_update_verify_status", [nzbname, -2], {})
                            pwdb.exc("db_file_update_parstatus", [f0_origname, -1], {})
                            child_pipe.send(True)
                        else:
                            # if no match at all in p2list -> try sfvcheck / or just copy
                            sfvcheck = pwdb.exc("db_nzb_check_sfvcrc32", [nzbname, renamed_dir, rar], {})
                            if sfvcheck == -1:
                                logger.warning(whoami() + " error in crc32 check for file " + rar0)
                                pwdb.exc("db_msg_insert", [nzbname, "error in crc32 check for file " + rar0,
                                                           "warning"], {})
                                pwdb.exc("db_nzb_update_verify_status", [nzbname, -2], {})
                                pwdb.exc("db_file_update_parstatus", [f0_origname, -1], {})
                                child_pipe.send(True)
                                continue
                            if pwdb.exc("db_file_getparstatus", [rar0], {}) == 0 and f0_renamedname != "N/A":
                                logger.debug(whoami() + "no md5 check, copying " +
                                             f0_renamedname.split("/")[-1] + " to verified_rar dir")
                                pwdb.exc("db_msg_insert", [nzbname, "no md5 check, copying " +
                                                           f0_renamedname.split("/")[-1] + " to verified_rar dir",
                                                           "info"], {})
                                shutil.copy(renamed_dir + f0_renamedname, verifiedrar_dir)
                                pwdb.exc("db_file_update_parstatus", [f0_origname, 1], {})
            # free rars or copy mode?
            elif (pvmode == "verify" and not p2) or (pvmode == "copy"):
                # maybe we can check via sfv file?
                for file0full in glob.glob(renamed_dir + "*") + glob.glob(renamed_dir + ".*"):
                    file0short = file0full.split("/")[-1]
                    ft = pwdb.exc("db_file_getftype_renamed", [file0short], {})
                    if ft == "rar":
                        rar0 = file0short
                        f0 = pwdb.exc("db_file_get_renamed", [rar0], {})
                        if not f0:
                            continue
                        f0_origname, f0_renamedname, f0_ftype = f0
                        sfvcheck = pwdb.exc("db_nzb_check_sfvcrc32", [nzbname, renamed_dir, file0full], {})
                        if sfvcheck == -1:
                            logger.warning(whoami() + " error in crc32 check for file " + rar0)
                            pwdb.exc("db_msg_insert", [nzbname, "error in crc32 check for file " + rar0,
                                                       "warning"], {})
                            pwdb.exc("db_nzb_update_verify_status", [nzbname, -2], {})
                            pwdb.exc("db_file_update_parstatus", [f0_origname, -1], {})
                            child_pipe.send(True)
                            continue
                        if pwdb.exc("db_file_getparstatus", [rar0], {}) == 0 and f0_renamedname != "N/A":
                            logger.debug(whoami() + "copying " + f0_renamedname.split("/")[-1] +
                                         " to verified_rar dir")
                            pwdb.exc("db_msg_insert", [nzbname, "copying " + f0_renamedname.split("/")[-1] +
                                                       " to verified_rar dir", "info"], {})
                            shutil.copy(renamed_dir + f0_renamedname, verifiedrar_dir)
                            pwdb.exc("db_file_update_parstatus", [f0_origname, 1], {})
        allrarsverified, rvlist = pwdb.exc("db_only_verified_rars", [nzbname], {})
        if allrarsverified:
            break
        time.sleep(1)
    if TERMINATED:
        logger.info(whoami() + "terminated!")
        sys.exit()
    logger.debug(whoami() + "all rars are checked!")
    corruptrars = pwdb.exc("get_all_corrupt_rar_files", [nzbname], {})
    if not corruptrars:
        logger.debug(whoami() + "rar files ok, no repair needed, exiting par_verifier")
        pwdb.exc("db_nzb_update_verify_status", [nzbname, 2], {})
    elif p2list and corruptrars:
        pwdb.exc("db_msg_insert", [nzbname, "repairing rar files", "info"], {})
        logger.info(whoami() + "par2vol files present, repairing ...")
        allok = True
        allfound = True
        corruptrars_1 = [c1 for c1, _ in corruptrars]
        corruptrars_2 = [c2 for _, c2 in corruptrars]
        for _, fnshort, fnlong, rarfiles in p2list:
            rarf_match = [rarf for rarf, _ in rarfiles
                          if rarf in corruptrars_1 or rarf in corruptrars_2]
            if len(rarf_match) == 0:
                allfound = False
                continue
            lrar = str(len(rarfiles))
            pwdb.exc("db_msg_insert", [nzbname, "performing par2verify for " + fnshort, "info"], {})
            ssh = subprocess.Popen(['par2verify', fnlong], shell=False,
                                   stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            sshres = ssh.stdout.readlines()
            repair_is_required = False
            repair_is_possible = False
            for ss in sshres:
                ss0 = ss.decode("utf-8")
                if "Repair is required" in ss0:
                    repair_is_required = True
                if "Repair is possible" in ss0:
                    repair_is_possible = True
            if not repair_is_required:
                pwdb.exc("db_msg_insert", [nzbname, "par2verify for " + fnshort +
                                           ": repair is not required!", "info"], {})
                res0 = 1
            elif repair_is_required and not repair_is_possible:
                pwdb.exc("db_msg_insert", [nzbname, "par2verify for " + fnshort +
                                           ": repair is required but not possible", "error"], {})
                res0 = -1
            elif repair_is_required and repair_is_possible:
                pwdb.exc("db_msg_insert", [nzbname, "par2verify for " + fnshort +
                                           ": repair is required and possible, repairing files", "info"], {})
                logger.info(whoami() + "repair is required and possible, performing par2repair")
                # repair
                ssh = subprocess.Popen(['par2repair', fnlong], shell=False,
                                       stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                sshres = ssh.stdout.readlines()
                repair_complete = False
                for ss in sshres:
                    ss0 = ss.decode("utf-8")
                    if "Repair complete" in ss0:
                        repair_complete = True
                if not repair_complete:
                    res0 = -1
                else:
                    res0 = 1
                # res0 = multipartrar_repair(renamed_dir, fnshort, pwdb, nzbname, logger)
            else:
                res0 = -1
            if res0 != 1:
                allok = False
                logger.error(whoami() + "repair failed for " + lrar + "rarfiles in " + fnshort)
                pwdb.exc("db_msg_insert", [nzbname, "rar file repair failed for " + lrar +
                                           " rarfiles in " + fnshort + "!", "error"], {})
            else:
                logger.info(whoami() + "repair success for " + lrar + "rarfiles in " + fnshort)
                pwdb.exc("db_msg_insert", [nzbname, "rar file repair success for " + lrar +
                                           " rarfiles in " + fnshort + "!", "info"], {})
        if not allfound:
            allok = False
            logger.error(whoami() + "cannot attempt one or more par2repairs due to missing par2 file(s)!")
            pwdb.exc("db_msg_insert", [nzbname, "cannot attempt one or more par2repairs due to missing par2 file(s)!",
                                       "error"], {})
        if allok:
            logger.info(whoami() + "repair success")
            pwdb.exc("db_nzb_update_verify_status", [nzbname, 2], {})
            # copy all not yet copied rars to verifiedrar_dir
            for c_origname, c_renamedname in corruptrars:
                logger.info(whoami() + "copying " + c_renamedname + " to verifiedrar_dir")
                pwdb.exc("db_file_update_parstatus", [c_origname, 1], {})
                pwdb.exc("db_file_update_status", [c_origname, 2], {})
                shutil.copy(renamed_dir + c_renamedname, verifiedrar_dir)
        else:
            logger.error(whoami() + "repair failed!")
            pwdb.exc("db_nzb_update_verify_status", [nzbname, -1], {})
            for _, c_origname in corruptrars:
                pwdb.exc("db_file_update_parstatus", [c_origname, -2], {})
    else:
        # BUGFIX: the message was inserted under the literal string "nzbname"
        # instead of the actual nzbname variable
        pwdb.exc("db_msg_insert", [nzbname, "rar file repair failed, no par files available",
                                   "error"], {})
        logger.warning(whoami() + "some rars are corrupt but cannot repair (no par2 files)")
        pwdb.exc("db_nzb_update_verify_status", [nzbname, -1], {})
    logger.info(whoami() + "terminated!")
    sys.exit()
def partial_unrar(directory, unpack_dir, nzbname, mp_loggerqueue, password, event_idle, cfg):
    """Child-process entry point: incrementally unrar the rar set of one NZB.

    Runs as its own process ("gzbx.<file>"). Waits for rar volumes to appear in
    `directory` and drives an external `unrar` via pexpect, extracting into
    `unpack_dir`. Two modes:
      * password set   -> single non-interactive run with `-p<password>`;
      * no password    -> interactive `-vp` run that pauses after each volume,
                          so extraction can proceed while later volumes are
                          still downloading.
    Progress/status is written to the DB via PWDBSender ("db_nzb_update_unrar_status":
    1 = running, 2 = ok, -1 = failed, -2 = password material missing) and
    `event_idle` signals the parent when this process is only waiting.
    Never returns normally: ends with sys.exit().
    """
    setproctitle("gzbx." + os.path.basename(__file__))
    logger = mplogging.setup_logger(mp_loggerqueue, __file__)
    logger.debug(whoami() + "starting ...")
    pwdb = PWDBSender()
    event_idle.clear()
    cwd0 = os.getcwd()
    sh = SigHandler_Unrar(cwd0, logger)
    signal.signal(signal.SIGINT, sh.sighandler_unrar)
    signal.signal(signal.SIGTERM, sh.sighandler_unrar)
    try:
        os.chdir(directory)
    except FileNotFoundError:
        os.mkdir(directory)
    logger.info(whoami() + "started partial_unrar")
    # get already present rar files; block until at least one shows up
    rar_sortedlist0 = None
    while not TERMINATED:
        rar_sortedlist0 = passworded_rars.get_sorted_rar_list(directory)
        # todo: what to do if does not finish here?
        if rar_sortedlist0:
            break
        time.sleep(1)
    if TERMINATED:
        logger.info(whoami() + "terminated!")
        return
    rar_sortedlist = []
    # todo: this should be done better - pass the rar list as a parameter from main/postproc
    for r1, r2 in rar_sortedlist0:
        try:
            rar_sortedlist.append((r1, r2.split("/")[-1]))
        except Exception as e:
            logger.debug(whoami() + whoami() + ": " + str(e))
    pwdb.exc("db_nzb_update_unrar_status", [nzbname, 1], {})
    nextrarname = rar_sortedlist[0][1]
    # first valid rar_sortedlist in place, start unrar!
    if password:
        # passworded archive: one-shot unrar run, no per-volume pausing
        cmd = "unrar x -y -o+ -p" + password + " '" + directory + nextrarname + "' '" + unpack_dir + "'"
        logger.debug(whoami() + "rar archive is passworded, executing " + cmd)
        pwdb.exc("db_msg_insert", [nzbname, "unraring pw protected rar archive", "info"], {})
        status = 1
        child = pexpect.spawn(cmd)
        status, statmsg, str0 = process_next_unrar_child_pass(event_idle, child, logger)
        if status < 0:
            logger.info(whoami() + nextrarname + ": " + statmsg)
            pwdb.exc("db_msg_insert", [nzbname, "unrar " + nextrarname + " failed!", "error"], {})
        else:
            pwdb.exc("db_msg_insert", [nzbname, "checking for double packed rars", "info"], {})
            # check if double packed (rars inside the extracted data)
            try:
                child.kill(signal.SIGKILL)
            except Exception:
                pass
            is_double_packed, fn = check_double_packed(unpack_dir)
            if is_double_packed:
                pwdb.exc("db_msg_insert", [nzbname, "rars are double packed, starting unrar 2nd run", "warning"], {})
                logger.debug(whoami() + "rars are double packed, executing " + cmd)
                # unrar without pausing!
                cmd = "unrar x -y -o+ -p" + password + " '" + fn + "' '" + unpack_dir + "'"
                child = pexpect.spawn(cmd)
                status, statmsg, str0 = process_next_unrar_child_pass(event_idle, child, logger)
                if status < 0:
                    logger.info(whoami() + "2nd pass: " + statmsg)
                    pwdb.exc("db_msg_insert", [nzbname, "unrar 2nd pass failed!", "error"], {})
                elif status == 0:
                    statmsg = "All OK"
                    status = 0
                    pwdb.exc("db_msg_insert", [nzbname, "unrar success 2nd pass for all rar files!", "info"], {})
                    logger.info(whoami() + "unrar success 2nd pass for all rar files!")
                    logger.debug(whoami() + "deleting all rar files in unpack_dir")
                    delete_all_rar_files(unpack_dir, logger)
                elif status == 1:
                    # still "running" after the child ended -> treat as unknown error
                    status = -3
                    statmsg = "unknown error"
                    logger.info(whoami() + "2nd pass: " + statmsg + " / " + str0)
                    pwdb.exc("db_msg_insert", [nzbname, "unrar 2nd pass failed!", "error"], {})
            else:
                statmsg = "All OK"
                status = 0
                pwdb.exc("db_msg_insert", [nzbname, "unrar success for all rar files!", "info"], {})
                logger.info(whoami() + "unrar success for all rar files!")
    else:
        # unpassworded archive: interactive run with -vp (pause after each volume),
        # so we can extract volumes as they finish downloading
        cmd = "unrar x -y -o+ -vp '" + directory + nextrarname + "' '" + unpack_dir + "'"
        logger.debug(whoami() + "rar archive is NOT passworded, executing " + cmd)
        pwdb.exc("db_msg_insert", [nzbname, "unraring rar archive", "info"], {})
        child = pexpect.spawn(cmd)
        status = 1  # 1 ... running, 0 ... exited ok, -1 ... rar corrupt, -2 ..missing rar, -3 ... unknown error
        while not TERMINATED:
            oldnextrarname = nextrarname.split("/")[-1]
            #oldfoldersize = folder_size(path=unpack_dir)
            #old_t0 = time.time()
            status, statmsg, str0 = process_next_unrar_child_pass(event_idle, child, logger)
            #newfoldersize = folder_size(path=unpack_dir)
            #delta_size = (newfoldersize - oldfoldersize) / (1024 * 1024)
            #delta_t = time.time() - old_t0
            #print(delta_size / delta_t, "M decompressed per sec")
            if status < 0:
                logger.info(whoami() + nextrarname + ": " + statmsg)
                pwdb.exc("db_msg_insert", [nzbname, "unrar " + oldnextrarname + " failed!", "error"], {})
                break
            logger.info(whoami() + nextrarname + ": unrar success!")
            if status == 0:
                # whole set extracted; same double-packed handling as the passworded path
                pwdb.exc("db_msg_insert", [nzbname, "checking for double packed rars", "info"], {})
                # check if double packed
                try:
                    child.kill(signal.SIGKILL)
                except Exception:
                    pass
                is_double_packed, fn = check_double_packed(unpack_dir)
                if is_double_packed:
                    pwdb.exc("db_msg_insert", [nzbname, "rars are double packed, starting unrar 2nd run", "warning"], {})
                    cmd = "unrar x -y -o+ '" + fn + "' '" + unpack_dir + "'"
                    logger.debug(whoami() + "rars are double packed, executing " + cmd)
                    # unrar without pausing!
                    child = pexpect.spawn(cmd)
                    status, statmsg, str0 = process_next_unrar_child_pass(event_idle, child, logger)
                    if status < 0:
                        logger.info(whoami() + "2nd pass: " + statmsg)
                        pwdb.exc("db_msg_insert", [nzbname, "unrar 2nd pass failed!", "error"], {})
                        break
                    if status == 0:
                        statmsg = "All OK"
                        status = 0
                        pwdb.exc("db_msg_insert", [nzbname, "unrar success 2nd pass for all rar files!", "info"], {})
                        logger.info(whoami() + "unrar success 2nd pass for all rar files!")
                        logger.debug(whoami() + "deleting all rar files in unpack_dir")
                        delete_all_rar_files(unpack_dir, logger)
                        break
                    if status == 1:
                        status = -3
                        statmsg = "unknown error"
                        logger.info(whoami() + "2nd pass: " + statmsg + " / " + str0)
                        pwdb.exc("db_msg_insert", [nzbname, "unrar 2nd pass failed!", "error"], {})
                        break
                else:
                    statmsg = "All OK"
                    status = 0
                    pwdb.exc("db_msg_insert", [nzbname, "unrar success for all rar files!", "info"], {})
                    logger.info(whoami() + "unrar success for all rar files!")
                    break
            # unrar paused at "Insert disk with ..." -> parse the next volume name from its output
            try:
                gg = re.search(r"Insert disk with ", str0, flags=re.IGNORECASE)
                gend = gg.span()[1]
                nextrarname = str0[gend:-19]
            except Exception as e:
                logger.warning(whoami() + str(e) + ", unknown error")
                statmsg = "unknown error in re evalution"
                status = -4
                pwdb.exc("db_msg_insert", [nzbname, "unrar " + oldnextrarname + " failed!", "error"], {})
                break
            pwdb.exc("db_msg_insert", [nzbname, "unrar " + oldnextrarname + " success!", "info"], {})
            logger.debug(whoami() + "Waiting for next rar: " + nextrarname)
            # derive (basename, volume number) from the next volume's name.
            # first, if .r00 style
            # NOTE(review): rstrip(".rar") strips a trailing *character set* {.,r,a},
            # not the ".rar" suffix - looks intentional as chained strips here, but
            # edge-case names could be mangled; confirm against real volume names.
            try:
                nextrar_number = int(re.search(r"\d{0,9}$", nextrarname).group())
                nextrar_wo_number = nextrarname.rstrip("0123456789")
            except Exception:
                nextrar_wo_number = nextrarname
                nextrar_number = -1
            # if part01.rar style
            if nextrar_number == -1:
                try:
                    nextrar_number = int(nextrarname.split(".part")[-1].split(".rar")[0])
                    nextrar_wo_number = nextrarname.rstrip(".rar").rstrip("0123456789").rstrip("part").rstrip(".")
                except Exception:
                    nextrar_wo_number = nextrarname
                    nextrar_number = -1
            gotnextrar = False
            nextrar_short = nextrarname.split("/")[-1]
            # todo: avoid deadlock/endless waiting in postprocess here if the rar never shows up!
            event_idle.set()
            while not gotnextrar and not TERMINATED:
                time.sleep(1)
                for f0 in glob.glob(directory + "*") + glob.glob(directory + ".*"):
                    if nextrarname == f0:
                        gotnextrar = True
                # now check if we waited too long for next rar0 - but how?
                # heuristic: if a *higher-numbered* volume of the same set already
                # exists, the awaited one was probably skipped -> warn the user
                if not gotnextrar and nextrar_number != -1:
                    for f0 in glob.glob(directory + "*") + glob.glob(directory + ".*"):
                        f0_number = -1
                        f0_wo_number = f0
                        try:
                            f0_number = int(re.search(r"\d{0,9}$", f0).group())
                            f0_wo_number = f0.rstrip("0123456789")
                        except Exception:
                            try:
                                f0_number = int(f0.split(".part")[-1].split(".rar")[0])
                                f0_wo_number = f0.rstrip(".rar").rstrip("0123456789").rstrip("part").rstrip(".")
                            except Exception:
                                continue
                        if f0_number == -1:
                            continue
                        if f0_wo_number == nextrar_wo_number and f0_number > nextrar_number:
                            pwdb.exc("db_msg_insert", [
                                nzbname, "unrar waiting for next rar, but next rar " + nextrar_short +
                                " seems to be skipped, you may want to interrupt ...", "warning"
                            ], {})
                            break
            event_idle.clear()
            if TERMINATED:
                break
            time.sleep(1)
            # attention, hack! answer unrar's "Insert disk" prompt with [C]ontinue
            child.sendline("C")
        try:
            child.kill(signal.SIGKILL)
        except Exception:
            pass
    if TERMINATED:
        logger.info(whoami() + "exited!")
    else:
        # map final pexpect status onto the DB unrar status
        logger.info(whoami() + str(status) + " " + statmsg)
        try:
            os.chdir(cwd0)
            if status == 0:
                pwdb.exc("db_nzb_update_unrar_status", [nzbname, 2], {})
            elif status == -5:
                pwdb.exc("db_nzb_update_unrar_status", [nzbname, -2], {})
            else:
                pwdb.exc("db_nzb_update_unrar_status", [nzbname, -1], {})
        except Exception as e:
            logger.warning(whoami() + str(e))
    event_idle.clear()
    logger.info(whoami() + "exited!")
    sys.exit()
def ginzi_main(cfg_file, cfg, dirs, subdirs, guiport, mp_loggerqueue):
    """Main orchestration loop of the downloader daemon.

    Spawns the worker processes (nzbparser, renamer, mpconnector), serves the
    GTK GUI over a zmq REP socket on `guiport`, and steps NZBs through their
    lifecycle (download -> postprocess -> done/failed) based on the status codes
    stored in the DB (2 downloading, 3 postprocessing, 4 success, -2 download
    failed, -4 postproc failed). On stop it tears everything down, persists the
    per-server bandwidth time series, optionally writes a changed config file,
    and exits the process.
    """
    setproctitle("gzbx." + os.path.basename(__file__))
    logger = mplogging.setup_logger(mp_loggerqueue, __file__)
    logger.debug(whoami() + "starting ...")
    pwdb = PWDBSender()
    # multiprocessing events
    mp_events = {}
    mp_events["unrarer"] = mp.Event()
    mp_events["verifier"] = mp.Event()
    # threading events
    event_stopped = threading.Event()
    articlequeue = None
    resultqueue = None
    mp_work_queue = mp.Queue()
    renamer_result_queue = mp.Queue()
    # filewrite_lock = mp.Lock()
    mpconnector_lock = threading.Lock()
    filewrite_lock = mp.Lock()
    # one duplex pipe pair per worker; parent end stays here, child end is
    # handed to the spawned process
    renamer_parent_pipe, renamer_child_pipe = mp.Pipe()
    unrarer_parent_pipe, unrarer_child_pipe = mp.Pipe()
    verifier_parent_pipe, verifier_child_pipe = mp.Pipe()
    mpconnector_parent_pipe, mpconnector_child_pipe = mp.Pipe()
    pipes = {
        "renamer": [renamer_parent_pipe, renamer_child_pipe],
        "unrarer": [unrarer_parent_pipe, unrarer_child_pipe],
        "verifier": [verifier_parent_pipe, verifier_child_pipe],
        "mpconnector": [mpconnector_parent_pipe, mpconnector_child_pipe, mpconnector_lock]
    }
    # load server ts (per-server download time series) from file
    try:
        server_ts0 = pickle.load(open(dirs["main"] + "ginzibix.ts", "rb"))
    except Exception:
        server_ts0 = {}
    config_servers = get_configured_servers(cfg)
    config_servers.append("-ALL SERVERS-")
    # drop time series of servers that are no longer configured
    server_ts = {key: server_ts0[key] for key in server_ts0 if key in config_servers}
    del server_ts0
    ct = None
    # update delay (also used as the zmq receive timeout below)
    try:
        update_delay = float(cfg["OPTIONS"]["UPDATE_DELAY"])
    except Exception as e:
        logger.warning(whoami() + str(e) + ", setting update_delay to default 0.5")
        update_delay = 0.5
    # init tcp with gtkgui.py
    context = zmq.Context()
    socket = context.socket(zmq.REP)
    socket.bind("tcp://*:" + str(guiport))
    socket.setsockopt(zmq.RCVTIMEO, int(update_delay * 1000))
    # init sighandler
    logger.debug(whoami() + "initializing sighandler")
    mpp = {
        "nzbparser": None, "decoder": None, "unrarer": None, "renamer": None,
        "verifier": None, "post": None, "mpconnector": None
    }
    sh = SigHandler_Main(event_stopped, logger)
    signal.signal(signal.SIGINT, sh.sighandler)
    signal.signal(signal.SIGTERM, sh.sighandler)
    # start nzb parser mpp
    logger.info(whoami() + "starting nzbparser process ...")
    mpp_nzbparser = mp.Process(target=nzb_parser.ParseNZB, args=(
        cfg, dirs, filewrite_lock, mp_loggerqueue,
    ))
    mpp_nzbparser.start()
    mpp["nzbparser"] = mpp_nzbparser
    # start renamer
    logger.info(whoami() + "starting renamer process ...")
    mpp_renamer = mp.Process(target=renamer.renamer, args=(
        renamer_child_pipe, renamer_result_queue, mp_loggerqueue, filewrite_lock,
    ))
    mpp_renamer.start()
    mpp["renamer"] = mpp_renamer
    # start mpconnector
    logger.info(whoami() + "starting mpconnector process ...")
    mpp_connector = mp.Process(target=mpconnections.mpconnector, args=(
        mpconnector_child_pipe, cfg, server_ts, mp_loggerqueue,
    ))
    mpp_connector.start()
    mpp["mpconnector"] = mpp_connector
    dl = None
    nzbname = None
    pwdb.exc("db_nzb_set_current_nzbobj", [nzbname], {})
    paused = False
    article_health = 0
    dl_running = True
    applied_datarec = None
    # reload nzb lists for gui
    pwdb.exc("store_sorted_nzbs", [], {})
    DEBUGPRINT = False
    # main looooooooooooooooooooooooooooooooooooooooooooooooooooop
    try:
        while not event_stopped.is_set():
            # set connection health / human-readable status string for the gui
            if dl:
                stat0 = pwdb.exc("db_nzb_getstatus", [nzbname], {})
                if stat0 == 2:
                    statusmsg = "downloading"
                elif stat0 == 3:
                    statusmsg = "postprocessing"
                elif stat0 == 4:
                    statusmsg = "success"
                elif stat0 == -4:
                    statusmsg = "failed"
                # send data to gui
            else:
                article_health = 0
                statusmsg = ""
            # poll the gui socket; RCVTIMEO makes this return EAGAIN when idle,
            # which doubles as the loop's tick delay
            msg = None
            datarec = None
            try:
                msg, datarec = socket.recv_pyobj()
            except zmq.ZMQError as e:
                if e.errno == zmq.EAGAIN:
                    msg = None
                    pass
            except Exception as e:
                logger.error(whoami() + str(e))
                try:
                    socket.send_pyobj(("NOOK", None))
                except Exception as e:
                    logger.error(whoami() + str(e))
            if msg:
                if DEBUGPRINT:
                    print("-" * 10, "received", msg)
            if msg == "REQ":
                # gui requests a status snapshot
                try:
                    if DEBUGPRINT:
                        print(">>>> #0 main:", time.time(), msg)
                    serverconfig = do_mpconnections(pipes, "get_server_config", None)
                    try:
                        update_server_ts(server_ts, ct, pipes)
                    except Exception as e:
                        logger.warning(whoami() + str(e))
                    if dl:
                        dl_results = dl.results
                    else:
                        dl_results = None
                    getdata = None
                    downloaddata_gc = None
                    if dl_results:
                        nzbname, downloaddata, _, _ = dl_results
                        if DEBUGPRINT:
                            print(">>>> #0a main:", time.time(), msg)
                        bytescount0, allbytesdownloaded0, availmem0, avgmiblist, filetypecounter, _, article_health,\
                            overall_size, already_downloaded_size, p2, overall_size_wparvol, allfileslist = downloaddata
                        gb_downloaded = dl.allbytesdownloaded0 / GB_DIVISOR
                        if DEBUGPRINT:
                            print(">>>> #0b main:", time.time(), msg)
                        downloaddata_gc = bytescount0, availmem0, avgmiblist, filetypecounter, nzbname, article_health,\
                            overall_size, already_downloaded_size
                        if DEBUGPRINT:
                            print(">>>> #4 main:", time.time(), msg)
                        getdata = downloaddata_gc, serverconfig, dl_running, statusmsg,\
                            article_health, dl.serverhealth(), gb_downloaded, server_ts
                    else:
                        downloaddata_gc = None, None, None, None, None, None, None, None
                        getdata = downloaddata_gc, serverconfig, dl_running, statusmsg, \
                            0, 0, 0, server_ts
                    # if one element in getdata != None - send:
                    if getdata.count(None) != len(
                            getdata) or downloaddata_gc.count(None) != len(
                                downloaddata_gc):
                        sendtuple = ("DL_DATA", getdata)
                    else:
                        sendtuple = ("NOOK", None)
                except Exception as e:
                    logger.error(whoami() + str(e))
                    sendtuple = ("NOOK", None)
                try:
                    socket.send_pyobj(sendtuple)
                except Exception as e:
                    logger.error(whoami() + str(e))
                    if DEBUGPRINT:
                        print(str(e))
            elif msg == "NZB_ADDED":
                # gui dropped new nzb file(s): copy into the watched nzb dir
                for nzb0 in datarec:
                    try:
                        shutil.copy(nzb0, dirs["nzb"])
                        socket.send_pyobj(("NZB_ADDED_OK", None))
                    except Exception as e:
                        logger.error(whoami() + str(e))
                logger.info(whoami() + "copied new nzb files into nzb_dir")
            elif msg == "SET_CLOSEALL":
                # gui requests shutdown (datarec may carry a changed config to persist)
                try:
                    socket.send_pyobj(("SET_CLOSE_OK", None))
                    applied_datarec = datarec
                    event_stopped.set()
                    continue
                except Exception as e:
                    logger.error(whoami() + str(e))
            elif msg == "SET_PAUSE":
                # pause downloads
                try:
                    if not paused:
                        paused = True
                        if nzbname:
                            logger.info(whoami() + "download paused for NZB " + nzbname)
                        else:
                            logger.info(whoami() + "download paused!")
                        do_mpconnections(pipes, "pause", None)
                        if dl:
                            dl.pause()
                        postprocessor.postproc_pause()
                    socket.send_pyobj(("SET_PAUSE_OK", None))
                    dl_running = False
                except Exception as e:
                    logger.error(whoami() + str(e))
            elif msg == "SET_RESUME":
                # resume downloads
                try:
                    if paused:
                        logger.info(whoami() + "download resumed for NZB " + nzbname)
                        paused = False
                        do_mpconnections(pipes, "resume", None)
                        if dl:
                            dl.resume()
                        postprocessor.postproc_resume()
                    socket.send_pyobj(("SET_RESUME_OK", None))
                    dl_running = True
                except Exception as e:
                    logger.error(whoami() + str(e))
                continue
            elif msg == "REPROCESS_FROM_LAST":
                # restart selected NZBs from their last sensible stage
                try:
                    for reprocessed_nzb in datarec:
                        reproc_stat0 = pwdb.exc("db_nzb_getstatus", [reprocessed_nzb], {})
                        if reproc_stat0:
                            nzbdirname = re.sub(r"[.]nzb$", "", reprocessed_nzb, flags=re.IGNORECASE) + "/"
                            incompletedir = dirs["incomplete"] + nzbdirname
                            # status -1, -2, 4: restart from 0
                            if reproc_stat0 in [-1, -2, 4]:
                                pwdb.exc("db_nzb_delete", [reprocessed_nzb], {})
                                remove_nzbdirs([reprocessed_nzb], dirs, pwdb, logger, removenzbfile=False)
                                update_fmodtime_nzbfiles(
                                    [reprocessed_nzb], dirs, logger)  # trigger nzbparser.py
                                logger.debug(whoami() + reprocessed_nzb + ": status " +
                                             str(reproc_stat0) + ", restart from 0")
                            # status -4/-3 (postproc. failed/interrupted): re-postprocess
                            elif reproc_stat0 in [-4, -3]:
                                if reproc_stat0 == -4:
                                    pwdb.exc("db_nzb_undo_postprocess", [reprocessed_nzb], {})
                                clear_postproc_dirs(reprocessed_nzb, dirs)
                                # if incompletedir: -> postprocess again
                                if os.path.isdir(incompletedir):
                                    pwdb.exc("nzb_prio_insert_second", [reprocessed_nzb, 3], {})
                                    logger.debug(
                                        whoami() + reprocessed_nzb +
                                        ": status -4/-3 w/ dir, restart from 3"
                                    )
                                # else restart overall
                                else:
                                    pwdb.exc("db_nzb_delete", [reprocessed_nzb], {})
                                    remove_nzbdirs([reprocessed_nzb], dirs, pwdb, logger, removenzbfile=False)
                                    update_fmodtime_nzbfiles([reprocessed_nzb], dirs, logger)
                                    logger.debug(
                                        whoami() + reprocessed_nzb +
                                        ": status -4/-3 w/o dir, restart from 0"
                                    )
                            # else: undefined
                            else:
                                logger.debug(whoami() + reprocessed_nzb + ": status " +
                                             str(reproc_stat0) + ", no action!")
                    pwdb.exc("store_sorted_nzbs", [], {})
                    socket.send_pyobj(("REPROCESS_FROM_START_OK", None))
                except Exception as e:
                    logger.error(whoami() + str(e))
            elif msg in ["DELETED_FROM_HISTORY", "REPROCESS_FROM_START"]:
                try:
                    for removed_nzb in datarec:
                        pwdb.exc("db_nzb_delete", [removed_nzb], {})
                    pwdb.exc("store_sorted_nzbs", [], {})
                    if msg == "DELETED_FROM_HISTORY":
                        remove_nzbdirs(datarec, dirs, pwdb, logger)
                        socket.send_pyobj(("DELETED_FROM_HISTORY_OK", None))
                        logger.info(whoami() + "NZBs have been deleted from history")
                    else:
                        remove_nzbdirs(datarec, dirs, pwdb, logger, removenzbfile=False)
                        update_fmodtime_nzbfiles(
                            datarec, dirs, logger)  # trigger nzbparser.py
                        socket.send_pyobj(("REPROCESS_FROM_START_OK", None))
                        logger.info(whoami() + "NZBs will be reprocessed from start")
                except Exception as e:
                    logger.error(whoami() + str(e))
            elif msg == "SET_NZB_INTERRUPT":
                # move NZB(s) to history; a dry run ("move_and_resetprios": False)
                # first tells us whether the currently downloading NZB is affected
                logger.info(whoami() + "NZBs have been stopped/moved to history")
                try:
                    first_has_changed, moved_nzbs = pwdb.exc(
                        "move_nzb_list", [datarec], {"move_and_resetprios": False})
                    if moved_nzbs:
                        pwdb.exc(
                            "db_msg_insert",
                            [nzbname, "NZB(s) moved to history", "warning"], {})
                    if first_has_changed:
                        logger.info(whoami() + "first NZB has changed")
                        if dl:
                            clear_download(nzbname, pwdb, articlequeue, resultqueue,
                                           mp_work_queue, dl, dirs, pipes, mpp, ct,
                                           logger, stopall=False)
                            dl.stop()
                            dl.join()
                        first_has_changed, moved_nzbs = pwdb.exc(
                            "move_nzb_list", [datarec], {"move_and_resetprios": True})
                        nzbname = None
                        pwdb.exc("db_nzb_set_current_nzbobj", [nzbname], {})
                        if dl:
                            del dl
                            dl = None
                    else:  # if current nzb didnt change just update, but do not restart
                        first_has_changed, moved_nzbs = pwdb.exc(
                            "move_nzb_list", [datarec], {"move_and_resetprios": True})
                    pwdb.exc("store_sorted_nzbs", [], {})
                    socket.send_pyobj(("SET_INTERRUPT_OK", None))
                except Exception as e:
                    logger.error(whoami() + str(e))
            elif msg == "SET_NZB_ORDER":
                logger.info(whoami() + "NZBs have been reordered/deleted")
                try:
                    # just get info if first has changed etc.
                    first_has_changed, deleted_nzbs = pwdb.exc(
                        "reorder_nzb_list", [datarec], {"delete_and_resetprios": False})
                    if deleted_nzbs:
                        pwdb.exc("db_msg_insert",
                                 [nzbname, "NZB(s) deleted", "warning"], {})
                    if first_has_changed:
                        logger.info(whoami() + "first NZB has changed")
                        if dl:
                            clear_download(nzbname, pwdb, articlequeue, resultqueue,
                                           mp_work_queue, dl, dirs, pipes, mpp, ct,
                                           logger, stopall=False)
                            dl.stop()
                            dl.join()
                        first_has_changed, deleted_nzbs = pwdb.exc(
                            "reorder_nzb_list", [datarec], {"delete_and_resetprios": True})
                        remove_nzbdirs(deleted_nzbs, dirs, pwdb, logger)
                        nzbname = None
                        pwdb.exc("db_nzb_set_current_nzbobj", [nzbname], {})
                        if dl:
                            del dl
                            dl = None
                    else:  # if current nzb didnt change just update, but do not restart
                        first_has_changed, deleted_nzbs = pwdb.exc(
                            "reorder_nzb_list", [datarec], {"delete_and_resetprios": True})
                        remove_nzbdirs(deleted_nzbs, dirs, pwdb, logger)
                    pwdb.exc("store_sorted_nzbs", [], {})
                    # release gtkgui from block
                    socket.send_pyobj(("SET_DELETE_REORDER_OK", None))
                except Exception as e:
                    logger.error(whoami() + str(e))
            elif msg:
                # unknown command: REP socket must still answer
                try:
                    socket.send_pyobj(("NOOK", None))
                except Exception as e:
                    if DEBUGPRINT:
                        print(str(e))
                    logger.debug(whoami() + str(e) + ", received msg: " + str(msg))
                continue
            # if not downloading: fetch next NZB from the queue and start a Downloader
            if not dl:
                nzbname = make_allfilelist_wait(pwdb, dirs, logger, -1)
                if nzbname:
                    pwdb.exc("db_nzb_set_current_nzbobj", [nzbname], {})
                    do_mpconnections(pipes, "reset_timestamps_bdl", None)
                    logger.info(whoami() + "got next NZB: " + str(nzbname))
                    dl = downloader.Downloader(cfg, dirs, ct, mp_work_queue, articlequeue, resultqueue,
                                               mpp, pipes, renamer_result_queue, mp_events, nzbname,
                                               mp_loggerqueue, filewrite_lock, logger)
                    # if status postprocessing, don't start threads!
                    if pwdb.exc("db_nzb_getstatus", [nzbname], {}) in [0, 1, 2]:
                        if not paused:
                            do_mpconnections(pipes, "resume", None)
                        if paused:
                            dl.pause()
                        dl.start()
            else:
                # downloader exists: advance the NZB through its lifecycle
                stat0 = pwdb.exc("db_nzb_getstatus", [nzbname], {})
                # if postproc ok
                if stat0 == 4:
                    logger.info(whoami() + "postprocessor success for NZB " + nzbname)
                    do_mpconnections(pipes, "pause", None)
                    clear_download(nzbname, pwdb, articlequeue, resultqueue, mp_work_queue,
                                   dl, dirs, pipes, mpp, ct, logger, stopall=False)
                    dl.stop()
                    dl.join()
                    if mpp_is_alive(mpp, "post"):
                        mpp["post"].join()
                    mpp["post"] = None
                    pwdb.exc("db_msg_insert", [
                        nzbname, "downloaded and postprocessed successfully!", "success"
                    ], {})
                    # set 'flags' for getting next nzb
                    del dl
                    dl = None
                    nzbname = None
                    pwdb.exc("db_nzb_set_current_nzbobj", [nzbname], {})
                    pwdb.exc("store_sorted_nzbs", [], {})
                    pwdb.exc("db_save_to_file", [], {})
                # if download ok -> postprocess
                elif stat0 == 3 and not mpp_is_alive(mpp, "post"):
                    article_health = 0
                    logger.info(whoami() + "download success, postprocessing NZB " + nzbname)
                    pwdb.exc("db_msg_insert", [
                        nzbname, "downloaded ok, starting postprocess", "success"
                    ], {})
                    mpp_post = mp.Process(target=postprocessor.postprocess_nzb, args=(
                        nzbname, articlequeue, resultqueue, mp_work_queue, pipes, mpp,
                        mp_events, cfg, dl.verifiedrar_dir, dl.unpack_dir, dl.nzbdir,
                        dl.rename_dir, dl.main_dir, dl.download_dir, dl.dirs, dl.pw_file,
                        mp_loggerqueue,
                    ))
                    mpp_post.start()
                    mpp["post"] = mpp_post
                # if download failed
                elif stat0 == -2:
                    logger.info(whoami() + "download failed for NZB " + nzbname)
                    do_mpconnections(pipes, "pause", None)
                    clear_download(nzbname, pwdb, articlequeue, resultqueue, mp_work_queue,
                                   dl, dirs, pipes, mpp, ct, logger, stopall=False,
                                   onlyarticlequeue=False)
                    dl.stop()
                    dl.join()
                    pwdb.exc("db_msg_insert", [nzbname, "downloaded failed!", "error"], {})
                    # set 'flags' for getting next nzb
                    del dl
                    dl = None
                    nzbname = None
                    pwdb.exc("db_nzb_set_current_nzbobj", [nzbname], {})
                    pwdb.exc("store_sorted_nzbs", [], {})
                    pwdb.exc("db_save_to_file", [], {})
                # if postproc failed
                elif stat0 == -4:
                    logger.error(whoami() + "postprocessor failed for NZB " + nzbname)
                    do_mpconnections(pipes, "pause", None)
                    clear_download(nzbname, pwdb, articlequeue, resultqueue, mp_work_queue,
                                   dl, dirs, pipes, mpp, ct, logger, stopall=False,
                                   onlyarticlequeue=False)
                    dl.stop()
                    dl.join()
                    if mpp_is_alive(mpp, "post"):
                        mpp["post"].join()
                    pwdb.exc("db_msg_insert", [
                        nzbname, "downloaded and/or postprocessing failed!", "error"
                    ], {})
                    mpp["post"] = None
                    # set 'flags' for getting next nzb
                    del dl
                    dl = None
                    nzbname = None
                    pwdb.exc("db_nzb_set_current_nzbobj", [nzbname], {})
                    pwdb.exc("store_sorted_nzbs", [], {})
                    pwdb.exc("db_save_to_file", [], {})
    except Exception as e:
        if DEBUGPRINT:
            print(str(e))
        else:
            pass
    # shutdown
    logger.info(whoami() + "closeall: starting shutdown sequence")
    do_mpconnections(pipes, "pause", None)
    logger.debug(whoami() + "closeall: connection threads paused")
    if dl:
        dl.stop()
        dl.join()
        logger.debug(whoami() + "closeall: downloader joined")
    try:
        clear_download(nzbname, pwdb, articlequeue, resultqueue, mp_work_queue, dl, dirs,
                       pipes, mpp, ct, logger, stopall=True, onlyarticlequeue=False)
    except Exception as e:
        logger.error(whoami() + str(e) + ": closeall error!")
    dl = None
    nzbname = None
    pwdb.exc("db_nzb_set_current_nzbobj", [nzbname], {})
    logger.debug(whoami() + "closeall: closing gtkgui-socket")
    try:
        socket.close()
        context.term()
    except Exception as e:
        logger.warning(whoami() + str(e))
    logger.debug(whoami() + "closeall: all cleared")
    # save pandas time series
    try:
        pickle.dump(server_ts, open(dirs["main"] + "ginzibix.ts", "wb"))
        logger.info(whoami() + "closeall: saved downloaded-timeseries to file")
    except Exception as e:
        logger.warning(whoami() + str(e) + ": closeall: error in saving download-timeseries")
    # if restart because of settings applied in gui -> write cfg to file
    if applied_datarec:
        try:
            with open(cfg_file, 'w') as configfile:
                applied_datarec.write(configfile)
            logger.debug(whoami() + "changed config file written!")
        except Exception as e:
            logger.error(whoami() + str(e) + ": cannot write changed config file!")
    logger.info(whoami() + "exited!")
    sys.exit(0)
def renamer(child_pipe, renamer_result_queue, mp_loggerqueue, filewrite_lock):
    """Child-process entry point: watch a download dir and rename/move files.

    Waits on `child_pipe` for a ("start", source_dir, dest_dir, nzbname)
    command, then watches `source_dir` with inotify. par2 / par2vol files are
    copied to `dest_dir` under a normalized name and recorded in the DB; all
    other files are handed (with their 16k-md5 hash) to
    rename_and_move_rarandremainingfiles() for matching against the par2 file
    list. Each processed file is announced on `renamer_result_queue`. A "pause"
    command on the pipe drops back to waiting for the next "start".
    """
    setproctitle("gzbx." + os.path.basename(__file__))
    logger = mplogging.setup_logger(mp_loggerqueue, __file__)
    logger.debug(whoami() + "starting renamer process")
    sh = SigHandler_Renamer(logger)
    signal.signal(signal.SIGINT, sh.sighandler_renamer)
    signal.signal(signal.SIGTERM, sh.sighandler_renamer)
    pwdb = PWDBSender()
    cwd0 = os.getcwd()
    while not TERMINATED:
        # wait for start command
        logger.debug(whoami() + "waiting for start command")
        while not TERMINATED:
            if child_pipe.poll():
                command = child_pipe.recv()
                try:
                    cmd0, source_dir, dest_dir, nzbname = command
                    if cmd0 == "start":
                        break
                except Exception as e:
                    logger.warning(whoami() + str(e))
            time.sleep(0.1)
        if TERMINATED:
            break
        # normalize dirs to trailing "/" (paths are concatenated below)
        if source_dir[-1] != "/":
            source_dir += "/"
        if dest_dir[-1] != "/":
            dest_dir += "/"
        try:
            os.chdir(dest_dir)
        except FileNotFoundError:
            os.mkdir(dest_dir)
        os.chdir(source_dir)
        # init inotify
        inotify = inotify_simple.INotify()
        watch_flags = inotify_simple.flags.CREATE | inotify_simple.flags.DELETE | inotify_simple.flags.MODIFY | inotify_simple.flags.DELETE_SELF
        wd = inotify.add_watch(source_dir, watch_flags)
        isfirstrun = True
        p2list = pwdb.exc("db_p2_get_p2list", [nzbname], {})
        while not TERMINATED:
            events = get_inotify_events(inotify, filewrite_lock)
            if isfirstrun or events:  # and events not in eventslist):
                logger.debug(whoami() + "Events: " + str(events))
                # in downloaded_dir: look for not renamed files
                logger.debug(whoami() + "reading not yet downloaded files in _downloaded0")
                notrenamedfiles = []
                with filewrite_lock:
                    for fnfull in glob.glob(source_dir + "*") + glob.glob(source_dir + ".*"):
                        fnshort = fnfull.split("/")[-1]
                        rn = pwdb.exc("db_file_get_renamed_name", [fnshort], {})
                        is_renamed = rn and (rn != "N/A")
                        if not is_renamed:
                            p2 = par2lib.Par2File(fnfull)
                            if p2:
                                oldft = pwdb.exc("db_file_get_orig_filetype", [fnshort], {})
                                if p2.is_par2():
                                    # main .par2: remember its 16k md5 hashes for
                                    # matching obfuscated rar names later
                                    rarfiles = [
                                        (fn, md5) for fn, md5 in p2.md5_16khash()
                                    ]
                                    p2list.append(
                                        (p2, fnshort, dest_dir + fnshort, rarfiles))
                                    pwdb.exc("db_p2_insert_p2", [
                                        nzbname, p2, fnshort, dest_dir + fnshort, rarfiles
                                    ], {})
                                    pwdb.exc("db_file_set_file_type", [fnshort, "par2"], {})
                                    try:
                                        newshortname = fnshort.split(".par2")[0] + ".par2"
                                        shutil.copyfile(source_dir + fnshort,
                                                        dest_dir + newshortname)
                                        pwdb.exc("db_file_set_renamed_name",
                                                 [fnshort, newshortname], {})
                                        renamer_result_queue.put(
                                            (newshortname, dest_dir + newshortname,
                                             "par2", fnshort, oldft))
                                    except Exception as e:
                                        logger.error(whoami() + str(e) +
                                                     ": cannot rename par2 file!")
                                # par2vol
                                elif p2.is_par2vol():
                                    pwdb.exc("db_file_set_file_type", [fnshort, "par2vol"], {})
                                    try:
                                        newshortname = fnshort.split(".PAR2")[0] + ".PAR2"
                                        shutil.copyfile(source_dir + fnshort,
                                                        dest_dir + newshortname)
                                        pwdb.exc("db_file_set_renamed_name",
                                                 [fnshort, newshortname], {})
                                        renamer_result_queue.put(
                                            (newshortname, dest_dir + newshortname,
                                             "par2vol", fnshort, oldft))
                                    except Exception as e:
                                        logger.error(whoami() + str(e) +
                                                     ": cannot rename par2 file!")
                                    # could set # of blocks here in gpeewee
                                try:
                                    os.remove(fnfull)
                                except Exception:
                                    pass
                            else:
                                # not a par2 file: collect with its 16k-md5 for
                                # rename-matching below
                                notrenamedfiles.append(
                                    (fnfull, fnshort,
                                     par2lib.calc_file_md5hash_16k(fnfull)))
                # rename & move rar + remaining files
                if notrenamedfiles:
                    # hmm, something's not quite right here (original note: "da hats was")
                    rename_and_move_rarandremainingfiles(
                        p2list, notrenamedfiles, source_dir, dest_dir, pwdb,
                        renamer_result_queue, filewrite_lock, logger)
                isfirstrun = False
            if child_pipe.poll():
                # NOTE(review): "pause" is unpacked as a 3-tuple while "start" above
                # is a 4-tuple - presumably the sender uses different shapes; confirm.
                command, _, _ = child_pipe.recv()
                if command == "pause":
                    break
        os.chdir(cwd0)
        try:
            if wd:
                inotify.rm_watch(wd)
        except Exception as e:
            logger.warning(whoami() + str(e))
        logger.debug(whoami() + "renamer paused")
    logger.info(whoami() + "exited!")
def ParseNZB(cfg, dirs, lock, mp_loggerqueue):
    """Child-process entry point: watch the nzb dir and import new NZB files.

    Watches `dirs["nzb"]` with inotify. Each new *.nzb is registered in the DB
    ("db_nzb_insert"), decomposed into its files/articles which are inserted
    as well, and finally set to status 1 ("queued"). Filenames containing
    square brackets are renamed first (workaround for an `re` issue, see
    comment below). NZBs that cannot be parsed are set to status -1
    ("cannot queue"). Loops until the module-global TERMINATED is set.

    Fixes vs. previous revision:
      * "inserting <name>into db" log message was missing a space;
      * the "Could not interpret nzb" warning now carries the whoami() prefix
        like every other log call in this file.
    """
    setproctitle("gzbx." + os.path.basename(__file__))
    logger = mplogging.setup_logger(mp_loggerqueue, __file__)
    nzbdir = dirs["nzb"]
    incompletedir = dirs["incomplete"]
    global TERMINATED
    sh = SigHandler_Parser(logger)
    signal.signal(signal.SIGINT, sh.sighandler)
    signal.signal(signal.SIGTERM, sh.sighandler)
    pwdb = PWDBSender()
    cwd0 = os.getcwd()
    os.chdir(nzbdir)
    inotify = inotify_simple.INotify()
    watch_flags = inotify_simple.flags.CREATE | inotify_simple.flags.DELETE | inotify_simple.flags.MODIFY | inotify_simple.flags.DELETE_SELF
    inotify.add_watch(nzbdir, watch_flags)
    isfirstrun = True
    while not TERMINATED:
        events = get_inotify_events(inotify)
        if isfirstrun or events:  # and events not in eventslist):
            if isfirstrun:
                logger.debug(whoami() + "scanning nzb dir ...")
            else:
                logger.debug(whoami() + "got event in nzb_dir")
            for nzb in glob.glob("*.nzb") + glob.glob(".*.nzb"):
                if TERMINATED:
                    break
                nzb0 = nzb.split("/")[-1]
                if pwdb.exc("db_nzb_exists", [nzb0], {}):
                    logger.warning(whoami() + " NZB file " + nzb0 + " already exists in DB")
                    continue
                # replace [ and ] brackets in nzb0 / this is a bug in re!?
                if ("[" in nzb0) or ("]" in nzb0):
                    nzb0 = nzb0.replace("[", "(")
                    nzb0 = nzb0.replace("]", ")")
                    try:
                        os.rename(nzb, dirs["nzb"] + nzb0)
                    except Exception as e:
                        logger.error(whoami() + "Cannot rename NZB file " + nzb0 + " :" + str(e))
                        continue
                logger.info(whoami() + "inserting " + nzb0 + " into db")
                newnzb = pwdb.exc("db_nzb_insert", [nzb0], {})
                if newnzb:
                    logger.info(whoami() + "new NZB file " + nzb0 + " detected")
                    # update size
                    # rename nzb here to ....processed
                    filedic, bytescount = decompose_nzb(nzb0, logger)
                    if not filedic:
                        logger.warning(whoami() + "Could not interpret nzb " + nzb0 +
                                       ", setting to obsolete")
                        pwdb.exc("db_nzb_update_status", [nzb0, -1],
                                 {"usefasttrack": False})  # status "cannot queue / -1"
                    else:
                        size_gb = bytescount / (1024 * 1024 * 1024)
                        infostr = nzb0 + " / " + "{0:.3f}".format(size_gb) + " GB"
                        logger.debug(whoami() + "analysing NZB: " + infostr)
                        # insert files + articles; items[0] is (age, nr), the rest
                        # are per-article (filename, number, size) tuples
                        for key, items in filedic.items():
                            if TERMINATED:
                                break
                            data = []
                            fsize = 0
                            for i, it in enumerate(items):
                                if TERMINATED:
                                    break
                                if i == 0:
                                    age, nr0 = it
                                    ftype = par2lib.get_file_type(key)
                                    logger.debug(whoami() + "analysing and inserting file " + key +
                                                 " + articles: age=" + str(age) + " / nr=" +
                                                 str(nr0) + " / type=" + ftype)
                                    newfile = pwdb.exc("db_file_insert",
                                                       [key, newnzb, nr0, age, ftype], {})
                                else:
                                    fn, no, size = it
                                    fsize += size
                                    data.append((fn, newfile, size, no, time.time()))
                            data.sort(key=lambda tup: tup[3])  # order articles by number
                            pwdb.exc("db_file_update_size", [key, fsize], {})
                            pwdb.exc("db_article_insert_many", [data], {})
                            # if there are nzbs in the download queue, pause after insert
                            if not pwdb.exc("db_nzb_are_all_nzb_idle", [], {}):
                                time.sleep(0.3)
                        logger.info(whoami() + "Added NZB: " + infostr + " to database / queue")
                        pwdb.exc("db_nzb_update_status", [nzb0, 1],
                                 {"usefasttrack": False})  # status "queued / 1"
                        logger.debug(whoami() + "Added NZB: " + infostr + " to GUI")
                        pwdb.exc("store_sorted_nzbs", [], {})
                        pwdb.exc("create_allfile_list_via_name", [nzb0, incompletedir], {})
            time.sleep(0.25)
            isfirstrun = False
        else:
            time.sleep(0.25)
    os.chdir(cwd0)
    logger.debug(whoami() + "exited!")