def decode_articles(mp_work_queue0, mp_loggerqueue, filewrite_lock):
    """Decoder worker process.

    Pulls (infolist, save_dir, filename, filetype) work items from
    mp_work_queue0, yEnc-decodes all article chunks via ginzyenc, writes the
    assembled file to save_dir and stores the result status in the DB.
    Terminates when a falsy work item (poison pill) is received or the
    module-level TERMINATED flag is set by the signal handler.

    mp_work_queue0  -- mp.Queue delivering work items
    mp_loggerqueue  -- queue for the multiprocessing logger
    filewrite_lock  -- mp.Lock serializing all file writes across processes
    """
    setproctitle("gzbx." + os.path.basename(__file__))
    logger = mplogging.setup_logger(mp_loggerqueue, __file__)
    logger.info(whoami() + "starting article decoder process")
    sh = SigHandler_Decoder(logger)
    signal.signal(signal.SIGINT, sh.sighandler)
    signal.signal(signal.SIGTERM, sh.sighandler)
    pwdb = PWDBSender()
    bytesfinal = bytearray()
    while not TERMINATED:
        res0 = None
        decoder_set_idle(True)
        # poll non-blockingly so the TERMINATED flag is honored quickly
        while not TERMINATED:
            try:
                res0 = mp_work_queue0.get_nowait()
                break
            except (queue.Empty, EOFError):
                pass
            except Exception as e:
                logger.warning(whoami() + str(e))
            time.sleep(0.1)
        if not res0 or TERMINATED:
            logger.info(whoami() + "exiting decoder process!")
            break
        decoder_set_idle(False)
        infolist, save_dir, filename, filetype = res0
        logger.debug(whoami() + "starting decoding for " + filename)
        bytesfinal = bytearray()
        status = 0  # 1: ok, 0: wrong yenc structure, -1: no crc32, -2: crc32 checksum error, -3: decoding error
        statusmsg = "ok"
        # ginzyenc
        full_filename = save_dir + filename
        i = 0
        for info in infolist:
            # Get the chunk size from the yenc trailer; on any failure fall
            # back to a guesstimate (sum of line lengths + 10%).
            try:
                lastline = info[-1].decode("latin-1")
                m = re.search('size=(.\d+?) ', lastline)
                if m:
                    size = int(m.group(1))
                else:
                    # fix: previously 'size' silently stayed unbound (or kept the
                    # stale value of the previous chunk) when the regex did not
                    # match; force the guesstimate fallback instead
                    raise ValueError("no size= entry found in yenc trailer")
            except Exception as e:
                logger.warning(whoami() + str(e) + ", guestimate size ...")
                size = int(sum(len(line0) for line0 in info.lines) * 1.1)
            try:
                decoded_data, output_filename, crc, crc_yenc, crc_correct = ginzyenc.decode_usenet_chunks(info, size)
                bytesfinal.extend(decoded_data)
            except Exception as e:
                logger.warning(whoami() + str(e) + ": cannot perform ginzyenc")
                status = -3
                statusmsg = "ginzyenc decoding error!"
                # continue decoding, maybe it can be repaired ...?
            i += 1
        logger.debug(whoami() + "decoding for " + filename + ": success!")
        try:
            with filewrite_lock:
                if not os.path.isdir(save_dir):
                    os.makedirs(save_dir)
                # 'with' flushes and closes the file; no explicit flush/close needed
                with open(full_filename, "wb") as f0:
                    f0.write(bytesfinal)
        except Exception as e:
            statusmsg = "file_error"
            logger.error(whoami() + str(e) + " in file " + filename)
            status = -4
        logger.info(whoami() + filename + " decoded with status " + str(status) + " / " + statusmsg)
        # map internal status to DB status: 2 = ok, -1 = decode/write failure
        pwdbstatus = 2
        if status in [-3, -4]:
            pwdbstatus = -1
        try:
            pwdb.exc("db_file_update_status", [filename, pwdbstatus], {})
            logger.debug(whoami() + "updated DB for " + filename + ", db.status=" + str(pwdbstatus))
        except Exception as e:
            logger.error(whoami() + str(e) + ": cannot update DB for " + filename)
    logger.debug(whoami() + "exited!")
def partial_unrar(directory, unpack_dir, nzbname, mp_loggerqueue, password, event_idle, cfg):
    """Unrar worker process.

    Extracts the rar set in 'directory' to 'unpack_dir'. Passworded archives
    are extracted in one go (all volumes must be present); otherwise unrar
    runs with -vp and is paused on every "Insert disk" prompt until the next
    volume has been downloaded. Double-packed rar sets trigger a second unrar
    pass. The final result is stored via db_nzb_update_unrar_status:
    2 = ok, -2 = failed (status -5), -1 = any other failure.

    NOTE: the statements previously mangled to '******' by a secret-redaction
    pass have been restored from the intact parallel branches.
    """
    setproctitle("gzbx." + os.path.basename(__file__))
    logger = mplogging.setup_logger(mp_loggerqueue, __file__)
    logger.debug(whoami() + "starting ...")
    pwdb = PWDBSender()
    event_idle.clear()
    cwd0 = os.getcwd()
    sh = SigHandler_Unrar(cwd0, logger)
    signal.signal(signal.SIGINT, sh.sighandler_unrar)
    signal.signal(signal.SIGTERM, sh.sighandler_unrar)
    try:
        os.chdir(directory)
    except FileNotFoundError:
        os.mkdir(directory)
    logger.info(whoami() + "started partial_unrar")
    # get already present rar files
    rar_sortedlist0 = None
    while not TERMINATED:
        rar_sortedlist0 = passworded_rars.get_sorted_rar_list(directory)
        # todo: what to do if does not finish here?
        if rar_sortedlist0:
            break
        time.sleep(1)
    if TERMINATED:
        logger.info(whoami() + "terminated!")
        return
    rar_sortedlist = []
    # todo: this should be done better - pass the rar list as a parameter from main/postproc
    for r1, r2 in rar_sortedlist0:
        try:
            rar_sortedlist.append((r1, r2.split("/")[-1]))
        except Exception as e:
            logger.debug(whoami() + whoami() + ": " + str(e))
    pwdb.exc("db_nzb_update_unrar_status", [nzbname, 1], {})
    nextrarname = rar_sortedlist[0][1]
    # first valid rar_sortedlist in place, start unrar!
    if password:
        cmd = "unrar x -y -o+ -p" + password + " '" + directory + nextrarname + "' '" + unpack_dir + "'"
        logger.debug(whoami() + "rar archive is passworded, executing " + cmd)
        pwdb.exc("db_msg_insert", [nzbname, "unraring pw protected rar archive", "info"], {})
        status = 1
        child = pexpect.spawn(cmd)
        status, statmsg, str0 = process_next_unrar_child_pass(event_idle, child, logger)
        if status < 0:
            logger.info(whoami() + nextrarname + ": " + statmsg)
            pwdb.exc("db_msg_insert", [nzbname, "unrar " + nextrarname + " failed!", "error"], {})
        else:
            pwdb.exc("db_msg_insert", [nzbname, "checking for double packed rars", "info"], {})
            # check if double packed
            try:
                child.kill(signal.SIGKILL)
            except Exception:
                pass
            is_double_packed, fn = check_double_packed(unpack_dir)
            if is_double_packed:
                pwdb.exc("db_msg_insert", [nzbname, "rars are double packed, starting unrar 2nd run", "warning"], {})
                logger.debug(whoami() + "rars are double packed, executing " + cmd)
                # unrar without pausing!
                cmd = "unrar x -y -o+ -p" + password + " '" + fn + "' '" + unpack_dir + "'"
                child = pexpect.spawn(cmd)
                status, statmsg, str0 = process_next_unrar_child_pass(event_idle, child, logger)
                if status < 0:
                    # fix: restored statements mangled by secret-redaction ('******')
                    logger.info(whoami() + "2nd pass: " + statmsg)
                    pwdb.exc("db_msg_insert", [nzbname, "unrar 2nd pass failed!", "error"], {})
                elif status == 0:
                    statmsg = "All OK"
                    status = 0
                    pwdb.exc("db_msg_insert", [nzbname, "unrar success 2nd pass for all rar files!", "info"], {})
                    logger.info(whoami() + "unrar success 2nd pass for all rar files!")
                    logger.debug(whoami() + "deleting all rar files in unpack_dir")
                    delete_all_rar_files(unpack_dir, logger)
                elif status == 1:
                    status = -3
                    statmsg = "unknown error"
                    # fix: restored statement mangled by secret-redaction ('******')
                    logger.info(whoami() + "2nd pass: " + statmsg + " / " + str0)
                    pwdb.exc("db_msg_insert", [nzbname, "unrar 2nd pass failed!", "error"], {})
            else:
                statmsg = "All OK"
                status = 0
                pwdb.exc("db_msg_insert", [nzbname, "unrar success for all rar files!", "info"], {})
                logger.info(whoami() + "unrar success for all rar files!")
    else:
        cmd = "unrar x -y -o+ -vp '" + directory + nextrarname + "' '" + unpack_dir + "'"
        logger.debug(whoami() + "rar archive is NOT passworded, executing " + cmd)
        pwdb.exc("db_msg_insert", [nzbname, "unraring rar archive", "info"], {})
        child = pexpect.spawn(cmd)
        status = 1  # 1 ... running, 0 ... exited ok, -1 ... rar corrupt, -2 ..missing rar, -3 ... unknown error
        while not TERMINATED:
            oldnextrarname = nextrarname.split("/")[-1]
            status, statmsg, str0 = process_next_unrar_child_pass(event_idle, child, logger)
            if status < 0:
                logger.info(whoami() + nextrarname + ": " + statmsg)
                pwdb.exc("db_msg_insert", [nzbname, "unrar " + oldnextrarname + " failed!", "error"], {})
                break
            logger.info(whoami() + nextrarname + ": unrar success!")
            if status == 0:
                pwdb.exc("db_msg_insert", [nzbname, "checking for double packed rars", "info"], {})
                # check if double packed
                try:
                    child.kill(signal.SIGKILL)
                except Exception:
                    pass
                is_double_packed, fn = check_double_packed(unpack_dir)
                if is_double_packed:
                    pwdb.exc("db_msg_insert", [nzbname, "rars are double packed, starting unrar 2nd run", "warning"], {})
                    cmd = "unrar x -y -o+ '" + fn + "' '" + unpack_dir + "'"
                    logger.debug(whoami() + "rars are double packed, executing " + cmd)
                    # unrar without pausing!
                    child = pexpect.spawn(cmd)
                    status, statmsg, str0 = process_next_unrar_child_pass(event_idle, child, logger)
                    if status < 0:
                        # fix: restored statements mangled by secret-redaction ('******')
                        logger.info(whoami() + "2nd pass: " + statmsg)
                        pwdb.exc("db_msg_insert", [nzbname, "unrar 2nd pass failed!", "error"], {})
                        break
                    if status == 0:
                        statmsg = "All OK"
                        status = 0
                        pwdb.exc("db_msg_insert", [nzbname, "unrar success 2nd pass for all rar files!", "info"], {})
                        logger.info(whoami() + "unrar success 2nd pass for all rar files!")
                        logger.debug(whoami() + "deleting all rar files in unpack_dir")
                        delete_all_rar_files(unpack_dir, logger)
                        break
                    if status == 1:
                        status = -3
                        statmsg = "unknown error"
                        # fix: restored statement mangled by secret-redaction ('******')
                        logger.info(whoami() + "2nd pass: " + statmsg + " / " + str0)
                        pwdb.exc("db_msg_insert", [nzbname, "unrar 2nd pass failed!", "error"], {})
                        break
                else:
                    statmsg = "All OK"
                    status = 0
                    pwdb.exc("db_msg_insert", [nzbname, "unrar success for all rar files!", "info"], {})
                    logger.info(whoami() + "unrar success for all rar files!")
                    break
            # unrar is waiting at "Insert disk with <volume> ..." - extract the
            # name of the volume it wants next
            try:
                gg = re.search(r"Insert disk with ", str0, flags=re.IGNORECASE)
                gend = gg.span()[1]
                nextrarname = str0[gend:-19]
            except Exception as e:
                logger.warning(whoami() + str(e) + ", unknown error")
                statmsg = "unknown error in re evalution"
                status = -4
                pwdb.exc("db_msg_insert", [nzbname, "unrar " + oldnextrarname + " failed!", "error"], {})
                break
            pwdb.exc("db_msg_insert", [nzbname, "unrar " + oldnextrarname + " success!", "info"], {})
            logger.debug(whoami() + "Waiting for next rar: " + nextrarname)
            # derive (basename, running number) of the awaited volume;
            # first, if .r00
            try:
                nextrar_number = int(re.search(r"\d{0,9}$", nextrarname).group())
                nextrar_wo_number = nextrarname.rstrip("0123456789")
            except Exception:
                nextrar_wo_number = nextrarname
                nextrar_number = -1
            # if part01.rar
            if nextrar_number == -1:
                try:
                    nextrar_number = int(nextrarname.split(".part")[-1].split(".rar")[0])
                    nextrar_wo_number = nextrarname.rstrip(".rar").rstrip("0123456789").rstrip("part").rstrip(".")
                except Exception:
                    nextrar_wo_number = nextrarname
                    nextrar_number = -1
            gotnextrar = False
            nextrar_short = nextrarname.split("/")[-1]
            # todo: avoid deadlock / infinite waiting in postprocess here if the rar never shows up!
            event_idle.set()
            while not gotnextrar and not TERMINATED:
                time.sleep(1)
                for f0 in glob.glob(directory + "*") + glob.glob(directory + ".*"):
                    if nextrarname == f0:
                        gotnextrar = True
                # now check if we waited too long for next rar0 - but how?
                if not gotnextrar and nextrar_number != -1:
                    for f0 in glob.glob(directory + "*") + glob.glob(directory + ".*"):
                        f0_number = -1
                        f0_wo_number = f0
                        try:
                            f0_number = int(re.search(r"\d{0,9}$", f0).group())
                            f0_wo_number = f0.rstrip("0123456789")
                        except Exception:
                            try:
                                f0_number = int(f0.split(".part")[-1].split(".rar")[0])
                                f0_wo_number = f0.rstrip(".rar").rstrip("0123456789").rstrip("part").rstrip(".")
                            except Exception:
                                continue
                        if f0_number == -1:
                            continue
                        if f0_wo_number == nextrar_wo_number and f0_number > nextrar_number:
                            pwdb.exc("db_msg_insert", [
                                nzbname,
                                "unrar waiting for next rar, but next rar " + nextrar_short +
                                " seems to be skipped, you may want to interrupt ...", "warning"
                            ], {})
                            break
            event_idle.clear()
            if TERMINATED:
                break
            time.sleep(1)
            # attention, hack! - acknowledge unrar's prompt so it continues
            child.sendline("C")
        try:
            child.kill(signal.SIGKILL)
        except Exception:
            pass
    if TERMINATED:
        logger.info(whoami() + "exited!")
    else:
        logger.info(whoami() + str(status) + " " + statmsg)
        try:
            os.chdir(cwd0)
            if status == 0:
                pwdb.exc("db_nzb_update_unrar_status", [nzbname, 2], {})
            elif status == -5:
                pwdb.exc("db_nzb_update_unrar_status", [nzbname, -2], {})
            else:
                pwdb.exc("db_nzb_update_unrar_status", [nzbname, -1], {})
        except Exception as e:
            logger.warning(whoami() + str(e))
    event_idle.clear()
    logger.info(whoami() + "exited!")
    sys.exit()
def par_verifier(child_pipe, renamed_dir, verifiedrar_dir, main_dir, mp_loggerqueue, nzbname, pvmode, event_idle, cfg):
    """Verifier worker process.

    Checks renamed rar files (md5 against par2 info in pvmode "verify",
    crc32/sfv check otherwise) and copies good files to verifiedrar_dir.
    Watches renamed_dir via inotify until all rars are checked, then runs
    par2verify/par2repair on corrupt ones if par2 files are available.
    Sends True over child_pipe whenever a corrupt rar is detected (so the
    main process can start downloading par2vol files).
    Final verify status in DB: 2 = ok/repaired, -1 = repair failed,
    -2 = corrupt rar detected.
    """
    setproctitle("gzbx." + os.path.basename(__file__))
    logger = mplogging.setup_logger(mp_loggerqueue, __file__)
    logger.debug(whoami() + "starting ...")
    sh = SigHandler_Verifier(logger)
    signal.signal(signal.SIGINT, sh.sighandler_verifier)
    signal.signal(signal.SIGTERM, sh.sighandler_verifier)
    pwdb = PWDBSender()
    event_idle.clear()
    # fix: p2/p2list are referenced later even when pvmode != "verify" (or the
    # p2list DB call fails), which raised a NameError; initialize them up front
    p2 = None
    p2list = []
    if pvmode == "verify":
        try:
            p2list = pwdb.exc("db_p2_get_p2list", [nzbname], {})
            p2 = P2(p2list)
        except Exception as e:
            logger.warning(whoami() + str(e))
    pwdb.exc("db_nzb_update_verify_status", [nzbname, 1], {})
    # a: verify all unverified files in "renamed"
    unverified_rarfiles = None
    try:
        unverified_rarfiles = pwdb.exc("get_all_renamed_rar_files", [nzbname], {})
    except Exception as e:
        logger.debug(whoami() + str(e) + ": no unverified rarfiles met in first run, skipping!")
    if pvmode == "verify" and not p2:
        logger.debug(whoami() + "no par2 file found")
    if pvmode == "verify" and unverified_rarfiles and p2:
        logger.debug(whoami() + "verifying all unchecked rarfiles")
        for filename, f_origname in unverified_rarfiles:
            f_short = filename.split("/")[-1]
            md5 = calc_file_md5hash(renamed_dir + filename)
            md5match = [(pmd5 == md5) for pname, pmd5 in p2.filenames() if pname == filename]
            if False in md5match:
                logger.warning(whoami() + " error in md5 hash match for file " + f_short)
                pwdb.exc("db_msg_insert", [nzbname, "error in md5 hash match for file " + f_short, "warning"], {})
                pwdb.exc("db_nzb_update_verify_status", [nzbname, -2], {})
                pwdb.exc("db_file_update_parstatus", [f_origname, -1], {})
                child_pipe.send(True)
            else:
                logger.info(whoami() + f_short + " md5 hash match ok, copying to verified_rar dir")
                pwdb.exc("db_msg_insert", [nzbname, f_short + " md5 hash match ok, copying to verified_rar dir ", "info"], {})
                shutil.copy(renamed_dir + filename, verifiedrar_dir)
                pwdb.exc("db_file_update_parstatus", [f_origname, 1], {})
    elif (pvmode == "verify" and not p2) or (pvmode == "copy"):
        logger.info(whoami() + "copying all rarfiles")
        # fix: unverified_rarfiles may still be None in this branch
        for filename, f_origname in unverified_rarfiles or []:
            f_short = filename.split("/")[-1]
            sfvcheck = pwdb.exc("db_nzb_check_sfvcrc32", [nzbname, renamed_dir, renamed_dir + filename], {})
            if sfvcheck == -1:
                logger.warning(whoami() + " error in crc32 check for file " + f_short)
                pwdb.exc("db_msg_insert", [nzbname, "error in crc32 check for file " + f_short, "warning"], {})
                pwdb.exc("db_nzb_update_verify_status", [nzbname, -2], {})
                pwdb.exc("db_file_update_parstatus", [f_origname, -1], {})
                child_pipe.send(True)
                continue
            logger.debug(whoami() + "copying " + f_short + " to verified_rar dir")
            pwdb.exc("db_msg_insert", [nzbname, "copying " + f_short + " to verified_rar dir ", "info"], {})
            shutil.copy(renamed_dir + filename, verifiedrar_dir)
            pwdb.exc("db_file_update_parstatus", [f_origname, 1], {})
    # b: inotify renamed_dir
    inotify = inotify_simple.INotify()
    watch_flags = inotify_simple.flags.CREATE | inotify_simple.flags.DELETE | inotify_simple.flags.MODIFY | inotify_simple.flags.DELETE_SELF
    inotify.add_watch(renamed_dir, watch_flags)
    while not TERMINATED:
        allparstatus = pwdb.exc("db_file_getallparstatus", [0], {})
        if 0 not in allparstatus:
            event_idle.clear()
            logger.info(whoami() + "all renamed rars checked, exiting par_verifier")
            break
        events = get_inotify_events(inotify)
        event_idle.set()
        if events or 0 in allparstatus:
            event_idle.clear()
            if pvmode == "verify" and not p2:
                # a par2 file may have arrived in the meantime
                try:
                    p2list = pwdb.exc("db_p2_get_p2list", [nzbname], {})
                    p2 = P2(p2list)
                except Exception as e:
                    logger.debug(whoami() + str(e))
            if pvmode == "verify" and p2:
                for rar in glob.glob(renamed_dir + "*") + glob.glob(renamed_dir + ".*"):
                    rar0 = rar.split("/")[-1]
                    f0 = pwdb.exc("db_file_get_renamed", [rar0], {})
                    if not f0:
                        continue
                    f0_origname, f0_renamedname, f0_ftype = f0
                    if not f0_ftype == "rar":
                        continue
                    if pwdb.exc("db_file_getparstatus", [rar0], {}) == 0 and f0_renamedname != "N/A":
                        f_short = f0_renamedname.split("/")[-1]
                        md5 = calc_file_md5hash(renamed_dir + rar0)
                        md5match = [(pmd5 == md5) for pname, pmd5 in p2.filenames() if pname == f0_renamedname]
                        if True in md5match:
                            logger.info(whoami() + f_short + " md5 hash match ok, copying to verified_rar dir")
                            pwdb.exc("db_msg_insert", [nzbname, f_short + " md5 hash match ok, copying to verified_rar dir ", "info"], {})
                            shutil.copy(renamed_dir + f0_renamedname, verifiedrar_dir)
                            pwdb.exc("db_file_update_parstatus", [f0_origname, 1], {})
                        elif False in md5match:
                            logger.warning(whoami() + "error in md5 hash match for file " + f_short)
                            pwdb.exc("db_msg_insert", [nzbname, "error in md5 hash match for file " + f_short, "warning"], {})
                            pwdb.exc("db_nzb_update_verify_status", [nzbname, -2], {})
                            pwdb.exc("db_file_update_parstatus", [f0_origname, -1], {})
                            child_pipe.send(True)
                        else:
                            # if no match at all in p2list -> try sfvcheck / or just copy
                            sfvcheck = pwdb.exc("db_nzb_check_sfvcrc32", [nzbname, renamed_dir, rar], {})
                            if sfvcheck == -1:
                                logger.warning(whoami() + " error in crc32 check for file " + rar0)
                                pwdb.exc("db_msg_insert", [nzbname, "error in crc32 check for file " + rar0, "warning"], {})
                                pwdb.exc("db_nzb_update_verify_status", [nzbname, -2], {})
                                pwdb.exc("db_file_update_parstatus", [f0_origname, -1], {})
                                child_pipe.send(True)
                                continue
                            if pwdb.exc("db_file_getparstatus", [rar0], {}) == 0 and f0_renamedname != "N/A":
                                logger.debug(whoami() + "no md5 check, copying " + f0_renamedname.split("/")[-1] + " to verified_rar dir")
                                pwdb.exc("db_msg_insert", [nzbname, "no md5 check, copying " + f0_renamedname.split("/")[-1] + " to verified_rar dir", "info"], {})
                                shutil.copy(renamed_dir + f0_renamedname, verifiedrar_dir)
                                pwdb.exc("db_file_update_parstatus", [f0_origname, 1], {})
            # free rars or copy mode?
            elif (pvmode == "verify" and not p2) or (pvmode == "copy"):
                # maybe we can check via sfv file?
                for file0full in glob.glob(renamed_dir + "*") + glob.glob(renamed_dir + ".*"):
                    file0short = file0full.split("/")[-1]
                    ft = pwdb.exc("db_file_getftype_renamed", [file0short], {})
                    if ft == "rar":
                        rar0 = file0short
                        f0 = pwdb.exc("db_file_get_renamed", [rar0], {})
                        if not f0:
                            continue
                        f0_origname, f0_renamedname, f0_ftype = f0
                        sfvcheck = pwdb.exc("db_nzb_check_sfvcrc32", [nzbname, renamed_dir, file0full], {})
                        if sfvcheck == -1:
                            logger.warning(whoami() + " error in crc32 check for file " + rar0)
                            pwdb.exc("db_msg_insert", [nzbname, "error in crc32 check for file " + rar0, "warning"], {})
                            pwdb.exc("db_nzb_update_verify_status", [nzbname, -2], {})
                            pwdb.exc("db_file_update_parstatus", [f0_origname, -1], {})
                            child_pipe.send(True)
                            continue
                        if pwdb.exc("db_file_getparstatus", [rar0], {}) == 0 and f0_renamedname != "N/A":
                            logger.debug(whoami() + "copying " + f0_renamedname.split("/")[-1] + " to verified_rar dir")
                            pwdb.exc("db_msg_insert", [nzbname, "copying " + f0_renamedname.split("/")[-1] + " to verified_rar dir", "info"], {})
                            shutil.copy(renamed_dir + f0_renamedname, verifiedrar_dir)
                            pwdb.exc("db_file_update_parstatus", [f0_origname, 1], {})
            allrarsverified, rvlist = pwdb.exc("db_only_verified_rars", [nzbname], {})
            if allrarsverified:
                break
        time.sleep(1)
    if TERMINATED:
        logger.info(whoami() + "terminated!")
        sys.exit()
    logger.debug(whoami() + "all rars are checked!")
    corruptrars = pwdb.exc("get_all_corrupt_rar_files", [nzbname], {})
    if not corruptrars:
        logger.debug(whoami() + "rar files ok, no repair needed, exiting par_verifier")
        pwdb.exc("db_nzb_update_verify_status", [nzbname, 2], {})
    elif p2list and corruptrars:
        pwdb.exc("db_msg_insert", [nzbname, "repairing rar files", "info"], {})
        logger.info(whoami() + "par2vol files present, repairing ...")
        allok = True
        allfound = True
        corruptrars_1 = [c1 for c1, _ in corruptrars]
        corruptrars_2 = [c2 for _, c2 in corruptrars]
        for _, fnshort, fnlong, rarfiles in p2list:
            rarf_match = [rarf for rarf, _ in rarfiles if rarf in corruptrars_1 or rarf in corruptrars_2]
            if len(rarf_match) == 0:
                allfound = False
                continue
            lrar = str(len(rarfiles))
            pwdb.exc("db_msg_insert", [nzbname, "performing par2verify for " + fnshort, "info"], {})
            ssh = subprocess.Popen(['par2verify', fnlong], shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            sshres = ssh.stdout.readlines()
            repair_is_required = False
            repair_is_possible = False
            for ss in sshres:
                ss0 = ss.decode("utf-8")
                if "Repair is required" in ss0:
                    repair_is_required = True
                if "Repair is possible" in ss0:
                    repair_is_possible = True
            if not repair_is_required:
                pwdb.exc("db_msg_insert", [nzbname, "par2verify for " + fnshort + ": repair is not required!", "info"], {})
                res0 = 1
            elif repair_is_required and not repair_is_possible:
                pwdb.exc("db_msg_insert", [nzbname, "par2verify for " + fnshort + ": repair is required but not possible", "error"], {})
                res0 = -1
            elif repair_is_required and repair_is_possible:
                pwdb.exc("db_msg_insert", [
                    nzbname, "par2verify for " + fnshort + ": repair is required and possible, repairing files", "info"
                ], {})
                logger.info(whoami() + "repair is required and possible, performing par2repair")
                # repair
                ssh = subprocess.Popen(['par2repair', fnlong], shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                sshres = ssh.stdout.readlines()
                repair_complete = False
                for ss in sshres:
                    ss0 = ss.decode("utf-8")
                    if "Repair complete" in ss0:
                        repair_complete = True
                if not repair_complete:
                    res0 = -1
                else:
                    res0 = 1
            else:
                res0 = -1
            if res0 != 1:
                allok = False
                logger.error(whoami() + "repair failed for " + lrar + "rarfiles in " + fnshort)
                pwdb.exc("db_msg_insert", [nzbname, "rar file repair failed for " + lrar + " rarfiles in " + fnshort + "!", "error"], {})
            else:
                logger.info(whoami() + "repair success for " + lrar + "rarfiles in " + fnshort)
                pwdb.exc("db_msg_insert", [nzbname, "rar file repair success for " + lrar + " rarfiles in " + fnshort + "!", "info"], {})
        if not allfound:
            allok = False
            logger.error(whoami() + "cannot attempt one or more par2repairs due to missing par2 file(s)!")
            pwdb.exc("db_msg_insert", [nzbname, "cannot attempt one or more par2repairs due to missing par2 file(s)!", "error"], {})
        if allok:
            logger.info(whoami() + "repair success")
            pwdb.exc("db_nzb_update_verify_status", [nzbname, 2], {})
            # copy all no yet copied rars to verifiedrar_dir
            for c_origname, c_renamedname in corruptrars:
                logger.info(whoami() + "copying " + c_renamedname + " to verifiedrar_dir")
                pwdb.exc("db_file_update_parstatus", [c_origname, 1], {})
                pwdb.exc("db_file_update_status", [c_origname, 2], {})
                shutil.copy(renamed_dir + c_renamedname, verifiedrar_dir)
        else:
            logger.error(whoami() + "repair failed!")
            pwdb.exc("db_nzb_update_verify_status", [nzbname, -1], {})
            for _, c_origname in corruptrars:
                pwdb.exc("db_file_update_parstatus", [c_origname, -2], {})
    else:
        # fix: was ["nzbname", ...] - the literal string "nzbname" was passed
        # to the DB instead of the nzbname variable
        pwdb.exc("db_msg_insert", [nzbname, "rar file repair failed, no par files available", "error"], {})
        logger.warning(whoami() + "some rars are corrupt but cannot repair (no par2 files)")
        pwdb.exc("db_nzb_update_verify_status", [nzbname, -1], {})
    logger.info(whoami() + "terminated!")
    sys.exit()
def mpconnector(child_pipe, cfg, server_ts, mp_loggerqueue):
    """Connection-worker process.

    Owns all usenet ConnectionThreads and serves commands arriving over
    child_pipe as (cmd, param) tuples, replying with one result object per
    command (None for unknown commands). Articles downloaded by the
    connection threads are funneled through a local zmq STREAMER device and
    drained here via the pull_* / clear_* commands.

    child_pipe     -- mp.Pipe endpoint for the command/reply protocol
    cfg            -- parsed configuration
    server_ts      -- per-server timestamp/bytecount dict
    mp_loggerqueue -- queue for the multiprocessing logger
    """
    setproctitle("gzbx." + os.path.basename(__file__))
    logger = mplogging.setup_logger(mp_loggerqueue, __file__)
    logger.debug(whoami() + "starting mpconnector process")
    sh = SigHandler_MPconnector(logger)
    signal.signal(signal.SIGINT, sh.sighandler_mpconnector)
    signal.signal(signal.SIGTERM, sh.sighandler_mpconnector)
    # setup ports + zmq streamer device
    # probe upwards from 37100 until two adjacent free ports are found
    worker_port = 37100
    connections_port = worker_port + 1
    while is_port_in_use(worker_port) or is_port_in_use(connections_port):
        worker_port += 1
        connections_port = worker_port + 1
    # streamerdevice gathers results downloaded from usenet servers and
    # keeps them ready to be delivered to mpconnector (which is the only worker).
    # this is not working with ProcessDevice -> stuck on sys.exit!??
    streamerdevice = ThreadDevice(zmq.STREAMER, zmq.PULL, zmq.PUSH)
    streamerdevice.bind_in("tcp://127.0.0.1:%d" % connections_port)
    streamerdevice.bind_out("tcp://127.0.0.1:%d" % worker_port)
    streamerdevice.setsockopt_in(zmq.IDENTITY, b"PULL")
    streamerdevice.setsockopt_out(zmq.IDENTITY, b"PUSH")
    streamerdevice.start()
    # setup socket for worker
    context = zmq.Context()
    socket = context.socket(zmq.PULL)
    socket.connect("tcp://127.0.0.1:%d" % worker_port)
    # poller to check if streamerdevice is empty
    poller = zmq.Poller()
    poller.register(socket, zmq.POLLIN)
    thr_articlequeue = deque()
    ct = ConnectionThreads(cfg, thr_articlequeue, connections_port, server_ts, logger)
    # commands accepted over the pipe; anything else is answered with None
    cmdlist = ("start", "stop", "pause", "resume", "reset_timestamps", "reset_timestamps_bdl",
               "get_downloaded_per_server", "exit", "clearqueues", "connection_thread_health",
               "get_server_config", "set_tmode_sanitycheck", "set_tmode_download",
               "get_level_servers", "clear_articlequeue", "queues_empty", "clear_resultqueue",
               "len_articlequeue", "push_articlequeue", "pull_resultqueue",
               "push_entire_articlequeue", "pull_entire_resultqueue", "get_bytesdownloaded")
    quit_via_cmdexit = False
    while not TERMINATED:
        try:
            cmd, param = child_pipe.recv()
        except Exception as e:
            logger.warning(whoami() + str(e))
            continue
        if cmd in cmdlist:
            # default reply for commands that carry no payload
            result = True
            if cmd == "push_articlequeue":
                try:
                    thr_articlequeue.append(param)
                except Exception:
                    result = None
            elif cmd == "push_entire_articlequeue":
                try:
                    for article0 in param:
                        thr_articlequeue.append(article0)
                except Exception:
                    result = None
            elif cmd == "pull_entire_resultqueue":
                result = get_from_streamingdevice(socket, onlyfirst=False)
                logger.debug(whoami() + "len_articlequeue: " + str(len(ct.articlequeue)))
            elif cmd == "pull_resultqueue":
                result = get_from_streamingdevice(socket, onlyfirst=True)
            elif cmd == "start":
                ct.start_threads()
            elif cmd == "queues_empty":
                # empty = no queued articles AND nothing pending in the streamer device
                socks = dict(poller.poll(timeout=10))
                streamerdevice_empty = True
                if socket in socks and socks[socket] == zmq.POLLIN:
                    streamerdevice_empty = False
                result = (len(ct.articlequeue) == 0) and streamerdevice_empty
            elif cmd == "get_bytesdownloaded":
                result = ct.get_bytesdownloaded()
            elif cmd == "len_articlequeue":
                try:
                    result = int(len(ct.articlequeue))
                except Exception:
                    result = 0
            elif cmd == "clear_articlequeue":
                ct.articlequeue.clear()
            elif cmd == "clear_resultqueue":
                # drain the streamer device, discard the articles
                get_from_streamingdevice(socket, onlyfirst=False)
            elif cmd == "stop":
                ct.stop_threads()
            elif cmd == "resume":
                ct.resume_threads()
            elif cmd == "pause":
                ct.pause_threads()
            elif cmd == "reset_timestamps":
                ct.reset_timestamps()
            elif cmd == "reset_timestamps_bdl":
                ct.reset_timestamps_bdl()
            elif cmd == "get_downloaded_per_server":
                result = ct.get_downloaded_per_server()
            elif cmd == "connection_thread_health":
                result = ct.connection_thread_health()
            elif cmd == "get_server_config":
                result = ct.get_server_config()
            elif cmd == "set_tmode_sanitycheck":
                for t, _ in ct.threads:
                    t.mode = "sanitycheck"
            elif cmd == "set_tmode_download":
                for t, _ in ct.threads:
                    t.mode = "download"
            elif cmd == "get_level_servers":
                # param = retention (days); per level, keep only the servers
                # whose configured retention ('age') covers it (with 10% slack)
                le_serv0 = []
                try:
                    retention = param
                    for level, serverlist in ct.level_servers.items():
                        level_servers = serverlist
                        le_dic = {}
                        for le in level_servers:
                            _, _, _, _, _, _, _, _, age, _ = ct.servers.get_single_server_config(le)
                            le_dic[le] = age
                        les = [le for le in level_servers if le_dic[le] > retention * 0.9]
                        le_serv0.append(les)
                except Exception:
                    pass
                result = le_serv0
            elif cmd == "exit":
                # reply is deferred until after shutdown, see quit_via_cmdexit at the end
                quit_via_cmdexit = True
                break
            elif cmd == "clearqueues":
                ct.articlequeue.clear()
                get_from_streamingdevice(socket, onlyfirst=False)
            child_pipe.send(result)
        else:
            # unknown command -> None reply keeps the request/reply protocol in sync
            child_pipe.send(None)
    logger.debug(whoami() + "shutting down ...")
    ct.stop_threads()
    get_from_streamingdevice(socket, onlyfirst=False)
    if quit_via_cmdexit:
        # the deferred reply for the "exit" command
        child_pipe.send(result)
def ginzi_main(cfg_file, cfg, dirs, subdirs, guiport, mp_loggerqueue):
    """Main orchestration loop of the downloader.

    Spawns the worker processes (nzbparser, renamer, mpconnector), serves the
    GUI over a zmq REP socket bound to `guiport`, and drives the per-NZB state
    machine (download -> postprocess -> success/failure) via the PWDB status
    codes.  Blocks until a SET_CLOSEALL message or SIGINT/SIGTERM sets
    `event_stopped`, then runs the shutdown sequence and calls sys.exit(0).

    NOTE(review): `subdirs` is accepted but never read in this body — possibly
    kept for call-site compatibility; confirm before removing.
    """
    setproctitle("gzbx." + os.path.basename(__file__))
    logger = mplogging.setup_logger(mp_loggerqueue, __file__)
    logger.debug(whoami() + "starting ...")
    pwdb = PWDBSender()

    # multiprocessing events (signalled across worker processes)
    mp_events = {}
    mp_events["unrarer"] = mp.Event()
    mp_events["verifier"] = mp.Event()

    # threading events (set by the signal handler / SET_CLOSEALL)
    event_stopped = threading.Event()

    articlequeue = None
    resultqueue = None
    mp_work_queue = mp.Queue()
    renamer_result_queue = mp.Queue()

    # filewrite_lock = mp.Lock()
    mpconnector_lock = threading.Lock()
    # serializes file writes between this process and the workers
    filewrite_lock = mp.Lock()

    # one duplex pipe per worker; parent end stays here, child end is handed
    # to the spawned process
    renamer_parent_pipe, renamer_child_pipe = mp.Pipe()
    unrarer_parent_pipe, unrarer_child_pipe = mp.Pipe()
    verifier_parent_pipe, verifier_child_pipe = mp.Pipe()
    mpconnector_parent_pipe, mpconnector_child_pipe = mp.Pipe()
    pipes = {
        "renamer": [renamer_parent_pipe, renamer_child_pipe],
        "unrarer": [unrarer_parent_pipe, unrarer_child_pipe],
        "verifier": [verifier_parent_pipe, verifier_child_pipe],
        "mpconnector": [mpconnector_parent_pipe, mpconnector_child_pipe, mpconnector_lock]
    }

    # load server ts from file (per-server download time series, pickled on
    # shutdown below); drop entries for servers no longer configured
    try:
        server_ts0 = pickle.load(open(dirs["main"] + "ginzibix.ts", "rb"))
    except Exception:
        server_ts0 = {}
    config_servers = get_configured_servers(cfg)
    config_servers.append("-ALL SERVERS-")
    server_ts = {key: server_ts0[key] for key in server_ts0 if key in config_servers}
    del server_ts0

    ct = None

    # update delay: GUI poll interval in seconds, also used as zmq recv timeout
    try:
        update_delay = float(cfg["OPTIONS"]["UPDATE_DELAY"])
    except Exception as e:
        logger.warning(whoami() + str(e) + ", setting update_delay to default 0.5")
        update_delay = 0.5

    # init tcp with gtkgui.py (REP socket; recv times out so the main loop
    # keeps running even when no GUI is connected)
    context = zmq.Context()
    socket = context.socket(zmq.REP)
    socket.bind("tcp://*:" + str(guiport))
    socket.setsockopt(zmq.RCVTIMEO, int(update_delay * 1000))

    # init sighandler
    logger.debug(whoami() + "initializing sighandler")
    mpp = {"nzbparser": None, "decoder": None, "unrarer": None, "renamer": None,
           "verifier": None, "post": None, "mpconnector": None}
    sh = SigHandler_Main(event_stopped, logger)
    signal.signal(signal.SIGINT, sh.sighandler)
    signal.signal(signal.SIGTERM, sh.sighandler)

    # start nzb parser mpp
    logger.info(whoami() + "starting nzbparser process ...")
    mpp_nzbparser = mp.Process(target=nzb_parser.ParseNZB, args=(
        cfg,
        dirs,
        filewrite_lock,
        mp_loggerqueue,
    ))
    mpp_nzbparser.start()
    mpp["nzbparser"] = mpp_nzbparser

    # start renamer
    logger.info(whoami() + "starting renamer process ...")
    mpp_renamer = mp.Process(target=renamer.renamer, args=(
        renamer_child_pipe,
        renamer_result_queue,
        mp_loggerqueue,
        filewrite_lock,
    ))
    mpp_renamer.start()
    mpp["renamer"] = mpp_renamer

    # start mpconnector
    logger.info(whoami() + "starting mpconnector process ...")
    mpp_connector = mp.Process(target=mpconnections.mpconnector, args=(
        mpconnector_child_pipe,
        cfg,
        server_ts,
        mp_loggerqueue,
    ))
    mpp_connector.start()
    mpp["mpconnector"] = mpp_connector

    # per-iteration state of the main loop
    dl = None                  # current downloader.Downloader instance (or None)
    nzbname = None             # name of the NZB currently being processed
    pwdb.exc("db_nzb_set_current_nzbobj", [nzbname], {})
    paused = False
    article_health = 0
    dl_running = True
    applied_datarec = None     # config object received via SET_CLOSEALL, written on shutdown

    # reload nzb lists for gui
    pwdb.exc("store_sorted_nzbs", [], {})

    DEBUGPRINT = False
    # main looooooooooooooooooooooooooooooooooooooooooooooooooooop
    try:
        while not event_stopped.is_set():
            # set connection health / status string from the DB status code
            if dl:
                stat0 = pwdb.exc("db_nzb_getstatus", [nzbname], {})
                if stat0 == 2:
                    statusmsg = "downloading"
                elif stat0 == 3:
                    statusmsg = "postprocessing"
                elif stat0 == 4:
                    statusmsg = "success"
                elif stat0 == -4:
                    statusmsg = "failed"
                # send data to gui
            else:
                article_health = 0
                statusmsg = ""

            # poll the GUI socket; RCVTIMEO makes this non-blocking-ish
            msg = None
            datarec = None
            try:
                msg, datarec = socket.recv_pyobj()
            except zmq.ZMQError as e:
                if e.errno == zmq.EAGAIN:
                    # timeout: no GUI request this round
                    msg = None
                    pass
            except Exception as e:
                logger.error(whoami() + str(e))
                try:
                    socket.send_pyobj(("NOOK", None))
                except Exception as e:
                    logger.error(whoami() + str(e))
            if msg:
                if DEBUGPRINT:
                    print("-" * 10, "received", msg)
            if msg == "REQ":
                # GUI requests a status snapshot
                try:
                    if DEBUGPRINT:
                        print(">>>> #0 main:", time.time(), msg)
                    serverconfig = do_mpconnections(pipes, "get_server_config", None)
                    try:
                        update_server_ts(server_ts, ct, pipes)
                    except Exception as e:
                        logger.warning(whoami() + str(e))
                    if dl:
                        dl_results = dl.results
                    else:
                        dl_results = None
                    getdata = None
                    downloaddata_gc = None
                    if dl_results:
                        nzbname, downloaddata, _, _ = dl_results
                        if DEBUGPRINT:
                            print(">>>> #0a main:", time.time(), msg)
                        bytescount0, allbytesdownloaded0, availmem0, avgmiblist, filetypecounter, _, article_health,\
                            overall_size, already_downloaded_size, p2, overall_size_wparvol, allfileslist = downloaddata
                        gb_downloaded = dl.allbytesdownloaded0 / GB_DIVISOR
                        if DEBUGPRINT:
                            print(">>>> #0b main:", time.time(), msg)
                        downloaddata_gc = bytescount0, availmem0, avgmiblist, filetypecounter, nzbname, article_health,\
                            overall_size, already_downloaded_size
                        if DEBUGPRINT:
                            print(">>>> #4 main:", time.time(), msg)
                        getdata = downloaddata_gc, serverconfig, dl_running, statusmsg,\
                            article_health, dl.serverhealth(), gb_downloaded, server_ts
                    else:
                        downloaddata_gc = None, None, None, None, None, None, None, None
                        getdata = downloaddata_gc, serverconfig, dl_running, statusmsg, \
                            0, 0, 0, server_ts
                    # if one element in getdata != None - send:
                    if getdata.count(None) != len(getdata) or downloaddata_gc.count(None) != len(downloaddata_gc):
                        sendtuple = ("DL_DATA", getdata)
                    else:
                        sendtuple = ("NOOK", None)
                except Exception as e:
                    logger.error(whoami() + str(e))
                    sendtuple = ("NOOK", None)
                try:
                    socket.send_pyobj(sendtuple)
                except Exception as e:
                    logger.error(whoami() + str(e))
                    if DEBUGPRINT:
                        print(str(e))
            elif msg == "NZB_ADDED":
                # GUI dropped new nzb file(s): copy them into the watched nzb dir
                # NOTE(review): ack is sent inside the loop, i.e. once per file
                # on a REP socket — confirm the GUI sends one request per file
                for nzb0 in datarec:
                    try:
                        shutil.copy(nzb0, dirs["nzb"])
                        socket.send_pyobj(("NZB_ADDED_OK", None))
                    except Exception as e:
                        logger.error(whoami() + str(e))
                logger.info(whoami() + "copied new nzb files into nzb_dir")
            elif msg == "SET_CLOSEALL":
                # GUI asks for shutdown (optionally with changed config to apply)
                try:
                    socket.send_pyobj(("SET_CLOSE_OK", None))
                    applied_datarec = datarec
                    event_stopped.set()
                    continue
                except Exception as e:
                    logger.error(whoami() + str(e))
            elif msg == "SET_PAUSE":  # pause downloads
                try:
                    if not paused:
                        paused = True
                        if nzbname:
                            logger.info(whoami() + "download paused for NZB " + nzbname)
                        else:
                            logger.info(whoami() + "download paused!")
                        do_mpconnections(pipes, "pause", None)
                        if dl:
                            dl.pause()
                        postprocessor.postproc_pause()
                    socket.send_pyobj(("SET_PAUSE_OK", None))
                    dl_running = False
                except Exception as e:
                    logger.error(whoami() + str(e))
            elif msg == "SET_RESUME":  # resume downloads
                try:
                    if paused:
                        logger.info(whoami() + "download resumed for NZB " + nzbname)
                        paused = False
                        do_mpconnections(pipes, "resume", None)
                        if dl:
                            dl.resume()
                        postprocessor.postproc_resume()
                    socket.send_pyobj(("SET_RESUME_OK", None))
                    dl_running = True
                except Exception as e:
                    logger.error(whoami() + str(e))
                continue
            elif msg == "REPROCESS_FROM_LAST":
                # restart NZBs from the last sensible stage, depending on status
                try:
                    for reprocessed_nzb in datarec:
                        reproc_stat0 = pwdb.exc("db_nzb_getstatus", [reprocessed_nzb], {})
                        if reproc_stat0:
                            nzbdirname = re.sub(r"[.]nzb$", "", reprocessed_nzb, flags=re.IGNORECASE) + "/"
                            incompletedir = dirs["incomplete"] + nzbdirname
                            # status -1, -2, 4: restart from 0
                            if reproc_stat0 in [-1, -2, 4]:
                                pwdb.exc("db_nzb_delete", [reprocessed_nzb], {})
                                remove_nzbdirs([reprocessed_nzb], dirs, pwdb, logger, removenzbfile=False)
                                update_fmodtime_nzbfiles([reprocessed_nzb], dirs, logger)  # trigger nzbparser.py
                                logger.debug(whoami() + reprocessed_nzb + ": status " + str(reproc_stat0) +
                                             ", restart from 0")
                            # status -4/-3 (postproc. failed/interrupted): re-postprocess
                            elif reproc_stat0 in [-4, -3]:
                                if reproc_stat0 == -4:
                                    pwdb.exc("db_nzb_undo_postprocess", [reprocessed_nzb], {})
                                clear_postproc_dirs(reprocessed_nzb, dirs)
                                # if incompletedir: -> postprocess again
                                if os.path.isdir(incompletedir):
                                    pwdb.exc("nzb_prio_insert_second", [reprocessed_nzb, 3], {})
                                    logger.debug(whoami() + reprocessed_nzb + ": status -4/-3 w/ dir, restart from 3")
                                # else restart overall
                                else:
                                    pwdb.exc("db_nzb_delete", [reprocessed_nzb], {})
                                    remove_nzbdirs([reprocessed_nzb], dirs, pwdb, logger, removenzbfile=False)
                                    update_fmodtime_nzbfiles([reprocessed_nzb], dirs, logger)
                                    logger.debug(whoami() + reprocessed_nzb + ": status -4/-3 w/o dir, restart from 0")
                            # else: undefined
                            else:
                                logger.debug(whoami() + reprocessed_nzb + ": status " + str(reproc_stat0) +
                                             ", no action!")
                    pwdb.exc("store_sorted_nzbs", [], {})
                    socket.send_pyobj(("REPROCESS_FROM_START_OK", None))
                except Exception as e:
                    logger.error(whoami() + str(e))
            elif msg in ["DELETED_FROM_HISTORY", "REPROCESS_FROM_START"]:
                try:
                    for removed_nzb in datarec:
                        pwdb.exc("db_nzb_delete", [removed_nzb], {})
                    pwdb.exc("store_sorted_nzbs", [], {})
                    if msg == "DELETED_FROM_HISTORY":
                        remove_nzbdirs(datarec, dirs, pwdb, logger)
                        socket.send_pyobj(("DELETED_FROM_HISTORY_OK", None))
                        logger.info(whoami() + "NZBs have been deleted from history")
                    else:
                        remove_nzbdirs(datarec, dirs, pwdb, logger, removenzbfile=False)
                        update_fmodtime_nzbfiles(datarec, dirs, logger)  # trigger nzbparser.py
                        socket.send_pyobj(("REPROCESS_FROM_START_OK", None))
                        logger.info(whoami() + "NZBs will be reprocessed from start")
                except Exception as e:
                    logger.error(whoami() + str(e))
            elif msg == "SET_NZB_INTERRUPT":
                # move running/queued NZBs to history; restart downloader if the
                # first (= current) NZB changed
                logger.info(whoami() + "NZBs have been stopped/moved to history")
                try:
                    first_has_changed, moved_nzbs = pwdb.exc("move_nzb_list", [datarec],
                                                             {"move_and_resetprios": False})
                    if moved_nzbs:
                        pwdb.exc("db_msg_insert", [nzbname, "NZB(s) moved to history", "warning"], {})
                    if first_has_changed:
                        logger.info(whoami() + "first NZB has changed")
                        if dl:
                            clear_download(nzbname, pwdb, articlequeue, resultqueue, mp_work_queue, dl, dirs,
                                           pipes, mpp, ct, logger, stopall=False)
                            dl.stop()
                            dl.join()
                        first_has_changed, moved_nzbs = pwdb.exc("move_nzb_list", [datarec],
                                                                 {"move_and_resetprios": True})
                        nzbname = None
                        pwdb.exc("db_nzb_set_current_nzbobj", [nzbname], {})
                        if dl:
                            del dl
                            dl = None
                    else:  # if current nzb didnt change just update, but do not restart
                        first_has_changed, moved_nzbs = pwdb.exc("move_nzb_list", [datarec],
                                                                 {"move_and_resetprios": True})
                    pwdb.exc("store_sorted_nzbs", [], {})
                    socket.send_pyobj(("SET_INTERRUPT_OK", None))
                except Exception as e:
                    logger.error(whoami() + str(e))
            elif msg == "SET_NZB_ORDER":
                logger.info(whoami() + "NZBs have been reordered/deleted")
                try:
                    # just get info if first has changed etc.
                    first_has_changed, deleted_nzbs = pwdb.exc("reorder_nzb_list", [datarec],
                                                               {"delete_and_resetprios": False})
                    if deleted_nzbs:
                        pwdb.exc("db_msg_insert", [nzbname, "NZB(s) deleted", "warning"], {})
                    if first_has_changed:
                        logger.info(whoami() + "first NZB has changed")
                        if dl:
                            clear_download(nzbname, pwdb, articlequeue, resultqueue, mp_work_queue, dl, dirs,
                                           pipes, mpp, ct, logger, stopall=False)
                            dl.stop()
                            dl.join()
                        first_has_changed, deleted_nzbs = pwdb.exc("reorder_nzb_list", [datarec],
                                                                   {"delete_and_resetprios": True})
                        remove_nzbdirs(deleted_nzbs, dirs, pwdb, logger)
                        nzbname = None
                        pwdb.exc("db_nzb_set_current_nzbobj", [nzbname], {})
                        if dl:
                            del dl
                            dl = None
                    else:  # if current nzb didnt change just update, but do not restart
                        first_has_changed, deleted_nzbs = pwdb.exc("reorder_nzb_list", [datarec],
                                                                   {"delete_and_resetprios": True})
                        remove_nzbdirs(deleted_nzbs, dirs, pwdb, logger)
                    pwdb.exc("store_sorted_nzbs", [], {})
                    # release gtkgui from block
                    socket.send_pyobj(("SET_DELETE_REORDER_OK", None))
                except Exception as e:
                    logger.error(whoami() + str(e))
            elif msg:
                # unknown message: nack so the REP socket does not stall
                try:
                    socket.send_pyobj(("NOOK", None))
                except Exception as e:
                    if DEBUGPRINT:
                        print(str(e))
                    logger.debug(whoami() + str(e) + ", received msg: " + str(msg))
                continue

            # if not downloading: fetch the next NZB (if any) and start a downloader
            if not dl:
                nzbname = make_allfilelist_wait(pwdb, dirs, logger, -1)
                if nzbname:
                    pwdb.exc("db_nzb_set_current_nzbobj", [nzbname], {})
                    do_mpconnections(pipes, "reset_timestamps_bdl", None)
                    logger.info(whoami() + "got next NZB: " + str(nzbname))
                    dl = downloader.Downloader(cfg, dirs, ct, mp_work_queue, articlequeue, resultqueue, mpp, pipes,
                                               renamer_result_queue, mp_events, nzbname, mp_loggerqueue,
                                               filewrite_lock, logger)
                    # if status postprocessing, don't start threads!
                    if pwdb.exc("db_nzb_getstatus", [nzbname], {}) in [0, 1, 2]:
                        if not paused:
                            do_mpconnections(pipes, "resume", None)
                        if paused:
                            dl.pause()
                        dl.start()
            else:
                # downloader active: react to status transitions from the DB
                stat0 = pwdb.exc("db_nzb_getstatus", [nzbname], {})
                # if postproc ok
                if stat0 == 4:
                    logger.info(whoami() + "postprocessor success for NZB " + nzbname)
                    do_mpconnections(pipes, "pause", None)
                    clear_download(nzbname, pwdb, articlequeue, resultqueue, mp_work_queue, dl, dirs, pipes, mpp,
                                   ct, logger, stopall=False)
                    dl.stop()
                    dl.join()
                    if mpp_is_alive(mpp, "post"):
                        mpp["post"].join()
                    mpp["post"] = None
                    pwdb.exc("db_msg_insert",
                             [nzbname, "downloaded and postprocessed successfully!", "success"], {})
                    # set 'flags' for getting next nzb
                    del dl
                    dl = None
                    nzbname = None
                    pwdb.exc("db_nzb_set_current_nzbobj", [nzbname], {})
                    pwdb.exc("store_sorted_nzbs", [], {})
                    pwdb.exc("db_save_to_file", [], {})
                # if download ok -> postprocess
                elif stat0 == 3 and not mpp_is_alive(mpp, "post"):
                    article_health = 0
                    logger.info(whoami() + "download success, postprocessing NZB " + nzbname)
                    pwdb.exc("db_msg_insert",
                             [nzbname, "downloaded ok, starting postprocess", "success"], {})
                    mpp_post = mp.Process(target=postprocessor.postprocess_nzb, args=(
                        nzbname,
                        articlequeue,
                        resultqueue,
                        mp_work_queue,
                        pipes,
                        mpp,
                        mp_events,
                        cfg,
                        dl.verifiedrar_dir,
                        dl.unpack_dir,
                        dl.nzbdir,
                        dl.rename_dir,
                        dl.main_dir,
                        dl.download_dir,
                        dl.dirs,
                        dl.pw_file,
                        mp_loggerqueue,
                    ))
                    mpp_post.start()
                    mpp["post"] = mpp_post
                # if download failed
                elif stat0 == -2:
                    logger.info(whoami() + "download failed for NZB " + nzbname)
                    do_mpconnections(pipes, "pause", None)
                    clear_download(nzbname, pwdb, articlequeue, resultqueue, mp_work_queue, dl, dirs, pipes, mpp,
                                   ct, logger, stopall=False, onlyarticlequeue=False)
                    dl.stop()
                    dl.join()
                    pwdb.exc("db_msg_insert", [nzbname, "downloaded failed!", "error"], {})
                    # set 'flags' for getting next nzb
                    del dl
                    dl = None
                    nzbname = None
                    pwdb.exc("db_nzb_set_current_nzbobj", [nzbname], {})
                    pwdb.exc("store_sorted_nzbs", [], {})
                    pwdb.exc("db_save_to_file", [], {})
                # if postproc failed
                elif stat0 == -4:
                    logger.error(whoami() + "postprocessor failed for NZB " + nzbname)
                    do_mpconnections(pipes, "pause", None)
                    clear_download(nzbname, pwdb, articlequeue, resultqueue, mp_work_queue, dl, dirs, pipes, mpp,
                                   ct, logger, stopall=False, onlyarticlequeue=False)
                    dl.stop()
                    dl.join()
                    if mpp_is_alive(mpp, "post"):
                        mpp["post"].join()
                    pwdb.exc("db_msg_insert",
                             [nzbname, "downloaded and/or postprocessing failed!", "error"], {})
                    mpp["post"] = None
                    # set 'flags' for getting next nzb
                    del dl
                    dl = None
                    nzbname = None
                    pwdb.exc("db_nzb_set_current_nzbobj", [nzbname], {})
                    pwdb.exc("store_sorted_nzbs", [], {})
                    pwdb.exc("db_save_to_file", [], {})
    except Exception as e:
        # NOTE(review): this swallows any unexpected main-loop error silently
        # unless DEBUGPRINT is on — consider at least logger.error here
        if DEBUGPRINT:
            print(str(e))
        else:
            pass

    # shutdown sequence: pause connections, stop downloader, clear queues,
    # close GUI socket, persist time series, optionally write changed config
    logger.info(whoami() + "closeall: starting shutdown sequence")
    do_mpconnections(pipes, "pause", None)
    logger.debug(whoami() + "closeall: connection threads paused")
    if dl:
        dl.stop()
        dl.join()
        logger.debug(whoami() + "closeall: downloader joined")
    try:
        clear_download(nzbname, pwdb, articlequeue, resultqueue, mp_work_queue, dl, dirs, pipes, mpp, ct, logger,
                       stopall=True, onlyarticlequeue=False)
    except Exception as e:
        logger.error(whoami() + str(e) + ": closeall error!")
    dl = None
    nzbname = None
    pwdb.exc("db_nzb_set_current_nzbobj", [nzbname], {})
    logger.debug(whoami() + "closeall: closing gtkgui-socket")
    try:
        socket.close()
        context.term()
    except Exception as e:
        logger.warning(whoami() + str(e))
    logger.debug(whoami() + "closeall: all cleared")
    # save pandas time series
    try:
        pickle.dump(server_ts, open(dirs["main"] + "ginzibix.ts", "wb"))
        logger.info(whoami() + "closeall: saved downloaded-timeseries to file")
    except Exception as e:
        logger.warning(whoami() + str(e) + ": closeall: error in saving download-timeseries")
    # if restart because of settings applied in gui -> write cfg to file
    if applied_datarec:
        try:
            with open(cfg_file, 'w') as configfile:
                applied_datarec.write(configfile)
            logger.debug(whoami() + "changed config file written!")
        except Exception as e:
            logger.error(whoami() + str(e) + ": cannot write changed config file!")
    logger.info(whoami() + "exited!")
    sys.exit(0)
def renamer(child_pipe, renamer_result_queue, mp_loggerqueue, filewrite_lock):
    """Worker process: watch a download dir and rename/move completed files.

    Waits on `child_pipe` for a ("start", source_dir, dest_dir, nzbname)
    command, then watches `source_dir` via inotify.  par2/par2vol files are
    copied to `dest_dir` and recorded in the DB; everything else is handed to
    rename_and_move_rarandremainingfiles() together with the known par2 hash
    list.  A "pause" command stops the watch loop; the outer loop then waits
    for the next "start".  Exits when the module-global TERMINATED is set.
    """
    setproctitle("gzbx." + os.path.basename(__file__))
    logger = mplogging.setup_logger(mp_loggerqueue, __file__)
    logger.debug(whoami() + "starting renamer process")
    sh = SigHandler_Renamer(logger)
    signal.signal(signal.SIGINT, sh.sighandler_renamer)
    signal.signal(signal.SIGTERM, sh.sighandler_renamer)
    pwdb = PWDBSender()
    cwd0 = os.getcwd()
    while not TERMINATED:
        # wait for start command
        logger.debug(whoami() + "waiting for start command")
        while not TERMINATED:
            if child_pipe.poll():
                command = child_pipe.recv()
                try:
                    cmd0, source_dir, dest_dir, nzbname = command
                    if cmd0 == "start":
                        break
                except Exception as e:
                    logger.warning(whoami() + str(e))
            time.sleep(0.1)
        if TERMINATED:
            break
        # normalize dirs to trailing slash (paths are built by concatenation)
        if source_dir[-1] != "/":
            source_dir += "/"
        if dest_dir[-1] != "/":
            dest_dir += "/"
        try:
            os.chdir(dest_dir)
        except FileNotFoundError:
            os.mkdir(dest_dir)
        os.chdir(source_dir)
        # init inotify
        inotify = inotify_simple.INotify()
        watch_flags = inotify_simple.flags.CREATE | inotify_simple.flags.DELETE | inotify_simple.flags.MODIFY | inotify_simple.flags.DELETE_SELF
        wd = inotify.add_watch(source_dir, watch_flags)
        isfirstrun = True
        # already-known par2 metadata for this NZB, extended below
        p2list = pwdb.exc("db_p2_get_p2list", [nzbname], {})
        while not TERMINATED:
            events = get_inotify_events(inotify, filewrite_lock)
            if isfirstrun or events:  # and events not in eventslist):
                logger.debug(whoami() + "Events: " + str(events))
                # in downloaded_dir: look for not renamed files
                logger.debug(whoami() + "reading not yet downloaded files in _downloaded0")
                notrenamedfiles = []
                with filewrite_lock:
                    # include dot-files: glob("*") does not match hidden names
                    for fnfull in glob.glob(source_dir + "*") + glob.glob(source_dir + ".*"):
                        fnshort = fnfull.split("/")[-1]
                        rn = pwdb.exc("db_file_get_renamed_name", [fnshort], {})
                        is_renamed = rn and (rn != "N/A")
                        if not is_renamed:
                            p2 = par2lib.Par2File(fnfull)
                            if p2:
                                oldft = pwdb.exc("db_file_get_orig_filetype", [fnshort], {})
                                if p2.is_par2():
                                    # main .par2: remember the md5-16k hashes of
                                    # the rar files it describes
                                    rarfiles = [(fn, md5) for fn, md5 in p2.md5_16khash()]
                                    p2list.append((p2, fnshort, dest_dir + fnshort, rarfiles))
                                    pwdb.exc("db_p2_insert_p2",
                                             [nzbname, p2, fnshort, dest_dir + fnshort, rarfiles], {})
                                    pwdb.exc("db_file_set_file_type", [fnshort, "par2"], {})
                                    try:
                                        newshortname = fnshort.split(".par2")[0] + ".par2"
                                        shutil.copyfile(source_dir + fnshort, dest_dir + newshortname)
                                        pwdb.exc("db_file_set_renamed_name", [fnshort, newshortname], {})
                                        renamer_result_queue.put(
                                            (newshortname, dest_dir + newshortname, "par2", fnshort, oldft))
                                    except Exception as e:
                                        logger.error(whoami() + str(e) + ": cannot rename par2 file!")
                                # par2vol
                                elif p2.is_par2vol():
                                    pwdb.exc("db_file_set_file_type", [fnshort, "par2vol"], {})
                                    try:
                                        # NOTE(review): ".PAR2" here vs ".par2"
                                        # above — looks intentional (case kept
                                        # from vol naming) but confirm
                                        newshortname = fnshort.split(".PAR2")[0] + ".PAR2"
                                        shutil.copyfile(source_dir + fnshort, dest_dir + newshortname)
                                        pwdb.exc("db_file_set_renamed_name", [fnshort, newshortname], {})
                                        renamer_result_queue.put(
                                            (newshortname, dest_dir + newshortname, "par2vol", fnshort, oldft))
                                    except Exception as e:
                                        logger.error(whoami() + str(e) + ": cannot rename par2 file!")
                                    # could set # of blocks here in gpeewee
                                # source copy no longer needed after the move
                                try:
                                    os.remove(fnfull)
                                except Exception:
                                    pass
                            else:
                                notrenamedfiles.append(
                                    (fnfull, fnshort, par2lib.calc_file_md5hash_16k(fnfull)))
                # rename & move rar + remaining files
                if notrenamedfiles:
                    # da hats was
                    rename_and_move_rarandremainingfiles(p2list, notrenamedfiles, source_dir, dest_dir, pwdb,
                                                         renamer_result_queue, filewrite_lock, logger)
                isfirstrun = False
            if child_pipe.poll():
                # NOTE(review): start sends a 4-tuple but this unpacks 3 —
                # assumes pause commands are 3-tuples; confirm against caller
                command, _, _ = child_pipe.recv()
                if command == "pause":
                    break
        os.chdir(cwd0)
        try:
            if wd:
                inotify.rm_watch(wd)
        except Exception as e:
            logger.warning(whoami() + str(e))
        logger.debug(whoami() + "renamer paused")
    logger.info(whoami() + "exited!")
def renamer_old(child_pipe, renamer_result_queue, mp_loggerqueue, filewrite_lock):
    """Legacy renamer worker (superseded by renamer() above).

    Same start/pause pipe protocol as renamer(), but the start command is a
    3-tuple (no nzbname) and par2 discovery goes through the older helpers
    get_not_yet_renamed_files / scan_renamed_dir / scan_for_par2 /
    renamer_process_par2s instead of the DB-backed p2list.
    """
    setproctitle("gzbx." + os.path.basename(__file__))
    logger = mplogging.setup_logger(mp_loggerqueue, __file__)
    logger.debug(whoami() + "starting renamer process")
    sh = SigHandler_Renamer(logger)
    signal.signal(signal.SIGINT, sh.sighandler_renamer)
    signal.signal(signal.SIGTERM, sh.sighandler_renamer)
    pwdb = PWDBSender()
    cwd0 = os.getcwd()
    while not TERMINATED:
        # wait for start command
        logger.debug(whoami() + "waiting for start command")
        while not TERMINATED:
            if child_pipe.poll():
                command = child_pipe.recv()
                try:
                    cmd0, source_dir, dest_dir = command
                    if cmd0 == "start":
                        break
                except Exception as e:
                    logger.warning(whoami() + str(e))
            time.sleep(0.1)
        if TERMINATED:
            break
        # normalize dirs to trailing slash (paths are built by concatenation)
        if source_dir[-1] != "/":
            source_dir += "/"
        if dest_dir[-1] != "/":
            dest_dir += "/"
        try:
            os.chdir(dest_dir)
        except FileNotFoundError:
            os.mkdir(dest_dir)
        os.chdir(source_dir)
        # init inotify
        inotify = inotify_simple.INotify()
        watch_flags = inotify_simple.flags.CREATE | inotify_simple.flags.DELETE | inotify_simple.flags.MODIFY | inotify_simple.flags.DELETE_SELF
        wd = inotify.add_watch(source_dir, watch_flags)
        p2obj = None
        p2basename = None
        # eventslist = []
        isfirstrun = True
        while not TERMINATED:
            events = get_inotify_events(inotify, filewrite_lock)
            if isfirstrun or events:  # and events not in eventslist):
                logger.debug(whoami() + "Events: " + str(events))
                # get all files not yet .renamed
                logger.debug(whoami() + "reading not yet downloaded files in _downloaded0")
                notrenamedfiles = get_not_yet_renamed_files(source_dir, filewrite_lock)
                # get all renames filed & trying to get .par2 file
                logger.debug(whoami() + "Reading files in _renamed0 & trying to get .par2 file")
                renamedfiles, p2obj, p2basename = scan_renamed_dir(dest_dir, p2obj, filewrite_lock, logger)
                # if no par2 in _renamed, check _downloaded0
                if not p2obj:
                    logger.debug(whoami() + "No p2obj yet found, looking in _downloaded0")
                    p2obj, p2basename = scan_for_par2(notrenamedfiles, logger)
                    if p2obj:
                        logger.debug(whoami() + "p2obj found: " + p2basename)
                # rename par2 and move them
                p2obj, p2objname = renamer_process_par2s(source_dir, dest_dir, p2obj, p2basename, notrenamedfiles,
                                                         pwdb, renamer_result_queue, filewrite_lock)
                # rename & move rar + remaining files
                rename_and_move_rarandremainingfiles_old(p2obj, notrenamedfiles, source_dir, dest_dir, pwdb,
                                                         renamer_result_queue, filewrite_lock, logger)
                isfirstrun = False
            if child_pipe.poll():
                command, _, _ = child_pipe.recv()
                if command == "pause":
                    break
        os.chdir(cwd0)
        try:
            if wd:
                inotify.rm_watch(wd)
        except Exception as e:
            logger.warning(whoami() + str(e))
        logger.debug(whoami() + "renamer paused")
    logger.info(whoami() + "exited!")
def ParseNZB(cfg, dirs, lock, mp_loggerqueue):
    """Worker process: watch the nzb dir and register new NZB files in the DB.

    Uses inotify on dirs["nzb"]; on the first pass (and on every subsequent
    directory event) every *.nzb file not yet known to the DB is inserted,
    decomposed into files/articles via decompose_nzb(), and queued (status 1).
    NZBs that cannot be parsed are marked obsolete (status -1).  Runs until
    the module-global TERMINATED is set by the signal handler.

    NOTE(review): the `lock` parameter is accepted but never used here (the
    get_inotify_events() call takes no lock) — confirm whether directory reads
    need to be serialized with the writers.
    """
    setproctitle("gzbx." + os.path.basename(__file__))
    logger = mplogging.setup_logger(mp_loggerqueue, __file__)
    nzbdir = dirs["nzb"]
    incompletedir = dirs["incomplete"]
    global TERMINATED
    sh = SigHandler_Parser(logger)
    signal.signal(signal.SIGINT, sh.sighandler)
    signal.signal(signal.SIGTERM, sh.sighandler)
    pwdb = PWDBSender()
    cwd0 = os.getcwd()
    os.chdir(nzbdir)
    inotify = inotify_simple.INotify()
    watch_flags = inotify_simple.flags.CREATE | inotify_simple.flags.DELETE | inotify_simple.flags.MODIFY | inotify_simple.flags.DELETE_SELF
    inotify.add_watch(nzbdir, watch_flags)
    isfirstrun = True
    while not TERMINATED:
        events = get_inotify_events(inotify)
        if isfirstrun or events:  # and events not in eventslist):
            if isfirstrun:
                logger.debug(whoami() + "scanning nzb dir ...")
            else:
                logger.debug(whoami() + "got event in nzb_dir")
            # dot-files included: glob("*") does not match hidden names
            for nzb in glob.glob("*.nzb") + glob.glob(".*.nzb"):
                if TERMINATED:
                    break
                nzb0 = nzb.split("/")[-1]
                if pwdb.exc("db_nzb_exists", [nzb0], {}):
                    logger.warning(whoami() + " NZB file " + nzb0 + " already exists in DB")
                    continue
                # replace [ and ] brackets in nzb0 / this is a bug in re!?
                if ("[" in nzb0) or ("]" in nzb0):
                    nzb0 = nzb0.replace("[", "(")
                    nzb0 = nzb0.replace("]", ")")
                    try:
                        os.rename(nzb, dirs["nzb"] + nzb0)
                    except Exception as e:
                        logger.error(whoami() + "Cannot rename NZB file " + nzb0 + " :" + str(e))
                        continue
                # fix: missing space before "into db" in the original log message
                logger.info(whoami() + "inserting " + nzb0 + " into db")
                newnzb = pwdb.exc("db_nzb_insert", [nzb0], {})
                if newnzb:
                    logger.info(whoami() + "new NZB file " + nzb0 + " detected")
                    # update size
                    # rename nzb here to ....processed
                    filedic, bytescount = decompose_nzb(nzb0, logger)
                    if not filedic:
                        # fix: whoami() prefix added for consistency with every
                        # other log call in this module
                        logger.warning(whoami() + "Could not interpret nzb " + nzb0 + ", setting to obsolete")
                        pwdb.exc("db_nzb_update_status", [nzb0, -1],
                                 {"usefasttrack": False})  # status "cannot queue / -1"
                    else:
                        size_gb = bytescount / (1024 * 1024 * 1024)
                        infostr = nzb0 + " / " + "{0:.3f}".format(size_gb) + " GB"
                        logger.debug(whoami() + "analysing NZB: " + infostr)
                        # insert files + articles
                        for key, items in filedic.items():
                            if TERMINATED:
                                break
                            data = []
                            fsize = 0
                            for i, it in enumerate(items):
                                if TERMINATED:
                                    break
                                if i == 0:
                                    # first entry carries the file header (age, number)
                                    age, nr0 = it
                                    ftype = par2lib.get_file_type(key)
                                    logger.debug(whoami() + "analysing and inserting file " + key +
                                                 " + articles: age=" + str(age) + " / nr=" + str(nr0) +
                                                 " / type=" + ftype)
                                    newfile = pwdb.exc("db_file_insert", [key, newnzb, nr0, age, ftype], {})
                                else:
                                    # subsequent entries are articles (name, number, size)
                                    fn, no, size = it
                                    fsize += size
                                    data.append((fn, newfile, size, no, time.time()))
                            # sort articles by article number before bulk insert
                            data.sort(key=lambda tup: tup[3])
                            pwdb.exc("db_file_update_size", [key, fsize], {})
                            pwdb.exc("db_article_insert_many", [data], {})
                        # if there are nzbs in the download queue, pause after insert
                        if not pwdb.exc("db_nzb_are_all_nzb_idle", [], {}):
                            time.sleep(0.3)
                        logger.info(whoami() + "Added NZB: " + infostr + " to database / queue")
                        pwdb.exc("db_nzb_update_status", [nzb0, 1],
                                 {"usefasttrack": False})  # status "queued / 1"
                        logger.debug(whoami() + "Added NZB: " + infostr + " to GUI")
                        pwdb.exc("store_sorted_nzbs", [], {})
                        pwdb.exc("create_allfile_list_via_name", [nzb0, incompletedir], {})
                        time.sleep(0.25)
            isfirstrun = False
        else:
            # no events: idle poll
            time.sleep(0.25)
    os.chdir(cwd0)
    logger.debug(whoami() + "exited!")