def reseeders_update(self):
    """Get the pre-reseed list from database.

    And sent those un-reseed torrents to each reseeder depend on
    it's download status.
    """
    pre_reseeder_list = self.get_online_reseeders()
    # Build "`col_a`=0 OR `col_b`=0 ..." to select rows not yet reseeded anywhere.
    pre_cond = " OR ".join("`{}`=0".format(i.db_column) for i in pre_reseeder_list)
    result = db.exec(
        "SELECT * FROM `seed_list` WHERE `download_id` != 0 AND ({})".format(pre_cond),
        r_dict=True, fetch_all=True)
    for t in result:  # Traversal all un-reseed list
        try:
            dl_torrent = tc.get_torrent(t["download_id"])
        except KeyError:  # Torrent no longer exists in the download client
            Logger.error("The pre-reseed Torrent: \"{0}\" isn't found in result, "
                         "It's db-record will be deleted soon.".format(t["title"]))
            self._del_torrent_with_db(rid=t["id"])
            if t["id"] in self.downloading_torrent_id_queue:
                self.downloading_torrent_id_queue.remove(t["id"])
        else:
            tname = dl_torrent.name
            # Fix: use == instead of `is` — identity comparison between ints is
            # not guaranteed by the language (CPython small-int cache artifact).
            if int(dl_torrent.progress) == 100:  # Download progress in percent.
                Logger.info("New completed torrent: \"{name}\" , Judge reseed or not.".format(name=tname))
                # Feed the finished torrent to every online reseeder concurrently.
                for reseeder in pre_reseeder_list:
                    Thread(target=reseeder.torrent_feed, args=(dl_torrent,),
                           name="Thread-{}".format(reseeder.model_name()),
                           daemon=True).start()
                if dl_torrent.id in self.downloading_torrent_id_queue:
                    self.downloading_torrent_id_queue.remove(dl_torrent.id)
            elif dl_torrent.id in self.downloading_torrent_id_queue:
                pass  # Wait until this torrent download completely.
            else:
                Logger.warning("Torrent:\"{name}\" is still downloading, Wait......".format(name=tname))
                self.downloading_torrent_id_queue.append(dl_torrent.id)
def sort_title_info(raw_title, raw_type, raw_sec_type) -> dict:
    """Sort a raw torrent title into post_data fields based on the
    clone_torrent's category.

    But some may wrong, due to inappropriate search_title.

    :param raw_title: the raw bracketed title, e.g. "[A][B][C]"
    :param raw_type: key into the module-level ``type_dict``
    :param raw_sec_type: secondary-type key under that entry
    :return: dict of category info plus one entry per split key
    """
    split = type_dict[raw_type]["split"]
    raw_title_group = re.findall(r"\[[^\]]*\]", raw_title)
    return_dict = {
        "raw_type": raw_type,
        "raw_second_type": raw_sec_type,
        "type": type_dict[raw_type]["cat"],
        "second_type": type_dict[raw_type]["sec_type"][raw_sec_type],
    }
    len_split = len(split)
    if len_split != len(raw_title_group):
        Logger.warning("The raw title \"{raw}\" may lack of tag (now: {no},ask: {co}),"
                       "The split may wrong.".format(raw=raw_title,
                                                    no=len(raw_title_group), co=len_split))
    # Pad with empty tags so every split key still receives a value.
    while len_split > len(raw_title_group):
        raw_title_group.append("")
    raw_title_group.reverse()
    for i in split:
        j = raw_title_group.pop()
        # Fix: raw strings — "\[" and "\g" are invalid escape sequences in
        # plain string literals (DeprecationWarning, error in future Python).
        title_split = re.sub(r"\[(?P<in>.*)\]", r"\g<in>", j)
        if i in type_dict[raw_type]["limit"]:
            if title_split not in type_dict[raw_type]["limit"][i]:
                # Tag not valid for this key; push it back for the next key.
                title_split = ""
                raw_title_group.append(j)
        return_dict.update({i: title_split})
    Logger.debug("the title split success.The title dict:{dic}".format(dic=return_dict))
    return return_dict
def online_check(self) -> bool:
    """
    Check function to get the site status (online or not)

    :return: bool , True if online
    """
    if self._PASS_ONLINE_CHECK:
        return True
    try:
        requests.get(self.url_host, timeout=setting.REQUESTS_TIMEOUT)
    except OSError:  # requests.exceptions.RequestException subclasses OSError
        if self.suspended == 0:  # Only warn on the first failure
            Logger.warning("Site: {si} is Offline now.".format(si=self.url_host))
        self.suspended += 1
    else:
        if self.suspended != 0:  # Recovered after one or more failed checks
            Logger.info("The Site: {si} is Online now,after {count} times tries."
                        "Will check the session soon.".format(si=self.url_host,
                                                              count=self.suspended))
            self.suspended = 0  # Set self.suspended as 0 first, then session_check()
            self.session_check()
    # Idiom fix: the comparison already yields the bool.
    return self.suspended == 0
def torrent_reseed(self, torrent):
    """Try to reseed one completed torrent to this site.

    Looks up a "clone" torrent (from the db record, the default clone id,
    or a site search), then posts the rebuilt torrent; if a dupe already
    exists on the site, assists it by downloading instead.

    :param torrent: the completed torrent object from the download client
    :return: flag from torrent_upload / torrent_download
    :raises NoMatchPatternError: torrent name matches no known pattern
    :raises NoCloneTorrentError: no usable/allowed clone torrent found
    :raises CannotAssistError: dupe exists but differs from this torrent
    """
    name_pattern = self._get_torrent_ptn(torrent)
    if name_pattern:
        key_raw = re.sub(r"[_\-.']", " ", name_pattern.group("search_name"))
        key_with_gp = "{gr} {search_key}".format(search_key=key_raw,
                                                 gr=name_pattern.group("group"))
        key_with_gp_ep = "{ep} {gp_key}".format(gp_key=key_with_gp,
                                                ep=name_pattern.group("episode"))
    else:
        raise NoMatchPatternError("No match pattern. Will Mark \"{}\" As Un-reseed torrent.".format(torrent.name))

    search_tag = self.exist_judge(key_with_gp_ep, torrent.name)
    if search_tag == 0 and not self._ASSIST_ONLY:
        # Non-existent repetition torrent (by local judge plugins), prepare to reseed
        torrent_raw_info_dict = None
        try:
            if self._GET_CLONE_ID_FROM_DB:
                clone_id = db.get_data_clone_id(key=key_raw, site=self.db_column)
                if clone_id in [None, 0]:
                    raise KeyError("The db-record is not return the correct clone id.")
                # Fix: != instead of `is not` — int identity comparison is unreliable.
                elif clone_id != -1:  # -1 means: no re-seed for this site in database.
                    torrent_raw_info_dict = self.torrent_clone(clone_id)
                    if not torrent_raw_info_dict:
                        raise ValueError("The clone torrent for tid in db-record is not exist.")
                    Logger.debug("Get clone torrent info from \"DataBase\" OK, Which id: {}".format(clone_id))
            else:
                raise KeyError("Set not get clone torrent id from \"Database.\"")
        except (KeyError, ValueError) as e:
            Logger.warning("{}, Try to search the clone info from site, it may not correct".format(e.args[0]))
            clone_id = self._DEFAULT_CLONE_TORRENT if self._DEFAULT_CLONE_TORRENT else 0  # USE Default clone id
            # USE The same group to search firstly and Then non-group tag
            for key in [key_with_gp, key_raw]:
                search_id = self.first_tid_in_search_list(key=key)
                if search_id != 0:  # Fix: != instead of `is not`
                    clone_id = search_id  # The search result will cover the default setting.
                    break
            if clone_id != 0:  # Fix: != instead of `is not`
                torrent_raw_info_dict = self.torrent_clone(clone_id)
                Logger.info("Get clone torrent info from \"Reseed-Site\" OK, Which id: {cid}".format(cid=clone_id))

        if torrent_raw_info_dict:
            if self._ALLOW_CAT:
                pre_reseed_cat = torrent_raw_info_dict.get("type")
                if int(pre_reseed_cat) not in self._ALLOW_CAT:
                    raise NoCloneTorrentError("The clone torrent's category is not allowed.")
            Logger.info("Begin post The torrent {0},which name: {1}".format(torrent.id, torrent.name))
            new_dict = self.date_raw_update(torrent_name_search=name_pattern,
                                            raw_info=torrent_raw_info_dict)
            multipart_data = self.data_raw2tuple(torrent, raw_info=new_dict)
            flag = self.torrent_upload(torrent=torrent, data=multipart_data)
        else:
            # NOTE(review): the message has no "{}" placeholder, so .format(self.name)
            # is a no-op — kept byte-identical to preserve behavior.
            raise NoCloneTorrentError("Can't find any clone torrent to used.".format(self.name))
    elif search_tag == -1:
        # IF the torrents are present, but not consistent (When FORCE_JUDGE_DUPE_LOC is True)
        raise CannotAssistError("Find dupe, and the exist torrent is not same as pre-reseed torrent. Stop Posting~")
    else:  # IF the torrent is already released and can be assist
        Logger.warning("Find dupe torrent,which id: {0}, Automatically assist it~".format(search_tag))
        flag = self.torrent_download(tid=search_tag, thanks=False)
    return flag
def _del_torrent_with_db(self, rid=None):
    """Delete torrent(both download and reseed) with data from transmission and database

    :param rid: optional seed_list row id; when given, that row's torrents
                are force-deleted regardless of their status.
    """
    Logger.debug("Begin torrent's status check. If reach condition you set, You will get a warning.")
    if rid:
        sql = "SELECT * FROM `seed_list` WHERE `id`={}".format(rid)
    else:
        sql = "SELECT * FROM `seed_list`"
    time_now = time.time()
    for cow in db.exec(sql=sql, r_dict=True, fetch_all=True):
        sid = cow.pop("id")
        s_title = cow.pop("title")
        err = 0
        reseed_list = []
        # Remaining columns are per-tracker torrent ids; > 0 means present.
        torrent_id_list = [tid for tracker, tid in cow.items() if tid > 0]
        for tid in torrent_id_list:
            try:  # Ensure torrent exist
                reseed_list.append(tc.get_torrent(torrent_id=tid))
            except KeyError:  # Mark err when the torrent is not exist.
                err += 1

        delete = False
        if rid:
            delete = True
            Logger.warning("Force Delete. Which name: {}, Affect torrents: {}".format(s_title, torrent_id_list))
        # Fix: == instead of `is` — int identity comparison is unreliable.
        elif err == 0:  # All torrents in this cow exist; check their status.
            reseed_stop_list = []
            for t in reseed_list:
                if int(time_now - t.addedDate) > TIME_TORRENT_KEEP_MIN:  # At least seed time
                    if t.status == "stopped":  # Mark the stopped torrent
                        reseed_stop_list.append(t)
                    elif setting.pre_delete_judge(torrent=t):
                        _tid, _tname, _tracker = self._get_torrent_info(t)
                        tc.stop_torrent(t.id)
                        Logger.warning(
                            "Reach Target you set, Torrent({tid}) \"{name}\" in Tracker \"{tracker}\" now stop, "
                            "With Uploaded {si:.2f} MiB, Ratio {ro:.2f} , Keep time {ho:.2f} h."
                            "".format(tid=_tid, name=_tname, tracker=_tracker,
                                      si=t.uploadedEver / 1024 / 1024,
                                      ro=t.uploadRatio,
                                      ho=(time.time() - t.startDate) / 60 / 60))
            if len(reseed_list) == len(reseed_stop_list):
                delete = True
                Logger.info("All torrents of \"{0}\" reach target, Will DELETE them soon.".format(s_title))
        else:
            delete = True
            Logger.error("Some Torrents (\"{name}\", {er} of {co}) may not found, "
                         "Delete all it's records from db".format(name=s_title, er=err,
                                                                  co=len(torrent_id_list)))

        if delete:  # Delete torrents with it's data and db-records
            for tid in torrent_id_list:
                tc.remove_torrent(tid, delete_data=True)
            db.exec(sql="DELETE FROM `seed_list` WHERE `id` = {0}".format(sid))
def date_raw_update(self, torrent, torrent_name_search, raw_info: dict) -> dict:
    """Rebuild the clone torrent's title (and description) for re-posting.

    Splits the raw bracketed title by the category's tag order, swaps in
    name/episode info from the local torrent's name match, and rebuilds
    the "[..][..]" title string.

    :param torrent: the local torrent being reseeded (used for descr)
    :param torrent_name_search: regex match of the local torrent's name
    :param raw_info: the cloned torrent's raw info dict (mutated and returned)
    """
    raw_title = raw_info["name"]
    cat = raw_info["category"]
    split = title_split_dict[cat]["order"]
    raw_title_group = re.findall(r"\[[^\]]*\]", raw_title)
    temporarily_dict = {}
    len_split = len(split)
    # TODO if len_split == 0:
    if len_split != len(raw_title_group):
        Logger.warning(
            "The raw title \"{raw}\" may lack of tag (now: {no},ask: {co}),"
            "The split may wrong.".format(raw=raw_title, no=len(raw_title_group), co=len_split))
    # Pad with empty tags so every split key still receives a value.
    while len_split > len(raw_title_group):
        raw_title_group.append("")
    raw_title_group.reverse()
    for i in split:
        j = raw_title_group.pop()
        # Fix: raw strings — "\[" and "\g" are invalid escape sequences in
        # plain string literals (DeprecationWarning, error in future Python).
        title_split = re.sub(r"\[(?P<in>.+)\]", r"\g<in>", j)
        if i in title_split_dict[cat]["limit"]:
            if title_split not in title_split_dict[cat]["limit"][i]:
                # Tag not valid for this key; push it back for the next key.
                title_split = ""
                raw_title_group.append(j)
        temporarily_dict.update({i: title_split})  # Update temporarily dict

    if cat == "402":  # Series
        temporarily_dict["english_name"] = torrent_name_search.group("full_name")
        temporarily_dict["jidu"] = torrent_name_search.group("episode")
    elif cat == "405":  # Anime
        temporarily_dict["subtitle_group"] = torrent_name_search.group("group")
        temporarily_dict["num"] = torrent_name_search.group("episode")

    # Generate new title, skipping empty tags.
    # Fix: truthiness instead of `len(inner) is not 0` (int identity compare).
    new_title = "".join("[{inner}]".format(inner=inner)
                        for i in split for inner in [temporarily_dict[i]] if inner)

    # Assign raw info
    raw_info["name"] = new_title
    raw_info["descr"] = self.enhance_descr(torrent, raw_info["descr"], raw_info["clone_id"])
    return raw_info
def date_raw_update(self, torrent_name_search, raw_info: dict) -> dict:
    """Rebuild the clone torrent's title for re-posting on this site.

    Splits the raw bracketed title by the category's tag order, swaps in
    name/episode info from the local torrent's name match, and rebuilds
    the "[..][..]" title string.

    :param torrent_name_search: regex match of the local torrent's name
    :param raw_info: the cloned torrent's raw info dict (mutated and returned)
    """
    raw_title = raw_info["name"]
    cat = raw_info["category"]
    split = title_split_dict[cat]["order"]
    raw_title_group = re.findall(r"\[[^\]]*\]", raw_title)
    temporarily_dict = {}
    len_split = len(split)
    # TODO if len_split == 0:
    if len_split != len(raw_title_group):
        Logger.warning("The raw title \"{raw}\" may lack of tag (now: {no},ask: {co}),"
                       "The split may wrong.".format(raw=raw_title,
                                                     no=len(raw_title_group), co=len_split))
    # Pad with empty tags so every split key still receives a value.
    while len_split > len(raw_title_group):
        raw_title_group.append("")
    raw_title_group.reverse()
    for i in split:
        j = raw_title_group.pop()
        # Fix: raw strings — "\[" and "\g" are invalid escape sequences in
        # plain string literals (DeprecationWarning, error in future Python).
        title_split = re.sub(r"\[(?P<in>.+)\]", r"\g<in>", j)
        if i in title_split_dict[cat]["limit"]:
            if title_split not in title_split_dict[cat]["limit"][i]:
                # Tag not valid for this key; push it back for the next key.
                title_split = ""
                raw_title_group.append(j)
        temporarily_dict.update({i: title_split})  # Update temporarily dict

    if cat == "402":  # Series
        temporarily_dict["english_name"] = torrent_name_search.group("full_name")
        temporarily_dict["jidu"] = torrent_name_search.group("episode")
    elif cat == "405":  # Anime
        temporarily_dict["num"] = torrent_name_search.group("episode")

    # Generate new title, skipping empty tags.
    # Fix: truthiness instead of `len(inner) is not 0` (int identity compare).
    new_title = "".join("[{inner}]".format(inner=inner)
                        for i in split for inner in [temporarily_dict[i]] if inner)

    # Assign raw info
    raw_info["name"] = new_title
    return raw_info
def online_check(self) -> bool:
    """
    Check function to get the site status (online or not)

    :return: bool , True if online
    """
    try:
        # requests.head() is a little Quicker than requests.get(),( Because only ask head without body)
        # but Slower than socket.create_connection(address[, timeout[, source_address]])
        requests.head(self.url_host, timeout=REQUESTS_TIMEOUT)
    except OSError:  # requests.exceptions.RequestException subclasses OSError
        if self.suspended == 0:  # Only warn on the first failure
            Logger.warning("Site: {si} is Offline now.".format(si=self.url_host))
        self.suspended += 1
    else:
        if self.suspended != 0:  # Recovered after one or more failed checks
            Logger.info("The Site: {si} is Online now,after {count} times tries."
                        "Will check the session soon.".format(si=self.url_host,
                                                              count=self.suspended))
            self.suspended = 0  # Set self.suspended as 0 first, then session_check()
            self.session_check()
    # Idiom fix: the comparison already yields the bool.
    return self.suspended == 0
def sort_title_info(raw_title, raw_type, raw_sec_type) -> dict:
    """Sort a raw torrent title into post_data fields based on the
    clone_torrent's category.

    But some may wrong, due to inappropriate search_title.

    :param raw_title: the raw bracketed title, e.g. "[A][B][C]"
    :param raw_type: key into the module-level ``type_dict``
    :param raw_sec_type: secondary-type key under that entry
    :return: dict of category info plus one entry per split key
    """
    split = type_dict[raw_type]["split"]
    raw_title_group = re.findall(r"\[[^\]]*\]", raw_title)
    return_dict = {
        "raw_type": raw_type,
        "raw_second_type": raw_sec_type,
        "type": type_dict[raw_type]["cat"],
        "second_type": type_dict[raw_type]["sec_type"][raw_sec_type],
    }
    len_split = len(split)
    if len_split != len(raw_title_group):
        Logger.warning(
            "The raw title \"{raw}\" may lack of tag (now: {no},ask: {co}),"
            "The split may wrong.".format(raw=raw_title, no=len(raw_title_group), co=len_split))
    # Pad with empty tags so every split key still receives a value.
    while len_split > len(raw_title_group):
        raw_title_group.append("")
    raw_title_group.reverse()
    for i in split:
        j = raw_title_group.pop()
        # Fix: raw strings — "\[" and "\g" are invalid escape sequences in
        # plain string literals (DeprecationWarning, error in future Python).
        title_split = re.sub(r"\[(?P<in>.*)\]", r"\g<in>", j)
        if i in type_dict[raw_type]["limit"]:
            if title_split not in type_dict[raw_type]["limit"][i]:
                # Tag not valid for this key; push it back for the next key.
                title_split = ""
                raw_title_group.append(j)
        return_dict.update({i: title_split})
    Logger.debug(
        "the title split success.The title dict:{dic}".format(dic=return_dict))
    return return_dict
def torrent_reseed(self, torrent):
    """Try to reseed one completed torrent to this site.

    Looks up a "clone" torrent (from the db record, the default clone id,
    or a site search), then posts the rebuilt torrent; if a dupe already
    exists on the site, assists it by downloading instead.

    :param torrent: the completed torrent object from the download client
    :return: flag from torrent_upload / torrent_download
    :raises NoMatchPatternError: torrent name matches no known pattern
    :raises NoCloneTorrentError: no usable/allowed clone torrent found
    :raises CannotAssistError: dupe exists but differs from this torrent
    """
    name_pattern = self._get_torrent_ptn(torrent)
    if name_pattern:
        key_raw = re.sub(r"[_\-.']", " ", name_pattern.group("search_name"))
        key_with_gp = "{gr} {search_key}".format(
            search_key=key_raw, gr=name_pattern.group("group"))
        key_with_gp_ep = "{ep} {gp_key}".format(
            gp_key=key_with_gp, ep=name_pattern.group("episode"))
    else:
        raise NoMatchPatternError(
            "No match pattern. Will Mark \"{}\" As Un-reseed torrent.".
            format(torrent.name))

    search_tag = self.exist_judge(key_with_gp_ep, torrent.name)
    if search_tag == 0 and not self._ASSIST_ONLY:
        # Non-existent repetition torrent (by local judge plugins), prepare to reseed
        torrent_raw_info_dict = None
        try:
            if self._GET_CLONE_ID_FROM_DB:
                clone_id = db.get_data_clone_id(key=key_raw, site=self.db_column)
                if clone_id in [None, 0]:
                    raise KeyError(
                        "The db-record is not return the correct clone id.")
                # Fix: != instead of `is not` — int identity comparison is unreliable.
                elif clone_id != -1:  # -1 means: no re-seed for this site in database.
                    torrent_raw_info_dict = self.torrent_clone(clone_id)
                    if not torrent_raw_info_dict:
                        raise ValueError(
                            "The clone torrent for tid in db-record is not exist.")
                    Logger.debug(
                        "Get clone torrent info from \"DataBase\" OK, Which id: {}"
                        .format(clone_id))
            else:
                raise KeyError(
                    "Set not get clone torrent id from \"Database.\"")
        except (KeyError, ValueError) as e:
            Logger.warning(
                "{}, Try to search the clone info from site, it may not correct"
                .format(e.args[0]))
            clone_id = self._DEFAULT_CLONE_TORRENT if self._DEFAULT_CLONE_TORRENT else 0  # USE Default clone id
            # USE The same group to search firstly and Then non-group tag
            for key in [key_with_gp, key_raw]:
                search_id = self.first_tid_in_search_list(key=key)
                if search_id != 0:  # Fix: != instead of `is not`
                    clone_id = search_id  # The search result will cover the default setting.
                    break
            if clone_id != 0:  # Fix: != instead of `is not`
                torrent_raw_info_dict = self.torrent_clone(clone_id)
                Logger.info(
                    "Get clone torrent info from \"Reseed-Site\" OK, Which id: {cid}"
                    .format(cid=clone_id))

        if torrent_raw_info_dict:
            if self._ALLOW_CAT:
                pre_reseed_cat = torrent_raw_info_dict.get("type")
                if int(pre_reseed_cat) not in self._ALLOW_CAT:
                    raise NoCloneTorrentError(
                        "The clone torrent's category is not allowed.")
            Logger.info(
                "Begin post The torrent {0},which name: {1}".format(
                    torrent.id, torrent.name))
            new_dict = self.date_raw_update(
                torrent_name_search=name_pattern, raw_info=torrent_raw_info_dict)
            multipart_data = self.data_raw2tuple(torrent, raw_info=new_dict)
            flag = self.torrent_upload(torrent=torrent, data=multipart_data)
        else:
            # NOTE(review): the message has no "{}" placeholder, so .format(self.name)
            # is a no-op — kept byte-identical to preserve behavior.
            raise NoCloneTorrentError(
                "Can't find any clone torrent to used.".format(self.name))
    elif search_tag == -1:
        # IF the torrents are present, but not consistent (When FORCE_JUDGE_DUPE_LOC is True)
        raise CannotAssistError(
            "Find dupe, and the exist torrent is not same as pre-reseed torrent. Stop Posting~")
    else:  # IF the torrent is already released and can be assist
        Logger.warning(
            "Find dupe torrent,which id: {0}, Automatically assist it~".
            format(search_tag))
        flag = self.torrent_download(tid=search_tag, thanks=False)
    return flag
def _del_torrent_with_db(self):
    """Delete torrent(both download and reseed) with data from transmission and database

    Scans every torrent in the client, grouped by name (download + reseed
    copies share a name). Stops torrents that hit a tracker error or reach
    the user-set delete condition; once every copy of a name is stopped,
    removes them all (with data) and drops the db row for that title.
    """
    Logger.debug(
        "Begin torrent's status check. If reach condition you set, You will get a warning."
    )
    checked_at = time.time()
    # Group client torrents by name instead of filtering the full list per name.
    by_name = {}
    for item in tc.get_torrents():
        by_name.setdefault(item.name, []).append(item)

    for title, group in by_name.items():
        stopped_count = 0
        for t in group:
            if t.status == "stopped":
                stopped_count += 1
                continue

            _tid, _tname, _tracker = self._get_torrent_info(t)
            # 0 means OK, 1 means tracker warning, 2 means tracker error, 3 means local error.
            if t.error > 1:
                tc.stop_torrent(t.id)
                Logger.warning(
                    "Torrent Error, Torrent({tid}) \"{name}\" in Tracker \"{tracker}\" now stop, "
                    "Error code : {code} {msg}."
                    "With Uploaded {si:.2f} MiB, Ratio {ro:.2f} , Keep time {ho:.2f} h."
                    "".format(tid=_tid, name=_tname, tracker=_tracker,
                              si=t.uploadedEver / 1024 / 1024,
                              ro=t.uploadRatio,
                              ho=(time.time() - t.startDate) / 60 / 60,
                              code=t.error, msg=t.errorString))

            # At least seed time
            if int(checked_at - t.addedDate) > TIME_TORRENT_KEEP_MIN \
                    and setting.pre_delete_judge(torrent=t):
                tc.stop_torrent(t.id)
                Logger.warning(
                    "Reach Target you set, Torrent({tid}) \"{name}\" in Tracker \"{tracker}\" now stop, "
                    "With Uploaded {si:.2f} MiB, Ratio {ro:.2f} , Keep time {ho:.2f} h."
                    "".format(tid=_tid, name=_tname, tracker=_tracker,
                              si=t.uploadedEver / 1024 / 1024,
                              ro=t.uploadRatio,
                              ho=(time.time() - t.startDate) / 60 / 60))

        # Every copy of this title was already stopped: purge data + db record.
        if stopped_count == len(group):
            Logger.info(
                "All torrents of \"{0}\" reach target, Will DELETE them soon."
                .format(title))
            for t in group:
                tc.remove_torrent(t.id, delete_data=True)
            db.exec("DELETE FROM `seed_list` WHERE `title` = %s", (title, ))