Example #1
    def __init__(self, status: bool, cookies: dict or str, **kwargs):
        self.name = type(self).__name__

        # -*- Assign the basic information -*-
        self.status = status
        try:
            self.cookies = cookies_raw2jar(cookies) if isinstance(cookies, str) else cookies
        except ValueError:  # Empty raw_cookies will raise ValueError (see utils.cookie)
            Logger.critical("Empty cookies, Not allowed to activate Model \"{}\"".format(self.name))
            self.status = False

        # -*- Assign Enhanced Features : Site -*-
        """
        Enhance Feature for `base` Reseeder.
        Those key-values will be set as default value unless you change it in your user-settings.
        The name of those key should be start with "_" and upper.
        
        Included:
        1. _EXTEND_DESCR_*        : default True, Enable to Enhanced the description of the reseed torrent,
                                     And its priority is higher than setting.extend_descr_raw[key]["status"].
        2. _ASSIST_ONLY           : default False, Enable to only assist the exist same torrent but not to reseed. 
        """
        self._EXTEND_DESCR_BEFORE = kwargs.setdefault("extend_descr_before", True)
        self._EXTEND_DESCR_THUMBNAILS = kwargs.setdefault("extend_descr_thumbnails", True)
        self._EXTEND_DESCR_MEDIAINFO = kwargs.setdefault("extend_descr_mediainfo", True)
        self._EXTEND_DESCR_CLONEINFO = kwargs.setdefault("extend_descr_cloneinfo", True)
        self._ASSIST_ONLY = kwargs.setdefault("assist_only", False)
        self._ASSIST_DELAY_TIME = kwargs.setdefault("assist_delay_time", 0)

        # Check Site Online Status
        if self.status:
            Logger.debug("Model \"{}\" is activation now.".format(self.name))
            self.online_check()
        else:
            Logger.info("Model \"{}\" isn't active due to your settings.".format(self.name))
Example #2
    def online_check(self) -> bool:
        """
        Check function to get the site status (online or not)

        :return: bool , True if online
        """
        if self._PASS_ONLINE_CHECK:
            return True

        try:
            requests.get(self.url_host, timeout=setting.REQUESTS_TIMEOUT)
        except OSError:  # requests.exceptions.RequestException
            if self.suspended == 0:
                Logger.warning(
                    "Site: {si} is Offline now.".format(si=self.url_host))
            self.suspended += 1
        else:
            if self.suspended != 0:
                Logger.info(
                    "The Site: {si} is Online now, after {count} tries. "
                    "Will check the session soon.".format(
                        si=self.url_host, count=self.suspended))
                self.suspended = 0  # Set self.suspended as 0 first, then session_check()
                self.session_check()
        return self.suspended == 0
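
Outside of the reseeder class, the same reachability idea can be sketched as a free function (illustrative only; the method above additionally tracks a suspended counter and re-checks the session when the site comes back):

import requests

def is_online(url_host: str, timeout: float = 10.0) -> bool:
    """Return True if an HTTP request to the site host succeeds within the timeout."""
    try:
        requests.get(url_host, timeout=timeout)
    except OSError:  # requests exceptions subclass OSError
        return False
    return True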
Example #3
    def torrent_clone(self, tid) -> dict:
        """
        Use Internal API: https://npupt.com/transfer.php?url={url} , Request Method: GET
        The url is base64-encoded, and the API responds with a JSON dict.
        """
        res_dic = {}
        transferred_url = string2base64(
            "{host}/details.php?id={tid}&hit=1".format(host=self.url_host,
                                                       tid=tid))
        try:
            res_dic = self.get_data(url=self.url_host + "/transfer.php",
                                    params={"url": transferred_url},
                                    json=True)
        except ValueError:
            Logger.error("Error,this torrent may not exist or ConnectError")
        else:
            res_dic.update({
                "transferred_url": transferred_url,
                "clone_id": tid
            })
            res_dic["descr"] = ubb_clean(res_dic["descr"])

            Logger.info(
                "Get clone torrent's info,id: {tid},title:\"{ti}\"".format(
                    tid=tid, ti=res_dic["name"]))
        return res_dic
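
The docstring above notes that the details URL is base64-encoded before being handed to transfer.php. A helper named string2base64 presumably does something along these lines (assumed shape; the project's utils may differ, e.g. in charset handling):

import base64

def string2base64(text: str) -> str:
    # Encode the text as UTF-8 and return its base64 form as an ASCII string.
    return base64.b64encode(text.encode("utf-8")).decode("ascii")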
Example #4
 def _assist_delay(self):
     if self._ASSIST_ONLY:
         Logger.info("Autoseed-{mo} only allowed to assist."
                     "it will sleep {sl} Seconds to wait the reseed site "
                     "to have this torrent".format(
                         mo=self.name, sl=self._ASSIST_DELAY_TIME))
         time.sleep(self._ASSIST_DELAY_TIME)
Example #5
File: site.py Project: ch0317/Pt-Autoseed
    def __init__(self, status: bool, cookies: dict or str, **kwargs):
        self.name = type(self).__name__

        # -*- Assign the basic information -*-
        self.status = status
        try:
            self.cookies = cookies_raw2jar(cookies) if isinstance(cookies, str) else cookies
        except ValueError:  # Empty raw_cookies will raise ValueError (see utils.cookie)
            Logger.critical("Empty cookies, Not allowed to activate Model \"{}\"".format(self.name))
            self.status = False

        # -*- Assign Enhanced Features : Site -*-
        """
        Enhance Feature for `base` Reseeder.
        Those key-values will be set as default value unless you change it in your user-settings.
        The name of those key should be start with "_" and upper.
        
        Included:
        1. _EXTEND_DESCR_*        : default True, Enable to Enhanced the description of the reseed torrent,
                                     And its priority is higher than setting.extend_descr_raw[key]["status"].
        2. _ASSIST_ONLY           : default False, Enable to only assist the exist same torrent but not to reseed. 
        """
        self._EXTEND_DESCR_BEFORE = kwargs.setdefault("extend_descr_before", True)
        self._EXTEND_DESCR_THUMBNAILS = kwargs.setdefault("extend_descr_thumbnails", True)
        self._EXTEND_DESCR_MEDIAINFO = kwargs.setdefault("extend_descr_mediainfo", True)
        self._EXTEND_DESCR_CLONEINFO = kwargs.setdefault("extend_descr_cloneinfo", True)
        self._ASSIST_ONLY = kwargs.setdefault("assist_only", False)
        self._ASSIST_DELAY_TIME = kwargs.setdefault("assist_delay_time", 0)

        # Check Site Online Status
        if self.status:
            Logger.debug("Model \"{}\" is activation now.".format(self.name))
            self.online_check()
        else:
            Logger.info("Model \"{}\" isn't active due to your settings.".format(self.name))
Example #6
 def torrent_download(self, tid, **kwargs):
     added_torrent = tc.add_torrent(torrent=self.torrent_link(tid))
     # Another way is to download the torrent file to the watch-dir (see early commits), but that would not return added_torrent.id
     Logger.info("Download Torrent OK, which id: {id}.".format(id=tid))
     if kwargs.setdefault("thanks", self._AUTO_THANK):
         self.torrent_thank(tid)
     return added_torrent.id
Example #7
 def torrent_download(self, tid, **kwargs):
     added_torrent = tc.add_torrent(torrent=self.torrent_link(tid))
     # Another way is to download the torrent file to the watch-dir (see early commits), but that would not return added_torrent.id
     Logger.info("Download Torrent OK, which id: {id}.".format(id=tid))
     if kwargs.setdefault("thanks", self._AUTO_THANK):
         self.torrent_thank(tid)
     return added_torrent.id
Example #8
 def exist_torrent_title(self, tag):
     torrent_page = self.page_torrent_detail(tid=tag, bs=True)
     torrent_title = re.search(r"\[OurBits\]\.(?P<name>.+?)\.torrent",
                               torrent_page.text).group("name")
     Logger.info("The torrent name for id({id}) is \"{name}\"".format(
         id=tag, name=torrent_title))
     return torrent_title
Example #9
    def update_torrent_info_from_rpc_to_db(self, last_id_db=None, force_clean_check=False):
        """
        Sync torrent ids from transmission to the database.
        Listing starts from the last checked id, and the max id is returned as the new last checked id.
        """
        torrent_list = tc.get_torrents()  # Cache the torrent list
        new_torrent_list = [t for t in torrent_list if t.id > self.last_id_check]
        if new_torrent_list:
            last_id_now = max([t.id for t in new_torrent_list])
            if last_id_db is None:
                last_id_db = db.get_max_in_seed_list(column_list=db.col_seed_list[2:])
            Logger.debug("Max tid, transmission: {tr}, database: {db}".format(tr=last_id_now, db=last_id_db))

            if not force_clean_check:  # Normal Update
                Logger.info("Some new torrents were add to transmission, Sync to db~")
                for i in new_torrent_list:  # Upsert the new torrent
                    db.upsert_seed_list(self._get_torrent_info(i))
                self.last_id_check = last_id_now

            elif last_id_now != last_id_db:  # Check torrent records between transmission and db
                total_num_in_tr = len(set([t.name for t in torrent_list]))
                total_num_in_db = db.exec(sql="SELECT COUNT(*) FROM `seed_list`")[0]
                if int(total_num_in_tr) >= int(total_num_in_db):
                    db.cache_torrent_list()
                    Logger.info("Upsert the whole torrent id to database.")
                    for t in torrent_list:  # Upsert the whole torrent
                        db.upsert_seed_list(self._get_torrent_info(t))
                else:
                    Logger.error(
                        "The torrent list didn't match with db-records, Clean the whole \"seed_list\" for safety.")
                    db.exec(sql="DELETE FROM `seed_list` WHERE 1")  # Delete all line from seed_list
                    self.update_torrent_info_from_rpc_to_db(last_id_db=0)
        else:
            Logger.debug("No new torrent(s), Return with nothing to do.")
        return self.last_id_check
Example #10
 def reseeders_update(self):
     """
     Get the pre-reseed list from the database,
     and send those un-reseeded torrents to each reseeder depending on their download status.
     """
     pre_reseeder_list = self.get_online_reseeders()
     pre_cond = " OR ".join(["`{}`=0".format(i.db_column) for i in pre_reseeder_list])
     result = db.exec("SELECT * FROM `seed_list` WHERE `download_id` != 0 AND ({})".format(pre_cond),
                      r_dict=True, fetch_all=True)
     for t in result:  # Traverse the un-reseeded records
         try:
             dl_torrent = tc.get_torrent(t["download_id"])
         except KeyError:  # The pre-reseed torrent no longer exists
             Logger.error("The pre-reseed Torrent: \"{0}\" isn't found in result, "
                          "Its db-record will be deleted soon.".format(t["title"]))
             self._del_torrent_with_db(rid=t["id"])
             if t["id"] in self.downloading_torrent_id_queue:
                 self.downloading_torrent_id_queue.remove(t["id"])
         else:
             tname = dl_torrent.name
             if int(dl_torrent.progress) == 100:  # Download progress in percent
                 Logger.info("New completed torrent: \"{name}\" , Judge reseed or not.".format(name=tname))
                 for reseeder in pre_reseeder_list:
                     Thread(target=reseeder.torrent_feed, args=(dl_torrent,),
                            name="Thread-{}".format(reseeder.model_name()), daemon=True).start()
                     # reseeder.torrent_feed(torrent=dl_torrent)
                 if dl_torrent.id in self.downloading_torrent_id_queue:
                     self.downloading_torrent_id_queue.remove(dl_torrent.id)
             elif dl_torrent.id in self.downloading_torrent_id_queue:
                 pass  # Wait until this torrent download completely.
             else:
                 Logger.warning("Torrent:\"{name}\" is still downloading, Wait......".format(name=tname))
                 self.downloading_torrent_id_queue.append(dl_torrent.id)
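
For two hypothetical online reseeders whose db_column values are "ourbits" and "byrbt", the condition built at the top of this method expands as follows (the column names are placeholders for whatever sites are actually active):

# pre_cond   -> "`ourbits`=0 OR `byrbt`=0"
# full query -> SELECT * FROM `seed_list` WHERE `download_id` != 0 AND (`ourbits`=0 OR `byrbt`=0)
pre_cond = " OR ".join("`{}`=0".format(col) for col in ["ourbits", "byrbt"])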
Example #11
 def exist_torrent_title(self, tag):
     torrent_file_page = self.page_torrent_info(tid=tag, bs=True)
     torrent_file_info_table = torrent_file_page.find("ul", id="colapse")
     torrent_title = re.search(r"\[name\] \(\d+\): (?P<name>.+?) -",
                               torrent_file_info_table.text).group("name")
     Logger.info("The torrent name for id({id}) is \"{name}\"".format(
         id=tag, name=torrent_title))
     return torrent_title
Example #12
    def torrent_reseed(self, torrent):
        name_pattern = self._get_torrent_ptn(torrent)
        if name_pattern:
            key_raw = re.sub(r"[_\-.']", " ", name_pattern.group("search_name"))
            key_with_gp = "{gr} {search_key}".format(search_key=key_raw, gr=name_pattern.group("group"))
            key_with_gp_ep = "{ep} {gp_key}".format(gp_key=key_with_gp, ep=name_pattern.group("episode"))
        else:
            raise NoMatchPatternError("No match pattern. Will Mark \"{}\" As Un-reseed torrent.".format(torrent.name))

        search_tag = self.exist_judge(key_with_gp_ep, torrent.name)
        if search_tag == 0 and not self._ASSIST_ONLY:
            # Non-existent repetition torrent (by local judge plugins), prepare to reseed
            torrent_raw_info_dict = None

            try:
                if self._GET_CLONE_ID_FROM_DB:
                    clone_id = db.get_data_clone_id(key=key_raw, site=self.db_column)
                    if clone_id in [None, 0]:
                        raise KeyError("The db-record is not return the correct clone id.")
                    elif clone_id is not -1:  # Set to no re-seed for this site in database.
                        torrent_raw_info_dict = self.torrent_clone(clone_id)
                        if not torrent_raw_info_dict:
                            raise ValueError("The clone torrent for tid in db-record is not exist.")
                            Logger.debug("Get clone torrent info from \"DataBase\" OK, Which id: {}".format(clone_id))
                else:
                    raise KeyError("Set not get clone torrent id from \"Database.\"")
            except (KeyError, ValueError) as e:
                Logger.warning("{}, Try to search the clone info from site, it may not correct".format(e.args[0]))
                clone_id = self._DEFAULT_CLONE_TORRENT if self._DEFAULT_CLONE_TORRENT else 0  # USE Default clone id
                for key in [key_with_gp, key_raw]:  # USE The same group to search firstly and Then non-group tag
                    search_id = self.first_tid_in_search_list(key=key)
                    if search_id != 0:
                        clone_id = search_id  # The search result will cover the default setting.
                        break

                if clone_id != 0:
                    torrent_raw_info_dict = self.torrent_clone(clone_id)
                    Logger.info("Get clone torrent info from \"Reseed-Site\" OK, Which id: {cid}".format(cid=clone_id))

            if torrent_raw_info_dict:
                if self._ALLOW_CAT:
                    pre_reseed_cat = torrent_raw_info_dict.get("type")
                    if int(pre_reseed_cat) not in self._ALLOW_CAT:
                        raise NoCloneTorrentError("The clone torrent's category is not allowed.")

                Logger.info("Begin post The torrent {0},which name: {1}".format(torrent.id, torrent.name))
                new_dict = self.date_raw_update(torrent_name_search=name_pattern, raw_info=torrent_raw_info_dict)
                multipart_data = self.data_raw2tuple(torrent, raw_info=new_dict)
                flag = self.torrent_upload(torrent=torrent, data=multipart_data)
            else:
                raise NoCloneTorrentError("Can't find any clone torrent to used.".format(self.name))
        elif search_tag == -1:  # IF the torrents are present, but not consistent (When FORCE_JUDGE_DUPE_LOC is True)
            raise CannotAssistError("Find dupe, and the exist torrent is not same as pre-reseed torrent. Stop Posting~")
        else:  # The torrent is already released and can be assisted
            Logger.warning("Find dupe torrent, which id: {0}, Automatically assist it~".format(search_tag))
            flag = self.torrent_download(tid=search_tag, thanks=False)

        return flag
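
The three search keys built at the top of this method can be illustrated with a made-up release name and a made-up name pattern (both hypothetical; the project derives the real pattern elsewhere via _get_torrent_ptn):

import re

ptn = re.compile(r"(?P<search_name>.+)\.(?P<episode>S\d+E\d+)\..*-(?P<group>\w+)$")
m = ptn.search("Some.Show.S01E02.1080p.WEB-DL-GROUP")

key_raw = re.sub(r"[_\-.']", " ", m.group("search_name"))                          # "Some Show"
key_with_gp = "{gr} {search_key}".format(search_key=key_raw, gr=m.group("group"))  # "GROUP Some Show"
key_with_gp_ep = "{ep} {gp_key}".format(gp_key=key_with_gp, ep=m.group("episode")) # "S01E02 GROUP Some Show"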
Example #13
 def exist_torrent_title(self, tag):
     torrent_file_page = self.page_torrent_info(tid=tag, bs=True)
     if re.search("你没有该权限!", torrent_file_page.text):
         torrent_page = self.page_torrent_detail(tid=tag, bs=True)
          torrent_title = re.search(r"\[TJUPT\]\.(?P<name>.+?)\.torrent", torrent_page.text).group("name")
      else:  # A high-authority (Ultimate User) account can view the file-info page directly.
         torrent_file_info_table = torrent_file_page.find("ul", id="colapse")
          torrent_title = re.search(r"\[name\] \(\d+\): (?P<name>.+?) -", torrent_file_info_table.text).group("name")
     Logger.info("The torrent name for id({id}) is \"{name}\"".format(id=tag, name=torrent_title))
     return torrent_title
Example #14
File: tjupt.py Project: ch0317/Pt-Autoseed
 def exist_torrent_title(self, tag):
     torrent_file_page = self.page_torrent_info(tid=tag, bs=True)
     if re.search("你没有该权限!", torrent_file_page.text):
         torrent_page = self.page_torrent_detail(tid=tag, bs=True)
          torrent_title = re.search(r"\[TJUPT\]\.(?P<name>.+?)\.torrent", torrent_page.text).group("name")
      else:  # A high-authority (Ultimate User) account can view the file-info page directly.
         torrent_file_info_table = torrent_file_page.find("ul", id="colapse")
          torrent_title = re.search(r"\[name\] \(\d+\): (?P<name>.+?) -", torrent_file_info_table.text).group("name")
     Logger.info("The torrent name for id({id}) is \"{name}\"".format(id=tag, name=torrent_title))
     return torrent_title
Example #15
    def torrent_clone(self, tid):
        """
        Use Internal API: - /upsimilartorrent.php?id={tid} ,Request Method: GET
                          - /catdetail_edittorrents.php?torid={id} ,Request Method: GET
        The site responds with two pages of information about the clone torrent,
        and this function sorts those pages into a pre-reseed dict.
        """
        res_dic = {}

        page_clone = self.get_data(url=self.url_host + "/upsimilartorrent.php",
                                   params={"id": tid},
                                   bs=True)

        if not re.search(r"<h2>错误!</h2>", str(page_clone)):
            Logger.info("Got clone torrent's info,id: {tid}".format(tid=tid))

            type_select = page_clone.find("select", id="oricat")
            type_value = type_select.find("option",
                                          selected="selected")["value"]
            raw_descr = ubb_clean(page_clone.find("textarea", id="descr").text)
            url = page_clone.find("input", attrs={"name": "url"})
            res_dic.update({
                "clone_id": tid,
                "type": type_value,
                "descr": raw_descr,
                "url": url["value"]
            })

            for name in ["source_sel", "team_sel"]:
                tag = page_clone.find("select", attrs={"name": name})
                tag_selected = tag.find("option", selected=True)
                res_dic.update({name: tag_selected["value"]})

            # Get torrent_info page and sort this page's information into the pre-reseed dict.
            catdetail_page = self.get_data(url=self.url_host +
                                           "/catdetail_edittorrents.php",
                                           params={"torid": tid},
                                           bs=True)

            for ask_tag_name in ask_dict[type_value]:
                value = ""
                if catdetail_page.find("input", attrs={"name": ask_tag_name}):
                    tag = catdetail_page.find("input",
                                              attrs={"name": ask_tag_name})
                    value = tag["value"]
                elif catdetail_page.find("select",
                                         attrs={"name": ask_tag_name}):
                    tag = catdetail_page.find("select",
                                              attrs={"name": ask_tag_name})
                    tag_selected = tag.find("option", selected=True)
                    if tag_selected:
                        value = tag_selected["value"]
                res_dic.update({ask_tag_name: value})

        return res_dic
Example #16
    def _del_torrent_with_db(self, rid=None):
        """Delete torrent(both download and reseed) with data from transmission and database"""
        Logger.debug("Begin torrent's status check. If reach condition you set, You will get a warning.")

        if rid:
            sql = "SELECT * FROM `seed_list` WHERE `id`={}".format(rid)
        else:
            sql = "SELECT * FROM `seed_list`"

        time_now = time.time()
        for row in db.exec(sql=sql, r_dict=True, fetch_all=True):
            sid = row.pop("id")
            s_title = row.pop("title")

            err = 0
            reseed_list = []
            torrent_id_list = [tid for tracker, tid in row.items() if tid > 0]
            for tid in torrent_id_list:
                try:  # Ensure torrent exist
                    reseed_list.append(tc.get_torrent(torrent_id=tid))
                except KeyError:  # Count an error when the torrent does not exist.
                    err += 1

            delete = False
            if rid:
                delete = True
                Logger.warning("Force Delete. Which name: {}, Affect torrents: {}".format(s_title, torrent_id_list))
            elif err == 0:  # All torrents in this row exist, so check their status.
                reseed_stop_list = []
                for t in reseed_list:
                    if int(time_now - t.addedDate) > TIME_TORRENT_KEEP_MIN:  # At least seed time
                        if t.status == "stopped":  # Mark the stopped torrent
                            reseed_stop_list.append(t)
                        elif setting.pre_delete_judge(torrent=t):
                            _tid, _tname, _tracker = self._get_torrent_info(t)
                            tc.stop_torrent(t.id)
                            Logger.warning(
                                "Reach Target you set, Torrent({tid}) \"{name}\" in Tracker \"{tracker}\" now stop, "
                                "With Uploaded {si:.2f} MiB, Ratio {ro:.2f} , Keep time {ho:.2f} h."
                                "".format(tid=_tid, name=_tname, tracker=_tracker, si=t.uploadedEver / 1024 / 1024,
                                          ro=t.uploadRatio, ho=(time.time() - t.startDate) / 60 / 60)
                            )
                if len(reseed_list) == len(reseed_stop_list):
                    delete = True
                    Logger.info("All torrents of \"{0}\" reach target, Will DELETE them soon.".format(s_title))
            else:
                delete = True
                Logger.error("Some Torrents (\"{name}\", {er} of {co}) may not found, "
                             "Delete all it's records from db".format(name=s_title, er=err, co=len(torrent_id_list)))

            if delete:  # Delete torrents together with their data and db-records
                for tid in torrent_id_list:
                    tc.remove_torrent(tid, delete_data=True)
                db.exec(sql="DELETE FROM `seed_list` WHERE `id` = {0}".format(sid))
Example #17
 def torrent_upload(self, torrent, data: tuple or list):
     upload_url = self.url_host + "/takeupload.php"
     file_tuple = self._post_torrent_file_tuple(torrent)
     post = self.post_data(url=upload_url, files={"file": file_tuple}, data=data)
     if post.url != upload_url:  # Check reseed status
         seed_torrent_download_id = re.search(r"id=(\d+)", post.url).group(1)  # Read the torrent's id in reseed site
         flag = self.torrent_download(tid=seed_torrent_download_id)
         Logger.info("Reseed post OK, The torrent's in transmission: {fl}".format(fl=flag))
     else:  # Log if not reseed successfully
         outer_message = self.torrent_upload_err_message(post_text=post.text)
         raise ConnectionError("Upload this torrent Error, The Server echo:\"{0}\".".format(outer_message))
     return flag
Example #18
    def update_cookies(self):
        username = self.config.get('username')
        password = self.config.get('password')

        s = requests.Session()
        s.cookies.update(self.cookies)
        r = s.post(self.url_host + '/takelogin.php', data={
            'username': username,
            'password': password,
            'trackerssl': 'yes'
        })
        if r.url.find('/index.php') > -1:
            Logger.info('Update Cookies Successful.')
            new_cookies = s.cookies['ourbits_jwt']
            self.cookies = {'ourbits_jwt': new_cookies}
            self.status = True
Example #19
File: byrbt.py Project: ch0317/Pt-Autoseed
    def torrent_clone(self, tid) -> dict:
        """
        Reconstruction from "BYRBT Info Clone" by Deparsoul, version 20170400, thanks.
        This function returns a dict including (split_title, small_title, imdb_url, db_url, descr, before_torrent_id).
        """
        return_dict = {}
        details_bs = self.page_torrent_detail(tid=tid, bs=True)
        title_search = re.search("种子详情 \"(?P<title>.*)\" - Powered", str(details_bs.title))
        if title_search:
            title = unescape(title_search.group("title"))
            Logger.info("Get clone torrent's info,id: {tid},title: \"{ti}\"".format(tid=tid, ti=title))
            title_dict = sort_title_info(raw_title=title, raw_type=details_bs.find("span", id="type").text.strip(),
                                         raw_sec_type=details_bs.find("span", id="sec_type").text.strip())
            return_dict.update(title_dict)
            body = details_bs.body
            imdb_url = dburl = ""
            if body.find(class_="imdbRatingPlugin"):
                imdb_url = 'http://www.imdb.com/title/' + body.find(class_="imdbRatingPlugin")["data-title"]
                Logger.debug("Found imdb link:{link} for this torrent.".format(link=imdb_url))
            if body.find("a", href=re.compile("://movie.douban.com/subject")):
                dburl = body.find("a", href=re.compile("://movie.douban.com/subject")).text
                Logger.debug("Found douban link:{link} for this torrent.".format(link=dburl))
            # Update description
            descr = body.find(id="kdescr")

            # Restore the image link
            for img_tag in descr.find_all("img"):
                del img_tag["onload"]
                del img_tag["data-pagespeed-url-hash"]
                img_tag["src"] = unquote(re.sub(r"images/(?:(?:\d+x)+|x)(?P<raw>.*)\.pagespeed\.ic.*",
                                                "images/\g<raw>", img_tag["src"]))

            # Remove unnecessary description (class: autoseed, byrbt_info_clone_ignore, byrbt_info_clone)
            for tag in descr.find_all(class_=pat_tag_pass_by_class):
                tag.extract()

            descr_out = re.search(r"<div id=\"kdescr\">(?P<in>.+)</div>$", str(descr), re.S).group("in")
            return_dict.update({
                "small_descr": body.find(id="subtitle").find("li").text,
                "url": imdb_url,
                "dburl": dburl,
                "descr": descr_out,
                "clone_id": tid
            })
        else:
            Logger.error("Error,this torrent may not exist or ConnectError")
        return return_dict
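
The image-restoring substitution above strips Google PageSpeed rewriting from an img src; on a made-up src it behaves like this:

import re
from urllib.parse import unquote

src = "images/200x150xposter.jpg.pagespeed.ic.AbCdEf.jpg"  # hypothetical rewritten src
restored = unquote(re.sub(r"images/(?:(?:\d+x)+|x)(?P<raw>.*)\.pagespeed\.ic.*",
                          r"images/\g<raw>", src))
# restored -> "images/poster.jpg"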
Example #20
    def torrent_clone(self, tid) -> dict:
        """
        Use Internal API: http://pt.nwsuaf6.edu.cn/citetorrent.php?torrent_id={tid} ,Request Method: GET
        Responds with a JSON dict.
        """
        res_dic = {}
        try:
            res_dic = self.get_data(url=self.url_host + "/citetorrent.php", params={"torrent_id": tid}, json=True)
        except ValueError:
            Logger.error("Error,this torrent may not exist or ConnectError")
        else:
            res_dic["clone_id"] = tid
            res_dic["descr"] = ubb_clean(res_dic["descr"])
            res_dic["type"] = res_dic["category"]

            Logger.info("Get clone torrent's info,id: {tid},title:\"{ti}\"".format(tid=tid, ti=res_dic["name"]))
        return res_dic
Example #21
    def torrent_clone(self, tid) -> dict:
        """
        Use Internal API: https://npupt.com/transfer.php?url={url} ,Request Method: GET
        The url is base64-encoded, and the API responds with a JSON dict.
        """
        res_dic = {}
        transferred_url = string2base64("{host}/details.php?id={tid}&hit=1".format(host=self.url_host, tid=tid))
        try:
            res_dic = self.get_data(url=self.url_host + "/transfer.php", params={"url": transferred_url}, json=True)
        except ValueError:
            Logger.error("Error,this torrent may not exist or ConnectError")
        else:
            res_dic.update({"transferred_url": transferred_url, "clone_id": tid})
            res_dic["descr"] = ubb_clean(res_dic["descr"])

            Logger.info("Get clone torrent's info,id: {tid},title:\"{ti}\"".format(tid=tid, ti=res_dic["name"]))
        return res_dic
Example #22
    def torrent_clone(self, tid) -> dict:
        """
        Use Internal API: http://pt.nwsuaf6.edu.cn/citetorrent.php?torrent_id={tid} ,Request Method: GET
        Responds with a JSON dict.
        """
        res_dic = {}
        try:
            res_dic = self.get_data(url=self.url_host + "/citetorrent.php", params={"torrent_id": tid}, json=True)
        except ValueError:
            Logger.error("Error,this torrent may not exist or ConnectError")
        else:
            res_dic["clone_id"] = tid
            res_dic["descr"] = ubb_clean(res_dic["descr"])
            res_dic["type"] = res_dic["category"]

            Logger.info("Get clone torrent's info,id: {tid},title:\"{ti}\"".format(tid=tid, ti=res_dic["name"]))
        return res_dic
Example #23
    def run(self):
        # Sync status between transmission, database, controller and reseeder modules
        self.update_torrent_info_from_rpc_to_db(force_clean_check=True)
        self.reseeders_update()

        # Start background thread
        thread_args = [
            (self._online_check, setting.CYCLE_CHECK_RESEEDER_ONLINE),
            (self._del_torrent_with_db, setting.CYCLE_DEL_TORRENT_CHECK)
        ]
        for args in thread_args:
            Thread(target=period_f, args=args, daemon=True).start()

        Logger.info("Check period Starting~")
        while True:
            self.update_torrent_info_from_rpc_to_db()  # Read the new torrent's info and sync it to database
            self.reseeders_update()  # Feed those new and not reseed torrent to active reseeder
            time.sleep(setting.SLEEP_TIME)
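
period_f itself is not shown in these examples; given how it is used here (a target callable plus a cycle length in seconds, run inside a daemon Thread), it presumably looks something like this sketch:

import time

def period_f(fn, period):
    # Call fn forever, sleeping `period` seconds between calls (assumed behaviour).
    while True:
        fn()
        time.sleep(period)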
Example #24
    def session_check(self):
        page_usercp_bs = self.get_data(url=self.url_host + "/usercp.php",
                                       bs=True)
        self.status = bool(page_usercp_bs.find(id="info_block"))
        if not self.status and self._AUTO_RENEW_COOKIES:
            Logger.info(
                'Update your cookies by login method in Site: {}'.format(
                    self.name))
            self.update_cookies()

        if self.status:
            Logger.debug("Through authentication in Site: {}".format(
                self.name))
        else:
            Logger.error(
                "Can not verify identity in Site: {mo}. Please check your Cookies.".format(
                    mo=self.name))

        return self.status
Example #25
File: tjupt.py Project: ch0317/Pt-Autoseed
    def torrent_clone(self, tid):
        """
        Use Internal API: - /upsimilartorrent.php?id={tid} ,Request Method: GET
                          - /catdetail_edittorrents.php?torid={id} ,Request Method: GET
        The site responds with two pages of information about the clone torrent,
        and this function sorts those pages into a pre-reseed dict.
        """
        res_dic = {}

        page_clone = self.get_data(url=self.url_host + "/upsimilartorrent.php", params={"id": tid}, bs=True)

        if not re.search(r"<h2>错误!</h2>", str(page_clone)):
            Logger.info("Got clone torrent's info,id: {tid}".format(tid=tid))

            type_select = page_clone.find("select", id="oricat")
            type_value = type_select.find("option", selected="selected")["value"]
            raw_descr = ubb_clean(page_clone.find("textarea", id="descr").text)
            url = page_clone.find("input", attrs={"name": "url"})
            res_dic.update({"clone_id": tid, "type": type_value, "descr": raw_descr, "url": url["value"]})

            for name in ["source_sel", "team_sel"]:
                tag = page_clone.find("select", attrs={"name": name})
                tag_selected = tag.find("option", selected=True)
                res_dic.update({name: tag_selected["value"]})

            # Get torrent_info page and sort this page's information into the pre-reseed dict.
            catdetail_page = self.get_data(url=self.url_host + "/catdetail_edittorrents.php", params={"torid": tid},
                                           bs=True)

            for ask_tag_name in ask_dict[type_value]:
                value = ""
                if catdetail_page.find("input", attrs={"name": ask_tag_name}):
                    tag = catdetail_page.find("input", attrs={"name": ask_tag_name})
                    value = tag["value"]
                elif catdetail_page.find("select", attrs={"name": ask_tag_name}):
                    tag = catdetail_page.find("select", attrs={"name": ask_tag_name})
                    tag_selected = tag.find("option", selected=True)
                    if tag_selected:
                        value = tag_selected["value"]
                res_dic.update({ask_tag_name: value})

        return res_dic
Example #26
 def torrent_upload(self, torrent, data: tuple or list):
     upload_url = self.url_host + "/takeupload.php"
     file_tuple = self._post_torrent_file_tuple(torrent)
     post = self.post_data(url=upload_url,
                           files={"file": file_tuple},
                           data=data)
     if post.url != upload_url:  # Check reseed status
          seed_torrent_download_id = re.search(r"id=(\d+)", post.url).group(
             1)  # Read the torrent's id in reseed site
         flag = self.torrent_download(tid=seed_torrent_download_id)
         Logger.info(
             "Reseed post OK, The torrent's in transmission: {fl}".format(
                 fl=flag))
      else:  # Raise if the reseed did not succeed
         outer_message = self.torrent_upload_err_message(
             post_text=post.text)
         raise ConnectionError(
             "Upload this torrent Error, The Server echo:\"{0}\".".format(
                 outer_message))
     return flag
Example #27
def main():
    rootLogger.info(
        "Autoseed start~,will check database record at the First time.")
    i = 0
    while True:
        controller.update_torrent_info_from_rpc_to_db()  # Update the table
        controller.reseeders_update()  # Main reseed-judgment routine

        sleep_time = setting.sleep_free_time
        if setting.busy_start_hour <= int(time.strftime(
                "%H", time.localtime())) < setting.busy_end_hour:
            sleep_time = setting.sleep_busy_time

        rootLogger.debug("Check time {ti} OK, Reach check id {cid},"
                         " Will Sleep for {slt} seconds.".format(
                             ti=i,
                             cid=controller.last_id_check,
                             slt=sleep_time))

        i += 1
        time.sleep(sleep_time)
Example #28
File: site.py Project: ch0317/Pt-Autoseed
    def online_check(self) -> bool:
        """
        Check function to get the site status (online or not)

        :return: bool , True if online
        """
        try:
            # requests.head() is a little quicker than requests.get() (it asks only for the head, not the body),
            #                    but slower than socket.create_connection(address[, timeout[, source_address]])
            requests.head(self.url_host, timeout=REQUESTS_TIMEOUT)
        except OSError:  # requests.exceptions.RequestException
            if self.suspended == 0:
                Logger.warning("Site: {si} is Offline now.".format(si=self.url_host))
            self.suspended += 1
        else:
            if self.suspended != 0:
                Logger.info("The Site: {si} is Online now,after {count} times tries."
                            "Will check the session soon.".format(si=self.url_host, count=self.suspended))
                self.suspended = 0  # Set self.suspended as 0 first, then session_check()
                self.session_check()
        return self.suspended == 0
Example #29
    def online_check(self) -> bool:
        """
        Check function to get the site status (online or not)

        :return: bool , True if online
        """
        try:
            # requests.head() is a little quicker than requests.get() (it asks only for the head, not the body),
            #                    but slower than socket.create_connection(address[, timeout[, source_address]])
            requests.head(self.url_host, timeout=REQUESTS_TIMEOUT)
        except OSError:  # requests.exceptions.RequestException
            if self.suspended == 0:
                Logger.warning("Site: {si} is Offline now.".format(si=self.url_host))
            self.suspended += 1
        else:
            if self.suspended != 0:
                Logger.info("The Site: {si} is Online now,after {count} times tries."
                            "Will check the session soon.".format(si=self.url_host, count=self.suspended))
                self.suspended = 0  # Set self.suspended as 0 first, then session_check()
                self.session_check()
        return self.suspended == 0
Example #30
    def _active(self):
        """
        Activate the reseeder objects and append them to self.active_reseeder_list.
        Each object will follow these steps:
            1. Check whether its config exists in the user settings
            2. Import the package and instantiate the object if the site config sets status to `True`
            3. If the reseeder activates successfully (after session check), append it to the list

        :return: None
        """
        # 1. Activate all used reseeders.
        Logger.info("Start to activate all the reseeder objects.")

        for config_name, package_name, class_name in Support_Site:
            if hasattr(setting, config_name):
                config = getattr(setting, config_name)
                if config.setdefault("status", False):
                    package = importlib.import_module(package_name)
                    autoseed_prototype = getattr(package, class_name)(**config)
                    if autoseed_prototype.status:
                        self.active_obj_list.append(autoseed_prototype)

        Logger.info("The assign reseeder objects: {lis}".format(lis=self.active_obj_list))

        # 2. Turn off the inactive reseeders, for database safety.
        unactive_tracker_list = [i for i in db.col_seed_list[3:]
                                 if i not in [obj.db_column for obj in self.active_obj_list]]

        def _shut_unreseeder_db():
            Logger.debug("Set un-reseeder's column into -1.")
            for tracker in unactive_tracker_list:  # Set un_reseed column into -1
                db.exec(sql="UPDATE `seed_list` SET `{cow}` = -1 WHERE `{cow}` = 0 ".format(cow=tracker))

        Thread(target=period_f, args=(_shut_unreseeder_db, 43200), daemon=True).start()
        Logger.info("Initialization settings Success~")
Example #31
File: site.py Project: tussyzb/Pt-Autoseed
    def torrent_feed(self, torrent):
        torrent = self._get_torrent(torrent)
        reseed_tag, = db.exec(
            "SELECT `{}` FROM `seed_list` WHERE `download_id` = {}".format(self.db_column, torrent.id)
        )

        if reseed_tag in [None, 0, "0"] and reseed_tag not in [-1, "-1"]:
            # This means the pre-reseed torrent has not been reseeded to this site before,
            # and the torrent is not marked as a never-reseed torrent.
            self._assist_delay()
            Logger.info("Autoseed-{mo} Get A feed torrent: {na}".format(mo=self.name, na=torrent.name))

            reseed_tag = -1
            try:
                reseed_tag = self.torrent_reseed(torrent)
            except Exception as e:  # TODO: update differently for different error cases (e.g. set 0 for network problems, 1 otherwise)
                err_name = type(e).__name__
                Logger.error(
                    "Reseed not success in Site: {} for torrent: {}, "
                    "With Exception: {}, {}".format(self.name, torrent.name, err_name, e)
                )
            finally:
                db.upsert_seed_list((reseed_tag, torrent.name, self.db_column))
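
The reseed_tag column read above encodes the per-site state; judging from the checks in this method and in torrent_reseed, the values appear to mean roughly the following (an interpretation, not project documentation):

# Apparent meaning of the per-site reseed_tag values, inferred from the surrounding code:
RESEED_TAG_MEANING = {
    None: "no record yet for this site",
    0: "not reseeded yet (candidate for torrent_reseed)",
    -1: "skip / not-to-reseed (also written when reseeding raises an exception)",
}
# A positive value would be the transmission id returned by torrent_reseed on success.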
Example #32
File: site.py Project: ch0317/Pt-Autoseed
    def torrent_feed(self, torrent):
        torrent = self._get_torrent(torrent)
        reseed_tag, = db.exec(
            "SELECT `{}` FROM `seed_list` WHERE `download_id` = {}".format(self.db_column, torrent.id)
        )

        if reseed_tag in [None, 0, "0"] and reseed_tag not in [-1, "-1"]:
            # This means the pre-reseed torrent has not been reseeded to this site before,
            # and the torrent is not marked as a never-reseed torrent.
            self._assist_delay()
            Logger.info("Autoseed-{mo} Get A feed torrent: {na}".format(mo=self.name, na=torrent.name))

            reseed_tag = -1
            try:
                reseed_tag = self.torrent_reseed(torrent)
            except Exception as e:  # TODO: update differently for different error cases (e.g. set 0 for network problems, 1 otherwise)
                err_name = type(e).__name__
                Logger.error(
                    "Reseed not success in Site: {} for torrent: {}, "
                    "With Exception: {}, {}".format(self.name, torrent.name, err_name, e)
                )
            finally:
                db.upsert_seed_list((reseed_tag, torrent.name, self.db_column))
Example #33
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# Copyright (c) 2017-2020 Rhilip <*****@*****.**>
# Licensed under the GNU General Public License v3.0

import time

from utils.controller import Controller
from utils.load.config import setting
from utils.load.handler import rootLogger

controller = Controller()  # Connect
rootLogger.info("Initialization settings Success~")
# -*- End of Loading Model -*-


def main():
    rootLogger.info(
        "Autoseed start~,will check database record at the First time.")
    i = 0
    while True:
        controller.update_torrent_info_from_rpc_to_db()  # Update the table
        controller.reseeders_update()  # Main reseed-judgment routine

        sleep_time = setting.sleep_free_time
        if setting.busy_start_hour <= int(time.strftime(
                "%H", time.localtime())) < setting.busy_end_hour:
            sleep_time = setting.sleep_busy_time

        rootLogger.debug("Check time {ti} OK, Reach check id {cid},"
                         " Will Sleep for {slt} seconds.".format(
Example #34
 def exist_torrent_title(self, tag):
     torrent_file_page = self.page_torrent_info(tid=tag, bs=True)
     torrent_file_info_table = torrent_file_page.find("ul", id="colapse")
     torrent_title = re.search(r"\[name\] \(\d+\): (?P<name>.+?) -", torrent_file_info_table.text).group("name")
     Logger.info("The torrent name for id({id}) is \"{name}\"".format(id=tag, name=torrent_title))
     return torrent_title
Example #35
    def torrent_reseed(self, torrent):
        name_pattern = self._get_torrent_ptn(torrent)
        if name_pattern:
            key_raw = re.sub(r"[_\-.']", " ",
                             name_pattern.group("search_name"))
            key_with_gp = "{gr} {search_key}".format(
                search_key=key_raw, gr=name_pattern.group("group"))
            key_with_gp_ep = "{ep} {gp_key}".format(
                gp_key=key_with_gp, ep=name_pattern.group("episode"))
        else:
            raise NoMatchPatternError(
                "No match pattern. Will Mark \"{}\" As Un-reseed torrent.".
                format(torrent.name))

        search_tag = self.exist_judge(key_with_gp_ep, torrent.name)
        if search_tag == 0 and not self._ASSIST_ONLY:
            # Non-existent repetition torrent (by local judge plugins), prepare to reseed
            torrent_raw_info_dict = None

            try:
                if self._GET_CLONE_ID_FROM_DB:
                    clone_id = db.get_data_clone_id(key=key_raw,
                                                    site=self.db_column)
                    if clone_id in [None, 0]:
                        raise KeyError(
                            "The db-record did not return a correct clone id."
                        )
                    elif clone_id != -1:  # -1 means: do not reseed to this site (set in the database).
                        torrent_raw_info_dict = self.torrent_clone(clone_id)
                        if not torrent_raw_info_dict:
                            raise ValueError(
                                "The clone torrent for the tid in the db-record does not exist."
                            )
                        Logger.debug(
                            "Get clone torrent info from \"DataBase\" OK, Which id: {}"
                            .format(clone_id))
                else:
                    raise KeyError(
                        "Configured not to get the clone torrent id from \"Database\".")
            except (KeyError, ValueError) as e:
                Logger.warning(
                    "{}, Try to search the clone info from the site, it may not be correct"
                    .format(e.args[0]))
                clone_id = self._DEFAULT_CLONE_TORRENT if self._DEFAULT_CLONE_TORRENT else 0  # USE Default clone id
                for key in [
                        key_with_gp, key_raw
                ]:  # USE The same group to search firstly and Then non-group tag
                    search_id = self.first_tid_in_search_list(key=key)
                    if search_id != 0:
                        clone_id = search_id  # The search result will cover the default setting.
                        break

                if clone_id != 0:
                    torrent_raw_info_dict = self.torrent_clone(clone_id)
                    Logger.info(
                        "Get clone torrent info from \"Reseed-Site\" OK, Which id: {cid}"
                        .format(cid=clone_id))

            if torrent_raw_info_dict:
                if self._ALLOW_CAT:
                    pre_reseed_cat = torrent_raw_info_dict.get("type")
                    if int(pre_reseed_cat) not in self._ALLOW_CAT:
                        raise NoCloneTorrentError(
                            "The clone torrent's category is not allowed.")

                Logger.info(
                    "Begin post The torrent {0},which name: {1}".format(
                        torrent.id, torrent.name))
                new_dict = self.date_raw_update(
                    torrent_name_search=name_pattern,
                    raw_info=torrent_raw_info_dict)
                multipart_data = self.data_raw2tuple(torrent,
                                                     raw_info=new_dict)
                flag = self.torrent_upload(torrent=torrent,
                                           data=multipart_data)
            else:
                raise NoCloneTorrentError(
                    "Can't find any clone torrent to use for \"{}\".".format(self.name))
        elif search_tag == -1:  # IF the torrents are present, but not consistent (When FORCE_JUDGE_DUPE_LOC is True)
            raise CannotAssistError(
                "Find dupe, and the exist torrent is not same as pre-reseed torrent. Stop Posting~"
            )
        else:  # The torrent is already released and can be assisted
            Logger.warning(
                "Find dupe torrent, which id: {0}, Automatically assist it~".
                format(search_tag))
            flag = self.torrent_download(tid=search_tag, thanks=False)

        return flag
Example #36
File: site.py Project: ch0317/Pt-Autoseed
 def _assist_delay(self):
     if self._ASSIST_ONLY:
         Logger.info("Autoseed-{mo} only allowed to assist."
                     "it will sleep {sl} Seconds to wait the reseed site "
                     "to have this torrent".format(mo=self.name, sl=self._ASSIST_DELAY_TIME))
         time.sleep(self._ASSIST_DELAY_TIME)
Example #37
    def torrent_clone(self, tid) -> dict:
        """
        Reconstruction from "BYRBT Info Clone" by Deparsoul, version 20170400, thanks.
        This function returns a dict including (split_title, small_title, imdb_url, db_url, descr, before_torrent_id).
        """
        return_dict = {}
        details_bs = self.page_torrent_detail(tid=tid, bs=True)
        title_search = re.search("种子详情 \"(?P<title>.*)\" - Powered",
                                 str(details_bs.title))
        if title_search:
            title = unescape(title_search.group("title"))
            Logger.info(
                "Get clone torrent's info,id: {tid},title: \"{ti}\"".format(
                    tid=tid, ti=title))
            title_dict = sort_title_info(
                raw_title=title,
                raw_type=details_bs.find("span", id="type").text.strip(),
                raw_sec_type=details_bs.find("span",
                                             id="sec_type").text.strip())
            return_dict.update(title_dict)
            body = details_bs.body
            imdb_url = dburl = ""
            if body.find(class_="imdbRatingPlugin"):
                imdb_url = 'http://www.imdb.com/title/' + body.find(
                    class_="imdbRatingPlugin")["data-title"]
                Logger.debug("Found imdb link:{link} for this torrent.".format(
                    link=imdb_url))
            if body.find("a", href=re.compile("://movie.douban.com/subject")):
                dburl = body.find(
                    "a", href=re.compile("://movie.douban.com/subject")).text
                Logger.debug(
                    "Found douban link:{link} for this torrent.".format(
                        link=dburl))
            # Update description
            descr = body.find(id="kdescr")

            # Restore the image link
            for img_tag in descr.find_all("img"):
                del img_tag["onload"]
                del img_tag["data-pagespeed-url-hash"]
                img_tag["src"] = unquote(
                    re.sub(
                        r"images/(?:(?:\d+x)+|x)(?P<raw>.*)\.pagespeed\.ic.*",
                        "images/\g<raw>", img_tag["src"]))

            # Remove unnecessary description (class: autoseed, byrbt_info_clone_ignore, byrbt_info_clone)
            for tag in descr.find_all(class_=pat_tag_pass_by_class):
                tag.extract()

            descr_out = re.search(r"<div id=\"kdescr\">(?P<in>.+)</div>$",
                                  str(descr), re.S).group("in")
            return_dict.update({
                "small_descr": body.find(id="subtitle").find("li").text,
                "url": imdb_url,
                "dburl": dburl,
                "descr": descr_out,
                "clone_id": tid
            })
        else:
            Logger.error("Error,this torrent may not exist or ConnectError")
        return return_dict
Example #38
    def _del_torrent_with_db(self):
        """Delete torrent(both download and reseed) with data from transmission and database"""
        Logger.debug(
            "Begin torrent's status check. If reach condition you set, You will get a warning."
        )

        time_now = time.time()
        t_all_list = tc.get_torrents()
        t_name_list = set(map(lambda x: x.name, t_all_list))

        for t_name in t_name_list:
            t_list = list(filter(lambda x: x.name == t_name, t_all_list))
            t_list_len = len(t_list)
            t_list_stop = 0
            for t in t_list:
                if t.status == "stopped":
                    t_list_stop += 1
                    continue

                _tid, _tname, _tracker = self._get_torrent_info(t)

                # 0 means OK, 1 means tracker warning, 2 means tracker error, 3 means local error.
                if t.error > 1:
                    tc.stop_torrent(t.id)
                    Logger.warning(
                        "Torrent Error, Torrent({tid}) \"{name}\" in Tracker \"{tracker}\" now stop, "
                        "Error code : {code} {msg}."
                        "With Uploaded {si:.2f} MiB, Ratio {ro:.2f} , Keep time {ho:.2f} h."
                        "".format(tid=_tid,
                                  name=_tname,
                                  tracker=_tracker,
                                  si=t.uploadedEver / 1024 / 1024,
                                  ro=t.uploadRatio,
                                  ho=(time.time() - t.startDate) / 60 / 60,
                                  code=t.error,
                                  msg=t.errorString))

                if int(time_now - t.addedDate) > TIME_TORRENT_KEEP_MIN:  # At least seed time
                    if setting.pre_delete_judge(torrent=t):
                        tc.stop_torrent(t.id)
                        Logger.warning(
                            "Reach Target you set, Torrent({tid}) \"{name}\" in Tracker \"{tracker}\" now stop, "
                            "With Uploaded {si:.2f} MiB, Ratio {ro:.2f} , Keep time {ho:.2f} h."
                            "".format(tid=_tid,
                                      name=_tname,
                                      tracker=_tracker,
                                      si=t.uploadedEver / 1024 / 1024,
                                      ro=t.uploadRatio,
                                      ho=(time.time() - t.startDate) / 60 /
                                      60))

            if t_list_stop == t_list_len:  # Delete torrents together with their data and db-records
                Logger.info(
                    "All torrents of \"{0}\" reach target, Will DELETE them soon."
                    .format(t_name))
                tid_list = map(lambda x: x.id, t_list)
                for tid in tid_list:
                    tc.remove_torrent(tid, delete_data=True)
                db.exec("DELETE FROM `seed_list` WHERE `title` = %s",
                        (t_name, ))