Example #1
    def request(self, method, url, *args, **kwargs):
        parsed_url = urlparse(url)
        domain = parsed_url.netloc

        cache_key = "cf_data3_%s" % domain

        if not self.cookies.get("cf_clearance", "", domain=domain):
            cf_data = region.get(cache_key)
            if cf_data is not NO_VALUE:
                cf_cookies, hdrs = cf_data
                logger.debug("Trying to use old cf data for %s: %s", domain,
                             cf_data)
                for cookie, value in six.iteritems(cf_cookies):
                    self.cookies.set(cookie, value, domain=domain)

                self.headers = hdrs

        ret = self._request(method, url, *args, **kwargs)

        try:
            cf_data = self.get_cf_live_tokens(domain)
        except Exception:
            pass
        else:
            if cf_data and "cf_clearance" in cf_data[0] and cf_data[0][
                    "cf_clearance"]:
                if cf_data != region.get(cache_key):
                    logger.debug("Storing cf data for %s: %s", domain, cf_data)
                    region.set(cache_key, cf_data)
                elif cf_data[0]["cf_clearance"]:
                    logger.debug("CF Live tokens not updated")

        return ret
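
All of these snippets assume a module-level dogpile.cache region object. A minimal sketch of such a setup (the backend and filename here are illustrative assumptions, not taken from any of these projects):

from dogpile.cache import make_region
from dogpile.cache.api import NO_VALUE  # region.get() returns NO_VALUE on a cache miss

region = make_region().configure(
    'dogpile.cache.dbm',                  # any dogpile.cache backend works here
    expiration_time=86400,                # region-wide default TTL, in seconds
    arguments={'filename': 'cache.dbm'},  # backend-specific arguments
)

A per-call expiration_time, as in region.get("os_token", expiration_time=3600), overrides the region default for that single lookup.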
Example #2
    def request(self, method, url, *args, **kwargs):
        parsed_url = urlparse(url)
        domain = parsed_url.netloc

        cache_key = "cf_data3_%s" % domain

        if not self.cookies.get("cf_clearance", "", domain=domain):
            cf_data = region.get(cache_key)
            if cf_data is not NO_VALUE:
                cf_cookies, hdrs = cf_data
                logger.debug("Trying to use old cf data for %s: %s", domain, cf_data)
                for cookie, value in cf_cookies.iteritems():
                    self.cookies.set(cookie, value, domain=domain)

                self.headers = hdrs

        ret = self._request(method, url, *args, **kwargs)

        try:
            cf_data = self.get_cf_live_tokens(domain)
        except Exception:
            pass
        else:
            if cf_data and "cf_clearance" in cf_data[0] and cf_data[0]["cf_clearance"]:
                if cf_data != region.get(cache_key):
                    logger.debug("Storing cf data for %s: %s", domain, cf_data)
                    region.set(cache_key, cf_data)
                elif cf_data[0]["cf_clearance"]:
                    logger.debug("CF Live tokens not updated")

        return ret
Example #3
    def request(self, method, url, *args, **kwargs):
        parsed_url = urlparse(url)
        domain = parsed_url.netloc

        cache_key = "cf_data2_%s" % domain

        if not self.cookies.get("cf_clearance", "", domain=domain):
            cf_data = region.get(cache_key)
            if cf_data is not NO_VALUE:
                cf_cookies, user_agent, hdrs = cf_data
                logger.debug("Trying to use old cf data for %s: %s", domain, cf_data)
                for cookie, value in cf_cookies.iteritems():
                    self.cookies.set(cookie, value, domain=domain)

                self._hdrs = hdrs
                self._ua = user_agent
                self.headers['User-Agent'] = self._ua

        ret = super(CFSession, self).request(method, url, *args, **kwargs)

        if self._was_cf:
            self._was_cf = False
            logger.debug("We've hit CF, trying to store previous data")
            try:
                cf_data = self.get_cf_live_tokens(domain)
            except Exception:
                logger.debug("Couldn't get CF live tokens for re-use. Cookies: %r", self.cookies)
            else:
                if cf_data != region.get(cache_key) and cf_data[0]["cf_clearance"]:
                    logger.debug("Storing cf data for %s: %s", domain, cf_data)
                    region.set(cache_key, cf_data)

        return ret
Example #4
    def request(self, method, url, *args, **kwargs):
        parsed_url = urlparse(url)
        domain = parsed_url.netloc

        cache_key = "cf_data_%s" % domain

        if not self.cookies.get("__cfduid", "", domain=domain):
            cf_data = region.get(cache_key)
            if cf_data is not NO_VALUE:
                cf_cookies, user_agent = cf_data
                logger.debug("Trying to use old cf data for %s: %s", domain,
                             cf_data)
                for cookie, value in cf_cookies.iteritems():
                    self.cookies.set(cookie, value, domain=domain)

                self.headers['User-Agent'] = user_agent

        ret = super(CFSession, self).request(method, url, *args, **kwargs)

        try:
            cf_data = self.get_live_tokens(domain)
        except Exception:
            pass
        else:
            if cf_data != region.get(cache_key) and self.cookies.get("__cfduid", "", domain=domain)\
                    and self.cookies.get("cf_clearance", "", domain=domain):
                logger.debug("Storing cf data for %s: %s", domain, cf_data)
                region.set(cache_key, cf_data)

        return ret
Example #5
def test_fill_addic7ed_show_id_cache(monkeypatch):
    # Use subliminal cache (not our own cache since the provider cache is used from within subliminal)
    filename = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'resources', 'test.subliminal.cache.dbm'))
    region.configure(backend='dogpile.cache.dbm', arguments={'filename': filename, 'lock_factory': MutexLock})
    monkeypatch.setattr('autosubliminal.ADDIC7EDSHOWNAMEMAPPING', {'show1': 1, 'show2': 2})
    fill_addic7ed_show_id_cache()
    assert region.get(ADDIC7ED_SEARCH_SHOW_ID_CACHE_PREFIX + '|show1') == 1
    assert region.get(CUSTOM_ADDIC7ED_SEARCH_SHOW_ID_CACHE_PREFIX + '|show1') == 1
    assert region.get(ADDIC7ED_SEARCH_SHOW_ID_CACHE_PREFIX + '|show2') == 2
    assert region.get(CUSTOM_ADDIC7ED_SEARCH_SHOW_ID_CACHE_PREFIX + '|show2') == 2
    assert not region.get('unknown')
Example #6
    def initialize(self):
        token_cache = region.get("os_token")
        url_cache = region.get("os_server_url")

        if token_cache is not NO_VALUE and url_cache is not NO_VALUE:
            self.token = token_cache.decode("utf-8")
            self.server = self.get_server_proxy(url_cache.decode("utf-8"))
            logger.debug("Using previous login token: %r", self.token[:10] + "X" * (len(self.token) - 10))
        else:
            self.server = None
            self.token = None
Example #7
    def initialize(self):
        if self.is_vip:
            self.server = self.get_server_proxy(self.vip_url)
            logger.info("Using VIP server")
        else:
            self.server = self.get_server_proxy(self.default_url)

        logger.info('Logging in')

        token = region.get("os_token")
        if token is not NO_VALUE:
            try:
                logger.debug('Trying previous token: %r',
                             token[:10] + "X" * (len(token) - 10))
                checked(lambda: self.server.NoOperation(token))
                self.token = token
                logger.debug("Using previous login token: %r",
                             token[:10] + "X" * (len(token) - 10))
                return
            except (NoSession, Unauthorized):
                logger.debug('Token not valid.')

        try:
            self.log_in()

        except Unauthorized:
            if self.is_vip:
                logger.info("VIP server login failed, falling back")
                self.log_in(self.default_url)
                if self.token:
                    return

            logger.error("Login failed, please check your credentials")
Example #8
    def download_archive_and_add_subtitle_files(self, link, language, video,
                                                fps, num_cds):
        logger.info('Downloading subtitle %r', link)
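        # key the cache on the SHA-1 digest of the download link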
        cache_key = sha1(link.encode("utf-8")).digest()
        request = region.get(cache_key)
        if request is NO_VALUE:
            request = self.session.get(
                link, headers={'Referer': 'https://subsunacs.net/search.php'})
            request.raise_for_status()
            region.set(cache_key, request)
        else:
            logger.info('Cache file: %s',
                        codecs.encode(cache_key, 'hex_codec').decode('utf-8'))

        try:
            archive_stream = io.BytesIO(request.content)
            if is_rarfile(archive_stream):
                return self.process_archive_subtitle_files(
                    RarFile(archive_stream), language, video, link, fps,
                    num_cds)
            elif is_zipfile(archive_stream):
                return self.process_archive_subtitle_files(
                    ZipFile(archive_stream), language, video, link, fps,
                    num_cds)
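            # seek(0) rewinds the stream (and returns the new offset, 0) before
            # the 7z check, since is_rarfile/is_zipfile may have advanced it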
            elif archive_stream.seek(0) == 0 and is_7zfile(archive_stream):
                return self.process_archive_subtitle_files(
                    SevenZipFile(archive_stream), language, video, link, fps,
                    num_cds)
        except Exception:
            pass

        logger.error('Ignore unsupported archive %r', request.headers)
        region.delete(cache_key)
        return []
Example #9
 def initialize(self):
     self.token = region.get("oscom_token")
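     # a cache miss returns NO_VALUE, which is falsy, so we fall through to login()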
     if self.token:
         self.session.headers.update({'Authorization': 'Beaker ' + self.token})
         return True
     else:
         self.login()
Example #10
    def download_archive_and_add_subtitle_files(self, link, language, video, fps, subs_id):
        logger.info('Downloading subtitle %r', link)
        cache_key = sha1(link.encode("utf-8")).digest()
        request = region.get(cache_key)
        if request is NO_VALUE:
            time.sleep(1)
            request = self.retry(self.session.post(link, data={
                'id': subs_id,
                'lng': language.basename.upper()
            }, headers={
                'referer': link
            }, allow_redirects=False))
            if not request:
                return []
            request.raise_for_status()
            region.set(cache_key, request)
        else:
            logger.info('Cache file: %s', codecs.encode(cache_key, 'hex_codec').decode('utf-8'))

        try:
            archive_stream = io.BytesIO(request.content)
            if is_rarfile(archive_stream):
                return self.process_archive_subtitle_files(RarFile(archive_stream), language, video, link, fps, subs_id)
            elif is_zipfile(archive_stream):
                return self.process_archive_subtitle_files(ZipFile(archive_stream), language, video, link, fps, subs_id)
        except Exception:
            pass

        logger.error('Ignore unsupported archive %r', request.headers)
        region.delete(cache_key)
        return []
Example #11
    def _create_filters(self, languages):
        self.filters = dict(HearingImpaired="2")
        acc_filters = self.filters.copy()
        if self.only_foreign:
            self.filters["ForeignOnly"] = "True"
            acc_filters["ForeignOnly"] = self.filters["ForeignOnly"].lower()
            logger.info("Only searching for foreign/forced subtitles")

        selected_ids = []
        for l in languages:
            lid = language_ids.get(l.basename,
                                   language_ids.get(l.alpha3, None))
            if lid:
                selected_ids.append(str(lid))

        acc_filters["SelectedIds"] = selected_ids
        self.filters["LanguageFilter"] = ",".join(acc_filters["SelectedIds"])

        last_filters = region.get("subscene_filters")
        if last_filters != acc_filters:
            region.set("subscene_filters", acc_filters)
            logger.debug("Setting account filters to %r", acc_filters)
            self.session.post("https://u.subscene.com/filter",
                              acc_filters,
                              allow_redirects=False)

        logger.debug("Filter created: '%s'" % self.filters)
Example #12
    def initialize(self):
        if self.is_vip:
            self.server = self.get_server_proxy(self.vip_url)
            logger.info("Using VIP server")
        else:
            self.server = self.get_server_proxy(self.default_url)

        logger.info('Logging in')

        token = region.get("os_token")
        if token is not NO_VALUE:
            try:
                logger.debug('Trying previous token: %r', token[:10]+"X"*(len(token)-10))
                checked(lambda: self.server.NoOperation(token))
                self.token = token
                logger.debug("Using previous login token: %r", token[:10]+"X"*(len(token)-10))
                return
            except (NoSession, Unauthorized):
                logger.debug('Token not valid.')

        try:
            self.log_in()

        except Unauthorized:
            if self.is_vip:
                logger.info("VIP server login failed, falling back")
                self.log_in(self.default_url)
                if self.token:
                    return

            logger.error("Login failed, please check your credentials")
Example #13
    def initialize(self):
        if self.is_vip:
            self.server = self.get_server_proxy(self.vip_url)
            logger.info("Using VIP server")
        else:
            self.server = self.get_server_proxy(self.default_url)

        logger.info('Logging in')

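        # a cached token older than an hour is treated as a miss (NO_VALUE)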
        token = region.get("os_token", expiration_time=3600)
        if token is not NO_VALUE:
            try:
                logger.debug('Trying previous token')
                checked(lambda: self.server.NoOperation(token))
                self.token = token
                logger.debug("Using previous login token: %s", self.token)
                return
            except Exception:
                pass

        try:
            self.log_in()

        except Unauthorized:
            if self.is_vip:
                logger.info("VIP server login failed, falling back")
                self.log_in(self.default_url)
                if self.token:
                    return

            logger.error("Login failed, please check your credentials")
Example #14
    def initialize(self):
        self.session = Session()
        self.session.headers[
            'User-Agent'] = 'Subliminal/%s' % subliminal.__short_version__

        if self.USE_ADDICTED_RANDOM_AGENTS:
            from .utils import FIRST_THOUSAND_OR_SO_USER_AGENTS as AGENT_LIST
            logger.debug("Addic7ed: using random user agents")
            self.session.headers['User-Agent'] = AGENT_LIST[randint(
                0,
                len(AGENT_LIST) - 1)]
            self.session.headers['Referer'] = self.server_url

        # login
        if self.username and self.password:
            ccks = region.get("addic7ed_cookies", expiration_time=86400)
            if ccks != NO_VALUE:
                try:
                    self.session.cookies._cookies.update(ccks)
                    r = self.session.get(self.server_url + 'panel.php',
                                         allow_redirects=False,
                                         timeout=10)
                    if r.status_code == 302:
                        logger.info('Addic7ed: Login expired')
                        region.delete("addic7ed_cookies")
                    else:
                        logger.info('Addic7ed: Reusing old login')
                        self.logged_in = True
                        return
                except Exception:
                    pass

            logger.info('Addic7ed: Logging in')
            data = {
                'username': self.username,
                'password': self.password,
                'Submit': 'Log in'
            }
            r = self.session.post(
                self.server_url + 'dologin.php',
                data,
                allow_redirects=False,
                timeout=10,
                headers={"Referer": self.server_url + "login.php"})

            if "relax, slow down" in r.content:
                raise TooManyRequests(self.username)

            if r.status_code != 302:
                raise AuthenticationError(self.username)

            region.set("addic7ed_cookies", self.session.cookies._cookies)

            logger.debug('Addic7ed: Logged in')
            self.logged_in = True
Example #15
    def get_show_id(self, series, year=None, country_code=None, ignore_cache=False):
        """Get the best matching show id for `series`, `year` and `country_code`.

        First search in the result of :meth:`_get_show_ids` and fallback on a search with :meth:`_search_show_id`.

        :param str series: series of the episode.
        :param year: year of the series, if any.
        :type year: int
        :param country_code: country code of the series, if any.
        :type country_code: str
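        :param bool ignore_cache: ignore the cached show ids and re-fetch them.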
        :return: the show id, if found.
        :rtype: int
        """
        show_id = None
        ids_to_look_for = {sanitize(series).lower(), sanitize(series.replace(".", "")).lower(),
                           sanitize(series.replace("&", "and")).lower()}
        show_ids = self._get_show_ids()
        if ignore_cache or not show_ids:
            show_ids = self._get_show_ids.refresh(self)

        logger.debug("Trying show ids: %s", ids_to_look_for)
        for series_sanitized in ids_to_look_for:
            # attempt with country
            if not show_id and country_code:
                logger.debug('Getting show id with country')
                show_id = show_ids.get('%s %s' % (series_sanitized, country_code.lower()))

            # attempt with year
            if not show_id and year:
                logger.debug('Getting show id with year')
                show_id = show_ids.get('%s %d' % (series_sanitized, year))

            # attempt clean
            if not show_id:
                logger.debug('Getting show id')
                show_id = show_ids.get(series_sanitized)

                if not show_id:
                    now = datetime.datetime.now()
                    last_fetch = region.get(self.last_show_ids_fetch_key)

                    # re-fetch show ids once per day if any show ID not found
                    if not ignore_cache and last_fetch != NO_VALUE and last_fetch + datetime.timedelta(days=1) < now:
                        logger.info("Show id not found; re-fetching show ids")
                        return self.get_show_id(series, year=year, country_code=country_code, ignore_cache=True)
                    logger.debug("Not refreshing show ids, as the last fetch has been too recent")

            # search as last resort
            # broken right now
            # if not show_id:
            #     logger.warning('Series %s not found in show ids', series)
            #     show_id = self._search_show_id(series)

        return show_id
Example #16
def find_endpoint(session, content=None):
    endpoint = region.get("subscene_endpoint2")
    if endpoint is NO_VALUE:
        if not content:
            content = session.get(SITE_DOMAIN).text

        m = ENDPOINT_RE.search(content)
        if m:
            endpoint = m.group(1).strip()
            logger.debug("Switching main endpoint to %s", endpoint)
            region.set("subscene_endpoint2", endpoint)
    return endpoint
Example #17
def load_verification(site_name, session, callback=lambda x: None):
    ccks = region.get("%s_data" % site_name, expiration_time=15552000)  # ~6 months
    if ccks != NO_VALUE:
        cookies, user_agent = ccks
        logger.debug("%s: Re-using previous user agent: %s", site_name.capitalize(), user_agent)
        session.headers["User-Agent"] = user_agent
        try:
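            # update requests' internal cookielib store with the cached cookies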
            session.cookies._cookies.update(cookies)
            return callback(region)
        except Exception:
            return False
    return False
Example #18
    def initialize(self):
        logger.info("Creating session")
        self.session = RetryingCFSession()

        prev_cookies = region.get("subscene_cookies2")
        if prev_cookies != NO_VALUE:
            logger.debug("Re-using old subscene cookies: %r", prev_cookies)
            self.session.cookies.update(prev_cookies)

        else:
            logger.debug("Logging in")
            self.login()
Example #19
 def initialize(self):
     logger.debug("Legendasdivx.pt :: Creating session for requests")
     self.session = RetryingCFSession()
     # re-use PHP Session if present
     prev_cookies = region.get("legendasdivx_cookies2")
     if prev_cookies != NO_VALUE:
         logger.debug("Legendasdivx.pt :: Re-using previous legendasdivx cookies: %s", prev_cookies)
         self.session.cookies.update(prev_cookies)
     # login if session has expired
     else:
         logger.debug("Legendasdivx.pt :: Session cookies not found!")
         self.session.headers.update(self.headers)
         self.login()
Example #20
def load_verification(site_name, session, callback=lambda x: None):
    ccks = region.get("%s_data" % site_name, expiration_time=15552000)  # ~6 months
    if ccks != NO_VALUE:
        cookies, user_agent = ccks
        logger.debug("%s: Re-using previous user agent: %s",
                     site_name.capitalize(), user_agent)
        session.headers["User-Agent"] = user_agent
        try:
            session.cookies._cookies.update(cookies)
            return callback(region)
        except Exception:
            return False
    return False
Example #21
    def request(self, method, url, *args, **kwargs):
        parsed_url = urlparse(url)
        domain = parsed_url.netloc

        cache_key = "cf_data2_%s" % domain

        if not self.cookies.get("cf_clearance", "", domain=domain):
            cf_data = region.get(cache_key)
            if cf_data is not NO_VALUE:
                cf_cookies, user_agent, hdrs = cf_data
                logger.debug("Trying to use old cf data for %s: %s", domain,
                             cf_data)
                for cookie, value in cf_cookies.iteritems():
                    self.cookies.set(cookie, value, domain=domain)

                self._hdrs = hdrs
                self._ua = user_agent
                self.headers['User-Agent'] = self._ua

        ret = super(CFSession, self).request(method, url, *args, **kwargs)

        if self._was_cf:
            self._was_cf = False
            logger.debug("We've hit CF, trying to store previous data")
            try:
                cf_data = self.get_cf_live_tokens(domain)
            except Exception:
                logger.debug(
                    "Couldn't get CF live tokens for re-use. Cookies: %r",
                    self.cookies)
            else:
                if cf_data != region.get(
                        cache_key) and cf_data[0]["cf_clearance"]:
                    logger.debug("Storing cf data for %s: %s", domain, cf_data)
                    region.set(cache_key, cf_data)

        return ret
Example #22
    def request(self, host, handler, request_body, verbose=0):
        """
        Make an xmlrpc request.
        """
        url = self._build_url(host, handler)
        cache_key = "xm%s_%s" % (self.xm_ver, host)

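        # no live session cookie; try to restore cookies/headers from the cache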
        old_sessvar = self.session.cookies.get(self.session_var, "")
        if not old_sessvar:
            data = region.get(cache_key)
            if data is not NO_VALUE:
                logger.debug("Trying to re-use headers/cookies for %s" % host)
                self.session.cookies, self.session.headers = data
                old_sessvar = self.session.cookies.get(self.session_var, "")

        try:
            resp = self.session.post(url, data=request_body,
                                     stream=True, timeout=self.timeout, proxies=self.proxies,
                                     verify=self.verify)

            if self.session_var in resp.cookies and resp.cookies[self.session_var] != old_sessvar:
                logger.debug("Storing %s cookies" % host)
                region.set(cache_key, [self.session.cookies, self.session.headers])
        except ValueError:
            logger.debug("Wiping cookies/headers cache (VE) for %s" % host)
            region.delete(cache_key)
            raise
        except Exception:
            logger.debug("Wiping cookies/headers cache (EX) for %s" % host)
            region.delete(cache_key)
            raise  # something went wrong
        else:
            try:
                resp.raise_for_status()
            except requests.exceptions.HTTPError:
                logger.debug("Wiping cookies/headers cache (RE) for %s" % host)
                region.delete(cache_key)
                raise

            try:
                if 'x-ratelimit-remaining' in resp.headers and int(resp.headers['x-ratelimit-remaining']) <= 2:
                    raise APIThrottled()
            except ValueError:
                logger.info('Couldn\'t parse "x-ratelimit-remaining": %r' % resp.headers['x-ratelimit-remaining'])

            self.verbose = verbose
            try:
                return self.parse_response(resp.raw)
            except Exception:
                logger.debug("Bad response data: %r", resp.raw)
Example #23
    def download_subtitle(self, subtitle):
        last_dls = region.get("addic7ed_dls")
        now = datetime.datetime.now()
        one_day = datetime.timedelta(hours=24)

        def raise_limit():
            logger.info("Addic7ed: Downloads per day exceeded (%s)", cap)
            raise DownloadLimitPerDayExceeded

        if not isinstance(last_dls, list):
            last_dls = []
        else:
            # keep only downloads from the last 24 hours
            last_dls = [t for t in last_dls if t + one_day > now]
            region.set("addic7ed_dls", last_dls)

        cap = 80 if self.vip else 40
        amount = len(last_dls)

        if amount >= cap:
            raise_limit()

        # download the subtitle
        r = self.session.get(self.server_url + subtitle.download_link,
                             headers={'Referer': subtitle.page_link},
                             timeout=10)
        r.raise_for_status()

        if r.status_code == 304:
            raise TooManyRequests()

        if not r.content:
            # the provider wrongfully returns 304 Not Modified with empty content;
            # raise_for_status won't raise an exception for that status code
            logger.error(
                'Addic7ed: Unable to download subtitle. No data returned from provider'
            )
            return

        # detect download limit exceeded
        if r.headers['Content-Type'] == 'text/html':
            raise DownloadLimitExceeded

        subtitle.content = fix_line_ending(r.content)
        last_dls.append(datetime.datetime.now())
        region.set("addic7ed_dls", last_dls)
        logger.info("Addic7ed: Used %s/%s downloads", amount + 1, cap)

        if amount + 1 >= cap:
            raise_limit()
Example #24
    def initialize(self):
        self.session = RetryingCFSession()
        #load_verification("titlovi", self.session)

        token = region.get("titlovi_token")
        if token is not NO_VALUE:
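            # the cached value is a (user_id, login_token, expiry datetime) tuple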
            self.user_id, self.login_token, self.token_exp = token
            if datetime.now() > self.token_exp:
                logger.debug('Token expired')
                self.log_in()
            else:
                logger.debug('Use cached token')
        else:
            logger.debug('Token not found in cache')
            self.log_in()
Example #25
    def initialize(self):
        logger.info('Logging in')

        token = region.get("os_token", expiration_time=3600)
        if token is not NO_VALUE:
            try:
                checked(self.server.NoOperation(token))
                self.token = token
                logger.info("Using previous login token: %s", self.token)
                return
            except Exception:
                pass

        try:
            self.log_in()

        except Unauthorized:
            if self.is_vip:
                logger.info("VIP server login failed, falling back")
                self.log_in(self.default_url)
Example #26
def search(term,
           release=True,
           session=None,
           year=None,
           limit_to=SearchTypes.Exact,
           throttle=0):
    # note to subscene: if you actually start to randomize the endpoint, we'll have to query your server even more

    if release:
        endpoint = "release"
    else:
        endpoint = region.get("subscene_endpoint2")
        if endpoint is NO_VALUE:
            ret = session.get(SITE_DOMAIN)
            time.sleep(throttle)
            m = ENDPOINT_RE.search(ret.text)
            if m:
                endpoint = m.group(1).strip()
                logger.debug("Switching main endpoint to %s", endpoint)
                region.set("subscene_endpoint2", endpoint)

    soup = soup_for("%s/subtitles/%s" % (SITE_DOMAIN, endpoint),
                    data={"query": term},
                    session=session)

    if soup:
        if "Subtitle search by" in str(soup):
            rows = soup.find("table").tbody.find_all("tr")
            subtitles = Subtitle.from_rows(rows)
            return Film(term, subtitles=subtitles)

        for _, search_type in SearchTypes.__members__.items():
            if section_exists(soup, search_type):
                return get_first_film(soup,
                                      search_type,
                                      year=year,
                                      session=session)

            if limit_to == search_type:
                return
Example #27
 def initialize(self):
     self.login()
     self.token = region.get("oscom_token")
Example #28
    def query(self, language, title, season=None, episode=None, year=None):
        # search for titles
        sanitized_title = sanitize(title)
        titles = self.search_titles(sanitized_title, season)

        # search for titles with the quote or dot character
        ignore_characters = {'\'', '.'}
        if any(c in title for c in ignore_characters):
            titles.update(self.search_titles(sanitize(title, ignore_characters=ignore_characters), season))

        subtitles = []
        # iterate over titles
        for title_id, t in titles.items():
            # discard mismatches on title
            sanitized_result = sanitize(t['title'])
            if sanitized_result != sanitized_title:
                logger.debug("Mismatched title, discarding title %d (%s)",
                             title_id, sanitized_result)
                continue

            # episode
            if season and episode:
                # discard mismatches on type
                if t['type'] != 'episode':
                    logger.debug("Mismatched 'episode' type, discarding title %d (%s)", title_id, sanitized_result)
                    continue

                # discard mismatches on season
                if 'season' not in t or t['season'] != season:
                    logger.debug('Mismatched season %s, discarding title %d (%s)',
                                 t.get('season'), title_id, sanitized_result)
                    continue
            # movie
            else:
                # discard mismatches on type
                if t['type'] != 'movie':
                    logger.debug("Mismatched 'movie' type, discarding title %d (%s)", title_id, sanitized_result)
                    continue

                # discard mismatches on year
                if year is not None and 'year' in t and t['year'] != year:
                    logger.debug("Mismatched movie year, discarding title %d (%s)", title_id, sanitized_result)
                    continue

            # iterate over title's archives
            for a in self.get_archives(title_id, language.legendastv):
                # clean name of path separators and pack flags
                clean_name = a.name.replace('/', '-')
                if a.pack and clean_name.startswith('(p)'):
                    clean_name = clean_name[3:]

                # guess from name
                guess = guessit(clean_name, {'type': t['type']})

                # episode
                if season and episode:
                    # discard mismatches on episode in non-pack archives
                    if not a.pack and 'episode' in guess and guess['episode'] != episode:
                        logger.debug('Mismatched episode %s, discarding archive: %s',
                                     guess['episode'], a.name)
                        continue

                # compute an expiration time based on the archive timestamp
                expiration_time = (datetime.utcnow().replace(tzinfo=pytz.utc) - a.timestamp).total_seconds()

                # attempt to get the releases from the cache
                cache_item = releases_key.format(archive_id=a.id, archive_name=a.name)
                releases = region.get(cache_item, expiration_time=expiration_time)

                # the releases are not in cache or cache is expired
                if releases is NO_VALUE:
                    logger.info('Releases not found in cache')

                    # download archive
                    self.download_archive(a)

                    # extract the releases
                    releases = []
                    for name in a.content.namelist():
                        # discard the legendastv file
                        if name.startswith('Legendas.tv'):
                            continue

                        # discard hidden files
                        if os.path.split(name)[-1].startswith('.'):
                            continue

                        # discard non-subtitle files
                        if not name.lower().endswith(SUBTITLE_EXTENSIONS):
                            continue

                        releases.append(name)

                    # cache the releases
                    region.set(cache_item, releases)

                # iterate over releases
                for r in releases:
                    subtitle = LegendasTVSubtitle(language, t['type'], t['title'], t.get('year'), t.get('imdb_id'),
                                                  t.get('season'), a, r)
                    logger.debug('Found subtitle %r', subtitle)
                    subtitles.append(subtitle)

        return subtitles
Example #29
 def initialize(self):
     self.token = region.get("oscom_token",
                             expiration_time=TOKEN_EXPIRATION_TIME)
     if self.token is NO_VALUE:
         self.login()
Example #30
    def query(self, video):
        # vfn = get_video_filename(video)
        subtitles = []
        # logger.debug(u"Searching for: %s", vfn)
        # film = search(vfn, session=self.session)
        #
        # if film and film.subtitles:
        #     logger.debug('Release results found: %s', len(film.subtitles))
        #     subtitles = self.parse_results(video, film)
        # else:
        #     logger.debug('No release results found')

        # time.sleep(self.search_throttle)
        prev_cookies = region.get("subscene_cookies2")
        if prev_cookies != NO_VALUE:
            logger.debug("Re-using old subscene cookies: %r", prev_cookies)
            self.session.cookies.update(prev_cookies)

        else:
            logger.debug("Logging in")
            self.login()

        # re-search for episodes without explicit release name
        if isinstance(video, Episode):
            # term = u"%s S%02iE%02i" % (video.series, video.season, video.episode)
            more_than_one = len([video.series] + video.alternative_series) > 1
            for series in set([video.series] + video.alternative_series):
                term = u"%s - %s Season" % (series, p.number_to_words("%sth" % video.season).capitalize())
                logger.debug('Searching for alternative results: %s', term)
                film = self.do_search(term, session=self.session, release=False, throttle=self.search_throttle)
                if film and film.subtitles:
                    logger.debug('Alternative results found: %s', len(film.subtitles))
                    subtitles += self.parse_results(video, film)
                else:
                    logger.debug('No alternative results found')

                # packs
                # if video.season_fully_aired:
                #     term = u"%s S%02i" % (series, video.season)
                #     logger.debug('Searching for packs: %s', term)
                #     time.sleep(self.search_throttle)
                #     film = search(term, session=self.session, throttle=self.search_throttle)
                #     if film and film.subtitles:
                #         logger.debug('Pack results found: %s', len(film.subtitles))
                #         subtitles += self.parse_results(video, film)
                #     else:
                #         logger.debug('No pack results found')
                # else:
                #     logger.debug("Not searching for packs, because the season hasn't fully aired")
                if more_than_one:
                    time.sleep(self.search_throttle)
        else:
            more_than_one = len([video.title] + video.alternative_titles) > 1
            for title in set([video.title] + video.alternative_titles):
                logger.debug('Searching for movie results: %r', title)
                film = self.do_search(title, year=video.year, session=self.session, limit_to=None, release=False,
                                      throttle=self.search_throttle)
                if film and film.subtitles:
                    subtitles += self.parse_results(video, film)
                if more_than_one:
                    time.sleep(self.search_throttle)

        logger.info("%s subtitles found" % len(subtitles))
        return subtitles