Code example #1
    def resolve(self, url, cookie_jar, user_agent):
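        """Solve Cloudflare's legacy 'jschl' JavaScript challenge: read the
        challenge parameters from the page, compute the answer, wait, and
        submit it to /cdn-cgi/l/chk_jschl, returning the follow-up response."""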
        headers = {'User-agent': user_agent, 'Referer': url}

        try:
            cookie_jar.load(ignore_discard=True)
        except Exception as e:
            logger.info(e)

        opener = mechanize.build_opener(
            mechanize.HTTPCookieProcessor(cookie_jar))

        request = mechanize.Request(url)
        for key in headers:
            request.add_header(key, headers[key])

        try:
            response = opener.open(request)
        except mechanize.HTTPError as e:
            response = e

        body = response.read()

        cookie_jar.extract_cookies(response, request)
        cookie_helper.check_cookies(cookie_jar)

        parsed_url = urlparse(url)
        submit_url = "%s://%s/cdn-cgi/l/chk_jschl" % (parsed_url.scheme,
                                                      parsed_url.netloc)

        params = {}

        try:
            params["jschl_vc"] = re.search(r'name="jschl_vc" value="(\w+)"',
                                           body).group(1)
            params["pass"] = re.search(r'name="pass" value="(.+?)"',
                                       body).group(1)

            js = self._extract_js(body)
        except AttributeError:
            # re.search() found no match, so the expected challenge markup is missing
            return None

        params["jschl_answer"] = str(js + len(parsed_url.netloc))

        sParameters = urllib.urlencode(params, True)

        request = mechanize.Request("%s?%s" % (submit_url, sParameters))
        for key in headers:
            request.add_header(key, headers[key])

        # the challenge page expects a short delay before the answer is submitted
        sleep(5)

        try:
            response = opener.open(request)
        except mechanize.HTTPError as e:
            response = e

        return response
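
A minimal usage sketch for the method above, assuming it lives on a resolver class (the class name, cookie file path, and user agent below are invented for illustration) and passing the three arguments the method expects:

    import cookielib

    cookie_jar = cookielib.MozillaCookieJar('/tmp/cf_cookies.txt')  # hypothetical path
    user_agent = 'Mozilla/5.0 (X11; Linux x86_64)'                  # any consistent UA string

    resolver = CloudflareResolver()  # hypothetical class hosting resolve()
    response = resolver.resolve('http://example.org/', cookie_jar, user_agent)
    if response is not None:
        html = response.read()

Note that resolve() loads and extends the jar but does not save it; as in code example #3, the caller is expected to call cookie_jar.save() afterwards.
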
Code example #2
    def resolve(self, url, cookie_jar, user_agent):
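        """Solve a script-based protection check: fetch the script referenced by
        the page's xhr.open("GET", ...) call, store the cookie it defines in the
        jar, and re-request the original URL with that cookie set."""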
        headers = {'User-agent': user_agent, 'Referer': url}

        try:
            cookie_jar.load(ignore_discard=True)
        except Exception as e:
            logger.info(e)

        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie_jar))

        request = urllib2.Request(url)
        for key in headers:
            request.add_header(key, headers[key])

        try:
            response = opener.open(request)
        except urllib2.HTTPError as e:
            response = e

        body = response.read()

        cookie_jar.extract_cookies(response, request)
        cookie_helper.check_cookies(cookie_jar)

        # extract the target of the page's xhr.open("GET", ...) call
        pattern = r'xhr\.open\("GET","([^,]+),'
        match = cParser.parse(body, pattern)
        if not match[0]:
            return
        urlParts = match[1][0].split('"')
        parsed_url = urlparse(url)
        sid = '1200'
        script_url = '%s://%s%s%s%s' % (parsed_url.scheme, parsed_url.netloc, urlParts[0], sid, urlParts[2])
        request = urllib2.Request(script_url)
        for key in headers:
            request.add_header(key, headers[key])
        try:
            response = opener.open(request)
        except urllib2.HTTPError as e:
            response = e

        body = response.read()
        cookie_jar.extract_cookies(response, request)
        cookie_helper.check_cookies(cookie_jar)

        if not self.checkBFCookie(body):
            return body  # even if it's False, it's probably not the right content; we'll see
        cookie = self.getCookieString(body)
        if not cookie:
            return

        name, value = cookie.split(';')[0].split('=')
        cookieData = dict((k.strip(), v.strip()) for k, v in (item.split("=") for item in cookie.split(";")))
        cookie = cookie_helper.create_cookie(name, value, domain=cookieData['domain'], expires=sys.maxint, discard=False)
        cookie_jar.set_cookie(cookie)
        request = urllib2.Request(url)
        for key in headers:
            request.add_header(key, headers[key])

        try:
            response = opener.open(request)
        except urllib2.HTTPError as e:
            response = e
        return response
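
For illustration, the cookie-string handling above behaves roughly as follows on a made-up value (the cookie name and domain are invented for this example):

    cookie = 'BFCache=abc123; domain=.example.org; path=/'  # invented sample value
    name, value = cookie.split(';')[0].split('=')           # 'BFCache', 'abc123'
    cookieData = dict((k.strip(), v.strip())
                      for k, v in (item.split('=') for item in cookie.split(';')))
    # cookieData == {'BFCache': 'abc123', 'domain': '.example.org', 'path': '/'}

The unpacking assumes every segment is a plain key=value pair; a value that itself contains '=' would raise a ValueError, which the original code does not guard against.
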
Code example #3
        except httplib.HTTPException as e:
            if not self.ignoreErrors:
                xbmcgui.Dialog().ok('xStream', str(e))
            logger.error("HTTPException " + str(e) + " Url: " + self.__sUrl)
            return ''

        sContent = oResponse.read()

        checked_response = self.__check_protection(sContent, user_agent,
                                                   cookieJar)
        if checked_response:
            oResponse = checked_response
            sContent = oResponse.read()

        cookieJar.extract_cookies(oResponse, oRequest)
        cookie_helper.check_cookies(cookieJar)
        cookieJar.save(ignore_discard=self.__bIgnoreDiscard,
                       ignore_expires=self.__bIgnoreExpired)

        self.__sResponseHeader = oResponse.info()
        # handle gzipped content
        if self.__sResponseHeader.get('Content-Encoding') == 'gzip':
            import gzip
            import StringIO
            data = StringIO.StringIO(sContent)
            gzipper = gzip.GzipFile(fileobj=data, mode='rb')
            try:
                sContent = gzipper.read()
            except Exception:
                # fall back to whatever GzipFile decoded before the stream was cut off
                sContent = gzipper.extrabuf
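
As an aside, the same decompression can be done with zlib, which tolerates truncated gzip payloads without falling back to GzipFile's internal extrabuf buffer; a minimal sketch, assuming sContent holds the raw gzip-encoded body:

    import zlib

    # 16 + MAX_WBITS makes zlib expect a gzip header and trailer
    decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS)
    sContent = decompressor.decompress(sContent)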