Example #1
def create_soup(url):
    logger.info()  # no-arg call: Alfa's logger records the calling module/function
    data = ""
    if ssl.OPENSSL_VERSION_INFO >= (1, 1, 1):
        response = httptools.downloadpage(url, ignore_response_code=True)
        if response.sucess:  # 'sucess' (sic) is the attribute name defined by Alfa's httptools
            data = response.data
    elif alfa_assistant.open_alfa_assistant():
        data = alfa_assistant.get_source_by_page_finished(url,
                                                          5,
                                                          closeAfter=True)
        if not data:
            platformtools.dialog_ok(
                "Alfa Assistant: Error",
                "ACTIVE la app Alfa Assistant para continuar")  # "ACTIVATE the Alfa Assistant app to continue"
            data = alfa_assistant.get_source_by_page_finished(url,
                                                              5,
                                                              closeAfter=True)
            if not data:
                return False
        data = alfa_assistant.find_htmlsource_by_url_pattern(data, url)
        if isinstance(data, dict):
            data = data.get('source', '')
            if not data:
                return False
    if data:
        soup = BeautifulSoup(data, "html5lib", from_encoding="utf-8")
        return soup

    return False
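
A minimal call-site sketch (hypothetical URL; assumes the channel module's usual imports of logger and BeautifulSoup are in scope):

# Hypothetical usage: create_soup returns a parsed BeautifulSoup tree or False
soup = create_soup("https://example.com/catalogue")
if soup:
    for link in soup.find_all("a", href=True):
        logger.debug(link["href"])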
Example #2
def play(item):
    from lib import alfa_assistant
    logger.info()
    itemlist = []
    video_urls = []
    alfa_assistant.open_alfa_assistant()
    data = alfa_assistant.get_source_by_page_finished(item.url,
                                                      4,
                                                      closeAfter=True)
    for visited in data["urlsVisited"]:  # assumes the Assistant returned a dict listing visited URLs
        if "/cdn8/" in visited["url"]:
            url = visited["url"]
            url = httptools.downloadpage(
                url, follow_redirects=False).headers["location"]
            if not url.startswith("https"):
                url = "https:%s" % url
            itemlist.append(
                item.clone(action="play", contentTitle=item.title, url=url))
    alfa_assistant.close_alfa_assistant()
    return itemlist
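
A hypothetical call site; Item is assumed to be Alfa's core.item.Item, which accepts arbitrary keyword attributes:

# Hypothetical usage: resolve the playable URLs behind a catalogue entry
from core.item import Item
media = Item(channel="example", title="Some video", url="https://example.com/watch/123")
for playable in play(media):
    logger.debug(playable.url)  # each clone carries a resolved /cdn8/ redirect target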
Example #3
def mainlist(item):
    if ssl.OPENSSL_VERSION_INFO < (1, 1, 1):
        if not alfa_assistant.open_alfa_assistant():
            platformtools.dialog_ok(
                "Alfa Assistant: Error",
                "NECESITAS la app Alfa Assistant para ver este canal")  # "You NEED the Alfa Assistant app to view this channel"
            return
    logger.info()
    itemlist = []

    autoplay.init(item.channel, list_servers, list_quality)
    itemlist.append(item.clone(title="Nuevos", action="lista", url=host))
    itemlist.append(item.clone(title="Canal", action="sub_menu", url=host))
    itemlist.append(item.clone(title="Buscar", action="search"))

    autoplay.show_option(item.channel, itemlist)

    return itemlist
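
A quick smoke-test sketch (hypothetical): on systems with OpenSSL older than 1.1.1 and no Assistant installed, mainlist returns None; otherwise it returns the menu entries:

menu = mainlist(Item(channel="example"))
if menu:
    logger.debug([entry.title for entry in menu])  # ["Nuevos", "Canal", "Buscar", ...]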
Example #4
def get_cl(resp, timeout=20, debug=False, extraPostDelay=15, retry=False, blacklist=True, retryIfTimeout=True, **kwargs):
    blacklist_clear = True
    if 'hideproxy' in resp.url or 'webproxy' in resp.url or kwargs.get('proxies'):
        blacklist_clear = False
        blacklist = False
    
    if timeout < 15: timeout = 20
    if timeout + extraPostDelay > 35: timeout = 20

    domain_full = urlparse.urlparse(resp.url).netloc
    domain = domain_full
    if blacklist and not retry: 
        blacklist_clear = check_blacklist(domain_full)
    
    if blacklist_clear:
        host = config.get_system_platform()[:1]  # first letter of the platform name, used in the telemetry codes below
        
        freequent_data = [domain, 'CF2,0.0.0,0,%s0,NoApp' % host]
        
        check_assistant = alfa_assistant.open_alfa_assistant(getWebViewInfo=True, retry=retry)
        if not isinstance(check_assistant, dict) and retry:
            alfa_assistant.close_alfa_assistant()
            time.sleep(2)
            check_assistant = alfa_assistant.open_alfa_assistant(getWebViewInfo=True, retry=True)
            if not check_assistant:
                time.sleep(10)
                check_assistant = alfa_assistant.get_generic_call('getWebViewInfo', timeout=2, alfa_s=True)
            
        if check_assistant and isinstance(check_assistant, dict):

            if check_assistant.get('assistantLatestVersion') and check_assistant.get('assistantVersion'):
                installed_version = check_assistant['assistantVersion'].split('.')
                available_version = check_assistant['assistantLatestVersion'].split('.')
                newer = False
                # Compare dotted version strings component by component
                # (assumes both versions have the same number of components)
                for i, ver in enumerate(available_version):
                    if int(ver) > int(installed_version[i]):
                        newer = True
                        break
                    if int(ver) < int(installed_version[i]):
                        break
                if newer:
                    help_window.show_info('cf_2_02', wait=False)

            ua = get_ua(check_assistant)
            
            try:
                vers = int(scrapertools.find_single_match(ua, r"Android\s*(\d+)"))
            except:
                vers = 0

            wvbVersion = check_assistant.get('wvbVersion', '0.0.0').split('.')[0]
            if len(wvbVersion) > 3: wvbVersion = wvbVersion[:2]
            freequent_data[1] = 'CF2,%s,%s,%s%s,' % (check_assistant.get('assistantVersion', '0.0.0'), wvbVersion, host, vers)

            if vers:
                dan = {'User-Agent': ua}
                resp.headers.update(dict(dan))
                ua = None
            else:
                ua = httptools.get_user_agent()

            logger.debug("UserAgent: %s || Android Vrs: %s" % (ua, vers))

            jscode = get_jscode(1, 'KEYCODE_ENTER', 1)

            url_cf = scrapertools.find_single_match(resp.url, r'(http.*\:\/\/(?:www\S*.)?\w+\.\w+(?:\.\w+)?)(?:\/)?') + '|cf_clearance'

            data_assistant = alfa_assistant.get_urls_by_page_finished(resp.url, timeout=timeout, getCookies=True, userAgent=ua,
                                                                        disableCache=True, debug=debug, jsCode=jscode,
                                                                        extraPostDelay=extraPostDelay, clearWebCache=True, 
                                                                        removeAllCookies=True, returnWhenCookieNameFound=url_cf,
                                                                        retryIfTimeout=retryIfTimeout
                                                                        )
            logger.debug("data assistant: %s" % data_assistant)

            domain_ = domain
            split_lst = domain.split(".")

            # Trim "sub.example.com" down to ".example.com" so the
            # cf_clearance cookie applies to the whole registered domain
            if len(split_lst) > 2:
                domain = domain.replace(split_lst[0], "")
            
            if not domain.startswith('.'):
                domain = "."+domain
            
            get_ua(data_assistant)

            if isinstance(data_assistant, dict) and data_assistant.get("cookies", None):

                logger.debug("Lista cookies: %s" % data_assistant.get("cookies", []))
                for cookie in data_assistant["cookies"]:
                    cookieslist = cookie.get("cookiesList", None)
                    val = scrapertools.find_single_match(cookieslist, r'cf_clearance=([A-Za-z0-9_\-.]+)')
                    dom = cookie.get("urls", None)
                    logger.debug("dominios: %s" % dom[0])

                    if 'cf_clearance' in cookieslist and val:
                        
                        dict_cookie = {'domain': domain,
                                       'name': 'cf_clearance',
                                       'value': val}
                        if domain_ in dom[0]:
                            httptools.set_cookies(dict_cookie)
                            rin = {'Server': 'Alfa'}

                            resp.headers.update(dict(rin))
                            logger.debug("cf_clearence=%s" % val)
                            
                            if not retry:
                                freequent_data[1] += 'OK'
                            else:
                                freequent_data[1] += 'OK_R'
                            freequency(freequent_data)

                            return resp

                    else:
                        logger.error("No cf_clearance")
                else:  # for-else: the loop found no usable cf_clearance cookie
                    freequent_data[1] += 'NO-CFC'
            else:
                freequent_data[1] += 'ERR'
                logger.error("No Cookies o Error en conexión con Alfa Assistant")

            if not retry:
                config.set_setting('cf_assistant_ua', '')
                logger.debug("No se obtuvieron resultados, reintentando...")  # "No results obtained, retrying..."
                return get_cl(resp, timeout=timeout-5, extraPostDelay=extraPostDelay,
                              retry=True, blacklist=True, retryIfTimeout=False, **kwargs)
        elif host == 'a':  # 'a' = Android (first letter of the platform name)
            help_window.show_info('cf_2_01')
        
        freequency(freequent_data)
        
        if filetools.exists(PATH_BL):
            bl_data = jsontools.load(filetools.read(PATH_BL))
        else:
            bl_data = {}
        bl_data[domain_full] = time.time()
        filetools.write(PATH_BL, jsontools.dump(bl_data))

    msg = ('Detected a Cloudflare version 2 Captcha challenge. '
           'This feature is not available in the opensource (free) version.')
    resp.status_code = msg
    
    raise CloudflareChallengeError(msg)
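
This variant returns the patched response only from inside the cookie loop; every failure path falls through to the unconditional raise above, so callers should treat CloudflareChallengeError as the failure signal. A hedged sketch:

# Hypothetical wrapper: try to solve the challenge, fall back on failure
try:
    resp = get_cl(resp, timeout=25, extraPostDelay=10)
except CloudflareChallengeError as e:
    logger.error("CF v2 challenge not solved: %s" % e)
    resp = None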
Example #5
def get_cl(resp, timeout=20, debug=False, extraPostDelay=15, retry=True):
    check_assistant = alfa_assistant.open_alfa_assistant(getWebViewInfo=True)
    if check_assistant and isinstance(check_assistant, dict):
        
        ua = get_ua(check_assistant)
        
        try:
            vers = int(scrapertools.find_single_match(ua, r"Android\s*(\d+)"))
        except:
            vers = 0

        if vers:
            dan = {'User-Agent': ua}
            resp.headers.update(dict(dan))
            ua = None
        else:
            ua = httptools.get_user_agent()

        logger.debug("UserAgent: %s || Android Vrs: %s" % (ua, vers))
        
        jscode = get_jscode(1, 'KEYCODE_ENTER', 1)

        data_assistant = alfa_assistant.get_source_by_page_finished(resp.url, timeout=timeout, getCookies=True, userAgent=ua,
                                                                    disableCache=True, debug=debug, jsCode=jscode,
                                                                    extraPostDelay=extraPostDelay, clearWebCache=True, 
                                                                    removeAllCookies=True
                                                                    )
        
        logger.debug("data assistant: %s" % data_assistant)
        
        domain = urlparse(resp.url).netloc
        domain_ = domain
        split_lst = domain.split(".")

        if len(split_lst) > 2:
            domain = domain.replace(split_lst[0], "")
        
        if not domain.startswith('.'):
            domain = "."+domain
        
        get_ua(data_assistant)

        if isinstance(data_assistant, dict) and data_assistant.get("cookies", None):
            
            for cookie in data_assistant["cookies"]:
                cookieslist = cookie.get("cookiesList", None)
                val = scrapertools.find_single_match(cookieslist, r'cf_clearance=([A-Za-z0-9_\-.]+)')
                dom = cookie.get("urls", None)
                #logger.debug("dominios: %s" % dom[0])
                #logger.debug("Lista cookies: %s" % cookieslist)

                if 'cf_clearance' in cookieslist and val:
                    
                    dict_cookie = {'domain': domain,
                                   'name': 'cf_clearance',
                                   'value': val}
                    if domain_ in dom[0]:
                        httptools.set_cookies(dict_cookie)
                        rin = {'Server': 'Alfa'}

                        resp.headers.update(dict(rin))
                        #logger.debug("cf_clearence=%s" %s val)

                        return resp
                    else:
                        logger.error("No cf_clearance for %s" % domain_)

                else: 
                    logger.error("No cf_clearance")
        else:
            logger.error("No Cookies o Error en conexión con Alfa Assistant")

        if retry:
            config.set_setting('cf_assistant_ua', '')
            logger.debug("No se obtuvieron resultados, reintentando...")
            return get_cl(resp, timeout=timeout-5, extraPostDelay=extraPostDelay, retry=False)



    msg = ('Detected a Cloudflare version 2 Captcha challenge. '
           'This feature is not available in the opensource (free) version.')
    
    resp.status_code = msg

    logger.error('Detected a Cloudflare version 2 Hcaptcha challenge')
    
    return False
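
Unlike the previous variant, this one returns False instead of raising when the challenge is not solved, so a truthiness check is enough (hypothetical sketch):

result = get_cl(resp)
if result:
    resp = result  # httptools now holds the cf_clearance cookie for the domain
else:
    logger.error("Challenge not solved; giving up on this URL")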
Example #6
def get_source(url,
               resp,
               timeout=5,
               debug=False,
               extraPostDelay=5,
               retry=False,
               blacklist=True,
               headers=None,
               retryIfTimeout=True,
               cache=False,
               clearWebCache=False,
               mute=True,
               alfa_s=False,
               elapsed=0,
               **kwargs):
    blacklist_clear = True
    data = ''
    source = False
    if not elapsed: elapsed = time.time()
    elapsed_max = 40
    expiration = config.get_setting('cf_assistant_bl_expiration',
                                    default=30) * 60
    expiration_final = 0
    security_error_blackout = (5 * 60) - expiration

    if debug: alfa_s = False

    if not resp:
        # Build a minimal stand-in response object: a throwaway class whose
        # class attributes come from the dict (status_code, headers)
        resp = {'status_code': 429, 'headers': {}}
        resp = type('HTTPResponse', (), resp)

    if not alfa_s: logger.debug("ERROR de descarga: %s" % resp.status_code)

    opt = kwargs.get('opt', {})

    domain_full = urlparse.urlparse(url).netloc
    domain = domain_full
    pcb = base64.b64decode(
        config.get_setting('proxy_channel_bloqued')).decode('utf-8')
    if ('hideproxy' in url or 'webproxy' in url or 'hidester' in url or '__cpo=' in url
            or httptools.TEST_ON_AIR or domain in pcb):
        blacklist_clear = False
        blacklist = False

    if timeout + extraPostDelay > 35: timeout = 20

    if blacklist and not retry:
        blacklist_clear = check_blacklist(domain_full)

    host = config.get_system_platform()[:1]
    freequent_data = [domain, 'Cha,0.0.0,0,%s0,BlakL' % host]
    if blacklist_clear:
        freequent_data = [domain, 'Cha,0.0.0,0,%s0,App' % host]
        if not retry:
            freequent_data[1] += 'KO'
        else:
            freequent_data[1] += 'KO_R'

        check_assistant = alfa_assistant.open_alfa_assistant(
            getWebViewInfo=True, retry=True, assistantLatestVersion=False)
        if not isinstance(check_assistant, dict) and not retry:
            alfa_assistant.close_alfa_assistant()
            time.sleep(2)
            check_assistant = alfa_assistant.open_alfa_assistant(
                getWebViewInfo=True, retry=True, assistantLatestVersion=False)
            logger.debug("Reintento en acceder al Assistant: %s - %s" \
                         % ('OK' if isinstance(check_assistant, dict) else 'ERROR', time.time() - elapsed))

        if check_assistant and isinstance(check_assistant, dict):

            if check_assistant.get(
                    'assistantLatestVersion') and check_assistant.get(
                        'assistantVersion'):
                installed_version = check_assistant['assistantVersion'].split(
                    '.')
                available_version = check_assistant[
                    'assistantLatestVersion'].split('.')
                newer = False
                for i, ver in enumerate(available_version):
                    if int(ver) > int(installed_version[i]):
                        newer = True
                        break
                    if int(ver) < int(installed_version[i]):
                        break
                if newer:
                    help_window.show_info('cf_2_02', wait=False)

            ua = get_ua(check_assistant)

            try:
                vers = int(
                    scrapertools.find_single_match(ua, r"Android\s*(\d+)"))
            except:
                vers = 0

            wvbVersion = check_assistant.get('wvbVersion',
                                             '0.0.0').split('.')[0]
            if len(wvbVersion) > 3: wvbVersion = wvbVersion[:2]
            freequent_data[1] = 'Cha,%s,%s,%s%s,' % (check_assistant.get(
                'assistantVersion', '0.0.0'), wvbVersion, host, vers)
            if not retry:
                freequent_data[1] += 'Src'
            else:
                freequent_data[1] += 'Src_R'

            if vers:
                dan = {'User-Agent': ua}
                resp.headers.update(dict(dan))
                ua = None
            else:
                ua = httptools.get_user_agent()

            if not alfa_s:
                logger.debug("UserAgent: %s || Android Vrs: %s" % (ua, vers))

            jscode = None

            url_cf = scrapertools.find_single_match(
                url, r'(http.*\:\/\/(?:www\S*.)?\w+\.\w+(?:\.\w+)?)(?:\/)?'
            ) + '|cf_clearance'

            data_assistant = alfa_assistant.get_source_by_page_finished(
                url,
                timeout=timeout,
                getCookies=True,
                userAgent=ua,
                disableCache=cache,
                debug=debug,
                jsCode=jscode,
                extraPostDelay=extraPostDelay,
                clearWebCache=clearWebCache,
                removeAllCookies=True,
                returnWhenCookieNameFound=url_cf,
                retryIfTimeout=retryIfTimeout,
                useAdvancedWebView=True,
                headers=headers,
                mute=mute,
                alfa_s=alfa_s)
            if not alfa_s: logger.debug("data assistant: %s" % data_assistant)

            if isinstance(data_assistant, dict) and data_assistant.get('htmlSources', []) \
                                                and data_assistant['htmlSources'][0].get('source', ''):
                try:
                    data = base64.b64decode(data_assistant['htmlSources'][0]
                                            ['source']).decode('utf-8')
                    source = True
                except:
                    pass

                if source and 'accessing a cross-origin frame' in data:
                    source = False
                    retry = True
                    expiration_final = security_error_blackout
                    freequent_data[1] = 'Cha,%s,%s,%s%s,' % (
                        check_assistant.get('assistantVersion',
                                            '0.0.0'), wvbVersion, host, vers)
                    freequent_data[1] += 'KO_SecE'
                    logger.error('Error SEGURIDAD: %s - %s' %
                                 (expiration_final, data[:100]))  # "SECURITY error"

                if source:
                    freequent_data[1] = 'Cha,%s,%s,%s%s,' % (
                        check_assistant.get('assistantVersion',
                                            '0.0.0'), wvbVersion, host, vers)
                    if not retry:
                        freequent_data[1] += 'OK'
                    else:
                        freequent_data[1] += 'OK_R'

            if not source and not retry:
                config.set_setting('cf_assistant_ua', '')
                logger.debug("No se obtuvieron resultados, reintentando...")
                timeout = -1 if timeout < 0 else timeout * 2
                extraPostDelay = -1 if extraPostDelay < 0 else extraPostDelay * 2
                return get_source(url,
                                  resp,
                                  timeout=timeout,
                                  debug=debug,
                                  extraPostDelay=extraPostDelay,
                                  retry=True,
                                  blacklist=blacklist,
                                  retryIfTimeout=retryIfTimeout,
                                  cache=cache,
                                  clearWebCache=clearWebCache,
                                  alfa_s=False,
                                  headers=headers,
                                  mute=mute,
                                  elapsed=elapsed,
                                  **kwargs)

            domain_ = domain
            split_lst = domain.split(".")

            if len(split_lst) > 2:
                domain = domain.replace(split_lst[0], "")

            if not domain.startswith('.'):
                domain = "." + domain

            get_ua(data_assistant)

            if isinstance(data_assistant, dict) and data_assistant.get(
                    "cookies", None):

                if not alfa_s:
                    logger.debug("Lista cookies: %s" %
                                 data_assistant.get("cookies", []))
                for cookie in data_assistant["cookies"]:
                    cookieslist = cookie.get("cookiesList", None)
                    val = scrapertools.find_single_match(
                        cookieslist, r'cf_clearance=([A-Za-z0-9_\-.]+)')
                    #val = scrapertools.find_single_match(cookieslist, 'cf_clearance=([^;]+)')
                    dom = cookie.get("urls", None)
                    if not alfa_s: logger.debug("dominios: %s" % dom[0])

                    if 'cf_clearance' in cookieslist and val:

                        dict_cookie = {
                            'domain': domain,
                            'name': 'cf_clearance',
                            'value': val
                        }
                        if domain_ in dom[0]:
                            httptools.set_cookies(dict_cookie)
                            rin = {'Server': 'Alfa'}

                            resp.headers.update(dict(rin))
                            freequent_data[1] += 'C'
                            if not alfa_s:
                                logger.debug("cf_clearence=%s" % val)

        elif host == 'a':
            help_window.show_info('cf_2_01')

    freequency(freequent_data)

    if blacklist_clear and (not source or time.time() - elapsed > elapsed_max):
        if filetools.exists(PATH_BL):
            bl_data = jsontools.load(filetools.read(PATH_BL))
        else:
            bl_data = {}
        if time.time() - elapsed > elapsed_max:
            bl_data[domain_full] = time.time() + elapsed_max * 10 * 60
        else:
            bl_data[domain_full] = time.time() + expiration_final
        if not debug and not httptools.TEST_ON_AIR:
            filetools.write(PATH_BL, jsontools.dump(bl_data))
    if not source:
        resp.status_code = 429
    else:
        resp.status_code = 200

    return data, resp
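
A call-site sketch (hypothetical): get_source always returns the (data, resp) pair, and resp.status_code is forced to 200 only when HTML was actually recovered. Passing resp=None exercises the stand-in response built at the top of the function:

data, resp = get_source(url, None, timeout=10, extraPostDelay=5)
if resp.status_code == 200:
    itemlist = parse_catalogue(data)  # parse_catalogue is a placeholder for channel-specific parsing
else:
    logger.error("Cloudflare challenge page could not be bypassed")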