def mainlist(item):
    """Build the News channel root menu: one entry per content category plus settings."""
    logger.info()

    # (localized-title id, news mode, thumbnail file) — one row per menu entry.
    categories = (
        (30122, "peliculas", "channels_movie.png"),
        (70208, "4k", "channels_movie_4k.png"),
        (70209, "terror", "channels_horror.png"),
        (60510, "infantiles", "channels_children.png"),
        (60511, "series", "channels_tvshow.png"),
        (60512, "anime", "channels_anime.png"),
        (70014, "castellano", "channels_spanish.png"),
        (59976, "latino", "channels_latino.png"),
        (70171, "torrent", "channels_torrent.png"),
        (60513, "documentales", "channels_documentary.png"),
    )

    itemlist = list()
    for label_id, mode, thumb in categories:
        entry = Item(channel=item.channel, action="news", news=mode,
                     title=config.get_localized_string(label_id),
                     thumbnail=get_thumb(thumb))
        set_category_context(entry)
        itemlist.append(entry)

    # Settings entry always goes last.
    itemlist.append(Item(channel="news", title="Configuración", action="news_setting",
                         thumbnail=get_thumb("setting_0.png")))

    help_window.show_info("news")
    return itemlist
def verify_login(force_check=False, force_login=True):
    """Return the cached HDFull session, re-authenticating when required.

    force_check: ignore the cached `account` and validate the stored
                 credentials again.
    force_login: when no session could be established, prompt the user (once)
                 to configure credentials and retry the login.
    Returns the account returned by login(), or False when no session exists.
    """
    global account, credentials_req
    credentials = config.get_setting('hdfulluser', channel='hdfull')
    # Fast path: reuse the cached session unless the caller forces a re-check.
    if account and credentials and not force_check:
        return account
    else:
        if credentials:
            account = login()
        else:
            account = False
    if not account:
        logger.info('NO LOGIN credentials', force=True)
        if force_login:
            if credentials_req:
                # Show the login help window and open the channel settings so
                # the user can enter credentials, then retry the login once.
                help_window.show_info('hdfull_login', wait=False)
                settingCanal(Item)
                credentials = config.get_setting('hdfulluser', channel='hdfull')
                if credentials:
                    account = login()
                else:
                    # NOTE(review): reconstructed nesting — assumed this else
                    # belongs to `if credentials:` (user declined to configure,
                    # so stop prompting on later calls). Confirm against upstream.
                    credentials_req = False
    return account
def mainlist(item):
    """Root menu of the global search channel.

    Offers per-type searches (movies, TV shows, everything, people),
    saved searches, extra search tools and the settings entries.
    """
    logger.info()

    ch = item.channel
    itemlist = []
    # Per-mode search entries.
    itemlist.append(Item(channel=ch, action='new_search', mode='movie',
                         title=config.get_localized_string(70741) % config.get_localized_string(30122),
                         thumbnail=get_thumb("search.png")))
    itemlist.append(Item(channel=ch, action='new_search', mode='tvshow',
                         title=config.get_localized_string(70741) % config.get_localized_string(30123),
                         thumbnail=get_thumb("search_tvshow.png")))
    itemlist.append(Item(channel=ch, action='new_search', mode='all',
                         title=config.get_localized_string(70276),
                         thumbnail=get_thumb("search_generic.png")))
    itemlist.append(Item(channel=ch, action='new_search', mode='person', page=1,
                         title=config.get_localized_string(70741) % config.get_localized_string(70314),
                         thumbnail=get_thumb("search_star.png")))
    # Utility / configuration entries.
    itemlist.append(Item(channel=ch, action='saved_search',
                         title=config.get_localized_string(59995),
                         thumbnail=get_thumb('folder.png')))
    itemlist.append(Item(channel=ch, action='sub_menu',
                         title=config.get_localized_string(60420),
                         thumbnail=get_thumb('search_more.png')))
    itemlist.append(Item(channel=ch, action='opciones',
                         title=config.get_localized_string(59994),
                         thumbnail=get_thumb('setting_0.png')))
    itemlist.append(Item(channel=ch, action='settings',
                         title=config.get_localized_string(30100),
                         thumbnail=get_thumb('setting_0.png')))

    itemlist = set_context(itemlist)
    help_window.show_info("search")
    return itemlist
def autoscan():
    """Background loop that periodically scans for newly available content.

    Runs only while the 'autoscan' setting of the 'info_popup' section is on.
    Between scans it sleeps for the configured interval (8, 12 or 24 hours)
    and exits as soon as Kodi requests shutdown.
    """
    if not config.get_setting("autoscan", "info_popup"):
        return
    else:
        help_window.show_info("infopopup", wait=False)
    monitor = xbmc.Monitor()
    hours = [8, 12, 24]
    # autoscan_timer is an index into `hours`; convert to seconds.
    timer = config.get_setting("autoscan_timer", "info_popup")
    timer = hours[timer] * 3600
    while not monitor.abortRequested():
        # BUG FIX: the original used target=now_available(), which ran the
        # scan synchronously in this thread and handed its return value to
        # Thread. Pass the callable itself so the scan runs in the worker.
        t = Thread(target=now_available)
        t.start()
        # waitForAbort returns True when Kodi is shutting down.
        if monitor.waitForAbort(timer):
            break
def mainlist(item):
    """Top-level menu of the HDFull channel.

    Tries to log in with the stored credentials; the account-bound entries
    (channel settings / logout) are only shown once a session exists.
    """
    logger.info()
    itemlist = []
    # Attempt a login only when credentials have been configured.
    if config.get_setting('hdfulluser', channel='hdfull'):
        account = login()
    else:
        account = False
    if not account:
        logger.info('NO LOGIN credentials', force=True)
        help_window.show_info('hdfull_login', wait=False)

    # Register the autoplay options for this channel.
    autoplay.init(item.channel, list_servers, list_quality)

    itemlist.append(Item(channel=item.channel, action="menupeliculas", title="Películas", url=host,
                         thumbnail=get_thumb('movies', auto=True), text_bold=True))
    itemlist.append(Item(channel=item.channel, action="menuseries", title="Series", url=host,
                         thumbnail=get_thumb('tvshows', auto=True), text_bold=True))
    itemlist.append(Item(channel=item.channel, action="search", title="Buscar...",
                         thumbnail=get_thumb('search', auto=True), text_bold=True))

    itemlist = filtertools.show_option(itemlist, item.channel, list_language, list_quality)
    autoplay.show_option(item.channel, itemlist)

    if not account:
        # No session: prompt the user to enable the account.
        itemlist.append(Item(channel=item.channel, action="settingCanal", url="", text_bold=True,
                             title="[COLOR dodgerblue]Habilita tu cuenta para activar los items de usuario...[/COLOR]",
                             thumbnail=get_thumb("setting_0.png")))
    else:
        # Logged in: spacer row, then settings and logout entries.
        # NOTE(review): reconstructed nesting — the three appends below are
        # assumed to belong to the logged-in branch (logout only makes sense
        # with a session); confirm against upstream.
        itemlist.append(Item(channel=item.channel, action="", url="", title="", folder=False,
                             thumbnail=get_thumb("setting_0.png")))
        itemlist.append(Item(channel=item.channel, action="settingCanal", url="",
                             title="[COLOR greenyellow][B]Configurar Canal[/B][/COLOR]",
                             thumbnail=get_thumb("setting_0.png"), folder=False))
        itemlist.append(Item(channel=item.channel, action="logout", url="", folder=False,
                             title="[COLOR steelblue][B]Desloguearse[/B][/COLOR]",
                             plot="Para cambiar de usuario", thumbnail=get_thumb("back.png")))
    return itemlist
def get_cl(resp, timeout=20, debug=False, extraPostDelay=15, retry=False, blacklist=True, retryIfTimeout=True, **kwargs):
    """Try to solve a Cloudflare v2 challenge through the Alfa Assistant app.

    Drives the Assistant WebView over resp.url until a `cf_clearance` cookie
    appears, stores it via httptools and returns `resp` on success.  On final
    failure the domain is blacklisted and CloudflareChallengeError is raised.

    NOTE(review): this body was reconstructed from whitespace-mangled source;
    the statement tokens are unchanged but the nesting is a best-effort
    reconstruction — verify against upstream before relying on edge branches.
    """
    blacklist_clear = True
    # Proxied requests are never blacklisted (the proxy, not the site, failed).
    if 'hideproxy' in resp.url or 'webproxy' in resp.url or kwargs.get('proxies'):
        blacklist_clear = False
        blacklist = False
    # Clamp the WebView timeout into a sane window.
    if timeout < 15:
        timeout = 20
    if timeout + extraPostDelay > 35:
        timeout = 20

    domain_full = urlparse.urlparse(resp.url).netloc
    domain = domain_full
    if blacklist and not retry:
        blacklist_clear = check_blacklist(domain_full)

    if blacklist_clear:
        host = config.get_system_platform()[:1]
        # Telemetry record: [domain, status-string]; the second field is
        # extended below as the attempt progresses.
        freequent_data = [domain, 'CF2,0.0.0,0,%s0,NoApp' % host]

        check_assistant = alfa_assistant.open_alfa_assistant(getWebViewInfo=True, retry=retry)
        if not isinstance(check_assistant, dict) and retry:
            # Assistant did not answer: restart it and try once more.
            alfa_assistant.close_alfa_assistant()
            time.sleep(2)
            check_assistant = alfa_assistant.open_alfa_assistant(getWebViewInfo=True, retry=True)
            if not check_assistant:
                time.sleep(10)
                check_assistant = alfa_assistant.get_generic_call('getWebViewInfo', timeout=2, alfa_s=True)

        if check_assistant and isinstance(check_assistant, dict):
            if check_assistant.get('assistantLatestVersion') and check_assistant.get('assistantVersion'):
                # Compare dotted versions component-by-component to detect an
                # available Assistant update.
                installed_version = check_assistant['assistantVersion'].split('.')
                available_version = check_assistant['assistantLatestVersion'].split('.')
                newer = False
                for i, ver in enumerate(available_version):
                    if int(ver) > int(installed_version[i]):
                        newer = True
                        break
                    if int(ver) < int(installed_version[i]):
                        break
                if newer:
                    help_window.show_info('cf_2_02', wait=False)

            ua = get_ua(check_assistant)
            try:
                vers = int(scrapertools.find_single_match(ua, r"Android\s*(\d+)"))
            except:
                vers = 0
            wvbVersion = check_assistant.get('wvbVersion', '0.0.0').split('.')[0]
            if len(wvbVersion) > 3:
                wvbVersion = wvbVersion[:2]
            freequent_data[1] = 'CF2,%s,%s,%s%s,' % (check_assistant.get('assistantVersion', '0.0.0'), wvbVersion, host, vers)
            if vers:
                # Valid Android UA: mirror it on the response headers and let
                # the Assistant use its own UA (ua=None).
                dan = {'User-Agent': ua}
                resp.headers.update(dict(dan))
                ua = None
            else:
                ua = httptools.get_user_agent()
            logger.debug("UserAgent: %s || Android Vrs: %s" % (ua, vers))

            jscode = get_jscode(1, 'KEYCODE_ENTER', 1)
            # Stop the WebView as soon as a cf_clearance cookie shows up for
            # the site's scheme://host prefix.
            url_cf = scrapertools.find_single_match(resp.url, '(http.*\:\/\/(?:www\S*.)?\w+\.\w+(?:\.\w+)?)(?:\/)?') + '|cf_clearance'

            data_assistant = alfa_assistant.get_urls_by_page_finished(resp.url, timeout=timeout, getCookies=True,
                                                                      userAgent=ua, disableCache=True, debug=debug,
                                                                      jsCode=jscode, extraPostDelay=extraPostDelay,
                                                                      clearWebCache=True, removeAllCookies=True,
                                                                      returnWhenCookieNameFound=url_cf,
                                                                      retryIfTimeout=retryIfTimeout
                                                                      )
            logger.debug("data assistant: %s" % data_assistant)

            # Reduce "www.site.com" to ".site.com" for the cookie domain.
            domain_ = domain
            split_lst = domain.split(".")
            if len(split_lst) > 2:
                domain = domain.replace(split_lst[0], "")
            if not domain.startswith('.'):
                domain = "." + domain

            get_ua(data_assistant)
            if isinstance(data_assistant, dict) and data_assistant.get("cookies", None):
                logger.debug("Lista cookies: %s" % data_assistant.get("cookies", []))
                for cookie in data_assistant["cookies"]:
                    cookieslist = cookie.get("cookiesList", None)
                    val = scrapertools.find_single_match(cookieslist, 'cf_clearance=([A-z0-9_-]+)')
                    dom = cookie.get("urls", None)
                    logger.debug("dominios: %s" % dom[0])
                    if 'cf_clearance' in cookieslist and val:
                        dict_cookie = {'domain': domain, 'name': 'cf_clearance', 'value': val}
                        if domain_ in dom[0]:
                            # Challenge solved: persist the cookie, tag the
                            # response and return it to the caller.
                            httptools.set_cookies(dict_cookie)
                            rin = {'Server': 'Alfa'}
                            resp.headers.update(dict(rin))
                            logger.debug("cf_clearence=%s" % val)
                            if not retry:
                                freequent_data[1] += 'OK'
                            else:
                                freequent_data[1] += 'OK_R'
                            freequency(freequent_data)
                            return resp
                    else:
                        logger.error("No cf_clearance")
                else:
                    # for-else: loop finished without returning a solved resp.
                    freequent_data[1] += 'NO-CFC'
            else:
                freequent_data[1] += 'ERR'
                logger.error("No Cookies o Error en conexión con Alfa Assistant")

            if not retry:
                # First failure: drop the cached UA and retry once.
                config.set_setting('cf_assistant_ua', '')
                logger.debug("No se obtuvieron resultados, reintentando...")
                return get_cl(resp, timeout=timeout-5, extraPostDelay=extraPostDelay, \
                              retry=True, blacklist=True, retryIfTimeout=False, **kwargs)
        elif host == 'a':
            # Android without a working Assistant: point the user at the help.
            help_window.show_info('cf_2_01')

        freequency(freequent_data)

        # Record the failure so this domain is skipped for a while.
        if filetools.exists(PATH_BL):
            bl_data = jsontools.load(filetools.read(PATH_BL))
        else:
            bl_data = {}
        bl_data[domain_full] = time.time()
        filetools.write(PATH_BL, jsontools.dump(bl_data))

    msg = 'Detected a Cloudflare version 2 Captcha challenge,\
This feature is not available in the opensource (free) version.'
    resp.status_code = msg
    raise CloudflareChallengeError(msg)
def get_source(url, resp, timeout=5, debug=False, extraPostDelay=5, retry=False, blacklist=True, headers=None,
               retryIfTimeout=True, cache=False, clearWebCache=False, mute=True, alfa_s=False, elapsed=0, **kwargs):
    """Fetch a page's HTML through the Alfa Assistant WebView as a Cloudflare fallback.

    Returns (data, resp): `data` is the decoded page source ('' on failure)
    and resp.status_code is set to 200 on success or 429 on failure.  Failed
    or too-slow domains are written to the blacklist file PATH_BL.

    NOTE(review): this body was reconstructed from whitespace-mangled source;
    statement tokens are unchanged but the nesting is a best-effort
    reconstruction — verify against upstream before relying on edge branches.
    """
    blacklist_clear = True
    data = ''
    source = False
    if not elapsed:
        elapsed = time.time()
    elapsed_max = 40
    expiration = config.get_setting('cf_assistant_bl_expiration', default=30) * 60
    expiration_final = 0
    # Shorter blackout applied when a WebView security error is detected.
    security_error_blackout = (5 * 60) - expiration
    if debug:
        alfa_s = False

    if not resp:
        # Build a minimal stand-in response object (status 429, no headers).
        resp = {'status_code': 429, 'headers': {}}
        resp = type('HTTPResponse', (), resp)
    if not alfa_s:
        logger.debug("ERROR de descarga: %s" % resp.status_code)

    opt = kwargs.get('opt', {})
    domain_full = urlparse.urlparse(url).netloc
    domain = domain_full
    pcb = base64.b64decode(config.get_setting('proxy_channel_bloqued')).decode('utf-8')
    # Proxied/blocked-channel URLs are never blacklisted.
    if 'hideproxy' in url or 'webproxy' in url or 'hidester' in url or '__cpo=' in url \
            or httptools.TEST_ON_AIR or domain in pcb:
        blacklist_clear = False
        blacklist = False
    if timeout + extraPostDelay > 35:
        timeout = 20

    if blacklist and not retry:
        blacklist_clear = check_blacklist(domain_full)

    host = config.get_system_platform()[:1]
    # Telemetry record: [domain, status-string]; extended as the attempt runs.
    freequent_data = [domain, 'Cha,0.0.0,0,%s0,BlakL' % host]

    if blacklist_clear:
        freequent_data = [domain, 'Cha,0.0.0,0,%s0,App' % host]
        if not retry:
            freequent_data[1] += 'KO'
        else:
            freequent_data[1] += 'KO_R'

        check_assistant = alfa_assistant.open_alfa_assistant(getWebViewInfo=True, retry=True, assistantLatestVersion=False)
        if not isinstance(check_assistant, dict) and not retry:
            # Assistant did not answer: restart it and try once more.
            alfa_assistant.close_alfa_assistant()
            time.sleep(2)
            check_assistant = alfa_assistant.open_alfa_assistant(getWebViewInfo=True, retry=True, assistantLatestVersion=False)
            logger.debug("Reintento en acceder al Assistant: %s - %s" \
                         % ('OK' if isinstance(check_assistant, dict) else 'ERROR', time.time() - elapsed))

        if check_assistant and isinstance(check_assistant, dict):
            if check_assistant.get('assistantLatestVersion') and check_assistant.get('assistantVersion'):
                # Compare dotted versions component-by-component to detect an
                # available Assistant update.
                installed_version = check_assistant['assistantVersion'].split('.')
                available_version = check_assistant['assistantLatestVersion'].split('.')
                newer = False
                for i, ver in enumerate(available_version):
                    if int(ver) > int(installed_version[i]):
                        newer = True
                        break
                    if int(ver) < int(installed_version[i]):
                        break
                if newer:
                    help_window.show_info('cf_2_02', wait=False)

            ua = get_ua(check_assistant)
            try:
                vers = int(scrapertools.find_single_match(ua, r"Android\s*(\d+)"))
            except:
                vers = 0
            wvbVersion = check_assistant.get('wvbVersion', '0.0.0').split('.')[0]
            if len(wvbVersion) > 3:
                wvbVersion = wvbVersion[:2]
            freequent_data[1] = 'Cha,%s,%s,%s%s,' % (check_assistant.get('assistantVersion', '0.0.0'), wvbVersion, host, vers)
            if not retry:
                freequent_data[1] += 'Src'
            else:
                freequent_data[1] += 'Src_R'
            if vers:
                # Valid Android UA: mirror it on the response headers and let
                # the Assistant use its own UA (ua=None).
                dan = {'User-Agent': ua}
                resp.headers.update(dict(dan))
                ua = None
            else:
                ua = httptools.get_user_agent()
            if not alfa_s:
                logger.debug("UserAgent: %s || Android Vrs: %s" % (ua, vers))

            jscode = None
            # Stop the WebView as soon as a cf_clearance cookie shows up for
            # the site's scheme://host prefix.
            url_cf = scrapertools.find_single_match(url, '(http.*\:\/\/(?:www\S*.)?\w+\.\w+(?:\.\w+)?)(?:\/)?') + '|cf_clearance'

            data_assistant = alfa_assistant.get_source_by_page_finished(url, timeout=timeout, getCookies=True, userAgent=ua,
                                                                        disableCache=cache, debug=debug, jsCode=jscode,
                                                                        extraPostDelay=extraPostDelay, clearWebCache=clearWebCache,
                                                                        removeAllCookies=True, returnWhenCookieNameFound=url_cf,
                                                                        retryIfTimeout=retryIfTimeout, useAdvancedWebView=True,
                                                                        headers=headers, mute=mute, alfa_s=alfa_s)
            if not alfa_s:
                logger.debug("data assistant: %s" % data_assistant)

            if isinstance(data_assistant, dict) and data_assistant.get('htmlSources', []) \
                    and data_assistant['htmlSources'][0].get('source', ''):
                try:
                    # Page sources come back base64-encoded from the Assistant.
                    data = base64.b64decode(data_assistant['htmlSources'][0]['source']).decode('utf-8')
                    source = True
                except:
                    pass

                if source and 'accessing a cross-origin frame' in data:
                    # WebView security error: discard the page and apply the
                    # shorter blackout instead of the normal expiration.
                    source = False
                    retry = True
                    expiration_final = security_error_blackout
                    freequent_data[1] = 'Cha,%s,%s,%s%s,' % (check_assistant.get('assistantVersion', '0.0.0'), wvbVersion, host, vers)
                    freequent_data[1] += 'KO_SecE'
                    logger.error('Error SEGURIDAD: %s - %s' % (expiration_final, data[:100]))

                if source:
                    freequent_data[1] = 'Cha,%s,%s,%s%s,' % (check_assistant.get('assistantVersion', '0.0.0'), wvbVersion, host, vers)
                    if not retry:
                        freequent_data[1] += 'OK'
                    else:
                        freequent_data[1] += 'OK_R'

            if not source and not retry:
                # First failure: drop the cached UA, double the waits, retry once.
                config.set_setting('cf_assistant_ua', '')
                logger.debug("No se obtuvieron resultados, reintentando...")
                timeout = -1 if timeout < 0 else timeout * 2
                extraPostDelay = -1 if extraPostDelay < 0 else extraPostDelay * 2
                return get_source(url, resp, timeout=timeout, debug=debug, extraPostDelay=extraPostDelay,
                                  retry=True, blacklist=blacklist, retryIfTimeout=retryIfTimeout, cache=cache,
                                  clearWebCache=clearWebCache, alfa_s=False, headers=headers, mute=mute,
                                  elapsed=elapsed, **kwargs)

            # Reduce "www.site.com" to ".site.com" for the cookie domain.
            domain_ = domain
            split_lst = domain.split(".")
            if len(split_lst) > 2:
                domain = domain.replace(split_lst[0], "")
            if not domain.startswith('.'):
                domain = "." + domain

            get_ua(data_assistant)
            if isinstance(data_assistant, dict) and data_assistant.get("cookies", None):
                if not alfa_s:
                    logger.debug("Lista cookies: %s" % data_assistant.get("cookies", []))
                for cookie in data_assistant["cookies"]:
                    cookieslist = cookie.get("cookiesList", None)
                    val = scrapertools.find_single_match(cookieslist, 'cf_clearance=([A-z0-9_\-\.]+)')
                    #val = scrapertools.find_single_match(cookieslist, 'cf_clearance=([^;]+)')
                    dom = cookie.get("urls", None)
                    if not alfa_s:
                        logger.debug("dominios: %s" % dom[0])
                    if 'cf_clearance' in cookieslist and val:
                        dict_cookie = {'domain': domain, 'name': 'cf_clearance', 'value': val}
                        if domain_ in dom[0]:
                            # Persist the clearance cookie and tag the response.
                            httptools.set_cookies(dict_cookie)
                            rin = {'Server': 'Alfa'}
                            resp.headers.update(dict(rin))
                            freequent_data[1] += 'C'
                            if not alfa_s:
                                logger.debug("cf_clearence=%s" % val)
        elif host == 'a':
            # Android without a working Assistant: point the user at the help.
            help_window.show_info('cf_2_01')

    freequency(freequent_data)

    # Blacklist the domain on failure, or on success that took too long.
    if blacklist_clear and (not source or time.time() - elapsed > elapsed_max):
        if filetools.exists(PATH_BL):
            bl_data = jsontools.load(filetools.read(PATH_BL))
        else:
            bl_data = {}
        if time.time() - elapsed > elapsed_max:
            bl_data[domain_full] = time.time() + elapsed_max * 10 * 60
        else:
            bl_data[domain_full] = time.time() + expiration_final
        if not debug and not httptools.TEST_ON_AIR:
            filetools.write(PATH_BL, jsontools.dump(bl_data))

    if not source:
        resp.status_code = 429
    else:
        resp.status_code = 200

    return data, resp
def show_update_info(new_fix_json):
    """Show a popup listing the channels touched by a newly applied fix.

    new_fix_json: the downloaded fix manifest.  For a dict manifest, "files"
    maps fix ids to entries whose second element is a channel file name; only
    entries newer than the id stored in last_fix.json are reported.  Adult or
    unknown channels are skipped.  The popup is suppressed while playing or
    when the 'show_fixes' setting is off.
    """
    import re
    import os
    from core import jsontools
    from core import channeltools
    from platformcode import help_window
    from platformcode import platformtools

    fixed = list()
    old_fix = os.path.join(config.get_runtime_path(), 'last_fix.json')

    if isinstance(new_fix_json, dict):
        if not os.path.exists(old_fix):
            # No previous fix record: report every channel in the manifest.
            for k, v in new_fix_json["files"].items():
                if "channels" in v:
                    # Strip the extension to get the channel id.
                    v = re.sub(r"\.py|\.json", "", v[1])
                    channel_parameters = channeltools.get_channel_parameters(v)
                    if not channel_parameters["channel"] or channel_parameters["adult"]:
                        continue
                    fixed.append("- %s\n" % v.title())
        else:
            # Only report fixes newer than the last recorded id.
            with open(old_fix, "r") as f:
                old_fix_json = jsontools.load(f.read())
            last_id = old_fix_json.get("last_id", 0)
            for k, v in new_fix_json["files"].items():
                if int(k) > last_id and "channels" in v:
                    v = re.sub(r"\.py|\.json", "", v[1])
                    channel_parameters = channeltools.get_channel_parameters(v)
                    if not channel_parameters["channel"] or channel_parameters["adult"]:
                        continue
                    fixed.append("- %s\n" % v.title())
    elif isinstance(new_fix_json, list):
        # NOTE(review): this branch indexes a *list* with the string "files",
        # which would raise TypeError — it looks unreachable or buggy as
        # written; confirm the expected manifest shape before changing it.
        if not os.path.exists(old_fix):
            for fix in new_fix_json["files"]:
                if "channels" in fix:
                    fix = re.sub(r"\.py|\.json", "", fix[1])
                    channel_parameters = channeltools.get_channel_parameters(fix)
                    if not channel_parameters["channel"] or channel_parameters["adult"]:
                        continue
                    fixed.append("- %s\n" % fix.title())
        else:
            with open(old_fix, "r") as f:
                old_fix_json = jsontools.load(f.read())
            last_id = old_fix_json.get("last_id", 0)
            if len(new_fix_json["files"]) > last_id:
                for fix in new_fix_json["files"][last_id + 1:]:
                    if "channels" in fix:
                        fix = re.sub(r"\.py|\.json", "", fix[1])
                        channel_parameters = channeltools.get_channel_parameters(fix)
                        if not channel_parameters["channel"] or channel_parameters["adult"]:
                            continue
                        fixed.append("- %s\n" % fix.title())

    if fixed:
        fix_number = "%s - FIX %s" % (new_fix_json["addon_version"], new_fix_json["fix_version"])
        # set() deduplicates channels that appear in several fix entries.
        fixed_list = "".join(set(fixed))
        text = "[B]Se han aplicado correcciones a los siguientes canales:[/B] \n\n%s\n\n" % fixed_list
        text += "[I]Si no deseas ver esta ventana desactívala desde:[/I]\nConfiguración > Preferencias > General > Mostrar informe de correcciones"
        if not platformtools.is_playing() and config.get_setting("show_fixes", default=True):
            help_window.show_info(0, wait=False, title="Alfa - Correcciones (%s)" % fix_number, text=text)