def peliculas(params):
    plugintools.log("[%s %s] Pordede: Listando películas... %s " % (addonName, addonVersion, repr(params)))
    url = params.get("url")

    # Download the page
    headers = DEFAULT_HEADERS[:]
    headers.append(["X-Requested-With", "XMLHttpRequest"])
    data = scrapertools.cache_page(url, headers=headers)

    # Extract the entries (folders)
    items = plugintools.find_multiple_matches(data, 'defaultLink(.*?)data-action')
    for entry in items:
        # Undo the JSON escaping of the AJAX response before matching
        # (the original replaced "00f1" without the "u" prefix, leaving a stray "u")
        entry = entry.replace('\\', '').replace("u00e1", "á").replace("u00ed", "í").replace("u00e9", "é").replace("u00f3", "ó").replace("u00a1", "¡").replace("u00f1", "ñ")
        title_item = plugintools.find_single_match(entry, 'title=(.*?)>')
        url_item = plugintools.find_single_match(entry, 'href=(.*?)>')
        thumb_item = plugintools.find_single_match(entry, 'http(.*?)png')
        year_item = plugintools.find_single_match(entry, 'year(.*?)<')
        rank_item = plugintools.find_single_match(entry, '</i>(.*?)</span>')
        title_item = title_item.replace("\\", "").replace('"', "")
        url_item = url_item.replace("\\", "").replace('"', "")
        url_item = 'http://www.pordede.com' + url_item
        url_item = url_item.replace("/peli/", "/links/view/slug/") + "/what/peli"
        thumb_item = 'http' + thumb_item + 'png'
        year_item = year_item.replace("\\", "").replace('"', "").replace(">", "")
        rank_item = rank_item.replace("\\", "").replace('"', "")
        title_fixed = '[COLOR white]' + title_item + ' [/COLOR][COLOR lightyellow][I](' + year_item + ') [/COLOR][COLOR lightgreen] [' + rank_item + '][/I][/COLOR]'
        plugintools.add_item(action="pdd_findvideos", title=title_fixed, url=url_item, thumbnail=thumb_item, fanart=fanart, folder=True, isPlayable=False)

def list_remote_servers():
    '''
    Builds a dictionary of the remote servers and their last-update date by
    scraping the GitHub repository page. When pelisalacarta moves to the
    official GitHub account, the URL must be updated.
    '''
    remote_files_url = "https://github.com/superberny70/plugin.video.pelisalacarta/tree/master/servers"
    data = re.sub(r"\n|\r|\t|\s{2}|(<!--.*?-->)", "", scrapertools.cache_page(remote_files_url))
    last_commit = scrapertools.find_single_match(data, '<time class="updated" datetime="([^"]+)"')
    patron = '<td class="content">.*?title="([a-zA-Z0-9]+\.py)".*?'  # name_server
    patron += '<time datetime="([^"]+)"'  # date_time
    matches = re.compile(patron, re.DOTALL).findall(data)
    d = {}
    d['__ultima_actualizacion__'] = last_commit.replace('T', ' ').replace('Z', '')
    for name_server, date_time in matches:
        d[name_server] = date_time.replace('T', ' ').replace('Z', '')
    logger.info("updater.list_remote_servers :" + str(d))
    return d

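# A hedged usage sketch of list_remote_servers(): the helper below is not part
# of the original source, and "servers_path" is an assumed local folder.
def stale_servers(servers_path):
    # Return the server files the repository lists but the local install lacks.
    remote = list_remote_servers()
    remote.pop('__ultima_actualizacion__', None)
    missing = []
    for name in remote:
        if not os.path.exists(os.path.join(servers_path, name)):
            missing.append(name)
    return missing
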
def generos(params):
    plugintools.log('[%s %s] ---> Pordede: Por géneros... <--- ' % (addonName, addonVersion))

    # Download the page
    url = params.get("url")
    headers = DEFAULT_HEADERS[:]
    data = scrapertools.cache_page(url, headers=headers)

    # Extract the entries (folders)
    data = plugintools.find_single_match(data, '<div class="section genre">(.*?)</div>')
    patron = '<a class="mediaFilterLink" data-value="([^"]+)" href="([^"]+)">([^<]+)<span class="num">\((\d+)\)</span></a>'
    matches = plugintools.find_multiple_matches(data, patron)
    for textid, scrapedurl, scrapedtitle, cuantos in matches:
        title = scrapedtitle.strip() + " (" + cuantos + ")"
        # Use a separate variable: the original reassigned "url" here, which
        # clobbered the page URL tested by the "/pelis" check on later loops
        if "/pelis" in url:
            genre_url = "http://www.pordede.com/pelis/loadmedia/offset/0/genre/" + textid.replace(" ", "%20") + "/showlist/all"
        else:
            genre_url = "http://www.pordede.com/series/loadmedia/offset/0/genre/" + textid.replace(" ", "%20") + "/showlist/all"
        plugintools.add_item(action="peliculas", title=title, url=genre_url, thumbnail=thumbnail, fanart=fanart, folder=True, isPlayable=False)

    # Drop the cached copy of the site
    try:
        shutil.rmtree(temp + 'pordede.com', ignore_errors=False, onerror=None)
    except:
        pass

def updatechannel(channel_name):
    '''
    Experimental function to update a channel from GitHub based on the
    modification date of its files.
    '''
    if channel_name == "channelselector":
        remote_files_url = "https://github.com/superberny70/plugin.video.pelisalacarta"
        local_files_path = os.path.join(config.get_runtime_path(), channel_name + '.py')
    else:
        remote_files_url = "https://github.com/superberny70/plugin.video.pelisalacarta/tree/master/pelisalacarta/channels"
        local_files_path = os.path.join(config.get_runtime_path(), PLUGIN_NAME, 'channels', channel_name + '.py')

    data = re.sub(r"\n|\r|\t|\s{2}|(<!--.*?-->)", "", scrapertools.cache_page(remote_files_url))
    patron = '<td class="content">.*?title="' + channel_name + '\.py".*?'
    patron += '<time datetime="([^"]+)"'  # date_time
    date = scrapertools.find_single_match(data, patron).replace('T', ' ').replace('Z', '')
    if date == '':  # The channel is not in the remote repository
        return False

    struct = time.strptime(date, '%Y-%m-%d %H:%M:%S')
    dt_remote = datetime.datetime.fromtimestamp(time.mktime(struct))
    if os.path.exists(local_files_path):
        dt_local = datetime.datetime.fromtimestamp(os.path.getmtime(local_files_path))
        if dt_remote > dt_local:
            dialogo('Actualizando canal', 'Actualizando canal ' + channel_name)
            return download_channel(channel_name)
    return False

def login_pordede():
    plugintools.log('[%s %s] ---> Iniciando login en Pordede.com... <--- ' % (addonName, addonVersion))

    params = plugintools.get_params()
    url = "http://www.pordede.com/site/login"
    post = "LoginForm[username]=" + plugintools.get_setting("pordede_user") + "&LoginForm[password]=" + plugintools.get_setting("pordede_pwd")
    headers = DEFAULT_HEADERS[:]
    regex = params.get("extra")

    # Remove the cached copy of the site before logging in
    try:
        if os.path.exists(temp + 'pordede.com'):
            print "Eliminando carpeta caché..."
            shutil.rmtree(temp + 'pordede.com')  # the original called os.remove(), which cannot delete a folder
    except:
        pass

    data = scrapertools.cache_page(url, headers=headers, post=post)
    print data
    if data != "":
        login_info = plugintools.find_single_match(data, '<div class="friendMini shadow"(.*?)</div>')
        user_title = plugintools.find_single_match(login_info, 'title="([^"]+)')
        user_thumb = plugintools.find_single_match(login_info, 'src="([^"]+)')
        if regex == "":
            plugintools.log("regex= " + regex)
            plugintools.add_item(action="menuseries", title='Usuario: [COLOR lightyellow][I]' + user_title + '[/I][/COLOR]', url="", thumbnail=user_thumb, fanart=fanart, folder=True, isPlayable=False)
            plugintools.add_item(action="menuseries", title="Series", url="", thumbnail=thumbnail, fanart=fanart, folder=True, isPlayable=False)
            plugintools.add_item(action="menupeliculas", title="Películas", url="", thumbnail=thumbnail, fanart=fanart, folder=True, isPlayable=False)
            plugintools.add_item(action="listas_sigues", title="Listas que sigues", url="http://www.pordede.com/lists/following", thumbnail=thumbnail, fanart=fanart, folder=True, isPlayable=False)
            plugintools.add_item(action="tus_listas", title="Tus listas", url="http://www.pordede.com/lists/yours", thumbnail=thumbnail, fanart=fanart, folder=True, isPlayable=False)
            plugintools.add_item(action="listas_sigues", title="Top listas", url="http://www.pordede.com/lists", thumbnail=thumbnail, fanart=fanart, folder=True, isPlayable=False)

def playtvpw0(params):
    plugintools.log("[%s %s] PlayTV.pw0 parser %s " % (addonName, addonVersion, repr(params)))

    plugintools.add_item(action="", title='[COLOR yellow][B]PlayTV.pw[/B][/COLOR]', url="", thumbnail=thumbnail, fanart=fanart, folder=False, isPlayable=False)
    url = 'http://www.playtv.pw/'
    data = scrapertools.cache_page(url)
    events = plugintools.find_multiple_matches(data, '<div class="events_grid">(.*?)</div><!--/entry-->')
    for entry in events:
        plugintools.log("entry= " + entry)
        event_day = plugintools.find_single_match(entry, '<div class="events_date">([^<]+)').strip()
        event_time = plugintools.find_single_match(entry, '<div class="events_time">([^<]+)').strip()
        event_cat = plugintools.find_single_match(entry, '<div class="events_cat">([^<]+)').strip()
        event_title = plugintools.find_single_match(entry, '<div class="events_title">([^<]+)').strip()
        event_url = plugintools.find_single_match(entry, '<a href="([^"]+)').strip()
        event_id = event_url.replace("/", "").split("events")
        if len(event_id) >= 2:
            event_id = event_id[1]
            plugintools.log("event_id= " + event_id)
        event_title = event_title.replace("–", "-")
        plugintools.add_item(action="playtvpw1", title=event_day + " " + event_time + " " + event_cat + " " + event_title, url=event_url, extra=event_id, thumbnail=thumbnail, fanart=fanart, folder=False, isPlayable=True)

def playtvpw2(params, url_ajax):
    plugintools.log("[%s %s] PlayTV.pw2 parser %s " % (addonName, addonVersion, repr(params)))
    plugintools.log("url_ajax= " + url_ajax)
    data = scrapertools.cache_page(url_ajax)
    plugintools.log("data= " + data)
    url = plugintools.find_single_match(data, 'file: "([^"]+)')
    return url

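# Hedged glue code showing how the three PlayTV.pw parsers chain together;
# this helper is not part of the original source, and the channel fragment
# "ch1" below is an illustrative value only.
def playtvpw_resolve_example(params):
    # playtvpw0() lists events and passes the event id via "extra";
    # playtvpw1() lets the user pick a mirror and builds this AJAX URL;
    # playtvpw2() fetches it and returns the jwplayer "file:" stream URL.
    url_ajax = ('http://playtv.pw/wp-admin/admin-ajax.php'
                '?action=get_link_func&link=ch1&id=' + params.get("extra"))
    return playtvpw2(params, url_ajax)
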
def login(username, password):
    logger.info("pyload_client.login")
    #url = config.get_setting("pyload")+"/api/login"
    api_url = urlparse.urljoin(config.get_setting("pyload"), "/api/login")
    logger.info("pyload_client.login api_url=" + api_url)
    data = scrapertools.cache_page(api_url, post=urllib.urlencode({"username": username, "password": password}))
    logger.info("pyload_client.login data=" + data)
    return data

def download_channel(channel_name):
    logger.info("streamondemand-pureita.core.updater download_channel('" + channel_name + "')")

    # Remote channel
    remote_channel_url, remote_version_url = get_channel_remote_url(channel_name)

    # Local channel
    local_channel_path, local_version_path, local_compiled_path = get_channel_local_path(channel_name)

    # Download the channel
    updated_channel_data = scrapertools.cache_page(remote_channel_url)
    try:
        outfile = open(local_channel_path, "w")
        outfile.write(updated_channel_data)
        outfile.flush()
        outfile.close()
        logger.info("streamondemand-pureita.core.updater Grabado a " + local_channel_path)
    except:
        logger.info("streamondemand-pureita.core.updater Error al grabar " + local_channel_path)
        import sys
        for line in sys.exc_info():
            logger.error("%s" % line)

    # Download the version file (it may not exist)
    try:
        updated_version_data = scrapertools.cache_page(remote_version_url)
        outfile = open(local_version_path, "w")
        outfile.write(updated_version_data)
        outfile.flush()
        outfile.close()
        logger.info("streamondemand-pureita.core.updater Grabado a " + local_version_path)
    except:
        import sys
        for line in sys.exc_info():
            logger.error("%s" % line)

    # Drop the compiled .pyc so the fresh source is picked up
    if os.path.exists(local_compiled_path):
        os.remove(local_compiled_path)

def loginmu():
    show = plugintools.get_setting("series_id")
    if show == "6":  # the original used "is", which tests identity, not string equality
        show = "tvshows"
    plugintools.log("show= " + show)
    plugintools.modo_vista(show)

    # Start the login...
    url = 'http://series.mu/login/'
    # The username literal was censored as ****** in the source; substitute real
    # credentials (or a settings lookup like the one used for the password).
    post = 'user=' + '******' + '&pass=' + plugintools.get_setting("seriesmu_pwd")
    data = scrapertools.cache_page(url, post=post)

def download(url, package_name):
    logger.info("pyload_client.download url=" + url + ", package_name=" + package_name)

    session = login(config.get_setting("pyload_user"), config.get_setting("pyload_password"))
    package_id = find_package_id(package_name)

    if package_id is None:
        # No queue entry yet: create a new package holding the link
        api_url = urlparse.urljoin(config.get_setting("pyload"), "/api/addPackage")
        logger.info("pyload_client.download api_url=" + api_url)
        data = scrapertools.cache_page(api_url, post=urllib.urlencode({"name": "'" + package_name + "'", "links": str([url])}))
        logger.info("pyload_client.download data=" + data)
    else:
        # The package already exists: append the link to it
        api_url = urlparse.urljoin(config.get_setting("pyload"), "/api/addFiles")
        logger.info("pyload_client.download api_url=" + api_url)
        data = scrapertools.cache_page(api_url, post=urllib.urlencode({"pid": str(package_id), "links": str([url])}))
        logger.info("pyload_client.download data=" + data)

    return

def updatechannel(channel_name):
    logger.info("streamondemand-pureita.core.updater updatechannel('" + channel_name + "')")

    # Remote channel
    remote_channel_url, remote_version_url = get_channel_remote_url(channel_name)

    # Local channel
    local_channel_path, local_version_path, local_compiled_path = get_channel_local_path(channel_name)

    # Remote version
    try:
        data = scrapertools.cache_page(remote_version_url)
        logger.info("streamondemand-pureita.core.updater remote_data=" + data)
        if "<tag>" in data:
            patronvideos = '<tag>([^<]+)</tag>'
        elif "<version>" in data:
            patronvideos = '<version>([^<]+)</version>'
        matches = re.compile(patronvideos, re.DOTALL).findall(data)
        remote_version = int(matches[0])
    except:
        remote_version = 0
    logger.info("streamondemand-pureita.core.updater remote_version=%d" % remote_version)

    # Local version
    if os.path.exists(local_version_path):
        infile = open(local_version_path)
        data = infile.read()
        infile.close()
        logger.info("streamondemand-pureita.core.updater local_data=" + data)
        if "<tag>" in data:
            patronvideos = '<tag>([^<]+)</tag>'
        elif "<version>" in data:
            patronvideos = '<version>([^<]+)</version>'
        matches = re.compile(patronvideos, re.DOTALL).findall(data)
        local_version = int(matches[0])
    else:
        local_version = 0
    logger.info("streamondemand-pureita.core.updater local_version=%d" % local_version)

    # Check whether it has changed
    updated = remote_version > local_version
    if updated:
        logger.info("streamondemand-pureita.core.updater updated")
        download_channel(channel_name)

    return updated

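# For reference, the kind of per-channel version file the regexes above accept
# (illustrative content, not a real file from the repository): either a <tag>
# or a <version> element whose text parses as an integer, e.g.
#
#   <version>
#       <tag>21</tag>
#   </version>
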
def find_package_id(package_name):
    logger.info("pyload_client.find_package_id package_name=" + package_name)

    api_url = urlparse.urljoin(config.get_setting("pyload"), "/api/getQueue")
    logger.info("pyload_client.find_package_id api_url=" + api_url)
    data = scrapertools.cache_page(api_url)
    logger.info("pyload_client.find_package_id data=" + data)

    try:
        package_id = scrapertools.get_match(data, '"name"\s*:\s*"' + package_name + '".*?"pid"\s*\:\s*(\d+)')
    except:
        package_id = None

    return package_id

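# End-to-end sketch tying the three pyLoad client calls together. This helper
# is not part of the original source; the setting names are the ones the
# functions above already read, and the link and package name are illustrative.
def enqueue_example():
    login(config.get_setting("pyload_user"), config.get_setting("pyload_password"))
    download("http://example.com/file.avi", "pelisalacarta")
    return find_package_id("pelisalacarta")  # None if the queue has no such package
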
def list_remote_channels():
    """
    Gets a list of the remote channels by scraping the repository web page.
    """
    remote_files_url = "https://github.com/superberny70/plugin.video.pelisalacarta/tree/master/pelisalacarta/channels"
    data = re.sub(r"\n|\r|\t|\s{2}|(<!--.*?-->)", "", scrapertools.cache_page(remote_files_url))
    last_commit = scrapertools.find_single_match(data, '<time class="updated" datetime="([^"]+)"')
    files_remotos = []
    if last_commit != "":
        patron = '<td class="content">.*?title="([a-zA-Z0-9]+\.py)">'
        files_remotos = re.compile(patron, re.DOTALL).findall(data)
    return files_remotos

def playtvpw1(params):
    plugintools.log("[%s %s] PlayTV.pw1 parser %s " % (addonName, addonVersion, repr(params)))

    title_canales = []
    url_canales = []
    url = params.get("url")
    data = scrapertools.cache_page(url)
    num_links_bloque = plugintools.find_single_match(data, '<div class="alternative-link"(.*?)</div>')
    plugintools.log("num_links_bloque= " + num_links_bloque)
    num_links = plugintools.find_multiple_matches(num_links_bloque, '<a class="mybutton" href="([^"]+)')
    title_links = plugintools.find_multiple_matches(num_links_bloque, ';">(.*?)</a>')
    for entry in num_links:
        url_canales.append(entry)
    for entri in title_links:
        title_canales.append(entri)

    try:
        # Let the user pick one of the alternative links
        dia = plugintools.selector(title_canales, 'PlayTV.pw')
        ch = url_canales[dia].replace("#", "")
        url_ajax = 'http://playtv.pw/wp-admin/admin-ajax.php?action=get_link_func&link=' + ch + '&id=' + params.get("extra")
        plugintools.log("url_ajax= " + url_ajax)
        url = playtvpw2(params, url_ajax)
        plugintools.play_resolved_url(url)
    except KeyboardInterrupt:
        pass
    except IndexError:
        raise
    except:
        pass

def enlacesmu(params):
    plugintools.log('[%s %s] enlacesmu %s' % (addonName, addonVersion, repr(params)))

    # Get the user's view mode for TV series
    show = plugintools.get_setting("series_id")
    modos = {None: "tvshows", "1": "seasons", "2": "fanart", "3": "list",
             "4": "thumbnail", "5": "movies", "6": "tvshows", "7": "episodes"}
    show = modos.get(show, show)
    plugintools.log("show= " + show)
    plugintools.modo_vista(show)

    sinopsis = params.get("plot")
    datamovie = {}
    datamovie["Plot"] = sinopsis
    fanart_fixed = params.get("page")
    loginmu()
    plugintools.modo_vista(show)

    url = params.get("url")
    title = params.get("title")
    thumbnail = params.get("thumbnail")
    referer = 'http://www.series.mu/'
    data = scrapertools.cache_page(url, referer)
    plugintools.log("data= " + data)

    def titulo(server, lang_audio, lang_sub):
        # Build the colored label; the original repeated this block per host
        langs = '[' + lang_audio + '] [' + lang_sub + ']' if lang_sub != "" else '[' + lang_audio + ']'
        return ('[COLOR orange][B]' + title + '[/B][/COLOR] [COLOR lightyellow][I][' + server +
                '][/I][/COLOR] [COLOR lightgreen][I]' + langs + '[/I][/COLOR]')

    matches = plugintools.find_single_match(data, '<div class="sections episode-links online shown">(.*?)<div class="sections episode-links download">')
    capis = plugintools.find_multiple_matches(matches, '<div class="link-row">(.*?)</a>')
    for entry in capis:
        plugintools.log("entry= " + entry)
        lang_audio = plugintools.find_single_match(entry, '<div class="lang audio">(.*?)</div>')
        lang_sub = plugintools.find_single_match(entry, '<div class="lang sub">(.*?)</div>')
        url_link = plugintools.find_single_match(entry, '<a href=(.*?)target').replace('"', "").strip()
        url_link = 'http://series.mu' + url_link
        host = plugintools.find_single_match(entry, '<div class="host ([^"]+)')

        hosts = {"streamcloudeu": "Streamcloud", "vidspotnet": "Vidspot",
                 "allmyvideosnet": "Allmyvideos", "playedto": "Played.to"}
        if host in hosts:
            plugintools.add_item(action="getlinkmu", title=titulo(hosts[host], lang_audio, lang_sub), url=url_link, info_labels=datamovie, thumbnail=thumbnail, page=fanart_fixed, fanart=fanart_fixed, folder=False, isPlayable=True)
        if host == "nowvideosx":
            plugintools.add_item(action="getlinkmu", title=titulo("Nowvideo.sx", lang_audio, lang_sub), url=url_link, info_labels=datamovie, thumbnail=thumbnail, page=fanart_fixed, fanart=fanart_fixed, folder=False, isPlayable=True)
        else:
            url_link = getotherhost(url_link, show)
            # Note: as in the original, these checks look at the episode page
            # URL ("url"), not at the link itself ("url_link").
            otros = [("veehd", "VeeHD", "veehd"), ("streamin.to", "streamin.to", "streaminto"),
                     ("vk", "Vk", "vk"), ("Tumi", "Tumi", "tumi")]
            for needle, server, action in otros:
                if url.find(needle) >= 0:
                    plugintools.add_item(action=action, title=titulo(server, lang_audio, lang_sub), url=url_link, info_labels=datamovie, thumbnail=thumbnail, page=fanart_fixed, fanart=fanart_fixed, folder=False, isPlayable=True)

    plugintools.log("show= " + show)
    plugintools.modo_vista(show)

def checkforupdates(plugin_mode=True):
    logger.info("streamondemand-pureita.core.updater checkforupdates")

    # Download the version file from the web
    logger.info("streamondemand-pureita.core.updater Verificando actualizaciones...")
    logger.info("streamondemand-pureita.core.updater Version remota: " + REMOTE_VERSION_FILE)
    data = scrapertools.cache_page(REMOTE_VERSION_FILE)
    '''
    <?xml version="1.0" encoding="utf-8" standalone="yes"?>
    <version>
        <name>streamondemand-pureita</name>
        <tag>4.0</tag>
        <version>4000</version>
        <date>20/03/2015</date>
        <changes>New release</changes>
    </version>
    '''
    version_publicada = scrapertools.find_single_match(data, "<version>([^<]+)</version>").strip()
    tag_publicada = scrapertools.find_single_match(data, "<tag>([^<]+)</tag>").strip()
    logger.info("streamondemand-pureita.core.updater version remota=" + tag_publicada + " " + version_publicada)

    # Read the file with the installed version
    localFileName = LOCAL_VERSION_FILE
    logger.info("streamondemand-pureita.core.updater fichero local version: " + localFileName)
    infile = open(localFileName)
    data = infile.read()
    infile.close()
    version_local = scrapertools.find_single_match(data, "<version>([^<]+)</version>").strip()
    tag_local = scrapertools.find_single_match(data, "<tag>([^<]+)</tag>").strip()
    logger.info("streamondemand-pureita.core.updater version local=" + tag_local + " " + version_local)

    try:
        numero_version_publicada = int(version_publicada)
        numero_version_local = int(version_local)
    except:
        import traceback
        logger.info(traceback.format_exc())
        version_publicada = ""
        version_local = ""

    if version_publicada == "" or version_local == "":
        # Fall back to comparing the dotted tags field by field
        arraydescargada = tag_publicada.split(".")
        arraylocal = tag_local.split(".")

        # local 2.8.0 - remote 2.8.0 -> do not download
        # local 2.9.0 - remote 2.8.0 -> do not download
        # local 2.8.0 - remote 2.9.0 -> download
        if len(arraylocal) == len(arraydescargada):
            logger.info("caso 1")
            hayqueactualizar = False
            for i in range(0, len(arraylocal)):
                #print arraylocal[i], arraydescargada[i], int(arraydescargada[i]) > int(arraylocal[i])
                if int(arraydescargada[i]) > int(arraylocal[i]):
                    hayqueactualizar = True

        # local 2.8.0 - remote 2.8 -> do not download
        # local 2.9.0 - remote 2.8 -> do not download
        # local 2.8.0 - remote 2.9 -> download
        if len(arraylocal) > len(arraydescargada):
            logger.info("caso 2")
            hayqueactualizar = False
            for i in range(0, len(arraydescargada)):
                #print arraylocal[i], arraydescargada[i], int(arraydescargada[i]) > int(arraylocal[i])
                if int(arraydescargada[i]) > int(arraylocal[i]):
                    hayqueactualizar = True

        # local 2.8  - remote 2.8.8 -> download
        # local 2.9  - remote 2.8.8 -> do not download
        # local 2.10 - remote 2.9.9 -> do not download
        # local 2.5  - remote 3.0.0 -> download
        if len(arraylocal) < len(arraydescargada):
            logger.info("caso 3")
            hayqueactualizar = True
            for i in range(0, len(arraylocal)):
                #print arraylocal[i], arraydescargada[i], int(arraylocal[i])>int(arraydescargada[i])
                if int(arraylocal[i]) > int(arraydescargada[i]):
                    hayqueactualizar = False
                elif int(arraylocal[i]) < int(arraydescargada[i]):
                    hayqueactualizar = True
                    break
    else:
        hayqueactualizar = (numero_version_publicada > numero_version_local)

    if hayqueactualizar:
        if plugin_mode:
            logger.info("streamondemand-pureita.core.updater actualizacion disponible")

            # Add the update entry to the XBMC listing
            import xbmcgui
            thumbnail = IMAGES_PATH + "Crystal_Clear_action_info.png"
            logger.info("thumbnail=" + thumbnail)
            listitem = xbmcgui.ListItem("Scarica la versione " + tag_publicada, thumbnailImage=thumbnail)
            itemurl = '%s?action=update&version=%s' % (sys.argv[0], tag_publicada)
            import xbmcplugin
            xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=itemurl, listitem=listitem, isFolder=True)

            # Warn with a popup
            dialog = xbmcgui.Dialog()
            dialog.ok("Versione " + tag_publicada + " disponibile",
                      "E' possibile scaricare la nuova versione del plugin\nattraverso l'opzione nel menù principale.")
        else:
            import xbmcgui
            yes_pressed = xbmcgui.Dialog().yesno("Versione " + tag_publicada + " disponibile", "Desideri aggiornare?")
            if yes_pressed:
                params = {"version": tag_publicada}
                update(params)

def pdd_findvideos(params):
    plugintools.log("[%s %s] Pordede: Buscando enlaces... %s " % (addonName, addonVersion, repr(params)))

    if params.get("extra") == "regex":
        try:
            shutil.rmtree(temp + 'pordede.com', ignore_errors=False, onerror=None)
        except:
            pass
        params["regex"] = 'regex'
        login_pordede()

    url_peli = params.get("page")

    # Download the page
    headers = DEFAULT_HEADERS[:]
    headers.append(["X-Requested-With", "XMLHttpRequest"])
    data = scrapertools.cache_page(url_peli, headers=headers)

    fanart = plugintools.find_single_match(data, 'src=(.*?)>').replace("\\", "").replace('"', "").replace(".png/", ".png").strip()
    thumbnail = fanart.replace("big", "").strip()
    plugintools.log("fanart= " + fanart)
    plugintools.log("thumbnail= " + thumbnail)

    url = params.get("url")
    if thumbnail == "":  # Fallback in case the regex pass did not capture a thumbnail
        thumbnail = params.get("thumbnail")

    # Download the page
    headers = DEFAULT_HEADERS[:]
    data = scrapertools.cache_page(url, headers=headers)
    sesion = plugintools.find_single_match(data, 'SESS = "([^"]+)";')

    patron = '<a target="_blank" class="a aporteLink(.*?)</a>'
    matches = re.compile(patron, re.DOTALL).findall(data)
    itemlist = []
    i = 1
    plugintools.add_item(action="", title='[COLOR lightyellow][B]' + params.get("title") + '[/B][/COLOR]', url="", thumbnail=thumbnail, fanart=fanart, folder=False, isPlayable=False)

    for match in matches:
        jdown = scrapertools.find_single_match(match, '<div class="jdownloader">[^<]+</div>')
        if jdown != '':  # Skip watch-online/download duplicates
            continue

        idiomas = re.compile('<div class="flag([^"]+)">([^<]+)</div>', re.DOTALL).findall(match)
        idioma_0 = (idiomas[0][0].replace(" ", "").strip() + " " + idiomas[0][1].replace(" ", "").strip()).strip()
        if len(idiomas) > 1:
            idioma_1 = (idiomas[1][0].replace(" ", "").strip() + " " + idiomas[1][1].replace(" ", "").strip()).strip()
            idioma = idioma_0 + ", " + idioma_1
        else:
            idioma_1 = ''
            idioma = idioma_0
        idioma = idioma.replace("spanish", "ESP").replace("english", "ENG").replace("spanish SUB", "SUB-ESP").replace("english SUB", "SUB-ENG")

        calidad_video = plugintools.find_single_match(match, '<div class="linkInfo quality"><i class="icon-facetime-video"></i>([^<]+)</div>').strip()
        calidad_audio = plugintools.find_single_match(match, '<div class="linkInfo qualityaudio"><i class="icon-headphones"></i>([^<]+)</div>').strip()
        thumb_servidor = plugintools.find_single_match(match, '<div class="hostimage"[^<]+<img\s*src="([^"]+)">').strip()
        nombre_servidor = plugintools.find_single_match(thumb_servidor, "popup_([^\.]+)\.png").strip()

        title = "[COLOR white]Op. " + str(i) + '. [/COLOR][COLOR lightgreen][I][' + nombre_servidor + "] [/I][/COLOR][COLOR gold] (" + idioma + ") [/COLOR][COLOR lightyellow][I][Video: " + calidad_video.strip() + ", Audio: " + calidad_audio.strip() + "][/COLOR][/I] "
        i = i + 1

        # Tally the OK/KO/report votes shown next to each link
        cuenta = []
        valoracion = 0
        for idx, val in enumerate(['1', '2', 'report']):
            nn = plugintools.find_single_match(match, '<span\s+data-num="([^"]+)"\s+class="defaultPopup"\s+href="/likes/popup/value/' + val + '/')
            if nn != '0' and nn != '':
                cuenta.append(nn + ' ' + ['[COLOR green]OK[/COLOR]', '[COLOR red]KO[/COLOR]', 'rep'][idx])
                valoracion += int(nn) if val == '1' else -int(nn)
        if len(cuenta) > 0:
            title += ' [COLOR white](' + ', '.join(cuenta) + ')[/COLOR]'

        item_url = plugintools.find_single_match(match, 'href="([^"]+)"')
        item_url = 'http://www.pordede.com' + item_url
        plugintools.add_item(action="pordede_play", title=title, url=item_url, thumbnail=thumbnail, fanart=fanart, extra=sesion + "|" + item_url, folder=False, isPlayable=True)

def checkforupdates():
    import time
    logger.info("checkforupdates")

    # Plugin updates
    if config.get_setting("updatecheck2") == "true":
        logger.info("Comprobando actualizaciones de pelisalacarta")

        if os.path.isfile(lastupdatepath):
            UltimaConsulta = float(open(lastupdatepath, "rb").read())
        else:
            UltimaConsulta = 0

        if int(time.time() - UltimaConsulta) > 3600:
            REMOTE_VERSION_FILE = downloadurl + "/update/version.xml"
            LOCAL_VERSION_FILE = os.path.join(config.get_runtime_path(), "version.xml")
            patron = '<tag>([^<]+)</tag>'  # hoisted: the original defined it only when the download succeeded
            data = scrapertools.cachePage(REMOTE_VERSION_FILE)
            if data:
                matches = re.compile(patron, re.DOTALL).findall(data)
                versiondescargada = matches[0]
            else:
                versiondescargada = "0.0"  # treat a failed download as an old version
            data = open(LOCAL_VERSION_FILE).read()
            matches = re.compile(patron, re.DOTALL).findall(data)
            versionlocal = matches[0]
            logger.info("Versión local: " + versionlocal)
            logger.info("Versión remota: " + versiondescargada)

            from distutils.version import StrictVersion
            if StrictVersion(versiondescargada) > StrictVersion(versionlocal):
                if guitools.Dialog_YesNo("pelisalacarta", "¡Hay una nueva versión lista para descargar!\nVersión actual: " + versionlocal + " - Nueva versión: " + versiondescargada + "\nQuieres instalarla ahora?"):
                    update(Item(url=versiondescargada))
                else:
                    if guitools.Dialog_YesNo("pelisalacarta", "¿No volver a mostrar en una hora?"):
                        open(lastupdatepath, "wb").write(str(time.time()))
                    logger.info("Opción seleccionada: No Descargar")
        else:
            logger.info("No preguntar hasta: " + str(3600 - int(time.time() - UltimaConsulta)) + " Segundos")

    # Channel updates
    if config.get_setting("updatechannels") == "true":
        logger.info("Comprobando actualizaciones de canales")
        data = scrapertools.cache_page(giturl + "/pelisalacarta/pelisalacarta/channels?ref=" + branch, headers=headers)
        RemoteJSONData = json.loads(data)
        if not os.path.isfile(channelspath):
            CreateChannelsIndex()
        f = open(channelspath, "r")
        JSONData = json.loads(f.read())
        f.close()
        downloadchannels = []
        if RemoteJSONData == JSONData:
            logger.info("Todos los canales estan actualizados")
        else:
            logger.info("Hay canales para actualizar")
            for file in RemoteJSONData:
                if not file in JSONData:
                    downloadchannels.append(file)

        logger.info("Comprobando actualizaciones de servers")
        data = scrapertools.cache_page(giturl + "/pelisalacarta/servers?ref=" + branch, headers=headers)
        RemoteJSONData = json.loads(data)
        if not os.path.isfile(serverspath):
            CreateServersIndex()
        f = open(serverspath, "r")
        JSONData = json.loads(f.read())
        f.close()
        downloadservers = []
        if RemoteJSONData == JSONData:
            logger.info("Todos los servers estan actualizados")
        else:
            logger.info("Hay servers para actualizar")
            for file in RemoteJSONData:
                if not file in JSONData:
                    downloadservers.append(file)

        if downloadchannels or downloadservers:
            dialog = guitools.Dialog_Progress("Actualizando...", "")
            for file in downloadchannels:
                if dialog.IsCanceled():
                    break
                logger.info("Actualizando: " + file["name"])
                dialog.Actualizar(downloadchannels.index(file) * 100 / (len(downloadchannels) + len(downloadservers)), "Actualizando canal: " + file["name"].encode("utf8"))
                data = scrapertools.cachePage(file["download_url"])
                open(os.path.join(config.get_runtime_path(), "..", *file["path"].split("/")), "wb").write(data)
                # Reload any already-imported module that matches the updated file
                import inspect
                for module in sys.modules.keys():
                    if inspect.ismodule(sys.modules[module]):
                        if file["name"].encode("utf8").replace(".py", "") in module:
                            reload(sys.modules[module])
            for file in downloadservers:
                if dialog.IsCanceled():
                    break
                logger.info("Actualizando: " + file["name"])
                dialog.Actualizar((downloadservers.index(file) + len(downloadchannels)) * 100 / (len(downloadchannels) + len(downloadservers)), "Actualizando server: " + file["name"].encode("utf8"))
                data = scrapertools.cachePage(file["download_url"])
                open(os.path.join(config.get_runtime_path(), "..", *file["path"].split("/")), "wb").write(data)
                import inspect
                for module in sys.modules.keys():
                    if inspect.ismodule(sys.modules[module]):
                        if file["name"].encode("utf8").replace(".py", "") in module:
                            reload(sys.modules[module])
            if dialog.IsCanceled():
                dialog.Cerrar()
                CreateChannelsIndex()
                CreateServersIndex()
                guitools.Dialog_OK("Actualizaciones", "¡El proceso se ha cancelado!")
            else:
                dialog.Cerrar()
                CreateChannelsIndex()
                CreateServersIndex()
                guitools.Dialog_OK("Actualizaciones", "¡Canales descargados con éxito!")

def pdd_findvideos(params): plugintools.log("[%s %s] Pordede: Buscando enlaces... %s " % (addonName, addonVersion, repr(params))) if params.get("extra") == "regex": try: shutil.rmtree(temp + 'pordede.com', ignore_errors=False, onerror=None) except: pass params["regex"]='regex' login_pordede() url_peli = params.get("page") # Descarga la pagina headers = DEFAULT_HEADERS[:] headers.append(["X-Requested-With","XMLHttpRequest"]) data = scrapertools.cache_page(url_peli,headers=headers) #plugintools.log("data= "+data) fanart = plugintools.find_single_match(data, 'src=(.*?)>').replace("\\", "").replace('"', "").replace(".png/", ".png").strip() thumbnail = fanart.replace("big", "").strip() plugintools.log("fanart= "+fanart) plugintools.log("thumbnail= "+thumbnail) url = params.get("url") if thumbnail == "": # Control por si no se ejecuta regex o no captura thumbnail correctamente thumbnail = params.get("thumbnail") # Descarga la pagina headers = DEFAULT_HEADERS[:] data = scrapertools.cache_page(url,headers=headers) #plugintools.log("data="+data) sesion = plugintools.find_single_match(data,'SESS = "([^"]+)";') #plugintools.log("sesion="+sesion) patron = '<a target="_blank" class="a aporteLink(.*?)</a>' matches = re.compile(patron,re.DOTALL).findall(data) itemlist = [] i = 1 plugintools.add_item(action="", title='[COLOR lightyellow][B]'+params.get("title")+'[/B][/COLOR]', url="", thumbnail = thumbnail, fanart=fanart, folder=False, isPlayable=False) for match in matches: #plugintools.log("match= "+match) jdown = scrapertools.find_single_match(match,'<div class="jdownloader">[^<]+</div>') if jdown != '': # Descartar enlaces veronline/descargar continue idiomas = re.compile('<div class="flag([^"]+)">([^<]+)</div>',re.DOTALL).findall(match) idioma_0 = (idiomas[0][0].replace(" ","").strip() + " " + idiomas[0][1].replace(" ","").strip()).strip() if len(idiomas) > 1: idioma_1 = (idiomas[1][0].replace(" ","").strip() + " " + idiomas[1][1].replace(" ","").strip()).strip() idioma = idioma_0 + ", " + idioma_1 else: idioma_1 = '' idioma = idioma_0 idioma=idioma.replace("spanish", "ESP").replace("english", "ENG").replace("spanish SUB", "SUB-ESP").replace("english SUB", "SUB-ENG") calidad_video = plugintools.find_single_match(match,'<div class="linkInfo quality"><i class="icon-facetime-video"></i>([^<]+)</div>').strip() #plugintools.log("calidad_video="+calidad_video) calidad_audio = plugintools.find_single_match(match,'<div class="linkInfo qualityaudio"><i class="icon-headphones"></i>([^<]+)</div>').strip() #plugintools.log("calidad_audio="+calidad_audio) thumb_servidor = plugintools.find_single_match(match,'<div class="hostimage"[^<]+<img\s*src="([^"]+)">').strip() #plugintools.log("thumb_servidor="+thumb_servidor) nombre_servidor = plugintools.find_single_match(thumb_servidor,"popup_([^\.]+)\.png").strip() #plugintools.log("nombre_servidor="+nombre_servidor) title = "[COLOR white]Op. "+str(i)+'. 
[/COLOR][COLOR lightgreen][I]['+nombre_servidor+"] [/I][/COLOR][COLOR gold] ("+idioma+") [/COLOR][COLOR lightyellow][I][Video: "+calidad_video.strip()+", Audio: "+calidad_audio.strip()+"][/COLOR][/I] " i = i + 1 cuenta = [] valoracion = 0 for idx, val in enumerate(['1', '2', 'report']): nn = plugintools.find_single_match(match,'<span\s+data-num="([^"]+)"\s+class="defaultPopup"\s+href="/likes/popup/value/'+val+'/') if nn != '0' and nn != '': cuenta.append(nn + ' ' + ['[COLOR green]OK[/COLOR]', '[COLOR red]KO[/COLOR]', 'rep'][idx]) valoracion += int(nn) if val == '1' else -int(nn) if len(cuenta) > 0: title += ' [COLOR white](' + ', '.join(cuenta) + ')[/COLOR]' item_url = plugintools.find_single_match(match,'href="([^"]+)"') item_url = 'http://www.pordede.com'+item_url #thumbnail = thumb_servidor #plugintools.log("title=["+title+"], url=["+url+"], thumbnail=["+thumbnail+"]") plugintools.add_item(action="pordede_play", title=title, url=item_url, thumbnail=thumbnail, fanart=fanart, extra=sesion+"|"+item_url, folder=False, isPlayable=True)
def login_pordede(): plugintools.log('[%s %s] ---> Iniciando login en Pordede.com... <--- ' % (addonName, addonVersion)) params = plugintools.get_params() url = "http://www.pordede.com/site/login" post = "LoginForm[username]=" + plugintools.get_setting( "pordede_user") + "&LoginForm[password]=" + plugintools.get_setting( "pordede_pwd") headers = DEFAULT_HEADERS[:] regex = params.get("extra") try: if os.path.exists(temp + 'pordede.com') is True: print "Eliminando carpeta caché..." os.remove(temp + 'pordede.com') except: pass data = scrapertools.cache_page(url, headers=headers, post=post) print data if data != "": login_info = plugintools.find_single_match( data, '<div class="friendMini shadow"(.*?)</div>') user_title = plugintools.find_single_match(login_info, 'title="([^"]+)') user_thumb = plugintools.find_single_match(login_info, 'src="([^"]+)') if regex == "": plugintools.log("regex= " + regex) plugintools.add_item(action="menuseries", title='Usuario: [COLOR lightyellow][I]' + user_title + '[/I][/COLOR]', url="", thumbnail=user_thumb, fanart=fanart, folder=True, isPlayable=False) plugintools.add_item(action="menuseries", title="Series", url="", thumbnail=thumbnail, fanart=fanart, folder=True, isPlayable=False) plugintools.add_item(action="menupeliculas", title="Películas", url="", thumbnail=thumbnail, fanart=fanart, folder=True, isPlayable=False) plugintools.add_item(action="listas_sigues", title="Listas que sigues", url="http://www.pordede.com/lists/following", thumbnail=thumbnail, fanart=fanart, folder=True, isPlayable=False) plugintools.add_item(action="tus_listas", title="Tus listas", url="http://www.pordede.com/lists/yours", thumbnail=thumbnail, fanart=fanart, folder=True, isPlayable=False) plugintools.add_item(action="listas_sigues", title="Top listas", url="http://www.pordede.com/lists", thumbnail=thumbnail, fanart=fanart, folder=True, isPlayable=False)
def checkforupdates(plugin_mode=True):
    logger.info("streamondemand-pureita.core.updater checkforupdates")

    # Download the remote version file
    logger.info("streamondemand-pureita.core.updater Verificando actualizaciones...")
    logger.info("streamondemand-pureita.core.updater Version remota: " + REMOTE_VERSION_FILE)
    data = scrapertools.cache_page(REMOTE_VERSION_FILE)
    '''
    <?xml version="1.0" encoding="utf-8" standalone="yes"?>
    <version>
        <name>streamondemand-pureita</name>
        <tag>4.0</tag>
        <version>4000</version>
        <date>20/03/2015</date>
        <changes>New release</changes>
    </version>
    '''
    version_publicada = scrapertools.find_single_match(data, "<version>([^<]+)</version>").strip()
    tag_publicada = scrapertools.find_single_match(data, "<tag>([^<]+)</tag>").strip()
    logger.info("streamondemand-pureita.core.updater version remota=" + tag_publicada + " " + version_publicada)

    # Read the file with the installed version
    localFileName = LOCAL_VERSION_FILE
    logger.info("streamondemand-pureita.core.updater fichero local version: " + localFileName)
    infile = open(localFileName)
    data = infile.read()
    infile.close()
    #logger.info("xml local="+data)

    version_local = scrapertools.find_single_match(data, "<version>([^<]+)</version>").strip()
    tag_local = scrapertools.find_single_match(data, "<tag>([^<]+)</tag>").strip()
    logger.info("streamondemand-pureita.core.updater version local=" + tag_local + " " + version_local)

    try:
        numero_version_publicada = int(version_publicada)
        numero_version_local = int(version_local)
    except:
        import traceback
        logger.info(traceback.format_exc())
        version_publicada = ""
        version_local = ""

    if version_publicada == "" or version_local == "":
        # Fallback: compare the dotted tags component by component.
        # The first differing component decides; without the breaks a lower
        # component later in the tag (e.g. local 3.0.0 vs remote 2.9.9)
        # would wrongly trigger an update.
        arraydescargada = tag_publicada.split(".")
        arraylocal = tag_local.split(".")

        # local 2.8.0 - remote 2.8.0 -> do not download
        # local 2.9.0 - remote 2.8.0 -> do not download
        # local 2.8.0 - remote 2.9.0 -> download
        if len(arraylocal) == len(arraydescargada):
            logger.info("caso 1")
            hayqueactualizar = False
            for i in range(0, len(arraylocal)):
                #print arraylocal[i], arraydescargada[i], int(arraydescargada[i]) > int(arraylocal[i])
                if int(arraydescargada[i]) > int(arraylocal[i]):
                    hayqueactualizar = True
                    break
                elif int(arraydescargada[i]) < int(arraylocal[i]):
                    break

        # local 2.8.0 - remote 2.8 -> do not download
        # local 2.9.0 - remote 2.8 -> do not download
        # local 2.8.0 - remote 2.9 -> download
        if len(arraylocal) > len(arraydescargada):
            logger.info("caso 2")
            hayqueactualizar = False
            for i in range(0, len(arraydescargada)):
                #print arraylocal[i], arraydescargada[i], int(arraydescargada[i]) > int(arraylocal[i])
                if int(arraydescargada[i]) > int(arraylocal[i]):
                    hayqueactualizar = True
                    break
                elif int(arraydescargada[i]) < int(arraylocal[i]):
                    break

        # local 2.8 - remote 2.8.8 -> download
        # local 2.9 - remote 2.8.8 -> do not download
        # local 2.10 - remote 2.9.9 -> do not download
        # local 2.5 - remote 3.0.0 -> download
        if len(arraylocal) < len(arraydescargada):
            logger.info("caso 3")
            hayqueactualizar = True
            for i in range(0, len(arraylocal)):
                #print arraylocal[i], arraydescargada[i], int(arraylocal[i]) > int(arraydescargada[i])
                if int(arraylocal[i]) > int(arraydescargada[i]):
                    hayqueactualizar = False
                    break
                elif int(arraylocal[i]) < int(arraydescargada[i]):
                    hayqueactualizar = True
                    break
    else:
        hayqueactualizar = (numero_version_publicada > numero_version_local)

    if hayqueactualizar:
        if plugin_mode:
            logger.info("streamondemand-pureita.core.updater actualizacion disponible")

            # Add the entry to the XBMC listing
            import xbmcgui
            thumbnail = IMAGES_PATH + "Crystal_Clear_action_info.png"
            logger.info("thumbnail=" + thumbnail)
            listitem = xbmcgui.ListItem("Scarica la versione " + tag_publicada, thumbnailImage=thumbnail)
            itemurl = '%s?action=update&version=%s' % (sys.argv[0], tag_publicada)
            import xbmcplugin
            xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=itemurl, listitem=listitem, isFolder=True)

            # Warn the user with a popup
            dialog = xbmcgui.Dialog()
            dialog.ok("Versione " + tag_publicada + " disponibile", "E' possibile scaricare la nuova versione del plugin\nattraverso l'opzione nel menù principale.")
        else:
            import xbmcgui
            yes_pressed = xbmcgui.Dialog().yesno("Versione " + tag_publicada + " disponibile", "Desideri aggiornare?")
            if yes_pressed:
                params = {"version": tag_publicada}
                update(params)
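# The dotted-tag fallback above implements "the first differing component
# decides". The same rule, written as a self-contained helper with the test
# cases from the comments above (tag_is_newer is a hypothetical name, not
# part of the updater):
def tag_is_newer(remote, local):
    r = [int(x) for x in remote.split(".")]
    l = [int(x) for x in local.split(".")]
    # Pad with zeros so "2.8" compares like "2.8.0" against "2.8.8"
    n = max(len(r), len(l))
    r += [0] * (n - len(r))
    l += [0] * (n - len(l))
    return r > l  # Python list comparison is already lexicographic

assert tag_is_newer("2.9.0", "2.8.0")       # local 2.8.0 - remote 2.9.0 -> download
assert not tag_is_newer("2.8.0", "2.9.0")   # local 2.9.0 - remote 2.8.0 -> do not download
assert tag_is_newer("2.8.8", "2.8")         # local 2.8 - remote 2.8.8 -> download
assert not tag_is_newer("2.9.9", "2.10")    # local 2.10 - remote 2.9.9 -> do not download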
def pordede_play(params):
    plugintools.log("[%s %s] Pordede: Buscando enlaces... %s " % (addonName, addonVersion, repr(params)))

    # Mark as watched
    #checkseen(item.extra.split("|")[1])

    # Make the call: POST the session token with the stored link as Referer
    headers = DEFAULT_HEADERS[:]
    headers.append(["Referer", params.get("extra").split("|")[1]])
    data = scrapertools.cache_page(params.get("url"), post="_s=" + params.get("extra").split("|")[0], headers=headers)

    url = plugintools.find_single_match(data, '<p class="links">\s+<a href="([^"]+)" target="_blank"')
    url = 'http://www.pordede.com' + url

    # Follow the redirect manually to get the final hoster URL
    headers = DEFAULT_HEADERS[:]
    headers.append(["Referer", url])
    media_url = scrapertools.downloadpage(url, headers=headers, header_to_get="location", follow_redirects=False)
    #plugintools.log("media_url="+media_url)

    # Dispatch to the matching server resolver. The match is a plain substring
    # test, so the original if/elif order is preserved: the first pattern that
    # appears anywhere in media_url wins.
    servers = [
        ("allmyvideos", allmyvideos), ("vidspot", vidspot), ("played.to", playedto),
        ("streamcloud", streamcloud), ("nowvideo", nowvideo), ("streamin.to", streaminto),
        ("vk", vk), ("tumi", tumi), ("veehd", veehd), ("powvideo", powvideo),
        ("novamov", novamov), ("gamovideo", gamovideo), ("moevideos", moevideos),
        ("movshare", movshare), ("movreel", movreel), ("videobam", videobam),
        ("videoweed", videoweed), ("streamable", streamable), ("rocvideo", rocvideo),
        ("realvid", realvid), ("netu", netu), ("videomega", videomega),
        ("video.tt", videott), ("flashx.tv", flashx),
    ]
    for pattern, resolver in servers:
        if media_url.find(pattern) >= 0:
            params = plugintools.get_params()
            params["url"] = media_url
            resolver(params)
            break
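# Design note: the ordered table above replaces the long if/elif chain without
# changing behavior, because entries are probed in the original order and, as
# in the chain, only the first match fires. Order matters when a URL happens
# to contain more than one pattern (short patterns like "vk" can occur inside
# longer URLs). A standalone sketch of the same first-match-wins dispatch,
# with dummy resolvers used purely for illustration:
demo_resolvers = [
    ("vidspot", lambda u: "vidspot"),
    ("vk", lambda u: "vk"),
]

def demo_dispatch(media_url):
    for pattern, resolver in demo_resolvers:
        if media_url.find(pattern) >= 0:
            return resolver(media_url)
    return None  # unknown host: no resolver called

assert demo_dispatch("http://vidspot.net/vkxyz") == "vidspot"  # first match wins
assert demo_dispatch("http://example.com/") is None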
def checkforupdates():
    import time
    logger.info("checkforupdates")

    # Plugin updates
    if config.get_setting("updatecheck2") == "true":
        logger.info("Comprobando actualizaciones de pelisalcarta")
        if os.path.isfile(lastupdatepath):
            UltimaConsulta = float(open(lastupdatepath, "rb").read())
        else:
            UltimaConsulta = 0

        if int(time.time() - UltimaConsulta) > 3600:
            REMOTE_VERSION_FILE = downloadurl + "/update/version.xml"
            LOCAL_VERSION_FILE = os.path.join(config.get_runtime_path(), "version.xml")
            patron = '<tag>([^<]+)</tag>'  # defined up front: it is needed again for the local file below

            data = scrapertools.cachePage(REMOTE_VERSION_FILE)
            if data:
                matches = re.compile(patron, re.DOTALL).findall(data)
                versiondescargada = matches[0]
            else:
                versiondescargada = "0.0"  # no remote data: a tag lower than any real version

            data = open(LOCAL_VERSION_FILE).read()
            matches = re.compile(patron, re.DOTALL).findall(data)
            versionlocal = matches[0]
            logger.info("Versión local: " + versionlocal)
            logger.info("Versión remota: " + versiondescargada)

            from distutils.version import StrictVersion
            if StrictVersion(versiondescargada) > StrictVersion(versionlocal):
                if guitools.Dialog_YesNo("pelisalacarta", "¡Hay una nueva versión lista para descargar!\nVersión actual: " + versionlocal + " - Nueva versión: " + versiondescargada + "\n¿Quieres instalarla ahora?"):
                    update(Item(url=versiondescargada))
                else:
                    if guitools.Dialog_YesNo("pelisalacarta", "¿No volver a mostrar en una hora?"):
                        open(lastupdatepath, "wb").write(str(time.time()))
                    logger.info("Opción seleccionada: No Descargar")
        else:
            logger.info("No preguntar hasta: " + str(3600 - int(time.time() - UltimaConsulta)) + " Segundos")

    # Channel updates
    if config.get_setting("updatechannels") == "true":
        logger.info("Comprobando actualizaciones de canales")
        data = scrapertools.cache_page(giturl + "/pelisalacarta/pelisalacarta/channels?ref=" + branch, headers=headers)
        RemoteJSONData = json.loads(data)
        if not os.path.isfile(channelspath):
            CreateChannelsIndex()
        f = open(channelspath, "r")
        JSONData = json.loads(f.read())
        f.close()

        downloadchannels = []
        if RemoteJSONData == JSONData:
            logger.info("Todos los canales estan actualizados")
        else:
            logger.info("Hay canales para actualizar")
            for file in RemoteJSONData:
                if file not in JSONData:
                    downloadchannels.append(file)

        logger.info("Comprobando actualizaciones de servers")
        data = scrapertools.cache_page(giturl + "/pelisalacarta/servers?ref=" + branch, headers=headers)
        RemoteJSONData = json.loads(data)
        if not os.path.isfile(serverspath):
            CreateServersIndex()
        f = open(serverspath, "r")
        JSONData = json.loads(f.read())
        f.close()

        downloadservers = []
        if RemoteJSONData == JSONData:
            logger.info("Todos los servers estan actualizados")
        else:
            logger.info("Hay servers para actualizar")
            for file in RemoteJSONData:
                if file not in JSONData:
                    downloadservers.append(file)

        if downloadchannels or downloadservers:
            dialog = guitools.Dialog_Progress("Actualizando...", "")
            import inspect  # imported once here; used below to reload the updated modules

            for file in downloadchannels:
                if dialog.IsCanceled():
                    break
                logger.info("Actualizando: " + file["name"])
                dialog.Actualizar(downloadchannels.index(file) * 100 / (len(downloadchannels) + len(downloadservers)), "Actualizando canal: " + file["name"].encode("utf8"))
                data = scrapertools.cachePage(file["download_url"])
                open(os.path.join(config.get_runtime_path(), "..", *file["path"].split("/")), "wb").write(data)
                for module in sys.modules.keys():
                    if inspect.ismodule(sys.modules[module]):
                        if file["name"].encode("utf8").replace(".py", "") in module:
                            reload(sys.modules[module])

            for file in downloadservers:
                if dialog.IsCanceled():
                    break
                logger.info("Actualizando: " + file["name"])
                dialog.Actualizar((downloadservers.index(file) + len(downloadchannels)) * 100 / (len(downloadchannels) + len(downloadservers)), "Actualizando server: " + file["name"].encode("utf8"))
                data = scrapertools.cachePage(file["download_url"])
                open(os.path.join(config.get_runtime_path(), "..", *file["path"].split("/")), "wb").write(data)
                for module in sys.modules.keys():
                    if inspect.ismodule(sys.modules[module]):
                        if file["name"].encode("utf8").replace(".py", "") in module:
                            reload(sys.modules[module])

            if dialog.IsCanceled():
                dialog.Cerrar()
                CreateChannelsIndex()
                CreateServersIndex()
                guitools.Dialog_OK("Actualizaciones", "¡El proceso se ha cancelado!")
            else:
                dialog.Cerrar()
                CreateChannelsIndex()
                CreateServersIndex()
                guitools.Dialog_OK("Actualizaciones", "¡Canales descargados con éxito!")
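# The once-per-hour throttle above persists a raw time.time() float in
# lastupdatepath and compares the elapsed seconds on the next run. The same
# pattern, reduced to a standalone sketch (the stamp path and helper names
# are hypothetical, not part of the addon):
import os
import time

DEMO_STAMP = "last_check.txt"   # the addon itself uses lastupdatepath
DEMO_INTERVAL = 3600            # one hour, as in checkforupdates

def demo_should_check():
    last = float(open(DEMO_STAMP, "rb").read()) if os.path.isfile(DEMO_STAMP) else 0
    return int(time.time() - last) > DEMO_INTERVAL

def demo_snooze():
    # Persist "now" so calls within the next hour return False
    open(DEMO_STAMP, "wb").write(str(time.time()))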