def mark_content_as_watched(item): logger.info() # logger.debug("item:\n" + item.tostring('\n')) if filetools.exists(item.nfo): head_nfo = filetools.read(item.nfo, 0, 1) it = Item().fromjson(filetools.read(item.nfo, 1)) if item.contentType == 'movie': name_file = os.path.splitext(os.path.basename(item.nfo))[0] elif item.contentType == 'episode': name_file = "%sx%s" % (item.contentSeason, str(item.contentEpisodeNumber).zfill(2)) else: name_file = item.contentTitle if not hasattr(it, 'library_playcounts'): it.library_playcounts = {} it.library_playcounts.update({name_file: item.playcount}) # se comprueba que si todos los episodios de una temporada están marcados, se marque tb la temporada if item.contentType != 'movie': it = check_season_playcount(it, item.contentSeason) # Guardamos los cambios en item.nfo if filetools.write(item.nfo, head_nfo + it.tojson()): item.infoLabels['playcount'] = item.playcount if item.contentType == 'tvshow': # Actualizar toda la serie new_item = item.clone(contentSeason=-1) mark_season_as_watched(new_item) if config.is_xbmc(): library.mark_content_as_watched_on_kodi(item, item.playcount) platformtools.itemlist_refresh()
def mark_content_as_watched(item): logger.info("pelisalacarta.channels.biblioteca mark_content_as_watched") # logger.debug("item:\n" + item.tostring('\n')) if filetools.exists(item.nfo): url_scraper = filetools.read(item.nfo, 0, 1) it = Item().fromjson(filetools.read(item.nfo, 1)) if item.contentType == "movie": name_file = os.path.splitext(os.path.basename(item.nfo))[0] elif item.contentType == "episode": name_file = item.contentSeason + "x" + item.contentEpisodeNumber else: name_file = item.contentTitle if not hasattr(it, "library_playcounts"): it.library_playcounts = {} it.library_playcounts.update({name_file: item.playcount}) # se comprueba que si todos los episodios de una temporada están marcados, se marque tb la temporada if item.contentType != "movie": it = check_season_playcount(it, item.contentSeason) # Guardamos los cambios en item.nfo if filetools.write(item.nfo, url_scraper + it.tojson()): item.infoLabels["playcount"] = item.playcount if item.contentType == "tvshow": # Actualizar toda la serie new_item = item.clone(contentSeason=-1) mark_season_as_watched(new_item) if config.is_xbmc(): library.mark_content_as_watched_on_kodi(item, item.playcount) platformtools.itemlist_refresh()
def mark_tvshow_as_updatable(item):
    logger.info("pelisalacarta.channels.biblioteca mark_tvshow_as_updatable")
    url_scraper = filetools.read(item.nfo, 0, 1)
    it = Item().fromjson(filetools.read(item.nfo, 1))
    it.active = item.active
    filetools.write(item.nfo, url_scraper + it.tojson())

    platformtools.itemlist_refresh()
def mark_season_as_watched(item): logger.info("pelisalacarta.channels.biblioteca mark_season_as_watched") # logger.debug("item:\n" + item.tostring('\n')) # Obtener el diccionario de episodios marcados f = filetools.join(item.path, "tvshow.nfo") url_scraper = filetools.read(f, 0, 1) it = Item().fromjson(filetools.read(f, 1)) if not hasattr(it, "library_playcounts"): it.library_playcounts = {} # Obtenemos los archivos de los episodios raiz, carpetas_series, ficheros = filetools.walk(item.path).next() # Marcamos cada uno de los episodios encontrados de esta temporada episodios_marcados = 0 for i in ficheros: if i.endswith(".strm"): season_episode = scrapertools.get_season_and_episode(i) if not season_episode: # El fichero no incluye el numero de temporada y episodio continue season, episode = season_episode.split("x") if int(item.contentSeason) == -1 or int(season) == int(item.contentSeason): name_file = os.path.splitext(os.path.basename(i))[0] it.library_playcounts[name_file] = item.playcount episodios_marcados += 1 if episodios_marcados: if int(item.contentSeason) == -1: # Añadimos todas las temporadas al diccionario item.library_playcounts for k in it.library_playcounts.keys(): if k.startswith("season"): it.library_playcounts[k] = item.playcount else: # Añadimos la temporada al diccionario item.library_playcounts it.library_playcounts["season %s" % item.contentSeason] = item.playcount # se comprueba que si todas las temporadas están vistas, se marque la serie como vista it = check_tvshow_playcount(it, item.contentSeason) # Guardamos los cambios en tvshow.nfo filetools.write(f, url_scraper + it.tojson()) item.infoLabels["playcount"] = item.playcount if config.is_xbmc(): # Actualizamos la BBDD de Kodi library.mark_season_as_watched_on_kodi(item, item.playcount) platformtools.itemlist_refresh()
def update_json_data(dict_series, filename):
    """
    Updates the json_data of a file with the given dictionary
    :param dict_series: dictionary with the shows
    :type dict_series: dict
    :param filename: name of the file to save to
    :type filename: str
    :return: fname, json_data
    :rtype: str, dict
    """
    logger.info("[filtertools.py] update_json_data")

    if not os.path.exists(os.path.join(config.get_data_path(), "settings_channels")):
        os.mkdir(os.path.join(config.get_data_path(), "settings_channels"))
    fname = os.path.join(config.get_data_path(), "settings_channels", filename + "_data.json")

    data = filetools.read(fname)
    dict_data = jsontools.load_json(data)  # it is a dict

    if dict_data:
        if TAG_TVSHOW_FILTER in dict_data:
            logger.info(" existe el key SERIES")
            dict_data[TAG_TVSHOW_FILTER] = dict_series
        else:
            logger.info(" NO existe el key SERIES")
            new_dict = {TAG_TVSHOW_FILTER: dict_series}
            dict_data.update(new_dict)
    else:
        logger.info(" NO es un dict")
        dict_data = {TAG_TVSHOW_FILTER: dict_series}

    json_data = jsontools.dump_json(dict_data)

    return fname, json_data
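# Hedged usage sketch (not part of the original module): update_json_data() only builds
# the target path and the serialized JSON; persisting the result is left to the caller.
# The helper name and the dictionary contents below are illustrative.
def _save_filtered_tvshows_example():
    dict_series = {"Some Show": {"active": True}}  # illustrative filter structure
    fname, json_data = update_json_data(dict_series, "somechannel")
    filetools.write(fname, json_data)  # filetools.write persists it, as elsewhere in this codebase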
def create_tvshows_from_json(_actualizado): logger.info("pelisalacarta.platformcode.library_service create_tvshows_from_json") fname = filetools.join(config.get_data_path(), library.TVSHOW_FILE) if filetools.exists(fname): if not _actualizado: platformtools.dialog_ok("Biblioteca: Actualizando formato", "Espere por favor mientras se completa el proceso") try: data = jsontools.loads(filetools.read(fname)) for tvshow in data: for channel in data[tvshow]["channels"]: serie = Item(contentSerieName=data[tvshow]["channels"][channel]["tvshow"], url=data[tvshow]["channels"][channel]["url"], channel=channel, action="episodios", title=data[tvshow]["name"], active=True) if not tvshow.startswith("t_"): serie.infoLabels["tmdb_id"] = tvshow library.save_library_tvshow(serie, list()) filetools.rename(fname, "series.json.old") except EnvironmentError: logger.info("ERROR al leer el archivo: {0}".format(fname))
def get_filtered_tvshows(from_channel): """ Obtiene las series filtradas de un canal :param from_channel: canal que tiene las series filtradas :type from_channel: str :return: dict con las series :rtype: dict """ logger.info("[filtertools.py] get_filtered_tvshows") dict_series = {} name_file = from_channel if not os.path.exists(os.path.join(config.get_data_path(), "settings_channels")): os.mkdir(os.path.join(config.get_data_path(), "settings_channels")) fname = os.path.join(config.get_data_path(), "settings_channels", name_file + "_data.json") data = filetools.read(fname) dict_data = jsontools.load_json(data) check_json_file(data, fname, dict_data) if TAG_TVSHOW_FILTER in dict_data: dict_series = dict_data[TAG_TVSHOW_FILTER] if DEBUG: logger.info("json_series: {0}".format(dict_series)) return dict_series
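# Hedged usage sketch: get_filtered_tvshows() is the read counterpart of update_json_data()
# above; both operate on settings_channels/<channel>_data.json. The channel name is an
# illustrative placeholder.
#
#   dict_series = get_filtered_tvshows("somechannel")
#   for tvshow_name, tvshow_filter in dict_series.items():
#       logger.info("%s -> %s" % (tvshow_name, tvshow_filter))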
def clean_ready(item): logger.info("pelisalacarta.channels.descargas clean_ready") for fichero in sorted(filetools.listdir(DOWNLOAD_LIST_PATH)): if fichero.endswith(".json"): download_item = Item().fromjson(filetools.read(os.path.join(DOWNLOAD_LIST_PATH, fichero))) if not item.contentType == "tvshow" or (item.contentSerieName == download_item.contentSerieName and item.contentChannel == download_item.contentChannel): if download_item.downloadStatus == STATUS_CODES.completed: filetools.remove(os.path.join(DOWNLOAD_LIST_PATH, fichero)) platformtools.itemlist_refresh()
def clean_all(item):
    logger.info()

    for fichero in sorted(filetools.listdir(DOWNLOAD_LIST_PATH)):
        if fichero.endswith(".json"):
            download_item = Item().fromjson(filetools.read(os.path.join(DOWNLOAD_LIST_PATH, fichero)))
            if not item.contentType == "tvshow" or (item.contentSerieName == download_item.contentSerieName
                                                    and item.contentChannel == download_item.contentChannel):
                filetools.remove(os.path.join(DOWNLOAD_LIST_PATH, fichero))

    platformtools.itemlist_refresh()
def read_nfo(path_nfo, item=None): """ Metodo para leer archivos nfo. Los arcivos nfo tienen la siguiente extructura: url_scraper | xml + item_json [url_scraper] y [xml] son opcionales, pero solo uno de ellos ha de existir siempre. @param path_nfo: ruta absoluta al archivo nfo @type path_nfo: str @param item: Si se pasa este parametro el item devuelto sera una copia de este con los valores de 'infoLabels', 'library_playcounts' y 'path' leidos del nfo @type: Item @return: Una tupla formada por la cabecera (head_nfo ='url_scraper'|'xml') y el objeto 'item_json' @rtype: tuple (str, Item) """ head_nfo = "" it = None if filetools.exists(path_nfo): head_nfo = filetools.read(path_nfo, 0, 1) data = filetools.read(path_nfo, 1) if not head_nfo.startswith('http'): # url_scraper no valida, xml presente head_nfo = '' # TODO devolver el xml en head_nfo import re data = re.sub(r"\n|\r|\t|\s{2}| ", "", data) data = re.sub("(<tvshow>|<movie>)(.*?)(</tvshow>|</movie>)", "", data) it_nfo = Item().fromjson(data) if item: it = item.clone() it.infoLabels = it_nfo.infoLabels if 'library_playcounts' in it_nfo: it.library_playcounts = it_nfo.library_playcounts if it_nfo.path: it.path = it_nfo.path else: it = it_nfo if 'fanart' in it.infoLabels: it.fanart = it.infoLabels['fanart'] return head_nfo, it
def clean_ready(item): logger.info("pelisalacarta.channels.descargas clean_ready") for fichero in sorted(filetools.listdir(item.url)): if fichero.endswith(".json"): download_item = Item().fromjson(filetools.read(os.path.join(item.url, fichero))) serie_name = "%s [%s]" % (download_item.contentSerieName, download_item.contentChannel) if not item.serie_name or item.serie_name == serie_name: if download_item.downloadStatus == 2: filetools.remove(os.path.join(item.url, fichero)) platformtools.itemlist_refresh()
def read_nfo(path_nfo, item=None):
    url_scraper = ""
    it = None

    if filetools.exists(path_nfo):
        url_scraper = filetools.read(path_nfo, 0, 1)
        if item:
            it = item.clone()
            it_nfo = Item().fromjson(filetools.read(path_nfo, 1))
            it.infoLabels = it_nfo.infoLabels
            if "library_playcounts" in it_nfo:
                it.library_playcounts = it_nfo.library_playcounts
            if it_nfo.path:
                it.path = it_nfo.path
        else:
            it = Item().fromjson(filetools.read(path_nfo, 1))

        if "fanart" in it.infoLabels:
            it.fanart = it.infoLabels["fanart"]

    return url_scraper, it
def menu(item): logger.info("pelisalacarta.channels.descargas menu") # Opciones disponibles para el menu op = ["Descargar", "Eliminar de la lista", "Reiniciar descarga"] opciones = [] # Opciones para el menu if item.downloadStatus == 0: # Sin descargar opciones.append(op[0]) # Descargar opciones.append(op[1]) # Eliminar de la lista if item.downloadStatus == 1: # descarga parcial opciones.append(op[0]) # Descargar opciones.append(op[2]) # Reiniciar descarga opciones.append(op[1]) # Eliminar de la lista if item.downloadStatus == 2: # descarga completada opciones.append(op[1]) # Eliminar de la lista opciones.append(op[2]) # Reiniciar descarga if item.downloadStatus == 3: # descarga con error opciones.append(op[2]) # Reiniciar descarga opciones.append(op[1]) # Eliminar de la lista # Mostramos el dialogo seleccion = platformtools.dialog_select("Elige una opción", opciones) # -1 es cancelar if seleccion == -1: return logger.info("pelisalacarta.channels.descargas menu opcion=%s" % (opciones[seleccion])) # Opcion Eliminar if opciones[seleccion] == op[1]: filetools.remove(item.path) # Opcion inicaiar descarga if opciones[seleccion] == op[0]: start_download(item) # Reiniciar descarga if opciones[seleccion] == op[2]: if filetools.isfile(os.path.join(config.get_setting("downloadpath"), item.downloadFilename)): filetools.remove(os.path.join(config.get_setting("downloadpath"), item.downloadFilename)) JSONItem = Item().fromjson(filetools.read(item.path)) JSONItem.downloadStatus = 0 JSONItem.downloadComplete = 0 JSONItem.downloadProgress = 0 JSONItem.downloadUrl = "" filetools.write(item.path, JSONItem.tojson()) platformtools.itemlist_refresh()
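# Status-code legend (inferred, not authoritative): the comments in menu() above and the
# numeric checks in the mainlist() variants below use 0 = not downloaded, 1 = partial
# download, 2 = completed, 3 = error. The symbolic STATUS_CODES names used elsewhere in
# this file (stoped, canceled, completed, error) map onto these values, but this excerpt
# does not confirm which of 0/1 corresponds to 'stoped' and which to 'canceled'.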
def create_tvshows_from_xml(): logger.info("pelisalacarta.platformcode.library_service create_tvshows_from_xml") fname = filetools.join(config.get_data_path(), library.TVSHOW_FILE_OLD) if filetools.exists(fname): platformtools.dialog_ok("Biblioteca: Se va a actualizar al nuevo formato", "Seleccione el nombre correcto de cada serie, si no está seguro pulse 'Cancelar'.", "Hay nuevas opciones en 'Biblioteca' y en la 'configuración' del addon.") filetools.rename(library.TVSHOWS_PATH, "SERIES_OLD") if not filetools.exists(library.TVSHOWS_PATH): filetools.mkdir(library.TVSHOWS_PATH) if filetools.exists(library.TVSHOWS_PATH): try: data = filetools.read(fname) for line in data.splitlines(): aux = line.rstrip('\n').split(",") tvshow = aux[0].strip() url = aux[1].strip() channel = aux[2].strip() serie = Item(contentSerieName=tvshow, url=url, channel=channel, action="episodios", title=tvshow, active=True) patron = "^(.+)[\s]\((\d{4})\)$" matches = re.compile(patron, re.DOTALL).findall(serie.contentSerieName) if matches: serie.infoLabels['title'] = matches[0][0] serie.infoLabels['year'] = matches[0][1] else: serie.infoLabels['title'] = tvshow library.save_library_tvshow(serie, list()) filetools.rename(fname, "series.xml.old") # Por ultimo limpia la libreria, por que las rutas anteriores ya no existen library.clean() except EnvironmentError: logger.info("ERROR al leer el archivo: {0}".format(fname)) else: logger.info("ERROR, no se ha podido crear la nueva carpeta de SERIES") else: logger.info("ERROR, no se ha podido renombrar la antigua carpeta de SERIES") return True return False
def create_tvshows_from_xml(): logger.info("fusionse.platformcode.library_service create_tvshows_from_xml") fname = filetools.join(config.get_data_path(), library.TVSHOW_FILE_OLD) if filetools.exists(fname): platformtools.dialog_ok("Libreria: Si aggiornerà al nuovo formato", "Selezionare il nome corretto di ogni serie, se non siete sicuri potete 'Annulla'.", "Ci sono nuove opzioni per la 'Libreria' in 'configurazione'.") filetools.rename(library.TVSHOWS_PATH, "SERIES_OLD") if not filetools.exists(library.TVSHOWS_PATH): filetools.mkdir(library.TVSHOWS_PATH) if filetools.exists(library.TVSHOWS_PATH): try: data = filetools.read(fname) for line in data.splitlines(): aux = line.rstrip('\n').split(",") tvshow = aux[0].strip() url = aux[1].strip() channel = aux[2].strip() serie = Item(contentSerieName=tvshow, url=url, channel=channel, action="episodios", title=tvshow, active=True) patron = "^(.+)[\s]\((\d{4})\)$" matches = re.compile(patron, re.DOTALL).findall(serie.contentSerieName) if matches: serie.infoLabels['title'] = matches[0][0] serie.infoLabels['year'] = matches[0][1] else: serie.infoLabels['title'] = tvshow library.save_library_tvshow(serie, list()) filetools.rename(fname, "series.xml.old") # Por ultimo limpia la libreria, por que las rutas anteriores ya no existen library.clean() except EnvironmentError: logger.info("ERROR al leer el archivo: {0}".format(fname)) else: logger.info("ERROR, no se ha podido crear la nueva carpeta de SERIES") else: logger.info("ERROR, no se ha podido renombrar la antigua carpeta de SERIES") return True return False
def read_favourites():
    favourites_list = []

    if filetools.exists(FAVOURITES_PATH):
        data = filetools.read(FAVOURITES_PATH)
        matches = scrapertools.find_multiple_matches(data, "<favourite([^<]*)</favourite>")
        for match in matches:
            name = scrapertools.find_single_match(match, 'name="([^"]*)')
            thumb = scrapertools.find_single_match(match, 'thumb="([^"]*)')
            data = scrapertools.find_single_match(match, '[^>]*>([^<]*)')
            favourites_list.append((name, thumb, data))

    return favourites_list
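# Hedged sketch of the favourites file that read_favourites() parses, inferred from the
# regular expressions above (it mirrors Kodi's favourites.xml; the entry below is an
# illustrative placeholder, not taken from a real install):
#
#   <favourites>
#       <favourite name="My favourite" thumb="special://home/thumb.png">PlayMedia(&quot;plugin://...&quot;)</favourite>
#   </favourites>
#
# Each tuple in the returned list is (name, thumb, data), e.g.:
#
#   for name, thumb, data in read_favourites():
#       logger.info("favourite '%s' -> %s" % (name, data))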
def get_episodios(item): logger.info("pelisalacarta.channels.biblioteca get_episodios") itemlist = [] # Obtenemos los archivos de los episodios raiz, carpetas_series, ficheros = filetools.walk(item.path).next() # Crear un item en la lista para cada strm encontrado for i in ficheros: # strm if i.endswith(".strm"): season, episode = scrapertools.get_season_and_episode(i).split("x") # Si hay q filtrar por temporada, ignoramos los capitulos de otras temporadas if item.filtrar_season and int(season) != int(item.contentSeason): continue epi = Item().fromurl(filetools.read(filetools.join(raiz, i))) epi.contentChannel = item.contentChannel epi.path = filetools.join(raiz, i) epi.title = i epi.channel = "biblioteca" epi.action = "findvideos" epi.contentEpisodeNumber = episode epi.contentSeason = season # fix sobreescribe el color del texto si viene pasado en el strm epi.text_color = "" # fix para que no se ejecute el método de play para la biblioteca de Kodi epi.strm = False itemlist.append(epi) # videos elif not i.endswith(".nfo") and not i.endswith(".json") and not i.endswith(".srt"): season, episode = scrapertools.get_season_and_episode(i).split("x") # Si hay q filtrar por temporada, ignoramos los capitulos de otras temporadas if item.filtrar_season and int(season) != int(item.contentSeason): continue epi = Item() epi.contentChannel = "local" epi.path = filetools.join(raiz, i) epi.title = i epi.channel = "biblioteca" epi.action = "play" epi.contentEpisodeNumber = episode epi.contentSeason = season itemlist.append(epi) library.set_infolabels_from_library(itemlist, tipo="Episodes") return sorted(itemlist, key=get_sort_temp_epi)
def download_all(item): time.sleep(0.5) for fichero in sorted(filetools.listdir(DOWNLOAD_LIST_PATH)): if fichero.endswith(".json"): download_item = Item(path=os.path.join(DOWNLOAD_LIST_PATH, fichero)).fromjson( filetools.read(os.path.join(DOWNLOAD_LIST_PATH, fichero))) if not item.contentType == "tvshow" or ( item.contentSerieName == download_item.contentSerieName and item.contentChannel == download_item.contentChannel): if download_item.downloadStatus in [STATUS_CODES.stoped, STATUS_CODES.canceled]: res = start_download(download_item) platformtools.itemlist_refresh() # Si se ha cancelado paramos if res == STATUS_CODES.canceled: break
def download_all(item): time.sleep(0.5) for fichero in sorted(filetools.listdir(item.url)): if fichero.endswith(".json"): download_item = Item().fromjson(filetools.read(os.path.join(item.url, fichero))) serie_name = "%s [%s]" % (download_item.contentSerieName, download_item.contentChannel) if not item.serie_name or item.serie_name == serie_name: download_item.path = os.path.join(item.url, fichero) if download_item.downloadStatus in [0, 1]: res = start_download(download_item) platformtools.itemlist_refresh() # Si se ha cancelado paramos if res == 1: break
def restart_error(item): logger.info("pelisalacarta.channels.descargas restart_error") for fichero in sorted(filetools.listdir(DOWNLOAD_LIST_PATH)): if fichero.endswith(".json"): download_item = Item().fromjson(filetools.read(os.path.join(DOWNLOAD_LIST_PATH, fichero))) if not item.contentType == "tvshow" or (item.contentSerieName == download_item.contentSerieName and item.contentChannel == download_item.contentChannel): if download_item.downloadStatus == STATUS_CODES.error: if filetools.isfile(os.path.join(config.get_setting("downloadpath"), download_item.downloadFilename)): filetools.remove(os.path.join(config.get_setting("downloadpath"), download_item.downloadFilename)) update_json(item.path, {"downloadStatus" : STATUS_CODES.stoped, "downloadComplete" : 0 , "downloadProgress" : 0}) platformtools.itemlist_refresh()
def get_cookie_value(): """ Obtiene las cookies de cloudflare """ cookie_file = path.join(config.get_data_path(), 'cookies.dat') cookie_data = filetools.read(cookie_file) cfduid = scrapertools.find_single_match( cookie_data, r"animeflv.*?__cfduid\s+([A-Za-z0-9\+\=]+)") cfduid = "__cfduid=" + cfduid + ";" cf_clearance = scrapertools.find_single_match( cookie_data, r"animeflv.*?cf_clearance\s+([A-Za-z0-9\+\=\-]+)") cf_clearance = " cf_clearance=" + cf_clearance cookies_value = cfduid + cf_clearance return cookies_value
def restart_error(item):
    logger.info("pelisalacarta.channels.descargas restart_error")

    for fichero in sorted(filetools.listdir(item.url)):
        if fichero.endswith(".json"):
            download_item = Item().fromjson(filetools.read(os.path.join(item.url, fichero)))
            serie_name = "%s [%s]" % (download_item.contentSerieName, download_item.contentChannel)
            if not item.serie_name or item.serie_name == serie_name:
                if download_item.downloadStatus == 3:
                    if filetools.isfile(os.path.join(config.get_setting("downloadpath"),
                                                     download_item.downloadFilename)):
                        filetools.remove(os.path.join(config.get_setting("downloadpath"),
                                                      download_item.downloadFilename))

                    download_item.downloadStatus = 0
                    download_item.downloadComplete = 0
                    download_item.downloadProgress = 0
                    download_item.downloadUrl = ""
                    filetools.write(os.path.join(item.url, fichero), download_item.tojson())

    platformtools.itemlist_refresh()
def mark_as_watched_on_strm(item): """ Marca un .strm como "visto" añadiendo el parametro "playcount" a los infoLabels del strm. @param item: item que queremos marcar como visto @type item: item """ logger.info("streamondemand.platformcode.library mark_as_watched_on_strm") if not config.get_setting("mark_as_watched") == "true": return xbmc.sleep(5000) while xbmc.Player().isPlaying(): tiempo_actual = xbmc.Player().getTime() totaltime = xbmc.Player().getTotalTime() condicion = int(config.get_setting("watched_setting")) if condicion == 0: # '5 minutos' mark_time = 300 elif condicion == 1: # '30%' mark_time = totaltime * 0.3 elif condicion == 2: # '50%' mark_time = totaltime * 0.5 elif condicion == 3: # '80%' mark_time = totaltime * 0.8 logger.debug(str(mark_time)) if tiempo_actual > mark_time: strm = Item().fromurl(filetools.read(item.path)) if not type(strm.infoLabels) == dict: strm.infoLabels = {} strm.infoLabels["playcount"] = 1 addon_name = sys.argv[0].strip() if not addon_name: addon_name = "plugin://plugin.video.pelisalacarta/" filetools.write(item.path + ".json", strm.tojson()) filetools.write(item.path, '{addon}?{url}'.format(addon=addon_name, url=strm.tourl())) break xbmc.sleep(30000)
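# Worked example for the thresholds above (xbmc.Player() times are in seconds): for a
# 7200 s (2 h) video, watched_setting 0 ('5 minutos') marks at 300 s, 1 ('30%') at
# 2160 s, 2 ('50%') at 3600 s and 3 ('80%') at 5760 s of playback.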
def read_nfo(path_nfo, item=None): """ Metodo para leer archivos nfo. Los arcivos nfo tienen la siguiente extructura: url_scraper | xml + item_json [url_scraper] y [xml] son opcionales, pero solo uno de ellos ha de existir siempre. @param path_nfo: ruta absoluta al archivo nfo @type path_nfo: str @param item: Si se pasa este parametro el item devuelto sera una copia de este con los valores de 'infoLabels', 'library_playcounts' y 'path' leidos del nfo @type: Item @return: Una tupla formada por la cabecera (head_nfo ='url_scraper'|'xml') y el objeto 'item_json' @rtype: tuple (str, Item) """ head_nfo = "" it = None data = filetools.read(path_nfo) if data: head_nfo = data.splitlines()[0] + "\n" data = "\n".join(data.splitlines()[1:]) it_nfo = Item().fromjson(data) if item: it = item.clone() it.infoLabels = it_nfo.infoLabels if 'library_playcounts' in it_nfo: it.library_playcounts = it_nfo.library_playcounts if it_nfo.path: it.path = it_nfo.path else: it = it_nfo if 'fanart' in it.infoLabels: it.fanart = it.infoLabels['fanart'] return head_nfo, it
def show_channels(item): logger.info() itemlist = [] context = [{"title": "Eliminar este canal", "action": "remove_channel", "channel": "community"}] path = filetools.join(config.get_data_path(), 'community_channels.json') json = jsontools.load(filetools.read(path)) itemlist.append(Item(channel=item.channel, title='Agregar un canal', action='add_channel', thumbnail=get_thumb('add.png'))) for key, channel in list(json['channels'].items()): if 'poster' in channel: poster = channel['poster'] else: poster = '' itemlist.append(Item(channel=item.channel, title=channel['channel_name'], url=channel['path'], thumbnail=poster, action='show_menu', channel_id = key, context=context)) return itemlist
def verify_copy_folders(custom_code_dir, custom_code_json_path): logger.info() #verificamos si es una nueva versión de Alfa instalada o era la existente. Si es la existente, nos vamos sin hacer nada json_data_file = filetools.join(custom_code_json_path, json_data_file_name) json_data = jsontools.load(filetools.read(json_data_file)) current_version = config.get_addon_version(with_fix=False) if current_version == json_data['addon_version']: return #Ahora copiamos los archivos desde el área de Userdata, Custom_code, sobre las carpetas del add-on for root, folders, files in os.walk(custom_code_dir): for file in files: input_file = filetools.join(root, file) output_file = input_file.replace(custom_code_dir, custom_code_json_path) if filetools.copy(input_file, output_file, silent=True) == False: return #Guardamaos el json con la versión actual de Alfa, para no volver a hacer la copia hasta la nueva versión json_data['addon_version'] = current_version filetools.write(json_data_file, jsontools.dump(json_data)) return
def reactivate_unrar(init=False, mute=True): logger.info() from servers.torrent import torrent_dirs torrent_paths = torrent_dirs() download_paths = [] for torr_client, save_path_videos in list(torrent_paths.items()): if 'BT' not in torr_client and 'MCT' not in torr_client: torr_client = torr_client.lower() if '_' not in torr_client and '_web' not in torr_client and save_path_videos \ and save_path_videos not in str(download_paths): download_paths.append((torr_client, save_path_videos)) # Agregamos el path para este Cliente # Borramos archivos de control "zombies" rar_control = {} if filetools.exists(filetools.join(save_path_videos, '_rar_control.json')): rar_control = jsontools.load(filetools.read(filetools.join(save_path_videos, '_rar_control.json'))) if rar_control and len(rar_control['rar_files']) == 1: ret = filetools.remove(filetools.join(save_path_videos, '_rar_control.json'), silent=True) config.set_setting("torrent_paths_list", download_paths, channel="downloads") search_for_unrar_in_error(download_paths, init=init)
def last_password_search(pass_path): logger.info() # Busca en el Path de extracción si hay algún archivo que contenga la URL donde pueda estar la CONTRASEÑA password = '' patron_url = '(http.*\:\/\/(?:www.)?\w+\.\w+\/.*?[\n|\r|$])' patron_pass = '******' try: pass_path_list = filetools.listdir(pass_path) for file in pass_path_list: if 'contrase' in file.lower() and '.rar' not in file: file_pass = filetools.read(filetools.join(pass_path, file)) url = scrapertools.find_single_match(file_pass, patron_url) if url: data = re.sub(r"\n|\r|\t|\s{2}|(<!--.*?-->)", "", httptools.downloadpage(url).data) password = scrapertools.find_single_match(data, patron_pass) if password: break except: log(traceback.format_exc(1)) log("##### Contraseña extraída: %s" % password) return password
def check_blacklist(domain, expiration=0): res = True if not filetools.exists(PATH_BL): return res try: expiration_default = 30 bl_data = jsontools.load(filetools.read(PATH_BL)) bl_data_clean = bl_data.copy() if not expiration: expiration = config.get_setting('cf_assistant_bl_expiration', default=expiration_default) * 60 config.set_setting('cf_assistant_bl_expiration', expiration_default) expiration = expiration_default * 60 else: expiration = expiration * 60 time_today = time.time() if bl_data: for domain_reg, time_rec in list(bl_data_clean.items()): if time_today > time_rec + expiration: del bl_data[domain_reg] filetools.write(PATH_BL, jsontools.dump(bl_data)) for domain_reg, time_rec in list(bl_data.items()): if domain in domain_reg: res = False break else: res = True except: logger.error(traceback.format_exc()) filetools.remove(PATH_BL) res = True return res
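# Hedged sketch (illustrative values) of the blacklist file at PATH_BL consumed by
# check_blacklist(): a flat JSON object mapping a domain to the epoch time at which it
# was blacklisted. Entries older than the expiration window (cf_assistant_bl_expiration,
# in minutes; 30 by default) are pruned on each call.
#
#   {"blocked-example.com": 1600000000.0}
#
# While the entry is current, check_blacklist("blocked-example.com") returns False
# (blacklisted); once it expires, the entry is removed and the call returns True.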
def obtener_data(url, referer=''): headers = {} if referer != '': headers['Referer'] = referer data = httptools.downloadpage(url, headers=headers).data if "Javascript is required" in data: ck = decodificar_cookie(data) logger.info("Javascript is required. Cookie necesaria %s" % ck) headers['Cookie'] = ck data = httptools.downloadpage(url, headers=headers).data # Guardar la cookie y eliminar la que pudiera haber anterior cks = ck.split("=") cookie_file = filetools.join(config.get_data_path(), 'cookies.dat') cookie_data = filetools.read(cookie_file) cookie_data = re.sub( r"www\.pelispedia\.tv\tFALSE\t/\tFALSE\t\tsucuri_(.*)\n", "", cookie_data) cookie_data += "www.pelispedia.tv\tFALSE\t/\tFALSE\t\t%s\t%s\n" % ( cks[0], cks[1]) filetools.write(cookie_file, cookie_data) logger.info("Añadida cookie %s con valor %s" % (cks[0], cks[1])) return data
def mainlist(item): logger.info("pelisalacarta.channels.descargas mainlist") itemlist = [] #Lista de archivos for file in sorted(filetools.listdir(DOWNLOAD_LIST_PATH)): #Saltamos todos los que no sean JSON if not file.endswith(".json"): continue #cargamos el item file = os.path.join(DOWNLOAD_LIST_PATH, file) i = Item(path = file).fromjson(filetools.read(file)) i.thumbnail = i.contentThumbnail #Listado principal if not item.contentType == "tvshow": # Series if i.contentType == "episode": #Comprobamos que la serie no este ya en el itemlist if not filter(lambda x: x.contentSerieName == i.contentSerieName and x.contentChannel == i.contentChannel, itemlist): title = TITLE_TVSHOW % (STATUS_COLORS[i.downloadStatus], i.downloadProgress, i.contentSerieName, i.contentChannel) itemlist.append(Item(title = title, channel= "descargas", action= "mainlist", contentType = "tvshow", contentSerieName = i.contentSerieName, contentChannel = i.contentChannel, downloadStatus = i.downloadStatus, downloadProgress = [i.downloadProgress], fanart = i.fanart, thumbnail = i.thumbnail)) else: s = filter(lambda x: x.contentSerieName == i.contentSerieName and x.contentChannel == i.contentChannel, itemlist)[0] s.downloadProgress.append(i.downloadProgress) downloadProgress = sum(s.downloadProgress) / len(s.downloadProgress) if not s.downloadStatus in [STATUS_CODES.error, STATUS_CODES.canceled] and not i.downloadStatus in [STATUS_CODES.completed, STATUS_CODES.stoped]: s.downloadStatus = i.downloadStatus s.title = TITLE_TVSHOW % (STATUS_COLORS[s.downloadStatus], downloadProgress, i.contentSerieName, i.contentChannel) # Peliculas elif i.contentType == "movie" or i.contentType == "video": i.title = TITLE_FILE % (STATUS_COLORS[i.downloadStatus], i.downloadProgress, i.contentTitle) itemlist.append(i) #Listado dentro de una serie else: if i.contentType == "episode" and i.contentSerieName == item.contentSerieName and i.contentChannel == item.contentChannel: i.title = TITLE_FILE % (STATUS_COLORS[i.downloadStatus], i.downloadProgress, "%dx%0.2d: %s" % (i.contentSeason, i.contentEpisodeNumber,i.contentTitle)) itemlist.append(i) estados = [i.downloadStatus for i in itemlist] # Si hay alguno completado if 2 in estados: itemlist.insert(0, Item(channel=item.channel, action="clean_ready", title="Eliminar descargas completadas", contentType = item.contentType, contentChannel=item.contentChannel, contentSerieName = item.contentSerieName)) # Si hay alguno con error if 3 in estados: itemlist.insert(0, Item(channel=item.channel, action="restart_error", title="Reiniciar descargas con error", contentType = item.contentType, contentChannel=item.contentChannel, contentSerieName = item.contentSerieName)) # Si hay alguno pendiente if 1 in estados or 0 in estados: itemlist.insert(0, Item(channel=item.channel, action="download_all", title="Descargar todo", contentType = item.contentType, contentChannel=item.contentChannel, contentSerieName = item.contentSerieName)) if len(itemlist): itemlist.insert(0, Item(channel=item.channel, action="clean_all", title="Eliminar todo", contentType = item.contentType, contentChannel=item.contentChannel, contentSerieName = item.contentSerieName)) return itemlist
def mainlist(item): logger.info() itemlist = [] # Lista de archivos for file in sorted(filetools.listdir(DOWNLOAD_LIST_PATH)): # Saltamos todos los que no sean JSON if not file.endswith(".json"): continue # cargamos el item file = os.path.join(DOWNLOAD_LIST_PATH, file) i = Item(path=file).fromjson(filetools.read(file)) i.thumbnail = i.contentThumbnail # Listado principal if not item.contentType == "tvshow": # Series if i.contentType == "episode": # Comprobamos que la serie no este ya en el itemlist if not filter( lambda x: x.contentSerieName == i.contentSerieName and x.contentChannel == i.contentChannel, itemlist): title = TITLE_TVSHOW % ( STATUS_COLORS[i.downloadStatus], i.downloadProgress, i.contentSerieName, i.contentChannel) itemlist.append( Item(title=title, channel="downloads", action="mainlist", contentType="tvshow", contentSerieName=i.contentSerieName, contentChannel=i.contentChannel, downloadStatus=i.downloadStatus, downloadProgress=[i.downloadProgress], fanart=i.fanart, thumbnail=i.thumbnail)) else: s = \ filter( lambda x: x.contentSerieName == i.contentSerieName and x.contentChannel == i.contentChannel, itemlist)[0] s.downloadProgress.append(i.downloadProgress) downloadProgress = sum(s.downloadProgress) / len( s.downloadProgress) if not s.downloadStatus in [ STATUS_CODES.error, STATUS_CODES.canceled ] and not i.downloadStatus in [ STATUS_CODES.completed, STATUS_CODES.stoped ]: s.downloadStatus = i.downloadStatus s.title = TITLE_TVSHOW % ( STATUS_COLORS[s.downloadStatus], downloadProgress, i.contentSerieName, i.contentChannel) # Peliculas elif i.contentType == "movie" or i.contentType == "video": i.title = TITLE_FILE % (STATUS_COLORS[i.downloadStatus], i.downloadProgress, i.contentTitle) itemlist.append(i) # Listado dentro de una serie else: if i.contentType == "episode" and i.contentSerieName == item.contentSerieName and i.contentChannel == item.contentChannel: i.title = TITLE_FILE % ( STATUS_COLORS[i.downloadStatus], i.downloadProgress, "%dx%0.2d: %s" % (i.contentSeason, i.contentEpisodeNumber, i.contentTitle)) itemlist.append(i) estados = [i.downloadStatus for i in itemlist] # Si hay alguno completado if 2 in estados: itemlist.insert( 0, Item(channel=item.channel, action="clean_ready", title=config.get_localized_string(70218), contentType=item.contentType, contentChannel=item.contentChannel, contentSerieName=item.contentSerieName, text_color="sandybrown")) # Si hay alguno con error if 3 in estados: itemlist.insert( 0, Item(channel=item.channel, action="restart_error", title=config.get_localized_string(70219), contentType=item.contentType, contentChannel=item.contentChannel, contentSerieName=item.contentSerieName, text_color="orange")) # Si hay alguno pendiente if 1 in estados or 0 in estados: itemlist.insert( 0, Item(channel=item.channel, action="download_all", title=support.typo(config.get_localized_string(70220), 'bold'), contentType=item.contentType, contentChannel=item.contentChannel, contentSerieName=item.contentSerieName)) if len(itemlist): itemlist.insert( 0, Item(channel=item.channel, action="clean_all", title=support.typo(config.get_localized_string(70221), 'bold'), contentType=item.contentType, contentChannel=item.contentChannel, contentSerieName=item.contentSerieName)) if not item.contentType == "tvshow" and config.get_setting( "browser", "downloads") == True: itemlist.insert( 0, Item(channel=item.channel, action="browser", title=support.typo(config.get_localized_string(70222), 'bold'), url=DOWNLOAD_PATH)) if not item.contentType == "tvshow": itemlist.insert( 0, 
Item(channel=item.channel, action="settings", title=support.typo(config.get_localized_string(70223), 'bold color kod'))) return itemlist
def update_json(path, params):
    item = Item().fromjson(filetools.read(path))
    item.__dict__.update(params)
    filetools.write(path, item.tojson())
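# Minimal usage sketch: update_json() is the generic "patch this persisted Item" helper.
# restart_error() above uses it to reset a failed download, e.g.:
#
#   update_json(item.path, {"downloadStatus": STATUS_CODES.stoped,
#                           "downloadComplete": 0,
#                           "downloadProgress": 0})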
def set_search_temp(item):
    if filetools.isfile(temp_search_file) and config.get_setting('videolibrary_kodi'):
        f = '[V],' + filetools.read(temp_search_file)
        filetools.write(temp_search_file, f)
def save_episodes(path, episodelist, serie, silent=False, overwrite=True): """ guarda en la ruta indicada todos los capitulos incluidos en la lista episodelist @type path: str @param path: ruta donde guardar los episodios @type episodelist: list @param episodelist: listado de items que representan los episodios que se van a guardar. @type serie: item @param serie: serie de la que se van a guardar los episodios @type silent: bool @param silent: establece si se muestra la notificación @param overwrite: permite sobreescribir los ficheros existentes @type overwrite: bool @rtype insertados: int @return: el número de episodios insertados @rtype sobreescritos: int @return: el número de episodios sobreescritos @rtype fallidos: int @return: el número de episodios fallidos """ logger.info() # No hay lista de episodios, no hay nada que guardar if not len(episodelist): logger.info("No hay lista de episodios, salimos sin crear strm") return 0, 0, 0 insertados = 0 sobreescritos = 0 fallidos = 0 news_in_playcounts = {} # Listamos todos los ficheros de la serie, asi evitamos tener que comprobar si existe uno por uno raiz, carpetas_series, ficheros = next(filetools.walk(path)) ficheros = [filetools.join(path, f) for f in ficheros] nostrm_episodelist = [] for root, folders, files in filetools.walk(path): for file in files: season_episode = scrapertools.get_season_and_episode(file) if season_episode == "" or filetools.exists(filetools.join(path, "%s.strm" % season_episode)): continue nostrm_episodelist.append(season_episode) nostrm_episodelist = sorted(set(nostrm_episodelist)) # Silent es para no mostrar progreso (para videolibrary_service) if not silent: # progress dialog p_dialog = platformtools.dialog_progress(config.get_localized_string(20000), config.get_localized_string(60064)) p_dialog.update(0, config.get_localized_string(60065)) channel_alt = generictools.verify_channel(serie.channel) #Preparamos para añadir las urls de emergencia emergency_urls_stat = config.get_setting("emergency_urls", channel_alt) #El canal quiere urls de emergencia? emergency_urls_succ = False channel = __import__('channels.%s' % channel_alt, fromlist=["channels.%s" % channel_alt]) if serie.torrent_caching_fail: #Si el proceso de conversión ha fallado, no se cachean emergency_urls_stat = 0 del serie.torrent_caching_fail new_episodelist = [] # Obtenemos el numero de temporada y episodio y descartamos los q no lo sean tags = [] if config.get_setting("enable_filter", "videolibrary"): tags = [x.strip() for x in config.get_setting("filters", "videolibrary").lower().split(",")] for e in episodelist: headers = {} if e.headers: headers = e.headers if tags != [] and tags != None and any(tag in e.title.lower() for tag in tags): continue try: season_episode = scrapertools.get_season_and_episode(e.title) if not season_episode: continue # Si se ha marcado la opción de url de emergencia, se añade ésta a cada episodio después de haber ejecutado Findvideos del canal if e.emergency_urls and isinstance(e.emergency_urls, dict): del e.emergency_urls #Borramos trazas anteriores json_path = filetools.join(path, ("%s [%s].json" % (season_episode, e.channel)).lower()) #Path del .json del episodio if emergency_urls_stat == 1 and not e.emergency_urls and e.contentType == 'episode': #Guardamos urls de emergencia? 
if not silent: p_dialog.update(0, 'Cacheando enlaces y archivos .torrent...', e.title) #progress dialog if json_path in ficheros: #Si existe el .json sacamos de ahí las urls if overwrite: #pero solo si se se sobrescriben los .json json_epi = Item().fromjson(filetools.read(json_path)) #Leemos el .json if json_epi.emergency_urls: #si existen las urls de emergencia... e.emergency_urls = json_epi.emergency_urls #... las copiamos else: #y si no... e = emergency_urls(e, channel, json_path, headers=headers) #... las generamos else: e = emergency_urls(e, channel, json_path, headers=headers) #Si el episodio no existe, generamos las urls if e.emergency_urls: #Si ya tenemos urls... emergency_urls_succ = True #... es un éxito y vamos a marcar el .nfo elif emergency_urls_stat == 2 and e.contentType == 'episode': #Borramos urls de emergencia? if e.emergency_urls: del e.emergency_urls emergency_urls_succ = True #... es un éxito y vamos a marcar el .nfo elif emergency_urls_stat == 3 and e.contentType == 'episode': #Actualizamos urls de emergencia? if not silent: p_dialog.update(0, 'Cacheando enlaces y archivos .torrent...', e.title) #progress dialog e = emergency_urls(e, channel, json_path, headers=headers) #generamos las urls if e.emergency_urls: #Si ya tenemos urls... emergency_urls_succ = True #... es un éxito y vamos a marcar el .nfo if not e.infoLabels["tmdb_id"] or (serie.infoLabels["tmdb_id"] and e.infoLabels["tmdb_id"] != serie.infoLabels["tmdb_id"]): #en series multicanal, prevalece el infolabels... e.infoLabels = serie.infoLabels #... del canal actual y no el del original e.contentSeason, e.contentEpisodeNumber = season_episode.split("x") if e.videolibray_emergency_urls: del e.videolibray_emergency_urls if e.channel_redir: del e.channel_redir #... y se borran las marcas de redirecciones new_episodelist.append(e) except: if e.contentType == 'episode': logger.error("No se ha podido guardar las urls de emergencia de %s en la videoteca" % e.contentTitle) logger.error(traceback.format_exc()) continue # No hay lista de episodios, no hay nada que guardar if not len(new_episodelist): logger.info("No hay lista de episodios, salimos sin crear strm") return 0, 0, 0 # fix float porque la division se hace mal en python 2.x try: t = float(100) / len(new_episodelist) except: t = 0 last_season_episode = '' for i, e in enumerate(scraper.sort_episode_list(new_episodelist)): if not silent: p_dialog.update(int(math.ceil((i + 1) * t)), config.get_localized_string(60064), e.title) high_sea = e.contentSeason high_epi = e.contentEpisodeNumber if scrapertools.find_single_match(e.title, '[a|A][l|L]\s*(\d+)'): high_epi = int(scrapertools.find_single_match(e.title, 'al\s*(\d+)')) max_sea = e.infoLabels["number_of_seasons"] max_epi = 0 if e.infoLabels["number_of_seasons"] and (e.infoLabels["temporada_num_episodios"] or e.infoLabels["number_of_seasons"] == 1): if e.infoLabels["number_of_seasons"] == 1 and e.infoLabels["number_of_episodes"]: max_epi = e.infoLabels["number_of_episodes"] else: max_epi = e.infoLabels["temporada_num_episodios"] season_episode = "%sx%s" % (e.contentSeason, str(e.contentEpisodeNumber).zfill(2)) strm_path = filetools.join(path, "%s.strm" % season_episode) nfo_path = filetools.join(path, "%s.nfo" % season_episode) json_path = filetools.join(path, ("%s [%s].json" % (season_episode, e.channel)).lower()) if season_episode in nostrm_episodelist: logger.error('Error en la estructura de la Videoteca: Serie ' + serie.contentSerieName + ' ' + season_episode) continue strm_exists = strm_path in ficheros 
nfo_exists = nfo_path in ficheros json_exists = json_path in ficheros if not strm_exists: # Si no existe season_episode.strm añadirlo item_strm = Item(action='play_from_library', channel='videolibrary', strm_path=strm_path.replace(TVSHOWS_PATH, ""), infoLabels={}) item_strm.contentSeason = e.contentSeason item_strm.contentEpisodeNumber = e.contentEpisodeNumber item_strm.contentType = e.contentType item_strm.contentTitle = season_episode # FILTERTOOLS if item_strm.list_language: # si tvshow.nfo tiene filtro se le pasa al item_strm que se va a generar if "library_filter_show" in serie: item_strm.library_filter_show = serie.library_filter_show if item_strm.library_filter_show == "": logger.error("Se ha producido un error al obtener el nombre de la serie a filtrar") # logger.debug("item_strm" + item_strm.tostring('\n')) # logger.debug("serie " + serie.tostring('\n')) strm_exists = filetools.write(strm_path, '%s?%s' % (addon_name, item_strm.tourl())) item_nfo = None if not nfo_exists and e.infoLabels["code"]: # Si no existe season_episode.nfo añadirlo scraper.find_and_set_infoLabels(e) head_nfo = scraper.get_nfo(e) item_nfo = e.clone(channel="videolibrary", url="", action='findvideos', strm_path=strm_path.replace(TVSHOWS_PATH, "")) if item_nfo.emergency_urls: del item_nfo.emergency_urls #Solo se mantiene en el .json del episodio nfo_exists = filetools.write(nfo_path, head_nfo + item_nfo.tojson()) # Solo si existen season_episode.nfo y season_episode.strm continuamos if nfo_exists and strm_exists: if not json_exists or overwrite: # Obtenemos infoLabel del episodio if not item_nfo: head_nfo, item_nfo = read_nfo(nfo_path) # En series multicanal, prevalece el infolabels del canal actual y no el del original if not e.infoLabels["tmdb_id"] or (item_nfo.infoLabels["tmdb_id"] \ and e.infoLabels["tmdb_id"] != item_nfo.infoLabels["tmdb_id"]): e.infoLabels = item_nfo.infoLabels if filetools.write(json_path, e.tojson()): if not json_exists: logger.info("Insertado: %s" % json_path) insertados += 1 # Marcamos episodio como no visto news_in_playcounts[season_episode] = 0 # Marcamos la temporada como no vista news_in_playcounts["season %s" % e.contentSeason] = 0 # Marcamos la serie como no vista # logger.debug("serie " + serie.tostring('\n')) news_in_playcounts[serie.contentSerieName] = 0 else: logger.info("Sobreescrito: %s" % json_path) sobreescritos += 1 else: logger.info("Fallido: %s" % json_path) fallidos += 1 else: logger.info("Fallido: %s" % json_path) fallidos += 1 if not silent and p_dialog.iscanceled(): break #logger.debug('high_sea x high_epi: %sx%s' % (str(high_sea), str(high_epi))) #logger.debug('max_sea x max_epi: %sx%s' % (str(max_sea), str(max_epi))) if not silent: p_dialog.close() if news_in_playcounts or emergency_urls_succ or serie.infoLabels["status"] == "Ended" or serie.infoLabels["status"] == "Canceled": # Si hay nuevos episodios los marcamos como no vistos en tvshow.nfo ... 
tvshow_path = filetools.join(path, "tvshow.nfo") try: import datetime head_nfo, tvshow_item = read_nfo(tvshow_path) tvshow_item.library_playcounts.update(news_in_playcounts) #Si la operación de insertar/borrar urls de emergencia en los .jsons de los episodios ha tenido éxito, se marca el .nfo if emergency_urls_succ: if tvshow_item.emergency_urls and not isinstance(tvshow_item.emergency_urls, dict): del tvshow_item.emergency_urls if emergency_urls_stat in [1, 3]: #Operación de guardar/actualizar enlaces if not tvshow_item.emergency_urls: tvshow_item.emergency_urls = dict() if tvshow_item.library_urls.get(serie.channel, False): tvshow_item.emergency_urls.update({serie.channel: True}) elif emergency_urls_stat == 2: #Operación de Borrar enlaces if tvshow_item.emergency_urls and tvshow_item.emergency_urls.get(serie.channel, False): tvshow_item.emergency_urls.pop(serie.channel, None) #borramos la entrada del .nfo if tvshow_item.active == 30: tvshow_item.active = 1 if tvshow_item.infoLabels["tmdb_id"] == serie.infoLabels["tmdb_id"]: tvshow_item.infoLabels = serie.infoLabels tvshow_item.infoLabels["title"] = tvshow_item.infoLabels["tvshowtitle"] if max_sea == high_sea and max_epi == high_epi and (tvshow_item.infoLabels["status"] == "Ended" or tvshow_item.infoLabels["status"] == "Canceled") and insertados == 0 and fallidos == 0: tvshow_item.active = 0 # ... no la actualizaremos más logger.debug("%s [%s]: serie 'Terminada' o 'Cancelada'. Se desactiva la actualización periódica" % \ (serie.contentSerieName, serie.channel)) update_last = datetime.date.today() tvshow_item.update_last = update_last.strftime('%Y-%m-%d') update_next = datetime.date.today() + datetime.timedelta(days=int(tvshow_item.active)) tvshow_item.update_next = update_next.strftime('%Y-%m-%d') filetools.write(tvshow_path, head_nfo + tvshow_item.tojson()) except: logger.error("Error al actualizar tvshow.nfo") logger.error("No se ha podido guardar las urls de emergencia de %s en la videoteca" % tvshow_item.contentSerieName) logger.error(traceback.format_exc()) fallidos = -1 else: # ... si ha sido correcto actualizamos la videoteca de Kodi if config.is_xbmc() and not silent: from platformcode import xbmc_videolibrary xbmc_videolibrary.update(FOLDER_TVSHOWS, filetools.basename(path)) if fallidos == len(episodelist): fallidos = -1 logger.debug("%s [%s]: insertados= %s, sobreescritos= %s, fallidos= %s" % (serie.contentSerieName, serie.channel, insertados, sobreescritos, fallidos)) return insertados, sobreescritos, fallidos
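# Hedged usage sketch for save_episodes(), following its docstring: it returns the number
# of episodes inserted, overwritten and failed (-1 when none could be saved or the
# tvshow.nfo update failed). In the add-on it is normally driven by the videolibrary
# update service; the direct call below is for illustration only.
#
#   insertados, sobreescritos, fallidos = save_episodes(path, episodelist, serie,
#                                                       silent=True, overwrite=False)
#   if fallidos == -1:
#       logger.error("save_episodes failed for %s" % serie.contentSerieName)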
def findvideos(item):
    logger.info()
    # logger.debug("item:\n" + item.tostring('\n'))

    itemlist = []
    list_canales = {}
    item_local = None

    if not item.contentTitle or not item.strm_path:
        logger.debug("No se pueden buscar videos por falta de parametros")
        return []

    content_title = filter(lambda c: c not in ":*?<>|\/", item.contentTitle.strip().lower())

    if item.contentType == 'movie':
        item.strm_path = filetools.join(videolibrarytools.MOVIES_PATH, item.strm_path)
        path_dir = os.path.dirname(item.strm_path)
        item.nfo = filetools.join(path_dir, os.path.basename(path_dir) + ".nfo")
    else:
        item.strm_path = filetools.join(videolibrarytools.TVSHOWS_PATH, item.strm_path)
        path_dir = os.path.dirname(item.strm_path)
        item.nfo = filetools.join(path_dir, 'tvshow.nfo')

    for fd in filetools.listdir(path_dir):
        if fd.endswith('.json'):
            contenido, nom_canal = fd[:-6].split('[')
            if (contenido.startswith(content_title) or item.contentType == 'movie') and \
                    nom_canal not in list_canales.keys():
                list_canales[nom_canal] = filetools.join(path_dir, fd)

    num_canales = len(list_canales)
    # logger.debug(str(list_canales))

    if 'downloads' in list_canales:
        json_path = list_canales['downloads']
        item_json = Item().fromjson(filetools.read(json_path))
        item_json.contentChannel = "local"
        # Support for relative paths in downloads
        if filetools.is_relative(item_json.url):
            item_json.url = filetools.join(videolibrarytools.VIDEOLIBRARY_PATH, item_json.url)

        del list_canales['downloads']

        # Check that the video has not been deleted
        if filetools.exists(item_json.url):
            item_local = item_json.clone(action='play')
            itemlist.append(item_local)
        else:
            num_canales -= 1

    filtro_canal = ''
    if num_canales > 1 and config.get_setting("ask_channel", "videolibrary"):
        opciones = ["Mostrar solo los enlaces de %s" % k.capitalize() for k in list_canales.keys()]
        opciones.insert(0, "Mostrar todos los enlaces")
        if item_local:
            opciones.append(item_local.title)

        from platformcode import platformtools
        index = platformtools.dialog_select(config.get_localized_string(30163), opciones)
        if index < 0:
            return []

        elif item_local and index == len(opciones) - 1:
            filtro_canal = 'downloads'
            platformtools.play_video(item_local)

        elif index > 0:
            filtro_canal = opciones[index].replace("Mostrar solo los enlaces de ", "")
            itemlist = []

    for nom_canal, json_path in list_canales.items():
        if filtro_canal and filtro_canal != nom_canal.capitalize():
            continue

        # Import the channel for the selected entry
        try:
            channel = __import__('channels.%s' % nom_canal, fromlist=["channels.%s" % nom_canal])
        except ImportError:
            exec "import channels." + nom_canal + " as channel"

        item_json = Item().fromjson(filetools.read(json_path))
        list_servers = []

        try:
            # FILTERTOOLS
            # If the channel has a filter, pass it the saved show name so it filters correctly.
            if "list_language" in item_json:
                # If we come from the add-on's own videolibrary
                if "library_filter_show" in item:
                    item_json.show = item.library_filter_show.get(nom_canal, "")

            # Run findvideos, either the channel's own or the generic one
            item_json.contentChannel = 'videolibrary'
            if hasattr(channel, 'findvideos'):
                from core import servertools
                list_servers = getattr(channel, 'findvideos')(item_json)
                list_servers = servertools.filter_servers(list_servers)
            else:
                from core import servertools
                list_servers = servertools.find_video_items(item_json)
        except Exception, ex:
            logger.error("Ha fallado la funcion findvideos para el canal %s" % nom_canal)
            template = "An exception of type %s occurred. Arguments:\n%r"
            message = template % (type(ex).__name__, ex.args)
            logger.error(message)

        # Rename the servers by prefixing the channel name, and fall back to the item's
        # infoLabels and artwork when the server has none of its own
        for server in list_servers:
            if not server.action:  # Skip section labels
                continue

            server.contentChannel = server.channel
            server.channel = "videolibrary"
            server.nfo = item.nfo
            server.strm_path = item.strm_path

            # Prepend the channel name if configured to do so
            if config.get_setting("quit_channel_name", "videolibrary") == 0:
                server.title = "%s: %s" % (nom_canal.capitalize(), server.title)

            server.infoLabels = item_json.infoLabels

            if not server.thumbnail:
                server.thumbnail = item.thumbnail

            # logger.debug("server:\n%s" % server.tostring('\n'))
            itemlist.append(server)
def delete(item): def delete_all(_item): for file in filetools.listdir(_item.path): if file.endswith(".strm") or file.endswith(".nfo") or file.endswith(".json")or file.endswith(".torrent"): filetools.remove(filetools.join(_item.path, file)) if _item.contentType == 'movie': heading = config.get_localized_string(70084) else: heading = config.get_localized_string(70085) if config.is_xbmc() and config.get_setting("videolibrary_kodi"): from platformcode import xbmc_videolibrary if _item.local_episodes_path: platformtools.dialog_ok(heading, config.get_localized_string(80047) % _item.infoLabels['title']) path_list = [_item.extra] xbmc_videolibrary.clean(path_list) raiz, carpeta_serie, ficheros = next(filetools.walk(_item.path)) if ficheros == []: filetools.rmdir(_item.path) elif platformtools.dialog_yesno(heading, config.get_localized_string(70081) % filetools.basename(_item.path)): filetools.rmdirtree(_item.path) logger.info("All links removed") xbmc.sleep(1000) platformtools.itemlist_refresh() # logger.debug(item.tostring('\n')) if item.contentType == 'movie': heading = config.get_localized_string(70084) else: heading = config.get_localized_string(70085) if item.multichannel: # Get channel list if item.dead == '': opciones = [] channels = [] for k in list(item.library_urls.keys()): if k != "downloads": opciones.append(config.get_localized_string(70086) % k.capitalize()) channels.append(k) opciones.insert(0, heading) index = platformtools.dialog_select(config.get_localized_string(30163), opciones) if index == 0: # Selected Delete movie / series delete_all(item) return elif index > 0: # Selected Delete channel X canal = opciones[index].replace(config.get_localized_string(70079), "").lower() channels.remove(canal) else: return else: canal = item.dead num_enlaces = 0 path_list = [] for fd in filetools.listdir(item.path): if fd.endswith(canal + '].json') or scrapertools.find_single_match(fd, r'%s]_\d+.torrent' % canal): if filetools.remove(filetools.join(item.path, fd)): num_enlaces += 1 # Remove strm and nfo if no other channel episode = fd.replace(' [' + canal + '].json', '') found_ch = False for ch in channels: if filetools.exists(filetools.join(item.path, episode + ' [' + ch + '].json')): found_ch = True break if found_ch == False: filetools.remove(filetools.join(item.path, episode + '.nfo')) strm_path = filetools.join(item.path, episode + '.strm') # if it is a local episode, do not delete the strm if 'plugin://plugin.video.kod/?' in filetools.read(strm_path): filetools.remove(strm_path) path_list.append(filetools.join(item.extra, episode + '.strm')) if config.is_xbmc() and config.get_setting("videolibrary_kodi") and path_list: from platformcode import xbmc_videolibrary xbmc_videolibrary.clean(path_list) if num_enlaces > 0: # Update .nfo head_nfo, item_nfo = videolibrarytools.read_nfo(item.nfo) del item_nfo.library_urls[canal] if item_nfo.emergency_urls and item_nfo.emergency_urls.get(canal, False): del item_nfo.emergency_urls[canal] filetools.write(item.nfo, head_nfo + item_nfo.tojson()) msg_txt = config.get_localized_string(70087) % (num_enlaces, canal) logger.info(msg_txt) platformtools.dialog_notification(heading, msg_txt) platformtools.itemlist_refresh() else: if platformtools.dialog_yesno(heading, config.get_localized_string(70088) % item.infoLabels['title']): delete_all(item)
def list_storage(item): logger.info() from core import filetools from lib import generictools itemlist = [] torrent_params = { 'url': item.url, 'torrents_path': '', 'local_torr': item.torrents_path, 'lookup': False, 'force': True, 'data_torrent': True, 'subtitles': True, 'file_list': True } #logger.debug(item) browse_type = 0 path_out = item.url if not filetools.exists(path_out): path_out = '' if not path_out: msg = 'Seleccione carpeta de destino:' path_out = platformtools.dialog_browse(browse_type, msg, shares='') path_list = filetools.listdir(path_out) VIDEOLIBRARY_PATH = config.get_videolibrary_path() FOLDER_MOVIES = config.get_setting("folder_movies") FOLDER_TVSHOWS = config.get_setting("folder_tvshows") FOLDER = '' if VIDEOLIBRARY_PATH in path_out: if FOLDER_MOVIES in path_out: FOLDER = FOLDER_MOVIES elif FOLDER_TVSHOWS in path_out: FOLDER = FOLDER_TVSHOWS MOVIES_PATH = filetools.join(VIDEOLIBRARY_PATH, FOLDER_MOVIES) TVSHOWS_PATH = filetools.join(VIDEOLIBRARY_PATH, FOLDER_TVSHOWS) VIDEO_FOLDER = filetools.join(VIDEOLIBRARY_PATH, FOLDER) TEMP_TORRENT_FOLDER = filetools.join( config.get_setting('downloadpath', default=''), 'cached_torrents_Alfa') MIS_TORRENT_FOLDER = filetools.join( config.get_setting('downloadpath', default=''), 'Mis_Torrents') for file in path_list: if FOLDER and file.endswith('.json') and file.split( '.')[0] + '_01.torrent' in str(path_list): json_video = Item().fromjson( filetools.read(filetools.join(path_out, file))) json_video.channel = 'url' json_video.action = 'findvideos' json_video.torrents_path = json_video.url itemlist.append(json_video) elif FOLDER and filetools.isdir(filetools.join(path_out, file)): if '.torrent' in str(filetools.listdir(filetools.join(path_out, file))) \ or '.magnet' in str(filetools.listdir(filetools.join(path_out, file))): itemlist.append( Item(channel=item.channel, action="list_storage", url=filetools.join(path_out, file), title=file.title(), contentTitle=file.title(), contentType="list", unify=False, context=context)) if len(itemlist) > 1: itemlist = sorted(itemlist, key=lambda it: it.title) #clasificamos elif not FOLDER and filetools.isdir(filetools.join(path_out, file)): if MIS_TORRENT_FOLDER in path_out: title = file.title() if 'BTDigg' in file: title = title.replace( 'Btdigg', '[B][COLOR limegreen]BT[/COLOR][COLOR red]Digg[/COLOR][/B]' ) itemlist.append( Item(channel=item.channel, action="list_storage", url=filetools.join(path_out, file), title=title, contentTitle=title, contentType="list", unify=False, btdigg=True if 'BTDigg' in file else False, url_org=filetools.join(path_out, file), context=context)) if len(itemlist) > 1: itemlist = sorted(itemlist, key=lambda it: it.title) #clasificamos elif not FOLDER and ('.torrent' in file or '.magnet' in file): btdigg = False if '.torrent' in file: url = filetools.join(TEMP_TORRENT_FOLDER, file) filetools.copy(filetools.join(path_out, file), url, silent=True) if not filetools.exists(url): continue else: url = filetools.read(filetools.join(path_out, file), silent=True) if btdigg_magnet in url: btdigg = True size = 'MAGNET' if not url: continue torrent_params['url'] = url torrent_params['torrents_path'] = '' torrent_params['local_torr'] = filetools.join( TEMP_TORRENT_FOLDER, file) torrent_params = generictools.get_torrent_size( url, torrent_params=torrent_params) if '.magnet' in file and 'ERROR' in torrent_params['size']: torrent_params['size'] = 'MAGNET' size = torrent_params['size'] itemlist.append( Item(channel=item.channel, action="play", url=url, url_org=filetools.join(path_out, file), 
server='torrent', title=filetools.join( filetools.basename(path_out.rstrip('/').rstrip('\\')), file).title() + " [%s]" % size, contentTitle=filetools.join( filetools.basename(path_out.rstrip('/').rstrip('\\')), file).title(), contentType="movie", unify=False, torrents_path=torrent_params['torrents_path'], infoLabels={"tmdb_id": "111"}, context=context, btdigg=btdigg)) if len(itemlist) > 1: itemlist = sorted(itemlist, key=lambda it: it.title) #clasificamos return itemlist
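# The branch above that handles loose .torrent/.magnet files first copies the torrent into the
# cached-torrents folder (or reads the magnet URI out of the .magnet file) before asking
# generictools.get_torrent_size for its size. A minimal stdlib sketch of that preparation step;
# names are illustrative and the real code goes through filetools rather than shutil.
import os
import shutil

def prepare_local_entry(path_out, name, cache_dir):
    """Return a playable url for a loose .torrent/.magnet file, or '' if unusable (sketch)."""
    src = os.path.join(path_out, name)
    if name.endswith('.torrent'):
        if not os.path.isdir(cache_dir):
            os.makedirs(cache_dir)
        dst = os.path.join(cache_dir, name)   # e.g. <downloadpath>/cached_torrents_Alfa/<name>
        shutil.copy(src, dst)
        return dst if os.path.exists(dst) else ''
    if name.endswith('.magnet'):
        with open(src) as f:                  # a .magnet file stores the magnet URI as plain text
            return f.read().strip()
    return ''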
def peliculas(item): logger.info("pelisalacarta.channels.biblioteca peliculas") strm_path = library.MOVIES_PATH download_path = filetools.join(config.get_library_path(), "Descargas", "Cine") itemlist = [] for raiz, subcarpetas, ficheros in filetools.walk(strm_path): for f in ficheros: if f.endswith(".strm"): i = filetools.join(raiz, f) movie = Item().fromurl(filetools.read(i)) movie.contentChannel = movie.channel movie.path = i movie.title = os.path.splitext(os.path.basename(i))[0].capitalize() movie.channel = "biblioteca" movie.action = "findvideos" movie.text_color = "blue" # fix para que no se ejecute el método de play para la biblioteca de Kodi movie.strm = False itemlist.append(movie) # Obtenemos todos los videos de la biblioteca de CINE recursivamente for raiz, subcarpetas, ficheros in filetools.walk(download_path): for f in ficheros: if not f.endswith(".json") and not f.endswith(".nfo") and not f.endswith(".srt"): i = filetools.join(raiz, f) movie = Item() movie.contentChannel = "local" movie.path = i movie.title = os.path.splitext(os.path.basename(i))[0].capitalize() movie.channel = "biblioteca" movie.action = "play" movie.text_color = "green" itemlist.append(movie) library.set_infolabels_from_library(itemlist, tipo="Movies") # Agrupamos las peliculas por canales join_itemlist = [] for i in range(len(itemlist)): encontrado = False for j in range(i + 1, len(itemlist)): if "tmdb_id" in itemlist[i].infoLabels and "tmdb_id" in itemlist[j].infoLabels: if itemlist[i].infoLabels["tmdb_id"] == itemlist[j].infoLabels["tmdb_id"]: encontrado = True if "list_channels" not in itemlist[i]: list_channels = [] dict_first_channel = {"path": itemlist[i].path, "channel": itemlist[i].contentChannel} list_channels.append(dict_first_channel.copy()) itemlist[j].list_channels = list_channels dict_other_channel = {"path": itemlist[j].path, "channel": itemlist[j].contentChannel} itemlist[j].list_channels.append(dict_other_channel.copy()) itemlist[j].action = "get_canales_movies" itemlist[j].text_color = "orange" # TODO pendiente de probar if "contentTitle" in itemlist[i] and itemlist[i].contentTitle != "": itemlist[i].title = itemlist[i].contentTitle if not encontrado: join_itemlist.append(itemlist[i]) return sorted(join_itemlist, key=lambda it: it.title.lower())
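# The de-duplication loop above folds together movies that share a tmdb_id, so a title available
# from several channels is listed once with the extra sources kept in list_channels. A simplified,
# stdlib-only sketch of that idea; plain dicts stand in for Item objects, and unlike the channel
# code (which attaches list_channels to the later duplicate) this keeps the first entry seen.
def group_by_tmdb(entries):
    # entries: dicts with 'tmdb_id', 'path', 'channel' and 'title' keys (illustrative shape)
    grouped = {}
    for idx, e in enumerate(entries):
        key = e.get('tmdb_id') or ('no_id_%d' % idx)   # items without tmdb_id stay separate
        if key in grouped:
            grouped[key]['list_channels'].append({'path': e['path'], 'channel': e['channel']})
        else:
            first = dict(e)
            first['list_channels'] = [{'path': e['path'], 'channel': e['channel']}]
            grouped[key] = first
    return sorted(grouped.values(), key=lambda e: e['title'].lower())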
def check_addon_updates(verbose=False): logger.info() ADDON_UPDATES_JSON = 'https://extra.alfa-addon.com/addon_updates/updates.json' ADDON_UPDATES_ZIP = 'https://extra.alfa-addon.com/addon_updates/updates.zip' try: last_fix_json = os.path.join( config.get_runtime_path(), 'last_fix.json') # información de la versión fixeada del usuario # Se guarda en get_runtime_path en lugar de get_data_path para que se elimine al cambiar de versión # Descargar json con las posibles actualizaciones # ----------------------------------------------- data = httptools.downloadpage(ADDON_UPDATES_JSON, timeout=2).data if data == '': logger.info('No se encuentran actualizaciones del addon') if verbose: platformtools.dialog_notification( config.get_localized_string(70667), config.get_localized_string(70668)) return False data = jsontools.load(data) if 'addon_version' not in data or 'fix_version' not in data: logger.info('No hay actualizaciones del addon') if verbose: platformtools.dialog_notification( config.get_localized_string(70667), config.get_localized_string(70668)) return False # Comprobar versión que tiene instalada el usuario con versión de la actualización # -------------------------------------------------------------------------------- current_version = config.get_addon_version(with_fix=False) if current_version != data['addon_version']: logger.info('No hay actualizaciones para la versión %s del addon' % current_version) if verbose: platformtools.dialog_notification( config.get_localized_string(70667), config.get_localized_string(70668)) return False if os.path.exists(last_fix_json): try: lastfix = {} lastfix = jsontools.load(filetools.read(last_fix_json)) if lastfix['addon_version'] == data[ 'addon_version'] and lastfix['fix_version'] == data[ 'fix_version']: logger.info( config.get_localized_string(70670) % (data['addon_version'], data['fix_version'])) if verbose: platformtools.dialog_notification( config.get_localized_string(70667), config.get_localized_string(70671) % (data['addon_version'], data['fix_version'])) return False except: if lastfix: logger.error('last_fix.json: ERROR en: ' + str(lastfix)) else: logger.error('last_fix.json: ERROR desconocido') lastfix = {} # Descargar zip con las actualizaciones # ------------------------------------- localfilename = os.path.join(config.get_data_path(), 'temp_updates.zip') if os.path.exists(localfilename): os.remove(localfilename) downloadtools.downloadfile(ADDON_UPDATES_ZIP, localfilename, silent=True) # Descomprimir zip dentro del addon # --------------------------------- try: unzipper = ziptools.ziptools() unzipper.extract(localfilename, config.get_runtime_path()) except: import xbmc xbmc.executebuiltin('XBMC.Extract("%s", "%s")' % (localfilename, config.get_runtime_path())) time.sleep(1) # Borrar el zip descargado # ------------------------ os.remove(localfilename) # Guardar información de la versión fixeada # ----------------------------------------- if 'files' in data: data.pop('files', None) filetools.write(last_fix_json, jsontools.dump(data)) logger.info( config.get_localized_string(70672) % (data['addon_version'], data['fix_version'])) if verbose: platformtools.dialog_notification( config.get_localized_string(70673), config.get_localized_string(70671) % (data['addon_version'], data['fix_version'])) return True except: logger.error('Error al comprobar actualizaciones del addon!') logger.error(traceback.format_exc()) if verbose: platformtools.dialog_notification( config.get_localized_string(70674), config.get_localized_string(70675)) return False
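# updates.json advertises an addon_version/fix_version pair and last_fix.json remembers the pair
# last applied; a fix is only downloaded when they differ. Hedged stdlib sketch of that comparison
# (file layout as used above, the helper name itself is an assumption).
import json
import os

def fix_needed(last_fix_path, remote):
    # remote: dict loaded from updates.json with 'addon_version' and 'fix_version'
    if not os.path.exists(last_fix_path):
        return True
    try:
        with open(last_fix_path) as f:
            lastfix = json.load(f)
        return (lastfix.get('addon_version') != remote['addon_version'] or
                lastfix.get('fix_version') != remote['fix_version'])
    except (ValueError, KeyError, IOError):
        return True   # unreadable bookkeeping file: attempt the update anyway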
def series(item): logger.info("pelisalacarta.channels.biblioteca series") strm_path = library.TVSHOWS_PATH download_path = filetools.join(config.get_library_path(), "Descargas", "Series") itemlist = [] # Obtenemos todos los strm de la biblioteca de SERIES recursivamente for raiz, subcarpetas, ficheros in filetools.walk(strm_path): for f in ficheros: if f == "tvshow.json": i = filetools.join(raiz, f) tvshow = Item().fromjson(filetools.read(i)) logger.debug(tvshow.tostring()) tvshow.contentChannel = tvshow.channel tvshow.path = os.path.dirname(i) tvshow.title = os.path.basename(os.path.dirname(i)) tvshow.channel = "biblioteca" tvshow.action = "get_temporadas" tvshow.text_color = "blue" itemlist.append(tvshow) # Obtenemos todos los videos de la biblioteca de SERIES recursivamente for raiz, subcarpetas, ficheros in filetools.walk(download_path): for f in ficheros: if f == "tvshow.json": i = filetools.join(raiz, f) tvshow = Item().fromjson(filetools.read(i)) tvshow.contentChannel = "local" tvshow.path = os.path.dirname(i) tvshow.title = os.path.basename(os.path.dirname(i)) tvshow.channel = "biblioteca" tvshow.action = "get_temporadas" tvshow.text_color = "green" itemlist.append(tvshow) library.set_infolabels_from_library(itemlist, tipo="TVShows") # Agrupamos las series por canales join_itemlist = [] for i in range(len(itemlist)): encontrado = False for j in range(i + 1, len(itemlist)): if "tmdb_id" in itemlist[i].infoLabels and "tmdb_id" in itemlist[j].infoLabels: if itemlist[i].infoLabels["tmdb_id"] == itemlist[j].infoLabels["tmdb_id"]: encontrado = True if "list_channels" not in itemlist[i]: list_channels = [] dict_first_channel = {"path": itemlist[i].path, "channel": itemlist[i].contentChannel} list_channels.append(dict_first_channel.copy()) itemlist[j].list_channels = list_channels dict_other_channel = {"path": itemlist[j].path, "channel": itemlist[j].contentChannel} itemlist[j].list_channels.append(dict_other_channel.copy()) itemlist[j].action = "get_canales_tvshow" itemlist[j].text_color = "orange" if "contentTitle" in itemlist[i] and itemlist[i].contentTitle != "": itemlist[i].title = itemlist[i].contentTitle if not encontrado: join_itemlist.append(itemlist[i]) return sorted(join_itemlist, key=lambda it: it.title.lower())
def findvideos(item): logger.info() # logger.debug("item:\n" + item.tostring('\n')) itemlist = [] list_canales = {} item_local = None if not item.contentTitle or not item.strm_path: logger.debug("No se pueden buscar videos por falta de parametros") return [] content_title = filter(lambda c: c not in ":*?<>|\/", item.contentTitle).strip().lower() if item.contentType == 'movie': item.strm_path = filetools.join(library.MOVIES_PATH, item.strm_path.strip('\/')) path_dir = os.path.dirname(item.strm_path) item.nfo = filetools.join(path_dir, os.path.basename(path_dir) + ".nfo") else: item.strm_path = filetools.join(library.TVSHOWS_PATH, item.strm_path.strip('\/')) path_dir = os.path.dirname(item.strm_path) item.nfo = filetools.join(path_dir, 'tvshow.nfo') for fd in filetools.listdir(path_dir): if fd.endswith('.json'): contenido, nom_canal = fd[:-6].split('[') if (content_title in contenido.strip() or item.contentType == 'movie') and nom_canal not in \ list_canales.keys(): list_canales[nom_canal] = filetools.join(path_dir, fd) num_canales = len(list_canales) if 'descargas' in list_canales: json_path = list_canales['descargas'] item_json = Item().fromjson(filetools.read(json_path)) del list_canales['descargas'] # Comprobar q el video no haya sido borrado if filetools.exists(item_json.url): item_local = item_json.clone(action='play') itemlist.append(item_local) else: num_canales -= 1 filtro_canal = '' if num_canales > 1 and config.get_setting("ask_channel", "biblioteca") == True: opciones = ["Mostrar solo los enlaces de %s" % k.capitalize() for k in list_canales.keys()] opciones.insert(0, "Mostrar todos los enlaces") if item_local: opciones.append(item_local.title) from platformcode import platformtools index = platformtools.dialog_select(config.get_localized_string(30163), opciones) if index < 0: return [] elif item_local and index == len(opciones) - 1: filtro_canal = 'descargas' platformtools.play_video(item_local) elif index > 0: filtro_canal = opciones[index].replace("Mostrar solo los enlaces de ", "") itemlist = [] for nom_canal, json_path in list_canales.items(): if filtro_canal and filtro_canal != nom_canal.capitalize(): continue # Importamos el canal de la parte seleccionada try: channel = __import__('channels.%s' % nom_canal, fromlist=["channels.%s" % nom_canal]) except ImportError: exec "import channels." + nom_canal + " as channel" item_json = Item().fromjson(filetools.read(json_path)) list_servers = [] try: # FILTERTOOLS # si el canal tiene filtro se le pasa el nombre que tiene guardado para que filtre correctamente. if "list_idiomas" in item_json: # si se viene desde la biblioteca de pelisalacarta if "library_filter_show" in item: item_json.show = item.library_filter_show.get(nom_canal, "") # Ejecutamos find_videos, del canal o común if hasattr(channel, 'findvideos'): list_servers = getattr(channel, 'findvideos')(item_json) else: from core import servertools list_servers = servertools.find_video_items(item_json) except Exception as ex: logger.error("Ha fallado la funcion findvideos para el canal %s" % nom_canal) template = "An exception of type {0} occured. 
Arguments:\n{1!r}" message = template.format(type(ex).__name__, ex.args) logger.error(message) # Cambiarle el titulo a los servers añadiendoles el nombre del canal delante y # las infoLabels y las imagenes del item si el server no tiene for server in list_servers: if not server.action: # Ignorar las etiquetas continue server.contentChannel = server.channel server.channel = "biblioteca" server.nfo = item.nfo server.strm_path = item.strm_path server.title = "%s: %s" % (nom_canal.capitalize(), server.title) server.infoLabels = item_json.infoLabels if not server.thumbnail: server.thumbnail = item.thumbnail # logger.debug("server:\n%s" % server.tostring('\n')) itemlist.append(server) # return sorted(itemlist, key=lambda it: it.title.lower()) return itemlist
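# findvideos() above re-imports each channel module from the name embedded in the per-channel
# .json file. importlib expresses the same lookup done by the __import__/exec pair; this is a
# sketch, not the addon's own helper.
import importlib

def channel_findvideos(nom_canal):
    # channels.<nom_canal> is the module naming scheme used above
    module = importlib.import_module('channels.%s' % nom_canal)
    return getattr(module, 'findvideos', None)   # None -> caller falls back to servertools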
def convert_old_to_v4(): logger.info() path_series_xml = filetools.join(config.get_data_path(), "series.xml") path_series_json = filetools.join(config.get_data_path(), "series.json") series_insertadas = 0 series_fallidas = 0 version = 'v?' # Renombrar carpeta Series y crear una vacia import time new_name = str(time.time()) path_series_old = filetools.join(library.LIBRARY_PATH, "SERIES_OLD_" + new_name) if filetools.rename(library.TVSHOWS_PATH, "SERIES_OLD_" + new_name): if not filetools.mkdir(library.TVSHOWS_PATH): logger.error( "ERROR, no se ha podido crear la nueva carpeta de SERIES") return False else: logger.error( "ERROR, no se ha podido renombrar la antigua carpeta de SERIES") return False path_cine_old = filetools.join(library.LIBRARY_PATH, "CINE_OLD_" + new_name) if filetools.rename(library.MOVIES_PATH, "CINE_OLD_" + new_name): if not filetools.mkdir(library.MOVIES_PATH): logger.error( "ERROR, no se ha podido crear la nueva carpeta de CINE") return False else: logger.error( "ERROR, no se ha podido renombrar la antigua carpeta de CINE") return False # Convertir libreria de v1(xml) a v4 if filetools.exists(path_series_xml): try: data = filetools.read(path_series_xml) for line in data.splitlines(): try: aux = line.rstrip('\n').split(",") tvshow = aux[0].strip() url = aux[1].strip() channel = aux[2].strip() serie = Item(contentSerieName=tvshow, url=url, channel=channel, action="episodios", title=tvshow, active=True) patron = "^(.+)[\s]\((\d{4})\)$" matches = re.compile(patron, re.DOTALL).findall( serie.contentSerieName) if matches: serie.infoLabels['title'] = matches[0][0] serie.infoLabels['year'] = matches[0][1] else: serie.infoLabels['title'] = tvshow insertados, sobreescritos, fallidos = library.save_library_tvshow( serie, list()) if fallidos == 0: series_insertadas += 1 platformtools.dialog_notification( "Serie actualizada", serie.infoLabels['title']) else: series_fallidas += 1 except: series_fallidas += 1 filetools.rename(path_series_xml, "series.xml.old") version = 'v4' except EnvironmentError: logger.error("ERROR al leer el archivo: %s" % path_series_xml) return False # Convertir libreria de v2(json) a v4 if filetools.exists(path_series_json): try: data = jsontools.load_json(filetools.read(path_series_json)) for tvshow in data: for channel in data[tvshow]["channels"]: try: serie = Item( contentSerieName=data[tvshow]["channels"][channel] ["tvshow"], url=data[tvshow]["channels"][channel]["url"], channel=channel, action="episodios", title=data[tvshow]["name"], active=True) if not tvshow.startswith("t_"): serie.infoLabels["tmdb_id"] = tvshow insertados, sobreescritos, fallidos = library.save_library_tvshow( serie, list()) if fallidos == 0: series_insertadas += 1 platformtools.dialog_notification( "Serie actualizada", serie.infoLabels['title']) else: series_fallidas += 1 except: series_fallidas += 1 filetools.rename(path_series_json, "series.json.old") version = 'v4' except EnvironmentError: logger.error("ERROR al leer el archivo: %s" % path_series_json) return False # Convertir libreria de v3 a v4 if version != 'v4': # Obtenemos todos los tvshow.json de la biblioteca de SERIES_OLD recursivamente for raiz, subcarpetas, ficheros in filetools.walk(path_series_old): for f in ficheros: if f == "tvshow.json": try: serie = Item().fromjson( filetools.read(filetools.join(raiz, f))) insertados, sobreescritos, fallidos = library.save_library_tvshow( serie, list()) if fallidos == 0: series_insertadas += 1 platformtools.dialog_notification( "Serie actualizada", serie.infoLabels['title']) else: 
series_fallidas += 1 except: series_fallidas += 1 movies_insertadas = 0 movies_fallidas = 0 for raiz, subcarpetas, ficheros in filetools.walk(path_cine_old): for f in ficheros: if f.endswith(".strm.json"): try: movie = Item().fromjson( filetools.read(filetools.join(raiz, f))) insertados, sobreescritos, fallidos = library.save_library_movie( movie) if fallidos == 0: movies_insertadas += 1 platformtools.dialog_notification( "Película actualizada", movie.infoLabels['title']) else: movies_fallidas += 1 except: movies_fallidas += 1 config.set_setting("library_version", 'v4') platformtools.dialog_notification( "Biblioteca actualizada al nuevo formato", "%s series convertidas y %s series descartadas.\n" "%s peliculas convertidas y %s peliculas descartadas.\n" "A continuación se va a obtener la información de todos los episodios" % (series_insertadas, series_fallidas, movies_insertadas, movies_fallidas), time=12000) # Por ultimo limpia la libreria, por que las rutas anteriores ya no existen if config.is_xbmc(): from platformcode import xbmc_library xbmc_library.clean() return True
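# Each line of the old v1 series.xml holds "title, url, channel", with an optional "(YYYY)" suffix
# split off the title by the same regular expression used above. Stdlib sketch of that parser;
# the sample values in the comment are made up.
import re

def parse_v1_line(line):
    tvshow, url, channel = [p.strip() for p in line.rstrip('\n').split(',')[:3]]
    title, year = tvshow, ''
    match = re.match(r'^(.+)\s\((\d{4})\)$', tvshow)
    if match:
        title, year = match.group(1), match.group(2)
    return {'contentSerieName': tvshow, 'url': url, 'channel': channel, 'title': title, 'year': year}

# parse_v1_line("Some Show (2008), http://example.org/serie, somechannel")
# -> title "Some Show", year "2008", channel "somechannel"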
def get_environment(): """ Devuelve las variables de entorno del OS, de Kodi y de Alfa más habituales, necesarias para el diagnóstico de fallos """ try: import base64 import ast environment = config.get_platform(full_version=True) environment['num_version'] = str(environment['num_version']) environment['python_version'] = str(platform.python_version()) environment['os_release'] = str(platform.release()) if xbmc.getCondVisibility("system.platform.Windows"): try: if platform._syscmd_ver()[2]: environment['os_release'] = str(platform._syscmd_ver()[2]) except: pass environment['prod_model'] = '' if xbmc.getCondVisibility("system.platform.Android"): environment['os_name'] = 'Android' try: for label_a in subprocess.check_output('getprop').split('\n'): if 'build.version.release' in label_a: environment['os_release'] = str( scrapertools.find_single_match( label_a, ':\s*\[(.*?)\]$')) if 'product.model' in label_a: environment['prod_model'] = str( scrapertools.find_single_match( label_a, ':\s*\[(.*?)\]$')) except: try: for label_a in filetools.read(os.environ['ANDROID_ROOT'] + '/build.prop').split(): if 'build.version.release' in label_a: environment['os_release'] = str( scrapertools.find_single_match( label_a, '=(.*?)$')) if 'product.model' in label_a: environment['prod_model'] = str( scrapertools.find_single_match( label_a, '=(.*?)$')) except: pass elif xbmc.getCondVisibility("system.platform.Linux.RaspberryPi"): environment['os_name'] = 'RaspberryPi' else: environment['os_name'] = str(platform.system()) environment['machine'] = str(platform.machine()) environment['architecture'] = str(sys.maxsize > 2**32 and "64-bit" or "32-bit") environment['language'] = str(xbmc.getInfoLabel('System.Language')) environment['cpu_usage'] = str(xbmc.getInfoLabel('System.CpuUsage')) environment['mem_total'] = str( xbmc.getInfoLabel('System.Memory(total)')).replace('MB', '').replace( 'KB', '') environment['mem_free'] = str( xbmc.getInfoLabel('System.Memory(free)')).replace('MB', '').replace( 'KB', '') if not environment['mem_total'] or not environment['mem_free']: try: if environment['os_name'].lower() == 'windows': kernel32 = ctypes.windll.kernel32 c_ulong = ctypes.c_ulong c_ulonglong = ctypes.c_ulonglong class MEMORYSTATUS(ctypes.Structure): _fields_ = [('dwLength', c_ulong), ('dwMemoryLoad', c_ulong), ('dwTotalPhys', c_ulonglong), ('dwAvailPhys', c_ulonglong), ('dwTotalPageFile', c_ulonglong), ('dwAvailPageFile', c_ulonglong), ('dwTotalVirtual', c_ulonglong), ('dwAvailVirtual', c_ulonglong), ('availExtendedVirtual', c_ulonglong)] memoryStatus = MEMORYSTATUS() memoryStatus.dwLength = ctypes.sizeof(MEMORYSTATUS) kernel32.GlobalMemoryStatus(ctypes.byref(memoryStatus)) environment['mem_total'] = str( int(memoryStatus.dwTotalPhys) / (1024**2)) environment['mem_free'] = str( int(memoryStatus.dwAvailPhys) / (1024**2)) else: with open('/proc/meminfo') as f: meminfo = f.read() environment['mem_total'] = str( int( re.search(r'MemTotal:\s+(\d+)', meminfo).groups()[0]) / 1024) environment['mem_free'] = str( int( re.search(r'MemAvailable:\s+(\d+)', meminfo).groups()[0]) / 1024) except: environment['mem_total'] = '' environment['mem_free'] = '' try: environment['kodi_buffer'] = '20' environment['kodi_bmode'] = '0' environment['kodi_rfactor'] = '4.0' if filetools.exists( filetools.join(xbmc.translatePath("special://userdata"), "advancedsettings.xml")): advancedsettings = filetools.read( filetools.join(xbmc.translatePath("special://userdata"), "advancedsettings.xml")).split('\n') for label_a in advancedsettings: if 'memorysize' 
in label_a: environment['kodi_buffer'] = str( int( scrapertools.find_single_match( label_a, '>(\d+)<\/')) / 1024**2) if 'buffermode' in label_a: environment['kodi_bmode'] = str( scrapertools.find_single_match( label_a, '>(\d+)<\/')) if 'readfactor' in label_a: environment['kodi_rfactor'] = str( scrapertools.find_single_match( label_a, '>(.*?)<\/')) except: pass environment['userdata_path'] = str( xbmc.translatePath(config.get_data_path())) try: if environment['os_name'].lower() == 'windows': free_bytes = ctypes.c_ulonglong(0) ctypes.windll.kernel32.GetDiskFreeSpaceExW( ctypes.c_wchar_p(environment['userdata_path']), None, None, ctypes.pointer(free_bytes)) environment['userdata_free'] = str( round(float(free_bytes.value) / (1024**3), 3)) else: disk_space = os.statvfs(environment['userdata_path']) if not disk_space.f_frsize: disk_space.f_frsize = disk_space.f_frsize.f_bsize environment['userdata_free'] = str(round((float(disk_space.f_bavail) / \ (1024**3)) * float(disk_space.f_frsize), 3)) except: environment['userdata_free'] = '?' try: environment['videolab_series'] = '?' environment['videolab_episodios'] = '?' environment['videolab_pelis'] = '?' environment['videolab_path'] = str( xbmc.translatePath(config.get_videolibrary_path())) if filetools.exists(filetools.join(environment['videolab_path'], \ config.get_setting("folder_tvshows"))): environment['videolab_series'] = str(len(filetools.listdir(filetools.join(environment['videolab_path'], \ config.get_setting("folder_tvshows"))))) counter = 0 for root, folders, files in filetools.walk(filetools.join(environment['videolab_path'], \ config.get_setting("folder_tvshows"))): for file in files: if file.endswith('.strm'): counter += 1 environment['videolab_episodios'] = str(counter) if filetools.exists(filetools.join(environment['videolab_path'], \ config.get_setting("folder_movies"))): environment['videolab_pelis'] = str(len(filetools.listdir(filetools.join(environment['videolab_path'], \ config.get_setting("folder_movies"))))) except: pass try: video_updates = ['No', 'Inicio', 'Una vez', 'Inicio+Una vez'] environment['videolab_update'] = str( video_updates[config.get_setting("update", "videolibrary")]) except: environment['videolab_update'] = '?' try: if environment['os_name'].lower() == 'windows': free_bytes = ctypes.c_ulonglong(0) ctypes.windll.kernel32.GetDiskFreeSpaceExW( ctypes.c_wchar_p(environment['videolab_path']), None, None, ctypes.pointer(free_bytes)) environment['videolab_free'] = str( round(float(free_bytes.value) / (1024**3), 3)) else: disk_space = os.statvfs(environment['videolab_path']) if not disk_space.f_frsize: disk_space.f_frsize = disk_space.f_frsize.f_bsize environment['videolab_free'] = str(round((float(disk_space.f_bavail) / \ (1024**3)) * float(disk_space.f_frsize), 3)) except: environment['videolab_free'] = '?' 
environment['torrentcli_option'] = '' environment['torrentcli_name'] = '' environment['torrentcli_dload_path'] = '' environment['torrentcli_buffer'] = '' environment['torrentcli_dload_estrgy'] = '' environment['torrentcli_mem_size'] = '' environment['torrentcli_free'] = '' torrent_id = config.get_setting("torrent_client", server="torrent", default=0) environment['torrentcli_option'] = str(torrent_id) if torrent_id > 0: torrent_id = torrent_id - 3 if torrent_id < 0: logger.error('torrent_id: ' + str(torrent_id) + ' / torrent_options: ' + str(platformtools.torrent_client_installed())) torrent_options = platformtools.torrent_client_installed() if torrent_options and torrent_id >= 0: environment['torrentcli_name'] = torrent_options[ torrent_id].replace('Plugin externo: ', '') if xbmc.getCondVisibility('System.HasAddon("plugin.video.%s")' % environment['torrentcli_name']): __settings__ = xbmcaddon.Addon(id="plugin.video.%s" % environment['torrentcli_name']) environment['torrentcli_name'] = environment[ 'torrentcli_name'].capitalize() if environment['torrentcli_name'] == 'Torrenter': environment['torrentcli_dload_path'] = str( xbmc.translatePath(__settings__.getSetting('storage'))) environment['torrentcli_buffer'] = str( __settings__.getSetting('pre_buffer_bytes')) else: environment['torrentcli_dload_path'] = str( xbmc.translatePath( __settings__.getSetting('download_path'))) environment['torrentcli_buffer'] = str( __settings__.getSetting('buffer_size')) environment['torrentcli_dload_estrgy'] = str( __settings__.getSetting('download_storage')) environment['torrentcli_mem_size'] = str( __settings__.getSetting('memory_size')) if environment['torrentcli_dload_path']: try: if environment['os_name'].lower() == 'windows': free_bytes = ctypes.c_ulonglong(0) ctypes.windll.kernel32.GetDiskFreeSpaceExW( ctypes.c_wchar_p(environment['torrentcli_dload_path']), None, None, ctypes.pointer(free_bytes)) environment['torrentcli_free'] = str(round(float(free_bytes.value) / \ (1024**3), 3)) else: disk_space = os.statvfs( environment['torrentcli_dload_path']) if not disk_space.f_frsize: disk_space.f_frsize = disk_space.f_frsize.f_bsize environment['torrentcli_free'] = str(round((float(disk_space.f_bavail) / \ (1024**3)) * float(disk_space.f_frsize), 3)) except: environment['torrentcli_free'] = '?' 
environment['proxy_active'] = '' try: proxy_channel_bloqued_str = base64.b64decode( config.get_setting('proxy_channel_bloqued')).decode('utf-8') proxy_channel_bloqued = dict() proxy_channel_bloqued = ast.literal_eval(proxy_channel_bloqued_str) for channel_bloqued, proxy_active in proxy_channel_bloqued.items(): if proxy_active == 'ON': environment['proxy_active'] += channel_bloqued + ', ' except: pass if not environment['proxy_active']: environment['proxy_active'] = 'OFF' environment['proxy_active'] = environment['proxy_active'].rstrip(', ') for root, folders, files in filetools.walk( xbmc.translatePath("special://logpath/")): for file in files: if file.lower() in ['kodi.log', 'jarvis.log', 'spmc.log', 'cemc.log', \ 'mygica.log', 'wonderbox.log', 'leiapp,log', \ 'leianmc.log', 'kodiapp.log', 'anmc.log', \ 'latin-anmc.log']: environment['log_path'] = str(filetools.join(root, file)) break else: environment['log_path'] = '' break if environment['log_path']: environment['log_size_bytes'] = str( filetools.getsize(environment['log_path'])) environment['log_size'] = str(round(float(environment['log_size_bytes']) / \ (1024*1024), 3)) else: environment['log_size_bytes'] = '' environment['log_size'] = '' environment['debug'] = str(config.get_setting('debug')) environment['addon_version'] = str(config.get_addon_version()) except: logger.error(traceback.format_exc()) environment = {} environment['log_size'] = '' environment['cpu_usage'] = '' environment['python_version'] = '' environment['log_path'] = '' environment['userdata_free'] = '' environment['mem_total'] = '' environment['torrentcli_mem_size'] = '' environment['torrentcli_dload_path'] = '' environment['torrentcli_dload_estrgy'] = '' environment['machine'] = '' environment['platform'] = '' environment['torrentcli_buffer'] = '' environment['videolab_path'] = '' environment['num_version'] = '' environment['os_name'] = '' environment['torrentcli_free'] = '' environment['video_db'] = '' environment['userdata_path'] = '' environment['log_size_bytes'] = '' environment['name_version'] = '' environment['language'] = '' environment['mem_free'] = '' environment['prod_model'] = '' environment['proxy_active'] = '' environment['architecture'] = '' environment['os_release'] = '' environment['videolab_free'] = '' environment['torrentcli_name'] = '' environment['kodi_buffer'] = '' environment['kodi_bmode'] = '' environment['kodi_rfactor'] = '' environment['videolab_series'] = '' environment['videolab_episodios'] = '' environment['videolab_pelis'] = '' environment['videolab_update'] = '' environment['debug'] = '' environment['addon_version'] = '' environment['torrentcli_option'] = '' return environment
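# The free-space figures above come from GetDiskFreeSpaceExW (via ctypes) on Windows and from
# os.statvfs everywhere else, both reported in GiB. This sketch condenses that pattern, with the
# fallback from f_frsize to f_bsize applied inline.
import ctypes
import os
import sys

def free_gib(path):
    if sys.platform.startswith('win'):
        free_bytes = ctypes.c_ulonglong(0)
        ctypes.windll.kernel32.GetDiskFreeSpaceExW(
            ctypes.c_wchar_p(path), None, None, ctypes.pointer(free_bytes))
        return round(free_bytes.value / float(1024 ** 3), 3)
    st = os.statvfs(path)
    return round(st.f_bavail * float(st.f_frsize or st.f_bsize) / (1024 ** 3), 3)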
def overwrite_tools(item): import videolibrary_service from core import videolibrarytools seleccion = platformtools.dialog_yesno(config.get_localized_string(60581), config.get_localized_string(60582), config.get_localized_string(60583)) if seleccion == 1: # tvshows heading = config.get_localized_string(60584) p_dialog = platformtools.dialog_progress_bg( config.get_localized_string(60585), heading) p_dialog.update(0, '') show_list = [] for path, folders, files in filetools.walk( videolibrarytools.TVSHOWS_PATH): show_list.extend( [filetools.join(path, f) for f in files if f == "tvshow.nfo"]) if show_list: t = float(100) / len(show_list) for i, tvshow_file in enumerate(show_list): head_nfo, serie = videolibrarytools.read_nfo(tvshow_file) path = filetools.dirname(tvshow_file) if not serie.active: # si la serie no esta activa descartar continue # Eliminamos la carpeta con la serie ... filetools.rmdirtree(path) # ... y la volvemos a añadir videolibrary_service.update(path, p_dialog, i, t, serie, 3) p_dialog.close() # movies heading = config.get_localized_string(60586) p_dialog2 = platformtools.dialog_progress_bg( config.get_localized_string(60585), heading) p_dialog2.update(0, '') movies_list = [] for path, folders, files in filetools.walk( videolibrarytools.MOVIES_PATH): movies_list.extend([ filetools.join(path, f) for f in files if f.endswith(".json") ]) logger.debug("movies_list %s" % movies_list) if movies_list: t = float(100) / len(movies_list) for i, movie_json in enumerate(movies_list): try: from core import jsontools path = filetools.dirname(movie_json) movie = Item().fromjson(filetools.read(movie_json)) # Eliminamos la carpeta con la pelicula ... filetools.rmdirtree(path) import math heading = config.get_localized_string(60587) p_dialog2.update( int(math.ceil((i + 1) * t)), heading, "%s: %s" % (movie.contentTitle, movie.channel.capitalize())) # ... y la volvemos a añadir videolibrarytools.save_movie(movie) except Exception, ex: logger.error("Error al crear de nuevo la película") template = "An exception of type %s occured. Arguments:\n%r" message = template % (type(ex).__name__, ex.args) logger.error(message) p_dialog2.close()
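# overwrite_tools() drives its background dialog with the step arithmetic used above: each of N
# entries advances the bar by 100/N percent, rounded up for the update call. Tiny sketch.
import math

def progress_percent(index, total):
    step = 100.0 / total
    return int(math.ceil((index + 1) * step))

# progress_percent(0, 8) -> 13, progress_percent(7, 8) -> 100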
def check_addon_updates(verbose=False): logger.info() ADDON_UPDATES_JSON = 'https://extra.alfa-addon.com/addon_updates/updates.json' ADDON_UPDATES_ZIP = 'https://extra.alfa-addon.com/addon_updates/updates.zip' try: last_fix_json = os.path.join( config.get_runtime_path(), 'last_fix.json') # información de la versión fixeada del usuario # Se guarda en get_runtime_path en lugar de get_data_path para que se elimine al cambiar de versión # Descargar json con las posibles actualizaciones # ----------------------------------------------- data = httptools.downloadpage(ADDON_UPDATES_JSON, timeout=2).data if data == '': logger.info('No se encuentran actualizaciones del addon') if verbose: platformtools.dialog_notification( 'Alfa ya está actualizado', 'No hay ninguna actualización urgente') return False data = jsontools.load(data) if 'addon_version' not in data or 'fix_version' not in data: logger.info('No hay actualizaciones del addon') if verbose: platformtools.dialog_notification( 'Alfa ya está actualizado', 'No hay ninguna actualización urgente') return False # Comprobar versión que tiene instalada el usuario con versión de la actualización # -------------------------------------------------------------------------------- current_version = config.get_addon_version(with_fix=False) if current_version != data['addon_version']: logger.info('No hay actualizaciones para la versión %s del addon' % current_version) if verbose: platformtools.dialog_notification( 'Alfa ya está actualizado', 'No hay ninguna actualización urgente') return False if os.path.exists(last_fix_json): lastfix = jsontools.load(filetools.read(last_fix_json)) if lastfix['addon_version'] == data['addon_version'] and lastfix[ 'fix_version'] == data['fix_version']: logger.info( 'Ya está actualizado con los últimos cambios. Versión %s.fix%d' % (data['addon_version'], data['fix_version'])) if verbose: platformtools.dialog_notification( 'Alfa ya está actualizado', 'Versión %s.fix%d' % (data['addon_version'], data['fix_version'])) return False # Descargar zip con las actualizaciones # ------------------------------------- localfilename = os.path.join(config.get_data_path(), 'temp_updates.zip') if os.path.exists(localfilename): os.remove(localfilename) downloadtools.downloadfile(ADDON_UPDATES_ZIP, localfilename, silent=True) # Descomprimir zip dentro del addon # --------------------------------- unzipper = ziptools.ziptools() unzipper.extract(localfilename, config.get_runtime_path()) # Borrar el zip descargado # ------------------------ os.remove(localfilename) # Guardar información de la versión fixeada # ----------------------------------------- if 'files' in data: data.pop('files', None) filetools.write(last_fix_json, jsontools.dump(data)) logger.info('Addon actualizado correctamente a %s.fix%d' % (data['addon_version'], data['fix_version'])) if verbose: platformtools.dialog_notification( 'Alfa actualizado a', 'Versión %s.fix%d' % (data['addon_version'], data['fix_version'])) return True except: logger.error('Error al comprobar actualizaciones del addon!') if verbose: platformtools.dialog_notification( 'Alfa actualizaciones', 'Error al comprobar actualizaciones') return False
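# The hotfix zip is simply unpacked over the addon directory so the patched files replace the
# installed ones; the addon uses its ziptools wrapper (or Kodi's built-in Extract), but the stdlib
# equivalent of that step is:
import zipfile

def apply_fix_zip(zip_path, addon_dir):
    with zipfile.ZipFile(zip_path) as zf:
        zf.extractall(addon_dir)   # overwrites the shipped files in place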
def findvideos(item): from core import autoplay logger.info() # logger.debug("item:\n" + item.tostring('\n')) videolibrarytools.check_renumber_options(item) itemlist = [] list_canales = {} item_local = None # Disable autoplay # autoplay.set_status(False) if not item.contentTitle or not item.strm_path: logger.debug("Unable to search for videos due to lack of parameters") return [] content_title = str(item.contentSeason) + 'x' + (str(item.contentEpisodeNumber) if item.contentEpisodeNumber > 9 else '0' + str(item.contentEpisodeNumber)) if item.contentType == 'movie': item.strm_path = filetools.join(videolibrarytools.MOVIES_PATH, item.strm_path) path_dir = filetools.dirname(item.strm_path) item.nfo = filetools.join(path_dir, filetools.basename(path_dir) + ".nfo") else: item.strm_path = filetools.join(videolibrarytools.TVSHOWS_PATH, item.strm_path) path_dir = filetools.dirname(item.strm_path) item.nfo = filetools.join(path_dir, 'tvshow.nfo') for fd in filetools.listdir(path_dir): if fd.endswith('.json'): contenido, nom_canal = fd[:-6].split('[') if (contenido.startswith(content_title) or item.contentType == 'movie') and nom_canal not in list(list_canales.keys()): list_canales[nom_canal] = filetools.join(path_dir, fd) num_canales = len(list_canales) if 'downloads' in list_canales: json_path = list_canales['downloads'] item_json = Item().fromjson(filetools.read(json_path)) item_json.contentChannel = "local" # Support for relative paths in downloads if filetools.is_relative(item_json.url): item_json.url = filetools.join(videolibrarytools.VIDEOLIBRARY_PATH, item_json.url) del list_canales['downloads'] # Check that the video has not been deleted if filetools.exists(item_json.url): item_local = item_json.clone(action='play') itemlist.append(item_local) else: num_canales -= 1 filtro_canal = '' if num_canales > 1 and config.get_setting("ask_channel", "videolibrary"): opciones = [config.get_localized_string(70089) % k.capitalize() for k in list(list_canales.keys())] opciones.insert(0, config.get_localized_string(70083)) if item_local: opciones.append(item_local.title) from platformcode import platformtools index = platformtools.dialog_select(config.get_localized_string(30163), opciones) if index < 0: return [] elif item_local and index == len(opciones) - 1: filtro_canal = 'downloads' platformtools.play_video(item_local) elif index > 0: filtro_canal = opciones[index].replace(config.get_localized_string(70078), "").strip() itemlist = [] for nom_canal, json_path in list(list_canales.items()): if filtro_canal and filtro_canal != nom_canal.capitalize(): continue item_canal = Item() # We import the channel of the selected part try: if nom_canal == 'community': channel = __import__('specials.%s' % nom_canal, fromlist=["channels.%s" % nom_canal]) else: channel = __import__('channels.%s' % nom_canal, fromlist=["channels.%s" % nom_canal]) except ImportError: exec("import channels." + nom_canal + " as channel") item_json = Item().fromjson(filetools.read(json_path)) list_servers = [] # from core.support import dbg;dbg() try: # FILTERTOOLS # if the channel has a filter, the name it has saved is passed to it so that it filters correctly. 
if "list_language" in item_json: # if it comes from the addon video library if "library_filter_show" in item: item_json.show = item.library_filter_show.get(nom_canal, "") # We run find_videos, from the channel or common item_json.contentChannel = 'videolibrary' item_json.play_from = item.play_from item_json.nfo = item.nfo item_json.strm_path = item.strm_path if hasattr(channel, 'findvideos'): from core import servertools if item_json.videolibray_emergency_urls: del item_json.videolibray_emergency_urls list_servers = getattr(channel, 'findvideos')(item_json) elif item_json.action == 'play': from platformcode import platformtools # autoplay.set_status(True) item_json.contentChannel = item_json.channel item_json.channel = "videolibrary" platformtools.play_video(item_json) return '' else: from core import servertools list_servers = servertools.find_video_items(item_json) except Exception as ex: logger.error("The findvideos function for the channel %s failed" % nom_canal) template = "An exception of type %s occured. Arguments:\n%r" message = template % (type(ex).__name__, ex.args) logger.error(message) logger.error(traceback.format_exc()) # Change the title to the servers adding the name of the channel in front and the infoLabels and the images of the item if the server does not have for server in list_servers: server.contentChannel = server.channel server.channel = "videolibrary" server.nfo = item.nfo server.strm_path = item.strm_path server.play_from = item.play_from # Kodi 18 Compatibility - Prevents wheel from spinning around in Direct Links if server.action == 'play': server.folder = False # Channel name is added if desired if config.get_setting("quit_channel_name", "videolibrary") == 0: server.title = "%s: %s" % (nom_canal.capitalize(), server.title) if not server.thumbnail: server.thumbnail = item.thumbnail # logger.debug("server:\n%s" % server.tostring('\n')) itemlist.append(server) if autoplay.play_multi_channel(item, itemlist): # hideserver return [] add_download_items(item, itemlist) return itemlist
def get_environment(): """ Devuelve las variables de entorno del OS, de Kodi y de Alfa más habituales, necesarias para el diagnóstico de fallos """ try: import base64 import ast environment = config.get_platform(full_version=True) environment['num_version'] = str(environment['num_version']) environment['python_version'] = '%s (%s, %s)' % (str(platform.python_version()), \ str(sys.api_version), str(platform.python_implementation())) environment['os_release'] = str(platform.release()) environment['prod_model'] = '' try: import multiprocessing environment['proc_num'] = ' (%sx)' % str( multiprocessing.cpu_count()) except: environment['proc_num'] = '' if xbmc.getCondVisibility("system.platform.Windows"): try: if platform.platform(): environment['os_release'] = str( platform.platform()).replace('Windows-', '') elif platform._syscmd_ver()[2]: environment['os_release'] = str(platform._syscmd_ver()[2]) command = ["wmic", "cpu", "get", "name"] p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, creationflags=0x08000000) output_cmd, error_cmd = p.communicate() if PY3 and isinstance(output_cmd, bytes): output_cmd = output_cmd.decode() output_cmd = re.sub(r'\n|\r|\s{2}', '', output_cmd) environment['prod_model'] = str(scrapertools.find_single_match(output_cmd, \ '\w+.*?(?i)(?:Intel\(R\))?(?:\s*Core\(TM\))\s*(.*?CPU.*?)\s*(?:\@|$)')) except: pass if xbmc.getCondVisibility("system.platform.Android"): environment['os_name'] = 'Android' try: for label_a in subprocess.check_output('getprop').split(FF): if PY3 and isinstance(label_a, bytes): label_a = label_a.decode() if 'build.version.release' in label_a: environment['os_release'] = str( scrapertools.find_single_match( label_a, ':\s*\[(.*?)\]$')) if 'product.model' in label_a: environment['prod_model'] = str( scrapertools.find_single_match( label_a, ':\s*\[(.*?)\]$')) except: try: for label_a in filetools.read(os.environ['ANDROID_ROOT'] + '/build.prop').split(): if 'build.version.release' in label_a: environment['os_release'] = str( scrapertools.find_single_match( label_a, '=(.*?)$')) if 'product.model' in label_a: environment['prod_model'] = str( scrapertools.find_single_match( label_a, '=(.*?)$')) except: pass environment['prod_model'] += ' (%s)' % config.is_rooted( silent=True) elif xbmc.getCondVisibility("system.platform.Linux"): environment['os_name'] = 'Linux' try: for label_a in subprocess.check_output('hostnamectl').split( FF): if PY3 and isinstance(label_a, bytes): label_a = label_a.decode() if 'Operating' in label_a: environment['os_release'] = str( scrapertools.find_single_match( label_a, 'Operating\s*S\w+:\s*(.*?)\s*$')) break for label_a in subprocess.check_output( ['cat', '/proc/cpuinfo']).split(FF): if PY3 and isinstance(label_a, bytes): label_a = label_a.decode() if 'model name' in label_a: environment['prod_model'] = str(scrapertools.find_single_match(label_a, \ 'model.*?:\s*(?i)(?:Intel\(R\))?(?:\s*Core\(TM\))\s*(.*?CPU.*?)\s*(?:\@|$)')) break except: pass elif xbmc.getCondVisibility("system.platform.Linux.RaspberryPi"): environment['os_name'] = 'RaspberryPi' else: environment['os_name'] = str(platform.system()) if not environment['os_release']: environment['os_release'] = str(platform.release()) if environment['proc_num'] and environment['prod_model']: environment['prod_model'] += environment['proc_num'] environment['machine'] = str(platform.machine()) environment['architecture'] = str(sys.maxsize > 2**32 and "64-bit" or "32-bit") environment['language'] = str(xbmc.getInfoLabel('System.Language')) 
environment['cpu_usage'] = str(xbmc.getInfoLabel('System.CpuUsage')) environment['mem_total'] = str( xbmc.getInfoLabel('System.Memory(total)')).replace('MB', '').replace( 'KB', '') environment['mem_free'] = str( xbmc.getInfoLabel('System.Memory(free)')).replace('MB', '').replace( 'KB', '') if not environment['mem_total'] or not environment['mem_free']: try: if environment['os_name'].lower() == 'windows': kernel32 = ctypes.windll.kernel32 c_ulong = ctypes.c_ulong c_ulonglong = ctypes.c_ulonglong class MEMORYSTATUS(ctypes.Structure): _fields_ = [('dwLength', c_ulong), ('dwMemoryLoad', c_ulong), ('dwTotalPhys', c_ulonglong), ('dwAvailPhys', c_ulonglong), ('dwTotalPageFile', c_ulonglong), ('dwAvailPageFile', c_ulonglong), ('dwTotalVirtual', c_ulonglong), ('dwAvailVirtual', c_ulonglong), ('availExtendedVirtual', c_ulonglong)] memoryStatus = MEMORYSTATUS() memoryStatus.dwLength = ctypes.sizeof(MEMORYSTATUS) kernel32.GlobalMemoryStatus(ctypes.byref(memoryStatus)) environment['mem_total'] = str( old_div(int(memoryStatus.dwTotalPhys), (1024**2))) environment['mem_free'] = str( old_div(int(memoryStatus.dwAvailPhys), (1024**2))) else: with open('/proc/meminfo') as f: meminfo = f.read() environment['mem_total'] = str( old_div( int( re.search(r'MemTotal:\s+(\d+)', meminfo).groups()[0]), 1024)) environment['mem_free'] = str( old_div( int( re.search(r'MemAvailable:\s+(\d+)', meminfo).groups()[0]), 1024)) except: environment['mem_total'] = '' environment['mem_free'] = '' try: environment['kodi_buffer'] = '20' environment['kodi_bmode'] = '0' environment['kodi_rfactor'] = '4.0' if filetools.exists( filetools.join("special://userdata", "advancedsettings.xml")): advancedsettings = filetools.read( filetools.join("special://userdata", "advancedsettings.xml")).split('\n') for label_a in advancedsettings: if 'memorysize' in label_a: environment['kodi_buffer'] = str( old_div( int( scrapertools.find_single_match( label_a, '>(\d+)<\/')), 1024**2)) if 'buffermode' in label_a: environment['kodi_bmode'] = str( scrapertools.find_single_match( label_a, '>(\d+)<\/')) if 'readfactor' in label_a: environment['kodi_rfactor'] = str( scrapertools.find_single_match( label_a, '>(.*?)<\/')) except: pass environment['userdata_path'] = str(config.get_data_path()) environment['userdata_path_perm'] = filetools.file_info( environment['userdata_path']) if not environment['userdata_path_perm']: del environment['userdata_path_perm'] try: if environment['os_name'].lower() == 'windows': free_bytes = ctypes.c_ulonglong(0) ctypes.windll.kernel32.GetDiskFreeSpaceExW( ctypes.c_wchar_p(environment['userdata_path']), None, None, ctypes.pointer(free_bytes)) environment['userdata_free'] = str( round(float(free_bytes.value) / (1024**3), 3)) else: disk_space = os.statvfs(environment['userdata_path']) if not disk_space.f_frsize: disk_space.f_frsize = disk_space.f_frsize.f_bsize environment['userdata_free'] = str(round((float(disk_space.f_bavail) / \ (1024**3)) * float(disk_space.f_frsize), 3)) except: environment['userdata_free'] = '?' if environment['userdata_path_perm']: environment['userdata_path'] = environment['userdata_path_perm'] del environment['userdata_path_perm'] environment['torrent_lang'] = '%s/%s' % (config.get_setting("channel_language", default="").upper(), \ config.get_setting("second_language", default="").upper()) try: environment['videolab_series'] = '?' environment['videolab_episodios'] = '?' environment['videolab_pelis'] = '?' 
environment['videolab_path'] = str(config.get_videolibrary_path()) environment['videolab_path_perm'] = filetools.file_info( environment['videolab_path']) if not environment['videolab_path_perm']: environment['videolab_path_perm'] = environment[ 'videolab_path'] if filetools.exists(filetools.join(environment['videolab_path'], \ config.get_setting("folder_tvshows"))): environment['videolab_series'] = str(len(filetools.listdir(filetools.join(environment['videolab_path'], \ config.get_setting("folder_tvshows"))))) counter = 0 for root, folders, files in filetools.walk(filetools.join(environment['videolab_path'], \ config.get_setting("folder_tvshows"))): for file in files: if file.endswith('.strm'): counter += 1 environment['videolab_episodios'] = str(counter) if filetools.exists(filetools.join(environment['videolab_path'], \ config.get_setting("folder_movies"))): environment['videolab_pelis'] = str(len(filetools.listdir(filetools.join(environment['videolab_path'], \ config.get_setting("folder_movies"))))) except: pass try: video_updates = [ 'No', 'Inicio', 'Una vez', 'Inicio+Una vez', 'Dos veces al día' ] environment['videolab_update'] = str( video_updates[config.get_setting("update", "videolibrary")]) except: environment['videolab_update'] = '?' try: if environment['os_name'].lower() == 'windows': free_bytes = ctypes.c_ulonglong(0) ctypes.windll.kernel32.GetDiskFreeSpaceExW( ctypes.c_wchar_p(environment['videolab_path']), None, None, ctypes.pointer(free_bytes)) environment['videolab_free'] = str( round(float(free_bytes.value) / (1024**3), 3)) else: disk_space = os.statvfs(environment['videolab_path']) if not disk_space.f_frsize: disk_space.f_frsize = disk_space.f_frsize.f_bsize environment['videolab_free'] = str(round((float(disk_space.f_bavail) / \ (1024**3)) * float(disk_space.f_frsize), 3)) except: environment['videolab_free'] = '?' 
environment['torrent_list'] = [] environment['torrentcli_option'] = '' environment['torrent_error'] = '' environment['torrentcli_rar'] = config.get_setting("mct_rar_unpack", server="torrent", default=True) environment['torrentcli_backgr'] = config.get_setting( "mct_background_download", server="torrent", default=True) environment['torrentcli_lib_path'] = config.get_setting( "libtorrent_path", server="torrent", default="") if environment['torrentcli_lib_path']: lib_path = 'Activo' else: lib_path = 'Inactivo' if config.get_setting("libtorrent_version", server="torrent", default=""): lib_path += '-%s' % config.get_setting( "libtorrent_version", server="torrent", default="") environment['torrentcli_unrar'] = config.get_setting("unrar_path", server="torrent", default="") if environment['torrentcli_unrar']: if xbmc.getCondVisibility("system.platform.Android"): unrar = 'Android' else: unrar = filetools.dirname(environment['torrentcli_unrar']) unrar = filetools.basename(unrar).capitalize() else: unrar = 'Inactivo' torrent_id = config.get_setting("torrent_client", server="torrent", default=0) environment['torrentcli_option'] = str(torrent_id) torrent_options = platformtools.torrent_client_installed() if lib_path != 'Inactivo': torrent_options = ['MCT'] + torrent_options torrent_options = ['BT'] + torrent_options environment['torrent_list'].append({'Torrent_opt': str(torrent_id), 'Libtorrent': lib_path, \ 'RAR_Auto': str(environment['torrentcli_rar']), \ 'RAR_backgr': str(environment['torrentcli_backgr']), \ 'UnRAR': unrar}) environment['torrent_error'] = config.get_setting("libtorrent_error", server="torrent", default="") if environment['torrent_error']: environment['torrent_list'].append( {'Libtorrent_error': environment['torrent_error']}) for torrent_option in torrent_options: cliente = dict() cliente['D_load_Path'] = '' cliente['Libre'] = '?' 
cliente['Plug_in'] = torrent_option.replace('Plugin externo: ', '') if cliente['Plug_in'] == 'BT': cliente['D_load_Path'] = str( config.get_setting("bt_download_path", server="torrent", default='')) if not cliente['D_load_Path']: continue cliente['D_load_Path'] = filetools.join( cliente['D_load_Path'], 'BT-torrents') cliente['D_load_Path_perm'] = filetools.file_info( cliente['D_load_Path']) if not cliente['D_load_Path_perm']: del cliente['D_load_Path_perm'] cliente['Buffer'] = str( config.get_setting("bt_buffer", server="torrent", default=50)) elif cliente['Plug_in'] == 'MCT': cliente['D_load_Path'] = str( config.get_setting("mct_download_path", server="torrent", default='')) if not cliente['D_load_Path']: continue cliente['D_load_Path'] = filetools.join( cliente['D_load_Path'], 'MCT-torrent-videos') cliente['D_load_Path_perm'] = filetools.file_info( cliente['D_load_Path']) if not cliente['D_load_Path_perm']: del cliente['D_load_Path_perm'] cliente['Buffer'] = str( config.get_setting("mct_buffer", server="torrent", default=50)) elif xbmc.getCondVisibility('System.HasAddon("plugin.video.%s")' % cliente['Plug_in']): try: __settings__ = xbmcaddon.Addon(id="plugin.video.%s" % cliente['Plug_in']) except: continue cliente['Plug_in'] = cliente['Plug_in'].capitalize() if cliente['Plug_in'] == 'Torrenter': cliente['D_load_Path'] = str( filetools.translatePath( __settings__.getSetting('storage'))) if not cliente['D_load_Path']: cliente['D_load_Path'] = str(filetools.join("special://home/", \ "cache", "xbmcup", "plugin.video.torrenter", "Torrenter")) cliente['D_load_Path_perm'] = filetools.file_info( cliente['D_load_Path']) if not cliente['D_load_Path_perm']: del cliente['D_load_Path_perm'] cliente['Buffer'] = str( __settings__.getSetting('pre_buffer_bytes')) else: cliente['D_load_Path'] = str( filetools.translatePath( __settings__.getSetting('download_path'))) cliente['D_load_Path_perm'] = filetools.file_info( cliente['D_load_Path']) if not cliente['D_load_Path_perm']: del cliente['D_load_Path_perm'] cliente['Buffer'] = str( __settings__.getSetting('buffer_size')) if __settings__.getSetting( 'download_storage' ) == '1' and __settings__.getSetting('memory_size'): cliente['Memoria'] = str( __settings__.getSetting('memory_size')) if cliente['D_load_Path']: try: if environment['os_name'].lower() == 'windows': free_bytes = ctypes.c_ulonglong(0) ctypes.windll.kernel32.GetDiskFreeSpaceExW( ctypes.c_wchar_p(cliente['D_load_Path']), None, None, ctypes.pointer(free_bytes)) cliente['Libre'] = str(round(float(free_bytes.value) / \ (1024**3), 3)).replace('.', ',') else: disk_space = os.statvfs(cliente['D_load_Path']) if not disk_space.f_frsize: disk_space.f_frsize = disk_space.f_frsize.f_bsize cliente['Libre'] = str(round((float(disk_space.f_bavail) / \ (1024**3)) * float(disk_space.f_frsize), 3)).replace('.', ',') except: pass if cliente['D_load_Path_perm']: cliente['D_load_Path'] = cliente['D_load_Path_perm'] del cliente['D_load_Path_perm'] environment['torrent_list'].append(cliente) environment['proxy_active'] = '' try: proxy_channel_bloqued_str = base64.b64decode( config.get_setting('proxy_channel_bloqued')).decode('utf-8') proxy_channel_bloqued = dict() proxy_channel_bloqued = ast.literal_eval(proxy_channel_bloqued_str) for channel_bloqued, proxy_active in list( proxy_channel_bloqued.items()): if proxy_active != 'OFF': environment['proxy_active'] += channel_bloqued + ', ' except: pass if not environment['proxy_active']: environment['proxy_active'] = 'OFF' environment['proxy_active'] = 
environment['proxy_active'].rstrip(', ') for root, folders, files in filetools.walk("special://logpath/"): for file in files: if file.lower() in ['kodi.log', 'jarvis.log', 'spmc.log', 'cemc.log', \ 'mygica.log', 'wonderbox.log', 'leiapp,log', \ 'leianmc.log', 'kodiapp.log', 'anmc.log', \ 'latin-anmc.log']: environment['log_path'] = str(filetools.join(root, file)) break else: environment['log_path'] = '' break if environment['log_path']: environment['log_size_bytes'] = str( filetools.getsize(environment['log_path'])) environment['log_size'] = str(round(float(environment['log_size_bytes']) / \ (1024*1024), 3)) else: environment['log_size_bytes'] = '' environment['log_size'] = '' environment['debug'] = str(config.get_setting('debug')) environment['addon_version'] = '%s (Upd: %s h.)' % (str(config.get_addon_version()), \ str(config.get_setting("addon_update_timer", default=12)).replace('0', 'No')) environment['assistant_version'] = str(None) if filetools.exists( filetools.join(config.get_data_path(), 'alfa-mobile-assistant.version')): environment['assistant_version'] = filetools.read( filetools.join(config.get_data_path(), 'alfa-mobile-assistant.version')) environment['assistant_cf_ua'] = str( config.get_setting('cf_assistant_ua', None)) except: logger.error(traceback.format_exc()) environment = {} environment['log_size'] = '' environment['cpu_usage'] = '' environment['python_version'] = '' environment['log_path'] = '' environment['userdata_free'] = '' environment['mem_total'] = '' environment['machine'] = '' environment['platform'] = '' environment['videolab_path'] = '' environment['num_version'] = '' environment['os_name'] = '' environment['video_db'] = '' environment['userdata_path'] = '' environment['log_size_bytes'] = '' environment['name_version'] = '' environment['language'] = '' environment['mem_free'] = '' environment['prod_model'] = '' environment['proxy_active'] = '' environment['architecture'] = '' environment['os_release'] = '' environment['videolab_free'] = '' environment['kodi_buffer'] = '' environment['kodi_bmode'] = '' environment['kodi_rfactor'] = '' environment['videolab_series'] = '' environment['videolab_episodios'] = '' environment['videolab_pelis'] = '' environment['videolab_update'] = '' environment['videolab_path_perm'] = '' environment['debug'] = '' environment['addon_version'] = '' environment['torrent_list'] = [] environment['torrent_lang'] = '' environment['torrentcli_option'] = '' environment['torrentcli_rar'] = '' environment['torrentcli_lib_path'] = '' environment['torrentcli_unrar'] = '' environment['torrent_error'] = '' environment['assistant_version'] = '' environment['assistant_cf_ua'] = '' return environment
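# The disk-space probes in get_environment() branch between a Windows ctypes call and
# os.statvfs(). Below is a minimal, hedged sketch of the same idea using only the standard
# library (assuming Python 3, where shutil.disk_usage() covers both platforms). As an
# observation only: the expression "disk_space.f_frsize = disk_space.f_frsize.f_bsize" used
# above does not look like it can succeed (statvfs results are read-only and an int has no
# f_bsize attribute); the surrounding try/except masks it and f_frsize is normally non-zero,
# so the probable intent was disk_space.f_bsize. free_gb() is illustrative, not addon code.
import shutil


def free_gb(path):
    """Illustrative helper: free space of the filesystem containing `path`, in GiB, as a string."""
    try:
        free_bytes = shutil.disk_usage(path).free
        return str(round(free_bytes / (1024 ** 3), 3))
    except OSError:
        # Mirror the '?' fallback used by the report when the path cannot be inspected
        return '?'

# Example (illustrative): free_gb(environment['userdata_path'])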
def check_addon_updates(verbose=False): logger.info() ADDON_UPDATES_JSON = 'https://balandro.tk/addon_updates/updates.json' ADDON_UPDATES_ZIP = 'https://balandro.tk/addon_updates/updates.zip' try: last_fix_json = os.path.join(config.get_runtime_path(), 'last_fix.json') # información de la versión fixeada del usuario # Se guarda en get_runtime_path en lugar de get_data_path para que se elimine al cambiar de versión # Descargar json con las posibles actualizaciones # ----------------------------------------------- data = httptools.downloadpage(ADDON_UPDATES_JSON, timeout=2).data if data == '': logger.info('No se encuentran actualizaciones del addon') if verbose: platformtools.dialog_notification('Balandro ya está actualizado', 'No hay ninguna actualización pendiente') return False data = jsontools.load(data) if 'addon_version' not in data or 'fix_version' not in data: logger.info('No hay actualizaciones del addon') if verbose: platformtools.dialog_notification('Balandro ya está actualizado', 'No hay ninguna actualización pendiente') return False # Comprobar versión que tiene instalada el usuario con versión de la actualización # -------------------------------------------------------------------------------- current_version = config.get_addon_version(with_fix=False) if current_version != data['addon_version']: logger.info('No hay actualizaciones para la versión %s del addon' % current_version) if verbose: platformtools.dialog_notification('Balandro ya está actualizado', 'No hay ninguna actualización pendiente') return False if os.path.exists(last_fix_json): lastfix = jsontools.load(filetools.read(last_fix_json)) if lastfix['addon_version'] == data['addon_version'] and lastfix['fix_version'] == data['fix_version']: logger.info('Ya está actualizado con los últimos cambios. Versión %s.fix%d' % (data['addon_version'], data['fix_version'])) if verbose: platformtools.dialog_notification('Balandro ya está actualizado', 'Versión %s.fix%d' % (data['addon_version'], data['fix_version'])) return False # Descargar zip con las actualizaciones # ------------------------------------- localfilename = os.path.join(config.get_data_path(), 'temp_updates.zip') if os.path.exists(localfilename): os.remove(localfilename) down_stats = downloadtools.do_download(ADDON_UPDATES_ZIP, config.get_data_path(), 'temp_updates.zip', silent=True, resume=False) if down_stats['downloadStatus'] != 2: # 2:completed logger.info('No se puede descargar la actualización') if verbose: platformtools.dialog_notification('Actualización fallida', 'No se puede descargar la actualización') return False # Descomprimir zip dentro del addon # --------------------------------- import xbmc xbmc.executebuiltin('XBMC.Extract("%s", "%s")' % (localfilename, config.get_runtime_path())) time.sleep(1) # Borrar el zip descargado # ------------------------ os.remove(localfilename) # Guardar información de la versión fixeada # ----------------------------------------- if 'files' in data: data.pop('files', None) filetools.write(last_fix_json, jsontools.dump(data)) logger.info('Addon actualizado correctamente a %s.fix%d' % (data['addon_version'], data['fix_version'])) if verbose: platformtools.dialog_notification('Balandro actualizado a', 'Versión %s.fix%d' % (data['addon_version'], data['fix_version'])) return True except: logger.error('Error al comprobar actualizaciones del addon!') logger.error(traceback.format_exc()) if verbose: platformtools.dialog_notification('Balandro actualizaciones', 'Error al comprobar actualizaciones') return False
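# check_addon_updates() decides whether to download updates.zip by comparing the remote
# updates.json against the last_fix.json stored in the runtime path. A hedged sketch of that
# comparison with the standard library only; needs_fix() and its return contract are
# illustrative, not part of the addon.
import json
import os


def needs_fix(last_fix_path, remote_data, current_version):
    """Return True if remote_data describes a fix not yet applied to this installation."""
    if remote_data.get('addon_version') != current_version:
        # The published fix targets another base version of the addon
        return False
    if not os.path.exists(last_fix_path):
        # No fix has ever been applied on this installation
        return True
    with open(last_fix_path) as f:
        last_fix = json.load(f)
    return (last_fix.get('addon_version'), last_fix.get('fix_version')) != \
           (remote_data.get('addon_version'), remote_data.get('fix_version'))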
def create_tvshows_from_xml():
    logger.info("streamondemand.platformcode.library_service create_tvshows_from_xml")

    fname = filetools.join(config.get_data_path(), library.TVSHOW_FILE_OLD)

    if filetools.exists(fname):
        platformtools.dialog_ok(
            "Biblioteca: Se va a actualizar al nuevo formato",
            "Seleccione el nombre correcto de cada serie, si no está seguro pulse 'Cancelar'.",
            "Hay nuevas opciones en 'Biblioteca' y en la 'configuración' del addon.")

        filetools.rename(library.TVSHOWS_PATH, "SERIES_OLD")

        if not filetools.exists(library.TVSHOWS_PATH):
            filetools.mkdir(library.TVSHOWS_PATH)

            if filetools.exists(library.TVSHOWS_PATH):
                try:
                    data = filetools.read(fname)
                    for line in data.splitlines():
                        aux = line.rstrip('\n').split(",")
                        tvshow = aux[0].strip()
                        url = aux[1].strip()
                        channel = aux[2].strip()

                        serie = Item(contentSerieName=tvshow, url=url, channel=channel, action="episodios",
                                     title=tvshow, active=True)

                        patron = "^(.+)[\s]\((\d{4})\)$"
                        matches = re.compile(patron, re.DOTALL).findall(serie.contentSerieName)
                        if matches:
                            serie.infoLabels['title'] = matches[0][0]
                            serie.infoLabels['year'] = matches[0][1]
                        else:
                            serie.infoLabels['title'] = tvshow

                        library.save_library_tvshow(serie, list())

                    filetools.rename(fname, "series.xml.old")

                    # Finally, clean the library: the old paths no longer exist
                    library.clean()

                except EnvironmentError:
                    logger.info("ERROR al leer el archivo: {0}".format(fname))
            else:
                logger.info("ERROR, no se ha podido crear la nueva carpeta de SERIES")
        else:
            logger.info("ERROR, no se ha podido renombrar la antigua carpeta de SERIES")

        return True

    return False
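# create_tvshows_from_xml() migrates the old one-line-per-show file ("Title (Year),url,channel").
# The sketch below isolates that line parsing with the standard library; parse_old_series_line()
# and the returned dict are illustrative and stand in for the Item object built by the addon
# (lines with fewer than three comma-separated fields are not handled here).
import re


def parse_old_series_line(line):
    """Split one 'title,url,channel' line and strip a trailing (YYYY) year from the title."""
    tvshow, url, channel = [part.strip() for part in line.rstrip('\n').split(',')[:3]]
    match = re.match(r'^(.+)\s\((\d{4})\)$', tvshow)
    title, year = (match.group(1), match.group(2)) if match else (tvshow, '')
    return {'title': title, 'year': year, 'url': url, 'channel': channel}

# parse_old_series_line("Some Show (2008),http://example.com/serie,somechannel")
# -> {'title': 'Some Show', 'year': '2008', 'url': 'http://example.com/serie', 'channel': 'somechannel'}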
def run(item=None): # from core.support import dbg;dbg() logger.debug() if not item: # Extract item from sys.argv if sys.argv[2]: sp = sys.argv[2].split('&') url = sp[0] item = Item().fromurl(url) if len(sp) > 1: for e in sp[1:]: key, val = e.split('=') item.__setattr__(key, val) # If no item, this is mainlist else: item = Item(channel="channelselector", action="getmainlist", viewmode="movie") if not config.get_setting('show_once'): if not config.get_all_settings_addon(): logger.error('corrupted settings.xml!!') settings_xml = os.path.join(config.get_data_path(), "settings.xml") settings_bak = os.path.join(config.get_data_path(), "settings.bak") if filetools.exists(settings_bak): filetools.copy(settings_bak, settings_xml, True) logger.info('restored settings.xml from backup') else: filetools.write(settings_xml, '<settings version="2">\n</settings>' ) # resetted settings else: from platformcode import xbmc_videolibrary xbmc_videolibrary.ask_set_content(silent=False) config.set_setting('show_once', True) logger.info(item.tostring()) try: if not config.get_setting('tmdb_active'): config.set_setting('tmdb_active', True) # If item has no action, stops here if item.action == "": logger.debug("Item without action") return # Action for main menu in channelselector elif item.action == "getmainlist": import channelselector itemlist = channelselector.getmainlist() platformtools.render_items(itemlist, item) # Action for channel types on channelselector: movies, series, etc. elif item.action == "getchanneltypes": import channelselector itemlist = channelselector.getchanneltypes() platformtools.render_items(itemlist, item) # Action for channel listing on channelselector elif item.action == "filterchannels": import channelselector itemlist = channelselector.filterchannels(item.channel_type) platformtools.render_items(itemlist, item) # Special action for playing a video from the library elif item.action == "play_from_library": play_from_library(item) return elif item.action == "keymap": from platformcode import keymaptools if item.open: return keymaptools.open_shortcut_menu() else: return keymaptools.set_key() elif item.channel == "infoplus": from platformcode import infoplus return infoplus.Main(item) elif item.channel == "backup": from platformcode import backup return getattr(backup, item.action)(item) elif item.channel == "elementum_download": from platformcode import elementum_download return getattr(elementum_download, item.action)(item) elif item.channel == "shortcuts": from platformcode import shortcuts return getattr(shortcuts, item.action)(item) elif item.channel == "autorenumber": from platformcode import autorenumber return getattr(autorenumber, item.action)(item) elif item.action == "delete_key": from platformcode import keymaptools return keymaptools.delete_key() elif item.action == "script": from core import tmdb if tmdb.drop_bd(): platformtools.dialog_notification( config.get_localized_string(20000), config.get_localized_string(60011), time=2000, sound=False) elif item.action == "itemInfo": platformtools.dialog_textviewer('Item info', item.parent) elif item.action == "open_browser": import webbrowser if not webbrowser.open(item.url): import xbmc if xbmc.getCondVisibility( 'system.platform.linux') and xbmc.getCondVisibility( 'system.platform.android'): # android xbmc.executebuiltin( 'StartAndroidActivity("", "android.intent.action.VIEW", "", "%s")' % (item.url)) else: try: import urllib.request as urllib except ImportError: import urllib short = urllib.urlopen( 
'https://u.nu/api.php?action=shorturl&format=simple&url=' + item.url).read().decode('utf-8') platformtools.dialog_ok( config.get_localized_string(20000), config.get_localized_string(70740) % short) # Action in certain channel specified in "action" and "channel" parameters elif item.action == "check_channels": from platformcode import checkhost checkhost.check_channels() else: # Checks if channel exists if os.path.isfile( os.path.join(config.get_runtime_path(), 'channels', item.channel + ".py")): CHANNELS = 'channels' else: CHANNELS = 'specials' channel_file = os.path.join(config.get_runtime_path(), CHANNELS, item.channel + ".py") logger.debug("channel_file= " + channel_file + ' - ' + CHANNELS + ' - ' + item.channel) channel = None if os.path.exists(channel_file): try: channel = __import__('%s.%s' % (CHANNELS, item.channel), None, None, ['%s.%s' % (CHANNELS, item.channel)]) except ImportError: exec("import " + CHANNELS + "." + item.channel + " as channel") logger.info("Running channel %s | %s" % (channel.__name__, channel.__file__)) # Special play action if item.action == "play": # define la info para trakt try: from core import trakt_tools trakt_tools.set_trakt_info(item) except: pass logger.debug("item.action=%s" % item.action.upper()) # logger.debug("item_toPlay: " + "\n" + item.tostring('\n')) # First checks if channel has a "play" function if hasattr(channel, 'play'): logger.debug("Executing channel 'play' method") itemlist = channel.play(item) b_favourite = item.isFavourite # Play should return a list of playable URLS if len(itemlist) > 0 and isinstance(itemlist[0], Item): item = itemlist[0] if b_favourite: item.isFavourite = True platformtools.play_video(item) # Permitir varias calidades desde play en el Channel elif len(itemlist) > 0 and isinstance(itemlist[0], list): item.video_urls = itemlist platformtools.play_video(item) # If not, shows user an error message else: platformtools.dialog_ok( config.get_localized_string(20000), config.get_localized_string(60339)) # If player don't have a "play" function, not uses the standard play from platformtools else: logger.debug("Executing core 'play' method") platformtools.play_video(item) # Special action for findvideos, where the plugin looks for known urls elif item.action == "findvideos": from core import servertools # First checks if channel has a "findvideos" function if hasattr(channel, 'findvideos'): itemlist = getattr(channel, item.action)(item) # If not, uses the generic findvideos function else: logger.debug("No channel 'findvideos' method, " "executing core method") itemlist = servertools.find_video_items(item) if config.get_setting("max_links", "videolibrary") != 0: itemlist = limit_itemlist(itemlist) from platformcode import subtitletools subtitletools.saveSubtitleName(item) platformtools.render_items(itemlist, item) # Special action for adding a movie to the library elif item.action == "add_pelicula_to_library": from core import videolibrarytools videolibrarytools.add_movie(item) # Special action for adding a serie to the library elif item.action == "add_serie_to_library": from core import videolibrarytools videolibrarytools.add_tvshow(item, channel) # Special action for downloading all episodes from a serie elif item.action == "download_all_episodes": from specials import downloads item.action = item.extra del item.extra downloads.save_download(item) # Special action for searching, first asks for the words then call the "search" function elif item.action == "search": # from core.support import dbg;dbg() if 
filetools.isfile(temp_search_file) and config.get_setting( 'videolibrary_kodi'): itemlist = [] f = filetools.read(temp_search_file) strList = f.split(',') if strList[0] == '[V]' and strList[1] == item.channel: for it in strList: if it and it not in ['[V]', item.channel]: itemlist.append(Item().fromurl(it)) filetools.write(temp_search_file, f[4:]) return platformtools.render_items(itemlist, item) else: filetools.remove(temp_search_file) logger.debug("item.action=%s" % item.action.upper()) from core import channeltools if config.get_setting('last_search'): last_search = channeltools.get_channel_setting( 'Last_searched', 'search', '') else: last_search = '' search_text = platformtools.dialog_input(last_search) if search_text is not None: channeltools.set_channel_setting('Last_searched', search_text, 'search') itemlist = new_search(item.clone(text=search_text), channel) else: return platformtools.render_items(itemlist, item) # For all other actions else: # import web_pdb; web_pdb.set_trace() logger.debug("Executing channel '%s' method" % item.action) itemlist = getattr(channel, item.action)(item) if config.get_setting('trakt_sync'): from core import trakt_tools token_auth = config.get_setting("token_trakt", "trakt") if not token_auth: trakt_tools.auth_trakt() else: import xbmc if not xbmc.getCondVisibility( 'System.HasAddon(script.trakt)' ) and config.get_setting('install_trakt'): trakt_tools.ask_install_script() itemlist = trakt_tools.trakt_check(itemlist) else: config.set_setting('install_trakt', True) platformtools.render_items(itemlist, item) except WebErrorException as e: import traceback from core import scrapertools logger.error(traceback.format_exc()) platformtools.dialog_ok( config.get_localized_string(59985) % e.channel, config.get_localized_string(60013) % e.url) except Exception as e: import traceback from core import scrapertools logger.error(traceback.format_exc()) patron = 'File "' + os.path.join(config.get_runtime_path(), "channels", "").replace("\\", "\\\\") + r'([^.]+)\.py"' Channel = scrapertools.find_single_match(traceback.format_exc(), patron) if Channel or e.__class__ == logger.ChannelScraperException: if item.url: if platformtools.dialog_yesno( config.get_localized_string(60087) % Channel, config.get_localized_string(60014), nolabel='ok', yeslabel=config.get_localized_string(70739)): run(Item(action="open_browser", url=item.url)) else: platformtools.dialog_ok( config.get_localized_string(60087) % Channel, config.get_localized_string(60014)) else: if platformtools.dialog_yesno(config.get_localized_string(60038), config.get_localized_string(60015)): run(Item(channel="setting", action="report_menu"))
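# run() resolves the channel module from either 'channels' or 'specials' and then calls the
# method named by item.action. A reduced sketch of that dispatch using importlib instead of the
# __import__/exec pair above; dispatch(), runtime_path and the argument names are illustrative.
import importlib
import os


def dispatch(runtime_path, channel_name, action, item):
    """Import channels.<name> (or specials.<name>) and invoke the requested action."""
    package = 'channels' if os.path.isfile(
        os.path.join(runtime_path, 'channels', channel_name + '.py')) else 'specials'
    module = importlib.import_module('%s.%s' % (package, channel_name))
    # Channels expose their actions as module-level functions taking the Item
    return getattr(module, action)(item)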
def main(): if scrapertools.wait_for_internet(retry=10): # -- Update channels from repository streamondemand ------ try: from core import update_channels except: logger.info( "streamondemand.library_service Error in update_channels") # ---------------------------------------------------------------------- # -- Update servertools and servers from repository streamondemand ------ try: from core import update_servers except: logger.info( "streamondemand.library_service Error in update_servers") # ---------------------------------------------------------------------- logger.info("streamondemand.library_service Actualizando series...") p_dialog = None try: if config.get_setting("updatelibrary") == "true": heading = 'Aggiornamento biblioteca....' p_dialog = platformtools.dialog_progress_bg( 'streamondemand', heading) p_dialog.update(0, '') show_list = [] for path, folders, files in filetools.walk( library.TVSHOWS_PATH): show_list.extend([ filetools.join(path, f) for f in files if f == "tvshow.json" ]) # fix float porque la division se hace mal en python 2.x t = float(100) / len(show_list) for i, tvshow_file in enumerate(show_list): serie = Item().fromjson(filetools.read(tvshow_file)) path = filetools.dirname(tvshow_file) logger.info("streamondemand.library_service serie=" + serie.contentSerieName) logger.info( "streamondemand.library_service Actualizando " + path) logger.info("streamondemand.library_service url " + serie.url) show_name = serie.contentTitle if show_name == "": show_name = serie.show p_dialog.update(int(math.ceil((i + 1) * t)), heading, show_name) # si la serie esta activa se actualiza if serie.active: try: pathchannels = filetools.join( config.get_runtime_path(), "channels", serie.channel + '.py') logger.info( "streamondemand.library_service Cargando canal: " + pathchannels + " " + serie.channel) obj = imp.load_source(serie.channel, pathchannels) itemlist = obj.episodios(serie) try: library.save_library_episodes( path, itemlist, serie, True) except Exception as ex: logger.info( "streamondemand.library_service Error al guardar los capitulos de la serie" ) template = "An exception of type {0} occured. Arguments:\n{1!r}" message = template.format( type(ex).__name__, ex.args) logger.info(message) except Exception as ex: logger.error( "Error al obtener los episodios de: {0}". format(serie.show)) template = "An exception of type {0} occured. Arguments:\n{1!r}" message = template.format( type(ex).__name__, ex.args) logger.info(message) p_dialog.close() else: logger.info( "No actualiza la biblioteca, está desactivado en la configuración de streamondemand" ) except Exception as ex: logger.info( "streamondemand.library_service Se ha producido un error al actualizar las series" ) template = "An exception of type {0} occured. Arguments:\n{1!r}" message = template.format(type(ex).__name__, ex.args) logger.info(message) if p_dialog: p_dialog.close()
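# The library updater above walks TVSHOWS_PATH for tvshow.json files and turns the loop index
# into a percentage for the progress dialog (note the float cast that avoids Python 2 integer
# division). A compact sketch of that scan, standard library only; names are illustrative.
import math
import os


def iter_tvshow_files(tvshows_path):
    """Yield (percent_done, tvshow_json_path) for every series folder under tvshows_path."""
    show_list = []
    for path, folders, files in os.walk(tvshows_path):
        show_list.extend(os.path.join(path, f) for f in files if f == 'tvshow.json')
    step = 100.0 / len(show_list) if show_list else 0.0
    for i, tvshow_file in enumerate(show_list):
        yield int(math.ceil((i + 1) * step)), tvshow_file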
def set_content(content_type, silent=False): """ Procedimiento para auto-configurar la videoteca de kodi con los valores por defecto @type content_type: str ('movie' o 'tvshow') @param content_type: tipo de contenido para configurar, series o peliculas """ continuar = True msg_text = "" videolibrarypath = config.get_setting("videolibrarypath") if content_type == 'movie': scraper = [config.get_localized_string(70093), config.get_localized_string(70096)] seleccion = platformtools.dialog_select(config.get_localized_string(70094), scraper) # Instalar The Movie Database if seleccion == -1 or seleccion == 0: if not xbmc.getCondVisibility('System.HasAddon(metadata.themoviedb.org)'): if not silent: # Preguntar si queremos instalar metadata.themoviedb.org install = platformtools.dialog_yesno(config.get_localized_string(60046)) else: install = True if install: try: # Instalar metadata.themoviedb.org xbmc.executebuiltin('xbmc.installaddon(metadata.themoviedb.org)', True) logger.info("Instalado el Scraper de películas de TheMovieDB") except: pass continuar = (install and xbmc.getCondVisibility('System.HasAddon(metadata.themoviedb.org)')) if not continuar: msg_text = config.get_localized_string(60047) if continuar: xbmc.executebuiltin('xbmc.addon.opensettings(metadata.themoviedb.org)', True) # Instalar Universal Movie Scraper elif seleccion == 1: if continuar and not xbmc.getCondVisibility('System.HasAddon(metadata.universal)'): continuar = False if not silent: # Preguntar si queremos instalar metadata.universal install = platformtools.dialog_yesno(config.get_localized_string(70095)) else: install = True if install: try: xbmc.executebuiltin('xbmc.installaddon(metadata.universal)', True) if xbmc.getCondVisibility('System.HasAddon(metadata.universal)'): continuar = True except: pass continuar = (install and continuar) if not continuar: msg_text = config.get_localized_string(70097) if continuar: xbmc.executebuiltin('xbmc.addon.opensettings(metadata.universal)', True) else: # SERIES scraper = [config.get_localized_string(70098), config.get_localized_string(70093)] seleccion = platformtools.dialog_select(config.get_localized_string(70107), scraper) # Instalar The TVDB if seleccion == -1 or seleccion == 0: if not xbmc.getCondVisibility('System.HasAddon(metadata.tvdb.com)'): if not silent: # Preguntar si queremos instalar metadata.tvdb.com install = platformtools.dialog_yesno(config.get_localized_string(60048)) else: install = True if install: try: # Instalar metadata.tvdb.com xbmc.executebuiltin('xbmc.installaddon(metadata.tvdb.com)', True) logger.info("Instalado el Scraper de series de The TVDB") except: pass continuar = (install and xbmc.getCondVisibility('System.HasAddon(metadata.tvdb.com)')) if not continuar: msg_text = config.get_localized_string(70099) if continuar: xbmc.executebuiltin('xbmc.addon.opensettings(metadata.tvdb.com)', True) # Instalar The Movie Database elif seleccion == 1: if continuar and not xbmc.getCondVisibility('System.HasAddon(metadata.tvshows.themoviedb.org)'): continuar = False if not silent: # Preguntar si queremos instalar metadata.tvshows.themoviedb.org install = platformtools.dialog_yesno(config.get_localized_string(70100)) else: install = True if install: try: # Instalar metadata.tvshows.themoviedb.org xbmc.executebuiltin('xbmc.installaddon(metadata.tvshows.themoviedb.org)', True) if xbmc.getCondVisibility('System.HasAddon(metadata.tvshows.themoviedb.org)'): continuar = True except: pass continuar = (install and continuar) if not continuar: msg_text = 
config.get_localized_string(60047) if continuar: xbmc.executebuiltin('xbmc.addon.opensettings(metadata.tvshows.themoviedb.org)', True) idPath = 0 idParentPath = 0 if continuar: continuar = False # Buscamos el idPath sql = 'SELECT MAX(idPath) FROM path' nun_records, records = execute_sql_kodi(sql) if nun_records == 1: idPath = records[0][0] + 1 sql_videolibrarypath = videolibrarypath if sql_videolibrarypath.startswith("special://"): sql_videolibrarypath = sql_videolibrarypath.replace('/profile/', '/%/').replace('/home/userdata/', '/%/') sep = '/' elif scrapertools.find_single_match(sql_videolibrarypath, '(^\w+:\/\/)'): sep = '/' else: sep = os.sep if not sql_videolibrarypath.endswith(sep): sql_videolibrarypath += sep # Buscamos el idParentPath sql = 'SELECT idPath, strPath FROM path where strPath LIKE "%s"' % sql_videolibrarypath nun_records, records = execute_sql_kodi(sql) if nun_records == 1: idParentPath = records[0][0] videolibrarypath = records[0][1][:-1] continuar = True else: # No existe videolibrarypath en la BD: la insertamos sql_videolibrarypath = videolibrarypath if not sql_videolibrarypath.endswith(sep): sql_videolibrarypath += sep sql = 'INSERT INTO path (idPath, strPath, scanRecursive, useFolderNames, noUpdate, exclude) VALUES ' \ '(%s, "%s", 0, 0, 0, 0)' % (idPath, sql_videolibrarypath) nun_records, records = execute_sql_kodi(sql) if nun_records == 1: continuar = True idParentPath = idPath idPath += 1 else: msg_text = config.get_localized_string(70101) if continuar: continuar = False # Fijamos strContent, strScraper, scanRecursive y strSettings if content_type == 'movie': strContent = 'movies' scanRecursive = 2147483647 if seleccion == -1 or seleccion == 0: strScraper = 'metadata.themoviedb.org' path_settings = xbmc.translatePath("special://profile/addon_data/metadata.themoviedb.org/settings.xml") elif seleccion == 1: strScraper = 'metadata.universal' path_settings = xbmc.translatePath("special://profile/addon_data/metadata.universal/settings.xml") settings_data = filetools.read(path_settings) strSettings = ' '.join(settings_data.split()).replace("> <", "><") strSettings = strSettings.replace("\"","\'") strActualizar = "¿Desea configurar este Scraper en español como opción por defecto para películas?" if not videolibrarypath.endswith(sep): videolibrarypath += sep strPath = videolibrarypath + config.get_setting("folder_movies") + sep else: strContent = 'tvshows' scanRecursive = 0 if seleccion == -1 or seleccion == 0: strScraper = 'metadata.tvdb.com' path_settings = xbmc.translatePath("special://profile/addon_data/metadata.tvdb.com/settings.xml") elif seleccion == 1: strScraper = 'metadata.tvshows.themoviedb.org' path_settings = xbmc.translatePath("special://profile/addon_data/metadata.tvshows.themoviedb.org/settings.xml") settings_data = filetools.read(path_settings) strSettings = ' '.join(settings_data.split()).replace("> <", "><") strSettings = strSettings.replace("\"","\'") strActualizar = "¿Desea configurar este Scraper en español como opción por defecto para series?" 
if not videolibrarypath.endswith(sep): videolibrarypath += sep strPath = videolibrarypath + config.get_setting("folder_tvshows") + sep logger.info("%s: %s" % (content_type, strPath)) # Comprobamos si ya existe strPath en la BD para evitar duplicados sql = 'SELECT idPath FROM path where strPath="%s"' % strPath nun_records, records = execute_sql_kodi(sql) sql = "" if nun_records == 0: # Insertamos el scraper sql = 'INSERT INTO path (idPath, strPath, strContent, strScraper, scanRecursive, useFolderNames, ' \ 'strSettings, noUpdate, exclude, idParentPath) VALUES (%s, "%s", "%s", "%s", %s, 0, ' \ '"%s", 0, 0, %s)' % ( idPath, strPath, strContent, strScraper, scanRecursive, strSettings, idParentPath) else: if not silent: # Preguntar si queremos configurar themoviedb.org como opcion por defecto actualizar = platformtools.dialog_yesno(config.get_localized_string(70098), strActualizar) else: actualizar = True if actualizar: # Actualizamos el scraper idPath = records[0][0] sql = 'UPDATE path SET strContent="%s", strScraper="%s", scanRecursive=%s, strSettings="%s" ' \ 'WHERE idPath=%s' % (strContent, strScraper, scanRecursive, strSettings, idPath) if sql: nun_records, records = execute_sql_kodi(sql) if nun_records == 1: continuar = True if not continuar: msg_text = config.get_localized_string(60055) if not continuar: heading = config.get_localized_string(70102) % content_type elif content_type == 'SERIES' and not xbmc.getCondVisibility( 'System.HasAddon(metadata.tvshows.themoviedb.org)'): heading = config.get_localized_string(70103) % content_type msg_text = config.get_localized_string(60058) else: heading = config.get_localized_string(70103) % content_type msg_text = config.get_localized_string(70104) platformtools.dialog_notification(heading, msg_text, icon=1, time=3000) logger.info("%s: %s" % (heading, msg_text))
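# set_content() builds raw SQL against the 'path' table of Kodi's video database and hands it to
# execute_sql_kodi(). Below is a hedged sketch of what such a helper presumably looks like using
# sqlite3; the database file name (MyVideos*.db) varies between Kodi versions, so db_file is a
# placeholder, and the (count, rows) return shape simply mirrors how the results are used above.
import sqlite3


def execute_sql(db_file, sql):
    """Run one statement against the given Kodi video DB; return (num_records, records)."""
    conn = sqlite3.connect(db_file)
    try:
        cursor = conn.cursor()
        cursor.execute(sql)
        if sql.lstrip().upper().startswith('SELECT'):
            records = cursor.fetchall()
            return len(records), records
        conn.commit()
        return cursor.rowcount, []
    finally:
        conn.close()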
def channel_search(item): logger.debug(item) start = time.time() searching = list() searching_titles = list() results = list() valid = list() ch_list = dict() mode = item.mode if item.infoLabels['tvshowtitle']: item.text = item.infoLabels['tvshowtitle'].split(' - ')[0] item.title = item.text elif item.infoLabels['title']: item.text = item.infoLabels['title'].split(' - ')[0] item.title = item.text temp_search_file = config.get_temp_file('temp-search') if filetools.isfile(temp_search_file): itemlist = [] f = filetools.read(temp_search_file) if f.startswith(item.text): for it in f.split(','): if it and it != item.text: itemlist.append(Item().fromurl(it)) return itemlist else: filetools.remove(temp_search_file) searched_id = item.infoLabels['tmdb_id'] channel_list, channel_titles = get_channels(item) searching += channel_list searching_titles += channel_titles cnt = 0 progress = platformtools.dialog_progress(config.get_localized_string(30993) % item.title, config.get_localized_string(70744) % len(channel_list) + '\n' + ', '.join(searching_titles)) config.set_setting('tmdb_active', False) search_action_list = [] module_dict = {} for ch in channel_list: try: module = __import__('channels.%s' % ch, fromlist=["channels.%s" % ch]) mainlist = getattr(module, 'mainlist')(Item(channel=ch, global_search=True)) module_dict[ch] = module search_action_list.extend([elem for elem in mainlist if elem.action == "search" and (mode == 'all' or elem.contentType in [mode, 'undefined'])]) if progress.iscanceled(): return [] except: import traceback logger.error('error importing/getting search items of ' + ch) logger.error(traceback.format_exc()) total_search_actions = len(search_action_list) with futures.ThreadPoolExecutor(max_workers=set_workers()) as executor: c_results = [] for search_action in search_action_list: c_results.append(executor.submit(get_channel_results, item, module_dict, search_action)) if progress.iscanceled(): break for res in futures.as_completed(c_results): search_action = res.result()[0] channel = search_action.channel if res.result()[1]: if channel not in ch_list: ch_list[channel] = [] ch_list[channel].extend(res.result()[1]) if res.result()[2]: valid.extend(res.result()[2]) if progress.iscanceled(): break search_action_list.remove(search_action) # if no action of this channel remains for it in search_action_list: if it.channel == channel: break else: cnt += 1 searching_titles.remove(searching_titles[searching.index(channel)]) searching.remove(channel) progress.update(old_div(((total_search_actions - len(search_action_list)) * 100), total_search_actions), config.get_localized_string(70744) % str(len(channel_list) - cnt) + '\n' + ', '.join(searching_titles)) progress.close() cnt = 0 progress = platformtools.dialog_progress(config.get_localized_string(30993) % item.title, config.get_localized_string(60295) + '\n' + config.get_localized_string(60293)) config.set_setting('tmdb_active', True) # res_count = 0 for key, value in ch_list.items(): ch_name = channel_titles[channel_list.index(key)] grouped = list() cnt += 1 progress.update(old_div((cnt * 100), len(ch_list)), config.get_localized_string(60295)) for it in value: if it.channel == item.channel: it.channel = key if it in valid: continue if mode == 'all' or (it.contentType and mode == it.contentType): if config.get_setting('result_mode') != 0: if config.get_localized_string(30992) not in it.title: it.title += typo(ch_name,'_ [] color kod bold') results.append(it) else: grouped.append(it) elif (mode == 'movie' and it.contentTitle) or (mode == 
'tvshow' and (it.contentSerieName or it.show)): grouped.append(it) else: continue if not grouped: continue # to_temp[key] = grouped if config.get_setting('result_mode') == 0: if not config.get_setting('unify'): title = typo(ch_name,'bold') + typo(str(len(grouped)), '_ [] color kod bold') else: title = typo('%s %s' % (len(grouped), config.get_localized_string(70695)), 'bold') # res_count += len(grouped) plot='' for it in grouped: plot += it.title +'\n' ch_thumb = channeltools.get_channel_parameters(key)['thumbnail'] results.append(Item(channel='search', title=title, action='get_from_temp', thumbnail=ch_thumb, itemlist=[ris.tourl() for ris in grouped], plot=plot, page=1)) progress.close() # "All Together" and movie mode -> search servers if config.get_setting('result_mode') == 1 and mode == 'movie': progress = platformtools.dialog_progress(config.get_localized_string(30993) % item.title, config.get_localized_string(60683)) valid_servers = [] with futures.ThreadPoolExecutor(max_workers=set_workers()) as executor: c_results = [executor.submit(get_servers, v, module_dict) for v in valid] completed = 0 for res in futures.as_completed(c_results): if progress.iscanceled(): break if res.result(): completed += 1 valid_servers.extend(res.result()) progress.update(old_div(completed * 100, len(valid))) valid = valid_servers progress.close() # send_to_temp(to_temp) results = sorted(results, key=lambda it: it.title) results_statistic = config.get_localized_string(59972) % (item.title, time.time() - start) if mode == 'all': results.insert(0, Item(title=typo(results_statistic, 'color kod bold'), thumbnail=get_thumb('search.png'))) else: if not valid: valid.append(Item(title=config.get_localized_string(60347), thumbnail=get_thumb('nofolder.png'))) valid.insert(0, Item(title=typo(results_statistic, 'color kod bold'), thumbnail=get_thumb('search.png'))) results.insert(0, Item(title=typo(config.get_localized_string(30025), 'color kod bold'), thumbnail=get_thumb('search.png'))) # logger.debug(results_statistic) itlist = valid + results writelist = item.text for it in itlist: writelist += ',' + it.tourl() filetools.write(temp_search_file, writelist) return itlist
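# channel_search() fans the query out with concurrent.futures and collects results with
# as_completed(), so one slow or broken channel cannot block the rest. A reduced sketch of that
# pattern; search_one() stands in for get_channel_results() and the worker count is arbitrary.
from concurrent import futures


def parallel_search(channels, search_one, text, max_workers=4):
    """Run search_one(channel, text) for every channel in a thread pool and group the results."""
    results = {}
    with futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
        jobs = {executor.submit(search_one, ch, text): ch for ch in channels}
        for job in futures.as_completed(jobs):
            channel = jobs[job]
            try:
                results[channel] = job.result()
            except Exception:
                # A failing channel must not abort the whole search
                results[channel] = []
    return results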
def verify_directories_created(): from core import logger from core import filetools from platformcode import xbmc_videolibrary config_paths = [["videolibrarypath", "videolibrary"], ["downloadpath", "downloads"], ["downloadlistpath", "downloads/list"], ["settings_path", "settings_channels"]] for path, default in config_paths: saved_path = get_setting(path) # videoteca if path == "videolibrarypath": if not saved_path: saved_path = xbmc_videolibrary.search_library_path() if saved_path: set_setting(path, saved_path) if not saved_path: saved_path = "special://profile/addon_data/plugin.video." + PLUGIN_NAME + "/" + default set_setting(path, saved_path) saved_path = xbmc.translatePath(saved_path) if not filetools.exists(saved_path): logger.debug("Creating %s: %s" % (path, saved_path)) filetools.mkdir(saved_path) config_paths = [["folder_movies", "CINE"], ["folder_tvshows", "SERIES"]] flag_call = True for path, default in config_paths: saved_path = get_setting(path) if not saved_path: saved_path = default set_setting(path, saved_path) content_path = filetools.join(get_videolibrary_path(), saved_path) if not filetools.exists(content_path): logger.debug("Creating %s: %s" % (path, content_path)) # si se crea el directorio if filetools.mkdir(content_path): if flag_call: # le pasamos el valor para que sepamos que se ha pasado por creación de directorio xbmc_videolibrary.ask_set_content(1) flag_call = False try: from core import scrapertools # Buscamos el archivo addon.xml del skin activo skindir = filetools.join(xbmc.translatePath("special://home"), 'addons', xbmc.getSkinDir(), 'addon.xml') # Extraemos el nombre de la carpeta de resolución por defecto folder = "" data = filetools.read(skindir) res = scrapertools.find_multiple_matches(data, '(<res .*?>)') for r in res: if 'default="true"' in r: folder = scrapertools.find_single_match(r, 'folder="([^"]+)"') break # Comprobamos si existe en el addon y sino es así, la creamos default = filetools.join(get_runtime_path(), 'resources', 'skins', 'Default') if folder and not filetools.exists(filetools.join(default, folder)): filetools.mkdir(filetools.join(default, folder)) # Copiamos el archivo a dicha carpeta desde la de 720p si éste no existe o si el tamaño es diferente if folder and folder != '720p': for root, folders, files in filetools.walk( filetools.join(default, '720p')): for f in files: if not filetools.exists(filetools.join(default, folder, f)) or \ (filetools.getsize(filetools.join(default, folder, f)) != filetools.getsize(filetools.join(default, '720p', f))): filetools.copy(filetools.join(default, '720p', f), filetools.join(default, folder, f), True) except: import traceback logger.error("Al comprobar o crear la carpeta de resolución") logger.error(traceback.format_exc())
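# verify_directories_created() repeatedly applies the same "create it if it is missing" pattern
# to the videolibrary, downloads and settings folders. A minimal sketch of that pattern with the
# standard library; the folder names are illustrative defaults, not the addon's settings.
import os


def ensure_dirs(base, subfolders=('downloads', 'downloads/list', 'settings_channels')):
    """Create base/<subfolder> for each entry if it does not exist yet and return the paths."""
    created = []
    for sub in subfolders:
        path = os.path.join(base, sub)
        os.makedirs(path, exist_ok=True)  # no error if the folder is already there
        created.append(path)
    return created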
def get_environment(): """ Returns the most common OS, Kodi and Alpha environment variables, necessary for fault diagnosis """ try: import base64 import ast environment = config.get_platform(full_version=True) environment['num_version'] = str(environment['num_version']) environment['python_version'] = str(platform.python_version()) environment['os_release'] = str(platform.release()) if xbmc.getCondVisibility("system.platform.Windows"): try: if platform._syscmd_ver()[2]: environment['os_release'] = str(platform._syscmd_ver()[2]) except: pass environment['prod_model'] = '' if xbmc.getCondVisibility("system.platform.Android"): environment['os_name'] = 'Android' try: for label_a in subprocess.check_output('getprop').split('\n'): if 'build.version.release' in label_a: environment['os_release'] = str( scrapertools.find_single_match( label_a, r':\s*\[(.*?)\]$')) if 'product.model' in label_a: environment['prod_model'] = str( scrapertools.find_single_match( label_a, r':\s*\[(.*?)\]$')) except: try: for label_a in filetools.read(os.environ['ANDROID_ROOT'] + '/build.prop').split(): if 'build.version.release' in label_a: environment['os_release'] = str( scrapertools.find_single_match( label_a, '=(.*?)$')) if 'product.model' in label_a: environment['prod_model'] = str( scrapertools.find_single_match( label_a, '=(.*?)$')) except: pass elif xbmc.getCondVisibility("system.platform.Linux.RaspberryPi"): environment['os_name'] = 'RaspberryPi' else: environment['os_name'] = str(platform.system()) environment['machine'] = str(platform.machine()) environment['architecture'] = str(sys.maxsize > 2**32 and "64-bit" or "32-bit") environment['language'] = str(xbmc.getInfoLabel('System.Language')) environment['cpu_usage'] = str(xbmc.getInfoLabel('System.CpuUsage')) environment['mem_total'] = str( xbmc.getInfoLabel('System.Memory(total)')).replace('MB', '').replace( 'KB', '') environment['mem_free'] = str( xbmc.getInfoLabel('System.Memory(free)')).replace('MB', '').replace( 'KB', '') if not environment['mem_total'] or not environment['mem_free']: try: if environment['os_name'].lower() == 'windows': kernel32 = ctypes.windll.kernel32 c_ulong = ctypes.c_ulong c_ulonglong = ctypes.c_ulonglong class MEMORYSTATUS(ctypes.Structure): _fields_ = [('dwLength', c_ulong), ('dwMemoryLoad', c_ulong), ('dwTotalPhys', c_ulonglong), ('dwAvailPhys', c_ulonglong), ('dwTotalPageFile', c_ulonglong), ('dwAvailPageFile', c_ulonglong), ('dwTotalVirtual', c_ulonglong), ('dwAvailVirtual', c_ulonglong), ('availExtendedVirtual', c_ulonglong)] memoryStatus = MEMORYSTATUS() memoryStatus.dwLength = ctypes.sizeof(MEMORYSTATUS) kernel32.GlobalMemoryStatus(ctypes.byref(memoryStatus)) environment['mem_total'] = str( old_div(int(memoryStatus.dwTotalPhys), (1024**2))) environment['mem_free'] = str( old_div(int(memoryStatus.dwAvailPhys), (1024**2))) else: with open('/proc/meminfo') as f: meminfo = f.read() environment['mem_total'] = str( old_div( int( re.search(r'MemTotal:\s+(\d+)', meminfo).groups()[0]), 1024)) environment['mem_free'] = str( old_div( int( re.search(r'MemAvailable:\s+(\d+)', meminfo).groups()[0]), 1024)) except: environment['mem_total'] = '' environment['mem_free'] = '' try: environment['kodi_buffer'] = '20' environment['kodi_bmode'] = '0' environment['kodi_rfactor'] = '4.0' if filetools.exists( filetools.join(xbmc.translatePath("special://userdata"), "advancedsettings.xml")): advancedsettings = filetools.read( filetools.join(xbmc.translatePath("special://userdata"), "advancedsettings.xml")).split('\n') for label_a in advancedsettings: 
if 'memorysize' in label_a: environment['kodi_buffer'] = str( old_div( int( scrapertools.find_single_match( label_a, r'>(\d+)<\/')), 1024**2)) if 'buffermode' in label_a: environment['kodi_bmode'] = str( scrapertools.find_single_match( label_a, r'>(\d+)<\/')) if 'readfactor' in label_a: environment['kodi_rfactor'] = str( scrapertools.find_single_match( label_a, r'>(.*?)<\/')) except: pass environment['userdata_path'] = str( xbmc.translatePath(config.get_data_path())) try: if environment['os_name'].lower() == 'windows': free_bytes = ctypes.c_ulonglong(0) ctypes.windll.kernel32.GetDiskFreeSpaceExW( ctypes.c_wchar_p(environment['userdata_path']), None, None, ctypes.pointer(free_bytes)) environment['userdata_free'] = str( round(float(free_bytes.value) / (1024**3), 3)) else: disk_space = os.statvfs(environment['userdata_path']) if not disk_space.f_frsize: disk_space.f_frsize = disk_space.f_frsize.f_bsize environment['userdata_free'] = str( round((float(disk_space.f_bavail) / (1024**3)) * float(disk_space.f_frsize), 3)) except: environment['userdata_free'] = '?' try: environment['videolab_series'] = '?' environment['videolab_episodios'] = '?' environment['videolab_pelis'] = '?' environment['videolab_path'] = str( xbmc.translatePath(config.get_videolibrary_path())) if filetools.exists( filetools.join(environment['videolab_path'], config.get_setting("folder_tvshows"))): environment['videolab_series'] = str( len( filetools.listdir( filetools.join( environment['videolab_path'], config.get_setting("folder_tvshows"))))) counter = 0 for root, folders, files in filetools.walk( filetools.join(environment['videolab_path'], config.get_setting("folder_tvshows"))): for file in files: if file.endswith('.strm'): counter += 1 environment['videolab_episodios'] = str(counter) if filetools.exists( filetools.join(environment['videolab_path'], config.get_setting("folder_movies"))): environment['videolab_pelis'] = str( len( filetools.listdir( filetools.join( environment['videolab_path'], config.get_setting("folder_movies"))))) except: pass try: video_updates = ['No', 'Inicio', 'Una vez', 'Inicio+Una vez'] environment['videolab_update'] = str( video_updates[config.get_setting("update", "videolibrary")]) except: environment['videolab_update'] = '?' try: if environment['os_name'].lower() == 'windows': free_bytes = ctypes.c_ulonglong(0) ctypes.windll.kernel32.GetDiskFreeSpaceExW( ctypes.c_wchar_p(environment['videolab_path']), None, None, ctypes.pointer(free_bytes)) environment['videolab_free'] = str( round(float(free_bytes.value) / (1024**3), 3)) else: disk_space = os.statvfs(environment['videolab_path']) if not disk_space.f_frsize: disk_space.f_frsize = disk_space.f_frsize.f_bsize environment['videolab_free'] = str( round((float(disk_space.f_bavail) / (1024**3)) * float(disk_space.f_frsize), 3)) except: environment['videolab_free'] = '?' 
# environment['torrent_list'] = [] # environment['torrentcli_option'] = '' # environment['torrent_error'] = '' # environment['torrentcli_rar'] = config.get_setting("mct_rar_unpack", server="torrent", default=True) # environment['torrentcli_backgr'] = config.get_setting("mct_background_download", server="torrent", default=True) # environment['torrentcli_lib_path'] = config.get_setting("libtorrent_path", server="torrent", default="") # if environment['torrentcli_lib_path']: # lib_path = 'Activo' # else: # lib_path = 'Inactivo' # environment['torrentcli_unrar'] = config.get_setting("unrar_path", server="torrent", default="") # if environment['torrentcli_unrar']: # if xbmc.getCondVisibility("system.platform.Android"): # unrar = 'Android' # else: # unrar, bin = filetools.split(environment['torrentcli_unrar']) # unrar = unrar.replace('\\', '/') # if not unrar.endswith('/'): # unrar = unrar + '/' # unrar = scrapertools.find_single_match(unrar, '\/([^\/]+)\/$').capitalize() # else: # unrar = 'Inactivo' # torrent_id = config.get_setting("torrent_client", server="torrent", default=0) # environment['torrentcli_option'] = str(torrent_id) # torrent_options = platformtools.torrent_client_installed() # if lib_path == 'Activo': # torrent_options = ['MCT'] + torrent_options # torrent_options = ['BT'] + torrent_options # environment['torrent_list'].append({'Torrent_opt': str(torrent_id), 'Libtorrent': lib_path, \ # 'RAR_Auto': str(environment['torrentcli_rar']), \ # 'RAR_backgr': str(environment['torrentcli_backgr']), \ # 'UnRAR': unrar}) # environment['torrent_error'] = config.get_setting("libtorrent_error", server="torrent", default="") # if environment['torrent_error']: # environment['torrent_list'].append({'Libtorrent_error': environment['torrent_error']}) # for torrent_option in torrent_options: # cliente = dict() # cliente['D_load_Path'] = '' # cliente['Libre'] = '?' 
# cliente['Plug_in'] = torrent_option.replace('Plugin externo: ', '') # if cliente['Plug_in'] == 'BT': # cliente['D_load_Path'] = str(config.get_setting("bt_download_path", server="torrent", default='')) # if not cliente['D_load_Path']: continue # cliente['Buffer'] = str(config.get_setting("bt_buffer", server="torrent", default=50)) # elif cliente['Plug_in'] == 'MCT': # cliente['D_load_Path'] = str(config.get_setting("mct_download_path", server="torrent", default='')) # if not cliente['D_load_Path']: continue # cliente['Buffer'] = str(config.get_setting("mct_buffer", server="torrent", default=50)) # elif xbmc.getCondVisibility('System.HasAddon("plugin.video.%s")' % cliente['Plug_in']): # __settings__ = xbmcaddon.Addon(id="plugin.video.%s" % cliente['Plug_in']) # cliente['Plug_in'] = cliente['Plug_in'].capitalize() # if cliente['Plug_in'] == 'Torrenter': # cliente['D_load_Path'] = str(xbmc.translatePath(__settings__.getSetting('storage'))) # if not cliente['D_load_Path']: # cliente['D_load_Path'] = str(filetools.join(xbmc.translatePath("special://home/"), \ # "cache", "xbmcup", "plugin.video.torrenter", # "Torrenter")) # cliente['Buffer'] = str(__settings__.getSetting('pre_buffer_bytes')) # else: # cliente['D_load_Path'] = str(xbmc.translatePath(__settings__.getSetting('download_path'))) # cliente['Buffer'] = str(__settings__.getSetting('buffer_size')) # if __settings__.getSetting('download_storage') == '1' and __settings__.getSetting('memory_size'): # cliente['Memoria'] = str(__settings__.getSetting('memory_size')) # if cliente['D_load_Path']: # try: # if environment['os_name'].lower() == 'windows': # free_bytes = ctypes.c_ulonglong(0) # ctypes.windll.kernel32.GetDiskFreeSpaceExW(ctypes.c_wchar_p(cliente['D_load_Path']), # None, None, ctypes.pointer(free_bytes)) # cliente['Libre'] = str(round(float(free_bytes.value) / \ # (1024 ** 3), 3)).replace('.', ',') # else: # disk_space = os.statvfs(cliente['D_load_Path']) # if not disk_space.f_frsize: disk_space.f_frsize = disk_space.f_frsize.f_bsize # cliente['Libre'] = str(round((float(disk_space.f_bavail) / \ # (1024 ** 3)) * float(disk_space.f_frsize), 3)).replace('.', ',') # except: # pass # environment['torrent_list'].append(cliente) environment['proxy_active'] = '' try: proxy_channel_bloqued_str = base64.b64decode( config.get_setting('proxy_channel_bloqued')).decode('utf-8') proxy_channel_bloqued = dict() proxy_channel_bloqued = ast.literal_eval(proxy_channel_bloqued_str) for channel_bloqued, proxy_active in list( proxy_channel_bloqued.items()): if proxy_active != 'OFF': environment['proxy_active'] += channel_bloqued + ', ' except: pass if not environment['proxy_active']: environment['proxy_active'] = 'OFF' environment['proxy_active'] = environment['proxy_active'].rstrip(', ') for root, folders, files in filetools.walk( xbmc.translatePath("special://logpath/")): for file in files: if file.lower() in ['kodi.log', 'jarvis.log', 'spmc.log', 'cemc.log', \ 'mygica.log', 'wonderbox.log', 'leiapp,log', \ 'leianmc.log', 'kodiapp.log', 'anmc.log', \ 'latin-anmc.log']: environment['log_path'] = str(filetools.join(root, file)) break else: environment['log_path'] = '' break if environment['log_path']: environment['log_size_bytes'] = str( filetools.getsize(environment['log_path'])) environment['log_size'] = str(round(float(environment['log_size_bytes']) / \ (1024 * 1024), 3)) else: environment['log_size_bytes'] = '' environment['log_size'] = '' environment['debug'] = str(config.get_setting('debug')) environment['addon_version'] = 
str(config.get_addon_version())

    except:
        logger.error(traceback.format_exc())
        # Fallback: return an empty report if anything failed while collecting the environment
        environment = dict.fromkeys([
            'log_size', 'cpu_usage', 'python_version', 'log_path', 'userdata_free',
            'mem_total', 'machine', 'platform', 'videolab_path', 'num_version',
            'os_name', 'video_db', 'userdata_path', 'log_size_bytes', 'name_version',
            'language', 'mem_free', 'prod_model', 'proxy_active', 'architecture',
            'os_release', 'videolab_free', 'kodi_buffer', 'kodi_bmode', 'kodi_rfactor',
            'videolab_series', 'videolab_episodios', 'videolab_pelis', 'videolab_update',
            'debug', 'addon_version', 'torrentcli_option', 'torrentcli_rar',
            'torrentcli_lib_path', 'torrentcli_unrar', 'torrent_error'], '')
        environment['torrent_list'] = []

    return environment
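# Near the end of get_environment() the 'proxy_channel_bloqued' setting is decoded: it is stored
# as a base64-encoded Python dict literal mapping channel names to a proxy state, and every
# channel whose state is not 'OFF' is listed. A self-contained sketch of that decoding; the
# example value is made up.
import ast
import base64


def active_proxies(encoded_setting):
    """Return a comma-separated list of channels with an active proxy, or 'OFF'."""
    try:
        blocked = ast.literal_eval(base64.b64decode(encoded_setting).decode('utf-8'))
    except (ValueError, SyntaxError, TypeError):
        return 'OFF'
    if not isinstance(blocked, dict):
        return 'OFF'
    active = [channel for channel, state in blocked.items() if state != 'OFF']
    return ', '.join(active) if active else 'OFF'

# active_proxies(base64.b64encode(b"{'somechannel': 'ON', 'other': 'OFF'}").decode()) -> 'somechannel'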