def clear_cache(cache_type, silent=False):
	"""Clear the cache store identified by cache_type.

	cache_type: one of 'meta', 'internal_scrapers', 'external_scrapers',
	'trakt', 'imdb', 'pm_cloud', 'rd_cloud', 'ad_cloud', 'folders'; any
	other value falls through to the list cache.
	silent: when True, skip the confirmation dialog and final notification.
	"""
	def _user_approved():
		# Silent runs never prompt; otherwise the user must confirm first.
		return silent or kodi_utils.confirm_dialog()
	if cache_type == 'meta':
		from caches.meta_cache import delete_meta_cache
		if not delete_meta_cache(silent=silent): return
	elif cache_type == 'internal_scrapers':
		if not _user_approved(): return
		from apis import furk_api, easynews_api
		furk_api.clear_media_results_database()
		easynews_api.clear_media_results_database()
		# Cascade: internal scraper results include every cloud cache.
		for sub_cache in ('pm_cloud', 'rd_cloud', 'ad_cloud', 'folders'):
			clear_cache(sub_cache, silent=True)
	elif cache_type == 'external_scrapers':
		from caches.providers_cache import ExternalProvidersCache
		from caches.debrid_cache import DebridCache
		providers_result = ExternalProvidersCache().delete_cache(silent=silent)
		debrid_result = DebridCache().clear_database()
		if (providers_result, debrid_result) != ('success', 'success'): return
	elif cache_type == 'trakt':
		from caches.trakt_cache import clear_all_trakt_cache_data
		if not clear_all_trakt_cache_data(silent=silent): return
	elif cache_type == 'imdb':
		if not _user_approved(): return
		from apis.imdb_api import clear_imdb_cache
		if not clear_imdb_cache(): return
	elif cache_type in ('pm_cloud', 'rd_cloud', 'ad_cloud'):
		# The three debrid cloud caches share an identical confirm/clear flow.
		if not _user_approved(): return
		if cache_type == 'pm_cloud':
			from apis.premiumize_api import PremiumizeAPI as cloud_api
		elif cache_type == 'rd_cloud':
			from apis.real_debrid_api import RealDebridAPI as cloud_api
		else:
			from apis.alldebrid_api import AllDebridAPI as cloud_api
		if not cloud_api().clear_cache(): return
	elif cache_type == 'folders':
		from caches.main_cache import main_cache
		main_cache.delete_all_folderscrapers()
	else:  # 'list'
		if not _user_approved(): return
		from caches.main_cache import main_cache
		main_cache.delete_all_lists()
	if not silent: kodi_utils.notification(32576)
def getURLandHeaders(self):
	"""Resolve the play request into a final URL plus any piped headers.

	Reads self.params/self.action/self.provider; stores the results in
	self.url and self.headers. Headers, when present, are appended to the
	url after a '|' separator as a query string (Kodi convention).
	"""
	url = self.params_get('url')
	if url in (None, 'None', ''):
		if self.action == 'meta.single':
			source = json.loads(self.source)
			url = Sources().resolve_sources(source, self.meta)
		elif self.action == 'meta.pack':
			if self.provider == 'Real-Debrid':
				from apis.real_debrid_api import RealDebridAPI as debrid_function
			elif self.provider == 'Premiumize.me':
				from apis.premiumize_api import PremiumizeAPI as debrid_function
			elif self.provider == 'AllDebrid':
				from apis.alldebrid_api import AllDebridAPI as debrid_function
			url = self.params_get('pack_files')['link']
			if self.provider in ('Real-Debrid', 'AllDebrid'):
				url = debrid_function().unrestrict_link(url)
			elif self.provider == 'Premiumize.me':
				url = debrid_function().add_headers_to_url(url)
	else:
		if self.action.startswith('cloud'):
			if '_direct' in self.action:
				url = self.params_get('url')
			elif 'realdebrid' in self.action:
				from indexers.real_debrid import resolve_rd
				url = resolve_rd(self.params)
			elif 'alldebrid' in self.action:
				from indexers.alldebrid import resolve_ad
				url = resolve_ad(self.params)
			elif 'premiumize' in self.action:
				from apis.premiumize_api import PremiumizeAPI
				url = PremiumizeAPI().add_headers_to_url(url)
			elif 'easynews' in self.action:
				from indexers.easynews import resolve_easynews
				url = resolve_easynews(self.params)
	try:
		# Everything after the final '|' is a urlencoded header dict.
		headers = dict(parse_qsl(url.rsplit('|', 1)[1]))
	except Exception:
		# No '|' suffix (IndexError) or url is None (AttributeError):
		# was `dict('')`, which only worked by iterating an empty string.
		headers = {}
	try:
		url = url.split('|')[0]
	except Exception:
		pass  # url may be None if resolution failed; leave it as-is
	self.url = url
	self.headers = headers
def resolve_internal_sources(scrape_provider, item_id, url_dl, direct_debrid_link=False):
	"""Resolve an internal-scraper result to a playable URL.

	scrape_provider: 'furk', 'easynews', 'rd_cloud', 'pm_cloud', 'ad_cloud'
	or 'folders'; any other value yields None.
	item_id: provider-specific identifier (file id or restricted link).
	url_dl: provider-specific url/path, used directly by some providers.
	direct_debrid_link: when True, an rd_cloud link is already unrestricted
	and url_dl is returned untouched.
	Returns the resolved URL string, or None on any failure (best effort).
	"""
	url = None
	try:
		if scrape_provider == 'furk':
			import json
			from indexers.furk import t_file_browser
			from modules.source_utils import seas_ep_query_list
			from modules.kodi_utils import get_property
			meta = json.loads(get_property('fen_playback_meta'))
			filtering_list = seas_ep_query_list(meta['season'], meta['episode']) if meta['vid_type'] == 'episode' else ''
			t_files = t_file_browser(item_id, filtering_list)
			url = t_files[0]['url_dl']
		elif scrape_provider == 'easynews':
			from indexers.easynews import resolve_easynews
			url = resolve_easynews({'url_dl': url_dl, 'play': 'false'})
		elif scrape_provider == 'rd_cloud':
			if direct_debrid_link: return url_dl
			from apis.real_debrid_api import RealDebridAPI
			url = RealDebridAPI().unrestrict_link(item_id)
		elif scrape_provider == 'pm_cloud':
			from apis.premiumize_api import PremiumizeAPI
			details = PremiumizeAPI().get_item_details(item_id)
			url = details['link']
			# BUGFIX: was `'https' + url`, which can never form a valid URL
			# for a link starting with '/'. Protocol-relative links
			# ('//host/...') need the scheme WITH a colon prepended.
			if url.startswith('/'): url = 'https:' + url
		elif scrape_provider == 'ad_cloud':
			from apis.alldebrid_api import AllDebridAPI
			url = AllDebridAPI().unrestrict_link(item_id)
		elif scrape_provider == 'folders':
			if url_dl.endswith('.strm'):
				# .strm files contain the real URL as their text content.
				from modules.kodi_utils import open_file
				f = open_file(url_dl)
				url = f.read()
				f.close()
			else: url = url_dl
	except Exception:
		pass  # deliberately best-effort: any failure resolves to None
	return url
def resolve_internal_sources(scrape_provider, item_id, url_dl, direct_debrid_link=False):
	"""Resolve an internal-scraper result to a playable URL (legacy variant).

	scrape_provider: 'furk', 'rd-cloud', 'pm-cloud', 'ad-cloud', 'local',
	'downloads', 'easynews' or 'folder1'..'folder5'; anything else yields None.
	item_id: provider-specific identifier (file id or restricted link).
	url_dl: provider-specific url/path, returned directly by local providers.
	direct_debrid_link: when True, an rd-cloud link is already unrestricted
	and url_dl is returned untouched.
	Returns the resolved URL string, or None on any failure (best effort).
	"""
	url = None
	try:
		if scrape_provider == 'furk':
			import xbmcgui
			import json
			from indexers.furk import t_file_browser, seas_ep_query_list
			meta = json.loads(xbmcgui.Window(10000).getProperty('fen_media_meta'))
			filtering_list = seas_ep_query_list(meta['season'], meta['episode']) if meta['vid_type'] == 'episode' else ''
			t_files = t_file_browser(item_id, filtering_list)
			url = t_files[0]['url_dl']
		elif scrape_provider == 'rd-cloud':
			if direct_debrid_link: return url_dl
			from apis.real_debrid_api import RealDebridAPI
			url = RealDebridAPI().unrestrict_link(item_id)
		elif scrape_provider == 'pm-cloud':
			from apis.premiumize_api import PremiumizeAPI
			details = PremiumizeAPI().get_item_details(item_id)
			url = details['link']
			# BUGFIX: was `'https' + url`, which can never form a valid URL
			# for a link starting with '/'. Protocol-relative links
			# ('//host/...') need the scheme WITH a colon prepended.
			if url.startswith('/'): url = 'https:' + url
		elif scrape_provider == 'ad-cloud':
			from apis.alldebrid_api import AllDebridAPI
			url = AllDebridAPI().unrestrict_link(item_id)
		elif scrape_provider in ('local', 'downloads', 'easynews'):
			url = url_dl
		elif scrape_provider in ('folder1', 'folder2', 'folder3', 'folder4', 'folder5'):
			if url_dl.endswith('.strm'):
				# .strm files contain the real URL as their text content.
				import xbmcvfs
				f = xbmcvfs.File(url_dl)
				url = f.read()
				f.close()
			else: url = url_dl
	except Exception:
		pass  # deliberately best-effort: any failure resolves to None
	return url
# -*- coding: utf-8 -*-
import time
from threading import Thread
from windows import create_window
from caches.debrid_cache import debrid_cache
from apis.real_debrid_api import RealDebridAPI
from apis.premiumize_api import PremiumizeAPI
from apis.alldebrid_api import AllDebridAPI
from modules.kodi_utils import sleep, show_busy_dialog, hide_busy_dialog, notification, monitor, local_string as ls
from modules.utils import make_thread_list
from modules.settings import display_sleep_time, enabled_debrids_check
from modules.settings_reader import get_setting
# from modules.kodi_utils import logger

rd_api = RealDebridAPI()
pm_api = PremiumizeAPI()
ad_api = AllDebridAPI()
# (display name, settings prefix, api instance) for each supported service
debrid_list = [('Real-Debrid', 'rd', rd_api), ('Premiumize.me', 'pm', pm_api), ('AllDebrid', 'ad', ad_api)]

def debrid_enabled():
	"""Return the display names of every debrid service currently enabled."""
	enabled = []
	for name, prefix, _api in debrid_list:
		if enabled_debrids_check(prefix):
			enabled.append(name)
	return enabled

def debrid_type_enabled(debrid_type, enabled_debrids):
	"""Return the names from enabled_debrids that also have the given
	content type (e.g. 'hosters', 'cloud') switched on in settings."""
	return [
		name for name, prefix, _api in debrid_list
		if name in enabled_debrids and get_setting('%s.%s.enabled' % (prefix, debrid_type)) == 'true'
	]
# -*- coding: utf-8 -*- from apis.premiumize_api import PremiumizeAPI from modules.source_utils import get_file_info, supported_video_extensions, internal_results, check_title, \ get_aliases_titles, seas_ep_filter, release_info_format from modules.utils import clean_title, clean_file_name, normalize from modules.settings import enabled_debrids_check, filter_by_name # from modules.kodi_utils import logger Premiumize = PremiumizeAPI() class source: def __init__(self): self.scrape_provider = 'pm_cloud' def results(self, info): try: if not enabled_debrids_check('pm'): return internal_results(self.scrape_provider, self.sources) self.title_filter = filter_by_name(self.scrape_provider) self.sources, self.scrape_results = [], [] self.db_type = info.get('db_type') self.title = info.get('title') self.year = info.get('year') if self.year: self.rootname = '%s (%s)' % (self.title, self.year) else: self.rootname = self.title self.season = info.get('season') self.episode = info.get('episode') self.query = clean_title(self.title) self.extensions = supported_video_extensions() self._scrape_cloud()
from indexers.premiumize import pm_torrent_cloud pm_torrent_cloud(params.get('id', None), params.get('folder_name', None)) elif mode == 'premiumize.pm_transfers': from indexers.premiumize import pm_transfers pm_transfers() elif mode == 'premiumize.pm_account_info': from indexers.premiumize import pm_account_info pm_account_info() elif mode == 'premiumize.rename': from indexers.premiumize import pm_rename pm_rename(params.get('file_type'), params.get('id'), params.get('name')) elif mode == 'premiumize.authenticate': from apis.premiumize_api import PremiumizeAPI PremiumizeAPI().auth() elif mode == 'premiumize.authenticate_revoke': from apis.premiumize_api import PremiumizeAPI PremiumizeAPI().revoke_auth() elif 'alldebrid' in mode: if mode == 'alldebrid.ad_torrent_cloud': from indexers.alldebrid import ad_torrent_cloud ad_torrent_cloud(params.get('id', None)) elif mode == 'alldebrid.browse_ad_cloud': from indexers.alldebrid import browse_ad_cloud browse_ad_cloud(params['folder']) elif mode == 'alldebrid.resolve_ad': from indexers.alldebrid import resolve_ad resolve_ad(params['url']) elif mode == 'alldebrid.ad_account_info': from indexers.alldebrid import ad_account_info
def clear_cache(cache_type, silent=False):
	"""Clear the cache store identified by cache_type (legacy implementation).

	cache_type: 'meta', 'internal_scrapers', 'external_scrapers', 'trakt',
	'pages', 'pm_cloud', 'rd_cloud', 'ad_cloud'; any other value falls
	through to the list-data cache in fen_cache.db.
	silent: when True, skip confirmation dialogs and the final notification.
	"""
	import xbmcgui
	profile_dir = xbmc.translatePath(__addon__.getAddonInfo('profile'))
	if cache_type == 'meta':
		from tikimeta import delete_meta_cache
		if not delete_meta_cache(silent=silent): return
		description = 'Meta Data'
	elif cache_type == 'internal_scrapers':
		if not silent:
			if not xbmcgui.Dialog().yesno('Are you sure?', 'Fen will Clear all Internal Scraper Results.'): return
		from apis import furk_api
		from apis import easynews_api
		furk = furk_api.clear_media_results_database()
		easynews = easynews_api.clear_media_results_database()
		if (furk, easynews) != ('success', 'success'): return
		# Cascade: internal scraper results include the debrid cloud caches.
		for item in ('pm_cloud', 'rd_cloud', 'ad_cloud'): clear_cache(item, silent=True)
		description = 'Internal Scraper Results'
	elif cache_type == 'external_scrapers':
		from modules.external_source_utils import deleteProviderCache
		from modules.debrid import DebridCache
		data = deleteProviderCache(silent=silent)
		debrid_cache = DebridCache().clear_database()
		if (data, debrid_cache) != ('success', 'success'): return
		description = 'External Scraper Results'
	elif cache_type == 'trakt':
		from modules.trakt_cache import clear_all_trakt_cache_data
		if not clear_all_trakt_cache_data(silent=silent): return
		description = 'Trakt Cache'
	elif cache_type == 'pages':
		if not silent:
			if not xbmcgui.Dialog().yesno('Are you sure?', 'Fen will Clear all Browsed Pages History.'): return
		if not cached_page_clear(silent=silent): return
		description = 'Browsed Pages Cache'
	elif cache_type == 'pm_cloud':
		if not silent:
			if not xbmcgui.Dialog().yesno('Are you sure?', 'Fen will Clear the Premiumize Cloud Cache.'): return
		from apis.premiumize_api import PremiumizeAPI
		if not PremiumizeAPI().clear_cache(): return
		description = 'Premiumize Cloud Cache'
	elif cache_type == 'rd_cloud':
		if not silent:
			if not xbmcgui.Dialog().yesno('Are you sure?', 'Fen will Clear the Real Debrid Cloud Cache.'): return
		from apis.real_debrid_api import RealDebridAPI
		if not RealDebridAPI().clear_cache(): return
		description = 'Real Debrid Cloud Cache'
	elif cache_type == 'ad_cloud':
		if not silent:
			if not xbmcgui.Dialog().yesno('Are you sure?', 'Fen will Clear the All Debrid Cloud Cache.'): return
		from apis.alldebrid_api import AllDebridAPI
		if not AllDebridAPI().clear_cache(): return
		description = 'All Debrid Cloud Cache'
	else:  # 'list'
		import xbmcvfs
		LIST_DATABASE = os.path.join(profile_dir, 'fen_cache.db')
		if not xbmcvfs.exists(LIST_DATABASE): return
		if not silent:
			if not xbmcgui.Dialog().yesno('Are you sure?', 'Fen will Clear all List Data.'): return
		try: from sqlite3 import dbapi2 as database
		except ImportError: from pysqlite2 import dbapi2 as database
		from modules.settings import media_lists
		media_lists = media_lists()
		# BUGFIX: the old string-concatenation SQL builder produced broken
		# SQL when media_lists was empty (sql[:-12] mangled the base query).
		if not media_lists: return
		window = xbmcgui.Window(10000)
		dbcon = database.connect(LIST_DATABASE)
		try:
			dbcur = dbcon.cursor()
			# BUGFIX: parameterized query instead of quoting list names into
			# the SQL string by hand (quoting bugs / SQL injection hazard).
			sql = 'SELECT id from fencache where %s' % ' OR '.join(['id LIKE ?'] * len(media_lists))
			dbcur.execute(sql, tuple(media_lists))
			remove_list = [str(i[0]) for i in dbcur.fetchall()]
			for item in remove_list:
				dbcur.execute("""DELETE FROM fencache WHERE id=?""", (item, ))
				# Also drop the cached copy held in the Kodi home window.
				window.clearProperty(item)
			dbcon.commit()
			dbcon.execute("VACUUM")
			dbcon.commit()
		finally:
			# Always release the connection, even if a DB call raises.
			dbcon.close()
		description = 'List Data'
	if not silent:
		notification('%s Cleared' % description)