Example #1
class SubtitleService:
    """
    Connects to available subtitle services and retrieves available subtitles for media
    """
    def __init__(self):
        self.task_queue = ThreadPool()
        self.subtitle_languages = g.get_kodi_subtitle_languages()
        self.preferred_language = g.get_kodi_preferred_subtitle_language()
        self.base_request = {
            "languages": ",".join(self.subtitle_languages),
            "preferredlanguage": self.preferred_language,
        }
        self.sources = [A4kSubtitlesAdapter()]

    def get_subtitle(self):
        """
        Fetch subtitle source
        :return: Url to subtitle
        :rtype: str
        """
        if self.sources is None:
            return None
        for source in self.sources:
            if source.enabled:
                self.task_queue.put(source.search, self.base_request)
        results = self.task_queue.wait_completion()
        if results is None:
            return None
        try:
            return self.sources[0].download(results[0])
        except IndexError:
            g.log("No subtitles available from A4kSubtitles", "error")
            return None
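All of these examples share the same ThreadPool interface: put(func, *args, **kwargs) schedules a call on a worker thread, and wait_completion() blocks until every queued task has finished and hands back the collected results (some examples treat that result as a merged dictionary, others as a list). The class below is only a minimal sketch of that interface, built on Python's standard queue and threading modules; it illustrates the observed usage and is not Seren's actual implementation.

import threading
from queue import Queue

class ThreadPool:
    """Minimal sketch: run queued callables on worker threads, collect results."""

    def __init__(self, workers=4):
        self.tasks = Queue()
        self.results = []
        self._lock = threading.Lock()
        for _ in range(workers):
            threading.Thread(target=self._worker, daemon=True).start()

    def _worker(self):
        while True:
            func, args, kwargs = self.tasks.get()
            try:
                result = func(*args, **kwargs)
                if result is not None:
                    with self._lock:
                        self.results.append(result)
            finally:
                self.tasks.task_done()

    def put(self, func, *args, **kwargs):
        # Schedule func(*args, **kwargs) on a worker thread
        self.tasks.put((func, args, kwargs))

    def wait_completion(self):
        # Block until the queue drains, then return everything collected;
        # the real pool appears to merge dict results rather than list them.
        self.tasks.join()
        results, self.results = self.results, []
        return results or None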
Example #2
    @staticmethod
    def get_hoster_list():
        """
        Fetches the hosters available from the enabled debrid providers
        :return: Dictionary of available premium and free hosters
        :rtype: dict
        """
        thread_pool = ThreadPool()

        hosters = {"premium": {}, "free": []}

        try:
            if g.get_bool_setting("premiumize.enabled") and g.get_bool_setting(
                    "premiumize.hosters"):
                thread_pool.put(Premiumize().get_hosters, hosters)

            if g.get_bool_setting("realdebrid.enabled") and g.get_bool_setting(
                    "rd.hosters"):
                thread_pool.put(RealDebrid().get_hosters, hosters)

            if g.get_bool_setting("alldebrid.enabled") and g.get_bool_setting(
                    "alldebrid.hosters"):
                thread_pool.put(AllDebrid().get_hosters, hosters)
            thread_pool.wait_completion()
        except ValueError:
            g.log_stacktrace()
            xbmcgui.Dialog().notification(g.ADDON_NAME,
                                          g.get_language_string(30513))
            return hosters
        return hosters
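Note the accumulation pattern in get_hoster_list: each provider's get_hosters receives the shared hosters dictionary and mutates it in place, so wait_completion() serves purely as a barrier rather than as a source of return values. A compatible worker might look like the hypothetical sketch below (the _fetch_supported_hosts helper and its stub data are invented for illustration):

class Premiumize:
    # Hypothetical worker shape inferred from the shared-dict usage above
    def _fetch_supported_hosts(self):
        return [{"example.host": ["example.host"]}]  # stub data for illustration

    def get_hosters(self, hosters):
        # Mutate the caller's dictionary in place instead of returning data
        hosters["premium"]["premiumize"] = self._fetch_supported_hosts()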
Example #3

    def _user_cloud_inspection(self):
        self.sources_information["remainingProviders"].append("Cloud Inspection")
        try:
            thread_pool = ThreadPool()
            if self.media_type == "episode":
                simple_info = self._build_simple_show_info(self.item_information)
            else:
                simple_info = None

            cloud_scrapers = [
                {"setting": "premiumize.cloudInspection", "provider": PremiumizeCloudScaper,
                 "enabled": g.premiumize_enabled()},
                {"setting": "rd.cloudInspection", "provider": RealDebridCloudScraper,
                 "enabled": g.real_debrid_enabled()},
                {"setting": "alldebrid.cloudInspection", "provider": AllDebridCloudScraper,
                 "enabled": g.all_debrid_enabled()},
            ]

            for cloud_scraper in cloud_scrapers:
                if cloud_scraper["enabled"] and g.get_bool_setting(cloud_scraper["setting"]):
                    thread_pool.put(cloud_scraper["provider"](self._prem_terminate).get_sources, self.item_information,
                                    simple_info)

            sources = thread_pool.wait_completion()
            self.sources_information["cloudFiles"] = sources if sources else []

        finally:
            self.sources_information["remainingProviders"].remove("Cloud Inspection")
Example #4
    def __init__(self):
        super(TraktSyncDatabase, self).__init__(g.TRAKT_SYNC_DB_PATH, schema,
                                                migrate_db_lock)
        self.metadataHandler = MetadataHandler()
        self.trakt_api = TraktAPI()

        self.activities = {}
        self.item_list = []
        self.base_date = "1970-01-01T00:00:00"
        self.task_queue = ThreadPool()
        self.mill_task_queue = ThreadPool()
        self.parent_task_queue = ThreadPool()
        self.refresh_activities()

        # If you make changes to the required meta in any indexer that is cached in this database,
        # you will need to update the version number below to match the new addon version.
        # This ensures that the metadata required for operations is available.

        self.last_meta_update = "2.0.0"
        if self.activities is None:
            self.clear_all_meta(False)
            self.set_base_activities()

        if self.activities is not None:
            self._check_database_version()

        self.notification_prefix = "{}: Trakt".format(g.ADDON_NAME)
        self.hide_unaired = g.get_bool_setting("general.hideUnAired")
        self.hide_specials = g.get_bool_setting("general.hideSpecials")
        self.hide_watched = g.get_bool_setting("general.hideWatched")
        self.date_delay = g.get_bool_setting("general.datedelay")
        self.page_limit = g.get_int_setting("item.limit")
Example #5
 def get_episode(self, tvdb_id, season, episode):
     thread_pool = ThreadPool()
     for language in self.languages:
         thread_pool.put(self._get_episode_info, tvdb_id, season, episode, language)
     item = thread_pool.wait_completion()
     if not item:
         return None
     return item
Example #6
 def wrapper(*args, **kwargs):
     pool = ThreadPool()
     if isinstance(args[-1], (list, types.GeneratorType)):
         for i in args[-1]:
             pool.put(func, *args[:-1] + (i, ), **kwargs)
         return pool.wait_completion()
     else:
         return func(*args, **kwargs)
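The wrapper above is the body of a fan-out decorator: when the decorated function's last positional argument is a list or generator, each element is dispatched to the pool as a separate call and the merged results are returned; otherwise the function runs inline. A reconstruction of the full decorator, using the ThreadPool interface sketched above, might read as follows (the name run_threaded is hypothetical):

import types
from functools import wraps

def run_threaded(func):
    """Hypothetical fan-out decorator matching the wrapper body above."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        pool = ThreadPool()
        if isinstance(args[-1], (list, types.GeneratorType)):
            # One pool task per element of the final argument
            for item in args[-1]:
                pool.put(func, *args[:-1] + (item,), **kwargs)
            return pool.wait_completion()
        return func(*args, **kwargs)
    return wrapper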
Example #7

 def __init__(self, source):
     self.thread_pool = ThreadPool()
     self.source = source
     self.average_speed = "0 B/s"
     self.progress = 0
     self.downloaders = []
     self.debrid_module = None
     self._confirm_source_downloadable()
Example #8
 def __init__(self):
     self.task_queue = ThreadPool()
     self.subtitle_languages = g.get_kodi_subtitle_languages()
     self.preferred_language = g.get_kodi_preferred_subtitle_language()
     self.base_request = {
         "languages": ",".join(self.subtitle_languages),
         "preferredlanguage": self.preferred_language,
     }
     self.sources = [A4kSubtitlesAdapter()]
Example #9

    def __init__(self, scraper_class):
        self.premiumize_cached = []
        self.realdebrid_cached = []
        self.all_debrid_cached = []
        self.threads = ThreadPool()

        self.episode_strings = None
        self.season_strings = None
        self.scraper_class = scraper_class
        self.rd_api = real_debrid.RealDebrid()
Example #10
 def get_episode(self, tvdb_id, season, episode):
     item = {}
     thread_pool = ThreadPool()
     for language in self.languages:
         thread_pool.put(self._get_episode_info, tvdb_id, season, episode,
                         language)
         item = tools.smart_merge_dictionary(item,
                                             thread_pool.wait_completion())
     if not item:
         return None
     return item
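Examples #5 and #10 differ in where they join the pool: #5 queues every language and waits once, while #10 waits inside the loop and folds each language's result into item with tools.smart_merge_dictionary, serialising the lookups in exchange for incremental merging. Judging from its use here, the merge recursively combines dictionaries without discarding existing values; the following is a sketch under that assumption, not the actual helper:

def smart_merge_dictionary(dictionary, merge_dict):
    # Assumed behaviour only: recursively fold merge_dict into dictionary,
    # preferring non-empty incoming values and keeping everything else.
    if not isinstance(dictionary, dict) or not isinstance(merge_dict, dict):
        return dictionary
    for key, value in merge_dict.items():
        if isinstance(value, dict) and isinstance(dictionary.get(key), dict):
            smart_merge_dictionary(dictionary[key], value)
        elif value or key not in dictionary:
            dictionary[key] = value
    return dictionary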
Example #11
 def __init__(self):
     super(ProvidersServiceManager, self).__init__()
     ThreadPool.__init__(self)
     MessageServer.__init__(self, 'SERVICE_MANAGER_INDEX',
                            'SERVICE_MANAGER')
     self.poll_database()
     self._registered_services = {}
     self._message_types = {
         'shutdown': self._shutdown_package_services,
         'startup': self._start_remote_services,
     }
Example #12
 def get_season_art(self, tvdb_id, season):
     art_types = self._get_show_art_types(tvdb_id)
     if not art_types:
         return None
     art_types = [i for i in art_types if i.startswith("season")]
     thread_pool = ThreadPool()
     for language in self.languages:
         for art_type in art_types:
             thread_pool.put(self._get_season_art, tvdb_id, art_type,
                             season, language)
     item = thread_pool.wait_completion()
     if not item:
         return None
     return item
Example #13
    def get_show_art(self, tvdb_id):
        art_types = self._get_show_art_types(tvdb_id)
        if art_types:
            art_types = [i for i in art_types if not i.startswith("season")]
        else:
            art_types = []

        thread_pool = ThreadPool()
        for language in self.languages:
            for art_type in art_types:
                thread_pool.put(self._get_show_art, tvdb_id, art_type, language)
        item = thread_pool.wait_completion()
        if not item:
            return None
        return item
Example #14

 def __init__(self, xml_file, location):
     super(DownloadManager, self).__init__(xml_file, location)
     self.manager = Manager()
     self.list_control = None
     self.thread_pool = ThreadPool()
     self.exit_requested = False
     self.downloads = {}
Example #15

 def __init__(self, uncached_source, silent=False):
     super(_BaseCacheAssist, self).__init__()
     self.debrid_slug = None
     self.debrid_readable = None
     self.transfer_id = None
     self.transfer_info = None
     self.uncached_source = uncached_source
     self.current_percent = -1
     self.previous_percent = -1
     self.status = "starting"
     self.last_progression_timestamp = time.time()
     self.download_speed = 0
     self.seeds = 0
     self.silent = silent
     self.cancelled = False
     self.thread_pool = ThreadPool()
     self.progress_message = "Status: {} | Progress: {} | Speed: {} | Peers: {}"
Example #16
    def _update_objects(self, db_list_to_update, media_type):

        threadpool = ThreadPool()
        for i in db_list_to_update:
            threadpool.put(self.metadataHandler.update, i)
        updated_items = threadpool.wait_completion()

        if updated_items is None:
            return

        updated_items = [i for i in updated_items if i is not None]

        self.save_to_meta_table(
            (i for i in updated_items if "trakt_object" in i),
            media_type,
            "trakt",
            "trakt_id",
        )
        self.save_to_meta_table(
            (i for i in updated_items if "tmdb_object" in i),
            media_type,
            "tmdb",
            "tmdb_id",
        )
        self.save_to_meta_table(
            (i for i in updated_items if "tvdb_object" in i),
            media_type,
            "tvdb",
            "tvdb_id",
        )
        self.save_to_meta_table(
            (i for i in updated_items if "fanart_object" in i),
            media_type,
            "fanart",
            "tvdb_id",
        )
        self.save_to_meta_table(
            (i for i in updated_items if "omdb_object" in i),
            media_type,
            "omdb",
            "imdb_id",
        )

        return updated_items
Example #17
    def get_show_art(self, tvdb_id):
        item = {}

        art_types = self._get_show_art_types(tvdb_id)
        if art_types:
            art_types = [i for i in art_types if not i.startswith("season")]
        else:
            art_types = []

        thread_pool = ThreadPool()
        for language in self.languages:
            for art_type in art_types:
                thread_pool.put(self._get_show_art, tvdb_id, art_type,
                                language)
            item = tools.smart_merge_dictionary(item,
                                                thread_pool.wait_completion())
        if not item:
            return None
        return item
Example #18
 def __init__(self):
     self.thread_pool = ThreadPool()
     self.providers = {}
     if g.all_debrid_enabled():
         self.providers.update({'all_debrid': ('All Debrid', AllDebridWalker)})
     if g.premiumize_enabled():
         self.providers.update({'premiumize': ('Premiumize', PremiumizeWalker)})
     if g.real_debrid_enabled():
         self.providers.update({'real_debrid': ('Real Debrid', RealDebridWalker)})
     self.providers.update({'local_downloads': ('Local Downloads', LocalFileWalker)})
Example #19
 def get_show_info(self, tvdb_id):
     thread_pool = ThreadPool()
     thread_pool.put(self._get_series_cast, tvdb_id)
     for language in self.languages:
         thread_pool.put(self._get_show_info, tvdb_id, language)
     item = thread_pool.wait_completion()
     if not item:
         return None
     return item
Example #20
 def get_show_info(self, tvdb_id):
     item = {}
     thread_pool = ThreadPool()
     thread_pool.put(self._get_series_cast, tvdb_id)
     for language in self.languages:
         thread_pool.put(self._get_show_info, tvdb_id, language)
         item = tools.smart_merge_dictionary(item,
                                             thread_pool.wait_completion())
     if not item:
         return None
     return item
Example #21

 def check_hash(self, hash_list):
     if isinstance(hash_list, list):
         hash_list = [hash_list[x : x + 100] for x in range(0, len(hash_list), 100)]
         thread = ThreadPool()
         for section in hash_list:
             thread.put(self._check_hash_thread, sorted(section))
         thread.wait_completion()
         return self.cache_check_results
     else:
         hash_string = "/" + hash_list
         return self.get_url("torrents/instantAvailability" + hash_string)
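check_hash splits the hashes into batches of 100, presumably to respect a limit on the torrents/instantAvailability endpoint, and checks each batch on its own thread. The slicing idiom it uses is the standard way to chunk a list:

def chunks(seq, size=100):
    # Consecutive slices of at most `size` items
    return [seq[i:i + size] for i in range(0, len(seq), size)]

chunks(list(range(7)), 3)  # -> [[0, 1, 2], [3, 4, 5], [6]]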
Example #22
    def __init__(self, item_information):
        self.hash_regex = re.compile(r'btih:(.*?)(?:&|$)')
        self.canceled = False
        self.torrent_cache = TorrentCache()
        self.torrent_threads = ThreadPool()
        self.hoster_threads = ThreadPool()
        self.adaptive_threads = ThreadPool()
        self.item_information = item_information
        self.media_type = self.item_information['info']['mediatype']
        self.torrent_providers = []
        self.hoster_providers = []
        self.adaptive_providers = []
        self.running_providers = []
        self.duration = 0
        self.language = 'en'
        self.sources_information = {
            "adaptiveSources": [],
            "torrentCacheSources": {},
            "hosterSources": {},
            "cloudFiles": [],
            "remainingProviders": [],
            "allTorrents": {},
            "torrents_quality": [0, 0, 0, 0],
            "hosters_quality": [0, 0, 0, 0],
            "cached_hashes": []
        }

        self.hoster_domains = {}
        self.progress = 1
        self.runtime = 0
        self.host_domains = []
        self.host_names = []
        self.timeout = g.get_int_setting('general.timeout')
        self.window = SourceWindowAdapter(self.item_information, self)
        self.session = requests.Session()

        self.silent = g.get_bool_runtime_setting('tempSilent')
Example #23

class _DebridDownloadBase(object):
    def __init__(self, source):
        self.thread_pool = ThreadPool()
        self.source = source
        self.average_speed = "0 B/s"
        self.progress = 0
        self.downloaders = []
        self.debrid_module = None
        self._confirm_source_downloadable()

    def _confirm_source_downloadable(self):
        if self.source.get("type") not in VALID_SOURCE_TYPES:
            raise InvalidSourceType(self.source.get("type"))

    @abc.abstractmethod
    def _fetch_available_files(self):
        """
        Fetches available files in source and returns a list of (path, filename) tuples
        :return: List
        """

    @abc.abstractmethod
    def _resolve_file_url(self, file):
        """
        :param file: Dict
        :return: String
        """

    def _get_selected_files(self):
        """
        Prompts the user to select which files to download where applicable
        :return: The source itself for single-file types, else a list of selected (file, title) tuples
        """
        if self.source.get("type") in ["hoster", "cloud"]:
            return self.source
        available_files = self._fetch_available_files()
        available_files = [
            (i, i["path"].split("/")[-1])
            for i in available_files
            if source_utils.is_file_ext_valid(i["path"])
        ]
        available_files = sorted(available_files, key=lambda k: k[1])
        file_titles = [i[1] for i in available_files]

        selection = xbmcgui.Dialog().multiselect(
            g.get_language_string(30511), file_titles
        )
        if not selection:
            return []
        selection = [available_files[i] for i in selection]
        return selection

    def _initiate_download(self, url, output_filename=None):
        """
        Creates Downloader Class and adds it to current download thread pool
        :param url: String
        :param output_filename: String
        :return: None
        """
        downloader = _DownloadTask(output_filename)
        self.downloaders.append(downloader)
        self.thread_pool.put(downloader.download, url, True)

    def _resolver_setup(self, selected_files):
        """
        Hook allowing subclasses to post-process the selection before download
        :param selected_files: Files selected by the user
        :return: Processed selection
        """
        return selected_files
        return selected_files

    @abc.abstractmethod
    def _get_single_item_info(self, source):
        """
        Fetches file information for a single-file source
        :param source: Source to inspect
        :return: Dict of file information
        """

    def _handle_potential_multi(self):
        """
        Requests file selection from the user and begins the download tasks
        :return: None
        """
        selected_files = self._get_selected_files()
        selected_files = self._resolver_setup(selected_files)
        for i in selected_files:
            self._initiate_download(self._resolve_file_url(i), i[1])

    def download(self):
        """
        Begins the required download type for the provided source
        :return: None
        """
        if self.source["type"] not in ["hoster", "cloud"]:
            self._handle_potential_multi()
        else:
            source_info = self._get_single_item_info(self.source)
            self._initiate_download(
                self._resolve_file_url([source_info]), self.source["release_title"]
            )
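_DebridDownloadBase is a template: download() decides between the single-file and multi-select paths, while subclasses supply _fetch_available_files, _resolve_file_url and _get_single_item_info for their particular debrid service. A hypothetical minimal subclass might look like the sketch below; every debrid_module call in it is invented for illustration.

class ExampleDebridDownload(_DebridDownloadBase):
    # Illustration only; the debrid_module methods here are assumptions
    def _fetch_available_files(self):
        return self.debrid_module.list_files(self.source["magnet"])

    def _resolve_file_url(self, file):
        return self.debrid_module.resolve(file[0]["link"])

    def _get_single_item_info(self, source):
        return self.debrid_module.item_info(source["url"])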
Example #24
class TraktSyncDatabase(Database):
    def __init__(self):
        super(TraktSyncDatabase, self).__init__(g.TRAKT_SYNC_DB_PATH, schema,
                                                migrate_db_lock)
        self.metadataHandler = MetadataHandler()
        self.trakt_api = TraktAPI()

        self.activities = {}
        self.item_list = []
        self.base_date = "1970-01-01T00:00:00"
        self.task_queue = ThreadPool()
        self.mill_task_queue = ThreadPool()
        self.parent_task_queue = ThreadPool()
        self.refresh_activities()

        # If you make changes to the required meta in any indexer that is cached in this database,
        # you will need to update the version number below to match the new addon version.
        # This ensures that the metadata required for operations is available.

        self.last_meta_update = "2.0.0"
        if self.activities is None:
            self.clear_all_meta(False)
            self.set_base_activities()

        if self.activities is not None:
            self._check_database_version()

        self.notification_prefix = "{}: Trakt".format(g.ADDON_NAME)
        self.hide_unaired = g.get_bool_setting("general.hideUnAired")
        self.hide_specials = g.get_bool_setting("general.hideSpecials")
        self.hide_watched = g.get_bool_setting("general.hideWatched")
        self.date_delay = g.get_bool_setting("general.datedelay")
        self.page_limit = g.get_int_setting("item.limit")

    def clear_specific_item_meta(self, trakt_id, media_type):
        if media_type in ("tvshow", "show"):
            media_type = "shows"
        elif media_type == "movie":
            media_type = "movies"
        elif media_type == "episode":
            media_type = "episodes"
        elif media_type == "season":
            media_type = "seasons"

        if media_type not in ["shows", "movies", "seasons", "episodes"]:
            raise InvalidMediaTypeException(media_type)

        self.execute_sql("DELETE from {}_meta where id=?".format(media_type),
                         (trakt_id, ))
        self.execute_sql(
            "UPDATE {} SET info=null, art=null, cast=null, meta_hash=null where trakt_id=?"
            "".format(media_type),
            (trakt_id, ),
        )

    @staticmethod
    def _get_datetime_now():
        return datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.000Z")

    def refresh_activities(self):
        self.activities = self.execute_sql(
            "SELECT * FROM activities WHERE sync_id=1").fetchone()

    def set_base_activities(self):
        self.execute_sql(
            "INSERT OR REPLACE INTO activities(sync_id, seren_version, trakt_username) VALUES(1, ?, ?)",
            (self.last_meta_update, g.get_setting("trakt.username")),
        )
        self.activities = self.execute_sql(
            "SELECT * FROM activities WHERE sync_id=1").fetchone()

    def _check_database_version(self):
        # If we are updating from a database prior to database versioning, we must clear the meta data
        # Migrate from older versions before trakt username tracking
        if tools.compare_version_numbers(self.activities["seren_version"],
                                         self.last_meta_update):
            g.log("Rebuilding Trakt Sync Database Version")
            xbmcgui.Dialog().ok(g.ADDON_NAME, g.get_language_string(30363))
            try:
                self.re_build_database(True)
            except Exception:
                self.rebuild_database()

    def flush_activities(self, clear_meta=False):
        if clear_meta:
            self.clear_all_meta()
        self.execute_sql("DELETE FROM activities")
        self.set_base_activities()

    def clear_user_information(self, notify=True):
        username = self.activities["trakt_username"]
        self.execute_sql(
            [
                "UPDATE episodes SET watched=?",
                "UPDATE episodes SET collected=?",
                "UPDATE movies SET watched=?",
                "UPDATE movies SET collected=?",
                "UPDATE shows SET unwatched_episodes=?",
                "UPDATE shows SET watched_episodes=?",
                "UPDATE seasons SET unwatched_episodes=?",
                "UPDATE seasons SET watched_episodes=?",
            ],
            (0, ),
        )
        self.execute_sql(
            [
                "UPDATE episodes SET last_watched_at=?",
                "UPDATE movies SET last_watched_at=?",
            ],
            (None, ),
        )
        self.execute_sql([
            "DELETE from bookmarks WHERE 1=1",
            "DELETE from hidden WHERE 1=1",
        ])
        self.execute_sql("DELETE from lists WHERE username=?", (username, ))
        self.set_trakt_user("")
        self.set_base_activities()
        if notify:
            g.notification(self.notification_prefix,
                           g.get_language_string(30297),
                           time=5000)

    def set_trakt_user(self, trakt_username):
        g.log("Setting Trakt Username: {}".format(trakt_username))
        self.execute_sql("UPDATE activities SET trakt_username=?",
                         (trakt_username, ))

    def clear_all_meta(self, notify=True):
        if notify:
            confirm = xbmcgui.Dialog().yesno(g.ADDON_NAME,
                                             g.get_language_string(30201))
            if confirm == 0:
                return

        self.execute_sql(
            [
                "UPDATE shows SET info=?, cast=?, art=?, meta_hash=?",
                "UPDATE seasons SET info=?, cast=?, art=?, meta_hash=?",
                "UPDATE episodes SET info=?, cast=?, art=?, meta_hash=?",
                "UPDATE movies SET info=?, cast=?, art=?, meta_hash=?",
            ],
            (None, None, None, None),
        )

        self.execute_sql([
            "DELETE FROM movies_meta",
            "DELETE FROM shows_meta",
            "DELETE FROM seasons_meta",
            "DELETE FROM episodes_meta",
        ])
        if notify:
            g.notification(self.notification_prefix,
                           g.get_language_string(30298),
                           time=5000)

    def re_build_database(self, silent=False):
        if not silent:
            confirm = xbmcgui.Dialog().yesno(g.ADDON_NAME,
                                             g.get_language_string(30201))
            if confirm == 0:
                return

        self.clear_all_meta(False)
        self.clear_user_information(False)
        self.rebuild_database()
        self.set_base_activities()
        self.refresh_activities()

        from resources.lib.database.trakt_sync import activities

        sync_errors = activities.TraktSyncDatabase().sync_activities(silent)

        if sync_errors:
            g.notification(self.notification_prefix,
                           g.get_language_string(30364),
                           time=5000)
        elif sync_errors is None:
            self.refresh_activities()
        else:
            g.notification(self.notification_prefix,
                           g.get_language_string(30299),
                           time=5000)

    def filter_items_that_needs_updating(self, requested, media_type):
        if requested is None or len(requested) == 0:
            return []
        query = """WITH requested(trakt_id, meta_hash) AS (VALUES {}) select r.trakt_id as trakt_id from requested as 
        r left join {} as db on r.trakt_id == db.trakt_id where db.trakt_id IS NULL or (db.info is null or db.art is 
        null or db.cast is null or r.meta_hash != db.meta_hash)""".format(
            ",".join([
                "({}, '{}')".format(i.get("trakt_id"),
                                    self.metadataHandler.meta_hash)
                for i in requested
            ]),
            media_type,
        )
        result = set(r["trakt_id"] for r in self.execute_sql(query).fetchall())
        return [r for r in requested if r.get("trakt_id") in result]

    def save_to_meta_table(self, items, meta_type, provider_type, id_column):
        if items is None:
            return
        sql_statement = "replace into {}_meta (id ,type, meta_hash, value) VALUES (?, ?, ?, ?)".format(
            meta_type)
        obj = None
        meta_hash = None
        if provider_type == "trakt":
            obj = MetadataHandler.trakt_object
            meta_hash = self.trakt_api.meta_hash
        elif provider_type == "tmdb":
            obj = MetadataHandler.tmdb_object
            meta_hash = self.metadataHandler.tmdb_api.meta_hash
        elif provider_type == "tvdb":
            obj = MetadataHandler.tvdb_object
            meta_hash = self.metadataHandler.tvdb_api.meta_hash
        elif provider_type == "fanart":
            obj = MetadataHandler.fanart_object
            meta_hash = self.metadataHandler.fanarttv_api.meta_hash
        elif provider_type == "omdb":
            obj = MetadataHandler.omdb_object
            meta_hash = self.metadataHandler.omdb_api.meta_hash

        if obj is None or meta_hash is None:
            raise UnsupportedProviderType(provider_type)

        self.execute_sql(
            sql_statement,
            ((i.get(id_column), provider_type, meta_hash,
              self.clean_meta(obj(i)))
             for i in items if i and obj(i) and i.get(id_column)
             and MetadataHandler.full_meta_up_to_par(meta_type, obj(i))),
        )

        for i in items:
            if i and obj(i):
                if obj(i).get("seasons"):
                    self.save_to_meta_table(i.get("seasons"), "season",
                                            provider_type, id_column)
                if obj(i).get("episodes"):
                    self.save_to_meta_table(i.get("episodes"), "episode",
                                            provider_type, id_column)

    @staticmethod
    def clean_meta(item):
        if not item:
            return None

        result = {
            "info": {
                key: value
                for key, value in item.get("info", {}).items()
                if key != "seasons" and key != "episodes"
            },
            "art": item.get("art"),
            "cast": item.get("cast"),
        }
        if not result.get("info") and not result.get("art") and not result.get(
                "cast"):
            g.log(
                "Bad Item meta discovered when cleaning - item: {}".format(
                    item),
                "error",
            )
            return None
        else:
            return result

    def insert_trakt_movies(self, movies):
        g.log("Inserting Movies into sync database: {}".format(
            [i.get("trakt_id") for i in movies]))
        get = MetadataHandler.get_trakt_info
        self.execute_sql(
            self.upsert_movie_query,
            ((
                i.get("trakt_id"),
                None,
                None,
                None,
                get(i, "collected"),
                get(i, "watched"),
                tools.validate_date(get(i, "aired")),
                tools.validate_date(get(i, "dateadded")),
                get(i, "tmdb_id"),
                get(i, "imdb_id"),
                None,
                self._create_args(i),
                tools.validate_date(get(i, "collected_at")),
                tools.validate_date(get(i, "last_watched_at")),
                i.get("trakt_id"),
            ) for i in movies),
        )
        self.save_to_meta_table(movies, "movies", "trakt", "trakt_id")

    def insert_trakt_shows(self, shows):
        g.log("Inserting Shows into sync database: {}".format(
            [i.get("trakt_id") for i in shows]))
        get = MetadataHandler.get_trakt_info
        self.execute_sql(
            self.upsert_show_query,
            ((
                i.get("trakt_id"),
                None,
                None,
                None,
                tools.validate_date(get(i, "aired")),
                tools.validate_date(get(i, "dateadded")),
                get(i, "tmdb_id"),
                get(i, "tvdb_id"),
                get(i, "imdb_id"),
                None,
                get(i, "season_count"),
                get(i, "episode_count"),
                self._create_args(i),
                get(i, "is_airing"),
                i.get("trakt_id"),
            ) for i in shows),
        )
        self.save_to_meta_table(shows, "shows", "trakt", "trakt_id")

    def insert_trakt_episodes(self, episodes):
        g.log("Inserting episodes into sync database: {}".format(
            [i.get("trakt_id") for i in episodes]))
        get = MetadataHandler.get_trakt_info
        self.execute_sql(
            self.upsert_episode_query,
            ((
                i.get("trakt_id"),
                i.get("trakt_show_id"),
                i.get("trakt_season_id"),
                get(i, "playcount"),
                get(i, "collected"),
                tools.validate_date(get(i, "aired")),
                tools.validate_date(get(i, "dateadded")),
                get(i, "season"),
                get(i, "episode"),
                get(i, "tmdb_id"),
                get(i, "tvdb_id"),
                get(i, "imdb_id"),
                None,
                None,
                None,
                self._create_args(i),
                tools.validate_date(get(i, "last_watched_at")),
                tools.validate_date(get(i, "collected_at")),
                None,
                i.get("trakt_id"),
            ) for i in episodes),
        )
        self.save_to_meta_table(episodes, "episodes", "trakt", "trakt_id")

    def insert_trakt_seasons(self, seasons):
        g.log("Inserting seasons into sync database: {}".format(
            [i.get("trakt_id") for i in seasons]))
        get = MetadataHandler.get_trakt_info
        self.execute_sql(
            self.upsert_season_query,
            ((
                i.get("trakt_show_id"),
                i.get("trakt_id"),
                None,
                None,
                None,
                tools.validate_date(get(i, "aired")),
                tools.validate_date(get(i, "dateadded")),
                get(i, "tmdb_id"),
                get(i, "tvdb_id"),
                None,
                None,
                get(i, "season"),
                self._create_args(i),
                i.get("trakt_id"),
            ) for i in seasons),
        )
        self.save_to_meta_table(seasons, "seasons", "trakt", "trakt_id")

    def _mill_if_needed(self,
                        list_to_update,
                        queue_wrapper=None,
                        mill_episodes=True):
        if queue_wrapper is None:
            queue_wrapper = self._queue_mill_tasks

        query = """select s.trakt_id, CASE WHEN (agg.episode_count is NULL or 
        agg.episode_count != s.episode_count) or (agg.meta_count=0 or agg.meta_count!=s.episode_count) THEN 'True' 
        ELSE 'False' END as needs_update from shows as s left join(select s.trakt_id, count(e.trakt_id) as 
        episode_count, count(em.id) as meta_count from shows as s inner join episodes as e on s.trakt_id = 
        e.trakt_show_id left join episodes_meta as em on em.id = e.trakt_id and em.type = 'trakt' and em.meta_hash = 
        '{}' where e.season != 0 and Datetime(e.air_date) < Datetime('now') GROUP BY s.trakt_id) as agg on s.trakt_id == 
        agg.trakt_id WHERE s.trakt_id in ({})""".format(
            self.trakt_api.meta_hash,
            ",".join(
                str(i.get("trakt_show_id", i.get("trakt_id")))
                for i in list_to_update),
        )
        needs_update = self.execute_sql(query).fetchall()
        if needs_update is None or all(x["needs_update"] == "False"
                                       for x in needs_update):
            return

        g.log(
            "{} items require season milling".format(
                len([i for i in needs_update if i["needs_update"] == "True"])),
            "debug")
        self.mill_seasons([
            i for i in list_to_update
            if any(x["needs_update"] == "True" and x.get("trakt_id") == i.get(
                "trakt_show_id", i.get("trakt_id")) for x in needs_update)
        ], queue_wrapper, mill_episodes)

    def mill_seasons(self,
                     trakt_collection,
                     queue_wrapper,
                     mill_episodes=False):
        with SyncLock(
                "mill_seasons_episodes_{}".format(mill_episodes),
            {
                show.get("trakt_show_id", show.get("trakt_id"))
                for show in trakt_collection
            },
        ) as sync_lock:
            get = MetadataHandler.get_trakt_info
            queue_wrapper(self._pull_show_seasons,
                          [(i, mill_episodes) for i in sync_lock.running_ids])
            results = self.mill_task_queue.wait_completion()

            seasons = []
            episodes = []
            trakt_info = MetadataHandler.trakt_info

            for show in trakt_collection:
                extended_seasons = get(show, "seasons", [])
                for season in results.get(show.get("trakt_id"), []):
                    if self.hide_specials and get(season, "season") == 0:
                        continue

                    trakt_info(season).update(
                        {"trakt_show_id": get(show, "trakt_id")})
                    trakt_info(season).update(
                        {"tmdb_show_id": get(show, "tmdb_id")})
                    trakt_info(season).update(
                        {"tvdb_show_id": get(show, "tvdb_id")})

                    season.update({"trakt_show_id": show.get("trakt_id")})
                    season.update({"tmdb_show_id": show.get("tmdb_id")})
                    season.update({"tvdb_show_id": show.get("tvdb_id")})

                    trakt_info(season).update(
                        {"dateadded": get(show, "dateadded")})
                    trakt_info(season).update(
                        {"tvshowtitle": get(show, "title")})

                    if not get(season, "season") == 0:
                        show.update(
                            {"season_count": show.get("season_count", 0) + 1})
                        show.update({
                            'episode_count':
                            show.get("episode_count", 0) +
                            get(season, "aired_episodes", 0)
                        })
                    for episode in get(season, "episodes", []):
                        trakt_info(episode).update(
                            {"trakt_show_id": get(show, "trakt_id")})
                        trakt_info(episode).update(
                            {"tmdb_show_id": get(show, "tmdb_id")})
                        trakt_info(episode).update(
                            {"tvdb_show_id": get(show, "tvdb_id")})
                        trakt_info(episode).update(
                            {"trakt_season_id": get(season, "trakt_id")})

                        episode.update({"trakt_show_id": show.get("trakt_id")})
                        episode.update({"tmdb_show_id": show.get("tmdb_id")})
                        episode.update({"tvdb_show_id": show.get("tvdb_id")})
                        episode.update(
                            {"trakt_season_id": season.get("trakt_id")})

                        trakt_info(episode).update(
                            {"tvshowtitle": get(show, "title")})
                        for extended_season in (
                                x for x in extended_seasons
                                if get(x, "season") == get(season, "season")):
                            [
                                tools.smart_merge_dictionary(
                                    episode, extended_episode)
                                for extended_episode in
                                (x
                                 for x in get(extended_season, "episodes", [])
                                 if get(x, "episode") == get(
                                     episode, "episode"))
                            ]
                            tools.smart_merge_dictionary(
                                season, extended_season)
                        episodes.append(episode)
                    seasons.append(season)

            self.insert_trakt_seasons(
                self.filter_trakt_items_that_needs_updating(
                    seasons, "seasons"))
            self.insert_trakt_episodes(
                self.filter_trakt_items_that_needs_updating(
                    episodes, "episodes"))

            self.execute_sql(
                "UPDATE shows SET episode_count=?, season_count=? WHERE trakt_id=? ",
                [(i.get("episode_count", 0), i.get("season_count",
                                                   0), i["trakt_id"])
                 for i in trakt_collection])

            self.update_shows_statistics({"trakt_id": i}
                                         for i in sync_lock.running_ids)

            if mill_episodes:
                self.update_season_statistics({"trakt_id": i}
                                              for i in sync_lock.running_ids)

    def filter_trakt_items_that_needs_updating(self, requested, media_type):
        if len(requested) == 0:
            return requested

        get = MetadataHandler.get_trakt_info
        query = """WITH requested(trakt_id, meta_hash, updated_at) AS (VALUES {}) 
        select r.trakt_id as trakt_id from requested as r left join {} as db on r.trakt_id == db.trakt_id 
        left join {}_meta as m on db.trakt_id == id and type=\"trakt\" where db.trakt_id IS NULL or m.value IS NULL or 
        m.meta_hash != r.meta_hash or (Datetime(db.last_updated) < Datetime(r.updated_at))""".format(
            ",".join("({}, '{}', '{}')".format(i.get(
                "trakt_id"), self.trakt_api.meta_hash, get(i, "dateadded"))
                     for i in requested if i.get("trakt_id")),
            media_type,
            media_type,
        )

        result = set(r["trakt_id"] for r in self.execute_sql(query).fetchall())
        return [
            r for r in requested
            if r.get("trakt_id") and r.get("trakt_id") in result
        ]

    def _pull_show_seasons(self, show_id, mill_episodes=False):
        return {
            show_id:
            self.trakt_api.get_json(
                "/shows/{}/seasons".format(show_id),
                extended="full,episodes" if mill_episodes else "full",
                translations=g.get_language_code(),
            )
        }

    @staticmethod
    def _create_args(item):
        get = MetadataHandler.get_trakt_info
        info = MetadataHandler.info
        args = {
            "trakt_id": get(item, "trakt_id",
                            info(item).get("trakt_id")),
            "mediatype": get(item, "mediatype",
                             info(item).get("mediatype")),
        }
        if args["trakt_id"] == None:
            import inspect
            g.log(inspect.stack())
            g.log(item)
        if args["mediatype"] == "season":
            args["trakt_show_id"] = get(item, "trakt_show_id",
                                        info(item).get("trakt_show_id"))
        if args["mediatype"] == "episode":
            args["trakt_show_id"] = get(item, "trakt_show_id",
                                        info(item).get("trakt_show_id"))
            args["trakt_season_id"] = get(item, "trakt_season_id",
                                          info(item).get("trakt_season_id"))
        return tools.quote(json.dumps(args, sort_keys=True))

    def _queue_mill_tasks(self, func, args):
        for arg in args:
            g.log(
                "Requesting season info for show {} - mill_episodes={}".format(
                    arg[0], arg[1]), "debug")
            self.mill_task_queue.put(func, *arg)

    def requires_update(self, new_date, old_date):
        return tools.parse_datetime(
            new_date, tools.DATE_FORMAT, False
        ) > tools.parse_datetime(old_date, "%Y-%m-%dT%H:%M:%S", False)

    @staticmethod
    def wrap_in_trakt_object(items):
        for item in items:
            if item.get("show") is not None:
                info = item["show"].pop("info")
                item["show"].update({"trakt_id": info.get("trakt_id")})
                item["show"].update({"trakt_object": {"info": info}})
            if item.get("episode") is not None:
                info = item["episode"].pop("info")
                item["episode"].update({"trakt_id": info.get("trakt_id")})
                item["episode"].update({"tvdb_id": info.get("tvdb_id")})
                item["episode"].update({"tmdb_id": info.get("tmdb_id")})
                item["episode"].update({"trakt_object": {"info": info}})
        return items

    def _get_single_meta(self, trakt_url, trakt_id, media_type):
        return self._update_single_meta(
            trakt_url,
            self.execute_sql(
                """select id as trakt_id, value as trakt_object from 
        {}_meta where id = ? and type = 'trakt' """.format(media_type),
                (int(trakt_id), ),
            ).fetchone(),
            media_type,
        )

    def _update_single_meta(self, trakt_url, item, media_type):
        trakt_object = MetadataHandler.trakt_object
        if item is None:
            item = {}
        if trakt_object(item) is None or trakt_object(item) == {}:
            new_object = self.trakt_api.get_json(trakt_url, extended="full")
            self.save_to_meta_table([new_object], media_type, "trakt",
                                    "trakt_id")
            item.update(new_object)
        return item

    @staticmethod
    def update_missing_trakt_objects(db_list_to_update, list_to_update):
        for item in db_list_to_update:
            if item.get("trakt_object") is None:
                try:
                    item.update(
                        next(i for i in list_to_update
                             if int(i.get("trakt_id") or 0) == int(
                                 item.get("trakt_id") or 0)))
                except StopIteration:
                    g.log(
                        "Failed to find item in list to update, original item: \n {}"
                        .format(item))

    def _extract_trakt_page(self, url, media_type, **params):
        result = []

        def _handle_page(page):
            if not page or len(page) == 0:
                return []
            to_insert = self.filter_trakt_items_that_needs_updating(
                page, media_type)
            if media_type == "movies":
                self.insert_trakt_movies(to_insert)
            elif media_type == "shows":
                self.insert_trakt_shows(to_insert)
            query = (
                "WITH requested(trakt_id) AS (VALUES {}) select r.trakt_id as trakt_id from requested as r inner "
                "join {} as db on r.trakt_id == db.trakt_id left join {}_meta as m on db.trakt_id == "
                "id and type = 'trakt' where 1=1".format(
                    ",".join("({})".format(i.get("trakt_id")) for i in page),
                    media_type,
                    media_type,
                ))
            if self.hide_unaired:
                query += " AND Datetime(air_date) < Datetime('now')"
            if self.hide_watched:
                if media_type == "movies":
                    query += " AND watched = 0"
                if media_type == "shows":
                    query += " AND watched_episodes < episode_count"
            result.extend(self.execute_sql(query).fetchall())

        no_paging = params.get("no_paging", False)
        pull_all = params.pop("pull_all", False)
        page_number = params.pop("page", 1)

        if pull_all:
            _handle_page(self.trakt_api.get_json_cached(url, **params))
            if len(result) >= (self.page_limit *
                               page_number) and not no_paging:
                return result[self.page_limit *
                              (page_number - 1):self.page_limit * page_number]
        else:
            params["limit"] = params.pop("page", self.page_limit)
            for page in self.trakt_api.get_all_pages_json(url, **params):
                _handle_page(page)
                if len(result) >= (self.page_limit *
                                   page_number) and not no_paging:
                    return result[self.page_limit *
                                  (page_number - 1):self.page_limit *
                                  page_number]

        if no_paging:
            return result
        return result[self.page_limit * (page_number - 1):]

    def update_shows_statistics(self, trakt_list):
        to_update = ",".join({str(i.get("trakt_id")) for i in trakt_list})
        self.execute_sql(
            """INSERT or REPLACE into shows (trakt_id, info, art, cast, air_date, last_updated, tmdb_id, tvdb_id,             
            imdb_id, meta_hash, season_count, episode_count, watched_episodes, unwatched_episodes, args, is_airing) 
            SELECT old.trakt_id, old.info, old.art, old.cast, old.air_date, old.last_updated, old.tmdb_id, 
            old.tvdb_id, old.imdb_id, old.meta_hash, old.season_count, old.episode_count, COALESCE( 
            new.watched_episodes, old.watched_episodes), COALESCE(new.unwatched_episodes, old.unwatched_episodes), 
            old.args, old.is_airing FROM (select sh.trakt_id, sh.episode_count - sum(CASE WHEN e.watched > 0 AND 
            e.season != 0 AND Datetime(e.air_date) < Datetime('now') THEN 1 ELSE 0 END) as unwatched_episodes, 
            sum(CASE WHEN e.watched > 0 AND e.season != 0 AND Datetime(e.air_date) < Datetime('now') THEN 1 ELSE 0 
            END) as watched_episodes from shows as sh left join episodes as e on e.trakt_show_id = sh.trakt_id group 
            by sh.trakt_id) AS new LEFT JOIN (SELECT * FROM shows) AS old on old.trakt_id = new.trakt_id where 
            old.trakt_id in ({})""".format(to_update))

    def update_season_statistics(self, trakt_list):
        to_update = ",".join({str(i.get("trakt_id")) for i in trakt_list})

        self.execute_sql(
            """INSERT or REPLACE into seasons ( trakt_show_id, trakt_id, info, art, cast, air_date, last_updated,
             tmdb_id, tvdb_id, meta_hash, episode_count, watched_episodes, unwatched_episodes, is_airing, season, args 
             ) SELECT old.trakt_show_id, old.trakt_id, old.info, old.art, old.cast, old.air_date, old.last_updated, 
             old.tmdb_id, old.tvdb_id, old.meta_hash, COALESCE(new.episode_count, old.episode_count), 
             COALESCE(new.watched_episodes, old.watched_episodes), COALESCE(new.unwatched_episodes, 
             old.unwatched_episodes), COALESCE(new.is_airing, old.is_airing), old.season, old.args FROM ( SELECT 
             se.trakt_id,  sum( CASE WHEN datetime(e.air_date) < datetime('now') THEN 1 ELSE 0 END) AS episode_count, 
             sum( CASE WHEN e.watched == 0 AND datetime(e.air_date) < datetime('now') THEN 1 ELSE 0 END) AS 
             unwatched_episodes, sum( CASE WHEN e.watched > 0 AND datetime(e.air_date) < datetime('now') THEN 1 ELSE 0 
             END) AS watched_episodes, CASE WHEN max(e.air_date) > datetime('now') THEN 1 ELSE 0 END AS is_airing FROM 
             seasons AS se INNER JOIN episodes AS e ON e.trakt_season_id = se.trakt_id WHERE se.season != 0 GROUP BY 
             se.trakt_id) AS new LEFT JOIN ( SELECT * FROM seasons) AS old ON new.trakt_id = old.trakt_id where 
             old.trakt_id in ({})""".format(to_update))

    @property
    def upsert_movie_query(self):
        return """INSERT or REPLACE into movies ( trakt_id, info, art, cast, collected, watched, air_date, 
        last_updated, tmdb_id, imdb_id, meta_hash, args, collected_at, last_watched_at ) SELECT COALESCE(
        new.trakt_id, old.trakt_id), COALESCE(new.info, old.info), COALESCE(new.art, old.art), COALESCE(new.cast, 
        old.cast), COALESCE(new.collected, old.collected), COALESCE(new.watched, old.watched), COALESCE(new.air_date, 
        old.air_date), COALESCE(new.last_updated, old.last_updated), COALESCE(new.tmdb_id, old.tmdb_id), 
        COALESCE(new.imdb_id, old.imdb_id), COALESCE(new.meta_hash, old.meta_hash), COALESCE(new.args, old.args), 
        COALESCE(new.collected_at, old.collected_at), COALESCE(new.last_watched_at, old.last_watched_at) FROM ( 
        SELECT ? AS trakt_id, ? AS info, ? AS art, ? AS cast, ? AS collected, ? as watched, ? AS air_date, 
        ? AS last_updated, ? AS tmdb_id, ? AS imdb_id, ? AS meta_hash, ? AS args, ? AS collected_at, 
        ? AS last_watched_at) AS new LEFT JOIN (SELECT * FROM movies WHERE trakt_id = ? limit 1) AS old """

    @property
    def upsert_show_query(self):
        return """INSERT or REPLACE into shows (trakt_id, info, art, cast, air_date, last_updated, tmdb_id, tvdb_id, 
        imdb_id, meta_hash, season_count, episode_count, watched_episodes, unwatched_episodes, args, is_airing) SELECT 
        COALESCE(new.trakt_id, old.trakt_id), COALESCE(new.info, old.info), COALESCE(new.art, old.art), COALESCE(
        new.cast, old.cast), COALESCE(new.air_date, old.air_date), COALESCE(new.last_updated, old.last_updated), 
        COALESCE(new.tmdb_id, old.tmdb_id), COALESCE(new.tvdb_id, old.tvdb_id), COALESCE(new.imdb_id, old.imdb_id), 
        COALESCE(new.meta_hash, old.meta_hash), COALESCE(new.season_count, old.season_count), 
        COALESCE(new.episode_count, old.episode_count), COALESCE(old.watched_episodes, 0), 
        COALESCE(old.unwatched_episodes, 0), COALESCE(new.args, old.args), COALESCE(new.is_airing, old.is_airing) FROM 
        (SELECT ? AS trakt_id, ? AS info, ? AS art, ? AS cast, ? AS air_date, ? AS last_updated, ? AS tmdb_id, ? AS 
        tvdb_id, ? AS imdb_id, ? AS meta_hash, ? AS season_count,? AS episode_count, ? AS args, ? as is_airing) AS 
        new LEFT JOIN (SELECT * FROM shows WHERE trakt_id = ? limit 1) AS old """

    @property
    def upsert_season_query(self):
        return """INSERT or REPLACE into seasons ( trakt_show_id, trakt_id, info, art, cast, air_date, last_updated, 
        tmdb_id, tvdb_id, meta_hash, episode_count, watched_episodes, unwatched_episodes, season, args, is_airing ) 
        SELECT COALESCE(new.trakt_show_id, old.trakt_show_id), COALESCE(new.trakt_id, old.trakt_id), COALESCE(new.info, 
        old.info), COALESCE(new.art, old.art), COALESCE(new.cast, old.cast), COALESCE(new.air_date, old.air_date), 
        COALESCE(new.last_updated, old.last_updated), COALESCE(new.tmdb_id, old.tmdb_id), COALESCE(new.tvdb_id, 
        old.tvdb_id), COALESCE(new.meta_hash, old.meta_hash), COALESCE(new.episode_count, old.episode_count), 
        old.watched_episodes, old.unwatched_episodes, COALESCE(new.season, old.season), COALESCE(new.args, 
        old.args), old.is_airing FROM ( SELECT ? AS trakt_show_id, ? AS trakt_id, ? AS info, ? AS art, ? AS cast, ? 
        AS air_date, ? AS last_updated, ? AS tmdb_id, ? AS tvdb_id, ? AS meta_hash, ? AS episode_count, ? AS season, ? 
        AS args) AS new LEFT JOIN ( SELECT * FROM seasons WHERE trakt_id = ? limit 1) AS old """

    @property
    def upsert_episode_query(self):
        return """INSERT or REPLACE into episodes (trakt_id, trakt_show_id, trakt_season_id, watched, collected, 
Example #25
class TorrentCacheCheck:
    def __init__(self, scraper_class):
        self.premiumize_cached = []
        self.realdebrid_cached = []
        self.all_debrid_cached = []
        self.threads = ThreadPool()

        self.episode_strings = None
        self.season_strings = None
        self.scraper_class = scraper_class
        self.rd_api = real_debrid.RealDebrid()

    def store_torrent(self, torrent):
        """
        Pushes cached torrents back up to the calling class
        :param torrent: Torrent to return
        :type torrent: dict
        :return: None
        :rtype: None
        """
        try:
            sources_information = self.scraper_class.sources_information
            # Compare and combine source meta
            tor_key = torrent['hash'] + torrent['debrid_provider']
            sources_information['cached_hashes'].append(torrent['hash'])
            if tor_key in sources_information["torrentCacheSources"]:
                c_size = sources_information["torrentCacheSources"][tor_key].get('size', 0)
                n_size = torrent.get('size', 0)
                info = torrent.get('info', [])

                if c_size < n_size:
                    sources_information["torrentCacheSources"].update({tor_key: torrent})

                    sources_information["torrentCacheSources"][tor_key]['info'] \
                        .extend([i for i in info if
                                 i not in sources_information["torrentCacheSources"][tor_key].get('info', [])])
            else:
                sources_information["torrentCacheSources"].update({tor_key: torrent})
        except AttributeError:
            return

    def torrent_cache_check(self, torrent_list, info):
        """
        Run cache check threads for given torrents
        :param torrent_list: List of torrents to check
        :type torrent_list: list
        :param info: Metadata on item to check
        :type info: dict
        :return: None
        :rtype: None
        """
        if g.real_debrid_enabled() and g.get_bool_setting('rd.torrents'):
            self.threads.put(self._realdebrid_worker, copy.deepcopy(torrent_list), info)

        if g.premiumize_enabled() and g.get_bool_setting('premiumize.torrents'):
            self.threads.put(self._premiumize_worker, copy.deepcopy(torrent_list))

        if g.all_debrid_enabled() and g.get_bool_setting('alldebrid.torrents'):
            self.threads.put(self._all_debrid_worker, copy.deepcopy(torrent_list))
        self.threads.wait_completion()

    def _all_debrid_worker(self, torrent_list):

        try:
            api = all_debrid.AllDebrid()

            if len(torrent_list) == 0:
                return

            cache_check = api.check_hash([i['hash'] for i in torrent_list])

            if not cache_check:
                return

            for idx, i in enumerate(torrent_list):
                try:
                    if cache_check['magnets'][idx]['instant'] is True:
                        i['debrid_provider'] = 'all_debrid'
                        self.store_torrent(i)
                except KeyError:
                    g.log('KeyError in AllDebrid Cache check worker. '
                          'Failed to walk AllDebrid cache check response, check your auth and account status', 'error')
                    return
        except Exception:
            g.log_stacktrace()

    def _realdebrid_worker(self, torrent_list, info):

        try:
            hash_list = [i['hash'] for i in torrent_list]
            api = real_debrid.RealDebrid()
            real_debrid_cache = api.check_hash(hash_list)

            for i in torrent_list:
                try:
                    if 'rd' not in real_debrid_cache.get(i['hash'], {}):
                        continue
                    if len(real_debrid_cache[i['hash']]['rd']) >= 1:
                        if self.scraper_class.media_type == 'episode':
                            self._handle_episode_rd_worker(i, real_debrid_cache, info)
                        else:
                            self._handle_movie_rd_worker(i, real_debrid_cache)
                except KeyError:
                    pass
        except Exception:
            g.log_stacktrace()

    def _handle_movie_rd_worker(self, source, real_debrid_cache):
        for storage_variant in real_debrid_cache[source['hash']]['rd']:
            if not self.rd_api.is_streamable_storage_type(storage_variant):
                continue
            else:
                source['debrid_provider'] = 'real_debrid'
                self.store_torrent(source)

    def _handle_episode_rd_worker(self, source, real_debrid_cache, info):
        for storage_variant in real_debrid_cache[source['hash']]['rd']:

            if not self.rd_api.is_streamable_storage_type(storage_variant):
                continue

            if source_utils.get_best_episode_match('filename', storage_variant.values(), info):
                source['debrid_provider'] = 'real_debrid'
                self.store_torrent(source)
                break

    def _premiumize_worker(self, torrent_list):
        try:
            hash_list = [i['hash'] for i in torrent_list]
            if len(hash_list) == 0:
                return
            premiumize_cache = premiumize.Premiumize().hash_check(hash_list)
            premiumize_cache = premiumize_cache['response']
            # The response is a list of booleans ordered to match hash_list
            for i, is_cached in zip(torrent_list, premiumize_cache):
                if is_cached is True:
                    i['debrid_provider'] = 'premiumize'
                    self.store_torrent(i)
        except Exception:
            g.log_stacktrace()
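
A minimal usage sketch for the class above, assuming a scraper object that exposes the only two attributes TorrentCacheCheck actually touches (media_type and sources_information); FakeScraper and the torrent dict values are purely illustrative:

class FakeScraper:  # hypothetical stand-in for the real Sources class
    media_type = 'movie'
    sources_information = {"torrentCacheSources": {}, "cached_hashes": []}

scraper = FakeScraper()
torrents = [{'hash': 'abc123', 'magnet': 'magnet:?xt=urn:btih:abc123',
             'size': 700, 'debrid_provider': ''}]
TorrentCacheCheck(scraper).torrent_cache_check(torrents, info={})
# Torrents confirmed cached by any enabled debrid service now sit in
# scraper.sources_information["torrentCacheSources"]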
Example #26
0
class Sources(object):
    """
    Handles fetching and processing of available sources for provided meta data
    """

    def __init__(self, item_information):
        self.hash_regex = re.compile(r'btih:(.*?)(?:&|$)')
        self.canceled = False
        self.torrent_cache = TorrentCache()
        self.torrent_threads = ThreadPool()
        self.hoster_threads = ThreadPool()
        self.adaptive_threads = ThreadPool()
        self.item_information = item_information
        self.media_type = self.item_information['info']['mediatype']
        self.torrent_providers = []
        self.hoster_providers = []
        self.adaptive_providers = []
        self.running_providers = []
        self.language = 'en'
        self.sources_information = {
            "adaptiveSources": [],
            "torrentCacheSources": {},
            "hosterSources": {},
            "cloudFiles": [],
            "remainingProviders": [],
            "allTorrents": {},
            "torrents_quality": [0, 0, 0, 0],
            "hosters_quality": [0, 0, 0, 0],
            "cached_hashes": []
        }

        self.hoster_domains = {}
        self.progress = 1
        self.runtime = 0
        self.host_domains = []
        self.host_names = []
        self.timeout = g.get_int_setting('general.timeout')
        self.window = SourceWindowAdapter(self.item_information, self)
        self.session = requests.Session()

        self.silent = g.get_bool_runtime_setting('tempSilent')

    def get_sources(self, overwrite_torrent_cache=False):
        """
        Main endpoint to initiate scraping process
        :param overwrite_torrent_cache: Clear local torrent cache results before scraping
        :type overwrite_torrent_cache: bool
        :return: Tuple of (uncached sources, sorted playable sources, item metadata)
        :rtype: tuple
        """
        try:
            g.log('Starting Scraping', 'debug')
            g.log("Timeout: {}".format(self.timeout), 'debug')
            g.log("Pre-term-enabled: {}".format(g.get_setting("preem.enabled")), 'debug')
            g.log("Pre-term-limit: {}".format(g.get_setting("preem.limit")), 'debug')
            g.log("Pre-term-movie-res: {}".format(g.get_setting("preem.movieres")), 'debug')
            g.log("Pre-term-show-res: {}".format(g.get_setting("preem.tvres")), 'debug')
            g.log("Pre-term-type: {}".format(g.get_setting("preem.type")), 'debug')
            g.log("Pre-term-cloud-files: {}".format(g.get_setting("preem.cloudfiles")), 'debug')
            g.log("Pre-term-adaptive-files: {}".format(g.get_setting("preem.adaptiveSources")), 'debug')

            self._handle_pre_scrape_modifiers()
            self._get_imdb_info()

            if overwrite_torrent_cache:
                self._clear_local_torrent_results()
            else:
                self._check_local_torrent_database()

            self._update_progress()
            if self._prem_terminate():
                return self._finalise_results()

            self._init_providers()

            # Add the user's cloud inspection to the threads to be run
            self.torrent_threads.put(self._user_cloud_inspection)

            # Load threads for all sources
            self._create_torrent_threads()
            self._create_hoster_threads()
            self._create_adaptive_threads()

            self.window.create()
            self.window.set_text(g.get_language_string(30054), self.progress, self.sources_information, self.runtime)
            self.window.set_property('process_started', 'true')

            # Keep alive for gui display and threading
            g.log('Entering Keep Alive', 'info')
            start_time = time.time()

            while self.progress < 100 and not g.abort_requested():
                g.log('Remaining Providers {}'.format(self.sources_information["remainingProviders"]))
                # Allow scrapers at least 5 seconds to initiate before treating
                # an empty provider list as completion
                if self._prem_terminate() is True or (len(self.sources_information["remainingProviders"]) == 0
                                                      and self.runtime > 5):
                    break

                if self.canceled:
                    monkey_requests.PRE_TERM_BLOCK = True
                    break

                self._update_progress()

                try:
                    self.window.set_text("4K: {} | 1080: {} | 720: {} | SD: {}".format(
                        g.color_string(self.sources_information["torrents_quality"][0] +
                                       self.sources_information["hosters_quality"][0]),
                        g.color_string(self.sources_information["torrents_quality"][1] +
                                       self.sources_information["hosters_quality"][1]),
                        g.color_string(self.sources_information["torrents_quality"][2] +
                                       self.sources_information["hosters_quality"][2]),
                        g.color_string(self.sources_information["torrents_quality"][3] +
                                       self.sources_information["hosters_quality"][3]),
                    ), self.progress, self.sources_information, self.runtime)

                except (KeyError, IndexError) as e:
                    g.log('Failed to set window text, {}'.format(e), 'error')

                # Update Progress
                xbmc.sleep(200)
                self.runtime = time.time() - start_time
                # Equivalent to the elapsed fraction of the timeout as a percentage
                self.progress = int((self.runtime / float(self.timeout)) * 100)

            g.log('Exited Keep Alive', 'info')

            return self._finalise_results()

        finally:
            self.window.close()

    def _handle_pre_scrape_modifiers(self):
        """
        Detects preScrape, disables pre-termination and sets timeout to maximum value
        :return:
        :rtype:
        """
        if g.REQUEST_PARAMS.get('action', '') == "preScrape":
            self.silent = True
            self.timeout = 180
            self._prem_terminate = lambda: False  # pylint: disable=method-hidden

    def _create_hoster_threads(self):
        if self._hosters_enabled():
            random.shuffle(self.hoster_providers)
            for i in self.hoster_providers:
                self.hoster_threads.put(self._get_hosters, self.item_information, i)

    def _create_torrent_threads(self):
        if self._torrents_enabled():
            random.shuffle(self.torrent_providers)
            for i in self.torrent_providers:
                self.torrent_threads.put(self._get_torrent, self.item_information, i)

    def _create_adaptive_threads(self):
        for i in self.adaptive_providers:
            self.adaptive_threads.put(self._get_adaptive_sources, self.item_information, i)

    def _check_local_torrent_database(self):
        if g.get_bool_setting('general.torrentCache'):
            self.window.set_text(g.get_language_string(30053), self.progress, self.sources_information, self.runtime)
            self._get_local_torrent_results()

    def _is_playable_source(self):
        source_types = ['cloudFiles', 'adaptiveSources', 'hosterSources', 'torrentCacheSources']
        return any(self.sources_information[source_type] for source_type in source_types)

    def _finalise_results(self):
        monkey_requests.allow_provider_requests = False
        self._send_provider_stop_event()

        uncached = [i for i in self.sources_information["allTorrents"].values()
                    if i['hash'] not in self.sources_information['cached_hashes']]

        if not self._is_playable_source():
            self._build_cache_assist()
            g.cancel_playback()
            if self.silent:
                g.notification(g.ADDON_NAME, g.get_language_string(30055))
            return uncached, [], self.item_information

        sorted_sources = SourceSorter(self.media_type).sort_sources(
            list(self.sources_information["torrentCacheSources"].values()),
            list(self.sources_information['hosterSources'].values()),
            self.sources_information['cloudFiles'])
        sorted_sources = self.sources_information['adaptiveSources'] + sorted_sources
        return uncached, sorted_sources, self.item_information

    def _get_imdb_info(self):
        if self.media_type == 'movie':
            # Confirm movie year against IMDb's information
            imdb_id = self.item_information["info"].get("imdb_id")
            if imdb_id is None:
                return
            resp = self._imdb_suggestions(imdb_id)
            year = resp.get('y', self.item_information['info']['year'])
            # title = resp['l']
            # if title != self.item_information['info']['title']:
            #     self.item_information['info'].get('aliases', []).append(self.item_information['info']['title'])
            #     self.item_information['info']['title'] = title
            #     self.item_information['info']['originaltitle'] = title
            if year is not None and year != self.item_information['info']['year']:
                self.item_information['info']['year'] = g.UNICODE(year)

        # else:
        #     resp = self._imdb_suggestions(self.item_information['info']['tvshow.imdb_id'])
        #     year = resp['y']
        #     title = resp['l']
        #     if year != self.item_information['info']['year']:
        #         self.item_information['info']['year'] = g.UNICODE(year)
        #     if self.item_information['info']['tvshowtitle'] != title:
        #         self.item_information['info'].get('aliases', []).append(
        #             self.item_information['info']['tvshowtitle'])
        #         self.item_information['info']['tvshowtitle'] = title
        #         self.item_information['info']['originaltitle'] = title

    def _imdb_suggestions(self, imdb_id):
        try:
            resp = self.session.get('https://v2.sg.media-imdb.com/suggestion/t/{}.json'.format(imdb_id))
            resp = json.loads(resp.text)['d'][0]
            return resp
        except (ValueError, KeyError):
            g.log('Failed to get IMDB suggestion')
            return {}

    def _send_provider_stop_event(self):
        for provider in self.running_providers:
            if hasattr(provider, 'cancel_operations') and callable(provider.cancel_operations):
                provider.cancel_operations()

    @staticmethod
    def _torrents_enabled():
        return ((g.get_bool_setting('premiumize.torrents') and g.premiumize_enabled())
                or (g.get_bool_setting('rd.torrents') and g.real_debrid_enabled())
                or (g.get_bool_setting('alldebrid.torrents') and g.all_debrid_enabled()))

    @staticmethod
    def _hosters_enabled():
        return ((g.get_bool_setting('premiumize.hosters') and g.premiumize_enabled())
                or (g.get_bool_setting('rd.hosters') and g.real_debrid_enabled())
                or (g.get_bool_setting('alldebrid.hosters') and g.all_debrid_enabled()))

    def _store_torrent_results(self, torrent_list):
        if len(torrent_list) == 0:
            return
        self.torrent_cache.add_torrent(self.item_information, torrent_list)

    def _clear_local_torrent_results(self):
        if g.get_bool_setting('general.torrentCache'):
            g.log("Clearing existing local torrent cache items", "info")
            self.torrent_cache.clear_item(self.item_information)

    def _get_local_torrent_results(self):
        relevant_torrents = self.torrent_cache.get_torrents(self.item_information)[:100]

        if len(relevant_torrents) > 0:
            for torrent in relevant_torrents:
                torrent['provider'] = '{} (Local Cache)'.format(torrent['provider'])

                self.sources_information["allTorrents"].update({torrent['hash']: torrent})

            TorrentCacheCheck(self).torrent_cache_check(relevant_torrents, self.item_information)

    @staticmethod
    def _get_best_torrent_to_cache(sources):
        quality_list = ['1080p', '720p', 'SD']
        sources = [i for i in sources if i.get('seeds', 0) != 0 and i.get("magnet")]

        for quality in quality_list:
            quality_filter = [i for i in sources if i['quality'] == quality]
            if len(quality_filter) > 0:
                # Prefer full show/season packs, then fall back to single episodes
                packtype_filter = [i for i in quality_filter if
                                   i['package'] == 'show' or i['package'] == 'season']
                sorted_list = sorted(packtype_filter, key=lambda k: k['seeds'], reverse=True)
                if len(sorted_list) > 0:
                    return sorted_list[0]
                package_type_list = [i for i in quality_filter if i['package'] == 'single']
                sorted_list = sorted(package_type_list, key=lambda k: k['seeds'], reverse=True)
                if len(sorted_list) > 0:
                    return sorted_list[0]

        return None

    def _build_cache_assist(self):
        if len(self.sources_information["allTorrents"]) == 0:
            return
        valid_packages = ['show', 'season', 'single']

        if self.media_type == 'episode' and self.item_information['is_airing']:
            valid_packages.remove('show')
            if int(self.item_information['info']['season']) >= int(
                    self.item_information['season_count']):
                valid_packages.remove('season')

        sources = [i for i in self.sources_information['allTorrents'].values() if i['package'] in valid_packages]

        if g.get_bool_setting("general.autocache") and g.get_int_setting('general.cacheAssistMode') == 0:
            sources = self._get_best_torrent_to_cache(sources)
            if sources:
                action_args = tools.quote(json.dumps(sources))
                xbmc.executebuiltin(
                    'RunPlugin({}?action=cacheAssist&action_args={})'.format(g.BASE_URL, action_args))
        elif not self.silent:
            confirmation = xbmcgui.Dialog().yesno('{} - {}'.format(g.ADDON_NAME, g.get_language_string(30325)),
                                                  g.get_language_string(30056))
            if confirmation:
                window = ManualCacheWindow(*SkinManager().confirm_skin_path('manual_caching.xml'),
                                           item_information=self.item_information, sources=sources)
                window.doModal()
                del window

    def _init_providers(self):
        try:
            # Only extend sys.path on the first run; afterwards the module is
            # already importable and just needs reloading
            if g.ADDON_USERDATA_PATH not in sys.path:
                sys.path.append(g.ADDON_USERDATA_PATH)
                providers = importlib.import_module("providers")
            else:
                providers = reload_module(importlib.import_module("providers"))
        except ValueError:
            g.notification(g.ADDON_NAME, g.get_language_string(30465))
            g.log('No providers installed', 'warning')
            return

        providers_dict = providers.get_relevant(self.language)

        torrent_providers = providers_dict['torrent']
        hoster_providers = providers_dict['hosters']
        adaptive_providers = providers_dict['adaptive']

        hoster_providers, torrent_providers = self._remove_duplicate_providers(torrent_providers, hoster_providers)

        self.hoster_domains = resolver.Resolver.get_hoster_list()
        self.torrent_providers = torrent_providers
        self.hoster_providers = hoster_providers
        self.adaptive_providers = adaptive_providers
        self.host_domains = OrderedDict.fromkeys([host[0].lower() for provider in self.hoster_domains['premium']
                                                  for host in self.hoster_domains['premium'][provider]])
        self.host_names = OrderedDict.fromkeys([host[1].lower() for provider in self.hoster_domains['premium']
                                                for host in self.hoster_domains['premium'][provider]])

    @staticmethod
    def _remove_duplicate_providers(torrent, hosters):
        # De-duplicate by provider name; the shared filter_list means torrent
        # providers take precedence over hosters carrying the same name
        temp_list = []
        filter_list = []
        for i in torrent:
            if not i[1] in filter_list:
                temp_list.append(i)
                filter_list.append(i[1])

        torrent = temp_list
        temp_list = []
        for i in hosters:
            if not i[1] in filter_list:
                temp_list.append(i)
                filter_list.append(i[1])

        hosters = temp_list

        return hosters, torrent

    def _exit_thread(self, provider_name):
        if provider_name in self.sources_information["remainingProviders"]:
            self.sources_information["remainingProviders"].remove(provider_name)

    def _process_provider_torrent(self, torrent, provider_name, info):
        torrent['type'] = 'torrent'

        if not torrent.get('info'):
            torrent['info'] = source_utils.get_info(torrent['release_title'])

        torrent['quality'] = torrent.get('quality', '')
        if torrent['quality'] not in approved_qualities:
            torrent['quality'] = source_utils.get_quality(torrent['release_title'])

        torrent['hash'] = torrent.get('hash', self.hash_regex.findall(torrent['magnet'])[0]).lower()
        torrent['size'] = torrent.get('size', 0)
        torrent['size'] = self._torrent_filesize(torrent, info)

        if 'provider_name_override' in torrent:
            torrent['provider'] = torrent['provider_name_override']
        else:
            torrent['provider'] = provider_name

    def _get_adaptive_sources(self, info, provider):
        provider_name = provider[1].upper()
        try:
            self.sources_information["remainingProviders"].append(provider_name)
            provider_module = importlib.import_module('{}.{}'.format(provider[0], provider[1]))
            if not hasattr(provider_module, "sources"):
                g.log('Invalid provider: sources class missing')
                return
            provider_source = provider_module.sources()

            if not hasattr(provider_source, self.media_type):
                g.log('Skipping provider: {} - Does not support {} types'.format(provider_name, self.media_type),
                      'warning')
                return

            self.running_providers.append(provider_source)

            if self.media_type == 'episode':
                simple_info = self._build_simple_show_info(info)
                results = provider_source.episode(simple_info, info)
            else:
                try:
                    results = provider_source.movie(info['info']['title'],
                                                    g.UNICODE(info['info']['year']),
                                                    info['info'].get('imdb_id'))
                except TypeError:
                    results = provider_source.movie(info['info']['title'],
                                                    g.UNICODE(info['info']['year']))

            if results is None:
                return

            if self.canceled:
                return

            if len(results) > 0:
                # Begin filling in optional dictionary returns
                for result in results:
                    self._process_adaptive_source(result, provider_name, provider)

                self.sources_information['adaptiveSources'] += results

            self.running_providers.remove(provider_source)

            return
        finally:
            self.sources_information["remainingProviders"].remove(provider_name)

    @staticmethod
    def _process_adaptive_source(source, provider_name, provider_module):
        source['type'] = 'Adaptive'
        source['release_title'] = source.get('release_title', provider_name)
        source['source'] = provider_name.upper()
        source['quality'] = 'Variable'
        source['size'] = 'Variable'
        source['info'] = source.get('info', ['Adaptive Stream'])
        source['debrid_provider'] = provider_name
        source['provider_imports'] = provider_module
        source['provider'] = source.get('provider_name_override', provider_name.upper())
        return source

    def _get_torrent(self, info, provider):
        # Extract provider name from Tuple
        provider_name = provider[1].upper()

        # Begin Scraping Torrent Sources
        try:
            self.sources_information["remainingProviders"].append(provider_name)

            provider_module = importlib.import_module('{}.{}'.format(provider[0], provider[1]))
            if not hasattr(provider_module, "sources"):
                g.log('Invalid provider: sources class missing')
                return
            provider_source = provider_module.sources()

            if not hasattr(provider_source, self.media_type):
                g.log('Skipping provider: {} - Does not support {} types'.format(provider_name, self.media_type),
                      'warning')
                return

            self.running_providers.append(provider_source)

            if self.media_type == 'episode':
                simple_info = self._build_simple_show_info(info)

                torrent_results = provider_source.episode(simple_info, info)
            else:
                try:
                    torrent_results = provider_source.movie(info['info']['title'],
                                                            g.UNICODE(info['info']['year']),
                                                            info['info'].get('imdb_id'))
                except TypeError:
                    torrent_results = provider_source.movie(info['info']['title'],
                                                            g.UNICODE(info['info']['year']))

            if torrent_results is None:
                return

            if self.canceled:
                return

            if len(torrent_results) > 0:
                # Begin filling in optional dictionary returns
                for torrent in torrent_results:
                    self._process_provider_torrent(torrent, provider_name, info)

                torrent_results = {value['hash']: value for value in torrent_results}.values()
                start_time = time.time()

                # Check Debrid Providers for cached copies
                self._store_torrent_results(torrent_results)

                if self.canceled:
                    return

                [self.sources_information["allTorrents"].update({torrent['hash']: torrent})
                 for torrent in torrent_results]

                TorrentCacheCheck(self).torrent_cache_check([i for i in torrent_results], info)

                g.log('{} cache check took {} seconds'.format(provider_name, time.time() - start_time))

            self.running_providers.remove(provider_source)

            return
        finally:
            self.sources_information["remainingProviders"].remove(provider_name)

    def _do_hoster_episode(self, provider_source, provider_name, info):
        if not hasattr(provider_source, 'tvshow'):
            return
        imdb, tvdb, title, localtitle, aliases, year = self._build_hoster_variables(info, 'tvshow')

        if self.canceled:
            self._exit_thread(provider_name)
            return

        url = provider_source.tvshow(imdb, tvdb, title, localtitle, aliases, year)

        if self.canceled:
            self._exit_thread(provider_name)
            return

        imdb, tvdb, title, premiered, season, episode = self._build_hoster_variables(info, 'episode')

        if self.canceled:
            self._exit_thread(provider_name)
            return

        url = provider_source.episode(url, imdb, tvdb, title, premiered, season, episode)

        if self.canceled:
            self._exit_thread(provider_name)
            return

        return url

    def _do_hoster_movie(self, provider_source, provider_name, info):
        if not hasattr(provider_source, 'movie'):
            self._exit_thread(provider_name)
            return
        imdb, title, localtitle, aliases, year = self._build_hoster_variables(info, 'movie')
        return provider_source.movie(imdb, title, localtitle, aliases, year)

    def _get_hosters(self, info, provider):
        provider_name = provider[1].upper()
        self.sources_information["remainingProviders"].append(provider_name.upper())
        try:
            provider_module = importlib.import_module('{}.{}'.format(provider[0], provider[1]))
            if hasattr(provider_module, "source"):
                provider_class = provider_module.source()
            else:
                self._exit_thread(provider_name)
                return

            self.running_providers.append(provider_class)

            if self.media_type == 'episode':
                sources = self._do_hoster_episode(provider_class, provider_name, info)
            else:
                sources = self._do_hoster_movie(provider_class, provider_name, info)

            if not sources:
                self._exit_thread(provider_name)
                return

            host_dict, hostpr_dict = self._build_hoster_variables(info, 'sources')

            if self.canceled:
                self._exit_thread(provider_name)
                return

            sources = provider_class.sources(sources, host_dict, hostpr_dict)

            if not sources:
                g.log('{}: Found No Sources'.format(provider_name), 'info')
                return

            if self.media_type == 'episode':
                title = '{} - {}'.format(self.item_information['info']['tvshowtitle'],
                                         self.item_information['info']['title'])
            else:
                title = '{} ({})'.format(self.item_information['info']['title'], self.item_information['info']['year'])

            for source in sources:
                source['type'] = 'hoster'
                source['release_title'] = source.get('release_title', title)
                source['source'] = source['source'].upper().split('.')[0]
                source['size'] = source.get('size', '0')
                source['info'] = source.get('info', [])
                source['provider_imports'] = provider
                source['provider'] = source.get('provider_name_override', provider_name.upper())

            # Keep links hosted on a known premium host domain, plus direct
            # links from hosts outside the premium list
            sources1 = [i for i in sources for host in self.host_domains if host in i['url']]
            sources2 = [i for i in sources if i['source'].lower() not in self.host_names and i['direct']]

            sources = sources1 + sources2

            self._debrid_hoster_duplicates(sources)
            self._exit_thread(provider_name)

        finally:
            try:
                self.sources_information["remainingProviders"].remove(provider_name)
            except ValueError:
                pass

    def _user_cloud_inspection(self):
        self.sources_information["remainingProviders"].append("Cloud Inspection")
        try:
            thread_pool = ThreadPool()
            if self.media_type == "episode":
                simple_info = self._build_simple_show_info(self.item_information)
            else:
                simple_info = None

            cloud_scrapers = [
                {"setting": "premiumize.cloudInspection", "provider": PremiumizeCloudScaper,
                 "enabled": g.premiumize_enabled()},
                {"setting": "rd.cloudInspection", "provider": RealDebridCloudScraper,
                 "enabled": g.real_debrid_enabled()},
                {"setting": "alldebrid.cloudInspection", "provider": AllDebridCloudScraper,
                 "enabled": g.all_debrid_enabled()},
            ]

            for cloud_scraper in cloud_scrapers:
                if cloud_scraper["enabled"] and g.get_bool_setting(cloud_scraper["setting"]):
                    thread_pool.put(cloud_scraper["provider"](self._prem_terminate).get_sources, self.item_information,
                                    simple_info)

            sources = thread_pool.wait_completion()
            self.sources_information["cloudFiles"] = sources if sources else []

        finally:
            self.sources_information["remainingProviders"].remove("Cloud Inspection")

    @staticmethod
    def _color_number(number):

        if int(number) > 0:
            return g.color_string(number, 'green')
        else:
            return g.color_string(number, 'red')

    def _update_progress(self):
        qualities = ['4K', '1080p', '720p', 'SD']

        list1 = [
            len([key for key, value in self.sources_information["torrentCacheSources"].items()
                 if value['quality'] == quality])
            for quality in qualities
        ]
        self.sources_information["torrents_quality"] = list1

        list2 = [
            len([key for key, value in self.sources_information["hosterSources"].items()
                 if value['quality'] == quality])
            for quality in qualities
        ]
        self.sources_information["hosters_quality"] = list2

        # string1 = u'{} - 4K: {} | 1080: {} | 720: {} | SD: {}'.format(g.get_language_string(30057),
        #                                                              self._color_number(list1[0]),
        #                                                              self._color_number(list1[1]),
        #                                                              self._color_number(list1[2]),
        #                                                              self._color_number(list1[3]))
        # string2 = u'{} - 4k: {} | 1080: {} | 720: {} | SD: {}'.format(g.get_language_string(30058),
        #                                                              self._color_number(list2[0]),
        #                                                              self._color_number(list2[1]),
        #                                                              self._color_number(list2[2]),
        #                                                              self._color_number(list2[3]))
        #
        # string4 = '{} - 4k: 0 | 1080: 0 | 720: 0 | SD: 0'.format(g.get_language_string(30059))
        # provider_string = ', '.join(g.color_string(i for i in self.sources_information["remainingProviders"]))
        # string3 = '{} - {}'.format(g.get_language_string(30060), provider_string[2:])
        # return [string1, string2, string3, string4]

    @staticmethod
    def _build_simple_show_info(info):
        simple_info = {'show_title': info['info'].get('tvshowtitle', ''),
                       'episode_title': info['info'].get('originaltitle', ''),
                       'year': g.UNICODE(info['info'].get('tvshow.year', info['info'].get('year', ''))),
                       'season_number': g.UNICODE(info['info']['season']),
                       'episode_number': g.UNICODE(info['info']['episode']),
                       'show_aliases': info['info'].get('aliases', []),
                       'country': info['info'].get('country_origin', ''),
                       'no_seasons': g.UNICODE(info.get('season_count', '')),
                       'absolute_number': g.UNICODE(info.get('absoluteNumber', '')),
                       'is_airing': info.get('is_airing', False),
                       'no_episodes': g.UNICODE(info.get('episode_count', '')),
                       'isanime': False}

        if '.' in simple_info['show_title']:
            simple_info['show_aliases'].append(source_utils.clean_title(simple_info['show_title'].replace('.', '')))
        if any(x in i.lower() for i in info['info'].get('genre', ['']) for x in ['anime', 'animation']):
            simple_info['isanime'] = True

        return simple_info
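    # For reference, a hypothetical example of the dict this produces:
    # {'show_title': 'Breaking Bad', 'episode_title': 'Pilot', 'year': '2008',
    #  'season_number': '1', 'episode_number': '1', 'show_aliases': [],
    #  'country': 'US', 'no_seasons': '5', 'absolute_number': '1',
    #  'is_airing': False, 'no_episodes': '62', 'isanime': False}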

    def _build_hoster_variables(self, info, media_type):

        info = copy.deepcopy(info)

        if media_type == 'tvshow':
            imdb = info['info'].get('imdb_id')
            tvdb = info['info'].get('tvdb_id')
            title = info['info'].get('tvshowtitle')
            localtitle = ''
            aliases = info['info'].get('aliases', [])
            if '.' in title:
                aliases.append(source_utils.clean_title(title.replace('.', '')))
            year = g.UNICODE(info['info']['year'])
            return imdb, tvdb, title, localtitle, aliases, year

        elif media_type == 'episode':
            imdb = info['info'].get('imdb_id')
            tvdb = info['info'].get('tvdb_id')
            title = info['info'].get('title')
            premiered = info['info'].get('premiered')
            season = g.UNICODE(info['info'].get('season'))
            episode = g.UNICODE(info['info'].get('episode'))
            return imdb, tvdb, title, premiered, season, episode
        elif media_type == 'movie':
            imdb = info['info'].get('imdb_id')
            title = info['info'].get('originaltitle')
            localtitle = info['info'].get('title')
            aliases = info['info'].get('aliases', [])
            year = g.UNICODE(info['info'].get('year'))
            return imdb, title, localtitle, aliases, year
        elif media_type == 'sources':
            hostpr_dict = [host[0]
                           for debrid in self.hoster_domains['premium'].values()
                           for host in debrid]
            host_dict = self.hoster_domains['free']
            return host_dict, hostpr_dict

    def _debrid_hoster_duplicates(self, sources):
        updated_sources = {}
        for provider in self.hoster_domains['premium']:
            for hoster in self.hoster_domains['premium'][provider]:
                for source in sources:
                    if hoster[1].lower() == source['source'].lower() or hoster[0].lower() in g.UNICODE(source['url']).lower():
                        source['debrid_provider'] = provider
                        updated_sources.update({"{}_{}".format(provider, source["url"].lower()): source})
        self.sources_information["hosterSources"].update(updated_sources)

    def _get_pre_term_min(self):
        if self.media_type == 'episode':
            prem_min = g.get_int_setting('preem.tvres') + 1
        else:
            prem_min = g.get_int_setting('preem.movieres') + 1
        return prem_min

    def _get_sources_by_resolution(self, resolutions, source_type):
        return [i for i in list(self.sources_information[source_type].values())
                if i and
                'quality' in i and
                any(i['quality'].lower() == r.lower() for r in resolutions)]

    def _prem_terminate(self):  # pylint: disable=method-hidden
        if self.canceled:
            monkey_requests.PRE_TERM_BLOCK = True
            return True

        if g.get_bool_setting('preem.cloudfiles') and len(self.sources_information["cloudFiles"]) > 0:
            monkey_requests.PRE_TERM_BLOCK = True
            return True
        if g.get_bool_setting('preem.adaptiveSources') and len(self.sources_information["adaptiveSources"]) > 0:
            monkey_requests.PRE_TERM_BLOCK = True
            return True
        if not g.get_bool_setting('preem.enabled'):
            return False

        prem_min = self._get_pre_term_min()
        pre_term_log_string = 'Pre-emptively Terminated'

        approved_resolutions = source_utils.get_accepted_resolution_list()
        approved_resolutions.reverse()
        prem_resolutions = approved_resolutions[:prem_min]
        limit = g.get_int_setting('preem.limit')
        preem_type = g.get_int_setting('preem.type')
        try:
            if preem_type == 0 and len(self._get_sources_by_resolution(prem_resolutions, "torrentCacheSources")) >= limit:
                g.log(pre_term_log_string, 'info')
                monkey_requests.PRE_TERM_BLOCK = True
                return True
            if preem_type == 1 and len(self._get_sources_by_resolution(prem_resolutions, "hosterSources")) >= limit:
                g.log(pre_term_log_string, 'info')
                monkey_requests.PRE_TERM_BLOCK = True
                return True
            if preem_type == 2:
                # Terminating on both hosters and torrents
                sources = self._get_sources_by_resolution(prem_resolutions, "hosterSources")
                sources.extend(self._get_sources_by_resolution(prem_resolutions, "torrentCacheSources"))

                if len(sources) >= limit:
                    g.log(pre_term_log_string, 'info')
                    monkey_requests.PRE_TERM_BLOCK = True
                    return True

        except (ValueError, KeyError, IndexError):
            pass

        return False

    @staticmethod
    def _torrent_filesize(torrent, info):
        if not torrent.get('size', 0):
            return 0
        size = int(torrent['size'])

        # For season/show packs, approximate the per-episode size
        if torrent['package'] == 'show':
            size = size / int(info['show_episode_count'])
        elif torrent['package'] == 'season':
            size = size / int(info['episode_count'])
        return size
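
# A hedged sketch of how this scraper entry point is typically driven;
# item_information is assumed to be the metadata dict built elsewhere in the
# addon, and resolve_and_play is a hypothetical downstream step:
#
#     uncached, playable, meta = Sources(item_information).get_sources()
#     if playable:
#         resolve_and_play(playable, meta)
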
class _BaseCacheAssist(TorrentAssist):
    def __init__(self, uncached_source, silent=False):
        super(_BaseCacheAssist, self).__init__()
        self.debrid_slug = None
        self.debrid_readable = None
        self.transfer_id = None
        self.transfer_info = None
        self.uncached_source = uncached_source
        self.current_percent = -1
        self.previous_percent = -1
        self.status = "starting"
        self.last_progression_timestamp = time.time()
        self.download_speed = 0
        self.seeds = 0
        self.silent = silent
        self.cancelled = False
        self.thread_pool = ThreadPool()
        self.progress_message = "Status: {} | Progress: {} | Speed: {} | Peers: {}"

    def _update_database(self):
        self.add_assist_torrent(
            self.debrid_slug,
            self.debrid_readable,
            self.status,
            self.uncached_source["release_title"],
            str(self.current_percent),
        )

    def run_single_status_cycle(self):
        self._update_status()
        self._update_database()

    def _update_status(self):
        """
        Polls debrid and updates class variables
        :return: None
        """

    def _delete_transfer(self):
        """
        Clears transfer from debrid provider
        :return: None
        """

    def _is_expired(self):
        """
        Confirms that progression hasn't stalled for over 3 hours
        :return: bool
        """
        # 10800 seconds == 3 hours without a change in percent complete
        return (self.current_percent == self.previous_percent
                and (self.last_progression_timestamp + 10800) < time.time())

    def cancel_process(self):
        self._handle_failure("User has cancelled process")
        self.cancelled = True

    @staticmethod
    def prompt_download_style():
        return xbmcgui.Dialog().yesno(
            g.ADDON_NAME,
            g.get_language_string(30492),
            yeslabel=g.get_language_string(30493),
            nolabel=g.get_language_string(30491),
        )

    def _get_progress_string(self):
        return self.progress_message.format(
            g.color_string(self.status.title()),
            g.color_string(self.current_percent),
            g.color_string(self.get_display_speed()),
            g.color_string(self.seeds),
        )

    def do_cache(self):

        yesno = self.prompt_download_style()

        if yesno:
            xbmcgui.Dialog().ok(g.ADDON_NAME, g.get_language_string(30504))
            self.thread_pool.put(self.status_update_loop)
            return {"result": "background", "source": None}
        else:
            progress_dialog = xbmcgui.DialogProgress()
            progress_dialog.create(
                g.get_language_string(30335),
                tools.create_multiline_message(
                    line1="Title: {}".format(
                        g.color_string(
                            self.uncached_source["release_title"].upper())),
                    line2=self._get_progress_string(),
                ),
            )

            monitor = xbmc.Monitor()
            while not progress_dialog.iscanceled() and not monitor.abortRequested():
                xbmc.sleep(5000)
                self.run_single_status_cycle()
                progress_dialog.update(int(self.current_percent),
                                       self._get_progress_string())
                if self.current_percent == 100:
                    progress_dialog.close()
                    break

            if progress_dialog.iscanceled() and self.current_percent != 100:
                self._handle_cancellation()
                self.cancel_process()
                return {"result": "error", "source": None}
            else:
                self.uncached_source["debrid_provider"] = self.debrid_slug
                return {"result": "success", "source": self.uncached_source}

    @staticmethod
    def _handle_cancellation():
        return xbmcgui.Dialog().ok(g.ADDON_NAME, g.get_language_string(30504))

    def status_update_loop(self):
        monitor = xbmc.Monitor()
        while not monitor.abortRequested() and not self.cancelled:
            if monitor.waitForAbort(10):
                raise KodiShutdownException(
                    "Kodi Shutdown requested, cancelling download")
            try:
                self._update_status()
                g.log(
                    self.progress_message.format(
                        self.status,
                        self.current_percent,
                        self.get_display_speed(),
                        self.seeds,
                    ))
                if self.status == "finished":
                    self._notify_user_of_completion()
                    self._update_database()
                    break

                if self.status == "downloading":
                    self._do_download_frame()
                else:
                    self._handle_failure("Unkonown Failure at Debrid Provider")

            except KodiShutdownException:
                self._delete_transfer()
                break

            except Exception as e:
                self._delete_transfer()
                raise e

    def _notify_user_of_completion(self):
        if not self.silent:
            xbmcgui.Dialog().notification(
                g.ADDON_NAME + ": %s" % self.uncached_source["release_title"],
                g.get_language_string(30484) +
                " %s" % self.uncached_source["release_title"],
                time=5000,
            )

    def _do_download_frame(self):
        if self._is_expired():
            self._handle_failure("Lack of progress")
        else:
            self._update_database()

    def _handle_failure(self, reason):
        if not self.silent:
            xbmcgui.Dialog().notification(
                g.ADDON_NAME,
                g.get_language_string(30485) %
                self.uncached_source["release_title"],
                time=5000,
            )
        self.status = "failed"
        self._update_database()
        self._delete_transfer()
        raise GeneralCachingFailure(
            "Could not create cache for magnet - {} \n Reason: {}"
            "".format(self.uncached_source["release_title"], reason))

    def get_display_speed(self):
        """
        Returns a display friendly version of the current speed
        :return: String eg: (125.54 KB/s)
        """
        speed = self.download_speed
        speed_categories = ["B/s", "KB/s", "MB/s"]
        for i in speed_categories:
            if speed / 1024 < 1:
                return "{} {}".format(tools.safe_round(speed, 2), i)
            speed = speed / 1024
        # Fall through for anything at or above 1 GB/s
        return "{} GB/s".format(tools.safe_round(speed, 2))
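
# A standalone illustration of the same scaling loop (plain round stands in
# for tools.safe_round; speeds are assumed to be bytes per second):

def display_speed(speed):
    for unit in ["B/s", "KB/s", "MB/s"]:
        if speed / 1024 < 1:
            return "{} {}".format(round(speed, 2), unit)
        speed = speed / 1024
    return "{} GB/s".format(round(speed, 2))

print(display_speed(128553.0))   # -> '125.54 KB/s'
print(display_speed(3145728.0))  # -> '3.0 MB/s'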