Example #1
    def get_sources(
        title,
        year,
        imdb,
        tvdb,
        season,
        episode,
        tvshowtitle,
        premiered,
        timeout=30,
        preset="search",
        dialog=None,
        exclude=None,
        scraper_title=False,
        listitem=None,
        output_function=koding.Play_Video,
        skip_selector=False,
        player=None,
    ):
        """
        Scrape for video sources using the universalscrapers library.
        Args:
            title: movie or episode title
            year: year the movie/episode came out
            imdb: imdb identifier
            tvdb: tvdb identifier
            season: season number
            episode: episode number
            tvshowtitle: title of the tv show
            premiered: year the tv show premiered
            timeout: timeout for scraping links
            preset: preferred quality of stream ("search" tries HD first,
                    "searchsd" skips HD links)
            dialog: progress dialog used for displaying messages
            exclude: list of scrapers to exclude
            scraper_title: when True, scrape episodes with `title` instead of
                           `tvshowtitle`; required if scrapers use an alternate spelling
            listitem: list item handed to the output function for playback
            output_function: callable used to play the resolved link
            skip_selector: skip the link-selection dialog even if it is enabled
            player: player instance passed through to the output function
        Returns:
            Boolean indicating playback success
        """
        year = str(year)
        content = "movie" if tvshowtitle is None else "episode"
        allow_debrid = ADDON.getSetting("allow_debrid") == "true"

        if ADDON.getSetting("use_link_dialog") == "true" and not skip_selector:
            # use link selector
            if content == "movie":
                scraper = universalscrapers.scrape_movie_with_dialog
                link, rest = scraper(
                    title,
                    year,
                    imdb,
                    timeout=timeout,
                    exclude=exclude,
                    extended=True,
                    sort_function=Sources.sort_function,
                    enable_debrid=allow_debrid,
                )
            elif content == "episode":
                scraper = universalscrapers.scrape_episode_with_dialog
                link, rest = scraper(
                    tvshowtitle,
                    year,
                    premiered,
                    season,
                    episode,
                    imdb,
                    tvdb,
                    timeout=timeout,
                    exclude=exclude,
                    extended=True,
                    sort_function=Sources.sort_function,
                    enable_debrid=allow_debrid,
                )
            else:
                return

            if type(link) == dict and "path" in link:
                link = link["path"]
            if link is None:
                return False
            url = link["url"]
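            # With "link_fallthrough" enabled, keep trying the remaining results
            # in order whenever the selected link fails to play.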
            if ADDON.getSetting("link_fallthrough") == "true":
                played = False
                index = 0
                links = []
                for item in rest:
                    if type(item) == dict and "path" in item:
                        links.extend(item["path"][1])
                    else:
                        links.extend(item[1])
                index = links.index(link)
                links = links[index + 1 :]
                num_results = len(rest) + 1
                while not played:
                    try:
                        if dialog is not None and dialog.iscanceled():
                            return False
                        if dialog is not None:
                            index = index + 1
                            percent = int((index * 100) / num_results)
                            line = "%s - %s (%s)" % (
                                link["scraper"],
                                link["source"],
                                link["quality"],
                            )
                            dialog.update(percent, line)
                    except:
                        pass
                    try:
                        resolved_link = universalscrapers.resolve(
                            link["scraper"], link["url"]
                        )
                        played = output_function(
                            resolved_link,
                            showbusy=False,
                            ignore_dp=True,
                            item=listitem,
                            player=player,
                            resolver=resolveurl,
                        )
                        if played:
                            break
                        if not links:
                            # nothing left to fall through to
                            return False
                        link = links[0]
                        links = links[1:]
                    except:
                        return False
                return played
            else:
                resolved_link = universalscrapers.resolve(link["scraper"], link["url"])
                return output_function(
                    resolved_link,
                    showbusy=False,
                    ignore_dp=True,
                    item=listitem,
                    player=player,
                    resolver=resolveurl,
                )
        else:
            if content == "movie":
                scraper = universalscrapers.scrape_movie
                links_scraper = scraper(
                    title,
                    year,
                    imdb,
                    timeout=timeout,
                    exclude=exclude,
                    enable_debrid=allow_debrid,
                )

            elif content == "episode":
                if scraper_title:
                    tvshowtitle = title
                scraper = universalscrapers.scrape_episode
                links_scraper = scraper(
                    tvshowtitle,
                    year,
                    premiered,
                    season,
                    episode,
                    imdb,
                    tvdb,
                    timeout=timeout,
                    exclude=exclude,
                    enable_debrid=allow_debrid,
                )
            else:
                return

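        # Defer lower-priority links: SD links (under the "search" preset) and
        # non-direct hoster links are queued and only tried if nothing better plays.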
        sd_links = []
        non_direct_links = []
        non_direct_sd_links = []
        num_scrapers = len(universalscrapers.relevant_scrapers())
        index = 0
        try:
            for scraper_links in links_scraper():
                if dialog is not None and dialog.iscanceled():
                    return
                if dialog is not None:
                    index = index + 1
                    percent = int((index * 100) / num_scrapers)
                    dialog.update(percent)
                if scraper_links is not None:
                    random.shuffle(scraper_links)
                    for scraper_link in scraper_links:
                        if dialog is not None and dialog.iscanceled():
                            return False

                        if Sources().__check_skip_pairing(scraper_link):
                            continue

                        quality = Sources.__determine_quality(scraper_link["quality"])
                        preset = preset.lower()
                        if preset == "searchsd":
                            if quality == "HD":
                                continue
                        elif preset == "search":
                            if quality == "SD":
                                sd_links.append(scraper_link)

                        if scraper_link["direct"]:
                            resolved_link = universalscrapers.resolve(scraper_link["scraper"], scraper_link["url"])
                            result = output_function(
                                resolved_link,
                                showbusy=False,
                                ignore_dp=True,
                                item=listitem,
                                player=player,
                                resolver=resolveurl,
                            )
                            if result:
                                return result
                        else:
                            non_direct_links.append(scraper_link)

            for scraper_link in non_direct_links:
                if dialog is not None and dialog.iscanceled():
                    return False
                resolved_link = universalscrapers.resolve(scraper_link["scraper"], scraper_link["url"])
                result = output_function(
                    resolved_link,
                    showbusy=False,
                    ignore_dp=True,
                    item=listitem,
                    player=player,
                    resolver=resolveurl,
                )
                if result:
                    return result

            for scraper_link in sd_links:
                if dialog is not None and dialog.iscanceled():
                    return

                if scraper_link["direct"]:
                    resolved_link = universalscrapers.resolve(scraper_link["scraper"], scraper_link["url"])
                    result = output_function(
                        resolved_link,
                        showbusy=False,
                        ignore_dp=True,
                        item=listitem,
                        player=player,
                        resolver=resolveurl,
                    )
                    if result:
                        return result
                else:
                    non_direct_sd_links.append(scraper_link)

            for scraper_link in non_direct_sd_links:
                if dialog is not None and dialog.iscanceled():
                    return
                resolved_link = universalscrapers.resolve(scraper_link["scraper"], scraper_link["url"])
                result = output_function(
                    resolved_link,
                    showbusy=False,
                    ignore_dp=True,
                    item=listitem,
                    player=player,
                    resolver=resolveurl,
                )
                if result:
                    return result

            return False
        except:
            return False
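For context, a call to this helper might look like the sketch below. It assumes the example's module context (ADDON, koding, universalscrapers, resolveurl) is available, the title and IDs are placeholders, and the dialog is a standard xbmcgui.DialogProgress, which provides the update() and iscanceled() methods the function uses.

    # Illustrative usage only; the title and IDs are placeholders.
    import xbmcgui

    progress = xbmcgui.DialogProgress()
    progress.create("Searching for sources")

    # Movie lookup: tvshowtitle/premiered are None, so content resolves to "movie".
    played = get_sources(
        title="Example Movie",
        year=2016,
        imdb="tt0000000",
        tvdb=None,
        season=None,
        episode=None,
        tvshowtitle=None,
        premiered=None,
        preset="search",   # prefer HD, fall back to SD
        dialog=progress,   # progress bar updated as scrapers run
    )
    progress.close()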
Example #2
    def get_sources(
        title,
        year,
        imdb,
        tvdb,
        season,
        episode,
        tvshowtitle,
        premiered,
        timeout=30,
        preset="search",
        dialog=None,
        exclude=None,
        scraper_title=False,
        listitem=None,
        output_function=koding.Play_Video,
        skip_selector=False,
        player=None,
        icon=' ',
        fanart=' ',
    ):
        """
        Scrape for video sources using the universalscrapers library.
        Args:
            title: movie or episode title
            year: year the movie/episode came out
            imdb: imdb identifier
            tvdb: tvdb identifier
            season: season number
            episode: episode number
            tvshowtitle: title of the tv show
            premiered: year the tv show premiered
            timeout: timeout for scraping links
            preset: preferred quality of stream ("search" tries HD first,
                    "searchsd" skips HD links)
            dialog: progress dialog used for displaying messages
            exclude: list of scrapers to exclude
            scraper_title: when True, scrape episodes with `title` instead of
                           `tvshowtitle`; required if scrapers use an alternate spelling
            listitem: list item handed to the output function for playback
            output_function: callable used to play the resolved link
            skip_selector: skip the link-selection dialog even if it is enabled
            player: player instance passed through to the output function
            icon: icon image url, forwarded to plugin.video.destinyds when
                  the "jen_sources" setting is enabled
            fanart: fanart image url, forwarded to plugin.video.destinyds when
                    the "jen_sources" setting is enabled
        Returns:
            Boolean indicating playback success
        """
        
        year = str(year)
        content = "movie" if tvshowtitle is None else "episode"
        allow_debrid = ADDON.getSetting("allow_debrid") == "true"
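        # With the "jen_sources" setting enabled, hand the item off to
        # plugin.video.destinyds via Container.update instead of scraping here.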
        if ADDON.getSetting("jen_sources") == '1':
            if content == "movie":
                original_title = title
                episode = '%20'
                season = '%20'
            else:
                original_title = tvshowtitle
            logging.warning('movie info')
            logging.warning(content)
            logging.warning(original_title)
            logging.warning(year)
            logging.warning(imdb)
            logging.warning(tvshowtitle)

            try:
                # Build the Container.update command in pieces so the long query
                # string stays readable; EMPTY placeholders become %20 at the end.
                query = (
                    "data=%s&dates=EMPTY&description=%s-NEXTUP-&eng_name=%s&episode=%s"
                    "&fanart=%s&heb_name=%s&iconimage=%s&id=%s&isr=%s&mode2=4&name=%s"
                    "&original_title=%s&season=%s&show_original_year=%s&tmdbid=EMPTY"
                    "&url=%s&fast_link=%s&url=www"
                ) % (
                    str(year), urllib.quote_plus(' '), original_title, episode,
                    urllib.quote_plus(fanart), original_title, urllib.quote_plus(icon),
                    imdb, ' ', original_title, original_title, season, str(year),
                    urllib.quote_plus(''), urllib.quote_plus(''),
                )
                command = 'Container.update("plugin://plugin.video.destinyds/?%s",return)' % query
                xbmc.executebuiltin(command.replace('EMPTY', '%20'))

                #play(name,fast_link,iconimage,image,description,data,season,episode,original_title,name,heb_name,show_original_year,eng_name,isr,original_title,id,windows_play=True,auto_fast=False,nextup=True)

                logging.warning('PLAY NEXTUP FULLSCREEN')
                xbmc.executebuiltin("XBMC.Action(Fullscreen)")
                return '0'
            except Exception as e:
                logging.warning(e)
        if ADDON.getSetting("use_link_dialog") == "true" and not skip_selector:
            # use link selector
            if content == "movie":
                scraper = universalscrapers.scrape_movie_with_dialog
                link, rest = scraper(
                    title,
                    year,
                    imdb,
                    timeout=timeout,
                    exclude=exclude,
                    extended=True,
                    sort_function=Sources.sort_function,
                    enable_debrid=allow_debrid,
                )
            elif content == "episode":
                scraper = universalscrapers.scrape_episode_with_dialog
                link, rest = scraper(
                    tvshowtitle,
                    year,
                    premiered,
                    season,
                    episode,
                    imdb,
                    tvdb,
                    timeout=timeout,
                    exclude=exclude,
                    extended=True,
                    sort_function=Sources.sort_function,
                    enable_debrid=allow_debrid,
                )
            else:
                return

            if type(link) == dict and "path" in link:
                link = link["path"]
            if link is None:
                return False
            url = link["url"]
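            # With "link_fallthrough" enabled, keep trying the remaining results
            # in order whenever the selected link fails to play.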
            if ADDON.getSetting("link_fallthrough") == "true":
                played = False
                index = 0
                links = []
                for item in rest:
                    if type(item) == dict and "path" in item:
                        links.extend(item["path"][1])
                    else:
                        links.extend(item[1])
                index = links.index(link)
                links = links[index + 1 :]
                num_results = len(rest) + 1
                while not played:
                    try:
                        if dialog is not None and dialog.iscanceled():
                            return False
                        if dialog is not None:
                            index = index + 1
                            percent = int((index * 100) / num_results)
                            line = "%s - %s (%s)" % (
                                link["scraper"],
                                link["source"],
                                link["quality"],
                            )
                            dialog.update(percent, line)
                    except:
                        pass
                    try:
                        resolved_link = universalscrapers.resolve(
                            link["scraper"], link["url"]
                        )
                        played = output_function(
                            resolved_link,
                            showbusy=False,
                            ignore_dp=True,
                            item=listitem,
                            player=player,
                            resolver=resolveurl,
                        )
                        if played:
                            break
                        if not links:
                            # nothing left to fall through to
                            return False
                        link = links[0]
                        links = links[1:]
                    except:
                        return False
                return played
            else:
                resolved_link = universalscrapers.resolve(link["scraper"], link["url"])
                return output_function(
                    resolved_link,
                    showbusy=False,
                    ignore_dp=True,
                    item=listitem,
                    player=player,
                    resolver=resolveurl,
                )
        else:
            if content == "movie":
                scraper = universalscrapers.scrape_movie
                links_scraper = scraper(
                    title,
                    year,
                    imdb,
                    timeout=timeout,
                    exclude=exclude,
                    enable_debrid=allow_debrid,
                )

            elif content == "episode":
                if scraper_title:
                    tvshowtitle = title
                scraper = universalscrapers.scrape_episode
                links_scraper = scraper(
                    tvshowtitle,
                    year,
                    premiered,
                    season,
                    episode,
                    imdb,
                    tvdb,
                    timeout=timeout,
                    exclude=exclude,
                    enable_debrid=allow_debrid,
                )
            else:
                return

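        # Defer lower-priority links: SD links (under the "search" preset) and
        # non-direct hoster links are queued and only tried if nothing better plays.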
        sd_links = []
        non_direct_links = []
        non_direct_sd_links = []
        num_scrapers = len(universalscrapers.relevant_scrapers())
        index = 0
        try:
            for scraper_links in links_scraper():
                if dialog is not None and dialog.iscanceled():
                    return
                if dialog is not None:
                    index = index + 1
                    percent = int((index * 100) / num_scrapers)
                    dialog.update(percent)
                if scraper_links is not None:
                    random.shuffle(scraper_links)
                    for scraper_link in scraper_links:
                        if dialog is not None and dialog.iscanceled():
                            return False

                        if Sources().__check_skip_pairing(scraper_link):
                            continue

                        quality = Sources.__determine_quality(scraper_link["quality"])
                        preset = preset.lower()
                        if preset == "searchsd":
                            if quality == "HD":
                                continue
                        elif preset == "search":
                            if quality == "SD":
                                sd_links.append(scraper_link)

                        if scraper_link["direct"]:
                            resolved_link = universalscrapers.resolve(scraper_link["scraper"], scraper_link["url"])
                            result = output_function(
                                resolved_link,
                                showbusy=False,
                                ignore_dp=True,
                                item=listitem,
                                player=player,
                                resolver=resolveurl,
                            )
                            if result:
                                return result
                        else:
                            non_direct_links.append(scraper_link)

            for scraper_link in non_direct_links:
                if dialog is not None and dialog.iscanceled():
                    return False
                resolved_link = universalscrapers.resolve(scraper_link["scraper"], scraper_link["url"])
                result = output_function(
                    resolved_link,
                    showbusy=False,
                    ignore_dp=True,
                    item=listitem,
                    player=player,
                    resolver=resolveurl,
                )
                if result:
                    return result

            for scraper_link in sd_links:
                if dialog is not None and dialog.iscanceled():
                    return

                if scraper_link["direct"]:
                    resolved_link = universalscrapers.resolve(scraper_link["scraper"], scraper_link["url"])
                    result = output_function(
                        resolved_link,
                        showbusy=False,
                        ignore_dp=True,
                        item=listitem,
                        player=player,
                        resolver=resolveurl,
                    )
                    if result:
                        return result
                else:
                    non_direct_sd_links.append(scraper_link)

            for scraper_link in non_direct_sd_links:
                if dialog is not None and dialog.iscanceled():
                    return
                resolved_link = universalscrapers.resolve(scraper_link["scraper"], scraper_link["url"])
                result = output_function(
                    resolved_link,
                    showbusy=False,
                    ignore_dp=True,
                    item=listitem,
                    player=player,
                    resolver=resolveurl,
                )
                if result:
                    return result

            return False
        except:
            return False
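The jen_sources branch in this example assembles the plugin.video.destinyds query with one long format string. As a readability note, the same parameters (taken from that format string) could be gathered into a dict and encoded with urllib.urlencode; the helper name below is hypothetical and the exact parameter set the target addon expects is an assumption, so treat this as a sketch rather than the addon's API.

    # Hypothetical helper; parameter names mirror the format string in the
    # example, but the exact set plugin.video.destinyds expects is an assumption.
    import urllib  # Python 2, matching the urllib.quote_plus calls above
    import xbmc

    def open_in_destinyds(year, original_title, season, episode, imdb, icon, fanart):
        params = {
            "data": str(year),
            "description": " -NEXTUP-",
            "eng_name": original_title,
            "episode": episode,
            "fanart": fanart,
            "heb_name": original_title,
            "iconimage": icon,
            "id": imdb,
            "mode2": "4",
            "name": original_title,
            "original_title": original_title,
            "season": season,
            "show_original_year": str(year),
            "url": "www",
        }
        command = 'Container.update("plugin://plugin.video.destinyds/?%s",return)' % urllib.urlencode(params)
        xbmc.executebuiltin(command)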